rts-heap.c

     
   1  //! @file rts-heap.c
   2  //! @author J. Marcel van der Veer
   3  
   4  //! @section Copyright
   5  //!
   6  //! This file is part of Algol68G - an Algol 68 compiler-interpreter.
   7  //! Copyright 2001-2025 J. Marcel van der Veer [algol68g@xs4all.nl].
   8  
   9  //! @section License
  10  //!
  11  //! This program is free software; you can redistribute it and/or modify it 
  12  //! under the terms of the GNU General Public License as published by the 
  13  //! Free Software Foundation; either version 3 of the License, or 
  14  //! (at your option) any later version.
  15  //!
  16  //! This program is distributed in the hope that it will be useful, but 
  17  //! WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY 
  18  //! or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for 
  19  //! more details. You should have received a copy of the GNU General Public 
  20  //! License along with this program. If not, see [http://www.gnu.org/licenses/].
  21  
  22  //! @section Synopsis
  23  //!
  24  //! Generator and garbage collector routines.
  25  
  26  // The generator allocates space on the stack or in the heap and initialises dynamically sized objects.
  27  // 
  28  // A mark-and-sweep garbage collector defragments the heap. When called, it walks
  29  // the stack frames and marks the heap space that is still active. This marking
  30  // process is called "colouring" here since we "pour paint" into the heap.
  31  // The active blocks are then joined, the non-active blocks are forgotten.
  32  // 
  33  // When colouring the heap, "cookies" are placed in objects so as to detect
  34  // circular references.
  35  // 
  36  // Algol68G introduces several anonymous tags in the symbol tables that save
  37  // temporary REF or ROW results, so that they do not get prematurely swept.
  38  // 
  39  // The genie is not smart enough to handle every heap clog, e.g. when copying
  40  // STOWED objects. This may seem inelegant, but garbage collectors in general
  41  // cannot solve all core management problems. To avoid many of the "unforeseen"
  42  // heap clogs, we try to keep heap occupation low by garbage collecting 
  43  // occasionally, before it fills up completely. If this automatic mechanism does
  44  // not help, one can always invoke the garbage collector by calling "gc heap"
  45  // from Algol 68 source text.
  46  // 
  47  // Mark-and-sweep is simple, but since it walks recursive structures, it could
  48  // exhaust the C-stack (segment violation). A rough check is in place.
  49  // 
  50  // For dynamically sized objects, the bounds are evaluated first (right first, then down).
  51  // The object is generated keeping track of the bound-count.
  52  // 
  53  //      ...
  54  //      [#1]
  55  //      STRUCT
  56  //      (
  57  //      [#2]
  58  //      STRUCT
  59  //      (
  60  //      [#3] A a, b, ...
  61  //      )
  62  //      ,                       Advance bound-count here, max is #3
  63  //      [#4] B a, b, ...
  64  //      )
  65  //      ,                       Advance bound-count here, max is #4
  66  //      [#5] C a, b, ...
  67  //      ...
  68  // 
  69  // Bound-count is maximised when generator_stowed is entered recursively. 
  70  // Bound-count is advanced when completing a STRUCTURED_FIELD.
  71  // Note that A68G will not extend stack frames. Thus only 'static' LOC generators
  72  // live on the stack, while 'dynamic' LOC generators go into the heap. These
  73  // REFs in the heap nevertheless get local scope; here A68G differs from the
  74  // CDC ALGOL 68 approach, which put all generators in the heap.
  75  // Note that part of memory is called 'COMMON'. This is meant for a future extension
  76  // in which a68g would need to point to external objects. The addressing scheme is that
  77  // of a HEAP pointer: handle pointer + offset.
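
      // As an illustration, assuming the usual standard prelude bindings for the
      // genie_* routines defined below, the collector can be driven and inspected
      // from Algol 68 text:
      //
      //      gc heap;                 # force a collection right now #
      //      print ((garbage collections, garbage freed, garbage seconds))
      //
      // The preemptive variant only collects when the generator has flagged that
      // the heap or the handle pool is running low.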
  78  
  79  #include "a68g.h"
  80  #include "a68g-genie.h"
  81  #include "a68g-frames.h"
  82  #include "a68g-prelude.h"
  83  #include "a68g-parser.h"
  84  
  85  #define DEF_NODE(p) (NEXT_NEXT (NODE (TAX (p))))
  86  
  87  //! @brief Check overflow at size multiplication.
  88  
  89  BOOL_T size_mul_overflow (size_t u, size_t v)
  90  {
  91    if (u == 0 || v == 0) {
  92      return (A68G_FALSE);
  93    } else {
  94      return v > (MAX_MEM_SIZE / u);
  95    }
  96  }
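
      // For example, if MAX_MEM_SIZE were 2^31 - 1, then size_mul_overflow (65536, 65536)
      // yields A68G_TRUE since 65536 > (2^31 - 1) / 65536 = 32767, and indeed the product
      // 2^32 would not fit. heap_generator_2 and heap_generator_3 below apply this guard
      // before multiplying lengths and element sizes.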
  97  
  98  //! @brief PROC VOID gc heap
  99  
 100  void genie_gc_heap (NODE_T * p)
 101  {
 102    gc_heap (p, A68G_FP);
 103  }
 104  
 105  //! @brief PROC VOID preemptive gc heap
 106  
 107  void genie_preemptive_gc_heap (NODE_T * p)
 108  {
 109    if (A68G_GC (preemptive)) {
 110      gc_heap (p, A68G_FP);
 111      A68G_GC (preemptive) = A68G_FALSE;
 112    }
 113  }
 114  
 115  //! @brief INT blocks
 116  
 117  void genie_block (NODE_T * p)
 118  {
 119    PUSH_VALUE (p, 0, A68G_INT);
 120  }
 121  
 122  //! @brief INT garbage collections
 123  
 124  void genie_garbage_collections (NODE_T * p)
 125  {
 126    PUSH_VALUE (p, A68G_GC (sweeps), A68G_INT);
 127  }
 128  
 129  //! @brief INT garbage refused
 130  
 131  void genie_garbage_refused (NODE_T * p)
 132  {
 133    PUSH_VALUE (p, A68G_GC (refused), A68G_INT);
 134  }
 135  
 136  //! @brief LONG INT garbage freed
 137  
 138  void genie_garbage_freed (NODE_T * p)
 139  {
 140    PUSH_VALUE (p, A68G_GC (total), A68G_INT);
 141  }
 142  
 143  //! @brief REAL garbage seconds
 144  
 145  void genie_garbage_seconds (NODE_T * p)
 146  {
 147  // Note that this timing is a rough cut.
 148    PUSH_VALUE (p, A68G_GC (seconds), A68G_REAL);
 149  }
 150  
 151  //! @brief Size available for an object in the heap.
 152  
 153  size_t heap_available (void)
 154  {
 155    if (A68G (temp_heap_pointer) > A68G_HP) {
 156      return A68G (temp_heap_pointer) - A68G_HP;
 157    } else {
 158      return 0;
 159    }
 160  }
 161  
 162  //! @brief Initialise heap management.
 163  
 164  void genie_init_heap (NODE_T * p)
 165  {
 166    (void) p;
 167    if (A68G_HEAP == NO_BYTE) {
 168      diagnostic (A68G_RUNTIME_ERROR, TOP_NODE (&A68G_JOB), ERROR_MEMORY_FULL);
 169      exit_genie (TOP_NODE (&A68G_JOB), A68G_RUNTIME_ERROR);
 170    }
 171    if (A68G_HANDLES == NO_BYTE) {
 172      diagnostic (A68G_RUNTIME_ERROR, TOP_NODE (&A68G_JOB), ERROR_MEMORY_FULL);
 173      exit_genie (TOP_NODE (&A68G_JOB), A68G_RUNTIME_ERROR);
 174    }
 175    A68G_GC (seconds) = 0;
 176    A68G_GC (total) = 0;
 177    A68G_GC (sweeps) = 0;
 178    A68G_GC (refused) = 0;
 179    A68G_GC (preemptive) = A68G_FALSE;
 180    // Make sure we have some space for an A68 heap.
 181    ABEND ((A68G (fixed_heap_pointer) + 2 * A68G (storage_overhead)) >= A68G (temp_heap_pointer), ERROR_MEMORY_FULL, __func__);
 182    A68G_HP = A68G (fixed_heap_pointer);
 183    A68G (heap_is_fluid) = A68G_FALSE;
 184  // Assign handle space.
 185    A68G_HANDLE *z = (A68G_HANDLE *) A68G_HANDLES;
 186    A68G_GC (available_handles) = z;
 187    A68G_GC (busy_handles) = NO_HANDLE;
 188    size_t N = A68G (handle_pool_size) / SIZE_ALIGNED (A68G_HANDLE);
 189    A68G_GC (free_handles) = N;
 190    A68G_GC (max_handles) = N;
 191    for (int k = 0; k < N; k++) {
 192      STATUS (&(z[k])) = NULL_MASK;
 193      POINTER (&(z[k])) = NO_BYTE;
 194      SIZE (&(z[k])) = 0;
 195      NEXT (&z[k]) = (k == N - 1 ? NO_HANDLE : &z[k + 1]);
 196      PREVIOUS (&z[k]) = (k == 0 ? NO_HANDLE : &z[k - 1]);
 197    }
 198  }
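
      // After initialisation the handle pool is a single doubly linked free list:
      //
      //      available_handles -> z[0] <-> z[1] <-> ... <-> z[N - 1]
      //      busy_handles      -> NO_HANDLE
      //      free_handles = max_handles = N
      //
      // give_handle below unlinks handles from the head of this list, and
      // defragment_heap returns unreachable ones to it.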
 199  
 200  //! @brief Whether mode must be coloured.
 201  
 202  BOOL_T moid_needs_colouring (MOID_T * m)
 203  {
 204    if (IS_REF (m)) {
 205      return A68G_TRUE;
 206    } else if (IS (m, PROC_SYMBOL)) {
 207      return A68G_TRUE;
 208    } else if (IS_FLEX (m) || IS_ROW (m)) {
 209      return A68G_TRUE;
 210    } else if (IS_STRUCT (m) || IS_UNION (m)) {
 211      for (PACK_T *p = PACK (m); p != NO_PACK; FORWARD (p)) {
 212        if (moid_needs_colouring (MOID (p))) {
 213          return A68G_TRUE;
 214        }
 215      }
 216      return A68G_FALSE;
 217    } else if (m == M_SIMPLIN || m == M_SIMPLOUT) {
 218      return A68G_TRUE;
 219    } else if (m == M_SOUND) {
 220      return A68G_TRUE;
 221    } else {
 222      return A68G_FALSE;
 223    }
 224  }
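
      // For example, with plain INT and REAL fields:
      //
      //      REF INT                     needs colouring (a name can point into the heap)
      //      [] CHAR, FLEX [] CHAR       need colouring  (rows carry a heap descriptor)
      //      PROC (REAL) REAL            needs colouring (it may hold a locale)
      //      STRUCT (INT i, REAL r)      needs no colouring
      //      STRUCT (INT i, REF REAL r)  needs colouring (one of its fields is a REF)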
 225  
 226  //! @brief Colour all elements of a row.
 227  
 228  void colour_row_elements (A68G_REF * z, MOID_T * m)
 229  {
 230    A68G_ARRAY *arr; A68G_TUPLE *tup;
 231    GET_DESCRIPTOR (arr, tup, z);
 232    if (get_row_size (tup, DIM (arr)) == 0) {
 233  // Empty rows have a ghost element.
 234      BYTE_T *elem = ADDRESS (&ARRAY (arr));
 235      colour_object (&elem[0], SUB (m));
 236    } else {
 237  // The multi-dimensional garbage collector.
 238      BYTE_T *elem = ADDRESS (&ARRAY (arr));
 239      BOOL_T done = A68G_FALSE;
 240      initialise_internal_index (tup, DIM (arr));
 241      while (!done) {
 242        ADDR_T index = calculate_internal_index (tup, DIM (arr));
 243        ADDR_T addr = ROW_ELEMENT (arr, index);
 244        colour_object (&elem[addr], SUB (m));
 245        done = increment_internal_index (tup, DIM (arr));
 246      }
 247    }
 248  }
 249  
 250  //! @brief Colour an (active) object.
 251  
 252  void colour_object (BYTE_T * item, MOID_T * m)
 253  {
 254    if (item == NO_BYTE || m == NO_MOID) {
 255      return;
 256    }
 257    if (!moid_needs_colouring (m)) {
 258      return;
 259    }
 260  // Deeply recursive objects might exhaust the stack.
 261    LOW_STACK_ALERT (NO_NODE);
 262    if (IS_REF (m)) {
 263  // REF AMODE colour pointer and object to which it refers.
 264      A68G_REF *z = (A68G_REF *) item;
 265      if (INITIALISED (z) && IS_IN_HEAP (z)) {
 266        if (STATUS_TEST (REF_HANDLE (z), COOKIE_MASK)) {
 267          return;
 268        }
 269        STATUS_SET (REF_HANDLE (z), (COOKIE_MASK | COLOUR_MASK));
 270        if (!IS_NIL (*z)) {
 271          colour_object (ADDRESS (z), SUB (m));
 272        }
 273  //    STATUS_CLEAR (REF_HANDLE (z), COOKIE_MASK);.
 274      }
 275    } else if (IS_FLEXETY_ROW (m)) {
 276  // Claim the descriptor and the row itself.
 277      A68G_REF *z = (A68G_REF *) item;
 278      if (INITIALISED (z) && IS_IN_HEAP (z)) {
 279        if (STATUS_TEST (REF_HANDLE (z), COOKIE_MASK)) {
 280          return;
 281        }
 282  // An array is ALWAYS in the heap.
 283        STATUS_SET (REF_HANDLE (z), (COOKIE_MASK | COLOUR_MASK));
 284        A68G_ARRAY *arr; A68G_TUPLE *tup;
 285        GET_DESCRIPTOR (arr, tup, z);
 286        if (REF_HANDLE (&(ARRAY (arr))) != NO_HANDLE) {
 287  // Assume it is initialised.
 288          MOID_T *n = DEFLEX (m);
 289          STATUS_SET (REF_HANDLE (&(ARRAY (arr))), COLOUR_MASK);
 290          if (moid_needs_colouring (SUB (n))) {
 291            colour_row_elements (z, n);
 292          }
 293        }
 294  //    STATUS_CLEAR (REF_HANDLE (z), COOKIE_MASK);.
 295        (void) tup;
 296      }
 297    } else if (IS_STRUCT (m)) {
 298  // STRUCTures - colour fields.
 299      for (PACK_T *p = PACK (m); p != NO_PACK; FORWARD (p)) {
 300        colour_object (&item[OFFSET (p)], MOID (p));
 301      }
 302    } else if (IS_UNION (m)) {
 303  // UNIONs - a united object may contain a value that needs colouring.
 304      A68G_UNION *z = (A68G_UNION *) item;
 305      if (INITIALISED (z)) {
 306        MOID_T *united_moid = (MOID_T *) VALUE (z);
 307        colour_object (&item[A68G_UNION_SIZE], united_moid);
 308      }
 309    } else if (IS (m, PROC_SYMBOL)) {
 310  // PROCs - save a locale and the objects it points to.
 311      A68G_PROCEDURE *z = (A68G_PROCEDURE *) item;
 312      if (INITIALISED (z) && LOCALE (z) != NO_HANDLE && !(STATUS_TEST (LOCALE (z), COOKIE_MASK))) {
 313        BYTE_T *u = POINTER (LOCALE (z));
 314        STATUS_SET (LOCALE (z), (COOKIE_MASK | COLOUR_MASK));
 315        for (PACK_T *s = PACK (MOID (z)); s != NO_PACK; FORWARD (s)) {
 316          if (VALUE ((A68G_BOOL *) & u[0]) == A68G_TRUE) {
 317            colour_object (&u[SIZE (M_BOOL)], MOID (s));
 318          }
 319          u = &(u[SIZE (M_BOOL) + SIZE (MOID (s))]);
 320        }
 321  //    STATUS_CLEAR (LOCALE (z), COOKIE_MASK);.
 322      }
 323    } else if (m == M_SOUND) {
 324  // Claim the data of a SOUND object, which is in the heap.
 325      A68G_SOUND *w = (A68G_SOUND *) item;
 326      if (INITIALISED (w)) {
 327        STATUS_SET (REF_HANDLE (&(DATA (w))), (COOKIE_MASK | COLOUR_MASK));
 328      }
 329    } else if (m == M_SIMPLIN || m == M_SIMPLOUT) {
 330      A68G_UNION *z = (A68G_UNION *) item;
 331      if (INITIALISED (z)) {
 332        MOID_T *united_moid = (MOID_T *) VALUE (z);
 333        colour_object (&item[A68G_UNION_SIZE], united_moid);
 334      }
 335    }
 336  }
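
      // The COOKIE_MASK set above is what stops circular data from being coloured
      // forever. For instance, with
      //
      //      MODE LINK = STRUCT (INT item, REF LINK next)
      //
      // a cyclic chain of LINKs eventually revisits a handle whose cookie is already
      // set, and colour_object returns at once. The cookies are cleared again during
      // compaction in defragment_heap and at the start of the next gc_heap.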
 337  
 338  //! @brief Colour active objects in the heap.
 339  
 340  void colour_heap (ADDR_T fp)
 341  {
 342    while (fp != 0) {
 343      NODE_T *p = FRAME_TREE (fp);
 344      TABLE_T *t = TABLE (p);
 345      if (t != NO_TABLE) {
 346        for (TAG_T *q = IDENTIFIERS (t); q != NO_TAG; FORWARD (q)) {
 347          colour_object (FRAME_LOCAL (fp, OFFSET (q)), MOID (q));
 348        }
 349        for (TAG_T *q = ANONYMOUS (t); q != NO_TAG; FORWARD (q)) {
 350          colour_object (FRAME_LOCAL (fp, OFFSET (q)), MOID (q));
 351        }
 352      }
 353      fp = FRAME_DYNAMIC_LINK (fp);
 354    }
 355  }
 356  
 357  //! @brief Join all active blocks in the heap.
 358  
 359  void defragment_heap (void)
 360  {
 361    A68G_HANDLE *z;
 362  // Free handles.
 363    z = A68G_GC (busy_handles);
 364    while (z != NO_HANDLE) {
 365      if (!(STATUS_TEST (z, COLOUR_MASK)) && !(STATUS_TEST (z, BLOCK_GC_MASK))) {
 366        A68G_HANDLE *y = NEXT (z);
 367        if (PREVIOUS (z) == NO_HANDLE) {
 368          A68G_GC (busy_handles) = NEXT (z);
 369        } else {
 370          NEXT (PREVIOUS (z)) = NEXT (z);
 371        }
 372        if (NEXT (z) != NO_HANDLE) {
 373          PREVIOUS (NEXT (z)) = PREVIOUS (z);
 374        }
 375        NEXT (z) = A68G_GC (available_handles);
 376        PREVIOUS (z) = NO_HANDLE;
 377        if (NEXT (z) != NO_HANDLE) {
 378          PREVIOUS (NEXT (z)) = z;
 379        }
 380        A68G_GC (available_handles) = z;
 381        STATUS_CLEAR (z, ALLOCATED_MASK);
 382        A68G_GC (freed) += SIZE (z);
 383        A68G_GC (free_handles)++;
 384        z = y;
 385      } else {
 386        FORWARD (z);
 387      }
 388    }
 389  // There can be no uncoloured allocated handle.
 390    for (z = A68G_GC (busy_handles); z != NO_HANDLE; FORWARD (z)) {
 391      ABEND (!(STATUS_TEST (z, COLOUR_MASK)) && !(STATUS_TEST (z, BLOCK_GC_MASK)), ERROR_INTERNAL_CONSISTENCY, __func__);
 392    }
 393  // Defragment the heap.
 394    A68G_HP = A68G (fixed_heap_pointer);
 395    for (z = A68G_GC (busy_handles); z != NO_HANDLE && NEXT (z) != NO_HANDLE; FORWARD (z)) {
 396      ;
 397    }
 398    for (; z != NO_HANDLE; BACKWARD (z)) {
 399      BYTE_T *dst = HEAP_ADDRESS (A68G_HP);
 400      if (dst != POINTER (z)) {
 401        MOVE (dst, POINTER (z), SIZE (z));
 402      }
 403      STATUS_CLEAR (z, (COLOUR_MASK | COOKIE_MASK));
 404      POINTER (z) = dst;
 405      A68G_HP += (SIZE (z));
 406      ABEND (A68G_HP % A68G_ALIGNMENT != 0, ERROR_ALIGNMENT, __func__);
 407    }
 408  }
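
      // A compaction sketch: suppose three busy blocks A, B and C, with B uncoloured.
      //
      //      before:  [A....][B..][C......]    A68G_HP above C
      //      after :  [A....][C......]         A68G_HP directly above C
      //
      // B's handle moves to the free list, C is MOVEd down so that the live blocks are
      // again contiguous from fixed_heap_pointer upward, and POINTER of every surviving
      // handle is updated to its new address.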
 409  
 410  //! @brief Clean up garbage and defragment the heap.
 411  
 412  void gc_heap (NODE_T * p, ADDR_T fp)
 413  {
 414  // Must start with fp = current frame_pointer.
 415    A68G_HANDLE *z;
 416    REAL_T t0, t1;
 417  #if defined (BUILD_PARALLEL_CLAUSE)
 418    if (OTHER_THREAD (FRAME_THREAD_ID (A68G_FP), A68G_PAR (main_thread_id))) {
 419      A68G_GC (refused)++;
 420      return;
 421    }
 422  #endif
 423    if (STATUS_TEST (p, BLOCK_GC_MASK)) {
 424      A68G_GC (refused)++;
 425      return;
 426    }
 427    if (OPTION_CONSERVATIVE_GC (&A68G_JOB) == A68G_GC_HALT) {
 428      A68G_GC (refused)++;
 429      return;
 430    }
 431    if (OPTION_CONSERVATIVE_GC (&A68G_JOB) == A68G_GC_SAFE && (A68G_GC (sema) > 0)) {
 432      A68G_GC (refused)++;
 433      return;
 434    }
 435  // Take no risk when intermediate results are on the stack.
 436    if (OPTION_CONSERVATIVE_GC (&A68G_JOB) && (A68G_SP != A68G (stack_start))) {
 437      A68G_GC (refused)++;
 438      return;
 439    }
 440  // Give it a whirl then.
 441    t0 = seconds ();
 442  // Unfree handles are subject to inspection.
 443  // Clear their colour and cookie bits before colouring.
 444    for (z = A68G_GC (busy_handles); z != NO_HANDLE; FORWARD (z)) {
 445      STATUS_CLEAR (z, (COLOUR_MASK | COOKIE_MASK));
 446    }
 447  // Pour paint into the heap to reveal active objects.
 448    colour_heap (fp);
 449  // Start freeing and compacting.
 450    A68G_GC (freed) = 0;
 451    defragment_heap ();
 452  // Stats and logging.
 453    A68G_GC (total) += A68G_GC (freed);
 454    A68G_GC (sweeps)++;
 455    A68G_GC (preemptive) = A68G_FALSE;
 456    t1 = seconds ();
 457  // The C optimiser can make the last digit differ, so the next condition is
 458  // needed to determine a positive time difference.
 459    if ((t1 - t0) > ((REAL_T) A68G (clock_res) / 2.0)) {
 460      A68G_GC (seconds) += (t1 - t0);
 461    } else {
 462      A68G_GC (seconds) += ((REAL_T) A68G (clock_res) / 2.0);
 463    }
 464  // Call the event handler.
 465    genie_call_event_routine (p, M_PROC_VOID, &A68G (on_gc_event), A68G_SP, A68G_FP);
 466  }
 467  
 468  //! @brief Yield a handle that will point to a block in the heap.
 469  
 470  A68G_HANDLE *give_handle (NODE_T * p, MOID_T * a68m)
 471  {
 472    if (A68G_GC (available_handles) != NO_HANDLE) {
 473      A68G_HANDLE *x = A68G_GC (available_handles);
 474      A68G_GC (available_handles) = NEXT (x);
 475      if (A68G_GC (available_handles) != NO_HANDLE) {
 476        PREVIOUS (A68G_GC (available_handles)) = NO_HANDLE;
 477      }
 478      STATUS (x) = ALLOCATED_MASK;
 479      POINTER (x) = NO_BYTE;
 480      SIZE (x) = 0;
 481      MOID (x) = a68m;
 482      NEXT (x) = A68G_GC (busy_handles);
 483      PREVIOUS (x) = NO_HANDLE;
 484      if (NEXT (x) != NO_HANDLE) {
 485        PREVIOUS (NEXT (x)) = x;
 486      }
 487      A68G_GC (busy_handles) = x;
 488      A68G_GC (free_handles)--;
 489      return x;
 490    } else {
 491  // Do not auto-GC!.
 492      diagnostic (A68G_RUNTIME_ERROR, p, ERROR_MEMORY_FULL);
 493      exit_genie (p, A68G_RUNTIME_ERROR);
 494    }
 495    return NO_HANDLE;
 496  }
 497  
 498  //! @brief Give a block of heap for an object of indicated mode.
 499  
 500  A68G_REF heap_generator (NODE_T * p, MOID_T * mode, size_t size)
 501  {
 502    ABEND (size < 0, ERROR_INVALID_SIZE, __func__);
 503    size = A68G_ALIGN (size);
 504    if (heap_available () >= size) {
 505      A68G_REF z;
 506      STATUS (&z) = (STATUS_MASK_T) (INIT_MASK | IN_HEAP_MASK);
 507      OFFSET (&z) = 0;
 508      A68G_HANDLE *x = give_handle (p, mode);
 509      SIZE (x) = size;
 510      POINTER (x) = HEAP_ADDRESS (A68G_HP);
 511      FILL (POINTER (x), 0, size);
 512      REF_SCOPE (&z) = PRIMAL_SCOPE;
 513      REF_HANDLE (&z) = x;
 514      ABEND (((size_t) ADDRESS (&z)) % A68G_ALIGNMENT != 0, ERROR_ALIGNMENT, __func__);
 515      A68G_HP += size;
 516      // Raise a flag for a preemptive sweep at a convenient moment.
 517      if (heap_available () < (A68G (storage_overhead) + size) || A68G_GC (free_handles) < 100) {
 518        // Emergency break.
 519        A68G_GC (preemptive) = A68G_TRUE;
 520      } else {
 521        // General case.
 522        REAL_T _f_ = (REAL_T) A68G_HP / (REAL_T) heap_available ();
 523        REAL_T _g_ = (REAL_T) (A68G_GC (max_handles) - A68G_GC (free_handles)) / (REAL_T) A68G_GC (max_handles);
 524        if (_f_ > PREEMPTIVE_FRACTION || _g_ > PREEMPTIVE_FRACTION) {
 525           A68G_GC (preemptive) = A68G_TRUE;
 526        }
 527      }
 528      return z;
 529    } else {
 530  // Do not auto-GC!.
 531      diagnostic (A68G_RUNTIME_ERROR, p, ERROR_MEMORY_FULL);
 532      exit_genie (p, A68G_RUNTIME_ERROR);
 533      return nil_ref;
 534    }
 535  }
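
      // A worked example of the trigger above, supposing PREEMPTIVE_FRACTION were 0.8:
      // with A68G_HP at 900 000 and heap_available () returning 100 000, _f_ = 9.0 > 0.8,
      // so the next convenient call of genie_preemptive_gc_heap will collect. Likewise
      // _g_ exceeds 0.8 as soon as more than 80% of the handles are in use.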
 536  
 537  //! @brief Give a block of heap for an object of indicated mode.
 538  
 539  A68G_REF heap_generator_2 (NODE_T * p, MOID_T * mode, size_t len, size_t size)
 540  {
 541    if (len == 0 || size == 0) {
 542      return heap_generator (p, mode, 0);
 543    } else if (size_mul_overflow (len, size)) {
 544      diagnostic (A68G_RUNTIME_ERROR, p, ERROR_OBJECT_TOO_LARGE, mode);
 545      exit_genie (p, A68G_RUNTIME_ERROR);
 546    } else {
 547      return heap_generator (p, mode, len * size);
 548    }
 549    return nil_ref;
 550  }
 551  
 552  //! @brief Give a block of heap for an object of indicated mode.
 553  
 554  A68G_REF heap_generator_3 (NODE_T * p, MOID_T * mode, size_t len1, size_t len2, size_t size)
 555  {
 556    if (len1 == 0 || len2 == 0) {
 557      return heap_generator (p, mode, 0);
 558    } else if (size_mul_overflow (len1, len2)) {
 559      diagnostic (A68G_RUNTIME_ERROR, p, ERROR_OBJECT_TOO_LARGE, mode);
 560      exit_genie (p, A68G_RUNTIME_ERROR);
 561    } else {
 562      return heap_generator_2 (p, mode, len1 * len2, size);
 563    }
 564    return nil_ref;
 565  }
 566  
 567  // Following implements the generator.
 568  
 569  //! @brief Whether a moid needs work in allocation.
 570  
 571  BOOL_T mode_needs_allocation (MOID_T * m)
 572  {
 573    if (IS_UNION (m)) {
 574      return A68G_FALSE;
 575    } else {
 576      return HAS_ROWS (m);
 577    }
 578  }
 579  
 580  //! @brief Prepare bounds.
 581  
 582  void genie_compute_bounds (NODE_T * p)
 583  {
 584    for (; p != NO_NODE; FORWARD (p)) {
 585      if (IS (p, BOUNDS_LIST)) {
 586        genie_compute_bounds (SUB (p));
 587      } else if (IS (p, BOUND)) {
 588        genie_compute_bounds (SUB (p));
 589      } else if (IS (p, UNIT)) {
 590        if (NEXT (p) != NO_NODE && (is_one_of (NEXT (p), COLON_SYMBOL, DOTDOT_SYMBOL, STOP))) {
 591          GENIE_UNIT (p);
 592          p = NEXT_NEXT (p);
 593        } else {
 594  // Default lower bound.
 595          PUSH_VALUE (p, 1, A68G_INT);
 596        }
 597        GENIE_UNIT (p);
 598      }
 599    }
 600  }
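
      // For example, for the bounds in LOC [n, 1 : m] REAL this pushes, in order:
      //
      //      1, n     default lower bound, then the evaluated upper bound
      //      1, m     explicit lower bound, then the upper bound
      //
      // so that genie_generator_stowed finds one (lwb, upb) pair of INTs on the stack
      // for every dimension.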
 601  
 602  //! @brief Prepare bounds for a row.
 603  
 604  void genie_generator_bounds (NODE_T * p)
 605  {
 606    LOW_STACK_ALERT (p);
 607    for (; p != NO_NODE; FORWARD (p)) {
 608      if (IS (p, BOUNDS)) {
 609        genie_compute_bounds (SUB (p));
 610      } else if (IS (p, INDICANT) && IS_LITERALLY (p, "STRING")) {
 611        return;
 612      } else if (IS (p, INDICANT)) {
 613        if (TAX (p) != NO_TAG && HAS_ROWS (MOID (TAX (p)))) {
 614  // Continue from definition at MODE A = ....
 615          genie_generator_bounds (DEF_NODE (p));
 616        }
 617      } else if (IS (p, DECLARER) && !mode_needs_allocation (MOID (p))) {
 618        return;
 619      } else {
 620        genie_generator_bounds (SUB (p));
 621      }
 622    }
 623  }
 624  
 625  //! @brief Allocate a structure.
 626  
 627  void genie_generator_field (NODE_T * p, BYTE_T ** faddr, NODE_T ** decl, ADDR_T * cur_sp, ADDR_T * top_sp)
 628  {
 629    for (; p != NO_NODE; FORWARD (p)) {
 630      if (IS (p, STRUCTURED_FIELD)) {
 631        genie_generator_field (SUB (p), faddr, decl, cur_sp, top_sp);
 632      }
 633      if (IS (p, DECLARER)) {
 634        (*decl) = SUB (p);
 635        FORWARD (p);
 636      }
 637      if (IS (p, FIELD_IDENTIFIER)) {
 638        MOID_T *fmoid = MOID (*decl);
 639        if (HAS_ROWS (fmoid) && ISNT (fmoid, UNION_SYMBOL)) {
 640          ADDR_T pop_sp = *cur_sp;
 641          genie_generator_stowed (*decl, *faddr, NO_REF, cur_sp);
 642          *top_sp = *cur_sp;
 643          *cur_sp = pop_sp;
 644        }
 645        (*faddr) += SIZE (fmoid);
 646      }
 647    }
 648  }
 649  
 650  //! @brief Allocate a structure.
 651  
 652  void genie_generator_struct (NODE_T * p, BYTE_T ** faddr, ADDR_T * cur_sp)
 653  {
 654    for (; p != NO_NODE; FORWARD (p)) {
 655      if (IS (p, STRUCTURED_FIELD_LIST)) {
 656        genie_generator_struct (SUB (p), faddr, cur_sp);
 657      } else if (IS (p, STRUCTURED_FIELD)) {
 658        NODE_T *decl = NO_NODE;
 659        ADDR_T top_sp = *cur_sp;
 660        genie_generator_field (SUB (p), faddr, &decl, cur_sp, &top_sp);
 661        *cur_sp = top_sp;
 662      }
 663    }
 664  }
 665  
 666  //! @brief Allocate a stowed object.
 667  
 668  void genie_generator_stowed (NODE_T * p, BYTE_T * addr, NODE_T ** decl, ADDR_T * cur_sp)
 669  {
 670    if (p == NO_NODE) {
 671      return;
 672    } else if (IS (p, INDICANT) && IS_LITERALLY (p, "STRING")) {
 673  // The standard prelude definition is hard coded here.
 674      *((A68G_REF *) addr) = empty_string (p);
 675      return;
 676    } else if (IS (p, INDICANT) && TAX (p) != NO_TAG) {
 677  // Continue from definition at MODE A = ..
 678      genie_generator_stowed (DEF_NODE (p), addr, decl, cur_sp);
 679      return;
 680    } else if (IS (p, DECLARER) && mode_needs_allocation (MOID (p))) {
 681      genie_generator_stowed (SUB (p), addr, decl, cur_sp);
 682      return;
 683    } else if (IS_STRUCT (p)) {
 684      BYTE_T *faddr = addr;
 685      genie_generator_struct (SUB_NEXT (p), &faddr, cur_sp);
 686      return;
 687    } else if (IS_FLEX (p)) {
 688      genie_generator_stowed (NEXT (p), addr, decl, cur_sp);
 689      return;
 690    } else if (IS (p, BOUNDS)) {
 691      A68G_REF desc;
 692      MOID_T *rmod = MOID (p), *smod = MOID (NEXT (p));
 693      BYTE_T *bounds = STACK_ADDRESS (*cur_sp);
 694      int dim = DIM (DEFLEX (rmod)), esiz = SIZE (smod), rsiz = 1;
 695      BOOL_T alloc_sub = A68G_FALSE, alloc_str = A68G_FALSE;
 696      NODE_T *in = SUB_NEXT (p);
 697      if (IS (in, INDICANT) && IS_LITERALLY (in, "STRING")) {
 698        alloc_str = A68G_TRUE;
 699        alloc_sub = A68G_FALSE;
 700      } else {
 701        alloc_sub = mode_needs_allocation (smod);
 702        alloc_str = A68G_FALSE;
 703      }
 704      desc = heap_generator (p, rmod, DESCRIPTOR_SIZE (dim));
 705      A68G_ARRAY *arr; A68G_TUPLE *tup;
 706      GET_DESCRIPTOR (arr, tup, &desc);
 707      for (int k = 0; k < dim; k++) {
 708        CHECK_INIT (p, INITIALISED ((A68G_INT *) bounds), M_INT);
 709        LWB (&tup[k]) = VALUE ((A68G_INT *) bounds);
 710        bounds += SIZE (M_INT);
 711        CHECK_INIT (p, INITIALISED ((A68G_INT *) bounds), M_INT);
 712        UPB (&tup[k]) = VALUE ((A68G_INT *) bounds);
 713        bounds += SIZE (M_INT);
 714        SPAN (&tup[k]) = rsiz;
 715        SHIFT (&tup[k]) = LWB (&tup[k]) * SPAN (&tup[k]);
 716        rsiz *= ROW_SIZE (&tup[k]);
 717      }
 718      DIM (arr) = dim;
 719      MOID (arr) = smod;
 720      ELEM_SIZE (arr) = esiz;
 721      SLICE_OFFSET (arr) = 0;
 722      FIELD_OFFSET (arr) = 0;
 723      (*cur_sp) += (dim * 2 * SIZE (M_INT));
 724  // Generate a new row. Note that STRING is handled explicitly since
 725  // it has implicit bounds.
 726      if (rsiz == 0) {
 727  // Generate a ghost element.
 728        ADDR_T top_sp = *cur_sp;
 729        ARRAY (arr) = heap_generator (p, rmod, esiz);
 730        BYTE_T *elem = ADDRESS (&(ARRAY (arr)));
 731        if (alloc_sub) {
 732          genie_generator_stowed (NEXT (p), &(elem[0]), NO_REF, cur_sp);
 733          top_sp = *cur_sp;
 734        } else if (alloc_str) {
 735          *(A68G_REF *) elem = empty_string (p);
 736        }
 737        (*cur_sp) = top_sp;
 738      } else {
 739        ADDR_T pop_sp = *cur_sp, top_sp = *cur_sp;
 740        ARRAY (arr) = heap_generator_2 (p, rmod, rsiz, esiz);
 741        BYTE_T *elem = ADDRESS (&(ARRAY (arr)));
 742        for (int k = 0; k < rsiz; k++) {
 743          if (alloc_sub) {
 744            (*cur_sp) = pop_sp;
 745            genie_generator_stowed (NEXT (p), &(elem[k * esiz]), NO_REF, cur_sp);
 746            top_sp = *cur_sp;
 747          } else if (alloc_str) {
 748            *(A68G_REF *) (&(elem[k * esiz])) = empty_string (p);
 749          }
 750        }
 751        (*cur_sp) = top_sp;
 752      }
 753      *(A68G_REF *) addr = desc;
 754      return;
 755    }
 756  }
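
      // A worked example of the descriptor built above: for bounds [1 : 2, 1 : 3] the
      // loop yields
      //
      //      tup[0]: LWB 1, UPB 2, SPAN 1, SHIFT 1
      //      tup[1]: LWB 1, UPB 3, SPAN 2, SHIFT 2
      //
      // and rsiz = 6 elements are generated. Since SHIFT = LWB * SPAN, element [i, j]
      // lands at offset (1 * i - 1) + (2 * j - 2) elements from ARRAY (arr), so [1, 1]
      // is at offset 0 and the first subscript varies fastest.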
 757  
 758  //! @brief Generate space and push a REF.
 759  
 760  void genie_generator_internal (NODE_T * p, MOID_T * ref_mode, TAG_T * tag, LEAP_T leap, ADDR_T sp)
 761  {
 762  // Set up a REF MODE object, either in the stack or in the heap.
 763    MOID_T *mode = SUB (ref_mode);
 764    A68G_REF name = nil_ref;
 765    if (leap == LOC_SYMBOL) {
 766      STATUS (&name) = (STATUS_MASK_T) (INIT_MASK | IN_FRAME_MASK);
 767      REF_HANDLE (&name) = (A68G_HANDLE *) & nil_handle;
 768      OFFSET (&name) = A68G_FP + FRAME_INFO_SIZE + OFFSET (tag);
 769      REF_SCOPE (&name) = A68G_FP;
 770    } else if (leap == -LOC_SYMBOL && NON_LOCAL (p) != NO_TABLE) {
 771      name = heap_generator (p, mode, SIZE (mode));
 772      ADDR_T lev;
 773      FOLLOW_SL (lev, LEVEL (NON_LOCAL (p)));
 774      REF_SCOPE (&name) = lev;
 775    } else if (leap == -LOC_SYMBOL) {
 776      name = heap_generator (p, mode, SIZE (mode));
 777      REF_SCOPE (&name) = A68G_FP;
 778    } else if (leap == HEAP_SYMBOL || leap == -HEAP_SYMBOL) {
 779      name = heap_generator (p, mode, SIZE (mode));
 780      REF_SCOPE (&name) = PRIMAL_SCOPE;
 781    } else if (leap == NEW_SYMBOL || leap == -NEW_SYMBOL) {
 782      name = heap_generator (p, mode, SIZE (mode));
 783      REF_SCOPE (&name) = PRIMAL_SCOPE;
 784    } else {
 785      ABEND (A68G_TRUE, ERROR_INTERNAL_CONSISTENCY, __func__);
 786    }
 787    if (HAS_ROWS (mode)) {
 788      ADDR_T cur_sp = sp;
 789      genie_generator_stowed (p, ADDRESS (&name), NO_REF, &cur_sp);
 790    }
 791    PUSH_REF (p, name);
 792  }
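
      // In Algol 68 terms the cases above correspond to, for instance:
      //
      //      REF INT x = LOC INT;      # name in the current frame, scope A68G_FP #
      //      REF INT y = HEAP INT;     # object in the heap, scope PRIMAL_SCOPE   #
      //
      // A 'dynamic' LOC (the -LOC_SYMBOL cases) also puts the object in the heap, but
      // gives the name local scope, as sketched in the header comment of this file.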
 793  
 794  //! @brief Push a name referring to allocated space.
 795  
 796  PROP_T genie_generator (NODE_T * p)
 797  {
 798    ADDR_T pop_sp = A68G_SP;
 799    if (NEXT_SUB (p) != NO_NODE) {
 800      genie_generator_bounds (NEXT_SUB (p));
 801    }
 802    genie_generator_internal (NEXT_SUB (p), MOID (p), TAX (p), -ATTRIBUTE (SUB (p)), pop_sp);
 803    A68G_REF z;
 804    POP_REF (p, &z);
 805    A68G_SP = pop_sp;
 806    PUSH_REF (p, z);
 807    PROP_T self;
 808    UNIT (&self) = genie_generator;
 809    SOURCE (&self) = p;
 810    return self;
 811  }
 812  
 813  // Control of C heap
 814  
 815  //! @brief Discard_heap.
 816  
 817  void discard_heap (void)
 818  {
 819    a68g_free (A68G_HEAP);
 820    A68G (fixed_heap_pointer) = 0;
 821    A68G (temp_heap_pointer) = 0;
 822  }
     

