rts-heap.c

     
   1  //! @file rts-heap.c
   2  //! @author J. Marcel van der Veer
   3  
   4  //! @section Copyright
   5  //!
   6  //! This file is part of Algol68G - an Algol 68 compiler-interpreter.
   7  //! Copyright 2001-2024 J. Marcel van der Veer [algol68g@xs4all.nl].
   8  
   9  //! @section License
  10  //!
  11  //! This program is free software; you can redistribute it and/or modify it 
  12  //! under the terms of the GNU General Public License as published by the 
  13  //! Free Software Foundation; either version 3 of the License, or 
  14  //! (at your option) any later version.
  15  //!
  16  //! This program is distributed in the hope that it will be useful, but 
  17  //! WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY 
  18  //! or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for 
  19  //! more details. You should have received a copy of the GNU General Public 
  20  //! License along with this program. If not, see [http://www.gnu.org/licenses/].
  21  
  22  //! @section Synopsis
  23  //!
  24  //! Generator and garbage collector routines.
  25  
  26  // The generator allocates space in stack or heap and initialises dynamically sized objects.
  27  // 
  28  // A mark-and-gc garbage collector defragments the heap. When called, it walks
  29  // the stack frames and marks the heap space that is still active. This marking
  30  // process is called "colouring" here since we "pour paint" into the heap.
  31  // The active blocks are then joined, the non-active blocks are forgotten.
  32  // 
  33  // When colouring the heap, "cookies" are placed in objects as to find circular
  34  // references.
  35  // 
  36  // Algol68G introduces several anonymous tags in the symbol tables that save
  37  // temporary REF or ROW results, so that they do not get prematurely swept.
  38  // 
  39  // The genie is not smart enough to handle every heap clog, e.g. when copying
  40  // STOWED objects. This seems not very elegant, but garbage collectors in general
  41  // cannot solve all core management problems. To avoid many of the "unforeseen"
  42  // heap clogs, we try to keep heap occupation low by garbage collecting 
  43  // occasionally, before it fills up completely. If this automatic mechanism does
  44  // not help, one can always invoke the garbage collector by calling "gc heap"
  45  // from Algol 68 source text.
  46  // 
  47  // Mark-and-collect is simple but since it walks recursive structures, it could
  48  // exhaust the C-stack (segment violation). A rough check is in place.
  49  // 
  50  // For dynamically sized objects, first bounds are evaluated (right first, then down).
  51  // The object is generated keeping track of the bound-count.
  52  // 
  53  //      ...
  54  //      [#1]
  55  //      STRUCT
  56  //      (
  57  //      [#2]
  58  //      STRUCT
  59  //      (
  60  //      [#3] A a, b, ...
  61  //      )
  62  //      ,                       Advance bound-count here, max is #3
  63  //      [#4] B a, b, ...
  64  //      )
  65  //      ,                       Advance bound-count here, max is #4
  66  //      [#5] C a, b, ...
  67  //      ...
  68  // 
  69  // Bound-count is maximised when generator_stowed is entered recursively. 
  70  // Bound-count is advanced when completing a STRUCTURED_FIELD.
  71  // Note that A68G will not extend stack frames. Thus only 'static' LOC generators
  72  // are in the stack, and 'dynamic' LOC generators go into the heap. These local 
  73  // REFs in the heap get local scope however, and A68G's approach differs from the 
  74  // CDC ALGOL 68 approach that put all generators in the heap.
  75  // Note that part of memory is called 'COMMON'. This is meant for future extension
// where a68g would need to point to external objects. The addressing scheme is that
  77  // of a HEAP pointer - handle pointer + offset.
  78  
  79  #include "a68g.h"
  80  #include "a68g-genie.h"
  81  #include "a68g-frames.h"
  82  #include "a68g-prelude.h"
  83  #include "a68g-parser.h"
  84  
  85  #define DEF_NODE(p) (NEXT_NEXT (NODE (TAX (p))))
  86  
//! @brief PROC VOID gc heap

// Genie routine behind the Algol 68 procedure "gc heap": explicitly runs the
// garbage collector, scanning frames from the current frame pointer A68_FP.

void genie_gc_heap (NODE_T * p)
{
  gc_heap (p, A68_FP);
}
  93  
  94  //! @brief PROC VOID preemptive gc heap
  95  
  96  void genie_preemptive_gc_heap (NODE_T * p)
  97  {
  98    if (A68_GC (preemptive)) {
  99      gc_heap (p, A68_FP);
 100    }
 101  }
 102  
//! @brief INT blocks

// Always pushes 0: block counting is not implemented in this build.
void genie_block (NODE_T * p)
{
  PUSH_VALUE (p, 0, A68_INT);
}
 109  
//! @brief INT garbage collections

// Pushes the number of completed garbage collection sweeps.
void genie_garbage_collections (NODE_T * p)
{
  PUSH_VALUE (p, A68_GC (sweeps), A68_INT);
}
 116  
//! @brief INT garbage refused

// Pushes the number of times a garbage collection was requested but refused
// (wrong thread, blocked node, busy semaphore, or non-empty stack).
void genie_garbage_refused (NODE_T * p)
{
  PUSH_VALUE (p, A68_GC (refused), A68_INT);
}
 123  
//! @brief LONG INT garbage freed

// Pushes the total number of bytes freed over all sweeps.
// NOTE(review): the brief says LONG INT but the value is pushed as A68_INT -
// confirm that "total" fits and that INT is the intended mode here.
void genie_garbage_freed (NODE_T * p)
{
  PUSH_VALUE (p, A68_GC (total), A68_INT);
}
 130  
//! @brief REAL garbage seconds

// Pushes the accumulated wall-clock time spent in garbage collection.
void genie_garbage_seconds (NODE_T * p)
{
// Note that this timing is a rough cut.
  PUSH_VALUE (p, A68_GC (seconds), A68_REAL);
}
 138  
//! @brief Size available for an object in the heap.

// Returns the number of bytes between the heap pointer and the heap ceiling.
unt heap_available (void)
{
  return A68 (heap_size) - A68_HP;
}
 145  
 146  //! @brief Initialise heap management.
 147  
 148  void genie_init_heap (NODE_T * p)
 149  {
 150    (void) p;
 151    if (A68_HEAP == NO_BYTE) {
 152      diagnostic (A68_RUNTIME_ERROR, TOP_NODE (&A68_JOB), ERROR_OUT_OF_CORE);
 153      exit_genie (TOP_NODE (&A68_JOB), A68_RUNTIME_ERROR);
 154    }
 155    if (A68_HANDLES == NO_BYTE) {
 156      diagnostic (A68_RUNTIME_ERROR, TOP_NODE (&A68_JOB), ERROR_OUT_OF_CORE);
 157      exit_genie (TOP_NODE (&A68_JOB), A68_RUNTIME_ERROR);
 158    }
 159    A68_GC (seconds) = 0;
 160    A68_GC (total) = 0;
 161    A68_GC (sweeps) = 0;
 162    A68_GC (refused) = 0;
 163    A68_GC (preemptive) = A68_FALSE;
 164    ABEND (A68 (fixed_heap_pointer) >= (A68 (heap_size) - MIN_MEM_SIZE), ERROR_OUT_OF_CORE, __func__);
 165    A68_HP = A68 (fixed_heap_pointer);
 166    A68 (heap_is_fluid) = A68_FALSE;
 167  // Assign handle space.
 168    A68_HANDLE *z = (A68_HANDLE *) A68_HANDLES;
 169    A68_GC (available_handles) = z;
 170    A68_GC (busy_handles) = NO_HANDLE;
 171    int N = (unt) A68 (handle_pool_size) / SIZE_ALIGNED (A68_HANDLE);
 172    A68_GC (free_handles) = N;
 173    A68_GC (max_handles) = N;
 174    for (int k = 0; k < N; k++) {
 175      STATUS (&(z[k])) = NULL_MASK;
 176      POINTER (&(z[k])) = NO_BYTE;
 177      SIZE (&(z[k])) = 0;
 178      NEXT (&z[k]) = (k == N - 1 ? NO_HANDLE : &z[k + 1]);
 179      PREVIOUS (&z[k]) = (k == 0 ? NO_HANDLE : &z[k - 1]);
 180    }
 181  }
 182  
 183  //! @brief Whether mode must be coloured.
 184  
 185  BOOL_T moid_needs_colouring (MOID_T * m)
 186  {
 187    if (IS_REF (m)) {
 188      return A68_TRUE;
 189    } else if (IS (m, PROC_SYMBOL)) {
 190      return A68_TRUE;
 191    } else if (IS_FLEX (m) || IS_ROW (m)) {
 192      return A68_TRUE;
 193    } else if (IS_STRUCT (m) || IS_UNION (m)) {
 194      for (PACK_T *p = PACK (m); p != NO_PACK; FORWARD (p)) {
 195        if (moid_needs_colouring (MOID (p))) {
 196          return A68_TRUE;
 197        }
 198      }
 199      return A68_FALSE;
 200    } else {
 201      return A68_FALSE;
 202    }
 203  }
 204  
 205  //! @brief Colour all elements of a row.
 206  
 207  void colour_row_elements (A68_REF * z, MOID_T * m)
 208  {
 209    A68_ARRAY *arr; A68_TUPLE *tup;
 210    GET_DESCRIPTOR (arr, tup, z);
 211    if (get_row_size (tup, DIM (arr)) == 0) {
 212  // Empty rows have a ghost elements.
 213      BYTE_T *elem = ADDRESS (&ARRAY (arr));
 214      colour_object (&elem[0], SUB (m));
 215    } else {
 216  // The multi-dimensional garbage collector.
 217      BYTE_T *elem = ADDRESS (&ARRAY (arr));
 218      BOOL_T done = A68_FALSE;
 219      initialise_internal_index (tup, DIM (arr));
 220      while (!done) {
 221        ADDR_T index = calculate_internal_index (tup, DIM (arr));
 222        ADDR_T addr = ROW_ELEMENT (arr, index);
 223        colour_object (&elem[addr], SUB (m));
 224        done = increment_internal_index (tup, DIM (arr));
 225      }
 226    }
 227  }
 228  
//! @brief Colour an (active) object.

// Recursively marks "item" of mode "m" and everything reachable from it.
// COOKIE_MASK is set while a handle is being visited, to cut off circular
// references; COLOUR_MASK marks the handle as live for the sweep phase.

void colour_object (BYTE_T * item, MOID_T * m)
{
  if (item == NO_BYTE || m == NO_MOID) {
    return;
  }
  if (!moid_needs_colouring (m)) {
    return;
  }
// Deeply recursive objects might exhaust the stack.
  LOW_STACK_ALERT (NO_NODE);
  if (IS_REF (m)) {
// REF AMODE colour pointer and object to which it refers.
    A68_REF *z = (A68_REF *) item;
    if (INITIALISED (z) && IS_IN_HEAP (z)) {
      if (STATUS_TEST (REF_HANDLE (z), COOKIE_MASK)) {
// Already being visited - circular reference; stop here.
        return;
      }
      STATUS_SET (REF_HANDLE (z), (COOKIE_MASK | COLOUR_MASK));
      if (!IS_NIL (*z)) {
        colour_object (ADDRESS (z), SUB (m));
      }
//    STATUS_CLEAR (REF_HANDLE (z), COOKIE_MASK);.
    }
  } else if (IS_FLEXETY_ROW (m)) {
// Claim the descriptor and the row itself.
    A68_REF *z = (A68_REF *) item;
    if (INITIALISED (z) && IS_IN_HEAP (z)) {
      if (STATUS_TEST (REF_HANDLE (z), COOKIE_MASK)) {
        return;
      }
// An array is ALWAYS in the heap.
      STATUS_SET (REF_HANDLE (z), (COOKIE_MASK | COLOUR_MASK));
      A68_ARRAY *arr; A68_TUPLE *tup;
      GET_DESCRIPTOR (arr, tup, z);
      if (REF_HANDLE (&(ARRAY (arr))) != NO_HANDLE) {
// Assume its initialisation.
        MOID_T *n = DEFLEX (m);
        STATUS_SET (REF_HANDLE (&(ARRAY (arr))), COLOUR_MASK);
        if (moid_needs_colouring (SUB (n))) {
          colour_row_elements (z, n);
        }
      }
//    STATUS_CLEAR (REF_HANDLE (z), COOKIE_MASK);.
      (void) tup;
    }
  } else if (IS_STRUCT (m)) {
// STRUCTures - colour fields.
    for (PACK_T *p = PACK (m); p != NO_PACK; FORWARD (p)) {
      colour_object (&item[OFFSET (p)], MOID (p));
    }
  } else if (IS_UNION (m)) {
// UNIONs - a united object may contain a value that needs colouring.
    A68_UNION *z = (A68_UNION *) item;
    if (INITIALISED (z)) {
      MOID_T *united_moid = (MOID_T *) VALUE (z);
      colour_object (&item[A68_UNION_SIZE], united_moid);
    }
  } else if (IS (m, PROC_SYMBOL)) {
// PROCs - save a locale and the objects it points to.
    A68_PROCEDURE *z = (A68_PROCEDURE *) item;
    if (INITIALISED (z) && LOCALE (z) != NO_HANDLE && !(STATUS_TEST (LOCALE (z), COOKIE_MASK))) {
      BYTE_T *u = POINTER (LOCALE (z));
      STATUS_SET (LOCALE (z), (COOKIE_MASK | COLOUR_MASK));
// Each locale entry is a BOOL "supplied" flag followed by the argument value.
      for (PACK_T *s = PACK (MOID (z)); s != NO_PACK; FORWARD (s)) {
        if (VALUE ((A68_BOOL *) & u[0]) == A68_TRUE) {
          colour_object (&u[SIZE (M_BOOL)], MOID (s));
        }
        u = &(u[SIZE (M_BOOL) + SIZE (MOID (s))]);
      }
//    STATUS_CLEAR (LOCALE (z), COOKIE_MASK);.
    }
  } else if (m == M_SOUND) {
// Claim the data of a SOUND object, that is in the heap.
    A68_SOUND *w = (A68_SOUND *) item;
    if (INITIALISED (w)) {
      STATUS_SET (REF_HANDLE (&(DATA (w))), (COOKIE_MASK | COLOUR_MASK));
    }
  }
}
 310  
 311  //! @brief Colour active objects in the heap.
 312  
 313  void colour_heap (ADDR_T fp)
 314  {
 315    while (fp != 0) {
 316      NODE_T *p = FRAME_TREE (fp);
 317      TABLE_T *q = TABLE (p);
 318      if (q != NO_TABLE) {
 319        for (TAG_T *i = IDENTIFIERS (q); i != NO_TAG; FORWARD (i)) {
 320          colour_object (FRAME_LOCAL (fp, OFFSET (i)), MOID (i));
 321        }
 322        for (TAG_T *i = ANONYMOUS (q); i != NO_TAG; FORWARD (i)) {
 323          if (PRIO (i) == GENERATOR) {
 324            colour_object (FRAME_LOCAL (fp, OFFSET (i)), MOID (i));
 325          }
 326        }
 327      }
 328      fp = FRAME_DYNAMIC_LINK (fp);
 329    }
 330  }
 331  
//! @brief Join all active blocks in the heap.

// Sweep phase: returns every uncoloured handle from the busy list to the
// free list, then slides the surviving blocks down so the heap is contiguous
// again.

void defragment_heap (void)
{
  A68_HANDLE *z;
// Free handles.
  z = A68_GC (busy_handles);
  while (z != NO_HANDLE) {
    if (!(STATUS_TEST (z, COLOUR_MASK)) && !(STATUS_TEST (z, BLOCK_GC_MASK))) {
// Dead block: unlink its handle from the busy list ...
      A68_HANDLE *y = NEXT (z);
      if (PREVIOUS (z) == NO_HANDLE) {
        A68_GC (busy_handles) = NEXT (z);
      } else {
        NEXT (PREVIOUS (z)) = NEXT (z);
      }
      if (NEXT (z) != NO_HANDLE) {
        PREVIOUS (NEXT (z)) = PREVIOUS (z);
      }
// ... and prepend it to the free list.
      NEXT (z) = A68_GC (available_handles);
      PREVIOUS (z) = NO_HANDLE;
      if (NEXT (z) != NO_HANDLE) {
        PREVIOUS (NEXT (z)) = z;
      }
      A68_GC (available_handles) = z;
      STATUS_CLEAR (z, ALLOCATED_MASK);
      A68_GC (freed) += SIZE (z);
      A68_GC (free_handles)++;
      z = y;
    } else {
      FORWARD (z);
    }
  }
// There can be no uncoloured allocated handle.
  for (z = A68_GC (busy_handles); z != NO_HANDLE; FORWARD (z)) {
    ABEND (!(STATUS_TEST (z, COLOUR_MASK)) && !(STATUS_TEST (z, BLOCK_GC_MASK)), ERROR_INTERNAL_CONSISTENCY, __func__);
  }
// Defragment the heap.
// The busy list is newest-first (give_handle prepends), so walk to the tail
// and then compact from the oldest handle backwards, moving each block down.
  A68_HP = A68 (fixed_heap_pointer);
  for (z = A68_GC (busy_handles); z != NO_HANDLE && NEXT (z) != NO_HANDLE; FORWARD (z)) {
    ;
  }
  for (; z != NO_HANDLE; BACKWARD (z)) {
    BYTE_T *dst = HEAP_ADDRESS (A68_HP);
    if (dst != POINTER (z)) {
      MOVE (dst, POINTER (z), (unt) SIZE (z));
    }
    STATUS_CLEAR (z, (COLOUR_MASK | COOKIE_MASK));
    POINTER (z) = dst;
    A68_HP += (SIZE (z));
    ABEND (A68_HP % A68_ALIGNMENT != 0, ERROR_ALIGNMENT, __func__);
  }
}
 384  
//! @brief Clean up garbage and defragment the heap.

// Top-level GC driver: refuses to run in unsafe situations, otherwise clears
// old marks, colours the heap from the frame stack, sweeps/compacts, updates
// statistics, and finally fires the user "on gc" event routine.

void gc_heap (NODE_T * p, ADDR_T fp)
{
// Must start with fp = current frame_pointer.
  A68_HANDLE *z;
  REAL_T t0, t1;
#if defined (BUILD_PARALLEL_CLAUSE)
// Only the main thread may collect.
  if (OTHER_THREAD (FRAME_THREAD_ID (A68_FP), A68_PAR (main_thread_id))) {
    A68_GC (refused)++;
    return;
  }
#endif
// Refuse when GC is blocked on this node or a GC semaphore is held.
  if (STATUS_TEST (p, BLOCK_GC_MASK) || A68_GC (sema) > 0) {
    A68_GC (refused)++;
    return;
  }
// Take no risk when intermediate results are on the stack.
  if (A68_SP != A68 (stack_start)) {
    A68_GC (refused)++;
    return;
  }
// Give it a whirl then.
  t0 = seconds ();
// Unfree handles are subject to inspection.
// Release them all before colouring.
  for (z = A68_GC (busy_handles); z != NO_HANDLE; FORWARD (z)) {
    STATUS_CLEAR (z, (COLOUR_MASK | COOKIE_MASK));
  }
// Pour paint into the heap to reveal active objects.
  colour_heap (fp);
// Start freeing and compacting.
  A68_GC (freed) = 0;
  defragment_heap ();
// Stats and logging.
  A68_GC (total) += A68_GC (freed);
  A68_GC (sweeps)++;
  A68_GC (preemptive) = A68_FALSE;
  t1 = seconds ();
// C optimiser can make last digit differ, so next condition is 
// needed to determine a positive time difference
  if ((t1 - t0) > ((REAL_T) A68 (clock_res) / 2.0)) {
    A68_GC (seconds) += (t1 - t0);
  } else {
    A68_GC (seconds) += ((REAL_T) A68 (clock_res) / 2.0);
  }
// Call the event handler.
  genie_call_event_routine (p, M_PROC_VOID, &A68 (on_gc_event), A68_SP, A68_FP);
}
 434  
 435  //! @brief Yield a handle that will point to a block in the heap.
 436  
 437  A68_HANDLE *give_handle (NODE_T * p, MOID_T * a68m)
 438  {
 439    if (A68_GC (available_handles) != NO_HANDLE) {
 440      A68_HANDLE *x = A68_GC (available_handles);
 441      A68_GC (available_handles) = NEXT (x);
 442      if (A68_GC (available_handles) != NO_HANDLE) {
 443        PREVIOUS (A68_GC (available_handles)) = NO_HANDLE;
 444      }
 445      STATUS (x) = ALLOCATED_MASK;
 446      POINTER (x) = NO_BYTE;
 447      SIZE (x) = 0;
 448      MOID (x) = a68m;
 449      NEXT (x) = A68_GC (busy_handles);
 450      PREVIOUS (x) = NO_HANDLE;
 451      if (NEXT (x) != NO_HANDLE) {
 452        PREVIOUS (NEXT (x)) = x;
 453      }
 454      A68_GC (busy_handles) = x;
 455      A68_GC (free_handles)--;
 456      return x;
 457    } else {
 458  // Do not auto-GC!.
 459      diagnostic (A68_RUNTIME_ERROR, p, ERROR_OUT_OF_CORE);
 460      exit_genie (p, A68_RUNTIME_ERROR);
 461    }
 462    return NO_HANDLE;
 463  }
 464  
 465  //! @brief Give a block of heap for an object of indicated mode.
 466  
 467  A68_REF heap_generator (NODE_T * p, MOID_T * mode, int size)
 468  {
 469    ABEND (size < 0, ERROR_INVALID_SIZE, __func__);
 470    size = A68_ALIGN (size);
 471    if (heap_available () >= size) {
 472      A68_REF z;
 473      STATUS (&z) = (STATUS_MASK_T) (INIT_MASK | IN_HEAP_MASK);
 474      OFFSET (&z) = 0;
 475      A68_HANDLE *x = give_handle (p, mode);
 476      SIZE (x) = size;
 477      POINTER (x) = HEAP_ADDRESS (A68_HP);
 478      FILL (POINTER (x), 0, size);
 479      REF_SCOPE (&z) = PRIMAL_SCOPE;
 480      REF_HANDLE (&z) = x;
 481      ABEND (((long) ADDRESS (&z)) % A68_ALIGNMENT != 0, ERROR_ALIGNMENT, __func__);
 482      A68_HP += size;
 483      REAL_T _f_ = (REAL_T) A68_HP / (REAL_T) A68 (heap_size);
 484      REAL_T _g_ = (REAL_T) (A68_GC (max_handles) - A68_GC (free_handles)) / (REAL_T) A68_GC (max_handles);
 485      if (_f_ > DEFAULT_PREEMPTIVE || _g_ > DEFAULT_PREEMPTIVE) {
 486        A68_GC (preemptive) = A68_TRUE;
 487      }
 488      return z;
 489    } else {
 490  // Do not auto-GC!.
 491      diagnostic (A68_RUNTIME_ERROR, p, ERROR_OUT_OF_CORE);
 492      exit_genie (p, A68_RUNTIME_ERROR);
 493      return nil_ref;
 494    }
 495  }
 496  
 497  //! @brief Give a block of heap for an object of indicated mode.
 498  
 499  A68_REF heap_generator_2 (NODE_T * p, MOID_T * mode, int len, int size)
 500  {
 501    if (len == 0 || size == 0) {
 502      return heap_generator (p, mode, 0);
 503    } else if (ABS (size) < (2 * GIGABYTE) / ABS (len)) {
 504      return heap_generator (p, mode, len * size);
 505    } else {
 506      diagnostic (A68_RUNTIME_ERROR, p, ERROR_OUT_OF_CORE);
 507      exit_genie (p, A68_RUNTIME_ERROR);
 508    }
 509    return nil_ref;
 510  }
 511  
 512  //! @brief Give a block of heap for an object of indicated mode.
 513  
 514  A68_REF heap_generator_3 (NODE_T * p, MOID_T * mode, int len1, int len2, int size)
 515  {
 516    if (len1 == 0 || len2 == 0) {
 517      return heap_generator (p, mode, 0);
 518    } else if (ABS (len2) < (2 * GIGABYTE) / ABS (len1)) {
 519      return heap_generator_2 (p, mode, len1 * len2, size);
 520    } else {
 521      diagnostic (A68_RUNTIME_ERROR, p, ERROR_OUT_OF_CORE);
 522      exit_genie (p, A68_RUNTIME_ERROR);
 523    }
 524    return nil_ref;
 525  }
 526  
 527  // Following implements the generator.
 528  
 529  //! @brief Whether a moid needs work in allocation.
 530  
 531  BOOL_T mode_needs_allocation (MOID_T * m)
 532  {
 533    if (IS_UNION (m)) {
 534      return A68_FALSE;
 535    } else {
 536      return HAS_ROWS (m);
 537    }
 538  }
 539  
//! @brief Prepare bounds.

// Evaluates all bound expressions in a BOUNDS_LIST onto the stack as pairs
// of INTs (lower bound, then upper bound). A missing lower bound defaults
// to 1.

void genie_compute_bounds (NODE_T * p)
{
  for (; p != NO_NODE; FORWARD (p)) {
    if (IS (p, BOUNDS_LIST)) {
      genie_compute_bounds (SUB (p));
    } else if (IS (p, BOUND)) {
      genie_compute_bounds (SUB (p));
    } else if (IS (p, UNIT)) {
      if (NEXT (p) != NO_NODE && (is_one_of (NEXT (p), COLON_SYMBOL, DOTDOT_SYMBOL, STOP))) {
// Explicit "lwb : upb": evaluate the lower bound, then skip past the colon
// so the trailing GENIE_UNIT evaluates the upper bound.
        GENIE_UNIT (p);
        p = NEXT_NEXT (p);
      } else {
// Default lower bound.
        PUSH_VALUE (p, 1, A68_INT);
      }
// Evaluate the upper bound (or the sole bound when lwb defaulted).
      GENIE_UNIT (p);
    }
  }
}
 561  
//! @brief Prepare bounds for a row.

// Recursively evaluates every bound expression that the declarer tree of a
// generator needs, pushing the results on the stack. STRING has implicit
// bounds, hence the early return; branch order is significant.

void genie_generator_bounds (NODE_T * p)
{
  LOW_STACK_ALERT (p);
  for (; p != NO_NODE; FORWARD (p)) {
    if (IS (p, BOUNDS)) {
      genie_compute_bounds (SUB (p));
    } else if (IS (p, INDICANT) && IS_LITERALLY (p, "STRING")) {
// STRING's bounds are implicit; nothing to evaluate.
      return;
    } else if (IS (p, INDICANT)) {
      if (TAX (p) != NO_TAG && HAS_ROWS (MOID (TAX (p)))) {
// Continue from definition at MODE A = ....
        genie_generator_bounds (DEF_NODE (p));
      }
    } else if (IS (p, DECLARER) && !mode_needs_allocation (MOID (p))) {
// No rows below this declarer - no bounds to prepare.
      return;
    } else {
      genie_generator_bounds (SUB (p));
    }
  }
}
 584  
//! @brief Allocate a structure.

// Walks one STRUCTURED_FIELD: remembers the declarer in *decl, and for every
// FIELD_IDENTIFIER allocates the field's rows at *faddr (when it has any),
// advancing *faddr by the field size. *top_sp tracks the highest stack level
// reached; *cur_sp is restored after each field so sibling fields read the
// same evaluated bounds.

void genie_generator_field (NODE_T * p, BYTE_T ** faddr, NODE_T ** decl, ADDR_T * cur_sp, ADDR_T * top_sp)
{
  for (; p != NO_NODE; FORWARD (p)) {
    if (IS (p, STRUCTURED_FIELD)) {
      genie_generator_field (SUB (p), faddr, decl, cur_sp, top_sp);
    }
    if (IS (p, DECLARER)) {
      (*decl) = SUB (p);
      FORWARD (p);
    }
    if (IS (p, FIELD_IDENTIFIER)) {
      MOID_T *fmoid = MOID (*decl);
      if (HAS_ROWS (fmoid) && ISNT (fmoid, UNION_SYMBOL)) {
        ADDR_T pop_sp = *cur_sp;
        genie_generator_stowed (*decl, *faddr, NO_VAR, cur_sp);
        *top_sp = *cur_sp;
        *cur_sp = pop_sp;
      }
      (*faddr) += SIZE (fmoid);
    }
  }
}
 609  
 610  //! @brief Allocate a structure.
 611  
 612  void genie_generator_struct (NODE_T * p, BYTE_T ** faddr, ADDR_T * cur_sp)
 613  {
 614    for (; p != NO_NODE; FORWARD (p)) {
 615      if (IS (p, STRUCTURED_FIELD_LIST)) {
 616        genie_generator_struct (SUB (p), faddr, cur_sp);
 617      } else if (IS (p, STRUCTURED_FIELD)) {
 618        NODE_T *decl = NO_NODE;
 619        ADDR_T top_sp = *cur_sp;
 620        genie_generator_field (SUB (p), faddr, &decl, cur_sp, &top_sp);
 621        *cur_sp = top_sp;
 622      }
 623    }
 624  }
 625  
//! @brief Allocate a stowed object.

// Generates the rows of a stowed (STRUCT/FLEX/ROW) object at "addr". Bounds
// have already been evaluated on the stack (see genie_generator_bounds);
// *cur_sp points at the next bound pair to consume.

void genie_generator_stowed (NODE_T * p, BYTE_T * addr, NODE_T ** decl, ADDR_T * cur_sp)
{
  if (p == NO_NODE) {
    return;
  } else if (IS (p, INDICANT) && IS_LITERALLY (p, "STRING")) {
// The standard prelude definition is hard coded here.
    *((A68_REF *) addr) = empty_string (p);
    return;
  } else if (IS (p, INDICANT) && TAX (p) != NO_TAG) {
// Continue from definition at MODE A = ..
    genie_generator_stowed (DEF_NODE (p), addr, decl, cur_sp);
    return;
  } else if (IS (p, DECLARER) && mode_needs_allocation (MOID (p))) {
    genie_generator_stowed (SUB (p), addr, decl, cur_sp);
    return;
  } else if (IS_STRUCT (p)) {
// STRUCT: each field claims its part of the object.
    BYTE_T *faddr = addr;
    genie_generator_struct (SUB_NEXT (p), &faddr, cur_sp);
    return;
  } else if (IS_FLEX (p)) {
// FLEX: allocate as the underlying row.
    genie_generator_stowed (NEXT (p), addr, decl, cur_sp);
    return;
  } else if (IS (p, BOUNDS)) {
    A68_REF desc;
    MOID_T *rmod = MOID (p), *smod = MOID (NEXT (p));
    BYTE_T *bounds = STACK_ADDRESS (*cur_sp);
    int dim = DIM (DEFLEX (rmod)), esiz = SIZE (smod), rsiz = 1;
    BOOL_T alloc_sub = A68_FALSE, alloc_str = A68_FALSE;
    NODE_T *in = SUB_NEXT (p);
    if (IS (in, INDICANT) && IS_LITERALLY (in, "STRING")) {
      alloc_str = A68_TRUE;
      alloc_sub = A68_FALSE;
    } else {
      alloc_sub = mode_needs_allocation (smod);
      alloc_str = A68_FALSE;
    }
// Build the descriptor, consuming one (lwb, upb) INT pair per dimension.
    desc = heap_generator (p, rmod, DESCRIPTOR_SIZE (dim));
    A68_ARRAY *arr; A68_TUPLE *tup;
    GET_DESCRIPTOR (arr, tup, &desc);
    for (int k = 0; k < dim; k++) {
      CHECK_INIT (p, INITIALISED ((A68_INT *) bounds), M_INT);
      LWB (&tup[k]) = VALUE ((A68_INT *) bounds);
      bounds += SIZE (M_INT);
      CHECK_INIT (p, INITIALISED ((A68_INT *) bounds), M_INT);
      UPB (&tup[k]) = VALUE ((A68_INT *) bounds);
      bounds += SIZE (M_INT);
      SPAN (&tup[k]) = rsiz;
      SHIFT (&tup[k]) = LWB (&tup[k]) * SPAN (&tup[k]);
      rsiz *= ROW_SIZE (&tup[k]);
    }
    DIM (arr) = dim;
    MOID (arr) = smod;
    ELEM_SIZE (arr) = esiz;
    SLICE_OFFSET (arr) = 0;
    FIELD_OFFSET (arr) = 0;
    (*cur_sp) += (dim * 2 * SIZE (M_INT));
// Generate a new row. Note that STRING is handled explicitly since
// it has implicit bounds 
    if (rsiz == 0) {
// Generate a ghost element.
      ADDR_T top_sp = *cur_sp;
      ARRAY (arr) = heap_generator (p, rmod, esiz);
      BYTE_T *elem = ADDRESS (&(ARRAY (arr)));
      if (alloc_sub) {
        genie_generator_stowed (NEXT (p), &(elem[0]), NO_VAR, cur_sp);
        top_sp = *cur_sp;
      } else if (alloc_str) {
        *(A68_REF *) elem = empty_string (p);
      }
      (*cur_sp) = top_sp;
    } else {
// Allocate the row storage and initialise each element; *cur_sp is reset
// per element so every element consumes the same bound pairs.
      ADDR_T pop_sp = *cur_sp, top_sp = *cur_sp;
      ARRAY (arr) = heap_generator_2 (p, rmod, rsiz, esiz);
      BYTE_T *elem = ADDRESS (&(ARRAY (arr)));
      for (int k = 0; k < rsiz; k++) {
        if (alloc_sub) {
          (*cur_sp) = pop_sp;
          genie_generator_stowed (NEXT (p), &(elem[k * esiz]), NO_VAR, cur_sp);
          top_sp = *cur_sp;
        } else if (alloc_str) {
          *(A68_REF *) (&(elem[k * esiz])) = empty_string (p);
        }
      }
      (*cur_sp) = top_sp;
    }
    *(A68_REF *) addr = desc;
    return;
  }
}
 717  
 718  //! @brief Generate space and push a REF.
 719  
 720  void genie_generator_internal (NODE_T * p, MOID_T * ref_mode, TAG_T * tag, LEAP_T leap, ADDR_T sp)
 721  {
 722  // Set up a REF MODE object, either in the stack or in the heap.
 723    MOID_T *mode = SUB (ref_mode);
 724    A68_REF name = nil_ref;
 725    if (leap == LOC_SYMBOL) {
 726      STATUS (&name) = (STATUS_MASK_T) (INIT_MASK | IN_FRAME_MASK);
 727      REF_HANDLE (&name) = (A68_HANDLE *) & nil_handle;
 728      OFFSET (&name) = A68_FP + FRAME_INFO_SIZE + OFFSET (tag);
 729      REF_SCOPE (&name) = A68_FP;
 730    } else if (leap == -LOC_SYMBOL && NON_LOCAL (p) != NO_TABLE) {
 731      name = heap_generator (p, mode, SIZE (mode));
 732      ADDR_T lev;
 733      FOLLOW_SL (lev, LEVEL (NON_LOCAL (p)));
 734      REF_SCOPE (&name) = lev;
 735    } else if (leap == -LOC_SYMBOL) {
 736      name = heap_generator (p, mode, SIZE (mode));
 737      REF_SCOPE (&name) = A68_FP;
 738    } else if (leap == HEAP_SYMBOL || leap == -HEAP_SYMBOL) {
 739      name = heap_generator (p, mode, SIZE (mode));
 740      REF_SCOPE (&name) = PRIMAL_SCOPE;
 741    } else if (leap == NEW_SYMBOL || leap == -NEW_SYMBOL) {
 742      name = heap_generator (p, mode, SIZE (mode));
 743      REF_SCOPE (&name) = PRIMAL_SCOPE;
 744    } else {
 745      ABEND (A68_TRUE, ERROR_INTERNAL_CONSISTENCY, __func__);
 746    }
 747    if (HAS_ROWS (mode)) {
 748      ADDR_T cur_sp = sp;
 749      genie_generator_stowed (p, ADDRESS (&name), NO_VAR, &cur_sp);
 750    }
 751    PUSH_REF (p, name);
 752  }
 753  
 754  //! @brief Push a name refering to allocated space.
 755  
 756  PROP_T genie_generator (NODE_T * p)
 757  {
 758    ADDR_T pop_sp = A68_SP;
 759    if (NEXT_SUB (p) != NO_NODE) {
 760      genie_generator_bounds (NEXT_SUB (p));
 761    }
 762    genie_generator_internal (NEXT_SUB (p), MOID (p), TAX (p), -ATTRIBUTE (SUB (p)), pop_sp);
 763    A68_REF z;
 764    POP_REF (p, &z);
 765    A68_SP = pop_sp;
 766    PUSH_REF (p, z);
 767    PROP_T self;
 768    UNIT (&self) = genie_generator;
 769    SOURCE (&self) = p;
 770    return self;
 771  }
 772  
 773  // Control of C heap
 774  
//! @brief Discard_heap.

// Frees the C allocation backing the interpreter heap and resets the fixed
// and temporary heap pointers.
void discard_heap (void)
{
  a68_free (A68_HEAP);
  A68 (fixed_heap_pointer) = 0;
  A68 (temp_heap_pointer) = 0;
}
     


© 2002-2024 J.M. van der Veer (jmvdveer@xs4all.nl)