//! @file rts-heap.c
//! @author J. Marcel van der Veer
//!
//! @section Copyright
//!
//! This file is part of Algol68G - an Algol 68 compiler-interpreter.
//! Copyright 2001-2023 J. Marcel van der Veer [algol68g@xs4all.nl].
//!
//! @section License
//!
//! This program is free software; you can redistribute it and/or modify it
//! under the terms of the GNU General Public License as published by the
//! Free Software Foundation; either version 3 of the License, or
//! (at your option) any later version.
//!
//! This program is distributed in the hope that it will be useful, but
//! WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
//! or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
//! more details. You should have received a copy of the GNU General Public
//! License along with this program. If not, see [http://www.gnu.org/licenses/].

//! @section Synopsis
//!
//! Generator and garbage collector routines.

// The generator allocates space in the stack or the heap and initialises
// dynamically sized objects.
//
// A mark-and-sweep garbage collector defragments the heap. When called, it walks
// the stack frames and marks the heap space that is still active. This marking
// process is called "colouring" here since we "pour paint" into the heap.
// The active blocks are then joined, the non-active blocks are forgotten.
//
// When colouring the heap, "cookies" are placed in objects so as to detect
// circular references.
//
// Algol68G introduces several anonymous tags in the symbol tables that save
// temporary REF or ROW results, so that they do not get prematurely swept.
//
// The genie is not smart enough to handle every heap clog, e.g. when copying
// STOWED objects. This is not elegant, but garbage collectors in general
// cannot solve all core management problems. To avoid many of the "unforeseen"
// heap clogs, we try to keep heap occupation low by garbage collecting
// occasionally, before the heap fills up completely. If this automatic mechanism
// does not help, one can always invoke the garbage collector by calling "gc heap"
// from Algol 68 source text.
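//
// As an informal illustration (assuming the standard a68g prelude), source text
// can force a collection and inspect the sweep counter defined further on in
// this file:
//
//   gc heap;
//   print (("sweeps: ", garbage collections, newline))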
//
// Mark-and-sweep is simple, but since it walks recursive structures it could
// exhaust the C stack (segmentation violation). A rough check is in place.
//
// For dynamically sized objects, the bounds are evaluated first (right first,
// then down). The object is generated keeping track of the bound-count.
//
//    ...
//    [#1]
//    STRUCT
//    (
//      [#2]
//      STRUCT
//      (
//        [#3] A a, b, ...
//      )
//      ,    Advance bound-count here, max is #3
//      [#4] B a, b, ...
//    )
//    ,    Advance bound-count here, max is #4
//    [#5] C a, b, ...
//    ...
//
// Bound-count is maximised when generator_stowed is entered recursively.
// Bound-count is advanced when completing a STRUCTURED_FIELD.
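//
// For example (an illustrative sketch, not from the source text):
//
//   LOC [1 : 3] STRUCT ([1 : 4] REAL a)
//
// evaluates all four bounds once, onto the stack; generating the row then
// re-reads the evaluated inner bounds for each of the three elements.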
//
// Note that A68G will not extend stack frames. Thus only 'static' LOC generators
// are in the stack, and 'dynamic' LOC generators go into the heap. These local
// REFs in the heap get local scope, however; A68G's approach differs from the
// CDC ALGOL 68 approach that put all generators in the heap.
//
// Note that part of memory is called 'COMMON'. This is meant for a future extension
// where a68g would need to point to external objects. The addressing scheme is that
// of a HEAP pointer: handle pointer + offset.
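//
// As an informal sketch in terms of macros used below: a heap-addressed A68_REF
// "z" resolves to POINTER (REF_HANDLE (z)) + OFFSET (z). Since every access goes
// through the handle, the collector may move the block during defragmentation
// and only needs to update POINTER in the handle.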

#include "a68g.h"
#include "a68g-genie.h"
#include "a68g-frames.h"
#include "a68g-prelude.h"
#include "a68g-mp.h"
#include "a68g-double.h"
#include "a68g-parser.h"
#include "a68g-transput.h"

#define DEF_NODE(p) (NEXT_NEXT (NODE (TAX (p))))

//! @brief PROC VOID gc heap

void genie_gc_heap (NODE_T * p)
{
  gc_heap (p, A68_FP);
}

//! @brief PROC VOID preemptive gc heap

void genie_preemptive_gc_heap (NODE_T * p)
{
  if (A68_GC (preemptive)) {
    gc_heap ((NODE_T *) (p), A68_FP);
  }
}

//! @brief INT blocks

void genie_block (NODE_T * p)
{
  PUSH_VALUE (p, 0, A68_INT);
}

//! @brief INT garbage collections

void genie_garbage_collections (NODE_T * p)
{
  PUSH_VALUE (p, A68_GC (sweeps), A68_INT);
}

//! @brief INT garbage refused

void genie_garbage_refused (NODE_T * p)
{
  PUSH_VALUE (p, A68_GC (refused), A68_INT);
}

//! @brief LONG INT garbage freed

void genie_garbage_freed (NODE_T * p)
{
  PUSH_VALUE (p, A68_GC (total), A68_INT);
}

//! @brief REAL garbage seconds

void genie_garbage_seconds (NODE_T * p)
{
// Note that this timing is a rough cut.
  PUSH_VALUE (p, A68_GC (seconds), A68_REAL);
}

//! @brief Size available for an object in the heap.

unt heap_available (void)
{
  return A68 (heap_size) - A68_HP;
}

//! @brief Initialise heap management.

void genie_init_heap (NODE_T * p)
{
  (void) p;
  if (A68_HEAP == NO_BYTE) {
    diagnostic (A68_RUNTIME_ERROR, TOP_NODE (&A68_JOB), ERROR_OUT_OF_CORE);
    exit_genie (TOP_NODE (&A68_JOB), A68_RUNTIME_ERROR);
  }
  if (A68_HANDLES == NO_BYTE) {
    diagnostic (A68_RUNTIME_ERROR, TOP_NODE (&A68_JOB), ERROR_OUT_OF_CORE);
    exit_genie (TOP_NODE (&A68_JOB), A68_RUNTIME_ERROR);
  }
  A68_GC (seconds) = 0;
  A68_GC (total) = 0;
  A68_GC (sweeps) = 0;
  A68_GC (refused) = 0;
  A68_GC (preemptive) = A68_FALSE;
  ABEND (A68 (fixed_heap_pointer) >= (A68 (heap_size) - MIN_MEM_SIZE), ERROR_OUT_OF_CORE, __func__);
  A68_HP = A68 (fixed_heap_pointer);
  A68 (heap_is_fluid) = A68_FALSE;
// Assign handle space.
  A68_HANDLE *z = (A68_HANDLE *) A68_HANDLES;
  A68_GC (available_handles) = z;
  A68_GC (busy_handles) = NO_HANDLE;
  int N = (unt) A68 (handle_pool_size) / SIZE_ALIGNED (A68_HANDLE);
  A68_GC (free_handles) = N;
  A68_GC (max_handles) = N;
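// Thread the pool into a doubly linked list of available handles; give_handle
// takes handles from this list and defragment_heap returns freed ones to it.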
  for (int k = 0; k < N; k++) {
    STATUS (&(z[k])) = NULL_MASK;
    POINTER (&(z[k])) = NO_BYTE;
    SIZE (&(z[k])) = 0;
    NEXT (&z[k]) = (k == N - 1 ? NO_HANDLE : &z[k + 1]);
    PREVIOUS (&z[k]) = (k == 0 ? NO_HANDLE : &z[k - 1]);
  }
}

//! @brief Whether mode must be coloured.

BOOL_T moid_needs_colouring (MOID_T * m)
{
  if (IS_REF (m)) {
    return A68_TRUE;
  } else if (IS (m, PROC_SYMBOL)) {
    return A68_TRUE;
  } else if (IS_FLEX (m) || IS_ROW (m)) {
    return A68_TRUE;
  } else if (IS_STRUCT (m) || IS_UNION (m)) {
    PACK_T *p = PACK (m);
    for (; p != NO_PACK; FORWARD (p)) {
      if (moid_needs_colouring (MOID (p))) {
        return A68_TRUE;
      }
    }
    return A68_FALSE;
  } else {
    return A68_FALSE;
  }
}

//! @brief Colour all elements of a row.

void colour_row_elements (A68_REF * z, MOID_T * m)
{
  A68_ARRAY *arr;
  A68_TUPLE *tup;
  GET_DESCRIPTOR (arr, tup, z);
  if (get_row_size (tup, DIM (arr)) == 0) {
// Empty rows have a ghost element.
    BYTE_T *elem = ADDRESS (&ARRAY (arr));
    colour_object (&elem[0], SUB (m));
  } else {
// The multi-dimensional garbage collector.
    BYTE_T *elem = ADDRESS (&ARRAY (arr));
    BOOL_T done = A68_FALSE;
    initialise_internal_index (tup, DIM (arr));
    while (!done) {
      ADDR_T iindex = calculate_internal_index (tup, DIM (arr));
      ADDR_T addr = ROW_ELEMENT (arr, iindex);
      colour_object (&elem[addr], SUB (m));
      done = increment_internal_index (tup, DIM (arr));
    }
  }
}

//! @brief Colour an (active) object.

void colour_object (BYTE_T * item, MOID_T * m)
{
  if (item == NO_BYTE || m == NO_MOID) {
    return;
  }
  if (!moid_needs_colouring (m)) {
    return;
  }
// Deeply recursive objects might exhaust the stack.
  LOW_STACK_ALERT (NO_NODE);
  if (IS_REF (m)) {
// REF AMODE colour pointer and object to which it refers.
    A68_REF *z = (A68_REF *) item;
    if (INITIALISED (z) && IS_IN_HEAP (z)) {
      if (STATUS_TEST (REF_HANDLE (z), COOKIE_MASK)) {
        return;
      }
      STATUS_SET (REF_HANDLE (z), (COOKIE_MASK | COLOUR_MASK));
      if (!IS_NIL (*z)) {
        colour_object (ADDRESS (z), SUB (m));
      }
// STATUS_CLEAR (REF_HANDLE (z), COOKIE_MASK);
    }
  } else if (IF_ROW (m)) {
// Claim the descriptor and the row itself.
    A68_REF *z = (A68_REF *) item;
    if (INITIALISED (z) && IS_IN_HEAP (z)) {
      A68_ARRAY *arr;
      A68_TUPLE *tup;
      if (STATUS_TEST (REF_HANDLE (z), COOKIE_MASK)) {
        return;
      }
// An array is ALWAYS in the heap.
      STATUS_SET (REF_HANDLE (z), (COOKIE_MASK | COLOUR_MASK));
      GET_DESCRIPTOR (arr, tup, z);
      if (REF_HANDLE (&(ARRAY (arr))) != NO_HANDLE) {
// Assume it is initialised.
        MOID_T *n = DEFLEX (m);
        STATUS_SET (REF_HANDLE (&(ARRAY (arr))), COLOUR_MASK);
        if (moid_needs_colouring (SUB (n))) {
          colour_row_elements (z, n);
        }
      }
// STATUS_CLEAR (REF_HANDLE (z), COOKIE_MASK);
      (void) tup;
    }
  } else if (IS_STRUCT (m)) {
// STRUCTures - colour fields.
    PACK_T *p = PACK (m);
    for (; p != NO_PACK; FORWARD (p)) {
      colour_object (&item[OFFSET (p)], MOID (p));
    }
  } else if (IS_UNION (m)) {
// UNIONs - a united object may contain a value that needs colouring.
    A68_UNION *z = (A68_UNION *) item;
    if (INITIALISED (z)) {
      MOID_T *united_moid = (MOID_T *) VALUE (z);
      colour_object (&item[A68_UNION_SIZE], united_moid);
    }
  } else if (IS (m, PROC_SYMBOL)) {
// PROCs - save a locale and the objects it points to.
    A68_PROCEDURE *z = (A68_PROCEDURE *) item;
    if (INITIALISED (z) && LOCALE (z) != NO_HANDLE && !(STATUS_TEST (LOCALE (z), COOKIE_MASK))) {
      BYTE_T *u = POINTER (LOCALE (z));
      PACK_T *s = PACK (MOID (z));
      STATUS_SET (LOCALE (z), (COOKIE_MASK | COLOUR_MASK));
      for (; s != NO_PACK; FORWARD (s)) {
        if (VALUE ((A68_BOOL *) & u[0]) == A68_TRUE) {
          colour_object (&u[SIZE (M_BOOL)], MOID (s));
        }
        u = &(u[SIZE (M_BOOL) + SIZE (MOID (s))]);
      }
// STATUS_CLEAR (LOCALE (z), COOKIE_MASK);
    }
  } else if (m == M_SOUND) {
// Claim the data of a SOUND object, that is in the heap.
    A68_SOUND *w = (A68_SOUND *) item;
    if (INITIALISED (w)) {
      STATUS_SET (REF_HANDLE (&(DATA (w))), (COOKIE_MASK | COLOUR_MASK));
    }
  }
}

//! @brief Colour active objects in the heap.

void colour_heap (ADDR_T fp)
{
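// Walk the frame stack along the dynamic links; in every frame, colour the
// identifiers and the anonymous GENERATOR tags that keep heap objects alive.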
  while (fp != 0) {
    NODE_T *p = FRAME_TREE (fp);
    TABLE_T *q = TABLE (p);
    if (q != NO_TABLE) {
      TAG_T *i;
      for (i = IDENTIFIERS (q); i != NO_TAG; FORWARD (i)) {
        colour_object (FRAME_LOCAL (fp, OFFSET (i)), MOID (i));
      }
      for (i = ANONYMOUS (q); i != NO_TAG; FORWARD (i)) {
        if (PRIO (i) == GENERATOR) {
          colour_object (FRAME_LOCAL (fp, OFFSET (i)), MOID (i));
        }
      }
    }
    fp = FRAME_DYNAMIC_LINK (fp);
  }
}

//! @brief Join all active blocks in the heap.

void defragment_heap (void)
{
  A68_HANDLE *z;
// Free handles.
  z = A68_GC (busy_handles);
  while (z != NO_HANDLE) {
    if (!(STATUS_TEST (z, COLOUR_MASK)) && !(STATUS_TEST (z, BLOCK_GC_MASK))) {
      A68_HANDLE *y = NEXT (z);
      if (PREVIOUS (z) == NO_HANDLE) {
        A68_GC (busy_handles) = NEXT (z);
      } else {
        NEXT (PREVIOUS (z)) = NEXT (z);
      }
      if (NEXT (z) != NO_HANDLE) {
        PREVIOUS (NEXT (z)) = PREVIOUS (z);
      }
      NEXT (z) = A68_GC (available_handles);
      PREVIOUS (z) = NO_HANDLE;
      if (NEXT (z) != NO_HANDLE) {
        PREVIOUS (NEXT (z)) = z;
      }
      A68_GC (available_handles) = z;
      STATUS_CLEAR (z, ALLOCATED_MASK);
      A68_GC (freed) += SIZE (z);
      A68_GC (free_handles)++;
      z = y;
    } else {
      FORWARD (z);
    }
  }
// There can be no uncoloured allocated handle.
  for (z = A68_GC (busy_handles); z != NO_HANDLE; FORWARD (z)) {
    ABEND (!(STATUS_TEST (z, COLOUR_MASK)) && !(STATUS_TEST (z, BLOCK_GC_MASK)), ERROR_INTERNAL_CONSISTENCY, __func__);
  }
// Defragment the heap.
  A68_HP = A68 (fixed_heap_pointer);
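// The busy chain is newest-first, so its tail holds the oldest, lowest block;
// walking BACKWARD from the tail moves surviving blocks in increasing address
// order, keeping the live part of the heap contiguous.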
  for (z = A68_GC (busy_handles); z != NO_HANDLE && NEXT (z) != NO_HANDLE; FORWARD (z)) {
    ;
  }
  for (; z != NO_HANDLE; BACKWARD (z)) {
    BYTE_T *dst = HEAP_ADDRESS (A68_HP);
    if (dst != POINTER (z)) {
      MOVE (dst, POINTER (z), (unt) SIZE (z));
    }
    STATUS_CLEAR (z, (COLOUR_MASK | COOKIE_MASK));
    POINTER (z) = dst;
    A68_HP += (SIZE (z));
    ABEND (A68_HP % A68_ALIGNMENT != 0, ERROR_ALIGNMENT, __func__);
  }
}

//! @brief Clean up garbage and defragment the heap.

void gc_heap (NODE_T * p, ADDR_T fp)
{
// Must start with fp = current frame_pointer.
  A68_HANDLE *z;
  REAL_T t0, t1;
#if defined (BUILD_PARALLEL_CLAUSE)
  if (OTHER_THREAD (FRAME_THREAD_ID (A68_FP), A68_PAR (main_thread_id))) {
    A68_GC (refused)++;
    return;
  }
#endif
// Take no risk when intermediate results are on the stack.
  if (A68_SP != A68 (stack_start)) {
    A68_GC (refused)++;
    return;
  }
// Give it a whirl then.
  t0 = seconds ();
// Unfree handles are subject to inspection.
// Release them all before colouring.
  for (z = A68_GC (busy_handles); z != NO_HANDLE; FORWARD (z)) {
    STATUS_CLEAR (z, (COLOUR_MASK | COOKIE_MASK));
  }
// Pour paint into the heap to reveal active objects.
  colour_heap (fp);
// Start freeing and compacting.
  A68_GC (freed) = 0;
  defragment_heap ();
// Stats and logging.
  A68_GC (total) += A68_GC (freed);
  A68_GC (sweeps)++;
  A68_GC (preemptive) = A68_FALSE;
  t1 = seconds ();
// The C optimiser can make the last digit differ, so the next condition is
// needed to determine a positive time difference.
  if ((t1 - t0) > ((REAL_T) A68 (clock_res) / 2.0)) {
    A68_GC (seconds) += (t1 - t0);
  } else {
    A68_GC (seconds) += ((REAL_T) A68 (clock_res) / 2.0);
  }
// Call the event handler.
  genie_call_event_routine (p, M_PROC_VOID, &A68 (on_gc_event), A68_SP, A68_FP);
}

//! @brief Yield a handle that will point to a block in the heap.

A68_HANDLE *give_handle (NODE_T * p, MOID_T * a68m)
{
  if (A68_GC (available_handles) != NO_HANDLE) {
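// Unlink the first available handle and push it onto the busy chain.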
    A68_HANDLE *x = A68_GC (available_handles);
    A68_GC (available_handles) = NEXT (x);
    if (A68_GC (available_handles) != NO_HANDLE) {
      PREVIOUS (A68_GC (available_handles)) = NO_HANDLE;
    }
    STATUS (x) = ALLOCATED_MASK;
    POINTER (x) = NO_BYTE;
    SIZE (x) = 0;
    MOID (x) = a68m;
    NEXT (x) = A68_GC (busy_handles);
    PREVIOUS (x) = NO_HANDLE;
    if (NEXT (x) != NO_HANDLE) {
      PREVIOUS (NEXT (x)) = x;
    }
    A68_GC (busy_handles) = x;
    A68_GC (free_handles)--;
    return x;
  } else {
// Do not auto-GC!
    diagnostic (A68_RUNTIME_ERROR, p, ERROR_OUT_OF_CORE);
    exit_genie (p, A68_RUNTIME_ERROR);
  }
  return NO_HANDLE;
}

//! @brief Give a block of heap for an object of indicated mode.

A68_REF heap_generator (NODE_T * p, MOID_T * mode, int size)
{
// Align.
  ABEND (size < 0, ERROR_INVALID_SIZE, __func__);
  size = A68_ALIGN (size);
// Now give it.
  if (heap_available () >= size) {
    A68_HANDLE *x;
    A68_REF z;
    STATUS (&z) = (STATUS_MASK_T) (INIT_MASK | IN_HEAP_MASK);
    OFFSET (&z) = 0;
    x = give_handle (p, mode);
    SIZE (x) = size;
    POINTER (x) = HEAP_ADDRESS (A68_HP);
    FILL (POINTER (x), 0, size);
    REF_SCOPE (&z) = PRIMAL_SCOPE;
    REF_HANDLE (&z) = x;
    ABEND (((long) ADDRESS (&z)) % A68_ALIGNMENT != 0, ERROR_ALIGNMENT, __func__);
    A68_HP += size;
    REAL_T _f_ = (REAL_T) A68_HP / (REAL_T) A68 (heap_size);
    REAL_T _g_ = (REAL_T) (A68_GC (max_handles) - A68_GC (free_handles)) / (REAL_T) A68_GC (max_handles);
    if (_f_ > DEFAULT_PREEMPTIVE || _g_ > DEFAULT_PREEMPTIVE) {
      A68_GC (preemptive) = A68_TRUE;
    }
    return z;
  } else {
// Do not auto-GC!
    diagnostic (A68_RUNTIME_ERROR, p, ERROR_OUT_OF_CORE);
    exit_genie (p, A68_RUNTIME_ERROR);
    return nil_ref;
  }
}
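
// A typical call allocates room for one object of a mode, as in
// genie_generator_internal below: heap_generator (p, mode, SIZE (mode)).
// Crossing DEFAULT_PREEMPTIVE occupation arms the preemptive collector.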

// The following implements the generator.

//! @brief Whether a moid needs work in allocation.

BOOL_T mode_needs_allocation (MOID_T * m)
{
  if (IS_UNION (m)) {
    return A68_FALSE;
  } else {
    return HAS_ROWS (m);
  }
}

//! @brief Prepare bounds.

void genie_compute_bounds (NODE_T * p)
{
  for (; p != NO_NODE; FORWARD (p)) {
    if (IS (p, BOUNDS_LIST)) {
      genie_compute_bounds (SUB (p));
    } else if (IS (p, BOUND)) {
      genie_compute_bounds (SUB (p));
    } else if (IS (p, UNIT)) {
      if (NEXT (p) != NO_NODE && (is_one_of (NEXT (p), COLON_SYMBOL, DOTDOT_SYMBOL, STOP))) {
        EXECUTE_UNIT (p);
        p = NEXT_NEXT (p);
      } else {
// Default lower bound.
        PUSH_VALUE (p, 1, A68_INT);
      }
      EXECUTE_UNIT (p);
    }
  }
}

//! @brief Prepare bounds for a row.

void genie_generator_bounds (NODE_T * p)
{
  LOW_STACK_ALERT (p);
  for (; p != NO_NODE; FORWARD (p)) {
    if (IS (p, BOUNDS)) {
      genie_compute_bounds (SUB (p));
    } else if (IS (p, INDICANT) && IS_LITERALLY (p, "STRING")) {
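// STRING has implicit bounds; there is nothing to evaluate.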
      return;
    } else if (IS (p, INDICANT)) {
      if (TAX (p) != NO_TAG && HAS_ROWS (MOID (TAX (p)))) {
// Continue from definition at MODE A = ...
        genie_generator_bounds (DEF_NODE (p));
      }
    } else if (IS (p, DECLARER) && !mode_needs_allocation (MOID (p))) {
      return;
    } else {
      genie_generator_bounds (SUB (p));
    }
  }
}

//! @brief Allocate the fields of a structure.

void genie_generator_field (NODE_T * p, BYTE_T ** faddr, NODE_T ** decl, ADDR_T * cur_sp, ADDR_T * top_sp)
{
  for (; p != NO_NODE; FORWARD (p)) {
    if (IS (p, STRUCTURED_FIELD)) {
      genie_generator_field (SUB (p), faddr, decl, cur_sp, top_sp);
    }
    if (IS (p, DECLARER)) {
      (*decl) = SUB (p);
      FORWARD (p);
    }
    if (IS (p, FIELD_IDENTIFIER)) {
      MOID_T *fmoid = MOID (*decl);
      if (HAS_ROWS (fmoid) && ISNT (fmoid, UNION_SYMBOL)) {
        ADDR_T pop_sp = *cur_sp;
        genie_generator_stowed (*decl, *faddr, NO_VAR, cur_sp);
        *top_sp = *cur_sp;
        *cur_sp = pop_sp;
      }
      (*faddr) += SIZE (fmoid);
    }
  }
}

//! @brief Allocate a structure.

void genie_generator_struct (NODE_T * p, BYTE_T ** faddr, ADDR_T * cur_sp)
{
  for (; p != NO_NODE; FORWARD (p)) {
    if (IS (p, STRUCTURED_FIELD_LIST)) {
      genie_generator_struct (SUB (p), faddr, cur_sp);
    } else if (IS (p, STRUCTURED_FIELD)) {
      NODE_T *decl = NO_NODE;
      ADDR_T top_sp = *cur_sp;
      genie_generator_field (SUB (p), faddr, &decl, cur_sp, &top_sp);
      *cur_sp = top_sp;
    }
  }
}

//! @brief Allocate a stowed object.

void genie_generator_stowed (NODE_T * p, BYTE_T * addr, NODE_T ** decl, ADDR_T * cur_sp)
{
  if (p == NO_NODE) {
    return;
  } else if (IS (p, INDICANT) && IS_LITERALLY (p, "STRING")) {
// The standard prelude definition is hard-coded here.
    *((A68_REF *) addr) = empty_string (p);
    return;
  } else if (IS (p, INDICANT) && TAX (p) != NO_TAG) {
// Continue from definition at MODE A = ...
    genie_generator_stowed (DEF_NODE (p), addr, decl, cur_sp);
    return;
  } else if (IS (p, DECLARER) && mode_needs_allocation (MOID (p))) {
    genie_generator_stowed (SUB (p), addr, decl, cur_sp);
    return;
  } else if (IS_STRUCT (p)) {
    BYTE_T *faddr = addr;
    genie_generator_struct (SUB_NEXT (p), &faddr, cur_sp);
    return;
  } else if (IS_FLEX (p)) {
    genie_generator_stowed (NEXT (p), addr, decl, cur_sp);
    return;
  } else if (IS (p, BOUNDS)) {
    A68_REF desc;
    MOID_T *rmod = MOID (p), *smod = MOID (NEXT (p));
    A68_ARRAY *arr;
    A68_TUPLE *tup;
    BYTE_T *bounds = STACK_ADDRESS (*cur_sp);
    int k, dim = DIM (DEFLEX (rmod));
    int esiz = SIZE (smod), rsiz = 1;
    BOOL_T alloc_sub, alloc_str;
    NODE_T *in = SUB_NEXT (p);
    if (IS (in, INDICANT) && IS_LITERALLY (in, "STRING")) {
      alloc_str = A68_TRUE;
      alloc_sub = A68_FALSE;
    } else {
      alloc_sub = mode_needs_allocation (smod);
      alloc_str = A68_FALSE;
    }
    desc = heap_generator (p, rmod, DESCRIPTOR_SIZE (dim));
    GET_DESCRIPTOR (arr, tup, &desc);
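// Read the evaluated bounds from the stack into the tuples; SPAN and SHIFT
// implement the linear indexing arithmetic for the new row.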
    for (k = 0; k < dim; k++) {
      CHECK_INIT (p, INITIALISED ((A68_INT *) bounds), M_INT);
      LWB (&tup[k]) = VALUE ((A68_INT *) bounds);
      bounds += SIZE (M_INT);
      CHECK_INIT (p, INITIALISED ((A68_INT *) bounds), M_INT);
      UPB (&tup[k]) = VALUE ((A68_INT *) bounds);
      bounds += SIZE (M_INT);
      SPAN (&tup[k]) = rsiz;
      SHIFT (&tup[k]) = LWB (&tup[k]) * SPAN (&tup[k]);
      rsiz *= ROW_SIZE (&tup[k]);
    }
    DIM (arr) = dim;
    MOID (arr) = smod;
    ELEM_SIZE (arr) = esiz;
    SLICE_OFFSET (arr) = 0;
    FIELD_OFFSET (arr) = 0;
    (*cur_sp) += (dim * 2 * SIZE (M_INT));
// Generate a new row. Note that STRING is handled explicitly since
// it has implicit bounds.
    if (rsiz == 0) {
// Generate a ghost element.
      ADDR_T top_sp = *cur_sp;
      BYTE_T *elem;
      ARRAY (arr) = heap_generator (p, rmod, esiz);
      elem = ADDRESS (&(ARRAY (arr)));
      if (alloc_sub) {
        genie_generator_stowed (NEXT (p), &(elem[0]), NO_VAR, cur_sp);
        top_sp = *cur_sp;
      } else if (alloc_str) {
        *(A68_REF *) elem = empty_string (p);
      }
      (*cur_sp) = top_sp;
    } else {
      ADDR_T pop_sp = *cur_sp, top_sp = *cur_sp;
      BYTE_T *elem;
      ARRAY (arr) = heap_generator (p, rmod, rsiz * esiz);
      elem = ADDRESS (&(ARRAY (arr)));
      for (k = 0; k < rsiz; k++) {
        if (alloc_sub) {
          (*cur_sp) = pop_sp;
          genie_generator_stowed (NEXT (p), &(elem[k * esiz]), NO_VAR, cur_sp);
          top_sp = *cur_sp;
        } else if (alloc_str) {
          *(A68_REF *) (&(elem[k * esiz])) = empty_string (p);
        }
      }
      (*cur_sp) = top_sp;
    }
    *(A68_REF *) addr = desc;
    return;
  }
}

//! @brief Generate space and push a REF.

void genie_generator_internal (NODE_T * p, MOID_T * ref_mode, TAG_T * tag, LEAP_T leap, ADDR_T sp)
{
// Set up a REF MODE object, either in the stack or in the heap.
  MOID_T *mode = SUB (ref_mode);
  A68_REF name = nil_ref;
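// A positive LOC_SYMBOL leap allocates statically in the current frame;
// genie_generator below negates the attribute, so negative leaps mark the
// dynamic generators that must go on the heap (with local scope for LOC).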
  if (leap == LOC_SYMBOL) {
    STATUS (&name) = (STATUS_MASK_T) (INIT_MASK | IN_FRAME_MASK);
    REF_HANDLE (&name) = (A68_HANDLE *) & nil_handle;
    OFFSET (&name) = A68_FP + FRAME_INFO_SIZE + OFFSET (tag);
    REF_SCOPE (&name) = A68_FP;
  } else if (leap == -LOC_SYMBOL && NON_LOCAL (p) != NO_TABLE) {
    ADDR_T lev;
    name = heap_generator (p, mode, SIZE (mode));
    FOLLOW_SL (lev, LEVEL (NON_LOCAL (p)));
    REF_SCOPE (&name) = lev;
  } else if (leap == -LOC_SYMBOL) {
    name = heap_generator (p, mode, SIZE (mode));
    REF_SCOPE (&name) = A68_FP;
  } else if (leap == HEAP_SYMBOL || leap == -HEAP_SYMBOL) {
    name = heap_generator (p, mode, SIZE (mode));
    REF_SCOPE (&name) = PRIMAL_SCOPE;
  } else if (leap == NEW_SYMBOL || leap == -NEW_SYMBOL) {
    name = heap_generator (p, mode, SIZE (mode));
    REF_SCOPE (&name) = PRIMAL_SCOPE;
  } else {
    ABEND (A68_TRUE, ERROR_INTERNAL_CONSISTENCY, __func__);
  }
  if (HAS_ROWS (mode)) {
    ADDR_T cur_sp = sp;
    genie_generator_stowed (p, ADDRESS (&name), NO_VAR, &cur_sp);
  }
  PUSH_REF (p, name);
}

//! @brief Push a name referring to allocated space.

PROP_T genie_generator (NODE_T * p)
{
  PROP_T self;
  ADDR_T pop_sp = A68_SP;
  A68_REF z;
  if (NEXT_SUB (p) != NO_NODE) {
    genie_generator_bounds (NEXT_SUB (p));
  }
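// Generate the name; then pop it, discard the evaluated bounds beneath it,
// and push the name again so only the REF remains on the stack.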
  genie_generator_internal (NEXT_SUB (p), MOID (p), TAX (p), -ATTRIBUTE (SUB (p)), pop_sp);
  POP_REF (p, &z);
  A68_SP = pop_sp;
  PUSH_REF (p, z);
  UNIT (&self) = genie_generator;
  SOURCE (&self) = p;
  return self;
}

// Control of the C heap.

//! @brief Discard the heap.

void discard_heap (void)
{
  if (A68_HEAP != NO_BYTE) {
    a68_free (A68_HEAP);
  }
  A68 (fixed_heap_pointer) = 0;
  A68 (temp_heap_pointer) = 0;
}