rts-heap.c
1 //! @file rts-heap.c
2 //! @author J. Marcel van der Veer
3
4 //! @section Copyright
5 //!
6 //! This file is part of Algol68G - an Algol 68 compiler-interpreter.
7 //! Copyright 2001-2025 J. Marcel van der Veer [algol68g@xs4all.nl].
8
9 //! @section License
10 //!
11 //! This program is free software; you can redistribute it and/or modify it
12 //! under the terms of the GNU General Public License as published by the
13 //! Free Software Foundation; either version 3 of the License, or
14 //! (at your option) any later version.
15 //!
16 //! This program is distributed in the hope that it will be useful, but
17 //! WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
18 //! or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
19 //! more details. You should have received a copy of the GNU General Public
20 //! License along with this program. If not, see [http://www.gnu.org/licenses/].
21
22 //! @section Synopsis
23 //!
24 //! Generator and garbage collector routines.
25
26 // The generator allocates space in stack or heap and initialises dynamically sized objects.
27 //
// A mark-and-sweep garbage collector defragments the heap. When called, it walks
29 // the stack frames and marks the heap space that is still active. This marking
30 // process is called "colouring" here since we "pour paint" into the heap.
31 // The active blocks are then joined, the non-active blocks are forgotten.
32 //
// When colouring the heap, "cookies" are placed in objects so as to detect circular
34 // references.
35 //
36 // Algol68G introduces several anonymous tags in the symbol tables that save
37 // temporary REF or ROW results, so that they do not get prematurely swept.
38 //
39 // The genie is not smart enough to handle every heap clog, e.g. when copying
40 // STOWED objects. This seems not very elegant, but garbage collectors in general
41 // cannot solve all core management problems. To avoid many of the "unforeseen"
42 // heap clogs, we try to keep heap occupation low by garbage collecting
43 // occasionally, before it fills up completely. If this automatic mechanism does
44 // not help, one can always invoke the garbage collector by calling "gc heap"
45 // from Algol 68 source text.
46 //
47 // Mark-and-collect is simple but since it walks recursive structures, it could
48 // exhaust the C-stack (segment violation). A rough check is in place.
49 //
50 // For dynamically sized objects, first bounds are evaluated (right first, then down).
51 // The object is generated keeping track of the bound-count.
52 //
53 // ...
54 // [#1]
55 // STRUCT
56 // (
57 // [#2]
58 // STRUCT
59 // (
60 // [#3] A a, b, ...
61 // )
62 // , Advance bound-count here, max is #3
63 // [#4] B a, b, ...
64 // )
65 // , Advance bound-count here, max is #4
66 // [#5] C a, b, ...
67 // ...
68 //
69 // Bound-count is maximised when generator_stowed is entered recursively.
70 // Bound-count is advanced when completing a STRUCTURED_FIELD.
71 // Note that A68G will not extend stack frames. Thus only 'static' LOC generators
72 // are in the stack, and 'dynamic' LOC generators go into the heap. These local
73 // REFs in the heap get local scope however, and A68G's approach differs from the
74 // CDC ALGOL 68 approach that put all generators in the heap.
75 // Note that part of memory is called 'COMMON'. This is meant for future extension
// where a68g would need to point to external objects. The addressing scheme is that
77 // of a HEAP pointer - handle pointer + offset.
78
79 #include "a68g.h"
80 #include "a68g-genie.h"
81 #include "a68g-frames.h"
82 #include "a68g-prelude.h"
83 #include "a68g-parser.h"
84
85 #define DEF_NODE(p) (NEXT_NEXT (NODE (TAX (p))))
86
//! @brief PROC VOID gc heap

void genie_gc_heap (NODE_T * p)
{
// Genie routine for the Algol 68 call "gc heap": collect unconditionally,
// walking frames from the current frame pointer.
  gc_heap (p, A68G_FP);
}
93
//! @brief PROC VOID preemptive gc heap

void genie_preemptive_gc_heap (NODE_T * p)
{
// Collect only when an earlier allocation flagged high heap or handle usage
// (see heap_generator, which sets the "preemptive" flag).
  if (A68G_GC (preemptive)) {
    gc_heap (p, A68G_FP);
  }
}
102
//! @brief INT blocks

void genie_block (NODE_T * p)
{
// Always yields 0; no block count is tracked in this implementation.
  PUSH_VALUE (p, 0, A68G_INT);
}
109
//! @brief INT garbage collections

void genie_garbage_collections (NODE_T * p)
{
// Push the number of completed collection sweeps.
  PUSH_VALUE (p, A68G_GC (sweeps), A68G_INT);
}
116
//! @brief INT garbage refused

void genie_garbage_refused (NODE_T * p)
{
// Push the number of collection requests that were refused (see gc_heap guards).
  PUSH_VALUE (p, A68G_GC (refused), A68G_INT);
}
123
//! @brief LONG INT garbage freed

void genie_garbage_freed (NODE_T * p)
{
// Push the cumulative number of bytes reclaimed over all sweeps.
// NOTE(review): the brief says LONG INT, but the value is pushed as A68G_INT -
// confirm the intended mode against the standard prelude declaration.
  PUSH_VALUE (p, A68G_GC (total), A68G_INT);
}
130
//! @brief REAL garbage seconds

void genie_garbage_seconds (NODE_T * p)
{
// Push the accumulated wall-clock time spent collecting.
// Note that this timing is a rough cut.
  PUSH_VALUE (p, A68G_GC (seconds), A68G_REAL);
}
138
//! @brief Size available for an object in the heap.

unt heap_available (void)
{
// Bytes between the current heap pointer and the top of the heap.
  return A68G (heap_size) - A68G_HP;
}
145
146 //! @brief Initialise heap management.
147
148 void genie_init_heap (NODE_T * p)
149 {
150 (void) p;
151 if (A68G_HEAP == NO_BYTE) {
152 diagnostic (A68G_RUNTIME_ERROR, TOP_NODE (&A68G_JOB), ERROR_OUT_OF_CORE);
153 exit_genie (TOP_NODE (&A68G_JOB), A68G_RUNTIME_ERROR);
154 }
155 if (A68G_HANDLES == NO_BYTE) {
156 diagnostic (A68G_RUNTIME_ERROR, TOP_NODE (&A68G_JOB), ERROR_OUT_OF_CORE);
157 exit_genie (TOP_NODE (&A68G_JOB), A68G_RUNTIME_ERROR);
158 }
159 A68G_GC (seconds) = 0;
160 A68G_GC (total) = 0;
161 A68G_GC (sweeps) = 0;
162 A68G_GC (refused) = 0;
163 A68G_GC (preemptive) = A68G_FALSE;
164 ABEND (A68G (fixed_heap_pointer) >= (A68G (heap_size) - MIN_MEM_SIZE), ERROR_OUT_OF_CORE, __func__);
165 A68G_HP = A68G (fixed_heap_pointer);
166 A68G (heap_is_fluid) = A68G_FALSE;
167 // Assign handle space.
168 A68G_HANDLE *z = (A68G_HANDLE *) A68G_HANDLES;
169 A68G_GC (available_handles) = z;
170 A68G_GC (busy_handles) = NO_HANDLE;
171 int N = (unt) A68G (handle_pool_size) / SIZE_ALIGNED (A68G_HANDLE);
172 A68G_GC (free_handles) = N;
173 A68G_GC (max_handles) = N;
174 for (int k = 0; k < N; k++) {
175 STATUS (&(z[k])) = NULL_MASK;
176 POINTER (&(z[k])) = NO_BYTE;
177 SIZE (&(z[k])) = 0;
178 NEXT (&z[k]) = (k == N - 1 ? NO_HANDLE : &z[k + 1]);
179 PREVIOUS (&z[k]) = (k == 0 ? NO_HANDLE : &z[k - 1]);
180 }
181 }
182
183 //! @brief Whether mode must be coloured.
184
185 BOOL_T moid_needs_colouring (MOID_T * m)
186 {
187 if (IS_REF (m)) {
188 return A68G_TRUE;
189 } else if (IS (m, PROC_SYMBOL)) {
190 return A68G_TRUE;
191 } else if (IS_FLEX (m) || IS_ROW (m)) {
192 return A68G_TRUE;
193 } else if (IS_STRUCT (m) || IS_UNION (m)) {
194 for (PACK_T *p = PACK (m); p != NO_PACK; FORWARD (p)) {
195 if (moid_needs_colouring (MOID (p))) {
196 return A68G_TRUE;
197 }
198 }
199 return A68G_FALSE;
200 } else if (m == M_SIMPLIN || m == M_SIMPLOUT) {
201 return A68G_TRUE;
202 } else if (m == M_SOUND) {
203 return A68G_TRUE;
204 } else {
205 return A68G_FALSE;
206 }
207 }
208
209 //! @brief Colour all elements of a row.
210
211 void colour_row_elements (A68G_REF * z, MOID_T * m)
212 {
213 A68G_ARRAY *arr; A68G_TUPLE *tup;
214 GET_DESCRIPTOR (arr, tup, z);
215 if (get_row_size (tup, DIM (arr)) == 0) {
216 // Empty rows have a ghost elements.
217 BYTE_T *elem = ADDRESS (&ARRAY (arr));
218 colour_object (&elem[0], SUB (m));
219 } else {
220 // The multi-dimensional garbage collector.
221 BYTE_T *elem = ADDRESS (&ARRAY (arr));
222 BOOL_T done = A68G_FALSE;
223 initialise_internal_index (tup, DIM (arr));
224 while (!done) {
225 ADDR_T index = calculate_internal_index (tup, DIM (arr));
226 ADDR_T addr = ROW_ELEMENT (arr, index);
227 colour_object (&elem[addr], SUB (m));
228 done = increment_internal_index (tup, DIM (arr));
229 }
230 }
231 }
232
//! @brief Colour an (active) object.

// Marks "item" of mode "m" as reachable by setting COLOUR_MASK on the handles
// it owns. COOKIE_MASK is set on entry into a handle and tested first, so a
// circular structure is traversed at most once per sweep; cookies are cleared
// wholesale in gc_heap / defragment_heap, not here.

void colour_object (BYTE_T * item, MOID_T * m)
{
  if (item == NO_BYTE || m == NO_MOID) {
    return;
  }
  if (!moid_needs_colouring (m)) {
    return;
  }
// Deeply recursive objects might exhaust the stack.
  LOW_STACK_ALERT (NO_NODE);
  if (IS_REF (m)) {
// REF AMODE colour pointer and object to which it refers.
    A68G_REF *z = (A68G_REF *) item;
    if (INITIALISED (z) && IS_IN_HEAP (z)) {
      if (STATUS_TEST (REF_HANDLE (z), COOKIE_MASK)) {
// Already visited in this sweep; stop to break cycles.
        return;
      }
      STATUS_SET (REF_HANDLE (z), (COOKIE_MASK | COLOUR_MASK));
      if (!IS_NIL (*z)) {
        colour_object (ADDRESS (z), SUB (m));
      }
// STATUS_CLEAR (REF_HANDLE (z), COOKIE_MASK);.
    }
  } else if (IS_FLEXETY_ROW (m)) {
// Claim the descriptor and the row itself.
    A68G_REF *z = (A68G_REF *) item;
    if (INITIALISED (z) && IS_IN_HEAP (z)) {
      if (STATUS_TEST (REF_HANDLE (z), COOKIE_MASK)) {
        return;
      }
// An array is ALWAYS in the heap.
      STATUS_SET (REF_HANDLE (z), (COOKIE_MASK | COLOUR_MASK));
      A68G_ARRAY *arr; A68G_TUPLE *tup;
      GET_DESCRIPTOR (arr, tup, z);
      if (REF_HANDLE (&(ARRAY (arr))) != NO_HANDLE) {
// Assume its initialisation.
        MOID_T *n = DEFLEX (m);
        STATUS_SET (REF_HANDLE (&(ARRAY (arr))), COLOUR_MASK);
        if (moid_needs_colouring (SUB (n))) {
          colour_row_elements (z, n);
        }
      }
// STATUS_CLEAR (REF_HANDLE (z), COOKIE_MASK);.
      (void) tup;
    }
  } else if (IS_STRUCT (m)) {
// STRUCTures - colour fields.
    for (PACK_T *p = PACK (m); p != NO_PACK; FORWARD (p)) {
      colour_object (&item[OFFSET (p)], MOID (p));
    }
  } else if (IS_UNION (m)) {
// UNIONs - a united object may contain a value that needs colouring.
    A68G_UNION *z = (A68G_UNION *) item;
    if (INITIALISED (z)) {
      MOID_T *united_moid = (MOID_T *) VALUE (z);
      colour_object (&item[A68G_UNION_SIZE], united_moid);
    }
  } else if (IS (m, PROC_SYMBOL)) {
// PROCs - save a locale and the objects it points to.
    A68G_PROCEDURE *z = (A68G_PROCEDURE *) item;
    if (INITIALISED (z) && LOCALE (z) != NO_HANDLE && !(STATUS_TEST (LOCALE (z), COOKIE_MASK))) {
      BYTE_T *u = POINTER (LOCALE (z));
      STATUS_SET (LOCALE (z), (COOKIE_MASK | COLOUR_MASK));
// The locale packs a BOOL "present" flag before each stored argument.
      for (PACK_T *s = PACK (MOID (z)); s != NO_PACK; FORWARD (s)) {
        if (VALUE ((A68G_BOOL *) & u[0]) == A68G_TRUE) {
          colour_object (&u[SIZE (M_BOOL)], MOID (s));
        }
        u = &(u[SIZE (M_BOOL) + SIZE (MOID (s))]);
      }
// STATUS_CLEAR (LOCALE (z), COOKIE_MASK);.
    }
  } else if (m == M_SOUND) {
// Claim the data of a SOUND object, that is in the heap.
    A68G_SOUND *w = (A68G_SOUND *) item;
    if (INITIALISED (w)) {
      STATUS_SET (REF_HANDLE (&(DATA (w))), (COOKIE_MASK | COLOUR_MASK));
    }
  } else if (m == M_SIMPLIN || m == M_SIMPLOUT) {
// Transput values behave like united objects: colour the united value.
    A68G_UNION *z = (A68G_UNION *) item;
    if (INITIALISED (z)) {
      MOID_T *united_moid = (MOID_T *) VALUE (z);
      colour_object (&item[A68G_UNION_SIZE], united_moid);
    }
  }
}
320
321 //! @brief Colour active objects in the heap.
322
323 void colour_heap (ADDR_T fp)
324 {
325 while (fp != 0) {
326 NODE_T *p = FRAME_TREE (fp);
327 TABLE_T *t = TABLE (p);
328 if (t != NO_TABLE) {
329 for (TAG_T *q = IDENTIFIERS (t); q != NO_TAG; FORWARD (q)) {
330 colour_object (FRAME_LOCAL (fp, OFFSET (q)), MOID (q));
331 }
332 for (TAG_T *q = ANONYMOUS (t); q != NO_TAG; FORWARD (q)) {
333 colour_object (FRAME_LOCAL (fp, OFFSET (q)), MOID (q));
334 }
335 }
336 fp = FRAME_DYNAMIC_LINK (fp);
337 }
338 }
339
//! @brief Join all active blocks in the heap.

void defragment_heap (void)
{
  A68G_HANDLE *z;
// Free handles: move every uncoloured, unblocked handle from the busy list
// to the available list, unlinking it from the doubly linked busy chain.
  z = A68G_GC (busy_handles);
  while (z != NO_HANDLE) {
    if (!(STATUS_TEST (z, COLOUR_MASK)) && !(STATUS_TEST (z, BLOCK_GC_MASK))) {
      A68G_HANDLE *y = NEXT (z);
      if (PREVIOUS (z) == NO_HANDLE) {
        A68G_GC (busy_handles) = NEXT (z);
      } else {
        NEXT (PREVIOUS (z)) = NEXT (z);
      }
      if (NEXT (z) != NO_HANDLE) {
        PREVIOUS (NEXT (z)) = PREVIOUS (z);
      }
// Prepend the freed handle to the available list.
      NEXT (z) = A68G_GC (available_handles);
      PREVIOUS (z) = NO_HANDLE;
      if (NEXT (z) != NO_HANDLE) {
        PREVIOUS (NEXT (z)) = z;
      }
      A68G_GC (available_handles) = z;
      STATUS_CLEAR (z, ALLOCATED_MASK);
      A68G_GC (freed) += SIZE (z);
      A68G_GC (free_handles)++;
      z = y;
    } else {
      FORWARD (z);
    }
  }
// There can be no uncoloured allocated handle.
  for (z = A68G_GC (busy_handles); z != NO_HANDLE; FORWARD (z)) {
    ABEND (!(STATUS_TEST (z, COLOUR_MASK)) && !(STATUS_TEST (z, BLOCK_GC_MASK)), ERROR_INTERNAL_CONSISTENCY, __func__);
  }
// Defragment the heap: restart the heap pointer at the fixed area, then
// walk to the tail of the busy list and slide each surviving block down
// in tail-to-head order, clearing colour and cookie marks as we go.
  A68G_HP = A68G (fixed_heap_pointer);
  for (z = A68G_GC (busy_handles); z != NO_HANDLE && NEXT (z) != NO_HANDLE; FORWARD (z)) {
    ;
  }
  for (; z != NO_HANDLE; BACKWARD (z)) {
    BYTE_T *dst = HEAP_ADDRESS (A68G_HP);
    if (dst != POINTER (z)) {
      MOVE (dst, POINTER (z), (unt) SIZE (z));
    }
    STATUS_CLEAR (z, (COLOUR_MASK | COOKIE_MASK));
    POINTER (z) = dst;
    A68G_HP += (SIZE (z));
    ABEND (A68G_HP % A68G_ALIGNMENT != 0, ERROR_ALIGNMENT, __func__);
  }
}
392
//! @brief Clean up garbage and defragment the heap.

void gc_heap (NODE_T * p, ADDR_T fp)
{
// Must start with fp = current frame_pointer.
  A68G_HANDLE *z;
  REAL_T t0, t1;
#if defined (BUILD_PARALLEL_CLAUSE)
// Only the main thread may collect; other threads refuse the request.
  if (OTHER_THREAD (FRAME_THREAD_ID (A68G_FP), A68G_PAR (main_thread_id))) {
    A68G_GC (refused)++;
    return;
  }
#endif
// Respect an explicit GC block on this node.
  if (STATUS_TEST (p, BLOCK_GC_MASK)) {
    A68G_GC (refused)++;
    return;
  }
// Honour the conservative-GC option settings.
  if (OPTION_CONSERVATIVE_GC (&A68G_JOB) == A68G_GC_HALT) {
    A68G_GC (refused)++;
    return;
  }
  if (OPTION_CONSERVATIVE_GC (&A68G_JOB) == A68G_GC_SAFE && (A68G_GC (sema) > 0)) {
    A68G_GC (refused)++;
    return;
  }
// Take no risk when intermediate results are on the stack.
  if (OPTION_CONSERVATIVE_GC (&A68G_JOB) && (A68G_SP != A68G (stack_start))) {
    A68G_GC (refused)++;
    return;
  }
// Give it a whirl then.
  t0 = seconds ();
// Unfree handles are subject to inspection.
// Release them all before colouring.
  for (z = A68G_GC (busy_handles); z != NO_HANDLE; FORWARD (z)) {
    STATUS_CLEAR (z, (COLOUR_MASK | COOKIE_MASK));
  }
// Pour paint into the heap to reveal active objects.
  colour_heap (fp);
// Start freeing and compacting.
  A68G_GC (freed) = 0;
  defragment_heap ();
// Stats and logging.
  A68G_GC (total) += A68G_GC (freed);
  A68G_GC (sweeps)++;
  A68G_GC (preemptive) = A68G_FALSE;
  t1 = seconds ();
// C optimiser can make last digit differ, so next condition is
// needed to determine a positive time difference
  if ((t1 - t0) > ((REAL_T) A68G (clock_res) / 2.0)) {
    A68G_GC (seconds) += (t1 - t0);
  } else {
    A68G_GC (seconds) += ((REAL_T) A68G (clock_res) / 2.0);
  }
// Call the event handler.
  genie_call_event_routine (p, M_PROC_VOID, &A68G (on_gc_event), A68G_SP, A68G_FP);
}
450
451 //! @brief Yield a handle that will point to a block in the heap.
452
453 A68G_HANDLE *give_handle (NODE_T * p, MOID_T * a68m)
454 {
455 if (A68G_GC (available_handles) != NO_HANDLE) {
456 A68G_HANDLE *x = A68G_GC (available_handles);
457 A68G_GC (available_handles) = NEXT (x);
458 if (A68G_GC (available_handles) != NO_HANDLE) {
459 PREVIOUS (A68G_GC (available_handles)) = NO_HANDLE;
460 }
461 STATUS (x) = ALLOCATED_MASK;
462 POINTER (x) = NO_BYTE;
463 SIZE (x) = 0;
464 MOID (x) = a68m;
465 NEXT (x) = A68G_GC (busy_handles);
466 PREVIOUS (x) = NO_HANDLE;
467 if (NEXT (x) != NO_HANDLE) {
468 PREVIOUS (NEXT (x)) = x;
469 }
470 A68G_GC (busy_handles) = x;
471 A68G_GC (free_handles)--;
472 return x;
473 } else {
474 // Do not auto-GC!.
475 diagnostic (A68G_RUNTIME_ERROR, p, ERROR_OUT_OF_CORE);
476 exit_genie (p, A68G_RUNTIME_ERROR);
477 }
478 return NO_HANDLE;
479 }
480
481 //! @brief Give a block of heap for an object of indicated mode.
482
483 A68G_REF heap_generator (NODE_T * p, MOID_T * mode, size_t size)
484 {
485 ABEND (size < 0, ERROR_INVALID_SIZE, __func__);
486 size = A68G_ALIGN (size);
487 if (heap_available () >= size) {
488 A68G_REF z;
489 STATUS (&z) = (STATUS_MASK_T) (INIT_MASK | IN_HEAP_MASK);
490 OFFSET (&z) = 0;
491 A68G_HANDLE *x = give_handle (p, mode);
492 SIZE (x) = size;
493 POINTER (x) = HEAP_ADDRESS (A68G_HP);
494 FILL (POINTER (x), 0, size);
495 REF_SCOPE (&z) = PRIMAL_SCOPE;
496 REF_HANDLE (&z) = x;
497 ABEND (((long) ADDRESS (&z)) % A68G_ALIGNMENT != 0, ERROR_ALIGNMENT, __func__);
498 A68G_HP += size;
499 REAL_T _f_ = (REAL_T) A68G_HP / (REAL_T) A68G (heap_size);
500 REAL_T _g_ = (REAL_T) (A68G_GC (max_handles) - A68G_GC (free_handles)) / (REAL_T) A68G_GC (max_handles);
501 if (_f_ > DEFAULT_PREEMPTIVE || _g_ > DEFAULT_PREEMPTIVE) {
502 A68G_GC (preemptive) = A68G_TRUE;
503 }
504 return z;
505 } else {
506 // Do not auto-GC!.
507 diagnostic (A68G_RUNTIME_ERROR, p, ERROR_OUT_OF_CORE);
508 exit_genie (p, A68G_RUNTIME_ERROR);
509 return nil_ref;
510 }
511 }
512
//! @brief Give a block of heap for an object of indicated mode.

A68G_REF heap_generator_2 (NODE_T * p, MOID_T * mode, int len, size_t size)
{
// Allocate "len" elements of "size" bytes each, guarding the product
// against overflow before multiplying.
  if (len == 0 || size == 0) {
    return heap_generator (p, mode, 0);
  } else if (ABS (size) < (2 * GIGABYTE) / ABS (len)) {
// NOTE(review): "size" is unsigned, so ABS (size) is a no-op; the guard keeps
// len * size below 2 GB - confirm against the declared type of GIGABYTE.
    return heap_generator (p, mode, (size_t) (len) * size);
  } else {
    diagnostic (A68G_RUNTIME_ERROR, p, ERROR_OUT_OF_CORE);
    exit_genie (p, A68G_RUNTIME_ERROR);
  }
  return nil_ref;
}
527
//! @brief Give a block of heap for an object of indicated mode.

A68G_REF heap_generator_3 (NODE_T * p, MOID_T * mode, int len1, int len2, size_t size)
{
// Allocate len1 * len2 elements of "size" bytes, guarding len1 * len2
// against int overflow before multiplying.
  if (len1 == 0 || len2 == 0) {
    return heap_generator (p, mode, 0);
  } else if (ABS (len2) < (2 * GIGABYTE) / ABS (len1)) {
    return heap_generator_2 (p, mode, len1 * len2, size);
  } else {
    diagnostic (A68G_RUNTIME_ERROR, p, ERROR_OUT_OF_CORE);
    exit_genie (p, A68G_RUNTIME_ERROR);
  }
  return nil_ref;
}
542
543 // Following implements the generator.
544
545 //! @brief Whether a moid needs work in allocation.
546
547 BOOL_T mode_needs_allocation (MOID_T * m)
548 {
549 if (IS_UNION (m)) {
550 return A68G_FALSE;
551 } else {
552 return HAS_ROWS (m);
553 }
554 }
555
//! @brief Prepare bounds.

void genie_compute_bounds (NODE_T * p)
{
// Evaluate bound expressions, pushing a lower and an upper bound (both INT)
// on the stack for every dimension encountered.
  for (; p != NO_NODE; FORWARD (p)) {
    if (IS (p, BOUNDS_LIST)) {
      genie_compute_bounds (SUB (p));
    } else if (IS (p, BOUND)) {
      genie_compute_bounds (SUB (p));
    } else if (IS (p, UNIT)) {
      if (NEXT (p) != NO_NODE && (is_one_of (NEXT (p), COLON_SYMBOL, DOTDOT_SYMBOL, STOP))) {
// "lwb : upb" - evaluate the lower bound, then step to the upper bound unit.
        GENIE_UNIT (p);
        p = NEXT_NEXT (p);
      } else {
// Default lower bound.
        PUSH_VALUE (p, 1, A68G_INT);
      }
// Evaluate the upper bound.
      GENIE_UNIT (p);
    }
  }
}
577
//! @brief Prepare bounds for a row.

void genie_generator_bounds (NODE_T * p)
{
// Walk the declarer tree and evaluate every BOUNDS clause onto the stack.
  LOW_STACK_ALERT (p);
  for (; p != NO_NODE; FORWARD (p)) {
    if (IS (p, BOUNDS)) {
      genie_compute_bounds (SUB (p));
    } else if (IS (p, INDICANT) && IS_LITERALLY (p, "STRING")) {
// STRING has implicit bounds; nothing to evaluate.
      return;
    } else if (IS (p, INDICANT)) {
      if (TAX (p) != NO_TAG && HAS_ROWS (MOID (TAX (p)))) {
// Continue from definition at MODE A = ....
        genie_generator_bounds (DEF_NODE (p));
      }
    } else if (IS (p, DECLARER) && !mode_needs_allocation (MOID (p))) {
// No rows below this declarer, so no bounds to compute.
      return;
    } else {
      genie_generator_bounds (SUB (p));
    }
  }
}
600
//! @brief Allocate a structure.

// Walks a STRUCTURED_FIELD, allocating stowed parts of each field and
// advancing *faddr past every field. *decl tracks the declarer in effect;
// *cur_sp / *top_sp track consumption of the evaluated bounds on the stack.

void genie_generator_field (NODE_T * p, BYTE_T ** faddr, NODE_T ** decl, ADDR_T * cur_sp, ADDR_T * top_sp)
{
  for (; p != NO_NODE; FORWARD (p)) {
    if (IS (p, STRUCTURED_FIELD)) {
      genie_generator_field (SUB (p), faddr, decl, cur_sp, top_sp);
    }
    if (IS (p, DECLARER)) {
// Remember the declarer; it applies to the field identifiers that follow.
      (*decl) = SUB (p);
      FORWARD (p);
    }
    if (IS (p, FIELD_IDENTIFIER)) {
      MOID_T *fmoid = MOID (*decl);
      if (HAS_ROWS (fmoid) && ISNT (fmoid, UNION_SYMBOL)) {
// Field contains rows: allocate it, then restore *cur_sp so sibling fields
// sharing this declarer re-read the same bounds.
        ADDR_T pop_sp = *cur_sp;
        genie_generator_stowed (*decl, *faddr, NO_REF, cur_sp);
        *top_sp = *cur_sp;
        *cur_sp = pop_sp;
      }
      (*faddr) += SIZE (fmoid);
    }
  }
}
625
//! @brief Allocate a structure.

void genie_generator_struct (NODE_T * p, BYTE_T ** faddr, ADDR_T * cur_sp)
{
// Allocate each STRUCTURED_FIELD in turn, advancing the bound-count
// (*cur_sp) past the bounds that field consumed.
  for (; p != NO_NODE; FORWARD (p)) {
    if (IS (p, STRUCTURED_FIELD_LIST)) {
      genie_generator_struct (SUB (p), faddr, cur_sp);
    } else if (IS (p, STRUCTURED_FIELD)) {
      NODE_T *decl = NO_NODE;
      ADDR_T top_sp = *cur_sp;
      genie_generator_field (SUB (p), faddr, &decl, cur_sp, &top_sp);
      *cur_sp = top_sp;
    }
  }
}
641
//! @brief Allocate a stowed object.

// Generates the dynamically sized parts of the object at "addr", consuming
// the bounds previously evaluated on the stack at *cur_sp (two INTs per
// dimension). Recurses through declarer definitions, structures and rows.

void genie_generator_stowed (NODE_T * p, BYTE_T * addr, NODE_T ** decl, ADDR_T * cur_sp)
{
  if (p == NO_NODE) {
    return;
  } else if (IS (p, INDICANT) && IS_LITERALLY (p, "STRING")) {
// The standard prelude definition is hard coded here.
    *((A68G_REF *) addr) = empty_string (p);
    return;
  } else if (IS (p, INDICANT) && TAX (p) != NO_TAG) {
// Continue from definition at MODE A = ..
    genie_generator_stowed (DEF_NODE (p), addr, decl, cur_sp);
    return;
  } else if (IS (p, DECLARER) && mode_needs_allocation (MOID (p))) {
    genie_generator_stowed (SUB (p), addr, decl, cur_sp);
    return;
  } else if (IS_STRUCT (p)) {
// Allocate each field of the structure in place.
    BYTE_T *faddr = addr;
    genie_generator_struct (SUB_NEXT (p), &faddr, cur_sp);
    return;
  } else if (IS_FLEX (p)) {
// FLEX rows are allocated like their plain counterpart.
    genie_generator_stowed (NEXT (p), addr, decl, cur_sp);
    return;
  } else if (IS (p, BOUNDS)) {
// Build a descriptor from the evaluated bounds, then generate the row.
    A68G_REF desc;
    MOID_T *rmod = MOID (p), *smod = MOID (NEXT (p));
    BYTE_T *bounds = STACK_ADDRESS (*cur_sp);
    int dim = DIM (DEFLEX (rmod)), esiz = SIZE (smod), rsiz = 1;
    BOOL_T alloc_sub = A68G_FALSE, alloc_str = A68G_FALSE;
    NODE_T *in = SUB_NEXT (p);
    if (IS (in, INDICANT) && IS_LITERALLY (in, "STRING")) {
      alloc_str = A68G_TRUE;
      alloc_sub = A68G_FALSE;
    } else {
      alloc_sub = mode_needs_allocation (smod);
      alloc_str = A68G_FALSE;
    }
    desc = heap_generator (p, rmod, DESCRIPTOR_SIZE (dim));
    A68G_ARRAY *arr; A68G_TUPLE *tup;
    GET_DESCRIPTOR (arr, tup, &desc);
    for (int k = 0; k < dim; k++) {
// Read the lower and upper bound of dimension k from the stack.
      CHECK_INIT (p, INITIALISED ((A68G_INT *) bounds), M_INT);
      LWB (&tup[k]) = VALUE ((A68G_INT *) bounds);
      bounds += SIZE (M_INT);
      CHECK_INIT (p, INITIALISED ((A68G_INT *) bounds), M_INT);
      UPB (&tup[k]) = VALUE ((A68G_INT *) bounds);
      bounds += SIZE (M_INT);
      SPAN (&tup[k]) = rsiz;
      SHIFT (&tup[k]) = LWB (&tup[k]) * SPAN (&tup[k]);
      rsiz *= ROW_SIZE (&tup[k]);
    }
    DIM (arr) = dim;
    MOID (arr) = smod;
    ELEM_SIZE (arr) = esiz;
    SLICE_OFFSET (arr) = 0;
    FIELD_OFFSET (arr) = 0;
    (*cur_sp) += (dim * 2 * SIZE (M_INT));
// Generate a new row. Note that STRING is handled explicitly since
// it has implicit bounds
    if (rsiz == 0) {
// Generate a ghost element.
      ADDR_T top_sp = *cur_sp;
      ARRAY (arr) = heap_generator (p, rmod, esiz);
      BYTE_T *elem = ADDRESS (&(ARRAY (arr)));
      if (alloc_sub) {
        genie_generator_stowed (NEXT (p), &(elem[0]), NO_REF, cur_sp);
        top_sp = *cur_sp;
      } else if (alloc_str) {
        *(A68G_REF *) elem = empty_string (p);
      }
      (*cur_sp) = top_sp;
    } else {
      ADDR_T pop_sp = *cur_sp, top_sp = *cur_sp;
      ARRAY (arr) = heap_generator_2 (p, rmod, rsiz, esiz);
      BYTE_T *elem = ADDRESS (&(ARRAY (arr)));
      for (int k = 0; k < rsiz; k++) {
        if (alloc_sub) {
// Every element re-reads the same bounds, so restore the stack pointer first.
          (*cur_sp) = pop_sp;
          genie_generator_stowed (NEXT (p), &(elem[k * esiz]), NO_REF, cur_sp);
          top_sp = *cur_sp;
        } else if (alloc_str) {
          *(A68G_REF *) (&(elem[k * esiz])) = empty_string (p);
        }
      }
      (*cur_sp) = top_sp;
    }
    *(A68G_REF *) addr = desc;
    return;
  }
}
733
734 //! @brief Generate space and push a REF.
735
736 void genie_generator_internal (NODE_T * p, MOID_T * ref_mode, TAG_T * tag, LEAP_T leap, ADDR_T sp)
737 {
738 // Set up a REF MODE object, either in the stack or in the heap.
739 MOID_T *mode = SUB (ref_mode);
740 A68G_REF name = nil_ref;
741 if (leap == LOC_SYMBOL) {
742 STATUS (&name) = (STATUS_MASK_T) (INIT_MASK | IN_FRAME_MASK);
743 REF_HANDLE (&name) = (A68G_HANDLE *) & nil_handle;
744 OFFSET (&name) = A68G_FP + FRAME_INFO_SIZE + OFFSET (tag);
745 REF_SCOPE (&name) = A68G_FP;
746 } else if (leap == -LOC_SYMBOL && NON_LOCAL (p) != NO_TABLE) {
747 name = heap_generator (p, mode, SIZE (mode));
748 ADDR_T lev;
749 FOLLOW_SL (lev, LEVEL (NON_LOCAL (p)));
750 REF_SCOPE (&name) = lev;
751 } else if (leap == -LOC_SYMBOL) {
752 name = heap_generator (p, mode, SIZE (mode));
753 REF_SCOPE (&name) = A68G_FP;
754 } else if (leap == HEAP_SYMBOL || leap == -HEAP_SYMBOL) {
755 name = heap_generator (p, mode, SIZE (mode));
756 REF_SCOPE (&name) = PRIMAL_SCOPE;
757 } else if (leap == NEW_SYMBOL || leap == -NEW_SYMBOL) {
758 name = heap_generator (p, mode, SIZE (mode));
759 REF_SCOPE (&name) = PRIMAL_SCOPE;
760 } else {
761 ABEND (A68G_TRUE, ERROR_INTERNAL_CONSISTENCY, __func__);
762 }
763 if (HAS_ROWS (mode)) {
764 ADDR_T cur_sp = sp;
765 genie_generator_stowed (p, ADDRESS (&name), NO_REF, &cur_sp);
766 }
767 PUSH_REF (p, name);
768 }
769
770 //! @brief Push a name refering to allocated space.
771
772 PROP_T genie_generator (NODE_T * p)
773 {
774 ADDR_T pop_sp = A68G_SP;
775 if (NEXT_SUB (p) != NO_NODE) {
776 genie_generator_bounds (NEXT_SUB (p));
777 }
778 genie_generator_internal (NEXT_SUB (p), MOID (p), TAX (p), -ATTRIBUTE (SUB (p)), pop_sp);
779 A68G_REF z;
780 POP_REF (p, &z);
781 A68G_SP = pop_sp;
782 PUSH_REF (p, z);
783 PROP_T self;
784 UNIT (&self) = genie_generator;
785 SOURCE (&self) = p;
786 return self;
787 }
788
// Control of C heap

//! @brief Discard_heap.

void discard_heap (void)
{
// Release the interpreter heap and reset the fixed and temporary pointers.
  a68g_free (A68G_HEAP);
  A68G (fixed_heap_pointer) = 0;
  A68G (temp_heap_pointer) = 0;
}
© 2002-2025 J.M. van der Veer (jmvdveer@xs4all.nl)
|