Ruby 3.3.6p108 (2024-11-05 revision 75015d4c1f6965b5e85e96fb309f1f2129f933c0)
node.c
1/**********************************************************************
2
3 node.c - ruby node tree
4
5 $Author: mame $
6 created at: 09/12/06 21:23:44 JST
7
8 Copyright (C) 2009 Yusuke Endoh
9
10**********************************************************************/
11
12#ifdef UNIVERSAL_PARSER
13
14#include <stddef.h>
15#include "node.h"
16#include "rubyparser.h"
17#include "internal/parse.h"
18#define T_NODE 0x1b
19
20#else
21
22#include "internal.h"
23#include "internal/hash.h"
24#include "internal/variable.h"
25#include "ruby/ruby.h"
26#include "vm_core.h"
27
28#endif
29
30#define NODE_BUF_DEFAULT_SIZE (sizeof(struct RNode) * 16)
31
32static void
33init_node_buffer_elem(node_buffer_elem_t *nbe, size_t allocated, void *xmalloc(size_t))
34{
35 nbe->allocated = allocated;
36 nbe->used = 0;
37 nbe->len = 0;
38 nbe->nodes = xmalloc(allocated / sizeof(struct RNode) * sizeof(struct RNode *)); /* All node requires at least RNode */
39}
40
41static void
42init_node_buffer_list(node_buffer_list_t * nb, node_buffer_elem_t *head, void *xmalloc(size_t))
43{
44 init_node_buffer_elem(head, NODE_BUF_DEFAULT_SIZE, xmalloc);
45 nb->head = nb->last = head;
46 nb->head->next = NULL;
47}
48
49#ifdef UNIVERSAL_PARSER
50#define ruby_xmalloc config->malloc
51#define Qnil config->qnil
52#endif
53
#ifdef UNIVERSAL_PARSER
static node_buffer_t *
rb_node_buffer_new(rb_parser_config_t *config)
#else
static node_buffer_t *
rb_node_buffer_new(void)
#endif
{
    /* One bucket = element header up to `buf` plus the default payload. */
    const size_t bucket_size = offsetof(node_buffer_elem_t, buf) + NODE_BUF_DEFAULT_SIZE;
    /* The node_buffer_t and the two initial buckets (unmarkable, markable)
     * are carved out of one allocation. */
    const size_t alloc_size = sizeof(node_buffer_t) + (bucket_size * 2);
    STATIC_ASSERT(
        integer_overflow,
        offsetof(node_buffer_elem_t, buf) + NODE_BUF_DEFAULT_SIZE
        > sizeof(node_buffer_t) + 2 * sizeof(node_buffer_elem_t));
    node_buffer_t *nb = ruby_xmalloc(alloc_size);
    /* First bucket sits immediately after the node_buffer_t itself;
     * second bucket follows at a bucket_size offset. */
    init_node_buffer_list(&nb->unmarkable, (node_buffer_elem_t*)&nb[1], ruby_xmalloc);
    init_node_buffer_list(&nb->markable, (node_buffer_elem_t*)((size_t)nb->unmarkable.head + bucket_size), ruby_xmalloc);
    nb->local_tables = 0;
    nb->mark_hash = Qnil;
    nb->tokens = Qnil;
#ifdef UNIVERSAL_PARSER
    nb->config = config;
#endif
    return nb;
}
79
80#ifdef UNIVERSAL_PARSER
81#undef ruby_xmalloc
82#define ruby_xmalloc ast->node_buffer->config->malloc
83#undef xfree
84#define xfree ast->node_buffer->config->free
85#define rb_ident_hash_new ast->node_buffer->config->ident_hash_new
86#define rb_xmalloc_mul_add ast->node_buffer->config->xmalloc_mul_add
87#define ruby_xrealloc(var,size) (ast->node_buffer->config->realloc_n((void *)var, 1, size))
88#define rb_gc_mark ast->node_buffer->config->gc_mark
89#define rb_gc_location ast->node_buffer->config->gc_location
90#define rb_gc_mark_movable ast->node_buffer->config->gc_mark_movable
91#undef Qnil
92#define Qnil ast->node_buffer->config->qnil
93#define Qtrue ast->node_buffer->config->qtrue
94#define NIL_P ast->node_buffer->config->nil_p
95#define rb_hash_aset ast->node_buffer->config->hash_aset
96#define rb_hash_delete ast->node_buffer->config->hash_delete
97#define RB_OBJ_WRITE(old, slot, young) ast->node_buffer->config->obj_write((VALUE)(old), (VALUE *)(slot), (VALUE)(young))
98#endif
99
100typedef void node_itr_t(rb_ast_t *ast, void *ctx, NODE *node);
101static void iterate_node_values(rb_ast_t *ast, node_buffer_list_t *nb, node_itr_t * func, void *ctx);
102
/* Setup NODE structure.
 * NODE is not an object managed by GC, but it imitates an object
 * so that it can work with `RB_TYPE_P(obj, T_NODE)`.
 * This dirty hack is needed because Ripper jumbles NODEs and other type
 * objects.
 */
void
rb_node_init(NODE *n, enum node_type type)
{
    /* Pretend to be a T_NODE object so VALUE type checks accept the node. */
    RNODE(n)->flags = T_NODE;
    nd_init_type(RNODE(n), type);
    /* Source location starts zeroed; the parser fills in real positions. */
    RNODE(n)->nd_loc.beg_pos.lineno = 0;
    RNODE(n)->nd_loc.beg_pos.column = 0;
    RNODE(n)->nd_loc.end_pos.lineno = 0;
    RNODE(n)->nd_loc.end_pos.column = 0;
    /* -1 marks "no node id assigned yet". */
    RNODE(n)->node_id = -1;
}
120
/* Map a node_type value to its name string ("NODE_SCOPE", ...).
 * The case arms are generated into node_name.inc at build time.
 * Returns 0 (NULL) for a value with no generated arm. */
const char *
rb_node_name(int node)
{
    switch (node) {
#include "node_name.inc"
      default:
        return 0;
    }
}
130
#ifdef UNIVERSAL_PARSER
/* Public wrapper around rb_node_name().  The universal-parser build has no
 * rb_bug available here, so an unknown node yields NULL instead of aborting. */
const char *
ruby_node_name(int node)
{
    return rb_node_name(node);
}
#else
/* Public wrapper around rb_node_name() that treats an unknown node type as
 * an interpreter bug. */
const char *
ruby_node_name(int node)
{
    const char *name = rb_node_name(node);

    if (!name) rb_bug("unknown node: %d", node);
    return name;
}
#endif
147
148static void
149node_buffer_list_free(rb_ast_t *ast, node_buffer_list_t * nb)
150{
151 node_buffer_elem_t *nbe = nb->head;
152 while (nbe != nb->last) {
153 void *buf = nbe;
154 xfree(nbe->nodes);
155 nbe = nbe->next;
156 xfree(buf);
157 }
158
159 /* The last node_buffer_elem_t is allocated in the node_buffer_t, so we
160 * only need to free the nodes. */
161 xfree(nbe->nodes);
162}
163
165 struct rb_ast_local_table_link *next;
166 // struct rb_ast_id_table {
167 int size;
168 ID ids[FLEX_ARY_LEN];
169 // }
170};
171
/* Teardown hook run for every unmarkable node before its buffer is
 * released (see rb_node_buffer_free).  Currently no node type listed here
 * owns out-of-band memory, so only the default no-op branch exists; node
 * types that allocate must add a case. */
static void
free_ast_value(rb_ast_t *ast, void *ctx, NODE *node)
{
    switch (nd_type(node)) {
      default:
        break;
    }
}
180
181static void
182rb_node_buffer_free(rb_ast_t *ast, node_buffer_t *nb)
183{
184 iterate_node_values(ast, &nb->unmarkable, free_ast_value, NULL);
185 node_buffer_list_free(ast, &nb->unmarkable);
186 node_buffer_list_free(ast, &nb->markable);
187 struct rb_ast_local_table_link *local_table = nb->local_tables;
188 while (local_table) {
189 struct rb_ast_local_table_link *next_table = local_table->next;
190 xfree(local_table);
191 local_table = next_table;
192 }
193 xfree(nb);
194}
195
196#define buf_add_offset(nbe, offset) ((char *)(nbe->buf) + (offset))
197
198static NODE *
199ast_newnode_in_bucket(rb_ast_t *ast, node_buffer_list_t *nb, size_t size, size_t alignment)
200{
201 size_t padding;
202 NODE *ptr;
203
204 padding = alignment - (size_t)buf_add_offset(nb->head, nb->head->used) % alignment;
205 padding = padding == alignment ? 0 : padding;
206
207 if (nb->head->used + size + padding > nb->head->allocated) {
208 size_t n = nb->head->allocated * 2;
210 nbe = rb_xmalloc_mul_add(n, sizeof(char *), offsetof(node_buffer_elem_t, buf));
211 init_node_buffer_elem(nbe, n, ruby_xmalloc);
212 nbe->next = nb->head;
213 nb->head = nbe;
214 padding = 0; /* malloc returns aligned address then no need to add padding */
215 }
216
217 ptr = (NODE *)buf_add_offset(nb->head, nb->head->used + padding);
218 nb->head->used += (size + padding);
219 nb->head->nodes[nb->head->len++] = ptr;
220 return ptr;
221}
222
224static bool
225nodetype_markable_p(enum node_type type)
226{
227 switch (type) {
228 case NODE_MATCH:
229 case NODE_LIT:
230 case NODE_STR:
231 case NODE_XSTR:
232 case NODE_DSTR:
233 case NODE_DXSTR:
234 case NODE_DREGX:
235 case NODE_DSYM:
236 return true;
237 default:
238 return false;
239 }
240}
241
242NODE *
243rb_ast_newnode(rb_ast_t *ast, enum node_type type, size_t size, size_t alignment)
244{
245 node_buffer_t *nb = ast->node_buffer;
246 node_buffer_list_t *bucket =
247 (nodetype_markable_p(type) ? &nb->markable : &nb->unmarkable);
248 return ast_newnode_in_bucket(ast, bucket, size, alignment);
249}
250
#if RUBY_DEBUG
/* Debug-build guard for in-place node type changes: a node must never
 * migrate between the markable and unmarkable buckets, because it was
 * physically allocated into one of them at creation time. */
void
rb_ast_node_type_change(NODE *n, enum node_type type)
{
    enum node_type before = nd_type(n);
    bool was_markable = nodetype_markable_p(before);
    bool now_markable = nodetype_markable_p(type);

    if (was_markable != now_markable) {
        rb_bug("node type changed: %s -> %s",
               ruby_node_name(before), ruby_node_name(type));
    }
}
#endif
262
264rb_ast_new_local_table(rb_ast_t *ast, int size)
265{
266 size_t alloc_size = sizeof(struct rb_ast_local_table_link) + size * sizeof(ID);
267 struct rb_ast_local_table_link *link = ruby_xmalloc(alloc_size);
268 link->next = ast->node_buffer->local_tables;
269 ast->node_buffer->local_tables = link;
270 link->size = size;
271
272 return (rb_ast_id_table_t *) &link->size;
273}
274
276rb_ast_resize_latest_local_table(rb_ast_t *ast, int size)
277{
278 struct rb_ast_local_table_link *link = ast->node_buffer->local_tables;
279 size_t alloc_size = sizeof(struct rb_ast_local_table_link) + size * sizeof(ID);
280 link = ruby_xrealloc(link, alloc_size);
281 ast->node_buffer->local_tables = link;
282 link->size = size;
283
284 return (rb_ast_id_table_t *) &link->size;
285}
286
287void
288rb_ast_delete_node(rb_ast_t *ast, NODE *n)
289{
290 (void)ast;
291 (void)n;
292 /* should we implement freelist? */
293}
294
#ifdef UNIVERSAL_PARSER
/* Allocate a new AST backed by a fresh node buffer.  The config's counter
 * is incremented here and decremented in rb_ast_free(), which frees the
 * config when it reaches zero. */
rb_ast_t *
rb_ast_new(rb_parser_config_t *config)
{
    node_buffer_t *nb = rb_node_buffer_new(config);
    config->counter++;
    return config->ast_new((VALUE)nb);
}
#else
/* Allocate a new AST as an imemo_ast object owning a fresh node buffer. */
rb_ast_t *
rb_ast_new(void)
{
    node_buffer_t *nb = rb_node_buffer_new();
    rb_ast_t *ast = (rb_ast_t *)rb_imemo_new(imemo_ast, 0, 0, 0, (VALUE)nb);
    return ast;
}
#endif
312
313static void
314iterate_buffer_elements(rb_ast_t *ast, node_buffer_elem_t *nbe, long len, node_itr_t *func, void *ctx)
315{
316 long cursor;
317 for (cursor = 0; cursor < len; cursor++) {
318 func(ast, ctx, nbe->nodes[cursor]);
319 }
320}
321
322static void
323iterate_node_values(rb_ast_t *ast, node_buffer_list_t *nb, node_itr_t * func, void *ctx)
324{
325 node_buffer_elem_t *nbe = nb->head;
326
327 while (nbe) {
328 iterate_buffer_elements(ast, nbe, nbe->len, func, ctx);
329 nbe = nbe->next;
330 }
331}
332
/* GC mark callback for nodes in the markable bucket: marks the embedded
 * literal VALUE as movable (compaction may relocate it; update_ast_value()
 * then rewrites the slot).  The case list must match nodetype_markable_p();
 * nd_lit is read through RNODE_LIT for all of them — assumes the listed
 * node structs share that slot layout (TODO confirm against rubyparser.h).
 * Reaching default is a bug: only markable nodes are in this bucket. */
static void
mark_ast_value(rb_ast_t *ast, void *ctx, NODE *node)
{
#ifdef UNIVERSAL_PARSER
    /* Universal parser has no global rb_bug; shadow it with the config's
     * bug-report callback. */
    bug_report_func rb_bug = ast->node_buffer->config->bug;
#endif

    switch (nd_type(node)) {
      case NODE_MATCH:
      case NODE_LIT:
      case NODE_STR:
      case NODE_XSTR:
      case NODE_DSTR:
      case NODE_DXSTR:
      case NODE_DREGX:
      case NODE_DSYM:
        rb_gc_mark_movable(RNODE_LIT(node)->nd_lit);
        break;
      default:
        rb_bug("unreachable node %s", ruby_node_name(nd_type(node)));
    }
}
355
356static void
357update_ast_value(rb_ast_t *ast, void *ctx, NODE *node)
358{
359#ifdef UNIVERSAL_PARSER
360 bug_report_func rb_bug = ast->node_buffer->config->bug;
361#endif
362
363 switch (nd_type(node)) {
364 case NODE_MATCH:
365 case NODE_LIT:
366 case NODE_STR:
367 case NODE_XSTR:
368 case NODE_DSTR:
369 case NODE_DXSTR:
370 case NODE_DREGX:
371 case NODE_DSYM:
372 RNODE_LIT(node)->nd_lit = rb_gc_location(RNODE_LIT(node)->nd_lit);
373 break;
374 default:
375 rb_bug("unreachable");
376 }
377}
378
379void
380rb_ast_update_references(rb_ast_t *ast)
381{
382 if (ast->node_buffer) {
383 node_buffer_t *nb = ast->node_buffer;
384
385 iterate_node_values(ast, &nb->markable, update_ast_value, NULL);
386 }
387}
388
389void
390rb_ast_mark(rb_ast_t *ast)
391{
392 if (ast->node_buffer) {
393 rb_gc_mark(ast->node_buffer->mark_hash);
394 rb_gc_mark(ast->node_buffer->tokens);
395 node_buffer_t *nb = ast->node_buffer;
396 iterate_node_values(ast, &nb->markable, mark_ast_value, NULL);
397 if (ast->body.script_lines) rb_gc_mark(ast->body.script_lines);
398 }
399}
400
401void
402rb_ast_free(rb_ast_t *ast)
403{
404 if (ast->node_buffer) {
405#ifdef UNIVERSAL_PARSER
406 rb_parser_config_t *config = ast->node_buffer->config;
407#endif
408
409 rb_node_buffer_free(ast, ast->node_buffer);
410 ast->node_buffer = 0;
411#ifdef UNIVERSAL_PARSER
412 config->counter--;
413 if (config->counter <= 0) {
414 rb_ruby_parser_config_free(config);
415 }
416#endif
417 }
418}
419
420static size_t
421buffer_list_size(node_buffer_list_t *nb)
422{
423 size_t size = 0;
424 node_buffer_elem_t *nbe = nb->head;
425 while (nbe != nb->last) {
426 size += offsetof(node_buffer_elem_t, buf) + nbe->used;
427 nbe = nbe->next;
428 }
429 return size;
430}
431
432size_t
433rb_ast_memsize(const rb_ast_t *ast)
434{
435 size_t size = 0;
436 node_buffer_t *nb = ast->node_buffer;
437
438 if (nb) {
439 size += sizeof(node_buffer_t);
440 size += buffer_list_size(&nb->unmarkable);
441 size += buffer_list_size(&nb->markable);
442 }
443 return size;
444}
445
/* Release the AST's node buffer.  Currently identical to rb_ast_free();
 * NOTE(review): kept as a distinct public entry point — confirm callers
 * before merging the two. */
void
rb_ast_dispose(rb_ast_t *ast)
{
    rb_ast_free(ast);
}
451
452void
453rb_ast_add_mark_object(rb_ast_t *ast, VALUE obj)
454{
455 if (NIL_P(ast->node_buffer->mark_hash)) {
456 RB_OBJ_WRITE(ast, &ast->node_buffer->mark_hash, rb_ident_hash_new());
457 }
458 rb_hash_aset(ast->node_buffer->mark_hash, obj, Qtrue);
459}
460
461void
462rb_ast_delete_mark_object(rb_ast_t *ast, VALUE obj)
463{
464 if (NIL_P(ast->node_buffer->mark_hash)) return;
465 rb_hash_delete(ast->node_buffer->mark_hash, obj);
466}
467
/* Return the token array attached to this AST via rb_ast_set_tokens(),
 * or Qnil when none was set. */
VALUE
rb_ast_tokens(rb_ast_t *ast)
{
    return ast->node_buffer->tokens;
}
473
/* Attach a token array to the AST.  RB_OBJ_WRITE preserves the GC write
 * barrier; the slot is marked in rb_ast_mark(). */
void
rb_ast_set_tokens(rb_ast_t *ast, VALUE tokens)
{
    RB_OBJ_WRITE(ast, &ast->node_buffer->tokens, tokens);
}
479
/* Change the type of an existing node, returning nd_init_type()'s result.
 * Debug builds first verify the change does not move the node between the
 * markable and unmarkable buckets (rb_ast_node_type_change), since nodes
 * are physically allocated into one bucket at creation. */
VALUE
rb_node_set_type(NODE *n, enum node_type t)
{
#if RUBY_DEBUG
    rb_ast_node_type_change(n, t);
#endif
    return nd_init_type(n, t);
}
#define xfree
Old name of ruby_xfree.
Definition xmalloc.h:58
#define T_NODE
Old name of RUBY_T_NODE.
Definition value_type.h:73
#define xmalloc
Old name of ruby_xmalloc.
Definition xmalloc.h:53
#define Qtrue
Old name of RUBY_Qtrue.
#define Qnil
Old name of RUBY_Qnil.
#define NIL_P
Old name of RB_NIL_P.
#define RB_OBJ_WRITE(old, slot, young)
Declaration of a "back" pointer.
Definition gc.h:619
int len
Length of the buffer.
Definition io.h:8
VALUE type(ANYARGS)
ANYARGS-ed function type.
#define RBIMPL_ATTR_PURE()
Wraps (or simulates) __attribute__((pure))
Definition pure.h:33
uintptr_t ID
Type that represents a Ruby identifier such as a variable name.
Definition value.h:52
uintptr_t VALUE
Type that represents a Ruby object.
Definition value.h:40