/*
 * ruby.git / prism_compile.c
 * (blob 5ecc69470f9b1c6a62073588ceff6c114b999a7d)
 */
1 #include "prism.h"
/******************************************************************************/
/* These macros operate on pm_line_column_t structs as opposed to NODE*s.    */
/******************************************************************************/

/* Append a stack-adjust element targeting `label` at the given source line. */
#define PUSH_ADJUST(seq, location, label) \
    ADD_ELEM((seq), (LINK_ELEMENT *) new_adjust_body(iseq, (label), (int) (location).line))

/* Append a stack-adjust element with no associated line (-1 = restore). */
#define PUSH_ADJUST_RESTORE(seq, label) \
    ADD_ELEM((seq), (LINK_ELEMENT *) new_adjust_body(iseq, (label), -1))

/* Append an instruction with 0, 1, 2, or 3 operands respectively. */
#define PUSH_INSN(seq, location, insn) \
    ADD_ELEM((seq), (LINK_ELEMENT *) new_insn_body(iseq, (int) (location).line, (int) (location).column, BIN(insn), 0))

#define PUSH_INSN1(seq, location, insn, op1) \
    ADD_ELEM((seq), (LINK_ELEMENT *) new_insn_body(iseq, (int) (location).line, (int) (location).column, BIN(insn), 1, (VALUE)(op1)))

#define PUSH_INSN2(seq, location, insn, op1, op2) \
    ADD_ELEM((seq), (LINK_ELEMENT *) new_insn_body(iseq, (int) (location).line, (int) (location).column, BIN(insn), 2, (VALUE)(op1), (VALUE)(op2)))

#define PUSH_INSN3(seq, location, insn, op1, op2, op3) \
    ADD_ELEM((seq), (LINK_ELEMENT *) new_insn_body(iseq, (int) (location).line, (int) (location).column, BIN(insn), 3, (VALUE)(op1), (VALUE)(op2), (VALUE)(op3)))

/* Append a label-operand instruction and bump the label's reference count. */
#define PUSH_INSNL(seq, location, insn, label) \
    (PUSH_INSN1(seq, location, insn, label), LABEL_REF(label))

/* Append a label element itself into the sequence. */
#define PUSH_LABEL(seq, label) \
    ADD_ELEM((seq), (LINK_ELEMENT *) (label))

/* Append a send instruction; the variants below fill in common defaults. */
#define PUSH_SEND_R(seq, location, id, argc, block, flag, keywords) \
    ADD_ELEM((seq), (LINK_ELEMENT *) new_insn_send(iseq, (int) (location).line, (int) (location).column, (id), (VALUE)(argc), (block), (VALUE)(flag), (keywords)))

#define PUSH_SEND(seq, location, id, argc) \
    PUSH_SEND_R((seq), location, (id), (argc), NULL, (VALUE)INT2FIX(0), NULL)

#define PUSH_SEND_WITH_FLAG(seq, location, id, argc, flag) \
    PUSH_SEND_R((seq), location, (id), (argc), NULL, (VALUE)(flag), NULL)

#define PUSH_SEND_WITH_BLOCK(seq, location, id, argc, block) \
    PUSH_SEND_R((seq), location, (id), (argc), (block), (VALUE)INT2FIX(0), NULL)

/* PUSH_CALL* are sends with the FCALL flag (receiverless / self calls). */
#define PUSH_CALL(seq, location, id, argc) \
    PUSH_SEND_R((seq), location, (id), (argc), NULL, (VALUE)INT2FIX(VM_CALL_FCALL), NULL)

#define PUSH_CALL_WITH_BLOCK(seq, location, id, argc, block) \
    PUSH_SEND_R((seq), location, (id), (argc), (block), (VALUE)INT2FIX(VM_CALL_FCALL), NULL)

/* Append a trace event element. */
#define PUSH_TRACE(seq, event) \
    ADD_ELEM((seq), (LINK_ELEMENT *) new_trace_body(iseq, (event), 0))

/* Register a catch-table entry (rescue/ensure/retry/etc.). */
#define PUSH_CATCH_ENTRY(type, ls, le, iseqv, lc) \
    ADD_CATCH_ENTRY((type), (ls), (le), (iseqv), (lc))

/* Splice one anchor's element list onto another. */
#define PUSH_SEQ(seq1, seq2) \
    APPEND_LIST((seq1), (seq2))

/*
 * Append a synthetic putnil that carries the most recently compiled line
 * number (falling back to the iseq's first line when nothing was compiled).
 */
#define PUSH_SYNTHETIC_PUTNIL(seq, iseq) \
    do { \
        int lineno = ISEQ_COMPILE_DATA(iseq)->last_line; \
        if (lineno == 0) lineno = FIX2INT(rb_iseq_first_lineno(iseq)); \
        ADD_SYNTHETIC_INSN(seq, lineno, -1, putnil); \
    } while (0)
65 /******************************************************************************/
66 /* These functions compile getlocal/setlocal instructions but operate on */
67 /* prism locations instead of NODEs. */
68 /******************************************************************************/
70 static void
71 pm_iseq_add_getlocal(rb_iseq_t *iseq, LINK_ANCHOR *const seq, int line_no, int column, int idx, int level)
73 if (iseq_local_block_param_p(iseq, idx, level)) {
74 ADD_ELEM(seq, (LINK_ELEMENT *) new_insn_body(iseq, line_no, column, BIN(getblockparam), 2, INT2FIX((idx) + VM_ENV_DATA_SIZE - 1), INT2FIX(level)));
76 else {
77 ADD_ELEM(seq, (LINK_ELEMENT *) new_insn_body(iseq, line_no, column, BIN(getlocal), 2, INT2FIX((idx) + VM_ENV_DATA_SIZE - 1), INT2FIX(level)));
79 if (level > 0) access_outer_variables(iseq, level, iseq_lvar_id(iseq, idx, level), Qfalse);
82 static void
83 pm_iseq_add_setlocal(rb_iseq_t *iseq, LINK_ANCHOR *const seq, int line_no, int column, int idx, int level)
85 if (iseq_local_block_param_p(iseq, idx, level)) {
86 ADD_ELEM(seq, (LINK_ELEMENT *) new_insn_body(iseq, line_no, column, BIN(setblockparam), 2, INT2FIX((idx) + VM_ENV_DATA_SIZE - 1), INT2FIX(level)));
88 else {
89 ADD_ELEM(seq, (LINK_ELEMENT *) new_insn_body(iseq, line_no, column, BIN(setlocal), 2, INT2FIX((idx) + VM_ENV_DATA_SIZE - 1), INT2FIX(level)));
91 if (level > 0) access_outer_variables(iseq, level, iseq_lvar_id(iseq, idx, level), Qtrue);
#define PUSH_GETLOCAL(seq, location, idx, level) \
    pm_iseq_add_getlocal(iseq, (seq), (int) (location).line, (int) (location).column, (idx), (level))

#define PUSH_SETLOCAL(seq, location, idx, level) \
    pm_iseq_add_setlocal(iseq, (seq), (int) (location).line, (int) (location).column, (idx), (level))

/******************************************************************************/
/* These are helper macros for the compiler.                                  */
/******************************************************************************/

/* Replace NEW_ISEQ/NEW_CHILD_ISEQ with prism-aware versions, keeping the    */
/* originals reachable as OLD_ISEQ/OLD_CHILD_ISEQ.                            */
#define OLD_ISEQ NEW_ISEQ
#undef NEW_ISEQ

#define NEW_ISEQ(node, name, type, line_no) \
    pm_new_child_iseq(iseq, (node), rb_fstring(name), 0, (type), (line_no))

#define OLD_CHILD_ISEQ NEW_CHILD_ISEQ
#undef NEW_CHILD_ISEQ

#define NEW_CHILD_ISEQ(node, name, type, line_no) \
    pm_new_child_iseq(iseq, (node), rb_fstring(name), iseq, (type), (line_no))

/* Shorthand for compiling a child node with the surrounding iseq/ret/scope. */
#define PM_COMPILE(node) \
    pm_compile_node(iseq, (node), ret, popped, scope_node)

#define PM_COMPILE_INTO_ANCHOR(_ret, node) \
    pm_compile_node(iseq, (node), _ret, popped, scope_node)

#define PM_COMPILE_POPPED(node) \
    pm_compile_node(iseq, (node), ret, true, scope_node)

#define PM_COMPILE_NOT_POPPED(node) \
    pm_compile_node(iseq, (node), ret, false, scope_node)

/* Tag bit marking sentinel constant ids that wrap CRuby ID values.          */
/* Use an unsigned shift: (1 << 31) would shift into the sign bit of int,     */
/* which is undefined behavior.                                               */
#define PM_SPECIAL_CONSTANT_FLAG ((pm_constant_id_t)(1u << 31))
#define PM_CONSTANT_AND ((pm_constant_id_t)(idAnd | PM_SPECIAL_CONSTANT_FLAG))
#define PM_CONSTANT_DOT3 ((pm_constant_id_t)(idDot3 | PM_SPECIAL_CONSTANT_FLAG))
#define PM_CONSTANT_MULT ((pm_constant_id_t)(idMULT | PM_SPECIAL_CONSTANT_FLAG))
#define PM_CONSTANT_POW ((pm_constant_id_t)(idPow | PM_SPECIAL_CONSTANT_FLAG))

/* Translate a node's/location's byte offset into a (line, column) pair.     */
#define PM_NODE_START_LINE_COLUMN(parser, node) \
    pm_newline_list_line_column(&(parser)->newline_list, ((const pm_node_t *) (node))->location.start, (parser)->start_line)

#define PM_NODE_END_LINE_COLUMN(parser, node) \
    pm_newline_list_line_column(&(parser)->newline_list, ((const pm_node_t *) (node))->location.end, (parser)->start_line)

#define PM_LOCATION_START_LINE_COLUMN(parser, location) \
    pm_newline_list_line_column(&(parser)->newline_list, (location)->start, (parser)->start_line)
143 static int
144 pm_node_line_number(const pm_parser_t *parser, const pm_node_t *node)
146 return (int) PM_NODE_START_LINE_COLUMN(parser, node).line;
149 static int
150 pm_location_line_number(const pm_parser_t *parser, const pm_location_t *location) {
151 return (int) PM_LOCATION_START_LINE_COLUMN(parser, location).line;
155 * Convert the value of an integer node into a Ruby Integer.
157 static VALUE
158 parse_integer(const pm_integer_node_t *node)
160 const pm_integer_t *integer = &node->value;
161 VALUE result;
163 if (integer->values == NULL) {
164 result = UINT2NUM(integer->value);
166 else {
167 VALUE string = rb_str_new(NULL, integer->length * 8);
168 unsigned char *bytes = (unsigned char *) RSTRING_PTR(string);
170 size_t offset = integer->length * 8;
171 for (size_t value_index = 0; value_index < integer->length; value_index++) {
172 uint32_t value = integer->values[value_index];
174 for (int index = 0; index < 8; index++) {
175 int byte = (value >> (4 * index)) & 0xf;
176 bytes[--offset] = byte < 10 ? byte + '0' : byte - 10 + 'a';
180 result = rb_funcall(string, rb_intern("to_i"), 1, UINT2NUM(16));
183 if (integer->negative) {
184 result = rb_funcall(result, rb_intern("-@"), 0);
187 return result;
191 * Convert the value of a float node into a Ruby Float.
193 static VALUE
194 parse_float(const pm_float_node_t *node)
196 return DBL2NUM(node->value);
200 * Convert the value of a rational node into a Ruby Rational. Rational nodes can
201 * either be wrapping an integer node or a float node. If it's an integer node,
202 * we can reuse our parsing. If it's not, then we'll parse the numerator and
203 * then parse the denominator and create the rational from those two values.
205 static VALUE
206 parse_rational(const pm_rational_node_t *node)
208 VALUE result;
210 if (PM_NODE_TYPE_P(node->numeric, PM_FLOAT_NODE)) {
211 const uint8_t *start = node->base.location.start;
212 const uint8_t *end = node->base.location.end - 1;
213 size_t length = end - start;
215 char *buffer = malloc(length + 1);
216 memcpy(buffer, start, length);
218 buffer[length] = '\0';
220 char *decimal = memchr(buffer, '.', length);
221 RUBY_ASSERT(decimal);
222 size_t seen_decimal = decimal - buffer;
223 size_t fraclen = length - seen_decimal - 1;
224 memmove(decimal, decimal + 1, fraclen + 1);
226 VALUE numerator = rb_cstr_to_inum(buffer, 10, false);
227 result = rb_rational_new(numerator, rb_int_positive_pow(10, fraclen));
229 free(buffer);
231 else {
232 RUBY_ASSERT(PM_NODE_TYPE_P(node->numeric, PM_INTEGER_NODE));
233 VALUE numerator = parse_integer((const pm_integer_node_t *) node->numeric);
234 result = rb_rational_raw(numerator, INT2FIX(1));
237 return result;
241 * Convert the value of an imaginary node into a Ruby Complex. Imaginary nodes
242 * can be wrapping an integer node, a float node, or a rational node. In all
243 * cases we will reuse parsing functions seen above to get the inner value, and
244 * then convert into an imaginary with rb_complex_raw.
246 static VALUE
247 parse_imaginary(const pm_imaginary_node_t *node)
249 VALUE imaginary_part;
250 switch (PM_NODE_TYPE(node->numeric)) {
251 case PM_FLOAT_NODE: {
252 imaginary_part = parse_float((const pm_float_node_t *) node->numeric);
253 break;
255 case PM_INTEGER_NODE: {
256 imaginary_part = parse_integer((const pm_integer_node_t *) node->numeric);
257 break;
259 case PM_RATIONAL_NODE: {
260 imaginary_part = parse_rational((const pm_rational_node_t *) node->numeric);
261 break;
263 default:
264 rb_bug("Unexpected numeric type on imaginary number %s\n", pm_node_type_to_str(PM_NODE_TYPE(node->numeric)));
267 return rb_complex_raw(INT2FIX(0), imaginary_part);
270 static inline VALUE
271 parse_string(const pm_scope_node_t *scope_node, const pm_string_t *string)
273 return rb_enc_str_new((const char *) pm_string_source(string), pm_string_length(string), scope_node->encoding);
277 * Certain strings can have their encoding differ from the parser's encoding due
278 * to bytes or escape sequences that have the top bit set. This function handles
279 * creating those strings based on the flags set on the owning node.
281 static inline VALUE
282 parse_string_encoded(const pm_node_t *node, const pm_string_t *string, rb_encoding *default_encoding)
284 rb_encoding *encoding;
286 if (node->flags & PM_ENCODING_FLAGS_FORCED_BINARY_ENCODING) {
287 encoding = rb_ascii8bit_encoding();
289 else if (node->flags & PM_ENCODING_FLAGS_FORCED_UTF8_ENCODING) {
290 encoding = rb_utf8_encoding();
292 else {
293 encoding = default_encoding;
296 return rb_enc_str_new((const char *) pm_string_source(string), pm_string_length(string), encoding);
299 static inline VALUE
300 parse_static_literal_string(rb_iseq_t *iseq, const pm_scope_node_t *scope_node, const pm_node_t *node, const pm_string_t *string)
302 rb_encoding *encoding;
304 if (node->flags & PM_STRING_FLAGS_FORCED_BINARY_ENCODING) {
305 encoding = rb_ascii8bit_encoding();
307 else if (node->flags & PM_STRING_FLAGS_FORCED_UTF8_ENCODING) {
308 encoding = rb_utf8_encoding();
310 else {
311 encoding = scope_node->encoding;
314 VALUE value = rb_enc_interned_str((const char *) pm_string_source(string), pm_string_length(string), encoding);
315 rb_enc_str_coderange(value);
317 if (ISEQ_COMPILE_DATA(iseq)->option->debug_frozen_string_literal || RTEST(ruby_debug)) {
318 int line_number = pm_node_line_number(scope_node->parser, node);
319 VALUE debug_info = rb_ary_new_from_args(2, rb_iseq_path(iseq), INT2FIX(line_number));
320 value = rb_str_dup(value);
321 rb_ivar_set(value, id_debug_created_info, rb_obj_freeze(debug_info));
322 rb_str_freeze(value);
325 return value;
328 static inline ID
329 parse_string_symbol(const pm_scope_node_t *scope_node, const pm_symbol_node_t *symbol)
331 rb_encoding *encoding;
332 if (symbol->base.flags & PM_SYMBOL_FLAGS_FORCED_UTF8_ENCODING) {
333 encoding = rb_utf8_encoding();
335 else if (symbol->base.flags & PM_SYMBOL_FLAGS_FORCED_BINARY_ENCODING) {
336 encoding = rb_ascii8bit_encoding();
338 else if (symbol->base.flags & PM_SYMBOL_FLAGS_FORCED_US_ASCII_ENCODING) {
339 encoding = rb_usascii_encoding();
341 else {
342 encoding = scope_node->encoding;
345 return rb_intern3((const char *) pm_string_source(&symbol->unescaped), pm_string_length(&symbol->unescaped), encoding);
348 static int
349 pm_optimizable_range_item_p(const pm_node_t *node)
351 return (!node || PM_NODE_TYPE_P(node, PM_INTEGER_NODE) || PM_NODE_TYPE_P(node, PM_NIL_NODE));
354 /** Raise an error corresponding to the invalid regular expression. */
355 static VALUE
356 parse_regexp_error(rb_iseq_t *iseq, int32_t line_number, const char *fmt, ...)
358 va_list args;
359 va_start(args, fmt);
360 VALUE error = rb_syntax_error_append(Qnil, rb_iseq_path(iseq), line_number, -1, NULL, "%" PRIsVALUE, args);
361 va_end(args);
362 rb_exc_raise(error);
365 static VALUE
366 parse_regexp_string_part(rb_iseq_t *iseq, const pm_scope_node_t *scope_node, const pm_node_t *node, const pm_string_t *unescaped, rb_encoding *implicit_regexp_encoding, rb_encoding *explicit_regexp_encoding)
368 // If we were passed an explicit regexp encoding, then we need to double
369 // check that it's okay here for this fragment of the string.
370 rb_encoding *encoding;
372 if (explicit_regexp_encoding != NULL) {
373 encoding = explicit_regexp_encoding;
375 else if (node->flags & PM_STRING_FLAGS_FORCED_BINARY_ENCODING) {
376 encoding = rb_ascii8bit_encoding();
378 else if (node->flags & PM_STRING_FLAGS_FORCED_UTF8_ENCODING) {
379 encoding = rb_utf8_encoding();
381 else {
382 encoding = implicit_regexp_encoding;
385 VALUE string = rb_enc_str_new((const char *) pm_string_source(unescaped), pm_string_length(unescaped), encoding);
386 VALUE error = rb_reg_check_preprocess(string);
388 if (error != Qnil) parse_regexp_error(iseq, pm_node_line_number(scope_node->parser, node), "%" PRIsVALUE, rb_obj_as_string(error));
389 return string;
392 static VALUE
393 pm_static_literal_concat(rb_iseq_t *iseq, const pm_node_list_t *nodes, const pm_scope_node_t *scope_node, rb_encoding *implicit_regexp_encoding, rb_encoding *explicit_regexp_encoding, bool top)
395 VALUE current = Qnil;
397 for (size_t index = 0; index < nodes->size; index++) {
398 const pm_node_t *part = nodes->nodes[index];
399 VALUE string;
401 switch (PM_NODE_TYPE(part)) {
402 case PM_STRING_NODE:
403 if (implicit_regexp_encoding != NULL) {
404 if (top) {
405 string = parse_regexp_string_part(iseq, scope_node, part, &((const pm_string_node_t *) part)->unescaped, implicit_regexp_encoding, explicit_regexp_encoding);
407 else {
408 string = parse_string_encoded(part, &((const pm_string_node_t *) part)->unescaped, scope_node->encoding);
409 VALUE error = rb_reg_check_preprocess(string);
410 if (error != Qnil) parse_regexp_error(iseq, pm_node_line_number(scope_node->parser, part), "%" PRIsVALUE, rb_obj_as_string(error));
413 else {
414 string = parse_string_encoded(part, &((const pm_string_node_t *) part)->unescaped, scope_node->encoding);
416 break;
417 case PM_INTERPOLATED_STRING_NODE:
418 string = pm_static_literal_concat(iseq, &((const pm_interpolated_string_node_t *) part)->parts, scope_node, implicit_regexp_encoding, explicit_regexp_encoding, false);
419 break;
420 case PM_EMBEDDED_STATEMENTS_NODE: {
421 const pm_embedded_statements_node_t *cast = (const pm_embedded_statements_node_t *) part;
422 string = pm_static_literal_concat(iseq, &cast->statements->body, scope_node, implicit_regexp_encoding, explicit_regexp_encoding, false);
423 break;
425 default:
426 RUBY_ASSERT(false && "unexpected node type in pm_static_literal_concat");
427 return Qnil;
430 if (current != Qnil) {
431 current = rb_str_concat(current, string);
433 else {
434 current = string;
438 return top ? rb_fstring(current) : current;
441 #define RE_OPTION_ENCODING_SHIFT 8
442 #define RE_OPTION_ENCODING(encoding) (((encoding) & 0xFF) << RE_OPTION_ENCODING_SHIFT)
443 #define ARG_ENCODING_NONE 32
444 #define ARG_ENCODING_FIXED 16
445 #define ENC_ASCII8BIT 1
446 #define ENC_EUC_JP 2
447 #define ENC_Windows_31J 3
448 #define ENC_UTF8 4
451 * Check the prism flags of a regular expression-like node and return the flags
452 * that are expected by the CRuby VM.
454 static int
455 parse_regexp_flags(const pm_node_t *node)
457 int flags = 0;
459 // Check "no encoding" first so that flags don't get clobbered
460 // We're calling `rb_char_to_option_kcode` in this case so that
461 // we don't need to have access to `ARG_ENCODING_NONE`
462 if (PM_NODE_FLAG_P(node, PM_REGULAR_EXPRESSION_FLAGS_ASCII_8BIT)) {
463 flags |= ARG_ENCODING_NONE;
466 if (PM_NODE_FLAG_P(node, PM_REGULAR_EXPRESSION_FLAGS_EUC_JP)) {
467 flags |= (ARG_ENCODING_FIXED | RE_OPTION_ENCODING(ENC_EUC_JP));
470 if (PM_NODE_FLAG_P(node, PM_REGULAR_EXPRESSION_FLAGS_WINDOWS_31J)) {
471 flags |= (ARG_ENCODING_FIXED | RE_OPTION_ENCODING(ENC_Windows_31J));
474 if (PM_NODE_FLAG_P(node, PM_REGULAR_EXPRESSION_FLAGS_UTF_8)) {
475 flags |= (ARG_ENCODING_FIXED | RE_OPTION_ENCODING(ENC_UTF8));
478 if (PM_NODE_FLAG_P(node, PM_REGULAR_EXPRESSION_FLAGS_IGNORE_CASE)) {
479 flags |= ONIG_OPTION_IGNORECASE;
482 if (PM_NODE_FLAG_P(node, PM_REGULAR_EXPRESSION_FLAGS_MULTI_LINE)) {
483 flags |= ONIG_OPTION_MULTILINE;
486 if (PM_NODE_FLAG_P(node, PM_REGULAR_EXPRESSION_FLAGS_EXTENDED)) {
487 flags |= ONIG_OPTION_EXTEND;
490 return flags;
493 #undef RE_OPTION_ENCODING_SHIFT
494 #undef RE_OPTION_ENCODING
495 #undef ARG_ENCODING_FIXED
496 #undef ARG_ENCODING_NONE
497 #undef ENC_ASCII8BIT
498 #undef ENC_EUC_JP
499 #undef ENC_Windows_31J
500 #undef ENC_UTF8
502 static rb_encoding *
503 parse_regexp_encoding(const pm_scope_node_t *scope_node, const pm_node_t *node)
505 if (PM_NODE_FLAG_P(node, PM_REGULAR_EXPRESSION_FLAGS_ASCII_8BIT)) {
506 return rb_ascii8bit_encoding();
508 else if (PM_NODE_FLAG_P(node, PM_REGULAR_EXPRESSION_FLAGS_UTF_8)) {
509 return rb_utf8_encoding();
511 else if (PM_NODE_FLAG_P(node, PM_REGULAR_EXPRESSION_FLAGS_EUC_JP)) {
512 return rb_enc_get_from_index(ENCINDEX_EUC_JP);
514 else if (PM_NODE_FLAG_P(node, PM_REGULAR_EXPRESSION_FLAGS_WINDOWS_31J)) {
515 return rb_enc_get_from_index(ENCINDEX_Windows_31J);
517 else {
518 return NULL;
522 static VALUE
523 parse_regexp(rb_iseq_t *iseq, const pm_scope_node_t *scope_node, const pm_node_t *node, VALUE string)
525 VALUE errinfo = rb_errinfo();
527 int32_t line_number = pm_node_line_number(scope_node->parser, node);
528 VALUE regexp = rb_reg_compile(string, parse_regexp_flags(node), (const char *) pm_string_source(&scope_node->parser->filepath), line_number);
530 if (NIL_P(regexp)) {
531 VALUE message = rb_attr_get(rb_errinfo(), idMesg);
532 rb_set_errinfo(errinfo);
534 parse_regexp_error(iseq, line_number, "%" PRIsVALUE, message);
535 return Qnil;
538 rb_obj_freeze(regexp);
539 return regexp;
542 static inline VALUE
543 parse_regexp_literal(rb_iseq_t *iseq, const pm_scope_node_t *scope_node, const pm_node_t *node, const pm_string_t *unescaped)
545 rb_encoding *regexp_encoding = parse_regexp_encoding(scope_node, node);
546 if (regexp_encoding == NULL) regexp_encoding = scope_node->encoding;
548 VALUE string = rb_enc_str_new((const char *) pm_string_source(unescaped), pm_string_length(unescaped), regexp_encoding);
549 return parse_regexp(iseq, scope_node, node, string);
552 static inline VALUE
553 parse_regexp_concat(rb_iseq_t *iseq, const pm_scope_node_t *scope_node, const pm_node_t *node, const pm_node_list_t *parts)
555 rb_encoding *explicit_regexp_encoding = parse_regexp_encoding(scope_node, node);
556 rb_encoding *implicit_regexp_encoding = explicit_regexp_encoding != NULL ? explicit_regexp_encoding : scope_node->encoding;
558 VALUE string = pm_static_literal_concat(iseq, parts, scope_node, implicit_regexp_encoding, explicit_regexp_encoding, false);
559 return parse_regexp(iseq, scope_node, node, string);
562 static void pm_compile_node(rb_iseq_t *iseq, const pm_node_t *node, LINK_ANCHOR *const ret, bool popped, pm_scope_node_t *scope_node);
564 static int
565 pm_interpolated_node_compile(rb_iseq_t *iseq, const pm_node_list_t *parts, const pm_line_column_t *node_location, LINK_ANCHOR *const ret, bool popped, pm_scope_node_t *scope_node, rb_encoding *implicit_regexp_encoding, rb_encoding *explicit_regexp_encoding)
567 int stack_size = 0;
568 size_t parts_size = parts->size;
569 bool interpolated = false;
571 if (parts_size > 0) {
572 VALUE current_string = Qnil;
574 for (size_t index = 0; index < parts_size; index++) {
575 const pm_node_t *part = parts->nodes[index];
577 if (PM_NODE_TYPE_P(part, PM_STRING_NODE)) {
578 const pm_string_node_t *string_node = (const pm_string_node_t *) part;
579 VALUE string_value;
581 if (implicit_regexp_encoding == NULL) {
582 string_value = parse_string_encoded(part, &string_node->unescaped, scope_node->encoding);
584 else {
585 string_value = parse_regexp_string_part(iseq, scope_node, (const pm_node_t *) string_node, &string_node->unescaped, implicit_regexp_encoding, explicit_regexp_encoding);
588 if (RTEST(current_string)) {
589 current_string = rb_str_concat(current_string, string_value);
591 else {
592 current_string = string_value;
595 else {
596 interpolated = true;
598 if (
599 PM_NODE_TYPE_P(part, PM_EMBEDDED_STATEMENTS_NODE) &&
600 ((const pm_embedded_statements_node_t *) part)->statements != NULL &&
601 ((const pm_embedded_statements_node_t *) part)->statements->body.size == 1 &&
602 PM_NODE_TYPE_P(((const pm_embedded_statements_node_t *) part)->statements->body.nodes[0], PM_STRING_NODE)
604 const pm_string_node_t *string_node = (const pm_string_node_t *) ((const pm_embedded_statements_node_t *) part)->statements->body.nodes[0];
605 VALUE string_value;
607 if (implicit_regexp_encoding == NULL) {
608 string_value = parse_string_encoded(part, &string_node->unescaped, scope_node->encoding);
610 else {
611 string_value = parse_regexp_string_part(iseq, scope_node, (const pm_node_t *) string_node, &string_node->unescaped, implicit_regexp_encoding, explicit_regexp_encoding);
614 if (RTEST(current_string)) {
615 current_string = rb_str_concat(current_string, string_value);
617 else {
618 current_string = string_value;
621 else {
622 if (!RTEST(current_string)) {
623 rb_encoding *encoding;
625 if (implicit_regexp_encoding != NULL) {
626 if (explicit_regexp_encoding != NULL) {
627 encoding = explicit_regexp_encoding;
629 else if (scope_node->parser->encoding == PM_ENCODING_US_ASCII_ENTRY) {
630 encoding = rb_ascii8bit_encoding();
632 else {
633 encoding = implicit_regexp_encoding;
636 else {
637 encoding = scope_node->encoding;
640 current_string = rb_enc_str_new(NULL, 0, encoding);
643 PUSH_INSN1(ret, *node_location, putobject, rb_fstring(current_string));
644 PM_COMPILE_NOT_POPPED(part);
645 PUSH_INSN(ret, *node_location, dup);
646 PUSH_INSN1(ret, *node_location, objtostring, new_callinfo(iseq, idTo_s, 0, VM_CALL_FCALL | VM_CALL_ARGS_SIMPLE , NULL, FALSE));
647 PUSH_INSN(ret, *node_location, anytostring);
649 current_string = Qnil;
650 stack_size += 2;
655 if (RTEST(current_string)) {
656 current_string = rb_fstring(current_string);
658 if (stack_size == 0 && interpolated) {
659 PUSH_INSN1(ret, *node_location, putstring, current_string);
661 else {
662 PUSH_INSN1(ret, *node_location, putobject, current_string);
665 current_string = Qnil;
666 stack_size++;
669 else {
670 PUSH_INSN(ret, *node_location, putnil);
673 return stack_size;
676 static void
677 pm_compile_regexp_dynamic(rb_iseq_t *iseq, const pm_node_t *node, const pm_node_list_t *parts, const pm_line_column_t *node_location, LINK_ANCHOR *const ret, bool popped, pm_scope_node_t *scope_node)
679 rb_encoding *explicit_regexp_encoding = parse_regexp_encoding(scope_node, node);
680 rb_encoding *implicit_regexp_encoding = explicit_regexp_encoding != NULL ? explicit_regexp_encoding : scope_node->encoding;
682 int length = pm_interpolated_node_compile(iseq, parts, node_location, ret, popped, scope_node, implicit_regexp_encoding, explicit_regexp_encoding);
683 PUSH_INSN2(ret, *node_location, toregexp, INT2FIX(parse_regexp_flags(node) & 0xFF), INT2FIX(length));
686 static VALUE
687 pm_source_file_value(const pm_source_file_node_t *node, const pm_scope_node_t *scope_node)
689 const pm_string_t *filepath = &node->filepath;
690 size_t length = pm_string_length(filepath);
692 if (length > 0) {
693 rb_encoding *filepath_encoding = scope_node->filepath_encoding != NULL ? scope_node->filepath_encoding : rb_utf8_encoding();
694 return rb_enc_interned_str((const char *) pm_string_source(filepath), length, filepath_encoding);
696 else {
697 return rb_fstring_lit("<compiled>");
702 * Return a static literal string, optionally with attached debugging
703 * information.
705 static VALUE
706 pm_static_literal_string(rb_iseq_t *iseq, VALUE string, int line_number)
708 if (ISEQ_COMPILE_DATA(iseq)->option->debug_frozen_string_literal || RTEST(ruby_debug)) {
709 VALUE debug_info = rb_ary_new_from_args(2, rb_iseq_path(iseq), INT2FIX(line_number));
710 rb_ivar_set(string, id_debug_created_info, rb_obj_freeze(debug_info));
711 return rb_str_freeze(string);
713 else {
714 return rb_fstring(string);
719 * Certain nodes can be compiled literally. This function returns the literal
720 * value described by the given node. For example, an array node with all static
721 * literal values can be compiled into a literal array.
723 static VALUE
724 pm_static_literal_value(rb_iseq_t *iseq, const pm_node_t *node, const pm_scope_node_t *scope_node)
726 // Every node that comes into this function should already be marked as
727 // static literal. If it's not, then we have a bug somewhere.
728 RUBY_ASSERT(PM_NODE_FLAG_P(node, PM_NODE_FLAG_STATIC_LITERAL));
730 switch (PM_NODE_TYPE(node)) {
731 case PM_ARRAY_NODE: {
732 const pm_array_node_t *cast = (const pm_array_node_t *) node;
733 const pm_node_list_t *elements = &cast->elements;
735 VALUE value = rb_ary_hidden_new(elements->size);
736 for (size_t index = 0; index < elements->size; index++) {
737 rb_ary_push(value, pm_static_literal_value(iseq, elements->nodes[index], scope_node));
740 OBJ_FREEZE(value);
741 return value;
743 case PM_FALSE_NODE:
744 return Qfalse;
745 case PM_FLOAT_NODE:
746 return parse_float((const pm_float_node_t *) node);
747 case PM_HASH_NODE: {
748 const pm_hash_node_t *cast = (const pm_hash_node_t *) node;
749 const pm_node_list_t *elements = &cast->elements;
751 VALUE array = rb_ary_hidden_new(elements->size * 2);
752 for (size_t index = 0; index < elements->size; index++) {
753 RUBY_ASSERT(PM_NODE_TYPE_P(elements->nodes[index], PM_ASSOC_NODE));
754 const pm_assoc_node_t *cast = (const pm_assoc_node_t *) elements->nodes[index];
755 VALUE pair[2] = { pm_static_literal_value(iseq, cast->key, scope_node), pm_static_literal_value(iseq, cast->value, scope_node) };
756 rb_ary_cat(array, pair, 2);
759 VALUE value = rb_hash_new_with_size(elements->size);
760 rb_hash_bulk_insert(RARRAY_LEN(array), RARRAY_CONST_PTR(array), value);
762 value = rb_obj_hide(value);
763 OBJ_FREEZE(value);
764 return value;
766 case PM_IMAGINARY_NODE:
767 return parse_imaginary((const pm_imaginary_node_t *) node);
768 case PM_INTEGER_NODE:
769 return parse_integer((const pm_integer_node_t *) node);
770 case PM_INTERPOLATED_MATCH_LAST_LINE_NODE: {
771 const pm_interpolated_match_last_line_node_t *cast = (const pm_interpolated_match_last_line_node_t *) node;
772 return parse_regexp_concat(iseq, scope_node, (const pm_node_t *) cast, &cast->parts);
774 case PM_INTERPOLATED_REGULAR_EXPRESSION_NODE: {
775 const pm_interpolated_regular_expression_node_t *cast = (const pm_interpolated_regular_expression_node_t *) node;
776 return parse_regexp_concat(iseq, scope_node, (const pm_node_t *) cast, &cast->parts);
778 case PM_INTERPOLATED_STRING_NODE: {
779 VALUE string = pm_static_literal_concat(iseq, &((const pm_interpolated_string_node_t *) node)->parts, scope_node, NULL, NULL, false);
780 int line_number = pm_node_line_number(scope_node->parser, node);
781 return pm_static_literal_string(iseq, string, line_number);
783 case PM_INTERPOLATED_SYMBOL_NODE: {
784 const pm_interpolated_symbol_node_t *cast = (const pm_interpolated_symbol_node_t *) node;
785 VALUE string = pm_static_literal_concat(iseq, &cast->parts, scope_node, NULL, NULL, true);
787 return ID2SYM(rb_intern_str(string));
789 case PM_MATCH_LAST_LINE_NODE: {
790 const pm_match_last_line_node_t *cast = (const pm_match_last_line_node_t *) node;
791 return parse_regexp_literal(iseq, scope_node, (const pm_node_t *) cast, &cast->unescaped);
793 case PM_NIL_NODE:
794 return Qnil;
795 case PM_RATIONAL_NODE:
796 return parse_rational((const pm_rational_node_t *) node);
797 case PM_REGULAR_EXPRESSION_NODE: {
798 const pm_regular_expression_node_t *cast = (const pm_regular_expression_node_t *) node;
799 return parse_regexp_literal(iseq, scope_node, (const pm_node_t *) cast, &cast->unescaped);
801 case PM_SOURCE_ENCODING_NODE:
802 return rb_enc_from_encoding(scope_node->encoding);
803 case PM_SOURCE_FILE_NODE: {
804 const pm_source_file_node_t *cast = (const pm_source_file_node_t *) node;
805 return pm_source_file_value(cast, scope_node);
807 case PM_SOURCE_LINE_NODE:
808 return INT2FIX(pm_node_line_number(scope_node->parser, node));
809 case PM_STRING_NODE: {
810 const pm_string_node_t *cast = (const pm_string_node_t *) node;
811 return parse_static_literal_string(iseq, scope_node, node, &cast->unescaped);
813 case PM_SYMBOL_NODE:
814 return ID2SYM(parse_string_symbol(scope_node, (const pm_symbol_node_t *) node));
815 case PM_TRUE_NODE:
816 return Qtrue;
817 default:
818 rb_bug("Don't have a literal value for node type %s", pm_node_type_to_str(PM_NODE_TYPE(node)));
819 return Qfalse;
824 * A helper for converting a pm_location_t into a rb_code_location_t.
826 static rb_code_location_t
827 pm_code_location(const pm_scope_node_t *scope_node, const pm_node_t *node)
829 const pm_line_column_t start_location = PM_NODE_START_LINE_COLUMN(scope_node->parser, node);
830 const pm_line_column_t end_location = PM_NODE_END_LINE_COLUMN(scope_node->parser, node);
832 return (rb_code_location_t) {
833 .beg_pos = { .lineno = start_location.line, .column = start_location.column },
834 .end_pos = { .lineno = end_location.line, .column = end_location.column }
/**
 * A macro for determining if we should go through the work of adding branch
 * coverage to the current iseq. We check this manually each time because we
 * want to avoid the overhead of creating rb_code_location_t objects.
 */
#define PM_BRANCH_COVERAGE_P(iseq) (ISEQ_COVERAGE(iseq) && ISEQ_BRANCH_COVERAGE(iseq))
845 static void
846 pm_compile_branch_condition(rb_iseq_t *iseq, LINK_ANCHOR *const ret, const pm_node_t *cond,
847 LABEL *then_label, LABEL *else_label, bool popped, pm_scope_node_t *scope_node);
/**
 * Compile one operand of a short-circuit (&&/||) condition. Exactly one of
 * then_label/else_label may be NULL; in that case a fresh fall-through label is
 * substituted so that pm_compile_branch_condition always has both targets.
 */
static void
pm_compile_logical(rb_iseq_t *iseq, LINK_ANCHOR *const ret, pm_node_t *cond, LABEL *then_label, LABEL *else_label, bool popped, pm_scope_node_t *scope_node)
{
    const pm_line_column_t location = PM_NODE_START_LINE_COLUMN(scope_node->parser, cond);

    // Compile the condition into a private sequence first so we can inspect
    // and possibly discard the result before appending it to ret.
    DECL_ANCHOR(seq);
    INIT_ANCHOR(seq);

    // The fall-through label stands in for whichever branch target the caller
    // did not supply.
    LABEL *label = NEW_LABEL(location.line);
    if (!then_label) then_label = label;
    else if (!else_label) else_label = label;

    pm_compile_branch_condition(iseq, seq, cond, then_label, else_label, popped, scope_node);

    // If the whole condition compiled to a single jump straight to the
    // fall-through label, it is a no-op and can be dropped entirely.
    if (LIST_INSN_SIZE_ONE(seq)) {
        INSN *insn = (INSN *) ELEM_FIRST_INSN(FIRST_ELEMENT(seq));
        if (insn->insn_id == BIN(jump) && (LABEL *)(insn->operands[0]) == label) return;
    }

    if (!label->refcnt) {
        // Nothing ever jumps to the fall-through label; only keep stack
        // balance when the value is popped.
        if (popped) PUSH_INSN(ret, location, putnil);
    }
    else {
        PUSH_LABEL(seq, label);
    }

    PUSH_SEQ(ret, seq);
    return;
}
/**
 * Compile one bound (left or right) of a flip-flop range. A bare integer bound
 * is compared against the special global $. (the last read line number), which
 * is the classic `if 10..20` line-range behavior; any other expression is
 * compiled as-is.
 */
static void
pm_compile_flip_flop_bound(rb_iseq_t *iseq, const pm_node_t *node, LINK_ANCHOR *const ret, bool popped, pm_scope_node_t *scope_node)
{
    // Flip-flop instructions are attributed to the iseq's first line rather
    // than the node's own location.
    const pm_line_column_t location = { .line = ISEQ_BODY(iseq)->location.first_lineno, .column = -1 };

    if (PM_NODE_TYPE_P(node, PM_INTEGER_NODE)) {
        // Emit `<int> == $.` so the bound triggers on that input line number.
        PM_COMPILE_NOT_POPPED(node);
        PUSH_INSN1(ret, location, getglobal, ID2SYM(rb_intern("$.")));
        PUSH_SEND(ret, location, idEq, INT2FIX(1));
        if (popped) PUSH_INSN(ret, location, pop);
    }
    else {
        PM_COMPILE(node);
    }
}
/**
 * Compile a flip-flop node used in a conditional context. The flip-flop's
 * on/off state is stored in a per-iseq special variable slot (allocated via
 * ISEQ_FLIP_CNT_INCREMENT); control transfers to then_label while the
 * flip-flop is "on" and to else_label otherwise.
 */
static void
pm_compile_flip_flop(const pm_flip_flop_node_t *flip_flop_node, LABEL *else_label, LABEL *then_label, rb_iseq_t *iseq, const int lineno, LINK_ANCHOR *const ret, bool popped, pm_scope_node_t *scope_node)
{
    const pm_line_column_t location = { .line = ISEQ_BODY(iseq)->location.first_lineno, .column = -1 };
    LABEL *lend = NEW_LABEL(location.line);

    // For an inclusive range (..) the right bound is checked in the same
    // iteration the flip-flop turns on; exclusive (...) skips that check.
    int again = !(flip_flop_node->base.flags & PM_RANGE_FLAGS_EXCLUDE_END);

    // Allocate a fresh special-variable slot to hold this flip-flop's state.
    rb_num_t count = ISEQ_FLIP_CNT_INCREMENT(ISEQ_BODY(iseq)->local_iseq) + VM_SVAR_FLIPFLOP_START;
    VALUE key = INT2FIX(count);

    // If the flip-flop is already on, skip the left bound and go straight to
    // the right-bound check.
    PUSH_INSN2(ret, location, getspecial, key, INT2FIX(0));
    PUSH_INSNL(ret, location, branchif, lend);

    // Evaluate the left bound (nil when absent).
    if (flip_flop_node->left) {
        pm_compile_flip_flop_bound(iseq, flip_flop_node->left, ret, popped, scope_node);
    }
    else {
        PUSH_INSN(ret, location, putnil);
    }

    // Left bound false: stay off. Otherwise turn the flip-flop on.
    PUSH_INSNL(ret, location, branchunless, else_label);
    PUSH_INSN1(ret, location, putobject, Qtrue);
    PUSH_INSN1(ret, location, setspecial, key);
    if (!again) {
        PUSH_INSNL(ret, location, jump, then_label);
    }

    // Flip-flop is on: evaluate the right bound (nil when absent).
    PUSH_LABEL(ret, lend);
    if (flip_flop_node->right) {
        pm_compile_flip_flop_bound(iseq, flip_flop_node->right, ret, popped, scope_node);
    }
    else {
        PUSH_INSN(ret, location, putnil);
    }

    // Right bound false: remain on and take the then branch. Right bound
    // true: turn the flip-flop off, but this iteration still takes then.
    PUSH_INSNL(ret, location, branchunless, then_label);
    PUSH_INSN1(ret, location, putobject, Qfalse);
    PUSH_INSN1(ret, location, setspecial, key);
    PUSH_INSNL(ret, location, jump, then_label);
}
937 static void pm_compile_defined_expr(rb_iseq_t *iseq, const pm_node_t *node, const pm_line_column_t *node_location, LINK_ANCHOR *const ret, bool popped, pm_scope_node_t *scope_node, bool in_condition);
/**
 * Compile a conditional expression so that control transfers to then_label
 * when it is truthy and to else_label otherwise. Known-constant conditions
 * (literals, nil/true/false) compile to a single unconditional jump; &&/||
 * recurse via pm_compile_logical on the left operand and loop on the right.
 */
static void
pm_compile_branch_condition(rb_iseq_t *iseq, LINK_ANCHOR *const ret, const pm_node_t *cond, LABEL *then_label, LABEL *else_label, bool popped, pm_scope_node_t *scope_node)
{
    const pm_line_column_t location = PM_NODE_START_LINE_COLUMN(scope_node->parser, cond);

again:
    switch (PM_NODE_TYPE(cond)) {
      case PM_AND_NODE: {
        // `a && b`: if a is falsy jump to else; then test b.
        const pm_and_node_t *cast = (const pm_and_node_t *) cond;
        pm_compile_logical(iseq, ret, cast->left, NULL, else_label, popped, scope_node);

        cond = cast->right;
        goto again;
      }
      case PM_OR_NODE: {
        // `a || b`: if a is truthy jump to then; then test b.
        const pm_or_node_t *cast = (const pm_or_node_t *) cond;
        pm_compile_logical(iseq, ret, cast->left, then_label, NULL, popped, scope_node);

        cond = cast->right;
        goto again;
      }
      case PM_FALSE_NODE:
      case PM_NIL_NODE:
        // Statically falsy: unconditional jump to the else branch.
        PUSH_INSNL(ret, location, jump, else_label);
        return;
      case PM_FLOAT_NODE:
      case PM_IMAGINARY_NODE:
      case PM_INTEGER_NODE:
      case PM_LAMBDA_NODE:
      case PM_RATIONAL_NODE:
      case PM_REGULAR_EXPRESSION_NODE:
      case PM_STRING_NODE:
      case PM_SYMBOL_NODE:
      case PM_TRUE_NODE:
        // Statically truthy literals: unconditional jump to the then branch.
        PUSH_INSNL(ret, location, jump, then_label);
        return;
      case PM_FLIP_FLOP_NODE:
        pm_compile_flip_flop((const pm_flip_flop_node_t *) cond, else_label, then_label, iseq, location.line, ret, popped, scope_node);
        return;
      case PM_DEFINED_NODE: {
        const pm_defined_node_t *cast = (const pm_defined_node_t *) cond;
        pm_compile_defined_expr(iseq, cast->value, &location, ret, popped, scope_node, true);
        break;
      }
      default: {
        // Everything else: evaluate the expression and branch on its value.
        pm_compile_node(iseq, cond, ret, false, scope_node);
        break;
      }
    }

    PUSH_INSNL(ret, location, branchunless, else_label);
    PUSH_INSNL(ret, location, jump, then_label);
}
/**
 * Compile an if or unless node. `statements` holds the body taken when the
 * branch condition sends us to then_label, `consequent` the else/elsif part.
 * Branch coverage records are only established when both labels are actually
 * referenced (i.e. the predicate was not constant-folded to one side).
 */
static void
pm_compile_conditional(rb_iseq_t *iseq, const pm_line_column_t *line_column, pm_node_type_t type, const pm_node_t *node, const pm_statements_node_t *statements, const pm_node_t *consequent, const pm_node_t *predicate, LINK_ANCHOR *const ret, bool popped, pm_scope_node_t *scope_node)
{
    const pm_line_column_t location = *line_column;
    LABEL *then_label = NEW_LABEL(location.line);
    LABEL *else_label = NEW_LABEL(location.line);
    LABEL *end_label = NULL;

    pm_compile_branch_condition(iseq, ret, predicate, then_label, else_label, false, scope_node);

    rb_code_location_t conditional_location;
    VALUE branches = Qfalse;

    // Only set up branch coverage when both branches are reachable.
    if (then_label->refcnt && else_label->refcnt && PM_BRANCH_COVERAGE_P(iseq)) {
        conditional_location = pm_code_location(scope_node, node);
        branches = decl_branch_base(iseq, PTR2NUM(node), &conditional_location, type == PM_IF_NODE ? "if" : "unless");
    }

    if (then_label->refcnt) {
        PUSH_LABEL(ret, then_label);

        DECL_ANCHOR(then_seq);
        INIT_ANCHOR(then_seq);

        // An empty then body still produces nil when the value is needed.
        if (statements != NULL) {
            pm_compile_node(iseq, (const pm_node_t *) statements, then_seq, popped, scope_node);
        }
        else if (!popped) {
            PUSH_SYNTHETIC_PUTNIL(then_seq, iseq);
        }

        if (else_label->refcnt) {
            // Establish branch coverage for the then block.
            if (PM_BRANCH_COVERAGE_P(iseq)) {
                rb_code_location_t branch_location;

                if (statements != NULL) {
                    branch_location = pm_code_location(scope_node, (const pm_node_t *) statements);
                } else if (type == PM_IF_NODE) {
                    // Empty then in an if: use a zero-width location at the
                    // end of the predicate.
                    pm_line_column_t predicate_end = PM_NODE_END_LINE_COLUMN(scope_node->parser, predicate);
                    branch_location = (rb_code_location_t) {
                        .beg_pos = { .lineno = predicate_end.line, .column = predicate_end.column },
                        .end_pos = { .lineno = predicate_end.line, .column = predicate_end.column }
                    };
                } else {
                    branch_location = conditional_location;
                }

                add_trace_branch_coverage(iseq, ret, &branch_location, branch_location.beg_pos.column, 0, type == PM_IF_NODE ? "then" : "else", branches);
            }

            // Skip over the else branch after the then branch finishes.
            end_label = NEW_LABEL(location.line);
            PUSH_INSNL(then_seq, location, jump, end_label);
            if (!popped) PUSH_INSN(then_seq, location, pop);
        }

        PUSH_SEQ(ret, then_seq);
    }

    if (else_label->refcnt) {
        PUSH_LABEL(ret, else_label);

        DECL_ANCHOR(else_seq);
        INIT_ANCHOR(else_seq);

        // Missing else still produces nil when the value is needed.
        if (consequent != NULL) {
            pm_compile_node(iseq, consequent, else_seq, popped, scope_node);
        }
        else if (!popped) {
            PUSH_SYNTHETIC_PUTNIL(else_seq, iseq);
        }

        // Establish branch coverage for the else block.
        if (then_label->refcnt && PM_BRANCH_COVERAGE_P(iseq)) {
            rb_code_location_t branch_location;

            if (consequent == NULL) {
                branch_location = conditional_location;
            } else if (PM_NODE_TYPE_P(consequent, PM_ELSE_NODE)) {
                const pm_else_node_t *else_node = (const pm_else_node_t *) consequent;
                branch_location = pm_code_location(scope_node, else_node->statements != NULL ? ((const pm_node_t *) else_node->statements) : (const pm_node_t *) else_node);
            } else {
                branch_location = pm_code_location(scope_node, (const pm_node_t *) consequent);
            }

            add_trace_branch_coverage(iseq, ret, &branch_location, branch_location.beg_pos.column, 1, type == PM_IF_NODE ? "else" : "then", branches);
        }

        PUSH_SEQ(ret, else_seq);
    }

    if (end_label) {
        PUSH_LABEL(ret, end_label);
    }

    return;
}
/**
 * Compile a while or until loop. Saves and restores the enclosing loop's
 * start/end/redo labels on the compile data so that break/next/redo inside
 * this loop target the correct labels, and registers the matching catch-table
 * entries before returning.
 */
static void
pm_compile_loop(rb_iseq_t *iseq, const pm_line_column_t *line_column, pm_node_flags_t flags, enum pm_node_type type, const pm_node_t *node, const pm_statements_node_t *statements, const pm_node_t *predicate, LINK_ANCHOR *const ret, bool popped, pm_scope_node_t *scope_node)
{
    const pm_line_column_t location = *line_column;

    // Remember the enclosing loop's labels so they can be restored below.
    LABEL *prev_start_label = ISEQ_COMPILE_DATA(iseq)->start_label;
    LABEL *prev_end_label = ISEQ_COMPILE_DATA(iseq)->end_label;
    LABEL *prev_redo_label = ISEQ_COMPILE_DATA(iseq)->redo_label;

    // TODO: Deal with ensures in here
    LABEL *next_label = ISEQ_COMPILE_DATA(iseq)->start_label = NEW_LABEL(location.line); /* next */
    LABEL *redo_label = ISEQ_COMPILE_DATA(iseq)->redo_label = NEW_LABEL(location.line);  /* redo */
    LABEL *break_label = ISEQ_COMPILE_DATA(iseq)->end_label = NEW_LABEL(location.line);  /* break */
    LABEL *end_label = NEW_LABEL(location.line);
    LABEL *adjust_label = NEW_LABEL(location.line);

    LABEL *next_catch_label = NEW_LABEL(location.line);
    LABEL *tmp_label = NULL;

    // begin; end while true — the body runs once before the condition.
    if (flags & PM_LOOP_FLAGS_BEGIN_MODIFIER) {
        tmp_label = NEW_LABEL(location.line);
        PUSH_INSNL(ret, location, jump, tmp_label);
    }
    else {
        // while true; end — test the condition first.
        PUSH_INSNL(ret, location, jump, next_label);
    }

    // Landing pad for `next` thrown through the catch table: discard the
    // thrown value and re-test the condition.
    PUSH_LABEL(ret, adjust_label);
    PUSH_INSN(ret, location, putnil);
    PUSH_LABEL(ret, next_catch_label);
    PUSH_INSN(ret, location, pop);
    PUSH_INSNL(ret, location, jump, next_label);
    if (tmp_label) PUSH_LABEL(ret, tmp_label);

    PUSH_LABEL(ret, redo_label);

    // Establish branch coverage for the loop.
    if (PM_BRANCH_COVERAGE_P(iseq)) {
        rb_code_location_t loop_location = pm_code_location(scope_node, node);
        VALUE branches = decl_branch_base(iseq, PTR2NUM(node), &loop_location, type == PM_WHILE_NODE ? "while" : "until");

        rb_code_location_t branch_location = statements != NULL ? pm_code_location(scope_node, (const pm_node_t *) statements) : loop_location;
        add_trace_branch_coverage(iseq, ret, &branch_location, branch_location.beg_pos.column, 0, "body", branches);
    }

    if (statements != NULL) PM_COMPILE_POPPED((const pm_node_t *) statements);
    PUSH_LABEL(ret, next_label);

    // while loops re-enter the body on a truthy condition, until loops on a
    // falsy one.
    if (type == PM_WHILE_NODE) {
        pm_compile_branch_condition(iseq, ret, predicate, redo_label, end_label, popped, scope_node);
    }
    else if (type == PM_UNTIL_NODE) {
        pm_compile_branch_condition(iseq, ret, predicate, end_label, redo_label, popped, scope_node);
    }

    PUSH_LABEL(ret, end_label);
    PUSH_ADJUST_RESTORE(ret, adjust_label);
    PUSH_INSN(ret, location, putnil);

    PUSH_LABEL(ret, break_label);
    if (popped) PUSH_INSN(ret, location, pop);

    PUSH_CATCH_ENTRY(CATCH_TYPE_BREAK, redo_label, break_label, NULL, break_label);
    PUSH_CATCH_ENTRY(CATCH_TYPE_NEXT, redo_label, break_label, NULL, next_catch_label);
    PUSH_CATCH_ENTRY(CATCH_TYPE_REDO, redo_label, break_label, NULL, ISEQ_COMPILE_DATA(iseq)->redo_label);

    // Restore the enclosing loop's labels.
    ISEQ_COMPILE_DATA(iseq)->start_label = prev_start_label;
    ISEQ_COMPILE_DATA(iseq)->end_label = prev_end_label;
    ISEQ_COMPILE_DATA(iseq)->redo_label = prev_redo_label;
    return;
}
1171 // This recurses through scopes and finds the local index at any scope level
1172 // It also takes a pointer to depth, and increments depth appropriately
1173 // according to the depth of the local.
1174 static pm_local_index_t
1175 pm_lookup_local_index(rb_iseq_t *iseq, const pm_scope_node_t *scope_node, pm_constant_id_t constant_id, int start_depth)
1177 pm_local_index_t lindex = { 0 };
1178 st_data_t local_index;
1180 int level;
1181 for (level = 0; level < start_depth; level++) {
1182 scope_node = scope_node->previous;
1185 while (!st_lookup(scope_node->index_lookup_table, constant_id, &local_index)) {
1186 level++;
1188 if (scope_node->previous) {
1189 scope_node = scope_node->previous;
1191 else {
1192 // We have recursed up all scope nodes
1193 // and have not found the local yet
1194 rb_bug("Local with constant_id %u does not exist", (unsigned int) constant_id);
1198 lindex.level = level;
1199 lindex.index = scope_node->local_table_for_iseq_size - (int) local_index;
1200 return lindex;
1203 // This returns the CRuby ID which maps to the pm_constant_id_t
1205 // Constant_ids in prism are indexes of the constants in prism's constant pool.
1206 // We add a constants mapping on the scope_node which is a mapping from
1207 // these constant_id indexes to the CRuby IDs that they represent.
1208 // This helper method allows easy access to those IDs
1209 static ID
1210 pm_constant_id_lookup(const pm_scope_node_t *scope_node, pm_constant_id_t constant_id)
1212 if (constant_id < 1 || constant_id > scope_node->parser->constant_pool.size) {
1213 rb_bug("constant_id out of range: %u", (unsigned int)constant_id);
1215 return scope_node->constants[constant_id - 1];
/**
 * Create a child iseq (block, method, class body, ...) for the given scope
 * node under `parent`, inheriting the current iseq's path and compile
 * options. When the parent has a nonzero isolated_depth it is propagated one
 * level deeper into the child.
 */
static rb_iseq_t *
pm_new_child_iseq(rb_iseq_t *iseq, pm_scope_node_t *node, VALUE name, const rb_iseq_t *parent, enum rb_iseq_type type, int line_no)
{
    debugs("[new_child_iseq]> ---------------------------------------\n");
    int isolated_depth = ISEQ_COMPILE_DATA(iseq)->isolated_depth;
    rb_iseq_t *ret_iseq = pm_iseq_new_with_opt(node, name,
            rb_iseq_path(iseq), rb_iseq_realpath(iseq),
            line_no, parent,
            isolated_depth ? isolated_depth + 1 : 0,
            type, ISEQ_COMPILE_DATA(iseq)->option);
    debugs("[new_child_iseq]< ---------------------------------------\n");
    return ret_iseq;
}
1232 static int
1233 pm_compile_class_path(rb_iseq_t *iseq, const pm_node_t *node, const pm_line_column_t *node_location, LINK_ANCHOR *const ret, bool popped, pm_scope_node_t *scope_node)
1235 if (PM_NODE_TYPE_P(node, PM_CONSTANT_PATH_NODE)) {
1236 const pm_node_t *parent = ((const pm_constant_path_node_t *) node)->parent;
1238 if (parent) {
1239 /* Bar::Foo */
1240 PM_COMPILE(parent);
1241 return VM_DEFINECLASS_FLAG_SCOPED;
1243 else {
1244 /* toplevel class ::Foo */
1245 PUSH_INSN1(ret, *node_location, putobject, rb_cObject);
1246 return VM_DEFINECLASS_FLAG_SCOPED;
1249 else {
1250 /* class at cbase Foo */
1251 PUSH_INSN1(ret, *node_location, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_CONST_BASE));
1252 return 0;
/**
 * Compile either a call and write node or a call or write node. These look like
 * method calls that are followed by a ||= or &&= operator, e.g.
 * `foo.bar ||= baz`. The receiver is evaluated once, the reader method is
 * called, and depending on the result the writer method may be skipped
 * entirely. `safe_nav` handles the `&.` form, which also skips on nil
 * receivers.
 */
static void
pm_compile_call_and_or_write_node(rb_iseq_t *iseq, bool and_node, const pm_node_t *receiver, const pm_node_t *value, pm_constant_id_t write_name, pm_constant_id_t read_name, bool safe_nav, const pm_line_column_t *node_location, LINK_ANCHOR *const ret, bool popped, pm_scope_node_t *scope_node)
{
    const pm_line_column_t location = *node_location;
    LABEL *lfin = NEW_LABEL(location.line);
    LABEL *lcfin = NEW_LABEL(location.line);
    LABEL *lskip = NULL;

    // Calls with an explicit self receiver are still function calls.
    int flag = PM_NODE_TYPE_P(receiver, PM_SELF_NODE) ? VM_CALL_FCALL : 0;
    ID id_read_name = pm_constant_id_lookup(scope_node, read_name);

    PM_COMPILE_NOT_POPPED(receiver);
    if (safe_nav) {
        // For &. skip the whole read/write when the receiver is nil.
        lskip = NEW_LABEL(location.line);
        PUSH_INSN(ret, location, dup);
        PUSH_INSNL(ret, location, branchnil, lskip);
    }

    // Keep a copy of the receiver for the eventual write, then read.
    PUSH_INSN(ret, location, dup);
    PUSH_SEND_WITH_FLAG(ret, location, id_read_name, INT2FIX(0), INT2FIX(flag));
    if (!popped) PUSH_INSN(ret, location, dup);

    // &&= short-circuits on a falsy read; ||= on a truthy one.
    if (and_node) {
        PUSH_INSNL(ret, location, branchunless, lcfin);
    }
    else {
        PUSH_INSNL(ret, location, branchif, lcfin);
    }

    if (!popped) PUSH_INSN(ret, location, pop);
    PM_COMPILE_NOT_POPPED(value);

    // Rearrange the stack so the new value is both the write argument and the
    // expression result.
    if (!popped) {
        PUSH_INSN(ret, location, swap);
        PUSH_INSN1(ret, location, topn, INT2FIX(1));
    }

    ID id_write_name = pm_constant_id_lookup(scope_node, write_name);
    PUSH_SEND_WITH_FLAG(ret, location, id_write_name, INT2FIX(1), INT2FIX(flag));
    PUSH_INSNL(ret, location, jump, lfin);

    // Short-circuit path: the read value is the expression result.
    PUSH_LABEL(ret, lcfin);
    if (!popped) PUSH_INSN(ret, location, swap);

    PUSH_LABEL(ret, lfin);

    if (lskip && popped) PUSH_LABEL(ret, lskip);
    PUSH_INSN(ret, location, pop);
    if (lskip && !popped) PUSH_LABEL(ret, lskip);
}
/**
 * This function compiles a hash onto the stack. It is used to compile hash
 * literals and keyword arguments. It is assumed that if we get here that the
 * contents of the hash are not popped.
 */
static void
pm_compile_hash_elements(rb_iseq_t *iseq, const pm_node_t *node, const pm_node_list_t *elements, LINK_ANCHOR *const ret, pm_scope_node_t *scope_node)
{
    const pm_line_column_t location = PM_NODE_START_LINE_COLUMN(scope_node->parser, node);

    // If this element is not popped, then we need to create the hash on the
    // stack. Neighboring plain assoc nodes should be grouped together (either
    // by newhash or hash merge). Double splat nodes should be merged using the
    // merge_kwd method call.
    int assoc_length = 0;
    bool made_hash = false;

    for (size_t index = 0; index < elements->size; index++) {
        const pm_node_t *element = elements->nodes[index];

        switch (PM_NODE_TYPE(element)) {
          case PM_ASSOC_NODE: {
            // If this is a plain assoc node, then we can compile it directly
            // and then add to the number of assoc nodes we've seen so far.
            PM_COMPILE_NOT_POPPED(element);
            assoc_length++;
            break;
          }
          case PM_ASSOC_SPLAT_NODE: {
            // If we are at a splat and we have already compiled some elements
            // of the hash, then we need to either create the first hash or
            // merge the current elements into the existing hash.
            if (assoc_length > 0) {
                if (!made_hash) {
                    PUSH_INSN1(ret, location, newhash, INT2FIX(assoc_length * 2));
                    PUSH_INSN1(ret, location, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
                    PUSH_INSN(ret, location, swap);
                    made_hash = true;
                }
                else {
                    // Here we are merging plain assoc nodes into the hash on
                    // the stack.
                    PUSH_SEND(ret, location, id_core_hash_merge_ptr, INT2FIX(assoc_length * 2 + 1));

                    // Since we already have a hash on the stack, we need to set
                    // up the method call for the next merge that will occur.
                    PUSH_INSN1(ret, location, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
                    PUSH_INSN(ret, location, swap);
                }

                assoc_length = 0;
            }

            // If this is the first time we've seen a splat, then we need to
            // create a hash that we can merge into.
            if (!made_hash) {
                PUSH_INSN1(ret, location, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
                PUSH_INSN1(ret, location, newhash, INT2FIX(0));
                made_hash = true;
            }

            // Now compile the splat node itself and merge it into the hash.
            PM_COMPILE_NOT_POPPED(element);
            PUSH_SEND(ret, location, id_core_hash_merge_kwd, INT2FIX(2));

            // We know that any subsequent elements will need to be merged in
            // using one of the special core methods. So here we will put the
            // receiver of the merge and then swap it with the hash that is
            // going to be the first argument.
            if (index != elements->size - 1) {
                PUSH_INSN1(ret, location, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
                PUSH_INSN(ret, location, swap);
            }

            break;
          }
          default:
            RUBY_ASSERT("Invalid node type for hash" && false);
            break;
        }
    }

    if (!made_hash) {
        // If we haven't already made the hash, then this means we only saw
        // plain assoc nodes. In this case, we can just create the hash
        // directly.
        PUSH_INSN1(ret, location, newhash, INT2FIX(assoc_length * 2));
    }
    else if (assoc_length > 0) {
        // If we have already made the hash, then we need to merge the remaining
        // assoc nodes into the hash on the stack.
        PUSH_SEND(ret, location, id_core_hash_merge_ptr, INT2FIX(assoc_length * 2 + 1));
    }
}
// These are the implementation details. Users should call pm_setup_args()
// instead.
static int
pm_setup_args_core(const pm_arguments_node_t *arguments_node, const pm_node_t *block, int *flags, const bool has_regular_blockarg, struct rb_callinfo_kwarg **kw_arg, rb_iseq_t *iseq, LINK_ANCHOR *const ret, pm_scope_node_t *scope_node, const pm_line_column_t *node_location)
{
    const pm_line_column_t location = *node_location;

    int orig_argc = 0;
    bool has_splat = false;
    bool has_keyword_splat = false;

    if (arguments_node == NULL) {
        // A bare function-style call with no arguments is a vcall
        // (e.g. `foo` that might be a method or a local).
        if (*flags & VM_CALL_FCALL) {
            *flags |= VM_CALL_VCALL;
        }
    }
    else {
        const pm_node_list_t *arguments = &arguments_node->arguments;
        has_keyword_splat = PM_NODE_FLAG_P(arguments_node, PM_ARGUMENTS_NODE_FLAGS_CONTAINS_KEYWORD_SPLAT);

        // We count the number of elements post the splat node that are not keyword elements to
        // eventually pass as an argument to newarray
        int post_splat_counter = 0;
        const pm_node_t *argument;

        PM_NODE_LIST_FOREACH(arguments, index, argument) {
            switch (PM_NODE_TYPE(argument)) {
              // A keyword hash node contains all keyword arguments as AssocNodes and AssocSplatNodes
              case PM_KEYWORD_HASH_NODE: {
                const pm_keyword_hash_node_t *keyword_arg = (const pm_keyword_hash_node_t *) argument;
                const pm_node_list_t *elements = &keyword_arg->elements;

                if (has_keyword_splat || has_splat) {
                    *flags |= VM_CALL_KW_SPLAT;
                    has_keyword_splat = true;
                    pm_compile_hash_elements(iseq, argument, elements, ret, scope_node);
                }
                else {
                    // We need to first figure out if all elements of the
                    // KeywordHashNode are AssocNodes with symbol keys.
                    if (PM_NODE_FLAG_P(keyword_arg, PM_KEYWORD_HASH_NODE_FLAGS_SYMBOL_KEYS)) {
                        // If they are all symbol keys then we can pass them as
                        // keyword arguments. The first thing we need to do is
                        // deduplicate. We'll do this using the combination of a
                        // Ruby hash and a Ruby array.
                        VALUE stored_indices = rb_hash_new();
                        VALUE keyword_indices = rb_ary_new_capa(elements->size);

                        size_t size = 0;
                        for (size_t element_index = 0; element_index < elements->size; element_index++) {
                            const pm_assoc_node_t *assoc = (const pm_assoc_node_t *) elements->nodes[element_index];

                            // Retrieve the stored index from the hash for this
                            // keyword.
                            VALUE keyword = pm_static_literal_value(iseq, assoc->key, scope_node);
                            VALUE stored_index = rb_hash_aref(stored_indices, keyword);

                            // If this keyword was already seen in the hash,
                            // then mark the array at that index as false and
                            // decrement the keyword size.
                            if (!NIL_P(stored_index)) {
                                rb_ary_store(keyword_indices, NUM2LONG(stored_index), Qfalse);
                                size--;
                            }

                            // Store (and possibly overwrite) the index for this
                            // keyword in the hash, mark the array at that index
                            // as true, and increment the keyword size.
                            rb_hash_aset(stored_indices, keyword, ULONG2NUM(element_index));
                            rb_ary_store(keyword_indices, (long) element_index, Qtrue);
                            size++;
                        }

                        *kw_arg = rb_xmalloc_mul_add(size, sizeof(VALUE), sizeof(struct rb_callinfo_kwarg));
                        *flags |= VM_CALL_KWARG;

                        VALUE *keywords = (*kw_arg)->keywords;
                        (*kw_arg)->references = 0;
                        (*kw_arg)->keyword_len = (int) size;

                        // Compile each value; only the last occurrence of a
                        // duplicated keyword contributes a keyword entry, the
                        // earlier ones are compiled popped for their side
                        // effects.
                        size_t keyword_index = 0;
                        for (size_t element_index = 0; element_index < elements->size; element_index++) {
                            const pm_assoc_node_t *assoc = (const pm_assoc_node_t *) elements->nodes[element_index];
                            bool popped = true;

                            if (rb_ary_entry(keyword_indices, (long) element_index) == Qtrue) {
                                keywords[keyword_index++] = pm_static_literal_value(iseq, assoc->key, scope_node);
                                popped = false;
                            }

                            PM_COMPILE(assoc->value);
                        }

                        RUBY_ASSERT(keyword_index == size);
                    }
                    else {
                        // If they aren't all symbol keys then we need to
                        // construct a new hash and pass that as an argument.
                        orig_argc++;
                        *flags |= VM_CALL_KW_SPLAT;

                        size_t size = elements->size;
                        if (size > 1) {
                            // A new hash will be created for the keyword
                            // arguments in this case, so mark the method as
                            // passing mutable keyword splat.
                            *flags |= VM_CALL_KW_SPLAT_MUT;
                        }

                        for (size_t element_index = 0; element_index < size; element_index++) {
                            const pm_assoc_node_t *assoc = (const pm_assoc_node_t *) elements->nodes[element_index];
                            PM_COMPILE_NOT_POPPED(assoc->key);
                            PM_COMPILE_NOT_POPPED(assoc->value);
                        }

                        PUSH_INSN1(ret, location, newhash, INT2FIX(size * 2));
                    }
                }

                break;
              }
              case PM_SPLAT_NODE: {
                *flags |= VM_CALL_ARGS_SPLAT;
                const pm_splat_node_t *splat_node = (const pm_splat_node_t *) argument;

                if (splat_node->expression) {
                    PM_COMPILE_NOT_POPPED(splat_node->expression);
                }
                else {
                    // A bare `*` forwards the anonymous rest parameter.
                    pm_local_index_t index = pm_lookup_local_index(iseq, scope_node, PM_CONSTANT_MULT, 0);
                    PUSH_GETLOCAL(ret, location, index.index, index.level);
                }

                bool first_splat = !has_splat;

                if (first_splat) {
                    // If this is the first splat array seen and it's not the
                    // last parameter, we want splatarray to dup it.
                    //
                    // foo(a, *b, c)
                    //        ^^
                    if (index + 1 < arguments->size || has_regular_blockarg) {
                        PUSH_INSN1(ret, location, splatarray, Qtrue);
                        *flags |= VM_CALL_ARGS_SPLAT_MUT;
                    }
                    // If this is the first splat array seen and it's the last
                    // parameter, we don't want splatarray to dup it.
                    //
                    // foo(a, *b)
                    //        ^^
                    else {
                        PUSH_INSN1(ret, location, splatarray, Qfalse);
                    }
                }
                else {
                    // If this is not the first splat array seen and it is also
                    // the last parameter, we don't want splatarray to dup it
                    // and we need to concat the array.
                    //
                    // foo(a, *b, *c)
                    //            ^^
                    PUSH_INSN1(ret, location, splatarray, Qfalse);
                    PUSH_INSN(ret, location, concatarray);
                }

                has_splat = true;
                post_splat_counter = 0;

                break;
              }
              case PM_FORWARDING_ARGUMENTS_NODE: {
                orig_argc += 2;
                *flags |= VM_CALL_ARGS_SPLAT | VM_CALL_ARGS_SPLAT_MUT | VM_CALL_ARGS_BLOCKARG | VM_CALL_KW_SPLAT;

                // Forwarding arguments nodes are treated as foo(*, **, &)
                // So foo(...) equals foo(*, **, &) and as such the local
                // table for this method is known in advance
                //
                // Push the *
                pm_local_index_t mult_local = pm_lookup_local_index(iseq, scope_node, PM_CONSTANT_MULT, 0);
                PUSH_GETLOCAL(ret, location, mult_local.index, mult_local.level);
                PUSH_INSN1(ret, location, splatarray, Qtrue);

                // Push the **
                pm_local_index_t pow_local = pm_lookup_local_index(iseq, scope_node, PM_CONSTANT_POW, 0);
                PUSH_GETLOCAL(ret, location, pow_local.index, pow_local.level);

                // Push the &
                pm_local_index_t and_local = pm_lookup_local_index(iseq, scope_node, PM_CONSTANT_AND, 0);
                PUSH_INSN2(ret, location, getblockparamproxy, INT2FIX(and_local.index + VM_ENV_DATA_SIZE - 1), INT2FIX(and_local.level));
                PUSH_INSN(ret, location, splatkw);

                break;
              }
              default: {
                post_splat_counter++;
                PM_COMPILE_NOT_POPPED(argument);

                // If we have a splat and we've seen a splat, we need to process
                // everything after the splat.
                if (has_splat) {
                    // Stack items are turned into an array and concatenated in
                    // the following cases:
                    //
                    // If the next node is a splat:
                    //
                    //   foo(*a, b, *c)
                    //
                    // If the next node is a kwarg or kwarg splat:
                    //
                    //   foo(*a, b, c: :d)
                    //   foo(*a, b, **c)
                    //
                    // If the next node is NULL (we have hit the end):
                    //
                    //   foo(*a, b)
                    if (index == arguments->size - 1) {
                        RUBY_ASSERT(post_splat_counter > 0);
                        PUSH_INSN1(ret, location, pushtoarray, INT2FIX(post_splat_counter));
                    }
                    else {
                        pm_node_t *next_arg = arguments->nodes[index + 1];

                        switch (PM_NODE_TYPE(next_arg)) {
                          // A keyword hash node contains all keyword arguments as AssocNodes and AssocSplatNodes
                          case PM_KEYWORD_HASH_NODE: {
                            PUSH_INSN1(ret, location, newarray, INT2FIX(post_splat_counter));
                            PUSH_INSN(ret, location, concatarray);
                            break;
                          }
                          case PM_SPLAT_NODE: {
                            PUSH_INSN1(ret, location, newarray, INT2FIX(post_splat_counter));
                            PUSH_INSN(ret, location, concatarray);
                            break;
                          }
                          default:
                            break;
                        }
                    }
                }
                else {
                    orig_argc++;
                }
              }
            }
        }
    }

    // The splat and keyword-splat aggregates each count as a single argument.
    if (has_splat) orig_argc++;
    if (has_keyword_splat) orig_argc++;
    return orig_argc;
}
// Compile the argument parts of a call. Returns the argument count to encode
// into the call instruction and updates *flags / *kw_arg as needed.
static int
pm_setup_args(const pm_arguments_node_t *arguments_node, const pm_node_t *block, int *flags, struct rb_callinfo_kwarg **kw_arg, rb_iseq_t *iseq, LINK_ANCHOR *const ret, pm_scope_node_t *scope_node, const pm_line_column_t *node_location)
{
    if (block && PM_NODE_TYPE_P(block, PM_BLOCK_ARGUMENT_NODE)) {
        // We compile the `&block_arg` expression first and stitch it later
        // since the nature of the expression influences whether splat should
        // duplicate the array.
        bool regular_block_arg = true;
        DECL_ANCHOR(block_arg);
        INIT_ANCHOR(block_arg);
        pm_compile_node(iseq, block, block_arg, false, scope_node);

        *flags |= VM_CALL_ARGS_BLOCKARG;

        if (LIST_INSN_SIZE_ONE(block_arg)) {
            LINK_ELEMENT *elem = FIRST_ELEMENT(block_arg);
            if (IS_INSN(elem)) {
                INSN *iobj = (INSN *) elem;
                if (iobj->insn_id == BIN(getblockparam)) {
                    iobj->insn_id = BIN(getblockparamproxy);
                }

                // Allow splat without duplication for simple one-instruction
                // block arguments like `&arg`. It is known that this optimization
                // can be too aggressive in some cases. See [Bug #16504].
                regular_block_arg = false;
            }
        }

        // Compile the positional/keyword arguments, then append the block
        // argument instructions after them.
        int argc = pm_setup_args_core(arguments_node, block, flags, regular_block_arg, kw_arg, iseq, ret, scope_node, node_location);
        PUSH_SEQ(ret, block_arg);
        return argc;
    }

    return pm_setup_args_core(arguments_node, block, flags, false, kw_arg, iseq, ret, scope_node, node_location);
}
/**
 * Compile an index operator write node, which is a node that is writing a value
 * using the [] and []= methods. It looks like:
 *
 *     foo[bar] += baz
 *
 * This breaks down to caching the receiver and arguments on the stack, calling
 * the [] method, calling the operator method with the result of the [] method,
 * and then calling the []= method with the result of the operator method.
 */
static void
pm_compile_index_operator_write_node(rb_iseq_t *iseq, const pm_index_operator_write_node_t *node, const pm_line_column_t *node_location, LINK_ANCHOR *const ret, bool popped, pm_scope_node_t *scope_node)
{
    const pm_line_column_t location = *node_location;

    // Reserve a slot under everything for the expression's result value.
    if (!popped) PUSH_INSN(ret, location, putnil);

    PM_COMPILE_NOT_POPPED(node->receiver);

    // boff is 1 when a block argument is on the stack, 0 otherwise.
    int boff = (node->block == NULL ? 0 : 1);
    int flag = PM_NODE_TYPE_P(node->receiver, PM_SELF_NODE) ? VM_CALL_FCALL : 0;
    struct rb_callinfo_kwarg *keywords = NULL;
    int argc = pm_setup_args(node->arguments, node->block, &flag, &keywords, iseq, ret, scope_node, node_location);

    if ((argc > 0 || boff) && (flag & VM_CALL_KW_SPLAT)) {
        if (boff) {
            PUSH_INSN(ret, location, splatkw);
        }
        else {
            PUSH_INSN(ret, location, dup);
            PUSH_INSN(ret, location, splatkw);
            PUSH_INSN(ret, location, pop);
        }
    }

    // Number of stack slots (receiver + args [+ block] [+ kwargs]) that must
    // be duplicated so they are still available for the []= call.
    int dup_argn = argc + 1 + boff;
    int keyword_len = 0;

    if (keywords) {
        keyword_len = keywords->keyword_len;
        dup_argn += keyword_len;
    }

    // Duplicate receiver+args and call [] on the copy. Mutation flags are
    // masked off so the read call cannot clobber the cached arguments.
    PUSH_INSN1(ret, location, dupn, INT2FIX(dup_argn));
    PUSH_SEND_R(ret, location, idAREF, INT2FIX(argc), NULL, INT2FIX(flag & ~(VM_CALL_ARGS_SPLAT_MUT | VM_CALL_KW_SPLAT_MUT)), keywords);
    PM_COMPILE_NOT_POPPED(node->value);

    // Apply the binary operator to the read value.
    ID id_operator = pm_constant_id_lookup(scope_node, node->binary_operator);
    PUSH_SEND(ret, location, id_operator, INT2FIX(1));

    // Stash the computed value into the reserved result slot.
    if (!popped) {
        PUSH_INSN1(ret, location, setn, INT2FIX(dup_argn + 1));
    }
    if (flag & VM_CALL_ARGS_SPLAT) {
        // With a splat, the new value is appended to the (possibly dup'd)
        // argument array before calling []=.
        if (flag & VM_CALL_KW_SPLAT) {
            PUSH_INSN1(ret, location, topn, INT2FIX(2 + boff));

            if (!(flag & VM_CALL_ARGS_SPLAT_MUT)) {
                PUSH_INSN1(ret, location, splatarray, Qtrue);
                flag |= VM_CALL_ARGS_SPLAT_MUT;
            }

            PUSH_INSN(ret, location, swap);
            PUSH_INSN1(ret, location, pushtoarray, INT2FIX(1));
            PUSH_INSN1(ret, location, setn, INT2FIX(2 + boff));
            PUSH_INSN(ret, location, pop);
        }
        else {
            if (boff > 0) {
                PUSH_INSN1(ret, location, dupn, INT2FIX(3));
                PUSH_INSN(ret, location, swap);
                PUSH_INSN(ret, location, pop);
            }
            if (!(flag & VM_CALL_ARGS_SPLAT_MUT)) {
                PUSH_INSN(ret, location, swap);
                PUSH_INSN1(ret, location, splatarray, Qtrue);
                PUSH_INSN(ret, location, swap);
                flag |= VM_CALL_ARGS_SPLAT_MUT;
            }
            PUSH_INSN1(ret, location, pushtoarray, INT2FIX(1));
            if (boff > 0) {
                PUSH_INSN1(ret, location, setn, INT2FIX(3));
                PUSH_INSN(ret, location, pop);
                PUSH_INSN(ret, location, pop);
            }
        }

        PUSH_SEND_R(ret, location, idASET, INT2FIX(argc), NULL, INT2FIX(flag), keywords);
    }
    else if (flag & VM_CALL_KW_SPLAT) {
        if (boff > 0) {
            PUSH_INSN1(ret, location, topn, INT2FIX(2));
            PUSH_INSN(ret, location, swap);
            PUSH_INSN1(ret, location, setn, INT2FIX(3));
            PUSH_INSN(ret, location, pop);
        }
        PUSH_INSN(ret, location, swap);
        PUSH_SEND_R(ret, location, idASET, INT2FIX(argc + 1), NULL, INT2FIX(flag), keywords);
    }
    else if (keyword_len) {
        // Rotate the new value beneath the keyword arguments so []= receives
        // the arguments in the expected order.
        PUSH_INSN(ret, location, dup);
        PUSH_INSN1(ret, location, opt_reverse, INT2FIX(keyword_len + boff + 2));
        PUSH_INSN1(ret, location, opt_reverse, INT2FIX(keyword_len + boff + 1));
        PUSH_INSN(ret, location, pop);
        PUSH_SEND_R(ret, location, idASET, INT2FIX(argc + 1), NULL, INT2FIX(flag), keywords);
    }
    else {
        if (boff > 0) {
            PUSH_INSN(ret, location, swap);
        }
        PUSH_SEND_R(ret, location, idASET, INT2FIX(argc + 1), NULL, INT2FIX(flag), keywords);
    }

    // Discard the []= return value, leaving the stashed result (if any).
    PUSH_INSN(ret, location, pop);
}
1810 * Compile an index control flow write node, which is a node that is writing a
1811 * value using the [] and []= methods and the &&= and ||= operators. It looks
1812 * like:
1814 * foo[bar] ||= baz
1816 * This breaks down to caching the receiver and arguments on the stack, calling
1817 * the [] method, checking the result and then changing control flow based on
1818 * it. If the value would result in a write, then the value is written using the
1819 * []= method.
1821 static void
1822 pm_compile_index_control_flow_write_node(rb_iseq_t *iseq, const pm_node_t *node, const pm_node_t *receiver, const pm_arguments_node_t *arguments, const pm_node_t *block, const pm_node_t *value, const pm_line_column_t *node_location, LINK_ANCHOR *const ret, bool popped, pm_scope_node_t *scope_node)
1824 const pm_line_column_t location = *node_location;
// Reserve a stack slot that will hold the expression's overall result when
// the caller needs the value.
1825 if (!popped) PUSH_INSN(ret, location, putnil);
1826 PM_COMPILE_NOT_POPPED(receiver);
// boff is 1 when a block argument is present; the block occupies one extra
// stack slot that every offset below must account for.
1828 int boff = (block == NULL ? 0 : 1);
1829 int flag = PM_NODE_TYPE_P(receiver, PM_SELF_NODE) ? VM_CALL_FCALL : 0;
1830 struct rb_callinfo_kwarg *keywords = NULL;
1831 int argc = pm_setup_args(arguments, block, &flag, &keywords, iseq, ret, scope_node, node_location);
// Normalize a keyword splat into a hash on the stack. With a block the
// splat sits on top and is converted in place; otherwise dup/splatkw/pop
// keeps the overall stack layout unchanged.
1833 if ((argc > 0 || boff) && (flag & VM_CALL_KW_SPLAT)) {
1834 if (boff) {
1835 PUSH_INSN(ret, location, splatkw);
1837 else {
1838 PUSH_INSN(ret, location, dup);
1839 PUSH_INSN(ret, location, splatkw);
1840 PUSH_INSN(ret, location, pop);
// Total number of operands (receiver + positional args [+ block]
// [+ keyword args]) that must be duplicated so they survive the initial []
// call and can be reused for the []= call below.
1844 int dup_argn = argc + 1 + boff;
1845 int keyword_len = 0;
1847 if (keywords) {
1848 keyword_len = keywords->keyword_len;
1849 dup_argn += keyword_len;
// Call [] on copies of the cached operands, with the *_MUT flags masked
// off so the cached splat/keyword arguments are not mutated by the read.
1852 PUSH_INSN1(ret, location, dupn, INT2FIX(dup_argn));
1853 PUSH_SEND_R(ret, location, idAREF, INT2FIX(argc), NULL, INT2FIX(flag & ~(VM_CALL_ARGS_SPLAT_MUT | VM_CALL_KW_SPLAT_MUT)), keywords);
// label is the short-circuit target; lfin marks the end of the expression.
1855 LABEL *label = NEW_LABEL(location.line);
1856 LABEL *lfin = NEW_LABEL(location.line);
// &&= only writes when the read value is truthy; ||= only when it is falsy.
1858 PUSH_INSN(ret, location, dup);
1859 if (PM_NODE_TYPE_P(node, PM_INDEX_AND_WRITE_NODE)) {
1860 PUSH_INSNL(ret, location, branchunless, label);
1862 else {
1863 PUSH_INSNL(ret, location, branchif, label);
// Write path: discard the value read by [] and compile the value to write.
1866 PUSH_INSN(ret, location, pop);
1867 PM_COMPILE_NOT_POPPED(value);
// Store the written value as the overall result of the expression.
1869 if (!popped) {
1870 PUSH_INSN1(ret, location, setn, INT2FIX(dup_argn + 1));
// Emit the []= call. The stack shuffling depends on how the arguments were
// built: with an argument splat the value is pushed onto (a mutable copy
// of) the splat array; with keywords it is rotated beneath them; otherwise
// it is passed as the trailing positional argument.
1873 if (flag & VM_CALL_ARGS_SPLAT) {
1874 if (flag & VM_CALL_KW_SPLAT) {
1875 PUSH_INSN1(ret, location, topn, INT2FIX(2 + boff));
// Make the splat array mutable before appending to it.
1876 if (!(flag & VM_CALL_ARGS_SPLAT_MUT)) {
1877 PUSH_INSN1(ret, location, splatarray, Qtrue);
1878 flag |= VM_CALL_ARGS_SPLAT_MUT;
1881 PUSH_INSN(ret, location, swap);
1882 PUSH_INSN1(ret, location, pushtoarray, INT2FIX(1));
1883 PUSH_INSN1(ret, location, setn, INT2FIX(2 + boff));
1884 PUSH_INSN(ret, location, pop);
1886 else {
// A block argument sits above the splat array; shuffle it out of the
// way before appending the value.
1887 if (boff > 0) {
1888 PUSH_INSN1(ret, location, dupn, INT2FIX(3));
1889 PUSH_INSN(ret, location, swap);
1890 PUSH_INSN(ret, location, pop);
1892 if (!(flag & VM_CALL_ARGS_SPLAT_MUT)) {
1893 PUSH_INSN(ret, location, swap);
1894 PUSH_INSN1(ret, location, splatarray, Qtrue);
1895 PUSH_INSN(ret, location, swap);
1896 flag |= VM_CALL_ARGS_SPLAT_MUT;
1898 PUSH_INSN1(ret, location, pushtoarray, INT2FIX(1));
1899 if (boff > 0) {
1900 PUSH_INSN1(ret, location, setn, INT2FIX(3));
1901 PUSH_INSN(ret, location, pop);
1902 PUSH_INSN(ret, location, pop);
1906 PUSH_SEND_R(ret, location, idASET, INT2FIX(argc), NULL, INT2FIX(flag), keywords);
1908 else if (flag & VM_CALL_KW_SPLAT) {
1909 if (boff > 0) {
1910 PUSH_INSN1(ret, location, topn, INT2FIX(2));
1911 PUSH_INSN(ret, location, swap);
1912 PUSH_INSN1(ret, location, setn, INT2FIX(3));
1913 PUSH_INSN(ret, location, pop);
1916 PUSH_INSN(ret, location, swap);
1917 PUSH_SEND_R(ret, location, idASET, INT2FIX(argc + 1), NULL, INT2FIX(flag), keywords);
1919 else if (keyword_len) {
// Rotate the written value beneath the keyword arguments so []= receives
// it in positional-argument order.
1920 PUSH_INSN1(ret, location, opt_reverse, INT2FIX(keyword_len + boff + 1));
1921 PUSH_INSN1(ret, location, opt_reverse, INT2FIX(keyword_len + boff + 0));
1922 PUSH_SEND_R(ret, location, idASET, INT2FIX(argc + 1), NULL, INT2FIX(flag), keywords);
1924 else {
1925 if (boff > 0) {
1926 PUSH_INSN(ret, location, swap);
1928 PUSH_SEND_R(ret, location, idASET, INT2FIX(argc + 1), NULL, INT2FIX(flag), keywords);
// Discard the return value of []=; the expression's result is the value
// stored earlier via setn (or the value read, on the short-circuit path).
1931 PUSH_INSN(ret, location, pop);
1932 PUSH_INSNL(ret, location, jump, lfin);
// Short-circuit path: the value read by [] becomes the result; pop the
// cached receiver/arguments off the stack with adjuststack.
1933 PUSH_LABEL(ret, label);
1934 if (!popped) {
1935 PUSH_INSN1(ret, location, setn, INT2FIX(dup_argn + 1));
1937 PUSH_INSN1(ret, location, adjuststack, INT2FIX(dup_argn + 1));
1938 PUSH_LABEL(ret, lfin);
1941 // When we compile a pattern matching expression, we use the stack as a scratch
1942 // space to store lots of different values (consider it like we have a pattern
1943 // matching function and we need space for a bunch of different local
1944 // variables). The "base index" refers to the index on the stack where we
1945 // started compiling the pattern matching expression. These offsets from that
1946 // base index indicate the location of the various locals we need.
// Cached result of calling #deconstruct on the matchee: nil until computed,
// false when the matchee does not respond to #deconstruct, otherwise the
// deconstructed array.
1947 #define PM_PATTERN_BASE_INDEX_OFFSET_DECONSTRUCTED_CACHE 0
// The formatted message used if the overall match fails and must raise.
1948 #define PM_PATTERN_BASE_INDEX_OFFSET_ERROR_STRING 1
// Boolean selecting which error to raise on failure: true raises
// NoMatchingPatternKeyError, false raises NoMatchingPatternError.
1949 #define PM_PATTERN_BASE_INDEX_OFFSET_KEY_ERROR_P 2
// The value passed as the matchee: keyword to NoMatchingPatternKeyError.new.
1950 #define PM_PATTERN_BASE_INDEX_OFFSET_KEY_ERROR_MATCHEE 3
// The value passed as the key: keyword to NoMatchingPatternKeyError.new.
1951 #define PM_PATTERN_BASE_INDEX_OFFSET_KEY_ERROR_KEY 4
1953 // A forward declaration because this is the recursive function that handles
1954 // compiling a pattern. It can be reentered by nesting patterns, as in the case
1955 // of arrays or hashes.
1956 static int pm_compile_pattern(rb_iseq_t *iseq, pm_scope_node_t *scope_node, const pm_node_t *node, LINK_ANCHOR *const ret, LABEL *matched_label, LABEL *unmatched_label, bool in_single_pattern, bool in_alternation_pattern, bool use_deconstructed_cache, unsigned int base_index);
1959 * This function generates the code to set up the error string and error_p
1960 * locals depending on whether or not the pattern matched.
1962 static int
1963 pm_compile_pattern_generic_error(rb_iseq_t *iseq, pm_scope_node_t *scope_node, const pm_node_t *node, LINK_ANCHOR *const ret, VALUE message, unsigned int base_index)
1965 const pm_line_column_t location = PM_NODE_START_LINE_COLUMN(scope_node->parser, node);
1966 LABEL *match_succeeded_label = NEW_LABEL(location.line);
// The match result is on top of the stack; skip the error bookkeeping
// entirely when it is truthy.
1968 PUSH_INSN(ret, location, dup);
1969 PUSH_INSNL(ret, location, branchif, match_succeeded_label);
// Format the message via core#sprintf(message, matchee) and store it into
// the error-string scratch slot.
1971 PUSH_INSN1(ret, location, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
1972 PUSH_INSN1(ret, location, putobject, message);
1973 PUSH_INSN1(ret, location, topn, INT2FIX(3));
1974 PUSH_SEND(ret, location, id_core_sprintf, INT2FIX(2));
1975 PUSH_INSN1(ret, location, setn, INT2FIX(base_index + PM_PATTERN_BASE_INDEX_OFFSET_ERROR_STRING + 1));
// Record that this failure is not a key error.
1977 PUSH_INSN1(ret, location, putobject, Qfalse);
1978 PUSH_INSN1(ret, location, setn, INT2FIX(base_index + PM_PATTERN_BASE_INDEX_OFFSET_KEY_ERROR_P + 2));
// setn leaves its operand on the stack, so drop the Qfalse and the string.
1980 PUSH_INSN(ret, location, pop);
1981 PUSH_INSN(ret, location, pop);
1982 PUSH_LABEL(ret, match_succeeded_label);
1984 return COMPILE_OK;
1988 * This function generates the code to set up the error string and error_p
1989 * locals depending on whether or not the pattern matched when the value needs
1990 * to match a specific deconstructed length.
1992 static int
1993 pm_compile_pattern_length_error(rb_iseq_t *iseq, pm_scope_node_t *scope_node, const pm_node_t *node, LINK_ANCHOR *const ret, VALUE message, VALUE length, unsigned int base_index)
1995 const pm_line_column_t location = PM_NODE_START_LINE_COLUMN(scope_node->parser, node);
1996 LABEL *match_succeeded_label = NEW_LABEL(location.line);
// The length-check result is on top of the stack; skip the error
// bookkeeping entirely when it is truthy.
1998 PUSH_INSN(ret, location, dup);
1999 PUSH_INSNL(ret, location, branchif, match_succeeded_label);
// Build the message via core#sprintf(message, matchee, matchee.length,
// expected length) and store it into the error-string scratch slot.
2001 PUSH_INSN1(ret, location, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
2002 PUSH_INSN1(ret, location, putobject, message);
2003 PUSH_INSN1(ret, location, topn, INT2FIX(3));
2004 PUSH_INSN(ret, location, dup);
2005 PUSH_SEND(ret, location, idLength, INT2FIX(0));
2006 PUSH_INSN1(ret, location, putobject, length);
2007 PUSH_SEND(ret, location, id_core_sprintf, INT2FIX(4));
2008 PUSH_INSN1(ret, location, setn, INT2FIX(base_index + PM_PATTERN_BASE_INDEX_OFFSET_ERROR_STRING + 1));
// Record that this failure is a length mismatch, not a key error.
2010 PUSH_INSN1(ret, location, putobject, Qfalse);
2011 PUSH_INSN1(ret, location, setn, INT2FIX(base_index + PM_PATTERN_BASE_INDEX_OFFSET_KEY_ERROR_P + 2));
// setn leaves its operand on the stack, so drop the Qfalse and the string.
2013 PUSH_INSN(ret, location, pop);
2014 PUSH_INSN(ret, location, pop);
2015 PUSH_LABEL(ret, match_succeeded_label);
2017 return COMPILE_OK;
2021 * This function generates the code to set up the error string and error_p
2022 * locals depending on whether or not the pattern matched when the value needs
2023 * to pass a specific #=== method call.
2025 static int
2026 pm_compile_pattern_eqq_error(rb_iseq_t *iseq, pm_scope_node_t *scope_node, const pm_node_t *node, LINK_ANCHOR *const ret, unsigned int base_index)
2028 const pm_line_column_t location = PM_NODE_START_LINE_COLUMN(scope_node->parser, node);
2029 LABEL *match_succeeded_label = NEW_LABEL(location.line);
// The #=== result is on top of the stack; skip the error bookkeeping
// entirely when it is truthy.
2031 PUSH_INSN(ret, location, dup);
2032 PUSH_INSNL(ret, location, branchif, match_succeeded_label);
// Format "%p === %p does not return true" with the pattern and the matchee
// (kept on the stack below the result) and store it into the error-string
// slot; record that this is not a key error.
2034 PUSH_INSN1(ret, location, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
2035 PUSH_INSN1(ret, location, putobject, rb_fstring_lit("%p === %p does not return true"));
2036 PUSH_INSN1(ret, location, topn, INT2FIX(3));
2037 PUSH_INSN1(ret, location, topn, INT2FIX(5));
2038 PUSH_SEND(ret, location, id_core_sprintf, INT2FIX(3));
2039 PUSH_INSN1(ret, location, setn, INT2FIX(base_index + PM_PATTERN_BASE_INDEX_OFFSET_ERROR_STRING + 1));
2040 PUSH_INSN1(ret, location, putobject, Qfalse);
2041 PUSH_INSN1(ret, location, setn, INT2FIX(base_index + PM_PATTERN_BASE_INDEX_OFFSET_KEY_ERROR_P + 2));
2042 PUSH_INSN(ret, location, pop);
2043 PUSH_INSN(ret, location, pop);
// On both paths: copy the match result two slots down and discard the two
// extra entries above it, leaving only the result on top.
2045 PUSH_LABEL(ret, match_succeeded_label);
2046 PUSH_INSN1(ret, location, setn, INT2FIX(2));
2047 PUSH_INSN(ret, location, pop);
2048 PUSH_INSN(ret, location, pop);
2050 return COMPILE_OK;
2054 * This is a variation on compiling a pattern matching expression that is used
2055 * to have the pattern matching instructions fall through to immediately after
2056 * the pattern if it passes. Otherwise it jumps to the given unmatched_label
2057 * label.
2059 static int
2060 pm_compile_pattern_match(rb_iseq_t *iseq, pm_scope_node_t *scope_node, const pm_node_t *node, LINK_ANCHOR *const ret, LABEL *unmatched_label, bool in_single_pattern, bool in_alternation_pattern, bool use_deconstructed_cache, unsigned int base_index)
// Compile the pattern with a fresh matched label placed immediately after
// it, so a successful match simply falls through to the following code.
2062 LABEL *matched_label = NEW_LABEL(pm_node_line_number(scope_node->parser, node));
2063 CHECK(pm_compile_pattern(iseq, scope_node, node, ret, matched_label, unmatched_label, in_single_pattern, in_alternation_pattern, use_deconstructed_cache, base_index));
2064 PUSH_LABEL(ret, matched_label);
2065 return COMPILE_OK;
2069 * This function compiles in the code necessary to call #deconstruct on the
2070 * value to match against. It raises appropriate errors if the method does not
2071 * exist or if it returns the wrong type.
2073 static int
2074 pm_compile_pattern_deconstruct(rb_iseq_t *iseq, pm_scope_node_t *scope_node, const pm_node_t *node, LINK_ANCHOR *const ret, LABEL *deconstruct_label, LABEL *match_failed_label, LABEL *deconstructed_label, LABEL *type_error_label, bool in_single_pattern, bool use_deconstructed_cache, unsigned int base_index)
2076 const pm_line_column_t location = PM_NODE_START_LINE_COLUMN(scope_node->parser, node);
// Consult the cache slot first: nil means #deconstruct has not been called
// yet, false means the matchee does not respond to it, and anything else is
// the previously deconstructed array, which can be reused directly.
2078 if (use_deconstructed_cache) {
2079 PUSH_INSN1(ret, location, topn, INT2FIX(base_index + PM_PATTERN_BASE_INDEX_OFFSET_DECONSTRUCTED_CACHE));
2080 PUSH_INSNL(ret, location, branchnil, deconstruct_label);
2082 PUSH_INSN1(ret, location, topn, INT2FIX(base_index + PM_PATTERN_BASE_INDEX_OFFSET_DECONSTRUCTED_CACHE));
2083 PUSH_INSNL(ret, location, branchunless, match_failed_label);
// Cache hit: replace the matchee with the cached array and skip the call.
2085 PUSH_INSN(ret, location, pop);
2086 PUSH_INSN1(ret, location, topn, INT2FIX(base_index + PM_PATTERN_BASE_INDEX_OFFSET_DECONSTRUCTED_CACHE - 1));
2087 PUSH_INSNL(ret, location, jump, deconstructed_label);
2089 else {
2090 PUSH_INSNL(ret, location, jump, deconstruct_label);
// Check respond_to?(:deconstruct), caching the boolean when requested, and
// fail the match (with a descriptive error in single-pattern mode) if the
// matchee cannot be deconstructed.
2093 PUSH_LABEL(ret, deconstruct_label);
2094 PUSH_INSN(ret, location, dup);
2095 PUSH_INSN1(ret, location, putobject, ID2SYM(rb_intern("deconstruct")));
2096 PUSH_SEND(ret, location, idRespond_to, INT2FIX(1));
2098 if (use_deconstructed_cache) {
2099 PUSH_INSN1(ret, location, setn, INT2FIX(base_index + PM_PATTERN_BASE_INDEX_OFFSET_DECONSTRUCTED_CACHE + 1));
2102 if (in_single_pattern) {
2103 CHECK(pm_compile_pattern_generic_error(iseq, scope_node, node, ret, rb_fstring_lit("%p does not respond to #deconstruct"), base_index + 1));
2106 PUSH_INSNL(ret, location, branchunless, match_failed_label);
2107 PUSH_SEND(ret, location, rb_intern("deconstruct"), INT2FIX(0));
// Store the deconstructed array into the cache slot for later reuse.
2109 if (use_deconstructed_cache) {
2110 PUSH_INSN1(ret, location, setn, INT2FIX(base_index + PM_PATTERN_BASE_INDEX_OFFSET_DECONSTRUCTED_CACHE));
// #deconstruct must return an Array; otherwise jump to the caller-provided
// type_error_label, which raises the TypeError.
2113 PUSH_INSN(ret, location, dup);
2114 PUSH_INSN1(ret, location, checktype, INT2FIX(T_ARRAY));
2115 PUSH_INSNL(ret, location, branchunless, type_error_label);
2116 PUSH_LABEL(ret, deconstructed_label);
2118 return COMPILE_OK;
2122 * This function compiles in the code necessary to match against the optional
2123 * constant path that is attached to an array, find, or hash pattern.
2125 static int
2126 pm_compile_pattern_constant(rb_iseq_t *iseq, pm_scope_node_t *scope_node, const pm_node_t *node, LINK_ANCHOR *const ret, LABEL *match_failed_label, bool in_single_pattern, unsigned int base_index)
2128 const pm_line_column_t location = PM_NODE_START_LINE_COLUMN(scope_node->parser, node);
// Duplicate the matchee, compile the constant, and test it with checkmatch
// (constant === matchee, case/in semantics).
2130 PUSH_INSN(ret, location, dup);
2131 PM_COMPILE_NOT_POPPED(node);
// In single-pattern mode keep extra copies of both operands around so the
// eqq error helper can include them in its "%p === %p" message.
2133 if (in_single_pattern) {
2134 PUSH_INSN1(ret, location, dupn, INT2FIX(2));
2136 PUSH_INSN1(ret, location, checkmatch, INT2FIX(VM_CHECKMATCH_TYPE_CASE));
2137 if (in_single_pattern) {
2138 CHECK(pm_compile_pattern_eqq_error(iseq, scope_node, node, ret, base_index + 3));
2140 PUSH_INSNL(ret, location, branchunless, match_failed_label);
2141 return COMPILE_OK;
2145 * When matching fails, an appropriate error must be raised. This function is
2146 * responsible for compiling in those error raising instructions.
2148 static void
2149 pm_compile_pattern_error_handler(rb_iseq_t *iseq, const pm_scope_node_t *scope_node, const pm_node_t *node, LINK_ANCHOR *const ret, LABEL *done_label, bool popped)
2151 const pm_line_column_t location = PM_NODE_START_LINE_COLUMN(scope_node->parser, node);
2152 LABEL *key_error_label = NEW_LABEL(location.line);
2153 LABEL *cleanup_label = NEW_LABEL(location.line);
// Describe the matchee:/key: keyword arguments that will be passed to
// NoMatchingPatternKeyError.new on the key-error path.
2155 struct rb_callinfo_kwarg *kw_arg = rb_xmalloc_mul_add(2, sizeof(VALUE), sizeof(struct rb_callinfo_kwarg));
2156 kw_arg->references = 0;
2157 kw_arg->keyword_len = 2;
2158 kw_arg->keywords[0] = ID2SYM(rb_intern("matchee"));
2159 kw_arg->keywords[1] = ID2SYM(rb_intern("key"));
// Dispatch on the key_error_p scratch slot: true raises
// NoMatchingPatternKeyError, false raises NoMatchingPatternError.
2161 PUSH_INSN1(ret, location, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
2162 PUSH_INSN1(ret, location, topn, INT2FIX(PM_PATTERN_BASE_INDEX_OFFSET_KEY_ERROR_P + 2));
2163 PUSH_INSNL(ret, location, branchif, key_error_label);
// Generic failure: raise NoMatchingPatternError with the message formatted
// as core#sprintf("%p: %s", matchee, error_string).
2165 PUSH_INSN1(ret, location, putobject, rb_eNoMatchingPatternError);
2166 PUSH_INSN1(ret, location, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
2167 PUSH_INSN1(ret, location, putobject, rb_fstring_lit("%p: %s"));
2168 PUSH_INSN1(ret, location, topn, INT2FIX(4));
2169 PUSH_INSN1(ret, location, topn, INT2FIX(PM_PATTERN_BASE_INDEX_OFFSET_ERROR_STRING + 6));
2170 PUSH_SEND(ret, location, id_core_sprintf, INT2FIX(3));
2171 PUSH_SEND(ret, location, id_core_raise, INT2FIX(2));
2172 PUSH_INSNL(ret, location, jump, cleanup_label);
// Key failure: construct NoMatchingPatternKeyError.new(message,
// matchee: ..., key: ...) from the scratch slots and raise it.
2174 PUSH_LABEL(ret, key_error_label);
2175 PUSH_INSN1(ret, location, putobject, rb_eNoMatchingPatternKeyError);
2176 PUSH_INSN1(ret, location, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
2177 PUSH_INSN1(ret, location, putobject, rb_fstring_lit("%p: %s"));
2178 PUSH_INSN1(ret, location, topn, INT2FIX(4));
2179 PUSH_INSN1(ret, location, topn, INT2FIX(PM_PATTERN_BASE_INDEX_OFFSET_ERROR_STRING + 6));
2180 PUSH_SEND(ret, location, id_core_sprintf, INT2FIX(3));
2181 PUSH_INSN1(ret, location, topn, INT2FIX(PM_PATTERN_BASE_INDEX_OFFSET_KEY_ERROR_MATCHEE + 4));
2182 PUSH_INSN1(ret, location, topn, INT2FIX(PM_PATTERN_BASE_INDEX_OFFSET_KEY_ERROR_KEY + 5));
2183 PUSH_SEND_R(ret, location, rb_intern("new"), INT2FIX(1), NULL, INT2FIX(VM_CALL_KWARG), kw_arg);
2184 PUSH_SEND(ret, location, id_core_raise, INT2FIX(1));
2185 PUSH_LABEL(ret, cleanup_label);
// Drop the seven scratch-space entries and produce the expression's result
// slot (nil) when the caller needs a value.
2187 PUSH_INSN1(ret, location, adjuststack, INT2FIX(7));
2188 if (!popped) PUSH_INSN(ret, location, putnil);
2189 PUSH_INSNL(ret, location, jump, done_label);
// NOTE(review): these trailing instructions sit after an unconditional jump;
// they appear to exist to keep the modeled stack depth consistent for the
// instruction sequence verifier — confirm against compile.c's equivalent.
2190 PUSH_INSN1(ret, location, dupn, INT2FIX(5));
2191 if (popped) PUSH_INSN(ret, location, putnil);
2195 * Compile a pattern matching expression.
2197 static int
2198 pm_compile_pattern(rb_iseq_t *iseq, pm_scope_node_t *scope_node, const pm_node_t *node, LINK_ANCHOR *const ret, LABEL *matched_label, LABEL *unmatched_label, bool in_single_pattern, bool in_alternation_pattern, bool use_deconstructed_cache, unsigned int base_index)
2200 const pm_line_column_t location = PM_NODE_START_LINE_COLUMN(scope_node->parser, node);
2202 switch (PM_NODE_TYPE(node)) {
2203 case PM_ARRAY_PATTERN_NODE: {
2204 // Array patterns in pattern matching are triggered by using commas in
2205 // a pattern or wrapping it in braces. They are represented by a
2206 // ArrayPatternNode. This looks like:
2208 // foo => [1, 2, 3]
2210 // It can optionally have a splat in the middle of it, which can
2211 // optionally have a name attached.
2212 const pm_array_pattern_node_t *cast = (const pm_array_pattern_node_t *) node;
2214 const size_t requireds_size = cast->requireds.size;
2215 const size_t posts_size = cast->posts.size;
2216 const size_t minimum_size = requireds_size + posts_size;
2218 bool rest_named = false;
2219 bool use_rest_size = false;
2221 if (cast->rest != NULL) {
2222 rest_named = (PM_NODE_TYPE_P(cast->rest, PM_SPLAT_NODE) && ((const pm_splat_node_t *) cast->rest)->expression != NULL);
2223 use_rest_size = (rest_named || (!rest_named && posts_size > 0));
2226 LABEL *match_failed_label = NEW_LABEL(location.line);
2227 LABEL *type_error_label = NEW_LABEL(location.line);
2228 LABEL *deconstruct_label = NEW_LABEL(location.line);
2229 LABEL *deconstructed_label = NEW_LABEL(location.line);
2231 if (use_rest_size) {
2232 PUSH_INSN1(ret, location, putobject, INT2FIX(0));
2233 PUSH_INSN(ret, location, swap);
2234 base_index++;
2237 if (cast->constant != NULL) {
2238 CHECK(pm_compile_pattern_constant(iseq, scope_node, cast->constant, ret, match_failed_label, in_single_pattern, base_index));
2241 CHECK(pm_compile_pattern_deconstruct(iseq, scope_node, node, ret, deconstruct_label, match_failed_label, deconstructed_label, type_error_label, in_single_pattern, use_deconstructed_cache, base_index));
2243 PUSH_INSN(ret, location, dup);
2244 PUSH_SEND(ret, location, idLength, INT2FIX(0));
2245 PUSH_INSN1(ret, location, putobject, INT2FIX(minimum_size));
2246 PUSH_SEND(ret, location, cast->rest == NULL ? idEq : idGE, INT2FIX(1));
2247 if (in_single_pattern) {
2248 VALUE message = cast->rest == NULL ? rb_fstring_lit("%p length mismatch (given %p, expected %p)") : rb_fstring_lit("%p length mismatch (given %p, expected %p+)");
2249 CHECK(pm_compile_pattern_length_error(iseq, scope_node, node, ret, message, INT2FIX(minimum_size), base_index + 1));
2251 PUSH_INSNL(ret, location, branchunless, match_failed_label);
2253 for (size_t index = 0; index < requireds_size; index++) {
2254 const pm_node_t *required = cast->requireds.nodes[index];
2255 PUSH_INSN(ret, location, dup);
2256 PUSH_INSN1(ret, location, putobject, INT2FIX(index));
2257 PUSH_SEND(ret, location, idAREF, INT2FIX(1));
2258 CHECK(pm_compile_pattern_match(iseq, scope_node, required, ret, match_failed_label, in_single_pattern, in_alternation_pattern, false, base_index + 1));
2261 if (cast->rest != NULL) {
2262 if (rest_named) {
2263 PUSH_INSN(ret, location, dup);
2264 PUSH_INSN1(ret, location, putobject, INT2FIX(requireds_size));
2265 PUSH_INSN1(ret, location, topn, INT2FIX(1));
2266 PUSH_SEND(ret, location, idLength, INT2FIX(0));
2267 PUSH_INSN1(ret, location, putobject, INT2FIX(minimum_size));
2268 PUSH_SEND(ret, location, idMINUS, INT2FIX(1));
2269 PUSH_INSN1(ret, location, setn, INT2FIX(4));
2270 PUSH_SEND(ret, location, idAREF, INT2FIX(2));
2271 CHECK(pm_compile_pattern_match(iseq, scope_node, ((const pm_splat_node_t *) cast->rest)->expression, ret, match_failed_label, in_single_pattern, in_alternation_pattern, false, base_index + 1));
2273 else if (posts_size > 0) {
2274 PUSH_INSN(ret, location, dup);
2275 PUSH_SEND(ret, location, idLength, INT2FIX(0));
2276 PUSH_INSN1(ret, location, putobject, INT2FIX(minimum_size));
2277 PUSH_SEND(ret, location, idMINUS, INT2FIX(1));
2278 PUSH_INSN1(ret, location, setn, INT2FIX(2));
2279 PUSH_INSN(ret, location, pop);
2283 for (size_t index = 0; index < posts_size; index++) {
2284 const pm_node_t *post = cast->posts.nodes[index];
2285 PUSH_INSN(ret, location, dup);
2287 PUSH_INSN1(ret, location, putobject, INT2FIX(requireds_size + index));
2288 PUSH_INSN1(ret, location, topn, INT2FIX(3));
2289 PUSH_SEND(ret, location, idPLUS, INT2FIX(1));
2290 PUSH_SEND(ret, location, idAREF, INT2FIX(1));
2291 CHECK(pm_compile_pattern_match(iseq, scope_node, post, ret, match_failed_label, in_single_pattern, in_alternation_pattern, false, base_index + 1));
2294 PUSH_INSN(ret, location, pop);
2295 if (use_rest_size) {
2296 PUSH_INSN(ret, location, pop);
2299 PUSH_INSNL(ret, location, jump, matched_label);
2300 PUSH_INSN(ret, location, putnil);
2301 if (use_rest_size) {
2302 PUSH_INSN(ret, location, putnil);
2305 PUSH_LABEL(ret, type_error_label);
2306 PUSH_INSN1(ret, location, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
2307 PUSH_INSN1(ret, location, putobject, rb_eTypeError);
2308 PUSH_INSN1(ret, location, putobject, rb_fstring_lit("deconstruct must return Array"));
2309 PUSH_SEND(ret, location, id_core_raise, INT2FIX(2));
2310 PUSH_INSN(ret, location, pop);
2312 PUSH_LABEL(ret, match_failed_label);
2313 PUSH_INSN(ret, location, pop);
2314 if (use_rest_size) {
2315 PUSH_INSN(ret, location, pop);
2318 PUSH_INSNL(ret, location, jump, unmatched_label);
2319 break;
2321 case PM_FIND_PATTERN_NODE: {
2322 // Find patterns in pattern matching are triggered by using commas in
2323 // a pattern or wrapping it in braces and using a splat on both the left
2324 // and right side of the pattern. This looks like:
2326 // foo => [*, 1, 2, 3, *]
2328 // There can be any number of requireds in the middle. The splats on
2329 // both sides can optionally have names attached.
2330 const pm_find_pattern_node_t *cast = (const pm_find_pattern_node_t *) node;
2331 const size_t size = cast->requireds.size;
2333 LABEL *match_failed_label = NEW_LABEL(location.line);
2334 LABEL *type_error_label = NEW_LABEL(location.line);
2335 LABEL *deconstruct_label = NEW_LABEL(location.line);
2336 LABEL *deconstructed_label = NEW_LABEL(location.line);
2338 if (cast->constant) {
2339 CHECK(pm_compile_pattern_constant(iseq, scope_node, cast->constant, ret, match_failed_label, in_single_pattern, base_index));
2342 CHECK(pm_compile_pattern_deconstruct(iseq, scope_node, node, ret, deconstruct_label, match_failed_label, deconstructed_label, type_error_label, in_single_pattern, use_deconstructed_cache, base_index));
2344 PUSH_INSN(ret, location, dup);
2345 PUSH_SEND(ret, location, idLength, INT2FIX(0));
2346 PUSH_INSN1(ret, location, putobject, INT2FIX(size));
2347 PUSH_SEND(ret, location, idGE, INT2FIX(1));
2348 if (in_single_pattern) {
2349 CHECK(pm_compile_pattern_length_error(iseq, scope_node, node, ret, rb_fstring_lit("%p length mismatch (given %p, expected %p+)"), INT2FIX(size), base_index + 1));
2351 PUSH_INSNL(ret, location, branchunless, match_failed_label);
2354 LABEL *while_begin_label = NEW_LABEL(location.line);
2355 LABEL *next_loop_label = NEW_LABEL(location.line);
2356 LABEL *find_succeeded_label = NEW_LABEL(location.line);
2357 LABEL *find_failed_label = NEW_LABEL(location.line);
2359 PUSH_INSN(ret, location, dup);
2360 PUSH_SEND(ret, location, idLength, INT2FIX(0));
2362 PUSH_INSN(ret, location, dup);
2363 PUSH_INSN1(ret, location, putobject, INT2FIX(size));
2364 PUSH_SEND(ret, location, idMINUS, INT2FIX(1));
2365 PUSH_INSN1(ret, location, putobject, INT2FIX(0));
2366 PUSH_LABEL(ret, while_begin_label);
2368 PUSH_INSN(ret, location, dup);
2369 PUSH_INSN1(ret, location, topn, INT2FIX(2));
2370 PUSH_SEND(ret, location, idLE, INT2FIX(1));
2371 PUSH_INSNL(ret, location, branchunless, find_failed_label);
2373 for (size_t index = 0; index < size; index++) {
2374 PUSH_INSN1(ret, location, topn, INT2FIX(3));
2375 PUSH_INSN1(ret, location, topn, INT2FIX(1));
2377 if (index != 0) {
2378 PUSH_INSN1(ret, location, putobject, INT2FIX(index));
2379 PUSH_SEND(ret, location, idPLUS, INT2FIX(1));
2382 PUSH_SEND(ret, location, idAREF, INT2FIX(1));
2383 CHECK(pm_compile_pattern_match(iseq, scope_node, cast->requireds.nodes[index], ret, next_loop_label, in_single_pattern, in_alternation_pattern, false, base_index + 4));
2386 RUBY_ASSERT(PM_NODE_TYPE_P(cast->left, PM_SPLAT_NODE));
2387 const pm_splat_node_t *left = (const pm_splat_node_t *) cast->left;
2389 if (left->expression != NULL) {
2390 PUSH_INSN1(ret, location, topn, INT2FIX(3));
2391 PUSH_INSN1(ret, location, putobject, INT2FIX(0));
2392 PUSH_INSN1(ret, location, topn, INT2FIX(2));
2393 PUSH_SEND(ret, location, idAREF, INT2FIX(2));
2394 CHECK(pm_compile_pattern_match(iseq, scope_node, left->expression, ret, find_failed_label, in_single_pattern, in_alternation_pattern, false, base_index + 4));
2397 RUBY_ASSERT(PM_NODE_TYPE_P(cast->right, PM_SPLAT_NODE));
2398 const pm_splat_node_t *right = (const pm_splat_node_t *) cast->right;
2400 if (right->expression != NULL) {
2401 PUSH_INSN1(ret, location, topn, INT2FIX(3));
2402 PUSH_INSN1(ret, location, topn, INT2FIX(1));
2403 PUSH_INSN1(ret, location, putobject, INT2FIX(size));
2404 PUSH_SEND(ret, location, idPLUS, INT2FIX(1));
2405 PUSH_INSN1(ret, location, topn, INT2FIX(3));
2406 PUSH_SEND(ret, location, idAREF, INT2FIX(2));
2407 pm_compile_pattern_match(iseq, scope_node, right->expression, ret, find_failed_label, in_single_pattern, in_alternation_pattern, false, base_index + 4);
2410 PUSH_INSNL(ret, location, jump, find_succeeded_label);
2412 PUSH_LABEL(ret, next_loop_label);
2413 PUSH_INSN1(ret, location, putobject, INT2FIX(1));
2414 PUSH_SEND(ret, location, idPLUS, INT2FIX(1));
2415 PUSH_INSNL(ret, location, jump, while_begin_label);
2417 PUSH_LABEL(ret, find_failed_label);
2418 PUSH_INSN1(ret, location, adjuststack, INT2FIX(3));
2419 if (in_single_pattern) {
2420 PUSH_INSN1(ret, location, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
2421 PUSH_INSN1(ret, location, putobject, rb_fstring_lit("%p does not match to find pattern"));
2422 PUSH_INSN1(ret, location, topn, INT2FIX(2));
2423 PUSH_SEND(ret, location, id_core_sprintf, INT2FIX(2));
2424 PUSH_INSN1(ret, location, setn, INT2FIX(base_index + PM_PATTERN_BASE_INDEX_OFFSET_ERROR_STRING + 1));
2426 PUSH_INSN1(ret, location, putobject, Qfalse);
2427 PUSH_INSN1(ret, location, setn, INT2FIX(base_index + PM_PATTERN_BASE_INDEX_OFFSET_KEY_ERROR_P + 2));
2429 PUSH_INSN(ret, location, pop);
2430 PUSH_INSN(ret, location, pop);
2432 PUSH_INSNL(ret, location, jump, match_failed_label);
2433 PUSH_INSN1(ret, location, dupn, INT2FIX(3));
2435 PUSH_LABEL(ret, find_succeeded_label);
2436 PUSH_INSN1(ret, location, adjuststack, INT2FIX(3));
2439 PUSH_INSN(ret, location, pop);
2440 PUSH_INSNL(ret, location, jump, matched_label);
2441 PUSH_INSN(ret, location, putnil);
2443 PUSH_LABEL(ret, type_error_label);
2444 PUSH_INSN1(ret, location, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
2445 PUSH_INSN1(ret, location, putobject, rb_eTypeError);
2446 PUSH_INSN1(ret, location, putobject, rb_fstring_lit("deconstruct must return Array"));
2447 PUSH_SEND(ret, location, id_core_raise, INT2FIX(2));
2448 PUSH_INSN(ret, location, pop);
2450 PUSH_LABEL(ret, match_failed_label);
2451 PUSH_INSN(ret, location, pop);
2452 PUSH_INSNL(ret, location, jump, unmatched_label);
2454 break;
2456 case PM_HASH_PATTERN_NODE: {
2457 // Hash patterns in pattern matching are triggered by using labels and
2458 // values in a pattern or by using the ** operator. They are represented
2459 // by the HashPatternNode. This looks like:
2461 // foo => { a: 1, b: 2, **bar }
2463 // It can optionally have an assoc splat in the middle of it, which can
2464 // optionally have a name.
2465 const pm_hash_pattern_node_t *cast = (const pm_hash_pattern_node_t *) node;
2467 // We don't consider it a "rest" parameter if it's a ** that is unnamed.
2468 bool has_rest = cast->rest != NULL && !(PM_NODE_TYPE_P(cast->rest, PM_ASSOC_SPLAT_NODE) && ((const pm_assoc_splat_node_t *) cast->rest)->value == NULL);
2469 bool has_keys = cast->elements.size > 0 || cast->rest != NULL;
2471 LABEL *match_failed_label = NEW_LABEL(location.line);
2472 LABEL *type_error_label = NEW_LABEL(location.line);
2473 VALUE keys = Qnil;
2475 if (has_keys && !has_rest) {
2476 keys = rb_ary_new_capa(cast->elements.size);
2478 for (size_t index = 0; index < cast->elements.size; index++) {
2479 const pm_node_t *element = cast->elements.nodes[index];
2480 RUBY_ASSERT(PM_NODE_TYPE_P(element, PM_ASSOC_NODE));
2482 const pm_node_t *key = ((const pm_assoc_node_t *) element)->key;
2483 RUBY_ASSERT(PM_NODE_TYPE_P(key, PM_SYMBOL_NODE));
2485 VALUE symbol = ID2SYM(parse_string_symbol(scope_node, (const pm_symbol_node_t *) key));
2486 rb_ary_push(keys, symbol);
2490 if (cast->constant) {
2491 CHECK(pm_compile_pattern_constant(iseq, scope_node, cast->constant, ret, match_failed_label, in_single_pattern, base_index));
2494 PUSH_INSN(ret, location, dup);
2495 PUSH_INSN1(ret, location, putobject, ID2SYM(rb_intern("deconstruct_keys")));
2496 PUSH_SEND(ret, location, idRespond_to, INT2FIX(1));
2497 if (in_single_pattern) {
2498 CHECK(pm_compile_pattern_generic_error(iseq, scope_node, node, ret, rb_fstring_lit("%p does not respond to #deconstruct_keys"), base_index + 1));
2500 PUSH_INSNL(ret, location, branchunless, match_failed_label);
2502 if (NIL_P(keys)) {
2503 PUSH_INSN(ret, location, putnil);
2505 else {
2506 PUSH_INSN1(ret, location, duparray, keys);
2507 RB_OBJ_WRITTEN(iseq, Qundef, rb_obj_hide(keys));
2509 PUSH_SEND(ret, location, rb_intern("deconstruct_keys"), INT2FIX(1));
2511 PUSH_INSN(ret, location, dup);
2512 PUSH_INSN1(ret, location, checktype, INT2FIX(T_HASH));
2513 PUSH_INSNL(ret, location, branchunless, type_error_label);
2515 if (has_rest) {
2516 PUSH_SEND(ret, location, rb_intern("dup"), INT2FIX(0));
2519 if (has_keys) {
2520 DECL_ANCHOR(match_values);
2521 INIT_ANCHOR(match_values);
2523 for (size_t index = 0; index < cast->elements.size; index++) {
2524 const pm_node_t *element = cast->elements.nodes[index];
2525 RUBY_ASSERT(PM_NODE_TYPE_P(element, PM_ASSOC_NODE));
2527 const pm_assoc_node_t *assoc = (const pm_assoc_node_t *) element;
2528 const pm_node_t *key = assoc->key;
2529 RUBY_ASSERT(PM_NODE_TYPE_P(key, PM_SYMBOL_NODE));
2531 VALUE symbol = ID2SYM(parse_string_symbol(scope_node, (const pm_symbol_node_t *) key));
2532 PUSH_INSN(ret, location, dup);
2533 PUSH_INSN1(ret, location, putobject, symbol);
2534 PUSH_SEND(ret, location, rb_intern("key?"), INT2FIX(1));
2536 if (in_single_pattern) {
2537 LABEL *match_succeeded_label = NEW_LABEL(location.line);
2539 PUSH_INSN(ret, location, dup);
2540 PUSH_INSNL(ret, location, branchif, match_succeeded_label);
2542 PUSH_INSN1(ret, location, putobject, rb_str_freeze(rb_sprintf("key not found: %+"PRIsVALUE, symbol)));
2543 PUSH_INSN1(ret, location, setn, INT2FIX(base_index + PM_PATTERN_BASE_INDEX_OFFSET_ERROR_STRING + 2));
2544 PUSH_INSN1(ret, location, putobject, Qtrue);
2545 PUSH_INSN1(ret, location, setn, INT2FIX(base_index + PM_PATTERN_BASE_INDEX_OFFSET_KEY_ERROR_P + 3));
2546 PUSH_INSN1(ret, location, topn, INT2FIX(3));
2547 PUSH_INSN1(ret, location, setn, INT2FIX(base_index + PM_PATTERN_BASE_INDEX_OFFSET_KEY_ERROR_MATCHEE + 4));
2548 PUSH_INSN1(ret, location, putobject, symbol);
2549 PUSH_INSN1(ret, location, setn, INT2FIX(base_index + PM_PATTERN_BASE_INDEX_OFFSET_KEY_ERROR_KEY + 5));
2551 PUSH_INSN1(ret, location, adjuststack, INT2FIX(4));
2552 PUSH_LABEL(ret, match_succeeded_label);
2555 PUSH_INSNL(ret, location, branchunless, match_failed_label);
2556 PUSH_INSN(match_values, location, dup);
2557 PUSH_INSN1(match_values, location, putobject, symbol);
2558 PUSH_SEND(match_values, location, has_rest ? rb_intern("delete") : idAREF, INT2FIX(1));
2560 const pm_node_t *value = assoc->value;
2561 if (PM_NODE_TYPE_P(value, PM_IMPLICIT_NODE)) {
2562 value = ((const pm_implicit_node_t *) value)->value;
2565 CHECK(pm_compile_pattern_match(iseq, scope_node, value, match_values, match_failed_label, in_single_pattern, in_alternation_pattern, false, base_index + 1));
2568 PUSH_SEQ(ret, match_values);
2570 else {
2571 PUSH_INSN(ret, location, dup);
2572 PUSH_SEND(ret, location, idEmptyP, INT2FIX(0));
2573 if (in_single_pattern) {
2574 CHECK(pm_compile_pattern_generic_error(iseq, scope_node, node, ret, rb_fstring_lit("%p is not empty"), base_index + 1));
2576 PUSH_INSNL(ret, location, branchunless, match_failed_label);
2579 if (has_rest) {
2580 switch (PM_NODE_TYPE(cast->rest)) {
2581 case PM_NO_KEYWORDS_PARAMETER_NODE: {
2582 PUSH_INSN(ret, location, dup);
2583 PUSH_SEND(ret, location, idEmptyP, INT2FIX(0));
2584 if (in_single_pattern) {
2585 pm_compile_pattern_generic_error(iseq, scope_node, node, ret, rb_fstring_lit("rest of %p is not empty"), base_index + 1);
2587 PUSH_INSNL(ret, location, branchunless, match_failed_label);
2588 break;
2590 case PM_ASSOC_SPLAT_NODE: {
2591 const pm_assoc_splat_node_t *splat = (const pm_assoc_splat_node_t *) cast->rest;
2592 PUSH_INSN(ret, location, dup);
2593 pm_compile_pattern_match(iseq, scope_node, splat->value, ret, match_failed_label, in_single_pattern, in_alternation_pattern, false, base_index + 1);
2594 break;
2596 default:
2597 rb_bug("unreachable");
2598 break;
2602 PUSH_INSN(ret, location, pop);
2603 PUSH_INSNL(ret, location, jump, matched_label);
2604 PUSH_INSN(ret, location, putnil);
2606 PUSH_LABEL(ret, type_error_label);
2607 PUSH_INSN1(ret, location, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
2608 PUSH_INSN1(ret, location, putobject, rb_eTypeError);
2609 PUSH_INSN1(ret, location, putobject, rb_fstring_lit("deconstruct_keys must return Hash"));
2610 PUSH_SEND(ret, location, id_core_raise, INT2FIX(2));
2611 PUSH_INSN(ret, location, pop);
2613 PUSH_LABEL(ret, match_failed_label);
2614 PUSH_INSN(ret, location, pop);
2615 PUSH_INSNL(ret, location, jump, unmatched_label);
2616 break;
2618 case PM_CAPTURE_PATTERN_NODE: {
2619 // Capture patterns allow you to pattern match against an element in a
2620 // pattern and also capture the value into a local variable. This looks
2621 // like:
2623 // [1] => [Integer => foo]
2625 // In this case the `Integer => foo` will be represented by a
2626 // CapturePatternNode, which has both a value (the pattern to match
2627 // against) and a target (the place to write the variable into).
2628 const pm_capture_pattern_node_t *cast = (const pm_capture_pattern_node_t *) node;
2630 LABEL *match_failed_label = NEW_LABEL(location.line);
2632 PUSH_INSN(ret, location, dup);
2633 CHECK(pm_compile_pattern_match(iseq, scope_node, cast->value, ret, match_failed_label, in_single_pattern, in_alternation_pattern, use_deconstructed_cache, base_index + 1));
2634 CHECK(pm_compile_pattern(iseq, scope_node, cast->target, ret, matched_label, match_failed_label, in_single_pattern, in_alternation_pattern, false, base_index));
2635 PUSH_INSN(ret, location, putnil);
2637 PUSH_LABEL(ret, match_failed_label);
2638 PUSH_INSN(ret, location, pop);
2639 PUSH_INSNL(ret, location, jump, unmatched_label);
2641 break;
2643 case PM_LOCAL_VARIABLE_TARGET_NODE: {
2644 // Local variables can be targeted by placing identifiers in the place
2645 // of a pattern. For example, foo in bar. This results in the value
2646 // being matched being written to that local variable.
2647 const pm_local_variable_target_node_t *cast = (const pm_local_variable_target_node_t *) node;
2648 pm_local_index_t index = pm_lookup_local_index(iseq, scope_node, cast->name, cast->depth);
2650 // If this local variable is being written from within an alternation
2651 // pattern, then it cannot actually be added to the local table since
2652 // it's ambiguous which value should be used. So instead we indicate
2653 // this with a compile error.
2654 if (in_alternation_pattern) {
2655 ID id = pm_constant_id_lookup(scope_node, cast->name);
2656 const char *name = rb_id2name(id);
2658 if (name && strlen(name) > 0 && name[0] != '_') {
2659 COMPILE_ERROR(ERROR_ARGS "illegal variable in alternative pattern (%"PRIsVALUE")", rb_id2str(id));
2660 return COMPILE_NG;
2664 PUSH_SETLOCAL(ret, location, index.index, index.level);
2665 PUSH_INSNL(ret, location, jump, matched_label);
2666 break;
2668 case PM_ALTERNATION_PATTERN_NODE: {
2669 // Alternation patterns allow you to specify multiple patterns in a
2670 // single expression using the | operator.
2671 const pm_alternation_pattern_node_t *cast = (const pm_alternation_pattern_node_t *) node;
2673 LABEL *matched_left_label = NEW_LABEL(location.line);
2674 LABEL *unmatched_left_label = NEW_LABEL(location.line);
2676 // First, we're going to attempt to match against the left pattern. If
2677 // that pattern matches, then we'll skip matching the right pattern.
2678 PUSH_INSN(ret, location, dup);
2679 CHECK(pm_compile_pattern(iseq, scope_node, cast->left, ret, matched_left_label, unmatched_left_label, in_single_pattern, true, true, base_index + 1));
2681 // If we get here, then we matched on the left pattern. In this case we
2682 // should pop out the duplicate value that we preemptively added to
2683 // match against the right pattern and then jump to the match label.
2684 PUSH_LABEL(ret, matched_left_label);
2685 PUSH_INSN(ret, location, pop);
2686 PUSH_INSNL(ret, location, jump, matched_label);
2687 PUSH_INSN(ret, location, putnil);
2689 // If we get here, then we didn't match on the left pattern. In this
2690 // case we attempt to match against the right pattern.
2691 PUSH_LABEL(ret, unmatched_left_label);
2692 CHECK(pm_compile_pattern(iseq, scope_node, cast->right, ret, matched_label, unmatched_label, in_single_pattern, true, true, base_index));
2693 break;
2695 case PM_PARENTHESES_NODE:
2696 // Parentheses are allowed to wrap expressions in pattern matching and
2697 // they do nothing since they can only wrap individual expressions and
2698 // not groups. In this case we'll recurse back into this same function
2699 // with the body of the parentheses.
2700 return pm_compile_pattern(iseq, scope_node, ((const pm_parentheses_node_t *) node)->body, ret, matched_label, unmatched_label, in_single_pattern, in_alternation_pattern, use_deconstructed_cache, base_index);
2701 case PM_PINNED_EXPRESSION_NODE:
2702 // Pinned expressions are a way to match against the value of an
2703 // expression that should be evaluated at runtime. This looks like:
2704 // foo in ^(bar). To compile these, we compile the expression as if it
2705 // were a literal value by falling through to the literal case.
2706 node = ((const pm_pinned_expression_node_t *) node)->expression;
2707 /* fallthrough */
2708 case PM_ARRAY_NODE:
2709 case PM_CLASS_VARIABLE_READ_NODE:
2710 case PM_CONSTANT_PATH_NODE:
2711 case PM_CONSTANT_READ_NODE:
2712 case PM_FALSE_NODE:
2713 case PM_FLOAT_NODE:
2714 case PM_GLOBAL_VARIABLE_READ_NODE:
2715 case PM_IMAGINARY_NODE:
2716 case PM_INSTANCE_VARIABLE_READ_NODE:
2717 case PM_INTEGER_NODE:
2718 case PM_INTERPOLATED_REGULAR_EXPRESSION_NODE:
2719 case PM_INTERPOLATED_STRING_NODE:
2720 case PM_INTERPOLATED_SYMBOL_NODE:
2721 case PM_INTERPOLATED_X_STRING_NODE:
2722 case PM_LAMBDA_NODE:
2723 case PM_LOCAL_VARIABLE_READ_NODE:
2724 case PM_NIL_NODE:
2725 case PM_SOURCE_ENCODING_NODE:
2726 case PM_SOURCE_FILE_NODE:
2727 case PM_SOURCE_LINE_NODE:
2728 case PM_RANGE_NODE:
2729 case PM_RATIONAL_NODE:
2730 case PM_REGULAR_EXPRESSION_NODE:
2731 case PM_SELF_NODE:
2732 case PM_STRING_NODE:
2733 case PM_SYMBOL_NODE:
2734 case PM_TRUE_NODE:
2735 case PM_X_STRING_NODE: {
2736 // These nodes are all simple patterns, which means we'll use the
2737 // checkmatch instruction to match against them, which is effectively a
2738 // VM-level === operator.
2739 PM_COMPILE_NOT_POPPED(node);
2740 if (in_single_pattern) {
2741 PUSH_INSN1(ret, location, dupn, INT2FIX(2));
2744 PUSH_INSN1(ret, location, checkmatch, INT2FIX(VM_CHECKMATCH_TYPE_CASE));
2746 if (in_single_pattern) {
2747 pm_compile_pattern_eqq_error(iseq, scope_node, node, ret, base_index + 2);
2750 PUSH_INSNL(ret, location, branchif, matched_label);
2751 PUSH_INSNL(ret, location, jump, unmatched_label);
2752 break;
2754 case PM_PINNED_VARIABLE_NODE: {
2755 // Pinned variables are a way to match against the value of a variable
2756 // without it looking like you're trying to write to the variable. This
2757 // looks like: foo in ^@bar. To compile these, we compile the variable
2758 // that they hold.
2759 const pm_pinned_variable_node_t *cast = (const pm_pinned_variable_node_t *) node;
2760 CHECK(pm_compile_pattern(iseq, scope_node, cast->variable, ret, matched_label, unmatched_label, in_single_pattern, in_alternation_pattern, true, base_index));
2761 break;
2763 case PM_IF_NODE:
2764 case PM_UNLESS_NODE: {
2765 // If and unless nodes can show up here as guards on `in` clauses. This
2766 // looks like:
2768 // case foo
2769 // in bar if baz?
2770 // qux
2771 // end
2773 // Because we know they're in the modifier form and they can't have any
2774 // variation on this pattern, we compile them differently (more simply)
2775 // here than we would in the normal compilation path.
2776 const pm_node_t *predicate;
2777 const pm_node_t *statement;
2779 if (PM_NODE_TYPE_P(node, PM_IF_NODE)) {
2780 const pm_if_node_t *cast = (const pm_if_node_t *) node;
2781 predicate = cast->predicate;
2783 RUBY_ASSERT(cast->statements != NULL && cast->statements->body.size == 1);
2784 statement = cast->statements->body.nodes[0];
2786 else {
2787 const pm_unless_node_t *cast = (const pm_unless_node_t *) node;
2788 predicate = cast->predicate;
2790 RUBY_ASSERT(cast->statements != NULL && cast->statements->body.size == 1);
2791 statement = cast->statements->body.nodes[0];
2794 CHECK(pm_compile_pattern_match(iseq, scope_node, statement, ret, unmatched_label, in_single_pattern, in_alternation_pattern, use_deconstructed_cache, base_index));
2795 PM_COMPILE_NOT_POPPED(predicate);
2797 if (in_single_pattern) {
2798 LABEL *match_succeeded_label = NEW_LABEL(location.line);
2800 PUSH_INSN(ret, location, dup);
2801 if (PM_NODE_TYPE_P(node, PM_IF_NODE)) {
2802 PUSH_INSNL(ret, location, branchif, match_succeeded_label);
2804 else {
2805 PUSH_INSNL(ret, location, branchunless, match_succeeded_label);
2808 PUSH_INSN1(ret, location, putobject, rb_fstring_lit("guard clause does not return true"));
2809 PUSH_INSN1(ret, location, setn, INT2FIX(base_index + PM_PATTERN_BASE_INDEX_OFFSET_ERROR_STRING + 1));
2810 PUSH_INSN1(ret, location, putobject, Qfalse);
2811 PUSH_INSN1(ret, location, setn, INT2FIX(base_index + PM_PATTERN_BASE_INDEX_OFFSET_KEY_ERROR_P + 2));
2813 PUSH_INSN(ret, location, pop);
2814 PUSH_INSN(ret, location, pop);
2816 PUSH_LABEL(ret, match_succeeded_label);
2819 if (PM_NODE_TYPE_P(node, PM_IF_NODE)) {
2820 PUSH_INSNL(ret, location, branchunless, unmatched_label);
2822 else {
2823 PUSH_INSNL(ret, location, branchif, unmatched_label);
2826 PUSH_INSNL(ret, location, jump, matched_label);
2827 break;
2829 default:
2830 // If we get here, then we have a node type that should not be in this
2831 // position. This would be a bug in the parser, because a different node
2832 // type should never have been created in this position in the tree.
2833 rb_bug("Unexpected node type in pattern matching expression: %s", pm_node_type_to_str(PM_NODE_TYPE(node)));
2834 break;
2837 return COMPILE_OK;
2840 #undef PM_PATTERN_BASE_INDEX_OFFSET_DECONSTRUCTED_CACHE
2841 #undef PM_PATTERN_BASE_INDEX_OFFSET_ERROR_STRING
2842 #undef PM_PATTERN_BASE_INDEX_OFFSET_KEY_ERROR_P
2843 #undef PM_PATTERN_BASE_INDEX_OFFSET_KEY_ERROR_MATCHEE
2844 #undef PM_PATTERN_BASE_INDEX_OFFSET_KEY_ERROR_KEY
// Generate a scope node from the given node.
//
// A pm_scope_node_t is the compiler's wrapper around any AST node that
// introduces a new lexical scope (and therefore compiles into its own iseq):
// programs, classes, modules, defs, blocks, lambdas, etc. This zeroes the
// struct and then copies out the body/locals/parameters fields, which live
// in different slots depending on the concrete node type.
void
pm_scope_node_init(const pm_node_t *node, pm_scope_node_t *scope, pm_scope_node_t *previous)
    // This is very important, otherwise the scope node could be seen as having
    // certain flags set that _should not_ be set.
    memset(scope, 0, sizeof(pm_scope_node_t));

    scope->base.type = PM_SCOPE_NODE;
    scope->base.location.start = node->location.start;
    scope->base.location.end = node->location.end;

    scope->previous = previous;
    scope->ast_node = (pm_node_t *) node;

    // Inherit parser, encodings, and the constant pool from the enclosing
    // scope when one exists (i.e. for every scope except the top level).
    if (previous) {
        scope->parser = previous->parser;
        scope->encoding = previous->encoding;
        scope->filepath_encoding = previous->filepath_encoding;
        scope->constants = previous->constants;

    // Pull body/locals/parameters out of the type-specific struct fields.
    switch (PM_NODE_TYPE(node)) {
      case PM_BLOCK_NODE: {
        const pm_block_node_t *cast = (const pm_block_node_t *) node;
        scope->body = cast->body;
        scope->locals = cast->locals;
        scope->parameters = cast->parameters;
        break;
      case PM_CLASS_NODE: {
        const pm_class_node_t *cast = (const pm_class_node_t *) node;
        scope->body = cast->body;
        scope->locals = cast->locals;
        break;
      case PM_DEF_NODE: {
        const pm_def_node_t *cast = (const pm_def_node_t *) node;
        scope->parameters = (pm_node_t *) cast->parameters;
        scope->body = cast->body;
        scope->locals = cast->locals;
        break;
      case PM_ENSURE_NODE: {
        // For ensure, the scope wraps the ensure node itself, but when the
        // clause has statements the reported source location is narrowed to
        // just those statements.
        const pm_ensure_node_t *cast = (const pm_ensure_node_t *) node;
        scope->body = (pm_node_t *) node;

        if (cast->statements != NULL) {
            scope->base.location.start = cast->statements->base.location.start;
            scope->base.location.end = cast->statements->base.location.end;

        break;
      case PM_FOR_NODE: {
        const pm_for_node_t *cast = (const pm_for_node_t *) node;
        scope->body = (pm_node_t *) cast->statements;
        break;
      case PM_INTERPOLATED_REGULAR_EXPRESSION_NODE: {
        // Only //o (once) interpolated regexps get their own scope.
        RUBY_ASSERT(node->flags & PM_REGULAR_EXPRESSION_FLAGS_ONCE);
        scope->body = (pm_node_t *) node;
        break;
      case PM_LAMBDA_NODE: {
        const pm_lambda_node_t *cast = (const pm_lambda_node_t *) node;
        scope->parameters = cast->parameters;
        scope->body = cast->body;
        scope->locals = cast->locals;

        // The lambda's location starts at its parameter list when it has
        // one, otherwise immediately after the -> operator.
        if (cast->parameters != NULL) {
            scope->base.location.start = cast->parameters->location.start;
        else {
            scope->base.location.start = cast->operator_loc.end;
        break;
      case PM_MODULE_NODE: {
        const pm_module_node_t *cast = (const pm_module_node_t *) node;
        scope->body = cast->body;
        scope->locals = cast->locals;
        break;
      case PM_POST_EXECUTION_NODE: {
        const pm_post_execution_node_t *cast = (const pm_post_execution_node_t *) node;
        scope->body = (pm_node_t *) cast->statements;
        break;
      case PM_PROGRAM_NODE: {
        const pm_program_node_t *cast = (const pm_program_node_t *) node;
        scope->body = (pm_node_t *) cast->statements;
        scope->locals = cast->locals;
        break;
      case PM_RESCUE_NODE: {
        const pm_rescue_node_t *cast = (const pm_rescue_node_t *) node;
        scope->body = (pm_node_t *) cast->statements;
        break;
      case PM_RESCUE_MODIFIER_NODE: {
        const pm_rescue_modifier_node_t *cast = (const pm_rescue_modifier_node_t *) node;
        scope->body = (pm_node_t *) cast->rescue_expression;
        break;
      case PM_SINGLETON_CLASS_NODE: {
        const pm_singleton_class_node_t *cast = (const pm_singleton_class_node_t *) node;
        scope->body = cast->body;
        scope->locals = cast->locals;
        break;
      case PM_STATEMENTS_NODE: {
        const pm_statements_node_t *cast = (const pm_statements_node_t *) node;
        scope->body = (pm_node_t *) cast;
        break;
      default:
        // Any other node type cannot introduce a scope; reaching here is a
        // compiler bug.
        rb_bug("unreachable");
        break;
// Free the resources owned by the given scope node. Only the local-variable
// index lookup table is owned by the scope; the wrapped AST nodes belong to
// the parse result and are not freed here.
void
pm_scope_node_destroy(pm_scope_node_t *scope_node)
    if (scope_node->index_lookup_table) {
        st_free_table(scope_node->index_lookup_table);
2977 * We need to put the label "retry_end_l" immediately after the last "send"
 * instruction. This is because vm_throw checks if the break cont is equal to the
 * index of the next insn of the "send". (Otherwise, it is considered
2980 * "break from proc-closure". See "TAG_BREAK" handling in "vm_throw_start".)
2982 * Normally, "send" instruction is at the last. However, qcall under branch
2983 * coverage measurement adds some instructions after the "send".
2985 * Note that "invokesuper" appears instead of "send".
static void
pm_compile_retry_end_label(rb_iseq_t *iseq, LINK_ANCHOR *const ret, LABEL *retry_end_l)
    INSN *iobj;
    LINK_ELEMENT *last_elem = LAST_ELEMENT(ret);

    // Start from the last element of the sequence, stepping back to the
    // nearest preceding instruction if the last element is not itself an insn
    // (e.g. a label or trace element).
    iobj = IS_INSN(last_elem) ? (INSN*) last_elem : (INSN*) get_prev_insn((INSN*) last_elem);

    // Walk backwards to the most recent "send" (or "invokesuper") so the
    // retry_end label can be placed immediately after it, as vm_throw's
    // TAG_BREAK handling requires.
    while (INSN_OF(iobj) != BIN(send) && INSN_OF(iobj) != BIN(invokesuper)) {
        iobj = (INSN*) get_prev_insn(iobj);
    ELEM_INSERT_NEXT(&iobj->link, (LINK_ELEMENT*) retry_end_l);

    // LINK_ANCHOR has a pointer to the last element, but
    // ELEM_INSERT_NEXT does not update it even if we add an insn to the
    // last of LINK_ANCHOR. So this updates it manually.
    if (&iobj->link == LAST_ELEMENT(ret)) {
        ret->last = (LINK_ELEMENT*) retry_end_l;
3007 * Compile a call node into the given iseq.
static void
pm_compile_call(rb_iseq_t *iseq, const pm_call_node_t *call_node, LINK_ANCHOR *const ret, bool popped, pm_scope_node_t *scope_node, ID method_id, LABEL *start)
    // Prefer the message location for line/column info; fall back to the
    // whole node when there is no message (e.g. foo.()-style calls).
    const pm_location_t *message_loc = &call_node->message_loc;
    if (message_loc->start == NULL) message_loc = &call_node->base.location;

    const pm_line_column_t location = PM_LOCATION_START_LINE_COLUMN(scope_node->parser, message_loc);
    LABEL *else_label = NEW_LABEL(location.line);
    LABEL *end_label = NEW_LABEL(location.line);
    LABEL *retry_end_l = NEW_LABEL(location.line);

    VALUE branches = Qfalse;
    rb_code_location_t code_location = { 0 };
    int node_id = location.column;

    // Safe navigation (&.): dup the receiver and skip the call entirely when
    // it is nil. When branch coverage is enabled, also record a then/else
    // branch spanning the call.
    if (PM_NODE_FLAG_P(call_node, PM_CALL_NODE_FLAGS_SAFE_NAVIGATION)) {
        if (PM_BRANCH_COVERAGE_P(iseq)) {
            // The branch ends at the furthest of the closing token, the
            // arguments, and the message; any of these may be absent (NULL).
            const uint8_t *cursors[3] = {
                call_node->closing_loc.end,
                call_node->arguments == NULL ? NULL : call_node->arguments->base.location.end,
                call_node->message_loc.end

            const uint8_t *end_cursor = cursors[0];
            end_cursor = (end_cursor == NULL || cursors[1] == NULL) ? cursors[1] : (end_cursor > cursors[1] ? end_cursor : cursors[1]);
            end_cursor = (end_cursor == NULL || cursors[2] == NULL) ? cursors[2] : (end_cursor > cursors[2] ? end_cursor : cursors[2]);

            const pm_line_column_t start_location = PM_NODE_START_LINE_COLUMN(scope_node->parser, call_node);
            const pm_line_column_t end_location = pm_newline_list_line_column(&scope_node->parser->newline_list, end_cursor, scope_node->parser->start_line);

            code_location = (rb_code_location_t) {
                .beg_pos = { .lineno = start_location.line, .column = start_location.column },
                .end_pos = { .lineno = end_location.line, .column = end_location.column }

            branches = decl_branch_base(iseq, PTR2NUM(call_node), &code_location, "&.");

        PUSH_INSN(ret, location, dup);
        PUSH_INSNL(ret, location, branchnil, else_label);

        add_trace_branch_coverage(iseq, ret, &code_location, node_id, 0, "then", branches);

    // Compile the arguments (and any block argument), accumulating the call
    // flags and keyword-argument metadata.
    int flags = 0;
    struct rb_callinfo_kwarg *kw_arg = NULL;

    int orig_argc = pm_setup_args(call_node->arguments, call_node->block, &flags, &kw_arg, iseq, ret, scope_node, &location);
    const rb_iseq_t *block_iseq = NULL;

    if (call_node->block != NULL && PM_NODE_TYPE_P(call_node->block, PM_BLOCK_NODE)) {
        // Scope associated with the block
        pm_scope_node_t next_scope_node;
        pm_scope_node_init(call_node->block, &next_scope_node, scope_node);

        block_iseq = NEW_CHILD_ISEQ(&next_scope_node, make_name_for_block(iseq), ISEQ_TYPE_BLOCK, pm_node_line_number(scope_node->parser, call_node->block));
        pm_scope_node_destroy(&next_scope_node);
        ISEQ_COMPILE_DATA(iseq)->current_block = block_iseq;
    else {
        if (PM_NODE_FLAG_P(call_node, PM_CALL_NODE_FLAGS_VARIABLE_CALL)) {
            flags |= VM_CALL_VCALL;

        if (!flags) {
            flags |= VM_CALL_ARGS_SIMPLE;

    if (PM_NODE_FLAG_P(call_node, PM_CALL_NODE_FLAGS_IGNORE_VISIBILITY)) {
        flags |= VM_CALL_FCALL;

    // For an attribute write (foo.bar = value) whose result is used, copy
    // the assigned value to a stack slot below the call's operands (with
    // setn) so it survives the send; the exact slot depends on whether a
    // block argument and/or argument splat is also on the stack.
    if (!popped && PM_NODE_FLAG_P(call_node, PM_CALL_NODE_FLAGS_ATTRIBUTE_WRITE)) {
        if (flags & VM_CALL_ARGS_BLOCKARG) {
            PUSH_INSN1(ret, location, topn, INT2FIX(1));
            if (flags & VM_CALL_ARGS_SPLAT) {
                // With a splat the value lives at the end of the splat
                // array, so fetch it with array[-1].
                PUSH_INSN1(ret, location, putobject, INT2FIX(-1));
                PUSH_SEND_WITH_FLAG(ret, location, idAREF, INT2FIX(1), INT2FIX(0));
            PUSH_INSN1(ret, location, setn, INT2FIX(orig_argc + 3));
            PUSH_INSN(ret, location, pop);
        else if (flags & VM_CALL_ARGS_SPLAT) {
            PUSH_INSN(ret, location, dup);
            PUSH_INSN1(ret, location, putobject, INT2FIX(-1));
            PUSH_SEND_WITH_FLAG(ret, location, idAREF, INT2FIX(1), INT2FIX(0));
            PUSH_INSN1(ret, location, setn, INT2FIX(orig_argc + 2));
            PUSH_INSN(ret, location, pop);
        else {
            PUSH_INSN1(ret, location, setn, INT2FIX(orig_argc + 1));

    // A keyword splat passed alongside a block argument must be converted to
    // a hash (splatkw) unless it is already known to be a mutable hash.
    if ((flags & VM_CALL_KW_SPLAT) && (flags & VM_CALL_ARGS_BLOCKARG) && !(flags & VM_CALL_KW_SPLAT_MUT)) {
        PUSH_INSN(ret, location, splatkw);

    PUSH_SEND_R(ret, location, method_id, INT2FIX(orig_argc), block_iseq, INT2FIX(flags), kw_arg);

    // When the literal block can raise/break, register a break catch entry
    // with retry_end_l placed directly after the send (see
    // pm_compile_retry_end_label).
    if (block_iseq && ISEQ_BODY(block_iseq)->catch_table) {
        pm_compile_retry_end_label(iseq, ret, retry_end_l);
        PUSH_CATCH_ENTRY(CATCH_TYPE_BREAK, start, retry_end_l, block_iseq, retry_end_l);

    // Close out the safe-navigation branch: the nil path lands on else_label
    // (leaving the dup'd nil as the result) and records "else" coverage.
    if (PM_NODE_FLAG_P(call_node, PM_CALL_NODE_FLAGS_SAFE_NAVIGATION)) {
        PUSH_INSNL(ret, location, jump, end_label);
        PUSH_LABEL(ret, else_label);
        add_trace_branch_coverage(iseq, ret, &code_location, node_id, 1, "else", branches);
        PUSH_LABEL(ret, end_label);

    // An attribute write's own return value (from the send) is discarded;
    // the copy stashed earlier is the expression's result.
    if (PM_NODE_FLAG_P(call_node, PM_CALL_NODE_FLAGS_ATTRIBUTE_WRITE) && !popped) {
        PUSH_INSN(ret, location, pop);

    if (popped) PUSH_INSN(ret, location, pop);
// Compile a single expression appearing inside a `defined?(...)` check.
//
// Emits code that leaves the appropriate defined-description (or Qtrue when
// in_condition is set) on the stack, or nil when the expression is not
// defined. lfinish is an array of lazily-created labels that sub-expression
// checks branch to in order to short-circuit once anything is found to be
// undefined; lfinish[2] is used only for receiver checks of call nodes.
static void
pm_compile_defined_expr0(rb_iseq_t *iseq, const pm_node_t *node, const pm_line_column_t *node_location, LINK_ANCHOR *const ret, bool popped, pm_scope_node_t *scope_node, bool in_condition, LABEL **lfinish, bool explicit_receiver)
    // in_condition is the same as compile.c's needstr
    enum defined_type dtype = DEFINED_NOT_DEFINED;
    const pm_line_column_t location = *node_location;

    switch (PM_NODE_TYPE(node)) {
      case PM_ARGUMENTS_NODE: {
        // An argument list is "defined" only if every argument is; bail to
        // lfinish[1] as soon as one is not.
        const pm_arguments_node_t *cast = (const pm_arguments_node_t *) node;
        const pm_node_list_t *arguments = &cast->arguments;
        for (size_t idx = 0; idx < arguments->size; idx++) {
            const pm_node_t *argument = arguments->nodes[idx];
            pm_compile_defined_expr0(iseq, argument, node_location, ret, popped, scope_node, in_condition, lfinish, explicit_receiver);

            if (!lfinish[1]) {
                lfinish[1] = NEW_LABEL(location.line);
            PUSH_INSNL(ret, location, branchunless, lfinish[1]);
        dtype = DEFINED_TRUE;
        break;
      case PM_NIL_NODE:
        dtype = DEFINED_NIL;
        break;
      case PM_PARENTHESES_NODE: {
        const pm_parentheses_node_t *cast = (const pm_parentheses_node_t *) node;

        if (cast->body == NULL) {
            // If we have empty parentheses, then we want to return "nil".
            dtype = DEFINED_NIL;
        else if (PM_NODE_TYPE_P(cast->body, PM_STATEMENTS_NODE) && ((const pm_statements_node_t *) cast->body)->body.size == 1) {
            // If we have a parentheses node that is wrapping a single statement
            // then we want to recurse down to that statement and compile it.
            pm_compile_defined_expr0(iseq, ((const pm_statements_node_t *) cast->body)->body.nodes[0], node_location, ret, popped, scope_node, in_condition, lfinish, explicit_receiver);
            return;
        else {
            // Otherwise, we have parentheses wrapping multiple statements, in
            // which case this is defined as "expression".
            dtype = DEFINED_EXPR;

        break;
      case PM_SELF_NODE:
        dtype = DEFINED_SELF;
        break;
      case PM_TRUE_NODE:
        dtype = DEFINED_TRUE;
        break;
      case PM_FALSE_NODE:
        dtype = DEFINED_FALSE;
        break;
      case PM_ARRAY_NODE: {
        // A splat-free array literal is defined iff all of its elements are;
        // arrays containing splats fall through to plain "expression".
        const pm_array_node_t *cast = (const pm_array_node_t *) node;

        if (!PM_NODE_FLAG_P(cast, PM_ARRAY_NODE_FLAGS_CONTAINS_SPLAT)) {
            for (size_t index = 0; index < cast->elements.size; index++) {
                pm_compile_defined_expr0(iseq, cast->elements.nodes[index], node_location, ret, popped, scope_node, true, lfinish, false);

                if (!lfinish[1]) {
                    lfinish[1] = NEW_LABEL(location.line);

                PUSH_INSNL(ret, location, branchunless, lfinish[1]);
      // All of these node types are statically known to be "expression".
      case PM_AND_NODE:
      case PM_BEGIN_NODE:
      case PM_BREAK_NODE:
      case PM_CASE_NODE:
      case PM_CASE_MATCH_NODE:
      case PM_CLASS_NODE:
      case PM_DEF_NODE:
      case PM_DEFINED_NODE:
      case PM_FLOAT_NODE:
      case PM_FOR_NODE:
      case PM_HASH_NODE:
      case PM_IF_NODE:
      case PM_IMAGINARY_NODE:
      case PM_INTEGER_NODE:
      case PM_INTERPOLATED_REGULAR_EXPRESSION_NODE:
      case PM_INTERPOLATED_STRING_NODE:
      case PM_INTERPOLATED_SYMBOL_NODE:
      case PM_INTERPOLATED_X_STRING_NODE:
      case PM_KEYWORD_HASH_NODE:
      case PM_LAMBDA_NODE:
      case PM_MATCH_PREDICATE_NODE:
      case PM_MATCH_REQUIRED_NODE:
      case PM_MATCH_WRITE_NODE:
      case PM_MODULE_NODE:
      case PM_NEXT_NODE:
      case PM_OR_NODE:
      case PM_RANGE_NODE:
      case PM_RATIONAL_NODE:
      case PM_REDO_NODE:
      case PM_REGULAR_EXPRESSION_NODE:
      case PM_RETRY_NODE:
      case PM_RETURN_NODE:
      case PM_SINGLETON_CLASS_NODE:
      case PM_SOURCE_ENCODING_NODE:
      case PM_SOURCE_FILE_NODE:
      case PM_SOURCE_LINE_NODE:
      case PM_STRING_NODE:
      case PM_SYMBOL_NODE:
      case PM_UNLESS_NODE:
      case PM_UNTIL_NODE:
      case PM_WHILE_NODE:
      case PM_X_STRING_NODE:
        dtype = DEFINED_EXPR;
        break;
      case PM_LOCAL_VARIABLE_READ_NODE:
        // The parser only produces this node when the local exists, so it is
        // unconditionally "local-variable".
        dtype = DEFINED_LVAR;
        break;

// Either the requested description string or Qtrue when the caller only
// needs a boolean (defined? used as a condition).
#define PUSH_VAL(type) (in_condition ? Qtrue : rb_iseq_defined_string(type))

      case PM_INSTANCE_VARIABLE_READ_NODE: {
        const pm_instance_variable_read_node_t *cast = (const pm_instance_variable_read_node_t *) node;

        ID name = pm_constant_id_lookup(scope_node, cast->name);
        PUSH_INSN3(ret, location, definedivar, ID2SYM(name), get_ivar_ic_value(iseq, name), PUSH_VAL(DEFINED_IVAR));

        return;
      case PM_BACK_REFERENCE_READ_NODE: {
        // Skip the leading character of the back reference name
        // (presumably the '$' -- the node spans the full token).
        const char *char_ptr = (const char *) (node->location.start + 1);

        // The DEFINED_REF operand is tagged: low bit set for named back
        // references (this case) vs clear for numbered ones (below).
        ID backref_val = INT2FIX(rb_intern2(char_ptr, 1)) << 1 | 1;

        PUSH_INSN(ret, location, putnil);
        PUSH_INSN3(ret, location, defined, INT2FIX(DEFINED_REF), backref_val, PUSH_VAL(DEFINED_GVAR));

        return;
      case PM_NUMBERED_REFERENCE_READ_NODE: {
        uint32_t reference_number = ((const pm_numbered_reference_read_node_t *) node)->number;

        PUSH_INSN(ret, location, putnil);
        PUSH_INSN3(ret, location, defined, INT2FIX(DEFINED_REF), INT2FIX(reference_number << 1), PUSH_VAL(DEFINED_GVAR));

        return;
      case PM_GLOBAL_VARIABLE_READ_NODE: {
        const pm_global_variable_read_node_t *cast = (const pm_global_variable_read_node_t *) node;
        VALUE name = ID2SYM(pm_constant_id_lookup(scope_node, cast->name));

        PUSH_INSN(ret, location, putnil);
        PUSH_INSN3(ret, location, defined, INT2FIX(DEFINED_GVAR), name, PUSH_VAL(DEFINED_GVAR));

        return;
      case PM_CLASS_VARIABLE_READ_NODE: {
        const pm_class_variable_read_node_t *cast = (const pm_class_variable_read_node_t *) node;
        VALUE name = ID2SYM(pm_constant_id_lookup(scope_node, cast->name));

        PUSH_INSN(ret, location, putnil);
        PUSH_INSN3(ret, location, defined, INT2FIX(DEFINED_CVAR), name, PUSH_VAL(DEFINED_CVAR));

        return;
      case PM_CONSTANT_READ_NODE: {
        const pm_constant_read_node_t *cast = (const pm_constant_read_node_t *) node;
        VALUE name = ID2SYM(pm_constant_id_lookup(scope_node, cast->name));

        PUSH_INSN(ret, location, putnil);
        PUSH_INSN3(ret, location, defined, INT2FIX(DEFINED_CONST), name, PUSH_VAL(DEFINED_CONST));

        return;
      case PM_CONSTANT_PATH_NODE: {
        // Foo::Bar — first the parent expression must itself be defined;
        // then the constant is looked up from that parent (or from Object
        // for a ::Foo absolute path).
        const pm_constant_path_node_t *cast = (const pm_constant_path_node_t *) node;
        VALUE name = ID2SYM(pm_constant_id_lookup(scope_node, cast->name));

        if (cast->parent != NULL) {
            if (!lfinish[1]) lfinish[1] = NEW_LABEL(location.line);
            pm_compile_defined_expr0(iseq, cast->parent, node_location, ret, popped, scope_node, true, lfinish, false);

            PUSH_INSNL(ret, location, branchunless, lfinish[1]);
            PM_COMPILE(cast->parent);
        else {
            PUSH_INSN1(ret, location, putobject, rb_cObject);

        PUSH_INSN3(ret, location, defined, INT2FIX(DEFINED_CONST_FROM), name, PUSH_VAL(DEFINED_CONST));
        return;
      case PM_CALL_NODE: {
        const pm_call_node_t *cast = ((const pm_call_node_t *) node);
        ID method_id = pm_constant_id_lookup(scope_node, cast->name);

        // A call is defined only if its arguments and receiver are defined
        // and the method is callable on the (evaluated) receiver.
        if (cast->receiver || cast->arguments) {
            if (!lfinish[1]) lfinish[1] = NEW_LABEL(location.line);
            if (!lfinish[2]) lfinish[2] = NEW_LABEL(location.line);

            if (cast->arguments) {
                pm_compile_defined_expr0(iseq, (const pm_node_t *) cast->arguments, node_location, ret, popped, scope_node, true, lfinish, false);
                PUSH_INSNL(ret, location, branchunless, lfinish[1]);

            if (cast->receiver) {
                pm_compile_defined_expr0(iseq, cast->receiver, node_location, ret, popped, scope_node, true, lfinish, true);

                if (PM_NODE_TYPE_P(cast->receiver, PM_CALL_NODE)) {
                    // A call receiver was already evaluated (dup'd) by the
                    // recursive check, so compile the actual call here.
                    PUSH_INSNL(ret, location, branchunless, lfinish[2]);

                    const pm_call_node_t *receiver = (const pm_call_node_t *) cast->receiver;
                    ID method_id = pm_constant_id_lookup(scope_node, receiver->name);
                    pm_compile_call(iseq, receiver, ret, popped, scope_node, method_id, NULL);
                else {
                    PUSH_INSNL(ret, location, branchunless, lfinish[1]);
                    PM_COMPILE(cast->receiver);

            if (explicit_receiver) PUSH_INSN(ret, location, dup);
            PUSH_INSN3(ret, location, defined, INT2FIX(DEFINED_METHOD), rb_id2sym(method_id), PUSH_VAL(DEFINED_METHOD));
        else {
            // Receiverless call: check against self (DEFINED_FUNC also
            // admits private methods).
            PUSH_INSN(ret, location, putself);
            if (explicit_receiver) PUSH_INSN(ret, location, dup);
            PUSH_INSN3(ret, location, defined, INT2FIX(DEFINED_FUNC), rb_id2sym(method_id), PUSH_VAL(DEFINED_METHOD));

        return;
      case PM_YIELD_NODE:
        PUSH_INSN(ret, location, putnil);
        PUSH_INSN3(ret, location, defined, INT2FIX(DEFINED_YIELD), 0, PUSH_VAL(DEFINED_YIELD));
        return;
      case PM_SUPER_NODE:
      case PM_FORWARDING_SUPER_NODE:
        PUSH_INSN(ret, location, putnil);
        PUSH_INSN3(ret, location, defined, INT2FIX(DEFINED_ZSUPER), 0, PUSH_VAL(DEFINED_ZSUPER));
        return;
      // Every flavor of assignment is statically "assignment".
      case PM_CALL_AND_WRITE_NODE:
      case PM_CALL_OPERATOR_WRITE_NODE:
      case PM_CALL_OR_WRITE_NODE:

      case PM_CONSTANT_WRITE_NODE:
      case PM_CONSTANT_OPERATOR_WRITE_NODE:
      case PM_CONSTANT_AND_WRITE_NODE:
      case PM_CONSTANT_OR_WRITE_NODE:

      case PM_CONSTANT_PATH_AND_WRITE_NODE:
      case PM_CONSTANT_PATH_OPERATOR_WRITE_NODE:
      case PM_CONSTANT_PATH_OR_WRITE_NODE:
      case PM_CONSTANT_PATH_WRITE_NODE:

      case PM_GLOBAL_VARIABLE_WRITE_NODE:
      case PM_GLOBAL_VARIABLE_OPERATOR_WRITE_NODE:
      case PM_GLOBAL_VARIABLE_AND_WRITE_NODE:
      case PM_GLOBAL_VARIABLE_OR_WRITE_NODE:

      case PM_CLASS_VARIABLE_WRITE_NODE:
      case PM_CLASS_VARIABLE_OPERATOR_WRITE_NODE:
      case PM_CLASS_VARIABLE_AND_WRITE_NODE:
      case PM_CLASS_VARIABLE_OR_WRITE_NODE:

      case PM_INDEX_AND_WRITE_NODE:
      case PM_INDEX_OPERATOR_WRITE_NODE:
      case PM_INDEX_OR_WRITE_NODE:

      case PM_INSTANCE_VARIABLE_WRITE_NODE:
      case PM_INSTANCE_VARIABLE_OPERATOR_WRITE_NODE:
      case PM_INSTANCE_VARIABLE_AND_WRITE_NODE:
      case PM_INSTANCE_VARIABLE_OR_WRITE_NODE:

      case PM_LOCAL_VARIABLE_WRITE_NODE:
      case PM_LOCAL_VARIABLE_OPERATOR_WRITE_NODE:
      case PM_LOCAL_VARIABLE_AND_WRITE_NODE:
      case PM_LOCAL_VARIABLE_OR_WRITE_NODE:

      case PM_MULTI_WRITE_NODE:
        dtype = DEFINED_ASGN;
        break;
      default:
        rb_bug("Unsupported node %s", pm_node_type_to_str(PM_NODE_TYPE(node)));

    RUBY_ASSERT(dtype != DEFINED_NOT_DEFINED);
    PUSH_INSN1(ret, location, putobject, PUSH_VAL(dtype));
#undef PUSH_VAL
/**
 * Compile a full `defined?` expression. This delegates the per-node work to
 * pm_compile_defined_expr0 and then, if any sub-expression could raise while
 * being tested (signalled by lfinish[1] having been allocated), wraps the
 * emitted sequence in a rescue region whose handler is a callback-built
 * "defined guard" iseq, so that raising inside `defined?` yields nil instead
 * of propagating.
 */
static void
pm_defined_expr(rb_iseq_t *iseq, const pm_node_t *node, const pm_line_column_t *node_location, LINK_ANCHOR *const ret, bool popped, pm_scope_node_t *scope_node, bool in_condition, LABEL **lfinish, bool explicit_receiver)
{
    // Remember where the sequence started so the rescue-begin label can be
    // inserted *before* the instructions compiled below.
    LINK_ELEMENT *lcur = ret->last;
    pm_compile_defined_expr0(iseq, node, node_location, ret, popped, scope_node, in_condition, lfinish, false);

    if (lfinish[1]) {
        LABEL *lstart = NEW_LABEL(node_location->line);
        LABEL *lend = NEW_LABEL(node_location->line);

        // Build the rescue handler iseq from a C callback rather than from a
        // parsed node; it simply produces the nil result for defined?.
        struct rb_iseq_new_with_callback_callback_func *ifunc =
            rb_iseq_new_with_callback_new_callback(build_defined_rescue_iseq, NULL);

        const rb_iseq_t *rescue = new_child_iseq_with_callback(
            iseq,
            ifunc,
            rb_str_concat(rb_str_new2("defined guard in "), ISEQ_BODY(iseq)->location.label),
            iseq,
            ISEQ_TYPE_RESCUE,
            0
        );

        lstart->rescued = LABEL_RESCUE_BEG;
        lend->rescued = LABEL_RESCUE_END;

        // lstart goes back at the position captured before compiling so the
        // whole defined? sequence is covered by the catch entry.
        APPEND_LABEL(ret, lcur, lstart);
        PUSH_LABEL(ret, lend);
        PUSH_CATCH_ENTRY(CATCH_TYPE_RESCUE, lstart, lend, rescue, lfinish[1]);
    }
}
/**
 * Entry point for compiling a `defined?` expression. Sets up the label array
 * shared by the recursive helpers (lfinish[0] = overall finish, lfinish[1] and
 * lfinish[2] are lazily allocated failure targets) and emits the epilogue that
 * swaps in nil when any sub-check branched to a failure label.
 */
static void
pm_compile_defined_expr(rb_iseq_t *iseq, const pm_node_t *node, const pm_line_column_t *node_location, LINK_ANCHOR *const ret, bool popped, pm_scope_node_t *scope_node, bool in_condition)
{
    LABEL *lfinish[3];
    // Remember the insertion point so a putnil can be prepended later if a
    // failure path turns out to exist.
    LINK_ELEMENT *last = ret->last;

    lfinish[0] = NEW_LABEL(node_location->line);
    lfinish[1] = 0;
    lfinish[2] = 0;

    if (!popped) {
        pm_defined_expr(iseq, node, node_location, ret, popped, scope_node, in_condition, lfinish, false);
    }

    if (lfinish[1]) {
        // A failure label was used: prepend a nil under the computed result so
        // the failure path can pop down to it, then swap it into place.
        ELEM_INSERT_NEXT(last, &new_insn_body(iseq, node_location->line, node_location->column, BIN(putnil), 0)->link);
        PUSH_INSN(ret, *node_location, swap);

        if (lfinish[2]) PUSH_LABEL(ret, lfinish[2]);
        PUSH_INSN(ret, *node_location, pop);
        PUSH_LABEL(ret, lfinish[1]);
    }

    PUSH_LABEL(ret, lfinish[0]);
}
// This is exactly the same as add_ensure_iseq, except it compiles
// the node as a Prism node, and not a CRuby node
static void
pm_add_ensure_iseq(LINK_ANCHOR *const ret, rb_iseq_t *iseq, int is_return, pm_scope_node_t *scope_node)
{
    RUBY_ASSERT(can_add_ensure_iseq(iseq));

    struct iseq_compile_data_ensure_node_stack *enlp =
        ISEQ_COMPILE_DATA(iseq)->ensure_node_stack;
    struct iseq_compile_data_ensure_node_stack *prev_enlp = enlp;
    DECL_ANCHOR(ensure);

    INIT_ANCHOR(ensure);

    // Walk up the stack of enclosing ensure clauses, compiling each one into
    // its own sub-sequence. For a `return` we must run every enclosing ensure;
    // otherwise we stop at the first frame without an ensure range.
    while (enlp) {
        if (enlp->erange != NULL) {
            DECL_ANCHOR(ensure_part);
            LABEL *lstart = NEW_LABEL(0);
            LABEL *lend = NEW_LABEL(0);
            INIT_ANCHOR(ensure_part);

            add_ensure_range(iseq, enlp->erange, lstart, lend);

            // Pop this frame off the stack while compiling its body so that a
            // raise inside the ensure doesn't re-enter the same ensure.
            ISEQ_COMPILE_DATA(iseq)->ensure_node_stack = enlp->prev;
            PUSH_LABEL(ensure_part, lstart);
            bool popped = true;
            PM_COMPILE_INTO_ANCHOR(ensure_part, (const pm_node_t *) enlp->ensure_node);
            PUSH_LABEL(ensure_part, lend);
            PUSH_SEQ(ensure, ensure_part);
        }
        else {
            if (!is_return) {
                break;
            }
        }
        enlp = enlp->prev;
    }

    // Restore the original ensure stack before splicing the compiled
    // sequences into the caller's anchor.
    ISEQ_COMPILE_DATA(iseq)->ensure_node_stack = prev_enlp;
    PUSH_SEQ(ret, ensure);
}
/**
 * Context threaded through st_update into pm_local_table_insert_func when
 * inserting locals into the iseq's local table.
 */
struct pm_local_table_insert_ctx {
    pm_scope_node_t *scope_node;              // scope used to resolve constant ids to IDs
    rb_ast_id_table_t *local_table_for_iseq;  // destination local table for the iseq
    int local_index;                          // next free slot in the local table
};
3523 static int
3524 pm_local_table_insert_func(st_data_t *key, st_data_t *value, st_data_t arg, int existing)
3526 if (!existing) {
3527 pm_constant_id_t constant_id = (pm_constant_id_t) *key;
3528 struct pm_local_table_insert_ctx * ctx = (struct pm_local_table_insert_ctx *) arg;
3530 pm_scope_node_t *scope_node = ctx->scope_node;
3531 rb_ast_id_table_t *local_table_for_iseq = ctx->local_table_for_iseq;
3532 int local_index = ctx->local_index;
3534 ID local = pm_constant_id_lookup(scope_node, constant_id);
3535 local_table_for_iseq->ids[local_index] = local;
3537 *value = (st_data_t)local_index;
3539 ctx->local_index++;
3542 return ST_CONTINUE;
3546 * Insert a local into the local table for the iseq. This is used to create the
3547 * local table in the correct order while compiling the scope. The locals being
3548 * inserted are regular named locals, as opposed to special forwarding locals.
3550 static void
3551 pm_insert_local_index(pm_constant_id_t constant_id, int local_index, st_table *index_lookup_table, rb_ast_id_table_t *local_table_for_iseq, pm_scope_node_t *scope_node)
3553 RUBY_ASSERT((constant_id & PM_SPECIAL_CONSTANT_FLAG) == 0);
3555 ID local = pm_constant_id_lookup(scope_node, constant_id);
3556 local_table_for_iseq->ids[local_index] = local;
3557 st_insert(index_lookup_table, (st_data_t) constant_id, (st_data_t) local_index);
3561 * Insert a local into the local table for the iseq that is a special forwarding
3562 * local variable.
3564 static void
3565 pm_insert_local_special(ID local_name, int local_index, st_table *index_lookup_table, rb_ast_id_table_t *local_table_for_iseq)
3567 local_table_for_iseq->ids[local_index] = local_name;
3568 st_insert(index_lookup_table, (st_data_t) (local_name | PM_SPECIAL_CONSTANT_FLAG), (st_data_t) local_index);
/**
 * Compile the locals of a multi target node that is used as a positional
 * parameter in a method, block, or lambda definition. Note that this doesn't
 * actually add any instructions to the iseq. Instead, it adds locals to the
 * local and index lookup tables and increments the local index as necessary.
 *
 * Returns the local index after all of this target's locals (lefts, then
 * rest, then rights, recursing into nested multi targets) have been inserted.
 * Repeated parameters (e.g. `_`) are skipped and do not consume a slot.
 */
static int
pm_compile_destructured_param_locals(const pm_multi_target_node_t *node, st_table *index_lookup_table, rb_ast_id_table_t *local_table_for_iseq, pm_scope_node_t *scope_node, int local_index)
{
    // Leading required targets, e.g. the `a` and `(b, c)` in `def m((a, (b, c), *))`.
    for (size_t index = 0; index < node->lefts.size; index++) {
        const pm_node_t *left = node->lefts.nodes[index];

        if (PM_NODE_TYPE_P(left, PM_REQUIRED_PARAMETER_NODE)) {
            if (!PM_NODE_FLAG_P(left, PM_PARAMETER_FLAGS_REPEATED_PARAMETER)) {
                pm_insert_local_index(((const pm_required_parameter_node_t *) left)->name, local_index, index_lookup_table, local_table_for_iseq, scope_node);
                local_index++;
            }
        }
        else {
            RUBY_ASSERT(PM_NODE_TYPE_P(left, PM_MULTI_TARGET_NODE));
            local_index = pm_compile_destructured_param_locals((const pm_multi_target_node_t *) left, index_lookup_table, local_table_for_iseq, scope_node, local_index);
        }
    }

    // A named splat target, e.g. the `*rest` in `def m((a, *rest))`. An
    // anonymous splat (`*`) has a NULL expression and consumes no slot.
    if (node->rest != NULL && PM_NODE_TYPE_P(node->rest, PM_SPLAT_NODE)) {
        const pm_splat_node_t *rest = (const pm_splat_node_t *) node->rest;

        if (rest->expression != NULL) {
            RUBY_ASSERT(PM_NODE_TYPE_P(rest->expression, PM_REQUIRED_PARAMETER_NODE));

            if (!PM_NODE_FLAG_P(rest->expression, PM_PARAMETER_FLAGS_REPEATED_PARAMETER)) {
                pm_insert_local_index(((const pm_required_parameter_node_t *) rest->expression)->name, local_index, index_lookup_table, local_table_for_iseq, scope_node);
                local_index++;
            }
        }
    }

    // Trailing required targets after the splat, e.g. the `z` in `def m((a, *, z))`.
    for (size_t index = 0; index < node->rights.size; index++) {
        const pm_node_t *right = node->rights.nodes[index];

        if (PM_NODE_TYPE_P(right, PM_REQUIRED_PARAMETER_NODE)) {
            if (!PM_NODE_FLAG_P(right, PM_PARAMETER_FLAGS_REPEATED_PARAMETER)) {
                pm_insert_local_index(((const pm_required_parameter_node_t *) right)->name, local_index, index_lookup_table, local_table_for_iseq, scope_node);
                local_index++;
            }
        }
        else {
            RUBY_ASSERT(PM_NODE_TYPE_P(right, PM_MULTI_TARGET_NODE));
            local_index = pm_compile_destructured_param_locals((const pm_multi_target_node_t *) right, index_lookup_table, local_table_for_iseq, scope_node, local_index);
        }
    }

    return local_index;
}
3627 * Compile a required parameter node that is part of a destructure that is used
3628 * as a positional parameter in a method, block, or lambda definition.
3630 static inline void
3631 pm_compile_destructured_param_write(rb_iseq_t *iseq, const pm_required_parameter_node_t *node, LINK_ANCHOR *const ret, const pm_scope_node_t *scope_node)
3633 const pm_line_column_t location = PM_NODE_START_LINE_COLUMN(scope_node->parser, node);
3634 pm_local_index_t index = pm_lookup_local_index(iseq, scope_node, node->name, 0);
3635 PUSH_SETLOCAL(ret, location, index.index, index.level);
/**
 * Compile a multi target node that is used as a positional parameter in a
 * method, block, or lambda definition. Note that this is effectively the same
 * as a multi write, but with the added context that all of the targets
 * contained in the write are required parameter nodes. With this context, we
 * know they won't have any parent expressions so we build a separate code path
 * for this simplified case.
 */
static void
pm_compile_destructured_param_writes(rb_iseq_t *iseq, const pm_multi_target_node_t *node, LINK_ANCHOR *const ret, const pm_scope_node_t *scope_node)
{
    const pm_line_column_t location = PM_NODE_START_LINE_COLUMN(scope_node->parser, node);
    bool has_rest = (node->rest && PM_NODE_TYPE_P(node->rest, PM_SPLAT_NODE) && (((const pm_splat_node_t *) node->rest)->expression) != NULL);
    bool has_rights = node->rights.size > 0;

    // expandarray flag 1 keeps the remainder of the array on the stack for the
    // splat / trailing targets; flag 0 discards it.
    int flag = (has_rest || has_rights) ? 1 : 0;
    PUSH_INSN2(ret, location, expandarray, INT2FIX(node->lefts.size), INT2FIX(flag));

    // Write each leading target, recursing for nested destructures.
    for (size_t index = 0; index < node->lefts.size; index++) {
        const pm_node_t *left = node->lefts.nodes[index];

        if (PM_NODE_TYPE_P(left, PM_REQUIRED_PARAMETER_NODE)) {
            pm_compile_destructured_param_write(iseq, (const pm_required_parameter_node_t *) left, ret, scope_node);
        }
        else {
            RUBY_ASSERT(PM_NODE_TYPE_P(left, PM_MULTI_TARGET_NODE));
            pm_compile_destructured_param_writes(iseq, (const pm_multi_target_node_t *) left, ret, scope_node);
        }
    }

    if (has_rest) {
        if (has_rights) {
            // Flag 3 = splat + post args: peel the trailing values off the end
            // before the rest target captures what remains.
            PUSH_INSN2(ret, location, expandarray, INT2FIX(node->rights.size), INT2FIX(3));
        }

        const pm_node_t *rest = ((const pm_splat_node_t *) node->rest)->expression;
        RUBY_ASSERT(PM_NODE_TYPE_P(rest, PM_REQUIRED_PARAMETER_NODE));

        pm_compile_destructured_param_write(iseq, (const pm_required_parameter_node_t *) rest, ret, scope_node);
    }

    if (has_rights) {
        if (!has_rest) {
            // Flag 2 = post args only (no splat target to feed).
            PUSH_INSN2(ret, location, expandarray, INT2FIX(node->rights.size), INT2FIX(2));
        }

        for (size_t index = 0; index < node->rights.size; index++) {
            const pm_node_t *right = node->rights.nodes[index];

            if (PM_NODE_TYPE_P(right, PM_REQUIRED_PARAMETER_NODE)) {
                pm_compile_destructured_param_write(iseq, (const pm_required_parameter_node_t *) right, ret, scope_node);
            }
            else {
                RUBY_ASSERT(PM_NODE_TYPE_P(right, PM_MULTI_TARGET_NODE));
                pm_compile_destructured_param_writes(iseq, (const pm_multi_target_node_t *) right, ret, scope_node);
            }
        }
    }
}
/**
 * This is a node in the multi target state linked list. It tracks the
 * information for a particular target that necessarily has a parent expression.
 */
typedef struct pm_multi_target_state_node {
    // The pointer to the topn instruction that will need to be modified after
    // we know the total stack size of all of the targets.
    INSN *topn;

    // The index of the stack from the base of the entire multi target at which
    // the parent expression is located.
    size_t stack_index;

    // The number of slots in the stack that this node occupies.
    size_t stack_size;

    // The position of the node in the list of targets.
    size_t position;

    // A pointer to the next node in this linked list.
    struct pm_multi_target_state_node *next;
} pm_multi_target_state_node_t;
/**
 * As we're compiling a multi target, we need to track additional information
 * whenever there is a parent expression on the left hand side of the target.
 * This is because we need to go back and tell the expression where to fetch its
 * parent expression from the stack. We use a linked list of nodes to track this
 * information.
 */
typedef struct {
    // The total number of slots in the stack that this multi target occupies.
    size_t stack_size;

    // The position of the current node being compiled. This is forwarded to
    // nodes when they are allocated.
    size_t position;

    // A pointer to the head of this linked list.
    pm_multi_target_state_node_t *head;

    // A pointer to the tail of this linked list.
    pm_multi_target_state_node_t *tail;
} pm_multi_target_state_t;
3744 * Push a new state node onto the multi target state.
3746 static void
3747 pm_multi_target_state_push(pm_multi_target_state_t *state, INSN *topn, size_t stack_size)
3749 pm_multi_target_state_node_t *node = ALLOC(pm_multi_target_state_node_t);
3750 node->topn = topn;
3751 node->stack_index = state->stack_size + 1;
3752 node->stack_size = stack_size;
3753 node->position = state->position;
3754 node->next = NULL;
3756 if (state->head == NULL) {
3757 state->head = node;
3758 state->tail = node;
3760 else {
3761 state->tail->next = node;
3762 state->tail = node;
3765 state->stack_size += stack_size;
/**
 * Walk through a multi target state's linked list and update the topn
 * instructions that were inserted into the write sequence to make sure they can
 * correctly retrieve their parent expressions. Frees each list node as it is
 * visited, so the state's list is consumed by this call.
 */
static void
pm_multi_target_state_update(pm_multi_target_state_t *state)
{
    // If nothing was ever pushed onto the stack, then we don't need to do any
    // kind of updates.
    if (state->stack_size == 0) return;

    pm_multi_target_state_node_t *current = state->head;
    pm_multi_target_state_node_t *previous;

    while (current != NULL) {
        // The final offset is only known once every target has contributed its
        // slots to state->stack_size, which is why this is a second pass.
        VALUE offset = INT2FIX(state->stack_size - current->stack_index + current->position);
        current->topn->operands[0] = offset;

        // stack_size will be > 1 in the case that we compiled an index target
        // and it had arguments. In this case, we use multiple topn instructions
        // to grab up all of the arguments as well, so those offsets need to be
        // updated as well.
        if (current->stack_size > 1) {
            INSN *insn = current->topn;

            for (size_t index = 1; index < current->stack_size; index += 1) {
                LINK_ELEMENT *element = get_next_insn(insn);
                RUBY_ASSERT(IS_INSN(element));

                insn = (INSN *) element;
                RUBY_ASSERT(insn->insn_id == BIN(topn));
                insn->operands[0] = offset;
            }
        }

        // Advance before freeing the node we just patched.
        previous = current;
        current = current->next;
        xfree(previous);
    }
}
// Forward declaration: pm_compile_target_node and pm_compile_multi_target_node
// are mutually recursive (nested multi targets contain target nodes and vice
// versa).
static size_t
pm_compile_multi_target_node(rb_iseq_t *iseq, const pm_node_t *node, LINK_ANCHOR *const parents, LINK_ANCHOR *const writes, LINK_ANCHOR *const cleanup, pm_scope_node_t *scope_node, pm_multi_target_state_t *state);
/**
 * A target node represents an indirect write to a variable or a method call to
 * a method ending in =. Compiling one of these nodes requires three sequences:
 *
 * * The first is to compile retrieving the parent expression if there is one.
 *   This could be the object that owns a constant or the receiver of a method
 *   call.
 * * The second is to compile the writes to the targets. This could be writing
 *   to variables, or it could be performing method calls.
 * * The third is to compile any cleanup that needs to happen, i.e., popping the
 *   appropriate number of values off the stack.
 *
 * When there is a parent expression and this target is part of a multi write, a
 * topn instruction will be inserted into the write sequence. This is to move
 * the parent expression to the top of the stack so that it can be used as the
 * receiver of the method call or the owner of the constant. To facilitate this,
 * we return a pointer to the topn instruction that was used to be later
 * modified with the correct offset.
 *
 * These nodes can appear in a couple of places, but most commonly:
 *
 * * For loops - the index variable is a target node
 * * Rescue clauses - the exception reference variable is a target node
 * * Multi writes - the left hand side contains a list of target nodes
 *
 * For the comments with examples within this function, we'll use for loops as
 * the containing node.
 */
static void
pm_compile_target_node(rb_iseq_t *iseq, const pm_node_t *node, LINK_ANCHOR *const parents, LINK_ANCHOR *const writes, LINK_ANCHOR *const cleanup, pm_scope_node_t *scope_node, pm_multi_target_state_t *state)
{
    const pm_line_column_t location = PM_NODE_START_LINE_COLUMN(scope_node->parser, node);

    switch (PM_NODE_TYPE(node)) {
      case PM_LOCAL_VARIABLE_TARGET_NODE: {
        // Local variable targets have no parent expression, so they only need
        // to compile the write.
        //
        //     for i in []; end
        //
        const pm_local_variable_target_node_t *cast = (const pm_local_variable_target_node_t *) node;
        pm_local_index_t index = pm_lookup_local_index(iseq, scope_node, cast->name, cast->depth);

        PUSH_SETLOCAL(writes, location, index.index, index.level);
        break;
      }
      case PM_CLASS_VARIABLE_TARGET_NODE: {
        // Class variable targets have no parent expression, so they only need
        // to compile the write.
        //
        //     for @@i in []; end
        //
        const pm_class_variable_target_node_t *cast = (const pm_class_variable_target_node_t *) node;
        ID name = pm_constant_id_lookup(scope_node, cast->name);

        PUSH_INSN2(writes, location, setclassvariable, ID2SYM(name), get_cvar_ic_value(iseq, name));
        break;
      }
      case PM_CONSTANT_TARGET_NODE: {
        // Constant targets have no parent expression, so they only need to
        // compile the write.
        //
        //     for I in []; end
        //
        const pm_constant_target_node_t *cast = (const pm_constant_target_node_t *) node;
        ID name = pm_constant_id_lookup(scope_node, cast->name);

        PUSH_INSN1(writes, location, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_CONST_BASE));
        PUSH_INSN1(writes, location, setconstant, ID2SYM(name));
        break;
      }
      case PM_GLOBAL_VARIABLE_TARGET_NODE: {
        // Global variable targets have no parent expression, so they only need
        // to compile the write.
        //
        //     for $i in []; end
        //
        const pm_global_variable_target_node_t *cast = (const pm_global_variable_target_node_t *) node;
        ID name = pm_constant_id_lookup(scope_node, cast->name);

        PUSH_INSN1(writes, location, setglobal, ID2SYM(name));
        break;
      }
      case PM_INSTANCE_VARIABLE_TARGET_NODE: {
        // Instance variable targets have no parent expression, so they only
        // need to compile the write.
        //
        //     for @i in []; end
        //
        const pm_instance_variable_target_node_t *cast = (const pm_instance_variable_target_node_t *) node;
        ID name = pm_constant_id_lookup(scope_node, cast->name);

        PUSH_INSN2(writes, location, setinstancevariable, ID2SYM(name), get_ivar_ic_value(iseq, name));
        break;
      }
      case PM_CONSTANT_PATH_TARGET_NODE: {
        // Constant path targets have a parent expression that is the object
        // that owns the constant. This needs to be compiled first into the
        // parents sequence. If no parent is found, then it represents using the
        // unary :: operator to indicate a top-level constant. In that case we
        // need to push Object onto the stack.
        //
        //     for I::J in []; end
        //
        const pm_constant_path_target_node_t *cast = (const pm_constant_path_target_node_t *) node;
        ID name = pm_constant_id_lookup(scope_node, cast->name);

        if (cast->parent != NULL) {
            pm_compile_node(iseq, cast->parent, parents, false, scope_node);
        }
        else {
            PUSH_INSN1(parents, location, putobject, rb_cObject);
        }

        if (state == NULL) {
            // Standalone target: the owner is directly beneath the value.
            PUSH_INSN(writes, location, swap);
        }
        else {
            // Part of a multi write: fetch the owner with a topn whose offset
            // is patched later by pm_multi_target_state_update.
            PUSH_INSN1(writes, location, topn, INT2FIX(1));
            pm_multi_target_state_push(state, (INSN *) LAST_ELEMENT(writes), 1);
        }

        PUSH_INSN1(writes, location, setconstant, ID2SYM(name));

        if (state != NULL) {
            PUSH_INSN(cleanup, location, pop);
        }

        break;
      }
      case PM_CALL_TARGET_NODE: {
        // Call targets have a parent expression that is the receiver of the
        // method being called. This needs to be compiled first into the parents
        // sequence. These nodes cannot have arguments, so the method call is
        // compiled with a single argument which represents the value being
        // written.
        //
        //     for i.j in []; end
        //
        const pm_call_target_node_t *cast = (const pm_call_target_node_t *) node;
        ID method_id = pm_constant_id_lookup(scope_node, cast->name);

        pm_compile_node(iseq, cast->receiver, parents, false, scope_node);

        if (state != NULL) {
            PUSH_INSN1(writes, location, topn, INT2FIX(1));
            pm_multi_target_state_push(state, (INSN *) LAST_ELEMENT(writes), 1);
            PUSH_INSN(writes, location, swap);
        }

        int flags = VM_CALL_ARGS_SIMPLE;
        if (PM_NODE_FLAG_P(cast, PM_CALL_NODE_FLAGS_IGNORE_VISIBILITY)) flags |= VM_CALL_FCALL;

        PUSH_SEND_WITH_FLAG(writes, location, method_id, INT2FIX(1), INT2FIX(flags));
        PUSH_INSN(writes, location, pop);

        if (state != NULL) {
            PUSH_INSN(cleanup, location, pop);
        }

        break;
      }
      case PM_INDEX_TARGET_NODE: {
        // Index targets have a parent expression that is the receiver of the
        // method being called and any additional arguments that are being
        // passed along with the value being written. The receiver and arguments
        // both need to be on the stack. Note that this is even more complicated
        // by the fact that these nodes can hold a block using the unary &
        // operator.
        //
        //     for i[:j] in []; end
        //
        const pm_index_target_node_t *cast = (const pm_index_target_node_t *) node;

        pm_compile_node(iseq, cast->receiver, parents, false, scope_node);

        int flags = 0;
        struct rb_callinfo_kwarg *kwargs = NULL;
        int argc = pm_setup_args(cast->arguments, cast->block, &flags, &kwargs, iseq, parents, scope_node, &location);

        if (state != NULL) {
            PUSH_INSN1(writes, location, topn, INT2FIX(argc + 1));
            pm_multi_target_state_push(state, (INSN *) LAST_ELEMENT(writes), argc + 1);

            if (argc == 0) {
                PUSH_INSN(writes, location, swap);
            }
            else {
                // Re-fetch each argument above the value being written, then
                // the value itself, so [receiver, args..., value] is on top.
                for (int index = 0; index < argc; index++) {
                    PUSH_INSN1(writes, location, topn, INT2FIX(argc + 1));
                }
                PUSH_INSN1(writes, location, topn, INT2FIX(argc + 1));
            }
        }

        // The argc that we're going to pass to the send instruction is the
        // number of arguments + 1 for the value being written. If there's a
        // splat, then we need to insert newarray and concatarray instructions
        // after the arguments have been written.
        int ci_argc = argc + 1;
        if (flags & VM_CALL_ARGS_SPLAT) {
            ci_argc--;
            PUSH_INSN1(writes, location, newarray, INT2FIX(1));
            PUSH_INSN(writes, location, concatarray);
        }

        PUSH_SEND_R(writes, location, idASET, INT2NUM(ci_argc), NULL, INT2FIX(flags), kwargs);
        PUSH_INSN(writes, location, pop);

        if (state != NULL) {
            if (argc != 0) {
                PUSH_INSN(writes, location, pop);
            }

            // Receiver plus each argument must come off the stack afterwards.
            for (int index = 0; index < argc + 1; index++) {
                PUSH_INSN(cleanup, location, pop);
            }
        }

        break;
      }
      case PM_MULTI_TARGET_NODE: {
        // Multi target nodes represent a set of writes to multiple variables.
        // The parent expressions are the combined set of the parent expressions
        // of its inner target nodes.
        //
        //     for i, j in []; end
        //
        if (state != NULL) state->position--;
        pm_compile_multi_target_node(iseq, node, parents, writes, cleanup, scope_node, state);
        if (state != NULL) state->position++;
        break;
      }
      default:
        rb_bug("Unexpected node type: %s", pm_node_type_to_str(PM_NODE_TYPE(node)));
        break;
    }
}
/**
 * Compile a multi target or multi write node. It returns the number of values
 * on the stack that correspond to the parent expressions of the various
 * targets.
 */
static size_t
pm_compile_multi_target_node(rb_iseq_t *iseq, const pm_node_t *node, LINK_ANCHOR *const parents, LINK_ANCHOR *const writes, LINK_ANCHOR *const cleanup, pm_scope_node_t *scope_node, pm_multi_target_state_t *state)
{
    const pm_line_column_t location = PM_NODE_START_LINE_COLUMN(scope_node->parser, node);
    const pm_node_list_t *lefts;
    const pm_node_t *rest;
    const pm_node_list_t *rights;

    // Multi target and multi write nodes share the same lefts/rest/rights
    // layout; pull out whichever variant we were given.
    switch (PM_NODE_TYPE(node)) {
      case PM_MULTI_TARGET_NODE: {
        const pm_multi_target_node_t *cast = (const pm_multi_target_node_t *) node;
        lefts = &cast->lefts;
        rest = cast->rest;
        rights = &cast->rights;
        break;
      }
      case PM_MULTI_WRITE_NODE: {
        const pm_multi_write_node_t *cast = (const pm_multi_write_node_t *) node;
        lefts = &cast->lefts;
        rest = cast->rest;
        rights = &cast->rights;
        break;
      }
      default:
        rb_bug("Unsupported node %s", pm_node_type_to_str(PM_NODE_TYPE(node)));
        break;
    }

    bool has_rest = (rest != NULL) && PM_NODE_TYPE_P(rest, PM_SPLAT_NODE) && ((const pm_splat_node_t *) rest)->expression != NULL;
    bool has_posts = rights->size > 0;

    // The first instruction in the writes sequence is going to spread the
    // top value of the stack onto the number of values that we're going to
    // write.
    PUSH_INSN2(writes, location, expandarray, INT2FIX(lefts->size), INT2FIX((has_rest || has_posts) ? 1 : 0));

    // We need to keep track of some additional state information as we're
    // going through the targets because we will need to revisit them once
    // we know how many values are being pushed onto the stack.
    pm_multi_target_state_t target_state = { 0 };
    size_t base_position = state == NULL ? 0 : state->position;
    size_t splat_position = has_rest ? 1 : 0;

    // Next, we'll iterate through all of the leading targets.
    for (size_t index = 0; index < lefts->size; index++) {
        const pm_node_t *target = lefts->nodes[index];
        target_state.position = lefts->size - index + splat_position + base_position;
        pm_compile_target_node(iseq, target, parents, writes, cleanup, scope_node, &target_state);
    }

    // Next, we'll compile the rest target if there is one.
    if (has_rest) {
        const pm_node_t *target = ((const pm_splat_node_t *) rest)->expression;
        target_state.position = 1 + rights->size + base_position;

        if (has_posts) {
            PUSH_INSN2(writes, location, expandarray, INT2FIX(rights->size), INT2FIX(3));
        }

        pm_compile_target_node(iseq, target, parents, writes, cleanup, scope_node, &target_state);
    }

    // Finally, we'll compile the trailing targets.
    if (has_posts) {
        if (!has_rest && rest != NULL) {
            PUSH_INSN2(writes, location, expandarray, INT2FIX(rights->size), INT2FIX(2));
        }

        for (size_t index = 0; index < rights->size; index++) {
            const pm_node_t *target = rights->nodes[index];
            target_state.position = rights->size - index + base_position;
            pm_compile_target_node(iseq, target, parents, writes, cleanup, scope_node, &target_state);
        }
    }

    // Now, we need to go back and modify the topn instructions in order to
    // ensure they can correctly retrieve the parent expressions.
    pm_multi_target_state_update(&target_state);

    if (state != NULL) state->stack_size += target_state.stack_size;

    return target_state.stack_size;
}
/**
 * When compiling a for loop, we need to write the iteration variable to
 * whatever expression exists in the index slot. This function performs that
 * compilation. The iteration value is always found in local slot (1, 0) of the
 * block built for the loop body.
 */
static void
pm_compile_for_node_index(rb_iseq_t *iseq, const pm_node_t *node, LINK_ANCHOR *const ret, pm_scope_node_t *scope_node)
{
    const pm_line_column_t location = PM_NODE_START_LINE_COLUMN(scope_node->parser, node);

    switch (PM_NODE_TYPE(node)) {
      case PM_LOCAL_VARIABLE_TARGET_NODE: {
        // For local variables, all we have to do is retrieve the value and then
        // compile the index node.
        PUSH_GETLOCAL(ret, location, 1, 0);
        pm_compile_target_node(iseq, node, ret, ret, ret, scope_node, NULL);
        break;
      }
      case PM_CLASS_VARIABLE_TARGET_NODE:
      case PM_CONSTANT_TARGET_NODE:
      case PM_GLOBAL_VARIABLE_TARGET_NODE:
      case PM_INSTANCE_VARIABLE_TARGET_NODE:
      case PM_CONSTANT_PATH_TARGET_NODE:
      case PM_CALL_TARGET_NODE:
      case PM_INDEX_TARGET_NODE: {
        // For other targets, we need to potentially compile the parent or
        // owning expression of this target, then retrieve the value, expand it,
        // and then compile the necessary writes.
        DECL_ANCHOR(writes);
        INIT_ANCHOR(writes);

        DECL_ANCHOR(cleanup);
        INIT_ANCHOR(cleanup);

        pm_multi_target_state_t state = { 0 };
        state.position = 1;
        pm_compile_target_node(iseq, node, ret, writes, cleanup, scope_node, &state);

        PUSH_GETLOCAL(ret, location, 1, 0);
        PUSH_INSN2(ret, location, expandarray, INT2FIX(1), INT2FIX(0));

        PUSH_SEQ(ret, writes);
        PUSH_SEQ(ret, cleanup);

        pm_multi_target_state_update(&state);
        break;
      }
      case PM_MULTI_TARGET_NODE: {
        DECL_ANCHOR(writes);
        INIT_ANCHOR(writes);

        DECL_ANCHOR(cleanup);
        INIT_ANCHOR(cleanup);

        pm_compile_target_node(iseq, node, ret, writes, cleanup, scope_node, NULL);

        LABEL *not_single = NEW_LABEL(location.line);
        LABEL *not_ary = NEW_LABEL(location.line);

        // When there are multiple targets, we'll do a bunch of work to convert
        // the value into an array before we expand it. Effectively we're trying
        // to accomplish:
        //
        //     (args.length == 1 && Array.try_convert(args[0])) || args
        //
        PUSH_GETLOCAL(ret, location, 1, 0);
        PUSH_INSN(ret, location, dup);
        PUSH_CALL(ret, location, idLength, INT2FIX(0));
        PUSH_INSN1(ret, location, putobject, INT2FIX(1));
        PUSH_CALL(ret, location, idEq, INT2FIX(1));
        PUSH_INSNL(ret, location, branchunless, not_single);
        PUSH_INSN(ret, location, dup);
        PUSH_INSN1(ret, location, putobject, INT2FIX(0));
        PUSH_CALL(ret, location, idAREF, INT2FIX(1));
        PUSH_INSN1(ret, location, putobject, rb_cArray);
        PUSH_INSN(ret, location, swap);
        PUSH_CALL(ret, location, rb_intern("try_convert"), INT2FIX(1));
        PUSH_INSN(ret, location, dup);
        PUSH_INSNL(ret, location, branchunless, not_ary);
        PUSH_INSN(ret, location, swap);

        PUSH_LABEL(ret, not_ary);
        PUSH_INSN(ret, location, pop);

        PUSH_LABEL(ret, not_single);
        PUSH_SEQ(ret, writes);
        PUSH_SEQ(ret, cleanup);
        break;
      }
      default:
        rb_bug("Unexpected node type for index in for node: %s", pm_node_type_to_str(PM_NODE_TYPE(node)));
        break;
    }
}
/**
 * Compile a begin/rescue construct: the protected body goes inline between
 * lstart/lend, the rescue clause is compiled into its own child iseq, and
 * catch table entries are added both for RESCUE (dispatch to the handler) and
 * RETRY (jump back to lstart).
 */
static void
pm_compile_rescue(rb_iseq_t *iseq, const pm_begin_node_t *cast, const pm_line_column_t *node_location, LINK_ANCHOR *const ret, bool popped, pm_scope_node_t *scope_node)
{
    const pm_parser_t *parser = scope_node->parser;

    LABEL *lstart = NEW_LABEL(node_location->line);
    LABEL *lend = NEW_LABEL(node_location->line);
    LABEL *lcont = NEW_LABEL(node_location->line);

    // Build the child iseq for the rescue clause from a temporary scope node.
    pm_scope_node_t rescue_scope_node;
    pm_scope_node_init((const pm_node_t *) cast->rescue_clause, &rescue_scope_node, scope_node);

    rb_iseq_t *rescue_iseq = NEW_CHILD_ISEQ(
        &rescue_scope_node,
        rb_str_concat(rb_str_new2("rescue in "), ISEQ_BODY(iseq)->location.label),
        ISEQ_TYPE_RESCUE,
        pm_node_line_number(parser, (const pm_node_t *) cast->rescue_clause)
    );

    pm_scope_node_destroy(&rescue_scope_node);

    lstart->rescued = LABEL_RESCUE_BEG;
    lend->rescued = LABEL_RESCUE_END;
    PUSH_LABEL(ret, lstart);

    // Track that we're inside a rescue-protected region while compiling the
    // body, restoring the previous flag afterwards.
    bool prev_in_rescue = ISEQ_COMPILE_DATA(iseq)->in_rescue;
    ISEQ_COMPILE_DATA(iseq)->in_rescue = true;

    if (cast->statements != NULL) {
        PM_COMPILE_NOT_POPPED((const pm_node_t *) cast->statements);
    }
    else {
        PUSH_INSN(ret, *node_location, putnil);
    }

    ISEQ_COMPILE_DATA(iseq)->in_rescue = prev_in_rescue;
    PUSH_LABEL(ret, lend);

    if (cast->else_clause != NULL) {
        // The else clause replaces the body's value when no exception raised.
        if (!popped) PUSH_INSN(ret, *node_location, pop);
        PM_COMPILE((const pm_node_t *) cast->else_clause);
    }

    PUSH_INSN(ret, *node_location, nop);
    PUSH_LABEL(ret, lcont);

    if (popped) PUSH_INSN(ret, *node_location, pop);
    PUSH_CATCH_ENTRY(CATCH_TYPE_RESCUE, lstart, lend, rescue_iseq, lcont);
    PUSH_CATCH_ENTRY(CATCH_TYPE_RETRY, lend, lcont, NULL, lstart);
}
/**
 * Compile a begin node that has an ensure clause: the protected body (which
 * may itself carry a rescue), the ENSURE catch-table entries, and finally the
 * inline copy of the ensure statements for the normal (non-exceptional) path.
 */
static void
pm_compile_ensure(rb_iseq_t *iseq, const pm_begin_node_t *cast, const pm_line_column_t *node_location, LINK_ANCHOR *const ret, bool popped, pm_scope_node_t *scope_node)
{
    const pm_parser_t *parser = scope_node->parser;
    const pm_statements_node_t *statements = cast->ensure_clause->statements;
    const pm_line_column_t location = statements != NULL ? PM_NODE_START_LINE_COLUMN(parser, statements) : *node_location;

    LABEL *estart = NEW_LABEL(location.line);
    LABEL *eend = NEW_LABEL(location.line);
    LABEL *econt = NEW_LABEL(location.line);

    struct ensure_range er;
    struct iseq_compile_data_ensure_node_stack enl;
    struct ensure_range *erange;

    // Register this ensure region on the compile-data stack so that jumps
    // out of the region (break/next/return) know to run the ensure body.
    er.begin = estart;
    er.end = eend;
    er.next = 0;
    push_ensure_entry(iseq, &enl, &er, (void *) cast->ensure_clause);

    PUSH_LABEL(ret, estart);
    if (cast->rescue_clause) {
        // rescue + ensure: delegate the protected region to the rescue path.
        pm_compile_rescue(iseq, cast, &location, ret, popped, scope_node);
    }
    else {
        if (cast->statements) {
            PM_COMPILE((const pm_node_t *) cast->statements);
        }
        else if (!popped) {
            PUSH_INSN(ret, *node_location, putnil);
        }
    }

    PUSH_LABEL(ret, eend);
    PUSH_LABEL(ret, econt);

    // Build the child ISeq that runs the ensure body on the exception path.
    pm_scope_node_t next_scope_node;
    pm_scope_node_init((const pm_node_t *) cast->ensure_clause, &next_scope_node, scope_node);

    rb_iseq_t *child_iseq = NEW_CHILD_ISEQ(
        &next_scope_node,
        rb_str_concat(rb_str_new2("ensure in "), ISEQ_BODY(iseq)->location.label),
        ISEQ_TYPE_ENSURE,
        location.line
    );

    pm_scope_node_destroy(&next_scope_node);
    ISEQ_COMPILE_DATA(iseq)->current_block = child_iseq;

    // Only emit catch entries when the protected region is non-empty.
    erange = ISEQ_COMPILE_DATA(iseq)->ensure_node_stack->erange;
    if (estart->link.next != &eend->link) {
        while (erange) {
            PUSH_CATCH_ENTRY(CATCH_TYPE_ENSURE, erange->begin, erange->end, child_iseq, econt);
            erange = erange->next;
        }
    }

    ISEQ_COMPILE_DATA(iseq)->ensure_node_stack = enl.prev;

    // Compile the ensure entry
    if (statements != NULL) {
        PM_COMPILE((const pm_node_t *) statements);
        if (!popped) PUSH_INSN(ret, *node_location, pop);
    }
}
4355 * Returns true if the given call node can use the opt_str_uminus or
4356 * opt_str_freeze instructions as an optimization with the current iseq options.
4358 static inline bool
4359 pm_opt_str_freeze_p(const rb_iseq_t *iseq, const pm_call_node_t *node)
4361 return (
4362 !PM_NODE_FLAG_P(node, PM_CALL_NODE_FLAGS_SAFE_NAVIGATION) &&
4363 node->receiver != NULL &&
4364 PM_NODE_TYPE_P(node->receiver, PM_STRING_NODE) &&
4365 node->arguments == NULL &&
4366 node->block == NULL &&
4367 ISEQ_COMPILE_DATA(iseq)->option->specialized_instruction
4372 * Returns true if the given call node can use the opt_aref_with optimization
4373 * with the current iseq options.
4375 static inline bool
4376 pm_opt_aref_with_p(const rb_iseq_t *iseq, const pm_call_node_t *node)
4378 return (
4379 !PM_NODE_FLAG_P(node, PM_CALL_NODE_FLAGS_SAFE_NAVIGATION) &&
4380 node->arguments != NULL &&
4381 PM_NODE_TYPE_P((const pm_node_t *) node->arguments, PM_ARGUMENTS_NODE) &&
4382 ((const pm_arguments_node_t *) node->arguments)->arguments.size == 1 &&
4383 PM_NODE_TYPE_P(((const pm_arguments_node_t *) node->arguments)->arguments.nodes[0], PM_STRING_NODE) &&
4384 node->block == NULL &&
4385 !PM_NODE_FLAG_P(((const pm_arguments_node_t *) node->arguments)->arguments.nodes[0], PM_STRING_FLAGS_FROZEN) &&
4386 ISEQ_COMPILE_DATA(iseq)->option->specialized_instruction
4391 * Returns true if the given call node can use the opt_aset_with optimization
4392 * with the current iseq options.
4394 static inline bool
4395 pm_opt_aset_with_p(const rb_iseq_t *iseq, const pm_call_node_t *node)
4397 return (
4398 !PM_NODE_FLAG_P(node, PM_CALL_NODE_FLAGS_SAFE_NAVIGATION) &&
4399 node->arguments != NULL &&
4400 PM_NODE_TYPE_P((const pm_node_t *) node->arguments, PM_ARGUMENTS_NODE) &&
4401 ((const pm_arguments_node_t *) node->arguments)->arguments.size == 2 &&
4402 PM_NODE_TYPE_P(((const pm_arguments_node_t *) node->arguments)->arguments.nodes[0], PM_STRING_NODE) &&
4403 node->block == NULL &&
4404 !PM_NODE_FLAG_P(((const pm_arguments_node_t *) node->arguments)->arguments.nodes[0], PM_STRING_FLAGS_FROZEN) &&
4405 ISEQ_COMPILE_DATA(iseq)->option->specialized_instruction
/**
 * Compile the instructions necessary to read a constant, based on the options
 * of the current iseq.
 */
static void
pm_compile_constant_read(rb_iseq_t *iseq, VALUE name, const pm_location_t *name_loc, LINK_ANCHOR *const ret, const pm_scope_node_t *scope_node)
{
    const pm_line_column_t location = PM_LOCATION_START_LINE_COLUMN(scope_node->parser, name_loc);

    if (ISEQ_COMPILE_DATA(iseq)->option->inline_const_cache) {
        // With inline caches enabled, emit a single cached constant-path
        // lookup and account for the new inline cache slot.
        ISEQ_BODY(iseq)->ic_size++;
        VALUE segments = rb_ary_new_from_args(1, name);
        PUSH_INSN1(ret, location, opt_getconstant_path, segments);
    }
    else {
        // Otherwise fall back to the generic getconstant instruction, which
        // expects a cref (nil here) and an allow-nil flag on the stack.
        PUSH_INSN(ret, location, putnil);
        PUSH_INSN1(ret, location, putobject, Qtrue);
        PUSH_INSN1(ret, location, getconstant, name);
    }
}
/**
 * Returns a Ruby array of the parts of the constant path node if it is constant
 * reads all of the way down. If it isn't, then Qnil is returned.
 */
static VALUE
pm_constant_path_parts(const pm_node_t *node, const pm_scope_node_t *scope_node)
{
    VALUE parts = rb_ary_new();

    // Walk from the leaf of the path toward the root, unshifting each name so
    // that the resulting array reads left-to-right.
    while (true) {
        switch (PM_NODE_TYPE(node)) {
          case PM_CONSTANT_READ_NODE: {
            const pm_constant_read_node_t *cast = (const pm_constant_read_node_t *) node;
            VALUE name = ID2SYM(pm_constant_id_lookup(scope_node, cast->name));

            rb_ary_unshift(parts, name);
            return parts;
          }
          case PM_CONSTANT_PATH_NODE: {
            const pm_constant_path_node_t *cast = (const pm_constant_path_node_t *) node;
            VALUE name = ID2SYM(pm_constant_id_lookup(scope_node, cast->name));

            rb_ary_unshift(parts, name);
            if (cast->parent == NULL) {
                // A NULL parent means the path is rooted (::Foo); idNULL
                // marks the root in the parts array.
                rb_ary_unshift(parts, ID2SYM(idNULL));
                return parts;
            }

            node = cast->parent;
            break;
          }
          default:
            // Any dynamic expression in the path disqualifies it.
            return Qnil;
        }
    }
}
/**
 * Compile a constant path into two sequences of instructions, one for the
 * owning expression if there is one (prefix) and one for the constant reads
 * (body).
 */
static void
pm_compile_constant_path(rb_iseq_t *iseq, const pm_node_t *node, LINK_ANCHOR *const prefix, LINK_ANCHOR *const body, bool popped, pm_scope_node_t *scope_node)
{
    const pm_line_column_t location = PM_NODE_START_LINE_COLUMN(scope_node->parser, node);

    switch (PM_NODE_TYPE(node)) {
      case PM_CONSTANT_READ_NODE: {
        const pm_constant_read_node_t *cast = (const pm_constant_read_node_t *) node;
        VALUE name = ID2SYM(pm_constant_id_lookup(scope_node, cast->name));

        // getconstant consumes the cref and the allow-nil flag.
        PUSH_INSN1(body, location, putobject, Qtrue);
        PUSH_INSN1(body, location, getconstant, name);
        break;
      }
      case PM_CONSTANT_PATH_NODE: {
        const pm_constant_path_node_t *cast = (const pm_constant_path_node_t *) node;
        VALUE name = ID2SYM(pm_constant_id_lookup(scope_node, cast->name));

        if (cast->parent == NULL) {
            // Rooted path (::Foo): discard the inherited cref and look the
            // constant up directly under Object.
            PUSH_INSN(body, location, pop);
            PUSH_INSN1(body, location, putobject, rb_cObject);
            PUSH_INSN1(body, location, putobject, Qtrue);
            PUSH_INSN1(body, location, getconstant, name);
        }
        else {
            // Recurse to compile the parent first, then read this segment
            // from the resulting module (allow-nil = false).
            pm_compile_constant_path(iseq, cast->parent, prefix, body, false, scope_node);
            PUSH_INSN1(body, location, putobject, Qfalse);
            PUSH_INSN1(body, location, getconstant, name);
        }
        break;
      }
      default:
        // A dynamic owner expression belongs in the prefix sequence.
        PM_COMPILE_INTO_ANCHOR(prefix, node);
        break;
    }
}
/**
 * When we're compiling a case node, it's possible that we can speed it up using
 * a dispatch hash, which will allow us to jump directly to the correct when
 * clause body based on a hash lookup of the value. This can only happen when
 * the conditions are literals that can be compiled into a hash key.
 *
 * This function accepts a dispatch hash and the condition of a when clause. It
 * is responsible for compiling the condition into a hash key and then adding it
 * to the dispatch hash.
 *
 * If the value can be successfully compiled into the hash, then this function
 * returns the dispatch hash with the new key added. If the value cannot be
 * compiled into the hash, then this function returns Qundef. In the case of
 * Qundef, this function is signaling that the caller should abandon the
 * optimization entirely.
 */
static VALUE
pm_compile_case_node_dispatch(rb_iseq_t *iseq, VALUE dispatch, const pm_node_t *node, LABEL *label, const pm_scope_node_t *scope_node)
{
    VALUE key = Qundef;

    switch (PM_NODE_TYPE(node)) {
      case PM_FLOAT_NODE: {
        key = pm_static_literal_value(iseq, node, scope_node);
        double intptr;

        // Floats with no fractional part are normalized to integers so that
        // e.g. `when 1.0` and `when 1` hash to the same key.
        if (modf(RFLOAT_VALUE(key), &intptr) == 0.0) {
            key = (FIXABLE(intptr) ? LONG2FIX((long) intptr) : rb_dbl2big(intptr));
        }

        break;
      }
      case PM_FALSE_NODE:
      case PM_INTEGER_NODE:
      case PM_NIL_NODE:
      case PM_SOURCE_FILE_NODE:
      case PM_SOURCE_LINE_NODE:
      case PM_SYMBOL_NODE:
      case PM_TRUE_NODE:
        key = pm_static_literal_value(iseq, node, scope_node);
        break;
      case PM_STRING_NODE: {
        const pm_string_node_t *cast = (const pm_string_node_t *) node;
        key = parse_static_literal_string(iseq, scope_node, node, &cast->unescaped);
        break;
      }
      default:
        // Not a literal we can hash; abandon the optimization.
        return Qundef;
    }

    // Only the first occurrence of a key wins, matching case/when semantics.
    // The label pointer is tagged with | 1 so the GC treats it as a fixnum.
    if (NIL_P(rb_hash_lookup(dispatch, key))) {
        rb_hash_aset(dispatch, key, ((VALUE) label) | 1);
    }

    return dispatch;
}
/**
 * Return the object that will be pushed onto the stack for the given node.
 *
 * Returns Qundef when the node (or any nested element) is not a static
 * literal that can be converted into a shareable object at compile time.
 */
static VALUE
pm_compile_shareable_constant_literal(rb_iseq_t *iseq, const pm_node_t *node, const pm_scope_node_t *scope_node)
{
    switch (PM_NODE_TYPE(node)) {
      case PM_TRUE_NODE:
      case PM_FALSE_NODE:
      case PM_NIL_NODE:
      case PM_SYMBOL_NODE:
      case PM_REGULAR_EXPRESSION_NODE:
      case PM_SOURCE_LINE_NODE:
      case PM_INTEGER_NODE:
      case PM_FLOAT_NODE:
      case PM_RATIONAL_NODE:
      case PM_IMAGINARY_NODE:
      case PM_SOURCE_ENCODING_NODE:
        return pm_static_literal_value(iseq, node, scope_node);
      case PM_STRING_NODE:
        return parse_static_literal_string(iseq, scope_node, node, &((const pm_string_node_t *) node)->unescaped);
      case PM_SOURCE_FILE_NODE:
        return pm_source_file_value((const pm_source_file_node_t *) node, scope_node);
      case PM_ARRAY_NODE: {
        // Arrays are shareable only if every element is; bail out with
        // Qundef as soon as one element fails.
        const pm_array_node_t *cast = (const pm_array_node_t *) node;
        VALUE result = rb_ary_new_capa(cast->elements.size);

        for (size_t index = 0; index < cast->elements.size; index++) {
            VALUE element = pm_compile_shareable_constant_literal(iseq, cast->elements.nodes[index], scope_node);
            if (element == Qundef) return Qundef;

            rb_ary_push(result, element);
        }

        return rb_ractor_make_shareable(result);
      }
      case PM_HASH_NODE: {
        // Hashes must consist solely of plain key => value associations
        // (no **splat) whose keys and values are themselves shareable.
        const pm_hash_node_t *cast = (const pm_hash_node_t *) node;
        VALUE result = rb_hash_new_capa(cast->elements.size);

        for (size_t index = 0; index < cast->elements.size; index++) {
            const pm_node_t *element = cast->elements.nodes[index];
            if (!PM_NODE_TYPE_P(element, PM_ASSOC_NODE)) return Qundef;

            const pm_assoc_node_t *assoc = (const pm_assoc_node_t *) element;

            VALUE key = pm_compile_shareable_constant_literal(iseq, assoc->key, scope_node);
            if (key == Qundef) return Qundef;

            VALUE value = pm_compile_shareable_constant_literal(iseq, assoc->value, scope_node);
            if (value == Qundef) return Qundef;

            rb_hash_aset(result, key, value);
        }

        return rb_ractor_make_shareable(result);
      }
      default:
        return Qundef;
    }
}
/**
 * Compile the instructions for pushing the value that will be written to a
 * shared constant.
 *
 * `top` is true only for the outermost call; nested elements are compiled
 * recursively with top=false so that make_shareable is sent exactly once on
 * the final composed value.
 */
static void
pm_compile_shareable_constant_value(rb_iseq_t *iseq, const pm_node_t *node, const pm_node_flags_t shareability, VALUE path, LINK_ANCHOR *const ret, pm_scope_node_t *scope_node, bool top)
{
    // Fast path: if the whole value is a static literal, push the precomputed
    // shareable object directly.
    VALUE literal = pm_compile_shareable_constant_literal(iseq, node, scope_node);
    if (literal != Qundef) {
        const pm_line_column_t location = PM_NODE_START_LINE_COLUMN(scope_node->parser, node);
        PUSH_INSN1(ret, location, putobject, literal);
        return;
    }

    const pm_line_column_t location = PM_NODE_START_LINE_COLUMN(scope_node->parser, node);
    switch (PM_NODE_TYPE(node)) {
      case PM_ARRAY_NODE: {
        const pm_array_node_t *cast = (const pm_array_node_t *) node;

        if (top) {
            // Receiver for the trailing make_shareable(_copy) send.
            PUSH_INSN1(ret, location, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
        }

        for (size_t index = 0; index < cast->elements.size; index++) {
            pm_compile_shareable_constant_value(iseq, cast->elements.nodes[index], shareability, path, ret, scope_node, false);
        }

        PUSH_INSN1(ret, location, newarray, INT2FIX(cast->elements.size));

        if (top) {
            ID method_id = (shareability & PM_SHAREABLE_CONSTANT_NODE_FLAGS_EXPERIMENTAL_COPY) ? rb_intern("make_shareable_copy") : rb_intern("make_shareable");
            PUSH_SEND_WITH_FLAG(ret, location, method_id, INT2FIX(1), INT2FIX(VM_CALL_ARGS_SIMPLE));
        }

        return;
      }
      case PM_HASH_NODE: {
        const pm_hash_node_t *cast = (const pm_hash_node_t *) node;

        if (top) {
            // Receiver for the trailing make_shareable(_copy) send.
            PUSH_INSN1(ret, location, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
        }

        for (size_t index = 0; index < cast->elements.size; index++) {
            const pm_node_t *element = cast->elements.nodes[index];

            if (!PM_NODE_TYPE_P(element, PM_ASSOC_NODE)) {
                COMPILE_ERROR(ERROR_ARGS "Ractor constant writes do not support **");
            }

            const pm_assoc_node_t *assoc = (const pm_assoc_node_t *) element;
            pm_compile_shareable_constant_value(iseq, assoc->key, shareability, path, ret, scope_node, false);
            pm_compile_shareable_constant_value(iseq, assoc->value, shareability, path, ret, scope_node, false);
        }

        // newhash consumes key/value pairs, hence the * 2.
        PUSH_INSN1(ret, location, newhash, INT2FIX(cast->elements.size * 2));

        if (top) {
            ID method_id = (shareability & PM_SHAREABLE_CONSTANT_NODE_FLAGS_EXPERIMENTAL_COPY) ? rb_intern("make_shareable_copy") : rb_intern("make_shareable");
            PUSH_SEND_WITH_FLAG(ret, location, method_id, INT2FIX(1), INT2FIX(VM_CALL_ARGS_SIMPLE));
        }

        return;
      }
      default: {
        // Arbitrary expression: compile it into its own sequence, then wrap
        // it according to the requested shareability mode.
        DECL_ANCHOR(value_seq);
        INIT_ANCHOR(value_seq);

        pm_compile_node(iseq, node, value_seq, false, scope_node);
        if (PM_NODE_TYPE_P(node, PM_INTERPOLATED_STRING_NODE)) {
            // Deduplicate the resulting string with String#-@.
            PUSH_SEND_WITH_FLAG(value_seq, location, idUMinus, INT2FIX(0), INT2FIX(VM_CALL_ARGS_SIMPLE));
        }

        if (shareability & PM_SHAREABLE_CONSTANT_NODE_FLAGS_LITERAL) {
            // literal mode: raise unless the value is already shareable;
            // `path` names the constant for the error message.
            PUSH_INSN1(ret, location, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
            PUSH_SEQ(ret, value_seq);
            PUSH_INSN1(ret, location, putobject, path);
            PUSH_SEND_WITH_FLAG(ret, location, rb_intern("ensure_shareable"), INT2FIX(2), INT2FIX(VM_CALL_ARGS_SIMPLE));
        }
        else if (shareability & PM_SHAREABLE_CONSTANT_NODE_FLAGS_EXPERIMENTAL_COPY) {
            if (top) PUSH_INSN1(ret, location, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
            PUSH_SEQ(ret, value_seq);
            if (top) PUSH_SEND_WITH_FLAG(ret, location, rb_intern("make_shareable_copy"), INT2FIX(1), INT2FIX(VM_CALL_ARGS_SIMPLE));
        }
        else if (shareability & PM_SHAREABLE_CONSTANT_NODE_FLAGS_EXPERIMENTAL_EVERYTHING) {
            if (top) PUSH_INSN1(ret, location, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
            PUSH_SEQ(ret, value_seq);
            if (top) PUSH_SEND_WITH_FLAG(ret, location, rb_intern("make_shareable"), INT2FIX(1), INT2FIX(VM_CALL_ARGS_SIMPLE));
        }

        break;
      }
    }
}
/**
 * Compile a constant write node, either in the context of a ractor pragma or
 * not.
 */
static void
pm_compile_constant_write_node(rb_iseq_t *iseq, const pm_constant_write_node_t *node, const pm_node_flags_t shareability, const pm_line_column_t *node_location, LINK_ANCHOR *const ret, bool popped, pm_scope_node_t *scope_node)
{
    const pm_line_column_t location = *node_location;
    ID name_id = pm_constant_id_lookup(scope_node, node->name);

    // A non-zero shareability means a `# shareable_constant_value:` pragma is
    // in effect, so the value must be made shareable before assignment.
    if (shareability != 0) {
        pm_compile_shareable_constant_value(iseq, node->value, shareability, rb_id2str(name_id), ret, scope_node, true);
    }
    else {
        PM_COMPILE_NOT_POPPED(node->value);
    }

    // Keep a copy of the value as the expression result when not popped.
    if (!popped) PUSH_INSN(ret, location, dup);
    PUSH_INSN1(ret, location, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_CONST_BASE));
    PUSH_INSN1(ret, location, setconstant, ID2SYM(name_id));
}
/**
 * Compile a constant and write node, either in the context of a ractor pragma
 * or not.
 */
static void
pm_compile_constant_and_write_node(rb_iseq_t *iseq, const pm_constant_and_write_node_t *node, const pm_node_flags_t shareability, const pm_line_column_t *node_location, LINK_ANCHOR *const ret, bool popped, pm_scope_node_t *scope_node)
{
    const pm_line_column_t location = *node_location;

    VALUE name = ID2SYM(pm_constant_id_lookup(scope_node, node->name));
    LABEL *end_label = NEW_LABEL(location.line);

    // Read the current value; `CONST &&= value` only assigns when the current
    // value is truthy.
    pm_compile_constant_read(iseq, name, &node->name_loc, ret, scope_node);
    if (!popped) PUSH_INSN(ret, location, dup);

    PUSH_INSNL(ret, location, branchunless, end_label);
    if (!popped) PUSH_INSN(ret, location, pop);

    if (shareability != 0) {
        pm_compile_shareable_constant_value(iseq, node->value, shareability, name, ret, scope_node, true);
    }
    else {
        PM_COMPILE_NOT_POPPED(node->value);
    }

    if (!popped) PUSH_INSN(ret, location, dup);
    PUSH_INSN1(ret, location, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_CONST_BASE));
    PUSH_INSN1(ret, location, setconstant, name);
    PUSH_LABEL(ret, end_label);
}
/**
 * Compile a constant or write node, either in the context of a ractor pragma or
 * not.
 */
static void
pm_compile_constant_or_write_node(rb_iseq_t *iseq, const pm_constant_or_write_node_t *node, const pm_node_flags_t shareability, const pm_line_column_t *node_location, LINK_ANCHOR *const ret, bool popped, pm_scope_node_t *scope_node)
{
    const pm_line_column_t location = *node_location;
    VALUE name = ID2SYM(pm_constant_id_lookup(scope_node, node->name));

    LABEL *set_label = NEW_LABEL(location.line);
    LABEL *end_label = NEW_LABEL(location.line);

    // `CONST ||= value` first checks defined?(CONST) so that reading an
    // undefined constant does not raise; if undefined, jump straight to the
    // assignment.
    PUSH_INSN(ret, location, putnil);
    PUSH_INSN3(ret, location, defined, INT2FIX(DEFINED_CONST), name, Qtrue);
    PUSH_INSNL(ret, location, branchunless, set_label);

    // Defined: read the value and keep it if truthy.
    pm_compile_constant_read(iseq, name, &node->name_loc, ret, scope_node);
    if (!popped) PUSH_INSN(ret, location, dup);

    PUSH_INSNL(ret, location, branchif, end_label);
    if (!popped) PUSH_INSN(ret, location, pop);
    PUSH_LABEL(ret, set_label);

    if (shareability != 0) {
        pm_compile_shareable_constant_value(iseq, node->value, shareability, name, ret, scope_node, true);
    }
    else {
        PM_COMPILE_NOT_POPPED(node->value);
    }

    if (!popped) PUSH_INSN(ret, location, dup);
    PUSH_INSN1(ret, location, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_CONST_BASE));
    PUSH_INSN1(ret, location, setconstant, name);
    PUSH_LABEL(ret, end_label);
}
4814 * Compile a constant operator write node, either in the context of a ractor
4815 * pragma or not.
4817 static void
4818 pm_compile_constant_operator_write_node(rb_iseq_t *iseq, const pm_constant_operator_write_node_t *node, const pm_node_flags_t shareability, const pm_line_column_t *node_location, LINK_ANCHOR *const ret, bool popped, pm_scope_node_t *scope_node)
4820 const pm_line_column_t location = *node_location;
4822 VALUE name = ID2SYM(pm_constant_id_lookup(scope_node, node->name));
4823 ID method_id = pm_constant_id_lookup(scope_node, node->binary_operator);
4825 pm_compile_constant_read(iseq, name, &node->name_loc, ret, scope_node);
4827 if (shareability != 0) {
4828 pm_compile_shareable_constant_value(iseq, node->value, shareability, name, ret, scope_node, true);
4830 else {
4831 PM_COMPILE_NOT_POPPED(node->value);
4834 PUSH_SEND_WITH_FLAG(ret, location, method_id, INT2NUM(1), INT2FIX(VM_CALL_ARGS_SIMPLE));
4835 if (!popped) PUSH_INSN(ret, location, dup);
4837 PUSH_INSN1(ret, location, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_CONST_BASE));
4838 PUSH_INSN1(ret, location, setconstant, name);
/**
 * Creates a string that is used in ractor error messages to describe the
 * constant path being written.
 */
static VALUE
pm_constant_path_path(const pm_constant_path_node_t *node, const pm_scope_node_t *scope_node)
{
    VALUE parts = rb_ary_new();
    rb_ary_push(parts, rb_id2str(pm_constant_id_lookup(scope_node, node->name)));

    // Walk up the chain of constant-path parents, collecting each segment.
    const pm_node_t *current = node->parent;
    while (current != NULL && PM_NODE_TYPE_P(current, PM_CONSTANT_PATH_NODE)) {
        const pm_constant_path_node_t *cast = (const pm_constant_path_node_t *) current;
        rb_ary_unshift(parts, rb_id2str(pm_constant_id_lookup(scope_node, cast->name)));
        current = cast->parent;
    }

    if (current == NULL) {
        // Rooted path (::Foo).
        rb_ary_unshift(parts, rb_id2str(idNULL));
    }
    else if (PM_NODE_TYPE_P(current, PM_CONSTANT_READ_NODE)) {
        rb_ary_unshift(parts, rb_id2str(pm_constant_id_lookup(scope_node, ((const pm_constant_read_node_t *) current)->name)));
    }
    else {
        // Dynamic owner expression: elide it in the error message.
        rb_ary_unshift(parts, rb_str_new_cstr("..."));
    }

    return rb_ary_join(parts, rb_str_new_cstr("::"));
}
/**
 * Compile a constant path write node, either in the context of a ractor pragma
 * or not.
 */
static void
pm_compile_constant_path_write_node(rb_iseq_t *iseq, const pm_constant_path_write_node_t *node, const pm_node_flags_t shareability, const pm_line_column_t *node_location, LINK_ANCHOR *const ret, bool popped, pm_scope_node_t *scope_node)
{
    const pm_line_column_t location = *node_location;
    const pm_constant_path_node_t *target = node->target;
    VALUE name = ID2SYM(pm_constant_id_lookup(scope_node, target->name));

    // Push the module that owns the constant: either the compiled parent
    // expression or Object for a rooted path (::Foo = ...).
    if (target->parent) {
        PM_COMPILE_NOT_POPPED((const pm_node_t *) target->parent);
    }
    else {
        PUSH_INSN1(ret, location, putobject, rb_cObject);
    }

    if (shareability != 0) {
        pm_compile_shareable_constant_value(iseq, node->value, shareability, pm_constant_path_path(node->target, scope_node), ret, scope_node, true);
    }
    else {
        PM_COMPILE_NOT_POPPED(node->value);
    }

    if (!popped) {
        // Keep a copy of the value below the owner/value pair as the result.
        PUSH_INSN(ret, location, swap);
        PUSH_INSN1(ret, location, topn, INT2FIX(1));
    }

    // setconstant expects [value, module] on the stack.
    PUSH_INSN(ret, location, swap);
    PUSH_INSN1(ret, location, setconstant, name);
}
/**
 * Compile a constant path and write node, either in the context of a ractor
 * pragma or not.
 */
static void
pm_compile_constant_path_and_write_node(rb_iseq_t *iseq, const pm_constant_path_and_write_node_t *node, const pm_node_flags_t shareability, const pm_line_column_t *node_location, LINK_ANCHOR *const ret, bool popped, pm_scope_node_t *scope_node)
{
    const pm_line_column_t location = *node_location;
    const pm_constant_path_node_t *target = node->target;

    VALUE name = ID2SYM(pm_constant_id_lookup(scope_node, target->name));
    LABEL *lfin = NEW_LABEL(location.line);

    // Push the owning module (parent expression or Object for ::Foo).
    if (target->parent) {
        PM_COMPILE_NOT_POPPED(target->parent);
    }
    else {
        PUSH_INSN1(ret, location, putobject, rb_cObject);
    }

    // Read the current value; `&&=` only assigns when it is truthy.
    PUSH_INSN(ret, location, dup);
    PUSH_INSN1(ret, location, putobject, Qtrue);
    PUSH_INSN1(ret, location, getconstant, name);

    if (!popped) PUSH_INSN(ret, location, dup);
    PUSH_INSNL(ret, location, branchunless, lfin);

    if (!popped) PUSH_INSN(ret, location, pop);

    if (shareability != 0) {
        pm_compile_shareable_constant_value(iseq, node->value, shareability, pm_constant_path_path(node->target, scope_node), ret, scope_node, true);
    }
    else {
        PM_COMPILE_NOT_POPPED(node->value);
    }

    // Arrange the stack so that setconstant sees [value, module], preserving
    // the result value when not popped.
    if (popped) {
        PUSH_INSN1(ret, location, topn, INT2FIX(1));
    }
    else {
        PUSH_INSN1(ret, location, dupn, INT2FIX(2));
        PUSH_INSN(ret, location, swap);
    }

    PUSH_INSN1(ret, location, setconstant, name);
    PUSH_LABEL(ret, lfin);

    if (!popped) PUSH_INSN(ret, location, swap);
    PUSH_INSN(ret, location, pop);
}
/**
 * Compile a constant path or write node, either in the context of a ractor
 * pragma or not.
 */
static void
pm_compile_constant_path_or_write_node(rb_iseq_t *iseq, const pm_constant_path_or_write_node_t *node, const pm_node_flags_t shareability, const pm_line_column_t *node_location, LINK_ANCHOR *const ret, bool popped, pm_scope_node_t *scope_node)
{
    const pm_line_column_t location = *node_location;
    const pm_constant_path_node_t *target = node->target;

    VALUE name = ID2SYM(pm_constant_id_lookup(scope_node, target->name));
    LABEL *lassign = NEW_LABEL(location.line);
    LABEL *lfin = NEW_LABEL(location.line);

    // Push the owning module (parent expression or Object for ::Foo).
    if (target->parent) {
        PM_COMPILE_NOT_POPPED(target->parent);
    }
    else {
        PUSH_INSN1(ret, location, putobject, rb_cObject);
    }

    // `||=` first checks defined?(Mod::CONST) so an undefined constant does
    // not raise; if undefined, jump straight to the assignment.
    PUSH_INSN(ret, location, dup);
    PUSH_INSN3(ret, location, defined, INT2FIX(DEFINED_CONST_FROM), name, Qtrue);
    PUSH_INSNL(ret, location, branchunless, lassign);

    // Defined: read the value and keep it if truthy.
    PUSH_INSN(ret, location, dup);
    PUSH_INSN1(ret, location, putobject, Qtrue);
    PUSH_INSN1(ret, location, getconstant, name);

    if (!popped) PUSH_INSN(ret, location, dup);
    PUSH_INSNL(ret, location, branchif, lfin);

    if (!popped) PUSH_INSN(ret, location, pop);
    PUSH_LABEL(ret, lassign);

    if (shareability != 0) {
        pm_compile_shareable_constant_value(iseq, node->value, shareability, pm_constant_path_path(node->target, scope_node), ret, scope_node, true);
    }
    else {
        PM_COMPILE_NOT_POPPED(node->value);
    }

    // Arrange the stack so that setconstant sees [value, module], preserving
    // the result value when not popped.
    if (popped) {
        PUSH_INSN1(ret, location, topn, INT2FIX(1));
    }
    else {
        PUSH_INSN1(ret, location, dupn, INT2FIX(2));
        PUSH_INSN(ret, location, swap);
    }

    PUSH_INSN1(ret, location, setconstant, name);
    PUSH_LABEL(ret, lfin);

    if (!popped) PUSH_INSN(ret, location, swap);
    PUSH_INSN(ret, location, pop);
}
/**
 * Compile a constant path operator write node (e.g. `Mod::CONST += value`),
 * either in the context of a ractor pragma or not.
 */
static void
pm_compile_constant_path_operator_write_node(rb_iseq_t *iseq, const pm_constant_path_operator_write_node_t *node, const pm_node_flags_t shareability, const pm_line_column_t *node_location, LINK_ANCHOR *const ret, bool popped, pm_scope_node_t *scope_node)
{
    const pm_line_column_t location = *node_location;
    const pm_constant_path_node_t *target = node->target;

    ID method_id = pm_constant_id_lookup(scope_node, node->binary_operator);
    VALUE name = ID2SYM(pm_constant_id_lookup(scope_node, target->name));

    // Push the owning module (parent expression or Object for ::Foo).
    if (target->parent) {
        PM_COMPILE_NOT_POPPED(target->parent);
    }
    else {
        PUSH_INSN1(ret, location, putobject, rb_cObject);
    }

    // Read the current value as the receiver of the binary operator.
    PUSH_INSN(ret, location, dup);
    PUSH_INSN1(ret, location, putobject, Qtrue);
    PUSH_INSN1(ret, location, getconstant, name);

    if (shareability != 0) {
        pm_compile_shareable_constant_value(iseq, node->value, shareability, pm_constant_path_path(node->target, scope_node), ret, scope_node, true);
    }
    else {
        PM_COMPILE_NOT_POPPED(node->value);
    }

    // Apply the operator, then arrange [value, module] for setconstant,
    // preserving the result value when not popped.
    PUSH_CALL(ret, location, method_id, INT2FIX(1));
    PUSH_INSN(ret, location, swap);

    if (!popped) {
        PUSH_INSN1(ret, location, topn, INT2FIX(1));
        PUSH_INSN(ret, location, swap);
    }

    PUSH_INSN1(ret, location, setconstant, name);
}
5056 * Compiles a prism node into instruction sequences.
5058 * iseq - The current instruction sequence object (used for locals)
5059 * node - The prism node to compile
5060 * ret - The linked list of instructions to append instructions onto
5061 * popped - True if compiling something with no side effects, so instructions don't
5062 * need to be added
5063 * scope_node - Stores parser and local information
5065 static void
5066 pm_compile_node(rb_iseq_t *iseq, const pm_node_t *node, LINK_ANCHOR *const ret, bool popped, pm_scope_node_t *scope_node)
5068 const pm_parser_t *parser = scope_node->parser;
5069 const pm_line_column_t location = PM_NODE_START_LINE_COLUMN(parser, node);
5070 int lineno = (int) location.line;
5072 if (!PM_NODE_TYPE_P(node, PM_RETURN_NODE) || !PM_NODE_FLAG_P(node, PM_RETURN_NODE_FLAGS_REDUNDANT) || ((const pm_return_node_t *) node)->arguments != NULL) {
5073 if (PM_NODE_FLAG_P(node, PM_NODE_FLAG_NEWLINE) && ISEQ_COMPILE_DATA(iseq)->last_line != lineno) {
5074 int event = RUBY_EVENT_LINE;
5076 ISEQ_COMPILE_DATA(iseq)->last_line = lineno;
5077 if (ISEQ_COVERAGE(iseq) && ISEQ_LINE_COVERAGE(iseq)) {
5078 event |= RUBY_EVENT_COVERAGE_LINE;
5080 PUSH_TRACE(ret, event);
5084 switch (PM_NODE_TYPE(node)) {
5085 case PM_ALIAS_GLOBAL_VARIABLE_NODE: {
5086 // alias $foo $bar
5087 // ^^^^^^^^^^^^^^^
5088 const pm_alias_global_variable_node_t *cast = (const pm_alias_global_variable_node_t *) node;
5089 PUSH_INSN1(ret, location, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
5091 const pm_location_t *new_name_loc = &cast->new_name->location;
5092 PUSH_INSN1(ret, location, putobject, ID2SYM(rb_intern3((const char *) new_name_loc->start, new_name_loc->end - new_name_loc->start, scope_node->encoding)));
5094 const pm_location_t *old_name_loc = &cast->old_name->location;
5095 PUSH_INSN1(ret, location, putobject, ID2SYM(rb_intern3((const char *) old_name_loc->start, old_name_loc->end - old_name_loc->start, scope_node->encoding)));
5097 PUSH_SEND(ret, location, id_core_set_variable_alias, INT2FIX(2));
5098 if (popped) PUSH_INSN(ret, location, pop);
5100 return;
5102 case PM_ALIAS_METHOD_NODE: {
5103 // alias foo bar
5104 // ^^^^^^^^^^^^^
5105 const pm_alias_method_node_t *cast = (const pm_alias_method_node_t *) node;
5107 PUSH_INSN1(ret, location, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
5108 PUSH_INSN1(ret, location, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_CBASE));
5109 PM_COMPILE_NOT_POPPED(cast->new_name);
5110 PM_COMPILE_NOT_POPPED(cast->old_name);
5112 PUSH_SEND(ret, location, id_core_set_method_alias, INT2FIX(3));
5113 if (popped) PUSH_INSN(ret, location, pop);
5115 return;
5117 case PM_AND_NODE: {
5118 // a and b
5119 // ^^^^^^^
5120 const pm_and_node_t *cast = (const pm_and_node_t *) node;
5121 LABEL *end_label = NEW_LABEL(lineno);
5123 PM_COMPILE_NOT_POPPED(cast->left);
5124 if (!popped) PUSH_INSN(ret, location, dup);
5125 PUSH_INSNL(ret, location, branchunless, end_label);
5127 if (!popped) PUSH_INSN(ret, location, pop);
5128 PM_COMPILE(cast->right);
5129 PUSH_LABEL(ret, end_label);
5131 return;
5133 case PM_ARGUMENTS_NODE:
5134 // These are ArgumentsNodes that are not compiled directly by their
5135 // parent call nodes, used in the cases of NextNodes, ReturnNodes, and
5136 // BreakNodes. They can create an array like ArrayNode.
5137 case PM_ARRAY_NODE: {
5138 const pm_node_list_t *elements;
5140 if (PM_NODE_TYPE(node) == PM_ARGUMENTS_NODE) {
5141 // break foo
5142 // ^^^
5143 const pm_arguments_node_t *cast = (const pm_arguments_node_t *) node;
5144 elements = &cast->arguments;
5146 // If we are only returning a single element through one of the jump
5147 // nodes, then we will only compile that node directly.
5148 if (elements->size == 1) {
5149 PM_COMPILE(elements->nodes[0]);
5150 return;
5153 else {
5154 // [foo, bar, baz]
5155 // ^^^^^^^^^^^^^^^
5156 const pm_array_node_t *cast = (const pm_array_node_t *) node;
5157 elements = &cast->elements;
5160 // If every node in the array is static, then we can compile the entire
5161 // array now instead of later.
5162 if (PM_NODE_FLAG_P(node, PM_NODE_FLAG_STATIC_LITERAL)) {
5163 // We're only going to compile this node if it's not popped. If it
5164 // is popped, then we know we don't need to do anything since it's
5165 // statically known.
5166 if (!popped) {
5167 if (elements->size) {
5168 VALUE value = pm_static_literal_value(iseq, node, scope_node);
5169 PUSH_INSN1(ret, location, duparray, value);
5171 else {
5172 PUSH_INSN1(ret, location, newarray, INT2FIX(0));
5176 else {
5177 // Here since we know there are possible side-effects inside the
5178 // array contents, we're going to build it entirely at runtime.
5179 // We'll do this by pushing all of the elements onto the stack and
5180 // then combining them with newarray.
5182 // If this array is popped, then this serves only to ensure we enact
5183 // all side-effects (like method calls) that are contained within
5184 // the array contents.
5186 // We treat all sequences of non-splat elements as their
5187 // own arrays, followed by a newarray, and then continually
5188 // concat the arrays with the SplatNode nodes.
5189 int new_array_size = 0;
5191 bool need_to_concat_array = false;
5192 bool has_kw_splat = false;
5194 for (size_t index = 0; index < elements->size; index++) {
5195 const pm_node_t *element = elements->nodes[index];
5197 if (PM_NODE_TYPE_P(element, PM_SPLAT_NODE)) {
5198 const pm_splat_node_t *splat_element = (const pm_splat_node_t *) element;
5200 // If we already have non-splat elements, we need to emit a
5201 // newarray instruction.
5202 if (new_array_size > 0) {
5203 PUSH_INSN1(ret, location, newarray, INT2FIX(new_array_size));
5204 new_array_size = 0;
5206 // We don't want to emit a concat array in the case
5207 // where we're seeing our first splat, and already have
5208 // elements.
5209 if (need_to_concat_array) PUSH_INSN(ret, location, concatarray);
5212 if (splat_element->expression) {
5213 PM_COMPILE_NOT_POPPED(splat_element->expression);
5215 else {
5216 pm_local_index_t index = pm_lookup_local_index(iseq, scope_node, PM_CONSTANT_MULT, 0);
5217 PUSH_GETLOCAL(ret, location, index.index, index.level);
5220 if (index > 0) {
5221 PUSH_INSN(ret, location, concatarray);
5223 else {
5224 // If this is the first element of the array then we
5225 // need to splatarray the elements into the list.
5226 PUSH_INSN1(ret, location, splatarray, Qtrue);
5229 // Since we have now seen a splat and are concat-ing arrays,
5230 // all subsequent splats will need to concat as well.
5231 need_to_concat_array = true;
5233 else if (PM_NODE_TYPE_P(element, PM_KEYWORD_HASH_NODE)) {
5234 new_array_size++;
5235 has_kw_splat = true;
5236 pm_compile_hash_elements(iseq, element, &((const pm_keyword_hash_node_t *) element)->elements, ret, scope_node);
5238 else {
5239 new_array_size++;
5240 PM_COMPILE_NOT_POPPED(element);
5244 if (new_array_size) {
5245 if (has_kw_splat) {
5246 PUSH_INSN1(ret, location, newarraykwsplat, INT2FIX(new_array_size));
5248 else {
5249 PUSH_INSN1(ret, location, newarray, INT2FIX(new_array_size));
5252 if (need_to_concat_array) PUSH_INSN(ret, location, concatarray);
5255 if (popped) PUSH_INSN(ret, location, pop);
5257 return;
5259 case PM_ASSOC_NODE: {
5260 // { foo: 1 }
5261 // ^^^^^^
5263 // foo(bar: 1)
5264 // ^^^^^^
5265 const pm_assoc_node_t *cast = (const pm_assoc_node_t *) node;
5267 PM_COMPILE(cast->key);
5268 PM_COMPILE(cast->value);
5270 return;
5272 case PM_ASSOC_SPLAT_NODE: {
5273 // { **foo }
5274 // ^^^^^
5276 // def foo(**); bar(**); end
5277 // ^^
5278 const pm_assoc_splat_node_t *cast = (const pm_assoc_splat_node_t *) node;
5280 if (cast->value != NULL) {
5281 PM_COMPILE(cast->value);
5283 else if (!popped) {
5284 pm_local_index_t index = pm_lookup_local_index(iseq, scope_node, PM_CONSTANT_POW, 0);
5285 PUSH_GETLOCAL(ret, location, index.index, index.level);
5288 return;
5290 case PM_BACK_REFERENCE_READ_NODE: {
5291 // $+
5292 // ^^
5293 if (!popped) {
5294 // Since a back reference is `$<char>`, ruby represents the ID as the
5295 // an rb_intern on the value after the `$`.
5296 char *char_ptr = (char *)(node->location.start) + 1;
5297 ID backref_val = INT2FIX(rb_intern2(char_ptr, 1)) << 1 | 1;
5298 PUSH_INSN2(ret, location, getspecial, INT2FIX(1), backref_val);
5300 return;
5302 case PM_BEGIN_NODE: {
5303 // begin end
5304 // ^^^^^^^^^
5305 const pm_begin_node_t *cast = (const pm_begin_node_t *) node;
5307 if (cast->ensure_clause) {
5308 // Compiling the ensure clause will compile the rescue clause (if
5309 // there is one), which will compile the begin statements.
5310 pm_compile_ensure(iseq, cast, &location, ret, popped, scope_node);
5312 else if (cast->rescue_clause) {
5313 // Compiling rescue will compile begin statements (if applicable).
5314 pm_compile_rescue(iseq, cast, &location, ret, popped, scope_node);
5316 else {
5317 // If there is neither ensure or rescue, the just compile the
5318 // statements.
5319 if (cast->statements != NULL) {
5320 PM_COMPILE((const pm_node_t *) cast->statements);
5322 else if (!popped) {
5323 PUSH_INSN(ret, location, putnil);
5326 return;
5328 case PM_BLOCK_ARGUMENT_NODE: {
5329 // foo(&bar)
5330 // ^^^^
5331 const pm_block_argument_node_t *cast = (const pm_block_argument_node_t *) node;
5333 if (cast->expression != NULL) {
5334 PM_COMPILE(cast->expression);
5336 else {
5337 // If there's no expression, this must be block forwarding.
5338 pm_local_index_t local_index = pm_lookup_local_index(iseq, scope_node, PM_CONSTANT_AND, 0);
5339 PUSH_INSN2(ret, location, getblockparamproxy, INT2FIX(local_index.index + VM_ENV_DATA_SIZE - 1), INT2FIX(local_index.level));
5341 return;
5343 case PM_BREAK_NODE: {
5344 // break
5345 // ^^^^^
5347 // break foo
5348 // ^^^^^^^^^
5349 const pm_break_node_t *cast = (const pm_break_node_t *) node;
5350 unsigned long throw_flag = 0;
5352 if (ISEQ_COMPILE_DATA(iseq)->redo_label != 0 && can_add_ensure_iseq(iseq)) {
5353 /* while/until */
5354 LABEL *splabel = NEW_LABEL(0);
5355 PUSH_LABEL(ret, splabel);
5356 PUSH_ADJUST(ret, location, ISEQ_COMPILE_DATA(iseq)->redo_label);
5358 if (cast->arguments != NULL) {
5359 PM_COMPILE_NOT_POPPED((const pm_node_t *) cast->arguments);
5361 else {
5362 PUSH_INSN(ret, location, putnil);
5365 pm_add_ensure_iseq(ret, iseq, 0, scope_node);
5366 PUSH_INSNL(ret, location, jump, ISEQ_COMPILE_DATA(iseq)->end_label);
5367 PUSH_ADJUST_RESTORE(ret, splabel);
5368 if (!popped) PUSH_INSN(ret, location, putnil);
5370 else {
5371 const rb_iseq_t *ip = iseq;
5373 while (ip) {
5374 if (!ISEQ_COMPILE_DATA(ip)) {
5375 ip = 0;
5376 break;
5379 if (ISEQ_COMPILE_DATA(ip)->redo_label != 0) {
5380 throw_flag = VM_THROW_NO_ESCAPE_FLAG;
5382 else if (ISEQ_BODY(ip)->type == ISEQ_TYPE_BLOCK) {
5383 throw_flag = 0;
5385 else if (ISEQ_BODY(ip)->type == ISEQ_TYPE_EVAL) {
5386 COMPILE_ERROR(ERROR_ARGS "Can't escape from eval with break");
5387 return;
5389 else {
5390 ip = ISEQ_BODY(ip)->parent_iseq;
5391 continue;
5394 /* escape from block */
5395 if (cast->arguments != NULL) {
5396 PM_COMPILE_NOT_POPPED((const pm_node_t *) cast->arguments);
5398 else {
5399 PUSH_INSN(ret, location, putnil);
5402 PUSH_INSN1(ret, location, throw, INT2FIX(throw_flag | TAG_BREAK));
5403 if (popped) PUSH_INSN(ret, location, pop);
5405 return;
5408 COMPILE_ERROR(ERROR_ARGS "Invalid break");
5409 rb_bug("Invalid break");
5411 return;
5413 case PM_CALL_NODE: {
5414 // foo
5415 // ^^^
5417 // foo.bar
5418 // ^^^^^^^
5420 // foo.bar() {}
5421 // ^^^^^^^^^^^^
5422 const pm_call_node_t *cast = (const pm_call_node_t *) node;
5423 LABEL *start = NEW_LABEL(location.line);
5425 if (cast->block) {
5426 PUSH_LABEL(ret, start);
5429 ID method_id = pm_constant_id_lookup(scope_node, cast->name);
5431 switch (method_id) {
5432 case idUMinus: {
5433 if (pm_opt_str_freeze_p(iseq, cast)) {
5434 VALUE value = parse_static_literal_string(iseq, scope_node, cast->receiver, &((const pm_string_node_t * ) cast->receiver)->unescaped);
5435 PUSH_INSN2(ret, location, opt_str_uminus, value, new_callinfo(iseq, idUMinus, 0, 0, NULL, FALSE));
5436 return;
5438 break;
5440 case idFreeze: {
5441 if (pm_opt_str_freeze_p(iseq, cast)) {
5442 VALUE value = parse_static_literal_string(iseq, scope_node, cast->receiver, &((const pm_string_node_t * ) cast->receiver)->unescaped);
5443 PUSH_INSN2(ret, location, opt_str_freeze, value, new_callinfo(iseq, idFreeze, 0, 0, NULL, FALSE));
5444 return;
5446 break;
5448 case idAREF: {
5449 if (pm_opt_aref_with_p(iseq, cast)) {
5450 const pm_string_node_t *string = (const pm_string_node_t *) ((const pm_arguments_node_t *) cast->arguments)->arguments.nodes[0];
5451 VALUE value = parse_static_literal_string(iseq, scope_node, (const pm_node_t *) string, &string->unescaped);
5453 PM_COMPILE_NOT_POPPED(cast->receiver);
5454 PUSH_INSN2(ret, location, opt_aref_with, value, new_callinfo(iseq, idAREF, 1, 0, NULL, FALSE));
5456 if (popped) {
5457 PUSH_INSN(ret, location, pop);
5460 return;
5462 break;
5464 case idASET: {
5465 if (pm_opt_aset_with_p(iseq, cast)) {
5466 const pm_string_node_t *string = (const pm_string_node_t *) ((const pm_arguments_node_t *) cast->arguments)->arguments.nodes[0];
5467 VALUE value = parse_static_literal_string(iseq, scope_node, (const pm_node_t *) string, &string->unescaped);
5469 PM_COMPILE_NOT_POPPED(cast->receiver);
5470 PM_COMPILE_NOT_POPPED(((const pm_arguments_node_t *) cast->arguments)->arguments.nodes[1]);
5472 if (!popped) {
5473 PUSH_INSN(ret, location, swap);
5474 PUSH_INSN1(ret, location, topn, INT2FIX(1));
5477 PUSH_INSN2(ret, location, opt_aset_with, value, new_callinfo(iseq, idASET, 2, 0, NULL, FALSE));
5478 PUSH_INSN(ret, location, pop);
5479 return;
5481 break;
5485 if (PM_NODE_FLAG_P(cast, PM_CALL_NODE_FLAGS_ATTRIBUTE_WRITE) && !popped) {
5486 PUSH_INSN(ret, location, putnil);
5489 if (cast->receiver == NULL) {
5490 PUSH_INSN(ret, location, putself);
5492 else {
5493 PM_COMPILE_NOT_POPPED(cast->receiver);
5496 pm_compile_call(iseq, cast, ret, popped, scope_node, method_id, start);
5497 return;
5499 case PM_CALL_AND_WRITE_NODE: {
5500 // foo.bar &&= baz
5501 // ^^^^^^^^^^^^^^^
5502 const pm_call_and_write_node_t *cast = (const pm_call_and_write_node_t *) node;
5503 pm_compile_call_and_or_write_node(iseq, true, cast->receiver, cast->value, cast->write_name, cast->read_name, PM_NODE_FLAG_P(cast, PM_CALL_NODE_FLAGS_SAFE_NAVIGATION), &location, ret, popped, scope_node);
5504 return;
5506 case PM_CALL_OR_WRITE_NODE: {
5507 // foo.bar ||= baz
5508 // ^^^^^^^^^^^^^^^
5509 const pm_call_or_write_node_t *cast = (const pm_call_or_write_node_t *) node;
5510 pm_compile_call_and_or_write_node(iseq, false, cast->receiver, cast->value, cast->write_name, cast->read_name, PM_NODE_FLAG_P(cast, PM_CALL_NODE_FLAGS_SAFE_NAVIGATION), &location, ret, popped, scope_node);
5511 return;
5513 case PM_CALL_OPERATOR_WRITE_NODE: {
5514 // foo.bar += baz
5515 // ^^^^^^^^^^^^^^^
5517 // Call operator writes occur when you have a call node on the left-hand
5518 // side of a write operator that is not `=`. As an example,
5519 // `foo.bar *= 1`. This breaks down to caching the receiver on the
5520 // stack and then performing three method calls, one to read the value,
5521 // one to compute the result, and one to write the result back to the
5522 // receiver.
5523 const pm_call_operator_write_node_t *cast = (const pm_call_operator_write_node_t *) node;
5524 int flag = 0;
5526 if (PM_NODE_FLAG_P(cast, PM_CALL_NODE_FLAGS_IGNORE_VISIBILITY)) {
5527 flag = VM_CALL_FCALL;
5530 PM_COMPILE_NOT_POPPED(cast->receiver);
5532 LABEL *safe_label = NULL;
5533 if (PM_NODE_FLAG_P(cast, PM_CALL_NODE_FLAGS_SAFE_NAVIGATION)) {
5534 safe_label = NEW_LABEL(location.line);
5535 PUSH_INSN(ret, location, dup);
5536 PUSH_INSNL(ret, location, branchnil, safe_label);
5539 PUSH_INSN(ret, location, dup);
5541 ID id_read_name = pm_constant_id_lookup(scope_node, cast->read_name);
5542 PUSH_SEND_WITH_FLAG(ret, location, id_read_name, INT2FIX(0), INT2FIX(flag));
5544 PM_COMPILE_NOT_POPPED(cast->value);
5545 ID id_operator = pm_constant_id_lookup(scope_node, cast->binary_operator);
5546 PUSH_SEND(ret, location, id_operator, INT2FIX(1));
5548 if (!popped) {
5549 PUSH_INSN(ret, location, swap);
5550 PUSH_INSN1(ret, location, topn, INT2FIX(1));
5553 ID id_write_name = pm_constant_id_lookup(scope_node, cast->write_name);
5554 PUSH_SEND_WITH_FLAG(ret, location, id_write_name, INT2FIX(1), INT2FIX(flag));
5556 if (safe_label != NULL && popped) PUSH_LABEL(ret, safe_label);
5557 PUSH_INSN(ret, location, pop);
5558 if (safe_label != NULL && !popped) PUSH_LABEL(ret, safe_label);
5560 return;
5562 case PM_CASE_NODE: {
5563 // case foo; when bar; end
5564 // ^^^^^^^^^^^^^^^^^^^^^^^
5565 const pm_case_node_t *cast = (const pm_case_node_t *) node;
5566 const pm_node_list_t *conditions = &cast->conditions;
5568 // This is the anchor that we will compile the conditions of the various
5569 // `when` nodes into. If a match is found, they will need to jump into
5570 // the body_seq anchor to the correct spot.
5571 DECL_ANCHOR(cond_seq);
5572 INIT_ANCHOR(cond_seq);
5574 // This is the anchor that we will compile the bodies of the various
5575 // `when` nodes into. We'll make sure that the clauses that are compiled
5576 // jump into the correct spots within this anchor.
5577 DECL_ANCHOR(body_seq);
5578 INIT_ANCHOR(body_seq);
5580 // This is the label where all of the when clauses will jump to if they
5581 // have matched and are done executing their bodies.
5582 LABEL *end_label = NEW_LABEL(location.line);
5584 // If we have a predicate on this case statement, then it's going to
5585 // compare all of the various when clauses to the predicate. If we
5586 // don't, then it's basically an if-elsif-else chain.
5587 if (cast->predicate == NULL) {
5588 // Establish branch coverage for the case node.
5589 VALUE branches = Qfalse;
5590 rb_code_location_t case_location = { 0 };
5591 int branch_id = 0;
5593 if (PM_BRANCH_COVERAGE_P(iseq)) {
5594 case_location = pm_code_location(scope_node, (const pm_node_t *) cast);
5595 branches = decl_branch_base(iseq, PTR2NUM(cast), &case_location, "case");
5598 // Loop through each clauses in the case node and compile each of
5599 // the conditions within them into cond_seq. If they match, they
5600 // should jump into their respective bodies in body_seq.
5601 for (size_t clause_index = 0; clause_index < conditions->size; clause_index++) {
5602 const pm_when_node_t *clause = (const pm_when_node_t *) conditions->nodes[clause_index];
5603 const pm_node_list_t *conditions = &clause->conditions;
5605 int clause_lineno = pm_node_line_number(parser, (const pm_node_t *) clause);
5606 LABEL *label = NEW_LABEL(clause_lineno);
5607 PUSH_LABEL(body_seq, label);
5609 // Establish branch coverage for the when clause.
5610 if (PM_BRANCH_COVERAGE_P(iseq)) {
5611 rb_code_location_t branch_location = pm_code_location(scope_node, clause->statements != NULL ? ((const pm_node_t *) clause->statements) : ((const pm_node_t *) clause));
5612 add_trace_branch_coverage(iseq, body_seq, &branch_location, branch_location.beg_pos.column, branch_id++, "when", branches);
5615 if (clause->statements != NULL) {
5616 pm_compile_node(iseq, (const pm_node_t *) clause->statements, body_seq, popped, scope_node);
5618 else if (!popped) {
5619 PUSH_INSN(body_seq, location, putnil);
5622 PUSH_INSNL(body_seq, location, jump, end_label);
5624 // Compile each of the conditions for the when clause into the
5625 // cond_seq. Each one should have a unique condition and should
5626 // jump to the subsequent one if it doesn't match.
5627 for (size_t condition_index = 0; condition_index < conditions->size; condition_index++) {
5628 const pm_node_t *condition = conditions->nodes[condition_index];
5630 if (PM_NODE_TYPE_P(condition, PM_SPLAT_NODE)) {
5631 pm_line_column_t cond_location = PM_NODE_START_LINE_COLUMN(parser, condition);
5632 PUSH_INSN(cond_seq, cond_location, putnil);
5633 pm_compile_node(iseq, condition, cond_seq, false, scope_node);
5634 PUSH_INSN1(cond_seq, cond_location, checkmatch, INT2FIX(VM_CHECKMATCH_TYPE_WHEN | VM_CHECKMATCH_ARRAY));
5635 PUSH_INSNL(cond_seq, cond_location, branchif, label);
5637 else {
5638 LABEL *next_label = NEW_LABEL(pm_node_line_number(parser, condition));
5639 pm_compile_branch_condition(iseq, cond_seq, condition, label, next_label, false, scope_node);
5640 PUSH_LABEL(cond_seq, next_label);
5645 // Establish branch coverage for the else clause (implicit or
5646 // explicit).
5647 if (PM_BRANCH_COVERAGE_P(iseq)) {
5648 rb_code_location_t branch_location;
5650 if (cast->consequent == NULL) {
5651 branch_location = case_location;
5652 } else if (cast->consequent->statements == NULL) {
5653 branch_location = pm_code_location(scope_node, (const pm_node_t *) cast->consequent);
5654 } else {
5655 branch_location = pm_code_location(scope_node, (const pm_node_t *) cast->consequent->statements);
5658 add_trace_branch_coverage(iseq, cond_seq, &branch_location, branch_location.beg_pos.column, branch_id, "else", branches);
5661 // Compile the consequent else clause if there is one.
5662 if (cast->consequent != NULL) {
5663 pm_compile_node(iseq, (const pm_node_t *) cast->consequent, cond_seq, popped, scope_node);
5665 else if (!popped) {
5666 PUSH_SYNTHETIC_PUTNIL(cond_seq, iseq);
5669 // Finally, jump to the end label if none of the other conditions
5670 // have matched.
5671 PUSH_INSNL(cond_seq, location, jump, end_label);
5672 PUSH_SEQ(ret, cond_seq);
5674 else {
5675 // Establish branch coverage for the case node.
5676 VALUE branches = Qfalse;
5677 rb_code_location_t case_location = { 0 };
5678 int branch_id = 0;
5680 if (PM_BRANCH_COVERAGE_P(iseq)) {
5681 case_location = pm_code_location(scope_node, (const pm_node_t *) cast);
5682 branches = decl_branch_base(iseq, PTR2NUM(cast), &case_location, "case");
5685 // This is the label where everything will fall into if none of the
5686 // conditions matched.
5687 LABEL *else_label = NEW_LABEL(location.line);
5689 // It's possible for us to speed up the case node by using a
5690 // dispatch hash. This is a hash that maps the conditions of the
5691 // various when clauses to the labels of their bodies. If we can
5692 // compile the conditions into a hash key, then we can use a hash
5693 // lookup to jump directly to the correct when clause body.
5694 VALUE dispatch = Qundef;
5695 if (ISEQ_COMPILE_DATA(iseq)->option->specialized_instruction) {
5696 dispatch = rb_hash_new();
5697 RHASH_TBL_RAW(dispatch)->type = &cdhash_type;
5700 // We're going to loop through each of the conditions in the case
5701 // node and compile each of their contents into both the cond_seq
5702 // and the body_seq. Each condition will use its own label to jump
5703 // from its conditions into its body.
5705 // Note that none of the code in the loop below should be adding
5706 // anything to ret, as we're going to be laying out the entire case
5707 // node instructions later.
5708 for (size_t clause_index = 0; clause_index < conditions->size; clause_index++) {
5709 const pm_when_node_t *clause = (const pm_when_node_t *) conditions->nodes[clause_index];
5710 pm_line_column_t clause_location = PM_NODE_START_LINE_COLUMN(parser, (const pm_node_t *) clause);
5712 const pm_node_list_t *conditions = &clause->conditions;
5713 LABEL *label = NEW_LABEL(clause_location.line);
5715 // Compile each of the conditions for the when clause into the
5716 // cond_seq. Each one should have a unique comparison that then
5717 // jumps into the body if it matches.
5718 for (size_t condition_index = 0; condition_index < conditions->size; condition_index++) {
5719 const pm_node_t *condition = conditions->nodes[condition_index];
5720 const pm_line_column_t condition_location = PM_NODE_START_LINE_COLUMN(parser, condition);
5722 // If we haven't already abandoned the optimization, then
5723 // we're going to try to compile the condition into the
5724 // dispatch hash.
5725 if (dispatch != Qundef) {
5726 dispatch = pm_compile_case_node_dispatch(iseq, dispatch, condition, label, scope_node);
5729 if (PM_NODE_TYPE_P(condition, PM_SPLAT_NODE)) {
5730 PUSH_INSN(cond_seq, condition_location, dup);
5731 pm_compile_node(iseq, condition, cond_seq, false, scope_node);
5732 PUSH_INSN1(cond_seq, condition_location, checkmatch, INT2FIX(VM_CHECKMATCH_TYPE_CASE | VM_CHECKMATCH_ARRAY));
5734 else {
5735 if (PM_NODE_TYPE_P(condition, PM_STRING_NODE)) {
5736 const pm_string_node_t *string = (const pm_string_node_t *) condition;
5737 VALUE value = parse_static_literal_string(iseq, scope_node, condition, &string->unescaped);
5738 PUSH_INSN1(cond_seq, condition_location, putobject, value);
5740 else {
5741 pm_compile_node(iseq, condition, cond_seq, false, scope_node);
5744 PUSH_INSN1(cond_seq, condition_location, topn, INT2FIX(1));
5745 PUSH_SEND_WITH_FLAG(cond_seq, condition_location, idEqq, INT2NUM(1), INT2FIX(VM_CALL_FCALL | VM_CALL_ARGS_SIMPLE));
5748 PUSH_INSNL(cond_seq, condition_location, branchif, label);
5751 // Now, add the label to the body and compile the body of the
5752 // when clause. This involves popping the predicate, compiling
5753 // the statements to be executed, and then compiling a jump to
5754 // the end of the case node.
5755 PUSH_LABEL(body_seq, label);
5756 PUSH_INSN(body_seq, clause_location, pop);
5758 // Establish branch coverage for the when clause.
5759 if (PM_BRANCH_COVERAGE_P(iseq)) {
5760 rb_code_location_t branch_location = pm_code_location(scope_node, clause->statements != NULL ? ((const pm_node_t *) clause->statements) : ((const pm_node_t *) clause));
5761 add_trace_branch_coverage(iseq, body_seq, &branch_location, branch_location.beg_pos.column, branch_id++, "when", branches);
5764 if (clause->statements != NULL) {
5765 pm_compile_node(iseq, (const pm_node_t *) clause->statements, body_seq, popped, scope_node);
5767 else if (!popped) {
5768 PUSH_INSN(body_seq, clause_location, putnil);
5771 PUSH_INSNL(body_seq, clause_location, jump, end_label);
5774 // Now that we have compiled the conditions and the bodies of the
5775 // various when clauses, we can compile the predicate, lay out the
5776 // conditions, compile the fallback consequent if there is one, and
5777 // finally put in the bodies of the when clauses.
5778 PM_COMPILE_NOT_POPPED(cast->predicate);
5780 // If we have a dispatch hash, then we'll use it here to create the
5781 // optimization.
5782 if (dispatch != Qundef) {
5783 PUSH_INSN(ret, location, dup);
5784 PUSH_INSN2(ret, location, opt_case_dispatch, dispatch, else_label);
5785 LABEL_REF(else_label);
5788 PUSH_SEQ(ret, cond_seq);
5790 // Compile either the explicit else clause or an implicit else
5791 // clause.
5792 PUSH_LABEL(ret, else_label);
5794 if (cast->consequent != NULL) {
5795 pm_line_column_t else_location = PM_NODE_START_LINE_COLUMN(parser, cast->consequent->statements != NULL ? ((const pm_node_t *) cast->consequent->statements) : ((const pm_node_t *) cast->consequent));
5796 PUSH_INSN(ret, else_location, pop);
5798 // Establish branch coverage for the else clause.
5799 if (PM_BRANCH_COVERAGE_P(iseq)) {
5800 rb_code_location_t branch_location = pm_code_location(scope_node, cast->consequent->statements != NULL ? ((const pm_node_t *) cast->consequent->statements) : ((const pm_node_t *) cast->consequent));
5801 add_trace_branch_coverage(iseq, ret, &branch_location, branch_location.beg_pos.column, branch_id, "else", branches);
5804 PM_COMPILE((const pm_node_t *) cast->consequent);
5805 PUSH_INSNL(ret, else_location, jump, end_label);
5807 else {
5808 PUSH_INSN(ret, location, pop);
5810 // Establish branch coverage for the implicit else clause.
5811 if (PM_BRANCH_COVERAGE_P(iseq)) {
5812 add_trace_branch_coverage(iseq, ret, &case_location, case_location.beg_pos.column, branch_id, "else", branches);
5815 if (!popped) PUSH_INSN(ret, location, putnil);
5816 PUSH_INSNL(ret, location, jump, end_label);
5820 PUSH_SEQ(ret, body_seq);
5821 PUSH_LABEL(ret, end_label);
5823 return;
5825 case PM_CASE_MATCH_NODE: {
5826 // case foo; in bar; end
5827 // ^^^^^^^^^^^^^^^^^^^^^
5829 // If you use the `case` keyword to create a case match node, it will
5830 // match against all of the `in` clauses until it finds one that
5831 // matches. If it doesn't find one, it can optionally fall back to an
5832 // `else` clause. If none is present and a match wasn't found, it will
5833 // raise an appropriate error.
5834 const pm_case_match_node_t *cast = (const pm_case_match_node_t *) node;
5836 // This is the anchor that we will compile the bodies of the various
5837 // `in` nodes into. We'll make sure that the patterns that are compiled
5838 // jump into the correct spots within this anchor.
5839 DECL_ANCHOR(body_seq);
5840 INIT_ANCHOR(body_seq);
5842 // This is the anchor that we will compile the patterns of the various
5843 // `in` nodes into. If a match is found, they will need to jump into the
5844 // body_seq anchor to the correct spot.
5845 DECL_ANCHOR(cond_seq);
5846 INIT_ANCHOR(cond_seq);
5848 // This label is used to indicate the end of the entire node. It is
5849 // jumped to after the entire stack is cleaned up.
5850 LABEL *end_label = NEW_LABEL(location.line);
5852 // This label is used as the fallback for the case match. If no match is
5853 // found, then we jump to this label. This is either an `else` clause or
5854 // an error handler.
5855 LABEL *else_label = NEW_LABEL(location.line);
5857 // We're going to use this to uniquely identify each branch so that we
5858 // can track coverage information.
5859 rb_code_location_t case_location;
5860 VALUE branches = Qfalse;
5861 int branch_id = 0;
5863 if (PM_BRANCH_COVERAGE_P(iseq)) {
5864 case_location = pm_code_location(scope_node, (const pm_node_t *) cast);
5865 branches = decl_branch_base(iseq, PTR2NUM(cast), &case_location, "case");
5868 // If there is only one pattern, then the behavior changes a bit. It
5869 // effectively gets treated as a match required node (this is how it is
5870 // represented in the other parser).
5871 bool in_single_pattern = cast->consequent == NULL && cast->conditions.size == 1;
5873 // First, we're going to push a bunch of stuff onto the stack that is
5874 // going to serve as our scratch space.
5875 if (in_single_pattern) {
5876 PUSH_INSN(ret, location, putnil); // key error key
5877 PUSH_INSN(ret, location, putnil); // key error matchee
5878 PUSH_INSN1(ret, location, putobject, Qfalse); // key error?
5879 PUSH_INSN(ret, location, putnil); // error string
5882 // Now we're going to compile the value to match against.
5883 PUSH_INSN(ret, location, putnil); // deconstruct cache
5884 PM_COMPILE_NOT_POPPED(cast->predicate);
5886 // Next, we'll loop through every in clause and compile its body into
5887 // the body_seq anchor and its pattern into the cond_seq anchor. We'll
5888 // make sure the pattern knows how to jump correctly into the body if it
5889 // finds a match.
5890 for (size_t index = 0; index < cast->conditions.size; index++) {
5891 const pm_node_t *condition = cast->conditions.nodes[index];
5892 RUBY_ASSERT(PM_NODE_TYPE_P(condition, PM_IN_NODE));
5894 const pm_in_node_t *in_node = (const pm_in_node_t *) condition;
5895 const pm_line_column_t in_location = PM_NODE_START_LINE_COLUMN(parser, in_node);
5896 const pm_line_column_t pattern_location = PM_NODE_START_LINE_COLUMN(parser, in_node->pattern);
5898 if (branch_id) {
5899 PUSH_INSN(body_seq, in_location, putnil);
5902 LABEL *body_label = NEW_LABEL(in_location.line);
5903 PUSH_LABEL(body_seq, body_label);
5904 PUSH_INSN1(body_seq, in_location, adjuststack, INT2FIX(in_single_pattern ? 6 : 2));
5906 // Establish branch coverage for the in clause.
5907 if (PM_BRANCH_COVERAGE_P(iseq)) {
5908 rb_code_location_t branch_location = pm_code_location(scope_node, in_node->statements != NULL ? ((const pm_node_t *) in_node->statements) : ((const pm_node_t *) in_node));
5909 add_trace_branch_coverage(iseq, body_seq, &branch_location, branch_location.beg_pos.column, branch_id++, "in", branches);
5912 if (in_node->statements != NULL) {
5913 PM_COMPILE_INTO_ANCHOR(body_seq, (const pm_node_t *) in_node->statements);
5915 else if (!popped) {
5916 PUSH_INSN(body_seq, in_location, putnil);
5919 PUSH_INSNL(body_seq, in_location, jump, end_label);
5920 LABEL *next_pattern_label = NEW_LABEL(pattern_location.line);
5922 PUSH_INSN(cond_seq, pattern_location, dup);
5923 pm_compile_pattern(iseq, scope_node, in_node->pattern, cond_seq, body_label, next_pattern_label, in_single_pattern, false, true, 2);
5924 PUSH_LABEL(cond_seq, next_pattern_label);
5925 LABEL_UNREMOVABLE(next_pattern_label);
5928 if (cast->consequent != NULL) {
5929 // If we have an `else` clause, then this becomes our fallback (and
5930 // there is no need to compile in code to potentially raise an
5931 // error).
5932 const pm_else_node_t *else_node = (const pm_else_node_t *) cast->consequent;
5934 PUSH_LABEL(cond_seq, else_label);
5935 PUSH_INSN(cond_seq, location, pop);
5936 PUSH_INSN(cond_seq, location, pop);
5938 // Establish branch coverage for the else clause.
5939 if (PM_BRANCH_COVERAGE_P(iseq)) {
5940 rb_code_location_t branch_location = pm_code_location(scope_node, else_node->statements != NULL ? ((const pm_node_t *) else_node->statements) : ((const pm_node_t *) else_node));
5941 add_trace_branch_coverage(iseq, cond_seq, &branch_location, branch_location.beg_pos.column, branch_id, "else", branches);
5944 PM_COMPILE_INTO_ANCHOR(cond_seq, (const pm_node_t *) else_node);
5945 PUSH_INSNL(cond_seq, location, jump, end_label);
5946 PUSH_INSN(cond_seq, location, putnil);
5947 if (popped) PUSH_INSN(cond_seq, location, putnil);
5949 else {
5950 // Otherwise, if we do not have an `else` clause, we will compile in
5951 // the code to handle raising an appropriate error.
5952 PUSH_LABEL(cond_seq, else_label);
5954 // Establish branch coverage for the implicit else clause.
5955 add_trace_branch_coverage(iseq, cond_seq, &case_location, case_location.beg_pos.column, branch_id, "else", branches);
5957 if (in_single_pattern) {
5958 pm_compile_pattern_error_handler(iseq, scope_node, node, cond_seq, end_label, popped);
5960 else {
5961 PUSH_INSN1(cond_seq, location, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
5962 PUSH_INSN1(cond_seq, location, putobject, rb_eNoMatchingPatternError);
5963 PUSH_INSN1(cond_seq, location, topn, INT2FIX(2));
5964 PUSH_SEND(cond_seq, location, id_core_raise, INT2FIX(2));
5966 PUSH_INSN1(cond_seq, location, adjuststack, INT2FIX(3));
5967 if (!popped) PUSH_INSN(cond_seq, location, putnil);
5968 PUSH_INSNL(cond_seq, location, jump, end_label);
5969 PUSH_INSN1(cond_seq, location, dupn, INT2FIX(1));
5970 if (popped) PUSH_INSN(cond_seq, location, putnil);
5974 // At the end of all of this compilation, we will add the code for the
5975 // conditions first, then the various bodies, then mark the end of the
5976 // entire sequence with the end label.
5977 PUSH_SEQ(ret, cond_seq);
5978 PUSH_SEQ(ret, body_seq);
5979 PUSH_LABEL(ret, end_label);
5981 return;
5983 case PM_CLASS_NODE: {
5984 // class Foo; end
5985 // ^^^^^^^^^^^^^^
5986 const pm_class_node_t *cast = (const pm_class_node_t *) node;
5988 ID class_id = pm_constant_id_lookup(scope_node, cast->name);
5989 VALUE class_name = rb_str_freeze(rb_sprintf("<class:%"PRIsVALUE">", rb_id2str(class_id)));
5991 pm_scope_node_t next_scope_node;
5992 pm_scope_node_init((const pm_node_t *) cast, &next_scope_node, scope_node);
5994 const rb_iseq_t *class_iseq = NEW_CHILD_ISEQ(&next_scope_node, class_name, ISEQ_TYPE_CLASS, location.line);
5995 pm_scope_node_destroy(&next_scope_node);
5997 // TODO: Once we merge constant path nodes correctly, fix this flag
5998 const int flags = VM_DEFINECLASS_TYPE_CLASS |
5999 (cast->superclass ? VM_DEFINECLASS_FLAG_HAS_SUPERCLASS : 0) |
6000 pm_compile_class_path(iseq, cast->constant_path, &location, ret, false, scope_node);
6002 if (cast->superclass) {
6003 PM_COMPILE_NOT_POPPED(cast->superclass);
6005 else {
6006 PUSH_INSN(ret, location, putnil);
6009 PUSH_INSN3(ret, location, defineclass, ID2SYM(class_id), class_iseq, INT2FIX(flags));
6010 RB_OBJ_WRITTEN(iseq, Qundef, (VALUE)class_iseq);
6012 if (popped) PUSH_INSN(ret, location, pop);
6013 return;
6015 case PM_CLASS_VARIABLE_AND_WRITE_NODE: {
6016 // @@foo &&= bar
6017 // ^^^^^^^^^^^^^
6018 const pm_class_variable_and_write_node_t *cast = (const pm_class_variable_and_write_node_t *) node;
6019 LABEL *end_label = NEW_LABEL(location.line);
6021 ID name_id = pm_constant_id_lookup(scope_node, cast->name);
6022 VALUE name = ID2SYM(name_id);
6024 PUSH_INSN2(ret, location, getclassvariable, name, get_cvar_ic_value(iseq, name_id));
6025 if (!popped) PUSH_INSN(ret, location, dup);
6027 PUSH_INSNL(ret, location, branchunless, end_label);
6028 if (!popped) PUSH_INSN(ret, location, pop);
6030 PM_COMPILE_NOT_POPPED(cast->value);
6031 if (!popped) PUSH_INSN(ret, location, dup);
6033 PUSH_INSN2(ret, location, setclassvariable, name, get_cvar_ic_value(iseq, name_id));
6034 PUSH_LABEL(ret, end_label);
6036 return;
6038 case PM_CLASS_VARIABLE_OPERATOR_WRITE_NODE: {
6039 // @@foo += bar
6040 // ^^^^^^^^^^^^
6041 const pm_class_variable_operator_write_node_t *cast = (const pm_class_variable_operator_write_node_t *) node;
6043 ID name_id = pm_constant_id_lookup(scope_node, cast->name);
6044 VALUE name = ID2SYM(name_id);
6046 PUSH_INSN2(ret, location, getclassvariable, name, get_cvar_ic_value(iseq, name_id));
6047 PM_COMPILE_NOT_POPPED(cast->value);
6049 ID method_id = pm_constant_id_lookup(scope_node, cast->binary_operator);
6050 int flags = VM_CALL_ARGS_SIMPLE;
6051 PUSH_SEND_WITH_FLAG(ret, location, method_id, INT2NUM(1), INT2FIX(flags));
6053 if (!popped) PUSH_INSN(ret, location, dup);
6054 PUSH_INSN2(ret, location, setclassvariable, name, get_cvar_ic_value(iseq, name_id));
6056 return;
6058 case PM_CLASS_VARIABLE_OR_WRITE_NODE: {
6059 // @@foo ||= bar
6060 // ^^^^^^^^^^^^^
6061 const pm_class_variable_or_write_node_t *cast = (const pm_class_variable_or_write_node_t *) node;
6062 LABEL *end_label = NEW_LABEL(location.line);
6063 LABEL *start_label = NEW_LABEL(location.line);
6065 ID name_id = pm_constant_id_lookup(scope_node, cast->name);
6066 VALUE name = ID2SYM(name_id);
6068 PUSH_INSN(ret, location, putnil);
6069 PUSH_INSN3(ret, location, defined, INT2FIX(DEFINED_CVAR), name, Qtrue);
6070 PUSH_INSNL(ret, location, branchunless, start_label);
6072 PUSH_INSN2(ret, location, getclassvariable, name, get_cvar_ic_value(iseq, name_id));
6073 if (!popped) PUSH_INSN(ret, location, dup);
6075 PUSH_INSNL(ret, location, branchif, end_label);
6076 if (!popped) PUSH_INSN(ret, location, pop);
6078 PUSH_LABEL(ret, start_label);
6079 PM_COMPILE_NOT_POPPED(cast->value);
6080 if (!popped) PUSH_INSN(ret, location, dup);
6082 PUSH_INSN2(ret, location, setclassvariable, name, get_cvar_ic_value(iseq, name_id));
6083 PUSH_LABEL(ret, end_label);
6085 return;
6087 case PM_CLASS_VARIABLE_READ_NODE: {
6088 // @@foo
6089 // ^^^^^
6090 if (!popped) {
6091 const pm_class_variable_read_node_t *cast = (const pm_class_variable_read_node_t *) node;
6092 ID name = pm_constant_id_lookup(scope_node, cast->name);
6093 PUSH_INSN2(ret, location, getclassvariable, ID2SYM(name), get_cvar_ic_value(iseq, name));
6095 return;
6097 case PM_CLASS_VARIABLE_WRITE_NODE: {
6098 // @@foo = 1
6099 // ^^^^^^^^^
6100 const pm_class_variable_write_node_t *cast = (const pm_class_variable_write_node_t *) node;
6101 PM_COMPILE_NOT_POPPED(cast->value);
6102 if (!popped) PUSH_INSN(ret, location, dup);
6104 ID name = pm_constant_id_lookup(scope_node, cast->name);
6105 PUSH_INSN2(ret, location, setclassvariable, ID2SYM(name), get_cvar_ic_value(iseq, name));
6107 return;
6109 case PM_CONSTANT_PATH_NODE: {
6110 // Foo::Bar
6111 // ^^^^^^^^
6112 VALUE parts;
6114 if (ISEQ_COMPILE_DATA(iseq)->option->inline_const_cache && ((parts = pm_constant_path_parts(node, scope_node)) != Qnil)) {
6115 ISEQ_BODY(iseq)->ic_size++;
6116 PUSH_INSN1(ret, location, opt_getconstant_path, parts);
6118 else {
6119 DECL_ANCHOR(prefix);
6120 INIT_ANCHOR(prefix);
6122 DECL_ANCHOR(body);
6123 INIT_ANCHOR(body);
6125 pm_compile_constant_path(iseq, node, prefix, body, popped, scope_node);
6126 if (LIST_INSN_SIZE_ZERO(prefix)) {
6127 PUSH_INSN(ret, location, putnil);
6129 else {
6130 PUSH_SEQ(ret, prefix);
6133 PUSH_SEQ(ret, body);
6136 if (popped) PUSH_INSN(ret, location, pop);
6137 return;
6139 case PM_CONSTANT_PATH_AND_WRITE_NODE: {
6140 // Foo::Bar &&= baz
6141 // ^^^^^^^^^^^^^^^^
6142 const pm_constant_path_and_write_node_t *cast = (const pm_constant_path_and_write_node_t *) node;
6143 pm_compile_constant_path_and_write_node(iseq, cast, 0, &location, ret, popped, scope_node);
6144 return;
6146 case PM_CONSTANT_PATH_OR_WRITE_NODE: {
6147 // Foo::Bar ||= baz
6148 // ^^^^^^^^^^^^^^^^
6149 const pm_constant_path_or_write_node_t *cast = (const pm_constant_path_or_write_node_t *) node;
6150 pm_compile_constant_path_or_write_node(iseq, cast, 0, &location, ret, popped, scope_node);
6151 return;
6153 case PM_CONSTANT_PATH_OPERATOR_WRITE_NODE: {
6154 // Foo::Bar += baz
6155 // ^^^^^^^^^^^^^^^
6156 const pm_constant_path_operator_write_node_t *cast = (const pm_constant_path_operator_write_node_t *) node;
6157 pm_compile_constant_path_operator_write_node(iseq, cast, 0, &location, ret, popped, scope_node);
6158 return;
6160 case PM_CONSTANT_PATH_WRITE_NODE: {
6161 // Foo::Bar = 1
6162 // ^^^^^^^^^^^^
6163 const pm_constant_path_write_node_t *cast = (const pm_constant_path_write_node_t *) node;
6164 pm_compile_constant_path_write_node(iseq, cast, 0, &location, ret, popped, scope_node);
6165 return;
6167 case PM_CONSTANT_READ_NODE: {
6168 // Foo
6169 // ^^^
6170 const pm_constant_read_node_t *cast = (const pm_constant_read_node_t *) node;
6171 VALUE name = ID2SYM(pm_constant_id_lookup(scope_node, cast->name));
6173 pm_compile_constant_read(iseq, name, &cast->base.location, ret, scope_node);
6174 if (popped) PUSH_INSN(ret, location, pop);
6176 return;
6178 case PM_CONSTANT_AND_WRITE_NODE: {
6179 // Foo &&= bar
6180 // ^^^^^^^^^^^
6181 const pm_constant_and_write_node_t *cast = (const pm_constant_and_write_node_t *) node;
6182 pm_compile_constant_and_write_node(iseq, cast, 0, &location, ret, popped, scope_node);
6183 return;
6185 case PM_CONSTANT_OR_WRITE_NODE: {
6186 // Foo ||= bar
6187 // ^^^^^^^^^^^
6188 const pm_constant_or_write_node_t *cast = (const pm_constant_or_write_node_t *) node;
6189 pm_compile_constant_or_write_node(iseq, cast, 0, &location, ret, popped, scope_node);
6190 return;
6192 case PM_CONSTANT_OPERATOR_WRITE_NODE: {
6193 // Foo += bar
6194 // ^^^^^^^^^^
6195 const pm_constant_operator_write_node_t *cast = (const pm_constant_operator_write_node_t *) node;
6196 pm_compile_constant_operator_write_node(iseq, cast, 0, &location, ret, popped, scope_node);
6197 return;
6199 case PM_CONSTANT_WRITE_NODE: {
6200 // Foo = 1
6201 // ^^^^^^^
6202 const pm_constant_write_node_t *cast = (const pm_constant_write_node_t *) node;
6203 pm_compile_constant_write_node(iseq, cast, 0, &location, ret, popped, scope_node);
6204 return;
6206 case PM_DEF_NODE: {
6207 // def foo; end
6208 // ^^^^^^^^^^^^
6210 // def self.foo; end
6211 // ^^^^^^^^^^^^^^^^^
6212 const pm_def_node_t *cast = (const pm_def_node_t *) node;
6213 ID method_name = pm_constant_id_lookup(scope_node, cast->name);
6215 pm_scope_node_t next_scope_node;
6216 pm_scope_node_init((const pm_node_t *) cast, &next_scope_node, scope_node);
6218 rb_iseq_t *method_iseq = NEW_ISEQ(&next_scope_node, rb_id2str(method_name), ISEQ_TYPE_METHOD, location.line);
6219 pm_scope_node_destroy(&next_scope_node);
6221 if (cast->receiver) {
6222 PM_COMPILE_NOT_POPPED(cast->receiver);
6223 PUSH_INSN2(ret, location, definesmethod, ID2SYM(method_name), method_iseq);
6225 else {
6226 PUSH_INSN2(ret, location, definemethod, ID2SYM(method_name), method_iseq);
6228 RB_OBJ_WRITTEN(iseq, Qundef, (VALUE) method_iseq);
6230 if (!popped) {
6231 PUSH_INSN1(ret, location, putobject, ID2SYM(method_name));
6234 return;
6236 case PM_DEFINED_NODE: {
6237 // defined?(a)
6238 // ^^^^^^^^^^^
6239 const pm_defined_node_t *cast = (const pm_defined_node_t *) node;
6240 pm_compile_defined_expr(iseq, cast->value, &location, ret, popped, scope_node, false);
6241 return;
6243 case PM_EMBEDDED_STATEMENTS_NODE: {
6244 // "foo #{bar}"
6245 // ^^^^^^
6246 const pm_embedded_statements_node_t *cast = (const pm_embedded_statements_node_t *) node;
6248 if (cast->statements != NULL) {
6249 PM_COMPILE((const pm_node_t *) (cast->statements));
6251 else {
6252 PUSH_INSN(ret, location, putnil);
6255 if (popped) PUSH_INSN(ret, location, pop);
6256 return;
6258 case PM_EMBEDDED_VARIABLE_NODE: {
6259 // "foo #@bar"
6260 // ^^^^^
6261 const pm_embedded_variable_node_t *cast = (const pm_embedded_variable_node_t *) node;
6262 PM_COMPILE(cast->variable);
6263 return;
6265 case PM_FALSE_NODE: {
6266 // false
6267 // ^^^^^
6268 if (!popped) {
6269 PUSH_INSN1(ret, location, putobject, Qfalse);
6271 return;
6273 case PM_ENSURE_NODE: {
6274 const pm_ensure_node_t *cast = (const pm_ensure_node_t *) node;
6276 if (cast->statements != NULL) {
6277 LABEL *start = NEW_LABEL(location.line);
6278 LABEL *end = NEW_LABEL(location.line);
6279 PUSH_LABEL(ret, start);
6281 LABEL *prev_end_label = ISEQ_COMPILE_DATA(iseq)->end_label;
6282 ISEQ_COMPILE_DATA(iseq)->end_label = end;
6284 PM_COMPILE((const pm_node_t *) cast->statements);
6285 ISEQ_COMPILE_DATA(iseq)->end_label = prev_end_label;
6286 PUSH_LABEL(ret, end);
6289 return;
6291 case PM_ELSE_NODE: {
6292 // if foo then bar else baz end
6293 // ^^^^^^^^^^^^
6294 const pm_else_node_t *cast = (const pm_else_node_t *) node;
6296 if (cast->statements != NULL) {
6297 PM_COMPILE((const pm_node_t *) cast->statements);
6299 else if (!popped) {
6300 PUSH_SYNTHETIC_PUTNIL(ret, iseq);
6303 return;
6305 case PM_FLIP_FLOP_NODE: {
6306 // if foo .. bar; end
6307 // ^^^^^^^^^^
6308 const pm_flip_flop_node_t *cast = (const pm_flip_flop_node_t *) node;
6310 LABEL *final_label = NEW_LABEL(location.line);
6311 LABEL *then_label = NEW_LABEL(location.line);
6312 LABEL *else_label = NEW_LABEL(location.line);
6314 pm_compile_flip_flop(cast, else_label, then_label, iseq, location.line, ret, popped, scope_node);
6316 PUSH_LABEL(ret, then_label);
6317 PUSH_INSN1(ret, location, putobject, Qtrue);
6318 PUSH_INSNL(ret, location, jump, final_label);
6319 PUSH_LABEL(ret, else_label);
6320 PUSH_INSN1(ret, location, putobject, Qfalse);
6321 PUSH_LABEL(ret, final_label);
6323 return;
6325 case PM_FLOAT_NODE: {
6326 // 1.0
6327 // ^^^
6328 if (!popped) {
6329 PUSH_INSN1(ret, location, putobject, parse_float((const pm_float_node_t *) node));
6331 return;
6333 case PM_FOR_NODE: {
6334 // for foo in bar do end
6335 // ^^^^^^^^^^^^^^^^^^^^^
6336 const pm_for_node_t *cast = (const pm_for_node_t *) node;
6338 LABEL *retry_label = NEW_LABEL(location.line);
6339 LABEL *retry_end_l = NEW_LABEL(location.line);
6341 // First, compile the collection that we're going to be iterating over.
6342 PUSH_LABEL(ret, retry_label);
6343 PM_COMPILE_NOT_POPPED(cast->collection);
6345 // Next, create the new scope that is going to contain the block that
6346 // will be passed to the each method.
6347 pm_scope_node_t next_scope_node;
6348 pm_scope_node_init((const pm_node_t *) cast, &next_scope_node, scope_node);
6350 const rb_iseq_t *child_iseq = NEW_CHILD_ISEQ(&next_scope_node, make_name_for_block(iseq), ISEQ_TYPE_BLOCK, location.line);
6351 pm_scope_node_destroy(&next_scope_node);
6353 const rb_iseq_t *prev_block = ISEQ_COMPILE_DATA(iseq)->current_block;
6354 ISEQ_COMPILE_DATA(iseq)->current_block = child_iseq;
6356 // Now, create the method call to each that will be used to iterate over
6357 // the collection, and pass the newly created iseq as the block.
6358 PUSH_SEND_WITH_BLOCK(ret, location, idEach, INT2FIX(0), child_iseq);
6359 pm_compile_retry_end_label(iseq, ret, retry_end_l);
6361 if (popped) PUSH_INSN(ret, location, pop);
6362 ISEQ_COMPILE_DATA(iseq)->current_block = prev_block;
6363 PUSH_CATCH_ENTRY(CATCH_TYPE_BREAK, retry_label, retry_end_l, child_iseq, retry_end_l);
6364 return;
6366 case PM_FORWARDING_ARGUMENTS_NODE: {
6367 rb_bug("Cannot compile a ForwardingArgumentsNode directly\n");
6368 return;
6370 case PM_FORWARDING_SUPER_NODE: {
6371 // super
6372 // ^^^^^
6374 // super {}
6375 // ^^^^^^^^
6376 const pm_forwarding_super_node_t *cast = (const pm_forwarding_super_node_t *) node;
6377 const rb_iseq_t *block = NULL;
6379 const rb_iseq_t *previous_block = NULL;
6380 LABEL *retry_label = NULL;
6381 LABEL *retry_end_l = NULL;
6383 if (cast->block != NULL) {
6384 previous_block = ISEQ_COMPILE_DATA(iseq)->current_block;
6385 ISEQ_COMPILE_DATA(iseq)->current_block = NULL;
6387 retry_label = NEW_LABEL(location.line);
6388 retry_end_l = NEW_LABEL(location.line);
6390 PUSH_LABEL(ret, retry_label);
6393 PUSH_INSN(ret, location, putself);
6394 int flag = VM_CALL_ZSUPER | VM_CALL_SUPER | VM_CALL_FCALL;
6396 if (cast->block != NULL) {
6397 pm_scope_node_t next_scope_node;
6398 pm_scope_node_init((const pm_node_t *) cast->block, &next_scope_node, scope_node);
6400 ISEQ_COMPILE_DATA(iseq)->current_block = block = NEW_CHILD_ISEQ(&next_scope_node, make_name_for_block(iseq), ISEQ_TYPE_BLOCK, location.line);
6401 pm_scope_node_destroy(&next_scope_node);
6402 RB_OBJ_WRITTEN(iseq, Qundef, (VALUE) block);
6405 DECL_ANCHOR(args);
6406 INIT_ANCHOR(args);
6408 struct rb_iseq_constant_body *const body = ISEQ_BODY(iseq);
6409 const rb_iseq_t *local_iseq = body->local_iseq;
6410 const struct rb_iseq_constant_body *const local_body = ISEQ_BODY(local_iseq);
6412 int argc = 0;
6413 int depth = get_lvar_level(iseq);
6415 if (local_body->param.flags.has_lead) {
6416 /* required arguments */
6417 for (int i = 0; i < local_body->param.lead_num; i++) {
6418 int idx = local_body->local_table_size - i;
6419 PUSH_GETLOCAL(args, location, idx, depth);
6421 argc += local_body->param.lead_num;
6424 if (local_body->param.flags.has_opt) {
6425 /* optional arguments */
6426 for (int j = 0; j < local_body->param.opt_num; j++) {
6427 int idx = local_body->local_table_size - (argc + j);
6428 PUSH_GETLOCAL(args, location, idx, depth);
6430 argc += local_body->param.opt_num;
6433 if (local_body->param.flags.has_rest) {
6434 /* rest argument */
6435 int idx = local_body->local_table_size - local_body->param.rest_start;
6436 PUSH_GETLOCAL(args, location, idx, depth);
6437 PUSH_INSN1(args, location, splatarray, Qfalse);
6439 argc = local_body->param.rest_start + 1;
6440 flag |= VM_CALL_ARGS_SPLAT;
6443 if (local_body->param.flags.has_post) {
6444 /* post arguments */
6445 int post_len = local_body->param.post_num;
6446 int post_start = local_body->param.post_start;
6448 int j = 0;
6449 for (; j < post_len; j++) {
6450 int idx = local_body->local_table_size - (post_start + j);
6451 PUSH_GETLOCAL(args, location, idx, depth);
6454 if (local_body->param.flags.has_rest) {
6455 // argc remains unchanged from rest branch
6456 PUSH_INSN1(args, location, newarray, INT2FIX(j));
6457 PUSH_INSN(args, location, concatarray);
6459 else {
6460 argc = post_len + post_start;
6464 const struct rb_iseq_param_keyword *const local_keyword = local_body->param.keyword;
6465 if (local_body->param.flags.has_kw) {
6466 int local_size = local_body->local_table_size;
6467 argc++;
6469 PUSH_INSN1(args, location, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
6471 if (local_body->param.flags.has_kwrest) {
6472 int idx = local_body->local_table_size - local_keyword->rest_start;
6473 PUSH_GETLOCAL(args, location, idx, depth);
6474 RUBY_ASSERT(local_keyword->num > 0);
6475 PUSH_SEND(args, location, rb_intern("dup"), INT2FIX(0));
6477 else {
6478 PUSH_INSN1(args, location, newhash, INT2FIX(0));
6480 int i = 0;
6481 for (; i < local_keyword->num; ++i) {
6482 ID id = local_keyword->table[i];
6483 int idx = local_size - get_local_var_idx(local_iseq, id);
6484 PUSH_INSN1(args, location, putobject, ID2SYM(id));
6485 PUSH_GETLOCAL(args, location, idx, depth);
6488 PUSH_SEND(args, location, id_core_hash_merge_ptr, INT2FIX(i * 2 + 1));
6489 flag |= VM_CALL_KW_SPLAT| VM_CALL_KW_SPLAT_MUT;
6491 else if (local_body->param.flags.has_kwrest) {
6492 int idx = local_body->local_table_size - local_keyword->rest_start;
6493 PUSH_GETLOCAL(args, location, idx, depth);
6494 argc++;
6495 flag |= VM_CALL_KW_SPLAT;
6498 PUSH_SEQ(ret, args);
6499 PUSH_INSN2(ret, location, invokesuper, new_callinfo(iseq, 0, argc, flag, NULL, block != NULL), block);
6501 if (cast->block != NULL) {
6502 pm_compile_retry_end_label(iseq, ret, retry_end_l);
6503 PUSH_CATCH_ENTRY(CATCH_TYPE_BREAK, retry_label, retry_end_l, block, retry_end_l);
6504 ISEQ_COMPILE_DATA(iseq)->current_block = previous_block;
6507 if (popped) PUSH_INSN(ret, location, pop);
6508 return;
6510 case PM_GLOBAL_VARIABLE_AND_WRITE_NODE: {
6511 // $foo &&= bar
6512 // ^^^^^^^^^^^^
6513 const pm_global_variable_and_write_node_t *cast = (const pm_global_variable_and_write_node_t *) node;
6514 LABEL *end_label = NEW_LABEL(location.line);
6516 VALUE name = ID2SYM(pm_constant_id_lookup(scope_node, cast->name));
6517 PUSH_INSN1(ret, location, getglobal, name);
6518 if (!popped) PUSH_INSN(ret, location, dup);
6520 PUSH_INSNL(ret, location, branchunless, end_label);
6521 if (!popped) PUSH_INSN(ret, location, pop);
6523 PM_COMPILE_NOT_POPPED(cast->value);
6524 if (!popped) PUSH_INSN(ret, location, dup);
6526 PUSH_INSN1(ret, location, setglobal, name);
6527 PUSH_LABEL(ret, end_label);
6529 return;
6531 case PM_GLOBAL_VARIABLE_OPERATOR_WRITE_NODE: {
6532 // $foo += bar
6533 // ^^^^^^^^^^^
6534 const pm_global_variable_operator_write_node_t *cast = (const pm_global_variable_operator_write_node_t *) node;
6536 VALUE name = ID2SYM(pm_constant_id_lookup(scope_node, cast->name));
6537 PUSH_INSN1(ret, location, getglobal, name);
6538 PM_COMPILE_NOT_POPPED(cast->value);
6540 ID method_id = pm_constant_id_lookup(scope_node, cast->binary_operator);
6541 int flags = VM_CALL_ARGS_SIMPLE;
6542 PUSH_SEND_WITH_FLAG(ret, location, method_id, INT2NUM(1), INT2FIX(flags));
6544 if (!popped) PUSH_INSN(ret, location, dup);
6545 PUSH_INSN1(ret, location, setglobal, name);
6547 return;
6549 case PM_GLOBAL_VARIABLE_OR_WRITE_NODE: {
6550 // $foo ||= bar
6551 // ^^^^^^^^^^^^
6552 const pm_global_variable_or_write_node_t *cast = (const pm_global_variable_or_write_node_t *) node;
6553 LABEL *set_label = NEW_LABEL(location.line);
6554 LABEL *end_label = NEW_LABEL(location.line);
6556 PUSH_INSN(ret, location, putnil);
6557 VALUE name = ID2SYM(pm_constant_id_lookup(scope_node, cast->name));
6559 PUSH_INSN3(ret, location, defined, INT2FIX(DEFINED_GVAR), name, Qtrue);
6560 PUSH_INSNL(ret, location, branchunless, set_label);
6562 PUSH_INSN1(ret, location, getglobal, name);
6563 if (!popped) PUSH_INSN(ret, location, dup);
6565 PUSH_INSNL(ret, location, branchif, end_label);
6566 if (!popped) PUSH_INSN(ret, location, pop);
6568 PUSH_LABEL(ret, set_label);
6569 PM_COMPILE_NOT_POPPED(cast->value);
6570 if (!popped) PUSH_INSN(ret, location, dup);
6572 PUSH_INSN1(ret, location, setglobal, name);
6573 PUSH_LABEL(ret, end_label);
6575 return;
6577 case PM_GLOBAL_VARIABLE_READ_NODE: {
6578 // $foo
6579 // ^^^^
6580 const pm_global_variable_read_node_t *cast = (const pm_global_variable_read_node_t *) node;
6581 VALUE name = ID2SYM(pm_constant_id_lookup(scope_node, cast->name));
6583 PUSH_INSN1(ret, location, getglobal, name);
6584 if (popped) PUSH_INSN(ret, location, pop);
6586 return;
6588 case PM_GLOBAL_VARIABLE_WRITE_NODE: {
6589 // $foo = 1
6590 // ^^^^^^^^
6591 const pm_global_variable_write_node_t *cast = (const pm_global_variable_write_node_t *) node;
6592 PM_COMPILE_NOT_POPPED(cast->value);
6593 if (!popped) PUSH_INSN(ret, location, dup);
6595 ID name = pm_constant_id_lookup(scope_node, cast->name);
6596 PUSH_INSN1(ret, location, setglobal, ID2SYM(name));
6598 return;
6600 case PM_HASH_NODE: {
6601 // {}
6602 // ^^
6604 // If every node in the hash is static, then we can compile the entire
6605 // hash now instead of later.
6606 if (PM_NODE_FLAG_P(node, PM_NODE_FLAG_STATIC_LITERAL)) {
6607 // We're only going to compile this node if it's not popped. If it
6608 // is popped, then we know we don't need to do anything since it's
6609 // statically known.
6610 if (!popped) {
6611 VALUE value = pm_static_literal_value(iseq, node, scope_node);
6612 PUSH_INSN1(ret, location, duphash, value);
6613 RB_OBJ_WRITTEN(iseq, Qundef, value);
6616 else {
6617 // Here since we know there are possible side-effects inside the
6618 // hash contents, we're going to build it entirely at runtime. We'll
6619 // do this by pushing all of the key-value pairs onto the stack and
6620 // then combining them with newhash.
6622 // If this hash is popped, then this serves only to ensure we enact
6623 // all side-effects (like method calls) that are contained within
6624 // the hash contents.
6625 const pm_hash_node_t *cast = (const pm_hash_node_t *) node;
6626 const pm_node_list_t *elements = &cast->elements;
6628 if (popped) {
6629 // If this hash is popped, then we can iterate through each
6630 // element and compile it. The result of each compilation will
6631 // only include the side effects of the element itself.
6632 for (size_t index = 0; index < elements->size; index++) {
6633 PM_COMPILE_POPPED(elements->nodes[index]);
6636 else {
6637 pm_compile_hash_elements(iseq, node, elements, ret, scope_node);
6641 return;
6643 case PM_IF_NODE: {
6644 // if foo then bar end
6645 // ^^^^^^^^^^^^^^^^^^^
6647 // bar if foo
6648 // ^^^^^^^^^^
6650 // foo ? bar : baz
6651 // ^^^^^^^^^^^^^^^
6652 const pm_if_node_t *cast = (const pm_if_node_t *) node;
6653 pm_compile_conditional(iseq, &location, PM_IF_NODE, (const pm_node_t *) cast, cast->statements, cast->consequent, cast->predicate, ret, popped, scope_node);
6654 return;
6656 case PM_IMAGINARY_NODE: {
6657 // 1i
6658 // ^^
6659 if (!popped) {
6660 PUSH_INSN1(ret, location, putobject, parse_imaginary((const pm_imaginary_node_t *) node));
6662 return;
6664 case PM_IMPLICIT_NODE: {
6665 // Implicit nodes mark places in the syntax tree where explicit syntax
6666 // was omitted, but implied. For example,
6668 // { foo: }
6670 // In this case a method call/local variable read is implied by virtue
6671 // of the missing value. To compile these nodes, we simply compile the
6672 // value that is implied, which is helpfully supplied by the parser.
6673 const pm_implicit_node_t *cast = (const pm_implicit_node_t *) node;
6674 PM_COMPILE(cast->value);
6675 return;
6677 case PM_IN_NODE: {
6678 // In nodes are handled by the case match node directly, so we should
6679 // never end up hitting them through this path.
6680 rb_bug("Should not ever enter an in node directly");
6681 return;
6683 case PM_INDEX_OPERATOR_WRITE_NODE: {
6684 // foo[bar] += baz
6685 // ^^^^^^^^^^^^^^^
6686 const pm_index_operator_write_node_t *cast = (const pm_index_operator_write_node_t *) node;
6687 pm_compile_index_operator_write_node(iseq, cast, &location, ret, popped, scope_node);
6688 return;
6690 case PM_INDEX_AND_WRITE_NODE: {
6691 // foo[bar] &&= baz
6692 // ^^^^^^^^^^^^^^^^
6693 const pm_index_and_write_node_t *cast = (const pm_index_and_write_node_t *) node;
6694 pm_compile_index_control_flow_write_node(iseq, node, cast->receiver, cast->arguments, cast->block, cast->value, &location, ret, popped, scope_node);
6695 return;
6697 case PM_INDEX_OR_WRITE_NODE: {
6698 // foo[bar] ||= baz
6699 // ^^^^^^^^^^^^^^^^
6700 const pm_index_or_write_node_t *cast = (const pm_index_or_write_node_t *) node;
6701 pm_compile_index_control_flow_write_node(iseq, node, cast->receiver, cast->arguments, cast->block, cast->value, &location, ret, popped, scope_node);
6702 return;
6704 case PM_INSTANCE_VARIABLE_AND_WRITE_NODE: {
6705 // @foo &&= bar
6706 // ^^^^^^^^^^^^
6707 const pm_instance_variable_and_write_node_t *cast = (const pm_instance_variable_and_write_node_t *) node;
6708 LABEL *end_label = NEW_LABEL(location.line);
6710 ID name_id = pm_constant_id_lookup(scope_node, cast->name);
6711 VALUE name = ID2SYM(name_id);
6713 PUSH_INSN2(ret, location, getinstancevariable, name, get_ivar_ic_value(iseq, name_id));
6714 if (!popped) PUSH_INSN(ret, location, dup);
6716 PUSH_INSNL(ret, location, branchunless, end_label);
6717 if (!popped) PUSH_INSN(ret, location, pop);
6719 PM_COMPILE_NOT_POPPED(cast->value);
6720 if (!popped) PUSH_INSN(ret, location, dup);
6722 PUSH_INSN2(ret, location, setinstancevariable, name, get_ivar_ic_value(iseq, name_id));
6723 PUSH_LABEL(ret, end_label);
6725 return;
6727 case PM_INSTANCE_VARIABLE_OPERATOR_WRITE_NODE: {
6728 // @foo += bar
6729 // ^^^^^^^^^^^
6730 const pm_instance_variable_operator_write_node_t *cast = (const pm_instance_variable_operator_write_node_t *) node;
6732 ID name_id = pm_constant_id_lookup(scope_node, cast->name);
6733 VALUE name = ID2SYM(name_id);
6735 PUSH_INSN2(ret, location, getinstancevariable, name, get_ivar_ic_value(iseq, name_id));
6736 PM_COMPILE_NOT_POPPED(cast->value);
6738 ID method_id = pm_constant_id_lookup(scope_node, cast->binary_operator);
6739 int flags = VM_CALL_ARGS_SIMPLE;
6740 PUSH_SEND_WITH_FLAG(ret, location, method_id, INT2NUM(1), INT2FIX(flags));
6742 if (!popped) PUSH_INSN(ret, location, dup);
6743 PUSH_INSN2(ret, location, setinstancevariable, name, get_ivar_ic_value(iseq, name_id));
6745 return;
6747 case PM_INSTANCE_VARIABLE_OR_WRITE_NODE: {
6748 // @foo ||= bar
6749 // ^^^^^^^^^^^^
6750 const pm_instance_variable_or_write_node_t *cast = (const pm_instance_variable_or_write_node_t *) node;
6751 LABEL *end_label = NEW_LABEL(location.line);
6753 ID name_id = pm_constant_id_lookup(scope_node, cast->name);
6754 VALUE name = ID2SYM(name_id);
6756 PUSH_INSN2(ret, location, getinstancevariable, name, get_ivar_ic_value(iseq, name_id));
6757 if (!popped) PUSH_INSN(ret, location, dup);
6759 PUSH_INSNL(ret, location, branchif, end_label);
6760 if (!popped) PUSH_INSN(ret, location, pop);
6762 PM_COMPILE_NOT_POPPED(cast->value);
6763 if (!popped) PUSH_INSN(ret, location, dup);
6765 PUSH_INSN2(ret, location, setinstancevariable, name, get_ivar_ic_value(iseq, name_id));
6766 PUSH_LABEL(ret, end_label);
6768 return;
6770 case PM_INSTANCE_VARIABLE_READ_NODE: {
6771 // @foo
6772 // ^^^^
6773 if (!popped) {
6774 const pm_instance_variable_read_node_t *cast = (const pm_instance_variable_read_node_t *) node;
6775 ID name = pm_constant_id_lookup(scope_node, cast->name);
6776 PUSH_INSN2(ret, location, getinstancevariable, ID2SYM(name), get_ivar_ic_value(iseq, name));
6778 return;
6780 case PM_INSTANCE_VARIABLE_WRITE_NODE: {
6781 // @foo = 1
6782 // ^^^^^^^^
6783 const pm_instance_variable_write_node_t *cast = (const pm_instance_variable_write_node_t *) node;
6784 PM_COMPILE_NOT_POPPED(cast->value);
6785 if (!popped) PUSH_INSN(ret, location, dup);
6787 ID name = pm_constant_id_lookup(scope_node, cast->name);
6788 PUSH_INSN2(ret, location, setinstancevariable, ID2SYM(name), get_ivar_ic_value(iseq, name));
6790 return;
6792 case PM_INTEGER_NODE: {
6793 // 1
6794 // ^
6795 if (!popped) {
6796 PUSH_INSN1(ret, location, putobject, parse_integer((const pm_integer_node_t *) node));
6798 return;
6800 case PM_INTERPOLATED_MATCH_LAST_LINE_NODE: {
6801 // if /foo #{bar}/ then end
6802 // ^^^^^^^^^^^^
6803 if (PM_NODE_FLAG_P(node, PM_NODE_FLAG_STATIC_LITERAL)) {
6804 if (!popped) {
6805 VALUE regexp = pm_static_literal_value(iseq, node, scope_node);
6806 PUSH_INSN1(ret, location, putobject, regexp);
6809 else {
6810 pm_compile_regexp_dynamic(iseq, node, &((const pm_interpolated_match_last_line_node_t *) node)->parts, &location, ret, popped, scope_node);
6813 PUSH_INSN1(ret, location, getglobal, rb_id2sym(idLASTLINE));
6814 PUSH_SEND(ret, location, idEqTilde, INT2NUM(1));
6815 if (popped) PUSH_INSN(ret, location, pop);
6817 return;
6819 case PM_INTERPOLATED_REGULAR_EXPRESSION_NODE: {
6820 // /foo #{bar}/
6821 // ^^^^^^^^^^^^
6822 if (PM_NODE_FLAG_P(node, PM_REGULAR_EXPRESSION_FLAGS_ONCE)) {
6823 const rb_iseq_t *prevblock = ISEQ_COMPILE_DATA(iseq)->current_block;
6824 const rb_iseq_t *block_iseq = NULL;
6825 int ise_index = ISEQ_BODY(iseq)->ise_size++;
6827 pm_scope_node_t next_scope_node;
6828 pm_scope_node_init(node, &next_scope_node, scope_node);
6830 block_iseq = NEW_CHILD_ISEQ(&next_scope_node, make_name_for_block(iseq), ISEQ_TYPE_BLOCK, location.line);
6831 pm_scope_node_destroy(&next_scope_node);
6833 ISEQ_COMPILE_DATA(iseq)->current_block = block_iseq;
6834 PUSH_INSN2(ret, location, once, block_iseq, INT2FIX(ise_index));
6835 ISEQ_COMPILE_DATA(iseq)->current_block = prevblock;
6837 if (popped) PUSH_INSN(ret, location, pop);
6838 return;
6841 if (PM_NODE_FLAG_P(node, PM_NODE_FLAG_STATIC_LITERAL)) {
6842 if (!popped) {
6843 VALUE regexp = pm_static_literal_value(iseq, node, scope_node);
6844 PUSH_INSN1(ret, location, putobject, regexp);
6847 else {
6848 pm_compile_regexp_dynamic(iseq, node, &((const pm_interpolated_regular_expression_node_t *) node)->parts, &location, ret, popped, scope_node);
6849 if (popped) PUSH_INSN(ret, location, pop);
6852 return;
6854 case PM_INTERPOLATED_STRING_NODE: {
6855 // "foo #{bar}"
6856 // ^^^^^^^^^^^^
6857 if (PM_NODE_FLAG_P(node, PM_NODE_FLAG_STATIC_LITERAL)) {
6858 if (!popped) {
6859 VALUE string = pm_static_literal_value(iseq, node, scope_node);
6861 if (PM_NODE_FLAG_P(node, PM_INTERPOLATED_STRING_NODE_FLAGS_FROZEN)) {
6862 PUSH_INSN1(ret, location, putobject, string);
6864 else if (PM_NODE_FLAG_P(node, PM_INTERPOLATED_STRING_NODE_FLAGS_MUTABLE)) {
6865 PUSH_INSN1(ret, location, putstring, string);
6867 else {
6868 PUSH_INSN1(ret, location, putchilledstring, string);
6872 else {
6873 const pm_interpolated_string_node_t *cast = (const pm_interpolated_string_node_t *) node;
6874 int length = pm_interpolated_node_compile(iseq, &cast->parts, &location, ret, popped, scope_node, NULL, NULL);
6875 if (length > 1) PUSH_INSN1(ret, location, concatstrings, INT2FIX(length));
6876 if (popped) PUSH_INSN(ret, location, pop);
6879 return;
6881 case PM_INTERPOLATED_SYMBOL_NODE: {
6882 // :"foo #{bar}"
6883 // ^^^^^^^^^^^^^
6884 const pm_interpolated_symbol_node_t *cast = (const pm_interpolated_symbol_node_t *) node;
6886 if (PM_NODE_FLAG_P(node, PM_NODE_FLAG_STATIC_LITERAL)) {
6887 if (!popped) {
6888 VALUE symbol = pm_static_literal_value(iseq, node, scope_node);
6889 PUSH_INSN1(ret, location, putobject, symbol);
6892 else {
6893 int length = pm_interpolated_node_compile(iseq, &cast->parts, &location, ret, popped, scope_node, NULL, NULL);
6894 if (length > 1) {
6895 PUSH_INSN1(ret, location, concatstrings, INT2FIX(length));
6898 if (!popped) {
6899 PUSH_INSN(ret, location, intern);
6901 else {
6902 PUSH_INSN(ret, location, pop);
6906 return;
6908 case PM_INTERPOLATED_X_STRING_NODE: {
6909 // `foo #{bar}`
6910 // ^^^^^^^^^^^^
6911 const pm_interpolated_x_string_node_t *cast = (const pm_interpolated_x_string_node_t *) node;
6913 PUSH_INSN(ret, location, putself);
6915 int length = pm_interpolated_node_compile(iseq, &cast->parts, &location, ret, false, scope_node, NULL, NULL);
6916 if (length > 1) PUSH_INSN1(ret, location, concatstrings, INT2FIX(length));
6918 PUSH_SEND_WITH_FLAG(ret, location, idBackquote, INT2NUM(1), INT2FIX(VM_CALL_FCALL | VM_CALL_ARGS_SIMPLE));
6919 if (popped) PUSH_INSN(ret, location, pop);
6921 return;
6923 case PM_KEYWORD_HASH_NODE: {
6924 // foo(bar: baz)
6925 // ^^^^^^^^
6926 const pm_keyword_hash_node_t *cast = (const pm_keyword_hash_node_t *) node;
6927 const pm_node_list_t *elements = &cast->elements;
6929 const pm_node_t *element;
6930 PM_NODE_LIST_FOREACH(elements, index, element) {
6931 PM_COMPILE(element);
6934 if (!popped) PUSH_INSN1(ret, location, newhash, INT2FIX(elements->size * 2));
6935 return;
6937 case PM_LAMBDA_NODE: {
6938 // -> {}
6939 // ^^^^^
6940 const pm_lambda_node_t *cast = (const pm_lambda_node_t *) node;
6942 pm_scope_node_t next_scope_node;
6943 pm_scope_node_init(node, &next_scope_node, scope_node);
6945 int opening_lineno = pm_location_line_number(parser, &cast->opening_loc);
6946 const rb_iseq_t *block = NEW_CHILD_ISEQ(&next_scope_node, make_name_for_block(iseq), ISEQ_TYPE_BLOCK, opening_lineno);
6947 pm_scope_node_destroy(&next_scope_node);
6949 VALUE argc = INT2FIX(0);
6950 PUSH_INSN1(ret, location, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
6951 PUSH_CALL_WITH_BLOCK(ret, location, idLambda, argc, block);
6952 RB_OBJ_WRITTEN(iseq, Qundef, (VALUE) block);
6954 if (popped) PUSH_INSN(ret, location, pop);
6955 return;
6957 case PM_LOCAL_VARIABLE_AND_WRITE_NODE: {
6958 // foo &&= bar
6959 // ^^^^^^^^^^^
6960 const pm_local_variable_and_write_node_t *cast = (const pm_local_variable_and_write_node_t *) node;
6961 LABEL *end_label = NEW_LABEL(location.line);
6963 pm_local_index_t local_index = pm_lookup_local_index(iseq, scope_node, cast->name, cast->depth);
6964 PUSH_GETLOCAL(ret, location, local_index.index, local_index.level);
6965 if (!popped) PUSH_INSN(ret, location, dup);
6967 PUSH_INSNL(ret, location, branchunless, end_label);
6968 if (!popped) PUSH_INSN(ret, location, pop);
6970 PM_COMPILE_NOT_POPPED(cast->value);
6971 if (!popped) PUSH_INSN(ret, location, dup);
6973 PUSH_SETLOCAL(ret, location, local_index.index, local_index.level);
6974 PUSH_LABEL(ret, end_label);
6976 return;
6978 case PM_LOCAL_VARIABLE_OPERATOR_WRITE_NODE: {
6979 // foo += bar
6980 // ^^^^^^^^^^
6981 const pm_local_variable_operator_write_node_t *cast = (const pm_local_variable_operator_write_node_t *) node;
6983 pm_local_index_t local_index = pm_lookup_local_index(iseq, scope_node, cast->name, cast->depth);
6984 PUSH_GETLOCAL(ret, location, local_index.index, local_index.level);
6986 PM_COMPILE_NOT_POPPED(cast->value);
6988 ID method_id = pm_constant_id_lookup(scope_node, cast->binary_operator);
6989 PUSH_SEND_WITH_FLAG(ret, location, method_id, INT2NUM(1), INT2FIX(VM_CALL_ARGS_SIMPLE));
6991 if (!popped) PUSH_INSN(ret, location, dup);
6992 PUSH_SETLOCAL(ret, location, local_index.index, local_index.level);
6994 return;
6996 case PM_LOCAL_VARIABLE_OR_WRITE_NODE: {
6997 // foo ||= bar
6998 // ^^^^^^^^^^^
6999 const pm_local_variable_or_write_node_t *cast = (const pm_local_variable_or_write_node_t *) node;
7001 LABEL *set_label = NEW_LABEL(location.line);
7002 LABEL *end_label = NEW_LABEL(location.line);
7004 PUSH_INSN1(ret, location, putobject, Qtrue);
7005 PUSH_INSNL(ret, location, branchunless, set_label);
7007 pm_local_index_t local_index = pm_lookup_local_index(iseq, scope_node, cast->name, cast->depth);
7008 PUSH_GETLOCAL(ret, location, local_index.index, local_index.level);
7009 if (!popped) PUSH_INSN(ret, location, dup);
7011 PUSH_INSNL(ret, location, branchif, end_label);
7012 if (!popped) PUSH_INSN(ret, location, pop);
7014 PUSH_LABEL(ret, set_label);
7015 PM_COMPILE_NOT_POPPED(cast->value);
7016 if (!popped) PUSH_INSN(ret, location, dup);
7018 PUSH_SETLOCAL(ret, location, local_index.index, local_index.level);
7019 PUSH_LABEL(ret, end_label);
7021 return;
7023 case PM_LOCAL_VARIABLE_READ_NODE: {
7024 // foo
7025 // ^^^
7026 const pm_local_variable_read_node_t *cast = (const pm_local_variable_read_node_t *) node;
7028 if (!popped) {
7029 pm_local_index_t index = pm_lookup_local_index(iseq, scope_node, cast->name, cast->depth);
7030 PUSH_GETLOCAL(ret, location, index.index, index.level);
7033 return;
7035 case PM_LOCAL_VARIABLE_WRITE_NODE: {
7036 // foo = 1
7037 // ^^^^^^^
7038 const pm_local_variable_write_node_t *cast = (const pm_local_variable_write_node_t *) node;
7039 PM_COMPILE_NOT_POPPED(cast->value);
7040 if (!popped) PUSH_INSN(ret, location, dup);
7042 pm_local_index_t index = pm_lookup_local_index(iseq, scope_node, cast->name, cast->depth);
7043 PUSH_SETLOCAL(ret, location, index.index, index.level);
7044 return;
7046 case PM_MATCH_LAST_LINE_NODE: {
7047 // if /foo/ then end
7048 // ^^^^^
7049 VALUE regexp = pm_static_literal_value(iseq, node, scope_node);
7051 PUSH_INSN1(ret, location, putobject, regexp);
7052 PUSH_INSN2(ret, location, getspecial, INT2FIX(0), INT2FIX(0));
7053 PUSH_SEND(ret, location, idEqTilde, INT2NUM(1));
7054 if (popped) PUSH_INSN(ret, location, pop);
7056 return;
7058 case PM_MATCH_PREDICATE_NODE: {
7059 // foo in bar
7060 // ^^^^^^^^^^
7061 const pm_match_predicate_node_t *cast = (const pm_match_predicate_node_t *) node;
7063 // First, allocate some stack space for the cached return value of any
7064 // calls to #deconstruct.
7065 PUSH_INSN(ret, location, putnil);
7067 // Next, compile the expression that we're going to match against.
7068 PM_COMPILE_NOT_POPPED(cast->value);
7069 PUSH_INSN(ret, location, dup);
7071 // Now compile the pattern that is going to be used to match against the
7072 // expression.
7073 LABEL *matched_label = NEW_LABEL(location.line);
7074 LABEL *unmatched_label = NEW_LABEL(location.line);
7075 LABEL *done_label = NEW_LABEL(location.line);
7076 pm_compile_pattern(iseq, scope_node, cast->pattern, ret, matched_label, unmatched_label, false, false, true, 2);
7078 // If the pattern did not match, then compile the necessary instructions
7079 // to handle pushing false onto the stack, then jump to the end.
7080 PUSH_LABEL(ret, unmatched_label);
7081 PUSH_INSN(ret, location, pop);
7082 PUSH_INSN(ret, location, pop);
7084 if (!popped) PUSH_INSN1(ret, location, putobject, Qfalse);
7085 PUSH_INSNL(ret, location, jump, done_label);
7086 PUSH_INSN(ret, location, putnil);
7088 // If the pattern did match, then compile the necessary instructions to
7089 // handle pushing true onto the stack, then jump to the end.
7090 PUSH_LABEL(ret, matched_label);
7091 PUSH_INSN1(ret, location, adjuststack, INT2FIX(2));
7092 if (!popped) PUSH_INSN1(ret, location, putobject, Qtrue);
7093 PUSH_INSNL(ret, location, jump, done_label);
7095 PUSH_LABEL(ret, done_label);
7096 return;
7098 case PM_MATCH_REQUIRED_NODE: {
7099 // foo => bar
7100 // ^^^^^^^^^^
7102 // A match required node represents pattern matching against a single
7103 // pattern using the => operator. For example,
7105 // foo => bar
7107 // This is somewhat analogous to compiling a case match statement with a
7108 // single pattern. In both cases, if the pattern fails it should
7109 // immediately raise an error.
7110 const pm_match_required_node_t *cast = (const pm_match_required_node_t *) node;
7112 LABEL *matched_label = NEW_LABEL(location.line);
7113 LABEL *unmatched_label = NEW_LABEL(location.line);
7114 LABEL *done_label = NEW_LABEL(location.line);
7116 // First, we're going to push a bunch of stuff onto the stack that is
7117 // going to serve as our scratch space.
7118 PUSH_INSN(ret, location, putnil); // key error key
7119 PUSH_INSN(ret, location, putnil); // key error matchee
7120 PUSH_INSN1(ret, location, putobject, Qfalse); // key error?
7121 PUSH_INSN(ret, location, putnil); // error string
7122 PUSH_INSN(ret, location, putnil); // deconstruct cache
7124 // Next we're going to compile the value expression such that it's on
7125 // the stack.
7126 PM_COMPILE_NOT_POPPED(cast->value);
7128 // Here we'll dup it so that it can be used for comparison, but also be
7129 // used for error handling.
7130 PUSH_INSN(ret, location, dup);
7132 // Next we'll compile the pattern. We indicate to the pm_compile_pattern
7133 // function that this is the only pattern that will be matched against
7134 // through the in_single_pattern parameter. We also indicate that the
7135 // value to compare against is 2 slots from the top of the stack (the
7136 // base_index parameter).
7137 pm_compile_pattern(iseq, scope_node, cast->pattern, ret, matched_label, unmatched_label, true, false, true, 2);
7139 // If the pattern did not match the value, then we're going to compile
7140 // in our error handler code. This will determine which error to raise
7141 // and raise it.
7142 PUSH_LABEL(ret, unmatched_label);
7143 pm_compile_pattern_error_handler(iseq, scope_node, node, ret, done_label, popped);
7145 // If the pattern did match, we'll clean up the values we've pushed onto
7146 // the stack and then push nil onto the stack if it's not popped.
7147 PUSH_LABEL(ret, matched_label);
7148 PUSH_INSN1(ret, location, adjuststack, INT2FIX(6));
7149 if (!popped) PUSH_INSN(ret, location, putnil);
7150 PUSH_INSNL(ret, location, jump, done_label);
7152 PUSH_LABEL(ret, done_label);
7153 return;
7155 case PM_MATCH_WRITE_NODE: {
7156 // /(?<foo>foo)/ =~ bar
7157 // ^^^^^^^^^^^^^^^^^^^^
7159 // Match write nodes are specialized call nodes that have a regular
7160 // expression with valid named capture groups on the left, the =~
7161 // operator, and some value on the right. The nodes themselves simply
7162 // wrap the call with the local variable targets that will be written
7163 // when the call is executed.
7164 const pm_match_write_node_t *cast = (const pm_match_write_node_t *) node;
7165 LABEL *fail_label = NEW_LABEL(location.line);
7166 LABEL *end_label = NEW_LABEL(location.line);
7168 // First, we'll compile the call so that all of its instructions are
7169 // present. Then we'll compile all of the local variable targets.
7170 PM_COMPILE_NOT_POPPED((const pm_node_t *) cast->call);
7172 // Now, check if the match was successful. If it was, then we'll
7173 // continue on and assign local variables. Otherwise we'll skip over the
7174 // assignment code.
7175 PUSH_INSN1(ret, location, getglobal, rb_id2sym(idBACKREF));
7176 PUSH_INSN(ret, location, dup);
7177 PUSH_INSNL(ret, location, branchunless, fail_label);
7179 // If there's only a single local variable target, we can skip some of
7180 // the bookkeeping, so we'll put a special branch here.
7181 size_t targets_count = cast->targets.size;
7183 if (targets_count == 1) {
7184 const pm_node_t *target = cast->targets.nodes[0];
7185 RUBY_ASSERT(PM_NODE_TYPE_P(target, PM_LOCAL_VARIABLE_TARGET_NODE));
7187 const pm_local_variable_target_node_t *local_target = (const pm_local_variable_target_node_t *) target;
7188 pm_local_index_t index = pm_lookup_local_index(iseq, scope_node, local_target->name, local_target->depth);
7190 PUSH_INSN1(ret, location, putobject, rb_id2sym(pm_constant_id_lookup(scope_node, local_target->name)));
7191 PUSH_SEND(ret, location, idAREF, INT2FIX(1));
7192 PUSH_LABEL(ret, fail_label);
7193 PUSH_SETLOCAL(ret, location, index.index, index.level);
7194 if (popped) PUSH_INSN(ret, location, pop);
7195 return;
7198 DECL_ANCHOR(fail_anchor);
7199 INIT_ANCHOR(fail_anchor);
7201 // Otherwise there is more than one local variable target, so we'll need
7202 // to do some bookkeeping.
7203 for (size_t targets_index = 0; targets_index < targets_count; targets_index++) {
7204 const pm_node_t *target = cast->targets.nodes[targets_index];
7205 RUBY_ASSERT(PM_NODE_TYPE_P(target, PM_LOCAL_VARIABLE_TARGET_NODE));
7207 const pm_local_variable_target_node_t *local_target = (const pm_local_variable_target_node_t *) target;
7208 pm_local_index_t index = pm_lookup_local_index(iseq, scope_node, local_target->name, local_target->depth);
7210 if (((size_t) targets_index) < (targets_count - 1)) {
7211 PUSH_INSN(ret, location, dup);
7213 PUSH_INSN1(ret, location, putobject, rb_id2sym(pm_constant_id_lookup(scope_node, local_target->name)));
7214 PUSH_SEND(ret, location, idAREF, INT2FIX(1));
7215 PUSH_SETLOCAL(ret, location, index.index, index.level);
7217 PUSH_INSN(fail_anchor, location, putnil);
7218 PUSH_SETLOCAL(fail_anchor, location, index.index, index.level);
7221 // Since we matched successfully, now we'll jump to the end.
7222 PUSH_INSNL(ret, location, jump, end_label);
7224 // In the case that the match failed, we'll loop through each local
7225 // variable target and set all of them to `nil`.
7226 PUSH_LABEL(ret, fail_label);
7227 PUSH_INSN(ret, location, pop);
7228 PUSH_SEQ(ret, fail_anchor);
7230 // Finally, we can push the end label for either case.
7231 PUSH_LABEL(ret, end_label);
7232 if (popped) PUSH_INSN(ret, location, pop);
7233 return;
7235 case PM_MISSING_NODE: {
7236 rb_bug("A pm_missing_node_t should not exist in prism's AST.");
7237 return;
7239 case PM_MODULE_NODE: {
7240 // module Foo; end
7241 // ^^^^^^^^^^^^^^^
7242 const pm_module_node_t *cast = (const pm_module_node_t *) node;
7244 ID module_id = pm_constant_id_lookup(scope_node, cast->name);
7245 VALUE module_name = rb_str_freeze(rb_sprintf("<module:%"PRIsVALUE">", rb_id2str(module_id)));
7247 pm_scope_node_t next_scope_node;
7248 pm_scope_node_init((const pm_node_t *) cast, &next_scope_node, scope_node);
7250 const rb_iseq_t *module_iseq = NEW_CHILD_ISEQ(&next_scope_node, module_name, ISEQ_TYPE_CLASS, location.line);
7251 pm_scope_node_destroy(&next_scope_node);
7253 const int flags = VM_DEFINECLASS_TYPE_MODULE | pm_compile_class_path(iseq, cast->constant_path, &location, ret, false, scope_node);
7254 PUSH_INSN(ret, location, putnil);
7255 PUSH_INSN3(ret, location, defineclass, ID2SYM(module_id), module_iseq, INT2FIX(flags));
7256 RB_OBJ_WRITTEN(iseq, Qundef, (VALUE) module_iseq);
7258 if (popped) PUSH_INSN(ret, location, pop);
7259 return;
7261 case PM_REQUIRED_PARAMETER_NODE: {
7262 // def foo(bar); end
7263 // ^^^
7264 const pm_required_parameter_node_t *cast = (const pm_required_parameter_node_t *) node;
7265 pm_local_index_t index = pm_lookup_local_index(iseq, scope_node, cast->name, 0);
7267 PUSH_SETLOCAL(ret, location, index.index, index.level);
7268 return;
7270 case PM_MULTI_WRITE_NODE: {
7271 // foo, bar = baz
7272 // ^^^^^^^^^^^^^^
7274 // A multi write node represents writing to multiple values using an =
7275 // operator. Importantly these nodes are only parsed when the left-hand
7276 // side of the operator has multiple targets. The right-hand side of the
7277 // operator having multiple targets represents an implicit array
7278 // instead.
7279 const pm_multi_write_node_t *cast = (const pm_multi_write_node_t *) node;
7281 DECL_ANCHOR(writes);
7282 INIT_ANCHOR(writes);
7284 DECL_ANCHOR(cleanup);
7285 INIT_ANCHOR(cleanup);
7287 pm_multi_target_state_t state = { 0 };
7288 state.position = popped ? 0 : 1;
7289 size_t stack_size = pm_compile_multi_target_node(iseq, node, ret, writes, cleanup, scope_node, &state);
7291 PM_COMPILE_NOT_POPPED(cast->value);
7292 if (!popped) PUSH_INSN(ret, location, dup);
7294 PUSH_SEQ(ret, writes);
7295 if (!popped && stack_size >= 1) {
7296 // Make sure the value on the right-hand side of the = operator is
7297 // being returned before we pop the parent expressions.
7298 PUSH_INSN1(ret, location, setn, INT2FIX(stack_size));
7301 PUSH_SEQ(ret, cleanup);
7302 return;
7304 case PM_NEXT_NODE: {
7305 // next
7306 // ^^^^
7308 // next foo
7309 // ^^^^^^^^
7310 const pm_next_node_t *cast = (const pm_next_node_t *) node;
7312 if (ISEQ_COMPILE_DATA(iseq)->redo_label != 0 && can_add_ensure_iseq(iseq)) {
7313 LABEL *splabel = NEW_LABEL(0);
7314 PUSH_LABEL(ret, splabel);
7316 if (cast->arguments) {
7317 PM_COMPILE_NOT_POPPED((const pm_node_t *) cast->arguments);
7319 else {
7320 PUSH_INSN(ret, location, putnil);
7322 pm_add_ensure_iseq(ret, iseq, 0, scope_node);
7324 PUSH_ADJUST(ret, location, ISEQ_COMPILE_DATA(iseq)->redo_label);
7325 PUSH_INSNL(ret, location, jump, ISEQ_COMPILE_DATA(iseq)->start_label);
7327 PUSH_ADJUST_RESTORE(ret, splabel);
7328 if (!popped) PUSH_INSN(ret, location, putnil);
7330 else if (ISEQ_COMPILE_DATA(iseq)->end_label && can_add_ensure_iseq(iseq)) {
7331 LABEL *splabel = NEW_LABEL(0);
7333 PUSH_LABEL(ret, splabel);
7334 PUSH_ADJUST(ret, location, ISEQ_COMPILE_DATA(iseq)->start_label);
7336 if (cast->arguments != NULL) {
7337 PM_COMPILE_NOT_POPPED((const pm_node_t *) cast->arguments);
7339 else {
7340 PUSH_INSN(ret, location, putnil);
7343 pm_add_ensure_iseq(ret, iseq, 0, scope_node);
7344 PUSH_INSNL(ret, location, jump, ISEQ_COMPILE_DATA(iseq)->end_label);
7345 PUSH_ADJUST_RESTORE(ret, splabel);
7346 splabel->unremovable = FALSE;
7348 if (!popped) PUSH_INSN(ret, location, putnil);
7350 else {
7351 const rb_iseq_t *ip = iseq;
7352 unsigned long throw_flag = 0;
7354 while (ip) {
7355 if (!ISEQ_COMPILE_DATA(ip)) {
7356 ip = 0;
7357 break;
7360 throw_flag = VM_THROW_NO_ESCAPE_FLAG;
7361 if (ISEQ_COMPILE_DATA(ip)->redo_label != 0) {
7362 /* while loop */
7363 break;
7365 else if (ISEQ_BODY(ip)->type == ISEQ_TYPE_BLOCK) {
7366 break;
7368 else if (ISEQ_BODY(ip)->type == ISEQ_TYPE_EVAL) {
7369 COMPILE_ERROR(ERROR_ARGS "Can't escape from eval with next");
7370 return;
7373 ip = ISEQ_BODY(ip)->parent_iseq;
7375 if (ip != 0) {
7376 if (cast->arguments) {
7377 PM_COMPILE_NOT_POPPED((const pm_node_t *) cast->arguments);
7379 else {
7380 PUSH_INSN(ret, location, putnil);
7383 PUSH_INSN1(ret, location, throw, INT2FIX(throw_flag | TAG_NEXT));
7384 if (popped) PUSH_INSN(ret, location, pop);
7386 else {
7387 COMPILE_ERROR(ERROR_ARGS "Invalid next");
7388 return;
7392 return;
7394 case PM_NIL_NODE: {
7395 // nil
7396 // ^^^
7397 if (!popped) {
7398 PUSH_INSN(ret, location, putnil);
7401 return;
7403 case PM_NO_KEYWORDS_PARAMETER_NODE: {
7404 // def foo(**nil); end
7405 // ^^^^^
7406 ISEQ_BODY(iseq)->param.flags.accepts_no_kwarg = TRUE;
7407 return;
7409 case PM_NUMBERED_REFERENCE_READ_NODE: {
7410 // $1
7411 // ^^
7412 if (!popped) {
7413 uint32_t reference_number = ((const pm_numbered_reference_read_node_t *) node)->number;
7415 if (reference_number > 0) {
7416 PUSH_INSN2(ret, location, getspecial, INT2FIX(1), INT2FIX(reference_number << 1));
7418 else {
7419 PUSH_INSN(ret, location, putnil);
7423 return;
7425 case PM_OR_NODE: {
7426 // a or b
7427 // ^^^^^^
7428 const pm_or_node_t *cast = (const pm_or_node_t *) node;
7430 LABEL *end_label = NEW_LABEL(location.line);
7431 PM_COMPILE_NOT_POPPED(cast->left);
7433 if (!popped) PUSH_INSN(ret, location, dup);
7434 PUSH_INSNL(ret, location, branchif, end_label);
7436 if (!popped) PUSH_INSN(ret, location, pop);
7437 PM_COMPILE(cast->right);
7438 PUSH_LABEL(ret, end_label);
7440 return;
7442 case PM_OPTIONAL_PARAMETER_NODE: {
7443 // def foo(bar = 1); end
7444 // ^^^^^^^
7445 const pm_optional_parameter_node_t *cast = (const pm_optional_parameter_node_t *) node;
7446 PM_COMPILE_NOT_POPPED(cast->value);
7448 pm_local_index_t index = pm_lookup_local_index(iseq, scope_node, cast->name, 0);
7449 PUSH_SETLOCAL(ret, location, index.index, index.level);
7451 return;
7453 case PM_PARENTHESES_NODE: {
7454 // ()
7455 // ^^
7457 // (1)
7458 // ^^^
7459 const pm_parentheses_node_t *cast = (const pm_parentheses_node_t *) node;
7461 if (cast->body != NULL) {
7462 PM_COMPILE(cast->body);
7464 else if (!popped) {
7465 PUSH_INSN(ret, location, putnil);
7468 return;
7470 case PM_PRE_EXECUTION_NODE: {
7471 // BEGIN {}
7472 // ^^^^^^^^
7473 const pm_pre_execution_node_t *cast = (const pm_pre_execution_node_t *) node;
7475 LINK_ANCHOR *outer_pre = scope_node->pre_execution_anchor;
7476 RUBY_ASSERT(outer_pre != NULL);
7478 // BEGIN{} nodes can be nested, so here we're going to do the same thing
7479 // that we did for the top-level compilation where we create two
7480 // anchors and then join them in the correct order into the resulting
7481 // anchor.
7482 DECL_ANCHOR(inner_pre);
7483 INIT_ANCHOR(inner_pre);
7484 scope_node->pre_execution_anchor = inner_pre;
7486 DECL_ANCHOR(inner_body);
7487 INIT_ANCHOR(inner_body);
7489 if (cast->statements != NULL) {
7490 const pm_node_list_t *body = &cast->statements->body;
7492 for (size_t index = 0; index < body->size; index++) {
7493 pm_compile_node(iseq, body->nodes[index], inner_body, true, scope_node);
7497 if (!popped) {
7498 PUSH_INSN(inner_body, location, putnil);
7501 // Now that everything has been compiled, join both anchors together
7502 // into the correct outer pre execution anchor, and reset the value so
7503 // that subsequent BEGIN{} nodes can be compiled correctly.
7504 PUSH_SEQ(outer_pre, inner_pre);
7505 PUSH_SEQ(outer_pre, inner_body);
7506 scope_node->pre_execution_anchor = outer_pre;
7508 return;
7510 case PM_POST_EXECUTION_NODE: {
7511 // END {}
7512 // ^^^^^^
7513 const rb_iseq_t *child_iseq;
7514 const rb_iseq_t *prevblock = ISEQ_COMPILE_DATA(iseq)->current_block;
7516 pm_scope_node_t next_scope_node;
7517 pm_scope_node_init(node, &next_scope_node, scope_node);
7518 child_iseq = NEW_CHILD_ISEQ(&next_scope_node, make_name_for_block(iseq), ISEQ_TYPE_BLOCK, lineno);
7519 pm_scope_node_destroy(&next_scope_node);
7521 ISEQ_COMPILE_DATA(iseq)->current_block = child_iseq;
7523 int is_index = ISEQ_BODY(iseq)->ise_size++;
7524 PUSH_INSN2(ret, location, once, child_iseq, INT2FIX(is_index));
7525 RB_OBJ_WRITTEN(iseq, Qundef, (VALUE) child_iseq);
7526 if (popped) PUSH_INSN(ret, location, pop);
7528 ISEQ_COMPILE_DATA(iseq)->current_block = prevblock;
7530 return;
7532 case PM_RANGE_NODE: {
7533 // 0..5
7534 // ^^^^
7535 const pm_range_node_t *cast = (const pm_range_node_t *) node;
7536 bool exclude_end = PM_NODE_FLAG_P(cast, PM_RANGE_FLAGS_EXCLUDE_END);
7538 if (pm_optimizable_range_item_p(cast->left) && pm_optimizable_range_item_p(cast->right)) {
7539 if (!popped) {
7540 const pm_node_t *left = cast->left;
7541 const pm_node_t *right = cast->right;
7543 VALUE val = rb_range_new(
7544 (left && PM_NODE_TYPE_P(left, PM_INTEGER_NODE)) ? parse_integer((const pm_integer_node_t *) left) : Qnil,
7545 (right && PM_NODE_TYPE_P(right, PM_INTEGER_NODE)) ? parse_integer((const pm_integer_node_t *) right) : Qnil,
7546 exclude_end
7549 PUSH_INSN1(ret, location, putobject, val);
7552 else {
7553 if (cast->left == NULL) {
7554 PUSH_INSN(ret, location, putnil);
7556 else {
7557 PM_COMPILE(cast->left);
7560 if (cast->right == NULL) {
7561 PUSH_INSN(ret, location, putnil);
7563 else {
7564 PM_COMPILE(cast->right);
7567 if (!popped) {
7568 PUSH_INSN1(ret, location, newrange, INT2FIX(exclude_end ? 1 : 0));
7571 return;
7573 case PM_RATIONAL_NODE: {
7574 // 1r
7575 // ^^
7576 if (!popped) {
7577 PUSH_INSN1(ret, location, putobject, parse_rational((const pm_rational_node_t *) node));
7579 return;
7581 case PM_REDO_NODE: {
7582 // redo
7583 // ^^^^
7584 if (ISEQ_COMPILE_DATA(iseq)->redo_label && can_add_ensure_iseq(iseq)) {
7585 LABEL *splabel = NEW_LABEL(0);
7587 PUSH_LABEL(ret, splabel);
7588 PUSH_ADJUST(ret, location, ISEQ_COMPILE_DATA(iseq)->redo_label);
7589 pm_add_ensure_iseq(ret, iseq, 0, scope_node);
7591 PUSH_INSNL(ret, location, jump, ISEQ_COMPILE_DATA(iseq)->redo_label);
7592 PUSH_ADJUST_RESTORE(ret, splabel);
7593 if (!popped) PUSH_INSN(ret, location, putnil);
7595 else if (ISEQ_BODY(iseq)->type != ISEQ_TYPE_EVAL && ISEQ_COMPILE_DATA(iseq)->start_label && can_add_ensure_iseq(iseq)) {
7596 LABEL *splabel = NEW_LABEL(0);
7598 PUSH_LABEL(ret, splabel);
7599 pm_add_ensure_iseq(ret, iseq, 0, scope_node);
7600 PUSH_ADJUST(ret, location, ISEQ_COMPILE_DATA(iseq)->start_label);
7602 PUSH_INSNL(ret, location, jump, ISEQ_COMPILE_DATA(iseq)->start_label);
7603 PUSH_ADJUST_RESTORE(ret, splabel);
7604 if (!popped) PUSH_INSN(ret, location, putnil);
7606 else {
7607 const rb_iseq_t *ip = iseq;
7609 while (ip) {
7610 if (!ISEQ_COMPILE_DATA(ip)) {
7611 ip = 0;
7612 break;
7615 if (ISEQ_COMPILE_DATA(ip)->redo_label != 0) {
7616 break;
7618 else if (ISEQ_BODY(ip)->type == ISEQ_TYPE_BLOCK) {
7619 break;
7621 else if (ISEQ_BODY(ip)->type == ISEQ_TYPE_EVAL) {
7622 COMPILE_ERROR(ERROR_ARGS "Can't escape from eval with redo");
7623 return;
7626 ip = ISEQ_BODY(ip)->parent_iseq;
7629 if (ip != 0) {
7630 PUSH_INSN(ret, location, putnil);
7631 PUSH_INSN1(ret, location, throw, INT2FIX(VM_THROW_NO_ESCAPE_FLAG | TAG_REDO));
7632 if (popped) PUSH_INSN(ret, location, pop);
7634 else {
7635 COMPILE_ERROR(ERROR_ARGS "Invalid redo");
7636 return;
7639 return;
7641 case PM_REGULAR_EXPRESSION_NODE: {
7642 // /foo/
7643 // ^^^^^
7644 if (!popped) {
7645 VALUE regexp = pm_static_literal_value(iseq, node, scope_node);
7646 PUSH_INSN1(ret, location, putobject, regexp);
7648 return;
7650 case PM_RESCUE_NODE: {
7651 // begin; rescue; end
7652 // ^^^^^^^
7653 const pm_rescue_node_t *cast = (const pm_rescue_node_t *) node;
7654 iseq_set_exception_local_table(iseq);
7656 // First, establish the labels that we need to be able to jump to within
7657 // this compilation block.
7658 LABEL *exception_match_label = NEW_LABEL(location.line);
7659 LABEL *rescue_end_label = NEW_LABEL(location.line);
7661 // Next, compile each of the exceptions that we're going to be
7662 // handling. For each one, we'll add instructions to check if the
7663 // exception matches the raised one, and if it does then jump to the
7664 // exception_match_label label. Otherwise it will fall through to the
7665 // subsequent check. If there are no exceptions, we'll only check
7666 // StandardError.
7667 const pm_node_list_t *exceptions = &cast->exceptions;
7669 if (exceptions->size > 0) {
7670 for (size_t index = 0; index < exceptions->size; index++) {
7671 PUSH_GETLOCAL(ret, location, LVAR_ERRINFO, 0);
7672 PM_COMPILE(exceptions->nodes[index]);
7673 int checkmatch_flags = VM_CHECKMATCH_TYPE_RESCUE;
7674 if (PM_NODE_TYPE_P(exceptions->nodes[index], PM_SPLAT_NODE)) {
7675 checkmatch_flags |= VM_CHECKMATCH_ARRAY;
7677 PUSH_INSN1(ret, location, checkmatch, INT2FIX(checkmatch_flags));
7678 PUSH_INSNL(ret, location, branchif, exception_match_label);
7681 else {
7682 PUSH_GETLOCAL(ret, location, LVAR_ERRINFO, 0);
7683 PUSH_INSN1(ret, location, putobject, rb_eStandardError);
7684 PUSH_INSN1(ret, location, checkmatch, INT2FIX(VM_CHECKMATCH_TYPE_RESCUE));
7685 PUSH_INSNL(ret, location, branchif, exception_match_label);
7688 // If none of the exceptions that we are matching against matched, then
7689 // we'll jump straight to the rescue_end_label label.
7690 PUSH_INSNL(ret, location, jump, rescue_end_label);
7692 // Here we have the exception_match_label, which is where the
7693 // control-flow goes in the case that one of the exceptions matched.
7694 // Here we will compile the instructions to handle the exception.
7695 PUSH_LABEL(ret, exception_match_label);
7696 PUSH_TRACE(ret, RUBY_EVENT_RESCUE);
7698 // If we have a reference to the exception, then we'll compile the write
7699 // into the instruction sequence. This can look quite different
7700 // depending on the kind of write being performed.
7701 if (cast->reference) {
7702 DECL_ANCHOR(writes);
7703 INIT_ANCHOR(writes);
7705 DECL_ANCHOR(cleanup);
7706 INIT_ANCHOR(cleanup);
7708 pm_compile_target_node(iseq, cast->reference, ret, writes, cleanup, scope_node, NULL);
7709 PUSH_GETLOCAL(ret, location, LVAR_ERRINFO, 0);
7711 PUSH_SEQ(ret, writes);
7712 PUSH_SEQ(ret, cleanup);
7715 // If we have statements to execute, we'll compile them here. Otherwise
7716 // we'll push nil onto the stack.
7717 if (cast->statements) {
7718 // We'll temporarily remove the end_label location from the iseq
7719 // when compiling the statements so that next/redo statements
7720 // inside the body will throw to the correct place instead of
7721 // jumping straight to the end of this iseq
7722 LABEL *prev_end = ISEQ_COMPILE_DATA(iseq)->end_label;
7723 ISEQ_COMPILE_DATA(iseq)->end_label = NULL;
7725 PM_COMPILE((const pm_node_t *) cast->statements);
7727 // Now restore the end_label
7728 ISEQ_COMPILE_DATA(iseq)->end_label = prev_end;
7730 else {
7731 PUSH_INSN(ret, location, putnil);
7734 PUSH_INSN(ret, location, leave);
7736 // Here we'll insert the rescue_end_label label, which is jumped to if
7737 // none of the exceptions matched. It will cause the control-flow to
7738 // either jump to the next rescue clause or it will fall through to the
7739 // subsequent instruction returning the raised error.
7740 PUSH_LABEL(ret, rescue_end_label);
7741 if (cast->consequent) {
7742 PM_COMPILE((const pm_node_t *) cast->consequent);
7744 else {
7745 PUSH_GETLOCAL(ret, location, 1, 0);
7748 return;
7750 case PM_RESCUE_MODIFIER_NODE: {
7751 // foo rescue bar
7752 // ^^^^^^^^^^^^^^
7753 const pm_rescue_modifier_node_t *cast = (const pm_rescue_modifier_node_t *) node;
7755 pm_scope_node_t rescue_scope_node;
7756 pm_scope_node_init((const pm_node_t *) cast, &rescue_scope_node, scope_node);
7758 rb_iseq_t *rescue_iseq = NEW_CHILD_ISEQ(
7759 &rescue_scope_node,
7760 rb_str_concat(rb_str_new2("rescue in "), ISEQ_BODY(iseq)->location.label),
7761 ISEQ_TYPE_RESCUE,
7762 pm_node_line_number(parser, cast->rescue_expression)
7765 pm_scope_node_destroy(&rescue_scope_node);
7767 LABEL *lstart = NEW_LABEL(location.line);
7768 LABEL *lend = NEW_LABEL(location.line);
7769 LABEL *lcont = NEW_LABEL(location.line);
7771 lstart->rescued = LABEL_RESCUE_BEG;
7772 lend->rescued = LABEL_RESCUE_END;
7773 PUSH_LABEL(ret, lstart);
7774 PM_COMPILE_NOT_POPPED(cast->expression);
7775 PUSH_LABEL(ret, lend);
7776 PUSH_INSN(ret, location, nop);
7777 PUSH_LABEL(ret, lcont);
7778 if (popped) PUSH_INSN(ret, location, pop);
7780 PUSH_CATCH_ENTRY(CATCH_TYPE_RESCUE, lstart, lend, rescue_iseq, lcont);
7781 PUSH_CATCH_ENTRY(CATCH_TYPE_RETRY, lend, lcont, NULL, lstart);
7782 return;
7784 case PM_RETURN_NODE: {
7785 // return
7786 // ^^^^^^
7788 // return 1
7789 // ^^^^^^^^
7790 const pm_return_node_t *cast = (const pm_return_node_t *) node;
7791 const pm_arguments_node_t *arguments = cast->arguments;
7793 if (PM_NODE_FLAG_P(cast, PM_RETURN_NODE_FLAGS_REDUNDANT)) {
7794 if (arguments) {
7795 PM_COMPILE_NOT_POPPED((const pm_node_t *) arguments);
7797 else {
7798 PUSH_INSN(ret, location, putnil);
7801 else {
7802 enum rb_iseq_type type = ISEQ_BODY(iseq)->type;
7803 LABEL *splabel = 0;
7805 const rb_iseq_t *parent_iseq = iseq;
7806 enum rb_iseq_type parent_type = ISEQ_BODY(parent_iseq)->type;
7807 while (parent_type == ISEQ_TYPE_RESCUE || parent_type == ISEQ_TYPE_ENSURE) {
7808 if (!(parent_iseq = ISEQ_BODY(parent_iseq)->parent_iseq)) break;
7809 parent_type = ISEQ_BODY(parent_iseq)->type;
7812 switch (parent_type) {
7813 case ISEQ_TYPE_TOP:
7814 case ISEQ_TYPE_MAIN:
7815 if (arguments) {
7816 rb_warn("argument of top-level return is ignored");
7818 if (parent_iseq == iseq) {
7819 type = ISEQ_TYPE_METHOD;
7821 break;
7822 default:
7823 break;
7826 if (type == ISEQ_TYPE_METHOD) {
7827 splabel = NEW_LABEL(0);
7828 PUSH_LABEL(ret, splabel);
7829 PUSH_ADJUST(ret, location, 0);
7832 if (arguments) {
7833 PM_COMPILE_NOT_POPPED((const pm_node_t *) arguments);
7835 else {
7836 PUSH_INSN(ret, location, putnil);
7839 if (type == ISEQ_TYPE_METHOD && can_add_ensure_iseq(iseq)) {
7840 pm_add_ensure_iseq(ret, iseq, 1, scope_node);
7841 PUSH_TRACE(ret, RUBY_EVENT_RETURN);
7842 PUSH_INSN(ret, location, leave);
7843 PUSH_ADJUST_RESTORE(ret, splabel);
7844 if (!popped) PUSH_INSN(ret, location, putnil);
7846 else {
7847 PUSH_INSN1(ret, location, throw, INT2FIX(TAG_RETURN));
7848 if (popped) PUSH_INSN(ret, location, pop);
7852 return;
7854 case PM_RETRY_NODE: {
7855 // retry
7856 // ^^^^^
7857 if (ISEQ_BODY(iseq)->type == ISEQ_TYPE_RESCUE) {
7858 PUSH_INSN(ret, location, putnil);
7859 PUSH_INSN1(ret, location, throw, INT2FIX(TAG_RETRY));
7860 if (popped) PUSH_INSN(ret, location, pop);
7862 else {
7863 COMPILE_ERROR(ERROR_ARGS "Invalid retry");
7864 return;
7866 return;
7868 case PM_SCOPE_NODE: {
7869 pm_scope_node_t *scope_node = (pm_scope_node_t *) node;
7870 pm_constant_id_list_t *locals = &scope_node->locals;
7872 pm_parameters_node_t *parameters_node = NULL;
7873 pm_node_list_t *keywords_list = NULL;
7874 pm_node_list_t *optionals_list = NULL;
7875 pm_node_list_t *posts_list = NULL;
7876 pm_node_list_t *requireds_list = NULL;
7877 pm_node_list_t *block_locals = NULL;
7878 bool trailing_comma = false;
7880 struct rb_iseq_constant_body *body = ISEQ_BODY(iseq);
7882 if (PM_NODE_TYPE_P(scope_node->ast_node, PM_CLASS_NODE)) {
7883 ADD_TRACE(ret, RUBY_EVENT_CLASS);
7886 if (scope_node->parameters) {
7887 switch (PM_NODE_TYPE(scope_node->parameters)) {
7888 case PM_BLOCK_PARAMETERS_NODE: {
7889 pm_block_parameters_node_t *cast = (pm_block_parameters_node_t *) scope_node->parameters;
7890 parameters_node = cast->parameters;
7891 block_locals = &cast->locals;
7893 if (parameters_node) {
7894 if (parameters_node->rest && PM_NODE_TYPE_P(parameters_node->rest, PM_IMPLICIT_REST_NODE)) {
7895 trailing_comma = true;
7898 break;
7900 case PM_PARAMETERS_NODE: {
7901 parameters_node = (pm_parameters_node_t *) scope_node->parameters;
7902 break;
7904 case PM_NUMBERED_PARAMETERS_NODE: {
7905 uint32_t maximum = ((const pm_numbered_parameters_node_t *) scope_node->parameters)->maximum;
7906 body->param.lead_num = maximum;
7907 body->param.flags.ambiguous_param0 = maximum == 1;
7908 break;
7910 case PM_IT_PARAMETERS_NODE:
7911 body->param.lead_num = 1;
7912 body->param.flags.ambiguous_param0 = true;
7913 break;
7914 default:
7915 rb_bug("Unexpected node type for parameters: %s", pm_node_type_to_str(PM_NODE_TYPE(node)));
7919 struct rb_iseq_param_keyword *keyword = NULL;
7921 if (parameters_node) {
7922 optionals_list = &parameters_node->optionals;
7923 requireds_list = &parameters_node->requireds;
7924 keywords_list = &parameters_node->keywords;
7925 posts_list = &parameters_node->posts;
7927 else if (scope_node->parameters && (PM_NODE_TYPE_P(scope_node->parameters, PM_NUMBERED_PARAMETERS_NODE) || PM_NODE_TYPE_P(scope_node->parameters, PM_IT_PARAMETERS_NODE))) {
7928 body->param.opt_num = 0;
7930 else {
7931 body->param.lead_num = 0;
7932 body->param.opt_num = 0;
7935 //********STEP 1**********
7936 // Goal: calculate the table size for the locals, accounting for
7937 // hidden variables and multi target nodes
7938 size_t locals_size = locals->size;
7940 // Index lookup table buffer size is only the number of the locals
7941 st_table *index_lookup_table = st_init_numtable();
7943 int table_size = (int) locals_size;
7945 // For nodes have a hidden iteration variable. We add that to the local
7946 // table size here.
7947 if (PM_NODE_TYPE_P(scope_node->ast_node, PM_FOR_NODE)) table_size++;
7949 if (keywords_list && keywords_list->size) {
7950 table_size++;
7953 if (requireds_list) {
7954 for (size_t i = 0; i < requireds_list->size; i++) {
7955 // For each MultiTargetNode, we're going to have one
7956 // additional anonymous local not represented in the locals table
7957 // We want to account for this in our table size
7958 pm_node_t *required = requireds_list->nodes[i];
7959 if (PM_NODE_TYPE_P(required, PM_MULTI_TARGET_NODE)) {
7960 table_size++;
7962 else if (PM_NODE_TYPE_P(required, PM_REQUIRED_PARAMETER_NODE)) {
7963 if (PM_NODE_FLAG_P(required, PM_PARAMETER_FLAGS_REPEATED_PARAMETER)) {
7964 table_size++;
7970 // Ensure there is enough room in the local table for any
7971 // parameters that have been repeated
7972 // ex: def underscore_parameters(_, _ = 1, _ = 2); _; end
7973 // ^^^^^^^^^^^^
7974 if (optionals_list && optionals_list->size) {
7975 for (size_t i = 0; i < optionals_list->size; i++) {
7976 pm_node_t * node = optionals_list->nodes[i];
7977 if (PM_NODE_FLAG_P(node, PM_PARAMETER_FLAGS_REPEATED_PARAMETER)) {
7978 table_size++;
7983 // If we have an anonymous "rest" node, we'll need to increase the local
7984 // table size to take it in to account.
7985 // def m(foo, *, bar)
7986 // ^
7987 if (parameters_node) {
7988 if (parameters_node->rest) {
7989 if (!(PM_NODE_TYPE_P(parameters_node->rest, PM_IMPLICIT_REST_NODE))) {
7990 if (!((const pm_rest_parameter_node_t *) parameters_node->rest)->name || PM_NODE_FLAG_P(parameters_node->rest, PM_PARAMETER_FLAGS_REPEATED_PARAMETER)) {
7991 table_size++;
7996 // def foo(_, **_); _; end
7997 // ^^^
7998 if (parameters_node->keyword_rest) {
7999 // def foo(...); end
8000 // ^^^
8001 // When we have a `...` as the keyword_rest, it's a forwarding_parameter_node and
8002 // we need to leave space for 4 locals: *, **, &, ...
8003 if (PM_NODE_TYPE_P(parameters_node->keyword_rest, PM_FORWARDING_PARAMETER_NODE)) {
8004 table_size += 4;
8006 else {
8007 const pm_keyword_rest_parameter_node_t *kw_rest = (const pm_keyword_rest_parameter_node_t *) parameters_node->keyword_rest;
8009 // If it's anonymous or repeated, then we need to allocate stack space
8010 if (!kw_rest->name || PM_NODE_FLAG_P(kw_rest, PM_PARAMETER_FLAGS_REPEATED_PARAMETER)) {
8011 table_size++;
8017 if (posts_list) {
8018 for (size_t i = 0; i < posts_list->size; i++) {
8019 // For each MultiTargetNode, we're going to have one
8020 // additional anonymous local not represented in the locals table
8021 // We want to account for this in our table size
8022 pm_node_t *required = posts_list->nodes[i];
8023 if (PM_NODE_TYPE_P(required, PM_MULTI_TARGET_NODE) || PM_NODE_FLAG_P(required, PM_PARAMETER_FLAGS_REPEATED_PARAMETER)) {
8024 table_size++;
8029 if (keywords_list && keywords_list->size) {
8030 for (size_t i = 0; i < keywords_list->size; i++) {
8031 pm_node_t *keyword_parameter_node = keywords_list->nodes[i];
8032 if (PM_NODE_FLAG_P(keyword_parameter_node, PM_PARAMETER_FLAGS_REPEATED_PARAMETER)) {
8033 table_size++;
8038 if (parameters_node && parameters_node->block) {
8039 const pm_block_parameter_node_t *block_node = (const pm_block_parameter_node_t *) parameters_node->block;
8041 if (PM_NODE_FLAG_P(block_node, PM_PARAMETER_FLAGS_REPEATED_PARAMETER) || !block_node->name) {
8042 table_size++;
8046 // We can create local_table_for_iseq with the correct size
8047 VALUE idtmp = 0;
8048 rb_ast_id_table_t *local_table_for_iseq = ALLOCV(idtmp, sizeof(rb_ast_id_table_t) + table_size * sizeof(ID));
8049 local_table_for_iseq->size = table_size;
8051 //********END OF STEP 1**********
8053 //********STEP 2**********
8054 // Goal: populate iv index table as well as local table, keeping the
8055 // layout of the local table consistent with the layout of the
8056 // stack when calling the method
8058 // Do a first pass on all of the parameters, setting their values in
8059 // the local_table_for_iseq, _except_ for Multis who get a hidden
8060 // variable in this step, and will get their names inserted in step 3
8062 // local_index is a cursor that keeps track of the current
8063 // index into local_table_for_iseq. The local table is actually a list,
8064 // and the order of that list must match the order of the items pushed
8065 // on the stack. We need to take in to account things pushed on the
8066 // stack that _might not have a name_ (for example array destructuring).
8067 // This index helps us know which item we're dealing with and also give
8068 // those anonymous items temporary names (as below)
8069 int local_index = 0;
8071 // Here we figure out local table indices and insert them in to the
8072 // index lookup table and local tables.
8074 // def foo(a, (b, *c, d), e = 1, *f, g, (h, *i, j), k:, l: 1, **m, &n)
8075 // ^^^^^^^^^^^^^
8076 if (requireds_list && requireds_list->size) {
8077 for (size_t i = 0; i < requireds_list->size; i++, local_index++) {
8078 ID local;
8080 // For each MultiTargetNode, we're going to have one additional
8081 // anonymous local not represented in the locals table. We want
8082 // to account for this in our table size.
8083 pm_node_t *required = requireds_list->nodes[i];
8085 switch (PM_NODE_TYPE(required)) {
8086 // def foo(a, (b, *c, d), e = 1, *f, g, (h, *i, j), k:, l: 1, **m, &n)
8087 // ^^^^^^^^^^
8088 case PM_MULTI_TARGET_NODE: {
8089 local = rb_make_temporary_id(local_index);
8090 local_table_for_iseq->ids[local_index] = local;
8091 break;
8093 // def foo(a, (b, *c, d), e = 1, *f, g, (h, *i, j), k:, l: 1, **m, &n)
8094 // ^
8095 case PM_REQUIRED_PARAMETER_NODE: {
8096 const pm_required_parameter_node_t *param = (const pm_required_parameter_node_t *) required;
8098 if (PM_NODE_FLAG_P(required, PM_PARAMETER_FLAGS_REPEATED_PARAMETER)) {
8099 ID local = pm_constant_id_lookup(scope_node, param->name);
8100 local_table_for_iseq->ids[local_index] = local;
8102 else {
8103 pm_insert_local_index(param->name, local_index, index_lookup_table, local_table_for_iseq, scope_node);
8106 break;
8108 default: {
8109 rb_bug("Unsupported node in requireds in parameters %s", pm_node_type_to_str(PM_NODE_TYPE(node)));
8114 body->param.lead_num = (int) requireds_list->size;
8115 body->param.flags.has_lead = true;
8118 // def foo(a, (b, *c, d), e = 1, *f, g, (h, *i, j), k:, l: 1, **m, &n)
8119 // ^^^^^
8120 if (optionals_list && optionals_list->size) {
8121 body->param.opt_num = (int) optionals_list->size;
8122 body->param.flags.has_opt = true;
8124 for (size_t i = 0; i < optionals_list->size; i++, local_index++) {
8125 pm_node_t * node = optionals_list->nodes[i];
8126 pm_constant_id_t name = ((const pm_optional_parameter_node_t *) node)->name;
8128 if (PM_NODE_FLAG_P(node, PM_PARAMETER_FLAGS_REPEATED_PARAMETER)) {
8129 ID local = pm_constant_id_lookup(scope_node, name);
8130 local_table_for_iseq->ids[local_index] = local;
8132 else {
8133 pm_insert_local_index(name, local_index, index_lookup_table, local_table_for_iseq, scope_node);
8138 // def foo(a, (b, *c, d), e = 1, *f, g, (h, *i, j), k:, l: 1, **m, &n)
8139 // ^^
8140 if (parameters_node && parameters_node->rest) {
8141 body->param.rest_start = local_index;
8143 // If there's a trailing comma, we'll have an implicit rest node,
8144 // and we don't want it to impact the rest variables on param
8145 if (!(PM_NODE_TYPE_P(parameters_node->rest, PM_IMPLICIT_REST_NODE))) {
8146 body->param.flags.has_rest = true;
8147 RUBY_ASSERT(body->param.rest_start != -1);
8149 pm_constant_id_t name = ((const pm_rest_parameter_node_t *) parameters_node->rest)->name;
8151 if (name) {
8152 // def foo(a, (b, *c, d), e = 1, *f, g, (h, *i, j), k:, l: 1, **m, &n)
8153 // ^^
8154 if (PM_NODE_FLAG_P(parameters_node->rest, PM_PARAMETER_FLAGS_REPEATED_PARAMETER)) {
8155 ID local = pm_constant_id_lookup(scope_node, name);
8156 local_table_for_iseq->ids[local_index] = local;
8158 else {
8159 pm_insert_local_index(name, local_index, index_lookup_table, local_table_for_iseq, scope_node);
8162 else {
8163 // def foo(a, (b, *c, d), e = 1, *, g, (h, *i, j), k:, l: 1, **m, &n)
8164 // ^
8165 pm_insert_local_special(idMULT, local_index, index_lookup_table, local_table_for_iseq);
8168 local_index++;
8172 // def foo(a, (b, *c, d), e = 1, *f, g, (h, *i, j), k:, l: 1, **m, &n)
8173 // ^^^^^^^^^^^^^
8174 if (posts_list && posts_list->size) {
8175 body->param.post_num = (int) posts_list->size;
8176 body->param.post_start = local_index;
8177 body->param.flags.has_post = true;
8179 for (size_t i = 0; i < posts_list->size; i++, local_index++) {
8180 ID local;
8182 // For each MultiTargetNode, we're going to have one additional
8183 // anonymous local not represented in the locals table. We want
8184 // to account for this in our table size.
8185 const pm_node_t *post_node = posts_list->nodes[i];
8187 switch (PM_NODE_TYPE(post_node)) {
8188 // def foo(a, (b, *c, d), e = 1, *f, g, (h, *i, j), k:, l: 1, **m, &n)
8189 // ^^^^^^^^^^
8190 case PM_MULTI_TARGET_NODE: {
8191 local = rb_make_temporary_id(local_index);
8192 local_table_for_iseq->ids[local_index] = local;
8193 break;
8195 // def foo(a, (b, *c, d), e = 1, *f, g, (h, *i, j), k:, l: 1, **m, &n)
8196 // ^
8197 case PM_REQUIRED_PARAMETER_NODE: {
8198 const pm_required_parameter_node_t *param = (const pm_required_parameter_node_t *) post_node;
8200 if (PM_NODE_FLAG_P(param, PM_PARAMETER_FLAGS_REPEATED_PARAMETER)) {
8201 ID local = pm_constant_id_lookup(scope_node, param->name);
8202 local_table_for_iseq->ids[local_index] = local;
8204 else {
8205 pm_insert_local_index(param->name, local_index, index_lookup_table, local_table_for_iseq, scope_node);
8207 break;
8209 default: {
8210 rb_bug("Unsupported node in posts in parameters %s", pm_node_type_to_str(PM_NODE_TYPE(node)));
8216 // def foo(a, (b, *c, d), e = 1, *f, g, (h, *i, j), k:, l: 1, **m, &n)
8217 // ^^^^^^^^
8218 // Keywords create an internal variable on the parse tree
8219 if (keywords_list && keywords_list->size) {
8220 body->param.keyword = keyword = ZALLOC_N(struct rb_iseq_param_keyword, 1);
8221 keyword->num = (int) keywords_list->size;
8223 body->param.flags.has_kw = true;
8224 const VALUE default_values = rb_ary_hidden_new(1);
8225 const VALUE complex_mark = rb_str_tmp_new(0);
8227 ID *ids = xcalloc(keywords_list->size, sizeof(ID));
8229 size_t kw_index = 0;
8231 for (size_t i = 0; i < keywords_list->size; i++) {
8232 pm_node_t *keyword_parameter_node = keywords_list->nodes[i];
8233 pm_constant_id_t name;
8235 // def foo(a, (b, *c, d), e = 1, *f, g, (h, *i, j), k:, l: 1, **m, &n)
8236 // ^^
8237 if (PM_NODE_TYPE_P(keyword_parameter_node, PM_REQUIRED_KEYWORD_PARAMETER_NODE)) {
8238 name = ((const pm_required_keyword_parameter_node_t *) keyword_parameter_node)->name;
8239 keyword->required_num++;
8240 ID local = pm_constant_id_lookup(scope_node, name);
8242 if (PM_NODE_FLAG_P(keyword_parameter_node, PM_PARAMETER_FLAGS_REPEATED_PARAMETER)) {
8243 local_table_for_iseq->ids[local_index] = local;
8245 else {
8246 pm_insert_local_index(name, local_index, index_lookup_table, local_table_for_iseq, scope_node);
8248 local_index++;
8249 ids[kw_index++] = local;
8253 for (size_t i = 0; i < keywords_list->size; i++) {
8254 pm_node_t *keyword_parameter_node = keywords_list->nodes[i];
8255 pm_constant_id_t name;
8257 // def foo(a, (b, *c, d), e = 1, *f, g, (h, *i, j), k:, l: 1, **m, &n)
8258 // ^^^^
8259 if (PM_NODE_TYPE_P(keyword_parameter_node, PM_OPTIONAL_KEYWORD_PARAMETER_NODE)) {
8260 const pm_optional_keyword_parameter_node_t *cast = ((const pm_optional_keyword_parameter_node_t *) keyword_parameter_node);
8262 pm_node_t *value = cast->value;
8263 name = cast->name;
8265 if (PM_NODE_FLAG_P(value, PM_NODE_FLAG_STATIC_LITERAL) && !(PM_NODE_TYPE_P(value, PM_ARRAY_NODE) || PM_NODE_TYPE_P(value, PM_HASH_NODE) || PM_NODE_TYPE_P(value, PM_RANGE_NODE))) {
8266 rb_ary_push(default_values, pm_static_literal_value(iseq, value, scope_node));
8268 else {
8269 rb_ary_push(default_values, complex_mark);
8272 ID local = pm_constant_id_lookup(scope_node, name);
8273 if (PM_NODE_FLAG_P(keyword_parameter_node, PM_PARAMETER_FLAGS_REPEATED_PARAMETER)) {
8274 local_table_for_iseq->ids[local_index] = local;
8276 else {
8277 pm_insert_local_index(name, local_index, index_lookup_table, local_table_for_iseq, scope_node);
8279 ids[kw_index++] = local;
8280 local_index++;
8285 keyword->bits_start = local_index;
8286 keyword->table = ids;
8288 VALUE *dvs = ALLOC_N(VALUE, RARRAY_LEN(default_values));
8290 for (int i = 0; i < RARRAY_LEN(default_values); i++) {
8291 VALUE dv = RARRAY_AREF(default_values, i);
8292 if (dv == complex_mark) dv = Qundef;
8293 if (!SPECIAL_CONST_P(dv)) {
8294 RB_OBJ_WRITTEN(iseq, Qundef, dv);
8296 dvs[i] = dv;
8299 keyword->default_values = dvs;
8301 // Hidden local for keyword arguments
8302 ID local = rb_make_temporary_id(local_index);
8303 local_table_for_iseq->ids[local_index] = local;
8304 local_index++;
8307 if (body->type == ISEQ_TYPE_BLOCK && local_index == 1 && requireds_list && requireds_list->size == 1 && !trailing_comma) {
8308 body->param.flags.ambiguous_param0 = true;
8311 if (parameters_node) {
8312 // def foo(a, (b, *c, d), e = 1, *f, g, (h, *i, j), k:, l: 1, **m, &n)
8313 // ^^^
8314 if (parameters_node->keyword_rest) {
8315 switch (PM_NODE_TYPE(parameters_node->keyword_rest)) {
8316 // def foo(a, (b, *c, d), e = 1, *f, g, (h, *i, j), k:, l: 1, **nil, &n)
8317 // ^^^^^
8318 case PM_NO_KEYWORDS_PARAMETER_NODE: {
8319 body->param.flags.accepts_no_kwarg = true;
8320 break;
8322 // def foo(a, (b, *c, d), e = 1, *f, g, (h, *i, j), k:, l: 1, **m, &n)
8323 // ^^^
8324 case PM_KEYWORD_REST_PARAMETER_NODE: {
8325 const pm_keyword_rest_parameter_node_t *kw_rest_node = (const pm_keyword_rest_parameter_node_t *) parameters_node->keyword_rest;
8326 if (!body->param.flags.has_kw) {
8327 body->param.keyword = keyword = ZALLOC_N(struct rb_iseq_param_keyword, 1);
8330 keyword->rest_start = local_index;
8331 body->param.flags.has_kwrest = true;
8333 pm_constant_id_t constant_id = kw_rest_node->name;
8334 if (constant_id) {
8335 if (PM_NODE_FLAG_P(kw_rest_node, PM_PARAMETER_FLAGS_REPEATED_PARAMETER)) {
8336 ID local = pm_constant_id_lookup(scope_node, constant_id);
8337 local_table_for_iseq->ids[local_index] = local;
8339 else {
8340 pm_insert_local_index(constant_id, local_index, index_lookup_table, local_table_for_iseq, scope_node);
8343 else {
8344 pm_insert_local_special(idPow, local_index, index_lookup_table, local_table_for_iseq);
8347 local_index++;
8348 break;
8350 // def foo(...)
8351 // ^^^
8352 case PM_FORWARDING_PARAMETER_NODE: {
8353 body->param.rest_start = local_index;
8354 body->param.flags.has_rest = true;
8356 // Add the leading *
8357 pm_insert_local_special(idMULT, local_index++, index_lookup_table, local_table_for_iseq);
8359 // Add the kwrest **
8360 RUBY_ASSERT(!body->param.flags.has_kw);
8362 // There are no keywords declared (in the text of the program)
8363 // but the forwarding node implies we support kwrest (**)
8364 body->param.flags.has_kw = false;
8365 body->param.flags.has_kwrest = true;
8366 body->param.keyword = keyword = ZALLOC_N(struct rb_iseq_param_keyword, 1);
8368 keyword->rest_start = local_index;
8370 pm_insert_local_special(idPow, local_index++, index_lookup_table, local_table_for_iseq);
8372 body->param.block_start = local_index;
8373 body->param.flags.has_block = true;
8375 pm_insert_local_special(idAnd, local_index++, index_lookup_table, local_table_for_iseq);
8376 pm_insert_local_special(idDot3, local_index++, index_lookup_table, local_table_for_iseq);
8377 break;
8379 default: {
8380 rb_bug("node type %s not expected as keyword_rest", pm_node_type_to_str(PM_NODE_TYPE(parameters_node->keyword_rest)));
8385 // def foo(a, (b, *c, d), e = 1, *f, g, (h, *i, j), k:, l: 1, **m, &n)
8386 // ^^
8387 if (parameters_node->block) {
8388 body->param.block_start = local_index;
8389 body->param.flags.has_block = true;
8391 pm_constant_id_t name = ((const pm_block_parameter_node_t *) parameters_node->block)->name;
8393 if (name) {
8394 if (PM_NODE_FLAG_P(parameters_node->block, PM_PARAMETER_FLAGS_REPEATED_PARAMETER)) {
8395 ID local = pm_constant_id_lookup(scope_node, name);
8396 local_table_for_iseq->ids[local_index] = local;
8398 else {
8399 pm_insert_local_index(name, local_index, index_lookup_table, local_table_for_iseq, scope_node);
8402 else {
8403 pm_insert_local_special(idAnd, local_index, index_lookup_table, local_table_for_iseq);
8406 local_index++;
8410 //********END OF STEP 2**********
8411 // The local table is now consistent with expected
8412 // stack layout
8414 // If there's only one required element in the parameters
8415 // CRuby needs to recognize it as an ambiguous parameter
8417 //********STEP 3**********
8418 // Goal: fill in the names of the parameters in MultiTargetNodes
8420 // Go through requireds again to set the multis
8422 if (requireds_list && requireds_list->size) {
8423 for (size_t i = 0; i < requireds_list->size; i++) {
8424 // For each MultiTargetNode, we're going to have one
8425 // additional anonymous local not represented in the locals table
8426 // We want to account for this in our table size
8427 const pm_node_t *required = requireds_list->nodes[i];
8429 if (PM_NODE_TYPE_P(required, PM_MULTI_TARGET_NODE)) {
8430 local_index = pm_compile_destructured_param_locals((const pm_multi_target_node_t *) required, index_lookup_table, local_table_for_iseq, scope_node, local_index);
8435 // Go through posts again to set the multis
8436 if (posts_list && posts_list->size) {
8437 for (size_t i = 0; i < posts_list->size; i++) {
8438 // For each MultiTargetNode, we're going to have one
8439 // additional anonymous local not represented in the locals table
8440 // We want to account for this in our table size
8441 const pm_node_t *post = posts_list->nodes[i];
8443 if (PM_NODE_TYPE_P(post, PM_MULTI_TARGET_NODE)) {
8444 local_index = pm_compile_destructured_param_locals((const pm_multi_target_node_t *) post, index_lookup_table, local_table_for_iseq, scope_node, local_index);
8449 // Set any anonymous locals for the for node
8450 if (PM_NODE_TYPE_P(scope_node->ast_node, PM_FOR_NODE)) {
8451 if (PM_NODE_TYPE_P(((const pm_for_node_t *) scope_node->ast_node)->index, PM_LOCAL_VARIABLE_TARGET_NODE)) {
8452 body->param.lead_num++;
8454 else {
8455 body->param.rest_start = local_index;
8456 body->param.flags.has_rest = true;
8459 ID local = rb_make_temporary_id(local_index);
8460 local_table_for_iseq->ids[local_index] = local;
8461 local_index++;
8464 // Fill in any NumberedParameters, if they exist
8465 if (scope_node->parameters && PM_NODE_TYPE_P(scope_node->parameters, PM_NUMBERED_PARAMETERS_NODE)) {
8466 int maximum = ((const pm_numbered_parameters_node_t *) scope_node->parameters)->maximum;
8467 RUBY_ASSERT(0 < maximum && maximum <= 9);
8468 for (int i = 0; i < maximum; i++, local_index++) {
8469 const uint8_t param_name[] = { '_', '1' + i };
8470 pm_constant_id_t constant_id = pm_constant_pool_find(&parser->constant_pool, param_name, 2);
8471 RUBY_ASSERT(constant_id && "parser should fill in any gaps in numbered parameters");
8472 pm_insert_local_index(constant_id, local_index, index_lookup_table, local_table_for_iseq, scope_node);
8474 body->param.lead_num = maximum;
8475 body->param.flags.has_lead = true;
8478 // Fill in the it variable, if it exists
8479 if (scope_node->parameters && PM_NODE_TYPE_P(scope_node->parameters, PM_IT_PARAMETERS_NODE)) {
8480 const uint8_t param_name[] = { '0', 'i', 't' };
8481 pm_constant_id_t constant_id = pm_constant_pool_find(&parser->constant_pool, param_name, 3);
8482 RUBY_ASSERT(constant_id && "parser should have inserted 0it for 'it' local");
8484 ID local = rb_make_temporary_id(local_index);
8485 local_table_for_iseq->ids[local_index] = local;
8486 st_insert(index_lookup_table, (st_data_t) constant_id, (st_data_t) local_index);
8487 local_index++;
8490 //********END OF STEP 3**********
8492 //********STEP 4**********
8493 // Goal: fill in the method body locals
8494 // To be explicit, these are the non-parameter locals
8495 // We fill in the block_locals, if they exist
8496 // lambda { |x; y| y }
8497 // ^
8498 if (block_locals && block_locals->size) {
8499 for (size_t i = 0; i < block_locals->size; i++, local_index++) {
8500 pm_constant_id_t constant_id = ((const pm_block_local_variable_node_t *) block_locals->nodes[i])->name;
8501 pm_insert_local_index(constant_id, local_index, index_lookup_table, local_table_for_iseq, scope_node);
8505 // Fill in any locals we missed
8506 if (scope_node->locals.size) {
8507 for (size_t i = 0; i < scope_node->locals.size; i++) {
8508 pm_constant_id_t constant_id = locals->ids[i];
8509 if (constant_id) {
8510 struct pm_local_table_insert_ctx ctx;
8511 ctx.scope_node = scope_node;
8512 ctx.local_table_for_iseq = local_table_for_iseq;
8513 ctx.local_index = local_index;
8515 st_update(index_lookup_table, (st_data_t)constant_id, pm_local_table_insert_func, (st_data_t)&ctx);
8517 local_index = ctx.local_index;
8522 //********END OF STEP 4**********
8524 // We set the index_lookup_table on the scope node so we can
8525 // refer to the parameters correctly
8526 if (scope_node->index_lookup_table) {
8527 st_free_table(scope_node->index_lookup_table);
8529 scope_node->index_lookup_table = index_lookup_table;
8530 iseq_calc_param_size(iseq);
8531 iseq_set_local_table(iseq, local_table_for_iseq);
8532 scope_node->local_table_for_iseq_size = local_table_for_iseq->size;
8534 //********STEP 5************
8535 // Goal: compile anything that needed to be compiled
8536 if (optionals_list && optionals_list->size) {
8537 LABEL **opt_table = (LABEL **) ALLOC_N(VALUE, optionals_list->size + 1);
8538 LABEL *label;
8540 // TODO: Should we make an api for NEW_LABEL where you can pass
8541 // a pointer to the label it should fill out? We already
8542 // have a list of labels allocated above so it seems wasteful
8543 // to do the copies.
8544 for (size_t i = 0; i < optionals_list->size; i++) {
8545 label = NEW_LABEL(lineno);
8546 opt_table[i] = label;
8547 PUSH_LABEL(ret, label);
8548 pm_node_t *optional_node = optionals_list->nodes[i];
8549 PM_COMPILE_NOT_POPPED(optional_node);
8552 // Set the last label
8553 label = NEW_LABEL(lineno);
8554 opt_table[optionals_list->size] = label;
8555 PUSH_LABEL(ret, label);
8557 body->param.opt_table = (const VALUE *) opt_table;
8560 if (keywords_list && keywords_list->size) {
8561 size_t optional_index = 0;
8562 for (size_t i = 0; i < keywords_list->size; i++) {
8563 pm_node_t *keyword_parameter_node = keywords_list->nodes[i];
8564 pm_constant_id_t name;
8566 switch (PM_NODE_TYPE(keyword_parameter_node)) {
8567 // def foo(a, (b, *c, d), e = 1, *f, g, (h, *i, j), k:, l: 1, **m, &n)
8568 // ^^^^
8569 case PM_OPTIONAL_KEYWORD_PARAMETER_NODE: {
8570 const pm_optional_keyword_parameter_node_t *cast = ((const pm_optional_keyword_parameter_node_t *) keyword_parameter_node);
8572 pm_node_t *value = cast->value;
8573 name = cast->name;
8575 if (!PM_NODE_FLAG_P(value, PM_NODE_FLAG_STATIC_LITERAL) || PM_NODE_TYPE_P(value, PM_ARRAY_NODE) || PM_NODE_TYPE_P(value, PM_HASH_NODE) || PM_NODE_TYPE_P(value, PM_RANGE_NODE)) {
8576 LABEL *end_label = NEW_LABEL(location.line);
8578 pm_local_index_t index = pm_lookup_local_index(iseq, scope_node, name, 0);
8579 int kw_bits_idx = table_size - body->param.keyword->bits_start;
8580 PUSH_INSN2(ret, location, checkkeyword, INT2FIX(kw_bits_idx + VM_ENV_DATA_SIZE - 1), INT2FIX(optional_index));
8581 PUSH_INSNL(ret, location, branchif, end_label);
8582 PM_COMPILE(value);
8583 PUSH_SETLOCAL(ret, location, index.index, index.level);
8584 PUSH_LABEL(ret, end_label);
8586 optional_index++;
8587 break;
8589 // def foo(a, (b, *c, d), e = 1, *f, g, (h, *i, j), k:, l: 1, **m, &n)
8590 // ^^
8591 case PM_REQUIRED_KEYWORD_PARAMETER_NODE: {
8592 break;
8594 default: {
8595 rb_bug("Unexpected keyword parameter node type %s", pm_node_type_to_str(PM_NODE_TYPE(keyword_parameter_node)));
8601 if (requireds_list && requireds_list->size) {
8602 for (size_t i = 0; i < requireds_list->size; i++) {
8603 // For each MultiTargetNode, we're going to have one additional
8604 // anonymous local not represented in the locals table. We want
8605 // to account for this in our table size.
8606 const pm_node_t *required = requireds_list->nodes[i];
8608 if (PM_NODE_TYPE_P(required, PM_MULTI_TARGET_NODE)) {
8609 PUSH_GETLOCAL(ret, location, table_size - (int)i, 0);
8610 pm_compile_destructured_param_writes(iseq, (const pm_multi_target_node_t *) required, ret, scope_node);
8615 if (posts_list && posts_list->size) {
8616 for (size_t i = 0; i < posts_list->size; i++) {
8617 // For each MultiTargetNode, we're going to have one additional
8618 // anonymous local not represented in the locals table. We want
8619 // to account for this in our table size.
8620 const pm_node_t *post = posts_list->nodes[i];
8622 if (PM_NODE_TYPE_P(post, PM_MULTI_TARGET_NODE)) {
8623 PUSH_GETLOCAL(ret, location, table_size - body->param.post_start - (int) i, 0);
8624 pm_compile_destructured_param_writes(iseq, (const pm_multi_target_node_t *) post, ret, scope_node);
8629 switch (body->type) {
8630 case ISEQ_TYPE_BLOCK: {
8631 LABEL *start = ISEQ_COMPILE_DATA(iseq)->start_label = NEW_LABEL(0);
8632 LABEL *end = ISEQ_COMPILE_DATA(iseq)->end_label = NEW_LABEL(0);
8633 const pm_line_column_t block_location = { .line = body->location.first_lineno, .column = -1 };
8635 start->rescued = LABEL_RESCUE_BEG;
8636 end->rescued = LABEL_RESCUE_END;
8638 // For nodes automatically assign the iteration variable to whatever
8639 // index variable. We need to handle that write here because it has
8640 // to happen in the context of the block. Note that this happens
8641 // before the B_CALL tracepoint event.
8642 if (PM_NODE_TYPE_P(scope_node->ast_node, PM_FOR_NODE)) {
8643 pm_compile_for_node_index(iseq, ((const pm_for_node_t *) scope_node->ast_node)->index, ret, scope_node);
8646 PUSH_TRACE(ret, RUBY_EVENT_B_CALL);
8647 PUSH_INSN(ret, block_location, nop);
8648 PUSH_LABEL(ret, start);
8650 if (scope_node->body != NULL) {
8651 switch (PM_NODE_TYPE(scope_node->ast_node)) {
8652 case PM_POST_EXECUTION_NODE: {
8653 const pm_post_execution_node_t *cast = (const pm_post_execution_node_t *) scope_node->ast_node;
8654 PUSH_INSN1(ret, block_location, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
8656 // We create another ScopeNode from the statements within the PostExecutionNode
8657 pm_scope_node_t next_scope_node;
8658 pm_scope_node_init((const pm_node_t *) cast->statements, &next_scope_node, scope_node);
8660 const rb_iseq_t *block = NEW_CHILD_ISEQ(&next_scope_node, make_name_for_block(body->parent_iseq), ISEQ_TYPE_BLOCK, location.line);
8661 pm_scope_node_destroy(&next_scope_node);
8663 PUSH_CALL_WITH_BLOCK(ret, block_location, id_core_set_postexe, INT2FIX(0), block);
8664 break;
8666 case PM_INTERPOLATED_REGULAR_EXPRESSION_NODE: {
8667 const pm_interpolated_regular_expression_node_t *cast = (const pm_interpolated_regular_expression_node_t *) scope_node->ast_node;
8668 pm_compile_regexp_dynamic(iseq, (const pm_node_t *) cast, &cast->parts, &location, ret, popped, scope_node);
8669 break;
8671 default:
8672 pm_compile_node(iseq, scope_node->body, ret, popped, scope_node);
8673 break;
8676 else {
8677 PUSH_INSN(ret, block_location, putnil);
8680 PUSH_LABEL(ret, end);
8681 PUSH_TRACE(ret, RUBY_EVENT_B_RETURN);
8682 ISEQ_COMPILE_DATA(iseq)->last_line = body->location.code_location.end_pos.lineno;
8684 /* wide range catch handler must put at last */
8685 PUSH_CATCH_ENTRY(CATCH_TYPE_REDO, start, end, NULL, start);
8686 PUSH_CATCH_ENTRY(CATCH_TYPE_NEXT, start, end, NULL, end);
8687 break;
8689 case ISEQ_TYPE_ENSURE: {
8690 const pm_line_column_t statements_location = (scope_node->body != NULL ? PM_NODE_START_LINE_COLUMN(scope_node->parser, scope_node->body) : location);
8691 iseq_set_exception_local_table(iseq);
8693 if (scope_node->body != NULL) {
8694 PM_COMPILE_POPPED((const pm_node_t *) scope_node->body);
8697 PUSH_GETLOCAL(ret, statements_location, 1, 0);
8698 PUSH_INSN1(ret, statements_location, throw, INT2FIX(0));
8699 return;
8701 case ISEQ_TYPE_METHOD: {
8702 PUSH_TRACE(ret, RUBY_EVENT_CALL);
8703 if (scope_node->body) {
8704 PM_COMPILE((const pm_node_t *) scope_node->body);
8706 else {
8707 PUSH_INSN(ret, location, putnil);
8710 PUSH_TRACE(ret, RUBY_EVENT_RETURN);
8711 ISEQ_COMPILE_DATA(iseq)->last_line = body->location.code_location.end_pos.lineno;
8713 break;
8715 case ISEQ_TYPE_RESCUE: {
8716 iseq_set_exception_local_table(iseq);
8717 if (PM_NODE_TYPE_P(scope_node->ast_node, PM_RESCUE_MODIFIER_NODE)) {
8718 LABEL *lab = NEW_LABEL(lineno);
8719 LABEL *rescue_end = NEW_LABEL(lineno);
8720 PUSH_GETLOCAL(ret, location, LVAR_ERRINFO, 0);
8721 PUSH_INSN1(ret, location, putobject, rb_eStandardError);
8722 PUSH_INSN1(ret, location, checkmatch, INT2FIX(VM_CHECKMATCH_TYPE_RESCUE));
8723 PUSH_INSNL(ret, location, branchif, lab);
8724 PUSH_INSNL(ret, location, jump, rescue_end);
8725 PUSH_LABEL(ret, lab);
8726 PM_COMPILE((const pm_node_t *) scope_node->body);
8727 PUSH_INSN(ret, location, leave);
8728 PUSH_LABEL(ret, rescue_end);
8729 PUSH_GETLOCAL(ret, location, LVAR_ERRINFO, 0);
8731 else {
8732 PM_COMPILE((const pm_node_t *) scope_node->ast_node);
8734 PUSH_INSN1(ret, location, throw, INT2FIX(0));
8736 return;
8738 default:
8739 if (scope_node->body) {
8740 PM_COMPILE((const pm_node_t *) scope_node->body);
8742 else {
8743 PUSH_INSN(ret, location, putnil);
8745 break;
8748 if (PM_NODE_TYPE_P(scope_node->ast_node, PM_CLASS_NODE)) {
8749 const pm_line_column_t end_location = PM_NODE_END_LINE_COLUMN(scope_node->parser, scope_node->ast_node);
8750 ADD_TRACE(ret, RUBY_EVENT_END);
8751 ISEQ_COMPILE_DATA(iseq)->last_line = end_location.line;
8754 if (!PM_NODE_TYPE_P(scope_node->ast_node, PM_ENSURE_NODE)) {
8755 const pm_line_column_t location = { .line = ISEQ_COMPILE_DATA(iseq)->last_line, .column = -1 };
8756 PUSH_INSN(ret, location, leave);
8759 return;
8761 case PM_SELF_NODE: {
8762 // self
8763 // ^^^^
8764 if (!popped) {
8765 PUSH_INSN(ret, location, putself);
8767 return;
8769 case PM_SHAREABLE_CONSTANT_NODE: {
8770 // A value that is being written to a constant that is being marked as
8771 // shared depending on the current lexical context.
8772 const pm_shareable_constant_node_t *cast = (const pm_shareable_constant_node_t *) node;
8774 switch (PM_NODE_TYPE(cast->write)) {
8775 case PM_CONSTANT_WRITE_NODE:
8776 pm_compile_constant_write_node(iseq, (const pm_constant_write_node_t *) cast->write, cast->base.flags, &location, ret, popped, scope_node);
8777 break;
8778 case PM_CONSTANT_AND_WRITE_NODE:
8779 pm_compile_constant_and_write_node(iseq, (const pm_constant_and_write_node_t *) cast->write, cast->base.flags, &location, ret, popped, scope_node);
8780 break;
8781 case PM_CONSTANT_OR_WRITE_NODE:
8782 pm_compile_constant_or_write_node(iseq, (const pm_constant_or_write_node_t *) cast->write, cast->base.flags, &location, ret, popped, scope_node);
8783 break;
8784 case PM_CONSTANT_OPERATOR_WRITE_NODE:
8785 pm_compile_constant_operator_write_node(iseq, (const pm_constant_operator_write_node_t *) cast->write, cast->base.flags, &location, ret, popped, scope_node);
8786 break;
8787 case PM_CONSTANT_PATH_WRITE_NODE:
8788 pm_compile_constant_path_write_node(iseq, (const pm_constant_path_write_node_t *) cast->write, cast->base.flags, &location, ret, popped, scope_node);
8789 break;
8790 case PM_CONSTANT_PATH_AND_WRITE_NODE:
8791 pm_compile_constant_path_and_write_node(iseq, (const pm_constant_path_and_write_node_t *) cast->write, cast->base.flags, &location, ret, popped, scope_node);
8792 break;
8793 case PM_CONSTANT_PATH_OR_WRITE_NODE:
8794 pm_compile_constant_path_or_write_node(iseq, (const pm_constant_path_or_write_node_t *) cast->write, cast->base.flags, &location, ret, popped, scope_node);
8795 break;
8796 case PM_CONSTANT_PATH_OPERATOR_WRITE_NODE:
8797 pm_compile_constant_path_operator_write_node(iseq, (const pm_constant_path_operator_write_node_t *) cast->write, cast->base.flags, &location, ret, popped, scope_node);
8798 break;
8799 default:
8800 rb_bug("Unexpected node type for shareable constant write: %s", pm_node_type_to_str(PM_NODE_TYPE(cast->write)));
8801 break;
8804 return;
8806 case PM_SINGLETON_CLASS_NODE: {
8807 // class << self; end
8808 // ^^^^^^^^^^^^^^^^^^
8809 const pm_singleton_class_node_t *cast = (const pm_singleton_class_node_t *) node;
8811 pm_scope_node_t next_scope_node;
8812 pm_scope_node_init((const pm_node_t *) cast, &next_scope_node, scope_node);
8813 const rb_iseq_t *child_iseq = NEW_ISEQ(&next_scope_node, rb_fstring_lit("singleton class"), ISEQ_TYPE_CLASS, location.line);
8814 pm_scope_node_destroy(&next_scope_node);
8816 PM_COMPILE_NOT_POPPED(cast->expression);
8817 PUSH_INSN(ret, location, putnil);
8819 ID singletonclass;
8820 CONST_ID(singletonclass, "singletonclass");
8821 PUSH_INSN3(ret, location, defineclass, ID2SYM(singletonclass), child_iseq, INT2FIX(VM_DEFINECLASS_TYPE_SINGLETON_CLASS));
8823 if (popped) PUSH_INSN(ret, location, pop);
8824 RB_OBJ_WRITTEN(iseq, Qundef, (VALUE) child_iseq);
8826 return;
8828 case PM_SOURCE_ENCODING_NODE: {
8829 // __ENCODING__
8830 // ^^^^^^^^^^^^
8831 if (!popped) {
8832 VALUE value = pm_static_literal_value(iseq, node, scope_node);
8833 PUSH_INSN1(ret, location, putobject, value);
8835 return;
8837 case PM_SOURCE_FILE_NODE: {
8838 // __FILE__
8839 // ^^^^^^^^
8840 if (!popped) {
8841 const pm_source_file_node_t *cast = (const pm_source_file_node_t *) node;
8842 VALUE string = pm_source_file_value(cast, scope_node);
8844 if (PM_NODE_FLAG_P(cast, PM_STRING_FLAGS_FROZEN)) {
8845 PUSH_INSN1(ret, location, putobject, string);
8847 else if (PM_NODE_FLAG_P(cast, PM_STRING_FLAGS_MUTABLE)) {
8848 PUSH_INSN1(ret, location, putstring, string);
8850 else {
8851 PUSH_INSN1(ret, location, putchilledstring, string);
8854 return;
8856 case PM_SOURCE_LINE_NODE: {
8857 // __LINE__
8858 // ^^^^^^^^
8859 if (!popped) {
8860 VALUE value = pm_static_literal_value(iseq, node, scope_node);
8861 PUSH_INSN1(ret, location, putobject, value);
8863 return;
8865 case PM_SPLAT_NODE: {
8866 // foo(*bar)
8867 // ^^^^
8868 const pm_splat_node_t *cast = (const pm_splat_node_t *) node;
8869 if (cast->expression) {
8870 PM_COMPILE(cast->expression);
8873 if (!popped) {
8874 PUSH_INSN1(ret, location, splatarray, Qtrue);
8876 return;
8878 case PM_STATEMENTS_NODE: {
8879 // A list of statements.
8880 const pm_statements_node_t *cast = (const pm_statements_node_t *) node;
8881 const pm_node_list_t *body = &cast->body;
8883 if (body->size > 0) {
8884 for (size_t index = 0; index < body->size - 1; index++) {
8885 PM_COMPILE_POPPED(body->nodes[index]);
8887 PM_COMPILE(body->nodes[body->size - 1]);
8889 else {
8890 PUSH_INSN(ret, location, putnil);
8892 return;
8894 case PM_STRING_NODE: {
8895 // "foo"
8896 // ^^^^^
8897 if (!popped) {
8898 const pm_string_node_t *cast = (const pm_string_node_t *) node;
8899 VALUE value = parse_static_literal_string(iseq, scope_node, node, &cast->unescaped);
8901 if (PM_NODE_FLAG_P(node, PM_STRING_FLAGS_FROZEN)) {
8902 PUSH_INSN1(ret, location, putobject, value);
8904 else if (PM_NODE_FLAG_P(node, PM_STRING_FLAGS_MUTABLE)) {
8905 PUSH_INSN1(ret, location, putstring, value);
8907 else {
8908 PUSH_INSN1(ret, location, putchilledstring, value);
8911 return;
8913 case PM_SUPER_NODE: {
8914 // super(foo)
8915 // ^^^^^^^^^^
8916 const pm_super_node_t *cast = (const pm_super_node_t *) node;
8918 DECL_ANCHOR(args);
8919 INIT_ANCHOR(args);
8921 LABEL *retry_label = NEW_LABEL(location.line);
8922 LABEL *retry_end_l = NEW_LABEL(location.line);
8924 const rb_iseq_t *previous_block = ISEQ_COMPILE_DATA(iseq)->current_block;
8925 const rb_iseq_t *current_block;
8926 ISEQ_COMPILE_DATA(iseq)->current_block = current_block = NULL;
8928 PUSH_LABEL(ret, retry_label);
8929 PUSH_INSN(ret, location, putself);
8931 int flags = 0;
8932 struct rb_callinfo_kwarg *keywords = NULL;
8933 int argc = pm_setup_args(cast->arguments, cast->block, &flags, &keywords, iseq, ret, scope_node, &location);
8934 flags |= VM_CALL_SUPER | VM_CALL_FCALL;
8936 if (cast->block && PM_NODE_TYPE_P(cast->block, PM_BLOCK_NODE)) {
8937 pm_scope_node_t next_scope_node;
8938 pm_scope_node_init(cast->block, &next_scope_node, scope_node);
8940 ISEQ_COMPILE_DATA(iseq)->current_block = current_block = NEW_CHILD_ISEQ(&next_scope_node, make_name_for_block(iseq), ISEQ_TYPE_BLOCK, lineno);
8941 pm_scope_node_destroy(&next_scope_node);
8944 if ((flags & VM_CALL_ARGS_BLOCKARG) && (flags & VM_CALL_KW_SPLAT) && !(flags & VM_CALL_KW_SPLAT_MUT)) {
8945 PUSH_INSN(args, location, splatkw);
8948 PUSH_SEQ(ret, args);
8949 PUSH_INSN2(ret, location, invokesuper, new_callinfo(iseq, 0, argc, flags, keywords, current_block != NULL), current_block);
8950 pm_compile_retry_end_label(iseq, ret, retry_end_l);
8952 if (popped) PUSH_INSN(ret, location, pop);
8953 ISEQ_COMPILE_DATA(iseq)->current_block = previous_block;
8954 PUSH_CATCH_ENTRY(CATCH_TYPE_BREAK, retry_label, retry_end_l, current_block, retry_end_l);
8956 return;
8958 case PM_SYMBOL_NODE: {
8959 // :foo
8960 // ^^^^
8961 if (!popped) {
8962 VALUE value = pm_static_literal_value(iseq, node, scope_node);
8963 PUSH_INSN1(ret, location, putobject, value);
8965 return;
8967 case PM_TRUE_NODE: {
8968 // true
8969 // ^^^^
8970 if (!popped) {
8971 PUSH_INSN1(ret, location, putobject, Qtrue);
8973 return;
8975 case PM_UNDEF_NODE: {
8976 // undef foo
8977 // ^^^^^^^^^
8978 const pm_undef_node_t *cast = (const pm_undef_node_t *) node;
8979 const pm_node_list_t *names = &cast->names;
8981 for (size_t index = 0; index < names->size; index++) {
8982 PUSH_INSN1(ret, location, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
8983 PUSH_INSN1(ret, location, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_CBASE));
8985 PM_COMPILE_NOT_POPPED(names->nodes[index]);
8986 PUSH_SEND(ret, location, id_core_undef_method, INT2NUM(2));
8988 if (index < names->size - 1) {
8989 PUSH_INSN(ret, location, pop);
8993 if (popped) PUSH_INSN(ret, location, pop);
8994 return;
8996 case PM_UNLESS_NODE: {
8997 // unless foo; bar end
8998 // ^^^^^^^^^^^^^^^^^^^
9000 // bar unless foo
9001 // ^^^^^^^^^^^^^^
9002 const pm_unless_node_t *cast = (const pm_unless_node_t *) node;
9003 const pm_statements_node_t *consequent = NULL;
9004 if (cast->consequent != NULL) {
9005 consequent = ((const pm_else_node_t *) cast->consequent)->statements;
9008 pm_compile_conditional(iseq, &location, PM_UNLESS_NODE, (const pm_node_t *) cast, consequent, (const pm_node_t *) cast->statements, cast->predicate, ret, popped, scope_node);
9009 return;
9011 case PM_UNTIL_NODE: {
9012 // until foo; bar end
9013 // ^^^^^^^^^^^^^^^^^
9015 // bar until foo
9016 // ^^^^^^^^^^^^^
9017 const pm_until_node_t *cast = (const pm_until_node_t *) node;
9018 pm_compile_loop(iseq, &location, cast->base.flags, PM_UNTIL_NODE, (const pm_node_t *) cast, cast->statements, cast->predicate, ret, popped, scope_node);
9019 return;
9021 case PM_WHILE_NODE: {
9022 // while foo; bar end
9023 // ^^^^^^^^^^^^^^^^^^
9025 // bar while foo
9026 // ^^^^^^^^^^^^^
9027 const pm_while_node_t *cast = (const pm_while_node_t *) node;
9028 pm_compile_loop(iseq, &location, cast->base.flags, PM_WHILE_NODE, (const pm_node_t *) cast, cast->statements, cast->predicate, ret, popped, scope_node);
9029 return;
9031 case PM_X_STRING_NODE: {
9032 // `foo`
9033 // ^^^^^
9034 const pm_x_string_node_t *cast = (const pm_x_string_node_t *) node;
9035 VALUE value = parse_static_literal_string(iseq, scope_node, node, &cast->unescaped);
9037 PUSH_INSN(ret, location, putself);
9038 PUSH_INSN1(ret, location, putobject, value);
9039 PUSH_SEND_WITH_FLAG(ret, location, idBackquote, INT2NUM(1), INT2FIX(VM_CALL_FCALL | VM_CALL_ARGS_SIMPLE));
9040 if (popped) PUSH_INSN(ret, location, pop);
9042 return;
9044 case PM_YIELD_NODE: {
9045 // yield
9046 // ^^^^^
9048 // yield 1
9049 // ^^^^^^^
9050 const pm_yield_node_t *cast = (const pm_yield_node_t *) node;
9052 switch (ISEQ_BODY(ISEQ_BODY(iseq)->local_iseq)->type) {
9053 case ISEQ_TYPE_TOP:
9054 case ISEQ_TYPE_MAIN:
9055 case ISEQ_TYPE_CLASS:
9056 COMPILE_ERROR(ERROR_ARGS "Invalid yield");
9057 return;
9058 default: /* valid */;
9061 int argc = 0;
9062 int flags = 0;
9063 struct rb_callinfo_kwarg *keywords = NULL;
9065 if (cast->arguments) {
9066 argc = pm_setup_args(cast->arguments, NULL, &flags, &keywords, iseq, ret, scope_node, &location);
9069 PUSH_INSN1(ret, location, invokeblock, new_callinfo(iseq, 0, argc, flags, keywords, FALSE));
9070 if (popped) PUSH_INSN(ret, location, pop);
9072 int level = 0;
9073 for (const rb_iseq_t *tmp_iseq = iseq; tmp_iseq != ISEQ_BODY(iseq)->local_iseq; level++) {
9074 tmp_iseq = ISEQ_BODY(tmp_iseq)->parent_iseq;
9077 if (level > 0) access_outer_variables(iseq, level, rb_intern("yield"), true);
9078 return;
9080 default: {
9081 rb_raise(rb_eNotImpError, "node type %s not implemented", pm_node_type_to_str(PM_NODE_TYPE(node)));
9082 return;
9087 /** True if the given iseq can have pre execution blocks. */
9088 static inline bool
9089 pm_iseq_pre_execution_p(rb_iseq_t *iseq)
9091 switch (ISEQ_BODY(iseq)->type) {
9092 case ISEQ_TYPE_TOP:
9093 case ISEQ_TYPE_EVAL:
9094 case ISEQ_TYPE_MAIN:
9095 return true;
9096 default:
9097 return false;
9102 * This is the main entry-point into the prism compiler. It accepts the iseq
9103 * that it should be compiling instruction into and a pointer to the scope node
9104 * that it should be compiling. It returns the established instruction sequence.
9105 * Note that this function could raise Ruby errors if it encounters compilation
9106 * errors or if there is a bug in the compiler.
9108 VALUE
9109 pm_iseq_compile_node(rb_iseq_t *iseq, pm_scope_node_t *node)
9111 DECL_ANCHOR(ret);
9112 INIT_ANCHOR(ret);
9114 if (pm_iseq_pre_execution_p(iseq)) {
9115 // Because these ISEQs can have BEGIN{}, we're going to create two
9116 // anchors to compile them, a "pre" and a "body". We'll mark the "pre"
9117 // on the scope node so that when BEGIN{} is found, its contents will be
9118 // added to the "pre" anchor.
9119 DECL_ANCHOR(pre);
9120 INIT_ANCHOR(pre);
9121 node->pre_execution_anchor = pre;
9123 // Now we'll compile the body as normal. We won't compile directly into
9124 // the "ret" anchor yet because we want to add the "pre" anchor to the
9125 // beginning of the "ret" anchor first.
9126 DECL_ANCHOR(body);
9127 INIT_ANCHOR(body);
9128 pm_compile_node(iseq, (const pm_node_t *) node, body, false, node);
9130 // Now we'll join both anchors together so that the content is in the
9131 // correct order.
9132 PUSH_SEQ(ret, pre);
9133 PUSH_SEQ(ret, body);
9135 else {
9136 // In other circumstances, we can just compile the node directly into
9137 // the "ret" anchor.
9138 pm_compile_node(iseq, (const pm_node_t *) node, ret, false, node);
9141 CHECK(iseq_setup_insn(iseq, ret));
9142 return iseq_setup(iseq, ret);
9146 * Free the internal memory associated with a pm_parse_result_t struct.
9147 * Importantly this does not free the struct itself.
9149 void
9150 pm_parse_result_free(pm_parse_result_t *result)
9152 if (result->parsed) {
9153 pm_node_destroy(&result->parser, result->node.ast_node);
9154 pm_scope_node_destroy(&result->node);
9157 pm_parser_free(&result->parser);
9158 pm_string_free(&result->input);
9159 pm_options_free(&result->options);
9163 * Check if the given source slice is valid UTF-8. The location represents the
9164 * location of the error, but the slice of the source will include the content
9165 * of all of the lines that the error touches, so we need to check those parts
9166 * as well.
9168 static bool
9169 pm_parse_process_error_utf8_p(const pm_parser_t *parser, const pm_location_t *location)
9171 const size_t start_line = pm_newline_list_line_column(&parser->newline_list, location->start, 1).line;
9172 const size_t end_line = pm_newline_list_line_column(&parser->newline_list, location->end, 1).line;
9174 const uint8_t *start = parser->start + parser->newline_list.offsets[start_line - 1];
9175 const uint8_t *end = ((end_line == parser->newline_list.size) ? parser->end : (parser->start + parser->newline_list.offsets[end_line]));
9176 size_t width;
9178 while (start < end) {
9179 if ((width = pm_encoding_utf_8_char_width(start, end - start)) == 0) return false;
9180 start += width;
9183 return true;
9187 * Generate an error object from the given parser that contains as much
9188 * information as possible about the errors that were encountered.
9190 static VALUE
9191 pm_parse_process_error(const pm_parse_result_t *result)
9193 const pm_parser_t *parser = &result->parser;
9194 const pm_diagnostic_t *head = (const pm_diagnostic_t *) parser->error_list.head;
9195 bool valid_utf8 = true;
9197 pm_buffer_t buffer = { 0 };
9198 const pm_string_t *filepath = &parser->filepath;
9200 for (const pm_diagnostic_t *error = head; error != NULL; error = (const pm_diagnostic_t *) error->node.next) {
9201 switch (error->level) {
9202 case PM_ERROR_LEVEL_SYNTAX:
9203 // It is implicitly assumed that the error messages will be
9204 // encodeable as UTF-8. Because of this, we can't include source
9205 // examples that contain invalid byte sequences. So if any source
9206 // examples include invalid UTF-8 byte sequences, we will skip
9207 // showing source examples entirely.
9208 if (valid_utf8 && !pm_parse_process_error_utf8_p(parser, &error->location)) {
9209 valid_utf8 = false;
9211 break;
9212 case PM_ERROR_LEVEL_ARGUMENT: {
9213 // Any errors with the level PM_ERROR_LEVEL_ARGUMENT take over as
9214 // the only argument that gets raised. This is to allow priority
9215 // messages that should be handled before anything else.
9216 int32_t line_number = (int32_t) pm_location_line_number(parser, &error->location);
9218 pm_buffer_append_format(
9219 &buffer,
9220 "%.*s:%" PRIi32 ": %s",
9221 (int) pm_string_length(filepath),
9222 pm_string_source(filepath),
9223 line_number,
9224 error->message
9227 if (pm_parse_process_error_utf8_p(parser, &error->location)) {
9228 pm_buffer_append_byte(&buffer, '\n');
9230 pm_list_node_t *list_node = (pm_list_node_t *) error;
9231 pm_list_t error_list = { .size = 1, .head = list_node, .tail = list_node };
9233 pm_parser_errors_format(parser, &error_list, &buffer, rb_stderr_tty_p(), false);
9236 VALUE value = rb_exc_new(rb_eArgError, pm_buffer_value(&buffer), pm_buffer_length(&buffer));
9237 pm_buffer_free(&buffer);
9239 return value;
9241 case PM_ERROR_LEVEL_LOAD: {
9242 // Load errors are much simpler, because they don't include any of
9243 // the source in them. We create the error directly from the
9244 // message.
9245 VALUE message = rb_enc_str_new_cstr(error->message, rb_locale_encoding());
9246 VALUE value = rb_exc_new3(rb_eLoadError, message);
9247 rb_ivar_set(value, rb_intern_const("@path"), Qnil);
9248 return value;
9253 pm_buffer_append_format(
9254 &buffer,
9255 "%.*s:%" PRIi32 ": syntax error%s found\n",
9256 (int) pm_string_length(filepath),
9257 pm_string_source(filepath),
9258 (int32_t) pm_location_line_number(parser, &head->location),
9259 (parser->error_list.size > 1) ? "s" : ""
9262 if (valid_utf8) {
9263 pm_parser_errors_format(parser, &parser->error_list, &buffer, rb_stderr_tty_p(), true);
9265 else {
9266 for (const pm_diagnostic_t *error = head; error != NULL; error = (const pm_diagnostic_t *) error->node.next) {
9267 if (error != head) pm_buffer_append_byte(&buffer, '\n');
9268 pm_buffer_append_format(&buffer, "%.*s:%" PRIi32 ": %s", (int) pm_string_length(filepath), pm_string_source(filepath), (int32_t) pm_location_line_number(parser, &error->location), error->message);
9272 VALUE error = rb_exc_new(rb_eSyntaxError, pm_buffer_value(&buffer), pm_buffer_length(&buffer));
9274 rb_encoding *filepath_encoding = result->node.filepath_encoding != NULL ? result->node.filepath_encoding : rb_utf8_encoding();
9275 VALUE path = rb_enc_str_new((const char *) pm_string_source(filepath), pm_string_length(filepath), filepath_encoding);
9277 rb_ivar_set(error, rb_intern_const("@path"), path);
9278 pm_buffer_free(&buffer);
9280 return error;
9283 void rb_enc_compile_warning(rb_encoding *enc, const char *file, int line, const char *fmt, ...);
9284 void rb_enc_compile_warn(rb_encoding *enc, const char *file, int line, const char *fmt, ...);
9287 * Parse the parse result and raise a Ruby error if there are any syntax errors.
9288 * It returns an error if one should be raised. It is assumed that the parse
9289 * result object is zeroed out.
9291 static VALUE
9292 pm_parse_process(pm_parse_result_t *result, pm_node_t *node)
9294 pm_parser_t *parser = &result->parser;
9296 // First, set up the scope node so that the AST node is attached and can be
9297 // freed regardless of whether or we return an error.
9298 pm_scope_node_t *scope_node = &result->node;
9299 rb_encoding *filepath_encoding = scope_node->filepath_encoding;
9301 pm_scope_node_init(node, scope_node, NULL);
9302 scope_node->filepath_encoding = filepath_encoding;
9304 scope_node->encoding = rb_enc_find(parser->encoding->name);
9305 if (!scope_node->encoding) rb_bug("Encoding not found %s!", parser->encoding->name);
9307 // Emit all of the various warnings from the parse.
9308 const pm_diagnostic_t *warning;
9309 const char *warning_filepath = (const char *) pm_string_source(&parser->filepath);
9311 for (warning = (const pm_diagnostic_t *) parser->warning_list.head; warning != NULL; warning = (const pm_diagnostic_t *) warning->node.next) {
9312 int line = pm_location_line_number(parser, &warning->location);
9314 if (warning->level == PM_WARNING_LEVEL_VERBOSE) {
9315 rb_enc_compile_warning(scope_node->encoding, warning_filepath, line, "%s", warning->message);
9317 else {
9318 rb_enc_compile_warn(scope_node->encoding, warning_filepath, line, "%s", warning->message);
9322 // If there are errors, raise an appropriate error and free the result.
9323 if (parser->error_list.size > 0) {
9324 VALUE error = pm_parse_process_error(result);
9326 // TODO: We need to set the backtrace.
9327 // rb_funcallv(error, rb_intern("set_backtrace"), 1, &path);
9328 return error;
9331 // Now set up the constant pool and intern all of the various constants into
9332 // their corresponding IDs.
9333 scope_node->parser = parser;
9334 scope_node->constants = calloc(parser->constant_pool.size, sizeof(ID));
9336 for (uint32_t index = 0; index < parser->constant_pool.size; index++) {
9337 pm_constant_t *constant = &parser->constant_pool.constants[index];
9338 scope_node->constants[index] = rb_intern3((const char *) constant->start, constant->length, scope_node->encoding);
9341 scope_node->index_lookup_table = st_init_numtable();
9342 pm_constant_id_list_t *locals = &scope_node->locals;
9343 for (size_t index = 0; index < locals->size; index++) {
9344 st_insert(scope_node->index_lookup_table, locals->ids[index], index);
9347 // If we got here, this is a success and we can return Qnil to indicate that
9348 // no error should be raised.
9349 result->parsed = true;
9350 return Qnil;
9354 * Set the frozen_string_literal option based on the default value used by the
9355 * CRuby compiler.
9357 static void
9358 pm_options_frozen_string_literal_init(pm_options_t *options)
9360 int frozen_string_literal = rb_iseq_opt_frozen_string_literal();
9362 switch (frozen_string_literal) {
9363 case ISEQ_FROZEN_STRING_LITERAL_UNSET:
9364 break;
9365 case ISEQ_FROZEN_STRING_LITERAL_DISABLED:
9366 pm_options_frozen_string_literal_set(options, false);
9367 break;
9368 case ISEQ_FROZEN_STRING_LITERAL_ENABLED:
9369 pm_options_frozen_string_literal_set(options, true);
9370 break;
9371 default:
9372 rb_bug("pm_options_frozen_string_literal_init: invalid frozen_string_literal=%d", frozen_string_literal);
9373 break;
9378 * Returns an array of ruby String objects that represent the lines of the
9379 * source file that the given parser parsed.
9381 static inline VALUE
9382 pm_parse_file_script_lines(const pm_scope_node_t *scope_node, const pm_parser_t *parser)
9384 const pm_newline_list_t *newline_list = &parser->newline_list;
9385 const char *start = (const char *) parser->start;
9386 const char *end = (const char *) parser->end;
9388 // If we end exactly on a newline, then there's no need to push on a final
9389 // segment. If we don't, then we need to push on the last offset up to the
9390 // end of the string.
9391 size_t last_offset = newline_list->offsets[newline_list->size - 1];
9392 bool last_push = start + last_offset != end;
9394 // Create the ruby strings that represent the lines of the source.
9395 VALUE lines = rb_ary_new_capa(newline_list->size - (last_push ? 0 : 1));
9397 for (size_t index = 0; index < newline_list->size - 1; index++) {
9398 size_t offset = newline_list->offsets[index];
9399 size_t length = newline_list->offsets[index + 1] - offset;
9401 rb_ary_push(lines, rb_enc_str_new(start + offset, length, scope_node->encoding));
9404 // Push on the last line if we need to.
9405 if (last_push) {
9406 rb_ary_push(lines, rb_enc_str_new(start + last_offset, end - (start + last_offset), scope_node->encoding));
9409 return lines;
9413 * Attempt to load the file into memory. Return a Ruby error if the file cannot
9414 * be read.
9416 VALUE
9417 pm_load_file(pm_parse_result_t *result, VALUE filepath, bool load_error)
9419 if (!pm_string_mapped_init(&result->input, RSTRING_PTR(filepath))) {
9420 #ifdef _WIN32
9421 int e = rb_w32_map_errno(GetLastError());
9422 #else
9423 int e = errno;
9424 #endif
9426 VALUE error;
9428 if (load_error) {
9429 VALUE message = rb_str_buf_new_cstr(strerror(e));
9430 rb_str_cat2(message, " -- ");
9431 rb_str_append(message, filepath);
9433 error = rb_exc_new3(rb_eLoadError, message);
9434 rb_ivar_set(error, rb_intern_const("@path"), filepath);
9435 } else {
9436 error = rb_syserr_new(e, RSTRING_PTR(filepath));
9437 RB_GC_GUARD(filepath);
9440 return error;
9443 pm_options_frozen_string_literal_init(&result->options);
9444 return Qnil;
9448 * Parse the given filepath and store the resulting scope node in the given
9449 * parse result struct. It returns a Ruby error if the file cannot be read or
9450 * if it cannot be parsed properly. It is assumed that the parse result object
9451 * is zeroed out.
9453 VALUE
9454 pm_parse_file(pm_parse_result_t *result, VALUE filepath)
9456 pm_options_filepath_set(&result->options, RSTRING_PTR(filepath));
9457 RB_GC_GUARD(filepath);
9459 pm_parser_init(&result->parser, pm_string_source(&result->input), pm_string_length(&result->input), &result->options);
9460 pm_node_t *node = pm_parse(&result->parser);
9462 VALUE error = pm_parse_process(result, node);
9464 // If we're parsing a filepath, then we need to potentially support the
9465 // SCRIPT_LINES__ constant, which can be a hash that has an array of lines
9466 // of every read file.
9467 ID id_script_lines = rb_intern("SCRIPT_LINES__");
9469 if (rb_const_defined_at(rb_cObject, id_script_lines)) {
9470 VALUE script_lines = rb_const_get_at(rb_cObject, id_script_lines);
9472 if (RB_TYPE_P(script_lines, T_HASH)) {
9473 rb_hash_aset(script_lines, filepath, pm_parse_file_script_lines(&result->node, &result->parser));
9477 return error;
9481 * Load and then parse the given filepath. It returns a Ruby error if the file
9482 * cannot be read or if it cannot be parsed properly.
9484 VALUE
9485 pm_load_parse_file(pm_parse_result_t *result, VALUE filepath)
9487 VALUE error = pm_load_file(result, filepath, false);
9488 if (NIL_P(error)) {
9489 error = pm_parse_file(result, filepath);
9492 return error;
9496 * Parse the given source that corresponds to the given filepath and store the
9497 * resulting scope node in the given parse result struct. It is assumed that the
9498 * parse result object is zeroed out. If the string fails to parse, then a Ruby
9499 * error is returned.
9501 VALUE
9502 pm_parse_string(pm_parse_result_t *result, VALUE source, VALUE filepath)
9504 rb_encoding *encoding = rb_enc_get(source);
9505 if (!rb_enc_asciicompat(encoding)) {
9506 return rb_exc_new_cstr(rb_eArgError, "invalid source encoding");
9509 pm_options_frozen_string_literal_init(&result->options);
9510 pm_string_constant_init(&result->input, RSTRING_PTR(source), RSTRING_LEN(source));
9511 pm_options_encoding_set(&result->options, rb_enc_name(encoding));
9513 result->node.filepath_encoding = rb_enc_get(filepath);
9514 pm_options_filepath_set(&result->options, RSTRING_PTR(filepath));
9515 RB_GC_GUARD(filepath);
9517 pm_parser_init(&result->parser, pm_string_source(&result->input), pm_string_length(&result->input), &result->options);
9518 pm_node_t *node = pm_parse(&result->parser);
9520 return pm_parse_process(result, node);
9524 * An implementation of fgets that is suitable for use with Ruby IO objects.
9526 static char *
9527 pm_parse_stdin_fgets(char *string, int size, void *stream)
9529 RUBY_ASSERT(size > 0);
9531 VALUE line = rb_funcall((VALUE) stream, rb_intern("gets"), 1, INT2FIX(size - 1));
9532 if (NIL_P(line)) {
9533 return NULL;
9536 const char *cstr = StringValueCStr(line);
9537 size_t length = strlen(cstr);
9539 memcpy(string, cstr, length);
9540 string[length] = '\0';
9542 return string;
9546 * Parse the source off STDIN and store the resulting scope node in the given
9547 * parse result struct. It is assumed that the parse result object is zeroed
9548 * out. If the stream fails to parse, then a Ruby error is returned.
9550 VALUE
9551 pm_parse_stdin(pm_parse_result_t *result)
9553 pm_options_frozen_string_literal_init(&result->options);
9555 pm_buffer_t buffer;
9556 pm_node_t *node = pm_parse_stream(&result->parser, &buffer, (void *) rb_stdin, pm_parse_stdin_fgets, &result->options);
9558 // Copy the allocated buffer contents into the input string so that it gets
9559 // freed. At this point we've handed over ownership, so we don't need to
9560 // free the buffer itself.
9561 pm_string_owned_init(&result->input, (uint8_t *) pm_buffer_value(&buffer), pm_buffer_length(&buffer));
9563 return pm_parse_process(result, node);
9566 #undef NEW_ISEQ
9567 #define NEW_ISEQ OLD_ISEQ
9569 #undef NEW_CHILD_ISEQ
9570 #define NEW_CHILD_ISEQ OLD_CHILD_ISEQ