1 /**********************************************************************
3 compile.c - ruby node tree -> VM instruction sequence
6 created at: 04/01/01 03:42:15 JST
8 Copyright (C) 2004-2007 Koichi Sasada
10 **********************************************************************/
12 #include "ruby/internal/config.h"
23 #include "internal/array.h"
24 #include "internal/compile.h"
25 #include "internal/complex.h"
26 #include "internal/encoding.h"
27 #include "internal/error.h"
28 #include "internal/hash.h"
29 #include "internal/numeric.h"
30 #include "internal/object.h"
31 #include "internal/rational.h"
32 #include "internal/re.h"
33 #include "internal/symbol.h"
34 #include "internal/thread.h"
35 #include "internal/variable.h"
38 #include "ruby/util.h"
40 #include "vm_callinfo.h"
45 #include "insns_info.inc"
47 #undef RUBY_UNTYPED_DATA_WARNING
48 #define RUBY_UNTYPED_DATA_WARNING 0
50 #define FIXNUM_INC(n, i) ((n)+(INT2FIX(i)&~FIXNUM_FLAG))
51 #define FIXNUM_OR(n, i) ((n)|INT2FIX(i))
53 typedef struct iseq_link_element
{
61 struct iseq_link_element
*next
;
62 struct iseq_link_element
*prev
;
65 typedef struct iseq_link_anchor
{
77 typedef struct iseq_label_data
{
85 unsigned int rescued
: 2;
86 unsigned int unremovable
: 1;
89 typedef struct iseq_insn_data
{
91 enum ruby_vminsn_type insn_id
;
98 rb_event_flag_t events
;
102 typedef struct iseq_adjust_data
{
108 typedef struct iseq_trace_data
{
110 rb_event_flag_t event
;
114 struct ensure_range
{
117 struct ensure_range
*next
;
120 struct iseq_compile_data_ensure_node_stack
{
121 const NODE
*ensure_node
;
122 struct iseq_compile_data_ensure_node_stack
*prev
;
123 struct ensure_range
*erange
;
126 const ID rb_iseq_shared_exc_local_tbl
[] = {idERROR_INFO
};
129 * debug function(macro) interface depend on CPDEBUG
130 * if it is less than 0, runtime option is in effect.
135 * 2: show node important parameters
137 * 5: show other parameters
138 * 10: show every AST array
146 #define compile_debug CPDEBUG
148 #define compile_debug ISEQ_COMPILE_DATA(iseq)->option->debug_level
153 #define compile_debug_print_indent(level) \
154 ruby_debug_print_indent((level), compile_debug, gl_node_level * 2)
156 #define debugp(header, value) (void) \
157 (compile_debug_print_indent(1) && \
158 ruby_debug_print_value(1, compile_debug, (header), (value)))
160 #define debugi(header, id) (void) \
161 (compile_debug_print_indent(1) && \
162 ruby_debug_print_id(1, compile_debug, (header), (id)))
164 #define debugp_param(header, value) (void) \
165 (compile_debug_print_indent(1) && \
166 ruby_debug_print_value(1, compile_debug, (header), (value)))
168 #define debugp_verbose(header, value) (void) \
169 (compile_debug_print_indent(2) && \
170 ruby_debug_print_value(2, compile_debug, (header), (value)))
172 #define debugp_verbose_node(header, value) (void) \
173 (compile_debug_print_indent(10) && \
174 ruby_debug_print_value(10, compile_debug, (header), (value)))
176 #define debug_node_start(node) ((void) \
177 (compile_debug_print_indent(1) && \
178 (ruby_debug_print_node(1, CPDEBUG, "", (const NODE *)(node)), gl_node_level)), \
181 #define debug_node_end() gl_node_level --
185 #define debugi(header, id) ((void)0)
186 #define debugp(header, value) ((void)0)
187 #define debugp_verbose(header, value) ((void)0)
188 #define debugp_verbose_node(header, value) ((void)0)
189 #define debugp_param(header, value) ((void)0)
190 #define debug_node_start(node) ((void)0)
191 #define debug_node_end() ((void)0)
194 #if CPDEBUG > 1 || CPDEBUG < 0
196 #define printf ruby_debug_printf
197 #define debugs if (compile_debug_print_indent(1)) ruby_debug_printf
198 #define debug_compile(msg, v) ((void)(compile_debug_print_indent(1) && fputs((msg), stderr)), (v))
200 #define debugs if(0)printf
201 #define debug_compile(msg, v) (v)
204 #define LVAR_ERRINFO (1)
206 /* create new label */
207 #define NEW_LABEL(l) new_label_body(iseq, (l))
208 #define LABEL_FORMAT "<L%03d>"
210 #define NEW_ISEQ(node, name, type, line_no) \
211 new_child_iseq(iseq, (node), rb_fstring(name), 0, (type), (line_no))
213 #define NEW_CHILD_ISEQ(node, name, type, line_no) \
214 new_child_iseq(iseq, (node), rb_fstring(name), iseq, (type), (line_no))
216 /* add instructions */
217 #define ADD_SEQ(seq1, seq2) \
218 APPEND_LIST((seq1), (seq2))
220 /* add an instruction */
221 #define ADD_INSN(seq, line_node, insn) \
222 ADD_ELEM((seq), (LINK_ELEMENT *) new_insn_body(iseq, (line_node), BIN(insn), 0))
224 /* insert an instruction before next */
225 #define INSERT_BEFORE_INSN(next, line_node, insn) \
226 ELEM_INSERT_PREV(&(next)->link, (LINK_ELEMENT *) new_insn_body(iseq, (line_node), BIN(insn), 0))
228 /* insert an instruction after prev */
229 #define INSERT_AFTER_INSN(prev, line_node, insn) \
230 ELEM_INSERT_NEXT(&(prev)->link, (LINK_ELEMENT *) new_insn_body(iseq, (line_node), BIN(insn), 0))
232 /* add an instruction with some operands (1, 2, 3, 5) */
233 #define ADD_INSN1(seq, line_node, insn, op1) \
234 ADD_ELEM((seq), (LINK_ELEMENT *) \
235 new_insn_body(iseq, (line_node), BIN(insn), 1, (VALUE)(op1)))
237 /* insert an instruction with some operands (1, 2, 3, 5) before next */
238 #define INSERT_BEFORE_INSN1(next, line_node, insn, op1) \
239 ELEM_INSERT_PREV(&(next)->link, (LINK_ELEMENT *) \
240 new_insn_body(iseq, (line_node), BIN(insn), 1, (VALUE)(op1)))
242 /* insert an instruction with some operands (1, 2, 3, 5) after prev */
243 #define INSERT_AFTER_INSN1(prev, line_node, insn, op1) \
244 ELEM_INSERT_NEXT(&(prev)->link, (LINK_ELEMENT *) \
245 new_insn_body(iseq, (line_node), BIN(insn), 1, (VALUE)(op1)))
247 #define LABEL_REF(label) ((label)->refcnt++)
249 /* add an instruction with label operand (alias of ADD_INSN1) */
250 #define ADD_INSNL(seq, line_node, insn, label) (ADD_INSN1(seq, line_node, insn, label), LABEL_REF(label))
252 #define ADD_INSN2(seq, line_node, insn, op1, op2) \
253 ADD_ELEM((seq), (LINK_ELEMENT *) \
254 new_insn_body(iseq, (line_node), BIN(insn), 2, (VALUE)(op1), (VALUE)(op2)))
256 #define ADD_INSN3(seq, line_node, insn, op1, op2, op3) \
257 ADD_ELEM((seq), (LINK_ELEMENT *) \
258 new_insn_body(iseq, (line_node), BIN(insn), 3, (VALUE)(op1), (VALUE)(op2), (VALUE)(op3)))
260 /* Specific Insn factory */
261 #define ADD_SEND(seq, line_node, id, argc) \
262 ADD_SEND_R((seq), (line_node), (id), (argc), NULL, (VALUE)INT2FIX(0), NULL)
264 #define ADD_SEND_WITH_FLAG(seq, line_node, id, argc, flag) \
265 ADD_SEND_R((seq), (line_node), (id), (argc), NULL, (VALUE)(flag), NULL)
267 #define ADD_SEND_WITH_BLOCK(seq, line_node, id, argc, block) \
268 ADD_SEND_R((seq), (line_node), (id), (argc), (block), (VALUE)INT2FIX(0), NULL)
270 #define ADD_CALL_RECEIVER(seq, line_node) \
271 ADD_INSN((seq), (line_node), putself)
273 #define ADD_CALL(seq, line_node, id, argc) \
274 ADD_SEND_R((seq), (line_node), (id), (argc), NULL, (VALUE)INT2FIX(VM_CALL_FCALL), NULL)
276 #define ADD_CALL_WITH_BLOCK(seq, line_node, id, argc, block) \
277 ADD_SEND_R((seq), (line_node), (id), (argc), (block), (VALUE)INT2FIX(VM_CALL_FCALL), NULL)
279 #define ADD_SEND_R(seq, line_node, id, argc, block, flag, keywords) \
280 ADD_ELEM((seq), (LINK_ELEMENT *) new_insn_send(iseq, (line_node), (id), (VALUE)(argc), (block), (VALUE)(flag), (keywords)))
282 #define ADD_TRACE(seq, event) \
283 ADD_ELEM((seq), (LINK_ELEMENT *)new_trace_body(iseq, (event), 0))
284 #define ADD_TRACE_WITH_DATA(seq, event, data) \
285 ADD_ELEM((seq), (LINK_ELEMENT *)new_trace_body(iseq, (event), (data)))
287 static void iseq_add_getlocal(rb_iseq_t
*iseq
, LINK_ANCHOR
*const seq
, const NODE
*const line_node
, int idx
, int level
);
288 static void iseq_add_setlocal(rb_iseq_t
*iseq
, LINK_ANCHOR
*const seq
, const NODE
*const line_node
, int idx
, int level
);
290 #define ADD_GETLOCAL(seq, line_node, idx, level) iseq_add_getlocal(iseq, (seq), (line_node), (idx), (level))
291 #define ADD_SETLOCAL(seq, line_node, idx, level) iseq_add_setlocal(iseq, (seq), (line_node), (idx), (level))
294 #define ADD_LABEL(seq, label) \
295 ADD_ELEM((seq), (LINK_ELEMENT *) (label))
297 #define APPEND_LABEL(seq, before, label) \
298 APPEND_ELEM((seq), (before), (LINK_ELEMENT *) (label))
300 #define ADD_ADJUST(seq, line_node, label) \
301 ADD_ELEM((seq), (LINK_ELEMENT *) new_adjust_body(iseq, (label), nd_line(line_node)))
303 #define ADD_ADJUST_RESTORE(seq, label) \
304 ADD_ELEM((seq), (LINK_ELEMENT *) new_adjust_body(iseq, (label), -1))
306 #define LABEL_UNREMOVABLE(label) \
307 ((label) ? (LABEL_REF(label), (label)->unremovable=1) : 0)
308 #define ADD_CATCH_ENTRY(type, ls, le, iseqv, lc) do { \
309 VALUE _e = rb_ary_new3(5, (type), \
310 (VALUE)(ls) | 1, (VALUE)(le) | 1, \
311 (VALUE)(iseqv), (VALUE)(lc) | 1); \
312 LABEL_UNREMOVABLE(ls); \
315 if (NIL_P(ISEQ_COMPILE_DATA(iseq)->catch_table_ary)) \
316 RB_OBJ_WRITE(iseq, &ISEQ_COMPILE_DATA(iseq)->catch_table_ary, rb_ary_tmp_new(3)); \
317 rb_ary_push(ISEQ_COMPILE_DATA(iseq)->catch_table_ary, freeze_hide_obj(_e)); \
321 #define COMPILE(anchor, desc, node) \
322 (debug_compile("== " desc "\n", \
323 iseq_compile_each(iseq, (anchor), (node), 0)))
325 /* compile node, this node's value will be popped */
326 #define COMPILE_POPPED(anchor, desc, node) \
327 (debug_compile("== " desc "\n", \
328 iseq_compile_each(iseq, (anchor), (node), 1)))
330 /* compile node, which is popped when 'popped' is true */
331 #define COMPILE_(anchor, desc, node, popped) \
332 (debug_compile("== " desc "\n", \
333 iseq_compile_each(iseq, (anchor), (node), (popped))))
335 #define COMPILE_RECV(anchor, desc, node) \
336 (private_recv_p(node) ? \
337 (ADD_INSN(anchor, node, putself), VM_CALL_FCALL) : \
338 COMPILE(anchor, desc, node->nd_recv) ? 0 : -1)
340 #define OPERAND_AT(insn, idx) \
341 (((INSN*)(insn))->operands[(idx)])
343 #define INSN_OF(insn) \
344 (((INSN*)(insn))->insn_id)
346 #define IS_INSN(link) ((link)->type == ISEQ_ELEMENT_INSN)
347 #define IS_LABEL(link) ((link)->type == ISEQ_ELEMENT_LABEL)
348 #define IS_ADJUST(link) ((link)->type == ISEQ_ELEMENT_ADJUST)
349 #define IS_TRACE(link) ((link)->type == ISEQ_ELEMENT_TRACE)
350 #define IS_INSN_ID(iobj, insn) (INSN_OF(iobj) == BIN(insn))
351 #define IS_NEXT_INSN_ID(link, insn) \
352 ((link)->next && IS_INSN((link)->next) && IS_INSN_ID((link)->next, insn))
356 RBIMPL_ATTR_NORETURN()
358 RBIMPL_ATTR_FORMAT(RBIMPL_PRINTF_FORMAT
, 3, 4)
360 append_compile_error(const rb_iseq_t
*iseq
, int line
, const char *fmt
, ...)
362 VALUE err_info
= ISEQ_COMPILE_DATA(iseq
)->err_info
;
363 VALUE file
= rb_iseq_path(iseq
);
364 VALUE err
= err_info
== Qtrue
? Qfalse
: err_info
;
368 err
= rb_syntax_error_append(err
, file
, line
, -1, NULL
, fmt
, args
);
370 if (NIL_P(err_info
)) {
371 RB_OBJ_WRITE(iseq
, &ISEQ_COMPILE_DATA(iseq
)->err_info
, err
);
374 else if (!err_info
) {
375 RB_OBJ_WRITE(iseq
, &ISEQ_COMPILE_DATA(iseq
)->err_info
, Qtrue
);
378 if (SPECIAL_CONST_P(err
)) err
= rb_eSyntaxError
;
385 compile_bug(rb_iseq_t
*iseq
, int line
, const char *fmt
, ...)
389 rb_report_bug_valist(rb_iseq_path(iseq
), line
, fmt
, args
);
395 #define COMPILE_ERROR append_compile_error
397 #define ERROR_ARGS_AT(n) iseq, nd_line(n),
398 #define ERROR_ARGS ERROR_ARGS_AT(node)
400 #define EXPECT_NODE(prefix, node, ndtype, errval) \
402 const NODE *error_node = (node); \
403 enum node_type error_type = nd_type(error_node); \
404 if (error_type != (ndtype)) { \
405 COMPILE_ERROR(ERROR_ARGS_AT(error_node) \
406 prefix ": " #ndtype " is expected, but %s", \
407 ruby_node_name(error_type)); \
412 #define EXPECT_NODE_NONULL(prefix, parent, ndtype, errval) \
414 COMPILE_ERROR(ERROR_ARGS_AT(parent) \
415 prefix ": must be " #ndtype ", but 0"); \
419 #define UNKNOWN_NODE(prefix, node, errval) \
421 const NODE *error_node = (node); \
422 COMPILE_ERROR(ERROR_ARGS_AT(error_node) prefix ": unknown node (%s)", \
423 ruby_node_name(nd_type(error_node))); \
430 #define CHECK(sub) if (!(sub)) {BEFORE_RETURN;return COMPILE_NG;}
431 #define NO_CHECK(sub) (void)(sub)
432 #define BEFORE_RETURN
434 /* leave name uninitialized so that compiler warn if INIT_ANCHOR is
436 #define DECL_ANCHOR(name) \
437 LINK_ANCHOR name[1] = {{{ISEQ_ELEMENT_ANCHOR,},}}
438 #define INIT_ANCHOR(name) \
439 (name->last = &name->anchor)
442 freeze_hide_obj(VALUE obj
)
445 RBASIC_CLEAR_CLASS(obj
);
449 #include "optinsn.inc"
450 #if OPT_INSTRUCTIONS_UNIFICATION
451 #include "optunifs.inc"
456 #define ISEQ_ARG iseq,
457 #define ISEQ_ARG_DECLARE rb_iseq_t *iseq,
460 #define ISEQ_ARG_DECLARE
464 #define gl_node_level ISEQ_COMPILE_DATA(iseq)->node_level
467 static void dump_disasm_list_with_cursor(const LINK_ELEMENT
*link
, const LINK_ELEMENT
*curr
, const LABEL
*dest
);
468 static void dump_disasm_list(const LINK_ELEMENT
*elem
);
470 static int insn_data_length(INSN
*iobj
);
471 static int calc_sp_depth(int depth
, INSN
*iobj
);
473 static INSN
*new_insn_body(rb_iseq_t
*iseq
, const NODE
*const line_node
, enum ruby_vminsn_type insn_id
, int argc
, ...);
474 static LABEL
*new_label_body(rb_iseq_t
*iseq
, long line
);
475 static ADJUST
*new_adjust_body(rb_iseq_t
*iseq
, LABEL
*label
, int line
);
476 static TRACE
*new_trace_body(rb_iseq_t
*iseq
, rb_event_flag_t event
, long data
);
479 static int iseq_compile_each(rb_iseq_t
*iseq
, LINK_ANCHOR
*anchor
, const NODE
*n
, int);
480 static int iseq_setup(rb_iseq_t
*iseq
, LINK_ANCHOR
*const anchor
);
481 static int iseq_setup_insn(rb_iseq_t
*iseq
, LINK_ANCHOR
*const anchor
);
482 static int iseq_optimize(rb_iseq_t
*iseq
, LINK_ANCHOR
*const anchor
);
483 static int iseq_insns_unification(rb_iseq_t
*iseq
, LINK_ANCHOR
*const anchor
);
485 static int iseq_set_local_table(rb_iseq_t
*iseq
, const rb_ast_id_table_t
*tbl
);
486 static int iseq_set_exception_local_table(rb_iseq_t
*iseq
);
487 static int iseq_set_arguments(rb_iseq_t
*iseq
, LINK_ANCHOR
*const anchor
, const NODE
*const node
);
489 static int iseq_set_sequence_stackcaching(rb_iseq_t
*iseq
, LINK_ANCHOR
*const anchor
);
490 static int iseq_set_sequence(rb_iseq_t
*iseq
, LINK_ANCHOR
*const anchor
);
491 static int iseq_set_exception_table(rb_iseq_t
*iseq
);
492 static int iseq_set_optargs_table(rb_iseq_t
*iseq
);
494 static int compile_defined_expr(rb_iseq_t
*iseq
, LINK_ANCHOR
*const ret
, const NODE
*const node
, VALUE needstr
);
495 static int compile_hash(rb_iseq_t
*iseq
, LINK_ANCHOR
*const ret
, const NODE
*node
, int method_call_keywords
, int popped
);
498 * To make Array to LinkedList, use link_anchor
502 verify_list(ISEQ_ARG_DECLARE
const char *info
, LINK_ANCHOR
*const anchor
)
506 LINK_ELEMENT
*list
, *plist
;
508 if (!compile_debug
) return;
510 list
= anchor
->anchor
.next
;
511 plist
= &anchor
->anchor
;
513 if (plist
!= list
->prev
) {
520 if (anchor
->last
!= plist
&& anchor
->last
!= 0) {
525 rb_bug("list verify error: %08x (%s)", flag
, info
);
530 #define verify_list(info, anchor) verify_list(iseq, (info), (anchor))
534 verify_call_cache(rb_iseq_t
*iseq
)
537 // fprintf(stderr, "ci_size:%d\t", iseq->body->ci_size); rp(iseq);
539 VALUE
*original
= rb_iseq_original_iseq(iseq
);
541 while (i
< iseq
->body
->iseq_size
) {
542 VALUE insn
= original
[i
];
543 const char *types
= insn_op_types(insn
);
545 for (int j
=0; types
[j
]; j
++) {
546 if (types
[j
] == TS_CALLDATA
) {
547 struct rb_call_data
*cd
= (struct rb_call_data
*)original
[i
+j
+1];
548 const struct rb_callinfo
*ci
= cd
->ci
;
549 const struct rb_callcache
*cc
= cd
->cc
;
550 if (cc
!= vm_cc_empty()) {
552 rb_bug("call cache is not initialized by vm_cc_empty()");
559 for (unsigned int i
=0; i
<iseq
->body
->ci_size
; i
++) {
560 struct rb_call_data
*cd
= &iseq
->body
->call_data
[i
];
561 const struct rb_callinfo
*ci
= cd
->ci
;
562 const struct rb_callcache
*cc
= cd
->cc
;
563 if (cc
!= NULL
&& cc
!= vm_cc_empty()) {
565 rb_bug("call cache is not initialized by vm_cc_empty()");
572 * elem1, elem2 => elem1, elem2, elem
575 ADD_ELEM(ISEQ_ARG_DECLARE LINK_ANCHOR
*const anchor
, LINK_ELEMENT
*elem
)
577 elem
->prev
= anchor
->last
;
578 anchor
->last
->next
= elem
;
580 verify_list("add", anchor
);
584 * elem1, before, elem2 => elem1, before, elem, elem2
587 APPEND_ELEM(ISEQ_ARG_DECLARE LINK_ANCHOR
*const anchor
, LINK_ELEMENT
*before
, LINK_ELEMENT
*elem
)
590 elem
->next
= before
->next
;
591 elem
->next
->prev
= elem
;
593 if (before
== anchor
->last
) anchor
->last
= elem
;
594 verify_list("add", anchor
);
597 #define ADD_ELEM(anchor, elem) ADD_ELEM(iseq, (anchor), (elem))
598 #define APPEND_ELEM(anchor, before, elem) APPEND_ELEM(iseq, (anchor), (before), (elem))
602 branch_coverage_valid_p(rb_iseq_t
*iseq
, int first_line
)
604 if (!ISEQ_COVERAGE(iseq
)) return 0;
605 if (!ISEQ_BRANCH_COVERAGE(iseq
)) return 0;
606 if (first_line
<= 0) return 0;
611 decl_branch_base(rb_iseq_t
*iseq
, const NODE
*node
, const char *type
)
613 const int first_lineno
= nd_first_lineno(node
), first_column
= nd_first_column(node
);
614 const int last_lineno
= nd_last_lineno(node
), last_column
= nd_last_column(node
);
616 if (!branch_coverage_valid_p(iseq
, first_lineno
)) return Qundef
;
619 * if !structure[node]
620 * structure[node] = [type, first_lineno, first_column, last_lineno, last_column, branches = {}]
622 * branches = structure[node][5]
626 VALUE structure
= RARRAY_AREF(ISEQ_BRANCH_COVERAGE(iseq
), 0);
627 VALUE key
= (VALUE
)node
| 1; // FIXNUM for hash key
628 VALUE branch_base
= rb_hash_aref(structure
, key
);
631 if (NIL_P(branch_base
)) {
632 branch_base
= rb_ary_tmp_new(6);
633 rb_hash_aset(structure
, key
, branch_base
);
634 rb_ary_push(branch_base
, ID2SYM(rb_intern(type
)));
635 rb_ary_push(branch_base
, INT2FIX(first_lineno
));
636 rb_ary_push(branch_base
, INT2FIX(first_column
));
637 rb_ary_push(branch_base
, INT2FIX(last_lineno
));
638 rb_ary_push(branch_base
, INT2FIX(last_column
));
639 branches
= rb_hash_new();
640 rb_obj_hide(branches
);
641 rb_ary_push(branch_base
, branches
);
644 branches
= RARRAY_AREF(branch_base
, 5);
651 generate_dummy_line_node(int lineno
, int node_id
)
654 nd_set_line(&dummy
, lineno
);
655 nd_set_node_id(&dummy
, node_id
);
660 add_trace_branch_coverage(rb_iseq_t
*iseq
, LINK_ANCHOR
*const seq
, const NODE
*node
, int branch_id
, const char *type
, VALUE branches
)
662 const int first_lineno
= nd_first_lineno(node
), first_column
= nd_first_column(node
);
663 const int last_lineno
= nd_last_lineno(node
), last_column
= nd_last_column(node
);
665 if (!branch_coverage_valid_p(iseq
, first_lineno
)) return;
668 * if !branches[branch_id]
669 * branches[branch_id] = [type, first_lineno, first_column, last_lineno, last_column, counter_idx]
671 * counter_idx= branches[branch_id][5]
675 VALUE key
= INT2FIX(branch_id
);
676 VALUE branch
= rb_hash_aref(branches
, key
);
680 branch
= rb_ary_tmp_new(6);
681 rb_hash_aset(branches
, key
, branch
);
682 rb_ary_push(branch
, ID2SYM(rb_intern(type
)));
683 rb_ary_push(branch
, INT2FIX(first_lineno
));
684 rb_ary_push(branch
, INT2FIX(first_column
));
685 rb_ary_push(branch
, INT2FIX(last_lineno
));
686 rb_ary_push(branch
, INT2FIX(last_column
));
687 VALUE counters
= RARRAY_AREF(ISEQ_BRANCH_COVERAGE(iseq
), 1);
688 counter_idx
= RARRAY_LEN(counters
);
689 rb_ary_push(branch
, LONG2FIX(counter_idx
));
690 rb_ary_push(counters
, INT2FIX(0));
693 counter_idx
= FIX2LONG(RARRAY_AREF(branch
, 5));
696 ADD_TRACE_WITH_DATA(seq
, RUBY_EVENT_COVERAGE_BRANCH
, counter_idx
);
698 NODE dummy_line_node
= generate_dummy_line_node(last_lineno
, nd_node_id(node
));
699 ADD_INSN(seq
, &dummy_line_node
, nop
);
702 #define ISEQ_LAST_LINE(iseq) (ISEQ_COMPILE_DATA(iseq)->last_line)
705 validate_label(st_data_t name
, st_data_t label
, st_data_t arg
)
707 rb_iseq_t
*iseq
= (rb_iseq_t
*)arg
;
708 LABEL
*lobj
= (LABEL
*)label
;
709 if (!lobj
->link
.next
) {
711 COMPILE_ERROR(iseq
, lobj
->position
,
712 "%"PRIsVALUE
": undefined label",
713 rb_sym2str((VALUE
)name
));
720 validate_labels(rb_iseq_t
*iseq
, st_table
*labels_table
)
722 st_foreach(labels_table
, validate_label
, (st_data_t
)iseq
);
723 st_free_table(labels_table
);
727 rb_iseq_compile_callback(rb_iseq_t
*iseq
, const struct rb_iseq_new_with_callback_callback_func
* ifunc
)
732 (*ifunc
->func
)(iseq
, ret
, ifunc
->data
);
734 NODE dummy_line_node
= generate_dummy_line_node(ISEQ_COMPILE_DATA(iseq
)->last_line
, -1);
735 ADD_INSN(ret
, &dummy_line_node
, leave
);
737 CHECK(iseq_setup_insn(iseq
, ret
));
738 return iseq_setup(iseq
, ret
);
742 rb_iseq_compile_node(rb_iseq_t
*iseq
, const NODE
*node
)
747 if (IMEMO_TYPE_P(node
, imemo_ifunc
)) {
748 rb_raise(rb_eArgError
, "unexpected imemo_ifunc");
752 NO_CHECK(COMPILE(ret
, "nil", node
));
753 iseq_set_local_table(iseq
, 0);
755 /* assume node is T_NODE */
756 else if (nd_type_p(node
, NODE_SCOPE
)) {
757 /* iseq type of top, method, class, block */
758 iseq_set_local_table(iseq
, node
->nd_tbl
);
759 iseq_set_arguments(iseq
, ret
, node
->nd_args
);
761 switch (iseq
->body
->type
) {
762 case ISEQ_TYPE_BLOCK
:
764 LABEL
*start
= ISEQ_COMPILE_DATA(iseq
)->start_label
= NEW_LABEL(0);
765 LABEL
*end
= ISEQ_COMPILE_DATA(iseq
)->end_label
= NEW_LABEL(0);
767 start
->rescued
= LABEL_RESCUE_BEG
;
768 end
->rescued
= LABEL_RESCUE_END
;
770 ADD_TRACE(ret
, RUBY_EVENT_B_CALL
);
771 NODE dummy_line_node
= generate_dummy_line_node(FIX2INT(iseq
->body
->location
.first_lineno
), -1);
772 ADD_INSN (ret
, &dummy_line_node
, nop
);
773 ADD_LABEL(ret
, start
);
774 CHECK(COMPILE(ret
, "block body", node
->nd_body
));
776 ADD_TRACE(ret
, RUBY_EVENT_B_RETURN
);
777 ISEQ_COMPILE_DATA(iseq
)->last_line
= iseq
->body
->location
.code_location
.end_pos
.lineno
;
779 /* wide range catch handler must put at last */
780 ADD_CATCH_ENTRY(CATCH_TYPE_REDO
, start
, end
, NULL
, start
);
781 ADD_CATCH_ENTRY(CATCH_TYPE_NEXT
, start
, end
, NULL
, end
);
784 case ISEQ_TYPE_CLASS
:
786 ADD_TRACE(ret
, RUBY_EVENT_CLASS
);
787 CHECK(COMPILE(ret
, "scoped node", node
->nd_body
));
788 ADD_TRACE(ret
, RUBY_EVENT_END
);
789 ISEQ_COMPILE_DATA(iseq
)->last_line
= nd_line(node
);
792 case ISEQ_TYPE_METHOD
:
794 ISEQ_COMPILE_DATA(iseq
)->root_node
= node
->nd_body
;
795 ADD_TRACE(ret
, RUBY_EVENT_CALL
);
796 CHECK(COMPILE(ret
, "scoped node", node
->nd_body
));
797 ISEQ_COMPILE_DATA(iseq
)->root_node
= node
->nd_body
;
798 ADD_TRACE(ret
, RUBY_EVENT_RETURN
);
799 ISEQ_COMPILE_DATA(iseq
)->last_line
= nd_line(node
);
803 CHECK(COMPILE(ret
, "scoped node", node
->nd_body
));
810 #define INVALID_ISEQ_TYPE(type) \
811 ISEQ_TYPE_##type: m = #type; goto invalid_iseq_type
812 switch (iseq
->body
->type
) {
813 case INVALID_ISEQ_TYPE(METHOD
);
814 case INVALID_ISEQ_TYPE(CLASS
);
815 case INVALID_ISEQ_TYPE(BLOCK
);
816 case INVALID_ISEQ_TYPE(EVAL
);
817 case INVALID_ISEQ_TYPE(MAIN
);
818 case INVALID_ISEQ_TYPE(TOP
);
819 #undef INVALID_ISEQ_TYPE /* invalid iseq types end */
820 case ISEQ_TYPE_RESCUE
:
821 iseq_set_exception_local_table(iseq
);
822 CHECK(COMPILE(ret
, "rescue", node
));
824 case ISEQ_TYPE_ENSURE
:
825 iseq_set_exception_local_table(iseq
);
826 CHECK(COMPILE_POPPED(ret
, "ensure", node
));
828 case ISEQ_TYPE_PLAIN
:
829 CHECK(COMPILE(ret
, "ensure", node
));
832 COMPILE_ERROR(ERROR_ARGS
"unknown scope: %d", iseq
->body
->type
);
835 COMPILE_ERROR(ERROR_ARGS
"compile/ISEQ_TYPE_%s should not be reached", m
);
840 if (iseq
->body
->type
== ISEQ_TYPE_RESCUE
|| iseq
->body
->type
== ISEQ_TYPE_ENSURE
) {
841 NODE dummy_line_node
= generate_dummy_line_node(0, -1);
842 ADD_GETLOCAL(ret
, &dummy_line_node
, LVAR_ERRINFO
, 0);
843 ADD_INSN1(ret
, &dummy_line_node
, throw, INT2FIX(0) /* continue throw */ );
846 NODE dummy_line_node
= generate_dummy_line_node(ISEQ_COMPILE_DATA(iseq
)->last_line
, -1);
847 ADD_INSN(ret
, &dummy_line_node
, leave
);
851 if (ISEQ_COMPILE_DATA(iseq
)->labels_table
) {
852 st_table
*labels_table
= ISEQ_COMPILE_DATA(iseq
)->labels_table
;
853 ISEQ_COMPILE_DATA(iseq
)->labels_table
= 0;
854 validate_labels(iseq
, labels_table
);
857 CHECK(iseq_setup_insn(iseq
, ret
));
858 return iseq_setup(iseq
, ret
);
862 rb_iseq_translate_threaded_code(rb_iseq_t
*iseq
)
864 #if OPT_DIRECT_THREADED_CODE || OPT_CALL_THREADED_CODE
865 const void * const *table
= rb_vm_get_insns_address_table();
867 VALUE
*encoded
= (VALUE
*)iseq
->body
->iseq_encoded
;
869 for (i
= 0; i
< iseq
->body
->iseq_size
; /* */ ) {
870 int insn
= (int)iseq
->body
->iseq_encoded
[i
];
871 int len
= insn_len(insn
);
872 encoded
[i
] = (VALUE
)table
[insn
];
875 FL_SET((VALUE
)iseq
, ISEQ_TRANSLATED
);
881 rb_iseq_original_iseq(const rb_iseq_t
*iseq
) /* cold path */
883 VALUE
*original_code
;
885 if (ISEQ_ORIGINAL_ISEQ(iseq
)) return ISEQ_ORIGINAL_ISEQ(iseq
);
886 original_code
= ISEQ_ORIGINAL_ISEQ_ALLOC(iseq
, iseq
->body
->iseq_size
);
887 MEMCPY(original_code
, iseq
->body
->iseq_encoded
, VALUE
, iseq
->body
->iseq_size
);
889 #if OPT_DIRECT_THREADED_CODE || OPT_CALL_THREADED_CODE
893 for (i
= 0; i
< iseq
->body
->iseq_size
; /* */ ) {
894 const void *addr
= (const void *)original_code
[i
];
895 const int insn
= rb_vm_insn_addr2insn(addr
);
897 original_code
[i
] = insn
;
902 return original_code
;
905 /*********************************************/
906 /* definition of data structure for compiler */
907 /*********************************************/
910 * On 32-bit SPARC, GCC by default generates SPARC V7 code that may require
911 * 8-byte word alignment. On the other hand, Oracle Solaris Studio seems to
912 * generate SPARCV8PLUS code with unaligned memory access instructions.
913 * That is why the STRICT_ALIGNMENT is defined only with GCC.
915 #if defined(__sparc) && SIZEOF_VOIDP == 4 && defined(__GNUC__)
916 #define STRICT_ALIGNMENT
920 * Some OpenBSD platforms (including sparc64) require strict alignment.
922 #if defined(__OpenBSD__)
923 #include <sys/endian.h>
924 #ifdef __STRICT_ALIGNMENT
925 #define STRICT_ALIGNMENT
929 #ifdef STRICT_ALIGNMENT
930 #if defined(HAVE_TRUE_LONG_LONG) && SIZEOF_LONG_LONG > SIZEOF_VALUE
931 #define ALIGNMENT_SIZE SIZEOF_LONG_LONG
933 #define ALIGNMENT_SIZE SIZEOF_VALUE
935 #define PADDING_SIZE_MAX ((size_t)((ALIGNMENT_SIZE) - 1))
936 #define ALIGNMENT_SIZE_MASK PADDING_SIZE_MAX
937 /* Note: ALIGNMENT_SIZE == (2 ** N) is expected. */
939 #define PADDING_SIZE_MAX 0
940 #endif /* STRICT_ALIGNMENT */
942 #ifdef STRICT_ALIGNMENT
943 /* calculate padding size for aligned memory access */
945 calc_padding(void *ptr
, size_t size
)
950 mis
= (size_t)ptr
& ALIGNMENT_SIZE_MASK
;
952 padding
= ALIGNMENT_SIZE
- mis
;
955 * On 32-bit sparc or equivalents, when a single VALUE is requested
956 * and padding == sizeof(VALUE), it is clear that no padding is needed.
958 #if ALIGNMENT_SIZE > SIZEOF_VALUE
959 if (size
== sizeof(VALUE
) && padding
== sizeof(VALUE
)) {
966 #endif /* STRICT_ALIGNMENT */
969 compile_data_alloc_with_arena(struct iseq_compile_data_storage
**arena
, size_t size
)
972 struct iseq_compile_data_storage
*storage
= *arena
;
973 #ifdef STRICT_ALIGNMENT
974 size_t padding
= calc_padding((void *)&storage
->buff
[storage
->pos
], size
);
976 const size_t padding
= 0; /* expected to be optimized by compiler */
977 #endif /* STRICT_ALIGNMENT */
979 if (size
>= INT_MAX
- padding
) rb_memerror();
980 if (storage
->pos
+ size
+ padding
> storage
->size
) {
981 unsigned int alloc_size
= storage
->size
;
983 while (alloc_size
< size
+ PADDING_SIZE_MAX
) {
984 if (alloc_size
>= INT_MAX
/ 2) rb_memerror();
987 storage
->next
= (void *)ALLOC_N(char, alloc_size
+
988 offsetof(struct iseq_compile_data_storage
, buff
));
989 storage
= *arena
= storage
->next
;
992 storage
->size
= alloc_size
;
993 #ifdef STRICT_ALIGNMENT
994 padding
= calc_padding((void *)&storage
->buff
[storage
->pos
], size
);
995 #endif /* STRICT_ALIGNMENT */
998 #ifdef STRICT_ALIGNMENT
999 storage
->pos
+= (int)padding
;
1000 #endif /* STRICT_ALIGNMENT */
1002 ptr
= (void *)&storage
->buff
[storage
->pos
];
1003 storage
->pos
+= (int)size
;
1008 compile_data_alloc(rb_iseq_t
*iseq
, size_t size
)
1010 struct iseq_compile_data_storage
** arena
= &ISEQ_COMPILE_DATA(iseq
)->node
.storage_current
;
1011 return compile_data_alloc_with_arena(arena
, size
);
1014 static inline void *
1015 compile_data_alloc2(rb_iseq_t
*iseq
, size_t x
, size_t y
)
1017 size_t size
= rb_size_mul_or_raise(x
, y
, rb_eRuntimeError
);
1018 return compile_data_alloc(iseq
, size
);
1021 static inline void *
1022 compile_data_calloc2(rb_iseq_t
*iseq
, size_t x
, size_t y
)
1024 size_t size
= rb_size_mul_or_raise(x
, y
, rb_eRuntimeError
);
1025 void *p
= compile_data_alloc(iseq
, size
);
1031 compile_data_alloc_insn(rb_iseq_t
*iseq
)
1033 struct iseq_compile_data_storage
** arena
= &ISEQ_COMPILE_DATA(iseq
)->insn
.storage_current
;
1034 return (INSN
*)compile_data_alloc_with_arena(arena
, sizeof(INSN
));
1038 compile_data_alloc_label(rb_iseq_t
*iseq
)
1040 return (LABEL
*)compile_data_alloc(iseq
, sizeof(LABEL
));
1044 compile_data_alloc_adjust(rb_iseq_t
*iseq
)
1046 return (ADJUST
*)compile_data_alloc(iseq
, sizeof(ADJUST
));
1050 compile_data_alloc_trace(rb_iseq_t
*iseq
)
1052 return (TRACE
*)compile_data_alloc(iseq
, sizeof(TRACE
));
1056 * elem1, elemX => elem1, elem2, elemX
1059 ELEM_INSERT_NEXT(LINK_ELEMENT
*elem1
, LINK_ELEMENT
*elem2
)
1061 elem2
->next
= elem1
->next
;
1062 elem2
->prev
= elem1
;
1063 elem1
->next
= elem2
;
1065 elem2
->next
->prev
= elem2
;
1070 * elem1, elemX => elemX, elem2, elem1
1073 ELEM_INSERT_PREV(LINK_ELEMENT
*elem1
, LINK_ELEMENT
*elem2
)
1075 elem2
->prev
= elem1
->prev
;
1076 elem2
->next
= elem1
;
1077 elem1
->prev
= elem2
;
1079 elem2
->prev
->next
= elem2
;
1084 * elemX, elem1, elemY => elemX, elem2, elemY
1087 ELEM_REPLACE(LINK_ELEMENT
*elem1
, LINK_ELEMENT
*elem2
)
1089 elem2
->prev
= elem1
->prev
;
1090 elem2
->next
= elem1
->next
;
1092 elem1
->prev
->next
= elem2
;
1095 elem1
->next
->prev
= elem2
;
1100 ELEM_REMOVE(LINK_ELEMENT
*elem
)
1102 elem
->prev
->next
= elem
->next
;
1104 elem
->next
->prev
= elem
->prev
;
1108 static LINK_ELEMENT
*
1109 FIRST_ELEMENT(const LINK_ANCHOR
*const anchor
)
1111 return anchor
->anchor
.next
;
1114 static LINK_ELEMENT
*
1115 LAST_ELEMENT(LINK_ANCHOR
*const anchor
)
1117 return anchor
->last
;
1120 static LINK_ELEMENT
*
1121 ELEM_FIRST_INSN(LINK_ELEMENT
*elem
)
1124 switch (elem
->type
) {
1125 case ISEQ_ELEMENT_INSN
:
1126 case ISEQ_ELEMENT_ADJUST
:
1136 LIST_INSN_SIZE_ONE(const LINK_ANCHOR
*const anchor
)
1138 LINK_ELEMENT
*first_insn
= ELEM_FIRST_INSN(FIRST_ELEMENT(anchor
));
1139 if (first_insn
!= NULL
&&
1140 ELEM_FIRST_INSN(first_insn
->next
) == NULL
) {
1149 LIST_INSN_SIZE_ZERO(const LINK_ANCHOR
*const anchor
)
1151 if (ELEM_FIRST_INSN(FIRST_ELEMENT(anchor
)) == NULL
) {
1163 * anc1: e1, e2, e3, e4, e5
1164 * anc2: e4, e5 (broken)
1167 APPEND_LIST(ISEQ_ARG_DECLARE LINK_ANCHOR
*const anc1
, LINK_ANCHOR
*const anc2
)
1169 if (anc2
->anchor
.next
) {
1170 anc1
->last
->next
= anc2
->anchor
.next
;
1171 anc2
->anchor
.next
->prev
= anc1
->last
;
1172 anc1
->last
= anc2
->last
;
1174 verify_list("append", anc1
);
1177 #define APPEND_LIST(anc1, anc2) APPEND_LIST(iseq, (anc1), (anc2))
1182 debug_list(ISEQ_ARG_DECLARE LINK_ANCHOR
*const anchor
, LINK_ELEMENT
*cur
)
1184 LINK_ELEMENT
*list
= FIRST_ELEMENT(anchor
);
1186 printf("anch: %p, frst: %p, last: %p\n", (void *)&anchor
->anchor
,
1187 (void *)anchor
->anchor
.next
, (void *)anchor
->last
);
1189 printf("curr: %p, next: %p, prev: %p, type: %d\n", (void *)list
, (void *)list
->next
,
1190 (void *)list
->prev
, (int)list
->type
);
1195 dump_disasm_list_with_cursor(anchor
->anchor
.next
, cur
, 0);
1196 verify_list("debug list", anchor
);
1199 #define debug_list(anc, cur) debug_list(iseq, (anc), (cur))
1202 #define debug_list(anc, cur) ((void)0)
1206 new_trace_body(rb_iseq_t
*iseq
, rb_event_flag_t event
, long data
)
1208 TRACE
*trace
= compile_data_alloc_trace(iseq
);
1210 trace
->link
.type
= ISEQ_ELEMENT_TRACE
;
1211 trace
->link
.next
= NULL
;
1212 trace
->event
= event
;
1219 new_label_body(rb_iseq_t
*iseq
, long line
)
1221 LABEL
*labelobj
= compile_data_alloc_label(iseq
);
1223 labelobj
->link
.type
= ISEQ_ELEMENT_LABEL
;
1224 labelobj
->link
.next
= 0;
1226 labelobj
->label_no
= ISEQ_COMPILE_DATA(iseq
)->label_no
++;
1227 labelobj
->sc_state
= 0;
1229 labelobj
->refcnt
= 0;
1231 labelobj
->rescued
= LABEL_RESCUE_NONE
;
1232 labelobj
->unremovable
= 0;
1237 new_adjust_body(rb_iseq_t
*iseq
, LABEL
*label
, int line
)
1239 ADJUST
*adjust
= compile_data_alloc_adjust(iseq
);
1240 adjust
->link
.type
= ISEQ_ELEMENT_ADJUST
;
1241 adjust
->link
.next
= 0;
1242 adjust
->label
= label
;
1243 adjust
->line_no
= line
;
1244 LABEL_UNREMOVABLE(label
);
1249 new_insn_core(rb_iseq_t
*iseq
, const NODE
*line_node
,
1250 int insn_id
, int argc
, VALUE
*argv
)
1252 INSN
*iobj
= compile_data_alloc_insn(iseq
);
1254 /* printf("insn_id: %d, line: %d\n", insn_id, nd_line(line_node)); */
1256 iobj
->link
.type
= ISEQ_ELEMENT_INSN
;
1257 iobj
->link
.next
= 0;
1258 iobj
->insn_id
= insn_id
;
1259 iobj
->insn_info
.line_no
= nd_line(line_node
);
1260 iobj
->insn_info
.node_id
= nd_node_id(line_node
);
1261 iobj
->insn_info
.events
= 0;
1262 iobj
->operands
= argv
;
1263 iobj
->operand_size
= argc
;
1269 new_insn_body(rb_iseq_t
*iseq
, const NODE
*const line_node
, enum ruby_vminsn_type insn_id
, int argc
, ...)
1271 VALUE
*operands
= 0;
1275 va_start(argv
, argc
);
1276 operands
= compile_data_alloc2(iseq
, sizeof(VALUE
), argc
);
1277 for (i
= 0; i
< argc
; i
++) {
1278 VALUE v
= va_arg(argv
, VALUE
);
1283 return new_insn_core(iseq
, line_node
, insn_id
, argc
, operands
);
1286 static const struct rb_callinfo
*
1287 new_callinfo(rb_iseq_t
*iseq
, ID mid
, int argc
, unsigned int flag
, struct rb_callinfo_kwarg
*kw_arg
, int has_blockiseq
)
1289 VM_ASSERT(argc
>= 0);
1291 if (!(flag
& (VM_CALL_ARGS_SPLAT
| VM_CALL_ARGS_BLOCKARG
| VM_CALL_KW_SPLAT
)) &&
1292 kw_arg
== NULL
&& !has_blockiseq
) {
1293 flag
|= VM_CALL_ARGS_SIMPLE
;
1297 flag
|= VM_CALL_KWARG
;
1298 argc
+= kw_arg
->keyword_len
;
1301 // fprintf(stderr, "[%d] id:%s\t", (int)iseq->body->ci_size, rb_id2name(mid)); rp(iseq);
1302 iseq
->body
->ci_size
++;
1303 const struct rb_callinfo
*ci
= vm_ci_new(mid
, flag
, argc
, kw_arg
);
1304 RB_OBJ_WRITTEN(iseq
, Qundef
, ci
);
1309 new_insn_send(rb_iseq_t
*iseq
, const NODE
*const line_node
, ID id
, VALUE argc
, const rb_iseq_t
*blockiseq
, VALUE flag
, struct rb_callinfo_kwarg
*keywords
)
1311 VALUE
*operands
= compile_data_calloc2(iseq
, sizeof(VALUE
), 2);
1312 VALUE ci
= (VALUE
)new_callinfo(iseq
, id
, FIX2INT(argc
), FIX2INT(flag
), keywords
, blockiseq
!= NULL
);
1314 operands
[1] = (VALUE
)blockiseq
;
1316 RB_OBJ_WRITTEN(iseq
, Qundef
, blockiseq
);
1318 INSN
*insn
= new_insn_core(iseq
, line_node
, BIN(send
), 2, operands
);
1319 RB_OBJ_WRITTEN(iseq
, Qundef
, ci
);
1325 new_child_iseq(rb_iseq_t
*iseq
, const NODE
*const node
,
1326 VALUE name
, const rb_iseq_t
*parent
, enum iseq_type type
, int line_no
)
1328 rb_iseq_t
*ret_iseq
;
1332 ast
.compile_option
= 0;
1333 ast
.script_lines
= iseq
->body
->variable
.script_lines
;
1335 debugs("[new_child_iseq]> ---------------------------------------\n");
1336 int isolated_depth
= ISEQ_COMPILE_DATA(iseq
)->isolated_depth
;
1337 ret_iseq
= rb_iseq_new_with_opt(&ast
, name
,
1338 rb_iseq_path(iseq
), rb_iseq_realpath(iseq
),
1339 INT2FIX(line_no
), parent
,
1340 isolated_depth
? isolated_depth
+ 1 : 0,
1341 type
, ISEQ_COMPILE_DATA(iseq
)->option
);
1342 debugs("[new_child_iseq]< ---------------------------------------\n");
1347 new_child_iseq_with_callback(rb_iseq_t
*iseq
, const struct rb_iseq_new_with_callback_callback_func
*ifunc
,
1348 VALUE name
, const rb_iseq_t
*parent
, enum iseq_type type
, int line_no
)
1350 rb_iseq_t
*ret_iseq
;
1352 debugs("[new_child_iseq_with_callback]> ---------------------------------------\n");
1353 ret_iseq
= rb_iseq_new_with_callback(ifunc
, name
,
1354 rb_iseq_path(iseq
), rb_iseq_realpath(iseq
),
1355 INT2FIX(line_no
), parent
, type
, ISEQ_COMPILE_DATA(iseq
)->option
);
1356 debugs("[new_child_iseq_with_callback]< ---------------------------------------\n");
1361 set_catch_except_p(struct rb_iseq_constant_body
*body
)
1363 body
->catch_except_p
= TRUE
;
1364 if (body
->parent_iseq
!= NULL
) {
1365 set_catch_except_p(body
->parent_iseq
->body
);
1369 /* Set body->catch_except_p to TRUE if the ISeq may catch an exception. If it is FALSE,
1370 JIT-ed code may be optimized. If we are extremely conservative, we should set TRUE
1371 if catch table exists. But we want to optimize while loop, which always has catch
1372 table entries for break/next/redo.
1374 So this function sets TRUE for limited ISeqs with break/next/redo catch table entries
1375 whose child ISeq would really raise an exception. */
1377 update_catch_except_flags(struct rb_iseq_constant_body
*body
)
1382 const struct iseq_catch_table
*ct
= body
->catch_table
;
1384 /* This assumes that a block has parent_iseq which may catch an exception from the block, and that
1385 BREAK/NEXT/REDO catch table entries are used only when `throw` insn is used in the block. */
1387 while (pos
< body
->iseq_size
) {
1388 insn
= rb_vm_insn_decode(body
->iseq_encoded
[pos
]);
1389 if (insn
== BIN(throw)) {
1390 set_catch_except_p(body
);
1393 pos
+= insn_len(insn
);
1399 for (i
= 0; i
< ct
->size
; i
++) {
1400 const struct iseq_catch_table_entry
*entry
=
1401 UNALIGNED_MEMBER_PTR(ct
, entries
[i
]);
1402 if (entry
->type
!= CATCH_TYPE_BREAK
1403 && entry
->type
!= CATCH_TYPE_NEXT
1404 && entry
->type
!= CATCH_TYPE_REDO
) {
1405 body
->catch_except_p
= TRUE
;
1412 iseq_insert_nop_between_end_and_cont(rb_iseq_t
*iseq
)
1414 VALUE catch_table_ary
= ISEQ_COMPILE_DATA(iseq
)->catch_table_ary
;
1415 if (NIL_P(catch_table_ary
)) return;
1416 unsigned int i
, tlen
= (unsigned int)RARRAY_LEN(catch_table_ary
);
1417 const VALUE
*tptr
= RARRAY_CONST_PTR_TRANSIENT(catch_table_ary
);
1418 for (i
= 0; i
< tlen
; i
++) {
1419 const VALUE
*ptr
= RARRAY_CONST_PTR_TRANSIENT(tptr
[i
]);
1420 LINK_ELEMENT
*end
= (LINK_ELEMENT
*)(ptr
[2] & ~1);
1421 LINK_ELEMENT
*cont
= (LINK_ELEMENT
*)(ptr
[4] & ~1);
1424 enum catch_type ct
= (enum catch_type
)(ptr
[0] & 0xffff);
1426 if (ct
!= CATCH_TYPE_BREAK
1427 && ct
!= CATCH_TYPE_NEXT
1428 && ct
!= CATCH_TYPE_REDO
) {
1430 for (e
= end
; e
&& (IS_LABEL(e
) || IS_TRACE(e
)); e
= e
->next
) {
1432 NODE dummy_line_node
= generate_dummy_line_node(0, -1);
1433 INSN
*nop
= new_insn_core(iseq
, &dummy_line_node
, BIN(nop
), 0, 0);
1434 ELEM_INSERT_NEXT(end
, &nop
->link
);
1443 iseq_setup_insn(rb_iseq_t
*iseq
, LINK_ANCHOR
*const anchor
)
1445 if (RTEST(ISEQ_COMPILE_DATA(iseq
)->err_info
))
1448 /* debugs("[compile step 2] (iseq_array_to_linkedlist)\n"); */
1450 if (compile_debug
> 5)
1451 dump_disasm_list(FIRST_ELEMENT(anchor
));
1453 debugs("[compile step 3.1 (iseq_optimize)]\n");
1454 iseq_optimize(iseq
, anchor
);
1456 if (compile_debug
> 5)
1457 dump_disasm_list(FIRST_ELEMENT(anchor
));
1459 if (ISEQ_COMPILE_DATA(iseq
)->option
->instructions_unification
) {
1460 debugs("[compile step 3.2 (iseq_insns_unification)]\n");
1461 iseq_insns_unification(iseq
, anchor
);
1462 if (compile_debug
> 5)
1463 dump_disasm_list(FIRST_ELEMENT(anchor
));
1466 if (ISEQ_COMPILE_DATA(iseq
)->option
->stack_caching
) {
1467 debugs("[compile step 3.3 (iseq_set_sequence_stackcaching)]\n");
1468 iseq_set_sequence_stackcaching(iseq
, anchor
);
1469 if (compile_debug
> 5)
1470 dump_disasm_list(FIRST_ELEMENT(anchor
));
1473 debugs("[compile step 3.4 (iseq_insert_nop_between_end_and_cont)]\n");
1474 iseq_insert_nop_between_end_and_cont(iseq
);
1475 if (compile_debug
> 5)
1476 dump_disasm_list(FIRST_ELEMENT(anchor
));
1482 iseq_setup(rb_iseq_t
*iseq
, LINK_ANCHOR
*const anchor
)
1484 if (RTEST(ISEQ_COMPILE_DATA(iseq
)->err_info
))
1487 debugs("[compile step 4.1 (iseq_set_sequence)]\n");
1488 if (!iseq_set_sequence(iseq
, anchor
)) return COMPILE_NG
;
1489 if (compile_debug
> 5)
1490 dump_disasm_list(FIRST_ELEMENT(anchor
));
1492 debugs("[compile step 4.2 (iseq_set_exception_table)]\n");
1493 if (!iseq_set_exception_table(iseq
)) return COMPILE_NG
;
1495 debugs("[compile step 4.3 (set_optargs_table)] \n");
1496 if (!iseq_set_optargs_table(iseq
)) return COMPILE_NG
;
1498 debugs("[compile step 5 (iseq_translate_threaded_code)] \n");
1499 if (!rb_iseq_translate_threaded_code(iseq
)) return COMPILE_NG
;
1501 debugs("[compile step 6 (update_catch_except_flags)] \n");
1502 update_catch_except_flags(iseq
->body
);
1504 debugs("[compile step 6.1 (remove unused catch tables)] \n");
1505 if (!iseq
->body
->catch_except_p
&& iseq
->body
->catch_table
) {
1506 xfree(iseq
->body
->catch_table
);
1507 iseq
->body
->catch_table
= NULL
;
1510 #if VM_INSN_INFO_TABLE_IMPL == 2
1511 if (iseq
->body
->insns_info
.succ_index_table
== NULL
) {
1512 debugs("[compile step 7 (rb_iseq_insns_info_encode_positions)] \n");
1513 rb_iseq_insns_info_encode_positions(iseq
);
1517 if (compile_debug
> 1) {
1518 VALUE str
= rb_iseq_disasm(iseq
);
1519 printf("%s\n", StringValueCStr(str
));
1521 verify_call_cache(iseq
);
1522 debugs("[compile step: finish]\n");
1528 iseq_set_exception_local_table(rb_iseq_t
*iseq
)
1530 iseq
->body
->local_table_size
= numberof(rb_iseq_shared_exc_local_tbl
);
1531 iseq
->body
->local_table
= rb_iseq_shared_exc_local_tbl
;
1536 get_lvar_level(const rb_iseq_t
*iseq
)
1539 while (iseq
!= iseq
->body
->local_iseq
) {
1541 iseq
= iseq
->body
->parent_iseq
;
1547 get_dyna_var_idx_at_raw(const rb_iseq_t
*iseq
, ID id
)
1551 for (i
= 0; i
< iseq
->body
->local_table_size
; i
++) {
1552 if (iseq
->body
->local_table
[i
] == id
) {
1560 get_local_var_idx(const rb_iseq_t
*iseq
, ID id
)
1562 int idx
= get_dyna_var_idx_at_raw(iseq
->body
->local_iseq
, id
);
1565 COMPILE_ERROR(iseq
, ISEQ_LAST_LINE(iseq
),
1566 "get_local_var_idx: %d", idx
);
1573 get_dyna_var_idx(const rb_iseq_t
*iseq
, ID id
, int *level
, int *ls
)
1575 int lv
= 0, idx
= -1;
1576 const rb_iseq_t
*const topmost_iseq
= iseq
;
1579 idx
= get_dyna_var_idx_at_raw(iseq
, id
);
1583 iseq
= iseq
->body
->parent_iseq
;
1588 COMPILE_ERROR(topmost_iseq
, ISEQ_LAST_LINE(topmost_iseq
),
1589 "get_dyna_var_idx: -1");
1593 *ls
= iseq
->body
->local_table_size
;
1598 iseq_local_block_param_p(const rb_iseq_t
*iseq
, unsigned int idx
, unsigned int level
)
1600 const struct rb_iseq_constant_body
*body
;
1602 iseq
= iseq
->body
->parent_iseq
;
1606 if (body
->local_iseq
== iseq
&& /* local variables */
1607 body
->param
.flags
.has_block
&&
1608 body
->local_table_size
- body
->param
.block_start
== idx
) {
1617 iseq_block_param_id_p(const rb_iseq_t
*iseq
, ID id
, int *pidx
, int *plevel
)
1620 int idx
= get_dyna_var_idx(iseq
, id
, &level
, &ls
);
1621 if (iseq_local_block_param_p(iseq
, ls
- idx
, level
)) {
1632 access_outer_variables(const rb_iseq_t
*iseq
, int level
, ID id
, bool write
)
1634 int isolated_depth
= ISEQ_COMPILE_DATA(iseq
)->isolated_depth
;
1636 if (isolated_depth
&& level
>= isolated_depth
) {
1637 if (id
== rb_intern("yield")) {
1638 COMPILE_ERROR(iseq
, ISEQ_LAST_LINE(iseq
), "can not yield from isolated Proc");
1641 COMPILE_ERROR(iseq
, ISEQ_LAST_LINE(iseq
), "can not access variable `%s' from isolated Proc", rb_id2name(id
));
1645 for (int i
=0; i
<level
; i
++) {
1647 struct rb_id_table
*ovs
= iseq
->body
->outer_variables
;
1650 ovs
= iseq
->body
->outer_variables
= rb_id_table_create(8);
1653 if (rb_id_table_lookup(iseq
->body
->outer_variables
, id
, &val
)) {
1654 if (write
&& !val
) {
1655 rb_id_table_insert(iseq
->body
->outer_variables
, id
, Qtrue
);
1659 rb_id_table_insert(iseq
->body
->outer_variables
, id
, RBOOL(write
));
1662 iseq
= iseq
->body
->parent_iseq
;
1667 iseq_lvar_id(const rb_iseq_t
*iseq
, int idx
, int level
)
1669 for (int i
=0; i
<level
; i
++) {
1670 iseq
= iseq
->body
->parent_iseq
;
1673 ID id
= iseq
->body
->local_table
[iseq
->body
->local_table_size
- idx
];
1674 // fprintf(stderr, "idx:%d level:%d ID:%s\n", idx, level, rb_id2name(id));
1679 iseq_add_getlocal(rb_iseq_t
*iseq
, LINK_ANCHOR
*const seq
, const NODE
*const line_node
, int idx
, int level
)
1681 if (iseq_local_block_param_p(iseq
, idx
, level
)) {
1682 ADD_INSN2(seq
, line_node
, getblockparam
, INT2FIX((idx
) + VM_ENV_DATA_SIZE
- 1), INT2FIX(level
));
1685 ADD_INSN2(seq
, line_node
, getlocal
, INT2FIX((idx
) + VM_ENV_DATA_SIZE
- 1), INT2FIX(level
));
1687 if (level
> 0) access_outer_variables(iseq
, level
, iseq_lvar_id(iseq
, idx
, level
), Qfalse
);
1691 iseq_add_setlocal(rb_iseq_t
*iseq
, LINK_ANCHOR
*const seq
, const NODE
*const line_node
, int idx
, int level
)
1693 if (iseq_local_block_param_p(iseq
, idx
, level
)) {
1694 ADD_INSN2(seq
, line_node
, setblockparam
, INT2FIX((idx
) + VM_ENV_DATA_SIZE
- 1), INT2FIX(level
));
1697 ADD_INSN2(seq
, line_node
, setlocal
, INT2FIX((idx
) + VM_ENV_DATA_SIZE
- 1), INT2FIX(level
));
1699 if (level
> 0) access_outer_variables(iseq
, level
, iseq_lvar_id(iseq
, idx
, level
), Qtrue
);
1705 iseq_calc_param_size(rb_iseq_t
*iseq
)
1707 struct rb_iseq_constant_body
*const body
= iseq
->body
;
1708 if (body
->param
.flags
.has_opt
||
1709 body
->param
.flags
.has_post
||
1710 body
->param
.flags
.has_rest
||
1711 body
->param
.flags
.has_block
||
1712 body
->param
.flags
.has_kw
||
1713 body
->param
.flags
.has_kwrest
) {
1715 if (body
->param
.flags
.has_block
) {
1716 body
->param
.size
= body
->param
.block_start
+ 1;
1718 else if (body
->param
.flags
.has_kwrest
) {
1719 body
->param
.size
= body
->param
.keyword
->rest_start
+ 1;
1721 else if (body
->param
.flags
.has_kw
) {
1722 body
->param
.size
= body
->param
.keyword
->bits_start
+ 1;
1724 else if (body
->param
.flags
.has_post
) {
1725 body
->param
.size
= body
->param
.post_start
+ body
->param
.post_num
;
1727 else if (body
->param
.flags
.has_rest
) {
1728 body
->param
.size
= body
->param
.rest_start
+ 1;
1730 else if (body
->param
.flags
.has_opt
) {
1731 body
->param
.size
= body
->param
.lead_num
+ body
->param
.opt_num
;
1738 body
->param
.size
= body
->param
.lead_num
;
1743 iseq_set_arguments_keywords(rb_iseq_t
*iseq
, LINK_ANCHOR
*const optargs
,
1744 const struct rb_args_info
*args
, int arg_size
)
1746 const NODE
*node
= args
->kw_args
;
1747 struct rb_iseq_constant_body
*const body
= iseq
->body
;
1748 struct rb_iseq_param_keyword
*keyword
;
1749 const VALUE default_values
= rb_ary_tmp_new(1);
1750 const VALUE complex_mark
= rb_str_tmp_new(0);
1751 int kw
= 0, rkw
= 0, di
= 0, i
;
1753 body
->param
.flags
.has_kw
= TRUE
;
1754 body
->param
.keyword
= keyword
= ZALLOC_N(struct rb_iseq_param_keyword
, 1);
1758 node
= node
->nd_next
;
1761 keyword
->bits_start
= arg_size
++;
1763 node
= args
->kw_args
;
1765 const NODE
*val_node
= node
->nd_body
->nd_value
;
1768 if (val_node
== NODE_SPECIAL_REQUIRED_KEYWORD
) {
1772 switch (nd_type(val_node
)) {
1774 dv
= val_node
->nd_lit
;
1786 NO_CHECK(COMPILE_POPPED(optargs
, "kwarg", node
)); /* nd_type_p(node, NODE_KW_ARG) */
1790 keyword
->num
= ++di
;
1791 rb_ary_push(default_values
, dv
);
1794 node
= node
->nd_next
;
1799 if (args
->kw_rest_arg
->nd_vid
!= 0) {
1800 keyword
->rest_start
= arg_size
++;
1801 body
->param
.flags
.has_kwrest
= TRUE
;
1803 keyword
->required_num
= rkw
;
1804 keyword
->table
= &body
->local_table
[keyword
->bits_start
- keyword
->num
];
1807 VALUE
*dvs
= ALLOC_N(VALUE
, RARRAY_LEN(default_values
));
1809 for (i
= 0; i
< RARRAY_LEN(default_values
); i
++) {
1810 VALUE dv
= RARRAY_AREF(default_values
, i
);
1811 if (dv
== complex_mark
) dv
= Qundef
;
1812 if (!SPECIAL_CONST_P(dv
)) {
1813 RB_OBJ_WRITTEN(iseq
, Qundef
, dv
);
1818 keyword
->default_values
= dvs
;
1824 iseq_set_arguments(rb_iseq_t
*iseq
, LINK_ANCHOR
*const optargs
, const NODE
*const node_args
)
1826 debugs("iseq_set_arguments: %s\n", node_args
? "" : "0");
1829 struct rb_iseq_constant_body
*const body
= iseq
->body
;
1830 struct rb_args_info
*args
= node_args
->nd_ainfo
;
1836 EXPECT_NODE("iseq_set_arguments", node_args
, NODE_ARGS
, COMPILE_NG
);
1838 body
->param
.flags
.ruby2_keywords
= args
->ruby2_keywords
;
1839 body
->param
.lead_num
= arg_size
= (int)args
->pre_args_num
;
1840 if (body
->param
.lead_num
> 0) body
->param
.flags
.has_lead
= TRUE
;
1841 debugs(" - argc: %d\n", body
->param
.lead_num
);
1843 rest_id
= args
->rest_arg
;
1844 if (rest_id
== NODE_SPECIAL_EXCESSIVE_COMMA
) {
1848 block_id
= args
->block_arg
;
1850 if (args
->opt_args
) {
1851 const NODE
*node
= args
->opt_args
;
1853 VALUE labels
= rb_ary_tmp_new(1);
1858 label
= NEW_LABEL(nd_line(node
));
1859 rb_ary_push(labels
, (VALUE
)label
| 1);
1860 ADD_LABEL(optargs
, label
);
1861 NO_CHECK(COMPILE_POPPED(optargs
, "optarg", node
->nd_body
));
1862 node
= node
->nd_next
;
1867 label
= NEW_LABEL(nd_line(node_args
));
1868 rb_ary_push(labels
, (VALUE
)label
| 1);
1869 ADD_LABEL(optargs
, label
);
1871 opt_table
= ALLOC_N(VALUE
, i
+1);
1873 MEMCPY(opt_table
, RARRAY_CONST_PTR_TRANSIENT(labels
), VALUE
, i
+1);
1874 for (j
= 0; j
< i
+1; j
++) {
1877 rb_ary_clear(labels
);
1879 body
->param
.flags
.has_opt
= TRUE
;
1880 body
->param
.opt_num
= i
;
1881 body
->param
.opt_table
= opt_table
;
1886 body
->param
.rest_start
= arg_size
++;
1887 body
->param
.flags
.has_rest
= TRUE
;
1888 assert(body
->param
.rest_start
!= -1);
1891 if (args
->first_post_arg
) {
1892 body
->param
.post_start
= arg_size
;
1893 body
->param
.post_num
= args
->post_args_num
;
1894 body
->param
.flags
.has_post
= TRUE
;
1895 arg_size
+= args
->post_args_num
;
1897 if (body
->param
.flags
.has_rest
) { /* TODO: why that? */
1898 body
->param
.post_start
= body
->param
.rest_start
+ 1;
1902 if (args
->kw_args
) {
1903 arg_size
= iseq_set_arguments_keywords(iseq
, optargs
, args
, arg_size
);
1905 else if (args
->kw_rest_arg
) {
1906 struct rb_iseq_param_keyword
*keyword
= ZALLOC_N(struct rb_iseq_param_keyword
, 1);
1907 keyword
->rest_start
= arg_size
++;
1908 body
->param
.keyword
= keyword
;
1909 body
->param
.flags
.has_kwrest
= TRUE
;
1911 else if (args
->no_kwarg
) {
1912 body
->param
.flags
.accepts_no_kwarg
= TRUE
;
1916 body
->param
.block_start
= arg_size
++;
1917 body
->param
.flags
.has_block
= TRUE
;
1920 iseq_calc_param_size(iseq
);
1921 body
->param
.size
= arg_size
;
1923 if (args
->pre_init
) { /* m_init */
1924 NO_CHECK(COMPILE_POPPED(optargs
, "init arguments (m)", args
->pre_init
));
1926 if (args
->post_init
) { /* p_init */
1927 NO_CHECK(COMPILE_POPPED(optargs
, "init arguments (p)", args
->post_init
));
1930 if (body
->type
== ISEQ_TYPE_BLOCK
) {
1931 if (body
->param
.flags
.has_opt
== FALSE
&&
1932 body
->param
.flags
.has_post
== FALSE
&&
1933 body
->param
.flags
.has_rest
== FALSE
&&
1934 body
->param
.flags
.has_kw
== FALSE
&&
1935 body
->param
.flags
.has_kwrest
== FALSE
) {
1937 if (body
->param
.lead_num
== 1 && last_comma
== 0) {
1939 body
->param
.flags
.ambiguous_param0
= TRUE
;
1949 iseq_set_local_table(rb_iseq_t
*iseq
, const rb_ast_id_table_t
*tbl
)
1951 unsigned int size
= tbl
? tbl
->size
: 0;
1954 ID
*ids
= (ID
*)ALLOC_N(ID
, size
);
1955 MEMCPY(ids
, tbl
->ids
, ID
, size
);
1956 iseq
->body
->local_table
= ids
;
1958 iseq
->body
->local_table_size
= size
;
1960 debugs("iseq_set_local_table: %u\n", iseq
->body
->local_table_size
);
1965 rb_iseq_cdhash_cmp(VALUE val
, VALUE lit
)
1972 else if ((tlit
= OBJ_BUILTIN_TYPE(lit
)) == -1) {
1975 else if ((tval
= OBJ_BUILTIN_TYPE(val
)) == -1) {
1978 else if (tlit
!= tval
) {
1981 else if (tlit
== T_SYMBOL
) {
1984 else if (tlit
== T_STRING
) {
1985 return rb_str_hash_cmp(lit
, val
);
1987 else if (tlit
== T_BIGNUM
) {
1988 long x
= FIX2LONG(rb_big_cmp(lit
, val
));
1990 /* Given lit and val are both Bignum, x must be -1, 0, 1.
1991 * There is no need to call rb_fix2int here. */
1992 RUBY_ASSERT((x
== 1) || (x
== 0) || (x
== -1));
1995 else if (tlit
== T_FLOAT
) {
1996 return rb_float_cmp(lit
, val
);
1998 else if (tlit
== T_RATIONAL
) {
1999 const struct RRational
*rat1
= RRATIONAL(val
);
2000 const struct RRational
*rat2
= RRATIONAL(lit
);
2001 return rb_iseq_cdhash_cmp(rat1
->num
, rat2
->num
) || rb_iseq_cdhash_cmp(rat1
->den
, rat2
->den
);
2003 else if (tlit
== T_COMPLEX
) {
2004 const struct RComplex
*comp1
= RCOMPLEX(val
);
2005 const struct RComplex
*comp2
= RCOMPLEX(lit
);
2006 return rb_iseq_cdhash_cmp(comp1
->real
, comp2
->real
) || rb_iseq_cdhash_cmp(comp1
->imag
, comp2
->imag
);
2008 else if (tlit
== T_REGEXP
) {
2009 return rb_reg_equal(val
, lit
) ? 0 : -1;
2012 UNREACHABLE_RETURN(-1);
2017 rb_iseq_cdhash_hash(VALUE a
)
2019 switch (OBJ_BUILTIN_TYPE(a
)) {
2022 return (st_index_t
)a
;
2024 return rb_str_hash(a
);
2026 return FIX2LONG(rb_big_hash(a
));
2028 return rb_dbl_long_hash(RFLOAT_VALUE(a
));
2030 return rb_rational_hash(a
);
2032 return rb_complex_hash(a
);
2034 return NUM2LONG(rb_reg_hash(a
));
2036 UNREACHABLE_RETURN(0);
2040 static const struct st_hash_type cdhash_type
= {
2042 rb_iseq_cdhash_hash
,
2045 struct cdhash_set_label_struct
{
2052 cdhash_set_label_i(VALUE key
, VALUE val
, VALUE ptr
)
2054 struct cdhash_set_label_struct
*data
= (struct cdhash_set_label_struct
*)ptr
;
2055 LABEL
*lobj
= (LABEL
*)(val
& ~1);
2056 rb_hash_aset(data
->hash
, key
, INT2FIX(lobj
->position
- (data
->pos
+data
->len
)));
2062 get_ivar_ic_value(rb_iseq_t
*iseq
,ID id
)
2065 struct rb_id_table
*tbl
= ISEQ_COMPILE_DATA(iseq
)->ivar_cache_table
;
2067 if (rb_id_table_lookup(tbl
,id
,&val
)) {
2072 tbl
= rb_id_table_create(1);
2073 ISEQ_COMPILE_DATA(iseq
)->ivar_cache_table
= tbl
;
2075 val
= INT2FIX(iseq
->body
->is_size
++);
2076 rb_id_table_insert(tbl
,id
,val
);
2080 #define BADINSN_DUMP(anchor, list, dest) \
2081 dump_disasm_list_with_cursor(FIRST_ELEMENT(anchor), list, dest)
2083 #define BADINSN_ERROR \
2084 (xfree(generated_iseq), \
2085 xfree(insns_info), \
2086 BADINSN_DUMP(anchor, list, NULL), \
2090 fix_sp_depth(rb_iseq_t
*iseq
, LINK_ANCHOR
*const anchor
)
2092 int stack_max
= 0, sp
= 0, line
= 0;
2095 for (list
= FIRST_ELEMENT(anchor
); list
; list
= list
->next
) {
2096 if (list
->type
== ISEQ_ELEMENT_LABEL
) {
2097 LABEL
*lobj
= (LABEL
*)list
;
2102 for (list
= FIRST_ELEMENT(anchor
); list
; list
= list
->next
) {
2103 switch (list
->type
) {
2104 case ISEQ_ELEMENT_INSN
:
2109 INSN
*iobj
= (INSN
*)list
;
2112 sp
= calc_sp_depth(sp
, iobj
);
2114 BADINSN_DUMP(anchor
, list
, NULL
);
2115 COMPILE_ERROR(iseq
, iobj
->insn_info
.line_no
,
2116 "argument stack underflow (%d)", sp
);
2119 if (sp
> stack_max
) {
2123 line
= iobj
->insn_info
.line_no
;
2124 /* fprintf(stderr, "insn: %-16s, sp: %d\n", insn_name(iobj->insn_id), sp); */
2125 operands
= iobj
->operands
;
2126 insn
= iobj
->insn_id
;
2127 types
= insn_op_types(insn
);
2128 len
= insn_len(insn
);
2131 if (iobj
->operand_size
!= len
- 1) {
2132 /* printf("operand size miss! (%d, %d)\n", iobj->operand_size, len); */
2133 BADINSN_DUMP(anchor
, list
, NULL
);
2134 COMPILE_ERROR(iseq
, iobj
->insn_info
.line_no
,
2135 "operand size miss! (%d for %d)",
2136 iobj
->operand_size
, len
- 1);
2140 for (j
= 0; types
[j
]; j
++) {
2141 if (types
[j
] == TS_OFFSET
) {
2142 /* label(destination position) */
2143 LABEL
*lobj
= (LABEL
*)operands
[j
];
2145 BADINSN_DUMP(anchor
, list
, NULL
);
2146 COMPILE_ERROR(iseq
, iobj
->insn_info
.line_no
,
2147 "unknown label: "LABEL_FORMAT
, lobj
->label_no
);
2150 if (lobj
->sp
== -1) {
2153 else if (lobj
->sp
!= sp
) {
2154 debugs("%s:%d: sp inconsistency found but ignored (" LABEL_FORMAT
" sp: %d, calculated sp: %d)\n",
2155 RSTRING_PTR(rb_iseq_path(iseq
)), line
,
2156 lobj
->label_no
, lobj
->sp
, sp
);
2162 case ISEQ_ELEMENT_LABEL
:
2164 LABEL
*lobj
= (LABEL
*)list
;
2165 if (lobj
->sp
== -1) {
2169 if (lobj
->sp
!= sp
) {
2170 debugs("%s:%d: sp inconsistency found but ignored (" LABEL_FORMAT
" sp: %d, calculated sp: %d)\n",
2171 RSTRING_PTR(rb_iseq_path(iseq
)), line
,
2172 lobj
->label_no
, lobj
->sp
, sp
);
2178 case ISEQ_ELEMENT_TRACE
:
2183 case ISEQ_ELEMENT_ADJUST
:
2185 ADJUST
*adjust
= (ADJUST
*)list
;
2188 sp
= adjust
->label
? adjust
->label
->sp
: 0;
2189 if (adjust
->line_no
!= -1 && orig_sp
- sp
< 0) {
2190 BADINSN_DUMP(anchor
, list
, NULL
);
2191 COMPILE_ERROR(iseq
, adjust
->line_no
,
2192 "iseq_set_sequence: adjust bug %d < %d",
2199 BADINSN_DUMP(anchor
, list
, NULL
);
2200 COMPILE_ERROR(iseq
, line
, "unknown list type: %d", list
->type
);
2208 add_insn_info(struct iseq_insn_info_entry
*insns_info
, unsigned int *positions
,
2209 int insns_info_index
, int code_index
, const INSN
*iobj
)
2211 if (insns_info_index
== 0 ||
2212 insns_info
[insns_info_index
-1].line_no
!= iobj
->insn_info
.line_no
||
2213 #ifdef USE_ISEQ_NODE_ID
2214 insns_info
[insns_info_index
-1].node_id
!= iobj
->insn_info
.node_id
||
2216 insns_info
[insns_info_index
-1].events
!= iobj
->insn_info
.events
) {
2217 insns_info
[insns_info_index
].line_no
= iobj
->insn_info
.line_no
;
2218 #ifdef USE_ISEQ_NODE_ID
2219 insns_info
[insns_info_index
].node_id
= iobj
->insn_info
.node_id
;
2221 insns_info
[insns_info_index
].events
= iobj
->insn_info
.events
;
2222 positions
[insns_info_index
] = code_index
;
2229 add_adjust_info(struct iseq_insn_info_entry
*insns_info
, unsigned int *positions
,
2230 int insns_info_index
, int code_index
, const ADJUST
*adjust
)
2232 if (insns_info_index
> 0 ||
2233 insns_info
[insns_info_index
-1].line_no
!= adjust
->line_no
) {
2234 insns_info
[insns_info_index
].line_no
= adjust
->line_no
;
2235 insns_info
[insns_info_index
].events
= 0;
2236 positions
[insns_info_index
] = code_index
;
2243 ruby insn object list -> raw instruction sequence
2246 iseq_set_sequence(rb_iseq_t
*iseq
, LINK_ANCHOR
*const anchor
)
2248 VALUE iseqv
= (VALUE
)iseq
;
2249 struct iseq_insn_info_entry
*insns_info
;
2250 struct rb_iseq_constant_body
*const body
= iseq
->body
;
2251 unsigned int *positions
;
2253 VALUE
*generated_iseq
;
2254 rb_event_flag_t events
= 0;
2257 int insn_num
, code_index
, insns_info_index
, sp
= 0;
2258 int stack_max
= fix_sp_depth(iseq
, anchor
);
2260 if (stack_max
< 0) return COMPILE_NG
;
2262 /* fix label position */
2263 insn_num
= code_index
= 0;
2264 for (list
= FIRST_ELEMENT(anchor
); list
; list
= list
->next
) {
2265 switch (list
->type
) {
2266 case ISEQ_ELEMENT_INSN
:
2268 INSN
*iobj
= (INSN
*)list
;
2270 sp
= calc_sp_depth(sp
, iobj
);
2272 events
= iobj
->insn_info
.events
|= events
;
2273 if (ISEQ_COVERAGE(iseq
)) {
2274 if (ISEQ_LINE_COVERAGE(iseq
) && (events
& RUBY_EVENT_COVERAGE_LINE
) &&
2275 !(rb_get_coverage_mode() & COVERAGE_TARGET_ONESHOT_LINES
)) {
2276 int line
= iobj
->insn_info
.line_no
;
2278 RARRAY_ASET(ISEQ_LINE_COVERAGE(iseq
), line
- 1, INT2FIX(0));
2281 if (ISEQ_BRANCH_COVERAGE(iseq
) && (events
& RUBY_EVENT_COVERAGE_BRANCH
)) {
2282 while (RARRAY_LEN(ISEQ_PC2BRANCHINDEX(iseq
)) <= code_index
) {
2283 rb_ary_push(ISEQ_PC2BRANCHINDEX(iseq
), Qnil
);
2285 RARRAY_ASET(ISEQ_PC2BRANCHINDEX(iseq
), code_index
, INT2FIX(data
));
2288 code_index
+= insn_data_length(iobj
);
2293 case ISEQ_ELEMENT_LABEL
:
2295 LABEL
*lobj
= (LABEL
*)list
;
2296 lobj
->position
= code_index
;
2297 if (lobj
->sp
!= sp
) {
2298 debugs("%s: sp inconsistency found but ignored (" LABEL_FORMAT
" sp: %d, calculated sp: %d)\n",
2299 RSTRING_PTR(rb_iseq_path(iseq
)),
2300 lobj
->label_no
, lobj
->sp
, sp
);
2305 case ISEQ_ELEMENT_TRACE
:
2307 TRACE
*trace
= (TRACE
*)list
;
2308 events
|= trace
->event
;
2309 if (trace
->event
& RUBY_EVENT_COVERAGE_BRANCH
) data
= trace
->data
;
2312 case ISEQ_ELEMENT_ADJUST
:
2314 ADJUST
*adjust
= (ADJUST
*)list
;
2315 if (adjust
->line_no
!= -1) {
2317 sp
= adjust
->label
? adjust
->label
->sp
: 0;
2318 if (orig_sp
- sp
> 0) {
2319 if (orig_sp
- sp
> 1) code_index
++; /* 1 operand */
2320 code_index
++; /* insn */
2330 /* make instruction sequence */
2331 generated_iseq
= ALLOC_N(VALUE
, code_index
);
2332 insns_info
= ALLOC_N(struct iseq_insn_info_entry
, insn_num
);
2333 positions
= ALLOC_N(unsigned int, insn_num
);
2334 body
->is_entries
= ZALLOC_N(union iseq_inline_storage_entry
, body
->is_size
);
2335 body
->call_data
= ZALLOC_N(struct rb_call_data
, body
->ci_size
);
2336 ISEQ_COMPILE_DATA(iseq
)->ci_index
= 0;
2338 list
= FIRST_ELEMENT(anchor
);
2339 insns_info_index
= code_index
= sp
= 0;
2342 switch (list
->type
) {
2343 case ISEQ_ELEMENT_INSN
:
2348 INSN
*iobj
= (INSN
*)list
;
2351 sp
= calc_sp_depth(sp
, iobj
);
2352 /* fprintf(stderr, "insn: %-16s, sp: %d\n", insn_name(iobj->insn_id), sp); */
2353 operands
= iobj
->operands
;
2354 insn
= iobj
->insn_id
;
2355 generated_iseq
[code_index
] = insn
;
2356 types
= insn_op_types(insn
);
2357 len
= insn_len(insn
);
2359 for (j
= 0; types
[j
]; j
++) {
2360 char type
= types
[j
];
2361 /* printf("--> [%c - (%d-%d)]\n", type, k, j); */
2365 /* label(destination position) */
2366 LABEL
*lobj
= (LABEL
*)operands
[j
];
2367 generated_iseq
[code_index
+ 1 + j
] = lobj
->position
- (code_index
+ len
);
2372 VALUE map
= operands
[j
];
2373 struct cdhash_set_label_struct data
;
2375 data
.pos
= code_index
;
2377 rb_hash_foreach(map
, cdhash_set_label_i
, (VALUE
)&data
);
2379 rb_hash_rehash(map
);
2380 freeze_hide_obj(map
);
2381 generated_iseq
[code_index
+ 1 + j
] = map
;
2382 RB_OBJ_WRITTEN(iseq
, Qundef
, map
);
2383 FL_SET(iseqv
, ISEQ_MARKABLE_ISEQ
);
2387 case TS_NUM
: /* ulong */
2388 generated_iseq
[code_index
+ 1 + j
] = FIX2INT(operands
[j
]);
2390 case TS_VALUE
: /* VALUE */
2391 case TS_ISEQ
: /* iseq */
2393 VALUE v
= operands
[j
];
2394 generated_iseq
[code_index
+ 1 + j
] = v
;
2395 /* to mark ruby object */
2396 if (!SPECIAL_CONST_P(v
)) {
2397 RB_OBJ_WRITTEN(iseq
, Qundef
, v
);
2398 FL_SET(iseqv
, ISEQ_MARKABLE_ISEQ
);
2402 case TS_IC
: /* inline cache */
2403 case TS_ISE
: /* inline storage entry */
2404 case TS_IVC
: /* inline ivar cache */
2406 unsigned int ic_index
= FIX2UINT(operands
[j
]);
2407 IC ic
= (IC
)&body
->is_entries
[ic_index
];
2408 if (UNLIKELY(ic_index
>= body
->is_size
)) {
2409 BADINSN_DUMP(anchor
, &iobj
->link
, 0);
2410 COMPILE_ERROR(iseq
, iobj
->insn_info
.line_no
,
2411 "iseq_set_sequence: ic_index overflow: index: %d, size: %d",
2412 ic_index
, body
->is_size
);
2414 generated_iseq
[code_index
+ 1 + j
] = (VALUE
)ic
;
2415 FL_SET(iseqv
, ISEQ_MARKABLE_ISEQ
);
2417 if (insn
== BIN(opt_getinlinecache
) && type
== TS_IC
) {
2418 // Store the instruction index for opt_getinlinecache on the IC for
2419 // YJIT to invalidate code when opt_setinlinecache runs.
2420 ic
->get_insn_idx
= (unsigned int)code_index
;
2426 const struct rb_callinfo
*source_ci
= (const struct rb_callinfo
*)operands
[j
];
2427 struct rb_call_data
*cd
= &body
->call_data
[ISEQ_COMPILE_DATA(iseq
)->ci_index
++];
2428 assert(ISEQ_COMPILE_DATA(iseq
)->ci_index
<= body
->ci_size
);
2430 cd
->cc
= vm_cc_empty();
2431 generated_iseq
[code_index
+ 1 + j
] = (VALUE
)cd
;
2434 case TS_ID
: /* ID */
2435 generated_iseq
[code_index
+ 1 + j
] = SYM2ID(operands
[j
]);
2438 generated_iseq
[code_index
+ 1 + j
] = operands
[j
];
2441 generated_iseq
[code_index
+ 1 + j
] = operands
[j
];
2444 BADINSN_ERROR(iseq
, iobj
->insn_info
.line_no
,
2445 "unknown operand type: %c", type
);
2449 if (add_insn_info(insns_info
, positions
, insns_info_index
, code_index
, iobj
)) insns_info_index
++;
2453 case ISEQ_ELEMENT_LABEL
:
2455 LABEL
*lobj
= (LABEL
*)list
;
2456 if (lobj
->sp
!= sp
) {
2457 debugs("%s: sp inconsistency found but ignored (" LABEL_FORMAT
" sp: %d, calculated sp: %d)\n",
2458 RSTRING_PTR(rb_iseq_path(iseq
)),
2459 lobj
->label_no
, lobj
->sp
, sp
);
2464 case ISEQ_ELEMENT_ADJUST
:
2466 ADJUST
*adjust
= (ADJUST
*)list
;
2469 if (adjust
->label
) {
2470 sp
= adjust
->label
->sp
;
2476 if (adjust
->line_no
!= -1) {
2477 const int diff
= orig_sp
- sp
;
2479 if (add_adjust_info(insns_info
, positions
, insns_info_index
, code_index
, adjust
)) insns_info_index
++;
2482 generated_iseq
[code_index
++] = BIN(adjuststack
);
2483 generated_iseq
[code_index
++] = orig_sp
- sp
;
2485 else if (diff
== 1) {
2486 generated_iseq
[code_index
++] = BIN(pop
);
2488 else if (diff
< 0) {
2489 int label_no
= adjust
->label
? adjust
->label
->label_no
: -1;
2490 xfree(generated_iseq
);
2493 debug_list(anchor
, list
);
2494 COMPILE_ERROR(iseq
, adjust
->line_no
,
2495 "iseq_set_sequence: adjust bug to %d %d < %d",
2496 label_no
, orig_sp
, sp
);
2509 body
->iseq_encoded
= (void *)generated_iseq
;
2510 body
->iseq_size
= code_index
;
2511 body
->stack_max
= stack_max
;
2513 /* get rid of memory leak when REALLOC failed */
2514 body
->insns_info
.body
= insns_info
;
2515 body
->insns_info
.positions
= positions
;
2517 REALLOC_N(insns_info
, struct iseq_insn_info_entry
, insns_info_index
);
2518 body
->insns_info
.body
= insns_info
;
2519 REALLOC_N(positions
, unsigned int, insns_info_index
);
2520 body
->insns_info
.positions
= positions
;
2521 body
->insns_info
.size
= insns_info_index
;
2527 label_get_position(LABEL
*lobj
)
2529 return lobj
->position
;
2533 label_get_sp(LABEL
*lobj
)
2539 iseq_set_exception_table(rb_iseq_t
*iseq
)
2541 const VALUE
*tptr
, *ptr
;
2542 unsigned int tlen
, i
;
2543 struct iseq_catch_table_entry
*entry
;
2545 iseq
->body
->catch_table
= NULL
;
2546 if (NIL_P(ISEQ_COMPILE_DATA(iseq
)->catch_table_ary
)) return COMPILE_OK
;
2547 tlen
= (int)RARRAY_LEN(ISEQ_COMPILE_DATA(iseq
)->catch_table_ary
);
2548 tptr
= RARRAY_CONST_PTR_TRANSIENT(ISEQ_COMPILE_DATA(iseq
)->catch_table_ary
);
2551 struct iseq_catch_table
*table
= xmalloc(iseq_catch_table_bytes(tlen
));
2554 for (i
= 0; i
< table
->size
; i
++) {
2555 ptr
= RARRAY_CONST_PTR_TRANSIENT(tptr
[i
]);
2556 entry
= UNALIGNED_MEMBER_PTR(table
, entries
[i
]);
2557 entry
->type
= (enum catch_type
)(ptr
[0] & 0xffff);
2558 entry
->start
= label_get_position((LABEL
*)(ptr
[1] & ~1));
2559 entry
->end
= label_get_position((LABEL
*)(ptr
[2] & ~1));
2560 entry
->iseq
= (rb_iseq_t
*)ptr
[3];
2561 RB_OBJ_WRITTEN(iseq
, Qundef
, entry
->iseq
);
2565 LABEL
*lobj
= (LABEL
*)(ptr
[4] & ~1);
2566 entry
->cont
= label_get_position(lobj
);
2567 entry
->sp
= label_get_sp(lobj
);
2569 /* TODO: Dirty Hack! Fix me */
2570 if (entry
->type
== CATCH_TYPE_RESCUE
||
2571 entry
->type
== CATCH_TYPE_BREAK
||
2572 entry
->type
== CATCH_TYPE_NEXT
) {
2580 iseq
->body
->catch_table
= table
;
2581 RB_OBJ_WRITE(iseq
, &ISEQ_COMPILE_DATA(iseq
)->catch_table_ary
, 0); /* free */
2588 * set optional argument table
2589 * def foo(a, b=expr1, c=expr2)
2597 iseq_set_optargs_table(rb_iseq_t
*iseq
)
2600 VALUE
*opt_table
= (VALUE
*)iseq
->body
->param
.opt_table
;
2602 if (iseq
->body
->param
.flags
.has_opt
) {
2603 for (i
= 0; i
< iseq
->body
->param
.opt_num
+ 1; i
++) {
2604 opt_table
[i
] = label_get_position((LABEL
*)opt_table
[i
]);
2610 static LINK_ELEMENT
*
2611 get_destination_insn(INSN
*iobj
)
2613 LABEL
*lobj
= (LABEL
*)OPERAND_AT(iobj
, 0);
2615 rb_event_flag_t events
= 0;
2617 list
= lobj
->link
.next
;
2619 switch (list
->type
) {
2620 case ISEQ_ELEMENT_INSN
:
2621 case ISEQ_ELEMENT_ADJUST
:
2623 case ISEQ_ELEMENT_LABEL
:
2626 case ISEQ_ELEMENT_TRACE
:
2628 TRACE
*trace
= (TRACE
*)list
;
2629 events
|= trace
->event
;
2637 if (list
&& IS_INSN(list
)) {
2638 INSN
*iobj
= (INSN
*)list
;
2639 iobj
->insn_info
.events
|= events
;
2644 static LINK_ELEMENT
*
2645 get_next_insn(INSN
*iobj
)
2647 LINK_ELEMENT
*list
= iobj
->link
.next
;
2650 if (IS_INSN(list
) || IS_ADJUST(list
)) {
2658 static LINK_ELEMENT
*
2659 get_prev_insn(INSN
*iobj
)
2661 LINK_ELEMENT
*list
= iobj
->link
.prev
;
2664 if (IS_INSN(list
) || IS_ADJUST(list
)) {
2673 unref_destination(INSN
*iobj
, int pos
)
2675 LABEL
*lobj
= (LABEL
*)OPERAND_AT(iobj
, pos
);
2677 if (!lobj
->refcnt
) ELEM_REMOVE(&lobj
->link
);
2681 replace_destination(INSN
*dobj
, INSN
*nobj
)
2683 VALUE n
= OPERAND_AT(nobj
, 0);
2684 LABEL
*dl
= (LABEL
*)OPERAND_AT(dobj
, 0);
2685 LABEL
*nl
= (LABEL
*)n
;
2688 OPERAND_AT(dobj
, 0) = n
;
2689 if (!dl
->refcnt
) ELEM_REMOVE(&dl
->link
);
2693 find_destination(INSN
*i
)
2695 int pos
, len
= insn_len(i
->insn_id
);
2696 for (pos
= 0; pos
< len
; ++pos
) {
2697 if (insn_op_types(i
->insn_id
)[pos
] == TS_OFFSET
) {
2698 return (LABEL
*)OPERAND_AT(i
, pos
);
2705 remove_unreachable_chunk(rb_iseq_t
*iseq
, LINK_ELEMENT
*i
)
2707 LINK_ELEMENT
*first
= i
, *end
;
2708 int *unref_counts
= 0, nlabels
= ISEQ_COMPILE_DATA(iseq
)->label_no
;
2711 unref_counts
= ALLOCA_N(int, nlabels
);
2712 MEMZERO(unref_counts
, int, nlabels
);
2717 if (IS_INSN_ID(i
, leave
)) {
2721 else if ((lab
= find_destination((INSN
*)i
)) != 0) {
2722 if (lab
->unremovable
) break;
2723 unref_counts
[lab
->label_no
]++;
2726 else if (IS_LABEL(i
)) {
2728 if (lab
->unremovable
) return 0;
2729 if (lab
->refcnt
> unref_counts
[lab
->label_no
]) {
2730 if (i
== first
) return 0;
2735 else if (IS_TRACE(i
)) {
2738 else if (IS_ADJUST(i
)) {
2739 LABEL
*dest
= ((ADJUST
*)i
)->label
;
2740 if (dest
&& dest
->unremovable
) return 0;
2743 } while ((i
= i
->next
) != 0);
2747 struct rb_iseq_constant_body
*body
= iseq
->body
;
2748 VALUE insn
= INSN_OF(i
);
2749 int pos
, len
= insn_len(insn
);
2750 for (pos
= 0; pos
< len
; ++pos
) {
2751 switch (insn_op_types(insn
)[pos
]) {
2753 unref_destination((INSN
*)i
, pos
);
2762 } while ((i
!= end
) && (i
= i
->next
) != 0);
2767 iseq_pop_newarray(rb_iseq_t
*iseq
, INSN
*iobj
)
2769 switch (OPERAND_AT(iobj
, 0)) {
2770 case INT2FIX(0): /* empty array */
2771 ELEM_REMOVE(&iobj
->link
);
2773 case INT2FIX(1): /* single element array */
2774 ELEM_REMOVE(&iobj
->link
);
2777 iobj
->insn_id
= BIN(adjuststack
);
2783 is_frozen_putstring(INSN
*insn
, VALUE
*op
)
2785 if (IS_INSN_ID(insn
, putstring
)) {
2786 *op
= OPERAND_AT(insn
, 0);
2789 else if (IS_INSN_ID(insn
, putobject
)) { /* frozen_string_literal */
2790 *op
= OPERAND_AT(insn
, 0);
2791 return RB_TYPE_P(*op
, T_STRING
);
2797 optimize_checktype(rb_iseq_t
*iseq
, INSN
*iobj
)
2810 * putobject obj (T_XXX)
2814 * => obj is not a T_XXX
2816 * putobject obj (T_XXX)
2821 INSN
*niobj
, *ciobj
, *dup
= 0;
2825 switch (INSN_OF(iobj
)) {
2826 case BIN(putstring
):
2827 type
= INT2FIX(T_STRING
);
2830 type
= INT2FIX(T_NIL
);
2832 case BIN(putobject
):
2833 type
= INT2FIX(TYPE(OPERAND_AT(iobj
, 0)));
2835 default: return FALSE
;
2838 ciobj
= (INSN
*)get_next_insn(iobj
);
2839 if (IS_INSN_ID(ciobj
, jump
)) {
2840 ciobj
= (INSN
*)get_next_insn((INSN
*)OPERAND_AT(ciobj
, 0));
2842 if (IS_INSN_ID(ciobj
, dup
)) {
2843 ciobj
= (INSN
*)get_next_insn(dup
= ciobj
);
2845 if (!ciobj
|| !IS_INSN_ID(ciobj
, checktype
)) return FALSE
;
2846 niobj
= (INSN
*)get_next_insn(ciobj
);
2848 /* TODO: putobject true/false */
2851 switch (INSN_OF(niobj
)) {
2853 if (OPERAND_AT(ciobj
, 0) == type
) {
2854 dest
= (LABEL
*)OPERAND_AT(niobj
, 0);
2857 case BIN(branchunless
):
2858 if (OPERAND_AT(ciobj
, 0) != type
) {
2859 dest
= (LABEL
*)OPERAND_AT(niobj
, 0);
2865 line
= ciobj
->insn_info
.line_no
;
2866 node_id
= ciobj
->insn_info
.node_id
;
2867 NODE dummy_line_node
= generate_dummy_line_node(line
, node_id
);
2869 if (niobj
->link
.next
&& IS_LABEL(niobj
->link
.next
)) {
2870 dest
= (LABEL
*)niobj
->link
.next
; /* reuse label */
2873 dest
= NEW_LABEL(line
);
2874 ELEM_INSERT_NEXT(&niobj
->link
, &dest
->link
);
2877 INSERT_AFTER_INSN1(iobj
, &dummy_line_node
, jump
, dest
);
2879 if (!dup
) INSERT_AFTER_INSN(iobj
, &dummy_line_node
, pop
);
2883 static const struct rb_callinfo
*
2884 ci_flag_set(const rb_iseq_t
*iseq
, const struct rb_callinfo
*ci
, unsigned int add
)
2886 const struct rb_callinfo
*nci
= vm_ci_new(vm_ci_mid(ci
),
2887 vm_ci_flag(ci
) | add
,
2890 RB_OBJ_WRITTEN(iseq
, ci
, nci
);
2894 static const struct rb_callinfo
*
2895 ci_argc_set(const rb_iseq_t
*iseq
, const struct rb_callinfo
*ci
, int argc
)
2897 const struct rb_callinfo
*nci
= vm_ci_new(vm_ci_mid(ci
),
2901 RB_OBJ_WRITTEN(iseq
, ci
, nci
);
2906 iseq_peephole_optimize(rb_iseq_t
*iseq
, LINK_ELEMENT
*list
, const int do_tailcallopt
)
2908 INSN
*const iobj
= (INSN
*)list
;
2911 optimize_checktype(iseq
, iobj
);
2913 if (IS_INSN_ID(iobj
, jump
)) {
2914 INSN
*niobj
, *diobj
, *piobj
;
2915 diobj
= (INSN
*)get_destination_insn(iobj
);
2916 niobj
= (INSN
*)get_next_insn(iobj
);
2918 if (diobj
== niobj
) {
2925 unref_destination(iobj
, 0);
2926 ELEM_REMOVE(&iobj
->link
);
2929 else if (iobj
!= diobj
&& IS_INSN(&diobj
->link
) &&
2930 IS_INSN_ID(diobj
, jump
) &&
2931 OPERAND_AT(iobj
, 0) != OPERAND_AT(diobj
, 0) &&
2932 diobj
->insn_info
.events
== 0) {
2934 * useless jump elimination:
2940 * => in this case, first jump instruction should jump to
2943 replace_destination(iobj
, diobj
);
2944 remove_unreachable_chunk(iseq
, iobj
->link
.next
);
2947 else if (IS_INSN_ID(diobj
, leave
)) {
2960 unref_destination(iobj
, 0);
2961 iobj
->insn_id
= BIN(leave
);
2962 iobj
->operand_size
= 0;
2963 iobj
->insn_info
= diobj
->insn_info
;
2966 else if (IS_INSN(iobj
->link
.prev
) &&
2967 (piobj
= (INSN
*)iobj
->link
.prev
) &&
2968 (IS_INSN_ID(piobj
, branchif
) ||
2969 IS_INSN_ID(piobj
, branchunless
))) {
2970 INSN
*pdiobj
= (INSN
*)get_destination_insn(piobj
);
2971 if (niobj
== pdiobj
) {
2972 int refcnt
= IS_LABEL(piobj
->link
.next
) ?
2973 ((LABEL
*)piobj
->link
.next
)->refcnt
: 0;
2975 * useless jump elimination (if/unless destination):
2988 piobj
->insn_id
= (IS_INSN_ID(piobj
, branchif
))
2989 ? BIN(branchunless
) : BIN(branchif
);
2990 replace_destination(piobj
, iobj
);
2992 ELEM_REMOVE(&iobj
->link
);
2995 /* TODO: replace other branch destinations too */
2999 else if (diobj
== pdiobj
) {
3001 * useless jump elimination (if/unless before jump):
3013 NODE dummy_line_node
= generate_dummy_line_node(iobj
->insn_info
.line_no
, iobj
->insn_info
.node_id
);
3014 INSN
*popiobj
= new_insn_core(iseq
, &dummy_line_node
, BIN(pop
), 0, 0);
3015 ELEM_REPLACE(&piobj
->link
, &popiobj
->link
);
3018 if (remove_unreachable_chunk(iseq
, iobj
->link
.next
)) {
3030 * putobject "beg".."end"
3032 if (IS_INSN_ID(iobj
, newrange
)) {
3033 INSN
*const range
= iobj
;
3035 VALUE str_beg
, str_end
;
3037 if ((end
= (INSN
*)get_prev_insn(range
)) != 0 &&
3038 is_frozen_putstring(end
, &str_end
) &&
3039 (beg
= (INSN
*)get_prev_insn(end
)) != 0 &&
3040 is_frozen_putstring(beg
, &str_beg
)) {
3041 int excl
= FIX2INT(OPERAND_AT(range
, 0));
3042 VALUE lit_range
= rb_range_new(str_beg
, str_end
, excl
);
3044 ELEM_REMOVE(&beg
->link
);
3045 ELEM_REMOVE(&end
->link
);
3046 range
->insn_id
= BIN(putobject
);
3047 OPERAND_AT(range
, 0) = lit_range
;
3048 RB_OBJ_WRITTEN(iseq
, Qundef
, lit_range
);
3052 if (IS_INSN_ID(iobj
, leave
)) {
3053 remove_unreachable_chunk(iseq
, iobj
->link
.next
);
3065 if (IS_INSN_ID(iobj
, duparray
)) {
3066 LINK_ELEMENT
*next
= iobj
->link
.next
;
3067 if (IS_INSN(next
) && IS_INSN_ID(next
, concatarray
)) {
3068 iobj
->insn_id
= BIN(putobject
);
3072 if (IS_INSN_ID(iobj
, branchif
) ||
3073 IS_INSN_ID(iobj
, branchnil
) ||
3074 IS_INSN_ID(iobj
, branchunless
)) {
3083 INSN
*nobj
= (INSN
*)get_destination_insn(iobj
);
3085 /* This is super nasty hack!!!
3087 * This jump-jump optimization may ignore event flags of the jump
3088 * instruction being skipped. Actually, Line 2 TracePoint event
3089 * is never fired in the following code:
3091 * 1: raise if 1 == 2
3096 * This is critical for coverage measurement. [Bug #15980]
3098 * This is a stopgap measure: stop the jump-jump optimization if
3099 * coverage measurement is enabled and if the skipped instruction
3100 * has any event flag.
3102 * Note that, still, TracePoint Line event does not occur on Line 2.
3103 * This should be fixed in future.
3105 int stop_optimization
=
3106 ISEQ_COVERAGE(iseq
) && ISEQ_LINE_COVERAGE(iseq
) &&
3107 nobj
->link
.type
== ISEQ_ELEMENT_INSN
&&
3108 nobj
->insn_info
.events
;
3109 if (!stop_optimization
) {
3110 INSN
*pobj
= (INSN
*)iobj
->link
.prev
;
3113 if (!IS_INSN(&pobj
->link
))
3115 else if (IS_INSN_ID(pobj
, dup
))
3120 if (IS_INSN(&nobj
->link
) && IS_INSN_ID(nobj
, jump
)) {
3121 replace_destination(iobj
, nobj
);
3123 else if (prev_dup
&& IS_INSN_ID(nobj
, dup
) &&
3124 !!(nobj
= (INSN
*)nobj
->link
.next
) &&
3125 /* basic blocks, with no labels in the middle */
3126 nobj
->insn_id
== iobj
->insn_id
) {
3142 replace_destination(iobj
, nobj
);
3170 if (prev_dup
&& IS_INSN(pobj
->link
.prev
)) {
3171 pobj
= (INSN
*)pobj
->link
.prev
;
3173 if (IS_INSN_ID(pobj
, putobject
)) {
3174 cond
= (IS_INSN_ID(iobj
, branchif
) ?
3175 OPERAND_AT(pobj
, 0) != Qfalse
:
3176 IS_INSN_ID(iobj
, branchunless
) ?
3177 OPERAND_AT(pobj
, 0) == Qfalse
:
3180 else if (IS_INSN_ID(pobj
, putstring
) ||
3181 IS_INSN_ID(pobj
, duparray
) ||
3182 IS_INSN_ID(pobj
, newarray
)) {
3183 cond
= IS_INSN_ID(iobj
, branchif
);
3185 else if (IS_INSN_ID(pobj
, putnil
)) {
3186 cond
= !IS_INSN_ID(iobj
, branchif
);
3189 if (prev_dup
|| !IS_INSN_ID(pobj
, newarray
)) {
3190 ELEM_REMOVE(iobj
->link
.prev
);
3192 else if (!iseq_pop_newarray(iseq
, pobj
)) {
3193 NODE dummy_line_node
= generate_dummy_line_node(pobj
->insn_info
.line_no
, pobj
->insn_info
.node_id
);
3194 pobj
= new_insn_core(iseq
, &dummy_line_node
, BIN(pop
), 0, NULL
);
3195 ELEM_INSERT_PREV(&iobj
->link
, &pobj
->link
);
3199 NODE dummy_line_node
= generate_dummy_line_node(pobj
->insn_info
.line_no
, pobj
->insn_info
.node_id
);
3200 pobj
= new_insn_core(iseq
, &dummy_line_node
, BIN(putnil
), 0, NULL
);
3201 ELEM_INSERT_NEXT(&iobj
->link
, &pobj
->link
);
3203 iobj
->insn_id
= BIN(jump
);
3207 unref_destination(iobj
, 0);
3208 ELEM_REMOVE(&iobj
->link
);
3213 nobj
= (INSN
*)get_destination_insn(nobj
);
3218 if (IS_INSN_ID(iobj
, pop
)) {
3220 * putself / putnil / putobject obj / putstring "..."
3225 LINK_ELEMENT
*prev
= iobj
->link
.prev
;
3226 if (IS_INSN(prev
)) {
3227 enum ruby_vminsn_type previ
= ((INSN
*)prev
)->insn_id
;
3228 if (previ
== BIN(putobject
) || previ
== BIN(putnil
) ||
3229 previ
== BIN(putself
) || previ
== BIN(putstring
) ||
3230 previ
== BIN(dup
) ||
3231 previ
== BIN(getlocal
) ||
3232 previ
== BIN(getblockparam
) ||
3233 previ
== BIN(getblockparamproxy
) ||
3234 /* getinstancevariable may issue a warning */
3235 previ
== BIN(duparray
)) {
3236 /* just push operand or static value and pop soon, no
3239 ELEM_REMOVE(&iobj
->link
);
3241 else if (previ
== BIN(newarray
) && iseq_pop_newarray(iseq
, (INSN
*)prev
)) {
3242 ELEM_REMOVE(&iobj
->link
);
3244 else if (previ
== BIN(concatarray
)) {
3245 INSN
*piobj
= (INSN
*)prev
;
3246 NODE dummy_line_node
= generate_dummy_line_node(piobj
->insn_info
.line_no
, piobj
->insn_info
.node_id
);
3247 INSERT_BEFORE_INSN1(piobj
, &dummy_line_node
, splatarray
, Qfalse
);
3248 INSN_OF(piobj
) = BIN(pop
);
3250 else if (previ
== BIN(concatstrings
)) {
3251 if (OPERAND_AT(prev
, 0) == INT2FIX(1)) {
3255 ELEM_REMOVE(&iobj
->link
);
3256 INSN_OF(prev
) = BIN(adjuststack
);
3262 if (IS_INSN_ID(iobj
, newarray
) ||
3263 IS_INSN_ID(iobj
, duparray
) ||
3264 IS_INSN_ID(iobj
, expandarray
) ||
3265 IS_INSN_ID(iobj
, concatarray
) ||
3266 IS_INSN_ID(iobj
, splatarray
) ||
3273 * newarray always puts an array
3275 LINK_ELEMENT
*next
= iobj
->link
.next
;
3276 if (IS_INSN(next
) && IS_INSN_ID(next
, splatarray
)) {
3277 /* remove splatarray following always-array insn */
3282 if (IS_INSN_ID(iobj
, anytostring
)) {
3283 LINK_ELEMENT
*next
= iobj
->link
.next
;
3290 if (IS_INSN(next
) && IS_INSN_ID(next
, concatstrings
) &&
3291 OPERAND_AT(next
, 0) == INT2FIX(1)) {
3296 if (IS_INSN_ID(iobj
, putstring
) ||
3297 (IS_INSN_ID(iobj
, putobject
) && RB_TYPE_P(OPERAND_AT(iobj
, 0), T_STRING
))) {
3304 if (IS_NEXT_INSN_ID(&iobj
->link
, concatstrings
) &&
3305 RSTRING_LEN(OPERAND_AT(iobj
, 0)) == 0) {
3306 INSN
*next
= (INSN
*)iobj
->link
.next
;
3307 if ((OPERAND_AT(next
, 0) = FIXNUM_INC(OPERAND_AT(next
, 0), -1)) == INT2FIX(1)) {
3308 ELEM_REMOVE(&next
->link
);
3310 ELEM_REMOVE(&iobj
->link
);
3314 if (IS_INSN_ID(iobj
, concatstrings
)) {
3319 * concatstrings N+M-1
3321 LINK_ELEMENT
*next
= iobj
->link
.next
;
3323 if (IS_INSN(next
) && IS_INSN_ID(next
, jump
))
3324 next
= get_destination_insn(jump
= (INSN
*)next
);
3325 if (IS_INSN(next
) && IS_INSN_ID(next
, concatstrings
)) {
3326 int n
= FIX2INT(OPERAND_AT(iobj
, 0)) + FIX2INT(OPERAND_AT(next
, 0)) - 1;
3327 OPERAND_AT(iobj
, 0) = INT2FIX(n
);
3329 LABEL
*label
= ((LABEL
*)OPERAND_AT(jump
, 0));
3330 if (!--label
->refcnt
) {
3331 ELEM_REMOVE(&label
->link
);
3334 label
= NEW_LABEL(0);
3335 OPERAND_AT(jump
, 0) = (VALUE
)label
;
3338 ELEM_INSERT_NEXT(next
, &label
->link
);
3339 CHECK(iseq_peephole_optimize(iseq
, get_next_insn(jump
), do_tailcallopt
));
3347 if (do_tailcallopt
&&
3348 (IS_INSN_ID(iobj
, send
) ||
3349 IS_INSN_ID(iobj
, opt_aref_with
) ||
3350 IS_INSN_ID(iobj
, opt_aset_with
) ||
3351 IS_INSN_ID(iobj
, invokesuper
))) {
3356 * send ..., ... | VM_CALL_TAILCALL, ...
3357 * leave # unreachable
3360 if (iobj
->link
.next
) {
3361 LINK_ELEMENT
*next
= iobj
->link
.next
;
3363 if (!IS_INSN(next
)) {
3367 switch (INSN_OF(next
)) {
3376 next
= get_destination_insn((INSN
*)next
);
3389 const struct rb_callinfo
*ci
= (struct rb_callinfo
*)OPERAND_AT(piobj
, 0);
3390 if (IS_INSN_ID(piobj
, send
) ||
3391 IS_INSN_ID(piobj
, invokesuper
)) {
3392 if (OPERAND_AT(piobj
, 1) == 0) { /* no blockiseq */
3393 ci
= ci_flag_set(iseq
, ci
, VM_CALL_TAILCALL
);
3394 OPERAND_AT(piobj
, 0) = (VALUE
)ci
;
3395 RB_OBJ_WRITTEN(iseq
, Qundef
, ci
);
3399 ci
= ci_flag_set(iseq
, ci
, VM_CALL_TAILCALL
);
3400 OPERAND_AT(piobj
, 0) = (VALUE
)ci
;
3401 RB_OBJ_WRITTEN(iseq
, Qundef
, ci
);
3406 if (IS_INSN_ID(iobj
, dup
)) {
3407 if (IS_NEXT_INSN_ID(&iobj
->link
, setlocal
)) {
3408 LINK_ELEMENT
*set1
= iobj
->link
.next
, *set2
= NULL
;
3409 if (IS_NEXT_INSN_ID(set1
, setlocal
)) {
3411 if (OPERAND_AT(set1
, 0) == OPERAND_AT(set2
, 0) &&
3412 OPERAND_AT(set1
, 1) == OPERAND_AT(set2
, 1)) {
3414 ELEM_REMOVE(&iobj
->link
);
3417 else if (IS_NEXT_INSN_ID(set1
, dup
) &&
3418 IS_NEXT_INSN_ID(set1
->next
, setlocal
)) {
3419 set2
= set1
->next
->next
;
3420 if (OPERAND_AT(set1
, 0) == OPERAND_AT(set2
, 0) &&
3421 OPERAND_AT(set1
, 1) == OPERAND_AT(set2
, 1)) {
3422 ELEM_REMOVE(set1
->next
);
3429 if (IS_INSN_ID(iobj
, getlocal
)) {
3430 LINK_ELEMENT
*niobj
= &iobj
->link
;
3431 if (IS_NEXT_INSN_ID(niobj
, dup
)) {
3432 niobj
= niobj
->next
;
3434 if (IS_NEXT_INSN_ID(niobj
, setlocal
)) {
3435 LINK_ELEMENT
*set1
= niobj
->next
;
3436 if (OPERAND_AT(iobj
, 0) == OPERAND_AT(set1
, 0) &&
3437 OPERAND_AT(iobj
, 1) == OPERAND_AT(set1
, 1)) {
3444 if (IS_INSN_ID(iobj
, opt_invokebuiltin_delegate
)) {
3445 if (IS_TRACE(iobj
->link
.next
)) {
3446 if (IS_NEXT_INSN_ID(iobj
->link
.next
, leave
)) {
3447 iobj
->insn_id
= BIN(opt_invokebuiltin_delegate_leave
);
3456 insn_set_specialized_instruction(rb_iseq_t
*iseq
, INSN
*iobj
, int insn_id
)
3458 iobj
->insn_id
= insn_id
;
3459 iobj
->operand_size
= insn_len(insn_id
) - 1;
3460 iobj
->insn_info
.events
|= RUBY_EVENT_C_CALL
| RUBY_EVENT_C_RETURN
;
3462 if (insn_id
== BIN(opt_neq
)) {
3463 VALUE original_ci
= iobj
->operands
[0];
3464 iobj
->operand_size
= 2;
3465 iobj
->operands
= compile_data_calloc2(iseq
, iobj
->operand_size
, sizeof(VALUE
));
3466 iobj
->operands
[0] = (VALUE
)new_callinfo(iseq
, idEq
, 1, 0, NULL
, FALSE
);
3467 iobj
->operands
[1] = original_ci
;
3474 iseq_specialized_instruction(rb_iseq_t
*iseq
, INSN
*iobj
)
3476 if (IS_INSN_ID(iobj
, newarray
) && iobj
->link
.next
&&
3477 IS_INSN(iobj
->link
.next
)) {
3479 * [a, b, ...].max/min -> a, b, c, opt_newarray_max/min
3481 INSN
*niobj
= (INSN
*)iobj
->link
.next
;
3482 if (IS_INSN_ID(niobj
, send
)) {
3483 const struct rb_callinfo
*ci
= (struct rb_callinfo
*)OPERAND_AT(niobj
, 0);
3484 if ((vm_ci_flag(ci
) & VM_CALL_ARGS_SIMPLE
) && vm_ci_argc(ci
) == 0) {
3485 switch (vm_ci_mid(ci
)) {
3487 iobj
->insn_id
= BIN(opt_newarray_max
);
3488 ELEM_REMOVE(&niobj
->link
);
3491 iobj
->insn_id
= BIN(opt_newarray_min
);
3492 ELEM_REMOVE(&niobj
->link
);
3499 if (IS_INSN_ID(iobj
, send
)) {
3500 const struct rb_callinfo
*ci
= (struct rb_callinfo
*)OPERAND_AT(iobj
, 0);
3501 const rb_iseq_t
*blockiseq
= (rb_iseq_t
*)OPERAND_AT(iobj
, 1);
3503 #define SP_INSN(opt) insn_set_specialized_instruction(iseq, iobj, BIN(opt_##opt))
3504 if (vm_ci_flag(ci
) & VM_CALL_ARGS_SIMPLE
) {
3505 switch (vm_ci_argc(ci
)) {
3507 switch (vm_ci_mid(ci
)) {
3508 case idLength
: SP_INSN(length
); return COMPILE_OK
;
3509 case idSize
: SP_INSN(size
); return COMPILE_OK
;
3510 case idEmptyP
: SP_INSN(empty_p
);return COMPILE_OK
;
3511 case idNilP
: SP_INSN(nil_p
); return COMPILE_OK
;
3512 case idSucc
: SP_INSN(succ
); return COMPILE_OK
;
3513 case idNot
: SP_INSN(not); return COMPILE_OK
;
3517 switch (vm_ci_mid(ci
)) {
3518 case idPLUS
: SP_INSN(plus
); return COMPILE_OK
;
3519 case idMINUS
: SP_INSN(minus
); return COMPILE_OK
;
3520 case idMULT
: SP_INSN(mult
); return COMPILE_OK
;
3521 case idDIV
: SP_INSN(div
); return COMPILE_OK
;
3522 case idMOD
: SP_INSN(mod
); return COMPILE_OK
;
3523 case idEq
: SP_INSN(eq
); return COMPILE_OK
;
3524 case idNeq
: SP_INSN(neq
); return COMPILE_OK
;
3525 case idEqTilde
:SP_INSN(regexpmatch2
);return COMPILE_OK
;
3526 case idLT
: SP_INSN(lt
); return COMPILE_OK
;
3527 case idLE
: SP_INSN(le
); return COMPILE_OK
;
3528 case idGT
: SP_INSN(gt
); return COMPILE_OK
;
3529 case idGE
: SP_INSN(ge
); return COMPILE_OK
;
3530 case idLTLT
: SP_INSN(ltlt
); return COMPILE_OK
;
3531 case idAREF
: SP_INSN(aref
); return COMPILE_OK
;
3532 case idAnd
: SP_INSN(and); return COMPILE_OK
;
3533 case idOr
: SP_INSN(or); return COMPILE_OK
;
3537 switch (vm_ci_mid(ci
)) {
3538 case idASET
: SP_INSN(aset
); return COMPILE_OK
;
3544 if ((vm_ci_flag(ci
) & VM_CALL_ARGS_BLOCKARG
) == 0 && blockiseq
== NULL
) {
3545 iobj
->insn_id
= BIN(opt_send_without_block
);
3546 iobj
->operand_size
= insn_len(iobj
->insn_id
) - 1;
3555 tailcallable_p(rb_iseq_t
*iseq
)
3557 switch (iseq
->body
->type
) {
3559 case ISEQ_TYPE_EVAL
:
3560 case ISEQ_TYPE_MAIN
:
3561 /* not tail callable because cfp will be over popped */
3562 case ISEQ_TYPE_RESCUE
:
3563 case ISEQ_TYPE_ENSURE
:
3564 /* rescue block can't tail call because of errinfo */
3572 iseq_optimize(rb_iseq_t
*iseq
, LINK_ANCHOR
*const anchor
)
3575 const int do_peepholeopt
= ISEQ_COMPILE_DATA(iseq
)->option
->peephole_optimization
;
3576 const int do_tailcallopt
= tailcallable_p(iseq
) &&
3577 ISEQ_COMPILE_DATA(iseq
)->option
->tailcall_optimization
;
3578 const int do_si
= ISEQ_COMPILE_DATA(iseq
)->option
->specialized_instruction
;
3579 const int do_ou
= ISEQ_COMPILE_DATA(iseq
)->option
->operands_unification
;
3580 int rescue_level
= 0;
3581 int tailcallopt
= do_tailcallopt
;
3583 list
= FIRST_ELEMENT(anchor
);
3585 int do_block_optimization
= 0;
3587 if (iseq
->body
->type
== ISEQ_TYPE_BLOCK
&& !iseq
->body
->catch_except_p
) {
3588 do_block_optimization
= 1;
3592 if (IS_INSN(list
)) {
3593 if (do_peepholeopt
) {
3594 iseq_peephole_optimize(iseq
, list
, tailcallopt
);
3597 iseq_specialized_instruction(iseq
, (INSN
*)list
);
3600 insn_operands_unification((INSN
*)list
);
3603 if (do_block_optimization
) {
3604 INSN
* item
= (INSN
*)list
;
3605 if (IS_INSN_ID(item
, jump
)) {
3606 do_block_optimization
= 0;
3610 if (IS_LABEL(list
)) {
3611 switch (((LABEL
*)list
)->rescued
) {
3612 case LABEL_RESCUE_BEG
:
3614 tailcallopt
= FALSE
;
3616 case LABEL_RESCUE_END
:
3617 if (!--rescue_level
) tailcallopt
= do_tailcallopt
;
3624 if (do_block_optimization
) {
3625 LINK_ELEMENT
* le
= FIRST_ELEMENT(anchor
)->next
;
3626 if (IS_INSN(le
) && IS_INSN_ID((INSN
*)le
, nop
)) {
3633 #if OPT_INSTRUCTIONS_UNIFICATION
3635 new_unified_insn(rb_iseq_t
*iseq
,
3636 int insn_id
, int size
, LINK_ELEMENT
*seq_list
)
3639 LINK_ELEMENT
*list
= seq_list
;
3641 VALUE
*operands
= 0, *ptr
= 0;
3645 for (i
= 0; i
< size
; i
++) {
3646 iobj
= (INSN
*)list
;
3647 argc
+= iobj
->operand_size
;
3652 ptr
= operands
= compile_data_alloc2(iseq
, sizeof(VALUE
), argc
);
3657 for (i
= 0; i
< size
; i
++) {
3658 iobj
= (INSN
*)list
;
3659 MEMCPY(ptr
, iobj
->operands
, VALUE
, iobj
->operand_size
);
3660 ptr
+= iobj
->operand_size
;
3664 NODE dummy_line_node
= generate_dummy_line_node(iobj
->insn_info
.line_no
, iobj
->insn_info
.node_id
);
3665 return new_insn_core(iseq
, &dummy_line_node
, insn_id
, argc
, operands
);
3670 * This scheme can get more performance if do this optimize with
3671 * label address resolving.
3672 * It's future work (if compile time was bottle neck).
3675 iseq_insns_unification(rb_iseq_t
*iseq
, LINK_ANCHOR
*const anchor
)
3677 #if OPT_INSTRUCTIONS_UNIFICATION
3683 list
= FIRST_ELEMENT(anchor
);
3685 if (IS_INSN(list
)) {
3686 iobj
= (INSN
*)list
;
3688 if (unified_insns_data
[id
] != 0) {
3689 const int *const *entry
= unified_insns_data
[id
];
3690 for (j
= 1; j
< (intptr_t)entry
[0]; j
++) {
3691 const int *unified
= entry
[j
];
3692 LINK_ELEMENT
*li
= list
->next
;
3693 for (k
= 2; k
< unified
[1]; k
++) {
3695 ((INSN
*)li
)->insn_id
!= unified
[k
]) {
3702 new_unified_insn(iseq
, unified
[0], unified
[1] - 1,
3705 /* insert to list */
3706 niobj
->link
.prev
= (LINK_ELEMENT
*)iobj
->link
.prev
;
3707 niobj
->link
.next
= li
;
3709 li
->prev
= (LINK_ELEMENT
*)niobj
;
3712 list
->prev
->next
= (LINK_ELEMENT
*)niobj
;
3713 list
= (LINK_ELEMENT
*)niobj
;
3725 #if OPT_STACK_CACHING
3727 #define SC_INSN(insn, stat) sc_insn_info[(insn)][(stat)]
3728 #define SC_NEXT(insn) sc_insn_next[(insn)]
3730 #include "opt_sc.inc"
3733 insn_set_sc_state(rb_iseq_t
*iseq
, const LINK_ELEMENT
*anchor
, INSN
*iobj
, int state
)
3738 insn_id
= iobj
->insn_id
;
3739 iobj
->insn_id
= SC_INSN(insn_id
, state
);
3740 nstate
= SC_NEXT(iobj
->insn_id
);
3742 if (insn_id
== BIN(jump
) ||
3743 insn_id
== BIN(branchif
) || insn_id
== BIN(branchunless
)) {
3744 LABEL
*lobj
= (LABEL
*)OPERAND_AT(iobj
, 0);
3746 if (lobj
->sc_state
!= 0) {
3747 if (lobj
->sc_state
!= nstate
) {
3748 BADINSN_DUMP(anchor
, iobj
, lobj
);
3749 COMPILE_ERROR(iseq
, iobj
->insn_info
.line_no
,
3750 "insn_set_sc_state error: %d at "LABEL_FORMAT
3752 lobj
->sc_state
, lobj
->label_no
, nstate
);
3757 lobj
->sc_state
= nstate
;
3759 if (insn_id
== BIN(jump
)) {
3763 else if (insn_id
== BIN(leave
)) {
3771 label_set_sc_state(LABEL
*lobj
, int state
)
3773 if (lobj
->sc_state
!= 0) {
3774 if (lobj
->sc_state
!= state
) {
3775 state
= lobj
->sc_state
;
3779 lobj
->sc_state
= state
;
3789 iseq_set_sequence_stackcaching(rb_iseq_t
*iseq
, LINK_ANCHOR
*const anchor
)
3791 #if OPT_STACK_CACHING
3797 list
= FIRST_ELEMENT(anchor
);
3798 /* dump_disasm_list(list); */
3800 /* for each list element */
3803 switch (list
->type
) {
3804 case ISEQ_ELEMENT_INSN
:
3806 INSN
*iobj
= (INSN
*)list
;
3807 insn_id
= iobj
->insn_id
;
3809 /* dump_disasm_list(list); */
3814 /* exception merge point */
3815 if (state
!= SCS_AX
) {
3816 NODE dummy_line_node
= generate_dummy_line_node(0, -1);
3818 new_insn_body(iseq
, &dummy_line_node
, BIN(reput
), 0);
3820 /* replace this insn */
3821 ELEM_REPLACE(list
, (LINK_ELEMENT
*)rpobj
);
3822 list
= (LINK_ELEMENT
*)rpobj
;
3829 if (state
== SCS_AB
|| state
== SCS_BA
) {
3830 state
= (state
== SCS_AB
? SCS_BA
: SCS_AB
);
3854 COMPILE_ERROR(iseq
, iobj
->insn_info
.line_no
,
3858 /* remove useless pop */
3865 } /* end of switch */
3867 state
= insn_set_sc_state(iseq
, anchor
, iobj
, state
);
3870 case ISEQ_ELEMENT_LABEL
:
3873 lobj
= (LABEL
*)list
;
3875 state
= label_set_sc_state(lobj
, state
);
3887 all_string_result_p(const NODE
*node
)
3889 if (!node
) return FALSE
;
3890 switch (nd_type(node
)) {
3891 case NODE_STR
: case NODE_DSTR
:
3893 case NODE_IF
: case NODE_UNLESS
:
3894 if (!node
->nd_body
|| !node
->nd_else
) return FALSE
;
3895 if (all_string_result_p(node
->nd_body
))
3896 return all_string_result_p(node
->nd_else
);
3898 case NODE_AND
: case NODE_OR
:
3900 return all_string_result_p(node
->nd_1st
);
3901 if (!all_string_result_p(node
->nd_1st
))
3903 return all_string_result_p(node
->nd_2nd
);
3910 compile_dstr_fragments(rb_iseq_t
*iseq
, LINK_ANCHOR
*const ret
, const NODE
*const node
, int *cntp
)
3912 const NODE
*list
= node
->nd_next
;
3913 VALUE lit
= node
->nd_lit
;
3914 LINK_ELEMENT
*first_lit
= 0;
3917 debugp_param("nd_lit", lit
);
3920 if (!RB_TYPE_P(lit
, T_STRING
)) {
3921 COMPILE_ERROR(ERROR_ARGS
"dstr: must be string: %s",
3922 rb_builtin_type_name(TYPE(lit
)));
3925 lit
= rb_fstring(lit
);
3926 ADD_INSN1(ret
, node
, putobject
, lit
);
3927 RB_OBJ_WRITTEN(iseq
, Qundef
, lit
);
3928 if (RSTRING_LEN(lit
) == 0) first_lit
= LAST_ELEMENT(ret
);
3932 const NODE
*const head
= list
->nd_head
;
3933 if (nd_type_p(head
, NODE_STR
)) {
3934 lit
= rb_fstring(head
->nd_lit
);
3935 ADD_INSN1(ret
, head
, putobject
, lit
);
3936 RB_OBJ_WRITTEN(iseq
, Qundef
, lit
);
3940 CHECK(COMPILE(ret
, "each string", head
));
3943 list
= list
->nd_next
;
3945 if (NIL_P(lit
) && first_lit
) {
3946 ELEM_REMOVE(first_lit
);
3955 compile_block(rb_iseq_t
*iseq
, LINK_ANCHOR
*const ret
, const NODE
*node
, int popped
)
3957 while (node
&& nd_type_p(node
, NODE_BLOCK
)) {
3958 CHECK(COMPILE_(ret
, "BLOCK body", node
->nd_head
,
3959 (node
->nd_next
? 1 : popped
)));
3960 node
= node
->nd_next
;
3963 CHECK(COMPILE_(ret
, "BLOCK next", node
->nd_next
, popped
));
3969 compile_dstr(rb_iseq_t
*iseq
, LINK_ANCHOR
*const ret
, const NODE
*const node
)
3972 if (!node
->nd_next
) {
3973 VALUE lit
= rb_fstring(node
->nd_lit
);
3974 ADD_INSN1(ret
, node
, putstring
, lit
);
3975 RB_OBJ_WRITTEN(iseq
, Qundef
, lit
);
3978 CHECK(compile_dstr_fragments(iseq
, ret
, node
, &cnt
));
3979 ADD_INSN1(ret
, node
, concatstrings
, INT2FIX(cnt
));
3985 compile_dregx(rb_iseq_t
*iseq
, LINK_ANCHOR
*const ret
, const NODE
*const node
)
3988 CHECK(compile_dstr_fragments(iseq
, ret
, node
, &cnt
));
3989 ADD_INSN2(ret
, node
, toregexp
, INT2FIX(node
->nd_cflag
), INT2FIX(cnt
));
3994 compile_flip_flop(rb_iseq_t
*iseq
, LINK_ANCHOR
*const ret
, const NODE
*const node
, int again
,
3995 LABEL
*then_label
, LABEL
*else_label
)
3997 const int line
= nd_line(node
);
3998 LABEL
*lend
= NEW_LABEL(line
);
3999 rb_num_t cnt
= ISEQ_FLIP_CNT_INCREMENT(iseq
->body
->local_iseq
)
4000 + VM_SVAR_FLIPFLOP_START
;
4001 VALUE key
= INT2FIX(cnt
);
4003 ADD_INSN2(ret
, node
, getspecial
, key
, INT2FIX(0));
4004 ADD_INSNL(ret
, node
, branchif
, lend
);
4007 CHECK(COMPILE(ret
, "flip2 beg", node
->nd_beg
));
4008 ADD_INSNL(ret
, node
, branchunless
, else_label
);
4009 ADD_INSN1(ret
, node
, putobject
, Qtrue
);
4010 ADD_INSN1(ret
, node
, setspecial
, key
);
4012 ADD_INSNL(ret
, node
, jump
, then_label
);
4016 ADD_LABEL(ret
, lend
);
4017 CHECK(COMPILE(ret
, "flip2 end", node
->nd_end
));
4018 ADD_INSNL(ret
, node
, branchunless
, then_label
);
4019 ADD_INSN1(ret
, node
, putobject
, Qfalse
);
4020 ADD_INSN1(ret
, node
, setspecial
, key
);
4021 ADD_INSNL(ret
, node
, jump
, then_label
);
4027 compile_branch_condition(rb_iseq_t
*iseq
, LINK_ANCHOR
*const ret
, const NODE
*cond
,
4028 LABEL
*then_label
, LABEL
*else_label
)
4031 switch (nd_type(cond
)) {
4034 LABEL
*label
= NEW_LABEL(nd_line(cond
));
4035 CHECK(compile_branch_condition(iseq
, ret
, cond
->nd_1st
, label
,
4037 if (!label
->refcnt
) {
4038 ADD_INSN(ret
, cond
, putnil
);
4041 ADD_LABEL(ret
, label
);
4042 cond
= cond
->nd_2nd
;
4047 LABEL
*label
= NEW_LABEL(nd_line(cond
));
4048 CHECK(compile_branch_condition(iseq
, ret
, cond
->nd_1st
, then_label
,
4050 if (!label
->refcnt
) {
4051 ADD_INSN(ret
, cond
, putnil
);
4054 ADD_LABEL(ret
, label
);
4055 cond
= cond
->nd_2nd
;
4058 case NODE_LIT
: /* NODE_LIT is always true */
4063 /* printf("useless condition eliminate (%s)\n", ruby_node_name(nd_type(cond))); */
4064 ADD_INSNL(ret
, cond
, jump
, then_label
);
4068 /* printf("useless condition eliminate (%s)\n", ruby_node_name(nd_type(cond))); */
4069 ADD_INSNL(ret
, cond
, jump
, else_label
);
4075 CHECK(COMPILE_POPPED(ret
, "branch condition", cond
));
4076 ADD_INSNL(ret
, cond
, jump
, then_label
);
4079 CHECK(compile_flip_flop(iseq
, ret
, cond
, TRUE
, then_label
, else_label
));
4082 CHECK(compile_flip_flop(iseq
, ret
, cond
, FALSE
, then_label
, else_label
));
4085 CHECK(compile_defined_expr(iseq
, ret
, cond
, Qfalse
));
4088 CHECK(COMPILE(ret
, "branch condition", cond
));
4092 ADD_INSNL(ret
, cond
, branchunless
, else_label
);
4093 ADD_INSNL(ret
, cond
, jump
, then_label
);
4097 #define HASH_BRACE 1
4100 keyword_node_p(const NODE
*const node
)
4102 return nd_type_p(node
, NODE_HASH
) && (node
->nd_brace
& HASH_BRACE
) != HASH_BRACE
;
4106 compile_keyword_arg(rb_iseq_t
*iseq
, LINK_ANCHOR
*const ret
,
4107 const NODE
*const root_node
,
4108 struct rb_callinfo_kwarg
**const kw_arg_ptr
,
4111 if (kw_arg_ptr
== NULL
) return FALSE
;
4113 if (root_node
->nd_head
&& nd_type_p(root_node
->nd_head
, NODE_LIST
)) {
4114 const NODE
*node
= root_node
->nd_head
;
4118 const NODE
*key_node
= node
->nd_head
;
4121 assert(nd_type_p(node
, NODE_LIST
));
4122 if (key_node
&& nd_type_p(key_node
, NODE_LIT
) && SYMBOL_P(key_node
->nd_lit
)) {
4123 /* can be keywords */
4127 *flag
|= VM_CALL_KW_SPLAT
;
4128 if (seen_nodes
> 1 || node
->nd_next
->nd_next
) {
4129 /* A new hash will be created for the keyword arguments
4130 * in this case, so mark the method as passing mutable
4133 *flag
|= VM_CALL_KW_SPLAT_MUT
;
4138 node
= node
->nd_next
; /* skip value node */
4139 node
= node
->nd_next
;
4142 /* may be keywords */
4143 node
= root_node
->nd_head
;
4145 int len
= (int)node
->nd_alen
/ 2;
4146 struct rb_callinfo_kwarg
*kw_arg
=
4147 rb_xmalloc_mul_add(len
, sizeof(VALUE
), sizeof(struct rb_callinfo_kwarg
));
4148 VALUE
*keywords
= kw_arg
->keywords
;
4150 kw_arg
->keyword_len
= len
;
4152 *kw_arg_ptr
= kw_arg
;
4154 for (i
=0; node
!= NULL
; i
++, node
= node
->nd_next
->nd_next
) {
4155 const NODE
*key_node
= node
->nd_head
;
4156 const NODE
*val_node
= node
->nd_next
->nd_head
;
4157 keywords
[i
] = key_node
->nd_lit
;
4158 NO_CHECK(COMPILE(ret
, "keyword values", val_node
));
4168 compile_args(rb_iseq_t
*iseq
, LINK_ANCHOR
*const ret
, const NODE
*node
,
4169 struct rb_callinfo_kwarg
**keywords_ptr
, unsigned int *flag
)
4173 for (; node
; len
++, node
= node
->nd_next
) {
4175 EXPECT_NODE("compile_args", node
, NODE_LIST
, -1);
4178 if (node
->nd_next
== NULL
&& keyword_node_p(node
->nd_head
)) { /* last node */
4179 if (compile_keyword_arg(iseq
, ret
, node
->nd_head
, keywords_ptr
, flag
)) {
4183 compile_hash(iseq
, ret
, node
->nd_head
, TRUE
, FALSE
);
4187 NO_CHECK(COMPILE_(ret
, "array element", node
->nd_head
, FALSE
));
4195 static_literal_node_p(const NODE
*node
, const rb_iseq_t
*iseq
)
4197 switch (nd_type(node
)) {
4204 return ISEQ_COMPILE_DATA(iseq
)->option
->frozen_string_literal
;
4211 static_literal_value(const NODE
*node
, rb_iseq_t
*iseq
)
4213 switch (nd_type(node
)) {
4221 if (ISEQ_COMPILE_DATA(iseq
)->option
->debug_frozen_string_literal
|| RTEST(ruby_debug
)) {
4223 VALUE debug_info
= rb_ary_new_from_args(2, rb_iseq_path(iseq
), INT2FIX((int)nd_line(node
)));
4224 lit
= rb_str_dup(node
->nd_lit
);
4225 rb_ivar_set(lit
, id_debug_created_info
, rb_obj_freeze(debug_info
));
4226 return rb_str_freeze(lit
);
4229 return rb_fstring(node
->nd_lit
);
4232 return node
->nd_lit
;
4237 compile_array(rb_iseq_t
*iseq
, LINK_ANCHOR
*const ret
, const NODE
*node
, int popped
)
4239 const NODE
*line_node
= node
;
4241 if (nd_type_p(node
, NODE_ZLIST
)) {
4243 ADD_INSN1(ret
, line_node
, newarray
, INT2FIX(0));
4248 EXPECT_NODE("compile_array", node
, NODE_LIST
, -1);
4251 for (; node
; node
= node
->nd_next
) {
4252 NO_CHECK(COMPILE_(ret
, "array element", node
->nd_head
, popped
));
4257 /* Compilation of an array literal.
4258 * The following code is essentially the same as:
4260 * for (int count = 0; node; count++; node->nd_next) {
4261 * compile(node->nd_head);
4263 * ADD_INSN(newarray, count);
4265 * However, there are three points.
4267 * - The code above causes stack overflow for a big string literal.
4268 * The following limits the stack length up to max_stack_len.
4270 * [x1,x2,...,x10000] =>
4271 * push x1 ; push x2 ; ...; push x256; newarray 256;
4272 * push x257; push x258; ...; push x512; newarray 256; concatarray;
4273 * push x513; push x514; ...; push x768; newarray 256; concatarray;
4276 * - Long subarray can be optimized by pre-allocating a hidden array.
4278 * [1,2,3,...,100] =>
4279 * duparray [1,2,3,...,100]
4281 * [x, 1,2,3,...,100, z] =>
4282 * push x; newarray 1;
4283 * putobject [1,2,3,...,100] (<- hidden array); concatarray;
4284 * push z; newarray 1; concatarray
4286 * - If the last element is a keyword, newarraykwsplat should be emitted
4287 * to check and remove empty keyword arguments hash from array.
4288 * (Note: a keyword is NODE_HASH which is not static_literal_node_p.)
4291 * putobject 1; putobject 2; putobject 3; push kw; newarraykwsplat
4294 const int max_stack_len
= 0x100;
4295 const int min_tmp_ary_len
= 0x40;
4297 int first_chunk
= 1;
4299 /* Convert pushed elements to an array, and concatarray if needed */
4300 #define FLUSH_CHUNK(newarrayinsn) \
4302 ADD_INSN1(ret, line_node, newarrayinsn, INT2FIX(stack_len)); \
4303 if (!first_chunk) ADD_INSN(ret, line_node, concatarray); \
4304 first_chunk = stack_len = 0; \
4310 /* pre-allocation check (this branch can be omittable) */
4311 if (static_literal_node_p(node
->nd_head
, iseq
)) {
4312 /* count the elements that are optimizable */
4313 const NODE
*node_tmp
= node
->nd_next
;
4314 for (; node_tmp
&& static_literal_node_p(node_tmp
->nd_head
, iseq
); node_tmp
= node_tmp
->nd_next
)
4317 if ((first_chunk
&& stack_len
== 0 && !node_tmp
) || count
>= min_tmp_ary_len
) {
4318 /* The literal contains only optimizable elements, or the subarray is long enough */
4319 VALUE ary
= rb_ary_tmp_new(count
);
4321 /* Create a hidden array */
4322 for (; count
; count
--, node
= node
->nd_next
)
4323 rb_ary_push(ary
, static_literal_value(node
->nd_head
, iseq
));
4326 /* Emit optimized code */
4327 FLUSH_CHUNK(newarray
);
4329 ADD_INSN1(ret
, line_node
, duparray
, ary
);
4333 ADD_INSN1(ret
, line_node
, putobject
, ary
);
4334 ADD_INSN(ret
, line_node
, concatarray
);
4336 RB_OBJ_WRITTEN(iseq
, Qundef
, ary
);
4340 /* Base case: Compile "count" elements */
4341 for (; count
; count
--, node
= node
->nd_next
) {
4343 EXPECT_NODE("compile_array", node
, NODE_LIST
, -1);
4346 NO_CHECK(COMPILE_(ret
, "array element", node
->nd_head
, 0));
4349 if (!node
->nd_next
&& keyword_node_p(node
->nd_head
)) {
4350 /* Reached the end, and the last element is a keyword */
4351 FLUSH_CHUNK(newarraykwsplat
);
4355 /* If there are many pushed elements, flush them to avoid stack overflow */
4356 if (stack_len
>= max_stack_len
) FLUSH_CHUNK(newarray
);
4360 FLUSH_CHUNK(newarray
);
4365 /* Compile an array containing the single element represented by node */
4367 compile_array_1(rb_iseq_t
*iseq
, LINK_ANCHOR
*const ret
, const NODE
*node
)
4369 if (static_literal_node_p(node
, iseq
)) {
4370 VALUE ary
= rb_ary_tmp_new(1);
4371 rb_ary_push(ary
, static_literal_value(node
, iseq
));
4374 ADD_INSN1(ret
, node
, duparray
, ary
);
4377 CHECK(COMPILE_(ret
, "array element", node
, FALSE
));
4378 ADD_INSN1(ret
, node
, newarray
, INT2FIX(1));
4385 static_literal_node_pair_p(const NODE
*node
, const rb_iseq_t
*iseq
)
4387 return node
->nd_head
&& static_literal_node_p(node
->nd_head
, iseq
) && static_literal_node_p(node
->nd_next
->nd_head
, iseq
);
4391 compile_hash(rb_iseq_t
*iseq
, LINK_ANCHOR
*const ret
, const NODE
*node
, int method_call_keywords
, int popped
)
4393 const NODE
*line_node
= node
;
4395 node
= node
->nd_head
;
4397 if (!node
|| nd_type_p(node
, NODE_ZLIST
)) {
4399 ADD_INSN1(ret
, line_node
, newhash
, INT2FIX(0));
4404 EXPECT_NODE("compile_hash", node
, NODE_LIST
, -1);
4407 for (; node
; node
= node
->nd_next
) {
4408 NO_CHECK(COMPILE_(ret
, "hash element", node
->nd_head
, popped
));
4413 /* Compilation of a hash literal (or keyword arguments).
4414 * This is very similar to compile_array, but there are some differences:
4416 * - It contains key-value pairs. So we need to take every two elements.
4417 * We can assume that the length is always even.
4419 * - Merging is done by a method call (id_core_hash_merge_ptr).
4420 * Sometimes we need to insert the receiver, so "anchor" is needed.
4421 * In addition, a method call is much slower than concatarray.
4422 * So it pays only when the subsequence is really long.
4423 * (min_tmp_hash_len must be much larger than min_tmp_ary_len.)
4425 * - We need to handle keyword splat: **kw.
4426 * For **kw, the key part (node->nd_head) is NULL, and the value part
4427 * (node->nd_next->nd_head) is "kw".
4428 * The code is a bit difficult to avoid hash allocation for **{}.
4431 const int max_stack_len
= 0x100;
4432 const int min_tmp_hash_len
= 0x800;
4434 int first_chunk
= 1;
4435 DECL_ANCHOR(anchor
);
4436 INIT_ANCHOR(anchor
);
4438 /* Convert pushed elements to a hash, and merge if needed */
4439 #define FLUSH_CHUNK() \
4441 if (first_chunk) { \
4442 APPEND_LIST(ret, anchor); \
4443 ADD_INSN1(ret, line_node, newhash, INT2FIX(stack_len)); \
4446 ADD_INSN1(ret, line_node, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE)); \
4447 ADD_INSN(ret, line_node, swap); \
4448 APPEND_LIST(ret, anchor); \
4449 ADD_SEND(ret, line_node, id_core_hash_merge_ptr, INT2FIX(stack_len + 1)); \
4451 INIT_ANCHOR(anchor); \
4452 first_chunk = stack_len = 0; \
4458 /* pre-allocation check (this branch can be omittable) */
4459 if (static_literal_node_pair_p(node
, iseq
)) {
4460 /* count the elements that are optimizable */
4461 const NODE
*node_tmp
= node
->nd_next
->nd_next
;
4462 for (; node_tmp
&& static_literal_node_pair_p(node_tmp
, iseq
); node_tmp
= node_tmp
->nd_next
->nd_next
)
4465 if ((first_chunk
&& stack_len
== 0 && !node_tmp
) || count
>= min_tmp_hash_len
) {
4466 /* The literal contains only optimizable elements, or the subsequence is long enough */
4467 VALUE ary
= rb_ary_tmp_new(count
);
4469 /* Create a hidden hash */
4470 for (; count
; count
--, node
= node
->nd_next
->nd_next
) {
4472 elem
[0] = static_literal_value(node
->nd_head
, iseq
);
4473 elem
[1] = static_literal_value(node
->nd_next
->nd_head
, iseq
);
4474 rb_ary_cat(ary
, elem
, 2);
4476 VALUE hash
= rb_hash_new_with_size(RARRAY_LEN(ary
) / 2);
4477 rb_hash_bulk_insert(RARRAY_LEN(ary
), RARRAY_CONST_PTR_TRANSIENT(ary
), hash
);
4478 hash
= rb_obj_hide(hash
);
4481 /* Emit optimized code */
4484 ADD_INSN1(ret
, line_node
, duphash
, hash
);
4488 ADD_INSN1(ret
, line_node
, putspecialobject
, INT2FIX(VM_SPECIAL_OBJECT_VMCORE
));
4489 ADD_INSN(ret
, line_node
, swap
);
4491 ADD_INSN1(ret
, line_node
, putobject
, hash
);
4493 ADD_SEND(ret
, line_node
, id_core_hash_merge_kwd
, INT2FIX(2));
4495 RB_OBJ_WRITTEN(iseq
, Qundef
, hash
);
4499 /* Base case: Compile "count" elements */
4500 for (; count
; count
--, node
= node
->nd_next
->nd_next
) {
4503 EXPECT_NODE("compile_hash", node
, NODE_LIST
, -1);
4506 if (node
->nd_head
) {
4507 /* Normal key-value pair */
4508 NO_CHECK(COMPILE_(anchor
, "hash key element", node
->nd_head
, 0));
4509 NO_CHECK(COMPILE_(anchor
, "hash value element", node
->nd_next
->nd_head
, 0));
4512 /* If there are many pushed elements, flush them to avoid stack overflow */
4513 if (stack_len
>= max_stack_len
) FLUSH_CHUNK();
4516 /* kwsplat case: foo(..., **kw, ...) */
4519 const NODE
*kw
= node
->nd_next
->nd_head
;
4520 int empty_kw
= nd_type_p(kw
, NODE_LIT
) && RB_TYPE_P(kw
->nd_lit
, T_HASH
); /* foo( ..., **{}, ...) */
4521 int first_kw
= first_chunk
&& stack_len
== 0; /* foo(1,2,3, **kw, ...) */
4522 int last_kw
= !node
->nd_next
->nd_next
; /* foo( ..., **kw) */
4523 int only_kw
= last_kw
&& first_kw
; /* foo(1,2,3, **kw) */
4526 if (only_kw
&& method_call_keywords
) {
4527 /* **{} appears at the only keyword argument in method call,
4528 * so it won't be modified.
4529 * kw is a special NODE_LIT that contains a special empty hash,
4530 * so this emits: putobject {}.
4531 * This is only done for method calls and not for literal hashes,
4532 * because literal hashes should always result in a new hash.
4534 NO_CHECK(COMPILE(ret
, "keyword splat", kw
));
4536 else if (first_kw
) {
4537 /* **{} appears as the first keyword argument, so it may be modified.
4538 * We need to create a fresh hash object.
4540 ADD_INSN1(ret
, line_node
, newhash
, INT2FIX(0));
4542 /* Any empty keyword splats that are not the first can be ignored.
4543 * since merging an empty hash into the existing hash is the same
4544 * as not merging it. */
4547 if (only_kw
&& method_call_keywords
) {
4548 /* **kw is only keyword argument in method call.
4549 * Use directly. This will be not be flagged as mutable.
4550 * This is only done for method calls and not for literal hashes,
4551 * because literal hashes should always result in a new hash.
4553 NO_CHECK(COMPILE(ret
, "keyword splat", kw
));
4556 /* There is more than one keyword argument, or this is not a method
4557 * call. In that case, we need to add an empty hash (if first keyword),
4558 * or merge the hash to the accumulated hash (if not the first keyword).
4560 ADD_INSN1(ret
, line_node
, putspecialobject
, INT2FIX(VM_SPECIAL_OBJECT_VMCORE
));
4561 if (first_kw
) ADD_INSN1(ret
, line_node
, newhash
, INT2FIX(0));
4562 else ADD_INSN(ret
, line_node
, swap
);
4564 NO_CHECK(COMPILE(ret
, "keyword splat", kw
));
4566 ADD_SEND(ret
, line_node
, id_core_hash_merge_kwd
, INT2FIX(2));
4581 rb_node_case_when_optimizable_literal(const NODE
*const node
)
4583 switch (nd_type(node
)) {
4585 VALUE v
= node
->nd_lit
;
4587 if (RB_FLOAT_TYPE_P(v
) &&
4588 modf(RFLOAT_VALUE(v
), &ival
) == 0.0) {
4589 return FIXABLE(ival
) ? LONG2FIX((long)ival
) : rb_dbl2big(ival
);
4591 if (RB_TYPE_P(v
, T_RATIONAL
) || RB_TYPE_P(v
, T_COMPLEX
)) {
4594 if (SYMBOL_P(v
) || rb_obj_is_kind_of(v
, rb_cNumeric
)) {
4606 return rb_fstring(node
->nd_lit
);
4612 when_vals(rb_iseq_t
*iseq
, LINK_ANCHOR
*const cond_seq
, const NODE
*vals
,
4613 LABEL
*l1
, int only_special_literals
, VALUE literals
)
4616 const NODE
*val
= vals
->nd_head
;
4617 VALUE lit
= rb_node_case_when_optimizable_literal(val
);
4619 if (lit
== Qundef
) {
4620 only_special_literals
= 0;
4622 else if (NIL_P(rb_hash_lookup(literals
, lit
))) {
4623 rb_hash_aset(literals
, lit
, (VALUE
)(l1
) | 1);
4626 if (nd_type_p(val
, NODE_STR
)) {
4627 debugp_param("nd_lit", val
->nd_lit
);
4628 lit
= rb_fstring(val
->nd_lit
);
4629 ADD_INSN1(cond_seq
, val
, putobject
, lit
);
4630 RB_OBJ_WRITTEN(iseq
, Qundef
, lit
);
4633 if (!COMPILE(cond_seq
, "when cond", val
)) return -1;
4636 // Emit patern === target
4637 ADD_INSN1(cond_seq
, vals
, topn
, INT2FIX(1));
4638 ADD_CALL(cond_seq
, vals
, idEqq
, INT2FIX(1));
4639 ADD_INSNL(cond_seq
, val
, branchif
, l1
);
4640 vals
= vals
->nd_next
;
4642 return only_special_literals
;
4646 when_splat_vals(rb_iseq_t
*iseq
, LINK_ANCHOR
*const cond_seq
, const NODE
*vals
,
4647 LABEL
*l1
, int only_special_literals
, VALUE literals
)
4649 const NODE
*line_node
= vals
;
4651 switch (nd_type(vals
)) {
4653 if (when_vals(iseq
, cond_seq
, vals
, l1
, only_special_literals
, literals
) < 0)
4657 ADD_INSN (cond_seq
, line_node
, dup
);
4658 CHECK(COMPILE(cond_seq
, "when splat", vals
->nd_head
));
4659 ADD_INSN1(cond_seq
, line_node
, splatarray
, Qfalse
);
4660 ADD_INSN1(cond_seq
, line_node
, checkmatch
, INT2FIX(VM_CHECKMATCH_TYPE_CASE
| VM_CHECKMATCH_ARRAY
));
4661 ADD_INSNL(cond_seq
, line_node
, branchif
, l1
);
4664 CHECK(when_splat_vals(iseq
, cond_seq
, vals
->nd_head
, l1
, only_special_literals
, literals
));
4665 CHECK(when_splat_vals(iseq
, cond_seq
, vals
->nd_body
, l1
, only_special_literals
, literals
));
4668 CHECK(when_splat_vals(iseq
, cond_seq
, vals
->nd_head
, l1
, only_special_literals
, literals
));
4669 ADD_INSN (cond_seq
, line_node
, dup
);
4670 CHECK(COMPILE(cond_seq
, "when argspush body", vals
->nd_body
));
4671 ADD_INSN1(cond_seq
, line_node
, checkmatch
, INT2FIX(VM_CHECKMATCH_TYPE_CASE
));
4672 ADD_INSNL(cond_seq
, line_node
, branchif
, l1
);
4675 ADD_INSN (cond_seq
, line_node
, dup
);
4676 CHECK(COMPILE(cond_seq
, "when val", vals
));
4677 ADD_INSN1(cond_seq
, line_node
, splatarray
, Qfalse
);
4678 ADD_INSN1(cond_seq
, line_node
, checkmatch
, INT2FIX(VM_CHECKMATCH_TYPE_CASE
| VM_CHECKMATCH_ARRAY
));
4679 ADD_INSNL(cond_seq
, line_node
, branchif
, l1
);
4685 /* Multiple Assignment Handling
4687 * In order to handle evaluation of multiple assignment such that the left hand side
4688 * is evaluated before the right hand side, we need to process the left hand side
4689 * and see if there are any attributes that need to be assigned. If so, we add
4690 * instructions to evaluate the receiver of any assigned attributes before we
4691 * process the right hand side.
4693 * For a multiple assignment such as:
4695 * l1.m1, l2[0] = r3, r4
4697 * We start off evaluating l1 and l2, then we evaluate r3 and r4, then we
4698 * assign the result of r3 to l1.m1, and then the result of r4 to l2.m2.
4699 * On the VM stack, this looks like:
4703 * l1, self # putself
4705 * l1, l2, 0 # putobject 0
4706 * l1, l2, 0, [r3, r4] # after evaluation of RHS
4707 * l1, l2, 0, [r3, r4], r4, r3 # expandarray
4708 * l1, l2, 0, [r3, r4], r4, r3, l1 # topn 5
4709 * l1, l2, 0, [r3, r4], r4, l1, r3 # swap
4710 * l1, l2, 0, [r3, r4], r4, m1= # send
4711 * l1, l2, 0, [r3, r4], r4 # pop
4712 * l1, l2, 0, [r3, r4], r4, l2 # topn 3
4713 * l1, l2, 0, [r3, r4], r4, l2, 0 # topn 3
4714 * l1, l2, 0, [r3, r4], r4, l2, 0, r4 # topn 2
4715 * l1, l2, 0, [r3, r4], r4, []= # send
4716 * l1, l2, 0, [r3, r4], r4 # pop
4717 * l1, l2, 0, [r3, r4] # pop
4718 * [r3, r4], l2, 0, [r3, r4] # setn 3
4719 * [r3, r4], l2, 0 # pop
4720 * [r3, r4], l2 # pop
4723 * This is made more complex when you have to handle splats, post args,
4724 * and arbitrary levels of nesting. You need to keep track of the total
4725 * number of attributes to set, and for each attribute, how many entries
4726 * are on the stack before the final attribute, in order to correctly
4727 * calculate the topn value to use to get the receiver of the attribute
4730 * A brief description of the VM stack for simple multiple assignment
4731 * with no splat (rhs_array will not be present if the return value of
4732 * the multiple assignment is not needed):
4734 * lhs_attr1, lhs_attr2, ..., rhs_array, ..., rhs_arg2, rhs_arg1
4736 * For multiple assignment with splats, while processing the part before
4737 * the splat (splat+post here is an array of the splat and the post arguments):
4739 * lhs_attr1, lhs_attr2, ..., rhs_array, splat+post, ..., rhs_arg2, rhs_arg1
4741 * When processing the splat and post arguments:
4743 * lhs_attr1, lhs_attr2, ..., rhs_array, ..., post_arg2, post_arg1, splat
4745 * When processing nested multiple assignment, existing values on the stack
4748 * (l1.m1, l2.m2), l3.m3, l4* = [r1, r2], r3, r4
4750 * The stack layout would be the following before processing the nested
4751 * multiple assignment:
4753 * l1, l2, [[r1, r2], r3, r4], [r4], r3, [r1, r2]
4755 * In order to handle this correctly, we need to keep track of the nesting
4756 * level for each attribute assignment, as well as the attribute number
4757 * (left hand side attributes are processed left to right) and number of
4758 * arguments to pass to the setter method. struct masgn_attrasgn tracks
4761 * We also need to track information for the entire multiple assignment, such
4762 * as the total number of arguments, and the current nesting level, to
4763 * handle both nested multiple assignment as well as cases where the
4764 * rhs is not needed. We also need to keep track of all attribute
4765 assignments in this, which we do using a linked list. struct masgn_state
4766 * tracks this information.
4769 struct masgn_attrasgn
{
4771 struct masgn_attrasgn
*next
;
4772 const NODE
*line_node
;
/* Whole-multiple-assignment bookkeeping (see the long comment above). */
struct masgn_state {
    struct masgn_attrasgn *first_memo;  /* head of attrasgn list */
    struct masgn_attrasgn *last_memo;   /* tail of attrasgn list */
    int lhs_level;                      /* extra stack depth from nesting/rhs */
    int num_args;                       /* total setter arguments so far */
    bool nested;                        /* inside a nested masgn? */
};
4786 static int compile_massign0(rb_iseq_t
*iseq
, LINK_ANCHOR
*const pre
, LINK_ANCHOR
*const rhs
, LINK_ANCHOR
*const lhs
, LINK_ANCHOR
*const post
, const NODE
*const node
, struct masgn_state
*state
, int popped
);
4789 compile_massign_lhs(rb_iseq_t
*iseq
, LINK_ANCHOR
*const pre
, LINK_ANCHOR
*const rhs
, LINK_ANCHOR
*const lhs
, LINK_ANCHOR
*const post
, const NODE
*const node
, struct masgn_state
*state
, int lhs_pos
)
4791 switch (nd_type(node
)) {
4792 case NODE_ATTRASGN
: {
4794 rb_bug("no masgn_state");
4798 const NODE
*line_node
= node
;
4800 CHECK(COMPILE_POPPED(pre
, "masgn lhs (NODE_ATTRASGN)", node
));
4802 LINK_ELEMENT
*insn_element
= LAST_ELEMENT(pre
);
4803 iobj
= (INSN
*)get_prev_insn((INSN
*)insn_element
); /* send insn */
4805 ELEM_REMOVE(LAST_ELEMENT(pre
));
4806 ELEM_REMOVE((LINK_ELEMENT
*)iobj
);
4807 pre
->last
= iobj
->link
.prev
;
4809 const struct rb_callinfo
*ci
= (struct rb_callinfo
*)OPERAND_AT(iobj
, 0);
4810 int argc
= vm_ci_argc(ci
) + 1;
4811 ci
= ci_argc_set(iseq
, ci
, argc
);
4812 OPERAND_AT(iobj
, 0) = (VALUE
)ci
;
4813 RB_OBJ_WRITTEN(iseq
, Qundef
, ci
);
4816 ADD_INSN(lhs
, line_node
, swap
);
4819 ADD_INSN1(lhs
, line_node
, topn
, INT2FIX(argc
));
4822 struct masgn_attrasgn
*memo
;
4823 memo
= malloc(sizeof(struct masgn_attrasgn
));
4827 memo
->before_insn
= (INSN
*)LAST_ELEMENT(lhs
);
4828 memo
->line_node
= line_node
;
4829 memo
->argn
= state
->num_args
+ 1;
4830 memo
->num_args
= argc
;
4831 state
->num_args
+= argc
;
4832 memo
->lhs_pos
= lhs_pos
;
4834 if (!state
->first_memo
) {
4835 state
->first_memo
= memo
;
4838 state
->last_memo
->next
= memo
;
4840 state
->last_memo
= memo
;
4842 ADD_ELEM(lhs
, (LINK_ELEMENT
*)iobj
);
4843 if (vm_ci_flag(ci
) & VM_CALL_ARGS_SPLAT
) {
4844 int argc
= vm_ci_argc(ci
);
4845 ci
= ci_argc_set(iseq
, ci
, argc
- 1);
4846 OPERAND_AT(iobj
, 0) = (VALUE
)ci
;
4847 RB_OBJ_WRITTEN(iseq
, Qundef
, iobj
);
4848 INSERT_BEFORE_INSN1(iobj
, line_node
, newarray
, INT2FIX(1));
4849 INSERT_BEFORE_INSN(iobj
, line_node
, concatarray
);
4851 ADD_INSN(lhs
, line_node
, pop
);
4853 ADD_INSN(lhs
, line_node
, pop
);
4855 for (int i
=0; i
< argc
; i
++) {
4856 ADD_INSN(post
, line_node
, pop
);
4861 DECL_ANCHOR(nest_rhs
);
4862 INIT_ANCHOR(nest_rhs
);
4863 DECL_ANCHOR(nest_lhs
);
4864 INIT_ANCHOR(nest_lhs
);
4866 int prev_level
= state
->lhs_level
;
4867 bool prev_nested
= state
->nested
;
4869 state
->lhs_level
= lhs_pos
- 1;
4870 CHECK(compile_massign0(iseq
, pre
, nest_rhs
, nest_lhs
, post
, node
, state
, 1));
4871 state
->lhs_level
= prev_level
;
4872 state
->nested
= prev_nested
;
4874 ADD_SEQ(lhs
, nest_rhs
);
4875 ADD_SEQ(lhs
, nest_lhs
);
4879 DECL_ANCHOR(anchor
);
4880 INIT_ANCHOR(anchor
);
4881 CHECK(COMPILE_POPPED(anchor
, "masgn lhs", node
));
4882 ELEM_REMOVE(FIRST_ELEMENT(anchor
));
4883 ADD_SEQ(lhs
, anchor
);
4891 compile_massign_opt_lhs(rb_iseq_t
*iseq
, LINK_ANCHOR
*const ret
, const NODE
*lhsn
)
4894 CHECK(compile_massign_opt_lhs(iseq
, ret
, lhsn
->nd_next
));
4895 CHECK(compile_massign_lhs(iseq
, ret
, ret
, ret
, ret
, lhsn
->nd_head
, NULL
, 0));
4901 compile_massign_opt(rb_iseq_t
*iseq
, LINK_ANCHOR
*const ret
,
4902 const NODE
*rhsn
, const NODE
*orig_lhsn
)
4905 const int memsize
= numberof(mem
);
4907 int llen
= 0, rlen
= 0;
4909 const NODE
*lhsn
= orig_lhsn
;
4911 #define MEMORY(v) { \
4913 if (memindex == memsize) return 0; \
4914 for (i=0; i<memindex; i++) { \
4915 if (mem[i] == (v)) return 0; \
4917 mem[memindex++] = (v); \
4920 if (rhsn
== 0 || !nd_type_p(rhsn
, NODE_LIST
)) {
4925 const NODE
*ln
= lhsn
->nd_head
;
4926 switch (nd_type(ln
)) {
4938 lhsn
= lhsn
->nd_next
;
4944 NO_CHECK(COMPILE_POPPED(ret
, "masgn val (popped)", rhsn
->nd_head
));
4947 NO_CHECK(COMPILE(ret
, "masgn val", rhsn
->nd_head
));
4949 rhsn
= rhsn
->nd_next
;
4954 for (i
=0; i
<llen
-rlen
; i
++) {
4955 ADD_INSN(ret
, orig_lhsn
, putnil
);
4959 compile_massign_opt_lhs(iseq
, ret
, orig_lhsn
);
4964 compile_massign0(rb_iseq_t
*iseq
, LINK_ANCHOR
*const pre
, LINK_ANCHOR
*const rhs
, LINK_ANCHOR
*const lhs
, LINK_ANCHOR
*const post
, const NODE
*const node
, struct masgn_state
*state
, int popped
)
4966 const NODE
*rhsn
= node
->nd_value
;
4967 const NODE
*splatn
= node
->nd_args
;
4968 const NODE
*lhsn
= node
->nd_head
;
4969 const NODE
*lhsn_count
= lhsn
;
4970 int lhs_splat
= (splatn
&& NODE_NAMED_REST_P(splatn
)) ? 1 : 0;
4976 while (lhsn_count
) {
4978 lhsn_count
= lhsn_count
->nd_next
;
4981 CHECK(compile_massign_lhs(iseq
, pre
, rhs
, lhs
, post
, lhsn
->nd_head
, state
, (llen
- lpos
) + lhs_splat
+ state
->lhs_level
));
4983 lhsn
= lhsn
->nd_next
;
4987 if (nd_type_p(splatn
, NODE_POSTARG
)) {
4988 /*a, b, *r, p1, p2 */
4989 const NODE
*postn
= splatn
->nd_2nd
;
4990 const NODE
*restn
= splatn
->nd_1st
;
4991 int plen
= (int)postn
->nd_alen
;
4993 int flag
= 0x02 | (NODE_NAMED_REST_P(restn
) ? 0x01 : 0x00);
4995 ADD_INSN2(lhs
, splatn
, expandarray
, INT2FIX(plen
), INT2FIX(flag
));
4997 if (NODE_NAMED_REST_P(restn
)) {
4998 CHECK(compile_massign_lhs(iseq
, pre
, rhs
, lhs
, post
, restn
, state
, 1 + plen
+ state
->lhs_level
));
5001 CHECK(compile_massign_lhs(iseq
, pre
, rhs
, lhs
, post
, postn
->nd_head
, state
, (plen
- ppos
) + state
->lhs_level
));
5003 postn
= postn
->nd_next
;
5008 CHECK(compile_massign_lhs(iseq
, pre
, rhs
, lhs
, post
, splatn
, state
, 1 + state
->lhs_level
));
5013 if (!state
->nested
) {
5014 NO_CHECK(COMPILE(rhs
, "normal masgn rhs", rhsn
));
5018 ADD_INSN(rhs
, node
, dup
);
5021 ADD_INSN2(rhs
, node
, expandarray
, INT2FIX(llen
), INT2FIX(lhs_splat
));
5027 compile_massign(rb_iseq_t
*iseq
, LINK_ANCHOR
*const ret
, const NODE
*const node
, int popped
)
5029 if (!popped
|| node
->nd_args
|| !compile_massign_opt(iseq
, ret
, node
->nd_value
, node
->nd_head
)) {
5030 struct masgn_state state
;
5031 state
.lhs_level
= popped
? 0 : 1;
5034 state
.first_memo
= NULL
;
5035 state
.last_memo
= NULL
;
5045 int ok
= compile_massign0(iseq
, pre
, rhs
, lhs
, post
, node
, &state
, popped
);
5047 struct masgn_attrasgn
*memo
= state
.first_memo
, *tmp_memo
;
5049 VALUE topn_arg
= INT2FIX((state
.num_args
- memo
->argn
) + memo
->lhs_pos
);
5050 for (int i
= 0; i
< memo
->num_args
; i
++) {
5051 INSERT_BEFORE_INSN1(memo
->before_insn
, memo
->line_node
, topn
, topn_arg
);
5053 tmp_memo
= memo
->next
;
5062 if (!popped
&& state
.num_args
>= 1) {
5063 /* make sure rhs array is returned before popping */
5064 ADD_INSN1(ret
, node
, setn
, INT2FIX(state
.num_args
));
5072 compile_const_prefix(rb_iseq_t
*iseq
, const NODE
*const node
,
5073 LINK_ANCHOR
*const pref
, LINK_ANCHOR
*const body
)
5075 switch (nd_type(node
)) {
5077 debugi("compile_const_prefix - colon", node
->nd_vid
);
5078 ADD_INSN1(body
, node
, putobject
, Qtrue
);
5079 ADD_INSN1(body
, node
, getconstant
, ID2SYM(node
->nd_vid
));
5082 debugi("compile_const_prefix - colon3", node
->nd_mid
);
5083 ADD_INSN(body
, node
, pop
);
5084 ADD_INSN1(body
, node
, putobject
, rb_cObject
);
5085 ADD_INSN1(body
, node
, putobject
, Qtrue
);
5086 ADD_INSN1(body
, node
, getconstant
, ID2SYM(node
->nd_mid
));
5089 CHECK(compile_const_prefix(iseq
, node
->nd_head
, pref
, body
));
5090 debugi("compile_const_prefix - colon2", node
->nd_mid
);
5091 ADD_INSN1(body
, node
, putobject
, Qfalse
);
5092 ADD_INSN1(body
, node
, getconstant
, ID2SYM(node
->nd_mid
));
5095 CHECK(COMPILE(pref
, "const colon2 prefix", node
));
5102 compile_cpath(LINK_ANCHOR
*const ret
, rb_iseq_t
*iseq
, const NODE
*cpath
)
5104 if (nd_type_p(cpath
, NODE_COLON3
)) {
5105 /* toplevel class ::Foo */
5106 ADD_INSN1(ret
, cpath
, putobject
, rb_cObject
);
5107 return VM_DEFINECLASS_FLAG_SCOPED
;
5109 else if (cpath
->nd_head
) {
5111 NO_CHECK(COMPILE(ret
, "nd_else->nd_head", cpath
->nd_head
));
5112 return VM_DEFINECLASS_FLAG_SCOPED
;
5115 /* class at cbase Foo */
5116 ADD_INSN1(ret
, cpath
, putspecialobject
,
5117 INT2FIX(VM_SPECIAL_OBJECT_CONST_BASE
));
5123 private_recv_p(const NODE
*node
)
5125 if (nd_type_p(node
->nd_recv
, NODE_SELF
)) {
5126 NODE
*self
= node
->nd_recv
;
5127 return self
->nd_state
!= 0;
5133 defined_expr(rb_iseq_t
*iseq
, LINK_ANCHOR
*const ret
,
5134 const NODE
*const node
, LABEL
**lfinish
, VALUE needstr
);
5137 compile_call(rb_iseq_t
*iseq
, LINK_ANCHOR
*const ret
, const NODE
*const node
, const enum node_type type
, const NODE
*const line_node
, int popped
, bool assume_receiver
);
5140 defined_expr0(rb_iseq_t
*iseq
, LINK_ANCHOR
*const ret
,
5141 const NODE
*const node
, LABEL
**lfinish
, VALUE needstr
,
5144 enum defined_type expr_type
= DEFINED_NOT_DEFINED
;
5145 enum node_type type
;
5146 const int line
= nd_line(node
);
5147 const NODE
*line_node
= node
;
5149 switch (type
= nd_type(node
)) {
5153 expr_type
= DEFINED_NIL
;
5156 expr_type
= DEFINED_SELF
;
5159 expr_type
= DEFINED_TRUE
;
5162 expr_type
= DEFINED_FALSE
;
5166 const NODE
*vals
= node
;
5169 defined_expr0(iseq
, ret
, vals
->nd_head
, lfinish
, Qfalse
, false);
5172 lfinish
[1] = NEW_LABEL(line
);
5174 ADD_INSNL(ret
, line_node
, branchunless
, lfinish
[1]);
5175 } while ((vals
= vals
->nd_next
) != NULL
);
5184 expr_type
= DEFINED_EXPR
;
5190 expr_type
= DEFINED_LVAR
;
5193 #define PUSH_VAL(type) (needstr == Qfalse ? Qtrue : rb_iseq_defined_string(type))
5195 ADD_INSN(ret
, line_node
, putnil
);
5196 ADD_INSN3(ret
, line_node
, defined
, INT2FIX(DEFINED_IVAR
),
5197 ID2SYM(node
->nd_vid
), PUSH_VAL(DEFINED_IVAR
));
5201 ADD_INSN(ret
, line_node
, putnil
);
5202 ADD_INSN3(ret
, line_node
, defined
, INT2FIX(DEFINED_GVAR
),
5203 ID2SYM(node
->nd_entry
), PUSH_VAL(DEFINED_GVAR
));
5207 ADD_INSN(ret
, line_node
, putnil
);
5208 ADD_INSN3(ret
, line_node
, defined
, INT2FIX(DEFINED_CVAR
),
5209 ID2SYM(node
->nd_vid
), PUSH_VAL(DEFINED_CVAR
));
5213 ADD_INSN(ret
, line_node
, putnil
);
5214 ADD_INSN3(ret
, line_node
, defined
, INT2FIX(DEFINED_CONST
),
5215 ID2SYM(node
->nd_vid
), PUSH_VAL(DEFINED_CONST
));
5219 lfinish
[1] = NEW_LABEL(line
);
5221 defined_expr0(iseq
, ret
, node
->nd_head
, lfinish
, Qfalse
, false);
5222 ADD_INSNL(ret
, line_node
, branchunless
, lfinish
[1]);
5223 NO_CHECK(COMPILE(ret
, "defined/colon2#nd_head", node
->nd_head
));
5225 if (rb_is_const_id(node
->nd_mid
)) {
5226 ADD_INSN3(ret
, line_node
, defined
, INT2FIX(DEFINED_CONST_FROM
),
5227 ID2SYM(node
->nd_mid
), PUSH_VAL(DEFINED_CONST
));
5230 ADD_INSN3(ret
, line_node
, defined
, INT2FIX(DEFINED_METHOD
),
5231 ID2SYM(node
->nd_mid
), PUSH_VAL(DEFINED_METHOD
));
5235 ADD_INSN1(ret
, line_node
, putobject
, rb_cObject
);
5236 ADD_INSN3(ret
, line_node
, defined
,
5237 INT2FIX(DEFINED_CONST_FROM
), ID2SYM(node
->nd_mid
), PUSH_VAL(DEFINED_CONST
));
5240 /* method dispatch */
5245 case NODE_ATTRASGN
:{
5246 const int explicit_receiver
=
5247 (type
== NODE_CALL
|| type
== NODE_OPCALL
||
5248 (type
== NODE_ATTRASGN
&& !private_recv_p(node
)));
5250 if (node
->nd_args
|| explicit_receiver
) {
5252 lfinish
[1] = NEW_LABEL(line
);
5255 lfinish
[2] = NEW_LABEL(line
);
5258 if (node
->nd_args
) {
5259 defined_expr0(iseq
, ret
, node
->nd_args
, lfinish
, Qfalse
, false);
5260 ADD_INSNL(ret
, line_node
, branchunless
, lfinish
[1]);
5262 if (explicit_receiver
) {
5263 defined_expr0(iseq
, ret
, node
->nd_recv
, lfinish
, Qfalse
, true);
5264 switch (nd_type(node
->nd_recv
)) {
5270 ADD_INSNL(ret
, line_node
, branchunless
, lfinish
[2]);
5271 compile_call(iseq
, ret
, node
->nd_recv
, nd_type(node
->nd_recv
), line_node
, 0, true);
5274 ADD_INSNL(ret
, line_node
, branchunless
, lfinish
[1]);
5275 NO_CHECK(COMPILE(ret
, "defined/recv", node
->nd_recv
));
5279 ADD_INSN(ret
, line_node
, dup
);
5281 ADD_INSN3(ret
, line_node
, defined
, INT2FIX(DEFINED_METHOD
),
5282 ID2SYM(node
->nd_mid
), PUSH_VAL(DEFINED_METHOD
));
5285 ADD_INSN(ret
, line_node
, putself
);
5287 ADD_INSN(ret
, line_node
, dup
);
5289 ADD_INSN3(ret
, line_node
, defined
, INT2FIX(DEFINED_FUNC
),
5290 ID2SYM(node
->nd_mid
), PUSH_VAL(DEFINED_METHOD
));
5296 ADD_INSN(ret
, line_node
, putnil
);
5297 ADD_INSN3(ret
, line_node
, defined
, INT2FIX(DEFINED_YIELD
), 0,
5298 PUSH_VAL(DEFINED_YIELD
));
5303 ADD_INSN(ret
, line_node
, putnil
);
5304 ADD_INSN3(ret
, line_node
, defined
, INT2FIX(DEFINED_REF
),
5305 INT2FIX((node
->nd_nth
<< 1) | (type
== NODE_BACK_REF
)),
5306 PUSH_VAL(DEFINED_GVAR
));
5311 ADD_INSN(ret
, line_node
, putnil
);
5312 ADD_INSN3(ret
, line_node
, defined
, INT2FIX(DEFINED_ZSUPER
), 0,
5313 PUSH_VAL(DEFINED_ZSUPER
));
5319 case NODE_OP_ASGN_OR
:
5320 case NODE_OP_ASGN_AND
:
5328 expr_type
= DEFINED_ASGN
;
5332 assert(expr_type
!= DEFINED_NOT_DEFINED
);
5334 if (needstr
!= Qfalse
) {
5335 VALUE str
= rb_iseq_defined_string(expr_type
);
5336 ADD_INSN1(ret
, line_node
, putobject
, str
);
5339 ADD_INSN1(ret
, line_node
, putobject
, Qtrue
);
5344 build_defined_rescue_iseq(rb_iseq_t
*iseq
, LINK_ANCHOR
*const ret
, const void *unused
)
5346 NODE dummy_line_node
= generate_dummy_line_node(0, -1);
5347 ADD_INSN(ret
, &dummy_line_node
, putnil
);
5348 iseq_set_exception_local_table(iseq
);
5352 defined_expr(rb_iseq_t
*iseq
, LINK_ANCHOR
*const ret
,
5353 const NODE
*const node
, LABEL
**lfinish
, VALUE needstr
)
5355 LINK_ELEMENT
*lcur
= ret
->last
;
5356 defined_expr0(iseq
, ret
, node
, lfinish
, needstr
, false);
5358 int line
= nd_line(node
);
5359 LABEL
*lstart
= NEW_LABEL(line
);
5360 LABEL
*lend
= NEW_LABEL(line
);
5361 const rb_iseq_t
*rescue
;
5362 struct rb_iseq_new_with_callback_callback_func
*ifunc
=
5363 rb_iseq_new_with_callback_new_callback(build_defined_rescue_iseq
, NULL
);
5364 rescue
= new_child_iseq_with_callback(iseq
, ifunc
,
5365 rb_str_concat(rb_str_new2("defined guard in "),
5366 iseq
->body
->location
.label
),
5367 iseq
, ISEQ_TYPE_RESCUE
, 0);
5368 lstart
->rescued
= LABEL_RESCUE_BEG
;
5369 lend
->rescued
= LABEL_RESCUE_END
;
5370 APPEND_LABEL(ret
, lcur
, lstart
);
5371 ADD_LABEL(ret
, lend
);
5372 ADD_CATCH_ENTRY(CATCH_TYPE_RESCUE
, lstart
, lend
, rescue
, lfinish
[1]);
5377 compile_defined_expr(rb_iseq_t
*iseq
, LINK_ANCHOR
*const ret
, const NODE
*const node
, VALUE needstr
)
5379 const int line
= nd_line(node
);
5380 const NODE
*line_node
= node
;
5381 if (!node
->nd_head
) {
5382 VALUE str
= rb_iseq_defined_string(DEFINED_NIL
);
5383 ADD_INSN1(ret
, line_node
, putobject
, str
);
5387 LINK_ELEMENT
*last
= ret
->last
;
5388 lfinish
[0] = NEW_LABEL(line
);
5391 defined_expr(iseq
, ret
, node
->nd_head
, lfinish
, needstr
);
5393 ELEM_INSERT_NEXT(last
, &new_insn_body(iseq
, line_node
, BIN(putnil
), 0)->link
);
5394 ADD_INSN(ret
, line_node
, swap
);
5396 ADD_LABEL(ret
, lfinish
[2]);
5398 ADD_INSN(ret
, line_node
, pop
);
5399 ADD_LABEL(ret
, lfinish
[1]);
5401 ADD_LABEL(ret
, lfinish
[0]);
5407 make_name_for_block(const rb_iseq_t
*orig_iseq
)
5410 const rb_iseq_t
*iseq
= orig_iseq
;
5412 if (orig_iseq
->body
->parent_iseq
!= 0) {
5413 while (orig_iseq
->body
->local_iseq
!= iseq
) {
5414 if (iseq
->body
->type
== ISEQ_TYPE_BLOCK
) {
5417 iseq
= iseq
->body
->parent_iseq
;
5422 return rb_sprintf("block in %"PRIsVALUE
, iseq
->body
->location
.label
);
5425 return rb_sprintf("block (%d levels) in %"PRIsVALUE
, level
, iseq
->body
->location
.label
);
5430 push_ensure_entry(rb_iseq_t
*iseq
,
5431 struct iseq_compile_data_ensure_node_stack
*enl
,
5432 struct ensure_range
*er
, const NODE
*const node
)
5434 enl
->ensure_node
= node
;
5435 enl
->prev
= ISEQ_COMPILE_DATA(iseq
)->ensure_node_stack
; /* prev */
5437 ISEQ_COMPILE_DATA(iseq
)->ensure_node_stack
= enl
;
5441 add_ensure_range(rb_iseq_t
*iseq
, struct ensure_range
*erange
,
5442 LABEL
*lstart
, LABEL
*lend
)
5444 struct ensure_range
*ne
=
5445 compile_data_alloc(iseq
, sizeof(struct ensure_range
));
5447 while (erange
->next
!= 0) {
5448 erange
= erange
->next
;
5452 ne
->end
= erange
->end
;
5453 erange
->end
= lstart
;
5459 can_add_ensure_iseq(const rb_iseq_t
*iseq
)
5461 struct iseq_compile_data_ensure_node_stack
*e
;
5462 if (ISEQ_COMPILE_DATA(iseq
)->in_rescue
&& (e
= ISEQ_COMPILE_DATA(iseq
)->ensure_node_stack
) != NULL
) {
5464 if (e
->ensure_node
) return false;
5472 add_ensure_iseq(LINK_ANCHOR
*const ret
, rb_iseq_t
*iseq
, int is_return
)
5474 assert(can_add_ensure_iseq(iseq
));
5476 struct iseq_compile_data_ensure_node_stack
*enlp
=
5477 ISEQ_COMPILE_DATA(iseq
)->ensure_node_stack
;
5478 struct iseq_compile_data_ensure_node_stack
*prev_enlp
= enlp
;
5479 DECL_ANCHOR(ensure
);
5481 INIT_ANCHOR(ensure
);
5483 if (enlp
->erange
!= NULL
) {
5484 DECL_ANCHOR(ensure_part
);
5485 LABEL
*lstart
= NEW_LABEL(0);
5486 LABEL
*lend
= NEW_LABEL(0);
5487 INIT_ANCHOR(ensure_part
);
5489 add_ensure_range(iseq
, enlp
->erange
, lstart
, lend
);
5491 ISEQ_COMPILE_DATA(iseq
)->ensure_node_stack
= enlp
->prev
;
5492 ADD_LABEL(ensure_part
, lstart
);
5493 NO_CHECK(COMPILE_POPPED(ensure_part
, "ensure part", enlp
->ensure_node
));
5494 ADD_LABEL(ensure_part
, lend
);
5495 ADD_SEQ(ensure
, ensure_part
);
5504 ISEQ_COMPILE_DATA(iseq
)->ensure_node_stack
= prev_enlp
;
5505 ADD_SEQ(ret
, ensure
);
5509 check_keyword(const NODE
*node
)
5511 /* This check is essentially a code clone of compile_keyword_arg. */
5513 if (nd_type_p(node
, NODE_LIST
)) {
5514 while (node
->nd_next
) {
5515 node
= node
->nd_next
;
5517 node
= node
->nd_head
;
5520 return keyword_node_p(node
);
5524 setup_args_core(rb_iseq_t
*iseq
, LINK_ANCHOR
*const args
, const NODE
*argn
,
5525 int dup_rest
, unsigned int *flag
, struct rb_callinfo_kwarg
**keywords
)
5528 switch (nd_type(argn
)) {
5530 NO_CHECK(COMPILE(args
, "args (splat)", argn
->nd_head
));
5531 ADD_INSN1(args
, argn
, splatarray
, RBOOL(dup_rest
));
5532 if (flag
) *flag
|= VM_CALL_ARGS_SPLAT
;
5536 case NODE_ARGSPUSH
: {
5537 int next_is_list
= (nd_type_p(argn
->nd_head
, NODE_LIST
));
5538 VALUE argc
= setup_args_core(iseq
, args
, argn
->nd_head
, 1, NULL
, NULL
);
5539 if (nd_type_p(argn
->nd_body
, NODE_LIST
)) {
5540 /* This branch is needed to avoid "newarraykwsplat" [Bug #16442] */
5541 int rest_len
= compile_args(iseq
, args
, argn
->nd_body
, NULL
, NULL
);
5542 ADD_INSN1(args
, argn
, newarray
, INT2FIX(rest_len
));
5545 NO_CHECK(COMPILE(args
, "args (cat: splat)", argn
->nd_body
));
5548 *flag
|= VM_CALL_ARGS_SPLAT
;
5549 /* This is a dirty hack. It traverses the AST twice.
5550 * In a long term, it should be fixed by a redesign of keyword arguments */
5551 if (check_keyword(argn
->nd_body
))
5552 *flag
|= VM_CALL_KW_SPLAT
;
5554 if (nd_type_p(argn
, NODE_ARGSCAT
)) {
5556 ADD_INSN1(args
, argn
, splatarray
, Qtrue
);
5557 return INT2FIX(FIX2INT(argc
) + 1);
5560 ADD_INSN1(args
, argn
, splatarray
, Qfalse
);
5561 ADD_INSN(args
, argn
, concatarray
);
5566 ADD_INSN1(args
, argn
, newarray
, INT2FIX(1));
5567 ADD_INSN(args
, argn
, concatarray
);
5572 int len
= compile_args(iseq
, args
, argn
, keywords
, flag
);
5573 return INT2FIX(len
);
5576 UNKNOWN_NODE("setup_arg", argn
, Qnil
);
5584 setup_args(rb_iseq_t
*iseq
, LINK_ANCHOR
*const args
, const NODE
*argn
,
5585 unsigned int *flag
, struct rb_callinfo_kwarg
**keywords
)
5588 if (argn
&& nd_type_p(argn
, NODE_BLOCK_PASS
)) {
5589 unsigned int dup_rest
= 1;
5590 DECL_ANCHOR(arg_block
);
5591 INIT_ANCHOR(arg_block
);
5592 NO_CHECK(COMPILE(arg_block
, "block", argn
->nd_body
));
5594 *flag
|= VM_CALL_ARGS_BLOCKARG
;
5596 if (LIST_INSN_SIZE_ONE(arg_block
)) {
5597 LINK_ELEMENT
*elem
= FIRST_ELEMENT(arg_block
);
5598 if (elem
->type
== ISEQ_ELEMENT_INSN
) {
5599 INSN
*iobj
= (INSN
*)elem
;
5600 if (iobj
->insn_id
== BIN(getblockparam
)) {
5601 iobj
->insn_id
= BIN(getblockparamproxy
);
5606 ret
= setup_args_core(iseq
, args
, argn
->nd_head
, dup_rest
, flag
, keywords
);
5607 ADD_SEQ(args
, arg_block
);
5610 ret
= setup_args_core(iseq
, args
, argn
, 0, flag
, keywords
);
5616 build_postexe_iseq(rb_iseq_t
*iseq
, LINK_ANCHOR
*ret
, const void *ptr
)
5618 const NODE
*body
= ptr
;
5619 int line
= nd_line(body
);
5620 VALUE argc
= INT2FIX(0);
5621 const rb_iseq_t
*block
= NEW_CHILD_ISEQ(body
, make_name_for_block(iseq
->body
->parent_iseq
), ISEQ_TYPE_BLOCK
, line
);
5623 ADD_INSN1(ret
, body
, putspecialobject
, INT2FIX(VM_SPECIAL_OBJECT_VMCORE
));
5624 ADD_CALL_WITH_BLOCK(ret
, body
, id_core_set_postexe
, argc
, block
);
5625 RB_OBJ_WRITTEN(iseq
, Qundef
, (VALUE
)block
);
5626 iseq_set_local_table(iseq
, 0);
5630 compile_named_capture_assign(rb_iseq_t
*iseq
, LINK_ANCHOR
*const ret
, const NODE
*const node
)
5634 int line
= nd_line(node
);
5635 const NODE
*line_node
= node
;
5636 LABEL
*fail_label
= NEW_LABEL(line
), *end_label
= NEW_LABEL(line
);
5638 #if !(defined(NAMED_CAPTURE_BY_SVAR) && NAMED_CAPTURE_BY_SVAR-0)
5639 ADD_INSN1(ret
, line_node
, getglobal
, ID2SYM(idBACKREF
));
5641 ADD_INSN2(ret
, line_node
, getspecial
, INT2FIX(1) /* '~' */, INT2FIX(0));
5643 ADD_INSN(ret
, line_node
, dup
);
5644 ADD_INSNL(ret
, line_node
, branchunless
, fail_label
);
5646 for (vars
= node
; vars
; vars
= vars
->nd_next
) {
5648 if (vars
->nd_next
) {
5649 ADD_INSN(ret
, line_node
, dup
);
5652 NO_CHECK(COMPILE_POPPED(ret
, "capture", vars
->nd_head
));
5653 last
= last
->next
; /* putobject :var */
5654 cap
= new_insn_send(iseq
, line_node
, idAREF
, INT2FIX(1),
5655 NULL
, INT2FIX(0), NULL
);
5656 ELEM_INSERT_PREV(last
->next
, (LINK_ELEMENT
*)cap
);
5657 #if !defined(NAMED_CAPTURE_SINGLE_OPT) || NAMED_CAPTURE_SINGLE_OPT-0
5658 if (!vars
->nd_next
&& vars
== node
) {
5663 ADD_INSNL(nom
, line_node
, jump
, end_label
);
5664 ADD_LABEL(nom
, fail_label
);
5665 # if 0 /* $~ must be MatchData or nil */
5666 ADD_INSN(nom
, line_node
, pop
);
5667 ADD_INSN(nom
, line_node
, putnil
);
5669 ADD_LABEL(nom
, end_label
);
5670 (nom
->last
->next
= cap
->link
.next
)->prev
= nom
->last
;
5671 (cap
->link
.next
= nom
->anchor
.next
)->prev
= &cap
->link
;
5676 ADD_INSNL(ret
, line_node
, jump
, end_label
);
5677 ADD_LABEL(ret
, fail_label
);
5678 ADD_INSN(ret
, line_node
, pop
);
5679 for (vars
= node
; vars
; vars
= vars
->nd_next
) {
5681 NO_CHECK(COMPILE_POPPED(ret
, "capture", vars
->nd_head
));
5682 last
= last
->next
; /* putobject :var */
5683 ((INSN
*)last
)->insn_id
= BIN(putnil
);
5684 ((INSN
*)last
)->operand_size
= 0;
5686 ADD_LABEL(ret
, end_label
);
5690 optimizable_range_item_p(const NODE
*n
)
5692 if (!n
) return FALSE
;
5693 switch (nd_type(n
)) {
5695 return RB_INTEGER_TYPE_P(n
->nd_lit
);
5704 compile_if(rb_iseq_t
*iseq
, LINK_ANCHOR
*const ret
, const NODE
*const node
, int popped
, const enum node_type type
)
5706 struct rb_iseq_constant_body
*const body
= iseq
->body
;
5707 const NODE
*const node_body
= type
== NODE_IF
? node
->nd_body
: node
->nd_else
;
5708 const NODE
*const node_else
= type
== NODE_IF
? node
->nd_else
: node
->nd_body
;
5710 const int line
= nd_line(node
);
5711 const NODE
*line_node
= node
;
5712 DECL_ANCHOR(cond_seq
);
5713 DECL_ANCHOR(then_seq
);
5714 DECL_ANCHOR(else_seq
);
5715 LABEL
*then_label
, *else_label
, *end_label
;
5716 VALUE branches
= Qfalse
;
5718 VALUE catch_table
= ISEQ_COMPILE_DATA(iseq
)->catch_table_ary
;
5719 long catch_table_size
= NIL_P(catch_table
) ? 0 : RARRAY_LEN(catch_table
);
5721 INIT_ANCHOR(cond_seq
);
5722 INIT_ANCHOR(then_seq
);
5723 INIT_ANCHOR(else_seq
);
5724 then_label
= NEW_LABEL(line
);
5725 else_label
= NEW_LABEL(line
);
5728 compile_branch_condition(iseq
, cond_seq
, node
->nd_cond
,
5729 then_label
, else_label
);
5731 ci_size
= body
->ci_size
;
5732 CHECK(COMPILE_(then_seq
, "then", node_body
, popped
));
5733 catch_table
= ISEQ_COMPILE_DATA(iseq
)->catch_table_ary
;
5734 if (!then_label
->refcnt
) {
5735 body
->ci_size
= ci_size
;
5736 if (!NIL_P(catch_table
)) rb_ary_set_len(catch_table
, catch_table_size
);
5739 if (!NIL_P(catch_table
)) catch_table_size
= RARRAY_LEN(catch_table
);
5742 ci_size
= body
->ci_size
;
5743 CHECK(COMPILE_(else_seq
, "else", node_else
, popped
));
5744 catch_table
= ISEQ_COMPILE_DATA(iseq
)->catch_table_ary
;
5745 if (!else_label
->refcnt
) {
5746 body
->ci_size
= ci_size
;
5747 if (!NIL_P(catch_table
)) rb_ary_set_len(catch_table
, catch_table_size
);
5750 if (!NIL_P(catch_table
)) catch_table_size
= RARRAY_LEN(catch_table
);
5753 ADD_SEQ(ret
, cond_seq
);
5755 if (then_label
->refcnt
&& else_label
->refcnt
) {
5756 branches
= decl_branch_base(iseq
, node
, type
== NODE_IF
? "if" : "unless");
5759 if (then_label
->refcnt
) {
5760 ADD_LABEL(ret
, then_label
);
5761 if (else_label
->refcnt
) {
5762 add_trace_branch_coverage(
5765 node_body
? node_body
: node
,
5767 type
== NODE_IF
? "then" : "else",
5769 end_label
= NEW_LABEL(line
);
5770 ADD_INSNL(then_seq
, line_node
, jump
, end_label
);
5772 ADD_INSN(then_seq
, line_node
, pop
);
5775 ADD_SEQ(ret
, then_seq
);
5778 if (else_label
->refcnt
) {
5779 ADD_LABEL(ret
, else_label
);
5780 if (then_label
->refcnt
) {
5781 add_trace_branch_coverage(
5784 node_else
? node_else
: node
,
5786 type
== NODE_IF
? "else" : "then",
5789 ADD_SEQ(ret
, else_seq
);
5793 ADD_LABEL(ret
, end_label
);
5800 compile_case(rb_iseq_t
*iseq
, LINK_ANCHOR
*const ret
, const NODE
*const orig_node
, int popped
)
5803 const NODE
*node
= orig_node
;
5804 LABEL
*endlabel
, *elselabel
;
5806 DECL_ANCHOR(body_seq
);
5807 DECL_ANCHOR(cond_seq
);
5808 int only_special_literals
= 1;
5809 VALUE literals
= rb_hash_new();
5811 enum node_type type
;
5812 const NODE
*line_node
;
5813 VALUE branches
= Qfalse
;
5817 INIT_ANCHOR(body_seq
);
5818 INIT_ANCHOR(cond_seq
);
5820 RHASH_TBL_RAW(literals
)->type
= &cdhash_type
;
5822 CHECK(COMPILE(head
, "case base", node
->nd_head
));
5824 branches
= decl_branch_base(iseq
, node
, "case");
5826 node
= node
->nd_body
;
5827 EXPECT_NODE("NODE_CASE", node
, NODE_WHEN
, COMPILE_NG
);
5828 type
= nd_type(node
);
5829 line
= nd_line(node
);
5832 endlabel
= NEW_LABEL(line
);
5833 elselabel
= NEW_LABEL(line
);
5835 ADD_SEQ(ret
, head
); /* case VAL */
5837 while (type
== NODE_WHEN
) {
5840 l1
= NEW_LABEL(line
);
5841 ADD_LABEL(body_seq
, l1
);
5842 ADD_INSN(body_seq
, line_node
, pop
);
5843 add_trace_branch_coverage(
5846 node
->nd_body
? node
->nd_body
: node
,
5850 CHECK(COMPILE_(body_seq
, "when body", node
->nd_body
, popped
));
5851 ADD_INSNL(body_seq
, line_node
, jump
, endlabel
);
5853 vals
= node
->nd_head
;
5855 switch (nd_type(vals
)) {
5857 only_special_literals
= when_vals(iseq
, cond_seq
, vals
, l1
, only_special_literals
, literals
);
5858 if (only_special_literals
< 0) return COMPILE_NG
;
5863 only_special_literals
= 0;
5864 CHECK(when_splat_vals(iseq
, cond_seq
, vals
, l1
, only_special_literals
, literals
));
5867 UNKNOWN_NODE("NODE_CASE", vals
, COMPILE_NG
);
5871 EXPECT_NODE_NONULL("NODE_CASE", node
, NODE_LIST
, COMPILE_NG
);
5874 node
= node
->nd_next
;
5878 type
= nd_type(node
);
5879 line
= nd_line(node
);
5884 ADD_LABEL(cond_seq
, elselabel
);
5885 ADD_INSN(cond_seq
, line_node
, pop
);
5886 add_trace_branch_coverage(iseq
, cond_seq
, node
, branch_id
, "else", branches
);
5887 CHECK(COMPILE_(cond_seq
, "else", node
, popped
));
5888 ADD_INSNL(cond_seq
, line_node
, jump
, endlabel
);
5891 debugs("== else (implicit)\n");
5892 ADD_LABEL(cond_seq
, elselabel
);
5893 ADD_INSN(cond_seq
, orig_node
, pop
);
5894 add_trace_branch_coverage(iseq
, cond_seq
, orig_node
, branch_id
, "else", branches
);
5896 ADD_INSN(cond_seq
, orig_node
, putnil
);
5898 ADD_INSNL(cond_seq
, orig_node
, jump
, endlabel
);
5901 if (only_special_literals
&& ISEQ_COMPILE_DATA(iseq
)->option
->specialized_instruction
) {
5902 ADD_INSN(ret
, orig_node
, dup
);
5903 ADD_INSN2(ret
, orig_node
, opt_case_dispatch
, literals
, elselabel
);
5904 RB_OBJ_WRITTEN(iseq
, Qundef
, literals
);
5905 LABEL_REF(elselabel
);
5908 ADD_SEQ(ret
, cond_seq
);
5909 ADD_SEQ(ret
, body_seq
);
5910 ADD_LABEL(ret
, endlabel
);
5915 compile_case2(rb_iseq_t
*iseq
, LINK_ANCHOR
*const ret
, const NODE
*const orig_node
, int popped
)
5919 const NODE
*node
= orig_node
->nd_body
;
5921 DECL_ANCHOR(body_seq
);
5922 VALUE branches
= Qfalse
;
5925 branches
= decl_branch_base(iseq
, orig_node
, "case");
5927 INIT_ANCHOR(body_seq
);
5928 endlabel
= NEW_LABEL(nd_line(node
));
5930 while (node
&& nd_type_p(node
, NODE_WHEN
)) {
5931 const int line
= nd_line(node
);
5932 LABEL
*l1
= NEW_LABEL(line
);
5933 ADD_LABEL(body_seq
, l1
);
5934 add_trace_branch_coverage(
5937 node
->nd_body
? node
->nd_body
: node
,
5941 CHECK(COMPILE_(body_seq
, "when", node
->nd_body
, popped
));
5942 ADD_INSNL(body_seq
, node
, jump
, endlabel
);
5944 vals
= node
->nd_head
;
5946 EXPECT_NODE_NONULL("NODE_WHEN", node
, NODE_LIST
, COMPILE_NG
);
5948 switch (nd_type(vals
)) {
5952 val
= vals
->nd_head
;
5953 lnext
= NEW_LABEL(nd_line(val
));
5954 debug_compile("== when2\n", (void)0);
5955 CHECK(compile_branch_condition(iseq
, ret
, val
, l1
, lnext
));
5956 ADD_LABEL(ret
, lnext
);
5957 vals
= vals
->nd_next
;
5963 ADD_INSN(ret
, vals
, putnil
);
5964 CHECK(COMPILE(ret
, "when2/cond splat", vals
));
5965 ADD_INSN1(ret
, vals
, checkmatch
, INT2FIX(VM_CHECKMATCH_TYPE_WHEN
| VM_CHECKMATCH_ARRAY
));
5966 ADD_INSNL(ret
, vals
, branchif
, l1
);
5969 UNKNOWN_NODE("NODE_WHEN", vals
, COMPILE_NG
);
5971 node
= node
->nd_next
;
5974 add_trace_branch_coverage(
5977 node
? node
: orig_node
,
5981 CHECK(COMPILE_(ret
, "else", node
, popped
));
5982 ADD_INSNL(ret
, orig_node
, jump
, endlabel
);
5984 ADD_SEQ(ret
, body_seq
);
5985 ADD_LABEL(ret
, endlabel
);
5989 static int iseq_compile_pattern_match(rb_iseq_t
*iseq
, LINK_ANCHOR
*const ret
, const NODE
*const node
, LABEL
*unmatched
, bool in_single_pattern
, bool in_alt_pattern
, int base_index
, bool use_deconstructed_cache
);
5991 static int iseq_compile_pattern_constant(rb_iseq_t
*iseq
, LINK_ANCHOR
*const ret
, const NODE
*const node
, LABEL
*match_failed
, bool in_single_pattern
, int base_index
);
5992 static int iseq_compile_array_deconstruct(rb_iseq_t
*iseq
, LINK_ANCHOR
*const ret
, const NODE
*const node
, LABEL
*deconstruct
, LABEL
*deconstructed
, LABEL
*match_failed
, LABEL
*type_error
, bool in_single_pattern
, int base_index
, bool use_deconstructed_cache
);
5993 static int iseq_compile_pattern_set_general_errmsg(rb_iseq_t
*iseq
, LINK_ANCHOR
*const ret
, const NODE
*const node
, VALUE errmsg
, int base_index
);
5994 static int iseq_compile_pattern_set_length_errmsg(rb_iseq_t
*iseq
, LINK_ANCHOR
*const ret
, const NODE
*const node
, VALUE errmsg
, VALUE pattern_length
, int base_index
);
5995 static int iseq_compile_pattern_set_eqq_errmsg(rb_iseq_t
*iseq
, LINK_ANCHOR
*const ret
, const NODE
*const node
, int base_index
);
5997 #define CASE3_BI_OFFSET_DECONSTRUCTED_CACHE 0
5998 #define CASE3_BI_OFFSET_ERROR_STRING 1
5999 #define CASE3_BI_OFFSET_KEY_ERROR_P 2
6000 #define CASE3_BI_OFFSET_KEY_ERROR_MATCHEE 3
6001 #define CASE3_BI_OFFSET_KEY_ERROR_KEY 4
6004 iseq_compile_pattern_each(rb_iseq_t
*iseq
, LINK_ANCHOR
*const ret
, const NODE
*const node
, LABEL
*matched
, LABEL
*unmatched
, bool in_single_pattern
, bool in_alt_pattern
, int base_index
, bool use_deconstructed_cache
)
6006 const int line
= nd_line(node
);
6007 const NODE
*line_node
= node
;
6009 switch (nd_type(node
)) {
6012 * if pattern.use_rest_num?
6015 * if pattern.has_constant_node?
6016 * unless pattern.constant === obj
6020 * unless obj.respond_to?(:deconstruct)
6023 * d = obj.deconstruct
6024 * unless Array === d
6027 * min_argc = pattern.pre_args_num + pattern.post_args_num
6028 * if pattern.has_rest_arg?
6029 * unless d.length >= min_argc
6033 * unless d.length == min_argc
6037 * pattern.pre_args_num.each do |i|
6038 * unless pattern.pre_args[i].match?(d[i])
6042 * if pattern.use_rest_num?
6043 * rest_num = d.length - min_argc
6044 * if pattern.has_rest_arg? && pattern.has_rest_arg_id # not `*`, but `*rest`
6045 * unless pattern.rest_arg.match?(d[pattern.pre_args_num, rest_num])
6050 * pattern.post_args_num.each do |i|
6051 * j = pattern.pre_args_num + i
6053 * unless pattern.post_args[i].match?(d[j])
6059 * FrozenCore.raise TypeError
6063 struct rb_ary_pattern_info
*apinfo
= node
->nd_apinfo
;
6064 const NODE
*args
= apinfo
->pre_args
;
6065 const int pre_args_num
= apinfo
->pre_args
? rb_long2int(apinfo
->pre_args
->nd_alen
) : 0;
6066 const int post_args_num
= apinfo
->post_args
? rb_long2int(apinfo
->post_args
->nd_alen
) : 0;
6068 const int min_argc
= pre_args_num
+ post_args_num
;
6069 const int use_rest_num
= apinfo
->rest_arg
&& (NODE_NAMED_REST_P(apinfo
->rest_arg
) ||
6070 (!NODE_NAMED_REST_P(apinfo
->rest_arg
) && post_args_num
> 0));
6072 LABEL
*match_failed
, *type_error
, *deconstruct
, *deconstructed
;
6074 match_failed
= NEW_LABEL(line
);
6075 type_error
= NEW_LABEL(line
);
6076 deconstruct
= NEW_LABEL(line
);
6077 deconstructed
= NEW_LABEL(line
);
6080 ADD_INSN1(ret
, line_node
, putobject
, INT2FIX(0)); /* allocate stack for rest_num */
6081 ADD_INSN(ret
, line_node
, swap
);
6087 CHECK(iseq_compile_pattern_constant(iseq
, ret
, node
, match_failed
, in_single_pattern
, base_index
));
6089 CHECK(iseq_compile_array_deconstruct(iseq
, ret
, node
, deconstruct
, deconstructed
, match_failed
, type_error
, in_single_pattern
, base_index
, use_deconstructed_cache
));
6091 ADD_INSN(ret
, line_node
, dup
);
6092 ADD_SEND(ret
, line_node
, idLength
, INT2FIX(0));
6093 ADD_INSN1(ret
, line_node
, putobject
, INT2FIX(min_argc
));
6094 ADD_SEND(ret
, line_node
, apinfo
->rest_arg
? idGE
: idEq
, INT2FIX(1)); // (1)
6095 if (in_single_pattern
) {
6096 CHECK(iseq_compile_pattern_set_length_errmsg(iseq
, ret
, node
,
6097 apinfo
->rest_arg
? rb_fstring_lit("%p length mismatch (given %p, expected %p+)") :
6098 rb_fstring_lit("%p length mismatch (given %p, expected %p)"),
6099 INT2FIX(min_argc
), base_index
+ 1 /* (1) */));
6101 ADD_INSNL(ret
, line_node
, branchunless
, match_failed
);
6103 for (i
= 0; i
< pre_args_num
; i
++) {
6104 ADD_INSN(ret
, line_node
, dup
);
6105 ADD_INSN1(ret
, line_node
, putobject
, INT2FIX(i
));
6106 ADD_SEND(ret
, line_node
, idAREF
, INT2FIX(1)); // (2)
6107 CHECK(iseq_compile_pattern_match(iseq
, ret
, args
->nd_head
, match_failed
, in_single_pattern
, in_alt_pattern
, base_index
+ 1 /* (2) */, false));
6108 args
= args
->nd_next
;
6111 if (apinfo
->rest_arg
) {
6112 if (NODE_NAMED_REST_P(apinfo
->rest_arg
)) {
6113 ADD_INSN(ret
, line_node
, dup
);
6114 ADD_INSN1(ret
, line_node
, putobject
, INT2FIX(pre_args_num
));
6115 ADD_INSN1(ret
, line_node
, topn
, INT2FIX(1));
6116 ADD_SEND(ret
, line_node
, idLength
, INT2FIX(0));
6117 ADD_INSN1(ret
, line_node
, putobject
, INT2FIX(min_argc
));
6118 ADD_SEND(ret
, line_node
, idMINUS
, INT2FIX(1));
6119 ADD_INSN1(ret
, line_node
, setn
, INT2FIX(4));
6120 ADD_SEND(ret
, line_node
, idAREF
, INT2FIX(2)); // (3)
6122 CHECK(iseq_compile_pattern_match(iseq
, ret
, apinfo
->rest_arg
, match_failed
, in_single_pattern
, in_alt_pattern
, base_index
+ 1 /* (3) */, false));
6125 if (post_args_num
> 0) {
6126 ADD_INSN(ret
, line_node
, dup
);
6127 ADD_SEND(ret
, line_node
, idLength
, INT2FIX(0));
6128 ADD_INSN1(ret
, line_node
, putobject
, INT2FIX(min_argc
));
6129 ADD_SEND(ret
, line_node
, idMINUS
, INT2FIX(1));
6130 ADD_INSN1(ret
, line_node
, setn
, INT2FIX(2));
6131 ADD_INSN(ret
, line_node
, pop
);
6136 args
= apinfo
->post_args
;
6137 for (i
= 0; i
< post_args_num
; i
++) {
6138 ADD_INSN(ret
, line_node
, dup
);
6140 ADD_INSN1(ret
, line_node
, putobject
, INT2FIX(pre_args_num
+ i
));
6141 ADD_INSN1(ret
, line_node
, topn
, INT2FIX(3));
6142 ADD_SEND(ret
, line_node
, idPLUS
, INT2FIX(1));
6144 ADD_SEND(ret
, line_node
, idAREF
, INT2FIX(1)); // (4)
6145 CHECK(iseq_compile_pattern_match(iseq
, ret
, args
->nd_head
, match_failed
, in_single_pattern
, in_alt_pattern
, base_index
+ 1 /* (4) */, false));
6146 args
= args
->nd_next
;
6149 ADD_INSN(ret
, line_node
, pop
);
6151 ADD_INSN(ret
, line_node
, pop
);
6153 ADD_INSNL(ret
, line_node
, jump
, matched
);
6154 ADD_INSN(ret
, line_node
, putnil
);
6156 ADD_INSN(ret
, line_node
, putnil
);
6159 ADD_LABEL(ret
, type_error
);
6160 ADD_INSN1(ret
, line_node
, putspecialobject
, INT2FIX(VM_SPECIAL_OBJECT_VMCORE
));
6161 ADD_INSN1(ret
, line_node
, putobject
, rb_eTypeError
);
6162 ADD_INSN1(ret
, line_node
, putobject
, rb_fstring_lit("deconstruct must return Array"));
6163 ADD_SEND(ret
, line_node
, id_core_raise
, INT2FIX(2));
6164 ADD_INSN(ret
, line_node
, pop
);
6166 ADD_LABEL(ret
, match_failed
);
6167 ADD_INSN(ret
, line_node
, pop
);
6169 ADD_INSN(ret
, line_node
, pop
);
6171 ADD_INSNL(ret
, line_node
, jump
, unmatched
);
6177 * if pattern.has_constant_node?
6178 * unless pattern.constant === obj
6182 * unless obj.respond_to?(:deconstruct)
6185 * d = obj.deconstruct
6186 * unless Array === d
6189 * unless d.length >= pattern.args_num
6195 * limit = d.length - pattern.args_num
6198 * if pattern.args_num.times.all? {|j| pattern.args[j].match?(d[i+j]) }
6199 * if pattern.has_pre_rest_arg_id
6200 * unless pattern.pre_rest_arg.match?(d[0, i])
6204 * if pattern.has_post_rest_arg_id
6205 * unless pattern.post_rest_arg.match?(d[i+pattern.args_num, len])
6209 * goto find_succeeded
6220 * FrozenCore.raise TypeError
6224 struct rb_fnd_pattern_info
*fpinfo
= node
->nd_fpinfo
;
6225 const NODE
*args
= fpinfo
->args
;
6226 const int args_num
= fpinfo
->args
? rb_long2int(fpinfo
->args
->nd_alen
) : 0;
6228 LABEL
*match_failed
, *type_error
, *deconstruct
, *deconstructed
;
6229 match_failed
= NEW_LABEL(line
);
6230 type_error
= NEW_LABEL(line
);
6231 deconstruct
= NEW_LABEL(line
);
6232 deconstructed
= NEW_LABEL(line
);
6234 CHECK(iseq_compile_pattern_constant(iseq
, ret
, node
, match_failed
, in_single_pattern
, base_index
));
6236 CHECK(iseq_compile_array_deconstruct(iseq
, ret
, node
, deconstruct
, deconstructed
, match_failed
, type_error
, in_single_pattern
, base_index
, use_deconstructed_cache
));
6238 ADD_INSN(ret
, line_node
, dup
);
6239 ADD_SEND(ret
, line_node
, idLength
, INT2FIX(0));
6240 ADD_INSN1(ret
, line_node
, putobject
, INT2FIX(args_num
));
6241 ADD_SEND(ret
, line_node
, idGE
, INT2FIX(1)); // (1)
6242 if (in_single_pattern
) {
6243 CHECK(iseq_compile_pattern_set_length_errmsg(iseq
, ret
, node
, rb_fstring_lit("%p length mismatch (given %p, expected %p+)"), INT2FIX(args_num
), base_index
+ 1 /* (1) */));
6245 ADD_INSNL(ret
, line_node
, branchunless
, match_failed
);
6248 LABEL
*while_begin
= NEW_LABEL(nd_line(node
));
6249 LABEL
*next_loop
= NEW_LABEL(nd_line(node
));
6250 LABEL
*find_succeeded
= NEW_LABEL(line
);
6251 LABEL
*find_failed
= NEW_LABEL(nd_line(node
));
6254 ADD_INSN(ret
, line_node
, dup
); /* allocate stack for len */
6255 ADD_SEND(ret
, line_node
, idLength
, INT2FIX(0)); // (2)
6257 ADD_INSN(ret
, line_node
, dup
); /* allocate stack for limit */
6258 ADD_INSN1(ret
, line_node
, putobject
, INT2FIX(args_num
));
6259 ADD_SEND(ret
, line_node
, idMINUS
, INT2FIX(1)); // (3)
6261 ADD_INSN1(ret
, line_node
, putobject
, INT2FIX(0)); /* allocate stack for i */ // (4)
6263 ADD_LABEL(ret
, while_begin
);
6265 ADD_INSN(ret
, line_node
, dup
);
6266 ADD_INSN1(ret
, line_node
, topn
, INT2FIX(2));
6267 ADD_SEND(ret
, line_node
, idLE
, INT2FIX(1));
6268 ADD_INSNL(ret
, line_node
, branchunless
, find_failed
);
6270 for (j
= 0; j
< args_num
; j
++) {
6271 ADD_INSN1(ret
, line_node
, topn
, INT2FIX(3));
6272 ADD_INSN1(ret
, line_node
, topn
, INT2FIX(1));
6274 ADD_INSN1(ret
, line_node
, putobject
, INT2FIX(j
));
6275 ADD_SEND(ret
, line_node
, idPLUS
, INT2FIX(1));
6277 ADD_SEND(ret
, line_node
, idAREF
, INT2FIX(1)); // (5)
6279 CHECK(iseq_compile_pattern_match(iseq
, ret
, args
->nd_head
, next_loop
, in_single_pattern
, in_alt_pattern
, base_index
+ 4 /* (2), (3), (4), (5) */, false));
6280 args
= args
->nd_next
;
6283 if (NODE_NAMED_REST_P(fpinfo
->pre_rest_arg
)) {
6284 ADD_INSN1(ret
, line_node
, topn
, INT2FIX(3));
6285 ADD_INSN1(ret
, line_node
, putobject
, INT2FIX(0));
6286 ADD_INSN1(ret
, line_node
, topn
, INT2FIX(2));
6287 ADD_SEND(ret
, line_node
, idAREF
, INT2FIX(2)); // (6)
6288 CHECK(iseq_compile_pattern_match(iseq
, ret
, fpinfo
->pre_rest_arg
, find_failed
, in_single_pattern
, in_alt_pattern
, base_index
+ 4 /* (2), (3), (4), (6) */, false));
6290 if (NODE_NAMED_REST_P(fpinfo
->post_rest_arg
)) {
6291 ADD_INSN1(ret
, line_node
, topn
, INT2FIX(3));
6292 ADD_INSN1(ret
, line_node
, topn
, INT2FIX(1));
6293 ADD_INSN1(ret
, line_node
, putobject
, INT2FIX(args_num
));
6294 ADD_SEND(ret
, line_node
, idPLUS
, INT2FIX(1));
6295 ADD_INSN1(ret
, line_node
, topn
, INT2FIX(3));
6296 ADD_SEND(ret
, line_node
, idAREF
, INT2FIX(2)); // (7)
6297 CHECK(iseq_compile_pattern_match(iseq
, ret
, fpinfo
->post_rest_arg
, find_failed
, in_single_pattern
, in_alt_pattern
, base_index
+ 4 /* (2), (3),(4), (7) */, false));
6299 ADD_INSNL(ret
, line_node
, jump
, find_succeeded
);
6301 ADD_LABEL(ret
, next_loop
);
6302 ADD_INSN1(ret
, line_node
, putobject
, INT2FIX(1));
6303 ADD_SEND(ret
, line_node
, idPLUS
, INT2FIX(1));
6304 ADD_INSNL(ret
, line_node
, jump
, while_begin
);
6306 ADD_LABEL(ret
, find_failed
);
6307 ADD_INSN1(ret
, line_node
, adjuststack
, INT2FIX(3));
6308 if (in_single_pattern
) {
6309 ADD_INSN1(ret
, line_node
, putspecialobject
, INT2FIX(VM_SPECIAL_OBJECT_VMCORE
));
6310 ADD_INSN1(ret
, line_node
, putobject
, rb_fstring_lit("%p does not match to find pattern"));
6311 ADD_INSN1(ret
, line_node
, topn
, INT2FIX(2));
6312 ADD_SEND(ret
, line_node
, id_core_sprintf
, INT2FIX(2)); // (8)
6313 ADD_INSN1(ret
, line_node
, setn
, INT2FIX(base_index
+ CASE3_BI_OFFSET_ERROR_STRING
+ 1 /* (8) */)); // (9)
6315 ADD_INSN1(ret
, line_node
, putobject
, Qfalse
);
6316 ADD_INSN1(ret
, line_node
, setn
, INT2FIX(base_index
+ CASE3_BI_OFFSET_KEY_ERROR_P
+ 2 /* (8), (9) */));
6318 ADD_INSN(ret
, line_node
, pop
);
6319 ADD_INSN(ret
, line_node
, pop
);
6321 ADD_INSNL(ret
, line_node
, jump
, match_failed
);
6322 ADD_INSN1(ret
, line_node
, dupn
, INT2FIX(3));
6324 ADD_LABEL(ret
, find_succeeded
);
6325 ADD_INSN1(ret
, line_node
, adjuststack
, INT2FIX(3));
6328 ADD_INSN(ret
, line_node
, pop
);
6329 ADD_INSNL(ret
, line_node
, jump
, matched
);
6330 ADD_INSN(ret
, line_node
, putnil
);
6332 ADD_LABEL(ret
, type_error
);
6333 ADD_INSN1(ret
, line_node
, putspecialobject
, INT2FIX(VM_SPECIAL_OBJECT_VMCORE
));
6334 ADD_INSN1(ret
, line_node
, putobject
, rb_eTypeError
);
6335 ADD_INSN1(ret
, line_node
, putobject
, rb_fstring_lit("deconstruct must return Array"));
6336 ADD_SEND(ret
, line_node
, id_core_raise
, INT2FIX(2));
6337 ADD_INSN(ret
, line_node
, pop
);
6339 ADD_LABEL(ret
, match_failed
);
6340 ADD_INSN(ret
, line_node
, pop
);
6341 ADD_INSNL(ret
, line_node
, jump
, unmatched
);
6348 * if pattern.has_kw_args_node? && !pattern.has_kw_rest_arg_node?
6349 * keys = pattern.kw_args_node.keys
6351 * if pattern.has_constant_node?
6352 * unless pattern.constant === obj
6356 * unless obj.respond_to?(:deconstruct_keys)
6359 * d = obj.deconstruct_keys(keys)
6363 * if pattern.has_kw_rest_arg_node?
6366 * if pattern.has_kw_args_node?
6367 * pattern.kw_args_node.each |k,|
6372 * pattern.kw_args_node.each |k, pat|
6373 * if pattern.has_kw_rest_arg_node?
6374 * unless pat.match?(d.delete(k))
6378 * unless pat.match?(d[k])
6388 * if pattern.has_kw_rest_arg_node?
6389 * if pattern.no_rest_keyword?
6394 * unless pattern.kw_rest_arg_node.match?(d)
6401 * FrozenCore.raise TypeError
6405 LABEL
*match_failed
, *type_error
;
6408 match_failed
= NEW_LABEL(line
);
6409 type_error
= NEW_LABEL(line
);
6411 if (node
->nd_pkwargs
&& !node
->nd_pkwrestarg
) {
6412 const NODE
*kw_args
= node
->nd_pkwargs
->nd_head
;
6413 keys
= rb_ary_new_capa(kw_args
? kw_args
->nd_alen
/2 : 0);
6415 rb_ary_push(keys
, kw_args
->nd_head
->nd_lit
);
6416 kw_args
= kw_args
->nd_next
->nd_next
;
6420 CHECK(iseq_compile_pattern_constant(iseq
, ret
, node
, match_failed
, in_single_pattern
, base_index
));
6422 ADD_INSN(ret
, line_node
, dup
);
6423 ADD_INSN1(ret
, line_node
, putobject
, ID2SYM(rb_intern("deconstruct_keys")));
6424 ADD_SEND(ret
, line_node
, idRespond_to
, INT2FIX(1)); // (1)
6425 if (in_single_pattern
) {
6426 CHECK(iseq_compile_pattern_set_general_errmsg(iseq
, ret
, node
, rb_fstring_lit("%p does not respond to #deconstruct_keys"), base_index
+ 1 /* (1) */));
6428 ADD_INSNL(ret
, line_node
, branchunless
, match_failed
);
6431 ADD_INSN(ret
, line_node
, putnil
);
6434 ADD_INSN1(ret
, line_node
, duparray
, keys
);
6435 RB_OBJ_WRITTEN(iseq
, Qundef
, rb_obj_hide(keys
));
6437 ADD_SEND(ret
, line_node
, rb_intern("deconstruct_keys"), INT2FIX(1)); // (2)
6439 ADD_INSN(ret
, line_node
, dup
);
6440 ADD_INSN1(ret
, line_node
, checktype
, INT2FIX(T_HASH
));
6441 ADD_INSNL(ret
, line_node
, branchunless
, type_error
);
6443 if (node
->nd_pkwrestarg
) {
6444 ADD_SEND(ret
, line_node
, rb_intern("dup"), INT2FIX(0));
6447 if (node
->nd_pkwargs
) {
6451 args
= node
->nd_pkwargs
->nd_head
;
6453 DECL_ANCHOR(match_values
);
6454 INIT_ANCHOR(match_values
);
6455 keys_num
= rb_long2int(args
->nd_alen
) / 2;
6456 for (i
= 0; i
< keys_num
; i
++) {
6457 NODE
*key_node
= args
->nd_head
;
6458 NODE
*value_node
= args
->nd_next
->nd_head
;
6461 if (!nd_type_p(key_node
, NODE_LIT
)) {
6462 UNKNOWN_NODE("NODE_IN", key_node
, COMPILE_NG
);
6464 key
= key_node
->nd_lit
;
6466 ADD_INSN(ret
, line_node
, dup
);
6467 ADD_INSN1(ret
, line_node
, putobject
, key
);
6468 ADD_SEND(ret
, line_node
, rb_intern("key?"), INT2FIX(1)); // (3)
6469 if (in_single_pattern
) {
6470 LABEL
*match_succeeded
;
6471 match_succeeded
= NEW_LABEL(line
);
6473 ADD_INSN(ret
, line_node
, dup
);
6474 ADD_INSNL(ret
, line_node
, branchif
, match_succeeded
);
6476 ADD_INSN1(ret
, line_node
, putobject
, rb_str_freeze(rb_sprintf("key not found: %+"PRIsVALUE
, key
))); // (4)
6477 ADD_INSN1(ret
, line_node
, setn
, INT2FIX(base_index
+ CASE3_BI_OFFSET_ERROR_STRING
+ 2 /* (3), (4) */));
6478 ADD_INSN1(ret
, line_node
, putobject
, Qtrue
); // (5)
6479 ADD_INSN1(ret
, line_node
, setn
, INT2FIX(base_index
+ CASE3_BI_OFFSET_KEY_ERROR_P
+ 3 /* (3), (4), (5) */));
6480 ADD_INSN1(ret
, line_node
, topn
, INT2FIX(3)); // (6)
6481 ADD_INSN1(ret
, line_node
, setn
, INT2FIX(base_index
+ CASE3_BI_OFFSET_KEY_ERROR_MATCHEE
+ 4 /* (3), (4), (5), (6) */));
6482 ADD_INSN1(ret
, line_node
, putobject
, key
); // (7)
6483 ADD_INSN1(ret
, line_node
, setn
, INT2FIX(base_index
+ CASE3_BI_OFFSET_KEY_ERROR_KEY
+ 5 /* (3), (4), (5), (6), (7) */));
6485 ADD_INSN1(ret
, line_node
, adjuststack
, INT2FIX(4));
6487 ADD_LABEL(ret
, match_succeeded
);
6489 ADD_INSNL(ret
, line_node
, branchunless
, match_failed
);
6491 ADD_INSN(match_values
, line_node
, dup
);
6492 ADD_INSN1(match_values
, line_node
, putobject
, key
);
6493 ADD_SEND(match_values
, line_node
, node
->nd_pkwrestarg
? rb_intern("delete") : idAREF
, INT2FIX(1)); // (8)
6494 CHECK(iseq_compile_pattern_match(iseq
, match_values
, value_node
, match_failed
, in_single_pattern
, in_alt_pattern
, base_index
+ 1 /* (8) */, false));
6495 args
= args
->nd_next
->nd_next
;
6497 ADD_SEQ(ret
, match_values
);
6501 ADD_INSN(ret
, line_node
, dup
);
6502 ADD_SEND(ret
, line_node
, idEmptyP
, INT2FIX(0)); // (9)
6503 if (in_single_pattern
) {
6504 CHECK(iseq_compile_pattern_set_general_errmsg(iseq
, ret
, node
, rb_fstring_lit("%p is not empty"), base_index
+ 1 /* (9) */));
6506 ADD_INSNL(ret
, line_node
, branchunless
, match_failed
);
6509 if (node
->nd_pkwrestarg
) {
6510 if (node
->nd_pkwrestarg
== NODE_SPECIAL_NO_REST_KEYWORD
) {
6511 ADD_INSN(ret
, line_node
, dup
);
6512 ADD_SEND(ret
, line_node
, idEmptyP
, INT2FIX(0)); // (10)
6513 if (in_single_pattern
) {
6514 CHECK(iseq_compile_pattern_set_general_errmsg(iseq
, ret
, node
, rb_fstring_lit("rest of %p is not empty"), base_index
+ 1 /* (10) */));
6516 ADD_INSNL(ret
, line_node
, branchunless
, match_failed
);
6519 ADD_INSN(ret
, line_node
, dup
); // (11)
6520 CHECK(iseq_compile_pattern_match(iseq
, ret
, node
->nd_pkwrestarg
, match_failed
, in_single_pattern
, in_alt_pattern
, base_index
+ 1 /* (11) */, false));
6524 ADD_INSN(ret
, line_node
, pop
);
6525 ADD_INSNL(ret
, line_node
, jump
, matched
);
6526 ADD_INSN(ret
, line_node
, putnil
);
6528 ADD_LABEL(ret
, type_error
);
6529 ADD_INSN1(ret
, line_node
, putspecialobject
, INT2FIX(VM_SPECIAL_OBJECT_VMCORE
));
6530 ADD_INSN1(ret
, line_node
, putobject
, rb_eTypeError
);
6531 ADD_INSN1(ret
, line_node
, putobject
, rb_fstring_lit("deconstruct_keys must return Hash"));
6532 ADD_SEND(ret
, line_node
, id_core_raise
, INT2FIX(2));
6533 ADD_INSN(ret
, line_node
, pop
);
6535 ADD_LABEL(ret
, match_failed
);
6536 ADD_INSN(ret
, line_node
, pop
);
6537 ADD_INSNL(ret
, line_node
, jump
, unmatched
);
6564 CHECK(COMPILE(ret
, "case in literal", node
)); // (1)
6565 if (in_single_pattern
) {
6566 ADD_INSN1(ret
, line_node
, dupn
, INT2FIX(2));
6568 ADD_INSN1(ret
, line_node
, checkmatch
, INT2FIX(VM_CHECKMATCH_TYPE_CASE
)); // (2)
6569 if (in_single_pattern
) {
6570 CHECK(iseq_compile_pattern_set_eqq_errmsg(iseq
, ret
, node
, base_index
+ 2 /* (1), (2) */));
6572 ADD_INSNL(ret
, line_node
, branchif
, matched
);
6573 ADD_INSNL(ret
, line_node
, jump
, unmatched
);
6576 struct rb_iseq_constant_body
*const body
= iseq
->body
;
6577 ID id
= node
->nd_vid
;
6578 int idx
= body
->local_iseq
->body
->local_table_size
- get_local_var_idx(iseq
, id
);
6580 if (in_alt_pattern
) {
6581 const char *name
= rb_id2name(id
);
6582 if (name
&& strlen(name
) > 0 && name
[0] != '_') {
6583 COMPILE_ERROR(ERROR_ARGS
"illegal variable in alternative pattern (%"PRIsVALUE
")",
6589 ADD_SETLOCAL(ret
, line_node
, idx
, get_lvar_level(iseq
));
6590 ADD_INSNL(ret
, line_node
, jump
, matched
);
6595 ID id
= node
->nd_vid
;
6597 idx
= get_dyna_var_idx(iseq
, id
, &lv
, &ls
);
6599 if (in_alt_pattern
) {
6600 const char *name
= rb_id2name(id
);
6601 if (name
&& strlen(name
) > 0 && name
[0] != '_') {
6602 COMPILE_ERROR(ERROR_ARGS
"illegal variable in alternative pattern (%"PRIsVALUE
")",
6609 COMPILE_ERROR(ERROR_ARGS
"NODE_DASGN: unknown id (%"PRIsVALUE
")",
6613 ADD_SETLOCAL(ret
, line_node
, ls
- idx
, lv
);
6614 ADD_INSNL(ret
, line_node
, jump
, matched
);
6619 LABEL
*match_failed
;
6620 match_failed
= unmatched
;
6621 CHECK(iseq_compile_pattern_match(iseq
, ret
, node
->nd_body
, unmatched
, in_single_pattern
, in_alt_pattern
, base_index
, use_deconstructed_cache
));
6622 CHECK(COMPILE(ret
, "case in if", node
->nd_cond
));
6623 if (in_single_pattern
) {
6624 LABEL
*match_succeeded
;
6625 match_succeeded
= NEW_LABEL(line
);
6627 ADD_INSN(ret
, line_node
, dup
);
6628 if (nd_type_p(node
, NODE_IF
)) {
6629 ADD_INSNL(ret
, line_node
, branchif
, match_succeeded
);
6632 ADD_INSNL(ret
, line_node
, branchunless
, match_succeeded
);
6635 ADD_INSN1(ret
, line_node
, putobject
, rb_fstring_lit("guard clause does not return true")); // (1)
6636 ADD_INSN1(ret
, line_node
, setn
, INT2FIX(base_index
+ CASE3_BI_OFFSET_ERROR_STRING
+ 1 /* (1) */)); // (2)
6637 ADD_INSN1(ret
, line_node
, putobject
, Qfalse
);
6638 ADD_INSN1(ret
, line_node
, setn
, INT2FIX(base_index
+ CASE3_BI_OFFSET_KEY_ERROR_P
+ 2 /* (1), (2) */));
6640 ADD_INSN(ret
, line_node
, pop
);
6641 ADD_INSN(ret
, line_node
, pop
);
6643 ADD_LABEL(ret
, match_succeeded
);
6645 if (nd_type_p(node
, NODE_IF
)) {
6646 ADD_INSNL(ret
, line_node
, branchunless
, match_failed
);
6649 ADD_INSNL(ret
, line_node
, branchif
, match_failed
);
6651 ADD_INSNL(ret
, line_node
, jump
, matched
);
6656 LABEL
*match_failed
;
6657 match_failed
= NEW_LABEL(line
);
6660 if (! (nd_type_p(n
, NODE_LIST
) && n
->nd_alen
== 2)) {
6661 COMPILE_ERROR(ERROR_ARGS
"unexpected node");
6665 ADD_INSN(ret
, line_node
, dup
); // (1)
6666 CHECK(iseq_compile_pattern_match(iseq
, ret
, n
->nd_head
, match_failed
, in_single_pattern
, in_alt_pattern
, base_index
+ 1 /* (1) */, use_deconstructed_cache
));
6667 CHECK(iseq_compile_pattern_each(iseq
, ret
, n
->nd_next
->nd_head
, matched
, match_failed
, in_single_pattern
, in_alt_pattern
, base_index
, false));
6668 ADD_INSN(ret
, line_node
, putnil
);
6670 ADD_LABEL(ret
, match_failed
);
6671 ADD_INSN(ret
, line_node
, pop
);
6672 ADD_INSNL(ret
, line_node
, jump
, unmatched
);
6676 LABEL
*match_succeeded
, *fin
;
6677 match_succeeded
= NEW_LABEL(line
);
6678 fin
= NEW_LABEL(line
);
6680 ADD_INSN(ret
, line_node
, dup
); // (1)
6681 CHECK(iseq_compile_pattern_each(iseq
, ret
, node
->nd_1st
, match_succeeded
, fin
, in_single_pattern
, true, base_index
+ 1 /* (1) */, use_deconstructed_cache
));
6682 ADD_LABEL(ret
, match_succeeded
);
6683 ADD_INSN(ret
, line_node
, pop
);
6684 ADD_INSNL(ret
, line_node
, jump
, matched
);
6685 ADD_INSN(ret
, line_node
, putnil
);
6686 ADD_LABEL(ret
, fin
);
6687 CHECK(iseq_compile_pattern_each(iseq
, ret
, node
->nd_2nd
, matched
, unmatched
, in_single_pattern
, true, base_index
, use_deconstructed_cache
));
6691 UNKNOWN_NODE("NODE_IN", node
, COMPILE_NG
);
6697 iseq_compile_pattern_match(rb_iseq_t
*iseq
, LINK_ANCHOR
*const ret
, const NODE
*const node
, LABEL
*unmatched
, bool in_single_pattern
, bool in_alt_pattern
, int base_index
, bool use_deconstructed_cache
)
6699 LABEL
*fin
= NEW_LABEL(nd_line(node
));
6700 CHECK(iseq_compile_pattern_each(iseq
, ret
, node
, fin
, unmatched
, in_single_pattern
, in_alt_pattern
, base_index
, use_deconstructed_cache
));
6701 ADD_LABEL(ret
, fin
);
/* Emit the `Const === matchee` check for `in Const(...)` / `in Const[...]`
 * patterns.  Does nothing when the pattern carries no constant
 * (nd_pconst == NULL).  On mismatch, jumps to `match_failed`. */
static int
iseq_compile_pattern_constant(rb_iseq_t *iseq, LINK_ANCHOR *const ret, const NODE *const node, LABEL *match_failed, bool in_single_pattern, int base_index)
{
    const NODE *line_node = node;

    if (node->nd_pconst) {
        ADD_INSN(ret, line_node, dup); // (1) keep matchee for the pattern body
        CHECK(COMPILE(ret, "constant", node->nd_pconst)); // (2)
        if (in_single_pattern) {
            /* keep copies of (constant, matchee) for the error message below */
            ADD_INSN1(ret, line_node, dupn, INT2FIX(2));
        }
        ADD_INSN1(ret, line_node, checkmatch, INT2FIX(VM_CHECKMATCH_TYPE_CASE)); // (3)
        if (in_single_pattern) {
            CHECK(iseq_compile_pattern_set_eqq_errmsg(iseq, ret, node, base_index + 3 /* (1), (2), (3) */));
        }
        ADD_INSNL(ret, line_node, branchunless, match_failed);
    }
    return COMPILE_OK;
}
/* Call #deconstruct on the matchee for array/find patterns and verify
 * the result is a T_ARRAY.  Uses a stack slot as a cache so that a
 * single case value is only deconstructed once:
 *   cache == nil   -> not tried yet, fall through to `deconstruct`
 *   cache == false -> matchee does not respond to #deconstruct
 *   otherwise      -> the cached #deconstruct result
 * Jumps to `match_failed` when the matchee does not respond to
 * #deconstruct, or to `type_error` when #deconstruct returns a non-array. */
static int
iseq_compile_array_deconstruct(rb_iseq_t *iseq, LINK_ANCHOR *const ret, const NODE *const node, LABEL *deconstruct, LABEL *deconstructed, LABEL *match_failed, LABEL *type_error, bool in_single_pattern, int base_index, bool use_deconstructed_cache)
{
    const NODE *line_node = node;

    // NOTE: this optimization allows us to re-use the #deconstruct value
    // (or its absence).
    if (use_deconstructed_cache) {
        // If value is nil then we haven't tried to deconstruct
        ADD_INSN1(ret, line_node, topn, INT2FIX(base_index + CASE3_BI_OFFSET_DECONSTRUCTED_CACHE));
        ADD_INSNL(ret, line_node, branchnil, deconstruct);

        // If false then the value is not deconstructable
        ADD_INSN1(ret, line_node, topn, INT2FIX(base_index + CASE3_BI_OFFSET_DECONSTRUCTED_CACHE));
        ADD_INSNL(ret, line_node, branchunless, match_failed);

        // Drop value, add deconstructed to the stack and jump
        ADD_INSN(ret, line_node, pop); // (1)
        ADD_INSN1(ret, line_node, topn, INT2FIX(base_index + CASE3_BI_OFFSET_DECONSTRUCTED_CACHE - 1 /* (1) */));
        ADD_INSNL(ret, line_node, jump, deconstructed);
    }
    else {
        ADD_INSNL(ret, line_node, jump, deconstruct);
    }

    ADD_LABEL(ret, deconstruct);
    ADD_INSN(ret, line_node, dup);
    ADD_INSN1(ret, line_node, putobject, ID2SYM(rb_intern("deconstruct")));
    ADD_SEND(ret, line_node, idRespond_to, INT2FIX(1)); // (2)

    // Cache the result of respond_to? (in case it's false is stays there, if true - it's overwritten after #deconstruct)
    if (use_deconstructed_cache) {
        ADD_INSN1(ret, line_node, setn, INT2FIX(base_index + CASE3_BI_OFFSET_DECONSTRUCTED_CACHE + 1 /* (2) */));
    }

    if (in_single_pattern) {
        CHECK(iseq_compile_pattern_set_general_errmsg(iseq, ret, node, rb_fstring_lit("%p does not respond to #deconstruct"), base_index + 1 /* (2) */));
    }

    ADD_INSNL(ret, line_node, branchunless, match_failed);

    ADD_SEND(ret, line_node, rb_intern("deconstruct"), INT2FIX(0));

    // Cache the result (if it's cacheable - currently, only top-level array patterns)
    if (use_deconstructed_cache) {
        ADD_INSN1(ret, line_node, setn, INT2FIX(base_index + CASE3_BI_OFFSET_DECONSTRUCTED_CACHE));
    }

    ADD_INSN(ret, line_node, dup);
    ADD_INSN1(ret, line_node, checktype, INT2FIX(T_ARRAY));
    ADD_INSNL(ret, line_node, branchunless, type_error);

    ADD_LABEL(ret, deconstructed);

    return COMPILE_OK;
}
/* For single-pattern `case/in`: when the check value on the stack top is
 * falsy, format `errmsg` with the matchee and store it into the
 * error_string slot (and key_error_p = false) reserved by compile_case3,
 * so the eventual NoMatchingPatternError message is informative.
 * Leaves the stack as it found it apart from consuming the check value
 * copies.  Pseudocode: */
/*
 *   if match_succeeded?
 *     goto match_succeeded
 *   end
 *   error_string = FrozenCore.sprintf(errmsg, matchee)
 *   key_error_p = false
 * match_succeeded:
 */
static int
iseq_compile_pattern_set_general_errmsg(rb_iseq_t *iseq, LINK_ANCHOR *const ret, const NODE *const node, VALUE errmsg, int base_index)
{
    const int line = nd_line(node);
    const NODE *line_node = node;
    LABEL *match_succeeded = NEW_LABEL(line);

    ADD_INSN(ret, line_node, dup);
    ADD_INSNL(ret, line_node, branchif, match_succeeded);

    ADD_INSN1(ret, line_node, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
    ADD_INSN1(ret, line_node, putobject, errmsg);
    ADD_INSN1(ret, line_node, topn, INT2FIX(3)); /* the matchee */
    ADD_SEND(ret, line_node, id_core_sprintf, INT2FIX(2)); // (1)
    ADD_INSN1(ret, line_node, setn, INT2FIX(base_index + CASE3_BI_OFFSET_ERROR_STRING + 1 /* (1) */)); // (2)

    ADD_INSN1(ret, line_node, putobject, Qfalse);
    ADD_INSN1(ret, line_node, setn, INT2FIX(base_index + CASE3_BI_OFFSET_KEY_ERROR_P + 2 /* (1), (2) */));

    ADD_INSN(ret, line_node, pop);
    ADD_INSN(ret, line_node, pop);
    ADD_LABEL(ret, match_succeeded);

    return COMPILE_OK;
}
/* Like iseq_compile_pattern_set_general_errmsg, but for array/find
 * pattern length mismatches: the message is formatted with the matchee,
 * its #length, and the expected pattern length.  Pseudocode: */
/*
 *   if match_succeeded?
 *     goto match_succeeded
 *   end
 *   error_string = FrozenCore.sprintf(errmsg, matchee, matchee.length, pat.length)
 *   key_error_p = false
 * match_succeeded:
 */
static int
iseq_compile_pattern_set_length_errmsg(rb_iseq_t *iseq, LINK_ANCHOR *const ret, const NODE *const node, VALUE errmsg, VALUE pattern_length, int base_index)
{
    const int line = nd_line(node);
    const NODE *line_node = node;
    LABEL *match_succeeded = NEW_LABEL(line);

    ADD_INSN(ret, line_node, dup);
    ADD_INSNL(ret, line_node, branchif, match_succeeded);

    ADD_INSN1(ret, line_node, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
    ADD_INSN1(ret, line_node, putobject, errmsg);
    ADD_INSN1(ret, line_node, topn, INT2FIX(3)); /* the matchee */
    ADD_INSN(ret, line_node, dup);
    ADD_SEND(ret, line_node, idLength, INT2FIX(0));
    ADD_INSN1(ret, line_node, putobject, pattern_length);
    ADD_SEND(ret, line_node, id_core_sprintf, INT2FIX(4)); // (1)
    ADD_INSN1(ret, line_node, setn, INT2FIX(base_index + CASE3_BI_OFFSET_ERROR_STRING + 1 /* (1) */)); // (2)

    ADD_INSN1(ret, line_node, putobject, Qfalse);
    ADD_INSN1(ret, line_node, setn, INT2FIX(base_index + CASE3_BI_OFFSET_KEY_ERROR_P + 2/* (1), (2) */));

    ADD_INSN(ret, line_node, pop);
    ADD_INSN(ret, line_node, pop);
    ADD_LABEL(ret, match_succeeded);

    return COMPILE_OK;
}
/* Error-message helper for `===`-based pattern checks (constant and
 * value patterns).  When the check failed, records
 * "%p === %p does not return true" (pattern, matchee) in the
 * error_string slot and clears key_error_p.  On both paths it then
 * drops the extra (pattern, matchee) copies the caller dup'd for this
 * purpose.  Pseudocode: */
/*
 *   if match_succeeded?
 *     goto match_succeeded
 *   end
 *   error_string = FrozenCore.sprintf("%p === %p does not return true", pat, matchee)
 *   key_error_p = false
 * match_succeeded:
 */
static int
iseq_compile_pattern_set_eqq_errmsg(rb_iseq_t *iseq, LINK_ANCHOR *const ret, const NODE *const node, int base_index)
{
    const int line = nd_line(node);
    const NODE *line_node = node;
    LABEL *match_succeeded = NEW_LABEL(line);

    ADD_INSN(ret, line_node, dup);
    ADD_INSNL(ret, line_node, branchif, match_succeeded);

    ADD_INSN1(ret, line_node, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
    ADD_INSN1(ret, line_node, putobject, rb_fstring_lit("%p === %p does not return true"));
    ADD_INSN1(ret, line_node, topn, INT2FIX(3)); /* the pattern */
    ADD_INSN1(ret, line_node, topn, INT2FIX(5)); /* the matchee */
    ADD_SEND(ret, line_node, id_core_sprintf, INT2FIX(3)); // (1)
    ADD_INSN1(ret, line_node, setn, INT2FIX(base_index + CASE3_BI_OFFSET_ERROR_STRING + 1 /* (1) */)); // (2)

    ADD_INSN1(ret, line_node, putobject, Qfalse);
    ADD_INSN1(ret, line_node, setn, INT2FIX(base_index + CASE3_BI_OFFSET_KEY_ERROR_P + 2 /* (1), (2) */));

    ADD_INSN(ret, line_node, pop);
    ADD_INSN(ret, line_node, pop);

    ADD_LABEL(ret, match_succeeded);
    /* drop the (pattern, matchee) copies while preserving the check result */
    ADD_INSN1(ret, line_node, setn, INT2FIX(2));
    ADD_INSN(ret, line_node, pop);
    ADD_INSN(ret, line_node, pop);

    return COMPILE_OK;
}
/* Compile `case ... in ...` (pattern matching).  Emits:
 *   - `head`: stack slots for error reporting / #deconstruct caching,
 *     then the case value,
 *   - `cond_seq`: one pattern test per `in` clause, each falling through
 *     to the next on failure,
 *   - `body_seq`: the clause bodies, each jumping to `endlabel`.
 * With no explicit `else`, a NoMatchingPatternError (or, for a single
 * pattern with key errors, NoMatchingPatternKeyError) is raised. */
static int
compile_case3(rb_iseq_t *iseq, LINK_ANCHOR *const ret, const NODE *const orig_node, int popped)
{
    const NODE *pattern;
    const NODE *node = orig_node;
    LABEL *endlabel, *elselabel;
    DECL_ANCHOR(head);
    DECL_ANCHOR(body_seq);
    DECL_ANCHOR(cond_seq);
    int line;
    enum node_type type;
    const NODE *line_node;
    VALUE branches = Qfalse;
    int branch_id = 0;
    bool single_pattern;

    INIT_ANCHOR(head);
    INIT_ANCHOR(body_seq);
    INIT_ANCHOR(cond_seq);

    branches = decl_branch_base(iseq, node, "case");

    node = node->nd_body;
    EXPECT_NODE("NODE_CASE3", node, NODE_IN, COMPILE_NG);
    type = nd_type(node);
    line = nd_line(node);
    line_node = node;

    /* a single `in` clause gets richer error reporting slots below */
    single_pattern = !node->nd_next;

    endlabel = NEW_LABEL(line);
    elselabel = NEW_LABEL(line);

    if (single_pattern) {
        /* allocate stack for ... */
        ADD_INSN(head, line_node, putnil); /* key_error_key */
        ADD_INSN(head, line_node, putnil); /* key_error_matchee */
        ADD_INSN1(head, line_node, putobject, Qfalse); /* key_error_p */
        ADD_INSN(head, line_node, putnil); /* error_string */
    }
    ADD_INSN(head, line_node, putnil); /* allocate stack for cached #deconstruct value */

    CHECK(COMPILE(head, "case base", orig_node->nd_head));

    ADD_SEQ(ret, head);	/* case VAL */

    while (type == NODE_IN) {
        LABEL *l1;

        if (branch_id) {
            ADD_INSN(body_seq, line_node, putnil);
        }
        l1 = NEW_LABEL(line);
        ADD_LABEL(body_seq, l1);
        /* drop the bookkeeping slots before running the clause body */
        ADD_INSN1(body_seq, line_node, adjuststack, INT2FIX(single_pattern ? 6 : 2));
        add_trace_branch_coverage(
            iseq,
            body_seq,
            node->nd_body ? node->nd_body : node,
            branch_id++,
            "in",
            branches);
        CHECK(COMPILE_(body_seq, "in body", node->nd_body, popped));
        ADD_INSNL(body_seq, line_node, jump, endlabel);

        pattern = node->nd_head;
        if (pattern) {
            int pat_line = nd_line(pattern);
            LABEL *next_pat = NEW_LABEL(pat_line);
            ADD_INSN (cond_seq, pattern, dup); /* dup case VAL */
            // NOTE: set base_index (it's "under" the matchee value, so it's position is 2)
            CHECK(iseq_compile_pattern_each(iseq, cond_seq, pattern, l1, next_pat, single_pattern, false, 2, true));
            ADD_LABEL(cond_seq, next_pat);
            LABEL_UNREMOVABLE(next_pat);
        }
        else {
            COMPILE_ERROR(ERROR_ARGS "unexpected node");
            return COMPILE_NG;
        }

        node = node->nd_next;
        if (!node) {
            break;
        }
        type = nd_type(node);
        line = nd_line(node);
        line_node = node;
    }
    /* else */
    if (node) {
        /* explicit `else` clause */
        ADD_LABEL(cond_seq, elselabel);
        ADD_INSN(cond_seq, line_node, pop);
        ADD_INSN(cond_seq, line_node, pop); /* discard cached #deconstruct value */
        add_trace_branch_coverage(iseq, cond_seq, node, branch_id, "else", branches);
        CHECK(COMPILE_(cond_seq, "else", node, popped));
        ADD_INSNL(cond_seq, line_node, jump, endlabel);
        ADD_INSN(cond_seq, line_node, putnil);
        if (popped) {
            ADD_INSN(cond_seq, line_node, putnil);
        }
    }
    else {
        /* no `else`: raise when no pattern matched */
        debugs("== else (implicit)\n");
        ADD_LABEL(cond_seq, elselabel);
        add_trace_branch_coverage(iseq, cond_seq, orig_node, branch_id, "else", branches);
        ADD_INSN1(cond_seq, orig_node, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));

        if (single_pattern) {
            /*
             * if key_error_p
             *   FrozenCore.raise NoMatchingPatternKeyError.new(FrozenCore.sprintf("%p: %s", case_val, error_string), matchee: key_error_matchee, key: key_error_key)
             * else
             *   FrozenCore.raise NoMatchingPatternError, FrozenCore.sprintf("%p: %s", case_val, error_string)
             * end
             */
            LABEL *key_error, *fin;
            struct rb_callinfo_kwarg *kw_arg;

            key_error = NEW_LABEL(line);
            fin = NEW_LABEL(line);

            kw_arg = rb_xmalloc_mul_add(2, sizeof(VALUE), sizeof(struct rb_callinfo_kwarg));
            kw_arg->keyword_len = 2;
            kw_arg->keywords[0] = ID2SYM(rb_intern("matchee"));
            kw_arg->keywords[1] = ID2SYM(rb_intern("key"));

            ADD_INSN1(cond_seq, orig_node, topn, INT2FIX(CASE3_BI_OFFSET_KEY_ERROR_P + 2));
            ADD_INSNL(cond_seq, orig_node, branchif, key_error);
            ADD_INSN1(cond_seq, orig_node, putobject, rb_eNoMatchingPatternError);
            ADD_INSN1(cond_seq, orig_node, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
            ADD_INSN1(cond_seq, orig_node, putobject, rb_fstring_lit("%p: %s"));
            ADD_INSN1(cond_seq, orig_node, topn, INT2FIX(4)); /* case VAL */
            ADD_INSN1(cond_seq, orig_node, topn, INT2FIX(CASE3_BI_OFFSET_ERROR_STRING + 6));
            ADD_SEND(cond_seq, orig_node, id_core_sprintf, INT2FIX(3));
            ADD_SEND(cond_seq, orig_node, id_core_raise, INT2FIX(2));
            ADD_INSNL(cond_seq, orig_node, jump, fin);

            ADD_LABEL(cond_seq, key_error);
            ADD_INSN1(cond_seq, orig_node, putobject, rb_eNoMatchingPatternKeyError);
            ADD_INSN1(cond_seq, orig_node, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
            ADD_INSN1(cond_seq, orig_node, putobject, rb_fstring_lit("%p: %s"));
            ADD_INSN1(cond_seq, orig_node, topn, INT2FIX(4)); /* case VAL */
            ADD_INSN1(cond_seq, orig_node, topn, INT2FIX(CASE3_BI_OFFSET_ERROR_STRING + 6));
            ADD_SEND(cond_seq, orig_node, id_core_sprintf, INT2FIX(3));
            ADD_INSN1(cond_seq, orig_node, topn, INT2FIX(CASE3_BI_OFFSET_KEY_ERROR_MATCHEE + 4));
            ADD_INSN1(cond_seq, orig_node, topn, INT2FIX(CASE3_BI_OFFSET_KEY_ERROR_KEY + 5));
            ADD_SEND_R(cond_seq, orig_node, rb_intern("new"), INT2FIX(1), NULL, INT2FIX(VM_CALL_KWARG), kw_arg);
            ADD_SEND(cond_seq, orig_node, id_core_raise, INT2FIX(1));

            ADD_LABEL(cond_seq, fin);
        }
        else {
            ADD_INSN1(cond_seq, orig_node, putobject, rb_eNoMatchingPatternError);
            ADD_INSN1(cond_seq, orig_node, topn, INT2FIX(2));
            ADD_SEND(cond_seq, orig_node, id_core_raise, INT2FIX(2));
        }
        ADD_INSN1(cond_seq, orig_node, adjuststack, INT2FIX(single_pattern ? 7 : 3));
        if (!popped) {
            ADD_INSN(cond_seq, orig_node, putnil);
        }
        ADD_INSNL(cond_seq, orig_node, jump, endlabel);
        /* dead code after raise; keeps stack depth consistent for the verifier */
        ADD_INSN1(cond_seq, orig_node, dupn, INT2FIX(single_pattern ? 5 : 1));
        if (popped) {
            ADD_INSN(cond_seq, line_node, putnil);
        }
    }

    ADD_SEQ(ret, cond_seq);
    ADD_SEQ(ret, body_seq);
    ADD_LABEL(ret, endlabel);
    return COMPILE_OK;
}
7065 #undef CASE3_BI_OFFSET_DECONSTRUCTED_CACHE
7066 #undef CASE3_BI_OFFSET_ERROR_STRING
7067 #undef CASE3_BI_OFFSET_KEY_ERROR_P
7068 #undef CASE3_BI_OFFSET_KEY_ERROR_MATCHEE
7069 #undef CASE3_BI_OFFSET_KEY_ERROR_KEY
/* Compile `while`/`until` loops (type is NODE_WHILE or NODE_UNTIL).
 * Saves and restores the enclosing loop's start/end/redo labels in
 * ISEQ_COMPILE_DATA so `break`/`next`/`redo` inside the body target this
 * loop, and registers BREAK/NEXT/REDO catch table entries covering the
 * loop body.  nd_state distinguishes pre-test (1) from post-test
 * (begin/end while) loops. */
static int
compile_loop(rb_iseq_t *iseq, LINK_ANCHOR *const ret, const NODE *const node, int popped, const enum node_type type)
{
    const int line = (int)nd_line(node);
    const NODE *line_node = node;

    LABEL *prev_start_label = ISEQ_COMPILE_DATA(iseq)->start_label;
    LABEL *prev_end_label = ISEQ_COMPILE_DATA(iseq)->end_label;
    LABEL *prev_redo_label = ISEQ_COMPILE_DATA(iseq)->redo_label;
    int prev_loopval_popped = ISEQ_COMPILE_DATA(iseq)->loopval_popped;
    VALUE branches = Qfalse;

    struct iseq_compile_data_ensure_node_stack enl;

    LABEL *next_label = ISEQ_COMPILE_DATA(iseq)->start_label = NEW_LABEL(line); /* next */
    LABEL *redo_label = ISEQ_COMPILE_DATA(iseq)->redo_label = NEW_LABEL(line); /* redo */
    LABEL *break_label = ISEQ_COMPILE_DATA(iseq)->end_label = NEW_LABEL(line); /* break */
    LABEL *end_label = NEW_LABEL(line);
    LABEL *adjust_label = NEW_LABEL(line);

    LABEL *next_catch_label = NEW_LABEL(line);
    LABEL *tmp_label = NULL;

    ISEQ_COMPILE_DATA(iseq)->loopval_popped = 0;
    push_ensure_entry(iseq, &enl, NULL, NULL);

    if (node->nd_state == 1) {
        /* pre-test loop: evaluate the condition first */
        ADD_INSNL(ret, line_node, jump, next_label);
    }
    else {
        /* post-test (begin/end while): run the body once before testing */
        tmp_label = NEW_LABEL(line);
        ADD_INSNL(ret, line_node, jump, tmp_label);
    }
    ADD_LABEL(ret, adjust_label);
    ADD_INSN(ret, line_node, putnil);
    ADD_LABEL(ret, next_catch_label); /* landing pad for the NEXT catch entry */
    ADD_INSN(ret, line_node, pop);
    ADD_INSNL(ret, line_node, jump, next_label);
    if (tmp_label) ADD_LABEL(ret, tmp_label);

    ADD_LABEL(ret, redo_label);
    branches = decl_branch_base(iseq, node, type == NODE_WHILE ? "while" : "until");
    add_trace_branch_coverage(
        iseq,
        ret,
        node->nd_body ? node->nd_body : node,
        0,
        "body",
        branches);
    CHECK(COMPILE_POPPED(ret, "while body", node->nd_body));
    ADD_LABEL(ret, next_label);	/* next */

    if (type == NODE_WHILE) {
        compile_branch_condition(iseq, ret, node->nd_cond,
                                 redo_label, end_label);
    }
    else {
        /* until: branch targets are inverted */
        compile_branch_condition(iseq, ret, node->nd_cond,
                                 end_label, redo_label);
    }

    ADD_LABEL(ret, end_label);
    ADD_ADJUST_RESTORE(ret, adjust_label);

    if (node->nd_state == Qundef) {
        /* ADD_INSN(ret, line_node, putundef); */
        COMPILE_ERROR(ERROR_ARGS "unsupported: putundef");
        return COMPILE_NG;
    }
    else {
        /* a while/until expression evaluates to nil */
        ADD_INSN(ret, line_node, putnil);
    }

    ADD_LABEL(ret, break_label);	/* break */

    if (popped) {
        ADD_INSN(ret, line_node, pop);
    }

    ADD_CATCH_ENTRY(CATCH_TYPE_BREAK, redo_label, break_label, NULL,
                    break_label);
    ADD_CATCH_ENTRY(CATCH_TYPE_NEXT, redo_label, break_label, NULL,
                    next_catch_label);
    ADD_CATCH_ENTRY(CATCH_TYPE_REDO, redo_label, break_label, NULL,
                    ISEQ_COMPILE_DATA(iseq)->redo_label);

    /* restore the enclosing loop's compile-time state */
    ISEQ_COMPILE_DATA(iseq)->start_label = prev_start_label;
    ISEQ_COMPILE_DATA(iseq)->end_label = prev_end_label;
    ISEQ_COMPILE_DATA(iseq)->redo_label = prev_redo_label;
    ISEQ_COMPILE_DATA(iseq)->loopval_popped = prev_loopval_popped;
    ISEQ_COMPILE_DATA(iseq)->ensure_node_stack = ISEQ_COMPILE_DATA(iseq)->ensure_node_stack->prev;
    return COMPILE_OK;
}
/* Compile a block call (NODE_ITER) or `for` loop (NODE_FOR).  Builds the
 * block body as a child iseq, emits the call with that block attached
 * (for `for`, an implicit #each), and registers a BREAK catch entry so
 * `break` inside the block returns here (the retry labels support
 * `retry` semantics around the whole call). */
static int
compile_iter(rb_iseq_t *iseq, LINK_ANCHOR *const ret, const NODE *const node, int popped)
{
    const int line = nd_line(node);
    const NODE *line_node = node;
    const rb_iseq_t *prevblock = ISEQ_COMPILE_DATA(iseq)->current_block;
    LABEL *retry_label = NEW_LABEL(line);
    LABEL *retry_end_l = NEW_LABEL(line);
    const rb_iseq_t *child_iseq;

    ADD_LABEL(ret, retry_label);
    if (nd_type_p(node, NODE_FOR)) {
        /* `for`: the iterated expression is compiled first, then #each
         * is sent with the block */
        CHECK(COMPILE(ret, "iter caller (for)", node->nd_iter));

        ISEQ_COMPILE_DATA(iseq)->current_block = child_iseq =
            NEW_CHILD_ISEQ(node->nd_body, make_name_for_block(iseq),
                           ISEQ_TYPE_BLOCK, line);
        ADD_SEND_WITH_BLOCK(ret, line_node, idEach, INT2FIX(0), child_iseq);
    }
    else {
        /* ordinary block: the call itself carries the block */
        ISEQ_COMPILE_DATA(iseq)->current_block = child_iseq =
            NEW_CHILD_ISEQ(node->nd_body, make_name_for_block(iseq),
                           ISEQ_TYPE_BLOCK, line);
        CHECK(COMPILE(ret, "iter caller", node->nd_iter));
    }
    ADD_LABEL(ret, retry_end_l);

    if (popped) {
        ADD_INSN(ret, line_node, pop);
    }

    ISEQ_COMPILE_DATA(iseq)->current_block = prevblock;

    ADD_CATCH_ENTRY(CATCH_TYPE_BREAK, retry_label, retry_end_l, child_iseq, retry_end_l);
    return COMPILE_OK;
}
/* Compile the implicit argument splat for multiple assignment in `for`:
 * a single argument that converts to an Array is spread across the
 * `for` variables; otherwise the argument list itself is used. */
static int
compile_for_masgn(rb_iseq_t *iseq, LINK_ANCHOR *const ret, const NODE *const node, int popped)
{
    /* massign to var in "for"
     * (args.length == 1 && Array.try_convert(args[0])) || args
     */
    const NODE *line_node = node;
    const NODE *var = node->nd_var;
    LABEL *not_single = NEW_LABEL(nd_line(var));
    LABEL *not_ary = NEW_LABEL(nd_line(var));
    CHECK(COMPILE(ret, "for var", var));
    ADD_INSN(ret, line_node, dup);
    ADD_CALL(ret, line_node, idLength, INT2FIX(0));
    ADD_INSN1(ret, line_node, putobject, INT2FIX(1));
    ADD_CALL(ret, line_node, idEq, INT2FIX(1));
    ADD_INSNL(ret, line_node, branchunless, not_single);
    /* exactly one arg: try Array.try_convert(args[0]) */
    ADD_INSN(ret, line_node, dup);
    ADD_INSN1(ret, line_node, putobject, INT2FIX(0));
    ADD_CALL(ret, line_node, idAREF, INT2FIX(1));
    ADD_INSN1(ret, line_node, putobject, rb_cArray);
    ADD_INSN(ret, line_node, swap);
    ADD_CALL(ret, line_node, rb_intern("try_convert"), INT2FIX(1));
    ADD_INSN(ret, line_node, dup);
    ADD_INSNL(ret, line_node, branchunless, not_ary);
    /* conversion succeeded: replace args with the converted array */
    ADD_INSN(ret, line_node, swap);
    ADD_LABEL(ret, not_ary);
    ADD_INSN(ret, line_node, pop);
    ADD_LABEL(ret, not_single);
    return COMPILE_OK;
}
/* Compile `break [val]`.  Inside a compile-time-visible while/until loop
 * it becomes a direct jump to the loop's end label (running any ensure
 * code first); otherwise it walks up the parent iseqs to find a loop or
 * block to escape from and emits a `throw TAG_BREAK`.  `break` inside
 * eval is an error. */
static int
compile_break(rb_iseq_t *iseq, LINK_ANCHOR *const ret, const NODE *const node, int popped)
{
    const NODE *line_node = node;
    unsigned long throw_flag = 0;

    if (ISEQ_COMPILE_DATA(iseq)->redo_label != 0 && can_add_ensure_iseq(iseq)) {
        /* while/until: jump directly to the loop's break label */
        LABEL *splabel = NEW_LABEL(0);
        ADD_LABEL(ret, splabel);
        ADD_ADJUST(ret, line_node, ISEQ_COMPILE_DATA(iseq)->redo_label);
        CHECK(COMPILE_(ret, "break val (while/until)", node->nd_stts,
                       ISEQ_COMPILE_DATA(iseq)->loopval_popped));
        add_ensure_iseq(ret, iseq, 0);
        ADD_INSNL(ret, line_node, jump, ISEQ_COMPILE_DATA(iseq)->end_label);
        ADD_ADJUST_RESTORE(ret, splabel);

        if (!popped) {
            ADD_INSN(ret, line_node, putnil);
        }
    }
    else {
        const rb_iseq_t *ip = iseq;

        /* search enclosing iseqs for something breakable */
        while (ip) {
            if (!ISEQ_COMPILE_DATA(ip)) {
                ip = 0;
                break;
            }

            if (ISEQ_COMPILE_DATA(ip)->redo_label != 0) {
                /* break inside a loop of an enclosing iseq: non-local */
                throw_flag = VM_THROW_NO_ESCAPE_FLAG;
            }
            else if (ip->body->type == ISEQ_TYPE_BLOCK) {
                throw_flag = 0;
            }
            else if (ip->body->type == ISEQ_TYPE_EVAL) {
                COMPILE_ERROR(ERROR_ARGS "Can't escape from eval with break");
                return COMPILE_NG;
            }
            else {
                ip = ip->body->parent_iseq;
                continue;
            }

            /* escape from block */
            CHECK(COMPILE(ret, "break val (block)", node->nd_stts));
            ADD_INSN1(ret, line_node, throw, INT2FIX(throw_flag | TAG_BREAK));
            if (popped) {
                ADD_INSN(ret, line_node, pop);
            }
            return COMPILE_OK;
        }
        COMPILE_ERROR(ERROR_ARGS "Invalid break");
        return COMPILE_NG;
    }
    return COMPILE_OK;
}
/* Compile `next [val]`.  Inside a compile-time-visible while/until loop
 * it jumps to the loop's start label; inside a block it jumps to the
 * block's end label; otherwise it walks up the parent iseqs and emits a
 * `throw TAG_NEXT`.  `next` inside eval is an error. */
static int
compile_next(rb_iseq_t *iseq, LINK_ANCHOR *const ret, const NODE *const node, int popped)
{
    const NODE *line_node = node;
    unsigned long throw_flag = 0;

    if (ISEQ_COMPILE_DATA(iseq)->redo_label != 0 && can_add_ensure_iseq(iseq)) {
        LABEL *splabel = NEW_LABEL(0);
        debugs("next in while loop\n");
        ADD_LABEL(ret, splabel);
        CHECK(COMPILE(ret, "next val/valid syntax?", node->nd_stts));
        add_ensure_iseq(ret, iseq, 0);
        ADD_ADJUST(ret, line_node, ISEQ_COMPILE_DATA(iseq)->redo_label);
        ADD_INSNL(ret, line_node, jump, ISEQ_COMPILE_DATA(iseq)->start_label);
        ADD_ADJUST_RESTORE(ret, splabel);
        if (!popped) {
            ADD_INSN(ret, line_node, putnil);
        }
    }
    else if (ISEQ_COMPILE_DATA(iseq)->end_label && can_add_ensure_iseq(iseq)) {
        LABEL *splabel = NEW_LABEL(0);
        debugs("next in block\n");
        ADD_LABEL(ret, splabel);
        ADD_ADJUST(ret, line_node, ISEQ_COMPILE_DATA(iseq)->start_label);
        CHECK(COMPILE(ret, "next val", node->nd_stts));
        add_ensure_iseq(ret, iseq, 0);
        ADD_INSNL(ret, line_node, jump, ISEQ_COMPILE_DATA(iseq)->end_label);
        ADD_ADJUST_RESTORE(ret, splabel);
        splabel->unremovable = FALSE;

        if (!popped) {
            ADD_INSN(ret, line_node, putnil);
        }
    }
    else {
        const rb_iseq_t *ip = iseq;

        /* search enclosing iseqs for a loop or block to `next` out of */
        while (ip) {
            if (!ISEQ_COMPILE_DATA(ip)) {
                ip = 0;
                break;
            }

            throw_flag = VM_THROW_NO_ESCAPE_FLAG;
            if (ISEQ_COMPILE_DATA(ip)->redo_label != 0) {
                /* while loop */
                break;
            }
            else if (ip->body->type == ISEQ_TYPE_BLOCK) {
                break;
            }
            else if (ip->body->type == ISEQ_TYPE_EVAL) {
                COMPILE_ERROR(ERROR_ARGS "Can't escape from eval with next");
                return COMPILE_NG;
            }

            ip = ip->body->parent_iseq;
        }
        if (ip != 0) {
            CHECK(COMPILE(ret, "next val", node->nd_stts));
            ADD_INSN1(ret, line_node, throw, INT2FIX(throw_flag | TAG_NEXT));

            if (popped) {
                ADD_INSN(ret, line_node, pop);
            }
        }
        else {
            COMPILE_ERROR(ERROR_ARGS "Invalid next");
            return COMPILE_NG;
        }
    }
    return COMPILE_OK;
}
/* Compile `redo`.  Inside a compile-time-visible while/until loop it
 * jumps back to the loop's redo label; inside a block it jumps to the
 * block's start label; otherwise it walks up the parent iseqs and emits
 * `throw TAG_REDO` (always non-local).  `redo` inside eval is an error. */
static int
compile_redo(rb_iseq_t *iseq, LINK_ANCHOR *const ret, const NODE *const node, int popped)
{
    const NODE *line_node = node;

    if (ISEQ_COMPILE_DATA(iseq)->redo_label && can_add_ensure_iseq(iseq)) {
        LABEL *splabel = NEW_LABEL(0);

        debugs("redo in while");

        ADD_LABEL(ret, splabel);
        ADD_ADJUST(ret, line_node, ISEQ_COMPILE_DATA(iseq)->redo_label);
        add_ensure_iseq(ret, iseq, 0);
        ADD_INSNL(ret, line_node, jump, ISEQ_COMPILE_DATA(iseq)->redo_label);
        ADD_ADJUST_RESTORE(ret, splabel);
        if (!popped) {
            ADD_INSN(ret, line_node, putnil);
        }
    }
    else if (iseq->body->type != ISEQ_TYPE_EVAL && ISEQ_COMPILE_DATA(iseq)->start_label && can_add_ensure_iseq(iseq)) {
        LABEL *splabel = NEW_LABEL(0);

        debugs("redo in block");
        ADD_LABEL(ret, splabel);
        add_ensure_iseq(ret, iseq, 0);
        ADD_ADJUST(ret, line_node, ISEQ_COMPILE_DATA(iseq)->start_label);
        ADD_INSNL(ret, line_node, jump, ISEQ_COMPILE_DATA(iseq)->start_label);
        ADD_ADJUST_RESTORE(ret, splabel);

        if (!popped) {
            ADD_INSN(ret, line_node, putnil);
        }
    }
    else {
        const rb_iseq_t *ip = iseq;

        /* search enclosing iseqs for a loop or block to `redo` in */
        while (ip) {
            if (!ISEQ_COMPILE_DATA(ip)) {
                ip = 0;
                break;
            }

            if (ISEQ_COMPILE_DATA(ip)->redo_label != 0) {
                break;
            }
            else if (ip->body->type == ISEQ_TYPE_BLOCK) {
                break;
            }
            else if (ip->body->type == ISEQ_TYPE_EVAL) {
                COMPILE_ERROR(ERROR_ARGS "Can't escape from eval with redo");
                return COMPILE_NG;
            }

            ip = ip->body->parent_iseq;
        }
        if (ip != 0) {
            ADD_INSN(ret, line_node, putnil);
            ADD_INSN1(ret, line_node, throw, INT2FIX(VM_THROW_NO_ESCAPE_FLAG | TAG_REDO));

            if (popped) {
                ADD_INSN(ret, line_node, pop);
            }
        }
        else {
            COMPILE_ERROR(ERROR_ARGS "Invalid redo");
            return COMPILE_NG;
        }
    }
    return COMPILE_OK;
}
/* Compile `retry`.  Only valid directly inside a rescue clause
 * (ISEQ_TYPE_RESCUE); emits `throw TAG_RETRY`, which re-runs the
 * protected region via the RETRY catch entry registered by
 * compile_rescue.  Anywhere else it is a compile error. */
static int
compile_retry(rb_iseq_t *iseq, LINK_ANCHOR *const ret, const NODE *const node, int popped)
{
    const NODE *line_node = node;

    if (iseq->body->type == ISEQ_TYPE_RESCUE) {
        ADD_INSN(ret, line_node, putnil);
        ADD_INSN1(ret, line_node, throw, INT2FIX(TAG_RETRY));

        if (popped) {
            ADD_INSN(ret, line_node, pop);
        }
    }
    else {
        COMPILE_ERROR(ERROR_ARGS "Invalid retry");
        return COMPILE_NG;
    }
    return COMPILE_OK;
}
7457 compile_rescue(rb_iseq_t
*iseq
, LINK_ANCHOR
*const ret
, const NODE
*const node
, int popped
)
7459 const int line
= nd_line(node
);
7460 const NODE
*line_node
= node
;
7461 LABEL
*lstart
= NEW_LABEL(line
);
7462 LABEL
*lend
= NEW_LABEL(line
);
7463 LABEL
*lcont
= NEW_LABEL(line
);
7464 const rb_iseq_t
*rescue
= NEW_CHILD_ISEQ(node
->nd_resq
,
7465 rb_str_concat(rb_str_new2("rescue in "), iseq
->body
->location
.label
),
7466 ISEQ_TYPE_RESCUE
, line
);
7468 lstart
->rescued
= LABEL_RESCUE_BEG
;
7469 lend
->rescued
= LABEL_RESCUE_END
;
7470 ADD_LABEL(ret
, lstart
);
7472 bool prev_in_rescue
= ISEQ_COMPILE_DATA(iseq
)->in_rescue
;
7473 ISEQ_COMPILE_DATA(iseq
)->in_rescue
= true;
7475 CHECK(COMPILE(ret
, "rescue head", node
->nd_head
));
7477 ISEQ_COMPILE_DATA(iseq
)->in_rescue
= prev_in_rescue
;
7479 ADD_LABEL(ret
, lend
);
7480 if (node
->nd_else
) {
7481 ADD_INSN(ret
, line_node
, pop
);
7482 CHECK(COMPILE(ret
, "rescue else", node
->nd_else
));
7484 ADD_INSN(ret
, line_node
, nop
);
7485 ADD_LABEL(ret
, lcont
);
7488 ADD_INSN(ret
, line_node
, pop
);
7491 /* register catch entry */
7492 ADD_CATCH_ENTRY(CATCH_TYPE_RESCUE
, lstart
, lend
, rescue
, lcont
);
7493 ADD_CATCH_ENTRY(CATCH_TYPE_RETRY
, lend
, lcont
, NULL
, lstart
);
7498 compile_resbody(rb_iseq_t
*iseq
, LINK_ANCHOR
*const ret
, const NODE
*const node
, int popped
)
7500 const int line
= nd_line(node
);
7501 const NODE
*line_node
= node
;
7502 const NODE
*resq
= node
;
7504 LABEL
*label_miss
, *label_hit
;
7507 label_miss
= NEW_LABEL(line
);
7508 label_hit
= NEW_LABEL(line
);
7510 narg
= resq
->nd_args
;
7512 switch (nd_type(narg
)) {
7515 ADD_GETLOCAL(ret
, line_node
, LVAR_ERRINFO
, 0);
7516 CHECK(COMPILE(ret
, "rescue arg", narg
->nd_head
));
7517 ADD_INSN1(ret
, line_node
, checkmatch
, INT2FIX(VM_CHECKMATCH_TYPE_RESCUE
));
7518 ADD_INSNL(ret
, line_node
, branchif
, label_hit
);
7519 narg
= narg
->nd_next
;
7525 ADD_GETLOCAL(ret
, line_node
, LVAR_ERRINFO
, 0);
7526 CHECK(COMPILE(ret
, "rescue/cond splat", narg
));
7527 ADD_INSN1(ret
, line_node
, checkmatch
, INT2FIX(VM_CHECKMATCH_TYPE_RESCUE
| VM_CHECKMATCH_ARRAY
));
7528 ADD_INSNL(ret
, line_node
, branchif
, label_hit
);
7531 UNKNOWN_NODE("NODE_RESBODY", narg
, COMPILE_NG
);
7535 ADD_GETLOCAL(ret
, line_node
, LVAR_ERRINFO
, 0);
7536 ADD_INSN1(ret
, line_node
, putobject
, rb_eStandardError
);
7537 ADD_INSN1(ret
, line_node
, checkmatch
, INT2FIX(VM_CHECKMATCH_TYPE_RESCUE
));
7538 ADD_INSNL(ret
, line_node
, branchif
, label_hit
);
7540 ADD_INSNL(ret
, line_node
, jump
, label_miss
);
7541 ADD_LABEL(ret
, label_hit
);
7542 CHECK(COMPILE(ret
, "resbody body", resq
->nd_body
));
7543 if (ISEQ_COMPILE_DATA(iseq
)->option
->tailcall_optimization
) {
7544 ADD_INSN(ret
, line_node
, nop
);
7546 ADD_INSN(ret
, line_node
, leave
);
7547 ADD_LABEL(ret
, label_miss
);
7548 resq
= resq
->nd_head
;
7554 compile_ensure(rb_iseq_t
*iseq
, LINK_ANCHOR
*const ret
, const NODE
*const node
, int popped
)
7556 const int line
= nd_line(node
);
7557 const NODE
*line_node
= node
;
7559 const rb_iseq_t
*ensure
= NEW_CHILD_ISEQ(node
->nd_ensr
,
7560 rb_str_concat(rb_str_new2 ("ensure in "), iseq
->body
->location
.label
),
7561 ISEQ_TYPE_ENSURE
, line
);
7562 LABEL
*lstart
= NEW_LABEL(line
);
7563 LABEL
*lend
= NEW_LABEL(line
);
7564 LABEL
*lcont
= NEW_LABEL(line
);
7567 struct ensure_range er
;
7568 struct iseq_compile_data_ensure_node_stack enl
;
7569 struct ensure_range
*erange
;
7572 CHECK(COMPILE_POPPED(ensr
, "ensure ensr", node
->nd_ensr
));
7574 last_leave
= last
&& IS_INSN(last
) && IS_INSN_ID(last
, leave
);
7579 push_ensure_entry(iseq
, &enl
, &er
, node
->nd_ensr
);
7581 ADD_LABEL(ret
, lstart
);
7582 CHECK(COMPILE_(ret
, "ensure head", node
->nd_head
, (popped
| last_leave
)));
7583 ADD_LABEL(ret
, lend
);
7585 if (!popped
&& last_leave
) ADD_INSN(ret
, line_node
, putnil
);
7586 ADD_LABEL(ret
, lcont
);
7587 if (last_leave
) ADD_INSN(ret
, line_node
, pop
);
7589 erange
= ISEQ_COMPILE_DATA(iseq
)->ensure_node_stack
->erange
;
7590 if (lstart
->link
.next
!= &lend
->link
) {
7592 ADD_CATCH_ENTRY(CATCH_TYPE_ENSURE
, erange
->begin
, erange
->end
,
7594 erange
= erange
->next
;
7598 ISEQ_COMPILE_DATA(iseq
)->ensure_node_stack
= enl
.prev
;
7603 compile_return(rb_iseq_t
*iseq
, LINK_ANCHOR
*const ret
, const NODE
*const node
, int popped
)
7605 const NODE
*line_node
= node
;
7608 enum iseq_type type
= iseq
->body
->type
;
7609 const rb_iseq_t
*is
= iseq
;
7610 enum iseq_type t
= type
;
7611 const NODE
*retval
= node
->nd_stts
;
7614 while (t
== ISEQ_TYPE_RESCUE
|| t
== ISEQ_TYPE_ENSURE
) {
7615 if (!(is
= is
->body
->parent_iseq
)) break;
7620 case ISEQ_TYPE_MAIN
:
7622 rb_warn("argument of top-level return is ignored");
7625 /* plain top-level, leave directly */
7626 type
= ISEQ_TYPE_METHOD
;
7633 if (type
== ISEQ_TYPE_METHOD
) {
7634 splabel
= NEW_LABEL(0);
7635 ADD_LABEL(ret
, splabel
);
7636 ADD_ADJUST(ret
, line_node
, 0);
7639 CHECK(COMPILE(ret
, "return nd_stts (return val)", retval
));
7641 if (type
== ISEQ_TYPE_METHOD
&& can_add_ensure_iseq(iseq
)) {
7642 add_ensure_iseq(ret
, iseq
, 1);
7643 ADD_TRACE(ret
, RUBY_EVENT_RETURN
);
7644 ADD_INSN(ret
, line_node
, leave
);
7645 ADD_ADJUST_RESTORE(ret
, splabel
);
7648 ADD_INSN(ret
, line_node
, putnil
);
7652 ADD_INSN1(ret
, line_node
, throw, INT2FIX(TAG_RETURN
));
7654 ADD_INSN(ret
, line_node
, pop
);
7662 compile_evstr(rb_iseq_t
*iseq
, LINK_ANCHOR
*const ret
, const NODE
*const node
, int popped
)
7664 CHECK(COMPILE_(ret
, "nd_body", node
, popped
));
7666 if (!popped
&& !all_string_result_p(node
)) {
7667 const NODE
*line_node
= node
;
7668 const unsigned int flag
= VM_CALL_FCALL
;
7670 // Note, this dup could be removed if we are willing to change anytostring. It pops
7671 // two VALUEs off the stack when it could work by replacing the top most VALUE.
7672 ADD_INSN(ret
, line_node
, dup
);
7673 ADD_INSN1(ret
, line_node
, objtostring
, new_callinfo(iseq
, idTo_s
, 0, flag
, NULL
, FALSE
));
7674 ADD_INSN(ret
, line_node
, anytostring
);
7680 compile_lvar(rb_iseq_t
*iseq
, LINK_ANCHOR
*const ret
, const NODE
*line_node
, ID id
)
7682 int idx
= iseq
->body
->local_iseq
->body
->local_table_size
- get_local_var_idx(iseq
, id
);
7684 debugs("id: %s idx: %d\n", rb_id2name(id
), idx
);
7685 ADD_GETLOCAL(ret
, line_node
, idx
, get_lvar_level(iseq
));
7689 qcall_branch_start(rb_iseq_t
*iseq
, LINK_ANCHOR
*const recv
, VALUE
*branches
, const NODE
*node
, const NODE
*line_node
)
7691 LABEL
*else_label
= NEW_LABEL(nd_line(line_node
));
7694 br
= decl_branch_base(iseq
, node
, "&.");
7696 ADD_INSN(recv
, line_node
, dup
);
7697 ADD_INSNL(recv
, line_node
, branchnil
, else_label
);
7698 add_trace_branch_coverage(iseq
, recv
, node
, 0, "then", br
);
7703 qcall_branch_end(rb_iseq_t
*iseq
, LINK_ANCHOR
*const ret
, LABEL
*else_label
, VALUE branches
, const NODE
*node
, const NODE
*line_node
)
7706 if (!else_label
) return;
7707 end_label
= NEW_LABEL(nd_line(line_node
));
7708 ADD_INSNL(ret
, line_node
, jump
, end_label
);
7709 ADD_LABEL(ret
, else_label
);
7710 add_trace_branch_coverage(iseq
, ret
, node
, 1, "else", branches
);
7711 ADD_LABEL(ret
, end_label
);
7715 compile_call_precheck_freeze(rb_iseq_t
*iseq
, LINK_ANCHOR
*const ret
, const NODE
*const node
, const NODE
*line_node
, int popped
)
7717 /* optimization shortcut
7718 * "literal".freeze -> opt_str_freeze("literal")
7720 if (node
->nd_recv
&& nd_type_p(node
->nd_recv
, NODE_STR
) &&
7721 (node
->nd_mid
== idFreeze
|| node
->nd_mid
== idUMinus
) &&
7722 node
->nd_args
== NULL
&&
7723 ISEQ_COMPILE_DATA(iseq
)->current_block
== NULL
&&
7724 ISEQ_COMPILE_DATA(iseq
)->option
->specialized_instruction
) {
7725 VALUE str
= rb_fstring(node
->nd_recv
->nd_lit
);
7726 if (node
->nd_mid
== idUMinus
) {
7727 ADD_INSN2(ret
, line_node
, opt_str_uminus
, str
,
7728 new_callinfo(iseq
, idUMinus
, 0, 0, NULL
, FALSE
));
7731 ADD_INSN2(ret
, line_node
, opt_str_freeze
, str
,
7732 new_callinfo(iseq
, idFreeze
, 0, 0, NULL
, FALSE
));
7734 RB_OBJ_WRITTEN(iseq
, Qundef
, str
);
7736 ADD_INSN(ret
, line_node
, pop
);
7740 /* optimization shortcut
7741 * obj["literal"] -> opt_aref_with(obj, "literal")
7743 if (node
->nd_mid
== idAREF
&& !private_recv_p(node
) && node
->nd_args
&&
7744 nd_type_p(node
->nd_args
, NODE_LIST
) && node
->nd_args
->nd_alen
== 1 &&
7745 nd_type_p(node
->nd_args
->nd_head
, NODE_STR
) &&
7746 ISEQ_COMPILE_DATA(iseq
)->current_block
== NULL
&&
7747 !ISEQ_COMPILE_DATA(iseq
)->option
->frozen_string_literal
&&
7748 ISEQ_COMPILE_DATA(iseq
)->option
->specialized_instruction
) {
7749 VALUE str
= rb_fstring(node
->nd_args
->nd_head
->nd_lit
);
7750 CHECK(COMPILE(ret
, "recv", node
->nd_recv
));
7751 ADD_INSN2(ret
, line_node
, opt_aref_with
, str
,
7752 new_callinfo(iseq
, idAREF
, 1, 0, NULL
, FALSE
));
7753 RB_OBJ_WRITTEN(iseq
, Qundef
, str
);
7755 ADD_INSN(ret
, line_node
, pop
);
7763 iseq_has_builtin_function_table(const rb_iseq_t
*iseq
)
7765 return ISEQ_COMPILE_DATA(iseq
)->builtin_function_table
!= NULL
;
7768 static const struct rb_builtin_function
*
7769 iseq_builtin_function_lookup(const rb_iseq_t
*iseq
, const char *name
)
7772 const struct rb_builtin_function
*table
= ISEQ_COMPILE_DATA(iseq
)->builtin_function_table
;
7773 for (i
=0; table
[i
].index
!= -1; i
++) {
7774 if (strcmp(table
[i
].name
, name
) == 0) {
7782 iseq_builtin_function_name(const enum node_type type
, const NODE
*recv
, ID mid
)
7784 const char *name
= rb_id2name(mid
);
7785 static const char prefix
[] = "__builtin_";
7786 const size_t prefix_len
= sizeof(prefix
) - 1;
7791 switch (nd_type(recv
)) {
7793 if (recv
->nd_mid
== rb_intern("__builtin")) {
7798 if (recv
->nd_vid
== rb_intern("Primitive")) {
7808 if (UNLIKELY(strncmp(prefix
, name
, prefix_len
) == 0)) {
7809 return &name
[prefix_len
];
7818 delegate_call_p(const rb_iseq_t
*iseq
, unsigned int argc
, const LINK_ANCHOR
*args
, unsigned int *pstart_index
)
7825 else if (argc
<= iseq
->body
->local_table_size
) {
7826 unsigned int start
=0;
7828 // local_table: [p1, p2, p3, l1, l2, l3]
7829 // arguments: [p3, l1, l2] -> 2
7831 argc
+ start
<= iseq
->body
->local_table_size
;
7833 const LINK_ELEMENT
*elem
= FIRST_ELEMENT(args
);
7835 for (unsigned int i
=start
; i
-start
<argc
; i
++) {
7836 if (elem
->type
== ISEQ_ELEMENT_INSN
&&
7837 INSN_OF(elem
) == BIN(getlocal
)) {
7838 int local_index
= FIX2INT(OPERAND_AT(elem
, 0));
7839 int local_level
= FIX2INT(OPERAND_AT(elem
, 1));
7841 if (local_level
== 0) {
7842 unsigned int index
= iseq
->body
->local_table_size
- (local_index
- VM_ENV_DATA_SIZE
+ 1);
7843 if (0) { // for debug
7844 fprintf(stderr
, "lvar:%s (%d), id:%s (%d) local_index:%d, local_size:%d\n",
7845 rb_id2name(iseq
->body
->local_table
[i
]), i
,
7846 rb_id2name(iseq
->body
->local_table
[index
]), index
,
7847 local_index
, (int)iseq
->body
->local_table_size
);
7858 goto fail
; // level != 0 is unsupported
7862 goto fail
; // insn is not a getlocal
7871 *pstart_index
= start
;
7880 compile_builtin_arg(rb_iseq_t
*iseq
, LINK_ANCHOR
*const ret
, const NODE
*node
, const NODE
*line_node
, int popped
)
7882 if (!node
) goto no_arg
;
7883 if (!nd_type_p(node
, NODE_LIST
)) goto bad_arg
;
7884 if (node
->nd_next
) goto too_many_arg
;
7885 node
= node
->nd_head
;
7886 if (!node
) goto no_arg
;
7887 if (!nd_type_p(node
, NODE_LIT
)) goto bad_arg
;
7888 VALUE name
= node
->nd_lit
;
7889 if (!SYMBOL_P(name
)) goto non_symbol_arg
;
7891 compile_lvar(iseq
, ret
, line_node
, SYM2ID(name
));
7895 COMPILE_ERROR(ERROR_ARGS
"arg!: no argument");
7898 COMPILE_ERROR(ERROR_ARGS
"arg!: too many argument");
7901 COMPILE_ERROR(ERROR_ARGS
"non symbol argument to arg!: %s",
7902 rb_builtin_class_name(name
));
7905 UNKNOWN_NODE("arg!", node
, COMPILE_NG
);
7909 mandatory_node(const rb_iseq_t
*iseq
, const NODE
*cond_node
)
7911 const NODE
*node
= ISEQ_COMPILE_DATA(iseq
)->root_node
;
7912 if (nd_type(node
) == NODE_IF
&& node
->nd_cond
== cond_node
) {
7913 return node
->nd_body
;
7916 rb_bug("mandatory_node: can't find mandatory node");
7921 compile_builtin_mandatory_only_method(rb_iseq_t
*iseq
, const NODE
*node
, const NODE
*line_node
)
7924 struct rb_args_info args
= {
7925 .pre_args_num
= iseq
->body
->param
.lead_num
,
7928 rb_node_init(&args_node
, NODE_ARGS
, 0, 0, (VALUE
)&args
);
7930 // local table without non-mandatory parameters
7931 const int skip_local_size
= iseq
->body
->param
.size
- iseq
->body
->param
.lead_num
;
7932 const int table_size
= iseq
->body
->local_table_size
- skip_local_size
;
7935 rb_ast_id_table_t
*tbl
= ALLOCV(idtmp
, sizeof(rb_ast_id_table_t
) + table_size
* sizeof(ID
));
7936 tbl
->size
= table_size
;
7941 for (i
=0; i
<iseq
->body
->param
.lead_num
; i
++) {
7942 tbl
->ids
[i
] = iseq
->body
->local_table
[i
];
7945 for (; i
<table_size
; i
++) {
7946 tbl
->ids
[i
] = iseq
->body
->local_table
[i
+ skip_local_size
];
7950 rb_node_init(&scope_node
, NODE_SCOPE
, (VALUE
)tbl
, (VALUE
)mandatory_node(iseq
, node
), (VALUE
)&args_node
);
7952 rb_ast_body_t ast
= {
7953 .root
= &scope_node
,
7954 .compile_option
= 0,
7955 .script_lines
= iseq
->body
->variable
.script_lines
,
7958 int prev_inline_index
= GET_VM()->builtin_inline_index
;
7960 iseq
->body
->mandatory_only_iseq
=
7961 rb_iseq_new_with_opt(&ast
, rb_iseq_base_label(iseq
),
7962 rb_iseq_path(iseq
), rb_iseq_realpath(iseq
),
7963 INT2FIX(nd_line(line_node
)), NULL
, 0,
7964 ISEQ_TYPE_METHOD
, ISEQ_COMPILE_DATA(iseq
)->option
);
7966 GET_VM()->builtin_inline_index
= prev_inline_index
;
7972 compile_builtin_function_call(rb_iseq_t
*iseq
, LINK_ANCHOR
*const ret
, const NODE
*const node
, const NODE
*line_node
, int popped
,
7973 const rb_iseq_t
*parent_block
, LINK_ANCHOR
*args
, const char *builtin_func
)
7975 NODE
*args_node
= node
->nd_args
;
7977 if (parent_block
!= NULL
) {
7978 COMPILE_ERROR(iseq
, nd_line(line_node
), "should not call builtins here.");
7982 # define BUILTIN_INLINE_PREFIX "_bi"
7983 char inline_func
[DECIMAL_SIZE_OF_BITS(sizeof(int) * CHAR_BIT
) + sizeof(BUILTIN_INLINE_PREFIX
)];
7984 bool cconst
= false;
7986 const struct rb_builtin_function
*bf
= iseq_builtin_function_lookup(iseq
, builtin_func
);
7989 if (strcmp("cstmt!", builtin_func
) == 0 ||
7990 strcmp("cexpr!", builtin_func
) == 0) {
7993 else if (strcmp("cconst!", builtin_func
) == 0) {
7996 else if (strcmp("cinit!", builtin_func
) == 0) {
7998 GET_VM()->builtin_inline_index
++;
8001 else if (strcmp("attr!", builtin_func
) == 0) {
8002 // There's only "inline" attribute for now
8003 iseq
->body
->builtin_inline_p
= true;
8006 else if (strcmp("arg!", builtin_func
) == 0) {
8007 return compile_builtin_arg(iseq
, ret
, args_node
, line_node
, popped
);
8009 else if (strcmp("mandatory_only?", builtin_func
) == 0) {
8011 rb_bug("mandatory_only? should be in if condition");
8013 else if (!LIST_INSN_SIZE_ZERO(ret
)) {
8014 rb_bug("mandatory_only? should be put on top");
8017 ADD_INSN1(ret
, line_node
, putobject
, Qfalse
);
8018 return compile_builtin_mandatory_only_method(iseq
, node
, line_node
);
8021 rb_bug("can't find builtin function:%s", builtin_func
);
8024 COMPILE_ERROR(ERROR_ARGS
"can't find builtin function:%s", builtin_func
);
8028 if (GET_VM()->builtin_inline_index
== INT_MAX
) {
8029 rb_bug("builtin inline function index overflow:%s", builtin_func
);
8031 int inline_index
= GET_VM()->builtin_inline_index
++;
8032 snprintf(inline_func
, sizeof(inline_func
), BUILTIN_INLINE_PREFIX
"%d", inline_index
);
8033 builtin_func
= inline_func
;
8039 typedef VALUE(*builtin_func0
)(void *, VALUE
);
8040 VALUE const_val
= (*(builtin_func0
)bf
->func_ptr
)(NULL
, Qnil
);
8041 ADD_INSN1(ret
, line_node
, putobject
, const_val
);
8045 // fprintf(stderr, "func_name:%s -> %p\n", builtin_func, bf->func_ptr);
8047 unsigned int flag
= 0;
8048 struct rb_callinfo_kwarg
*keywords
= NULL
;
8049 VALUE argc
= setup_args(iseq
, args
, args_node
, &flag
, &keywords
);
8051 if (FIX2INT(argc
) != bf
->argc
) {
8052 COMPILE_ERROR(ERROR_ARGS
"argc is not match for builtin function:%s (expect %d but %d)",
8053 builtin_func
, bf
->argc
, FIX2INT(argc
));
8057 unsigned int start_index
;
8058 if (delegate_call_p(iseq
, FIX2INT(argc
), args
, &start_index
)) {
8059 ADD_INSN2(ret
, line_node
, opt_invokebuiltin_delegate
, bf
, INT2FIX(start_index
));
8063 ADD_INSN1(ret
, line_node
, invokebuiltin
, bf
);
8066 if (popped
) ADD_INSN(ret
, line_node
, pop
);
8072 compile_call(rb_iseq_t
*iseq
, LINK_ANCHOR
*const ret
, const NODE
*const node
, const enum node_type type
, const NODE
*const line_node
, int popped
, bool assume_receiver
)
8074 /* call: obj.method(...)
8080 ID mid
= node
->nd_mid
;
8082 unsigned int flag
= 0;
8083 struct rb_callinfo_kwarg
*keywords
= NULL
;
8084 const rb_iseq_t
*parent_block
= ISEQ_COMPILE_DATA(iseq
)->current_block
;
8085 LABEL
*else_label
= NULL
;
8086 VALUE branches
= Qfalse
;
8088 ISEQ_COMPILE_DATA(iseq
)->current_block
= NULL
;
8092 #if OPT_SUPPORT_JOKE
8093 if (nd_type_p(node
, NODE_VCALL
)) {
8097 CONST_ID(id_bitblt
, "bitblt");
8098 CONST_ID(id_answer
, "the_answer_to_life_the_universe_and_everything");
8100 if (mid
== id_bitblt
) {
8101 ADD_INSN(ret
, line_node
, bitblt
);
8104 else if (mid
== id_answer
) {
8105 ADD_INSN(ret
, line_node
, answer
);
8114 CONST_ID(goto_id
, "__goto__");
8115 CONST_ID(label_id
, "__label__");
8117 if (nd_type_p(node
, NODE_FCALL
) &&
8118 (mid
== goto_id
|| mid
== label_id
)) {
8121 st_table
*labels_table
= ISEQ_COMPILE_DATA(iseq
)->labels_table
;
8124 if (!labels_table
) {
8125 labels_table
= st_init_numtable();
8126 ISEQ_COMPILE_DATA(iseq
)->labels_table
= labels_table
;
8128 if (nd_type_p(node
->nd_args
->nd_head
, NODE_LIT
) &&
8129 SYMBOL_P(node
->nd_args
->nd_head
->nd_lit
)) {
8131 label_name
= node
->nd_args
->nd_head
->nd_lit
;
8132 if (!st_lookup(labels_table
, (st_data_t
)label_name
, &data
)) {
8133 label
= NEW_LABEL(nd_line(line_node
));
8134 label
->position
= nd_line(line_node
);
8135 st_insert(labels_table
, (st_data_t
)label_name
, (st_data_t
)label
);
8138 label
= (LABEL
*)data
;
8142 COMPILE_ERROR(ERROR_ARGS
"invalid goto/label format");
8146 if (mid
== goto_id
) {
8147 ADD_INSNL(ret
, line_node
, jump
, label
);
8150 ADD_LABEL(ret
, label
);
8157 const char *builtin_func
;
8158 if (UNLIKELY(iseq_has_builtin_function_table(iseq
)) &&
8159 (builtin_func
= iseq_builtin_function_name(type
, node
->nd_recv
, mid
)) != NULL
) {
8160 return compile_builtin_function_call(iseq
, ret
, node
, line_node
, popped
, parent_block
, args
, builtin_func
);
8164 if (!assume_receiver
) {
8165 if (type
== NODE_CALL
|| type
== NODE_OPCALL
|| type
== NODE_QCALL
) {
8168 if (mid
== idCall
&&
8169 nd_type_p(node
->nd_recv
, NODE_LVAR
) &&
8170 iseq_block_param_id_p(iseq
, node
->nd_recv
->nd_vid
, &idx
, &level
)) {
8171 ADD_INSN2(recv
, node
->nd_recv
, getblockparamproxy
, INT2FIX(idx
+ VM_ENV_DATA_SIZE
- 1), INT2FIX(level
));
8173 else if (private_recv_p(node
)) {
8174 ADD_INSN(recv
, node
, putself
);
8175 flag
|= VM_CALL_FCALL
;
8178 CHECK(COMPILE(recv
, "recv", node
->nd_recv
));
8181 if (type
== NODE_QCALL
) {
8182 else_label
= qcall_branch_start(iseq
, recv
, &branches
, node
, line_node
);
8185 else if (type
== NODE_FCALL
|| type
== NODE_VCALL
) {
8186 ADD_CALL_RECEIVER(recv
, line_node
);
8191 if (type
!= NODE_VCALL
) {
8192 argc
= setup_args(iseq
, args
, node
->nd_args
, &flag
, &keywords
);
8193 CHECK(!NIL_P(argc
));
8202 debugp_param("call args argc", argc
);
8203 debugp_param("call method", ID2SYM(mid
));
8205 switch ((int)type
) {
8207 flag
|= VM_CALL_VCALL
;
8208 /* VCALL is funcall, so fall through */
8210 flag
|= VM_CALL_FCALL
;
8213 ADD_SEND_R(ret
, line_node
, mid
, argc
, parent_block
, INT2FIX(flag
), keywords
);
8215 qcall_branch_end(iseq
, ret
, else_label
, branches
, node
, line_node
);
8217 ADD_INSN(ret
, line_node
, pop
);
8223 compile_op_asgn1(rb_iseq_t
*iseq
, LINK_ANCHOR
*const ret
, const NODE
*const node
, int popped
)
8225 const int line
= nd_line(node
);
8227 unsigned int flag
= 0;
8229 ID id
= node
->nd_mid
;
8238 * dupn 2 # nil a x a x
8239 * send :[] # nil a x a[x]
8240 * eval y # nil a x a[x] y
8241 * send op # nil a x ret
8242 * setn 3 # ret a x ret
8248 * nd_recv[nd_args->nd_body] (nd_mid)= nd_args->nd_head;
8249 * NODE_OP_ASGN nd_recv
8256 ADD_INSN(ret
, node
, putnil
);
8258 asgnflag
= COMPILE_RECV(ret
, "NODE_OP_ASGN1 recv", node
);
8259 CHECK(asgnflag
!= -1);
8260 switch (nd_type(node
->nd_args
->nd_head
)) {
8264 case NODE_BLOCK_PASS
:
8268 argc
= setup_args(iseq
, ret
, node
->nd_args
->nd_head
, &flag
, NULL
);
8269 CHECK(!NIL_P(argc
));
8271 ADD_INSN1(ret
, node
, dupn
, FIXNUM_INC(argc
, 1 + boff
));
8273 ADD_SEND_WITH_FLAG(ret
, node
, idAREF
, argc
, INT2FIX(flag
));
8275 if (id
== idOROP
|| id
== idANDOP
) {
8276 /* a[x] ||= y or a[x] &&= y
8284 LABEL
*label
= NEW_LABEL(line
);
8285 LABEL
*lfin
= NEW_LABEL(line
);
8287 ADD_INSN(ret
, node
, dup
);
8289 ADD_INSNL(ret
, node
, branchif
, label
);
8291 else { /* idANDOP */
8292 ADD_INSNL(ret
, node
, branchunless
, label
);
8294 ADD_INSN(ret
, node
, pop
);
8296 CHECK(COMPILE(ret
, "NODE_OP_ASGN1 args->body: ", node
->nd_args
->nd_body
));
8298 ADD_INSN1(ret
, node
, setn
, FIXNUM_INC(argc
, 2+boff
));
8300 if (flag
& VM_CALL_ARGS_SPLAT
) {
8301 ADD_INSN1(ret
, node
, newarray
, INT2FIX(1));
8303 ADD_INSN1(ret
, node
, dupn
, INT2FIX(3));
8304 ADD_INSN(ret
, node
, swap
);
8305 ADD_INSN(ret
, node
, pop
);
8307 ADD_INSN(ret
, node
, concatarray
);
8309 ADD_INSN1(ret
, node
, setn
, INT2FIX(3));
8310 ADD_INSN(ret
, node
, pop
);
8311 ADD_INSN(ret
, node
, pop
);
8313 ADD_SEND_WITH_FLAG(ret
, node
, idASET
, argc
, INT2FIX(flag
));
8317 ADD_INSN(ret
, node
, swap
);
8318 ADD_SEND_WITH_FLAG(ret
, node
, idASET
, FIXNUM_INC(argc
, 1), INT2FIX(flag
));
8320 ADD_INSN(ret
, node
, pop
);
8321 ADD_INSNL(ret
, node
, jump
, lfin
);
8322 ADD_LABEL(ret
, label
);
8324 ADD_INSN1(ret
, node
, setn
, FIXNUM_INC(argc
, 2+boff
));
8326 ADD_INSN1(ret
, node
, adjuststack
, FIXNUM_INC(argc
, 2+boff
));
8327 ADD_LABEL(ret
, lfin
);
8330 CHECK(COMPILE(ret
, "NODE_OP_ASGN1 args->body: ", node
->nd_args
->nd_body
));
8331 ADD_SEND(ret
, node
, id
, INT2FIX(1));
8333 ADD_INSN1(ret
, node
, setn
, FIXNUM_INC(argc
, 2+boff
));
8335 if (flag
& VM_CALL_ARGS_SPLAT
) {
8336 ADD_INSN1(ret
, node
, newarray
, INT2FIX(1));
8338 ADD_INSN1(ret
, node
, dupn
, INT2FIX(3));
8339 ADD_INSN(ret
, node
, swap
);
8340 ADD_INSN(ret
, node
, pop
);
8342 ADD_INSN(ret
, node
, concatarray
);
8344 ADD_INSN1(ret
, node
, setn
, INT2FIX(3));
8345 ADD_INSN(ret
, node
, pop
);
8346 ADD_INSN(ret
, node
, pop
);
8348 ADD_SEND_WITH_FLAG(ret
, node
, idASET
, argc
, INT2FIX(flag
));
8352 ADD_INSN(ret
, node
, swap
);
8353 ADD_SEND_WITH_FLAG(ret
, node
, idASET
, FIXNUM_INC(argc
, 1), INT2FIX(flag
));
8355 ADD_INSN(ret
, node
, pop
);
8361 compile_op_asgn2(rb_iseq_t
*iseq
, LINK_ANCHOR
*const ret
, const NODE
*const node
, int popped
)
8363 const int line
= nd_line(node
);
8364 ID atype
= node
->nd_next
->nd_mid
;
8365 ID vid
= node
->nd_next
->nd_vid
, aid
= rb_id_attrset(vid
);
8367 LABEL
*lfin
= NEW_LABEL(line
);
8368 LABEL
*lcfin
= NEW_LABEL(line
);
8371 class C; attr_accessor :c; end
8412 asgnflag
= COMPILE_RECV(ret
, "NODE_OP_ASGN2#recv", node
);
8413 CHECK(asgnflag
!= -1);
8414 if (node
->nd_next
->nd_aid
) {
8415 lskip
= NEW_LABEL(line
);
8416 ADD_INSN(ret
, node
, dup
);
8417 ADD_INSNL(ret
, node
, branchnil
, lskip
);
8419 ADD_INSN(ret
, node
, dup
);
8420 ADD_SEND_WITH_FLAG(ret
, node
, vid
, INT2FIX(0), INT2FIX(asgnflag
));
8422 if (atype
== idOROP
|| atype
== idANDOP
) {
8423 ADD_INSN(ret
, node
, dup
);
8424 if (atype
== idOROP
) {
8425 ADD_INSNL(ret
, node
, branchif
, lcfin
);
8427 else { /* idANDOP */
8428 ADD_INSNL(ret
, node
, branchunless
, lcfin
);
8430 ADD_INSN(ret
, node
, pop
);
8431 CHECK(COMPILE(ret
, "NODE_OP_ASGN2 val", node
->nd_value
));
8432 ADD_INSN(ret
, node
, swap
);
8433 ADD_INSN1(ret
, node
, topn
, INT2FIX(1));
8434 ADD_SEND_WITH_FLAG(ret
, node
, aid
, INT2FIX(1), INT2FIX(asgnflag
));
8435 ADD_INSNL(ret
, node
, jump
, lfin
);
8437 ADD_LABEL(ret
, lcfin
);
8438 ADD_INSN(ret
, node
, swap
);
8440 ADD_LABEL(ret
, lfin
);
8441 ADD_INSN(ret
, node
, pop
);
8443 ADD_LABEL(ret
, lskip
);
8446 /* we can apply more optimize */
8447 ADD_INSN(ret
, node
, pop
);
8451 CHECK(COMPILE(ret
, "NODE_OP_ASGN2 val", node
->nd_value
));
8452 ADD_SEND(ret
, node
, atype
, INT2FIX(1));
8454 ADD_INSN(ret
, node
, swap
);
8455 ADD_INSN1(ret
, node
, topn
, INT2FIX(1));
8457 ADD_SEND_WITH_FLAG(ret
, node
, aid
, INT2FIX(1), INT2FIX(asgnflag
));
8458 if (lskip
&& popped
) {
8459 ADD_LABEL(ret
, lskip
);
8461 ADD_INSN(ret
, node
, pop
);
8462 if (lskip
&& !popped
) {
8463 ADD_LABEL(ret
, lskip
);
8470 compile_op_cdecl(rb_iseq_t
*iseq
, LINK_ANCHOR
*const ret
, const NODE
*const node
, int popped
)
8472 const int line
= nd_line(node
);
8477 switch (nd_type(node
->nd_head
)) {
8479 ADD_INSN1(ret
, node
, putobject
, rb_cObject
);
8482 CHECK(COMPILE(ret
, "NODE_OP_CDECL/colon2#nd_head", node
->nd_head
->nd_head
));
8485 COMPILE_ERROR(ERROR_ARGS
"%s: invalid node in NODE_OP_CDECL",
8486 ruby_node_name(nd_type(node
->nd_head
)));
8489 mid
= node
->nd_head
->nd_mid
;
8491 if (node
->nd_aid
== idOROP
) {
8492 lassign
= NEW_LABEL(line
);
8493 ADD_INSN(ret
, node
, dup
); /* cref cref */
8494 ADD_INSN3(ret
, node
, defined
, INT2FIX(DEFINED_CONST_FROM
),
8495 ID2SYM(mid
), Qtrue
); /* cref bool */
8496 ADD_INSNL(ret
, node
, branchunless
, lassign
); /* cref */
8498 ADD_INSN(ret
, node
, dup
); /* cref cref */
8499 ADD_INSN1(ret
, node
, putobject
, Qtrue
);
8500 ADD_INSN1(ret
, node
, getconstant
, ID2SYM(mid
)); /* cref obj */
8502 if (node
->nd_aid
== idOROP
|| node
->nd_aid
== idANDOP
) {
8503 lfin
= NEW_LABEL(line
);
8504 if (!popped
) ADD_INSN(ret
, node
, dup
); /* cref [obj] obj */
8505 if (node
->nd_aid
== idOROP
)
8506 ADD_INSNL(ret
, node
, branchif
, lfin
);
8508 ADD_INSNL(ret
, node
, branchunless
, lfin
);
8510 if (!popped
) ADD_INSN(ret
, node
, pop
); /* cref */
8511 if (lassign
) ADD_LABEL(ret
, lassign
);
8512 CHECK(COMPILE(ret
, "NODE_OP_CDECL#nd_value", node
->nd_value
));
8515 ADD_INSN1(ret
, node
, topn
, INT2FIX(1)); /* cref value cref */
8517 ADD_INSN1(ret
, node
, dupn
, INT2FIX(2)); /* cref value cref value */
8518 ADD_INSN(ret
, node
, swap
); /* cref value value cref */
8520 ADD_INSN1(ret
, node
, setconstant
, ID2SYM(mid
)); /* cref [value] */
8521 ADD_LABEL(ret
, lfin
); /* cref [value] */
8522 if (!popped
) ADD_INSN(ret
, node
, swap
); /* [value] cref */
8523 ADD_INSN(ret
, node
, pop
); /* [value] */
8526 CHECK(COMPILE(ret
, "NODE_OP_CDECL#nd_value", node
->nd_value
));
8527 /* cref obj value */
8528 ADD_CALL(ret
, node
, node
->nd_aid
, INT2FIX(1));
8530 ADD_INSN(ret
, node
, swap
); /* value cref */
8532 ADD_INSN1(ret
, node
, topn
, INT2FIX(1)); /* value cref value */
8533 ADD_INSN(ret
, node
, swap
); /* value value cref */
8535 ADD_INSN1(ret
, node
, setconstant
, ID2SYM(mid
));
8541 compile_op_log(rb_iseq_t
*iseq
, LINK_ANCHOR
*const ret
, const NODE
*const node
, int popped
, const enum node_type type
)
8543 const int line
= nd_line(node
);
8544 LABEL
*lfin
= NEW_LABEL(line
);
8547 if (type
== NODE_OP_ASGN_OR
&& !nd_type_p(node
->nd_head
, NODE_IVAR
)) {
8551 defined_expr(iseq
, ret
, node
->nd_head
, lfinish
, Qfalse
);
8552 lassign
= lfinish
[1];
8554 lassign
= NEW_LABEL(line
);
8556 ADD_INSNL(ret
, node
, branchunless
, lassign
);
8559 lassign
= NEW_LABEL(line
);
8562 CHECK(COMPILE(ret
, "NODE_OP_ASGN_AND/OR#nd_head", node
->nd_head
));
8563 ADD_INSN(ret
, node
, dup
);
8565 if (type
== NODE_OP_ASGN_AND
) {
8566 ADD_INSNL(ret
, node
, branchunless
, lfin
);
8569 ADD_INSNL(ret
, node
, branchif
, lfin
);
8572 ADD_INSN(ret
, node
, pop
);
8573 ADD_LABEL(ret
, lassign
);
8574 CHECK(COMPILE(ret
, "NODE_OP_ASGN_AND/OR#nd_value", node
->nd_value
));
8575 ADD_LABEL(ret
, lfin
);
8578 /* we can apply more optimize */
8579 ADD_INSN(ret
, node
, pop
);
8585 compile_super(rb_iseq_t
*iseq
, LINK_ANCHOR
*const ret
, const NODE
*const node
, int popped
, const enum node_type type
)
8587 struct rb_iseq_constant_body
*const body
= iseq
->body
;
8590 unsigned int flag
= 0;
8591 struct rb_callinfo_kwarg
*keywords
= NULL
;
8592 const rb_iseq_t
*parent_block
= ISEQ_COMPILE_DATA(iseq
)->current_block
;
8595 ISEQ_COMPILE_DATA(iseq
)->current_block
= NULL
;
8596 if (type
== NODE_SUPER
) {
8597 VALUE vargc
= setup_args(iseq
, args
, node
->nd_args
, &flag
, &keywords
);
8598 CHECK(!NIL_P(vargc
));
8599 argc
= FIX2INT(vargc
);
8604 const rb_iseq_t
*liseq
= body
->local_iseq
;
8605 const struct rb_iseq_constant_body
*const local_body
= liseq
->body
;
8606 const struct rb_iseq_param_keyword
*const local_kwd
= local_body
->param
.keyword
;
8607 int lvar_level
= get_lvar_level(iseq
);
8609 argc
= local_body
->param
.lead_num
;
8611 /* normal arguments */
8612 for (i
= 0; i
< local_body
->param
.lead_num
; i
++) {
8613 int idx
= local_body
->local_table_size
- i
;
8614 ADD_GETLOCAL(args
, node
, idx
, lvar_level
);
8617 if (local_body
->param
.flags
.has_opt
) {
8618 /* optional arguments */
8620 for (j
= 0; j
< local_body
->param
.opt_num
; j
++) {
8621 int idx
= local_body
->local_table_size
- (i
+ j
);
8622 ADD_GETLOCAL(args
, node
, idx
, lvar_level
);
8627 if (local_body
->param
.flags
.has_rest
) {
8629 int idx
= local_body
->local_table_size
- local_body
->param
.rest_start
;
8630 ADD_GETLOCAL(args
, node
, idx
, lvar_level
);
8631 ADD_INSN1(args
, node
, splatarray
, Qfalse
);
8633 argc
= local_body
->param
.rest_start
+ 1;
8634 flag
|= VM_CALL_ARGS_SPLAT
;
8636 if (local_body
->param
.flags
.has_post
) {
8637 /* post arguments */
8638 int post_len
= local_body
->param
.post_num
;
8639 int post_start
= local_body
->param
.post_start
;
8641 if (local_body
->param
.flags
.has_rest
) {
8643 for (j
=0; j
<post_len
; j
++) {
8644 int idx
= local_body
->local_table_size
- (post_start
+ j
);
8645 ADD_GETLOCAL(args
, node
, idx
, lvar_level
);
8647 ADD_INSN1(args
, node
, newarray
, INT2FIX(j
));
8648 ADD_INSN (args
, node
, concatarray
);
8649 /* argc is settled at above */
8653 for (j
=0; j
<post_len
; j
++) {
8654 int idx
= local_body
->local_table_size
- (post_start
+ j
);
8655 ADD_GETLOCAL(args
, node
, idx
, lvar_level
);
8657 argc
= post_len
+ post_start
;
8661 if (local_body
->param
.flags
.has_kw
) { /* TODO: support keywords */
8662 int local_size
= local_body
->local_table_size
;
8665 ADD_INSN1(args
, node
, putspecialobject
, INT2FIX(VM_SPECIAL_OBJECT_VMCORE
));
8667 if (local_body
->param
.flags
.has_kwrest
) {
8668 int idx
= local_body
->local_table_size
- local_kwd
->rest_start
;
8669 ADD_GETLOCAL(args
, node
, idx
, lvar_level
);
8670 if (local_kwd
->num
> 0) {
8671 ADD_SEND (args
, node
, rb_intern("dup"), INT2FIX(0));
8672 flag
|= VM_CALL_KW_SPLAT_MUT
;
8676 ADD_INSN1(args
, node
, newhash
, INT2FIX(0));
8677 flag
|= VM_CALL_KW_SPLAT_MUT
;
8679 for (i
= 0; i
< local_kwd
->num
; ++i
) {
8680 ID id
= local_kwd
->table
[i
];
8681 int idx
= local_size
- get_local_var_idx(liseq
, id
);
8682 ADD_INSN1(args
, node
, putobject
, ID2SYM(id
));
8683 ADD_GETLOCAL(args
, node
, idx
, lvar_level
);
8685 ADD_SEND(args
, node
, id_core_hash_merge_ptr
, INT2FIX(i
* 2 + 1));
8686 if (local_body
->param
.flags
.has_rest
) {
8687 ADD_INSN1(args
, node
, newarray
, INT2FIX(1));
8688 ADD_INSN (args
, node
, concatarray
);
8691 flag
|= VM_CALL_KW_SPLAT
;
8693 else if (local_body
->param
.flags
.has_kwrest
) {
8694 int idx
= local_body
->local_table_size
- local_kwd
->rest_start
;
8695 ADD_GETLOCAL(args
, node
, idx
, lvar_level
);
8697 if (local_body
->param
.flags
.has_rest
) {
8698 ADD_INSN1(args
, node
, newarray
, INT2FIX(1));
8699 ADD_INSN (args
, node
, concatarray
);
8704 flag
|= VM_CALL_KW_SPLAT
;
8708 flag
|= VM_CALL_SUPER
| VM_CALL_FCALL
;
8709 if (type
== NODE_ZSUPER
) flag
|= VM_CALL_ZSUPER
;
8710 ADD_INSN(ret
, node
, putself
);
8712 ADD_INSN2(ret
, node
, invokesuper
,
8713 new_callinfo(iseq
, 0, argc
, flag
, keywords
, parent_block
!= NULL
),
8717 ADD_INSN(ret
, node
, pop
);
8723 compile_yield(rb_iseq_t
*iseq
, LINK_ANCHOR
*const ret
, const NODE
*const node
, int popped
)
8727 unsigned int flag
= 0;
8728 struct rb_callinfo_kwarg
*keywords
= NULL
;
8732 switch (iseq
->body
->local_iseq
->body
->type
) {
8734 case ISEQ_TYPE_MAIN
:
8735 case ISEQ_TYPE_CLASS
:
8736 COMPILE_ERROR(ERROR_ARGS
"Invalid yield");
8738 default: /* valid */;
8741 if (node
->nd_head
) {
8742 argc
= setup_args(iseq
, args
, node
->nd_head
, &flag
, &keywords
);
8743 CHECK(!NIL_P(argc
));
8750 ADD_INSN1(ret
, node
, invokeblock
, new_callinfo(iseq
, 0, FIX2INT(argc
), flag
, keywords
, FALSE
));
8753 ADD_INSN(ret
, node
, pop
);
8757 const rb_iseq_t
*tmp_iseq
= iseq
;
8758 for (; tmp_iseq
!= iseq
->body
->local_iseq
; level
++ ) {
8759 tmp_iseq
= tmp_iseq
->body
->parent_iseq
;
8761 if (level
> 0) access_outer_variables(iseq
, level
, rb_intern("yield"), true);
8767 compile_match(rb_iseq_t
*iseq
, LINK_ANCHOR
*const ret
, const NODE
*const node
, int popped
, const enum node_type type
)
8774 switch ((int)type
) {
8776 ADD_INSN1(recv
, node
, putobject
, node
->nd_lit
);
8777 ADD_INSN2(val
, node
, getspecial
, INT2FIX(0),
8781 CHECK(COMPILE(recv
, "receiver", node
->nd_recv
));
8782 CHECK(COMPILE(val
, "value", node
->nd_value
));
8785 CHECK(COMPILE(recv
, "receiver", node
->nd_value
));
8786 CHECK(COMPILE(val
, "value", node
->nd_recv
));
8792 ADD_SEND(ret
, node
, idEqTilde
, INT2FIX(1));
8794 if (node
->nd_args
) {
8795 compile_named_capture_assign(iseq
, ret
, node
->nd_args
);
8799 ADD_INSN(ret
, node
, pop
);
8805 compile_colon2(rb_iseq_t
*iseq
, LINK_ANCHOR
*const ret
, const NODE
*const node
, int popped
)
8807 const int line
= nd_line(node
);
8808 if (rb_is_const_id(node
->nd_mid
)) {
8810 LABEL
*lend
= NEW_LABEL(line
);
8811 int ic_index
= iseq
->body
->is_size
++;
8818 CHECK(compile_const_prefix(iseq
, node
, pref
, body
));
8819 if (LIST_INSN_SIZE_ZERO(pref
)) {
8820 if (ISEQ_COMPILE_DATA(iseq
)->option
->inline_const_cache
) {
8821 ADD_INSN2(ret
, node
, opt_getinlinecache
, lend
, INT2FIX(ic_index
));
8824 ADD_INSN(ret
, node
, putnil
);
8829 if (ISEQ_COMPILE_DATA(iseq
)->option
->inline_const_cache
) {
8830 ADD_INSN1(ret
, node
, opt_setinlinecache
, INT2FIX(ic_index
));
8831 ADD_LABEL(ret
, lend
);
8841 ADD_CALL_RECEIVER(ret
, node
);
8842 CHECK(COMPILE(ret
, "colon2#nd_head", node
->nd_head
));
8843 ADD_CALL(ret
, node
, node
->nd_mid
, INT2FIX(1));
8846 ADD_INSN(ret
, node
, pop
);
8852 compile_colon3(rb_iseq_t
*iseq
, LINK_ANCHOR
*const ret
, const NODE
*const node
, int popped
)
8854 const int line
= nd_line(node
);
8855 LABEL
*lend
= NEW_LABEL(line
);
8856 int ic_index
= iseq
->body
->is_size
++;
8858 debugi("colon3#nd_mid", node
->nd_mid
);
8860 /* add cache insn */
8861 if (ISEQ_COMPILE_DATA(iseq
)->option
->inline_const_cache
) {
8862 ADD_INSN2(ret
, node
, opt_getinlinecache
, lend
, INT2FIX(ic_index
));
8863 ADD_INSN(ret
, node
, pop
);
8866 ADD_INSN1(ret
, node
, putobject
, rb_cObject
);
8867 ADD_INSN1(ret
, node
, putobject
, Qtrue
);
8868 ADD_INSN1(ret
, node
, getconstant
, ID2SYM(node
->nd_mid
));
8870 if (ISEQ_COMPILE_DATA(iseq
)->option
->inline_const_cache
) {
8871 ADD_INSN1(ret
, node
, opt_setinlinecache
, INT2FIX(ic_index
));
8872 ADD_LABEL(ret
, lend
);
8876 ADD_INSN(ret
, node
, pop
);
8882 compile_dots(rb_iseq_t
*iseq
, LINK_ANCHOR
*const ret
, const NODE
*const node
, int popped
, const int excl
)
8884 VALUE flag
= INT2FIX(excl
);
8885 const NODE
*b
= node
->nd_beg
;
8886 const NODE
*e
= node
->nd_end
;
8888 if (optimizable_range_item_p(b
) && optimizable_range_item_p(e
)) {
8890 VALUE bv
= nd_type_p(b
, NODE_LIT
) ? b
->nd_lit
: Qnil
;
8891 VALUE ev
= nd_type_p(e
, NODE_LIT
) ? e
->nd_lit
: Qnil
;
8892 VALUE val
= rb_range_new(bv
, ev
, excl
);
8893 ADD_INSN1(ret
, node
, putobject
, val
);
8894 RB_OBJ_WRITTEN(iseq
, Qundef
, val
);
8898 CHECK(COMPILE_(ret
, "min", b
, popped
));
8899 CHECK(COMPILE_(ret
, "max", e
, popped
));
8901 ADD_INSN1(ret
, node
, newrange
, flag
);
8908 compile_errinfo(rb_iseq_t
*iseq
, LINK_ANCHOR
*const ret
, const NODE
*const node
, int popped
)
8911 if (iseq
->body
->type
== ISEQ_TYPE_RESCUE
) {
8912 ADD_GETLOCAL(ret
, node
, LVAR_ERRINFO
, 0);
8915 const rb_iseq_t
*ip
= iseq
;
8918 if (ip
->body
->type
== ISEQ_TYPE_RESCUE
) {
8921 ip
= ip
->body
->parent_iseq
;
8925 ADD_GETLOCAL(ret
, node
, LVAR_ERRINFO
, level
);
8928 ADD_INSN(ret
, node
, putnil
);
8936 compile_kw_arg(rb_iseq_t
*iseq
, LINK_ANCHOR
*const ret
, const NODE
*const node
, int popped
)
8938 struct rb_iseq_constant_body
*const body
= iseq
->body
;
8939 LABEL
*end_label
= NEW_LABEL(nd_line(node
));
8940 const NODE
*default_value
= node
->nd_body
->nd_value
;
8942 if (default_value
== NODE_SPECIAL_REQUIRED_KEYWORD
) {
8943 /* required argument. do nothing */
8944 COMPILE_ERROR(ERROR_ARGS
"unreachable");
8947 else if (nd_type_p(default_value
, NODE_LIT
) ||
8948 nd_type_p(default_value
, NODE_NIL
) ||
8949 nd_type_p(default_value
, NODE_TRUE
) ||
8950 nd_type_p(default_value
, NODE_FALSE
)) {
8951 COMPILE_ERROR(ERROR_ARGS
"unreachable");
8955 /* if keywordcheck(_kw_bits, nth_keyword)
8956 * kw = default_value
8959 int kw_bits_idx
= body
->local_table_size
- body
->param
.keyword
->bits_start
;
8960 int keyword_idx
= body
->param
.keyword
->num
;
8962 ADD_INSN2(ret
, node
, checkkeyword
, INT2FIX(kw_bits_idx
+ VM_ENV_DATA_SIZE
- 1), INT2FIX(keyword_idx
));
8963 ADD_INSNL(ret
, node
, branchif
, end_label
);
8964 CHECK(COMPILE_POPPED(ret
, "keyword default argument", node
->nd_body
));
8965 ADD_LABEL(ret
, end_label
);
8971 compile_attrasgn(rb_iseq_t
*iseq
, LINK_ANCHOR
*const ret
, const NODE
*const node
, int popped
)
8975 unsigned int flag
= 0;
8976 ID mid
= node
->nd_mid
;
8978 LABEL
*else_label
= NULL
;
8979 VALUE branches
= Qfalse
;
8981 /* optimization shortcut
8982 * obj["literal"] = value -> opt_aset_with(obj, "literal", value)
8984 if (mid
== idASET
&& !private_recv_p(node
) && node
->nd_args
&&
8985 nd_type_p(node
->nd_args
, NODE_LIST
) && node
->nd_args
->nd_alen
== 2 &&
8986 nd_type_p(node
->nd_args
->nd_head
, NODE_STR
) &&
8987 ISEQ_COMPILE_DATA(iseq
)->current_block
== NULL
&&
8988 !ISEQ_COMPILE_DATA(iseq
)->option
->frozen_string_literal
&&
8989 ISEQ_COMPILE_DATA(iseq
)->option
->specialized_instruction
)
8991 VALUE str
= rb_fstring(node
->nd_args
->nd_head
->nd_lit
);
8992 CHECK(COMPILE(ret
, "recv", node
->nd_recv
));
8993 CHECK(COMPILE(ret
, "value", node
->nd_args
->nd_next
->nd_head
));
8995 ADD_INSN(ret
, node
, swap
);
8996 ADD_INSN1(ret
, node
, topn
, INT2FIX(1));
8998 ADD_INSN2(ret
, node
, opt_aset_with
, str
,
8999 new_callinfo(iseq
, idASET
, 2, 0, NULL
, FALSE
));
9000 RB_OBJ_WRITTEN(iseq
, Qundef
, str
);
9001 ADD_INSN(ret
, node
, pop
);
9007 argc
= setup_args(iseq
, args
, node
->nd_args
, &flag
, NULL
);
9008 CHECK(!NIL_P(argc
));
9010 int asgnflag
= COMPILE_RECV(recv
, "recv", node
);
9011 CHECK(asgnflag
!= -1);
9012 flag
|= (unsigned int)asgnflag
;
9014 debugp_param("argc", argc
);
9015 debugp_param("nd_mid", ID2SYM(mid
));
9017 if (!rb_is_attrset_id(mid
)) {
9019 mid
= rb_id_attrset(mid
);
9020 else_label
= qcall_branch_start(iseq
, recv
, &branches
, node
, node
);
9023 ADD_INSN(ret
, node
, putnil
);
9027 if (flag
& VM_CALL_ARGS_BLOCKARG
) {
9028 ADD_INSN1(ret
, node
, topn
, INT2FIX(1));
9029 if (flag
& VM_CALL_ARGS_SPLAT
) {
9030 ADD_INSN1(ret
, node
, putobject
, INT2FIX(-1));
9031 ADD_SEND_WITH_FLAG(ret
, node
, idAREF
, INT2FIX(1), INT2FIX(asgnflag
));
9033 ADD_INSN1(ret
, node
, setn
, FIXNUM_INC(argc
, 3));
9034 ADD_INSN (ret
, node
, pop
);
9036 else if (flag
& VM_CALL_ARGS_SPLAT
) {
9037 ADD_INSN(ret
, node
, dup
);
9038 ADD_INSN1(ret
, node
, putobject
, INT2FIX(-1));
9039 ADD_SEND_WITH_FLAG(ret
, node
, idAREF
, INT2FIX(1), INT2FIX(asgnflag
));
9040 ADD_INSN1(ret
, node
, setn
, FIXNUM_INC(argc
, 2));
9041 ADD_INSN (ret
, node
, pop
);
9044 ADD_INSN1(ret
, node
, setn
, FIXNUM_INC(argc
, 1));
9051 ADD_SEND_WITH_FLAG(ret
, node
, mid
, argc
, INT2FIX(flag
));
9052 qcall_branch_end(iseq
, ret
, else_label
, branches
, node
, node
);
9053 ADD_INSN(ret
, node
, pop
);
9057 static int iseq_compile_each0(rb_iseq_t
*iseq
, LINK_ANCHOR
*const ret
, const NODE
*const node
, int popped
);
9061 self: InstructionSequence
9062 node: Ruby compiled node
9063 popped: This node will be popped
9066 iseq_compile_each(rb_iseq_t
*iseq
, LINK_ANCHOR
*ret
, const NODE
*node
, int popped
)
9070 int lineno
= ISEQ_COMPILE_DATA(iseq
)->last_line
;
9071 if (lineno
== 0) lineno
= FIX2INT(rb_iseq_first_lineno(iseq
));
9072 debugs("node: NODE_NIL(implicit)\n");
9073 NODE dummy_line_node
= generate_dummy_line_node(lineno
, -1);
9074 ADD_INSN(ret
, &dummy_line_node
, putnil
);
9078 return iseq_compile_each0(iseq
, ret
, node
, popped
);
9082 iseq_compile_each0(rb_iseq_t
*iseq
, LINK_ANCHOR
*const ret
, const NODE
*const node
, int popped
)
9084 const int line
= (int)nd_line(node
);
9085 const enum node_type type
= nd_type(node
);
9086 struct rb_iseq_constant_body
*const body
= iseq
->body
;
9088 if (ISEQ_COMPILE_DATA(iseq
)->last_line
== line
) {
9092 if (node
->flags
& NODE_FL_NEWLINE
) {
9093 int event
= RUBY_EVENT_LINE
;
9094 ISEQ_COMPILE_DATA(iseq
)->last_line
= line
;
9095 if (ISEQ_COVERAGE(iseq
) && ISEQ_LINE_COVERAGE(iseq
)) {
9096 event
|= RUBY_EVENT_COVERAGE_LINE
;
9098 ADD_TRACE(ret
, event
);
9102 debug_node_start(node
);
9103 #undef BEFORE_RETURN
9104 #define BEFORE_RETURN debug_node_end()
9108 CHECK(compile_block(iseq
, ret
, node
, popped
));
9112 CHECK(compile_if(iseq
, ret
, node
, popped
, type
));
9115 CHECK(compile_case(iseq
, ret
, node
, popped
));
9118 CHECK(compile_case2(iseq
, ret
, node
, popped
));
9121 CHECK(compile_case3(iseq
, ret
, node
, popped
));
9125 CHECK(compile_loop(iseq
, ret
, node
, popped
, type
));
9129 CHECK(compile_iter(iseq
, ret
, node
, popped
));
9131 case NODE_FOR_MASGN
:
9132 CHECK(compile_for_masgn(iseq
, ret
, node
, popped
));
9135 CHECK(compile_break(iseq
, ret
, node
, popped
));
9138 CHECK(compile_next(iseq
, ret
, node
, popped
));
9141 CHECK(compile_redo(iseq
, ret
, node
, popped
));
9144 CHECK(compile_retry(iseq
, ret
, node
, popped
));
9147 CHECK(COMPILE_(ret
, "NODE_BEGIN", node
->nd_body
, popped
));
9151 CHECK(compile_rescue(iseq
, ret
, node
, popped
));
9154 CHECK(compile_resbody(iseq
, ret
, node
, popped
));
9157 CHECK(compile_ensure(iseq
, ret
, node
, popped
));
9162 LABEL
*end_label
= NEW_LABEL(line
);
9163 CHECK(COMPILE(ret
, "nd_1st", node
->nd_1st
));
9165 ADD_INSN(ret
, node
, dup
);
9167 if (type
== NODE_AND
) {
9168 ADD_INSNL(ret
, node
, branchunless
, end_label
);
9171 ADD_INSNL(ret
, node
, branchif
, end_label
);
9174 ADD_INSN(ret
, node
, pop
);
9176 CHECK(COMPILE_(ret
, "nd_2nd", node
->nd_2nd
, popped
));
9177 ADD_LABEL(ret
, end_label
);
9182 compile_massign(iseq
, ret
, node
, popped
);
9187 ID id
= node
->nd_vid
;
9188 int idx
= body
->local_iseq
->body
->local_table_size
- get_local_var_idx(iseq
, id
);
9190 debugs("lvar: %s idx: %d\n", rb_id2name(id
), idx
);
9191 CHECK(COMPILE(ret
, "rvalue", node
->nd_value
));
9194 ADD_INSN(ret
, node
, dup
);
9196 ADD_SETLOCAL(ret
, node
, idx
, get_lvar_level(iseq
));
9201 ID id
= node
->nd_vid
;
9202 CHECK(COMPILE(ret
, "dvalue", node
->nd_value
));
9203 debugi("dassn id", rb_id2str(id
) ? id
: '*');
9206 ADD_INSN(ret
, node
, dup
);
9209 idx
= get_dyna_var_idx(iseq
, id
, &lv
, &ls
);
9212 COMPILE_ERROR(ERROR_ARGS
"NODE_DASGN: unknown id (%"PRIsVALUE
")",
9216 ADD_SETLOCAL(ret
, node
, ls
- idx
, lv
);
9220 CHECK(COMPILE(ret
, "lvalue", node
->nd_value
));
9223 ADD_INSN(ret
, node
, dup
);
9225 ADD_INSN1(ret
, node
, setglobal
, ID2SYM(node
->nd_entry
));
9229 CHECK(COMPILE(ret
, "lvalue", node
->nd_value
));
9231 ADD_INSN(ret
, node
, dup
);
9233 ADD_INSN2(ret
, node
, setinstancevariable
,
9234 ID2SYM(node
->nd_vid
),
9235 get_ivar_ic_value(iseq
,node
->nd_vid
));
9239 CHECK(COMPILE(ret
, "lvalue", node
->nd_value
));
9242 ADD_INSN(ret
, node
, dup
);
9246 ADD_INSN1(ret
, node
, putspecialobject
,
9247 INT2FIX(VM_SPECIAL_OBJECT_CONST_BASE
));
9248 ADD_INSN1(ret
, node
, setconstant
, ID2SYM(node
->nd_vid
));
9251 compile_cpath(ret
, iseq
, node
->nd_else
);
9252 ADD_INSN1(ret
, node
, setconstant
, ID2SYM(node
->nd_else
->nd_mid
));
9257 CHECK(COMPILE(ret
, "cvasgn val", node
->nd_value
));
9259 ADD_INSN(ret
, node
, dup
);
9261 ADD_INSN2(ret
, node
, setclassvariable
,
9262 ID2SYM(node
->nd_vid
),
9263 get_ivar_ic_value(iseq
,node
->nd_vid
));
9267 CHECK(compile_op_asgn1(iseq
, ret
, node
, popped
));
9270 CHECK(compile_op_asgn2(iseq
, ret
, node
, popped
));
9273 CHECK(compile_op_cdecl(iseq
, ret
, node
, popped
));
9275 case NODE_OP_ASGN_AND
:
9276 case NODE_OP_ASGN_OR
:
9277 CHECK(compile_op_log(iseq
, ret
, node
, popped
, type
));
9279 case NODE_CALL
: /* obj.foo */
9280 case NODE_OPCALL
: /* foo[] */
9281 if (compile_call_precheck_freeze(iseq
, ret
, node
, node
, popped
) == TRUE
) {
9284 case NODE_QCALL
: /* obj&.foo */
9285 case NODE_FCALL
: /* foo() */
9286 case NODE_VCALL
: /* foo (variable or call) */
9287 if (compile_call(iseq
, ret
, node
, type
, node
, popped
, false) == COMPILE_NG
) {
9293 CHECK(compile_super(iseq
, ret
, node
, popped
, type
));
9296 CHECK(compile_array(iseq
, ret
, node
, popped
) >= 0);
9301 ADD_INSN1(ret
, node
, newarray
, INT2FIX(0));
9306 const NODE
*n
= node
;
9308 COMPILE_ERROR(ERROR_ARGS
"NODE_VALUES: must not be popped");
9311 CHECK(COMPILE(ret
, "values item", n
->nd_head
));
9314 ADD_INSN1(ret
, node
, newarray
, INT2FIX(node
->nd_alen
));
9318 CHECK(compile_hash(iseq
, ret
, node
, FALSE
, popped
) >= 0);
9321 CHECK(compile_return(iseq
, ret
, node
, popped
));
9324 CHECK(compile_yield(iseq
, ret
, node
, popped
));
9328 compile_lvar(iseq
, ret
, node
, node
->nd_vid
);
9334 debugi("nd_vid", node
->nd_vid
);
9336 idx
= get_dyna_var_idx(iseq
, node
->nd_vid
, &lv
, &ls
);
9338 COMPILE_ERROR(ERROR_ARGS
"unknown dvar (%"PRIsVALUE
")",
9339 rb_id2str(node
->nd_vid
));
9342 ADD_GETLOCAL(ret
, node
, ls
- idx
, lv
);
9347 ADD_INSN1(ret
, node
, getglobal
, ID2SYM(node
->nd_entry
));
9349 ADD_INSN(ret
, node
, pop
);
9354 debugi("nd_vid", node
->nd_vid
);
9356 ADD_INSN2(ret
, node
, getinstancevariable
,
9357 ID2SYM(node
->nd_vid
),
9358 get_ivar_ic_value(iseq
,node
->nd_vid
));
9363 debugi("nd_vid", node
->nd_vid
);
9365 if (ISEQ_COMPILE_DATA(iseq
)->option
->inline_const_cache
) {
9366 LABEL
*lend
= NEW_LABEL(line
);
9367 int ic_index
= body
->is_size
++;
9369 ADD_INSN2(ret
, node
, opt_getinlinecache
, lend
, INT2FIX(ic_index
));
9370 ADD_INSN1(ret
, node
, putobject
, Qtrue
);
9371 ADD_INSN1(ret
, node
, getconstant
, ID2SYM(node
->nd_vid
));
9372 ADD_INSN1(ret
, node
, opt_setinlinecache
, INT2FIX(ic_index
));
9373 ADD_LABEL(ret
, lend
);
9376 ADD_INSN(ret
, node
, putnil
);
9377 ADD_INSN1(ret
, node
, putobject
, Qtrue
);
9378 ADD_INSN1(ret
, node
, getconstant
, ID2SYM(node
->nd_vid
));
9382 ADD_INSN(ret
, node
, pop
);
9388 ADD_INSN2(ret
, node
, getclassvariable
,
9389 ID2SYM(node
->nd_vid
),
9390 get_ivar_ic_value(iseq
,node
->nd_vid
));
9396 if (!node
->nd_nth
) {
9397 ADD_INSN(ret
, node
, putnil
);
9400 ADD_INSN2(ret
, node
, getspecial
, INT2FIX(1) /* '~' */,
9401 INT2FIX(node
->nd_nth
<< 1));
9405 case NODE_BACK_REF
:{
9407 ADD_INSN2(ret
, node
, getspecial
, INT2FIX(1) /* '~' */,
9408 INT2FIX(0x01 | (node
->nd_nth
<< 1)));
9415 CHECK(compile_match(iseq
, ret
, node
, popped
, type
));
9418 debugp_param("lit", node
->nd_lit
);
9420 ADD_INSN1(ret
, node
, putobject
, node
->nd_lit
);
9421 RB_OBJ_WRITTEN(iseq
, Qundef
, node
->nd_lit
);
9426 debugp_param("nd_lit", node
->nd_lit
);
9428 VALUE lit
= node
->nd_lit
;
9429 if (!ISEQ_COMPILE_DATA(iseq
)->option
->frozen_string_literal
) {
9430 lit
= rb_fstring(lit
);
9431 ADD_INSN1(ret
, node
, putstring
, lit
);
9432 RB_OBJ_WRITTEN(iseq
, Qundef
, lit
);
9435 if (ISEQ_COMPILE_DATA(iseq
)->option
->debug_frozen_string_literal
|| RTEST(ruby_debug
)) {
9436 VALUE debug_info
= rb_ary_new_from_args(2, rb_iseq_path(iseq
), INT2FIX(line
));
9437 lit
= rb_str_dup(lit
);
9438 rb_ivar_set(lit
, id_debug_created_info
, rb_obj_freeze(debug_info
));
9439 lit
= rb_str_freeze(lit
);
9442 lit
= rb_fstring(lit
);
9444 ADD_INSN1(ret
, node
, putobject
, lit
);
9445 RB_OBJ_WRITTEN(iseq
, Qundef
, lit
);
9451 compile_dstr(iseq
, ret
, node
);
9454 ADD_INSN(ret
, node
, pop
);
9459 ADD_CALL_RECEIVER(ret
, node
);
9460 VALUE str
= rb_fstring(node
->nd_lit
);
9461 ADD_INSN1(ret
, node
, putobject
, str
);
9462 RB_OBJ_WRITTEN(iseq
, Qundef
, str
);
9463 ADD_CALL(ret
, node
, idBackquote
, INT2FIX(1));
9466 ADD_INSN(ret
, node
, pop
);
9471 ADD_CALL_RECEIVER(ret
, node
);
9472 compile_dstr(iseq
, ret
, node
);
9473 ADD_CALL(ret
, node
, idBackquote
, INT2FIX(1));
9476 ADD_INSN(ret
, node
, pop
);
9481 CHECK(compile_evstr(iseq
, ret
, node
->nd_body
, popped
));
9484 compile_dregx(iseq
, ret
, node
);
9487 ADD_INSN(ret
, node
, pop
);
9492 int ic_index
= body
->is_size
++;
9493 const rb_iseq_t
*block_iseq
;
9494 block_iseq
= NEW_CHILD_ISEQ(node
->nd_body
, make_name_for_block(iseq
), ISEQ_TYPE_PLAIN
, line
);
9496 ADD_INSN2(ret
, node
, once
, block_iseq
, INT2FIX(ic_index
));
9497 RB_OBJ_WRITTEN(iseq
, Qundef
, (VALUE
)block_iseq
);
9500 ADD_INSN(ret
, node
, pop
);
9506 CHECK(COMPILE(ret
, "argscat head", node
->nd_head
));
9507 ADD_INSN1(ret
, node
, splatarray
, Qfalse
);
9508 ADD_INSN(ret
, node
, pop
);
9509 CHECK(COMPILE(ret
, "argscat body", node
->nd_body
));
9510 ADD_INSN1(ret
, node
, splatarray
, Qfalse
);
9511 ADD_INSN(ret
, node
, pop
);
9514 CHECK(COMPILE(ret
, "argscat head", node
->nd_head
));
9515 CHECK(COMPILE(ret
, "argscat body", node
->nd_body
));
9516 ADD_INSN(ret
, node
, concatarray
);
9520 case NODE_ARGSPUSH
:{
9522 CHECK(COMPILE(ret
, "argspush head", node
->nd_head
));
9523 ADD_INSN1(ret
, node
, splatarray
, Qfalse
);
9524 ADD_INSN(ret
, node
, pop
);
9525 CHECK(COMPILE_(ret
, "argspush body", node
->nd_body
, popped
));
9528 CHECK(COMPILE(ret
, "argspush head", node
->nd_head
));
9529 CHECK(compile_array_1(iseq
, ret
, node
->nd_body
));
9530 ADD_INSN(ret
, node
, concatarray
);
9535 CHECK(COMPILE(ret
, "splat", node
->nd_head
));
9536 ADD_INSN1(ret
, node
, splatarray
, Qtrue
);
9539 ADD_INSN(ret
, node
, pop
);
9544 ID mid
= node
->nd_mid
;
9545 const rb_iseq_t
*method_iseq
= NEW_ISEQ(node
->nd_defn
,
9547 ISEQ_TYPE_METHOD
, line
);
9549 debugp_param("defn/iseq", rb_iseqw_new(method_iseq
));
9550 ADD_INSN2(ret
, node
, definemethod
, ID2SYM(mid
), method_iseq
);
9551 RB_OBJ_WRITTEN(iseq
, Qundef
, (VALUE
)method_iseq
);
9554 ADD_INSN1(ret
, node
, putobject
, ID2SYM(mid
));
9560 ID mid
= node
->nd_mid
;
9561 const rb_iseq_t
* singleton_method_iseq
= NEW_ISEQ(node
->nd_defn
,
9563 ISEQ_TYPE_METHOD
, line
);
9565 debugp_param("defs/iseq", rb_iseqw_new(singleton_method_iseq
));
9566 CHECK(COMPILE(ret
, "defs: recv", node
->nd_recv
));
9567 ADD_INSN2(ret
, node
, definesmethod
, ID2SYM(mid
), singleton_method_iseq
);
9568 RB_OBJ_WRITTEN(iseq
, Qundef
, (VALUE
)singleton_method_iseq
);
9571 ADD_INSN1(ret
, node
, putobject
, ID2SYM(mid
));
9576 ADD_INSN1(ret
, node
, putspecialobject
, INT2FIX(VM_SPECIAL_OBJECT_VMCORE
));
9577 ADD_INSN1(ret
, node
, putspecialobject
, INT2FIX(VM_SPECIAL_OBJECT_CBASE
));
9578 CHECK(COMPILE(ret
, "alias arg1", node
->nd_1st
));
9579 CHECK(COMPILE(ret
, "alias arg2", node
->nd_2nd
));
9580 ADD_SEND(ret
, node
, id_core_set_method_alias
, INT2FIX(3));
9583 ADD_INSN(ret
, node
, pop
);
9588 ADD_INSN1(ret
, node
, putspecialobject
, INT2FIX(VM_SPECIAL_OBJECT_VMCORE
));
9589 ADD_INSN1(ret
, node
, putobject
, ID2SYM(node
->nd_alias
));
9590 ADD_INSN1(ret
, node
, putobject
, ID2SYM(node
->nd_orig
));
9591 ADD_SEND(ret
, node
, id_core_set_variable_alias
, INT2FIX(2));
9594 ADD_INSN(ret
, node
, pop
);
9599 ADD_INSN1(ret
, node
, putspecialobject
, INT2FIX(VM_SPECIAL_OBJECT_VMCORE
));
9600 ADD_INSN1(ret
, node
, putspecialobject
, INT2FIX(VM_SPECIAL_OBJECT_CBASE
));
9601 CHECK(COMPILE(ret
, "undef arg", node
->nd_undef
));
9602 ADD_SEND(ret
, node
, id_core_undef_method
, INT2FIX(2));
9605 ADD_INSN(ret
, node
, pop
);
9610 const rb_iseq_t
*class_iseq
= NEW_CHILD_ISEQ(node
->nd_body
,
9611 rb_str_freeze(rb_sprintf("<class:%"PRIsVALUE
">", rb_id2str(node
->nd_cpath
->nd_mid
))),
9612 ISEQ_TYPE_CLASS
, line
);
9613 const int flags
= VM_DEFINECLASS_TYPE_CLASS
|
9614 (node
->nd_super
? VM_DEFINECLASS_FLAG_HAS_SUPERCLASS
: 0) |
9615 compile_cpath(ret
, iseq
, node
->nd_cpath
);
9617 CHECK(COMPILE(ret
, "super", node
->nd_super
));
9618 ADD_INSN3(ret
, node
, defineclass
, ID2SYM(node
->nd_cpath
->nd_mid
), class_iseq
, INT2FIX(flags
));
9619 RB_OBJ_WRITTEN(iseq
, Qundef
, (VALUE
)class_iseq
);
9622 ADD_INSN(ret
, node
, pop
);
9627 const rb_iseq_t
*module_iseq
= NEW_CHILD_ISEQ(node
->nd_body
,
9628 rb_str_freeze(rb_sprintf("<module:%"PRIsVALUE
">", rb_id2str(node
->nd_cpath
->nd_mid
))),
9629 ISEQ_TYPE_CLASS
, line
);
9630 const int flags
= VM_DEFINECLASS_TYPE_MODULE
|
9631 compile_cpath(ret
, iseq
, node
->nd_cpath
);
9633 ADD_INSN (ret
, node
, putnil
); /* dummy */
9634 ADD_INSN3(ret
, node
, defineclass
, ID2SYM(node
->nd_cpath
->nd_mid
), module_iseq
, INT2FIX(flags
));
9635 RB_OBJ_WRITTEN(iseq
, Qundef
, (VALUE
)module_iseq
);
9638 ADD_INSN(ret
, node
, pop
);
9644 const rb_iseq_t
*singleton_class
= NEW_ISEQ(node
->nd_body
, rb_fstring_lit("singleton class"),
9645 ISEQ_TYPE_CLASS
, line
);
9647 CHECK(COMPILE(ret
, "sclass#recv", node
->nd_recv
));
9648 ADD_INSN (ret
, node
, putnil
);
9649 CONST_ID(singletonclass
, "singletonclass");
9650 ADD_INSN3(ret
, node
, defineclass
,
9651 ID2SYM(singletonclass
), singleton_class
,
9652 INT2FIX(VM_DEFINECLASS_TYPE_SINGLETON_CLASS
));
9653 RB_OBJ_WRITTEN(iseq
, Qundef
, (VALUE
)singleton_class
);
9656 ADD_INSN(ret
, node
, pop
);
9661 CHECK(compile_colon2(iseq
, ret
, node
, popped
));
9664 CHECK(compile_colon3(iseq
, ret
, node
, popped
));
9667 CHECK(compile_dots(iseq
, ret
, node
, popped
, FALSE
));
9670 CHECK(compile_dots(iseq
, ret
, node
, popped
, TRUE
));
9674 LABEL
*lend
= NEW_LABEL(line
);
9675 LABEL
*ltrue
= NEW_LABEL(line
);
9676 LABEL
*lfalse
= NEW_LABEL(line
);
9677 CHECK(compile_flip_flop(iseq
, ret
, node
, type
== NODE_FLIP2
,
9679 ADD_LABEL(ret
, ltrue
);
9680 ADD_INSN1(ret
, node
, putobject
, Qtrue
);
9681 ADD_INSNL(ret
, node
, jump
, lend
);
9682 ADD_LABEL(ret
, lfalse
);
9683 ADD_INSN1(ret
, node
, putobject
, Qfalse
);
9684 ADD_LABEL(ret
, lend
);
9689 ADD_INSN(ret
, node
, putself
);
9695 ADD_INSN(ret
, node
, putnil
);
9701 ADD_INSN1(ret
, node
, putobject
, Qtrue
);
9707 ADD_INSN1(ret
, node
, putobject
, Qfalse
);
9712 CHECK(compile_errinfo(iseq
, ret
, node
, popped
));
9716 CHECK(compile_defined_expr(iseq
, ret
, node
, Qtrue
));
9721 * ONCE{ rb_mRubyVMFrozenCore::core#set_postexe{ ... } }
9723 int is_index
= body
->is_size
++;
9724 struct rb_iseq_new_with_callback_callback_func
*ifunc
=
9725 rb_iseq_new_with_callback_new_callback(build_postexe_iseq
, node
->nd_body
);
9726 const rb_iseq_t
*once_iseq
=
9727 new_child_iseq_with_callback(iseq
, ifunc
,
9728 rb_fstring(make_name_for_block(iseq
)), iseq
, ISEQ_TYPE_BLOCK
, line
);
9730 ADD_INSN2(ret
, node
, once
, once_iseq
, INT2FIX(is_index
));
9731 RB_OBJ_WRITTEN(iseq
, Qundef
, (VALUE
)once_iseq
);
9734 ADD_INSN(ret
, node
, pop
);
9739 CHECK(compile_kw_arg(iseq
, ret
, node
, popped
));
9742 compile_dstr(iseq
, ret
, node
);
9744 ADD_INSN(ret
, node
, intern
);
9747 ADD_INSN(ret
, node
, pop
);
9752 CHECK(compile_attrasgn(iseq
, ret
, node
, popped
));
9755 /* compile same as lambda{...} */
9756 const rb_iseq_t
*block
= NEW_CHILD_ISEQ(node
->nd_body
, make_name_for_block(iseq
), ISEQ_TYPE_BLOCK
, line
);
9757 VALUE argc
= INT2FIX(0);
9759 ADD_INSN1(ret
, node
, putspecialobject
, INT2FIX(VM_SPECIAL_OBJECT_VMCORE
));
9760 ADD_CALL_WITH_BLOCK(ret
, node
, idLambda
, argc
, block
);
9761 RB_OBJ_WRITTEN(iseq
, Qundef
, (VALUE
)block
);
9764 ADD_INSN(ret
, node
, pop
);
9769 UNKNOWN_NODE("iseq_compile_each", node
, COMPILE_NG
);
9779 /***************************/
9780 /* instruction information */
9781 /***************************/
9784 insn_data_length(INSN
*iobj
)
9786 return insn_len(iobj
->insn_id
);
9790 calc_sp_depth(int depth
, INSN
*insn
)
9792 return comptime_insn_stack_increase(depth
, insn
->insn_id
, insn
->operands
);
9796 opobj_inspect(VALUE obj
)
9798 if (!SPECIAL_CONST_P(obj
) && !RBASIC_CLASS(obj
)) {
9799 switch (BUILTIN_TYPE(obj
)) {
9801 obj
= rb_str_new_cstr(RSTRING_PTR(obj
));
9804 obj
= rb_ary_dup(obj
);
9810 return rb_inspect(obj
);
9816 insn_data_to_s_detail(INSN
*iobj
)
9818 VALUE str
= rb_sprintf("%-20s ", insn_name(iobj
->insn_id
));
9820 if (iobj
->operands
) {
9821 const char *types
= insn_op_types(iobj
->insn_id
);
9824 for (j
= 0; types
[j
]; j
++) {
9825 char type
= types
[j
];
9828 case TS_OFFSET
: /* label(destination position) */
9830 LABEL
*lobj
= (LABEL
*)OPERAND_AT(iobj
, j
);
9831 rb_str_catf(str
, LABEL_FORMAT
, lobj
->label_no
);
9835 case TS_ISEQ
: /* iseq */
9837 rb_iseq_t
*iseq
= (rb_iseq_t
*)OPERAND_AT(iobj
, j
);
9839 if (0 && iseq
) { /* TODO: invalidate now */
9842 rb_str_concat(str
, opobj_inspect(val
));
9846 case TS_NUM
: /* ulong */
9847 case TS_VALUE
: /* VALUE */
9849 VALUE v
= OPERAND_AT(iobj
, j
);
9851 rb_str_cat2(str
, "<hidden>");
9853 rb_str_concat(str
, opobj_inspect(v
));
9857 case TS_ID
: /* ID */
9858 rb_str_concat(str
, opobj_inspect(OPERAND_AT(iobj
, j
)));
9860 case TS_IC
: /* inline cache */
9861 case TS_IVC
: /* inline ivar cache */
9862 case TS_ISE
: /* inline storage entry */
9863 rb_str_catf(str
, "<ic:%d>", FIX2INT(OPERAND_AT(iobj
, j
)));
9865 case TS_CALLDATA
: /* we store these as call infos at compile time */
9867 const struct rb_callinfo
*ci
= (struct rb_callinfo
*)OPERAND_AT(iobj
, j
);
9868 rb_str_cat2(str
, "<calldata:");
9869 if (vm_ci_mid(ci
)) rb_str_catf(str
, "%"PRIsVALUE
, rb_id2str(vm_ci_mid(ci
)));
9870 rb_str_catf(str
, ", %d>", vm_ci_argc(ci
));
9873 case TS_CDHASH
: /* case/when condition cache */
9874 rb_str_cat2(str
, "<ch>");
9878 void *func
= (void *)OPERAND_AT(iobj
, j
);
9881 if (dladdr(func
, &info
) && info
.dli_sname
) {
9882 rb_str_cat2(str
, info
.dli_sname
);
9886 rb_str_catf(str
, "<%p>", func
);
9890 rb_str_cat2(str
, "<TS_BUILTIN>");
9893 rb_raise(rb_eSyntaxError
, "unknown operand type: %c", type
);
9897 rb_str_cat2(str
, ", ");
9905 dump_disasm_list(const LINK_ELEMENT
*link
)
9907 dump_disasm_list_with_cursor(link
, NULL
, NULL
);
9911 dump_disasm_list_with_cursor(const LINK_ELEMENT
*link
, const LINK_ELEMENT
*curr
, const LABEL
*dest
)
9918 printf("-- raw disasm--------\n");
9921 if (curr
) printf(curr
== link
? "*" : " ");
9922 switch (link
->type
) {
9923 case ISEQ_ELEMENT_INSN
:
9925 iobj
= (INSN
*)link
;
9926 str
= insn_data_to_s_detail(iobj
);
9927 printf(" %04d %-65s(%4u)\n", pos
, StringValueCStr(str
), iobj
->insn_info
.line_no
);
9928 pos
+= insn_data_length(iobj
);
9931 case ISEQ_ELEMENT_LABEL
:
9933 lobj
= (LABEL
*)link
;
9934 printf(LABEL_FORMAT
" [sp: %d]%s\n", lobj
->label_no
, lobj
->sp
,
9935 dest
== lobj
? " <---" : "");
9938 case ISEQ_ELEMENT_TRACE
:
9940 TRACE
*trace
= (TRACE
*)link
;
9941 printf(" trace: %0x\n", trace
->event
);
9944 case ISEQ_ELEMENT_ADJUST
:
9946 ADJUST
*adjust
= (ADJUST
*)link
;
9947 printf(" adjust: [label: %d]\n", adjust
->label
? adjust
->label
->label_no
: -1);
9952 rb_raise(rb_eSyntaxError
, "dump_disasm_list error: %ld\n", FIX2LONG(link
->type
));
9956 printf("---------------------\n");
/* Public accessor: name of VM instruction `i`. */
const char *
rb_insns_name(int i)
{
    return insn_name(i);
}
9967 rb_insns_name_array(void)
9969 VALUE ary
= rb_ary_new_capa(VM_INSTRUCTION_SIZE
);
9971 for (i
= 0; i
< VM_INSTRUCTION_SIZE
; i
++) {
9972 rb_ary_push(ary
, rb_fstring_cstr(insn_name(i
)));
9974 return rb_obj_freeze(ary
);
9978 register_label(rb_iseq_t
*iseq
, struct st_table
*labels_table
, VALUE obj
)
9982 obj
= rb_to_symbol_type(obj
);
9984 if (st_lookup(labels_table
, obj
, &tmp
) == 0) {
9985 label
= NEW_LABEL(0);
9986 st_insert(labels_table
, obj
, (st_data_t
)label
);
9989 label
= (LABEL
*)tmp
;
9996 get_exception_sym2type(VALUE sym
)
9998 static VALUE symRescue
, symEnsure
, symRetry
;
9999 static VALUE symBreak
, symRedo
, symNext
;
10001 if (symRescue
== 0) {
10002 symRescue
= ID2SYM(rb_intern_const("rescue"));
10003 symEnsure
= ID2SYM(rb_intern_const("ensure"));
10004 symRetry
= ID2SYM(rb_intern_const("retry"));
10005 symBreak
= ID2SYM(rb_intern_const("break"));
10006 symRedo
= ID2SYM(rb_intern_const("redo"));
10007 symNext
= ID2SYM(rb_intern_const("next"));
10010 if (sym
== symRescue
) return CATCH_TYPE_RESCUE
;
10011 if (sym
== symEnsure
) return CATCH_TYPE_ENSURE
;
10012 if (sym
== symRetry
) return CATCH_TYPE_RETRY
;
10013 if (sym
== symBreak
) return CATCH_TYPE_BREAK
;
10014 if (sym
== symRedo
) return CATCH_TYPE_REDO
;
10015 if (sym
== symNext
) return CATCH_TYPE_NEXT
;
10016 rb_raise(rb_eSyntaxError
, "invalid exception symbol: %+"PRIsVALUE
, sym
);
10021 iseq_build_from_ary_exception(rb_iseq_t
*iseq
, struct st_table
*labels_table
,
10026 for (i
=0; i
<RARRAY_LEN(exception
); i
++) {
10027 const rb_iseq_t
*eiseq
;
10029 LABEL
*lstart
, *lend
, *lcont
;
10032 v
= rb_to_array_type(RARRAY_AREF(exception
, i
));
10033 if (RARRAY_LEN(v
) != 6) {
10034 rb_raise(rb_eSyntaxError
, "wrong exception entry");
10036 type
= get_exception_sym2type(RARRAY_AREF(v
, 0));
10037 if (NIL_P(RARRAY_AREF(v
, 1))) {
10041 eiseq
= rb_iseqw_to_iseq(rb_iseq_load(RARRAY_AREF(v
, 1), (VALUE
)iseq
, Qnil
));
10044 lstart
= register_label(iseq
, labels_table
, RARRAY_AREF(v
, 2));
10045 lend
= register_label(iseq
, labels_table
, RARRAY_AREF(v
, 3));
10046 lcont
= register_label(iseq
, labels_table
, RARRAY_AREF(v
, 4));
10047 sp
= NUM2UINT(RARRAY_AREF(v
, 5));
10049 /* TODO: Dirty Hack! Fix me */
10050 if (type
== CATCH_TYPE_RESCUE
||
10051 type
== CATCH_TYPE_BREAK
||
10052 type
== CATCH_TYPE_NEXT
) {
10058 ADD_CATCH_ENTRY(type
, lstart
, lend
, eiseq
, lcont
);
10065 static struct st_table
*
10066 insn_make_insn_table(void)
10068 struct st_table
*table
;
10070 table
= st_init_numtable_with_size(VM_INSTRUCTION_SIZE
);
10072 for (i
=0; i
<VM_INSTRUCTION_SIZE
; i
++) {
10073 st_insert(table
, ID2SYM(rb_intern_const(insn_name(i
))), i
);
10079 static const rb_iseq_t
*
10080 iseq_build_load_iseq(const rb_iseq_t
*iseq
, VALUE op
)
10083 const rb_iseq_t
*loaded_iseq
;
10085 if (RB_TYPE_P(op
, T_ARRAY
)) {
10086 iseqw
= rb_iseq_load(op
, (VALUE
)iseq
, Qnil
);
10088 else if (CLASS_OF(op
) == rb_cISeq
) {
10092 rb_raise(rb_eSyntaxError
, "ISEQ is required");
10095 loaded_iseq
= rb_iseqw_to_iseq(iseqw
);
10096 return loaded_iseq
;
10100 iseq_build_callinfo_from_hash(rb_iseq_t
*iseq
, VALUE op
)
10104 unsigned int flag
= 0;
10105 struct rb_callinfo_kwarg
*kw_arg
= 0;
10108 VALUE vmid
= rb_hash_aref(op
, ID2SYM(rb_intern_const("mid")));
10109 VALUE vflag
= rb_hash_aref(op
, ID2SYM(rb_intern_const("flag")));
10110 VALUE vorig_argc
= rb_hash_aref(op
, ID2SYM(rb_intern_const("orig_argc")));
10111 VALUE vkw_arg
= rb_hash_aref(op
, ID2SYM(rb_intern_const("kw_arg")));
10113 if (!NIL_P(vmid
)) mid
= SYM2ID(vmid
);
10114 if (!NIL_P(vflag
)) flag
= NUM2UINT(vflag
);
10115 if (!NIL_P(vorig_argc
)) orig_argc
= FIX2INT(vorig_argc
);
10117 if (!NIL_P(vkw_arg
)) {
10119 int len
= RARRAY_LENINT(vkw_arg
);
10120 size_t n
= rb_callinfo_kwarg_bytes(len
);
10122 kw_arg
= xmalloc(n
);
10123 kw_arg
->keyword_len
= len
;
10124 for (i
= 0; i
< len
; i
++) {
10125 VALUE kw
= RARRAY_AREF(vkw_arg
, i
);
10126 SYM2ID(kw
); /* make immortal */
10127 kw_arg
->keywords
[i
] = kw
;
10132 const struct rb_callinfo
*ci
= new_callinfo(iseq
, mid
, orig_argc
, flag
, kw_arg
, (flag
& VM_CALL_ARGS_SIMPLE
) == 0);
10133 RB_OBJ_WRITTEN(iseq
, Qundef
, ci
);
10137 static rb_event_flag_t
10138 event_name_to_flag(VALUE sym
)
10140 #define CHECK_EVENT(ev) if (sym == ID2SYM(rb_intern_const(#ev))) return ev;
10141 CHECK_EVENT(RUBY_EVENT_LINE
);
10142 CHECK_EVENT(RUBY_EVENT_CLASS
);
10143 CHECK_EVENT(RUBY_EVENT_END
);
10144 CHECK_EVENT(RUBY_EVENT_CALL
);
10145 CHECK_EVENT(RUBY_EVENT_RETURN
);
10146 CHECK_EVENT(RUBY_EVENT_B_CALL
);
10147 CHECK_EVENT(RUBY_EVENT_B_RETURN
);
10149 return RUBY_EVENT_NONE
;
10153 iseq_build_from_ary_body(rb_iseq_t
*iseq
, LINK_ANCHOR
*const anchor
,
10154 VALUE body
, VALUE node_ids
, VALUE labels_wrapper
)
10156 /* TODO: body should be frozen */
10157 long i
, len
= RARRAY_LEN(body
);
10158 struct st_table
*labels_table
= DATA_PTR(labels_wrapper
);
10160 int line_no
= 0, node_id
= -1, insn_idx
= 0;
10161 int ret
= COMPILE_OK
;
10164 * index -> LABEL *label
10166 static struct st_table
*insn_table
;
10168 if (insn_table
== 0) {
10169 insn_table
= insn_make_insn_table();
10172 for (i
=0; i
<len
; i
++) {
10173 VALUE obj
= RARRAY_AREF(body
, i
);
10175 if (SYMBOL_P(obj
)) {
10176 rb_event_flag_t event
;
10177 if ((event
= event_name_to_flag(obj
)) != RUBY_EVENT_NONE
) {
10178 ADD_TRACE(anchor
, event
);
10181 LABEL
*label
= register_label(iseq
, labels_table
, obj
);
10182 ADD_LABEL(anchor
, label
);
10185 else if (FIXNUM_P(obj
)) {
10186 line_no
= NUM2INT(obj
);
10188 else if (RB_TYPE_P(obj
, T_ARRAY
)) {
10190 int argc
= RARRAY_LENINT(obj
) - 1;
10195 node_id
= NUM2INT(rb_ary_entry(node_ids
, insn_idx
++));
10198 insn
= (argc
< 0) ? Qnil
: RARRAY_AREF(obj
, 0);
10199 if (st_lookup(insn_table
, (st_data_t
)insn
, &insn_id
) == 0) {
10200 /* TODO: exception */
10201 COMPILE_ERROR(iseq
, line_no
,
10202 "unknown instruction: %+"PRIsVALUE
, insn
);
10207 if (argc
!= insn_len((VALUE
)insn_id
)-1) {
10208 COMPILE_ERROR(iseq
, line_no
,
10209 "operand size mismatch");
10215 argv
= compile_data_calloc2(iseq
, sizeof(VALUE
), argc
);
10217 // add element before operand setup to make GC root
10218 NODE dummy_line_node
= generate_dummy_line_node(line_no
, node_id
);
10220 (LINK_ELEMENT
*)new_insn_core(iseq
, &dummy_line_node
,
10221 (enum ruby_vminsn_type
)insn_id
, argc
, argv
));
10223 for (j
=0; j
<argc
; j
++) {
10224 VALUE op
= rb_ary_entry(obj
, j
+1);
10225 switch (insn_op_type((VALUE
)insn_id
, j
)) {
10227 LABEL
*label
= register_label(iseq
, labels_table
, op
);
10228 argv
[j
] = (VALUE
)label
;
10238 RB_OBJ_WRITTEN(iseq
, Qundef
, op
);
10243 VALUE v
= (VALUE
)iseq_build_load_iseq(iseq
, op
);
10245 RB_OBJ_WRITTEN(iseq
, Qundef
, v
);
10254 case TS_IVC
: /* inline ivar cache */
10256 if (NUM2UINT(op
) >= iseq
->body
->is_size
) {
10257 iseq
->body
->is_size
= NUM2INT(op
) + 1;
10259 FL_SET((VALUE
)iseq
, ISEQ_MARKABLE_ISEQ
);
10262 argv
[j
] = iseq_build_callinfo_from_hash(iseq
, op
);
10265 argv
[j
] = rb_to_symbol_type(op
);
10270 VALUE map
= rb_hash_new_with_size(RARRAY_LEN(op
)/2);
10272 RHASH_TBL_RAW(map
)->type
= &cdhash_type
;
10273 op
= rb_to_array_type(op
);
10274 for (i
=0; i
<RARRAY_LEN(op
); i
+=2) {
10275 VALUE key
= RARRAY_AREF(op
, i
);
10276 VALUE sym
= RARRAY_AREF(op
, i
+1);
10278 register_label(iseq
, labels_table
, sym
);
10279 rb_hash_aset(map
, key
, (VALUE
)label
| 1);
10283 RB_OBJ_WRITTEN(iseq
, Qundef
, map
);
10288 #if SIZEOF_VALUE <= SIZEOF_LONG
10289 long funcptr
= NUM2LONG(op
);
10291 LONG_LONG funcptr
= NUM2LL(op
);
10293 argv
[j
] = (VALUE
)funcptr
;
10297 rb_raise(rb_eSyntaxError
, "unknown operand: %c", insn_op_type((VALUE
)insn_id
, j
));
10302 NODE dummy_line_node
= generate_dummy_line_node(line_no
, node_id
);
10304 (LINK_ELEMENT
*)new_insn_core(iseq
, &dummy_line_node
,
10305 (enum ruby_vminsn_type
)insn_id
, argc
, NULL
));
10309 rb_raise(rb_eTypeError
, "unexpected object for instruction");
10312 DATA_PTR(labels_wrapper
) = 0;
10313 validate_labels(iseq
, labels_table
);
10314 if (!ret
) return ret
;
10315 return iseq_setup(iseq
, anchor
);
10318 #define CHECK_ARRAY(v) rb_to_array_type(v)
10319 #define CHECK_SYMBOL(v) rb_to_symbol_type(v)
10322 int_param(int *dst
, VALUE param
, VALUE sym
)
10324 VALUE val
= rb_hash_aref(param
, sym
);
10325 if (FIXNUM_P(val
)) {
10326 *dst
= FIX2INT(val
);
10329 else if (!NIL_P(val
)) {
10330 rb_raise(rb_eTypeError
, "invalid %+"PRIsVALUE
" Fixnum: %+"PRIsVALUE
,
10336 static const struct rb_iseq_param_keyword
*
10337 iseq_build_kw(rb_iseq_t
*iseq
, VALUE params
, VALUE keywords
)
10340 int len
= RARRAY_LENINT(keywords
);
10342 VALUE key
, sym
, default_val
;
10345 struct rb_iseq_param_keyword
*keyword
= ZALLOC(struct rb_iseq_param_keyword
);
10347 iseq
->body
->param
.flags
.has_kw
= TRUE
;
10349 keyword
->num
= len
;
10350 #define SYM(s) ID2SYM(rb_intern_const(#s))
10351 (void)int_param(&keyword
->bits_start
, params
, SYM(kwbits
));
10352 i
= keyword
->bits_start
- keyword
->num
;
10353 ids
= (ID
*)&iseq
->body
->local_table
[i
];
10356 /* required args */
10357 for (i
= 0; i
< len
; i
++) {
10358 VALUE val
= RARRAY_AREF(keywords
, i
);
10360 if (!SYMBOL_P(val
)) {
10361 goto default_values
;
10363 ids
[i
] = SYM2ID(val
);
10364 keyword
->required_num
++;
10367 default_values
: /* note: we intentionally preserve `i' from previous loop */
10368 default_len
= len
- i
;
10369 if (default_len
== 0) {
10370 keyword
->table
= ids
;
10373 else if (default_len
< 0) {
10377 dvs
= ALLOC_N(VALUE
, (unsigned int)default_len
);
10379 for (j
= 0; i
< len
; i
++, j
++) {
10380 key
= RARRAY_AREF(keywords
, i
);
10383 switch (RARRAY_LEN(key
)) {
10385 sym
= RARRAY_AREF(key
, 0);
10386 default_val
= Qundef
;
10389 sym
= RARRAY_AREF(key
, 0);
10390 default_val
= RARRAY_AREF(key
, 1);
10393 rb_raise(rb_eTypeError
, "keyword default has unsupported len %+"PRIsVALUE
, key
);
10395 ids
[i
] = SYM2ID(sym
);
10396 dvs
[j
] = default_val
;
10399 keyword
->table
= ids
;
10400 keyword
->default_values
= dvs
;
10406 rb_iseq_mark_insn_storage(struct iseq_compile_data_storage
*storage
)
10409 size_t size
= sizeof(INSN
);
10410 unsigned int pos
= 0;
10413 #ifdef STRICT_ALIGNMENT
10414 size_t padding
= calc_padding((void *)&storage
->buff
[pos
], size
);
10416 const size_t padding
= 0; /* expected to be optimized by compiler */
10417 #endif /* STRICT_ALIGNMENT */
10418 size_t offset
= pos
+ size
+ padding
;
10419 if (offset
> storage
->size
|| offset
> storage
->pos
) {
10421 storage
= storage
->next
;
10424 #ifdef STRICT_ALIGNMENT
10425 pos
+= (int)padding
;
10426 #endif /* STRICT_ALIGNMENT */
10428 iobj
= (INSN
*)&storage
->buff
[pos
];
10430 if (iobj
->operands
) {
10432 const char *types
= insn_op_types(iobj
->insn_id
);
10434 for (j
= 0; types
[j
]; j
++) {
10435 char type
= types
[j
];
10440 case TS_CALLDATA
: // ci is stored.
10442 VALUE op
= OPERAND_AT(iobj
, j
);
10444 if (!SPECIAL_CONST_P(op
)) {
10460 rb_iseq_build_from_ary(rb_iseq_t
*iseq
, VALUE misc
, VALUE locals
, VALUE params
,
10461 VALUE exception
, VALUE body
)
10463 #define SYM(s) ID2SYM(rb_intern_const(#s))
10465 unsigned int arg_size
, local_size
, stack_max
;
10467 struct st_table
*labels_table
= st_init_numtable();
10468 VALUE labels_wrapper
= Data_Wrap_Struct(0, rb_mark_set
, st_free_table
, labels_table
);
10469 VALUE arg_opt_labels
= rb_hash_aref(params
, SYM(opt
));
10470 VALUE keywords
= rb_hash_aref(params
, SYM(keyword
));
10471 VALUE sym_arg_rest
= ID2SYM(rb_intern_const("#arg_rest"));
10472 DECL_ANCHOR(anchor
);
10473 INIT_ANCHOR(anchor
);
10475 len
= RARRAY_LENINT(locals
);
10476 iseq
->body
->local_table_size
= len
;
10477 iseq
->body
->local_table
= tbl
= len
> 0 ? (ID
*)ALLOC_N(ID
, iseq
->body
->local_table_size
) : NULL
;
10479 for (i
= 0; i
< len
; i
++) {
10480 VALUE lv
= RARRAY_AREF(locals
, i
);
10482 if (sym_arg_rest
== lv
) {
10486 tbl
[i
] = FIXNUM_P(lv
) ? (ID
)FIX2LONG(lv
) : SYM2ID(CHECK_SYMBOL(lv
));
10490 #define INT_PARAM(F) int_param(&iseq->body->param.F, params, SYM(F))
10491 if (INT_PARAM(lead_num
)) {
10492 iseq
->body
->param
.flags
.has_lead
= TRUE
;
10494 if (INT_PARAM(post_num
)) iseq
->body
->param
.flags
.has_post
= TRUE
;
10495 if (INT_PARAM(post_start
)) iseq
->body
->param
.flags
.has_post
= TRUE
;
10496 if (INT_PARAM(rest_start
)) iseq
->body
->param
.flags
.has_rest
= TRUE
;
10497 if (INT_PARAM(block_start
)) iseq
->body
->param
.flags
.has_block
= TRUE
;
10500 #define INT_PARAM(F) F = (int_param(&x, misc, SYM(F)) ? (unsigned int)x : 0)
10502 INT_PARAM(arg_size
);
10503 INT_PARAM(local_size
);
10504 INT_PARAM(stack_max
);
10508 VALUE node_ids
= Qfalse
;
10509 #ifdef USE_ISEQ_NODE_ID
10510 node_ids
= rb_hash_aref(misc
, ID2SYM(rb_intern("node_ids")));
10511 if (!RB_TYPE_P(node_ids
, T_ARRAY
)) {
10512 rb_raise(rb_eTypeError
, "node_ids is not an array");
10516 if (RB_TYPE_P(arg_opt_labels
, T_ARRAY
)) {
10517 len
= RARRAY_LENINT(arg_opt_labels
);
10518 iseq
->body
->param
.flags
.has_opt
= !!(len
- 1 >= 0);
10520 if (iseq
->body
->param
.flags
.has_opt
) {
10521 VALUE
*opt_table
= ALLOC_N(VALUE
, len
);
10523 for (i
= 0; i
< len
; i
++) {
10524 VALUE ent
= RARRAY_AREF(arg_opt_labels
, i
);
10525 LABEL
*label
= register_label(iseq
, labels_table
, ent
);
10526 opt_table
[i
] = (VALUE
)label
;
10529 iseq
->body
->param
.opt_num
= len
- 1;
10530 iseq
->body
->param
.opt_table
= opt_table
;
10533 else if (!NIL_P(arg_opt_labels
)) {
10534 rb_raise(rb_eTypeError
, ":opt param is not an array: %+"PRIsVALUE
,
10538 if (RB_TYPE_P(keywords
, T_ARRAY
)) {
10539 iseq
->body
->param
.keyword
= iseq_build_kw(iseq
, params
, keywords
);
10541 else if (!NIL_P(keywords
)) {
10542 rb_raise(rb_eTypeError
, ":keywords param is not an array: %+"PRIsVALUE
,
10546 if (Qtrue
== rb_hash_aref(params
, SYM(ambiguous_param0
))) {
10547 iseq
->body
->param
.flags
.ambiguous_param0
= TRUE
;
10550 if (int_param(&i
, params
, SYM(kwrest
))) {
10551 struct rb_iseq_param_keyword
*keyword
= (struct rb_iseq_param_keyword
*)iseq
->body
->param
.keyword
;
10552 if (keyword
== NULL
) {
10553 iseq
->body
->param
.keyword
= keyword
= ZALLOC(struct rb_iseq_param_keyword
);
10555 keyword
->rest_start
= i
;
10556 iseq
->body
->param
.flags
.has_kwrest
= TRUE
;
10559 iseq_calc_param_size(iseq
);
10562 iseq_build_from_ary_exception(iseq
, labels_table
, exception
);
10565 iseq_build_from_ary_body(iseq
, anchor
, body
, node_ids
, labels_wrapper
);
10567 iseq
->body
->param
.size
= arg_size
;
10568 iseq
->body
->local_table_size
= local_size
;
10569 iseq
->body
->stack_max
= stack_max
;
10575 rb_dvar_defined(ID id
, const rb_iseq_t
*iseq
)
10578 const struct rb_iseq_constant_body
*body
= iseq
->body
;
10579 while (body
->type
== ISEQ_TYPE_BLOCK
||
10580 body
->type
== ISEQ_TYPE_RESCUE
||
10581 body
->type
== ISEQ_TYPE_ENSURE
||
10582 body
->type
== ISEQ_TYPE_EVAL
||
10583 body
->type
== ISEQ_TYPE_MAIN
10587 for (i
= 0; i
< body
->local_table_size
; i
++) {
10588 if (body
->local_table
[i
] == id
) {
10592 iseq
= body
->parent_iseq
;
10600 rb_local_defined(ID id
, const rb_iseq_t
*iseq
)
10604 const struct rb_iseq_constant_body
*const body
= iseq
->body
->local_iseq
->body
;
10606 for (i
=0; i
<body
->local_table_size
; i
++) {
10607 if (body
->local_table
[i
] == id
) {
10615 /* ISeq binary format */
10617 #ifndef IBF_ISEQ_DEBUG
10618 #define IBF_ISEQ_DEBUG 0
10621 #ifndef IBF_ISEQ_ENABLE_LOCAL_BUFFER
10622 #define IBF_ISEQ_ENABLE_LOCAL_BUFFER 0
10625 typedef unsigned int ibf_offset_t
;
10626 #define IBF_OFFSET(ptr) ((ibf_offset_t)(VALUE)(ptr))
10628 #define IBF_MAJOR_VERSION ISEQ_MAJOR_VERSION
10630 #define IBF_DEVEL_VERSION 3
10631 #define IBF_MINOR_VERSION (ISEQ_MINOR_VERSION * 10000 + IBF_DEVEL_VERSION)
10633 #define IBF_MINOR_VERSION ISEQ_MINOR_VERSION
10636 struct ibf_header
{
10637 char magic
[4]; /* YARB */
10638 unsigned int major_version
;
10639 unsigned int minor_version
;
10641 unsigned int extra_size
;
10643 unsigned int iseq_list_size
;
10644 unsigned int global_object_list_size
;
10645 ibf_offset_t iseq_list_offset
;
10646 ibf_offset_t global_object_list_offset
;
10649 struct ibf_dump_buffer
{
10651 st_table
*obj_table
; /* obj -> obj number */
10655 st_table
*iseq_table
; /* iseq -> iseq number */
10656 struct ibf_dump_buffer global_buffer
;
10657 struct ibf_dump_buffer
*current_buffer
;
10660 rb_iseq_t
* iseq_alloc(void);
10662 struct ibf_load_buffer
{
10666 VALUE obj_list
; /* [obj0, ...] */
10667 unsigned int obj_list_size
;
10668 ibf_offset_t obj_list_offset
;
10672 const struct ibf_header
*header
;
10673 VALUE iseq_list
; /* [iseq0, ...] */
10674 struct ibf_load_buffer global_buffer
;
10678 struct ibf_load_buffer
*current_buffer
;
10681 struct pinned_list
{
10687 pinned_list_mark(void *ptr
)
10690 struct pinned_list
*list
= (struct pinned_list
*)ptr
;
10691 for (i
= 0; i
< list
->size
; i
++) {
10692 if (list
->buffer
[i
]) {
10693 rb_gc_mark(list
->buffer
[i
]);
10699 pinned_list_free(void *ptr
)
10701 struct pinned_list
*list
= (struct pinned_list
*)ptr
;
10702 xfree(list
->buffer
);
10707 pinned_list_memsize(const void *ptr
)
10709 struct pinned_list
*list
= (struct pinned_list
*)ptr
;
10710 return sizeof(struct pinned_list
) + (list
->size
* sizeof(VALUE
*));
10713 static const rb_data_type_t pinned_list_type
= {
10715 {pinned_list_mark
, pinned_list_free
, pinned_list_memsize
,},
10716 0, 0, RUBY_TYPED_WB_PROTECTED
| RUBY_TYPED_FREE_IMMEDIATELY
10720 pinned_list_fetch(VALUE list
, long offset
)
10722 struct pinned_list
* ptr
;
10724 TypedData_Get_Struct(list
, struct pinned_list
, &pinned_list_type
, ptr
);
10726 if (offset
>= ptr
->size
) {
10727 rb_raise(rb_eIndexError
, "object index out of range: %ld", offset
);
10730 return ptr
->buffer
[offset
];
10734 pinned_list_store(VALUE list
, long offset
, VALUE object
)
10736 struct pinned_list
* ptr
;
10738 TypedData_Get_Struct(list
, struct pinned_list
, &pinned_list_type
, ptr
);
10740 if (offset
>= ptr
->size
) {
10741 rb_raise(rb_eIndexError
, "object index out of range: %ld", offset
);
10744 RB_OBJ_WRITE(list
, &ptr
->buffer
[offset
], object
);
10748 pinned_list_new(long size
)
10750 struct pinned_list
* ptr
;
10752 TypedData_Make_Struct(0, struct pinned_list
, &pinned_list_type
, ptr
);
10754 ptr
->buffer
= xcalloc(size
, sizeof(VALUE
));
10760 static ibf_offset_t
10761 ibf_dump_pos(struct ibf_dump
*dump
)
10763 long pos
= RSTRING_LEN(dump
->current_buffer
->str
);
10764 #if SIZEOF_LONG > SIZEOF_INT
10765 if (pos
>= UINT_MAX
) {
10766 rb_raise(rb_eRuntimeError
, "dump size exceeds");
10769 return (unsigned int)pos
;
10773 ibf_dump_align(struct ibf_dump
*dump
, size_t align
)
10775 ibf_offset_t pos
= ibf_dump_pos(dump
);
10777 static const char padding
[sizeof(VALUE
)];
10778 size_t size
= align
- ((size_t)pos
% align
);
10779 #if SIZEOF_LONG > SIZEOF_INT
10780 if (pos
+ size
>= UINT_MAX
) {
10781 rb_raise(rb_eRuntimeError
, "dump size exceeds");
10784 for (; size
> sizeof(padding
); size
-= sizeof(padding
)) {
10785 rb_str_cat(dump
->current_buffer
->str
, padding
, sizeof(padding
));
10787 rb_str_cat(dump
->current_buffer
->str
, padding
, size
);
10791 static ibf_offset_t
10792 ibf_dump_write(struct ibf_dump
*dump
, const void *buff
, unsigned long size
)
10794 ibf_offset_t pos
= ibf_dump_pos(dump
);
10795 rb_str_cat(dump
->current_buffer
->str
, (const char *)buff
, size
);
10796 /* TODO: overflow check */
10800 static ibf_offset_t
10801 ibf_dump_write_byte(struct ibf_dump
*dump
, unsigned char byte
)
10803 return ibf_dump_write(dump
, &byte
, sizeof(unsigned char));
10807 ibf_dump_overwrite(struct ibf_dump
*dump
, void *buff
, unsigned int size
, long offset
)
10809 VALUE str
= dump
->current_buffer
->str
;
10810 char *ptr
= RSTRING_PTR(str
);
10811 if ((unsigned long)(size
+ offset
) > (unsigned long)RSTRING_LEN(str
))
10812 rb_bug("ibf_dump_overwrite: overflow");
10813 memcpy(ptr
+ offset
, buff
, size
);
10816 static const void *
10817 ibf_load_ptr(const struct ibf_load
*load
, ibf_offset_t
*offset
, int size
)
10819 ibf_offset_t beg
= *offset
;
10821 return load
->current_buffer
->buff
+ beg
;
10825 ibf_load_alloc(const struct ibf_load
*load
, ibf_offset_t offset
, size_t x
, size_t y
)
10827 void *buff
= ruby_xmalloc2(x
, y
);
10828 size_t size
= x
* y
;
10829 memcpy(buff
, load
->current_buffer
->buff
+ offset
, size
);
10833 #define IBF_W_ALIGN(type) (RUBY_ALIGNOF(type) > 1 ? ibf_dump_align(dump, RUBY_ALIGNOF(type)) : (void)0)
10835 #define IBF_W(b, type, n) (IBF_W_ALIGN(type), (type *)(VALUE)IBF_WP(b, type, n))
10836 #define IBF_WV(variable) ibf_dump_write(dump, &(variable), sizeof(variable))
10837 #define IBF_WP(b, type, n) ibf_dump_write(dump, (b), sizeof(type) * (n))
10838 #define IBF_R(val, type, n) (type *)ibf_load_alloc(load, IBF_OFFSET(val), sizeof(type), (n))
10839 #define IBF_ZERO(variable) memset(&(variable), 0, sizeof(variable))
10842 ibf_table_lookup(struct st_table
*table
, st_data_t key
)
10846 if (st_lookup(table
, key
, &val
)) {
10855 ibf_table_find_or_insert(struct st_table
*table
, st_data_t key
)
10857 int index
= ibf_table_lookup(table
, key
);
10859 if (index
< 0) { /* not found */
10860 index
= (int)table
->num_entries
;
10861 st_insert(table
, key
, (st_data_t
)index
);
10867 /* dump/load generic */
10869 static void ibf_dump_object_list(struct ibf_dump
*dump
, ibf_offset_t
*obj_list_offset
, unsigned int *obj_list_size
);
10871 static VALUE
ibf_load_object(const struct ibf_load
*load
, VALUE object_index
);
10872 static rb_iseq_t
*ibf_load_iseq(const struct ibf_load
*load
, const rb_iseq_t
*index_iseq
);
10875 ibf_dump_object_table_new(void)
10877 st_table
*obj_table
= st_init_numtable(); /* need free */
10878 st_insert(obj_table
, (st_data_t
)Qnil
, (st_data_t
)0); /* 0th is nil */
10884 ibf_dump_object(struct ibf_dump
*dump
, VALUE obj
)
10886 return ibf_table_find_or_insert(dump
->current_buffer
->obj_table
, (st_data_t
)obj
);
10890 ibf_dump_id(struct ibf_dump
*dump
, ID id
)
10892 if (id
== 0 || rb_id2name(id
) == NULL
) {
10895 return ibf_dump_object(dump
, rb_id2sym(id
));
10899 ibf_load_id(const struct ibf_load
*load
, const ID id_index
)
10901 if (id_index
== 0) {
10904 VALUE sym
= ibf_load_object(load
, id_index
);
10905 return rb_sym2id(sym
);
10908 /* dump/load: code */
10910 static ibf_offset_t
ibf_dump_iseq_each(struct ibf_dump
*dump
, const rb_iseq_t
*iseq
);
10913 ibf_dump_iseq(struct ibf_dump
*dump
, const rb_iseq_t
*iseq
)
10915 if (iseq
== NULL
) {
10919 return ibf_table_find_or_insert(dump
->iseq_table
, (st_data_t
)iseq
);
10923 static unsigned char
10924 ibf_load_byte(const struct ibf_load
*load
, ibf_offset_t
*offset
)
10926 if (*offset
>= load
->current_buffer
->size
) { rb_raise(rb_eRuntimeError
, "invalid bytecode"); }
10927 return (unsigned char)load
->current_buffer
->buff
[(*offset
)++];
10931 * Small uint serialization
10932 * 0x00000000_00000000 - 0x00000000_0000007f: 1byte | XXXX XXX1 |
10933 * 0x00000000_00000080 - 0x00000000_00003fff: 2byte | XXXX XX10 | XXXX XXXX |
10934 * 0x00000000_00004000 - 0x00000000_001fffff: 3byte | XXXX X100 | XXXX XXXX | XXXX XXXX |
10935  * 0x00000000_00200000 - 0x00000000_0fffffff: 4byte | XXXX 1000 | XXXX XXXX | XXXX XXXX | XXXX XXXX |
10937 * 0x00010000_00000000 - 0x00ffffff_ffffffff: 8byte | 1000 0000 | XXXX XXXX | XXXX XXXX | XXXX XXXX | XXXX XXXX | XXXX XXXX | XXXX XXXX | XXXX XXXX |
10938 * 0x01000000_00000000 - 0xffffffff_ffffffff: 9byte | 0000 0000 | XXXX XXXX | XXXX XXXX | XXXX XXXX | XXXX XXXX | XXXX XXXX | XXXX XXXX | XXXX XXXX | XXXX XXXX |
10941 ibf_dump_write_small_value(struct ibf_dump
*dump
, VALUE x
)
10943 if (sizeof(VALUE
) > 8 || CHAR_BIT
!= 8) {
10944 ibf_dump_write(dump
, &x
, sizeof(VALUE
));
10948 enum { max_byte_length
= sizeof(VALUE
) + 1 };
10950 unsigned char bytes
[max_byte_length
];
10953 for (n
= 0; n
< sizeof(VALUE
) && (x
>> (7 - n
)); n
++, x
>>= 8) {
10954 bytes
[max_byte_length
- 1 - n
] = (unsigned char)x
;
10960 bytes
[max_byte_length
- 1 - n
] = (unsigned char)x
;
10963 ibf_dump_write(dump
, bytes
+ max_byte_length
- n
, n
);
10967 ibf_load_small_value(const struct ibf_load
*load
, ibf_offset_t
*offset
)
10969 if (sizeof(VALUE
) > 8 || CHAR_BIT
!= 8) {
10970 union { char s
[sizeof(VALUE
)]; VALUE v
; } x
;
10972 memcpy(x
.s
, load
->current_buffer
->buff
+ *offset
, sizeof(VALUE
));
10973 *offset
+= sizeof(VALUE
);
10978 enum { max_byte_length
= sizeof(VALUE
) + 1 };
10980 const unsigned char *buffer
= (const unsigned char *)load
->current_buffer
->buff
;
10981 const unsigned char c
= buffer
[*offset
];
10985 c
== 0 ? 9 : ntz_int32(c
) + 1;
10986 VALUE x
= (VALUE
)c
>> n
;
10988 if (*offset
+ n
> load
->current_buffer
->size
) {
10989 rb_raise(rb_eRuntimeError
, "invalid byte sequence");
10993 for (i
= 1; i
< n
; i
++) {
10995 x
|= (VALUE
)buffer
[*offset
+ i
];
11003 ibf_dump_builtin(struct ibf_dump
*dump
, const struct rb_builtin_function
*bf
)
11006 // short: name.length
11008 // // omit argc (only verify with name)
11009 ibf_dump_write_small_value(dump
, (VALUE
)bf
->index
);
11011 size_t len
= strlen(bf
->name
);
11012 ibf_dump_write_small_value(dump
, (VALUE
)len
);
11013 ibf_dump_write(dump
, bf
->name
, len
);
11016 static const struct rb_builtin_function
*
11017 ibf_load_builtin(const struct ibf_load
*load
, ibf_offset_t
*offset
)
11019 int i
= (int)ibf_load_small_value(load
, offset
);
11020 int len
= (int)ibf_load_small_value(load
, offset
);
11021 const char *name
= (char *)ibf_load_ptr(load
, offset
, len
);
11024 fprintf(stderr
, "%.*s!!\n", len
, name
);
11027 const struct rb_builtin_function
*table
= GET_VM()->builtin_function_table
;
11028 if (table
== NULL
) rb_raise(rb_eArgError
, "builtin function table is not provided");
11029 if (strncmp(table
[i
].name
, name
, len
) != 0) {
11030 rb_raise(rb_eArgError
, "builtin function index (%d) mismatch (expect %s but %s)", i
, name
, table
[i
].name
);
11032 // fprintf(stderr, "load-builtin: name:%s(%d)\n", table[i].name, table[i].argc);
11037 static ibf_offset_t
11038 ibf_dump_code(struct ibf_dump
*dump
, const rb_iseq_t
*iseq
)
11040 const struct rb_iseq_constant_body
*const body
= iseq
->body
;
11041 const int iseq_size
= body
->iseq_size
;
11043 const VALUE
*orig_code
= rb_iseq_original_iseq(iseq
);
11045 ibf_offset_t offset
= ibf_dump_pos(dump
);
11047 for (code_index
=0; code_index
<iseq_size
;) {
11048 const VALUE insn
= orig_code
[code_index
++];
11049 const char *types
= insn_op_types(insn
);
11053 if (insn
>= 0x100) { rb_raise(rb_eRuntimeError
, "invalid instruction"); }
11054 ibf_dump_write_small_value(dump
, insn
);
11057 for (op_index
=0; types
[op_index
]; op_index
++, code_index
++) {
11058 VALUE op
= orig_code
[code_index
];
11061 switch (types
[op_index
]) {
11064 wv
= ibf_dump_object(dump
, op
);
11067 wv
= (VALUE
)ibf_dump_iseq(dump
, (const rb_iseq_t
*)op
);
11074 for (i
=0; i
<body
->is_size
; i
++) {
11075 if (op
== (VALUE
)&body
->is_entries
[i
]) {
11087 wv
= ibf_dump_id(dump
, (ID
)op
);
11090 rb_raise(rb_eRuntimeError
, "TS_FUNCPTR is not supported");
11093 ibf_dump_builtin(dump
, (const struct rb_builtin_function
*)op
);
11099 ibf_dump_write_small_value(dump
, wv
);
11102 assert(insn_len(insn
) == op_index
+1);
11109 ibf_load_code(const struct ibf_load
*load
, rb_iseq_t
*iseq
, ibf_offset_t bytecode_offset
, ibf_offset_t bytecode_size
, unsigned int iseq_size
)
11111 VALUE iseqv
= (VALUE
)iseq
;
11112 unsigned int code_index
;
11113 ibf_offset_t reading_pos
= bytecode_offset
;
11114 VALUE
*code
= ALLOC_N(VALUE
, iseq_size
);
11116 struct rb_iseq_constant_body
*load_body
= iseq
->body
;
11117 struct rb_call_data
*cd_entries
= load_body
->call_data
;
11118 union iseq_inline_storage_entry
*is_entries
= load_body
->is_entries
;
11120 for (code_index
=0; code_index
<iseq_size
;) {
11122 const VALUE insn
= code
[code_index
] = ibf_load_small_value(load
, &reading_pos
);
11123 const unsigned int insn_index
= code_index
;
11124 const char *types
= insn_op_types(insn
);
11130 for (op_index
=0; types
[op_index
]; op_index
++, code_index
++) {
11131 const char operand_type
= types
[op_index
];
11132 switch (operand_type
) {
11135 VALUE op
= ibf_load_small_value(load
, &reading_pos
);
11136 VALUE v
= ibf_load_object(load
, op
);
11137 code
[code_index
] = v
;
11138 if (!SPECIAL_CONST_P(v
)) {
11139 RB_OBJ_WRITTEN(iseqv
, Qundef
, v
);
11140 FL_SET(iseqv
, ISEQ_MARKABLE_ISEQ
);
11146 VALUE op
= ibf_load_small_value(load
, &reading_pos
);
11147 VALUE v
= ibf_load_object(load
, op
);
11148 v
= rb_hash_dup(v
); // hash dumped as frozen
11149 RHASH_TBL_RAW(v
)->type
= &cdhash_type
;
11150 rb_hash_rehash(v
); // hash function changed
11151 freeze_hide_obj(v
);
11153 // Overwrite the existing hash in the object list. This
11154 // is to keep the object alive during load time.
11155 // [Bug #17984] [ruby-core:104259]
11156 pinned_list_store(load
->current_buffer
->obj_list
, (long)op
, v
);
11158 code
[code_index
] = v
;
11159 RB_OBJ_WRITTEN(iseqv
, Qundef
, v
);
11160 FL_SET(iseqv
, ISEQ_MARKABLE_ISEQ
);
11165 VALUE op
= (VALUE
)ibf_load_small_value(load
, &reading_pos
);
11166 VALUE v
= (VALUE
)ibf_load_iseq(load
, (const rb_iseq_t
*)op
);
11167 code
[code_index
] = v
;
11168 if (!SPECIAL_CONST_P(v
)) {
11169 RB_OBJ_WRITTEN(iseqv
, Qundef
, v
);
11170 FL_SET(iseqv
, ISEQ_MARKABLE_ISEQ
);
11178 VALUE op
= ibf_load_small_value(load
, &reading_pos
);
11179 code
[code_index
] = (VALUE
)&is_entries
[op
];
11181 if (insn
== BIN(opt_getinlinecache
) && operand_type
== TS_IC
) {
11182 // Store the instruction index for opt_getinlinecache on the IC for
11183 // YJIT to invalidate code when opt_setinlinecache runs.
11184 is_entries
[op
].ic_cache
.get_insn_idx
= insn_index
;
11187 FL_SET(iseqv
, ISEQ_MARKABLE_ISEQ
);
11191 code
[code_index
] = (VALUE
)cd_entries
++;
11196 VALUE op
= ibf_load_small_value(load
, &reading_pos
);
11197 code
[code_index
] = ibf_load_id(load
, (ID
)(VALUE
)op
);
11201 rb_raise(rb_eRuntimeError
, "TS_FUNCPTR is not supported");
11204 code
[code_index
] = (VALUE
)ibf_load_builtin(load
, &reading_pos
);
11207 code
[code_index
] = ibf_load_small_value(load
, &reading_pos
);
11211 if (insn_len(insn
) != op_index
+1) {
11212 rb_raise(rb_eRuntimeError
, "operand size mismatch");
11215 load_body
->iseq_encoded
= code
;
11216 load_body
->iseq_size
= code_index
;
11218 assert(code_index
== iseq_size
);
11219 assert(reading_pos
== bytecode_offset
+ bytecode_size
);
11223 static ibf_offset_t
11224 ibf_dump_param_opt_table(struct ibf_dump
*dump
, const rb_iseq_t
*iseq
)
11226 int opt_num
= iseq
->body
->param
.opt_num
;
11229 IBF_W_ALIGN(VALUE
);
11230 return ibf_dump_write(dump
, iseq
->body
->param
.opt_table
, sizeof(VALUE
) * (opt_num
+ 1));
11233 return ibf_dump_pos(dump
);
11238 ibf_load_param_opt_table(const struct ibf_load
*load
, ibf_offset_t opt_table_offset
, int opt_num
)
11241 VALUE
*table
= ALLOC_N(VALUE
, opt_num
+1);
11242 MEMCPY(table
, load
->current_buffer
->buff
+ opt_table_offset
, VALUE
, opt_num
+1);
11250 static ibf_offset_t
11251 ibf_dump_param_keyword(struct ibf_dump
*dump
, const rb_iseq_t
*iseq
)
11253 const struct rb_iseq_param_keyword
*kw
= iseq
->body
->param
.keyword
;
11256 struct rb_iseq_param_keyword dump_kw
= *kw
;
11257 int dv_num
= kw
->num
- kw
->required_num
;
11258 ID
*ids
= kw
->num
> 0 ? ALLOCA_N(ID
, kw
->num
) : NULL
;
11259 VALUE
*dvs
= dv_num
> 0 ? ALLOCA_N(VALUE
, dv_num
) : NULL
;
11262 for (i
=0; i
<kw
->num
; i
++) ids
[i
] = (ID
)ibf_dump_id(dump
, kw
->table
[i
]);
11263 for (i
=0; i
<dv_num
; i
++) dvs
[i
] = (VALUE
)ibf_dump_object(dump
, kw
->default_values
[i
]);
11265 dump_kw
.table
= IBF_W(ids
, ID
, kw
->num
);
11266 dump_kw
.default_values
= IBF_W(dvs
, VALUE
, dv_num
);
11267 IBF_W_ALIGN(struct rb_iseq_param_keyword
);
11268 return ibf_dump_write(dump
, &dump_kw
, sizeof(struct rb_iseq_param_keyword
) * 1);
11275 static const struct rb_iseq_param_keyword
*
11276 ibf_load_param_keyword(const struct ibf_load
*load
, ibf_offset_t param_keyword_offset
)
11278 if (param_keyword_offset
) {
11279 struct rb_iseq_param_keyword
*kw
= IBF_R(param_keyword_offset
, struct rb_iseq_param_keyword
, 1);
11280 ID
*ids
= IBF_R(kw
->table
, ID
, kw
->num
);
11281 int dv_num
= kw
->num
- kw
->required_num
;
11282 VALUE
*dvs
= IBF_R(kw
->default_values
, VALUE
, dv_num
);
11285 for (i
=0; i
<kw
->num
; i
++) {
11286 ids
[i
] = ibf_load_id(load
, ids
[i
]);
11288 for (i
=0; i
<dv_num
; i
++) {
11289 dvs
[i
] = ibf_load_object(load
, dvs
[i
]);
11293 kw
->default_values
= dvs
;
11301 static ibf_offset_t
11302 ibf_dump_insns_info_body(struct ibf_dump
*dump
, const rb_iseq_t
*iseq
)
11304 ibf_offset_t offset
= ibf_dump_pos(dump
);
11305 const struct iseq_insn_info_entry
*entries
= iseq
->body
->insns_info
.body
;
11308 for (i
= 0; i
< iseq
->body
->insns_info
.size
; i
++) {
11309 ibf_dump_write_small_value(dump
, entries
[i
].line_no
);
11310 #ifdef USE_ISEQ_NODE_ID
11311 ibf_dump_write_small_value(dump
, entries
[i
].node_id
);
11313 ibf_dump_write_small_value(dump
, entries
[i
].events
);
11319 static struct iseq_insn_info_entry
*
11320 ibf_load_insns_info_body(const struct ibf_load
*load
, ibf_offset_t body_offset
, unsigned int size
)
11322 ibf_offset_t reading_pos
= body_offset
;
11323 struct iseq_insn_info_entry
*entries
= ALLOC_N(struct iseq_insn_info_entry
, size
);
11326 for (i
= 0; i
< size
; i
++) {
11327 entries
[i
].line_no
= (int)ibf_load_small_value(load
, &reading_pos
);
11328 #ifdef USE_ISEQ_NODE_ID
11329 entries
[i
].node_id
= (int)ibf_load_small_value(load
, &reading_pos
);
11331 entries
[i
].events
= (rb_event_flag_t
)ibf_load_small_value(load
, &reading_pos
);
11337 static ibf_offset_t
11338 ibf_dump_insns_info_positions(struct ibf_dump
*dump
, const unsigned int *positions
, unsigned int size
)
11340 ibf_offset_t offset
= ibf_dump_pos(dump
);
11342 unsigned int last
= 0;
11344 for (i
= 0; i
< size
; i
++) {
11345 ibf_dump_write_small_value(dump
, positions
[i
] - last
);
11346 last
= positions
[i
];
11352 static unsigned int *
11353 ibf_load_insns_info_positions(const struct ibf_load
*load
, ibf_offset_t positions_offset
, unsigned int size
)
11355 ibf_offset_t reading_pos
= positions_offset
;
11356 unsigned int *positions
= ALLOC_N(unsigned int, size
);
11358 unsigned int last
= 0;
11360 for (i
= 0; i
< size
; i
++) {
11361 positions
[i
] = last
+ (unsigned int)ibf_load_small_value(load
, &reading_pos
);
11362 last
= positions
[i
];
11368 static ibf_offset_t
11369 ibf_dump_local_table(struct ibf_dump
*dump
, const rb_iseq_t
*iseq
)
11371 const struct rb_iseq_constant_body
*const body
= iseq
->body
;
11372 const int size
= body
->local_table_size
;
11373 ID
*table
= ALLOCA_N(ID
, size
);
11376 for (i
=0; i
<size
; i
++) {
11377 table
[i
] = ibf_dump_id(dump
, body
->local_table
[i
]);
11381 return ibf_dump_write(dump
, table
, sizeof(ID
) * size
);
11385 ibf_load_local_table(const struct ibf_load
*load
, ibf_offset_t local_table_offset
, int size
)
11388 ID
*table
= IBF_R(local_table_offset
, ID
, size
);
11391 for (i
=0; i
<size
; i
++) {
11392 table
[i
] = ibf_load_id(load
, table
[i
]);
11401 static ibf_offset_t
11402 ibf_dump_catch_table(struct ibf_dump
*dump
, const rb_iseq_t
*iseq
)
11404 const struct iseq_catch_table
*table
= iseq
->body
->catch_table
;
11407 int *iseq_indices
= ALLOCA_N(int, table
->size
);
11410 for (i
=0; i
<table
->size
; i
++) {
11411 iseq_indices
[i
] = ibf_dump_iseq(dump
, table
->entries
[i
].iseq
);
11414 const ibf_offset_t offset
= ibf_dump_pos(dump
);
11416 for (i
=0; i
<table
->size
; i
++) {
11417 ibf_dump_write_small_value(dump
, iseq_indices
[i
]);
11418 ibf_dump_write_small_value(dump
, table
->entries
[i
].type
);
11419 ibf_dump_write_small_value(dump
, table
->entries
[i
].start
);
11420 ibf_dump_write_small_value(dump
, table
->entries
[i
].end
);
11421 ibf_dump_write_small_value(dump
, table
->entries
[i
].cont
);
11422 ibf_dump_write_small_value(dump
, table
->entries
[i
].sp
);
11427 return ibf_dump_pos(dump
);
11431 static struct iseq_catch_table
*
11432 ibf_load_catch_table(const struct ibf_load
*load
, ibf_offset_t catch_table_offset
, unsigned int size
)
11435 struct iseq_catch_table
*table
= ruby_xmalloc(iseq_catch_table_bytes(size
));
11436 table
->size
= size
;
11438 ibf_offset_t reading_pos
= catch_table_offset
;
11441 for (i
=0; i
<table
->size
; i
++) {
11442 int iseq_index
= (int)ibf_load_small_value(load
, &reading_pos
);
11443 table
->entries
[i
].type
= (enum catch_type
)ibf_load_small_value(load
, &reading_pos
);
11444 table
->entries
[i
].start
= (unsigned int)ibf_load_small_value(load
, &reading_pos
);
11445 table
->entries
[i
].end
= (unsigned int)ibf_load_small_value(load
, &reading_pos
);
11446 table
->entries
[i
].cont
= (unsigned int)ibf_load_small_value(load
, &reading_pos
);
11447 table
->entries
[i
].sp
= (unsigned int)ibf_load_small_value(load
, &reading_pos
);
11449 table
->entries
[i
].iseq
= ibf_load_iseq(load
, (const rb_iseq_t
*)(VALUE
)iseq_index
);
11458 static ibf_offset_t
11459 ibf_dump_ci_entries(struct ibf_dump
*dump
, const rb_iseq_t
*iseq
)
11461 const struct rb_iseq_constant_body
*const body
= iseq
->body
;
11462 const unsigned int ci_size
= body
->ci_size
;
11463 const struct rb_call_data
*cds
= body
->call_data
;
11465 ibf_offset_t offset
= ibf_dump_pos(dump
);
11469 for (i
= 0; i
< ci_size
; i
++) {
11470 const struct rb_callinfo
*ci
= cds
[i
].ci
;
11472 ibf_dump_write_small_value(dump
, ibf_dump_id(dump
, vm_ci_mid(ci
)));
11473 ibf_dump_write_small_value(dump
, vm_ci_flag(ci
));
11474 ibf_dump_write_small_value(dump
, vm_ci_argc(ci
));
11476 const struct rb_callinfo_kwarg
*kwarg
= vm_ci_kwarg(ci
);
11478 int len
= kwarg
->keyword_len
;
11479 ibf_dump_write_small_value(dump
, len
);
11480 for (int j
=0; j
<len
; j
++) {
11481 VALUE keyword
= ibf_dump_object(dump
, kwarg
->keywords
[j
]);
11482 ibf_dump_write_small_value(dump
, keyword
);
11486 ibf_dump_write_small_value(dump
, 0);
11490 // TODO: truncate NULL ci from call_data.
11491 ibf_dump_write_small_value(dump
, (VALUE
)-1);
11498 static enum rb_id_table_iterator_result
11499 dump_outer_variable(ID id
, VALUE val
, void *dump
)
11501 ibf_dump_write_small_value(dump
, ibf_dump_id(dump
, id
));
11502 ibf_dump_write_small_value(dump
, val
);
11504 return ID_TABLE_CONTINUE
;
11507 static ibf_offset_t
11508 ibf_dump_outer_variables(struct ibf_dump
*dump
, const rb_iseq_t
*iseq
)
11510 struct rb_id_table
* ovs
= iseq
->body
->outer_variables
;
11512 ibf_offset_t offset
= ibf_dump_pos(dump
);
11515 ibf_dump_write_small_value(dump
, (VALUE
)rb_id_table_size(ovs
));
11516 rb_id_table_foreach(ovs
, dump_outer_variable
, (void *)dump
);
11519 ibf_dump_write_small_value(dump
, (VALUE
)0);
11525 /* note that we dump out rb_call_info but load back rb_call_data */
11527 ibf_load_ci_entries(const struct ibf_load
*load
,
11528 ibf_offset_t ci_entries_offset
,
11529 unsigned int ci_size
,
11530 struct rb_call_data
**cd_ptr
)
11532 ibf_offset_t reading_pos
= ci_entries_offset
;
11536 struct rb_call_data
*cds
= ZALLOC_N(struct rb_call_data
, ci_size
);
11539 for (i
= 0; i
< ci_size
; i
++) {
11540 VALUE mid_index
= ibf_load_small_value(load
, &reading_pos
);
11541 if (mid_index
!= (VALUE
)-1) {
11542 ID mid
= ibf_load_id(load
, mid_index
);
11543 unsigned int flag
= (unsigned int)ibf_load_small_value(load
, &reading_pos
);
11544 unsigned int argc
= (unsigned int)ibf_load_small_value(load
, &reading_pos
);
11546 struct rb_callinfo_kwarg
*kwarg
= NULL
;
11547 int kwlen
= (int)ibf_load_small_value(load
, &reading_pos
);
11549 kwarg
= rb_xmalloc_mul_add(kwlen
, sizeof(VALUE
), sizeof(struct rb_callinfo_kwarg
));
11550 kwarg
->keyword_len
= kwlen
;
11551 for (int j
=0; j
<kwlen
; j
++) {
11552 VALUE keyword
= ibf_load_small_value(load
, &reading_pos
);
11553 kwarg
->keywords
[j
] = ibf_load_object(load
, keyword
);
11557 cds
[i
].ci
= vm_ci_new(mid
, flag
, argc
, kwarg
);
11558 RB_OBJ_WRITTEN(load
->iseq
, Qundef
, cds
[i
].ci
);
11559 cds
[i
].cc
= vm_cc_empty();
11569 static struct rb_id_table
*
11570 ibf_load_outer_variables(const struct ibf_load
* load
, ibf_offset_t outer_variables_offset
)
11572 ibf_offset_t reading_pos
= outer_variables_offset
;
11574 struct rb_id_table
*tbl
= NULL
;
11576 size_t table_size
= (size_t)ibf_load_small_value(load
, &reading_pos
);
11578 if (table_size
> 0) {
11579 tbl
= rb_id_table_create(table_size
);
11582 for (size_t i
= 0; i
< table_size
; i
++) {
11583 ID key
= ibf_load_id(load
, (ID
)ibf_load_small_value(load
, &reading_pos
));
11584 VALUE value
= ibf_load_small_value(load
, &reading_pos
);
11585 if (!key
) key
= rb_make_temporary_id(i
);
11586 rb_id_table_insert(tbl
, key
, value
);
11592 static ibf_offset_t
11593 ibf_dump_iseq_each(struct ibf_dump
*dump
, const rb_iseq_t
*iseq
)
11595 assert(dump
->current_buffer
== &dump
->global_buffer
);
11597 unsigned int *positions
;
11599 const struct rb_iseq_constant_body
*body
= iseq
->body
;
11601 const VALUE location_pathobj_index
= ibf_dump_object(dump
, body
->location
.pathobj
); /* TODO: freeze */
11602 const VALUE location_base_label_index
= ibf_dump_object(dump
, body
->location
.base_label
);
11603 const VALUE location_label_index
= ibf_dump_object(dump
, body
->location
.label
);
11605 #if IBF_ISEQ_ENABLE_LOCAL_BUFFER
11606 ibf_offset_t iseq_start
= ibf_dump_pos(dump
);
11608 struct ibf_dump_buffer
*saved_buffer
= dump
->current_buffer
;
11609 struct ibf_dump_buffer buffer
;
11610 buffer
.str
= rb_str_new(0, 0);
11611 buffer
.obj_table
= ibf_dump_object_table_new();
11612 dump
->current_buffer
= &buffer
;
11615 const ibf_offset_t bytecode_offset
= ibf_dump_code(dump
, iseq
);
11616 const ibf_offset_t bytecode_size
= ibf_dump_pos(dump
) - bytecode_offset
;
11617 const ibf_offset_t param_opt_table_offset
= ibf_dump_param_opt_table(dump
, iseq
);
11618 const ibf_offset_t param_keyword_offset
= ibf_dump_param_keyword(dump
, iseq
);
11619 const ibf_offset_t insns_info_body_offset
= ibf_dump_insns_info_body(dump
, iseq
);
11621 positions
= rb_iseq_insns_info_decode_positions(iseq
->body
);
11622 const ibf_offset_t insns_info_positions_offset
= ibf_dump_insns_info_positions(dump
, positions
, body
->insns_info
.size
);
11623 ruby_xfree(positions
);
11625 const ibf_offset_t local_table_offset
= ibf_dump_local_table(dump
, iseq
);
11626 const unsigned int catch_table_size
= body
->catch_table
? body
->catch_table
->size
: 0;
11627 const ibf_offset_t catch_table_offset
= ibf_dump_catch_table(dump
, iseq
);
11628 const int parent_iseq_index
= ibf_dump_iseq(dump
, iseq
->body
->parent_iseq
);
11629 const int local_iseq_index
= ibf_dump_iseq(dump
, iseq
->body
->local_iseq
);
11630 const int mandatory_only_iseq_index
= ibf_dump_iseq(dump
, iseq
->body
->mandatory_only_iseq
);
11631 const ibf_offset_t ci_entries_offset
= ibf_dump_ci_entries(dump
, iseq
);
11632 const ibf_offset_t outer_variables_offset
= ibf_dump_outer_variables(dump
, iseq
);
11634 #if IBF_ISEQ_ENABLE_LOCAL_BUFFER
11635 ibf_offset_t local_obj_list_offset
;
11636 unsigned int local_obj_list_size
;
11638 ibf_dump_object_list(dump
, &local_obj_list_offset
, &local_obj_list_size
);
11641 ibf_offset_t body_offset
= ibf_dump_pos(dump
);
11643 /* dump the constant body */
11644 unsigned int param_flags
=
11645 (body
->param
.flags
.has_lead
<< 0) |
11646 (body
->param
.flags
.has_opt
<< 1) |
11647 (body
->param
.flags
.has_rest
<< 2) |
11648 (body
->param
.flags
.has_post
<< 3) |
11649 (body
->param
.flags
.has_kw
<< 4) |
11650 (body
->param
.flags
.has_kwrest
<< 5) |
11651 (body
->param
.flags
.has_block
<< 6) |
11652 (body
->param
.flags
.ambiguous_param0
<< 7) |
11653 (body
->param
.flags
.accepts_no_kwarg
<< 8) |
11654 (body
->param
.flags
.ruby2_keywords
<< 9);
11656 #if IBF_ISEQ_ENABLE_LOCAL_BUFFER
11657 # define IBF_BODY_OFFSET(x) (x)
11659 # define IBF_BODY_OFFSET(x) (body_offset - (x))
11662 ibf_dump_write_small_value(dump
, body
->type
);
11663 ibf_dump_write_small_value(dump
, body
->iseq_size
);
11664 ibf_dump_write_small_value(dump
, IBF_BODY_OFFSET(bytecode_offset
));
11665 ibf_dump_write_small_value(dump
, bytecode_size
);
11666 ibf_dump_write_small_value(dump
, param_flags
);
11667 ibf_dump_write_small_value(dump
, body
->param
.size
);
11668 ibf_dump_write_small_value(dump
, body
->param
.lead_num
);
11669 ibf_dump_write_small_value(dump
, body
->param
.opt_num
);
11670 ibf_dump_write_small_value(dump
, body
->param
.rest_start
);
11671 ibf_dump_write_small_value(dump
, body
->param
.post_start
);
11672 ibf_dump_write_small_value(dump
, body
->param
.post_num
);
11673 ibf_dump_write_small_value(dump
, body
->param
.block_start
);
11674 ibf_dump_write_small_value(dump
, IBF_BODY_OFFSET(param_opt_table_offset
));
11675 ibf_dump_write_small_value(dump
, param_keyword_offset
);
11676 ibf_dump_write_small_value(dump
, location_pathobj_index
);
11677 ibf_dump_write_small_value(dump
, location_base_label_index
);
11678 ibf_dump_write_small_value(dump
, location_label_index
);
11679 ibf_dump_write_small_value(dump
, body
->location
.first_lineno
);
11680 ibf_dump_write_small_value(dump
, body
->location
.node_id
);
11681 ibf_dump_write_small_value(dump
, body
->location
.code_location
.beg_pos
.lineno
);
11682 ibf_dump_write_small_value(dump
, body
->location
.code_location
.beg_pos
.column
);
11683 ibf_dump_write_small_value(dump
, body
->location
.code_location
.end_pos
.lineno
);
11684 ibf_dump_write_small_value(dump
, body
->location
.code_location
.end_pos
.column
);
11685 ibf_dump_write_small_value(dump
, IBF_BODY_OFFSET(insns_info_body_offset
));
11686 ibf_dump_write_small_value(dump
, IBF_BODY_OFFSET(insns_info_positions_offset
));
11687 ibf_dump_write_small_value(dump
, body
->insns_info
.size
);
11688 ibf_dump_write_small_value(dump
, IBF_BODY_OFFSET(local_table_offset
));
11689 ibf_dump_write_small_value(dump
, catch_table_size
);
11690 ibf_dump_write_small_value(dump
, IBF_BODY_OFFSET(catch_table_offset
));
11691 ibf_dump_write_small_value(dump
, parent_iseq_index
);
11692 ibf_dump_write_small_value(dump
, local_iseq_index
);
11693 ibf_dump_write_small_value(dump
, mandatory_only_iseq_index
);
11694 ibf_dump_write_small_value(dump
, IBF_BODY_OFFSET(ci_entries_offset
));
11695 ibf_dump_write_small_value(dump
, IBF_BODY_OFFSET(outer_variables_offset
));
11696 ibf_dump_write_small_value(dump
, body
->variable
.flip_count
);
11697 ibf_dump_write_small_value(dump
, body
->local_table_size
);
11698 ibf_dump_write_small_value(dump
, body
->is_size
);
11699 ibf_dump_write_small_value(dump
, body
->ci_size
);
11700 ibf_dump_write_small_value(dump
, body
->stack_max
);
11701 ibf_dump_write_small_value(dump
, body
->catch_except_p
);
11702 ibf_dump_write_small_value(dump
, body
->builtin_inline_p
);
11704 #undef IBF_BODY_OFFSET
11706 #if IBF_ISEQ_ENABLE_LOCAL_BUFFER
11707 ibf_offset_t iseq_length_bytes
= ibf_dump_pos(dump
);
11709 dump
->current_buffer
= saved_buffer
;
11710 ibf_dump_write(dump
, RSTRING_PTR(buffer
.str
), iseq_length_bytes
);
11712 ibf_offset_t offset
= ibf_dump_pos(dump
);
11713 ibf_dump_write_small_value(dump
, iseq_start
);
11714 ibf_dump_write_small_value(dump
, iseq_length_bytes
);
11715 ibf_dump_write_small_value(dump
, body_offset
);
11717 ibf_dump_write_small_value(dump
, local_obj_list_offset
);
11718 ibf_dump_write_small_value(dump
, local_obj_list_size
);
11720 st_free_table(buffer
.obj_table
); // TODO: this leaks in case of exception
11724 return body_offset
;
11729 ibf_load_location_str(const struct ibf_load
*load
, VALUE str_index
)
11731 VALUE str
= ibf_load_object(load
, str_index
);
11733 str
= rb_fstring(str
);
11739 ibf_load_iseq_each(struct ibf_load
*load
, rb_iseq_t
*iseq
, ibf_offset_t offset
)
11741 struct rb_iseq_constant_body
*load_body
= iseq
->body
= rb_iseq_constant_body_alloc();
11743 ibf_offset_t reading_pos
= offset
;
11745 #if IBF_ISEQ_ENABLE_LOCAL_BUFFER
11746 struct ibf_load_buffer
*saved_buffer
= load
->current_buffer
;
11747 load
->current_buffer
= &load
->global_buffer
;
11749 const ibf_offset_t iseq_start
= (ibf_offset_t
)ibf_load_small_value(load
, &reading_pos
);
11750 const ibf_offset_t iseq_length_bytes
= (ibf_offset_t
)ibf_load_small_value(load
, &reading_pos
);
11751 const ibf_offset_t body_offset
= (ibf_offset_t
)ibf_load_small_value(load
, &reading_pos
);
11753 struct ibf_load_buffer buffer
;
11754 buffer
.buff
= load
->global_buffer
.buff
+ iseq_start
;
11755 buffer
.size
= iseq_length_bytes
;
11756 buffer
.obj_list_offset
= (ibf_offset_t
)ibf_load_small_value(load
, &reading_pos
);
11757 buffer
.obj_list_size
= (ibf_offset_t
)ibf_load_small_value(load
, &reading_pos
);
11758 buffer
.obj_list
= pinned_list_new(buffer
.obj_list_size
);
11760 load
->current_buffer
= &buffer
;
11761 reading_pos
= body_offset
;
11764 #if IBF_ISEQ_ENABLE_LOCAL_BUFFER
11765 # define IBF_BODY_OFFSET(x) (x)
11767 # define IBF_BODY_OFFSET(x) (offset - (x))
11770 const unsigned int type
= (unsigned int)ibf_load_small_value(load
, &reading_pos
);
11771 const unsigned int iseq_size
= (unsigned int)ibf_load_small_value(load
, &reading_pos
);
11772 const ibf_offset_t bytecode_offset
= (ibf_offset_t
)IBF_BODY_OFFSET(ibf_load_small_value(load
, &reading_pos
));
11773 const ibf_offset_t bytecode_size
= (ibf_offset_t
)ibf_load_small_value(load
, &reading_pos
);
11774 const unsigned int param_flags
= (unsigned int)ibf_load_small_value(load
, &reading_pos
);
11775 const unsigned int param_size
= (unsigned int)ibf_load_small_value(load
, &reading_pos
);
11776 const int param_lead_num
= (int)ibf_load_small_value(load
, &reading_pos
);
11777 const int param_opt_num
= (int)ibf_load_small_value(load
, &reading_pos
);
11778 const int param_rest_start
= (int)ibf_load_small_value(load
, &reading_pos
);
11779 const int param_post_start
= (int)ibf_load_small_value(load
, &reading_pos
);
11780 const int param_post_num
= (int)ibf_load_small_value(load
, &reading_pos
);
11781 const int param_block_start
= (int)ibf_load_small_value(load
, &reading_pos
);
11782 const ibf_offset_t param_opt_table_offset
= (ibf_offset_t
)IBF_BODY_OFFSET(ibf_load_small_value(load
, &reading_pos
));
11783 const ibf_offset_t param_keyword_offset
= (ibf_offset_t
)ibf_load_small_value(load
, &reading_pos
);
11784 const VALUE location_pathobj_index
= ibf_load_small_value(load
, &reading_pos
);
11785 const VALUE location_base_label_index
= ibf_load_small_value(load
, &reading_pos
);
11786 const VALUE location_label_index
= ibf_load_small_value(load
, &reading_pos
);
11787 const VALUE location_first_lineno
= ibf_load_small_value(load
, &reading_pos
);
11788 const int location_node_id
= (int)ibf_load_small_value(load
, &reading_pos
);
11789 const int location_code_location_beg_pos_lineno
= (int)ibf_load_small_value(load
, &reading_pos
);
11790 const int location_code_location_beg_pos_column
= (int)ibf_load_small_value(load
, &reading_pos
);
11791 const int location_code_location_end_pos_lineno
= (int)ibf_load_small_value(load
, &reading_pos
);
11792 const int location_code_location_end_pos_column
= (int)ibf_load_small_value(load
, &reading_pos
);
11793 const ibf_offset_t insns_info_body_offset
= (ibf_offset_t
)IBF_BODY_OFFSET(ibf_load_small_value(load
, &reading_pos
));
11794 const ibf_offset_t insns_info_positions_offset
= (ibf_offset_t
)IBF_BODY_OFFSET(ibf_load_small_value(load
, &reading_pos
));
11795 const unsigned int insns_info_size
= (unsigned int)ibf_load_small_value(load
, &reading_pos
);
11796 const ibf_offset_t local_table_offset
= (ibf_offset_t
)IBF_BODY_OFFSET(ibf_load_small_value(load
, &reading_pos
));
11797 const unsigned int catch_table_size
= (unsigned int)ibf_load_small_value(load
, &reading_pos
);
11798 const ibf_offset_t catch_table_offset
= (ibf_offset_t
)IBF_BODY_OFFSET(ibf_load_small_value(load
, &reading_pos
));
11799 const int parent_iseq_index
= (int)ibf_load_small_value(load
, &reading_pos
);
11800 const int local_iseq_index
= (int)ibf_load_small_value(load
, &reading_pos
);
11801 const int mandatory_only_iseq_index
= (int)ibf_load_small_value(load
, &reading_pos
);
11802 const ibf_offset_t ci_entries_offset
= (ibf_offset_t
)IBF_BODY_OFFSET(ibf_load_small_value(load
, &reading_pos
));
11803 const ibf_offset_t outer_variables_offset
= (ibf_offset_t
)IBF_BODY_OFFSET(ibf_load_small_value(load
, &reading_pos
));
11804 const rb_snum_t variable_flip_count
= (rb_snum_t
)ibf_load_small_value(load
, &reading_pos
);
11805 const unsigned int local_table_size
= (unsigned int)ibf_load_small_value(load
, &reading_pos
);
11806 const unsigned int is_size
= (unsigned int)ibf_load_small_value(load
, &reading_pos
);
11807 const unsigned int ci_size
= (unsigned int)ibf_load_small_value(load
, &reading_pos
);
11808 const unsigned int stack_max
= (unsigned int)ibf_load_small_value(load
, &reading_pos
);
11809 const char catch_except_p
= (char)ibf_load_small_value(load
, &reading_pos
);
11810 const bool builtin_inline_p
= (bool)ibf_load_small_value(load
, &reading_pos
);
11812 #undef IBF_BODY_OFFSET
11814 load_body
->type
= type
;
11815 load_body
->stack_max
= stack_max
;
11816 load_body
->param
.flags
.has_lead
= (param_flags
>> 0) & 1;
11817 load_body
->param
.flags
.has_opt
= (param_flags
>> 1) & 1;
11818 load_body
->param
.flags
.has_rest
= (param_flags
>> 2) & 1;
11819 load_body
->param
.flags
.has_post
= (param_flags
>> 3) & 1;
11820 load_body
->param
.flags
.has_kw
= FALSE
;
11821 load_body
->param
.flags
.has_kwrest
= (param_flags
>> 5) & 1;
11822 load_body
->param
.flags
.has_block
= (param_flags
>> 6) & 1;
11823 load_body
->param
.flags
.ambiguous_param0
= (param_flags
>> 7) & 1;
11824 load_body
->param
.flags
.accepts_no_kwarg
= (param_flags
>> 8) & 1;
11825 load_body
->param
.flags
.ruby2_keywords
= (param_flags
>> 9) & 1;
11826 load_body
->param
.size
= param_size
;
11827 load_body
->param
.lead_num
= param_lead_num
;
11828 load_body
->param
.opt_num
= param_opt_num
;
11829 load_body
->param
.rest_start
= param_rest_start
;
11830 load_body
->param
.post_start
= param_post_start
;
11831 load_body
->param
.post_num
= param_post_num
;
11832 load_body
->param
.block_start
= param_block_start
;
11833 load_body
->local_table_size
= local_table_size
;
11834 load_body
->is_size
= is_size
;
11835 load_body
->ci_size
= ci_size
;
11836 load_body
->insns_info
.size
= insns_info_size
;
11838 ISEQ_COVERAGE_SET(iseq
, Qnil
);
11839 ISEQ_ORIGINAL_ISEQ_CLEAR(iseq
);
11840 load_body
->variable
.flip_count
= variable_flip_count
;
11841 load_body
->variable
.script_lines
= Qnil
;
11843 load_body
->location
.first_lineno
= location_first_lineno
;
11844 load_body
->location
.node_id
= location_node_id
;
11845 load_body
->location
.code_location
.beg_pos
.lineno
= location_code_location_beg_pos_lineno
;
11846 load_body
->location
.code_location
.beg_pos
.column
= location_code_location_beg_pos_column
;
11847 load_body
->location
.code_location
.end_pos
.lineno
= location_code_location_end_pos_lineno
;
11848 load_body
->location
.code_location
.end_pos
.column
= location_code_location_end_pos_column
;
11849 load_body
->catch_except_p
= catch_except_p
;
11850 load_body
->builtin_inline_p
= builtin_inline_p
;
11852 load_body
->is_entries
= ZALLOC_N(union iseq_inline_storage_entry
, is_size
);
11853 ibf_load_ci_entries(load
, ci_entries_offset
, ci_size
, &load_body
->call_data
);
11854 load_body
->outer_variables
= ibf_load_outer_variables(load
, outer_variables_offset
);
11855 load_body
->param
.opt_table
= ibf_load_param_opt_table(load
, param_opt_table_offset
, param_opt_num
);
11856 load_body
->param
.keyword
= ibf_load_param_keyword(load
, param_keyword_offset
);
11857 load_body
->param
.flags
.has_kw
= (param_flags
>> 4) & 1;
11858 load_body
->insns_info
.body
= ibf_load_insns_info_body(load
, insns_info_body_offset
, insns_info_size
);
11859 load_body
->insns_info
.positions
= ibf_load_insns_info_positions(load
, insns_info_positions_offset
, insns_info_size
);
11860 load_body
->local_table
= ibf_load_local_table(load
, local_table_offset
, local_table_size
);
11861 load_body
->catch_table
= ibf_load_catch_table(load
, catch_table_offset
, catch_table_size
);
11862 load_body
->parent_iseq
= ibf_load_iseq(load
, (const rb_iseq_t
*)(VALUE
)parent_iseq_index
);
11863 load_body
->local_iseq
= ibf_load_iseq(load
, (const rb_iseq_t
*)(VALUE
)local_iseq_index
);
11864 load_body
->mandatory_only_iseq
= ibf_load_iseq(load
, (const rb_iseq_t
*)(VALUE
)mandatory_only_iseq_index
);
11866 ibf_load_code(load
, iseq
, bytecode_offset
, bytecode_size
, iseq_size
);
11867 #if VM_INSN_INFO_TABLE_IMPL == 2
11868 rb_iseq_insns_info_encode_positions(iseq
);
11871 rb_iseq_translate_threaded_code(iseq
);
11873 #if IBF_ISEQ_ENABLE_LOCAL_BUFFER
11874 load
->current_buffer
= &load
->global_buffer
;
11878 VALUE realpath
= Qnil
, path
= ibf_load_object(load
, location_pathobj_index
);
11879 if (RB_TYPE_P(path
, T_STRING
)) {
11880 realpath
= path
= rb_fstring(path
);
11882 else if (RB_TYPE_P(path
, T_ARRAY
)) {
11883 VALUE pathobj
= path
;
11884 if (RARRAY_LEN(pathobj
) != 2) {
11885 rb_raise(rb_eRuntimeError
, "path object size mismatch");
11887 path
= rb_fstring(RARRAY_AREF(pathobj
, 0));
11888 realpath
= RARRAY_AREF(pathobj
, 1);
11889 if (!NIL_P(realpath
)) {
11890 if (!RB_TYPE_P(realpath
, T_STRING
)) {
11891 rb_raise(rb_eArgError
, "unexpected realpath %"PRIxVALUE
11892 "(%x), path=%+"PRIsVALUE
,
11893 realpath
, TYPE(realpath
), path
);
11895 realpath
= rb_fstring(realpath
);
11899 rb_raise(rb_eRuntimeError
, "unexpected path object");
11901 rb_iseq_pathobj_set(iseq
, path
, realpath
);
11904 RB_OBJ_WRITE(iseq
, &load_body
->location
.base_label
, ibf_load_location_str(load
, location_base_label_index
));
11905 RB_OBJ_WRITE(iseq
, &load_body
->location
.label
, ibf_load_location_str(load
, location_label_index
));
11907 #if IBF_ISEQ_ENABLE_LOCAL_BUFFER
11908 load
->current_buffer
= saved_buffer
;
11910 verify_call_cache(iseq
);
11913 struct ibf_dump_iseq_list_arg
11915 struct ibf_dump
*dump
;
11920 ibf_dump_iseq_list_i(st_data_t key
, st_data_t val
, st_data_t ptr
)
11922 const rb_iseq_t
*iseq
= (const rb_iseq_t
*)key
;
11923 struct ibf_dump_iseq_list_arg
*args
= (struct ibf_dump_iseq_list_arg
*)ptr
;
11925 ibf_offset_t offset
= ibf_dump_iseq_each(args
->dump
, iseq
);
11926 rb_ary_push(args
->offset_list
, UINT2NUM(offset
));
11928 return ST_CONTINUE
;
11932 ibf_dump_iseq_list(struct ibf_dump
*dump
, struct ibf_header
*header
)
11934 VALUE offset_list
= rb_ary_tmp_new(dump
->iseq_table
->num_entries
);
11936 struct ibf_dump_iseq_list_arg args
;
11938 args
.offset_list
= offset_list
;
11940 st_foreach(dump
->iseq_table
, ibf_dump_iseq_list_i
, (st_data_t
)&args
);
11943 st_index_t size
= dump
->iseq_table
->num_entries
;
11944 ibf_offset_t
*offsets
= ALLOCA_N(ibf_offset_t
, size
);
11946 for (i
= 0; i
< size
; i
++) {
11947 offsets
[i
] = NUM2UINT(RARRAY_AREF(offset_list
, i
));
11950 ibf_dump_align(dump
, sizeof(ibf_offset_t
));
11951 header
->iseq_list_offset
= ibf_dump_write(dump
, offsets
, sizeof(ibf_offset_t
) * size
);
11952 header
->iseq_list_size
= (unsigned int)size
;
11955 #define IBF_OBJECT_INTERNAL FL_PROMOTED0
11959 * - ibf_object_header
11960 * - ibf_object_xxx (xxx is type)
11963 struct ibf_object_header
{
11964 unsigned int type
: 5;
11965 unsigned int special_const
: 1;
11966 unsigned int frozen
: 1;
11967 unsigned int internal
: 1;
11970 enum ibf_object_class_index
{
11971 IBF_OBJECT_CLASS_OBJECT
,
11972 IBF_OBJECT_CLASS_ARRAY
,
11973 IBF_OBJECT_CLASS_STANDARD_ERROR
,
11974 IBF_OBJECT_CLASS_NO_MATCHING_PATTERN_ERROR
,
11975 IBF_OBJECT_CLASS_TYPE_ERROR
,
11976 IBF_OBJECT_CLASS_NO_MATCHING_PATTERN_KEY_ERROR
,
11979 struct ibf_object_regexp
{
11984 struct ibf_object_hash
{
11986 long keyval
[FLEX_ARY_LEN
];
11989 struct ibf_object_struct_range
{
11997 struct ibf_object_bignum
{
11999 BDIGIT digits
[FLEX_ARY_LEN
];
12002 enum ibf_object_data_type
{
12003 IBF_OBJECT_DATA_ENCODING
,
12006 struct ibf_object_complex_rational
{
12010 struct ibf_object_symbol
{
12014 #define IBF_ALIGNED_OFFSET(align, offset) /* offset > 0 */ \
12015 ((((offset) - 1) / (align) + 1) * (align))
12016 #define IBF_OBJBODY(type, offset) (const type *)\
12017 ibf_load_check_offset(load, IBF_ALIGNED_OFFSET(RUBY_ALIGNOF(type), offset))
12019 static const void *
12020 ibf_load_check_offset(const struct ibf_load
*load
, size_t offset
)
12022 if (offset
>= load
->current_buffer
->size
) {
12023 rb_raise(rb_eIndexError
, "object offset out of range: %"PRIdSIZE
, offset
);
12025 return load
->current_buffer
->buff
+ offset
;
12028 NORETURN(static void ibf_dump_object_unsupported(struct ibf_dump
*dump
, VALUE obj
));
12031 ibf_dump_object_unsupported(struct ibf_dump
*dump
, VALUE obj
)
12034 rb_raw_obj_info(buff
, sizeof(buff
), obj
);
12035 rb_raise(rb_eNotImpError
, "ibf_dump_object_unsupported: %s", buff
);
12038 NORETURN(static VALUE
ibf_load_object_unsupported(const struct ibf_load
*load
, const struct ibf_object_header
*header
, ibf_offset_t offset
));
12041 ibf_load_object_unsupported(const struct ibf_load
*load
, const struct ibf_object_header
*header
, ibf_offset_t offset
)
12043 rb_raise(rb_eArgError
, "unsupported");
12044 UNREACHABLE_RETURN(Qnil
);
12048 ibf_dump_object_class(struct ibf_dump
*dump
, VALUE obj
)
12050 enum ibf_object_class_index cindex
;
12051 if (obj
== rb_cObject
) {
12052 cindex
= IBF_OBJECT_CLASS_OBJECT
;
12054 else if (obj
== rb_cArray
) {
12055 cindex
= IBF_OBJECT_CLASS_ARRAY
;
12057 else if (obj
== rb_eStandardError
) {
12058 cindex
= IBF_OBJECT_CLASS_STANDARD_ERROR
;
12060 else if (obj
== rb_eNoMatchingPatternError
) {
12061 cindex
= IBF_OBJECT_CLASS_NO_MATCHING_PATTERN_ERROR
;
12063 else if (obj
== rb_eTypeError
) {
12064 cindex
= IBF_OBJECT_CLASS_TYPE_ERROR
;
12066 else if (obj
== rb_eNoMatchingPatternKeyError
) {
12067 cindex
= IBF_OBJECT_CLASS_NO_MATCHING_PATTERN_KEY_ERROR
;
12070 rb_obj_info_dump(obj
);
12072 rb_bug("unsupported class");
12074 ibf_dump_write_small_value(dump
, (VALUE
)cindex
);
12078 ibf_load_object_class(const struct ibf_load
*load
, const struct ibf_object_header
*header
, ibf_offset_t offset
)
12080 enum ibf_object_class_index cindex
= (enum ibf_object_class_index
)ibf_load_small_value(load
, &offset
);
12083 case IBF_OBJECT_CLASS_OBJECT
:
12085 case IBF_OBJECT_CLASS_ARRAY
:
12087 case IBF_OBJECT_CLASS_STANDARD_ERROR
:
12088 return rb_eStandardError
;
12089 case IBF_OBJECT_CLASS_NO_MATCHING_PATTERN_ERROR
:
12090 return rb_eNoMatchingPatternError
;
12091 case IBF_OBJECT_CLASS_TYPE_ERROR
:
12092 return rb_eTypeError
;
12093 case IBF_OBJECT_CLASS_NO_MATCHING_PATTERN_KEY_ERROR
:
12094 return rb_eNoMatchingPatternKeyError
;
12097 rb_raise(rb_eArgError
, "ibf_load_object_class: unknown class (%d)", (int)cindex
);
12102 ibf_dump_object_float(struct ibf_dump
*dump
, VALUE obj
)
12104 double dbl
= RFLOAT_VALUE(obj
);
12105 (void)IBF_W(&dbl
, double, 1);
12109 ibf_load_object_float(const struct ibf_load
*load
, const struct ibf_object_header
*header
, ibf_offset_t offset
)
12111 const double *dblp
= IBF_OBJBODY(double, offset
);
12112 return DBL2NUM(*dblp
);
12116 ibf_dump_object_string(struct ibf_dump
*dump
, VALUE obj
)
12118 long encindex
= (long)rb_enc_get_index(obj
);
12119 long len
= RSTRING_LEN(obj
);
12120 const char *ptr
= RSTRING_PTR(obj
);
12122 if (encindex
> RUBY_ENCINDEX_BUILTIN_MAX
) {
12123 rb_encoding
*enc
= rb_enc_from_index((int)encindex
);
12124 const char *enc_name
= rb_enc_name(enc
);
12125 encindex
= RUBY_ENCINDEX_BUILTIN_MAX
+ ibf_dump_object(dump
, rb_str_new2(enc_name
));
12128 ibf_dump_write_small_value(dump
, encindex
);
12129 ibf_dump_write_small_value(dump
, len
);
12130 IBF_WP(ptr
, char, len
);
12134 ibf_load_object_string(const struct ibf_load
*load
, const struct ibf_object_header
*header
, ibf_offset_t offset
)
12136 ibf_offset_t reading_pos
= offset
;
12138 int encindex
= (int)ibf_load_small_value(load
, &reading_pos
);
12139 const long len
= (long)ibf_load_small_value(load
, &reading_pos
);
12140 const char *ptr
= load
->current_buffer
->buff
+ reading_pos
;
12142 if (encindex
> RUBY_ENCINDEX_BUILTIN_MAX
) {
12143 VALUE enc_name_str
= ibf_load_object(load
, encindex
- RUBY_ENCINDEX_BUILTIN_MAX
);
12144 encindex
= rb_enc_find_index(RSTRING_PTR(enc_name_str
));
12148 if (header
->frozen
&& !header
->internal
) {
12149 str
= rb_enc_interned_str(ptr
, len
, rb_enc_from_index(encindex
));
12152 str
= rb_enc_str_new(ptr
, len
, rb_enc_from_index(encindex
));
12154 if (header
->internal
) rb_obj_hide(str
);
12155 if (header
->frozen
) str
= rb_fstring(str
);
12161 ibf_dump_object_regexp(struct ibf_dump
*dump
, VALUE obj
)
12163 VALUE srcstr
= RREGEXP_SRC(obj
);
12164 struct ibf_object_regexp regexp
;
12165 regexp
.option
= (char)rb_reg_options(obj
);
12166 regexp
.srcstr
= (long)ibf_dump_object(dump
, srcstr
);
12168 ibf_dump_write_byte(dump
, (unsigned char)regexp
.option
);
12169 ibf_dump_write_small_value(dump
, regexp
.srcstr
);
12173 ibf_load_object_regexp(const struct ibf_load
*load
, const struct ibf_object_header
*header
, ibf_offset_t offset
)
12175 struct ibf_object_regexp regexp
;
12176 regexp
.option
= ibf_load_byte(load
, &offset
);
12177 regexp
.srcstr
= ibf_load_small_value(load
, &offset
);
12179 VALUE srcstr
= ibf_load_object(load
, regexp
.srcstr
);
12180 VALUE reg
= rb_reg_compile(srcstr
, (int)regexp
.option
, NULL
, 0);
12182 if (header
->internal
) rb_obj_hide(reg
);
12183 if (header
->frozen
) rb_obj_freeze(reg
);
12189 ibf_dump_object_array(struct ibf_dump
*dump
, VALUE obj
)
12191 long i
, len
= RARRAY_LEN(obj
);
12192 ibf_dump_write_small_value(dump
, len
);
12193 for (i
=0; i
<len
; i
++) {
12194 long index
= (long)ibf_dump_object(dump
, RARRAY_AREF(obj
, i
));
12195 ibf_dump_write_small_value(dump
, index
);
12200 ibf_load_object_array(const struct ibf_load
*load
, const struct ibf_object_header
*header
, ibf_offset_t offset
)
12202 ibf_offset_t reading_pos
= offset
;
12204 const long len
= (long)ibf_load_small_value(load
, &reading_pos
);
12206 VALUE ary
= rb_ary_new_capa(len
);
12209 for (i
=0; i
<len
; i
++) {
12210 const VALUE index
= ibf_load_small_value(load
, &reading_pos
);
12211 rb_ary_push(ary
, ibf_load_object(load
, index
));
12214 if (header
->internal
) rb_obj_hide(ary
);
12215 if (header
->frozen
) rb_obj_freeze(ary
);
12221 ibf_dump_object_hash_i(st_data_t key
, st_data_t val
, st_data_t ptr
)
12223 struct ibf_dump
*dump
= (struct ibf_dump
*)ptr
;
12225 VALUE key_index
= ibf_dump_object(dump
, (VALUE
)key
);
12226 VALUE val_index
= ibf_dump_object(dump
, (VALUE
)val
);
12228 ibf_dump_write_small_value(dump
, key_index
);
12229 ibf_dump_write_small_value(dump
, val_index
);
12230 return ST_CONTINUE
;
12234 ibf_dump_object_hash(struct ibf_dump
*dump
, VALUE obj
)
12236 long len
= RHASH_SIZE(obj
);
12237 ibf_dump_write_small_value(dump
, (VALUE
)len
);
12239 if (len
> 0) rb_hash_foreach(obj
, ibf_dump_object_hash_i
, (VALUE
)dump
);
12243 ibf_load_object_hash(const struct ibf_load
*load
, const struct ibf_object_header
*header
, ibf_offset_t offset
)
12245 long len
= (long)ibf_load_small_value(load
, &offset
);
12246 VALUE obj
= rb_hash_new_with_size(len
);
12249 for (i
= 0; i
< len
; i
++) {
12250 VALUE key_index
= ibf_load_small_value(load
, &offset
);
12251 VALUE val_index
= ibf_load_small_value(load
, &offset
);
12253 VALUE key
= ibf_load_object(load
, key_index
);
12254 VALUE val
= ibf_load_object(load
, val_index
);
12255 rb_hash_aset(obj
, key
, val
);
12257 rb_hash_rehash(obj
);
12259 if (header
->internal
) rb_obj_hide(obj
);
12260 if (header
->frozen
) rb_obj_freeze(obj
);
12266 ibf_dump_object_struct(struct ibf_dump
*dump
, VALUE obj
)
12268 if (rb_obj_is_kind_of(obj
, rb_cRange
)) {
12269 struct ibf_object_struct_range range
;
12273 range
.class_index
= 0;
12275 rb_range_values(obj
, &beg
, &end
, &range
.excl
);
12276 range
.beg
= (long)ibf_dump_object(dump
, beg
);
12277 range
.end
= (long)ibf_dump_object(dump
, end
);
12279 IBF_W_ALIGN(struct ibf_object_struct_range
);
12283 rb_raise(rb_eNotImpError
, "ibf_dump_object_struct: unsupported class %"PRIsVALUE
,
12284 rb_class_name(CLASS_OF(obj
)));
12289 ibf_load_object_struct(const struct ibf_load
*load
, const struct ibf_object_header
*header
, ibf_offset_t offset
)
12291 const struct ibf_object_struct_range
*range
= IBF_OBJBODY(struct ibf_object_struct_range
, offset
);
12292 VALUE beg
= ibf_load_object(load
, range
->beg
);
12293 VALUE end
= ibf_load_object(load
, range
->end
);
12294 VALUE obj
= rb_range_new(beg
, end
, range
->excl
);
12295 if (header
->internal
) rb_obj_hide(obj
);
12296 if (header
->frozen
) rb_obj_freeze(obj
);
12301 ibf_dump_object_bignum(struct ibf_dump
*dump
, VALUE obj
)
12303 ssize_t len
= BIGNUM_LEN(obj
);
12304 ssize_t slen
= BIGNUM_SIGN(obj
) > 0 ? len
: len
* -1;
12305 BDIGIT
*d
= BIGNUM_DIGITS(obj
);
12307 (void)IBF_W(&slen
, ssize_t
, 1);
12308 IBF_WP(d
, BDIGIT
, len
);
12312 ibf_load_object_bignum(const struct ibf_load
*load
, const struct ibf_object_header
*header
, ibf_offset_t offset
)
12314 const struct ibf_object_bignum
*bignum
= IBF_OBJBODY(struct ibf_object_bignum
, offset
);
12315 int sign
= bignum
->slen
> 0;
12316 ssize_t len
= sign
> 0 ? bignum
->slen
: -1 * bignum
->slen
;
12317 VALUE obj
= rb_integer_unpack(bignum
->digits
, len
* 2, 2, 0,
12318 INTEGER_PACK_LITTLE_ENDIAN
| (sign
== 0 ? INTEGER_PACK_NEGATIVE
: 0));
12319 if (header
->internal
) rb_obj_hide(obj
);
12320 if (header
->frozen
) rb_obj_freeze(obj
);
12325 ibf_dump_object_data(struct ibf_dump
*dump
, VALUE obj
)
12327 if (rb_data_is_encoding(obj
)) {
12328 rb_encoding
*enc
= rb_to_encoding(obj
);
12329 const char *name
= rb_enc_name(enc
);
12330 long len
= strlen(name
) + 1;
12332 data
[0] = IBF_OBJECT_DATA_ENCODING
;
12334 (void)IBF_W(data
, long, 2);
12335 IBF_WP(name
, char, len
);
12338 ibf_dump_object_unsupported(dump
, obj
);
12343 ibf_load_object_data(const struct ibf_load
*load
, const struct ibf_object_header
*header
, ibf_offset_t offset
)
12345 const long *body
= IBF_OBJBODY(long, offset
);
12346 const enum ibf_object_data_type type
= (enum ibf_object_data_type
)body
[0];
12347 /* const long len = body[1]; */
12348 const char *data
= (const char *)&body
[2];
12351 case IBF_OBJECT_DATA_ENCODING
:
12353 VALUE encobj
= rb_enc_from_encoding(rb_enc_find(data
));
12358 return ibf_load_object_unsupported(load
, header
, offset
);
12362 ibf_dump_object_complex_rational(struct ibf_dump
*dump
, VALUE obj
)
12365 data
[0] = (long)ibf_dump_object(dump
, RCOMPLEX(obj
)->real
);
12366 data
[1] = (long)ibf_dump_object(dump
, RCOMPLEX(obj
)->imag
);
12368 (void)IBF_W(data
, long, 2);
12372 ibf_load_object_complex_rational(const struct ibf_load
*load
, const struct ibf_object_header
*header
, ibf_offset_t offset
)
12374 const struct ibf_object_complex_rational
*nums
= IBF_OBJBODY(struct ibf_object_complex_rational
, offset
);
12375 VALUE a
= ibf_load_object(load
, nums
->a
);
12376 VALUE b
= ibf_load_object(load
, nums
->b
);
12377 VALUE obj
= header
->type
== T_COMPLEX
?
12378 rb_complex_new(a
, b
) : rb_rational_new(a
, b
);
12380 if (header
->internal
) rb_obj_hide(obj
);
12381 if (header
->frozen
) rb_obj_freeze(obj
);
12386 ibf_dump_object_symbol(struct ibf_dump
*dump
, VALUE obj
)
12388 ibf_dump_object_string(dump
, rb_sym2str(obj
));
12392 ibf_load_object_symbol(const struct ibf_load
*load
, const struct ibf_object_header
*header
, ibf_offset_t offset
)
12394 ibf_offset_t reading_pos
= offset
;
12396 int encindex
= (int)ibf_load_small_value(load
, &reading_pos
);
12397 const long len
= (long)ibf_load_small_value(load
, &reading_pos
);
12398 const char *ptr
= load
->current_buffer
->buff
+ reading_pos
;
12400 if (encindex
> RUBY_ENCINDEX_BUILTIN_MAX
) {
12401 VALUE enc_name_str
= ibf_load_object(load
, encindex
- RUBY_ENCINDEX_BUILTIN_MAX
);
12402 encindex
= rb_enc_find_index(RSTRING_PTR(enc_name_str
));
12405 ID id
= rb_intern3(ptr
, len
, rb_enc_from_index(encindex
));
12409 typedef void (*ibf_dump_object_function
)(struct ibf_dump
*dump
, VALUE obj
);
12410 static ibf_dump_object_function dump_object_functions
[RUBY_T_MASK
+1] = {
12411 ibf_dump_object_unsupported
, /* T_NONE */
12412 ibf_dump_object_unsupported
, /* T_OBJECT */
12413 ibf_dump_object_class
, /* T_CLASS */
12414 ibf_dump_object_unsupported
, /* T_MODULE */
12415 ibf_dump_object_float
, /* T_FLOAT */
12416 ibf_dump_object_string
, /* T_STRING */
12417 ibf_dump_object_regexp
, /* T_REGEXP */
12418 ibf_dump_object_array
, /* T_ARRAY */
12419 ibf_dump_object_hash
, /* T_HASH */
12420 ibf_dump_object_struct
, /* T_STRUCT */
12421 ibf_dump_object_bignum
, /* T_BIGNUM */
12422 ibf_dump_object_unsupported
, /* T_FILE */
12423 ibf_dump_object_data
, /* T_DATA */
12424 ibf_dump_object_unsupported
, /* T_MATCH */
12425 ibf_dump_object_complex_rational
, /* T_COMPLEX */
12426 ibf_dump_object_complex_rational
, /* T_RATIONAL */
12427 ibf_dump_object_unsupported
, /* 0x10 */
12428 ibf_dump_object_unsupported
, /* 0x11 T_NIL */
12429 ibf_dump_object_unsupported
, /* 0x12 T_TRUE */
12430 ibf_dump_object_unsupported
, /* 0x13 T_FALSE */
12431 ibf_dump_object_symbol
, /* 0x14 T_SYMBOL */
12432 ibf_dump_object_unsupported
, /* T_FIXNUM */
12433 ibf_dump_object_unsupported
, /* T_UNDEF */
12434 ibf_dump_object_unsupported
, /* 0x17 */
12435 ibf_dump_object_unsupported
, /* 0x18 */
12436 ibf_dump_object_unsupported
, /* 0x19 */
12437 ibf_dump_object_unsupported
, /* T_IMEMO 0x1a */
12438 ibf_dump_object_unsupported
, /* T_NODE 0x1b */
12439 ibf_dump_object_unsupported
, /* T_ICLASS 0x1c */
12440 ibf_dump_object_unsupported
, /* T_ZOMBIE 0x1d */
12441 ibf_dump_object_unsupported
, /* 0x1e */
12442 ibf_dump_object_unsupported
, /* 0x1f */
12446 ibf_dump_object_object_header(struct ibf_dump
*dump
, const struct ibf_object_header header
)
12448 unsigned char byte
=
12449 (header
.type
<< 0) |
12450 (header
.special_const
<< 5) |
12451 (header
.frozen
<< 6) |
12452 (header
.internal
<< 7);
12457 static struct ibf_object_header
12458 ibf_load_object_object_header(const struct ibf_load
*load
, ibf_offset_t
*offset
)
12460 unsigned char byte
= ibf_load_byte(load
, offset
);
12462 struct ibf_object_header header
;
12463 header
.type
= (byte
>> 0) & 0x1f;
12464 header
.special_const
= (byte
>> 5) & 0x01;
12465 header
.frozen
= (byte
>> 6) & 0x01;
12466 header
.internal
= (byte
>> 7) & 0x01;
12471 static ibf_offset_t
12472 ibf_dump_object_object(struct ibf_dump
*dump
, VALUE obj
)
12474 struct ibf_object_header obj_header
;
12475 ibf_offset_t current_offset
;
12476 IBF_ZERO(obj_header
);
12477 obj_header
.type
= TYPE(obj
);
12479 IBF_W_ALIGN(ibf_offset_t
);
12480 current_offset
= ibf_dump_pos(dump
);
12482 if (SPECIAL_CONST_P(obj
) &&
12483 ! (SYMBOL_P(obj
) ||
12484 RB_FLOAT_TYPE_P(obj
))) {
12485 obj_header
.special_const
= TRUE
;
12486 obj_header
.frozen
= TRUE
;
12487 obj_header
.internal
= TRUE
;
12488 ibf_dump_object_object_header(dump
, obj_header
);
12489 ibf_dump_write_small_value(dump
, obj
);
12492 obj_header
.internal
= SPECIAL_CONST_P(obj
) ? FALSE
: (RBASIC_CLASS(obj
) == 0) ? TRUE
: FALSE
;
12493 obj_header
.special_const
= FALSE
;
12494 obj_header
.frozen
= FL_TEST(obj
, FL_FREEZE
) ? TRUE
: FALSE
;
12495 ibf_dump_object_object_header(dump
, obj_header
);
12496 (*dump_object_functions
[obj_header
.type
])(dump
, obj
);
12499 return current_offset
;
12502 typedef VALUE (*ibf_load_object_function
)(const struct ibf_load
*load
, const struct ibf_object_header
*header
, ibf_offset_t offset
);
12503 static ibf_load_object_function load_object_functions
[RUBY_T_MASK
+1] = {
12504 ibf_load_object_unsupported
, /* T_NONE */
12505 ibf_load_object_unsupported
, /* T_OBJECT */
12506 ibf_load_object_class
, /* T_CLASS */
12507 ibf_load_object_unsupported
, /* T_MODULE */
12508 ibf_load_object_float
, /* T_FLOAT */
12509 ibf_load_object_string
, /* T_STRING */
12510 ibf_load_object_regexp
, /* T_REGEXP */
12511 ibf_load_object_array
, /* T_ARRAY */
12512 ibf_load_object_hash
, /* T_HASH */
12513 ibf_load_object_struct
, /* T_STRUCT */
12514 ibf_load_object_bignum
, /* T_BIGNUM */
12515 ibf_load_object_unsupported
, /* T_FILE */
12516 ibf_load_object_data
, /* T_DATA */
12517 ibf_load_object_unsupported
, /* T_MATCH */
12518 ibf_load_object_complex_rational
, /* T_COMPLEX */
12519 ibf_load_object_complex_rational
, /* T_RATIONAL */
12520 ibf_load_object_unsupported
, /* 0x10 */
12521 ibf_load_object_unsupported
, /* T_NIL */
12522 ibf_load_object_unsupported
, /* T_TRUE */
12523 ibf_load_object_unsupported
, /* T_FALSE */
12524 ibf_load_object_symbol
,
12525 ibf_load_object_unsupported
, /* T_FIXNUM */
12526 ibf_load_object_unsupported
, /* T_UNDEF */
12527 ibf_load_object_unsupported
, /* 0x17 */
12528 ibf_load_object_unsupported
, /* 0x18 */
12529 ibf_load_object_unsupported
, /* 0x19 */
12530 ibf_load_object_unsupported
, /* T_IMEMO 0x1a */
12531 ibf_load_object_unsupported
, /* T_NODE 0x1b */
12532 ibf_load_object_unsupported
, /* T_ICLASS 0x1c */
12533 ibf_load_object_unsupported
, /* T_ZOMBIE 0x1d */
12534 ibf_load_object_unsupported
, /* 0x1e */
12535 ibf_load_object_unsupported
, /* 0x1f */
12539 ibf_load_object(const struct ibf_load
*load
, VALUE object_index
)
12541 if (object_index
== 0) {
12545 VALUE obj
= pinned_list_fetch(load
->current_buffer
->obj_list
, (long)object_index
);
12547 ibf_offset_t
*offsets
= (ibf_offset_t
*)(load
->current_buffer
->obj_list_offset
+ load
->current_buffer
->buff
);
12548 ibf_offset_t offset
= offsets
[object_index
];
12549 const struct ibf_object_header header
= ibf_load_object_object_header(load
, &offset
);
12552 fprintf(stderr
, "ibf_load_object: list=%#x offsets=%p offset=%#x\n",
12553 load
->current_buffer
->obj_list_offset
, (void *)offsets
, offset
);
12554 fprintf(stderr
, "ibf_load_object: type=%#x special=%d frozen=%d internal=%d\n",
12555 header
.type
, header
.special_const
, header
.frozen
, header
.internal
);
12557 if (offset
>= load
->current_buffer
->size
) {
12558 rb_raise(rb_eIndexError
, "object offset out of range: %u", offset
);
12561 if (header
.special_const
) {
12562 ibf_offset_t reading_pos
= offset
;
12564 obj
= ibf_load_small_value(load
, &reading_pos
);
12567 obj
= (*load_object_functions
[header
.type
])(load
, &header
, offset
);
12570 pinned_list_store(load
->current_buffer
->obj_list
, (long)object_index
, obj
);
12573 fprintf(stderr
, "ibf_load_object: index=%#"PRIxVALUE
" obj=%#"PRIxVALUE
"\n",
12574 object_index
, obj
);
12580 struct ibf_dump_object_list_arg
12582 struct ibf_dump
*dump
;
12587 ibf_dump_object_list_i(st_data_t key
, st_data_t val
, st_data_t ptr
)
12589 VALUE obj
= (VALUE
)key
;
12590 struct ibf_dump_object_list_arg
*args
= (struct ibf_dump_object_list_arg
*)ptr
;
12592 ibf_offset_t offset
= ibf_dump_object_object(args
->dump
, obj
);
12593 rb_ary_push(args
->offset_list
, UINT2NUM(offset
));
12595 return ST_CONTINUE
;
12599 ibf_dump_object_list(struct ibf_dump
*dump
, ibf_offset_t
*obj_list_offset
, unsigned int *obj_list_size
)
12601 st_table
*obj_table
= dump
->current_buffer
->obj_table
;
12602 VALUE offset_list
= rb_ary_tmp_new(obj_table
->num_entries
);
12604 struct ibf_dump_object_list_arg args
;
12606 args
.offset_list
= offset_list
;
12608 st_foreach(obj_table
, ibf_dump_object_list_i
, (st_data_t
)&args
);
12610 IBF_W_ALIGN(ibf_offset_t
);
12611 *obj_list_offset
= ibf_dump_pos(dump
);
12613 st_index_t size
= obj_table
->num_entries
;
12616 for (i
=0; i
<size
; i
++) {
12617 ibf_offset_t offset
= NUM2UINT(RARRAY_AREF(offset_list
, i
));
12621 *obj_list_size
= (unsigned int)size
;
12625 ibf_dump_mark(void *ptr
)
12627 struct ibf_dump
*dump
= (struct ibf_dump
*)ptr
;
12628 rb_gc_mark(dump
->global_buffer
.str
);
12630 rb_mark_set(dump
->global_buffer
.obj_table
);
12631 rb_mark_set(dump
->iseq_table
);
12635 ibf_dump_free(void *ptr
)
12637 struct ibf_dump
*dump
= (struct ibf_dump
*)ptr
;
12638 if (dump
->global_buffer
.obj_table
) {
12639 st_free_table(dump
->global_buffer
.obj_table
);
12640 dump
->global_buffer
.obj_table
= 0;
12642 if (dump
->iseq_table
) {
12643 st_free_table(dump
->iseq_table
);
12644 dump
->iseq_table
= 0;
12650 ibf_dump_memsize(const void *ptr
)
12652 struct ibf_dump
*dump
= (struct ibf_dump
*)ptr
;
12653 size_t size
= sizeof(*dump
);
12654 if (dump
->iseq_table
) size
+= st_memsize(dump
->iseq_table
);
12655 if (dump
->global_buffer
.obj_table
) size
+= st_memsize(dump
->global_buffer
.obj_table
);
12659 static const rb_data_type_t ibf_dump_type
= {
12661 {ibf_dump_mark
, ibf_dump_free
, ibf_dump_memsize
,},
12662 0, 0, RUBY_TYPED_WB_PROTECTED
| RUBY_TYPED_FREE_IMMEDIATELY
12666 ibf_dump_setup(struct ibf_dump
*dump
, VALUE dumper_obj
)
12668 dump
->global_buffer
.obj_table
= NULL
; // GC may run before a value is assigned
12669 dump
->iseq_table
= NULL
;
12671 RB_OBJ_WRITE(dumper_obj
, &dump
->global_buffer
.str
, rb_str_new(0, 0));
12672 dump
->global_buffer
.obj_table
= ibf_dump_object_table_new();
12673 dump
->iseq_table
= st_init_numtable(); /* need free */
12675 dump
->current_buffer
= &dump
->global_buffer
;
12679 rb_iseq_ibf_dump(const rb_iseq_t
*iseq
, VALUE opt
)
12681 struct ibf_dump
*dump
;
12682 struct ibf_header header
= {{0}};
12686 if (iseq
->body
->parent_iseq
!= NULL
||
12687 iseq
->body
->local_iseq
!= iseq
) {
12688 rb_raise(rb_eRuntimeError
, "should be top of iseq");
12690 if (RTEST(ISEQ_COVERAGE(iseq
))) {
12691 rb_raise(rb_eRuntimeError
, "should not compile with coverage");
12694 dump_obj
= TypedData_Make_Struct(0, struct ibf_dump
, &ibf_dump_type
, dump
);
12695 ibf_dump_setup(dump
, dump_obj
);
12697 ibf_dump_write(dump
, &header
, sizeof(header
));
12698 ibf_dump_write(dump
, RUBY_PLATFORM
, strlen(RUBY_PLATFORM
) + 1);
12699 ibf_dump_iseq(dump
, iseq
);
12701 header
.magic
[0] = 'Y'; /* YARB */
12702 header
.magic
[1] = 'A';
12703 header
.magic
[2] = 'R';
12704 header
.magic
[3] = 'B';
12705 header
.major_version
= IBF_MAJOR_VERSION
;
12706 header
.minor_version
= IBF_MINOR_VERSION
;
12707 ibf_dump_iseq_list(dump
, &header
);
12708 ibf_dump_object_list(dump
, &header
.global_object_list_offset
, &header
.global_object_list_size
);
12709 header
.size
= ibf_dump_pos(dump
);
12712 VALUE opt_str
= opt
;
12713 const char *ptr
= StringValuePtr(opt_str
);
12714 header
.extra_size
= RSTRING_LENINT(opt_str
);
12715 ibf_dump_write(dump
, ptr
, header
.extra_size
);
12718 header
.extra_size
= 0;
12721 ibf_dump_overwrite(dump
, &header
, sizeof(header
), 0);
12723 str
= dump
->global_buffer
.str
;
12724 ibf_dump_free(dump
);
12725 DATA_PTR(dump_obj
) = NULL
;
12726 RB_GC_GUARD(dump_obj
);
12730 static const ibf_offset_t
*
12731 ibf_iseq_list(const struct ibf_load
*load
)
12733 return (const ibf_offset_t
*)(load
->global_buffer
.buff
+ load
->header
->iseq_list_offset
);
12737 rb_ibf_load_iseq_complete(rb_iseq_t
*iseq
)
12739 struct ibf_load
*load
= RTYPEDDATA_DATA(iseq
->aux
.loader
.obj
);
12740 rb_iseq_t
*prev_src_iseq
= load
->iseq
;
12741 ibf_offset_t offset
= ibf_iseq_list(load
)[iseq
->aux
.loader
.index
];
12744 fprintf(stderr
, "rb_ibf_load_iseq_complete: index=%#x offset=%#x size=%#x\n",
12745 iseq
->aux
.loader
.index
, offset
,
12746 load
->header
->size
);
12748 ibf_load_iseq_each(load
, iseq
, offset
);
12749 ISEQ_COMPILE_DATA_CLEAR(iseq
);
12750 FL_UNSET((VALUE
)iseq
, ISEQ_NOT_LOADED_YET
);
12751 rb_iseq_init_trace(iseq
);
12752 load
->iseq
= prev_src_iseq
;
12756 MJIT_FUNC_EXPORTED
const rb_iseq_t
*
12757 rb_iseq_complete(const rb_iseq_t
*iseq
)
12759 rb_ibf_load_iseq_complete((rb_iseq_t
*)iseq
);
12765 ibf_load_iseq(const struct ibf_load
*load
, const rb_iseq_t
*index_iseq
)
12767 int iseq_index
= (int)(VALUE
)index_iseq
;
12770 fprintf(stderr
, "ibf_load_iseq: index_iseq=%p iseq_list=%p\n",
12771 (void *)index_iseq
, (void *)load
->iseq_list
);
12773 if (iseq_index
== -1) {
12777 VALUE iseqv
= pinned_list_fetch(load
->iseq_list
, iseq_index
);
12780 fprintf(stderr
, "ibf_load_iseq: iseqv=%p\n", (void *)iseqv
);
12783 return (rb_iseq_t
*)iseqv
;
12786 rb_iseq_t
*iseq
= iseq_imemo_alloc();
12788 fprintf(stderr
, "ibf_load_iseq: new iseq=%p\n", (void *)iseq
);
12790 FL_SET((VALUE
)iseq
, ISEQ_NOT_LOADED_YET
);
12791 iseq
->aux
.loader
.obj
= load
->loader_obj
;
12792 iseq
->aux
.loader
.index
= iseq_index
;
12794 fprintf(stderr
, "ibf_load_iseq: iseq=%p loader_obj=%p index=%d\n",
12795 (void *)iseq
, (void *)load
->loader_obj
, iseq_index
);
12797 pinned_list_store(load
->iseq_list
, iseq_index
, (VALUE
)iseq
);
12801 fprintf(stderr
, "ibf_load_iseq: loading iseq=%p\n", (void *)iseq
);
12803 rb_ibf_load_iseq_complete(iseq
);
12805 if (GET_VM()->builtin_function_table
) {
12806 rb_ibf_load_iseq_complete(iseq
);
12808 #endif /* !USE_LAZY_LOAD */
12811 fprintf(stderr
, "ibf_load_iseq: iseq=%p loaded %p\n",
12812 (void *)iseq
, (void *)load
->iseq
);
12820 ibf_load_setup_bytes(struct ibf_load
*load
, VALUE loader_obj
, const char *bytes
, size_t size
)
12822 load
->loader_obj
= loader_obj
;
12823 load
->global_buffer
.buff
= bytes
;
12824 load
->header
= (struct ibf_header
*)load
->global_buffer
.buff
;
12825 load
->global_buffer
.size
= load
->header
->size
;
12826 load
->global_buffer
.obj_list_offset
= load
->header
->global_object_list_offset
;
12827 load
->global_buffer
.obj_list_size
= load
->header
->global_object_list_size
;
12828 RB_OBJ_WRITE(loader_obj
, &load
->iseq_list
, pinned_list_new(load
->header
->iseq_list_size
));
12829 RB_OBJ_WRITE(loader_obj
, &load
->global_buffer
.obj_list
, pinned_list_new(load
->global_buffer
.obj_list_size
));
12832 load
->current_buffer
= &load
->global_buffer
;
12834 if (size
< load
->header
->size
) {
12835 rb_raise(rb_eRuntimeError
, "broken binary format");
12837 if (strncmp(load
->header
->magic
, "YARB", 4) != 0) {
12838 rb_raise(rb_eRuntimeError
, "unknown binary format");
12840 if (load
->header
->major_version
!= IBF_MAJOR_VERSION
||
12841 load
->header
->minor_version
!= IBF_MINOR_VERSION
) {
12842 rb_raise(rb_eRuntimeError
, "unmatched version file (%u.%u for %u.%u)",
12843 load
->header
->major_version
, load
->header
->minor_version
, IBF_MAJOR_VERSION
, IBF_MINOR_VERSION
);
12845 if (strcmp(load
->global_buffer
.buff
+ sizeof(struct ibf_header
), RUBY_PLATFORM
) != 0) {
12846 rb_raise(rb_eRuntimeError
, "unmatched platform");
12848 if (load
->header
->iseq_list_offset
% RUBY_ALIGNOF(ibf_offset_t
)) {
12849 rb_raise(rb_eArgError
, "unaligned iseq list offset: %u",
12850 load
->header
->iseq_list_offset
);
12852 if (load
->global_buffer
.obj_list_offset
% RUBY_ALIGNOF(ibf_offset_t
)) {
12853 rb_raise(rb_eArgError
, "unaligned object list offset: %u",
12854 load
->global_buffer
.obj_list_offset
);
12859 ibf_load_setup(struct ibf_load
*load
, VALUE loader_obj
, VALUE str
)
12861 if (RSTRING_LENINT(str
) < (int)sizeof(struct ibf_header
)) {
12862 rb_raise(rb_eRuntimeError
, "broken binary format");
12866 str
= rb_str_new(RSTRING_PTR(str
), RSTRING_LEN(str
));
12869 ibf_load_setup_bytes(load
, loader_obj
, StringValuePtr(str
), RSTRING_LEN(str
));
12870 RB_OBJ_WRITE(loader_obj
, &load
->str
, str
);
12874 ibf_loader_mark(void *ptr
)
12876 struct ibf_load
*load
= (struct ibf_load
*)ptr
;
12877 rb_gc_mark(load
->str
);
12878 rb_gc_mark(load
->iseq_list
);
12879 rb_gc_mark(load
->global_buffer
.obj_list
);
/* GC free callback for the loader TypedData.
 * NOTE(review): the visible extraction shows only the cast here; the
 * struct itself is reclaimed when the wrapper is freed. */
static void
ibf_loader_free(void *ptr)
{
    struct ibf_load *load = (struct ibf_load *)ptr;
    ruby_xfree(load);
}
12890 ibf_loader_memsize(const void *ptr
)
12892 return sizeof(struct ibf_load
);
12895 static const rb_data_type_t ibf_load_type
= {
12897 {ibf_loader_mark
, ibf_loader_free
, ibf_loader_memsize
,},
12898 0, 0, RUBY_TYPED_WB_PROTECTED
| RUBY_TYPED_FREE_IMMEDIATELY
12902 rb_iseq_ibf_load(VALUE str
)
12904 struct ibf_load
*load
;
12906 VALUE loader_obj
= TypedData_Make_Struct(0, struct ibf_load
, &ibf_load_type
, load
);
12908 ibf_load_setup(load
, loader_obj
, str
);
12909 iseq
= ibf_load_iseq(load
, 0);
12911 RB_GC_GUARD(loader_obj
);
12916 rb_iseq_ibf_load_bytes(const char *bytes
, size_t size
)
12918 struct ibf_load
*load
;
12920 VALUE loader_obj
= TypedData_Make_Struct(0, struct ibf_load
, &ibf_load_type
, load
);
12922 ibf_load_setup_bytes(load
, loader_obj
, bytes
, size
);
12923 iseq
= ibf_load_iseq(load
, 0);
12925 RB_GC_GUARD(loader_obj
);
12930 rb_iseq_ibf_load_extra_data(VALUE str
)
12932 struct ibf_load
*load
;
12933 VALUE loader_obj
= TypedData_Make_Struct(0, struct ibf_load
, &ibf_load_type
, load
);
12936 ibf_load_setup(load
, loader_obj
, str
);
12937 extra_str
= rb_str_new(load
->global_buffer
.buff
+ load
->header
->size
, load
->header
->extra_size
);
12938 RB_GC_GUARD(loader_obj
);