1 /************************************************
3 enumerator.c - provides Enumerator class
7 Copyright (C) 2001-2003 Akinori MUSHA
9 $Idaemons: /home/cvs/rb/enumerator/enumerator.c,v 1.1.1.1 2001/07/15 10:12:48 knu Exp $
10 $RoughId: enumerator.c,v 1.6 2003/07/27 11:03:24 nobu Exp $
13 ************************************************/
15 #include "ruby/internal/config.h"
23 #include "internal/enumerator.h"
24 #include "internal/error.h"
25 #include "internal/hash.h"
26 #include "internal/imemo.h"
27 #include "internal/numeric.h"
28 #include "internal/range.h"
29 #include "internal/rational.h"
30 #include "ruby/ruby.h"
33 * Document-class: Enumerator
35 * A class which allows both internal and external iteration.
37 * An Enumerator can be created by the following methods.
42 * Most methods have two forms: a block form where the contents
43 * are evaluated for each item in the enumeration, and a non-block form
44 * which returns a new Enumerator wrapping the iteration.
46 * enumerator = %w(one two three).each
47 * puts enumerator.class # => Enumerator
49 * enumerator.each_with_object("foo") do |item, obj|
50 * puts "#{obj}: #{item}"
57 * enum_with_obj = enumerator.each_with_object("foo")
58 * puts enum_with_obj.class # => Enumerator
60 * enum_with_obj.each do |item, obj|
61 * puts "#{obj}: #{item}"
68 * This allows you to chain Enumerators together. For example, you
69 * can map a list's elements to strings containing the index
70 * and the element as a string via:
72 * puts %w[foo bar baz].map.with_index { |w, i| "#{i}:#{w}" }
73 * # => ["0:foo", "1:bar", "2:baz"]
75 * An Enumerator can also be used as an external iterator.
76 * For example, Enumerator#next returns the next value of the iterator
77 * or raises StopIteration if the Enumerator is at the end.
79 * e = [1,2,3].each # returns an enumerator object.
83 * puts e.next # raises StopIteration
 * Note that the enumeration sequence by +next+, +next_values+, +peek+, and
 * +peek_values+ does not affect other non-external
 * enumeration methods, unless the underlying iteration method itself has
 * side effects, e.g. IO#each_line.
90 * Moreover, implementation typically uses fibers so performance could be
91 * slower and exception stacktraces different than expected.
93 * You can use this to implement an internal iterator as follows:
99 * rescue StopIteration
116 * # use o.each as an internal iterator directly.
117 * puts o.each {|*x| puts x; [:b, *x] }
118 * # => [], [:b], [1], [:b, 1], [1, 2], [:b, 1, 2], 3
120 * # convert o.each to an external iterator for
121 * # implementing an internal iterator.
122 * puts ext_each(o.to_enum) {|*x| puts x; [:b, *x] }
123 * # => [], [:b], [1], [:b, 1], [1, 2], [:b, 1, 2], 3
126 VALUE rb_cEnumerator
;
127 static VALUE rb_cLazy
;
128 static ID id_rewind
, id_new
, id_to_enum
;
129 static ID id_next
, id_result
, id_receiver
, id_arguments
, id_memo
, id_method
, id_force
;
130 static ID id_begin
, id_end
, id_step
, id_exclude_end
;
131 static VALUE sym_each
, sym_cycle
, sym_yield
;
133 static VALUE lazy_use_super_method
;
135 #define id_call idCall
136 #define id_each idEach
138 #define id_initialize idInitialize
139 #define id_size idSize
141 VALUE rb_eStopIteration
;
154 rb_enumerator_size_func
*size_fn
;
158 static VALUE rb_cGenerator
, rb_cYielder
, rb_cEnumProducer
;
174 typedef struct MEMO
*lazyenum_proc_func(VALUE
, struct MEMO
*, VALUE
, long);
175 typedef VALUE
lazyenum_size_func(VALUE
, VALUE
);
177 lazyenum_proc_func
*proc
;
178 lazyenum_size_func
*size
;
184 const lazyenum_funcs
*fn
;
187 static VALUE
generator_allocate(VALUE klass
);
188 static VALUE
generator_init(VALUE obj
, VALUE proc
);
190 static VALUE rb_cEnumChain
;
203 enumerator_mark(void *p
)
205 struct enumerator
*ptr
= p
;
206 rb_gc_mark_movable(ptr
->obj
);
207 rb_gc_mark_movable(ptr
->args
);
208 rb_gc_mark_movable(ptr
->fib
);
209 rb_gc_mark_movable(ptr
->dst
);
210 rb_gc_mark_movable(ptr
->lookahead
);
211 rb_gc_mark_movable(ptr
->feedvalue
);
212 rb_gc_mark_movable(ptr
->stop_exc
);
213 rb_gc_mark_movable(ptr
->size
);
214 rb_gc_mark_movable(ptr
->procs
);
218 enumerator_compact(void *p
)
220 struct enumerator
*ptr
= p
;
221 ptr
->obj
= rb_gc_location(ptr
->obj
);
222 ptr
->args
= rb_gc_location(ptr
->args
);
223 ptr
->fib
= rb_gc_location(ptr
->fib
);
224 ptr
->dst
= rb_gc_location(ptr
->dst
);
225 ptr
->lookahead
= rb_gc_location(ptr
->lookahead
);
226 ptr
->feedvalue
= rb_gc_location(ptr
->feedvalue
);
227 ptr
->stop_exc
= rb_gc_location(ptr
->stop_exc
);
228 ptr
->size
= rb_gc_location(ptr
->size
);
229 ptr
->procs
= rb_gc_location(ptr
->procs
);
232 #define enumerator_free RUBY_TYPED_DEFAULT_FREE
235 enumerator_memsize(const void *p
)
237 return sizeof(struct enumerator
);
240 static const rb_data_type_t enumerator_data_type
= {
248 0, 0, RUBY_TYPED_FREE_IMMEDIATELY
251 static struct enumerator
*
252 enumerator_ptr(VALUE obj
)
254 struct enumerator
*ptr
;
256 TypedData_Get_Struct(obj
, struct enumerator
, &enumerator_data_type
, ptr
);
257 if (!ptr
|| ptr
->obj
== Qundef
) {
258 rb_raise(rb_eArgError
, "uninitialized enumerator");
264 proc_entry_mark(void *p
)
266 struct proc_entry
*ptr
= p
;
267 rb_gc_mark_movable(ptr
->proc
);
268 rb_gc_mark_movable(ptr
->memo
);
272 proc_entry_compact(void *p
)
274 struct proc_entry
*ptr
= p
;
275 ptr
->proc
= rb_gc_location(ptr
->proc
);
276 ptr
->memo
= rb_gc_location(ptr
->memo
);
279 #define proc_entry_free RUBY_TYPED_DEFAULT_FREE
282 proc_entry_memsize(const void *p
)
284 return p
? sizeof(struct proc_entry
) : 0;
287 static const rb_data_type_t proc_entry_data_type
= {
297 static struct proc_entry
*
298 proc_entry_ptr(VALUE proc_entry
)
300 struct proc_entry
*ptr
;
302 TypedData_Get_Struct(proc_entry
, struct proc_entry
, &proc_entry_data_type
, ptr
);
309 * obj.to_enum(method = :each, *args) -> enum
310 * obj.enum_for(method = :each, *args) -> enum
311 * obj.to_enum(method = :each, *args) {|*args| block} -> enum
312 * obj.enum_for(method = :each, *args){|*args| block} -> enum
314 * Creates a new Enumerator which will enumerate by calling +method+ on
315 * +obj+, passing +args+ if any. What was _yielded_ by method becomes
316 * values of enumerator.
318 * If a block is given, it will be used to calculate the size of
319 * the enumerator without the need to iterate it (see Enumerator#size).
325 * enum = str.enum_for(:each_byte)
326 * enum.each { |b| puts b }
331 * # protect an array from being modified by some_method
333 * some_method(a.to_enum)
335 * # String#split in block form is more memory-effective:
336 * very_large_string.split("|") { |chunk| return chunk if chunk.include?('DATE') }
337 * # This could be rewritten more idiomatically with to_enum:
338 * very_large_string.to_enum(:split, "|").lazy.grep(/DATE/).first
340 * It is typical to call to_enum when defining methods for
341 * a generic Enumerable, in case no block is passed.
343 * Here is such an example, with parameter passing and a sizing block:
346 * # a generic method to repeat the values of any enumerable
348 * raise ArgumentError, "#{n} is negative!" if n < 0
349 * unless block_given?
350 * return to_enum(__method__, n) do # __method__ is :repeat here
351 * sz = size # Call size and multiply by n...
352 * sz * n if sz # but return nil if size itself is nil
356 * n.times { yield *val }
361 * %i[hello world].repeat(2) { |w| puts w }
362 * # => Prints 'hello', 'hello', 'world', 'world'
363 * enum = (1..14).repeat(3)
364 * # => returns an Enumerator when called without a block
365 * enum.first(4) # => [1, 1, 1, 2]
369 obj_to_enum(int argc
, VALUE
*argv
, VALUE obj
)
371 VALUE enumerator
, meth
= sym_each
;
377 enumerator
= rb_enumeratorize_with_size(obj
, meth
, argc
, argv
, 0);
378 if (rb_block_given_p()) {
379 enumerator_ptr(enumerator
)->size
= rb_block_proc();
385 enumerator_allocate(VALUE klass
)
387 struct enumerator
*ptr
;
390 enum_obj
= TypedData_Make_Struct(klass
, struct enumerator
, &enumerator_data_type
, ptr
);
397 enumerator_init(VALUE enum_obj
, VALUE obj
, VALUE meth
, int argc
, const VALUE
*argv
, rb_enumerator_size_func
*size_fn
, VALUE size
, int kw_splat
)
399 struct enumerator
*ptr
;
401 rb_check_frozen(enum_obj
);
402 TypedData_Get_Struct(enum_obj
, struct enumerator
, &enumerator_data_type
, ptr
);
405 rb_raise(rb_eArgError
, "unallocated enumerator");
409 ptr
->meth
= rb_to_id(meth
);
410 if (argc
) ptr
->args
= rb_ary_new4(argc
, argv
);
413 ptr
->lookahead
= Qundef
;
414 ptr
->feedvalue
= Qundef
;
415 ptr
->stop_exc
= Qfalse
;
417 ptr
->size_fn
= size_fn
;
418 ptr
->kw_splat
= kw_splat
;
424 convert_to_feasible_size_value(VALUE obj
)
429 else if (rb_respond_to(obj
, id_call
)) {
432 else if (RB_FLOAT_TYPE_P(obj
) && RFLOAT_VALUE(obj
) == HUGE_VAL
) {
436 return rb_to_int(obj
);
442 * Enumerator.new(size = nil) { |yielder| ... }
444 * Creates a new Enumerator object, which can be used as an
447 * Iteration is defined by the given block, in
448 * which a "yielder" object, given as block parameter, can be used to
449 * yield a value by calling the +yield+ method (aliased as <code><<</code>):
451 * fib = Enumerator.new do |y|
459 * fib.take(10) # => [1, 1, 2, 3, 5, 8, 13, 21, 34, 55]
461 * The optional parameter can be used to specify how to calculate the size
462 * in a lazy fashion (see Enumerator#size). It can either be a value or
466 enumerator_initialize(int argc
, VALUE
*argv
, VALUE obj
)
468 VALUE iter
= rb_block_proc();
469 VALUE recv
= generator_init(generator_allocate(rb_cGenerator
), iter
);
470 VALUE arg0
= rb_check_arity(argc
, 0, 1) ? argv
[0] : Qnil
;
471 VALUE size
= convert_to_feasible_size_value(arg0
);
473 return enumerator_init(obj
, recv
, sym_each
, 0, 0, 0, size
, false);
478 enumerator_init_copy(VALUE obj
, VALUE orig
)
480 struct enumerator
*ptr0
, *ptr1
;
482 if (!OBJ_INIT_COPY(obj
, orig
)) return obj
;
483 ptr0
= enumerator_ptr(orig
);
485 /* Fibers cannot be copied */
486 rb_raise(rb_eTypeError
, "can't copy execution context");
489 TypedData_Get_Struct(obj
, struct enumerator
, &enumerator_data_type
, ptr1
);
492 rb_raise(rb_eArgError
, "unallocated enumerator");
495 ptr1
->obj
= ptr0
->obj
;
496 ptr1
->meth
= ptr0
->meth
;
497 ptr1
->args
= ptr0
->args
;
499 ptr1
->lookahead
= Qundef
;
500 ptr1
->feedvalue
= Qundef
;
501 ptr1
->size
= ptr0
->size
;
502 ptr1
->size_fn
= ptr0
->size_fn
;
508 * For backwards compatibility; use rb_enumeratorize_with_size
511 rb_enumeratorize(VALUE obj
, VALUE meth
, int argc
, const VALUE
*argv
)
513 return rb_enumeratorize_with_size(obj
, meth
, argc
, argv
, 0);
517 lazy_to_enum_i(VALUE self
, VALUE meth
, int argc
, const VALUE
*argv
, rb_enumerator_size_func
*size_fn
, int kw_splat
);
520 rb_enumeratorize_with_size_kw(VALUE obj
, VALUE meth
, int argc
, const VALUE
*argv
, rb_enumerator_size_func
*size_fn
, int kw_splat
)
522 VALUE base_class
= rb_cEnumerator
;
524 if (RTEST(rb_obj_is_kind_of(obj
, rb_cLazy
))) {
525 base_class
= rb_cLazy
;
527 else if (RTEST(rb_obj_is_kind_of(obj
, rb_cEnumChain
))) {
528 obj
= enumerator_init(enumerator_allocate(rb_cEnumerator
), obj
, sym_each
, 0, 0, 0, Qnil
, false);
531 return enumerator_init(enumerator_allocate(base_class
),
532 obj
, meth
, argc
, argv
, size_fn
, Qnil
, kw_splat
);
536 rb_enumeratorize_with_size(VALUE obj
, VALUE meth
, int argc
, const VALUE
*argv
, rb_enumerator_size_func
*size_fn
)
538 return rb_enumeratorize_with_size_kw(obj
, meth
, argc
, argv
, size_fn
, rb_keyword_given_p());
542 enumerator_block_call(VALUE obj
, rb_block_call_func
*func
, VALUE arg
)
545 const VALUE
*argv
= 0;
546 const struct enumerator
*e
= enumerator_ptr(obj
);
550 argc
= RARRAY_LENINT(e
->args
);
551 argv
= RARRAY_CONST_PTR(e
->args
);
553 return rb_block_call_kw(e
->obj
, meth
, argc
, argv
, func
, arg
, e
->kw_splat
);
558 * enum.each { |elm| block } -> obj
560 * enum.each(*appending_args) { |elm| block } -> obj
561 * enum.each(*appending_args) -> an_enumerator
563 * Iterates over the block according to how this Enumerator was constructed.
564 * If no block and no arguments are given, returns self.
568 * "Hello, world!".scan(/\w+/) #=> ["Hello", "world"]
569 * "Hello, world!".to_enum(:scan, /\w+/).to_a #=> ["Hello", "world"]
570 * "Hello, world!".to_enum(:scan).each(/\w+/).to_a #=> ["Hello", "world"]
574 * def obj.each_arg(a, b=:b, *rest)
581 * enum = obj.to_enum :each_arg, :a, :x
583 * enum.each.to_a #=> [:a, :x, []]
584 * enum.each.equal?(enum) #=> true
585 * enum.each { |elm| elm } #=> :method_returned
587 * enum.each(:y, :z).to_a #=> [:a, :x, [:y, :z]]
588 * enum.each(:y, :z).equal?(enum) #=> false
589 * enum.each(:y, :z) { |elm| elm } #=> :method_returned
593 enumerator_each(int argc
, VALUE
*argv
, VALUE obj
)
596 struct enumerator
*e
= enumerator_ptr(obj
= rb_obj_dup(obj
));
597 VALUE args
= e
->args
;
599 #if SIZEOF_INT < SIZEOF_LONG
600 /* check int range overflow */
601 rb_long2int(RARRAY_LEN(args
) + argc
);
603 args
= rb_ary_dup(args
);
604 rb_ary_cat(args
, argv
, argc
);
607 args
= rb_ary_new4(argc
, argv
);
613 if (!rb_block_given_p()) return obj
;
614 return enumerator_block_call(obj
, 0, obj
);
618 enumerator_with_index_i(RB_BLOCK_CALL_FUNC_ARGLIST(val
, m
))
620 struct MEMO
*memo
= (struct MEMO
*)m
;
621 VALUE idx
= memo
->v1
;
622 MEMO_V1_SET(memo
, rb_int_succ(idx
));
625 return rb_yield_values(2, val
, idx
);
627 return rb_yield_values(2, rb_ary_new4(argc
, argv
), idx
);
631 enumerator_size(VALUE obj
);
634 enumerator_enum_size(VALUE obj
, VALUE args
, VALUE eobj
)
636 return enumerator_size(obj
);
641 * e.with_index(offset = 0) {|(*args), idx| ... }
642 * e.with_index(offset = 0)
644 * Iterates the given block for each element with an index, which
645 * starts from +offset+. If no block is given, returns a new Enumerator
646 * that includes the index, starting from +offset+
648 * +offset+:: the starting index to use
652 enumerator_with_index(int argc
, VALUE
*argv
, VALUE obj
)
656 rb_check_arity(argc
, 0, 1);
657 RETURN_SIZED_ENUMERATOR(obj
, argc
, argv
, enumerator_enum_size
);
658 memo
= (!argc
|| NIL_P(memo
= argv
[0])) ? INT2FIX(0) : rb_to_int(memo
);
659 return enumerator_block_call(obj
, enumerator_with_index_i
, (VALUE
)MEMO_NEW(memo
, 0, 0));
664 * e.each_with_index {|(*args), idx| ... }
667 * Same as Enumerator#with_index(0), i.e. there is no starting offset.
669 * If no block is given, a new Enumerator is returned that includes the index.
673 enumerator_each_with_index(VALUE obj
)
675 return enumerator_with_index(0, NULL
, obj
);
679 enumerator_with_object_i(RB_BLOCK_CALL_FUNC_ARGLIST(val
, memo
))
682 return rb_yield_values(2, val
, memo
);
684 return rb_yield_values(2, rb_ary_new4(argc
, argv
), memo
);
689 * e.each_with_object(obj) {|(*args), obj| ... }
690 * e.each_with_object(obj)
691 * e.with_object(obj) {|(*args), obj| ... }
694 * Iterates the given block for each element with an arbitrary object, +obj+,
697 * If no block is given, returns a new Enumerator.
701 * to_three = Enumerator.new do |y|
707 * to_three_with_string = to_three.with_object("foo")
708 * to_three_with_string.each do |x,string|
709 * puts "#{string}: #{x}"
717 enumerator_with_object(VALUE obj
, VALUE memo
)
719 RETURN_SIZED_ENUMERATOR(obj
, 1, &memo
, enumerator_enum_size
);
720 enumerator_block_call(obj
, enumerator_with_object_i
, memo
);
726 next_ii(RB_BLOCK_CALL_FUNC_ARGLIST(i
, obj
))
728 struct enumerator
*e
= enumerator_ptr(obj
);
729 VALUE feedvalue
= Qnil
;
730 VALUE args
= rb_ary_new4(argc
, argv
);
731 rb_fiber_yield(1, &args
);
732 if (e
->feedvalue
!= Qundef
) {
733 feedvalue
= e
->feedvalue
;
734 e
->feedvalue
= Qundef
;
740 next_i(RB_BLOCK_CALL_FUNC_ARGLIST(_
, obj
))
742 struct enumerator
*e
= enumerator_ptr(obj
);
746 result
= rb_block_call(obj
, id_each
, 0, 0, next_ii
, obj
);
747 e
->stop_exc
= rb_exc_new2(rb_eStopIteration
, "iteration reached an end");
748 rb_ivar_set(e
->stop_exc
, id_result
, result
);
749 return rb_fiber_yield(1, &nil
);
753 next_init(VALUE obj
, struct enumerator
*e
)
755 VALUE curr
= rb_fiber_current();
757 e
->fib
= rb_fiber_new(next_i
, obj
);
758 e
->lookahead
= Qundef
;
762 get_next_values(VALUE obj
, struct enumerator
*e
)
767 rb_exc_raise(e
->stop_exc
);
769 curr
= rb_fiber_current();
771 if (!e
->fib
|| !rb_fiber_alive_p(e
->fib
)) {
775 vs
= rb_fiber_resume(e
->fib
, 1, &curr
);
779 e
->lookahead
= Qundef
;
780 e
->feedvalue
= Qundef
;
781 rb_exc_raise(e
->stop_exc
);
788 * e.next_values -> array
 * Returns the next object as an array in the enumerator, and moves the
 * internal position forward. When the position reaches the end,
 * StopIteration is raised.
794 * See class-level notes about external iterators.
796 * This method can be used to distinguish <code>yield</code> and <code>yield
822 * ## yield args next_values next
825 * # yield 1, 2 [1, 2] [1, 2]
826 * # yield nil [nil] nil
827 * # yield [1, 2] [[1, 2]] [1, 2]
832 enumerator_next_values(VALUE obj
)
834 struct enumerator
*e
= enumerator_ptr(obj
);
837 if (e
->lookahead
!= Qundef
) {
839 e
->lookahead
= Qundef
;
843 return get_next_values(obj
, e
);
847 ary2sv(VALUE args
, int dup
)
849 if (!RB_TYPE_P(args
, T_ARRAY
))
852 switch (RARRAY_LEN(args
)) {
857 return RARRAY_AREF(args
, 0);
861 return rb_ary_dup(args
);
 * Returns the next object in the enumerator, and moves the internal position
 * forward. When the position reaches the end, StopIteration is raised.
880 * p e.next #raises StopIteration
882 * See class-level notes about external iterators.
887 enumerator_next(VALUE obj
)
889 VALUE vs
= enumerator_next_values(obj
);
890 return ary2sv(vs
, 0);
894 enumerator_peek_values(VALUE obj
)
896 struct enumerator
*e
= enumerator_ptr(obj
);
898 if (e
->lookahead
== Qundef
) {
899 e
->lookahead
= get_next_values(obj
, e
);
906 * e.peek_values -> array
908 * Returns the next object as an array, similar to Enumerator#next_values, but
909 * doesn't move the internal position forward. If the position is already at
910 * the end, StopIteration is raised.
912 * See class-level notes about external iterators.
923 * p e.peek_values #=> []
925 * p e.peek_values #=> [1]
926 * p e.peek_values #=> [1]
928 * p e.peek_values #=> [1, 2]
930 * p e.peek_values # raises StopIteration
935 enumerator_peek_values_m(VALUE obj
)
937 return rb_ary_dup(enumerator_peek_values(obj
));
944 * Returns the next object in the enumerator, but doesn't move the internal
945 * position forward. If the position is already at the end, StopIteration
948 * See class-level notes about external iterators.
960 * p e.peek #raises StopIteration
965 enumerator_peek(VALUE obj
)
967 VALUE vs
= enumerator_peek_values(obj
);
968 return ary2sv(vs
, 1);
975 * Sets the value to be returned by the next yield inside +e+.
977 * If the value is not set, the yield returns nil.
979 * This value is cleared after being yielded.
981 * # Array#map passes the array's elements to "yield" and collects the
982 * # results of "yield" as an array.
983 * # Following example shows that "next" returns the passed elements and
984 * # values passed to "feed" are collected as an array which can be
985 * # obtained by StopIteration#result.
995 * rescue StopIteration
996 * p $!.result #=> ["a", "b", "c"]
1001 * x = yield # (2) blocks
1002 * p x # (5) => "foo"
1003 * x = yield # (6) blocks
1005 * x = yield # (9) blocks
1006 * p x # not reached w/o another e.next
1011 * e.feed "foo" # (3)
1018 enumerator_feed(VALUE obj
, VALUE v
)
1020 struct enumerator
*e
= enumerator_ptr(obj
);
1022 if (e
->feedvalue
!= Qundef
) {
1023 rb_raise(rb_eTypeError
, "feed value already set");
1034 * Rewinds the enumeration sequence to the beginning.
1036 * If the enclosed object responds to a "rewind" method, it is called.
1040 enumerator_rewind(VALUE obj
)
1042 struct enumerator
*e
= enumerator_ptr(obj
);
1044 rb_check_funcall(e
->obj
, id_rewind
, 0, 0);
1048 e
->lookahead
= Qundef
;
1049 e
->feedvalue
= Qundef
;
1050 e
->stop_exc
= Qfalse
;
1054 static struct generator
*generator_ptr(VALUE obj
);
1055 static VALUE
append_method(VALUE obj
, VALUE str
, ID default_method
, VALUE default_args
);
1058 inspect_enumerator(VALUE obj
, VALUE dummy
, int recur
)
1060 struct enumerator
*e
;
1061 VALUE eobj
, str
, cname
;
1063 TypedData_Get_Struct(obj
, struct enumerator
, &enumerator_data_type
, e
);
1065 cname
= rb_obj_class(obj
);
1067 if (!e
|| e
->obj
== Qundef
) {
1068 return rb_sprintf("#<%"PRIsVALUE
": uninitialized>", rb_class_path(cname
));
1072 str
= rb_sprintf("#<%"PRIsVALUE
": ...>", rb_class_path(cname
));
1079 eobj
= generator_ptr(e
->obj
)->obj
;
1080 /* In case procs chained enumerator traversing all proc entries manually */
1081 if (rb_obj_class(eobj
) == cname
) {
1082 str
= rb_inspect(eobj
);
1085 str
= rb_sprintf("#<%"PRIsVALUE
": %+"PRIsVALUE
">", rb_class_path(cname
), eobj
);
1087 for (i
= 0; i
< RARRAY_LEN(e
->procs
); i
++) {
1088 str
= rb_sprintf("#<%"PRIsVALUE
": %"PRIsVALUE
, cname
, str
);
1089 append_method(RARRAY_AREF(e
->procs
, i
), str
, e
->meth
, e
->args
);
1090 rb_str_buf_cat2(str
, ">");
1095 eobj
= rb_attr_get(obj
, id_receiver
);
1100 /* (1..100).each_cons(2) => "#<Enumerator: 1..100:each_cons(2)>" */
1101 str
= rb_sprintf("#<%"PRIsVALUE
": %+"PRIsVALUE
, rb_class_path(cname
), eobj
);
1102 append_method(obj
, str
, e
->meth
, e
->args
);
1104 rb_str_buf_cat2(str
, ">");
1110 key_symbol_p(VALUE key
, VALUE val
, VALUE arg
)
1112 if (SYMBOL_P(key
)) return ST_CONTINUE
;
1113 *(int *)arg
= FALSE
;
1118 kwd_append(VALUE key
, VALUE val
, VALUE str
)
1120 if (!SYMBOL_P(key
)) rb_raise(rb_eRuntimeError
, "non-symbol key inserted");
1121 rb_str_catf(str
, "% "PRIsVALUE
": %"PRIsVALUE
", ", key
, val
);
1126 append_method(VALUE obj
, VALUE str
, ID default_method
, VALUE default_args
)
1128 VALUE method
, eargs
;
1130 method
= rb_attr_get(obj
, id_method
);
1131 if (method
!= Qfalse
) {
1132 if (!NIL_P(method
)) {
1133 Check_Type(method
, T_SYMBOL
);
1134 method
= rb_sym2str(method
);
1137 method
= rb_id2str(default_method
);
1139 rb_str_buf_cat2(str
, ":");
1140 rb_str_buf_append(str
, method
);
1143 eargs
= rb_attr_get(obj
, id_arguments
);
1145 eargs
= default_args
;
1147 if (eargs
!= Qfalse
) {
1148 long argc
= RARRAY_LEN(eargs
);
1149 const VALUE
*argv
= RARRAY_CONST_PTR(eargs
); /* WB: no new reference */
1154 rb_str_buf_cat2(str
, "(");
1156 if (RB_TYPE_P(argv
[argc
-1], T_HASH
) && !RHASH_EMPTY_P(argv
[argc
-1])) {
1158 rb_hash_foreach(argv
[argc
-1], key_symbol_p
, (VALUE
)&all_key
);
1159 if (all_key
) kwds
= argv
[--argc
];
1163 VALUE arg
= *argv
++;
1165 rb_str_append(str
, rb_inspect(arg
));
1166 rb_str_buf_cat2(str
, ", ");
1169 rb_hash_foreach(kwds
, kwd_append
, str
);
1171 rb_str_set_len(str
, RSTRING_LEN(str
)-2);
1172 rb_str_buf_cat2(str
, ")");
1181 * e.inspect -> string
1183 * Creates a printable version of <i>e</i>.
1187 enumerator_inspect(VALUE obj
)
1189 return rb_exec_recursive(inspect_enumerator
, obj
, 0);
1194 * e.size -> int, Float::INFINITY or nil
1196 * Returns the size of the enumerator, or +nil+ if it can't be calculated lazily.
1198 * (1..100).to_a.permutation(4).size # => 94109400
1199 * loop.size # => Float::INFINITY
1200 * (1..100).drop_while.size # => nil
1204 enumerator_size(VALUE obj
)
1206 struct enumerator
*e
= enumerator_ptr(obj
);
1208 const VALUE
*argv
= NULL
;
1212 struct generator
*g
= generator_ptr(e
->obj
);
1213 VALUE receiver
= rb_check_funcall(g
->obj
, id_size
, 0, 0);
1216 for (i
= 0; i
< RARRAY_LEN(e
->procs
); i
++) {
1217 VALUE proc
= RARRAY_AREF(e
->procs
, i
);
1218 struct proc_entry
*entry
= proc_entry_ptr(proc
);
1219 lazyenum_size_func
*size_fn
= entry
->fn
->size
;
1223 receiver
= (*size_fn
)(proc
, receiver
);
1229 return (*e
->size_fn
)(e
->obj
, e
->args
, obj
);
1232 argc
= (int)RARRAY_LEN(e
->args
);
1233 argv
= RARRAY_CONST_PTR(e
->args
);
1235 size
= rb_check_funcall_kw(e
->size
, id_call
, argc
, argv
, e
->kw_splat
);
1236 if (size
!= Qundef
) return size
;
1244 yielder_mark(void *p
)
1246 struct yielder
*ptr
= p
;
1247 rb_gc_mark_movable(ptr
->proc
);
1251 yielder_compact(void *p
)
1253 struct yielder
*ptr
= p
;
1254 ptr
->proc
= rb_gc_location(ptr
->proc
);
1257 #define yielder_free RUBY_TYPED_DEFAULT_FREE
1260 yielder_memsize(const void *p
)
1262 return sizeof(struct yielder
);
1265 static const rb_data_type_t yielder_data_type
= {
1273 0, 0, RUBY_TYPED_FREE_IMMEDIATELY
1276 static struct yielder
*
1277 yielder_ptr(VALUE obj
)
1279 struct yielder
*ptr
;
1281 TypedData_Get_Struct(obj
, struct yielder
, &yielder_data_type
, ptr
);
1282 if (!ptr
|| ptr
->proc
== Qundef
) {
1283 rb_raise(rb_eArgError
, "uninitialized yielder");
1290 yielder_allocate(VALUE klass
)
1292 struct yielder
*ptr
;
1295 obj
= TypedData_Make_Struct(klass
, struct yielder
, &yielder_data_type
, ptr
);
1302 yielder_init(VALUE obj
, VALUE proc
)
1304 struct yielder
*ptr
;
1306 TypedData_Get_Struct(obj
, struct yielder
, &yielder_data_type
, ptr
);
1309 rb_raise(rb_eArgError
, "unallocated yielder");
1319 yielder_initialize(VALUE obj
)
1323 return yielder_init(obj
, rb_block_proc());
1328 yielder_yield(VALUE obj
, VALUE args
)
1330 struct yielder
*ptr
= yielder_ptr(obj
);
1332 return rb_proc_call_kw(ptr
->proc
, args
, RB_PASS_CALLED_KEYWORDS
);
1337 yielder_yield_push(VALUE obj
, VALUE arg
)
1339 struct yielder
*ptr
= yielder_ptr(obj
);
1341 rb_proc_call_with_block(ptr
->proc
, 1, &arg
, Qnil
);
1347 * Returns a Proc object that takes arguments and yields them.
1349 * This method is implemented so that a Yielder object can be directly
1350 * passed to another method as a block argument.
1352 * enum = Enumerator.new { |y|
1353 * Dir.glob("*.rb") { |file|
1354 * File.open(file) { |f| f.each_line(&y) }
1359 yielder_to_proc(VALUE obj
)
1361 VALUE method
= rb_obj_method(obj
, sym_yield
);
1363 return rb_funcall(method
, idTo_proc
, 0);
1367 yielder_yield_i(RB_BLOCK_CALL_FUNC_ARGLIST(obj
, memo
))
1369 return rb_yield_values_kw(argc
, argv
, RB_PASS_CALLED_KEYWORDS
);
1375 return yielder_init(yielder_allocate(rb_cYielder
), rb_proc_new(yielder_yield_i
, 0));
1382 generator_mark(void *p
)
1384 struct generator
*ptr
= p
;
1385 rb_gc_mark_movable(ptr
->proc
);
1386 rb_gc_mark_movable(ptr
->obj
);
1390 generator_compact(void *p
)
1392 struct generator
*ptr
= p
;
1393 ptr
->proc
= rb_gc_location(ptr
->proc
);
1394 ptr
->obj
= rb_gc_location(ptr
->obj
);
1397 #define generator_free RUBY_TYPED_DEFAULT_FREE
1400 generator_memsize(const void *p
)
1402 return sizeof(struct generator
);
1405 static const rb_data_type_t generator_data_type
= {
1413 0, 0, RUBY_TYPED_FREE_IMMEDIATELY
1416 static struct generator
*
1417 generator_ptr(VALUE obj
)
1419 struct generator
*ptr
;
1421 TypedData_Get_Struct(obj
, struct generator
, &generator_data_type
, ptr
);
1422 if (!ptr
|| ptr
->proc
== Qundef
) {
1423 rb_raise(rb_eArgError
, "uninitialized generator");
1430 generator_allocate(VALUE klass
)
1432 struct generator
*ptr
;
1435 obj
= TypedData_Make_Struct(klass
, struct generator
, &generator_data_type
, ptr
);
1442 generator_init(VALUE obj
, VALUE proc
)
1444 struct generator
*ptr
;
1446 rb_check_frozen(obj
);
1447 TypedData_Get_Struct(obj
, struct generator
, &generator_data_type
, ptr
);
1450 rb_raise(rb_eArgError
, "unallocated generator");
1460 generator_initialize(int argc
, VALUE
*argv
, VALUE obj
)
1467 proc
= rb_block_proc();
1470 rb_scan_args(argc
, argv
, "1", &proc
);
1472 if (!rb_obj_is_proc(proc
))
1473 rb_raise(rb_eTypeError
,
1474 "wrong argument type %"PRIsVALUE
" (expected Proc)",
1475 rb_obj_class(proc
));
1477 if (rb_block_given_p()) {
1478 rb_warn("given block not used");
1482 return generator_init(obj
, proc
);
1487 generator_init_copy(VALUE obj
, VALUE orig
)
1489 struct generator
*ptr0
, *ptr1
;
1491 if (!OBJ_INIT_COPY(obj
, orig
)) return obj
;
1493 ptr0
= generator_ptr(orig
);
1495 TypedData_Get_Struct(obj
, struct generator
, &generator_data_type
, ptr1
);
1498 rb_raise(rb_eArgError
, "unallocated generator");
1501 ptr1
->proc
= ptr0
->proc
;
1508 generator_each(int argc
, VALUE
*argv
, VALUE obj
)
1510 struct generator
*ptr
= generator_ptr(obj
);
1511 VALUE args
= rb_ary_new2(argc
+ 1);
1513 rb_ary_push(args
, yielder_new());
1515 rb_ary_cat(args
, argv
, argc
);
1518 return rb_proc_call_kw(ptr
->proc
, args
, RB_PASS_CALLED_KEYWORDS
);
1521 /* Lazy Enumerator methods */
1523 enum_size(VALUE self
)
1525 VALUE r
= rb_check_funcall(self
, id_size
, 0, 0);
1526 return (r
== Qundef
) ? Qnil
: r
;
1530 lazyenum_size(VALUE self
, VALUE args
, VALUE eobj
)
1532 return enum_size(self
);
1535 #define lazy_receiver_size lazy_map_size
1538 lazy_init_iterator(RB_BLOCK_CALL_FUNC_ARGLIST(val
, m
))
1545 result
= rb_yield_values2(2, args
);
1549 int len
= rb_long2int((long)argc
+ 1);
1550 VALUE
*nargv
= ALLOCV_N(VALUE
, args
, len
);
1554 MEMCPY(nargv
+ 1, argv
, VALUE
, argc
);
1556 result
= rb_yield_values2(len
, nargv
);
1559 if (result
== Qundef
) rb_iter_break();
1564 lazy_init_block_i(RB_BLOCK_CALL_FUNC_ARGLIST(val
, m
))
1566 rb_block_call(m
, id_each
, argc
-1, argv
+1, lazy_init_iterator
, val
);
1570 #define memo_value v2
1571 #define memo_flags u3.state
1572 #define LAZY_MEMO_BREAK 1
1573 #define LAZY_MEMO_PACKED 2
1574 #define LAZY_MEMO_BREAK_P(memo) ((memo)->memo_flags & LAZY_MEMO_BREAK)
1575 #define LAZY_MEMO_PACKED_P(memo) ((memo)->memo_flags & LAZY_MEMO_PACKED)
1576 #define LAZY_MEMO_SET_BREAK(memo) ((memo)->memo_flags |= LAZY_MEMO_BREAK)
1577 #define LAZY_MEMO_RESET_BREAK(memo) ((memo)->memo_flags &= ~LAZY_MEMO_BREAK)
1578 #define LAZY_MEMO_SET_VALUE(memo, value) MEMO_V2_SET(memo, value)
1579 #define LAZY_MEMO_SET_PACKED(memo) ((memo)->memo_flags |= LAZY_MEMO_PACKED)
1580 #define LAZY_MEMO_RESET_PACKED(memo) ((memo)->memo_flags &= ~LAZY_MEMO_PACKED)
1582 static VALUE
lazy_yielder_result(struct MEMO
*result
, VALUE yielder
, VALUE procs_array
, VALUE memos
, long i
);
1585 lazy_init_yielder(RB_BLOCK_CALL_FUNC_ARGLIST(_
, m
))
1587 VALUE yielder
= RARRAY_AREF(m
, 0);
1588 VALUE procs_array
= RARRAY_AREF(m
, 1);
1589 VALUE memos
= rb_attr_get(yielder
, id_memo
);
1590 struct MEMO
*result
;
1592 result
= MEMO_NEW(m
, rb_enum_values_pack(argc
, argv
),
1593 argc
> 1 ? LAZY_MEMO_PACKED
: 0);
1594 return lazy_yielder_result(result
, yielder
, procs_array
, memos
, 0);
1598 lazy_yielder_yield(struct MEMO
*result
, long memo_index
, int argc
, const VALUE
*argv
)
1600 VALUE m
= result
->v1
;
1601 VALUE yielder
= RARRAY_AREF(m
, 0);
1602 VALUE procs_array
= RARRAY_AREF(m
, 1);
1603 VALUE memos
= rb_attr_get(yielder
, id_memo
);
1604 LAZY_MEMO_SET_VALUE(result
, rb_enum_values_pack(argc
, argv
));
1606 LAZY_MEMO_SET_PACKED(result
);
1608 LAZY_MEMO_RESET_PACKED(result
);
1609 return lazy_yielder_result(result
, yielder
, procs_array
, memos
, memo_index
);
1613 lazy_yielder_result(struct MEMO
*result
, VALUE yielder
, VALUE procs_array
, VALUE memos
, long i
)
1617 for (; i
< RARRAY_LEN(procs_array
); i
++) {
1618 VALUE proc
= RARRAY_AREF(procs_array
, i
);
1619 struct proc_entry
*entry
= proc_entry_ptr(proc
);
1620 if (!(*entry
->fn
->proc
)(proc
, result
, memos
, i
)) {
1627 rb_funcall2(yielder
, idLTLT
, 1, &(result
->memo_value
));
1629 if (LAZY_MEMO_BREAK_P(result
)) {
1632 return result
->memo_value
;
1636 lazy_init_block(RB_BLOCK_CALL_FUNC_ARGLIST(val
, m
))
1638 VALUE procs
= RARRAY_AREF(m
, 1);
1640 rb_ivar_set(val
, id_memo
, rb_ary_new2(RARRAY_LEN(procs
)));
1641 rb_block_call(RARRAY_AREF(m
, 0), id_each
, 0, 0,
1642 lazy_init_yielder
, rb_ary_new3(2, val
, procs
));
1647 lazy_generator_init(VALUE enumerator
, VALUE procs
)
1651 struct generator
*gen_ptr
;
1652 struct enumerator
*e
= enumerator_ptr(enumerator
);
1654 if (RARRAY_LEN(procs
) > 0) {
1655 struct generator
*old_gen_ptr
= generator_ptr(e
->obj
);
1656 obj
= old_gen_ptr
->obj
;
1662 generator
= generator_allocate(rb_cGenerator
);
1664 rb_block_call(generator
, id_initialize
, 0, 0,
1665 lazy_init_block
, rb_ary_new3(2, obj
, procs
));
1667 gen_ptr
= generator_ptr(generator
);
1674 * Document-class: Enumerator::Lazy
1676 * Enumerator::Lazy is a special type of Enumerator, that allows constructing
1677 * chains of operations without evaluating them immediately, and evaluating
1678 * values on as-needed basis. In order to do so it redefines most of Enumerable
1679 * methods so that they just construct another lazy enumerator.
1681 * Enumerator::Lazy can be constructed from any Enumerable with the
1682 * Enumerable#lazy method.
1684 * lazy = (1..Float::INFINITY).lazy.select(&:odd?).drop(10).take_while { |i| i < 30 }
1685 * # => #<Enumerator::Lazy: #<Enumerator::Lazy: #<Enumerator::Lazy: #<Enumerator::Lazy: 1..Infinity>:select>:drop(10)>:take_while>
1687 * The real enumeration is performed when any non-redefined Enumerable method
1688 * is called, like Enumerable#first or Enumerable#to_a (the latter is aliased
1689 * as #force for more semantic code):
1695 * #=> [21, 23, 25, 27, 29]
1697 * Note that most Enumerable methods that could be called with or without
1698 * a block, on Enumerator::Lazy will always require a block:
1700 * [1, 2, 3].map #=> #<Enumerator: [1, 2, 3]:map>
1701 * [1, 2, 3].lazy.map # ArgumentError: tried to call lazy map without a block
1703 * This class allows idiomatic calculations on long or infinite sequences, as well
1704 * as chaining of calculations without constructing intermediate arrays.
1706 * Example for working with a slowly calculated sequence:
1708 * require 'open-uri'
1710 * # This will fetch all URLs before selecting
1712 * URLS.map { |u| JSON.parse(URI.open(u).read) }
1713 * .select { |data| data.key?('stats') }
1716 * # This will fetch URLs one-by-one, only till
1717 * # there is enough data to satisfy the condition
1718 * URLS.lazy.map { |u| JSON.parse(URI.open(u).read) }
1719 * .select { |data| data.key?('stats') }
1722 * Ending a chain with ".eager" generates a non-lazy enumerator, which
1723 * is suitable for returning or passing to another method that expects
1724 * a normal enumerator.
1729 * .flat_map(&:items)
1730 * .reject(&:disabled)
1734 * # This works lazily; if a checked item is found, it stops
1735 * # iteration and does not look into remaining groups.
1736 * first_checked = active_items.find(&:checked)
1738 * # This returns an array of items like a normal enumerator does.
1739 * all_checked = active_items.select(&:checked)
1745 * Lazy.new(obj, size=nil) { |yielder, *values| block }
1747 * Creates a new Lazy enumerator. When the enumerator is actually enumerated
1748 * (e.g. by calling #force), +obj+ will be enumerated and each value passed
1749 * to the given block. The block can yield values back using +yielder+.
1750 * For example, to create a "filter+map" enumerator:
1752 * def filter_map(sequence)
1753 * Lazy.new(sequence) do |yielder, *values|
1754 * result = yield *values
1755 * yielder << result if result
1759 * filter_map(1..Float::INFINITY) {|i| i*i if i.even?}.first(5)
1760 * #=> [4, 16, 36, 64, 100]
1763 lazy_initialize(int argc
, VALUE
*argv
, VALUE self
)
1765 VALUE obj
, size
= Qnil
;
1768 rb_check_arity(argc
, 1, 2);
1769 if (!rb_block_given_p()) {
1770 rb_raise(rb_eArgError
, "tried to call lazy new without a block");
1776 generator
= generator_allocate(rb_cGenerator
);
1777 rb_block_call(generator
, id_initialize
, 0, 0, lazy_init_block_i
, obj
);
1778 enumerator_init(self
, generator
, sym_each
, 0, 0, 0, size
, 0);
1779 rb_ivar_set(self
, id_receiver
, obj
);
1784 #if 0 /* for RDoc */
1787 * lazy.to_a -> array
1788 * lazy.force -> array
1790 * Expands +lazy+ enumerator to an array.
1791 * See Enumerable#to_a.
1793 static VALUE
lazy_to_a(VALUE self
)
1799 lazy_set_args(VALUE lazy
, VALUE args
)
1801 ID id
= rb_frame_this_func();
1802 rb_ivar_set(lazy
, id_method
, ID2SYM(id
));
1804 /* Qfalse indicates that the arguments are empty */
1805 rb_ivar_set(lazy
, id_arguments
, Qfalse
);
1808 rb_ivar_set(lazy
, id_arguments
, args
);
1814 lazy_set_method(VALUE lazy
, VALUE args
, rb_enumerator_size_func
*size_fn
)
1816 struct enumerator
*e
= enumerator_ptr(lazy
);
1817 lazy_set_args(lazy
, args
);
1818 e
->size_fn
= size_fn
;
1824 lazy_add_method(VALUE obj
, int argc
, VALUE
*argv
, VALUE args
, VALUE memo
,
1825 const lazyenum_funcs
*fn
)
1827 struct enumerator
*new_e
;
1829 VALUE new_generator
;
1831 struct enumerator
*e
= enumerator_ptr(obj
);
1832 struct proc_entry
*entry
;
1833 VALUE entry_obj
= TypedData_Make_Struct(rb_cObject
, struct proc_entry
,
1834 &proc_entry_data_type
, entry
);
1835 if (rb_block_given_p()) {
1836 entry
->proc
= rb_block_proc();
1841 lazy_set_args(entry_obj
, memo
);
1843 new_procs
= RTEST(e
->procs
) ? rb_ary_dup(e
->procs
) : rb_ary_new();
1844 new_generator
= lazy_generator_init(obj
, new_procs
);
1845 rb_ary_push(new_procs
, entry_obj
);
1847 new_obj
= enumerator_init_copy(enumerator_allocate(rb_cLazy
), obj
);
1848 new_e
= DATA_PTR(new_obj
);
1849 new_e
->obj
= new_generator
;
1850 new_e
->procs
= new_procs
;
1853 new_e
->meth
= rb_to_id(*argv
++);
1857 new_e
->meth
= id_each
;
1859 new_e
->args
= rb_ary_new4(argc
, argv
);
1865 * e.lazy -> lazy_enumerator
1867 * Returns an Enumerator::Lazy, which redefines most Enumerable
1868 * methods to postpone enumeration and enumerate values only on an
1873 * The following program finds pythagorean triples:
1875 * def pythagorean_triples
1876 * (1..Float::INFINITY).lazy.flat_map {|z|
1877 * (1..z).flat_map {|x|
1878 * (x..z).select {|y|
1879 * x**2 + y**2 == z**2
1886 * # show first ten pythagorean triples
1887 * p pythagorean_triples.take(10).force # take is lazy, so force is needed
1888 * p pythagorean_triples.first(10) # first is eager
1889 * # show pythagorean triples less than 100
1890 * p pythagorean_triples.take_while { |*, z| z < 100 }.force
1893 enumerable_lazy(VALUE obj
)
1895 VALUE result
= lazy_to_enum_i(obj
, sym_each
, 0, 0, lazyenum_size
, rb_keyword_given_p());
1896 /* Qfalse indicates that the Enumerator::Lazy has no method name */
1897 rb_ivar_set(result
, id_method
, Qfalse
);
1902 lazy_to_enum_i(VALUE obj
, VALUE meth
, int argc
, const VALUE
*argv
, rb_enumerator_size_func
*size_fn
, int kw_splat
)
1904 return enumerator_init(enumerator_allocate(rb_cLazy
),
1905 obj
, meth
, argc
, argv
, size_fn
, Qnil
, kw_splat
);
1910 * lzy.to_enum(method = :each, *args) -> lazy_enum
1911 * lzy.enum_for(method = :each, *args) -> lazy_enum
1912 * lzy.to_enum(method = :each, *args) {|*args| block } -> lazy_enum
1913 * lzy.enum_for(method = :each, *args) {|*args| block } -> lazy_enum
1915 * Similar to Object#to_enum, except it returns a lazy enumerator.
1916 * This makes it easy to define Enumerable methods that will
1917 * naturally remain lazy if called from a lazy enumerator.
1919 * For example, continuing from the example in Object#to_enum:
1921 * # See Object#to_enum for the definition of repeat
1922 * r = 1..Float::INFINITY
1923 * r.repeat(2).first(5) # => [1, 1, 2, 2, 3]
1924 * r.repeat(2).class # => Enumerator
1925 * r.repeat(2).map{|n| n ** 2}.first(5) # => endless loop!
1926 * # works naturally on lazy enumerator:
1927 * r.lazy.repeat(2).class # => Enumerator::Lazy
1928 * r.lazy.repeat(2).map{|n| n ** 2}.first(5) # => [1, 1, 4, 4, 9]
1932 lazy_to_enum(int argc
, VALUE
*argv
, VALUE self
)
1934 VALUE lazy
, meth
= sym_each
, super_meth
;
1940 if (RTEST((super_meth
= rb_hash_aref(lazy_use_super_method
, meth
)))) {
1943 lazy
= lazy_to_enum_i(self
, meth
, argc
, argv
, 0, rb_keyword_given_p());
1944 if (rb_block_given_p()) {
1945 enumerator_ptr(lazy
)->size
= rb_block_proc();
1951 lazy_eager_size(VALUE self
, VALUE args
, VALUE eobj
)
1953 return enum_size(self
);
1960 * Returns a non-lazy Enumerator converted from the lazy enumerator.
1964 lazy_eager(VALUE self
)
1966 return enumerator_init(enumerator_allocate(rb_cEnumerator
),
1967 self
, sym_each
, 0, 0, lazy_eager_size
, Qnil
, 0);
1971 lazyenum_yield(VALUE proc_entry
, struct MEMO
*result
)
1973 struct proc_entry
*entry
= proc_entry_ptr(proc_entry
);
1974 return rb_proc_call_with_block(entry
->proc
, 1, &result
->memo_value
, Qnil
);
1978 lazyenum_yield_values(VALUE proc_entry
, struct MEMO
*result
)
1980 struct proc_entry
*entry
= proc_entry_ptr(proc_entry
);
1982 const VALUE
*argv
= &result
->memo_value
;
1983 if (LAZY_MEMO_PACKED_P(result
)) {
1984 const VALUE args
= *argv
;
1985 argc
= RARRAY_LENINT(args
);
1986 argv
= RARRAY_CONST_PTR(args
);
1988 return rb_proc_call_with_block(entry
->proc
, argc
, argv
, Qnil
);
1991 static struct MEMO
*
1992 lazy_map_proc(VALUE proc_entry
, struct MEMO
*result
, VALUE memos
, long memo_index
)
1994 VALUE value
= lazyenum_yield_values(proc_entry
, result
);
1995 LAZY_MEMO_SET_VALUE(result
, value
);
1996 LAZY_MEMO_RESET_PACKED(result
);
2001 lazy_map_size(VALUE entry
, VALUE receiver
)
2006 static const lazyenum_funcs lazy_map_funcs
= {
2007 lazy_map_proc
, lazy_map_size
,
2012 * lazy.collect { |obj| block } -> lazy_enumerator
2013 * lazy.map { |obj| block } -> lazy_enumerator
2015 * Like Enumerable#map, but chains operation to be lazy-evaluated.
2017 * (1..Float::INFINITY).lazy.map {|i| i**2 }
2018 * #=> #<Enumerator::Lazy: #<Enumerator::Lazy: 1..Infinity>:map>
2019 * (1..Float::INFINITY).lazy.map {|i| i**2 }.first(3)
2026 if (!rb_block_given_p()) {
2027 rb_raise(rb_eArgError
, "tried to call lazy map without a block");
2030 return lazy_add_method(obj
, 0, 0, Qnil
, Qnil
, &lazy_map_funcs
);
2033 struct flat_map_i_arg
{
2034 struct MEMO
*result
;
2039 lazy_flat_map_i(RB_BLOCK_CALL_FUNC_ARGLIST(i
, y
))
2041 struct flat_map_i_arg
*arg
= (struct flat_map_i_arg
*)y
;
2043 return lazy_yielder_yield(arg
->result
, arg
->index
, argc
, argv
);
2046 static struct MEMO
*
2047 lazy_flat_map_proc(VALUE proc_entry
, struct MEMO
*result
, VALUE memos
, long memo_index
)
2049 VALUE value
= lazyenum_yield_values(proc_entry
, result
);
2051 const long proc_index
= memo_index
+ 1;
2052 int break_p
= LAZY_MEMO_BREAK_P(result
);
2054 if (RB_TYPE_P(value
, T_ARRAY
)) {
2057 else if (rb_respond_to(value
, id_force
) && rb_respond_to(value
, id_each
)) {
2058 struct flat_map_i_arg arg
= {.result
= result
, .index
= proc_index
};
2059 LAZY_MEMO_RESET_BREAK(result
);
2060 rb_block_call(value
, id_each
, 0, 0, lazy_flat_map_i
, (VALUE
)&arg
);
2061 if (break_p
) LAZY_MEMO_SET_BREAK(result
);
2065 if (ary
|| !NIL_P(ary
= rb_check_array_type(value
))) {
2067 LAZY_MEMO_RESET_BREAK(result
);
2068 for (i
= 0; i
+ 1 < RARRAY_LEN(ary
); i
++) {
2069 const VALUE argv
= RARRAY_AREF(ary
, i
);
2070 lazy_yielder_yield(result
, proc_index
, 1, &argv
);
2072 if (break_p
) LAZY_MEMO_SET_BREAK(result
);
2073 if (i
>= RARRAY_LEN(ary
)) return 0;
2074 value
= RARRAY_AREF(ary
, i
);
2076 LAZY_MEMO_SET_VALUE(result
, value
);
2077 LAZY_MEMO_RESET_PACKED(result
);
2081 static const lazyenum_funcs lazy_flat_map_funcs
= {
2082 lazy_flat_map_proc
, 0,
2087 * lazy.collect_concat { |obj| block } -> a_lazy_enumerator
2088 * lazy.flat_map { |obj| block } -> a_lazy_enumerator
2090 * Returns a new lazy enumerator with the concatenated results of running
2091 * +block+ once for every element in the lazy enumerator.
2093 * ["foo", "bar"].lazy.flat_map {|i| i.each_char.lazy}.force
2094 * #=> ["f", "o", "o", "b", "a", "r"]
2096 * A value +x+ returned by +block+ is decomposed if either of
2097 * the following conditions is true:
2099 * * +x+ responds to both each and force, which means that
2100 * +x+ is a lazy enumerator.
2101 * * +x+ is an array or responds to to_ary.
2103 * Otherwise, +x+ is contained as-is in the return value.
2105 * [{a:1}, {b:2}].lazy.flat_map {|i| i}.force
2106 * #=> [{:a=>1}, {:b=>2}]
2109 lazy_flat_map(VALUE obj
)
2111 if (!rb_block_given_p()) {
2112 rb_raise(rb_eArgError
, "tried to call lazy flat_map without a block");
2115 return lazy_add_method(obj
, 0, 0, Qnil
, Qnil
, &lazy_flat_map_funcs
);
2118 static struct MEMO
*
2119 lazy_select_proc(VALUE proc_entry
, struct MEMO
*result
, VALUE memos
, long memo_index
)
2121 VALUE chain
= lazyenum_yield(proc_entry
, result
);
2122 if (!RTEST(chain
)) return 0;
2126 static const lazyenum_funcs lazy_select_funcs
= {
2127 lazy_select_proc
, 0,
2132 * lazy.find_all { |obj| block } -> lazy_enumerator
2133 * lazy.select { |obj| block } -> lazy_enumerator
2134 * lazy.filter { |obj| block } -> lazy_enumerator
2136 * Like Enumerable#select, but chains operation to be lazy-evaluated.
2139 lazy_select(VALUE obj
)
2141 if (!rb_block_given_p()) {
2142 rb_raise(rb_eArgError
, "tried to call lazy select without a block");
2145 return lazy_add_method(obj
, 0, 0, Qnil
, Qnil
, &lazy_select_funcs
);
2148 static struct MEMO
*
2149 lazy_filter_map_proc(VALUE proc_entry
, struct MEMO
*result
, VALUE memos
, long memo_index
)
2151 VALUE value
= lazyenum_yield_values(proc_entry
, result
);
2152 if (!RTEST(value
)) return 0;
2153 LAZY_MEMO_SET_VALUE(result
, value
);
2154 LAZY_MEMO_RESET_PACKED(result
);
2158 static const lazyenum_funcs lazy_filter_map_funcs
= {
2159 lazy_filter_map_proc
, 0,
2164 * lazy.filter_map { |obj| block } -> lazy_enumerator
2166 * Like Enumerable#filter_map, but chains operation to be lazy-evaluated.
2168 * (1..).lazy.filter_map { |i| i * 2 if i.even? }.first(5)
2169 * #=> [4, 8, 12, 16, 20]
2173 lazy_filter_map(VALUE obj
)
2175 if (!rb_block_given_p()) {
2176 rb_raise(rb_eArgError
, "tried to call lazy filter_map without a block");
2179 return lazy_add_method(obj
, 0, 0, Qnil
, Qnil
, &lazy_filter_map_funcs
);
2182 static struct MEMO
*
2183 lazy_reject_proc(VALUE proc_entry
, struct MEMO
*result
, VALUE memos
, long memo_index
)
2185 VALUE chain
= lazyenum_yield(proc_entry
, result
);
2186 if (RTEST(chain
)) return 0;
2190 static const lazyenum_funcs lazy_reject_funcs
= {
2191 lazy_reject_proc
, 0,
2196 * lazy.reject { |obj| block } -> lazy_enumerator
2198 * Like Enumerable#reject, but chains operation to be lazy-evaluated.
2202 lazy_reject(VALUE obj
)
2204 if (!rb_block_given_p()) {
2205 rb_raise(rb_eArgError
, "tried to call lazy reject without a block");
2208 return lazy_add_method(obj
, 0, 0, Qnil
, Qnil
, &lazy_reject_funcs
);
2211 static struct MEMO
*
2212 lazy_grep_proc(VALUE proc_entry
, struct MEMO
*result
, VALUE memos
, long memo_index
)
2214 struct proc_entry
*entry
= proc_entry_ptr(proc_entry
);
2215 VALUE chain
= rb_funcall(entry
->memo
, id_eqq
, 1, result
->memo_value
);
2216 if (!RTEST(chain
)) return 0;
2220 static struct MEMO
*
2221 lazy_grep_iter_proc(VALUE proc_entry
, struct MEMO
*result
, VALUE memos
, long memo_index
)
2223 struct proc_entry
*entry
= proc_entry_ptr(proc_entry
);
2224 VALUE value
, chain
= rb_funcall(entry
->memo
, id_eqq
, 1, result
->memo_value
);
2226 if (!RTEST(chain
)) return 0;
2227 value
= rb_proc_call_with_block(entry
->proc
, 1, &(result
->memo_value
), Qnil
);
2228 LAZY_MEMO_SET_VALUE(result
, value
);
2229 LAZY_MEMO_RESET_PACKED(result
);
2234 static const lazyenum_funcs lazy_grep_iter_funcs
= {
2235 lazy_grep_iter_proc
, 0,
2238 static const lazyenum_funcs lazy_grep_funcs
= {
2244 * lazy.grep(pattern) -> lazy_enumerator
2245 * lazy.grep(pattern) { |obj| block } -> lazy_enumerator
2247 * Like Enumerable#grep, but chains operation to be lazy-evaluated.
2251 lazy_grep(VALUE obj
, VALUE pattern
)
2253 const lazyenum_funcs
*const funcs
= rb_block_given_p() ?
2254 &lazy_grep_iter_funcs
: &lazy_grep_funcs
;
2255 return lazy_add_method(obj
, 0, 0, pattern
, rb_ary_new3(1, pattern
), funcs
);
2258 static struct MEMO
*
2259 lazy_grep_v_proc(VALUE proc_entry
, struct MEMO
*result
, VALUE memos
, long memo_index
)
2261 struct proc_entry
*entry
= proc_entry_ptr(proc_entry
);
2262 VALUE chain
= rb_funcall(entry
->memo
, id_eqq
, 1, result
->memo_value
);
2263 if (RTEST(chain
)) return 0;
2267 static struct MEMO
*
2268 lazy_grep_v_iter_proc(VALUE proc_entry
, struct MEMO
*result
, VALUE memos
, long memo_index
)
2270 struct proc_entry
*entry
= proc_entry_ptr(proc_entry
);
2271 VALUE value
, chain
= rb_funcall(entry
->memo
, id_eqq
, 1, result
->memo_value
);
2273 if (RTEST(chain
)) return 0;
2274 value
= rb_proc_call_with_block(entry
->proc
, 1, &(result
->memo_value
), Qnil
);
2275 LAZY_MEMO_SET_VALUE(result
, value
);
2276 LAZY_MEMO_RESET_PACKED(result
);
2281 static const lazyenum_funcs lazy_grep_v_iter_funcs
= {
2282 lazy_grep_v_iter_proc
, 0,
2285 static const lazyenum_funcs lazy_grep_v_funcs
= {
2286 lazy_grep_v_proc
, 0,
2291 * lazy.grep_v(pattern) -> lazy_enumerator
2292 * lazy.grep_v(pattern) { |obj| block } -> lazy_enumerator
2294 * Like Enumerable#grep_v, but chains operation to be lazy-evaluated.
2298 lazy_grep_v(VALUE obj
, VALUE pattern
)
2300 const lazyenum_funcs
*const funcs
= rb_block_given_p() ?
2301 &lazy_grep_v_iter_funcs
: &lazy_grep_v_funcs
;
2302 return lazy_add_method(obj
, 0, 0, pattern
, rb_ary_new3(1, pattern
), funcs
);
2306 call_next(VALUE obj
)
2308 return rb_funcall(obj
, id_next
, 0);
2312 next_stopped(VALUE obj
, VALUE _
)
2317 static struct MEMO
*
2318 lazy_zip_arrays_func(VALUE proc_entry
, struct MEMO
*result
, VALUE memos
, long memo_index
)
2320 struct proc_entry
*entry
= proc_entry_ptr(proc_entry
);
2321 VALUE ary
, arrays
= entry
->memo
;
2322 VALUE memo
= rb_ary_entry(memos
, memo_index
);
2323 long i
, count
= NIL_P(memo
) ? 0 : NUM2LONG(memo
);
2325 ary
= rb_ary_new2(RARRAY_LEN(arrays
) + 1);
2326 rb_ary_push(ary
, result
->memo_value
);
2327 for (i
= 0; i
< RARRAY_LEN(arrays
); i
++) {
2328 rb_ary_push(ary
, rb_ary_entry(RARRAY_AREF(arrays
, i
), count
));
2330 LAZY_MEMO_SET_VALUE(result
, ary
);
2331 LAZY_MEMO_SET_PACKED(result
);
2332 rb_ary_store(memos
, memo_index
, LONG2NUM(++count
));
2336 static struct MEMO
*
2337 lazy_zip_func(VALUE proc_entry
, struct MEMO
*result
, VALUE memos
, long memo_index
)
2339 struct proc_entry
*entry
= proc_entry_ptr(proc_entry
);
2340 VALUE arg
= rb_ary_entry(memos
, memo_index
);
2341 VALUE zip_args
= entry
->memo
;
2346 arg
= rb_ary_new2(RARRAY_LEN(zip_args
));
2347 for (i
= 0; i
< RARRAY_LEN(zip_args
); i
++) {
2348 rb_ary_push(arg
, rb_funcall(RARRAY_AREF(zip_args
, i
), id_to_enum
, 0));
2350 rb_ary_store(memos
, memo_index
, arg
);
2353 ary
= rb_ary_new2(RARRAY_LEN(arg
) + 1);
2354 rb_ary_push(ary
, result
->memo_value
);
2355 for (i
= 0; i
< RARRAY_LEN(arg
); i
++) {
2356 v
= rb_rescue2(call_next
, RARRAY_AREF(arg
, i
), next_stopped
, 0,
2357 rb_eStopIteration
, (VALUE
)0);
2358 rb_ary_push(ary
, v
);
2360 LAZY_MEMO_SET_VALUE(result
, ary
);
2361 LAZY_MEMO_SET_PACKED(result
);
2365 static const lazyenum_funcs lazy_zip_funcs
[] = {
2366 {lazy_zip_func
, lazy_receiver_size
,},
2367 {lazy_zip_arrays_func
, lazy_receiver_size
,},
2372 * lazy.zip(arg, ...) -> lazy_enumerator
2373 * lazy.zip(arg, ...) { |arr| block } -> nil
2375 * Like Enumerable#zip, but chains operation to be lazy-evaluated.
2376 * However, if a block is given to zip, values are enumerated immediately.
2379 lazy_zip(int argc
, VALUE
*argv
, VALUE obj
)
2383 const lazyenum_funcs
*funcs
= &lazy_zip_funcs
[1];
2385 if (rb_block_given_p()) {
2386 return rb_call_super(argc
, argv
);
2389 ary
= rb_ary_new2(argc
);
2390 for (i
= 0; i
< argc
; i
++) {
2391 v
= rb_check_array_type(argv
[i
]);
2393 for (; i
< argc
; i
++) {
2394 if (!rb_respond_to(argv
[i
], id_each
)) {
2395 rb_raise(rb_eTypeError
, "wrong argument type %"PRIsVALUE
" (must respond to :each)",
2396 rb_obj_class(argv
[i
]));
2399 ary
= rb_ary_new4(argc
, argv
);
2400 funcs
= &lazy_zip_funcs
[0];
2403 rb_ary_push(ary
, v
);
2406 return lazy_add_method(obj
, 0, 0, ary
, ary
, funcs
);
2409 static struct MEMO
*
2410 lazy_take_proc(VALUE proc_entry
, struct MEMO
*result
, VALUE memos
, long memo_index
)
2413 struct proc_entry
*entry
= proc_entry_ptr(proc_entry
);
2414 VALUE memo
= rb_ary_entry(memos
, memo_index
);
2420 remain
= NUM2LONG(memo
);
2422 LAZY_MEMO_SET_BREAK(result
);
2425 if (--remain
== 0) LAZY_MEMO_SET_BREAK(result
);
2426 rb_ary_store(memos
, memo_index
, LONG2NUM(remain
));
2432 lazy_take_size(VALUE entry
, VALUE receiver
)
2434 long len
= NUM2LONG(RARRAY_AREF(rb_ivar_get(entry
, id_arguments
), 0));
2435 if (NIL_P(receiver
) || (FIXNUM_P(receiver
) && FIX2LONG(receiver
) < len
))
2437 return LONG2NUM(len
);
2440 static const lazyenum_funcs lazy_take_funcs
= {
2441 lazy_take_proc
, lazy_take_size
,
2446 * lazy.take(n) -> lazy_enumerator
2448 * Like Enumerable#take, but chains operation to be lazy-evaluated.
2452 lazy_take(VALUE obj
, VALUE n
)
2454 long len
= NUM2LONG(n
);
2459 rb_raise(rb_eArgError
, "attempt to take negative size");
2463 argv
[0] = sym_cycle
;
2464 argv
[1] = INT2NUM(0);
2468 return lazy_add_method(obj
, argc
, argv
, n
, rb_ary_new3(1, n
), &lazy_take_funcs
);
2471 static struct MEMO
*
2472 lazy_take_while_proc(VALUE proc_entry
, struct MEMO
*result
, VALUE memos
, long memo_index
)
2474 VALUE take
= lazyenum_yield_values(proc_entry
, result
);
2476 LAZY_MEMO_SET_BREAK(result
);
2482 static const lazyenum_funcs lazy_take_while_funcs
= {
2483 lazy_take_while_proc
, 0,
2488 * lazy.take_while { |obj| block } -> lazy_enumerator
2490 * Like Enumerable#take_while, but chains operation to be lazy-evaluated.
2494 lazy_take_while(VALUE obj
)
2496 if (!rb_block_given_p()) {
2497 rb_raise(rb_eArgError
, "tried to call lazy take_while without a block");
2500 return lazy_add_method(obj
, 0, 0, Qnil
, Qnil
, &lazy_take_while_funcs
);
2504 lazy_drop_size(VALUE proc_entry
, VALUE receiver
)
2506 long len
= NUM2LONG(RARRAY_AREF(rb_ivar_get(proc_entry
, id_arguments
), 0));
2507 if (NIL_P(receiver
))
2509 if (FIXNUM_P(receiver
)) {
2510 len
= FIX2LONG(receiver
) - len
;
2511 return LONG2FIX(len
< 0 ? 0 : len
);
2513 return rb_funcall(receiver
, '-', 1, LONG2NUM(len
));
2516 static struct MEMO
*
2517 lazy_drop_proc(VALUE proc_entry
, struct MEMO
*result
, VALUE memos
, long memo_index
)
2520 struct proc_entry
*entry
= proc_entry_ptr(proc_entry
);
2521 VALUE memo
= rb_ary_entry(memos
, memo_index
);
2526 remain
= NUM2LONG(memo
);
2529 rb_ary_store(memos
, memo_index
, LONG2NUM(remain
));
2536 static const lazyenum_funcs lazy_drop_funcs
= {
2537 lazy_drop_proc
, lazy_drop_size
,
2542 * lazy.drop(n) -> lazy_enumerator
2544 * Like Enumerable#drop, but chains operation to be lazy-evaluated.
2548 lazy_drop(VALUE obj
, VALUE n
)
2550 long len
= NUM2LONG(n
);
2556 rb_raise(rb_eArgError
, "attempt to drop negative size");
2559 return lazy_add_method(obj
, 2, argv
, n
, rb_ary_new3(1, n
), &lazy_drop_funcs
);
2562 static struct MEMO
*
2563 lazy_drop_while_proc(VALUE proc_entry
, struct MEMO
* result
, VALUE memos
, long memo_index
)
2565 struct proc_entry
*entry
= proc_entry_ptr(proc_entry
);
2566 VALUE memo
= rb_ary_entry(memos
, memo_index
);
2573 VALUE drop
= lazyenum_yield_values(proc_entry
, result
);
2574 if (RTEST(drop
)) return 0;
2575 rb_ary_store(memos
, memo_index
, Qtrue
);
2580 static const lazyenum_funcs lazy_drop_while_funcs
= {
2581 lazy_drop_while_proc
, 0,
2586 * lazy.drop_while { |obj| block } -> lazy_enumerator
2588 * Like Enumerable#drop_while, but chains operation to be lazy-evaluated.
2592 lazy_drop_while(VALUE obj
)
2594 if (!rb_block_given_p()) {
2595 rb_raise(rb_eArgError
, "tried to call lazy drop_while without a block");
2598 return lazy_add_method(obj
, 0, 0, Qfalse
, Qnil
, &lazy_drop_while_funcs
);
2602 lazy_uniq_check(VALUE chain
, VALUE memos
, long memo_index
)
2604 VALUE hash
= rb_ary_entry(memos
, memo_index
);
2607 hash
= rb_obj_hide(rb_hash_new());
2608 rb_ary_store(memos
, memo_index
, hash
);
2611 return rb_hash_add_new_element(hash
, chain
, Qfalse
);
2614 static struct MEMO
*
2615 lazy_uniq_proc(VALUE proc_entry
, struct MEMO
*result
, VALUE memos
, long memo_index
)
2617 if (lazy_uniq_check(result
->memo_value
, memos
, memo_index
)) return 0;
2621 static struct MEMO
*
2622 lazy_uniq_iter_proc(VALUE proc_entry
, struct MEMO
*result
, VALUE memos
, long memo_index
)
2624 VALUE chain
= lazyenum_yield(proc_entry
, result
);
2626 if (lazy_uniq_check(chain
, memos
, memo_index
)) return 0;
2630 static const lazyenum_funcs lazy_uniq_iter_funcs
= {
2631 lazy_uniq_iter_proc
, 0,
2634 static const lazyenum_funcs lazy_uniq_funcs
= {
2640 * lazy.uniq -> lazy_enumerator
2641 * lazy.uniq { |item| block } -> lazy_enumerator
2643 * Like Enumerable#uniq, but chains operation to be lazy-evaluated.
2647 lazy_uniq(VALUE obj
)
2649 const lazyenum_funcs
*const funcs
=
2650 rb_block_given_p() ? &lazy_uniq_iter_funcs
: &lazy_uniq_funcs
;
2651 return lazy_add_method(obj
, 0, 0, Qnil
, Qnil
, funcs
);
2654 static struct MEMO
*
2655 lazy_compact_proc(VALUE proc_entry
, struct MEMO
*result
, VALUE memos
, long memo_index
)
2657 if (NIL_P(result
->memo_value
)) return 0;
2661 static const lazyenum_funcs lazy_compact_funcs
= {
2662 lazy_compact_proc
, 0,
2667 * lazy.compact -> lazy_enumerator
2669 * Like Enumerable#compact, but chains operation to be lazy-evaluated.
2673 lazy_compact(VALUE obj
)
2675 return lazy_add_method(obj
, 0, 0, Qnil
, Qnil
, &lazy_compact_funcs
);
2678 static struct MEMO
*
2679 lazy_with_index_proc(VALUE proc_entry
, struct MEMO
* result
, VALUE memos
, long memo_index
)
2681 struct proc_entry
*entry
= proc_entry_ptr(proc_entry
);
2682 VALUE memo
= rb_ary_entry(memos
, memo_index
);
2689 argv
[0] = result
->memo_value
;
2692 rb_proc_call_with_block(entry
->proc
, 2, argv
, Qnil
);
2693 LAZY_MEMO_RESET_PACKED(result
);
2696 LAZY_MEMO_SET_VALUE(result
, rb_ary_new_from_values(2, argv
));
2697 LAZY_MEMO_SET_PACKED(result
);
2699 rb_ary_store(memos
, memo_index
, LONG2NUM(NUM2LONG(memo
) + 1));
2704 lazy_with_index_size(VALUE proc
, VALUE receiver
)
2709 static const lazyenum_funcs lazy_with_index_funcs
= {
2710 lazy_with_index_proc
, lazy_with_index_size
,
2715 * lazy.with_index(offset = 0) {|(*args), idx| block }
2716 * lazy.with_index(offset = 0)
2718 * If a block is given, returns a lazy enumerator that will
2719 * iterate over the given block for each element
2720 * with an index, which starts from +offset+, and returns a
2721 * lazy enumerator that yields the same values (without the index).
2723 * If a block is not given, returns a new lazy enumerator that
2724 * includes the index, starting from +offset+.
2726 * +offset+:: the starting index to use
2728 * See Enumerator#with_index.
2731 lazy_with_index(int argc
, VALUE
*argv
, VALUE obj
)
2735 rb_scan_args(argc
, argv
, "01", &memo
);
2739 return lazy_add_method(obj
, 0, 0, memo
, rb_ary_new_from_values(1, &memo
), &lazy_with_index_funcs
);
2742 #if 0 /* for RDoc */
2746 * lazy.chunk { |elt| ... } -> lazy_enumerator
2748 * Like Enumerable#chunk, but chains operation to be lazy-evaluated.
2750 static VALUE
lazy_chunk(VALUE self
)
2756 * lazy.chunk_while {|elt_before, elt_after| bool } -> lazy_enumerator
2758 * Like Enumerable#chunk_while, but chains operation to be lazy-evaluated.
2760 static VALUE
lazy_chunk_while(VALUE self
)
2766 * lazy.slice_after(pattern) -> lazy_enumerator
2767 * lazy.slice_after { |elt| bool } -> lazy_enumerator
2769 * Like Enumerable#slice_after, but chains operation to be lazy-evaluated.
2771 static VALUE
lazy_slice_after(VALUE self
)
2777 * lazy.slice_before(pattern) -> lazy_enumerator
2778 * lazy.slice_before { |elt| bool } -> lazy_enumerator
2780 * Like Enumerable#slice_before, but chains operation to be lazy-evaluated.
2782 static VALUE
lazy_slice_before(VALUE self
)
2788 * lazy.slice_when {|elt_before, elt_after| bool } -> lazy_enumerator
2790 * Like Enumerable#slice_when, but chains operation to be lazy-evaluated.
2792 static VALUE
lazy_slice_when(VALUE self
)
2798 lazy_super(int argc
, VALUE
*argv
, VALUE lazy
)
2800 return enumerable_lazy(rb_call_super(argc
, argv
));
2805 * enum.lazy -> lazy_enumerator
2811 lazy_lazy(VALUE obj
)
2817 * Document-class: StopIteration
2819 * Raised to stop the iteration, in particular by Enumerator#next. It is
2820 * rescued by Kernel#loop.
2824 * raise StopIteration
2829 * <em>produces:</em>
2839 * Returns the return value of the iterator.
2857 * rescue StopIteration => ex
2858 * puts ex.result #=> 100
2864 stop_result(VALUE self
)
2866 return rb_attr_get(self
, id_result
);
2874 producer_mark(void *p
)
2876 struct producer
*ptr
= p
;
2877 rb_gc_mark_movable(ptr
->init
);
2878 rb_gc_mark_movable(ptr
->proc
);
2882 producer_compact(void *p
)
2884 struct producer
*ptr
= p
;
2885 ptr
->init
= rb_gc_location(ptr
->init
);
2886 ptr
->proc
= rb_gc_location(ptr
->proc
);
2889 #define producer_free RUBY_TYPED_DEFAULT_FREE
2892 producer_memsize(const void *p
)
2894 return sizeof(struct producer
);
2897 static const rb_data_type_t producer_data_type
= {
2905 0, 0, RUBY_TYPED_FREE_IMMEDIATELY
2908 static struct producer
*
2909 producer_ptr(VALUE obj
)
2911 struct producer
*ptr
;
2913 TypedData_Get_Struct(obj
, struct producer
, &producer_data_type
, ptr
);
2914 if (!ptr
|| ptr
->proc
== Qundef
) {
2915 rb_raise(rb_eArgError
, "uninitialized producer");
2922 producer_allocate(VALUE klass
)
2924 struct producer
*ptr
;
2927 obj
= TypedData_Make_Struct(klass
, struct producer
, &producer_data_type
, ptr
);
2935 producer_init(VALUE obj
, VALUE init
, VALUE proc
)
2937 struct producer
*ptr
;
2939 TypedData_Get_Struct(obj
, struct producer
, &producer_data_type
, ptr
);
2942 rb_raise(rb_eArgError
, "unallocated producer");
2952 producer_each_stop(VALUE dummy
, VALUE exc
)
2954 return rb_attr_get(exc
, id_result
);
2957 NORETURN(static VALUE
producer_each_i(VALUE obj
));
2960 producer_each_i(VALUE obj
)
2962 struct producer
*ptr
;
2963 VALUE init
, proc
, curr
;
2965 ptr
= producer_ptr(obj
);
2969 if (init
== Qundef
) {
2978 curr
= rb_funcall(proc
, id_call
, 1, curr
);
2982 UNREACHABLE_RETURN(Qnil
);
2987 producer_each(VALUE obj
)
2991 return rb_rescue2(producer_each_i
, obj
, producer_each_stop
, (VALUE
)0, rb_eStopIteration
, (VALUE
)0);
2995 producer_size(VALUE obj
, VALUE args
, VALUE eobj
)
2997 return DBL2NUM(HUGE_VAL
);
3002 * Enumerator.produce(initial = nil) { |prev| block } -> enumerator
3004 * Creates an infinite enumerator from any block, just called over and
3005 * over. The result of the previous iteration is passed to the next one.
3006 * If +initial+ is provided, it is passed to the first iteration, and
3007 * becomes the first element of the enumerator; if it is not provided,
3008 * the first iteration receives +nil+, and its result becomes the first
3009 * element of the iterator.
3011 * Raising StopIteration from the block stops an iteration.
3013 * Enumerator.produce(1, &:succ) # => enumerator of 1, 2, 3, 4, ....
3015 * Enumerator.produce { rand(10) } # => infinite random number sequence
3017 * ancestors = Enumerator.produce(node) { |prev| node = prev.parent or raise StopIteration }
3018 * enclosing_section = ancestors.find { |n| n.type == :section }
3020 * Using ::produce together with Enumerable methods like Enumerable#detect,
3021 * Enumerable#slice_after, Enumerable#take_while can provide Enumerator-based alternatives
3022 * for +while+ and +until+ cycles:
3024 * # Find next Tuesday
3026 * Enumerator.produce(Date.today, &:succ).detect(&:tuesday?)
3030 * scanner = StringScanner.new("7+38/6")
3031 * PATTERN = %r{\d+|[-/+*]}
3032 * Enumerator.produce { scanner.scan(PATTERN) }.slice_after { scanner.eos? }.first
3033 * # => ["7", "+", "38", "/", "6"]
3036 enumerator_s_produce(int argc
, VALUE
*argv
, VALUE klass
)
3038 VALUE init
, producer
;
3040 if (!rb_block_given_p()) rb_raise(rb_eArgError
, "no block given");
3042 if (rb_scan_args(argc
, argv
, "01", &init
) == 0) {
3046 producer
= producer_init(producer_allocate(rb_cEnumProducer
), init
, rb_block_proc());
3048 return rb_enumeratorize_with_size_kw(producer
, sym_each
, 0, 0, producer_size
, RB_NO_KEYWORDS
);
3052 * Document-class: Enumerator::Chain
3054 * Enumerator::Chain is a subclass of Enumerator, which represents a
3055 * chain of enumerables that works as a single enumerator.
3057 * This type of objects can be created by Enumerable#chain and
3062 enum_chain_mark(void *p
)
3064 struct enum_chain
*ptr
= p
;
3065 rb_gc_mark_movable(ptr
->enums
);
3069 enum_chain_compact(void *p
)
3071 struct enum_chain
*ptr
= p
;
3072 ptr
->enums
= rb_gc_location(ptr
->enums
);
3075 #define enum_chain_free RUBY_TYPED_DEFAULT_FREE
3078 enum_chain_memsize(const void *p
)
3080 return sizeof(struct enum_chain
);
3083 static const rb_data_type_t enum_chain_data_type
= {
3091 0, 0, RUBY_TYPED_FREE_IMMEDIATELY
3094 static struct enum_chain
*
3095 enum_chain_ptr(VALUE obj
)
3097 struct enum_chain
*ptr
;
3099 TypedData_Get_Struct(obj
, struct enum_chain
, &enum_chain_data_type
, ptr
);
3100 if (!ptr
|| ptr
->enums
== Qundef
) {
3101 rb_raise(rb_eArgError
, "uninitialized chain");
3108 enum_chain_allocate(VALUE klass
)
3110 struct enum_chain
*ptr
;
3113 obj
= TypedData_Make_Struct(klass
, struct enum_chain
, &enum_chain_data_type
, ptr
);
3114 ptr
->enums
= Qundef
;
3122 * Enumerator::Chain.new(*enums) -> enum
3124 * Generates a new enumerator object that iterates over the elements
3125 * of given enumerable objects in sequence.
3127 * e = Enumerator::Chain.new(1..3, [4, 5])
3128 * e.to_a #=> [1, 2, 3, 4, 5]
3132 enum_chain_initialize(VALUE obj
, VALUE enums
)
3134 struct enum_chain
*ptr
;
3136 rb_check_frozen(obj
);
3137 TypedData_Get_Struct(obj
, struct enum_chain
, &enum_chain_data_type
, ptr
);
3139 if (!ptr
) rb_raise(rb_eArgError
, "unallocated chain");
3141 ptr
->enums
= rb_obj_freeze(enums
);
3148 new_enum_chain(VALUE enums
)
3151 VALUE obj
= enum_chain_initialize(enum_chain_allocate(rb_cEnumChain
), enums
);
3153 for (i
= 0; i
< RARRAY_LEN(enums
); i
++) {
3154 if (RTEST(rb_obj_is_kind_of(RARRAY_AREF(enums
, i
), rb_cLazy
))) {
3155 return enumerable_lazy(obj
);
3164 enum_chain_init_copy(VALUE obj
, VALUE orig
)
3166 struct enum_chain
*ptr0
, *ptr1
;
3168 if (!OBJ_INIT_COPY(obj
, orig
)) return obj
;
3169 ptr0
= enum_chain_ptr(orig
);
3171 TypedData_Get_Struct(obj
, struct enum_chain
, &enum_chain_data_type
, ptr1
);
3173 if (!ptr1
) rb_raise(rb_eArgError
, "unallocated chain");
3175 ptr1
->enums
= ptr0
->enums
;
3176 ptr1
->pos
= ptr0
->pos
;
3182 enum_chain_total_size(VALUE enums
)
3184 VALUE total
= INT2FIX(0);
3187 for (i
= 0; i
< RARRAY_LEN(enums
); i
++) {
3188 VALUE size
= enum_size(RARRAY_AREF(enums
, i
));
3190 if (NIL_P(size
) || (RB_FLOAT_TYPE_P(size
) && isinf(NUM2DBL(size
)))) {
3193 if (!RB_INTEGER_TYPE_P(size
)) {
3197 total
= rb_funcall(total
, '+', 1, size
);
3205 * obj.size -> int, Float::INFINITY or nil
3207 * Returns the total size of the enumerator chain calculated by
3208 * summing up the size of each enumerable in the chain. If any of the
3209 * enumerables reports its size as nil or Float::INFINITY, that value
3210 * is returned as the total size.
3213 enum_chain_size(VALUE obj
)
3215 return enum_chain_total_size(enum_chain_ptr(obj
)->enums
);
3219 enum_chain_enum_size(VALUE obj
, VALUE args
, VALUE eobj
)
3221 return enum_chain_size(obj
);
3225 enum_chain_enum_no_size(VALUE obj
, VALUE args
, VALUE eobj
)
3232 * obj.each(*args) { |...| ... } -> obj
3233 * obj.each(*args) -> enumerator
3235 * Iterates over the elements of the first enumerable by calling the
3236 * "each" method on it with the given arguments, then proceeds to the
3237 * following enumerables in sequence until all of the enumerables are
3240 * If no block is given, returns an enumerator.
3243 enum_chain_each(int argc
, VALUE
*argv
, VALUE obj
)
3246 struct enum_chain
*objptr
;
3249 RETURN_SIZED_ENUMERATOR(obj
, argc
, argv
, argc
> 0 ? enum_chain_enum_no_size
: enum_chain_enum_size
);
3251 objptr
= enum_chain_ptr(obj
);
3252 enums
= objptr
->enums
;
3253 block
= rb_block_proc();
3255 for (i
= 0; i
< RARRAY_LEN(enums
); i
++) {
3257 rb_funcall_with_block(RARRAY_AREF(enums
, i
), id_each
, argc
, argv
, block
);
3267 * Rewinds the enumerator chain by calling the "rewind" method on each
3268 * enumerable in reverse order. Each call is performed only if the
3269 * enumerable responds to the method.
3272 enum_chain_rewind(VALUE obj
)
3274 struct enum_chain
*objptr
= enum_chain_ptr(obj
);
3275 VALUE enums
= objptr
->enums
;
3278 for (i
= objptr
->pos
; 0 <= i
&& i
< RARRAY_LEN(enums
); objptr
->pos
= --i
) {
3279 rb_check_funcall(RARRAY_AREF(enums
, i
), id_rewind
, 0, 0);
3286 inspect_enum_chain(VALUE obj
, VALUE dummy
, int recur
)
3288 VALUE klass
= rb_obj_class(obj
);
3289 struct enum_chain
*ptr
;
3291 TypedData_Get_Struct(obj
, struct enum_chain
, &enum_chain_data_type
, ptr
);
3293 if (!ptr
|| ptr
->enums
== Qundef
) {
3294 return rb_sprintf("#<%"PRIsVALUE
": uninitialized>", rb_class_path(klass
));
3298 return rb_sprintf("#<%"PRIsVALUE
": ...>", rb_class_path(klass
));
3301 return rb_sprintf("#<%"PRIsVALUE
": %+"PRIsVALUE
">", rb_class_path(klass
), ptr
->enums
);
3306 * obj.inspect -> string
3308 * Returns a printable version of the enumerator chain.
3311 enum_chain_inspect(VALUE obj
)
3313 return rb_exec_recursive(inspect_enum_chain
, obj
, 0);
3318 * e.chain(*enums) -> enumerator
3320 * Returns an enumerator object generated from this enumerator and
3321 * given enumerables.
3323 * e = (1..3).chain([4, 5])
3324 * e.to_a #=> [1, 2, 3, 4, 5]
3327 enum_chain(int argc
, VALUE
*argv
, VALUE obj
)
3329 VALUE enums
= rb_ary_new_from_values(1, &obj
);
3330 rb_ary_cat(enums
, argv
, argc
);
3331 return new_enum_chain(enums
);
3336 * e + enum -> enumerator
3338 * Returns an enumerator object generated from this enumerator and a
3341 * e = (1..3).each + [4, 5]
3342 * e.to_a #=> [1, 2, 3, 4, 5]
3345 enumerator_plus(VALUE obj
, VALUE eobj
)
3347 return new_enum_chain(rb_ary_new_from_args(2, obj
, eobj
));
3351 * Document-class: Enumerator::ArithmeticSequence
3353 * Enumerator::ArithmeticSequence is a subclass of Enumerator,
3354 * that is a representation of sequences of numbers with common difference.
3355 * Instances of this class can be generated by the Range#step and Numeric#step
3358 * The class can be used for slicing Array (see Array#slice) or custom
3363 rb_arith_seq_new(VALUE obj
, VALUE meth
, int argc
, VALUE
const *argv
,
3364 rb_enumerator_size_func
*size_fn
,
3365 VALUE beg
, VALUE end
, VALUE step
, int excl
)
3367 VALUE aseq
= enumerator_init(enumerator_allocate(rb_cArithSeq
),
3368 obj
, meth
, argc
, argv
, size_fn
, Qnil
, rb_keyword_given_p());
3369 rb_ivar_set(aseq
, id_begin
, beg
);
3370 rb_ivar_set(aseq
, id_end
, end
);
3371 rb_ivar_set(aseq
, id_step
, step
);
3372 rb_ivar_set(aseq
, id_exclude_end
, RBOOL(excl
));
3377 * call-seq: aseq.begin -> num or nil
3379 * Returns the number that defines the first element of this arithmetic
3383 arith_seq_begin(VALUE self
)
3385 return rb_ivar_get(self
, id_begin
);
3389 * call-seq: aseq.end -> num or nil
3391 * Returns the number that defines the end of this arithmetic sequence.
3394 arith_seq_end(VALUE self
)
3396 return rb_ivar_get(self
, id_end
);
3400 * call-seq: aseq.step -> num
3402 * Returns the number that defines the common difference between
3403 * two adjacent elements in this arithmetic sequence.
3406 arith_seq_step(VALUE self
)
3408 return rb_ivar_get(self
, id_step
);
3412 * call-seq: aseq.exclude_end? -> true or false
3414 * Returns <code>true</code> if this arithmetic sequence excludes its end value.
3417 arith_seq_exclude_end(VALUE self
)
3419 return rb_ivar_get(self
, id_exclude_end
);
3423 arith_seq_exclude_end_p(VALUE self
)
3425 return RTEST(arith_seq_exclude_end(self
));
3429 rb_arithmetic_sequence_extract(VALUE obj
, rb_arithmetic_sequence_components_t
*component
)
3431 if (rb_obj_is_kind_of(obj
, rb_cArithSeq
)) {
3432 component
->begin
= arith_seq_begin(obj
);
3433 component
->end
= arith_seq_end(obj
);
3434 component
->step
= arith_seq_step(obj
);
3435 component
->exclude_end
= arith_seq_exclude_end_p(obj
);
3438 else if (rb_range_values(obj
, &component
->begin
, &component
->end
, &component
->exclude_end
)) {
3439 component
->step
= INT2FIX(1);
3447 rb_arithmetic_sequence_beg_len_step(VALUE obj
, long *begp
, long *lenp
, long *stepp
, long len
, int err
)
3449 RBIMPL_NONNULL_ARG(begp
);
3450 RBIMPL_NONNULL_ARG(lenp
);
3451 RBIMPL_NONNULL_ARG(stepp
);
3453 rb_arithmetic_sequence_components_t aseq
;
3454 if (!rb_arithmetic_sequence_extract(obj
, &aseq
)) {
3458 long step
= NIL_P(aseq
.step
) ? 1 : NUM2LONG(aseq
.step
);
3462 VALUE tmp
= aseq
.begin
;
3463 aseq
.begin
= aseq
.end
;
3467 if (err
== 0 && (step
< -1 || step
> 1)) {
3468 if (rb_range_component_beg_len(aseq
.begin
, aseq
.end
, aseq
.exclude_end
, begp
, lenp
, len
, 1) == Qtrue
) {
3477 return rb_range_component_beg_len(aseq
.begin
, aseq
.end
, aseq
.exclude_end
, begp
, lenp
, len
, err
);
3481 rb_raise(rb_eRangeError
, "%+"PRIsVALUE
" out of range", obj
);
3487 * aseq.first -> num or nil
3488 * aseq.first(n) -> an_array
3490 * Returns the first number in this arithmetic sequence,
3491 * or an array of the first +n+ elements.
3494 arith_seq_first(int argc
, VALUE
*argv
, VALUE self
)
3500 rb_check_arity(argc
, 0, 1);
3502 b
= arith_seq_begin(self
);
3503 e
= arith_seq_end(self
);
3504 s
= arith_seq_step(self
);
3510 VALUE zero
= INT2FIX(0);
3511 int r
= rb_cmpint(rb_num_coerce_cmp(s
, zero
, idCmp
), s
, zero
);
3512 if (r
> 0 && RTEST(rb_funcall(b
, '>', 1, e
))) {
3515 if (r
< 0 && RTEST(rb_funcall(b
, '<', 1, e
))) {
3522 // TODO: the following code should be extracted as arith_seq_take
3524 n
= NUM2LONG(argv
[0]);
3526 rb_raise(rb_eArgError
, "attempt to take negative size");
3529 return rb_ary_new_capa(0);
3532 x
= arith_seq_exclude_end_p(self
);
3534 if (FIXNUM_P(b
) && NIL_P(e
) && FIXNUM_P(s
)) {
3535 long i
= FIX2LONG(b
), unit
= FIX2LONG(s
);
3536 ary
= rb_ary_new_capa(n
);
3537 while (n
> 0 && FIXABLE(i
)) {
3538 rb_ary_push(ary
, LONG2FIX(i
));
3539 i
+= unit
; // FIXABLE + FIXABLE never overflow;
3545 rb_ary_push(ary
, b
);
3546 b
= rb_big_plus(b
, s
);
3552 else if (FIXNUM_P(b
) && FIXNUM_P(e
) && FIXNUM_P(s
)) {
3553 long i
= FIX2LONG(b
);
3554 long end
= FIX2LONG(e
);
3555 long unit
= FIX2LONG(s
);
3562 if (len
< 0) len
= 0;
3563 ary
= rb_ary_new_capa((n
< len
) ? n
: len
);
3564 while (n
> 0 && i
< end
) {
3565 rb_ary_push(ary
, LONG2FIX(i
));
3566 if (i
+ unit
< i
) break;
3575 if (len
< 0) len
= 0;
3576 ary
= rb_ary_new_capa((n
< len
) ? n
: len
);
3577 while (n
> 0 && i
> end
) {
3578 rb_ary_push(ary
, LONG2FIX(i
));
3579 if (i
+ unit
> i
) break;
3586 else if (RB_FLOAT_TYPE_P(b
) || RB_FLOAT_TYPE_P(e
) || RB_FLOAT_TYPE_P(s
)) {
3587 /* generate values like ruby_float_step */
3589 double unit
= NUM2DBL(s
);
3590 double beg
= NUM2DBL(b
);
3591 double end
= NIL_P(e
) ? (unit
< 0 ? -1 : 1)*HUGE_VAL
: NUM2DBL(e
);
3592 double len
= ruby_float_step_size(beg
, end
, unit
, x
);
3600 ary
= rb_ary_new_capa(1);
3601 rb_ary_push(ary
, DBL2NUM(beg
));
3604 ary
= rb_ary_new_capa(0);
3607 else if (unit
== 0) {
3608 VALUE val
= DBL2NUM(beg
);
3609 ary
= rb_ary_new_capa(n
);
3610 for (i
= 0; i
< len
; ++i
) {
3611 rb_ary_push(ary
, val
);
3615 ary
= rb_ary_new_capa(n
);
3616 for (i
= 0; i
< n
; ++i
) {
3617 double d
= i
*unit
+beg
;
3618 if (unit
>= 0 ? end
< d
: d
< end
) d
= end
;
3619 rb_ary_push(ary
, DBL2NUM(d
));
3626 return rb_call_super(argc
, argv
);
3630 num_plus(VALUE a
, VALUE b
)
3632 if (RB_INTEGER_TYPE_P(a
)) {
3633 return rb_int_plus(a
, b
);
3635 else if (RB_FLOAT_TYPE_P(a
)) {
3636 return rb_float_plus(a
, b
);
3638 else if (RB_TYPE_P(a
, T_RATIONAL
)) {
3639 return rb_rational_plus(a
, b
);
3642 return rb_funcallv(a
, '+', 1, &b
);
3647 num_minus(VALUE a
, VALUE b
)
3649 if (RB_INTEGER_TYPE_P(a
)) {
3650 return rb_int_minus(a
, b
);
3652 else if (RB_FLOAT_TYPE_P(a
)) {
3653 return rb_float_minus(a
, b
);
3655 else if (RB_TYPE_P(a
, T_RATIONAL
)) {
3656 return rb_rational_minus(a
, b
);
3659 return rb_funcallv(a
, '-', 1, &b
);
3664 num_mul(VALUE a
, VALUE b
)
3666 if (RB_INTEGER_TYPE_P(a
)) {
3667 return rb_int_mul(a
, b
);
3669 else if (RB_FLOAT_TYPE_P(a
)) {
3670 return rb_float_mul(a
, b
);
3672 else if (RB_TYPE_P(a
, T_RATIONAL
)) {
3673 return rb_rational_mul(a
, b
);
3676 return rb_funcallv(a
, '*', 1, &b
);
3681 num_idiv(VALUE a
, VALUE b
)
3684 if (RB_INTEGER_TYPE_P(a
)) {
3685 q
= rb_int_idiv(a
, b
);
3687 else if (RB_FLOAT_TYPE_P(a
)) {
3688 q
= rb_float_div(a
, b
);
3690 else if (RB_TYPE_P(a
, T_RATIONAL
)) {
3691 q
= rb_rational_div(a
, b
);
3694 q
= rb_funcallv(a
, idDiv
, 1, &b
);
3697 if (RB_INTEGER_TYPE_P(q
)) {
3700 else if (RB_FLOAT_TYPE_P(q
)) {
3701 return rb_float_floor(q
, 0);
3703 else if (RB_TYPE_P(q
, T_RATIONAL
)) {
3704 return rb_rational_floor(q
, 0);
3707 return rb_funcall(q
, rb_intern("floor"), 0);
3713 * aseq.last -> num or nil
3714 * aseq.last(n) -> an_array
3716 * Returns the last number in this arithmetic sequence,
3717 * or an array of the last +n+ elements.
3720 arith_seq_last(int argc
, VALUE
*argv
, VALUE self
)
3722 VALUE b
, e
, s
, len_1
, len
, last
, nv
, ary
;
3723 int last_is_adjusted
;
3726 e
= arith_seq_end(self
);
3728 rb_raise(rb_eRangeError
,
3729 "cannot get the last element of endless arithmetic sequence");
3732 b
= arith_seq_begin(self
);
3733 s
= arith_seq_step(self
);
3735 len_1
= num_idiv(num_minus(e
, b
), s
);
3736 if (rb_num_negative_int_p(len_1
)) {
3740 return rb_ary_new_capa(0);
3743 last
= num_plus(b
, num_mul(s
, len_1
));
3744 if ((last_is_adjusted
= arith_seq_exclude_end_p(self
) && rb_equal(last
, e
))) {
3745 last
= num_minus(last
, s
);
3752 if (last_is_adjusted
) {
3756 len
= rb_int_plus(len_1
, INT2FIX(1));
3759 rb_scan_args(argc
, argv
, "1", &nv
);
3760 if (!RB_INTEGER_TYPE_P(nv
)) {
3763 if (RTEST(rb_int_gt(nv
, len
))) {
3768 rb_raise(rb_eArgError
, "negative array size");
3771 ary
= rb_ary_new_capa(n
);
3772 b
= rb_int_minus(last
, rb_int_mul(s
, nv
));
3774 b
= rb_int_plus(b
, s
);
3775 rb_ary_push(ary
, b
);
3784 * aseq.inspect -> string
3786 * Convert this arithmetic sequence to a printable form.
3789 arith_seq_inspect(VALUE self
)
3791 struct enumerator
*e
;
3792 VALUE eobj
, str
, eargs
;
3795 TypedData_Get_Struct(self
, struct enumerator
, &enumerator_data_type
, e
);
3797 eobj
= rb_attr_get(self
, id_receiver
);
3802 range_p
= RTEST(rb_obj_is_kind_of(eobj
, rb_cRange
));
3803 str
= rb_sprintf("(%s%"PRIsVALUE
"%s.", range_p
? "(" : "", eobj
, range_p
? ")" : "");
3805 rb_str_buf_append(str
, rb_id2str(e
->meth
));
3807 eargs
= rb_attr_get(eobj
, id_arguments
);
3811 if (eargs
!= Qfalse
) {
3812 long argc
= RARRAY_LEN(eargs
);
3813 const VALUE
*argv
= RARRAY_CONST_PTR(eargs
); /* WB: no new reference */
3818 rb_str_buf_cat2(str
, "(");
3820 if (RB_TYPE_P(argv
[argc
-1], T_HASH
)) {
3822 rb_hash_foreach(argv
[argc
-1], key_symbol_p
, (VALUE
)&all_key
);
3823 if (all_key
) kwds
= argv
[--argc
];
3827 VALUE arg
= *argv
++;
3829 rb_str_append(str
, rb_inspect(arg
));
3830 rb_str_buf_cat2(str
, ", ");
3833 rb_hash_foreach(kwds
, kwd_append
, str
);
3835 rb_str_set_len(str
, RSTRING_LEN(str
)-2); /* drop the last ", " */
3836 rb_str_buf_cat2(str
, ")");
3840 rb_str_buf_cat2(str
, ")");
3847 * aseq == obj -> true or false
3849 * Returns <code>true</code> only if +obj+ is an Enumerator::ArithmeticSequence,
3850 * has equivalent begin, end, step, and exclude_end? settings.
3853 arith_seq_eq(VALUE self
, VALUE other
)
3855 if (!RTEST(rb_obj_is_kind_of(other
, rb_cArithSeq
))) {
3859 if (!rb_equal(arith_seq_begin(self
), arith_seq_begin(other
))) {
3863 if (!rb_equal(arith_seq_end(self
), arith_seq_end(other
))) {
3867 if (!rb_equal(arith_seq_step(self
), arith_seq_step(other
))) {
3871 if (arith_seq_exclude_end_p(self
) != arith_seq_exclude_end_p(other
)) {
3880 * aseq.hash -> integer
3882 * Compute a hash-value for this arithmetic sequence.
3883 * Two arithmetic sequences with same begin, end, step, and exclude_end?
3884 * values will generate the same hash-value.
3886 * See also Object#hash.
3889 arith_seq_hash(VALUE self
)
3894 hash
= rb_hash_start(arith_seq_exclude_end_p(self
));
3895 v
= rb_hash(arith_seq_begin(self
));
3896 hash
= rb_hash_uint(hash
, NUM2LONG(v
));
3897 v
= rb_hash(arith_seq_end(self
));
3898 hash
= rb_hash_uint(hash
, NUM2LONG(v
));
3899 v
= rb_hash(arith_seq_step(self
));
3900 hash
= rb_hash_uint(hash
, NUM2LONG(v
));
3901 hash
= rb_hash_end(hash
);
3903 return ST2FIX(hash
);
3906 #define NUM_GE(x, y) RTEST(rb_num_coerce_relop((x), (y), idGE))
3908 struct arith_seq_gen
{
3917 * aseq.each {|i| block } -> aseq
3921 arith_seq_each(VALUE self
)
3923 VALUE c
, e
, s
, len_1
, last
;
3926 if (!rb_block_given_p()) return self
;
3928 c
= arith_seq_begin(self
);
3929 e
= arith_seq_end(self
);
3930 s
= arith_seq_step(self
);
3931 x
= arith_seq_exclude_end_p(self
);
3933 if (!RB_TYPE_P(s
, T_COMPLEX
) && ruby_float_step(c
, e
, s
, x
, TRUE
)) {
3940 c
= rb_int_plus(c
, s
);
3946 if (rb_equal(s
, INT2FIX(0))) {
3954 len_1
= num_idiv(num_minus(e
, c
), s
);
3955 last
= num_plus(c
, num_mul(s
, len_1
));
3956 if (x
&& rb_equal(last
, e
)) {
3957 last
= num_minus(last
, s
);
3960 if (rb_num_negative_int_p(s
)) {
3961 while (NUM_GE(c
, last
)) {
3967 while (NUM_GE(last
, c
)) {
3978 * aseq.size -> num or nil
3980 * Returns the number of elements in this arithmetic sequence if it is a finite
3981 * sequence. Otherwise, returns <code>nil</code>.
3984 arith_seq_size(VALUE self
)
3986 VALUE b
, e
, s
, len_1
, len
, last
;
3989 b
= arith_seq_begin(self
);
3990 e
= arith_seq_end(self
);
3991 s
= arith_seq_step(self
);
3992 x
= arith_seq_exclude_end_p(self
);
3994 if (RB_FLOAT_TYPE_P(b
) || RB_FLOAT_TYPE_P(e
) || RB_FLOAT_TYPE_P(s
)) {
3998 if (rb_num_negative_int_p(s
)) {
4009 n
= ruby_float_step_size(NUM2DBL(b
), ee
, NUM2DBL(s
), x
);
4010 if (isinf(n
)) return DBL2NUM(n
);
4011 if (POSFIXABLE(n
)) return LONG2FIX((long)n
);
4012 return rb_dbl2big(n
);
4016 return DBL2NUM(HUGE_VAL
);
4019 if (!rb_obj_is_kind_of(s
, rb_cNumeric
)) {
4023 if (rb_equal(s
, INT2FIX(0))) {
4024 return DBL2NUM(HUGE_VAL
);
4027 len_1
= rb_int_idiv(rb_int_minus(e
, b
), s
);
4028 if (rb_num_negative_int_p(len_1
)) {
4032 last
= rb_int_plus(b
, rb_int_mul(s
, len_1
));
4033 if (x
&& rb_equal(last
, e
)) {
4037 len
= rb_int_plus(len_1
, INT2FIX(1));
4043 #define sym(name) ID2SYM(rb_intern_const(name))
4045 InitVM_Enumerator(void)
4047 ID id_private
= rb_intern_const("private");
4049 rb_define_method(rb_mKernel
, "to_enum", obj_to_enum
, -1);
4050 rb_define_method(rb_mKernel
, "enum_for", obj_to_enum
, -1);
4052 rb_cEnumerator
= rb_define_class("Enumerator", rb_cObject
);
4053 rb_include_module(rb_cEnumerator
, rb_mEnumerable
);
4055 rb_define_alloc_func(rb_cEnumerator
, enumerator_allocate
);
4056 rb_define_method(rb_cEnumerator
, "initialize", enumerator_initialize
, -1);
4057 rb_define_method(rb_cEnumerator
, "initialize_copy", enumerator_init_copy
, 1);
4058 rb_define_method(rb_cEnumerator
, "each", enumerator_each
, -1);
4059 rb_define_method(rb_cEnumerator
, "each_with_index", enumerator_each_with_index
, 0);
4060 rb_define_method(rb_cEnumerator
, "each_with_object", enumerator_with_object
, 1);
4061 rb_define_method(rb_cEnumerator
, "with_index", enumerator_with_index
, -1);
4062 rb_define_method(rb_cEnumerator
, "with_object", enumerator_with_object
, 1);
4063 rb_define_method(rb_cEnumerator
, "next_values", enumerator_next_values
, 0);
4064 rb_define_method(rb_cEnumerator
, "peek_values", enumerator_peek_values_m
, 0);
4065 rb_define_method(rb_cEnumerator
, "next", enumerator_next
, 0);
4066 rb_define_method(rb_cEnumerator
, "peek", enumerator_peek
, 0);
4067 rb_define_method(rb_cEnumerator
, "feed", enumerator_feed
, 1);
4068 rb_define_method(rb_cEnumerator
, "rewind", enumerator_rewind
, 0);
4069 rb_define_method(rb_cEnumerator
, "inspect", enumerator_inspect
, 0);
4070 rb_define_method(rb_cEnumerator
, "size", enumerator_size
, 0);
4071 rb_define_method(rb_cEnumerator
, "+", enumerator_plus
, 1);
4072 rb_define_method(rb_mEnumerable
, "chain", enum_chain
, -1);
4075 rb_cLazy
= rb_define_class_under(rb_cEnumerator
, "Lazy", rb_cEnumerator
);
4076 rb_define_method(rb_mEnumerable
, "lazy", enumerable_lazy
, 0);
4078 rb_define_alias(rb_cLazy
, "_enumerable_map", "map");
4079 rb_define_alias(rb_cLazy
, "_enumerable_collect", "collect");
4080 rb_define_alias(rb_cLazy
, "_enumerable_flat_map", "flat_map");
4081 rb_define_alias(rb_cLazy
, "_enumerable_collect_concat", "collect_concat");
4082 rb_define_alias(rb_cLazy
, "_enumerable_select", "select");
4083 rb_define_alias(rb_cLazy
, "_enumerable_find_all", "find_all");
4084 rb_define_alias(rb_cLazy
, "_enumerable_filter", "filter");
4085 rb_define_alias(rb_cLazy
, "_enumerable_filter_map", "filter_map");
4086 rb_define_alias(rb_cLazy
, "_enumerable_reject", "reject");
4087 rb_define_alias(rb_cLazy
, "_enumerable_grep", "grep");
4088 rb_define_alias(rb_cLazy
, "_enumerable_grep_v", "grep_v");
4089 rb_define_alias(rb_cLazy
, "_enumerable_zip", "zip");
4090 rb_define_alias(rb_cLazy
, "_enumerable_take", "take");
4091 rb_define_alias(rb_cLazy
, "_enumerable_take_while", "take_while");
4092 rb_define_alias(rb_cLazy
, "_enumerable_drop", "drop");
4093 rb_define_alias(rb_cLazy
, "_enumerable_drop_while", "drop_while");
4094 rb_define_alias(rb_cLazy
, "_enumerable_uniq", "uniq");
4095 rb_define_private_method(rb_cLazy
, "_enumerable_with_index", enumerator_with_index
, -1);
4097 rb_funcall(rb_cLazy
, id_private
, 1, sym("_enumerable_map"));
4098 rb_funcall(rb_cLazy
, id_private
, 1, sym("_enumerable_collect"));
4099 rb_funcall(rb_cLazy
, id_private
, 1, sym("_enumerable_flat_map"));
4100 rb_funcall(rb_cLazy
, id_private
, 1, sym("_enumerable_collect_concat"));
4101 rb_funcall(rb_cLazy
, id_private
, 1, sym("_enumerable_select"));
4102 rb_funcall(rb_cLazy
, id_private
, 1, sym("_enumerable_find_all"));
4103 rb_funcall(rb_cLazy
, id_private
, 1, sym("_enumerable_filter"));
4104 rb_funcall(rb_cLazy
, id_private
, 1, sym("_enumerable_filter_map"));
4105 rb_funcall(rb_cLazy
, id_private
, 1, sym("_enumerable_reject"));
4106 rb_funcall(rb_cLazy
, id_private
, 1, sym("_enumerable_grep"));
4107 rb_funcall(rb_cLazy
, id_private
, 1, sym("_enumerable_grep_v"));
4108 rb_funcall(rb_cLazy
, id_private
, 1, sym("_enumerable_zip"));
4109 rb_funcall(rb_cLazy
, id_private
, 1, sym("_enumerable_take"));
4110 rb_funcall(rb_cLazy
, id_private
, 1, sym("_enumerable_take_while"));
4111 rb_funcall(rb_cLazy
, id_private
, 1, sym("_enumerable_drop"));
4112 rb_funcall(rb_cLazy
, id_private
, 1, sym("_enumerable_drop_while"));
4113 rb_funcall(rb_cLazy
, id_private
, 1, sym("_enumerable_uniq"));
4115 rb_define_method(rb_cLazy
, "initialize", lazy_initialize
, -1);
4116 rb_define_method(rb_cLazy
, "to_enum", lazy_to_enum
, -1);
4117 rb_define_method(rb_cLazy
, "enum_for", lazy_to_enum
, -1);
4118 rb_define_method(rb_cLazy
, "eager", lazy_eager
, 0);
4119 rb_define_method(rb_cLazy
, "map", lazy_map
, 0);
4120 rb_define_method(rb_cLazy
, "collect", lazy_map
, 0);
4121 rb_define_method(rb_cLazy
, "flat_map", lazy_flat_map
, 0);
4122 rb_define_method(rb_cLazy
, "collect_concat", lazy_flat_map
, 0);
4123 rb_define_method(rb_cLazy
, "select", lazy_select
, 0);
4124 rb_define_method(rb_cLazy
, "find_all", lazy_select
, 0);
4125 rb_define_method(rb_cLazy
, "filter", lazy_select
, 0);
4126 rb_define_method(rb_cLazy
, "filter_map", lazy_filter_map
, 0);
4127 rb_define_method(rb_cLazy
, "reject", lazy_reject
, 0);
4128 rb_define_method(rb_cLazy
, "grep", lazy_grep
, 1);
4129 rb_define_method(rb_cLazy
, "grep_v", lazy_grep_v
, 1);
4130 rb_define_method(rb_cLazy
, "zip", lazy_zip
, -1);
4131 rb_define_method(rb_cLazy
, "take", lazy_take
, 1);
4132 rb_define_method(rb_cLazy
, "take_while", lazy_take_while
, 0);
4133 rb_define_method(rb_cLazy
, "drop", lazy_drop
, 1);
4134 rb_define_method(rb_cLazy
, "drop_while", lazy_drop_while
, 0);
4135 rb_define_method(rb_cLazy
, "lazy", lazy_lazy
, 0);
4136 rb_define_method(rb_cLazy
, "chunk", lazy_super
, -1);
4137 rb_define_method(rb_cLazy
, "slice_before", lazy_super
, -1);
4138 rb_define_method(rb_cLazy
, "slice_after", lazy_super
, -1);
4139 rb_define_method(rb_cLazy
, "slice_when", lazy_super
, -1);
4140 rb_define_method(rb_cLazy
, "chunk_while", lazy_super
, -1);
4141 rb_define_method(rb_cLazy
, "uniq", lazy_uniq
, 0);
4142 rb_define_method(rb_cLazy
, "compact", lazy_compact
, 0);
4143 rb_define_method(rb_cLazy
, "with_index", lazy_with_index
, -1);
4145 lazy_use_super_method
= rb_hash_new_with_size(18);
4146 rb_hash_aset(lazy_use_super_method
, sym("map"), sym("_enumerable_map"));
4147 rb_hash_aset(lazy_use_super_method
, sym("collect"), sym("_enumerable_collect"));
4148 rb_hash_aset(lazy_use_super_method
, sym("flat_map"), sym("_enumerable_flat_map"));
4149 rb_hash_aset(lazy_use_super_method
, sym("collect_concat"), sym("_enumerable_collect_concat"));
4150 rb_hash_aset(lazy_use_super_method
, sym("select"), sym("_enumerable_select"));
4151 rb_hash_aset(lazy_use_super_method
, sym("find_all"), sym("_enumerable_find_all"));
4152 rb_hash_aset(lazy_use_super_method
, sym("filter"), sym("_enumerable_filter"));
4153 rb_hash_aset(lazy_use_super_method
, sym("filter_map"), sym("_enumerable_filter_map"));
4154 rb_hash_aset(lazy_use_super_method
, sym("reject"), sym("_enumerable_reject"));
4155 rb_hash_aset(lazy_use_super_method
, sym("grep"), sym("_enumerable_grep"));
4156 rb_hash_aset(lazy_use_super_method
, sym("grep_v"), sym("_enumerable_grep_v"));
4157 rb_hash_aset(lazy_use_super_method
, sym("zip"), sym("_enumerable_zip"));
4158 rb_hash_aset(lazy_use_super_method
, sym("take"), sym("_enumerable_take"));
4159 rb_hash_aset(lazy_use_super_method
, sym("take_while"), sym("_enumerable_take_while"));
4160 rb_hash_aset(lazy_use_super_method
, sym("drop"), sym("_enumerable_drop"));
4161 rb_hash_aset(lazy_use_super_method
, sym("drop_while"), sym("_enumerable_drop_while"));
4162 rb_hash_aset(lazy_use_super_method
, sym("uniq"), sym("_enumerable_uniq"));
4163 rb_hash_aset(lazy_use_super_method
, sym("with_index"), sym("_enumerable_with_index"));
4164 rb_obj_freeze(lazy_use_super_method
);
4165 rb_gc_register_mark_object(lazy_use_super_method
);
4167 #if 0 /* for RDoc */
4168 rb_define_method(rb_cLazy
, "to_a", lazy_to_a
, 0);
4169 rb_define_method(rb_cLazy
, "chunk", lazy_chunk
, 0);
4170 rb_define_method(rb_cLazy
, "chunk_while", lazy_chunk_while
, 0);
4171 rb_define_method(rb_cLazy
, "slice_after", lazy_slice_after
, 0);
4172 rb_define_method(rb_cLazy
, "slice_before", lazy_slice_before
, 0);
4173 rb_define_method(rb_cLazy
, "slice_when", lazy_slice_when
, 0);
4175 rb_define_alias(rb_cLazy
, "force", "to_a");
4177 rb_eStopIteration
= rb_define_class("StopIteration", rb_eIndexError
);
4178 rb_define_method(rb_eStopIteration
, "result", stop_result
, 0);
4181 rb_cGenerator
= rb_define_class_under(rb_cEnumerator
, "Generator", rb_cObject
);
4182 rb_include_module(rb_cGenerator
, rb_mEnumerable
);
4183 rb_define_alloc_func(rb_cGenerator
, generator_allocate
);
4184 rb_define_method(rb_cGenerator
, "initialize", generator_initialize
, -1);
4185 rb_define_method(rb_cGenerator
, "initialize_copy", generator_init_copy
, 1);
4186 rb_define_method(rb_cGenerator
, "each", generator_each
, -1);
4189 rb_cYielder
= rb_define_class_under(rb_cEnumerator
, "Yielder", rb_cObject
);
4190 rb_define_alloc_func(rb_cYielder
, yielder_allocate
);
4191 rb_define_method(rb_cYielder
, "initialize", yielder_initialize
, 0);
4192 rb_define_method(rb_cYielder
, "yield", yielder_yield
, -2);
4193 rb_define_method(rb_cYielder
, "<<", yielder_yield_push
, 1);
4194 rb_define_method(rb_cYielder
, "to_proc", yielder_to_proc
, 0);
4197 rb_cEnumProducer
= rb_define_class_under(rb_cEnumerator
, "Producer", rb_cObject
);
4198 rb_define_alloc_func(rb_cEnumProducer
, producer_allocate
);
4199 rb_define_method(rb_cEnumProducer
, "each", producer_each
, 0);
4200 rb_define_singleton_method(rb_cEnumerator
, "produce", enumerator_s_produce
, -1);
4203 rb_cEnumChain
= rb_define_class_under(rb_cEnumerator
, "Chain", rb_cEnumerator
);
4204 rb_define_alloc_func(rb_cEnumChain
, enum_chain_allocate
);
4205 rb_define_method(rb_cEnumChain
, "initialize", enum_chain_initialize
, -2);
4206 rb_define_method(rb_cEnumChain
, "initialize_copy", enum_chain_init_copy
, 1);
4207 rb_define_method(rb_cEnumChain
, "each", enum_chain_each
, -1);
4208 rb_define_method(rb_cEnumChain
, "size", enum_chain_size
, 0);
4209 rb_define_method(rb_cEnumChain
, "rewind", enum_chain_rewind
, 0);
4210 rb_define_method(rb_cEnumChain
, "inspect", enum_chain_inspect
, 0);
4211 rb_undef_method(rb_cEnumChain
, "feed");
4212 rb_undef_method(rb_cEnumChain
, "next");
4213 rb_undef_method(rb_cEnumChain
, "next_values");
4214 rb_undef_method(rb_cEnumChain
, "peek");
4215 rb_undef_method(rb_cEnumChain
, "peek_values");
4217 /* ArithmeticSequence */
4218 rb_cArithSeq
= rb_define_class_under(rb_cEnumerator
, "ArithmeticSequence", rb_cEnumerator
);
4219 rb_undef_alloc_func(rb_cArithSeq
);
4220 rb_undef_method(CLASS_OF(rb_cArithSeq
), "new");
4221 rb_define_method(rb_cArithSeq
, "begin", arith_seq_begin
, 0);
4222 rb_define_method(rb_cArithSeq
, "end", arith_seq_end
, 0);
4223 rb_define_method(rb_cArithSeq
, "exclude_end?", arith_seq_exclude_end
, 0);
4224 rb_define_method(rb_cArithSeq
, "step", arith_seq_step
, 0);
4225 rb_define_method(rb_cArithSeq
, "first", arith_seq_first
, -1);
4226 rb_define_method(rb_cArithSeq
, "last", arith_seq_last
, -1);
4227 rb_define_method(rb_cArithSeq
, "inspect", arith_seq_inspect
, 0);
4228 rb_define_method(rb_cArithSeq
, "==", arith_seq_eq
, 1);
4229 rb_define_method(rb_cArithSeq
, "===", arith_seq_eq
, 1);
4230 rb_define_method(rb_cArithSeq
, "eql?", arith_seq_eq
, 1);
4231 rb_define_method(rb_cArithSeq
, "hash", arith_seq_hash
, 0);
4232 rb_define_method(rb_cArithSeq
, "each", arith_seq_each
, 0);
4233 rb_define_method(rb_cArithSeq
, "size", arith_seq_size
, 0);
4235 rb_provide("enumerator.so"); /* for backward compatibility */
4240 Init_Enumerator(void)
4242 id_rewind
= rb_intern_const("rewind");
4243 id_new
= rb_intern_const("new");
4244 id_next
= rb_intern_const("next");
4245 id_result
= rb_intern_const("result");
4246 id_receiver
= rb_intern_const("receiver");
4247 id_arguments
= rb_intern_const("arguments");
4248 id_memo
= rb_intern_const("memo");
4249 id_method
= rb_intern_const("method");
4250 id_force
= rb_intern_const("force");
4251 id_to_enum
= rb_intern_const("to_enum");
4252 id_begin
= rb_intern_const("begin");
4253 id_end
= rb_intern_const("end");
4254 id_step
= rb_intern_const("step");
4255 id_exclude_end
= rb_intern_const("exclude_end");
4256 sym_each
= ID2SYM(id_each
);
4257 sym_cycle
= ID2SYM(rb_intern_const("cycle"));
4258 sym_yield
= ID2SYM(rb_intern_const("yield"));