1 /************************************************
3 enumerator.c - provides Enumerator class
7 Copyright (C) 2001-2003 Akinori MUSHA
9 $Idaemons: /home/cvs/rb/enumerator/enumerator.c,v 1.1.1.1 2001/07/15 10:12:48 knu Exp $
10 $RoughId: enumerator.c,v 1.6 2003/07/27 11:03:24 nobu Exp $
13 ************************************************/
15 #include "ruby/internal/config.h"
23 #include "internal/class.h"
24 #include "internal/enumerator.h"
25 #include "internal/error.h"
26 #include "internal/hash.h"
27 #include "internal/imemo.h"
28 #include "internal/numeric.h"
29 #include "internal/range.h"
30 #include "internal/rational.h"
31 #include "ruby/ruby.h"
34 * Document-class: Enumerator
36 * A class which allows both internal and external iteration.
38 * An Enumerator can be created by the following methods.
43 * Most methods have two forms: a block form where the contents
44 * are evaluated for each item in the enumeration, and a non-block form
45 * which returns a new Enumerator wrapping the iteration.
47 * enumerator = %w(one two three).each
48 * puts enumerator.class # => Enumerator
50 * enumerator.each_with_object("foo") do |item, obj|
51 * puts "#{obj}: #{item}"
58 * enum_with_obj = enumerator.each_with_object("foo")
59 * puts enum_with_obj.class # => Enumerator
61 * enum_with_obj.each do |item, obj|
62 * puts "#{obj}: #{item}"
69 * This allows you to chain Enumerators together. For example, you
70 * can map a list's elements to strings containing the index
71 * and the element as a string via:
73 * puts %w[foo bar baz].map.with_index { |w, i| "#{i}:#{w}" }
74 * # => ["0:foo", "1:bar", "2:baz"]
76 * == External Iteration
78 * An Enumerator can also be used as an external iterator.
79 * For example, Enumerator#next returns the next value of the iterator
80 * or raises StopIteration if the Enumerator is at the end.
82 * e = [1,2,3].each # returns an enumerator object.
86 * puts e.next # raises StopIteration
88 * +next+, +next_values+, +peek+, and +peek_values+ are the only methods
89 * which use external iteration (and Array#zip(Enumerable-not-Array) which uses +next+ internally).
91 * These methods do not affect other internal enumeration methods,
92 * unless the underlying iteration method itself has side effects, e.g. IO#each_line.
94 * FrozenError will be raised if these methods are called against a frozen enumerator.
95 * Since +rewind+ and +feed+ also change state for external iteration,
96 * these methods may raise FrozenError too.
98 * External iteration differs *significantly* from internal iteration
99 * due to using a Fiber:
100 * - The Fiber adds some overhead compared to internal enumeration.
101 * - The stacktrace will only include the stack from the Enumerator, not above.
102 * - Fiber-local variables are *not* inherited inside the Enumerator Fiber,
103 * which instead starts with no Fiber-local variables.
104 * - Fiber storage variables *are* inherited and are designed
105 * to handle Enumerator Fibers. Assigning to a Fiber storage variable
106 * only affects the current Fiber, so if you want to change state
107 * in the caller Fiber of the Enumerator Fiber, you need to use an
108 * extra indirection (e.g., use some object in the Fiber storage
109 * variable and mutate some ivar of it).
113 * Thread.current[:fiber_local] = 1
114 * Fiber[:storage_var] = 1
115 * e = Enumerator.new do |y|
116 * p Thread.current[:fiber_local] # for external iteration: nil, for internal iteration: 1
117 * p Fiber[:storage_var] # => 1, inherited
118 * Fiber[:storage_var] += 1
123 * p Fiber[:storage_var] # => 1 (it ran in a different Fiber)
126 * p Fiber[:storage_var] # => 2 (it ran in the same Fiber/"stack" as the current Fiber)
128 * == Convert External Iteration to Internal Iteration
130 * You can use an external iterator to implement an internal iterator as follows:
136 * rescue StopIteration
153 * # use o.each as an internal iterator directly.
154 * puts o.each {|*x| puts x; [:b, *x] }
155 * # => [], [:b], [1], [:b, 1], [1, 2], [:b, 1, 2], 3
157 * # convert o.each to an external iterator for
158 * # implementing an internal iterator.
159 * puts ext_each(o.to_enum) {|*x| puts x; [:b, *x] }
160 * # => [], [:b], [1], [:b, 1], [1, 2], [:b, 1, 2], 3
163 VALUE rb_cEnumerator
;
164 static VALUE rb_cLazy
;
165 static ID id_rewind
, id_new
, id_to_enum
, id_each_entry
;
166 static ID id_next
, id_result
, id_receiver
, id_arguments
, id_memo
, id_method
, id_force
;
167 static ID id_begin
, id_end
, id_step
, id_exclude_end
;
168 static VALUE sym_each
, sym_cycle
, sym_yield
;
170 static VALUE lazy_use_super_method
;
172 extern ID ruby_static_id_cause
;
174 #define id_call idCall
175 #define id_cause ruby_static_id_cause
176 #define id_each idEach
178 #define id_initialize idInitialize
179 #define id_size idSize
181 VALUE rb_eStopIteration
;
194 rb_enumerator_size_func
*size_fn
;
198 RUBY_REFERENCES(enumerator_refs
) = {
199 RUBY_REF_EDGE(struct enumerator
, obj
),
200 RUBY_REF_EDGE(struct enumerator
, args
),
201 RUBY_REF_EDGE(struct enumerator
, fib
),
202 RUBY_REF_EDGE(struct enumerator
, dst
),
203 RUBY_REF_EDGE(struct enumerator
, lookahead
),
204 RUBY_REF_EDGE(struct enumerator
, feedvalue
),
205 RUBY_REF_EDGE(struct enumerator
, stop_exc
),
206 RUBY_REF_EDGE(struct enumerator
, size
),
207 RUBY_REF_EDGE(struct enumerator
, procs
),
211 static VALUE rb_cGenerator
, rb_cYielder
, rb_cEnumProducer
;
227 typedef struct MEMO
*lazyenum_proc_func(VALUE
, struct MEMO
*, VALUE
, long);
228 typedef VALUE
lazyenum_size_func(VALUE
, VALUE
);
229 typedef int lazyenum_precheck_func(VALUE proc_entry
);
231 lazyenum_proc_func
*proc
;
232 lazyenum_size_func
*size
;
233 lazyenum_precheck_func
*precheck
;
239 const lazyenum_funcs
*fn
;
242 static VALUE
generator_allocate(VALUE klass
);
243 static VALUE
generator_init(VALUE obj
, VALUE proc
);
245 static VALUE rb_cEnumChain
;
252 static VALUE rb_cEnumProduct
;
254 struct enum_product
{
260 static const rb_data_type_t enumerator_data_type
= {
263 RUBY_REFS_LIST_PTR(enumerator_refs
),
264 RUBY_TYPED_DEFAULT_FREE
,
265 NULL
, // Nothing allocated externally, so don't need a memsize function
268 0, NULL
, RUBY_TYPED_FREE_IMMEDIATELY
| RUBY_TYPED_WB_PROTECTED
| RUBY_TYPED_DECL_MARKING
| RUBY_TYPED_EMBEDDABLE
271 static struct enumerator
*
272 enumerator_ptr(VALUE obj
)
274 struct enumerator
*ptr
;
276 TypedData_Get_Struct(obj
, struct enumerator
, &enumerator_data_type
, ptr
);
277 if (!ptr
|| UNDEF_P(ptr
->obj
)) {
278 rb_raise(rb_eArgError
, "uninitialized enumerator");
284 proc_entry_mark(void *p
)
286 struct proc_entry
*ptr
= p
;
287 rb_gc_mark_movable(ptr
->proc
);
288 rb_gc_mark_movable(ptr
->memo
);
292 proc_entry_compact(void *p
)
294 struct proc_entry
*ptr
= p
;
295 ptr
->proc
= rb_gc_location(ptr
->proc
);
296 ptr
->memo
= rb_gc_location(ptr
->memo
);
299 static const rb_data_type_t proc_entry_data_type
= {
303 RUBY_TYPED_DEFAULT_FREE
,
304 NULL
, // Nothing allocated externally, so don't need a memsize function
307 0, 0, RUBY_TYPED_FREE_IMMEDIATELY
| RUBY_TYPED_WB_PROTECTED
| RUBY_TYPED_EMBEDDABLE
310 static struct proc_entry
*
311 proc_entry_ptr(VALUE proc_entry
)
313 struct proc_entry
*ptr
;
315 TypedData_Get_Struct(proc_entry
, struct proc_entry
, &proc_entry_data_type
, ptr
);
322 * obj.to_enum(method = :each, *args) -> enum
323 * obj.enum_for(method = :each, *args) -> enum
324 * obj.to_enum(method = :each, *args) {|*args| block} -> enum
325 * obj.enum_for(method = :each, *args){|*args| block} -> enum
327 * Creates a new Enumerator which will enumerate by calling +method+ on
328 * +obj+, passing +args+ if any. What was _yielded_ by method becomes
329 * values of enumerator.
331 * If a block is given, it will be used to calculate the size of
332 * the enumerator without the need to iterate it (see Enumerator#size).
338 * enum = str.enum_for(:each_byte)
339 * enum.each { |b| puts b }
344 * # protect an array from being modified by some_method
346 * some_method(a.to_enum)
348 * # String#split in block form is more memory-effective:
349 * very_large_string.split("|") { |chunk| return chunk if chunk.include?('DATE') }
350 * # This could be rewritten more idiomatically with to_enum:
351 * very_large_string.to_enum(:split, "|").lazy.grep(/DATE/).first
353 * It is typical to call to_enum when defining methods for
354 * a generic Enumerable, in case no block is passed.
356 * Here is such an example, with parameter passing and a sizing block:
359 * # a generic method to repeat the values of any enumerable
361 * raise ArgumentError, "#{n} is negative!" if n < 0
362 * unless block_given?
363 * return to_enum(__method__, n) do # __method__ is :repeat here
364 * sz = size # Call size and multiply by n...
365 * sz * n if sz # but return nil if size itself is nil
369 * n.times { yield *val }
374 * %i[hello world].repeat(2) { |w| puts w }
375 * # => Prints 'hello', 'hello', 'world', 'world'
376 * enum = (1..14).repeat(3)
377 * # => returns an Enumerator when called without a block
378 * enum.first(4) # => [1, 1, 1, 2]
382 obj_to_enum(int argc
, VALUE
*argv
, VALUE obj
)
384 VALUE enumerator
, meth
= sym_each
;
390 enumerator
= rb_enumeratorize_with_size(obj
, meth
, argc
, argv
, 0);
391 if (rb_block_given_p()) {
392 RB_OBJ_WRITE(enumerator
, &enumerator_ptr(enumerator
)->size
, rb_block_proc());
398 enumerator_allocate(VALUE klass
)
400 struct enumerator
*ptr
;
403 enum_obj
= TypedData_Make_Struct(klass
, struct enumerator
, &enumerator_data_type
, ptr
);
410 enumerator_init(VALUE enum_obj
, VALUE obj
, VALUE meth
, int argc
, const VALUE
*argv
, rb_enumerator_size_func
*size_fn
, VALUE size
, int kw_splat
)
412 struct enumerator
*ptr
;
414 rb_check_frozen(enum_obj
);
415 TypedData_Get_Struct(enum_obj
, struct enumerator
, &enumerator_data_type
, ptr
);
418 rb_raise(rb_eArgError
, "unallocated enumerator");
421 RB_OBJ_WRITE(enum_obj
, &ptr
->obj
, obj
);
422 ptr
->meth
= rb_to_id(meth
);
423 if (argc
) RB_OBJ_WRITE(enum_obj
, &ptr
->args
, rb_ary_new4(argc
, argv
));
426 ptr
->lookahead
= Qundef
;
427 ptr
->feedvalue
= Qundef
;
428 ptr
->stop_exc
= Qfalse
;
429 RB_OBJ_WRITE(enum_obj
, &ptr
->size
, size
);
430 ptr
->size_fn
= size_fn
;
431 ptr
->kw_splat
= kw_splat
;
437 convert_to_feasible_size_value(VALUE obj
)
442 else if (rb_respond_to(obj
, id_call
)) {
445 else if (RB_FLOAT_TYPE_P(obj
) && RFLOAT_VALUE(obj
) == HUGE_VAL
) {
449 return rb_to_int(obj
);
455 * Enumerator.new(size = nil) { |yielder| ... }
457 * Creates a new Enumerator object, which can be used as an
460 * Iteration is defined by the given block, in
461 * which a "yielder" object, given as block parameter, can be used to
462 * yield a value by calling the +yield+ method (aliased as <code><<</code>):
464 * fib = Enumerator.new do |y|
472 * fib.take(10) # => [1, 1, 2, 3, 5, 8, 13, 21, 34, 55]
474 * The optional parameter can be used to specify how to calculate the size
475 * in a lazy fashion (see Enumerator#size). It can either be a value or
479 enumerator_initialize(int argc
, VALUE
*argv
, VALUE obj
)
481 VALUE iter
= rb_block_proc();
482 VALUE recv
= generator_init(generator_allocate(rb_cGenerator
), iter
);
483 VALUE arg0
= rb_check_arity(argc
, 0, 1) ? argv
[0] : Qnil
;
484 VALUE size
= convert_to_feasible_size_value(arg0
);
486 return enumerator_init(obj
, recv
, sym_each
, 0, 0, 0, size
, false);
491 enumerator_init_copy(VALUE obj
, VALUE orig
)
493 struct enumerator
*ptr0
, *ptr1
;
495 if (!OBJ_INIT_COPY(obj
, orig
)) return obj
;
496 ptr0
= enumerator_ptr(orig
);
498 /* Fibers cannot be copied */
499 rb_raise(rb_eTypeError
, "can't copy execution context");
502 TypedData_Get_Struct(obj
, struct enumerator
, &enumerator_data_type
, ptr1
);
505 rb_raise(rb_eArgError
, "unallocated enumerator");
508 RB_OBJ_WRITE(obj
, &ptr1
->obj
, ptr0
->obj
);
509 RB_OBJ_WRITE(obj
, &ptr1
->meth
, ptr0
->meth
);
510 RB_OBJ_WRITE(obj
, &ptr1
->args
, ptr0
->args
);
512 ptr1
->lookahead
= Qundef
;
513 ptr1
->feedvalue
= Qundef
;
514 RB_OBJ_WRITE(obj
, &ptr1
->size
, ptr0
->size
);
515 ptr1
->size_fn
= ptr0
->size_fn
;
521 * For backwards compatibility; use rb_enumeratorize_with_size
524 rb_enumeratorize(VALUE obj
, VALUE meth
, int argc
, const VALUE
*argv
)
526 return rb_enumeratorize_with_size(obj
, meth
, argc
, argv
, 0);
529 static VALUE
lazy_to_enum_i(VALUE self
, VALUE meth
, int argc
, const VALUE
*argv
, rb_enumerator_size_func
*size_fn
, int kw_splat
);
530 static int lazy_precheck(VALUE procs
);
533 rb_enumeratorize_with_size_kw(VALUE obj
, VALUE meth
, int argc
, const VALUE
*argv
, rb_enumerator_size_func
*size_fn
, int kw_splat
)
535 VALUE base_class
= rb_cEnumerator
;
537 if (RTEST(rb_obj_is_kind_of(obj
, rb_cLazy
))) {
538 base_class
= rb_cLazy
;
540 else if (RTEST(rb_obj_is_kind_of(obj
, rb_cEnumChain
))) {
541 obj
= enumerator_init(enumerator_allocate(rb_cEnumerator
), obj
, sym_each
, 0, 0, 0, Qnil
, false);
544 return enumerator_init(enumerator_allocate(base_class
),
545 obj
, meth
, argc
, argv
, size_fn
, Qnil
, kw_splat
);
549 rb_enumeratorize_with_size(VALUE obj
, VALUE meth
, int argc
, const VALUE
*argv
, rb_enumerator_size_func
*size_fn
)
551 return rb_enumeratorize_with_size_kw(obj
, meth
, argc
, argv
, size_fn
, rb_keyword_given_p());
555 enumerator_block_call(VALUE obj
, rb_block_call_func
*func
, VALUE arg
)
558 const VALUE
*argv
= 0;
559 const struct enumerator
*e
= enumerator_ptr(obj
);
562 VALUE args
= e
->args
;
564 argc
= RARRAY_LENINT(args
);
565 argv
= RARRAY_CONST_PTR(args
);
568 VALUE ret
= rb_block_call_kw(e
->obj
, meth
, argc
, argv
, func
, arg
, e
->kw_splat
);
577 * enum.each { |elm| block } -> obj
579 * enum.each(*appending_args) { |elm| block } -> obj
580 * enum.each(*appending_args) -> an_enumerator
582 * Iterates over the block according to how this Enumerator was constructed.
583 * If no block and no arguments are given, returns self.
587 * "Hello, world!".scan(/\w+/) #=> ["Hello", "world"]
588 * "Hello, world!".to_enum(:scan, /\w+/).to_a #=> ["Hello", "world"]
589 * "Hello, world!".to_enum(:scan).each(/\w+/).to_a #=> ["Hello", "world"]
593 * def obj.each_arg(a, b=:b, *rest)
600 * enum = obj.to_enum :each_arg, :a, :x
602 * enum.each.to_a #=> [:a, :x, []]
603 * enum.each.equal?(enum) #=> true
604 * enum.each { |elm| elm } #=> :method_returned
606 * enum.each(:y, :z).to_a #=> [:a, :x, [:y, :z]]
607 * enum.each(:y, :z).equal?(enum) #=> false
608 * enum.each(:y, :z) { |elm| elm } #=> :method_returned
612 enumerator_each(int argc
, VALUE
*argv
, VALUE obj
)
614 struct enumerator
*e
= enumerator_ptr(obj
);
617 VALUE args
= (e
= enumerator_ptr(obj
= rb_obj_dup(obj
)))->args
;
619 #if SIZEOF_INT < SIZEOF_LONG
620 /* check int range overflow */
621 rb_long2int(RARRAY_LEN(args
) + argc
);
623 args
= rb_ary_dup(args
);
624 rb_ary_cat(args
, argv
, argc
);
627 args
= rb_ary_new4(argc
, argv
);
629 RB_OBJ_WRITE(obj
, &e
->args
, args
);
633 if (!rb_block_given_p()) return obj
;
635 if (!lazy_precheck(e
->procs
)) return Qnil
;
637 return enumerator_block_call(obj
, 0, obj
);
641 enumerator_with_index_i(RB_BLOCK_CALL_FUNC_ARGLIST(val
, m
))
643 struct MEMO
*memo
= (struct MEMO
*)m
;
644 VALUE idx
= memo
->v1
;
645 MEMO_V1_SET(memo
, rb_int_succ(idx
));
648 return rb_yield_values(2, val
, idx
);
650 return rb_yield_values(2, rb_ary_new4(argc
, argv
), idx
);
654 enumerator_size(VALUE obj
);
657 enumerator_enum_size(VALUE obj
, VALUE args
, VALUE eobj
)
659 return enumerator_size(obj
);
664 * e.with_index(offset = 0) {|(*args), idx| ... }
665 * e.with_index(offset = 0)
667 * Iterates the given block for each element with an index, which
668 * starts from +offset+. If no block is given, returns a new Enumerator
669 * that includes the index, starting from +offset+
671 * +offset+:: the starting index to use
675 enumerator_with_index(int argc
, VALUE
*argv
, VALUE obj
)
679 rb_check_arity(argc
, 0, 1);
680 RETURN_SIZED_ENUMERATOR(obj
, argc
, argv
, enumerator_enum_size
);
681 memo
= (!argc
|| NIL_P(memo
= argv
[0])) ? INT2FIX(0) : rb_to_int(memo
);
682 return enumerator_block_call(obj
, enumerator_with_index_i
, (VALUE
)MEMO_NEW(memo
, 0, 0));
687 * e.each_with_index {|(*args), idx| ... }
690 * Same as Enumerator#with_index(0), i.e. there is no starting offset.
692 * If no block is given, a new Enumerator is returned that includes the index.
696 enumerator_each_with_index(VALUE obj
)
698 return enumerator_with_index(0, NULL
, obj
);
702 enumerator_with_object_i(RB_BLOCK_CALL_FUNC_ARGLIST(val
, memo
))
705 return rb_yield_values(2, val
, memo
);
707 return rb_yield_values(2, rb_ary_new4(argc
, argv
), memo
);
712 * e.each_with_object(obj) {|(*args), obj| ... }
713 * e.each_with_object(obj)
714 * e.with_object(obj) {|(*args), obj| ... }
717 * Iterates the given block for each element with an arbitrary object, +obj+,
720 * If no block is given, returns a new Enumerator.
724 * to_three = Enumerator.new do |y|
730 * to_three_with_string = to_three.with_object("foo")
731 * to_three_with_string.each do |x,string|
732 * puts "#{string}: #{x}"
740 enumerator_with_object(VALUE obj
, VALUE memo
)
742 RETURN_SIZED_ENUMERATOR(obj
, 1, &memo
, enumerator_enum_size
);
743 enumerator_block_call(obj
, enumerator_with_object_i
, memo
);
749 next_ii(RB_BLOCK_CALL_FUNC_ARGLIST(i
, obj
))
751 struct enumerator
*e
= enumerator_ptr(obj
);
752 VALUE feedvalue
= Qnil
;
753 VALUE args
= rb_ary_new4(argc
, argv
);
754 rb_fiber_yield(1, &args
);
755 if (!UNDEF_P(e
->feedvalue
)) {
756 feedvalue
= e
->feedvalue
;
757 e
->feedvalue
= Qundef
;
763 next_i(RB_BLOCK_CALL_FUNC_ARGLIST(_
, obj
))
765 struct enumerator
*e
= enumerator_ptr(obj
);
769 result
= rb_block_call(obj
, id_each
, 0, 0, next_ii
, obj
);
770 RB_OBJ_WRITE(obj
, &e
->stop_exc
, rb_exc_new2(rb_eStopIteration
, "iteration reached an end"));
771 rb_ivar_set(e
->stop_exc
, id_result
, result
);
772 return rb_fiber_yield(1, &nil
);
776 next_init(VALUE obj
, struct enumerator
*e
)
778 VALUE curr
= rb_fiber_current();
779 RB_OBJ_WRITE(obj
, &e
->dst
, curr
);
780 RB_OBJ_WRITE(obj
, &e
->fib
, rb_fiber_new(next_i
, obj
));
781 e
->lookahead
= Qundef
;
785 get_next_values(VALUE obj
, struct enumerator
*e
)
790 VALUE exc
= e
->stop_exc
;
791 VALUE result
= rb_attr_get(exc
, id_result
);
792 VALUE mesg
= rb_attr_get(exc
, idMesg
);
793 if (!NIL_P(mesg
)) mesg
= rb_str_dup(mesg
);
794 VALUE stop_exc
= rb_exc_new_str(rb_eStopIteration
, mesg
);
795 rb_ivar_set(stop_exc
, id_cause
, exc
);
796 rb_ivar_set(stop_exc
, id_result
, result
);
797 rb_exc_raise(stop_exc
);
800 curr
= rb_fiber_current();
802 if (!e
->fib
|| !rb_fiber_alive_p(e
->fib
)) {
806 vs
= rb_fiber_resume(e
->fib
, 1, &curr
);
810 e
->lookahead
= Qundef
;
811 e
->feedvalue
= Qundef
;
812 rb_exc_raise(e
->stop_exc
);
819 * e.next_values -> array
821 * Returns the next object as an array in the enumerator, and moves the
822 * internal position forward. When the position reaches the end,
823 * StopIteration is raised.
825 * See class-level notes about external iterators.
827 * This method can be used to distinguish <code>yield</code> and <code>yield
853 * ## yield args next_values next
856 * # yield 1, 2 [1, 2] [1, 2]
857 * # yield nil [nil] nil
858 * # yield [1, 2] [[1, 2]] [1, 2]
863 enumerator_next_values(VALUE obj
)
865 struct enumerator
*e
= enumerator_ptr(obj
);
868 rb_check_frozen(obj
);
870 if (!UNDEF_P(e
->lookahead
)) {
872 e
->lookahead
= Qundef
;
876 return get_next_values(obj
, e
);
880 ary2sv(VALUE args
, int dup
)
882 if (!RB_TYPE_P(args
, T_ARRAY
))
885 switch (RARRAY_LEN(args
)) {
890 return RARRAY_AREF(args
, 0);
894 return rb_ary_dup(args
);
903 * Returns the next object in the enumerator, and moves the internal position
904 * forward. When the position reaches the end, StopIteration is raised.
913 * p e.next #raises StopIteration
915 * See class-level notes about external iterators.
920 enumerator_next(VALUE obj
)
922 VALUE vs
= enumerator_next_values(obj
);
923 return ary2sv(vs
, 0);
927 enumerator_peek_values(VALUE obj
)
929 struct enumerator
*e
= enumerator_ptr(obj
);
931 rb_check_frozen(obj
);
933 if (UNDEF_P(e
->lookahead
)) {
934 RB_OBJ_WRITE(obj
, &e
->lookahead
, get_next_values(obj
, e
));
942 * e.peek_values -> array
944 * Returns the next object as an array, similar to Enumerator#next_values, but
945 * doesn't move the internal position forward. If the position is already at
946 * the end, StopIteration is raised.
948 * See class-level notes about external iterators.
959 * p e.peek_values #=> []
961 * p e.peek_values #=> [1]
962 * p e.peek_values #=> [1]
964 * p e.peek_values #=> [1, 2]
966 * p e.peek_values # raises StopIteration
971 enumerator_peek_values_m(VALUE obj
)
973 return rb_ary_dup(enumerator_peek_values(obj
));
980 * Returns the next object in the enumerator, but doesn't move the internal
981 * position forward. If the position is already at the end, StopIteration
984 * See class-level notes about external iterators.
996 * p e.peek #raises StopIteration
1001 enumerator_peek(VALUE obj
)
1003 VALUE vs
= enumerator_peek_values(obj
);
1004 return ary2sv(vs
, 1);
1011 * Sets the value to be returned by the next yield inside +e+.
1013 * If the value is not set, the yield returns nil.
1015 * This value is cleared after being yielded.
1017 * # Array#map passes the array's elements to "yield" and collects the
1018 * # results of "yield" as an array.
1019 * # Following example shows that "next" returns the passed elements and
1020 * # values passed to "feed" are collected as an array which can be
1021 * # obtained by StopIteration#result.
1031 * rescue StopIteration
1032 * p $!.result #=> ["a", "b", "c"]
1037 * x = yield # (2) blocks
1038 * p x # (5) => "foo"
1039 * x = yield # (6) blocks
1041 * x = yield # (9) blocks
1042 * p x # not reached w/o another e.next
1047 * e.feed "foo" # (3)
1054 enumerator_feed(VALUE obj
, VALUE v
)
1056 struct enumerator
*e
= enumerator_ptr(obj
);
1058 rb_check_frozen(obj
);
1060 if (!UNDEF_P(e
->feedvalue
)) {
1061 rb_raise(rb_eTypeError
, "feed value already set");
1063 RB_OBJ_WRITE(obj
, &e
->feedvalue
, v
);
1072 * Rewinds the enumeration sequence to the beginning.
1074 * If the enclosed object responds to a "rewind" method, it is called.
1078 enumerator_rewind(VALUE obj
)
1080 struct enumerator
*e
= enumerator_ptr(obj
);
1082 rb_check_frozen(obj
);
1084 rb_check_funcall(e
->obj
, id_rewind
, 0, 0);
1088 e
->lookahead
= Qundef
;
1089 e
->feedvalue
= Qundef
;
1090 e
->stop_exc
= Qfalse
;
1094 static struct generator
*generator_ptr(VALUE obj
);
1095 static VALUE
append_method(VALUE obj
, VALUE str
, ID default_method
, VALUE default_args
);
1098 inspect_enumerator(VALUE obj
, VALUE dummy
, int recur
)
1100 struct enumerator
*e
;
1101 VALUE eobj
, str
, cname
;
1103 TypedData_Get_Struct(obj
, struct enumerator
, &enumerator_data_type
, e
);
1105 cname
= rb_obj_class(obj
);
1107 if (!e
|| UNDEF_P(e
->obj
)) {
1108 return rb_sprintf("#<%"PRIsVALUE
": uninitialized>", rb_class_path(cname
));
1112 str
= rb_sprintf("#<%"PRIsVALUE
": ...>", rb_class_path(cname
));
1119 eobj
= generator_ptr(e
->obj
)->obj
;
1120 /* In case procs chained enumerator traversing all proc entries manually */
1121 if (rb_obj_class(eobj
) == cname
) {
1122 str
= rb_inspect(eobj
);
1125 str
= rb_sprintf("#<%"PRIsVALUE
": %+"PRIsVALUE
">", rb_class_path(cname
), eobj
);
1127 for (i
= 0; i
< RARRAY_LEN(e
->procs
); i
++) {
1128 str
= rb_sprintf("#<%"PRIsVALUE
": %"PRIsVALUE
, cname
, str
);
1129 append_method(RARRAY_AREF(e
->procs
, i
), str
, e
->meth
, e
->args
);
1130 rb_str_buf_cat2(str
, ">");
1135 eobj
= rb_attr_get(obj
, id_receiver
);
1140 /* (1..100).each_cons(2) => "#<Enumerator: 1..100:each_cons(2)>" */
1141 str
= rb_sprintf("#<%"PRIsVALUE
": %+"PRIsVALUE
, rb_class_path(cname
), eobj
);
1142 append_method(obj
, str
, e
->meth
, e
->args
);
1144 rb_str_buf_cat2(str
, ">");
1150 key_symbol_p(VALUE key
, VALUE val
, VALUE arg
)
1152 if (SYMBOL_P(key
)) return ST_CONTINUE
;
1153 *(int *)arg
= FALSE
;
1158 kwd_append(VALUE key
, VALUE val
, VALUE str
)
1160 if (!SYMBOL_P(key
)) rb_raise(rb_eRuntimeError
, "non-symbol key inserted");
1161 rb_str_catf(str
, "% "PRIsVALUE
": %"PRIsVALUE
", ", key
, val
);
1166 append_method(VALUE obj
, VALUE str
, ID default_method
, VALUE default_args
)
1168 VALUE method
, eargs
;
1170 method
= rb_attr_get(obj
, id_method
);
1171 if (method
!= Qfalse
) {
1172 if (!NIL_P(method
)) {
1173 Check_Type(method
, T_SYMBOL
);
1174 method
= rb_sym2str(method
);
1177 method
= rb_id2str(default_method
);
1179 rb_str_buf_cat2(str
, ":");
1180 rb_str_buf_append(str
, method
);
1183 eargs
= rb_attr_get(obj
, id_arguments
);
1185 eargs
= default_args
;
1187 if (eargs
!= Qfalse
) {
1188 long argc
= RARRAY_LEN(eargs
);
1189 const VALUE
*argv
= RARRAY_CONST_PTR(eargs
); /* WB: no new reference */
1194 rb_str_buf_cat2(str
, "(");
1196 if (RB_TYPE_P(argv
[argc
-1], T_HASH
) && !RHASH_EMPTY_P(argv
[argc
-1])) {
1198 rb_hash_foreach(argv
[argc
-1], key_symbol_p
, (VALUE
)&all_key
);
1199 if (all_key
) kwds
= argv
[--argc
];
1203 VALUE arg
= *argv
++;
1205 rb_str_append(str
, rb_inspect(arg
));
1206 rb_str_buf_cat2(str
, ", ");
1209 rb_hash_foreach(kwds
, kwd_append
, str
);
1211 rb_str_set_len(str
, RSTRING_LEN(str
)-2);
1212 rb_str_buf_cat2(str
, ")");
1221 * e.inspect -> string
1223 * Creates a printable version of <i>e</i>.
1227 enumerator_inspect(VALUE obj
)
1229 return rb_exec_recursive(inspect_enumerator
, obj
, 0);
1234 * e.size -> int, Float::INFINITY or nil
1236 * Returns the size of the enumerator, or +nil+ if it can't be calculated lazily.
1238 * (1..100).to_a.permutation(4).size # => 94109400
1239 * loop.size # => Float::INFINITY
1240 * (1..100).drop_while.size # => nil
1244 enumerator_size(VALUE obj
)
1246 struct enumerator
*e
= enumerator_ptr(obj
);
1248 const VALUE
*argv
= NULL
;
1252 struct generator
*g
= generator_ptr(e
->obj
);
1253 VALUE receiver
= rb_check_funcall(g
->obj
, id_size
, 0, 0);
1256 for (i
= 0; i
< RARRAY_LEN(e
->procs
); i
++) {
1257 VALUE proc
= RARRAY_AREF(e
->procs
, i
);
1258 struct proc_entry
*entry
= proc_entry_ptr(proc
);
1259 lazyenum_size_func
*size_fn
= entry
->fn
->size
;
1263 receiver
= (*size_fn
)(proc
, receiver
);
1269 return (*e
->size_fn
)(e
->obj
, e
->args
, obj
);
1272 argc
= (int)RARRAY_LEN(e
->args
);
1273 argv
= RARRAY_CONST_PTR(e
->args
);
1275 size
= rb_check_funcall_kw(e
->size
, id_call
, argc
, argv
, e
->kw_splat
);
1276 if (!UNDEF_P(size
)) return size
;
1284 yielder_mark(void *p
)
1286 struct yielder
*ptr
= p
;
1287 rb_gc_mark_movable(ptr
->proc
);
1291 yielder_compact(void *p
)
1293 struct yielder
*ptr
= p
;
1294 ptr
->proc
= rb_gc_location(ptr
->proc
);
1297 static const rb_data_type_t yielder_data_type
= {
1301 RUBY_TYPED_DEFAULT_FREE
,
1305 0, 0, RUBY_TYPED_FREE_IMMEDIATELY
| RUBY_TYPED_WB_PROTECTED
| RUBY_TYPED_EMBEDDABLE
1308 static struct yielder
*
1309 yielder_ptr(VALUE obj
)
1311 struct yielder
*ptr
;
1313 TypedData_Get_Struct(obj
, struct yielder
, &yielder_data_type
, ptr
);
1314 if (!ptr
|| UNDEF_P(ptr
->proc
)) {
1315 rb_raise(rb_eArgError
, "uninitialized yielder");
1322 yielder_allocate(VALUE klass
)
1324 struct yielder
*ptr
;
1327 obj
= TypedData_Make_Struct(klass
, struct yielder
, &yielder_data_type
, ptr
);
1334 yielder_init(VALUE obj
, VALUE proc
)
1336 struct yielder
*ptr
;
1338 TypedData_Get_Struct(obj
, struct yielder
, &yielder_data_type
, ptr
);
1341 rb_raise(rb_eArgError
, "unallocated yielder");
1344 RB_OBJ_WRITE(obj
, &ptr
->proc
, proc
);
1351 yielder_initialize(VALUE obj
)
1355 return yielder_init(obj
, rb_block_proc());
1360 yielder_yield(VALUE obj
, VALUE args
)
1362 struct yielder
*ptr
= yielder_ptr(obj
);
1364 return rb_proc_call_kw(ptr
->proc
, args
, RB_PASS_CALLED_KEYWORDS
);
1369 yielder_yield_push(VALUE obj
, VALUE arg
)
1371 struct yielder
*ptr
= yielder_ptr(obj
);
1373 rb_proc_call_with_block(ptr
->proc
, 1, &arg
, Qnil
);
1379 * Returns a Proc object that takes arguments and yields them.
1381 * This method is implemented so that a Yielder object can be directly
1382 * passed to another method as a block argument.
1384 * enum = Enumerator.new { |y|
1385 * Dir.glob("*.rb") { |file|
1386 * File.open(file) { |f| f.each_line(&y) }
1391 yielder_to_proc(VALUE obj
)
1393 VALUE method
= rb_obj_method(obj
, sym_yield
);
1395 return rb_funcall(method
, idTo_proc
, 0);
1399 yielder_yield_i(RB_BLOCK_CALL_FUNC_ARGLIST(obj
, memo
))
1401 return rb_yield_values_kw(argc
, argv
, RB_PASS_CALLED_KEYWORDS
);
1407 return yielder_init(yielder_allocate(rb_cYielder
), rb_proc_new(yielder_yield_i
, 0));
1414 generator_mark(void *p
)
1416 struct generator
*ptr
= p
;
1417 rb_gc_mark_movable(ptr
->proc
);
1418 rb_gc_mark_movable(ptr
->obj
);
1422 generator_compact(void *p
)
1424 struct generator
*ptr
= p
;
1425 ptr
->proc
= rb_gc_location(ptr
->proc
);
1426 ptr
->obj
= rb_gc_location(ptr
->obj
);
1429 static const rb_data_type_t generator_data_type
= {
1433 RUBY_TYPED_DEFAULT_FREE
,
1437 0, 0, RUBY_TYPED_FREE_IMMEDIATELY
| RUBY_TYPED_WB_PROTECTED
| RUBY_TYPED_EMBEDDABLE
1440 static struct generator
*
1441 generator_ptr(VALUE obj
)
1443 struct generator
*ptr
;
1445 TypedData_Get_Struct(obj
, struct generator
, &generator_data_type
, ptr
);
1446 if (!ptr
|| UNDEF_P(ptr
->proc
)) {
1447 rb_raise(rb_eArgError
, "uninitialized generator");
1454 generator_allocate(VALUE klass
)
1456 struct generator
*ptr
;
1459 obj
= TypedData_Make_Struct(klass
, struct generator
, &generator_data_type
, ptr
);
1466 generator_init(VALUE obj
, VALUE proc
)
1468 struct generator
*ptr
;
1470 rb_check_frozen(obj
);
1471 TypedData_Get_Struct(obj
, struct generator
, &generator_data_type
, ptr
);
1474 rb_raise(rb_eArgError
, "unallocated generator");
1477 RB_OBJ_WRITE(obj
, &ptr
->proc
, proc
);
1484 generator_initialize(int argc
, VALUE
*argv
, VALUE obj
)
1491 proc
= rb_block_proc();
1494 rb_scan_args(argc
, argv
, "1", &proc
);
1496 if (!rb_obj_is_proc(proc
))
1497 rb_raise(rb_eTypeError
,
1498 "wrong argument type %"PRIsVALUE
" (expected Proc)",
1499 rb_obj_class(proc
));
1501 if (rb_block_given_p()) {
1502 rb_warn("given block not used");
1506 return generator_init(obj
, proc
);
1511 generator_init_copy(VALUE obj
, VALUE orig
)
1513 struct generator
*ptr0
, *ptr1
;
1515 if (!OBJ_INIT_COPY(obj
, orig
)) return obj
;
1517 ptr0
= generator_ptr(orig
);
1519 TypedData_Get_Struct(obj
, struct generator
, &generator_data_type
, ptr1
);
1522 rb_raise(rb_eArgError
, "unallocated generator");
1525 RB_OBJ_WRITE(obj
, &ptr1
->proc
, ptr0
->proc
);
1532 generator_each(int argc
, VALUE
*argv
, VALUE obj
)
1534 struct generator
*ptr
= generator_ptr(obj
);
1535 VALUE args
= rb_ary_new2(argc
+ 1);
1537 rb_ary_push(args
, yielder_new());
1539 rb_ary_cat(args
, argv
, argc
);
1542 return rb_proc_call_kw(ptr
->proc
, args
, RB_PASS_CALLED_KEYWORDS
);
1545 /* Lazy Enumerator methods */
1547 enum_size(VALUE self
)
1549 VALUE r
= rb_check_funcall(self
, id_size
, 0, 0);
1550 return UNDEF_P(r
) ? Qnil
: r
;
1554 lazyenum_size(VALUE self
, VALUE args
, VALUE eobj
)
1556 return enum_size(self
);
1559 #define lazy_receiver_size lazy_map_size
1562 lazy_init_iterator(RB_BLOCK_CALL_FUNC_ARGLIST(val
, m
))
1569 result
= rb_yield_values2(2, args
);
1573 int len
= rb_long2int((long)argc
+ 1);
1574 VALUE
*nargv
= ALLOCV_N(VALUE
, args
, len
);
1578 MEMCPY(nargv
+ 1, argv
, VALUE
, argc
);
1580 result
= rb_yield_values2(len
, nargv
);
1583 if (UNDEF_P(result
)) rb_iter_break();
1588 lazy_init_block_i(RB_BLOCK_CALL_FUNC_ARGLIST(val
, m
))
1590 rb_block_call(m
, id_each
, argc
-1, argv
+1, lazy_init_iterator
, val
);
1594 #define memo_value v2
1595 #define memo_flags u3.state
1596 #define LAZY_MEMO_BREAK 1
1597 #define LAZY_MEMO_PACKED 2
1598 #define LAZY_MEMO_BREAK_P(memo) ((memo)->memo_flags & LAZY_MEMO_BREAK)
1599 #define LAZY_MEMO_PACKED_P(memo) ((memo)->memo_flags & LAZY_MEMO_PACKED)
1600 #define LAZY_MEMO_SET_BREAK(memo) ((memo)->memo_flags |= LAZY_MEMO_BREAK)
1601 #define LAZY_MEMO_RESET_BREAK(memo) ((memo)->memo_flags &= ~LAZY_MEMO_BREAK)
1602 #define LAZY_MEMO_SET_VALUE(memo, value) MEMO_V2_SET(memo, value)
1603 #define LAZY_MEMO_SET_PACKED(memo) ((memo)->memo_flags |= LAZY_MEMO_PACKED)
1604 #define LAZY_MEMO_RESET_PACKED(memo) ((memo)->memo_flags &= ~LAZY_MEMO_PACKED)
1606 static VALUE
lazy_yielder_result(struct MEMO
*result
, VALUE yielder
, VALUE procs_array
, VALUE memos
, long i
);
1609 lazy_init_yielder(RB_BLOCK_CALL_FUNC_ARGLIST(_
, m
))
1611 VALUE yielder
= RARRAY_AREF(m
, 0);
1612 VALUE procs_array
= RARRAY_AREF(m
, 1);
1613 VALUE memos
= rb_attr_get(yielder
, id_memo
);
1614 struct MEMO
*result
;
1616 result
= MEMO_NEW(m
, rb_enum_values_pack(argc
, argv
),
1617 argc
> 1 ? LAZY_MEMO_PACKED
: 0);
1618 return lazy_yielder_result(result
, yielder
, procs_array
, memos
, 0);
1622 lazy_yielder_yield(struct MEMO
*result
, long memo_index
, int argc
, const VALUE
*argv
)
1624 VALUE m
= result
->v1
;
1625 VALUE yielder
= RARRAY_AREF(m
, 0);
1626 VALUE procs_array
= RARRAY_AREF(m
, 1);
1627 VALUE memos
= rb_attr_get(yielder
, id_memo
);
1628 LAZY_MEMO_SET_VALUE(result
, rb_enum_values_pack(argc
, argv
));
1630 LAZY_MEMO_SET_PACKED(result
);
1632 LAZY_MEMO_RESET_PACKED(result
);
1633 return lazy_yielder_result(result
, yielder
, procs_array
, memos
, memo_index
);
1637 lazy_yielder_result(struct MEMO
*result
, VALUE yielder
, VALUE procs_array
, VALUE memos
, long i
)
1641 for (; i
< RARRAY_LEN(procs_array
); i
++) {
1642 VALUE proc
= RARRAY_AREF(procs_array
, i
);
1643 struct proc_entry
*entry
= proc_entry_ptr(proc
);
1644 if (!(*entry
->fn
->proc
)(proc
, result
, memos
, i
)) {
1651 rb_funcall2(yielder
, idLTLT
, 1, &(result
->memo_value
));
1653 if (LAZY_MEMO_BREAK_P(result
)) {
1656 return result
->memo_value
;
1660 lazy_init_block(RB_BLOCK_CALL_FUNC_ARGLIST(val
, m
))
1662 VALUE procs
= RARRAY_AREF(m
, 1);
1664 rb_ivar_set(val
, id_memo
, rb_ary_new2(RARRAY_LEN(procs
)));
1665 rb_block_call(RARRAY_AREF(m
, 0), id_each
, 0, 0,
1666 lazy_init_yielder
, rb_ary_new3(2, val
, procs
));
1671 lazy_generator_init(VALUE enumerator
, VALUE procs
)
1675 struct generator
*gen_ptr
;
1676 struct enumerator
*e
= enumerator_ptr(enumerator
);
1678 if (RARRAY_LEN(procs
) > 0) {
1679 struct generator
*old_gen_ptr
= generator_ptr(e
->obj
);
1680 obj
= old_gen_ptr
->obj
;
1686 generator
= generator_allocate(rb_cGenerator
);
1688 rb_block_call(generator
, id_initialize
, 0, 0,
1689 lazy_init_block
, rb_ary_new3(2, obj
, procs
));
1691 gen_ptr
= generator_ptr(generator
);
1692 RB_OBJ_WRITE(generator
, &gen_ptr
->obj
, obj
);
1698 lazy_precheck(VALUE procs
)
1701 long num_procs
= RARRAY_LEN(procs
), i
= num_procs
;
1703 VALUE proc
= RARRAY_AREF(procs
, i
);
1704 struct proc_entry
*entry
= proc_entry_ptr(proc
);
1705 lazyenum_precheck_func
*precheck
= entry
->fn
->precheck
;
1706 if (precheck
&& !precheck(proc
)) return FALSE
;
1714 * Document-class: Enumerator::Lazy
1716 * Enumerator::Lazy is a special type of Enumerator, that allows constructing
1717 * chains of operations without evaluating them immediately, and evaluating
1718 * values on as-needed basis. In order to do so it redefines most of Enumerable
1719 * methods so that they just construct another lazy enumerator.
1721 * Enumerator::Lazy can be constructed from any Enumerable with the
1722 * Enumerable#lazy method.
1724 * lazy = (1..Float::INFINITY).lazy.select(&:odd?).drop(10).take_while { |i| i < 30 }
1725 * # => #<Enumerator::Lazy: #<Enumerator::Lazy: #<Enumerator::Lazy: #<Enumerator::Lazy: 1..Infinity>:select>:drop(10)>:take_while>
1727 * The real enumeration is performed when any non-redefined Enumerable method
1728 * is called, like Enumerable#first or Enumerable#to_a (the latter is aliased
1729 * as #force for more semantic code):
1735 * #=> [21, 23, 25, 27, 29]
1737 * Note that most Enumerable methods that could be called with or without
1738 * a block, on Enumerator::Lazy will always require a block:
1740 * [1, 2, 3].map #=> #<Enumerator: [1, 2, 3]:map>
1741 * [1, 2, 3].lazy.map # ArgumentError: tried to call lazy map without a block
1743 * This class allows idiomatic calculations on long or infinite sequences, as well
1744 * as chaining of calculations without constructing intermediate arrays.
1746 * Example for working with a slowly calculated sequence:
1748 * require 'open-uri'
1750 * # This will fetch all URLs before selecting
1752 * URLS.map { |u| JSON.parse(URI.open(u).read) }
1753 * .select { |data| data.key?('stats') }
1756 * # This will fetch URLs one-by-one, only till
1757 * # there is enough data to satisfy the condition
1758 * URLS.lazy.map { |u| JSON.parse(URI.open(u).read) }
1759 * .select { |data| data.key?('stats') }
1762 * Ending a chain with ".eager" generates a non-lazy enumerator, which
1763 * is suitable for returning or passing to another method that expects
1764 * a normal enumerator.
1769 * .flat_map(&:items)
1770 * .reject(&:disabled)
1774 * # This works lazily; if a checked item is found, it stops
1775 * # iteration and does not look into remaining groups.
1776 * first_checked = active_items.find(&:checked)
1778 * # This returns an array of items like a normal enumerator does.
1779 * all_checked = active_items.select(&:checked)
1785 * Lazy.new(obj, size=nil) { |yielder, *values| block }
1787 * Creates a new Lazy enumerator. When the enumerator is actually enumerated
1788 * (e.g. by calling #force), +obj+ will be enumerated and each value passed
1789 * to the given block. The block can yield values back using +yielder+.
1790 * For example, to create a "filter+map" enumerator:
1792 * def filter_map(sequence)
1793 * Lazy.new(sequence) do |yielder, *values|
1794 * result = yield *values
1795 * yielder << result if result
1799 * filter_map(1..Float::INFINITY) {|i| i*i if i.even?}.first(5)
1800 * #=> [4, 16, 36, 64, 100]
1803 lazy_initialize(int argc
, VALUE
*argv
, VALUE self
)
1805 VALUE obj
, size
= Qnil
;
1808 rb_check_arity(argc
, 1, 2);
1809 if (!rb_block_given_p()) {
1810 rb_raise(rb_eArgError
, "tried to call lazy new without a block");
1816 generator
= generator_allocate(rb_cGenerator
);
1817 rb_block_call(generator
, id_initialize
, 0, 0, lazy_init_block_i
, obj
);
1818 enumerator_init(self
, generator
, sym_each
, 0, 0, 0, size
, 0);
1819 rb_ivar_set(self
, id_receiver
, obj
);
1824 #if 0 /* for RDoc */
1827 * lazy.to_a -> array
1828 * lazy.force -> array
1830 * Expands +lazy+ enumerator to an array.
1831 * See Enumerable#to_a.
1834 lazy_to_a(VALUE self
)
1840 lazy_set_args(VALUE lazy
, VALUE args
)
1842 ID id
= rb_frame_this_func();
1843 rb_ivar_set(lazy
, id_method
, ID2SYM(id
));
1845 /* Qfalse indicates that the arguments are empty */
1846 rb_ivar_set(lazy
, id_arguments
, Qfalse
);
1849 rb_ivar_set(lazy
, id_arguments
, args
);
1855 lazy_set_method(VALUE lazy
, VALUE args
, rb_enumerator_size_func
*size_fn
)
1857 struct enumerator
*e
= enumerator_ptr(lazy
);
1858 lazy_set_args(lazy
, args
);
1859 e
->size_fn
= size_fn
;
1865 lazy_add_method(VALUE obj
, int argc
, VALUE
*argv
, VALUE args
, VALUE memo
,
1866 const lazyenum_funcs
*fn
)
1868 struct enumerator
*new_e
;
1870 VALUE new_generator
;
1872 struct enumerator
*e
= enumerator_ptr(obj
);
1873 struct proc_entry
*entry
;
1874 VALUE entry_obj
= TypedData_Make_Struct(rb_cObject
, struct proc_entry
,
1875 &proc_entry_data_type
, entry
);
1876 if (rb_block_given_p()) {
1877 RB_OBJ_WRITE(entry_obj
, &entry
->proc
, rb_block_proc());
1880 RB_OBJ_WRITE(entry_obj
, &entry
->memo
, args
);
1882 lazy_set_args(entry_obj
, memo
);
1884 new_procs
= RTEST(e
->procs
) ? rb_ary_dup(e
->procs
) : rb_ary_new();
1885 new_generator
= lazy_generator_init(obj
, new_procs
);
1886 rb_ary_push(new_procs
, entry_obj
);
1888 new_obj
= enumerator_init_copy(enumerator_allocate(rb_cLazy
), obj
);
1889 new_e
= RTYPEDDATA_GET_DATA(new_obj
);
1890 RB_OBJ_WRITE(new_obj
, &new_e
->obj
, new_generator
);
1891 RB_OBJ_WRITE(new_obj
, &new_e
->procs
, new_procs
);
1894 new_e
->meth
= rb_to_id(*argv
++);
1898 new_e
->meth
= id_each
;
1901 RB_OBJ_WRITE(new_obj
, &new_e
->args
, rb_ary_new4(argc
, argv
));
1908 * e.lazy -> lazy_enumerator
1910 * Returns an Enumerator::Lazy, which redefines most Enumerable
1911 * methods to postpone enumeration and enumerate values only on an
1916 * The following program finds pythagorean triples:
1918 * def pythagorean_triples
1919 * (1..Float::INFINITY).lazy.flat_map {|z|
1920 * (1..z).flat_map {|x|
1921 * (x..z).select {|y|
1922 * x**2 + y**2 == z**2
1929 * # show first ten pythagorean triples
1930 * p pythagorean_triples.take(10).force # take is lazy, so force is needed
1931 * p pythagorean_triples.first(10) # first is eager
1932 * # show pythagorean triples less than 100
1933 * p pythagorean_triples.take_while { |*, z| z < 100 }.force
1936 enumerable_lazy(VALUE obj
)
1938 VALUE result
= lazy_to_enum_i(obj
, sym_each
, 0, 0, lazyenum_size
, rb_keyword_given_p());
1939 /* Qfalse indicates that the Enumerator::Lazy has no method name */
1940 rb_ivar_set(result
, id_method
, Qfalse
);
1945 lazy_to_enum_i(VALUE obj
, VALUE meth
, int argc
, const VALUE
*argv
, rb_enumerator_size_func
*size_fn
, int kw_splat
)
1947 return enumerator_init(enumerator_allocate(rb_cLazy
),
1948 obj
, meth
, argc
, argv
, size_fn
, Qnil
, kw_splat
);
1953 * lzy.to_enum(method = :each, *args) -> lazy_enum
1954 * lzy.enum_for(method = :each, *args) -> lazy_enum
1955 * lzy.to_enum(method = :each, *args) {|*args| block } -> lazy_enum
1956 * lzy.enum_for(method = :each, *args) {|*args| block } -> lazy_enum
1958 * Similar to Object#to_enum, except it returns a lazy enumerator.
1959 * This makes it easy to define Enumerable methods that will
1960 * naturally remain lazy if called from a lazy enumerator.
1962 * For example, continuing from the example in Object#to_enum:
1964 * # See Object#to_enum for the definition of repeat
1965 * r = 1..Float::INFINITY
1966 * r.repeat(2).first(5) # => [1, 1, 2, 2, 3]
1967 * r.repeat(2).class # => Enumerator
1968 * r.repeat(2).map{|n| n ** 2}.first(5) # => endless loop!
1969 * # works naturally on lazy enumerator:
1970 * r.lazy.repeat(2).class # => Enumerator::Lazy
1971 * r.lazy.repeat(2).map{|n| n ** 2}.first(5) # => [1, 1, 4, 4, 9]
1975 lazy_to_enum(int argc
, VALUE
*argv
, VALUE self
)
1977 VALUE lazy
, meth
= sym_each
, super_meth
;
1983 if (RTEST((super_meth
= rb_hash_aref(lazy_use_super_method
, meth
)))) {
1986 lazy
= lazy_to_enum_i(self
, meth
, argc
, argv
, 0, rb_keyword_given_p());
1987 if (rb_block_given_p()) {
1988 RB_OBJ_WRITE(lazy
, &enumerator_ptr(lazy
)->size
, rb_block_proc());
1994 lazy_eager_size(VALUE self
, VALUE args
, VALUE eobj
)
1996 return enum_size(self
);
2003 * Returns a non-lazy Enumerator converted from the lazy enumerator.
2007 lazy_eager(VALUE self
)
2009 return enumerator_init(enumerator_allocate(rb_cEnumerator
),
2010 self
, sym_each
, 0, 0, lazy_eager_size
, Qnil
, 0);
2014 lazyenum_yield(VALUE proc_entry
, struct MEMO
*result
)
2016 struct proc_entry
*entry
= proc_entry_ptr(proc_entry
);
2017 return rb_proc_call_with_block(entry
->proc
, 1, &result
->memo_value
, Qnil
);
2021 lazyenum_yield_values(VALUE proc_entry
, struct MEMO
*result
)
2023 struct proc_entry
*entry
= proc_entry_ptr(proc_entry
);
2025 const VALUE
*argv
= &result
->memo_value
;
2026 if (LAZY_MEMO_PACKED_P(result
)) {
2027 const VALUE args
= *argv
;
2028 argc
= RARRAY_LENINT(args
);
2029 argv
= RARRAY_CONST_PTR(args
);
2031 return rb_proc_call_with_block(entry
->proc
, argc
, argv
, Qnil
);
2034 static struct MEMO
*
2035 lazy_map_proc(VALUE proc_entry
, struct MEMO
*result
, VALUE memos
, long memo_index
)
2037 VALUE value
= lazyenum_yield_values(proc_entry
, result
);
2038 LAZY_MEMO_SET_VALUE(result
, value
);
2039 LAZY_MEMO_RESET_PACKED(result
);
2044 lazy_map_size(VALUE entry
, VALUE receiver
)
2049 static const lazyenum_funcs lazy_map_funcs
= {
2050 lazy_map_proc
, lazy_map_size
,
2055 * lazy.collect { |obj| block } -> lazy_enumerator
2056 * lazy.map { |obj| block } -> lazy_enumerator
2058 * Like Enumerable#map, but chains operation to be lazy-evaluated.
2060 * (1..Float::INFINITY).lazy.map {|i| i**2 }
2061 * #=> #<Enumerator::Lazy: #<Enumerator::Lazy: 1..Infinity>:map>
2062 * (1..Float::INFINITY).lazy.map {|i| i**2 }.first(3)
2069 if (!rb_block_given_p()) {
2070 rb_raise(rb_eArgError
, "tried to call lazy map without a block");
2073 return lazy_add_method(obj
, 0, 0, Qnil
, Qnil
, &lazy_map_funcs
);
2076 struct flat_map_i_arg
{
2077 struct MEMO
*result
;
2082 lazy_flat_map_i(RB_BLOCK_CALL_FUNC_ARGLIST(i
, y
))
2084 struct flat_map_i_arg
*arg
= (struct flat_map_i_arg
*)y
;
2086 return lazy_yielder_yield(arg
->result
, arg
->index
, argc
, argv
);
2089 static struct MEMO
*
2090 lazy_flat_map_proc(VALUE proc_entry
, struct MEMO
*result
, VALUE memos
, long memo_index
)
2092 VALUE value
= lazyenum_yield_values(proc_entry
, result
);
2094 const long proc_index
= memo_index
+ 1;
2095 int break_p
= LAZY_MEMO_BREAK_P(result
);
2097 if (RB_TYPE_P(value
, T_ARRAY
)) {
2100 else if (rb_respond_to(value
, id_force
) && rb_respond_to(value
, id_each
)) {
2101 struct flat_map_i_arg arg
= {.result
= result
, .index
= proc_index
};
2102 LAZY_MEMO_RESET_BREAK(result
);
2103 rb_block_call(value
, id_each
, 0, 0, lazy_flat_map_i
, (VALUE
)&arg
);
2104 if (break_p
) LAZY_MEMO_SET_BREAK(result
);
2108 if (ary
|| !NIL_P(ary
= rb_check_array_type(value
))) {
2110 LAZY_MEMO_RESET_BREAK(result
);
2111 for (i
= 0; i
+ 1 < RARRAY_LEN(ary
); i
++) {
2112 const VALUE argv
= RARRAY_AREF(ary
, i
);
2113 lazy_yielder_yield(result
, proc_index
, 1, &argv
);
2115 if (break_p
) LAZY_MEMO_SET_BREAK(result
);
2116 if (i
>= RARRAY_LEN(ary
)) return 0;
2117 value
= RARRAY_AREF(ary
, i
);
2119 LAZY_MEMO_SET_VALUE(result
, value
);
2120 LAZY_MEMO_RESET_PACKED(result
);
2124 static const lazyenum_funcs lazy_flat_map_funcs
= {
2125 lazy_flat_map_proc
, 0,
2130 * lazy.collect_concat { |obj| block } -> a_lazy_enumerator
2131 * lazy.flat_map { |obj| block } -> a_lazy_enumerator
2133 * Returns a new lazy enumerator with the concatenated results of running
2134 * +block+ once for every element in the lazy enumerator.
2136 * ["foo", "bar"].lazy.flat_map {|i| i.each_char.lazy}.force
2137 * #=> ["f", "o", "o", "b", "a", "r"]
2139 * A value +x+ returned by +block+ is decomposed if either of
2140 * the following conditions is true:
2142 * * +x+ responds to both each and force, which means that
2143 * +x+ is a lazy enumerator.
2144 * * +x+ is an array or responds to to_ary.
2146 * Otherwise, +x+ is contained as-is in the return value.
2148 * [{a:1}, {b:2}].lazy.flat_map {|i| i}.force
2149 * #=> [{:a=>1}, {:b=>2}]
2152 lazy_flat_map(VALUE obj
)
2154 if (!rb_block_given_p()) {
2155 rb_raise(rb_eArgError
, "tried to call lazy flat_map without a block");
2158 return lazy_add_method(obj
, 0, 0, Qnil
, Qnil
, &lazy_flat_map_funcs
);
2161 static struct MEMO
*
2162 lazy_select_proc(VALUE proc_entry
, struct MEMO
*result
, VALUE memos
, long memo_index
)
2164 VALUE chain
= lazyenum_yield(proc_entry
, result
);
2165 if (!RTEST(chain
)) return 0;
2169 static const lazyenum_funcs lazy_select_funcs
= {
2170 lazy_select_proc
, 0,
2175 * lazy.find_all { |obj| block } -> lazy_enumerator
2176 * lazy.select { |obj| block } -> lazy_enumerator
2177 * lazy.filter { |obj| block } -> lazy_enumerator
2179 * Like Enumerable#select, but chains operation to be lazy-evaluated.
2182 lazy_select(VALUE obj
)
2184 if (!rb_block_given_p()) {
2185 rb_raise(rb_eArgError
, "tried to call lazy select without a block");
2188 return lazy_add_method(obj
, 0, 0, Qnil
, Qnil
, &lazy_select_funcs
);
2191 static struct MEMO
*
2192 lazy_filter_map_proc(VALUE proc_entry
, struct MEMO
*result
, VALUE memos
, long memo_index
)
2194 VALUE value
= lazyenum_yield_values(proc_entry
, result
);
2195 if (!RTEST(value
)) return 0;
2196 LAZY_MEMO_SET_VALUE(result
, value
);
2197 LAZY_MEMO_RESET_PACKED(result
);
2201 static const lazyenum_funcs lazy_filter_map_funcs
= {
2202 lazy_filter_map_proc
, 0,
2207 * lazy.filter_map { |obj| block } -> lazy_enumerator
2209 * Like Enumerable#filter_map, but chains operation to be lazy-evaluated.
2211 * (1..).lazy.filter_map { |i| i * 2 if i.even? }.first(5)
2212 * #=> [4, 8, 12, 16, 20]
2216 lazy_filter_map(VALUE obj
)
2218 if (!rb_block_given_p()) {
2219 rb_raise(rb_eArgError
, "tried to call lazy filter_map without a block");
2222 return lazy_add_method(obj
, 0, 0, Qnil
, Qnil
, &lazy_filter_map_funcs
);
2225 static struct MEMO
*
2226 lazy_reject_proc(VALUE proc_entry
, struct MEMO
*result
, VALUE memos
, long memo_index
)
2228 VALUE chain
= lazyenum_yield(proc_entry
, result
);
2229 if (RTEST(chain
)) return 0;
2233 static const lazyenum_funcs lazy_reject_funcs
= {
2234 lazy_reject_proc
, 0,
2239 * lazy.reject { |obj| block } -> lazy_enumerator
2241 * Like Enumerable#reject, but chains operation to be lazy-evaluated.
2245 lazy_reject(VALUE obj
)
2247 if (!rb_block_given_p()) {
2248 rb_raise(rb_eArgError
, "tried to call lazy reject without a block");
2251 return lazy_add_method(obj
, 0, 0, Qnil
, Qnil
, &lazy_reject_funcs
);
2254 static struct MEMO
*
2255 lazy_grep_proc(VALUE proc_entry
, struct MEMO
*result
, VALUE memos
, long memo_index
)
2257 struct proc_entry
*entry
= proc_entry_ptr(proc_entry
);
2258 VALUE chain
= rb_funcall(entry
->memo
, id_eqq
, 1, result
->memo_value
);
2259 if (!RTEST(chain
)) return 0;
2263 static struct MEMO
*
2264 lazy_grep_iter_proc(VALUE proc_entry
, struct MEMO
*result
, VALUE memos
, long memo_index
)
2266 struct proc_entry
*entry
= proc_entry_ptr(proc_entry
);
2267 VALUE value
, chain
= rb_funcall(entry
->memo
, id_eqq
, 1, result
->memo_value
);
2269 if (!RTEST(chain
)) return 0;
2270 value
= rb_proc_call_with_block(entry
->proc
, 1, &(result
->memo_value
), Qnil
);
2271 LAZY_MEMO_SET_VALUE(result
, value
);
2272 LAZY_MEMO_RESET_PACKED(result
);
2277 static const lazyenum_funcs lazy_grep_iter_funcs
= {
2278 lazy_grep_iter_proc
, 0,
2281 static const lazyenum_funcs lazy_grep_funcs
= {
2287 * lazy.grep(pattern) -> lazy_enumerator
2288 * lazy.grep(pattern) { |obj| block } -> lazy_enumerator
2290 * Like Enumerable#grep, but chains operation to be lazy-evaluated.
2294 lazy_grep(VALUE obj
, VALUE pattern
)
2296 const lazyenum_funcs
*const funcs
= rb_block_given_p() ?
2297 &lazy_grep_iter_funcs
: &lazy_grep_funcs
;
2298 return lazy_add_method(obj
, 0, 0, pattern
, rb_ary_new3(1, pattern
), funcs
);
2301 static struct MEMO
*
2302 lazy_grep_v_proc(VALUE proc_entry
, struct MEMO
*result
, VALUE memos
, long memo_index
)
2304 struct proc_entry
*entry
= proc_entry_ptr(proc_entry
);
2305 VALUE chain
= rb_funcall(entry
->memo
, id_eqq
, 1, result
->memo_value
);
2306 if (RTEST(chain
)) return 0;
2310 static struct MEMO
*
2311 lazy_grep_v_iter_proc(VALUE proc_entry
, struct MEMO
*result
, VALUE memos
, long memo_index
)
2313 struct proc_entry
*entry
= proc_entry_ptr(proc_entry
);
2314 VALUE value
, chain
= rb_funcall(entry
->memo
, id_eqq
, 1, result
->memo_value
);
2316 if (RTEST(chain
)) return 0;
2317 value
= rb_proc_call_with_block(entry
->proc
, 1, &(result
->memo_value
), Qnil
);
2318 LAZY_MEMO_SET_VALUE(result
, value
);
2319 LAZY_MEMO_RESET_PACKED(result
);
2324 static const lazyenum_funcs lazy_grep_v_iter_funcs
= {
2325 lazy_grep_v_iter_proc
, 0,
2328 static const lazyenum_funcs lazy_grep_v_funcs
= {
2329 lazy_grep_v_proc
, 0,
2334 * lazy.grep_v(pattern) -> lazy_enumerator
2335 * lazy.grep_v(pattern) { |obj| block } -> lazy_enumerator
2337 * Like Enumerable#grep_v, but chains operation to be lazy-evaluated.
2341 lazy_grep_v(VALUE obj
, VALUE pattern
)
2343 const lazyenum_funcs
*const funcs
= rb_block_given_p() ?
2344 &lazy_grep_v_iter_funcs
: &lazy_grep_v_funcs
;
2345 return lazy_add_method(obj
, 0, 0, pattern
, rb_ary_new3(1, pattern
), funcs
);
2349 call_next(VALUE obj
)
2351 return rb_funcall(obj
, id_next
, 0);
2355 next_stopped(VALUE obj
, VALUE _
)
2360 static struct MEMO
*
2361 lazy_zip_arrays_func(VALUE proc_entry
, struct MEMO
*result
, VALUE memos
, long memo_index
)
2363 struct proc_entry
*entry
= proc_entry_ptr(proc_entry
);
2364 VALUE ary
, arrays
= entry
->memo
;
2365 VALUE memo
= rb_ary_entry(memos
, memo_index
);
2366 long i
, count
= NIL_P(memo
) ? 0 : NUM2LONG(memo
);
2368 ary
= rb_ary_new2(RARRAY_LEN(arrays
) + 1);
2369 rb_ary_push(ary
, result
->memo_value
);
2370 for (i
= 0; i
< RARRAY_LEN(arrays
); i
++) {
2371 rb_ary_push(ary
, rb_ary_entry(RARRAY_AREF(arrays
, i
), count
));
2373 LAZY_MEMO_SET_VALUE(result
, ary
);
2374 rb_ary_store(memos
, memo_index
, LONG2NUM(++count
));
2378 static struct MEMO
*
2379 lazy_zip_func(VALUE proc_entry
, struct MEMO
*result
, VALUE memos
, long memo_index
)
2381 struct proc_entry
*entry
= proc_entry_ptr(proc_entry
);
2382 VALUE arg
= rb_ary_entry(memos
, memo_index
);
2383 VALUE zip_args
= entry
->memo
;
2388 arg
= rb_ary_new2(RARRAY_LEN(zip_args
));
2389 for (i
= 0; i
< RARRAY_LEN(zip_args
); i
++) {
2390 rb_ary_push(arg
, rb_funcall(RARRAY_AREF(zip_args
, i
), id_to_enum
, 0));
2392 rb_ary_store(memos
, memo_index
, arg
);
2395 ary
= rb_ary_new2(RARRAY_LEN(arg
) + 1);
2396 rb_ary_push(ary
, result
->memo_value
);
2397 for (i
= 0; i
< RARRAY_LEN(arg
); i
++) {
2398 v
= rb_rescue2(call_next
, RARRAY_AREF(arg
, i
), next_stopped
, 0,
2399 rb_eStopIteration
, (VALUE
)0);
2400 rb_ary_push(ary
, v
);
2402 LAZY_MEMO_SET_VALUE(result
, ary
);
2403 LAZY_MEMO_SET_PACKED(result
);
2407 static const lazyenum_funcs lazy_zip_funcs
[] = {
2408 {lazy_zip_func
, lazy_receiver_size
,},
2409 {lazy_zip_arrays_func
, lazy_receiver_size
,},
2414 * lazy.zip(arg, ...) -> lazy_enumerator
2415 * lazy.zip(arg, ...) { |arr| block } -> nil
2417 * Like Enumerable#zip, but chains operation to be lazy-evaluated.
2418 * However, if a block is given to zip, values are enumerated immediately.
2421 lazy_zip(int argc
, VALUE
*argv
, VALUE obj
)
2425 const lazyenum_funcs
*funcs
= &lazy_zip_funcs
[1];
2427 if (rb_block_given_p()) {
2428 return rb_call_super(argc
, argv
);
2431 ary
= rb_ary_new2(argc
);
2432 for (i
= 0; i
< argc
; i
++) {
2433 v
= rb_check_array_type(argv
[i
]);
2435 for (; i
< argc
; i
++) {
2436 if (!rb_respond_to(argv
[i
], id_each
)) {
2437 rb_raise(rb_eTypeError
, "wrong argument type %"PRIsVALUE
" (must respond to :each)",
2438 rb_obj_class(argv
[i
]));
2441 ary
= rb_ary_new4(argc
, argv
);
2442 funcs
= &lazy_zip_funcs
[0];
2445 rb_ary_push(ary
, v
);
2448 return lazy_add_method(obj
, 0, 0, ary
, ary
, funcs
);
2451 static struct MEMO
*
2452 lazy_take_proc(VALUE proc_entry
, struct MEMO
*result
, VALUE memos
, long memo_index
)
2455 struct proc_entry
*entry
= proc_entry_ptr(proc_entry
);
2456 VALUE memo
= rb_ary_entry(memos
, memo_index
);
2462 remain
= NUM2LONG(memo
);
2463 if (--remain
== 0) LAZY_MEMO_SET_BREAK(result
);
2464 rb_ary_store(memos
, memo_index
, LONG2NUM(remain
));
2469 lazy_take_size(VALUE entry
, VALUE receiver
)
2471 long len
= NUM2LONG(RARRAY_AREF(rb_ivar_get(entry
, id_arguments
), 0));
2472 if (NIL_P(receiver
) || (FIXNUM_P(receiver
) && FIX2LONG(receiver
) < len
))
2474 return LONG2NUM(len
);
2478 lazy_take_precheck(VALUE proc_entry
)
2480 struct proc_entry
*entry
= proc_entry_ptr(proc_entry
);
2481 return entry
->memo
!= INT2FIX(0);
2484 static const lazyenum_funcs lazy_take_funcs
= {
2485 lazy_take_proc
, lazy_take_size
, lazy_take_precheck
,
2490 * lazy.take(n) -> lazy_enumerator
2492 * Like Enumerable#take, but chains operation to be lazy-evaluated.
2496 lazy_take(VALUE obj
, VALUE n
)
2498 long len
= NUM2LONG(n
);
2501 rb_raise(rb_eArgError
, "attempt to take negative size");
2504 n
= LONG2NUM(len
); /* no more conversion */
2506 return lazy_add_method(obj
, 0, 0, n
, rb_ary_new3(1, n
), &lazy_take_funcs
);
2509 static struct MEMO
*
2510 lazy_take_while_proc(VALUE proc_entry
, struct MEMO
*result
, VALUE memos
, long memo_index
)
2512 VALUE take
= lazyenum_yield_values(proc_entry
, result
);
2514 LAZY_MEMO_SET_BREAK(result
);
2520 static const lazyenum_funcs lazy_take_while_funcs
= {
2521 lazy_take_while_proc
, 0,
2526 * lazy.take_while { |obj| block } -> lazy_enumerator
2528 * Like Enumerable#take_while, but chains operation to be lazy-evaluated.
2532 lazy_take_while(VALUE obj
)
2534 if (!rb_block_given_p()) {
2535 rb_raise(rb_eArgError
, "tried to call lazy take_while without a block");
2538 return lazy_add_method(obj
, 0, 0, Qnil
, Qnil
, &lazy_take_while_funcs
);
2542 lazy_drop_size(VALUE proc_entry
, VALUE receiver
)
2544 long len
= NUM2LONG(RARRAY_AREF(rb_ivar_get(proc_entry
, id_arguments
), 0));
2545 if (NIL_P(receiver
))
2547 if (FIXNUM_P(receiver
)) {
2548 len
= FIX2LONG(receiver
) - len
;
2549 return LONG2FIX(len
< 0 ? 0 : len
);
2551 return rb_funcall(receiver
, '-', 1, LONG2NUM(len
));
2554 static struct MEMO
*
2555 lazy_drop_proc(VALUE proc_entry
, struct MEMO
*result
, VALUE memos
, long memo_index
)
2558 struct proc_entry
*entry
= proc_entry_ptr(proc_entry
);
2559 VALUE memo
= rb_ary_entry(memos
, memo_index
);
2564 remain
= NUM2LONG(memo
);
2567 rb_ary_store(memos
, memo_index
, LONG2NUM(remain
));
2574 static const lazyenum_funcs lazy_drop_funcs
= {
2575 lazy_drop_proc
, lazy_drop_size
,
2580 * lazy.drop(n) -> lazy_enumerator
2582 * Like Enumerable#drop, but chains operation to be lazy-evaluated.
2586 lazy_drop(VALUE obj
, VALUE n
)
2588 long len
= NUM2LONG(n
);
2594 rb_raise(rb_eArgError
, "attempt to drop negative size");
2597 return lazy_add_method(obj
, 2, argv
, n
, rb_ary_new3(1, n
), &lazy_drop_funcs
);
2600 static struct MEMO
*
2601 lazy_drop_while_proc(VALUE proc_entry
, struct MEMO
* result
, VALUE memos
, long memo_index
)
2603 struct proc_entry
*entry
= proc_entry_ptr(proc_entry
);
2604 VALUE memo
= rb_ary_entry(memos
, memo_index
);
2611 VALUE drop
= lazyenum_yield_values(proc_entry
, result
);
2612 if (RTEST(drop
)) return 0;
2613 rb_ary_store(memos
, memo_index
, Qtrue
);
2618 static const lazyenum_funcs lazy_drop_while_funcs
= {
2619 lazy_drop_while_proc
, 0,
2624 * lazy.drop_while { |obj| block } -> lazy_enumerator
2626 * Like Enumerable#drop_while, but chains operation to be lazy-evaluated.
2630 lazy_drop_while(VALUE obj
)
2632 if (!rb_block_given_p()) {
2633 rb_raise(rb_eArgError
, "tried to call lazy drop_while without a block");
2636 return lazy_add_method(obj
, 0, 0, Qfalse
, Qnil
, &lazy_drop_while_funcs
);
2640 lazy_uniq_check(VALUE chain
, VALUE memos
, long memo_index
)
2642 VALUE hash
= rb_ary_entry(memos
, memo_index
);
2645 hash
= rb_obj_hide(rb_hash_new());
2646 rb_ary_store(memos
, memo_index
, hash
);
2649 return rb_hash_add_new_element(hash
, chain
, Qfalse
);
2652 static struct MEMO
*
2653 lazy_uniq_proc(VALUE proc_entry
, struct MEMO
*result
, VALUE memos
, long memo_index
)
2655 if (lazy_uniq_check(result
->memo_value
, memos
, memo_index
)) return 0;
2659 static struct MEMO
*
2660 lazy_uniq_iter_proc(VALUE proc_entry
, struct MEMO
*result
, VALUE memos
, long memo_index
)
2662 VALUE chain
= lazyenum_yield(proc_entry
, result
);
2664 if (lazy_uniq_check(chain
, memos
, memo_index
)) return 0;
2668 static const lazyenum_funcs lazy_uniq_iter_funcs
= {
2669 lazy_uniq_iter_proc
, 0,
2672 static const lazyenum_funcs lazy_uniq_funcs
= {
2678 * lazy.uniq -> lazy_enumerator
2679 * lazy.uniq { |item| block } -> lazy_enumerator
2681 * Like Enumerable#uniq, but chains operation to be lazy-evaluated.
2685 lazy_uniq(VALUE obj
)
2687 const lazyenum_funcs
*const funcs
=
2688 rb_block_given_p() ? &lazy_uniq_iter_funcs
: &lazy_uniq_funcs
;
2689 return lazy_add_method(obj
, 0, 0, Qnil
, Qnil
, funcs
);
2692 static struct MEMO
*
2693 lazy_compact_proc(VALUE proc_entry
, struct MEMO
*result
, VALUE memos
, long memo_index
)
2695 if (NIL_P(result
->memo_value
)) return 0;
2699 static const lazyenum_funcs lazy_compact_funcs
= {
2700 lazy_compact_proc
, 0,
2705 * lazy.compact -> lazy_enumerator
2707 * Like Enumerable#compact, but chains operation to be lazy-evaluated.
2711 lazy_compact(VALUE obj
)
2713 return lazy_add_method(obj
, 0, 0, Qnil
, Qnil
, &lazy_compact_funcs
);
2716 static struct MEMO
*
2717 lazy_with_index_proc(VALUE proc_entry
, struct MEMO
* result
, VALUE memos
, long memo_index
)
2719 struct proc_entry
*entry
= proc_entry_ptr(proc_entry
);
2720 VALUE memo
= rb_ary_entry(memos
, memo_index
);
2727 argv
[0] = result
->memo_value
;
2730 rb_proc_call_with_block(entry
->proc
, 2, argv
, Qnil
);
2731 LAZY_MEMO_RESET_PACKED(result
);
2734 LAZY_MEMO_SET_VALUE(result
, rb_ary_new_from_values(2, argv
));
2735 LAZY_MEMO_SET_PACKED(result
);
2737 rb_ary_store(memos
, memo_index
, LONG2NUM(NUM2LONG(memo
) + 1));
2742 lazy_with_index_size(VALUE proc
, VALUE receiver
)
2747 static const lazyenum_funcs lazy_with_index_funcs
= {
2748 lazy_with_index_proc
, lazy_with_index_size
,
2753 * lazy.with_index(offset = 0) {|(*args), idx| block }
2754 * lazy.with_index(offset = 0)
2756 * If a block is given, returns a lazy enumerator that will
2757 * iterate over the given block for each element
2758 * with an index, which starts from +offset+, and returns a
2759 * lazy enumerator that yields the same values (without the index).
2761 * If a block is not given, returns a new lazy enumerator that
2762 * includes the index, starting from +offset+.
2764 * +offset+:: the starting index to use
2766 * See Enumerator#with_index.
2769 lazy_with_index(int argc
, VALUE
*argv
, VALUE obj
)
2773 rb_scan_args(argc
, argv
, "01", &memo
);
2777 return lazy_add_method(obj
, 0, 0, memo
, rb_ary_new_from_values(1, &memo
), &lazy_with_index_funcs
);
2780 #if 0 /* for RDoc */
2784 * lazy.chunk { |elt| ... } -> lazy_enumerator
2786 * Like Enumerable#chunk, but chains operation to be lazy-evaluated.
2789 lazy_chunk(VALUE self
)
2795 * lazy.chunk_while {|elt_before, elt_after| bool } -> lazy_enumerator
2797 * Like Enumerable#chunk_while, but chains operation to be lazy-evaluated.
2800 lazy_chunk_while(VALUE self
)
2806 * lazy.slice_after(pattern) -> lazy_enumerator
2807 * lazy.slice_after { |elt| bool } -> lazy_enumerator
2809 * Like Enumerable#slice_after, but chains operation to be lazy-evaluated.
2812 lazy_slice_after(VALUE self
)
2818 * lazy.slice_before(pattern) -> lazy_enumerator
2819 * lazy.slice_before { |elt| bool } -> lazy_enumerator
2821 * Like Enumerable#slice_before, but chains operation to be lazy-evaluated.
2824 lazy_slice_before(VALUE self
)
2830 * lazy.slice_when {|elt_before, elt_after| bool } -> lazy_enumerator
2832 * Like Enumerable#slice_when, but chains operation to be lazy-evaluated.
2835 lazy_slice_when(VALUE self
)
2841 lazy_super(int argc
, VALUE
*argv
, VALUE lazy
)
2843 return enumerable_lazy(rb_call_super(argc
, argv
));
2848 * enum.lazy -> lazy_enumerator
2854 lazy_lazy(VALUE obj
)
2860 * Document-class: StopIteration
2862 * Raised to stop the iteration, in particular by Enumerator#next. It is
2863 * rescued by Kernel#loop.
2867 * raise StopIteration
2872 * <em>produces:</em>
2882 * Returns the return value of the iterator.
2900 * rescue StopIteration => ex
2901 * puts ex.result #=> 100
2907 stop_result(VALUE self
)
2909 return rb_attr_get(self
, id_result
);
2917 producer_mark(void *p
)
2919 struct producer
*ptr
= p
;
2920 rb_gc_mark_movable(ptr
->init
);
2921 rb_gc_mark_movable(ptr
->proc
);
2925 producer_compact(void *p
)
2927 struct producer
*ptr
= p
;
2928 ptr
->init
= rb_gc_location(ptr
->init
);
2929 ptr
->proc
= rb_gc_location(ptr
->proc
);
2932 #define producer_free RUBY_TYPED_DEFAULT_FREE
2935 producer_memsize(const void *p
)
2937 return sizeof(struct producer
);
2940 static const rb_data_type_t producer_data_type
= {
2948 0, 0, RUBY_TYPED_FREE_IMMEDIATELY
| RUBY_TYPED_WB_PROTECTED
| RUBY_TYPED_EMBEDDABLE
2951 static struct producer
*
2952 producer_ptr(VALUE obj
)
2954 struct producer
*ptr
;
2956 TypedData_Get_Struct(obj
, struct producer
, &producer_data_type
, ptr
);
2957 if (!ptr
|| UNDEF_P(ptr
->proc
)) {
2958 rb_raise(rb_eArgError
, "uninitialized producer");
2965 producer_allocate(VALUE klass
)
2967 struct producer
*ptr
;
2970 obj
= TypedData_Make_Struct(klass
, struct producer
, &producer_data_type
, ptr
);
2978 producer_init(VALUE obj
, VALUE init
, VALUE proc
)
2980 struct producer
*ptr
;
2982 TypedData_Get_Struct(obj
, struct producer
, &producer_data_type
, ptr
);
2985 rb_raise(rb_eArgError
, "unallocated producer");
2988 RB_OBJ_WRITE(obj
, &ptr
->init
, init
);
2989 RB_OBJ_WRITE(obj
, &ptr
->proc
, proc
);
/* rb_rescue2 handler: when the producer block raises StopIteration,
 * return the exception's +result+ attribute as the iteration's value. */
static VALUE
producer_each_stop(VALUE dummy, VALUE exc)
{
    return rb_attr_get(exc, id_result);
}
3000 NORETURN(static VALUE
producer_each_i(VALUE obj
));
3003 producer_each_i(VALUE obj
)
3005 struct producer
*ptr
;
3006 VALUE init
, proc
, curr
;
3008 ptr
= producer_ptr(obj
);
3012 if (UNDEF_P(init
)) {
3021 curr
= rb_funcall(proc
, id_call
, 1, curr
);
3025 UNREACHABLE_RETURN(Qnil
);
3030 producer_each(VALUE obj
)
3034 return rb_rescue2(producer_each_i
, obj
, producer_each_stop
, (VALUE
)0, rb_eStopIteration
, (VALUE
)0);
/* Size function for produced enumerators: a produced sequence is
 * conceptually endless, so always report Float::INFINITY. */
static VALUE
producer_size(VALUE obj, VALUE args, VALUE eobj)
{
    return DBL2NUM(HUGE_VAL);
}
3045 * Enumerator.produce(initial = nil) { |prev| block } -> enumerator
3047 * Creates an infinite enumerator from any block, just called over and
3048 * over. The result of the previous iteration is passed to the next one.
3049 * If +initial+ is provided, it is passed to the first iteration, and
3050 * becomes the first element of the enumerator; if it is not provided,
3051 * the first iteration receives +nil+, and its result becomes the first
3052 * element of the iterator.
3054 * Raising StopIteration from the block stops an iteration.
3056 * Enumerator.produce(1, &:succ) # => enumerator of 1, 2, 3, 4, ....
3058 * Enumerator.produce { rand(10) } # => infinite random number sequence
3060 * ancestors = Enumerator.produce(node) { |prev| node = prev.parent or raise StopIteration }
3061 * enclosing_section = ancestors.find { |n| n.type == :section }
3063 * Using ::produce together with Enumerable methods like Enumerable#detect,
3064 * Enumerable#slice_after, Enumerable#take_while can provide Enumerator-based alternatives
3065 * for +while+ and +until+ cycles:
3067 * # Find next Tuesday
3069 * Enumerator.produce(Date.today, &:succ).detect(&:tuesday?)
3073 * scanner = StringScanner.new("7+38/6")
3074 * PATTERN = %r{\d+|[-/+*]}
3075 * Enumerator.produce { scanner.scan(PATTERN) }.slice_after { scanner.eos? }.first
3076 * # => ["7", "+", "38", "/", "6"]
3079 enumerator_s_produce(int argc
, VALUE
*argv
, VALUE klass
)
3081 VALUE init
, producer
;
3083 if (!rb_block_given_p()) rb_raise(rb_eArgError
, "no block given");
3085 if (rb_scan_args(argc
, argv
, "01", &init
) == 0) {
3089 producer
= producer_init(producer_allocate(rb_cEnumProducer
), init
, rb_block_proc());
3091 return rb_enumeratorize_with_size_kw(producer
, sym_each
, 0, 0, producer_size
, RB_NO_KEYWORDS
);
3095 * Document-class: Enumerator::Chain
3097 * Enumerator::Chain is a subclass of Enumerator, which represents a
3098 * chain of enumerables that works as a single enumerator.
3100 * This type of objects can be created by Enumerable#chain and
3105 enum_chain_mark(void *p
)
3107 struct enum_chain
*ptr
= p
;
3108 rb_gc_mark_movable(ptr
->enums
);
3112 enum_chain_compact(void *p
)
3114 struct enum_chain
*ptr
= p
;
3115 ptr
->enums
= rb_gc_location(ptr
->enums
);
3118 #define enum_chain_free RUBY_TYPED_DEFAULT_FREE
/* Reports memory used by the chain wrapper struct itself. */
static size_t
enum_chain_memsize(const void *p)
{
    return sizeof(struct enum_chain);
}
3126 static const rb_data_type_t enum_chain_data_type
= {
3134 0, 0, RUBY_TYPED_FREE_IMMEDIATELY
3137 static struct enum_chain
*
3138 enum_chain_ptr(VALUE obj
)
3140 struct enum_chain
*ptr
;
3142 TypedData_Get_Struct(obj
, struct enum_chain
, &enum_chain_data_type
, ptr
);
3143 if (!ptr
|| UNDEF_P(ptr
->enums
)) {
3144 rb_raise(rb_eArgError
, "uninitialized chain");
3151 enum_chain_allocate(VALUE klass
)
3153 struct enum_chain
*ptr
;
3156 obj
= TypedData_Make_Struct(klass
, struct enum_chain
, &enum_chain_data_type
, ptr
);
3157 ptr
->enums
= Qundef
;
3165 * Enumerator::Chain.new(*enums) -> enum
3167 * Generates a new enumerator object that iterates over the elements
3168 * of given enumerable objects in sequence.
3170 * e = Enumerator::Chain.new(1..3, [4, 5])
3171 * e.to_a #=> [1, 2, 3, 4, 5]
3175 enum_chain_initialize(VALUE obj
, VALUE enums
)
3177 struct enum_chain
*ptr
;
3179 rb_check_frozen(obj
);
3180 TypedData_Get_Struct(obj
, struct enum_chain
, &enum_chain_data_type
, ptr
);
3182 if (!ptr
) rb_raise(rb_eArgError
, "unallocated chain");
3184 ptr
->enums
= rb_obj_freeze(enums
);
3191 new_enum_chain(VALUE enums
)
3194 VALUE obj
= enum_chain_initialize(enum_chain_allocate(rb_cEnumChain
), enums
);
3196 for (i
= 0; i
< RARRAY_LEN(enums
); i
++) {
3197 if (RTEST(rb_obj_is_kind_of(RARRAY_AREF(enums
, i
), rb_cLazy
))) {
3198 return enumerable_lazy(obj
);
3207 enum_chain_init_copy(VALUE obj
, VALUE orig
)
3209 struct enum_chain
*ptr0
, *ptr1
;
3211 if (!OBJ_INIT_COPY(obj
, orig
)) return obj
;
3212 ptr0
= enum_chain_ptr(orig
);
3214 TypedData_Get_Struct(obj
, struct enum_chain
, &enum_chain_data_type
, ptr1
);
3216 if (!ptr1
) rb_raise(rb_eArgError
, "unallocated chain");
3218 ptr1
->enums
= ptr0
->enums
;
3219 ptr1
->pos
= ptr0
->pos
;
3225 enum_chain_total_size(VALUE enums
)
3227 VALUE total
= INT2FIX(0);
3230 for (i
= 0; i
< RARRAY_LEN(enums
); i
++) {
3231 VALUE size
= enum_size(RARRAY_AREF(enums
, i
));
3233 if (NIL_P(size
) || (RB_FLOAT_TYPE_P(size
) && isinf(NUM2DBL(size
)))) {
3236 if (!RB_INTEGER_TYPE_P(size
)) {
3240 total
= rb_funcall(total
, '+', 1, size
);
3248 * obj.size -> int, Float::INFINITY or nil
3250 * Returns the total size of the enumerator chain calculated by
3251 * summing up the size of each enumerable in the chain. If any of the
3252 * enumerables reports its size as nil or Float::INFINITY, that value
3253 * is returned as the total size.
/*
 * call-seq:
 *   obj.size -> int, Float::INFINITY or nil
 *
 * Returns the total size of the enumerator chain: the sum of the sizes
 * of the chained enumerables (nil or Float::INFINITY propagates, per
 * enum_chain_total_size). Raises ArgumentError if the chain is
 * uninitialized (via enum_chain_ptr).
 */
static VALUE
enum_chain_size(VALUE obj)
{
    return enum_chain_total_size(enum_chain_ptr(obj)->enums);
}
/* Sized-enumerator callback: ignores args/eobj and delegates to
 * enum_chain_size for the receiver. */
static VALUE
enum_chain_enum_size(VALUE obj, VALUE args, VALUE eobj)
{
    return enum_chain_size(obj);
}
3268 enum_chain_enum_no_size(VALUE obj
, VALUE args
, VALUE eobj
)
3275 * obj.each(*args) { |...| ... } -> obj
3276 * obj.each(*args) -> enumerator
3278 * Iterates over the elements of the first enumerable by calling the
3279 * "each" method on it with the given arguments, then proceeds to the
3280 * following enumerables in sequence until all of the enumerables are
3283 * If no block is given, returns an enumerator.
3286 enum_chain_each(int argc
, VALUE
*argv
, VALUE obj
)
3289 struct enum_chain
*objptr
;
3292 RETURN_SIZED_ENUMERATOR(obj
, argc
, argv
, argc
> 0 ? enum_chain_enum_no_size
: enum_chain_enum_size
);
3294 objptr
= enum_chain_ptr(obj
);
3295 enums
= objptr
->enums
;
3296 block
= rb_block_proc();
3298 for (i
= 0; i
< RARRAY_LEN(enums
); i
++) {
3300 rb_funcall_with_block(RARRAY_AREF(enums
, i
), id_each
, argc
, argv
, block
);
3310 * Rewinds the enumerator chain by calling the "rewind" method on each
3311 * enumerable in reverse order. Each call is performed only if the
3312 * enumerable responds to the method.
3315 enum_chain_rewind(VALUE obj
)
3317 struct enum_chain
*objptr
= enum_chain_ptr(obj
);
3318 VALUE enums
= objptr
->enums
;
3321 for (i
= objptr
->pos
; 0 <= i
&& i
< RARRAY_LEN(enums
); objptr
->pos
= --i
) {
3322 rb_check_funcall(RARRAY_AREF(enums
, i
), id_rewind
, 0, 0);
3329 inspect_enum_chain(VALUE obj
, VALUE dummy
, int recur
)
3331 VALUE klass
= rb_obj_class(obj
);
3332 struct enum_chain
*ptr
;
3334 TypedData_Get_Struct(obj
, struct enum_chain
, &enum_chain_data_type
, ptr
);
3336 if (!ptr
|| UNDEF_P(ptr
->enums
)) {
3337 return rb_sprintf("#<%"PRIsVALUE
": uninitialized>", rb_class_path(klass
));
3341 return rb_sprintf("#<%"PRIsVALUE
": ...>", rb_class_path(klass
));
3344 return rb_sprintf("#<%"PRIsVALUE
": %+"PRIsVALUE
">", rb_class_path(klass
), ptr
->enums
);
3349 * obj.inspect -> string
3351 * Returns a printable version of the enumerator chain.
/*
 * call-seq:
 *   obj.inspect -> string
 *
 * Returns a printable version of the enumerator chain.
 * rb_exec_recursive guards against infinite output for
 * self-referential chains.
 */
static VALUE
enum_chain_inspect(VALUE obj)
{
    return rb_exec_recursive(inspect_enum_chain, obj, 0);
}
3361 * e.chain(*enums) -> enumerator
3363 * Returns an enumerator object generated from this enumerator and
3364 * given enumerables.
3366 * e = (1..3).chain([4, 5])
3367 * e.to_a #=> [1, 2, 3, 4, 5]
3370 enum_chain(int argc
, VALUE
*argv
, VALUE obj
)
3372 VALUE enums
= rb_ary_new_from_values(1, &obj
);
3373 rb_ary_cat(enums
, argv
, argc
);
3374 return new_enum_chain(enums
);
3379 * e + enum -> enumerator
3381 * Returns an enumerator object generated from this enumerator and a
3384 * e = (1..3).each + [4, 5]
3385 * e.to_a #=> [1, 2, 3, 4, 5]
/*
 * call-seq:
 *   e + enum -> enumerator
 *
 * Returns an enumerator chaining this enumerator with +eobj+
 * (two-element chain).
 */
static VALUE
enumerator_plus(VALUE obj, VALUE eobj)
{
    return new_enum_chain(rb_ary_new_from_args(2, obj, eobj));
}
3394 * Document-class: Enumerator::Product
3396 * Enumerator::Product generates a Cartesian product of any number of
3397 * enumerable objects. Iterating over the product of enumerable
3398 * objects is roughly equivalent to nested each_entry loops where the
3399 * loop for the rightmost object is put innermost.
3401 * innings = Enumerator::Product.new(1..9, ['top', 'bottom'])
3403 * innings.each do |i, h|
3416 * The method used against each enumerable object is `each_entry`
3417 * instead of `each` so that the product of N enumerable objects
3418 * yields an array of exactly N elements in each iteration.
3420 * When no enumerator is given, it calls a given block once yielding
3421 * an empty argument list.
3423 * This type of objects can be created by Enumerator.product.
3427 enum_product_mark(void *p
)
3429 struct enum_product
*ptr
= p
;
3430 rb_gc_mark_movable(ptr
->enums
);
3434 enum_product_compact(void *p
)
3436 struct enum_product
*ptr
= p
;
3437 ptr
->enums
= rb_gc_location(ptr
->enums
);
3440 #define enum_product_free RUBY_TYPED_DEFAULT_FREE
/* Reports memory used by the product wrapper struct itself. */
static size_t
enum_product_memsize(const void *p)
{
    return sizeof(struct enum_product);
}
3448 static const rb_data_type_t enum_product_data_type
= {
3453 enum_product_memsize
,
3454 enum_product_compact
,
3456 0, 0, RUBY_TYPED_FREE_IMMEDIATELY
3459 static struct enum_product
*
3460 enum_product_ptr(VALUE obj
)
3462 struct enum_product
*ptr
;
3464 TypedData_Get_Struct(obj
, struct enum_product
, &enum_product_data_type
, ptr
);
3465 if (!ptr
|| UNDEF_P(ptr
->enums
)) {
3466 rb_raise(rb_eArgError
, "uninitialized product");
3473 enum_product_allocate(VALUE klass
)
3475 struct enum_product
*ptr
;
3478 obj
= TypedData_Make_Struct(klass
, struct enum_product
, &enum_product_data_type
, ptr
);
3479 ptr
->enums
= Qundef
;
3486 * Enumerator::Product.new(*enums) -> enum
3488 * Generates a new enumerator object that generates a Cartesian
3489 * product of given enumerable objects.
3491 * e = Enumerator::Product.new(1..3, [4, 5])
3492 * e.to_a #=> [[1, 4], [1, 5], [2, 4], [2, 5], [3, 4], [3, 5]]
3496 enum_product_initialize(int argc
, VALUE
*argv
, VALUE obj
)
3498 struct enum_product
*ptr
;
3499 VALUE enums
= Qnil
, options
= Qnil
;
3501 rb_scan_args(argc
, argv
, "*:", &enums
, &options
);
3503 if (!NIL_P(options
) && !RHASH_EMPTY_P(options
)) {
3504 rb_exc_raise(rb_keyword_error_new("unknown", rb_hash_keys(options
)));
3507 rb_check_frozen(obj
);
3508 TypedData_Get_Struct(obj
, struct enum_product
, &enum_product_data_type
, ptr
);
3510 if (!ptr
) rb_raise(rb_eArgError
, "unallocated product");
3512 ptr
->enums
= rb_obj_freeze(enums
);
3519 enum_product_init_copy(VALUE obj
, VALUE orig
)
3521 struct enum_product
*ptr0
, *ptr1
;
3523 if (!OBJ_INIT_COPY(obj
, orig
)) return obj
;
3524 ptr0
= enum_product_ptr(orig
);
3526 TypedData_Get_Struct(obj
, struct enum_product
, &enum_product_data_type
, ptr1
);
3528 if (!ptr1
) rb_raise(rb_eArgError
, "unallocated product");
3530 ptr1
->enums
= ptr0
->enums
;
3536 enum_product_total_size(VALUE enums
)
3538 VALUE total
= INT2FIX(1);
3539 VALUE sizes
= rb_ary_hidden_new(RARRAY_LEN(enums
));
3542 for (i
= 0; i
< RARRAY_LEN(enums
); i
++) {
3543 VALUE size
= enum_size(RARRAY_AREF(enums
, i
));
3544 if (size
== INT2FIX(0)) {
3545 rb_ary_resize(sizes
, 0);
3548 rb_ary_push(sizes
, size
);
3550 for (i
= 0; i
< RARRAY_LEN(sizes
); i
++) {
3551 VALUE size
= RARRAY_AREF(sizes
, i
);
3553 if (NIL_P(size
) || (RB_TYPE_P(size
, T_FLOAT
) && isinf(NUM2DBL(size
)))) {
3556 if (!RB_INTEGER_TYPE_P(size
)) {
3560 total
= rb_funcall(total
, '*', 1, size
);
3568 * obj.size -> int, Float::INFINITY or nil
3570 * Returns the total size of the enumerator product calculated by
3571 * multiplying the sizes of enumerables in the product. If any of the
3572 * enumerables reports its size as nil or Float::INFINITY, that value
3573 * is returned as the size.
/*
 * call-seq:
 *   obj.size -> int, Float::INFINITY or nil
 *
 * Returns the total size of the product: the sizes of the factor
 * enumerables multiplied together (nil or Float::INFINITY propagates,
 * per enum_product_total_size). Raises ArgumentError if the product is
 * uninitialized (via enum_product_ptr).
 */
static VALUE
enum_product_size(VALUE obj)
{
    return enum_product_total_size(enum_product_ptr(obj)->enums);
}
/* Sized-enumerator callback: ignores args/eobj and delegates to
 * enum_product_size for the receiver. */
static VALUE
enum_product_enum_size(VALUE obj, VALUE args, VALUE eobj)
{
    return enum_product_size(obj);
}
3587 struct product_state
{
3595 static VALUE
product_each(VALUE
, struct product_state
*);
3598 product_each_i(RB_BLOCK_CALL_FUNC_ARGLIST(value
, state
))
3600 struct product_state
*pstate
= (struct product_state
*)state
;
3601 pstate
->argv
[pstate
->index
++] = value
;
3603 VALUE val
= product_each(pstate
->obj
, pstate
);
3609 product_each(VALUE obj
, struct product_state
*pstate
)
3611 struct enum_product
*ptr
= enum_product_ptr(obj
);
3612 VALUE enums
= ptr
->enums
;
3614 if (pstate
->index
< pstate
->argc
) {
3615 VALUE eobj
= RARRAY_AREF(enums
, pstate
->index
);
3617 rb_block_call(eobj
, id_each_entry
, 0, NULL
, product_each_i
, (VALUE
)pstate
);
3620 rb_funcall(pstate
->block
, id_call
, 1, rb_ary_new_from_values(pstate
->argc
, pstate
->argv
));
3627 enum_product_run(VALUE obj
, VALUE block
)
3629 struct enum_product
*ptr
= enum_product_ptr(obj
);
3630 int argc
= RARRAY_LENINT(ptr
->enums
);
3631 struct product_state state
= {
3636 .argv
= ALLOCA_N(VALUE
, argc
),
3639 return product_each(obj
, &state
);
3644 * obj.each { |...| ... } -> obj
3645 * obj.each -> enumerator
3647 * Iterates over the elements of the first enumerable by calling the
3648 * "each_entry" method on it with the given arguments, then proceeds
3649 * to the following enumerables in sequence until all of the
3650 * enumerables are exhausted.
3652 * If no block is given, returns an enumerator. Otherwise, returns self.
/*
 * call-seq:
 *   obj.each { |...| ... } -> obj
 *   obj.each -> enumerator
 *
 * Iterates over the Cartesian product of the enumerables via
 * "each_entry" on each factor. Returns a sized enumerator when no
 * block is given.
 */
static VALUE
enum_product_each(VALUE obj)
{
    RETURN_SIZED_ENUMERATOR(obj, 0, 0, enum_product_enum_size);

    return enum_product_run(obj, rb_block_proc());
}
3666 * Rewinds the product enumerator by calling the "rewind" method on
3667 * each enumerable in reverse order. Each call is performed only if
3668 * the enumerable responds to the method.
3671 enum_product_rewind(VALUE obj
)
3673 struct enum_product
*ptr
= enum_product_ptr(obj
);
3674 VALUE enums
= ptr
->enums
;
3677 for (i
= 0; i
< RARRAY_LEN(enums
); i
++) {
3678 rb_check_funcall(RARRAY_AREF(enums
, i
), id_rewind
, 0, 0);
3685 inspect_enum_product(VALUE obj
, VALUE dummy
, int recur
)
3687 VALUE klass
= rb_obj_class(obj
);
3688 struct enum_product
*ptr
;
3690 TypedData_Get_Struct(obj
, struct enum_product
, &enum_product_data_type
, ptr
);
3692 if (!ptr
|| UNDEF_P(ptr
->enums
)) {
3693 return rb_sprintf("#<%"PRIsVALUE
": uninitialized>", rb_class_path(klass
));
3697 return rb_sprintf("#<%"PRIsVALUE
": ...>", rb_class_path(klass
));
3700 return rb_sprintf("#<%"PRIsVALUE
": %+"PRIsVALUE
">", rb_class_path(klass
), ptr
->enums
);
3705 * obj.inspect -> string
3707 * Returns a printable version of the product enumerator.
/*
 * call-seq:
 *   obj.inspect -> string
 *
 * Returns a printable version of the product enumerator.
 * rb_exec_recursive guards against self-referential products.
 */
static VALUE
enum_product_inspect(VALUE obj)
{
    return rb_exec_recursive(inspect_enum_product, obj, 0);
}
3717 * Enumerator.product(*enums) -> enumerator
3718 * Enumerator.product(*enums) { |elts| ... } -> enumerator
3720 * Generates a new enumerator object that generates a Cartesian
3721 * product of given enumerable objects. This is equivalent to
3722 * Enumerator::Product.new.
3724 * e = Enumerator.product(1..3, [4, 5])
3725 * e.to_a #=> [[1, 4], [1, 5], [2, 4], [2, 5], [3, 4], [3, 5]]
3728 * When a block is given, calls the block with each N-element array
3729 * generated and returns +nil+.
3732 enumerator_s_product(int argc
, VALUE
*argv
, VALUE klass
)
3734 VALUE enums
= Qnil
, options
= Qnil
, block
= Qnil
;
3736 rb_scan_args(argc
, argv
, "*:&", &enums
, &options
, &block
);
3738 if (!NIL_P(options
) && !RHASH_EMPTY_P(options
)) {
3739 rb_exc_raise(rb_keyword_error_new("unknown", rb_hash_keys(options
)));
3742 VALUE obj
= enum_product_initialize(argc
, argv
, enum_product_allocate(rb_cEnumProduct
));
3744 if (!NIL_P(block
)) {
3745 enum_product_run(obj
, block
);
3753 * Document-class: Enumerator::ArithmeticSequence
3755 * Enumerator::ArithmeticSequence is a subclass of Enumerator,
3756 * that is a representation of sequences of numbers with common difference.
3757 * Instances of this class can be generated by the Range#step and Numeric#step
3760 * The class can be used for slicing Array (see Array#slice) or custom
3765 rb_arith_seq_new(VALUE obj
, VALUE meth
, int argc
, VALUE
const *argv
,
3766 rb_enumerator_size_func
*size_fn
,
3767 VALUE beg
, VALUE end
, VALUE step
, int excl
)
3769 VALUE aseq
= enumerator_init(enumerator_allocate(rb_cArithSeq
),
3770 obj
, meth
, argc
, argv
, size_fn
, Qnil
, rb_keyword_given_p());
3771 rb_ivar_set(aseq
, id_begin
, beg
);
3772 rb_ivar_set(aseq
, id_end
, end
);
3773 rb_ivar_set(aseq
, id_step
, step
);
3774 rb_ivar_set(aseq
, id_exclude_end
, RBOOL(excl
));
3779 * call-seq: aseq.begin -> num or nil
3781 * Returns the number that defines the first element of this arithmetic
/*
 * call-seq: aseq.begin -> num or nil
 *
 * Returns the number that defines the first element of this arithmetic
 * sequence (stored as the @begin ivar by rb_arith_seq_new).
 */
static VALUE
arith_seq_begin(VALUE self)
{
    return rb_ivar_get(self, id_begin);
}
3791 * call-seq: aseq.end -> num or nil
3793 * Returns the number that defines the end of this arithmetic sequence.
/*
 * call-seq: aseq.end -> num or nil
 *
 * Returns the number that defines the end of this arithmetic sequence
 * (nil for an endless sequence).
 */
static VALUE
arith_seq_end(VALUE self)
{
    return rb_ivar_get(self, id_end);
}
3802 * call-seq: aseq.step -> num
3804 * Returns the number that defines the common difference between
3805 * two adjacent elements in this arithmetic sequence.
/*
 * call-seq: aseq.step -> num
 *
 * Returns the common difference between adjacent elements of this
 * arithmetic sequence.
 */
static VALUE
arith_seq_step(VALUE self)
{
    return rb_ivar_get(self, id_step);
}
3814 * call-seq: aseq.exclude_end? -> true or false
3816 * Returns <code>true</code> if this arithmetic sequence excludes its end value.
/*
 * call-seq: aseq.exclude_end? -> true or false
 *
 * Returns true if this arithmetic sequence excludes its end value
 * (Ruby-level boolean, as stored by rb_arith_seq_new).
 */
static VALUE
arith_seq_exclude_end(VALUE self)
{
    return rb_ivar_get(self, id_exclude_end);
}
/* C-level boolean form of aseq.exclude_end? */
static int
arith_seq_exclude_end_p(VALUE self)
{
    return RTEST(arith_seq_exclude_end(self));
}
3831 rb_arithmetic_sequence_extract(VALUE obj
, rb_arithmetic_sequence_components_t
*component
)
3833 if (rb_obj_is_kind_of(obj
, rb_cArithSeq
)) {
3834 component
->begin
= arith_seq_begin(obj
);
3835 component
->end
= arith_seq_end(obj
);
3836 component
->step
= arith_seq_step(obj
);
3837 component
->exclude_end
= arith_seq_exclude_end_p(obj
);
3840 else if (rb_range_values(obj
, &component
->begin
, &component
->end
, &component
->exclude_end
)) {
3841 component
->step
= INT2FIX(1);
3849 rb_arithmetic_sequence_beg_len_step(VALUE obj
, long *begp
, long *lenp
, long *stepp
, long len
, int err
)
3851 RBIMPL_NONNULL_ARG(begp
);
3852 RBIMPL_NONNULL_ARG(lenp
);
3853 RBIMPL_NONNULL_ARG(stepp
);
3855 rb_arithmetic_sequence_components_t aseq
;
3856 if (!rb_arithmetic_sequence_extract(obj
, &aseq
)) {
3860 long step
= NIL_P(aseq
.step
) ? 1 : NUM2LONG(aseq
.step
);
3864 if (aseq
.exclude_end
&& !NIL_P(aseq
.end
)) {
3865 /* Handle exclusion before range reversal */
3866 aseq
.end
= LONG2NUM(NUM2LONG(aseq
.end
) + 1);
3868 /* Don't exclude the previous beginning */
3869 aseq
.exclude_end
= 0;
3871 VALUE tmp
= aseq
.begin
;
3872 aseq
.begin
= aseq
.end
;
3876 if (err
== 0 && (step
< -1 || step
> 1)) {
3877 if (rb_range_component_beg_len(aseq
.begin
, aseq
.end
, aseq
.exclude_end
, begp
, lenp
, len
, 1) == Qtrue
) {
3886 return rb_range_component_beg_len(aseq
.begin
, aseq
.end
, aseq
.exclude_end
, begp
, lenp
, len
, err
);
3890 rb_raise(rb_eRangeError
, "%+"PRIsVALUE
" out of range", obj
);
3896 * aseq.first -> num or nil
3897 * aseq.first(n) -> an_array
3899 * Returns the first number in this arithmetic sequence,
3900 * or an array of the first +n+ elements.
3903 arith_seq_first(int argc
, VALUE
*argv
, VALUE self
)
3909 rb_check_arity(argc
, 0, 1);
3911 b
= arith_seq_begin(self
);
3912 e
= arith_seq_end(self
);
3913 s
= arith_seq_step(self
);
3919 VALUE zero
= INT2FIX(0);
3920 int r
= rb_cmpint(rb_num_coerce_cmp(s
, zero
, idCmp
), s
, zero
);
3921 if (r
> 0 && RTEST(rb_funcall(b
, '>', 1, e
))) {
3924 if (r
< 0 && RTEST(rb_funcall(b
, '<', 1, e
))) {
3931 // TODO: the following code should be extracted as arith_seq_take
3933 n
= NUM2LONG(argv
[0]);
3935 rb_raise(rb_eArgError
, "attempt to take negative size");
3938 return rb_ary_new_capa(0);
3941 x
= arith_seq_exclude_end_p(self
);
3943 if (FIXNUM_P(b
) && NIL_P(e
) && FIXNUM_P(s
)) {
3944 long i
= FIX2LONG(b
), unit
= FIX2LONG(s
);
3945 ary
= rb_ary_new_capa(n
);
3946 while (n
> 0 && FIXABLE(i
)) {
3947 rb_ary_push(ary
, LONG2FIX(i
));
3948 i
+= unit
; // FIXABLE + FIXABLE never overflow;
3954 rb_ary_push(ary
, b
);
3955 b
= rb_big_plus(b
, s
);
3961 else if (FIXNUM_P(b
) && FIXNUM_P(e
) && FIXNUM_P(s
)) {
3962 long i
= FIX2LONG(b
);
3963 long end
= FIX2LONG(e
);
3964 long unit
= FIX2LONG(s
);
3971 if (len
< 0) len
= 0;
3972 ary
= rb_ary_new_capa((n
< len
) ? n
: len
);
3973 while (n
> 0 && i
< end
) {
3974 rb_ary_push(ary
, LONG2FIX(i
));
3975 if (i
+ unit
< i
) break;
3984 if (len
< 0) len
= 0;
3985 ary
= rb_ary_new_capa((n
< len
) ? n
: len
);
3986 while (n
> 0 && i
> end
) {
3987 rb_ary_push(ary
, LONG2FIX(i
));
3988 if (i
+ unit
> i
) break;
3995 else if (RB_FLOAT_TYPE_P(b
) || RB_FLOAT_TYPE_P(e
) || RB_FLOAT_TYPE_P(s
)) {
3996 /* generate values like ruby_float_step */
3998 double unit
= NUM2DBL(s
);
3999 double beg
= NUM2DBL(b
);
4000 double end
= NIL_P(e
) ? (unit
< 0 ? -1 : 1)*HUGE_VAL
: NUM2DBL(e
);
4001 double len
= ruby_float_step_size(beg
, end
, unit
, x
);
4009 ary
= rb_ary_new_capa(1);
4010 rb_ary_push(ary
, DBL2NUM(beg
));
4013 ary
= rb_ary_new_capa(0);
4016 else if (unit
== 0) {
4017 VALUE val
= DBL2NUM(beg
);
4018 ary
= rb_ary_new_capa(n
);
4019 for (i
= 0; i
< len
; ++i
) {
4020 rb_ary_push(ary
, val
);
4024 ary
= rb_ary_new_capa(n
);
4025 for (i
= 0; i
< n
; ++i
) {
4026 double d
= i
*unit
+beg
;
4027 if (unit
>= 0 ? end
< d
: d
< end
) d
= end
;
4028 rb_ary_push(ary
, DBL2NUM(d
));
4035 return rb_call_super(argc
, argv
);
4039 num_plus(VALUE a
, VALUE b
)
4041 if (RB_INTEGER_TYPE_P(a
)) {
4042 return rb_int_plus(a
, b
);
4044 else if (RB_FLOAT_TYPE_P(a
)) {
4045 return rb_float_plus(a
, b
);
4047 else if (RB_TYPE_P(a
, T_RATIONAL
)) {
4048 return rb_rational_plus(a
, b
);
4051 return rb_funcallv(a
, '+', 1, &b
);
4056 num_minus(VALUE a
, VALUE b
)
4058 if (RB_INTEGER_TYPE_P(a
)) {
4059 return rb_int_minus(a
, b
);
4061 else if (RB_FLOAT_TYPE_P(a
)) {
4062 return rb_float_minus(a
, b
);
4064 else if (RB_TYPE_P(a
, T_RATIONAL
)) {
4065 return rb_rational_minus(a
, b
);
4068 return rb_funcallv(a
, '-', 1, &b
);
4073 num_mul(VALUE a
, VALUE b
)
4075 if (RB_INTEGER_TYPE_P(a
)) {
4076 return rb_int_mul(a
, b
);
4078 else if (RB_FLOAT_TYPE_P(a
)) {
4079 return rb_float_mul(a
, b
);
4081 else if (RB_TYPE_P(a
, T_RATIONAL
)) {
4082 return rb_rational_mul(a
, b
);
4085 return rb_funcallv(a
, '*', 1, &b
);
/* Floor division used by ArithmeticSequence: compute the quotient with
 * the same type dispatch as num_plus/num_minus, then floor it so the
 * result matches Integer#div semantics for every numeric type.
 * NOTE(review): the `VALUE q;` declaration and the bare `return q;` for
 * the Integer case were elided in this chunk and are reconstructed —
 * verify against the full file. */
static VALUE
num_idiv(VALUE a, VALUE b)
{
    VALUE q;

    if (RB_INTEGER_TYPE_P(a)) {
        q = rb_int_idiv(a, b);
    }
    else if (RB_FLOAT_TYPE_P(a)) {
        q = rb_float_div(a, b);
    }
    else if (RB_TYPE_P(a, T_RATIONAL)) {
        q = rb_rational_div(a, b);
    }
    else {
        q = rb_funcallv(a, idDiv, 1, &b);
    }

    if (RB_INTEGER_TYPE_P(q)) {
        /* Integer division already floors. */
        return q;
    }
    else if (RB_FLOAT_TYPE_P(q)) {
        return rb_float_floor(q, 0);
    }
    else if (RB_TYPE_P(q, T_RATIONAL)) {
        return rb_rational_floor(q, 0);
    }
    else {
        return rb_funcall(q, rb_intern("floor"), 0);
    }
}
4122 * aseq.last -> num or nil
4123 * aseq.last(n) -> an_array
4125 * Returns the last number in this arithmetic sequence,
4126 * or an array of the last +n+ elements.
4129 arith_seq_last(int argc
, VALUE
*argv
, VALUE self
)
4131 VALUE b
, e
, s
, len_1
, len
, last
, nv
, ary
;
4132 int last_is_adjusted
;
4135 e
= arith_seq_end(self
);
4137 rb_raise(rb_eRangeError
,
4138 "cannot get the last element of endless arithmetic sequence");
4141 b
= arith_seq_begin(self
);
4142 s
= arith_seq_step(self
);
4144 len_1
= num_idiv(num_minus(e
, b
), s
);
4145 if (rb_num_negative_int_p(len_1
)) {
4149 return rb_ary_new_capa(0);
4152 last
= num_plus(b
, num_mul(s
, len_1
));
4153 if ((last_is_adjusted
= arith_seq_exclude_end_p(self
) && rb_equal(last
, e
))) {
4154 last
= num_minus(last
, s
);
4161 if (last_is_adjusted
) {
4165 len
= rb_int_plus(len_1
, INT2FIX(1));
4168 rb_scan_args(argc
, argv
, "1", &nv
);
4169 if (!RB_INTEGER_TYPE_P(nv
)) {
4172 if (RTEST(rb_int_gt(nv
, len
))) {
4177 rb_raise(rb_eArgError
, "negative array size");
4180 ary
= rb_ary_new_capa(n
);
4181 b
= rb_int_minus(last
, rb_int_mul(s
, nv
));
4183 b
= rb_int_plus(b
, s
);
4184 rb_ary_push(ary
, b
);
4193 * aseq.inspect -> string
4195 * Convert this arithmetic sequence to a printable form.
4198 arith_seq_inspect(VALUE self
)
4200 struct enumerator
*e
;
4201 VALUE eobj
, str
, eargs
;
4204 TypedData_Get_Struct(self
, struct enumerator
, &enumerator_data_type
, e
);
4206 eobj
= rb_attr_get(self
, id_receiver
);
4211 range_p
= RTEST(rb_obj_is_kind_of(eobj
, rb_cRange
));
4212 str
= rb_sprintf("(%s%"PRIsVALUE
"%s.", range_p
? "(" : "", eobj
, range_p
? ")" : "");
4214 rb_str_buf_append(str
, rb_id2str(e
->meth
));
4216 eargs
= rb_attr_get(eobj
, id_arguments
);
4220 if (eargs
!= Qfalse
) {
4221 long argc
= RARRAY_LEN(eargs
);
4222 const VALUE
*argv
= RARRAY_CONST_PTR(eargs
); /* WB: no new reference */
4227 rb_str_buf_cat2(str
, "(");
4229 if (RB_TYPE_P(argv
[argc
-1], T_HASH
)) {
4231 rb_hash_foreach(argv
[argc
-1], key_symbol_p
, (VALUE
)&all_key
);
4232 if (all_key
) kwds
= argv
[--argc
];
4236 VALUE arg
= *argv
++;
4238 rb_str_append(str
, rb_inspect(arg
));
4239 rb_str_buf_cat2(str
, ", ");
4242 rb_hash_foreach(kwds
, kwd_append
, str
);
4244 rb_str_set_len(str
, RSTRING_LEN(str
)-2); /* drop the last ", " */
4245 rb_str_buf_cat2(str
, ")");
4249 rb_str_buf_cat2(str
, ")");
4256 * aseq == obj -> true or false
4258 * Returns <code>true</code> only if +obj+ is an Enumerator::ArithmeticSequence,
4259 * has equivalent begin, end, step, and exclude_end? settings.
4262 arith_seq_eq(VALUE self
, VALUE other
)
4264 if (!RTEST(rb_obj_is_kind_of(other
, rb_cArithSeq
))) {
4268 if (!rb_equal(arith_seq_begin(self
), arith_seq_begin(other
))) {
4272 if (!rb_equal(arith_seq_end(self
), arith_seq_end(other
))) {
4276 if (!rb_equal(arith_seq_step(self
), arith_seq_step(other
))) {
4280 if (arith_seq_exclude_end_p(self
) != arith_seq_exclude_end_p(other
)) {
4289 * aseq.hash -> integer
4291 * Compute a hash-value for this arithmetic sequence.
4292 * Two arithmetic sequences with same begin, end, step, and exclude_end?
4293 * values will generate the same hash-value.
4295 * See also Object#hash.
4298 arith_seq_hash(VALUE self
)
4303 hash
= rb_hash_start(arith_seq_exclude_end_p(self
));
4304 v
= rb_hash(arith_seq_begin(self
));
4305 hash
= rb_hash_uint(hash
, NUM2LONG(v
));
4306 v
= rb_hash(arith_seq_end(self
));
4307 hash
= rb_hash_uint(hash
, NUM2LONG(v
));
4308 v
= rb_hash(arith_seq_step(self
));
4309 hash
= rb_hash_uint(hash
, NUM2LONG(v
));
4310 hash
= rb_hash_end(hash
);
4312 return ST2FIX(hash
);
4315 #define NUM_GE(x, y) RTEST(rb_num_coerce_relop((x), (y), idGE))
4317 struct arith_seq_gen
{
4326 * aseq.each {|i| block } -> aseq
4330 arith_seq_each(VALUE self
)
4332 VALUE c
, e
, s
, len_1
, last
;
4335 if (!rb_block_given_p()) return self
;
4337 c
= arith_seq_begin(self
);
4338 e
= arith_seq_end(self
);
4339 s
= arith_seq_step(self
);
4340 x
= arith_seq_exclude_end_p(self
);
4342 if (!RB_TYPE_P(s
, T_COMPLEX
) && ruby_float_step(c
, e
, s
, x
, TRUE
)) {
4349 c
= rb_int_plus(c
, s
);
4355 if (rb_equal(s
, INT2FIX(0))) {
4363 len_1
= num_idiv(num_minus(e
, c
), s
);
4364 last
= num_plus(c
, num_mul(s
, len_1
));
4365 if (x
&& rb_equal(last
, e
)) {
4366 last
= num_minus(last
, s
);
4369 if (rb_num_negative_int_p(s
)) {
4370 while (NUM_GE(c
, last
)) {
4376 while (NUM_GE(last
, c
)) {
4387 * aseq.size -> num or nil
4389 * Returns the number of elements in this arithmetic sequence if it is a finite
4390 * sequence. Otherwise, returns <code>nil</code>.
4393 arith_seq_size(VALUE self
)
4395 VALUE b
, e
, s
, len_1
, len
, last
;
4398 b
= arith_seq_begin(self
);
4399 e
= arith_seq_end(self
);
4400 s
= arith_seq_step(self
);
4401 x
= arith_seq_exclude_end_p(self
);
4403 if (RB_FLOAT_TYPE_P(b
) || RB_FLOAT_TYPE_P(e
) || RB_FLOAT_TYPE_P(s
)) {
4407 if (rb_num_negative_int_p(s
)) {
4418 n
= ruby_float_step_size(NUM2DBL(b
), ee
, NUM2DBL(s
), x
);
4419 if (isinf(n
)) return DBL2NUM(n
);
4420 if (POSFIXABLE(n
)) return LONG2FIX((long)n
);
4421 return rb_dbl2big(n
);
4425 return DBL2NUM(HUGE_VAL
);
4428 if (!rb_obj_is_kind_of(s
, rb_cNumeric
)) {
4432 if (rb_equal(s
, INT2FIX(0))) {
4433 return DBL2NUM(HUGE_VAL
);
4436 len_1
= rb_int_idiv(rb_int_minus(e
, b
), s
);
4437 if (rb_num_negative_int_p(len_1
)) {
4441 last
= rb_int_plus(b
, rb_int_mul(s
, len_1
));
4442 if (x
&& rb_equal(last
, e
)) {
4446 len
= rb_int_plus(len_1
, INT2FIX(1));
4452 #define sym(name) ID2SYM(rb_intern_const(name))
4454 InitVM_Enumerator(void)
4456 ID id_private
= rb_intern_const("private");
4458 rb_define_method(rb_mKernel
, "to_enum", obj_to_enum
, -1);
4459 rb_define_method(rb_mKernel
, "enum_for", obj_to_enum
, -1);
4461 rb_cEnumerator
= rb_define_class("Enumerator", rb_cObject
);
4462 rb_include_module(rb_cEnumerator
, rb_mEnumerable
);
4464 rb_define_alloc_func(rb_cEnumerator
, enumerator_allocate
);
4465 rb_define_method(rb_cEnumerator
, "initialize", enumerator_initialize
, -1);
4466 rb_define_method(rb_cEnumerator
, "initialize_copy", enumerator_init_copy
, 1);
4467 rb_define_method(rb_cEnumerator
, "each", enumerator_each
, -1);
4468 rb_define_method(rb_cEnumerator
, "each_with_index", enumerator_each_with_index
, 0);
4469 rb_define_method(rb_cEnumerator
, "each_with_object", enumerator_with_object
, 1);
4470 rb_define_method(rb_cEnumerator
, "with_index", enumerator_with_index
, -1);
4471 rb_define_method(rb_cEnumerator
, "with_object", enumerator_with_object
, 1);
4472 rb_define_method(rb_cEnumerator
, "next_values", enumerator_next_values
, 0);
4473 rb_define_method(rb_cEnumerator
, "peek_values", enumerator_peek_values_m
, 0);
4474 rb_define_method(rb_cEnumerator
, "next", enumerator_next
, 0);
4475 rb_define_method(rb_cEnumerator
, "peek", enumerator_peek
, 0);
4476 rb_define_method(rb_cEnumerator
, "feed", enumerator_feed
, 1);
4477 rb_define_method(rb_cEnumerator
, "rewind", enumerator_rewind
, 0);
4478 rb_define_method(rb_cEnumerator
, "inspect", enumerator_inspect
, 0);
4479 rb_define_method(rb_cEnumerator
, "size", enumerator_size
, 0);
4480 rb_define_method(rb_cEnumerator
, "+", enumerator_plus
, 1);
4481 rb_define_method(rb_mEnumerable
, "chain", enum_chain
, -1);
4484 rb_cLazy
= rb_define_class_under(rb_cEnumerator
, "Lazy", rb_cEnumerator
);
4485 rb_define_method(rb_mEnumerable
, "lazy", enumerable_lazy
, 0);
4487 rb_define_alias(rb_cLazy
, "_enumerable_map", "map");
4488 rb_define_alias(rb_cLazy
, "_enumerable_collect", "collect");
4489 rb_define_alias(rb_cLazy
, "_enumerable_flat_map", "flat_map");
4490 rb_define_alias(rb_cLazy
, "_enumerable_collect_concat", "collect_concat");
4491 rb_define_alias(rb_cLazy
, "_enumerable_select", "select");
4492 rb_define_alias(rb_cLazy
, "_enumerable_find_all", "find_all");
4493 rb_define_alias(rb_cLazy
, "_enumerable_filter", "filter");
4494 rb_define_alias(rb_cLazy
, "_enumerable_filter_map", "filter_map");
4495 rb_define_alias(rb_cLazy
, "_enumerable_reject", "reject");
4496 rb_define_alias(rb_cLazy
, "_enumerable_grep", "grep");
4497 rb_define_alias(rb_cLazy
, "_enumerable_grep_v", "grep_v");
4498 rb_define_alias(rb_cLazy
, "_enumerable_zip", "zip");
4499 rb_define_alias(rb_cLazy
, "_enumerable_take", "take");
4500 rb_define_alias(rb_cLazy
, "_enumerable_take_while", "take_while");
4501 rb_define_alias(rb_cLazy
, "_enumerable_drop", "drop");
4502 rb_define_alias(rb_cLazy
, "_enumerable_drop_while", "drop_while");
4503 rb_define_alias(rb_cLazy
, "_enumerable_uniq", "uniq");
4504 rb_define_private_method(rb_cLazy
, "_enumerable_with_index", enumerator_with_index
, -1);
4506 rb_funcall(rb_cLazy
, id_private
, 1, sym("_enumerable_map"));
4507 rb_funcall(rb_cLazy
, id_private
, 1, sym("_enumerable_collect"));
4508 rb_funcall(rb_cLazy
, id_private
, 1, sym("_enumerable_flat_map"));
4509 rb_funcall(rb_cLazy
, id_private
, 1, sym("_enumerable_collect_concat"));
4510 rb_funcall(rb_cLazy
, id_private
, 1, sym("_enumerable_select"));
4511 rb_funcall(rb_cLazy
, id_private
, 1, sym("_enumerable_find_all"));
4512 rb_funcall(rb_cLazy
, id_private
, 1, sym("_enumerable_filter"));
4513 rb_funcall(rb_cLazy
, id_private
, 1, sym("_enumerable_filter_map"));
4514 rb_funcall(rb_cLazy
, id_private
, 1, sym("_enumerable_reject"));
4515 rb_funcall(rb_cLazy
, id_private
, 1, sym("_enumerable_grep"));
4516 rb_funcall(rb_cLazy
, id_private
, 1, sym("_enumerable_grep_v"));
4517 rb_funcall(rb_cLazy
, id_private
, 1, sym("_enumerable_zip"));
4518 rb_funcall(rb_cLazy
, id_private
, 1, sym("_enumerable_take"));
4519 rb_funcall(rb_cLazy
, id_private
, 1, sym("_enumerable_take_while"));
4520 rb_funcall(rb_cLazy
, id_private
, 1, sym("_enumerable_drop"));
4521 rb_funcall(rb_cLazy
, id_private
, 1, sym("_enumerable_drop_while"));
4522 rb_funcall(rb_cLazy
, id_private
, 1, sym("_enumerable_uniq"));
4524 rb_define_method(rb_cLazy
, "initialize", lazy_initialize
, -1);
4525 rb_define_method(rb_cLazy
, "to_enum", lazy_to_enum
, -1);
4526 rb_define_method(rb_cLazy
, "enum_for", lazy_to_enum
, -1);
4527 rb_define_method(rb_cLazy
, "eager", lazy_eager
, 0);
4528 rb_define_method(rb_cLazy
, "map", lazy_map
, 0);
4529 rb_define_method(rb_cLazy
, "collect", lazy_map
, 0);
4530 rb_define_method(rb_cLazy
, "flat_map", lazy_flat_map
, 0);
4531 rb_define_method(rb_cLazy
, "collect_concat", lazy_flat_map
, 0);
4532 rb_define_method(rb_cLazy
, "select", lazy_select
, 0);
4533 rb_define_method(rb_cLazy
, "find_all", lazy_select
, 0);
4534 rb_define_method(rb_cLazy
, "filter", lazy_select
, 0);
4535 rb_define_method(rb_cLazy
, "filter_map", lazy_filter_map
, 0);
4536 rb_define_method(rb_cLazy
, "reject", lazy_reject
, 0);
4537 rb_define_method(rb_cLazy
, "grep", lazy_grep
, 1);
4538 rb_define_method(rb_cLazy
, "grep_v", lazy_grep_v
, 1);
4539 rb_define_method(rb_cLazy
, "zip", lazy_zip
, -1);
4540 rb_define_method(rb_cLazy
, "take", lazy_take
, 1);
4541 rb_define_method(rb_cLazy
, "take_while", lazy_take_while
, 0);
4542 rb_define_method(rb_cLazy
, "drop", lazy_drop
, 1);
4543 rb_define_method(rb_cLazy
, "drop_while", lazy_drop_while
, 0);
4544 rb_define_method(rb_cLazy
, "lazy", lazy_lazy
, 0);
4545 rb_define_method(rb_cLazy
, "chunk", lazy_super
, -1);
4546 rb_define_method(rb_cLazy
, "slice_before", lazy_super
, -1);
4547 rb_define_method(rb_cLazy
, "slice_after", lazy_super
, -1);
4548 rb_define_method(rb_cLazy
, "slice_when", lazy_super
, -1);
4549 rb_define_method(rb_cLazy
, "chunk_while", lazy_super
, -1);
4550 rb_define_method(rb_cLazy
, "uniq", lazy_uniq
, 0);
4551 rb_define_method(rb_cLazy
, "compact", lazy_compact
, 0);
4552 rb_define_method(rb_cLazy
, "with_index", lazy_with_index
, -1);
4554 lazy_use_super_method
= rb_hash_new_with_size(18);
4555 rb_hash_aset(lazy_use_super_method
, sym("map"), sym("_enumerable_map"));
4556 rb_hash_aset(lazy_use_super_method
, sym("collect"), sym("_enumerable_collect"));
4557 rb_hash_aset(lazy_use_super_method
, sym("flat_map"), sym("_enumerable_flat_map"));
4558 rb_hash_aset(lazy_use_super_method
, sym("collect_concat"), sym("_enumerable_collect_concat"));
4559 rb_hash_aset(lazy_use_super_method
, sym("select"), sym("_enumerable_select"));
4560 rb_hash_aset(lazy_use_super_method
, sym("find_all"), sym("_enumerable_find_all"));
4561 rb_hash_aset(lazy_use_super_method
, sym("filter"), sym("_enumerable_filter"));
4562 rb_hash_aset(lazy_use_super_method
, sym("filter_map"), sym("_enumerable_filter_map"));
4563 rb_hash_aset(lazy_use_super_method
, sym("reject"), sym("_enumerable_reject"));
4564 rb_hash_aset(lazy_use_super_method
, sym("grep"), sym("_enumerable_grep"));
4565 rb_hash_aset(lazy_use_super_method
, sym("grep_v"), sym("_enumerable_grep_v"));
4566 rb_hash_aset(lazy_use_super_method
, sym("zip"), sym("_enumerable_zip"));
4567 rb_hash_aset(lazy_use_super_method
, sym("take"), sym("_enumerable_take"));
4568 rb_hash_aset(lazy_use_super_method
, sym("take_while"), sym("_enumerable_take_while"));
4569 rb_hash_aset(lazy_use_super_method
, sym("drop"), sym("_enumerable_drop"));
4570 rb_hash_aset(lazy_use_super_method
, sym("drop_while"), sym("_enumerable_drop_while"));
4571 rb_hash_aset(lazy_use_super_method
, sym("uniq"), sym("_enumerable_uniq"));
4572 rb_hash_aset(lazy_use_super_method
, sym("with_index"), sym("_enumerable_with_index"));
4573 rb_obj_freeze(lazy_use_super_method
);
4574 rb_vm_register_global_object(lazy_use_super_method
);
4576 #if 0 /* for RDoc */
4577 rb_define_method(rb_cLazy
, "to_a", lazy_to_a
, 0);
4578 rb_define_method(rb_cLazy
, "chunk", lazy_chunk
, 0);
4579 rb_define_method(rb_cLazy
, "chunk_while", lazy_chunk_while
, 0);
4580 rb_define_method(rb_cLazy
, "slice_after", lazy_slice_after
, 0);
4581 rb_define_method(rb_cLazy
, "slice_before", lazy_slice_before
, 0);
4582 rb_define_method(rb_cLazy
, "slice_when", lazy_slice_when
, 0);
4584 rb_define_alias(rb_cLazy
, "force", "to_a");
4586 rb_eStopIteration
= rb_define_class("StopIteration", rb_eIndexError
);
4587 rb_define_method(rb_eStopIteration
, "result", stop_result
, 0);
4590 rb_cGenerator
= rb_define_class_under(rb_cEnumerator
, "Generator", rb_cObject
);
4591 rb_include_module(rb_cGenerator
, rb_mEnumerable
);
4592 rb_define_alloc_func(rb_cGenerator
, generator_allocate
);
4593 rb_define_method(rb_cGenerator
, "initialize", generator_initialize
, -1);
4594 rb_define_method(rb_cGenerator
, "initialize_copy", generator_init_copy
, 1);
4595 rb_define_method(rb_cGenerator
, "each", generator_each
, -1);
4598 rb_cYielder
= rb_define_class_under(rb_cEnumerator
, "Yielder", rb_cObject
);
4599 rb_define_alloc_func(rb_cYielder
, yielder_allocate
);
4600 rb_define_method(rb_cYielder
, "initialize", yielder_initialize
, 0);
4601 rb_define_method(rb_cYielder
, "yield", yielder_yield
, -2);
4602 rb_define_method(rb_cYielder
, "<<", yielder_yield_push
, 1);
4603 rb_define_method(rb_cYielder
, "to_proc", yielder_to_proc
, 0);
4606 rb_cEnumProducer
= rb_define_class_under(rb_cEnumerator
, "Producer", rb_cObject
);
4607 rb_define_alloc_func(rb_cEnumProducer
, producer_allocate
);
4608 rb_define_method(rb_cEnumProducer
, "each", producer_each
, 0);
4609 rb_define_singleton_method(rb_cEnumerator
, "produce", enumerator_s_produce
, -1);
4612 rb_cEnumChain
= rb_define_class_under(rb_cEnumerator
, "Chain", rb_cEnumerator
);
4613 rb_define_alloc_func(rb_cEnumChain
, enum_chain_allocate
);
4614 rb_define_method(rb_cEnumChain
, "initialize", enum_chain_initialize
, -2);
4615 rb_define_method(rb_cEnumChain
, "initialize_copy", enum_chain_init_copy
, 1);
4616 rb_define_method(rb_cEnumChain
, "each", enum_chain_each
, -1);
4617 rb_define_method(rb_cEnumChain
, "size", enum_chain_size
, 0);
4618 rb_define_method(rb_cEnumChain
, "rewind", enum_chain_rewind
, 0);
4619 rb_define_method(rb_cEnumChain
, "inspect", enum_chain_inspect
, 0);
4620 rb_undef_method(rb_cEnumChain
, "feed");
4621 rb_undef_method(rb_cEnumChain
, "next");
4622 rb_undef_method(rb_cEnumChain
, "next_values");
4623 rb_undef_method(rb_cEnumChain
, "peek");
4624 rb_undef_method(rb_cEnumChain
, "peek_values");
4627 rb_cEnumProduct
= rb_define_class_under(rb_cEnumerator
, "Product", rb_cEnumerator
);
4628 rb_define_alloc_func(rb_cEnumProduct
, enum_product_allocate
);
4629 rb_define_method(rb_cEnumProduct
, "initialize", enum_product_initialize
, -1);
4630 rb_define_method(rb_cEnumProduct
, "initialize_copy", enum_product_init_copy
, 1);
4631 rb_define_method(rb_cEnumProduct
, "each", enum_product_each
, 0);
4632 rb_define_method(rb_cEnumProduct
, "size", enum_product_size
, 0);
4633 rb_define_method(rb_cEnumProduct
, "rewind", enum_product_rewind
, 0);
4634 rb_define_method(rb_cEnumProduct
, "inspect", enum_product_inspect
, 0);
4635 rb_undef_method(rb_cEnumProduct
, "feed");
4636 rb_undef_method(rb_cEnumProduct
, "next");
4637 rb_undef_method(rb_cEnumProduct
, "next_values");
4638 rb_undef_method(rb_cEnumProduct
, "peek");
4639 rb_undef_method(rb_cEnumProduct
, "peek_values");
4640 rb_define_singleton_method(rb_cEnumerator
, "product", enumerator_s_product
, -1);
4642 /* ArithmeticSequence */
4643 rb_cArithSeq
= rb_define_class_under(rb_cEnumerator
, "ArithmeticSequence", rb_cEnumerator
);
4644 rb_undef_alloc_func(rb_cArithSeq
);
4645 rb_undef_method(CLASS_OF(rb_cArithSeq
), "new");
4646 rb_define_method(rb_cArithSeq
, "begin", arith_seq_begin
, 0);
4647 rb_define_method(rb_cArithSeq
, "end", arith_seq_end
, 0);
4648 rb_define_method(rb_cArithSeq
, "exclude_end?", arith_seq_exclude_end
, 0);
4649 rb_define_method(rb_cArithSeq
, "step", arith_seq_step
, 0);
4650 rb_define_method(rb_cArithSeq
, "first", arith_seq_first
, -1);
4651 rb_define_method(rb_cArithSeq
, "last", arith_seq_last
, -1);
4652 rb_define_method(rb_cArithSeq
, "inspect", arith_seq_inspect
, 0);
4653 rb_define_method(rb_cArithSeq
, "==", arith_seq_eq
, 1);
4654 rb_define_method(rb_cArithSeq
, "===", arith_seq_eq
, 1);
4655 rb_define_method(rb_cArithSeq
, "eql?", arith_seq_eq
, 1);
4656 rb_define_method(rb_cArithSeq
, "hash", arith_seq_hash
, 0);
4657 rb_define_method(rb_cArithSeq
, "each", arith_seq_each
, 0);
4658 rb_define_method(rb_cArithSeq
, "size", arith_seq_size
, 0);
4660 rb_provide("enumerator.so"); /* for backward compatibility */
4665 Init_Enumerator(void)
4667 id_rewind
= rb_intern_const("rewind");
4668 id_new
= rb_intern_const("new");
4669 id_next
= rb_intern_const("next");
4670 id_result
= rb_intern_const("result");
4671 id_receiver
= rb_intern_const("receiver");
4672 id_arguments
= rb_intern_const("arguments");
4673 id_memo
= rb_intern_const("memo");
4674 id_method
= rb_intern_const("method");
4675 id_force
= rb_intern_const("force");
4676 id_to_enum
= rb_intern_const("to_enum");
4677 id_each_entry
= rb_intern_const("each_entry");
4678 id_begin
= rb_intern_const("begin");
4679 id_end
= rb_intern_const("end");
4680 id_step
= rb_intern_const("step");
4681 id_exclude_end
= rb_intern_const("exclude_end");
4682 sym_each
= ID2SYM(id_each
);
4683 sym_cycle
= ID2SYM(rb_intern_const("cycle"));
4684 sym_yield
= ID2SYM(rb_intern_const("yield"));