1 /************************************************
3 enumerator.c - provides Enumerator class
5 $Author$
7 Copyright (C) 2001-2003 Akinori MUSHA
9 $Idaemons: /home/cvs/rb/enumerator/enumerator.c,v 1.1.1.1 2001/07/15 10:12:48 knu Exp $
10 $RoughId: enumerator.c,v 1.6 2003/07/27 11:03:24 nobu Exp $
11 $Id$
13 ************************************************/
15 #include "ruby/internal/config.h"
17 #ifdef HAVE_FLOAT_H
18 #include <float.h>
19 #endif
21 #include "id.h"
22 #include "internal.h"
23 #include "internal/enumerator.h"
24 #include "internal/error.h"
25 #include "internal/hash.h"
26 #include "internal/imemo.h"
27 #include "internal/numeric.h"
28 #include "internal/range.h"
29 #include "internal/rational.h"
30 #include "ruby/ruby.h"
33 * Document-class: Enumerator
35 * A class which allows both internal and external iteration.
37 * An Enumerator can be created by the following methods.
38 * - Object#to_enum
39 * - Object#enum_for
40 * - Enumerator.new
42 * Most methods have two forms: a block form where the contents
43 * are evaluated for each item in the enumeration, and a non-block form
44 * which returns a new Enumerator wrapping the iteration.
46 * enumerator = %w(one two three).each
47 * puts enumerator.class # => Enumerator
49 * enumerator.each_with_object("foo") do |item, obj|
50 * puts "#{obj}: #{item}"
51 * end
53 * # foo: one
54 * # foo: two
55 * # foo: three
57 * enum_with_obj = enumerator.each_with_object("foo")
58 * puts enum_with_obj.class # => Enumerator
60 * enum_with_obj.each do |item, obj|
61 * puts "#{obj}: #{item}"
62 * end
64 * # foo: one
65 * # foo: two
66 * # foo: three
68 * This allows you to chain Enumerators together. For example, you
69 * can map a list's elements to strings containing the index
70 * and the element as a string via:
72 * puts %w[foo bar baz].map.with_index { |w, i| "#{i}:#{w}" }
73 * # => ["0:foo", "1:bar", "2:baz"]
75 * An Enumerator can also be used as an external iterator.
76 * For example, Enumerator#next returns the next value of the iterator
77 * or raises StopIteration if the Enumerator is at the end.
79 * e = [1,2,3].each # returns an enumerator object.
80 * puts e.next # => 1
81 * puts e.next # => 2
82 * puts e.next # => 3
83 * puts e.next # raises StopIteration
 * Note that the enumeration sequence used by +next+, +next_values+, +peek+,
 * and +peek_values+ does not affect other non-external enumeration methods,
 * unless the underlying iteration method itself has side effects, e.g.
 * IO#each_line.
 * Moreover, the implementation typically uses fibers, so performance may be
 * slower and exception stack traces may differ from what you would expect.
93 * You can use this to implement an internal iterator as follows:
 *   def ext_each(e)
 *     while true
 *       begin
 *         vs = e.next_values
 *       rescue StopIteration
 *         return $!.result
 *       end
 *       y = yield(*vs)
 *       e.feed y
 *     end
 *   end
 *   o = Object.new
 *
 *   def o.each
 *     puts yield
 *     puts yield(1)
 *     puts yield(1, 2)
 *     3
 *   end
 *
 *   # use o.each as an internal iterator directly.
 *   puts o.each {|*x| puts x; [:b, *x] }
 *   # => [], [:b], [1], [:b, 1], [1, 2], [:b, 1, 2], 3
 *
 *   # convert o.each to an external iterator for
 *   # implementing an internal iterator.
 *   puts ext_each(o.to_enum) {|*x| puts x; [:b, *x] }
 *   # => [], [:b], [1], [:b, 1], [1, 2], [:b, 1, 2], 3
126 VALUE rb_cEnumerator;
127 static VALUE rb_cLazy;
128 static ID id_rewind, id_new, id_to_enum;
129 static ID id_next, id_result, id_receiver, id_arguments, id_memo, id_method, id_force;
130 static ID id_begin, id_end, id_step, id_exclude_end;
131 static VALUE sym_each, sym_cycle, sym_yield;
133 static VALUE lazy_use_super_method;
135 #define id_call idCall
136 #define id_each idEach
137 #define id_eqq idEqq
138 #define id_initialize idInitialize
139 #define id_size idSize
141 VALUE rb_eStopIteration;
143 struct enumerator {
144 VALUE obj;
145 ID meth;
146 VALUE args;
147 VALUE fib;
148 VALUE dst;
149 VALUE lookahead;
150 VALUE feedvalue;
151 VALUE stop_exc;
152 VALUE size;
153 VALUE procs;
154 rb_enumerator_size_func *size_fn;
155 int kw_splat;
158 static VALUE rb_cGenerator, rb_cYielder, rb_cEnumProducer;
160 struct generator {
161 VALUE proc;
162 VALUE obj;
165 struct yielder {
166 VALUE proc;
169 struct producer {
170 VALUE init;
171 VALUE proc;
174 typedef struct MEMO *lazyenum_proc_func(VALUE, struct MEMO *, VALUE, long);
175 typedef VALUE lazyenum_size_func(VALUE, VALUE);
176 typedef struct {
177 lazyenum_proc_func *proc;
178 lazyenum_size_func *size;
179 } lazyenum_funcs;
181 struct proc_entry {
182 VALUE proc;
183 VALUE memo;
184 const lazyenum_funcs *fn;
187 static VALUE generator_allocate(VALUE klass);
188 static VALUE generator_init(VALUE obj, VALUE proc);
190 static VALUE rb_cEnumChain;
192 struct enum_chain {
193 VALUE enums;
194 long pos;
197 VALUE rb_cArithSeq;
200 * Enumerator
202 static void
203 enumerator_mark(void *p)
205 struct enumerator *ptr = p;
206 rb_gc_mark_movable(ptr->obj);
207 rb_gc_mark_movable(ptr->args);
208 rb_gc_mark_movable(ptr->fib);
209 rb_gc_mark_movable(ptr->dst);
210 rb_gc_mark_movable(ptr->lookahead);
211 rb_gc_mark_movable(ptr->feedvalue);
212 rb_gc_mark_movable(ptr->stop_exc);
213 rb_gc_mark_movable(ptr->size);
214 rb_gc_mark_movable(ptr->procs);
217 static void
218 enumerator_compact(void *p)
220 struct enumerator *ptr = p;
221 ptr->obj = rb_gc_location(ptr->obj);
222 ptr->args = rb_gc_location(ptr->args);
223 ptr->fib = rb_gc_location(ptr->fib);
224 ptr->dst = rb_gc_location(ptr->dst);
225 ptr->lookahead = rb_gc_location(ptr->lookahead);
226 ptr->feedvalue = rb_gc_location(ptr->feedvalue);
227 ptr->stop_exc = rb_gc_location(ptr->stop_exc);
228 ptr->size = rb_gc_location(ptr->size);
229 ptr->procs = rb_gc_location(ptr->procs);
232 #define enumerator_free RUBY_TYPED_DEFAULT_FREE
234 static size_t
235 enumerator_memsize(const void *p)
237 return sizeof(struct enumerator);
240 static const rb_data_type_t enumerator_data_type = {
241 "enumerator",
243 enumerator_mark,
244 enumerator_free,
245 enumerator_memsize,
246 enumerator_compact,
248 0, 0, RUBY_TYPED_FREE_IMMEDIATELY
251 static struct enumerator *
252 enumerator_ptr(VALUE obj)
254 struct enumerator *ptr;
256 TypedData_Get_Struct(obj, struct enumerator, &enumerator_data_type, ptr);
257 if (!ptr || ptr->obj == Qundef) {
258 rb_raise(rb_eArgError, "uninitialized enumerator");
260 return ptr;
263 static void
264 proc_entry_mark(void *p)
266 struct proc_entry *ptr = p;
267 rb_gc_mark_movable(ptr->proc);
268 rb_gc_mark_movable(ptr->memo);
271 static void
272 proc_entry_compact(void *p)
274 struct proc_entry *ptr = p;
275 ptr->proc = rb_gc_location(ptr->proc);
276 ptr->memo = rb_gc_location(ptr->memo);
279 #define proc_entry_free RUBY_TYPED_DEFAULT_FREE
281 static size_t
282 proc_entry_memsize(const void *p)
284 return p ? sizeof(struct proc_entry) : 0;
287 static const rb_data_type_t proc_entry_data_type = {
288 "proc_entry",
290 proc_entry_mark,
291 proc_entry_free,
292 proc_entry_memsize,
293 proc_entry_compact,
297 static struct proc_entry *
298 proc_entry_ptr(VALUE proc_entry)
300 struct proc_entry *ptr;
302 TypedData_Get_Struct(proc_entry, struct proc_entry, &proc_entry_data_type, ptr);
304 return ptr;
308 * call-seq:
309 * obj.to_enum(method = :each, *args) -> enum
310 * obj.enum_for(method = :each, *args) -> enum
311 * obj.to_enum(method = :each, *args) {|*args| block} -> enum
312 * obj.enum_for(method = :each, *args){|*args| block} -> enum
 * Creates a new Enumerator which will enumerate by calling +method+ on
 * +obj+, passing +args+ if any. Whatever is _yielded_ by +method+ becomes
 * the values of the enumerator.
318 * If a block is given, it will be used to calculate the size of
319 * the enumerator without the need to iterate it (see Enumerator#size).
321 * === Examples
323 * str = "xyz"
325 * enum = str.enum_for(:each_byte)
326 * enum.each { |b| puts b }
327 * # => 120
328 * # => 121
329 * # => 122
331 * # protect an array from being modified by some_method
332 * a = [1, 2, 3]
333 * some_method(a.to_enum)
 * # String#split in block form is more memory-efficient:
336 * very_large_string.split("|") { |chunk| return chunk if chunk.include?('DATE') }
337 * # This could be rewritten more idiomatically with to_enum:
338 * very_large_string.to_enum(:split, "|").lazy.grep(/DATE/).first
340 * It is typical to call to_enum when defining methods for
341 * a generic Enumerable, in case no block is passed.
343 * Here is such an example, with parameter passing and a sizing block:
 *   module Enumerable
 *     # a generic method to repeat the values of any enumerable
 *     def repeat(n)
 *       raise ArgumentError, "#{n} is negative!" if n < 0
 *       unless block_given?
 *         return to_enum(__method__, n) do # __method__ is :repeat here
 *           sz = size     # Call size and multiply by n...
 *           sz * n if sz  # but return nil if size itself is nil
 *         end
 *       end
 *       each do |*val|
 *         n.times { yield *val }
 *       end
 *     end
 *   end
361 * %i[hello world].repeat(2) { |w| puts w }
362 * # => Prints 'hello', 'hello', 'world', 'world'
363 * enum = (1..14).repeat(3)
364 * # => returns an Enumerator when called without a block
365 * enum.first(4) # => [1, 1, 1, 2]
366 * enum.size # => 42
368 static VALUE
369 obj_to_enum(int argc, VALUE *argv, VALUE obj)
371 VALUE enumerator, meth = sym_each;
373 if (argc > 0) {
374 --argc;
375 meth = *argv++;
377 enumerator = rb_enumeratorize_with_size(obj, meth, argc, argv, 0);
378 if (rb_block_given_p()) {
379 enumerator_ptr(enumerator)->size = rb_block_proc();
381 return enumerator;
384 static VALUE
385 enumerator_allocate(VALUE klass)
387 struct enumerator *ptr;
388 VALUE enum_obj;
390 enum_obj = TypedData_Make_Struct(klass, struct enumerator, &enumerator_data_type, ptr);
391 ptr->obj = Qundef;
393 return enum_obj;
396 static VALUE
397 enumerator_init(VALUE enum_obj, VALUE obj, VALUE meth, int argc, const VALUE *argv, rb_enumerator_size_func *size_fn, VALUE size, int kw_splat)
399 struct enumerator *ptr;
401 rb_check_frozen(enum_obj);
402 TypedData_Get_Struct(enum_obj, struct enumerator, &enumerator_data_type, ptr);
404 if (!ptr) {
405 rb_raise(rb_eArgError, "unallocated enumerator");
408 ptr->obj = obj;
409 ptr->meth = rb_to_id(meth);
410 if (argc) ptr->args = rb_ary_new4(argc, argv);
411 ptr->fib = 0;
412 ptr->dst = Qnil;
413 ptr->lookahead = Qundef;
414 ptr->feedvalue = Qundef;
415 ptr->stop_exc = Qfalse;
416 ptr->size = size;
417 ptr->size_fn = size_fn;
418 ptr->kw_splat = kw_splat;
420 return enum_obj;
423 static VALUE
424 convert_to_feasible_size_value(VALUE obj)
426 if (NIL_P(obj)) {
427 return obj;
429 else if (rb_respond_to(obj, id_call)) {
430 return obj;
432 else if (RB_FLOAT_TYPE_P(obj) && RFLOAT_VALUE(obj) == HUGE_VAL) {
433 return obj;
435 else {
436 return rb_to_int(obj);
441 * call-seq:
442 * Enumerator.new(size = nil) { |yielder| ... }
444 * Creates a new Enumerator object, which can be used as an
445 * Enumerable.
447 * Iteration is defined by the given block, in
448 * which a "yielder" object, given as block parameter, can be used to
449 * yield a value by calling the +yield+ method (aliased as <code><<</code>):
451 * fib = Enumerator.new do |y|
452 * a = b = 1
453 * loop do
454 * y << a
455 * a, b = b, a + b
456 * end
457 * end
459 * fib.take(10) # => [1, 1, 2, 3, 5, 8, 13, 21, 34, 55]
461 * The optional parameter can be used to specify how to calculate the size
462 * in a lazy fashion (see Enumerator#size). It can either be a value or
463 * a callable object.
465 static VALUE
466 enumerator_initialize(int argc, VALUE *argv, VALUE obj)
468 VALUE iter = rb_block_proc();
469 VALUE recv = generator_init(generator_allocate(rb_cGenerator), iter);
470 VALUE arg0 = rb_check_arity(argc, 0, 1) ? argv[0] : Qnil;
471 VALUE size = convert_to_feasible_size_value(arg0);
473 return enumerator_init(obj, recv, sym_each, 0, 0, 0, size, false);
476 /* :nodoc: */
477 static VALUE
478 enumerator_init_copy(VALUE obj, VALUE orig)
480 struct enumerator *ptr0, *ptr1;
482 if (!OBJ_INIT_COPY(obj, orig)) return obj;
483 ptr0 = enumerator_ptr(orig);
484 if (ptr0->fib) {
485 /* Fibers cannot be copied */
486 rb_raise(rb_eTypeError, "can't copy execution context");
489 TypedData_Get_Struct(obj, struct enumerator, &enumerator_data_type, ptr1);
491 if (!ptr1) {
492 rb_raise(rb_eArgError, "unallocated enumerator");
495 ptr1->obj = ptr0->obj;
496 ptr1->meth = ptr0->meth;
497 ptr1->args = ptr0->args;
498 ptr1->fib = 0;
499 ptr1->lookahead = Qundef;
500 ptr1->feedvalue = Qundef;
501 ptr1->size = ptr0->size;
502 ptr1->size_fn = ptr0->size_fn;
504 return obj;
508 * For backwards compatibility; use rb_enumeratorize_with_size
510 VALUE
511 rb_enumeratorize(VALUE obj, VALUE meth, int argc, const VALUE *argv)
513 return rb_enumeratorize_with_size(obj, meth, argc, argv, 0);
516 static VALUE
517 lazy_to_enum_i(VALUE self, VALUE meth, int argc, const VALUE *argv, rb_enumerator_size_func *size_fn, int kw_splat);
519 VALUE
520 rb_enumeratorize_with_size_kw(VALUE obj, VALUE meth, int argc, const VALUE *argv, rb_enumerator_size_func *size_fn, int kw_splat)
522 VALUE base_class = rb_cEnumerator;
524 if (RTEST(rb_obj_is_kind_of(obj, rb_cLazy))) {
525 base_class = rb_cLazy;
527 else if (RTEST(rb_obj_is_kind_of(obj, rb_cEnumChain))) {
528 obj = enumerator_init(enumerator_allocate(rb_cEnumerator), obj, sym_each, 0, 0, 0, Qnil, false);
531 return enumerator_init(enumerator_allocate(base_class),
532 obj, meth, argc, argv, size_fn, Qnil, kw_splat);
535 VALUE
536 rb_enumeratorize_with_size(VALUE obj, VALUE meth, int argc, const VALUE *argv, rb_enumerator_size_func *size_fn)
538 return rb_enumeratorize_with_size_kw(obj, meth, argc, argv, size_fn, rb_keyword_given_p());
541 static VALUE
542 enumerator_block_call(VALUE obj, rb_block_call_func *func, VALUE arg)
544 int argc = 0;
545 const VALUE *argv = 0;
546 const struct enumerator *e = enumerator_ptr(obj);
547 ID meth = e->meth;
549 if (e->args) {
550 argc = RARRAY_LENINT(e->args);
551 argv = RARRAY_CONST_PTR(e->args);
553 return rb_block_call_kw(e->obj, meth, argc, argv, func, arg, e->kw_splat);
557 * call-seq:
558 * enum.each { |elm| block } -> obj
559 * enum.each -> enum
560 * enum.each(*appending_args) { |elm| block } -> obj
561 * enum.each(*appending_args) -> an_enumerator
 * Iterates over the enumerator's elements, calling the block according to how
 * this Enumerator was constructed. If no block and no arguments are given,
 * returns self.
566 * === Examples
568 * "Hello, world!".scan(/\w+/) #=> ["Hello", "world"]
569 * "Hello, world!".to_enum(:scan, /\w+/).to_a #=> ["Hello", "world"]
570 * "Hello, world!".to_enum(:scan).each(/\w+/).to_a #=> ["Hello", "world"]
572 * obj = Object.new
574 * def obj.each_arg(a, b=:b, *rest)
575 * yield a
576 * yield b
577 * yield rest
578 * :method_returned
579 * end
581 * enum = obj.to_enum :each_arg, :a, :x
583 * enum.each.to_a #=> [:a, :x, []]
584 * enum.each.equal?(enum) #=> true
585 * enum.each { |elm| elm } #=> :method_returned
587 * enum.each(:y, :z).to_a #=> [:a, :x, [:y, :z]]
588 * enum.each(:y, :z).equal?(enum) #=> false
589 * enum.each(:y, :z) { |elm| elm } #=> :method_returned
592 static VALUE
593 enumerator_each(int argc, VALUE *argv, VALUE obj)
595 if (argc > 0) {
596 struct enumerator *e = enumerator_ptr(obj = rb_obj_dup(obj));
597 VALUE args = e->args;
598 if (args) {
599 #if SIZEOF_INT < SIZEOF_LONG
600 /* check int range overflow */
601 rb_long2int(RARRAY_LEN(args) + argc);
602 #endif
603 args = rb_ary_dup(args);
604 rb_ary_cat(args, argv, argc);
606 else {
607 args = rb_ary_new4(argc, argv);
609 e->args = args;
610 e->size = Qnil;
611 e->size_fn = 0;
613 if (!rb_block_given_p()) return obj;
614 return enumerator_block_call(obj, 0, obj);
617 static VALUE
618 enumerator_with_index_i(RB_BLOCK_CALL_FUNC_ARGLIST(val, m))
620 struct MEMO *memo = (struct MEMO *)m;
621 VALUE idx = memo->v1;
622 MEMO_V1_SET(memo, rb_int_succ(idx));
624 if (argc <= 1)
625 return rb_yield_values(2, val, idx);
627 return rb_yield_values(2, rb_ary_new4(argc, argv), idx);
630 static VALUE
631 enumerator_size(VALUE obj);
633 static VALUE
634 enumerator_enum_size(VALUE obj, VALUE args, VALUE eobj)
636 return enumerator_size(obj);
640 * call-seq:
641 * e.with_index(offset = 0) {|(*args), idx| ... }
642 * e.with_index(offset = 0)
 * Iterates the given block for each element with an index, which
 * starts from +offset+. If no block is given, returns a new Enumerator
 * that includes the index, starting from +offset+.
 *
 * +offset+:: the starting index to use
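 *
 * An illustrative example:
 *
 *   %w[foo bar baz].each.with_index(1) do |w, i|
 *     puts "#{i}: #{w}"
 *   end
 *   # => 1: foo
 *   # => 2: bar
 *   # => 3: baz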
651 static VALUE
652 enumerator_with_index(int argc, VALUE *argv, VALUE obj)
654 VALUE memo;
656 rb_check_arity(argc, 0, 1);
657 RETURN_SIZED_ENUMERATOR(obj, argc, argv, enumerator_enum_size);
658 memo = (!argc || NIL_P(memo = argv[0])) ? INT2FIX(0) : rb_to_int(memo);
659 return enumerator_block_call(obj, enumerator_with_index_i, (VALUE)MEMO_NEW(memo, 0, 0));
663 * call-seq:
664 * e.each_with_index {|(*args), idx| ... }
665 * e.each_with_index
667 * Same as Enumerator#with_index(0), i.e. there is no starting offset.
669 * If no block is given, a new Enumerator is returned that includes the index.
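 *
 * An illustrative example:
 *
 *   %w[a b c].each.each_with_index.to_a
 *   # => [["a", 0], ["b", 1], ["c", 2]]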
672 static VALUE
673 enumerator_each_with_index(VALUE obj)
675 return enumerator_with_index(0, NULL, obj);
678 static VALUE
679 enumerator_with_object_i(RB_BLOCK_CALL_FUNC_ARGLIST(val, memo))
681 if (argc <= 1)
682 return rb_yield_values(2, val, memo);
684 return rb_yield_values(2, rb_ary_new4(argc, argv), memo);
688 * call-seq:
689 * e.each_with_object(obj) {|(*args), obj| ... }
690 * e.each_with_object(obj)
691 * e.with_object(obj) {|(*args), obj| ... }
692 * e.with_object(obj)
 * Iterates the given block for each element with an arbitrary object, +obj+,
 * and returns +obj+.
697 * If no block is given, returns a new Enumerator.
699 * === Example
701 * to_three = Enumerator.new do |y|
702 * 3.times do |x|
703 * y << x
704 * end
705 * end
707 * to_three_with_string = to_three.with_object("foo")
708 * to_three_with_string.each do |x,string|
709 * puts "#{string}: #{x}"
710 * end
712 * # => foo: 0
713 * # => foo: 1
714 * # => foo: 2
716 static VALUE
717 enumerator_with_object(VALUE obj, VALUE memo)
719 RETURN_SIZED_ENUMERATOR(obj, 1, &memo, enumerator_enum_size);
720 enumerator_block_call(obj, enumerator_with_object_i, memo);
722 return memo;
725 static VALUE
726 next_ii(RB_BLOCK_CALL_FUNC_ARGLIST(i, obj))
728 struct enumerator *e = enumerator_ptr(obj);
729 VALUE feedvalue = Qnil;
730 VALUE args = rb_ary_new4(argc, argv);
731 rb_fiber_yield(1, &args);
732 if (e->feedvalue != Qundef) {
733 feedvalue = e->feedvalue;
734 e->feedvalue = Qundef;
736 return feedvalue;
739 static VALUE
740 next_i(RB_BLOCK_CALL_FUNC_ARGLIST(_, obj))
742 struct enumerator *e = enumerator_ptr(obj);
743 VALUE nil = Qnil;
744 VALUE result;
746 result = rb_block_call(obj, id_each, 0, 0, next_ii, obj);
747 e->stop_exc = rb_exc_new2(rb_eStopIteration, "iteration reached an end");
748 rb_ivar_set(e->stop_exc, id_result, result);
749 return rb_fiber_yield(1, &nil);
752 static void
753 next_init(VALUE obj, struct enumerator *e)
755 VALUE curr = rb_fiber_current();
756 e->dst = curr;
757 e->fib = rb_fiber_new(next_i, obj);
758 e->lookahead = Qundef;
761 static VALUE
762 get_next_values(VALUE obj, struct enumerator *e)
764 VALUE curr, vs;
766 if (e->stop_exc)
767 rb_exc_raise(e->stop_exc);
769 curr = rb_fiber_current();
771 if (!e->fib || !rb_fiber_alive_p(e->fib)) {
772 next_init(obj, e);
775 vs = rb_fiber_resume(e->fib, 1, &curr);
776 if (e->stop_exc) {
777 e->fib = 0;
778 e->dst = Qnil;
779 e->lookahead = Qundef;
780 e->feedvalue = Qundef;
781 rb_exc_raise(e->stop_exc);
783 return vs;
787 * call-seq:
788 * e.next_values -> array
 * Returns the next object in the enumerator, as an array, and moves the
 * internal position forward. When the position reaches the end,
 * StopIteration is raised.
794 * See class-level notes about external iterators.
796 * This method can be used to distinguish <code>yield</code> and <code>yield
797 * nil</code>.
799 * === Example
801 * o = Object.new
802 * def o.each
803 * yield
804 * yield 1
805 * yield 1, 2
806 * yield nil
807 * yield [1, 2]
808 * end
809 * e = o.to_enum
810 * p e.next_values
811 * p e.next_values
812 * p e.next_values
813 * p e.next_values
814 * p e.next_values
815 * e = o.to_enum
816 * p e.next
817 * p e.next
818 * p e.next
819 * p e.next
820 * p e.next
 *   ## yield args       next_values      next
 *   #  yield            []               nil
 *   #  yield 1          [1]              1
 *   #  yield 1, 2       [1, 2]           [1, 2]
 *   #  yield nil        [nil]            nil
 *   #  yield [1, 2]     [[1, 2]]         [1, 2]
831 static VALUE
832 enumerator_next_values(VALUE obj)
834 struct enumerator *e = enumerator_ptr(obj);
835 VALUE vs;
837 if (e->lookahead != Qundef) {
838 vs = e->lookahead;
839 e->lookahead = Qundef;
840 return vs;
843 return get_next_values(obj, e);
846 static VALUE
847 ary2sv(VALUE args, int dup)
849 if (!RB_TYPE_P(args, T_ARRAY))
850 return args;
852 switch (RARRAY_LEN(args)) {
853 case 0:
854 return Qnil;
856 case 1:
857 return RARRAY_AREF(args, 0);
859 default:
860 if (dup)
861 return rb_ary_dup(args);
862 return args;
867 * call-seq:
868 * e.next -> object
 * Returns the next object in the enumerator and moves the internal position
 * forward. When the position reaches the end, StopIteration is raised.
873 * === Example
875 * a = [1,2,3]
876 * e = a.to_enum
877 * p e.next #=> 1
878 * p e.next #=> 2
879 * p e.next #=> 3
880 * p e.next #raises StopIteration
882 * See class-level notes about external iterators.
886 static VALUE
887 enumerator_next(VALUE obj)
889 VALUE vs = enumerator_next_values(obj);
890 return ary2sv(vs, 0);
893 static VALUE
894 enumerator_peek_values(VALUE obj)
896 struct enumerator *e = enumerator_ptr(obj);
898 if (e->lookahead == Qundef) {
899 e->lookahead = get_next_values(obj, e);
901 return e->lookahead;
905 * call-seq:
906 * e.peek_values -> array
908 * Returns the next object as an array, similar to Enumerator#next_values, but
909 * doesn't move the internal position forward. If the position is already at
910 * the end, StopIteration is raised.
912 * See class-level notes about external iterators.
914 * === Example
916 * o = Object.new
917 * def o.each
918 * yield
919 * yield 1
920 * yield 1, 2
921 * end
922 * e = o.to_enum
923 * p e.peek_values #=> []
924 * e.next
925 * p e.peek_values #=> [1]
926 * p e.peek_values #=> [1]
927 * e.next
928 * p e.peek_values #=> [1, 2]
929 * e.next
930 * p e.peek_values # raises StopIteration
934 static VALUE
935 enumerator_peek_values_m(VALUE obj)
937 return rb_ary_dup(enumerator_peek_values(obj));
941 * call-seq:
942 * e.peek -> object
944 * Returns the next object in the enumerator, but doesn't move the internal
945 * position forward. If the position is already at the end, StopIteration
946 * is raised.
948 * See class-level notes about external iterators.
950 * === Example
952 * a = [1,2,3]
953 * e = a.to_enum
954 * p e.next #=> 1
955 * p e.peek #=> 2
956 * p e.peek #=> 2
957 * p e.peek #=> 2
958 * p e.next #=> 2
959 * p e.next #=> 3
960 * p e.peek #raises StopIteration
964 static VALUE
965 enumerator_peek(VALUE obj)
967 VALUE vs = enumerator_peek_values(obj);
968 return ary2sv(vs, 1);
972 * call-seq:
973 * e.feed obj -> nil
975 * Sets the value to be returned by the next yield inside +e+.
977 * If the value is not set, the yield returns nil.
979 * This value is cleared after being yielded.
981 * # Array#map passes the array's elements to "yield" and collects the
982 * # results of "yield" as an array.
983 * # Following example shows that "next" returns the passed elements and
984 * # values passed to "feed" are collected as an array which can be
985 * # obtained by StopIteration#result.
986 * e = [1,2,3].map
987 * p e.next #=> 1
988 * e.feed "a"
989 * p e.next #=> 2
990 * e.feed "b"
991 * p e.next #=> 3
992 * e.feed "c"
993 * begin
994 * e.next
995 * rescue StopIteration
996 * p $!.result #=> ["a", "b", "c"]
997 * end
999 * o = Object.new
1000 * def o.each
1001 * x = yield # (2) blocks
1002 * p x # (5) => "foo"
1003 * x = yield # (6) blocks
1004 * p x # (8) => nil
1005 * x = yield # (9) blocks
1006 * p x # not reached w/o another e.next
1007 * end
1009 * e = o.to_enum
1010 * e.next # (1)
1011 * e.feed "foo" # (3)
1012 * e.next # (4)
1013 * e.next # (7)
1014 * # (10)
1017 static VALUE
1018 enumerator_feed(VALUE obj, VALUE v)
1020 struct enumerator *e = enumerator_ptr(obj);
1022 if (e->feedvalue != Qundef) {
1023 rb_raise(rb_eTypeError, "feed value already set");
1025 e->feedvalue = v;
1027 return Qnil;
1031 * call-seq:
1032 * e.rewind -> e
1034 * Rewinds the enumeration sequence to the beginning.
1036 * If the enclosed object responds to a "rewind" method, it is called.
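 *
 * A minimal illustrative example:
 *
 *   e = [1, 2, 3].each
 *   e.next   # => 1
 *   e.next   # => 2
 *   e.rewind
 *   e.next   # => 1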
1039 static VALUE
1040 enumerator_rewind(VALUE obj)
1042 struct enumerator *e = enumerator_ptr(obj);
1044 rb_check_funcall(e->obj, id_rewind, 0, 0);
1046 e->fib = 0;
1047 e->dst = Qnil;
1048 e->lookahead = Qundef;
1049 e->feedvalue = Qundef;
1050 e->stop_exc = Qfalse;
1051 return obj;
1054 static struct generator *generator_ptr(VALUE obj);
1055 static VALUE append_method(VALUE obj, VALUE str, ID default_method, VALUE default_args);
1057 static VALUE
1058 inspect_enumerator(VALUE obj, VALUE dummy, int recur)
1060 struct enumerator *e;
1061 VALUE eobj, str, cname;
1063 TypedData_Get_Struct(obj, struct enumerator, &enumerator_data_type, e);
1065 cname = rb_obj_class(obj);
1067 if (!e || e->obj == Qundef) {
1068 return rb_sprintf("#<%"PRIsVALUE": uninitialized>", rb_class_path(cname));
1071 if (recur) {
1072 str = rb_sprintf("#<%"PRIsVALUE": ...>", rb_class_path(cname));
1073 return str;
1076 if (e->procs) {
1077 long i;
1079 eobj = generator_ptr(e->obj)->obj;
        /* In the case of a procs-chained enumerator, traverse all proc entries manually */
1081 if (rb_obj_class(eobj) == cname) {
1082 str = rb_inspect(eobj);
1084 else {
1085 str = rb_sprintf("#<%"PRIsVALUE": %+"PRIsVALUE">", rb_class_path(cname), eobj);
1087 for (i = 0; i < RARRAY_LEN(e->procs); i++) {
1088 str = rb_sprintf("#<%"PRIsVALUE": %"PRIsVALUE, cname, str);
1089 append_method(RARRAY_AREF(e->procs, i), str, e->meth, e->args);
1090 rb_str_buf_cat2(str, ">");
1092 return str;
1095 eobj = rb_attr_get(obj, id_receiver);
1096 if (NIL_P(eobj)) {
1097 eobj = e->obj;
1100 /* (1..100).each_cons(2) => "#<Enumerator: 1..100:each_cons(2)>" */
1101 str = rb_sprintf("#<%"PRIsVALUE": %+"PRIsVALUE, rb_class_path(cname), eobj);
1102 append_method(obj, str, e->meth, e->args);
1104 rb_str_buf_cat2(str, ">");
1106 return str;
1109 static int
1110 key_symbol_p(VALUE key, VALUE val, VALUE arg)
1112 if (SYMBOL_P(key)) return ST_CONTINUE;
1113 *(int *)arg = FALSE;
1114 return ST_STOP;
1117 static int
1118 kwd_append(VALUE key, VALUE val, VALUE str)
1120 if (!SYMBOL_P(key)) rb_raise(rb_eRuntimeError, "non-symbol key inserted");
1121 rb_str_catf(str, "% "PRIsVALUE": %"PRIsVALUE", ", key, val);
1122 return ST_CONTINUE;
1125 static VALUE
1126 append_method(VALUE obj, VALUE str, ID default_method, VALUE default_args)
1128 VALUE method, eargs;
1130 method = rb_attr_get(obj, id_method);
1131 if (method != Qfalse) {
1132 if (!NIL_P(method)) {
1133 Check_Type(method, T_SYMBOL);
1134 method = rb_sym2str(method);
1136 else {
1137 method = rb_id2str(default_method);
1139 rb_str_buf_cat2(str, ":");
1140 rb_str_buf_append(str, method);
1143 eargs = rb_attr_get(obj, id_arguments);
1144 if (NIL_P(eargs)) {
1145 eargs = default_args;
1147 if (eargs != Qfalse) {
1148 long argc = RARRAY_LEN(eargs);
1149 const VALUE *argv = RARRAY_CONST_PTR(eargs); /* WB: no new reference */
1151 if (argc > 0) {
1152 VALUE kwds = Qnil;
1154 rb_str_buf_cat2(str, "(");
1156 if (RB_TYPE_P(argv[argc-1], T_HASH) && !RHASH_EMPTY_P(argv[argc-1])) {
1157 int all_key = TRUE;
1158 rb_hash_foreach(argv[argc-1], key_symbol_p, (VALUE)&all_key);
1159 if (all_key) kwds = argv[--argc];
1162 while (argc--) {
1163 VALUE arg = *argv++;
1165 rb_str_append(str, rb_inspect(arg));
1166 rb_str_buf_cat2(str, ", ");
1168 if (!NIL_P(kwds)) {
1169 rb_hash_foreach(kwds, kwd_append, str);
1171 rb_str_set_len(str, RSTRING_LEN(str)-2);
1172 rb_str_buf_cat2(str, ")");
1176 return str;
1180 * call-seq:
1181 * e.inspect -> string
1183 * Creates a printable version of <i>e</i>.
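 *
 * For example (as also noted in the comment below):
 *
 *   (1..100).each_cons(2).inspect
 *   # => "#<Enumerator: 1..100:each_cons(2)>"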
1186 static VALUE
1187 enumerator_inspect(VALUE obj)
1189 return rb_exec_recursive(inspect_enumerator, obj, 0);
1193 * call-seq:
1194 * e.size -> int, Float::INFINITY or nil
1196 * Returns the size of the enumerator, or +nil+ if it can't be calculated lazily.
1198 * (1..100).to_a.permutation(4).size # => 94109400
1199 * loop.size # => Float::INFINITY
1200 * (1..100).drop_while.size # => nil
1203 static VALUE
1204 enumerator_size(VALUE obj)
1206 struct enumerator *e = enumerator_ptr(obj);
1207 int argc = 0;
1208 const VALUE *argv = NULL;
1209 VALUE size;
1211 if (e->procs) {
1212 struct generator *g = generator_ptr(e->obj);
1213 VALUE receiver = rb_check_funcall(g->obj, id_size, 0, 0);
1214 long i = 0;
1216 for (i = 0; i < RARRAY_LEN(e->procs); i++) {
1217 VALUE proc = RARRAY_AREF(e->procs, i);
1218 struct proc_entry *entry = proc_entry_ptr(proc);
1219 lazyenum_size_func *size_fn = entry->fn->size;
1220 if (!size_fn) {
1221 return Qnil;
1223 receiver = (*size_fn)(proc, receiver);
1225 return receiver;
1228 if (e->size_fn) {
1229 return (*e->size_fn)(e->obj, e->args, obj);
1231 if (e->args) {
1232 argc = (int)RARRAY_LEN(e->args);
1233 argv = RARRAY_CONST_PTR(e->args);
1235 size = rb_check_funcall_kw(e->size, id_call, argc, argv, e->kw_splat);
1236 if (size != Qundef) return size;
1237 return e->size;
1241 * Yielder
1243 static void
1244 yielder_mark(void *p)
1246 struct yielder *ptr = p;
1247 rb_gc_mark_movable(ptr->proc);
1250 static void
1251 yielder_compact(void *p)
1253 struct yielder *ptr = p;
1254 ptr->proc = rb_gc_location(ptr->proc);
1257 #define yielder_free RUBY_TYPED_DEFAULT_FREE
1259 static size_t
1260 yielder_memsize(const void *p)
1262 return sizeof(struct yielder);
1265 static const rb_data_type_t yielder_data_type = {
1266 "yielder",
1268 yielder_mark,
1269 yielder_free,
1270 yielder_memsize,
1271 yielder_compact,
1273 0, 0, RUBY_TYPED_FREE_IMMEDIATELY
1276 static struct yielder *
1277 yielder_ptr(VALUE obj)
1279 struct yielder *ptr;
1281 TypedData_Get_Struct(obj, struct yielder, &yielder_data_type, ptr);
1282 if (!ptr || ptr->proc == Qundef) {
1283 rb_raise(rb_eArgError, "uninitialized yielder");
1285 return ptr;
1288 /* :nodoc: */
1289 static VALUE
1290 yielder_allocate(VALUE klass)
1292 struct yielder *ptr;
1293 VALUE obj;
1295 obj = TypedData_Make_Struct(klass, struct yielder, &yielder_data_type, ptr);
1296 ptr->proc = Qundef;
1298 return obj;
1301 static VALUE
1302 yielder_init(VALUE obj, VALUE proc)
1304 struct yielder *ptr;
1306 TypedData_Get_Struct(obj, struct yielder, &yielder_data_type, ptr);
1308 if (!ptr) {
1309 rb_raise(rb_eArgError, "unallocated yielder");
1312 ptr->proc = proc;
1314 return obj;
1317 /* :nodoc: */
1318 static VALUE
1319 yielder_initialize(VALUE obj)
1321 rb_need_block();
1323 return yielder_init(obj, rb_block_proc());
1326 /* :nodoc: */
1327 static VALUE
1328 yielder_yield(VALUE obj, VALUE args)
1330 struct yielder *ptr = yielder_ptr(obj);
1332 return rb_proc_call_kw(ptr->proc, args, RB_PASS_CALLED_KEYWORDS);
1335 /* :nodoc: */
1336 static VALUE
1337 yielder_yield_push(VALUE obj, VALUE arg)
1339 struct yielder *ptr = yielder_ptr(obj);
1341 rb_proc_call_with_block(ptr->proc, 1, &arg, Qnil);
1343 return obj;
1347 * Returns a Proc object that takes arguments and yields them.
1349 * This method is implemented so that a Yielder object can be directly
1350 * passed to another method as a block argument.
 *   enum = Enumerator.new { |y|
 *     Dir.glob("*.rb") { |file|
 *       File.open(file) { |f| f.each_line(&y) }
 *     }
 *   }
1358 static VALUE
1359 yielder_to_proc(VALUE obj)
1361 VALUE method = rb_obj_method(obj, sym_yield);
1363 return rb_funcall(method, idTo_proc, 0);
1366 static VALUE
1367 yielder_yield_i(RB_BLOCK_CALL_FUNC_ARGLIST(obj, memo))
1369 return rb_yield_values_kw(argc, argv, RB_PASS_CALLED_KEYWORDS);
1372 static VALUE
1373 yielder_new(void)
1375 return yielder_init(yielder_allocate(rb_cYielder), rb_proc_new(yielder_yield_i, 0));
1379 * Generator
1381 static void
1382 generator_mark(void *p)
1384 struct generator *ptr = p;
1385 rb_gc_mark_movable(ptr->proc);
1386 rb_gc_mark_movable(ptr->obj);
1389 static void
1390 generator_compact(void *p)
1392 struct generator *ptr = p;
1393 ptr->proc = rb_gc_location(ptr->proc);
1394 ptr->obj = rb_gc_location(ptr->obj);
1397 #define generator_free RUBY_TYPED_DEFAULT_FREE
1399 static size_t
1400 generator_memsize(const void *p)
1402 return sizeof(struct generator);
1405 static const rb_data_type_t generator_data_type = {
1406 "generator",
1408 generator_mark,
1409 generator_free,
1410 generator_memsize,
1411 generator_compact,
1413 0, 0, RUBY_TYPED_FREE_IMMEDIATELY
1416 static struct generator *
1417 generator_ptr(VALUE obj)
1419 struct generator *ptr;
1421 TypedData_Get_Struct(obj, struct generator, &generator_data_type, ptr);
1422 if (!ptr || ptr->proc == Qundef) {
1423 rb_raise(rb_eArgError, "uninitialized generator");
1425 return ptr;
1428 /* :nodoc: */
1429 static VALUE
1430 generator_allocate(VALUE klass)
1432 struct generator *ptr;
1433 VALUE obj;
1435 obj = TypedData_Make_Struct(klass, struct generator, &generator_data_type, ptr);
1436 ptr->proc = Qundef;
1438 return obj;
1441 static VALUE
1442 generator_init(VALUE obj, VALUE proc)
1444 struct generator *ptr;
1446 rb_check_frozen(obj);
1447 TypedData_Get_Struct(obj, struct generator, &generator_data_type, ptr);
1449 if (!ptr) {
1450 rb_raise(rb_eArgError, "unallocated generator");
1453 ptr->proc = proc;
1455 return obj;
1458 /* :nodoc: */
1459 static VALUE
1460 generator_initialize(int argc, VALUE *argv, VALUE obj)
1462 VALUE proc;
1464 if (argc == 0) {
1465 rb_need_block();
1467 proc = rb_block_proc();
1469 else {
1470 rb_scan_args(argc, argv, "1", &proc);
1472 if (!rb_obj_is_proc(proc))
1473 rb_raise(rb_eTypeError,
1474 "wrong argument type %"PRIsVALUE" (expected Proc)",
1475 rb_obj_class(proc));
1477 if (rb_block_given_p()) {
1478 rb_warn("given block not used");
1482 return generator_init(obj, proc);
1485 /* :nodoc: */
1486 static VALUE
1487 generator_init_copy(VALUE obj, VALUE orig)
1489 struct generator *ptr0, *ptr1;
1491 if (!OBJ_INIT_COPY(obj, orig)) return obj;
1493 ptr0 = generator_ptr(orig);
1495 TypedData_Get_Struct(obj, struct generator, &generator_data_type, ptr1);
1497 if (!ptr1) {
1498 rb_raise(rb_eArgError, "unallocated generator");
1501 ptr1->proc = ptr0->proc;
1503 return obj;
1506 /* :nodoc: */
1507 static VALUE
1508 generator_each(int argc, VALUE *argv, VALUE obj)
1510 struct generator *ptr = generator_ptr(obj);
1511 VALUE args = rb_ary_new2(argc + 1);
1513 rb_ary_push(args, yielder_new());
1514 if (argc > 0) {
1515 rb_ary_cat(args, argv, argc);
1518 return rb_proc_call_kw(ptr->proc, args, RB_PASS_CALLED_KEYWORDS);
1521 /* Lazy Enumerator methods */
1522 static VALUE
1523 enum_size(VALUE self)
1525 VALUE r = rb_check_funcall(self, id_size, 0, 0);
1526 return (r == Qundef) ? Qnil : r;
1529 static VALUE
1530 lazyenum_size(VALUE self, VALUE args, VALUE eobj)
1532 return enum_size(self);
1535 #define lazy_receiver_size lazy_map_size
1537 static VALUE
1538 lazy_init_iterator(RB_BLOCK_CALL_FUNC_ARGLIST(val, m))
1540 VALUE result;
1541 if (argc == 1) {
1542 VALUE args[2];
1543 args[0] = m;
1544 args[1] = val;
1545 result = rb_yield_values2(2, args);
1547 else {
1548 VALUE args;
1549 int len = rb_long2int((long)argc + 1);
1550 VALUE *nargv = ALLOCV_N(VALUE, args, len);
1552 nargv[0] = m;
1553 if (argc > 0) {
1554 MEMCPY(nargv + 1, argv, VALUE, argc);
1556 result = rb_yield_values2(len, nargv);
1557 ALLOCV_END(args);
1559 if (result == Qundef) rb_iter_break();
1560 return Qnil;
1563 static VALUE
1564 lazy_init_block_i(RB_BLOCK_CALL_FUNC_ARGLIST(val, m))
1566 rb_block_call(m, id_each, argc-1, argv+1, lazy_init_iterator, val);
1567 return Qnil;
1570 #define memo_value v2
1571 #define memo_flags u3.state
1572 #define LAZY_MEMO_BREAK 1
1573 #define LAZY_MEMO_PACKED 2
1574 #define LAZY_MEMO_BREAK_P(memo) ((memo)->memo_flags & LAZY_MEMO_BREAK)
1575 #define LAZY_MEMO_PACKED_P(memo) ((memo)->memo_flags & LAZY_MEMO_PACKED)
1576 #define LAZY_MEMO_SET_BREAK(memo) ((memo)->memo_flags |= LAZY_MEMO_BREAK)
1577 #define LAZY_MEMO_RESET_BREAK(memo) ((memo)->memo_flags &= ~LAZY_MEMO_BREAK)
1578 #define LAZY_MEMO_SET_VALUE(memo, value) MEMO_V2_SET(memo, value)
1579 #define LAZY_MEMO_SET_PACKED(memo) ((memo)->memo_flags |= LAZY_MEMO_PACKED)
1580 #define LAZY_MEMO_RESET_PACKED(memo) ((memo)->memo_flags &= ~LAZY_MEMO_PACKED)
1582 static VALUE lazy_yielder_result(struct MEMO *result, VALUE yielder, VALUE procs_array, VALUE memos, long i);
1584 static VALUE
1585 lazy_init_yielder(RB_BLOCK_CALL_FUNC_ARGLIST(_, m))
1587 VALUE yielder = RARRAY_AREF(m, 0);
1588 VALUE procs_array = RARRAY_AREF(m, 1);
1589 VALUE memos = rb_attr_get(yielder, id_memo);
1590 struct MEMO *result;
1592 result = MEMO_NEW(m, rb_enum_values_pack(argc, argv),
1593 argc > 1 ? LAZY_MEMO_PACKED : 0);
1594 return lazy_yielder_result(result, yielder, procs_array, memos, 0);
1597 static VALUE
1598 lazy_yielder_yield(struct MEMO *result, long memo_index, int argc, const VALUE *argv)
1600 VALUE m = result->v1;
1601 VALUE yielder = RARRAY_AREF(m, 0);
1602 VALUE procs_array = RARRAY_AREF(m, 1);
1603 VALUE memos = rb_attr_get(yielder, id_memo);
1604 LAZY_MEMO_SET_VALUE(result, rb_enum_values_pack(argc, argv));
1605 if (argc > 1)
1606 LAZY_MEMO_SET_PACKED(result);
1607 else
1608 LAZY_MEMO_RESET_PACKED(result);
1609 return lazy_yielder_result(result, yielder, procs_array, memos, memo_index);
1612 static VALUE
1613 lazy_yielder_result(struct MEMO *result, VALUE yielder, VALUE procs_array, VALUE memos, long i)
1615 int cont = 1;
1617 for (; i < RARRAY_LEN(procs_array); i++) {
1618 VALUE proc = RARRAY_AREF(procs_array, i);
1619 struct proc_entry *entry = proc_entry_ptr(proc);
1620 if (!(*entry->fn->proc)(proc, result, memos, i)) {
1621 cont = 0;
1622 break;
1626 if (cont) {
1627 rb_funcall2(yielder, idLTLT, 1, &(result->memo_value));
1629 if (LAZY_MEMO_BREAK_P(result)) {
1630 rb_iter_break();
1632 return result->memo_value;
1635 static VALUE
1636 lazy_init_block(RB_BLOCK_CALL_FUNC_ARGLIST(val, m))
1638 VALUE procs = RARRAY_AREF(m, 1);
1640 rb_ivar_set(val, id_memo, rb_ary_new2(RARRAY_LEN(procs)));
1641 rb_block_call(RARRAY_AREF(m, 0), id_each, 0, 0,
1642 lazy_init_yielder, rb_ary_new3(2, val, procs));
1643 return Qnil;
1646 static VALUE
1647 lazy_generator_init(VALUE enumerator, VALUE procs)
1649 VALUE generator;
1650 VALUE obj;
1651 struct generator *gen_ptr;
1652 struct enumerator *e = enumerator_ptr(enumerator);
1654 if (RARRAY_LEN(procs) > 0) {
1655 struct generator *old_gen_ptr = generator_ptr(e->obj);
1656 obj = old_gen_ptr->obj;
1658 else {
1659 obj = enumerator;
1662 generator = generator_allocate(rb_cGenerator);
1664 rb_block_call(generator, id_initialize, 0, 0,
1665 lazy_init_block, rb_ary_new3(2, obj, procs));
1667 gen_ptr = generator_ptr(generator);
1668 gen_ptr->obj = obj;
1670 return generator;
1674 * Document-class: Enumerator::Lazy
 * Enumerator::Lazy is a special type of Enumerator that allows constructing
 * chains of operations without evaluating them immediately, evaluating
 * values only on an as-needed basis. To do so, it redefines most Enumerable
 * methods so that they just construct another lazy enumerator.
1681 * Enumerator::Lazy can be constructed from any Enumerable with the
1682 * Enumerable#lazy method.
1684 * lazy = (1..Float::INFINITY).lazy.select(&:odd?).drop(10).take_while { |i| i < 30 }
1685 * # => #<Enumerator::Lazy: #<Enumerator::Lazy: #<Enumerator::Lazy: #<Enumerator::Lazy: 1..Infinity>:select>:drop(10)>:take_while>
1687 * The real enumeration is performed when any non-redefined Enumerable method
1688 * is called, like Enumerable#first or Enumerable#to_a (the latter is aliased
1689 * as #force for more semantic code):
1691 * lazy.first(2)
1692 * #=> [21, 23]
1694 * lazy.force
1695 * #=> [21, 23, 25, 27, 29]
 * Note that most Enumerable methods that can be called with or without
 * a block will always require a block on Enumerator::Lazy:
1700 * [1, 2, 3].map #=> #<Enumerator: [1, 2, 3]:map>
1701 * [1, 2, 3].lazy.map # ArgumentError: tried to call lazy map without a block
1703 * This class allows idiomatic calculations on long or infinite sequences, as well
1704 * as chaining of calculations without constructing intermediate arrays.
1706 * Example for working with a slowly calculated sequence:
1708 * require 'open-uri'
1710 * # This will fetch all URLs before selecting
1711 * # necessary data
1712 * URLS.map { |u| JSON.parse(URI.open(u).read) }
1713 * .select { |data| data.key?('stats') }
1714 * .first(5)
 * # This will fetch URLs one by one, only until
 * # there is enough data to satisfy the condition
1718 * URLS.lazy.map { |u| JSON.parse(URI.open(u).read) }
1719 * .select { |data| data.key?('stats') }
1720 * .first(5)
1722 * Ending a chain with ".eager" generates a non-lazy enumerator, which
1723 * is suitable for returning or passing to another method that expects
1724 * a normal enumerator.
1726 * def active_items
1727 * groups
1728 * .lazy
1729 * .flat_map(&:items)
1730 * .reject(&:disabled)
1731 * .eager
1732 * end
1734 * # This works lazily; if a checked item is found, it stops
1735 * # iteration and does not look into remaining groups.
1736 * first_checked = active_items.find(&:checked)
1738 * # This returns an array of items like a normal enumerator does.
1739 * all_checked = active_items.select(&:checked)
1744 * call-seq:
1745 * Lazy.new(obj, size=nil) { |yielder, *values| block }
1747 * Creates a new Lazy enumerator. When the enumerator is actually enumerated
1748 * (e.g. by calling #force), +obj+ will be enumerated and each value passed
1749 * to the given block. The block can yield values back using +yielder+.
1750 * For example, to create a "filter+map" enumerator:
1752 * def filter_map(sequence)
1753 * Lazy.new(sequence) do |yielder, *values|
1754 * result = yield *values
1755 * yielder << result if result
1756 * end
1757 * end
1759 * filter_map(1..Float::INFINITY) {|i| i*i if i.even?}.first(5)
1760 * #=> [4, 16, 36, 64, 100]
1762 static VALUE
1763 lazy_initialize(int argc, VALUE *argv, VALUE self)
1765 VALUE obj, size = Qnil;
1766 VALUE generator;
1768 rb_check_arity(argc, 1, 2);
1769 if (!rb_block_given_p()) {
1770 rb_raise(rb_eArgError, "tried to call lazy new without a block");
1772 obj = argv[0];
1773 if (argc > 1) {
1774 size = argv[1];
1776 generator = generator_allocate(rb_cGenerator);
1777 rb_block_call(generator, id_initialize, 0, 0, lazy_init_block_i, obj);
1778 enumerator_init(self, generator, sym_each, 0, 0, 0, size, 0);
1779 rb_ivar_set(self, id_receiver, obj);
1781 return self;
1784 #if 0 /* for RDoc */
1786 * call-seq:
1787 * lazy.to_a -> array
1788 * lazy.force -> array
1790 * Expands +lazy+ enumerator to an array.
1791 * See Enumerable#to_a.
1793 static VALUE lazy_to_a(VALUE self)
1796 #endif
1798 static void
1799 lazy_set_args(VALUE lazy, VALUE args)
1801 ID id = rb_frame_this_func();
1802 rb_ivar_set(lazy, id_method, ID2SYM(id));
1803 if (NIL_P(args)) {
1804 /* Qfalse indicates that the arguments are empty */
1805 rb_ivar_set(lazy, id_arguments, Qfalse);
1807 else {
1808 rb_ivar_set(lazy, id_arguments, args);
1812 #if 0
1813 static VALUE
1814 lazy_set_method(VALUE lazy, VALUE args, rb_enumerator_size_func *size_fn)
1816 struct enumerator *e = enumerator_ptr(lazy);
1817 lazy_set_args(lazy, args);
1818 e->size_fn = size_fn;
1819 return lazy;
1821 #endif
1823 static VALUE
1824 lazy_add_method(VALUE obj, int argc, VALUE *argv, VALUE args, VALUE memo,
1825 const lazyenum_funcs *fn)
1827 struct enumerator *new_e;
1828 VALUE new_obj;
1829 VALUE new_generator;
1830 VALUE new_procs;
1831 struct enumerator *e = enumerator_ptr(obj);
1832 struct proc_entry *entry;
1833 VALUE entry_obj = TypedData_Make_Struct(rb_cObject, struct proc_entry,
1834 &proc_entry_data_type, entry);
1835 if (rb_block_given_p()) {
1836 entry->proc = rb_block_proc();
1838 entry->fn = fn;
1839 entry->memo = args;
1841 lazy_set_args(entry_obj, memo);
1843 new_procs = RTEST(e->procs) ? rb_ary_dup(e->procs) : rb_ary_new();
1844 new_generator = lazy_generator_init(obj, new_procs);
1845 rb_ary_push(new_procs, entry_obj);
1847 new_obj = enumerator_init_copy(enumerator_allocate(rb_cLazy), obj);
1848 new_e = DATA_PTR(new_obj);
1849 new_e->obj = new_generator;
1850 new_e->procs = new_procs;
1852 if (argc > 0) {
1853 new_e->meth = rb_to_id(*argv++);
1854 --argc;
1856 else {
1857 new_e->meth = id_each;
1859 new_e->args = rb_ary_new4(argc, argv);
1860 return new_obj;
1864 * call-seq:
1865 * e.lazy -> lazy_enumerator
1867 * Returns an Enumerator::Lazy, which redefines most Enumerable
1868 * methods to postpone enumeration and enumerate values only on an
1869 * as-needed basis.
1871 * === Example
1873 * The following program finds pythagorean triples:
 *   def pythagorean_triples
 *     (1..Float::INFINITY).lazy.flat_map {|z|
 *       (1..z).flat_map {|x|
 *         (x..z).select {|y|
 *           x**2 + y**2 == z**2
 *         }.map {|y|
 *           [x, y, z]
 *         }
 *       }
 *     }
 *   end
1886 * # show first ten pythagorean triples
1887 * p pythagorean_triples.take(10).force # take is lazy, so force is needed
1888 * p pythagorean_triples.first(10) # first is eager
1889 * # show pythagorean triples less than 100
1890 * p pythagorean_triples.take_while { |*, z| z < 100 }.force
1892 static VALUE
1893 enumerable_lazy(VALUE obj)
1895 VALUE result = lazy_to_enum_i(obj, sym_each, 0, 0, lazyenum_size, rb_keyword_given_p());
1896 /* Qfalse indicates that the Enumerator::Lazy has no method name */
1897 rb_ivar_set(result, id_method, Qfalse);
1898 return result;
1901 static VALUE
1902 lazy_to_enum_i(VALUE obj, VALUE meth, int argc, const VALUE *argv, rb_enumerator_size_func *size_fn, int kw_splat)
1904 return enumerator_init(enumerator_allocate(rb_cLazy),
1905 obj, meth, argc, argv, size_fn, Qnil, kw_splat);
1909 * call-seq:
1910 * lzy.to_enum(method = :each, *args) -> lazy_enum
1911 * lzy.enum_for(method = :each, *args) -> lazy_enum
1912 * lzy.to_enum(method = :each, *args) {|*args| block } -> lazy_enum
1913 * lzy.enum_for(method = :each, *args) {|*args| block } -> lazy_enum
1915 * Similar to Object#to_enum, except it returns a lazy enumerator.
1916 * This makes it easy to define Enumerable methods that will
1917 * naturally remain lazy if called from a lazy enumerator.
1919 * For example, continuing from the example in Object#to_enum:
1921 * # See Object#to_enum for the definition of repeat
1922 * r = 1..Float::INFINITY
1923 * r.repeat(2).first(5) # => [1, 1, 2, 2, 3]
1924 * r.repeat(2).class # => Enumerator
1925 * r.repeat(2).map{|n| n ** 2}.first(5) # => endless loop!
1926 * # works naturally on lazy enumerator:
1927 * r.lazy.repeat(2).class # => Enumerator::Lazy
1928 * r.lazy.repeat(2).map{|n| n ** 2}.first(5) # => [1, 1, 4, 4, 9]
1931 static VALUE
1932 lazy_to_enum(int argc, VALUE *argv, VALUE self)
1934 VALUE lazy, meth = sym_each, super_meth;
1936 if (argc > 0) {
1937 --argc;
1938 meth = *argv++;
1940 if (RTEST((super_meth = rb_hash_aref(lazy_use_super_method, meth)))) {
1941 meth = super_meth;
1943 lazy = lazy_to_enum_i(self, meth, argc, argv, 0, rb_keyword_given_p());
1944 if (rb_block_given_p()) {
1945 enumerator_ptr(lazy)->size = rb_block_proc();
1947 return lazy;
1950 static VALUE
1951 lazy_eager_size(VALUE self, VALUE args, VALUE eobj)
1953 return enum_size(self);
1957 * call-seq:
1958 * lzy.eager -> enum
1960 * Returns a non-lazy Enumerator converted from the lazy enumerator.
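 *
 * An illustrative example:
 *
 *   lazy_enum = (1..Float::INFINITY).lazy.map { |i| i * 2 }
 *   lazy_enum.class           # => Enumerator::Lazy
 *   lazy_enum.eager.class     # => Enumerator
 *   lazy_enum.eager.first(3)  # => [2, 4, 6]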
1963 static VALUE
1964 lazy_eager(VALUE self)
1966 return enumerator_init(enumerator_allocate(rb_cEnumerator),
1967 self, sym_each, 0, 0, lazy_eager_size, Qnil, 0);
1970 static VALUE
1971 lazyenum_yield(VALUE proc_entry, struct MEMO *result)
1973 struct proc_entry *entry = proc_entry_ptr(proc_entry);
1974 return rb_proc_call_with_block(entry->proc, 1, &result->memo_value, Qnil);
1977 static VALUE
1978 lazyenum_yield_values(VALUE proc_entry, struct MEMO *result)
1980 struct proc_entry *entry = proc_entry_ptr(proc_entry);
1981 int argc = 1;
1982 const VALUE *argv = &result->memo_value;
1983 if (LAZY_MEMO_PACKED_P(result)) {
1984 const VALUE args = *argv;
1985 argc = RARRAY_LENINT(args);
1986 argv = RARRAY_CONST_PTR(args);
1988 return rb_proc_call_with_block(entry->proc, argc, argv, Qnil);
1991 static struct MEMO *
1992 lazy_map_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
1994 VALUE value = lazyenum_yield_values(proc_entry, result);
1995 LAZY_MEMO_SET_VALUE(result, value);
1996 LAZY_MEMO_RESET_PACKED(result);
1997 return result;
2000 static VALUE
2001 lazy_map_size(VALUE entry, VALUE receiver)
2003 return receiver;
2006 static const lazyenum_funcs lazy_map_funcs = {
2007 lazy_map_proc, lazy_map_size,
2011 * call-seq:
2012 * lazy.collect { |obj| block } -> lazy_enumerator
2013 * lazy.map { |obj| block } -> lazy_enumerator
2015 * Like Enumerable#map, but chains operation to be lazy-evaluated.
2017 * (1..Float::INFINITY).lazy.map {|i| i**2 }
2018 * #=> #<Enumerator::Lazy: #<Enumerator::Lazy: 1..Infinity>:map>
2019 * (1..Float::INFINITY).lazy.map {|i| i**2 }.first(3)
2020 * #=> [1, 4, 9]
2023 static VALUE
2024 lazy_map(VALUE obj)
2026 if (!rb_block_given_p()) {
2027 rb_raise(rb_eArgError, "tried to call lazy map without a block");
2030 return lazy_add_method(obj, 0, 0, Qnil, Qnil, &lazy_map_funcs);
2033 struct flat_map_i_arg {
2034 struct MEMO *result;
2035 long index;
2038 static VALUE
2039 lazy_flat_map_i(RB_BLOCK_CALL_FUNC_ARGLIST(i, y))
2041 struct flat_map_i_arg *arg = (struct flat_map_i_arg *)y;
2043 return lazy_yielder_yield(arg->result, arg->index, argc, argv);
2046 static struct MEMO *
2047 lazy_flat_map_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2049 VALUE value = lazyenum_yield_values(proc_entry, result);
2050 VALUE ary = 0;
2051 const long proc_index = memo_index + 1;
2052 int break_p = LAZY_MEMO_BREAK_P(result);
2054 if (RB_TYPE_P(value, T_ARRAY)) {
2055 ary = value;
2057 else if (rb_respond_to(value, id_force) && rb_respond_to(value, id_each)) {
2058 struct flat_map_i_arg arg = {.result = result, .index = proc_index};
2059 LAZY_MEMO_RESET_BREAK(result);
2060 rb_block_call(value, id_each, 0, 0, lazy_flat_map_i, (VALUE)&arg);
2061 if (break_p) LAZY_MEMO_SET_BREAK(result);
2062 return 0;
2065 if (ary || !NIL_P(ary = rb_check_array_type(value))) {
2066 long i;
2067 LAZY_MEMO_RESET_BREAK(result);
2068 for (i = 0; i + 1 < RARRAY_LEN(ary); i++) {
2069 const VALUE argv = RARRAY_AREF(ary, i);
2070 lazy_yielder_yield(result, proc_index, 1, &argv);
2072 if (break_p) LAZY_MEMO_SET_BREAK(result);
2073 if (i >= RARRAY_LEN(ary)) return 0;
2074 value = RARRAY_AREF(ary, i);
2076 LAZY_MEMO_SET_VALUE(result, value);
2077 LAZY_MEMO_RESET_PACKED(result);
2078 return result;
2081 static const lazyenum_funcs lazy_flat_map_funcs = {
2082 lazy_flat_map_proc, 0,
2086 * call-seq:
2087 * lazy.collect_concat { |obj| block } -> a_lazy_enumerator
2088 * lazy.flat_map { |obj| block } -> a_lazy_enumerator
2090 * Returns a new lazy enumerator with the concatenated results of running
2091 * +block+ once for every element in the lazy enumerator.
2093 * ["foo", "bar"].lazy.flat_map {|i| i.each_char.lazy}.force
2094 * #=> ["f", "o", "o", "b", "a", "r"]
2096 * A value +x+ returned by +block+ is decomposed if either of
2097 * the following conditions is true:
2099 * * +x+ responds to both each and force, which means that
2100 * +x+ is a lazy enumerator.
2101 * * +x+ is an array or responds to to_ary.
2103 * Otherwise, +x+ is contained as-is in the return value.
2105 * [{a:1}, {b:2}].lazy.flat_map {|i| i}.force
2106 * #=> [{:a=>1}, {:b=>2}]
2108 static VALUE
2109 lazy_flat_map(VALUE obj)
2111 if (!rb_block_given_p()) {
2112 rb_raise(rb_eArgError, "tried to call lazy flat_map without a block");
2115 return lazy_add_method(obj, 0, 0, Qnil, Qnil, &lazy_flat_map_funcs);
2118 static struct MEMO *
2119 lazy_select_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2121 VALUE chain = lazyenum_yield(proc_entry, result);
2122 if (!RTEST(chain)) return 0;
2123 return result;
2126 static const lazyenum_funcs lazy_select_funcs = {
2127 lazy_select_proc, 0,
2131 * call-seq:
2132 * lazy.find_all { |obj| block } -> lazy_enumerator
2133 * lazy.select { |obj| block } -> lazy_enumerator
2134 * lazy.filter { |obj| block } -> lazy_enumerator
2136 * Like Enumerable#select, but chains operation to be lazy-evaluated.
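 *
 * An illustrative example:
 *
 *   (1..Float::INFINITY).lazy.select(&:even?).first(3)
 *   #=> [2, 4, 6]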
2138 static VALUE
2139 lazy_select(VALUE obj)
2141 if (!rb_block_given_p()) {
2142 rb_raise(rb_eArgError, "tried to call lazy select without a block");
2145 return lazy_add_method(obj, 0, 0, Qnil, Qnil, &lazy_select_funcs);
2148 static struct MEMO *
2149 lazy_filter_map_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2151 VALUE value = lazyenum_yield_values(proc_entry, result);
2152 if (!RTEST(value)) return 0;
2153 LAZY_MEMO_SET_VALUE(result, value);
2154 LAZY_MEMO_RESET_PACKED(result);
2155 return result;
2158 static const lazyenum_funcs lazy_filter_map_funcs = {
2159 lazy_filter_map_proc, 0,
2163 * call-seq:
2164 * lazy.filter_map { |obj| block } -> lazy_enumerator
2166 * Like Enumerable#filter_map, but chains operation to be lazy-evaluated.
2168 * (1..).lazy.filter_map { |i| i * 2 if i.even? }.first(5)
2169 * #=> [4, 8, 12, 16, 20]
2172 static VALUE
2173 lazy_filter_map(VALUE obj)
2175 if (!rb_block_given_p()) {
2176 rb_raise(rb_eArgError, "tried to call lazy filter_map without a block");
2179 return lazy_add_method(obj, 0, 0, Qnil, Qnil, &lazy_filter_map_funcs);
2182 static struct MEMO *
2183 lazy_reject_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2185 VALUE chain = lazyenum_yield(proc_entry, result);
2186 if (RTEST(chain)) return 0;
2187 return result;
2190 static const lazyenum_funcs lazy_reject_funcs = {
2191 lazy_reject_proc, 0,
2195 * call-seq:
2196 * lazy.reject { |obj| block } -> lazy_enumerator
2198 * Like Enumerable#reject, but chains operation to be lazy-evaluated.
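 *
 * An illustrative example:
 *
 *   (1..Float::INFINITY).lazy.reject(&:even?).first(3)
 *   #=> [1, 3, 5]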
2201 static VALUE
2202 lazy_reject(VALUE obj)
2204 if (!rb_block_given_p()) {
2205 rb_raise(rb_eArgError, "tried to call lazy reject without a block");
2208 return lazy_add_method(obj, 0, 0, Qnil, Qnil, &lazy_reject_funcs);
2211 static struct MEMO *
2212 lazy_grep_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2214 struct proc_entry *entry = proc_entry_ptr(proc_entry);
2215 VALUE chain = rb_funcall(entry->memo, id_eqq, 1, result->memo_value);
2216 if (!RTEST(chain)) return 0;
2217 return result;
2220 static struct MEMO *
2221 lazy_grep_iter_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2223 struct proc_entry *entry = proc_entry_ptr(proc_entry);
2224 VALUE value, chain = rb_funcall(entry->memo, id_eqq, 1, result->memo_value);
2226 if (!RTEST(chain)) return 0;
2227 value = rb_proc_call_with_block(entry->proc, 1, &(result->memo_value), Qnil);
2228 LAZY_MEMO_SET_VALUE(result, value);
2229 LAZY_MEMO_RESET_PACKED(result);
2231 return result;
2234 static const lazyenum_funcs lazy_grep_iter_funcs = {
2235 lazy_grep_iter_proc, 0,
2238 static const lazyenum_funcs lazy_grep_funcs = {
2239 lazy_grep_proc, 0,
2243 * call-seq:
2244 * lazy.grep(pattern) -> lazy_enumerator
2245 * lazy.grep(pattern) { |obj| block } -> lazy_enumerator
2247 * Like Enumerable#grep, but chains operation to be lazy-evaluated.
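 *
 * An illustrative example:
 *
 *   (1..Float::INFINITY).lazy.grep(20..25).first(3)
 *   #=> [20, 21, 22]
 *
 *   (1..Float::INFINITY).lazy.grep(20..25) { |i| i.to_s }.first(3)
 *   #=> ["20", "21", "22"]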
2250 static VALUE
2251 lazy_grep(VALUE obj, VALUE pattern)
2253 const lazyenum_funcs *const funcs = rb_block_given_p() ?
2254 &lazy_grep_iter_funcs : &lazy_grep_funcs;
2255 return lazy_add_method(obj, 0, 0, pattern, rb_ary_new3(1, pattern), funcs);
2258 static struct MEMO *
2259 lazy_grep_v_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2261 struct proc_entry *entry = proc_entry_ptr(proc_entry);
2262 VALUE chain = rb_funcall(entry->memo, id_eqq, 1, result->memo_value);
2263 if (RTEST(chain)) return 0;
2264 return result;
2267 static struct MEMO *
2268 lazy_grep_v_iter_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2270 struct proc_entry *entry = proc_entry_ptr(proc_entry);
2271 VALUE value, chain = rb_funcall(entry->memo, id_eqq, 1, result->memo_value);
2273 if (RTEST(chain)) return 0;
2274 value = rb_proc_call_with_block(entry->proc, 1, &(result->memo_value), Qnil);
2275 LAZY_MEMO_SET_VALUE(result, value);
2276 LAZY_MEMO_RESET_PACKED(result);
2278 return result;
2281 static const lazyenum_funcs lazy_grep_v_iter_funcs = {
2282 lazy_grep_v_iter_proc, 0,
2285 static const lazyenum_funcs lazy_grep_v_funcs = {
2286 lazy_grep_v_proc, 0,
2290 * call-seq:
2291 * lazy.grep_v(pattern) -> lazy_enumerator
2292 * lazy.grep_v(pattern) { |obj| block } -> lazy_enumerator
2294 * Like Enumerable#grep_v, but chains operation to be lazy-evaluated.
2297 static VALUE
2298 lazy_grep_v(VALUE obj, VALUE pattern)
2300 const lazyenum_funcs *const funcs = rb_block_given_p() ?
2301 &lazy_grep_v_iter_funcs : &lazy_grep_v_funcs;
2302 return lazy_add_method(obj, 0, 0, pattern, rb_ary_new3(1, pattern), funcs);
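/*
 * A minimal Ruby usage sketch for Enumerator::Lazy#grep_v; the receiver and
 * pattern below are illustrative:
 *
 *   (1..Float::INFINITY).lazy.grep_v(2..4).first(4)
 *   #=> [1, 5, 6, 7]
 */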
2305 static VALUE
2306 call_next(VALUE obj)
2308 return rb_funcall(obj, id_next, 0);
2311 static VALUE
2312 next_stopped(VALUE obj, VALUE _)
2314 return Qnil;
2317 static struct MEMO *
2318 lazy_zip_arrays_func(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2320 struct proc_entry *entry = proc_entry_ptr(proc_entry);
2321 VALUE ary, arrays = entry->memo;
2322 VALUE memo = rb_ary_entry(memos, memo_index);
2323 long i, count = NIL_P(memo) ? 0 : NUM2LONG(memo);
2325 ary = rb_ary_new2(RARRAY_LEN(arrays) + 1);
2326 rb_ary_push(ary, result->memo_value);
2327 for (i = 0; i < RARRAY_LEN(arrays); i++) {
2328 rb_ary_push(ary, rb_ary_entry(RARRAY_AREF(arrays, i), count));
2330 LAZY_MEMO_SET_VALUE(result, ary);
2331 LAZY_MEMO_SET_PACKED(result);
2332 rb_ary_store(memos, memo_index, LONG2NUM(++count));
2333 return result;
2336 static struct MEMO *
2337 lazy_zip_func(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2339 struct proc_entry *entry = proc_entry_ptr(proc_entry);
2340 VALUE arg = rb_ary_entry(memos, memo_index);
2341 VALUE zip_args = entry->memo;
2342 VALUE ary, v;
2343 long i;
2345 if (NIL_P(arg)) {
2346 arg = rb_ary_new2(RARRAY_LEN(zip_args));
2347 for (i = 0; i < RARRAY_LEN(zip_args); i++) {
2348 rb_ary_push(arg, rb_funcall(RARRAY_AREF(zip_args, i), id_to_enum, 0));
2350 rb_ary_store(memos, memo_index, arg);
2353 ary = rb_ary_new2(RARRAY_LEN(arg) + 1);
2354 rb_ary_push(ary, result->memo_value);
2355 for (i = 0; i < RARRAY_LEN(arg); i++) {
2356 v = rb_rescue2(call_next, RARRAY_AREF(arg, i), next_stopped, 0,
2357 rb_eStopIteration, (VALUE)0);
2358 rb_ary_push(ary, v);
2360 LAZY_MEMO_SET_VALUE(result, ary);
2361 LAZY_MEMO_SET_PACKED(result);
2362 return result;
2365 static const lazyenum_funcs lazy_zip_funcs[] = {
2366 {lazy_zip_func, lazy_receiver_size,},
2367 {lazy_zip_arrays_func, lazy_receiver_size,},
2371 * call-seq:
2372 * lazy.zip(arg, ...) -> lazy_enumerator
2373 * lazy.zip(arg, ...) { |arr| block } -> nil
2375 * Like Enumerable#zip, but chains operation to be lazy-evaluated.
2376 * However, if a block is given to zip, values are enumerated immediately.
2378 static VALUE
2379 lazy_zip(int argc, VALUE *argv, VALUE obj)
2381 VALUE ary, v;
2382 long i;
2383 const lazyenum_funcs *funcs = &lazy_zip_funcs[1];
2385 if (rb_block_given_p()) {
2386 return rb_call_super(argc, argv);
2389 ary = rb_ary_new2(argc);
2390 for (i = 0; i < argc; i++) {
2391 v = rb_check_array_type(argv[i]);
2392 if (NIL_P(v)) {
2393 for (; i < argc; i++) {
2394 if (!rb_respond_to(argv[i], id_each)) {
2395 rb_raise(rb_eTypeError, "wrong argument type %"PRIsVALUE" (must respond to :each)",
2396 rb_obj_class(argv[i]));
2399 ary = rb_ary_new4(argc, argv);
2400 funcs = &lazy_zip_funcs[0];
2401 break;
2403 rb_ary_push(ary, v);
2406 return lazy_add_method(obj, 0, 0, ary, ary, funcs);
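/*
 * A minimal Ruby usage sketch for Enumerator::Lazy#zip; the receivers and
 * arguments below are illustrative. Shorter array arguments are padded with
 * nil, and the block form enumerates immediately as noted above:
 *
 *   (1..Float::INFINITY).lazy.zip([:a, :b, :c]).first(4)
 *   #=> [[1, :a], [2, :b], [3, :c], [4, nil]]
 *
 *   [1, 2].lazy.zip([:a, :b]) { |pair| p pair }   # prints each pair, returns nil
 */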
2409 static struct MEMO *
2410 lazy_take_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2412 long remain;
2413 struct proc_entry *entry = proc_entry_ptr(proc_entry);
2414 VALUE memo = rb_ary_entry(memos, memo_index);
2416 if (NIL_P(memo)) {
2417 memo = entry->memo;
2420 remain = NUM2LONG(memo);
2421 if (remain == 0) {
2422 LAZY_MEMO_SET_BREAK(result);
2424 else {
2425 if (--remain == 0) LAZY_MEMO_SET_BREAK(result);
2426 rb_ary_store(memos, memo_index, LONG2NUM(remain));
2428 return result;
2431 static VALUE
2432 lazy_take_size(VALUE entry, VALUE receiver)
2434 long len = NUM2LONG(RARRAY_AREF(rb_ivar_get(entry, id_arguments), 0));
2435 if (NIL_P(receiver) || (FIXNUM_P(receiver) && FIX2LONG(receiver) < len))
2436 return receiver;
2437 return LONG2NUM(len);
2440 static const lazyenum_funcs lazy_take_funcs = {
2441 lazy_take_proc, lazy_take_size,
2445 * call-seq:
2446 * lazy.take(n) -> lazy_enumerator
2448 * Like Enumerable#take, but chains operation to be lazy-evaluated.
2451 static VALUE
2452 lazy_take(VALUE obj, VALUE n)
2454 long len = NUM2LONG(n);
2455 int argc = 0;
2456 VALUE argv[2];
2458 if (len < 0) {
2459 rb_raise(rb_eArgError, "attempt to take negative size");
2462 if (len == 0) {
2463 argv[0] = sym_cycle;
2464 argv[1] = INT2NUM(0);
2465 argc = 2;
2468 return lazy_add_method(obj, argc, argv, n, rb_ary_new3(1, n), &lazy_take_funcs);
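/*
 * A minimal Ruby usage sketch for Enumerator::Lazy#take; the receiver below
 * is illustrative:
 *
 *   (1..Float::INFINITY).lazy.take(3).to_a   #=> [1, 2, 3]
 *   (1..Float::INFINITY).lazy.take(0).to_a   #=> []
 */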
2471 static struct MEMO *
2472 lazy_take_while_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2474 VALUE take = lazyenum_yield_values(proc_entry, result);
2475 if (!RTEST(take)) {
2476 LAZY_MEMO_SET_BREAK(result);
2477 return 0;
2479 return result;
2482 static const lazyenum_funcs lazy_take_while_funcs = {
2483 lazy_take_while_proc, 0,
2487 * call-seq:
2488 * lazy.take_while { |obj| block } -> lazy_enumerator
2490 * Like Enumerable#take_while, but chains operation to be lazy-evaluated.
2493 static VALUE
2494 lazy_take_while(VALUE obj)
2496 if (!rb_block_given_p()) {
2497 rb_raise(rb_eArgError, "tried to call lazy take_while without a block");
2500 return lazy_add_method(obj, 0, 0, Qnil, Qnil, &lazy_take_while_funcs);
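/*
 * A minimal Ruby usage sketch for Enumerator::Lazy#take_while; the receiver
 * and block below are illustrative:
 *
 *   (1..Float::INFINITY).lazy.take_while { |i| i < 4 }.to_a   #=> [1, 2, 3]
 */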
2503 static VALUE
2504 lazy_drop_size(VALUE proc_entry, VALUE receiver)
2506 long len = NUM2LONG(RARRAY_AREF(rb_ivar_get(proc_entry, id_arguments), 0));
2507 if (NIL_P(receiver))
2508 return receiver;
2509 if (FIXNUM_P(receiver)) {
2510 len = FIX2LONG(receiver) - len;
2511 return LONG2FIX(len < 0 ? 0 : len);
2513 return rb_funcall(receiver, '-', 1, LONG2NUM(len));
2516 static struct MEMO *
2517 lazy_drop_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2519 long remain;
2520 struct proc_entry *entry = proc_entry_ptr(proc_entry);
2521 VALUE memo = rb_ary_entry(memos, memo_index);
2523 if (NIL_P(memo)) {
2524 memo = entry->memo;
2526 remain = NUM2LONG(memo);
2527 if (remain > 0) {
2528 --remain;
2529 rb_ary_store(memos, memo_index, LONG2NUM(remain));
2530 return 0;
2533 return result;
2536 static const lazyenum_funcs lazy_drop_funcs = {
2537 lazy_drop_proc, lazy_drop_size,
2541 * call-seq:
2542 * lazy.drop(n) -> lazy_enumerator
2544 * Like Enumerable#drop, but chains operation to be lazy-evaluated.
2547 static VALUE
2548 lazy_drop(VALUE obj, VALUE n)
2550 long len = NUM2LONG(n);
2551 VALUE argv[2];
2552 argv[0] = sym_each;
2553 argv[1] = n;
2555 if (len < 0) {
2556 rb_raise(rb_eArgError, "attempt to drop negative size");
2559 return lazy_add_method(obj, 2, argv, n, rb_ary_new3(1, n), &lazy_drop_funcs);
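/*
 * A minimal Ruby usage sketch for Enumerator::Lazy#drop; the receiver below
 * is illustrative:
 *
 *   (1..Float::INFINITY).lazy.drop(3).first(3)   #=> [4, 5, 6]
 */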
2562 static struct MEMO *
2563 lazy_drop_while_proc(VALUE proc_entry, struct MEMO* result, VALUE memos, long memo_index)
2565 struct proc_entry *entry = proc_entry_ptr(proc_entry);
2566 VALUE memo = rb_ary_entry(memos, memo_index);
2568 if (NIL_P(memo)) {
2569 memo = entry->memo;
2572 if (!RTEST(memo)) {
2573 VALUE drop = lazyenum_yield_values(proc_entry, result);
2574 if (RTEST(drop)) return 0;
2575 rb_ary_store(memos, memo_index, Qtrue);
2577 return result;
2580 static const lazyenum_funcs lazy_drop_while_funcs = {
2581 lazy_drop_while_proc, 0,
2585 * call-seq:
2586 * lazy.drop_while { |obj| block } -> lazy_enumerator
2588 * Like Enumerable#drop_while, but chains operation to be lazy-evaluated.
2591 static VALUE
2592 lazy_drop_while(VALUE obj)
2594 if (!rb_block_given_p()) {
2595 rb_raise(rb_eArgError, "tried to call lazy drop_while without a block");
2598 return lazy_add_method(obj, 0, 0, Qfalse, Qnil, &lazy_drop_while_funcs);
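/*
 * A minimal Ruby usage sketch for Enumerator::Lazy#drop_while; the receiver
 * and block below are illustrative:
 *
 *   (1..Float::INFINITY).lazy.drop_while { |i| i < 3 }.first(3)   #=> [3, 4, 5]
 */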
2601 static int
2602 lazy_uniq_check(VALUE chain, VALUE memos, long memo_index)
2604 VALUE hash = rb_ary_entry(memos, memo_index);
2606 if (NIL_P(hash)) {
2607 hash = rb_obj_hide(rb_hash_new());
2608 rb_ary_store(memos, memo_index, hash);
2611 return rb_hash_add_new_element(hash, chain, Qfalse);
2614 static struct MEMO *
2615 lazy_uniq_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2617 if (lazy_uniq_check(result->memo_value, memos, memo_index)) return 0;
2618 return result;
2621 static struct MEMO *
2622 lazy_uniq_iter_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2624 VALUE chain = lazyenum_yield(proc_entry, result);
2626 if (lazy_uniq_check(chain, memos, memo_index)) return 0;
2627 return result;
2630 static const lazyenum_funcs lazy_uniq_iter_funcs = {
2631 lazy_uniq_iter_proc, 0,
2634 static const lazyenum_funcs lazy_uniq_funcs = {
2635 lazy_uniq_proc, 0,
2639 * call-seq:
2640 * lazy.uniq -> lazy_enumerator
2641 * lazy.uniq { |item| block } -> lazy_enumerator
2643 * Like Enumerable#uniq, but chains operation to be lazy-evaluated.
2646 static VALUE
2647 lazy_uniq(VALUE obj)
2649 const lazyenum_funcs *const funcs =
2650 rb_block_given_p() ? &lazy_uniq_iter_funcs : &lazy_uniq_funcs;
2651 return lazy_add_method(obj, 0, 0, Qnil, Qnil, funcs);
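/*
 * A minimal Ruby usage sketch for Enumerator::Lazy#uniq, with and without a
 * block; the receivers and block below are illustrative:
 *
 *   [1, 1, 2, 2, 3].lazy.uniq.first(2)   #=> [1, 2]
 *
 *   (1..Float::INFINITY).lazy.uniq { |i| (i**2) % 10 }.first(6)
 *   #=> [1, 2, 3, 4, 5, 10]
 */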
2654 static struct MEMO *
2655 lazy_compact_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2657 if (NIL_P(result->memo_value)) return 0;
2658 return result;
2661 static const lazyenum_funcs lazy_compact_funcs = {
2662 lazy_compact_proc, 0,
2666 * call-seq:
2667 * lazy.compact -> lazy_enumerator
2669 * Like Enumerable#compact, but chains operation to be lazy-evaluated.
2672 static VALUE
2673 lazy_compact(VALUE obj)
2675 return lazy_add_method(obj, 0, 0, Qnil, Qnil, &lazy_compact_funcs);
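/*
 * A minimal Ruby usage sketch for Enumerator::Lazy#compact; the receiver
 * below is illustrative:
 *
 *   [1, nil, 2, nil, 3].lazy.compact.first(2)   #=> [1, 2]
 */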
2678 static struct MEMO *
2679 lazy_with_index_proc(VALUE proc_entry, struct MEMO* result, VALUE memos, long memo_index)
2681 struct proc_entry *entry = proc_entry_ptr(proc_entry);
2682 VALUE memo = rb_ary_entry(memos, memo_index);
2683 VALUE argv[2];
2685 if (NIL_P(memo)) {
2686 memo = entry->memo;
2689 argv[0] = result->memo_value;
2690 argv[1] = memo;
2691 if (entry->proc) {
2692 rb_proc_call_with_block(entry->proc, 2, argv, Qnil);
2693 LAZY_MEMO_RESET_PACKED(result);
2695 else {
2696 LAZY_MEMO_SET_VALUE(result, rb_ary_new_from_values(2, argv));
2697 LAZY_MEMO_SET_PACKED(result);
2699 rb_ary_store(memos, memo_index, LONG2NUM(NUM2LONG(memo) + 1));
2700 return result;
2703 static VALUE
2704 lazy_with_index_size(VALUE proc, VALUE receiver)
2706 return receiver;
2709 static const lazyenum_funcs lazy_with_index_funcs = {
2710 lazy_with_index_proc, lazy_with_index_size,
2714 * call-seq:
2715 * lazy.with_index(offset = 0) {|(*args), idx| block }
2716 * lazy.with_index(offset = 0)
2718 * If a block is given, returns a lazy enumerator that calls the
2719 * given block for each element together with its index (starting
2720 * from +offset+) and yields the same values as the receiver
2721 * (without the index).
2723 * If a block is not given, returns a new lazy enumerator that
2724 * includes the index, starting from +offset+.
2726 * +offset+:: the starting index to use
2728 * See Enumerator#with_index.
2730 static VALUE
2731 lazy_with_index(int argc, VALUE *argv, VALUE obj)
2733 VALUE memo;
2735 rb_scan_args(argc, argv, "01", &memo);
2736 if (NIL_P(memo))
2737 memo = LONG2NUM(0);
2739 return lazy_add_method(obj, 0, 0, memo, rb_ary_new_from_values(1, &memo), &lazy_with_index_funcs);
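/*
 * A minimal Ruby usage sketch for Enumerator::Lazy#with_index; the receiver,
 * offset, and block below are illustrative. Without a block the index is
 * included in the yielded values; with a block it is only passed to the block:
 *
 *   ("a"..).lazy.with_index(1).first(3)
 *   #=> [["a", 1], ["b", 2], ["c", 3]]
 *
 *   ("a"..).lazy.with_index(1) { |s, i| print "#{i}:#{s} " }.first(2)
 *   # prints "1:a 2:b " and returns ["a", "b"]
 */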
2742 #if 0 /* for RDoc */
2745 * call-seq:
2746 * lazy.chunk { |elt| ... } -> lazy_enumerator
2748 * Like Enumerable#chunk, but chains operation to be lazy-evaluated.
2750 static VALUE lazy_chunk(VALUE self)
2755 * call-seq:
2756 * lazy.chunk_while {|elt_before, elt_after| bool } -> lazy_enumerator
2758 * Like Enumerable#chunk_while, but chains operation to be lazy-evaluated.
2760 static VALUE lazy_chunk_while(VALUE self)
2765 * call-seq:
2766 * lazy.slice_after(pattern) -> lazy_enumerator
2767 * lazy.slice_after { |elt| bool } -> lazy_enumerator
2769 * Like Enumerable#slice_after, but chains operation to be lazy-evaluated.
2771 static VALUE lazy_slice_after(VALUE self)
2776 * call-seq:
2777 * lazy.slice_before(pattern) -> lazy_enumerator
2778 * lazy.slice_before { |elt| bool } -> lazy_enumerator
2780 * Like Enumerable#slice_before, but chains operation to be lazy-evaluated.
2782 static VALUE lazy_slice_before(VALUE self)
2787 * call-seq:
2788 * lazy.slice_when {|elt_before, elt_after| bool } -> lazy_enumerator
2790 * Like Enumerable#slice_when, but chains operation to be lazy-evaluated.
2792 static VALUE lazy_slice_when(VALUE self)
2795 # endif
2797 static VALUE
2798 lazy_super(int argc, VALUE *argv, VALUE lazy)
2800 return enumerable_lazy(rb_call_super(argc, argv));
2804 * call-seq:
2805 * enum.lazy -> lazy_enumerator
2807 * Returns self.
2810 static VALUE
2811 lazy_lazy(VALUE obj)
2813 return obj;
2817 * Document-class: StopIteration
2819 * Raised to stop the iteration, in particular by Enumerator#next. It is
2820 * rescued by Kernel#loop.
2822 * loop do
2823 * puts "Hello"
2824 * raise StopIteration
2825 * puts "World"
2826 * end
2827 * puts "Done!"
2829 * <em>produces:</em>
2831 * Hello
2832 * Done!
2836 * call-seq:
2837 * result -> value
2839 * Returns the return value of the iterator.
2841 * o = Object.new
2842 * def o.each
2843 * yield 1
2844 * yield 2
2845 * yield 3
2846 * 100
2847 * end
2849 * e = o.to_enum
2851 * puts e.next #=> 1
2852 * puts e.next #=> 2
2853 * puts e.next #=> 3
2855 * begin
2856 * e.next
2857 * rescue StopIteration => ex
2858 * puts ex.result #=> 100
2859 * end
2863 static VALUE
2864 stop_result(VALUE self)
2866 return rb_attr_get(self, id_result);
2870 * Producer
2873 static void
2874 producer_mark(void *p)
2876 struct producer *ptr = p;
2877 rb_gc_mark_movable(ptr->init);
2878 rb_gc_mark_movable(ptr->proc);
2881 static void
2882 producer_compact(void *p)
2884 struct producer *ptr = p;
2885 ptr->init = rb_gc_location(ptr->init);
2886 ptr->proc = rb_gc_location(ptr->proc);
2889 #define producer_free RUBY_TYPED_DEFAULT_FREE
2891 static size_t
2892 producer_memsize(const void *p)
2894 return sizeof(struct producer);
2897 static const rb_data_type_t producer_data_type = {
2898 "producer",
2900 producer_mark,
2901 producer_free,
2902 producer_memsize,
2903 producer_compact,
2905 0, 0, RUBY_TYPED_FREE_IMMEDIATELY
2908 static struct producer *
2909 producer_ptr(VALUE obj)
2911 struct producer *ptr;
2913 TypedData_Get_Struct(obj, struct producer, &producer_data_type, ptr);
2914 if (!ptr || ptr->proc == Qundef) {
2915 rb_raise(rb_eArgError, "uninitialized producer");
2917 return ptr;
2920 /* :nodoc: */
2921 static VALUE
2922 producer_allocate(VALUE klass)
2924 struct producer *ptr;
2925 VALUE obj;
2927 obj = TypedData_Make_Struct(klass, struct producer, &producer_data_type, ptr);
2928 ptr->init = Qundef;
2929 ptr->proc = Qundef;
2931 return obj;
2934 static VALUE
2935 producer_init(VALUE obj, VALUE init, VALUE proc)
2937 struct producer *ptr;
2939 TypedData_Get_Struct(obj, struct producer, &producer_data_type, ptr);
2941 if (!ptr) {
2942 rb_raise(rb_eArgError, "unallocated producer");
2945 ptr->init = init;
2946 ptr->proc = proc;
2948 return obj;
2951 static VALUE
2952 producer_each_stop(VALUE dummy, VALUE exc)
2954 return rb_attr_get(exc, id_result);
2957 NORETURN(static VALUE producer_each_i(VALUE obj));
2959 static VALUE
2960 producer_each_i(VALUE obj)
2962 struct producer *ptr;
2963 VALUE init, proc, curr;
2965 ptr = producer_ptr(obj);
2966 init = ptr->init;
2967 proc = ptr->proc;
2969 if (init == Qundef) {
2970 curr = Qnil;
2972 else {
2973 rb_yield(init);
2974 curr = init;
2977 for (;;) {
2978 curr = rb_funcall(proc, id_call, 1, curr);
2979 rb_yield(curr);
2982 UNREACHABLE_RETURN(Qnil);
2985 /* :nodoc: */
2986 static VALUE
2987 producer_each(VALUE obj)
2989 rb_need_block();
2991 return rb_rescue2(producer_each_i, obj, producer_each_stop, (VALUE)0, rb_eStopIteration, (VALUE)0);
2994 static VALUE
2995 producer_size(VALUE obj, VALUE args, VALUE eobj)
2997 return DBL2NUM(HUGE_VAL);
3001 * call-seq:
3002 * Enumerator.produce(initial = nil) { |prev| block } -> enumerator
3004 * Creates an infinite enumerator from any block, just called over and
3005 * over. The result of the previous iteration is passed to the next one.
3006 * If +initial+ is provided, it is passed to the first iteration, and
3007 * becomes the first element of the enumerator; if it is not provided,
3008 * the first iteration receives +nil+, and its result becomes the first
3009 * element of the enumerator.
3011 * Raising StopIteration from the block stops the iteration.
3013 * Enumerator.produce(1, &:succ) # => enumerator of 1, 2, 3, 4, ....
3015 * Enumerator.produce { rand(10) } # => infinite random number sequence
3017 * ancestors = Enumerator.produce(node) { |prev| node = prev.parent or raise StopIteration }
3018 * enclosing_section = ancestors.find { |n| n.type == :section }
3020 * Using ::produce together with Enumerable methods like Enumerable#detect,
3021 * Enumerable#slice_after, Enumerable#take_while can provide Enumerator-based alternatives
3022 * for +while+ and +until+ loops:
3024 * # Find next Tuesday
3025 * require "date"
3026 * Enumerator.produce(Date.today, &:succ).detect(&:tuesday?)
3028 * # Simple lexer:
3029 * require "strscan"
3030 * scanner = StringScanner.new("7+38/6")
3031 * PATTERN = %r{\d+|[-/+*]}
3032 * Enumerator.produce { scanner.scan(PATTERN) }.slice_after { scanner.eos? }.first
3033 * # => ["7", "+", "38", "/", "6"]
3035 static VALUE
3036 enumerator_s_produce(int argc, VALUE *argv, VALUE klass)
3038 VALUE init, producer;
3040 if (!rb_block_given_p()) rb_raise(rb_eArgError, "no block given");
3042 if (rb_scan_args(argc, argv, "01", &init) == 0) {
3043 init = Qundef;
3046 producer = producer_init(producer_allocate(rb_cEnumProducer), init, rb_block_proc());
3048 return rb_enumeratorize_with_size_kw(producer, sym_each, 0, 0, producer_size, RB_NO_KEYWORDS);
3052 * Document-class: Enumerator::Chain
3054 * Enumerator::Chain is a subclass of Enumerator that represents a
3055 * chain of enumerables working as a single enumerator.
3057 * Objects of this type can be created by Enumerable#chain and
3058 * Enumerator#+.
3061 static void
3062 enum_chain_mark(void *p)
3064 struct enum_chain *ptr = p;
3065 rb_gc_mark_movable(ptr->enums);
3068 static void
3069 enum_chain_compact(void *p)
3071 struct enum_chain *ptr = p;
3072 ptr->enums = rb_gc_location(ptr->enums);
3075 #define enum_chain_free RUBY_TYPED_DEFAULT_FREE
3077 static size_t
3078 enum_chain_memsize(const void *p)
3080 return sizeof(struct enum_chain);
3083 static const rb_data_type_t enum_chain_data_type = {
3084 "chain",
3086 enum_chain_mark,
3087 enum_chain_free,
3088 enum_chain_memsize,
3089 enum_chain_compact,
3091 0, 0, RUBY_TYPED_FREE_IMMEDIATELY
3094 static struct enum_chain *
3095 enum_chain_ptr(VALUE obj)
3097 struct enum_chain *ptr;
3099 TypedData_Get_Struct(obj, struct enum_chain, &enum_chain_data_type, ptr);
3100 if (!ptr || ptr->enums == Qundef) {
3101 rb_raise(rb_eArgError, "uninitialized chain");
3103 return ptr;
3106 /* :nodoc: */
3107 static VALUE
3108 enum_chain_allocate(VALUE klass)
3110 struct enum_chain *ptr;
3111 VALUE obj;
3113 obj = TypedData_Make_Struct(klass, struct enum_chain, &enum_chain_data_type, ptr);
3114 ptr->enums = Qundef;
3115 ptr->pos = -1;
3117 return obj;
3121 * call-seq:
3122 * Enumerator::Chain.new(*enums) -> enum
3124 * Generates a new enumerator object that iterates over the elements
3125 * of given enumerable objects in sequence.
3127 * e = Enumerator::Chain.new(1..3, [4, 5])
3128 * e.to_a #=> [1, 2, 3, 4, 5]
3129 * e.size #=> 5
3131 static VALUE
3132 enum_chain_initialize(VALUE obj, VALUE enums)
3134 struct enum_chain *ptr;
3136 rb_check_frozen(obj);
3137 TypedData_Get_Struct(obj, struct enum_chain, &enum_chain_data_type, ptr);
3139 if (!ptr) rb_raise(rb_eArgError, "unallocated chain");
3141 ptr->enums = rb_obj_freeze(enums);
3142 ptr->pos = -1;
3144 return obj;
3147 static VALUE
3148 new_enum_chain(VALUE enums)
3150 long i;
3151 VALUE obj = enum_chain_initialize(enum_chain_allocate(rb_cEnumChain), enums);
3153 for (i = 0; i < RARRAY_LEN(enums); i++) {
3154 if (RTEST(rb_obj_is_kind_of(RARRAY_AREF(enums, i), rb_cLazy))) {
3155 return enumerable_lazy(obj);
3159 return obj;
3162 /* :nodoc: */
3163 static VALUE
3164 enum_chain_init_copy(VALUE obj, VALUE orig)
3166 struct enum_chain *ptr0, *ptr1;
3168 if (!OBJ_INIT_COPY(obj, orig)) return obj;
3169 ptr0 = enum_chain_ptr(orig);
3171 TypedData_Get_Struct(obj, struct enum_chain, &enum_chain_data_type, ptr1);
3173 if (!ptr1) rb_raise(rb_eArgError, "unallocated chain");
3175 ptr1->enums = ptr0->enums;
3176 ptr1->pos = ptr0->pos;
3178 return obj;
3181 static VALUE
3182 enum_chain_total_size(VALUE enums)
3184 VALUE total = INT2FIX(0);
3185 long i;
3187 for (i = 0; i < RARRAY_LEN(enums); i++) {
3188 VALUE size = enum_size(RARRAY_AREF(enums, i));
3190 if (NIL_P(size) || (RB_FLOAT_TYPE_P(size) && isinf(NUM2DBL(size)))) {
3191 return size;
3193 if (!RB_INTEGER_TYPE_P(size)) {
3194 return Qnil;
3197 total = rb_funcall(total, '+', 1, size);
3200 return total;
3204 * call-seq:
3205 * obj.size -> int, Float::INFINITY or nil
3207 * Returns the total size of the enumerator chain calculated by
3208 * summing up the size of each enumerable in the chain. If any of the
3209 * enumerables reports its size as nil or Float::INFINITY, that value
3210 * is returned as the total size.
3212 static VALUE
3213 enum_chain_size(VALUE obj)
3215 return enum_chain_total_size(enum_chain_ptr(obj)->enums);
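/*
 * A minimal Ruby sketch of how Enumerator::Chain#size combines the sizes of
 * its enumerables; the chained objects below are illustrative:
 *
 *   Enumerator::Chain.new(1..3, [4, 5]).size                    #=> 5
 *   Enumerator::Chain.new(1..3, 1..Float::INFINITY).size        #=> Float::INFINITY
 *   Enumerator::Chain.new(1..3, Enumerator.new { |y| y }).size  #=> nil
 */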
3218 static VALUE
3219 enum_chain_enum_size(VALUE obj, VALUE args, VALUE eobj)
3221 return enum_chain_size(obj);
3224 static VALUE
3225 enum_chain_enum_no_size(VALUE obj, VALUE args, VALUE eobj)
3227 return Qnil;
3231 * call-seq:
3232 * obj.each(*args) { |...| ... } -> obj
3233 * obj.each(*args) -> enumerator
3235 * Iterates over the elements of the first enumerable by calling the
3236 * "each" method on it with the given arguments, then proceeds to the
3237 * following enumerables in sequence until all of the enumerables are
3238 * exhausted.
3240 * If no block is given, returns an enumerator.
3242 static VALUE
3243 enum_chain_each(int argc, VALUE *argv, VALUE obj)
3245 VALUE enums, block;
3246 struct enum_chain *objptr;
3247 long i;
3249 RETURN_SIZED_ENUMERATOR(obj, argc, argv, argc > 0 ? enum_chain_enum_no_size : enum_chain_enum_size);
3251 objptr = enum_chain_ptr(obj);
3252 enums = objptr->enums;
3253 block = rb_block_proc();
3255 for (i = 0; i < RARRAY_LEN(enums); i++) {
3256 objptr->pos = i;
3257 rb_funcall_with_block(RARRAY_AREF(enums, i), id_each, argc, argv, block);
3260 return obj;
3264 * call-seq:
3265 * obj.rewind -> obj
3267 * Rewinds the enumerator chain by calling the "rewind" method on each
3268 * enumerable in reverse order. Each call is performed only if the
3269 * enumerable responds to the method.
3271 static VALUE
3272 enum_chain_rewind(VALUE obj)
3274 struct enum_chain *objptr = enum_chain_ptr(obj);
3275 VALUE enums = objptr->enums;
3276 long i;
3278 for (i = objptr->pos; 0 <= i && i < RARRAY_LEN(enums); objptr->pos = --i) {
3279 rb_check_funcall(RARRAY_AREF(enums, i), id_rewind, 0, 0);
3282 return obj;
3285 static VALUE
3286 inspect_enum_chain(VALUE obj, VALUE dummy, int recur)
3288 VALUE klass = rb_obj_class(obj);
3289 struct enum_chain *ptr;
3291 TypedData_Get_Struct(obj, struct enum_chain, &enum_chain_data_type, ptr);
3293 if (!ptr || ptr->enums == Qundef) {
3294 return rb_sprintf("#<%"PRIsVALUE": uninitialized>", rb_class_path(klass));
3297 if (recur) {
3298 return rb_sprintf("#<%"PRIsVALUE": ...>", rb_class_path(klass));
3301 return rb_sprintf("#<%"PRIsVALUE": %+"PRIsVALUE">", rb_class_path(klass), ptr->enums);
3305 * call-seq:
3306 * obj.inspect -> string
3308 * Returns a printable version of the enumerator chain.
3310 static VALUE
3311 enum_chain_inspect(VALUE obj)
3313 return rb_exec_recursive(inspect_enum_chain, obj, 0);
3317 * call-seq:
3318 * e.chain(*enums) -> enumerator
3320 * Returns an enumerator object generated from this enumerator and
3321 * given enumerables.
3323 * e = (1..3).chain([4, 5])
3324 * e.to_a #=> [1, 2, 3, 4, 5]
3326 static VALUE
3327 enum_chain(int argc, VALUE *argv, VALUE obj)
3329 VALUE enums = rb_ary_new_from_values(1, &obj);
3330 rb_ary_cat(enums, argv, argc);
3331 return new_enum_chain(enums);
3335 * call-seq:
3336 * e + enum -> enumerator
3338 * Returns an enumerator object generated from this enumerator and a
3339 * given enumerable.
3341 * e = (1..3).each + [4, 5]
3342 * e.to_a #=> [1, 2, 3, 4, 5]
3344 static VALUE
3345 enumerator_plus(VALUE obj, VALUE eobj)
3347 return new_enum_chain(rb_ary_new_from_args(2, obj, eobj));
3351 * Document-class: Enumerator::ArithmeticSequence
3353 * Enumerator::ArithmeticSequence is a subclass of Enumerator
3354 * that represents a sequence of numbers with a common difference.
3355 * Instances of this class can be generated by the Range#step and Numeric#step
3356 * methods.
3358 * The class can be used for slicing Array (see Array#slice) or custom
3359 * collections.
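/*
 * A minimal Ruby sketch of how an Enumerator::ArithmeticSequence is obtained
 * and used for slicing; the ranges below are illustrative:
 *
 *   seq = (1..10).step(3)
 *   seq.class   #=> Enumerator::ArithmeticSequence
 *   seq.to_a    #=> [1, 4, 7, 10]
 *
 *   ("a".."j").to_a[(0..9).step(3)]   #=> ["a", "d", "g", "j"]
 */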
3362 VALUE
3363 rb_arith_seq_new(VALUE obj, VALUE meth, int argc, VALUE const *argv,
3364 rb_enumerator_size_func *size_fn,
3365 VALUE beg, VALUE end, VALUE step, int excl)
3367 VALUE aseq = enumerator_init(enumerator_allocate(rb_cArithSeq),
3368 obj, meth, argc, argv, size_fn, Qnil, rb_keyword_given_p());
3369 rb_ivar_set(aseq, id_begin, beg);
3370 rb_ivar_set(aseq, id_end, end);
3371 rb_ivar_set(aseq, id_step, step);
3372 rb_ivar_set(aseq, id_exclude_end, RBOOL(excl));
3373 return aseq;
3377 * call-seq: aseq.begin -> num or nil
3379 * Returns the number that defines the first element of this arithmetic
3380 * sequence.
3382 static inline VALUE
3383 arith_seq_begin(VALUE self)
3385 return rb_ivar_get(self, id_begin);
3389 * call-seq: aseq.end -> num or nil
3391 * Returns the number that defines the end of this arithmetic sequence.
3393 static inline VALUE
3394 arith_seq_end(VALUE self)
3396 return rb_ivar_get(self, id_end);
3400 * call-seq: aseq.step -> num
3402 * Returns the number that defines the common difference between
3403 * two adjacent elements in this arithmetic sequence.
3405 static inline VALUE
3406 arith_seq_step(VALUE self)
3408 return rb_ivar_get(self, id_step);
3412 * call-seq: aseq.exclude_end? -> true or false
3414 * Returns <code>true</code> if this arithmetic sequence excludes its end value.
3416 static inline VALUE
3417 arith_seq_exclude_end(VALUE self)
3419 return rb_ivar_get(self, id_exclude_end);
3422 static inline int
3423 arith_seq_exclude_end_p(VALUE self)
3425 return RTEST(arith_seq_exclude_end(self));
3429 rb_arithmetic_sequence_extract(VALUE obj, rb_arithmetic_sequence_components_t *component)
3431 if (rb_obj_is_kind_of(obj, rb_cArithSeq)) {
3432 component->begin = arith_seq_begin(obj);
3433 component->end = arith_seq_end(obj);
3434 component->step = arith_seq_step(obj);
3435 component->exclude_end = arith_seq_exclude_end_p(obj);
3436 return 1;
3438 else if (rb_range_values(obj, &component->begin, &component->end, &component->exclude_end)) {
3439 component->step = INT2FIX(1);
3440 return 1;
3443 return 0;
3446 VALUE
3447 rb_arithmetic_sequence_beg_len_step(VALUE obj, long *begp, long *lenp, long *stepp, long len, int err)
3449 RBIMPL_NONNULL_ARG(begp);
3450 RBIMPL_NONNULL_ARG(lenp);
3451 RBIMPL_NONNULL_ARG(stepp);
3453 rb_arithmetic_sequence_components_t aseq;
3454 if (!rb_arithmetic_sequence_extract(obj, &aseq)) {
3455 return Qfalse;
3458 long step = NIL_P(aseq.step) ? 1 : NUM2LONG(aseq.step);
3459 *stepp = step;
3461 if (step < 0) {
3462 VALUE tmp = aseq.begin;
3463 aseq.begin = aseq.end;
3464 aseq.end = tmp;
3467 if (err == 0 && (step < -1 || step > 1)) {
3468 if (rb_range_component_beg_len(aseq.begin, aseq.end, aseq.exclude_end, begp, lenp, len, 1) == Qtrue) {
3469 if (*begp > len)
3470 goto out_of_range;
3471 if (*lenp > len)
3472 goto out_of_range;
3473 return Qtrue;
3476 else {
3477 return rb_range_component_beg_len(aseq.begin, aseq.end, aseq.exclude_end, begp, lenp, len, err);
3480 out_of_range:
3481 rb_raise(rb_eRangeError, "%+"PRIsVALUE" out of range", obj);
3482 return Qnil;
3486 * call-seq:
3487 * aseq.first -> num or nil
3488 * aseq.first(n) -> an_array
3490 * Returns the first number in this arithmetic sequence,
3491 * or an array of the first +n+ elements.
3493 static VALUE
3494 arith_seq_first(int argc, VALUE *argv, VALUE self)
3496 VALUE b, e, s, ary;
3497 long n;
3498 int x;
3500 rb_check_arity(argc, 0, 1);
3502 b = arith_seq_begin(self);
3503 e = arith_seq_end(self);
3504 s = arith_seq_step(self);
3505 if (argc == 0) {
3506 if (NIL_P(b)) {
3507 return Qnil;
3509 if (!NIL_P(e)) {
3510 VALUE zero = INT2FIX(0);
3511 int r = rb_cmpint(rb_num_coerce_cmp(s, zero, idCmp), s, zero);
3512 if (r > 0 && RTEST(rb_funcall(b, '>', 1, e))) {
3513 return Qnil;
3515 if (r < 0 && RTEST(rb_funcall(b, '<', 1, e))) {
3516 return Qnil;
3519 return b;
3522 // TODO: the following code should be extracted as arith_seq_take
3524 n = NUM2LONG(argv[0]);
3525 if (n < 0) {
3526 rb_raise(rb_eArgError, "attempt to take negative size");
3528 if (n == 0) {
3529 return rb_ary_new_capa(0);
3532 x = arith_seq_exclude_end_p(self);
3534 if (FIXNUM_P(b) && NIL_P(e) && FIXNUM_P(s)) {
3535 long i = FIX2LONG(b), unit = FIX2LONG(s);
3536 ary = rb_ary_new_capa(n);
3537 while (n > 0 && FIXABLE(i)) {
3538 rb_ary_push(ary, LONG2FIX(i));
3539 i += unit; // FIXABLE + FIXABLE never overflows
3540 --n;
3542 if (n > 0) {
3543 b = LONG2NUM(i);
3544 while (n > 0) {
3545 rb_ary_push(ary, b);
3546 b = rb_big_plus(b, s);
3547 --n;
3550 return ary;
3552 else if (FIXNUM_P(b) && FIXNUM_P(e) && FIXNUM_P(s)) {
3553 long i = FIX2LONG(b);
3554 long end = FIX2LONG(e);
3555 long unit = FIX2LONG(s);
3556 long len;
3558 if (unit >= 0) {
3559 if (!x) end += 1;
3561 len = end - i;
3562 if (len < 0) len = 0;
3563 ary = rb_ary_new_capa((n < len) ? n : len);
3564 while (n > 0 && i < end) {
3565 rb_ary_push(ary, LONG2FIX(i));
3566 if (i + unit < i) break;
3567 i += unit;
3568 --n;
3571 else {
3572 if (!x) end -= 1;
3574 len = i - end;
3575 if (len < 0) len = 0;
3576 ary = rb_ary_new_capa((n < len) ? n : len);
3577 while (n > 0 && i > end) {
3578 rb_ary_push(ary, LONG2FIX(i));
3579 if (i + unit > i) break;
3580 i += unit;
3581 --n;
3584 return ary;
3586 else if (RB_FLOAT_TYPE_P(b) || RB_FLOAT_TYPE_P(e) || RB_FLOAT_TYPE_P(s)) {
3587 /* generate values like ruby_float_step */
3589 double unit = NUM2DBL(s);
3590 double beg = NUM2DBL(b);
3591 double end = NIL_P(e) ? (unit < 0 ? -1 : 1)*HUGE_VAL : NUM2DBL(e);
3592 double len = ruby_float_step_size(beg, end, unit, x);
3593 long i;
3595 if (n > len)
3596 n = (long)len;
3598 if (isinf(unit)) {
3599 if (len > 0) {
3600 ary = rb_ary_new_capa(1);
3601 rb_ary_push(ary, DBL2NUM(beg));
3603 else {
3604 ary = rb_ary_new_capa(0);
3607 else if (unit == 0) {
3608 VALUE val = DBL2NUM(beg);
3609 ary = rb_ary_new_capa(n);
3610 for (i = 0; i < len; ++i) {
3611 rb_ary_push(ary, val);
3614 else {
3615 ary = rb_ary_new_capa(n);
3616 for (i = 0; i < n; ++i) {
3617 double d = i*unit+beg;
3618 if (unit >= 0 ? end < d : d < end) d = end;
3619 rb_ary_push(ary, DBL2NUM(d));
3623 return ary;
3626 return rb_call_super(argc, argv);
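/*
 * A minimal Ruby usage sketch for Enumerator::ArithmeticSequence#first; the
 * sequences below are illustrative:
 *
 *   (1..10).step(3).first      #=> 1
 *   (1..10).step(3).first(3)   #=> [1, 4, 7]
 *   (10..1).step(3).first      #=> nil   (empty sequence)
 */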
3629 static inline VALUE
3630 num_plus(VALUE a, VALUE b)
3632 if (RB_INTEGER_TYPE_P(a)) {
3633 return rb_int_plus(a, b);
3635 else if (RB_FLOAT_TYPE_P(a)) {
3636 return rb_float_plus(a, b);
3638 else if (RB_TYPE_P(a, T_RATIONAL)) {
3639 return rb_rational_plus(a, b);
3641 else {
3642 return rb_funcallv(a, '+', 1, &b);
3646 static inline VALUE
3647 num_minus(VALUE a, VALUE b)
3649 if (RB_INTEGER_TYPE_P(a)) {
3650 return rb_int_minus(a, b);
3652 else if (RB_FLOAT_TYPE_P(a)) {
3653 return rb_float_minus(a, b);
3655 else if (RB_TYPE_P(a, T_RATIONAL)) {
3656 return rb_rational_minus(a, b);
3658 else {
3659 return rb_funcallv(a, '-', 1, &b);
3663 static inline VALUE
3664 num_mul(VALUE a, VALUE b)
3666 if (RB_INTEGER_TYPE_P(a)) {
3667 return rb_int_mul(a, b);
3669 else if (RB_FLOAT_TYPE_P(a)) {
3670 return rb_float_mul(a, b);
3672 else if (RB_TYPE_P(a, T_RATIONAL)) {
3673 return rb_rational_mul(a, b);
3675 else {
3676 return rb_funcallv(a, '*', 1, &b);
3680 static inline VALUE
3681 num_idiv(VALUE a, VALUE b)
3683 VALUE q;
3684 if (RB_INTEGER_TYPE_P(a)) {
3685 q = rb_int_idiv(a, b);
3687 else if (RB_FLOAT_TYPE_P(a)) {
3688 q = rb_float_div(a, b);
3690 else if (RB_TYPE_P(a, T_RATIONAL)) {
3691 q = rb_rational_div(a, b);
3693 else {
3694 q = rb_funcallv(a, idDiv, 1, &b);
3697 if (RB_INTEGER_TYPE_P(q)) {
3698 return q;
3700 else if (RB_FLOAT_TYPE_P(q)) {
3701 return rb_float_floor(q, 0);
3703 else if (RB_TYPE_P(q, T_RATIONAL)) {
3704 return rb_rational_floor(q, 0);
3706 else {
3707 return rb_funcall(q, rb_intern("floor"), 0);
3712 * call-seq:
3713 * aseq.last -> num or nil
3714 * aseq.last(n) -> an_array
3716 * Returns the last number in this arithmetic sequence,
3717 * or an array of the last +n+ elements.
3719 static VALUE
3720 arith_seq_last(int argc, VALUE *argv, VALUE self)
3722 VALUE b, e, s, len_1, len, last, nv, ary;
3723 int last_is_adjusted;
3724 long n;
3726 e = arith_seq_end(self);
3727 if (NIL_P(e)) {
3728 rb_raise(rb_eRangeError,
3729 "cannot get the last element of endless arithmetic sequence");
3732 b = arith_seq_begin(self);
3733 s = arith_seq_step(self);
3735 len_1 = num_idiv(num_minus(e, b), s);
3736 if (rb_num_negative_int_p(len_1)) {
3737 if (argc == 0) {
3738 return Qnil;
3740 return rb_ary_new_capa(0);
3743 last = num_plus(b, num_mul(s, len_1));
3744 if ((last_is_adjusted = arith_seq_exclude_end_p(self) && rb_equal(last, e))) {
3745 last = num_minus(last, s);
3748 if (argc == 0) {
3749 return last;
3752 if (last_is_adjusted) {
3753 len = len_1;
3755 else {
3756 len = rb_int_plus(len_1, INT2FIX(1));
3759 rb_scan_args(argc, argv, "1", &nv);
3760 if (!RB_INTEGER_TYPE_P(nv)) {
3761 nv = rb_to_int(nv);
3763 if (RTEST(rb_int_gt(nv, len))) {
3764 nv = len;
3766 n = NUM2LONG(nv);
3767 if (n < 0) {
3768 rb_raise(rb_eArgError, "negative array size");
3771 ary = rb_ary_new_capa(n);
3772 b = rb_int_minus(last, rb_int_mul(s, nv));
3773 while (n) {
3774 b = rb_int_plus(b, s);
3775 rb_ary_push(ary, b);
3776 --n;
3779 return ary;
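/*
 * A minimal Ruby usage sketch for Enumerator::ArithmeticSequence#last; the
 * sequences below are illustrative:
 *
 *   (1..10).step(3).last      #=> 10
 *   (1..10).step(4).last      #=> 9
 *   (1..10).step(3).last(2)   #=> [7, 10]
 */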
3783 * call-seq:
3784 * aseq.inspect -> string
3786 * Converts this arithmetic sequence to a printable form.
3788 static VALUE
3789 arith_seq_inspect(VALUE self)
3791 struct enumerator *e;
3792 VALUE eobj, str, eargs;
3793 int range_p;
3795 TypedData_Get_Struct(self, struct enumerator, &enumerator_data_type, e);
3797 eobj = rb_attr_get(self, id_receiver);
3798 if (NIL_P(eobj)) {
3799 eobj = e->obj;
3802 range_p = RTEST(rb_obj_is_kind_of(eobj, rb_cRange));
3803 str = rb_sprintf("(%s%"PRIsVALUE"%s.", range_p ? "(" : "", eobj, range_p ? ")" : "");
3805 rb_str_buf_append(str, rb_id2str(e->meth));
3807 eargs = rb_attr_get(eobj, id_arguments);
3808 if (NIL_P(eargs)) {
3809 eargs = e->args;
3811 if (eargs != Qfalse) {
3812 long argc = RARRAY_LEN(eargs);
3813 const VALUE *argv = RARRAY_CONST_PTR(eargs); /* WB: no new reference */
3815 if (argc > 0) {
3816 VALUE kwds = Qnil;
3818 rb_str_buf_cat2(str, "(");
3820 if (RB_TYPE_P(argv[argc-1], T_HASH)) {
3821 int all_key = TRUE;
3822 rb_hash_foreach(argv[argc-1], key_symbol_p, (VALUE)&all_key);
3823 if (all_key) kwds = argv[--argc];
3826 while (argc--) {
3827 VALUE arg = *argv++;
3829 rb_str_append(str, rb_inspect(arg));
3830 rb_str_buf_cat2(str, ", ");
3832 if (!NIL_P(kwds)) {
3833 rb_hash_foreach(kwds, kwd_append, str);
3835 rb_str_set_len(str, RSTRING_LEN(str)-2); /* drop the last ", " */
3836 rb_str_buf_cat2(str, ")");
3840 rb_str_buf_cat2(str, ")");
3842 return str;
3846 * call-seq:
3847 * aseq == obj -> true or false
3849 * Returns <code>true</code> only if +obj+ is an Enumerator::ArithmeticSequence
3850 * with equivalent begin, end, step, and exclude_end? settings.
3852 static VALUE
3853 arith_seq_eq(VALUE self, VALUE other)
3855 if (!RTEST(rb_obj_is_kind_of(other, rb_cArithSeq))) {
3856 return Qfalse;
3859 if (!rb_equal(arith_seq_begin(self), arith_seq_begin(other))) {
3860 return Qfalse;
3863 if (!rb_equal(arith_seq_end(self), arith_seq_end(other))) {
3864 return Qfalse;
3867 if (!rb_equal(arith_seq_step(self), arith_seq_step(other))) {
3868 return Qfalse;
3871 if (arith_seq_exclude_end_p(self) != arith_seq_exclude_end_p(other)) {
3872 return Qfalse;
3875 return Qtrue;
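/*
 * A minimal Ruby sketch of Enumerator::ArithmeticSequence#==; the sequences
 * below are illustrative:
 *
 *   (1..10).step(2) == (1..10).step(2)    #=> true
 *   (1..10).step(2) == (1...10).step(2)   #=> false   (exclude_end? differs)
 */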
3879 * call-seq:
3880 * aseq.hash -> integer
3882 * Computes a hash value for this arithmetic sequence.
3883 * Two arithmetic sequences with the same begin, end, step, and exclude_end?
3884 * values will generate the same hash value.
3886 * See also Object#hash.
3888 static VALUE
3889 arith_seq_hash(VALUE self)
3891 st_index_t hash;
3892 VALUE v;
3894 hash = rb_hash_start(arith_seq_exclude_end_p(self));
3895 v = rb_hash(arith_seq_begin(self));
3896 hash = rb_hash_uint(hash, NUM2LONG(v));
3897 v = rb_hash(arith_seq_end(self));
3898 hash = rb_hash_uint(hash, NUM2LONG(v));
3899 v = rb_hash(arith_seq_step(self));
3900 hash = rb_hash_uint(hash, NUM2LONG(v));
3901 hash = rb_hash_end(hash);
3903 return ST2FIX(hash);
3906 #define NUM_GE(x, y) RTEST(rb_num_coerce_relop((x), (y), idGE))
3908 struct arith_seq_gen {
3909 VALUE current;
3910 VALUE end;
3911 VALUE step;
3912 int excl;
3916 * call-seq:
3917 * aseq.each {|i| block } -> aseq
3918 * aseq.each -> aseq
3920 static VALUE
3921 arith_seq_each(VALUE self)
3923 VALUE c, e, s, len_1, last;
3924 int x;
3926 if (!rb_block_given_p()) return self;
3928 c = arith_seq_begin(self);
3929 e = arith_seq_end(self);
3930 s = arith_seq_step(self);
3931 x = arith_seq_exclude_end_p(self);
3933 if (!RB_TYPE_P(s, T_COMPLEX) && ruby_float_step(c, e, s, x, TRUE)) {
3934 return self;
3937 if (NIL_P(e)) {
3938 while (1) {
3939 rb_yield(c);
3940 c = rb_int_plus(c, s);
3943 return self;
3946 if (rb_equal(s, INT2FIX(0))) {
3947 while (1) {
3948 rb_yield(c);
3951 return self;
3954 len_1 = num_idiv(num_minus(e, c), s);
3955 last = num_plus(c, num_mul(s, len_1));
3956 if (x && rb_equal(last, e)) {
3957 last = num_minus(last, s);
3960 if (rb_num_negative_int_p(s)) {
3961 while (NUM_GE(c, last)) {
3962 rb_yield(c);
3963 c = num_plus(c, s);
3966 else {
3967 while (NUM_GE(last, c)) {
3968 rb_yield(c);
3969 c = num_plus(c, s);
3973 return self;
3977 * call-seq:
3978 * aseq.size -> num
3980 * Returns the number of elements in this arithmetic sequence if it is a finite
3981 * sequence. Otherwise, returns <code>Float::INFINITY</code>.
3983 static VALUE
3984 arith_seq_size(VALUE self)
3986 VALUE b, e, s, len_1, len, last;
3987 int x;
3989 b = arith_seq_begin(self);
3990 e = arith_seq_end(self);
3991 s = arith_seq_step(self);
3992 x = arith_seq_exclude_end_p(self);
3994 if (RB_FLOAT_TYPE_P(b) || RB_FLOAT_TYPE_P(e) || RB_FLOAT_TYPE_P(s)) {
3995 double ee, n;
3997 if (NIL_P(e)) {
3998 if (rb_num_negative_int_p(s)) {
3999 ee = -HUGE_VAL;
4001 else {
4002 ee = HUGE_VAL;
4005 else {
4006 ee = NUM2DBL(e);
4009 n = ruby_float_step_size(NUM2DBL(b), ee, NUM2DBL(s), x);
4010 if (isinf(n)) return DBL2NUM(n);
4011 if (POSFIXABLE(n)) return LONG2FIX((long)n);
4012 return rb_dbl2big(n);
4015 if (NIL_P(e)) {
4016 return DBL2NUM(HUGE_VAL);
4019 if (!rb_obj_is_kind_of(s, rb_cNumeric)) {
4020 s = rb_to_int(s);
4023 if (rb_equal(s, INT2FIX(0))) {
4024 return DBL2NUM(HUGE_VAL);
4027 len_1 = rb_int_idiv(rb_int_minus(e, b), s);
4028 if (rb_num_negative_int_p(len_1)) {
4029 return INT2FIX(0);
4032 last = rb_int_plus(b, rb_int_mul(s, len_1));
4033 if (x && rb_equal(last, e)) {
4034 len = len_1;
4036 else {
4037 len = rb_int_plus(len_1, INT2FIX(1));
4040 return len;
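/*
 * A minimal Ruby usage sketch for Enumerator::ArithmeticSequence#size; the
 * sequences below are illustrative:
 *
 *   (1..10).step(3).size    #=> 4
 *   (1...10).step(3).size   #=> 3
 *   (1..).step(2).size      #=> Float::INFINITY
 */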
4043 #define sym(name) ID2SYM(rb_intern_const(name))
4044 void
4045 InitVM_Enumerator(void)
4047 ID id_private = rb_intern_const("private");
4049 rb_define_method(rb_mKernel, "to_enum", obj_to_enum, -1);
4050 rb_define_method(rb_mKernel, "enum_for", obj_to_enum, -1);
4052 rb_cEnumerator = rb_define_class("Enumerator", rb_cObject);
4053 rb_include_module(rb_cEnumerator, rb_mEnumerable);
4055 rb_define_alloc_func(rb_cEnumerator, enumerator_allocate);
4056 rb_define_method(rb_cEnumerator, "initialize", enumerator_initialize, -1);
4057 rb_define_method(rb_cEnumerator, "initialize_copy", enumerator_init_copy, 1);
4058 rb_define_method(rb_cEnumerator, "each", enumerator_each, -1);
4059 rb_define_method(rb_cEnumerator, "each_with_index", enumerator_each_with_index, 0);
4060 rb_define_method(rb_cEnumerator, "each_with_object", enumerator_with_object, 1);
4061 rb_define_method(rb_cEnumerator, "with_index", enumerator_with_index, -1);
4062 rb_define_method(rb_cEnumerator, "with_object", enumerator_with_object, 1);
4063 rb_define_method(rb_cEnumerator, "next_values", enumerator_next_values, 0);
4064 rb_define_method(rb_cEnumerator, "peek_values", enumerator_peek_values_m, 0);
4065 rb_define_method(rb_cEnumerator, "next", enumerator_next, 0);
4066 rb_define_method(rb_cEnumerator, "peek", enumerator_peek, 0);
4067 rb_define_method(rb_cEnumerator, "feed", enumerator_feed, 1);
4068 rb_define_method(rb_cEnumerator, "rewind", enumerator_rewind, 0);
4069 rb_define_method(rb_cEnumerator, "inspect", enumerator_inspect, 0);
4070 rb_define_method(rb_cEnumerator, "size", enumerator_size, 0);
4071 rb_define_method(rb_cEnumerator, "+", enumerator_plus, 1);
4072 rb_define_method(rb_mEnumerable, "chain", enum_chain, -1);
4074 /* Lazy */
4075 rb_cLazy = rb_define_class_under(rb_cEnumerator, "Lazy", rb_cEnumerator);
4076 rb_define_method(rb_mEnumerable, "lazy", enumerable_lazy, 0);
4078 rb_define_alias(rb_cLazy, "_enumerable_map", "map");
4079 rb_define_alias(rb_cLazy, "_enumerable_collect", "collect");
4080 rb_define_alias(rb_cLazy, "_enumerable_flat_map", "flat_map");
4081 rb_define_alias(rb_cLazy, "_enumerable_collect_concat", "collect_concat");
4082 rb_define_alias(rb_cLazy, "_enumerable_select", "select");
4083 rb_define_alias(rb_cLazy, "_enumerable_find_all", "find_all");
4084 rb_define_alias(rb_cLazy, "_enumerable_filter", "filter");
4085 rb_define_alias(rb_cLazy, "_enumerable_filter_map", "filter_map");
4086 rb_define_alias(rb_cLazy, "_enumerable_reject", "reject");
4087 rb_define_alias(rb_cLazy, "_enumerable_grep", "grep");
4088 rb_define_alias(rb_cLazy, "_enumerable_grep_v", "grep_v");
4089 rb_define_alias(rb_cLazy, "_enumerable_zip", "zip");
4090 rb_define_alias(rb_cLazy, "_enumerable_take", "take");
4091 rb_define_alias(rb_cLazy, "_enumerable_take_while", "take_while");
4092 rb_define_alias(rb_cLazy, "_enumerable_drop", "drop");
4093 rb_define_alias(rb_cLazy, "_enumerable_drop_while", "drop_while");
4094 rb_define_alias(rb_cLazy, "_enumerable_uniq", "uniq");
4095 rb_define_private_method(rb_cLazy, "_enumerable_with_index", enumerator_with_index, -1);
4097 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_map"));
4098 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_collect"));
4099 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_flat_map"));
4100 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_collect_concat"));
4101 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_select"));
4102 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_find_all"));
4103 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_filter"));
4104 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_filter_map"));
4105 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_reject"));
4106 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_grep"));
4107 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_grep_v"));
4108 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_zip"));
4109 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_take"));
4110 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_take_while"));
4111 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_drop"));
4112 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_drop_while"));
4113 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_uniq"));
4115 rb_define_method(rb_cLazy, "initialize", lazy_initialize, -1);
4116 rb_define_method(rb_cLazy, "to_enum", lazy_to_enum, -1);
4117 rb_define_method(rb_cLazy, "enum_for", lazy_to_enum, -1);
4118 rb_define_method(rb_cLazy, "eager", lazy_eager, 0);
4119 rb_define_method(rb_cLazy, "map", lazy_map, 0);
4120 rb_define_method(rb_cLazy, "collect", lazy_map, 0);
4121 rb_define_method(rb_cLazy, "flat_map", lazy_flat_map, 0);
4122 rb_define_method(rb_cLazy, "collect_concat", lazy_flat_map, 0);
4123 rb_define_method(rb_cLazy, "select", lazy_select, 0);
4124 rb_define_method(rb_cLazy, "find_all", lazy_select, 0);
4125 rb_define_method(rb_cLazy, "filter", lazy_select, 0);
4126 rb_define_method(rb_cLazy, "filter_map", lazy_filter_map, 0);
4127 rb_define_method(rb_cLazy, "reject", lazy_reject, 0);
4128 rb_define_method(rb_cLazy, "grep", lazy_grep, 1);
4129 rb_define_method(rb_cLazy, "grep_v", lazy_grep_v, 1);
4130 rb_define_method(rb_cLazy, "zip", lazy_zip, -1);
4131 rb_define_method(rb_cLazy, "take", lazy_take, 1);
4132 rb_define_method(rb_cLazy, "take_while", lazy_take_while, 0);
4133 rb_define_method(rb_cLazy, "drop", lazy_drop, 1);
4134 rb_define_method(rb_cLazy, "drop_while", lazy_drop_while, 0);
4135 rb_define_method(rb_cLazy, "lazy", lazy_lazy, 0);
4136 rb_define_method(rb_cLazy, "chunk", lazy_super, -1);
4137 rb_define_method(rb_cLazy, "slice_before", lazy_super, -1);
4138 rb_define_method(rb_cLazy, "slice_after", lazy_super, -1);
4139 rb_define_method(rb_cLazy, "slice_when", lazy_super, -1);
4140 rb_define_method(rb_cLazy, "chunk_while", lazy_super, -1);
4141 rb_define_method(rb_cLazy, "uniq", lazy_uniq, 0);
4142 rb_define_method(rb_cLazy, "compact", lazy_compact, 0);
4143 rb_define_method(rb_cLazy, "with_index", lazy_with_index, -1);
4145 lazy_use_super_method = rb_hash_new_with_size(18);
4146 rb_hash_aset(lazy_use_super_method, sym("map"), sym("_enumerable_map"));
4147 rb_hash_aset(lazy_use_super_method, sym("collect"), sym("_enumerable_collect"));
4148 rb_hash_aset(lazy_use_super_method, sym("flat_map"), sym("_enumerable_flat_map"));
4149 rb_hash_aset(lazy_use_super_method, sym("collect_concat"), sym("_enumerable_collect_concat"));
4150 rb_hash_aset(lazy_use_super_method, sym("select"), sym("_enumerable_select"));
4151 rb_hash_aset(lazy_use_super_method, sym("find_all"), sym("_enumerable_find_all"));
4152 rb_hash_aset(lazy_use_super_method, sym("filter"), sym("_enumerable_filter"));
4153 rb_hash_aset(lazy_use_super_method, sym("filter_map"), sym("_enumerable_filter_map"));
4154 rb_hash_aset(lazy_use_super_method, sym("reject"), sym("_enumerable_reject"));
4155 rb_hash_aset(lazy_use_super_method, sym("grep"), sym("_enumerable_grep"));
4156 rb_hash_aset(lazy_use_super_method, sym("grep_v"), sym("_enumerable_grep_v"));
4157 rb_hash_aset(lazy_use_super_method, sym("zip"), sym("_enumerable_zip"));
4158 rb_hash_aset(lazy_use_super_method, sym("take"), sym("_enumerable_take"));
4159 rb_hash_aset(lazy_use_super_method, sym("take_while"), sym("_enumerable_take_while"));
4160 rb_hash_aset(lazy_use_super_method, sym("drop"), sym("_enumerable_drop"));
4161 rb_hash_aset(lazy_use_super_method, sym("drop_while"), sym("_enumerable_drop_while"));
4162 rb_hash_aset(lazy_use_super_method, sym("uniq"), sym("_enumerable_uniq"));
4163 rb_hash_aset(lazy_use_super_method, sym("with_index"), sym("_enumerable_with_index"));
4164 rb_obj_freeze(lazy_use_super_method);
4165 rb_gc_register_mark_object(lazy_use_super_method);
4167 #if 0 /* for RDoc */
4168 rb_define_method(rb_cLazy, "to_a", lazy_to_a, 0);
4169 rb_define_method(rb_cLazy, "chunk", lazy_chunk, 0);
4170 rb_define_method(rb_cLazy, "chunk_while", lazy_chunk_while, 0);
4171 rb_define_method(rb_cLazy, "slice_after", lazy_slice_after, 0);
4172 rb_define_method(rb_cLazy, "slice_before", lazy_slice_before, 0);
4173 rb_define_method(rb_cLazy, "slice_when", lazy_slice_when, 0);
4174 #endif
4175 rb_define_alias(rb_cLazy, "force", "to_a");
4177 rb_eStopIteration = rb_define_class("StopIteration", rb_eIndexError);
4178 rb_define_method(rb_eStopIteration, "result", stop_result, 0);
4180 /* Generator */
4181 rb_cGenerator = rb_define_class_under(rb_cEnumerator, "Generator", rb_cObject);
4182 rb_include_module(rb_cGenerator, rb_mEnumerable);
4183 rb_define_alloc_func(rb_cGenerator, generator_allocate);
4184 rb_define_method(rb_cGenerator, "initialize", generator_initialize, -1);
4185 rb_define_method(rb_cGenerator, "initialize_copy", generator_init_copy, 1);
4186 rb_define_method(rb_cGenerator, "each", generator_each, -1);
4188 /* Yielder */
4189 rb_cYielder = rb_define_class_under(rb_cEnumerator, "Yielder", rb_cObject);
4190 rb_define_alloc_func(rb_cYielder, yielder_allocate);
4191 rb_define_method(rb_cYielder, "initialize", yielder_initialize, 0);
4192 rb_define_method(rb_cYielder, "yield", yielder_yield, -2);
4193 rb_define_method(rb_cYielder, "<<", yielder_yield_push, 1);
4194 rb_define_method(rb_cYielder, "to_proc", yielder_to_proc, 0);
4196 /* Producer */
4197 rb_cEnumProducer = rb_define_class_under(rb_cEnumerator, "Producer", rb_cObject);
4198 rb_define_alloc_func(rb_cEnumProducer, producer_allocate);
4199 rb_define_method(rb_cEnumProducer, "each", producer_each, 0);
4200 rb_define_singleton_method(rb_cEnumerator, "produce", enumerator_s_produce, -1);
4202 /* Chain */
4203 rb_cEnumChain = rb_define_class_under(rb_cEnumerator, "Chain", rb_cEnumerator);
4204 rb_define_alloc_func(rb_cEnumChain, enum_chain_allocate);
4205 rb_define_method(rb_cEnumChain, "initialize", enum_chain_initialize, -2);
4206 rb_define_method(rb_cEnumChain, "initialize_copy", enum_chain_init_copy, 1);
4207 rb_define_method(rb_cEnumChain, "each", enum_chain_each, -1);
4208 rb_define_method(rb_cEnumChain, "size", enum_chain_size, 0);
4209 rb_define_method(rb_cEnumChain, "rewind", enum_chain_rewind, 0);
4210 rb_define_method(rb_cEnumChain, "inspect", enum_chain_inspect, 0);
4211 rb_undef_method(rb_cEnumChain, "feed");
4212 rb_undef_method(rb_cEnumChain, "next");
4213 rb_undef_method(rb_cEnumChain, "next_values");
4214 rb_undef_method(rb_cEnumChain, "peek");
4215 rb_undef_method(rb_cEnumChain, "peek_values");
4217 /* ArithmeticSequence */
4218 rb_cArithSeq = rb_define_class_under(rb_cEnumerator, "ArithmeticSequence", rb_cEnumerator);
4219 rb_undef_alloc_func(rb_cArithSeq);
4220 rb_undef_method(CLASS_OF(rb_cArithSeq), "new");
4221 rb_define_method(rb_cArithSeq, "begin", arith_seq_begin, 0);
4222 rb_define_method(rb_cArithSeq, "end", arith_seq_end, 0);
4223 rb_define_method(rb_cArithSeq, "exclude_end?", arith_seq_exclude_end, 0);
4224 rb_define_method(rb_cArithSeq, "step", arith_seq_step, 0);
4225 rb_define_method(rb_cArithSeq, "first", arith_seq_first, -1);
4226 rb_define_method(rb_cArithSeq, "last", arith_seq_last, -1);
4227 rb_define_method(rb_cArithSeq, "inspect", arith_seq_inspect, 0);
4228 rb_define_method(rb_cArithSeq, "==", arith_seq_eq, 1);
4229 rb_define_method(rb_cArithSeq, "===", arith_seq_eq, 1);
4230 rb_define_method(rb_cArithSeq, "eql?", arith_seq_eq, 1);
4231 rb_define_method(rb_cArithSeq, "hash", arith_seq_hash, 0);
4232 rb_define_method(rb_cArithSeq, "each", arith_seq_each, 0);
4233 rb_define_method(rb_cArithSeq, "size", arith_seq_size, 0);
4235 rb_provide("enumerator.so"); /* for backward compatibility */
4237 #undef sym
4239 void
4240 Init_Enumerator(void)
4242 id_rewind = rb_intern_const("rewind");
4243 id_new = rb_intern_const("new");
4244 id_next = rb_intern_const("next");
4245 id_result = rb_intern_const("result");
4246 id_receiver = rb_intern_const("receiver");
4247 id_arguments = rb_intern_const("arguments");
4248 id_memo = rb_intern_const("memo");
4249 id_method = rb_intern_const("method");
4250 id_force = rb_intern_const("force");
4251 id_to_enum = rb_intern_const("to_enum");
4252 id_begin = rb_intern_const("begin");
4253 id_end = rb_intern_const("end");
4254 id_step = rb_intern_const("step");
4255 id_exclude_end = rb_intern_const("exclude_end");
4256 sym_each = ID2SYM(id_each);
4257 sym_cycle = ID2SYM(rb_intern_const("cycle"));
4258 sym_yield = ID2SYM(rb_intern_const("yield"));
4260 InitVM(Enumerator);