static VALUE rb_cLazy;
static ID id_rewind, id_new, id_to_enum;
static ID id_next, id_result, id_receiver, id_arguments, id_memo, id_method, id_force;
static ID id_begin, id_end, id_step, id_exclude_end;
static VALUE sym_each, sym_cycle, sym_yield;
static VALUE lazy_use_super_method;

#define id_call idCall
#define id_each idEach
#define id_initialize idInitialize
#define id_size idSize
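/* Implementation classes wrapped by Enumerator: Enumerator::Generator,
 * Enumerator::Yielder, Enumerator::Producer (used by Enumerator.produce),
 * and Enumerator::Chain. */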
static VALUE rb_cGenerator, rb_cYielder, rb_cEnumProducer;
static VALUE rb_cEnumChain;
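/* TypedData GC callbacks for struct enumerator: mark and compact keep the
 * referenced Ruby objects alive and update them after heap compaction,
 * memsize reports the allocation size, and free falls back to the default
 * handler. */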
enumerator_mark(void *p)
enumerator_compact(void *p)
#define enumerator_free RUBY_TYPED_DEFAULT_FREE
enumerator_memsize(const void *p)
proc_entry_mark(void *p)
proc_entry_compact(void *p)
#define proc_entry_free RUBY_TYPED_DEFAULT_FREE
proc_entry_memsize(const void *p)
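/* Keyword-splat flag forwarded to *_kw calls: an explicitly empty keyword
 * hash from the caller is preserved rather than being treated as no
 * keywords at all. */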
#define PASS_KW_SPLAT (rb_empty_keyword_given_p() ? RB_PASS_EMPTY_KEYWORDS : rb_keyword_given_p())
recv = generator_init(generator_allocate(rb_cGenerator), rb_block_proc());
ptr0 = enumerator_ptr(orig);
#if SIZEOF_INT < SIZEOF_LONG
return enumerator_block_call(obj, 0, obj);
struct MEMO *memo = (struct MEMO *)m;
return enumerator_size(obj);
return enumerator_block_call(obj, enumerator_with_index_i, (VALUE)MEMO_NEW(memo, 0, 0));
enumerator_each_with_index(VALUE obj)
return enumerator_with_index(0, NULL, obj);
enumerator_block_call(obj, enumerator_with_object_i, memo);
return get_next_values(obj, e);
VALUE vs = enumerator_next_values(obj);
return ary2sv(vs, 0);
VALUE vs = enumerator_peek_values(obj);
return ary2sv(vs, 1);
eobj = generator_ptr(e->obj)->obj;
VALUE method, eargs;
if (!NIL_P(method)) {
eargs = default_args;
receiver = (*size_fn)(proc, receiver);
yielder_mark(void *p)
yielder_compact(void *p)
#define yielder_free RUBY_TYPED_DEFAULT_FREE
yielder_memsize(const void *p)
return sizeof(struct yielder);
return yielder_init(yielder_allocate(rb_cYielder), rb_proc_new(yielder_yield_i, 0));
generator_mark(void *p)
generator_compact(void *p)
#define generator_free RUBY_TYPED_DEFAULT_FREE
generator_memsize(const void *p)
"wrong argument type %"PRIsVALUE" (expected Proc)",
rb_warn("given block not used");
return generator_init(obj, proc);
ptr0 = generator_ptr(orig);
enum_size(VALUE self)
return enum_size(self);
lazy_size(VALUE self)
return lazy_size(lazy);
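/* Flags kept on the struct MEMO threaded through the lazy pipeline:
 * BREAK marks that iteration has been cut short, and PACKED marks that
 * memo_value holds an array of yielded values instead of a single value. */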
#define memo_value v2
#define memo_flags u3.state
#define LAZY_MEMO_BREAK 1
#define LAZY_MEMO_PACKED 2
#define LAZY_MEMO_BREAK_P(memo) ((memo)->memo_flags & LAZY_MEMO_BREAK)
#define LAZY_MEMO_PACKED_P(memo) ((memo)->memo_flags & LAZY_MEMO_PACKED)
#define LAZY_MEMO_SET_BREAK(memo) ((memo)->memo_flags |= LAZY_MEMO_BREAK)
#define LAZY_MEMO_SET_VALUE(memo, value) MEMO_V2_SET(memo, value)
#define LAZY_MEMO_SET_PACKED(memo) ((memo)->memo_flags |= LAZY_MEMO_PACKED)
#define LAZY_MEMO_RESET_PACKED(memo) ((memo)->memo_flags &= ~LAZY_MEMO_PACKED)
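/* Heart of Enumerator::Lazy: each registered proc entry is applied in order
 * to the current MEMO; a NULL return from an entry drops the value,
 * otherwise result->memo_value carries whatever survived the whole chain. */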
struct MEMO *result;
if (!(*entry->fn->proc)(proc, result, memos, i)) {
return result->memo_value;
generator = generator_allocate(rb_cGenerator);
generator = generator_allocate(rb_cGenerator);
enumerator_init(self, generator, sym_each, 0, 0, 0, size, 0);
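/* lazy_add_method, roughly sketched: the new step is stored in a proc_entry
 * TypedData object, appended to a duplicated procs array, and a fresh
 * Enumerator::Lazy is built around a new generator so the receiver's own
 * chain stays untouched. */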
lazy_set_args(lazy, args);
VALUE new_generator;
&proc_entry_data_type, entry);
lazy_set_args(entry_obj, memo);
new_generator = lazy_generator_init(obj, new_procs);
new_obj = enumerator_init_copy(enumerator_allocate(rb_cLazy), obj);
new_e->obj = new_generator;
new_e->procs = new_procs;
return enumerator_init(enumerator_allocate(rb_cLazy),
VALUE lazy, meth = sym_each, super_meth;
return enum_size(self);
lazy_eager(VALUE self)
self, sym_each, 0, 0, lazy_eager_size, Qnil, 0);
const VALUE *argv = &result->memo_value;
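/* Each Lazy adapter (map, select, reject, grep, take_while, ...) follows the
 * same pattern: a *_proc callback that transforms or filters the threaded
 * MEMO, a lazyenum_funcs table pairing it with an optional size function,
 * and a public method that merely appends that table via lazy_add_method. */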
static struct MEMO *
lazy_map_proc, lazy_map_size,
return lazy_add_method(obj, 0, 0, Qnil, Qnil, &lazy_map_funcs);
lazy_flat_map_each(result, argv[0]);
lazy_flat_map_to_ary(result, argv[0]);
lazy_flat_map_proc, 0),
static struct MEMO *
if (!RTEST(chain)) return 0;
lazy_select_proc, 0,
return lazy_add_method(obj, 0, 0, Qnil, Qnil, &lazy_select_funcs);
static struct MEMO *
lazy_filter_map_proc, 0,
return lazy_add_method(obj, 0, 0, Qnil, Qnil, &lazy_filter_map_funcs);
static struct MEMO *
if (RTEST(chain)) return 0;
lazy_reject_proc, 0,
return lazy_add_method(obj, 0, 0, Qnil, Qnil, &lazy_reject_funcs);
static struct MEMO *
if (!RTEST(chain)) return 0;
static struct MEMO *
if (!RTEST(chain)) return 0;
lazy_grep_iter_proc, 0,
&lazy_grep_iter_funcs : &lazy_grep_funcs;
return lazy_add_method(obj, 0, 0, pattern, rb_ary_new3(1, pattern), funcs);
static struct MEMO *
if (RTEST(chain)) return 0;
static struct MEMO *
if (RTEST(chain)) return 0;
lazy_grep_v_iter_proc, 0,
lazy_grep_v_proc, 0,
&lazy_grep_v_iter_funcs : &lazy_grep_v_funcs;
return lazy_add_method(obj, 0, 0, pattern, rb_ary_new3(1, pattern), funcs);
func = lazy_zip_func;
ary, lazy_receiver_size);
static struct MEMO *
lazy_take_proc, lazy_take_size,
argv[0] = sym_cycle;
static struct MEMO *
lazy_take_while_proc, 0,
return lazy_add_method(obj, 0, 0, Qnil, Qnil, &lazy_take_while_funcs);
if (NIL_P(receiver))
static struct MEMO *
lazy_drop_proc, lazy_drop_size,
static struct MEMO *
if (RTEST(drop)) return 0;
lazy_drop_while_proc, 0,
return lazy_add_method(obj, 0, 0, Qfalse, Qnil, &lazy_drop_while_funcs);
lazy_uniq_check(VALUE chain, VALUE memos, long memo_index)
static struct MEMO *
if (lazy_uniq_check(result->memo_value, memos, memo_index)) return 0;
static struct MEMO *
if (lazy_uniq_check(chain, memos, memo_index)) return 0;
lazy_uniq_iter_proc, 0,
return lazy_add_method(obj, 0, 0, Qnil, Qnil, funcs);
static struct MEMO *
argv[0] = result->memo_value;
lazy_with_index_proc, 0,
stop_result(VALUE self)
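/* Enumerator.produce support: GC callbacks for the producer object that
 * holds the initial value and the block used to derive each successor. */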
producer_mark(void *p)
producer_compact(void *p)
#define producer_free RUBY_TYPED_DEFAULT_FREE
producer_memsize(const void *p)
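/* Enumerator::Chain keeps an array of source enumerators plus the index of
 * the one currently being iterated; these are its TypedData GC callbacks. */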
enum_chain_mark(void *p)
enum_chain_compact(void *p)
#define enum_chain_free RUBY_TYPED_DEFAULT_FREE
enum_chain_memsize(const void *p)
ptr0 = enum_chain_ptr(orig);
return enum_chain_total_size(enum_chain_ptr(obj)->enums);
return enum_chain_size(obj);
objptr = enum_chain_ptr(obj);
return enum_chain_initialize(enum_chain_allocate(rb_cEnumChain), enums);
return enum_chain_initialize(enum_chain_allocate(rb_cEnumChain), enums);
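/* Enumerator::ArithmeticSequence (returned by Numeric#step and Range#step):
 * begin, end, step, and exclude_end are read back through these accessors. */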
arith_seq_begin(VALUE self)
arith_seq_end(VALUE self)
arith_seq_step(VALUE self)
arith_seq_exclude_end(VALUE self)
arith_seq_exclude_end_p(VALUE self)
return RTEST(arith_seq_exclude_end(self));
component->begin = arith_seq_begin(obj);
component->end = arith_seq_end(obj);
component->step = arith_seq_step(obj);
b = arith_seq_begin(self);
e = arith_seq_end(self);
s = arith_seq_step(self);
x = arith_seq_exclude_end_p(self);
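/* Integer stepping with an overflow guard: the loop bails out early if
 * adding the unit would wrap around past the current value. */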
while (n > 0 && i < end) {
if (i + unit < i) break;
while (n > 0 && i > end) {
if (i + unit > i) break;
else if (unit == 0) {
for (i = 0; i < len; ++i) {
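/* Float sequences compute each element as i*unit+beg and clamp the final
 * value to end so rounding error cannot step past the bound. */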
for (i = 0; i < n; ++i) {
double d = i*unit+beg;
if (unit >= 0 ? end < d : d < end) d = end;
int last_is_adjusted;
e = arith_seq_end(self);
"cannot get the last element of endless arithmetic sequence");
b = arith_seq_begin(self);
s = arith_seq_step(self);
if (rb_num_negative_int_p(len_1)) {
if ((last_is_adjusted = arith_seq_exclude_end_p(self) && rb_equal(last, e))) {
if (last_is_adjusted) {
arith_seq_inspect(VALUE self)
if (!rb_equal(arith_seq_begin(self), arith_seq_begin(other))) {
if (!rb_equal(arith_seq_end(self), arith_seq_end(other))) {
if (!rb_equal(arith_seq_step(self), arith_seq_step(other))) {
if (arith_seq_exclude_end_p(self) != arith_seq_exclude_end_p(other)) {
arith_seq_hash(VALUE self)
v = rb_hash(arith_seq_begin(self));
#define NUM_GE(x, y) RTEST(rb_num_coerce_relop((x), (y), idGE))
arith_seq_each(VALUE self)
c = arith_seq_begin(self);
e = arith_seq_end(self);
s = arith_seq_step(self);
x = arith_seq_exclude_end_p(self);
if (rb_num_negative_int_p(s)) {
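/* Element count for a float arithmetic sequence: n = (end - beg) / step,
 * with the accumulated rounding error capped at 0.5 before deciding whether
 * the endpoint itself is included. */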
arith_seq_float_step_size(double beg, double end, double step, int excl)
n = (end - beg) / step;
return step > 0 ? beg <= end : beg >= end;
if (err > 0.5) err = 0.5;
if (n <= 0) return 0;
if (n < 0) return 0;
arith_seq_size(VALUE self)
b = arith_seq_begin(self);
e = arith_seq_end(self);
s = arith_seq_step(self);
x = arith_seq_exclude_end_p(self);
if (rb_num_negative_int_p(s)) {
if (rb_num_negative_int_p(len_1)) {
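/* Init_Enumerator: method and alias registration plus symbol interning for
 * the classes above. */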
rb_define_alias(rb_cLazy, "_enumerable_collect_concat", "collect_concat");
rb_define_method(rb_cGenerator, "initialize_copy", generator_init_copy, 1);
rb_define_method(rb_cEnumChain, "initialize_copy", enum_chain_init_copy, 1);
id_exclude_end = rb_intern("exclude_end");