Skip to content

HTTPS clone URL

Subversion checkout URL

You can clone with HTTPS or Subversion.

Download ZIP
Newer
Older
100644 527 lines (455 sloc) 12.009 kb
9c1d230 committing experimental branch content
Laurent Sansonetti authored
1 /************************************************
2
3 enumerator.c - provides Enumerator class
4
5 $Author: knu $
6
7 Copyright (C) 2001-2003 Akinori MUSHA
8
9 $Idaemons: /home/cvs/rb/enumerator/enumerator.c,v 1.1.1.1 2001/07/15 10:12:48 knu Exp $
10 $RoughId: enumerator.c,v 1.6 2003/07/27 11:03:24 nobu Exp $
11 $Id: enumerator.c 16614 2008-05-26 08:49:08Z knu $
12
13 ************************************************/
14
468a2ea Move Obj-C related headers around.
Thibault Martin-Lagardette authored
15 #include "ruby/macruby.h"
301b43b ported to rb_objc_block_call() + misc cleanup
Laurent Sansonetti authored
16 #include "id.h"
9c1d230 committing experimental branch content
Laurent Sansonetti authored
17
/*
 * Document-class: Enumerable::Enumerator
 *
 * A class which provides a method `each' to be used as an Enumerable
 * object.
 */
VALUE rb_cEnumerator;		/* Enumerator class, created in Init_Enumerator() */
static VALUE sym_each;		/* cached :each symbol, set in Init_Enumerator() */

VALUE rb_eStopIteration;	/* StopIteration exception class (raised by #next) */
28
/* Internal state of an Enumerator instance (wrapped in a Data object). */
struct enumerator {
    VALUE obj;		/* receiver the iteration method is sent to */
    SEL sel;		/* selector of the iteration method (e.g. "each" or "each:") */
    VALUE args;		/* argument array for the call, or 0 when none were given */
    VALUE fib;		/* fiber used for external iteration; 0 when inactive
			 * (currently unused: #next is a stub, see enumerator_next) */
    VALUE dst;		/* fiber to yield back to -- TODO confirm; #next is disabled */
    VALUE no_next;	/* Qtrue once external iteration has been exhausted */
};
37
/*
 * Returns the enumerator struct wrapped by +obj+.  Raises ArgumentError
 * if the object has not been initialized yet (NULL data pointer).
 */
static struct enumerator *
enumerator_ptr(VALUE obj)
{
    struct enumerator *ptr;

    Data_Get_Struct(obj, struct enumerator, ptr);
#if 0
    /* Disabled: enumerator_mark is not defined in this port, so the
     * dmark-based type check from upstream cannot be performed. */
    if (RDATA(obj)->dmark != enumerator_mark) {
	rb_raise(rb_eTypeError,
		 "wrong argument type %s (expected %s)",
		 rb_obj_classname(obj), rb_class2name(rb_cEnumerator));
    }
#endif
    if (!ptr) {
	rb_raise(rb_eArgError, "uninitialized enumerator");
    }
    return ptr;
}
56
57 /*
58 * call-seq:
59 * obj.to_enum(method = :each, *args)
60 * obj.enum_for(method = :each, *args)
61 *
62 * Returns Enumerable::Enumerator.new(self, method, *args).
63 *
64 * e.g.:
65 *
66 * str = "xyz"
67 *
68 * enum = str.enum_for(:each_byte)
69 * a = enum.map {|b| '%02x' % b } #=> ["78", "79", "7a"]
70 *
71 * # protects an array from being modified
72 * a = [1, 2, 3]
73 * some_method(a.to_enum)
74 *
75 */
76 static VALUE
77 obj_to_enum(VALUE obj, SEL sel, int argc, VALUE *argv)
78 {
79 VALUE meth = sym_each;
80
81 if (argc > 0) {
82 --argc;
83 meth = *argv++;
84 }
c6a0341 Kernel#to_enum: implemented
Laurent Sansonetti authored
85
e1960f6 Make sure #to_enum verify param is a symbol
Thibault Martin-Lagardette authored
86 ID meth_id = rb_to_id(meth);
c6a0341 Kernel#to_enum: implemented
Laurent Sansonetti authored
87 SEL enum_sel;
88 if (argc > 0) {
89 char buf[100];
90 snprintf(buf, sizeof buf, "%s:", rb_id2name(meth_id));
91 enum_sel = sel_registerName(buf);
92 }
93 else {
94 enum_sel = sel_registerName(rb_id2name(meth_id));
95 }
96 return rb_enumeratorize(obj, enum_sel, argc, argv);
9c1d230 committing experimental branch content
Laurent Sansonetti authored
97 }
98
99 static VALUE
100 each_slice_i(VALUE val, VALUE *memo)
101 {
102 VALUE ary = memo[0];
103 VALUE v = Qnil;
104 long size = (long)memo[1];
105
106 rb_ary_push(ary, val);
107
108 if (RARRAY_LEN(ary) == size) {
109 v = rb_yield(ary);
110 memo[0] = rb_ary_new2(size);
111 }
112
113 return v;
114 }
115
116 /*
117 * call-seq:
118 * e.each_slice(n) {...}
119 * e.each_slice(n)
120 *
121 * Iterates the given block for each slice of <n> elements. If no
122 * block is given, returns an enumerator.
123 *
124 * e.g.:
125 * (1..10).each_slice(3) {|a| p a}
126 * # outputs below
127 * [1, 2, 3]
128 * [4, 5, 6]
129 * [7, 8, 9]
130 * [10]
131 *
132 */
133 static VALUE
134 enum_each_slice(VALUE obj, SEL sel, VALUE n)
135 {
136 long size = NUM2LONG(n);
137 VALUE args[2], ary;
138
139 if (size <= 0) rb_raise(rb_eArgError, "invalid slice size");
140 RETURN_ENUMERATOR(obj, 1, &n);
141 args[0] = rb_ary_new2(size);
142 args[1] = (VALUE)size;
143
70ea0b5 per-vm method cache + misc fixes/improvements
Laurent Sansonetti authored
144 rb_objc_block_call(obj, selEach, 0, 0, each_slice_i, (VALUE)args);
9c1d230 committing experimental branch content
Laurent Sansonetti authored
145
146 ary = args[0];
147 if (RARRAY_LEN(ary) > 0) rb_yield(ary);
148
149 return Qnil;
150 }
151
152 static VALUE
153 each_cons_i(VALUE val, VALUE *memo)
154 {
155 VALUE ary = memo[0];
156 VALUE v = Qnil;
157 long size = (long)memo[1];
158
159 if (RARRAY_LEN(ary) == size) {
160 rb_ary_shift(ary);
161 }
162 rb_ary_push(ary, val);
163 if (RARRAY_LEN(ary) == size) {
164 v = rb_yield(rb_ary_dup(ary));
165 }
166 return v;
167 }
168
/*
 * call-seq:
 *   each_cons(n) {...}
 *   each_cons(n)
 *
 * Iterates the given block for each array of consecutive <n>
 * elements.  If no block is given, returns an enumerator.
 *
 * e.g.:
 *     (1..10).each_cons(3) {|a| p a}
 *     # outputs below
 *     [1, 2, 3]
 *     [2, 3, 4]
 *     [3, 4, 5]
 *     [4, 5, 6]
 *     [5, 6, 7]
 *     [6, 7, 8]
 *     [7, 8, 9]
 *     [8, 9, 10]
 *
 */
static VALUE
enum_each_cons(VALUE obj, SEL sel, VALUE n)
{
    long size = NUM2LONG(n);
    VALUE args[2];

    if (size <= 0) rb_raise(rb_eArgError, "invalid size");
    RETURN_ENUMERATOR(obj, 1, &n);
    /* args[0] is the sliding window, args[1] the window size (as VALUE). */
    args[0] = rb_ary_new2(size);
    args[1] = (VALUE)size;

    rb_objc_block_call(obj, selEach, 0, 0, each_cons_i, (VALUE)args);

    return Qnil;
}
205
/* Allocator: wraps a zero-filled struct enumerator in a Data object.
 * No mark/free callbacks are registered (handled by the GC). */
static VALUE
enumerator_allocate(VALUE klass)
{
    struct enumerator *ptr;
    return Data_Make_Struct(klass, struct enumerator,
			    NULL, -1, ptr);
}
213
/* Block callback for Enumerator#each: forwards every yielded value
 * straight to the user-supplied block. */
static VALUE
enumerator_each_i(VALUE v, VALUE enum_obj, int argc, VALUE *argv)
{
    return rb_yield_values2(argc, argv);
}
219
/*
 * Shared initializer: records the receiver, selector and optional
 * argument list in +enum_obj+'s enumerator struct, and resets the
 * external-iteration state.  Returns +enum_obj+.
 */
static VALUE
enumerator_init(VALUE enum_obj, VALUE obj, SEL sel, int argc, VALUE *argv)
{
    struct enumerator *ptr = enumerator_ptr(enum_obj);

    GC_WB(&ptr->obj, obj);	/* GC write barrier for the stored reference */
    ptr->sel = sel;
    if (argc > 0) {
	GC_WB(&ptr->args, rb_ary_new4(argc, argv));
    }
    ptr->fib = 0;
    ptr->dst = Qnil;
    ptr->no_next = Qfalse;

    return enum_obj;
}
236
/*
 * call-seq:
 *   Enumerable::Enumerator.new(obj, method = :each, *args)
 *
 * Creates a new Enumerable::Enumerator object, which is to be
 * used as an Enumerable object using the given object's given
 * method with the given arguments.
 *
 * Use of this method is discouraged.  Use Kernel#enum_for()
 * instead.
 */
static VALUE
enumerator_initialize(VALUE obj, SEL sel, int argc, VALUE *argv)
{
    VALUE recv, meth = sym_each;

    if (argc == 0)
	rb_raise(rb_eArgError, "wrong number of argument (0 for 1)");
    recv = *argv++;
    /* Optional second argument names the iteration method. */
    if (--argc) {
	meth = *argv++;
	--argc;
    }
    ID meth_id = rb_to_id(meth);
    SEL meth_sel;
    if (argc == 0) {
	/* No extra arguments: selector without a colon. */
	meth_sel = sel_registerName(rb_id2name(meth_id));
    }
    else {
	/* Extra arguments: selector takes one argument ("name:"). */
	char buf[100];
	snprintf(buf, sizeof buf, "%s:", rb_id2name(meth_id));
	meth_sel = sel_registerName(buf);
    }
    return enumerator_init(obj, recv, meth_sel, argc, argv);
}
272
/* :nodoc: */
/* dup/clone support: copies receiver, selector and args from +orig+.
 * An enumerator with a live fiber (external iteration in progress)
 * cannot be copied. */
static VALUE
enumerator_init_copy(VALUE obj, SEL sel, VALUE orig)
{
    struct enumerator *ptr0, *ptr1;

    ptr0 = enumerator_ptr(orig);
    if (ptr0->fib) {
	/* Fibers cannot be copied */
	rb_raise(rb_eTypeError, "can't copy execution context");
    }
    ptr1 = enumerator_ptr(obj);

    GC_WB(&ptr1->obj, ptr0->obj);	/* GC write barrier */
    ptr1->sel = ptr0->sel;
    if (ptr0->args != 0) {
	GC_WB(&ptr1->args, ptr0->args);
    }
    ptr1->fib = 0;

    return obj;
}
295
296 VALUE
a6a914f implemented implicit enumeratorization
Laurent Sansonetti authored
297 rb_enumeratorize(VALUE obj, SEL sel, int argc, VALUE *argv)
9c1d230 committing experimental branch content
Laurent Sansonetti authored
298 {
a6a914f implemented implicit enumeratorization
Laurent Sansonetti authored
299 return enumerator_init(enumerator_allocate(rb_cEnumerator), obj, sel,
300 argc, argv);
9c1d230 committing experimental branch content
Laurent Sansonetti authored
301 }
302
/* Invokes the enumerator's stored method (receiver + selector + args)
 * with +func+ as the block callback and +arg+ as its closure data. */
static VALUE
enumerator_block_call(VALUE obj, VALUE (*func)(ANYARGS), VALUE arg)
{
    struct enumerator *e;
    int argc = 0;
    const VALUE *argv = 0;

    e = enumerator_ptr(obj);
    if (e->args != 0) {
	argc = RARRAY_LEN(e->args);
	argv = RARRAY_PTR(e->args);
    }
    return rb_objc_block_call(e->obj, e->sel, argc, (VALUE *)argv,
			      func, arg);
}
318
319 /*
320 * call-seq:
321 * enum.each {...}
322 *
323 * Iterates the given block using the object and the method specified
324 * in the first place. If no block is given, returns self.
325 *
326 */
327 static VALUE
328 enumerator_each(VALUE obj, SEL sel)
329 {
330 if (!rb_block_given_p()) {
331 return obj;
332 }
333 return enumerator_block_call(obj, enumerator_each_i, obj);
9c1d230 committing experimental branch content
Laurent Sansonetti authored
334 }
335
/* Block callback for with_index: yields (value, index) and advances the
 * counter stored behind +m+ (a pointer to a VALUE used as a long). */
static VALUE
enumerator_with_index_i(VALUE val, VALUE m, int argc, VALUE *argv)
{
    VALUE idx;
    VALUE *memo = (VALUE *)m;

    idx = INT2FIX(*memo);
    ++*memo;	/* post-increment the shared counter for the next call */

    /* Multiple yielded values are packed into an array first. */
    if (argc <= 1)
	return rb_yield_values(2, val, idx);

    return rb_yield_values(2, rb_ary_new4(argc, argv), idx);
}
350
/*
 * call-seq:
 *   e.with_index(offset = 0) {|(*args), idx| ... }
 *   e.with_index
 *
 * Iterates the given block for each elements with an index, which
 * starts from +offset+.  If no block is given, returns an enumerator.
 *
 */
static VALUE
enumerator_with_index(VALUE obj, SEL sel, int argc, VALUE *argv)
{
    VALUE memo;

    rb_scan_args(argc, argv, "01", &memo);
    RETURN_ENUMERATOR(obj, argc, argv);
    /* memo is reused as a raw long counter; its address is passed to the
     * block callback, which increments it in place. */
    memo = NIL_P(memo) ? 0 : (VALUE)NUM2LONG(memo);
    return enumerator_block_call(obj, enumerator_with_index_i, (VALUE)&memo);
}
370
/*
 * call-seq:
 *   e.each_with_index {|(*args), idx| ... }
 *   e.each_with_index
 *
 * Same as Enumerator#with_index, except each_with_index does not
 * receive an offset argument.
 *
 */
static VALUE
enumerator_each_with_index(VALUE obj, SEL sel)
{
    /* Delegate to with_index with no offset (index starts at 0). */
    return enumerator_with_index(obj, sel, 0, NULL);
}
385
386 static VALUE
387 enumerator_with_object_i(VALUE val, VALUE memo, int argc, VALUE *argv)
388 {
389 if (argc <= 1) {
390 return rb_yield_values(2, val, memo);
9c1d230 committing experimental branch content
Laurent Sansonetti authored
391 }
6f5ed12 Improve core/enumerator pass rate
Thibault Martin-Lagardette authored
392
393 return rb_yield_values(2, rb_ary_new4(argc, argv), memo);
394 }
395
/*
 * call-seq:
 *   e.with_object(obj) {|(*args), memo_obj| ... }
 *   e.with_object(obj)
 *
 * Iterates the given block for each element with an arbitrary
 * object given, and returns the initially given object.
 *
 * If no block is given, returns an enumerator.
 *
 */
static VALUE
enumerator_with_object(VALUE obj, SEL sel, VALUE memo)
{
    RETURN_ENUMERATOR(obj, 1, &memo);
    enumerator_block_call(obj, enumerator_with_object_i, memo);
    /* Always returns the memo object, not the iteration result. */
    return memo;
}
414
/*
 * Fiber-based machinery for external iteration (Enumerator#next).
 * Disabled: this port does not provide the required Fiber support yet;
 * see the TODO in enumerator_next below.
 */
#if 0
/* Inner block: forwards each yielded value out through the fiber. */
static VALUE
next_ii(VALUE i, VALUE obj, int argc, VALUE *argv)
{
    rb_fiber_yield(argc, argv);
    return Qnil;
}

/* Fiber body: drives the iteration, then marks the enumerator done. */
static VALUE
next_i(VALUE curr, VALUE obj)
{
    struct enumerator *e = enumerator_ptr(obj);
    VALUE rnil = Qnil;

    rb_block_call(obj, rb_intern("each"), 0, 0, next_ii, obj);
    e->no_next = Qtrue;
    return rb_fiber_yield(1, &rnil);
}

/* Lazily creates the iteration fiber on the first #next call. */
static void
next_init(VALUE obj, struct enumerator *e)
{
    VALUE curr = rb_fiber_current();
    e->dst = curr;
    e->fib = rb_fiber_new(next_i, obj);
}
#endif
442
/*
 * call-seq:
 *   e.next => object
 *
 * Returns the next object in the enumerator, and move the internal
 * position forward.  When the position reached at the end, internal
 * position is rewinded then StopIteration is raised.
 *
 * Note that enumeration sequence by next method does not affect other
 * non-external enumeration methods, unless underlying iteration
 * methods itself has side-effect, e.g. IO#each_line.
 *
 */

static VALUE
enumerator_next(VALUE obj, SEL sel)
{
    // TODO: not implemented in this port -- requires Fiber support
    // (see the disabled next_init machinery above).  Currently always
    // returns nil instead of iterating or raising StopIteration.
#if 0
    struct enumerator *e = enumerator_ptr(obj);
    VALUE curr, v;
    curr = rb_fiber_current();

    if (!e->fib || !rb_fiber_alive_p(e->fib)) {
	next_init(obj, e);
    }

    v = rb_fiber_resume(e->fib, 1, &curr);
    if (e->no_next) {
	e->fib = 0;
	e->dst = Qnil;
	e->no_next = Qfalse;
	rb_raise(rb_eStopIteration, "iteration reached at end");
    }
    return v;
#endif
    return Qnil;
}
481
482 /*
483 * call-seq:
484 * e.rewind => e
485 *
486 * Rewinds the enumeration sequence by the next method.
487 */
488
489 static VALUE
490 enumerator_rewind(VALUE obj, SEL sel)
491 {
492 struct enumerator *e = enumerator_ptr(obj);
493
494 e->fib = 0;
495 e->dst = Qnil;
496 e->no_next = Qfalse;
497 return obj;
498 }
499
/* Registers Kernel#to_enum/#enum_for, Enumerable#each_slice/#each_cons,
 * the Enumerator class and its methods, and StopIteration. */
void
Init_Enumerator(void)
{
    rb_objc_define_method(rb_mKernel, "to_enum", obj_to_enum, -1);
    rb_objc_define_method(rb_mKernel, "enum_for", obj_to_enum, -1);

    rb_objc_define_method(rb_mEnumerable, "each_slice", enum_each_slice, 1);
    rb_objc_define_method(rb_mEnumerable, "each_cons", enum_each_cons, 1);

    /* Enumerator is a top-level class (not Enumerable::Enumerator). */
    rb_cEnumerator = rb_define_class("Enumerator", rb_cObject);
    rb_include_module(rb_cEnumerator, rb_mEnumerable);

    rb_objc_define_method(*(VALUE *)rb_cEnumerator, "alloc", enumerator_allocate, 0);
    rb_objc_define_method(rb_cEnumerator, "initialize", enumerator_initialize, -1);
    rb_objc_define_method(rb_cEnumerator, "initialize_copy", enumerator_init_copy, 1);
    rb_objc_define_method(rb_cEnumerator, "each", enumerator_each, 0);
    rb_objc_define_method(rb_cEnumerator, "each_with_index", enumerator_each_with_index, 0);
    rb_objc_define_method(rb_cEnumerator, "each_with_object", enumerator_with_object, 1);
    rb_objc_define_method(rb_cEnumerator, "with_index", enumerator_with_index, -1);
    rb_objc_define_method(rb_cEnumerator, "with_object", enumerator_with_object, 1);
    rb_objc_define_method(rb_cEnumerator, "next", enumerator_next, 0);
    rb_objc_define_method(rb_cEnumerator, "rewind", enumerator_rewind, 0);

    rb_eStopIteration = rb_define_class("StopIteration", rb_eIndexError);

    sym_each = ID2SYM(rb_intern("each"));
}
Something went wrong with that request. Please try again.