/*
 * This file is covered by the Ruby license. See COPYING for more details.
 *
 * Copyright (C) 2007-2011, Apple Inc. All rights reserved.
 * Copyright (C) 2001-2003 Akinori MUSHA
 */

#include "macruby_internal.h"
#include "id.h"
#include "ruby/node.h"
#include "vm.h"

/*
 * Document-class: Enumerable::Enumerator
 *
 * A class which provides a method `each' to be used as an Enumerable
 * object.
 */
VALUE rb_cEnumerator;
static VALUE sym_each;

VALUE rb_eStopIteration;

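/*
 * Internal enumerator state: the receiver and selector to iterate with,
 * any extra arguments, and the fiber bookkeeping used by external
 * iteration (#next).
 */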
struct enumerator {
    VALUE obj;
    SEL sel;
    VALUE args;
    VALUE fib;
    VALUE dst;
    VALUE no_next;
};

static struct enumerator *
enumerator_ptr(VALUE obj)
{
    struct enumerator *ptr;

    Data_Get_Struct(obj, struct enumerator, ptr);
#if 0
    if (RDATA(obj)->dmark != enumerator_mark) {
        rb_raise(rb_eTypeError,
                 "wrong argument type %s (expected %s)",
                 rb_obj_classname(obj), rb_class2name(rb_cEnumerator));
    }
#endif
    if (!ptr) {
        rb_raise(rb_eArgError, "uninitialized enumerator");
    }
    return ptr;
}

/*
 * call-seq:
 *   obj.to_enum(method = :each, *args)
 *   obj.enum_for(method = :each, *args)
 *
 * Returns Enumerable::Enumerator.new(self, method, *args).
 *
 * e.g.:
 *
 *   str = "xyz"
 *
 *   enum = str.enum_for(:each_byte)
 *   a = enum.map {|b| '%02x' % b } #=> ["78", "79", "7a"]
 *
 *   # protects an array from being modified
 *   a = [1, 2, 3]
 *   some_method(a.to_enum)
 *
 */
static VALUE
obj_to_enum(VALUE obj, SEL sel, int argc, VALUE *argv)
{
    VALUE meth = sym_each;

    if (argc > 0) {
        --argc;
        meth = *argv++;
    }

    ID meth_id = rb_to_id(meth);
    SEL enum_sel = rb_vm_id_to_sel(meth_id, argc);
    return rb_enumeratorize(obj, enum_sel, argc, argv);
}

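/*
 * Block callback for each_slice: pushes each value onto the pending
 * array and, once it holds +size+ elements, yields it and starts a
 * fresh array.
 */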
static VALUE
each_slice_i(VALUE val, VALUE *memo)
{
    VALUE ary = memo[0];
    VALUE v = Qnil;
    long size = (long)memo[1];

    rb_ary_push(ary, val);

    if (RARRAY_LEN(ary) == size) {
        v = rb_yield(ary);
        memo[0] = rb_ary_new2(size);
    }

    return v;
}

/*
 * call-seq:
 *   e.each_slice(n) {...}
 *   e.each_slice(n)
 *
 * Iterates the given block for each slice of <n> elements. If no
 * block is given, returns an enumerator.
 *
 * e.g.:
 *   (1..10).each_slice(3) {|a| p a}
 *   # outputs below
 *   [1, 2, 3]
 *   [4, 5, 6]
 *   [7, 8, 9]
 *   [10]
 *
 */
static VALUE
enum_each_slice(VALUE obj, SEL sel, VALUE n)
{
    long size = NUM2LONG(n);
    VALUE args[2], ary;

    if (size <= 0) rb_raise(rb_eArgError, "invalid slice size");
    RETURN_ENUMERATOR(obj, 1, &n);
    args[0] = rb_ary_new2(size);
    args[1] = (VALUE)size;

    rb_objc_block_call(obj, selEach, 0, 0, each_slice_i, (VALUE)args);

    ary = args[0];
    if (RARRAY_LEN(ary) > 0) rb_yield(ary);

    return Qnil;
}

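/*
 * Block callback for each_cons: maintains a sliding window of the last
 * +size+ values and yields a copy of the window once it is full.
 */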
static VALUE
each_cons_i(VALUE val, VALUE *memo)
{
    VALUE ary = memo[0];
    VALUE v = Qnil;
    long size = (long)memo[1];

    if (RARRAY_LEN(ary) == size) {
        rb_ary_shift(ary);
    }
    rb_ary_push(ary, val);
    if (RARRAY_LEN(ary) == size) {
        v = rb_yield(rb_ary_dup(ary));
    }
    return v;
}

/*
 * call-seq:
 *   each_cons(n) {...}
 *   each_cons(n)
 *
 * Iterates the given block for each array of consecutive <n>
 * elements. If no block is given, returns an enumerator.
 *
 * e.g.:
 *   (1..10).each_cons(3) {|a| p a}
 *   # outputs below
 *   [1, 2, 3]
 *   [2, 3, 4]
 *   [3, 4, 5]
 *   [4, 5, 6]
 *   [5, 6, 7]
 *   [6, 7, 8]
 *   [7, 8, 9]
 *   [8, 9, 10]
 *
 */
static VALUE
enum_each_cons(VALUE obj, SEL sel, VALUE n)
{
    long size = NUM2LONG(n);
    VALUE args[2];

    if (size <= 0) rb_raise(rb_eArgError, "invalid size");
    RETURN_ENUMERATOR(obj, 1, &n);
    args[0] = rb_ary_new2(size);
    args[1] = (VALUE)size;

    rb_objc_block_call(obj, selEach, 0, 0, each_cons_i, (VALUE)args);

    return Qnil;
}

static VALUE
enumerator_allocate(VALUE klass)
{
    struct enumerator *ptr;
    return Data_Make_Struct(klass, struct enumerator,
        NULL, -1, ptr);
}

static VALUE
enumerator_each_i(VALUE v, VALUE enum_obj, int argc, VALUE *argv)
{
    return rb_yield_values2(argc, argv);
}

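/*
 * Shared initializer: records the receiver, the selector to send and
 * any extra arguments on the enumerator (through GC write barriers),
 * and resets the external-iteration state.
 */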
static VALUE
enumerator_init(VALUE enum_obj, VALUE obj, SEL sel, int argc, VALUE *argv)
{
    struct enumerator *ptr = enumerator_ptr(enum_obj);

    GC_WB(&ptr->obj, obj);
    ptr->sel = sel;
    if (argc > 0) {
        GC_WB(&ptr->args, rb_ary_new4(argc, argv));
    }
    ptr->fib = 0;
    ptr->dst = Qnil;
    ptr->no_next = Qfalse;

    return enum_obj;
}

/*
 * call-seq:
 *   Enumerable::Enumerator.new(obj, method = :each, *args)
 *
 * Creates a new Enumerable::Enumerator object, which is to be
 * used as an Enumerable object using the given object's given
 * method with the given arguments.
 *
 * Use of this method is discouraged. Use Kernel#enum_for()
 * instead.
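 *
 * e.g. (an illustrative sketch, equivalent to the enum_for example above):
 *
 *   enum = Enumerable::Enumerator.new("xyz", :each_byte)
 *   enum.map {|b| '%02x' % b } #=> ["78", "79", "7a"]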
 */
static VALUE
enumerator_initialize(VALUE obj, SEL sel, int argc, VALUE *argv)
{
    VALUE recv, meth = sym_each;

    if (argc == 0)
        rb_raise(rb_eArgError, "wrong number of argument (0 for 1)");
    recv = *argv++;
    if (--argc) {
        meth = *argv++;
        --argc;
    }
    ID meth_id = rb_to_id(meth);
    SEL meth_sel = rb_vm_id_to_sel(meth_id, argc);
    return enumerator_init(obj, recv, meth_sel, argc, argv);
}

/* :nodoc: */
static VALUE
enumerator_init_copy(VALUE obj, SEL sel, VALUE orig)
{
    struct enumerator *ptr0, *ptr1;

    ptr0 = enumerator_ptr(orig);
    if (ptr0->fib) {
        /* Fibers cannot be copied */
        rb_raise(rb_eTypeError, "can't copy execution context");
    }
    ptr1 = enumerator_ptr(obj);

    GC_WB(&ptr1->obj, ptr0->obj);
    ptr1->sel = ptr0->sel;
    if (ptr0->args != 0) {
        GC_WB(&ptr1->args, ptr0->args);
    }
    ptr1->fib = 0;

    return obj;
}

VALUE
rb_enumeratorize(VALUE obj, SEL sel, int argc, VALUE *argv)
{
    return enumerator_init(enumerator_allocate(rb_cEnumerator), obj, sel,
        argc, argv);
}

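/*
 * Dispatches the enumerator's stored selector on its receiver,
 * forwarding the saved arguments and routing every yielded value to
 * +func+.
 */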
static VALUE
enumerator_block_call(VALUE obj, VALUE (*func)(ANYARGS), VALUE arg)
{
    struct enumerator *e;
    int argc = 0;
    const VALUE *argv = 0;

    e = enumerator_ptr(obj);
    if (e->args != 0) {
        argc = RARRAY_LEN(e->args);
        argv = RARRAY_PTR(e->args);
    }
    return rb_objc_block_call(e->obj, e->sel, argc, (VALUE *)argv,
        func, arg);
}

/*
 * call-seq:
 *   enum.each {...}
 *
 * Iterates the given block using the object and the method specified
 * when the enumerator was created. If no block is given, returns self.
 *
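 * e.g. (illustrative):
 *
 *   enum = "xyz".enum_for(:each_byte)
 *   enum.each {|b| p b }    # iterates via String#each_byte
 *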
 */
static VALUE
enumerator_each(VALUE obj, SEL sel)
{
    if (!rb_block_given_p()) {
        return obj;
    }
    return enumerator_block_call(obj, enumerator_each_i, obj);
}

static VALUE
enumerator_with_index_i(VALUE val, VALUE m, int argc, VALUE *argv)
{
    VALUE idx;
    VALUE *memo = (VALUE *)m;

    idx = INT2FIX(*memo);
    ++*memo;

    if (argc <= 1)
        return rb_yield_values(2, val, idx);

    return rb_yield_values(2, rb_ary_new4(argc, argv), idx);
}

/*
 * call-seq:
 *   e.with_index(offset = 0) {|(*args), idx| ... }
 *   e.with_index
 *
 * Iterates the given block for each element with an index, which
 * starts from +offset+. If no block is given, returns an enumerator.
 *
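 * e.g. (illustrative):
 *
 *   %w(a b c).each.with_index(1) {|x, i| p [i, x] }
 *   # outputs below
 *   [1, "a"]
 *   [2, "b"]
 *   [3, "c"]
 *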
 */
static VALUE
enumerator_with_index(VALUE obj, SEL sel, int argc, VALUE *argv)
{
    VALUE memo;

    rb_scan_args(argc, argv, "01", &memo);
    RETURN_ENUMERATOR(obj, argc, argv);
    memo = NIL_P(memo) ? 0 : (VALUE)NUM2LONG(memo);
    return enumerator_block_call(obj, enumerator_with_index_i, (VALUE)&memo);
}

/*
 * call-seq:
 *   e.each_with_index {|(*args), idx| ... }
 *   e.each_with_index
 *
 * Same as Enumerator#with_index, except each_with_index does not
 * receive an offset argument.
 *
 */
static VALUE
enumerator_each_with_index(VALUE obj, SEL sel)
{
    return enumerator_with_index(obj, sel, 0, NULL);
}

static VALUE
enumerator_with_object_i(VALUE val, VALUE memo, int argc, VALUE *argv)
{
    if (argc <= 1) {
        return rb_yield_values(2, val, memo);
    }

    return rb_yield_values(2, rb_ary_new4(argc, argv), memo);
}

/*
 * call-seq:
 *   e.with_object(obj) {|(*args), memo_obj| ... }
 *   e.with_object(obj)
 *
 * Iterates the given block for each element with an arbitrary
 * object given, and returns the initially given object.
 *
 * If no block is given, returns an enumerator.
 *
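 * e.g. (illustrative):
 *
 *   doubled = (1..5).each.with_object([]) {|i, memo| memo << i * 2 }
 *   doubled #=> [2, 4, 6, 8, 10]
 *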
 */
static VALUE
enumerator_with_object(VALUE obj, SEL sel, VALUE memo)
{
    RETURN_ENUMERATOR(obj, 1, &memo);
    enumerator_block_call(obj, enumerator_with_object_i, memo);
    return memo;
}

#if 0
static VALUE
next_ii(VALUE i, VALUE obj, int argc, VALUE *argv)
{
    rb_fiber_yield(argc, argv);
    return Qnil;
}

static VALUE
next_i(VALUE curr, VALUE obj)
{
    struct enumerator *e = enumerator_ptr(obj);
    VALUE rnil = Qnil;

    rb_block_call(obj, rb_intern("each"), 0, 0, next_ii, obj);
    e->no_next = Qtrue;
    return rb_fiber_yield(1, &rnil);
}

static void
next_init(VALUE obj, struct enumerator *e)
{
    VALUE curr = rb_fiber_current();
    e->dst = curr;
    e->fib = rb_fiber_new(next_i, obj);
}
#endif

/*
 * call-seq:
 *   e.next => object
 *
 * Returns the next object in the enumerator, and moves the internal
 * position forward. When the position reaches the end, the internal
 * position is rewound and StopIteration is raised.
 *
 * Note that enumeration by the next method does not affect other
 * non-external enumeration methods, unless the underlying iteration
 * method itself has side effects, e.g. IO#each_line.
 *
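 * e.g. (documented behavior; the implementation below is still a TODO):
 *
 *   e = [1, 2, 3].each
 *   e.next #=> 1
 *   e.next #=> 2
 *   e.next #=> 3
 *   e.next # raises StopIteration
 *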
 */

static VALUE
enumerator_next(VALUE obj, SEL sel)
{
    // TODO
#if 0
    struct enumerator *e = enumerator_ptr(obj);
    VALUE curr, v;
    curr = rb_fiber_current();

    if (!e->fib || !rb_fiber_alive_p(e->fib)) {
        next_init(obj, e);
    }

    v = rb_fiber_resume(e->fib, 1, &curr);
    if (e->no_next) {
        e->fib = 0;
        e->dst = Qnil;
        e->no_next = Qfalse;
        rb_raise(rb_eStopIteration, "iteration reached at end");
    }
    return v;
#endif
    return Qnil;
}

/*
 * call-seq:
 *   e.rewind => e
 *
 * Rewinds the enumeration sequence to the beginning.
 */

static VALUE
enumerator_rewind(VALUE obj, SEL sel)
{
    struct enumerator *e = enumerator_ptr(obj);

    e->fib = 0;
    e->dst = Qnil;
    e->no_next = Qfalse;
    return obj;
}

void
Init_Enumerator(void)
{
    rb_objc_define_method(rb_mKernel, "to_enum", obj_to_enum, -1);
    rb_objc_define_method(rb_mKernel, "enum_for", obj_to_enum, -1);

    rb_objc_define_method(rb_mEnumerable, "each_slice", enum_each_slice, 1);
    rb_objc_define_method(rb_mEnumerable, "each_cons", enum_each_cons, 1);

    rb_cEnumerator = rb_define_class("Enumerator", rb_cObject);
    rb_include_module(rb_cEnumerator, rb_mEnumerable);

    rb_objc_define_method(*(VALUE *)rb_cEnumerator, "alloc", enumerator_allocate, 0);
    rb_objc_define_method(rb_cEnumerator, "initialize", enumerator_initialize, -1);
    rb_objc_define_method(rb_cEnumerator, "initialize_copy", enumerator_init_copy, 1);
    rb_objc_define_method(rb_cEnumerator, "each", enumerator_each, 0);
    rb_objc_define_method(rb_cEnumerator, "each_with_index", enumerator_each_with_index, 0);
    rb_objc_define_method(rb_cEnumerator, "each_with_object", enumerator_with_object, 1);
    rb_objc_define_method(rb_cEnumerator, "with_index", enumerator_with_index, -1);
    rb_objc_define_method(rb_cEnumerator, "with_object", enumerator_with_object, 1);
    rb_objc_define_method(rb_cEnumerator, "next", enumerator_next, 0);
    rb_objc_define_method(rb_cEnumerator, "rewind", enumerator_rewind, 0);

    rb_eStopIteration = rb_define_class("StopIteration", rb_eIndexError);

    sym_each = ID2SYM(rb_intern("each"));
}