/*
 * This file is covered by the Ruby license. See COPYING for more details.
 *
 * Copyright (C) 2007-2011, Apple Inc. All rights reserved.
 * Copyright (C) 2001-2003 Akinori MUSHA
 */

#include "macruby_internal.h"
#include "id.h"

/*
 * Document-class: Enumerable::Enumerator
 *
 * A class which provides a method `each' to be used as an Enumerable
 * object.
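 *
 * For illustration only (a sketch, assuming String#each_byte yields each
 * byte as an Integer):
 *
 *   enum = "xyz".to_enum(:each_byte)
 *   enum.map {|b| b.to_s(16) }   #=> ["78", "79", "7a"]
 *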
 */
VALUE rb_cEnumerator;
static VALUE sym_each;

VALUE rb_eStopIteration;

struct enumerator {
    VALUE obj;
    SEL sel;
    VALUE args;
    VALUE fib;
    VALUE dst;
    VALUE no_next;
};

static struct enumerator *
enumerator_ptr(VALUE obj)
{
    struct enumerator *ptr;

    Data_Get_Struct(obj, struct enumerator, ptr);
#if 0
    if (RDATA(obj)->dmark != enumerator_mark) {
        rb_raise(rb_eTypeError,
                 "wrong argument type %s (expected %s)",
                 rb_obj_classname(obj), rb_class2name(rb_cEnumerator));
    }
#endif
    if (!ptr) {
        rb_raise(rb_eArgError, "uninitialized enumerator");
    }
    return ptr;
}

/*
 * call-seq:
 *   obj.to_enum(method = :each, *args)
 *   obj.enum_for(method = :each, *args)
 *
 * Returns Enumerable::Enumerator.new(self, method, *args).
 *
 * e.g.:
 *
 *   str = "xyz"
 *
 *   enum = str.enum_for(:each_byte)
 *   a = enum.map {|b| '%02x' % b } #=> ["78", "79", "7a"]
 *
 *   # protects an array from being modified
 *   a = [1, 2, 3]
 *   some_method(a.to_enum)
 *
 */
static VALUE
obj_to_enum(VALUE obj, SEL sel, int argc, VALUE *argv)
{
    VALUE meth = sym_each;

    if (argc > 0) {
        --argc;
        meth = *argv++;
    }

    ID meth_id = rb_to_id(meth);
    SEL enum_sel;
    if (argc > 0) {
        char buf[100];
        snprintf(buf, sizeof buf, "%s:", rb_id2name(meth_id));
        enum_sel = sel_registerName(buf);
    }
    else {
        enum_sel = sel_registerName(rb_id2name(meth_id));
    }
    return rb_enumeratorize(obj, enum_sel, argc, argv);
}

static VALUE
each_slice_i(VALUE val, VALUE *memo)
{
    VALUE ary = memo[0];
    VALUE v = Qnil;
    long size = (long)memo[1];

    rb_ary_push(ary, val);

    if (RARRAY_LEN(ary) == size) {
        v = rb_yield(ary);
        memo[0] = rb_ary_new2(size);
    }

    return v;
}

/*
 * call-seq:
 *   e.each_slice(n) {...}
 *   e.each_slice(n)
 *
 * Iterates the given block for each slice of <n> elements. If no
 * block is given, returns an enumerator.
 *
 * e.g.:
 *   (1..10).each_slice(3) {|a| p a}
 *   # outputs below
 *   [1, 2, 3]
 *   [4, 5, 6]
 *   [7, 8, 9]
 *   [10]
 *
 */
static VALUE
enum_each_slice(VALUE obj, SEL sel, VALUE n)
{
    long size = NUM2LONG(n);
    VALUE args[2], ary;

    if (size <= 0) rb_raise(rb_eArgError, "invalid slice size");
    RETURN_ENUMERATOR(obj, 1, &n);
    args[0] = rb_ary_new2(size);
    args[1] = (VALUE)size;

    rb_objc_block_call(obj, selEach, 0, 0, each_slice_i, (VALUE)args);

    ary = args[0];
    if (RARRAY_LEN(ary) > 0) rb_yield(ary);

    return Qnil;
}

static VALUE
each_cons_i(VALUE val, VALUE *memo)
{
    VALUE ary = memo[0];
    VALUE v = Qnil;
    long size = (long)memo[1];

    if (RARRAY_LEN(ary) == size) {
        rb_ary_shift(ary);
    }
    rb_ary_push(ary, val);
    if (RARRAY_LEN(ary) == size) {
        v = rb_yield(rb_ary_dup(ary));
    }
    return v;
}

/*
 * call-seq:
 *   each_cons(n) {...}
 *   each_cons(n)
 *
 * Iterates the given block for each array of consecutive <n>
 * elements. If no block is given, returns an enumerator.
 *
 * e.g.:
 *   (1..10).each_cons(3) {|a| p a}
 *   # outputs below
 *   [1, 2, 3]
 *   [2, 3, 4]
 *   [3, 4, 5]
 *   [4, 5, 6]
 *   [5, 6, 7]
 *   [6, 7, 8]
 *   [7, 8, 9]
 *   [8, 9, 10]
 *
 */
static VALUE
enum_each_cons(VALUE obj, SEL sel, VALUE n)
{
    long size = NUM2LONG(n);
    VALUE args[2];

    if (size <= 0) rb_raise(rb_eArgError, "invalid size");
    RETURN_ENUMERATOR(obj, 1, &n);
    args[0] = rb_ary_new2(size);
    args[1] = (VALUE)size;

    rb_objc_block_call(obj, selEach, 0, 0, each_cons_i, (VALUE)args);

    return Qnil;
}

static VALUE
enumerator_allocate(VALUE klass)
{
    struct enumerator *ptr;
    return Data_Make_Struct(klass, struct enumerator,
            NULL, -1, ptr);
}

static VALUE
enumerator_each_i(VALUE v, VALUE enum_obj, int argc, VALUE *argv)
{
    return rb_yield_values2(argc, argv);
}

static VALUE
enumerator_init(VALUE enum_obj, VALUE obj, SEL sel, int argc, VALUE *argv)
{
    struct enumerator *ptr = enumerator_ptr(enum_obj);

    GC_WB(&ptr->obj, obj);
    ptr->sel = sel;
    if (argc > 0) {
        GC_WB(&ptr->args, rb_ary_new4(argc, argv));
    }
    ptr->fib = 0;
    ptr->dst = Qnil;
    ptr->no_next = Qfalse;

    return enum_obj;
}

/*
 * call-seq:
 *   Enumerable::Enumerator.new(obj, method = :each, *args)
 *
 * Creates a new Enumerable::Enumerator object, which is to be
 * used as an Enumerable object using the given object's given
 * method with the given arguments.
 *
 * Use of this method is discouraged. Use Kernel#enum_for()
 * instead.
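 *
 * A usage sketch (assumes the call-seq above; values are hypothetical):
 *
 *   enum = Enumerator.new([1, 2, 3], :each)
 *   enum.map {|x| x * 10 }   #=> [10, 20, 30]
 *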
 */
static VALUE
enumerator_initialize(VALUE obj, SEL sel, int argc, VALUE *argv)
{
    VALUE recv, meth = sym_each;

    if (argc == 0)
        rb_raise(rb_eArgError, "wrong number of argument (0 for 1)");
    recv = *argv++;
    if (--argc) {
        meth = *argv++;
        --argc;
    }
    ID meth_id = rb_to_id(meth);
    SEL meth_sel;
    if (argc == 0) {
        meth_sel = sel_registerName(rb_id2name(meth_id));
    }
    else {
        char buf[100];
        snprintf(buf, sizeof buf, "%s:", rb_id2name(meth_id));
        meth_sel = sel_registerName(buf);
    }
    return enumerator_init(obj, recv, meth_sel, argc, argv);
}

/* :nodoc: */
static VALUE
enumerator_init_copy(VALUE obj, SEL sel, VALUE orig)
{
    struct enumerator *ptr0, *ptr1;

    ptr0 = enumerator_ptr(orig);
    if (ptr0->fib) {
        /* Fibers cannot be copied */
        rb_raise(rb_eTypeError, "can't copy execution context");
    }
    ptr1 = enumerator_ptr(obj);

    GC_WB(&ptr1->obj, ptr0->obj);
    ptr1->sel = ptr0->sel;
    if (ptr0->args != 0) {
        GC_WB(&ptr1->args, ptr0->args);
    }
    ptr1->fib = 0;

    return obj;
}

VALUE
rb_enumeratorize(VALUE obj, SEL sel, int argc, VALUE *argv)
{
    return enumerator_init(enumerator_allocate(rb_cEnumerator), obj, sel,
            argc, argv);
}

static VALUE
enumerator_block_call(VALUE obj, VALUE (*func)(ANYARGS), VALUE arg)
{
    struct enumerator *e;
    int argc = 0;
    const VALUE *argv = 0;

    e = enumerator_ptr(obj);
    if (e->args != 0) {
        argc = RARRAY_LEN(e->args);
        argv = RARRAY_PTR(e->args);
    }
    return rb_objc_block_call(e->obj, e->sel, argc, (VALUE *)argv,
            func, arg);
}

/*
 * call-seq:
 *   enum.each {...}
 *
 * Iterates the given block using the object and method specified when
 * the enumerator was created. If no block is given, returns self.
 *
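 * An illustrative sketch (mirrors the enum_for example above):
 *
 *   enum = "xyz".enum_for(:each_byte)
 *   enum.each {|b| print b, ' ' }   # prints: 120 121 122
 *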
 */
static VALUE
enumerator_each(VALUE obj, SEL sel)
{
    if (!rb_block_given_p()) {
        return obj;
    }
    return enumerator_block_call(obj, enumerator_each_i, obj);
}

static VALUE
enumerator_with_index_i(VALUE val, VALUE m, int argc, VALUE *argv)
{
    VALUE idx;
    VALUE *memo = (VALUE *)m;

    idx = INT2FIX(*memo);
    ++*memo;

    if (argc <= 1)
        return rb_yield_values(2, val, idx);

    return rb_yield_values(2, rb_ary_new4(argc, argv), idx);
}

/*
 * call-seq:
 *   e.with_index(offset = 0) {|(*args), idx| ... }
 *   e.with_index
 *
 * Iterates the given block for each element with an index, which
 * starts from +offset+. If no block is given, returns an enumerator.
 *
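 * For illustration (a sketch of the documented offset behavior):
 *
 *   e = [:a, :b, :c].to_enum(:each)
 *   e.with_index(1) {|x, i| puts "#{i}: #{x}" }
 *   # 1: a
 *   # 2: b
 *   # 3: c
 *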
 */
static VALUE
enumerator_with_index(VALUE obj, SEL sel, int argc, VALUE *argv)
{
    VALUE memo;

    rb_scan_args(argc, argv, "01", &memo);
    RETURN_ENUMERATOR(obj, argc, argv);
    memo = NIL_P(memo) ? 0 : (VALUE)NUM2LONG(memo);
    return enumerator_block_call(obj, enumerator_with_index_i, (VALUE)&memo);
}

/*
 * call-seq:
 *   e.each_with_index {|(*args), idx| ... }
 *   e.each_with_index
 *
 * Same as Enumerator#with_index, except each_with_index does not
 * receive an offset argument.
 *
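 * A brief sketch (assumes the same behavior as with_index(0)):
 *
 *   %w(a b c).to_enum(:each).each_with_index {|x, i| p [x, i] }
 *   # ["a", 0]
 *   # ["b", 1]
 *   # ["c", 2]
 *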
 */
static VALUE
enumerator_each_with_index(VALUE obj, SEL sel)
{
    return enumerator_with_index(obj, sel, 0, NULL);
}

static VALUE
enumerator_with_object_i(VALUE val, VALUE memo, int argc, VALUE *argv)
{
    if (argc <= 1) {
        return rb_yield_values(2, val, memo);
    }

    return rb_yield_values(2, rb_ary_new4(argc, argv), memo);
}

/*
 * call-seq:
 *   e.with_object(obj) {|(*args), memo_obj| ... }
 *   e.with_object(obj)
 *
 * Iterates the given block for each element with an arbitrary
 * object given, and returns the initially given object.
 *
 * If no block is given, returns an enumerator.
 *
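 * For illustration (a sketch of the documented behavior):
 *
 *   evens = (1..5).to_enum(:each).with_object([]) do |x, memo|
 *     memo << x if x.even?
 *   end
 *   evens   #=> [2, 4]
 *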
 */
static VALUE
enumerator_with_object(VALUE obj, SEL sel, VALUE memo)
{
    RETURN_ENUMERATOR(obj, 1, &memo);
    enumerator_block_call(obj, enumerator_with_object_i, memo);
    return memo;
}

#if 0
static VALUE
next_ii(VALUE i, VALUE obj, int argc, VALUE *argv)
{
    rb_fiber_yield(argc, argv);
    return Qnil;
}

static VALUE
next_i(VALUE curr, VALUE obj)
{
    struct enumerator *e = enumerator_ptr(obj);
    VALUE rnil = Qnil;

    rb_block_call(obj, rb_intern("each"), 0, 0, next_ii, obj);
    e->no_next = Qtrue;
    return rb_fiber_yield(1, &rnil);
}

static void
next_init(VALUE obj, struct enumerator *e)
{
    VALUE curr = rb_fiber_current();
    e->dst = curr;
    e->fib = rb_fiber_new(next_i, obj);
}
#endif

/*
 * call-seq:
 *   e.next => object
 *
 * Returns the next object in the enumerator, and moves the internal
 * position forward. When the position reaches the end, the internal
 * position is rewound and StopIteration is raised.
 *
 * Note that enumeration by the next method does not affect other
 * non-external enumeration methods, unless the underlying iteration
 * method itself has side effects, e.g. IO#each_line.
 *
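 * An illustrative sketch of the documented behavior (note that the
 * implementation below is currently a TODO stub that returns nil):
 *
 *   e = [1, 2, 3].to_enum(:each)
 *   e.next   #=> 1
 *   e.next   #=> 2
 *   e.next   #=> 3
 *   e.next   # raises StopIteration
 *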
 */

static VALUE
enumerator_next(VALUE obj, SEL sel)
{
    // TODO
#if 0
    struct enumerator *e = enumerator_ptr(obj);
    VALUE curr, v;
    curr = rb_fiber_current();

    if (!e->fib || !rb_fiber_alive_p(e->fib)) {
        next_init(obj, e);
    }

    v = rb_fiber_resume(e->fib, 1, &curr);
    if (e->no_next) {
        e->fib = 0;
        e->dst = Qnil;
        e->no_next = Qfalse;
        rb_raise(rb_eStopIteration, "iteration reached at end");
    }
    return v;
#endif
    return Qnil;
}

/*
 * call-seq:
 *   e.rewind => e
 *
 * Rewinds the enumeration sequence used by the next method.
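 *
 * For example (a sketch; pairs with the next example above):
 *
 *   e = [1, 2, 3].to_enum(:each)
 *   e.next     #=> 1
 *   e.rewind
 *   e.next     #=> 1
 *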
 */

static VALUE
enumerator_rewind(VALUE obj, SEL sel)
{
    struct enumerator *e = enumerator_ptr(obj);

    e->fib = 0;
    e->dst = Qnil;
    e->no_next = Qfalse;
    return obj;
}

void
Init_Enumerator(void)
{
    rb_objc_define_method(rb_mKernel, "to_enum", obj_to_enum, -1);
    rb_objc_define_method(rb_mKernel, "enum_for", obj_to_enum, -1);

    rb_objc_define_method(rb_mEnumerable, "each_slice", enum_each_slice, 1);
    rb_objc_define_method(rb_mEnumerable, "each_cons", enum_each_cons, 1);

    rb_cEnumerator = rb_define_class("Enumerator", rb_cObject);
    rb_include_module(rb_cEnumerator, rb_mEnumerable);

    rb_objc_define_method(*(VALUE *)rb_cEnumerator, "alloc", enumerator_allocate, 0);
    rb_objc_define_method(rb_cEnumerator, "initialize", enumerator_initialize, -1);
    rb_objc_define_method(rb_cEnumerator, "initialize_copy", enumerator_init_copy, 1);
    rb_objc_define_method(rb_cEnumerator, "each", enumerator_each, 0);
    rb_objc_define_method(rb_cEnumerator, "each_with_index", enumerator_each_with_index, 0);
    rb_objc_define_method(rb_cEnumerator, "each_with_object", enumerator_with_object, 1);
    rb_objc_define_method(rb_cEnumerator, "with_index", enumerator_with_index, -1);
    rb_objc_define_method(rb_cEnumerator, "with_object", enumerator_with_object, 1);
    rb_objc_define_method(rb_cEnumerator, "next", enumerator_next, 0);
    rb_objc_define_method(rb_cEnumerator, "rewind", enumerator_rewind, 0);

    rb_eStopIteration = rb_define_class("StopIteration", rb_eIndexError);

    sym_each = ID2SYM(rb_intern("each"));
}