/**********************************************************************
eval.c -
$Author$
$Date$
created at: Thu Jun 10 14:22:17 JST 1993
Copyright (C) 1993-2003 Yukihiro Matsumoto
Copyright (C) 2000 Network Applied Communication Laboratory, Inc.
Copyright (C) 2000 Information-technology Promotion Agency, Japan
**********************************************************************/
#include "ruby.h"
#include "node.h"
#include "env.h"
#include "util.h"
#include "rubysig.h"
#ifdef HAVE_STDLIB_H
#include <stdlib.h>
#endif
#ifndef EXIT_SUCCESS
#define EXIT_SUCCESS 0
#endif
#ifndef EXIT_FAILURE
#define EXIT_FAILURE 1
#endif
#include <stdio.h>
#include "st.h"
#include "dln.h"
#ifdef __APPLE__
#include <crt_externs.h>
#endif
/* Make alloca work the best possible way. */
#ifdef __GNUC__
# ifndef atarist
# ifndef alloca
# define alloca __builtin_alloca
# endif
# endif /* atarist */
#else
# ifdef HAVE_ALLOCA_H
# include <alloca.h>
# else
# ifndef _AIX
# ifndef alloca /* predefined by HP cc +Olibcalls */
void *alloca ();
# endif
# endif /* AIX */
# endif /* HAVE_ALLOCA_H */
#endif /* __GNUC__ */
#ifdef HAVE_STDARG_PROTOTYPES
#include <stdarg.h>
#define va_init_list(a,b) va_start(a,b)
#else
#include <varargs.h>
#define va_init_list(a,b) va_start(a)
#endif
#ifndef HAVE_STRING_H
char *strrchr _((const char*,const char));
#endif
#ifdef HAVE_UNISTD_H
#include <unistd.h>
#endif
#include <time.h>
#include <sys/mman.h>
#if defined(HAVE_FCNTL_H) || defined(_WIN32)
#include <fcntl.h>
#elif defined(HAVE_SYS_FCNTL_H)
#include <sys/fcntl.h>
#endif
#ifdef __CYGWIN__
#include <io.h>
#endif
#if defined(__BEOS__) && !defined(BONE)
#include <net/socket.h>
#endif
#ifdef __MACOS__
#include "macruby_private.h"
#endif
#ifdef __VMS
#include "vmsruby_private.h"
#endif
#ifdef USE_CONTEXT
NORETURN(static void rb_jump_context(rb_jmpbuf_t, int));
static inline void
rb_jump_context(env, val)
rb_jmpbuf_t env;
int val;
{
env->status = val;
setcontext(&env->context);
abort(); /* ensure noreturn */
}
/*
 * PRE_GETCONTEXT and POST_GETCONTEXT are a workaround for a problem
 * involving getcontext, gcc, the IA64 register stack, and SPARC
 * register windows.
 *
 * Assume the following code sequence:
 *
 * 1. set a register in the register stack/window, such as r32/l0.
 * 2. call getcontext.
 * 3. use the register.
 * 4. update the register for another use.
 * 5. call setcontext indirectly (or directly).
 *
 * This code runs as 1->2->3->4->5->3->4.
 * But after the second return from getcontext (the second step 3),
 * the register has been clobbered (updated), because getcontext and
 * setcontext do not preserve the contents of the register stack/window.
 *
 * setjmp does not preserve the register stack/window either, but it
 * does not suffer from this problem because gcc knows setjmp may return
 * twice: it detects setjmp and generates setjmp-safe code.
 *
 * So placing setjmp calls before and after the getcontext call makes
 * the code somewhat safe; this fixes the problem on IA64.  The setjmp
 * need not actually be executed at run time, since the problem is one
 * of register usage.
 *
 * Since the setjmp trick is not enough for SPARC, inline asm is used to
 * forbid the use of registers in register windows.
 *
 * Since the problem is fixed in gcc 4.0.3, the workaround is applied
 * only to earlier gcc versions.
* http://gcc.gnu.org/bugzilla/show_bug.cgi?id=21957
* http://gcc.gnu.org/bugzilla/show_bug.cgi?id=22127
*/
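/* Sketch of the failing sequence described above (illustrative only;
 * f, g, ctx and r are placeholder names):
 *
 *     r = f();             // 1. r lives in a stacked register (e.g. r32/l0)
 *     getcontext(&ctx);    // 2. may "return" a second time via setcontext
 *     use(r);              // 3. on the second return r may be clobbered
 *     r = g();             // 4. register reused for another value
 *     setcontext(&ctx);    // 5. jumps back to step 3
 */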
# define GCC_VERSION_BEFORE(major, minor, patchlevel) \
(defined(__GNUC__) && !defined(__INTEL_COMPILER) && \
((__GNUC__ < (major)) || \
(__GNUC__ == (major) && __GNUC_MINOR__ < (minor)) || \
(__GNUC__ == (major) && __GNUC_MINOR__ == (minor) && __GNUC_PATCHLEVEL__ < (patchlevel))))
# if GCC_VERSION_BEFORE(4,0,3) && (defined(sparc) || defined(__sparc__))
# ifdef __pic__
/*
 * %l7 is excluded when compiling PIC code because it is the PIC register.
* http://lists.freebsd.org/pipermail/freebsd-sparc64/2006-January/003739.html
*/
# define PRE_GETCONTEXT \
({ __asm__ volatile ("" : : : \
"%o0", "%o1", "%o2", "%o3", "%o4", "%o5", "%o7", \
"%l0", "%l1", "%l2", "%l3", "%l4", "%l5", "%l6", \
"%i0", "%i1", "%i2", "%i3", "%i4", "%i5", "%i7"); })
# else
# define PRE_GETCONTEXT \
({ __asm__ volatile ("" : : : \
"%o0", "%o1", "%o2", "%o3", "%o4", "%o5", "%o7", \
"%l0", "%l1", "%l2", "%l3", "%l4", "%l5", "%l6", "%l7", \
"%i0", "%i1", "%i2", "%i3", "%i4", "%i5", "%i7"); })
# endif
# define POST_GETCONTEXT PRE_GETCONTEXT
# elif GCC_VERSION_BEFORE(4,0,3) && defined(__ia64)
static jmp_buf function_call_may_return_twice_jmp_buf;
int function_call_may_return_twice_false_1 = 0;
int function_call_may_return_twice_false_2 = 0;
# define PRE_GETCONTEXT \
(function_call_may_return_twice_false_1 ? \
setjmp(function_call_may_return_twice_jmp_buf) : \
0)
# define POST_GETCONTEXT \
(function_call_may_return_twice_false_2 ? \
setjmp(function_call_may_return_twice_jmp_buf) : \
0)
# elif defined(__FreeBSD__) && __FreeBSD__ < 7
/*
 * Workaround for a FreeBSD/i386 getcontext/setcontext bug:
 * clear the carry flag via (0 ? ... : ...).
* FreeBSD PR 92110 http://www.freebsd.org/cgi/query-pr.cgi?pr=92110
* [ruby-dev:28263]
*/
static int volatile freebsd_clear_carry_flag = 0;
# define PRE_GETCONTEXT \
(freebsd_clear_carry_flag ? (freebsd_clear_carry_flag = 0) : 0)
# endif
# ifndef PRE_GETCONTEXT
# define PRE_GETCONTEXT 0
# endif
# ifndef POST_GETCONTEXT
# define POST_GETCONTEXT 0
# endif
# define ruby_longjmp(env, val) rb_jump_context(env, val)
# define ruby_setjmp(just_before_setjmp, j) ((j)->status = 0, \
(just_before_setjmp), \
PRE_GETCONTEXT, \
getcontext(&(j)->context), \
POST_GETCONTEXT, \
(j)->status)
#else
# define ruby_setjmp(just_before_setjmp, env) \
((just_before_setjmp), RUBY_SETJMP(env))
# define ruby_longjmp(env,val) RUBY_LONGJMP(env,val)
# ifdef __CYGWIN__
int _setjmp(), _longjmp();
# endif
#endif
#include <sys/types.h>
#include <signal.h>
#include <errno.h>
#if defined(__VMS)
#pragma nostandard
#endif
#ifdef HAVE_SYS_SELECT_H
#include <sys/select.h>
#endif
#include <sys/stat.h>
VALUE rb_cProc;
VALUE rb_cBinding;
static VALUE proc_invoke _((VALUE,VALUE,VALUE,VALUE));
static VALUE rb_f_binding _((VALUE));
NOINLINE(static void rb_f_END _((void)));
static VALUE rb_f_block_given_p _((void));
static VALUE block_pass _((VALUE,NODE*));
static void eval_check_tick _((void));
VALUE rb_cMethod;
static VALUE method_call _((int, VALUE*, VALUE));
VALUE rb_cUnboundMethod;
static VALUE umethod_bind _((VALUE, VALUE));
static VALUE rb_mod_define_method _((int, VALUE*, VALUE));
NORETURN(static void rb_raise_jump _((VALUE)));
static VALUE rb_make_exception _((int argc, VALUE *argv));
static int scope_vmode;
#define SCOPE_PUBLIC 0
#define SCOPE_PRIVATE 1
#define SCOPE_PROTECTED 2
#define SCOPE_MODFUNC 5
#define SCOPE_MASK 7
#define SCOPE_SET(f) (scope_vmode=(f))
#define SCOPE_TEST(f) (scope_vmode&(f))
VALUE (*ruby_sandbox_save)_((rb_thread_t));
VALUE (*ruby_sandbox_restore)_((rb_thread_t));
NODE* ruby_current_node;
#if 0
#define SET_CURRENT_SOURCE() (ruby_sourcefile = ruby_current_node->nd_file, \
ruby_sourceline = nd_line(ruby_current_node))
#else
#define SET_CURRENT_SOURCE() ((void)0)
#endif
void
ruby_set_current_source()
{
if (ruby_current_node) {
ruby_sourcefile = ruby_current_node->nd_file;
ruby_sourceline = nd_line(ruby_current_node);
}
}
#ifdef MBARI_API
#define SET_METHOD_SOURCE() ruby_set_current_source()
#else
#define SET_METHOD_SOURCE() (void)0
#endif
int ruby_safe_level = 0;
/* safe-level:
   0 - strings from streams/environment/ARGV are tainted (default)
   1 - no dangerous operations using tainted values
   2 - process/file operations prohibited
   3 - all newly created objects are tainted
   4 - no modification of global (non-tainted) state / no direct output
*/
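/* Example (sketch): a C extension performing a sensitive operation
 * would typically guard it with the checks defined below
 * (my_write is a hypothetical method):
 *
 *     static VALUE my_write(VALUE self, VALUE path)
 *     {
 *         rb_secure(2);              // raise SecurityError at $SAFE >= 2
 *         rb_check_safe_str(path);   // reject tainted or non-String paths
 *         ...
 *     }
 */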
static VALUE safe_getter _((void));
static void safe_setter _((VALUE val));
void
rb_secure(level)
int level;
{
if (level <= ruby_safe_level) {
if (ruby_frame->last_func) {
rb_raise(rb_eSecurityError, "Insecure operation `%s' at level %d",
rb_id2name(ruby_frame->last_func), ruby_safe_level);
}
else {
rb_raise(rb_eSecurityError, "Insecure operation at level %d", ruby_safe_level);
}
}
}
void
rb_secure_update(obj)
VALUE obj;
{
if (!OBJ_TAINTED(obj)) rb_secure(4);
}
void
rb_check_safe_obj(x)
VALUE x;
{
if (ruby_safe_level > 0 && OBJ_TAINTED(x)){
if (ruby_frame->last_func) {
rb_raise(rb_eSecurityError, "Insecure operation - %s",
rb_id2name(ruby_frame->last_func));
}
else {
rb_raise(rb_eSecurityError, "Insecure operation: -r");
}
}
rb_secure(4);
}
void
rb_check_safe_str(x)
VALUE x;
{
rb_check_safe_obj(x);
if (TYPE(x) != T_STRING) {
rb_raise(rb_eTypeError, "wrong argument type %s (expected String)",
rb_obj_classname(x));
}
}
NORETURN(static void print_undef _((VALUE, ID)));
static void
print_undef(klass, id)
VALUE klass;
ID id;
{
rb_name_error(id, "undefined method `%s' for %s `%s'",
rb_id2name(id),
(TYPE(klass) == T_MODULE) ? "module" : "class",
rb_class2name(klass));
}
static ID removed, singleton_removed, undefined, singleton_undefined;
#define CACHE_SIZE 0x800
#define CACHE_MASK 0x7ff
#define EXPR1(c,m) ((((c)>>3)^(m))&CACHE_MASK)
struct cache_entry { /* method lookup cache entry */
ID mid; /* method's id */
ID mid0; /* method's original id */
VALUE klass; /* receiver's class */
VALUE origin; /* where method defined */
NODE *method;
int noex;
};
static struct cache_entry cache[CACHE_SIZE];
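/* The global method cache is a direct-mapped table: EXPR1() hashes the
 * receiver's class and the method id into one of CACHE_SIZE slots, and
 * the rb_clear_cache*() functions invalidate entries simply by zeroing
 * their mid. */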
static int ruby_running = 0;
void
rb_clear_cache()
{
struct cache_entry *ent, *end;
if (!ruby_running) return;
ent = cache; end = ent + CACHE_SIZE;
while (ent < end) {
ent->mid = 0;
ent++;
}
}
static void
rb_clear_cache_for_undef(klass, id)
VALUE klass;
ID id;
{
struct cache_entry *ent, *end;
if (!ruby_running) return;
ent = cache; end = ent + CACHE_SIZE;
while (ent < end) {
if (ent->mid == id &&
(ent->klass == klass ||
RCLASS(ent->origin)->m_tbl == RCLASS(klass)->m_tbl)) {
ent->mid = 0;
}
ent++;
}
}
static void
rb_clear_cache_by_id(id)
ID id;
{
struct cache_entry *ent, *end;
if (!ruby_running) return;
ent = cache; end = ent + CACHE_SIZE;
while (ent < end) {
if (ent->mid == id) {
ent->mid = 0;
}
ent++;
}
}
void
rb_clear_cache_by_class(klass)
VALUE klass;
{
struct cache_entry *ent, *end;
if (!ruby_running) return;
ent = cache; end = ent + CACHE_SIZE;
while (ent < end) {
if (ent->klass == klass || ent->origin == klass) {
ent->mid = 0;
}
ent++;
}
}
static ID init, eqq, each, aref, aset, match, missing;
static ID added, singleton_added;
static ID __id__, __send__, respond_to;
#define NOEX_TAINTED 8
#define NOEX_SAFE(n) ((n) >> 4)
#define NOEX_WITH(n, v) ((n) | (v) << 4)
#define NOEX_WITH_SAFE(n) NOEX_WITH(n, ruby_safe_level)
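/* A method's noex field packs its visibility flags in the low four bits
 * and the $SAFE level in effect at definition time in the bits above
 * (see NOEX_WITH / NOEX_SAFE). */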
void
rb_add_method(klass, mid, node, noex)
VALUE klass;
ID mid;
NODE *node;
int noex;
{
NODE *body;
if (NIL_P(klass)) klass = rb_cObject;
if (ruby_safe_level >= 4 && (klass == rb_cObject || !OBJ_TAINTED(klass))) {
rb_raise(rb_eSecurityError, "Insecure: can't define method");
}
if (!FL_TEST(klass, FL_SINGLETON) &&
node && nd_type(node) != NODE_ZSUPER &&
(mid == rb_intern("initialize") || mid == rb_intern("initialize_copy"))) {
noex = NOEX_PRIVATE | noex;
}
else if (FL_TEST(klass, FL_SINGLETON) && node && nd_type(node) == NODE_CFUNC &&
mid == rb_intern("allocate")) {
rb_warn("defining %s.allocate is deprecated; use rb_define_alloc_func()",
rb_class2name(rb_iv_get(klass, "__attached__")));
mid = ID_ALLOCATOR;
}
if (OBJ_FROZEN(klass)) rb_error_frozen("class/module");
rb_clear_cache_by_id(mid);
body = NEW_METHOD(node, NOEX_WITH_SAFE(noex));
st_insert(RCLASS(klass)->m_tbl, mid, (st_data_t)body);
if (node && mid != ID_ALLOCATOR && ruby_running) {
if (FL_TEST(klass, FL_SINGLETON)) {
rb_funcall(rb_iv_get(klass, "__attached__"), singleton_added, 1, ID2SYM(mid));
}
else {
rb_funcall(klass, added, 1, ID2SYM(mid));
}
}
}
void
rb_define_alloc_func(klass, func)
VALUE klass;
VALUE (*func) _((VALUE));
{
Check_Type(klass, T_CLASS);
rb_add_method(rb_singleton_class(klass), ID_ALLOCATOR, NEW_CFUNC(func, 0),
NOEX_PRIVATE);
}
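/* Example (sketch): an extension typically pairs an allocator with
 * Data_Wrap_Struct; cFoo, foo_alloc and struct foo below are
 * hypothetical names:
 *
 *     static VALUE foo_alloc(VALUE klass)
 *     {
 *         struct foo *p = ALLOC(struct foo);
 *         return Data_Wrap_Struct(klass, 0, xfree, p);
 *     }
 *
 *     rb_define_alloc_func(cFoo, foo_alloc);
 */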
void
rb_undef_alloc_func(klass)
VALUE klass;
{
Check_Type(klass, T_CLASS);
rb_add_method(rb_singleton_class(klass), ID_ALLOCATOR, 0, NOEX_UNDEF);
}
static NODE*
search_method(klass, id, origin)
VALUE klass, *origin;
ID id;
{
st_data_t body;
if (!klass) return 0;
while (!st_lookup(RCLASS(klass)->m_tbl, id, &body)) {
klass = RCLASS(klass)->super;
if (!klass) return 0;
}
if (origin) *origin = klass;
return (NODE *)body;
}
static NODE*
rb_get_method_body(klassp, idp, noexp)
VALUE *klassp;
ID *idp;
int *noexp;
{
ID id = *idp;
VALUE klass = *klassp;
VALUE origin = 0;
NODE * volatile body;
struct cache_entry *ent;
if ((body = search_method(klass, id, &origin)) == 0 || !body->nd_body) {
/* store empty info in cache */
ent = cache + EXPR1(klass, id);
ent->klass = klass;
ent->origin = klass;
ent->mid = ent->mid0 = id;
ent->noex = 0;
ent->method = 0;
return 0;
}
if (ruby_running) {
/* store in cache */
ent = cache + EXPR1(klass, id);
ent->klass = klass;
ent->noex = body->nd_noex;
if (noexp) *noexp = body->nd_noex;
body = body->nd_body;
if (nd_type(body) == NODE_FBODY) {
ent->mid = id;
*klassp = body->nd_orig;
ent->origin = body->nd_orig;
*idp = ent->mid0 = body->nd_mid;
body = ent->method = body->nd_head;
}
else {
*klassp = origin;
ent->origin = origin;
ent->mid = ent->mid0 = id;
ent->method = body;
}
}
else {
if (noexp) *noexp = body->nd_noex;
body = body->nd_body;
if (nd_type(body) == NODE_FBODY) {
*klassp = body->nd_orig;
*idp = body->nd_mid;
body = body->nd_head;
}
else {
*klassp = origin;
}
}
return body;
}
NODE*
rb_method_node(klass, id)
VALUE klass;
ID id;
{
int noex;
return rb_get_method_body(&klass, &id, &noex);
}
static void
remove_method(klass, mid)
VALUE klass;
ID mid;
{
st_data_t data;
NODE *body = 0;
if (klass == rb_cObject) {
rb_secure(4);
}
if (ruby_safe_level >= 4 && !OBJ_TAINTED(klass)) {
rb_raise(rb_eSecurityError, "Insecure: can't remove method");
}
if (OBJ_FROZEN(klass)) rb_error_frozen("class/module");
if (mid == __id__ || mid == __send__ || mid == init) {
rb_warn("removing `%s' may cause serious problem", rb_id2name(mid));
}
if (st_lookup(RCLASS(klass)->m_tbl, mid, &data)) {
body = (NODE *)data;
if (!body || !body->nd_body) body = 0;
else {
st_delete(RCLASS(klass)->m_tbl, &mid, &data);
}
}
if (!body) {
rb_name_error(mid, "method `%s' not defined in %s",
rb_id2name(mid), rb_class2name(klass));
}
rb_clear_cache_for_undef(klass, mid);
if (FL_TEST(klass, FL_SINGLETON)) {
rb_funcall(rb_iv_get(klass, "__attached__"), singleton_removed, 1, ID2SYM(mid));
}
else {
rb_funcall(klass, removed, 1, ID2SYM(mid));
}
}
void
rb_remove_method(klass, name)
VALUE klass;
const char *name;
{
remove_method(klass, rb_intern(name));
}
/*
* call-seq:
* remove_method(symbol) => self
*
* Removes the method identified by _symbol_ from the current
* class. For an example, see <code>Module.undef_method</code>.
*/
static VALUE
rb_mod_remove_method(argc, argv, mod)
int argc;
VALUE *argv;
VALUE mod;
{
int i;
for (i=0; i<argc; i++) {
remove_method(mod, rb_to_id(argv[i]));
}
return mod;
}
#undef rb_disable_super
#undef rb_enable_super
void
rb_disable_super(klass, name)
VALUE klass;
const char *name;
{
/* obsolete - no use */
}
void
rb_enable_super(klass, name)
VALUE klass;
const char *name;
{
rb_warn("rb_enable_super() is obsolete");
}
static void
rb_export_method(klass, name, noex)
VALUE klass;
ID name;
ID noex;
{
NODE *body;
VALUE origin;
if (klass == rb_cObject) {
rb_secure(4);
}
body = search_method(klass, name, &origin);
if (!body && TYPE(klass) == T_MODULE) {
body = search_method(rb_cObject, name, &origin);
}
if (!body || !body->nd_body) {
print_undef(klass, name);
}
if (body->nd_noex != noex) {
if (klass == origin) {
body->nd_noex = noex;
}
else {
rb_add_method(klass, name, NEW_ZSUPER(), noex);
}
}
}
int
rb_method_boundp(klass, id, ex)
VALUE klass;
ID id;
int ex;
{
struct cache_entry *ent;
int noex;
/* is it in the method cache? */
ent = cache + EXPR1(klass, id);
if (ent->mid == id && ent->klass == klass) {
if (ex && (ent->noex & NOEX_PRIVATE))
return Qfalse;
if (!ent->method) return Qfalse;
return Qtrue;
}
if (rb_get_method_body(&klass, &id, &noex)) {
if (ex && (noex & NOEX_PRIVATE))
return Qfalse;
return Qtrue;
}
return Qfalse;
}
void
rb_attr(klass, id, read, write, ex)
VALUE klass;
ID id;
int read, write, ex;
{
const char *name;
char *buf;
ID attriv;
int noex;
size_t len;
if (!ex) noex = NOEX_PUBLIC;
else {
if (SCOPE_TEST(SCOPE_PRIVATE)) {
noex = NOEX_PRIVATE;
rb_warning((scope_vmode == SCOPE_MODFUNC) ?
"attribute accessor as module_function" :
"private attribute?");
}
else if (SCOPE_TEST(SCOPE_PROTECTED)) {
noex = NOEX_PROTECTED;
}
else {
noex = NOEX_PUBLIC;
}
}
if (!rb_is_local_id(id) && !rb_is_const_id(id)) {
rb_name_error(id, "invalid attribute name `%s'", rb_id2name(id));
}
name = rb_id2name(id);
if (!name) {
rb_raise(rb_eArgError, "argument needs to be symbol or string");
}
SET_METHOD_SOURCE();
len = strlen(name)+2;
buf = ALLOCA_N(char,len);
snprintf(buf, len, "@%s", name);
attriv = rb_intern(buf);
if (read) {
rb_add_method(klass, id, NEW_IVAR(attriv), noex);
}
if (write) {
rb_add_method(klass, rb_id_attrset(id), NEW_ATTRSET(attriv), noex);
}
}
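/* Example (sketch): attr_accessor :name in Ruby corresponds roughly to
 * the C-level call
 *
 *     rb_attr(klass, rb_intern("name"), 1, 1, Qtrue);
 *
 * i.e. a reader and a writer backed by the instance variable @name. */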
extern int ruby_in_compile;
VALUE ruby_errinfo = Qnil;
extern NODE *ruby_eval_tree_begin;
extern NODE *ruby_eval_tree;
extern int ruby_nerrs;
VALUE rb_eLocalJumpError;
VALUE rb_eSysStackError;
extern VALUE ruby_top_self;
struct FRAME *ruby_frame;
struct SCOPE *ruby_scope;
static struct FRAME *top_frame;
static struct SCOPE *top_scope;
static unsigned long frame_unique = 0;
#define PUSH_FRAME() do { \
volatile struct FRAME _frame; \
_frame.prev = ruby_frame; \
_frame.tmp = 0; \
_frame.node = ruby_current_node; \
_frame.iter = ruby_iter->iter; \
_frame.argc = 0; \
_frame.flags = 0; \
_frame.uniq = frame_unique++; \
ruby_frame = &_frame
#define POP_FRAME() \
ruby_current_node = _frame.node; \
ruby_frame = _frame.prev; \
} while (0)
struct BLOCK {
NODE *var;
NODE *body;
VALUE self;
struct FRAME frame;
struct SCOPE *scope;
VALUE klass;
NODE *cref;
int iter;
int vmode;
int flags;
int uniq;
struct RVarmap *dyna_vars;
VALUE orig_thread;
VALUE wrapper;
VALUE block_obj;
struct BLOCK *outer;
struct BLOCK *prev;
};
#define BLOCK_D_SCOPE 1
#define BLOCK_LAMBDA 2
static struct BLOCK *ruby_block;
static unsigned long block_unique = 1;
#define PUSH_BLOCK(v,b) do { \
struct BLOCK _block; \
_block.var = (v); \
_block.body = (b); \
_block.self = self; \
_block.frame = *ruby_frame; \
_block.klass = ruby_class; \
_block.cref = ruby_cref; \
_block.frame.node = ruby_current_node;\
_block.scope = ruby_scope; \
_block.prev = ruby_block; \
_block.outer = ruby_block; \
_block.iter = ruby_iter->iter; \
_block.vmode = scope_vmode; \
_block.flags = BLOCK_D_SCOPE; \
_block.dyna_vars = ruby_dyna_vars; \
_block.wrapper = ruby_wrapper; \
_block.block_obj = 0; \
_block.uniq = (b)?block_unique++:0; \
if (b) { \
prot_tag->blkid = _block.uniq; \
} \
ruby_block = &_block
#define POP_BLOCK() \
ruby_block = _block.prev; \
} while (0)
struct RVarmap *ruby_dyna_vars;
#define PUSH_VARS() do { \
struct RVarmap * volatile _old; \
_old = ruby_dyna_vars; \
ruby_dyna_vars = 0
#define POP_VARS() \
if (_old && (ruby_scope->flags & SCOPE_DONT_RECYCLE)) {\
if (RBASIC(_old)->flags) /* unless it's already recycled */ \
FL_SET(_old, DVAR_DONT_RECYCLE); \
}\
ruby_dyna_vars = _old; \
} while (0)
#define DVAR_DONT_RECYCLE FL_USER2
#define DMETHOD_P() (ruby_frame->flags & FRAME_DMETH)
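/* Block-local ("dynamic") variables live in a linked list of struct
 * RVarmap nodes headed by ruby_dyna_vars; an entry with id == 0 marks
 * the beginning of a block scope (a "dvar header"). */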
static struct RVarmap*
new_dvar(id, value, prev)
ID id;
VALUE value;
struct RVarmap *prev;
{
NEWOBJ(vars, struct RVarmap);
OBJSETUP(vars, 0, T_VARMAP);
vars->id = id;
vars->val = value;
vars->next = prev;
return vars;
}
VALUE
rb_dvar_defined(id)
ID id;
{
struct RVarmap *vars = ruby_dyna_vars;
while (vars) {
if (vars->id == id) return Qtrue;
vars = vars->next;
}
return Qfalse;
}
VALUE
rb_dvar_curr(id)
ID id;
{
struct RVarmap *vars = ruby_dyna_vars;
while (vars) {
if (vars->id == 0) break;
if (vars->id == id) return Qtrue;
vars = vars->next;
}
return Qfalse;
}
VALUE
rb_dvar_ref(id)
ID id;
{
struct RVarmap *vars = ruby_dyna_vars;
while (vars) {
if (vars->id == id) {
return vars->val;
}
vars = vars->next;
}
return Qnil;
}
void
rb_dvar_push(id, value)
ID id;
VALUE value;
{
ruby_dyna_vars = new_dvar(id, value, ruby_dyna_vars);
}
static void
dvar_asgn_internal(id, value, curr)
ID id;
VALUE value;
int curr;
{
int n = 0;
struct RVarmap *vars = ruby_dyna_vars;
while (vars) {
if (curr && vars->id == 0) {
/* first null is a dvar header */
n++;
if (n == 2) break;
}
if (vars->id == id) {
vars->val = value;
return;
}
vars = vars->next;
}
if (!ruby_dyna_vars) {
ruby_dyna_vars = new_dvar(id, value, 0);
}
else {
vars = new_dvar(id, value, ruby_dyna_vars->next);
ruby_dyna_vars->next = vars;
}
}
static inline void
dvar_asgn(id, value)
ID id;
VALUE value;
{
dvar_asgn_internal(id, value, 0);
}
static inline void
dvar_asgn_curr(id, value)
ID id;
VALUE value;
{
dvar_asgn_internal(id, value, 1);
}
VALUE *
rb_svar(cnt)
int cnt;
{
struct RVarmap *vars = ruby_dyna_vars;
ID id;
if (!ruby_scope->local_tbl) return NULL;
if ((ID)cnt >= ruby_scope->local_tbl[0]) return NULL;
id = ruby_scope->local_tbl[cnt+1];
while (vars) {
if (vars->id == id) return &vars->val;
vars = vars->next;
}
if (ruby_scope->local_vars == 0) return NULL;
return &ruby_scope->local_vars[cnt];
}
struct iter {
int iter;
struct iter *prev;
};
static struct iter *ruby_iter;
#define ITER_NOT 0
#define ITER_PRE 1
#define ITER_CUR 2
#define ITER_PAS 3
#define PUSH_ITER(i) do { \
struct iter _iter; \
_iter.prev = ruby_iter; \
_iter.iter = (i); \
ruby_iter = &_iter
#define POP_ITER() \
ruby_iter = _iter.prev; \
} while (0)
struct tag {
rb_jmpbuf_t buf;
struct FRAME *frame;
struct iter *iter;
VALUE tag;
VALUE retval;
struct SCOPE *scope;
VALUE dst;
struct tag *prev;
int blkid;
};
static struct tag *prot_tag;
#define PUSH_TAG(ptag) do { \
struct tag _tag; \
_tag.retval = Qnil; \
_tag.frame = ruby_frame; \
_tag.iter = ruby_iter; \
_tag.prev = prot_tag; \
_tag.scope = ruby_scope; \
_tag.tag = ptag; \
_tag.dst = 0; \
_tag.blkid = 0; \
prot_tag = &_tag
#define PROT_EMPTY Qfalse /* 0 */
#define PROT_THREAD Qtrue /* 2 */
#define PROT_FUNC INT2FIX(0) /* 1 */
#define PROT_LOOP INT2FIX(1) /* 3 */
#define PROT_LAMBDA INT2FIX(2) /* 5 */
#define PROT_YIELD INT2FIX(3) /* 7 */
#if STACK_WIPE_SITES & 0x42
#ifdef __GNUC__
static inline int wipeAfter(int) __attribute__((always_inline));
#endif
static inline int wipeAfter(int status)
{
rb_gc_wipe_stack();
return status;
}
#else
#define wipeAfter(status) status
#endif
#if STACK_WIPE_SITES & 2
#define wipeAfterTag(status) wipeAfter(status)
#else
#define wipeAfterTag(status) status
#endif
#define EXEC_TAG_0() ruby_setjmp(((void)0), prot_tag->buf)
#define EXEC_TAG() wipeAfterTag(EXEC_TAG_0())
#define JUMP_TAG(st) do { \
ruby_frame = prot_tag->frame; \
ruby_iter = prot_tag->iter; \
ruby_longjmp(prot_tag->buf,(st)); \
} while (0)
#define POP_TAG() \
prot_tag = _tag.prev; \
} while (0)
#define TAG_DST() (_tag.dst == (VALUE)ruby_frame->uniq)
#define TAG_RETURN 0x1
#define TAG_BREAK 0x2
#define TAG_NEXT 0x3
#define TAG_RETRY 0x4
#define TAG_REDO 0x5
#define TAG_RAISE 0x6
#define TAG_THROW 0x7
#define TAG_FATAL 0x8
#define TAG_MASK 0xf
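/* Non-local exits (return, break, next, retry, redo, raise, throw,
 * fatal) are implemented with setjmp/longjmp: PUSH_TAG saves the
 * current frame/iter/scope and a jump buffer, EXEC_TAG arms the buffer
 * (yielding 0 on the initial pass and a TAG_* code when jumped to),
 * and JUMP_TAG longjmps back to the innermost protect tag. */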
VALUE ruby_class;
static VALUE ruby_wrapper; /* security wrapper */
#define PUSH_CLASS(c) do { \
volatile VALUE _class = ruby_class; \
ruby_class = (c)
#define POP_CLASS() ruby_class = _class; \
} while (0)
NODE *ruby_cref = 0;
NODE *ruby_top_cref;
#define PUSH_CREF(c) ruby_cref = NEW_CREF(c,ruby_cref)
#define POP_CREF() ruby_cref = ruby_cref->nd_next
#define PUSH_SCOPE() do { \
volatile int _vmode = scope_vmode; \
struct SCOPE * volatile _old; \
NEWOBJ(_scope, struct SCOPE); \
OBJSETUP(_scope, 0, T_SCOPE); \
_scope->local_tbl = 0; \
_scope->local_vars = 0; \
_scope->flags = 0; \
_old = ruby_scope; \
ruby_scope = _scope; \
scope_vmode = SCOPE_PUBLIC
rb_thread_t rb_curr_thread;
rb_thread_t rb_main_thread;
#define main_thread rb_main_thread
#define curr_thread rb_curr_thread
#ifndef STACK_FREE_SAFE_DEBUG
#define STACK_FREE_SAFE_DEBUG 0
#endif
#if STACK_FREE_SAFE_DEBUG
#define stack_free_safe(TH,MSG) _stack_free_safe(TH,MSG)
#else
#define stack_free_safe(TH,MSG) _stack_free_safe(TH)
#endif
static void stack_free_safe_all_dead_threads();
static void scope_dup _((struct SCOPE *));
#define POP_SCOPE() \
if (ruby_scope->flags & SCOPE_DONT_RECYCLE) {\
if (_old) scope_dup(_old); \
} \
if (!(ruby_scope->flags & SCOPE_MALLOC)) {\
ruby_scope->local_vars = 0; \
ruby_scope->local_tbl = 0; \
if (!(ruby_scope->flags & SCOPE_DONT_RECYCLE) && \
ruby_scope != top_scope) { \
rb_gc_force_recycle((VALUE)ruby_scope);\
} \
} \
ruby_scope->flags |= SCOPE_NOSTACK; \
ruby_scope = _old; \
scope_vmode = _vmode; \
} while (0)
static VALUE rb_eval _((VALUE,NODE*));
static VALUE eval _((VALUE,VALUE,volatile VALUE,const char* volatile,int));
static NODE *compile _((VALUE, const char*, int));
static VALUE rb_yield_0
_((volatile VALUE, volatile VALUE, VALUE, int, volatile int));
#if STACK_WIPE_SITES & 0x20
#define wipeBeforeYield() rb_gc_wipe_stack()
#else
#define wipeBeforeYield() (void)0
#endif
#define YIELD_LAMBDA_CALL 1
#define YIELD_PROC_CALL 2
#define YIELD_PUBLIC_DEF 4
#define YIELD_FUNC_AVALUE 1
#define YIELD_FUNC_SVALUE 2
#define YIELD_FUNC_LAMBDA 3
static VALUE rb_call _((VALUE,VALUE,ID,int,const VALUE*,int,VALUE));
static VALUE module_setup _((VALUE,NODE *volatile));
static VALUE massign _((VALUE,NODE*,VALUE,int));
static void assign _((VALUE,NODE*,VALUE,int));
typedef struct event_hook {
rb_event_hook_func_t func;
rb_event_t events;
struct event_hook *next;
} rb_event_hook_t;
static rb_event_hook_t *event_hooks;
#define EXEC_EVENT_HOOK(event, node, self, id, klass) \
do { \
rb_event_hook_t *hook = event_hooks; \
rb_event_hook_func_t hook_func; \
rb_event_t events; \
\
while (hook) { \
hook_func = hook->func; \
events = hook->events; \
hook = hook->next; \
if (events & event) \
(*hook_func)(event, node, self, id, klass); \
} \
} while (0)
static VALUE trace_func = 0;
static int tracing = 0;
static void call_trace_func _((rb_event_t,NODE*,VALUE,ID,VALUE));
static void
#ifdef HAVE_STDARG_PROTOTYPES
warn_printf(const char *fmt, ...)
#else
warn_printf(fmt, va_alist)
const char *fmt;
va_dcl
#endif
{
char buf[BUFSIZ];
va_list args;
va_init_list(args, fmt);
vsnprintf(buf, BUFSIZ, fmt, args);
va_end(args);
rb_write_error(buf);
}
#define warn_print(x) rb_write_error(x)
#define warn_print2(x,l) rb_write_error2(x,l)
static void
error_pos()
{
ruby_set_current_source();
if (ruby_sourcefile) {
if (ruby_frame->last_func) {
warn_printf("%s:%d:in `%s'", ruby_sourcefile, ruby_sourceline,
rb_id2name(ruby_frame->orig_func));
}
else if (ruby_sourceline == 0) {
warn_printf("%s", ruby_sourcefile);
}
else {
warn_printf("%s:%d", ruby_sourcefile, ruby_sourceline);
}
}
}
VALUE rb_check_backtrace(VALUE);
static VALUE
get_backtrace(info)
VALUE info;
{
if (NIL_P(info)) return Qnil;
info = rb_funcall(info, rb_intern("backtrace"), 0);
if (NIL_P(info)) return Qnil;
return rb_check_backtrace(info);
}
static void
set_backtrace(info, bt)
VALUE info, bt;
{
rb_funcall(info, rb_intern("set_backtrace"), 1, bt);
}
static void
error_print()
{
VALUE errat;
volatile VALUE eclass, e;
const char * einfo;
long elen;
if (NIL_P(ruby_errinfo)) return;
PUSH_TAG(PROT_EMPTY);
errat = EXEC_TAG() ? Qnil : get_backtrace(ruby_errinfo);
if (EXEC_TAG()) goto error;
if (NIL_P(errat)){
ruby_set_current_source();
if (ruby_sourcefile)
warn_printf("%s:%d", ruby_sourcefile, ruby_sourceline);
else
warn_printf("%d", ruby_sourceline);
}
else if (RARRAY(errat)->len == 0) {
error_pos();
}
else {
VALUE mesg = RARRAY(errat)->ptr[0];
if (NIL_P(mesg)) error_pos();
else {
warn_print2(RSTRING(mesg)->ptr, RSTRING(mesg)->len);
}
}
eclass = CLASS_OF(ruby_errinfo);
if (EXEC_TAG() == 0) {
e = rb_funcall(ruby_errinfo, rb_intern("message"), 0, 0);
StringValue(e);
einfo = RSTRING(e)->ptr;
elen = RSTRING(e)->len;
}
else {
einfo = "";
elen = 0;
}
if (EXEC_TAG()) goto error;
if (eclass == rb_eRuntimeError && elen == 0) {
warn_print(": unhandled exception\n");
}
else {
VALUE epath;
epath = rb_class_name(eclass);
if (elen == 0) {
warn_print(": ");
warn_print2(RSTRING(epath)->ptr, RSTRING(epath)->len);
warn_print("\n");
}
else {
char *tail = 0;
long len = elen;
if (RSTRING(epath)->ptr[0] == '#') epath = 0;
if ((tail = memchr(einfo, '\n', elen)) != 0) {
len = tail - einfo;
tail++; /* skip newline */
}
warn_print(": ");
warn_print2(einfo, len);
if (epath) {
warn_print(" (");
warn_print2(RSTRING(epath)->ptr, RSTRING(epath)->len);
warn_print(")\n");
}
if (tail && elen>len+1) {
warn_print2(tail, elen-len-1);
if (einfo[elen-1] != '\n') warn_print2("\n", 1);
}
}
}
if (!NIL_P(errat)) {
long i;
struct RArray *ep = RARRAY(errat);
int truncate = eclass == rb_eSysStackError;
#define TRACE_MAX (TRACE_HEAD+TRACE_TAIL+5)
#define TRACE_HEAD 8
#define TRACE_TAIL 5
ep = RARRAY(errat);
for (i=1; i<ep->len; i++) {
if (TYPE(ep->ptr[i]) == T_STRING) {
warn_printf("\tfrom %s\n", RSTRING(ep->ptr[i])->ptr);
}
if (truncate && i == TRACE_HEAD && ep->len > TRACE_MAX) {
warn_printf("\t ... %ld levels...\n",
ep->len - TRACE_HEAD - TRACE_TAIL);
i = ep->len - TRACE_TAIL;
}
}
}
error:
POP_TAG();
}
#if defined(__APPLE__)
#define environ (*_NSGetEnviron())
#elif !defined(_WIN32) && !defined(__MACOS__) || defined(_WIN32_WCE)
extern char **environ;
#endif
char **rb_origenviron;
void rb_call_inits _((void));
void Init_stack _((VALUE*));
void Init_heap _((void));
void Init_ext _((void));
#ifdef HAVE_NATIVETHREAD
static rb_nativethread_t ruby_thid;
int
is_ruby_native_thread() {
return NATIVETHREAD_EQUAL(ruby_thid, NATIVETHREAD_CURRENT());
}
# ifdef HAVE_NATIVETHREAD_KILL
void
ruby_native_thread_kill(sig)
int sig;
{
NATIVETHREAD_KILL(ruby_thid, sig);
}
# endif
#endif
void
ruby_init()
{
static int initialized = 0;
static struct FRAME frame;
static struct iter iter;
int state;
if (initialized)
return;
initialized = 1;
#ifdef HAVE_NATIVETHREAD
ruby_thid = NATIVETHREAD_CURRENT();
#endif
ruby_frame = top_frame = &frame;
ruby_iter = &iter;
#ifdef __MACOS__
rb_origenviron = 0;
#else
rb_origenviron = environ;
#endif
Init_stack((void*)&state);
Init_heap();
PUSH_SCOPE();
ruby_scope->local_vars = 0;
ruby_scope->local_tbl = 0;
top_scope = ruby_scope;
/* default visibility is private at toplevel */
SCOPE_SET(SCOPE_PRIVATE);
PUSH_TAG(PROT_EMPTY);
if ((state = EXEC_TAG()) == 0) {
rb_call_inits();
ruby_class = rb_cObject;
ruby_frame->self = ruby_top_self;
ruby_top_cref = NEW_CREF(rb_cObject, 0);
ruby_cref = ruby_top_cref;
rb_define_global_const("TOPLEVEL_BINDING", rb_f_binding(ruby_top_self));
#ifdef __MACOS__
_macruby_init();
#elif defined(__VMS)
_vmsruby_init();
#endif
ruby_prog_init();
ALLOW_INTS;
}
POP_TAG();
if (state) {
error_print();
exit(EXIT_FAILURE);
}
POP_SCOPE();
ruby_scope = top_scope;
top_scope->flags &= ~SCOPE_NOSTACK;
ruby_running = 1;
}
static VALUE
eval_tree(self, node)
VALUE self;
NODE *node;
{
NODE *beg_tree = ruby_eval_tree_begin;
ruby_eval_tree_begin = 0;
if (beg_tree) {
rb_eval(self, beg_tree);
}
if (!node) return Qnil;
return rb_eval(self, node);
}
int ruby_in_eval;
static void rb_thread_cleanup _((void));
static void rb_thread_wait_other_threads _((void));
static int thread_no_ensure _((void));
static VALUE exception_error;
static VALUE sysstack_error;
static int
sysexit_status(err)
VALUE err;
{
VALUE st = rb_iv_get(err, "status");
return NUM2INT(st);
}
static int
error_handle(ex)
int ex;
{
int status = EXIT_FAILURE;
rb_thread_t th = curr_thread;
if (rb_thread_set_raised(th))
return EXIT_FAILURE;
switch (ex & TAG_MASK) {
case 0:
status = EXIT_SUCCESS;
break;
case TAG_RETURN:
error_pos();
warn_print(": unexpected return\n");
break;
case TAG_NEXT:
error_pos();
warn_print(": unexpected next\n");
break;
case TAG_BREAK:
error_pos();
warn_print(": unexpected break\n");
break;
case TAG_REDO:
error_pos();
warn_print(": unexpected redo\n");
break;
case TAG_RETRY:
error_pos();
warn_print(": retry outside of rescue clause\n");
break;
case TAG_THROW:
if (prot_tag && prot_tag->frame && prot_tag->frame->node) {
NODE *tag = prot_tag->frame->node;
warn_printf("%s:%d: uncaught throw\n",
tag->nd_file, nd_line(tag));
}
else {
error_pos();
warn_printf(": unexpected throw\n");
}
break;
case TAG_RAISE:
case TAG_FATAL:
if (rb_obj_is_kind_of(ruby_errinfo, rb_eSystemExit)) {
status = sysexit_status(ruby_errinfo);
}
else if (rb_obj_is_instance_of(ruby_errinfo, rb_eSignal)) {
/* no message when exiting by signal */
}
else {
error_print();
}
break;
default:
rb_bug("Unknown longjmp status %d", ex);
break;
}
rb_thread_reset_raised(th);
return status;
}
void
ruby_options(argc, argv)
int argc;
char **argv;
{
int state;
Init_stack((void*)&state);
PUSH_TAG(PROT_EMPTY);
if ((state = EXEC_TAG()) == 0) {
ruby_process_options(argc, argv);
}
else {
trace_func = 0;
tracing = 0;
exit(error_handle(state));
}
POP_TAG();
}
void rb_exec_end_proc _((void));
static void
ruby_finalize_0()
{
PUSH_TAG(PROT_EMPTY);
if (EXEC_TAG() == 0) {
rb_trap_exit();
}
POP_TAG();
rb_exec_end_proc();
}
static void
ruby_finalize_1()
{
signal(SIGINT, SIG_DFL);
ruby_errinfo = 0;
rb_gc_call_finalizer_at_exit();
trace_func = 0;
tracing = 0;
}
void
ruby_finalize()
{
ruby_finalize_0();
ruby_finalize_1();
}
int
ruby_cleanup(exArg)
int exArg;
{
int state;
volatile VALUE errs[2];
unsigned nerr;
volatile int ex = exArg;
errs[1] = ruby_errinfo;
ruby_safe_level = 0;
Init_stack((void *)&state);
ruby_finalize_0();
errs[0] = ruby_errinfo;
PUSH_TAG(PROT_EMPTY);
PUSH_ITER(ITER_NOT);
if ((state = EXEC_TAG()) == 0) {
rb_thread_cleanup();
rb_thread_wait_other_threads();
}
else if (ex == 0) {
ex = state;
}
POP_ITER();
ruby_errinfo = errs[1];
ex = error_handle(ex);
ruby_finalize_1();
POP_TAG();
for (nerr = 0; nerr < sizeof(errs) / sizeof(errs[0]); ++nerr) {
VALUE err = errs[nerr];
if (!RTEST(err)) continue;
if (rb_obj_is_kind_of(err, rb_eSystemExit)) {
return sysexit_status(err);
}
else if (rb_obj_is_kind_of(err, rb_eSignal)) {
VALUE sig = rb_iv_get(err, "signo");
ruby_default_signal(NUM2INT(sig));
}
else if (ex == 0) {
ex = 1;
}
}
#if EXIT_SUCCESS != 0 || EXIT_FAILURE != 1
switch (ex) {
#if EXIT_SUCCESS != 0
case 0: return EXIT_SUCCESS;
#endif
#if EXIT_FAILURE != 1
case 1: return EXIT_FAILURE;
#endif
}
#endif
return ex;
}
static int
ruby_exec_internal()
{
int state;
PUSH_TAG(PROT_EMPTY);
PUSH_ITER(ITER_NOT);
/* default visibility is private at toplevel */
SCOPE_SET(SCOPE_PRIVATE);
if ((state = EXEC_TAG()) == 0) {
eval_tree(ruby_top_self, ruby_eval_tree);
}
POP_ITER();
POP_TAG();
return state;
}
void
ruby_stop(ex)
int ex;
{
exit(ruby_cleanup(ex));
}
int
ruby_exec()
{
volatile NODE *tmp;
Init_stack((void*)&tmp);
return ruby_exec_internal();
}
void
ruby_run()
{
int state;
static int ex;
if (ruby_nerrs > 0) exit(EXIT_FAILURE);
state = ruby_exec();
if (state && !ex) ex = state;
ruby_stop(ex);
}
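/* Example (sketch): a minimal embedding host drives these entry points
 * roughly as the interpreter's own main() does:
 *
 *     int main(int argc, char **argv)
 *     {
 *         ruby_init();
 *         ruby_options(argc, argv);   // parse options, compile the script
 *         ruby_run();                 // execute; exits via ruby_stop()
 *         return 0;                   // not reached
 *     }
 */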
static void
compile_error(at)
const char *at;
{
VALUE str;
ruby_nerrs = 0;
str = rb_str_buf_new2("compile error");
if (at) {
rb_str_buf_cat2(str, " in ");
rb_str_buf_cat2(str, at);
}
rb_str_buf_cat(str, "\n", 1);
if (!NIL_P(ruby_errinfo)) {
rb_str_append(str, rb_obj_as_string(ruby_errinfo));
}
rb_exc_raise(rb_exc_new3(rb_eSyntaxError, str));
}
VALUE
rb_eval_string(str)
const char *str;
{
VALUE v;
NODE *oldsrc = ruby_current_node;
ruby_current_node = 0;
ruby_sourcefile = rb_source_filename("(eval)");
v = eval(ruby_top_self, rb_str_new2(str), Qnil, 0, 0);
ruby_current_node = oldsrc;
return v;
}
VALUE
rb_eval_string_protect(str, state)
const char *str;
int *state;
{
return rb_protect((VALUE (*)_((VALUE)))rb_eval_string, (VALUE)str, state);
}
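/* Example (sketch): callers that must not be unwound by a Ruby
 * exception use the protected variant:
 *
 *     int state;
 *     VALUE v = rb_eval_string_protect("[1, 2, 3].length", &state);
 *     if (state) {
 *         // an exception or other non-local jump occurred;
 *         // for exceptions, ruby_errinfo holds the error object
 *     }
 */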
VALUE
rb_eval_string_wrap(str, state)
const char *str;
int *state;
{
int status;
VALUE self = ruby_top_self;
VALUE wrapper = ruby_wrapper;
VALUE val;
PUSH_CLASS(ruby_wrapper = rb_module_new());
ruby_top_self = rb_obj_clone(ruby_top_self);
rb_extend_object(ruby_top_self, ruby_wrapper);
PUSH_FRAME();
ruby_frame->last_func = 0;
ruby_frame->last_class = 0;
ruby_frame->self = self;
PUSH_CREF(ruby_wrapper);
PUSH_SCOPE();
val = rb_eval_string_protect(str, &status);
ruby_top_self = self;
POP_SCOPE();
POP_FRAME();
POP_CLASS();
ruby_wrapper = wrapper;
if (state) {
*state = status;
}
else if (status) {
JUMP_TAG(status);
}
return val;
}
NORETURN(static void localjump_error(const char*, VALUE, int));
static void
localjump_error(mesg, value, reason)
const char *mesg;
VALUE value;
int reason;
{
VALUE exc = rb_exc_new2(rb_eLocalJumpError, mesg);
ID id;
rb_iv_set(exc, "@exit_value", value);
switch (reason) {
case TAG_BREAK:
id = rb_intern("break"); break;
case TAG_REDO:
id = rb_intern("redo"); break;
case TAG_RETRY:
id = rb_intern("retry"); break;
case TAG_NEXT:
id = rb_intern("next"); break;
case TAG_RETURN:
id = rb_intern("return"); break;
default:
id = rb_intern("noreason"); break;
}
rb_iv_set(exc, "@reason", ID2SYM(id));
rb_exc_raise(exc);
}
/*
 * call-seq:
* local_jump_error.exit_value => obj
*
* Returns the exit value associated with this +LocalJumpError+.
*/
static VALUE
localjump_xvalue(exc)
VALUE exc;
{
return rb_iv_get(exc, "@exit_value");
}
/*
* call-seq:
* local_jump_error.reason => symbol
*
* The reason this block was terminated:
* :break, :redo, :retry, :next, :return, or :noreason.
*/
static VALUE
localjump_reason(exc)
VALUE exc;
{
return rb_iv_get(exc, "@reason");
}
NORETURN(static void jump_tag_but_local_jump _((int,VALUE)));
static void
jump_tag_but_local_jump(state, val)
int state;
VALUE val;
{
if (val == Qundef) val = prot_tag->retval;
switch (state) {
case 0:
break;
case TAG_RETURN:
localjump_error("unexpected return", val, state);
break;
case TAG_BREAK:
localjump_error("unexpected break", val, state);
break;
case TAG_NEXT:
localjump_error("unexpected next", val, state);
break;
case TAG_REDO:
localjump_error("unexpected redo", Qnil, state);
break;
case TAG_RETRY:
localjump_error("retry outside of rescue clause", Qnil, state);
break;
default:
break;
}
JUMP_TAG(state);
}
VALUE
rb_eval_cmd(cmd, arg, level)
VALUE cmd, arg;
int level;
{
int state;
VALUE val;
struct SCOPE * volatile saved_scope;
volatile int safe = ruby_safe_level;
if (OBJ_TAINTED(cmd)) {
level = 4;
}
if (TYPE(cmd) != T_STRING) {
PUSH_ITER(ITER_NOT);
PUSH_TAG(PROT_EMPTY);
ruby_safe_level = level;
if ((state = EXEC_TAG()) == 0) {
val = rb_funcall2(cmd, rb_intern("call"), RARRAY(arg)->len, RARRAY(arg)->ptr);
}
ruby_safe_level = safe;
POP_TAG();
POP_ITER();
if (state) JUMP_TAG(state);
return val;
}
saved_scope = ruby_scope;
ruby_scope = top_scope;
PUSH_FRAME();
ruby_frame->last_func = 0;
ruby_frame->last_class = 0;
ruby_frame->self = ruby_top_self;
PUSH_CREF(ruby_wrapper ? ruby_wrapper : rb_cObject);
ruby_safe_level = level;
PUSH_TAG(PROT_EMPTY);
val = (state = EXEC_TAG()) ? Qnil : eval(ruby_top_self, cmd, Qnil, 0, 0);
if (ruby_scope->flags & SCOPE_DONT_RECYCLE)
scope_dup(saved_scope);
ruby_scope = saved_scope;
ruby_safe_level = safe;
POP_TAG();
POP_FRAME();
if (state) jump_tag_but_local_jump(state, val);
return val;
}
#define ruby_cbase (ruby_cref->nd_clss)
static VALUE
ev_const_defined(cref, id, self)
NODE *cref;
ID id;
VALUE self;
{
NODE *cbase = cref;
VALUE result;
while (cbase && cbase->nd_next) {
struct RClass *klass = RCLASS(cbase->nd_clss);
if (!NIL_P(klass)) {
if (klass->iv_tbl && st_lookup(klass->iv_tbl, id, &result)) {
if (result == Qundef && NIL_P(rb_autoload_p((VALUE)klass, id))) {
return Qfalse;
}
return Qtrue;
}
}
cbase = cbase->nd_next;
}
return rb_const_defined(cref->nd_clss, id);
}
NOINLINE(static VALUE ev_const_get _((NODE *cref, ID id, VALUE self)));
NOINLINE(static void eval_cvar_set _((NODE *node, VALUE result, int warn)));
NOINLINE(static void eval_cdecl _((VALUE self, NODE *node, VALUE value)));
static VALUE
ev_const_get(cref, id, self)
NODE *cref;
ID id;
VALUE self;
{
NODE *cbase = cref;
VALUE result;
while (cbase && cbase->nd_next) {
VALUE klass = cbase->nd_clss;
if (!NIL_P(klass)) {
while (RCLASS(klass)->iv_tbl &&
st_lookup(RCLASS(klass)->iv_tbl, id, &result)) {
if (result == Qundef) {
if (!RTEST(rb_autoload_load(klass, id))) break;
continue;
}
return result;
}
}
cbase = cbase->nd_next;
}
return rb_const_get(NIL_P(cref->nd_clss) ? CLASS_OF(self): cref->nd_clss, id);
}
static VALUE
cvar_cbase()
{
NODE *cref = ruby_cref;
while (cref && cref->nd_next && (NIL_P(cref->nd_clss) || FL_TEST(cref->nd_clss, FL_SINGLETON))) {
cref = cref->nd_next;
if (!cref->nd_next) {
rb_warn("class variable access from toplevel singleton method");
}
}
if (NIL_P(cref->nd_clss)) {
rb_raise(rb_eTypeError, "no class variables available");
}
return cref->nd_clss;
}
/*
* call-seq:
* Module.nesting => array
*
* Returns the list of +Modules+ nested at the point of call.
*
* module M1
* module M2
* $a = Module.nesting
* end
* end
* $a #=> [M1::M2, M1]
* $a[0].name #=> "M1::M2"
*/
static VALUE
rb_mod_nesting()
{
NODE *cbase = ruby_cref;
VALUE ary = rb_ary_new();
while (cbase && cbase->nd_next) {
if (!NIL_P(cbase->nd_clss)) rb_ary_push(ary, cbase->nd_clss);
cbase = cbase->nd_next;
}
if (ruby_wrapper && RARRAY(ary)->len == 0) {
rb_ary_push(ary, ruby_wrapper);
}
return ary;
}
/*
* call-seq:
* Module.constants => array
*
* Returns an array of the names of all constants defined in the
* system. This list includes the names of all modules and classes.
*
* p Module.constants.sort[1..5]
*
* <em>produces:</em>
*
* ["ARGV", "ArgumentError", "Array", "Bignum", "Binding"]
*/
static VALUE
rb_mod_s_constants()
{
NODE *cbase = ruby_cref;
void *data = 0;
while (cbase) {
if (!NIL_P(cbase->nd_clss)) {
data = rb_mod_const_at(cbase->nd_clss, data);
}
cbase = cbase->nd_next;
}
if (!NIL_P(ruby_cbase)) {
data = rb_mod_const_of(ruby_cbase, data);
}
return rb_const_list(data);
}
void
rb_frozen_class_p(klass)
VALUE klass;
{
const char *desc = "something(?!)";
if (OBJ_FROZEN(klass)) {
if (FL_TEST(klass, FL_SINGLETON))
desc = "object";
else {
switch (TYPE(klass)) {
case T_MODULE:
case T_ICLASS:
desc = "module"; break;
case T_CLASS:
desc = "class"; break;
}
}
rb_error_frozen(desc);
}
}
void
rb_undef(klass, id)
VALUE klass;
ID id;
{
VALUE origin;
NODE *body;
if (ruby_cbase == rb_cObject && klass == rb_cObject) {
rb_secure(4);
}
if (ruby_safe_level >= 4 && !OBJ_TAINTED(klass)) {
rb_raise(rb_eSecurityError, "Insecure: can't undef `%s'", rb_id2name(id));
}
rb_frozen_class_p(klass);
if (id == __id__ || id == __send__ || id == init) {
rb_warn("undefining `%s' may cause serious problem", rb_id2name(id));
}
body = search_method(klass, id, &origin);
if (!body || !body->nd_body) {
const char *s0 = " class";
VALUE c = klass;
if (FL_TEST(c, FL_SINGLETON)) {
VALUE obj = rb_iv_get(klass, "__attached__");
switch (TYPE(obj)) {
case T_MODULE:
case T_CLASS:
c = obj;
s0 = "";
}
}
else if (TYPE(c) == T_MODULE) {
s0 = " module";
}
rb_name_error(id, "undefined method `%s' for%s `%s'",
rb_id2name(id),s0,rb_class2name(c));
}
rb_add_method(klass, id, 0, NOEX_PUBLIC);
if (FL_TEST(klass, FL_SINGLETON)) {
rb_funcall(rb_iv_get(klass, "__attached__"),
singleton_undefined, 1, ID2SYM(id));
}
else {
rb_funcall(klass, undefined, 1, ID2SYM(id));
}
}
/*
* call-seq:
* undef_method(symbol) => self
*
* Prevents the current class from responding to calls to the named
* method. Contrast this with <code>remove_method</code>, which deletes
* the method from the particular class; Ruby will still search
* superclasses and mixed-in modules for a possible receiver.
*
* class Parent
* def hello
* puts "In parent"
* end
* end
* class Child < Parent
* def hello
* puts "In child"
* end
* end
*
*
* c = Child.new
* c.hello
*
*
* class Child
* remove_method :hello # remove from child, still in parent
* end
* c.hello
*
*
* class Child
* undef_method :hello # prevent any calls to 'hello'
* end
* c.hello
*
* <em>produces:</em>
*
* In child
* In parent
* prog.rb:23: undefined method `hello' for #<Child:0x401b3bb4> (NoMethodError)
*/
static VALUE
rb_mod_undef_method(argc, argv, mod)
int argc;
VALUE *argv;
VALUE mod;
{
int i;
for (i=0; i<argc; i++) {
rb_undef(mod, rb_to_id(argv[i]));
}
return mod;
}
void
rb_alias(klass, name, def)
VALUE klass;
ID name, def;
{
VALUE origin = 0;
NODE *orig, *body, *node;
VALUE singleton = 0;
st_data_t data;
rb_frozen_class_p(klass);
if (name == def) return;
if (klass == rb_cObject) {
rb_secure(4);
}
orig = search_method(klass, def, &origin);
if (!orig || !orig->nd_body) {
if (TYPE(klass) == T_MODULE) {
orig = search_method(rb_cObject, def, &origin);
}
}
if (!orig || !orig->nd_body) {
print_undef(klass, def);
}
if (FL_TEST(klass, FL_SINGLETON)) {
singleton = rb_iv_get(klass, "__attached__");
}
body = orig->nd_body;
orig->nd_cnt++;
if (nd_type(body) == NODE_FBODY) { /* was alias */
def = body->nd_mid;
origin = body->nd_orig;
body = body->nd_head;
}
rb_clear_cache_by_id(name);
if (RTEST(ruby_verbose) && st_lookup(RCLASS(klass)->m_tbl, name, &data)) {
node = (NODE *)data;
if (node->nd_cnt == 0 && node->nd_body) {
rb_warning("discarding old %s", rb_id2name(name));
}
}
st_insert(RCLASS(klass)->m_tbl, name,
(st_data_t)NEW_METHOD(NEW_FBODY(body, def, origin),
NOEX_WITH_SAFE(orig->nd_noex)));
if (!ruby_running) return;
if (singleton) {
rb_funcall(singleton, singleton_added, 1, ID2SYM(name));
}
else {
rb_funcall(klass, added, 1, ID2SYM(name));
}
}
/*
* call-seq:
* alias_method(new_name, old_name) => self
*
* Makes <i>new_name</i> a new copy of the method <i>old_name</i>. This can
* be used to retain access to methods that are overridden.
*
* module Mod
* alias_method :orig_exit, :exit
* def exit(code=0)
* puts "Exiting with code #{code}"
* orig_exit(code)
* end
* end
* include Mod
* exit(99)
*
* <em>produces:</em>
*
* Exiting with code 99
*/
static VALUE
rb_mod_alias_method(mod, newname, oldname)
VALUE mod, newname, oldname;
{
rb_alias(mod, rb_to_id(newname), rb_to_id(oldname));
return mod;
}
NODE *
rb_copy_node_scope(node, rval)
NODE *node;
NODE *rval;
{
NODE *copy;
SET_METHOD_SOURCE();
copy=NEW_NODE(NODE_SCOPE,0,rval,node->nd_next);
if (node->nd_tbl) {
copy->nd_tbl = ALLOC_N(ID, node->nd_tbl[0]+1);
MEMCPY(copy->nd_tbl, node->nd_tbl, ID, node->nd_tbl[0]+1);
}
else {
copy->nd_tbl = 0;
}
return copy;
}
#ifdef C_ALLOCA
# define TMP_PROTECT NODE * volatile tmp__protect_tmp=0
# define TMP_ALLOC(n) \
(tmp__protect_tmp = NEW_NODE(NODE_ALLOCA, \
ALLOC_N(VALUE,n),tmp__protect_tmp,n), \
(void*)tmp__protect_tmp->nd_head)
#else
# define TMP_PROTECT typedef int foobazzz
# define TMP_ALLOC(n) ALLOCA_N(VALUE,n)
#endif
#define SETUP_ARGS0(anode,extra) do {\
NODE *n = anode;\
if (!n) {\
argc = 0;\
argv = 0;\
}\
else if (nd_type(n) == NODE_ARRAY) {\
argc=anode->nd_alen;\
if (argc > 0) {\
int i;\
n = anode;\
argv = TMP_ALLOC(argc+extra);\
for (i=0;i<argc;i++) {\
argv[i] = rb_eval(self,n->nd_head);\
n=n->nd_next;\
}\
}\
else {\
argc = 0;\
argv = 0;\
}\
}\
else {\
VALUE args = rb_eval(self,n);\
if (TYPE(args) != T_ARRAY)\
args = rb_ary_to_ary(args);\
argc = RARRAY(args)->len;\
argv = TMP_ALLOC(argc+extra);\
MEMCPY(argv, RARRAY(args)->ptr, VALUE, argc);\
}\
} while (0)
#define SETUP_ARGS(anode) SETUP_ARGS0(anode,0)
#define BEGIN_CALLARGS do {\
struct BLOCK *tmp_block = ruby_block;\
int tmp_iter = ruby_iter->iter;\
switch (tmp_iter) {\
case ITER_PRE:\
if (ruby_block) ruby_block = ruby_block->outer;\
case ITER_PAS:\
tmp_iter = ITER_NOT;\
}\
PUSH_ITER(tmp_iter)
#define END_CALLARGS \
ruby_block = tmp_block;\
POP_ITER();\
} while (0)
#define MATCH_DATA *rb_svar(node->nd_cnt)
static const char* is_defined _((VALUE, NODE*, char*));
static const char*
arg_defined(self, node, buf, type)
VALUE self;
NODE *node;
char *buf;
char *type;
{
int argc;
int i;
if (!node) return type; /* no args */
if (nd_type(node) == NODE_ARRAY) {
argc=node->nd_alen;
if (argc > 0) {
for (i=0;i<argc;i++) {
if (!is_defined(self, node->nd_head, buf))
return 0;
node = node->nd_next;
}
}
}
else if (!is_defined(self, node, buf)) {
return 0;
}
return type;
}
static const char*
is_defined(self, node, buf)
VALUE self;
NODE *node;
char *buf;
{
VALUE val;
int state;
again:
if (!node) return "expression";
switch (nd_type(node)) {
case NODE_SUPER:
case NODE_ZSUPER:
if (ruby_frame->last_func == 0) return 0;
else if (ruby_frame->last_class == 0) return 0;
val = ruby_frame->last_class;
if (rb_method_boundp(RCLASS(val)->super, ruby_frame->orig_func, 0)) {
if (nd_type(node) == NODE_SUPER) {
return arg_defined(self, node->nd_args, buf, "super");
}
return "super";
}
break;
case NODE_VCALL:
case NODE_FCALL:
val = self;
goto check_bound;
case NODE_ATTRASGN:
val = self;
if (node->nd_recv == (NODE *)1) goto check_bound;
case NODE_CALL:
PUSH_TAG(PROT_EMPTY);
if ((state = EXEC_TAG()) == 0) {
val = rb_eval(self, node->nd_recv);
}
POP_TAG();
if (state) {
ruby_errinfo = Qnil;
return 0;
}
check_bound:
{
int call = nd_type(node)==NODE_CALL;
val = CLASS_OF(val);
if (call) {
int noex;
ID id = node->nd_mid;
if (!rb_get_method_body(&val, &id, &noex))
break;
if ((noex & NOEX_PRIVATE))
break;
if ((noex & NOEX_PROTECTED) &&
!rb_obj_is_kind_of(self, rb_class_real(val)))
break;
}
else if (!rb_method_boundp(val, node->nd_mid, call))
break;
return arg_defined(self, node->nd_args, buf,
nd_type(node) == NODE_ATTRASGN ?
"assignment" : "method");
}
break;
case NODE_MATCH2:
case NODE_MATCH3:
return "method";
case NODE_YIELD:
if (rb_block_given_p()) {
return "yield";
}
break;
case NODE_SELF:
return "self";
case NODE_NIL:
return "nil";
case NODE_TRUE:
return "true";
case NODE_FALSE:
return "false";
case NODE_ATTRSET:
case NODE_OP_ASGN1:
case NODE_OP_ASGN2:
case NODE_OP_ASGN_OR:
case NODE_OP_ASGN_AND:
case NODE_MASGN:
case NODE_LASGN:
case NODE_DASGN:
case NODE_DASGN_CURR:
case NODE_GASGN:
case NODE_IASGN:
case NODE_CDECL:
case NODE_CVDECL:
case NODE_CVASGN:
return "assignment";
case NODE_LVAR:
return "local-variable";
case NODE_DVAR:
return "local-variable(in-block)";
case NODE_GVAR:
if (rb_gvar_defined(node->nd_entry)) {
return "global-variable";
}
break;
case NODE_IVAR:
if (rb_ivar_defined(self, node->nd_vid)) {
return "instance-variable";
}
break;
case NODE_CONST:
if (ev_const_defined(ruby_cref, node->nd_vid, self)) {
return "constant";
}
break;
case NODE_CVAR:
if (rb_cvar_defined(cvar_cbase(), node->nd_vid)) {
return "class variable";
}
break;
case NODE_COLON2:
PUSH_TAG(PROT_EMPTY);
if ((state = EXEC_TAG()) == 0) {
val = rb_eval(self, node->nd_head);
}
POP_TAG();
if (state) {
ruby_errinfo = Qnil;
return 0;
}
else {
switch (TYPE(val)) {
case T_CLASS:
case T_MODULE:
if (rb_const_defined_from(val, node->nd_mid))
return "constant";
break;
default:
if (rb_method_boundp(CLASS_OF(val), node->nd_mid, 1)) {
return "method";
}
}
}
break;
case NODE_COLON3:
if (rb_const_defined_from(rb_cObject, node->nd_mid)) {
return "constant";
}
break;
case NODE_NTH_REF:
if (RTEST(rb_reg_nth_defined(node->nd_nth, MATCH_DATA))) {
sprintf(buf, "$%d", (int)node->nd_nth);
return buf;
}
break;
case NODE_BACK_REF:
if (RTEST(rb_reg_nth_defined(0, MATCH_DATA))) {
sprintf(buf, "$%c", (char)node->nd_nth);
return buf;
}
break;
case NODE_NEWLINE:
node = node->nd_next;
goto again;
default:
PUSH_TAG(PROT_EMPTY);
if ((state = EXEC_TAG()) == 0) {
rb_eval(self, node);
}
POP_TAG();
if (!state) {
return "expression";
}
ruby_errinfo = Qnil;
break;
}
return 0;
}
static int handle_rescue _((VALUE,NODE*));
static void blk_free();
static VALUE
rb_obj_is_proc(proc)
VALUE proc;
{
if (TYPE(proc) == T_DATA && RDATA(proc)->dfree == (RUBY_DATA_FUNC)blk_free) {
return Qtrue;
}
return Qfalse;
}
void
rb_add_event_hook(func, events)
rb_event_hook_func_t func;
rb_event_t events;
{
rb_event_hook_t *hook;
hook = ALLOC(rb_event_hook_t);
hook->func = func;
hook->events = events;
hook->next = event_hooks;
event_hooks = hook;
}
int
rb_remove_event_hook(func)
rb_event_hook_func_t func;
{
rb_event_hook_t *prev, *hook;
prev = NULL;
hook = event_hooks;
while (hook) {
if (hook->func == func) {
if (prev) {
prev->next = hook->next;
}
else {
event_hooks = hook->next;
}
xfree(hook);
return 0;
}
prev = hook;
hook = hook->next;
}
return -1;
}
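/* Example (sketch): a C-level tracer registers and removes itself with
 * these hooks (my_hook is a hypothetical function):
 *
 *     static void my_hook(rb_event_t event, NODE *node, VALUE self,
 *                         ID id, VALUE klass)
 *     {
 *         // inspect the event here
 *     }
 *
 *     rb_add_event_hook(my_hook, RUBY_EVENT_CALL | RUBY_EVENT_RETURN);
 *     ...
 *     rb_remove_event_hook(my_hook);
 */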
/*
* call-seq:
* set_trace_func(proc) => proc
* set_trace_func(nil) => nil
*
* Establishes _proc_ as the handler for tracing, or disables
* tracing if the parameter is +nil+. _proc_ takes up
* to six parameters: an event name, a filename, a line number, an
* object id, a binding, and the name of a class. _proc_ is
* invoked whenever an event occurs. Events are: <code>c-call</code>
* (call a C-language routine), <code>c-return</code> (return from a
* C-language routine), <code>call</code> (call a Ruby method),
* <code>class</code> (start a class or module definition),
* <code>end</code> (finish a class or module definition),
* <code>line</code> (execute code on a new line), <code>raise</code>
* (raise an exception), and <code>return</code> (return from a Ruby
* method). Tracing is disabled within the context of _proc_.
*
* class Test
* def test
* a = 1
* b = 2
* end
* end
*
* set_trace_func proc { |event, file, line, id, binding, classname|
* printf "%8s %s:%-2d %10s %8s\n", event, file, line, id, classname
* }
* t = Test.new
* t.test
*
* line prog.rb:11 false
* c-call prog.rb:11 new Class
* c-call prog.rb:11 initialize Object
* c-return prog.rb:11 initialize Object
* c-return prog.rb:11 new Class
* line prog.rb:12 false
* call prog.rb:2 test Test
* line prog.rb:3 test Test
* line prog.rb:4 test Test
* return prog.rb:4 test Test
*/
static VALUE
set_trace_func(obj, trace)
VALUE obj, trace;
{
rb_event_hook_t *hook;
rb_secure(4);
if (NIL_P(trace)) {
trace_func = 0;
rb_remove_event_hook(call_trace_func);
return Qnil;
}
if (!rb_obj_is_proc(trace)) {
rb_raise(rb_eTypeError, "trace_func needs to be Proc");
}
trace_func = trace;
for (hook = event_hooks; hook; hook = hook->next) {
if (hook->func == call_trace_func)
return trace;
}
rb_add_event_hook(call_trace_func, RUBY_EVENT_ALL);
return trace;
}
static const char *
get_event_name(rb_event_t event)
{
switch (event) {
case RUBY_EVENT_LINE:
return "line";
case RUBY_EVENT_CLASS:
return "class";
case RUBY_EVENT_END:
return "end";
case RUBY_EVENT_CALL:
return "call";
case RUBY_EVENT_RETURN:
return "return";
case RUBY_EVENT_C_CALL:
return "c-call";
case RUBY_EVENT_C_RETURN:
return "c-return";
case RUBY_EVENT_RAISE:
return "raise";
default:
return "unknown";
}
}
static void
call_trace_func(event, node, self, id, klass)
rb_event_t event;
NODE *node;
VALUE self;
ID id;
VALUE klass;
{
int state;
volatile int raised;
struct FRAME *prev;
NODE * volatile node_save;
VALUE srcfile;
const char *event_name;
volatile rb_thread_t th = curr_thread;
if (!trace_func) return;
if (tracing) return;
if (ruby_in_compile) return;
if (id == ID_ALLOCATOR) return;
if (!(node_save = ruby_current_node)) {
node_save = NEW_NEWLINE(0);
}
tracing = 1;
prev = ruby_frame;
PUSH_FRAME();
*ruby_frame = *prev;
ruby_frame->prev = prev;
ruby_frame->iter = 0; /* blocks not available anyway */
if (node) {
ruby_current_node = node;
ruby_frame->node = node;
ruby_sourcefile = node->nd_file;
ruby_sourceline = nd_line(node);
}
if (klass) {
if (TYPE(klass) == T_ICLASS) {
klass = RBASIC(klass)->klass;
}
else if (FL_TEST(klass, FL_SINGLETON)) {
klass = rb_iv_get(klass, "__attached__");
}
}
PUSH_TAG(PROT_EMPTY);
raised = rb_thread_reset_raised(th);
if ((state = EXEC_TAG()) == 0) {
srcfile = rb_str_new2(ruby_sourcefile?ruby_sourcefile:"(ruby)");
event_name = get_event_name(event);
proc_invoke(trace_func, rb_ary_new3(6, rb_str_new2(event_name),
srcfile,
INT2FIX(ruby_sourceline),
id?ID2SYM(id):Qnil,
self?rb_f_binding(self):Qnil,
klass),
Qundef, 0);
}
if (raised) rb_thread_set_raised(th);
POP_TAG();
POP_FRAME();
tracing = 0;
ruby_current_node = node_save;
SET_CURRENT_SOURCE();
if (state) JUMP_TAG(state);
}
static VALUE
avalue_to_svalue(v)
VALUE v;
{
VALUE tmp, top;
tmp = rb_check_array_type(v);
if (NIL_P(tmp)) {
return v;
}
if (RARRAY(tmp)->len == 0) {
return Qundef;
}
if (RARRAY(tmp)->len == 1) {
top = rb_check_array_type(RARRAY(tmp)->ptr[0]);
if (NIL_P(top)) {
return RARRAY(tmp)->ptr[0];
}
if (RARRAY(top)->len > 1) {
return v;
}
return top;
}
return tmp;
}
static VALUE
svalue_to_avalue(v)
VALUE v;
{
VALUE tmp, top;
if (v == Qundef) return rb_ary_new2(0);
tmp = rb_check_array_type(v);
if (NIL_P(tmp)) {
return rb_ary_new3(1, v);
}
if (RARRAY(tmp)->len == 1) {
top = rb_check_array_type(RARRAY(tmp)->ptr[0]);
if (!NIL_P(top) && RARRAY(top)->len > 1) {
return tmp;
}
return rb_ary_new3(1, v);
}
return tmp;
}
static VALUE
svalue_to_mrhs(v, lhs)
VALUE v;
NODE *lhs;
{
VALUE tmp;
if (v == Qundef) return rb_ary_new2(0);
tmp = rb_check_array_type(v);
if (NIL_P(tmp)) {
return rb_ary_new3(1, v);
}
/* no lhs means splat lhs only */
if (!lhs) {
return rb_ary_new3(1, v);
}
return tmp;
}
static VALUE
avalue_splat(v)
VALUE v;
{
if (RARRAY(v)->len == 0) {
return Qundef;
}
if (RARRAY(v)->len == 1) {
return RARRAY(v)->ptr[0];
}
return v;
}
#if 1
VALUE
rb_Array(val)
VALUE val;
{
VALUE tmp = rb_check_array_type(val);
if (NIL_P(tmp)) {
/* hack to avoid invoking Object#to_a */
VALUE origin;
ID id = rb_intern("to_a");
if (search_method(CLASS_OF(val), id, &origin) &&
RCLASS(origin)->m_tbl != RCLASS(rb_mKernel)->m_tbl) { /* exclude Kernel#to_a */
val = rb_funcall(val, id, 0);
if (TYPE(val) != T_ARRAY) {
rb_raise(rb_eTypeError, "`to_a' did not return Array");
}
return val;
}
else {
return rb_ary_new3(1, val);
}
}
return tmp;
}
#endif
static VALUE
splat_value(v)
VALUE v;
{
if (NIL_P(v)) return rb_ary_new3(1, Qnil);
return rb_Array(v);
}
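/*
 * Resolves where a class/module/constant definition attaches, e.g. for
 *   class Foo::Bar; end
 * the nd_head (`Foo') is evaluated and must be a class or module.  A bare
 * name (headless NODE_COLON2) uses the current cbase; a `::Name' path
 * falls back to the wrapper module (when running under one) or Object.
 */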
static VALUE
class_prefix(self, cpath)
VALUE self;
NODE *cpath;
{
if (!cpath) {
rb_bug("class path missing");
}
if (cpath->nd_head) {
VALUE c = rb_eval(self, cpath->nd_head);
switch (TYPE(c)) {
case T_CLASS:
case T_MODULE:
break;
default:
rb_raise(rb_eTypeError, "%s is not a class/module",
RSTRING(rb_obj_as_string(c))->ptr);
}
return c;
}
else if (nd_type(cpath) == NODE_COLON2) {
return ruby_cbase;
}
else if (ruby_wrapper) {
return ruby_wrapper;
}
else {
return rb_cObject;
}
}
#define return_value(v) do {\
if ((prot_tag->retval = (v)) == Qundef) {\
prot_tag->retval = Qnil;\
}\
} while (0)
NORETURN(static void return_jump _((VALUE)));
NORETURN(static void break_jump _((VALUE)));
NORETURN(static void next_jump _((VALUE)));
NORETURN(static void unknown_node _((NODE *)));
static void
unknown_node(node)
NODE *node;
{
ruby_current_node = 0;
if (node->flags == 0) {
rb_bug("terminated node (0x%lx)", node);
}
else if (BUILTIN_TYPE(node) != T_NODE) {
rb_bug("not a node 0x%02lx (0x%lx)", BUILTIN_TYPE(node), node);
}
else {
rb_bug("unknown node type %d (0x%lx)", nd_type(node), node);
}
}
/*
* functions factored out of rb_eval() to reduce its stack frame size
*/
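/*
 * eval_node(name, type) expands (via TOKEN_PASTE) to a NOINLINE forward
 * declaration plus a K&R-style definition of eval_<name>(self, node).
 * The _volatile flavour marks both parameters volatile so their values
 * survive a longjmp() through the PUSH_TAG/EXEC_TAG machinery used in
 * the loop and rescue helpers below.
 */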
#define eval_node_0(n,retType, self, node) \
NOINLINE(static retType TOKEN_PASTE(eval_,n) _((self, node)));\
static retType TOKEN_PASTE(eval_,n)(self, node)
#define eval_node(n,retType) \
eval_node_0(n,retType, VALUE self, NODE *node)
#define eval_node_volatile(n,retType) \
eval_node_0(n,retType, volatile VALUE self, NODE * volatile node)
eval_node(match2, VALUE)
{
VALUE l = rb_eval(self,node->nd_recv);
VALUE r = rb_eval(self,node->nd_value);
return rb_reg_match(l, r);
}
eval_node(match3, VALUE)
{
VALUE r = rb_eval(self,node->nd_recv);
VALUE l = rb_eval(self,node->nd_value);
return TYPE(l) == T_STRING ? rb_reg_match(r, l) : rb_funcall(l, match, 1, r);
}
eval_node_volatile(opt_n, void)
{
int state;
PUSH_TAG(PROT_LOOP);
switch (state = EXEC_TAG()) {
case TAG_NEXT:
state = 0;
case 0:
while (!NIL_P(rb_gets())) {
opt_n_redo:
rb_eval(self, node->nd_body);
}
break;
case TAG_REDO:
state = 0;
goto opt_n_redo;
case TAG_BREAK:
state = 0;
default:
break;
}
POP_TAG();
if (state) JUMP_TAG(state);
}
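/*
 * eval_when handles the expressionless form of case, e.g.
 *   case
 *   when cond1 then ...
 *   when cond2 then ...
 *   end
 * It returns the nd_body of the first truthy `when' clause (rb_eval jumps
 * back to its `again' label with it), or whatever node follows the chain
 * (the else branch, possibly NULL).
 */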
eval_node(when, NODE*)
{
do {
NODE *tag = node->nd_head;
while (tag) {
EXEC_EVENT_HOOK(RUBY_EVENT_LINE, tag, self,
ruby_frame->last_func,
ruby_frame->last_class);
if (tag->nd_head && nd_type(tag->nd_head) == NODE_WHEN) {
VALUE v = rb_eval(self, tag->nd_head->nd_head);
long i;
if (TYPE(v) != T_ARRAY) v = rb_ary_to_ary(v);
for (i=0; i<RARRAY(v)->len; i++) {
if (RTEST(RARRAY(v)->ptr[i])) return node->nd_body;
}
tag = tag->nd_next;
continue;
}
if (RTEST(rb_eval(self, tag->nd_head))) return node->nd_body;
tag = tag->nd_next;
}
} while ((node = node->nd_next) && nd_type(node) == NODE_WHEN);
return node;
}
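/*
 * eval_case handles case with a target expression; each `when' value is
 * tested with #=== (the `eqq' ID), e.g.
 *   case x
 *   when Integer, /re/, 0..9 then ...
 *   end
 * A nested NODE_WHEN head is apparently how a splatted `when *ary' value
 * arrives; its elements are expanded and tested one by one.
 */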
eval_node(case, NODE*)
{
VALUE val = rb_eval(self, node->nd_head);
node = node->nd_body;
while (node) {
NODE *tag;
if (nd_type(node) != NODE_WHEN) break;
tag = node->nd_head;
while (tag) {
EXEC_EVENT_HOOK(RUBY_EVENT_LINE, tag, self,
ruby_frame->last_func,
ruby_frame->last_class);
if (tag->nd_head && nd_type(tag->nd_head) == NODE_WHEN) {
VALUE v = rb_eval(self, tag->nd_head->nd_head);
long i;
if (TYPE(v) != T_ARRAY) v = rb_ary_to_ary(v);
for (i=0; i<RARRAY(v)->len; i++) {
if (RTEST(rb_funcall2(RARRAY(v)->ptr[i], eqq, 1, &val)))
return node->nd_body;
}
tag = tag->nd_next;
continue;
}
if (RTEST(rb_funcall2(rb_eval(self, tag->nd_head), eqq, 1, &val)))
return node->nd_body;
tag = tag->nd_next;
}
node = node->nd_next;
}
return node;
}
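/*
 * while loops.  nd_state distinguishes the pre-test form from the
 * post-test `begin ... end while' form; redo/next/break arrive as jump
 * tags caught by the PROT_LOOP tag pushed here.  E.g.
 *   i += 1 while i < n           # pre-test
 *   begin ... end while cond     # nd_state == 0: body runs at least once
 * eval_until below is the negated twin.
 */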
eval_node_volatile(while, VALUE)
{
int state;
VALUE result;
PUSH_TAG(PROT_LOOP);
switch (state = EXEC_TAG()) {
case 0:
if (!(node->nd_state) || RTEST(rb_eval(self, node->nd_cond))) {
do {
while_redo:
#if STACK_WIPE_SITES & 0x10
rb_gc_wipe_stack();
#endif
rb_eval(self, node->nd_body);
while_next:
;
} while (RTEST(rb_eval(self, node->nd_cond)));
} /* fall thru */
default:
result=Qnil;
break;
case TAG_REDO:
state = 0;
goto while_redo;
case TAG_NEXT:
state = 0;
goto while_next;
case TAG_BREAK:
if (TAG_DST()) {
state = 0;
result = prot_tag->retval;
}
}
POP_TAG();
if (state) JUMP_TAG(state);
return result;
}
eval_node_volatile(until, VALUE)
{
int state;
VALUE result;
PUSH_TAG(PROT_LOOP);
switch (state = EXEC_TAG()) {
case 0:
if (!(node->nd_state) || !RTEST(rb_eval(self, node->nd_cond))) {
do {
until_redo:
#if STACK_WIPE_SITES & 0x10
rb_gc_wipe_stack();
#endif
rb_eval(self, node->nd_body);
until_next:
;
} while (!RTEST(rb_eval(self, node->nd_cond)));
} /* fall thru */
default:
result=Qnil;
break;
case TAG_REDO:
state = 0;
goto until_redo;
case TAG_NEXT:
state = 0;
goto until_next;
case TAG_BREAK:
if (TAG_DST()) {
state = 0;
result = prot_tag->retval;
}
}
POP_TAG();
if (state) JUMP_TAG(state);
return result;
}
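/*
 * NODE_ITER is a method call with a literal block; NODE_FOR is a `for'
 * loop, which pushes the same block but then sends #each to the target:
 *   for x in list do ... end   # roughly list.each { |x| ... }, except
 *                              # that x lives in the enclosing scope
 * (hence _block.flags &= ~BLOCK_D_SCOPE in the `for' branch below).
 */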
eval_node_volatile(iter, VALUE)
{
int state;
VALUE result;
PUSH_TAG(PROT_LOOP);
PUSH_BLOCK(node->nd_var, node->nd_body);
state = EXEC_TAG();
switch (state) {
case TAG_RETRY:
state = 0; /* fall thru to case 0 */
case 0:
PUSH_ITER(ITER_PRE);
if (nd_type(node) == NODE_ITER) {
result = rb_eval(self, node->nd_iter);
}
else {
_block.flags &= ~BLOCK_D_SCOPE;
BEGIN_CALLARGS;
result = rb_eval(self, node->nd_iter);
END_CALLARGS;
ruby_current_node = (NODE *)node;
SET_CURRENT_SOURCE();
result = rb_call(CLASS_OF(result),result,each,0,0,0,self);
}
POP_ITER();
break;
case TAG_BREAK:
if (TAG_DST()) {
result = prot_tag->retval;
state = 0;
}
}
POP_BLOCK();
POP_TAG();
if (state) JUMP_TAG(state);
return result;
}
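/*
 * begin/rescue evaluation.  `rescuing' is 0 while the body runs, -1 while
 * a rescue argument (the exception class expression) is being evaluated,
 * so a raise there simply propagates, and 1 inside a rescue body so that
 * `retry' can restart the begin body.  A Qundef result tells rb_eval to
 * evaluate the else clause itself, avoiding an extra level of recursion.
 */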
eval_node_volatile(rescue, VALUE)
{
volatile VALUE e_info = ruby_errinfo;
volatile int rescuing = 0;
int state;
VALUE result;
PUSH_TAG(PROT_EMPTY);
if ((state = EXEC_TAG()) == 0) {
retry_entry:
result = rb_eval(self, node->nd_head);
}
else if (rescuing) {
if (rescuing < 0) {
/* in rescue argument, just reraise */
result = Qnil;
}
else if (state == TAG_RETRY) {
rescuing = state = 0;
ruby_errinfo = e_info;
goto retry_entry;
}
else if (state != TAG_RAISE) {
result = prot_tag->retval;
}
}
else if (state == TAG_RAISE) {
NODE *resq = node->nd_resq;
rescuing = -1;
while (resq) {
ruby_current_node = resq;
if (handle_rescue(self, resq)) {
state = 0;
rescuing = 1;
result = rb_eval(self, resq->nd_body);
break;
}
resq = resq->nd_head; /* next rescue */
}
}
else {
result = prot_tag->retval;
}
POP_TAG();
if (state != TAG_RAISE) ruby_errinfo = e_info;
if (state) {
JUMP_TAG(state);
}
/* no exception raised */
if (!rescuing && node->nd_else) { /* else clause given */
result = Qundef; /* caller must eval this! */
}
return result;
}
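/*
 * begin/ensure evaluation: the ensure part (nd_ensr) runs whether the
 * body exits normally or via a jump tag, with the pending return value
 * and ruby_errinfo restored around it, e.g.
 *   f = open(path); begin ... ensure f.close end
 * thread_no_ensure() apparently lets the thread machinery request that
 * ensure blocks be bypassed; its definition is elsewhere in this file.
 */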
eval_node_volatile(ensure, VALUE)
{
int state;
VALUE result;
PUSH_TAG(PROT_EMPTY);
if ((state = EXEC_TAG()) == 0) {
result = rb_eval(self, node->nd_head);
}
POP_TAG();
if (node->nd_ensr && !thread_no_ensure()) {
VALUE retval = prot_tag->retval; /* save retval */
VALUE errinfo = ruby_errinfo;
rb_eval(self, node->nd_ensr);
return_value(retval);
ruby_errinfo = errinfo;
}
if (state) JUMP_TAG(state);
return result;
}
eval_node(dot, VALUE)
{
VALUE beg = rb_eval(self, node->nd_beg);
VALUE end = rb_eval(self, node->nd_end);
return rb_range_new(beg, end, nd_type(node) == NODE_DOT3);
}
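/*
 * Flip-flop operators in conditions, e.g.
 *   ARGF.each { |line| print line if /BEGIN/../END/ }
 * The state lives in a hidden local (rb_svar).  flip2 (`..') tests the
 * end condition on the same evaluation that turned the state on; flip3
 * (`...') waits until a later evaluation before testing the end condition.
 */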
eval_node(flip2, VALUE)
{
VALUE *flip = rb_svar(node->nd_cnt);
if (!flip) rb_bug("unexpected local variable");
if (!RTEST(*flip)) {
if (!RTEST(rb_eval(self, node->nd_beg)))
return Qfalse;
*flip = RTEST(rb_eval(self, node->nd_end))?Qfalse:Qtrue;
}
else if (RTEST(rb_eval(self, node->nd_end)))
*flip = Qfalse;
return Qtrue;
}
eval_node(flip3, VALUE)
{
VALUE *flip = rb_svar(node->nd_cnt);
if (!flip) rb_bug("unexpected local variable");
if (!RTEST(*flip))
return *flip = (RTEST(rb_eval(self, node->nd_beg)) ? Qtrue : Qfalse);
if (RTEST(rb_eval(self, node->nd_end)))
*flip = Qfalse;
return Qtrue;
}
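/*
 * Attribute/element assignment such as `recv.foo = v' or `recv[i] = v'.
 * nd_recv == (NODE *)1 marks an explicit `self.foo = v', called with
 * fcall-style scope (scope == 1), apparently so a private writer remains
 * reachable.  The value of the whole expression is the last argument,
 * not whatever the writer method returns.
 */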
eval_node(attrasgn, VALUE)
{
VALUE recv;
int argc; VALUE *argv; /* used in SETUP_ARGS */
int scope;
TMP_PROTECT;
BEGIN_CALLARGS;
if (node->nd_recv == (NODE *)1) {
recv = self;
scope = 1;
}
else {
recv = rb_eval(self, node->nd_recv);
scope = 0;
}
SETUP_ARGS(node->nd_args);
END_CALLARGS;
ruby_current_node = node;
SET_CURRENT_SOURCE();
rb_call(CLASS_OF(recv),recv,node->nd_mid,argc,argv,scope,self);
return argv[argc-1];
}
eval_node(call, VALUE)
{
VALUE recv;
int argc; VALUE *argv; /* used in SETUP_ARGS */
TMP_PROTECT;
BEGIN_CALLARGS;
recv = rb_eval(self, node->nd_recv);
SETUP_ARGS(node->nd_args);
END_CALLARGS;
ruby_current_node = node;
SET_CURRENT_SOURCE();
return rb_call(CLASS_OF(recv),recv,node->nd_mid,argc,argv,0,self);
}
eval_node(fcall, VALUE)
{
int argc; VALUE *argv; /* used in SETUP_ARGS */
TMP_PROTECT;
BEGIN_CALLARGS;
SETUP_ARGS(node->nd_args);
END_CALLARGS;
ruby_current_node = node;
SET_CURRENT_SOURCE();
return rb_call(CLASS_OF(self),self,node->nd_mid,argc,argv,1,self);
}
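/*
 * super / zsuper.  NODE_SUPER is `super(args)'; NODE_ZSUPER is a bare
 * `super', which re-sends the current method's arguments, taken from the
 * scope's local variable table at local_vars + 2 (skipping the first two
 * reserved slots, apparently $_ and $~).
 */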
eval_node(super, VALUE)
{
int argc; VALUE *argv; /* used in SETUP_ARGS */
TMP_PROTECT;
if (ruby_frame->last_class == 0) {
if (ruby_frame->last_func) {
rb_name_error(ruby_frame->last_func,
"superclass method `%s' disabled",
rb_id2name(ruby_frame->orig_func));
}
else {
rb_raise(rb_eNoMethodError, "super called outside of method");
}
}
if (nd_type(node) == NODE_ZSUPER) {
argc = ruby_frame->argc;
if (argc && DMETHOD_P()) {
if (TYPE(RBASIC(ruby_scope)->klass) != T_ARRAY ||
RARRAY(RBASIC(ruby_scope)->klass)->len != argc) {
rb_raise(rb_eRuntimeError,
"super: specify arguments explicitly");
}
argv = RARRAY(RBASIC(ruby_scope)->klass)->ptr;
}
else if (!ruby_scope->local_vars) {
argc = 0;
argv = 0;
}
else {
argv = ruby_scope->local_vars + 2;
}
}
else {
BEGIN_CALLARGS;
SETUP_ARGS(node->nd_args);
END_CALLARGS;
ruby_current_node = node;
}
SET_CURRENT_SOURCE();
return rb_call_super(argc, argv);
}
eval_node_volatile(scope, VALUE)
{
int state;
VALUE result;
struct FRAME frame;
NODE * volatile saved_cref = 0;
frame = *ruby_frame;
frame.tmp = ruby_frame;
ruby_frame = &frame;
PUSH_SCOPE();
PUSH_TAG(PROT_EMPTY);
if (node->nd_rval) {
saved_cref = ruby_cref;
ruby_cref = (NODE*)node->nd_rval;
}
if (node->nd_tbl) {
VALUE *vars = ALLOCA_N(VALUE, node->nd_tbl[0]+1);
*vars++ = (VALUE)node;
ruby_scope->local_vars = vars;
rb_mem_clear(ruby_scope->local_vars, node->nd_tbl[0]);
ruby_scope->local_tbl = node->nd_tbl;
}
else {
ruby_scope->local_vars = 0;
ruby_scope->local_tbl = 0;
}
if ((state = EXEC_TAG()) == 0) {
result = rb_eval(self, node->nd_next);
}
POP_TAG();
POP_SCOPE();
ruby_frame = frame.tmp;
if (saved_cref)
ruby_cref = saved_cref;
if (state) JUMP_TAG(state);
return result;
}
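/*
 * NODE_OP_ASGN1: operator assignment through #[] / #[]=, e.g.
 *   a[i] += 1     # a[i] = a[i] + 1
 *   h[k] ||= []   # nd_mid == 0 (OR): assign only if h[k] is falsy
 *   h[k] &&= v    # nd_mid == 1 (AND): assign only if h[k] is truthy
 */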
eval_node(op_asgn1, VALUE)
{
int argc; VALUE *argv; /* used in SETUP_ARGS */
VALUE recv, val, tmp;
NODE *rval;
TMP_PROTECT;
recv = rb_eval(self, node->nd_recv);
rval = node->nd_args->nd_head;
SETUP_ARGS0(node->nd_args->nd_body, 1);
val = rb_funcall3(recv, aref, argc, argv);
switch (node->nd_mid) {
case 0: /* OR */
if (RTEST(val)) return val;
val = rb_eval(self, rval);
break;
case 1: /* AND */
if (!RTEST(val)) return val;
val = rb_eval(self, rval);
break;
default:
tmp = rb_eval(self, rval);
val = rb_funcall3(val, node->nd_mid, 1, &tmp);
}
argv[argc] = val;
rb_funcall2(recv, aset, argc+1, argv);
return val;
}
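/*
 * NODE_OP_ASGN2: operator assignment through an attribute, e.g.
 *   obj.count += 1   # obj.count = obj.count + 1, written back via the
 *                    # `count=' writer ID held in nd_aid
 */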
eval_node(op_asgn2, VALUE)
{
ID id = node->nd_next->nd_vid;
VALUE recv, val, tmp;
recv = rb_eval(self, node->nd_recv);
val = rb_funcall3(recv, id, 0, 0);
switch (node->nd_next->nd_mid) {
case 0: /* OR */
if (RTEST(val)) return val;
val = rb_eval(self, node->nd_value);
break;
case 1: /* AND */
if (!RTEST(val)) return val;
val = rb_eval(self, node->nd_value);
break;
default:
tmp = rb_eval(self, node->nd_value);
val = rb_funcall3(val, node->nd_next->nd_mid, 1, &tmp);
}
rb_funcall2(recv, node->nd_next->nd_aid, 1, &val);
return val;
}
eval_node(hash, VALUE)
{
NODE *list;
VALUE hash = rb_hash_new();
VALUE key, val;
list = node->nd_head;
while (list) {
key = rb_eval(self, list->nd_head);
list = list->nd_next;
if (list == 0)
rb_bug("odd number list for Hash");
val = rb_eval(self, list->nd_head);
list = list->nd_next;
rb_hash_aset(hash, key, val);
}
return hash;
}
eval_node(array, VALUE)
{
VALUE ary;
long i;
i = node->nd_alen;
ary = rb_ary_new2(i);
for (i=0;node;node=node->nd_next) {
RARRAY(ary)->ptr[i++] = rb_eval(self, node->nd_head);
RARRAY(ary)->len = i;
}
return ary;
}
eval_node(slit, VALUE)
{
VALUE str, str2;
NODE *list = node->nd_next;
str = rb_str_new3(node->nd_lit);
while (list) {
if (list->nd_head) {
switch (nd_type(list->nd_head)) {
case NODE_STR:
str2 = list->nd_head->nd_lit;
break;
default:
str2 = rb_eval(self, list->nd_head);
break;
}
rb_str_append(str, str2);
OBJ_INFECT(str, str2);
}
list = list->nd_next;
}
switch (nd_type(node)) {
case NODE_DREGX:
str2 = rb_reg_new(RSTRING(str)->ptr, RSTRING(str)->len,
node->nd_cflag);
RB_GC_GUARD(str); /* ensure str is not GC'd in rb_reg_new */
return str2;
case NODE_DREGX_ONCE: /* regexp expand once */
str2 = rb_reg_new(RSTRING(str)->ptr, RSTRING(str)->len,
node->nd_cflag);
nd_set_type(node, NODE_LIT);
RB_GC_GUARD(str); /* ensure str is not GC'd in rb_reg_new */
return node->nd_lit = str2;
case NODE_LIT:
/* another thread may have replaced NODE_DREGX_ONCE with NODE_LIT */
return Qundef;
case NODE_DXSTR:
return rb_funcall(self, '`', 1, str);
case NODE_DSYM:
return rb_str_intern(str);
}
return str;
}
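/*
 * NODE_DEFN: `def name ... end' in a class/module body.  Visibility comes
 * from the surrounding scope (private/protected/public/module_function),
 * with #initialize forced private; the body is copied together with the
 * current cref (rb_copy_node_scope) so constant lookup inside the method
 * stays lexical.
 */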
eval_node(defn, void)
{
NODE *body, *defn;
VALUE origin = 0;
int noex;
if (NIL_P(ruby_class)) {
rb_raise(rb_eTypeError, "no class/module to add method");
}
if (ruby_class == rb_cObject && node->nd_mid == init) {
rb_warn("redefining Object#initialize may cause infinite loop");
}
if (node->nd_mid == __id__ || node->nd_mid == __send__) {
rb_warn("redefining `%s' may cause serious problem",
rb_id2name(node->nd_mid));
}
rb_frozen_class_p(ruby_class);
body = search_method(ruby_class, node->nd_mid, &origin);
if (body){
if (RTEST(ruby_verbose) && ruby_class == origin && body->nd_cnt == 0 && body->nd_body) {
rb_warning("method redefined; discarding old %s", rb_id2name(node->nd_mid));
}
}
if (SCOPE_TEST(SCOPE_PRIVATE) || node->nd_mid == init) {
noex = NOEX_PRIVATE;
}
else if (SCOPE_TEST(SCOPE_PROTECTED)) {
noex = NOEX_PROTECTED;
}
else {
noex = NOEX_PUBLIC;
}
if (body && origin == ruby_class && body->nd_body == 0) {
noex |= NOEX_NOSUPER;
}
defn = rb_copy_node_scope(node->nd_defn, ruby_cref);
rb_add_method(ruby_class, node->nd_mid, defn, noex);
if (scope_vmode == SCOPE_MODFUNC) {
rb_add_method(rb_singleton_class(ruby_class),
node->nd_mid, defn, NOEX_PUBLIC);
}
}
eval_node(defs, void)
{
VALUE recv = rb_eval(self, node->nd_recv);
VALUE klass;
NODE *body = 0, *defn;
st_data_t data;
if (ruby_safe_level >= 4 && !OBJ_TAINTED(recv)) {
rb_raise(rb_eSecurityError, "Insecure: can't define singleton method");
}
if (FIXNUM_P(recv) || SYMBOL_P(recv)) {
rb_raise(rb_eTypeError,
"can't define singleton method \"%s\" for %s",
rb_id2name(node->nd_mid),
rb_obj_classname(recv));
}
if (OBJ_FROZEN(recv)) rb_error_frozen("object");
klass = rb_singleton_class(recv);
if (st_lookup(RCLASS(klass)->m_tbl, node->nd_mid, &data)) {
body = (NODE *)data;
if (ruby_safe_level >= 4) {
rb_raise(rb_eSecurityError, "redefining method prohibited");
}
if (RTEST(ruby_verbose)) {
rb_warning("redefine %s", rb_id2name(node->nd_mid));
}
}
defn = rb_copy_node_scope(node->nd_defn, ruby_cref);
rb_add_method(klass, node->nd_mid, defn,
NOEX_PUBLIC|(body?body->nd_noex&NOEX_UNDEF:0));
}
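/*
 * NODE_CLASS: `class Foo < Super ... end'.  Reopening an existing class
 * checks for a superclass mismatch; a brand-new class is registered under
 * its cbase and triggers the inherited callback.  Either way the body is
 * run via module_setup().
 */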
eval_node(class, VALUE)
{
VALUE super, klass, tmp, cbase;
ID cname;
int gen = Qfalse;
cbase = class_prefix(self, node->nd_cpath);
cname = node->nd_cpath->nd_mid;
if (NIL_P(ruby_cbase)) {
rb_raise(rb_eTypeError, "no outer class/module");
}
if (node->nd_super) {
super = rb_eval(self, node->nd_super);
rb_check_inheritable(super);
}
else {
super = 0;
}
if (rb_const_defined_at(cbase, cname)) {
klass = rb_const_get_at(cbase, cname);
if (TYPE(klass) != T_CLASS) {
rb_raise(rb_eTypeError, "%s is not a class",
rb_id2name(cname));
}
if (super) {
tmp = rb_class_real(RCLASS(klass)->super);
if (tmp != super) {
rb_raise(rb_eTypeError, "superclass mismatch for class %s",
rb_id2name(cname));
}
super = 0;
}
if (ruby_safe_level >= 4) {
rb_raise(rb_eSecurityError, "extending class prohibited");
}
}
else {
if (!super) super = rb_cObject;
klass = rb_define_class_id(cname, super);
rb_set_class_path(klass, cbase, rb_id2name(cname));
rb_const_set(cbase, cname, klass);
gen = Qtrue;
}
if (ruby_wrapper) {
rb_extend_object(klass, ruby_wrapper);
rb_include_module(klass, ruby_wrapper);
}
if (super && gen) {
rb_class_inherited(super, klass);
}
return module_setup(klass, node);
}
eval_node(module, VALUE)
{
VALUE module, cbase;
ID cname;
if (NIL_P(ruby_cbase)) {
rb_raise(rb_eTypeError, "no outer class/module");
}
cbase = class_prefix(self, node->nd_cpath);
cname = node->nd_cpath->nd_mid;
if (rb_const_defined_at(cbase, cname)) {
module = rb_const_get_at(cbase, cname);
if (TYPE(module) != T_MODULE) {
rb_raise(rb_eTypeError, "%s is not a module",
rb_id2name(cname));
}
if (ruby_safe_level >= 4) {
rb_raise(rb_eSecurityError, "extending module prohibited");
}
}
else {
module = rb_define_module_id(cname);
rb_set_class_path(module, cbase, rb_id2name(cname));
rb_const_set(cbase, cname, module);
}
if (ruby_wrapper) {
rb_extend_object(module, ruby_wrapper);
rb_include_module(module, ruby_wrapper);
}
return module_setup(module, node);
}
eval_node(sclass, VALUE)
{
VALUE klass, result;
result = rb_eval(self, node->nd_recv);
if (FIXNUM_P(result) || SYMBOL_P(result)) {
rb_raise(rb_eTypeError, "no virtual class for %s",
rb_obj_classname(result));
}
if (ruby_safe_level >= 4 && !OBJ_TAINTED(result))
rb_raise(rb_eSecurityError, "Insecure: can't extend object");
klass = rb_singleton_class(result);
if (ruby_wrapper) {
rb_extend_object(klass, ruby_wrapper);
rb_include_module(klass, ruby_wrapper);
}
return module_setup(klass, node);
}
eval_node(defined, VALUE)
{
char buf[20];
const char *desc = is_defined(self, node->nd_head, buf);
return desc ? rb_str_new2(desc) : Qnil;
}
static void
eval_cvar_set(node, result, warn)
NODE *node;
VALUE result;
int warn;
{
rb_cvar_set(cvar_cbase(), node->nd_vid, result, warn);
}
static void
eval_cdecl(self, node, result)
VALUE self, result;
NODE *node;
{
if (node->nd_vid == 0)
rb_const_set(class_prefix(self, node->nd_else),
node->nd_else->nd_mid, result);
else
rb_const_set(ruby_cbase, node->nd_vid, result);
}
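/*
 * The main AST interpreter.  rb_eval() dispatches on nd_type(node); tail
 * positions (if/and/or, block tails, rescue-else, ...) loop back to the
 * `again' label instead of recursing, and the bulkier cases live in the
 * eval_* helpers above to keep this stack frame small.
 */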
static VALUE
rb_eval(self, node)
VALUE self;
NODE * node;
{
VALUE result;
again:
eval_check_tick();
result = Qnil;
if (node) {
ruby_current_node = node;
switch (nd_type(node)) {
case NODE_BLOCK:
while (node->nd_next) {
rb_eval(self, node->nd_head);
node = node->nd_next;
}
node = node->nd_head;
goto again;
case NODE_POSTEXE:
rb_f_END();
nd_set_type(node, NODE_NIL); /* exec just once */
break;
/* begin .. end without clauses */
case NODE_BEGIN:
node = node->nd_body;
goto again;
/* nodes for speed-up(default match) */
case NODE_MATCH:
result = rb_reg_match2(node->nd_lit);
break;
/* nodes for speed-up(literal match) */
case NODE_MATCH2:
result = eval_match2(self, node);
break;
/* nodes for speed-up(literal match) */
case NODE_MATCH3:
result = eval_match3(self,node);
break;
/* node for speed-up(top-level loop for -n/-p) */
case NODE_OPT_N:
eval_opt_n(self, node);
break;
case NODE_SELF:
result = self;
break;
case NODE_NIL:
break;
case NODE_TRUE:
result = Qtrue;
break;
case NODE_FALSE:
result = Qfalse;
break;
case NODE_IF:
if (RTEST(rb_eval(self, node->nd_cond))) {
EXEC_EVENT_HOOK(RUBY_EVENT_LINE, node, self,
ruby_frame->last_func,
ruby_frame->last_class);
node = node->nd_body;
}
else {
EXEC_EVENT_HOOK(RUBY_EVENT_LINE, node, self,
ruby_frame->last_func,
ruby_frame->last_class);
node = node->nd_else;
}
goto again;
case NODE_WHEN:
if ((node = eval_when(self, node)) != 0) goto again;
break;
case NODE_CASE:
if ((node = eval_case(self, node)) != 0) goto again;
break;
case NODE_WHILE:
result = eval_while(self,node);
break;
case NODE_UNTIL:
result = eval_until(self,node);
break;
case NODE_BLOCK_PASS:
result = block_pass(self, node);
break;
case NODE_ITER:
case NODE_FOR:
result = eval_iter(self, node);
break;
case NODE_BREAK:
break_jump(rb_eval(self, node->nd_stts));
break;
case NODE_NEXT:
next_jump(rb_eval(self, node->nd_stts));
break;
case NODE_REDO:
JUMP_TAG(TAG_REDO);
break;
case NODE_RETRY:
JUMP_TAG(TAG_RETRY);
break;
case NODE_SPLAT:
result = splat_value(rb_eval(self, node->nd_head));
break;
case NODE_TO_ARY:
result = rb_ary_to_ary(rb_eval(self, node->nd_head));
break;
case NODE_SVALUE:
result = avalue_splat(rb_eval(self, node->nd_head));
if (result == Qundef) result = Qnil;
break;
case NODE_YIELD:
if (node->nd_head) {
result = rb_eval(self, node->nd_head);
ruby_current_node = node;
}
else {
result = Qundef; /* no arg */
}
SET_CURRENT_SOURCE();
result = rb_yield_0(result, 0, 0, 0, node->nd_state);
break;
case NODE_RESCUE:
result = eval_rescue(self,node);
if (result == Qundef) { /* handle else clause w/o recursion */
node = node->nd_else;
goto again;
}
break;
case NODE_ENSURE:
result = eval_ensure(self,node);
break;
case NODE_AND:
result = rb_eval(self, node->nd_1st);
if (!RTEST(result)) break;
node = node->nd_2nd;
goto again;
case NODE_OR:
result = rb_eval(self, node->nd_1st);
if (RTEST(result)) break;
node = node->nd_2nd;
goto again;
case NODE_NOT:
result = RTEST(rb_eval(self, node->nd_body)) ? Qfalse : Qtrue;
break;
case NODE_DOT2:
case NODE_DOT3:
result = eval_dot(self,node);
break;
case NODE_FLIP2: /* like AWK */
result = eval_flip2(self,node);
break;
case NODE_FLIP3: /* like SED */
result = eval_flip3(self,node);
break;
case NODE_RETURN:
return_jump(rb_eval(self, node->nd_stts));
break;
case NODE_ARGSCAT:
result = rb_eval(self, node->nd_head);
result = rb_ary_concat(result, splat_value(rb_eval(self, node->nd_body)));
break;
case NODE_ARGSPUSH:
result = rb_ary_dup(rb_eval(self, node->nd_head));
result = rb_ary_push(result, rb_eval(self, node->nd_body));
break;
case NODE_ATTRASGN:
result = eval_attrasgn(self,node);
break;
case NODE_CALL:
result = eval_call(self,node);
break;
case NODE_FCALL:
result = eval_fcall(self,node);
break;
case NODE_VCALL:
SET_CURRENT_SOURCE();
result = rb_call(CLASS_OF(self),self,node->nd_mid,0,0,2,self);
break;
case NODE_SUPER:
case NODE_ZSUPER:
result = eval_super(self,node);
break;
case NODE_SCOPE:
result = eval_scope(self,node);
break;
case NODE_OP_ASGN1:
result = eval_op_asgn1(self,node);
break;
case NODE_OP_ASGN2:
result = eval_op_asgn2(self,node);
break;
case NODE_OP_ASGN_AND:
result = rb_eval(self, node->nd_head);
if (!RTEST(result)) break;
node = node->nd_value;
goto again;
case NODE_OP_ASGN_OR:
if ((node->nd_aid && !is_defined(self, node->nd_head, 0)) ||
!RTEST(result = rb_eval(self, node->nd_head))) {
node = node->nd_value;
goto again;
}
break;
case NODE_MASGN:
result = massign(self, node, rb_eval(self, node->nd_value), 0);
break;
case NODE_LASGN:
if (ruby_scope->local_vars == 0)
rb_bug("unexpected local variable assignment");
result = rb_eval(self, node->nd_value);
ruby_scope->local_vars[node->nd_cnt] = result;
break;
case NODE_DASGN:
result = rb_eval(self, node->nd_value);
dvar_asgn(node->nd_vid, result);
break;
case NODE_DASGN_CURR:
result = rb_eval(self, node->nd_value);
dvar_asgn_curr(node->nd_vid, result);
break;
case NODE_GASGN:
result = rb_eval(self, node->nd_value);
rb_gvar_set(node->nd_entry, result);
break;
case NODE_IASGN:
result = rb_eval(self, node->nd_value);
rb_ivar_set(self, node->nd_vid, result);
break;
case NODE_CDECL:
result = rb_eval(self, node->nd_value);
eval_cdecl(self, node, result);
break;
case NODE_CVDECL:
if (NIL_P(ruby_cbase)) {
rb_raise(rb_eTypeError, "no class/module to define class variable");
}
result = rb_eval(self, node->nd_value);
eval_cvar_set(node, result, Qtrue);
break;
case NODE_CVASGN:
result = rb_eval(self, node->nd_value);
eval_cvar_set(node, result, Qfalse);
break;
case NODE_LVAR:
if (ruby_scope->local_vars == 0) {
rb_bug("unexpected local variable");
}
result = ruby_scope->local_vars[node->nd_cnt];
break;
case NODE_DVAR:
result = rb_dvar_ref(node->nd_vid);
break;
case NODE_GVAR:
result = rb_gvar_get(node->nd_entry);
break;
case NODE_IVAR:
result = rb_ivar_get(self, node->nd_vid);
break;
case NODE_CONST:
result = ev_const_get(ruby_cref, node->nd_vid, self);
break;
case NODE_CVAR:
result = rb_cvar_get(cvar_cbase(), node->nd_vid);
break;
case NODE_BLOCK_ARG:
if (ruby_scope->local_vars == 0)
rb_bug("unexpected block argument");
if (rb_block_given_p()) {
result = rb_block_proc();
ruby_scope->local_vars[node->nd_cnt] = result;
}
else {
result = Qnil;
}
break;
case NODE_COLON2:
result = rb_eval(self, node->nd_head);
if (rb_is_const_id(node->nd_mid)) {
switch (TYPE(result)) {
case T_CLASS:
case T_MODULE:
result = rb_const_get_from(result, node->nd_mid);
break;
default:
rb_raise(rb_eTypeError, "%s is not a class/module",
RSTRING(rb_obj_as_string(result))->ptr);
break;
}
}
else
result = rb_funcall(result, node->nd_mid, 0, 0);
break;
case NODE_COLON3:
result = rb_const_get_from(rb_cObject, node->nd_mid);
break;
case NODE_NTH_REF:
result = rb_reg_nth_match(node->nd_nth, MATCH_DATA);
break;
case NODE_BACK_REF:
switch (node->nd_nth) {
case '&':
result = rb_reg_last_match(MATCH_DATA);
break;
case '`':
result = rb_reg_match_pre(MATCH_DATA);
break;
case '\'':
result = rb_reg_match_post(MATCH_DATA);
break;
case '+':
result = rb_reg_match_last(MATCH_DATA);
break;
default:
rb_bug("unexpected back-ref");
}
break;
case NODE_HASH:
result = eval_hash(self,node);
break;
case NODE_ZARRAY: /* zero length list */
result = rb_ary_new();
break;
case NODE_ARRAY:
result = eval_array(self,node);
break;
case NODE_STR:
result = rb_str_new3(node->nd_lit);
break;
case NODE_EVSTR:
result = rb_obj_as_string(rb_eval(self, node->nd_body));
break;
case NODE_DSTR:
case NODE_DXSTR:
case NODE_DREGX:
case NODE_DREGX_ONCE:
case NODE_DSYM:
result = eval_slit(self, node);
if (result == Qundef) goto again;
break;
case NODE_XSTR:
result = rb_funcall(self, '`', 1, rb_str_new3(node->nd_lit));
break;
case NODE_LIT:
result = node->nd_lit;
break;
case NODE_DEFN:
if (node->nd_defn)
eval_defn(self,node);
break;
case NODE_DEFS:
if (node->nd_defn)
eval_defs(self,node);
break;
case NODE_UNDEF:
if (NIL_P(ruby_class)) {
rb_raise(rb_eTypeError, "no class to undef method");
}
rb_undef(ruby_class, rb_to_id(rb_eval(self, node->u2.node)));
break;
case NODE_ALIAS:
if (NIL_P(ruby_class)) {
rb_raise(rb_eTypeError, "no class to make alias");
}
rb_alias(ruby_class, rb_to_id(rb_eval(self, node->u1.node)),
rb_to_id(rb_eval(self, node->u2.node)));
break;
case NODE_VALIAS:
rb_alias_variable(node->u1.id, node->u2.id);
break;
case NODE_CLASS:
result = eval_class(self,node);
break;
case NODE_MODULE:
result = eval_module(self,node);
break;
case NODE_SCLASS:
result = eval_sclass(self,node);
break;
case NODE_DEFINED:
result = eval_defined(self,node);