"new" vectors #2180

Closed
wants to merge 7 commits into from
2 changes: 1 addition & 1 deletion Makefile.in
@@ -205,7 +205,7 @@ COMPILER_INPUTS := $(filter-out $(S)src/rustc/driver/rustc.rs, \

LIBRUSTSYNTAX_CRATE := $(S)src/librustsyntax/rustsyntax.rc
LIBRUSTSYNTAX_INPUTS := $(wildcard $(addprefix $(S)src/librustsyntax/, \
rustsyntax.rc *.rs))
rustsyntax.rc *.rs */*.rs */*/*.rs))

RUSTC_INPUTS := $(S)src/rustc/driver/rustc.rs

2 changes: 2 additions & 0 deletions mk/tests.mk
@@ -127,6 +127,8 @@ tidy:
$(wildcard $(S)src/etc/*.py) \
$(COMPILER_CRATE) \
$(COMPILER_INPUTS) \
$(LIBRUSTSYNTAX_CRATE) \
$(LIBRUSTSYNTAX_INPUTS) \
$(CORELIB_CRATE) \
$(CORELIB_INPUTS) \
$(STDLIB_CRATE) \
10 changes: 10 additions & 0 deletions src/librustsyntax/ast.rs
@@ -169,6 +169,14 @@ enum proto {
proto_block, // fn&
}

#[auto_serialize]
enum vstore {
vstore_fixed(option<uint>), // [1,2,3,4]/_ or 4 FIXME: uint -> @expr
vstore_uniq, // [1,2,3,4]/~
vstore_box, // [1,2,3,4]/@
vstore_slice(region) // [1,2,3,4]/&(foo)?
}

pure fn is_blockish(p: ast::proto) -> bool {
alt p {
proto_any | proto_block { true }
@@ -278,6 +286,7 @@ enum alt_mode { alt_check, alt_exhaustive, }

#[auto_serialize]
enum expr_ {
expr_vstore(@expr, vstore),
expr_vec([@expr], mutability),
expr_rec([field], option<@expr>),
expr_call(@expr, [@expr], bool),
@@ -459,6 +468,7 @@ enum ty_ {
ty_tup([@ty]),
ty_path(@path, node_id),
ty_constr(@ty, [@ty_constr]),
ty_vstore(@ty, vstore),
ty_mac(mac),
// ty_infer means the type should be inferred instead of it having been
// specified. This should only appear at the "top level" of a type and not
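The `vstore` enum above is the core of this change: each variant corresponds to one of the suffix spellings shown in its comments, `/_` or `/N` for a fixed-length vector, `/~` for a uniquely owned one, `/@` for a boxed one, and `/&r` for a slice into region `r`. A rough mirror of that shape in present-day Rust, purely illustrative; only the variant names and the suffix spellings come from the diff, everything else is invented for the sketch:

```rust
// Illustrative mirror of the `vstore` enum added in ast.rs; the PR itself
// targets the pre-1.0 compiler, so this is not the actual code.
#[derive(Debug)]
enum VStore {
    Fixed(Option<usize>),  // [1,2,3,4]/_  or  [1,2,3,4]/4
    Uniq,                  // [1,2,3,4]/~
    Boxed,                 // [1,2,3,4]/@
    Slice(Option<String>), // [1,2,3,4]/&  or  [1,2,3,4]/&r  (named region)
}

fn main() {
    // One example per surface form the parser will accept after a vector.
    let examples: &[(&str, VStore)] = &[
        ("[1,2,3,4]/_", VStore::Fixed(None)),
        ("[1,2,3,4]/4", VStore::Fixed(Some(4))),
        ("[1,2,3,4]/~", VStore::Uniq),
        ("[1,2,3,4]/@", VStore::Boxed),
        ("[1,2,3,4]/&r", VStore::Slice(Some("r".to_string()))),
    ];
    for (src, vs) in examples {
        println!("{src:<14} -> {vs:?}");
    }
}
```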
20 changes: 12 additions & 8 deletions src/librustsyntax/ext/auto_serialize.rs
@@ -367,8 +367,7 @@ fn ser_ty(cx: ext_ctxt, tps: ser_tps_map,
}

ast::ty_ptr(_) | ast::ty_rptr(_, _) {
cx.span_err(
ty.span, #fmt["Cannot serialize pointer types"]);
cx.span_err(ty.span, "cannot serialize pointer types");
[]
}

@@ -390,8 +389,7 @@ fn ser_ty(cx: ext_ctxt, tps: ser_tps_map,
}

ast::ty_fn(_, _) {
cx.span_err(
ty.span, #fmt["Cannot serialize function types"]);
cx.span_err(ty.span, "cannot serialize function types");
[]
}

@@ -448,17 +446,19 @@ fn ser_ty(cx: ext_ctxt, tps: ser_tps_map,
}

ast::ty_mac(_) {
cx.span_err(
ty.span, #fmt["Cannot serialize macro types"]);
cx.span_err(ty.span, "cannot serialize macro types");
[]
}

ast::ty_infer {
cx.span_err(
ty.span, #fmt["Cannot serialize inferred types"]);
cx.span_err(ty.span, "cannot serialize inferred types");
[]
}

ast::ty_vstore(_, _) {
cx.span_unimpl(ty.span, "serialization for vstore types");
}

ast::ty_vec(mt) {
let ser_e =
cx.expr(
@@ -675,6 +675,10 @@ fn deser_ty(cx: ext_ctxt, tps: deser_tps_map,
#ast{ fail }
}

ast::ty_vstore(_, _) {
cx.span_unimpl(ty.span, "deserialization for vstore types");
}

ast::ty_vec(mt) {
let l = deser_lambda(cx, tps, mt.ty, cx.clone(d));
#ast{ std::serialization::read_to_vec($(d), $(l)) }
4 changes: 4 additions & 0 deletions src/librustsyntax/fold.rs
@@ -389,6 +389,9 @@ fn noop_fold_expr(e: expr_, fld: ast_fold) -> expr_ {
fld.new_id(i),
fld.fold_expr(v))
}
expr_vstore(e, v) {
expr_vstore(fld.fold_expr(e), v)
}
expr_vec(exprs, mutt) {
expr_vec(fld.map_exprs(fld.fold_expr, exprs), mutt)
}
@@ -497,6 +500,7 @@ fn noop_fold_ty(t: ty_, fld: ast_fold) -> ty_ {
ty_path(path, id) {ty_path(fld.fold_path(path), fld.new_id(id))}
// FIXME: constrs likely needs to be folded...
ty_constr(ty, constrs) {ty_constr(fld.fold_ty(ty), constrs)}
ty_vstore(t, vs) {ty_vstore(fld.fold_ty(t), vs)}
ty_mac(mac) {ty_mac(fold_mac(mac))}
ty_infer {t}
}
143 changes: 113 additions & 30 deletions src/librustsyntax/parse/parser.rs
@@ -382,15 +382,32 @@ fn parse_type_constraints(p: parser) -> [@ast::ty_constr] {

fn parse_ty_postfix(orig_t: ast::ty_, p: parser, colons_before_params: bool,
lo: uint) -> @ast::ty {


fn mk_ty(p: parser, t: ast::ty_, lo: uint, hi: uint) -> @ast::ty {
@{id: p.get_id(),
node: t,
span: ast_util::mk_sp(lo, hi)}
}

if p.token == token::BINOP(token::SLASH) {
let orig_hi = p.last_span.hi;
alt maybe_parse_vstore(p) {
none { }
some(v) {
let t = ast::ty_vstore(mk_ty(p, orig_t, lo, orig_hi), v);
ret mk_ty(p, t, lo, p.last_span.hi);
}
}
}

if colons_before_params && p.token == token::MOD_SEP {
p.bump();
expect(p, token::LT);
} else if !colons_before_params && p.token == token::LT {
p.bump();
} else {
ret @{id: p.get_id(),
node: orig_t,
span: ast_util::mk_sp(lo, p.last_span.hi)};
ret mk_ty(p, orig_t, lo, p.last_span.hi);
}

// If we're here, we have explicit type parameter instantiation.
@@ -399,12 +416,11 @@ fn parse_ty_postfix(orig_t: ast::ty_, p: parser, colons_before_params: bool,

alt orig_t {
ast::ty_path(pth, ann) {
ret @{id: p.get_id(),
node: ast::ty_path(@spanned(lo, p.last_span.hi,
{global: pth.node.global,
idents: pth.node.idents,
types: seq}), ann),
span: ast_util::mk_sp(lo, p.last_span.hi)};
ret mk_ty(p, ast::ty_path(@spanned(lo, p.last_span.hi,
{global: pth.node.global,
idents: pth.node.idents,
types: seq}), ann),
lo, p.last_span.hi);
}
_ { p.fatal("type parameter instantiation only allowed for paths"); }
}
@@ -428,22 +444,33 @@ fn parse_ret_ty(p: parser) -> (ast::ret_style, @ast::ty) {
}
}

fn parse_region(p: parser) -> ast::region {
let region_ = alt p.token {
token::IDENT(sid, _) if p.look_ahead(1u) == token::DOT {
let string = p.get_str(sid);
p.bump(); p.bump();
if string == "self" {
ast::re_self
} else if string == "static" {
ast::re_static
} else {
ast::re_named(string)
}
fn region_from_name(p: parser, s: option<str>) -> ast::region {
let r = alt s {
some (string) {
if string == "self" {
ast::re_self
} else if string == "static" {
ast::re_static
} else {
ast::re_named(string)
}
_ { ast::re_inferred }
}
none { ast::re_inferred }
};
ret {id: p.get_id(), node: region_};

{id: p.get_id(), node: r}
}

fn parse_region(p: parser) -> ast::region {
let name =
alt p.token {
token::IDENT(sid, _) if p.look_ahead(1u) == token::DOT {
p.bump(); p.bump();
some(p.get_str(sid))
}
_ { none }
};
region_from_name(p, name)
}

fn parse_ty(p: parser, colons_before_params: bool) -> @ast::ty {
@@ -666,6 +693,44 @@ fn have_dollar(p: parser) -> option<ast::mac_> {
}
}

fn maybe_parse_vstore(p: parser) -> option<ast::vstore> {
if p.token == token::BINOP(token::SLASH) {
p.bump();
alt p.token {
token::AT {
p.bump(); some(ast::vstore_box)
}
token::TILDE {
p.bump(); some(ast::vstore_uniq)
}
token::UNDERSCORE {
p.bump(); some(ast::vstore_fixed(none))
}
token::LIT_INT(i, ast::ty_i) if i >= 0 {
p.bump(); some(ast::vstore_fixed(some(i as uint)))
}
token::BINOP(token::AND) {
p.bump();
alt p.token {
token::IDENT(sid, _) {
p.bump();
let n = p.get_str(sid);
some(ast::vstore_slice(region_from_name(p, some(n))))
}
_ {
some(ast::vstore_slice(region_from_name(p, none)))
}
}
}
_ {
none
}
}
} else {
none
}
}

fn lit_from_token(p: parser, tok: token::token) -> ast::lit_ {
alt tok {
token::LIT_INT(i, it) { ast::lit_int(i, it) }
@@ -678,7 +743,7 @@ fn lit_from_token(p: parser, tok: token::token) -> ast::lit_ {
}

fn parse_lit(p: parser) -> ast::lit {
let sp = p.span;
let lo = p.span.lo;
let lit = if eat_word(p, "true") {
ast::lit_bool(true)
} else if eat_word(p, "false") {
@@ -688,7 +753,7 @@ fn parse_lit(p: parser) -> ast::lit {
p.bump();
lit_from_token(p, tok)
};
ret {node: lit, span: sp};
ret {node: lit, span: ast_util::mk_sp(lo, p.last_span.hi)};
}

fn is_ident(t: token::token) -> bool {
@@ -891,6 +956,7 @@ fn parse_bottom_expr(p: parser) -> pexpr {
let es =
parse_seq_to_end(token::RBRACKET, seq_sep(token::COMMA),
parse_expr, p);
hi = p.span.hi;
ex = ast::expr_vec(es, mutbl);
} else if p.token == token::POUND_LT {
p.bump();
@@ -988,6 +1054,23 @@ fn parse_bottom_expr(p: parser) -> pexpr {
hi = lit.span.hi;
ex = ast::expr_lit(@lit);
}

// Vstore is legal following expr_lit(lit_str(...)) and expr_vec(...)
// only.
alt ex {
ast::expr_lit(@{node: ast::lit_str(_), span: _}) |
ast::expr_vec(_, _) {
alt maybe_parse_vstore(p) {
none { }
some(v) {
hi = p.span.hi;
ex = ast::expr_vstore(mk_expr(p, lo, hi, ex), v);
}
}
}
_ { }
}

ret mk_pexpr(p, lo, hi, ex);
}

@@ -1194,10 +1277,10 @@ type op_spec = {tok: token::token, op: ast::binop, prec: int};

// FIXME make this a const, don't store it in parser state
fn prec_table() -> @[op_spec] {
ret @[// 'as' sits between here with 12
{tok: token::BINOP(token::STAR), op: ast::mul, prec: 11},
{tok: token::BINOP(token::SLASH), op: ast::div, prec: 11},
{tok: token::BINOP(token::PERCENT), op: ast::rem, prec: 11},
ret @[{tok: token::BINOP(token::STAR), op: ast::mul, prec: 12},
{tok: token::BINOP(token::SLASH), op: ast::div, prec: 12},
{tok: token::BINOP(token::PERCENT), op: ast::rem, prec: 12},
// 'as' sits between here with 11
{tok: token::BINOP(token::PLUS), op: ast::add, prec: 10},
{tok: token::BINOP(token::MINUS), op: ast::subtract, prec: 10},
{tok: token::BINOP(token::LSL), op: ast::lsl, prec: 9},
@@ -1222,7 +1305,7 @@ fn parse_binops(p: parser) -> @ast::expr {

const unop_prec: int = 100;

const as_prec: int = 12;
const as_prec: int = 11;

fn parse_more_binops(p: parser, plhs: pexpr, min_prec: int) ->
@ast::expr {
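All of the parser changes funnel through `maybe_parse_vstore`: on seeing a `/` it decides the storage class from the next token (`@`, `~`, `_`, a non-negative integer literal, or `&` with an optional region name). Both `parse_ty_postfix` and `parse_bottom_expr` call it, the latter only after a string literal or a vector expression, as the comment in that hunk notes. The precedence table is also reshuffled so that `*`, `/`, and `%` now bind tighter than `as`, i.e. `a / b as T` parses as `(a / b) as T`. Below is a standalone sketch of the same case analysis, with a textual suffix standing in for the token stream and all names invented for the example:

```rust
// Illustrative sketch only, not the rustc parser: the same dispatch as
// `maybe_parse_vstore`, done on a textual suffix instead of a token stream.
#[derive(Debug)]
enum VStore {
    Fixed(Option<u64>),    // /_  or  /4
    Uniq,                  // /~
    Boxed,                 // /@
    Slice(Option<String>), // /&  or  /&r
}

fn maybe_parse_vstore(suffix: &str) -> Option<VStore> {
    // No leading '/' means there is no vstore suffix at all.
    let rest = suffix.strip_prefix('/')?;
    match rest {
        "~" => Some(VStore::Uniq),
        "@" => Some(VStore::Boxed),
        "_" => Some(VStore::Fixed(None)),
        s if s.starts_with('&') => {
            // "&" alone is an inferred-region slice; "&name" names the region.
            let name = &s[1..];
            Some(VStore::Slice(if name.is_empty() {
                None
            } else {
                Some(name.to_string())
            }))
        }
        n => Some(VStore::Fixed(Some(n.parse::<u64>().ok()?))),
    }
}

fn main() {
    for s in ["/~", "/@", "/_", "/4", "/&", "/&r", "/x"] {
        println!("{s:>4} -> {:?}", maybe_parse_vstore(s));
    }
}
```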
28 changes: 28 additions & 0 deletions src/librustsyntax/print/pprust.rs
@@ -378,6 +378,10 @@ fn print_type(s: ps, &&ty: @ast::ty) {
space(s.s);
word(s.s, constrs_str(cs, ty_constr_to_str));
}
ast::ty_vstore(t, v) {
print_type(s, t);
print_vstore(s, v);
}
ast::ty_mac(_) {
fail "print_type doesn't know how to print a ty_mac";
}
@@ -810,12 +814,36 @@ fn print_mac(s: ps, m: ast::mac) {
}
}

fn print_vstore(s: ps, t: ast::vstore) {
alt t {
ast::vstore_fixed(some(i)) { word_space(s, #fmt("/%u", i)); }
ast::vstore_fixed(none) { word_space(s, "/_"); }
ast::vstore_uniq { word_space(s, "/~"); }
ast::vstore_box { word_space(s, "/@"); }
ast::vstore_slice(r) {
alt r.node {
ast::re_inferred { word_space(s, "/&"); }
ast::re_self { word_space(s, "/&self"); }
ast::re_static { word_space(s, "/&static"); }
ast::re_named(name) {
word(s.s, "/&");
word_space(s, name);
}
}
}
}
}

fn print_expr(s: ps, &&expr: @ast::expr) {
maybe_print_comment(s, expr.span.lo);
ibox(s, indent_unit);
let ann_node = node_expr(s, expr);
s.ann.pre(ann_node);
alt expr.node {
ast::expr_vstore(e, v) {
print_expr(s, e);
print_vstore(s, v);
}
ast::expr_vec(exprs, mutbl) {
ibox(s, indent_unit);
word(s.s, "[");
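`print_vstore` is the inverse direction: after the underlying type or expression is printed, the storage suffix is re-emitted, so the new syntax round-trips through the pretty printer. A small sketch of that mapping, again using an invented `VStore` mirror rather than the compiler's own types:

```rust
// Sketch of the suffix strings `print_vstore` writes out, keyed on the same
// cases as the pprust.rs hunk above; names are invented for the example.
enum VStore {
    Fixed(Option<usize>),
    Uniq,
    Boxed,
    Slice(Option<String>), // None = inferred region, Some(name) = named region
}

fn vstore_suffix(v: &VStore) -> String {
    match v {
        VStore::Fixed(Some(n)) => format!("/{n}"),
        VStore::Fixed(None) => "/_".to_string(),
        VStore::Uniq => "/~".to_string(),
        VStore::Boxed => "/@".to_string(),
        VStore::Slice(None) => "/&".to_string(),
        VStore::Slice(Some(name)) => format!("/&{name}"),
    }
}

fn main() {
    // e.g. a boxed vector expression prints as "[1, 2, 3]/@"
    println!("[1, 2, 3]{}", vstore_suffix(&VStore::Boxed));
}
```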