make CrateContext fields private
spernsteiner committed Sep 5, 2014
1 parent 67b97ab commit cf35cb3
Showing 27 changed files with 544 additions and 377 deletions.
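
The diffs below all follow one pattern: call sites stop reaching into CrateContext fields directly (ccx.tcx, ccx.tn, ccx.stats, ...) and go through accessor methods instead (ccx.tcx(), ccx.tn(), ccx.stats(), ...). The accessor definitions themselves live in middle/trans/context.rs and are not part of this excerpt; what follows is only a minimal sketch of the pattern, using placeholder field types and current Rust syntax rather than the real 2014-era librustc definitions.

// Sketch only: simplified stand-ins for CrateContext internals, not the
// actual librustc types. Fields become private; borrowing accessors replace
// direct field access at every call site.
use std::cell::{Cell, RefCell};
use std::collections::HashMap;

struct Stats {
    n_llvm_insns: Cell<u64>,
}

struct CrateContext {
    stats: Stats,                                   // was a public field
    type_hashcodes: RefCell<HashMap<u32, String>>,  // was a public field
}

impl CrateContext {
    // Callers change from `ccx.stats.n_llvm_insns` to
    // `ccx.stats().n_llvm_insns`, and so on for every former field.
    fn stats(&self) -> &Stats {
        &self.stats
    }

    fn type_hashcodes(&self) -> &RefCell<HashMap<u32, String>> {
        &self.type_hashcodes
    }
}

fn main() {
    let ccx = CrateContext {
        stats: Stats { n_llvm_insns: Cell::new(0) },
        type_hashcodes: RefCell::new(HashMap::new()),
    };
    ccx.stats().n_llvm_insns.set(ccx.stats().n_llvm_insns.get() + 1);
    ccx.type_hashcodes().borrow_mut().insert(7, "hash".to_string());
    assert_eq!(ccx.stats().n_llvm_insns.get(), 1);
}

Returning a borrow of the RefCell (rather than some wrapper) keeps the existing .borrow()/.borrow_mut() call sites in the diff unchanged apart from the added parentheses.
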
8 changes: 4 additions & 4 deletions src/librustc/back/link.rs
@@ -715,14 +715,14 @@ fn symbol_hash(tcx: &ty::ctxt,
}

fn get_symbol_hash(ccx: &CrateContext, t: ty::t) -> String {
- match ccx.type_hashcodes.borrow().find(&t) {
+ match ccx.type_hashcodes().borrow().find(&t) {
Some(h) => return h.to_string(),
None => {}
}

- let mut symbol_hasher = ccx.symbol_hasher.borrow_mut();
- let hash = symbol_hash(ccx.tcx(), &mut *symbol_hasher, t, &ccx.link_meta);
- ccx.type_hashcodes.borrow_mut().insert(t, hash.clone());
+ let mut symbol_hasher = ccx.symbol_hasher().borrow_mut();
+ let hash = symbol_hash(ccx.tcx(), &mut *symbol_hasher, t, ccx.link_meta());
+ ccx.type_hashcodes().borrow_mut().insert(t, hash.clone());
hash
}

2 changes: 1 addition & 1 deletion src/librustc/middle/trans/_match.rs
@@ -563,7 +563,7 @@ fn get_branches<'a>(bcx: &'a Block, m: &[Match], col: uint) -> Vec<Opt<'a>> {
}
ast::PatIdent(..) | ast::PatEnum(..) | ast::PatStruct(..) => {
// This is either an enum variant or a variable binding.
- let opt_def = ccx.tcx.def_map.borrow().find_copy(&cur.id);
+ let opt_def = ccx.tcx().def_map.borrow().find_copy(&cur.id);
match opt_def {
Some(def::DefVariant(enum_id, var_id, _)) => {
let variant = ty::enum_variant_with_id(ccx.tcx(), enum_id, var_id);
6 changes: 3 additions & 3 deletions src/librustc/middle/trans/adt.rs
@@ -150,14 +150,14 @@ pub fn represent_node(bcx: &Block, node: ast::NodeId) -> Rc<Repr> {
/// Decides how to represent a given type.
pub fn represent_type(cx: &CrateContext, t: ty::t) -> Rc<Repr> {
debug!("Representing: {}", ty_to_string(cx.tcx(), t));
- match cx.adt_reprs.borrow().find(&t) {
+ match cx.adt_reprs().borrow().find(&t) {
Some(repr) => return repr.clone(),
None => {}
}

let repr = Rc::new(represent_type_uncached(cx, t));
debug!("Represented as: {:?}", repr)
- cx.adt_reprs.borrow_mut().insert(t, repr.clone());
+ cx.adt_reprs().borrow_mut().insert(t, repr.clone());
repr
}

@@ -423,7 +423,7 @@ fn range_to_inttype(cx: &CrateContext, hint: Hint, bounds: &IntBounds) -> IntTyp
attempts = choose_shortest;
},
attr::ReprPacked => {
- cx.tcx.sess.bug("range_to_inttype: found ReprPacked on an enum");
+ cx.tcx().sess.bug("range_to_inttype: found ReprPacked on an enum");
}
}
for &ity in attempts.iter() {
172 changes: 86 additions & 86 deletions src/librustc/middle/trans/base.rs

Large diffs are not rendered by default.

10 changes: 5 additions & 5 deletions src/librustc/middle/trans/build.rs
@@ -352,7 +352,7 @@ pub fn Load(cx: &Block, pointer_val: ValueRef) -> ValueRef {
let eltty = if ty.kind() == llvm::Array {
ty.element_type()
} else {
- ccx.int_type
+ ccx.int_type()
};
return llvm::LLVMGetUndef(eltty.to_ref());
}
@@ -373,7 +373,7 @@ pub fn AtomicLoad(cx: &Block, pointer_val: ValueRef, order: AtomicOrdering) -> V
unsafe {
let ccx = cx.fcx.ccx;
if cx.unreachable.get() {
- return llvm::LLVMGetUndef(ccx.int_type.to_ref());
+ return llvm::LLVMGetUndef(ccx.int_type().to_ref());
}
B(cx).atomic_load(pointer_val, order)
}
@@ -388,7 +388,7 @@ pub fn LoadRangeAssert(cx: &Block, pointer_val: ValueRef, lo: c_ulonglong,
let eltty = if ty.kind() == llvm::Array {
ty.element_type()
} else {
- ccx.int_type
+ ccx.int_type()
};
unsafe {
llvm::LLVMGetUndef(eltty.to_ref())
@@ -658,7 +658,7 @@ pub fn _UndefReturn(cx: &Block, fn_: ValueRef) -> ValueRef {
let retty = if ty.kind() == llvm::Integer {
ty.return_type()
} else {
- ccx.int_type
+ ccx.int_type()
};
B(cx).count_insn("ret_undef");
llvm::LLVMGetUndef(retty.to_ref())
@@ -786,7 +786,7 @@ pub fn IsNotNull(cx: &Block, val: ValueRef) -> ValueRef {
pub fn PtrDiff(cx: &Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
unsafe {
let ccx = cx.fcx.ccx;
- if cx.unreachable.get() { return llvm::LLVMGetUndef(ccx.int_type.to_ref()); }
+ if cx.unreachable.get() { return llvm::LLVMGetUndef(ccx.int_type().to_ref()); }
B(cx).ptrdiff(lhs, rhs)
}
}
34 changes: 17 additions & 17 deletions src/librustc/middle/trans/builder.rs
@@ -38,21 +38,21 @@ pub fn noname() -> *const c_char {
impl<'a> Builder<'a> {
pub fn new(ccx: &'a CrateContext) -> Builder<'a> {
Builder {
- llbuilder: ccx.builder.b,
+ llbuilder: ccx.raw_builder(),
ccx: ccx,
}
}

pub fn count_insn(&self, category: &str) {
if self.ccx.sess().trans_stats() {
- self.ccx.stats.n_llvm_insns.set(self.ccx
- .stats
+ self.ccx.stats().n_llvm_insns.set(self.ccx
+ .stats()
.n_llvm_insns
.get() + 1);
}
if self.ccx.sess().count_llvm_insns() {
base::with_insn_ctxt(|v| {
- let mut h = self.ccx.stats.llvm_insns.borrow_mut();
+ let mut h = self.ccx.stats().llvm_insns.borrow_mut();

// Build version of path with cycles removed.

@@ -160,9 +160,9 @@ impl<'a> Builder<'a> {
self.count_insn("invoke");

debug!("Invoke {} with args ({})",
- self.ccx.tn.val_to_string(llfn),
+ self.ccx.tn().val_to_string(llfn),
args.iter()
- .map(|&v| self.ccx.tn.val_to_string(v))
+ .map(|&v| self.ccx.tn().val_to_string(v))
.collect::<Vec<String>>()
.connect(", "));

@@ -488,7 +488,7 @@ impl<'a> Builder<'a> {
let v = [min, max];

llvm::LLVMSetMetadata(value, llvm::MD_range as c_uint,
- llvm::LLVMMDNodeInContext(self.ccx.llcx,
+ llvm::LLVMMDNodeInContext(self.ccx.llcx(),
v.as_ptr(), v.len() as c_uint));
}

@@ -497,8 +497,8 @@ impl<'a> Builder<'a> {

pub fn store(&self, val: ValueRef, ptr: ValueRef) {
debug!("Store {} -> {}",
- self.ccx.tn.val_to_string(val),
- self.ccx.tn.val_to_string(ptr));
+ self.ccx.tn().val_to_string(val),
+ self.ccx.tn().val_to_string(ptr));
assert!(self.llbuilder.is_not_null());
self.count_insn("store");
unsafe {
@@ -508,8 +508,8 @@ impl<'a> Builder<'a> {

pub fn volatile_store(&self, val: ValueRef, ptr: ValueRef) {
debug!("Store {} -> {}",
- self.ccx.tn.val_to_string(val),
- self.ccx.tn.val_to_string(ptr));
+ self.ccx.tn().val_to_string(val),
+ self.ccx.tn().val_to_string(ptr));
assert!(self.llbuilder.is_not_null());
self.count_insn("store.volatile");
unsafe {
@@ -520,8 +520,8 @@ impl<'a> Builder<'a> {

pub fn atomic_store(&self, val: ValueRef, ptr: ValueRef, order: AtomicOrdering) {
debug!("Store {} -> {}",
- self.ccx.tn.val_to_string(val),
- self.ccx.tn.val_to_string(ptr));
+ self.ccx.tn().val_to_string(val),
+ self.ccx.tn().val_to_string(ptr));
self.count_insn("store.atomic");
unsafe {
let ty = Type::from_ref(llvm::LLVMTypeOf(ptr));
@@ -794,11 +794,11 @@ impl<'a> Builder<'a> {
else { llvm::False };

let argtys = inputs.iter().map(|v| {
debug!("Asm Input Type: {:?}", self.ccx.tn.val_to_string(*v));
debug!("Asm Input Type: {:?}", self.ccx.tn().val_to_string(*v));
val_ty(*v)
}).collect::<Vec<_>>();

debug!("Asm Output Type: {:?}", self.ccx.tn.type_to_string(output));
debug!("Asm Output Type: {:?}", self.ccx.tn().type_to_string(output));
let fty = Type::func(argtys.as_slice(), &output);
unsafe {
let v = llvm::LLVMInlineAsm(
@@ -812,9 +812,9 @@ impl<'a> Builder<'a> {
self.count_insn("call");

debug!("Call {} with args ({})",
- self.ccx.tn.val_to_string(llfn),
+ self.ccx.tn().val_to_string(llfn),
args.iter()
- .map(|&v| self.ccx.tn.val_to_string(v))
+ .map(|&v| self.ccx.tn().val_to_string(v))
.collect::<Vec<String>>()
.connect(", "));

2 changes: 1 addition & 1 deletion src/librustc/middle/trans/cabi_mips.rs
@@ -146,7 +146,7 @@ fn coerce_to_int(ccx: &CrateContext, size: uint) -> Vec<Type> {
let r = size % 32;
if r > 0 {
unsafe {
- args.push(Type::from_ref(llvm::LLVMIntTypeInContext(ccx.llcx, r as c_uint)));
+ args.push(Type::from_ref(llvm::LLVMIntTypeInContext(ccx.llcx(), r as c_uint)));
}
}

24 changes: 12 additions & 12 deletions src/librustc/middle/trans/cleanup.rs
@@ -87,7 +87,7 @@ impl<'a> CleanupMethods<'a> for FunctionContext<'a> {
*/

debug!("push_ast_cleanup_scope({})",
- self.ccx.tcx.map.node_to_string(id));
+ self.ccx.tcx().map.node_to_string(id));

// FIXME(#2202) -- currently closure bodies have a parent
// region, which messes up the assertion below, since there
@@ -101,7 +101,7 @@ impl<'a> CleanupMethods<'a> for FunctionContext<'a> {
// this new AST scope had better be its immediate child.
let top_scope = self.top_ast_scope();
if top_scope.is_some() {
- assert_eq!(self.ccx.tcx.region_maps.opt_encl_scope(id), top_scope);
+ assert_eq!(self.ccx.tcx().region_maps.opt_encl_scope(id), top_scope);
}

self.push_scope(CleanupScope::new(AstScopeKind(id)));
@@ -111,7 +111,7 @@ impl<'a> CleanupMethods<'a> for FunctionContext<'a> {
id: ast::NodeId,
exits: [&'a Block<'a>, ..EXIT_MAX]) {
debug!("push_loop_cleanup_scope({})",
- self.ccx.tcx.map.node_to_string(id));
+ self.ccx.tcx().map.node_to_string(id));
assert_eq!(Some(id), self.top_ast_scope());

self.push_scope(CleanupScope::new(LoopScopeKind(id, exits)));
@@ -135,7 +135,7 @@ impl<'a> CleanupMethods<'a> for FunctionContext<'a> {
*/

debug!("pop_and_trans_ast_cleanup_scope({})",
- self.ccx.tcx.map.node_to_string(cleanup_scope));
+ self.ccx.tcx().map.node_to_string(cleanup_scope));

assert!(self.top_scope(|s| s.kind.is_ast_with_id(cleanup_scope)));

@@ -154,7 +154,7 @@ impl<'a> CleanupMethods<'a> for FunctionContext<'a> {
*/

debug!("pop_loop_cleanup_scope({})",
- self.ccx.tcx.map.node_to_string(cleanup_scope));
+ self.ccx.tcx().map.node_to_string(cleanup_scope));

assert!(self.top_scope(|s| s.kind.is_loop_with_id(cleanup_scope)));

@@ -237,7 +237,7 @@ impl<'a> CleanupMethods<'a> for FunctionContext<'a> {

debug!("schedule_lifetime_end({:?}, val={})",
cleanup_scope,
- self.ccx.tn.val_to_string(val));
+ self.ccx.tn().val_to_string(val));

self.schedule_clean(cleanup_scope, drop as CleanupObj);
}
@@ -262,7 +262,7 @@ impl<'a> CleanupMethods<'a> for FunctionContext<'a> {

debug!("schedule_drop_mem({:?}, val={}, ty={})",
cleanup_scope,
- self.ccx.tn.val_to_string(val),
+ self.ccx.tn().val_to_string(val),
ty.repr(self.ccx.tcx()));

self.schedule_clean(cleanup_scope, drop as CleanupObj);
@@ -288,7 +288,7 @@ impl<'a> CleanupMethods<'a> for FunctionContext<'a> {

debug!("schedule_drop_and_zero_mem({:?}, val={}, ty={}, zero={})",
cleanup_scope,
- self.ccx.tn.val_to_string(val),
+ self.ccx.tn().val_to_string(val),
ty.repr(self.ccx.tcx()),
true);

@@ -314,7 +314,7 @@ impl<'a> CleanupMethods<'a> for FunctionContext<'a> {

debug!("schedule_drop_immediate({:?}, val={}, ty={})",
cleanup_scope,
- self.ccx.tn.val_to_string(val),
+ self.ccx.tn().val_to_string(val),
ty.repr(self.ccx.tcx()));

self.schedule_clean(cleanup_scope, drop as CleanupObj);
@@ -334,7 +334,7 @@ impl<'a> CleanupMethods<'a> for FunctionContext<'a> {

debug!("schedule_free_value({:?}, val={}, heap={:?})",
cleanup_scope,
- self.ccx.tn.val_to_string(val),
+ self.ccx.tn().val_to_string(val),
heap);

self.schedule_clean(cleanup_scope, drop as CleanupObj);
@@ -374,7 +374,7 @@ impl<'a> CleanupMethods<'a> for FunctionContext<'a> {

self.ccx.sess().bug(
format!("no cleanup scope {} found",
- self.ccx.tcx.map.node_to_string(cleanup_scope)).as_slice());
+ self.ccx.tcx().map.node_to_string(cleanup_scope)).as_slice());
}

fn schedule_clean_in_custom_scope(&self,
@@ -720,7 +720,7 @@ impl<'a> CleanupHelperMethods<'a> for FunctionContext<'a> {
let llpersonality = match pad_bcx.tcx().lang_items.eh_personality() {
Some(def_id) => callee::trans_fn_ref(pad_bcx, def_id, ExprId(0)),
None => {
- let mut personality = self.ccx.eh_personality.borrow_mut();
+ let mut personality = self.ccx.eh_personality().borrow_mut();
match *personality {
Some(llpersonality) => llpersonality,
None => {
16 changes: 8 additions & 8 deletions src/librustc/middle/trans/closure.rs
@@ -427,12 +427,12 @@ pub fn trans_expr_fn<'a>(
pub fn get_or_create_declaration_if_unboxed_closure(ccx: &CrateContext,
closure_id: ast::DefId)
-> Option<ValueRef> {
- if !ccx.tcx.unboxed_closures.borrow().contains_key(&closure_id) {
+ if !ccx.tcx().unboxed_closures.borrow().contains_key(&closure_id) {
// Not an unboxed closure.
return None
}

- match ccx.unboxed_closure_vals.borrow().find(&closure_id) {
+ match ccx.unboxed_closure_vals().borrow().find(&closure_id) {
Some(llfn) => {
debug!("get_or_create_declaration_if_unboxed_closure(): found \
closure");
@@ -441,10 +441,10 @@ pub fn get_or_create_declaration_if_unboxed_closure(ccx: &CrateContext,
None => {}
}

- let function_type = ty::mk_unboxed_closure(&ccx.tcx,
+ let function_type = ty::mk_unboxed_closure(ccx.tcx(),
closure_id,
ty::ReStatic);
- let symbol = ccx.tcx.map.with_path(closure_id.node, |path| {
+ let symbol = ccx.tcx().map.with_path(closure_id.node, |path| {
mangle_internal_name_by_path_and_seq(path, "unboxed_closure")
});

@@ -456,8 +456,8 @@ pub fn get_or_create_declaration_if_unboxed_closure(ccx: &CrateContext,
debug!("get_or_create_declaration_if_unboxed_closure(): inserting new \
closure {} (type {})",
closure_id,
- ccx.tn.type_to_string(val_ty(llfn)));
- ccx.unboxed_closure_vals.borrow_mut().insert(closure_id, llfn);
+ ccx.tn().type_to_string(val_ty(llfn)));
+ ccx.unboxed_closure_vals().borrow_mut().insert(closure_id, llfn);

Some(llfn)
}
@@ -554,7 +554,7 @@ pub fn get_wrapper_for_bare_fn(ccx: &CrateContext,
}
};

- match ccx.closure_bare_wrapper_cache.borrow().find(&fn_ptr) {
+ match ccx.closure_bare_wrapper_cache().borrow().find(&fn_ptr) {
Some(&llval) => return llval,
None => {}
}
@@ -581,7 +581,7 @@ pub fn get_wrapper_for_bare_fn(ccx: &CrateContext,
decl_rust_fn(ccx, closure_ty, name.as_slice())
};

- ccx.closure_bare_wrapper_cache.borrow_mut().insert(fn_ptr, llfn);
+ ccx.closure_bare_wrapper_cache().borrow_mut().insert(fn_ptr, llfn);

// This is only used by statics inlined from a different crate.
if !is_local {
