debuginfo: Make debuginfo source location assignment more stable (Pt. 1)

So far, the source location an LLVM instruction was linked to was controlled by
`debuginfo::set_source_location()` and `debuginfo::clear_source_location()`.
This interface mimicked how LLVM's `IRBuilder` handles debug location
assignment. While this interface has some theoretical performance benefits, it
also makes things terribly unstable: One sets some quasi-global state and then
hopes that it is still correct when a given instruction is emitted---an
assumption that has proven not to hold rather too often.

This patch requires the debug source location to be passed to the actual
instruction emitting function. This makes source location assignment explicit
and will prevent future changes to `trans` from accidentally breaking things in
the majority of cases.

This patch does not yet implement the new principle for all instruction kinds,
but the stepping experience should nonetheless already be significantly
improved.
This commit is contained in:
Michael Woerister 2014-12-11 13:53:30 +01:00
parent 51e28dd0c8
commit a55ef3a032
14 changed files with 889 additions and 405 deletions

View file

@ -208,10 +208,10 @@ use trans::cleanup::{self, CleanupMethods};
use trans::common::*;
use trans::consts;
use trans::datum::*;
use trans::debuginfo::{self, DebugLoc, ToDebugLoc};
use trans::expr::{self, Dest};
use trans::tvec;
use trans::type_of;
use trans::debuginfo;
use middle::ty::{self, Ty};
use session::config::FullDebugInfo;
use util::common::indenter;
@ -632,7 +632,7 @@ fn bind_subslice_pat(bcx: Block,
let slice_begin = InBoundsGEP(bcx, base, &[C_uint(bcx.ccx(), offset_left)]);
let slice_len_offset = C_uint(bcx.ccx(), offset_left + offset_right);
let slice_len = Sub(bcx, len, slice_len_offset);
let slice_len = Sub(bcx, len, slice_len_offset, DebugLoc::None);
let slice_ty = ty::mk_slice(bcx.tcx(),
bcx.tcx().mk_region(ty::ReStatic),
ty::mt {ty: vt.unit_ty, mutbl: ast::MutImmutable});
@ -656,7 +656,7 @@ fn extract_vec_elems<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
elems.extend(range(0, before).map(|i| GEPi(bcx, base, &[i])));
elems.extend(range(0, after).rev().map(|i| {
InBoundsGEP(bcx, base, &[
Sub(bcx, len, C_uint(bcx.ccx(), i + 1))
Sub(bcx, len, C_uint(bcx.ccx(), i + 1), DebugLoc::None)
])
}));
ExtractedBlock { vals: elems, bcx: bcx }
@ -731,7 +731,7 @@ impl FailureHandler {
Infallible =>
panic!("attempted to panic in a non-panicking panic handler!"),
JumpToBasicBlock(basic_block) =>
Br(bcx, basic_block),
Br(bcx, basic_block, DebugLoc::None),
Unreachable =>
build::Unreachable(bcx)
}
@ -889,7 +889,7 @@ fn compile_guard<'a, 'p, 'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
}
}
with_cond(bcx, Not(bcx, val), |bcx| {
with_cond(bcx, Not(bcx, val, guard_expr.debug_loc()), |bcx| {
// Guard does not match: remove all bindings from the lllocals table
for (_, &binding_info) in data.bindings_map.iter() {
call_lifetime_end(bcx, binding_info.llmatch);
@ -966,7 +966,7 @@ fn compile_submatch<'a, 'p, 'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
}
_ => ()
}
Br(bcx, data.bodycx.llbb);
Br(bcx, data.bodycx.llbb, DebugLoc::None);
}
}
}
@ -1096,7 +1096,7 @@ fn compile_submatch_continue<'a, 'p, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
if !exhaustive || i + 1 < len {
opt_cx = bcx.fcx.new_temp_block("match_case");
match kind {
Single => Br(bcx, opt_cx.llbb),
Single => Br(bcx, opt_cx.llbb, DebugLoc::None),
Switch => {
match opt.trans(bcx) {
SingleResult(r) => {
@ -1131,7 +1131,7 @@ fn compile_submatch_continue<'a, 'p, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
compare_scalar_types(
bcx, test_val, vend,
t, ast::BiLe);
Result::new(bcx, And(bcx, llge, llle))
Result::new(bcx, And(bcx, llge, llle, DebugLoc::None))
}
LowerBound(Result { bcx, val }) => {
compare_scalar_types(bcx, test_val, val, t, ast::BiGe)
@ -1149,12 +1149,12 @@ fn compile_submatch_continue<'a, 'p, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
if i + 1 < len && (guarded || multi_pats || kind == CompareSliceLength) {
branch_chk = Some(JumpToBasicBlock(bcx.llbb));
}
CondBr(after_cx, matches, opt_cx.llbb, bcx.llbb);
CondBr(after_cx, matches, opt_cx.llbb, bcx.llbb, DebugLoc::None);
}
_ => ()
}
} else if kind == Compare || kind == CompareSliceLength {
Br(bcx, else_cx.llbb);
Br(bcx, else_cx.llbb, DebugLoc::None);
}
let mut size = 0u;
@ -1194,7 +1194,7 @@ fn compile_submatch_continue<'a, 'p, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
// Compile the fall-through case, if any
if !exhaustive && kind != Single {
if kind == Compare || kind == CompareSliceLength {
Br(bcx, else_cx.llbb);
Br(bcx, else_cx.llbb, DebugLoc::None);
}
match chk {
// If there is only one default arm left, move on to the next

View file

@ -62,6 +62,7 @@ use trans::cleanup;
use trans::cleanup::CleanupMethods;
use trans::common::*;
use trans::datum;
use trans::debuginfo::DebugLoc;
use trans::machine;
use trans::monomorphize;
use trans::type_::Type;
@ -979,7 +980,7 @@ pub fn fold_variants<'blk, 'tcx, F>(bcx: Block<'blk, 'tcx>,
let variant_value = PointerCast(variant_cx, value, real_ty.ptr_to());
variant_cx = f(variant_cx, case, variant_value);
Br(variant_cx, bcx_next.llbb);
Br(variant_cx, bcx_next.llbb, DebugLoc::None);
}
bcx_next

View file

@ -57,7 +57,7 @@ use trans::closure;
use trans::common::{Block, C_bool, C_bytes_in_context, C_i32, C_integral};
use trans::common::{C_null, C_struct_in_context, C_u64, C_u8, C_undef};
use trans::common::{CrateContext, ExternMap, FunctionContext};
use trans::common::{NodeInfo, Result};
use trans::common::{Result};
use trans::common::{node_id_type, return_type_is_void};
use trans::common::{tydesc_info, type_is_immediate};
use trans::common::{type_is_zero_size, val_ty};
@ -66,7 +66,7 @@ use trans::consts;
use trans::context::SharedCrateContext;
use trans::controlflow;
use trans::datum;
use trans::debuginfo;
use trans::debuginfo::{self, DebugLoc};
use trans::expr;
use trans::foreign;
use trans::glue;
@ -792,7 +792,7 @@ pub fn iter_structural_ty<'blk, 'tcx, F>(cx: Block<'blk, 'tcx>,
&**variant,
substs,
&mut f);
Br(variant_cx, next_cx.llbb);
Br(variant_cx, next_cx.llbb, DebugLoc::None);
}
cx = next_cx;
}
@ -957,7 +957,7 @@ pub fn invoke<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
llfn: ValueRef,
llargs: &[ValueRef],
fn_ty: Ty<'tcx>,
call_info: Option<NodeInfo>)
debug_loc: DebugLoc)
-> (ValueRef, Block<'blk, 'tcx>) {
let _icx = push_ctxt("invoke_");
if bcx.unreachable.get() {
@ -983,17 +983,13 @@ pub fn invoke<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
let normal_bcx = bcx.fcx.new_temp_block("normal-return");
let landing_pad = bcx.fcx.get_landing_pad();
match call_info {
Some(info) => debuginfo::set_source_location(bcx.fcx, info.id, info.span),
None => debuginfo::clear_source_location(bcx.fcx)
};
let llresult = Invoke(bcx,
llfn,
&llargs[],
normal_bcx.llbb,
landing_pad,
Some(attributes));
Some(attributes),
debug_loc);
return (llresult, normal_bcx);
} else {
debug!("calling {} at {:?}", bcx.val_to_string(llfn), bcx.llbb);
@ -1001,12 +997,11 @@ pub fn invoke<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
debug!("arg: {}", bcx.val_to_string(llarg));
}
match call_info {
Some(info) => debuginfo::set_source_location(bcx.fcx, info.id, info.span),
None => debuginfo::clear_source_location(bcx.fcx)
};
let llresult = Call(bcx, llfn, &llargs[], Some(attributes));
let llresult = Call(bcx,
llfn,
&llargs[],
Some(attributes),
debug_loc);
return (llresult, bcx);
}
}
@ -1094,10 +1089,10 @@ pub fn with_cond<'blk, 'tcx, F>(bcx: Block<'blk, 'tcx>,
let fcx = bcx.fcx;
let next_cx = fcx.new_temp_block("next");
let cond_cx = fcx.new_temp_block("cond");
CondBr(bcx, val, cond_cx.llbb, next_cx.llbb);
CondBr(bcx, val, cond_cx.llbb, next_cx.llbb, DebugLoc::None);
let after_cx = f(cond_cx);
if !after_cx.terminated.get() {
Br(after_cx, next_cx.llbb);
Br(after_cx, next_cx.llbb, DebugLoc::None);
}
next_cx
}
@ -1113,7 +1108,7 @@ pub fn call_lifetime_start(cx: Block, ptr: ValueRef) {
let llsize = C_u64(ccx, machine::llsize_of_alloc(ccx, val_ty(ptr).element_type()));
let ptr = PointerCast(cx, ptr, Type::i8p(ccx));
let lifetime_start = ccx.get_intrinsic(&"llvm.lifetime.start");
Call(cx, lifetime_start, &[llsize, ptr], None);
Call(cx, lifetime_start, &[llsize, ptr], None, DebugLoc::None);
}
pub fn call_lifetime_end(cx: Block, ptr: ValueRef) {
@ -1127,7 +1122,7 @@ pub fn call_lifetime_end(cx: Block, ptr: ValueRef) {
let llsize = C_u64(ccx, machine::llsize_of_alloc(ccx, val_ty(ptr).element_type()));
let ptr = PointerCast(cx, ptr, Type::i8p(ccx));
let lifetime_end = ccx.get_intrinsic(&"llvm.lifetime.end");
Call(cx, lifetime_end, &[llsize, ptr], None);
Call(cx, lifetime_end, &[llsize, ptr], None, DebugLoc::None);
}
pub fn call_memcpy(cx: Block, dst: ValueRef, src: ValueRef, n_bytes: ValueRef, align: u32) {
@ -1144,7 +1139,7 @@ pub fn call_memcpy(cx: Block, dst: ValueRef, src: ValueRef, n_bytes: ValueRef, a
let size = IntCast(cx, n_bytes, ccx.int_type());
let align = C_i32(ccx, align as i32);
let volatile = C_bool(ccx, false);
Call(cx, memcpy, &[dst_ptr, src_ptr, size, align, volatile], None);
Call(cx, memcpy, &[dst_ptr, src_ptr, size, align, volatile], None, DebugLoc::None);
}
pub fn memcpy_ty<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
@ -1697,13 +1692,14 @@ fn copy_unboxed_closure_args_to_allocas<'blk, 'tcx>(
// and builds the return block.
pub fn finish_fn<'blk, 'tcx>(fcx: &'blk FunctionContext<'blk, 'tcx>,
last_bcx: Block<'blk, 'tcx>,
retty: ty::FnOutput<'tcx>) {
retty: ty::FnOutput<'tcx>,
ret_debug_loc: DebugLoc) {
let _icx = push_ctxt("finish_fn");
let ret_cx = match fcx.llreturn.get() {
Some(llreturn) => {
if !last_bcx.terminated.get() {
Br(last_bcx, llreturn);
Br(last_bcx, llreturn, DebugLoc::None);
}
raw_block(fcx, false, llreturn)
}
@ -1713,7 +1709,7 @@ pub fn finish_fn<'blk, 'tcx>(fcx: &'blk FunctionContext<'blk, 'tcx>,
// This shouldn't need to recompute the return type,
// as new_fn_ctxt did it already.
let substd_retty = fcx.monomorphize(&retty);
build_return_block(fcx, ret_cx, substd_retty);
build_return_block(fcx, ret_cx, substd_retty, ret_debug_loc);
debuginfo::clear_source_location(fcx);
fcx.cleanup();
@ -1722,10 +1718,11 @@ pub fn finish_fn<'blk, 'tcx>(fcx: &'blk FunctionContext<'blk, 'tcx>,
// Builds the return block for a function.
pub fn build_return_block<'blk, 'tcx>(fcx: &FunctionContext<'blk, 'tcx>,
ret_cx: Block<'blk, 'tcx>,
retty: ty::FnOutput<'tcx>) {
retty: ty::FnOutput<'tcx>,
ret_debug_location: DebugLoc) {
if fcx.llretslotptr.get().is_none() ||
(!fcx.needs_ret_allocas && fcx.caller_expects_out_pointer) {
return RetVoid(ret_cx);
return RetVoid(ret_cx, ret_debug_location);
}
let retslot = if fcx.needs_ret_allocas {
@ -1755,9 +1752,9 @@ pub fn build_return_block<'blk, 'tcx>(fcx: &FunctionContext<'blk, 'tcx>,
if let ty::FnConverging(retty) = retty {
store_ty(ret_cx, retval, get_param(fcx.llfn, 0), retty);
}
RetVoid(ret_cx)
RetVoid(ret_cx, ret_debug_location)
} else {
Ret(ret_cx, retval)
Ret(ret_cx, retval, ret_debug_location)
}
}
// Otherwise, copy the return value to the ret slot
@ -1765,16 +1762,16 @@ pub fn build_return_block<'blk, 'tcx>(fcx: &FunctionContext<'blk, 'tcx>,
ty::FnConverging(retty) => {
if fcx.caller_expects_out_pointer {
memcpy_ty(ret_cx, get_param(fcx.llfn, 0), retslot, retty);
RetVoid(ret_cx)
RetVoid(ret_cx, ret_debug_location)
} else {
Ret(ret_cx, load_ty(ret_cx, retslot, retty))
Ret(ret_cx, load_ty(ret_cx, retslot, retty), ret_debug_location)
}
}
ty::FnDiverging => {
if fcx.caller_expects_out_pointer {
RetVoid(ret_cx)
RetVoid(ret_cx, ret_debug_location)
} else {
Ret(ret_cx, C_undef(Type::nil(fcx.ccx)))
Ret(ret_cx, C_undef(Type::nil(fcx.ccx)), ret_debug_location)
}
}
}
@ -1905,7 +1902,7 @@ pub fn trans_closure<'a, 'b, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
match fcx.llreturn.get() {
Some(_) => {
Br(bcx, fcx.return_exit_block());
Br(bcx, fcx.return_exit_block(), DebugLoc::None);
fcx.pop_custom_cleanup_scope(arg_scope);
}
None => {
@ -1924,8 +1921,11 @@ pub fn trans_closure<'a, 'b, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
}
}
let ret_debug_loc = DebugLoc::At(fn_cleanup_debug_loc.id,
fn_cleanup_debug_loc.span);
// Insert the mandatory first few basic blocks before lltop.
finish_fn(&fcx, bcx, output_type);
finish_fn(&fcx, bcx, output_type, ret_debug_loc);
}
// trans_fn: creates an LLVM function corresponding to a source language
@ -1977,7 +1977,7 @@ pub fn trans_named_tuple_constructor<'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
disr: ty::Disr,
args: callee::CallArgs,
dest: expr::Dest,
call_info: Option<NodeInfo>)
debug_loc: DebugLoc)
-> Result<'blk, 'tcx> {
let ccx = bcx.fcx.ccx;
@ -2016,7 +2016,7 @@ pub fn trans_named_tuple_constructor<'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
&fields[],
None,
expr::SaveIn(llresult),
call_info);
debug_loc);
}
_ => ccx.sess().bug("expected expr as arguments for variant/struct tuple constructor")
}
@ -2027,7 +2027,7 @@ pub fn trans_named_tuple_constructor<'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
let bcx = match dest {
expr::SaveIn(_) => bcx,
expr::Ignore => {
glue::drop_ty(bcx, llresult, result_ty, call_info)
glue::drop_ty(bcx, llresult, result_ty, debug_loc)
}
};
@ -2094,7 +2094,7 @@ fn trans_enum_variant_or_tuple_like_struct<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx
adt::trans_set_discr(bcx, &*repr, dest, disr);
}
finish_fn(&fcx, bcx, result_ty);
finish_fn(&fcx, bcx, result_ty, DebugLoc::None);
}
fn enum_variant_size_lint(ccx: &CrateContext, enum_def: &ast::EnumDef, sp: Span, id: ast::NodeId) {

View file

@ -20,6 +20,7 @@ use syntax::codemap::Span;
use trans::builder::Builder;
use trans::type_::Type;
use trans::debuginfo::DebugLoc;
use libc::{c_uint, c_char};
@ -48,41 +49,59 @@ pub fn B<'blk, 'tcx>(cx: Block<'blk, 'tcx>) -> Builder<'blk, 'tcx> {
// for (panic/break/return statements, call to diverging functions, etc), and
// further instructions to the block should simply be ignored.
pub fn RetVoid(cx: Block) {
if cx.unreachable.get() { return; }
pub fn RetVoid(cx: Block, debug_loc: DebugLoc) {
if cx.unreachable.get() {
return;
}
check_not_terminated(cx);
terminate(cx, "RetVoid");
debug_loc.apply(cx.fcx);
B(cx).ret_void();
}
pub fn Ret(cx: Block, v: ValueRef) {
if cx.unreachable.get() { return; }
pub fn Ret(cx: Block, v: ValueRef, debug_loc: DebugLoc) {
if cx.unreachable.get() {
return;
}
check_not_terminated(cx);
terminate(cx, "Ret");
debug_loc.apply(cx.fcx);
B(cx).ret(v);
}
pub fn AggregateRet(cx: Block, ret_vals: &[ValueRef]) {
if cx.unreachable.get() { return; }
pub fn AggregateRet(cx: Block,
ret_vals: &[ValueRef],
debug_loc: DebugLoc) {
if cx.unreachable.get() {
return;
}
check_not_terminated(cx);
terminate(cx, "AggregateRet");
debug_loc.apply(cx.fcx);
B(cx).aggregate_ret(ret_vals);
}
pub fn Br(cx: Block, dest: BasicBlockRef) {
if cx.unreachable.get() { return; }
pub fn Br(cx: Block, dest: BasicBlockRef, debug_loc: DebugLoc) {
if cx.unreachable.get() {
return;
}
check_not_terminated(cx);
terminate(cx, "Br");
debug_loc.apply(cx.fcx);
B(cx).br(dest);
}
pub fn CondBr(cx: Block,
if_: ValueRef,
then: BasicBlockRef,
else_: BasicBlockRef) {
if cx.unreachable.get() { return; }
else_: BasicBlockRef,
debug_loc: DebugLoc) {
if cx.unreachable.get() {
return;
}
check_not_terminated(cx);
terminate(cx, "CondBr");
debug_loc.apply(cx.fcx);
B(cx).cond_br(if_, then, else_);
}
@ -101,10 +120,16 @@ pub fn AddCase(s: ValueRef, on_val: ValueRef, dest: BasicBlockRef) {
}
}
pub fn IndirectBr(cx: Block, addr: ValueRef, num_dests: uint) {
if cx.unreachable.get() { return; }
pub fn IndirectBr(cx: Block,
addr: ValueRef,
num_dests: uint,
debug_loc: DebugLoc) {
if cx.unreachable.get() {
return;
}
check_not_terminated(cx);
terminate(cx, "IndirectBr");
debug_loc.apply(cx.fcx);
B(cx).indirect_br(addr, num_dests);
}
@ -113,7 +138,8 @@ pub fn Invoke(cx: Block,
args: &[ValueRef],
then: BasicBlockRef,
catch: BasicBlockRef,
attributes: Option<AttrBuilder>)
attributes: Option<AttrBuilder>,
debug_loc: DebugLoc)
-> ValueRef {
if cx.unreachable.get() {
return C_null(Type::i8(cx.ccx()));
@ -123,6 +149,7 @@ pub fn Invoke(cx: Block,
debug!("Invoke({} with arguments ({}))",
cx.val_to_string(fn_),
args.iter().map(|a| cx.val_to_string(*a)).collect::<Vec<String>>().connect(", "));
debug_loc.apply(cx.fcx);
B(cx).invoke(fn_, args, then, catch, attributes)
}
@ -143,176 +170,378 @@ pub fn _Undef(val: ValueRef) -> ValueRef {
}
/* Arithmetic */
pub fn Add(cx: Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
if cx.unreachable.get() { return _Undef(lhs); }
pub fn Add(cx: Block,
lhs: ValueRef,
rhs: ValueRef,
debug_loc: DebugLoc)
-> ValueRef {
if cx.unreachable.get() {
return _Undef(lhs);
}
debug_loc.apply(cx.fcx);
B(cx).add(lhs, rhs)
}
pub fn NSWAdd(cx: Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
if cx.unreachable.get() { return _Undef(lhs); }
pub fn NSWAdd(cx: Block,
lhs: ValueRef,
rhs: ValueRef,
debug_loc: DebugLoc)
-> ValueRef {
if cx.unreachable.get() {
return _Undef(lhs);
}
debug_loc.apply(cx.fcx);
B(cx).nswadd(lhs, rhs)
}
pub fn NUWAdd(cx: Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
if cx.unreachable.get() { return _Undef(lhs); }
pub fn NUWAdd(cx: Block,
lhs: ValueRef,
rhs: ValueRef,
debug_loc: DebugLoc)
-> ValueRef {
if cx.unreachable.get() {
return _Undef(lhs);
}
debug_loc.apply(cx.fcx);
B(cx).nuwadd(lhs, rhs)
}
pub fn FAdd(cx: Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
if cx.unreachable.get() { return _Undef(lhs); }
pub fn FAdd(cx: Block,
lhs: ValueRef,
rhs: ValueRef,
debug_loc: DebugLoc)
-> ValueRef {
if cx.unreachable.get() {
return _Undef(lhs);
}
debug_loc.apply(cx.fcx);
B(cx).fadd(lhs, rhs)
}
pub fn Sub(cx: Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
if cx.unreachable.get() { return _Undef(lhs); }
pub fn Sub(cx: Block,
lhs: ValueRef,
rhs: ValueRef,
debug_loc: DebugLoc)
-> ValueRef {
if cx.unreachable.get() {
return _Undef(lhs);
}
debug_loc.apply(cx.fcx);
B(cx).sub(lhs, rhs)
}
pub fn NSWSub(cx: Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
if cx.unreachable.get() { return _Undef(lhs); }
pub fn NSWSub(cx: Block,
lhs: ValueRef,
rhs: ValueRef,
debug_loc: DebugLoc)
-> ValueRef {
if cx.unreachable.get() {
return _Undef(lhs);
}
debug_loc.apply(cx.fcx);
B(cx).nswsub(lhs, rhs)
}
pub fn NUWSub(cx: Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
if cx.unreachable.get() { return _Undef(lhs); }
pub fn NUWSub(cx: Block,
lhs: ValueRef,
rhs: ValueRef,
debug_loc: DebugLoc)
-> ValueRef {
if cx.unreachable.get() {
return _Undef(lhs);
}
debug_loc.apply(cx.fcx);
B(cx).nuwsub(lhs, rhs)
}
pub fn FSub(cx: Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
if cx.unreachable.get() { return _Undef(lhs); }
pub fn FSub(cx: Block,
lhs: ValueRef,
rhs: ValueRef,
debug_loc: DebugLoc)
-> ValueRef {
if cx.unreachable.get() {
return _Undef(lhs);
}
debug_loc.apply(cx.fcx);
B(cx).fsub(lhs, rhs)
}
pub fn Mul(cx: Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
if cx.unreachable.get() { return _Undef(lhs); }
pub fn Mul(cx: Block,
lhs: ValueRef,
rhs: ValueRef,
debug_loc: DebugLoc)
-> ValueRef {
if cx.unreachable.get() {
return _Undef(lhs);
}
debug_loc.apply(cx.fcx);
B(cx).mul(lhs, rhs)
}
pub fn NSWMul(cx: Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
if cx.unreachable.get() { return _Undef(lhs); }
pub fn NSWMul(cx: Block,
lhs: ValueRef,
rhs: ValueRef,
debug_loc: DebugLoc)
-> ValueRef {
if cx.unreachable.get() {
return _Undef(lhs);
}
debug_loc.apply(cx.fcx);
B(cx).nswmul(lhs, rhs)
}
pub fn NUWMul(cx: Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
if cx.unreachable.get() { return _Undef(lhs); }
pub fn NUWMul(cx: Block,
lhs: ValueRef,
rhs: ValueRef,
debug_loc: DebugLoc)
-> ValueRef {
if cx.unreachable.get() {
return _Undef(lhs);
}
debug_loc.apply(cx.fcx);
B(cx).nuwmul(lhs, rhs)
}
pub fn FMul(cx: Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
if cx.unreachable.get() { return _Undef(lhs); }
pub fn FMul(cx: Block,
lhs: ValueRef,
rhs: ValueRef,
debug_loc: DebugLoc)
-> ValueRef {
if cx.unreachable.get() {
return _Undef(lhs);
}
debug_loc.apply(cx.fcx);
B(cx).fmul(lhs, rhs)
}
pub fn UDiv(cx: Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
if cx.unreachable.get() { return _Undef(lhs); }
pub fn UDiv(cx: Block,
lhs: ValueRef,
rhs: ValueRef,
debug_loc: DebugLoc)
-> ValueRef {
if cx.unreachable.get() {
return _Undef(lhs);
}
debug_loc.apply(cx.fcx);
B(cx).udiv(lhs, rhs)
}
pub fn SDiv(cx: Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
if cx.unreachable.get() { return _Undef(lhs); }
pub fn SDiv(cx: Block,
lhs: ValueRef,
rhs: ValueRef,
debug_loc: DebugLoc)
-> ValueRef {
if cx.unreachable.get() {
return _Undef(lhs);
}
debug_loc.apply(cx.fcx);
B(cx).sdiv(lhs, rhs)
}
pub fn ExactSDiv(cx: Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
if cx.unreachable.get() { return _Undef(lhs); }
pub fn ExactSDiv(cx: Block,
lhs: ValueRef,
rhs: ValueRef,
debug_loc: DebugLoc)
-> ValueRef {
if cx.unreachable.get() {
return _Undef(lhs);
}
debug_loc.apply(cx.fcx);
B(cx).exactsdiv(lhs, rhs)
}
pub fn FDiv(cx: Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
if cx.unreachable.get() { return _Undef(lhs); }
pub fn FDiv(cx: Block,
lhs: ValueRef,
rhs: ValueRef,
debug_loc: DebugLoc)
-> ValueRef {
if cx.unreachable.get() {
return _Undef(lhs);
}
debug_loc.apply(cx.fcx);
B(cx).fdiv(lhs, rhs)
}
pub fn URem(cx: Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
if cx.unreachable.get() { return _Undef(lhs); }
pub fn URem(cx: Block,
lhs: ValueRef,
rhs: ValueRef,
debug_loc: DebugLoc)
-> ValueRef {
if cx.unreachable.get() {
return _Undef(lhs);
}
debug_loc.apply(cx.fcx);
B(cx).urem(lhs, rhs)
}
pub fn SRem(cx: Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
if cx.unreachable.get() { return _Undef(lhs); }
pub fn SRem(cx: Block,
lhs: ValueRef,
rhs: ValueRef,
debug_loc: DebugLoc)
-> ValueRef {
if cx.unreachable.get() {
return _Undef(lhs);
}
debug_loc.apply(cx.fcx);
B(cx).srem(lhs, rhs)
}
pub fn FRem(cx: Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
if cx.unreachable.get() { return _Undef(lhs); }
pub fn FRem(cx: Block,
lhs: ValueRef,
rhs: ValueRef,
debug_loc: DebugLoc)
-> ValueRef {
if cx.unreachable.get() {
return _Undef(lhs);
}
debug_loc.apply(cx.fcx);
B(cx).frem(lhs, rhs)
}
pub fn Shl(cx: Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
if cx.unreachable.get() { return _Undef(lhs); }
pub fn Shl(cx: Block,
lhs: ValueRef,
rhs: ValueRef,
debug_loc: DebugLoc)
-> ValueRef {
if cx.unreachable.get() {
return _Undef(lhs);
}
debug_loc.apply(cx.fcx);
B(cx).shl(lhs, rhs)
}
pub fn LShr(cx: Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
if cx.unreachable.get() { return _Undef(lhs); }
pub fn LShr(cx: Block,
lhs: ValueRef,
rhs: ValueRef,
debug_loc: DebugLoc)
-> ValueRef {
if cx.unreachable.get() {
return _Undef(lhs);
}
debug_loc.apply(cx.fcx);
B(cx).lshr(lhs, rhs)
}
pub fn AShr(cx: Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
if cx.unreachable.get() { return _Undef(lhs); }
pub fn AShr(cx: Block,
lhs: ValueRef,
rhs: ValueRef,
debug_loc: DebugLoc)
-> ValueRef {
if cx.unreachable.get() {
return _Undef(lhs);
}
debug_loc.apply(cx.fcx);
B(cx).ashr(lhs, rhs)
}
pub fn And(cx: Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
if cx.unreachable.get() { return _Undef(lhs); }
pub fn And(cx: Block,
lhs: ValueRef,
rhs: ValueRef,
debug_loc: DebugLoc)
-> ValueRef {
if cx.unreachable.get() {
return _Undef(lhs);
}
debug_loc.apply(cx.fcx);
B(cx).and(lhs, rhs)
}
pub fn Or(cx: Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
if cx.unreachable.get() { return _Undef(lhs); }
pub fn Or(cx: Block,
lhs: ValueRef,
rhs: ValueRef,
debug_loc: DebugLoc)
-> ValueRef {
if cx.unreachable.get() {
return _Undef(lhs);
}
debug_loc.apply(cx.fcx);
B(cx).or(lhs, rhs)
}
pub fn Xor(cx: Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
if cx.unreachable.get() { return _Undef(lhs); }
pub fn Xor(cx: Block,
lhs: ValueRef,
rhs: ValueRef,
debug_loc: DebugLoc)
-> ValueRef {
if cx.unreachable.get() {
return _Undef(lhs);
}
debug_loc.apply(cx.fcx);
B(cx).xor(lhs, rhs)
}
pub fn BinOp(cx: Block, op: Opcode, lhs: ValueRef, rhs: ValueRef)
pub fn BinOp(cx: Block,
op: Opcode,
lhs: ValueRef,
rhs: ValueRef,
debug_loc: DebugLoc)
-> ValueRef {
if cx.unreachable.get() { return _Undef(lhs); }
if cx.unreachable.get() {
return _Undef(lhs);
}
debug_loc.apply(cx.fcx);
B(cx).binop(op, lhs, rhs)
}
pub fn Neg(cx: Block, v: ValueRef) -> ValueRef {
if cx.unreachable.get() { return _Undef(v); }
pub fn Neg(cx: Block, v: ValueRef, debug_loc: DebugLoc) -> ValueRef {
if cx.unreachable.get() {
return _Undef(v);
}
debug_loc.apply(cx.fcx);
B(cx).neg(v)
}
pub fn NSWNeg(cx: Block, v: ValueRef) -> ValueRef {
if cx.unreachable.get() { return _Undef(v); }
pub fn NSWNeg(cx: Block, v: ValueRef, debug_loc: DebugLoc) -> ValueRef {
if cx.unreachable.get() {
return _Undef(v);
}
debug_loc.apply(cx.fcx);
B(cx).nswneg(v)
}
pub fn NUWNeg(cx: Block, v: ValueRef) -> ValueRef {
if cx.unreachable.get() { return _Undef(v); }
pub fn NUWNeg(cx: Block, v: ValueRef, debug_loc: DebugLoc) -> ValueRef {
if cx.unreachable.get() {
return _Undef(v);
}
debug_loc.apply(cx.fcx);
B(cx).nuwneg(v)
}
pub fn FNeg(cx: Block, v: ValueRef) -> ValueRef {
if cx.unreachable.get() { return _Undef(v); }
pub fn FNeg(cx: Block, v: ValueRef, debug_loc: DebugLoc) -> ValueRef {
if cx.unreachable.get() {
return _Undef(v);
}
debug_loc.apply(cx.fcx);
B(cx).fneg(v)
}
pub fn Not(cx: Block, v: ValueRef) -> ValueRef {
if cx.unreachable.get() { return _Undef(v); }
pub fn Not(cx: Block, v: ValueRef, debug_loc: DebugLoc) -> ValueRef {
if cx.unreachable.get() {
return _Undef(v);
}
debug_loc.apply(cx.fcx);
B(cx).not(v)
}
/* Memory */
pub fn Malloc(cx: Block, ty: Type) -> ValueRef {
pub fn Malloc(cx: Block, ty: Type, debug_loc: DebugLoc) -> ValueRef {
unsafe {
if cx.unreachable.get() {
return llvm::LLVMGetUndef(Type::i8p(cx.ccx()).to_ref());
}
debug_loc.apply(cx.fcx);
B(cx).malloc(ty)
}
}
pub fn ArrayMalloc(cx: Block, ty: Type, val: ValueRef) -> ValueRef {
pub fn ArrayMalloc(cx: Block,
ty: Type,
val: ValueRef,
debug_loc: DebugLoc) -> ValueRef {
unsafe {
if cx.unreachable.get() {
return llvm::LLVMGetUndef(Type::i8p(cx.ccx()).to_ref());
}
debug_loc.apply(cx.fcx);
B(cx).array_malloc(ty, val)
}
}
@ -327,6 +556,7 @@ pub fn Alloca(cx: Block, ty: Type, name: &str) -> ValueRef {
pub fn AllocaFcx(fcx: &FunctionContext, ty: Type, name: &str) -> ValueRef {
let b = fcx.ccx.builder();
b.position_before(fcx.alloca_insert_pt.get().unwrap());
DebugLoc::None.apply(fcx);
b.alloca(ty, name)
}
@ -335,6 +565,7 @@ pub fn ArrayAlloca(cx: Block, ty: Type, val: ValueRef) -> ValueRef {
if cx.unreachable.get() { return llvm::LLVMGetUndef(ty.ptr_to().to_ref()); }
let b = cx.fcx.ccx.builder();
b.position_before(cx.fcx.alloca_insert_pt.get().unwrap());
DebugLoc::None.apply(cx.fcx);
b.array_alloca(ty, val)
}
}
@ -680,9 +911,16 @@ pub fn InlineAsmCall(cx: Block, asm: *const c_char, cons: *const c_char,
B(cx).inline_asm_call(asm, cons, inputs, output, volatile, alignstack, dia)
}
pub fn Call(cx: Block, fn_: ValueRef, args: &[ValueRef],
attributes: Option<AttrBuilder>) -> ValueRef {
if cx.unreachable.get() { return _UndefReturn(cx, fn_); }
pub fn Call(cx: Block,
fn_: ValueRef,
args: &[ValueRef],
attributes: Option<AttrBuilder>,
debug_loc: DebugLoc)
-> ValueRef {
if cx.unreachable.get() {
return _UndefReturn(cx, fn_);
}
debug_loc.apply(cx.fcx);
B(cx).call(fn_, args, attributes)
}

View file

@ -40,6 +40,7 @@ use trans::common;
use trans::common::*;
use trans::consts;
use trans::datum::*;
use trans::debuginfo::{DebugLoc, ToDebugLoc};
use trans::expr;
use trans::glue;
use trans::inline;
@ -356,7 +357,7 @@ pub fn trans_fn_pointer_shim<'a, 'tcx>(
ArgVals(&llargs[]),
dest).bcx;
finish_fn(&fcx, bcx, sig.output);
finish_fn(&fcx, bcx, sig.output, DebugLoc::None);
ccx.fn_pointer_shims().borrow_mut().insert(bare_fn_ty, llfn);
@ -646,7 +647,7 @@ pub fn trans_lang_call<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
/// For non-lang items, `dest` is always Some, and hence the result is written into memory
/// somewhere. Nonetheless we return the actual return value of the function.
pub fn trans_call_inner<'a, 'blk, 'tcx, F>(bcx: Block<'blk, 'tcx>,
call_info: Option<NodeInfo>,
call_info: Option<NodeIdAndSpan>,
callee_ty: Ty<'tcx>,
get_callee: F,
args: CallArgs<'a, 'tcx>,
@ -703,7 +704,7 @@ pub fn trans_call_inner<'a, 'blk, 'tcx, F>(bcx: Block<'blk, 'tcx>,
disr,
args,
dest.unwrap(),
call_info);
call_info.debug_loc());
}
};
@ -781,7 +782,7 @@ pub fn trans_call_inner<'a, 'blk, 'tcx, F>(bcx: Block<'blk, 'tcx>,
llfn,
&llargs[],
callee_ty,
call_info);
call_info.debug_loc());
bcx = b;
llresult = llret;
@ -828,7 +829,10 @@ pub fn trans_call_inner<'a, 'blk, 'tcx, F>(bcx: Block<'blk, 'tcx>,
match (dest, opt_llretslot, ret_ty) {
(Some(expr::Ignore), Some(llretslot), ty::FnConverging(ret_ty)) => {
// drop the value if it is not being saved.
bcx = glue::drop_ty(bcx, llretslot, ret_ty, call_info);
bcx = glue::drop_ty(bcx,
llretslot,
ret_ty,
call_info.debug_loc());
call_lifetime_end(bcx, llretslot);
}
_ => {}

View file

@ -21,8 +21,8 @@ use trans::base;
use trans::build;
use trans::callee;
use trans::common;
use trans::common::{Block, FunctionContext, ExprId, NodeInfo};
use trans::debuginfo;
use trans::common::{Block, FunctionContext, ExprId, NodeIdAndSpan};
use trans::debuginfo::{DebugLoc, ToDebugLoc};
use trans::glue;
use middle::region;
use trans::type_::Type;
@ -44,7 +44,7 @@ pub struct CleanupScope<'blk, 'tcx: 'blk> {
// The debug location any drop calls generated for this scope will be
// associated with.
debug_loc: Option<NodeInfo>,
debug_loc: DebugLoc,
cached_early_exits: Vec<CachedEarlyExit>,
cached_landing_pad: Option<BasicBlockRef>,
@ -100,7 +100,7 @@ pub trait Cleanup<'tcx> {
fn is_lifetime_end(&self) -> bool;
fn trans<'blk>(&self,
bcx: Block<'blk, 'tcx>,
debug_loc: Option<NodeInfo>)
debug_loc: DebugLoc)
-> Block<'blk, 'tcx>;
}
@ -114,7 +114,7 @@ pub enum ScopeId {
impl<'blk, 'tcx> CleanupMethods<'blk, 'tcx> for FunctionContext<'blk, 'tcx> {
/// Invoked when we start to trans the code contained within a new cleanup scope.
fn push_ast_cleanup_scope(&self, debug_loc: NodeInfo) {
fn push_ast_cleanup_scope(&self, debug_loc: NodeIdAndSpan) {
debug!("push_ast_cleanup_scope({})",
self.ccx.tcx().map.node_to_string(debug_loc.id));
@ -139,7 +139,7 @@ impl<'blk, 'tcx> CleanupMethods<'blk, 'tcx> for FunctionContext<'blk, 'tcx> {
}
self.push_scope(CleanupScope::new(AstScopeKind(debug_loc.id),
Some(debug_loc)));
debug_loc.debug_loc()));
}
fn push_loop_cleanup_scope(&self,
@ -168,19 +168,20 @@ impl<'blk, 'tcx> CleanupMethods<'blk, 'tcx> for FunctionContext<'blk, 'tcx> {
.borrow()
.last()
.map(|opt_scope| opt_scope.debug_loc)
.unwrap_or(None);
.unwrap_or(DebugLoc::None);
self.push_scope(CleanupScope::new(CustomScopeKind, debug_loc));
CustomScopeIndex { index: index }
}
fn push_custom_cleanup_scope_with_debug_loc(&self,
debug_loc: NodeInfo)
debug_loc: NodeIdAndSpan)
-> CustomScopeIndex {
let index = self.scopes_len();
debug!("push_custom_cleanup_scope(): {}", index);
self.push_scope(CleanupScope::new(CustomScopeKind, Some(debug_loc)));
self.push_scope(CleanupScope::new(CustomScopeKind,
debug_loc.debug_loc()));
CustomScopeIndex { index: index }
}
@ -664,7 +665,7 @@ impl<'blk, 'tcx> CleanupHelperMethods<'blk, 'tcx> for FunctionContext<'blk, 'tcx
scope.debug_loc);
}
}
build::Br(bcx_out, prev_llbb);
build::Br(bcx_out, prev_llbb, DebugLoc::None);
prev_llbb = bcx_in.llbb;
} else {
debug!("no suitable cleanups in {}",
@ -766,7 +767,7 @@ impl<'blk, 'tcx> CleanupHelperMethods<'blk, 'tcx> for FunctionContext<'blk, 'tcx
// Generate the cleanup block and branch to it.
let cleanup_llbb = self.trans_cleanups_to_exit_scope(UnwindExit);
build::Br(pad_bcx, cleanup_llbb);
build::Br(pad_bcx, cleanup_llbb, DebugLoc::None);
return pad_bcx.llbb;
}
@ -774,7 +775,7 @@ impl<'blk, 'tcx> CleanupHelperMethods<'blk, 'tcx> for FunctionContext<'blk, 'tcx
impl<'blk, 'tcx> CleanupScope<'blk, 'tcx> {
fn new(kind: CleanupScopeKind<'blk, 'tcx>,
debug_loc: Option<NodeInfo>)
debug_loc: DebugLoc)
-> CleanupScope<'blk, 'tcx> {
CleanupScope {
kind: kind,
@ -896,7 +897,7 @@ impl<'tcx> Cleanup<'tcx> for DropValue<'tcx> {
fn trans<'blk>(&self,
bcx: Block<'blk, 'tcx>,
debug_loc: Option<NodeInfo>)
debug_loc: DebugLoc)
-> Block<'blk, 'tcx> {
let bcx = if self.is_immediate {
glue::drop_ty_immediate(bcx, self.val, self.ty, debug_loc)
@ -937,9 +938,9 @@ impl<'tcx> Cleanup<'tcx> for FreeValue<'tcx> {
fn trans<'blk>(&self,
bcx: Block<'blk, 'tcx>,
debug_loc: Option<NodeInfo>)
debug_loc: DebugLoc)
-> Block<'blk, 'tcx> {
apply_debug_loc(bcx.fcx, debug_loc);
debug_loc.apply(bcx.fcx);
match self.heap {
HeapExchange => {
@ -972,9 +973,9 @@ impl<'tcx> Cleanup<'tcx> for FreeSlice {
fn trans<'blk>(&self,
bcx: Block<'blk, 'tcx>,
debug_loc: Option<NodeInfo>)
debug_loc: DebugLoc)
-> Block<'blk, 'tcx> {
apply_debug_loc(bcx.fcx, debug_loc);
debug_loc.apply(bcx.fcx);
match self.heap {
HeapExchange => {
@ -1004,9 +1005,9 @@ impl<'tcx> Cleanup<'tcx> for LifetimeEnd {
fn trans<'blk>(&self,
bcx: Block<'blk, 'tcx>,
debug_loc: Option<NodeInfo>)
debug_loc: DebugLoc)
-> Block<'blk, 'tcx> {
apply_debug_loc(bcx.fcx, debug_loc);
debug_loc.apply(bcx.fcx);
base::call_lifetime_end(bcx, self.ptr);
bcx
}
@ -1041,28 +1042,17 @@ fn cleanup_is_suitable_for(c: &Cleanup,
!label.is_unwind() || c.clean_on_unwind()
}
fn apply_debug_loc(fcx: &FunctionContext, debug_loc: Option<NodeInfo>) {
match debug_loc {
Some(ref src_loc) => {
debuginfo::set_source_location(fcx, src_loc.id, src_loc.span);
}
None => {
debuginfo::clear_source_location(fcx);
}
}
}
///////////////////////////////////////////////////////////////////////////
// These traits just exist to put the methods into this file.
pub trait CleanupMethods<'blk, 'tcx> {
fn push_ast_cleanup_scope(&self, id: NodeInfo);
fn push_ast_cleanup_scope(&self, id: NodeIdAndSpan);
fn push_loop_cleanup_scope(&self,
id: ast::NodeId,
exits: [Block<'blk, 'tcx>; EXIT_MAX]);
fn push_custom_cleanup_scope(&self) -> CustomScopeIndex;
fn push_custom_cleanup_scope_with_debug_loc(&self,
debug_loc: NodeInfo)
debug_loc: NodeIdAndSpan)
-> CustomScopeIndex;
fn pop_and_trans_ast_cleanup_scope(&self,
bcx: Block<'blk, 'tcx>,

View file

@ -30,7 +30,7 @@ use trans::build;
use trans::cleanup;
use trans::consts;
use trans::datum;
use trans::debuginfo;
use trans::debuginfo::{self, DebugLoc};
use trans::machine;
use trans::monomorphize;
use trans::type_::Type;
@ -317,13 +317,13 @@ pub struct tydesc_info<'tcx> {
*/
#[derive(Copy)]
pub struct NodeInfo {
pub struct NodeIdAndSpan {
pub id: ast::NodeId,
pub span: Span,
}
pub fn expr_info(expr: &ast::Expr) -> NodeInfo {
NodeInfo { id: expr.id, span: expr.span }
pub fn expr_info(expr: &ast::Expr) -> NodeIdAndSpan {
NodeIdAndSpan { id: expr.id, span: expr.span }
}
pub struct BuilderRef_res {
@ -517,7 +517,7 @@ impl<'a, 'tcx> FunctionContext<'a, 'tcx> {
let mut reachable = false;
for bcx in in_cxs.iter() {
if !bcx.unreachable.get() {
build::Br(*bcx, out.llbb);
build::Br(*bcx, out.llbb, DebugLoc::None);
reachable = true;
}
}

View file

@ -22,6 +22,7 @@ use trans::common::*;
use trans::consts;
use trans::datum;
use trans::debuginfo;
use trans::debuginfo::{DebugLoc, ToDebugLoc};
use trans::expr;
use trans::meth;
use trans::type_::Type;
@ -188,6 +189,8 @@ pub fn trans_if<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
let then_bcx_out = trans_block(then_bcx_in, &*thn, dest);
trans::debuginfo::clear_source_location(bcx.fcx);
let cond_source_loc = cond.debug_loc();
let next_bcx;
match els {
Some(elexpr) => {
@ -195,13 +198,13 @@ pub fn trans_if<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
let else_bcx_out = expr::trans_into(else_bcx_in, &*elexpr, dest);
next_bcx = bcx.fcx.join_blocks(if_id,
&[then_bcx_out, else_bcx_out]);
CondBr(bcx, cond_val, then_bcx_in.llbb, else_bcx_in.llbb);
CondBr(bcx, cond_val, then_bcx_in.llbb, else_bcx_in.llbb, cond_source_loc);
}
None => {
next_bcx = bcx.fcx.new_id_block("next-block", if_id);
Br(then_bcx_out, next_bcx.llbb);
CondBr(bcx, cond_val, then_bcx_in.llbb, next_bcx.llbb);
Br(then_bcx_out, next_bcx.llbb, DebugLoc::None);
CondBr(bcx, cond_val, then_bcx_in.llbb, next_bcx.llbb, cond_source_loc);
}
}
@ -213,7 +216,7 @@ pub fn trans_if<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
}
pub fn trans_while<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
loop_id: ast::NodeId,
loop_expr: &ast::Expr,
cond: &ast::Expr,
body: &ast::Block)
-> Block<'blk, 'tcx> {
@ -231,33 +234,34 @@ pub fn trans_while<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
// | body_bcx_out --+
// next_bcx_in
let next_bcx_in = fcx.new_id_block("while_exit", loop_id);
let next_bcx_in = fcx.new_id_block("while_exit", loop_expr.id);
let cond_bcx_in = fcx.new_id_block("while_cond", cond.id);
let body_bcx_in = fcx.new_id_block("while_body", body.id);
fcx.push_loop_cleanup_scope(loop_id, [next_bcx_in, cond_bcx_in]);
fcx.push_loop_cleanup_scope(loop_expr.id, [next_bcx_in, cond_bcx_in]);
Br(bcx, cond_bcx_in.llbb);
Br(bcx, cond_bcx_in.llbb, loop_expr.debug_loc());
// compile the block where we will handle loop cleanups
let cleanup_llbb = fcx.normal_exit_block(loop_id, cleanup::EXIT_BREAK);
let cleanup_llbb = fcx.normal_exit_block(loop_expr.id, cleanup::EXIT_BREAK);
// compile the condition
let Result {bcx: cond_bcx_out, val: cond_val} =
expr::trans(cond_bcx_in, cond).to_llbool();
CondBr(cond_bcx_out, cond_val, body_bcx_in.llbb, cleanup_llbb);
CondBr(cond_bcx_out, cond_val, body_bcx_in.llbb, cleanup_llbb, cond.debug_loc());
// loop body:
let body_bcx_out = trans_block(body_bcx_in, body, expr::Ignore);
Br(body_bcx_out, cond_bcx_in.llbb);
Br(body_bcx_out, cond_bcx_in.llbb, DebugLoc::None);
fcx.pop_loop_cleanup_scope(loop_id);
fcx.pop_loop_cleanup_scope(loop_expr.id);
return next_bcx_in;
}
/// Translates a `for` loop.
pub fn trans_for<'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
loop_info: NodeInfo,
loop_info: NodeIdAndSpan,
pat: &ast::Pat,
head: &ast::Expr,
body: &ast::Block)
@ -292,7 +296,7 @@ pub fn trans_for<'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
let body_bcx_in = bcx.fcx.new_id_block("for_body", body.id);
bcx.fcx.push_loop_cleanup_scope(loop_info.id,
[next_bcx_in, loopback_bcx_in]);
Br(bcx, loopback_bcx_in.llbb);
Br(bcx, loopback_bcx_in.llbb, DebugLoc::None);
let cleanup_llbb = bcx.fcx.normal_exit_block(loop_info.id,
cleanup::EXIT_BREAK);
@ -347,7 +351,7 @@ pub fn trans_for<'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
None);
let i1_type = Type::i1(loopback_bcx_out.ccx());
let llcondition = Trunc(loopback_bcx_out, lldiscriminant, i1_type);
CondBr(loopback_bcx_out, llcondition, body_bcx_in.llbb, cleanup_llbb);
CondBr(loopback_bcx_out, llcondition, body_bcx_in.llbb, cleanup_llbb, DebugLoc::None);
// Now we're in the body. Unpack the `Option` value into the programmer-
// supplied pattern.
@ -377,7 +381,7 @@ pub fn trans_for<'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
body_bcx_out.fcx
.pop_and_trans_custom_cleanup_scope(body_bcx_out,
option_cleanup_scope);
Br(body_bcx_out, loopback_bcx_in.llbb);
Br(body_bcx_out, loopback_bcx_in.llbb, DebugLoc::None);
// Codegen cleanups and leave.
next_bcx_in.fcx.pop_loop_cleanup_scope(loop_info.id);
@ -385,7 +389,7 @@ pub fn trans_for<'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
}
pub fn trans_loop<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
loop_id: ast::NodeId,
loop_expr: &ast::Expr,
body: &ast::Block)
-> Block<'blk, 'tcx> {
let _icx = push_ctxt("trans_loop");
@ -402,22 +406,22 @@ pub fn trans_loop<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
// Links between body_bcx_in and next_bcx are created by
// break statements.
let next_bcx_in = bcx.fcx.new_id_block("loop_exit", loop_id);
let next_bcx_in = bcx.fcx.new_id_block("loop_exit", loop_expr.id);
let body_bcx_in = bcx.fcx.new_id_block("loop_body", body.id);
fcx.push_loop_cleanup_scope(loop_id, [next_bcx_in, body_bcx_in]);
fcx.push_loop_cleanup_scope(loop_expr.id, [next_bcx_in, body_bcx_in]);
Br(bcx, body_bcx_in.llbb);
Br(bcx, body_bcx_in.llbb, loop_expr.debug_loc());
let body_bcx_out = trans_block(body_bcx_in, body, expr::Ignore);
Br(body_bcx_out, body_bcx_in.llbb);
Br(body_bcx_out, body_bcx_in.llbb, DebugLoc::None);
fcx.pop_loop_cleanup_scope(loop_id);
fcx.pop_loop_cleanup_scope(loop_expr.id);
return next_bcx_in;
}
pub fn trans_break_cont<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
expr_id: ast::NodeId,
expr: &ast::Expr,
opt_label: Option<Ident>,
exit: uint)
-> Block<'blk, 'tcx> {
@ -432,7 +436,7 @@ pub fn trans_break_cont<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
let loop_id = match opt_label {
None => fcx.top_loop_scope(),
Some(_) => {
match bcx.tcx().def_map.borrow().get(&expr_id) {
match bcx.tcx().def_map.borrow().get(&expr.id) {
Some(&def::DefLabel(loop_id)) => loop_id,
ref r => {
bcx.tcx().sess.bug(&format!("{:?} in def-map for label",
@ -444,39 +448,40 @@ pub fn trans_break_cont<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
// Generate appropriate cleanup code and branch
let cleanup_llbb = fcx.normal_exit_block(loop_id, exit);
Br(bcx, cleanup_llbb);
Br(bcx, cleanup_llbb, expr.debug_loc());
Unreachable(bcx); // anything afterwards should be ignored
return bcx;
}
pub fn trans_break<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
expr_id: ast::NodeId,
expr: &ast::Expr,
label_opt: Option<Ident>)
-> Block<'blk, 'tcx> {
return trans_break_cont(bcx, expr_id, label_opt, cleanup::EXIT_BREAK);
return trans_break_cont(bcx, expr, label_opt, cleanup::EXIT_BREAK);
}
pub fn trans_cont<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
expr_id: ast::NodeId,
expr: &ast::Expr,
label_opt: Option<Ident>)
-> Block<'blk, 'tcx> {
return trans_break_cont(bcx, expr_id, label_opt, cleanup::EXIT_LOOP);
return trans_break_cont(bcx, expr, label_opt, cleanup::EXIT_LOOP);
}
pub fn trans_ret<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
e: Option<&ast::Expr>)
return_expr: &ast::Expr,
retval_expr: Option<&ast::Expr>)
-> Block<'blk, 'tcx> {
let _icx = push_ctxt("trans_ret");
let fcx = bcx.fcx;
let mut bcx = bcx;
let dest = match (fcx.llretslotptr.get(), e) {
(Some(_), Some(e)) => {
let ret_ty = expr_ty(bcx, &*e);
let dest = match (fcx.llretslotptr.get(), retval_expr) {
(Some(_), Some(retval_expr)) => {
let ret_ty = expr_ty(bcx, &*retval_expr);
expr::SaveIn(fcx.get_ret_slot(bcx, ty::FnConverging(ret_ty), "ret_slot"))
}
_ => expr::Ignore,
};
if let Some(x) = e {
if let Some(x) = retval_expr {
bcx = expr::trans_into(bcx, &*x, dest);
match dest {
expr::SaveIn(slot) if fcx.needs_ret_allocas => {
@ -486,7 +491,7 @@ pub fn trans_ret<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
}
}
let cleanup_llbb = fcx.return_exit_block();
Br(bcx, cleanup_llbb);
Br(bcx, cleanup_llbb, return_expr.debug_loc());
Unreachable(bcx);
return bcx;
}

View file

@ -188,7 +188,7 @@ use self::MemberOffset::*;
use self::MemberDescriptionFactory::*;
use self::RecursiveTypeDescription::*;
use self::EnumDiscriminantInfo::*;
use self::DebugLocation::*;
use self::InternalDebugLocation::*;
use llvm;
use llvm::{ModuleRef, ContextRef, ValueRef};
@ -196,7 +196,8 @@ use llvm::debuginfo::*;
use metadata::csearch;
use middle::subst::{self, Substs};
use trans::{self, adt, machine, type_of};
use trans::common::*;
use trans::common::{self, NodeIdAndSpan, CrateContext, FunctionContext, Block,
C_bytes, C_i32, C_i64, NormalizingUnboxedClosureTyper};
use trans::_match::{BindingInfo, TrByCopy, TrByMove, TrByRef};
use trans::monomorphize;
use trans::type_::Type;
@ -650,7 +651,7 @@ macro_rules! return_if_metadata_created_in_meantime {
pub struct CrateDebugContext<'tcx> {
llcontext: ContextRef,
builder: DIBuilderRef,
current_debug_location: Cell<DebugLocation>,
current_debug_location: Cell<InternalDebugLocation>,
created_files: RefCell<FnvHashMap<String, DIFile>>,
created_enum_disr_types: RefCell<DefIdMap<DIType>>,
@ -940,13 +941,14 @@ pub fn create_captured_var_metadata<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
}
};
let variable_type = node_id_type(bcx, node_id);
let variable_type = common::node_id_type(bcx, node_id);
let scope_metadata = bcx.fcx.debug_context.get_ref(cx, span).fn_metadata;
// env_pointer is the alloca containing the pointer to the environment,
// so it's type is **EnvironmentType. In order to find out the type of
// the environment we have to "dereference" two times.
let llvm_env_data_type = val_ty(env_pointer).element_type().element_type();
let llvm_env_data_type = common::val_ty(env_pointer).element_type()
.element_type();
let byte_offset_of_var_in_env = machine::llelement_offset(cx,
llvm_env_data_type,
env_index);
@ -1123,7 +1125,7 @@ pub fn get_cleanup_debug_loc_for_ast_node<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
node_id: ast::NodeId,
node_span: Span,
is_block: bool)
-> NodeInfo {
-> NodeIdAndSpan {
// A debug location needs two things:
// (1) A span (of which only the beginning will actually be used)
// (2) An AST node-id which will be used to look up the lexical scope
@ -1173,12 +1175,56 @@ pub fn get_cleanup_debug_loc_for_ast_node<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
}
}
NodeInfo {
NodeIdAndSpan {
id: node_id,
span: cleanup_span
}
}
/// A debug source location that can be attached to an emitted instruction:
/// either the location of a concrete AST node (id plus span), or no
/// location at all.
#[derive(Copy, Clone, PartialEq, Eq)]
pub enum DebugLoc {
    /// The location of the AST node with the given id, at the given span.
    At(ast::NodeId, Span),
    /// No source location; `apply()` clears the current location instead
    /// of setting one.
    None
}
impl DebugLoc {
    /// Makes `self` the current debug source location of `fcx`: a concrete
    /// location pins subsequent instructions to that node's span, while
    /// `DebugLoc::None` resets the location so they carry none.
    pub fn apply(&self, fcx: &FunctionContext) {
        if let DebugLoc::At(node_id, span) = *self {
            set_source_location(fcx, node_id, span);
        } else {
            clear_source_location(fcx);
        }
    }
}
/// Conversion trait for values that have an associated `DebugLoc`.
pub trait ToDebugLoc {
    /// Returns the debug source location associated with `self`.
    fn debug_loc(&self) -> DebugLoc;
}
impl ToDebugLoc for ast::Expr {
    // An expression's debug location is simply its own node id and span.
    fn debug_loc(&self) -> DebugLoc {
        DebugLoc::At(self.id, self.span)
    }
}
impl ToDebugLoc for NodeIdAndSpan {
    // A NodeIdAndSpan is already an (id, span) pair; wrap it directly.
    fn debug_loc(&self) -> DebugLoc {
        DebugLoc::At(self.id, self.span)
    }
}
impl ToDebugLoc for Option<NodeIdAndSpan> {
    /// `Some` maps to the contained node's location, `None` to no location.
    fn debug_loc(&self) -> DebugLoc {
        match *self {
            Some(ref node) => DebugLoc::At(node.id, node.span),
            None => DebugLoc::None
        }
    }
}
/// Sets the current debug location at the beginning of the span.
///
/// Maps to a call to llvm::LLVMSetCurrentDebugLocation(...). The node_id
@ -1202,7 +1248,7 @@ pub fn set_source_location(fcx: &FunctionContext,
let loc = span_start(cx, span);
let scope = scope_metadata(fcx, node_id, span);
set_debug_location(cx, DebugLocation::new(scope,
set_debug_location(cx, InternalDebugLocation::new(scope,
loc.line,
loc.col.to_uint()));
} else {
@ -1714,7 +1760,7 @@ fn declare_local<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
)
};
set_debug_location(cx, DebugLocation::new(scope_metadata,
set_debug_location(cx, InternalDebugLocation::new(scope_metadata,
loc.line,
loc.col.to_uint()));
unsafe {
@ -3095,13 +3141,13 @@ impl MetadataCreationResult {
}
#[derive(Copy, PartialEq)]
enum DebugLocation {
enum InternalDebugLocation {
KnownLocation { scope: DIScope, line: uint, col: uint },
UnknownLocation
}
impl DebugLocation {
fn new(scope: DIScope, line: uint, col: uint) -> DebugLocation {
impl InternalDebugLocation {
fn new(scope: DIScope, line: uint, col: uint) -> InternalDebugLocation {
KnownLocation {
scope: scope,
line: line,
@ -3110,7 +3156,7 @@ impl DebugLocation {
}
}
fn set_debug_location(cx: &CrateContext, debug_location: DebugLocation) {
fn set_debug_location(cx: &CrateContext, debug_location: InternalDebugLocation) {
if debug_location == debug_context(cx).current_debug_location.get() {
return;
}

View file

@ -46,7 +46,7 @@ use trans::build::*;
use trans::cleanup::{self, CleanupMethods};
use trans::common::*;
use trans::datum::*;
use trans::debuginfo;
use trans::debuginfo::{self, DebugLoc, ToDebugLoc};
use trans::glue;
use trans::machine;
use trans::meth;
@ -779,7 +779,8 @@ fn trans_index<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
let expected = Call(bcx,
expect,
&[bounds_check, C_bool(ccx, false)],
None);
None,
index_expr.debug_loc());
bcx = with_cond(bcx, expected, |bcx| {
controlflow::trans_fail_bounds_check(bcx,
index_expr.span,
@ -890,10 +891,10 @@ fn trans_rvalue_stmt_unadjusted<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
trans_into(bcx, &**e, Ignore)
}
ast::ExprBreak(label_opt) => {
controlflow::trans_break(bcx, expr.id, label_opt)
controlflow::trans_break(bcx, expr, label_opt)
}
ast::ExprAgain(label_opt) => {
controlflow::trans_cont(bcx, expr.id, label_opt)
controlflow::trans_cont(bcx, expr, label_opt)
}
ast::ExprRet(ref ex) => {
// Check to see if the return expression itself is reachable.
@ -905,7 +906,7 @@ fn trans_rvalue_stmt_unadjusted<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
};
if reachable {
controlflow::trans_ret(bcx, ex.as_ref().map(|e| &**e))
controlflow::trans_ret(bcx, expr, ex.as_ref().map(|e| &**e))
} else {
// If it's not reachable, just translate the inner expression
// directly. This avoids having to manage a return slot when
@ -921,7 +922,7 @@ fn trans_rvalue_stmt_unadjusted<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
}
}
ast::ExprWhile(ref cond, ref body, _) => {
controlflow::trans_while(bcx, expr.id, &**cond, &**body)
controlflow::trans_while(bcx, expr, &**cond, &**body)
}
ast::ExprForLoop(ref pat, ref head, ref body, _) => {
controlflow::trans_for(bcx,
@ -931,7 +932,7 @@ fn trans_rvalue_stmt_unadjusted<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
&**body)
}
ast::ExprLoop(ref body, _) => {
controlflow::trans_loop(bcx, expr.id, &**body)
controlflow::trans_loop(bcx, expr, &**body)
}
ast::ExprAssign(ref dst, ref src) => {
let src_datum = unpack_datum!(bcx, trans(bcx, &**src));
@ -960,7 +961,7 @@ fn trans_rvalue_stmt_unadjusted<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
bcx = glue::drop_ty(bcx,
dst_datum.val,
dst_datum.ty,
Some(NodeInfo { id: expr.id, span: expr.span }));
expr.debug_loc());
src_datum.store_to(bcx, dst_datum.val)
} else {
src_datum.store_to(bcx, dst_datum.val)
@ -1078,7 +1079,7 @@ fn trans_rvalue_dps_unadjusted<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
&numbered_fields[],
None,
dest,
Some(NodeInfo { id: expr.id, span: expr.span }))
expr.debug_loc())
}
ast::ExprLit(ref lit) => {
match lit.node {
@ -1417,7 +1418,7 @@ fn trans_struct<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
numbered_fields.as_slice(),
optbase,
dest,
Some(NodeInfo { id: expr_id, span: expr_span }))
DebugLoc::At(expr_id, expr_span))
})
}
@ -1448,18 +1449,13 @@ pub fn trans_adt<'a, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
fields: &[(uint, &ast::Expr)],
optbase: Option<StructBaseInfo<'a, 'tcx>>,
dest: Dest,
source_location: Option<NodeInfo>)
debug_location: DebugLoc)
-> Block<'blk, 'tcx> {
let _icx = push_ctxt("trans_adt");
let fcx = bcx.fcx;
let repr = adt::represent_type(bcx.ccx(), ty);
match source_location {
Some(src_loc) => debuginfo::set_source_location(bcx.fcx,
src_loc.id,
src_loc.span),
None => {}
};
debug_location.apply(bcx.fcx);
// If we don't care about the result, just make a
// temporary stack slot
@ -1494,12 +1490,7 @@ pub fn trans_adt<'a, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
}
}
match source_location {
Some(src_loc) => debuginfo::set_source_location(bcx.fcx,
src_loc.id,
src_loc.span),
None => {}
};
debug_location.apply(bcx.fcx);
if ty::type_is_simd(bcx.tcx(), ty) {
// This is the constructor of a SIMD type, such types are
@ -1540,7 +1531,7 @@ pub fn trans_adt<'a, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
match dest {
SaveIn(_) => bcx,
Ignore => {
bcx = glue::drop_ty(bcx, addr, ty, source_location);
bcx = glue::drop_ty(bcx, addr, ty, debug_location);
base::call_lifetime_end(bcx, addr);
bcx
}
@ -1579,10 +1570,12 @@ fn trans_unary<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
let un_ty = expr_ty(bcx, expr);
let debug_loc = expr.debug_loc();
match op {
ast::UnNot => {
let datum = unpack_datum!(bcx, trans(bcx, sub_expr));
let llresult = Not(bcx, datum.to_llscalarish(bcx));
let llresult = Not(bcx, datum.to_llscalarish(bcx), debug_loc);
immediate_rvalue_bcx(bcx, llresult, un_ty).to_expr_datumblock()
}
ast::UnNeg => {
@ -1590,9 +1583,9 @@ fn trans_unary<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
let val = datum.to_llscalarish(bcx);
let llneg = {
if ty::type_is_fp(un_ty) {
FNeg(bcx, val)
FNeg(bcx, val, debug_loc)
} else {
Neg(bcx, val)
Neg(bcx, val, debug_loc)
}
};
immediate_rvalue_bcx(bcx, llneg, un_ty).to_expr_datumblock()
@ -1691,56 +1684,69 @@ fn trans_eager_binop<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
let rhs = base::cast_shift_expr_rhs(bcx, op, lhs, rhs);
let binop_debug_loc = binop_expr.debug_loc();
let mut bcx = bcx;
let val = match op {
ast::BiAdd => {
if is_float { FAdd(bcx, lhs, rhs) }
else { Add(bcx, lhs, rhs) }
if is_float {
FAdd(bcx, lhs, rhs, binop_debug_loc)
} else {
Add(bcx, lhs, rhs, binop_debug_loc)
}
}
ast::BiSub => {
if is_float { FSub(bcx, lhs, rhs) }
else { Sub(bcx, lhs, rhs) }
if is_float {
FSub(bcx, lhs, rhs, binop_debug_loc)
} else {
Sub(bcx, lhs, rhs, binop_debug_loc)
}
}
ast::BiMul => {
if is_float { FMul(bcx, lhs, rhs) }
else { Mul(bcx, lhs, rhs) }
if is_float {
FMul(bcx, lhs, rhs, binop_debug_loc)
} else {
Mul(bcx, lhs, rhs, binop_debug_loc)
}
}
ast::BiDiv => {
if is_float {
FDiv(bcx, lhs, rhs)
FDiv(bcx, lhs, rhs, binop_debug_loc)
} else {
// Only zero-check integers; fp /0 is NaN
bcx = base::fail_if_zero_or_overflows(bcx, binop_expr.span,
op, lhs, rhs, rhs_t);
if is_signed {
SDiv(bcx, lhs, rhs)
SDiv(bcx, lhs, rhs, binop_debug_loc)
} else {
UDiv(bcx, lhs, rhs)
UDiv(bcx, lhs, rhs, binop_debug_loc)
}
}
}
ast::BiRem => {
if is_float {
FRem(bcx, lhs, rhs)
FRem(bcx, lhs, rhs, binop_debug_loc)
} else {
// Only zero-check integers; fp %0 is NaN
bcx = base::fail_if_zero_or_overflows(bcx, binop_expr.span,
op, lhs, rhs, rhs_t);
if is_signed {
SRem(bcx, lhs, rhs)
SRem(bcx, lhs, rhs, binop_debug_loc)
} else {
URem(bcx, lhs, rhs)
URem(bcx, lhs, rhs, binop_debug_loc)
}
}
}
ast::BiBitOr => Or(bcx, lhs, rhs),
ast::BiBitAnd => And(bcx, lhs, rhs),
ast::BiBitXor => Xor(bcx, lhs, rhs),
ast::BiShl => Shl(bcx, lhs, rhs),
ast::BiBitOr => Or(bcx, lhs, rhs, binop_debug_loc),
ast::BiBitAnd => And(bcx, lhs, rhs, binop_debug_loc),
ast::BiBitXor => Xor(bcx, lhs, rhs, binop_debug_loc),
ast::BiShl => Shl(bcx, lhs, rhs, binop_debug_loc),
ast::BiShr => {
if is_signed {
AShr(bcx, lhs, rhs)
} else { LShr(bcx, lhs, rhs) }
AShr(bcx, lhs, rhs, binop_debug_loc)
} else {
LShr(bcx, lhs, rhs, binop_debug_loc)
}
}
ast::BiEq | ast::BiNe | ast::BiLt | ast::BiGe | ast::BiLe | ast::BiGt => {
if ty::type_is_scalar(rhs_t) {
@ -1786,8 +1792,8 @@ fn trans_lazy_binop<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
let before_rhs = fcx.new_id_block("before_rhs", b.id);
match op {
lazy_and => CondBr(past_lhs, lhs, before_rhs.llbb, join.llbb),
lazy_or => CondBr(past_lhs, lhs, join.llbb, before_rhs.llbb)
lazy_and => CondBr(past_lhs, lhs, before_rhs.llbb, join.llbb, DebugLoc::None),
lazy_or => CondBr(past_lhs, lhs, join.llbb, before_rhs.llbb, DebugLoc::None)
}
let DatumBlock {bcx: past_rhs, datum: rhs} = trans(before_rhs, b);
@ -1797,7 +1803,7 @@ fn trans_lazy_binop<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
return immediate_rvalue_bcx(join, lhs, binop_ty).to_expr_datumblock();
}
Br(past_rhs, join.llbb);
Br(past_rhs, join.llbb, DebugLoc::None);
let phi = Phi(join, Type::i1(bcx.ccx()), &[lhs, rhs],
&[past_lhs.llbb, past_rhs.llbb]);

View file

@ -29,7 +29,7 @@ use trans::cleanup::CleanupMethods;
use trans::consts;
use trans::common::*;
use trans::datum;
use trans::debuginfo;
use trans::debuginfo::DebugLoc;
use trans::expr;
use trans::machine::*;
use trans::tvec;
@ -106,7 +106,7 @@ pub fn get_drop_glue_type<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
pub fn drop_ty<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
v: ValueRef,
t: Ty<'tcx>,
source_location: Option<NodeInfo>)
debug_loc: DebugLoc)
-> Block<'blk, 'tcx> {
// NB: v is an *alias* of type t here, not a direct value.
debug!("drop_ty(t={})", t.repr(bcx.tcx()));
@ -121,12 +121,7 @@ pub fn drop_ty<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
v
};
match source_location {
Some(sl) => debuginfo::set_source_location(bcx.fcx, sl.id, sl.span),
None => debuginfo::clear_source_location(bcx.fcx)
};
Call(bcx, glue, &[ptr], None);
Call(bcx, glue, &[ptr], None, debug_loc);
}
bcx
}
@ -134,12 +129,12 @@ pub fn drop_ty<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
pub fn drop_ty_immediate<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
v: ValueRef,
t: Ty<'tcx>,
source_location: Option<NodeInfo>)
debug_loc: DebugLoc)
-> Block<'blk, 'tcx> {
let _icx = push_ctxt("drop_ty_immediate");
let vp = alloca(bcx, type_of(bcx.ccx(), t), "");
store_ty(bcx, v, vp, t);
drop_ty(bcx, vp, t, source_location)
drop_ty(bcx, vp, t, debug_loc)
}
pub fn get_drop_glue<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, t: Ty<'tcx>) -> ValueRef {
@ -295,7 +290,7 @@ fn trans_struct_drop<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
class_did,
&[get_drop_glue_type(bcx.ccx(), t)],
ty::mk_nil(bcx.tcx()));
let (_, variant_cx) = invoke(variant_cx, dtor_addr, &args[], dtor_ty, None);
let (_, variant_cx) = invoke(variant_cx, dtor_addr, &args[], dtor_ty, DebugLoc::None);
variant_cx.fcx.pop_and_trans_custom_cleanup_scope(variant_cx, field_scope);
variant_cx
@ -331,7 +326,7 @@ fn size_and_align_of_dst<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, t: Ty<'tcx>, info:
let (unsized_size, unsized_align) = size_and_align_of_dst(bcx, field_ty, info);
// Return the sum of sizes and max of aligns.
let size = Add(bcx, sized_size, unsized_size);
let size = Add(bcx, sized_size, unsized_size, DebugLoc::None);
let align = Select(bcx,
ICmp(bcx, llvm::IntULT, sized_align, unsized_align),
sized_align,
@ -353,7 +348,8 @@ fn size_and_align_of_dst<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, t: Ty<'tcx>, info:
let llunit_ty = sizing_type_of(bcx.ccx(), unit_ty);
let unit_align = llalign_of_min(bcx.ccx(), llunit_ty);
let unit_size = llsize_of_alloc(bcx.ccx(), llunit_ty);
(Mul(bcx, info, C_uint(bcx.ccx(), unit_size)), C_uint(bcx.ccx(), unit_align))
(Mul(bcx, info, C_uint(bcx.ccx(), unit_size), DebugLoc::None),
C_uint(bcx.ccx(), unit_align))
}
_ => bcx.sess().bug(&format!("Unexpected unsized type, found {}",
bcx.ty_to_string(t))[])
@ -384,7 +380,8 @@ fn make_drop_glue<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, v0: ValueRef, t: Ty<'tcx>)
Call(bcx,
dtor,
&[PointerCast(bcx, lluniquevalue, Type::i8p(bcx.ccx()))],
None);
None,
DebugLoc::None);
bcx
})
}
@ -393,7 +390,7 @@ fn make_drop_glue<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, v0: ValueRef, t: Ty<'tcx>)
let llbox = Load(bcx, llval);
let not_null = IsNotNull(bcx, llbox);
with_cond(bcx, not_null, |bcx| {
let bcx = drop_ty(bcx, v0, content_ty, None);
let bcx = drop_ty(bcx, v0, content_ty, DebugLoc::None);
let info = GEPi(bcx, v0, &[0, abi::FAT_PTR_EXTRA]);
let info = Load(bcx, info);
let (llsize, llalign) = size_and_align_of_dst(bcx, content_ty, info);
@ -406,7 +403,7 @@ fn make_drop_glue<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, v0: ValueRef, t: Ty<'tcx>)
let llbox = Load(bcx, llval);
let not_null = IsNotNull(bcx, llbox);
with_cond(bcx, not_null, |bcx| {
let bcx = drop_ty(bcx, llbox, content_ty, None);
let bcx = drop_ty(bcx, llbox, content_ty, DebugLoc::None);
trans_exchange_free_ty(bcx, llbox, content_ty)
})
}
@ -437,14 +434,16 @@ fn make_drop_glue<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, v0: ValueRef, t: Ty<'tcx>)
}
ty::NoDtor => {
// No dtor? Just the default case
iter_structural_ty(bcx, v0, t, |bb, vv, tt| drop_ty(bb, vv, tt, None))
iter_structural_ty(bcx, v0, t, |bb, vv, tt| drop_ty(bb, vv, tt, DebugLoc::None))
}
}
}
ty::ty_unboxed_closure(..) => iter_structural_ty(bcx,
ty::ty_unboxed_closure(..) => {
iter_structural_ty(bcx,
v0,
t,
|bb, vv, tt| drop_ty(bb, vv, tt, None)),
|bb, vv, tt| drop_ty(bb, vv, tt, DebugLoc::None))
}
ty::ty_trait(..) => {
// No need to do a null check here (as opposed to the Box<trait case
// above), because this happens for a trait field in an unsized
@ -456,7 +455,8 @@ fn make_drop_glue<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, v0: ValueRef, t: Ty<'tcx>)
Call(bcx,
dtor,
&[PointerCast(bcx, Load(bcx, lluniquevalue), Type::i8p(bcx.ccx()))],
None);
None,
DebugLoc::None);
bcx
},
ty::ty_vec(_, None) | ty::ty_str => {
@ -465,9 +465,11 @@ fn make_drop_glue<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, v0: ValueRef, t: Ty<'tcx>)
},
_ => {
assert!(type_is_sized(bcx.tcx(), t));
if type_needs_drop(bcx.tcx(), t) &&
ty::type_is_structural(t) {
iter_structural_ty(bcx, v0, t, |bb, vv, tt| drop_ty(bb, vv, tt, None))
if type_needs_drop(bcx.tcx(), t) && ty::type_is_structural(t) {
iter_structural_ty(bcx,
v0,
t,
|bb, vv, tt| drop_ty(bb, vv, tt, DebugLoc::None))
} else {
bcx
}
@ -559,7 +561,7 @@ fn make_generic_glue<'a, 'tcx, F>(ccx: &CrateContext<'a, 'tcx>,
let llrawptr0 = get_param(llfn, fcx.arg_pos(0) as c_uint);
let bcx = helper(bcx, llrawptr0, t);
finish_fn(&fcx, bcx, ty::FnConverging(ty::mk_nil(ccx.tcx())));
finish_fn(&fcx, bcx, ty::FnConverging(ty::mk_nil(ccx.tcx())), DebugLoc::None);
llfn
}

View file

@ -21,6 +21,7 @@ use trans::cleanup;
use trans::cleanup::CleanupMethods;
use trans::common::*;
use trans::datum::*;
use trans::debuginfo::DebugLoc;
use trans::expr;
use trans::glue;
use trans::type_of::*;
@ -149,9 +150,8 @@ pub fn trans_intrinsic_call<'a, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
args: callee::CallArgs<'a, 'tcx>,
dest: expr::Dest,
substs: subst::Substs<'tcx>,
call_info: NodeInfo)
-> Result<'blk, 'tcx>
{
call_info: NodeIdAndSpan)
-> Result<'blk, 'tcx> {
let fcx = bcx.fcx;
let ccx = fcx.ccx;
let tcx = bcx.tcx();
@ -270,10 +270,12 @@ pub fn trans_intrinsic_call<'a, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
fcx.pop_custom_cleanup_scope(cleanup_scope);
let call_debug_location = DebugLoc::At(call_info.id, call_info.span);
// These are the only intrinsic functions that diverge.
if name.get() == "abort" {
let llfn = ccx.get_intrinsic(&("llvm.trap"));
Call(bcx, llfn, &[], None);
Call(bcx, llfn, &[], None, call_debug_location);
Unreachable(bcx);
return Result::new(bcx, C_undef(Type::nil(ccx).ptr_to()));
} else if name.get() == "unreachable" {
@ -304,11 +306,11 @@ pub fn trans_intrinsic_call<'a, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
let simple = get_simple_intrinsic(ccx, &*foreign_item);
let llval = match (simple, name.get()) {
(Some(llfn), _) => {
Call(bcx, llfn, llargs.as_slice(), None)
Call(bcx, llfn, llargs.as_slice(), None, call_debug_location)
}
(_, "breakpoint") => {
let llfn = ccx.get_intrinsic(&("llvm.debugtrap"));
Call(bcx, llfn, &[], None)
Call(bcx, llfn, &[], None, call_debug_location)
}
(_, "size_of") => {
let tp_ty = *substs.types.get(FnSpace, 0);
@ -384,29 +386,63 @@ pub fn trans_intrinsic_call<'a, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
}
(_, "copy_nonoverlapping_memory") => {
copy_intrinsic(bcx, false, false, *substs.types.get(FnSpace, 0),
llargs[0], llargs[1], llargs[2])
copy_intrinsic(bcx,
false,
false,
*substs.types.get(FnSpace, 0),
llargs[0],
llargs[1],
llargs[2],
call_debug_location)
}
(_, "copy_memory") => {
copy_intrinsic(bcx, true, false, *substs.types.get(FnSpace, 0),
llargs[0], llargs[1], llargs[2])
copy_intrinsic(bcx,
true,
false,
*substs.types.get(FnSpace, 0),
llargs[0],
llargs[1],
llargs[2],
call_debug_location)
}
(_, "set_memory") => {
memset_intrinsic(bcx, false, *substs.types.get(FnSpace, 0),
llargs[0], llargs[1], llargs[2])
memset_intrinsic(bcx,
false,
*substs.types.get(FnSpace, 0),
llargs[0],
llargs[1],
llargs[2],
call_debug_location)
}
(_, "volatile_copy_nonoverlapping_memory") => {
copy_intrinsic(bcx, false, true, *substs.types.get(FnSpace, 0),
llargs[0], llargs[1], llargs[2])
copy_intrinsic(bcx,
false,
true,
*substs.types.get(FnSpace, 0),
llargs[0],
llargs[1],
llargs[2],
call_debug_location)
}
(_, "volatile_copy_memory") => {
copy_intrinsic(bcx, true, true, *substs.types.get(FnSpace, 0),
llargs[0], llargs[1], llargs[2])
copy_intrinsic(bcx,
true,
true,
*substs.types.get(FnSpace, 0),
llargs[0],
llargs[1],
llargs[2],
call_debug_location)
}
(_, "volatile_set_memory") => {
memset_intrinsic(bcx, true, *substs.types.get(FnSpace, 0),
llargs[0], llargs[1], llargs[2])
memset_intrinsic(bcx,
true,
*substs.types.get(FnSpace, 0),
llargs[0],
llargs[1],
llargs[2],
call_debug_location)
}
(_, "volatile_load") => {
VolatileLoad(bcx, llargs[0])
@ -416,93 +452,208 @@ pub fn trans_intrinsic_call<'a, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
C_nil(ccx)
},
(_, "ctlz8") => count_zeros_intrinsic(bcx, "llvm.ctlz.i8", llargs[0]),
(_, "ctlz16") => count_zeros_intrinsic(bcx, "llvm.ctlz.i16", llargs[0]),
(_, "ctlz32") => count_zeros_intrinsic(bcx, "llvm.ctlz.i32", llargs[0]),
(_, "ctlz64") => count_zeros_intrinsic(bcx, "llvm.ctlz.i64", llargs[0]),
(_, "cttz8") => count_zeros_intrinsic(bcx, "llvm.cttz.i8", llargs[0]),
(_, "cttz16") => count_zeros_intrinsic(bcx, "llvm.cttz.i16", llargs[0]),
(_, "cttz32") => count_zeros_intrinsic(bcx, "llvm.cttz.i32", llargs[0]),
(_, "cttz64") => count_zeros_intrinsic(bcx, "llvm.cttz.i64", llargs[0]),
(_, "ctlz8") => count_zeros_intrinsic(bcx,
"llvm.ctlz.i8",
llargs[0],
call_debug_location),
(_, "ctlz16") => count_zeros_intrinsic(bcx,
"llvm.ctlz.i16",
llargs[0],
call_debug_location),
(_, "ctlz32") => count_zeros_intrinsic(bcx,
"llvm.ctlz.i32",
llargs[0],
call_debug_location),
(_, "ctlz64") => count_zeros_intrinsic(bcx,
"llvm.ctlz.i64",
llargs[0],
call_debug_location),
(_, "cttz8") => count_zeros_intrinsic(bcx,
"llvm.cttz.i8",
llargs[0],
call_debug_location),
(_, "cttz16") => count_zeros_intrinsic(bcx,
"llvm.cttz.i16",
llargs[0],
call_debug_location),
(_, "cttz32") => count_zeros_intrinsic(bcx,
"llvm.cttz.i32",
llargs[0],
call_debug_location),
(_, "cttz64") => count_zeros_intrinsic(bcx,
"llvm.cttz.i64",
llargs[0],
call_debug_location),
(_, "i8_add_with_overflow") =>
with_overflow_intrinsic(bcx, "llvm.sadd.with.overflow.i8", ret_ty,
llargs[0], llargs[1]),
with_overflow_intrinsic(bcx,
"llvm.sadd.with.overflow.i8",
ret_ty,
llargs[0],
llargs[1],
call_debug_location),
(_, "i16_add_with_overflow") =>
with_overflow_intrinsic(bcx, "llvm.sadd.with.overflow.i16", ret_ty,
llargs[0], llargs[1]),
with_overflow_intrinsic(bcx,
"llvm.sadd.with.overflow.i16",
ret_ty,
llargs[0],
llargs[1],
call_debug_location),
(_, "i32_add_with_overflow") =>
with_overflow_intrinsic(bcx, "llvm.sadd.with.overflow.i32", ret_ty,
llargs[0], llargs[1]),
with_overflow_intrinsic(bcx,
"llvm.sadd.with.overflow.i32",
ret_ty,
llargs[0],
llargs[1],
call_debug_location),
(_, "i64_add_with_overflow") =>
with_overflow_intrinsic(bcx, "llvm.sadd.with.overflow.i64", ret_ty,
llargs[0], llargs[1]),
with_overflow_intrinsic(bcx,
"llvm.sadd.with.overflow.i64",
ret_ty,
llargs[0],
llargs[1],
call_debug_location),
(_, "u8_add_with_overflow") =>
with_overflow_intrinsic(bcx, "llvm.uadd.with.overflow.i8", ret_ty,
llargs[0], llargs[1]),
with_overflow_intrinsic(bcx,
"llvm.uadd.with.overflow.i8",
ret_ty,
llargs[0],
llargs[1],
call_debug_location),
(_, "u16_add_with_overflow") =>
with_overflow_intrinsic(bcx, "llvm.uadd.with.overflow.i16", ret_ty,
llargs[0], llargs[1]),
with_overflow_intrinsic(bcx,
"llvm.uadd.with.overflow.i16",
ret_ty,
llargs[0],
llargs[1],
call_debug_location),
(_, "u32_add_with_overflow") =>
with_overflow_intrinsic(bcx, "llvm.uadd.with.overflow.i32", ret_ty,
llargs[0], llargs[1]),
with_overflow_intrinsic(bcx,
"llvm.uadd.with.overflow.i32",
ret_ty,
llargs[0],
llargs[1],
call_debug_location),
(_, "u64_add_with_overflow") =>
with_overflow_intrinsic(bcx, "llvm.uadd.with.overflow.i64", ret_ty,
llargs[0], llargs[1]),
with_overflow_intrinsic(bcx,
"llvm.uadd.with.overflow.i64",
ret_ty,
llargs[0],
llargs[1],
call_debug_location),
(_, "i8_sub_with_overflow") =>
with_overflow_intrinsic(bcx, "llvm.ssub.with.overflow.i8", ret_ty,
llargs[0], llargs[1]),
with_overflow_intrinsic(bcx,
"llvm.ssub.with.overflow.i8",
ret_ty,
llargs[0],
llargs[1],
call_debug_location),
(_, "i16_sub_with_overflow") =>
with_overflow_intrinsic(bcx, "llvm.ssub.with.overflow.i16", ret_ty,
llargs[0], llargs[1]),
with_overflow_intrinsic(bcx,
"llvm.ssub.with.overflow.i16",
ret_ty,
llargs[0],
llargs[1],
call_debug_location),
(_, "i32_sub_with_overflow") =>
with_overflow_intrinsic(bcx, "llvm.ssub.with.overflow.i32", ret_ty,
llargs[0], llargs[1]),
with_overflow_intrinsic(bcx,
"llvm.ssub.with.overflow.i32",
ret_ty,
llargs[0],
llargs[1],
call_debug_location),
(_, "i64_sub_with_overflow") =>
with_overflow_intrinsic(bcx, "llvm.ssub.with.overflow.i64", ret_ty,
llargs[0], llargs[1]),
with_overflow_intrinsic(bcx,
"llvm.ssub.with.overflow.i64",
ret_ty,
llargs[0],
llargs[1],
call_debug_location),
(_, "u8_sub_with_overflow") =>
with_overflow_intrinsic(bcx, "llvm.usub.with.overflow.i8", ret_ty,
llargs[0], llargs[1]),
with_overflow_intrinsic(bcx,
"llvm.usub.with.overflow.i8",
ret_ty,
llargs[0],
llargs[1],
call_debug_location),
(_, "u16_sub_with_overflow") =>
with_overflow_intrinsic(bcx, "llvm.usub.with.overflow.i16", ret_ty,
llargs[0], llargs[1]),
with_overflow_intrinsic(bcx,
"llvm.usub.with.overflow.i16",
ret_ty,
llargs[0],
llargs[1],
call_debug_location),
(_, "u32_sub_with_overflow") =>
with_overflow_intrinsic(bcx, "llvm.usub.with.overflow.i32", ret_ty,
llargs[0], llargs[1]),
with_overflow_intrinsic(bcx,
"llvm.usub.with.overflow.i32",
ret_ty,
llargs[0],
llargs[1],
call_debug_location),
(_, "u64_sub_with_overflow") =>
with_overflow_intrinsic(bcx, "llvm.usub.with.overflow.i64", ret_ty,
llargs[0], llargs[1]),
with_overflow_intrinsic(bcx,
"llvm.usub.with.overflow.i64",
ret_ty,
llargs[0],
llargs[1],
call_debug_location),
(_, "i8_mul_with_overflow") =>
with_overflow_intrinsic(bcx, "llvm.smul.with.overflow.i8", ret_ty,
llargs[0], llargs[1]),
with_overflow_intrinsic(bcx,
"llvm.smul.with.overflow.i8",
ret_ty,
llargs[0],
llargs[1],
call_debug_location),
(_, "i16_mul_with_overflow") =>
with_overflow_intrinsic(bcx, "llvm.smul.with.overflow.i16", ret_ty,
llargs[0], llargs[1]),
with_overflow_intrinsic(bcx,
"llvm.smul.with.overflow.i16",
ret_ty,
llargs[0],
llargs[1],
call_debug_location),
(_, "i32_mul_with_overflow") =>
with_overflow_intrinsic(bcx, "llvm.smul.with.overflow.i32", ret_ty,
llargs[0], llargs[1]),
with_overflow_intrinsic(bcx,
"llvm.smul.with.overflow.i32",
ret_ty,
llargs[0],
llargs[1],
call_debug_location),
(_, "i64_mul_with_overflow") =>
with_overflow_intrinsic(bcx, "llvm.smul.with.overflow.i64", ret_ty,
llargs[0], llargs[1]),
with_overflow_intrinsic(bcx,
"llvm.smul.with.overflow.i64",
ret_ty,
llargs[0],
llargs[1],
call_debug_location),
(_, "u8_mul_with_overflow") =>
with_overflow_intrinsic(bcx, "llvm.umul.with.overflow.i8", ret_ty,
llargs[0], llargs[1]),
with_overflow_intrinsic(bcx,
"llvm.umul.with.overflow.i8",
ret_ty,
llargs[0],
llargs[1],
call_debug_location),
(_, "u16_mul_with_overflow") =>
with_overflow_intrinsic(bcx, "llvm.umul.with.overflow.i16", ret_ty,
llargs[0], llargs[1]),
with_overflow_intrinsic(bcx,
"llvm.umul.with.overflow.i16",
ret_ty,
llargs[0],
llargs[1],
call_debug_location),
(_, "u32_mul_with_overflow") =>
with_overflow_intrinsic(bcx, "llvm.umul.with.overflow.i32", ret_ty,
llargs[0], llargs[1]),
with_overflow_intrinsic(bcx,
"llvm.umul.with.overflow.i32",
ret_ty,
llargs[0],
llargs[1],
call_debug_location),
(_, "u64_mul_with_overflow") =>
with_overflow_intrinsic(bcx, "llvm.umul.with.overflow.i64", ret_ty,
llargs[0], llargs[1]),
with_overflow_intrinsic(bcx,
"llvm.umul.with.overflow.i64",
ret_ty,
llargs[0],
llargs[1],
call_debug_location),
(_, "return_address") => {
if !fcx.caller_expects_out_pointer {
tcx.sess.span_err(call_info.span,
@ -609,7 +760,7 @@ pub fn trans_intrinsic_call<'a, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
// If we made a temporary stack slot, let's clean it up
match dest {
expr::Ignore => {
bcx = glue::drop_ty(bcx, llresult, ret_ty, Some(call_info));
bcx = glue::drop_ty(bcx, llresult, ret_ty, call_debug_location);
}
expr::SaveIn(_) => {}
}
@ -618,8 +769,14 @@ pub fn trans_intrinsic_call<'a, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
}
fn copy_intrinsic<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
allow_overlap: bool, volatile: bool, tp_ty: Ty<'tcx>,
dst: ValueRef, src: ValueRef, count: ValueRef) -> ValueRef {
allow_overlap: bool,
volatile: bool,
tp_ty: Ty<'tcx>,
dst: ValueRef,
src: ValueRef,
count: ValueRef,
call_debug_location: DebugLoc)
-> ValueRef {
let ccx = bcx.ccx();
let lltp_ty = type_of::type_of(ccx, tp_ty);
let align = C_i32(ccx, type_of::align_of(ccx, tp_ty) as i32);
@ -643,12 +800,25 @@ fn copy_intrinsic<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
let src_ptr = PointerCast(bcx, src, Type::i8p(ccx));
let llfn = ccx.get_intrinsic(&name);
Call(bcx, llfn, &[dst_ptr, src_ptr, Mul(bcx, size, count), align,
C_bool(ccx, volatile)], None)
Call(bcx,
llfn,
&[dst_ptr,
src_ptr,
Mul(bcx, size, count, DebugLoc::None),
align,
C_bool(ccx, volatile)],
None,
call_debug_location)
}
fn memset_intrinsic<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, volatile: bool, tp_ty: Ty<'tcx>,
dst: ValueRef, val: ValueRef, count: ValueRef) -> ValueRef {
fn memset_intrinsic<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
volatile: bool,
tp_ty: Ty<'tcx>,
dst: ValueRef,
val: ValueRef,
count: ValueRef,
call_debug_location: DebugLoc)
-> ValueRef {
let ccx = bcx.ccx();
let lltp_ty = type_of::type_of(ccx, tp_ty);
let align = C_i32(ccx, type_of::align_of(ccx, tp_ty) as i32);
@ -662,22 +832,38 @@ fn memset_intrinsic<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, volatile: bool, tp_ty: T
let dst_ptr = PointerCast(bcx, dst, Type::i8p(ccx));
let llfn = ccx.get_intrinsic(&name);
Call(bcx, llfn, &[dst_ptr, val, Mul(bcx, size, count), align,
C_bool(ccx, volatile)], None)
Call(bcx,
llfn,
&[dst_ptr,
val,
Mul(bcx, size, count, DebugLoc::None),
align,
C_bool(ccx, volatile)],
None,
call_debug_location)
}
fn count_zeros_intrinsic(bcx: Block, name: &'static str, val: ValueRef) -> ValueRef {
fn count_zeros_intrinsic(bcx: Block,
name: &'static str,
val: ValueRef,
call_debug_location: DebugLoc)
-> ValueRef {
let y = C_bool(bcx.ccx(), false);
let llfn = bcx.ccx().get_intrinsic(&name);
Call(bcx, llfn, &[val, y], None)
Call(bcx, llfn, &[val, y], None, call_debug_location)
}
fn with_overflow_intrinsic<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, name: &'static str,
t: Ty<'tcx>, a: ValueRef, b: ValueRef) -> ValueRef {
fn with_overflow_intrinsic<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
name: &'static str,
t: Ty<'tcx>,
a: ValueRef,
b: ValueRef,
call_debug_location: DebugLoc)
-> ValueRef {
let llfn = bcx.ccx().get_intrinsic(&name);
// Convert `i1` to a `bool`, and write it to the out parameter
let val = Call(bcx, llfn, &[a, b], None);
let val = Call(bcx, llfn, &[a, b], None, call_debug_location);
let result = ExtractValue(bcx, val, 0);
let overflow = ZExt(bcx, ExtractValue(bcx, val, 1), Type::bool(bcx.ccx()));
let ret = C_undef(type_of::type_of(bcx.ccx(), t));

View file

@ -24,6 +24,7 @@ use trans::callee;
use trans::cleanup;
use trans::common::*;
use trans::datum::*;
use trans::debuginfo::DebugLoc;
use trans::expr::{SaveIn, Ignore};
use trans::expr;
use trans::glue;
@ -676,7 +677,7 @@ pub fn trans_object_shim<'a, 'tcx>(
ArgVals(llargs.as_slice()),
dest).bcx;
finish_fn(&fcx, bcx, sig.output);
finish_fn(&fcx, bcx, sig.output, DebugLoc::None);
(llfn, method_bare_fn_ty)
}

View file

@ -21,6 +21,7 @@ use trans::cleanup::CleanupMethods;
use trans::common::*;
use trans::consts;
use trans::datum::*;
use trans::debuginfo::DebugLoc;
use trans::expr::{Dest, Ignore, SaveIn};
use trans::expr;
use trans::glue;
@ -58,7 +59,11 @@ pub fn make_drop_glue_unboxed<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
let dataptr = get_dataptr(bcx, vptr);
let bcx = if type_needs_drop(tcx, unit_ty) {
let len = get_len(bcx, vptr);
iter_vec_raw(bcx, dataptr, unit_ty, len, |bb, vv, tt| glue::drop_ty(bb, vv, tt, None))
iter_vec_raw(bcx,
dataptr,
unit_ty,
len,
|bb, vv, tt| glue::drop_ty(bb, vv, tt, DebugLoc::None))
} else {
bcx
};
@ -71,7 +76,7 @@ pub fn make_drop_glue_unboxed<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
let not_empty = ICmp(bcx, llvm::IntNE, len, C_uint(ccx, 0u));
with_cond(bcx, not_empty, |bcx| {
let llalign = C_uint(ccx, machine::llalign_of_min(ccx, llty));
let size = Mul(bcx, C_uint(ccx, unit_size), len);
let size = Mul(bcx, C_uint(ccx, unit_size), len, DebugLoc::None);
glue::trans_exchange_free_dyn(bcx, dataptr, size, llalign)
})
} else {
@ -420,14 +425,14 @@ pub fn iter_vec_loop<'blk, 'tcx, F>(bcx: Block<'blk, 'tcx>,
let cond_bcx = fcx.new_temp_block("expr_repeat: loop cond");
let body_bcx = fcx.new_temp_block("expr_repeat: body: set");
let inc_bcx = fcx.new_temp_block("expr_repeat: body: inc");
Br(bcx, loop_bcx.llbb);
Br(bcx, loop_bcx.llbb, DebugLoc::None);
let loop_counter = {
// i = 0
let i = alloca(loop_bcx, bcx.ccx().int_type(), "__i");
Store(loop_bcx, C_uint(bcx.ccx(), 0u), i);
Br(loop_bcx, cond_bcx.llbb);
Br(loop_bcx, cond_bcx.llbb, DebugLoc::None);
i
};
@ -436,7 +441,7 @@ pub fn iter_vec_loop<'blk, 'tcx, F>(bcx: Block<'blk, 'tcx>,
let rhs = count;
let cond_val = ICmp(cond_bcx, llvm::IntULT, lhs, rhs);
CondBr(cond_bcx, cond_val, body_bcx.llbb, next_bcx.llbb);
CondBr(cond_bcx, cond_val, body_bcx.llbb, next_bcx.llbb, DebugLoc::None);
}
{ // loop body
@ -448,15 +453,15 @@ pub fn iter_vec_loop<'blk, 'tcx, F>(bcx: Block<'blk, 'tcx>,
};
let body_bcx = f(body_bcx, lleltptr, vt.unit_ty);
Br(body_bcx, inc_bcx.llbb);
Br(body_bcx, inc_bcx.llbb, DebugLoc::None);
}
{ // i += 1
let i = Load(inc_bcx, loop_counter);
let plusone = Add(inc_bcx, i, C_uint(bcx.ccx(), 1u));
let plusone = Add(inc_bcx, i, C_uint(bcx.ccx(), 1u), DebugLoc::None);
Store(inc_bcx, plusone, loop_counter);
Br(inc_bcx, cond_bcx.llbb);
Br(inc_bcx, cond_bcx.llbb, DebugLoc::None);
}
next_bcx
@ -484,19 +489,19 @@ pub fn iter_vec_raw<'blk, 'tcx, F>(bcx: Block<'blk, 'tcx>,
// Now perform the iteration.
let header_bcx = fcx.new_temp_block("iter_vec_loop_header");
Br(bcx, header_bcx.llbb);
Br(bcx, header_bcx.llbb, DebugLoc::None);
let data_ptr =
Phi(header_bcx, val_ty(data_ptr), &[data_ptr], &[bcx.llbb]);
let not_yet_at_end =
ICmp(header_bcx, llvm::IntULT, data_ptr, data_end_ptr);
let body_bcx = fcx.new_temp_block("iter_vec_loop_body");
let next_bcx = fcx.new_temp_block("iter_vec_next");
CondBr(header_bcx, not_yet_at_end, body_bcx.llbb, next_bcx.llbb);
CondBr(header_bcx, not_yet_at_end, body_bcx.llbb, next_bcx.llbb, DebugLoc::None);
let body_bcx = f(body_bcx, data_ptr, vt.unit_ty);
AddIncomingToPhi(data_ptr, InBoundsGEP(body_bcx, data_ptr,
&[C_int(bcx.ccx(), 1i)]),
body_bcx.llbb);
Br(body_bcx, header_bcx.llbb);
Br(body_bcx, header_bcx.llbb, DebugLoc::None);
next_bcx
}
}