diff --git a/src/rustc/driver/driver.rs b/src/rustc/driver/driver.rs index f0bac2b1d97..2bbce0e2cb5 100644 --- a/src/rustc/driver/driver.rs +++ b/src/rustc/driver/driver.rs @@ -386,6 +386,7 @@ fn build_session_options(match: getopts::match, let stats = opt_present(match, "stats"); let time_passes = opt_present(match, "time-passes"); let time_llvm_passes = opt_present(match, "time-llvm-passes"); + let count_llvm_insns = opt_present(match, "count-llvm-insns"); let sysroot_opt = getopts::opt_maybe_str(match, "sysroot"); let target_opt = getopts::opt_maybe_str(match, "target"); let mut no_asm_comments = getopts::opt_present(match, "no-asm-comments"); @@ -433,6 +434,7 @@ fn build_session_options(match: getopts::match, save_temps: save_temps, stats: stats, time_passes: time_passes, + count_llvm_insns: count_llvm_insns, time_llvm_passes: time_llvm_passes, output_type: output_type, addl_lib_search_paths: addl_lib_search_paths, @@ -513,6 +515,7 @@ fn opts() -> [getopts::opt] { optflag("c"), optflag("g"), optflag("save-temps"), optopt("sysroot"), optopt("target"), optflag("stats"), optflag("time-passes"), optflag("time-llvm-passes"), + optflag("count-llvm-insns"), optflag("no-verify"), optflag("no-lint-ctypes"), optmulti("cfg"), optflag("test"), diff --git a/src/rustc/driver/rustc.rs b/src/rustc/driver/rustc.rs index 2058b0f45b9..fe5587e0faf 100644 --- a/src/rustc/driver/rustc.rs +++ b/src/rustc/driver/rustc.rs @@ -62,6 +62,7 @@ Options: --time-passes Time the individual phases of the compiler --time-llvm-passes Time the individual phases of the LLVM backend + --count-llvm-insns Count and categorize generated LLVM instructions -v --version Print version info and exit --warn-unused-imports Warn about unnecessary imports diff --git a/src/rustc/driver/session.rs b/src/rustc/driver/session.rs index b691aa65f88..79f9a44aaa0 100644 --- a/src/rustc/driver/session.rs +++ b/src/rustc/driver/session.rs @@ -35,6 +35,7 @@ type options = save_temps: bool, stats: bool, time_passes: bool, + count_llvm_insns: bool, time_llvm_passes: bool, output_type: back::link::output_type, addl_lib_search_paths: [str], diff --git a/src/rustc/middle/trans/alt.rs b/src/rustc/middle/trans/alt.rs index 818fda7c60f..0705114495c 100644 --- a/src/rustc/middle/trans/alt.rs +++ b/src/rustc/middle/trans/alt.rs @@ -40,6 +40,7 @@ enum opt_result { range_result(result, result), } fn trans_opt(bcx: block, o: opt) -> opt_result { + let _icx = bcx.insn_ctxt("alt::trans_opt"); let ccx = bcx.ccx(); let mut bcx = bcx; alt o { @@ -259,6 +260,7 @@ fn get_options(ccx: @crate_ctxt, m: match, col: uint) -> [opt] { fn extract_variant_args(bcx: block, pat_id: ast::node_id, vdefs: {enm: def_id, var: def_id}, val: ValueRef) -> {vals: [ValueRef], bcx: block} { + let _icx = bcx.insn_ctxt("alt::extract_variant_args"); let ccx = bcx.fcx.ccx; let mut bcx = bcx; let enum_ty_substs = alt check ty::get(node_id_type(bcx, pat_id)).struct { @@ -358,6 +360,7 @@ fn pick_col(m: match) -> uint { fn compile_submatch(bcx: block, m: match, vals: [ValueRef], chk: option, &exits: [exit_node]) { + let _icx = bcx.insn_ctxt("alt::compile_submatch"); let mut bcx = bcx; let tcx = bcx.tcx(), dm = tcx.def_map; if m.len() == 0u { Br(bcx, option::get(chk)()); ret; } @@ -564,6 +567,7 @@ fn compile_submatch(bcx: block, m: match, vals: [ValueRef], // Returns false for unreachable blocks fn make_phi_bindings(bcx: block, map: [exit_node], ids: pat_util::pat_id_map) -> bool { + let _icx = bcx.insn_ctxt("alt::make_phi_bindings"); let our_block = bcx.llbb as uint; let mut success = true, bcx 
= bcx; ids.items {|name, node_id| @@ -609,6 +613,7 @@ fn make_phi_bindings(bcx: block, map: [exit_node], fn trans_alt(bcx: block, expr: @ast::expr, arms: [ast::arm], mode: ast::alt_mode, dest: dest) -> block { + let _icx = bcx.insn_ctxt("alt::trans_alt"); with_scope(bcx, "alt") {|bcx| trans_alt_inner(bcx, expr, arms, mode, dest) } @@ -616,6 +621,7 @@ fn trans_alt(bcx: block, expr: @ast::expr, arms: [ast::arm], fn trans_alt_inner(scope_cx: block, expr: @ast::expr, arms: [ast::arm], mode: ast::alt_mode, dest: dest) -> block { + let _icx = scope_cx.insn_ctxt("alt::trans_alt_inner"); let bcx = scope_cx, tcx = bcx.tcx(); let mut bodies = [], match = []; @@ -675,6 +681,7 @@ fn trans_alt_inner(scope_cx: block, expr: @ast::expr, arms: [ast::arm], // Not alt-related, but similar to the pattern-munging code above fn bind_irrefutable_pat(bcx: block, pat: @ast::pat, val: ValueRef, make_copy: bool) -> block { + let _icx = bcx.insn_ctxt("alt::bind_irrefutable_pat"); let ccx = bcx.fcx.ccx; let mut bcx = bcx; diff --git a/src/rustc/middle/trans/base.rs b/src/rustc/middle/trans/base.rs index b63e42bf7a4..bc63f46f780 100644 --- a/src/rustc/middle/trans/base.rs +++ b/src/rustc/middle/trans/base.rs @@ -80,6 +80,33 @@ fn dup_for_join(dest: dest) -> dest { } } +resource icx_popper(ccx: @crate_ctxt) { + if (ccx.sess.opts.count_llvm_insns) { + vec::pop(*ccx.stats.llvm_insn_ctxt); + } +} + +impl ccx_icx for @crate_ctxt { + fn insn_ctxt(s: str) -> icx_popper { + if (self.sess.opts.count_llvm_insns) { + *self.stats.llvm_insn_ctxt += [s]; + } + icx_popper(self) + } +} + +impl bcx_icx for block { + fn insn_ctxt(s: str) -> icx_popper { + self.ccx().insn_ctxt(s) + } +} + +impl fcx_icx for fn_ctxt { + fn insn_ctxt(s: str) -> icx_popper { + self.ccx.insn_ctxt(s) + } +} + fn join_returns(parent_cx: block, in_cxs: [block], in_ds: [dest], out_dest: dest) -> block { let out = sub_block(parent_cx, "join"); @@ -179,6 +206,7 @@ fn get_simple_extern_fn(cx: block, externs: hashmap, llmod: ModuleRef, name: str, n_args: int) -> ValueRef { + let _icx = cx.insn_ctxt("get_simple_extern_fn"); let ccx = cx.fcx.ccx; let inputs = vec::from_elem(n_args as uint, ccx.int_type); let output = ccx.int_type; @@ -189,6 +217,7 @@ fn get_simple_extern_fn(cx: block, fn trans_native_call(cx: block, externs: hashmap, llmod: ModuleRef, name: str, args: [ValueRef]) -> ValueRef { + let _icx = cx.insn_ctxt("trans_native_call"); let n = args.len() as int; let llnative: ValueRef = get_simple_extern_fn(cx, externs, llmod, name, n); @@ -200,27 +229,32 @@ fn trans_native_call(cx: block, externs: hashmap, } fn trans_free(cx: block, v: ValueRef) -> block { + let _icx = cx.insn_ctxt("trans_free"); Call(cx, cx.ccx().upcalls.free, [PointerCast(cx, v, T_ptr(T_i8()))]); cx } fn trans_shared_free(cx: block, v: ValueRef) -> block { + let _icx = cx.insn_ctxt("trans_shared_free"); Call(cx, cx.ccx().upcalls.shared_free, [PointerCast(cx, v, T_ptr(T_i8()))]); ret cx; } fn umax(cx: block, a: ValueRef, b: ValueRef) -> ValueRef { + let _icx = cx.insn_ctxt("umax"); let cond = ICmp(cx, lib::llvm::IntULT, a, b); ret Select(cx, cond, b, a); } fn umin(cx: block, a: ValueRef, b: ValueRef) -> ValueRef { + let _icx = cx.insn_ctxt("umin"); let cond = ICmp(cx, lib::llvm::IntULT, a, b); ret Select(cx, cond, a, b); } fn alloca(cx: block, t: TypeRef) -> ValueRef { + let _icx = cx.insn_ctxt("alloca"); if cx.unreachable { ret llvm::LLVMGetUndef(t); } ret Alloca(raw_block(cx.fcx, cx.fcx.llstaticallocas), t); } @@ -229,6 +263,7 @@ fn alloca(cx: block, t: TypeRef) -> ValueRef { // The type of 
the returned pointer is always i8*. If you care about the // return type, use bump_ptr(). fn ptr_offs(bcx: block, base: ValueRef, sz: ValueRef) -> ValueRef { + let _icx = bcx.insn_ctxt("ptr_offs"); let raw = PointerCast(bcx, base, T_ptr(T_i8())); InBoundsGEP(bcx, raw, [sz]) } @@ -237,6 +272,7 @@ fn ptr_offs(bcx: block, base: ValueRef, sz: ValueRef) -> ValueRef { // to a given type. fn bump_ptr(bcx: block, t: ty::t, base: ValueRef, sz: ValueRef) -> ValueRef { + let _icx = bcx.insn_ctxt("bump_ptr"); let ccx = bcx.ccx(); let bumped = ptr_offs(bcx, base, sz); let typ = T_ptr(type_of(ccx, t)); @@ -249,6 +285,7 @@ fn bump_ptr(bcx: block, t: ty::t, base: ValueRef, sz: ValueRef) -> fn GEP_enum(bcx: block, llblobptr: ValueRef, enum_id: ast::def_id, variant_id: ast::def_id, ty_substs: [ty::t], ix: uint) -> result { + let _icx = bcx.insn_ctxt("GEP_enum"); let ccx = bcx.ccx(); let variant = ty::enum_variant_with_id(ccx.tcx, enum_id, variant_id); assert ix < variant.args.len(); @@ -265,6 +302,7 @@ fn GEP_enum(bcx: block, llblobptr: ValueRef, enum_id: ast::def_id, // and a size indicating how much space we want malloc'd. fn trans_shared_malloc(cx: block, llptr_ty: TypeRef, llsize: ValueRef) -> result { + let _icx = cx.insn_ctxt("opaque_shared_malloc"); let rval = Call(cx, cx.ccx().upcalls.shared_malloc, [llsize]); ret rslt(cx, PointerCast(cx, rval, llptr_ty)); } @@ -277,6 +315,7 @@ fn trans_shared_malloc(cx: block, llptr_ty: TypeRef, llsize: ValueRef) fn opaque_box_body(bcx: block, body_t: ty::t, boxptr: ValueRef) -> ValueRef { + let _icx = bcx.insn_ctxt("opaque_box_body"); let ccx = bcx.ccx(); let boxptr = PointerCast(bcx, boxptr, T_ptr(T_box_header(ccx))); let bodyptr = GEPi(bcx, boxptr, [1]); @@ -288,6 +327,7 @@ fn opaque_box_body(bcx: block, // header. fn trans_malloc_boxed_raw(bcx: block, t: ty::t, &static_ti: option<@tydesc_info>) -> result { + let _icx = bcx.insn_ctxt("trans_malloc_boxed_raw"); let mut bcx = bcx; let ccx = bcx.ccx(); @@ -309,6 +349,7 @@ fn trans_malloc_boxed_raw(bcx: block, t: ty::t, // initializes the reference count to 1, and pulls out the body and rc fn trans_malloc_boxed(bcx: block, t: ty::t) -> {bcx: block, box: ValueRef, body: ValueRef} { + let _icx = bcx.insn_ctxt("trans_malloc_boxed"); let mut ti = none; let {bcx, val:box} = trans_malloc_boxed_raw(bcx, t, ti); let body = GEPi(bcx, box, [0, abi::box_field_body]); @@ -396,6 +437,7 @@ fn note_unique_llvm_symbol(ccx: @crate_ctxt, sym: str) { // Generates the declaration for (but doesn't emit) a type descriptor. 
fn declare_tydesc(ccx: @crate_ctxt, t: ty::t) -> @tydesc_info { + let _icx = ccx.insn_ctxt("declare_tydesc"); log(debug, "+++ declare_tydesc " + ty_to_str(ccx.tcx, t)); let llty = type_of(ccx, t); let llsize = llsize_of(ccx, llty); @@ -425,6 +467,7 @@ type glue_helper = fn@(block, ValueRef, ty::t); fn declare_generic_glue(ccx: @crate_ctxt, t: ty::t, llfnty: TypeRef, name: str) -> ValueRef { + let _icx = ccx.insn_ctxt("declare_generic_glue"); let name = name; let mut fn_nm; //XXX this triggers duplicate LLVM symbols @@ -441,6 +484,7 @@ fn declare_generic_glue(ccx: @crate_ctxt, t: ty::t, llfnty: TypeRef, fn make_generic_glue_inner(ccx: @crate_ctxt, t: ty::t, llfn: ValueRef, helper: glue_helper) -> ValueRef { + let _icx = ccx.insn_ctxt("make_generic_glue_inner"); let fcx = new_fn_ctxt(ccx, [], llfn, none); lib::llvm::SetLinkage(llfn, lib::llvm::InternalLinkage); ccx.stats.n_glues_created += 1u; @@ -463,6 +507,7 @@ fn make_generic_glue_inner(ccx: @crate_ctxt, t: ty::t, fn make_generic_glue(ccx: @crate_ctxt, t: ty::t, llfn: ValueRef, helper: glue_helper, name: str) -> ValueRef { + let _icx = ccx.insn_ctxt("make_generic_glue"); if !ccx.sess.opts.stats { ret make_generic_glue_inner(ccx, t, llfn, helper); } @@ -476,6 +521,7 @@ fn make_generic_glue(ccx: @crate_ctxt, t: ty::t, llfn: ValueRef, } fn emit_tydescs(ccx: @crate_ctxt) { + let _icx = ccx.insn_ctxt("emit_tydescs"); ccx.tydescs.items {|key, val| let glue_fn_ty = T_ptr(T_glue_fn(ccx)); let ti = val; @@ -526,6 +572,7 @@ fn emit_tydescs(ccx: @crate_ctxt) { } fn make_take_glue(cx: block, v: ValueRef, t: ty::t) { + let _icx = cx.insn_ctxt("make_take_glue"); let mut bcx = cx; // NB: v is a *pointer* to type t here, not a direct value. bcx = alt ty::get(t).struct { @@ -562,6 +609,7 @@ fn make_take_glue(cx: block, v: ValueRef, t: ty::t) { } fn incr_refcnt_of_boxed(cx: block, box_ptr: ValueRef) -> block { + let _icx = cx.insn_ctxt("incr_refcnt_of_boxed"); let ccx = cx.ccx(); maybe_validate_box(cx, box_ptr); let rc_ptr = GEPi(cx, box_ptr, [0, abi::box_field_refcnt]); @@ -575,6 +623,7 @@ fn make_free_glue(bcx: block, v: ValueRef, t: ty::t) { // v is a pointer to the actual box component of the type here. The // ValueRef will have the wrong type here (make_generic_glue is casting // everything to a pointer to the type that the glue acts on). + let _icx = bcx.insn_ctxt("make_free_glue"); let ccx = bcx.ccx(); let bcx = alt ty::get(t).struct { ty::ty_box(body_mt) { @@ -611,6 +660,7 @@ fn make_free_glue(bcx: block, v: ValueRef, t: ty::t) { fn make_drop_glue(bcx: block, v0: ValueRef, t: ty::t) { // NB: v0 is an *alias* of type t here, not a direct value. 
+ let _icx = bcx.insn_ctxt("make_drop_glue"); let ccx = bcx.ccx(); let bcx = alt ty::get(t).struct { ty::ty_box(_) | ty::ty_opaque_box { @@ -644,6 +694,7 @@ fn make_drop_glue(bcx: block, v0: ValueRef, t: ty::t) { fn get_res_dtor(ccx: @crate_ctxt, did: ast::def_id, substs: [ty::t]) -> ValueRef { + let _icx = ccx.insn_ctxt("trans_res_dtor"); let did = if did.crate != ast::local_crate && substs.len() > 0u { maybe_instantiate_inline(ccx, did) } else { did }; @@ -653,6 +704,7 @@ fn get_res_dtor(ccx: @crate_ctxt, did: ast::def_id, substs: [ty::t]) fn trans_res_drop(bcx: block, rs: ValueRef, did: ast::def_id, inner_t: ty::t, tps: [ty::t]) -> block { + let _icx = bcx.insn_ctxt("trans_res_drop"); let ccx = bcx.ccx(); let inner_t_s = ty::substitute_type_params(ccx.tcx, tps, inner_t); @@ -689,6 +741,7 @@ fn maybe_validate_box(_cx: block, _box_ptr: ValueRef) { } fn decr_refcnt_maybe_free(bcx: block, box_ptr: ValueRef, t: ty::t) -> block { + let _icx = bcx.insn_ctxt("decr_refcnt_maybe_free"); let ccx = bcx.ccx(); maybe_validate_box(bcx, box_ptr); @@ -742,6 +795,7 @@ fn compare_scalar_types(cx: block, lhs: ValueRef, rhs: ValueRef, // A helper function to do the actual comparison of scalar values. fn compare_scalar_values(cx: block, lhs: ValueRef, rhs: ValueRef, nt: scalar_type, op: ast::binop) -> ValueRef { + let _icx = cx.insn_ctxt("compare_scalar_values"); fn die_(cx: block) -> ! { cx.tcx().sess.bug("compare_scalar_values: must be a\ comparison operator"); @@ -812,9 +866,12 @@ fn store_inbounds(cx: block, v: ValueRef, p: ValueRef, // Iterates through the elements of a structural type. fn iter_structural_ty(cx: block, av: ValueRef, t: ty::t, f: val_and_ty_fn) -> block { + let _icx = cx.insn_ctxt("iter_structural_ty"); + fn iter_variant(cx: block, a_tup: ValueRef, variant: ty::variant_info, tps: [ty::t], tid: ast::def_id, f: val_and_ty_fn) -> block { + let _icx = cx.insn_ctxt("iter_variant"); if variant.args.len() == 0u { ret cx; } let fn_ty = variant.ctor_ty; let ccx = cx.ccx(); @@ -922,6 +979,7 @@ fn lazily_emit_all_tydesc_glue(ccx: @crate_ctxt, fn lazily_emit_tydesc_glue(ccx: @crate_ctxt, field: int, static_ti: option<@tydesc_info>) { + let _icx = ccx.insn_ctxt("lazily_emit_tydesc_glue"); alt static_ti { none { } some(ti) { @@ -977,6 +1035,7 @@ fn lazily_emit_tydesc_glue(ccx: @crate_ctxt, field: int, fn call_tydesc_glue_full(cx: block, v: ValueRef, tydesc: ValueRef, field: int, static_ti: option<@tydesc_info>) { + let _icx = cx.insn_ctxt("call_tydesc_glue_full"); lazily_emit_tydesc_glue(cx.ccx(), field, static_ti); if cx.unreachable { ret; } @@ -1014,6 +1073,7 @@ fn call_tydesc_glue_full(cx: block, v: ValueRef, tydesc: ValueRef, fn call_tydesc_glue(cx: block, v: ValueRef, t: ty::t, field: int) -> block { + let _icx = cx.insn_ctxt("call_tydesc_glue"); let mut ti: option<@tydesc_info> = none; let {bcx: bcx, val: td} = get_tydesc(cx, t, ti); call_tydesc_glue_full(bcx, v, td, field, ti); @@ -1024,7 +1084,7 @@ fn call_cmp_glue(cx: block, lhs: ValueRef, rhs: ValueRef, t: ty::t, llop: ValueRef) -> result { // We can't use call_tydesc_glue_full() and friends here because compare // glue has a special signature. 
- + let _icx = cx.insn_ctxt("call_cmp_glue"); let bcx = cx; let r = spill_if_immediate(bcx, lhs, t); @@ -1051,6 +1111,7 @@ fn call_cmp_glue(cx: block, lhs: ValueRef, rhs: ValueRef, t: ty::t, } fn take_ty(cx: block, v: ValueRef, t: ty::t) -> block { + let _icx = cx.insn_ctxt("take_ty"); if ty::type_needs_drop(cx.tcx(), t) { ret call_tydesc_glue(cx, v, t, abi::tydesc_field_take_glue); } @@ -1058,6 +1119,7 @@ fn take_ty(cx: block, v: ValueRef, t: ty::t) -> block { } fn drop_ty(cx: block, v: ValueRef, t: ty::t) -> block { + let _icx = cx.insn_ctxt("drop_ty"); if ty::type_needs_drop(cx.tcx(), t) { ret call_tydesc_glue(cx, v, t, abi::tydesc_field_drop_glue); } @@ -1065,6 +1127,7 @@ fn drop_ty(cx: block, v: ValueRef, t: ty::t) -> block { } fn drop_ty_immediate(bcx: block, v: ValueRef, t: ty::t) -> block { + let _icx = bcx.insn_ctxt("drop_ty_immediate"); alt ty::get(t).struct { ty::ty_uniq(_) | ty::ty_vec(_) | ty::ty_str { free_ty(bcx, v, t) } ty::ty_box(_) | ty::ty_opaque_box { @@ -1075,6 +1138,7 @@ fn drop_ty_immediate(bcx: block, v: ValueRef, t: ty::t) -> block { } fn take_ty_immediate(bcx: block, v: ValueRef, t: ty::t) -> result { + let _icx = bcx.insn_ctxt("take_ty_immediate"); alt ty::get(t).struct { ty::ty_box(_) | ty::ty_opaque_box { rslt(incr_refcnt_of_boxed(bcx, v), v) @@ -1088,6 +1152,7 @@ fn take_ty_immediate(bcx: block, v: ValueRef, t: ty::t) -> result { } fn free_ty(cx: block, v: ValueRef, t: ty::t) -> block { + let _icx = cx.insn_ctxt("free_ty"); if ty::type_needs_drop(cx.tcx(), t) { ret call_tydesc_glue(cx, v, t, abi::tydesc_field_free_glue); } @@ -1100,7 +1165,7 @@ fn call_memmove(cx: block, dst: ValueRef, src: ValueRef, // alignment is statically known (it must be nothing more than a constant // int, or LLVM complains -- not even a constant element of a tydesc // works). - + let _icx = cx.insn_ctxt("call_memmove"); let ccx = cx.ccx(); let key = alt ccx.sess.targ_cfg.arch { session::arch_x86 | session::arch_arm { "llvm.memmove.p0i8.p0i8.i32" } @@ -1121,6 +1186,7 @@ fn call_memmove(cx: block, dst: ValueRef, src: ValueRef, fn memmove_ty(bcx: block, dst: ValueRef, src: ValueRef, t: ty::t) -> block { + let _icx = bcx.insn_ctxt("memmove_ty"); let ccx = bcx.ccx(); if ty::type_is_structural(t) { let llsz = llsize_of(ccx, type_of(ccx, t)); @@ -1143,6 +1209,7 @@ fn type_is_structural_or_param(t: ty::t) -> bool { fn copy_val(cx: block, action: copy_action, dst: ValueRef, src: ValueRef, t: ty::t) -> block { + let _icx = cx.insn_ctxt("copy_val"); if action == DROP_EXISTING && (type_is_structural_or_param(t) || ty::type_is_unique(t)) { @@ -1159,6 +1226,7 @@ fn copy_val(cx: block, action: copy_action, dst: ValueRef, fn copy_val_no_check(bcx: block, action: copy_action, dst: ValueRef, src: ValueRef, t: ty::t) -> block { + let _icx = bcx.insn_ctxt("copy_val_no_check"); let ccx = bcx.ccx(); let mut bcx = bcx; if ty::type_is_scalar(t) { @@ -1189,6 +1257,7 @@ fn copy_val_no_check(bcx: block, action: copy_action, dst: ValueRef, // doesn't need to be dropped. fn move_val(cx: block, action: copy_action, dst: ValueRef, src: lval_result, t: ty::t) -> block { + let _icx = cx.insn_ctxt("move_val"); let mut src_val = src.val; let tcx = cx.tcx(); let mut cx = cx; @@ -1221,6 +1290,7 @@ fn move_val(cx: block, action: copy_action, dst: ValueRef, fn store_temp_expr(cx: block, action: copy_action, dst: ValueRef, src: lval_result, t: ty::t, last_use: bool) -> block { + let _icx = cx.insn_ctxt("trans_temp_expr"); // Lvals in memory are not temporaries. Copy them. 
if src.kind != temporary && !last_use { let v = if src.kind == owned { @@ -1234,6 +1304,7 @@ fn store_temp_expr(cx: block, action: copy_action, dst: ValueRef, } fn trans_crate_lit(cx: @crate_ctxt, lit: ast::lit) -> ValueRef { + let _icx = cx.insn_ctxt("trans_crate_lit"); alt lit.node { ast::lit_int(i, t) { C_integral(T_int_ty(cx, t), i as u64, True) } ast::lit_uint(u, t) { C_integral(T_uint_ty(cx, t), u, False) } @@ -1247,6 +1318,7 @@ fn trans_crate_lit(cx: @crate_ctxt, lit: ast::lit) -> ValueRef { } fn trans_lit(cx: block, lit: ast::lit, dest: dest) -> block { + let _icx = cx.insn_ctxt("trans_lit"); if dest == ignore { ret cx; } alt lit.node { ast::lit_str(s) { ret tvec::trans_str(cx, s, dest); } @@ -1258,6 +1330,7 @@ fn trans_lit(cx: block, lit: ast::lit, dest: dest) -> block { fn trans_unary(bcx: block, op: ast::unop, e: @ast::expr, un_expr: @ast::expr, dest: dest) -> block { + let _icx = bcx.insn_ctxt("trans_unary"); // Check for user-defined method call alt bcx.ccx().maps.method_map.find(un_expr.id) { some(origin) { @@ -1309,6 +1382,7 @@ fn trans_unary(bcx: block, op: ast::unop, e: @ast::expr, } fn trans_addr_of(cx: block, e: @ast::expr, dest: dest) -> block { + let _icx = cx.insn_ctxt("trans_addr_of"); let mut {bcx, val, kind} = trans_temp_lval(cx, e); let ety = expr_ty(cx, e); let is_immediate = ty::type_is_immediate(ety); @@ -1321,6 +1395,7 @@ fn trans_addr_of(cx: block, e: @ast::expr, dest: dest) -> block { fn trans_compare(cx: block, op: ast::binop, lhs: ValueRef, _lhs_t: ty::t, rhs: ValueRef, rhs_t: ty::t) -> result { + let _icx = cx.insn_ctxt("trans_compare"); if ty::type_is_scalar(rhs_t) { let rs = compare_scalar_types(cx, lhs, rhs, rhs_t, op); ret rslt(rs.bcx, rs.val); @@ -1366,7 +1441,6 @@ fn cast_shift_rhs(op: ast::binop, trunc: fn(ValueRef, TypeRef) -> ValueRef, zext: fn(ValueRef, TypeRef) -> ValueRef ) -> ValueRef { - // Shifts may have any size int on the rhs if ast_util::is_shift_binop(op) { let rhs_llty = val_ty(rhs); @@ -1392,6 +1466,7 @@ fn cast_shift_rhs(op: ast::binop, fn trans_eager_binop(cx: block, op: ast::binop, lhs: ValueRef, lhs_t: ty::t, rhs: ValueRef, rhs_t: ty::t, dest: dest) -> block { + let _icx = cx.insn_ctxt("trans_eager_binop"); if dest == ignore { ret cx; } let intype = { if ty::type_is_bot(lhs_t) { rhs_t } @@ -1447,6 +1522,7 @@ fn trans_eager_binop(cx: block, op: ast::binop, lhs: ValueRef, fn trans_assign_op(bcx: block, ex: @ast::expr, op: ast::binop, dst: @ast::expr, src: @ast::expr) -> block { + let _icx = bcx.insn_ctxt("trans_assign_op"); let t = expr_ty(bcx, src); let lhs_res = trans_lval(bcx, dst); assert (lhs_res.kind == owned); @@ -1491,6 +1567,7 @@ fn trans_assign_op(bcx: block, ex: @ast::expr, op: ast::binop, } fn autoderef(cx: block, v: ValueRef, t: ty::t) -> result_t { + let _icx = cx.insn_ctxt("autoderef"); let mut v1: ValueRef = v; let mut t1: ty::t = t; let ccx = cx.ccx(); @@ -1541,7 +1618,7 @@ enum lazy_binop_ty { lazy_and, lazy_or } fn trans_lazy_binop(bcx: block, op: lazy_binop_ty, a: @ast::expr, b: @ast::expr, dest: dest) -> block { - + let _icx = bcx.insn_ctxt("trans_lazy_binop"); let {bcx: past_lhs, val: lhs} = with_scope_result(bcx, "lhs") {|bcx| trans_temp_expr(bcx, a)}; if past_lhs.unreachable { ret past_lhs; } @@ -1562,6 +1639,7 @@ fn trans_lazy_binop(bcx: block, op: lazy_binop_ty, a: @ast::expr, fn trans_binary(bcx: block, op: ast::binop, lhs: @ast::expr, rhs: @ast::expr, dest: dest, ex: @ast::expr) -> block { + let _icx = bcx.insn_ctxt("trans_binary"); // User-defined operators alt bcx.ccx().maps.method_map.find(ex.id) { 
some(origin) { @@ -1596,6 +1674,7 @@ fn trans_binary(bcx: block, op: ast::binop, lhs: @ast::expr, fn trans_if(cx: block, cond: @ast::expr, thn: ast::blk, els: option<@ast::expr>, dest: dest) -> block { + let _icx = cx.insn_ctxt("trans_if"); let {bcx, val: cond_val} = trans_temp_expr(cx, cond); let then_dest = dup_for_join(dest); @@ -1633,6 +1712,7 @@ fn trans_if(cx: block, cond: @ast::expr, thn: ast::blk, fn trans_for(cx: block, local: @ast::local, seq: @ast::expr, body: ast::blk) -> block { + let _icx = cx.insn_ctxt("trans_for"); fn inner(bcx: block, local: @ast::local, curr: ValueRef, t: ty::t, body: ast::blk, outer_next_cx: block) -> block { let next_cx = sub_block(bcx, "next"); @@ -1665,6 +1745,7 @@ fn trans_for(cx: block, local: @ast::local, seq: @ast::expr, fn trans_while(cx: block, cond: @ast::expr, body: ast::blk) -> block { + let _icx = cx.insn_ctxt("trans_while"); let next_cx = sub_block(cx, "while next"); let loop_cx = loop_scope_block(cx, cont_self, next_cx, "while loop", body.span); @@ -1682,6 +1763,7 @@ fn trans_while(cx: block, cond: @ast::expr, body: ast::blk) fn trans_do_while(cx: block, body: ast::blk, cond: @ast::expr) -> block { + let _icx = cx.insn_ctxt("trans_do_while"); let next_cx = sub_block(cx, "next"); let body_cx = loop_scope_block(cx, cont_self, next_cx, @@ -1697,6 +1779,7 @@ fn trans_do_while(cx: block, body: ast::blk, cond: @ast::expr) -> } fn trans_loop(cx:block, body: ast::blk) -> block { + let _icx = cx.insn_ctxt("trans_loop"); let next_cx = sub_block(cx, "next"); let body_cx = loop_scope_block(cx, cont_self, next_cx, @@ -1830,6 +1913,7 @@ fn make_mono_id(ccx: @crate_ctxt, item: ast::def_id, substs: [ty::t], fn monomorphic_fn(ccx: @crate_ctxt, fn_id: ast::def_id, real_substs: [ty::t], vtables: option) -> {val: ValueRef, must_cast: bool, intrinsic: bool} { + let _icx = ccx.insn_ctxt("monomorphic_fn"); let mut must_cast = false; let substs = vec::map(real_substs, {|t| alt normalize_for_monomorphization(ccx.tcx, t) { @@ -1932,6 +2016,7 @@ fn monomorphic_fn(ccx: @crate_ctxt, fn_id: ast::def_id, real_substs: [ty::t], fn maybe_instantiate_inline(ccx: @crate_ctxt, fn_id: ast::def_id) -> ast::def_id { + let _icx = ccx.insn_ctxt("maybe_instantiate_inline"); alt ccx.external.find(fn_id) { some(some(node_id)) { // Already inline @@ -1988,6 +2073,7 @@ fn maybe_instantiate_inline(ccx: @crate_ctxt, fn_id: ast::def_id) fn lval_intrinsic_fn(bcx: block, val: ValueRef, tys: [ty::t], id: ast::node_id) -> lval_maybe_callee { + let _icx = bcx.insn_ctxt("lval_intrinsic_fn"); fn add_tydesc_params(ccx: @crate_ctxt, llfty: TypeRef, n: uint) -> TypeRef { let out_ty = llvm::LLVMGetReturnType(llfty); @@ -2016,6 +2102,7 @@ fn lval_intrinsic_fn(bcx: block, val: ValueRef, tys: [ty::t], fn lval_static_fn(bcx: block, fn_id: ast::def_id, id: ast::node_id) -> lval_maybe_callee { + let _icx = bcx.insn_ctxt("lval_static_fn"); let vts = option::map(bcx.ccx().maps.vtable_map.find(id), {|vts| impl::resolve_vtables_in_fn_ctxt(bcx.fcx, vts) }); @@ -2025,6 +2112,7 @@ fn lval_static_fn(bcx: block, fn_id: ast::def_id, id: ast::node_id) fn lval_static_fn_inner(bcx: block, fn_id: ast::def_id, id: ast::node_id, tys: [ty::t], vtables: option) -> lval_maybe_callee { + let _icx = bcx.insn_ctxt("lval_static_fn_inner"); let ccx = bcx.ccx(), tcx = ccx.tcx; let tpt = ty::lookup_item_type(tcx, fn_id); @@ -2078,6 +2166,7 @@ fn lval_static_fn_inner(bcx: block, fn_id: ast::def_id, id: ast::node_id, } fn lookup_discriminant(ccx: @crate_ctxt, vid: ast::def_id) -> ValueRef { + let _icx = 
ccx.insn_ctxt("lookup_discriminant"); alt ccx.discrims.find(vid) { none { // It's an external discriminant that we haven't seen yet. @@ -2096,6 +2185,7 @@ fn lookup_discriminant(ccx: @crate_ctxt, vid: ast::def_id) -> ValueRef { } fn trans_local_var(cx: block, def: ast::def) -> local_var_result { + let _icx = cx.insn_ctxt("trans_local_var"); fn take_local(table: hashmap, id: ast::node_id) -> local_var_result { alt table.find(id) { @@ -2135,6 +2225,7 @@ fn trans_local_var(cx: block, def: ast::def) -> local_var_result { // gets turned into a record field name. fn trans_path(cx: block, id: ast::node_id, path: @ast::path) -> lval_maybe_callee { + let _icx = cx.insn_ctxt("trans_path"); alt cx.tcx().def_map.find(id) { none { cx.sess().bug("trans_path: unbound node ID"); } some(df) { @@ -2145,6 +2236,7 @@ fn trans_path(cx: block, id: ast::node_id, path: @ast::path) fn trans_var(cx: block, def: ast::def, id: ast::node_id, path: @ast::path) -> lval_maybe_callee { + let _icx = cx.insn_ctxt("trans_var"); let ccx = cx.ccx(); alt def { ast::def_fn(did, _) { @@ -2199,6 +2291,7 @@ fn trans_var(cx: block, def: ast::def, id: ast::node_id, path: @ast::path) fn trans_rec_field(bcx: block, base: @ast::expr, field: ast::ident) -> lval_result { + let _icx = bcx.insn_ctxt("trans_rec_field"); let {bcx, val} = trans_temp_expr(bcx, base); let {bcx, val, ty} = autoderef(bcx, val, expr_ty(bcx, base)); let fields = alt ty::get(ty).struct { @@ -2215,6 +2308,7 @@ fn trans_rec_field(bcx: block, base: @ast::expr, fn trans_index(cx: block, ex: @ast::expr, base: @ast::expr, idx: @ast::expr) -> lval_result { + let _icx = cx.insn_ctxt("trans_index"); let base_ty = expr_ty(cx, base); let exp = trans_temp_expr(cx, base); let lv = autoderef(exp.bcx, exp.val, base_ty); @@ -2257,6 +2351,7 @@ fn expr_is_lval(bcx: block, e: @ast::expr) -> bool { } fn trans_callee(bcx: block, e: @ast::expr) -> lval_maybe_callee { + let _icx = bcx.insn_ctxt("trans_callee"); alt e.node { ast::expr_path(path) { ret trans_path(bcx, e.id, path); } ast::expr_field(base, ident, _) { @@ -2283,6 +2378,7 @@ fn trans_callee(bcx: block, e: @ast::expr) -> lval_maybe_callee { // represented as an alloca or heap, hence needs a 'load' to be used as an // immediate). 
fn trans_lval(cx: block, e: @ast::expr) -> lval_result { + let _icx = cx.insn_ctxt("trans_lval"); alt e.node { ast::expr_path(p) { let v = trans_path(cx, e.id, p); @@ -2343,6 +2439,7 @@ fn lval_maybe_callee_to_lval(c: lval_maybe_callee, ty: ty::t) -> lval_result { fn int_cast(bcx: block, lldsttype: TypeRef, llsrctype: TypeRef, llsrc: ValueRef, signed: bool) -> ValueRef { + let _icx = bcx.insn_ctxt("int_cast"); let srcsz = llvm::LLVMGetIntTypeWidth(llsrctype); let dstsz = llvm::LLVMGetIntTypeWidth(lldsttype); ret if dstsz == srcsz { @@ -2356,6 +2453,7 @@ fn int_cast(bcx: block, lldsttype: TypeRef, llsrctype: TypeRef, fn float_cast(bcx: block, lldsttype: TypeRef, llsrctype: TypeRef, llsrc: ValueRef) -> ValueRef { + let _icx = bcx.insn_ctxt("float_cast"); let srcsz = lib::llvm::float_width(llsrctype); let dstsz = lib::llvm::float_width(lldsttype); ret if dstsz > srcsz { @@ -2378,6 +2476,7 @@ fn cast_type_kind(t: ty::t) -> cast_kind { fn trans_cast(cx: block, e: @ast::expr, id: ast::node_id, dest: dest) -> block { + let _icx = cx.insn_ctxt("trans_cast"); let ccx = cx.ccx(); let t_out = node_id_type(cx, id); alt ty::get(t_out).struct { @@ -2443,6 +2542,7 @@ fn trans_cast(cx: block, e: @ast::expr, id: ast::node_id, // call takes place: fn trans_arg_expr(cx: block, arg: ty::arg, lldestty: TypeRef, e: @ast::expr, &temp_cleanups: [ValueRef]) -> result { + let _icx = cx.insn_ctxt("trans_arg_expr"); let ccx = cx.ccx(); let e_ty = expr_ty(cx, e); let is_bot = ty::type_is_bot(e_ty); @@ -2522,7 +2622,7 @@ enum call_args { fn trans_args(cx: block, llenv: ValueRef, args: call_args, fn_ty: ty::t, dest: dest, generic_intrinsic: bool) -> {bcx: block, args: [ValueRef], retslot: ValueRef} { - + let _icx = cx.insn_ctxt("trans_args"); let mut temp_cleanups = []; let arg_tys = ty::ty_fn_args(fn_ty); let mut llargs: [ValueRef] = []; @@ -2592,6 +2692,7 @@ fn trans_args(cx: block, llenv: ValueRef, args: call_args, fn_ty: ty::t, fn trans_call(in_cx: block, f: @ast::expr, args: call_args, id: ast::node_id, dest: dest) -> block { + let _icx = in_cx.insn_ctxt("trans_call"); trans_call_inner(in_cx, expr_ty(in_cx, f), node_id_type(in_cx, id), {|cx| trans_callee(cx, f)}, args, dest) } @@ -2673,6 +2774,7 @@ fn invoke_full(bcx: block, llfn: ValueRef, llargs: [ValueRef]) fn invoke_(bcx: block, llfn: ValueRef, llargs: [ValueRef], invoker: fn(block, ValueRef, [ValueRef], BasicBlockRef, BasicBlockRef)) -> block { + let _icx = bcx.insn_ctxt("invoke_"); // FIXME: May be worth turning this into a plain call when there are no // cleanups to run if bcx.unreachable { ret bcx; } @@ -2688,6 +2790,7 @@ fn invoke_(bcx: block, llfn: ValueRef, llargs: [ValueRef], } fn get_landing_pad(bcx: block) -> BasicBlockRef { + let _icx = bcx.insn_ctxt("get_landing_pad"); fn in_lpad_scope_cx(bcx: block, f: fn(scope_info)) { let mut bcx = bcx; loop { @@ -2749,6 +2852,7 @@ fn get_landing_pad(bcx: block) -> BasicBlockRef { } fn trans_tup(bcx: block, elts: [@ast::expr], dest: dest) -> block { + let _icx = bcx.insn_ctxt("trans_tup"); let mut bcx = bcx; let addr = alt dest { ignore { @@ -2774,6 +2878,7 @@ fn trans_tup(bcx: block, elts: [@ast::expr], dest: dest) -> block { fn trans_rec(bcx: block, fields: [ast::field], base: option<@ast::expr>, id: ast::node_id, dest: dest) -> block { + let _icx = bcx.insn_ctxt("trans_rec"); let t = node_id_type(bcx, id); let mut bcx = bcx; let addr = alt dest { @@ -2841,6 +2946,7 @@ fn trans_expr_save_in(bcx: block, e: @ast::expr, dest: ValueRef) // trans_expr_save_in. 
For intermediates where you don't care about lval-ness, // use trans_temp_expr. fn trans_temp_lval(bcx: block, e: @ast::expr) -> lval_result { + let _icx = bcx.insn_ctxt("trans_temp_lval"); let mut bcx = bcx; if expr_is_lval(bcx, e) { ret trans_lval(bcx, e); @@ -2866,6 +2972,7 @@ fn trans_temp_lval(bcx: block, e: @ast::expr) -> lval_result { // Use only for intermediate values. See trans_expr and trans_expr_save_in for // expressions that must 'end up somewhere' (or get ignored). fn trans_temp_expr(bcx: block, e: @ast::expr) -> result { + let _icx = bcx.insn_ctxt("trans_temp_expr"); let mut {bcx, val, kind} = trans_temp_lval(bcx, e); if kind == owned { val = load_if_immediate(bcx, val, expr_ty(bcx, e)); @@ -2878,6 +2985,7 @@ fn trans_temp_expr(bcx: block, e: @ast::expr) -> result { // - exprs returning nil or bot always get dest=ignore // - exprs with non-immediate type never get dest=by_val fn trans_expr(bcx: block, e: @ast::expr, dest: dest) -> block { + let _icx = bcx.insn_ctxt("trans_expr"); let tcx = bcx.tcx(); debuginfo::update_source_pos(bcx, e.span); @@ -3163,16 +3271,19 @@ fn do_spill_noroot(cx: block, v: ValueRef) -> ValueRef { } fn spill_if_immediate(cx: block, v: ValueRef, t: ty::t) -> result { + let _icx = cx.insn_ctxt("spill_if_immediate"); if ty::type_is_immediate(t) { ret do_spill(cx, v, t); } ret rslt(cx, v); } fn load_if_immediate(cx: block, v: ValueRef, t: ty::t) -> ValueRef { + let _icx = cx.insn_ctxt("load_if_immediate"); if ty::type_is_immediate(t) { ret Load(cx, v); } ret v; } fn trans_log(lvl: @ast::expr, bcx: block, e: @ast::expr) -> block { + let _icx = bcx.insn_ctxt("trans_log"); let ccx = bcx.ccx(); if ty::type_is_bot(expr_ty(bcx, lvl)) { ret trans_expr(bcx, lvl, ignore); @@ -3218,6 +3329,7 @@ fn trans_log(lvl: @ast::expr, bcx: block, e: @ast::expr) -> block { } fn trans_check_expr(bcx: block, e: @ast::expr, s: str) -> block { + let _icx = bcx.insn_ctxt("trans_check_expr"); let expr_str = s + " " + expr_to_str(e) + " failed"; let {bcx, val} = with_scope_result(bcx, "check") {|bcx| trans_temp_expr(bcx, e) @@ -3229,6 +3341,7 @@ fn trans_check_expr(bcx: block, e: @ast::expr, s: str) -> block { fn trans_fail_expr(bcx: block, sp_opt: option, fail_expr: option<@ast::expr>) -> block { + let _icx = bcx.insn_ctxt("trans_fail_expr"); let mut bcx = bcx; alt fail_expr { some(expr) { @@ -3256,12 +3369,14 @@ fn trans_fail_expr(bcx: block, sp_opt: option, fn trans_fail(bcx: block, sp_opt: option, fail_str: str) -> block { + let _icx = bcx.insn_ctxt("trans_fail"); let V_fail_str = C_cstr(bcx.ccx(), fail_str); ret trans_fail_value(bcx, sp_opt, V_fail_str); } fn trans_fail_value(bcx: block, sp_opt: option, V_fail_str: ValueRef) -> block { + let _icx = bcx.insn_ctxt("trans_fail_value"); let ccx = bcx.ccx(); let {V_filename, V_line} = alt sp_opt { some(sp) { @@ -3285,6 +3400,7 @@ fn trans_fail_value(bcx: block, sp_opt: option, fn trans_break_cont(bcx: block, to_end: bool) -> block { + let _icx = bcx.insn_ctxt("trans_break_cont"); // Locate closest loop block, outputting cleanup as we go. 
let mut unwind = bcx; let mut target = bcx; @@ -3325,6 +3441,7 @@ fn trans_cont(cx: block) -> block { } fn trans_ret(bcx: block, e: option<@ast::expr>) -> block { + let _icx = bcx.insn_ctxt("trans_ret"); let mut bcx = bcx; alt e { some(x) { bcx = trans_expr_save_in(bcx, x, bcx.fcx.llretptr); } @@ -3335,15 +3452,20 @@ fn trans_ret(bcx: block, e: option<@ast::expr>) -> block { ret bcx; } -fn build_return(bcx: block) { Br(bcx, bcx.fcx.llreturn); } +fn build_return(bcx: block) { + let _icx = bcx.insn_ctxt("build_return"); + Br(bcx, bcx.fcx.llreturn); +} fn trans_be(cx: block, e: @ast::expr) -> block { // FIXME: Turn this into a real tail call once // calling convention issues are settled + let _icx = cx.insn_ctxt("trans_be"); ret trans_ret(cx, some(e)); } fn init_local(bcx: block, local: @ast::local) -> block { + let _icx = bcx.insn_ctxt("init_local"); let ty = node_id_type(bcx, local.node.id); let llptr = alt bcx.fcx.lllocals.find(local.node.id) { some(local_mem(v)) { v } @@ -3390,6 +3512,7 @@ fn init_local(bcx: block, local: @ast::local) -> block { fn zero_alloca(cx: block, llptr: ValueRef, t: ty::t) -> block { + let _icx = cx.insn_ctxt("zero_alloca"); let bcx = cx; let ccx = cx.ccx(); let llty = type_of(ccx, t); @@ -3398,6 +3521,7 @@ fn zero_alloca(cx: block, llptr: ValueRef, t: ty::t) } fn trans_stmt(cx: block, s: ast::stmt) -> block { + let _icx = cx.insn_ctxt("trans_stmt"); #debug["trans_stmt(%s)", stmt_to_str(s)]; if (!cx.sess().opts.no_asm_comments) { @@ -3509,6 +3633,7 @@ fn raw_block(fcx: fn_ctxt, llbb: BasicBlockRef) -> block { // trans_block_cleanups runs all the cleanup functions for the block. fn trans_block_cleanups(bcx: block, cleanup_cx: block) -> block { + let _icx = bcx.insn_ctxt("trans_block_cleanups"); if bcx.unreachable { ret bcx; } let mut bcx = bcx; alt check cleanup_cx.kind { @@ -3526,6 +3651,7 @@ fn trans_block_cleanups(bcx: block, cleanup_cx: block) -> // instruction. 
fn cleanup_and_leave(bcx: block, upto: option, leave: option) { + let _icx = bcx.insn_ctxt("cleanup_and_leave"); let mut cur = bcx, bcx = bcx; loop { alt cur.kind { @@ -3560,10 +3686,12 @@ fn cleanup_and_leave(bcx: block, upto: option, fn cleanup_and_Br(bcx: block, upto: block, target: BasicBlockRef) { + let _icx = bcx.insn_ctxt("cleanup_and_Br"); cleanup_and_leave(bcx, some(upto.llbb), some(target)); } fn leave_block(bcx: block, out_of: block) -> block { + let _icx = bcx.insn_ctxt("leave_block"); let next_cx = sub_block(block_parent(out_of), "next"); if bcx.unreachable { Unreachable(next_cx); } cleanup_and_Br(bcx, out_of, next_cx.llbb); @@ -3571,6 +3699,7 @@ fn leave_block(bcx: block, out_of: block) -> block { } fn with_scope(bcx: block, name: str, f: fn(block) -> block) -> block { + let _icx = bcx.insn_ctxt("with_scope"); let scope_cx = scope_block(bcx, name); Br(bcx, scope_cx.llbb); leave_block(f(scope_cx), scope_cx) @@ -3578,6 +3707,7 @@ fn with_scope(bcx: block, name: str, f: fn(block) -> block) -> block { fn with_scope_result(bcx: block, name: str, f: fn(block) -> result) -> result { + let _icx = bcx.insn_ctxt("with_scope_result"); let scope_cx = scope_block(bcx, name); Br(bcx, scope_cx.llbb); let {bcx, val} = f(scope_cx); @@ -3585,6 +3715,7 @@ fn with_scope_result(bcx: block, name: str, f: fn(block) -> result) } fn with_cond(bcx: block, val: ValueRef, f: fn(block) -> block) -> block { + let _icx = bcx.insn_ctxt("with_cond"); let next_cx = sub_block(bcx, "next"), cond_cx = sub_block(bcx, "cond"); CondBr(bcx, val, cond_cx.llbb, next_cx.llbb); let after_cx = f(cond_cx); @@ -3609,6 +3740,7 @@ fn block_locals(b: ast::blk, it: fn(@ast::local)) { } fn alloc_ty(cx: block, t: ty::t) -> result { + let _icx = cx.insn_ctxt("alloc_ty"); let bcx = cx, ccx = cx.ccx(); let llty = type_of(ccx, t); assert !ty::type_has_params(t); @@ -3625,6 +3757,7 @@ fn alloc_ty(cx: block, t: ty::t) -> result { } fn alloc_local(cx: block, local: @ast::local) -> block { + let _icx = cx.insn_ctxt("alloc_local"); let t = node_id_type(cx, local.node.id); let simple_name = alt local.node.pat.node { ast::pat_ident(pth, none) { some(path_to_ident(pth)) } @@ -3655,6 +3788,7 @@ fn alloc_local(cx: block, local: @ast::local) -> block { fn trans_block(bcx: block, b: ast::blk, dest: dest) -> block { + let _icx = bcx.insn_ctxt("trans_block"); let mut bcx = bcx; block_locals(b) {|local| bcx = alloc_local(bcx, local); }; for s: @ast::stmt in b.node.stmts { @@ -3737,6 +3871,7 @@ fn new_fn_ctxt(ccx: @crate_ctxt, path: path, llfndecl: ValueRef, fn create_llargs_for_fn_args(cx: fn_ctxt, ty_self: self_arg, args: [ast::arg]) { + let _icx = cx.insn_ctxt("create_llargs_for_fn_args"); // Skip the implicit arguments 0, and 1. let mut arg_n = first_real_arg; alt ty_self { @@ -3761,6 +3896,7 @@ fn create_llargs_for_fn_args(cx: fn_ctxt, fn copy_args_to_allocas(fcx: fn_ctxt, bcx: block, args: [ast::arg], arg_tys: [ty::arg]) -> block { + let _icx = fcx.insn_ctxt("copy_args_to_allocas"); let tcx = bcx.tcx(); let mut arg_n: uint = 0u, bcx = bcx; let epic_fail = fn@() -> ! { @@ -3797,12 +3933,14 @@ fn copy_args_to_allocas(fcx: fn_ctxt, bcx: block, args: [ast::arg], // Ties up the llstaticallocas -> llloadenv -> lltop edges, // and builds the return block. 
fn finish_fn(fcx: fn_ctxt, lltop: BasicBlockRef) { + let _icx = fcx.insn_ctxt("finish_fn"); tie_up_header_blocks(fcx, lltop); let ret_cx = raw_block(fcx, fcx.llreturn); RetVoid(ret_cx); } fn tie_up_header_blocks(fcx: fn_ctxt, lltop: BasicBlockRef) { + let _icx = fcx.insn_ctxt("tie_up_header_blocks"); Br(raw_block(fcx, fcx.llstaticallocas), fcx.llloadenv); Br(raw_block(fcx, fcx.llloadenv), lltop); } @@ -3818,6 +3956,7 @@ fn trans_closure(ccx: @crate_ctxt, path: path, decl: ast::fn_decl, param_substs: option, id: ast::node_id, maybe_self_id: option<@ast::expr>, maybe_load_env: fn(fn_ctxt)) { + let _icx = ccx.insn_ctxt("trans_closure"); set_uwtable(llfndecl); // Set up arguments to the function. @@ -3873,6 +4012,7 @@ fn trans_fn(ccx: @crate_ctxt, let do_time = ccx.sess.opts.stats; let start = if do_time { time::get_time() } else { {sec: 0u32, usec: 0u32} }; + let _icx = ccx.insn_ctxt("trans_fn"); trans_closure(ccx, path, decl, body, llfndecl, ty_self, param_substs, id, maybe_self_id, {|fcx| if ccx.sess.opts.extra_debuginfo { @@ -3888,6 +4028,7 @@ fn trans_fn(ccx: @crate_ctxt, fn trans_res_ctor(ccx: @crate_ctxt, path: path, dtor: ast::fn_decl, ctor_id: ast::node_id, param_substs: option, llfndecl: ValueRef) { + let _icx = ccx.insn_ctxt("trans_res_ctor"); // Create a function for the constructor let fcx = new_fn_ctxt_w_id(ccx, path, llfndecl, ctor_id, none, param_substs, none); @@ -3916,6 +4057,7 @@ fn trans_enum_variant(ccx: @crate_ctxt, enum_id: ast::node_id, variant: ast::variant, disr: int, is_degen: bool, param_substs: option, llfndecl: ValueRef) { + let _icx = ccx.insn_ctxt("trans_enum_variant"); // Translate variant arguments to function arguments. let mut fn_args = [], i = 0u; for varg in variant.node.args { @@ -3971,6 +4113,7 @@ fn trans_enum_variant(ccx: @crate_ctxt, enum_id: ast::node_id, // duplicate constants. I think. Maybe LLVM has a magical mode // that does so later on? fn trans_const_expr(cx: @crate_ctxt, e: @ast::expr) -> ValueRef { + let _icx = cx.insn_ctxt("trans_const_expr"); alt e.node { ast::expr_lit(lit) { ret trans_crate_lit(cx, *lit); } ast::expr_binary(b, e1, e2) { @@ -4065,6 +4208,7 @@ fn trans_const_expr(cx: @crate_ctxt, e: @ast::expr) -> ValueRef { } fn trans_const(ccx: @crate_ctxt, e: @ast::expr, id: ast::node_id) { + let _icx = ccx.insn_ctxt("trans_const"); let v = trans_const_expr(ccx, e); // The scalars come back as 1st class LLVM vals @@ -4075,6 +4219,7 @@ fn trans_const(ccx: @crate_ctxt, e: @ast::expr, id: ast::node_id) { } fn trans_item(ccx: @crate_ctxt, item: ast::item) { + let _icx = ccx.insn_ctxt("trans_item"); let path = alt check ccx.tcx.items.get(item.id) { ast_map::node_item(_, p) { p } }; @@ -4224,6 +4369,7 @@ fn trans_item(ccx: @crate_ctxt, item: ast::item) { // only as a convenience for humans working with the code, to organize names // and control visibility. fn trans_mod(ccx: @crate_ctxt, m: ast::_mod) { + let _icx = ccx.insn_ctxt("trans_mod"); for item in m.items { trans_item(ccx, *item); } } @@ -4454,6 +4600,7 @@ fn get_item_val(ccx: @crate_ctxt, id: ast::node_id) -> ValueRef { // The constant translation pass. 
fn trans_constant(ccx: @crate_ctxt, it: @ast::item) { + let _icx = ccx.insn_ctxt("trans_constant"); alt it.node { ast::item_enum(variants, _) { let vi = ty::enum_variants(ccx.tcx, {crate: ast::local_crate, @@ -4728,6 +4875,8 @@ fn trans_crate(sess: session::session, crate: @ast::crate, tcx: ty::ctxt, mutable n_glues_created: 0u, mutable n_null_glues: 0u, mutable n_real_glues: 0u, + llvm_insn_ctxt: @mutable [], + llvm_insns: str_hash(), fn_times: @mutable []}, upcalls: upcall::declare_upcalls(targ_cfg, tn, tydesc_type, @@ -4742,8 +4891,18 @@ fn trans_crate(sess: session::session, crate: @ast::crate, tcx: ty::ctxt, crate_map: crate_map, dbg_cx: dbg_cx, mutable do_not_commit_warning_issued: false}; - trans_constants(ccx, crate); - trans_mod(ccx, crate.node.module); + + + { + let _icx = ccx.insn_ctxt("data"); + trans_constants(ccx, crate); + } + + { + let _icx = ccx.insn_ctxt("text"); + trans_mod(ccx, crate.node.module); + } + fill_crate_map(ccx, crate_map); emit_tydescs(ccx); gen_shape_tables(ccx); @@ -4752,14 +4911,23 @@ fn trans_crate(sess: session::session, crate: @ast::crate, tcx: ty::ctxt, // Translate the metadata. write_metadata(ccx, crate); if ccx.sess.opts.stats { - #error("--- trans stats ---"); - #error("n_static_tydescs: %u", ccx.stats.n_static_tydescs); - #error("n_glues_created: %u", ccx.stats.n_glues_created); - #error("n_null_glues: %u", ccx.stats.n_null_glues); - #error("n_real_glues: %u", ccx.stats.n_real_glues); + io::println("--- trans stats ---"); + io::println(#fmt("n_static_tydescs: %u", + ccx.stats.n_static_tydescs)); + io::println(#fmt("n_glues_created: %u", + ccx.stats.n_glues_created)); + io::println(#fmt("n_null_glues: %u", ccx.stats.n_null_glues)); + io::println(#fmt("n_real_glues: %u", ccx.stats.n_real_glues)); for timing: {ident: str, time: int} in *ccx.stats.fn_times { - #error("time: %s took %d ms", timing.ident, timing.time); + io::println(#fmt("time: %s took %d ms", timing.ident, + timing.time)); + } + } + + if ccx.sess.opts.count_llvm_insns { + ccx.stats.llvm_insns.items() { |k, v| + io::println(#fmt("%-7u %s", v, k)); } } ret (llmod, link_meta); diff --git a/src/rustc/middle/trans/build.rs b/src/rustc/middle/trans/build.rs index 7c488aa14bd..c547c36db76 100644 --- a/src/rustc/middle/trans/build.rs +++ b/src/rustc/middle/trans/build.rs @@ -1,3 +1,4 @@ +import std::map::{hashmap, str_hash}; import libc::{c_uint, c_int}; import lib::llvm::llvm; import syntax::codemap; @@ -13,6 +14,46 @@ fn B(cx: block) -> BuilderRef { ret b; } +fn count_insn(cx: block, category: str) { + if (cx.ccx().sess.opts.count_llvm_insns) { + + let h = cx.ccx().stats.llvm_insns; + let mut v = cx.ccx().stats.llvm_insn_ctxt; + + // Build version of path with cycles removed. + + // Pass 1: scan table mapping str -> rightmost pos. + let mm = str_hash(); + let len = vec::len(*v); + let mut i = 0u; + while i < len { + mm.insert(copy v[i], i); + i += 1u; + } + + + // Pass 2: concat strings for each elt, skipping + // forwards over any cycles by advancing to rightmost + // occurrence of each element in path. + let mut s = "."; + i = 0u; + while i < len { + let e = v[i]; + i = mm.get(e); + s += "/"; + s += e; + i += 1u; + } + + s += "/"; + s += category; + + let n = alt h.find(s) { some(n) { n } _ { 0u } }; + h.insert(s, n+1u); + } +} + + // The difference between a block being unreachable and being terminated is // somewhat obscure, and has to do with error checking. 
When a block is // terminated, we're saying that trying to add any further statements in the @@ -25,6 +66,7 @@ fn RetVoid(cx: block) { if cx.unreachable { ret; } assert (!cx.terminated); cx.terminated = true; + count_insn(cx, "retvoid"); llvm::LLVMBuildRetVoid(B(cx)); } @@ -32,6 +74,7 @@ fn Ret(cx: block, V: ValueRef) { if cx.unreachable { ret; } assert (!cx.terminated); cx.terminated = true; + count_insn(cx, "ret"); llvm::LLVMBuildRet(B(cx), V); } @@ -49,6 +92,7 @@ fn Br(cx: block, Dest: BasicBlockRef) { if cx.unreachable { ret; } assert (!cx.terminated); cx.terminated = true; + count_insn(cx, "br"); llvm::LLVMBuildBr(B(cx), Dest); } @@ -57,6 +101,7 @@ fn CondBr(cx: block, If: ValueRef, Then: BasicBlockRef, if cx.unreachable { ret; } assert (!cx.terminated); cx.terminated = true; + count_insn(cx, "condbr"); llvm::LLVMBuildCondBr(B(cx), If, Then, Else); } @@ -77,6 +122,7 @@ fn IndirectBr(cx: block, Addr: ValueRef, NumDests: uint) { if cx.unreachable { ret; } assert (!cx.terminated); cx.terminated = true; + count_insn(cx, "indirectbr"); llvm::LLVMBuildIndirectBr(B(cx), Addr, NumDests as c_uint); } @@ -97,6 +143,7 @@ fn Invoke(cx: block, Fn: ValueRef, Args: [ValueRef], str::connect(vec::map(Args, {|a|val_str(cx.ccx().tn, a)}), ", ")]; unsafe { + count_insn(cx, "invoke"); llvm::LLVMBuildInvoke(B(cx), Fn, vec::unsafe::to_ptr(Args), Args.len() as c_uint, Then, Catch, noname()); @@ -109,6 +156,7 @@ fn FastInvoke(cx: block, Fn: ValueRef, Args: [ValueRef], assert (!cx.terminated); cx.terminated = true; unsafe { + count_insn(cx, "fastinvoke"); let v = llvm::LLVMBuildInvoke(B(cx), Fn, vec::unsafe::to_ptr(Args), Args.len() as c_uint, Then, Catch, noname()); @@ -119,7 +167,10 @@ fn FastInvoke(cx: block, Fn: ValueRef, Args: [ValueRef], fn Unreachable(cx: block) { if cx.unreachable { ret; } cx.unreachable = true; - if !cx.terminated { llvm::LLVMBuildUnreachable(B(cx)); } + if !cx.terminated { + count_insn(cx, "unreachable"); + llvm::LLVMBuildUnreachable(B(cx)); + } } fn _Undef(val: ValueRef) -> ValueRef { @@ -129,182 +180,218 @@ fn _Undef(val: ValueRef) -> ValueRef { /* Arithmetic */ fn Add(cx: block, LHS: ValueRef, RHS: ValueRef) -> ValueRef { if cx.unreachable { ret _Undef(LHS); } + count_insn(cx, "add"); ret llvm::LLVMBuildAdd(B(cx), LHS, RHS, noname()); } fn NSWAdd(cx: block, LHS: ValueRef, RHS: ValueRef) -> ValueRef { if cx.unreachable { ret _Undef(LHS); } + count_insn(cx, "nswadd"); ret llvm::LLVMBuildNSWAdd(B(cx), LHS, RHS, noname()); } fn NUWAdd(cx: block, LHS: ValueRef, RHS: ValueRef) -> ValueRef { if cx.unreachable { ret _Undef(LHS); } + count_insn(cx, "nuwadd"); ret llvm::LLVMBuildNUWAdd(B(cx), LHS, RHS, noname()); } fn FAdd(cx: block, LHS: ValueRef, RHS: ValueRef) -> ValueRef { if cx.unreachable { ret _Undef(LHS); } + count_insn(cx, "fadd"); ret llvm::LLVMBuildFAdd(B(cx), LHS, RHS, noname()); } fn Sub(cx: block, LHS: ValueRef, RHS: ValueRef) -> ValueRef { if cx.unreachable { ret _Undef(LHS); } + count_insn(cx, "sub"); ret llvm::LLVMBuildSub(B(cx), LHS, RHS, noname()); } fn NSWSub(cx: block, LHS: ValueRef, RHS: ValueRef) -> ValueRef { if cx.unreachable { ret _Undef(LHS); } + count_insn(cx, "nwsub"); ret llvm::LLVMBuildNSWSub(B(cx), LHS, RHS, noname()); } fn NUWSub(cx: block, LHS: ValueRef, RHS: ValueRef) -> ValueRef { if cx.unreachable { ret _Undef(LHS); } + count_insn(cx, "nuwsub"); ret llvm::LLVMBuildNUWSub(B(cx), LHS, RHS, noname()); } fn FSub(cx: block, LHS: ValueRef, RHS: ValueRef) -> ValueRef { if cx.unreachable { ret _Undef(LHS); } + count_insn(cx, "sub"); ret 
llvm::LLVMBuildFSub(B(cx), LHS, RHS, noname()); } fn Mul(cx: block, LHS: ValueRef, RHS: ValueRef) -> ValueRef { if cx.unreachable { ret _Undef(LHS); } + count_insn(cx, "mul"); ret llvm::LLVMBuildMul(B(cx), LHS, RHS, noname()); } fn NSWMul(cx: block, LHS: ValueRef, RHS: ValueRef) -> ValueRef { if cx.unreachable { ret _Undef(LHS); } + count_insn(cx, "nswmul"); ret llvm::LLVMBuildNSWMul(B(cx), LHS, RHS, noname()); } fn NUWMul(cx: block, LHS: ValueRef, RHS: ValueRef) -> ValueRef { if cx.unreachable { ret _Undef(LHS); } + count_insn(cx, "nuwmul"); ret llvm::LLVMBuildNUWMul(B(cx), LHS, RHS, noname()); } fn FMul(cx: block, LHS: ValueRef, RHS: ValueRef) -> ValueRef { if cx.unreachable { ret _Undef(LHS); } + count_insn(cx, "fmul"); ret llvm::LLVMBuildFMul(B(cx), LHS, RHS, noname()); } fn UDiv(cx: block, LHS: ValueRef, RHS: ValueRef) -> ValueRef { if cx.unreachable { ret _Undef(LHS); } + count_insn(cx, "udiv"); ret llvm::LLVMBuildUDiv(B(cx), LHS, RHS, noname()); } fn SDiv(cx: block, LHS: ValueRef, RHS: ValueRef) -> ValueRef { if cx.unreachable { ret _Undef(LHS); } + count_insn(cx, "sdiv"); ret llvm::LLVMBuildSDiv(B(cx), LHS, RHS, noname()); } fn ExactSDiv(cx: block, LHS: ValueRef, RHS: ValueRef) -> ValueRef { if cx.unreachable { ret _Undef(LHS); } + count_insn(cx, "extractsdiv"); ret llvm::LLVMBuildExactSDiv(B(cx), LHS, RHS, noname()); } fn FDiv(cx: block, LHS: ValueRef, RHS: ValueRef) -> ValueRef { if cx.unreachable { ret _Undef(LHS); } + count_insn(cx, "fdiv"); ret llvm::LLVMBuildFDiv(B(cx), LHS, RHS, noname()); } fn URem(cx: block, LHS: ValueRef, RHS: ValueRef) -> ValueRef { if cx.unreachable { ret _Undef(LHS); } + count_insn(cx, "urem"); ret llvm::LLVMBuildURem(B(cx), LHS, RHS, noname()); } fn SRem(cx: block, LHS: ValueRef, RHS: ValueRef) -> ValueRef { if cx.unreachable { ret _Undef(LHS); } + count_insn(cx, "srem"); ret llvm::LLVMBuildSRem(B(cx), LHS, RHS, noname()); } fn FRem(cx: block, LHS: ValueRef, RHS: ValueRef) -> ValueRef { if cx.unreachable { ret _Undef(LHS); } + count_insn(cx, "frem"); ret llvm::LLVMBuildFRem(B(cx), LHS, RHS, noname()); } fn Shl(cx: block, LHS: ValueRef, RHS: ValueRef) -> ValueRef { if cx.unreachable { ret _Undef(LHS); } + count_insn(cx, "shl"); ret llvm::LLVMBuildShl(B(cx), LHS, RHS, noname()); } fn LShr(cx: block, LHS: ValueRef, RHS: ValueRef) -> ValueRef { if cx.unreachable { ret _Undef(LHS); } + count_insn(cx, "lshr"); ret llvm::LLVMBuildLShr(B(cx), LHS, RHS, noname()); } fn AShr(cx: block, LHS: ValueRef, RHS: ValueRef) -> ValueRef { if cx.unreachable { ret _Undef(LHS); } + count_insn(cx, "ashr"); ret llvm::LLVMBuildAShr(B(cx), LHS, RHS, noname()); } fn And(cx: block, LHS: ValueRef, RHS: ValueRef) -> ValueRef { if cx.unreachable { ret _Undef(LHS); } + count_insn(cx, "and"); ret llvm::LLVMBuildAnd(B(cx), LHS, RHS, noname()); } fn Or(cx: block, LHS: ValueRef, RHS: ValueRef) -> ValueRef { if cx.unreachable { ret _Undef(LHS); } + count_insn(cx, "or"); ret llvm::LLVMBuildOr(B(cx), LHS, RHS, noname()); } fn Xor(cx: block, LHS: ValueRef, RHS: ValueRef) -> ValueRef { if cx.unreachable { ret _Undef(LHS); } + count_insn(cx, "xor"); ret llvm::LLVMBuildXor(B(cx), LHS, RHS, noname()); } fn BinOp(cx: block, Op: Opcode, LHS: ValueRef, RHS: ValueRef) -> ValueRef { if cx.unreachable { ret _Undef(LHS); } + count_insn(cx, "binop"); ret llvm::LLVMBuildBinOp(B(cx), Op, LHS, RHS, noname()); } fn Neg(cx: block, V: ValueRef) -> ValueRef { if cx.unreachable { ret _Undef(V); } + count_insn(cx, "neg"); ret llvm::LLVMBuildNeg(B(cx), V, noname()); } fn NSWNeg(cx: block, V: ValueRef) -> ValueRef 
{ if cx.unreachable { ret _Undef(V); } + count_insn(cx, "nswneg"); ret llvm::LLVMBuildNSWNeg(B(cx), V, noname()); } fn NUWNeg(cx: block, V: ValueRef) -> ValueRef { if cx.unreachable { ret _Undef(V); } + count_insn(cx, "nuwneg"); ret llvm::LLVMBuildNUWNeg(B(cx), V, noname()); } fn FNeg(cx: block, V: ValueRef) -> ValueRef { if cx.unreachable { ret _Undef(V); } + count_insn(cx, "fneg"); ret llvm::LLVMBuildFNeg(B(cx), V, noname()); } fn Not(cx: block, V: ValueRef) -> ValueRef { if cx.unreachable { ret _Undef(V); } + count_insn(cx, "not"); ret llvm::LLVMBuildNot(B(cx), V, noname()); } /* Memory */ fn Malloc(cx: block, Ty: TypeRef) -> ValueRef { if cx.unreachable { ret llvm::LLVMGetUndef(T_ptr(T_i8())); } + count_insn(cx, "malloc"); ret llvm::LLVMBuildMalloc(B(cx), Ty, noname()); } fn ArrayMalloc(cx: block, Ty: TypeRef, Val: ValueRef) -> ValueRef { if cx.unreachable { ret llvm::LLVMGetUndef(T_ptr(T_i8())); } + count_insn(cx, "arraymalloc"); ret llvm::LLVMBuildArrayMalloc(B(cx), Ty, Val, noname()); } fn Alloca(cx: block, Ty: TypeRef) -> ValueRef { if cx.unreachable { ret llvm::LLVMGetUndef(T_ptr(Ty)); } + count_insn(cx, "alloca"); ret llvm::LLVMBuildAlloca(B(cx), Ty, noname()); } fn ArrayAlloca(cx: block, Ty: TypeRef, Val: ValueRef) -> ValueRef { if cx.unreachable { ret llvm::LLVMGetUndef(T_ptr(Ty)); } + count_insn(cx, "arrayalloca"); ret llvm::LLVMBuildArrayAlloca(B(cx), Ty, Val, noname()); } fn Free(cx: block, PointerVal: ValueRef) { if cx.unreachable { ret; } + count_insn(cx, "free"); llvm::LLVMBuildFree(B(cx), PointerVal); } @@ -316,6 +403,7 @@ fn Load(cx: block, PointerVal: ValueRef) -> ValueRef { llvm::LLVMGetElementType(ty) } else { ccx.int_type }; ret llvm::LLVMGetUndef(eltty); } + count_insn(cx, "load"); ret llvm::LLVMBuildLoad(B(cx), PointerVal, noname()); } @@ -324,13 +412,15 @@ fn Store(cx: block, Val: ValueRef, Ptr: ValueRef) { #debug["Store %s -> %s", val_str(cx.ccx().tn, Val), val_str(cx.ccx().tn, Ptr)]; + count_insn(cx, "store"); llvm::LLVMBuildStore(B(cx), Val, Ptr); } fn GEP(cx: block, Pointer: ValueRef, Indices: [ValueRef]) -> ValueRef { if cx.unreachable { ret llvm::LLVMGetUndef(T_ptr(T_nil())); } unsafe { - ret llvm::LLVMBuildGEP(B(cx), Pointer, vec::unsafe::to_ptr(Indices), + count_insn(cx, "gep"); + ret llvm::LLVMBuildGEP(B(cx), Pointer, vec::unsafe::to_ptr(Indices), Indices.len() as c_uint, noname()); } } @@ -340,6 +430,7 @@ fn GEP(cx: block, Pointer: ValueRef, Indices: [ValueRef]) -> ValueRef { fn GEPi(cx: block, base: ValueRef, ixs: [int]) -> ValueRef { let mut v: [ValueRef] = []; for i: int in ixs { v += [C_i32(i as i32)]; } + count_insn(cx, "gepi"); ret InBoundsGEP(cx, base, v); } @@ -347,7 +438,8 @@ fn InBoundsGEP(cx: block, Pointer: ValueRef, Indices: [ValueRef]) -> ValueRef { if cx.unreachable { ret llvm::LLVMGetUndef(T_ptr(T_nil())); } unsafe { - ret llvm::LLVMBuildInBoundsGEP(B(cx), Pointer, + count_insn(cx, "inboundsgep"); + ret llvm::LLVMBuildInBoundsGEP(B(cx), Pointer, vec::unsafe::to_ptr(Indices), Indices.len() as c_uint, noname()); @@ -356,116 +448,138 @@ fn InBoundsGEP(cx: block, Pointer: ValueRef, Indices: [ValueRef]) -> fn StructGEP(cx: block, Pointer: ValueRef, Idx: uint) -> ValueRef { if cx.unreachable { ret llvm::LLVMGetUndef(T_ptr(T_nil())); } + count_insn(cx, "structgep"); ret llvm::LLVMBuildStructGEP(B(cx), Pointer, Idx as c_uint, noname()); } fn GlobalString(cx: block, _Str: *libc::c_char) -> ValueRef { if cx.unreachable { ret llvm::LLVMGetUndef(T_ptr(T_i8())); } + count_insn(cx, "globalstring"); ret llvm::LLVMBuildGlobalString(B(cx), _Str, 
                                    noname());
}
fn GlobalStringPtr(cx: block, _Str: *libc::c_char) -> ValueRef {
    if cx.unreachable { ret llvm::LLVMGetUndef(T_ptr(T_i8())); }
+    count_insn(cx, "globalstringptr");
    ret llvm::LLVMBuildGlobalStringPtr(B(cx), _Str, noname());
}
/* Casts */
fn Trunc(cx: block, Val: ValueRef, DestTy: TypeRef) -> ValueRef {
    if cx.unreachable { ret llvm::LLVMGetUndef(DestTy); }
+    count_insn(cx, "trunc");
    ret llvm::LLVMBuildTrunc(B(cx), Val, DestTy, noname());
}
fn ZExt(cx: block, Val: ValueRef, DestTy: TypeRef) -> ValueRef {
    if cx.unreachable { ret llvm::LLVMGetUndef(DestTy); }
+    count_insn(cx, "zext");
    ret llvm::LLVMBuildZExt(B(cx), Val, DestTy, noname());
}
fn SExt(cx: block, Val: ValueRef, DestTy: TypeRef) -> ValueRef {
    if cx.unreachable { ret llvm::LLVMGetUndef(DestTy); }
+    count_insn(cx, "sext");
    ret llvm::LLVMBuildSExt(B(cx), Val, DestTy, noname());
}
fn FPToUI(cx: block, Val: ValueRef, DestTy: TypeRef) -> ValueRef {
    if cx.unreachable { ret llvm::LLVMGetUndef(DestTy); }
+    count_insn(cx, "fptoui");
    ret llvm::LLVMBuildFPToUI(B(cx), Val, DestTy, noname());
}
fn FPToSI(cx: block, Val: ValueRef, DestTy: TypeRef) -> ValueRef {
    if cx.unreachable { ret llvm::LLVMGetUndef(DestTy); }
+    count_insn(cx, "fptosi");
    ret llvm::LLVMBuildFPToSI(B(cx), Val, DestTy, noname());
}
fn UIToFP(cx: block, Val: ValueRef, DestTy: TypeRef) -> ValueRef {
    if cx.unreachable { ret llvm::LLVMGetUndef(DestTy); }
+    count_insn(cx, "uitofp");
    ret llvm::LLVMBuildUIToFP(B(cx), Val, DestTy, noname());
}
fn SIToFP(cx: block, Val: ValueRef, DestTy: TypeRef) -> ValueRef {
    if cx.unreachable { ret llvm::LLVMGetUndef(DestTy); }
+    count_insn(cx, "sitofp");
    ret llvm::LLVMBuildSIToFP(B(cx), Val, DestTy, noname());
}
fn FPTrunc(cx: block, Val: ValueRef, DestTy: TypeRef) -> ValueRef {
    if cx.unreachable { ret llvm::LLVMGetUndef(DestTy); }
+    count_insn(cx, "fptrunc");
    ret llvm::LLVMBuildFPTrunc(B(cx), Val, DestTy, noname());
}
fn FPExt(cx: block, Val: ValueRef, DestTy: TypeRef) -> ValueRef {
    if cx.unreachable { ret llvm::LLVMGetUndef(DestTy); }
+    count_insn(cx, "fpext");
    ret llvm::LLVMBuildFPExt(B(cx), Val, DestTy, noname());
}
fn PtrToInt(cx: block, Val: ValueRef, DestTy: TypeRef) -> ValueRef {
    if cx.unreachable { ret llvm::LLVMGetUndef(DestTy); }
+    count_insn(cx, "ptrtoint");
    ret llvm::LLVMBuildPtrToInt(B(cx), Val, DestTy, noname());
}
fn IntToPtr(cx: block, Val: ValueRef, DestTy: TypeRef) -> ValueRef {
    if cx.unreachable { ret llvm::LLVMGetUndef(DestTy); }
+    count_insn(cx, "inttoptr");
    ret llvm::LLVMBuildIntToPtr(B(cx), Val, DestTy, noname());
}
fn BitCast(cx: block, Val: ValueRef, DestTy: TypeRef) -> ValueRef {
    if cx.unreachable { ret llvm::LLVMGetUndef(DestTy); }
+    count_insn(cx, "bitcast");
    ret llvm::LLVMBuildBitCast(B(cx), Val, DestTy, noname());
}
fn ZExtOrBitCast(cx: block, Val: ValueRef, DestTy: TypeRef) -> ValueRef {
    if cx.unreachable { ret llvm::LLVMGetUndef(DestTy); }
+    count_insn(cx, "zextorbitcast");
    ret llvm::LLVMBuildZExtOrBitCast(B(cx), Val, DestTy, noname());
}
fn SExtOrBitCast(cx: block, Val: ValueRef, DestTy: TypeRef) -> ValueRef {
    if cx.unreachable { ret llvm::LLVMGetUndef(DestTy); }
+    count_insn(cx, "sextorbitcast");
    ret llvm::LLVMBuildSExtOrBitCast(B(cx), Val, DestTy, noname());
}
fn TruncOrBitCast(cx: block, Val: ValueRef, DestTy: TypeRef) -> ValueRef {
    if cx.unreachable { ret llvm::LLVMGetUndef(DestTy); }
+    count_insn(cx, "truncorbitcast");
    ret llvm::LLVMBuildTruncOrBitCast(B(cx), Val, DestTy, noname());
}
fn Cast(cx: block, Op: Opcode, Val: ValueRef, DestTy: TypeRef, _Name: *u8)
    -> ValueRef {
    if cx.unreachable { ret llvm::LLVMGetUndef(DestTy); }
+    count_insn(cx, "cast");
    ret llvm::LLVMBuildCast(B(cx), Op, Val, DestTy, noname());
}
fn PointerCast(cx: block, Val: ValueRef, DestTy: TypeRef) -> ValueRef {
    if cx.unreachable { ret llvm::LLVMGetUndef(DestTy); }
+    count_insn(cx, "pointercast");
    ret llvm::LLVMBuildPointerCast(B(cx), Val, DestTy, noname());
}
fn IntCast(cx: block, Val: ValueRef, DestTy: TypeRef) -> ValueRef {
    if cx.unreachable { ret llvm::LLVMGetUndef(DestTy); }
+    count_insn(cx, "intcast");
    ret llvm::LLVMBuildIntCast(B(cx), Val, DestTy, noname());
}
fn FPCast(cx: block, Val: ValueRef, DestTy: TypeRef) -> ValueRef {
    if cx.unreachable { ret llvm::LLVMGetUndef(DestTy); }
+    count_insn(cx, "fpcast");
    ret llvm::LLVMBuildFPCast(B(cx), Val, DestTy, noname());
}
@@ -474,18 +588,21 @@ fn FPCast(cx: block, Val: ValueRef, DestTy: TypeRef) -> ValueRef {
fn ICmp(cx: block, Op: IntPredicate, LHS: ValueRef, RHS: ValueRef) -> ValueRef {
    if cx.unreachable { ret llvm::LLVMGetUndef(T_i1()); }
+    count_insn(cx, "icmp");
    ret llvm::LLVMBuildICmp(B(cx), Op as c_uint, LHS, RHS, noname());
}
fn FCmp(cx: block, Op: RealPredicate, LHS: ValueRef, RHS: ValueRef) -> ValueRef {
    if cx.unreachable { ret llvm::LLVMGetUndef(T_i1()); }
+    count_insn(cx, "fcmp");
    ret llvm::LLVMBuildFCmp(B(cx), Op as c_uint, LHS, RHS, noname());
}
/* Miscellaneous instructions */
fn EmptyPhi(cx: block, Ty: TypeRef) -> ValueRef {
    if cx.unreachable { ret llvm::LLVMGetUndef(Ty); }
+    count_insn(cx, "emptyphi");
    ret llvm::LLVMBuildPhi(B(cx), Ty, noname());
}
@@ -495,6 +612,7 @@ fn Phi(cx: block, Ty: TypeRef, vals: [ValueRef], bbs: [BasicBlockRef])
    assert vals.len() == bbs.len();
    let phi = EmptyPhi(cx, Ty);
    unsafe {
+        count_insn(cx, "addincoming");
        llvm::LLVMAddIncoming(phi, vec::unsafe::to_ptr(vals),
                              vec::unsafe::to_ptr(bbs),
                              vals.len() as c_uint);
@@ -516,6 +634,7 @@ fn _UndefReturn(cx: block, Fn: ValueRef) -> ValueRef {
    let ty = val_ty(Fn);
    let retty = if llvm::LLVMGetTypeKind(ty) == 8 as c_int {
        llvm::LLVMGetReturnType(ty) } else { ccx.int_type };
+    count_insn(cx, "");
    ret llvm::LLVMGetUndef(retty);
}
@@ -536,6 +655,7 @@ fn add_comment(bcx: block, text: str) {
        let comment_text = "; " + sanitized;
        let asm = str::as_c_str(comment_text, {|c|
            str::as_c_str("", {|e|
+                count_insn(bcx, "inlineasm");
                llvm::LLVMConstInlineAsm(T_fn([], T_void()), c, e,
                                         False, False)
            })
@@ -547,6 +667,7 @@ fn add_comment(bcx: block, text: str) {
fn Call(cx: block, Fn: ValueRef, Args: [ValueRef]) -> ValueRef {
    if cx.unreachable { ret _UndefReturn(cx, Fn); }
    unsafe {
+        count_insn(cx, "call");
        ret llvm::LLVMBuildCall(B(cx), Fn, vec::unsafe::to_ptr(Args),
                                Args.len() as c_uint, noname());
    }
@@ -555,6 +676,7 @@ fn Call(cx: block, Fn: ValueRef, Args: [ValueRef]) -> ValueRef {
fn FastCall(cx: block, Fn: ValueRef, Args: [ValueRef]) -> ValueRef {
    if cx.unreachable { ret _UndefReturn(cx, Fn); }
    unsafe {
+        count_insn(cx, "fastcall");
        let v = llvm::LLVMBuildCall(B(cx), Fn, vec::unsafe::to_ptr(Args),
                                    Args.len() as c_uint, noname());
        lib::llvm::SetInstructionCallConv(v, lib::llvm::FastCallConv);
@@ -566,6 +688,7 @@ fn CallWithConv(cx: block, Fn: ValueRef, Args: [ValueRef],
                Conv: CallConv) -> ValueRef {
    if cx.unreachable { ret _UndefReturn(cx, Fn); }
    unsafe {
+        count_insn(cx, "callwithconv");
        let v = llvm::LLVMBuildCall(B(cx), Fn, vec::unsafe::to_ptr(Args),
                                    Args.len() as c_uint, noname());
        lib::llvm::SetInstructionCallConv(v, Conv);
@@ -576,57 +699,67 @@ fn CallWithConv(cx: block, Fn: ValueRef, Args: [ValueRef],
fn Select(cx: block, If: ValueRef, Then: ValueRef, Else: ValueRef) -> ValueRef {
    if cx.unreachable { ret _Undef(Then); }
+    count_insn(cx, "select");
    ret llvm::LLVMBuildSelect(B(cx), If, Then, Else, noname());
}
fn VAArg(cx: block, list: ValueRef, Ty: TypeRef) -> ValueRef {
    if cx.unreachable { ret llvm::LLVMGetUndef(Ty); }
+    count_insn(cx, "vaarg");
    ret llvm::LLVMBuildVAArg(B(cx), list, Ty, noname());
}
fn ExtractElement(cx: block, VecVal: ValueRef, Index: ValueRef) -> ValueRef {
    if cx.unreachable { ret llvm::LLVMGetUndef(T_nil()); }
+    count_insn(cx, "extractelement");
    ret llvm::LLVMBuildExtractElement(B(cx), VecVal, Index, noname());
}
fn InsertElement(cx: block, VecVal: ValueRef, EltVal: ValueRef,
                 Index: ValueRef) {
    if cx.unreachable { ret; }
+    count_insn(cx, "insertelement");
    llvm::LLVMBuildInsertElement(B(cx), VecVal, EltVal, Index, noname());
}
fn ShuffleVector(cx: block, V1: ValueRef, V2: ValueRef, Mask: ValueRef) {
    if cx.unreachable { ret; }
+    count_insn(cx, "shufflevector");
    llvm::LLVMBuildShuffleVector(B(cx), V1, V2, Mask, noname());
}
fn ExtractValue(cx: block, AggVal: ValueRef, Index: uint) -> ValueRef {
    if cx.unreachable { ret llvm::LLVMGetUndef(T_nil()); }
+    count_insn(cx, "extractvalue");
    ret llvm::LLVMBuildExtractValue(B(cx), AggVal, Index as c_uint, noname());
}
fn InsertValue(cx: block, AggVal: ValueRef, EltVal: ValueRef,
               Index: uint) {
    if cx.unreachable { ret; }
+    count_insn(cx, "insertvalue");
    llvm::LLVMBuildInsertValue(B(cx), AggVal, EltVal, Index as c_uint,
                               noname());
}
fn IsNull(cx: block, Val: ValueRef) -> ValueRef {
    if cx.unreachable { ret llvm::LLVMGetUndef(T_i1()); }
+    count_insn(cx, "isnull");
    ret llvm::LLVMBuildIsNull(B(cx), Val, noname());
}
fn IsNotNull(cx: block, Val: ValueRef) -> ValueRef {
    if cx.unreachable { ret llvm::LLVMGetUndef(T_i1()); }
+    count_insn(cx, "isnotnull");
    ret llvm::LLVMBuildIsNotNull(B(cx), Val, noname());
}
fn PtrDiff(cx: block, LHS: ValueRef, RHS: ValueRef) -> ValueRef {
    let ccx = cx.fcx.ccx;
    if cx.unreachable { ret llvm::LLVMGetUndef(ccx.int_type); }
+    count_insn(cx, "ptrdiff");
    ret llvm::LLVMBuildPtrDiff(B(cx), LHS, RHS, noname());
}
@@ -642,6 +775,7 @@ fn Trap(cx: block) {
    assert (T as int != 0);
    let Args: [ValueRef] = [];
    unsafe {
+        count_insn(cx, "trap");
        llvm::LLVMBuildCall(b, T, vec::unsafe::to_ptr(Args),
                            Args.len() as c_uint, noname());
    }
@@ -650,17 +784,20 @@ fn Trap(cx: block) {
fn LandingPad(cx: block, Ty: TypeRef, PersFn: ValueRef,
              NumClauses: uint) -> ValueRef {
    assert !cx.terminated && !cx.unreachable;
+    count_insn(cx, "landingpad");
    ret llvm::LLVMBuildLandingPad(B(cx), Ty, PersFn, NumClauses as c_uint,
                                  noname());
}
-fn SetCleanup(_cx: block, LandingPad: ValueRef) {
+fn SetCleanup(cx: block, LandingPad: ValueRef) {
+    count_insn(cx, "setcleanup");
    llvm::LLVMSetCleanup(LandingPad, lib::llvm::True);
}
fn Resume(cx: block, Exn: ValueRef) -> ValueRef {
    assert (!cx.terminated);
    cx.terminated = true;
+    count_insn(cx, "resume");
    ret llvm::LLVMBuildResume(B(cx), Exn);
}
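Every Build* wrapper above now funnels through count_insn before emitting its LLVM instruction, but count_insn itself is not defined in these hunks. The following is only an illustrative sketch of the bookkeeping it plausibly performs, written in present-day Rust rather than the 2012 dialect of the patch; the Stats type, its field names, and the opt-in flag are assumptions for the example, not taken from the diff:

// Sketch only -- not the patch's actual definition of count_insn.
// Assumed shape: a per-category counter that is bumped only when the
// new --count-llvm-insns behaviour was requested.
use std::collections::HashMap;

pub struct Stats {
    pub count_llvm_insns: bool,             // assumed mirror of the session flag
    pub llvm_insns: HashMap<String, usize>, // assumed mirror of stats.llvm_insns below
}

impl Stats {
    pub fn count_insn(&mut self, category: &str) {
        if !self.count_llvm_insns {
            return; // counting stays opt-in, so ordinary builds pay essentially nothing
        }
        // One increment per emitted instruction, keyed by the wrapper's name.
        *self.llvm_insns.entry(category.to_string()).or_insert(0) += 1;
    }
}

Under that reading, the Load wrapper calls count_insn("load") exactly once per emitted load, and dumping the map sorted by value at the end of translation yields the per-category totals the new flag advertises.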
diff --git a/src/rustc/middle/trans/closure.rs b/src/rustc/middle/trans/closure.rs
index 111f172a8ea..4bcb9cc1647 100644
--- a/src/rustc/middle/trans/closure.rs
+++ b/src/rustc/middle/trans/closure.rs
@@ -146,10 +146,11 @@ fn allocate_cbox(bcx: block,
                 ck: ty::closure_kind,
                 cdata_ty: ty::t)
    -> (block, ValueRef, [ValueRef]) {
-
+    let _icx = bcx.insn_ctxt("closure::allocate_cbox");
    let ccx = bcx.ccx(), tcx = ccx.tcx;

    fn nuke_ref_count(bcx: block, box: ValueRef) {
+        let _icx = bcx.insn_ctxt("closure::nuke_ref_count");
        // Initialize ref count to arbitrary value for debugging:
        let ccx = bcx.ccx();
        let box = PointerCast(bcx, box, T_opaque_box_ptr(ccx));
@@ -211,7 +212,7 @@ type closure_result = {
fn store_environment(bcx: block,
                     bound_values: [environment_value],
                     ck: ty::closure_kind) -> closure_result {
-
+    let _icx = bcx.insn_ctxt("closure::store_environment");
    let ccx = bcx.ccx(), tcx = ccx.tcx;

    // compute the shape of the closure
@@ -287,6 +288,7 @@ fn build_closure(bcx0: block,
                 cap_vars: [capture::capture_var],
                 ck: ty::closure_kind,
                 id: ast::node_id) -> closure_result {
+    let _icx = bcx0.insn_ctxt("closure::build_closure");
    // If we need to, package up the iterator body to call
    let mut env_vals = [];
    let mut bcx = bcx0;
@@ -332,6 +334,7 @@ fn load_environment(fcx: fn_ctxt, cdata_ty: ty::t,
                    cap_vars: [capture::capture_var],
                    ck: ty::closure_kind) {
+    let _icx = fcx.insn_ctxt("closure::load_environment");
    let bcx = raw_block(fcx, fcx.llloadenv);

    // Load a pointer to the closure data, skipping over the box header:
@@ -365,6 +368,7 @@ fn trans_expr_fn(bcx: block,
                 id: ast::node_id,
                 cap_clause: ast::capture_clause,
                 dest: dest) -> block {
+    let _icx = bcx.insn_ctxt("closure::trans_expr_fn");
    if dest == ignore { ret bcx; }
    let ccx = bcx.ccx(), bcx = bcx;
    let fty = node_id_type(bcx, id);
@@ -401,6 +405,7 @@ fn trans_expr_fn(bcx: block,
fn trans_bind(cx: block, f: @ast::expr, args: [option<@ast::expr>],
              id: ast::node_id, dest: dest) -> block {
+    let _icx = cx.insn_ctxt("closure::trans_bind");
    let f_res = trans_callee(cx, f);
    ret trans_bind_1(cx, expr_ty(cx, f), f_res, args,
                     node_id_type(cx, id), dest);
@@ -410,6 +415,7 @@ fn trans_bind_1(cx: block, outgoing_fty: ty::t,
                f_res: lval_maybe_callee,
                args: [option<@ast::expr>], pair_ty: ty::t,
                dest: dest) -> block {
+    let _icx = cx.insn_ctxt("closure::trans_bind1");
    assert option::is_none(f_res.tds);
    let ccx = cx.ccx();
    let mut bound: [@ast::expr] = [];
@@ -472,6 +478,7 @@ fn make_fn_glue(
    t: ty::t,
    glue_fn: fn@(block, v: ValueRef, t: ty::t) -> block) -> block {
+    let _icx = cx.insn_ctxt("closure::make_fn_glue");
    let bcx = cx;
    let tcx = cx.tcx();
@@ -500,6 +507,7 @@ fn make_opaque_cbox_take_glue(
    cboxptr: ValueRef)     // ptr to ptr to the opaque closure
    -> block {
    // Easy cases:
+    let _icx = bcx.insn_ctxt("closure::make_opaque_cbox_take_glue");
    alt ck {
      ty::ck_block { ret bcx; }
      ty::ck_box { ret incr_refcnt_of_boxed(bcx, Load(bcx, cboxptr)); }
@@ -546,6 +554,7 @@ fn make_opaque_cbox_drop_glue(
    ck: ty::closure_kind,
    cboxptr: ValueRef)     // ptr to the opaque closure
    -> block {
+    let _icx = bcx.insn_ctxt("closure::make_opaque_cbox_drop_glue");
    alt ck {
      ty::ck_block { bcx }
      ty::ck_box {
@@ -564,6 +573,7 @@ fn make_opaque_cbox_free_glue(
    ck: ty::closure_kind,
    cbox: ValueRef)     // ptr to the opaque closure
    -> block {
+    let _icx = bcx.insn_ctxt("closure::make_opaque_cbox_free_glue");
    alt ck {
      ty::ck_block { ret bcx; }
      ty::ck_box | ty::ck_uniq { /* hard cases: */ }
@@ -614,6 +624,7 @@ fn trans_bind_thunk(ccx: @crate_ctxt,
                    cdata_ty: ty::t,
                    target_info: target_info)
    -> {val: ValueRef, ty: TypeRef} {
+    let _icx = ccx.insn_ctxt("closure::trans_bind_thunk");
    let tcx = ccx.tcx;
    #debug["trans_bind_thunk[incoming_fty=%s,outgoing_fty=%s,\
           cdata_ty=%s]",
diff --git a/src/rustc/middle/trans/common.rs b/src/rustc/middle/trans/common.rs
index 9fcbdec48e9..192bfbffbf5 100644
--- a/src/rustc/middle/trans/common.rs
+++ b/src/rustc/middle/trans/common.rs
@@ -50,11 +50,14 @@ type tydesc_info =
 * task such as bringing a task to life, allocating memory, etc.
 *
 */
+
type stats =
    {mutable n_static_tydescs: uint,
     mutable n_glues_created: uint,
     mutable n_null_glues: uint,
     mutable n_real_glues: uint,
+     llvm_insn_ctxt: @mutable [str],
+     llvm_insns: hashmap<str, uint>,
     fn_times: @mutable [{ident: str, time: int}]};

resource BuilderRef_res(B: BuilderRef) { llvm::LLVMDisposeBuilder(B); }
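The trans-level changes that follow form the other half of the instrumentation: each translation routine opens with a `let _icx = ...insn_ctxt("module::function")` binding, and the stats record above gains a matching llvm_insn_ctxt stack. The guard type returned by insn_ctxt is likewise not shown in this patch; a minimal sketch of the push-on-create, pop-on-drop behaviour it presumably has (again in present-day Rust, with every name here assumed rather than taken from the diff) would be:

// Sketch only -- not the patch's definition. Shows why binding the guard
// to `_icx` is enough: the context entry lives exactly as long as the
// enclosing function body.
use std::cell::RefCell;

#[derive(Default)]
pub struct InsnCtxts {
    pub llvm_insn_ctxt: RefCell<Vec<&'static str>>, // assumed mirror of the stack above
}

pub struct InsnCtxt<'a> {
    owner: &'a InsnCtxts,
}

impl InsnCtxts {
    pub fn insn_ctxt(&self, name: &'static str) -> InsnCtxt<'_> {
        // Push the routine's name on entry; the guard pops it again on drop.
        self.llvm_insn_ctxt.borrow_mut().push(name);
        InsnCtxt { owner: self }
    }
}

impl Drop for InsnCtxt<'_> {
    fn drop(&mut self) {
        self.owner.llvm_insn_ctxt.borrow_mut().pop();
    }
}

With a stack like this in place, a count_insn call can consult the current llvm_insn_ctxt as well as the category name, which is what would make it possible not just to count instructions but to attribute them to the trans routine that emitted them.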
diff --git a/src/rustc/middle/trans/impl.rs b/src/rustc/middle/trans/impl.rs
index 45d23fdbfac..7593482c1fb 100644
--- a/src/rustc/middle/trans/impl.rs
+++ b/src/rustc/middle/trans/impl.rs
@@ -16,6 +16,7 @@ import std::map::hashmap;
fn trans_impl(ccx: @crate_ctxt, path: path, name: ast::ident,
              methods: [@ast::method], tps: [ast::ty_param]) {
+    let _icx = ccx.insn_ctxt("impl::trans_impl");
    if tps.len() > 0u { ret; }
    let sub_path = path + [path_name(name)];
    for m in methods {
@@ -29,6 +30,7 @@ fn trans_impl(ccx: @crate_ctxt, path: path, name: ast::ident,
}
fn trans_self_arg(bcx: block, base: @ast::expr) -> result {
+    let _icx = bcx.insn_ctxt("impl::trans_self_arg");
    let basety = expr_ty(bcx, base);
    let m_by_ref = ast::expl(ast::by_ref);
    let mut temp_cleanups = [];
@@ -46,6 +48,7 @@ fn trans_self_arg(bcx: block, base: @ast::expr) -> result {
fn trans_method_callee(bcx: block, callee_id: ast::node_id,
                       self: @ast::expr, origin: typeck::method_origin)
    -> lval_maybe_callee {
+    let _icx = bcx.insn_ctxt("impl::trans_method_callee");
    alt origin {
      typeck::method_static(did) {
        let {bcx, val} = trans_self_arg(bcx, self);
@@ -69,6 +72,7 @@ fn trans_method_callee(bcx: block, callee_id: ast::node_id,
fn trans_vtable_callee(bcx: block, env: callee_env, vtable: ValueRef,
                       callee_id: ast::node_id, n_method: uint)
    -> lval_maybe_callee {
+    let _icx = bcx.insn_ctxt("impl::trans_vtable_callee");
    let bcx = bcx, ccx = bcx.ccx();
    let fty = node_id_type(bcx, callee_id);
    let llfty = type_of::type_of_fn_from_ty(ccx, fty);
@@ -105,6 +109,7 @@ fn trans_monomorphized_callee(bcx: block, callee_id: ast::node_id,
                              base: @ast::expr, iface_id: ast::def_id,
                              n_method: uint, n_param: uint, n_bound: uint,
                              substs: param_substs) -> lval_maybe_callee {
+    let _icx = bcx.insn_ctxt("impl::trans_monomorphized_callee");
    alt find_vtable_in_fn_ctxt(substs, n_param, n_bound) {
      typeck::vtable_static(impl_did, impl_substs, sub_origins) {
        let ccx = bcx.ccx();
@@ -135,6 +140,7 @@ fn trans_monomorphized_callee(bcx: block, callee_id: ast::node_id,
fn trans_iface_callee(bcx: block, base: @ast::expr,
                      callee_id: ast::node_id, n_method: uint)
    -> lval_maybe_callee {
+    let _icx = bcx.insn_ctxt("impl::trans_iface_callee");
    let {bcx, val} = trans_temp_expr(bcx, base);
    let vtable = Load(bcx, PointerCast(bcx, GEPi(bcx, val, [0, 0]),
                                       T_ptr(T_ptr(T_vtable()))));
@@ -220,6 +226,7 @@ fn get_vtable(ccx: @crate_ctxt, origin: typeck::vtable_origin)
}
fn make_vtable(ccx: @crate_ctxt, ptrs: [ValueRef]) -> ValueRef {
+    let _icx = ccx.insn_ctxt("impl::make_vtable");
    let tbl = C_struct(ptrs);
    let vt_gvar = str::as_c_str(ccx.names("vtable"), {|buf|
        llvm::LLVMAddGlobal(ccx.llmod, val_ty(tbl), buf)
@@ -232,6 +239,7 @@ fn make_vtable(ccx: @crate_ctxt, ptrs: [ValueRef]) -> ValueRef {
fn make_impl_vtable(ccx: @crate_ctxt, impl_id: ast::def_id, substs: [ty::t],
                    vtables: typeck::vtable_res) -> ValueRef {
+    let _icx = ccx.insn_ctxt("impl::make_impl_vtable");
    let tcx = ccx.tcx;
    let ifce_id = ty::ty_to_def_id(option::get(ty::impl_iface(tcx, impl_id)));
    let has_tps = (*ty::lookup_item_type(ccx.tcx, impl_id).bounds).len() > 0u;
@@ -255,6 +263,7 @@ fn make_impl_vtable(ccx: @crate_ctxt, impl_id: ast::def_id, substs: [ty::t],
fn trans_cast(bcx: block, val: @ast::expr, id: ast::node_id, dest: dest)
    -> block {
+    let _icx = bcx.insn_ctxt("impl::trans_cast");
    if dest == ignore { ret trans_expr(bcx, val, ignore); }
    let ccx = bcx.ccx();
    let v_ty = expr_ty(bcx, val);
diff --git a/src/rustc/middle/trans/native.rs b/src/rustc/middle/trans/native.rs
index bb78a9b95be..bcb4889349c 100644
--- a/src/rustc/middle/trans/native.rs
+++ b/src/rustc/middle/trans/native.rs
@@ -492,6 +492,7 @@ fn build_wrap_fn_(ccx: @crate_ctxt,
                  arg_builder: wrap_arg_builder,
                  ret_builder: wrap_ret_builder) {
+    let _icx = ccx.insn_ctxt("native::build_wrap_fn_");
    let fcx = new_fn_ctxt(ccx, [], llwrapfn, none);
    let bcx = top_scope_block(fcx, none);
    let lltop = bcx.llbb;
@@ -550,13 +551,19 @@ fn build_wrap_fn_(ccx: @crate_ctxt,
// perhaps do so in the future.
fn trans_native_mod(ccx: @crate_ctxt,
                    native_mod: ast::native_mod, abi: ast::native_abi) {
+
+    let _icx = ccx.insn_ctxt("native::trans_native_mod");
+
    fn build_shim_fn(ccx: @crate_ctxt,
                     native_item: @ast::native_item,
                     tys: @c_stack_tys,
                     cc: lib::llvm::CallConv) -> ValueRef {
+        let _icx = ccx.insn_ctxt("native::build_shim_fn");
+
        fn build_args(bcx: block, tys: @c_stack_tys,
                      llargbundle: ValueRef) -> [ValueRef] {
+            let _icx = bcx.insn_ctxt("native::shim::build_args");
            let mut llargvals = [];
            let mut i = 0u;
            let n = vec::len(tys.arg_tys);
@@ -602,6 +609,7 @@ fn trans_native_mod(ccx: @crate_ctxt,
        fn build_ret(bcx: block, tys: @c_stack_tys,
                     llargbundle: ValueRef, llretval: ValueRef) {
+            let _icx = bcx.insn_ctxt("native::shim::build_ret");
            alt tys.x86_64_tys {
              some(x86_64) {
                vec::iteri(x86_64.attrs) {|i, a|
@@ -666,8 +674,11 @@ fn trans_native_mod(ccx: @crate_ctxt,
                     llshimfn: ValueRef,
                     llwrapfn: ValueRef) {
+        let _icx = ccx.insn_ctxt("native::build_wrap_fn");
+
        fn build_args(bcx: block, tys: @c_stack_tys, llwrapfn: ValueRef,
                      llargbundle: ValueRef) {
+            let _icx = bcx.insn_ctxt("native::wrap::build_args");
            let mut i = 0u;
            let n = vec::len(tys.arg_tys);
            let implicit_args = first_real_arg; // ret + env
@@ -684,6 +695,7 @@ fn trans_native_mod(ccx: @crate_ctxt,
        fn build_ret(bcx: block, _tys: @c_stack_tys,
                     _llargbundle: ValueRef) {
+            let _icx = bcx.insn_ctxt("native::wrap::build_ret");
            RetVoid(bcx);
        }
@@ -718,9 +730,12 @@ fn trans_native_mod(ccx: @crate_ctxt,
fn trans_crust_fn(ccx: @crate_ctxt, path: ast_map::path, decl: ast::fn_decl,
                  body: ast::blk, llwrapfn: ValueRef, id: ast::node_id) {
+    let _icx = ccx.insn_ctxt("native::build_crust_fn");
+
    fn build_rust_fn(ccx: @crate_ctxt, path: ast_map::path,
                     decl: ast::fn_decl, body: ast::blk,
                     id: ast::node_id) -> ValueRef {
+        let _icx = ccx.insn_ctxt("native::crust::build_rust_fn");
        let t = ty::node_id_to_type(ccx.tcx, id);
        let ps = link::mangle_internal_name_by_path(
            ccx, path + [ast_map::path_name("__rust_abi")]);
@@ -734,8 +749,11 @@ fn trans_crust_fn(ccx: @crate_ctxt, path: ast_map::path, decl: ast::fn_decl,
    fn build_shim_fn(ccx: @crate_ctxt, path: ast_map::path,
                     llrustfn: ValueRef, tys: @c_stack_tys) -> ValueRef {
+        let _icx = ccx.insn_ctxt("native::crust::build_shim_fn");
+
        fn build_args(bcx: block, tys: @c_stack_tys,
                      llargbundle: ValueRef) -> [ValueRef] {
+            let _icx = bcx.insn_ctxt("native::crust::shim::build_args");
            let mut llargvals = [];
            let mut i = 0u;
            let n = vec::len(tys.arg_tys);
@@ -767,8 +785,11 @@ fn trans_crust_fn(ccx: @crate_ctxt, path: ast_map::path, decl: ast::fn_decl,
    fn build_wrap_fn(ccx: @crate_ctxt, llshimfn: ValueRef,
                     llwrapfn: ValueRef, tys: @c_stack_tys) {
+        let _icx = ccx.insn_ctxt("native::crust::build_wrap_fn");
+
        fn build_args(bcx: block, tys: @c_stack_tys, llwrapfn: ValueRef,
                      llargbundle: ValueRef) {
+            let _icx = bcx.insn_ctxt("native::crust::wrap::build_args");
            alt tys.x86_64_tys {
              option::some(x86_64) {
                let mut atys = x86_64.arg_tys;
@@ -827,6 +848,7 @@ fn trans_crust_fn(ccx: @crate_ctxt, path: ast_map::path, decl: ast::fn_decl,
        fn build_ret(bcx: block, tys: @c_stack_tys,
                     llargbundle: ValueRef) {
+            let _icx = bcx.insn_ctxt("native::crust::wrap::build_ret");
            alt tys.x86_64_tys {
              option::some(x86_64) {
                if x86_64.sret || !tys.ret_def {
@@ -872,6 +894,7 @@ fn trans_crust_fn(ccx: @crate_ctxt, path: ast_map::path, decl: ast::fn_decl,
fn register_crust_fn(ccx: @crate_ctxt, sp: span,
                     path: ast_map::path, node_id: ast::node_id)
    -> ValueRef {
+    let _icx = ccx.insn_ctxt("native::register_crust_fn");
    let t = ty::node_id_to_type(ccx.tcx, node_id);
    let (llargtys, llretty, ret_ty) = c_arg_and_ret_lltys(ccx, node_id);
    ret if ccx.sess.targ_cfg.arch == arch_x86_64 {
@@ -907,6 +930,7 @@ fn abi_of_native_fn(ccx: @crate_ctxt, i: @ast::native_item)
fn decl_native_fn(ccx: @crate_ctxt, i: @ast::native_item,
                  pth: ast_map::path) -> ValueRef {
+    let _icx = ccx.insn_ctxt("native::decl_native_fn");
    alt i.node {
      ast::native_item_fn(_, _) {
        let node_type = ty::node_id_to_type(ccx.tcx, i.id);
diff --git a/src/rustc/middle/trans/shape.rs b/src/rustc/middle/trans/shape.rs
index d13da3b0cfe..bcf73ca9774 100644
--- a/src/rustc/middle/trans/shape.rs
+++ b/src/rustc/middle/trans/shape.rs
@@ -14,7 +14,6 @@ import syntax::ast;
import syntax::ast_util::dummy_sp;
import syntax::util::interner;
import util::common;
-import trans::build::{Load, Store, Add, GEPi};
import syntax::codemap::span;
import std::map::hashmap;
diff --git a/src/rustc/middle/trans/tvec.rs b/src/rustc/middle/trans/tvec.rs
index b8031022354..1bd765200de 100644
--- a/src/rustc/middle/trans/tvec.rs
+++ b/src/rustc/middle/trans/tvec.rs
@@ -5,27 +5,31 @@ import back::abi;
import base::{call_memmove,
              trans_shared_malloc, INIT, copy_val, load_if_immediate,
              get_tydesc, sub_block, do_spill_noroot,
-              dest};
+              dest, bcx_icx};
import shape::llsize_of;
import build::*;
import common::*;

fn get_fill(bcx: block, vptr: ValueRef) -> ValueRef {
+    let _icx = bcx.insn_ctxt("tvec::get_fill");
    Load(bcx, GEPi(bcx, vptr, [0, abi::vec_elt_fill]))
}
fn get_dataptr(bcx: block, vptr: ValueRef, unit_ty: TypeRef) -> ValueRef {
+    let _icx = bcx.insn_ctxt("tvec::get_dataptr");
    let ptr = GEPi(bcx, vptr, [0, abi::vec_elt_elems]);
    PointerCast(bcx, ptr, T_ptr(unit_ty))
}
fn pointer_add(bcx: block, ptr: ValueRef, bytes: ValueRef) -> ValueRef {
+    let _icx = bcx.insn_ctxt("tvec::pointer_add");
    let old_ty = val_ty(ptr);
    let bptr = PointerCast(bcx, ptr, T_ptr(T_i8()));
    ret PointerCast(bcx, InBoundsGEP(bcx, bptr, [bytes]), old_ty);
}
fn alloc_raw(bcx: block, fill: ValueRef, alloc: ValueRef) -> result {
+    let _icx = bcx.insn_ctxt("tvec::alloc_raw");
    let ccx = bcx.ccx();
    let llvecty = ccx.opaque_vec_type;
    let vecsize = Add(bcx, alloc, llsize_of(ccx, llvecty));
@@ -43,6 +47,7 @@ type alloc_result =
     llunitty: TypeRef};
fn alloc(bcx: block, vec_ty: ty::t, elts: uint) -> alloc_result {
+    let _icx = bcx.insn_ctxt("tvec::alloc");
    let ccx = bcx.ccx();
    let unit_ty = ty::sequence_element_type(bcx.tcx(), vec_ty);
    let llunitty = type_of::type_of(ccx, unit_ty);
@@ -62,6 +67,7 @@ fn alloc(bcx: block, vec_ty: ty::t, elts: uint) -> alloc_result {
}
fn duplicate(bcx: block, vptr: ValueRef, vec_ty: ty::t) -> result {
+    let _icx = bcx.insn_ctxt("tvec::duplicate");
    let ccx = bcx.ccx();
    let fill = get_fill(bcx, vptr);
    let size = Add(bcx, fill, llsize_of(ccx, ccx.opaque_vec_type));
@@ -77,6 +83,7 @@ fn duplicate(bcx: block, vptr: ValueRef, vec_ty: ty::t) -> result {
}
fn make_free_glue(bcx: block, vptr: ValueRef, vec_ty: ty::t) ->
    block {
+    let _icx = bcx.insn_ctxt("tvec::make_free_glue");
    let tcx = bcx.tcx(), unit_ty = ty::sequence_element_type(tcx, vec_ty);
    base::with_cond(bcx, IsNotNull(bcx, vptr)) {|bcx|
        let bcx = if ty::type_needs_drop(tcx, unit_ty) {
@@ -88,6 +95,7 @@ fn make_free_glue(bcx: block, vptr: ValueRef, vec_ty: ty::t) ->
fn trans_vec(bcx: block, args: [@ast::expr], id: ast::node_id,
             dest: dest) -> block {
+    let _icx = bcx.insn_ctxt("tvec::trans_vec");
    let ccx = bcx.ccx();
    let mut bcx = bcx;
    if dest == base::ignore {
@@ -118,6 +126,7 @@ fn trans_vec(bcx: block, args: [@ast::expr], id: ast::node_id,
}
fn trans_str(bcx: block, s: str, dest: dest) -> block {
+    let _icx = bcx.insn_ctxt("tvec::trans_str");
    let veclen = str::len(s) + 1u; // +1 for \0
    let {bcx: bcx, val: sptr, _} =
        alloc(bcx, ty::mk_str(bcx.tcx()), veclen);
@@ -131,6 +140,7 @@ fn trans_str(bcx: block, s: str, dest: dest) -> block {
fn trans_append(bcx: block, vec_ty: ty::t, lhsptr: ValueRef,
                rhs: ValueRef) -> block {
+    let _icx = bcx.insn_ctxt("tvec::trans_append");
    // Cast to opaque interior vector types if necessary.
    let ccx = bcx.ccx();
    let unit_ty = ty::sequence_element_type(ccx.tcx, vec_ty);
@@ -172,6 +182,7 @@ fn trans_append(bcx: block, vec_ty: ty::t, lhsptr: ValueRef,
fn trans_append_literal(bcx: block, vptrptr: ValueRef, vec_ty: ty::t,
                        vals: [@ast::expr]) -> block {
+    let _icx = bcx.insn_ctxt("tvec::trans_append_literal");
    let ccx = bcx.ccx();
    let elt_ty = ty::sequence_element_type(bcx.tcx(), vec_ty);
    let mut ti = none;
@@ -193,6 +204,7 @@ fn trans_append_literal(bcx: block, vptrptr: ValueRef, vec_ty: ty::t,
fn trans_add(bcx: block, vec_ty: ty::t, lhs: ValueRef, rhs: ValueRef,
             dest: dest) -> block {
+    let _icx = bcx.insn_ctxt("tvec::trans_add");
    let ccx = bcx.ccx();

    if ty::get(vec_ty).struct == ty::ty_str {
@@ -233,6 +245,7 @@ type iter_vec_block = fn(block, ValueRef, ty::t) -> block;
fn iter_vec_raw(bcx: block, vptr: ValueRef, vec_ty: ty::t,
                fill: ValueRef, f: iter_vec_block) -> block {
+    let _icx = bcx.insn_ctxt("tvec::iter_vec_raw");
    let ccx = bcx.ccx();
    let unit_ty = ty::sequence_element_type(bcx.tcx(), vec_ty);
    let llunitty = type_of::type_of(ccx, unit_ty);
@@ -262,6 +275,7 @@ fn iter_vec_raw(bcx: block, vptr: ValueRef, vec_ty: ty::t,
fn iter_vec(bcx: block, vptr: ValueRef, vec_ty: ty::t,
            f: iter_vec_block) -> block {
+    let _icx = bcx.insn_ctxt("tvec::iter_vec");
    let vptr = PointerCast(bcx, vptr, T_ptr(bcx.ccx().opaque_vec_type));
    ret iter_vec_raw(bcx, vptr, vec_ty, get_fill(bcx, vptr), f);
}
diff --git a/src/rustc/middle/trans/uniq.rs b/src/rustc/middle/trans/uniq.rs
index e1529664e99..ad52a0a3a2b 100644
--- a/src/rustc/middle/trans/uniq.rs
+++ b/src/rustc/middle/trans/uniq.rs
@@ -9,6 +9,7 @@ export trans_uniq, make_free_glue, autoderef, duplicate, alloc_uniq;
fn trans_uniq(bcx: block, contents: @ast::expr,
              node_id: ast::node_id, dest: dest) -> block {
+    let _icx = bcx.insn_ctxt("uniq::trans_uniq");
    let uniq_ty = node_id_type(bcx, node_id);
    let {bcx, val: llptr} = alloc_uniq(bcx, uniq_ty);
    add_clean_free(bcx, llptr, true);
@@ -19,6 +20,7 @@ fn trans_uniq(bcx: block, contents: @ast::expr,
fn alloc_uniq(cx: block, uniq_ty: ty::t) -> result {
    let bcx = cx;
+    let _icx = bcx.insn_ctxt("uniq::alloc_uniq");
    let contents_ty = content_ty(uniq_ty);
    let llty = type_of::type_of(bcx.ccx(), contents_ty);
    let llsz = llsize_of(bcx.ccx(), llty);
@@ -28,6 +30,7 @@ fn alloc_uniq(cx: block, uniq_ty: ty::t)
    -> result {
fn make_free_glue(bcx: block, vptr: ValueRef, t: ty::t)
    -> block {
+    let _icx = bcx.insn_ctxt("uniq::make_free_glue");
    with_cond(bcx, IsNotNull(bcx, vptr)) {|bcx|
        let bcx = drop_ty(bcx, vptr, content_ty(t));
        trans_shared_free(bcx, vptr)
@@ -47,6 +50,7 @@ fn autoderef(v: ValueRef, t: ty::t) -> {v: ValueRef, t: ty::t} {
}
fn duplicate(bcx: block, v: ValueRef, t: ty::t) -> result {
+    let _icx = bcx.insn_ctxt("uniq::duplicate");
    let content_ty = content_ty(t);
    let {bcx, val: llptr} = alloc_uniq(bcx, t);
diff --git a/src/rustdoc/astsrv.rs b/src/rustdoc/astsrv.rs
index c3e2221cea1..83044447ec6 100644
--- a/src/rustdoc/astsrv.rs
+++ b/src/rustdoc/astsrv.rs
@@ -137,6 +137,7 @@ fn build_session() -> (session::session, @mutable bool) {
        save_temps: false,
        stats: false,
        time_passes: false,
+        count_llvm_insns: false,
        time_llvm_passes: false,
        output_type: link::output_type_exe,
        addl_lib_search_paths: [],