Auto merge of #34956 - nikomatsakis:incr-comp-o-files, r=mw

Enable reuse of `.o` files if nothing has changed

This PR completes a first "spike" for incremental compilation by enabling us to reuse `.o` files when nothing has changed. When compiling in incremental mode, we save the `.o` files produced for each codegen unit into the incremental compilation directory; on later runs, if they are still valid, we copy them back out instead of re-running LLVM. The code is still a bit rough, but it does seem to work. =)
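
At a high level, the hand-off between `rustc_trans` and `rustc_incremental` works like this (a sketch using the names introduced in this PR; it assumes a session started with `-Z incremental=<dir>` and that `sess`, `cgu_name`, `symbol_name_hash`, `files`, and `crate_name` are in scope):

```rust
// After LLVM emits a codegen unit's files, stash them in the
// incremental directory, keyed by a hash of the CGU's symbols:
save_trans_partition(sess, cgu_name, symbol_name_hash, &files);

// At the end of the run, persist the index of what we stashed:
rustc_incremental::save_work_products(sess, crate_name);

// On the next run, load_dep_graph() re-reads that index; every work
// product whose dep-node is still clean (and whose files still exist)
// is registered via insert_previous_work_product(), and codegen then
// copies the saved files back out instead of re-running LLVM.
```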

r? @michaelwoerister

Fixes #34036
Fixes #34037
Fixes #34038
bors 2016-07-28 14:17:54 -07:00, committed by GitHub
commit 54c0dcfd63
36 changed files with 1249 additions and 201 deletions


@ -9,6 +9,7 @@
// except according to those terms.
use std::fmt::Debug;
use std::sync::Arc;
macro_rules! try_opt {
($e:expr) => (
@ -45,6 +46,10 @@ pub enum DepNode<D: Clone + Debug> {
// in an extern crate.
MetaData(D),
// Represents some artifact that we save to disk. Note that these
// do not have a def-id as part of their identifier.
WorkProduct(Arc<WorkProductId>),
// Represents different phases in the compiler.
CrateReader,
CollectLanguageItems,
@ -189,6 +194,11 @@ impl<D: Clone + Debug> DepNode<D> {
TransCrate => Some(TransCrate),
TransWriteMetadata => Some(TransWriteMetadata),
LinkBinary => Some(LinkBinary),
// work product names do not need to be mapped, because
// they are always absolute.
WorkProduct(ref id) => Some(WorkProduct(id.clone())),
Hir(ref d) => op(d).map(Hir),
MetaData(ref d) => op(d).map(MetaData),
CollectItem(ref d) => op(d).map(CollectItem),
@ -229,3 +239,12 @@ impl<D: Clone + Debug> DepNode<D> {
}
}
}
/// A "work product" corresponds to a `.o` (or other) file that we
/// save in between runs. These ids do not have a DefId but rather
/// some independent path or string that persists between runs without
/// the need to be mapped or unmapped. (This ensures we can serialize
/// them even in the absence of a tcx.)
#[derive(Clone, Debug, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable)]
pub struct WorkProductId(pub String);
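
For example (a hedged sketch, not part of the diff: a hypothetical id for a codegen unit named `spike`, using `DefId` just to pin down the type parameter):

```rust
// The id is an arbitrary string that stays stable across runs:
let id = Arc::new(WorkProductId("spike".to_string()));
// ...so the corresponding dep-node needs no def-id (un)mapping:
let node: DepNode<DefId> = DepNode::WorkProduct(id.clone());
```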


@ -9,22 +9,45 @@
// except according to those terms.
use hir::def_id::DefId;
use rustc_data_structures::fnv::FnvHashMap;
use session::config::OutputType;
use std::cell::{Ref, RefCell};
use std::rc::Rc;
use std::sync::Arc;
use super::dep_node::DepNode;
use super::dep_node::{DepNode, WorkProductId};
use super::query::DepGraphQuery;
use super::raii;
use super::thread::{DepGraphThreadData, DepMessage};
#[derive(Clone)]
pub struct DepGraph {
data: Rc<DepGraphThreadData>
data: Rc<DepGraphData>
}
struct DepGraphData {
/// We send messages to the thread to let it build up the dep-graph
/// from the current run.
thread: DepGraphThreadData,
/// When we load, there may be `.o` files, cached mir, or other such
/// things available to us. If we find that they are not dirty, we
/// load the path to the file storing those work-products here into
/// this map. We can later look for and extract that data.
previous_work_products: RefCell<FnvHashMap<Arc<WorkProductId>, WorkProduct>>,
/// Work-products that we generate in this run.
work_products: RefCell<FnvHashMap<Arc<WorkProductId>, WorkProduct>>,
}
impl DepGraph {
pub fn new(enabled: bool) -> DepGraph {
DepGraph {
data: Rc::new(DepGraphThreadData::new(enabled))
data: Rc::new(DepGraphData {
thread: DepGraphThreadData::new(enabled),
previous_work_products: RefCell::new(FnvHashMap()),
work_products: RefCell::new(FnvHashMap())
})
}
}
@ -32,19 +55,19 @@ impl DepGraph {
/// then the other methods on this `DepGraph` will have no net effect.
#[inline]
pub fn enabled(&self) -> bool {
self.data.enabled()
self.data.thread.enabled()
}
pub fn query(&self) -> DepGraphQuery<DefId> {
self.data.query()
self.data.thread.query()
}
pub fn in_ignore<'graph>(&'graph self) -> raii::IgnoreTask<'graph> {
raii::IgnoreTask::new(&self.data)
raii::IgnoreTask::new(&self.data.thread)
}
pub fn in_task<'graph>(&'graph self, key: DepNode<DefId>) -> raii::DepTask<'graph> {
raii::DepTask::new(&self.data, key)
raii::DepTask::new(&self.data.thread, key)
}
pub fn with_ignore<OP,R>(&self, op: OP) -> R
@ -62,10 +85,84 @@ impl DepGraph {
}
pub fn read(&self, v: DepNode<DefId>) {
self.data.enqueue(DepMessage::Read(v));
self.data.thread.enqueue(DepMessage::Read(v));
}
pub fn write(&self, v: DepNode<DefId>) {
self.data.enqueue(DepMessage::Write(v));
self.data.thread.enqueue(DepMessage::Write(v));
}
/// Indicates that a previous work product exists for `v`. This is
/// invoked during initial start-up based on what nodes are clean
/// (and what files exist in the incr. directory).
pub fn insert_previous_work_product(&self, v: &Arc<WorkProductId>, data: WorkProduct) {
debug!("insert_previous_work_product({:?}, {:?})", v, data);
self.data.previous_work_products.borrow_mut()
.insert(v.clone(), data);
}
/// Indicates that we created the given work-product in this run
/// for `v`. This record will be preserved and loaded in the next
/// run.
pub fn insert_work_product(&self, v: &Arc<WorkProductId>, data: WorkProduct) {
debug!("insert_work_product({:?}, {:?})", v, data);
self.data.work_products.borrow_mut()
.insert(v.clone(), data);
}
/// Check whether a previous work product exists for `v` and, if
/// so, return it. Used to decide whether we can skip the work.
pub fn previous_work_product(&self, v: &Arc<WorkProductId>) -> Option<WorkProduct> {
self.data.previous_work_products.borrow()
.get(v)
.cloned()
}
/// Access the map of work-products created during this run. Only
/// used during saving of the dep-graph.
pub fn work_products(&self) -> Ref<FnvHashMap<Arc<WorkProductId>, WorkProduct>> {
self.data.work_products.borrow()
}
}
/// A "work product" is an intermediate result that we save into the
/// incremental directory for later re-use. The primary examples are
/// the object files that we save for each partition at code
/// generation time.
///
/// Each work product is associated with a dep-node, representing the
/// process that produced the work-product. If that dep-node is found
/// to be dirty when we load up, then we will delete the work-product
/// at load time. If the work-product is found to be clean, then we
/// will keep a record in the `previous_work_products` list.
///
/// In addition, work products have an associated hash. This hash is
/// an extra hash that can be used to decide if the work-product from
/// a previous compilation can be re-used (in addition to the dirty
/// edges check).
///
/// As the primary example, consider the object files we generate for
/// each partition. In the first run, we create partitions based on
/// the symbols that need to be compiled. For each partition P, we
/// hash the symbols in P and create a `WorkProduct` record associated
/// with `DepNode::TransPartition(P)`; the recorded hash is computed
/// from the set of symbols in P.
///
/// The next time we compile, if the `DepNode::TransPartition(P)` is
/// judged to be clean (which means none of the things we read to
/// generate the partition were found to be dirty), it will be loaded
/// into previous work products. We will then regenerate the set of
/// symbols in the partition P and hash them (note that new symbols
/// may be added -- for example, new monomorphizations -- even if
/// nothing in P changed!). We will compare that hash against the
/// previous hash. If it matches up, we can reuse the object file.
#[derive(Clone, Debug, RustcEncodable, RustcDecodable)]
pub struct WorkProduct {
/// Extra hash used to decide if work-product is still suitable;
/// note that this is *not* a hash of the work-product itself.
/// See documentation on `WorkProduct` type for an example.
pub input_hash: u64,
/// Saved files associated with this CGU
pub saved_files: Vec<(OutputType, String)>,
}
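
Putting the two pieces together, recording a partition's object file and later checking whether it can be reused looks roughly like this (a sketch, not part of the diff; `dep_graph`, `id`, and `symbol_hash` are assumed to be in scope):

```rust
// This run: partition `spike` was saved as `cgu-spike.o`, keyed by
// the hash of the symbols it contains.
let wp = WorkProduct {
    input_hash: symbol_hash,
    saved_files: vec![(OutputType::Object, "cgu-spike.o".to_string())],
};
dep_graph.insert_work_product(&id, wp);

// Next run: reuse only if the dep-node was clean *and* the symbol
// hash still matches (cf. `trans_reuse_previous_work_products` below).
if let Some(prev) = dep_graph.previous_work_product(&id) {
    if prev.input_hash == symbol_hash {
        // copy the saved `.o` back out instead of re-translating
    }
}
```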


@ -20,7 +20,9 @@ mod visit;
pub use self::dep_tracking_map::{DepTrackingMap, DepTrackingMapConfig};
pub use self::dep_node::DepNode;
pub use self::dep_node::WorkProductId;
pub use self::graph::DepGraph;
pub use self::graph::WorkProduct;
pub use self::query::DepGraphQuery;
pub use self::visit::visit_all_items_in_krate;
pub use self::raii::DepTask;


@ -61,7 +61,7 @@ pub enum DebugInfoLevel {
FullDebugInfo,
}
#[derive(Clone, Copy, PartialEq, Eq, Hash)]
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug, RustcEncodable, RustcDecodable)]
pub enum OutputType {
Bitcode,
Assembly,
@ -105,6 +105,17 @@ impl OutputType {
OutputType::DepInfo => "dep-info",
}
}
pub fn extension(&self) -> &'static str {
match *self {
OutputType::Bitcode => "bc",
OutputType::Assembly => "s",
OutputType::LlvmAssembly => "ll",
OutputType::Object => "o",
OutputType::DepInfo => "d",
OutputType::Exe => "",
}
}
}
#[derive(Clone)]
@ -215,15 +226,7 @@ impl OutputFilenames {
flavor: OutputType,
codegen_unit_name: Option<&str>)
-> PathBuf {
let extension = match flavor {
OutputType::Bitcode => "bc",
OutputType::Assembly => "s",
OutputType::LlvmAssembly => "ll",
OutputType::Object => "o",
OutputType::DepInfo => "d",
OutputType::Exe => "",
};
let extension = flavor.extension();
self.temp_path_ext(extension, codegen_unit_name)
}


@ -80,7 +80,7 @@ pub struct Session {
// forms a unique global identifier for the crate. It is used to allow
// multiple crates with the same name to coexist. See the
// trans::back::symbol_names module for more information.
pub crate_disambiguator: Cell<ast::Name>,
pub crate_disambiguator: RefCell<token::InternedString>,
pub features: RefCell<feature_gate::Features>,
/// The maximum recursion limit for potentially infinitely recursive
@ -106,6 +106,9 @@ pub struct Session {
}
impl Session {
pub fn local_crate_disambiguator(&self) -> token::InternedString {
self.crate_disambiguator.borrow().clone()
}
pub fn struct_span_warn<'a, S: Into<MultiSpan>>(&'a self,
sp: S,
msg: &str)
@ -438,7 +441,7 @@ pub fn build_session_(sopts: config::Options,
plugin_attributes: RefCell::new(Vec::new()),
crate_types: RefCell::new(Vec::new()),
dependency_formats: RefCell::new(FnvHashMap()),
crate_disambiguator: Cell::new(token::intern("")),
crate_disambiguator: RefCell::new(token::intern("").as_str()),
features: RefCell::new(feature_gate::Features::new()),
recursion_limit: Cell::new(64),
next_node_id: Cell::new(1),


@ -504,7 +504,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
pub fn crate_disambiguator(self, cnum: ast::CrateNum) -> token::InternedString {
if cnum == LOCAL_CRATE {
self.sess.crate_disambiguator.get().as_str()
self.sess.local_crate_disambiguator()
} else {
self.sess.cstore.crate_disambiguator(cnum)
}


@ -10,6 +10,8 @@
use std::path::{self, Path, PathBuf};
use std::ffi::OsString;
use std::fs;
use std::io;
// Unfortunately, on windows, it looks like msvcrt.dll is silently translating
// verbatim paths under the hood to non-verbatim paths! This manifests itself as
@ -53,3 +55,15 @@ pub fn fix_windows_verbatim_for_gcc(p: &Path) -> PathBuf {
_ => p.to_path_buf(),
}
}
/// Copy `p` into `q`, preferring to use hard-linking if possible. If
/// `q` already exists, it is removed first.
pub fn link_or_copy<P: AsRef<Path>, Q: AsRef<Path>>(p: P, q: Q) -> io::Result<()> {
let p = p.as_ref();
let q = q.as_ref();
if q.exists() {
try!(fs::remove_file(&q));
}
fs::hard_link(p, q)
.or_else(|_| fs::copy(p, q).map(|_| ()))
}
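
A usage sketch (hypothetical paths; the copy fallback covers cases where hard-linking fails, e.g. across filesystems):

```rust
use rustc::util::fs::link_or_copy;

// Cheaply "save" a freshly emitted object file into the incremental
// directory; any existing file at the destination is removed first.
// (Inside a function returning io::Result<()>.)
link_or_copy("build/foo.0.o", "incr/cgu-foo.o")?;
```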


@ -88,7 +88,7 @@ pub fn compile_input(sess: &Session,
// We need nested scopes here, because the intermediate results can keep
// large chunks of memory alive and we want to free them as soon as
// possible to keep the peak memory usage low
let (outputs, trans) = {
let (outputs, trans, crate_name) = {
let krate = match phase_1_parse_input(sess, cfg, input) {
Ok(krate) => krate,
Err(mut parse_error) => {
@ -113,13 +113,13 @@ pub fn compile_input(sess: &Session,
};
let outputs = build_output_filenames(input, outdir, output, &krate.attrs, sess);
let id = link::find_crate_name(Some(sess), &krate.attrs, input);
let crate_name = link::find_crate_name(Some(sess), &krate.attrs, input);
let ExpansionResult { expanded_crate, defs, analysis, resolutions, mut hir_forest } = {
phase_2_configure_and_expand(
sess, &cstore, krate, &id, addl_plugins, control.make_glob_map,
sess, &cstore, krate, &crate_name, addl_plugins, control.make_glob_map,
|expanded_crate| {
let mut state = CompileState::state_after_expand(
input, sess, outdir, output, &cstore, expanded_crate, &id,
input, sess, outdir, output, &cstore, expanded_crate, &crate_name,
);
controller_entry_point!(after_expand, sess, state, Ok(()));
Ok(())
@ -127,7 +127,7 @@ pub fn compile_input(sess: &Session,
)?
};
write_out_deps(sess, &outputs, &id);
write_out_deps(sess, &outputs, &crate_name);
let arenas = ty::CtxtArenas::new();
@ -151,7 +151,7 @@ pub fn compile_input(sess: &Session,
&resolutions,
&expanded_crate,
&hir_map.krate(),
&id),
&crate_name),
Ok(()));
}
@ -171,7 +171,7 @@ pub fn compile_input(sess: &Session,
analysis,
resolutions,
&arenas,
&id,
&crate_name,
|tcx, mir_map, analysis, result| {
{
// Eventually, we will want to track plugins.
@ -186,7 +186,7 @@ pub fn compile_input(sess: &Session,
&analysis,
mir_map.as_ref(),
tcx,
&id);
&crate_name);
(control.after_analysis.callback)(&mut state);
if control.after_analysis.stop == Compilation::Stop {
@ -212,11 +212,11 @@ pub fn compile_input(sess: &Session,
// Discard interned strings as they are no longer required.
token::clear_ident_interner();
Ok((outputs, trans))
Ok((outputs, trans, crate_name.clone()))
})??
};
let phase5_result = phase_5_run_llvm_passes(sess, &trans, &outputs);
let phase5_result = phase_5_run_llvm_passes(sess, &crate_name, &trans, &outputs);
controller_entry_point!(after_llvm,
sess,
@ -566,7 +566,8 @@ pub fn phase_2_configure_and_expand<'a, F>(sess: &Session,
});
*sess.crate_types.borrow_mut() = collect_crate_types(sess, &krate.attrs);
sess.crate_disambiguator.set(token::intern(&compute_crate_disambiguator(sess)));
*sess.crate_disambiguator.borrow_mut() =
token::intern(&compute_crate_disambiguator(sess)).as_str();
time(time_passes, "recursion limit", || {
middle::recursion_limit::update_recursion_limit(sess, &krate);
@ -1019,6 +1020,7 @@ pub fn phase_4_translate_to_llvm<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
/// Run LLVM itself, producing a bitcode file, assembly file or object file
/// as a side effect.
pub fn phase_5_run_llvm_passes(sess: &Session,
crate_name: &str,
trans: &trans::CrateTranslation,
outputs: &OutputFilenames) -> CompileResult {
if sess.opts.cg.no_integrated_as {
@ -1040,6 +1042,10 @@ pub fn phase_5_run_llvm_passes(sess: &Session,
|| write::run_passes(sess, trans, &sess.opts.output_types, outputs));
}
time(sess.time_passes(),
"serialize work products",
move || rustc_incremental::save_work_products(sess, crate_name));
if sess.err_count() > 0 {
Err(sess.err_count())
} else {
@ -1063,14 +1069,14 @@ fn escape_dep_filename(filename: &str) -> String {
filename.replace(" ", "\\ ")
}
fn write_out_deps(sess: &Session, outputs: &OutputFilenames, id: &str) {
fn write_out_deps(sess: &Session, outputs: &OutputFilenames, crate_name: &str) {
let mut out_filenames = Vec::new();
for output_type in sess.opts.output_types.keys() {
let file = outputs.path(*output_type);
match *output_type {
OutputType::Exe => {
for output in sess.crate_types.borrow().iter() {
let p = link::filename_for_input(sess, *output, id, outputs);
let p = link::filename_for_input(sess, *output, crate_name, outputs);
out_filenames.push(p);
}
}


@ -36,7 +36,7 @@ impl<'a, 'tcx> SvhCalculate for TyCtxt<'a, 'tcx, 'tcx> {
// to ensure it is not incorporating implementation artifacts into
// the hash that are not otherwise visible.)
let crate_disambiguator = self.sess.crate_disambiguator.get();
let crate_disambiguator = self.sess.local_crate_disambiguator();
let krate = self.map.krate();
// FIXME: this should use SHA1, not SipHash. SipHash is not built to
@ -47,10 +47,10 @@ impl<'a, 'tcx> SvhCalculate for TyCtxt<'a, 'tcx, 'tcx> {
// FIXME(#32753) -- at (*) we `to_le` for endianness, but is
// this enough, and does it matter anyway?
"crate_disambiguator".hash(&mut state);
crate_disambiguator.as_str().len().to_le().hash(&mut state); // (*)
crate_disambiguator.as_str().hash(&mut state);
crate_disambiguator.len().to_le().hash(&mut state); // (*)
crate_disambiguator.hash(&mut state);
debug!("crate_disambiguator: {:?}", crate_disambiguator.as_str());
debug!("crate_disambiguator: {:?}", crate_disambiguator);
debug!("state: {:?}", state);
{
@ -119,6 +119,7 @@ mod svh_visitor {
use rustc::ty::TyCtxt;
use rustc::hir;
use rustc::hir::*;
use rustc::hir::map::DefPath;
use rustc::hir::intravisit as visit;
use rustc::hir::intravisit::{Visitor, FnKind};
@ -135,6 +136,15 @@ mod svh_visitor {
-> Self {
StrictVersionHashVisitor { st: st, tcx: tcx }
}
fn hash_def_path(&mut self, path: &DefPath) {
self.tcx.crate_name(path.krate).hash(self.st);
self.tcx.crate_disambiguator(path.krate).hash(self.st);
for data in &path.data {
data.data.as_interned_str().hash(self.st);
data.disambiguator.hash(self.st);
}
}
}
// To off-load the bulk of the hash-computation on #[derive(Hash)],
@ -289,19 +299,21 @@ mod svh_visitor {
impl<'a, 'tcx> Visitor<'a> for StrictVersionHashVisitor<'a, 'tcx> {
fn visit_nested_item(&mut self, item: ItemId) {
debug!("visit_nested_item: {:?} st={:?}", item, self.st);
let def_path = self.tcx.map.def_path_from_id(item.id);
def_path.hash(self.st);
let def_path = self.tcx.map.def_path_from_id(item.id).unwrap();
debug!("visit_nested_item: def_path={:?} st={:?}", def_path, self.st);
self.hash_def_path(&def_path);
}
fn visit_variant_data(&mut self, s: &'a VariantData, name: Name,
g: &'a Generics, _: NodeId, _: Span) {
debug!("visit_variant_data: st={:?}", self.st);
SawStructDef(name.as_str()).hash(self.st);
visit::walk_generics(self, g);
visit::walk_struct_def(self, s)
}
fn visit_variant(&mut self, v: &'a Variant, g: &'a Generics, item_id: NodeId) {
debug!("visit_variant: st={:?}", self.st);
SawVariant.hash(self.st);
// walk_variant does not call walk_generics, so do it here.
visit::walk_generics(self, g);
@ -323,14 +335,17 @@ mod svh_visitor {
// pattern, please move that method up above this comment.)
fn visit_name(&mut self, _: Span, name: Name) {
debug!("visit_name: st={:?}", self.st);
SawIdent(name.as_str()).hash(self.st);
}
fn visit_lifetime(&mut self, l: &'a Lifetime) {
debug!("visit_lifetime: st={:?}", self.st);
SawLifetime(l.name.as_str()).hash(self.st);
}
fn visit_lifetime_def(&mut self, l: &'a LifetimeDef) {
debug!("visit_lifetime_def: st={:?}", self.st);
SawLifetimeDef(l.lifetime.name.as_str()).hash(self.st);
}
@ -340,14 +355,18 @@ mod svh_visitor {
// that a change to a crate body will require downstream
// crates to be recompiled.
fn visit_expr(&mut self, ex: &'a Expr) {
debug!("visit_expr: st={:?}", self.st);
SawExpr(saw_expr(&ex.node)).hash(self.st); visit::walk_expr(self, ex)
}
fn visit_stmt(&mut self, s: &'a Stmt) {
debug!("visit_stmt: st={:?}", self.st);
SawStmt(saw_stmt(&s.node)).hash(self.st); visit::walk_stmt(self, s)
}
fn visit_foreign_item(&mut self, i: &'a ForeignItem) {
debug!("visit_foreign_item: st={:?}", self.st);
// FIXME (#14132) ideally we would incorporate privacy (or
// perhaps reachability) somewhere here, so foreign items
// that do not leak into downstream crates would not be
@ -357,6 +376,7 @@ mod svh_visitor {
fn visit_item(&mut self, i: &'a Item) {
debug!("visit_item: {:?} st={:?}", i, self.st);
// FIXME (#14132) ideally would incorporate reachability
// analysis somewhere here, so items that never leak into
// downstream crates (e.g. via monomorphisation or
@ -365,55 +385,68 @@ mod svh_visitor {
}
fn visit_mod(&mut self, m: &'a Mod, _s: Span, _n: NodeId) {
debug!("visit_mod: st={:?}", self.st);
SawMod.hash(self.st); visit::walk_mod(self, m)
}
fn visit_decl(&mut self, d: &'a Decl) {
debug!("visit_decl: st={:?}", self.st);
SawDecl.hash(self.st); visit::walk_decl(self, d)
}
fn visit_ty(&mut self, t: &'a Ty) {
debug!("visit_ty: st={:?}", self.st);
SawTy.hash(self.st); visit::walk_ty(self, t)
}
fn visit_generics(&mut self, g: &'a Generics) {
debug!("visit_generics: st={:?}", self.st);
SawGenerics.hash(self.st); visit::walk_generics(self, g)
}
fn visit_fn(&mut self, fk: FnKind<'a>, fd: &'a FnDecl,
b: &'a Block, s: Span, _: NodeId) {
debug!("visit_fn: st={:?}", self.st);
SawFn.hash(self.st); visit::walk_fn(self, fk, fd, b, s)
}
fn visit_trait_item(&mut self, ti: &'a TraitItem) {
debug!("visit_trait_item: st={:?}", self.st);
SawTraitItem.hash(self.st); visit::walk_trait_item(self, ti)
}
fn visit_impl_item(&mut self, ii: &'a ImplItem) {
debug!("visit_impl_item: st={:?}", self.st);
SawImplItem.hash(self.st); visit::walk_impl_item(self, ii)
}
fn visit_struct_field(&mut self, s: &'a StructField) {
debug!("visit_struct_field: st={:?}", self.st);
SawStructField.hash(self.st); visit::walk_struct_field(self, s)
}
fn visit_path(&mut self, path: &'a Path, _: ast::NodeId) {
debug!("visit_path: st={:?}", self.st);
SawPath.hash(self.st); visit::walk_path(self, path)
}
fn visit_block(&mut self, b: &'a Block) {
debug!("visit_block: st={:?}", self.st);
SawBlock.hash(self.st); visit::walk_block(self, b)
}
fn visit_pat(&mut self, p: &'a Pat) {
debug!("visit_pat: st={:?}", self.st);
SawPat.hash(self.st); visit::walk_pat(self, p)
}
fn visit_local(&mut self, l: &'a Local) {
debug!("visit_local: st={:?}", self.st);
SawLocal.hash(self.st); visit::walk_local(self, l)
}
fn visit_arm(&mut self, a: &'a Arm) {
debug!("visit_arm: st={:?}", self.st);
SawArm.hash(self.st); visit::walk_arm(self, a)
}
}


@ -19,6 +19,7 @@
html_root_url = "https://doc.rust-lang.org/nightly/")]
#![cfg_attr(not(stage0), deny(warnings))]
#![feature(question_mark)]
#![feature(rustc_private)]
#![feature(staged_api)]
@ -40,3 +41,6 @@ pub use assert_dep_graph::assert_dep_graph;
pub use calculate_svh::SvhCalculate;
pub use persist::load_dep_graph;
pub use persist::save_dep_graph;
pub use persist::save_trans_partition;
pub use persist::save_work_products;
pub use persist::in_incr_comp_dir;


@ -10,8 +10,9 @@
//! The data that we will serialize and deserialize.
use rustc::dep_graph::DepNode;
use rustc::dep_graph::{DepNode, WorkProduct, WorkProductId};
use rustc::hir::def_id::DefIndex;
use std::sync::Arc;
use super::directory::DefPathIndex;
@ -55,6 +56,15 @@ pub struct SerializedHash {
pub hash: u64,
}
#[derive(Debug, RustcEncodable, RustcDecodable)]
pub struct SerializedWorkProduct {
/// node that produced the work-product
pub id: Arc<WorkProductId>,
/// work-product data itself
pub work_product: WorkProduct,
}
/// Data for use when downstream crates get recompiled.
#[derive(Debug, RustcEncodable, RustcDecodable)]
pub struct SerializedMetadataHashes {


@ -14,12 +14,13 @@ use rbml::Error;
use rbml::opaque::Decoder;
use rustc::dep_graph::DepNode;
use rustc::hir::def_id::DefId;
use rustc::session::Session;
use rustc::ty::TyCtxt;
use rustc_data_structures::fnv::FnvHashSet;
use rustc_serialize::Decodable as RustcDecodable;
use std::io::Read;
use std::fs::File;
use std::path::Path;
use std::fs::{self, File};
use std::path::{Path};
use super::data::*;
use super::directory::*;
@ -38,18 +39,43 @@ type CleanEdges = Vec<(DepNode<DefId>, DepNode<DefId>)>;
/// actually it doesn't matter all that much.) See `README.md` for
/// more general overview.
pub fn load_dep_graph<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) {
let _ignore = tcx.dep_graph.in_ignore();
if tcx.sess.opts.incremental.is_none() {
return;
}
if let Some(dep_graph) = dep_graph_path(tcx) {
// FIXME(#32754) lock file?
load_dep_graph_if_exists(tcx, &dep_graph);
dirty_clean::check_dirty_clean_annotations(tcx);
let _ignore = tcx.dep_graph.in_ignore();
load_dep_graph_if_exists(tcx);
dirty_clean::check_dirty_clean_annotations(tcx);
}
fn load_dep_graph_if_exists<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) {
let dep_graph_path = dep_graph_path(tcx).unwrap();
let dep_graph_data = match load_data(tcx.sess, &dep_graph_path) {
Some(p) => p,
None => return // no file
};
let work_products_path = tcx_work_products_path(tcx).unwrap();
let work_products_data = match load_data(tcx.sess, &work_products_path) {
Some(p) => p,
None => return // no file
};
match decode_dep_graph(tcx, &dep_graph_data, &work_products_data) {
Ok(()) => return,
Err(err) => {
tcx.sess.warn(
&format!("decoding error in dep-graph from `{}` and `{}`: {}",
dep_graph_path.display(),
work_products_path.display(),
err));
}
}
}
pub fn load_dep_graph_if_exists<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, path: &Path) {
fn load_data(sess: &Session, path: &Path) -> Option<Vec<u8>> {
if !path.exists() {
return;
return None;
}
let mut data = vec![];
@ -57,31 +83,30 @@ pub fn load_dep_graph_if_exists<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, path: &Pa
File::open(path)
.and_then(|mut file| file.read_to_end(&mut data))
{
Ok(_) => { }
Ok(_) => {
Some(data)
}
Err(err) => {
tcx.sess.err(
sess.err(
&format!("could not load dep-graph from `{}`: {}",
path.display(), err));
return;
None
}
}
match decode_dep_graph(tcx, &data) {
Ok(dirty) => dirty,
Err(err) => {
bug!("decoding error in dep-graph from `{}`: {}", path.display(), err);
}
}
}
/// Decode the dep graph and load the edges/nodes that are still clean
/// into `tcx.dep_graph`.
pub fn decode_dep_graph<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
data: &[u8])
dep_graph_data: &[u8],
work_products_data: &[u8])
-> Result<(), Error>
{
// Deserialize the directory and dep-graph.
let mut decoder = Decoder::new(data, 0);
let directory = try!(DefIdDirectory::decode(&mut decoder));
let serialized_dep_graph = try!(SerializedDepGraph::decode(&mut decoder));
let mut dep_graph_decoder = Decoder::new(dep_graph_data, 0);
let directory = try!(DefIdDirectory::decode(&mut dep_graph_decoder));
let serialized_dep_graph = try!(SerializedDepGraph::decode(&mut dep_graph_decoder));
debug!("decode_dep_graph: directory = {:#?}", directory);
debug!("decode_dep_graph: serialized_dep_graph = {:#?}", serialized_dep_graph);
@ -121,12 +146,18 @@ pub fn decode_dep_graph<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
// Add nodes and edges that are not dirty into our main graph.
let dep_graph = tcx.dep_graph.clone();
for (source, target) in clean_edges.into_iter().chain(clean_nodes) {
let _task = dep_graph.in_task(target.clone());
dep_graph.read(source.clone());
debug!("decode_dep_graph: clean edge: {:?} -> {:?}", source, target);
let _task = dep_graph.in_task(target);
dep_graph.read(source);
}
// Add in work-products that are still clean, and delete those that are
// dirty.
let mut work_product_decoder = Decoder::new(work_products_data, 0);
let work_products = try!(<Vec<SerializedWorkProduct>>::decode(&mut work_product_decoder));
reconcile_work_products(tcx, work_products, &dirty_nodes);
Ok(())
}
@ -141,9 +172,9 @@ fn initial_dirty_nodes<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
match hash.node.map_def(|&i| retraced.def_id(i)) {
Some(dep_node) => {
let current_hash = hcx.hash(&dep_node).unwrap();
debug!("initial_dirty_nodes: hash of {:?} is {:?}, was {:?}",
dep_node, current_hash, hash.hash);
if current_hash != hash.hash {
debug!("initial_dirty_nodes: {:?} is dirty as hash is {:?}, was {:?}",
dep_node, current_hash, hash.hash);
dirty_nodes.insert(dep_node);
}
}
@ -177,6 +208,8 @@ fn compute_clean_edges(serialized_edges: &[(SerializedEdge)],
clean_edges.push((source, target))
} else {
// source removed, target must be dirty
debug!("compute_clean_edges: {:?} dirty because {:?} no longer exists",
target, serialized_source);
dirty_nodes.insert(target);
}
} else {
@ -213,3 +246,51 @@ fn compute_clean_edges(serialized_edges: &[(SerializedEdge)],
clean_edges
}
/// Go through the list of work-products produced in the previous run.
/// Delete any whose nodes have been found to be dirty or which are
/// otherwise no longer applicable.
fn reconcile_work_products<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
work_products: Vec<SerializedWorkProduct>,
dirty_nodes: &DirtyNodes) {
debug!("reconcile_work_products({:?})", work_products);
for swp in work_products {
let dep_node = DepNode::WorkProduct(swp.id.clone());
if dirty_nodes.contains(&dep_node) {
debug!("reconcile_work_products: dep-node for {:?} is dirty", swp);
delete_dirty_work_product(tcx, swp);
} else {
let all_files_exist =
swp.work_product
.saved_files
.iter()
.all(|&(_, ref file_name)| {
let path = in_incr_comp_dir(tcx.sess, &file_name).unwrap();
path.exists()
});
if all_files_exist {
debug!("reconcile_work_products: all files for {:?} exist", swp);
tcx.dep_graph.insert_previous_work_product(&swp.id, swp.work_product);
} else {
debug!("reconcile_work_products: some file for {:?} does not exist", swp);
delete_dirty_work_product(tcx, swp);
}
}
}
}
fn delete_dirty_work_product(tcx: TyCtxt,
swp: SerializedWorkProduct) {
debug!("delete_dirty_work_product({:?})", swp);
for &(_, ref file_name) in &swp.work_product.saved_files {
let path = in_incr_comp_dir(tcx.sess, file_name).unwrap();
match fs::remove_file(&path) {
Ok(()) => { }
Err(err) => {
tcx.sess.warn(
&format!("file-system error deleting outdated file `{}`: {}",
path.display(), err));
}
}
}
}


@ -19,6 +19,10 @@ mod hash;
mod load;
mod save;
mod util;
mod work_product;
pub use self::load::load_dep_graph;
pub use self::save::save_dep_graph;
pub use self::save::save_work_products;
pub use self::work_product::save_trans_partition;
pub use self::util::in_incr_comp_dir;


@ -11,6 +11,7 @@
use rbml::opaque::Encoder;
use rustc::dep_graph::DepNode;
use rustc::middle::cstore::LOCAL_CRATE;
use rustc::session::Session;
use rustc::ty::TyCtxt;
use rustc_serialize::{Encodable as RustcEncodable};
use std::hash::{Hasher, SipHasher};
@ -24,19 +25,26 @@ use super::hash::*;
use super::util::*;
pub fn save_dep_graph<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) {
debug!("save_dep_graph()");
let _ignore = tcx.dep_graph.in_ignore();
let sess = tcx.sess;
let mut hcx = HashContext::new(tcx);
save_in(&mut hcx, dep_graph_path(tcx), encode_dep_graph);
save_in(&mut hcx, metadata_hash_path(tcx, LOCAL_CRATE), encode_metadata_hashes);
save_in(sess, dep_graph_path(tcx), |e| encode_dep_graph(&mut hcx, e));
save_in(sess, metadata_hash_path(tcx, LOCAL_CRATE), |e| encode_metadata_hashes(&mut hcx, e));
}
fn save_in<'a, 'tcx, F>(hcx: &mut HashContext<'a, 'tcx>,
opt_path_buf: Option<PathBuf>,
encode: F)
where F: FnOnce(&mut HashContext<'a, 'tcx>, &mut Encoder) -> io::Result<()>
{
let tcx = hcx.tcx;
pub fn save_work_products(sess: &Session, local_crate_name: &str) {
debug!("save_work_products()");
let _ignore = sess.dep_graph.in_ignore();
let path = sess_work_products_path(sess, local_crate_name);
save_in(sess, path, |e| encode_work_products(sess, e));
}
fn save_in<F>(sess: &Session,
opt_path_buf: Option<PathBuf>,
encode: F)
where F: FnOnce(&mut Encoder) -> io::Result<()>
{
let path_buf = match opt_path_buf {
Some(p) => p,
None => return
@ -49,7 +57,7 @@ fn save_in<'a, 'tcx, F>(hcx: &mut HashContext<'a, 'tcx>,
match fs::remove_file(&path_buf) {
Ok(()) => { }
Err(err) => {
tcx.sess.err(
sess.err(
&format!("unable to delete old dep-graph at `{}`: {}",
path_buf.display(), err));
return;
@ -59,10 +67,10 @@ fn save_in<'a, 'tcx, F>(hcx: &mut HashContext<'a, 'tcx>,
// generate the data in a memory buffer
let mut wr = Cursor::new(Vec::new());
match encode(hcx, &mut Encoder::new(&mut wr)) {
match encode(&mut Encoder::new(&mut wr)) {
Ok(()) => { }
Err(err) => {
tcx.sess.err(
sess.err(
&format!("could not encode dep-graph to `{}`: {}",
path_buf.display(), err));
return;
@ -77,7 +85,7 @@ fn save_in<'a, 'tcx, F>(hcx: &mut HashContext<'a, 'tcx>,
{
Ok(_) => { }
Err(err) => {
tcx.sess.err(
sess.err(
&format!("failed to write dep-graph to `{}`: {}",
path_buf.display(), err));
return;
@ -192,3 +200,22 @@ pub fn encode_metadata_hashes<'a, 'tcx>(hcx: &mut HashContext<'a, 'tcx>,
Ok(())
}
pub fn encode_work_products(sess: &Session,
encoder: &mut Encoder)
-> io::Result<()>
{
let work_products: Vec<_> =
sess.dep_graph.work_products()
.iter()
.map(|(id, work_product)| {
SerializedWorkProduct {
id: id.clone(),
work_product: work_product.clone(),
}
})
.collect();
work_products.encode(encoder)
}


@ -9,6 +9,7 @@
// except according to those terms.
use rustc::middle::cstore::LOCAL_CRATE;
use rustc::session::Session;
use rustc::ty::TyCtxt;
use std::fs;
@ -17,33 +18,56 @@ use std::path::{Path, PathBuf};
use syntax::ast;
pub fn dep_graph_path(tcx: TyCtxt) -> Option<PathBuf> {
path(tcx, LOCAL_CRATE, "local")
tcx_path(tcx, LOCAL_CRATE, "local")
}
pub fn metadata_hash_path(tcx: TyCtxt, cnum: ast::CrateNum) -> Option<PathBuf> {
path(tcx, cnum, "metadata")
tcx_path(tcx, cnum, "metadata")
}
fn path(tcx: TyCtxt, cnum: ast::CrateNum, suffix: &str) -> Option<PathBuf> {
pub fn tcx_work_products_path(tcx: TyCtxt) -> Option<PathBuf> {
let crate_name = tcx.crate_name(LOCAL_CRATE);
sess_work_products_path(tcx.sess, &crate_name)
}
pub fn sess_work_products_path(sess: &Session,
local_crate_name: &str)
-> Option<PathBuf> {
let crate_disambiguator = sess.local_crate_disambiguator();
path(sess, local_crate_name, &crate_disambiguator, "work-products")
}
pub fn in_incr_comp_dir(sess: &Session, file_name: &str) -> Option<PathBuf> {
sess.opts.incremental.as_ref().map(|incr_dir| incr_dir.join(file_name))
}
fn tcx_path(tcx: TyCtxt,
cnum: ast::CrateNum,
middle: &str)
-> Option<PathBuf> {
path(tcx.sess, &tcx.crate_name(cnum), &tcx.crate_disambiguator(cnum), middle)
}
fn path(sess: &Session,
crate_name: &str,
crate_disambiguator: &str,
middle: &str)
-> Option<PathBuf> {
// For now, just save/load dep-graph from
// directory/dep_graph.rbml
tcx.sess.opts.incremental.as_ref().and_then(|incr_dir| {
sess.opts.incremental.as_ref().and_then(|incr_dir| {
match create_dir_racy(&incr_dir) {
Ok(()) => {}
Err(err) => {
tcx.sess.err(
sess.err(
&format!("could not create the directory `{}`: {}",
incr_dir.display(), err));
return None;
}
}
let crate_name = tcx.crate_name(cnum);
let crate_disambiguator = tcx.crate_disambiguator(cnum);
let file_name = format!("{}-{}.{}.bin",
crate_name,
crate_disambiguator,
suffix);
let file_name = format!("{}-{}.{}.bin", crate_name, crate_disambiguator, middle);
Some(incr_dir.join(file_name))
})
}
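
For illustration, with a hypothetical crate name `foo` and disambiguator `d9a2`, the per-crate files all follow the same `{crate_name}-{disambiguator}.{middle}.bin` pattern:

```rust
// dep-graph:        foo-d9a2.local.bin
// metadata hashes:  foo-d9a2.metadata.bin
// work products:    foo-d9a2.work-products.bin
let file_name = format!("{}-{}.{}.bin", "foo", "d9a2", "work-products");
assert_eq!(file_name, "foo-d9a2.work-products.bin");
```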


@ -0,0 +1,63 @@
// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! This module contains code for saving intermediate work-products.
use persist::util::*;
use rustc::dep_graph::{WorkProduct, WorkProductId};
use rustc::session::Session;
use rustc::session::config::OutputType;
use rustc::util::fs::link_or_copy;
use std::path::PathBuf;
use std::sync::Arc;
pub fn save_trans_partition(sess: &Session,
cgu_name: &str,
partition_hash: u64,
files: &[(OutputType, PathBuf)]) {
debug!("save_trans_partition({:?},{},{:?})",
cgu_name,
partition_hash,
files);
if sess.opts.incremental.is_none() {
return;
}
let work_product_id = Arc::new(WorkProductId(cgu_name.to_string()));
let saved_files: Option<Vec<_>> =
files.iter()
.map(|&(kind, ref path)| {
let file_name = format!("cgu-{}.{}", cgu_name, kind.extension());
let path_in_incr_dir = in_incr_comp_dir(sess, &file_name).unwrap();
match link_or_copy(path, &path_in_incr_dir) {
Ok(_) => Some((kind, file_name)),
Err(err) => {
sess.warn(&format!("error copying object file `{}` \
to incremental directory as `{}`: {}",
path.display(),
path_in_incr_dir.display(),
err));
None
}
}
})
.collect();
let saved_files = match saved_files {
Some(v) => v,
None => return,
};
let work_product = WorkProduct {
input_hash: partition_hash,
saved_files: saved_files,
};
sess.dep_graph.insert_work_product(&work_product_id, work_product);
}


@ -243,7 +243,7 @@ impl<'a> CrateReader<'a> {
// Check for (potential) conflicts with the local crate
if self.local_crate_name == crate_name &&
self.sess.crate_disambiguator.get().as_str() == disambiguator {
self.sess.local_crate_disambiguator() == disambiguator {
span_fatal!(self.sess, span, E0519,
"the current crate is indistinguishable from one of its \
dependencies: it has the same crate-name `{}` and was \


@ -1893,7 +1893,7 @@ fn encode_metadata_inner(rbml_w: &mut Encoder,
encode_crate_name(rbml_w, &ecx.link_meta.crate_name);
encode_crate_triple(rbml_w, &ecx.tcx.sess.opts.target_triple);
encode_hash(rbml_w, &ecx.link_meta.crate_hash);
encode_crate_disambiguator(rbml_w, &ecx.tcx.sess.crate_disambiguator.get().as_str());
encode_crate_disambiguator(rbml_w, &ecx.tcx.sess.local_crate_disambiguator());
encode_dylib_dependency_formats(rbml_w, &ecx);
encode_panic_strategy(rbml_w, &ecx);


@ -0,0 +1,149 @@
// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! This pass is only used for UNIT TESTS related to incremental
//! compilation. It tests whether a particular `.o` file will be re-used
//! from a previous compilation or whether it must be regenerated.
//!
//! The user adds annotations to the crate of the following form:
//!
//! ```
//! #![rustc_partition_reused(module="spike", cfg="rpass2")]
//! #![rustc_partition_translated(module="spike-x", cfg="rpass2")]
//! ```
//!
//! The first indicates (in the cfg `rpass2`) that `spike.o` will be
//! reused, the second that `spike-x.o` will be recreated. If these
//! annotations are inaccurate, errors are reported.
//!
//! The reason that we use `cfg=...` and not `#[cfg_attr]` is so that
//! the HIR doesn't change as a result of the annotations, which might
//! perturb the reuse results.
use rustc::ty::TyCtxt;
use syntax::ast;
use syntax::attr::AttrMetaMethods;
use syntax::parse::token::InternedString;
use {ModuleSource, ModuleTranslation};
const PARTITION_REUSED: &'static str = "rustc_partition_reused";
const PARTITION_TRANSLATED: &'static str = "rustc_partition_translated";
const MODULE: &'static str = "module";
const CFG: &'static str = "cfg";
#[derive(Debug, PartialEq)]
enum Disposition { Reused, Translated }
pub fn assert_module_sources<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
modules: &[ModuleTranslation]) {
let _ignore = tcx.dep_graph.in_ignore();
if tcx.sess.opts.incremental.is_none() {
return;
}
let ams = AssertModuleSource { tcx: tcx, modules: modules };
for attr in &tcx.map.krate().attrs {
ams.check_attr(attr);
}
}
struct AssertModuleSource<'a, 'tcx: 'a> {
tcx: TyCtxt<'a, 'tcx, 'tcx>,
modules: &'a [ModuleTranslation],
}
impl<'a, 'tcx> AssertModuleSource<'a, 'tcx> {
fn check_attr(&self, attr: &ast::Attribute) {
let disposition = if attr.check_name(PARTITION_REUSED) {
Disposition::Reused
} else if attr.check_name(PARTITION_TRANSLATED) {
Disposition::Translated
} else {
return;
};
if !self.check_config(attr) {
debug!("check_attr: config does not match, ignoring attr");
return;
}
let mname = self.field(attr, MODULE);
let mtrans = self.modules.iter().find(|mtrans| &mtrans.name[..] == &mname[..]);
let mtrans = match mtrans {
Some(m) => m,
None => {
debug!("module name `{}` not found amongst:", mname);
for mtrans in self.modules {
debug!("module named `{}` with disposition {:?}",
mtrans.name,
self.disposition(mtrans));
}
self.tcx.sess.span_err(
attr.span,
&format!("no module named `{}`", mname));
return;
}
};
let mtrans_disposition = self.disposition(mtrans);
if disposition != mtrans_disposition {
self.tcx.sess.span_err(
attr.span,
&format!("expected module named `{}` to be {:?} but is {:?}",
mname,
disposition,
mtrans_disposition));
}
}
fn disposition(&self, mtrans: &ModuleTranslation) -> Disposition {
match mtrans.source {
ModuleSource::Preexisting(_) => Disposition::Reused,
ModuleSource::Translated(_) => Disposition::Translated,
}
}
fn field(&self, attr: &ast::Attribute, name: &str) -> InternedString {
for item in attr.meta_item_list().unwrap_or(&[]) {
if item.check_name(name) {
if let Some(value) = item.value_str() {
return value;
} else {
self.tcx.sess.span_fatal(
item.span,
&format!("associated value expected for `{}`", name));
}
}
}
self.tcx.sess.span_fatal(
attr.span,
&format!("no field `{}`", name));
}
/// Scan for a `cfg="foo"` attribute and check whether we have a
/// cfg flag called `foo`.
fn check_config(&self, attr: &ast::Attribute) -> bool {
let config = &self.tcx.map.krate().config;
let value = self.field(attr, CFG);
debug!("check_config(config={:?}, value={:?})", config, value);
if config.iter().any(|c| c.check_name(&value[..])) {
debug!("check_config: matched");
return true;
}
debug!("check_config: no match found");
return false;
}
}


@ -10,15 +10,17 @@
use back::lto;
use back::link::{get_linker, remove};
use rustc_incremental::save_trans_partition;
use session::config::{OutputFilenames, Passes, SomePasses, AllPasses};
use session::Session;
use session::config::{self, OutputType};
use llvm;
use llvm::{ModuleRef, TargetMachineRef, PassManagerRef, DiagnosticInfoRef, ContextRef};
use llvm::SMDiagnosticRef;
use {CrateTranslation, ModuleTranslation};
use {CrateTranslation, ModuleLlvm, ModuleSource, ModuleTranslation};
use util::common::time;
use util::common::path2cstr;
use util::fs::link_or_copy;
use errors::{self, Handler, Level, DiagnosticBuilder};
use errors::emitter::Emitter;
use syntax_pos::MultiSpan;
@ -335,6 +337,8 @@ struct CodegenContext<'a> {
remark: Passes,
// Worker thread number
worker: usize,
// Directory where incremental data is stored (if any)
incremental: Option<PathBuf>,
}
impl<'a> CodegenContext<'a> {
@ -345,6 +349,7 @@ impl<'a> CodegenContext<'a> {
plugin_passes: sess.plugin_llvm_passes.borrow().clone(),
remark: sess.opts.cg.remark.clone(),
worker: 0,
incremental: sess.opts.incremental.clone(),
}
}
}
@ -422,10 +427,11 @@ unsafe extern "C" fn diagnostic_handler(info: DiagnosticInfoRef, user: *mut c_vo
// Unsafe due to LLVM calls.
unsafe fn optimize_and_codegen(cgcx: &CodegenContext,
mtrans: ModuleTranslation,
mllvm: ModuleLlvm,
config: ModuleConfig,
output_names: OutputFilenames) {
let llmod = mtrans.llmod;
let llcx = mtrans.llcx;
let llmod = mllvm.llmod;
let llcx = mllvm.llcx;
let tm = config.tm;
// llcx doesn't outlive this function, so we can put this on the stack.
@ -609,7 +615,7 @@ unsafe fn optimize_and_codegen(cgcx: &CodegenContext,
if copy_bc_to_obj {
debug!("copying bitcode {:?} to obj {:?}", bc_out, obj_out);
if let Err(e) = fs::copy(&bc_out, &obj_out) {
if let Err(e) = link_or_copy(&bc_out, &obj_out) {
cgcx.handler.err(&format!("failed to copy bitcode to object file: {}", e));
}
}
@ -628,8 +634,14 @@ unsafe fn optimize_and_codegen(cgcx: &CodegenContext,
pub fn cleanup_llvm(trans: &CrateTranslation) {
for module in trans.modules.iter() {
unsafe {
llvm::LLVMDisposeModule(module.llmod);
llvm::LLVMContextDispose(module.llcx);
match module.source {
ModuleSource::Translated(llvm) => {
llvm::LLVMDisposeModule(llvm.llmod);
llvm::LLVMContextDispose(llvm.llcx);
}
ModuleSource::Preexisting(_) => {
}
}
}
}
}
@ -743,6 +755,23 @@ pub fn run_passes(sess: &Session,
run_work_multithreaded(sess, work_items, num_workers);
}
// If in incr. comp. mode, preserve the `.o` files for potential re-use
for mtrans in trans.modules.iter() {
let mut files = vec![];
if modules_config.emit_obj {
let path = crate_output.temp_path(OutputType::Object, Some(&mtrans.name));
files.push((OutputType::Object, path));
}
if modules_config.emit_bc {
let path = crate_output.temp_path(OutputType::Bitcode, Some(&mtrans.name));
files.push((OutputType::Bitcode, path));
}
save_trans_partition(sess, &mtrans.name, mtrans.symbol_name_hash, &files);
}
// All codegen is finished.
unsafe {
llvm::LLVMRustDisposeTargetMachine(tm);
@ -916,10 +945,37 @@ fn build_work_item(sess: &Session,
fn execute_work_item(cgcx: &CodegenContext,
work_item: WorkItem) {
unsafe {
optimize_and_codegen(cgcx,
work_item.mtrans,
work_item.config,
work_item.output_names);
match work_item.mtrans.source {
ModuleSource::Translated(mllvm) => {
debug!("llvm-optimizing {:?}", work_item.mtrans.name);
optimize_and_codegen(cgcx,
work_item.mtrans,
mllvm,
work_item.config,
work_item.output_names);
}
ModuleSource::Preexisting(wp) => {
let incremental = cgcx.incremental.as_ref().unwrap();
let name = &work_item.mtrans.name;
for (kind, saved_file) in wp.saved_files {
let obj_out = work_item.output_names.temp_path(kind, Some(name));
let source_file = incremental.join(&saved_file);
debug!("copying pre-existing module `{}` from {:?} to {}",
work_item.mtrans.name,
source_file,
obj_out.display());
match link_or_copy(&source_file, &obj_out) {
Ok(()) => { }
Err(err) => {
cgcx.handler.err(&format!("unable to copy {} to {}: {}",
source_file.display(),
obj_out.display(),
err));
}
}
}
}
}
}
}
@ -955,6 +1011,8 @@ fn run_work_multithreaded(sess: &Session,
let mut tx = Some(tx);
futures.push(rx);
let incremental = sess.opts.incremental.clone();
thread::Builder::new().name(format!("codegen-{}", i)).spawn(move || {
let diag_handler = Handler::with_emitter(true, false, box diag_emitter);
@ -966,6 +1024,7 @@ fn run_work_multithreaded(sess: &Session,
plugin_passes: plugin_passes,
remark: remark,
worker: i,
incremental: incremental,
};
loop {


@ -26,8 +26,11 @@
#![allow(non_camel_case_types)]
use super::CrateTranslation;
use super::ModuleLlvm;
use super::ModuleSource;
use super::ModuleTranslation;
use assert_module_sources;
use back::link;
use back::linker::LinkerInfo;
use llvm::{BasicBlockRef, Linkage, ValueRef, Vector, get_param};
@ -40,7 +43,7 @@ use rustc::ty::subst::{self, Substs};
use rustc::traits;
use rustc::ty::{self, Ty, TyCtxt, TypeFoldable};
use rustc::ty::adjustment::CustomCoerceUnsized;
use rustc::dep_graph::DepNode;
use rustc::dep_graph::{DepNode, WorkProduct};
use rustc::hir::map as hir_map;
use rustc::util::common::time;
use rustc::mir::mir_map::MirMap;
@ -2133,7 +2136,7 @@ pub fn maybe_create_entry_wrapper(ccx: &CrateContext) {
let instance = Instance::mono(ccx.shared(), main_def_id);
if !ccx.codegen_unit().items.contains_key(&TransItem::Fn(instance)) {
if !ccx.codegen_unit().contains_item(&TransItem::Fn(instance)) {
// We want to create the wrapper in the same codegen unit as Rust's main
// function.
return;
@ -2257,12 +2260,20 @@ fn write_metadata(cx: &SharedCrateContext,
/// Find any symbols that are defined in one compilation unit, but not declared
/// in any other compilation unit. Give these symbols internal linkage.
fn internalize_symbols<'a, 'tcx>(ccxs: &CrateContextList<'a, 'tcx>,
fn internalize_symbols<'a, 'tcx>(sess: &Session,
ccxs: &CrateContextList<'a, 'tcx>,
symbol_map: &SymbolMap<'tcx>,
reachable: &FnvHashSet<&str>) {
let scx = ccxs.shared();
let tcx = scx.tcx();
// In incr. comp. mode, we can't necessarily see all refs since we
// don't generate LLVM IR for reused modules, so skip this
// step. Later we should get smarter.
if sess.opts.debugging_opts.incremental.is_some() {
return;
}
// 'unsafe' because we are holding on to CStr's from the LLVM module within
// this block.
unsafe {
@ -2270,7 +2281,7 @@ fn internalize_symbols<'a, 'tcx>(ccxs: &CrateContextList<'a, 'tcx>,
// Collect all symbols that need to stay externally visible because they
// are referenced via a declaration in some other codegen unit.
for ccx in ccxs.iter() {
for ccx in ccxs.iter_need_trans() {
for val in iter_globals(ccx.llmod()).chain(iter_functions(ccx.llmod())) {
let linkage = llvm::LLVMGetLinkage(val);
// We only care about external declarations (not definitions)
@ -2315,7 +2326,7 @@ fn internalize_symbols<'a, 'tcx>(ccxs: &CrateContextList<'a, 'tcx>,
// Examine each external definition. If the definition is not used in
// any other compilation unit, and is not reachable from other crates,
// then give it internal linkage.
for ccx in ccxs.iter() {
for ccx in ccxs.iter_need_trans() {
for val in iter_globals(ccx.llmod()).chain(iter_functions(ccx.llmod())) {
let linkage = llvm::LLVMGetLinkage(val);
@ -2362,7 +2373,7 @@ fn create_imps(cx: &CrateContextList) {
"\x01__imp_"
};
unsafe {
for ccx in cx.iter() {
for ccx in cx.iter_need_trans() {
let exported: Vec<_> = iter_globals(ccx.llmod())
.filter(|&val| {
llvm::LLVMGetLinkage(val) ==
@ -2514,8 +2525,11 @@ pub fn trans_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
let metadata_module = ModuleTranslation {
name: "metadata".to_string(),
llcx: shared_ccx.metadata_llcx(),
llmod: shared_ccx.metadata_llmod(),
symbol_name_hash: 0, // we always rebuild metadata, at least for now
source: ModuleSource::Translated(ModuleLlvm {
llcx: shared_ccx.metadata_llcx(),
llmod: shared_ccx.metadata_llmod(),
}),
};
let no_builtins = attr::contains_name(&krate.attrs, "no_builtins");
@ -2525,17 +2539,34 @@ pub fn trans_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
let symbol_map = Rc::new(symbol_map);
let previous_work_products = trans_reuse_previous_work_products(tcx,
&codegen_units,
&symbol_map);
let crate_context_list = CrateContextList::new(&shared_ccx,
codegen_units,
previous_work_products,
symbol_map.clone());
let modules = crate_context_list.iter()
.map(|ccx| ModuleTranslation {
name: String::from(&ccx.codegen_unit().name[..]),
llcx: ccx.llcx(),
llmod: ccx.llmod()
let modules: Vec<_> = crate_context_list.iter_all()
.map(|ccx| {
let source = match ccx.previous_work_product() {
Some(buf) => ModuleSource::Preexisting(buf.clone()),
None => ModuleSource::Translated(ModuleLlvm {
llcx: ccx.llcx(),
llmod: ccx.llmod(),
}),
};
ModuleTranslation {
name: String::from(ccx.codegen_unit().name()),
symbol_name_hash: ccx.codegen_unit().compute_symbol_name_hash(tcx, &symbol_map),
source: source,
}
})
.collect();
assert_module_sources::assert_module_sources(tcx, &modules);
// Skip crate items and just output metadata in -Z no-trans mode.
if tcx.sess.opts.no_trans {
let linker_info = LinkerInfo::new(&shared_ccx, &[]);
@ -2551,41 +2582,44 @@ pub fn trans_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
}
// Instantiate translation items without filling out definitions yet...
for ccx in crate_context_list.iter() {
let trans_items = ccx.codegen_unit()
.items_in_deterministic_order(tcx, &symbol_map);
for ccx in crate_context_list.iter_need_trans() {
let cgu = ccx.codegen_unit();
let trans_items = cgu.items_in_deterministic_order(tcx, &symbol_map);
for (trans_item, linkage) in trans_items {
trans_item.predefine(&ccx, linkage);
}
tcx.dep_graph.with_task(cgu.work_product_dep_node(), || {
for (trans_item, linkage) in trans_items {
trans_item.predefine(&ccx, linkage);
}
});
}
// ... and now that we have everything pre-defined, fill out those definitions.
for ccx in crate_context_list.iter() {
let trans_items = ccx.codegen_unit()
.items_in_deterministic_order(tcx, &symbol_map);
for (trans_item, _) in trans_items {
trans_item.define(&ccx);
}
// If this codegen unit contains the main function, also create the
// wrapper here
maybe_create_entry_wrapper(&ccx);
// Run replace-all-uses-with for statics that need it
for &(old_g, new_g) in ccx.statics_to_rauw().borrow().iter() {
unsafe {
let bitcast = llvm::LLVMConstPointerCast(new_g, llvm::LLVMTypeOf(old_g));
llvm::LLVMReplaceAllUsesWith(old_g, bitcast);
llvm::LLVMDeleteGlobal(old_g);
for ccx in crate_context_list.iter_need_trans() {
let cgu = ccx.codegen_unit();
let trans_items = cgu.items_in_deterministic_order(tcx, &symbol_map);
tcx.dep_graph.with_task(cgu.work_product_dep_node(), || {
for (trans_item, _) in trans_items {
trans_item.define(&ccx);
}
}
// Finalize debuginfo
if ccx.sess().opts.debuginfo != NoDebugInfo {
debuginfo::finalize(&ccx);
}
// If this codegen unit contains the main function, also create the
// wrapper here
maybe_create_entry_wrapper(&ccx);
// Run replace-all-uses-with for statics that need it
for &(old_g, new_g) in ccx.statics_to_rauw().borrow().iter() {
unsafe {
let bitcast = llvm::LLVMConstPointerCast(new_g, llvm::LLVMTypeOf(old_g));
llvm::LLVMReplaceAllUsesWith(old_g, bitcast);
llvm::LLVMDeleteGlobal(old_g);
}
}
// Finalize debuginfo
if ccx.sess().opts.debuginfo != NoDebugInfo {
debuginfo::finalize(&ccx);
}
});
}
symbol_names_test::report_symbol_names(&shared_ccx);
@ -2654,7 +2688,8 @@ pub fn trans_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
}
time(shared_ccx.sess().time_passes(), "internalize symbols", || {
internalize_symbols(&crate_context_list,
internalize_symbols(sess,
&crate_context_list,
&symbol_map,
&reachable_symbols.iter()
.map(|s| &s[..])
@ -2679,6 +2714,38 @@ pub fn trans_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
}
}
/// For each CGU, identify if we can reuse an existing object file (or
/// other saved work product).
fn trans_reuse_previous_work_products(tcx: TyCtxt,
codegen_units: &[CodegenUnit],
symbol_map: &SymbolMap)
-> Vec<Option<WorkProduct>> {
debug!("trans_reuse_previous_work_products()");
codegen_units
.iter()
.map(|cgu| {
let id = cgu.work_product_id();
let hash = cgu.compute_symbol_name_hash(tcx, symbol_map);
debug!("trans_reuse_previous_work_products: id={:?} hash={}", id, hash);
if let Some(work_product) = tcx.dep_graph.previous_work_product(&id) {
if work_product.input_hash == hash {
debug!("trans_reuse_previous_work_products: reusing {:?}", work_product);
return Some(work_product);
} else {
debug!("trans_reuse_previous_work_products: \
not reusing {:?} because hash changed to {:?}",
work_product, hash);
}
}
None
})
.collect()
}
fn collect_and_partition_translation_items<'a, 'tcx>(scx: &SharedCrateContext<'a, 'tcx>)
-> (Vec<CodegenUnit<'tcx>>, SymbolMap<'tcx>) {
let time_passes = scx.sess().time_passes();
@ -2739,10 +2806,10 @@ fn collect_and_partition_translation_items<'a, 'tcx>(scx: &SharedCrateContext<'a
let mut item_to_cgus = HashMap::new();
for cgu in &codegen_units {
for (&trans_item, &linkage) in &cgu.items {
for (&trans_item, &linkage) in cgu.items() {
item_to_cgus.entry(trans_item)
.or_insert(Vec::new())
.push((cgu.name.clone(), linkage));
.push((cgu.name().clone(), linkage));
}
}


@ -1029,7 +1029,7 @@ pub fn get_static<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, def_id: DefId)
assert!(!ccx.external_srcs().borrow().contains_key(&id));
let defined_in_current_codegen_unit = ccx.codegen_unit()
.items
.items()
.contains_key(&TransItem::Static(id));
if defined_in_current_codegen_unit {
if declare::get_declared_value(ccx, sym).is_none() {


@ -10,7 +10,7 @@
use llvm;
use llvm::{ContextRef, ModuleRef, ValueRef, BuilderRef};
use rustc::dep_graph::{DepNode, DepTrackingMap, DepTrackingMapConfig};
use rustc::dep_graph::{DepNode, DepTrackingMap, DepTrackingMapConfig, WorkProduct};
use middle::cstore::LinkMeta;
use rustc::hir::def::ExportMap;
use rustc::hir::def_id::DefId;
@ -95,6 +95,7 @@ pub struct SharedCrateContext<'a, 'tcx: 'a> {
pub struct LocalCrateContext<'tcx> {
llmod: ModuleRef,
llcx: ContextRef,
previous_work_product: Option<WorkProduct>,
tn: TypeNames, // FIXME: This seems to be largely unused.
codegen_unit: CodegenUnit<'tcx>,
needs_unwind_cleanup_cache: RefCell<FnvHashMap<Ty<'tcx>, bool>>,
@ -198,24 +199,39 @@ pub struct CrateContextList<'a, 'tcx: 'a> {
}
impl<'a, 'tcx: 'a> CrateContextList<'a, 'tcx> {
pub fn new(shared_ccx: &'a SharedCrateContext<'a, 'tcx>,
codegen_units: Vec<CodegenUnit<'tcx>>,
previous_work_products: Vec<Option<WorkProduct>>,
symbol_map: Rc<SymbolMap<'tcx>>)
-> CrateContextList<'a, 'tcx> {
CrateContextList {
shared: shared_ccx,
local_ccxs: codegen_units.into_iter().map(|codegen_unit| {
LocalCrateContext::new(shared_ccx, codegen_unit, symbol_map.clone())
local_ccxs: codegen_units.into_iter().zip(previous_work_products).map(|(cgu, wp)| {
LocalCrateContext::new(shared_ccx, cgu, wp, symbol_map.clone())
}).collect()
}
}
pub fn iter<'b>(&'b self) -> CrateContextIterator<'b, 'tcx> {
/// Iterate over all crate contexts, whether or not they need
/// translation. That is, whether or not a `.o` file is available
/// for re-use from a previous incr. comp. run.
pub fn iter_all<'b>(&'b self) -> CrateContextIterator<'b, 'tcx> {
CrateContextIterator {
shared: self.shared,
index: 0,
local_ccxs: &self.local_ccxs[..]
local_ccxs: &self.local_ccxs[..],
filter_to_previous_work_product_unavail: false,
}
}
/// Iterator over all CCX that need translation (cannot reuse results from
/// previous incr. comp.).
pub fn iter_need_trans<'b>(&'b self) -> CrateContextIterator<'b, 'tcx> {
CrateContextIterator {
shared: self.shared,
index: 0,
local_ccxs: &self.local_ccxs[..],
filter_to_previous_work_product_unavail: true,
}
}
@ -239,24 +255,38 @@ pub struct CrateContextIterator<'a, 'tcx: 'a> {
shared: &'a SharedCrateContext<'a, 'tcx>,
local_ccxs: &'a [LocalCrateContext<'tcx>],
index: usize,
/// If true, only yield contexts whose `previous_work_product` is `None`.
filter_to_previous_work_product_unavail: bool,
}
impl<'a, 'tcx> Iterator for CrateContextIterator<'a,'tcx> {
type Item = CrateContext<'a, 'tcx>;
fn next(&mut self) -> Option<CrateContext<'a, 'tcx>> {
if self.index >= self.local_ccxs.len() {
return None;
loop {
if self.index >= self.local_ccxs.len() {
return None;
}
let index = self.index;
self.index += 1;
let ccx = CrateContext {
shared: self.shared,
index: index,
local_ccxs: self.local_ccxs,
};
if
self.filter_to_previous_work_product_unavail &&
ccx.previous_work_product().is_some()
{
continue;
}
return Some(ccx);
}
let index = self.index;
self.index += 1;
Some(CrateContext {
shared: self.shared,
index: index,
local_ccxs: self.local_ccxs,
})
}
}
@ -510,6 +540,7 @@ impl<'b, 'tcx> SharedCrateContext<'b, 'tcx> {
impl<'tcx> LocalCrateContext<'tcx> {
fn new<'a>(shared: &SharedCrateContext<'a, 'tcx>,
codegen_unit: CodegenUnit<'tcx>,
previous_work_product: Option<WorkProduct>,
symbol_map: Rc<SymbolMap<'tcx>>)
-> LocalCrateContext<'tcx> {
unsafe {
@ -521,7 +552,7 @@ impl<'tcx> LocalCrateContext<'tcx> {
// crashes if the module identifier is same as other symbols
// such as a function name in the module.
// 1. http://llvm.org/bugs/show_bug.cgi?id=11479
let llmod_id = format!("{}.rs", codegen_unit.name);
let llmod_id = format!("{}.rs", codegen_unit.name());
let (llcx, llmod) = create_context_and_module(&shared.tcx.sess,
&llmod_id[..]);
@ -535,6 +566,7 @@ impl<'tcx> LocalCrateContext<'tcx> {
let local_ccx = LocalCrateContext {
llmod: llmod,
llcx: llcx,
previous_work_product: previous_work_product,
codegen_unit: codegen_unit,
tn: TypeNames::new(),
needs_unwind_cleanup_cache: RefCell::new(FnvHashMap()),
@ -694,6 +726,10 @@ impl<'b, 'tcx> CrateContext<'b, 'tcx> {
self.local().llcx
}
pub fn previous_work_product(&self) -> Option<&WorkProduct> {
self.local().previous_work_product.as_ref()
}
pub fn codegen_unit(&self) -> &CodegenUnit<'tcx> {
&self.local().codegen_unit
}
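This accessor is the producer-side hook for reuse: when a CGU's previous work product survived the dirtiness check, translation can be skipped and the module recorded as preexisting. A hedged sketch of that decision, using the PR's own types but with `translate_cgu` as a hypothetical helper:

    // Choose the source for one module: reuse the saved artifact if we may.
    fn module_source_for<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>) -> ModuleSource {
        match ccx.previous_work_product() {
            // A valid .o survived from the previous run; it will be
            // copied back out of the incr. comp. directory at link time.
            Some(wp) => ModuleSource::Preexisting(wp.clone()),
            // Nothing reusable: translate this CGU to LLVM IR as usual.
            None => ModuleSource::Translated(translate_cgu(ccx)),
        }
    }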

View file

@ -239,7 +239,7 @@ fn get_drop_glue_core<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
Falling back to on-demand instantiation.",
g,
TransItem::DropGlue(g).to_raw_string(),
ccx.codegen_unit().name);
ccx.codegen_unit().name());
ccx.stats().n_fallback_instantiations.set(ccx.stats()
.n_fallback_instantiations

View file

@ -37,6 +37,8 @@
#![feature(unicode)]
#![feature(question_mark)]
use rustc::dep_graph::WorkProduct;
extern crate arena;
extern crate flate;
extern crate getopts;
@ -86,6 +88,7 @@ mod macros;
mod abi;
mod adt;
mod asm;
mod assert_module_sources;
mod attributes;
mod base;
mod basic_block;
@ -132,7 +135,27 @@ mod value;
#[derive(Clone)]
pub struct ModuleTranslation {
/// The name of the module. When the crate may be saved between
/// compilations, incremental compilation requires that the name
/// be unique amongst **all** crates. Therefore, it should contain
/// something unique to this crate (e.g., a module path) as well
/// as the crate name and disambiguator.
pub name: String,
pub symbol_name_hash: u64,
pub source: ModuleSource,
}
#[derive(Clone)]
pub enum ModuleSource {
/// Copy the `.o` (and other saved) files from the incr. comp. directory.
Preexisting(WorkProduct),
/// Rebuild from this LLVM module.
Translated(ModuleLlvm),
}
#[derive(Copy, Clone)]
pub struct ModuleLlvm {
pub llcx: llvm::ContextRef,
pub llmod: llvm::ModuleRef,
}
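On the consumer side, everything downstream of translation dispatches on `ModuleSource` (the driver tests further below do exactly this). A hedged sketch of the write-back step, where `copy_saved_files` and `run_llvm_passes` are hypothetical helpers standing in for the real emission code:

    fn emit_object(module: &ModuleTranslation) {
        match module.source {
            // Reused: copy the saved .o (and .bc, for rlibs) out of the
            // incremental compilation directory.
            ModuleSource::Preexisting(ref wp) => copy_saved_files(wp),
            // Fresh: run the LLVM passes and write a new object file.
            ModuleSource::Translated(llvm) => run_llvm_passes(llvm.llmod),
        }
    }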

View file

@ -52,7 +52,7 @@ pub fn monomorphic_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
debug!("leaving monomorphic fn {:?}", instance);
return (val, mono_ty);
} else {
assert!(!ccx.codegen_unit().items.contains_key(&TransItem::Fn(instance)));
assert!(!ccx.codegen_unit().contains_item(&TransItem::Fn(instance)));
}
debug!("monomorphic_fn({:?})", instance);

View file

@ -119,12 +119,15 @@
use collector::InliningMap;
use llvm;
use monomorphize;
use rustc::dep_graph::{DepNode, WorkProductId};
use rustc::hir::def_id::DefId;
use rustc::hir::map::DefPathData;
use rustc::session::config::NUMBERED_CODEGEN_UNIT_MARKER;
use rustc::ty::TyCtxt;
use rustc::ty::item_path::characteristic_def_id_of_type;
use std::cmp::Ordering;
use std::hash::{Hash, Hasher, SipHasher};
use std::sync::Arc;
use symbol_map::SymbolMap;
use syntax::ast::NodeId;
use syntax::parse::token::{self, InternedString};
@ -140,11 +143,59 @@ pub enum PartitioningStrategy {
}
pub struct CodegenUnit<'tcx> {
pub name: InternedString,
pub items: FnvHashMap<TransItem<'tcx>, llvm::Linkage>,
/// A name for this CGU. Incremental compilation requires that
/// the name be unique amongst **all** crates. Therefore, it
/// should contain something unique to this crate (e.g., a module
/// path) as well as the crate name and disambiguator.
name: InternedString,
items: FnvHashMap<TransItem<'tcx>, llvm::Linkage>,
}
impl<'tcx> CodegenUnit<'tcx> {
pub fn new(name: InternedString,
items: FnvHashMap<TransItem<'tcx>, llvm::Linkage>)
-> Self {
CodegenUnit {
name: name,
items: items,
}
}
pub fn empty(name: InternedString) -> Self {
Self::new(name, FnvHashMap())
}
pub fn contains_item(&self, item: &TransItem<'tcx>) -> bool {
self.items.contains_key(item)
}
pub fn name(&self) -> &str {
&self.name
}
pub fn items(&self) -> &FnvHashMap<TransItem<'tcx>, llvm::Linkage> {
&self.items
}
pub fn work_product_id(&self) -> Arc<WorkProductId> {
Arc::new(WorkProductId(self.name().to_string()))
}
pub fn work_product_dep_node(&self) -> DepNode<DefId> {
DepNode::WorkProduct(self.work_product_id())
}
pub fn compute_symbol_name_hash(&self, tcx: TyCtxt, symbol_map: &SymbolMap) -> u64 {
let mut state = SipHasher::new();
let all_items = self.items_in_deterministic_order(tcx, symbol_map);
for (item, _) in all_items {
let symbol_name = symbol_map.get(item).unwrap();
symbol_name.hash(&mut state);
}
state.finish()
}
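The hash computed above is what the validation pass later compares against the `symbol_name_hash` stored in `ModuleTranslation`: identical symbol names, hashed in a deterministic order, mean the saved `.o` is still a faithful stand-in for retranslation. A self-contained sketch of the same idea (sorting plain strings here instead of the PR's `items_in_deterministic_order`; `SipHasher` matches the PR, though it was deprecated in later Rust):

    use std::hash::{Hash, Hasher, SipHasher};

    // Hash a CGU's symbol names in a deterministic (sorted) order so the
    // result is comparable across compilation sessions.
    fn symbol_name_hash(mut names: Vec<String>) -> u64 {
        names.sort();
        let mut state = SipHasher::new();
        for name in &names {
            name.hash(&mut state);
        }
        state.finish()
    }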
pub fn items_in_deterministic_order(&self,
tcx: TyCtxt,
symbol_map: &SymbolMap)
@ -277,10 +328,7 @@ fn place_root_translation_items<'a, 'tcx, I>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
};
let make_codegen_unit = || {
CodegenUnit {
name: codegen_unit_name.clone(),
items: FnvHashMap(),
}
CodegenUnit::empty(codegen_unit_name.clone())
};
let mut codegen_unit = codegen_units.entry(codegen_unit_name.clone())
@ -319,10 +367,7 @@ fn place_root_translation_items<'a, 'tcx, I>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
if codegen_units.is_empty() {
let codegen_unit_name = InternedString::new(FALLBACK_CODEGEN_UNIT);
codegen_units.entry(codegen_unit_name.clone())
.or_insert_with(|| CodegenUnit {
name: codegen_unit_name.clone(),
items: FnvHashMap(),
});
.or_insert_with(|| CodegenUnit::empty(codegen_unit_name.clone()));
}
PreInliningPartitioning {
@ -362,10 +407,8 @@ fn merge_codegen_units<'tcx>(initial_partitioning: &mut PreInliningPartitioning<
// we reach the target count
while codegen_units.len() < target_cgu_count {
let index = codegen_units.len();
codegen_units.push(CodegenUnit {
name: numbered_codegen_unit_name(crate_name, index),
items: FnvHashMap()
});
codegen_units.push(
CodegenUnit::empty(numbered_codegen_unit_name(crate_name, index)));
}
}
@ -381,10 +424,8 @@ fn place_inlined_translation_items<'tcx>(initial_partitioning: PreInliningPartit
follow_inlining(*root, inlining_map, &mut reachable);
}
let mut new_codegen_unit = CodegenUnit {
name: codegen_unit.name.clone(),
items: FnvHashMap(),
};
let mut new_codegen_unit =
CodegenUnit::empty(codegen_unit.name.clone());
// Add all translation items that are not already there
for trans_item in reachable {
@ -560,10 +601,9 @@ fn single_codegen_unit<'a, 'tcx, I>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
items.insert(trans_item, linkage);
}
CodegenUnit {
name: numbered_codegen_unit_name(&tcx.crate_name[..], 0),
items: items
}
CodegenUnit::new(
numbered_codegen_unit_name(&tcx.crate_name[..], 0),
items)
}
fn numbered_codegen_unit_name(crate_name: &str, index: usize) -> InternedString {

View file

@ -23,12 +23,12 @@ use glue::DropGlueKind;
use llvm;
use monomorphize::{self, Instance};
use inline;
use rustc::dep_graph::DepNode;
use rustc::hir;
use rustc::hir::map as hir_map;
use rustc::hir::def_id::DefId;
use rustc::ty::{self, Ty, TyCtxt, TypeFoldable};
use rustc::ty::subst;
use rustc::dep_graph::DepNode;
use rustc_const_eval::fatal_const_eval_err;
use std::hash::{Hash, Hasher};
use syntax::ast::{self, NodeId};
@ -68,16 +68,27 @@ impl<'tcx> Hash for TransItem<'tcx> {
impl<'a, 'tcx> TransItem<'tcx> {
pub fn define(&self, ccx: &CrateContext<'a, 'tcx>) {
debug!("BEGIN IMPLEMENTING '{} ({})' in cgu {}",
self.to_string(ccx.tcx()),
self.to_raw_string(),
ccx.codegen_unit().name);
ccx.codegen_unit().name());
// (*) This code executes in the context of a dep-node for the
// entire CGU. In some cases, we introduce dep-nodes for
// particular items that we are translating (these nodes will
// have read edges coming into the CGU node). These smaller
// nodes are not needed for correctness -- we always
// invalidate an entire CGU at a time -- but they enable
// finer-grained testing, since you can write tests that check
// that the incoming edges to a particular fn are from a
// particular set.
self.register_reads(ccx);
match *self {
TransItem::Static(node_id) => {
let def_id = ccx.tcx().map.local_def_id(node_id);
let _task = ccx.tcx().dep_graph.in_task(DepNode::TransCrateItem(def_id)); // (*)
let item = ccx.tcx().map.expect_item(node_id);
if let hir::ItemStatic(_, m, ref expr) = item.node {
match consts::trans_static(&ccx, m, expr, item.id, &item.attrs) {
@ -93,6 +104,9 @@ impl<'a, 'tcx> TransItem<'tcx> {
}
}
TransItem::Fn(instance) => {
let _task = ccx.tcx().dep_graph.in_task(
DepNode::TransCrateItem(instance.def)); // (*)
base::trans_instance(&ccx, instance);
}
TransItem::DropGlue(dg) => {
@ -103,7 +117,7 @@ impl<'a, 'tcx> TransItem<'tcx> {
debug!("END IMPLEMENTING '{} ({})' in cgu {}",
self.to_string(ccx.tcx()),
self.to_raw_string(),
ccx.codegen_unit().name);
ccx.codegen_unit().name());
}
/// If necessary, creates a subtask for trans'ing a particular item and registers reads on
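The `(*)` comment above describes two levels of dep-graph granularity: a coarse task for the whole CGU (all that correctness needs, since invalidation is per-CGU) plus per-item subtasks whose read edges exist only so tests can make finer-grained assertions. A self-contained toy of that task nesting (the push/pop API is illustrative, modeled on the RAII style of `in_task`):

    struct Task(String);

    impl Task {
        // Entering a task pushes it; dropping the guard pops it again,
        // mirroring how `in_task` returns an RAII guard.
        fn enter(label: &str) -> Task {
            println!("push {}", label);
            Task(label.to_string())
        }
    }

    impl Drop for Task {
        fn drop(&mut self) {
            println!("pop  {}", self.0);
        }
    }

    fn main() {
        let _cgu = Task::enter("WorkProduct(cgu-0)"); // coarse: whole CGU
        {
            let _item = Task::enter("TransCrateItem(foo)"); // fine: tests only
            // ... translate `foo`; reads recorded here also edge into
            // the enclosing CGU node ...
        }
        // `foo`'s task pops here; the CGU task pops at the end of main.
    }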
@ -152,7 +166,7 @@ impl<'a, 'tcx> TransItem<'tcx> {
debug!("BEGIN PREDEFINING '{} ({})' in cgu {}",
self.to_string(ccx.tcx()),
self.to_raw_string(),
ccx.codegen_unit().name);
ccx.codegen_unit().name());
let symbol_name = ccx.symbol_map()
.get_or_compute(ccx.shared(), *self);
@ -174,7 +188,7 @@ impl<'a, 'tcx> TransItem<'tcx> {
debug!("END PREDEFINING '{} ({})' in cgu {}",
self.to_string(ccx.tcx()),
self.to_raw_string(),
ccx.codegen_unit().name);
ccx.codegen_unit().name());
}
fn predefine_static(ccx: &CrateContext<'a, 'tcx>,

View file

@ -481,6 +481,16 @@ pub const KNOWN_ATTRIBUTES: &'static [(&'static str, AttributeType, AttributeGat
is just used for rustc unit tests \
and will never be stable",
cfg_fn!(rustc_attrs))),
("rustc_partition_reused", Whitelisted, Gated("rustc_attrs",
"this attribute \
is just used for rustc unit tests \
and will never be stable",
cfg_fn!(rustc_attrs))),
("rustc_partition_translated", Whitelisted, Gated("rustc_attrs",
"this attribute \
is just used for rustc unit tests \
and will never be stable",
cfg_fn!(rustc_attrs))),
("rustc_symbol_name", Whitelisted, Gated("rustc_attrs",
"internal rustc attributes will never be stable",
cfg_fn!(rustc_attrs))),

View file

@ -0,0 +1,25 @@
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// no-prefer-dynamic
#![crate_type="rlib"]
#[cfg(rpass1)]
pub type X = u32;
#[cfg(rpass2)]
pub type X = i32;
// this version doesn't actually change anything:
#[cfg(rpass3)]
pub type X = i32;
pub type Y = char;

View file

@ -0,0 +1,38 @@
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Same test as `type_alias_cross_crate`, but with
// `no-prefer-dynamic`, ensuring that we test what happens when we
// build rlibs (previously we were only testing dylibs, which meant
// we didn't realize we had to preserve a `.bc` file as well).
// aux-build:a.rs
// revisions:rpass1 rpass2 rpass3
// no-prefer-dynamic
#![feature(rustc_attrs)]
extern crate a;
#[rustc_dirty(label="TypeckItemBody", cfg="rpass2")]
#[rustc_clean(label="TypeckItemBody", cfg="rpass3")]
pub fn use_X() -> u32 {
let x: a::X = 22;
x as u32
}
#[rustc_clean(label="TypeckItemBody", cfg="rpass2")]
#[rustc_clean(label="TypeckItemBody", cfg="rpass3")]
pub fn use_Y() {
let x: a::Y = 'c';
}
pub fn main() { }

View file

@ -0,0 +1,62 @@
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// A variant of the first "spike" test that serves to test the
// `rustc_partition_reused` and `rustc_partition_translated` tests.
// Here we change and say that the `x` module will be reused (when in
// fact it will not), and then indicate that the test itself
// should-fail (because an error will be reported, and hence the
// revision rpass2 will not compile, despite being named rpass).
// revisions:rpass1 rpass2
// should-fail
#![feature(rustc_attrs)]
#![rustc_partition_reused(module="spike_neg1", cfg="rpass2")]
#![rustc_partition_reused(module="spike_neg1-x", cfg="rpass2")] // this is wrong!
#![rustc_partition_reused(module="spike_neg1-y", cfg="rpass2")]
mod x {
pub struct X {
x: u32, y: u32,
}
#[cfg(rpass1)]
fn make() -> X {
X { x: 22, y: 0 }
}
#[cfg(rpass2)]
fn make() -> X {
X { x: 11, y: 11 }
}
pub fn new() -> X {
make()
}
pub fn sum(x: &X) -> u32 {
x.x + x.y
}
}
mod y {
use x;
pub fn assert_sum() -> bool {
let x = x::new();
x::sum(&x) == 22
}
}
pub fn main() {
y::assert_sum();
}

View file

@ -0,0 +1,62 @@
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// A variant of the first "spike" test that serves to test the
// `rustc_partition_reused` and `rustc_partition_translated` tests.
// Here we change and say that the `y` module will be translated (when
// in fact it will not), and then indicate that the test itself
// should-fail (because an error will be reported, and hence the
// revision rpass2 will not compile, despite being named rpass).
// revisions:rpass1 rpass2
// should-fail
#![feature(rustc_attrs)]
#![rustc_partition_reused(module="spike_neg2", cfg="rpass2")]
#![rustc_partition_translated(module="spike_neg2-x", cfg="rpass2")]
#![rustc_partition_translated(module="spike_neg2-y", cfg="rpass2")] // this is wrong!
mod x {
pub struct X {
x: u32, y: u32,
}
#[cfg(rpass1)]
fn make() -> X {
X { x: 22, y: 0 }
}
#[cfg(rpass2)]
fn make() -> X {
X { x: 11, y: 11 }
}
pub fn new() -> X {
make()
}
pub fn sum(x: &X) -> u32 {
x.x + x.y
}
}
mod y {
use x;
pub fn assert_sum() -> bool {
let x = x::new();
x::sum(&x) == 22
}
}
pub fn main() {
y::assert_sum();
}

View file

@ -0,0 +1,63 @@
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// A first "spike" for incremental compilation: here, we change the
// content of the `make` function, and we find that we can reuse the
// `y` module entirely (but not the `x` module).
// revisions:rpass1 rpass2
#![feature(rustc_attrs)]
#![rustc_partition_reused(module="spike", cfg="rpass2")]
#![rustc_partition_translated(module="spike-x", cfg="rpass2")]
#![rustc_partition_reused(module="spike-y", cfg="rpass2")]
mod x {
pub struct X {
x: u32, y: u32,
}
#[cfg(rpass1)]
fn make() -> X {
X { x: 22, y: 0 }
}
#[cfg(rpass2)]
fn make() -> X {
X { x: 11, y: 11 }
}
#[rustc_dirty(label="TypeckItemBody", cfg="rpass2")]
#[rustc_clean(label="ItemSignature", cfg="rpass2")]
pub fn new() -> X {
make()
}
#[rustc_clean(label="TypeckItemBody", cfg="rpass2")]
#[rustc_clean(label="ItemSignature", cfg="rpass2")]
pub fn sum(x: &X) -> u32 {
x.x + x.y
}
}
mod y {
use x;
#[rustc_clean(label="TypeckItemBody", cfg="rpass2")]
pub fn assert_sum() -> bool {
let x = x::new();
x::sum(&x) == 22
}
}
pub fn main() {
y::assert_sum();
}

View file

@ -20,6 +20,7 @@ extern crate rustc_metadata;
extern crate rustc_resolve;
extern crate rustc_errors;
extern crate rustc_errors as errors;
extern crate rustc_trans;
#[macro_use] extern crate syntax;
use std::ffi::{CStr, CString};
@ -37,6 +38,7 @@ use rustc::session::build_session;
use rustc_driver::{driver, abort_on_err};
use rustc_resolve::MakeGlobMap;
use rustc_metadata::cstore::CStore;
use rustc_trans::ModuleSource;
use libc::c_void;
use rustc_errors::registry::Registry;
@ -261,7 +263,10 @@ fn compile_program(input: &str, sysroot: PathBuf)
.filter_map(|(_, p)| p).collect();
assert_eq!(trans.modules.len(), 1);
let llmod = trans.modules[0].llmod;
let llmod = match trans.modules[0].source {
ModuleSource::Preexisting(_) => unimplemented!(),
ModuleSource::Translated(llvm) => llvm.llmod,
};
// Workaround because raw pointers do not impl Send
let modp = llmod as usize;

View file

@ -13,11 +13,13 @@
extern crate rustc;
extern crate rustc_driver;
extern crate rustc_llvm;
extern crate rustc_trans;
#[macro_use] extern crate syntax;
extern crate getopts;
use rustc_driver::{CompilerCalls, Compilation};
use rustc_driver::driver::CompileController;
use rustc_trans::ModuleSource;
use rustc::session::Session;
use syntax::codemap::FileLoader;
use std::io;
@ -51,7 +53,10 @@ impl<'a> CompilerCalls<'a> for JitCalls {
state.session.abort_if_errors();
let trans = state.trans.unwrap();
assert_eq!(trans.modules.len(), 1);
let rs_llmod = trans.modules[0].llmod;
let rs_llmod = match trans.modules[0].source {
ModuleSource::Preexisting(_) => unimplemented!(),
ModuleSource::Translated(llvm) => llvm.llmod,
};
unsafe { rustc_llvm::LLVMDumpModule(rs_llmod) };
});
cc