Merge pull request #495 from bjorn3/use_cg_clif_link

Use cg clif link
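
Summary of the change: the linking code that had been copied into the backend (src/link.rs and src/link_copied.rs) is removed in favour of calling rustc_codegen_ssa::back::link::link_binary, driven by the backend's own ArArchiveBuilder. To make that work, archive entries are kept in a Vec instead of a HashMap so that rust.metadata.bin stays at the end of the archive, a GNU-style archive builder is used on non-macOS targets, and the `ar` crate is patched to a fork that accepts GNU identifiers whose length is not a multiple of two. The sysroot build also gains a rustc-std-workspace-alloc shim crate, and metadata_module is now wrapped in Some(...).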
bjorn3 2019-04-27 18:26:25 +02:00 committed by GitHub
commit d4ed46fa75
10 changed files with 90 additions and 842 deletions

Cargo.lock (generated)

@@ -19,7 +19,7 @@ dependencies = [
 [[package]]
 name = "ar"
 version = "0.6.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
+source = "git+https://github.com/bjorn3/rust-ar.git?branch=non_multiple_of_two_identifiers_gnu_format#efa2308b9ded41f4977d4fed98dac7af4d9075cf"
 dependencies = [
  "byteorder 1.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
@@ -582,7 +582,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 name = "rustc_codegen_cranelift"
 version = "0.1.0"
 dependencies = [
- "ar 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "ar 0.6.2 (git+https://github.com/bjorn3/rust-ar.git?branch=non_multiple_of_two_identifiers_gnu_format)",
 "bitflags 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)",
 "byteorder 1.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
 "cranelift 0.30.0 (git+https://github.com/CraneStation/cranelift.git)",
@@ -843,7 +843,7 @@ dependencies = [
 [metadata]
 "checksum aho-corasick 0.7.3 (registry+https://github.com/rust-lang/crates.io-index)" = "e6f484ae0c99fec2e858eb6134949117399f222608d84cadb3f58c1f97c2364c"
 "checksum ansi_term 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ee49baf6cb617b853aa8d93bf420db2383fab46d314482ca2803b40d5fde979b"
-"checksum ar 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)" = "579681b3fecd1e9d6b5ce6969e05f9feb913f296eddaf595be1166a5ca597bc4"
+"checksum ar 0.6.2 (git+https://github.com/bjorn3/rust-ar.git?branch=non_multiple_of_two_identifiers_gnu_format)" = "<none>"
 "checksum arrayvec 0.4.10 (registry+https://github.com/rust-lang/crates.io-index)" = "92c7fb76bc8826a8b33b4ee5bb07a247a81e76764ab4d55e8f73e3a4d8808c71"
 "checksum atty 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)" = "9a7d5b8723950951411ee34d271d99dddcc2035a16ab25310ea2c8cfd4369652"
 "checksum autocfg 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "a6d640bee2da49f60a4068a7fae53acde8982514ab7bae8b8cea9e88cbcfd799"

Cargo.toml

@@ -38,5 +38,8 @@ indexmap = "1.0.2"
 #[patch."https://github.com/gimli-rs/gimli.git"]
 #gimli = { path = "../" }
 
+[patch.crates-io]
+ar = { git = "https://github.com/bjorn3/rust-ar.git", branch = "non_multiple_of_two_identifiers_gnu_format" }
+
 [profile.dev.overrides."*"]
 opt-level = 3

build_sysroot/Cargo.toml

@@ -13,6 +13,7 @@ alloc_system = { path = "./alloc_system" }
 
 [patch.crates-io]
 rustc-std-workspace-core = { path = "./sysroot_src/src/tools/rustc-std-workspace-core" }
+rustc-std-workspace-alloc = { path = "./rustc-std-workspace-alloc" }
 compiler_builtins = { path = "./compiler_builtins" }
 
 [profile.release]

build_sysroot/rustc-std-workspace-alloc/Cargo.toml (new file)

@@ -0,0 +1,15 @@
+[package]
+name = "rustc-std-workspace-alloc"
+version = "1.0.0"
+authors = ["Alex Crichton <alex@alexcrichton.com>"]
+license = 'MIT/Apache-2.0'
+description = """
+Hack for the compiler's own build system
+"""
+edition = "2018"
+
+[lib]
+path = "lib.rs"
+
+[dependencies]
+alloc = { path = "../sysroot_src/src/liballoc" }

build_sysroot/rustc-std-workspace-alloc/lib.rs (new file)

@@ -0,0 +1,5 @@
+#![feature(no_core)]
+#![no_core]
+#![deny(rust_2018_idioms)]
+
+pub use ::alloc::*;

example/alloc_example.rs

@@ -1,4 +1,4 @@
-#![feature(start, box_syntax, alloc_system, core_intrinsics, alloc, alloc_prelude, alloc_error_handler)]
+#![feature(start, box_syntax, alloc_system, core_intrinsics, alloc_prelude, alloc_error_handler)]
 #![no_std]
 
 extern crate alloc;

src/archive.rs

@@ -1,4 +1,3 @@
-use std::collections::HashMap;
 use std::fs::File;
 use std::path::{Path, PathBuf};
 
@@ -8,12 +7,14 @@ use rustc_codegen_ssa::{METADATA_FILENAME, RLIB_BYTECODE_EXTENSION};
 use rustc_codegen_ssa::back::archive::{ArchiveBuilder, find_library};
 
 struct ArchiveConfig<'a> {
-    pub sess: &'a Session,
-    pub dst: PathBuf,
-    pub src: Option<PathBuf>,
-    pub lib_search_paths: Vec<PathBuf>,
+    sess: &'a Session,
+    dst: PathBuf,
+    src: Option<PathBuf>,
+    lib_search_paths: Vec<PathBuf>,
+    use_gnu_style_archive: bool,
 }
 
+#[derive(Debug)]
 enum ArchiveEntry {
     FromArchive { archive_index: usize, entry_index: usize },
     File(File),
@@ -22,7 +23,9 @@ enum ArchiveEntry {
 
 pub struct ArArchiveBuilder<'a> {
     config: ArchiveConfig<'a>,
     src_archives: Vec<ar::Archive<File>>,
-    entries: HashMap<String, ArchiveEntry>,
+    // Don't use `HashMap` here, as the order is important. `rust.metadata.bin` must always be at
+    // the end of an archive for linkers to not get confused.
+    entries: Vec<(String, ArchiveEntry)>,
     update_symbols: bool,
 }
@@ -34,25 +37,27 @@ impl<'a> ArchiveBuilder<'a> for ArArchiveBuilder<'a> {
             dst: output.to_path_buf(),
             src: input.map(|p| p.to_path_buf()),
             lib_search_paths: archive_search_paths(sess),
+            // FIXME test for linux and System V derivatives instead
+            use_gnu_style_archive: !sess.target.target.options.is_like_osx,
         };
 
         let (src_archives, entries) = if let Some(src) = &config.src {
            let mut archive = ar::Archive::new(File::open(src).unwrap());
-            let mut entries = HashMap::new();
+            let mut entries = Vec::new();
             let mut i = 0;
 
             while let Some(entry) = archive.next_entry() {
                 let entry = entry.unwrap();
-                entries.insert(
+                entries.push((
                     String::from_utf8(entry.header().identifier().to_vec()).unwrap(),
                     ArchiveEntry::FromArchive { archive_index: 0, entry_index: i },
-                );
+                ));
                 i += 1;
             }
 
             (vec![archive], entries)
         } else {
-            (vec![], HashMap::new())
+            (vec![], Vec::new())
         };
 
         ArArchiveBuilder {
@@ -64,22 +69,22 @@ impl<'a> ArchiveBuilder<'a> for ArArchiveBuilder<'a> {
     }
 
     fn src_files(&mut self) -> Vec<String> {
-        self.entries.keys().cloned().collect()
+        self.entries.iter().map(|(name, _)| name.clone()).collect()
     }
 
     fn remove_file(&mut self, name: &str) {
-        let file = self.entries.remove(name);
-        assert!(
-            file.is_some(),
-            "Tried to remove file not existing in src archive",
-        );
+        let index = self.entries
+            .iter()
+            .position(|(entry_name, _)| entry_name == name)
+            .expect("Tried to remove file not existing in src archive");
+        self.entries.remove(index);
     }
 
     fn add_file(&mut self, file: &Path) {
-        self.entries.insert(
+        self.entries.push((
             file.file_name().unwrap().to_str().unwrap().to_string(),
             ArchiveEntry::File(File::open(file).unwrap()),
-        );
+        ));
     }
 
     fn add_native_library(&mut self, name: &str) {
@@ -119,7 +124,20 @@ impl<'a> ArchiveBuilder<'a> for ArArchiveBuilder<'a> {
     }
 
     fn build(mut self) {
-        let mut builder = ar::Builder::new(File::create(&self.config.dst).unwrap());
+        enum BuilderKind {
+            Bsd(ar::Builder<File>),
+            Gnu(ar::GnuBuilder<File>),
+        }
+
+        let archive_file = File::create(&self.config.dst).unwrap();
+        let mut builder = if self.config.use_gnu_style_archive {
+            BuilderKind::Gnu(ar::GnuBuilder::new(
+                archive_file,
+                self.entries.iter().map(|(name, _)| name.as_bytes().to_vec()).collect(),
+            ))
+        } else {
+            BuilderKind::Bsd(ar::Builder::new(archive_file))
+        };
 
         // Add all files
         for (entry_name, entry) in self.entries.into_iter() {
@@ -127,16 +145,23 @@ impl<'a> ArchiveBuilder<'a> for ArArchiveBuilder<'a> {
                 ArchiveEntry::FromArchive { archive_index, entry_index } => {
                     let entry = self.src_archives[archive_index].jump_to_entry(entry_index).unwrap();
                     let orig_header = entry.header();
 
                     let mut header =
                         ar::Header::new(orig_header.identifier().to_vec(), orig_header.size());
                     header.set_mtime(orig_header.mtime());
                     header.set_uid(orig_header.uid());
                     header.set_gid(orig_header.gid());
                     header.set_mode(orig_header.mode());
-                    builder.append(&header, entry).unwrap();
+                    match builder {
+                        BuilderKind::Bsd(ref mut builder) => builder.append(&header, entry).unwrap(),
+                        BuilderKind::Gnu(ref mut builder) => builder.append(&header, entry).unwrap(),
+                    }
                 }
                 ArchiveEntry::File(mut file) => {
-                    builder.append_file(entry_name.as_bytes(), &mut file).unwrap();
+                    match builder {
+                        BuilderKind::Bsd(ref mut builder) => builder.append_file(entry_name.as_bytes(), &mut file).unwrap(),
+                        BuilderKind::Gnu(ref mut builder) => builder.append_file(entry_name.as_bytes(), &mut file).unwrap(),
+                    }
                 }
             }
         }
@@ -169,10 +194,10 @@ impl<'a> ArArchiveBuilder<'a> {
             let entry = entry.unwrap();
             let file_name = String::from_utf8(entry.header().identifier().to_vec()).unwrap();
             if !skip(&file_name) {
-                self.entries.insert(
+                self.entries.push((
                     file_name,
                     ArchiveEntry::FromArchive { archive_index, entry_index: i },
-                );
+                ));
             }
             i += 1;
         }
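
Aside on the entries change above: the switch from HashMap to a Vec of (name, entry) pairs is purely about preserving insertion order, as the new comment in the diff notes. A minimal, self-contained sketch of the difference (illustrative only, not part of this PR):

use std::collections::HashMap;

fn main() {
    // HashMap iteration order is unspecified, so the metadata member could end
    // up anywhere when the archive is written out.
    let mut map: HashMap<&str, u32> = HashMap::new();
    map.insert("some_crate.rcgu.o", 0);
    map.insert("rust.metadata.bin", 1);
    // Iterating `map` may yield "rust.metadata.bin" first or last.

    // A Vec of pairs keeps insertion order, so pushing the metadata member
    // last guarantees it is written last.
    let mut entries: Vec<(&str, u32)> = Vec::new();
    entries.push(("some_crate.rcgu.o", 0));
    entries.push(("rust.metadata.bin", 1));
    assert_eq!(entries.last().unwrap().0, "rust.metadata.bin");
}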

src/lib.rs

@@ -28,7 +28,6 @@ use rustc::util::common::ErrorReported;
 use rustc_codegen_ssa::back::linker::LinkerInfo;
 use rustc_codegen_ssa::CrateInfo;
 use rustc_codegen_utils::codegen_backend::CodegenBackend;
-use rustc_codegen_utils::link::out_filename;
 use rustc_mir::monomorphize::partitioning::CodegenUnitExt;
 
 use cranelift::codegen::settings;
@@ -46,8 +45,6 @@ mod common;
 mod constant;
 mod debuginfo;
 mod intrinsics;
-mod link;
-mod link_copied;
 mod linkage;
 mod main_shim;
 mod metadata;
@@ -343,13 +340,13 @@ impl CodegenBackend for CraneliftCodegenBackend {
             } else {
                 None
             },
-            metadata_module: CompiledModule {
+            metadata_module: Some(CompiledModule {
                 name: "dummy_metadata".to_string(),
                 kind: ModuleKind::Metadata,
                 object: None,
                 bytecode: None,
                 bytecode_compressed: None,
-            },
+            }),
             crate_hash: tcx.crate_hash(LOCAL_CRATE),
             metadata,
             windows_subsystem: None, // Windows is not yet supported
@@ -366,20 +363,21 @@ impl CodegenBackend for CraneliftCodegenBackend {
         _dep_graph: &DepGraph,
         outputs: &OutputFilenames,
     ) -> Result<(), ErrorReported> {
-        let res = *res
+        use rustc_codegen_ssa::back::link::link_binary;
+
+        let codegen_results = *res
             .downcast::<CodegenResults>()
             .expect("Expected CraneliftCodegenBackend's CodegenResult, found Box<Any>");
 
-        for &crate_type in sess.crate_types.borrow().iter() {
-            let output_name = out_filename(sess, crate_type, &outputs, &res.crate_name.as_str());
-            match crate_type {
-                CrateType::Rlib => link::link_rlib(sess, &res, output_name),
-                CrateType::Dylib | CrateType::Executable => {
-                    link::link_natively(sess, crate_type, &res, &output_name);
-                }
-                _ => sess.fatal(&format!("Unsupported crate type: {:?}", crate_type)),
-            }
-        }
+        let target_cpu = ::target_lexicon::HOST.to_string();
+        link_binary::<crate::archive::ArArchiveBuilder<'_>>(
+            sess,
+            &codegen_results,
+            outputs,
+            &codegen_results.crate_name.as_str(),
+            &target_cpu,
+        );
 
         Ok(())
     }
 }
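
Note on the hunk above: the backend no longer inspects sess.crate_types itself. link_binary from rustc_codegen_ssa walks the requested crate types and produces rlibs, dylibs and executables through the ArchiveBuilder implementation supplied as the type parameter (here crate::archive::ArArchiveBuilder), which is why out_filename and the hand-written match on CrateType can be dropped.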

src/link.rs (deleted)

@@ -1,463 +0,0 @@
use std::ascii;
use std::char;
use std::env;
use std::fs::File;
use std::io;
use std::path::{Path, PathBuf};
use std::str;
use tempfile::Builder as TempFileBuilder;
use rustc::session::config::{self, CrateType, DebugInfo, RUST_CGU_EXT};
use rustc::session::search_paths::PathKind;
use rustc::session::Session;
use rustc_codegen_ssa::METADATA_FILENAME;
use rustc_codegen_ssa::back::command::Command;
use rustc_codegen_ssa::back::link::*;
use rustc_codegen_ssa::back::linker::*;
use rustc_fs_util::fix_windows_verbatim_for_gcc;
use rustc_target::spec::{LinkerFlavor, PanicStrategy, RelroLevel};
use crate::prelude::*;
use crate::link_copied::*;
pub(crate) fn link_rlib(sess: &Session, res: &CodegenResults, output_name: PathBuf) {
let file = File::create(&output_name).unwrap();
let mut builder = ar::Builder::new(file);
// Add main object file
for module in &res.modules {
if let Some(ref object_path) = module.object {
let object = File::open(object_path).expect("Someone deleted our object file");
let object_len = object.metadata().unwrap().len();
builder
.append(
&ar::Header::new(
(module.name.to_string() + RUST_CGU_EXT).into_bytes(),
object_len,
),
object,
)
.unwrap();
}
}
// Non object files need to be added after object files, because ranlib will
// try to read the native architecture from the first file, even if it isn't
// an object file
builder
.append(
&ar::Header::new(
METADATA_FILENAME.as_bytes().to_vec(),
res.metadata.raw_data.len() as u64,
),
::std::io::Cursor::new(res.metadata.raw_data.clone()),
)
.unwrap();
// Finalize archive
std::mem::drop(builder);
// Run ranlib to be able to link the archive
let status = std::process::Command::new("ranlib")
.arg(output_name)
.status()
.expect("Couldn't run ranlib");
if !status.success() {
sess.fatal(&format!("Ranlib exited with code {:?}", status.code()));
}
}
pub(crate) fn link_natively(
sess: &Session,
crate_type: CrateType,
codegen_results: &CodegenResults,
out_filename: &Path,
) {
let tmpdir = match TempFileBuilder::new().prefix("rustc").tempdir() {
Ok(tmpdir) => tmpdir,
Err(err) => sess.fatal(&format!("couldn't create a temp dir: {}", err)),
};
let (linker, flavor) = linker_and_flavor(sess);
// The invocations of cc share some flags across platforms
let (pname, mut cmd) = get_linker(sess, &linker, flavor);
let root = sess.target_filesearch(PathKind::Native).get_lib_path();
if let Some(args) = sess.target.target.options.pre_link_args.get(&flavor) {
cmd.args(args);
}
if let Some(args) = sess.target.target.options.pre_link_args_crt.get(&flavor) {
if sess.crt_static() {
cmd.args(args);
}
}
if let Some(ref args) = sess.opts.debugging_opts.pre_link_args {
cmd.args(args);
}
cmd.args(&sess.opts.debugging_opts.pre_link_arg);
let pre_link_objects = if crate_type == config::CrateType::Executable {
&sess.target.target.options.pre_link_objects_exe
} else {
&sess.target.target.options.pre_link_objects_dll
};
for obj in pre_link_objects {
cmd.arg(root.join(obj));
}
if crate_type == config::CrateType::Executable && sess.crt_static() {
for obj in &sess.target.target.options.pre_link_objects_exe_crt {
cmd.arg(root.join(obj));
}
}
if sess.target.target.options.is_like_emscripten {
cmd.arg("-s");
cmd.arg(if sess.panic_strategy() == PanicStrategy::Abort {
"DISABLE_EXCEPTION_CATCHING=1"
} else {
"DISABLE_EXCEPTION_CATCHING=0"
});
}
{
let target_cpu = ::target_lexicon::HOST.to_string();
let mut linker = codegen_results.linker_info.to_linker(cmd, &sess, flavor, &target_cpu);
link_args(&mut *linker, flavor, sess, crate_type, tmpdir.path(),
out_filename, codegen_results);
cmd = linker.finalize();
}
if let Some(args) = sess.target.target.options.late_link_args.get(&flavor) {
cmd.args(args);
}
for obj in &sess.target.target.options.post_link_objects {
cmd.arg(root.join(obj));
}
if sess.crt_static() {
for obj in &sess.target.target.options.post_link_objects_crt {
cmd.arg(root.join(obj));
}
}
if let Some(args) = sess.target.target.options.post_link_args.get(&flavor) {
cmd.args(args);
}
for &(ref k, ref v) in &sess.target.target.options.link_env {
cmd.env(k, v);
}
if sess.opts.debugging_opts.print_link_args {
println!("{:?}", &cmd);
}
// May have not found libraries in the right formats.
sess.abort_if_errors();
// Invoke the system linker
//
// Note that there's a terribly awful hack that really shouldn't be present
// in any compiler. Here an environment variable is supported to
// automatically retry the linker invocation if the linker looks like it
// segfaulted.
//
// Gee that seems odd, normally segfaults are things we want to know about!
// Unfortunately though in rust-lang/rust#38878 we're experiencing the
// linker segfaulting on Travis quite a bit which is causing quite a bit of
// pain to land PRs when they spuriously fail due to a segfault.
//
// The issue #38878 has some more debugging information on it as well, but
// this unfortunately looks like it's just a race condition in macOS's linker
// with some thread pool working in the background. It seems that no one
// currently knows a fix for this so in the meantime we're left with this...
let retry_on_segfault = env::var("RUSTC_RETRY_LINKER_ON_SEGFAULT").is_ok();
let mut prog;
let mut i = 0;
loop {
i += 1;
prog = exec_linker(sess, &mut cmd, out_filename, tmpdir.path());
let output = match prog {
Ok(ref output) => output,
Err(_) => break,
};
if output.status.success() {
break
}
let mut out = output.stderr.clone();
out.extend(&output.stdout);
let out = String::from_utf8_lossy(&out);
// Check to see if the link failed with "unrecognized command line option:
// '-no-pie'" for gcc or "unknown argument: '-no-pie'" for clang. If so,
// reperform the link step without the -no-pie option. This is safe because
// if the linker doesn't support -no-pie then it should not default to
// linking executables as pie. Different versions of gcc seem to use
// different quotes in the error message so don't check for them.
if sess.target.target.options.linker_is_gnu &&
flavor != LinkerFlavor::Ld &&
(out.contains("unrecognized command line option") ||
out.contains("unknown argument")) &&
out.contains("-no-pie") &&
cmd.get_args().iter().any(|e| e.to_string_lossy() == "-no-pie") {
for arg in cmd.take_args() {
if arg.to_string_lossy() != "-no-pie" {
cmd.arg(arg);
}
}
continue;
}
if !retry_on_segfault || i > 3 {
break
}
let msg_segv = "clang: error: unable to execute command: Segmentation fault: 11";
let msg_bus = "clang: error: unable to execute command: Bus error: 10";
if !(out.contains(msg_segv) || out.contains(msg_bus)) {
break
}
}
match prog {
Ok(prog) => {
fn escape_string(s: &[u8]) -> String {
str::from_utf8(s).map(|s| s.to_owned())
.unwrap_or_else(|_| {
let mut x = "Non-UTF-8 output: ".to_string();
x.extend(s.iter()
.flat_map(|&b| ascii::escape_default(b))
.map(char::from));
x
})
}
if !prog.status.success() {
let mut output = prog.stderr.clone();
output.extend_from_slice(&prog.stdout);
sess.struct_err(&format!("linking with `{}` failed: {}",
pname.display(),
prog.status))
.note(&format!("{:?}", &cmd))
.note(&escape_string(&output))
.emit();
sess.abort_if_errors();
}
},
Err(e) => {
let linker_not_found = e.kind() == io::ErrorKind::NotFound;
let mut linker_error = {
if linker_not_found {
sess.struct_err(&format!("linker `{}` not found", pname.display()))
} else {
sess.struct_err(&format!("could not exec the linker `{}`", pname.display()))
}
};
linker_error.note(&e.to_string());
if !linker_not_found {
linker_error.note(&format!("{:?}", &cmd));
}
linker_error.emit();
if sess.target.target.options.is_like_msvc && linker_not_found {
sess.note_without_error("the msvc targets depend on the msvc linker \
but `link.exe` was not found");
sess.note_without_error("please ensure that VS 2013, VS 2015 or VS 2017 \
was installed with the Visual C++ option");
}
sess.abort_if_errors();
}
}
// On macOS, debuggers need this utility to get run to do some munging of
// the symbols. Note, though, that if the object files are being preserved
// for their debug information there's no need for us to run dsymutil.
if sess.target.target.options.is_like_osx &&
sess.opts.debuginfo != DebugInfo::None
{
if let Err(e) = Command::new("dsymutil").arg(out_filename).output() {
sess.fatal(&format!("failed to run dsymutil: {}", e))
}
}
}
fn link_args(cmd: &mut dyn Linker,
flavor: LinkerFlavor,
sess: &Session,
crate_type: config::CrateType,
tmpdir: &Path,
out_filename: &Path,
codegen_results: &CodegenResults) {
// Linker plugins should be specified early in the list of arguments
cmd.linker_plugin_lto();
// The default library location, we need this to find the runtime.
// The location of crates will be determined as needed.
let lib_path = sess.target_filesearch(PathKind::All).get_lib_path();
// target descriptor
let t = &sess.target.target;
cmd.include_path(&fix_windows_verbatim_for_gcc(&lib_path));
for obj in codegen_results.modules.iter().filter_map(|m| m.object.as_ref()) {
cmd.add_object(obj);
}
cmd.output_filename(out_filename);
if crate_type == config::CrateType::Executable &&
sess.target.target.options.is_like_windows {
if let Some(ref s) = codegen_results.windows_subsystem {
cmd.subsystem(s);
}
}
// If we're building a dynamic library then some platforms need to make sure
// that all symbols are exported correctly from the dynamic library.
if crate_type != config::CrateType::Executable ||
sess.target.target.options.is_like_emscripten {
cmd.export_symbols(tmpdir, crate_type);
}
// When linking a dynamic library, we put the metadata into a section of the
// executable. This metadata is in a separate object file from the main
// object file, so we link that in here.
if crate_type == config::CrateType::Dylib ||
crate_type == config::CrateType::ProcMacro {
if let Some(obj) = codegen_results.metadata_module.object.as_ref() {
cmd.add_object(obj);
}
}
let obj = codegen_results.allocator_module
.as_ref()
.and_then(|m| m.object.as_ref());
if let Some(obj) = obj {
cmd.add_object(obj);
}
// Try to strip as much out of the generated object by removing unused
// sections if possible. See more comments in linker.rs
if !sess.opts.cg.link_dead_code {
let keep_metadata = crate_type == config::CrateType::Dylib;
cmd.gc_sections(keep_metadata);
}
let used_link_args = &codegen_results.crate_info.link_args;
if crate_type == config::CrateType::Executable {
let mut position_independent_executable = false;
if t.options.position_independent_executables {
let empty_vec = Vec::new();
let args = sess.opts.cg.link_args.as_ref().unwrap_or(&empty_vec);
let more_args = &sess.opts.cg.link_arg;
let mut args = args.iter().chain(more_args.iter()).chain(used_link_args.iter());
if !sess.crt_static() && !args.any(|x| *x == "-static") {
position_independent_executable = true;
}
}
if position_independent_executable {
cmd.position_independent_executable();
} else {
// recent versions of gcc can be configured to generate position
// independent executables by default. We have to pass -no-pie to
// explicitly turn that off. Not applicable to ld.
if sess.target.target.options.linker_is_gnu
&& flavor != LinkerFlavor::Ld {
cmd.no_position_independent_executable();
}
}
}
let relro_level = match sess.opts.debugging_opts.relro_level {
Some(level) => level,
None => t.options.relro_level,
};
match relro_level {
RelroLevel::Full => {
cmd.full_relro();
},
RelroLevel::Partial => {
cmd.partial_relro();
},
RelroLevel::Off => {
cmd.no_relro();
},
RelroLevel::None => {
},
}
// Pass optimization flags down to the linker.
cmd.optimize();
// Pass debuginfo flags down to the linker.
cmd.debuginfo();
// We want to, by default, prevent the compiler from accidentally leaking in
// any system libraries, so we may explicitly ask linkers to not link to any
// libraries by default. Note that this does not happen for windows because
// windows pulls in some large number of libraries and I couldn't quite
// figure out which subset we wanted.
//
// This is all naturally configurable via the standard methods as well.
if !sess.opts.cg.default_linker_libraries.unwrap_or(false) &&
t.options.no_default_libraries
{
cmd.no_default_libraries();
}
// Take careful note of the ordering of the arguments we pass to the linker
// here. Linkers will assume that things on the left depend on things to the
// right. Things on the right cannot depend on things on the left. This is
// all formally implemented in terms of resolving symbols (libs on the right
// resolve unknown symbols of libs on the left, but not vice versa).
//
// For this reason, we have organized the arguments we pass to the linker as
// such:
//
// 1. The local object that LLVM just generated
// 2. Local native libraries
// 3. Upstream rust libraries
// 4. Upstream native libraries
//
// The rationale behind this ordering is that those items lower down in the
// list can't depend on items higher up in the list. For example nothing can
// depend on what we just generated (e.g., that'd be a circular dependency).
// Upstream rust libraries are not allowed to depend on our local native
// libraries as that would violate the structure of the DAG, in that
// scenario they are required to link to them as well in a shared fashion.
//
// Note that upstream rust libraries may contain native dependencies as
// well, but they also can't depend on what we just started to add to the
// link line. And finally upstream native libraries can't depend on anything
// in this DAG so far because they're only dylibs and dylibs can only depend
// on other dylibs (e.g., other native deps).
add_local_native_libraries(cmd, sess, codegen_results);
add_upstream_rust_crates(cmd, sess, codegen_results, crate_type, tmpdir);
add_upstream_native_libraries(cmd, sess, codegen_results, crate_type);
// Tell the linker what we're doing.
if crate_type != config::CrateType::Executable {
cmd.build_dylib(out_filename);
}
if crate_type == config::CrateType::Executable && sess.crt_static() {
cmd.build_static_executable();
}
if sess.opts.debugging_opts.pgo_gen.enabled() {
cmd.pgo_gen();
}
// Finally add all the linker arguments provided on the command line along
// with any #[link_args] attributes found inside the crate
if let Some(ref args) = sess.opts.cg.link_args {
cmd.args(args);
}
cmd.args(&sess.opts.cg.link_arg);
cmd.args(&used_link_args);
}

src/link_copied.rs (deleted)

@@ -1,336 +0,0 @@
//! All functions here are copied from https://github.com/rust-lang/rust/blob/942864a000efd74b73e36bda5606b2cdb55ecf39/src/librustc_codegen_llvm/back/link.rs
use std::path::Path;
use rustc::middle::cstore::NativeLibraryKind;
use rustc::middle::dependency_format::Linkage;
use rustc::session::config::{self, OutputType, RUST_CGU_EXT};
use rustc::session::Session;
use rustc::util::common::time;
use rustc_codegen_ssa::{METADATA_FILENAME, RLIB_BYTECODE_EXTENSION};
use rustc_codegen_ssa::back::archive::ArchiveBuilder;
use rustc_codegen_ssa::back::linker::*;
use rustc_codegen_ssa::back::link::*;
use rustc_data_structures::fx::FxHashSet;
use rustc_fs_util::fix_windows_verbatim_for_gcc;
use crate::prelude::*;
use crate::archive::ArArchiveBuilder;
// # Rust Crate linking
//
// Rust crates are not considered at all when creating an rlib output. All
// dependencies will be linked when producing the final output (instead of
// the intermediate rlib version)
pub fn add_upstream_rust_crates(cmd: &mut dyn Linker,
sess: &Session,
codegen_results: &CodegenResults,
crate_type: config::CrateType,
tmpdir: &Path) {
// All of the heavy lifting has previously been accomplished by the
// dependency_format module of the compiler. This is just crawling the
// output of that module, adding crates as necessary.
//
// Linking to a rlib involves just passing it to the linker (the linker
// will slurp up the object files inside), and linking to a dynamic library
// involves just passing the right -l flag.
let formats = sess.dependency_formats.borrow();
let data = formats.get(&crate_type).unwrap();
// Invoke get_used_crates to ensure that we get a topological sorting of
// crates.
let deps = &codegen_results.crate_info.used_crates_dynamic;
// There's a few internal crates in the standard library (aka libcore and
// libstd) which actually have a circular dependence upon one another. This
// currently arises through "weak lang items" where libcore requires things
// like `rust_begin_unwind` but libstd ends up defining it. To get this
// circular dependence to work correctly in all situations we'll need to be
// sure to correctly apply the `--start-group` and `--end-group` options to
// GNU linkers, otherwise if we don't use any other symbol from the standard
// library it'll get discarded and the whole application won't link.
//
// In this loop we're calculating the `group_end`, after which crate to
// pass `--end-group` and `group_start`, before which crate to pass
// `--start-group`. We currently do this by passing `--end-group` after
// the first crate (when iterating backwards) that requires a lang item
// defined somewhere else. Once that's set then when we've defined all the
// necessary lang items we'll pass `--start-group`.
//
// Note that this isn't amazing logic for now but it should do the trick
// for the current implementation of the standard library.
let mut group_end = None;
let mut group_start = None;
let mut end_with = FxHashSet::default();
let info = &codegen_results.crate_info;
for &(cnum, _) in deps.iter().rev() {
if let Some(missing) = info.missing_lang_items.get(&cnum) {
end_with.extend(missing.iter().cloned());
if end_with.len() > 0 && group_end.is_none() {
group_end = Some(cnum);
}
}
end_with.retain(|item| info.lang_item_to_crate.get(item) != Some(&cnum));
if end_with.len() == 0 && group_end.is_some() {
group_start = Some(cnum);
break
}
}
// If we didn't end up filling in all lang items from upstream crates then
// we'll be filling it in with our crate. This probably means we're the
// standard library itself, so skip this for now.
if group_end.is_some() && group_start.is_none() {
group_end = None;
}
let mut compiler_builtins = None;
for &(cnum, _) in deps.iter() {
if group_start == Some(cnum) {
cmd.group_start();
}
// We may not pass all crates through to the linker. Some crates may
// appear statically in an existing dylib, meaning we'll pick up all the
// symbols from the dylib.
let src = &codegen_results.crate_info.used_crate_source[&cnum];
match data[cnum.as_usize() - 1] {
_ if codegen_results.crate_info.profiler_runtime == Some(cnum) => {
add_static_crate(cmd, sess, codegen_results, tmpdir, crate_type, cnum);
}
_ if codegen_results.crate_info.sanitizer_runtime == Some(cnum) => {
link_sanitizer_runtime(cmd, sess, codegen_results, tmpdir, cnum);
}
// compiler-builtins are always placed last to ensure that they're
// linked correctly.
_ if codegen_results.crate_info.compiler_builtins == Some(cnum) => {
assert!(compiler_builtins.is_none());
compiler_builtins = Some(cnum);
}
Linkage::NotLinked |
Linkage::IncludedFromDylib => {}
Linkage::Static => {
add_static_crate(cmd, sess, codegen_results, tmpdir, crate_type, cnum);
}
Linkage::Dynamic => {
add_dynamic_crate(cmd, sess, &src.dylib.as_ref().unwrap().0)
}
}
if group_end == Some(cnum) {
cmd.group_end();
}
}
// compiler-builtins are always placed last to ensure that they're
// linked correctly.
// We must always link the `compiler_builtins` crate statically. Even if it
// was already "included" in a dylib (e.g., `libstd` when `-C prefer-dynamic`
// is used)
if let Some(cnum) = compiler_builtins {
add_static_crate(cmd, sess, codegen_results, tmpdir, crate_type, cnum);
}
// Converts a library file-stem into a cc -l argument
fn unlib<'a>(config: &config::Config, stem: &'a str) -> &'a str {
if stem.starts_with("lib") && !config.target.options.is_like_windows {
&stem[3..]
} else {
stem
}
}
// We must link the sanitizer runtime using -Wl,--whole-archive but since
// it's packed in a .rlib, it contains stuff that are not objects that will
// make the linker error. So we must remove those bits from the .rlib before
// linking it.
fn link_sanitizer_runtime(cmd: &mut dyn Linker,
sess: &Session,
codegen_results: &CodegenResults,
tmpdir: &Path,
cnum: CrateNum) {
let src = &codegen_results.crate_info.used_crate_source[&cnum];
let cratepath = &src.rlib.as_ref().unwrap().0;
if sess.target.target.options.is_like_osx {
// On Apple platforms, the sanitizer is always built as a dylib, and
// LLVM will link to `@rpath/*.dylib`, so we need to specify an
// rpath to the library as well (the rpath should be absolute, see
// PR #41352 for details).
//
// FIXME: Remove this logic into librustc_*san once Cargo supports it
let rpath = cratepath.parent().unwrap();
let rpath = rpath.to_str().expect("non-utf8 component in path");
cmd.args(&["-Wl,-rpath".into(), "-Xlinker".into(), rpath.into()]);
}
let dst = tmpdir.join(cratepath.file_name().unwrap());
let mut archive = ArArchiveBuilder::new(sess, &dst, Some(cratepath));
archive.update_symbols();
for f in archive.src_files() {
if f.ends_with(RLIB_BYTECODE_EXTENSION) || f == METADATA_FILENAME {
archive.remove_file(&f);
}
}
archive.build();
cmd.link_whole_rlib(&dst);
}
// Adds the static "rlib" versions of all crates to the command line.
// There's a bit of magic which happens here specifically related to LTO and
// dynamic libraries. Specifically:
//
// * For LTO, we remove upstream object files.
// * For dylibs we remove metadata and bytecode from upstream rlibs
//
// When performing LTO, almost(*) all of the bytecode from the upstream
// libraries has already been included in our object file output. As a
// result we need to remove the object files in the upstream libraries so
// the linker doesn't try to include them twice (or whine about duplicate
// symbols). We must continue to include the rest of the rlib, however, as
// it may contain static native libraries which must be linked in.
//
// (*) Crates marked with `#![no_builtins]` don't participate in LTO and
// their bytecode wasn't included. The object files in those libraries must
// still be passed to the linker.
//
// When making a dynamic library, linkers by default don't include any
// object files in an archive if they're not necessary to resolve the link.
// We basically want to convert the archive (rlib) to a dylib, though, so we
// *do* want everything included in the output, regardless of whether the
// linker thinks it's needed or not. As a result we must use the
// --whole-archive option (or the platform equivalent). When using this
// option the linker will fail if there are non-objects in the archive (such
// as our own metadata and/or bytecode). All in all, for rlibs to be
// entirely included in dylibs, we need to remove all non-object files.
//
// Note, however, that if we're not doing LTO or we're not producing a dylib
// (aka we're making an executable), we can just pass the rlib blindly to
// the linker (fast) because it's fine if it's not actually included as
// we're at the end of the dependency chain.
fn add_static_crate(cmd: &mut dyn Linker,
sess: &Session,
codegen_results: &CodegenResults,
tmpdir: &Path,
crate_type: config::CrateType,
cnum: CrateNum) {
let src = &codegen_results.crate_info.used_crate_source[&cnum];
let cratepath = &src.rlib.as_ref().unwrap().0;
// See the comment above in `link_staticlib` and `link_rlib` for why if
// there's a static library that's not relevant we skip all object
// files.
let native_libs = &codegen_results.crate_info.native_libraries[&cnum];
let skip_native = native_libs.iter().any(|lib| {
lib.kind == NativeLibraryKind::NativeStatic && !relevant_lib(sess, lib)
});
if (!are_upstream_rust_objects_already_included(sess) ||
ignored_for_lto(sess, &codegen_results.crate_info, cnum)) &&
crate_type != config::CrateType::Dylib &&
!skip_native {
cmd.link_rlib(&fix_windows_verbatim_for_gcc(cratepath));
return
}
let dst = tmpdir.join(cratepath.file_name().unwrap());
let name = cratepath.file_name().unwrap().to_str().unwrap();
let name = &name[3..name.len() - 5]; // chop off lib/.rlib
time(sess, &format!("altering {}.rlib", name), || {
let mut archive = ArArchiveBuilder::new(sess, &dst, Some(cratepath));
archive.update_symbols();
let mut any_objects = false;
for f in archive.src_files() {
if f.ends_with(RLIB_BYTECODE_EXTENSION) || f == METADATA_FILENAME {
archive.remove_file(&f);
continue
}
let canonical = f.replace("-", "_");
let canonical_name = name.replace("-", "_");
// Look for `.rcgu.o` at the end of the filename to conclude
// that this is a Rust-related object file.
fn looks_like_rust(s: &str) -> bool {
let path = Path::new(s);
let ext = path.extension().and_then(|s| s.to_str());
if ext != Some(OutputType::Object.extension()) {
return false
}
let ext2 = path.file_stem()
.and_then(|s| Path::new(s).extension())
.and_then(|s| s.to_str());
ext2 == Some(RUST_CGU_EXT)
}
let is_rust_object =
canonical.starts_with(&canonical_name) &&
looks_like_rust(&f);
// If we've been requested to skip all native object files
// (those not generated by the rust compiler) then we can skip
// this file. See above for why we may want to do this.
let skip_because_cfg_say_so = skip_native && !is_rust_object;
// If we're performing LTO and this is a rust-generated object
// file, then we don't need the object file as it's part of the
// LTO module. Note that `#![no_builtins]` is excluded from LTO,
// though, so we let that object file slide.
let skip_because_lto = are_upstream_rust_objects_already_included(sess) &&
is_rust_object &&
(sess.target.target.options.no_builtins ||
!codegen_results.crate_info.is_no_builtins.contains(&cnum));
if skip_because_cfg_say_so || skip_because_lto {
archive.remove_file(&f);
} else {
any_objects = true;
}
}
if !any_objects {
return
}
archive.build();
// If we're creating a dylib, then we need to include the
// whole of each object in our archive into that artifact. This is
// because a `dylib` can be reused as an intermediate artifact.
//
// Note, though, that we don't want to include the whole of a
// compiler-builtins crate (e.g., compiler-rt) because it'll get
// repeatedly linked anyway.
if crate_type == config::CrateType::Dylib &&
codegen_results.crate_info.compiler_builtins != Some(cnum) {
cmd.link_whole_rlib(&fix_windows_verbatim_for_gcc(&dst));
} else {
cmd.link_rlib(&fix_windows_verbatim_for_gcc(&dst));
}
});
}
// Same thing as above, but for dynamic crates instead of static crates.
fn add_dynamic_crate(cmd: &mut dyn Linker, sess: &Session, cratepath: &Path) {
// If we're performing LTO, then it should have been previously required
// that all upstream rust dependencies were available in an rlib format.
assert!(!are_upstream_rust_objects_already_included(sess));
// Just need to tell the linker about where the library lives and
// what its name is
let parent = cratepath.parent();
if let Some(dir) = parent {
cmd.include_path(&fix_windows_verbatim_for_gcc(dir));
}
let filestem = cratepath.file_stem().unwrap().to_str().unwrap();
cmd.link_rust_dylib(&unlib(&sess.target, filestem),
parent.unwrap_or(Path::new("")));
}
}