Add support for storing code model to LLVM module IR #74002

Closed
2 changes: 1 addition & 1 deletion src/librustc_codegen_llvm/back/write.rs
@@ -108,7 +108,7 @@ fn to_llvm_relocation_model(relocation_model: RelocModel) -> llvm::RelocModel {
}
}

fn to_llvm_code_model(code_model: Option<CodeModel>) -> llvm::CodeModel {
pub(crate) fn to_llvm_code_model(code_model: Option<CodeModel>) -> llvm::CodeModel {
match code_model {
Some(CodeModel::Tiny) => llvm::CodeModel::Tiny,
Some(CodeModel::Small) => llvm::CodeModel::Small,
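The rest of this function is collapsed in the diff above; for reference, the full mapping that the now-`pub(crate)` helper performs looks roughly like the following sketch (the remaining arms are reconstructed here and are not part of the visible hunk):

pub(crate) fn to_llvm_code_model(code_model: Option<CodeModel>) -> llvm::CodeModel {
    match code_model {
        Some(CodeModel::Tiny) => llvm::CodeModel::Tiny,
        Some(CodeModel::Small) => llvm::CodeModel::Small,
        Some(CodeModel::Kernel) => llvm::CodeModel::Kernel,
        Some(CodeModel::Medium) => llvm::CodeModel::Medium,
        Some(CodeModel::Large) => llvm::CodeModel::Large,
        // No -C code-model flag given: let LLVM fall back to its default.
        None => llvm::CodeModel::None,
    }
}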
8 changes: 8 additions & 0 deletions src/librustc_codegen_llvm/context.rs
@@ -1,4 +1,5 @@
use crate::attributes;
use crate::back::write::to_llvm_code_model;
use crate::callee::get_fn;
use crate::coverageinfo;
use crate::debuginfo;
@@ -181,6 +182,13 @@ pub unsafe fn create_module(
}
}

// Linking object files with different code models is undefined behavior
// because the compiler would have to generate additional code (to span
// longer jumps) if a larger code model is used with a smaller one.
//
// See https://reviews.llvm.org/D52322 and https://reviews.llvm.org/D52323.
llvm::LLVMRustSetModuleCodeModel(llmod, to_llvm_code_model(sess.code_model()));

// If skipping the PLT is enabled, we need to add some module metadata
// to ensure intrinsic calls don't use it.
if !sess.needs_plt() {
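The net effect of the call above is that LLVM records the chosen code model as a module flag with "Error" merge behavior, so the LLVM IR linker (for example during LTO) reports conflicting values instead of silently combining modules built with different models. For `-C code-model=small` the emitted IR contains metadata along these lines (the numeric values match the test added below):

!llvm.module.flags = !{!0}
!0 = !{i32 1, !"Code Model", i32 1}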
1 change: 1 addition & 0 deletions src/librustc_codegen_llvm/llvm/ffi.rs
@@ -2129,6 +2129,7 @@ extern "C" {
pub fn LLVMRustUnsetComdat(V: &Value);
pub fn LLVMRustSetModulePICLevel(M: &Module);
pub fn LLVMRustSetModulePIELevel(M: &Module);
pub fn LLVMRustSetModuleCodeModel(M: &Module, Model: CodeModel);
pub fn LLVMRustModuleBufferCreate(M: &Module) -> &'static mut ModuleBuffer;
pub fn LLVMRustModuleBufferPtr(p: &ModuleBuffer) -> *const u8;
pub fn LLVMRustModuleBufferLen(p: &ModuleBuffer) -> usize;
8 changes: 8 additions & 0 deletions src/rustllvm/PassWrapper.cpp
@@ -1163,6 +1163,14 @@ extern "C" void LLVMRustSetModulePIELevel(LLVMModuleRef M) {
unwrap(M)->setPIELevel(PIELevel::Level::Large);
}

extern "C" void LLVMRustSetModuleCodeModel(LLVMModuleRef M,
LLVMRustCodeModel Model) {
auto CM = fromRust(Model);
if (!CM.hasValue())
return;
unwrap(M)->setCodeModel(*CM);
}

// Here you'll find an implementation of ThinLTO as used by the Rust compiler
// right now. This ThinLTO support is only enabled on "recent ish" versions of
// LLVM, and otherwise it's just blanket rejected from other compilers.
18 changes: 18 additions & 0 deletions src/test/codegen/codemodels.rs
@@ -0,0 +1,18 @@
// revisions: NOMODEL MODEL-SMALL MODEL-KERNEL MODEL-MEDIUM MODEL-LARGE
//[NOMODEL] compile-flags:
//[MODEL-SMALL] compile-flags: -C code-model=small
//[MODEL-KERNEL] compile-flags: -C code-model=kernel
//[MODEL-MEDIUM] compile-flags: -C code-model=medium
//[MODEL-LARGE] compile-flags: -C code-model=large

#![crate_type = "lib"]

// MODEL-SMALL: !llvm.module.flags = !{{{.*}}}
// MODEL-SMALL: !{{[0-9]+}} = !{i32 1, !"Code Model", i32 1}
// MODEL-KERNEL: !llvm.module.flags = !{{{.*}}}
// MODEL-KERNEL: !{{[0-9]+}} = !{i32 1, !"Code Model", i32 2}
// MODEL-MEDIUM: !llvm.module.flags = !{{{.*}}}
// MODEL-MEDIUM: !{{[0-9]+}} = !{i32 1, !"Code Model", i32 3}
// MODEL-LARGE: !llvm.module.flags = !{{{.*}}}
// MODEL-LARGE: !{{[0-9]+}} = !{i32 1, !"Code Model", i32 4}
// NOMODEL-NOT: Code Model
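Not part of the change itself, but one way to observe the new behavior locally is to emit LLVM IR for a small library crate and look for the module flag (the file name foo.rs is just an illustration; the expected value follows the MODEL-SMALL checks above):

rustc --crate-type=lib --emit=llvm-ir -C code-model=small foo.rs
grep "Code Model" foo.ll
# expected output contains something like: !{i32 1, !"Code Model", i32 1}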