From 565a9bf20b726c5182a6c4c816c1d9a4108c778a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bj=C3=B6rn=20Steinbrink?= Date: Sun, 21 Jul 2013 15:33:40 +0200 Subject: [PATCH] Provide lower level access to the LLVM IR builder Currently, the helper functions in the "build" module can only append at the end of a block. For certain things we'll want to be able to insert code at arbitrary locations inside a block though. Although we can do that by directly calling the LLVM functions, that is rather ugly and means that some things need to be implemented twice. Once in terms of the helper functions and once in terms of low level LLVM functions. Instead of doing that, we should provide a Builder type that provides low level access to the builder, and which can be used by both, the helper functions in the "build" module, as well as larger units of abstraction that combine several LLVM instructions. --- src/librustc/middle/trans/base.rs | 1 + src/librustc/middle/trans/build.rs | 759 +++++--------------- src/librustc/middle/trans/builder.rs | 947 +++++++++++++++++++++++++ src/librustc/middle/trans/context.rs | 5 + src/librustc/middle/trans/debuginfo.rs | 4 +- src/librustc/middle/trans/mod.rs | 1 + 6 files changed, 1149 insertions(+), 568 deletions(-) create mode 100644 src/librustc/middle/trans/builder.rs diff --git a/src/librustc/middle/trans/base.rs b/src/librustc/middle/trans/base.rs index 45a44e7a8a7..5536fa6daa7 100644 --- a/src/librustc/middle/trans/base.rs +++ b/src/librustc/middle/trans/base.rs @@ -41,6 +41,7 @@ use middle::trans::_match; use middle::trans::adt; use middle::trans::base; use middle::trans::build::*; +use middle::trans::builder::noname; use middle::trans::callee; use middle::trans::common::*; use middle::trans::consts; diff --git a/src/librustc/middle/trans/build.rs b/src/librustc/middle/trans/build.rs index 26fce42f8e3..7861f658f53 100644 --- a/src/librustc/middle/trans/build.rs +++ b/src/librustc/middle/trans/build.rs @@ -11,21 +11,17 @@ use lib::llvm::llvm; 
use lib::llvm::{CallConv, AtomicBinOp, AtomicOrdering, AsmDialect}; -use lib::llvm::{Opcode, IntPredicate, RealPredicate, False}; -use lib::llvm::{ValueRef, BasicBlockRef, BuilderRef, ModuleRef}; +use lib::llvm::{Opcode, IntPredicate, RealPredicate}; +use lib::llvm::{ValueRef, BasicBlockRef}; use lib; use middle::trans::common::*; -use middle::trans::machine::llalign_of_min; use syntax::codemap::span; -use middle::trans::base; +use middle::trans::builder::Builder; use middle::trans::type_::Type; use std::cast; use std::libc::{c_uint, c_ulonglong, c_char}; -use std::hashmap::HashMap; -use std::str; -use std::vec; pub fn terminate(cx: block, _: &str) { cx.terminated = true; @@ -37,56 +33,12 @@ pub fn check_not_terminated(cx: block) { } } -pub fn B(cx: block) -> BuilderRef { - unsafe { - let b = cx.fcx.ccx.builder.B; - llvm::LLVMPositionBuilderAtEnd(b, cx.llbb); - return b; - } +pub fn B(cx: block) -> Builder { + let b = cx.fcx.ccx.builder(); + b.position_at_end(cx.llbb); + b } -pub fn count_insn(cx: block, category: &str) { - if cx.ccx().sess.trans_stats() { - cx.ccx().stats.n_llvm_insns += 1; - } - do base::with_insn_ctxt |v| { - let h = &mut cx.ccx().stats.llvm_insns; - - // Build version of path with cycles removed. - - // Pass 1: scan table mapping str -> rightmost pos. - let mut mm = HashMap::new(); - let len = v.len(); - let mut i = 0u; - while i < len { - mm.insert(v[i], i); - i += 1u; - } - - // Pass 2: concat strings for each elt, skipping - // forwards over any cycles by advancing to rightmost - // occurrence of each element in path. - let mut s = ~"."; - i = 0u; - while i < len { - i = *mm.get(&v[i]); - s.push_char('/'); - s.push_str(v[i]); - i += 1u; - } - - s.push_char('/'); - s.push_str(category); - - let n = match h.find(&s) { - Some(&n) => n, - _ => 0u - }; - h.insert(s, n+1u); - } -} - - // The difference between a block being unreachable and being terminated is // somewhat obscure, and has to do with error checking. 
When a block is // terminated, we're saying that trying to add any further statements in the @@ -96,64 +48,47 @@ pub fn count_insn(cx: block, category: &str) { // further instructions to the block should simply be ignored. pub fn RetVoid(cx: block) { - unsafe { - if cx.unreachable { return; } - check_not_terminated(cx); - terminate(cx, "RetVoid"); - count_insn(cx, "retvoid"); - llvm::LLVMBuildRetVoid(B(cx)); - } + if cx.unreachable { return; } + check_not_terminated(cx); + terminate(cx, "RetVoid"); + B(cx).ret_void(); } pub fn Ret(cx: block, V: ValueRef) { - unsafe { - if cx.unreachable { return; } - check_not_terminated(cx); - terminate(cx, "Ret"); - count_insn(cx, "ret"); - llvm::LLVMBuildRet(B(cx), V); - } + if cx.unreachable { return; } + check_not_terminated(cx); + terminate(cx, "Ret"); + B(cx).ret(V); } pub fn AggregateRet(cx: block, RetVals: &[ValueRef]) { if cx.unreachable { return; } check_not_terminated(cx); terminate(cx, "AggregateRet"); - unsafe { - llvm::LLVMBuildAggregateRet(B(cx), vec::raw::to_ptr(RetVals), - RetVals.len() as c_uint); - } + B(cx).aggregate_ret(RetVals); } pub fn Br(cx: block, Dest: BasicBlockRef) { - unsafe { - if cx.unreachable { return; } - check_not_terminated(cx); - terminate(cx, "Br"); - count_insn(cx, "br"); - llvm::LLVMBuildBr(B(cx), Dest); - } + if cx.unreachable { return; } + check_not_terminated(cx); + terminate(cx, "Br"); + B(cx).br(Dest); } pub fn CondBr(cx: block, If: ValueRef, Then: BasicBlockRef, Else: BasicBlockRef) { - unsafe { - if cx.unreachable { return; } - check_not_terminated(cx); - terminate(cx, "CondBr"); - count_insn(cx, "condbr"); - llvm::LLVMBuildCondBr(B(cx), If, Then, Else); - } + if cx.unreachable { return; } + check_not_terminated(cx); + terminate(cx, "CondBr"); + B(cx).cond_br(If, Then, Else); } pub fn Switch(cx: block, V: ValueRef, Else: BasicBlockRef, NumCases: uint) -> ValueRef { - unsafe { - if cx.unreachable { return _Undef(V); } - check_not_terminated(cx); - terminate(cx, "Switch"); - return 
llvm::LLVMBuildSwitch(B(cx), V, Else, NumCases as c_uint); - } + if cx.unreachable { return _Undef(V); } + check_not_terminated(cx); + terminate(cx, "Switch"); + B(cx).switch(V, Else, NumCases) } pub fn AddCase(S: ValueRef, OnVal: ValueRef, Dest: BasicBlockRef) { @@ -164,22 +99,10 @@ pub fn AddCase(S: ValueRef, OnVal: ValueRef, Dest: BasicBlockRef) { } pub fn IndirectBr(cx: block, Addr: ValueRef, NumDests: uint) { - unsafe { - if cx.unreachable { return; } - check_not_terminated(cx); - terminate(cx, "IndirectBr"); - count_insn(cx, "indirectbr"); - llvm::LLVMBuildIndirectBr(B(cx), Addr, NumDests as c_uint); - } -} - -// This is a really awful way to get a zero-length c-string, but better (and a -// lot more efficient) than doing str::as_c_str("", ...) every time. -pub fn noname() -> *c_char { - unsafe { - static cnull: uint = 0u; - return cast::transmute(&cnull); - } + if cx.unreachable { return; } + check_not_terminated(cx); + terminate(cx, "IndirectBr"); + B(cx).indirect_br(Addr, NumDests); } pub fn Invoke(cx: block, @@ -196,16 +119,7 @@ pub fn Invoke(cx: block, debug!("Invoke(%s with arguments (%s))", cx.val_to_str(Fn), Args.map(|a| cx.val_to_str(*a)).connect(", ")); - unsafe { - count_insn(cx, "invoke"); - llvm::LLVMBuildInvoke(B(cx), - Fn, - vec::raw::to_ptr(Args), - Args.len() as c_uint, - Then, - Catch, - noname()) - } + B(cx).invoke(Fn, Args, Then, Catch) } pub fn FastInvoke(cx: block, Fn: ValueRef, Args: &[ValueRef], @@ -213,23 +127,14 @@ pub fn FastInvoke(cx: block, Fn: ValueRef, Args: &[ValueRef], if cx.unreachable { return; } check_not_terminated(cx); terminate(cx, "FastInvoke"); - unsafe { - count_insn(cx, "fastinvoke"); - let v = llvm::LLVMBuildInvoke(B(cx), Fn, vec::raw::to_ptr(Args), - Args.len() as c_uint, - Then, Catch, noname()); - lib::llvm::SetInstructionCallConv(v, lib::llvm::FastCallConv); - } + B(cx).fast_invoke(Fn, Args, Then, Catch); } pub fn Unreachable(cx: block) { - unsafe { - if cx.unreachable { return; } - cx.unreachable = true; - if 
!cx.terminated { - count_insn(cx, "unreachable"); - llvm::LLVMBuildUnreachable(B(cx)); - } + if cx.unreachable { return; } + cx.unreachable = true; + if !cx.terminated { + B(cx).unreachable(); } } @@ -241,298 +146,192 @@ pub fn _Undef(val: ValueRef) -> ValueRef { /* Arithmetic */ pub fn Add(cx: block, LHS: ValueRef, RHS: ValueRef) -> ValueRef { - unsafe { - if cx.unreachable { return _Undef(LHS); } - count_insn(cx, "add"); - return llvm::LLVMBuildAdd(B(cx), LHS, RHS, noname()); - } + if cx.unreachable { return _Undef(LHS); } + B(cx).add(LHS, RHS) } pub fn NSWAdd(cx: block, LHS: ValueRef, RHS: ValueRef) -> ValueRef { - unsafe { - if cx.unreachable { return _Undef(LHS); } - count_insn(cx, "nswadd"); - return llvm::LLVMBuildNSWAdd(B(cx), LHS, RHS, noname()); - } + if cx.unreachable { return _Undef(LHS); } + B(cx).nswadd(LHS, RHS) } pub fn NUWAdd(cx: block, LHS: ValueRef, RHS: ValueRef) -> ValueRef { - unsafe { - if cx.unreachable { return _Undef(LHS); } - count_insn(cx, "nuwadd"); - return llvm::LLVMBuildNUWAdd(B(cx), LHS, RHS, noname()); - } + if cx.unreachable { return _Undef(LHS); } + B(cx).nuwadd(LHS, RHS) } pub fn FAdd(cx: block, LHS: ValueRef, RHS: ValueRef) -> ValueRef { - unsafe { - if cx.unreachable { return _Undef(LHS); } - count_insn(cx, "fadd"); - return llvm::LLVMBuildFAdd(B(cx), LHS, RHS, noname()); - } + if cx.unreachable { return _Undef(LHS); } + B(cx).fadd(LHS, RHS) } pub fn Sub(cx: block, LHS: ValueRef, RHS: ValueRef) -> ValueRef { - unsafe { - if cx.unreachable { return _Undef(LHS); } - count_insn(cx, "sub"); - return llvm::LLVMBuildSub(B(cx), LHS, RHS, noname()); - } + if cx.unreachable { return _Undef(LHS); } + B(cx).sub(LHS, RHS) } pub fn NSWSub(cx: block, LHS: ValueRef, RHS: ValueRef) -> ValueRef { - unsafe { - if cx.unreachable { return _Undef(LHS); } - count_insn(cx, "nwsub"); - return llvm::LLVMBuildNSWSub(B(cx), LHS, RHS, noname()); - } + if cx.unreachable { return _Undef(LHS); } + B(cx).nswsub(LHS, RHS) } pub fn NUWSub(cx: block, LHS: 
ValueRef, RHS: ValueRef) -> ValueRef { - unsafe { - if cx.unreachable { return _Undef(LHS); } - count_insn(cx, "nuwsub"); - return llvm::LLVMBuildNUWSub(B(cx), LHS, RHS, noname()); - } + if cx.unreachable { return _Undef(LHS); } + B(cx).nuwsub(LHS, RHS) } pub fn FSub(cx: block, LHS: ValueRef, RHS: ValueRef) -> ValueRef { - unsafe { - if cx.unreachable { return _Undef(LHS); } - count_insn(cx, "sub"); - return llvm::LLVMBuildFSub(B(cx), LHS, RHS, noname()); - } + if cx.unreachable { return _Undef(LHS); } + B(cx).fsub(LHS, RHS) } pub fn Mul(cx: block, LHS: ValueRef, RHS: ValueRef) -> ValueRef { - unsafe { - if cx.unreachable { return _Undef(LHS); } - count_insn(cx, "mul"); - return llvm::LLVMBuildMul(B(cx), LHS, RHS, noname()); - } + if cx.unreachable { return _Undef(LHS); } + B(cx).mul(LHS, RHS) } pub fn NSWMul(cx: block, LHS: ValueRef, RHS: ValueRef) -> ValueRef { - unsafe { - if cx.unreachable { return _Undef(LHS); } - count_insn(cx, "nswmul"); - return llvm::LLVMBuildNSWMul(B(cx), LHS, RHS, noname()); - } + if cx.unreachable { return _Undef(LHS); } + B(cx).nswmul(LHS, RHS) } pub fn NUWMul(cx: block, LHS: ValueRef, RHS: ValueRef) -> ValueRef { - unsafe { - if cx.unreachable { return _Undef(LHS); } - count_insn(cx, "nuwmul"); - return llvm::LLVMBuildNUWMul(B(cx), LHS, RHS, noname()); - } + if cx.unreachable { return _Undef(LHS); } + B(cx).nuwmul(LHS, RHS) } pub fn FMul(cx: block, LHS: ValueRef, RHS: ValueRef) -> ValueRef { - unsafe { - if cx.unreachable { return _Undef(LHS); } - count_insn(cx, "fmul"); - return llvm::LLVMBuildFMul(B(cx), LHS, RHS, noname()); - } + if cx.unreachable { return _Undef(LHS); } + B(cx).fmul(LHS, RHS) } pub fn UDiv(cx: block, LHS: ValueRef, RHS: ValueRef) -> ValueRef { - unsafe { - if cx.unreachable { return _Undef(LHS); } - count_insn(cx, "udiv"); - return llvm::LLVMBuildUDiv(B(cx), LHS, RHS, noname()); - } + if cx.unreachable { return _Undef(LHS); } + B(cx).udiv(LHS, RHS) } pub fn SDiv(cx: block, LHS: ValueRef, RHS: ValueRef) -> ValueRef 
{ - unsafe { - if cx.unreachable { return _Undef(LHS); } - count_insn(cx, "sdiv"); - return llvm::LLVMBuildSDiv(B(cx), LHS, RHS, noname()); - } + if cx.unreachable { return _Undef(LHS); } + B(cx).sdiv(LHS, RHS) } pub fn ExactSDiv(cx: block, LHS: ValueRef, RHS: ValueRef) -> ValueRef { - unsafe { - if cx.unreachable { return _Undef(LHS); } - count_insn(cx, "extractsdiv"); - return llvm::LLVMBuildExactSDiv(B(cx), LHS, RHS, noname()); - } + if cx.unreachable { return _Undef(LHS); } + B(cx).exactsdiv(LHS, RHS) } pub fn FDiv(cx: block, LHS: ValueRef, RHS: ValueRef) -> ValueRef { - unsafe { - if cx.unreachable { return _Undef(LHS); } - count_insn(cx, "fdiv"); - return llvm::LLVMBuildFDiv(B(cx), LHS, RHS, noname()); - } + if cx.unreachable { return _Undef(LHS); } + B(cx).fdiv(LHS, RHS) } pub fn URem(cx: block, LHS: ValueRef, RHS: ValueRef) -> ValueRef { - unsafe { - if cx.unreachable { return _Undef(LHS); } - count_insn(cx, "urem"); - return llvm::LLVMBuildURem(B(cx), LHS, RHS, noname()); - } + if cx.unreachable { return _Undef(LHS); } + B(cx).urem(LHS, RHS) } pub fn SRem(cx: block, LHS: ValueRef, RHS: ValueRef) -> ValueRef { - unsafe { - if cx.unreachable { return _Undef(LHS); } - count_insn(cx, "srem"); - return llvm::LLVMBuildSRem(B(cx), LHS, RHS, noname()); - } + if cx.unreachable { return _Undef(LHS); } + B(cx).srem(LHS, RHS) } pub fn FRem(cx: block, LHS: ValueRef, RHS: ValueRef) -> ValueRef { - unsafe { - if cx.unreachable { return _Undef(LHS); } - count_insn(cx, "frem"); - return llvm::LLVMBuildFRem(B(cx), LHS, RHS, noname()); - } + if cx.unreachable { return _Undef(LHS); } + B(cx).frem(LHS, RHS) } pub fn Shl(cx: block, LHS: ValueRef, RHS: ValueRef) -> ValueRef { - unsafe { - if cx.unreachable { return _Undef(LHS); } - count_insn(cx, "shl"); - return llvm::LLVMBuildShl(B(cx), LHS, RHS, noname()); - } + if cx.unreachable { return _Undef(LHS); } + B(cx).shl(LHS, RHS) } pub fn LShr(cx: block, LHS: ValueRef, RHS: ValueRef) -> ValueRef { - unsafe { - if cx.unreachable { 
return _Undef(LHS); } - count_insn(cx, "lshr"); - return llvm::LLVMBuildLShr(B(cx), LHS, RHS, noname()); - } + if cx.unreachable { return _Undef(LHS); } + B(cx).lshr(LHS, RHS) } pub fn AShr(cx: block, LHS: ValueRef, RHS: ValueRef) -> ValueRef { - unsafe { - if cx.unreachable { return _Undef(LHS); } - count_insn(cx, "ashr"); - return llvm::LLVMBuildAShr(B(cx), LHS, RHS, noname()); - } + if cx.unreachable { return _Undef(LHS); } + B(cx).ashr(LHS, RHS) } pub fn And(cx: block, LHS: ValueRef, RHS: ValueRef) -> ValueRef { - unsafe { - if cx.unreachable { return _Undef(LHS); } - count_insn(cx, "and"); - return llvm::LLVMBuildAnd(B(cx), LHS, RHS, noname()); - } + if cx.unreachable { return _Undef(LHS); } + B(cx).and(LHS, RHS) } pub fn Or(cx: block, LHS: ValueRef, RHS: ValueRef) -> ValueRef { - unsafe { - if cx.unreachable { return _Undef(LHS); } - count_insn(cx, "or"); - return llvm::LLVMBuildOr(B(cx), LHS, RHS, noname()); - } + if cx.unreachable { return _Undef(LHS); } + B(cx).or(LHS, RHS) } pub fn Xor(cx: block, LHS: ValueRef, RHS: ValueRef) -> ValueRef { - unsafe { - if cx.unreachable { return _Undef(LHS); } - count_insn(cx, "xor"); - return llvm::LLVMBuildXor(B(cx), LHS, RHS, noname()); - } + if cx.unreachable { return _Undef(LHS); } + B(cx).xor(LHS, RHS) } pub fn BinOp(cx: block, Op: Opcode, LHS: ValueRef, RHS: ValueRef) -> ValueRef { - unsafe { - if cx.unreachable { return _Undef(LHS); } - count_insn(cx, "binop"); - return llvm::LLVMBuildBinOp(B(cx), Op, LHS, RHS, noname()); - } + if cx.unreachable { return _Undef(LHS); } + B(cx).binop(Op, LHS, RHS) } pub fn Neg(cx: block, V: ValueRef) -> ValueRef { - unsafe { - if cx.unreachable { return _Undef(V); } - count_insn(cx, "neg"); - return llvm::LLVMBuildNeg(B(cx), V, noname()); - } + if cx.unreachable { return _Undef(V); } + B(cx).neg(V) } pub fn NSWNeg(cx: block, V: ValueRef) -> ValueRef { - unsafe { - if cx.unreachable { return _Undef(V); } - count_insn(cx, "nswneg"); - return llvm::LLVMBuildNSWNeg(B(cx), V, noname()); 
- } + if cx.unreachable { return _Undef(V); } + B(cx).nswneg(V) } pub fn NUWNeg(cx: block, V: ValueRef) -> ValueRef { - unsafe { - if cx.unreachable { return _Undef(V); } - count_insn(cx, "nuwneg"); - return llvm::LLVMBuildNUWNeg(B(cx), V, noname()); - } + if cx.unreachable { return _Undef(V); } + B(cx).nuwneg(V) } pub fn FNeg(cx: block, V: ValueRef) -> ValueRef { - unsafe { - if cx.unreachable { return _Undef(V); } - count_insn(cx, "fneg"); - return llvm::LLVMBuildFNeg(B(cx), V, noname()); - } + if cx.unreachable { return _Undef(V); } + B(cx).fneg(V) } pub fn Not(cx: block, V: ValueRef) -> ValueRef { - unsafe { - if cx.unreachable { return _Undef(V); } - count_insn(cx, "not"); - return llvm::LLVMBuildNot(B(cx), V, noname()); - } + if cx.unreachable { return _Undef(V); } + B(cx).not(V) } /* Memory */ pub fn Malloc(cx: block, Ty: Type) -> ValueRef { unsafe { if cx.unreachable { return llvm::LLVMGetUndef(Type::i8p().to_ref()); } - count_insn(cx, "malloc"); - return llvm::LLVMBuildMalloc(B(cx), Ty.to_ref(), noname()); + B(cx).malloc(Ty) } } pub fn ArrayMalloc(cx: block, Ty: Type, Val: ValueRef) -> ValueRef { unsafe { if cx.unreachable { return llvm::LLVMGetUndef(Type::i8p().to_ref()); } - count_insn(cx, "arraymalloc"); - return llvm::LLVMBuildArrayMalloc(B(cx), Ty.to_ref(), Val, noname()); + B(cx).array_malloc(Ty, Val) } } pub fn Alloca(cx: block, Ty: Type, name: &str) -> ValueRef { unsafe { if cx.unreachable { return llvm::LLVMGetUndef(Ty.ptr_to().to_ref()); } - count_insn(cx, "alloca"); - if name.is_empty() { - llvm::LLVMBuildAlloca(B(cx), Ty.to_ref(), noname()) - } else { - str::as_c_str( - name, - |c| llvm::LLVMBuildAlloca(B(cx), Ty.to_ref(), c)) - } + B(cx).alloca(Ty, name) } } pub fn ArrayAlloca(cx: block, Ty: Type, Val: ValueRef) -> ValueRef { unsafe { if cx.unreachable { return llvm::LLVMGetUndef(Ty.ptr_to().to_ref()); } - count_insn(cx, "arrayalloca"); - return llvm::LLVMBuildArrayAlloca(B(cx), Ty.to_ref(), Val, noname()); + B(cx).array_alloca(Ty, Val) } } 
pub fn Free(cx: block, PointerVal: ValueRef) { - unsafe { - if cx.unreachable { return; } - count_insn(cx, "free"); - llvm::LLVMBuildFree(B(cx), PointerVal); - } + if cx.unreachable { return; } + B(cx).free(PointerVal) } pub fn Load(cx: block, PointerVal: ValueRef) -> ValueRef { @@ -547,8 +346,7 @@ pub fn Load(cx: block, PointerVal: ValueRef) -> ValueRef { }; return llvm::LLVMGetUndef(eltty.to_ref()); } - count_insn(cx, "load"); - return llvm::LLVMBuildLoad(B(cx), PointerVal, noname()); + B(cx).load(PointerVal) } } @@ -558,63 +356,43 @@ pub fn AtomicLoad(cx: block, PointerVal: ValueRef, order: AtomicOrdering) -> Val if cx.unreachable { return llvm::LLVMGetUndef(ccx.int_type.to_ref()); } - count_insn(cx, "load.atomic"); - let align = llalign_of_min(ccx, ccx.int_type); - return llvm::LLVMBuildAtomicLoad(B(cx), PointerVal, noname(), order, align as c_uint); + B(cx).atomic_load(PointerVal, order) } } pub fn LoadRangeAssert(cx: block, PointerVal: ValueRef, lo: c_ulonglong, hi: c_ulonglong, signed: lib::llvm::Bool) -> ValueRef { - let value = Load(cx, PointerVal); - - if !cx.unreachable { + if cx.unreachable { + let ccx = cx.fcx.ccx; + let ty = val_ty(PointerVal); + let eltty = if ty.kind() == lib::llvm::Array { + ty.element_type() + } else { + ccx.int_type + }; unsafe { - let t = llvm::LLVMGetElementType(llvm::LLVMTypeOf(PointerVal)); - let min = llvm::LLVMConstInt(t, lo, signed); - let max = llvm::LLVMConstInt(t, hi, signed); - - do [min, max].as_imm_buf |ptr, len| { - llvm::LLVMSetMetadata(value, lib::llvm::MD_range as c_uint, - llvm::LLVMMDNodeInContext(cx.fcx.ccx.llcx, - ptr, len as c_uint)); - } + llvm::LLVMGetUndef(eltty.to_ref()) } + } else { + B(cx).load_range_assert(PointerVal, lo, hi, signed) } - - value } pub fn Store(cx: block, Val: ValueRef, Ptr: ValueRef) { - unsafe { - if cx.unreachable { return; } - debug!("Store %s -> %s", - cx.val_to_str(Val), - cx.val_to_str(Ptr)); - count_insn(cx, "store"); - llvm::LLVMBuildStore(B(cx), Val, Ptr); - } + if 
cx.unreachable { return; } + B(cx).store(Val, Ptr) } pub fn AtomicStore(cx: block, Val: ValueRef, Ptr: ValueRef, order: AtomicOrdering) { - unsafe { - if cx.unreachable { return; } - debug!("Store %s -> %s", - cx.val_to_str(Val), - cx.val_to_str(Ptr)); - count_insn(cx, "store.atomic"); - let align = llalign_of_min(cx.ccx(), cx.ccx().int_type); - llvm::LLVMBuildAtomicStore(B(cx), Val, Ptr, order, align as c_uint); - } + if cx.unreachable { return; } + B(cx).atomic_store(Val, Ptr, order) } pub fn GEP(cx: block, Pointer: ValueRef, Indices: &[ValueRef]) -> ValueRef { unsafe { if cx.unreachable { return llvm::LLVMGetUndef(Type::nil().ptr_to().to_ref()); } - count_insn(cx, "gep"); - return llvm::LLVMBuildGEP(B(cx), Pointer, vec::raw::to_ptr(Indices), - Indices.len() as c_uint, noname()); + B(cx).gep(Pointer, Indices) } } @@ -622,54 +400,37 @@ pub fn GEP(cx: block, Pointer: ValueRef, Indices: &[ValueRef]) -> ValueRef { // in C_i32() #[inline] pub fn GEPi(cx: block, base: ValueRef, ixs: &[uint]) -> ValueRef { - // Small vector optimization. This should catch 100% of the cases that - // we care about. 
- if ixs.len() < 16 { - let mut small_vec = [ C_i32(0), ..16 ]; - for small_vec.mut_iter().zip(ixs.iter()).advance |(small_vec_e, &ix)| { - *small_vec_e = C_i32(ix as i32); - } - InBoundsGEP(cx, base, small_vec.slice(0, ixs.len())) - } else { - let v = do ixs.iter().transform |i| { C_i32(*i as i32) }.collect::<~[ValueRef]>(); - count_insn(cx, "gepi"); - InBoundsGEP(cx, base, v) + unsafe { + if cx.unreachable { return llvm::LLVMGetUndef(Type::nil().ptr_to().to_ref()); } + B(cx).gepi(base, ixs) } } pub fn InBoundsGEP(cx: block, Pointer: ValueRef, Indices: &[ValueRef]) -> ValueRef { unsafe { if cx.unreachable { return llvm::LLVMGetUndef(Type::nil().ptr_to().to_ref()); } - count_insn(cx, "inboundsgep"); - return llvm::LLVMBuildInBoundsGEP( - B(cx), Pointer, vec::raw::to_ptr(Indices), Indices.len() as c_uint, noname()); + B(cx).inbounds_gep(Pointer, Indices) } } pub fn StructGEP(cx: block, Pointer: ValueRef, Idx: uint) -> ValueRef { unsafe { if cx.unreachable { return llvm::LLVMGetUndef(Type::nil().ptr_to().to_ref()); } - count_insn(cx, "structgep"); - return llvm::LLVMBuildStructGEP(B(cx), - Pointer, - Idx as c_uint, - noname()); + B(cx).struct_gep(Pointer, Idx) } } pub fn GlobalString(cx: block, _Str: *c_char) -> ValueRef { unsafe { if cx.unreachable { return llvm::LLVMGetUndef(Type::i8p().to_ref()); } - count_insn(cx, "globalstring"); - return llvm::LLVMBuildGlobalString(B(cx), _Str, noname()); + B(cx).global_string(_Str) } } pub fn GlobalStringPtr(cx: block, _Str: *c_char) -> ValueRef { unsafe { if cx.unreachable { return llvm::LLVMGetUndef(Type::i8p().to_ref()); } - count_insn(cx, "globalstringptr"); - return llvm::LLVMBuildGlobalStringPtr(B(cx), _Str, noname()); + B(cx).global_string_ptr(_Str) } } @@ -677,153 +438,134 @@ pub fn GlobalStringPtr(cx: block, _Str: *c_char) -> ValueRef { pub fn Trunc(cx: block, Val: ValueRef, DestTy: Type) -> ValueRef { unsafe { if cx.unreachable { return llvm::LLVMGetUndef(DestTy.to_ref()); } - count_insn(cx, "trunc"); - return 
llvm::LLVMBuildTrunc(B(cx), Val, DestTy.to_ref(), noname()); + B(cx).trunc(Val, DestTy) } } pub fn ZExt(cx: block, Val: ValueRef, DestTy: Type) -> ValueRef { unsafe { if cx.unreachable { return llvm::LLVMGetUndef(DestTy.to_ref()); } - count_insn(cx, "zext"); - return llvm::LLVMBuildZExt(B(cx), Val, DestTy.to_ref(), noname()); + B(cx).zext(Val, DestTy) } } pub fn SExt(cx: block, Val: ValueRef, DestTy: Type) -> ValueRef { unsafe { if cx.unreachable { return llvm::LLVMGetUndef(DestTy.to_ref()); } - count_insn(cx, "sext"); - return llvm::LLVMBuildSExt(B(cx), Val, DestTy.to_ref(), noname()); + B(cx).sext(Val, DestTy) } } pub fn FPToUI(cx: block, Val: ValueRef, DestTy: Type) -> ValueRef { unsafe { if cx.unreachable { return llvm::LLVMGetUndef(DestTy.to_ref()); } - count_insn(cx, "fptoui"); - return llvm::LLVMBuildFPToUI(B(cx), Val, DestTy.to_ref(), noname()); + B(cx).fptoui(Val, DestTy) } } pub fn FPToSI(cx: block, Val: ValueRef, DestTy: Type) -> ValueRef { unsafe { if cx.unreachable { return llvm::LLVMGetUndef(DestTy.to_ref()); } - count_insn(cx, "fptosi"); - return llvm::LLVMBuildFPToSI(B(cx), Val, DestTy.to_ref(),noname()); + B(cx).fptosi(Val, DestTy) } } pub fn UIToFP(cx: block, Val: ValueRef, DestTy: Type) -> ValueRef { unsafe { if cx.unreachable { return llvm::LLVMGetUndef(DestTy.to_ref()); } - count_insn(cx, "uitofp"); - return llvm::LLVMBuildUIToFP(B(cx), Val, DestTy.to_ref(), noname()); + B(cx).uitofp(Val, DestTy) } } pub fn SIToFP(cx: block, Val: ValueRef, DestTy: Type) -> ValueRef { unsafe { if cx.unreachable { return llvm::LLVMGetUndef(DestTy.to_ref()); } - count_insn(cx, "sitofp"); - return llvm::LLVMBuildSIToFP(B(cx), Val, DestTy.to_ref(), noname()); + B(cx).sitofp(Val, DestTy) } } pub fn FPTrunc(cx: block, Val: ValueRef, DestTy: Type) -> ValueRef { unsafe { if cx.unreachable { return llvm::LLVMGetUndef(DestTy.to_ref()); } - count_insn(cx, "fptrunc"); - return llvm::LLVMBuildFPTrunc(B(cx), Val, DestTy.to_ref(), noname()); + B(cx).fptrunc(Val, DestTy) } } 
pub fn FPExt(cx: block, Val: ValueRef, DestTy: Type) -> ValueRef { unsafe { if cx.unreachable { return llvm::LLVMGetUndef(DestTy.to_ref()); } - count_insn(cx, "fpext"); - return llvm::LLVMBuildFPExt(B(cx), Val, DestTy.to_ref(), noname()); + B(cx).fpext(Val, DestTy) } } pub fn PtrToInt(cx: block, Val: ValueRef, DestTy: Type) -> ValueRef { unsafe { if cx.unreachable { return llvm::LLVMGetUndef(DestTy.to_ref()); } - count_insn(cx, "ptrtoint"); - return llvm::LLVMBuildPtrToInt(B(cx), Val, DestTy.to_ref(), noname()); + B(cx).ptrtoint(Val, DestTy) } } pub fn IntToPtr(cx: block, Val: ValueRef, DestTy: Type) -> ValueRef { unsafe { if cx.unreachable { return llvm::LLVMGetUndef(DestTy.to_ref()); } - count_insn(cx, "inttoptr"); - return llvm::LLVMBuildIntToPtr(B(cx), Val, DestTy.to_ref(), noname()); + B(cx).inttoptr(Val, DestTy) } } pub fn BitCast(cx: block, Val: ValueRef, DestTy: Type) -> ValueRef { unsafe { if cx.unreachable { return llvm::LLVMGetUndef(DestTy.to_ref()); } - count_insn(cx, "bitcast"); - return llvm::LLVMBuildBitCast(B(cx), Val, DestTy.to_ref(), noname()); + B(cx).bitcast(Val, DestTy) } } pub fn ZExtOrBitCast(cx: block, Val: ValueRef, DestTy: Type) -> ValueRef { unsafe { if cx.unreachable { return llvm::LLVMGetUndef(DestTy.to_ref()); } - count_insn(cx, "zextorbitcast"); - return llvm::LLVMBuildZExtOrBitCast(B(cx), Val, DestTy.to_ref(), noname()); + B(cx).zext_or_bitcast(Val, DestTy) } } pub fn SExtOrBitCast(cx: block, Val: ValueRef, DestTy: Type) -> ValueRef { unsafe { if cx.unreachable { return llvm::LLVMGetUndef(DestTy.to_ref()); } - count_insn(cx, "sextorbitcast"); - return llvm::LLVMBuildSExtOrBitCast(B(cx), Val, DestTy.to_ref(), noname()); + B(cx).sext_or_bitcast(Val, DestTy) } } pub fn TruncOrBitCast(cx: block, Val: ValueRef, DestTy: Type) -> ValueRef { unsafe { if cx.unreachable { return llvm::LLVMGetUndef(DestTy.to_ref()); } - count_insn(cx, "truncorbitcast"); - return llvm::LLVMBuildTruncOrBitCast(B(cx), Val, DestTy.to_ref(), noname()); + 
B(cx).trunc_or_bitcast(Val, DestTy) } } pub fn Cast(cx: block, Op: Opcode, Val: ValueRef, DestTy: Type, _: *u8) -> ValueRef { unsafe { - count_insn(cx, "cast"); if cx.unreachable { return llvm::LLVMGetUndef(DestTy.to_ref()); } - return llvm::LLVMBuildCast(B(cx), Op, Val, DestTy.to_ref(), noname()); + B(cx).cast(Op, Val, DestTy) } } pub fn PointerCast(cx: block, Val: ValueRef, DestTy: Type) -> ValueRef { unsafe { if cx.unreachable { return llvm::LLVMGetUndef(DestTy.to_ref()); } - count_insn(cx, "pointercast"); - return llvm::LLVMBuildPointerCast(B(cx), Val, DestTy.to_ref(), noname()); + B(cx).pointercast(Val, DestTy) } } pub fn IntCast(cx: block, Val: ValueRef, DestTy: Type) -> ValueRef { unsafe { if cx.unreachable { return llvm::LLVMGetUndef(DestTy.to_ref()); } - count_insn(cx, "intcast"); - return llvm::LLVMBuildIntCast(B(cx), Val, DestTy.to_ref(), noname()); + B(cx).intcast(Val, DestTy) } } pub fn FPCast(cx: block, Val: ValueRef, DestTy: Type) -> ValueRef { unsafe { if cx.unreachable { return llvm::LLVMGetUndef(DestTy.to_ref()); } - count_insn(cx, "fpcast"); - return llvm::LLVMBuildFPCast(B(cx), Val, DestTy.to_ref(), noname()); + B(cx).fpcast(Val, DestTy) } } @@ -833,8 +575,7 @@ pub fn ICmp(cx: block, Op: IntPredicate, LHS: ValueRef, RHS: ValueRef) -> ValueRef { unsafe { if cx.unreachable { return llvm::LLVMGetUndef(Type::i1().to_ref()); } - count_insn(cx, "icmp"); - return llvm::LLVMBuildICmp(B(cx), Op as c_uint, LHS, RHS, noname()); + B(cx).icmp(Op, LHS, RHS) } } @@ -842,8 +583,7 @@ pub fn FCmp(cx: block, Op: RealPredicate, LHS: ValueRef, RHS: ValueRef) -> ValueRef { unsafe { if cx.unreachable { return llvm::LLVMGetUndef(Type::i1().to_ref()); } - count_insn(cx, "fcmp"); - return llvm::LLVMBuildFCmp(B(cx), Op as c_uint, LHS, RHS, noname()); + B(cx).fcmp(Op, LHS, RHS) } } @@ -851,22 +591,14 @@ pub fn FCmp(cx: block, Op: RealPredicate, LHS: ValueRef, RHS: ValueRef) pub fn EmptyPhi(cx: block, Ty: Type) -> ValueRef { unsafe { if cx.unreachable { return 
llvm::LLVMGetUndef(Ty.to_ref()); } - count_insn(cx, "emptyphi"); - return llvm::LLVMBuildPhi(B(cx), Ty.to_ref(), noname()); + B(cx).empty_phi(Ty) } } -pub fn Phi(cx: block, Ty: Type, vals: &[ValueRef], bbs: &[BasicBlockRef]) - -> ValueRef { +pub fn Phi(cx: block, Ty: Type, vals: &[ValueRef], bbs: &[BasicBlockRef]) -> ValueRef { unsafe { if cx.unreachable { return llvm::LLVMGetUndef(Ty.to_ref()); } - assert_eq!(vals.len(), bbs.len()); - let phi = EmptyPhi(cx, Ty); - count_insn(cx, "addincoming"); - llvm::LLVMAddIncoming(phi, vec::raw::to_ptr(vals), - vec::raw::to_ptr(bbs), - vals.len() as c_uint); - return phi; + B(cx).phi(Ty, vals, bbs) } } @@ -888,123 +620,58 @@ pub fn _UndefReturn(cx: block, Fn: ValueRef) -> ValueRef { } else { ccx.int_type }; - count_insn(cx, "ret_undef"); - return llvm::LLVMGetUndef(retty.to_ref()); + B(cx).count_insn("ret_undef"); + llvm::LLVMGetUndef(retty.to_ref()) } } -pub fn add_span_comment(bcx: block, sp: span, text: &str) { - let ccx = bcx.ccx(); - if ccx.sess.asm_comments() { - let s = fmt!("%s (%s)", text, ccx.sess.codemap.span_to_str(sp)); - debug!("%s", s); - add_comment(bcx, s); - } +pub fn add_span_comment(cx: block, sp: span, text: &str) { + B(cx).add_span_comment(sp, text) } -pub fn add_comment(bcx: block, text: &str) { - unsafe { - let ccx = bcx.ccx(); - if ccx.sess.asm_comments() { - let sanitized = text.replace("$", ""); - let comment_text = ~"# " + - sanitized.replace("\n", "\n\t# "); - count_insn(bcx, "inlineasm"); - let asm = do comment_text.as_c_str |c| { - llvm::LLVMConstInlineAsm(Type::func([], &Type::void()).to_ref(), - c, noname(), False, False) - }; - Call(bcx, asm, []); - } - } +pub fn add_comment(cx: block, text: &str) { + B(cx).add_comment(text) } pub fn InlineAsmCall(cx: block, asm: *c_char, cons: *c_char, inputs: &[ValueRef], output: Type, volatile: bool, alignstack: bool, dia: AsmDialect) -> ValueRef { - unsafe { - count_insn(cx, "inlineasm"); - - let volatile = if volatile { lib::llvm::True } - else { 
lib::llvm::False }; - let alignstack = if alignstack { lib::llvm::True } - else { lib::llvm::False }; - - let argtys = do inputs.map |v| { - debug!("Asm Input Type: %?", cx.val_to_str(*v)); - val_ty(*v) - }; - - debug!("Asm Output Type: %?", cx.ccx().tn.type_to_str(output)); - let fty = Type::func(argtys, &output); - let v = llvm::LLVMInlineAsm(fty.to_ref(), asm, cons, volatile, alignstack, dia as c_uint); - - Call(cx, v, inputs) - } + B(cx).inline_asm_call(asm, cons, inputs, output, volatile, alignstack, dia) } pub fn Call(cx: block, Fn: ValueRef, Args: &[ValueRef]) -> ValueRef { if cx.unreachable { return _UndefReturn(cx, Fn); } - unsafe { - count_insn(cx, "call"); - - debug!("Call(Fn=%s, Args=%?)", - cx.val_to_str(Fn), - Args.map(|arg| cx.val_to_str(*arg))); - - do Args.as_imm_buf |ptr, len| { - llvm::LLVMBuildCall(B(cx), Fn, ptr, len as c_uint, noname()) - } - } + B(cx).call(Fn, Args) } pub fn FastCall(cx: block, Fn: ValueRef, Args: &[ValueRef]) -> ValueRef { if cx.unreachable { return _UndefReturn(cx, Fn); } - unsafe { - count_insn(cx, "fastcall"); - let v = llvm::LLVMBuildCall(B(cx), Fn, vec::raw::to_ptr(Args), - Args.len() as c_uint, noname()); - lib::llvm::SetInstructionCallConv(v, lib::llvm::FastCallConv); - return v; - } + B(cx).call(Fn, Args) } pub fn CallWithConv(cx: block, Fn: ValueRef, Args: &[ValueRef], Conv: CallConv) -> ValueRef { if cx.unreachable { return _UndefReturn(cx, Fn); } - unsafe { - count_insn(cx, "callwithconv"); - let v = llvm::LLVMBuildCall(B(cx), Fn, vec::raw::to_ptr(Args), - Args.len() as c_uint, noname()); - lib::llvm::SetInstructionCallConv(v, Conv); - return v; - } + B(cx).call_with_conv(Fn, Args, Conv) } -pub fn Select(cx: block, If: ValueRef, Then: ValueRef, Else: ValueRef) -> - ValueRef { - unsafe { - if cx.unreachable { return _Undef(Then); } - count_insn(cx, "select"); - return llvm::LLVMBuildSelect(B(cx), If, Then, Else, noname()); - } +pub fn Select(cx: block, If: ValueRef, Then: ValueRef, Else: ValueRef) -> ValueRef { + 
if cx.unreachable { return _Undef(Then); } + B(cx).select(If, Then, Else) } pub fn VAArg(cx: block, list: ValueRef, Ty: Type) -> ValueRef { unsafe { if cx.unreachable { return llvm::LLVMGetUndef(Ty.to_ref()); } - count_insn(cx, "vaarg"); - return llvm::LLVMBuildVAArg(B(cx), list, Ty.to_ref(), noname()); + B(cx).va_arg(list, Ty) } } -pub fn ExtractElement(cx: block, VecVal: ValueRef, Index: ValueRef) -> - ValueRef { +pub fn ExtractElement(cx: block, VecVal: ValueRef, Index: ValueRef) -> ValueRef { unsafe { if cx.unreachable { return llvm::LLVMGetUndef(Type::nil().to_ref()); } - count_insn(cx, "extractelement"); - return llvm::LLVMBuildExtractElement(B(cx), VecVal, Index, noname()); + B(cx).extract_element(VecVal, Index) } } @@ -1012,8 +679,7 @@ pub fn InsertElement(cx: block, VecVal: ValueRef, EltVal: ValueRef, Index: ValueRef) -> ValueRef { unsafe { if cx.unreachable { return llvm::LLVMGetUndef(Type::nil().to_ref()); } - count_insn(cx, "insertelement"); - llvm::LLVMBuildInsertElement(B(cx), VecVal, EltVal, Index, noname()) + B(cx).insert_element(VecVal, EltVal, Index) } } @@ -1021,52 +687,40 @@ pub fn ShuffleVector(cx: block, V1: ValueRef, V2: ValueRef, Mask: ValueRef) -> ValueRef { unsafe { if cx.unreachable { return llvm::LLVMGetUndef(Type::nil().to_ref()); } - count_insn(cx, "shufflevector"); - llvm::LLVMBuildShuffleVector(B(cx), V1, V2, Mask, noname()) + B(cx).shuffle_vector(V1, V2, Mask) } } pub fn VectorSplat(cx: block, NumElts: uint, EltVal: ValueRef) -> ValueRef { unsafe { - let elt_ty = val_ty(EltVal); - let Undef = llvm::LLVMGetUndef(Type::vector(&elt_ty, NumElts as u64).to_ref()); - let VecVal = InsertElement(cx, Undef, EltVal, C_i32(0)); - ShuffleVector(cx, VecVal, Undef, C_null(Type::vector(&Type::i32(), NumElts as u64))) + if cx.unreachable { return llvm::LLVMGetUndef(Type::nil().to_ref()); } + B(cx).vector_splat(NumElts, EltVal) } } pub fn ExtractValue(cx: block, AggVal: ValueRef, Index: uint) -> ValueRef { unsafe { if cx.unreachable { return 
llvm::LLVMGetUndef(Type::nil().to_ref()); } - count_insn(cx, "extractvalue"); - return llvm::LLVMBuildExtractValue( - B(cx), AggVal, Index as c_uint, noname()); + B(cx).extract_value(AggVal, Index) } } -pub fn InsertValue(cx: block, AggVal: ValueRef, EltVal: ValueRef, - Index: uint) { - unsafe { - if cx.unreachable { return; } - count_insn(cx, "insertvalue"); - llvm::LLVMBuildInsertValue(B(cx), AggVal, EltVal, Index as c_uint, - noname()); - } +pub fn InsertValue(cx: block, AggVal: ValueRef, EltVal: ValueRef, Index: uint) { + if cx.unreachable { return; } + B(cx).insert_value(AggVal, EltVal, Index) } pub fn IsNull(cx: block, Val: ValueRef) -> ValueRef { unsafe { if cx.unreachable { return llvm::LLVMGetUndef(Type::i1().to_ref()); } - count_insn(cx, "isnull"); - return llvm::LLVMBuildIsNull(B(cx), Val, noname()); + B(cx).is_null(Val) } } pub fn IsNotNull(cx: block, Val: ValueRef) -> ValueRef { unsafe { if cx.unreachable { return llvm::LLVMGetUndef(Type::i1().to_ref()); } - count_insn(cx, "isnotnull"); - return llvm::LLVMBuildIsNotNull(B(cx), Val, noname()); + B(cx).is_not_null(Val) } } @@ -1074,67 +728,40 @@ pub fn PtrDiff(cx: block, LHS: ValueRef, RHS: ValueRef) -> ValueRef { unsafe { let ccx = cx.fcx.ccx; if cx.unreachable { return llvm::LLVMGetUndef(ccx.int_type.to_ref()); } - count_insn(cx, "ptrdiff"); - return llvm::LLVMBuildPtrDiff(B(cx), LHS, RHS, noname()); + B(cx).ptrdiff(LHS, RHS) } } pub fn Trap(cx: block) { - unsafe { - if cx.unreachable { return; } - let b = B(cx); - let BB: BasicBlockRef = llvm::LLVMGetInsertBlock(b); - let FN: ValueRef = llvm::LLVMGetBasicBlockParent(BB); - let M: ModuleRef = llvm::LLVMGetGlobalParent(FN); - let T: ValueRef = str::as_c_str("llvm.trap", |buf| { - llvm::LLVMGetNamedFunction(M, buf) - }); - assert!((T as int != 0)); - let Args: ~[ValueRef] = ~[]; - count_insn(cx, "trap"); - llvm::LLVMBuildCall(b, T, vec::raw::to_ptr(Args), Args.len() as c_uint, noname()); - } + if cx.unreachable { return; } + B(cx).trap(); } pub fn 
LandingPad(cx: block, Ty: Type, PersFn: ValueRef, NumClauses: uint) -> ValueRef { - unsafe { - check_not_terminated(cx); - assert!(!cx.unreachable); - count_insn(cx, "landingpad"); - return llvm::LLVMBuildLandingPad( - B(cx), Ty.to_ref(), PersFn, NumClauses as c_uint, noname()); - } + check_not_terminated(cx); + assert!(!cx.unreachable); + B(cx).landing_pad(Ty, PersFn, NumClauses) } pub fn SetCleanup(cx: block, LandingPad: ValueRef) { - unsafe { - count_insn(cx, "setcleanup"); - llvm::LLVMSetCleanup(LandingPad, lib::llvm::True); - } + B(cx).set_cleanup(LandingPad) } pub fn Resume(cx: block, Exn: ValueRef) -> ValueRef { - unsafe { - check_not_terminated(cx); - terminate(cx, "Resume"); - count_insn(cx, "resume"); - return llvm::LLVMBuildResume(B(cx), Exn); - } + check_not_terminated(cx); + terminate(cx, "Resume"); + B(cx).resume(Exn) } // Atomic Operations pub fn AtomicCmpXchg(cx: block, dst: ValueRef, cmp: ValueRef, src: ValueRef, order: AtomicOrdering) -> ValueRef { - unsafe { - llvm::LLVMBuildAtomicCmpXchg(B(cx), dst, cmp, src, order) - } + B(cx).atomic_cmpxchg(dst, cmp, src, order) } pub fn AtomicRMW(cx: block, op: AtomicBinOp, dst: ValueRef, src: ValueRef, order: AtomicOrdering) -> ValueRef { - unsafe { - llvm::LLVMBuildAtomicRMW(B(cx), op, dst, src, order) - } + B(cx).atomic_rmw(op, dst, src, order) } diff --git a/src/librustc/middle/trans/builder.rs b/src/librustc/middle/trans/builder.rs new file mode 100644 index 00000000000..a4a976145b9 --- /dev/null +++ b/src/librustc/middle/trans/builder.rs @@ -0,0 +1,947 @@ +// Copyright 2013 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
+ +use lib; +use lib::llvm::llvm; +use lib::llvm::{CallConv, AtomicBinOp, AtomicOrdering, AsmDialect}; +use lib::llvm::{Opcode, IntPredicate, RealPredicate, False}; +use lib::llvm::{ValueRef, BasicBlockRef, BuilderRef, ModuleRef}; +use middle::trans::base; +use middle::trans::common::*; +use middle::trans::machine::llalign_of_min; +use middle::trans::type_::Type; +use std::cast; +use std::hashmap::HashMap; +use std::libc::{c_uint, c_ulonglong, c_char}; +use std::str; +use std::vec; +use syntax::codemap::span; + +pub struct Builder { + llbuilder: BuilderRef, + ccx: @mut CrateContext, +} + +// This is a really awful way to get a zero-length c-string, but better (and a +// lot more efficient) than doing str::as_c_str("", ...) every time. +pub fn noname() -> *c_char { + unsafe { + static cnull: uint = 0u; + cast::transmute(&cnull) + } +} + +impl Builder { + pub fn new(ccx: @mut CrateContext) -> Builder { + Builder { + llbuilder: ccx.builder.B, + ccx: ccx, + } + } + + pub fn count_insn(&self, category: &str) { + if self.ccx.sess.trans_stats() { + self.ccx.stats.n_llvm_insns += 1; + } + if self.ccx.sess.count_llvm_insns() { + do base::with_insn_ctxt |v| { + let h = &mut self.ccx.stats.llvm_insns; + + // Build version of path with cycles removed. + + // Pass 1: scan table mapping str -> rightmost pos. + let mut mm = HashMap::new(); + let len = v.len(); + let mut i = 0u; + while i < len { + mm.insert(v[i], i); + i += 1u; + } + + // Pass 2: concat strings for each elt, skipping + // forwards over any cycles by advancing to rightmost + // occurrence of each element in path. 
+ let mut s = ~"."; + i = 0u; + while i < len { + i = *mm.get(&v[i]); + s.push_char('/'); + s.push_str(v[i]); + i += 1u; + } + + s.push_char('/'); + s.push_str(category); + + let n = match h.find(&s) { + Some(&n) => n, + _ => 0u + }; + h.insert(s, n+1u); + } + } + } + + pub fn position_before(&self, insn: ValueRef) { + unsafe { + llvm::LLVMPositionBuilderBefore(self.llbuilder, insn); + } + } + + pub fn position_at_end(&self, llbb: BasicBlockRef) { + unsafe { + llvm::LLVMPositionBuilderAtEnd(self.llbuilder, llbb); + } + } + + pub fn ret_void(&self) { + self.count_insn("retvoid"); + unsafe { + llvm::LLVMBuildRetVoid(self.llbuilder); + } + } + + pub fn ret(&self, v: ValueRef) { + self.count_insn("ret"); + unsafe { + llvm::LLVMBuildRet(self.llbuilder, v); + } + } + + pub fn aggregate_ret(&self, ret_vals: &[ValueRef]) { + unsafe { + llvm::LLVMBuildAggregateRet(self.llbuilder, + vec::raw::to_ptr(ret_vals), + ret_vals.len() as c_uint); + } + } + + pub fn br(&self, dest: BasicBlockRef) { + self.count_insn("br"); + unsafe { + llvm::LLVMBuildBr(self.llbuilder, dest); + } + } + + pub fn cond_br(&self, cond: ValueRef, then_llbb: BasicBlockRef, else_llbb: BasicBlockRef) { + self.count_insn("condbr"); + unsafe { + llvm::LLVMBuildCondBr(self.llbuilder, cond, then_llbb, else_llbb); + } + } + + pub fn switch(&self, v: ValueRef, else_llbb: BasicBlockRef, num_cases: uint) -> ValueRef { + unsafe { + llvm::LLVMBuildSwitch(self.llbuilder, v, else_llbb, num_cases as c_uint) + } + } + + pub fn indirect_br(&self, addr: ValueRef, num_dests: uint) { + self.count_insn("indirectbr"); + unsafe { + llvm::LLVMBuildIndirectBr(self.llbuilder, addr, num_dests as c_uint); + } + } + + pub fn invoke(&self, + llfn: ValueRef, + args: &[ValueRef], + then: BasicBlockRef, + catch: BasicBlockRef) + -> ValueRef { + self.count_insn("invoke"); + unsafe { + llvm::LLVMBuildInvoke(self.llbuilder, + llfn, + vec::raw::to_ptr(args), + args.len() as c_uint, + then, + catch, + noname()) + } + } + + pub fn 
fast_invoke(&self, + llfn: ValueRef, + args: &[ValueRef], + then: BasicBlockRef, + catch: BasicBlockRef) { + self.count_insn("fastinvoke"); + let v = self.invoke(llfn, args, then, catch); + lib::llvm::SetInstructionCallConv(v, lib::llvm::FastCallConv); + } + + pub fn unreachable(&self) { + self.count_insn("unreachable"); + unsafe { + llvm::LLVMBuildUnreachable(self.llbuilder); + } + } + + /* Arithmetic */ + pub fn add(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef { + self.count_insn("add"); + unsafe { + llvm::LLVMBuildAdd(self.llbuilder, lhs, rhs, noname()) + } + } + + pub fn nswadd(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef { + self.count_insn("nswadd"); + unsafe { + llvm::LLVMBuildNSWAdd(self.llbuilder, lhs, rhs, noname()) + } + } + + pub fn nuwadd(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef { + self.count_insn("nuwadd"); + unsafe { + llvm::LLVMBuildNUWAdd(self.llbuilder, lhs, rhs, noname()) + } + } + + pub fn fadd(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef { + self.count_insn("fadd"); + unsafe { + llvm::LLVMBuildFAdd(self.llbuilder, lhs, rhs, noname()) + } + } + + pub fn sub(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef { + self.count_insn("sub"); + unsafe { + llvm::LLVMBuildSub(self.llbuilder, lhs, rhs, noname()) + } + } + + pub fn nswsub(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef { + self.count_insn("nwsub"); + unsafe { + llvm::LLVMBuildNSWSub(self.llbuilder, lhs, rhs, noname()) + } + } + + pub fn nuwsub(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef { + self.count_insn("nuwsub"); + unsafe { + llvm::LLVMBuildNUWSub(self.llbuilder, lhs, rhs, noname()) + } + } + + pub fn fsub(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef { + self.count_insn("sub"); + unsafe { + llvm::LLVMBuildFSub(self.llbuilder, lhs, rhs, noname()) + } + } + + pub fn mul(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef { + self.count_insn("mul"); + unsafe { + llvm::LLVMBuildMul(self.llbuilder, lhs, rhs, noname()) + } + } + + pub fn nswmul(&self, lhs: ValueRef, 
rhs: ValueRef) -> ValueRef { + self.count_insn("nswmul"); + unsafe { + llvm::LLVMBuildNSWMul(self.llbuilder, lhs, rhs, noname()) + } + } + + pub fn nuwmul(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef { + self.count_insn("nuwmul"); + unsafe { + llvm::LLVMBuildNUWMul(self.llbuilder, lhs, rhs, noname()) + } + } + + pub fn fmul(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef { + self.count_insn("fmul"); + unsafe { + llvm::LLVMBuildFMul(self.llbuilder, lhs, rhs, noname()) + } + } + + pub fn udiv(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef { + self.count_insn("udiv"); + unsafe { + llvm::LLVMBuildUDiv(self.llbuilder, lhs, rhs, noname()) + } + } + + pub fn sdiv(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef { + self.count_insn("sdiv"); + unsafe { + llvm::LLVMBuildSDiv(self.llbuilder, lhs, rhs, noname()) + } + } + + pub fn exactsdiv(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef { + self.count_insn("exactsdiv"); + unsafe { + llvm::LLVMBuildExactSDiv(self.llbuilder, lhs, rhs, noname()) + } + } + + pub fn fdiv(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef { + self.count_insn("fdiv"); + unsafe { + llvm::LLVMBuildFDiv(self.llbuilder, lhs, rhs, noname()) + } + } + + pub fn urem(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef { + self.count_insn("urem"); + unsafe { + llvm::LLVMBuildURem(self.llbuilder, lhs, rhs, noname()) + } + } + + pub fn srem(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef { + self.count_insn("srem"); + unsafe { + llvm::LLVMBuildSRem(self.llbuilder, lhs, rhs, noname()) + } + } + + pub fn frem(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef { + self.count_insn("frem"); + unsafe { + llvm::LLVMBuildFRem(self.llbuilder, lhs, rhs, noname()) + } + } + + pub fn shl(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef { + self.count_insn("shl"); + unsafe { + llvm::LLVMBuildShl(self.llbuilder, lhs, rhs, noname()) + } + } + + pub fn lshr(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef { + self.count_insn("lshr"); + unsafe { + 
llvm::LLVMBuildLShr(self.llbuilder, lhs, rhs, noname()) + } + } + + pub fn ashr(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef { + self.count_insn("ashr"); + unsafe { + llvm::LLVMBuildAShr(self.llbuilder, lhs, rhs, noname()) + } + } + + pub fn and(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef { + self.count_insn("and"); + unsafe { + llvm::LLVMBuildAnd(self.llbuilder, lhs, rhs, noname()) + } + } + + pub fn or(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef { + self.count_insn("or"); + unsafe { + llvm::LLVMBuildOr(self.llbuilder, lhs, rhs, noname()) + } + } + + pub fn xor(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef { + self.count_insn("xor"); + unsafe { + llvm::LLVMBuildXor(self.llbuilder, lhs, rhs, noname()) + } + } + + pub fn binop(&self, op: Opcode, lhs: ValueRef, rhs: ValueRef) + -> ValueRef { + self.count_insn("binop"); + unsafe { + llvm::LLVMBuildBinOp(self.llbuilder, op, lhs, rhs, noname()) + } + } + + pub fn neg(&self, V: ValueRef) -> ValueRef { + self.count_insn("neg"); + unsafe { + llvm::LLVMBuildNeg(self.llbuilder, V, noname()) + } + } + + pub fn nswneg(&self, V: ValueRef) -> ValueRef { + self.count_insn("nswneg"); + unsafe { + llvm::LLVMBuildNSWNeg(self.llbuilder, V, noname()) + } + } + + pub fn nuwneg(&self, V: ValueRef) -> ValueRef { + self.count_insn("nuwneg"); + unsafe { + llvm::LLVMBuildNUWNeg(self.llbuilder, V, noname()) + } + } + pub fn fneg(&self, V: ValueRef) -> ValueRef { + self.count_insn("fneg"); + unsafe { + llvm::LLVMBuildFNeg(self.llbuilder, V, noname()) + } + } + + pub fn not(&self, V: ValueRef) -> ValueRef { + self.count_insn("not"); + unsafe { + llvm::LLVMBuildNot(self.llbuilder, V, noname()) + } + } + + /* Memory */ + pub fn malloc(&self, ty: Type) -> ValueRef { + self.count_insn("malloc"); + unsafe { + llvm::LLVMBuildMalloc(self.llbuilder, ty.to_ref(), noname()) + } + } + + pub fn array_malloc(&self, ty: Type, val: ValueRef) -> ValueRef { + self.count_insn("arraymalloc"); + unsafe { + 
llvm::LLVMBuildArrayMalloc(self.llbuilder, ty.to_ref(), val, noname()) + } + } + + pub fn alloca(&self, ty: Type, name: &str) -> ValueRef { + self.count_insn("alloca"); + unsafe { + if name.is_empty() { + llvm::LLVMBuildAlloca(self.llbuilder, ty.to_ref(), noname()) + } else { + str::as_c_str( + name, + |c| llvm::LLVMBuildAlloca(self.llbuilder, ty.to_ref(), c)) + } + } + } + + pub fn array_alloca(&self, ty: Type, val: ValueRef) -> ValueRef { + self.count_insn("arrayalloca"); + unsafe { + llvm::LLVMBuildArrayAlloca(self.llbuilder, ty.to_ref(), val, noname()) + } + } + + pub fn free(&self, ptr: ValueRef) { + self.count_insn("free"); + unsafe { + llvm::LLVMBuildFree(self.llbuilder, ptr); + } + } + + pub fn load(&self, ptr: ValueRef) -> ValueRef { + self.count_insn("load"); + unsafe { + llvm::LLVMBuildLoad(self.llbuilder, ptr, noname()) + } + } + + pub fn atomic_load(&self, ptr: ValueRef, order: AtomicOrdering) -> ValueRef { + self.count_insn("load.atomic"); + unsafe { + let align = llalign_of_min(self.ccx, self.ccx.int_type); + llvm::LLVMBuildAtomicLoad(self.llbuilder, ptr, noname(), order, align as c_uint) + } + } + + + pub fn load_range_assert(&self, ptr: ValueRef, lo: c_ulonglong, + hi: c_ulonglong, signed: lib::llvm::Bool) -> ValueRef { + let value = self.load(ptr); + + unsafe { + let t = llvm::LLVMGetElementType(llvm::LLVMTypeOf(ptr)); + let min = llvm::LLVMConstInt(t, lo, signed); + let max = llvm::LLVMConstInt(t, hi, signed); + + do [min, max].as_imm_buf |ptr, len| { + llvm::LLVMSetMetadata(value, lib::llvm::MD_range as c_uint, + llvm::LLVMMDNodeInContext(self.ccx.llcx, + ptr, len as c_uint)); + } + } + + value + } + + pub fn store(&self, val: ValueRef, ptr: ValueRef) { + debug!("Store %s -> %s", + self.ccx.tn.val_to_str(val), + self.ccx.tn.val_to_str(ptr)); + self.count_insn("store"); + unsafe { + llvm::LLVMBuildStore(self.llbuilder, val, ptr); + } + } + + pub fn atomic_store(&self, val: ValueRef, ptr: ValueRef, order: AtomicOrdering) { + debug!("Store %s -> 
%s", + self.ccx.tn.val_to_str(val), + self.ccx.tn.val_to_str(ptr)); + self.count_insn("store.atomic"); + let align = llalign_of_min(self.ccx, self.ccx.int_type); + unsafe { + llvm::LLVMBuildAtomicStore(self.llbuilder, val, ptr, order, align as c_uint); + } + } + + pub fn gep(&self, ptr: ValueRef, indices: &[ValueRef]) -> ValueRef { + self.count_insn("gep"); + unsafe { + llvm::LLVMBuildGEP(self.llbuilder, ptr, vec::raw::to_ptr(indices), + indices.len() as c_uint, noname()) + } + } + + // Simple wrapper around GEP that takes an array of ints and wraps them + // in C_i32() + #[inline] + pub fn gepi(&self, base: ValueRef, ixs: &[uint]) -> ValueRef { + // Small vector optimization. This should catch 100% of the cases that + // we care about. + if ixs.len() < 16 { + let mut small_vec = [ C_i32(0), ..16 ]; + for small_vec.mut_iter().zip(ixs.iter()).advance |(small_vec_e, &ix)| { + *small_vec_e = C_i32(ix as i32); + } + self.inbounds_gep(base, small_vec.slice(0, ixs.len())) + } else { + let v = do ixs.iter().transform |i| { C_i32(*i as i32) }.collect::<~[ValueRef]>(); + self.count_insn("gepi"); + self.inbounds_gep(base, v) + } + } + + pub fn inbounds_gep(&self, ptr: ValueRef, indices: &[ValueRef]) -> ValueRef { + self.count_insn("inboundsgep"); + unsafe { + llvm::LLVMBuildInBoundsGEP( + self.llbuilder, ptr, vec::raw::to_ptr(indices), indices.len() as c_uint, noname()) + } + } + + pub fn struct_gep(&self, ptr: ValueRef, idx: uint) -> ValueRef { + self.count_insn("structgep"); + unsafe { + llvm::LLVMBuildStructGEP(self.llbuilder, ptr, idx as c_uint, noname()) + } + } + + pub fn global_string(&self, _Str: *c_char) -> ValueRef { + self.count_insn("globalstring"); + unsafe { + llvm::LLVMBuildGlobalString(self.llbuilder, _Str, noname()) + } + } + + pub fn global_string_ptr(&self, _Str: *c_char) -> ValueRef { + self.count_insn("globalstringptr"); + unsafe { + llvm::LLVMBuildGlobalStringPtr(self.llbuilder, _Str, noname()) + } + } + + /* Casts */ + pub fn trunc(&self, val: 
ValueRef, dest_ty: Type) -> ValueRef { + self.count_insn("trunc"); + unsafe { + llvm::LLVMBuildTrunc(self.llbuilder, val, dest_ty.to_ref(), noname()) + } + } + + pub fn zext(&self, val: ValueRef, dest_ty: Type) -> ValueRef { + self.count_insn("zext"); + unsafe { + llvm::LLVMBuildZExt(self.llbuilder, val, dest_ty.to_ref(), noname()) + } + } + + pub fn sext(&self, val: ValueRef, dest_ty: Type) -> ValueRef { + self.count_insn("sext"); + unsafe { + llvm::LLVMBuildSExt(self.llbuilder, val, dest_ty.to_ref(), noname()) + } + } + + pub fn fptoui(&self, val: ValueRef, dest_ty: Type) -> ValueRef { + self.count_insn("fptoui"); + unsafe { + llvm::LLVMBuildFPToUI(self.llbuilder, val, dest_ty.to_ref(), noname()) + } + } + + pub fn fptosi(&self, val: ValueRef, dest_ty: Type) -> ValueRef { + self.count_insn("fptosi"); + unsafe { + llvm::LLVMBuildFPToSI(self.llbuilder, val, dest_ty.to_ref(),noname()) + } + } + + pub fn uitofp(&self, val: ValueRef, dest_ty: Type) -> ValueRef { + self.count_insn("uitofp"); + unsafe { + llvm::LLVMBuildUIToFP(self.llbuilder, val, dest_ty.to_ref(), noname()) + } + } + + pub fn sitofp(&self, val: ValueRef, dest_ty: Type) -> ValueRef { + self.count_insn("sitofp"); + unsafe { + llvm::LLVMBuildSIToFP(self.llbuilder, val, dest_ty.to_ref(), noname()) + } + } + + pub fn fptrunc(&self, val: ValueRef, dest_ty: Type) -> ValueRef { + self.count_insn("fptrunc"); + unsafe { + llvm::LLVMBuildFPTrunc(self.llbuilder, val, dest_ty.to_ref(), noname()) + } + } + + pub fn fpext(&self, val: ValueRef, dest_ty: Type) -> ValueRef { + self.count_insn("fpext"); + unsafe { + llvm::LLVMBuildFPExt(self.llbuilder, val, dest_ty.to_ref(), noname()) + } + } + + pub fn ptrtoint(&self, val: ValueRef, dest_ty: Type) -> ValueRef { + self.count_insn("ptrtoint"); + unsafe { + llvm::LLVMBuildPtrToInt(self.llbuilder, val, dest_ty.to_ref(), noname()) + } + } + + pub fn inttoptr(&self, val: ValueRef, dest_ty: Type) -> ValueRef { + self.count_insn("inttoptr"); + unsafe { + 
llvm::LLVMBuildIntToPtr(self.llbuilder, val, dest_ty.to_ref(), noname()) + } + } + + pub fn bitcast(&self, val: ValueRef, dest_ty: Type) -> ValueRef { + self.count_insn("bitcast"); + unsafe { + llvm::LLVMBuildBitCast(self.llbuilder, val, dest_ty.to_ref(), noname()) + } + } + + pub fn zext_or_bitcast(&self, val: ValueRef, dest_ty: Type) -> ValueRef { + self.count_insn("zextorbitcast"); + unsafe { + llvm::LLVMBuildZExtOrBitCast(self.llbuilder, val, dest_ty.to_ref(), noname()) + } + } + + pub fn sext_or_bitcast(&self, val: ValueRef, dest_ty: Type) -> ValueRef { + self.count_insn("sextorbitcast"); + unsafe { + llvm::LLVMBuildSExtOrBitCast(self.llbuilder, val, dest_ty.to_ref(), noname()) + } + } + + pub fn trunc_or_bitcast(&self, val: ValueRef, dest_ty: Type) -> ValueRef { + self.count_insn("truncorbitcast"); + unsafe { + llvm::LLVMBuildTruncOrBitCast(self.llbuilder, val, dest_ty.to_ref(), noname()) + } + } + + pub fn cast(&self, op: Opcode, val: ValueRef, dest_ty: Type) -> ValueRef { + self.count_insn("cast"); + unsafe { + llvm::LLVMBuildCast(self.llbuilder, op, val, dest_ty.to_ref(), noname()) + } + } + + pub fn pointercast(&self, val: ValueRef, dest_ty: Type) -> ValueRef { + self.count_insn("pointercast"); + unsafe { + llvm::LLVMBuildPointerCast(self.llbuilder, val, dest_ty.to_ref(), noname()) + } + } + + pub fn intcast(&self, val: ValueRef, dest_ty: Type) -> ValueRef { + self.count_insn("intcast"); + unsafe { + llvm::LLVMBuildIntCast(self.llbuilder, val, dest_ty.to_ref(), noname()) + } + } + + pub fn fpcast(&self, val: ValueRef, dest_ty: Type) -> ValueRef { + self.count_insn("fpcast"); + unsafe { + llvm::LLVMBuildFPCast(self.llbuilder, val, dest_ty.to_ref(), noname()) + } + } + + + /* Comparisons */ + pub fn icmp(&self, op: IntPredicate, lhs: ValueRef, rhs: ValueRef) -> ValueRef { + self.count_insn("icmp"); + unsafe { + llvm::LLVMBuildICmp(self.llbuilder, op as c_uint, lhs, rhs, noname()) + } + } + + pub fn fcmp(&self, op: RealPredicate, lhs: ValueRef, rhs: 
ValueRef) -> ValueRef { + self.count_insn("fcmp"); + unsafe { + llvm::LLVMBuildFCmp(self.llbuilder, op as c_uint, lhs, rhs, noname()) + } + } + + /* Miscellaneous instructions */ + pub fn empty_phi(&self, ty: Type) -> ValueRef { + self.count_insn("emptyphi"); + unsafe { + llvm::LLVMBuildPhi(self.llbuilder, ty.to_ref(), noname()) + } + } + + pub fn phi(&self, ty: Type, vals: &[ValueRef], bbs: &[BasicBlockRef]) -> ValueRef { + assert_eq!(vals.len(), bbs.len()); + let phi = self.empty_phi(ty); + self.count_insn("addincoming"); + unsafe { + llvm::LLVMAddIncoming(phi, vec::raw::to_ptr(vals), + vec::raw::to_ptr(bbs), + vals.len() as c_uint); + phi + } + } + + pub fn add_span_comment(&self, sp: span, text: &str) { + if self.ccx.sess.asm_comments() { + let s = fmt!("%s (%s)", text, self.ccx.sess.codemap.span_to_str(sp)); + debug!("%s", s); + self.add_comment(s); + } + } + + pub fn add_comment(&self, text: &str) { + if self.ccx.sess.asm_comments() { + let sanitized = text.replace("$", ""); + let comment_text = fmt!("# %s", sanitized.replace("\n", "\n\t# ")); + self.count_insn("inlineasm"); + let asm = do comment_text.as_c_str |c| { + unsafe { + llvm::LLVMConstInlineAsm(Type::func([], &Type::void()).to_ref(), + c, noname(), False, False) + } + }; + self.call(asm, []); + } + } + + pub fn inline_asm_call(&self, asm: *c_char, cons: *c_char, + inputs: &[ValueRef], output: Type, + volatile: bool, alignstack: bool, + dia: AsmDialect) -> ValueRef { + self.count_insn("inlineasm"); + + let volatile = if volatile { lib::llvm::True } + else { lib::llvm::False }; + let alignstack = if alignstack { lib::llvm::True } + else { lib::llvm::False }; + + let argtys = do inputs.map |v| { + debug!("Asm Input Type: %?", self.ccx.tn.val_to_str(*v)); + val_ty(*v) + }; + + debug!("Asm Output Type: %?", self.ccx.tn.type_to_str(output)); + let fty = Type::func(argtys, &output); + unsafe { + let v = llvm::LLVMInlineAsm( + fty.to_ref(), asm, cons, volatile, alignstack, dia as c_uint); + self.call(v, 
inputs) + } + } + + pub fn call(&self, llfn: ValueRef, args: &[ValueRef]) -> ValueRef { + self.count_insn("call"); + + debug!("Call(llfn=%s, args=%?)", + self.ccx.tn.val_to_str(llfn), + args.map(|arg| self.ccx.tn.val_to_str(*arg))); + + do args.as_imm_buf |ptr, len| { + unsafe { + llvm::LLVMBuildCall(self.llbuilder, llfn, ptr, len as c_uint, noname()) + } + } + } + + pub fn fastcall(&self, llfn: ValueRef, args: &[ValueRef]) -> ValueRef { + self.count_insn("fastcall"); + unsafe { + let v = llvm::LLVMBuildCall(self.llbuilder, llfn, vec::raw::to_ptr(args), + args.len() as c_uint, noname()); + lib::llvm::SetInstructionCallConv(v, lib::llvm::FastCallConv); + v + } + } + + pub fn call_with_conv(&self, llfn: ValueRef, args: &[ValueRef], + conv: CallConv) -> ValueRef { + self.count_insn("callwithconv"); + unsafe { + let v = llvm::LLVMBuildCall(self.llbuilder, llfn, vec::raw::to_ptr(args), + args.len() as c_uint, noname()); + lib::llvm::SetInstructionCallConv(v, conv); + v + } + } + + pub fn select(&self, cond: ValueRef, then_val: ValueRef, else_val: ValueRef) -> ValueRef { + self.count_insn("select"); + unsafe { + llvm::LLVMBuildSelect(self.llbuilder, cond, then_val, else_val, noname()) + } + } + + pub fn va_arg(&self, list: ValueRef, ty: Type) -> ValueRef { + self.count_insn("vaarg"); + unsafe { + llvm::LLVMBuildVAArg(self.llbuilder, list, ty.to_ref(), noname()) + } + } + + pub fn extract_element(&self, vec: ValueRef, idx: ValueRef) -> ValueRef { + self.count_insn("extractelement"); + unsafe { + llvm::LLVMBuildExtractElement(self.llbuilder, vec, idx, noname()) + } + } + + pub fn insert_element(&self, vec: ValueRef, elt: ValueRef, idx: ValueRef) -> ValueRef { + self.count_insn("insertelement"); + unsafe { + llvm::LLVMBuildInsertElement(self.llbuilder, vec, elt, idx, noname()) + } + } + + pub fn shuffle_vector(&self, v1: ValueRef, v2: ValueRef, mask: ValueRef) -> ValueRef { + self.count_insn("shufflevector"); + unsafe { + llvm::LLVMBuildShuffleVector(self.llbuilder, v1, v2, 
mask, noname()) + } + } + + pub fn vector_splat(&self, num_elts: uint, elt: ValueRef) -> ValueRef { + unsafe { + let elt_ty = val_ty(elt); + let Undef = llvm::LLVMGetUndef(Type::vector(&elt_ty, num_elts as u64).to_ref()); + let vec = self.insert_element(Undef, elt, C_i32(0)); + self.shuffle_vector(vec, Undef, C_null(Type::vector(&Type::i32(), num_elts as u64))) + } + } + + pub fn extract_value(&self, agg_val: ValueRef, idx: uint) -> ValueRef { + self.count_insn("extractvalue"); + unsafe { + llvm::LLVMBuildExtractValue(self.llbuilder, agg_val, idx as c_uint, noname()) + } + } + + pub fn insert_value(&self, agg_val: ValueRef, elt: ValueRef, + idx: uint) { + self.count_insn("insertvalue"); + unsafe { + llvm::LLVMBuildInsertValue(self.llbuilder, agg_val, elt, idx as c_uint, + noname()); + } + } + + pub fn is_null(&self, val: ValueRef) -> ValueRef { + self.count_insn("isnull"); + unsafe { + llvm::LLVMBuildIsNull(self.llbuilder, val, noname()) + } + } + + pub fn is_not_null(&self, val: ValueRef) -> ValueRef { + self.count_insn("isnotnull"); + unsafe { + llvm::LLVMBuildIsNotNull(self.llbuilder, val, noname()) + } + } + + pub fn ptrdiff(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef { + self.count_insn("ptrdiff"); + unsafe { + llvm::LLVMBuildPtrDiff(self.llbuilder, lhs, rhs, noname()) + } + } + + pub fn trap(&self) { + unsafe { + let BB: BasicBlockRef = llvm::LLVMGetInsertBlock(self.llbuilder); + let FN: ValueRef = llvm::LLVMGetBasicBlockParent(BB); + let M: ModuleRef = llvm::LLVMGetGlobalParent(FN); + let T: ValueRef = str::as_c_str("llvm.trap", |buf| { + llvm::LLVMGetNamedFunction(M, buf) + }); + assert!((T as int != 0)); + let args: &[ValueRef] = []; + self.count_insn("trap"); + llvm::LLVMBuildCall( + self.llbuilder, T, vec::raw::to_ptr(args), args.len() as c_uint, noname()); + } + } + + pub fn landing_pad(&self, ty: Type, pers_fn: ValueRef, num_clauses: uint) -> ValueRef { + self.count_insn("landingpad"); + unsafe { + llvm::LLVMBuildLandingPad( + self.llbuilder, 
ty.to_ref(), pers_fn, num_clauses as c_uint, noname()) + } + } + + pub fn set_cleanup(&self, landing_pad: ValueRef) { + self.count_insn("setcleanup"); + unsafe { + llvm::LLVMSetCleanup(landing_pad, lib::llvm::True); + } + } + + pub fn resume(&self, exn: ValueRef) -> ValueRef { + self.count_insn("resume"); + unsafe { + llvm::LLVMBuildResume(self.llbuilder, exn) + } + } + + // Atomic Operations + pub fn atomic_cmpxchg(&self, dst: ValueRef, + cmp: ValueRef, src: ValueRef, + order: AtomicOrdering) -> ValueRef { + unsafe { + llvm::LLVMBuildAtomicCmpXchg(self.llbuilder, dst, cmp, src, order) + } + } + pub fn atomic_rmw(&self, op: AtomicBinOp, + dst: ValueRef, src: ValueRef, + order: AtomicOrdering) -> ValueRef { + unsafe { + llvm::LLVMBuildAtomicRMW(self.llbuilder, op, dst, src, order) + } + } +} diff --git a/src/librustc/middle/trans/context.rs b/src/librustc/middle/trans/context.rs index ffebb87d5cf..4990e78d524 100644 --- a/src/librustc/middle/trans/context.rs +++ b/src/librustc/middle/trans/context.rs @@ -19,6 +19,7 @@ use middle::astencode; use middle::resolve; use middle::trans::adt; use middle::trans::base; +use middle::trans::builder::Builder; use middle::trans::debuginfo; use middle::trans::type_use; use middle::ty; @@ -227,6 +228,10 @@ impl CrateContext { } } } + + pub fn builder(@mut self) -> Builder { + Builder::new(self) + } } #[unsafe_destructor] diff --git a/src/librustc/middle/trans/debuginfo.rs b/src/librustc/middle/trans/debuginfo.rs index 0e75e4e85c2..391a1283878 100644 --- a/src/librustc/middle/trans/debuginfo.rs +++ b/src/librustc/middle/trans/debuginfo.rs @@ -188,7 +188,7 @@ pub fn create_local_var_metadata(bcx: block, local: @ast::local) -> DIVariable { set_debug_location(cx, lexical_block_metadata(bcx), loc.line, loc.col.to_uint()); unsafe { let instr = llvm::LLVMDIBuilderInsertDeclareAtEnd(DIB(cx), llptr, var_metadata, bcx.llbb); - llvm::LLVMSetInstDebugLocation(trans::build::B(bcx), instr); + 
llvm::LLVMSetInstDebugLocation(trans::build::B(bcx).llbuilder, instr); } return var_metadata; @@ -247,7 +247,7 @@ pub fn create_argument_metadata(bcx: block, arg: &ast::arg, span: span) -> Optio unsafe { let instr = llvm::LLVMDIBuilderInsertDeclareAtEnd( DIB(cx), llptr, var_metadata, bcx.llbb); - llvm::LLVMSetInstDebugLocation(trans::build::B(bcx), instr); + llvm::LLVMSetInstDebugLocation(trans::build::B(bcx).llbuilder, instr); } return Some(var_metadata); } diff --git a/src/librustc/middle/trans/mod.rs b/src/librustc/middle/trans/mod.rs index 64d6bbec87c..d47d9a4ff16 100644 --- a/src/librustc/middle/trans/mod.rs +++ b/src/librustc/middle/trans/mod.rs @@ -22,6 +22,7 @@ pub mod context; pub mod consts; pub mod type_of; pub mod build; +pub mod builder; pub mod base; pub mod _match; pub mod uniq;