From f98f83a15de649f3b4d152737336e95a0969e265 Mon Sep 17 00:00:00 2001 From: Peter Zotov Date: Sat, 28 Dec 2013 18:01:53 +0400 Subject: [PATCH] Implement volatile_load and volatile_store intrinsics. --- src/librustc/lib/llvm.rs | 4 ++++ src/librustc/middle/trans/build.rs | 12 +++++++++++ src/librustc/middle/trans/builder.rs | 21 +++++++++++++++++++ src/librustc/middle/trans/intrinsic.rs | 20 ++++++++++++++++++ src/librustc/middle/typeck/check/mod.rs | 5 +++++ src/libstd/unstable/intrinsics.rs | 12 +++++++++++ .../run-make/volatile-intrinsics/Makefile | 10 +++++++++ src/test/run-make/volatile-intrinsics/main.rs | 19 +++++++++++++++++ 8 files changed, 103 insertions(+) create mode 100644 src/test/run-make/volatile-intrinsics/Makefile create mode 100644 src/test/run-make/volatile-intrinsics/main.rs diff --git a/src/librustc/lib/llvm.rs b/src/librustc/lib/llvm.rs index 50a35e9d1bf..ae90acd3cb7 100644 --- a/src/librustc/lib/llvm.rs +++ b/src/librustc/lib/llvm.rs @@ -787,6 +787,10 @@ pub mod llvm { pub fn LLVMIsTailCall(CallInst: ValueRef) -> Bool; pub fn LLVMSetTailCall(CallInst: ValueRef, IsTailCall: Bool); + /* Operations on load/store instructions (only) */ + pub fn LLVMGetVolatile(MemoryAccessInst: ValueRef) -> Bool; + pub fn LLVMSetVolatile(MemoryAccessInst: ValueRef, volatile: Bool); + /* Operations on phi nodes */ pub fn LLVMAddIncoming(PhiNode: ValueRef, IncomingValues: *ValueRef, diff --git a/src/librustc/middle/trans/build.rs b/src/librustc/middle/trans/build.rs index faf210c1a9e..7a0e319e505 100644 --- a/src/librustc/middle/trans/build.rs +++ b/src/librustc/middle/trans/build.rs @@ -349,6 +349,13 @@ pub fn Load(cx: &Block, PointerVal: ValueRef) -> ValueRef { } } +pub fn VolatileLoad(cx: &Block, PointerVal: ValueRef) -> ValueRef { + unsafe { + if cx.unreachable.get() { return llvm::LLVMGetUndef(Type::nil().to_ref()); } + B(cx).volatile_load(PointerVal) + } +} + pub fn AtomicLoad(cx: &Block, PointerVal: ValueRef, order: AtomicOrdering) -> ValueRef { unsafe { 
let ccx = cx.fcx.ccx; @@ -383,6 +390,11 @@ pub fn Store(cx: &Block, Val: ValueRef, Ptr: ValueRef) { B(cx).store(Val, Ptr) } +pub fn VolatileStore(cx: &Block, Val: ValueRef, Ptr: ValueRef) { + if cx.unreachable.get() { return; } + B(cx).volatile_store(Val, Ptr) +} + pub fn AtomicStore(cx: &Block, Val: ValueRef, Ptr: ValueRef, order: AtomicOrdering) { if cx.unreachable.get() { return; } B(cx).atomic_store(Val, Ptr, order) diff --git a/src/librustc/middle/trans/builder.rs b/src/librustc/middle/trans/builder.rs index eacc6f84db1..c4beb935ffe 100644 --- a/src/librustc/middle/trans/builder.rs +++ b/src/librustc/middle/trans/builder.rs @@ -449,6 +449,15 @@ impl Builder { } } + pub fn volatile_load(&self, ptr: ValueRef) -> ValueRef { + self.count_insn("load.volatile"); + unsafe { + let insn = llvm::LLVMBuildLoad(self.llbuilder, ptr, noname()); + llvm::LLVMSetVolatile(insn, lib::llvm::True); + insn + } + } + pub fn atomic_load(&self, ptr: ValueRef, order: AtomicOrdering) -> ValueRef { self.count_insn("load.atomic"); unsafe { @@ -488,6 +497,18 @@ impl Builder { } } + pub fn volatile_store(&self, val: ValueRef, ptr: ValueRef) { + debug!("Store {} -> {}", + self.ccx.tn.val_to_str(val), + self.ccx.tn.val_to_str(ptr)); + assert!(is_not_null(self.llbuilder)); + self.count_insn("store.volatile"); + unsafe { + let insn = llvm::LLVMBuildStore(self.llbuilder, val, ptr); + llvm::LLVMSetVolatile(insn, lib::llvm::True); + } + } + pub fn atomic_store(&self, val: ValueRef, ptr: ValueRef, order: AtomicOrdering) { debug!("Store {} -> {}", self.ccx.tn.val_to_str(val), diff --git a/src/librustc/middle/trans/intrinsic.rs b/src/librustc/middle/trans/intrinsic.rs index 02df3ff9c20..f652fbec228 100644 --- a/src/librustc/middle/trans/intrinsic.rs +++ b/src/librustc/middle/trans/intrinsic.rs @@ -73,6 +73,23 @@ pub fn trans_intrinsic(ccx: @CrateContext, } } + fn volatile_load_intrinsic(bcx: @Block) { + let first_real_arg = bcx.fcx.arg_pos(0u); + let src = get_param(bcx.fcx.llfn, first_real_arg); + + 
let val = VolatileLoad(bcx, src); + Ret(bcx, val); + } + + fn volatile_store_intrinsic(bcx: @Block) { + let first_real_arg = bcx.fcx.arg_pos(0u); + let dst = get_param(bcx.fcx.llfn, first_real_arg); + let val = get_param(bcx.fcx.llfn, first_real_arg + 1); + + VolatileStore(bcx, val, dst); + RetVoid(bcx); + } + fn copy_intrinsic(bcx: @Block, allow_overlap: bool, tp_ty: ty::t) { let ccx = bcx.ccx(); let lltp_ty = type_of::type_of(ccx, tp_ty); @@ -480,6 +497,9 @@ pub fn trans_intrinsic(ccx: @CrateContext, "bswap32" => simple_llvm_intrinsic(bcx, "llvm.bswap.i32", 1), "bswap64" => simple_llvm_intrinsic(bcx, "llvm.bswap.i64", 1), + "volatile_load" => volatile_load_intrinsic(bcx), + "volatile_store" => volatile_store_intrinsic(bcx), + "i8_add_with_overflow" => with_overflow_instrinsic(bcx, "llvm.sadd.with.overflow.i8", output_type), "i16_add_with_overflow" => diff --git a/src/librustc/middle/typeck/check/mod.rs b/src/librustc/middle/typeck/check/mod.rs index 74575ba3517..ef968d5cf36 100644 --- a/src/librustc/middle/typeck/check/mod.rs +++ b/src/librustc/middle/typeck/check/mod.rs @@ -4226,6 +4226,11 @@ pub fn check_intrinsic_type(ccx: @CrateCtxt, it: @ast::foreign_item) { "bswap32" => (0, ~[ ty::mk_i32() ], ty::mk_i32()), "bswap64" => (0, ~[ ty::mk_i64() ], ty::mk_i64()), + "volatile_load" => + (1, ~[ ty::mk_imm_ptr(tcx, param(ccx, 0)) ], param(ccx, 0)), + "volatile_store" => + (1, ~[ ty::mk_mut_ptr(tcx, param(ccx, 0)), param(ccx, 0) ], ty::mk_nil()), + "i8_add_with_overflow" | "i8_sub_with_overflow" | "i8_mul_with_overflow" => (0, ~[ty::mk_i8(), ty::mk_i8()], ty::mk_tup(tcx, ~[ty::mk_i8(), ty::mk_bool()])), diff --git a/src/libstd/unstable/intrinsics.rs b/src/libstd/unstable/intrinsics.rs index d3649f0285c..46dc03e82b0 100644 --- a/src/libstd/unstable/intrinsics.rs +++ b/src/libstd/unstable/intrinsics.rs @@ -12,6 +12,15 @@ The corresponding definitions are in librustc/middle/trans/foreign.rs. 
+# Volatiles + +The volatile intrinsics provide operations intended to act on I/O +memory, which are guaranteed to not be reordered by the compiler +across other volatile intrinsics. See the LLVM documentation on +[[volatile]]. + +[volatile]: http://llvm.org/docs/LangRef.html#volatile-memory-accesses + # Atomics The atomic intrinsics provide common atomic operations on machine @@ -179,6 +188,9 @@ extern "rust-intrinsic" { /// Execute a breakpoint trap, for inspection by a debugger. pub fn breakpoint(); + #[cfg(not(stage0))] pub fn volatile_load<T>(src: *T) -> T; + #[cfg(not(stage0))] pub fn volatile_store<T>(dst: *mut T, val: T); + /// Atomic compare and exchange, sequentially consistent. pub fn atomic_cxchg(dst: &mut int, old: int, src: int) -> int; /// Atomic compare and exchange, acquire ordering. diff --git a/src/test/run-make/volatile-intrinsics/Makefile b/src/test/run-make/volatile-intrinsics/Makefile new file mode 100644 index 00000000000..fc19412e2ef --- /dev/null +++ b/src/test/run-make/volatile-intrinsics/Makefile @@ -0,0 +1,10 @@ +-include ../tools.mk + +all: + # The tests must pass... + $(RUSTC) main.rs + $(call RUN,main) + # ... and the loads/stores must not be optimized out. + $(RUSTC) main.rs --emit-llvm -S + grep "load volatile" $(TMPDIR)/main.ll + grep "store volatile" $(TMPDIR)/main.ll diff --git a/src/test/run-make/volatile-intrinsics/main.rs b/src/test/run-make/volatile-intrinsics/main.rs new file mode 100644 index 00000000000..5011a7540a7 --- /dev/null +++ b/src/test/run-make/volatile-intrinsics/main.rs @@ -0,0 +1,19 @@ +// Copyright 2013 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or +// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license +// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
+ +use std::unstable::intrinsics::{volatile_load, volatile_store}; + +pub fn main() { + unsafe { + let mut i : int = 1; + volatile_store(&mut i, 2); + assert_eq!(volatile_load(&i), 2); + } +}