// except according to those terms.
use ffi::OsString;
-use fortanix_sgx_abi::ByteBuffer;
+use super::abi::usercalls::{copy_user_buffer, alloc, ByteBuffer};
+use sync::atomic::{AtomicUsize, Ordering};
+use sys::os_str::Buf;
+use sys_common::FromInner;
+use slice;
// Pointer (stored as a usize) to the heap-allocated argument vector;
// 0 means "not initialized". Written once in `init`, cleared in `cleanup`.
static ARGS: AtomicUsize = AtomicUsize::new(0);
type ArgsStore = Vec<OsString>;

/// Copies the command-line arguments from userspace into enclave memory.
///
/// Per the fortanix SGX ABI, `argv` points to an array of `argc`
/// `ByteBuffer`s living in user memory; each buffer's contents are copied
/// into the enclave before being stored.
///
/// # Safety
/// `argc`/`argv` must describe a valid userspace `ByteBuffer` array.
/// NOTE(review): `Ordering::Relaxed` assumes `init` runs before any other
/// thread can observe `ARGS` — confirm against enclave startup ordering.
pub unsafe fn init(argc: isize, argv: *const *const u8) {
    if argc != 0 {
        let args = alloc::User::<[ByteBuffer]>::from_raw_parts(argv as _, argc as _);
        let args = args.iter()
            .map( |a| OsString::from_inner(Buf { inner: copy_user_buffer(a) }) )
            .collect::<ArgsStore>();
        ARGS.store(Box::into_raw(Box::new(args)) as _, Ordering::Relaxed);
    }
}
pub unsafe fn cleanup() {
+ let args = ARGS.swap(0, Ordering::Relaxed);
+ if args != 0 {
+ drop(Box::<ArgsStore>::from_raw(args as _))
+ }
}
pub fn args() -> Args {
- Args
+ let args = unsafe { (ARGS.load(Ordering::Relaxed) as *const ArgsStore).as_ref() };
+ if let Some(args) = args {
+ Args(args.iter())
+ } else {
+ Args([].iter())
+ }
}
/// Iterator over the process arguments; borrows from the `'static`
/// `ArgsStore` published by `init`.
pub struct Args(slice::Iter<'static, OsString>);
impl Args {
pub fn inner_debug(&self) -> &[OsString] {
- &[]
+ self.0.as_slice()
}
}
impl Iterator for Args {
type Item = OsString;
fn next(&mut self) -> Option<OsString> {
- None
+ self.0.next().cloned()
}
fn size_hint(&self) -> (usize, Option<usize>) {
- (0, Some(0))
+ self.0.size_hint()
}
}
impl ExactSizeIterator for Args {
fn len(&self) -> usize {
- 0
+ self.0.len()
}
}
impl DoubleEndedIterator for Args {
fn next_back(&mut self) -> Option<OsString> {
- None
+ self.0.next_back().cloned()
}
}
use path::{self, PathBuf};
use str;
use sys::{unsupported, Void, sgx_ineffective, decode_error_kind};
+use collections::HashMap;
+use vec;
+use sync::Mutex;
+use sync::atomic::{AtomicUsize, Ordering};
+use sync::Once;
// NOTE(review): this span looks like two separate diff hunks collapsed
// together (the hunk header was lost in extraction): `RESULT_SUCCESS` reads
// as `errno`'s body, while `unsupported()` likely belongs to a different
// function in the original file. Recover the missing hunk context before
// applying; as written this is not a single valid function body.
pub fn errno() -> i32 {
    RESULT_SUCCESS
    unsupported()
}
// Pointer (stored as a usize) to the lazily-created environment map;
// 0 means "not created yet".
static ENV: AtomicUsize = AtomicUsize::new(0);
// Ensures the map behind `ENV` is allocated exactly once.
static ENV_INIT: Once = Once::new();
type EnvStore = Mutex<HashMap<OsString, OsString>>;
-impl Iterator for Env {
- type Item = (OsString, OsString);
- fn next(&mut self) -> Option<(OsString, OsString)> {
- None
+fn get_env_store() -> Option<&'static EnvStore> {
+ unsafe { (ENV.load(Ordering::Relaxed) as *const EnvStore).as_ref() }
+}
+
/// Returns the environment map, allocating (and leaking) it on first call.
fn create_env_store() -> &'static EnvStore {
    ENV_INIT.call_once(|| {
        ENV.store(Box::into_raw(Box::new(EnvStore::default())) as _, Ordering::Relaxed)
    });
    // SAFETY: `call_once` above guarantees ENV now holds a pointer obtained
    // from Box::into_raw that is never freed, so the reference is 'static.
    unsafe {
        &*(ENV.load(Ordering::Relaxed) as *const EnvStore)
    }
}
+pub type Env = vec::IntoIter<(OsString, OsString)>;
+
pub fn env() -> Env {
- Env
+ let clone_to_vec = |map: &HashMap<OsString, OsString>| -> Vec<_> {
+ map.iter().map(|(k, v)| (k.clone(), v.clone()) ).collect()
+ };
+
+ get_env_store()
+ .map(|env| clone_to_vec(&env.lock().unwrap()) )
+ .unwrap_or_default()
+ .into_iter()
}
-pub fn getenv(_k: &OsStr) -> io::Result<Option<OsString>> {
- Ok(None)
+pub fn getenv(k: &OsStr) -> io::Result<Option<OsString>> {
+ Ok(get_env_store().and_then(|s| s.lock().unwrap().get(k).cloned() ))
}
-pub fn setenv(_k: &OsStr, _v: &OsStr) -> io::Result<()> {
- sgx_ineffective(()) // FIXME: this could trigger a panic higher up the stack
+pub fn setenv(k: &OsStr, v: &OsStr) -> io::Result<()> {
+ let (k, v) = (k.to_owned(), v.to_owned());
+ create_env_store().lock().unwrap().insert(k, v);
+ Ok(())
}
-pub fn unsetenv(_k: &OsStr) -> io::Result<()> {
- sgx_ineffective(()) // FIXME: this could trigger a panic higher up the stack
+pub fn unsetenv(k: &OsStr) -> io::Result<()> {
+ if let Some(env) = get_env_store() {
+ env.lock().unwrap().remove(k);
+ }
+ Ok(())
}
pub fn temp_dir() -> PathBuf {