Skip to content

Instantly share code, notes, and snippets.

@leinlawun leinlawun/example.rs
Last active Apr 18, 2017

Embed
What would you like to do?
#![no_std]
#![feature(core_intrinsics)]
use core::mem::{size_of, transmute};
use core::intrinsics::{atomic_load, atomic_store};
/// Zero-sized marker type selecting big-endian byte order for the
/// `Load`/`Store` traits below.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
pub struct BigEndian;
/// Zero-sized marker type selecting little-endian byte order for the
/// `Load`/`Store` traits below.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
pub struct LittleEndian;
/// Errors produced by the load/store operations in this module.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub enum Error {
    /// The requested range does not fit inside the backing buffer.
    /// Fields are `(buffer_length, start_offset, end_offset)`.
    OutOfBounds(usize, usize, usize),
}
pub trait LoadCommon<O, D> {
#[inline]
fn load_common<F>(&self, offset: O, reverse: F) -> Result<D, Error>
where F: Fn(&mut [u8]);
}
pub trait Load<O, E, D> {
#[inline]
fn load(&self, offset: O) -> Result<D, Error>;
}
pub trait StoreCommon<O, D> {
#[inline]
fn store_common<F>(&mut self, offset: O, data: D, reverse: F)
-> Result<(), Error>
where F: Fn(&mut [u8]);
}
pub trait Store<O, E, D> {
#[inline]
fn store(&mut self, offset: O, data: D) -> Result<(), Error>;
}
impl<M, O, D> LoadCommon<O, D> for M
where M: AsRef<[u8]>,
O: Into<u64>
{
fn load_common<F>(&self, offset: O, reverse: F) -> Result<D, Error>
where F: Fn(&mut [u8])
{
let buffer = self.as_ref();
let start = offset.into() as usize;
let end = start + size_of::<D>();
if buffer.len() >= end {
let buffer = &buffer[start..end];
let offset = buffer.as_ptr() as *const D;
let data = unsafe {
atomic_load(offset)
};
let buffer: &mut [u8] = unsafe {
transmute((&data, size_of::<D>()))
};
reverse(buffer);
Ok(data)
} else {
Err(Error::OutOfBounds(buffer.len(), start, end))
}
}
}
impl<M, O, D> Load<O, BigEndian, D> for M
    where M: LoadCommon<O, D>
{
    /// Big-endian load: on a little-endian target the loaded bytes are
    /// reversed; on a big-endian target they are returned as-is.
    // `#[inline]` moved here from the trait declaration, where it had no
    // effect — the hint only applies to the implementing function.
    #[inline]
    fn load(&self, offset: O) -> Result<D, Error> {
        self.load_common(offset, |buffer| {
            // cfg! is resolved at compile time, so this branch is free.
            if cfg!(target_endian = "little") {
                buffer.reverse()
            }
        })
    }
}
impl<M, O, D> Load<O, LittleEndian, D> for M
    where M: LoadCommon<O, D>
{
    /// Little-endian load: on a big-endian target the loaded bytes are
    /// reversed; on a little-endian target they are returned as-is.
    // `#[inline]` moved here from the trait declaration, where it had no
    // effect — the hint only applies to the implementing function.
    #[inline]
    fn load(&self, offset: O) -> Result<D, Error> {
        self.load_common(offset, |buffer| {
            // cfg! is resolved at compile time, so this branch is free.
            if cfg!(target_endian = "big") {
                buffer.reverse()
            }
        })
    }
}
impl<M, O, D> StoreCommon<O, D> for M
where M: AsMut<[u8]>,
O: Into<u64>
{
fn store_common<F>(&mut self, offset: O, data: D, reverse: F)
-> Result<(), Error>
where F: Fn(&mut [u8])
{
let buffer = self.as_mut();
let start = offset.into() as usize;
let end = start + size_of::<D>();
if buffer.len() >= end {
let buffer = &mut buffer[start..end];
let offset = buffer.as_ptr() as *mut D;
let buffer: &mut [u8] = unsafe {
transmute((&data, size_of::<D>()))
};
reverse(buffer);
unsafe {
atomic_store(offset, data)
}
Ok(())
} else {
Err(Error::OutOfBounds(buffer.len(), start, end))
}
}
}
impl<M, O, D> Store<O, BigEndian, D> for M
    where M: StoreCommon<O, D>
{
    /// Big-endian store: on a little-endian target the bytes are reversed
    /// before writing; on a big-endian target they are written as-is.
    // `#[inline]` moved here from the trait declaration, where it had no
    // effect — the hint only applies to the implementing function.
    #[inline]
    fn store(&mut self, offset: O, data: D) -> Result<(), Error> {
        self.store_common(offset, data, |buffer| {
            // cfg! is resolved at compile time, so this branch is free.
            if cfg!(target_endian = "little") {
                buffer.reverse()
            }
        })
    }
}
impl<M, O, D> Store<O, LittleEndian, D> for M
    where M: StoreCommon<O, D>
{
    /// Little-endian store: on a big-endian target the bytes are reversed
    /// before writing; on a little-endian target they are written as-is.
    // `#[inline]` moved here from the trait declaration, where it had no
    // effect — the hint only applies to the implementing function.
    #[inline]
    fn store(&mut self, offset: O, data: D) -> Result<(), Error> {
        self.store_common(offset, data, |buffer| {
            // cfg! is resolved at compile time, so this branch is free.
            if cfg!(target_endian = "big") {
                buffer.reverse()
            }
        })
    }
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment
You can’t perform that action at this time.