| blob_id (string, length 40) | language (string, 1 value) | repo_name (string, length 5-140) | path (string, length 5-183) | src_encoding (string, 6 values) | length_bytes (int64, 12-5.32M) | score (float64, 2.52-4.94) | int_score (int64, 3-5) | detected_licenses (list, length 0-47) | license_type (string, 2 values) | text (string, length 12-5.32M) | download_success (bool, 1 class) |
|---|---|---|---|---|---|---|---|---|---|---|---|
d104e74bd84dde222b5e2b9fcd9c7e67bb0d3c41
|
Rust
|
wisehead/rust_lib
|
/01.basic/12.template_trait/trait.rs
|
UTF-8
| 400 | 3.5625 | 4 |
[] |
no_license
|
trait Descriptive {
fn describe(&self) -> String {
String::from("[Object]")
}
}
struct Person {
name: String,
age: u8
}
impl Descriptive for Person {
fn describe(&self) -> String {
format!("{} {}", self.name, self.age)
}
}
fn main() {
let cali = Person {
name: String::from("Cali"),
age: 24
};
println!("{}", cali.describe());
}
| true |
70d33de61c77b3169c9e670f0f87d7a0cf398308
|
Rust
|
snsvrno/lpsettings-rs
|
/lpsettings/src/location.rs
|
UTF-8
| 1,115 | 3.625 | 4 |
[] |
no_license
|
use std::fmt;
use std::env;
/// a location to be used to determine where to load a value from
#[derive(PartialEq)]
pub enum Location {
/// whatever the default recommended location is
Best,
/// the local location
Local,
/// the global location
Global
}
impl Location {
pub fn get_location() -> Location {
match env::var("LOVEPACK_SETTINGS_LOCATION") {
Err(_) => Location::Best,
Ok(value) => {
if value == "global" { Location::Global }
else { Location::Local }
}
}
}
pub fn to_string_cap(&self) -> String {
match self {
Location::Best => "".to_string(),
Location::Local => "Locally".to_string(),
Location::Global => "Globally".to_string(),
}
}
}
impl fmt::Display for Location {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
Location::Best => write!(f,""),
Location::Local => write!(f,"locally"),
Location::Global => write!(f,"globally"),
}
}
}
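#[cfg(test)]
mod tests {
    use super::*;

    // A minimal sketch of the intended behaviour: with LOVEPACK_SETTINGS_LOCATION
    // unset, get_location falls back to the recommended default.
    #[test]
    fn defaults_to_best_when_env_var_is_unset() {
        std::env::remove_var("LOVEPACK_SETTINGS_LOCATION");
        assert!(Location::get_location() == Location::Best);
        assert_eq!(Location::Best.to_string_cap(), "");
        assert_eq!(format!("{}", Location::Global), "globally");
    }
}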
| true |
8ccfad1db2b16428e972b7fd6f32d5670b49fe63
|
Rust
|
AntonHermann/fntools
|
/src/lib.rs
|
UTF-8
| 4,215 | 3.46875 | 3 |
[
"MIT"
] |
permissive
|
#![cfg_attr(not(stable), feature(unboxed_closures, fn_traits))]
#[cfg(not(stable))]
/// Features that use the nightly-only unstable API
pub mod unstable;
pub mod prelude {
pub use super::{ValueExt, swap_args, chain, compose};
}
/// Represents a type which can have functions applied to it (implemented
/// by default for all types).
pub trait ValueExt {
/// Apply a function which takes the parameter by value.
///
/// # Examples
/// ```
/// use fntools::ValueExt;
///
/// let val = (1, 4)
/// .apply(|(a, b)| a + b)
/// .apply(|it| it * it);
///
/// assert_eq!(val, 25)
/// ```
fn apply<R, F: FnOnce(Self) -> R>(self, f: F) -> R where Self: Sized {
f(self)
}
/// Execute function with reference to `self` and return `self`.
///
/// Similar to [`dbg!`] macro - `dbg!(expression)` and `(expression).also(|it| println!("{:?}", it))` do the same[^1] thing.
///
/// # Examples
/// ```
/// use fntools::ValueExt;
///
/// let mut also = 0;
/// let val = (1 + 3)
/// .also(|it: &i32| println!("{}", it)) // will print 4
/// .also(|it| also = it + 1); // mutable state is not really needed here, just for example.
///
/// assert_eq!(also, 5);
/// assert_eq!(val, 4);
/// ```
/// [^1]: actually no, cause `dbg!` also prints file/line
fn also<F: FnOnce(&Self) -> ()>(self, f: F) -> Self where Self: Sized {
f(&self);
self
}
//fn
}
// All functions of `ValueExt` currently require `Self: Sized`, so `T: ?Sized` isn't strictly needed yet; it's a placeholder for the future.
impl<T: ?Sized> ValueExt for T {
// use default definitions...
}
/// Swap function arguments.
///
/// # Examples
/// ```
/// use fntools::swap_args;
///
/// let fun = |a: &str, b: i32| format!("{}{}", a, b);
/// let fun = swap_args(fun);
///
/// assert_eq!(fun(17, "hello, "), "hello, 17")
/// ```
pub fn swap_args<A, B, R, F>(f: F) -> impl FnOnce(B, A) -> R
where F: FnOnce(A, B) -> R
{
move |b: B, a: A| f(a, b)
}
/// Compose two functions.
///
/// Takes functions `f` and `g` and returns `f ∘ g = |a: A| f(g(a))`.
///
/// # Examples
/// ```
/// use fntools::compose;
///
/// let add_two = |a: i32| a + 2;
/// let add_three = |a: i32| a + 3;
/// let add_five = compose(add_two, add_three);
///
/// assert_eq!(add_five(4), 9);
/// ```
///
/// Note the order:
/// ```
/// use fntools::unstable::compose;
///
/// let to_16 = |i: i8| i16::from(i);
/// let to_32 = |i: i16| i32::from(i);
/// let to_64 = |i: i32| i64::from(i);
///
/// // execution order: to_16 -> to_32 -> to_64
/// let i8_to_i64 = compose(compose(to_64, to_32), to_16);
///
/// assert_eq!(i8_to_i64(8i8), 8i64);
/// ```
///
/// See also:
/// - [`unstable::compose`]
/// - [`fntools::chain`]
///
/// [`unstable::compose`]: crate::unstable::compose::compose
/// [`fntools::chain`]: crate::chain
pub fn compose<A, B, C, F, G>(f: F, g: G) -> impl Fn(A) -> C
where
G: Fn(A) -> B,
F: Fn(B) -> C,
{
move |a: A| f(g(a))
}
/// Compose two functions.
///
/// Takes functions `f` and `g` and returns `g ∘ f = |a: A| g(f(a))`.
///
/// # Examples
/// ```
/// use fntools::unstable::chain;
///
/// let add_two = |a: i32| a + 2;
/// let add_three = |a: i32| a + 3;
/// let add_five = chain(add_two, add_three);
///
/// assert_eq!(add_five(4), 9);
/// ```
///
/// Note the order:
/// ```
/// use fntools::unstable::chain;
///
/// let to_16 = |i: i8| i16::from(i);
/// let to_32 = |i: i16| i32::from(i);
/// let to_64 = |i: i32| i64::from(i);
///
/// // execution order: to_16 -> to_32 -> to_64
/// let i8_to_i64 = chain(to_16, chain(to_32, to_64));
///
/// assert_eq!(i8_to_i64(8i8), 8i64);
/// ```
///
/// See also:
/// - [`unstable::chain`]
/// - [`fntools::compose`]
///
/// [`unstable::chain`]: crate::unstable::chain::chain
/// [`fntools::compose`]: crate::compose
pub fn chain<A, B, C, F, G>(f: F, g: G) -> impl Fn(A) -> C
where
F: Fn(A) -> B,
G: Fn(B) -> C,
{
move |a: A| g(f(a))
}
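/// Creates a function that returns `val` the one time it is called (the value is
/// moved in, hence `FnOnce`).
///
/// # Examples
/// ```
/// use fntools::constant;
///
/// let answer = constant(42);
/// assert_eq!(answer(), 42);
/// ```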
pub fn constant<R>(val: R) -> impl FnOnce() -> R {
move || val
}
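/// Like [`constant`], but clones `val` on every call, so the returned function
/// may be called any number of times.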
pub fn constant_clone<R: Clone>(val: R) -> impl Fn() -> R {
move || val.clone()
}
| true |
61e12b9e05313947e9d8592af7e5c1fab8c66f3b
|
Rust
|
Tomarchelone/mp3
|
/src/header.rs
|
UTF-8
| 6,751 | 2.9375 | 3 |
[
"CC0-1.0",
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0",
"MIT"
] |
permissive
|
use tables::*;
use Mp3Error;
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum Version {
Mpeg2_5,
Reserved,
Mpeg2,
Mpeg1,
}
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum Layer {
LayerI,
LayerII,
LayerIII,
}
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum Bitrate {
Indexed(u16),
FreeFormat,
}
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum Mode {
Stereo,
JointStereo,
DualChannel,
Mono,
}
/*
|------|----------------|------------------------------|
| | Layer I and II | Layer III |
|------|----------------|------------------|-----------|
| bits | Layer I & II | Intensity Stereo | MS Stereo |
|------|----------------|------------------|-----------|
| 00 | bands 4 to 31 | Off | Off |
| 01 | bands 8 to 31 | On | Off |
| 10 | bands 12 to 31 | Off | On |
| 11 | bands 16 to 31 | On | On |
|------|----------------|------------------|-----------|
*/
#[derive(Debug, Clone, PartialEq)]
pub enum ModeExtension {
Bands(u8),
Stereo(bool, bool),
}
#[derive(Debug, Clone, PartialEq)]
pub enum Emphasis {
None,
Ms50_15, // 50/15 ms
Reserved,
CcittJ17, // CCITT J.17
}
#[derive(Debug, Clone, PartialEq)]
pub struct FrameHeader {
// MPEG Audio version
version: Version,
// MPEG layer
pub layer: Layer,
// Indicates that frame is protected by CRC (16 bit CRC follows header).
protection: bool,
// Bit rate
pub bitrate: Bitrate,
// Sampling rate
pub sampling_rate: u16,
// Indicates that frame is padded with one extra slot (32 bits for Layer I, 8 bits for others).
pub padding: bool,
// Bit for application-specific triggers.
private: bool,
// Channel Mode
mode: Mode,
// Only used in Joint stereo mode to join information that is of no use for the stereo effect.
mode_extension: ModeExtension,
// Indicates that audio is copyrighted.
copyright: bool,
// Indicates that the frame is located on its original media.
original: bool,
// Tells the decoder that the file must be de-emphasized. Rarely used.
emphasis: Emphasis,
}
impl FrameHeader {
pub fn frame_size(&self) -> Option<usize> {
match self.bitrate {
Bitrate::FreeFormat => None,
Bitrate::Indexed(bitrate) => {
// The number of bytes a slot occupies
// This is described in sections 2.1 and 2.4.2.1 of ISO/IEC 11172-3
let slot_size = match self.layer {
Layer::LayerI => 4_usize,
_ => 1_usize,
};
// Now compute the number of slots.
// This is described in section 2.4.3.1 of ISO/IEC 11172-3
let multiplier = match self.layer {
Layer::LayerI => 12,
_ => 144000,
};
let mut slot_count =
(multiplier * (bitrate as usize)) / (self.sampling_rate as usize);
if self.padding {
slot_count += 1;
}
Some(slot_count * slot_size)
}
}
}
}
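/// Parses the 4-byte MPEG audio frame header at the start of `data`.
///
/// Callers should pass at least 4 bytes: the `&data[..4]` slice below panics on
/// shorter input.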
pub fn parse_frame_header(data: &[u8]) -> Result<FrameHeader, Mp3Error> {
let header = &data[..4];
// Sync word check
if (header[0] != 0xff_u8) || (header[1] & 0xe0_u8 != 0xe0_u8) {
return Err(Mp3Error::HeaderError);
}
let version = match header[1] & 0x18_u8 {
0 => Version::Mpeg2_5,
0x08_u8 => Version::Reserved,
0x10_u8 => Version::Mpeg2,
0x18_u8 => Version::Mpeg1,
_ => return Err(Mp3Error::HeaderError),
};
let layer = match header[1] & 0x06_u8 {
0x02_u8 => Layer::LayerIII,
0x04_u8 => Layer::LayerII,
0x06_u8 => Layer::LayerI,
_ => return Err(Mp3Error::HeaderError),
};
// Protected if the bit is 0
let protection = header[1] & 0x01_u8 == 0;
let bitrate_index = (header[2] >> 4) as usize;
if bitrate_index == 15 {
return Err(Mp3Error::HeaderError);
}
let bitrate = if bitrate_index != 0 {
let rate = match (version, layer) {
(Version::Mpeg1, Layer::LayerI) => BITRATE_INDEX[0][bitrate_index],
(Version::Mpeg1, Layer::LayerII) => BITRATE_INDEX[1][bitrate_index],
(Version::Mpeg1, Layer::LayerIII) => BITRATE_INDEX[2][bitrate_index],
(_, Layer::LayerI) => BITRATE_INDEX[3][bitrate_index],
(_, _) => BITRATE_INDEX[4][bitrate_index],
};
Bitrate::Indexed(rate)
} else {
Bitrate::FreeFormat
};
let sampling_rate_index = ((header[2] & 0x0c_u8) >> 2) as usize;
if sampling_rate_index == 3 {
return Err(Mp3Error::HeaderError);
}
let sampling_rate = match version {
Version::Mpeg1 => SAMPLING_RATE[0][sampling_rate_index],
Version::Mpeg2 => SAMPLING_RATE[1][sampling_rate_index],
Version::Mpeg2_5 => SAMPLING_RATE[2][sampling_rate_index],
_ => return Err(Mp3Error::HeaderError),
};
let padding = header[2] & 0x02_u8 != 0;
let private = header[2] & 0x01_u8 != 0;
let mode = match header[3] & 0xc0_u8 {
0 => Mode::Stereo,
0x40_u8 => Mode::JointStereo,
0x80_u8 => Mode::DualChannel,
0xc0_u8 => Mode::Mono,
_ => unreachable!(),
};
let mode_extension_bits = header[3] & 0x30_u8;
let mode_extension = match layer {
Layer::LayerIII => match mode_extension_bits {
0 => ModeExtension::Stereo(false, false),
0x10_u8 => ModeExtension::Stereo(true, false),
0x20_u8 => ModeExtension::Stereo(false, true),
0x30_u8 => ModeExtension::Stereo(true, true),
_ => unreachable!(),
},
Layer::LayerI | Layer::LayerII => match mode_extension_bits {
0 => ModeExtension::Bands(4),
0x10_u8 => ModeExtension::Bands(8),
0x20_u8 => ModeExtension::Bands(12),
0x30_u8 => ModeExtension::Bands(16),
_ => unreachable!(),
},
};
let copyright = header[3] & 0x08_u8 != 0;
let original = header[3] & 0x04_u8 != 0;
let emphasis = match header[3] & 0x03_u8 {
0 => Emphasis::None,
0x01_u8 => Emphasis::Ms50_15,
0x02_u8 => Emphasis::Reserved,
0x03_u8 => Emphasis::CcittJ17,
_ => unreachable!(),
};
Ok(FrameHeader {
version,
layer,
protection,
bitrate,
sampling_rate,
padding,
private,
mode,
mode_extension,
copyright,
original,
emphasis,
})
}
| true |
6d7f56f8a9f61126c8012a7825b0c4f4c0858f51
|
Rust
|
gbutler69/rust-exercism
|
/alphametics/src/equation/solution.rs
|
UTF-8
| 4,127 | 3.1875 | 3 |
[] |
no_license
|
use std::collections::HashMap;
pub struct EquationSolverBuilder(EquationSolver);
pub struct EquationSolver {
digits: Vec<EquationSolverDigit>,
digits_index: HashMap<char, usize>,
}
struct EquationSolverDigit {
digit: u8,
allow_zero: bool,
}
pub trait EquationSolution {
fn solution_for(&self, alpha_digit: char) -> Option<u8>;
fn solution_digits(&self) -> HashMap<char, u8>;
fn next_solution(&mut self) -> bool;
}
impl EquationSolverBuilder {
pub fn new() -> Self {
Self(EquationSolver::init())
}
pub fn push_digit(&mut self, alpha_digit: char, allow_zero: bool) {
self.0.push_digit(alpha_digit, allow_zero);
}
pub fn solve(&mut self) -> &mut impl EquationSolution {
if self.0.solution_contains_repeats() {
self.0.next_solution();
}
&mut self.0
}
}
impl EquationSolver {
fn init() -> Self {
Self {
digits: Vec::new(),
digits_index: HashMap::new(),
}
}
fn push_digit(&mut self, alpha_digit: char, allow_zero: bool) {
let (digits_index, digits) = (&mut self.digits_index, &mut self.digits);
digits_index
.entry(alpha_digit)
.and_modify(|idx| digits[*idx].disallow_zero(!allow_zero))
.or_insert_with(|| {
digits.push(EquationSolverDigit::with(allow_zero));
digits.len() - 1
});
}
fn next_solution(&mut self) -> bool {
let mut ok = self.increment_solution();
while self.solution_contains_repeats() && ok {
ok = self.increment_solution();
}
ok
}
fn solution_contains_repeats(&self) -> bool {
for i in 0..self.digits.len() {
for j in (i + 1)..self.digits.len() {
if self.digits[i].digit == self.digits[j].digit {
return true;
}
}
}
false
}
fn increment_solution(&mut self) -> bool {
let mut digit_number: usize = 0;
let max_digit_idx = self.digits.len() - 1;
loop {
match &mut self.digits[digit_number] {
EquationSolverDigit { digit: 9, .. } if digit_number == max_digit_idx => {
return false;
}
EquationSolverDigit {
digit: digit @ 9,
allow_zero,
..
} => {
*digit = if *allow_zero { 0 } else { 1 };
digit_number += 1;
continue;
}
EquationSolverDigit { digit, .. } => {
*digit += 1;
return true;
}
}
}
}
}
impl EquationSolution for EquationSolver {
fn solution_for(&self, alpha_digit: char) -> Option<u8> {
self.digits_index
.get(&alpha_digit)
.map(|idx| self.digits[*idx].digit)
}
fn solution_digits(&self) -> HashMap<char, u8> {
self.digits_index
.iter()
.map(move |(c, idx)| (*c, self.digits[*idx].digit))
.collect()
}
fn next_solution(&mut self) -> bool {
EquationSolver::next_solution(self)
}
}
impl EquationSolverDigit {
fn with(allow_zero: bool) -> Self {
match allow_zero {
true => Self {
digit: 0,
allow_zero,
},
false => Self {
digit: 1,
allow_zero,
},
}
}
fn disallow_zero(&mut self, disallow: bool) {
self.allow_zero = self.allow_zero && !disallow;
}
}
// TESTS TESTS TESTS TESTS TESTS TESTS TESTS TESTS TESTS TESTS TESTS TESTS
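#[cfg(test)]
mod builder_sketch {
    use super::*;

    // A minimal sketch of driving the builder; the digit characters and the
    // leading-digit (no zero) rule used here are illustrative only.
    #[test]
    fn produces_distinct_digits() {
        let mut builder = EquationSolverBuilder::new();
        builder.push_digit('A', false); // leading digit: zero not allowed
        builder.push_digit('B', true);
        let solution = builder.solve();
        let digits = solution.solution_digits();
        assert_ne!(digits[&'A'], digits[&'B']);
        assert_ne!(digits[&'A'], 0);
    }
}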
| true |
7b0fb1c633e8b4c6e8695b1c089882b93ed412b5
|
Rust
|
tveronezi/help_sample_runtime_block_on
|
/src/lib.rs
|
UTF-8
| 1,631 | 2.53125 | 3 |
[] |
no_license
|
pub mod docker {
use std::path::Path;
use futures_util::stream::TryStreamExt;
use hyper::Client;
use hyperlocal::{UnixClientExt, Uri};
pub async fn get_containers() -> Result<String, Box<dyn std::error::Error>> {
let path = Path::new("/var/run/docker.sock");
let url = Uri::new(path, "/containers/json").into();
let client = Client::unix();
let response_body = client.get(url).await?.into_body();
let bytes = response_body
.try_fold(Vec::default(), |mut buf, bytes| async {
buf.extend(bytes);
Ok(buf)
})
.await?;
let result = String::from_utf8(bytes)?;
Ok(result)
}
}
#[cfg(test)]
mod tests {
use testcontainers::{clients, images, Docker};
use tokio::runtime::Runtime;
#[test]
fn test_get_containers() {
let generic_postgres = images::generic::GenericImage::new("library/postgres:13")
.with_wait_for(images::generic::WaitFor::message_on_stderr(
"database system is ready to accept connections",
))
.with_env_var("POSTGRES_DB", "trend")
.with_env_var("POSTGRES_USER", "rusty")
.with_env_var("POSTGRES_PASSWORD", "rusty");
let docker = clients::Cli::default();
let container = docker.run(generic_postgres);
let container_id = &container.id()[..12];
Runtime::new().unwrap().block_on(async {
let result = super::docker::get_containers().await;
assert_eq!(format!("{}", container_id), result.unwrap());
});
}
}
| true |
14912a30c356f6a0474b7740fa90c6b81f04d2aa
|
Rust
|
95th/justc
|
/src/symbol/table.rs
|
UTF-8
| 1,242 | 3.125 | 3 |
[] |
no_license
|
use crate::symbol::Symbol;
use std::collections::HashMap;
#[derive(Debug, Clone, PartialEq)]
pub struct SymbolTable<T> {
map: HashMap<Symbol, T>,
changes: Vec<(Symbol, Option<T>)>,
}
impl<T> Default for SymbolTable<T> {
fn default() -> Self {
Self {
map: HashMap::new(),
changes: vec![],
}
}
}
pub struct UndoLog {
undo_len: usize,
}
impl<T> SymbolTable<T> {
pub fn new() -> Self {
Self::default()
}
pub fn insert(&mut self, key: Symbol, value: T) {
let old = self.map.insert(key, value);
self.changes.push((key, old));
}
pub fn get(&self, key: Symbol) -> Option<&T> {
self.map.get(&key)
}
pub fn is_defined(&self, key: Symbol) -> bool {
self.map.contains_key(&key)
}
pub fn snapshot(&self) -> UndoLog {
UndoLog {
undo_len: self.changes.len(),
}
}
pub fn rollback(&mut self, savepoint: UndoLog) {
while self.changes.len() > savepoint.undo_len {
let (k, old) = self.changes.pop().unwrap();
match old {
Some(v) => self.map.insert(k, v),
None => self.map.remove(&k),
};
}
}
}
| true |
7bcf1f685687d7ed78422afa2f257e96f9e517d4
|
Rust
|
eycorsican/leaf
|
/leaf/src/proxy/redirect/datagram.rs
|
UTF-8
| 2,679 | 2.578125 | 3 |
[
"Apache-2.0"
] |
permissive
|
use std::{io, net::IpAddr};
use async_trait::async_trait;
use futures::TryFutureExt;
use crate::{proxy::*, session::*};
/// Handler with a redirect target address.
pub struct Handler {
pub address: String,
pub port: u16,
}
#[async_trait]
impl OutboundDatagramHandler for Handler {
fn connect_addr(&self) -> OutboundConnect {
OutboundConnect::Proxy(Network::Udp, self.address.clone(), self.port)
}
fn transport_type(&self) -> DatagramTransportType {
DatagramTransportType::Unreliable
}
async fn handle<'a>(
&'a self,
sess: &'a Session,
transport: Option<AnyOutboundTransport>,
) -> io::Result<AnyOutboundDatagram> {
let dgram = if let Some(OutboundTransport::Datagram(dgram)) = transport {
dgram
} else {
return Err(io::Error::new(io::ErrorKind::Other, "invalid input"));
};
let target = SocksAddr::from((
self.address.parse::<IpAddr>().map_err(|e| {
io::Error::new(io::ErrorKind::Other, format!("parse IpAddr failed: {}", e))
})?,
self.port,
));
Ok(Box::new(Datagram {
socket: dgram,
destination: sess.destination.clone(),
target,
}))
}
}
pub struct Datagram {
pub socket: Box<dyn OutboundDatagram>,
// The destination application datagrams send to.
pub destination: SocksAddr,
// The target we would like to redirect to.
pub target: SocksAddr,
}
impl OutboundDatagram for Datagram {
fn split(
self: Box<Self>,
) -> (
Box<dyn OutboundDatagramRecvHalf>,
Box<dyn OutboundDatagramSendHalf>,
) {
let (r, s) = self.socket.split();
(
Box::new(DatagramRecvHalf(r, self.destination)),
Box::new(DatagramSendHalf(s, self.target)),
)
}
}
pub struct DatagramRecvHalf(Box<dyn OutboundDatagramRecvHalf>, SocksAddr);
#[async_trait]
impl OutboundDatagramRecvHalf for DatagramRecvHalf {
async fn recv_from(&mut self, buf: &mut [u8]) -> io::Result<(usize, SocksAddr)> {
// Always rewrite the source address to the session destination; this effectively allows only symmetric NAT sessions.
let dest = self.1.clone();
self.0.recv_from(buf).map_ok(|(n, _)| (n, dest)).await
}
}
pub struct DatagramSendHalf(Box<dyn OutboundDatagramSendHalf>, SocksAddr);
#[async_trait]
impl OutboundDatagramSendHalf for DatagramSendHalf {
async fn send_to(&mut self, buf: &[u8], _target: &SocksAddr) -> io::Result<usize> {
self.0.send_to(buf, &self.1).await
}
async fn close(&mut self) -> io::Result<()> {
self.0.close().await
}
}
| true |
aabf786df26eb33bd6a9d354d2e7b79a06d56e2f
|
Rust
|
bjnord/coding_practice
|
/hogan-57/7-struct/magic_8ball/src/main.rs
|
UTF-8
| 415 | 3.125 | 3 |
[] |
no_license
|
extern crate interact_io;
use interact_io::readln;
use rand::Rng;
const ANSWERS: &'static [&'static str] = &["Yes", "No", "Maybe", "Ask again later"];
fn main() {
let _question = readln::read_string("What's your question? ").unwrap();
let answer = pick_answer(ANSWERS.len());
println!("{}.", ANSWERS[answer]);
}
fn pick_answer(n: usize) -> usize {
let mut rng = rand::thread_rng();
rng.gen_range(0, n)
}
| true |
0be0a7f440a87c726b6511bc98fd592ae0456a54
|
Rust
|
forgeyao/rust-learning
|
/RustByExample/9/Capturing/main.rs
|
UTF-8
| 1,071 | 3.65625 | 4 |
[
"Apache-2.0"
] |
permissive
|
/**
*
* https://doc.rust-lang.org/rust-by-example/fn/closures/capture.html
*/
fn main() {
use std::mem;
let color = String::from("green");
// borrow color
let print = || println!("`color`: {}", color);
print();
let _reborrow = &color;
print();
let _color_moved = color;
let mut count = 0;
// mut borrow
let mut inc = || {
count += 1;
println!("`count`: {}", count);
};
inc();
//let _reborrow = &count;
inc();
// A non-Copy type.
let movable = Box::new(3);
// `movable` is moved into the closure.
let consume = || {
println!("`movable`: {:?}", movable);
mem::drop(movable);
};
consume();
// `movable` has already been dropped; calling `consume` again would be an error.
//consume();
let haystack = vec![1, 2, 3];
// Force a move into the closure.
// Without `move` this would only be a borrow.
let contains = move |needle| haystack.contains(needle);
println!("{}", contains(&1));
println!("{}", contains(&4));
// `haystack` has already been moved.
//println!("There're {} elements in vec", haystack.len());
}
| true |
e4e3332b8a80b5895c3c5c0a323d6df1af0cd28c
|
Rust
|
ItsaMeTuni/calendar-server
|
/src/env_helpers.rs
|
UTF-8
| 379 | 2.921875 | 3 |
[] |
no_license
|
use std::env;
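/// Returns the value of the environment variable `name`, panicking with a
/// descriptive message if the variable is not set.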
pub fn get_env(name: &str) -> String
{
env::vars()
.find(|(key, _)| key == name)
.expect(&format!("Missing {} environment variable.", name))
.1
}
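/// Returns the value of the environment variable `name`, falling back to
/// `default` when the variable is not set.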
pub fn get_env_default(name: &str, default: &str) -> String
{
env::vars()
.find(|(key, _)| key == name)
.map(|(_, value)| value)
.unwrap_or(default.to_owned())
}
| true |
cab3ce545ea9e030b3e134e2eb932e1feeb7924e
|
Rust
|
iliabylich/alloc-from-pool
|
/src/pool.rs
|
UTF-8
| 959 | 2.5625 | 3 |
[] |
no_license
|
use crate::{Factory, InnerPool, PoolValue};
#[derive(Debug)]
pub struct Pool<T: 'static> {
inner: *mut InnerPool<T>,
}
impl<T> Default for Pool<T> {
fn default() -> Self {
let inner = Box::leak(Box::new(InnerPool::new()));
Self { inner }
}
}
impl<T> Pool<T> {
pub fn new() -> Self {
Self::default()
}
fn inner_ref(&self) -> &InnerPool<T> {
unsafe { self.inner.as_ref().unwrap() }
}
pub fn alloc(&self, value: T) -> PoolValue<T> {
self.inner_ref().alloc(value)
}
pub fn factory(&self) -> Factory<T> {
Factory::new(self.inner)
}
#[cfg(test)]
pub(crate) fn total_allocations(&self) -> usize {
self.inner_ref().total_allocations()
}
#[cfg(test)]
pub(crate) fn size(&self) -> usize {
self.inner_ref().size()
}
}
impl<T> Drop for Pool<T> {
fn drop(&mut self) {
drop(unsafe { Box::from_raw(self.inner) })
}
}
| true |
f5c7a5402a8f8bd925f482c05c8f7559fc2ba724
|
Rust
|
owen8877/leetcode-rs
|
/src/problem_20.rs
|
UTF-8
| 1,240 | 3.625 | 4 |
[] |
no_license
|
pub fn is_valid(s: String) -> bool {
let n = s.len();
if n == 0 {
return true
}
if n % 2 == 1 {
return false
}
let mut previous_pos = vec![0; n];
let chars: Vec<char> = s.chars().collect();
let mut last_position = 0;
let mut counter = 0;
for i in 0..n {
let c = chars[i];
match c {
'{' | '[' | '(' => {
previous_pos[i] = last_position;
last_position = i;
counter += 1;
},
'}' | ']' | ')' => {
// A closing bracket with no unmatched opener (e.g. "())(") is invalid.
if counter == 0 {
return false
}
let d = chars[last_position];
if !(d == '(' && c == ')') && !(d == '[' && c == ']') && !(d == '{' && c == '}') {
return false
}
last_position = previous_pos[last_position];
counter -= 1;
},
_ => panic!("Didn't expect {}!", c),
}
}
counter == 0
}
#[test]
fn test_is_valid() {
// assert_eq!(is_valid("()".to_string()), true);
// assert_eq!(is_valid("()[]{}".to_string()), true);
// assert_eq!(is_valid("(]".to_string()), false);
// assert_eq!(is_valid("([)]".to_string()), false);
assert_eq!(is_valid("((".to_string()), false);
}
| true |
1feedf4b072b73a36b544cffa1bb06490e2a1592
|
Rust
|
woubuc/postage-rs
|
/src/sync.rs
|
UTF-8
| 3,693 | 2.640625 | 3 |
[
"MIT"
] |
permissive
|
use std::sync::Arc;
use notifier::Notifier;
use ref_count::RefCount;
use std::fmt::Debug;
use crate::Context;
use self::{notifier::NotificationGuard, ref_count::TryDecrement};
pub mod mpmc_circular_buffer;
pub mod notifier;
mod oneshot_cell;
mod ref_count;
// mod rr_lock;
mod state_cell;
pub(crate) mod transfer;
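/// Creates the paired sender/receiver halves of a channel's shared state; both
/// halves hold the same `Arc<Shared<E>>`, and each side's ref count starts at 1.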
pub(crate) fn shared<E>(extension: E) -> (SenderShared<E>, ReceiverShared<E>) {
let inner = Arc::new(Shared::new(extension));
let sender = SenderShared {
inner: inner.clone(),
};
let receiver = ReceiverShared {
inner: inner.clone(),
};
(sender, receiver)
}
#[derive(Debug)]
pub struct Shared<E> {
sender_notify: Notifier,
sender_count: RefCount,
receiver_notify: Notifier,
receiver_count: RefCount,
pub(crate) extension: E,
}
impl<E> Shared<E> {
pub fn new(extension: E) -> Self {
Self {
sender_notify: Notifier::new(),
sender_count: RefCount::new(1),
receiver_notify: Notifier::new(),
receiver_count: RefCount::new(1),
extension,
}
}
}
pub struct SenderShared<E> {
inner: Arc<Shared<E>>,
}
impl<E> SenderShared<E> {
pub fn extension(&self) -> &E {
&self.inner.extension
}
pub fn notify_receivers(&self) {
self.inner.receiver_notify.notify();
}
pub fn notify_self(&self) {
self.inner.sender_notify.notify();
}
pub fn subscribe_recv(&self, cx: &Context<'_>) {
self.inner.sender_notify.subscribe(cx);
}
pub fn recv_guard<'a>(&'a self) -> NotificationGuard<'a> {
self.inner.sender_notify.guard()
}
pub fn is_alive(&self) -> bool {
self.inner.receiver_count.is_alive()
}
pub fn clone_receiver(&self) -> ReceiverShared<E> {
self.inner.receiver_count.increment();
ReceiverShared {
inner: self.inner.clone(),
}
}
pub fn is_closed(&self) -> bool {
!self.is_alive()
}
}
impl<E> Debug for SenderShared<E>
where
E: Debug,
{
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
self.inner.fmt(f)
}
}
impl<E> Clone for SenderShared<E> {
fn clone(&self) -> Self {
let inner = self.inner.clone();
inner.sender_count.increment();
Self { inner }
}
}
impl<E> Drop for SenderShared<E> {
fn drop(&mut self) {
match self.inner.sender_count.decrement() {
TryDecrement::Alive(_) => {}
TryDecrement::Dead => {
self.notify_receivers();
}
}
}
}
pub struct ReceiverShared<E> {
pub(crate) inner: Arc<Shared<E>>,
}
impl<E> ReceiverShared<E> {
pub fn extension(&self) -> &E {
&self.inner.extension
}
pub fn notify_senders(&self) {
self.inner.sender_notify.notify();
}
pub fn subscribe_send(&self, cx: &Context<'_>) {
self.inner.receiver_notify.subscribe(cx);
}
pub fn send_guard<'a>(&'a self) -> NotificationGuard<'a> {
self.inner.receiver_notify.guard()
}
pub fn is_alive(&self) -> bool {
self.inner.sender_count.is_alive()
}
pub fn is_closed(&self) -> bool {
!self.is_alive()
}
}
impl<E> Clone for ReceiverShared<E> {
fn clone(&self) -> Self {
let inner = self.inner.clone();
inner.receiver_count.increment();
Self { inner }
}
}
impl<E> Drop for ReceiverShared<E> {
fn drop(&mut self) {
match self.inner.receiver_count.decrement() {
TryDecrement::Alive(_) => {}
TryDecrement::Dead => {
self.notify_senders();
}
}
}
}
| true |
789a85ce9da6a67c73cd2a0e673101df40f70b94
|
Rust
|
tomhoule/tide-cookie-session
|
/src/lib.rs
|
UTF-8
| 5,810 | 2.6875 | 3 |
[] |
no_license
|
// #[deny(missing_docs)]
#![feature(associated_type_defaults)]
#![feature(async_await)]
#![feature(futures_api)]
pub mod storage;
use storage::*;
use tide_core::{error::StringError, Context, box_async};
use futures::channel::oneshot;
use futures::future::BoxFuture;
use tide_cookies::ContextExt as _;
use cookie::{Cookie,SameSite};
const MIDDLEWARE_MISSING_MSG: &str =
"SessionMiddleware must be used to populate request and response cookies";
pub trait ContextExt {
fn set_session<T: 'static + Sync + Send + Default>(&mut self, new_session: T) -> Result<(), StringError>;
fn take_session<T: 'static + Sync + Send + Default>(&mut self) -> Result<T, StringError>;
// see rails security guide, reset_session
// could be implemented with a channel signaling the reset to the middleware
// fn reset(&self);
}
impl<AppData> ContextExt for tide::Context<AppData> {
fn set_session<T: 'static + Sync + Send + Default>(&mut self, new_session: T) -> Result<(), StringError> {
let session = self
.extensions_mut()
.remove::<Session<T>>()
.ok_or_else(|| StringError(MIDDLEWARE_MISSING_MSG.to_owned()))?;
session
.sender
.send(new_session)
.map_err(|_| StringError("Unable to handle session".to_owned()))
}
fn take_session<T: 'static + Sync + Send + Default>(&mut self) -> Result<T, StringError> {
let session = self
.extensions_mut()
.remove::<Session<T>>()
.ok_or_else(|| StringError(MIDDLEWARE_MISSING_MSG.to_owned()));
session
.map(|s| s.data)
}
}
/// `SessionShape` is the user-defined contents of the session. It has to be `Clone` and gets
/// copied often, so it is preferable not to store large amounts of data in the session.
pub struct Session<Shape> {
data: Shape,
sender: oneshot::Sender<Shape>,
}
impl<SessionShape> Session<SessionShape>
where
SessionShape: Default,
{
fn new(data: SessionShape) -> (Self, oneshot::Receiver<SessionShape>) {
let (sender, receiver) = oneshot::channel();
(Session { data, sender }, receiver)
}
}
type SessionId = String;
/// The cookie session middleware.
pub struct CookieSessionMiddleware<Storage> {
/// The name of the cookie used to store the session id.
cookie_name: String,
storage: Storage,
}
/// The `Shape` parameter is the user-defined shape of the sessions managed by the
/// middleware.
impl<Storage, Shape> CookieSessionMiddleware<Storage>
where
Storage: SessionStorage<Value = Shape>,
//Shape: Send + Sync + 'static + Clone + Default,
Shape: 'static,
{
/// `cookie_name` will be the name of the cookie used to store the session id.
pub fn new(cookie_name: String, storage: Storage) -> Self {
CookieSessionMiddleware {
cookie_name,
storage,
}
}
/// Attempt to read the session id from the cookies on a request.
fn extract_session_id<A>(&self, ctx: &mut tide::Context<A>) -> Option<String> {
ctx.get_cookie(&self.cookie_name).expect("can't read cookies").map(|c| c.value().to_owned())
}
}
impl<AppData, Storage, Shape> tide::middleware::Middleware<AppData>
for CookieSessionMiddleware<Storage>
where
AppData: Send + Sync + 'static,
Storage: SessionStorage<Value = Shape> + Sync + Send + 'static,
Shape: Clone + Send + Sync + 'static + Default,
{
fn handle<'a>(
&'a self,
mut ctx: tide::Context<AppData>,
next: tide::middleware::Next<'a, AppData>,
) -> BoxFuture<'a, tide::Response> {
box_async! {
let session_id = self
.extract_session_id(&mut ctx)
.unwrap_or_else(new_session_id);
let session_shape = self.storage.get(&session_id).await
.ok()
.and_then(|a| a)
.unwrap_or_default();
let (session, mut receiver) = Session::new(session_shape);
ctx.extensions_mut().insert::<Session<Shape>>(session);
let mut session_cookie = Cookie::new(self.cookie_name.clone(), session_id.clone());
session_cookie.set_path("/");
session_cookie.set_http_only(true);
session_cookie.set_same_site(SameSite::Strict);
ctx.set_cookie(session_cookie);
let res = next.run(ctx).await;
let received = receiver.try_recv().ok().and_then(|a| a);
if let Some(received) = received {
self.storage.set(&session_id, received).await.expect("TODO: error handling");
}
res
}
}
}
/// Generate a new session id.
fn new_session_id() -> SessionId {
uuid::Uuid::new_v4().to_string()
}
// #[cfg(test)]
// mod tests {
// use super::*;
// #[test]
// fn cookie_session_id_extraction_works() {
// let middleware = CookieSessionMiddleware::<()>::new(
// "my_app_p".to_owned(),
// Arc::new(InMemorySession::new()),
// )
// .unwrap();
// let mut req = Request::new(http_service::Body::empty());
// req.headers_mut()
// .insert("Cookie", http::header::HeaderValue::from_static("abcd=3"));
// assert!(middleware.extract_session_id(&req).is_none());
// req.headers_mut().insert(
// "Cookie",
// http::header::HeaderValue::from_static("my_app_p=3"),
// );
// assert_eq!(&middleware.extract_session_id(&req).unwrap(), "3");
// req.headers_mut().insert(
// "Cookie",
// http::header::HeaderValue::from_static("something_else=44; my_app_p=3-4; other_app=2"),
// );
// assert_eq!(&middleware.extract_session_id(&req).unwrap(), "3-4");
// }
// }
| true |
da4d3e2a42cf03ed5d79d1970245f1b1a5f9c734
|
Rust
|
Azure/iot-identity-service
|
/cert/cert-renewal/src/cert_interface.rs
|
UTF-8
| 7,484 | 2.671875 | 3 |
[
"MIT"
] |
permissive
|
// Copyright (c) Microsoft. All rights reserved.
#[async_trait::async_trait]
pub trait CertInterface {
/// Represents a key used for a new certificate. Initially returned from cert renewal as a
/// temporary key, and later written to persistent storage with the renewed cert.
type NewKey: Send + Sync;
/// Retrieve a certificate from the provided `cert_id`. May return a chain where the certificate
/// with `cert_id` is element 0.
async fn get_cert(&mut self, cert_id: &str) -> Result<Vec<openssl::x509::X509>, crate::Error>;
/// Retrieve a private key from the provided `key_id`.
async fn get_key(
&mut self,
key_id: &str,
) -> Result<openssl::pkey::PKey<openssl::pkey::Private>, crate::Error>;
/// Renew the provided certificate.
///
/// This function should renew `old_cert_chain` and its key and return the renewed certificate and
/// key. It MUST leave `old_cert_chain` and its key intact upon returning; i.e. it must not erase
/// `old_cert_chain` or its key.
///
/// After this function returns, the renewal engine needs to perform additional checks and
/// calculations on the renewed certificate. If the renewal engine determines the new certificate
/// to be invalid, it will discard the renewed certificate and fall back to the old credentials.
///
/// Once the renewal engine determines the renewed certificate to be valid, it will call
/// `write_credentials`. The old certificate and its key may then be overwritten.
async fn renew_cert(
&mut self,
old_cert_chain: &[openssl::x509::X509],
key_id: &str,
) -> Result<(Vec<openssl::x509::X509>, Self::NewKey), crate::Error>;
/// Write the new credentials to storage, replacing any existing credentials with the same IDs.
///
/// This function is called when certificate renewal has successfully completed. It should write
/// the provided credentials to storage, committing them as the new versions of the provided IDs.
///
/// If any credential write fails, this function must revert any previous changes it made and
/// return an error. For example, if writing `cert` succeeds but writing `key` fails, then this
/// function must revert any changes to `cert` before returning an error.
async fn write_credentials(
&mut self,
old_cert_chain: &[openssl::x509::X509],
new_cert_chain: (&str, &[openssl::x509::X509]),
key: (&str, Self::NewKey),
) -> Result<(), crate::Error>;
}
#[cfg(test)]
#[derive(Clone, Debug)]
pub(crate) struct TestInterface {
pub keys: std::collections::BTreeMap<String, openssl::pkey::PKey<openssl::pkey::Private>>,
pub certs: std::collections::BTreeMap<String, Vec<openssl::x509::X509>>,
pub renew_err: Option<crate::Error>,
}
#[cfg(test)]
type ArcMutex<T> = std::sync::Arc<tokio::sync::Mutex<T>>;
#[cfg(test)]
pub(crate) mod test_interface {
use super::{ArcMutex, TestInterface};
pub(crate) fn new() -> ArcMutex<TestInterface> {
let interface = TestInterface {
keys: std::collections::BTreeMap::default(),
certs: std::collections::BTreeMap::default(),
renew_err: None,
};
let interface = tokio::sync::Mutex::new(interface);
std::sync::Arc::new(interface)
}
pub(crate) async fn new_cert(
interface: &ArcMutex<TestInterface>,
cert_id: &str,
key_id: &str,
common_name: &str,
not_before: i64,
not_after: i64,
) -> openssl::x509::X509 {
let mut interface = interface.lock().await;
let (cert, key) = test_common::credential::custom_test_certificate("test_cert", |cert| {
let mut name = openssl::x509::X509Name::builder().unwrap();
name.append_entry_by_text("CN", common_name).unwrap();
let name = name.build();
cert.set_subject_name(&name).unwrap();
let not_before = openssl::asn1::Asn1Time::from_unix(not_before).unwrap();
let not_after = openssl::asn1::Asn1Time::from_unix(not_after).unwrap();
cert.set_not_before(¬_before).unwrap();
cert.set_not_after(¬_after).unwrap();
});
interface
.certs
.insert(cert_id.to_string(), vec![cert.clone()]);
interface.keys.insert(key_id.to_string(), key);
cert
}
pub(crate) async fn set_renew_err(
interface: &ArcMutex<TestInterface>,
err: Option<crate::Error>,
) {
let mut interface = interface.lock().await;
interface.renew_err = err;
}
}
#[cfg(test)]
#[async_trait::async_trait]
impl CertInterface for ArcMutex<TestInterface> {
type NewKey = openssl::pkey::PKey<openssl::pkey::Private>;
async fn get_cert(&mut self, cert_id: &str) -> Result<Vec<openssl::x509::X509>, crate::Error> {
let interface = self.lock().await;
if let Some(cert) = interface.certs.get(cert_id) {
Ok(cert.clone())
} else {
Err(crate::Error::retryable_error("failed to get cert"))
}
}
async fn get_key(
&mut self,
key_id: &str,
) -> Result<openssl::pkey::PKey<openssl::pkey::Private>, crate::Error> {
let interface = self.lock().await;
if let Some(key) = interface.keys.get(key_id) {
Ok(key.clone())
} else {
Err(crate::Error::retryable_error("failed to get key"))
}
}
async fn renew_cert(
&mut self,
old_cert: &[openssl::x509::X509],
_key_id: &str,
) -> Result<(Vec<openssl::x509::X509>, Self::NewKey), crate::Error> {
let interface = self.lock().await;
if let Some(err) = &interface.renew_err {
Err(err.clone())
} else {
let (cert, key) = test_common::credential::custom_test_certificate(
// This is ignored as the subject name, but still used as the issuer name.
"test-cert",
|cert| {
cert.set_subject_name(old_cert[0].subject_name()).unwrap();
// Match the lifetime of the new cert to the old cert.
let not_before = crate::Time::from(old_cert[0].not_before());
let not_after = crate::Time::from(old_cert[0].not_after());
let lifetime = not_after - not_before;
assert!(lifetime > 0);
let now = i64::from(crate::Time::now());
let not_before = openssl::asn1::Asn1Time::from_unix(now).unwrap();
cert.set_not_before(¬_before).unwrap();
let not_after = now + lifetime;
let not_after = openssl::asn1::Asn1Time::from_unix(not_after).unwrap();
cert.set_not_after(¬_after).unwrap();
},
);
Ok((vec![cert], key))
}
}
async fn write_credentials(
&mut self,
_old_cert_chain: &[openssl::x509::X509],
new_cert_chain: (&str, &[openssl::x509::X509]),
key: (&str, Self::NewKey),
) -> Result<(), crate::Error> {
let mut interface = self.lock().await;
interface
.certs
.insert(new_cert_chain.0.to_string(), new_cert_chain.1.to_vec())
.unwrap();
interface.keys.insert(key.0.to_string(), key.1).unwrap();
Ok(())
}
}
| true |
0ca45ea9686c71f406d0f0e14034a355bcaad5ab
|
Rust
|
tilpner/includedir
|
/lib/src/lib.rs
|
UTF-8
| 3,470 | 2.984375 | 3 |
[
"BSD-3-Clause"
] |
permissive
|
extern crate phf;
#[cfg(feature = "flate2")]
extern crate flate2;
use std::borrow::{Borrow, Cow};
use std::io::{self, BufReader, Cursor, Error, ErrorKind, Read};
use std::fs::File;
use std::sync::atomic::{AtomicBool, Ordering};
#[cfg(feature = "flate2")]
use flate2::bufread::GzDecoder;
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum Compression {
None,
#[cfg(feature = "flate2")]
Gzip
}
/// Runtime access to the included files
pub struct Files {
// Do not access these fields, they are only public to allow for code generation!
#[doc(hidden)]
pub files: phf::Map<&'static str, (Compression, &'static [u8])>,
#[doc(hidden)]
pub passthrough: AtomicBool
}
#[cfg(windows)]
fn as_key(path: &str) -> Cow<str> {
Cow::Owned(path.replace("\\", "/"))
}
#[cfg(not(windows))]
fn as_key(path: &str) -> Cow<str> {
Cow::Borrowed(path)
}
impl Files {
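/// When passthrough is enabled, reads go to the filesystem at runtime instead
/// of the data embedded in the binary.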
pub fn set_passthrough(&self, enabled: bool) {
self.passthrough.store(enabled, Ordering::Relaxed);
}
pub fn is_available(&self, path: &str) -> bool {
self.files.contains_key(path)
}
/// Returns an iterator over all available file names. Does not
/// decompress any compressed data.
pub fn file_names(&'static self) -> FileNames {
FileNames { iter: self.files.keys() }
}
pub fn get(&self, path: &str) -> io::Result<Cow<'static, [u8]>> {
match self.get_raw(path) {
Ok((Compression::None, data)) => Ok(data),
#[cfg(feature = "flate2")]
Ok((Compression::Gzip, compressed)) => {
let mut r = GzDecoder::new(Cursor::new(compressed));
let mut v = Vec::new();
r.read_to_end(&mut v)?;
Ok(Cow::Owned(v))
},
Err(e) => Err(e)
}
}
pub fn get_raw(&self, path: &str) -> io::Result<(Compression, Cow<'static, [u8]>)> {
if self.passthrough.load(Ordering::Relaxed) {
let mut r = BufReader::new(File::open(path)?);
let mut v = Vec::new();
r.read_to_end(&mut v)?;
return Ok((Compression::None, Cow::Owned(v)))
}
let key = as_key(path);
self.files.get(&*key)
.map(|&(c,d)| (c, Cow::Owned(d.to_owned())))
.ok_or_else(|| Error::new(ErrorKind::NotFound, "Key not found"))
}
pub fn read(&self, path: &str) -> io::Result<Box<dyn Read>> {
if self.passthrough.load(Ordering::Relaxed) {
return Ok(Box::new(BufReader::new(File::open(path)?)))
}
let key = as_key(path);
match self.files.get(key.borrow() as &str) {
Some(b) => {
match b.0 {
Compression::None => Ok(Box::new(Cursor::new(b.1))),
#[cfg(feature = "flate2")]
Compression::Gzip => Ok(Box::new(GzDecoder::new(Cursor::new(b.1)))),
}
}
None => Err(Error::new(ErrorKind::NotFound, "Key not found")),
}
}
}
/// Iterates over the file names available for `Files` object.
pub struct FileNames {
// Our internal iterator. We wrap this in a nice struct so our
// caller doesn't need to know the details.
iter: phf::map::Keys<'static, &'static str, (Compression, &'static [u8])>,
}
impl Iterator for FileNames {
type Item = &'static str;
fn next(&mut self) -> Option<Self::Item> {
self.iter.next().cloned()
}
}
| true |
8f0c8435d2eb61e4845322ae68003b97693e31e3
|
Rust
|
porglezomp/libgoscore
|
/src/lib.rs
|
UTF-8
| 13,220 | 3.359375 | 3 |
[] |
no_license
|
#![warn(missing_docs)]
extern crate libc;
use libc::c_char;
/// Contains bindings intended to be called from C
pub mod ffi;
// Data Structures /////////////////////////////////////////////////////////////
/// A wrapper around a `c_char` to enable accessors and setters for the
/// bitflags. A `Stone` contains information on whether a given point contains a
/// stone, whether it's dead, and what the value of the position is.
#[derive(PartialEq, Eq, Debug, Clone, Copy)]
pub struct Stone(c_char);
/// A struct that represents a Go board.
#[derive(Debug)]
pub struct Board<'a> {
stones: &'a mut [Stone],
width: usize,
height: usize,
}
/// Which player's stone.
#[derive(PartialEq, Eq, Debug, Clone, Copy)]
pub enum Color {
#[allow(missing_docs)]
Black,
#[allow(missing_docs)]
White,
}
// Core Functions //////////////////////////////////////////////////////////////
/// Flags all the stones that it believes are dead.
pub fn guess_dead_stones(_board: &mut Board) {
}
/// Assigns scores to each point, taking into account which stones are dead. A
/// point counts for a given player if it's occupied by that player's living
/// stone, or if it cannot reach a living stone of the opposing player.
pub fn score_stones(board: &mut Board) {
// Reset all the scores
for stone in board.stones.iter_mut() {
stone.set_score(None);
stone.set_processed_bit(false);
}
for stone in board.stones.iter_mut() {
if stone.is_present() && !stone.is_dead() {
let color = stone.color();
stone.set_score(color);
stone.set_processed_bit(true);
}
}
let mut queue = Vec::new();
let mut seen = std::collections::HashSet::<usize>::new();
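// Flood-fill each region of unscored points (empty or dead), recording which
// stone colors border it; a region bounded by a single color scores for that color.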
for i in 0..(board.width * board.height) {
if !board.stones[i].processed_bit() {
queue.clear();
seen.clear();
queue.push(i);
let mut boundary_color = None::<Color>;
let mut both = false;
while let Some(i) = queue.pop() {
seen.insert(i);
let neighbors = board.neighbors(i);
for neighbor in neighbors {
if seen.contains(&neighbor) {
continue;
}
if board.stones[neighbor].is_dead() {
queue.push(neighbor);
continue;
}
if let Some(color) = board.stones[neighbor].color() {
if both {
continue;
}
match boundary_color {
Some(c) if c != color => both = true,
Some(_) => (),
None => boundary_color = Some(color),
}
} else {
queue.push(neighbor);
}
}
}
for &idx in &seen {
if !both {
board.stones[idx].set_score(boundary_color);
}
board.stones[idx].set_processed_bit(true);
}
}
}
for stone in board.stones.iter_mut() {
stone.set_processed_bit(false);
}
}
/// Computes the sum of all the scores on the board, accounting for komi. Komi
/// is the handicap and tie-breaker in Go. White always gets at least 1/2 point,
/// the standard Japanese komi is 6.5 points, most other countries use this
/// value as well. For the purpose of this function, pass `komi` rounded down
/// (so the standard 6.5 should be given as 6).
///
/// Values are returned as `(black, white, winner)`.
/// When `black == white`, white wins due to the implicit half-point advantage.
pub fn score_sums(board: &Board, komi: u32) -> (u32, u32, Color) {
let mut black = 0;
let mut white = komi;
for stone in board.stones.iter() {
match stone.score() {
None => (),
Some(Color::Black) => black += 1,
Some(Color::White) => white += 1,
}
}
let winner = if black > white {
Color::Black
} else {
Color::White
};
(black, white, winner)
}
// Board Operations ////////////////////////////////////////////////////////////
impl<'a> Board<'a> {
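/// Returns the indices of the points orthogonally adjacent to index `i`,
/// skipping neighbors that would fall off the board.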
pub fn neighbors(&self, i: usize) -> Vec<usize> {
let mut neighborhood = Vec::with_capacity(4);
if i / self.width != 0 {
neighborhood.push(i - self.width);
}
if i % self.width != self.width - 1 {
neighborhood.push(i + 1);
}
if i / self.width != self.height - 1 {
neighborhood.push(i + self.width);
}
if i % self.width != 0 {
neighborhood.push(i - 1);
}
neighborhood
}
}
// Stone Accessors /////////////////////////////////////////////////////////////
impl Stone {
/// Creates an empty point.
pub fn new() -> Stone {
Stone(0)
}
/// Determines whether a stone is actually present at this point.
pub fn is_present(&self) -> bool {
self.present_bit()
}
/// Makes a stone present or not present. This leaves the color and score
/// the same as it was, but unobservable.
pub fn set_present(&mut self, present: bool) {
self.set_present_bit(present);
}
/// Determines whether or not the given stone is dead. If no stone is present,
/// this returns `false` regardless of what the dead bit says.
pub fn is_dead(&self) -> bool {
self.is_present() && self.dead_bit()
}
/// Can label a stone as alive or dead. This sets the underlying bit whether
/// or not there is currently a stone at that point.
pub fn set_dead(&mut self, dead: bool) {
self.set_dead_bit(dead);
}
/// Returns the color of the stone, or `None` if there is no stone present.
pub fn color(&self) -> Option<Color> {
if self.is_present() {
if self.color_bit() {
Some(Color::White)
} else {
Some(Color::Black)
}
} else {
None
}
}
/// Allows you to set the color of the stone if one is present. If a stone
/// is not present, then passing `Some(color)` will make the stone present.
/// Passing `None` will set the stone to not present.
pub fn set_color(&mut self, color: Option<Color>) {
match color {
None => self.set_present_bit(false),
Some(Color::Black) => {
self.set_present_bit(true);
self.set_color_bit(false);
}
Some(Color::White) => {
self.set_present_bit(true);
self.set_color_bit(true);
}
}
}
/// Tells who the point is worth points for. If the spot is not worth points
/// for either player, then return `None`.
pub fn score(&self) -> Option<Color> {
if self.score_bit() {
if self.score_color_bit() {
Some(Color::White)
} else {
Some(Color::Black)
}
} else {
None
}
}
/// Sets the score for the point.
pub fn set_score(&mut self, color: Option<Color>) {
match color {
None => self.set_score_bit(false),
Some(Color::Black) => {
self.set_score_bit(true);
self.set_score_color_bit(false);
}
Some(Color::White) => {
self.set_score_bit(true);
self.set_score_color_bit(true);
}
}
}
}
// Bitflags ////////////////////////////////////////////////////////////////////
/// This bit is set if the space contains a stone.
const STONE_PRESENCE: c_char = 0x1;
/// This bit is set if the stone is white, and unset if it's black. If
/// `STONE_PRESENCE` is not set, then the value is unspecified.
const STONE_COLOR: c_char = 0x2;
/// This bit is set if the stone in the given space is dead, and unset if it's
/// not. If `STONE_PRESENCE` is not set, then the value is unspecified.
const STONE_DEAD: c_char = 0x4;
/// This bit is set if the given space is worth a point, and unset if it's not
/// worth any points.
const STONE_SCORE: c_char = 0x8;
/// This bit is set if the given space is worth a point for white, and unset if
/// it's worth a point for black. If `STONE_SCORE` is unset, then the value of
/// this bit is unspecified.
const STONE_SCORE_COLOR: c_char = 0x10;
/// This bit is internal to the algorithms. It is set once it has been processed.
const STONE_PROCESSED: c_char = 0x20;
// Bitflag Accessors ///////////////////////////////////////////////////////////
macro_rules! bitflag_getter_setter {
($BIT_CONST:ident , $get_name:ident , $set_name:ident) => {
#[allow(missing_docs)]
pub fn $get_name(&self) -> bool {
(self.0 & $BIT_CONST) != 0
}
#[allow(missing_docs)]
pub fn $set_name(&mut self, bit: bool) {
if bit {
self.0 |= $BIT_CONST;
} else {
self.0 &= !$BIT_CONST;
}
}
}
}
/// Raw accessors to the underlying bitflags.
impl Stone {
bitflag_getter_setter!(STONE_PRESENCE, present_bit, set_present_bit);
bitflag_getter_setter!(STONE_DEAD, dead_bit, set_dead_bit);
bitflag_getter_setter!(STONE_COLOR, color_bit, set_color_bit);
bitflag_getter_setter!(STONE_SCORE, score_bit, set_score_bit);
bitflag_getter_setter!(STONE_SCORE_COLOR, score_color_bit, set_score_color_bit);
bitflag_getter_setter!(STONE_PROCESSED, processed_bit, set_processed_bit);
}
// Tests ///////////////////////////////////////////////////////////////////////
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_bitflag_accessors() {
let mut stone = Stone::new();
assert!(!stone.present_bit());
assert!(!stone.dead_bit());
assert!(!stone.color_bit());
assert!(!stone.score_bit());
assert!(!stone.score_color_bit());
let bools = [false, true];
for &color in &bools {
for &score in &bools {
for &present in &bools {
for &dead in &bools {
for &score_color in &bools {
stone.set_present_bit(present);
stone.set_dead_bit(dead);
stone.set_color_bit(color);
stone.set_score_bit(score);
stone.set_score_color_bit(score_color);
assert_eq!(stone.present_bit(), present);
assert_eq!(stone.dead_bit(), dead);
assert_eq!(stone.color_bit(), color);
assert_eq!(stone.score_bit(), score);
assert_eq!(stone.score_color_bit(), score_color);
}
}
}
}
}
}
#[test]
fn test_stone_accessors() {
let mut stone = Stone::new();
assert!(!stone.is_present());
assert!(!stone.is_dead());
assert_eq!(stone.color(), None);
assert_eq!(stone.score(), None);
let colors = [None, Some(Color::Black), Some(Color::White)];
let bools = [false, true];
for &color in &colors {
for &score in &colors {
for &present in &bools {
for &dead in &bools {
stone.set_color(color);
stone.set_score(score);
stone.set_dead(dead);
assert_eq!(stone.color(), color);
assert_eq!(stone.score(), score);
assert_eq!(stone.is_dead(), dead && stone.is_present());
assert_eq!(stone.is_present(), color.is_some());
stone.set_present(present);
assert_eq!(stone.is_present(), present);
if present && color.is_some() {
assert_eq!(stone.color(), color);
}
if !present {
assert_eq!(stone.color(), None);
}
}
}
}
}
}
#[test]
fn test_neighborhood() {
let mut stones = [Stone::new()];
let board = Board {
stones: &mut stones,
width: 1,
height: 1,
};
assert_eq!(board.neighbors(0), vec![]);
let mut stones = [Stone::new(); 4];
let board = Board {
stones: &mut stones,
width: 2,
height: 2,
};
assert_eq!(board.neighbors(0), vec![1, 2]);
let mut stones = [Stone::new(); 9];
let board = Board {
stones: &mut stones,
width: 3,
height: 3,
};
assert_eq!(board.neighbors(4), vec![1, 5, 7, 3]);
}
}
| true |
82e47ed1081b245c93321c72e939112bd5b928f1
|
Rust
|
sansajn/rtest
|
/thread.rs
|
UTF-8
| 166 | 2.921875 | 3 |
[] |
no_license
|
use std::thread;
fn main() {
let t = thread::spawn(|| {
for i in 1..10 {
println!("hello {}", i);
}
});
t.join().unwrap();
}
| true |
cd3e34e2cc231433da0950ac170348c8a262b90e
|
Rust
|
fluencelabs/llamadb
|
/cli/src/main.rs
|
UTF-8
| 3,803 | 2.78125 | 3 |
[
"MIT"
] |
permissive
|
#![feature(duration_span)]
#[macro_use]
extern crate log;
extern crate env_logger;
extern crate linenoise;
extern crate llamadb;
use std::io::Write;
use std::time::Duration;
mod prettyselect;
use prettyselect::pretty_select;
fn main() {
env_logger::init().unwrap();
let mut lexer = llamadb::sqlsyntax::lexer::Lexer::new();
let mut db = llamadb::tempdb::TempDb::new();
let mut out = std::io::stdout();
loop {
let prompt = if lexer.tokens.is_empty() && lexer.is_no_state() {
"llamadb> "
} else {
" ...> "
};
let val = linenoise::input(prompt);
match val {
None => break,
Some(input) => {
if input == "testdata" {
let mut sink = std::io::sink();
match load_testdata(&mut sink, &mut db) {
Ok(()) => println!("Test data loaded."),
Err(message) => println!("{}", message)
};
continue;
}
lexer.feed_characters(input.chars());
lexer.feed_character(Some('\n'));
if !input.is_empty() && !lexer.tokens.is_empty() {
linenoise::history_add(&input);
}
while let Some(i) = lexer.tokens.iter().position(|token| token == &llamadb::sqlsyntax::lexer::Token::Semicolon) {
match execute(&mut out, &mut db, &lexer.tokens[0..i+1]) {
Ok(()) => (),
Err(message) => println!("{}", message)
};
let right = lexer.tokens.split_off(i+1);
lexer.tokens = right;
}
}
}
}
}
fn execute(out: &mut Write, db: &mut llamadb::tempdb::TempDb, tokens: &[llamadb::sqlsyntax::lexer::Token])
-> Result<(), String>
{
let statement = match llamadb::sqlsyntax::parser::parse_statement(tokens) {
Ok(stmt) => stmt,
Err(e) => return Err(format!("syntax error: {}", e))
};
execute_statement(out, db, statement)
}
fn execute_statement(out: &mut Write, db: &mut llamadb::tempdb::TempDb, statement: llamadb::sqlsyntax::ast::Statement)
-> Result<(), String>
{
use llamadb::tempdb::ExecuteStatementResponse;
let mut execute_result = None;
let duration = Duration::span(|| {
execute_result = Some(db.execute_statement(statement));
});
let seconds = duration.as_secs() as f32 + (duration.subsec_nanos() as f32 * 1.0e-9);
let duration_string = format!("{:.3}s", seconds);
let result = match execute_result.unwrap() {
Ok(r) => r,
Err(e) => return Err(format!("execution error: {}", e))
};
let write_result = match result {
ExecuteStatementResponse::Created => {
writeln!(out, "Created ({}).", duration_string)
},
ExecuteStatementResponse::Inserted(rows) => {
writeln!(out, "{} rows inserted ({}).", rows, duration_string)
},
ExecuteStatementResponse::Select { column_names, rows } => {
pretty_select(out, &column_names, rows, 32).and_then(|row_count| {
writeln!(out, "{} rows selected ({}).", row_count, duration_string)
})
},
ExecuteStatementResponse::Explain(plan) => {
writeln!(out, "{}", plan)
},
};
write_result.unwrap();
Ok(())
}
fn load_testdata(out: &mut Write, db: &mut llamadb::tempdb::TempDb) -> Result<(), String> {
let test_data = include_str!("testdata.sql");
let statements = llamadb::sqlsyntax::parse_statements(test_data);
for statement in statements {
execute_statement(out, db, statement)?;
}
Ok(())
}
| true |
37e2bb25681d35c1da666b5e63392f3d5502a4e4
|
Rust
|
Mirko-von-Leipzig/interledger-rs
|
/crates/interledger-btp/src/packet.rs
|
UTF-8
| 20,394 | 2.75 | 3 |
[
"Apache-2.0"
] |
permissive
|
use super::errors::{BtpPacketError, PacketTypeError};
use bytes::{Buf, BufMut};
use interledger_packet::{
oer::{self, BufOerExt, MutBufOerExt, VariableLengthTimestamp},
OerError,
};
#[cfg(test)]
use once_cell::sync::Lazy;
use std::borrow::Cow;
use std::str;
const REQUEST_ID_LEN: usize = 4;
pub trait Serializable<T> {
fn from_bytes(bytes: &[u8]) -> Result<T, BtpPacketError>;
fn to_bytes(&self) -> Vec<u8>;
}
#[derive(Debug, PartialEq, Clone)]
#[repr(u8)]
enum PacketType {
Message = 6,
Response = 1,
Error = 2,
Unknown,
}
impl PacketType {
/// Length on the wire.
const LEN: usize = 1;
}
impl From<u8> for PacketType {
fn from(type_int: u8) -> Self {
match type_int {
6 => PacketType::Message,
1 => PacketType::Response,
2 => PacketType::Error,
_ => PacketType::Unknown,
}
}
}
#[derive(Debug, PartialEq, Clone)]
pub enum BtpPacket {
Message(BtpMessage),
Response(BtpResponse),
Error(BtpError),
}
impl Serializable<BtpPacket> for BtpPacket {
fn from_bytes(bytes: &[u8]) -> Result<BtpPacket, BtpPacketError> {
if bytes.is_empty() {
return Err(OerError::UnexpectedEof.into());
}
match PacketType::from(bytes[0]) {
PacketType::Message => Ok(BtpPacket::Message(BtpMessage::from_bytes(bytes)?)),
PacketType::Response => Ok(BtpPacket::Response(BtpResponse::from_bytes(bytes)?)),
PacketType::Error => Ok(BtpPacket::Error(BtpError::from_bytes(bytes)?)),
PacketType::Unknown => Err(PacketTypeError::Unknown(bytes[0]).into()),
}
}
fn to_bytes(&self) -> Vec<u8> {
match self {
BtpPacket::Message(packet) => packet.to_bytes(),
BtpPacket::Response(packet) => packet.to_bytes(),
BtpPacket::Error(packet) => packet.to_bytes(),
}
}
}
#[derive(Debug, PartialEq, Clone, Copy)]
pub enum ContentType {
ApplicationOctetStream,
TextPlainUtf8,
Unknown(u8),
}
impl ContentType {
/// Length of the ContentType on the wire
const LEN: usize = 1;
}
impl From<u8> for ContentType {
fn from(type_int: u8) -> Self {
match type_int {
0 => ContentType::ApplicationOctetStream,
1 => ContentType::TextPlainUtf8,
x => ContentType::Unknown(x),
}
}
}
impl From<ContentType> for u8 {
fn from(ct: ContentType) -> Self {
match ct {
ContentType::ApplicationOctetStream => 0,
ContentType::TextPlainUtf8 => 1,
ContentType::Unknown(x) => x,
}
}
}
#[derive(Debug, PartialEq, Clone)]
pub struct ProtocolData {
pub protocol_name: Cow<'static, str>,
pub content_type: ContentType,
pub data: Vec<u8>,
}
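// Wire layout (OER): a protocol-data block is a var-uint entry count followed by, for
// each entry, a var-octet-string protocol name, a one-byte content type and a
// var-octet-string payload. For example, MESSAGE_1 in the tests below encodes its two
// entries as 01 02 | 04 "test" 00 02 ffff | 04 "text" 01 05 "hello" (hex, with names and
// the "hello" payload shown as text), wrapped in the packet type, request id and an
// outer var-octet-string length.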
fn read_protocol_data(reader: &mut &[u8]) -> Result<Vec<ProtocolData>, BtpPacketError> {
// TODO: using bytes here might make sense
let mut protocol_data = Vec::new();
let num_entries = reader.read_var_uint()?;
for _ in 0..num_entries {
let protocol_name = str::from_utf8(reader.read_var_octet_string()?)?;
        // Avoid allocations for the protocol names used by the API. If this list needs to be
        // expanded, phf might be a better fit, but with only three equality checks this is
        // still cheaper.
let protocol_name = if protocol_name == "ilp" {
Cow::Borrowed("ilp")
} else if protocol_name == "auth" {
Cow::Borrowed("auth")
} else if protocol_name == "auth_token" {
Cow::Borrowed("auth_token")
} else {
Cow::Owned(protocol_name.to_owned())
};
if reader.remaining() < ContentType::LEN {
return Err(OerError::UnexpectedEof.into());
}
let content_type = ContentType::from(reader.get_u8());
let data = reader.read_var_octet_string()?.to_vec();
protocol_data.push(ProtocolData {
protocol_name,
content_type,
data,
});
}
check_no_trailing_bytes(reader)?;
Ok(protocol_data)
}
fn put_protocol_data<T: BufMut>(buf: &mut T, protocol_data: &[ProtocolData]) {
buf.put_var_uint(protocol_data.len() as u64);
for entry in protocol_data {
buf.put_var_octet_string(entry.protocol_name.as_bytes());
buf.put_u8(entry.content_type.into());
buf.put_var_octet_string(&*entry.data);
}
}
fn check_no_trailing_bytes(buf: &[u8]) -> Result<(), BtpPacketError> {
// according to spec, there should not be room for trailing bytes.
// this certainly helps with fuzzing.
if !buf.is_empty() {
return Err(BtpPacketError::TrailingBytesErr);
}
Ok(())
}
#[derive(Debug, PartialEq, Clone)]
pub struct BtpMessage {
pub request_id: u32,
pub protocol_data: Vec<ProtocolData>,
}
impl Serializable<BtpMessage> for BtpMessage {
fn from_bytes(bytes: &[u8]) -> Result<BtpMessage, BtpPacketError> {
let mut reader = bytes;
const MIN_LEN: usize = PacketType::LEN + REQUEST_ID_LEN + oer::EMPTY_VARLEN_OCTETS_LEN;
if reader.remaining() < MIN_LEN {
return Err(OerError::UnexpectedEof.into());
}
let packet_type = reader.get_u8();
if PacketType::from(packet_type) != PacketType::Message {
return Err(PacketTypeError::Unexpected(packet_type, PacketType::Message as u8).into());
}
let request_id = reader.get_u32();
let mut contents = reader.read_var_octet_string()?;
check_no_trailing_bytes(reader)?;
let protocol_data = read_protocol_data(&mut contents)?;
Ok(BtpMessage {
request_id,
protocol_data,
})
}
fn to_bytes(&self) -> Vec<u8> {
let mut buf = Vec::new();
buf.put_u8(PacketType::Message as u8);
buf.put_u32(self.request_id);
// TODO make sure this isn't copying the contents
let mut contents = Vec::new();
put_protocol_data(&mut contents, &self.protocol_data);
buf.put_var_octet_string(&*contents);
buf
}
}
#[derive(Debug, PartialEq, Clone)]
pub struct BtpResponse {
pub request_id: u32,
pub protocol_data: Vec<ProtocolData>,
}
impl Serializable<BtpResponse> for BtpResponse {
fn from_bytes(bytes: &[u8]) -> Result<BtpResponse, BtpPacketError> {
let mut reader = bytes;
const MIN_LEN: usize = PacketType::LEN + REQUEST_ID_LEN + oer::EMPTY_VARLEN_OCTETS_LEN;
if reader.remaining() < MIN_LEN {
return Err(OerError::UnexpectedEof.into());
}
let packet_type = reader.get_u8();
if PacketType::from(packet_type) != PacketType::Response {
return Err(
PacketTypeError::Unexpected(packet_type, PacketType::Response as u8).into(),
);
}
let request_id = reader.get_u32();
let mut contents = reader.read_var_octet_string()?;
check_no_trailing_bytes(reader)?;
let protocol_data = read_protocol_data(&mut contents)?;
Ok(BtpResponse {
request_id,
protocol_data,
})
}
fn to_bytes(&self) -> Vec<u8> {
let mut buf = Vec::new();
buf.put_u8(PacketType::Response as u8);
buf.put_u32(self.request_id);
let mut contents = Vec::new();
put_protocol_data(&mut contents, &self.protocol_data);
buf.put_var_octet_string(&*contents);
buf
}
}
#[derive(Debug, PartialEq, Clone)]
pub struct BtpError {
pub request_id: u32,
pub code: String,
pub name: String,
pub triggered_at: VariableLengthTimestamp,
pub data: String,
pub protocol_data: Vec<ProtocolData>,
}
impl Serializable<BtpError> for BtpError {
fn from_bytes(bytes: &[u8]) -> Result<BtpError, BtpPacketError> {
let mut reader = bytes;
const MIN_HEADER_LEN: usize =
PacketType::LEN + REQUEST_ID_LEN + oer::EMPTY_VARLEN_OCTETS_LEN;
if reader.remaining() < MIN_HEADER_LEN {
return Err(OerError::UnexpectedEof.into());
}
let packet_type = reader.get_u8();
if PacketType::from(packet_type) != PacketType::Error {
return Err(PacketTypeError::Unexpected(packet_type, PacketType::Error as u8).into());
}
let request_id = reader.get_u32();
let mut contents = reader.read_var_octet_string()?;
check_no_trailing_bytes(reader)?;
const CODE_LEN: usize = 3;
const MIN_CONTENTS_LEN: usize = CODE_LEN
+ oer::EMPTY_VARLEN_OCTETS_LEN
+ oer::MIN_VARLEN_TIMESTAMP_LEN
+ oer::EMPTY_VARLEN_OCTETS_LEN;
if contents.remaining() < MIN_CONTENTS_LEN {
return Err(OerError::UnexpectedEof.into());
}
let mut code = [0u8; CODE_LEN];
contents.copy_to_slice(&mut code);
let name = str::from_utf8(contents.read_var_octet_string()?)?.to_owned();
let triggered_at = contents.read_variable_length_timestamp()?;
let data = str::from_utf8(contents.read_var_octet_string()?)?.to_owned();
let protocol_data = read_protocol_data(&mut contents)?;
Ok(BtpError {
request_id,
code: str::from_utf8(&code[..])?.to_owned(),
name,
triggered_at,
data,
protocol_data,
})
}
fn to_bytes(&self) -> Vec<u8> {
let mut buf = Vec::new();
buf.put_u8(PacketType::Error as u8);
buf.put_u32(self.request_id);
let mut contents = Vec::new();
// TODO check that the code is only 3 chars
contents.put(self.code.as_bytes());
contents.put_var_octet_string(self.name.as_bytes());
contents.put_variable_length_timestamp(&self.triggered_at);
contents.put_var_octet_string(self.data.as_bytes());
put_protocol_data(&mut contents, &self.protocol_data);
buf.put_var_octet_string(&*contents);
buf
}
}
#[cfg(test)]
mod tests {
use super::*;
// separate mod helps to avoid the 30s test case with `cargo test -- fuzzed`
mod fuzzed {
use super::super::{put_protocol_data, read_protocol_data};
use super::BtpPacket;
use super::Serializable;
#[test]
fn fuzz_0_empty_input() {
fails_to_parse(&[]);
}
#[test]
fn fuzz_1_too_large_uint() {
fails_to_parse(&[6, 0, 0, 1, 0, 1, 45]);
// ^^
// length of uint
}
#[test]
fn fuzz_2_too_short_var_octets() {
fails_to_parse(&[1, 1, 0, 0, 4, 4, 0]);
// ^ length of var_octet_string
}
#[test]
fn fuzz_3_too_short_var_octets() {
// 9 is the length of the next section but there are only two bytes, this used to parse
// just fine because there was no checking for how much was actually read
// possible duplicate
fails_to_parse(&[1, 1, 65, 0, 0, 9, 1, 0]);
// ^ length of var_octet_string
}
#[test]
fn fuzz_4_trailing_bytes() {
// this one has garbage at the end
fails_to_parse(&[1, 0, 0, 2, 0, 2, 0, 0, 250, 134]);
// ^^^ ^^^ extra
}
#[test]
fn fuzz_5_trailing_bytes_inside_protocol_data() {
// this one again has garbage at the end, but inside the protocol data
fails_to_parse(&[1, 1, 0, 1, 0, 6, 1, 0, 6, 1, 6, 1, 1]);
// / | | ^ ^ ^ ^ ^
// protocol data len | | extra
// / |
// len of len /
// num_entries
}
#[test]
fn fuzz_6_too_short_var_octets() {
fails_to_parse(&[1, 1, 2, 217, 19, 50, 212]);
}
#[test]
fn fuzz_7_too_short_var_octets() {
fails_to_parse(&[2, 0, 0, 30, 30, 134, 30, 8, 36, 128, 96, 50]);
}
#[test]
fn fuzz_8_large_allocation() {
// old implementation tries to do malloc(2214616063) here
fails_to_parse(&[1, 1, 0, 6, 1, 132, 132, 0, 91, 255, 50]);
}
#[test]
fn fuzz_9_short_var_octet_string() {
// might be duplicate
#[rustfmt::skip]
fails_to_parse(&[
// message
6,
// requestid
0, 0, 1, 1,
// var octet string len
6,
// varuint len
1,
// varuint zero
0,
]);
}
#[test]
fn fuzz_10_garbage_protocol_data() {
// garbage in the protocol data
// possible duplicate
fails_to_parse(&[6, 0, 0, 1, 1, 6, 1, 0, 253, 1, 1, 1]);
}
#[test]
fn fuzz_11_failed_roundtrip() {
// didn't originally record why this failed roundtrip
roundtrip(&[6, 0, 0, 1, 1, 6, 1, 1, 0, 253, 1, 0]);
}
#[test]
fn fuzz_12_waste_in_length_of_length() {
// this has a length of length 128 | 1 which means single byte length, which doesn't
// really make sense per rules
fails_to_parse(
&[6, 0, 0, 1, 1, 7, 129, 1, 1, 1, 6, 1, 0],
// ^^^ ^
// len of len string len
);
}
#[test]
fn fuzz_13_wasteful_var_uint() {
#[rustfmt::skip]
fails_on_strict(
&[
// message
6,
// requestid
0, 0, 0, 0,
// var octet length
6,
// length of var uint
2,
// var uint byte 1/2
0,
// var uint byte 2/2
1,
// protocol name, var octet string
0,
// content type
1,
// data
0],
&[6, 0, 0, 0, 0, 5, 1, 1, 0, 1, 0],
);
}
#[test]
fn fuzz_14_invalid_timestamp() {
// this failed originally by producing a longer output than input in strict.
//
// longer output was created because the timestamp was parsed when it contained illegal
// characters, and the formatted version of the parsed timestamp was longer than in the
// input.
#[rustfmt::skip]
fails_to_parse(&[
// packettype error
2,
// request id
0, 127, 1, 12, 73,
// code
9, 9, 9,
// name = length prefix + 9x9
9, 9, 9, 9, 9, 9, 9, 9, 9, 9,
// timestamp = length prefix (18) + "4\t3\t\u{c}\t\t3\t\u{c}\t5\t3\t60Z"
// "4.3....3...5.3.60Z"
// this is output as (19) + "00040303050360.000Z"
18, 52, 9, 51, 9, 12, 9, 9, 51, 9, 12, 9, 53, 9, 51, 9, 54, 48, 90,
// data = length prefix + rest
0,
// protocol data
1, 3, 1, 1, 0, 0, 1, 0, 6, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 79, 9, 9,
9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 51,
]);
}
#[test]
fn fuzz_14_1() {
// protocol data from the previous test case
let input: &[u8] = &[
1, 3, 1, 1, 0, 0, 1, 0, 6, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 79, 9, 9,
9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 51,
];
let mut cursor = input;
let pd = read_protocol_data(&mut cursor).unwrap();
let mut out = bytes::BytesMut::new();
put_protocol_data(&mut out, &pd);
assert_eq!(input, out);
}
fn fails_on_strict(data: &[u8], lenient_output: &[u8]) {
let parsed = BtpPacket::from_bytes(data);
if cfg!(feature = "strict") {
parsed.unwrap_err();
} else {
// without strict, the input is not roundtrippable as it wastes bytes
let out = parsed.unwrap().to_bytes();
assert_eq!(out, lenient_output);
}
}
fn fails_to_parse(data: &[u8]) {
BtpPacket::from_bytes(data).unwrap_err();
}
fn roundtrip(data: &[u8]) {
let parsed = BtpPacket::from_bytes(data).expect("failed to parse test case input");
let out = parsed.to_bytes();
assert_eq!(data, out, "{:?}", parsed);
}
}
#[test]
fn content_type_roundtrips() {
        // this is an important property for any of the datatypes, otherwise the fuzzer will
        // find inputs using byte values other than 0 or 1, which the cases above may not cover.
for x in 0..=255u8 {
let y: u8 = ContentType::from(x).into();
assert_eq!(x, y);
}
}
mod btp_message {
use super::*;
static MESSAGE_1: Lazy<BtpMessage> = Lazy::new(|| BtpMessage {
request_id: 2,
protocol_data: vec![
ProtocolData {
protocol_name: "test".into(),
content_type: ContentType::ApplicationOctetStream,
data: hex_literal::hex!("FFFF")[..].to_vec(),
},
ProtocolData {
protocol_name: "text".into(),
content_type: ContentType::TextPlainUtf8,
data: b"hello".to_vec(),
},
],
});
static MESSAGE_1_SERIALIZED: &[u8] =
&hex_literal::hex!("060000000217010204746573740002ffff0474657874010568656c6c6f");
#[test]
fn from_bytes() {
assert_eq!(
BtpMessage::from_bytes(MESSAGE_1_SERIALIZED).unwrap(),
*MESSAGE_1
);
}
#[test]
fn to_bytes() {
assert_eq!(MESSAGE_1.to_bytes(), *MESSAGE_1_SERIALIZED);
}
}
mod btp_response {
use super::*;
static RESPONSE_1: Lazy<BtpResponse> = Lazy::new(|| BtpResponse {
request_id: 129,
protocol_data: vec![ProtocolData {
protocol_name: "some other protocol".into(),
content_type: ContentType::ApplicationOctetStream,
data: hex_literal::hex!("AAAAAA").to_vec(),
}],
});
static RESPONSE_1_SERIALIZED: &[u8] = &hex_literal::hex!(
"01000000811b010113736f6d65206f746865722070726f746f636f6c0003aaaaaa"
);
#[test]
fn from_bytes() {
assert_eq!(
BtpResponse::from_bytes(RESPONSE_1_SERIALIZED).unwrap(),
*RESPONSE_1
);
}
#[test]
fn to_bytes() {
assert_eq!(RESPONSE_1.to_bytes(), *RESPONSE_1_SERIALIZED);
}
}
mod btp_error {
use super::*;
static ERROR_1: Lazy<BtpError> = Lazy::new(|| BtpError {
request_id: 501,
code: String::from("T00"),
name: String::from("UnreachableError"),
triggered_at: VariableLengthTimestamp::parse_from_rfc3339("2018-08-31T02:53:24.899Z")
.unwrap(),
data: String::from("oops"),
protocol_data: vec![],
});
static ERROR_1_SERIALIZED: &[u8] = &hex_literal::hex!("02000001f52f54303010556e726561636861626c654572726f721332303138303833313032353332342e3839395a046f6f70730100");
#[test]
fn from_bytes() {
assert_eq!(BtpError::from_bytes(ERROR_1_SERIALIZED).unwrap(), *ERROR_1);
}
#[test]
fn to_bytes() {
assert_eq!(ERROR_1.to_bytes(), *ERROR_1_SERIALIZED);
}
}
}
| true |
05a46703d0ddc659222e54cd374467408689fe8f
|
Rust
|
gimli-rs/object
|
/src/read/coff/comdat.rs
|
UTF-8
| 6,664 | 2.640625 | 3 |
[
"MIT",
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
use core::str;
use crate::endian::LittleEndian as LE;
use crate::pe;
use crate::read::{
self, ComdatKind, ObjectComdat, ReadError, ReadRef, Result, SectionIndex, SymbolIndex,
};
use super::{CoffFile, CoffHeader, ImageSymbol};
/// An iterator over the COMDAT section groups of a `CoffBigFile`.
pub type CoffBigComdatIterator<'data, 'file, R = &'data [u8]> =
CoffComdatIterator<'data, 'file, R, pe::AnonObjectHeaderBigobj>;
/// An iterator over the COMDAT section groups of a `CoffFile`.
#[derive(Debug)]
pub struct CoffComdatIterator<
'data,
'file,
R: ReadRef<'data> = &'data [u8],
Coff: CoffHeader = pe::ImageFileHeader,
> {
pub(super) file: &'file CoffFile<'data, R, Coff>,
pub(super) index: usize,
}
impl<'data, 'file, R: ReadRef<'data>, Coff: CoffHeader> Iterator
for CoffComdatIterator<'data, 'file, R, Coff>
{
type Item = CoffComdat<'data, 'file, R, Coff>;
fn next(&mut self) -> Option<Self::Item> {
loop {
let index = self.index;
let symbol = self.file.common.symbols.symbol(index).ok()?;
self.index += 1 + symbol.number_of_aux_symbols() as usize;
if let Some(comdat) = CoffComdat::parse(self.file, symbol, index) {
return Some(comdat);
}
}
}
}
/// A COMDAT section group of a `CoffBigFile`.
pub type CoffBigComdat<'data, 'file, R = &'data [u8]> =
CoffComdat<'data, 'file, R, pe::AnonObjectHeaderBigobj>;
/// A COMDAT section group of a `CoffFile`.
#[derive(Debug)]
pub struct CoffComdat<
'data,
'file,
R: ReadRef<'data> = &'data [u8],
Coff: CoffHeader = pe::ImageFileHeader,
> {
file: &'file CoffFile<'data, R, Coff>,
symbol_index: SymbolIndex,
symbol: &'data Coff::ImageSymbol,
selection: u8,
}
impl<'data, 'file, R: ReadRef<'data>, Coff: CoffHeader> CoffComdat<'data, 'file, R, Coff> {
fn parse(
file: &'file CoffFile<'data, R, Coff>,
section_symbol: &'data Coff::ImageSymbol,
index: usize,
) -> Option<CoffComdat<'data, 'file, R, Coff>> {
// Must be a section symbol.
if !section_symbol.has_aux_section() {
return None;
}
// Auxiliary record must have a non-associative selection.
let aux = file.common.symbols.aux_section(index).ok()?;
let selection = aux.selection;
if selection == 0 || selection == pe::IMAGE_COMDAT_SELECT_ASSOCIATIVE {
return None;
}
// Find the COMDAT symbol.
let mut symbol_index = index;
let mut symbol = section_symbol;
let section_number = section_symbol.section_number();
loop {
symbol_index += 1 + symbol.number_of_aux_symbols() as usize;
symbol = file.common.symbols.symbol(symbol_index).ok()?;
if section_number == symbol.section_number() {
break;
}
}
Some(CoffComdat {
file,
symbol_index: SymbolIndex(symbol_index),
symbol,
selection,
})
}
}
impl<'data, 'file, R: ReadRef<'data>, Coff: CoffHeader> read::private::Sealed
for CoffComdat<'data, 'file, R, Coff>
{
}
impl<'data, 'file, R: ReadRef<'data>, Coff: CoffHeader> ObjectComdat<'data>
for CoffComdat<'data, 'file, R, Coff>
{
type SectionIterator = CoffComdatSectionIterator<'data, 'file, R, Coff>;
#[inline]
fn kind(&self) -> ComdatKind {
match self.selection {
pe::IMAGE_COMDAT_SELECT_NODUPLICATES => ComdatKind::NoDuplicates,
pe::IMAGE_COMDAT_SELECT_ANY => ComdatKind::Any,
pe::IMAGE_COMDAT_SELECT_SAME_SIZE => ComdatKind::SameSize,
pe::IMAGE_COMDAT_SELECT_EXACT_MATCH => ComdatKind::ExactMatch,
pe::IMAGE_COMDAT_SELECT_LARGEST => ComdatKind::Largest,
pe::IMAGE_COMDAT_SELECT_NEWEST => ComdatKind::Newest,
_ => ComdatKind::Unknown,
}
}
#[inline]
fn symbol(&self) -> SymbolIndex {
self.symbol_index
}
#[inline]
fn name_bytes(&self) -> Result<&[u8]> {
        // Find the name of the first symbol referring to the section.
self.symbol.name(self.file.common.symbols.strings())
}
#[inline]
fn name(&self) -> Result<&str> {
let bytes = self.name_bytes()?;
str::from_utf8(bytes)
.ok()
.read_error("Non UTF-8 COFF COMDAT name")
}
#[inline]
fn sections(&self) -> Self::SectionIterator {
CoffComdatSectionIterator {
file: self.file,
section_number: self.symbol.section_number(),
index: 0,
}
}
}
/// An iterator over the sections in a COMDAT section group of a `CoffBigFile`.
pub type CoffBigComdatSectionIterator<'data, 'file, R = &'data [u8]> =
CoffComdatSectionIterator<'data, 'file, R, pe::AnonObjectHeaderBigobj>;
/// An iterator over the sections in a COMDAT section group of a `CoffFile`.
#[derive(Debug)]
pub struct CoffComdatSectionIterator<
'data,
'file,
R: ReadRef<'data> = &'data [u8],
Coff: CoffHeader = pe::ImageFileHeader,
> {
file: &'file CoffFile<'data, R, Coff>,
section_number: i32,
index: usize,
}
impl<'data, 'file, R: ReadRef<'data>, Coff: CoffHeader> Iterator
for CoffComdatSectionIterator<'data, 'file, R, Coff>
{
type Item = SectionIndex;
fn next(&mut self) -> Option<Self::Item> {
// Find associated COMDAT symbols.
// TODO: it seems gcc doesn't use associated symbols for this
loop {
let index = self.index;
let symbol = self.file.common.symbols.symbol(index).ok()?;
self.index += 1 + symbol.number_of_aux_symbols() as usize;
// Must be a section symbol.
if !symbol.has_aux_section() {
continue;
}
let section_number = symbol.section_number();
let aux = self.file.common.symbols.aux_section(index).ok()?;
if aux.selection == pe::IMAGE_COMDAT_SELECT_ASSOCIATIVE {
let number = if Coff::is_type_bigobj() {
u32::from(aux.number.get(LE)) | (u32::from(aux.high_number.get(LE)) << 16)
} else {
u32::from(aux.number.get(LE))
};
if number as i32 == self.section_number {
return Some(SectionIndex(section_number as usize));
}
} else if aux.selection != 0 {
if section_number == self.section_number {
return Some(SectionIndex(section_number as usize));
}
}
}
}
}
| true |
1fe4e104609f621d70bc05ab6fc26f692b70735a
|
Rust
|
Nertsal/nertsal-telegram-bot
|
/src/bot/users_state.rs
|
UTF-8
| 1,594 | 2.84375 | 3 |
[] |
no_license
|
use super::*;
use serde::{Deserialize, Serialize};
#[derive(Serialize, Deserialize)]
pub struct UsersState {
pub active_users: HashSet<ChatUser>,
pub chosen_users: HashSet<ChatUser>,
pub all_chosen_users: HashSet<ChatUser>,
}
impl UsersState {
pub fn new() -> Self {
Self {
active_users: HashSet::new(),
chosen_users: HashSet::new(),
all_chosen_users: HashSet::new(),
}
}
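    // `active_users` holds users currently present who have not been chosen this round,
    // `chosen_users` holds chosen users who are still present, and `all_chosen_users`
    // remembers everyone chosen since the last reset, so a user who leaves and rejoins
    // goes straight back into `chosen_users`.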
pub fn add_active_user(&mut self, user: ChatUser) -> bool {
if self.all_chosen_users.contains(&user) {
self.chosen_users.insert(user)
} else {
self.active_users.insert(user)
}
}
pub fn remove_active_user(&mut self, user: &ChatUser) -> bool {
self.active_users.remove(user) || self.chosen_users.remove(user)
}
pub fn add_chosen_user(&mut self, user: ChatUser) {
assert!(self.chosen_users.insert(user.clone()));
assert!(self.all_chosen_users.insert(user));
}
pub fn reset_chosen_users(&mut self) {
for chosen_user in self.chosen_users.drain() {
self.active_users.insert(chosen_user);
}
self.all_chosen_users.clear();
}
}
// `PartialEq` and `Hash` are both keyed on `id` only (see below), so the Hash/Eq
// contract holds even when two `ChatUser`s share an id but differ in name.
#[derive(Debug, Clone, Eq, Serialize, Deserialize)]
pub struct ChatUser {
pub name: String,
pub id: UserId,
}
impl ChatUser {
pub fn new(user: &User) -> Self {
Self {
name: get_user_name(user),
id: user.id,
}
}
}
impl PartialEq for ChatUser {
    fn eq(&self, other: &Self) -> bool {
        self.id == other.id
    }
}
impl std::hash::Hash for ChatUser {
    fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
        self.id.hash(state);
    }
}
| true |
ca3cc50967f8aa0385e64b78e188c56863fd1396
|
Rust
|
JulianKnodt/nadir
|
/src/search.rs
|
UTF-8
| 2,944 | 2.765625 | 3 |
[
"MIT"
] |
permissive
|
extern crate ndarray;
use std::ops::{Mul, Add, Sub, Div};
use std::sync::Arc;
use crate::function::{Function, FunctionGradient};
use crate::line_search::{golden_section_search};
use self::ndarray::{Ix, Ix1, Ix2};
/// The set of possible strategies to pick from
pub enum Strategy {
BFGS,
}
struct BFGS<A>
where A: Clone + Add<A, Output=A> + Mul<A, Output=A> + Sub<A, Output=A> {
hessian_approx: ndarray::Array<A, Ix2>,
inverse_approx: ndarray::Array<A, Ix2>,
}
impl <A, S> StrategyInstance<A, S> for BFGS<A>
where
A: Clone + Mul<A, Output=A> + Add<A, Output=A> + Sub<A, Output=A> + Div<A, Output=A> +
Div<f64, Output=A> + PartialOrd + ndarray::ScalarOperand,
S: ndarray::Data<Elem = A> {
fn predict(&mut self,
curr: &ndarray::ArrayBase<S,Ix1>,
f: &Function<A,S>,
grad: &FunctionGradient<A,S>
) -> ndarray::Array<A,Ix1> {
// B_k p_k = - grad f(x);
// I would imagine that in most cases converting this into an LU system and solving would be
// good, but wikipedia lists an efficient way to perform the inverse, which should be
// exploited.
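    // For reference, the standard BFGS inverse-Hessian update (via Sherman–Morrison) is
    //   H_{k+1} = (I - s y^T / (y^T s)) H_k (I - y s^T / (y^T s)) + s s^T / (y^T s)
    // with s = x_{k+1} - x_k and y = grad f(x_{k+1}) - grad f(x_k).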
let curr_grad = grad(curr);
let direction:ndarray::Array<A,Ix1> = curr_grad * self.inverse_approx;
let step_size = golden_section_search(f, curr, &direction);
let step: ndarray::Array<A,Ix1> = direction * step_size;
let next = curr + &step;
let update = &next - curr;
let update_t = update.t().to_owned();
let step_t = step.t().to_owned();
let test = ndarray::ArrayBase::eye(update.dim()) * update;
let hess_approx = &self.hessian_approx;
let next_hessian = hess_approx + (update * &update_t)/(update_t * &step)
- (hess_approx * &step * &step_t * hess_approx)/(&step_t * hess_approx * &step);
self.hessian_approx = next_hessian;
let inv_approx = &self.inverse_approx;
let next_inv = unimplemented!();
self.inverse_approx = next_inv;
return next;
}
}
/// The generic trait for all instances of strategies, for predicting the next value of
/// iteration
pub(crate) trait StrategyInstance <A, S> where S: ndarray::Data<Elem=A> {
fn predict(&mut self,
curr: &ndarray::ArrayBase<S, Ix1>,
f: &Function<A,S>,
grad: &FunctionGradient<A,S>
) -> ndarray::Array<A, Ix1>
where S: ndarray::Data<Elem=A>;
}
extern crate num_traits;
use self::num_traits::identities::{Zero, One};
impl Strategy {
/// returns a specific instance of a strategy so that it can be called in different methods
/// concurrently
pub(crate) fn instance<A, S>(&self, dim: Ix) -> Arc<StrategyInstance<A, S>>
where
A: Clone + Mul<A, Output=A> + Add<A, Output=A> + Sub<A, Output=A> + Div<A, Output=A> +
One + Zero + Div<f64, Output=A> + PartialOrd + ndarray::ScalarOperand,
S: ndarray::Data<Elem=A> {
match self {
Strategy::BFGS => Arc::new(BFGS{
hessian_approx: ndarray::ArrayBase::eye(dim),
inverse_approx: ndarray::ArrayBase::eye(dim),
}),
}
}
}
| true |
734feadb6e5b8e1bca0ea69c91beff9456fe3e4a
|
Rust
|
Drevoed/narwhalol
|
/src/constants/ranked_tier.rs
|
UTF-8
| 2,309 | 3.046875 | 3 |
[
"MIT"
] |
permissive
|
use std::convert::AsRef;
use std::fmt;
use Inner::*;
#[derive(Clone, PartialEq, Eq, Hash)]
pub struct RankedTier(Inner);
#[derive(Clone, PartialEq, Eq, Hash)]
enum Inner {
Iron,
Bronze,
Silver,
Gold,
Platinum,
Diamond,
Master,
Grandmaster,
Challenger,
}
impl RankedTier {
pub const IRON: RankedTier = RankedTier(Iron);
pub const BRONZE: RankedTier = RankedTier(Bronze);
pub const SILVER: RankedTier = RankedTier(Silver);
pub const GOLD: RankedTier = RankedTier(Gold);
pub const PLATINUM: RankedTier = RankedTier(Platinum);
pub const DIAMOND: RankedTier = RankedTier(Diamond);
pub const MASTER: RankedTier = RankedTier(Master);
pub const GRANDMASTER: RankedTier = RankedTier(Grandmaster);
pub const CHALLENGER: RankedTier = RankedTier(Challenger);
#[inline]
pub fn as_str(&self) -> &str {
match self.0 {
Iron => "IRON",
Bronze => "BRONZE",
Silver => "SILVER",
Gold => "GOLD",
Platinum => "PLATINUM",
Diamond => "DIAMOND",
Master => "MASTER",
Grandmaster => "GRANDMASTER",
Challenger => "CHALLENGER",
}
}
}
impl AsRef<str> for RankedTier {
fn as_ref(&self) -> &str {
self.as_str()
}
}
impl<'a> PartialEq<&'a RankedTier> for RankedTier {
#[inline]
fn eq(&self, other: &&'a RankedTier) -> bool {
self == *other
}
}
impl<'a> PartialEq<RankedTier> for &'a RankedTier {
#[inline]
fn eq(&self, other: &RankedTier) -> bool {
*self == other
}
}
impl PartialEq<str> for RankedTier {
#[inline]
fn eq(&self, other: &str) -> bool {
self.as_ref() == other
}
}
impl<'a> PartialEq<&'a str> for RankedTier {
#[inline]
fn eq(&self, other: &&'a str) -> bool {
self.as_ref() == *other
}
}
impl PartialEq<RankedTier> for str {
#[inline]
fn eq(&self, other: &RankedTier) -> bool {
self == other.as_ref()
}
}
impl fmt::Debug for RankedTier {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
fmt.write_str(self.as_ref())
}
}
impl fmt::Display for RankedTier {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
fmt.write_str(self.as_ref())
}
}
| true |
67c3209c9fa1436a4f0a3fabd9cbb541172b4bba
|
Rust
|
SalDev40/Rust
|
/dataStructures/src/libs/binaryTree.rs
|
UTF-8
| 2,789 | 3.4375 | 3 |
[] |
no_license
|
#![allow(non_snake_case)]
#![allow(dead_code)]
#![allow(unused_variables)]
use std::fmt::{Debug, Display};
use std::io::{Error, ErrorKind};
#[derive(Debug)]
pub struct Node<T> {
data: T,
left: BinTree<T>,
right: BinTree<T>,
}
#[derive(Debug)]
pub enum BinTree<T> {
NonEmptyTree(Box<Node<T>>),
EmptyTree,
}
impl<T> BinTree<T>
where
T: PartialOrd + Debug + Display + Copy,
{
fn new() -> Self {
return BinTree::EmptyTree;
}
fn add(&mut self, data: T) {
match &mut *self {
BinTree::NonEmptyTree(node) => {
if data <= node.data {
println!("GOING LEFT {:?} (data) <= {:?} (node)", data, node.data);
node.left.add(data)
} else {
println!("GOING RIGHT {:?} (node) < {:?} (data)", node.data, data);
node.right.add(data)
}
}
BinTree::EmptyTree => {
*self = BinTree::NonEmptyTree(Box::new(Node {
data: data,
left: BinTree::EmptyTree,
right: BinTree::EmptyTree,
}));
}
}
}
    fn findItem(&self, item: T) -> Result<bool, Box<dyn std::error::Error>> {
        match &*self {
BinTree::NonEmptyTree(node) => {
if node.data != item {
if item <= node.data {
println!("SEARCHING LEFT {:?} (item) <= {:?} (node)", item, node.data);
node.left.findItem(item)?;
} else {
println!("SEARCHING RIGHT {:?} (node) < {:?} (item)", node.data, item);
//propogate error up recursively
node.right.findItem(item)?;
}
}
return Ok(true);
}
BinTree::EmptyTree => {
println!("TREE EMPTY!");
return Err(Box::new(Error::new(
ErrorKind::Other,
"find() -> ITEM NOT FOUND",
)));
}
}
}
fn removeItem(&mut self, data: T) {
}
}
pub fn printLog(x: i32, tree: &BinTree<i32>) {
println!("ADDED: {:?}", x);
// println!("CURRENT TREE -> {:#?} \n", tree);
}
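// The inserts in binMain below (10, 20, 5, 16) build this tree, since values less than
// or equal to the current node go left and larger values go right:
//   10 -> { left: 5, right: 20 -> { left: 16 } }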
pub fn binMain() {
let mut tree: BinTree<i32> = BinTree::new();
tree.add(10);
printLog(10, &tree);
tree.add(20);
printLog(20, &tree);
tree.add(5);
printLog(5, &tree);
tree.add(16);
printLog(16, &tree);
println!("{:#?}", tree);
println!("found -> {:#?}", tree.findItem(3).expect("FIND"));
// tree.add(2);
// println!("CURRENT TREE -> {:#?}", tree);
}
| true |
5a1bf3bff700c1ae7f911fbcb2e2f47c719341d9
|
Rust
|
luoxiangyong/travis-rust-demo
|
/src/main.rs
|
UTF-8
| 119 | 2.765625 | 3 |
[] |
no_license
|
fn add(a: i64, b: i32) -> i64 {
a + b as i64
}
fn main() {
println!("The Add结果是:{}", add(100i64,1000));
}
| true |
c3b5f4d26e2684765a0ce8d04c95be8b79662244
|
Rust
|
microsoft/rust_win_etw
|
/win_etw_provider/src/types.rs
|
UTF-8
| 3,711 | 2.578125 | 3 |
[
"LicenseRef-scancode-generic-cla",
"MIT"
] |
permissive
|
//! Contains items that are part of the implementation of `win_etw`, but not intended to be used
//! directly by application code. Only code generated by the `trace_logging_provider` macro
//! should use these types.
#![doc(hidden)]
pub use widestring::{U16CStr, U16CString};
use crate::EventDataDescriptor;
use zerocopy::{AsBytes, FromBytes};
/// The value used in `SocketAddrV4::family` to identify IPv4 addresses.
pub const AF_INET: u16 = 2;
/// The value used in `SocketAddrV6::family` to identify IPv6 addresses.
pub const AF_INET6: u16 = 23;
/// This has the same in-memory representation as the Win32 `[SOCKADDR_IN]` structure.
///
/// [SOCKADDR_IN]: https://docs.microsoft.com/en-us/windows/win32/api/ws2def/ns-ws2def-sockaddr_in
#[repr(C)]
#[derive(AsBytes, Clone)]
pub struct SocketAddrV4 {
/// Address family identifier.
pub family: u16,
/// Port identifier, stored in big-endian form.
pub port: [u8; 2],
/// IPv4 address, stored in big-endian form.
pub address: [u8; 4],
/// Zero padding.
pub zero: [u8; 8],
}
#[cfg(feature = "std")]
impl From<&std::net::SocketAddrV4> for SocketAddrV4 {
fn from(value: &std::net::SocketAddrV4) -> Self {
let port = value.port();
Self {
family: AF_INET,
address: value.ip().octets(),
port: port.to_be_bytes(),
zero: [0; 8],
}
}
}
impl<'a> From<&'a crate::types::SocketAddrV4> for EventDataDescriptor<'a> {
fn from(value: &'a crate::types::SocketAddrV4) -> EventDataDescriptor<'a> {
Self::from(value.as_bytes())
}
}
/// See `[SOCKADDR_IN6_LH](https://docs.microsoft.com/en-us/windows/win32/api/ws2ipdef/ns-ws2ipdef-sockaddr_in6_lh)`.
#[repr(C)]
#[derive(Clone, AsBytes, FromBytes)]
pub struct SocketAddrV6 {
/// Address family identifier.
pub family: u16,
/// Port identifier, stored in big-endian form.
pub port: [u8; 2],
/// IPv6 flow info.
pub flow_info: [u8; 4],
/// IPv6 address.
pub address: [u8; 16],
/// IPv6 scope.
pub scope_id: [u8; 4],
}
#[cfg(feature = "std")]
impl From<&std::net::SocketAddrV6> for SocketAddrV6 {
fn from(value: &std::net::SocketAddrV6) -> Self {
Self {
family: AF_INET6,
port: value.port().to_be_bytes(),
flow_info: value.flowinfo().to_be_bytes(),
address: value.ip().octets(),
scope_id: value.scope_id().to_be_bytes(),
}
}
}
impl<'a> From<&'a crate::types::SocketAddrV6> for EventDataDescriptor<'a> {
fn from(value: &'a crate::types::SocketAddrV6) -> EventDataDescriptor<'a> {
Self::from(value.as_bytes())
}
}
/// See `[FILETIME](https://docs.microsoft.com/en-us/windows/win32/api/minwinbase/ns-minwinbase-filetime)`.
#[repr(transparent)]
#[derive(Copy, Clone, Eq, PartialEq, Hash)]
pub struct FILETIME(pub u64);
#[cfg(feature = "std")]
mod std_support {
use super::*;
use core::convert::TryFrom;
use std::time::{Duration, SystemTime, UNIX_EPOCH};
/// Time elapsed between the Windows epoch and the UNIX epoch.
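    /// (1601-01-01 to 1970-01-01 spans 369 years = 134,774 days, including 89 leap days,
    /// which is 11,644,473,600 seconds.)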
const WINDOWS_EPOCH_TO_UNIX_EPOCH: Duration = Duration::from_secs(11_644_473_600);
pub struct OutOfRangeError;
impl TryFrom<SystemTime> for FILETIME {
type Error = OutOfRangeError;
fn try_from(t: SystemTime) -> Result<Self, Self::Error> {
match t.duration_since(UNIX_EPOCH) {
Ok(unix_elapsed) => {
let windows_elapsed: Duration = unix_elapsed + WINDOWS_EPOCH_TO_UNIX_EPOCH;
Ok(FILETIME((windows_elapsed.as_nanos() / 100) as u64))
}
Err(_) => Err(OutOfRangeError),
}
}
}
}
| true |
b419347512a098eef217faf5cfe4d051c7d83867
|
Rust
|
lelongg/ros_package_manifest
|
/src/tags/license.rs
|
UTF-8
| 607 | 2.75 | 3 |
[] |
no_license
|
use roxmltree::Node;
use std::convert::TryFrom;
use thiserror::Error;
#[derive(Default, Debug, Clone, PartialEq)]
pub struct License {
pub license: String,
pub file: Option<String>,
}
#[derive(Debug, Clone, Error)]
pub enum LicenseError {
#[error("no license")]
NoLicense,
}
impl TryFrom<Node<'_, '_>> for License {
type Error = LicenseError;
fn try_from(node: Node) -> Result<Self, Self::Error> {
Ok(Self {
license: node.text().ok_or(LicenseError::NoLicense)?.to_string(),
file: node.attribute("file").map(ToString::to_string),
})
}
}
| true |
bb615b28999bf96244f618372e5bd5acb8f0e73c
|
Rust
|
felixwatts/harvest
|
/src/evaluator.rs
|
UTF-8
| 3,748 | 2.859375 | 3 |
[] |
no_license
|
use crate::plan::Plan;
use crate::constant::SEASON_LENGTH;
use crate::tasks::Tasks;
use crate::params::Params;
use crate::bed_plan::BedPlan;
pub struct Evaluator<'a> {
params: &'a Params,
plan: &'a Plan
}
impl<'a> Evaluator<'a> {
pub fn new(
params: &'a Params,
plan: &'a Plan) -> Self {
Evaluator {
params: params,
plan: plan
}
}
pub fn get_harvest_plan(&self) -> Vec<Vec<i32>> {
let mut harvest_plan = vec![ vec![0i32; SEASON_LENGTH * 2]; self.params.varieties.len() ];
for bed in 0..self.params.beds.len() {
let bed_plan = self.get_bed_plan(bed);
for bed_week in bed_plan.iter() {
if bed_week.harvestable_units != 0 {
harvest_plan[bed_week.crop][bed_week.week] += bed_week.harvestable_units;
}
}
}
harvest_plan
}
// Returns a number between 0 and 1 representing the extent to which each variety that could
// possibly be harvested in each week can be harvested with enough quantity to fill the market
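    // For example, with 2 varieties each harvestable in 10 of the weeks (20 variety-week
    // slots) and 15 of those slots reaching `num_baskets`, this returns 15 / 20 = 0.75.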
pub fn get_market_saturation(&self) -> f32 {
let harvest_plan = self.get_harvest_plan();
let mut potential = 0;
let mut actual = 0;
for variety in 0..self.params.varieties.len() {
for week in 0..SEASON_LENGTH {
if self.params.varieties[variety].is_harvestable_in_week(week) {
potential += 1;
if harvest_plan[variety][week] >= self.params.num_baskets {
actual += 1;
}
}
}
}
actual as f32 / potential as f32
}
// returns the profit in value units realized by this planting schedule
// over one season. Note that this captures the value harvested between jan1 and dec 31
// and doesn't capture the value of crops planted in that time but not harvested
// until next season
pub fn get_profit(&self) -> i32 {
self._get_profit(SEASON_LENGTH)
}
// The fitness function used by the evolutionary algorithm. Captures the value of
// all crops harvested _or planted_ between jan1 and dec 31. This encourages plans
// that set up good overwintered crops for next year.
pub fn get_fitness(&self) -> i32 {
self._get_profit(SEASON_LENGTH*2)
}
fn _get_profit(&self, season_length: usize) -> i32 {
// TODO model cost of production better
let cost: i32 = self.plan.get_num_plantings();
let mut profit = -cost;
let harvest_plan = self.get_harvest_plan();
for variety in 0..self.params.varieties.len() {
for week in 0..season_length {
let harvestable_units = harvest_plan[variety][week];
let sellable_units = std::cmp::min(self.params.num_baskets, harvestable_units);
let val = sellable_units * self.params.varieties[variety].value_per_unit;
profit += val;
}
}
profit
}
pub fn get_bed_utilization(&self) -> f32 {
let mut utilization = 0.0;
for bed in 0..self.params.beds.len() {
utilization += self.get_bed_plan(bed).utilization();
}
utilization /= self.params.beds.len() as f32;
utilization
}
pub fn get_tasks(&self) -> Tasks {
let mut result = Tasks::new();
for bed in 0..self.params.beds.len() {
let plan = self.get_bed_plan(bed);
plan.write_instructions(&mut result);
}
result
}
fn get_bed_plan(&'a self, bed: usize) -> BedPlan<'a> {
BedPlan::new(bed, &self.plan, self.params)
}
}
| true |
cfe6731be43866e6e04704a4876f0e42d921c4f5
|
Rust
|
18616378431/myCode
|
/rust/test3-30/src/main.rs
|
UTF-8
| 410 | 3.765625 | 4 |
[] |
no_license
|
// Implement the `Add` operator for a new type.
use std::ops::Add;
#[derive(Debug)]
struct Point {
x : i32,
y : i32,
}
impl Add for Point {
type Output = Point;
    fn add(self, other: Point) -> Point { // the return type can be written as `Point`, `Self`, or `Self::Output`
Point {
x : self.x + other.x,
y : self.y + other.y,
}
}
}
fn main() {
println!("{:?}", Point {x : 1, y : 0} + Point {x : 2, y : 3});
}
| true |
cf427472b15f6687eaeb737ff0b1bbfa350d3828
|
Rust
|
icub3d/puzzles
|
/advent-of-code/2020/day24/src/main.rs
|
UTF-8
| 2,753 | 3.296875 | 3 |
[
"MIT"
] |
permissive
|
use std::collections::{HashMap, HashSet};
use std::fs::File;
use std::io::{BufRead, BufReader};
fn moves(s: String) -> Vec<String> {
let mut mm = vec![];
let mut it = s.chars();
loop {
let cur = it.next();
match cur {
None => break,
Some(c) => {
if c == 'n' || c == 's' {
mm.push(vec![c, it.next().unwrap()].iter().collect::<String>());
} else {
mm.push(c.to_string());
}
}
}
}
mm
}
fn main() {
let br = BufReader::new(File::open("input").unwrap());
    // We'll parse each line with these direction names and then track the tile's
    // position with the deltas.
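    // These deltas are axial hex coordinates, so opposite moves cancel out: for example
    // "nwwswee" sums to (-1,1) + (-1,0) + (0,-1) + (1,0) + (1,0) = (0,0), i.e. it ends
    // back on the starting tile.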
let dirs = vec!["ne", "nw", "se", "sw", "e", "w"];
let deltas = vec![(0, 1), (-1, 1), (1, -1), (0, -1), (1, 0), (-1, 0)];
let mut directions = HashMap::new();
for (dir, delta) in dirs.iter().zip(deltas.iter()) {
directions.insert(dir.to_string(), delta);
}
// We'll track the tiles that are turned on in a set.
let mut black = HashSet::new();
// Iterate through all the lines.
for l in br.lines() {
let mut cur = (0, 0);
let mvs = moves(l.unwrap());
for mv in mvs {
let delta = directions[&mv];
cur.0 += delta.0;
cur.1 += delta.1;
}
if black.contains(&cur) {
black.remove(&cur);
} else {
black.insert(cur.clone());
}
}
println!("{:?}", black.len());
for _ in 0..100 {
// We now need to track white neighbors and black tiles we
// don't want to flip.
let mut white = HashMap::new();
let mut keep = HashSet::new();
// Loop through our current tileset.
for tile in black.iter() {
// Find neighbors or update white neighbors.
let mut neighbors = 0;
for (_, delta) in directions.iter() {
let neighbor = (tile.0 + delta.0, tile.1 + delta.1);
if black.contains(&neighbor) {
neighbors += 1;
} else {
*white.entry(neighbor).or_insert(0) += 1;
}
}
// Should this one stay black?
if neighbors == 1 || neighbors == 2 {
keep.insert(tile.clone());
}
}
let w2b = white
.iter()
.filter(|(_, count)| **count == 2)
.map(|(tile, _)| tile)
.cloned()
.collect::<HashSet<(isize, isize)>>();
black = keep
.union(&w2b)
.cloned()
.collect::<HashSet<(isize, isize)>>();
}
println!("{}", black.len());
}
| true |
4080abbb571f6c546913e96eaa85b736dad13764
|
Rust
|
Lukazoid/lz_quic
|
/src/packets/incoming_packet.rs
|
UTF-8
| 368 | 2.53125 | 3 |
[] |
no_license
|
use bytes::Bytes;
use chrono::{DateTime, UTC};
use packets::PacketHeader;
use std::net::SocketAddr;
/// An incoming packet before any decryption has taken place.
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub struct IncomingPacket {
pub source_address: SocketAddr,
pub packet_header: PacketHeader,
pub data: Bytes,
pub received_at: DateTime<UTC>,
}
| true |
a432a84912c410f987f102169cfe9c980185a51d
|
Rust
|
timmonfette1/rustworking-cli
|
/src/main.rs
|
UTF-8
| 5,906 | 2.90625 | 3 |
[
"MIT"
] |
permissive
|
/* rustworking-cli
*
* Command line tool to handle network tasks for
* system administration. Has the ability to perform
* various tasks in bulk
*
* Currently supports:
* PING an IP address (single or in bulk)
*
* Coming soon:
* Send a TCP packet to test a connection
* Send a UDP packet to test a connection
*
* Author: Tim Monfette
* Version: 0.1.0
*/
extern crate argparse;
extern crate rustworking_core;
use argparse::{ArgumentParser, Store, StoreTrue, Print};
use rustworking_core::rustytools;
use std::env;
use std::process::exit;
use std::io::prelude::*;
use std::str::from_utf8;
use std::ascii::AsciiExt;
// Struct for the accepted options
struct Options {
verbose: bool,
tool: String,
ip: String,
port: String,
subnet: String,
filepath: String,
}
fn main() {
// Set default values
let mut options = Options {
verbose: false,
tool: String::new(),
ip: String::from("localhost"),
port: String::from("80"),
subnet: String::new(),
filepath: String::new()
};
// For getting the usage message when building the argparser
let help: String;
let mut buf = Vec::<u8>::new();
let args: Vec<String> = env::args().collect();
{ // Open scope
let mut ap = ArgumentParser::new();
ap.set_description("Test connections to a server or set of servers.");
ap.add_option(&["-V", "--version"],
Print(format!("Current version of rustworking: {}",
env!("CARGO_PKG_VERSION").to_string())), "Show version");
ap.refer(&mut options.verbose)
.add_option(&["-v", "--verbose"], StoreTrue,
"Verbose execution");
ap.refer(&mut options.tool)
.add_option(&["-t", "--tool"], Store,
"Network tool to use [ping, http, tcp, udp]");
ap.refer(&mut options.ip)
.add_option(&["-i", "--ip"], Store,
"IP Address of server");
ap.refer(&mut options.port)
.add_option(&["-p", "--port"], Store,
"Port to test connection on");
ap.refer(&mut options.subnet)
.add_option(&["-s", "--subnet"], Store,
"Subnet of addresses to test on");
ap.refer(&mut options.filepath)
.add_option(&["-f", "--filepath"], Store,
"Path to file of IP addresses");
// Get the usage message
if !ap.print_help(&args[0], &mut buf).is_ok() {
let mut stderr = std::io::stderr();
writeln!(&mut stderr, "Could not build help message.\n'argparser' not functioning correctly."
).expect("Could not write to stderr");
exit(1);
}
help = from_utf8(&buf[..]).unwrap().to_string();
ap.parse_args_or_exit();
} // end scope
// Make sure at least 1 argument is passed
if options.tool.is_empty() {
let mut stderr = std::io::stderr();
writeln!(&mut stderr, "rustworking: No tool specified.\n{}",
help).expect("Could not write to stderr");
exit(1);
}
// Handle verbose execution
if options.verbose {
println!("Beginning execution...");
println!("Tool: {}", options.tool);
println!("IP Address: {}", options.ip);
println!("Port: {}", options.port);
println!("Subnet: {}", options.subnet);
println!("Filepath: {}", options.filepath);
}
// Run the correct networking tool
match Some(&*options.tool.to_string().to_ascii_lowercase()) {
Some("ping") => ping_helper(options.verbose, &options.ip,
&options.subnet, &options.filepath),
Some("http") => http_helper(options.verbose, &options.ip,
&options.subnet, &options.filepath),
Some("tcp") => println!("Tool: tcp"),
Some("udp") => println!("Tool: udp"),
_ =>
{ let mut stderr = std::io::stderr();
writeln!(&mut stderr, "rustworking: Unrecognized tool '{}'.\n{}",
options.tool, help).expect("Could not write to stderr");
exit(1);
},
}
}
// Function to help run a PING
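// Input precedence (mirrored by http_helper below): a file of addresses takes priority
// over a subnet, which takes priority over the single --ip value.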
fn ping_helper(verbose: bool, ip: &str, subnet: &str, filepath: &str) {
if !filepath.is_empty() {
let results = rustytools::ping_file(verbose, filepath);
for res in results {
match res {
Ok(r) => println!("{}", r),
Err(e) => println!("{}", e),
}
}
} else if !subnet.is_empty() {
let results = rustytools::ping_subnet(verbose, subnet);
for res in results {
match res {
Ok(r) => println!("{}", r),
Err(e) => println!("{}", e),
}
}
} else {
match rustytools::ping_ip(verbose, ip) {
Ok(r) => println!("{}", r),
Err(e) => println!("{}", e),
}
}
}
// Function to help run an HTTP request
fn http_helper(verbose: bool, ip: &str, subnet: &str, filepath: &str) {
if !filepath.is_empty() {
let results = rustytools::http_file(verbose, filepath);
for res in results {
match res {
Ok(r) => println!("{}", r),
Err(e) => println!("{}", e),
}
}
} else if !subnet.is_empty() {
let results = rustytools::http_subnet(verbose, subnet);
for res in results {
match res {
Ok(r) => println!("{}", r),
Err(e) => println!("{}", e),
}
}
} else {
match rustytools::http_ip(verbose, ip) {
Ok(r) => println!("{}", r),
Err(e) => println!("{}", e),
}
}
}
| true |
e52aba90f1dc63dfc01077e5ef91f1f691eb4701
|
Rust
|
mknaw/jrnl
|
/src/time.rs
|
UTF-8
| 5,091 | 3.5625 | 4 |
[] |
no_license
|
use std::fmt;
/// Week days
#[derive(Copy, Clone, PartialEq)]
pub enum WeekDay {
Monday,
Tuesday,
Wednesday,
Thursday,
Friday,
Saturday,
Sunday,
}
pub const WEEKDAYS: [WeekDay; 7] = [
WeekDay::Monday,
WeekDay::Tuesday,
WeekDay::Wednesday,
WeekDay::Thursday,
WeekDay::Friday,
WeekDay::Saturday,
WeekDay::Sunday,
];
impl fmt::Display for WeekDay {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(
f,
"{}",
match self {
Self::Monday => "Mon",
Self::Tuesday => "Tue",
Self::Wednesday => "Wed",
Self::Thursday => "Thu",
Self::Friday => "Fri",
Self::Saturday => "Sat",
Self::Sunday => "Sun",
}
)
}
}
impl WeekDay {
// TODO probably should be fmt::Display
pub fn nday(&self) -> usize {
match self {
Self::Monday => 0,
Self::Tuesday => 1,
Self::Wednesday => 2,
Self::Thursday => 3,
Self::Friday => 4,
Self::Saturday => 5,
Self::Sunday => 6,
}
}
}
/// Months
#[derive(Copy, Clone)]
pub enum Month {
January,
February,
March,
April,
May,
June,
July,
August,
September,
October,
November,
December,
}
pub const MONTHS: [Month; 12] = [
Month::January,
Month::February,
Month::March,
Month::April,
Month::May,
Month::June,
Month::July,
Month::August,
Month::September,
Month::October,
Month::November,
Month::December,
];
impl fmt::Display for Month {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(
f,
"{}",
match self {
Self::January => "Jan",
Self::February => "Feb",
Self::March => "Mar",
Self::April => "Apr",
Self::May => "May",
Self::June => "Jun",
Self::July => "Jul",
Self::August => "Aug",
Self::September => "Sep",
Self::October => "Oct",
Self::November => "Nov",
Self::December => "Dec",
}
)
}
}
impl Month {
pub fn weekday_offset_from_jan(&self) -> u8 {
// TODO doing all this on u8s instead of a week day abstraction is ugly
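        // These are the cumulative day counts before each month, mod 7: e.g. March is
        // (31 + 28) % 7 = 3 and May is (31 + 28 + 31 + 30) % 7 = 1.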
match self {
Self::January | Self::October => 0,
Self::February | Self::March | Self::November => 3,
Self::April | Self::July => 6,
Self::May => 1,
Self::June => 4,
Self::August => 2,
Self::September | Self::December => 5,
}
}
pub fn leap_year_weekday_offset_from_jan(&self) -> u8 {
match self {
Self::January | Self::February => self.weekday_offset_from_jan(),
_ => self.weekday_offset_from_jan() + 1,
}
}
}
/// Year
pub struct Year {
year: u32,
}
impl fmt::Display for Year {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.year)
}
}
impl Year {
pub fn is_leap(&self) -> bool {
if self.year % 100 == 0 {
return self.year % 400 == 0;
}
return self.year % 4 == 0;
}
}
pub struct MonthYear {
pub month: Month,
pub year: Year,
}
impl fmt::Display for MonthYear {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}{}", self.month, self.year)
}
}
impl MonthYear {
pub fn new(month: Month, year: u32) -> Self {
let year = Year { year: year };
Self {
month: month,
year: year,
}
}
pub fn n_days(&self) -> u8 {
match self.month {
Month::February if self.year.is_leap() => 29,
Month::February => 28,
Month::January
| Month::March
| Month::May
| Month::July
| Month::August
| Month::October
| Month::December => 31,
Month::April | Month::June | Month::September | Month::November => 30,
}
}
fn jan_first_weekday(&self) -> u8 {
// Gauss' algo for calculating weekday of 1/1 for any given year.
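        // Worked example for 2021: 1 + 5*(2020 % 4) + 4*(2020 % 100) + 6*(2020 % 400)
        // = 1 + 0 + 80 + 120 = 201, and 201 % 7 = 5, i.e. Friday (0 = Sunday in this
        // numbering); 2021-01-01 was indeed a Friday.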
let year = self.year.year;
let mut nday = 1;
nday += 5 * ((year - 1) % 4);
nday += 4 * ((year - 1) % 100);
nday += 6 * ((year - 1) % 400);
        nday %= 7;
        // Gauss' formula yields 0 = Sunday, 1 = Monday, ...; WEEKDAYS is Monday-first,
        // so shift by 6 to make 0 = Monday (this also avoids the underflow that
        // `nday - 1` hits whenever Jan 1 falls on a Sunday, e.g. in 2023).
        ((nday + 6) % 7) as u8
}
pub fn first_weekday(&self) -> WeekDay {
let jan_first_weekday = self.jan_first_weekday();
if self.year.is_leap() {
let n_weekday =
(jan_first_weekday + self.month.leap_year_weekday_offset_from_jan()) % 7;
return WEEKDAYS[n_weekday as usize];
} else {
let n_weekday = (jan_first_weekday + self.month.weekday_offset_from_jan()) % 7;
return WEEKDAYS[n_weekday as usize];
}
}
}
| true |
817443bde66fdf83821d0bfcd9fca290699e2834
|
Rust
|
theemathas/binary_turk
|
/game/src/pos/legal.rs
|
UTF-8
| 1,844 | 3 | 3 |
[
"MIT"
] |
permissive
|
use std::iter;
use square::Square;
use moves::Move;
use super::Position;
pub struct Iter<'a>(iter::Chain<NoisyIter<'a>, QuietIter<'a>>);
impl<'a> Iterator for Iter<'a> {
type Item = Move;
fn next(&mut self) -> Option<Move> { self.0.next() }
fn size_hint(&self) -> (usize, Option<usize>) { self.0.size_hint() }
}
pub struct NoisyIter<'a>(Box<Iterator<Item = Move> + 'a>);
impl<'a> Iterator for NoisyIter<'a> {
type Item = Move;
fn next(&mut self) -> Option<Move> { self.0.next() }
fn size_hint(&self) -> (usize, Option<usize>) { self.0.size_hint() }
}
pub struct QuietIter<'a>(Box<Iterator<Item = Move> + 'a>);
impl<'a> Iterator for QuietIter<'a> {
type Item = Move;
fn next(&mut self) -> Option<Move> { self.0.next() }
fn size_hint(&self) -> (usize, Option<usize>) { self.0.size_hint() }
}
pub fn iter<'a>(p: &'a Position) -> Iter<'a> {
Iter(p.legal_noisy_iter().chain(p.legal_quiet_iter()))
}
pub fn noisy_iter<'a>(p: &'a Position) -> NoisyIter<'a> {
let mut temp = p.clone();
NoisyIter(Box::new(p.psudo_legal_noisy_iter().filter(move |x| is_legal(&mut temp, x))))
}
pub fn quiet_iter<'a>(p: &'a Position) -> QuietIter<'a> {
let mut temp = p.clone();
QuietIter(Box::new(p.psudo_legal_quiet_iter().filter(move |x| is_legal(&mut temp, x))))
}
fn is_legal(p: &mut Position, curr_move: &Move) -> bool {
let c = p.side_to_move();
p.with_move(curr_move, |new_pos| {
match curr_move.castle() {
None => {
!new_pos.can_take_king()
},
Some(side) => {
// Check for castling out of check, through check, and into check.
let check_squares: Vec<Square> = side.require_no_attack(c);
check_squares.iter().all( |val| !new_pos.can_move_to(*val) )
}
}
})
}
| true |
83aad82ba2c21b00748e13c855cd0b265bea7afc
|
Rust
|
hershi/cryptopals_rust
|
/utils/src/lib.rs
|
UTF-8
| 1,839 | 2.59375 | 3 |
[] |
no_license
|
#[macro_use]
extern crate lazy_static;
pub mod english_scoring;
pub mod encoding;
pub mod encryption;
pub mod repeating_xor_cracker;
pub mod mt19937;
pub mod sha1;
pub mod md4;
pub mod hmac;
pub mod hash_utils;
pub mod diffie_hellman;
pub fn xor(input: &[u8], key: &[u8]) -> Vec<u8> {
input
.iter()
.zip(key.iter().cycle())
.map(|(b, key)| b^key)
.collect()
}
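// PKCS#7-style padding: for example, pad_block(b"YELLOW SUBMARINE".to_vec(), 20) appends
// four bytes of value 0x04, and an input whose length is already a multiple of the block
// size gains a full extra block of padding.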
pub fn pad_block(mut input: Vec<u8>, block_size: u8) -> Vec<u8> {
let block_size: usize = block_size as usize;
let last_block_size = input.len() % block_size;
let padding_needed = block_size - last_block_size;
input.resize(input.len() + padding_needed, padding_needed as u8);
input
}
pub fn validate_padding(input: &[u8], block_size: usize) -> Result<(), &str> {
if input.len() == 0 {
return Err("bad padding - empty input");
}
let last_byte = *input.last().unwrap() as usize;
if last_byte > block_size {
return Err("bad padding - last byte too big");
}
    if last_byte == 0 {
return Err("bad padding - last byte too small");
}
    // The last byte is at most the block size. Check that the final `last_byte` bytes
    // all have that value.
let last_block = input.chunks(block_size).last().unwrap();
if last_block.iter()
.skip(block_size - last_byte as usize)
.any(|&b| b != last_byte as u8) {
return Err("bad padding - some bytes with wrong value");
}
Ok(())
}
pub fn strip_padding(input: &mut Vec<u8>, block_size: usize) {
assert!(validate_padding(input, block_size).is_ok());
let last_byte = *input.last().unwrap() as usize;
input.truncate(input.len() - last_byte);
}
pub fn random_buffer(size: usize) -> Vec<u8> {
std::iter::repeat_with(|| rand::random::<u8>())
.take(size)
.collect()
}
| true |
36c3b05c686b004a33a4dc2d893ecce95478a343
|
Rust
|
pdx-cs-rust/rust-misc
|
/stacktrait/examples/demo.rs
|
UTF-8
| 250 | 2.96875 | 3 |
[
"MIT"
] |
permissive
|
use stacktrait::*;
use std::collections::LinkedList;
fn main() {
let mut s = Vec::new();
s.spush(&5);
println!("{}", s.spop().unwrap());
let mut s = LinkedList::new();
s.spush("hello");
println!("{}", s.spop().unwrap());
}
| true |
b79e044c0c9ef80fc7b44a0ab88141c5b4272ff3
|
Rust
|
little-dude/netlink
|
/netlink-packet-generic/src/traits.rs
|
UTF-8
| 1,160 | 2.578125 | 3 |
[
"MIT",
"MITNFA"
] |
permissive
|
// SPDX-License-Identifier: MIT
//! Traits for implementing generic netlink family
/// Provides the definition of a generic netlink family.
///
/// A family payload type should implement this trait to provide the necessary
/// information to build the packet headers (`nlmsghdr` and `genlmsghdr`).
///
/// If you are looking for an example implementation, you can refer to the
/// [`crate::ctrl`] module.
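///
/// A minimal sketch of an implementing type (the family name, command and version
/// values below are made up for illustration only):
///
/// ```ignore
/// struct MyFamilyMessage;
///
/// impl GenlFamily for MyFamilyMessage {
///     fn family_name() -> &'static str {
///         "my_family"
///     }
///
///     fn command(&self) -> u8 {
///         1
///     }
///
///     fn version(&self) -> u8 {
///         1
///     }
/// }
/// ```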
pub trait GenlFamily {
/// Return the unique family name registered in the kernel
///
    /// Let the resolver look up the dynamically assigned ID
fn family_name() -> &'static str;
/// Return the assigned family ID
///
/// # Note
    /// An implementation of a generic family should leave the ID as `GENL_ID_GENERATE` (0x0)
    /// so that the controller can assign the family ID dynamically.
    ///
    /// For that reason, you should not have to override this function
    /// unless the family uses a static ID.
fn family_id(&self) -> u16 {
0
}
/// Return the command type of the current message
fn command(&self) -> u8;
/// Indicate the protocol version
fn version(&self) -> u8;
}
| true |
e954b246d17cfaf613549c0c87fb11bca588da8f
|
Rust
|
ottingbob/rust-dojo
|
/12-scopes/12-3-3-aliasing.rs
|
UTF-8
| 1,085 | 3.90625 | 4 |
[] |
no_license
|
struct Point { x: i32, y: i32, z: i32 }
fn main() {
let mut point = Point { x: 0, y: 0, z: 0 };
let borrowed_point = &point;
let another_borrow = &point;
println!("Point has coordinates: ({}, {}, {})",
borrowed_point.x, another_borrow.y, point.z);
    // Can't borrow `point` as mutable because it's currently
    // borrowed as immutable.
// let mutable_borrow = &mut point;
println!("Point has coordinates: ({}, {}, {})",
borrowed_point.x, another_borrow.y, point.z);
let mutable_borrow = &mut point;
mutable_borrow.x = 5;
mutable_borrow.y = 2;
mutable_borrow.z = 1;
    // Can't borrow `point` as immutable because it's already
    // borrowed as mutable.
// let y = &point.y;
    // Can't print because `println!` takes an immutable reference.
// println!("Point Z coordinate is {}", point.z);
println!("Point has coordinates: ({}, {}, {})",
mutable_borrow.x, mutable_borrow.y, mutable_borrow.z);
let new_borrowed_point = &point;
println!("Point has coordinates: ({}, {}, {})",
new_borrowed_point.x, new_borrowed_point.y, new_borrowed_point.z);
}
| true |
b4e9cf8314c0ececd024661877c03037b7f83c82
|
Rust
|
razn-v/rasm
|
/src/register.rs
|
UTF-8
| 2,272 | 3.21875 | 3 |
[
"MIT"
] |
permissive
|
use std::str::FromStr;
/// List of available registers
#[derive(PartialEq, Eq)]
pub enum Register {
R0,
R1,
R2,
R3,
R4,
R5,
R6,
R7,
R8,
R9,
R10,
R11,
R12,
R13,
R14,
R15,
CPSR,
SPSR,
}
impl FromStr for Register {
type Err = ();
fn from_str(s: &str) -> Result<Self, Self::Err> {
match s {
"r0" => Ok(Self::R0),
"r1" => Ok(Self::R1),
"r2" => Ok(Self::R2),
"r3" => Ok(Self::R3),
"r4" => Ok(Self::R4),
"r5" => Ok(Self::R5),
"r6" => Ok(Self::R6),
"r7" => Ok(Self::R7),
"r8" => Ok(Self::R8),
"r9" => Ok(Self::R9),
"r10" => Ok(Self::R10),
"r11" | "fp" => Ok(Self::R11),
"r12" => Ok(Self::R12),
"r13" | "sp" => Ok(Self::R13),
"r14" | "lr" => Ok(Self::R14),
"r15" | "pc" => Ok(Self::R15),
"cpsr" | "cpsr_all" => Ok(Self::CPSR),
"spsr" | "spsr_all" => Ok(Self::SPSR),
_ => Err(()),
}
}
}
/// List of available coprocessor registers
pub enum CoRegister {
C0,
C1,
C2,
C3,
C4,
C5,
C6,
C7,
C8,
C9,
C10,
C11,
C12,
C13,
C14,
C15,
}
impl FromStr for CoRegister {
type Err = ();
fn from_str(s: &str) -> Result<Self, Self::Err> {
match s {
"c0" => Ok(Self::C0),
"c1" => Ok(Self::C1),
"c2" => Ok(Self::C2),
"c3" => Ok(Self::C3),
"c4" => Ok(Self::C4),
"c5" => Ok(Self::C5),
"c6" => Ok(Self::C6),
"c7" => Ok(Self::C7),
"c8" => Ok(Self::C8),
"c9" => Ok(Self::C9),
"c10" => Ok(Self::C10),
"c11" => Ok(Self::C11),
"c12" => Ok(Self::C12),
"c13" => Ok(Self::C13),
"c14" => Ok(Self::C14),
"c15" => Ok(Self::C15),
_ => Err(()),
}
}
}
| true |
4d01e9038a9acc8d7e69e97113fd43bc7482e836
|
Rust
|
copvampire/wooting_snake
|
/src/sound_manager.rs
|
UTF-8
| 2,923 | 2.96875 | 3 |
[
"Apache-2.0"
] |
permissive
|
use rand::Rng;
use rodio;
use rodio::Device;
use rodio::Source;
use std::collections::HashMap;
use std::fs::File;
use std::convert::AsRef;
use std::io;
use std::io::prelude::*;
use std::sync::Arc;
type Sounds = Vec<Sound>;
type SoundsMap = HashMap<SoundType, Sounds>;
#[derive(Eq, PartialEq, std::hash::Hash, Clone, Copy)]
pub enum SoundType {
Click,
Death,
Eat,
Step,
Warp,
}
pub struct SoundManager {
sounds_map: SoundsMap,
device: Option<Device>,
rng: rand::rngs::ThreadRng,
}
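// Typical usage (sketch): build one manager up front and trigger sounds by type, e.g.
//   let mut sounds = SoundManager::new();
//   sounds.play(SoundType::Eat);
// When several files are registered for the same SoundType, play() picks one at random.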
pub struct Sound(Arc<Vec<u8>>);
impl AsRef<[u8]> for Sound {
fn as_ref(&self) -> &[u8] {
&self.0
}
}
impl Sound {
pub fn load(filename: &str) -> io::Result<Sound> {
let mut buf = Vec::new();
let mut file = File::open(filename)?;
file.read_to_end(&mut buf)?;
Ok(Sound(Arc::new(buf)))
}
pub fn cursor(self: &Self) -> io::Cursor<Sound> {
io::Cursor::new(Sound(self.0.clone()))
}
pub fn decoder(self: &Self) -> rodio::Decoder<io::Cursor<Sound>> {
rodio::Decoder::new(self.cursor()).unwrap()
}
}
fn create_source(sounds_map: &mut SoundsMap, sound_type: SoundType, path: &str) {
let sounds = match sounds_map.get_mut(&sound_type) {
Some(decoders) => decoders,
None => {
sounds_map.insert(sound_type, Sounds::new());
sounds_map.get_mut(&sound_type).unwrap()
}
};
let sound = Sound::load(path).unwrap();
sounds.push(sound);
}
impl SoundManager {
pub fn new() -> Self {
let maybe_device = rodio::default_output_device();
let mut sounds_map = HashMap::new();
create_source(&mut sounds_map, SoundType::Click, "resources/click.wav");
create_source(&mut sounds_map, SoundType::Death, "resources/death.wav");
create_source(&mut sounds_map, SoundType::Eat, "resources/eat1.wav");
create_source(&mut sounds_map, SoundType::Eat, "resources/eat2.wav");
create_source(&mut sounds_map, SoundType::Eat, "resources/eat3.wav");
create_source(&mut sounds_map, SoundType::Step, "resources/step1.wav");
create_source(&mut sounds_map, SoundType::Step, "resources/step2.wav");
create_source(&mut sounds_map, SoundType::Warp, "resources/warp.wav");
SoundManager {
device: maybe_device,
sounds_map,
rng: rand::thread_rng(),
}
}
pub fn play(&mut self, sound_type: SoundType) {
let maybe_sounds = self.sounds_map.get(&sound_type);
if let Some(sounds) = maybe_sounds {
let random_index = self.rng.gen_range(0, sounds.len());
let maybe_sound = sounds.get(random_index);
if let Some(sound) = &maybe_sound {
if let Some(device) = &self.device {
rodio::play_raw(device, sound.decoder().convert_samples());
}
}
}
}
}
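// Hypothetical usage sketch (added for illustration; not part of the original file).
// It assumes the wav files registered above exist under `resources/`:
//
//     let mut sounds = SoundManager::new();
//     sounds.play(SoundType::Eat); // picks one of the registered samples at random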
| true |
072456465b1a7e17893f163530fdb7cff037ef38
|
Rust
|
maidsafe/safe_network
|
/sn_networking/src/circular_vec.rs
|
UTF-8
| 2,039 | 3.28125 | 3 |
[] |
no_license
|
// Copyright 2023 MaidSafe.net limited.
//
// This SAFE Network Software is licensed to you under The General Public License (GPL), version 3.
// Unless required by applicable law or agreed to in writing, the SAFE Network Software distributed
// under the GPL Licence is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. Please review the Licences for the specific language governing
// permissions and limitations relating to use of the SAFE Network Software.
use crate::error::Error;
/// Based on <https://users.rust-lang.org/t/the-best-ring-buffer-library/58489/7>
/// A circular buffer implemented with a VecDeque.
#[derive(Debug)]
pub struct CircularVec<T> {
inner: std::collections::VecDeque<T>,
}
impl<T> CircularVec<T> {
/// Creates a new CircularVec with the given capacity.
pub fn new(capacity: usize) -> Self {
Self {
inner: std::collections::VecDeque::with_capacity(capacity),
}
}
/// Pushes an item into the CircularVec. If the CircularVec is full, the oldest item is removed.
#[allow(clippy::result_large_err)]
pub fn push(&mut self, item: T) -> Result<(), Error> {
if self.inner.len() == self.inner.capacity() {
self.inner
.pop_front()
.ok_or(Error::CircularVecPopFrontError)?;
}
self.inner.push_back(item);
Ok(())
}
/// Checks if the CircularVec contains the given item.
pub fn contains(&self, item: &T) -> bool
where
T: PartialEq,
{
self.inner.contains(item)
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_push_and_contains() {
let mut cv = CircularVec::new(2);
assert!(cv.push(1).is_ok());
assert!(cv.push(2).is_ok());
assert!(cv.contains(&1));
assert!(cv.contains(&2));
assert!(cv.push(3).is_ok());
assert!(!cv.contains(&1));
assert!(cv.contains(&2));
assert!(cv.contains(&3));
}
}
| true |
367b3753c52fd8fc18d92ac0b793b4364a8c34d9
|
Rust
|
Aleman778/First-Compiler
|
/src/ir.rs
|
UTF-8
| 38,652 | 3.25 | 3 |
[] |
no_license
|
use std::collections::HashMap;
use std::fmt;
use crate::ast::*;
use crate::intrinsics;
/**
* Used for building low-level intermediate representation.
*/
pub struct IrBuilder<'a> {
pub file: Option<&'a File>,
pub instructions: Vec<IrInstruction>,
pub functions: HashMap<IrIdent, IrBasicBlock>,
pub addr_size: isize, // address size in bytes on target architecture
scopes: Vec<IrScope>,
    live_intervals: HashMap<IrIdent, IrLiveInterval>, // per-function data, moved into its basic block when the function is built
// Unique identifier generators
register_symbol: Symbol,
register_index: u32,
basic_block_symbol: Symbol,
basic_block_index: u32,
if_exit_symbol: Symbol,
if_exit_index: u32,
if_else_symbol: Symbol,
if_else_index: u32,
while_enter_symbol: Symbol,
while_enter_index: u32,
while_exit_symbol: Symbol,
while_exit_index: u32,
}
/**
* IR scopes are defined as regular blocks that contains some
* helper information about variables that are active.
* Only for internal use to help build the IR.
*/
struct IrScope {
enter_label: Option<IrIdent>,
exit_label: Option<IrIdent>,
locals: HashMap<IrIdent, IrType>,
}
/**
* Basic block is defined by a sequence of instructions and is used
* to store context information about a particular block scope.
*/
pub struct IrBasicBlock {
pub prologue_index: usize,
pub epilogue_index: usize,
pub enter_label: IrIdent,
pub exit_label: IrIdent,
pub return_type: IrType,
pub func_address: Option<usize>, // used by jitter to call foreign functions
pub is_foreign: bool,
pub live_intervals: HashMap<IrIdent, IrLiveInterval>
}
/**
* Denotes an interval in the IR where a particular variable is alive.
*/
#[derive(Debug, Clone, PartialEq)]
pub struct IrLiveInterval {
pub begin: usize,
pub end: usize,
}
/**
* Three address code instruction, is defined an op code and up to three operands.
* Span is also used for debugging to retrieve the source location of a given instruction.
*/
#[derive(Debug, Clone, PartialEq)]
pub struct IrInstruction {
pub opcode: IrOpcode,
pub op1: IrOperand,
pub op2: IrOperand,
pub op3: IrOperand,
pub ty: IrType,
pub span: Span,
}
#[derive(Debug, Clone, PartialEq)]
pub enum IrOpcode {
Nop,
Alloca, // op1 = alloca ty
AllocParams, // allocates all defined parameters
Copy, // op1 = op2
CopyFromDeref, // op1 = *op2
CopyFromRef, // op1 = &op2 (always mutable)
CopyToDeref, // *op1 = op2
Clear, // op1 = 0
Add, // op1 = op2 + op3
Sub,
Mul,
Div,
Pow,
Mod,
And,
Or,
Xor,
Lt, // op1 = op2 < op3 (op1 always boolean)
Le,
Gt,
Ge,
Eq,
Ne,
IfLt, // jump op3 (if op1 binop op2 equals true)
IfGt,
IfLe,
IfGe,
IfEq,
IfNe,
Jump, // jump op1
Label, // label op1
Param, // param op1 (ordered left-to-right)
Call, // op1 := op2(...) (#parameter stored in op3)
Return, // return op1 (where op1 is optional)
Prologue, // marks beginning of function
Epilogue, // marks end of function
}
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum IrOperand {
Ident(IrIdent),
Value(IrValue),
None,
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct IrIdent {
pub symbol: Symbol,
pub index: u32, // used to distinguish identifiers with the same symbol.
}
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum IrValue {
I32(i32),
U32(u32),
U64(u64),
Bool(bool),
}
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum IrType {
I8,
I32,
I64,
U32,
U64,
PtrI8(usize), // NOTE(alexander): argument defines the numbers of indirections
PtrI32(usize),
Unit,
}
pub fn create_ir_builder<'a>() -> IrBuilder<'a> {
IrBuilder {
file: None,
instructions: Vec::new(),
functions: HashMap::new(),
addr_size: std::mem::size_of::<usize>() as isize,
scopes: Vec::new(),
live_intervals: HashMap::new(),
register_symbol: intern_string(""),
register_index: 0,
basic_block_symbol: intern_string(".bb"),
basic_block_index: 0,
if_else_symbol: intern_string(".if_else"),
if_else_index: 0,
if_exit_symbol: intern_string(".if_exit"),
if_exit_index: 0,
while_enter_symbol: intern_string(".while_enter"),
while_enter_index: 0,
while_exit_symbol: intern_string(".while_exit"),
while_exit_index: 0,
}
}
pub fn create_ir_ident(symbol: Symbol, index: u32) -> IrIdent {
IrIdent { symbol, index }
}
fn create_ir_basic_block<'a>(
ib: &mut IrBuilder<'a>,
enter: Option<IrIdent>,
exit: Option<IrIdent>,
is_foreign: bool
) -> IrBasicBlock {
fn unique_bb_label<'a>(ib: &mut IrBuilder<'a>) -> IrIdent {
let label = IrIdent {
symbol: ib.basic_block_symbol,
index: ib.basic_block_index,
};
ib.basic_block_index += 1;
return label;
}
IrBasicBlock {
prologue_index: 0,
epilogue_index: 0,
enter_label: enter.unwrap_or(unique_bb_label(ib)),
exit_label: exit.unwrap_or(unique_bb_label(ib)),
return_type: IrType::Unit,
func_address: None,
is_foreign,
live_intervals: HashMap::new(),
}
}
fn create_ir_live_interval(begin: usize) -> IrLiveInterval {
IrLiveInterval {
begin,
end: begin + 1,
}
}
fn update_ir_live_interval<'a>(ib: &mut IrBuilder<'a>, op: IrOperand) {
let insn_len = ib.instructions.len();
if let IrOperand::Ident(ident) = op {
match ib.live_intervals.get_mut(&ident) {
Some(live_interval) => live_interval.end = insn_len,
None => {},
}
}
}
#[inline]
pub fn is_alive(insn: usize, interval: IrLiveInterval) -> bool {
return insn >= interval.begin && insn < interval.end;
}
fn allocate_register<'a>(ib: &mut IrBuilder<'a>) -> IrOperand {
let ident = IrIdent {
symbol: ib.register_symbol,
index: ib.register_index
};
ib.register_index += 1;
ib.live_intervals.insert(ident, create_ir_live_interval(ib.instructions.len()));
IrOperand::Ident(ident)
}
fn to_ir_ptr_type(ty: &Ty, indirections: &mut usize) -> IrType {
match &ty.kind {
TyKind::Int => IrType::PtrI32(*indirections),
TyKind::Bool => IrType::PtrI8(*indirections),
TyKind::Ref(type_ref) => {
*indirections += 1;
to_ir_ptr_type(&type_ref.elem, indirections)
}
_ => panic!("unexpected type"),
}
}
fn to_ir_type(ty: &Ty) -> IrType {
match &ty.kind {
TyKind::Int => IrType::I32,
TyKind::Bool => IrType::I8,
TyKind::Ref(type_ref) => to_ir_ptr_type(&type_ref.elem, &mut 1),
TyKind::Error => IrType::Unit,
TyKind::Unit => IrType::Unit,
}
}
pub fn size_of_ir_type(ty: IrType, addr_size: isize) -> isize {
match ty {
IrType::I8 => 1,
IrType::I32 => 4,
IrType::I64 => 8,
IrType::U32 => 4,
IrType::U64 => 8,
IrType::PtrI8(_) |
IrType::PtrI32(_) => addr_size,
IrType::Unit => 0,
}
}
pub fn build_ir_from_ast<'a>(ib: &mut IrBuilder<'a>, file: &'a File) {
ib.file = Some(file);
fn register_ast_items<'a>(ib: &mut IrBuilder<'a>, items: &Vec<Item>) {
for item in items {
match item {
Item::Fn(func) => {
let enter_label = create_ir_ident(func.ident.sym, 0);
let exit_label = create_ir_ident(func.ident.sym, 1);
let ident = create_ir_ident(func.ident.sym, 0);
let mut block = create_ir_basic_block(ib, Some(enter_label), Some(exit_label), false);
block.return_type = to_ir_type(&func.decl.output);
ib.functions.insert(ident, block);
}
Item::ForeignFn(func) => {
let func_ident = resolve_symbol(func.ident.sym);
let func_address = match func_ident {
"print_int" => {
intrinsics::print_int as *const () as usize
}
"print_bool" => {
intrinsics::print_bool as *const () as usize
}
"assert" => {
intrinsics::assert as *const () as usize
}
"assert_eq_int" => {
intrinsics::assert_eq_int as *const () as usize
}
"assert_eq_bool" => {
intrinsics::assert_eq_bool as *const () as usize
}
// NOTE: debug_break - backend level intrinsic
"debug_break" => {
                    continue; // don't generate a NULL function address, should be callable
}
// NOTE: trace - interpreter level intrinsic
"trace" => {
0usize // should never be called, NULL function address
}
_ => panic!("unknown foreign function {}", func_ident),
};
let ident = create_ir_ident(func.ident.sym, 0);
let mut block = create_ir_basic_block(ib, None, None, true);
block.return_type = to_ir_type(&func.decl.output);
block.func_address = Some(func_address);
ib.functions.insert(ident, block);
}
Item::ForeignMod(module) => register_ast_items(ib, &module.items),
}
}
}
// First register all items made available from the AST
register_ast_items(ib, &file.items);
for item in &file.items {
build_ir_from_item(ib, &item);
}
}
pub fn build_ir_from_item<'a>(ib: &mut IrBuilder<'a>, item: &Item) {
match item {
Item::Fn(func) => {
let enter_label = create_ir_ident(func.ident.sym, 0);
let exit_label = create_ir_ident(func.ident.sym, 1);
            // NOTE(alexander): new function, reset the register index
ib.register_index = 0;
ib.instructions.push(IrInstruction {
opcode: IrOpcode::Label,
op1: IrOperand::Ident(enter_label),
span: func.decl.span,
..Default::default()
});
let prologue_index = ib.instructions.len();
ib.instructions.push(IrInstruction {
opcode: IrOpcode::Prologue,
..Default::default()
});
// Create the function scope, and setup arguments
let mut scope = IrScope {
enter_label: None,
exit_label: None,
locals: HashMap::new(),
};
for arg in &func.decl.inputs {
let ty = to_ir_type(&arg.ty);
let ident = create_ir_ident(arg.ident.sym, 0);
scope.locals.insert(ident, ty);
ib.instructions.push(IrInstruction {
opcode: IrOpcode::Param,
op1: IrOperand::Ident(ident),
ty,
..Default::default()
});
}
ib.instructions.push(IrInstruction {
opcode: IrOpcode::AllocParams,
..Default::default()
});
ib.scopes.push(scope);
build_ir_from_block(ib, &func.block, Some(enter_label), Some(exit_label), None);
ib.instructions.push(IrInstruction {
opcode: IrOpcode::Label,
op1: IrOperand::Ident(exit_label),
span: func.span,
..Default::default()
});
let epilogue_index = ib.instructions.len();
ib.instructions.push(IrInstruction {
opcode: IrOpcode::Epilogue,
..Default::default()
});
let func_label = create_ir_ident(func.ident.sym, 0);
match ib.functions.get_mut(&func_label) {
Some(bb) => {
bb.prologue_index = prologue_index;
bb.epilogue_index = epilogue_index;
bb.live_intervals = ib.live_intervals.clone();
}
None => panic!("`{}` is not a registered function", func_label),
}
ib.live_intervals.clear();
ib.scopes.pop();
}
_ => {}
}
}
pub fn build_ir_from_block<'a>(
ib: &mut IrBuilder<'a>,
block: &Block,
enter_label: Option<IrIdent>,
exit_label: Option<IrIdent>,
assign_op: Option<IrOperand>
) -> (IrOperand, IrType) {
let scope = IrScope {
enter_label,
exit_label,
locals: HashMap::new(),
};
ib.scopes.push(scope);
let mut last_op = IrOperand::None;
let mut last_ty = IrType::Unit;
for stmt in &block.stmts {
let (op, ty) = build_ir_from_stmt(ib, &stmt);
last_op = op;
last_ty = ty;
}
let ret = if let Some(Stmt::Expr(_)) = block.stmts.last() {
if let IrOperand::None = last_op {
(IrOperand::None, IrType::Unit)
} else {
if ib.scopes.len() <= 2 { // Outermost scope, safe to return
ib.instructions.push(IrInstruction {
opcode: IrOpcode::Return,
op1: last_op,
ty: last_ty,
span: block.span,
..Default::default()
});
(last_op, last_ty)
} else if let Some(op1) = assign_op { // Not outermost scope, store to register instead
ib.instructions.push(IrInstruction {
opcode: IrOpcode::Copy,
op1,
op2: last_op,
ty: last_ty,
span: block.span,
..Default::default()
});
(op1, last_ty)
} else {
(IrOperand::None, IrType::Unit)
}
}
} else {
(IrOperand::None, IrType::Unit)
};
ib.scopes.pop();
return ret;
}
pub fn build_ir_from_stmt<'a>(ib: &mut IrBuilder<'a>, stmt: &Stmt) -> (IrOperand, IrType) {
match stmt {
Stmt::Local(local) => {
let init_type = to_ir_type(&local.ty);
let ident = create_ir_ident(local.ident.sym, 0);
let op1 = IrOperand::Ident(ident);
ib.instructions.push(IrInstruction {
opcode: IrOpcode::Alloca,
op1,
ty: init_type,
span: local.span,
..Default::default()
});
match &*local.init {
Some(expr) => {
if let Expr::If(if_expr) = expr {
build_ir_if_expr(ib, if_expr, Some(op1));
} else {
let op2 = build_ir_from_expr(ib, expr).0;
update_ir_live_interval(ib, op2);
ib.instructions.push(IrInstruction {
opcode: IrOpcode::Copy,
op1,
op2,
ty: init_type,
span: local.span,
..Default::default()
});
}
}
None => return (IrOperand::None, IrType::Unit)
};
ib.scopes[0].locals.insert(ident, init_type);
ib.live_intervals.insert(ident, create_ir_live_interval(ib.instructions.len()));
(IrOperand::None, IrType::Unit)
}
Stmt::Item(_) => (IrOperand::None, IrType::Unit),
Stmt::Semi(expr) => (build_ir_from_expr(ib, expr).0, IrType::Unit),
Stmt::Expr(expr) => build_ir_from_expr(ib, expr),
}
}
fn build_ir_conditional_if<'a>(ib: &mut IrBuilder<'a>, cond: &Expr, span: Span, false_target: IrIdent) {
fn binary_if_condition<'a>(ib: &mut IrBuilder<'a>, cond: &Expr) -> (IrOpcode, IrOperand, IrOperand, IrType) {
match cond {
Expr::Binary(binary) => {
let opcode = match binary.op {
BinOp::Lt => IrOpcode::IfGe,
BinOp::Gt => IrOpcode::IfLe,
BinOp::Le => IrOpcode::IfGt,
BinOp::Ge => IrOpcode::IfLt,
BinOp::Eq => IrOpcode::IfNe,
BinOp::Ne => IrOpcode::IfEq,
_ => IrOpcode::Nop,
};
if let IrOpcode::Nop = opcode {
(IrOpcode::Nop, IrOperand::None, IrOperand::None, IrType::Unit)
} else {
let (lhs, ty) = build_ir_from_expr(ib, &binary.left);
let rhs = build_ir_from_expr(ib, &binary.right).0;
(opcode, lhs, rhs, ty)
}
}
Expr::Paren(paren) => binary_if_condition(ib, &paren.expr),
_ => (IrOpcode::Nop, IrOperand::None, IrOperand::None, IrType::Unit),
}
}
let (mut opcode, mut op1, mut op2, mut ty) = binary_if_condition(ib, cond);
let op3 = IrOperand::Ident(false_target);
if let IrOpcode::Nop = opcode {
opcode = IrOpcode::IfEq;
op1 = build_ir_from_expr(ib, cond).0;
op2 = IrOperand::Value(IrValue::Bool(false));
ty = IrType::I8;
}
ib.instructions.push(IrInstruction {
opcode,
op1,
op2,
op3,
ty,
span: span,
..Default::default()
});
}
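// Illustrative note (added for clarity; not part of the original source): the
// comparison is inverted so that the jump is taken when the condition is false.
// For example `if a < b { ... }` lowers roughly to:
//
//     ifge i32 %a, %b, %.if_exit0    ; skip the then-block when a >= b
//     ...then-block...
// .if_exit0: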
fn build_ir_if_expr<'a>(ib: &mut IrBuilder<'a>, if_expr: &ExprIf, assign_op: Option<IrOperand>) {
let exit_label = create_ir_ident(ib.if_exit_symbol, ib.if_exit_index);
ib.if_exit_index += 1;
let else_label = create_ir_ident(ib.if_else_symbol, ib.if_else_index);
ib.if_else_index += 1;
let false_label = match if_expr.else_block {
Some(_) => else_label,
None => exit_label,
};
build_ir_conditional_if(ib, &*if_expr.cond, if_expr.span, false_label);
build_ir_from_block(ib, &if_expr.then_block, None, Some(false_label), assign_op);
if false_label == else_label {
ib.instructions.push(IrInstruction {
opcode: IrOpcode::Jump,
op1: IrOperand::Ident(exit_label),
..Default::default()
});
ib.instructions.push(IrInstruction {
opcode: IrOpcode::Label,
op1: IrOperand::Ident(false_label),
..Default::default()
});
}
if let Some(block) = &if_expr.else_block {
build_ir_from_block(ib, &block, Some(false_label), Some(exit_label), assign_op);
}
ib.instructions.push(IrInstruction {
opcode: IrOpcode::Label,
op1: IrOperand::Ident(exit_label),
..Default::default()
});
}
pub fn build_ir_from_expr<'a>(ib: &mut IrBuilder<'a>, expr: &Expr) -> (IrOperand, IrType) {
match expr {
Expr::Assign(assign) => {
let mut opcode = IrOpcode::Copy;
let (op1, ty) = match &*assign.left {
Expr::Ident(ident) => {
let ident = create_ir_ident(ident.sym, 0);
let ty = ib.scopes[0].locals.get(&ident).unwrap();
(IrOperand::Ident(ident), *ty)
}
Expr::Unary(unary) => {
if let UnOp::Deref = unary.op {
opcode = IrOpcode::CopyToDeref;
} else {
panic!("expected dereference");
}
let (op, ty) = build_ir_from_expr(ib, &unary.expr);
let ty = match ty {
IrType::PtrI8(1) => IrType::I8,
IrType::PtrI32(1) => IrType::I32,
IrType::PtrI8(x) => IrType::PtrI8(x - 1),
IrType::PtrI32(x) => IrType::PtrI32(x - 1),
_ => panic!("cannot dereference non-pointer type"),
};
(op, ty)
}
_ => panic!("expected identifier or dereference"),
};
if let Expr::If(if_expr) = &*assign.right {
build_ir_if_expr(ib, if_expr, Some(op1));
} else {
let op2 = build_ir_from_expr(ib, &assign.right).0;
update_ir_live_interval(ib, op1);
update_ir_live_interval(ib, op2);
ib.instructions.push(IrInstruction {
opcode,
op1,
op2,
ty,
span: assign.span,
..Default::default()
});
};
(op1, ty)
}
Expr::Binary(binary) => {
let op1 = allocate_register(ib);
let (op2, lhs_ty) = build_ir_from_expr(ib, &binary.left);
let op3 = build_ir_from_expr(ib, &binary.right).0;
let (opcode, ty) = match binary.op {
BinOp::Add => (IrOpcode::Add, IrType::I32),
BinOp::Sub => (IrOpcode::Sub, IrType::I32),
BinOp::Mul => (IrOpcode::Mul, IrType::I32),
BinOp::Div => (IrOpcode::Div, IrType::I32),
BinOp::Pow => (IrOpcode::Pow, IrType::I32),
BinOp::Mod => (IrOpcode::Mod, IrType::I32),
BinOp::And => (IrOpcode::And, IrType::I8),
BinOp::Or => (IrOpcode::Or, IrType::I8),
BinOp::Eq => (IrOpcode::Eq, lhs_ty),
BinOp::Ne => (IrOpcode::Ne, lhs_ty),
BinOp::Lt => (IrOpcode::Lt, lhs_ty),
BinOp::Le => (IrOpcode::Le, lhs_ty),
BinOp::Gt => (IrOpcode::Gt, lhs_ty),
BinOp::Ge => (IrOpcode::Ge, lhs_ty),
};
update_ir_live_interval(ib, op2);
update_ir_live_interval(ib, op3);
ib.instructions.push(IrInstruction {
opcode,
op1,
op2,
op3,
ty,
span: binary.span,
});
(op1, ty)
}
Expr::Block(block) => build_ir_from_block(ib, &block.block, None, None, None),
Expr::Break(_) |
Expr::Continue(_) => {
for scope in ib.scopes.iter().rev() {
if let Some(enter_label) = scope.enter_label {
if ib.while_enter_symbol == enter_label.symbol {
if let Expr::Continue(cont_expr) = expr {
ib.instructions.push(IrInstruction {
opcode: IrOpcode::Jump,
op1: IrOperand::Ident(enter_label),
span: cont_expr.span,
..Default::default()
});
} else if let Expr::Break(break_expr) = expr {
ib.instructions.push(IrInstruction {
opcode: IrOpcode::Jump,
op1: IrOperand::Ident(scope.exit_label.unwrap()),
span: break_expr.span,
..Default::default()
});
}
break;
}
}
}
(IrOperand::None, IrType::Unit)
}
Expr::Call(call) => {
// Setup parameters
let mut param_size = 0;
let call_insn_pos = ib.instructions.len() + call.args.len();
for arg in &call.args {
let (op1, ty) = build_ir_from_expr(ib, &arg);
// NOTE(alexander): update lifetime to include the call instruction also
if let IrOperand::Ident(ident) = op1 {
match ib.live_intervals.get_mut(&ident) {
Some(live_interval) => live_interval.end = call_insn_pos,
None => {},
}
}
ib.instructions.push(IrInstruction {
opcode: IrOpcode::Param,
op1,
ty,
span: arg.get_span(),
..Default::default()
});
param_size += 1;
}
// Make the function call
let function_label = create_ir_ident(call.ident.sym, 0);
let op1 = allocate_register(ib);
let (op2, return_type) = match ib.functions.get(&function_label) {
Some(bb) => {
if let Some(func_address) = bb.func_address {
if ib.addr_size == 4 {
(IrOperand::Value(IrValue::U32(func_address as u32)), bb.return_type)
} else if ib.addr_size == 8 {
(IrOperand::Value(IrValue::U64(func_address as u64)), bb.return_type)
} else {
panic!("unsupported address size: `{}-bit`, expected 32- or 64-bit", ib.addr_size*8);
}
} else {
(IrOperand::Ident(function_label), bb.return_type)
}
}
None => (IrOperand::Ident(function_label), IrType::Unit),
};
ib.instructions.push(IrInstruction {
opcode: IrOpcode::Call,
op1,
op2,
op3: IrOperand::Value(IrValue::I32(param_size)),
ty: return_type,
span: call.span,
..Default::default()
});
(op1, return_type)
}
Expr::Ident(ident) => {
let ident = create_ir_ident(ident.sym, 0);
let op = IrOperand::Ident(ident);
update_ir_live_interval(ib, op);
            let ty = ib.scopes[0].locals.get(&ident).unwrap(); // TODO(alexander): should we not search all scopes?
(op, *ty)
}
Expr::If(if_expr) => {
build_ir_if_expr(ib, if_expr, None);
(IrOperand::None, IrType::Unit)
}
Expr::Lit(literal) => match literal.lit {
Lit::Int(val) => (IrOperand::Value(IrValue::I32(val)), IrType::I32),
Lit::Bool(val) => (IrOperand::Value(IrValue::Bool(val)), IrType::I8),
}
Expr::Paren(paren) => build_ir_from_expr(ib, &paren.expr),
Expr::Reference(reference) => {
let (op2, ty) = build_ir_from_expr(ib, &reference.expr);
let op1 = allocate_register(ib);
let ref_ty = match ty {
IrType::I8 => IrType::PtrI8(1),
IrType::I32 => IrType::PtrI32(1),
IrType::PtrI8(i) => IrType::PtrI8(i + 1),
IrType::PtrI32(i) => IrType::PtrI32(i + 1),
IrType::Unit => panic!("missing type info"),
_ => panic!("unsupported type"),
};
update_ir_live_interval(ib, op2);
ib.instructions.push(IrInstruction {
opcode: IrOpcode::CopyFromRef,
op1,
op2,
ty: ref_ty,
span: reference.span,
..Default::default()
});
(op1, ref_ty)
}
Expr::Return(return_expr) => {
let (op1, ty) = match &*return_expr.expr {
Some(expr) => build_ir_from_expr(ib, expr),
None => (IrOperand::None, IrType::Unit),
};
update_ir_live_interval(ib, op1);
ib.instructions.push(IrInstruction {
opcode: IrOpcode::Return,
op1,
ty,
span: return_expr.span,
..Default::default()
});
(IrOperand::None, IrType::Unit)
}
Expr::Unary(unary) => {
match unary.op {
UnOp::Neg => {
let (op2, ty) = build_ir_from_expr(ib, &unary.expr);
let op1 = allocate_register(ib);
ib.instructions.push(IrInstruction {
opcode: IrOpcode::Clear,
op1,
ty,
..Default::default()
});
update_ir_live_interval(ib, op1);
update_ir_live_interval(ib, op2);
ib.instructions.push(IrInstruction {
opcode: IrOpcode::Sub,
op1,
op2,
ty,
span: unary.span,
..Default::default()
});
(op1, ty)
},
UnOp::Not => {
let (op1, ty) = build_ir_from_expr(ib, &unary.expr);
update_ir_live_interval(ib, op1);
ib.instructions.push(IrInstruction {
opcode: IrOpcode::Xor,
op1,
op2: IrOperand::Value(IrValue::Bool(true)),
ty,
span: unary.span,
..Default::default()
});
(op1, ty)
},
UnOp::Deref => {
let (op2, op2_ty) = build_ir_from_expr(ib, &unary.expr);
let op1 = allocate_register(ib);
let ty = match op2_ty {
IrType::I8 |
IrType::I32 |
IrType::I64 |
IrType::U32 |
IrType::U64 => panic!("cannot dereference non ref type"),
IrType::PtrI8(i) => if i > 1 {
IrType::PtrI8(i - 1)
} else {
IrType::I8
}
IrType::PtrI32(i) => if i > 1 {
IrType::PtrI32(i - 1)
} else {
IrType::I32
}
IrType::Unit => panic!("missing type info"),
};
update_ir_live_interval(ib, op2);
ib.instructions.push(IrInstruction {
opcode: IrOpcode::CopyFromDeref,
op1,
op2,
ty,
span: unary.span,
..Default::default()
});
(op1, ty)
}
}
}
Expr::While(while_expr) => {
let enter_label = create_ir_ident(ib.while_enter_symbol, ib.while_enter_index);
let exit_label = create_ir_ident(ib.while_exit_symbol, ib.while_exit_index);
ib.while_enter_index += 1;
ib.while_exit_index += 1;
ib.instructions.push(IrInstruction {
opcode: IrOpcode::Label,
op1: IrOperand::Ident(enter_label),
..Default::default()
});
build_ir_conditional_if(ib, &*while_expr.cond, while_expr.span, exit_label);
build_ir_from_block(ib, &while_expr.block, Some(enter_label), Some(exit_label), None);
ib.instructions.push(IrInstruction {
opcode: IrOpcode::Jump,
op1: IrOperand::Ident(enter_label),
..Default::default()
});
ib.instructions.push(IrInstruction {
opcode: IrOpcode::Label,
op1: IrOperand::Ident(exit_label),
..Default::default()
});
(IrOperand::None, IrType::Unit)
}
}
}
impl Default for IrInstruction {
fn default() -> Self {
IrInstruction {
opcode: IrOpcode::Nop,
op1: IrOperand::None,
op2: IrOperand::None,
op3: IrOperand::None,
ty: IrType::Unit,
span: Span::new(),
}
}
}
impl fmt::Display for IrBuilder<'_> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
for insn in &self.instructions {
match insn.opcode {
IrOpcode::Label => if let IrOperand::Ident(ident) = insn.op1 {
write!(f, "{}:\n", ident)?;
} else {
write!(f, "{}:\n", insn.op1)?;
}
_ => write!(f, " {}\n", insn)?,
}
}
Ok(())
}
}
impl fmt::Display for IrInstruction {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self.opcode {
IrOpcode::Nop |
IrOpcode::AllocParams |
IrOpcode::IfLt |
IrOpcode::IfGt |
IrOpcode::IfLe |
IrOpcode::IfGe |
IrOpcode::IfEq |
IrOpcode::IfNe |
IrOpcode::Param |
IrOpcode::Return |
IrOpcode::Label |
IrOpcode::Jump |
IrOpcode::Prologue |
IrOpcode::Epilogue => {
write!(f, "{}", self.opcode)?;
if let IrType::Unit = self.ty {
} else {
write!(f, " {}", self.ty)?;
}
write!(f, " {}", self.op1)?;
if let IrOperand::None = self.op2 {
} else {
write!(f, ", {}", self.op2)?;
if let IrOperand::None = self.op3 {
} else {
write!(f, ", {}", self.op3)?;
}
}
}
_ => {
write!(f, "{} = {}", self.op1, self.opcode)?;
if let IrType::Unit = self.ty {
} else {
write!(f, " {}", self.ty)?;
}
if let IrOperand::None = self.op2 {
} else {
write!(f, " {}", self.op2)?;
if let IrOperand::None = self.op3 {
} else {
write!(f, ", {}", self.op3)?;
}
}
}
}
Ok(())
}
}
impl fmt::Display for IrType {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
IrType::I8 => write!(f, "i8"),
IrType::I32 => write!(f, "i32"),
IrType::I64 => write!(f, "i64"),
IrType::U32 => write!(f, "u32"),
IrType::U64 => write!(f, "u64"),
IrType::PtrI8(i) => write!(f, "i8{}", "*".repeat(*i as usize)),
IrType::PtrI32(i) => write!(f, "i32{}", "*".repeat(*i as usize)),
IrType::Unit => write!(f, ""),
}
}
}
impl fmt::Display for IrIdent {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let s = resolve_symbol(self.symbol);
if self.index > 0 || s.len() == 0 {
write!(f, "{}{}", s, self.index)
} else {
write!(f, "{}", s)
}
}
}
impl fmt::Display for IrOperand {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match &self {
IrOperand::Ident(label) => write!(f, "%{}", label),
IrOperand::Value(val) => match val {
IrValue::I32(v) => write!(f, "{}", v),
IrValue::U32(v) => write!(f, "{}", v),
IrValue::U64(v) => write!(f, "{}", v),
IrValue::Bool(v) => write!(f, "{}", v),
}
IrOperand::None => write!(f, ""),
}
}
}
impl fmt::Display for IrOpcode {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
IrOpcode::Nop => write!(f, "nop"),
IrOpcode::Alloca => write!(f, "alloca"),
IrOpcode::AllocParams => write!(f, "alloc_params"),
IrOpcode::Copy => write!(f, "copy"),
IrOpcode::CopyFromRef => write!(f, "copy_from_ref"),
IrOpcode::CopyFromDeref => write!(f, "copy_from_deref"),
IrOpcode::CopyToDeref => write!(f, "copy_to_deref"),
IrOpcode::Clear => write!(f, "clear"),
IrOpcode::Add => write!(f, "add"),
IrOpcode::Sub => write!(f, "sub"),
IrOpcode::Mul => write!(f, "mul"),
IrOpcode::Div => write!(f, "div"),
IrOpcode::Pow => write!(f, "pow"),
IrOpcode::Mod => write!(f, "mod"),
IrOpcode::And => write!(f, "and"),
IrOpcode::Or => write!(f, "or"),
IrOpcode::Xor => write!(f, "xor"),
IrOpcode::Eq => write!(f, "eq"),
IrOpcode::Ne => write!(f, "ne"),
IrOpcode::Lt => write!(f, "lt"),
IrOpcode::Le => write!(f, "le"),
IrOpcode::Gt => write!(f, "gt"),
IrOpcode::Ge => write!(f, "ge"),
IrOpcode::IfLt => write!(f, "iflt"),
IrOpcode::IfGt => write!(f, "ifgt"),
IrOpcode::IfLe => write!(f, "ifle"),
IrOpcode::IfGe => write!(f, "ifge"),
IrOpcode::IfEq => write!(f, "ifeq"),
IrOpcode::IfNe => write!(f, "ifne"),
IrOpcode::Param => write!(f, "param"),
IrOpcode::Call => write!(f, "call"),
IrOpcode::Return => write!(f, "return"),
IrOpcode::Label => write!(f, "label"),
IrOpcode::Jump => write!(f, "jump"),
IrOpcode::Prologue => write!(f, "prologue"),
IrOpcode::Epilogue => write!(f, "epilogue\n"),
}
}
}
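// Hypothetical usage sketch (added for illustration; not part of the original module).
// Given a parsed `ast::File` (e.g. produced by an earlier parser stage), the IR can
// be built and dumped like this:
//
//     let mut ib = create_ir_builder();
//     build_ir_from_ast(&mut ib, &parsed_file);
//     println!("{}", ib); // prints the three-address instructions, one per line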
| true |
8ef68a40c22d49310d568a72883f9f39e4d019ef
|
Rust
|
rxRust/rxRust
|
/src/ops/future.rs
|
UTF-8
| 4,145 | 3.34375 | 3 |
[
"MIT"
] |
permissive
|
use std::{
cell::RefCell,
fmt::Display,
task::{Context, Poll},
};
use futures::{
channel::mpsc::{unbounded, UnboundedReceiver, UnboundedSender},
ready, Future, FutureExt, StreamExt,
};
use crate::{observable::Observable, observer::Observer};
/// Errors that can prevent an observable future from resolving correctly.
#[derive(Debug, Clone)]
pub enum ObservableError {
/// The observable had no values.
Empty,
/// The observable emitted more than one value.
MultipleValues,
}
impl Display for ObservableError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
ObservableError::Empty => write!(f, "the observable has no values"),
ObservableError::MultipleValues => {
write!(f, "the observable emitted more than one value")
}
}
}
}
impl std::error::Error for ObservableError {}
// The value sent through the channel
type Message<T, E> = Result<Result<T, E>, ObservableError>;
/// A future that resolves with the value emitted by an observable.
pub struct ObservableFuture<T, E> {
receiver: RefCell<UnboundedReceiver<Message<T, E>>>,
}
impl<T, E> ObservableFuture<T, E> {
/// Constructs a new `ObservableFuture<T, E>` that awaits the value emitted by a shared observable.
pub fn new<S>(observable: S) -> Self
where
S: Observable<T, E, ObservableFutureObserver<T, E>>,
{
let (sender, receiver) = unbounded::<Message<T, E>>();
observable
.actual_subscribe(ObservableFutureObserver { sender, last_value: None });
ObservableFuture { receiver: RefCell::new(receiver) }
}
}
impl<T, E> Future for ObservableFuture<T, E> {
type Output = Result<Result<T, E>, ObservableError>;
fn poll(
mut self: std::pin::Pin<&mut Self>,
cx: &mut Context<'_>,
) -> Poll<Self::Output> {
let this = self.as_mut();
let mut receiver = this.receiver.borrow_mut();
// We poll the receiver stream until the observable completes
let ret = ready!(receiver.next().poll_unpin(cx));
match ret {
Some(msg) => Poll::Ready(msg),
None => Poll::Pending,
}
}
}
pub struct ObservableFutureObserver<T, E> {
sender: UnboundedSender<Message<T, E>>,
last_value: Option<Message<T, E>>,
}
impl<T, E> Observer<T, E> for ObservableFutureObserver<T, E> {
fn next(&mut self, value: T) {
send_observable_value(self, Ok(value));
}
  fn error(mut self, err: E) {
    send_observable_value(&mut self, Err(err));
    // Reuse `complete` to deliver the stored error and close the channel, so an
    // awaiting future resolves instead of hanging when the observable errors.
    self.complete();
  }
fn complete(mut self) {
    // When the observable completes we send the last value emitted to the observer
    // and close the channel; if no value was emitted, an error is sent instead
let last_value = self
.last_value
.take()
.unwrap_or(Err(ObservableError::Empty));
self
.sender
.unbounded_send(last_value)
.expect("failed to send observable last emitted value");
self.sender.close_channel();
}
fn is_finished(&self) -> bool {
self.sender.is_closed()
}
}
fn send_observable_value<T, E>(
observer: &mut ObservableFutureObserver<T, E>,
value: Result<T, E>,
) {
match observer.last_value.as_mut() {
Some(x) => {
// A future only returns one value
*x = Err(ObservableError::MultipleValues);
}
None => {
observer.last_value.replace(Ok(value));
}
}
}
#[cfg(test)]
mod tests {
use crate::{observable::ObservableExt, ops::future::ObservableError};
#[tokio::test]
async fn to_future_observable_resolve_value_test() {
let fut = crate::observable::of(4)
.map(|x| format!("Number {x}"))
.to_future();
let value = fut.await.unwrap().ok().unwrap();
assert_eq!(format!("Number 4"), value);
}
#[tokio::test]
async fn to_future_error_empty_observable_test() {
let fut = crate::observable::empty::<i32>().to_future();
let value = fut.await;
assert!(matches!(value, Err(ObservableError::Empty)));
}
#[tokio::test]
async fn to_future_error_multiple_values_emitted_observable_test() {
let fut = crate::observable::from_iter([1, 2, 3]).to_future();
let value = fut.await;
assert!(matches!(value, Err(ObservableError::MultipleValues)));
}
}
| true |
74712b5ffa22c40995cc19f9b0bb598daadb71b5
|
Rust
|
PoorlyDefinedBehaviour/data-structures-and-algorithms
|
/rust/data_structures/stack_of_plates/main.rs
|
UTF-8
| 1,780 | 3.796875 | 4 |
[
"MIT"
] |
permissive
|
// Stack of Plates: Imagine a (literal) stack of plates. If the stack gets too high, it might topple.
// Therefore, in real life, we would likely start a new stack when the previous stack exceeds some
// threshold. Implement a data structure SetOfStacks that mimics this. SetOfStacks should be
// composed of several stacks and should create a new stack once the previous one exceeds capacity.
// SetOfStacks.push() and SetOfStacks.pop() should behave identically to a single stack
// (that is, pop() should return the same values as it would if there were just a single stack).
struct StackOfPlates<T> {
stacks: Vec<Vec<T>>,
max_stack_size: usize,
}
impl<T> StackOfPlates<T> {
fn new(max_stack_size: usize) -> Self {
assert!(max_stack_size > 0);
Self {
max_stack_size,
stacks: Vec::new(),
}
}
fn push(&mut self, value: T) {
if self.stacks.is_empty() {
self.stacks.push(vec![]);
}
let newest_stack = self.stacks.last_mut().unwrap();
if newest_stack.len() < self.max_stack_size {
newest_stack.push(value);
} else {
self.stacks.push(vec![value]);
}
}
fn pop(&mut self) -> Option<T> {
self.stacks.last_mut().and_then(|stack| stack.pop())
}
}
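// A minimal usage sketch (added for illustration; not part of the original solution):
// each inner stack holds at most `max_stack_size` plates before a new one is started.
#[allow(dead_code)]
fn example_usage() {
  let mut plates = StackOfPlates::new(2);
  for value in 1..=5 {
    plates.push(value);
  }
  // Values pop in LIFO order across the internal stacks: 5, 4, 3, ...
  assert_eq!(plates.pop(), Some(5));
  assert_eq!(plates.pop(), Some(4));
}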
fn main() {}
#[cfg(test)]
mod tests {
use super::*;
use proptest::prelude::*;
proptest! {
#[test]
fn simple(max_stack_size: usize, values: Vec<i32>) {
      prop_assume!(max_stack_size > 0); // skip (rather than fail on) the zero case, since new() asserts > 0
let mut stack = StackOfPlates::new(max_stack_size);
let last = values.last().cloned();
for value in values.into_iter() {
stack.push(value);
}
assert_eq!(last, stack.pop());
}
}
}
| true |
ab2ade7e0e4cda638549f9abb01276c190ed9ea2
|
Rust
|
tesseract-one/Keychain.rs
|
/keychain-c/src/utils/panic.rs
|
UTF-8
| 811 | 2.59375 | 3 |
[
"Apache-2.0"
] |
permissive
|
use error::ErrorPtr;
use keychain::Error;
use std::panic;
pub fn handle_exception<F: FnOnce() -> R + panic::UnwindSafe, R>(func: F) -> Result<R, ErrorPtr> {
handle_exception_result(|| Ok(func()))
}
pub fn handle_exception_result<F: FnOnce() -> Result<R, Error> + panic::UnwindSafe, R>(
func: F
) -> Result<R, ErrorPtr> {
match panic::catch_unwind(func) {
Ok(res) => res.map_err(|err| ErrorPtr::new(&err)),
Err(err) => {
if let Some(string) = err.downcast_ref::<String>() {
return Err(ErrorPtr::panic(&string));
} else if let Some(string) = err.downcast_ref::<&'static str>() {
return Err(ErrorPtr::panic(string));
}
return Err(ErrorPtr::panic(&format!("Reason: {:?}", err)));
}
}
}
pub fn hide_exceptions() {
panic::set_hook(Box::new(|_| {}));
}
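// Hypothetical usage sketch (added for illustration; not part of the original file):
//
//     let result: Result<u32, ErrorPtr> = handle_exception(|| compute_key_length());
//
// `compute_key_length` is a stand-in for any keychain call that might panic; the
// panic is caught and converted into an `ErrorPtr` instead of unwinding across FFI.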
| true |
cba64be00d95d78664ada58d2afa49e26eba981d
|
Rust
|
lfdominguez/elastic
|
/src/elastic/src/client/responses/tests/document_delete/mod.rs
|
UTF-8
| 808 | 2.5625 | 3 |
[
"Apache-2.0",
"MIT"
] |
permissive
|
use crate::{
client::responses::*,
http::{
receiver::parse,
StatusCode,
},
};
#[test]
fn success_parse_found_response() {
let f = include_bytes!("delete_found.json");
let deserialized = parse::<DeleteResponse>()
.from_slice(StatusCode::OK, f as &[_])
.unwrap();
assert_eq!("testindex", deserialized.index());
assert_eq!("testtype", deserialized.ty());
assert_eq!("1", deserialized.id());
assert_eq!(Some(2), deserialized.version());
assert!(deserialized.deleted());
}
#[test]
fn success_parse_not_found_response() {
let f = include_bytes!("delete_not_found.json");
let deserialized = parse::<DeleteResponse>()
.from_slice(StatusCode::NOT_FOUND, f as &[_])
.unwrap();
assert!(!deserialized.deleted());
}
| true |
ab88082704fa22396e781055d5257bc2c5cf92ae
|
Rust
|
bieganski/distributed
|
/dslab03/main.rs
|
UTF-8
| 612 | 2.59375 | 3 |
[] |
no_license
|
mod public_test;
mod solution;
use std::env;
use std::process;
fn parse_args() -> usize {
let args: Vec<String> = env::args().collect();
match args.len() {
1 => 10,
2 => match args.get(1).unwrap().parse() {
Ok(n) => n,
Err(_) => {
println!("Not an unsigned number as program argument!");
process::exit(1);
}
},
_ => {
println!("Only one argument - index of fibonacci number - can be passed!");
process::exit(1);
}
}
}
fn main() {
solution::fib(parse_args());
}
| true |
6238b56e96dc06eb17d072fe5ef3fcb78b893e38
|
Rust
|
sm921/pattern-making
|
/pmdraw/src/drawing.rs
|
UTF-8
| 4,385 | 2.984375 | 3 |
[] |
no_license
|
use std::f64::consts::PI;
// #[cfg(not(target_arch = "wasm32"))]
// use pmrender::show_lines;
use pmrender::show_lines;
use crate::shapes::{bezier::Bezier, circle::Circle, line::Line, point::Point, Shape};
#[derive(Clone)]
pub struct Drawing {
/// canvas width in centimeters
pub width: f64,
/// canvas height in centimeters
pub height: f64,
pub shapes: Vec<Shape>,
vertices: Vec<(f32, f32)>,
}
impl Drawing {
pub fn bezier(&mut self, b: &Bezier) {
self.bezier_with_precision(b, 100)
}
pub fn bezier_with_precision(&mut self, b: &Bezier, precision: u32) {
self.shapes.push(Shape::Bezier(b.clone()));
let t_range = b.t_range();
let mut t = t_range.from;
let dt = t_range.to / precision as f64;
while t <= t_range.to {
self.line_no_store(Line::new(
b.point_at(t),
if t + dt < t_range.to {
b.point_at(t + dt)
} else {
b.point_at(t_range.to)
},
));
t += dt;
}
}
pub fn circle(&mut self, origin: Point, r: f64) {
self.circle_with_precision(origin, r, 100)
}
pub fn circle_with_precision(&mut self, origin: Point, r: f64, precision: u32) {
let c = Circle::new(origin, r);
self.shapes.push(Shape::Circle(c));
let dt = 1.0 / (precision - 1) as f64;
let mut p0 = origin.to(r, 0.0);
for i in 1..precision {
            let t = i as f64 * dt;
            let p1 = if t >= 1.0 {
                origin.to(r, 0.0)
            } else {
                let theta = 2.0 * PI * t;
                origin.to(r * theta.cos(), r * theta.sin())
            };
self.line(Line::new(p0, p1));
p0 = p1;
}
}
pub fn line_from_point(&mut self, origin_x: f64, origin_y: f64, end_x: f64, end_y: f64) {
self.line_with_store(
Line::new(Point::new(origin_x, origin_y), Point::new(end_x, end_y)),
true,
);
}
pub fn line(&mut self, l: Line) {
self.line_with_store(l, true);
}
pub fn line_borrow(&mut self, l: &Line) {
self.line_with_store_borrow(l, true);
}
fn line_no_store(&mut self, l: Line) {
self.line_with_store(l, false);
}
fn line_with_store(&mut self, l: Line, stores_shape: bool) {
if stores_shape {
self.shapes.push(Shape::Line(l.clone()));
}
self.vertices.push((l.origin.x as f32, l.origin.y as f32));
self.vertices.push((l.end.x as f32, l.end.y as f32));
}
fn line_with_store_borrow(&mut self, l: &Line, stores_shape: bool) {
if stores_shape {
self.shapes.push(Shape::Line(l.clone()));
}
self.vertices.push((l.origin.x as f32, l.origin.y as f32));
self.vertices.push((l.end.x as f32, l.end.y as f32));
}
pub fn new(width: f64, height: f64) -> Drawing {
let shapes = Vec::new();
let vertices = Vec::new();
Drawing {
width,
height,
shapes,
vertices,
}
}
pub fn point(&mut self, p: Point) {
self.shapes.push(Shape::Point(p));
self.circle_with_precision(p, 0.3, 20);
}
pub fn show(&self, _window_width: u32, _window_height: u32) {
// normalize coordinates
let scale = 2.0
/ (if self.width as f32 > self.height as f32 {
self.width as f32
} else {
self.height as f32
});
let _model: [[f32; 4]; 4] = [
            [scale, 0.0, 0.0, 0.0], // 1. column: normalize x coordinates
            [0.0, -scale, 0.0, 0.0], // 2. column: normalize and reverse y coordinates so that the upper side is positive
            [0.0, 0.0, scale, 0.0], // 3. column: normalize z coordinates
            [-0.9, 0.9, 0.0, 1.0], // 4. column: move the origin to the bottom left
];
        if cfg!(target_arch = "wasm32") {
todo!()
} else {
// #[cfg(not(target_arch = "wasm32"))]
show_lines(
self.vertices.to_vec(),
_model,
_window_width,
_window_height,
)
};
}
}
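// A minimal usage sketch (added for illustration; not part of the original file).
// The canvas size and coordinates below are arbitrary values in centimeters.
#[allow(dead_code)]
fn drawing_usage_sketch() {
    let mut d = Drawing::new(100.0, 100.0);
    d.line_from_point(0.0, 0.0, 50.0, 50.0);
    d.circle(Point::new(25.0, 25.0), 10.0);
    // d.show(800, 600); // would open a window; left commented out in this sketch
}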
| true |
6a258c7b90dcf52837c688b579af9d35f5a90bc9
|
Rust
|
JacobHenner/cargo
|
/src/cargo/util/sha256.rs
|
UTF-8
| 4,337 | 2.53125 | 3 |
[
"GPL-2.0-only",
"Apache-2.0",
"OpenSSL",
"MIT",
"GCC-exception-2.0",
"BSD-3-Clause",
"LGPL-2.0-or-later",
"Zlib",
"curl",
"LicenseRef-scancode-openssl",
"LicenseRef-scancode-ssleay-windows",
"Unlicense",
"LGPL-2.1-only"
] |
permissive
|
pub use self::imp::Sha256;
// Someone upstream will link to OpenSSL, so we don't need to explicitly
// link to it ourselves. Hence we pick up Sha256 digests from OpenSSL
#[cfg(not(windows))]
#[allow(bad_style)]
mod imp {
use libc;
enum EVP_MD_CTX {}
enum EVP_MD {}
enum ENGINE {}
extern {
fn EVP_DigestInit_ex(ctx: *mut EVP_MD_CTX,
kind: *const EVP_MD,
imp: *mut ENGINE) -> libc::c_int;
fn EVP_DigestUpdate(ctx: *mut EVP_MD_CTX,
d: *const libc::c_void,
cnt: libc::size_t) -> libc::c_int;
fn EVP_DigestFinal_ex(ctx: *mut EVP_MD_CTX, md: *mut libc::c_uchar,
s: *mut libc::c_uint) -> libc::c_int;
fn EVP_MD_CTX_create() -> *mut EVP_MD_CTX;
fn EVP_MD_CTX_destroy(ctx: *mut EVP_MD_CTX);
fn EVP_sha256() -> *const EVP_MD;
}
pub struct Sha256 { ctx: *mut EVP_MD_CTX }
impl Sha256 {
pub fn new() -> Sha256 {
unsafe {
let ctx = EVP_MD_CTX_create();
assert!(!ctx.is_null());
let ret = Sha256 { ctx: ctx };
let n = EVP_DigestInit_ex(ret.ctx, EVP_sha256(), 0 as *mut _);
assert_eq!(n, 1);
return ret;
}
}
pub fn update(&mut self, bytes: &[u8]) {
unsafe {
let n = EVP_DigestUpdate(self.ctx, bytes.as_ptr() as *const _,
bytes.len() as libc::size_t);
assert_eq!(n, 1);
}
}
pub fn finish(&mut self) -> [u8; 32] {
unsafe {
let mut ret = [0u8; 32];
let mut out = 0;
let n = EVP_DigestFinal_ex(self.ctx, ret.as_mut_ptr(), &mut out);
assert_eq!(n, 1);
assert_eq!(out, 32);
return ret;
}
}
}
impl Drop for Sha256 {
fn drop(&mut self) {
unsafe { EVP_MD_CTX_destroy(self.ctx) }
}
}
}
// Leverage the crypto APIs that windows has built in.
#[cfg(windows)]
mod imp {
extern crate winapi;
extern crate advapi32;
use std::io;
use std::ptr;
use self::winapi::{DWORD, HCRYPTPROV, HCRYPTHASH};
use self::winapi::{PROV_RSA_AES, CRYPT_SILENT, CRYPT_VERIFYCONTEXT, CALG_SHA_256, HP_HASHVAL};
use self::advapi32::{CryptAcquireContextW, CryptCreateHash, CryptDestroyHash};
use self::advapi32::{CryptGetHashParam, CryptHashData, CryptReleaseContext};
macro_rules! call{ ($e:expr) => ({
if $e == 0 {
panic!("failed {}: {}", stringify!($e), io::Error::last_os_error())
}
}) }
pub struct Sha256 {
hcryptprov: HCRYPTPROV,
hcrypthash: HCRYPTHASH,
}
impl Sha256 {
pub fn new() -> Sha256 {
let mut hcp = 0;
call!(unsafe {
CryptAcquireContextW(&mut hcp, ptr::null(), ptr::null(),
PROV_RSA_AES,
CRYPT_VERIFYCONTEXT | CRYPT_SILENT)
});
let mut ret = Sha256 { hcryptprov: hcp, hcrypthash: 0 };
call!(unsafe {
CryptCreateHash(ret.hcryptprov, CALG_SHA_256,
0, 0, &mut ret.hcrypthash)
});
return ret;
}
pub fn update(&mut self, bytes: &[u8]) {
call!(unsafe {
CryptHashData(self.hcrypthash, bytes.as_ptr() as *mut _,
bytes.len() as DWORD, 0)
})
}
pub fn finish(&mut self) -> [u8; 32] {
let mut ret = [0u8; 32];
let mut len = ret.len() as DWORD;
call!(unsafe {
CryptGetHashParam(self.hcrypthash, HP_HASHVAL, ret.as_mut_ptr(),
&mut len, 0)
});
assert_eq!(len as usize, ret.len());
return ret;
}
}
impl Drop for Sha256 {
fn drop(&mut self) {
if self.hcrypthash != 0 {
call!(unsafe { CryptDestroyHash(self.hcrypthash) });
}
call!(unsafe { CryptReleaseContext(self.hcryptprov, 0) });
}
}
}
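// Hypothetical usage sketch (added for illustration; not part of the original module).
// The API is identical on both platforms: feed bytes with `update`, then read the
// 32-byte digest with `finish`:
//
//     let mut hasher = Sha256::new();
//     hasher.update(b"hello world");
//     let digest: [u8; 32] = hasher.finish();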
| true |
e2c3a8262f85b40a7619b179fdc2c03427618289
|
Rust
|
gaultier/kotlin-rs
|
/tests/var.rs
|
UTF-8
| 4,785 | 3.15625 | 3 |
[] |
no_license
|
use kotlin::compile::sexp;
use kotlin::error::*;
use kotlin::parse::Type;
#[test]
fn simple_var() {
let src = "var a = 1;";
let mut out: Vec<u8> = Vec::new();
assert!(sexp(src, &mut out).is_ok());
assert_eq!(
std::str::from_utf8(&out).as_mut().unwrap().trim(),
"(define a 1)"
);
}
#[test]
fn var_with_math_expr() {
let src = "var a = 5*10\n";
let mut out: Vec<u8> = Vec::new();
assert!(sexp(src, &mut out).is_ok());
assert_eq!(
std::str::from_utf8(&out).as_mut().unwrap().trim(),
"(define a (* 5 10))"
);
}
#[test]
fn var_with_no_expr() -> Result<(), String> {
let src = "var a = while (true) ;";
let mut out: Vec<u8> = Vec::new();
match sexp(src, &mut out) {
Err(Error {
kind: ErrorKind::ExpectedPrimary,
..
}) => Ok(()),
other => Err(format!("Should be a parse error: {:?}", other)),
}
}
#[test]
fn ref_var() {
let src = "var a = 1; var b = a * 2; if(b<4) {} else {}";
let mut out: Vec<u8> = Vec::new();
assert!(sexp(src, &mut out).is_ok());
assert_eq!(
std::str::from_utf8(&out).as_mut().unwrap().trim(),
"(begin (define a 1)\n (define b (* a 2))\n (if (< b 4) (begin ) (begin )) )"
)
}
#[test]
fn simple_val() {
let src = "val a = 1;";
let mut out: Vec<u8> = Vec::new();
assert!(sexp(src, &mut out).is_ok());
assert_eq!(
std::str::from_utf8(&out).as_mut().unwrap().trim(),
"(define a 1)"
);
}
#[test]
fn val_with_math_expr() {
let src = "val a = 5*10\n";
let mut out: Vec<u8> = Vec::new();
assert!(sexp(src, &mut out).is_ok());
assert_eq!(
std::str::from_utf8(&out).as_mut().unwrap().trim(),
"(define a (* 5 10))"
);
}
#[test]
fn val_with_no_expr() -> Result<(), String> {
let src = "val a = while (true) ;";
let mut out: Vec<u8> = Vec::new();
match sexp(src, &mut out) {
Err(Error {
kind: ErrorKind::ExpectedPrimary,
..
}) => Ok(()),
other => Err(format!("Should be a parse error: {:?}", other)),
}
}
#[test]
fn ref_val() {
let src = "val a = 1; val b = a * 2; if(b<4) {} else {}";
let mut out: Vec<u8> = Vec::new();
assert!(sexp(src, &mut out).is_ok());
assert_eq!(
std::str::from_utf8(&out).as_mut().unwrap().trim(),
"(begin (define a 1)\n (define b (* a 2))\n (if (< b 4) (begin ) (begin )) )"
)
}
#[test]
fn simple_var_assign() {
let src = "var a = 1; a = 4;";
let mut out: Vec<u8> = Vec::new();
assert!(sexp(src, &mut out).is_ok());
assert_eq!(
std::str::from_utf8(&out).as_mut().unwrap().trim(),
"(begin (define a 1)\n (set! a 4)\n )"
);
}
#[test]
fn var_assign_with_math_expr() {
let src = "var a = 5*10; a = a * 2;";
let mut out: Vec<u8> = Vec::new();
assert!(sexp(src, &mut out).is_ok());
assert_eq!(
std::str::from_utf8(&out).as_mut().unwrap().trim(),
"(begin (define a (* 5 10))\n (set! a (* a 2))\n )"
);
}
#[test]
fn val_reassign() -> Result<(), String> {
let src = "val a = 4; a = a*2;";
let mut out: Vec<u8> = Vec::new();
match sexp(src, &mut out) {
Err(Error {
kind: ErrorKind::CannotReassignVal(identifier),
..
}) if identifier == "a" => Ok(()),
other => Err(format!("Should be an error: {:?}", other)),
}
}
#[test]
fn var_assign_other_ops() {
let src = "var a = 5*10; a -=\n 1; a%=2; a/=3; a*=4 ";
let mut out: Vec<u8> = Vec::new();
assert!(sexp(src, &mut out).is_ok());
assert_eq!(
std::str::from_utf8(&out).as_mut().unwrap().trim(),
"(begin (define a (* 5 10))\n (set! a (-= 1))\n (set! a (%= 2))\n (set! a (/= 3))\n (set! a (*= 4))\n )"
);
}
#[test]
fn vars_with_type() {
let src = "var a:Int = 5*10; val b: Char = 'b';";
let mut out: Vec<u8> = Vec::new();
assert!(sexp(src, &mut out).is_ok());
assert_eq!(
std::str::from_utf8(&out).as_mut().unwrap().trim(),
"(begin (define a (* 5 10))\n (define b 'b')\n )"
);
}
#[test]
fn val_wrong_explicit_type() -> Result<(), String> {
let src = "val a: Int = 4L";
let mut out: Vec<u8> = Vec::new();
match sexp(src, &mut out) {
Err(Error {
kind: ErrorKind::IncompatibleTypes(Type::Long, Type::Int),
..
}) => Ok(()),
other => Err(format!("Should be an error: {:?}", other)),
}
}
#[test]
fn assign_with_paren() {
let src = "var a = 5*10; (((a))) = 1";
let mut out: Vec<u8> = Vec::new();
assert!(sexp(src, &mut out).is_ok());
assert_eq!(
std::str::from_utf8(&out).as_mut().unwrap().trim(),
"(begin (define a (* 5 10))\n (set! a 1)\n )"
);
}
| true |
d3a30c9f0c193f2c8f4989f10d5c3e0db6476b0e
|
Rust
|
tychedelia/franz
|
/franz_protocol/src/types.rs
|
UTF-8
| 46,089 | 2.859375 | 3 |
[] |
no_license
|
use std::string::String as StdString;
use std::hash::Hash;
use indexmap::IndexMap;
use string::TryFrom;
use super::{DecodeError, EncodeError, Encoder, Decoder, Encodable, Decodable, MapEncodable, MapDecodable, NewType, StrBytes};
use crate::buf::{ByteBuf, ByteBufMut};
macro_rules! define_copy_impl {
($e:ident, $t:ty) => (
impl Encoder<$t> for $e {
fn encode<B: ByteBufMut>(&self, buf: &mut B, value: $t) -> Result<(), EncodeError> {
self.encode(buf, &value)
}
fn compute_size(&self, value: $t) -> Result<usize, EncodeError> {
self.compute_size(&value)
}
fn fixed_size(&self) -> Option<usize> {
<Self as Encoder<&$t>>::fixed_size(self)
}
}
)
}
#[derive(Debug, Copy, Clone, Default)]
pub struct Boolean;
impl<T: NewType<bool>> Encoder<&T> for Boolean {
fn encode<B: ByteBufMut>(&self, buf: &mut B, value: &T) -> Result<(), EncodeError> {
Ok(buf.put_u8(if *value.borrow() { 1 } else { 0 }))
}
fn compute_size(&self, _value: &T) -> Result<usize, EncodeError> {
Ok(1)
}
fn fixed_size(&self) -> Option<usize> {
Some(1)
}
}
impl<T: NewType<bool>> Decoder<T> for Boolean {
fn decode<B: ByteBuf>(&self, buf: &mut B) -> Result<T, DecodeError> {
Ok((buf.try_get_u8()? != 0).into())
}
}
define_copy_impl!(Boolean, bool);
macro_rules! define_simple_ints {
($($name:ident: $t:ty [$put:ident, $get:ident],)*) => (
$(
#[derive(Debug, Copy, Clone)]
pub struct $name;
impl<T: NewType<$t>> Encoder<&T> for $name {
fn encode<B: ByteBufMut>(&self, buf: &mut B, value: &T) -> Result<(), EncodeError> {
Ok(buf.$put(*value.borrow()))
}
fn compute_size(&self, _value: &T) -> Result<usize, EncodeError> {
Ok(std::mem::size_of::<$t>())
}
fn fixed_size(&self) -> Option<usize> {
Some(std::mem::size_of::<$t>())
}
}
impl<T: NewType<$t>> Decoder<T> for $name {
fn decode<B: ByteBuf>(&self, buf: &mut B) -> Result<T, DecodeError> {
Ok(buf.$get()?.into())
}
}
define_copy_impl!($name, $t);
)*
)
}
define_simple_ints!{
Int8: i8 [put_i8, try_get_i8],
Int16: i16 [put_i16, try_get_i16],
Int32: i32 [put_i32, try_get_i32],
Int64: i64 [put_i64, try_get_i64],
UInt32: u32 [put_u32, try_get_u32],
}
#[derive(Debug, Copy, Clone, Default)]
pub struct UnsignedVarInt;
impl<T: NewType<u32>> Encoder<&T> for UnsignedVarInt {
fn encode<B: ByteBufMut>(&self, buf: &mut B, value: &T) -> Result<(), EncodeError> {
let mut value = *value.borrow();
while value >= 0x80 {
buf.put_u8((value as u8) | 0x80);
value >>= 7;
}
buf.put_u8(value as u8);
Ok(())
}
fn compute_size(&self, value: &T) -> Result<usize, EncodeError> {
let value = *value.borrow();
Ok(match value {
0x0..=0x7f => 1,
0x80..=0x3fff => 2,
0x4000..=0x1fffff => 3,
0x200000..=0xfffffff => 4,
0x10000000..=0xffffffff => 5,
})
}
}
impl<T: NewType<u32>> Decoder<T> for UnsignedVarInt {
fn decode<B: ByteBuf>(&self, buf: &mut B) -> Result<T, DecodeError> {
let mut value = 0;
for i in 0..5 {
let b = buf.try_get_u8()? as u32;
value |= (b & 0x7F) << (i*7);
if b < 0x80 {
break;
}
}
Ok(value.into())
}
}
define_copy_impl!(UnsignedVarInt, u32);
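// Worked example (added for illustration): values are encoded 7 bits at a time,
// least-significant group first, with the high bit of each byte marking "more to come".
// For instance 300 (0b1_0010_1100) encodes as [0xAC, 0x02]: 0xAC = 0x2C | 0x80,
// then 300 >> 7 = 2; anything below 0x80 fits in a single byte.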
#[derive(Debug, Copy, Clone, Default)]
pub struct UnsignedVarLong;
impl<T: NewType<u64>> Encoder<&T> for UnsignedVarLong {
fn encode<B: ByteBufMut>(&self, buf: &mut B, value: &T) -> Result<(), EncodeError> {
let mut value = *value.borrow();
while value >= 0x80 {
buf.put_u8((value as u8) | 0x80);
value >>= 7;
}
buf.put_u8(value as u8);
Ok(())
}
fn compute_size(&self, value: &T) -> Result<usize, EncodeError> {
let value = *value.borrow();
Ok(match value {
0x0..=0x7f => 1,
0x80..=0x3fff => 2,
0x4000..=0x1fffff => 3,
0x200000..=0xfffffff => 4,
0x10000000..=0x7ffffffff => 5,
0x800000000..=0x3ffffffffff => 6,
0x40000000000..=0x1ffffffffffff => 7,
0x2000000000000..=0xffffffffffffff => 8,
0x100000000000000..=0x7fffffffffffffff => 9,
0x8000000000000000..=0xffffffffffffffff => 10,
})
}
}
impl<T: NewType<u64>> Decoder<T> for UnsignedVarLong {
fn decode<B: ByteBuf>(&self, buf: &mut B) -> Result<T, DecodeError> {
let mut value = 0;
for i in 0..10 {
let b = buf.try_get_u8()? as u64;
value |= (b & 0x7F) << (i*7);
if b < 0x80 {
break;
}
}
Ok(value.into())
}
}
define_copy_impl!(UnsignedVarLong, u64);
#[derive(Debug, Copy, Clone, Default)]
pub struct VarInt;
impl<T: NewType<i32>> Encoder<&T> for VarInt {
fn encode<B: ByteBufMut>(&self, buf: &mut B, value: &T) -> Result<(), EncodeError> {
let value = *value.borrow();
let zigzag = ((value << 1) ^ (value >> 31)) as u32;
UnsignedVarInt.encode(buf, zigzag)
}
fn compute_size(&self, value: &T) -> Result<usize, EncodeError> {
let value = *value.borrow();
let zigzag = ((value << 1) ^ (value >> 31)) as u32;
UnsignedVarInt.compute_size(zigzag)
}
}
impl<T: NewType<i32>> Decoder<T> for VarInt {
fn decode<B: ByteBuf>(&self, buf: &mut B) -> Result<T, DecodeError> {
let zigzag: u32 = UnsignedVarInt.decode(buf)?;
Ok((((zigzag >> 1) as i32) ^ (-((zigzag & 1) as i32))).into())
}
}
define_copy_impl!(VarInt, i32);
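// Worked example (added for illustration): the zigzag step maps signed values with
// small magnitude to small unsigned values before varint encoding:
//     0 -> 0, -1 -> 1, 1 -> 2, -2 -> 3, 2 -> 4, ...
// so negative numbers near zero still fit in a single encoded byte.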
#[derive(Debug, Copy, Clone, Default)]
pub struct VarLong;
impl<T: NewType<i64>> Encoder<&T> for VarLong {
fn encode<B: ByteBufMut>(&self, buf: &mut B, value: &T) -> Result<(), EncodeError> {
let value = *value.borrow();
let zigzag = ((value << 1) ^ (value >> 63)) as u64;
UnsignedVarLong.encode(buf, &zigzag)
}
fn compute_size(&self, value: &T) -> Result<usize, EncodeError> {
let value = *value.borrow();
let zigzag = ((value << 1) ^ (value >> 63)) as u64;
UnsignedVarLong.compute_size(zigzag)
}
}
impl<T: NewType<i64>> Decoder<T> for VarLong {
fn decode<B: ByteBuf>(&self, buf: &mut B) -> Result<T, DecodeError> {
let zigzag: u64 = UnsignedVarLong.decode(buf)?;
Ok((((zigzag >> 1) as i64) ^ (-((zigzag & 1) as i64))).into())
}
}
define_copy_impl!(VarLong, i64);
#[derive(Debug, Copy, Clone, Default)]
pub struct Uuid;
impl<T: NewType<uuid::Uuid>> Encoder<&T> for Uuid {
fn encode<B: ByteBufMut>(&self, buf: &mut B, value: &T) -> Result<(), EncodeError> {
Ok(buf.put_slice(value.borrow().as_bytes()))
}
fn compute_size(&self, _value: &T) -> Result<usize, EncodeError> {
Ok(16)
}
fn fixed_size(&self) -> Option<usize> {
Some(16)
}
}
impl<T: NewType<uuid::Uuid>> Decoder<T> for Uuid {
fn decode<B: ByteBuf>(&self, buf: &mut B) -> Result<T, DecodeError> {
let mut result = [0; 16];
buf.try_copy_to_slice(&mut result)?;
Ok(uuid::Uuid::from_bytes(result).into())
}
}
define_copy_impl!(Uuid, uuid::Uuid);
#[derive(Debug, Copy, Clone, Default)]
pub struct String;
impl Encoder<Option<&str>> for String {
fn encode<B: ByteBufMut>(&self, buf: &mut B, value: Option<&str>) -> Result<(), EncodeError> {
if let Some(s) = value {
if s.len() > std::i16::MAX as usize {
error!("String is too long to encode ({} bytes)", s.len());
Err(EncodeError)
} else {
Int16.encode(buf, s.len() as i16)?;
buf.put_slice(s.as_bytes());
Ok(())
}
} else {
Int16.encode(buf, -1)?;
Ok(())
}
}
fn compute_size(&self, value: Option<&str>) -> Result<usize, EncodeError> {
if let Some(s) = value {
if s.len() > std::i16::MAX as usize {
error!("String is too long to encode ({} bytes)", s.len());
Err(EncodeError)
} else {
Ok(2 + s.len())
}
} else {
Ok(2)
}
}
}
impl Encoder<Option<&StdString>> for String {
fn encode<B: ByteBufMut>(&self, buf: &mut B, value: Option<&StdString>) -> Result<(), EncodeError> {
String.encode(buf, value.map(|s| s.as_str()))
}
fn compute_size(&self, value: Option<&StdString>) -> Result<usize, EncodeError> {
String.compute_size(value.map(|s| s.as_str()))
}
}
impl Encoder<&Option<StdString>> for String {
fn encode<B: ByteBufMut>(&self, buf: &mut B, value: &Option<StdString>) -> Result<(), EncodeError> {
String.encode(buf, value.as_ref())
}
fn compute_size(&self, value: &Option<StdString>) -> Result<usize, EncodeError> {
String.compute_size(value.as_ref())
}
}
impl<T: NewType<StrBytes>> Encoder<Option<&T>> for String {
fn encode<B: ByteBufMut>(&self, buf: &mut B, value: Option<&T>) -> Result<(), EncodeError> {
String.encode(buf, value.map(|s| &**s.borrow()))
}
fn compute_size(&self, value: Option<&T>) -> Result<usize, EncodeError> {
String.compute_size(value.map(|s| &**s.borrow()))
}
}
impl<T: NewType<StrBytes>> Encoder<&Option<T>> for String {
fn encode<B: ByteBufMut>(&self, buf: &mut B, value: &Option<T>) -> Result<(), EncodeError> {
String.encode(buf, value.as_ref())
}
fn compute_size(&self, value: &Option<T>) -> Result<usize, EncodeError> {
String.compute_size(value.as_ref())
}
}
impl Encoder<&str> for String {
fn encode<B: ByteBufMut>(&self, buf: &mut B, value: &str) -> Result<(), EncodeError> {
String.encode(buf, Some(value))
}
fn compute_size(&self, value: &str) -> Result<usize, EncodeError> {
String.compute_size(Some(value))
}
}
impl Encoder<&StdString> for String {
fn encode<B: ByteBufMut>(&self, buf: &mut B, value: &StdString) -> Result<(), EncodeError> {
String.encode(buf, Some(value))
}
fn compute_size(&self, value: &StdString) -> Result<usize, EncodeError> {
String.compute_size(Some(value))
}
}
impl<T: NewType<StrBytes>> Encoder<&T> for String {
fn encode<B: ByteBufMut>(&self, buf: &mut B, value: &T) -> Result<(), EncodeError> {
String.encode(buf, Some(value))
}
fn compute_size(&self, value: &T) -> Result<usize, EncodeError> {
String.compute_size(Some(value))
}
}
impl Decoder<Option<StdString>> for String {
fn decode<B: ByteBuf>(&self, buf: &mut B) -> Result<Option<StdString>, DecodeError> {
match Int16.decode(buf)? {
-1 => Ok(None),
n if n >= 0 => {
let mut strbuf = vec![0; n as usize];
buf.try_copy_to_slice(&mut strbuf)?;
Ok(Some(std::string::String::from_utf8(strbuf)?))
},
n => {
error!("String length is negative ({})", n);
Err(DecodeError)
}
}
}
}
impl<T: NewType<StrBytes>> Decoder<Option<T>> for String {
fn decode<B: ByteBuf>(&self, buf: &mut B) -> Result<Option<T>, DecodeError> {
match Int16.decode(buf)? {
-1 => Ok(None),
n if n >= 0 => {
let strbuf = StrBytes::try_from(buf.try_get_bytes(n as usize)?)?;
Ok(Some(strbuf.into()))
},
n => {
error!("String length is negative ({})", n);
Err(DecodeError)
}
}
}
}
impl Decoder<StdString> for String {
fn decode<B: ByteBuf>(&self, buf: &mut B) -> Result<StdString, DecodeError> {
match String.decode(buf) {
Ok(None) => {
error!("String length is negative (-1)");
Err(DecodeError)
},
Ok(Some(s)) => Ok(s),
Err(e) => Err(e),
}
}
}
impl<T: NewType<StrBytes>> Decoder<T> for String {
fn decode<B: ByteBuf>(&self, buf: &mut B) -> Result<T, DecodeError> {
match String.decode(buf) {
Ok(None) => {
error!("String length is negative (-1)");
Err(DecodeError)
},
Ok(Some(s)) => Ok(s),
Err(e) => Err(e),
}
}
}
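/// Serializer/deserializer for "compact" strings: the length plus one encoded as an unsigned
/// varint (0 denotes null), followed by the UTF-8 bytes.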
#[derive(Debug, Copy, Clone, Default)]
pub struct CompactString;
impl Encoder<Option<&str>> for CompactString {
fn encode<B: ByteBufMut>(&self, buf: &mut B, value: Option<&str>) -> Result<(), EncodeError> {
if let Some(s) = value {
// Use >= because we're going to add one to the length
if s.len() >= std::u32::MAX as usize {
error!("CompactString is too long to encode ({} bytes)", s.len());
Err(EncodeError)
} else {
UnsignedVarInt.encode(buf, (s.len() as u32) + 1)?;
buf.put_slice(s.as_bytes());
Ok(())
}
} else {
UnsignedVarInt.encode(buf, 0)?;
Ok(())
}
}
fn compute_size(&self, value: Option<&str>) -> Result<usize, EncodeError> {
if let Some(s) = value {
// Use >= because we're going to add one to the length
if s.len() >= std::u32::MAX as usize {
error!("CompactString is too long to encode ({} bytes)", s.len());
Err(EncodeError)
} else {
Ok(UnsignedVarInt.compute_size((s.len() as u32) + 1)? + s.len())
}
} else {
Ok(1)
}
}
}
impl Encoder<Option<&StdString>> for CompactString {
fn encode<B: ByteBufMut>(&self, buf: &mut B, value: Option<&StdString>) -> Result<(), EncodeError> {
CompactString.encode(buf, value.map(|s| s.as_str()))
}
fn compute_size(&self, value: Option<&StdString>) -> Result<usize, EncodeError> {
CompactString.compute_size(value.map(|s| s.as_str()))
}
}
impl<T: NewType<StrBytes>> Encoder<Option<&T>> for CompactString {
fn encode<B: ByteBufMut>(&self, buf: &mut B, value: Option<&T>) -> Result<(), EncodeError> {
CompactString.encode(buf, value.map(|s| &**s.borrow()))
}
fn compute_size(&self, value: Option<&T>) -> Result<usize, EncodeError> {
CompactString.compute_size(value.map(|s| &**s.borrow()))
}
}
impl Encoder<&Option<StdString>> for CompactString {
fn encode<B: ByteBufMut>(&self, buf: &mut B, value: &Option<StdString>) -> Result<(), EncodeError> {
CompactString.encode(buf, value.as_ref())
}
fn compute_size(&self, value: &Option<StdString>) -> Result<usize, EncodeError> {
CompactString.compute_size(value.as_ref())
}
}
impl<T: NewType<StrBytes>> Encoder<&Option<T>> for CompactString {
fn encode<B: ByteBufMut>(&self, buf: &mut B, value: &Option<T>) -> Result<(), EncodeError> {
CompactString.encode(buf, value.as_ref())
}
fn compute_size(&self, value: &Option<T>) -> Result<usize, EncodeError> {
CompactString.compute_size(value.as_ref())
}
}
impl Encoder<&str> for CompactString {
fn encode<B: ByteBufMut>(&self, buf: &mut B, value: &str) -> Result<(), EncodeError> {
CompactString.encode(buf, Some(value))
}
fn compute_size(&self, value: &str) -> Result<usize, EncodeError> {
CompactString.compute_size(Some(value))
}
}
impl Encoder<&StdString> for CompactString {
fn encode<B: ByteBufMut>(&self, buf: &mut B, value: &StdString) -> Result<(), EncodeError> {
CompactString.encode(buf, Some(value))
}
fn compute_size(&self, value: &StdString) -> Result<usize, EncodeError> {
CompactString.compute_size(Some(value))
}
}
impl<T: NewType<StrBytes>> Encoder<&T> for CompactString {
fn encode<B: ByteBufMut>(&self, buf: &mut B, value: &T) -> Result<(), EncodeError> {
CompactString.encode(buf, Some(value))
}
fn compute_size(&self, value: &T) -> Result<usize, EncodeError> {
CompactString.compute_size(Some(value))
}
}
impl Decoder<Option<StdString>> for CompactString {
fn decode<B: ByteBuf>(&self, buf: &mut B) -> Result<Option<StdString>, DecodeError> {
match UnsignedVarInt.decode(buf)? {
0 => Ok(None),
n => {
let mut strbuf = vec![0; (n-1) as usize];
buf.try_copy_to_slice(&mut strbuf)?;
Ok(Some(std::string::String::from_utf8(strbuf)?.into()))
},
}
}
}
impl<T: NewType<StrBytes>> Decoder<Option<T>> for CompactString {
fn decode<B: ByteBuf>(&self, buf: &mut B) -> Result<Option<T>, DecodeError> {
match UnsignedVarInt.decode(buf)? {
0 => Ok(None),
n => {
let strbuf = StrBytes::try_from(buf.try_get_bytes((n-1) as usize)?)?;
Ok(Some(strbuf.into()))
},
}
}
}
impl Decoder<StdString> for CompactString {
fn decode<B: ByteBuf>(&self, buf: &mut B) -> Result<StdString, DecodeError> {
match CompactString.decode(buf) {
Ok(None) => {
error!("CompactString length is negative (-1)");
Err(DecodeError)
},
Ok(Some(s)) => Ok(s),
Err(e) => Err(e),
}
}
}
impl<T: NewType<StrBytes>> Decoder<T> for CompactString {
fn decode<B: ByteBuf>(&self, buf: &mut B) -> Result<T, DecodeError> {
match CompactString.decode(buf) {
Ok(None) => {
error!("CompactString length is negative (-1)");
Err(DecodeError)
},
Ok(Some(s)) => Ok(s),
Err(e) => Err(e),
}
}
}
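/// Serializer/deserializer for byte blobs: a 32-bit signed length prefix followed by the raw
/// bytes, with a length of -1 denoting a null (None) value.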
#[derive(Debug, Copy, Clone, Default)]
pub struct Bytes;
impl Encoder<Option<&[u8]>> for Bytes {
fn encode<B: ByteBufMut>(&self, buf: &mut B, value: Option<&[u8]>) -> Result<(), EncodeError> {
if let Some(s) = value {
if s.len() > std::i32::MAX as usize {
error!("Data is too long to encode ({} bytes)", s.len());
Err(EncodeError)
} else {
Int32.encode(buf, s.len() as i32)?;
buf.put_slice(s);
Ok(())
}
} else {
Int32.encode(buf, -1)?;
Ok(())
}
}
fn compute_size(&self, value: Option<&[u8]>) -> Result<usize, EncodeError> {
if let Some(s) = value {
if s.len() > std::i32::MAX as usize {
error!("Data is too long to encode ({} bytes)", s.len());
Err(EncodeError)
} else {
Ok(4 + s.len())
}
} else {
Ok(4)
}
}
}
impl Encoder<Option<&Vec<u8>>> for Bytes {
fn encode<B: ByteBufMut>(&self, buf: &mut B, value: Option<&Vec<u8>>) -> Result<(), EncodeError> {
Bytes.encode(buf, value.map(|s| s.as_slice()))
}
fn compute_size(&self, value: Option<&Vec<u8>>) -> Result<usize, EncodeError> {
Bytes.compute_size(value.map(|s| s.as_slice()))
}
}
impl Encoder<&Option<Vec<u8>>> for Bytes {
fn encode<B: ByteBufMut>(&self, buf: &mut B, value: &Option<Vec<u8>>) -> Result<(), EncodeError> {
Bytes.encode(buf, value.as_ref())
}
fn compute_size(&self, value: &Option<Vec<u8>>) -> Result<usize, EncodeError> {
Bytes.compute_size(value.as_ref())
}
}
impl Encoder<Option<&bytes::Bytes>> for Bytes {
fn encode<B: ByteBufMut>(&self, buf: &mut B, value: Option<&bytes::Bytes>) -> Result<(), EncodeError> {
Bytes.encode(buf, value.map(|s| &**s))
}
fn compute_size(&self, value: Option<&bytes::Bytes>) -> Result<usize, EncodeError> {
Bytes.compute_size(value.map(|s| &**s))
}
}
impl Encoder<&Option<bytes::Bytes>> for Bytes {
fn encode<B: ByteBufMut>(&self, buf: &mut B, value: &Option<bytes::Bytes>) -> Result<(), EncodeError> {
Bytes.encode(buf, value.as_ref())
}
fn compute_size(&self, value: &Option<bytes::Bytes>) -> Result<usize, EncodeError> {
Bytes.compute_size(value.as_ref())
}
}
impl Encoder<&[u8]> for Bytes {
fn encode<B: ByteBufMut>(&self, buf: &mut B, value: &[u8]) -> Result<(), EncodeError> {
Bytes.encode(buf, Some(value))
}
fn compute_size(&self, value: &[u8]) -> Result<usize, EncodeError> {
Bytes.compute_size(Some(value))
}
}
impl Encoder<&Vec<u8>> for Bytes {
fn encode<B: ByteBufMut>(&self, buf: &mut B, value: &Vec<u8>) -> Result<(), EncodeError> {
Bytes.encode(buf, Some(value))
}
fn compute_size(&self, value: &Vec<u8>) -> Result<usize, EncodeError> {
Bytes.compute_size(Some(value))
}
}
impl Encoder<&bytes::Bytes> for Bytes {
fn encode<B: ByteBufMut>(&self, buf: &mut B, value: &bytes::Bytes) -> Result<(), EncodeError> {
Bytes.encode(buf, Some(value))
}
fn compute_size(&self, value: &bytes::Bytes) -> Result<usize, EncodeError> {
Bytes.compute_size(Some(value))
}
}
impl Decoder<Option<Vec<u8>>> for Bytes {
fn decode<B: ByteBuf>(&self, buf: &mut B) -> Result<Option<Vec<u8>>, DecodeError> {
match Int32.decode(buf)? {
-1 => Ok(None),
n if n >= 0 => {
let mut data = vec![0; n as usize];
buf.try_copy_to_slice(&mut data)?;
Ok(Some(data))
},
n => {
error!("Data length is negative ({})", n);
Err(DecodeError)
}
}
}
}
impl Decoder<Option<bytes::Bytes>> for Bytes {
fn decode<B: ByteBuf>(&self, buf: &mut B) -> Result<Option<bytes::Bytes>, DecodeError> {
match Int32.decode(buf)? {
-1 => Ok(None),
n if n >= 0 => {
Ok(Some(buf.try_get_bytes(n as usize)?))
},
n => {
error!("Data length is negative ({})", n);
Err(DecodeError)
}
}
}
}
impl Decoder<Vec<u8>> for Bytes {
fn decode<B: ByteBuf>(&self, buf: &mut B) -> Result<Vec<u8>, DecodeError> {
match Bytes.decode(buf) {
Ok(None) => {
error!("Data length is negative (-1)");
Err(DecodeError)
},
Ok(Some(s)) => Ok(s),
Err(e) => Err(e),
}
}
}
impl Decoder<bytes::Bytes> for Bytes {
fn decode<B: ByteBuf>(&self, buf: &mut B) -> Result<bytes::Bytes, DecodeError> {
match Bytes.decode(buf) {
Ok(None) => {
error!("Data length is negative (-1)");
Err(DecodeError)
},
Ok(Some(s)) => Ok(s),
Err(e) => Err(e),
}
}
}
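/// Serializer/deserializer for "compact" byte blobs: the length plus one encoded as an
/// unsigned varint (0 denotes null), followed by the raw bytes.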
#[derive(Debug, Copy, Clone, Default)]
pub struct CompactBytes;
impl Encoder<Option<&[u8]>> for CompactBytes {
fn encode<B: ByteBufMut>(&self, buf: &mut B, value: Option<&[u8]>) -> Result<(), EncodeError> {
if let Some(s) = value {
// Use >= because we're going to add one to the length
if s.len() >= std::u32::MAX as usize {
error!("CompactBytes is too long to encode ({} bytes)", s.len());
Err(EncodeError)
} else {
UnsignedVarInt.encode(buf, (s.len() as u32) + 1)?;
buf.put_slice(s);
Ok(())
}
} else {
UnsignedVarInt.encode(buf, 0)?;
Ok(())
}
}
fn compute_size(&self, value: Option<&[u8]>) -> Result<usize, EncodeError> {
if let Some(s) = value {
// Use >= because we're going to add one to the length
if s.len() >= std::u32::MAX as usize {
error!("CompactBytes is too long to encode ({} bytes)", s.len());
Err(EncodeError)
} else {
Ok(UnsignedVarInt.compute_size((s.len() as u32) + 1)? + s.len())
}
} else {
Ok(1)
}
}
}
impl Encoder<Option<&Vec<u8>>> for CompactBytes {
fn encode<B: ByteBufMut>(&self, buf: &mut B, value: Option<&Vec<u8>>) -> Result<(), EncodeError> {
CompactBytes.encode(buf, value.map(|s| s.as_slice()))
}
fn compute_size(&self, value: Option<&Vec<u8>>) -> Result<usize, EncodeError> {
CompactBytes.compute_size(value.map(|s| s.as_slice()))
}
}
impl Encoder<&Option<Vec<u8>>> for CompactBytes {
fn encode<B: ByteBufMut>(&self, buf: &mut B, value: &Option<Vec<u8>>) -> Result<(), EncodeError> {
CompactBytes.encode(buf, value.as_ref())
}
fn compute_size(&self, value: &Option<Vec<u8>>) -> Result<usize, EncodeError> {
CompactBytes.compute_size(value.as_ref())
}
}
impl Encoder<Option<&bytes::Bytes>> for CompactBytes {
fn encode<B: ByteBufMut>(&self, buf: &mut B, value: Option<&bytes::Bytes>) -> Result<(), EncodeError> {
CompactBytes.encode(buf, value.map(|s| &**s))
}
fn compute_size(&self, value: Option<&bytes::Bytes>) -> Result<usize, EncodeError> {
CompactBytes.compute_size(value.map(|s| &**s))
}
}
impl Encoder<&Option<bytes::Bytes>> for CompactBytes {
fn encode<B: ByteBufMut>(&self, buf: &mut B, value: &Option<bytes::Bytes>) -> Result<(), EncodeError> {
CompactBytes.encode(buf, value.as_ref())
}
fn compute_size(&self, value: &Option<bytes::Bytes>) -> Result<usize, EncodeError> {
CompactBytes.compute_size(value.as_ref())
}
}
impl Encoder<&[u8]> for CompactBytes {
fn encode<B: ByteBufMut>(&self, buf: &mut B, value: &[u8]) -> Result<(), EncodeError> {
CompactBytes.encode(buf, Some(value))
}
fn compute_size(&self, value: &[u8]) -> Result<usize, EncodeError> {
CompactBytes.compute_size(Some(value))
}
}
impl Encoder<&Vec<u8>> for CompactBytes {
fn encode<B: ByteBufMut>(&self, buf: &mut B, value: &Vec<u8>) -> Result<(), EncodeError> {
CompactBytes.encode(buf, Some(value))
}
fn compute_size(&self, value: &Vec<u8>) -> Result<usize, EncodeError> {
CompactBytes.compute_size(Some(value))
}
}
impl Encoder<&bytes::Bytes> for CompactBytes {
fn encode<B: ByteBufMut>(&self, buf: &mut B, value: &bytes::Bytes) -> Result<(), EncodeError> {
CompactBytes.encode(buf, Some(value))
}
fn compute_size(&self, value: &bytes::Bytes) -> Result<usize, EncodeError> {
CompactBytes.compute_size(Some(value))
}
}
impl Decoder<Option<Vec<u8>>> for CompactBytes {
fn decode<B: ByteBuf>(&self, buf: &mut B) -> Result<Option<Vec<u8>>, DecodeError> {
match UnsignedVarInt.decode(buf)? {
0 => Ok(None),
n => {
let mut data = vec![0; (n-1) as usize];
buf.try_copy_to_slice(&mut data)?;
Ok(Some(data))
},
}
}
}
impl Decoder<Option<bytes::Bytes>> for CompactBytes {
fn decode<B: ByteBuf>(&self, buf: &mut B) -> Result<Option<bytes::Bytes>, DecodeError> {
match UnsignedVarInt.decode(buf)? {
0 => Ok(None),
n => {
Ok(Some(buf.try_get_bytes((n-1) as usize)?))
},
}
}
}
impl Decoder<Vec<u8>> for CompactBytes {
fn decode<B: ByteBuf>(&self, buf: &mut B) -> Result<Vec<u8>, DecodeError> {
match CompactBytes.decode(buf) {
Ok(None) => {
error!("Data length is negative (-1)");
Err(DecodeError)
},
Ok(Some(s)) => Ok(s),
Err(e) => Err(e),
}
}
}
impl Decoder<bytes::Bytes> for CompactBytes {
fn decode<B: ByteBuf>(&self, buf: &mut B) -> Result<bytes::Bytes, DecodeError> {
match CompactBytes.decode(buf) {
Ok(None) => {
error!("Data length is negative (-1)");
Err(DecodeError)
},
Ok(Some(s)) => Ok(s),
Err(e) => Err(e),
}
}
}
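/// Serializer/deserializer for nested structs; `version` selects which fields of the inner
/// type are written to or expected from the wire.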
#[derive(Debug, Copy, Clone, Default)]
pub struct Struct {
pub version: i16
}
impl<T: Encodable> Encoder<&T> for Struct {
fn encode<B: ByteBufMut>(&self, buf: &mut B, value: &T) -> Result<(), EncodeError> {
value.encode(buf, self.version)
}
fn compute_size(&self, value: &T) -> Result<usize, EncodeError> {
value.compute_size(self.version)
}
}
impl<T: Decodable> Decoder<T> for Struct {
fn decode<B: ByteBuf>(&self, buf: &mut B) -> Result<T, DecodeError> {
T::decode(buf, self.version)
}
}
impl<T: MapEncodable> Encoder<(&T::Key, &T)> for Struct {
fn encode<B: ByteBufMut>(&self, buf: &mut B, (key, value): (&T::Key, &T)) -> Result<(), EncodeError> {
value.encode(key, buf, self.version)
}
fn compute_size(&self, (key, value): (&T::Key, &T)) -> Result<usize, EncodeError> {
value.compute_size(key, self.version)
}
}
impl<T: MapDecodable> Decoder<(T::Key, T)> for Struct {
fn decode<B: ByteBuf>(&self, buf: &mut B) -> Result<(T::Key, T), DecodeError> {
T::decode(buf, self.version)
}
}
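/// Serializer/deserializer for arrays encoded with the inner encoder `E`: a 32-bit signed
/// element count (-1 denotes null) followed by the encoded elements. Map-shaped data
/// (`IndexMap`) is encoded as a sequence of key/value entries.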
#[derive(Debug, Copy, Clone)]
pub struct Array<E>(pub E);
impl<T, E: for<'a> Encoder<&'a T>> Encoder<Option<&[T]>> for Array<E> {
fn encode<B: ByteBufMut>(&self, buf: &mut B, value: Option<&[T]>) -> Result<(), EncodeError> {
if let Some(a) = value {
if a.len() > std::i32::MAX as usize {
error!("Array is too long to encode ({} items)", a.len());
Err(EncodeError)
} else {
Int32.encode(buf, a.len() as i32)?;
for item in a {
self.0.encode(buf, item)?;
}
Ok(())
}
} else {
Int32.encode(buf, -1)?;
Ok(())
}
}
fn compute_size(&self, value: Option<&[T]>) -> Result<usize, EncodeError> {
if let Some(a) = value {
if a.len() > std::i32::MAX as usize {
error!("Array is too long to encode ({} items)", a.len());
Err(EncodeError)
} else if let Some(fixed_size) = self.0.fixed_size() {
Ok(4 + a.len() * fixed_size)
} else {
let mut total_size = 4;
for item in a {
total_size += self.0.compute_size(item)?;
}
Ok(total_size)
}
} else {
Ok(4)
}
}
}
impl<T, E: for<'a> Encoder<&'a T>> Encoder<Option<&Vec<T>>> for Array<E> {
fn encode<B: ByteBufMut>(&self, buf: &mut B, value: Option<&Vec<T>>) -> Result<(), EncodeError> {
self.encode(buf, value.map(|s| s.as_slice()))
}
fn compute_size(&self, value: Option<&Vec<T>>) -> Result<usize, EncodeError> {
self.compute_size(value.map(|s| s.as_slice()))
}
}
impl<T, E: for<'a> Encoder<&'a T>> Encoder<&Option<Vec<T>>> for Array<E> {
fn encode<B: ByteBufMut>(&self, buf: &mut B, value: &Option<Vec<T>>) -> Result<(), EncodeError> {
self.encode(buf, value.as_ref())
}
fn compute_size(&self, value: &Option<Vec<T>>) -> Result<usize, EncodeError> {
self.compute_size(value.as_ref())
}
}
impl<T, E: for<'a> Encoder<&'a T>> Encoder<&[T]> for Array<E> {
fn encode<B: ByteBufMut>(&self, buf: &mut B, value: &[T]) -> Result<(), EncodeError> {
self.encode(buf, Some(value))
}
fn compute_size(&self, value: &[T]) -> Result<usize, EncodeError> {
self.compute_size(Some(value))
}
}
impl<T, E: for<'a> Encoder<&'a T>> Encoder<&Vec<T>> for Array<E> {
fn encode<B: ByteBufMut>(&self, buf: &mut B, value: &Vec<T>) -> Result<(), EncodeError> {
self.encode(buf, Some(value))
}
fn compute_size(&self, value: &Vec<T>) -> Result<usize, EncodeError> {
self.compute_size(Some(value))
}
}
impl<K: Eq + Hash, V, E: for<'a> Encoder<(&'a K, &'a V)>> Encoder<Option<&IndexMap<K, V>>> for Array<E> {
fn encode<B: ByteBufMut>(&self, buf: &mut B, value: Option<&IndexMap<K, V>>) -> Result<(), EncodeError> {
if let Some(a) = value {
if a.len() > std::i32::MAX as usize {
error!("Array is too long to encode ({} items)", a.len());
Err(EncodeError)
} else {
Int32.encode(buf, a.len() as i32)?;
for item in a {
self.0.encode(buf, item)?;
}
Ok(())
}
} else {
Int32.encode(buf, -1)?;
Ok(())
}
}
fn compute_size(&self, value: Option<&IndexMap<K, V>>) -> Result<usize, EncodeError> {
if let Some(a) = value {
if a.len() > std::i32::MAX as usize {
error!("Array is too long to encode ({} items)", a.len());
Err(EncodeError)
} else if let Some(fixed_size) = self.0.fixed_size() {
Ok(4 + a.len() * fixed_size)
} else {
let mut total_size = 4;
for item in a {
total_size += self.0.compute_size(item)?;
}
Ok(total_size)
}
} else {
Ok(4)
}
}
}
impl<K: Eq + Hash, V, E: for<'a> Encoder<(&'a K, &'a V)>> Encoder<&Option<IndexMap<K, V>>> for Array<E> {
fn encode<B: ByteBufMut>(&self, buf: &mut B, value: &Option<IndexMap<K, V>>) -> Result<(), EncodeError> {
self.encode(buf, value.as_ref())
}
fn compute_size(&self, value: &Option<IndexMap<K, V>>) -> Result<usize, EncodeError> {
self.compute_size(value.as_ref())
}
}
impl<K: Eq + Hash, V, E: for<'a> Encoder<(&'a K, &'a V)>> Encoder<&IndexMap<K, V>> for Array<E> {
fn encode<B: ByteBufMut>(&self, buf: &mut B, value: &IndexMap<K, V>) -> Result<(), EncodeError> {
self.encode(buf, Some(value))
}
fn compute_size(&self, value: &IndexMap<K, V>) -> Result<usize, EncodeError> {
self.compute_size(Some(value))
}
}
impl<T, E: Decoder<T>> Decoder<Option<Vec<T>>> for Array<E> {
fn decode<B: ByteBuf>(&self, buf: &mut B) -> Result<Option<Vec<T>>, DecodeError> {
match Int32.decode(buf)? {
-1 => Ok(None),
n if n >= 0 => {
let mut result = Vec::with_capacity(n as usize);
for _ in 0..n {
result.push(self.0.decode(buf)?);
}
Ok(Some(result))
},
n => {
error!("Array length is negative ({})", n);
Err(DecodeError)
}
}
}
}
impl<T, E: Decoder<T>> Decoder<Vec<T>> for Array<E> {
fn decode<B: ByteBuf>(&self, buf: &mut B) -> Result<Vec<T>, DecodeError> {
match self.decode(buf) {
Ok(None) => {
error!("Array length is negative (-1)");
Err(DecodeError)
},
Ok(Some(s)) => Ok(s),
Err(e) => Err(e),
}
}
}
impl<K: Eq + Hash, V, E: Decoder<(K, V)>> Decoder<Option<IndexMap<K, V>>> for Array<E> {
fn decode<B: ByteBuf>(&self, buf: &mut B) -> Result<Option<IndexMap<K, V>>, DecodeError> {
match Int32.decode(buf)? {
-1 => Ok(None),
n if n >= 0 => {
let mut result = IndexMap::new();
for _ in 0..n {
let (k, v) = self.0.decode(buf)?;
result.insert(k, v);
}
Ok(Some(result))
},
n => {
error!("Array length is negative ({})", n);
Err(DecodeError)
}
}
}
}
impl<K: Eq + Hash, V, E: Decoder<(K, V)>> Decoder<IndexMap<K, V>> for Array<E> {
fn decode<B: ByteBuf>(&self, buf: &mut B) -> Result<IndexMap<K, V>, DecodeError> {
match self.decode(buf) {
Ok(None) => {
error!("Array length is negative (-1)");
Err(DecodeError)
},
Ok(Some(s)) => Ok(s),
Err(e) => Err(e),
}
}
}
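/// Serializer/deserializer for "compact" arrays: the element count plus one encoded as an
/// unsigned varint (0 denotes null), followed by the encoded elements.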
#[derive(Debug, Copy, Clone)]
pub struct CompactArray<E>(pub E);
impl<T, E: for<'a> Encoder<&'a T>> Encoder<Option<&[T]>> for CompactArray<E> {
fn encode<B: ByteBufMut>(&self, buf: &mut B, value: Option<&[T]>) -> Result<(), EncodeError> {
if let Some(a) = value {
// Use >= because we're going to add one to the length
if a.len() >= std::u32::MAX as usize {
error!("CompactArray is too long to encode ({} items)", a.len());
Err(EncodeError)
} else {
UnsignedVarInt.encode(buf, (a.len() as u32) + 1)?;
for item in a {
self.0.encode(buf, item)?;
}
Ok(())
}
} else {
UnsignedVarInt.encode(buf, 0)?;
Ok(())
}
}
fn compute_size(&self, value: Option<&[T]>) -> Result<usize, EncodeError> {
if let Some(a) = value {
// Use >= because we're going to add one to the length
if a.len() >= std::u32::MAX as usize {
error!("CompactArray is too long to encode ({} items)", a.len());
Err(EncodeError)
} else if let Some(fixed_size) = self.0.fixed_size() {
Ok(UnsignedVarInt.compute_size((a.len() as u32) + 1)? + a.len()*fixed_size)
} else {
let mut total_size = UnsignedVarInt.compute_size((a.len() as u32) + 1)?;
for item in a {
total_size += self.0.compute_size(item)?;
}
Ok(total_size)
}
} else {
Ok(1)
}
}
}
impl<T, E: for<'a> Encoder<&'a T>> Encoder<Option<&Vec<T>>> for CompactArray<E> {
fn encode<B: ByteBufMut>(&self, buf: &mut B, value: Option<&Vec<T>>) -> Result<(), EncodeError> {
self.encode(buf, value.map(|s| s.as_slice()))
}
fn compute_size(&self, value: Option<&Vec<T>>) -> Result<usize, EncodeError> {
self.compute_size(value.map(|s| s.as_slice()))
}
}
impl<T, E: for<'a> Encoder<&'a T>> Encoder<&Option<Vec<T>>> for CompactArray<E> {
fn encode<B: ByteBufMut>(&self, buf: &mut B, value: &Option<Vec<T>>) -> Result<(), EncodeError> {
self.encode(buf, value.as_ref())
}
fn compute_size(&self, value: &Option<Vec<T>>) -> Result<usize, EncodeError> {
self.compute_size(value.as_ref())
}
}
impl<T, E: for<'a> Encoder<&'a T>> Encoder<&[T]> for CompactArray<E> {
fn encode<B: ByteBufMut>(&self, buf: &mut B, value: &[T]) -> Result<(), EncodeError> {
self.encode(buf, Some(value))
}
fn compute_size(&self, value: &[T]) -> Result<usize, EncodeError> {
self.compute_size(Some(value))
}
}
impl<T, E: for<'a> Encoder<&'a T>> Encoder<&Vec<T>> for CompactArray<E> {
fn encode<B: ByteBufMut>(&self, buf: &mut B, value: &Vec<T>) -> Result<(), EncodeError> {
self.encode(buf, Some(value))
}
fn compute_size(&self, value: &Vec<T>) -> Result<usize, EncodeError> {
self.compute_size(Some(value))
}
}
impl<K: Eq + Hash, V, E: for<'a> Encoder<(&'a K, &'a V)>> Encoder<Option<&IndexMap<K, V>>> for CompactArray<E> {
fn encode<B: ByteBufMut>(&self, buf: &mut B, value: Option<&IndexMap<K, V>>) -> Result<(), EncodeError> {
if let Some(a) = value {
// Use >= because we're going to add one to the length
if a.len() >= std::u32::MAX as usize {
error!("CompactArray is too long to encode ({} items)", a.len());
Err(EncodeError)
} else {
UnsignedVarInt.encode(buf, (a.len() as u32) + 1)?;
for item in a {
self.0.encode(buf, item)?;
}
Ok(())
}
} else {
UnsignedVarInt.encode(buf, 0)?;
Ok(())
}
}
fn compute_size(&self, value: Option<&IndexMap<K, V>>) -> Result<usize, EncodeError> {
if let Some(a) = value {
// Use >= because we're going to add one to the length
if a.len() >= std::u32::MAX as usize {
error!("CompactArray is too long to encode ({} items)", a.len());
Err(EncodeError)
} else if let Some(fixed_size) = self.0.fixed_size() {
Ok(UnsignedVarInt.compute_size((a.len() as u32) + 1)? + a.len()*fixed_size)
} else {
let mut total_size = UnsignedVarInt.compute_size((a.len() as u32) + 1)?;
for item in a {
total_size += self.0.compute_size(item)?;
}
Ok(total_size)
}
} else {
Ok(1)
}
}
}
impl<K: Eq + Hash, V, E: for<'a> Encoder<(&'a K, &'a V)>> Encoder<&Option<IndexMap<K, V>>> for CompactArray<E> {
fn encode<B: ByteBufMut>(&self, buf: &mut B, value: &Option<IndexMap<K, V>>) -> Result<(), EncodeError> {
self.encode(buf, value.as_ref())
}
fn compute_size(&self, value: &Option<IndexMap<K, V>>) -> Result<usize, EncodeError> {
self.compute_size(value.as_ref())
}
}
impl<K: Eq + Hash, V, E: for<'a> Encoder<(&'a K, &'a V)>> Encoder<&IndexMap<K, V>> for CompactArray<E> {
fn encode<B: ByteBufMut>(&self, buf: &mut B, value: &IndexMap<K, V>) -> Result<(), EncodeError> {
self.encode(buf, Some(value))
}
fn compute_size(&self, value: &IndexMap<K, V>) -> Result<usize, EncodeError> {
self.compute_size(Some(value))
}
}
impl<T, E: Decoder<T>> Decoder<Option<Vec<T>>> for CompactArray<E> {
fn decode<B: ByteBuf>(&self, buf: &mut B) -> Result<Option<Vec<T>>, DecodeError> {
match UnsignedVarInt.decode(buf)? {
0 => Ok(None),
n => {
let mut result = Vec::with_capacity((n-1) as usize);
for _ in 1..n {
result.push(self.0.decode(buf)?);
}
Ok(Some(result))
},
}
}
}
impl<T, E: Decoder<T>> Decoder<Vec<T>> for CompactArray<E> {
fn decode<B: ByteBuf>(&self, buf: &mut B) -> Result<Vec<T>, DecodeError> {
match self.decode(buf) {
Ok(None) => {
error!("CompactArray length is negative (-1)");
Err(DecodeError)
},
Ok(Some(s)) => Ok(s),
Err(e) => Err(e),
}
}
}
impl<K: Eq + Hash, V, E: Decoder<(K, V)>> Decoder<Option<IndexMap<K, V>>> for CompactArray<E> {
fn decode<B: ByteBuf>(&self, buf: &mut B) -> Result<Option<IndexMap<K, V>>, DecodeError> {
match UnsignedVarInt.decode(buf)? {
0 => Ok(None),
n => {
let mut result = IndexMap::new();
for _ in 1..n {
let (k, v) = self.0.decode(buf)?;
result.insert(k, v);
}
Ok(Some(result))
},
}
}
}
impl<K: Eq + Hash, V, E: Decoder<(K, V)>> Decoder<IndexMap<K, V>> for CompactArray<E> {
fn decode<B: ByteBuf>(&self, buf: &mut B) -> Result<IndexMap<K, V>, DecodeError> {
match self.decode(buf) {
Ok(None) => {
error!("CompactArray length is negative (-1)");
Err(DecodeError)
},
Ok(Some(s)) => Ok(s),
Err(e) => Err(e),
}
}
}
#[cfg(test)]
mod tests {
use super::*;
use std::fmt::Debug;
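    // Round-trip helper: encodes `value`, checks the produced bytes against `expected`, then
    // decodes those bytes and checks the result equals the original value.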
fn test_encoder_decoder<V: PartialEq + Debug, E: for<'a> Encoder<&'a V> + Decoder<V>>(encoder: E, value: V, mut expected: &[u8]) {
let mut buf = vec![];
encoder.encode(&mut buf, &value).unwrap();
assert_eq!(buf, expected);
let decoded = encoder.decode(&mut expected).unwrap();
assert_eq!(value, decoded);
}
#[test]
fn smoke_varint_encoder_decoder() {
test_encoder_decoder(VarInt, 0, &[0]);
test_encoder_decoder(VarInt, -1, &[1]);
test_encoder_decoder(VarInt, 1, &[2]);
test_encoder_decoder(VarInt, -2, &[3]);
test_encoder_decoder(VarInt, 300, &[216, 4]);
test_encoder_decoder(VarInt, std::i32::MAX, &[254, 255, 255, 255, 15]);
test_encoder_decoder(VarInt, std::i32::MIN, &[255, 255, 255, 255, 15]);
}
#[test]
fn smoke_varlong_encoder_decoder() {
test_encoder_decoder(VarLong, 0, &[0]);
test_encoder_decoder(VarLong, -1, &[1]);
test_encoder_decoder(VarLong, 1, &[2]);
test_encoder_decoder(VarLong, -2, &[3]);
test_encoder_decoder(VarLong, 300, &[216, 4]);
test_encoder_decoder(VarLong, std::i64::MAX, &[254, 255, 255, 255, 255, 255, 255, 255, 255, 1]);
test_encoder_decoder(VarLong, std::i64::MIN, &[255, 255, 255, 255, 255, 255, 255, 255, 255, 1]);
}
#[test]
fn smoke_string_encoder_decoder() {
test_encoder_decoder(String, std::string::String::from("hello"), &[0, 5, 104, 101, 108, 108, 111]);
test_encoder_decoder(String, None::<std::string::String>, &[255, 255]);
}
#[test]
fn smoke_compact_string_encoder_decoder() {
test_encoder_decoder(CompactString, std::string::String::from("hello"), &[6, 104, 101, 108, 108, 111]);
test_encoder_decoder(CompactString, None::<std::string::String>, &[0]);
}
#[test]
fn smoke_bytes_encoder_decoder() {
test_encoder_decoder(Bytes, vec![1,2,3,4], &[0, 0, 0, 4, 1, 2, 3, 4]);
test_encoder_decoder(Bytes, None::<Vec<u8>>, &[255, 255, 255, 255]);
}
#[test]
fn smoke_compact_bytes_encoder_decoder() {
test_encoder_decoder(CompactBytes, vec![1,2,3,4], &[5, 1, 2, 3, 4]);
test_encoder_decoder(CompactBytes, None::<Vec<u8>>, &[0]);
}
}
| true | cf2133479be952500d772834d9b968ddf9b493db | Rust | HaronK/aoc2019 | /task17_1/src/main.rs | UTF-8 | 1,234 | 3.046875 | 3 | ["MIT"] | permissive |
use crate::robot::*;
use anyhow::{anyhow, Result};
use common::log::*;
use std::fs::File;
use std::io::{prelude::*, BufReader};
mod robot;
fn main() -> Result<()> {
let log = Log::new(false);
let file = File::open("input.txt")?;
let reader = BufReader::new(file);
let prog_str = reader
.lines()
        .next()
.ok_or_else(|| anyhow!("ERROR: Cannot read program string."))??;
let mut robot = Robot::new(&prog_str, &log)?;
robot.camera_scan()?;
// robot.show()?;
let intersections = robot.get_intersections();
println!("Intersections: {:?}", intersections);
let calibration: usize = intersections.iter().map(|p| p.x * p.y).sum();
println!("Calibration: {}", calibration);
robot.wake_up();
let dust_count = robot.move_robot(
"A,A,B,C,C,A,B,C,A,B",
"L,12,L,12,R,12",
"L,8,L,8,R,12,L,8,L,8",
"L,10,R,8,R,12",
)?;
println!("Dust count: {}", dust_count);
Ok(())
}
// Manually calculated :)
// L12,L12,R12,L12,L12,R12,L8,L8,R12,L8,L8,L10,R8,R12,L10,R8,R12,L12,L12,R12,L8,L8,R12,L8,L8,L10,R8,R12,L12,L12,R12,L8,L8,R12,L8,L8
// A,A,B,C,C,A,B,C,A,B
// A: L,12,L,12,R,12
// B: L,8,L,8,R,12,L,8,L,8
// C: L,10,R,8,R,12
| true | baf54efb8f2e86e00336d4311aa1adf755d341a9 | Rust | clap-rs/thunder | /examples/thor.rs | UTF-8 | 861 | 3.328125 | 3 | [] | no_license |
//! Thor is the god of thunder
#![feature(proc_macro)]
extern crate clap;
extern crate thunder;
use thunder::thunderclap;
struct Thor;
/// An application that shoots lightning out of its hands
#[thunderclap(drunk: bool: "Bla bla bla")]
impl Thor {
/// Say hello to someone at home
fn hello(name: &str, times: Option<u128>) {
(0..times.unwrap_or(1)).for_each(|_| {
println!("Hello {}!", name);
});
}
// /// Say goodbye. Or don't, if you're shy
// fn bye(name: Option<&str>) {
// println!("Not saying bye is rude: {:?}", name);
// }
// /// Thor will rudely comment on your age
// fn aged(age: Option<i128>) {
// println!("Ha, look at you being: {:?}", age);
// }
// /// Prints 'bar'
// fn foo() {
// println!("bar");
// }
}
fn main() {
Thor::start();
}
| true | b9f50448f1cfcca977908f4f4319b8e6fc3c401c | Rust | jlgerber/jobsyspolice | /src/jspt/parser/node.rs | UTF-8 | 16,692 | 2.953125 | 3 | [] | no_license |
use nom::{
IResult,
branch::alt,
sequence::{tuple,preceded, delimited},
bytes::complete::{tag},
combinator::{ map, },
character::complete::{char, space0, multispace0, },
};
use crate::jspt::helpers::*;
use crate::jspt::{Node, ParseResult, parse_metadata};
/// Parses a Node given an input str. The parser is composed of a number
/// of alternative parsers targeting specific types of nodes.
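/// Accepted forms are a bare variable, a `name = value` pair, an environment variable
/// reference (`name = $$VAR`), a named regex reference (`name = $regex`), a single quoted
/// regex, or a positive/negative pair of quoted regexes, each optionally followed by
/// `[metadata]`.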
pub fn parse_node(input: &str) -> IResult<&str, ParseResult> {
alt((
parse_node_pair,
parse_node_envvar,
parse_node_revar,
parse_node_regexcomplex,
parse_node_regexsimple,
parse_node_simple,
))
(input)
}
#[cfg(test)]
mod parse_node {
use super::*;
//use nom::error::ErrorKind;
#[test]
fn can_parse_simple() {
let result = parse_node(r#" rd "#);
assert_eq!(result, Ok( ("", ParseResult::Node(Node::Simple("rd".to_string(), None)) ) ) );
}
#[test]
fn can_parse_node_pair() {
let result = parse_node(r#"rd = RD "#);
assert_eq!(result, Ok( ("", ParseResult::Node(Node::new_pair("rd", "RD", None)) ) )) ;
}
#[test]
fn can_parse_node_envvar() {
let result = parse_node(r#"rd = $$rdexpr "#);
assert_eq!(result, Ok( ("", ParseResult::Node(Node::new_envvar("rd", "rdexpr",None)) )) ) ;
}
#[test]
fn can_parse_node_revar() {
let result = parse_node(r#"rd = $rdexpr "#);
assert_eq!(result, Ok( ("", ParseResult::Node(Node::new_revar("rd", "rdexpr",None)) )) ) ;
}
#[test]
fn can_parse_node_regexsimple() {
let result = parse_node(r#"rd = "(foo|bar)" "#);
assert_eq!(result, Ok( ("", ParseResult::Node(Node::new_regexsimple("rd", "(foo|bar)", None))) ) ) ;
}
#[test]
fn can_parse_node_regexcomplex() {
let result = parse_node_regexcomplex(r#"rd = "(foo|bar)" "(bla|mange)" "#);
assert_eq!(result, Ok( ("", ParseResult::Node(Node::new_regexcomplex("rd", "(foo|bar)", "(bla|mange)", None )) ) )) ;
}
}
fn parse_node_simple(input: &str) -> IResult<&str, ParseResult> {
alt((
parse_node_simple_meta,
parse_node_simple_nometa,
))(input)
}
// parse a simple node without metadata
// EG
// rd
fn parse_node_simple_nometa(input: &str) -> IResult<&str, ParseResult> {
map (
delimited( space0, variable, space0),
| item| {
ParseResult::Node(Node::Simple(item.to_string(), None))
}
)
(input)
}
// parse the simple node with metadata
// EG
//rd_node = rd [volume, owner:jgerber ]
fn parse_node_simple_meta(input: &str) -> IResult<&str, ParseResult> {
map (
tuple((
preceded( space0, variable),
parse_metadata
)),
| item| {
let (var, meta) = item;
let meta = if meta.is_empty() {None} else {Some(meta)};
ParseResult::Node(Node::Simple(var.to_string(), meta))
}
)
(input)
}
#[cfg(test)]
mod parse_node_simple {
use super::*;
//use nom::error::ErrorKind;
use crate::jspt::JsptMetadata;
#[test]
fn can_parse_node_simple() {
let result = parse_node_simple(r#" rd"#);
assert_eq!(result, Ok( ("", ParseResult::Node(Node::Simple("rd".to_string(), None)) ) ) ) ;
}
#[test]
fn can_parse_node_simple_meta() {
let result = parse_node_simple(r#" rd [ volume ] "#);
let md = JsptMetadata::new().set_volume(true);
assert_eq!(
result,
Ok((
"",
ParseResult::Node(
Node::Simple(
"rd".to_string(),
Some(md)
)) ) ) ) ;
}
}
// parse a node pair - that is:
// rd_node = rd
fn parse_node_pair(input: &str) -> IResult<&str, ParseResult> {
alt((
parse_node_pair_meta,
parse_node_pair_nometa,
))(input)
}
// parse a node pair without metadata
// EG
// rd_node = rd
fn parse_node_pair_nometa(input: &str) -> IResult<&str, ParseResult> {
map (
tuple((
preceded(space0, variable),
preceded(space0, char('=')),
delimited( space0, variable, multispace0)
)),
| item| {
let (var,_,val) = item ;
ParseResult::Node(Node::new_pair(var, val, None))
}
)
(input)
}
// parse a node pair with metadata
// EG
// rd_node = rd [ owner: foobar ]
fn parse_node_pair_meta(input: &str) -> IResult<&str, ParseResult> {
map (
tuple((
preceded(space0, variable),
preceded(space0, char('=')),
preceded( space0, variable) ,
parse_metadata,
)),
| item| {
let (var,_,val, meta) = item ;
let meta = if meta.is_empty() {None} else {Some(meta)};
ParseResult::Node(Node::new_pair(var, val, meta))
}
)
(input)
}
#[cfg(test)]
mod parse_node_pair {
use super::*;
//use nom::error::ErrorKind;
use crate::jspt::JsptMetadata;
#[test]
fn can_parse_node_pair() {
let result = parse_node_pair(r#"rd = RD "#);
assert_eq!(result, Ok( ("", ParseResult::Node(Node::new_pair("rd", "RD", None)) )) ) ;
}
#[test]
fn can_parse_node_pair_meta() {
let md = JsptMetadata::new().set_volume(true).set_owner(Some("jgerber"));
let result = parse_node_pair(r#"rd = RD [volume, owner:jgerber ]"#);
assert_eq!(result, Ok( ("", ParseResult::Node(Node::new_pair("rd", "RD", Some(md))) )) ) ;
}
}
// parse a Node::EnvVar from input, with or without metadata.
// eg
// rd = $$rd
fn parse_node_envvar(input: &str) -> IResult<&str, ParseResult> {
alt((
parse_node_envvar_meta,
parse_node_envvar_nometa,
))
(input)
}
// parse env variable node without metadata. The node references an environment variable.
// EG
// `rd_node = $$DD_RND`
fn parse_node_envvar_nometa(input: &str) -> IResult<&str, ParseResult> {
map (
tuple((
// drops zero or more spaces in front of a variable (upper lower case number _-)
preceded(space0, variable),
// drop zero or more spaces in front of '='
preceded(space0, char('=')),
// drop zero or more spaces around variable preceded by $$ and drop zero or more spaces and returns
delimited( space0, preceded(tag("$$"),variable), multispace0)
)),
| item| {
let (var,_,val) = item ;
ParseResult::Node( Node::new_envvar(var, val, None))
}
)
(input)
}
// Parse an envvar variable node with metadata from a &str.
// EG
// rd = $$rd [volume]
fn parse_node_envvar_meta(input: &str) -> IResult<&str, ParseResult> {
map (
tuple((
// drops zero or more spaces in front of a variable (upper lower case number _-)
preceded(space0, variable),
// drop zero or more spaces in front of '='
preceded(space0, char('=')),
            // drop zero or more spaces around variable preceded by $$ and drop zero or more spaces and returns
delimited( space0, preceded(tag("$$"),variable), space0),
parse_metadata,
)),
| item| {
let (var,_,val, meta) = item ;
let meta = if meta.is_empty() {None} else {Some(meta)};
ParseResult::Node( Node::new_envvar(var, val, meta))
}
)
(input)
}
#[cfg(test)]
mod parse_node_envvar {
use super::*;
//use nom::error::ErrorKind;
#[test]
fn can_parse_node_envvar() {
let result = parse_node_envvar(r#"rd = $$rdexpr "#);
assert_eq!(result, Ok( ("", ParseResult::Node(Node::new_envvar("rd", "rdexpr", None)) ) )) ;
}
#[test]
    fn can_parse_node_envvar_with_return() {
let result = parse_node_envvar(r#" rd = $$rdexpr
"#);
assert_eq!(result, Ok( ("", ParseResult::Node(Node::new_envvar("rd", "rdexpr", None) )) ) );
}
}
// parse a Node::ReVar from input, with or without metadata.
// eg
// rd = $rd
fn parse_node_revar(input: &str) -> IResult<&str, ParseResult> {
alt((
parse_node_revar_meta,
parse_node_revar_nometa,
))
(input)
}
// parse revar variable node without metadata. regex node references a named regex.
// EG
// `rd_node = $rd`
fn parse_node_revar_nometa(input: &str) -> IResult<&str, ParseResult> {
map (
tuple((
// drops zero or more spaces in front of a variable (upper lower case number _-)
preceded(space0, variable),
// drop zero or more spaces in front of '='
preceded(space0, char('=')),
// drop zero or more spaces around variable preceded by $ and drop zero or more spaces and returns
delimited( space0, preceded(tag("$"),variable), multispace0)
)),
| item| {
let (var,_,val) = item ;
ParseResult::Node( Node::new_revar(var, val, None))
}
)
(input)
}
// Parse a revar variable node with metadata from a &str.
// EG
// rd = $rd [volume]
fn parse_node_revar_meta(input: &str) -> IResult<&str, ParseResult> {
map (
tuple((
// drops zero or more spaces in front of a variable (upper lower case number _-)
preceded(space0, variable),
// drop zero or more spaces in front of '='
preceded(space0, char('=')),
// drop zero or more spaces around variable preceded by $ and drop zero or more spaces and returns
delimited( space0, preceded(tag("$"),variable), space0),
parse_metadata,
)),
| item| {
let (var,_,val, meta) = item ;
let meta = if meta.is_empty() {None} else {Some(meta)};
ParseResult::Node( Node::new_revar(var, val, meta))
}
)
(input)
}
#[cfg(test)]
mod parse_node_revar {
use super::*;
//use nom::error::ErrorKind;
use crate::jspt::metadata::JsptMetadata;
#[test]
fn can_parse_node_revar() {
let result = parse_node_revar(r#"rd = $rdexpr "#);
assert_eq!(result, Ok( ("", ParseResult::Node(Node::new_revar("rd", "rdexpr", None)) ) )) ;
}
#[test]
fn can_parse_node_revar_meta() {
let result = parse_node_revar(r#"rd = $rdexpr [perms:777]"#);
let md = JsptMetadata::new().set_permissions(Some("777"));
assert_eq!(result, Ok( ("", ParseResult::Node(Node::new_revar("rd", "rdexpr", Some(md) )) ) )) ;
}
#[test]
fn can_parse_node_revar_meta_navalias() {
let result = parse_node_revar(r#"rd = $rdexpr [navalias:cs]"#);
let md = JsptMetadata::new().set_navalias(Some(("cs", None)));
assert_eq!(result, Ok( ("", ParseResult::Node(Node::new_revar("rd", "rdexpr", Some(md) )) ) )) ;
}
#[test]
    fn can_parse_node_revar_with_return() {
let result = parse_node_revar(r#" rd = $rdexpr
"#);
assert_eq!(result, Ok( ("", ParseResult::Node(Node::new_revar("rd", "rdexpr", None) )) ) );
}
}
// parse a simple regex node from input with or without metadata.
// eg
// rd_node = "(foo|bar)" [volume]
fn parse_node_regexsimple(input: &str) -> IResult<&str, ParseResult> {
alt((
parse_node_regexsimple_meta,
parse_node_regexsimple_nometa,
))
(input)
}
// parse a simple regex node without metadata. The node holds a single quoted regex.
// `rd_node = "(foo|bar)"`
fn parse_node_regexsimple_nometa(input: &str) -> IResult<&str, ParseResult> {
map (
tuple((
// drops zero or more spaces in front of a variable (upper lower case number _-)
preceded(space0, variable),
// drop zero or more spaces in front of '='
preceded(space0, char('=')),
            // drop zero or more spaces around a quoted regex and drop zero or more spaces and returns
delimited( space0, quoted_regex_str, multispace0)
)),
| item| {
let (var,_,val) = item ;
ParseResult::Node(Node::new_regexsimple(var, val, None))
}
)
(input)
}
// parse a simple regex node with metadata
// eg
// rd = "(foo|bar)" [owner: jgerber]
fn parse_node_regexsimple_meta(input: &str) -> IResult<&str, ParseResult> {
map (
tuple((
// drops zero or more spaces in front of a variable (upper lower case number _-)
preceded(space0, variable),
// drop zero or more spaces in front of '='
preceded(space0, char('=')),
            // drop zero or more spaces around a quoted regex and drop zero or more spaces and returns
delimited( space0, quoted_regex_str, space0),
parse_metadata
)),
| item| {
let (var,_, val, meta) = item ;
let meta = if meta.is_empty() {None} else {Some(meta)};
ParseResult::Node(Node::new_regexsimple(var, val, meta))
}
)
(input)
}
#[cfg(test)]
mod parse_node_regexsimple {
use super::*;
//use nom::error::ErrorKind;
#[test]
fn can_parse_node_regexsimple() {
let result = parse_node_regexsimple(r#"rd = "(foo|bar)" "#);
assert_eq!(result, Ok( ("", ParseResult::Node(Node::new_regexsimple("rd", "(foo|bar)", None)) ) )) ;
}
#[test]
    fn can_parse_node_regexsimple_with_return() {
let result = parse_node_regexsimple(r#" rd = "[a-zA-Z0-1_-]"
"#);
assert_eq!(result, Ok( ("", ParseResult::Node(Node::new_regexsimple("rd", "[a-zA-Z0-1_-]", None) ) )) );
}
}
// parse a complex regex node from a &str input with or without metadata.
fn parse_node_regexcomplex(input: &str) -> IResult<&str, ParseResult> {
alt((
parse_node_regexcomplex_meta,
parse_node_regexcomplex_nometa
))
(input)
}
// parse a complex regex node without metadata. The node holds a positive and a negative quoted regex.
// EG
// `rd_node = "(foo|bar)" "(bla|mange)"`
fn parse_node_regexcomplex_nometa(input: &str) -> IResult<&str, ParseResult> {
map (
tuple((
// drops zero or more spaces in front of a variable (upper lower case number _-)
preceded(space0, variable),
// drop zero or more spaces in front of '='
preceded(space0, char('=')),
            // drop zero or more spaces around a quoted regex and drop zero or more spaces and returns
preceded( space0, quoted_regex_str),
delimited( space0, quoted_regex_str, multispace0)
)),
| item| {
let (var,_,pos, neg) = item ;
ParseResult::Node( Node::new_regexcomplex(var, pos, neg, None))
}
)
(input)
}
// parse a complex regex node with metadata from an input &str.
// eg
// rd_node = "(foo|bar)" "(bla|mange)" [volume]
fn parse_node_regexcomplex_meta(input: &str) -> IResult<&str, ParseResult> {
map (
tuple((
// drops zero or more spaces in front of a variable (upper lower case number _-)
preceded(space0, variable),
// drop zero or more spaces in front of '='
preceded(space0, char('=')),
            // drop zero or more spaces around a quoted regex and drop zero or more spaces and returns
preceded( space0, quoted_regex_str),
delimited( space0, quoted_regex_str, space0),
parse_metadata
)),
| item| {
let (var,_,pos, neg, meta) = item ;
let meta = if meta.is_empty() {None} else {Some(meta)};
ParseResult::Node( Node::new_regexcomplex(var, pos, neg, meta))
}
)
(input)
}
#[cfg(test)]
mod parse_node_regexcomplex {
use super::*;
//use nom::error::ErrorKind;
#[test]
fn can_parse_node_regexcomplex() {
let result = parse_node_regexcomplex(r#"rd = "(foo|bar)" "(bla|mange)" "#);
assert_eq!(result, Ok( ("", ParseResult::Node(Node::new_regexcomplex("rd", "(foo|bar)", "(bla|mange)", None )) ) )) ;
}
#[test]
    fn can_parse_node_regexcomplex_with_return() {
let result = parse_node_regexcomplex(r#" rd = "[a-zA-Z0-1_-]" "(bla|mange)"
"#);
assert_eq!(result, Ok( ("", ParseResult::Node(Node::new_regexcomplex("rd", "[a-zA-Z0-1_-]","(bla|mange)", None) )) ) );
}
}
| true | 87e232e90c3d371ba12a1692e6de210963065d58 | Rust | AdelaideAuto-IDLab/bTracked | /tracking/src/filter_runner.rs | UTF-8 | 8,601 | 2.6875 | 3 | ["LicenseRef-scancode-unknown-license-reference", "MIT"] | permissive |
use glm::{self, vec3, Vec3};
use rand::{self, Rng, FromEntropy, rngs::SmallRng, distributions::StandardNormal};
use stats;
use particle_filter::{Filter, ParticleFilter};
use {
signal::MeasurementModel,
geometry::World,
distance_field::DistanceField,
util, TrackingConfig, FilterConfig, ModelConfig, Measurement
};
#[derive(Copy, Clone, Eq, PartialEq, Serialize, Deserialize)]
pub enum ParticleMode {
Stationary,
Moving,
}
#[derive(Copy, Clone, Serialize, Deserialize)]
pub struct Particle {
pub position: Vec3,
pub velocity: Vec3,
pub pose: f32,
pub turn_rate: f32,
pub mode: ParticleMode,
}
struct SystemNoise {
position: Vec3,
velocity: Vec3,
turn_rate: f32,
pose: f32,
}
impl SystemNoise {
fn gen(rng: &mut SmallRng, config: &ModelConfig, dt: f32) -> SystemNoise {
let mut randn = |stddev: f32| rng.sample(StandardNormal) as f32 * stddev;
SystemNoise {
position: vec3(
0.5 * dt * dt * randn(config.kinematic_noise),
0.5 * dt * dt * randn(config.kinematic_noise),
0.0,
),
velocity: vec3(
dt * randn(config.kinematic_noise),
dt * randn(config.kinematic_noise),
0.0,
),
turn_rate: dt * dt * randn(config.turn_rate_noise),
pose: dt * randn(config.pose_noise),
}
}
}
struct MotionModel {
distance_field: DistanceField,
stationary: ModelConfig,
motion: ModelConfig,
speed: f32,
}
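// Advance a single particle by one time step `dt`: stationary particles only accumulate
// noise and may transition to moving, while moving particles follow a constant-turn-rate
// kinematic model, are pushed away from walls via the distance field, and may transition
// back to stationary.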
fn propagate(model: &MotionModel, rng: &mut SmallRng, particle: Particle, dt: f32) -> Particle {
let Particle { mut position, mut velocity, mut pose, mut turn_rate, mut mode } = particle;
match mode {
ParticleMode::Stationary => {
let noise = SystemNoise::gen(rng, &model.stationary, dt);
position += noise.position;
turn_rate += noise.turn_rate;
pose = util::wrap_angle(pose + noise.pose + dt * turn_rate);
if rng.gen::<f32>() < model.stationary.transition_prob * dt {
mode = ParticleMode::Moving;
velocity = util::rand_velocity(model.speed);
// velocity = model.speed * vec3(pose.cos(), pose.sin(), 0.0);
}
},
ParticleMode::Moving => {
let noise = SystemNoise::gen(rng, &model.motion, dt);
let repulsion = {
let [x, y] = model.distance_field.query(position.x, position.y);
dt * vec3(x, y, 0.0)
};
if turn_rate == 0.0 {
// Prevent degeneracy due to floating point division by zero.
// (Note: limit x -> 0: sin(x) / x == 1)
turn_rate = 0.00001;
}
let motion_model = (1.0 / turn_rate) * glm::mat3(
(dt * turn_rate).sin(), (dt * turn_rate).cos() - 1.0, 0.0,
1.0 - (dt * turn_rate).cos(), (dt * turn_rate).sin(), 0.0,
0.0, 0.0, 0.0,
);
position += motion_model * velocity - repulsion + noise.position;
velocity = glm::rotate_z_vec3(&velocity, dt * turn_rate) - repulsion + noise.velocity;
turn_rate += noise.turn_rate;
let delta_p = position - particle.position;
pose = delta_p.y.atan2(delta_p.x);
if rng.gen::<f32>() < model.motion.transition_prob * dt {
mode = ParticleMode::Stationary;
}
}
}
Particle { position, velocity, pose, turn_rate, mode }
}
type BoxFilter = Box<dyn Filter<Particle=Particle, Measurement=[Measurement]>>;
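/// Owns the boxed particle filter together with its configuration and the scaled world bounds.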
pub struct FilterRunner {
filter: BoxFilter,
config: FilterConfig,
width: f32,
height: f32,
}
impl FilterRunner {
pub fn new(tracking_config: &TrackingConfig, distance_field: Option<DistanceField>) -> FilterRunner {
let TrackingConfig { ref filter, ref geometry } = tracking_config;
let scale = geometry.scale;
let width = geometry.boundary.width / scale;
let height = geometry.boundary.height / scale;
let particles = generate_initial_particles(width, height, filter);
let model = MotionModel {
distance_field: {
distance_field.unwrap_or_else(|| DistanceField::new(&World::new(geometry), 100.0))
},
stationary: filter.stationary,
motion: filter.motion,
speed: filter.speed,
};
let mut rng = SmallRng::from_entropy();
let propagation_fn = move |particle: Particle, dt: f32| {
propagate(&model, &mut rng, particle, dt)
};
let noise_fn = move |particle: Particle, _dt: f32| {
particle
};
let measurement_model = MeasurementModel::new(filter.signal.clone());
let weight_fn = move |particle: Particle, measurements: &[Measurement]| {
let x = particle.position.x;
let y = particle.position.y;
if x < 0.0 || x > width || y < 0.0 || y > height {
// Particle is out of bounds
return 0.0;
}
measurement_model.probability(measurements, particle)
};
let particle_filter = ParticleFilter::new(particles, propagation_fn, noise_fn, weight_fn);
FilterRunner {
filter: Box::new(particle_filter) as BoxFilter,
config: filter.clone(),
width,
height,
}
}
pub fn step(&mut self, measurements: &[Measurement], dt: f32) -> f32 {
let weight = self.filter.step(measurements, dt);
if weight < self.config.reinit_threshold {
self.reinitialize();
return 1.0;
}
weight
}
pub fn reinitialize(&mut self) {
let particles = generate_initial_particles(self.width, self.height, &self.config);
self.filter.merge_particles(&particles, self.config.reinit_ratio);
}
pub fn get_estimate(&self) -> Particle {
let particles = self.filter.get_particles();
// Taking the median of each of the values individually isn't really correct here, what we
// really want is the cluster centroid, but that is more costly to compute and in practice
// the median works fairly well.
macro_rules! median_of {
($x:expr) => {
stats::median(particles.iter().map($x).map(|x| x as f64)).unwrap() as f32
}
};
let x = median_of!(|p| p.position.x);
let y = median_of!(|p| p.position.y);
let dx = median_of!(|p| p.velocity.x);
let dy = median_of!(|p| p.velocity.y);
let pose = median_of!(|p| p.pose);
let turn_rate = median_of!(|p| p.turn_rate);
let stationary = particles.iter().filter(|p| p.mode == ParticleMode::Stationary).count();
let mode = match stationary > particles.len() / 2 {
true => ParticleMode::Stationary,
false => ParticleMode::Moving,
};
Particle {
position: vec3(x, y, 0.0),
velocity: vec3(dx, dy, 0.0),
pose,
turn_rate,
mode
}
}
pub fn get_snapshot(&self, num_particles: usize) -> Vec<Particle> {
let particles = self.filter.get_particles();
if num_particles >= particles.len() {
return particles.into();
}
let mut rng = rand::thread_rng();
rand::seq::sample_slice(&mut rng, particles, num_particles)
}
}
/// Generate an initial particle distribution based on the provided config
fn generate_initial_particles(width: f32, height: f32, config: &FilterConfig) -> Vec<Particle> {
let mut rng = rand::thread_rng();
(0..config.num_particles).map(|_| {
let position = {
let base = rng.gen::<[f32; 2]>();
glm::vec3(base[0] * width, base[1] * height, 0.0)
};
let velocity = util::rand_velocity(config.speed);
let mode = match rng.gen::<bool>() {
true => ParticleMode::Stationary,
false => ParticleMode::Moving,
};
let turn_rate = rng.sample(StandardNormal) as f32 * config.turn_rate_stddev + config.turn_rate_mean;
Particle {
position,
velocity,
pose: velocity[1].atan2(velocity[0]),
turn_rate: if rng.gen::<bool>() { -1.0 * turn_rate } else { turn_rate },
mode,
}
}).collect()
}
| true | 32a2884d4af3fdc6bb2b5eb796caaecfe5432f5f | Rust | barneyb/rust_playground | /src/aoc_2019_01.rs | UTF-8 | 1,170 | 3.421875 | 3 | [] | no_license |
use crate::cli;
use crate::fs;
pub fn run() {
let masses: Vec<usize> = fs::read_lines(cli::aoc_filename("aoc_2019_01.txt"), |l| {
l.parse::<usize>().unwrap()
})
.unwrap();
let fuel: usize = masses.iter().map(needed_fuel).sum();
println!("Fuel needed: {}", fuel);
let fuel: usize = masses.iter().map(actually_needed_fuel).sum();
println!("Fuel ACTUALLY needed: {}", fuel);
}
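// Part one: fuel required for a module is floor(mass / 3) - 2.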
fn needed_fuel(mass: &usize) -> usize {
mass / 3 - 2
}
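// Part two: the fuel itself has mass, so keep adding fuel for the previously added fuel
// until the next increment would be zero (a mass of 8 or less needs no extra fuel).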
fn actually_needed_fuel(mass: &usize) -> usize {
let mut curr = needed_fuel(mass);
let mut total = curr;
while curr > 8 {
curr = needed_fuel(&curr);
total += curr;
}
total
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn part_one_mass_cases() {
assert_eq!(needed_fuel(&12), 2);
assert_eq!(needed_fuel(&14), 2);
assert_eq!(needed_fuel(&1969), 654);
assert_eq!(needed_fuel(&100756), 33583);
}
#[test]
fn part_two_mass_cases() {
assert_eq!(actually_needed_fuel(&14), 2);
assert_eq!(actually_needed_fuel(&1969), 966);
assert_eq!(actually_needed_fuel(&100756), 50346);
}
}
| true | c5739e58efa8263f3dba3ab7dfcd3f9f29f5edb5 | Rust | rust-lang-ja/rust-by-example-ja | /src-old/error/result_map/result.rs | UTF-8 | 1,147 | 4.09375 | 4 | ["MIT", "Apache-2.0", "LicenseRef-scancode-unknown-license-reference"] | permissive |
use std::num::ParseIntError;
// We rewrote the return type and switched from `unwrap()` to pattern matching,
// but it is still a little cluttered. Can it be cleaned up the same way
// we did for `Option`? The answer is yes.
fn double_number(number_str: &str) -> Result<i32, ParseIntError> {
match number_str.parse::<i32>() {
Ok(n) => Ok(2 * n),
Err(e) => Err(e),
}
}
// Exactly the same behaviour as above, this time written with `map()`.
// Change the value only when it can be parsed; otherwise pass the error along.
fn double_number_map(number_str: &str) -> Result<i32, ParseIntError> {
number_str.parse::<i32>().map(|n| 2 * n)
}
fn print(result: Result<i32, ParseIntError>) {
match result {
Ok(n) => println!("n is {}", n),
Err(e) => println!("Error: {}", e),
}
}
fn main() {
    // As before, this prints the expected value without any problem.
let twenty = double_number("10");
print(twenty);
    // Far better than the earlier `panic` output.
let tt = double_number_map("t");
print(tt);
}
| true | 02c698ae58a0d2b9405f871fe410ec2aa3a59791 | Rust | moon-cat-liquid/link | /src/lib.rs | UTF-8 | 21,498 | 3.90625 | 4 | [] | no_license |
//! A singly linked list implemented in Rust; it provides the basic operations of a collection.
/// The linked list struct
#[derive(Clone)]
pub struct Link<T> (Option<Box<Node<T>>>);
///节点结构体
#[derive(Clone)]
pub struct Node<T> {
pub value: T,
next: Link<T>,
}
impl<T> Node<T> {
    /// Create a node
fn new(value:T, data: Option<Box<Self>>) -> Self {
Self {value, next: Link::from(data)}
}
    /// Convert this node into a link
    /// # Example
/// ```
/// use link::*;
/// let l:Link<usize> = link![1,2,3,4];
/// let a = l.get(1).unwrap().clone();
/// assert_eq!(a.as_link(), link![2,3,4]);
/// ```
pub fn as_link(self) -> Link<T> {
Link(Some(Box::new(self)))
}
    /// Get an immutable reference to the next node
    /// # Example
/// ```
/// use link::*;
/// let l: Link<usize> = link![1,2,3];
/// let node = l.get(1).unwrap();
/// assert_eq!(node.next().unwrap().value, 3);
/// ```
pub fn next(&self) -> Option<&Self> {
self.next.0.as_ref().map(|n| n.as_ref())
}
    /// Skip ahead over `n` nodes
    /// # Example
/// ```
/// use link::*;
/// let l: Link<usize> = link![1,2,3,4];
/// let mut node: &Node<usize> = l.get(0).unwrap();
/// assert_eq!(node.value, 1);
/// node = node.skip(1).unwrap();
/// assert_eq!(node.value, 2);
/// node = node.skip(2).unwrap();
/// assert_eq!(node.value, 4);
/// ```
pub fn skip(&self, n: usize) -> Option<&Self> {
let mut node = self;
for _ in 0..n {
node = node.next.0.as_ref()?;
}
Some(node)
}
    /// Get a mutable reference to the next node
    /// # Example
/// ```
/// use link::*;
/// let mut l: Link<usize> = link![1,2,3];
/// let node = l.get_mut(1).unwrap();
/// node.next_mut().unwrap().value = 4;
/// assert_eq!(l, link![1,2,4]);
/// ```
pub fn next_mut(&mut self) -> Option<&mut Self> {
self.next.0.as_mut().map(|n| n.as_mut())
}
    /// Skip ahead over `n` nodes, returning a mutable reference
    /// # Example
/// ```
/// use link::*;
/// let mut l: Link<usize> = link![1,2,3,4];
/// let node: &mut Node<usize> = l.get_mut(1).unwrap();
/// node.skip_mut(1).unwrap().value = 5;
/// assert_eq!(l, link![1,2,5,4]);
/// ```
pub fn skip_mut(self: &mut Self, n: usize) -> Option<&mut Self>{
let mut node = self;
for _ in 0..n {
node = node.next.0.as_mut()?;
}
Some(node)
}
    /// Insert a new node with `value` right after this one
    /// # Example
/// ```
/// use link::*;
/// let mut l: Link<usize> = link![1,2,3];
/// let node = l.get_mut(1).unwrap();
/// node.insert_next(4);
/// assert_eq!(l, link![1,2,4,3]);
/// ```
pub fn insert_next(&mut self, value: T) -> &mut Self{
let n = Node::new(value, self.next.0.take());
self.next = Link::from(Some(Box::new(n)));
self.next.0.as_mut().unwrap()
}
    /// Remove the node after this one and return its value
    /// # Example
/// ```
/// use link::*;
/// let mut l: Link<usize> = link![1,2,3];
/// let node = l.get_mut(1).unwrap();
/// node.pop_next();
/// assert_eq!(l, link![1,2]);
/// ```
pub fn pop_next(&mut self) -> Option<T> {
let n = self.next.0.take()?;
self.next = n.next;
Some(n.value)
}
}
impl<T> Link<T> {
    /// Create a link from a node
fn from(data: Option<Box<Node<T>>>) -> Self {
Link(data)
}
    /// Get a mutable reference to the last node of the list
fn end_node(mut node: &mut Box<Node<T>>) -> &mut Box<Node<T>> {
while let Some(ref mut t) = node.next.0 {
node = t;
}
node
}
    /// Get an immutable reference to the node at a given position in the list
    /// # Arguments
    /// i: index of the target node relative to the head node
    /// # Returns
    /// Option<&Box<Node<T>>>: Some(immutable reference to the target node), or None when the node cannot be reached (index out of range)
pub fn get(&self, i: usize) -> Option<&Box<Node<T>>> {
let mut node = self.0.as_ref()?;
for _ in 0..i {
node = node.next.0.as_ref()?;
}
Some(node)
}
    /// Get a mutable reference to the node at a given position in the list
    /// # Arguments
    /// i: index of the target node relative to the head node
    /// # Returns
    /// Option<&mut Box<Node<T>>>: Some(mutable reference to the target node), or None when the node cannot be reached (index out of range)
pub fn get_mut(&mut self, i: usize) -> Option<&mut Box<Node<T>>> {
let mut node = self.0.as_mut()?;
for _ in 0..i {
node = node.next.0.as_mut()?;
}
Some(node)
}
    /// Panic with an out-of-range error for the list
fn out_of_range(index: usize) -> ! {
panic!("index {} out of range for Link", index);
}
    /// Create an empty list
    /// # Example
/// ```
/// use link::*;
/// let l: Link<isize> = Link::new();
/// assert_eq!(format!("{:?}", l), "[]");
/// ```
pub fn new() -> Self {
Link::from(None)
}
    /// Returns true if the list is empty.
pub fn empty(&self) -> bool {
self.0.is_none()
}
    /// Returns the number of elements in the list.
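    /// # Example
    /// ```
    /// use link::*;
    /// let l: Link<isize> = link![1, 2, 3];
    /// assert_eq!(l.len(), 3);
    /// assert!(!l.empty());
    /// ```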
pub fn len(&self) -> usize {
let mut len:usize = 0;
let mut node = self.0.as_ref();
while let Some(n) = node {
node = n.next.0.as_ref();
len += 1;
}
len
}
    /// Concatenates the two lists, equivalent to a = a + b.
    /// # Example
/// ```
/// use link::*;
/// let mut a: Link<isize> = link![1, 2];
/// let b: Link<isize> = link![3, 4];
/// a.concat(b);
/// assert_eq!(format!("{:?}", a), "[1, 2, 3, 4]");
/// ```
pub fn concat(&mut self, other: Self) {
match self.0.as_mut() {
            // empty list
None => *self = other,
            // non-empty list
Some(node) => Self::end_node(node).next = other,
}
}
    /// Splits the list after index `at` and returns the detached tail.
    /// # Example
/// ```
/// use link::*;
/// let mut a: Link<isize> = link![1, 2, 3];
/// let b = a.split_off(0);
/// assert_eq!(a, link![1]);
/// assert_eq!(b, link![2, 3]);
/// ```
pub fn split_off(&mut self, at: usize) -> Self {
if let Some(node) = self.get_mut(at) {
Link::from(node.next.0.take())
} else {
Link::new()
}
}
    /// Takes the whole list, leaving an empty list in its place.
    /// # Example
/// ```
/// use link::*;
/// let mut a: Link<isize> = link![0, 1, 2];
/// let b = a.take();
/// assert_eq!(format!("{:?}", a), "[]");
/// assert_eq!(format!("{:?}", b), "[0, 1, 2]");
/// ```
pub fn take(&mut self) -> Self {
Link::from(self.0.take())
}
    /// Appends an element at the end of the list.
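    /// # Example
    /// ```
    /// use link::*;
    /// let mut l: Link<isize> = Link::new();
    /// l.push_back(1);
    /// l.push_back(2);
    /// assert_eq!(format!("{:?}", l), "[1, 2]");
    /// ```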
pub fn push_back(&mut self, val: T) {
let n = Node::new(val, None);
match self.0.as_mut() {
            // empty list
None => {
self.0 = Some(Box::new(n));
},
            // non-empty list
Some(node) => {
Self::end_node(node).next = Self::from(Some(Box::new(n)));
},
}
}
    /// Pops the last element; returns None when the list is empty.
    /// # Example
/// ```
/// use link::*;
/// let mut l: Link<isize> = link![0, 1, 2];
/// let v = l.pop_back();
/// assert_eq!(format!("{:?}", l), "[0, 1]");
/// assert_eq!(v, Some(2));
/// ```
pub fn pop_back(&mut self) -> Option<T> {
        // Implementation that does not rely on a stored length
let node = &mut self.0;
if node.as_ref()?.next.0.is_none() {
return Some(node.take()?.value)
}
let mut node = node.as_mut()?;
while node.next.0.as_ref().and_then(|s| s.next.0.as_ref()).is_some() {
node = node.next.0.as_mut()?;
}
Some(node.next.0.take()?.value)
        // Alternative implementation that relies on a stored length field
// match self.len {
// 0 => None,
// 1 => self.pop(),
// len => {
// let node = Self::get_mut(self.0.as_mut(), len-2)?;
// let n = node.next.0.take()?;
// self.len -= 1;
// Some(n.value)
// }
// }
}
    /// Pushes an element onto the front of the list.
    /// # Example
/// ```
/// use link::*;
/// let mut l: Link<isize> = link![0, 1, 2];
/// l.push(-1);
/// assert_eq!(format!("{:?}", l), "[-1, 0, 1, 2]");
/// ```
pub fn push(&mut self, val: T) {
let n = Node::new(val, self.0.take());
self.0 = Some(Box::new(n));
}
    /// Pops the first element; returns None when the list is empty.
    /// # Example
/// ```
/// use link::*;
/// let mut l: Link<isize> = link![0, 1, 2];
/// let v = l.pop();
/// assert_eq!(format!("{:?}", l), "[1, 2]");
/// assert_eq!(v, Some(0));
/// ```
pub fn pop(&mut self) -> Option<T> {
let node = self.0.take()?;
*self = node.next;
Some(node.value)
}
    /// Returns an immutable reference to the first element, or None when the list is empty.
    /// # Example
/// ```
/// use link::*;
/// let mut l: Link<isize> = link![0, 1, 2];
/// let v = l.front();
/// assert_eq!(v, Some(&0));
/// ```
pub fn front(&self) -> Option<&T> {
        // Equivalent to:
// match self.0 {
// Some(ref n) => Some(&n.value),
// None => None,
// }
Some(&self.0.as_ref()?.value)
}
    /// Returns a mutable reference to the first element, or None when the list is empty.
pub fn front_mut(&mut self) -> Option<&mut T> {
Some(&mut self.0.as_mut()?.value)
}
    /// Returns an immutable reference to the last element, or None when the list is empty.
pub fn back(&self) -> Option<&T> {
let mut p = self.0.as_ref()?;
while let Some(t) = p.next.0.as_ref() {
p = t;
}
Some(&p.value)
}
    /// Returns a mutable reference to the last element, or None when the list is empty.
pub fn back_mut(&mut self) -> Option<&mut T> {
let mut p = self.0.as_mut()?;
while let Some(t) = p.next.0.as_mut() {
p = t;
}
Some(&mut p.value)
}
    /// Inserts an element at the given position and returns a reference to it, or None when the insertion fails.
    /// # Example
/// ```
/// use link::*;
/// let mut l: Link<isize> = link![0, 1, 2];
/// let v = l.insert(2, 3);
/// assert_eq!(v, Some(&3));
/// assert_eq!(format!("{:?}", l), "[0, 1, 3, 2]");
/// ```
pub fn insert(&mut self, i: usize, val: T) -> Option<&T> {
if i == 0 {
self.push(val);
self.front()
} else {
let node = self.get_mut(i-1)?;
let n = Node::new(val, node.next.0.take());
node.next = Self::from(Some(Box::new(n)));
Some(&node.next.0.as_ref()?.value)
}
}
    /// Removes the element at the given position and returns it, or None when the removal fails.
    /// # Example
/// ```
/// use link::*;
/// let mut l: Link<isize> = link![0, 1, 2];
/// let v = l.delete(1);
/// assert_eq!(v, Some(1));
/// assert_eq!(format!("{:?}", l), "[0, 2]");
/// ```
pub fn delete(&mut self, i: usize) -> Option<T> {
if i == 0 {
self.pop()
} else {
let node = self.get_mut(i-1)?;
let n = node.next.0.take()?;
node.next = n.next;
Some(n.value)
}
}
    /// Creates an iterator over immutable references.
    /// # Example
/// ```
/// use link::*;
/// let l: Link<isize> = link![0, 1, 2];
/// let mut iter = l.iter();
/// assert_eq!(iter.next(), Some(&0));
/// assert_eq!(iter.next(), Some(&1));
/// assert_eq!(iter.next(), Some(&2));
/// assert_eq!(iter.next(), None);
/// ```
pub fn iter(&self) -> Iter<'_, T> {
Iter {data: self.0.as_ref()}
}
    /// Creates an iterator over mutable references.
    /// # Example
/// ```
/// use link::*;
/// let mut l: Link<isize> = link![0, 1, 2];
/// let mut iter = l.iter_mut();
/// assert_eq!(iter.next(), Some(&mut 0));
/// assert_eq!(iter.next(), Some(&mut 1));
/// assert_eq!(iter.next(), Some(&mut 2));
/// assert_eq!(iter.next(), None);
/// ```
pub fn iter_mut(&mut self) -> IterMut<'_, T> {
IterMut {data: self.0.as_mut()}
}
    /// Creates a list containing `n` clones of the given element.
    /// # Example
/// ```
/// use link::*;
/// let a: Link<isize> = Link::from_elem(-1, 3);
/// assert_eq!(format!("{:?}", a), "[-1, -1, -1]");
/// ```
pub fn from_elem(val: T, n: usize) -> Self
where
T: Clone {
let mut link: Link<T> = Self::new();
for _ in 0..n {
link.push(val.clone());
}
link
}
}
use std::iter;
/// Iterator over immutable references.
pub struct Iter<'a, T> {
data: Option<&'a Box<Node<T>>>
}
impl<'a, T> iter::Iterator for Iter<'a, T> {
type Item = &'a T;
fn next(& mut self) -> Option<Self::Item> {
let node = self.data?;
self.data = node.next.0.as_ref();
Some(&node.value)
}
}
/// IntoIterator adapter for immutable references.
/// # Example
/// ```
/// use link::*;
/// let l: Link<isize> = link![1, 2, 3];
/// let mut s = 0;
/// for i in &l {
/// s += i;
/// }
/// assert_eq!(s, 6);
/// ```
impl<'a, T> iter::IntoIterator for &'a Link<T> {
type Item = &'a T;
type IntoIter = Iter<'a, T>;
fn into_iter(self) -> Self::IntoIter {
self.iter()
}
}
/// Iterator over mutable references.
pub struct IterMut<'a, T> {
data: Option<&'a mut Box<Node<T>>>
}
impl<'a, T> iter::Iterator for IterMut<'a, T> {
type Item = &'a mut T;
fn next(& mut self) -> Option<Self::Item> {
let node = self.data.take()?;
self.data = node.next.0.as_mut();
Some(&mut node.value)
}
}
impl<'a, T> IterMut<'a, T> {
    /// Inserts a value right after the node the iterator currently points at.
/// ```
/// use link::*;
/// let mut l: Link<usize> = link![1,2,3];
/// let mut a = l.iter_mut();
/// a.next();
/// a.insert_next(4).unwrap();
/// assert_eq!(l, link![1,2,4,3]);
/// ```
pub fn insert_next(&mut self, value: T) -> Result<(), &str> {
if let Some(node) = &mut self.data {
let n = Node::new(value, node.next.0.take());
node.next = Link::from(Some(Box::new(n)));
Ok(())
} else {
Err("The iterator is pointed at no data!")
}
}
    /// Removes the node right after the node the iterator currently points at and returns its value.
/// ```
/// use link::*;
/// let mut l: Link<usize> = link![1,2,3];
/// let mut a = l.iter_mut();
/// a.next();
/// assert_eq!(a.pop_next(), Some(3));
/// assert_eq!(l, link![1,2]);
/// ```
pub fn pop_next(&mut self) -> Option<T> {
if let Some(node) = &mut self.data {
let n = node.next.0.take()?;
node.next = n.next;
Some(n.value)
} else {
None
}
}
}
/// IntoIterator adapter for mutable references.
/// # Example
/// ```
/// use link::*;
/// let mut l: Link<isize> = link![1, 2, 3];
/// for i in &mut l {
/// *i += 1;
/// }
/// assert_eq!(format!("{:?}", l), "[2, 3, 4]");
/// ```
impl<'a, T> iter::IntoIterator for &'a mut Link<T> {
type Item = &'a mut T;
type IntoIter = IterMut<'a, T>;
fn into_iter(self) -> Self::IntoIter {
self.iter_mut()
}
}
/// Iterator over owned elements.
pub struct IntoIter<T> {
data: Link<T>
}
impl<T> iter::Iterator for IntoIter<T> {
type Item = T;
fn next(& mut self) -> Option<Self::Item> {
let node = self.data.0.take()?;
self.data = node.next;
Some(node.value)
}
}
/// IntoIterator adapter for owned elements.
/// # Example
/// ```
/// use link::*;
/// let l: Link<isize> = link![0, 1, 2];
/// let mut iter = l.into_iter();
/// assert_eq!(iter.next(), Some(0));
/// assert_eq!(iter.next(), Some(1));
/// assert_eq!(iter.next(), Some(2));
/// assert_eq!(iter.next(), None);
/// ```
impl<T> iter::IntoIterator for Link<T> {
type Item = T;
type IntoIter = IntoIter<T>;
fn into_iter(self) -> Self::IntoIter {
IntoIter {data: self}
}
}
/// Builds a list from an iterator of owned items.
/// # Example
/// ```
/// use link::*;
/// let a: Link<isize> = std::iter::repeat(-1).take(3).collect();
/// assert_eq!(format!("{:?}", a), "[-1, -1, -1]");
/// ```
impl<T> iter::FromIterator<T> for Link<T> {
fn from_iter<I>(iter: I) -> Self
where
I: iter::IntoIterator<Item = T> {
let mut link: Link<T> = Self::new();
let mut node = &mut link;
for i in iter {
*node = Node::new(i, None).as_link();
node = &mut node.0.as_mut().unwrap().next;
}
link
}
}
/// Builds a list from an iterator of references by cloning each item.
/// # Example
/// ```
/// use link::*;
/// let v = vec![-1; 3];
/// let a: Link<isize> = v.iter().collect();
/// assert_eq!(format!("{:?}", a), "[-1, -1, -1]");
/// ```
impl<'a, T: Clone + 'a> iter::FromIterator<&'a T> for Link<T> {
fn from_iter<I>(iter: I) -> Self
where
I: iter::IntoIterator<Item = &'a T> {
let mut link: Link<T> = Self::new();
let mut node = &mut link;
for i in iter {
*node = Node::new(i.clone(), None).as_link();
node = &mut node.0.as_mut().unwrap().next;
}
link
}
}
/// Macro for building a linked list, mirroring the vec! macro.
/// # Example
/// ```
/// use link::*;
/// let a: Link<isize> = link![0, 1, 2];
/// let b: Link<isize> = link![-1; 3];
/// let c: Link<isize> = link![];
/// assert_eq!(format!("{:?}", a), "[0, 1, 2]");
/// assert_eq!(format!("{:?}", b), "[-1, -1, -1]");
/// assert_eq!(format!("{:?}", c), "[]");
/// ```
#[macro_export]
macro_rules! link {
($($x:expr),+) => (vec![$($x),*].into_iter().collect());
($x:expr; $n:expr) => ($crate::Link::from_elem($x, $n));
() => ($crate::Link::new())
}
use std::fmt;
/// Debug formatting.
impl<T: fmt::Debug> fmt::Debug for Link<T> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_list().entries(self).finish()
}
}
use std::ops;
/// Index (read) operator.
/// # Example
/// ```
/// use link::*;
/// let a: Link<isize> = link![1, 2, 3];
/// assert_eq!(a[0], 1);
/// assert_eq!(a[1], 2);
/// assert_eq!(a[2], 3);
/// ```
impl<T> ops::Index<usize> for Link<T> {
type Output = T;
fn index(&self, i: usize) -> &Self::Output {
match self.get(i) {
Some(n) => &n.value,
None => Self::out_of_range(i)
}
}
}
/// Index (write) operator.
/// # Example
/// ```
/// use link::*;
/// let mut a: Link<isize> = link![1, 2, 3];
/// a[1] = -1;
/// assert_eq!(format!("{:?}", a), "[1, -1, 3]");
/// ```
impl<T> ops::IndexMut<usize> for Link<T> {
fn index_mut(&mut self, i: usize) -> &mut Self::Output {
match self.get_mut(i) {
Some(n) => &mut n.value,
None => Self::out_of_range(i)
}
}
}
/// Addition operator that appends a single element.
/// # Example
/// ```
/// use link::*;
/// let mut a: Link<isize> = link![1, 2, 3];
/// a = a + 4;
/// assert_eq!(format!("{:?}", a), "[1, 2, 3, 4]");
/// ```
impl<T> ops::Add<T> for Link<T> {
type Output = Self;
fn add(mut self, rhs: T) -> Self::Output {
self.push_back(rhs);
self
}
}
/// Addition operator that concatenates two lists.
/// # Example
/// ```
/// use link::*;
/// let mut a: Link<isize> = link![1, 2, 3];
/// let b: Link<isize> = link![4, 5];
/// a = a + b;
/// assert_eq!(format!("{:?}", a), "[1, 2, 3, 4, 5]");
/// ```
impl<T> ops::Add for Link<T> {
type Output = Self;
fn add(mut self, rhs: Self) -> Self::Output {
self.concat(rhs);
self
}
}
/// Add-assign operator that appends a single element.
/// # Example
/// ```
/// use link::*;
/// let mut a: Link<isize> = link![1, 2, 3];
/// a += 4;
/// assert_eq!(format!("{:?}", a), "[1, 2, 3, 4]");
/// ```
impl<T> ops::AddAssign<T> for Link<T> {
fn add_assign(&mut self, other: T) {
self.push_back(other);
}
}
/// Add-assign operator that concatenates two lists.
/// # Example
/// ```
/// use link::*;
/// let mut a: Link<isize> = link![1, 2, 3];
/// let b: Link<isize> = link![4, 5];
/// a += b;
/// assert_eq!(format!("{:?}", a), "[1, 2, 3, 4, 5]");
/// ```
impl<T> ops::AddAssign for Link<T> {
fn add_assign(&mut self, other: Self) {
self.concat(other);
}
}
use std::cmp;
/// Equality comparison.
/// # Example
/// ```
/// use link::*;
/// let a: Link<isize> = link![1, 2, 3];
/// assert_eq!(a, link![1, 2, 3]);
/// ```
impl<T> cmp::PartialEq for Link<T>
where
T: cmp::PartialEq {
    fn eq(&self, other: &Self) -> bool {
        // Lists of different lengths are never equal; zip alone would silently ignore the
        // longer list's extra tail.
        if self.len() != other.len() {
            return false;
        }
        for (v1, v2) in self.iter().zip(other.iter()) {
            if v1 != v2 {
                return false;
            }
        }
        true
    }
}
#[cfg(test)]
mod tests {
#[test]
fn test1() {
use crate::*;
let mut a: Link<isize> = link![1, 2, 3];
let b = a.split_off(1);
assert_eq!(a, link![1, 2]);
assert_eq!(b, link![3]);
}
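    // An additional sanity check (added for illustration, not part of the original tests):
    // it exercises push/pop/len together with the equality operator.
    #[test]
    fn test_push_pop_len() {
        use crate::*;
        let mut l: Link<isize> = Link::new();
        l.push_back(1);
        l.push_back(2);
        l.push(0);
        assert_eq!(l.len(), 3);
        assert_eq!(l, link![0, 1, 2]);
        assert_eq!(l.pop_back(), Some(2));
        assert_eq!(l.pop(), Some(0));
        assert_eq!(l.len(), 1);
    }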
}
| true |
d431492663da84ab23dbaa24d76fea128e957c4a
|
Rust
|
ttys3/mdcat
|
/src/terminal/terminology.rs
|
UTF-8
| 2,873 | 2.90625 | 3 |
[
"Apache-2.0"
] |
permissive
|
// Copyright 2018 Vinícius dos Santos Oliveira <[email protected]>
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! [Terminology][] specific functions.
//!
//! [Terminology]: http://terminolo.gy
use super::TerminalSize;
use std::io::{Result, Write};
use url::Url;
/// Whether we run in terminology or not.
pub fn is_terminology() -> bool {
std::env::var("TERMINOLOGY")
.map(|value| value.trim() == "1")
.unwrap_or(false)
}
/// Provides access to printing images for Terminology.
#[derive(Debug)]
pub struct TerminologyImages;
impl TerminologyImages {
/// Write an inline image for Terminology.
pub fn write_inline_image<W: Write>(
&self,
writer: &mut W,
max_size: TerminalSize,
url: &Url,
) -> Result<()> {
// Terminology escape sequence is like: set texture to path, then draw a
// rectangle of chosen character to be replaced by the given texture.
// Documentation gives the following C example:
//
// printf("\033}is#5;3;%s\000"
// "\033}ib\000#####\033}ie\000\n"
// "\033}ib\000#####\033}ie\000\n"
// "\033}ib\000#####\033}ie\000\n", "/tmp/icon.png");
//
// We need to compute image proportion to draw the appropriate
// rectangle. If we can't compute the image proportion (e.g. it's an
// external URL), we fallback to a rectangle that is half of the screen.
let columns = max_size.width;
let lines = Some(url)
.filter(|url| url.scheme() == "file")
.and_then(|url| url.to_file_path().ok())
.and_then(|path| image::image_dimensions(path).ok())
.map(|(width, height)| {
let (w, h) = (f64::from(width), f64::from(height));
// We divide by 2 because terminal cursor/font most likely has a
// 1:2 proportion
(h * (columns / 2) as f64 / w) as usize
})
.unwrap_or(max_size.height / 2);
let mut command = format!("\x1b}}ic#{};{};{}\x00", columns, lines, url.as_str());
for _ in 0..lines {
command.push_str("\x1b}ib\x00");
for _ in 0..columns {
command.push('#');
}
command.push_str("\x1b}ie\x00\n");
}
writer.write_all(command.as_bytes())?;
Ok(())
}
}
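// A minimal usage sketch (added for illustration; not part of the original source).
// The `TerminalSize { width, height }` construction below is an assumption based on the
// fields used above; the real type may differ.
//
// let images = TerminologyImages;
// let mut buffer: Vec<u8> = Vec::new();
// let url = Url::parse("file:///tmp/icon.png").unwrap();
// images.write_inline_image(&mut buffer, TerminalSize { width: 80, height: 24 }, &url)?;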
| true |
9fb21573908714911ebe7cc318f9f123c4da8ef8
|
Rust
|
DarkKowalski/gameboy
|
/src/timer.rs
|
UTF-8
| 3,694 | 3.53125 | 4 |
[
"WTFPL"
] |
permissive
|
// Sometimes it's useful to have a timer that interrupts at regular intervals for routines that require periodic or
// percise updates. The timer in the GameBoy has a selectable frequency of 4096, 16384, 65536, or 262144 Hertz.
// This frequency increments the Timer Counter (TIMA). When it overflows, it generates an interrupt. It is then loaded
// with the contents of Timer Modulo (TMA).
//
// See: http://gbdev.gg8.se/wiki/articles/Timer_and_Divider_Registers
#[derive(Default)]
pub struct Timer {
// Each time when the timer overflows (ie. when TIMA gets bigger than FFh), then an interrupt is requested by
// setting Bit 2 in the IF Register (FF0F). When that interrupt is enabled, then the CPU will execute it by calling
// the timer interrupt vector at 0050h.
pub intf: u8,
// This register is incremented at rate of 16384Hz (~16779Hz on SGB). Writing any value to this register resets it
// to 00h.
// Note: The divider is affected by CGB double speed mode, and will increment at 32768Hz in double speed.
div: u8,
// This timer is incremented by a clock frequency specified by the TAC register ($FF07). When the value overflows
// (gets bigger than FFh) then it will be reset to the value specified in TMA (FF06), and an interrupt will be
// requested, as described below.
tima: u8,
// When the TIMA overflows, this data will be loaded.
tma: u8,
// Bit 2 - Timer Enable
// Bits 1-0 - Input Clock Select
// 00: CPU Clock / 1024 (DMG, CGB: 4096 Hz, SGB: ~4194 Hz)
// 01: CPU Clock / 16 (DMG, CGB: 262144 Hz, SGB: ~268400 Hz)
// 10: CPU Clock / 64 (DMG, CGB: 65536 Hz, SGB: ~67110 Hz)
// 11: CPU Clock / 256 (DMG, CGB: 16384 Hz, SGB: ~16780 Hz)
tac: u8,
freq: u32,
// Count the number of cycles and set 0 each 256 cycles
tmp1: u32,
// Count the number of cycles and set 0 each "freq" cycles
tmp2: u32,
}
impl Timer {
pub fn power_up() -> Self {
Timer {
freq: 256,
..Timer::default()
}
}
pub fn get(&self, a: u16) -> u8 {
match a {
0xff04 => self.div,
0xff05 => self.tima,
0xff06 => self.tma,
0xff07 => self.tac,
_ => panic!("Unsupported address"),
}
}
pub fn set(&mut self, a: u16, v: u8) {
match a {
0xff04 => self.div = 0x00,
0xff05 => self.tima = v,
0xff06 => self.tma = v,
0xff07 => {
self.tac = v;
match v & 0x03 {
0x00 => self.freq = 1024,
0x01 => self.freq = 16,
0x02 => self.freq = 64,
0x03 => self.freq = 256,
_ => panic!(""),
};
}
_ => panic!("Unsupported address"),
}
}
pub fn next(&mut self, cycles: u32) {
// Increment div at rate of 16384Hz. Because the clock cycles is 4194304, so div increment every 256 cycles.
let c = 256;
self.tmp1 += cycles;
if self.tmp1 >= c {
self.div = self.div.wrapping_add(1);
self.tmp1 -= c;
}
// Increment tima at rate of Clock / freq
if (self.tac & 0x04) != 0x00 {
self.tmp2 += cycles;
while self.tmp2 >= self.freq {
self.tima = self.tima.wrapping_add(1);
if self.tima == 0x00 {
self.tima = self.tma;
self.intf |= 0x04;
}
self.tmp2 -= self.freq;
}
}
}
}
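// A minimal sketch of how the timer is driven (added for illustration; not part of the
// original source): enable the timer with TAC = 0b101 (enabled, CPU clock / 16) and step
// it by the number of elapsed CPU cycles.
#[cfg(test)]
mod tests {
    use super::Timer;

    #[test]
    fn tima_increments_when_enabled() {
        let mut timer = Timer::power_up();
        timer.set(0xff07, 0b0000_0101); // bit 2: enable, bits 1-0: CPU clock / 16
        timer.next(16); // exactly one TIMA tick at this frequency
        assert_eq!(timer.get(0xff05), 1); // TIMA incremented
        assert_eq!(timer.intf, 0); // no overflow interrupt yet
    }
}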
| true |
155c8198f892eaf337fe2b06d0d858b18d60308b
|
Rust
|
rodrimati1992/core_extensions
|
/core_extensions_proc_macros/src/derive/transparent_newtype_derive/tn_attribute_parsing.rs
|
UTF-8
| 3,878 | 2.640625 | 3 |
[
"MIT",
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
] |
permissive
|
use crate::derive::{
attr_parsing::{self, AttrParsing, SharedConfig, ParseCtx},
utils::Empty,
DataStructure, Field, ParseBufferExt,
};
use proc_macro2::Span;
use syn::{
parse::ParseBuffer,
Attribute,
};
pub(super) struct WrappedField<'a> {
pub(super) field: &'a Field<'a>,
pub(super) transparency: WrappedFieldTranparency,
}
pub(super) enum WrappedFieldTranparency {
Direct,
Delegated,
}
struct ParsedAttributes<'a> {
field: Option<WrappedField<'a>>,
has_transparent_repr: Option<bool>,
shared: SharedConfig,
}
pub(super) struct Configuration<'a> {
pub(super) field: WrappedField<'a>,
pub(super) shared: SharedConfig,
}
pub(super) fn parse_attributes<'a>(ds: &'a DataStructure<'a>) -> syn::Result<Configuration<'a>> {
ParsedAttributes{
field: None,
has_transparent_repr: Some(false),
shared: SharedConfig::new(),
}.parse_item_attributes(ds)
}
mod keyword {
syn::custom_keyword!(delegate);
syn::custom_keyword!(transparent);
}
impl<'a> AttrParsing<'a> for ParsedAttributes<'a> {
type Config = Configuration<'a>;
const HELPER_ATTR: &'static str = "twrap";
fn shared_config_mut(&mut self) -> &mut SharedConfig {
&mut self.shared
}
fn parse_helper_attribute(
&mut self,
_ds: &'a DataStructure<'a>,
ctx: ParseCtx<'a>,
input: &'_ ParseBuffer<'_>,
) -> syn::Result<()> {
let field = attr_parsing::check_is_field(ctx, &Empty(input.span()))?;
if self.field.is_some() {
return Err(input.error("cannot use the `#[twrap]` attribute on multiple fields"));
}
let mut assign_field = |transparency| {
self.field = Some(WrappedField{field, transparency});
};
if let Some(_) = input.peek_parse(keyword::delegate)? {
assign_field(WrappedFieldTranparency::Delegated);
} else if input.is_empty() {
assign_field(WrappedFieldTranparency::Direct);
} else {
return Err(input.error("expected either `#[twrap(delegate)]` or `#[twrap]`"));
}
Ok(())
}
fn parse_other_container_attr(
&mut self,
_ds: &'a DataStructure<'a>,
attribute: &Attribute,
) -> syn::Result<()> {
if attribute.path.is_ident("repr") {
attribute.parse_args_with(move|input: &'_ ParseBuffer<'_>| {
match (input.peek_parse(keyword::transparent)?, &mut self.has_transparent_repr) {
(Some(_), Some(has_transparent_repr)) if input.is_empty() =>
*has_transparent_repr = true,
(_, has_transparent_repr) =>
*has_transparent_repr = None,
}
input.parse::<crate::TokenStream2>()?;
Ok(())
})
} else {
Ok(())
}
}
fn finish(mut self, ds: &'a DataStructure<'a>) -> syn::Result<Self::Config> {
if self.field.is_none() {
if let [field] = &ds.variants[0].fields[..] {
self.field = Some(WrappedField{
field,
transparency: WrappedFieldTranparency::Direct,
})
}
}
let field = self.field.ok_or_else(||{
syn::Error::new(
Span::call_site(),
"Expected a `#[twrap]` attribute on exactly one field",
)
})?;
if self.has_transparent_repr != Some(true) {
let msg = "\
This type must have a `#[repr(transparent)]` attribute,\
and no other representation attribute.\
";
return Err(syn::Error::new(Span::call_site(), msg));
}
Ok(Configuration{
field,
shared: self.shared,
})
}
}
| true |
8c3f9d7f519e11383818caf9110a9e499463b791
|
Rust
|
keeslinp/rust_invaders
|
/src/main_state.rs
|
UTF-8
| 2,043 | 3.203125 | 3 |
[] |
no_license
|
extern crate ggez;
use ggez::event;
use ggez::event::Keycode;
use ggez::event::Mod;
use ggez::{GameResult, Context};
use ggez::graphics;
use std::rc::Rc;
use std::time::Duration;
use states::menu_state::MenuState;
use states::play_state::PlayState;
use std::collections::HashMap;
use states::GameState;
// First we make a structure to contain the game's state
pub struct MainState {
frames: usize,
states: Vec<Box<GameState>>,
state: usize,
keys: HashMap<Keycode, bool>,
}
// Then we implement the `ggez:event::EventHandler` trait on it, which
// requires callbacks for updating and drawing the game state each frame.
//
// The `EventHandler` trait also contains callbacks for event handling
// that you can override if you wish, but the defaults are fine.
impl MainState {
pub fn new(ctx: &mut Context) -> GameResult<MainState> {
let font = Rc::new(graphics::Font::new(ctx, "/OpenSans-Regular.ttf", 48)?);
let s = MainState {
frames: 0,
keys: HashMap::new(),
states: vec![Box::new(MenuState::new(&font, ctx)), Box::new(PlayState::new(ctx))],
state: 0,
};
Ok(s)
}
}
impl event::EventHandler for MainState {
fn update(&mut self, _ctx: &mut Context, _dt: Duration) -> GameResult<()> {
self.state = self.states[self.state].update(&mut self.keys, _ctx, _dt)?;
self.keys.clear();
Ok(())
}
fn draw(&mut self, ctx: &mut Context) -> GameResult<()> {
graphics::clear(ctx);
(&self).states[self.state].draw(&self, ctx)?;
// Drawables are drawn from their center.
graphics::present(ctx);
self.frames += 1;
if (self.frames % 100) == 0 {
println!("FPS: {}", ggez::timer::get_fps(ctx));
}
Ok(())
}
fn key_down_event(&mut self, keycode: Keycode, _: Mod, _: bool) {
self.keys.insert(keycode, true);
}
fn key_up_event(&mut self, keycode: Keycode, _: Mod, _: bool) {
self.keys.insert(keycode, false);
}
}
| true |
8f1c43ef57d4f4240dfbb4f81dfe167c966950a9
|
Rust
|
zackw/openvpn_netns_tools
|
/src/subprocess.rs
|
UTF-8
| 3,125 | 2.75 | 3 |
[] |
no_license
|
/// Subprocess management.
use std::io;
use std::num;
use std::str;
use std::io::Write;
use std::process::{Child,Command,Stdio,ExitStatus};
use nix::sys::signal::SigSet;
//use nix::sys::signal::SIG_SETMASK;
//use std::os::unix::process::CommandExt;
use libc::pid_t;
use err::*;
#[allow(dead_code)] // until we turn sigmasks back on
pub struct ChildEnv {
pub env: Vec<(String, String)>,
pub mask: SigSet,
pub verbose: bool,
pub dryrun: bool,
}
fn internal_spawn(argv: &[&str], env: &ChildEnv, stdout: Stdio)
-> io::Result<Child> {
if env.verbose {
writeln!(io::stderr(), "{}", argv.join(" ")).unwrap();
}
let exe = if env.dryrun { "true" } else { argv[0] };
let mut cmd = Command::new(exe);
cmd.stdin(Stdio::null());
cmd.stdout(stdout);
cmd.args(&argv[1..]);
cmd.env_clear();
for &(ref k, ref v) in env.env.iter() {
cmd.env(k, v);
}
/*
cmd.before_exec(|| {
pthread_sigmask(SIG_SETMASK, Some(env.mask), None)
});
*/
cmd.spawn()
}
fn check_child_status(argv: &[&str], status: &ExitStatus)
-> Result<(), HLError> {
if status.success() {
Ok(())
} else {
Err(map_unsuc_child(status, argv))
}
}
pub fn spawn(argv: &[&str], env: &ChildEnv) -> Result<Child, HLError> {
internal_spawn(argv, env, Stdio::inherit())
.map_err(|e| map_io_err(e, format!("spawn {}", argv[0])))
}
pub fn run(argv: &[&str], env: &ChildEnv) -> Result<(), HLError> {
let mut child = try!(spawn(argv, env));
let status = try!(child.wait()
.map_err(|e| map_io_err(e, format!("wait for {}",
argv[0]))));
check_child_status(argv, &status)
}
pub fn run_ignore_failure(argv: &[&str], env: &ChildEnv) {
match run(argv, env) {
Ok(_) => (),
Err(e) => {
writeln!(io::stderr(), "{}", e).unwrap();
}
}
}
pub fn run_get_output(argv: &[&str], env: &ChildEnv)
-> Result<Vec<u8>, HLError> {
let child = try!(internal_spawn(argv, env, Stdio::piped())
.map_err(|e| map_io_err(e, format!("spawn {}",
argv[0]))));
let output = try!(child.wait_with_output()
.map_err(|e| map_io_err(e, format!("reading from {}",
argv[0]))));
try!(check_child_status(argv, &output.status));
Ok(output.stdout)
}
pub fn run_get_output_pids(argv: &[&str], env: &ChildEnv)
-> Result<Vec<pid_t>, HLError> {
let raw_output = try!(run_get_output(argv, env));
let output = try!(str::from_utf8(&raw_output)
.map_err(|e| map_utf8_err(e, format!("{:?}",
raw_output))));
output
.split_whitespace().map(|s| s.parse::<pid_t>())
.collect::<Result<Vec<pid_t>, num::ParseIntError>>()
.map_err(|e| map_pi_err(e, String::from("expected process id")))
}
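// A minimal usage sketch (added for illustration; not part of the original source). The
// environment entries and the command below are placeholders, not values from this project.
//
// let env = ChildEnv {
//     env: vec![("PATH".to_string(), "/usr/bin:/bin".to_string())],
//     mask: SigSet::empty(),
//     verbose: true,
//     dryrun: false,
// };
// run(&["ip", "link", "show"], &env)?;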
| true |
e58bdb8ee4d757a1c1134b897aae90f080bf2593
|
Rust
|
ST92/secondary-control-keyboard
|
/src/main.rs
|
UTF-8
| 5,517 | 3 | 3 |
[] |
no_license
|
/*!
Xorg input device grabbing utility
Claims all input from an USB keyboard to use it's keys to perform utility functions
for me, regardless of what's on my screen or what window has focus.
made by Supersonic Tumbleweed to teach myself Rust while solving a real problem.
*/
mod inputs;
mod xorg_functionality;
use evdev::Key;
use inputs::{disable_device, get_keyboard, watch_keys, EventResult};
mod config {
///
/// The keyboard device name in question. It might be different, and most likely
/// will be different on any system other than mine.
///
pub const KEYBOARD_IDENTIFIER: &str = "USB HCT Keyboard";
///
/// XOrg display identifier.
/// :0 means first screen on local machine.
///
pub const XORG_DISPLAY: &str = ":0";
///
/// Configuration resides here, mostly bindings.
/// CONFIG_DIR/bind/just/a will be executed on pressing A
///
/// TODO::
/// CONFIG_DIR/bind/alt/a will be executed on pressing ALT+A
/// CONFIG_DIR/bind/ctrl/a will be executed on pressing CTRL+A
/// CONFIG_DIR/bind/shift/a will be executed on pressing SHIFT+A
/// pressing CTRL+ALT+A will call both ./alt/a and ./ctrl/a
///
pub const CONFIG_DIR: &str = ".config/keycop";
///
/// Print additional info?
///
pub const DEBUG_INFO: bool = true;
}
fn main() {
let mut keyboard = get_keyboard();
disable_device(&keyboard);
watch_keys(&mut keyboard, &on_keypress, &on_keyrelease);
}
fn on_keypress(key: Key) -> EventResult {
let mut do_exit = false;
match key {
Key::KEY_ESC => do_exit = true, // exit program
Key::KEY_INSERT
| Key::KEY_HOME
| Key::KEY_DELETE
| Key::KEY_END
| Key::KEY_PAGEUP
| Key::KEY_PAGEDOWN => {
expo_goto_specific_by_key(key);
}
Key::KEY_UP | Key::KEY_DOWN | Key::KEY_LEFT | Key::KEY_RIGHT => {
expo_move_relative(Direction::from(key));
}
// Limit keys to main block - "`1234[...]QWERTY[...]VBNM,./"
Key(code) if code > Key::KEY_ESC.code() && code <= Key::KEY_SLASH.code() => {
// TODO: precompute this and lookup
let key_name = format!("{:?}", key);
let key_name = key_name.strip_prefix("KEY_").unwrap();
trigger_bind("just", key_name);
}
_ => {}
};
match do_exit {
true => EventResult::Exit,
false => EventResult::Continue,
}
}
fn trigger_bind(key_set: &str, key_name: &str) -> () {
let key_name = key_name.to_ascii_lowercase();
println!("Triggering {:}/{:}", key_set, key_name);
let mut path = dirs::home_dir().expect("failed to acquire home directory path");
path.extend(vec![crate::config::CONFIG_DIR, "bind", &key_set, &key_name]);
match std::process::Command::new(path).spawn() {
Ok(_) => {}
Err(e) if e.kind() == std::io::ErrorKind::NotFound => {}
Err(e) => {
eprintln!("{:?}", e)
}
}
}
///
/// Switch to specific virtual desktop
/// Keys map according to the usual physical layout of
/// INS, DEL, HOME, END, PGUP, PGDN block in a 3x2 grid
///
fn expo_goto_specific_by_key(key: Key) -> () {
xorg_functionality::trigger_expo();
// My local geometry is 3 wide x 2 tall virtual desktops
// this goes to top left one always
expo_move_relative(Direction::Up);
expo_move_relative(Direction::Left);
expo_move_relative(Direction::Left);
match key {
Key::KEY_DELETE | Key::KEY_END | Key::KEY_PAGEDOWN => {
expo_move_relative(Direction::Down);
}
_ => (),
}
match key {
Key::KEY_HOME | Key::KEY_END => expo_move_relative(Direction::Right),
Key::KEY_PAGEUP | Key::KEY_PAGEDOWN => {
expo_move_relative(Direction::Right);
expo_move_relative(Direction::Right);
}
_ => (),
}
xorg_functionality::trigger_expo();
}
fn on_keyrelease(_key: Key) -> EventResult {
EventResult::Continue
}
enum Direction {
Left,
Right,
Up,
Down,
Stay,
}
impl From<Key> for Direction {
fn from(key: Key) -> Self {
match key {
Key::KEY_LEFT => Self::Left,
Key::KEY_RIGHT => Self::Right,
Key::KEY_UP => Self::Up,
Key::KEY_DOWN => Self::Down,
_ => Self::Stay,
}
}
}
impl Into<&str> for Direction {
fn into(self) -> &'static str {
match self {
Self::Left => "Left",
Self::Right => "Right",
Self::Up => "Up",
Self::Down => "Down",
_ => "",
}
}
}
// FIXME: maybe send multiple keys with one call?
fn xdo_send_arrow_key(direction: Direction) {
let xdo = libxdo::XDo::new(Some(crate::config::XORG_DISPLAY)).expect("xdo acquire failed");
xdo.send_keysequence(direction.into(), 1000)
.expect("unable to simulate key");
}
///
/// Switch desktop relative to current
///
fn expo_move_relative(direction: Direction) {
xorg_functionality::trigger_expo();
xdo_send_arrow_key(direction);
xorg_functionality::trigger_expo();
}
#[test]
fn test_traverse_expo() {
xorg_functionality::trigger_expo();
let xdo = libxdo::XDo::new(Some(crate::config::XORG_DISPLAY)).expect("xdo acquire failed");
xdo.send_keysequence("Right", 0)
.expect("unable to press right key");
xorg_functionality::trigger_expo();
}
#[test]
fn test_trigger_bind() {
trigger_bind("just", "a");
}
| true |
4b3bff5d4cf69527cbabb03e1d899d06c3e5a2d3
|
Rust
|
0xb10ckdev/forest
|
/benches/example-benchmark.rs
|
UTF-8
| 1,519 | 2.71875 | 3 |
[
"Apache-2.0",
"MIT"
] |
permissive
|
// Copyright 2019-2023 ChainSafe Systems
// SPDX-License-Identifier: Apache-2.0, MIT
use criterion::{criterion_group, criterion_main, BenchmarkId, Criterion};
fn fibonacci_slow(n: u64) -> u64 {
match n {
0 => 1,
1 => 1,
n => fibonacci_slow(n - 1) + fibonacci_slow(n - 2),
}
}
fn fibonacci_fast(n: u64) -> u64 {
let mut a = 0;
let mut b = 1;
match n {
0 => b,
_ => {
for _ in 0..n {
let c = a + b;
a = b;
b = c;
}
b
}
}
}
fn bench_fibs(c: &mut Criterion) {
let mut group = c.benchmark_group("Fibonacci");
let runtime = tokio::runtime::Builder::new_current_thread()
.enable_all()
.build()
.unwrap();
for i in 0..5 {
let i = &i;
group.bench_with_input(BenchmarkId::new("Recursive", i), i, |b, i| {
b.iter(|| fibonacci_slow(*i))
});
group.bench_with_input(BenchmarkId::new("Iterative", i), i, |b, i| {
b.iter(|| fibonacci_fast(*i))
});
group.bench_with_input(BenchmarkId::new("Recursive Async", i), i, |b, i| {
b.to_async(&runtime).iter(|| async { fibonacci_slow(*i) })
});
group.bench_with_input(BenchmarkId::new("Iterative Async", i), i, |b, i| {
b.to_async(&runtime).iter(|| async { fibonacci_fast(*i) })
});
}
group.finish();
}
criterion_group!(benches, bench_fibs);
criterion_main!(benches);
| true |
89779142fa637026a3fa3840b9965eddc7679a14
|
Rust
|
seanpianka/leetcode-rust
|
/src/greedy/candy.rs
|
UTF-8
| 595 | 3.453125 | 3 |
[] |
no_license
|
/// https://leetcode.com/problems/candy/
fn candy(ratings: Vec<i32>) -> i32 {
let n = ratings.len();
    // Every child must get at least one candy to start with.
let mut nums = vec![1; n];
    // Left-to-right pass: a child rated higher than the left neighbor gets one more candy than that neighbor.
for i in 1..n {
if ratings[i] > ratings[i - 1] {
nums[i] = nums[i - 1] + 1;
}
}
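    // Right-to-left pass: a child rated higher than the right neighbor must also get more
    // candy than that neighbor, so take the maximum of both constraints.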
for i in (0..n - 1).rev() {
if ratings[i] > ratings[i + 1] {
nums[i] = nums[i].max(nums[i + 1] + 1);
}
}
nums.into_iter().sum()
}
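// A minimal sanity check for the two-pass greedy above (added for illustration; the test
// values follow the well-known problem examples and are not from the original source).
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn candy_examples() {
        // Ratings [1, 0, 2]: candies [2, 1, 2] -> 5 in total.
        assert_eq!(candy(vec![1, 0, 2]), 5);
        // Ratings [1, 2, 2]: candies [1, 2, 1] -> 4 in total.
        assert_eq!(candy(vec![1, 2, 2]), 4);
    }
}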
| true |
2d2e8937a49c239c2ae2864d12ddacacca5c09e3
|
Rust
|
akusumoto/xbook-deployer
|
/src/main.rs
|
UTF-8
| 3,592 | 3.0625 | 3 |
[] |
no_license
|
use std::env;
use std::path::{PathBuf};
use std::fs;
use std::io;
use std::io::Write;
extern crate fs_extra;
use fs_extra::file;
extern crate regex;
use regex::Regex;
fn load_args() -> Result<(PathBuf, PathBuf), String> {
let args: Vec<String> = env::args().collect();
if args.len() < 2 {
Err(String::from("invalid option: src dir = ?, dst dir = ?"))
}
else if args.len() < 3 {
Err(String::from(format!("invalid option: src dir = {}, dst dir = ?", args[1])))
}
else {
let src = PathBuf::from(&args[1]);
let dst = PathBuf::from(&args[2]);
Ok((src, dst))
}
}
fn load_xbooks(dir: &PathBuf) -> Result<Vec<PathBuf>, io::Error> {
let mut files: Vec<PathBuf> = Vec::new();
for p in fs::read_dir(dir)? {
let path = p?.path();
if path.is_file() {
if let Some(ext) = path.extension() {
match ext.to_os_string().into_string() {
Ok(s) => if s == "zip" { files.push(path.to_path_buf()); }
Err(e) => println!("failed to check extension - {} ({})", path.display(), e.into_string().unwrap()),
}
}
}
}
Ok(files)
}
fn deploy_xbooks(src_files: &Vec<PathBuf>, dst_dir: &PathBuf) {
let xbook_re: Regex = Regex::new(r"^\[(.+)\] .+\.zip$").unwrap();
for src_file in src_files {
//println!("{}", src_file.display());
let fname = src_file.file_name().unwrap().to_string_lossy().into_owned();
match xbook_re.captures(fname.as_str()) {
Some(caps) => {
let mut dst_auther_dir = PathBuf::from(dst_dir);
dst_auther_dir.push(caps.get(1).map_or("", |m| m.as_str()));
let mut dst_file = PathBuf::from(&dst_auther_dir);
dst_file.push(src_file.file_name().unwrap());
if ! dst_auther_dir.is_dir() {
match fs::create_dir(&dst_auther_dir) {
Ok(_) => println!("created dir {}", dst_auther_dir.display()),
Err(e) => println!("failed to create directory - {} ({})", dst_auther_dir.display(), e.to_string()),
}
}
let opt = file::CopyOptions::new();
match file::move_file(&src_file, &dst_file, &opt) {
Ok(_) => println!("{} -> {}", fname, dst_file.display()),
Err(err) => println!("failed to move file - {} ({})", src_file.display(), err.to_string()),
}
},
None => println!("invalid file name format - {}", fname),
}
}
}
/*
fn move_file(src: &PathBuf, dst_dir: &PathBuf) {
let mut dst = PathBuf::new();
dst.push(&dst_dir);
dst.push(src.file_name().unwrap());
let opt = file::CopyOptions::new();
match file::move_file(&src, &dst, &opt) {
Ok(_) => println!("{} -> {}", src.display(), dst.display()),
Err(e) => println!("{}", e.to_string()),
}
}
*/
fn press_any_key() -> Result<(), String> {
print!("Press any key? ");
io::stdout().flush().unwrap();
let mut s = String::new();
io::stdin().read_line(&mut s).unwrap();
Ok(())
}
fn main() -> Result<(), String>{
let (src_dir, dst_dir) = load_args()?;
match load_xbooks(&src_dir) {
Ok(src_files) => deploy_xbooks(&src_files, &dst_dir),
Err(err) => println!("{}", err.to_string()),
}
/*
for src_file in load_xbooks(&src_dir).unwrap() {
move_file(&src_file, &dst_dir);
}
*/
//press_any_key()
Ok(())
}
| true |
73fcfe3d5fe8376bfd8cd51e9beb365e76af3bb5
|
Rust
|
denis-gudim/rust-algo
|
/src/sort/quick_sort_hoare.rs
|
UTF-8
| 765 | 3.140625 | 3 |
[] |
no_license
|
pub fn sort<T: Ord>(list: &mut [T], low: usize, high: usize) {
if high > low {
let mut i = low;
let mut j = high;
loop {
while list[i] < list[low] { i += 1 }
while list[j] > list[low] { j -= 1 }
if i >= j { break }
list.swap(i, j);
i += 1;
j -= 1;
}
sort(list, low, j);
sort(list, j + 1, high);
}
}
#[cfg(test)]
mod tests {
use super::*;
use rand::{distributions::Uniform, Rng};
#[test]
fn quick_sort_hoare_test() {
// Arrange
let mut rng = rand::thread_rng();
let range = Uniform::new(0, 100);
let mut list : Vec<u64> = (0..100).map(|_| rng.sample(&range)).collect();
let high = list.len() - 1;
// Act
sort(&mut list, 0, high);
// Assert
for i in 1..100{
            assert!(list[i-1] <= list[i]);
}
}
}
| true |
45fee673166edcf19865c92b25c24bca6bbfd134
|
Rust
|
likr/atcoder
|
/abc149/src/bin/f.rs
|
UTF-8
| 2,396 | 2.890625 | 3 |
[] |
no_license
|
use proconio::input;
use std::collections::HashSet;
const M: usize = 1000000007;
fn count_children(
graph: &Vec<Vec<usize>>,
u: usize,
nodes: &mut Vec<usize>,
leaves: &mut Vec<usize>,
visited: &mut HashSet<usize>,
) {
visited.insert(u);
nodes[u] = 1;
leaves[u] = if graph[u].len() == 1 { 1 } else { 0 };
for &v in &graph[u] {
if !visited.contains(&v) {
count_children(graph, v, nodes, leaves, visited);
nodes[u] += nodes[v];
leaves[u] += leaves[v];
}
}
}
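// Computes the modular inverse of `a` modulo M using the extended Euclidean algorithm.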
fn inv(a: usize) -> usize {
let m = M as i64;
let mut a = a as i64;
let mut b = m as i64;
let mut u = 1;
let mut v = 0;
let mut tmp;
while b != 0 {
let t = a / b;
a -= t * b;
tmp = a;
a = b;
b = tmp;
u -= t * v;
tmp = u;
u = v;
v = tmp;
}
u %= m;
if u < 0 {
u += m;
}
return u as usize;
}
fn main() {
input! {
n: usize,
ab: [(usize, usize); n - 1],
}
let mut graph = vec![vec![]; n + 1];
for &(ai, bi) in &ab {
graph[ai].push(bi);
graph[bi].push(ai);
}
let mut root = 0;
for u in 1..=n {
if graph[u].len() > graph[root].len() {
root = u;
}
}
let mut nodes = vec![0; n + 1];
let mut leaves = vec![0; n + 1];
let mut visited = HashSet::new();
count_children(&graph, root, &mut nodes, &mut leaves, &mut visited);
eprintln!("{:?}", nodes);
eprintln!("{:?}", leaves);
let mut f = vec![0; n + 1];
f[0] = 1;
for i in 1..=n {
f[i] = f[i - 1] * (i % M) % M;
}
let mut x = 1;
for _ in 0..n {
x = x * 2 % M;
}
let mut y = 0;
for u in 1..=n {
let nu = nodes[u];
let lu = leaves[u];
let mu = nu - lu;
if lu == 1 {
continue;
}
let mut a = 0;
for k in 2..=lu {
let num = f[lu];
let denom = f[k] * f[lu - k] % M;
a = (a + num * inv(denom) % M) % M;
}
let mut b = 0;
for k in 0..=mu {
let num = f[mu];
let denom = f[k] * f[mu - k] % M;
b = (b + k * (num * inv(denom) % M) % M) % M;
}
eprintln!("{} {}", a, b);
y = (y + a * b % M) % M;
}
println!("{}", y * inv(x) % M);
}
| true |
41cb89f8fdd1a9432d122dad38f76379db78718b
|
Rust
|
althonos/uniprot.rs
|
/src/uniref/model/reference.rs
|
UTF-8
| 1,043 | 2.765625 | 3 |
[
"MIT"
] |
permissive
|
use std::io::BufRead;
use crate::error::Error;
use crate::parser::utils::decode_attribute;
use crate::parser::FromXml;
use quick_xml::events::BytesStart;
use quick_xml::Reader;
use super::Property;
/// A UniRef database reference.
#[derive(Debug, Clone)]
pub struct Reference {
pub id: String,
pub ty: String,
pub properties: Vec<Property>,
}
impl FromXml for Reference {
fn from_xml<B: BufRead>(
event: &BytesStart,
reader: &mut Reader<B>,
buffer: &mut Vec<u8>,
) -> Result<Self, Error> {
debug_assert_eq!(event.local_name(), b"dbReference");
// decode attributes
let id = decode_attribute(event, reader, "id", "reference")?;
let ty = decode_attribute(event, reader, "type", "reference")?;
let mut properties = Vec::new();
parse_inner! {event, reader, buffer,
e @ b"property" => {
properties.push(FromXml::from_xml(&e, reader, buffer)?);
}
}
Ok(Reference { id, ty, properties })
}
}
| true |
2f81f69df056e2f51565d5e2e5063ddf90cd03cd
|
Rust
|
untoldwind/razer_test_test
|
/src/cli/set_color.rs
|
UTF-8
| 230 | 2.546875 | 3 |
[] |
no_license
|
use devices::{self, Color};
use errors::Result;
pub fn set_color(color: Color) -> Result<()> {
for device in devices::list_devices()? {
println!("{} {:?}", device.name(), device.set_color(color));
}
Ok(())
}
| true |
72dd33f38735dbebec24d4a956c822b768046ccc
|
Rust
|
FrazAli/aoc
|
/2022/d01/src/main.rs
|
UTF-8
| 1,028 | 3.53125 | 4 |
[] |
no_license
|
use std::fs::File;
use std::io::Read;
struct Elf {
calories: Vec<usize>,
total: usize,
}
fn main() {
let mut file: File = File::open("input.txt")
.expect("Unable to open the file");
let mut data: String = String::new();
file.read_to_string(&mut data)
.expect("Unable to read the file");
let mut elves: Vec<Elf> = Vec::new();
let mut values: Vec<usize> = Vec::new();
for line in data.lines() {
if line.is_empty() {
elves.push(Elf {
calories: values.clone(),
total: values.iter().sum(),
});
values.truncate(0); // drop all elements
} else {
values.push(line.parse().unwrap());
}
}
elves.sort_by_key(|e| e.total);
let elf: &Elf = &elves[elves.len() - 1];
println!("Calories: {:?}", elf.calories);
println!("Part-1: {}", elf.total);
elves.reverse();
elves.truncate(3);
println!("Part-2: {}", elves.iter().map(|e| e.total).sum::<usize>());
}
| true |
88b5b4cc48360b5d75ac020de6e607081592768d
|
Rust
|
RobinThrift/befunge-rs
|
/src/parser.rs
|
UTF-8
| 646 | 3.40625 | 3 |
[] |
no_license
|
pub fn parse(code: Vec<String>) -> Vec<Vec<String>> {
let mut tokens = Vec::new();
for l in code.iter() {
let mut line_tokens = Vec::new();
        for c in l.chars() {
line_tokens.push(c.to_string());
}
tokens.push(line_tokens);
}
return tokens;
}
#[cfg(test)]
mod tests {
#[test]
fn parse_test() {
let input = vec!["3+3".to_string(), "#".to_string()];
let tokens = super::parse(input);
assert_eq!(tokens, vec![
vec!["3".to_string(), "+".to_string(), "3".to_string()],
vec!["#".to_string()]
]);
}
}
| true |
b02ae1605ed21b03abbe18bf87820409aea13238
|
Rust
|
dawidovsky/IIUWr
|
/Rust/L1/zad3/src/main.rs
|
UTF-8
| 699 | 3.59375 | 4 |
[] |
no_license
|
fn main() {}
fn square_area_to_circle(size:f64) -> f64 {
size/4.0 * std::f64::consts::PI
}
fn assert_close(a:f64, b:f64, epsilon:f64) {
    assert!((a - b).abs() < epsilon, "Expected: {}, got: {}", b, a);
}
#[test]
fn test1() {
assert_close(square_area_to_circle(9.0), 7.0685834705770345, 1e-8);
}
#[test]
fn test2() {
assert_close(square_area_to_circle(20.0), 15.70796326794897, 1e-8);
}
#[test]
fn test3() {
assert_close(square_area_to_circle(10.0), 7.853981633974483, 1e-8);
}
#[test]
fn test4() {
assert_close(square_area_to_circle(100.0), 78.53981633974483, 1e-8);
}
#[test]
fn test5() {
assert_close(square_area_to_circle(3.0), 2.356194490192345, 1e-8);
}
| true |
78c1d194178a323b16b35ec79e5c5ca4260f92d6
|
Rust
|
zandeck/Amalia_fork
|
/src/vertex_computation/compute.rs
|
UTF-8
| 2,523 | 2.5625 | 3 |
[] |
no_license
|
use md5::md5mesh::*;
use cgmath::{Vector3, InnerSpace};
use vertex_computation::convert::generate_indices;
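// Skins each vertex of an MD5 mesh: the final position of a vertex is the bias-weighted sum
// of its weights, each rotated by the owning joint's orientation and offset by that joint's
// position.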
pub fn prepare_mesh(m: &Mesh, v_joints: &Vec<Joint>) -> Vec<Vector3<f32>> {
let mut position_buffer : Vec<Vector3<f32>> = Vec::new();
for vertice in &m.vertices {
let mut new_vertice : Vector3<f32> = Vector3::new(0., 0., 0.);
for i in 0..vertice.weight_count {
let w : &Weight = &m.weights[ (vertice.start_weight + i) as usize];
let j : &Joint = &v_joints[ w.joint_index as usize] ;
let rot_position = j.orientation * w.position;
new_vertice += ( j.position + rot_position ) * w.bias;
}
// println!("{:?}", new_vertice);
position_buffer.push(new_vertice);
}
// println!("{:?}", position_buffer);
position_buffer
}
pub fn prepare_normals(m: &Mesh, vertices_position: &Vec<Vector3<f32>>) -> Vec<Vector3<f32>> {
let mut normal_buffer : Vec<Vector3<f32>> = vec![ Vector3::new(0., 0., 0.); vertices_position.len() ];
for t in &m.triangles {
let (i0, i1, i2) = t.vertex_indices;
let v0 = &vertices_position[ i0 as usize ];
let v1 = &vertices_position[ i1 as usize ];
let v2 = &vertices_position[ i2 as usize ];
//println!("v0 : {:?}", v0);
//println!("v1 : {:?}", v1);
//println!("v2 : {:?}", v2);
let cross_product = (v2 - v0).cross(v1 - v0);
//println!("Cross_product : {:?}", cross_product);
normal_buffer[ i0 as usize ] += cross_product;
normal_buffer[ i1 as usize ] += cross_product;
normal_buffer[ i2 as usize ] += cross_product;
}
// println!("{:?}", normal_buffer);
for i in 0..normal_buffer.len() {
normal_buffer[ i ] = normal_buffer[ i ].normalize();
}
normal_buffer
}
pub fn prepare_full_mesh(ms: &Md5Mesh) -> (Vec<Vector3<f32>>, Vec<Vector3<f32>>, Vec<u16>) {
let mut res_v : Vec<Vector3<f32>> = Vec::new();
let mut res_n : Vec<Vector3<f32>> = Vec::new();
let mut res_i : Vec<u16> = Vec::new();
for m in &ms.meshes {
let mut tmp = prepare_mesh(m, &ms.joints);
let mut tmp_normals = prepare_normals(m, &tmp);
let mut tmp_i = generate_indices(&m);
for i in 0..tmp_i.len() {
tmp_i[ i ] += res_v.len() as u16;
}
res_v.append(&mut tmp);
res_n.append(&mut tmp_normals);
res_i.append(&mut tmp_i);
}
(res_v, res_n, res_i)
}
| true |
6fd322a5e70567f4b0b3872b615a1353a399fc37
|
Rust
|
jsdelivrbot/euler_criterion.rs
|
/problems/013/013.rs.broken
|
UTF-8
| 976 | 2.671875 | 3 |
[
"MIT",
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
#![feature(slicing_syntax)]
extern crate num;
extern crate test;
extern crate time;
use num::bigint::BigUint;
use std::io::{File, stdio};
use std::iter::AdditiveIterator;
use std::os;
fn solution(input: &str) -> String {
input.
lines().
filter_map(|line| from_str::<BigUint>(line.trim())).
sum().
to_string()[0..10].
to_string()
}
fn main() {
let contents = File::open(&Path::new("013.txt")).read_to_string().unwrap();
let contents = contents[];
match os::args()[] {
[_, ref flag] if flag[] == "-a" => return println!("{}", solution(contents)),
_ => {},
}
for line in stdio::stdin().lines() {
let iters: u64 = from_str(line.unwrap()[].trim()).unwrap();
let start = time::precise_time_ns();
for _ in range(0, iters) {
test::black_box(solution(contents));
}
let end = time::precise_time_ns();
println!("{}", end - start);
}
}
| true |
c7ae8481a3aeadfc7ce5618e3a7a0a09207a1140
|
Rust
|
zjw1918/proj-rust-minigrep
|
/src/zjw_learn/smart_pointer.rs
|
UTF-8
| 6,570 | 3.140625 | 3 |
[] |
no_license
|
use self::List::{ Cons, Nil };
use self::ListRc::{ ConsRc, NilRc };
use self::ListRf::{ ConsRf, NilRf };
use self::List1::{ Cons1, Nil1 };
use std::ops::Deref;
use std::rc::{Rc, Weak};
use std::cell::RefCell;
pub fn run() {
// let list = Cons(1,
// Box::new(Cons(2,
// Box::new(Cons(3,
// Box::new(Nil))))));
// println!("{:?}", list);
// let x = 5;
// // let y = &x;
// let y = MyBox::new(x);
// assert_eq!(5, x);
// assert_eq!(5, *y);
    // deref: deref coercion
// let hello = |x: &str| println!("{}", x);
// let m = MyBox::new(String::from("Hello Rust"));
// hello(&m[1..4]);
// drop trait
// let c = CustomSmartPointer { data: String::from("my stuff") };
// let d = CustomSmartPointer { data: String::from("other stuff") };
    // drop(c); // c.drop() cannot be called directly; doing so would lead to a double free
// println!("CustomSmartPointers created.");
    // Rc, the reference-counting smart pointer
    // The following does not compile, because ownership of `a` has been moved
// let a = Cons(5, Box::new(Cons(10, Box::new(Nil))));
// let b = Cons(3, Box::new(a));
// let c = Cons(4, Box::new(a));
// let a = Rc::new(ConsRc(5, Rc::new(ConsRc(10, Rc::new(NilRc)))));
    // let b = ConsRc(3, Rc::clone(&a)); // Rc::clone only increments the reference count
// let c = ConsRc(4, Rc::clone(&a));
// println!("{:?} \n{:?} \n{:?}", a, b, c);
let a = RefCell::new([1,2,3,4]);
a.borrow_mut()[0] += 10;
println!("{:?}", a); // 对比下面
let a = RefCell::new(1);
*a.borrow_mut() += 10;
println!("{:?}", a);
    let value = Rc::new(RefCell::new(5)); // mutable, and can be shared
let a = Rc::new(ConsRf(Rc::clone(&value), Rc::new(NilRf)));
let b = ConsRf(Rc::new(RefCell::new(6)), Rc::clone(&a));
let c = ConsRf(Rc::new(RefCell::new(10)), Rc::clone(&a));
*value.borrow_mut() += 10;
println!("a after = {:?}", a);
println!("b after = {:?}", b);
println!("c after = {:?}", c);
    // Demo: reference cycles
let a = Rc::new(Cons1(5, RefCell::new(Rc::new(Nil1))));
println!("a counts {}", Rc::strong_count(&a));
println!("a tail {:?}", a.tail());
let b = Rc::new(Cons1(10, RefCell::new(Rc::clone(&a))));
println!("b counts {}", Rc::strong_count(&b));
println!("b tail {:?}", b.tail());
    // This creates a reference cycle; traversing the list now overflows the stack
if let Some(link) = a.tail() {
*link.borrow_mut() = Rc::clone(&b);
}
println!("b rc count after changing a = {}", Rc::strong_count(&b));
println!("a rc count after changing a = {}", Rc::strong_count(&a));
// println!("b tail {:?}", b.tail()); // 栈溢出
    // Demo: weak references
let leaf = Rc::new(Node {
value: 1,
parent: RefCell::new(Weak::new()),
children: RefCell::new(vec![]),
});
println!("leaf parent: {:?}", leaf.parent.borrow().upgrade());
let branch = Rc::new(Node {
value: 6,
parent: RefCell::new(Weak::new()),
children: RefCell::new(vec![Rc::clone(&leaf)]),
});
    *leaf.parent.borrow_mut() = Rc::downgrade(&branch); // downgrade produces a weak reference
{
        branch; // force branch to go out of scope (and be dropped) here
}
println!(
"leaf strong = {}, weak = {}",
Rc::strong_count(&leaf),
Rc::weak_count(&leaf),
);
println!("leaf parent: {:?}", leaf.parent.borrow().upgrade());
}
// Box is used to make the recursive type possible.
// Without Box the compiler could not tell how much space the type needs; a Box has a fixed
// size and stores its contents on the heap.
#[derive(Debug)]
enum List {
Cons(i32, Box<List>),
Nil,
}
// A cons list whose ownership can be shared: Rc is the reference-counted smart pointer.
#[derive(Debug)]
enum ListRc {
ConsRc(i32, Rc<ListRc>),
NilRc,
}
// RefCell variant
#[derive(Debug)]
enum ListRf {
    ConsRf(Rc<RefCell<i32>>, Rc<ListRf>), // a shared, mutable i32 field
// ConsRf(RefCell<i32>, Rc<ListRf>),
NilRf,
}
struct MyBox<T>(T);
impl<T> MyBox<T> {
fn new(x: T) -> MyBox<T> {
MyBox(x)
}
}
impl<T> Deref for MyBox<T> {
type Target = T;
fn deref(&self) -> &T {
&self.0
}
}
// drop trait
struct CustomSmartPointer {
data: String,
}
impl Drop for CustomSmartPointer {
fn drop(&mut self) {
println!("Dropping CustomSmartPointer with data `{}`!", self.data);
}
}
// RefCell and the interior mutability pattern
pub trait Messenger {
fn send(&self, msg: &str);
}
pub struct LimitTracker<'a, T: 'a + Messenger> {
messenger: &'a T,
value: usize,
max: usize,
}
impl<'a, T> LimitTracker<'a, T> where T: Messenger {
pub fn new(messenger: &T, max: usize) -> LimitTracker<T> {
LimitTracker {
messenger, value: 0, max
}
}
pub fn set_value(&mut self, value: usize) {
self.value = value;
let percentage_of_max = self.value as f64 / self.max as f64;
if percentage_of_max >= 0.75 && percentage_of_max < 0.9 {
self.messenger.send("Warning: You've used up over 75% of your quota!");
} else if percentage_of_max >= 0.9 && percentage_of_max < 1.0 {
self.messenger.send("Urgent warning: You've used up over 90% of your quota!");
} else if percentage_of_max >= 1.0 {
self.messenger.send("Error: You are over your quota!");
}
}
}
#[cfg(test)]
mod tests {
use super::*;
use std::cell::RefCell;
struct MockMessenger {
        messages: RefCell<Vec<String>>, // interior mutability pattern
}
impl MockMessenger {
fn new() -> MockMessenger {
MockMessenger {
messages: RefCell::new(vec![]),
}
}
}
impl Messenger for MockMessenger {
fn send(&self, msg: &str) {
self.messages.borrow_mut().push(String::from(msg));
}
}
#[test]
fn it_sends_an_over_75_percent_warning_message() {
let mock_messenger = MockMessenger::new();
        let mut limit_tracker = LimitTracker::new(&mock_messenger, 100);
        limit_tracker.set_value(80);
assert_eq!(mock_messenger.messages.borrow().len(), 1);
}
}
// Reference cycles can leak memory while still being memory safe.
#[derive(Debug)]
enum List1 {
Cons1(i32, RefCell<Rc<List1>>),
Nil1,
}
impl List1 {
fn tail(&self) -> Option<&RefCell<Rc<List1>>> {
match *self {
Cons1(_, ref item) => Some(item),
Nil1 => None,
}
}
}
// Weak<T>: parent and child links of a tree node
#[derive(Debug)]
struct Node {
value: i32,
parent: RefCell<Weak<Node>>,
children: RefCell<Vec<Rc<Node>>>,
}
| true |
7bd3442a0b07eee76e6c15ce793d1921bf03cacc
|
Rust
|
jcdyer/rawbson
|
/src/lib.rs
|
UTF-8
| 64,002 | 3.46875 | 3 |
[
"MIT"
] |
permissive
|
/*!
A rawbson document can be created from a `Vec<u8>` containing raw BSON data, and elements
accessed via methods similar to those in the [bson-rust](https://crates.io/crate/bson-rust)
crate. Note that rawbson returns a Result<Option<T>>, since the bytes contained in the
document are not fully validated until trying to access the contained data.
```rust
use rawbson::{
DocBuf,
elem,
};
// \x13\x00\x00\x00 // total document size
// \x02 // 0x02 = type String
// hi\x00 // field name
// \x06\x00\x00\x00y'all\x00 // field value
// \x00 // document terminating NUL
let doc = DocBuf::new(b"\x13\x00\x00\x00\x02hi\x00\x06\x00\x00\x00y'all\x00\x00".to_vec())?;
let elem: Option<elem::Element> = doc.get("hi")?;
assert_eq!(
elem.unwrap().as_str()?,
"y'all",
);
# Ok::<(), rawbson::RawError>(())
```
### bson-rust interop
This crate is designed to interoperate smoothly with the bson crate.
A [`DocBuf`] can be created from a [`bson::document::Document`]. Internally, this
serializes the `Document` to a `Vec<u8>`, and then includes those bytes in the [`DocBuf`].
```rust
use bson::doc;
use rawbson::{
DocBuf,
};
let document = doc!{"goodbye": {"cruel": "world"}};
let raw = DocBuf::from_document(&document);
let value: Option<&str> = raw.get_document("goodbye")?
.map(|doc| doc.get_str("cruel"))
.transpose()?
.flatten();
assert_eq!(
value,
Some("world"),
);
# Ok::<(), rawbson::RawError>(())
```
### Reference types
A BSON document can also be accessed with the [`Doc`] reference type,
which is an unsized type that represents the BSON payload as a `[u8]`.
This allows accessing nested documents without reallocation. [Doc]
must always be accessed via a pointer type, similarly to `[T]` and `str`.
This type will coexist with the now deprecated [DocRef] type for at
least one minor release.
The below example constructs a bson document in a stack-based array,
and extracts a &str from it, performing no heap allocation.
```rust
use rawbson::Doc;
let bytes = b"\x13\x00\x00\x00\x02hi\x00\x06\x00\x00\x00y'all\x00\x00";
assert_eq!(Doc::new(bytes)?.get_str("hi")?, Some("y'all"));
# Ok::<(), rawbson::RawError>(())
```
### Iteration
[`Doc`] implements [`IntoIterator`](std::iter::IntoIterator), which can also
be accessed via [`DocBuf::iter`].
```rust
use bson::doc;
use rawbson::{DocBuf, elem::Element};
let doc = DocBuf::from_document(&doc! {"crate": "rawbson", "license": "MIT"});
let mut dociter = doc.iter();
let (key, value): (&str, Element) = dociter.next().unwrap()?;
assert_eq!(key, "crate");
assert_eq!(value.as_str()?, "rawbson");
let (key, value): (&str, Element) = dociter.next().unwrap()?;
assert_eq!(key, "license");
assert_eq!(value.as_str()?, "MIT");
# Ok::<(), rawbson::RawError>(())
```
### serde support
There is also serde deserialization support.
Serde serialization support is not yet provided. For now, use
[`bson::to_document`] instead, and then serialize it out using
[`bson::Document::to_writer`] or [`DocBuf::from_document`].
```rust
use serde::Deserialize;
use bson::{doc, Document, oid::ObjectId, DateTime};
use rawbson::{DocBuf, de::from_docbuf};
#[derive(Deserialize)]
#[serde(rename_all="camelCase")]
struct User {
#[serde(rename = "_id")]
id: ObjectId,
first_name: String,
last_name: String,
birthdate: Option<chrono::DateTime<chrono::Utc>>,
#[serde(flatten)]
extra: Document,
}
let doc = DocBuf::from_document(&doc!{
"_id": ObjectId::with_string("543254325432543254325432")?,
"firstName": "John",
"lastName": "Doe",
"birthdate": null,
"luckyNumbers": [3, 60, 2147483647],
"nickname": "Red",
});
let user: User = from_docbuf(&doc)?;
assert_eq!(user.id.to_hex(), "543254325432543254325432");
assert_eq!(user.first_name, "John");
assert_eq!(user.last_name, "Doe");
assert_eq!(user.extra.get_str("nickname")?, "Red");
assert!(user.birthdate.is_none());
# Ok::<(), Box<dyn std::error::Error>>(())
```
*/
use std::{
borrow::Borrow,
convert::{TryFrom, TryInto},
ops::Deref,
};
use chrono::{DateTime, Utc};
use bson::{decimal128::Decimal128, document::ValueAccessError, oid, spec::ElementType, Bson};
pub mod de;
pub mod elem;
#[cfg(test)]
mod props;
/// Error to indicate that either a value was empty or it contained an unexpected
/// type, for use with the direct getters.
#[derive(Debug, PartialEq)]
pub enum RawError {
/// Found a Bson value with the specified key, but not with the expected type
UnexpectedType,
/// The found value was not well-formed
MalformedValue(String),
/// Found a value where a utf-8 string was expected, but it was not valid
/// utf-8. The error value contains the malformed data as a string.
Utf8EncodingError(Vec<u8>),
}
impl std::fmt::Display for RawError {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
use RawError::*;
match self {
UnexpectedType => write!(f, "unexpected type"),
MalformedValue(s) => write!(f, "malformed value: {:?}", s),
Utf8EncodingError(_) => write!(f, "utf-8 encoding error"),
}
}
}
impl std::error::Error for RawError {}
pub type RawResult<T> = Result<T, RawError>;
type OptResult<T> = RawResult<Option<T>>;
impl<'a> From<RawError> for ValueAccessError {
fn from(src: RawError) -> ValueAccessError {
match src {
RawError::UnexpectedType => ValueAccessError::UnexpectedType,
RawError::MalformedValue(_) => ValueAccessError::UnexpectedType,
RawError::Utf8EncodingError(_) => ValueAccessError::UnexpectedType,
}
}
}
impl<'a> From<ValueAccessError> for RawError {
fn from(src: ValueAccessError) -> RawError {
match src {
ValueAccessError::NotPresent => unreachable!("This should be converted to an Option"),
ValueAccessError::UnexpectedType => RawError::UnexpectedType,
_ => RawError::UnexpectedType,
}
}
}
/// A BSON document, stored as raw binary data on the heap. This can be created from
/// a `Vec<u8>` or a [`bson::Document`].
///
/// Accessing elements within the `DocBuf` is similar to element access in [bson::Document],
/// but as the contents are parsed during iteration, instead of at creation time, format
/// errors can happen at any time during use, instead of at creation time.
///
/// DocBuf can be iterated over, yielding a Result of key-value pairs that
/// borrow from the DocBuf rather than allocating.
///
/// ```
/// # use rawbson::{DocBuf, RawError};
/// let docbuf = DocBuf::new(b"\x13\x00\x00\x00\x02hi\x00\x06\x00\x00\x00y'all\x00\x00".to_vec())?;
/// let mut iter = docbuf.iter();
/// let (key, value) = iter.next().unwrap()?;
/// assert_eq!(key, "hi");
/// assert_eq!(value.as_str(), Ok("y'all"));
/// assert!(iter.next().is_none());
/// # Ok::<(), RawError>(())
/// ```
///
/// Individual elements can be accessed using [`docbuf.get(&key)`](Doc::get), or any of
/// the `get_*` methods, like [`docbuf.get_object_id(&key)`](Doc::get_object_id), and
/// [`docbuf.get_str(&str)`](Doc::get_str). Accessing elements is an O(N) operation,
/// as it requires iterating through the document from the beginning to find the requested
/// key.
///
/// ```
/// # use rawbson::{DocBuf, RawError};
/// let docbuf = DocBuf::new(b"\x13\x00\x00\x00\x02hi\x00\x06\x00\x00\x00y'all\x00\x00".to_vec())?;
/// assert_eq!(docbuf.get_str("hi")?, Some("y'all"));
/// # Ok::<(), RawError>(())
/// ```
#[derive(Clone, Debug)]
pub struct DocBuf {
data: Box<[u8]>,
}
impl DocBuf {
/// Create a new `DocBuf` from the provided `Vec`.
///
/// The data is checked for a declared length equal to the length of the Vec,
/// and a trailing NUL byte. Other validation is deferred to access time.
///
/// ```
/// # use rawbson::{DocBuf, RawError};
/// let docbuf: DocBuf = DocBuf::new(b"\x05\0\0\0\0".to_vec())?;
/// # Ok::<(), RawError>(())
/// ```
pub fn new(data: Vec<u8>) -> RawResult<DocBuf> {
if data.len() < 5 {
return Err(RawError::MalformedValue("document too short".into()));
}
let length = i32_from_slice(&data[..4]);
if data.len() as i32 != length {
return Err(RawError::MalformedValue("document length incorrect".into()));
}
if data[data.len() - 1] != 0 {
return Err(RawError::MalformedValue(
"document not null-terminated".into(),
));
}
Ok(unsafe { DocBuf::new_unchecked(data) })
}
/// Create a DocBuf from a [bson::Document].
///
/// ```
/// # use rawbson::{DocBuf, RawError};
/// use bson::{doc, oid};
/// let document = doc! {
/// "_id": oid::ObjectId::new(),
/// "name": "Herman Melville",
/// "title": "Moby-Dick",
/// };
/// let docbuf: DocBuf = DocBuf::from_document(&document);
/// # Ok::<(), RawError>(())
/// ```
pub fn from_document(doc: &bson::Document) -> DocBuf {
let mut data = Vec::new();
doc.to_writer(&mut data).unwrap();
unsafe { DocBuf::new_unchecked(data) }
}
/// Create a DocBuf from an owned Vec<u8> without performing any checks on the provided data.
///
/// ```
/// # use rawbson::{DocBuf, RawError};
/// let docbuf: DocBuf = unsafe {
/// DocBuf::new_unchecked(b"\x05\0\0\0\0".to_vec())
/// };
/// # Ok::<(), RawError>(())
/// ```
///
/// # Safety
///
/// The provided bytes must have a valid length marker, and be NUL terminated.
pub unsafe fn new_unchecked(data: Vec<u8>) -> DocBuf {
DocBuf {
data: data.into_boxed_slice(),
}
}
/// Return a [`&Doc`](Doc) borrowing from the data contained in self.
///
/// # Deprecation
///
/// DocRef is now a deprecated type alias for [Doc]. DocBuf can
/// dereference to &Doc directly, or be converted using [AsRef::as_ref],
/// so this function is unnecessary.
///
/// ```
/// # use rawbson::{DocBuf, DocRef, RawError};
/// let docbuf = DocBuf::new(b"\x13\x00\x00\x00\x02hi\x00\x06\x00\x00\x00y'all\x00\x00".to_vec())?;
/// let docref: DocRef = docbuf.as_docref();
/// # Ok::<(), RawError>(())
/// ```
#[deprecated(since = "0.2.0", note = "use docbuf.as_ref() instead")]
pub fn as_docref(&self) -> &Doc {
self.as_ref()
}
/// Return an iterator over the elements in the `DocBuf`, borrowing data.
///
/// The associated item type is `RawResult<(&str, Element<'_>)>`. An error is
/// returned if the data is malformed.
///
/// ```
/// # use rawbson::{elem, DocBuf, RawError};
/// use bson::doc;
/// let docbuf = DocBuf::from_document(&doc! { "ferris": true });
/// for element in docbuf.iter() {
/// let (key, value): (&str, elem::Element) = element?;
/// assert_eq!(key, "ferris");
/// assert_eq!(value.as_bool()?, true);
/// }
/// # Ok::<(), RawError>(())
/// ```
///
/// # Note:
///
/// There is no owning iterator for DocBuf. If you need ownership over
/// elements that might need to allocate, you must explicitly convert
/// them to owned types yourself.
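///
/// Collecting owned keys, for example, might look like this (a sketch):
///
/// ```
/// # use rawbson::{DocBuf, RawError};
/// use bson::doc;
/// let docbuf = DocBuf::from_document(&doc! { "a": 1, "b": 2 });
/// let keys = docbuf.iter()
/// .map(|kv| kv.map(|(key, _)| key.to_owned()))
/// .collect::<Result<Vec<String>, RawError>>()?;
/// assert_eq!(keys, vec!["a", "b"]);
/// # Ok::<(), RawError>(())
/// ```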
pub fn iter(&self) -> DocIter<'_> {
self.into_iter()
}
/// Return the contained data as a `Vec<u8>`
///
/// ```
/// # use rawbson::DocBuf;
/// use bson::doc;
/// let docbuf = DocBuf::from_document(&doc!{});
/// assert_eq!(docbuf.into_inner(), b"\x05\x00\x00\x00\x00".to_vec());
/// ```
pub fn into_inner(self) -> Vec<u8> {
self.data.to_vec()
}
}
impl TryFrom<DocBuf> for bson::Document {
type Error = RawError;
fn try_from(rawdoc: DocBuf) -> RawResult<bson::Document> {
bson::Document::try_from(rawdoc.as_ref())
}
}
impl<'a> IntoIterator for &'a DocBuf {
type IntoIter = DocIter<'a>;
type Item = RawResult<(&'a str, elem::Element<'a>)>;
fn into_iter(self) -> DocIter<'a> {
DocIter {
doc: &self,
offset: 4,
}
}
}
impl AsRef<Doc> for DocBuf {
fn as_ref(&self) -> &Doc {
// SAFETY: Constructing the DocBuf checks the envelope validity of the BSON document.
unsafe { Doc::new_unchecked(&self.data) }
}
}
impl Borrow<Doc> for DocBuf {
fn borrow(&self) -> &Doc {
&*self
}
}
impl ToOwned for Doc {
type Owned = DocBuf;
fn to_owned(&self) -> Self::Owned {
self.to_docbuf()
}
}
/// A BSON document, referencing raw binary data stored elsewhere. This can be created from
/// a [DocBuf] or any type that contains valid BSON data, and can be referenced as a `[u8]`,
/// including static binary literals, [Vec<u8>](std::vec::Vec), or arrays.
///
/// Accessing elements within the `Doc` is similar to element access in [bson::Document],
/// but as the contents are parsed during iteration, instead of at creation time, format
/// errors can happen at any time during use, instead of at creation time.
///
/// Doc can be iterated over, yielding a Result of key-value pairs that borrow
/// from the source bytes rather than allocating.
///
/// ```
/// # use rawbson::{Doc, RawError};
/// let doc = Doc::new(b"\x13\x00\x00\x00\x02hi\x00\x06\x00\x00\x00y'all\x00\x00")?;
/// let mut iter = doc.into_iter();
/// let (key, value) = iter.next().unwrap()?;
/// assert_eq!(key, "hi");
/// assert_eq!(value.as_str(), Ok("y'all"));
/// assert!(iter.next().is_none());
/// # Ok::<(), RawError>(())
/// ```
///
/// Individual elements can be accessed using [`doc.get(&key)`](Doc::get), or any of
/// the `get_*` methods, like [`doc.get_object_id(&key)`](Doc::get_object_id), and
/// [`doc.get_str(&str)`](Doc::get_str). Accessing elements is an O(N) operation,
/// as it requires iterating through the document from the beginning to find the requested
/// key.
///
/// ```
/// # use rawbson::{DocBuf, RawError};
/// let docbuf = DocBuf::new(b"\x13\x00\x00\x00\x02hi\x00\x06\x00\x00\x00y'all\x00\x00".to_vec())?;
/// assert_eq!(docbuf.get_str("hi")?, Some("y'all"));
/// # Ok::<(), RawError>(())
/// ```
#[derive(Debug)]
pub struct Doc {
data: [u8],
}
impl Doc {
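/// Create a new `&Doc` referencing the provided bytes.
///
/// The bytes are checked for a declared length equal to the slice length and for
/// a trailing NUL byte; all other validation is deferred to access time.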
pub fn new<D: AsRef<[u8]> + ?Sized>(data: &D) -> RawResult<&Doc> {
let data = data.as_ref();
if data.len() < 5 {
return Err(RawError::MalformedValue("document too short".into()));
}
let length = i32_from_slice(&data[..4]);
if data.len() as i32 != length {
return Err(RawError::MalformedValue("document length incorrect".into()));
}
if data[data.len() - 1] != 0 {
return Err(RawError::MalformedValue(
"document not null-terminated".into(),
));
}
Ok(unsafe { Doc::new_unchecked(data) })
}
/// Create a new Doc referencing the provided data slice.
///
/// # Safety
///
/// The provided data must begin with a valid size
/// and end with a NUL-terminator.
///
/// ```
/// # use rawbson::{Doc, RawError};
/// let doc: &Doc = unsafe { Doc::new_unchecked(b"\x05\0\0\0\0") };
/// ```
pub unsafe fn new_unchecked<D: AsRef<[u8]> + ?Sized>(data: &D) -> &Doc {
#[allow(unused_unsafe)]
unsafe {
&*(data.as_ref() as *const [u8] as *const Doc)
}
}
/// Create a new DocBuf with an owned copy of the data in self.
///
/// ```
/// # use rawbson::{Doc, RawError};
/// use rawbson::DocBuf;
/// let data = b"\x05\0\0\0\0";
/// let doc = Doc::new(data)?;
/// let docbuf: DocBuf = doc.to_docbuf();
/// # Ok::<(), RawError>(())
/// ```
pub fn to_docbuf(&self) -> DocBuf {
// SAFETY: The validity of the data is checked by self.
unsafe { DocBuf::new_unchecked(self.data.to_owned()) }
}
/// Get an element from the document. Finding a particular key requires
/// iterating over the document from the beginning, so this is an O(N)
/// operation.
///
/// Returns an error if the document is malformed. Returns `Ok(None)`
/// if the key is not found in the document.
///
/// ```
/// # use rawbson::{DocBuf, elem::Element, RawError};
/// use bson::{doc, oid::ObjectId};
/// let docbuf = DocBuf::from_document(&doc! {
/// "_id": ObjectId::new(),
/// "f64": 2.5,
/// });
/// let element = docbuf.get("f64")?.expect("finding key f64");
/// assert_eq!(element.as_f64(), Ok(2.5));
/// assert!(docbuf.get("unknown")?.is_none());
/// # Ok::<(), RawError>(())
/// ```
pub fn get<'a>(&'a self, key: &str) -> OptResult<elem::Element<'a>> {
for result in self.into_iter() {
let (thiskey, bson) = result?;
if thiskey == key {
return Ok(Some(bson));
}
}
Ok(None)
}
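/// Look up `key` and, if an element is found, convert it with `f`.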
fn get_with<'a, T>(
&'a self,
key: &str,
f: impl FnOnce(elem::Element<'a>) -> RawResult<T>,
) -> OptResult<T> {
self.get(key)?.map(f).transpose()
}
/// Get an element from the document, and convert it to f64.
///
/// Returns an error if the document is malformed, or if the retrieved value
/// is not an f64. Returns `Ok(None)` if the key is not found in the document.
///
/// ```
/// # use rawbson::{DocBuf, elem::Element, RawError};
/// use bson::doc;
/// let docbuf = DocBuf::from_document(&doc! {
/// "bool": true,
/// "f64": 2.5,
/// });
/// assert_eq!(docbuf.get_f64("f64"), Ok(Some(2.5)));
/// assert_eq!(docbuf.get_f64("bool"), Err(RawError::UnexpectedType));
/// assert_eq!(docbuf.get_f64("unknown"), Ok(None));
/// # Ok::<(), RawError>(())
/// ```
pub fn get_f64(&self, key: &str) -> OptResult<f64> {
self.get_with(key, elem::Element::as_f64)
}
/// Get an element from the document, and convert it to a &str.
///
/// The returned &str is a borrowed reference into the DocBuf. To use it
/// beyond the lifetime of self, call `to_owned()` on it.
///
/// Returns an error if the document is malformed or if the retrieved value
/// is not a string. Returns `Ok(None)` if the key is not found in the
/// document.
///
/// ```
/// # use rawbson::{DocBuf, elem::Element, RawError};
/// use bson::doc;
/// let docbuf = DocBuf::from_document(&doc! {
/// "string": "hello",
/// "bool": true,
/// });
/// assert_eq!(docbuf.get_str("string"), Ok(Some("hello")));
/// assert_eq!(docbuf.get_str("bool"), Err(RawError::UnexpectedType));
/// assert_eq!(docbuf.get_str("unknown"), Ok(None));
/// # Ok::<(), RawError>(())
/// ```
pub fn get_str<'a>(&'a self, key: &str) -> OptResult<&'a str> {
self.get_with(key, elem::Element::as_str)
}
/// Get an element from the document, and convert it to a [Doc].
///
/// The returned [Doc] is a borrowed reference into self. To use it
/// beyond the lifetime of self, call to_owned() on it.
///
/// Returns an error if the document is malformed or if the retrieved value
/// is not a document. Returns `Ok(None)` if the key is not found in the
/// document.
///
/// ```
/// # use rawbson::{DocBuf, elem::Element, RawError};
/// use bson::doc;
/// let docbuf = DocBuf::from_document(&doc! {
/// "doc": { "key": "value"},
/// "bool": true,
/// });
/// assert_eq!(docbuf.get_document("doc")?.expect("finding key doc").get_str("key"), Ok(Some("value")));
/// assert_eq!(docbuf.get_document("bool").unwrap_err(), RawError::UnexpectedType);
/// assert!(docbuf.get_document("unknown")?.is_none());
/// # Ok::<(), RawError>(())
/// ```
pub fn get_document<'a>(&'a self, key: &str) -> OptResult<&'a Doc> {
self.get_with(key, elem::Element::as_document)
}
/// Get an element from the document, and convert it to an [ArrayRef].
///
/// The returned [ArrayRef] is a borrowed reference into the DocBuf.
///
/// Returns an error if the document is malformed or if the retrieved value
/// is not an array. Returns `Ok(None)` if the key is not found in the
/// document.
///
/// ```
/// # use rawbson::{DocBuf, elem::Element, RawError};
/// use bson::doc;
/// let docbuf = DocBuf::from_document(&doc! {
/// "array": [true, 3, null],
/// "bool": true,
/// });
/// let mut arriter = docbuf.get_array("array")?.expect("finding key array").into_iter();
/// let _: bool = arriter.next().unwrap()?.as_bool()?;
/// let _: i32 = arriter.next().unwrap()?.as_i32()?;
/// let () = arriter.next().unwrap()?.as_null()?;
/// assert!(arriter.next().is_none());
/// assert!(docbuf.get_array("bool").is_err());
/// assert!(docbuf.get_array("unknown")?.is_none());
/// # Ok::<(), RawError>(())
/// ```
pub fn get_array<'a>(&'a self, key: &str) -> OptResult<&'a Array> {
self.get_with(key, elem::Element::as_array)
}
/// Get an element from the document, and convert it to an [elem::RawBsonBinary].
///
/// The returned [RawBsonBinary](elem::RawBsonBinary) is a borrowed reference into the DocBuf.
///
/// Returns an error if the document is malformed or if the retrieved value
/// is not binary data. Returns `Ok(None)` if the key is not found in the
/// document.
///
/// ```
/// # use rawbson::{DocBuf, elem, RawError};
/// use bson::{doc, Binary, spec::BinarySubtype};
/// let docbuf = DocBuf::from_document(&doc! {
/// "binary": Binary { subtype: BinarySubtype::Generic, bytes: vec![1, 2, 3] },
/// "bool": true,
/// });
/// assert_eq!(docbuf.get_binary("binary")?.map(elem::RawBsonBinary::as_bytes), Some(&[1, 2, 3][..]));
/// assert_eq!(docbuf.get_binary("bool").unwrap_err(), RawError::UnexpectedType);
/// assert!(docbuf.get_binary("unknown")?.is_none());
/// # Ok::<(), RawError>(())
/// ```
pub fn get_binary<'a>(&'a self, key: &str) -> OptResult<elem::RawBsonBinary<'a>> {
self.get_with(key, elem::Element::as_binary)
}
/// Get an element from the document, and convert it to a [bson::oid::ObjectId].
///
/// Returns an error if the document is malformed or if the retrieved value
/// is not an object ID. Returns `Ok(None)` if the key is not found in the
/// document.
///
/// ```
/// # use rawbson::{DocBuf, RawError};
/// use bson::{doc, oid::ObjectId};
/// let docbuf = DocBuf::from_document(&doc! {
/// "_id": ObjectId::new(),
/// "bool": true,
/// });
/// let _: ObjectId = docbuf.get_object_id("_id")?.unwrap();
/// assert_eq!(docbuf.get_object_id("bool").unwrap_err(), RawError::UnexpectedType);
/// assert!(docbuf.get_object_id("unknown")?.is_none());
/// # Ok::<(), RawError>(())
/// ```
pub fn get_object_id(&self, key: &str) -> OptResult<oid::ObjectId> {
self.get_with(key, elem::Element::as_object_id)
}
/// Get an element from the document, and convert it to a [bool].
///
/// Returns an error if the document is malformed or if the retrieved value
/// is not a boolean. Returns `Ok(None)` if the key is not found in the
/// document.
///
/// ```
/// # use rawbson::{DocBuf, RawError};
/// use bson::{doc, oid::ObjectId};
/// let docbuf = DocBuf::from_document(&doc! {
/// "_id": ObjectId::new(),
/// "bool": true,
/// });
/// assert!(docbuf.get_bool("bool")?.unwrap());
/// assert_eq!(docbuf.get_bool("_id").unwrap_err(), RawError::UnexpectedType);
/// assert!(docbuf.get_object_id("unknown")?.is_none());
/// # Ok::<(), RawError>(())
/// ```
pub fn get_bool(&self, key: &str) -> OptResult<bool> {
self.get_with(key, elem::Element::as_bool)
}
/// Get an element from the document, and convert it to a [chrono::DateTime].
///
/// Returns an error if the document is malformed or if the retrieved value
/// is not a datetime. Returns `Ok(None)` if the key is not found in the
/// document.
///
/// ```
/// # use rawbson::{DocBuf, RawError};
/// use bson::doc;
/// use chrono::{Utc, Datelike, TimeZone};
/// let docbuf = DocBuf::from_document(&doc! {
/// "created_at": Utc.ymd(2020, 3, 15).and_hms(17, 0, 0),
/// "bool": true,
/// });
/// assert_eq!(docbuf.get_datetime("created_at")?.unwrap().year(), 2020);
/// assert_eq!(docbuf.get_datetime("bool").unwrap_err(), RawError::UnexpectedType);
/// assert!(docbuf.get_datetime("unknown")?.is_none());
/// # Ok::<(), RawError>(())
/// ```
pub fn get_datetime(&self, key: &str) -> OptResult<DateTime<Utc>> {
self.get_with(key, elem::Element::as_datetime)
}
/// Get an element from the document, and convert it to the `()` type.
///
/// Returns an error if the document is malformed or if the retrieved value
/// is not null. Returns `Ok(None)` if the key is not found in the
/// document.
///
/// There is not much reason to use the () value, so this method mostly
/// exists for consistency with other element types, and as a way to assert
/// type of the element.
/// ```
/// # use rawbson::{DocBuf, RawError};
/// use bson::doc;
/// let docbuf = DocBuf::from_document(&doc! {
/// "null": null,
/// "bool": true,
/// });
/// docbuf.get_null("null")?.unwrap();
/// assert_eq!(docbuf.get_null("bool").unwrap_err(), RawError::UnexpectedType);
/// assert!(docbuf.get_null("unknown")?.is_none());
/// # Ok::<(), RawError>(())
/// ```
pub fn get_null(&self, key: &str) -> OptResult<()> {
self.get_with(key, elem::Element::as_null)
}
/// Get an element from the document, and convert it to an [elem::RawBsonRegex].
///
/// The [RawBsonRegex](elem::RawBsonRegex) borrows data from the DocBuf.
///
/// Returns an error if the document is malformed or if the retrieved value
/// is not a regex. Returns `Ok(None)` if the key is not found in the
/// document.
/// ```
/// # use rawbson::{DocBuf, RawError, elem};
/// use bson::{doc, Regex};
/// let docbuf = DocBuf::from_document(&doc! {
/// "regex": Regex {
/// pattern: String::from(r"end\s*$"),
/// options: String::from("i"),
/// },
/// "bool": true,
/// });
/// assert_eq!(docbuf.get_regex("regex")?.unwrap().pattern(), r"end\s*$");
/// assert_eq!(docbuf.get_regex("regex")?.unwrap().options(), "i");
/// assert_eq!(docbuf.get_regex("bool").unwrap_err(), RawError::UnexpectedType);
/// assert!(docbuf.get_regex("unknown")?.is_none());
/// # Ok::<(), RawError>(())
/// ```
pub fn get_regex<'a>(&'a self, key: &str) -> OptResult<elem::RawBsonRegex<'a>> {
self.get_with(key, elem::Element::as_regex)
}
/// Get an element from the document, and convert it to an &str representing the
/// javascript element type.
///
/// The &str borrows data from the DocBuf. If you need an owned copy of the data,
/// you should call .to_owned() on the result.
///
/// Returns an error if the document is malformed or if the retrieved value
/// is not a javascript code object. Returns `Ok(None)` if the key is not found
/// in the document.
/// ```
/// # use rawbson::{DocBuf, RawError, elem};
/// use bson::{doc, Bson};
/// let docbuf = DocBuf::from_document(&doc! {
/// "js": Bson::JavaScriptCode(String::from("console.log(\"hi y'all\");")),
/// "bool": true,
/// });
/// assert_eq!(docbuf.get_javascript("js")?, Some("console.log(\"hi y'all\");"));
/// assert_eq!(docbuf.get_javascript("bool").unwrap_err(), RawError::UnexpectedType);
/// assert!(docbuf.get_javascript("unknown")?.is_none());
/// # Ok::<(), RawError>(())
/// ```
pub fn get_javascript<'a>(&'a self, key: &str) -> OptResult<&'a str> {
self.get_with(key, elem::Element::as_javascript)
}
/// Get an element from the document, and convert it to an &str representing the
/// symbol element type.
///
/// The &str borrows data from the DocBuf. If you need an owned copy of the data,
/// you should call .to_owned() on the result.
///
/// Returns an error if the document is malformed or if the retrieved value
/// is not a symbol object. Returns `Ok(None)` if the key is not found
/// in the document.
/// ```
/// # use rawbson::{DocBuf, RawError, elem};
/// use bson::{doc, Bson};
/// let docbuf = DocBuf::from_document(&doc! {
/// "symbol": Bson::Symbol(String::from("internal")),
/// "bool": true,
/// });
/// assert_eq!(docbuf.get_symbol("symbol")?, Some("internal"));
/// assert_eq!(docbuf.get_symbol("bool").unwrap_err(), RawError::UnexpectedType);
/// assert!(docbuf.get_symbol("unknown")?.is_none());
/// # Ok::<(), RawError>(())
/// ```
pub fn get_symbol<'a>(&'a self, key: &str) -> OptResult<&'a str> {
self.get_with(key, elem::Element::as_symbol)
}
/// Get an element from the document, and extract the data as a javascript code with scope.
///
/// The return value is a `(&str, &Doc)` where the &str represents the javascript code,
/// and the [`&Doc`](Doc) represents the scope. Both elements borrow data from the DocBuf.
/// If you need an owned copy of the data, you should call [js.to_owned()](ToOwned::to_owned) on
/// the code or [scope.to_docbuf()](Doc::to_docbuf) on the scope.
///
/// Returns an error if the document is malformed or if the retrieved value
/// is not a javascript code with scope object. Returns `Ok(None)` if the key is not found
/// in the document.
/// ```
/// # use rawbson::{DocBuf, RawError, elem};
/// use bson::{doc, JavaScriptCodeWithScope};
/// let docbuf = DocBuf::from_document(&doc! {
/// "js": JavaScriptCodeWithScope {
/// code: String::from("console.log(\"i:\", i);"),
/// scope: doc!{"i": 42},
/// },
/// "bool": true,
/// });
/// let (js, scope) = docbuf.get_javascript_with_scope("js")?.unwrap();
/// assert_eq!(js, "console.log(\"i:\", i);");
/// assert_eq!(scope.get_i32("i")?.unwrap(), 42);
/// assert_eq!(docbuf.get_javascript_with_scope("bool").unwrap_err(), RawError::UnexpectedType);
/// assert!(docbuf.get_javascript_with_scope("unknown")?.is_none());
/// # Ok::<(), RawError>(())
/// ```
pub fn get_javascript_with_scope<'a>(&'a self, key: &str) -> OptResult<(&'a str, &'a Doc)> {
self.get_with(key, elem::Element::as_javascript_with_scope)
}
/// Get an element from the document, and convert it to i32.
///
/// Returns an error if the document is malformed, or if the retrieved value
/// is not an i32. Returns `Ok(None)` if the key is not found in the document.
///
/// ```
/// # use rawbson::{DocBuf, RawError};
/// use bson::doc;
/// let docbuf = DocBuf::from_document(&doc! {
/// "bool": true,
/// "i32": 1_000_000,
/// });
/// assert_eq!(docbuf.get_i32("i32"), Ok(Some(1_000_000)));
/// assert_eq!(docbuf.get_i32("bool"), Err(RawError::UnexpectedType));
/// assert_eq!(docbuf.get_i32("unknown"), Ok(None));
/// # Ok::<(), RawError>(())
/// ```
pub fn get_i32(&self, key: &str) -> OptResult<i32> {
self.get_with(key, elem::Element::as_i32)
}
/// Get an element from the document, and convert it to a timestamp.
///
/// Returns an error if the document is malformed, or if the retrieved value
/// is not a timestamp. Returns `Ok(None)` if the key is not found in the document.
///
/// ```
/// # use rawbson::{DocBuf, elem, RawError};
/// use bson::{doc, Timestamp};
/// let docbuf = DocBuf::from_document(&doc! {
/// "bool": true,
/// "ts": Timestamp { time: 649876543, increment: 9 },
/// });
/// let timestamp = docbuf.get_timestamp("ts")?.unwrap();
///
/// assert_eq!(timestamp.time(), 649876543);
/// assert_eq!(timestamp.increment(), 9);
/// assert_eq!(docbuf.get_timestamp("bool"), Err(RawError::UnexpectedType));
/// assert_eq!(docbuf.get_timestamp("unknown"), Ok(None));
/// # Ok::<(), RawError>(())
/// ```
pub fn get_timestamp<'a>(&'a self, key: &str) -> OptResult<elem::RawBsonTimestamp<'a>> {
self.get_with(key, elem::Element::as_timestamp)
}
/// Get an element from the document, and convert it to i64.
///
/// Returns an error if the document is malformed, or if the retrieved value
/// is not an i64. Returns `Ok(None)` if the key is not found in the document.
///
/// ```
/// # use rawbson::{DocBuf, elem::Element, RawError};
/// use bson::doc;
/// let docbuf = DocBuf::from_document(&doc! {
/// "bool": true,
/// "i64": 9223372036854775807_i64,
/// });
/// assert_eq!(docbuf.get_i64("i64"), Ok(Some(9223372036854775807)));
/// assert_eq!(docbuf.get_i64("bool"), Err(RawError::UnexpectedType));
/// assert_eq!(docbuf.get_i64("unknown"), Ok(None));
/// # Ok::<(), RawError>(())
/// ```
pub fn get_i64(&self, key: &str) -> OptResult<i64> {
self.get_with(key, elem::Element::as_i64)
}
/// Return a reference to the contained data as a `&[u8]`
///
/// ```
/// # use rawbson::DocBuf;
/// use bson::doc;
/// let docbuf = DocBuf::from_document(&doc!{});
/// assert_eq!(docbuf.as_bytes(), b"\x05\x00\x00\x00\x00");
/// ```
pub fn as_bytes(&self) -> &[u8] {
&self.data
}
}
impl AsRef<Doc> for Doc {
fn as_ref(&self) -> &Doc {
self
}
}
impl Deref for DocBuf {
type Target = Doc;
fn deref(&self) -> &Self::Target {
// SAFETY: The validity of the data is checked when creating DocBuf.
unsafe { Doc::new_unchecked(&self.data) }
}
}
impl TryFrom<&Doc> for bson::Document {
type Error = RawError;
fn try_from(rawdoc: &Doc) -> RawResult<bson::Document> {
rawdoc
.into_iter()
.map(|res| res.and_then(|(k, v)| Ok((k.to_owned(), v.try_into()?))))
.collect()
}
}
impl<'a> IntoIterator for &'a Doc {
type IntoIter = DocIter<'a>;
type Item = RawResult<(&'a str, elem::Element<'a>)>;
fn into_iter(self) -> DocIter<'a> {
DocIter {
doc: self,
offset: 4,
}
}
}
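/// Iterator over the key-value pairs of a [`Doc`], created by [`DocBuf::iter`] or
/// by iterating over a `&Doc`/`&DocBuf`. Elements are parsed lazily, so each item
/// is a [`RawResult`].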
pub struct DocIter<'a> {
doc: &'a Doc,
offset: usize,
}
impl<'a> Iterator for DocIter<'a> {
type Item = RawResult<(&'a str, elem::Element<'a>)>;
fn next(&mut self) -> Option<RawResult<(&'a str, elem::Element<'a>)>> {
if self.offset == self.doc.data.len() - 1 {
if self.doc.data[self.offset] == 0 {
// end of document marker
return None;
} else {
return Some(Err(RawError::MalformedValue(
"document not null terminated".into(),
)));
}
}
let key = match read_nullterminated(&self.doc.data[self.offset + 1..]) {
Ok(key) => key,
Err(err) => return Some(Err(err)),
};
let valueoffset = self.offset + 1 + key.len() + 1; // type specifier + key + \0
let element_type = match ElementType::from(self.doc.data[self.offset]) {
Some(et) => et,
None => {
return Some(Err(RawError::MalformedValue(format!(
"invalid tag: {}",
self.doc.data[self.offset]
))))
}
};
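// Number of bytes occupied by the value, used to skip ahead to the next entry.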
let element_size = match element_type {
ElementType::Double => 8,
ElementType::String => {
let size =
4 + i32_from_slice(&self.doc.data[valueoffset..valueoffset + 4]) as usize;
if self.doc.data[valueoffset + size - 1] != 0 {
return Some(Err(RawError::MalformedValue(
"string not null terminated".into(),
)));
}
size
}
ElementType::EmbeddedDocument => {
let size = i32_from_slice(&self.doc.data[valueoffset..valueoffset + 4]) as usize;
if self.doc.data[valueoffset + size - 1] != 0 {
return Some(Err(RawError::MalformedValue(
"document not null terminated".into(),
)));
}
size
}
ElementType::Array => {
let size = i32_from_slice(&self.doc.data[valueoffset..valueoffset + 4]) as usize;
if self.doc.data[valueoffset + size - 1] != 0 {
return Some(Err(RawError::MalformedValue(
"array not null terminated".into(),
)));
}
size
}
ElementType::Binary => {
5 + i32_from_slice(&self.doc.data[valueoffset..valueoffset + 4]) as usize
}
ElementType::Undefined => 0,
ElementType::ObjectId => 12,
ElementType::Boolean => 1,
ElementType::DateTime => 8,
ElementType::Null => 0,
ElementType::RegularExpression => {
let regex = match read_nullterminated(&self.doc.data[valueoffset..]) {
Ok(regex) => regex,
Err(err) => return Some(Err(err)),
};
let options =
match read_nullterminated(&self.doc.data[valueoffset + regex.len() + 1..]) {
Ok(options) => options,
Err(err) => return Some(Err(err)),
};
regex.len() + options.len() + 2
}
ElementType::DbPointer => {
let string_size =
4 + i32_from_slice(&self.doc.data[valueoffset..valueoffset + 4]) as usize;
let id_size = 12;
if self.doc.data[valueoffset + string_size - 1] != 0 {
return Some(Err(RawError::MalformedValue(
"DBPointer string not null-terminated".into(),
)));
}
string_size + id_size
}
ElementType::JavaScriptCode => {
let size =
4 + i32_from_slice(&self.doc.data[valueoffset..valueoffset + 4]) as usize;
if self.doc.data[valueoffset + size - 1] != 0 {
return Some(Err(RawError::MalformedValue(
"javascript code not null-terminated".into(),
)));
}
size
}
ElementType::Symbol => {
4 + i32_from_slice(&self.doc.data[valueoffset..valueoffset + 4]) as usize
}
ElementType::JavaScriptCodeWithScope => {
let size = i32_from_slice(&self.doc.data[valueoffset..valueoffset + 4]) as usize;
if self.doc.data[valueoffset + size - 1] != 0 {
return Some(Err(RawError::MalformedValue(
"javascript with scope not null-terminated".into(),
)));
}
size
}
ElementType::Int32 => 4,
ElementType::Timestamp => 8,
ElementType::Int64 => 8,
ElementType::Decimal128 => 16,
ElementType::MaxKey => 0,
ElementType::MinKey => 0,
};
let nextoffset = valueoffset + element_size;
self.offset = nextoffset;
Some(Ok((
key,
elem::Element::new(element_type, &self.doc.data[valueoffset..nextoffset]),
)))
}
}
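/// Alias for a borrowed [`Array`].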
pub type ArrayRef<'a> = &'a Array;
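/// A BSON array stored as raw binary data. Internally this is a [`Doc`] whose keys
/// are expected to be the decimal indices "0", "1", ... in order; elements are
/// parsed lazily, so malformed data surfaces as [`RawError`]s during access.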
pub struct Array {
doc: Doc,
}
impl Array {
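/// Create a new `&Array` over `data`, performing the same envelope checks as
/// [`Doc::new`].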
pub fn new(data: &[u8]) -> RawResult<&Array> {
Ok(Array::from_doc(Doc::new(data)?))
}
/// Return a new Array from the provided bytes.
///
/// # Safety
///
/// The provided bytes must start with a valid length indicator
/// and end with a NUL terminator, as described in [the bson
/// spec](http://bsonspec.org/spec.html).
///
/// The following is valid:
/// ```
/// # use rawbson::Array;
/// // Represents the array [null, 514i32], which is the same as the document
/// // {"0": null, "1": 514}
/// let bson = b"\x0f\0\0\0\x0A0\0\x101\0\x02\x02\0\0\0";
/// let arr = unsafe { Array::new_unchecked(bson) };
/// let mut arriter = arr.into_iter();
/// assert!(arriter.next().unwrap().and_then(|b| b.as_null()).is_ok());
/// assert_eq!(arriter.next().unwrap().and_then(|b| b.as_i32()).unwrap(), 514);
/// ```
///
/// And so is this, even though the provided document is not an array, because
/// the errors will be caught during decode.
///
/// ```
/// # use rawbson::Array;
/// // Represents the document {"0": null, "X": 514}
/// let bson = b"\x0f\0\0\0\x0A0\0\x10X\0\x02\x02\0\0\0";
/// let arr = unsafe { Array::new_unchecked(bson) };
/// let mut arriter = arr.into_iter();
/// assert!(arriter.next().unwrap().and_then(|b| b.as_null()).is_ok());
/// assert!(arriter.next().unwrap().is_err());
/// assert!(arriter.next().is_none());
/// ```
///
/// # Bad:
///
/// The following, however, indicates the wrong size for the document, and is
/// therefore unsound.
///
/// ```
/// # use rawbson::Array;
/// // Contains a length indicator, that is longer than the array
/// let invalid = b"\x06\0\0\0\0";
/// let arr: &Array = unsafe { Array::new_unchecked(invalid) };
/// ```
pub unsafe fn new_unchecked(data: &[u8]) -> &Array {
#[allow(unused_unsafe)]
let doc = unsafe { Doc::new_unchecked(data) };
Array::from_doc(doc)
}
pub fn from_doc(doc: &Doc) -> &Array {
// SAFETY: Array layout matches Doc layout
unsafe { &*(doc as *const Doc as *const Array) }
}
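/// Get the element at `index`, if present. This iterates from the start of the
/// array, so it is an O(N) operation.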
pub fn get(&self, index: usize) -> OptResult<elem::Element<'_>> {
self.into_iter().nth(index).transpose()
}
fn get_with<'a, T>(
&'a self,
index: usize,
f: impl FnOnce(elem::Element<'a>) -> RawResult<T>,
) -> OptResult<T> {
self.get(index)?.map(f).transpose()
}
pub fn get_f64(&self, index: usize) -> OptResult<f64> {
self.get_with(index, elem::Element::as_f64)
}
pub fn get_str(&self, index: usize) -> OptResult<&str> {
self.get_with(index, elem::Element::as_str)
}
pub fn get_document(&self, index: usize) -> OptResult<&Doc> {
self.get_with(index, elem::Element::as_document)
}
pub fn get_array(&self, index: usize) -> OptResult<&Array> {
self.get_with(index, elem::Element::as_array)
}
pub fn get_binary(&self, index: usize) -> OptResult<elem::RawBsonBinary<'_>> {
self.get_with(index, elem::Element::as_binary)
}
pub fn get_object_id(&self, index: usize) -> OptResult<oid::ObjectId> {
self.get_with(index, elem::Element::as_object_id)
}
pub fn get_bool(&self, index: usize) -> OptResult<bool> {
self.get_with(index, elem::Element::as_bool)
}
pub fn get_datetime(&self, index: usize) -> OptResult<DateTime<Utc>> {
self.get_with(index, elem::Element::as_datetime)
}
pub fn get_null(&self, index: usize) -> OptResult<()> {
self.get_with(index, elem::Element::as_null)
}
pub fn get_regex(&self, index: usize) -> OptResult<elem::RawBsonRegex<'_>> {
self.get_with(index, elem::Element::as_regex)
}
pub fn get_javascript(&self, index: usize) -> OptResult<&str> {
self.get_with(index, elem::Element::as_javascript)
}
pub fn get_symbol(&self, index: usize) -> OptResult<&str> {
self.get_with(index, elem::Element::as_symbol)
}
pub fn get_javascript_with_scope(&self, index: usize) -> OptResult<(&str, &Doc)> {
self.get_with(index, elem::Element::as_javascript_with_scope)
}
pub fn get_i32(&self, index: usize) -> OptResult<i32> {
self.get_with(index, elem::Element::as_i32)
}
pub fn get_timestamp(&self, index: usize) -> OptResult<elem::RawBsonTimestamp<'_>> {
self.get_with(index, elem::Element::as_timestamp)
}
pub fn get_i64(&self, index: usize) -> OptResult<i64> {
self.get_with(index, elem::Element::as_i64)
}
pub fn to_vec(&self) -> RawResult<Vec<elem::Element<'_>>> {
self.into_iter().collect()
}
pub fn as_bytes(&self) -> &[u8] {
self.doc.as_bytes()
}
}
impl TryFrom<&Array> for Vec<Bson> {
type Error = RawError;
fn try_from(arr: &Array) -> RawResult<Vec<Bson>> {
arr.into_iter()
.map(|result| {
let rawbson = result?;
Bson::try_from(rawbson)
})
.collect()
}
}
impl<'a> IntoIterator for &'a Array {
type IntoIter = ArrayIter<'a>;
type Item = RawResult<elem::Element<'a>>;
fn into_iter(self) -> ArrayIter<'a> {
ArrayIter {
dociter: self.doc.into_iter(),
index: 0,
}
}
}
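/// Iterator over the elements of an [`Array`]. The underlying document keys are
/// checked against the expected running index, so gaps or non-numeric keys are
/// reported as [`RawError::MalformedValue`].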
pub struct ArrayIter<'a> {
dociter: DocIter<'a>,
index: usize,
}
impl<'a> Iterator for ArrayIter<'a> {
type Item = RawResult<elem::Element<'a>>;
fn next(&mut self) -> Option<RawResult<elem::Element<'a>>> {
let value = self.dociter.next().map(|result| {
let (key, bson) = match result {
Ok(value) => value,
Err(err) => return Err(err),
};
let index: usize = key
.parse()
.map_err(|_| RawError::MalformedValue("non-integer array index found".into()))?;
if index == self.index {
Ok(bson)
} else {
Err(RawError::MalformedValue("wrong array index found".into()))
}
});
self.index += 1;
value
}
}
/// Given a 4 byte u8 slice, return an i32 calculated from the bytes in
/// little endian order
///
/// # Panics
///
/// This function panics if given a slice that is not four bytes long.
fn i32_from_slice(val: &[u8]) -> i32 {
i32::from_le_bytes(val.try_into().expect("i32 is four bytes"))
}
/// Given an 8 byte u8 slice, return an i64 calculated from the bytes in
/// little endian order
///
/// # Panics
///
/// This function panics if given a slice that is not eight bytes long.
fn i64_from_slice(val: &[u8]) -> i64 {
i64::from_le_bytes(val.try_into().expect("i64 is eight bytes"))
}
/// Given a 4 byte u8 slice, return a u32 calculated from the bytes in
/// little endian order
///
/// # Panics
///
/// This function panics if given a slice that is not four bytes long.
fn u32_from_slice(val: &[u8]) -> u32 {
u32::from_le_bytes(val.try_into().expect("u32 is four bytes"))
}
fn d128_from_slice(val: &[u8]) -> Decimal128 {
// TODO: Handle Big Endian platforms
let d =
unsafe { decimal::d128::from_raw_bytes(val.try_into().expect("d128 is sixteen bytes")) };
Decimal128::from(d)
}
fn read_nullterminated(buf: &[u8]) -> RawResult<&str> {
let mut splits = buf.splitn(2, |x| *x == 0);
let value = splits
.next()
.ok_or_else(|| RawError::MalformedValue("no value".into()))?;
if splits.next().is_some() {
Ok(try_to_str(value)?)
} else {
Err(RawError::MalformedValue("expected null terminator".into()))
}
}
fn read_lenencoded(buf: &[u8]) -> RawResult<&str> {
let length = i32_from_slice(&buf[..4]);
assert!(buf.len() as i32 >= length + 4);
try_to_str(&buf[4..4 + length as usize - 1])
}
fn try_to_str(data: &[u8]) -> RawResult<&str> {
match std::str::from_utf8(data) {
Ok(s) => Ok(s),
Err(_) => Err(RawError::Utf8EncodingError(data.into())),
}
}
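/// Alias for a borrowed [`Doc`], kept for backwards compatibility.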
pub type DocRef<'a> = &'a Doc;
#[cfg(test)]
mod tests {
use super::*;
use bson::{doc, spec::BinarySubtype, Binary, Bson, JavaScriptCodeWithScope, Regex, Timestamp};
use chrono::TimeZone;
fn to_bytes(doc: &bson::Document) -> Vec<u8> {
let mut docbytes = Vec::new();
doc.to_writer(&mut docbytes).unwrap();
docbytes
}
#[test]
fn string_from_document() {
let docbytes = to_bytes(&doc! {
"this": "first",
"that": "second",
"something": "else",
});
let rawdoc = Doc::new(&docbytes).unwrap();
assert_eq!(
rawdoc.get("that").unwrap().unwrap().as_str().unwrap(),
"second",
);
}
#[test]
fn nested_document() {
let docbytes = to_bytes(&doc! {
"outer": {
"inner": "surprise",
},
});
let rawdoc = Doc::new(&docbytes).unwrap();
assert_eq!(
rawdoc
.get("outer")
.expect("get doc result")
.expect("get doc option")
.as_document()
.expect("as doc")
.get("inner")
.expect("get str result")
.expect("get str option")
.as_str()
.expect("as str"),
"surprise",
);
}
#[test]
fn iterate() {
let docbytes = to_bytes(&doc! {
"apples": "oranges",
"peanut butter": "chocolate",
"easy as": {"do": 1, "re": 2, "mi": 3},
});
let rawdoc = Doc::new(&docbytes).expect("malformed bson document");
let mut dociter = rawdoc.into_iter();
let next = dociter.next().expect("no result").expect("invalid bson");
assert_eq!(next.0, "apples");
assert_eq!(next.1.as_str().expect("result was not a str"), "oranges");
let next = dociter.next().expect("no result").expect("invalid bson");
assert_eq!(next.0, "peanut butter");
assert_eq!(next.1.as_str().expect("result was not a str"), "chocolate");
let next = dociter.next().expect("no result").expect("invalid bson");
assert_eq!(next.0, "easy as");
let _doc = next.1.as_document().expect("result was a not a document");
let next = dociter.next();
assert!(next.is_none());
}
#[test]
fn rawdoc_to_doc() {
let docbytes = to_bytes(&doc! {
"f64": 2.5,
"string": "hello",
"document": {},
"array": ["binary", "serialized", "object", "notation"],
"binary": Binary { subtype: BinarySubtype::Generic, bytes: vec![1, 2, 3] },
"object_id": oid::ObjectId::with_bytes([1, 2, 3, 4, 5,6,7,8,9,10, 11,12]),
"boolean": true,
"datetime": Utc::now(),
"null": Bson::Null,
"regex": Bson::RegularExpression(Regex { pattern: String::from(r"end\s*$"), options: String::from("i")}),
"javascript": Bson::JavaScriptCode(String::from("console.log(console);")),
"symbol": Bson::Symbol(String::from("artist-formerly-known-as")),
"javascript_with_scope": Bson::JavaScriptCodeWithScope(JavaScriptCodeWithScope{ code: String::from("console.log(msg);"), scope: doc!{"ok": true}}),
"int32": 23i32,
"timestamp": Bson::Timestamp(Timestamp { time: 3542578, increment: 0 }),
"int64": 46i64,
"end": "END",
});
let rawdoc = Doc::new(&docbytes).expect("invalid document");
let _doc: bson::Document = rawdoc.try_into().expect("invalid bson");
}
#[test]
fn f64() {
#![allow(clippy::float_cmp)]
let rawdoc = DocBuf::from_document(&doc! {"f64": 2.5});
assert_eq!(
rawdoc
.get("f64")
.expect("error finding key f64")
.expect("no key f64")
.as_f64()
.expect("result was not a f64"),
2.5,
);
}
#[test]
fn string() {
let rawdoc = DocBuf::from_document(&doc! {"string": "hello"});
assert_eq!(
rawdoc
.get("string")
.expect("error finding key string")
.expect("no key string")
.as_str()
.expect("result was not a string"),
"hello",
);
}
#[test]
fn document() {
let rawdoc = DocBuf::from_document(&doc! {"document": {}});
let doc = rawdoc
.get("document")
.expect("error finding key document")
.expect("no key document")
.as_document()
.expect("result was not a document");
assert_eq!(&doc.data, [5, 0, 0, 0, 0].as_ref()); // Empty document
}
#[test]
fn array() {
let rawdoc =
DocBuf::from_document(&doc! { "array": ["binary", "serialized", "object", "notation"]});
let array = rawdoc
.get("array")
.expect("error finding key array")
.expect("no key array")
.as_array()
.expect("result was not an array");
assert_eq!(array.get_str(0), Ok(Some("binary")));
assert_eq!(array.get_str(3), Ok(Some("notation")));
assert_eq!(array.get_str(4), Ok(None));
}
#[test]
fn binary() {
let rawdoc = DocBuf::from_document(&doc! {
"binary": Binary { subtype: BinarySubtype::Generic, bytes: vec![1u8, 2, 3] }
});
let binary: elem::RawBsonBinary<'_> = rawdoc
.get("binary")
.expect("error finding key binary")
.expect("no key binary")
.as_binary()
.expect("result was not a binary object");
assert_eq!(binary.subtype, BinarySubtype::Generic);
assert_eq!(binary.data, &[1, 2, 3]);
}
#[test]
fn object_id() {
let rawdoc = DocBuf::from_document(&doc! {
"object_id": oid::ObjectId::with_bytes([1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12]),
});
let oid = rawdoc
.get("object_id")
.expect("error finding key object_id")
.expect("no key object_id")
.as_object_id()
.expect("result was not an object id");
assert_eq!(oid.to_hex(), "0102030405060708090a0b0c");
}
#[test]
fn boolean() {
let rawdoc = DocBuf::from_document(&doc! {
"boolean": true,
});
let boolean = rawdoc
.get("boolean")
.expect("error finding key boolean")
.expect("no key boolean")
.as_bool()
.expect("result was not boolean");
assert_eq!(boolean, true);
}
#[test]
fn datetime() {
let rawdoc = DocBuf::from_document(&doc! {
"boolean": true,
"datetime": Utc.ymd(2000,10,31).and_hms(12, 30, 45),
});
let datetime = rawdoc
.get("datetime")
.expect("error finding key datetime")
.expect("no key datetime")
.as_datetime()
.expect("result was not datetime");
assert_eq!(datetime.to_rfc3339(), "2000-10-31T12:30:45+00:00");
}
#[test]
fn null() {
let rawdoc = DocBuf::from_document(&doc! {
"null": null,
});
let () = rawdoc
.get("null")
.expect("error finding key null")
.expect("no key null")
.as_null()
.expect("was not null");
}
#[test]
fn regex() {
let rawdoc = DocBuf::from_document(&doc! {
"regex": Bson::RegularExpression(Regex { pattern: String::from(r"end\s*$"), options: String::from("i")}),
});
let regex = rawdoc
.get("regex")
.expect("error finding key regex")
.expect("no key regex")
.as_regex()
.expect("was not regex");
assert_eq!(regex.pattern, r"end\s*$");
assert_eq!(regex.options, "i");
}
#[test]
fn javascript() {
let rawdoc = DocBuf::from_document(&doc! {
"javascript": Bson::JavaScriptCode(String::from("console.log(console);")),
});
let js = rawdoc
.get("javascript")
.expect("error finding key javascript")
.expect("no key javascript")
.as_javascript()
.expect("was not javascript");
assert_eq!(js, "console.log(console);");
}
#[test]
fn symbol() {
let rawdoc = DocBuf::from_document(&doc! {
"symbol": Bson::Symbol(String::from("artist-formerly-known-as")),
});
let symbol = rawdoc
.get("symbol")
.expect("error finding key symbol")
.expect("no key symbol")
.as_symbol()
.expect("was not symbol");
assert_eq!(symbol, "artist-formerly-known-as");
}
#[test]
fn javascript_with_scope() {
let rawdoc = DocBuf::from_document(&doc! {
"javascript_with_scope": Bson::JavaScriptCodeWithScope(JavaScriptCodeWithScope{ code: String::from("console.log(msg);"), scope: doc!{"ok": true}}),
});
let (js, scopedoc) = rawdoc
.get("javascript_with_scope")
.expect("error finding key javascript_with_scope")
.expect("no key javascript_with_scope")
.as_javascript_with_scope()
.expect("was not javascript with scope");
assert_eq!(js, "console.log(msg);");
let (scope_key, scope_value_bson) = scopedoc
.into_iter()
.next()
.expect("no next value in scope")
.expect("invalid element");
assert_eq!(scope_key, "ok");
let scope_value = scope_value_bson.as_bool().expect("not a boolean");
assert_eq!(scope_value, true);
}
#[test]
fn int32() {
let rawdoc = DocBuf::from_document(&doc! {
"int32": 23i32,
});
let int32 = rawdoc
.get("int32")
.expect("error finding key int32")
.expect("no key int32")
.as_i32()
.expect("was not int32");
assert_eq!(int32, 23i32);
}
#[test]
fn timestamp() {
let rawdoc = DocBuf::from_document(&doc! {
"timestamp": Bson::Timestamp(Timestamp { time: 3542578, increment: 7 }),
});
let ts = rawdoc
.get("timestamp")
.expect("error finding key timestamp")
.expect("no key timestamp")
.as_timestamp()
.expect("was not a timestamp");
assert_eq!(ts.increment(), 7);
assert_eq!(ts.time(), 3542578);
}
#[test]
fn int64() {
let rawdoc = DocBuf::from_document(&doc! {
"int64": 46i64,
});
let int64 = rawdoc
.get("int64")
.expect("error finding key int64")
.expect("no key int64")
.as_i64()
.expect("was not int64");
assert_eq!(int64, 46i64);
}
#[test]
fn document_iteration() {
let docbytes = to_bytes(&doc! {
"f64": 2.5,
"string": "hello",
"document": {},
"array": ["binary", "serialized", "object", "notation"],
"binary": Binary { subtype: BinarySubtype::Generic, bytes: vec![1u8, 2, 3] },
"object_id": oid::ObjectId::with_bytes([1, 2, 3, 4, 5,6,7,8,9,10, 11,12]),
"boolean": true,
"datetime": Utc::now(),
"null": Bson::Null,
"regex": Bson::RegularExpression(Regex { pattern: String::from(r"end\s*$"), options: String::from("i")}),
"javascript": Bson::JavaScriptCode(String::from("console.log(console);")),
"symbol": Bson::Symbol(String::from("artist-formerly-known-as")),
"javascript_with_scope": Bson::JavaScriptCodeWithScope(JavaScriptCodeWithScope{ code: String::from("console.log(msg);"), scope: doc!{"ok": true}}),
"int32": 23i32,
"timestamp": Bson::Timestamp(Timestamp { time: 3542578, increment: 0 }),
"int64": 46i64,
"end": "END",
});
let rawdoc = unsafe { Doc::new_unchecked(&docbytes) };
assert_eq!(
rawdoc
.into_iter()
.collect::<Result<Vec<(&str, _)>, RawError>>()
.expect("collecting iterated doc")
.len(),
17
);
let end = rawdoc
.get("end")
.expect("error finding key end")
.expect("no key end")
.as_str()
.expect("was not str");
assert_eq!(end, "END");
}
#[test]
fn into_bson_conversion() {
let docbytes = to_bytes(&doc! {
"f64": 2.5,
"string": "hello",
"document": {},
"array": ["binary", "serialized", "object", "notation"],
"object_id": oid::ObjectId::with_bytes([1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12]),
"binary": Binary { subtype: BinarySubtype::Generic, bytes: vec![1u8, 2, 3] },
"boolean": false,
});
let rawbson = elem::Element::new(ElementType::EmbeddedDocument, &docbytes);
let b: Bson = rawbson.try_into().expect("invalid bson");
let doc = b.as_document().expect("not a document");
assert_eq!(*doc.get("f64").expect("f64 not found"), Bson::Double(2.5));
assert_eq!(
*doc.get("string").expect("string not found"),
Bson::String(String::from("hello"))
);
assert_eq!(
*doc.get("document").expect("document not found"),
Bson::Document(doc! {})
);
assert_eq!(
*doc.get("array").expect("array not found"),
Bson::Array(
vec!["binary", "serialized", "object", "notation"]
.into_iter()
.map(|s| Bson::String(String::from(s)))
.collect()
)
);
assert_eq!(
*doc.get("object_id").expect("object_id not found"),
Bson::ObjectId(oid::ObjectId::with_bytes([
1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12
]))
);
assert_eq!(
*doc.get("binary").expect("binary not found"),
Bson::Binary(Binary {
subtype: BinarySubtype::Generic,
bytes: vec![1, 2, 3]
})
);
assert_eq!(
*doc.get("boolean").expect("boolean not found"),
Bson::Boolean(false)
);
}
}
#[cfg(test)]
mod proptests {
use proptest::prelude::*;
use std::convert::TryInto;
use super::DocBuf;
use crate::props::arbitrary_bson;
use bson::doc;
fn to_bytes(doc: &bson::Document) -> Vec<u8> {
let mut docbytes = Vec::new();
doc.to_writer(&mut docbytes).unwrap();
docbytes
}
proptest! {
#[test]
fn no_crashes(s: Vec<u8>) {
let _ = DocBuf::new(s);
}
#[test]
fn roundtrip_bson(bson in arbitrary_bson()) {
println!("{:?}", bson);
let doc = doc!{"bson": bson};
let raw = to_bytes(&doc);
let raw = DocBuf::new(raw);
prop_assert!(raw.is_ok());
let raw = raw.unwrap();
let roundtrip: Result<bson::Document, _> = raw.try_into();
prop_assert!(roundtrip.is_ok());
let roundtrip = roundtrip.unwrap();
prop_assert_eq!(doc, roundtrip);
}
}
}
| true |
c26895c9e6d879deb07ad8ee0bd6d55e6845a57f
|
Rust
|
0ndorio/advent-of-code
|
/2018/aoc07/src/step.rs
|
UTF-8
| 1,575 | 3.09375 | 3 |
[
"Unlicense"
] |
permissive
|
use std::{
cell::RefCell,
cmp::Ordering,
collections::HashSet,
hash::{Hash, Hasher},
ops::Deref,
rc::Rc,
};
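/// Shared, mutable handle to a [`Step`]. Equality, ordering, and hashing all defer
/// to the wrapped step (i.e. to its identifier), so cells can live in `HashSet`s
/// while still being mutated through the inner `RefCell`.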
#[derive(Ord, PartialOrd, Eq, Clone)]
pub struct StepCell(pub Rc<RefCell<Step>>);
impl StepCell {
pub fn new(step: Step) -> Self {
Self(Rc::new(RefCell::new(step)))
}
}
impl PartialEq for StepCell {
fn eq(&self, other: &Self) -> bool {
self.0.eq(other)
}
}
impl Hash for StepCell {
fn hash<H: Hasher>(&self, state: &mut H) {
self.0.borrow().hash(state);
}
}
impl Deref for StepCell {
type Target = Rc<RefCell<Step>>;
fn deref(&self) -> &<Self as Deref>::Target {
&self.0
}
}
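/// A single step in the dependency graph: its letter identifier plus the steps it
/// depends on and the steps that depend on it.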
#[derive(Clone)]
pub struct Step {
pub identifier: char,
pub depends_on: HashSet<StepCell>,
pub parent_for: HashSet<StepCell>,
}
impl Step {
pub fn new(identifier: char) -> Self {
Self {
identifier,
depends_on: HashSet::new(),
parent_for: HashSet::new(),
}
}
}
impl PartialEq for Step {
fn eq(&self, other: &Self) -> bool {
self.identifier.eq(&other.identifier)
}
}
impl Eq for Step {}
impl PartialOrd for Step {
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
self.identifier.partial_cmp(&other.identifier)
}
}
impl Ord for Step {
fn cmp(&self, other: &Self) -> Ordering {
self.identifier.cmp(&other.identifier)
}
}
impl Hash for Step {
fn hash<H: Hasher>(&self, state: &mut H) {
self.identifier.hash(state);
}
}
| true |
4f669ab69caa4db54d2acaf5ed29178235425840
|
Rust
|
Ummon/AdventOfCode2019
|
/src/day10.rs
|
UTF-8
| 6,415 | 3.046875 | 3 |
[] |
no_license
|
use std::collections::{HashMap, HashSet};
pub fn read_map(raw: &str) -> Vec<(i32, i32)> {
let lines: Vec<&str> = raw.lines().map(|l| l.trim()).collect();
let mut map = Vec::<(i32, i32)>::new();
for x in 0 .. lines[0].len() {
for (y, line) in lines.iter().enumerate() {
if line.chars().nth(x) == Some('#') {
map.push((x as i32, y as i32));
}
}
}
map
}
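/// Clockwise angle from `(x1, y1)` to `(x2, y2)`, measured from straight up,
/// scaled by 1e6 and truncated to an `i64` so angles can be hashed and compared
/// exactly.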
fn angle(x1: i32, y1: i32, x2: i32, y2: i32) -> i64 {
// Axes are reversed so the angle increases clockwise, starting straight up (matching the laser sweep).
let angle_f64 = (2.0 * std::f64::consts::PI - ((x1 - x2) as f64).atan2((y1 - y2) as f64)) % (2.0 * std::f64::consts::PI);
(angle_f64 * 1_000_000.0) as i64
}
fn squared_distance(x1: i32, y1: i32, x2: i32, y2: i32) -> i64 {
((x1 - x2) as i64).pow(2) + ((y1 - y2) as i64).pow(2)
}
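/// Part 1: for each asteroid, count the distinct angles towards the asteroids in
/// the map (asteroids sharing an angle hide behind one another) and return the
/// best count together with its location.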
pub fn find_best_location(map: &[(i32, i32)]) -> (usize, (i32, i32)) {
let mut best_nb_observable_asteroid = 0;
let (mut best_x, mut best_y) = (0, 0);
for (x1, y1) in map {
let mut angles = HashSet::<i64>::new();
for (x2, y2) in map {
angles.insert(angle(*x1, *y1, *x2, *y2));
}
let n = angles.len();
if n > best_nb_observable_asteroid {
best_nb_observable_asteroid = n;
best_x = *x1;
best_y = *y1;
}
}
(best_nb_observable_asteroid, (best_x, best_y))
}
type PositionsAndDistances = Vec<((i32, i32), i64)>;
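/// Part 2: group the asteroids by their angle from `pos`, sort each group by
/// distance, then sweep clockwise vaporizing the nearest remaining asteroid on
/// each angle until the `n`-th one, whose position is returned.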
pub fn location_nth_vaporized_asteroid(pos: (i32, i32), map: &[(i32, i32)], n: usize) -> (i32, i32) {
// Angle -> [(position, distance)].
let mut asteroids = HashMap::<i64, PositionsAndDistances>::new();
let (x1, y1) = pos;
for (x2, y2) in map {
let angle = angle(x1, y1, *x2 , *y2);
let dist = squared_distance(x1, y1, *x2 , *y2);
match asteroids.get_mut(&angle) {
Some (lineup_asteroids) => lineup_asteroids.push(((*x2, *y2), dist)),
None => { asteroids.insert(angle, vec![((*x2, *y2), dist)]); }
}
}
// Sort everything by angle and by distance.
let mut sorted_asteroids: Vec<(&i64, &mut PositionsAndDistances)> = asteroids.iter_mut().collect();
sorted_asteroids.sort_by(|(a1, _), (a2, _)| a1.cmp(a2));
for (_, lineup_asteroids) in sorted_asteroids.iter_mut() {
lineup_asteroids.sort_by(|(_, l1), (_, l2)| l1.cmp(l2))
}
let mut i = 1;
loop {
for (_, lineup_asteroids) in sorted_asteroids.iter_mut() {
let ((x, y), _) = lineup_asteroids.remove(0);
if i == n { return (x, y) }
i += 1;
}
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn part1_sample_1() {
let raw_map =
".#..#
.....
#####
....#
...##";
let map = read_map(raw_map);
assert_eq!(find_best_location(&map).0, 8);
}
#[test]
fn part1_sample_2() {
let raw_map =
"......#.#.
#..#.#....
..#######.
.#.#.###..
.#..#.....
..#....#.#
#..#....#.
.##.#..###
##...#..#.
.#....####";
let map = read_map(raw_map);
assert_eq!(find_best_location(&map).0, 33);
}
#[test]
fn part1_sample_3() {
let raw_map =
"#.#...#.#.
.###....#.
.#....#...
##.#.#.#.#
....#.#.#.
.##..###.#
..#...##..
..##....##
......#...
.####.###.";
let map = read_map(raw_map);
assert_eq!(find_best_location(&map).0, 35);
}
#[test]
fn part1_sample_4() {
let raw_map =
".#..#..###
####.###.#
....###.#.
..###.##.#
##.##.#.#.
....###..#
..#.#..#.#
#..#.#.###
.##...##.#
.....#.#..";
let map = read_map(raw_map);
assert_eq!(find_best_location(&map).0, 41);
}
#[test]
fn part1_sample_5() {
let raw_map =
".#..##.###...#######
##.############..##.
.#.######.########.#
.###.#######.####.#.
#####.##.#.##.###.##
..#####..#.#########
####################
#.####....###.#.#.##
##.#################
#####.##.###..####..
..######..##.#######
####.##.####...##..#
.#####..#.######.###
##...#.##########...
#.##########.#######
.####.#.###.###.#.##
....##.##.###..#####
.#.#.###########.###
#.#.#.#####.####.###
###.##.####.##.#..##";
let map = read_map(raw_map);
assert_eq!(find_best_location(&map).0, 210);
}
#[test]
fn part2_sample_1() {
let raw_map =
".#....#####...#..
##...##.#####..##
##...#...#.#####.
..#.....X...###..
..#.#.....#....##";
let map = read_map(raw_map);
let pos = (8, 3);
let pos_9th = location_nth_vaporized_asteroid(pos, &map, 9);
assert_eq!(pos_9th, (15, 1));
let pos_18th = location_nth_vaporized_asteroid(pos, &map, 18);
assert_eq!(pos_18th, (4, 4));
}
#[test]
fn part2_sample_2() {
let raw_map =
".#..##.###...#######
##.############..##.
.#.######.########.#
.###.#######.####.#.
#####.##.#.##.###.##
..#####..#.#########
####################
#.####....###.#.#.##
##.#################
#####.##.###..####..
..######..##.#######
####.##.####...##..#
.#####..#.######.###
##...#.##########...
#.##########.#######
.####.#.###.###.#.##
....##.##.###..#####
.#.#.###########.###
#.#.#.#####.####.###
###.##.####.##.#..##";
let map = read_map(raw_map);
let pos = find_best_location(&map).1;
let pos_200th = location_nth_vaporized_asteroid(pos, &map, 200);
assert_eq!(pos_200th, (8, 2));
}
}
| true |
c2a46cd4462f7810751562de6a6a354ad540533a
|
Rust
|
MagneticMartian/specaneca
|
/src/main.rs
|
UTF-8
| 3,203 | 2.8125 | 3 |
[] |
no_license
|
use rand::Rng;
use std::f64::consts::PI;
use num::complex::Complex;
use plotlib::page::Page;
use plotlib::repr::Plot;
use plotlib::view::ContinuousView;
use plotlib::style::{PointMarker, PointStyle};
static STEPS: usize = 100;
static COLS: usize = 700;
static PERIODS: usize = 1024;
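/// `step + 1` evenly spaced values from `start` to `stop` inclusive.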
fn linspace(start: f64, stop: f64, step: usize) -> Vec<f64> {
let mut res_vec: Vec<f64> = vec![start];
let size = (stop-start)/(step as f64);
for i in 1..step+1 {
res_vec.push(start + (size*(i as f64)));
}
res_vec
}
#[derive(Debug)]
struct ECA<'a> {
n: &'a usize,
t: &'a usize,
rule: &'a Vec<usize>
}
#[derive(Debug)]
struct Spectrum<'b> {
n: &'b usize,
t: &'b usize,
x: &'b Vec<Vec<usize>>,
freq: &'b Vec<f64>
}
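/// Next state of a cell given its three parent cells, looked up in the 8-entry
/// rule table (index 0 corresponds to neighbourhood 111, index 7 to 000, i.e.
/// Wolfram rule numbering).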
fn eca_rule_result(x: (usize, usize, usize), rule: Vec<usize>) -> usize {
let rule_loc = match x {
(0, 0, 0) => 7,
(0, 0, 1) => 6,
(0, 1, 0) => 5,
(0, 1, 1) => 4,
(1, 0, 0) => 3,
(1, 0, 1) => 2,
(1, 1, 0) => 1,
(1, 1, 1) => 0,
_ => panic!("Not allowed"),
};
if rule[rule_loc] == 1 {
1
} else {
0
}
}
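/// Build the `t` x `n` space-time diagram: a random initial row, then each later
/// row follows from the previous one under `rule` (the two edge cells stay 0).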
fn construct(n: &usize, t: &usize, rule: &Vec<usize>) -> Vec<Vec<usize>> {
let mut x = vec![vec![0; *n]; *t];
let mut rng = rand::thread_rng();
for i in 0..*n {
x[0][i] = rng.gen_range(0,2);
}
for m in 1..*t {
for i in 1..*n-1 {
x[m][i] = eca_rule_result((x[m-1][i-1],x[m-1][i],x[m-1][i+1]), rule.to_vec());
}
}
x
}
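/// Discrete Fourier transform of column `n` of the diagram at frequency `f`,
/// averaged over the `t` time steps.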
fn dft(n: usize, t: &usize, x: &Vec<Vec<usize>>, f: f64) -> Complex<f64> {
let mut x_hat = Complex::new(0.0, 0.0);
for j in 0..*t {
// Phase of time sample j at frequency f over a period of t steps.
let phase = 2.0*PI*(j as f64)*f/(*t as f64);
x_hat.re += (x[j][n] as f64)*phase.cos()/(*t as f64);
x_hat.im += (x[j][n] as f64)*phase.sin()/(*t as f64);
}
x_hat
}
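/// Power spectral density: squared DFT magnitude averaged over all columns, one
/// value per frequency step.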
fn density (dft_vec: Vec<Vec<Complex<f64>>>) -> Vec<f64> {
let mut s = vec![0.0; STEPS];
for f in 0..STEPS {
for i in 0..COLS{
s[f] += (dft_vec[f][i].re*dft_vec[f][i].re)+(dft_vec[f][i].im*dft_vec[f][i].im);
}
s[f] = s[f]/(COLS as f64);
}
s
}
fn main() {
let rule = vec![0, 1, 1, 0, 1, 1, 1, 0];
let n = COLS;
let t = PERIODS;
let x = ECA{ n: &n, t: &t, rule: &rule };
let y = construct(x.n,x.t,x.rule);
let freq = linspace(0.0,10.0,STEPS);
let spectral = Spectrum { n: &n, t: &t, x: &y, freq: &freq} ;
let k = &n;
let mut dft_vec = vec![vec![Complex::new(0.0,0.0); *k]; freq.len()];
for j in 0..freq.len() {
for i in 0..n {
dft_vec[j][i] = dft(i, spectral.t, spectral.x, spectral.freq[j]);
}
}
let s = density(dft_vec);
let mut res_vec: Vec<(f64, f64)> = vec![(freq[0], s[0])];
for i in 1..STEPS {
res_vec.push((freq[i],s[i]));
}
let data1 = res_vec;
// We create our scatter plot from the data
let s1: Plot = Plot::new(data1).point_style(
PointStyle::new()
.marker(PointMarker::Square) // setting the marker to be a square
.colour("#DD3355"),
); // and a custom colour
// The 'view' describes what set of data is drawn
let v = ContinuousView::new()
.add(s1)
.x_range(0., 10.)
.y_range(0.330, 0.334)
.x_label("Frequency")
.y_label("Power Spectrum");
// A page with a single view is then saved to an SVG file
Page::single(&v).save("scatter.svg").unwrap();
}
| true |
0b835c452915276a61f8a7490bca717b698c4f78
|
Rust
|
informationsea/xlsxwriter-rs
|
/libxlsxwriter/src/format.rs
|
UTF-8
| 19,416 | 2.625 | 3 |
[
"Apache-2.0"
] |
permissive
|
use crate::{CStringHelper, XlsxError};
#[allow(clippy::unreadable_literal)]
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]
pub enum FormatColor {
Black,
Blue,
Brown,
Cyan,
Gray,
Green,
Lime,
Magenta,
Navy,
Orange,
Purple,
Red,
Pink,
Silver,
White,
Yellow,
Custom(u32),
}
#[allow(clippy::unreadable_literal)]
impl FormatColor {
#[must_use]
pub fn value(self) -> u32 {
match self {
FormatColor::Black => 0x1000000,
FormatColor::Blue => 0x00_00_FF,
FormatColor::Brown => 0x80_00_00,
FormatColor::Cyan => 0x00_FF_FF,
FormatColor::Gray => 0x80_80_80,
FormatColor::Green => 0x00_80_00,
FormatColor::Lime => 0x00_FF_00,
FormatColor::Magenta => 0xFF_00_FF,
FormatColor::Navy => 0x00_00_80,
FormatColor::Orange => 0xFF_66_00,
FormatColor::Purple => 0x80_00_80,
FormatColor::Red => 0xFF_00_00,
FormatColor::Pink => 0xFF_00_FF,
FormatColor::Silver => 0xC0_C0_C0,
FormatColor::White => 0xFF_FF_FF,
FormatColor::Yellow => 0xFF_FF_00,
FormatColor::Custom(x) => x,
}
}
}
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]
pub enum FormatUnderline {
Single,
Double,
SingleAccounting,
DoubleAccounting,
}
impl FormatUnderline {
#[must_use]
pub fn value(self) -> u8 {
let value = match self {
FormatUnderline::Single => {
libxlsxwriter_sys::lxw_format_underlines_LXW_UNDERLINE_SINGLE
}
FormatUnderline::SingleAccounting => {
libxlsxwriter_sys::lxw_format_underlines_LXW_UNDERLINE_SINGLE_ACCOUNTING
}
FormatUnderline::Double => {
libxlsxwriter_sys::lxw_format_underlines_LXW_UNDERLINE_DOUBLE
}
FormatUnderline::DoubleAccounting => {
libxlsxwriter_sys::lxw_format_underlines_LXW_UNDERLINE_DOUBLE_ACCOUNTING
}
};
value as u8
}
}
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]
pub enum FormatScript {
SuperScript,
SubScript,
}
impl FormatScript {
#[must_use]
pub fn value(self) -> u8 {
let value = match self {
FormatScript::SuperScript => libxlsxwriter_sys::lxw_format_scripts_LXW_FONT_SUPERSCRIPT,
FormatScript::SubScript => libxlsxwriter_sys::lxw_format_scripts_LXW_FONT_SUBSCRIPT,
};
value as u8
}
}
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]
pub enum FormatAlignment {
None,
Left,
Center,
Right,
Fill,
Justify,
CenterAcross,
Distributed,
}
impl FormatAlignment {
#[must_use]
pub fn value(self) -> u8 {
let value = match self {
FormatAlignment::None => libxlsxwriter_sys::lxw_format_alignments_LXW_ALIGN_NONE,
FormatAlignment::Left => libxlsxwriter_sys::lxw_format_alignments_LXW_ALIGN_LEFT,
FormatAlignment::Center => libxlsxwriter_sys::lxw_format_alignments_LXW_ALIGN_CENTER,
FormatAlignment::Right => libxlsxwriter_sys::lxw_format_alignments_LXW_ALIGN_RIGHT,
FormatAlignment::Fill => libxlsxwriter_sys::lxw_format_alignments_LXW_ALIGN_FILL,
FormatAlignment::Justify => libxlsxwriter_sys::lxw_format_alignments_LXW_ALIGN_JUSTIFY,
FormatAlignment::CenterAcross => {
libxlsxwriter_sys::lxw_format_alignments_LXW_ALIGN_CENTER_ACROSS
}
FormatAlignment::Distributed => {
libxlsxwriter_sys::lxw_format_alignments_LXW_ALIGN_DISTRIBUTED
}
};
value as u8
}
}
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]
pub enum FormatVerticalAlignment {
None,
VerticalTop,
VerticalBottom,
VerticalCenter,
VerticalJustify,
VerticalDistributed,
}
impl FormatVerticalAlignment {
#[must_use]
pub fn value(self) -> u8 {
let value = match self {
FormatVerticalAlignment::None => {
libxlsxwriter_sys::lxw_format_alignments_LXW_ALIGN_NONE
}
FormatVerticalAlignment::VerticalTop => {
libxlsxwriter_sys::lxw_format_alignments_LXW_ALIGN_VERTICAL_TOP
}
FormatVerticalAlignment::VerticalBottom => {
libxlsxwriter_sys::lxw_format_alignments_LXW_ALIGN_VERTICAL_BOTTOM
}
FormatVerticalAlignment::VerticalCenter => {
libxlsxwriter_sys::lxw_format_alignments_LXW_ALIGN_VERTICAL_CENTER
}
FormatVerticalAlignment::VerticalJustify => {
libxlsxwriter_sys::lxw_format_alignments_LXW_ALIGN_VERTICAL_JUSTIFY
}
FormatVerticalAlignment::VerticalDistributed => {
libxlsxwriter_sys::lxw_format_alignments_LXW_ALIGN_VERTICAL_DISTRIBUTED
}
};
value as u8
}
}
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]
pub enum FormatPatterns {
None,
Solid,
MediumGray,
DarkGray,
LightGray,
DarkHorizontal,
DarkVertical,
DarkDown,
DarkUp,
DarkGrid,
DarkTrellis,
LightHorizontal,
LightVertical,
LightDown,
LightUp,
LightGrid,
LightTrellis,
Gray125,
Gray0625,
}
impl FormatPatterns {
#[must_use]
pub fn value(self) -> u8 {
let value = match self {
FormatPatterns::None => libxlsxwriter_sys::lxw_format_patterns_LXW_PATTERN_NONE,
FormatPatterns::Solid => libxlsxwriter_sys::lxw_format_patterns_LXW_PATTERN_SOLID,
FormatPatterns::MediumGray => {
libxlsxwriter_sys::lxw_format_patterns_LXW_PATTERN_MEDIUM_GRAY
}
FormatPatterns::DarkGray => {
libxlsxwriter_sys::lxw_format_patterns_LXW_PATTERN_DARK_GRAY
}
FormatPatterns::LightGray => {
libxlsxwriter_sys::lxw_format_patterns_LXW_PATTERN_LIGHT_GRAY
}
FormatPatterns::DarkHorizontal => {
libxlsxwriter_sys::lxw_format_patterns_LXW_PATTERN_DARK_HORIZONTAL
}
FormatPatterns::DarkVertical => {
libxlsxwriter_sys::lxw_format_patterns_LXW_PATTERN_DARK_VERTICAL
}
FormatPatterns::DarkDown => {
libxlsxwriter_sys::lxw_format_patterns_LXW_PATTERN_DARK_DOWN
}
FormatPatterns::DarkUp => libxlsxwriter_sys::lxw_format_patterns_LXW_PATTERN_DARK_UP,
FormatPatterns::DarkGrid => {
libxlsxwriter_sys::lxw_format_patterns_LXW_PATTERN_DARK_GRID
}
FormatPatterns::DarkTrellis => {
libxlsxwriter_sys::lxw_format_patterns_LXW_PATTERN_DARK_TRELLIS
}
FormatPatterns::LightHorizontal => {
libxlsxwriter_sys::lxw_format_patterns_LXW_PATTERN_LIGHT_HORIZONTAL
}
FormatPatterns::LightVertical => {
libxlsxwriter_sys::lxw_format_patterns_LXW_PATTERN_LIGHT_VERTICAL
}
FormatPatterns::LightDown => {
libxlsxwriter_sys::lxw_format_patterns_LXW_PATTERN_LIGHT_DOWN
}
FormatPatterns::LightUp => libxlsxwriter_sys::lxw_format_patterns_LXW_PATTERN_LIGHT_UP,
FormatPatterns::LightGrid => {
libxlsxwriter_sys::lxw_format_patterns_LXW_PATTERN_LIGHT_GRID
}
FormatPatterns::LightTrellis => {
libxlsxwriter_sys::lxw_format_patterns_LXW_PATTERN_LIGHT_TRELLIS
}
FormatPatterns::Gray125 => libxlsxwriter_sys::lxw_format_patterns_LXW_PATTERN_GRAY_125,
FormatPatterns::Gray0625 => {
libxlsxwriter_sys::lxw_format_patterns_LXW_PATTERN_GRAY_0625
}
};
value as u8
}
}
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]
pub enum FormatBorder {
None,
Thin,
Medium,
Dashed,
Dotted,
Thick,
Double,
Hair,
MediumDashed,
DashDot,
MediumDashDot,
DashDotDot,
MediumDashDotDot,
SlantDashDot,
}
impl FormatBorder {
#[must_use]
pub fn value(self) -> u8 {
let value = match self {
FormatBorder::None => libxlsxwriter_sys::lxw_format_borders_LXW_BORDER_NONE,
FormatBorder::Thin => libxlsxwriter_sys::lxw_format_borders_LXW_BORDER_THIN,
FormatBorder::Medium => libxlsxwriter_sys::lxw_format_borders_LXW_BORDER_MEDIUM,
FormatBorder::Dashed => libxlsxwriter_sys::lxw_format_borders_LXW_BORDER_DASHED,
FormatBorder::Dotted => libxlsxwriter_sys::lxw_format_borders_LXW_BORDER_DOTTED,
FormatBorder::Thick => libxlsxwriter_sys::lxw_format_borders_LXW_BORDER_THICK,
FormatBorder::Double => libxlsxwriter_sys::lxw_format_borders_LXW_BORDER_DOUBLE,
FormatBorder::Hair => libxlsxwriter_sys::lxw_format_borders_LXW_BORDER_HAIR,
FormatBorder::MediumDashed => {
libxlsxwriter_sys::lxw_format_borders_LXW_BORDER_MEDIUM_DASHED
}
FormatBorder::DashDot => libxlsxwriter_sys::lxw_format_borders_LXW_BORDER_DASH_DOT,
FormatBorder::MediumDashDot => {
libxlsxwriter_sys::lxw_format_borders_LXW_BORDER_MEDIUM_DASH_DOT
}
FormatBorder::DashDotDot => {
libxlsxwriter_sys::lxw_format_borders_LXW_BORDER_DASH_DOT_DOT
}
FormatBorder::MediumDashDotDot => {
libxlsxwriter_sys::lxw_format_borders_LXW_BORDER_MEDIUM_DASH_DOT_DOT
}
FormatBorder::SlantDashDot => {
libxlsxwriter_sys::lxw_format_borders_LXW_BORDER_SLANT_DASH_DOT
}
};
value as u8
}
}
#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Hash, Default)]
pub struct Format {
font_name: Option<String>,
// font size / 100
font_size: Option<u32>,
font_color: Option<FormatColor>,
bold: bool,
italic: bool,
underline: Option<FormatUnderline>,
font_strikeout: bool,
font_script: Option<FormatScript>,
num_format: Option<String>,
unlocked: bool,
hidden: bool,
align: Option<FormatAlignment>,
vertical_align: Option<FormatVerticalAlignment>,
rotation: Option<i16>,
text_wrap: bool,
indent: Option<u8>,
shrink: bool,
pattern: Option<FormatPatterns>,
bg_color: Option<FormatColor>,
fg_color: Option<FormatColor>,
border: Option<FormatBorder>,
bottom: Option<FormatBorder>,
top: Option<FormatBorder>,
left: Option<FormatBorder>,
right: Option<FormatBorder>,
border_color: Option<FormatColor>,
bottom_color: Option<FormatColor>,
top_color: Option<FormatColor>,
left_color: Option<FormatColor>,
right_color: Option<FormatColor>,
}
impl Format {
#[must_use]
pub fn new() -> Self {
Self::default()
}
pub fn set_font_name(&mut self, font_name: &str) -> &mut Self {
self.font_name = Some(font_name.to_string());
self
}
pub fn set_font_size(&mut self, font_size: f64) -> &mut Self {
self.font_size = Some((font_size * 100.).round() as u32);
self
}
pub fn set_font_color(&mut self, font_color: FormatColor) -> &mut Self {
self.font_color = Some(font_color);
self
}
pub fn set_bold(&mut self) -> &mut Self {
self.bold = true;
self
}
pub fn set_italic(&mut self) -> &mut Self {
self.italic = true;
self
}
pub fn set_underline(&mut self, underline: FormatUnderline) -> &mut Self {
self.underline = Some(underline);
self
}
pub fn set_font_strikeout(&mut self) -> &mut Self {
self.font_strikeout = true;
self
}
pub fn set_font_script(&mut self, script: FormatScript) -> &mut Self {
self.font_script = Some(script);
self
}
pub fn set_num_format(&mut self, num_format: &str) -> &mut Self {
self.num_format = Some(num_format.to_string());
self
}
pub fn set_unlocked(&mut self) -> &mut Self {
self.unlocked = true;
self
}
pub fn set_hidden(&mut self) -> &mut Self {
self.hidden = true;
self
}
pub fn set_align(&mut self, align: FormatAlignment) -> &mut Self {
self.align = Some(align);
self
}
pub fn set_vertical_align(&mut self, align: FormatVerticalAlignment) -> &mut Self {
self.vertical_align = Some(align);
self
}
pub fn set_text_wrap(&mut self) -> &mut Self {
self.text_wrap = true;
self
}
pub fn set_rotation(&mut self, angle: i16) -> &mut Self {
self.rotation = Some(angle);
self
}
pub fn set_indent(&mut self, level: u8) -> &mut Self {
self.indent = Some(level);
self
}
pub fn set_shrink(&mut self) -> &mut Self {
self.shrink = true;
self
}
pub fn set_pattern(&mut self, pattern: FormatPatterns) -> &mut Self {
self.pattern = Some(pattern);
self
}
pub fn set_bg_color(&mut self, color: FormatColor) -> &mut Self {
self.bg_color = Some(color);
self
}
pub fn set_fg_color(&mut self, color: FormatColor) -> &mut Self {
self.fg_color = Some(color);
self
}
pub fn set_border(&mut self, border: FormatBorder) -> &mut Self {
self.border = Some(border);
self
}
pub fn set_border_bottom(&mut self, border: FormatBorder) -> &mut Self {
self.bottom = Some(border);
self
}
pub fn set_border_top(&mut self, border: FormatBorder) -> &mut Self {
self.top = Some(border);
self
}
pub fn set_border_left(&mut self, border: FormatBorder) -> &mut Self {
self.left = Some(border);
self
}
pub fn set_border_right(&mut self, border: FormatBorder) -> &mut Self {
self.right = Some(border);
self
}
pub fn set_border_color(&mut self, color: FormatColor) -> &mut Self {
self.border_color = Some(color);
self
}
pub fn set_border_bottom_color(&mut self, color: FormatColor) -> &mut Self {
self.bottom_color = Some(color);
self
}
pub fn set_border_top_color(&mut self, color: FormatColor) -> &mut Self {
self.top_color = Some(color);
self
}
pub fn set_border_left_color(&mut self, color: FormatColor) -> &mut Self {
self.left_color = Some(color);
self
}
pub fn set_border_right_color(&mut self, color: FormatColor) -> &mut Self {
self.right_color = Some(color);
self
}
pub(crate) fn set_internal_format(
&self,
format: *mut libxlsxwriter_sys::lxw_format,
) -> Result<(), XlsxError> {
let mut c_string_helper = CStringHelper::new();
unsafe {
if let Some(font_name) = self.font_name.as_deref() {
libxlsxwriter_sys::format_set_font_name(format, c_string_helper.add(font_name)?);
}
if let Some(font_size) = self.font_size {
let font_size: f64 = font_size.into();
libxlsxwriter_sys::format_set_font_size(format, font_size / 100.0);
}
if let Some(font_color) = self.font_color {
libxlsxwriter_sys::format_set_font_color(format, font_color.value());
}
if self.bold {
libxlsxwriter_sys::format_set_bold(format);
}
if self.italic {
libxlsxwriter_sys::format_set_italic(format);
}
if let Some(underline) = self.underline {
libxlsxwriter_sys::format_set_underline(format, underline.value());
}
if self.font_strikeout {
libxlsxwriter_sys::format_set_font_strikeout(format);
}
if let Some(font_script) = self.font_script {
libxlsxwriter_sys::format_set_font_script(format, font_script.value());
}
if let Some(num_format) = self.num_format.as_deref() {
libxlsxwriter_sys::format_set_num_format(format, c_string_helper.add(num_format)?);
}
if self.unlocked {
libxlsxwriter_sys::format_set_unlocked(format);
}
if self.hidden {
libxlsxwriter_sys::format_set_hidden(format);
}
if let Some(align) = self.align {
libxlsxwriter_sys::format_set_align(format, align.value());
}
if let Some(vertical_align) = self.vertical_align {
libxlsxwriter_sys::format_set_align(format, vertical_align.value());
}
if let Some(angle) = self.rotation {
libxlsxwriter_sys::format_set_rotation(format, angle);
}
if self.text_wrap {
libxlsxwriter_sys::format_set_text_wrap(format);
}
if let Some(indent) = self.indent {
libxlsxwriter_sys::format_set_indent(format, indent);
}
if self.shrink {
libxlsxwriter_sys::format_set_shrink(format);
}
if let Some(pattern) = self.pattern {
libxlsxwriter_sys::format_set_pattern(format, pattern.value());
}
if let Some(bg_color) = self.bg_color {
libxlsxwriter_sys::format_set_bg_color(format, bg_color.value());
}
if let Some(fg_color) = self.fg_color {
                libxlsxwriter_sys::format_set_fg_color(format, fg_color.value());
}
if let Some(style) = self.border {
libxlsxwriter_sys::format_set_border(format, style.value());
}
if let Some(style) = self.bottom {
libxlsxwriter_sys::format_set_bottom(format, style.value());
}
if let Some(style) = self.top {
libxlsxwriter_sys::format_set_top(format, style.value());
}
if let Some(style) = self.left {
libxlsxwriter_sys::format_set_left(format, style.value());
}
if let Some(style) = self.right {
libxlsxwriter_sys::format_set_right(format, style.value());
}
if let Some(color) = self.border_color {
libxlsxwriter_sys::format_set_border_color(format, color.value());
}
if let Some(color) = self.bottom_color {
libxlsxwriter_sys::format_set_bottom_color(format, color.value());
}
if let Some(color) = self.top_color {
libxlsxwriter_sys::format_set_top_color(format, color.value());
}
if let Some(color) = self.left_color {
libxlsxwriter_sys::format_set_left_color(format, color.value());
}
if let Some(color) = self.right_color {
libxlsxwriter_sys::format_set_right_color(format, color.value());
}
}
Ok(())
}
}
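// Illustrative example added alongside the original code: the setters return `&mut Self`,
// so a cell format can be assembled by chaining. Attaching the format to a worksheet is
// omitted here, since that wiring lives in the workbook/worksheet modules.
#[cfg(test)]
mod format_builder_example {
    use super::*;

    #[test]
    fn chained_setters() {
        let mut header = Format::new();
        header
            .set_bold()
            .set_font_size(12.0)
            .set_font_color(FormatColor::White)
            .set_bg_color(FormatColor::Navy)
            .set_align(FormatAlignment::Center)
            .set_border(FormatBorder::Thin);
        assert_eq!(header.font_size, Some(1200)); // stored internally as font size * 100
        assert_eq!(header.bg_color, Some(FormatColor::Navy));
    }
}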
| true |
4342ed16889a0d4d60a83a9a8ffe70492b357b35
|
Rust
|
Shipica/Shipica.github.io
|
/shipico/src/old/widget/common.rs
|
UTF-8
| 3,243 | 2.875 | 3 |
[
"MIT"
] |
permissive
|
//! TODO rename this module maybe?
//!
//! Common is not the best name for it, but i don't know
//! what is better
use web_sys::DomMatrix;
use crate::{
canvas::Canvas,
math::{Matrix, Vec2},
};
use super::Widget;
// ----------------------------------------------------------------
// Transform
// ----------------------------------------------------------------
pub struct Transform<T>
where
T: Widget,
{
pub transform: Matrix,
pub inner: T,
}
impl<T> Widget for Transform<T>
where
T: Widget,
{
#[inline]
fn draw(&self, canvas: &mut Canvas) {
canvas.transform(self.transform);
self.inner.draw(canvas);
canvas.transform(self.transform.inverse());
}
}
// ----------------------------------------------------------------
// Fill
// ----------------------------------------------------------------
pub struct Fill<T>
where
T: Widget,
{
pub inner: T,
}
impl<T> Widget for Fill<T>
where
T: Widget,
{
#[inline]
fn draw(&self, canvas: &mut Canvas) {
self.inner.draw(canvas);
canvas.fill();
}
}
// ----------------------------------------------------------------
// Stroke
// ----------------------------------------------------------------
pub struct Stroke<T>
where
T: Widget,
{
pub inner: T,
}
impl<T> Widget for Stroke<T>
where
T: Widget,
{
#[inline]
fn draw(&self, canvas: &mut Canvas) {
self.inner.draw(canvas);
canvas.stroke();
}
}
// ----------------------------------------------------------------
// Translate
// ----------------------------------------------------------------
pub struct Translate<T>
where
T: Widget,
{
pub translation: Vec2,
pub inner: T,
}
impl<T> Widget for Translate<T>
where
T: Widget,
{
#[inline]
fn draw(&self, canvas: &mut Canvas) {
canvas.translate([self.translation.x, self.translation.y]);
self.inner.draw(canvas);
canvas.translate([-self.translation.x, -self.translation.y]);
}
}
// ----------------------------------------------------------------
// Scale
// ----------------------------------------------------------------
pub struct Scale<T>
where
T: Widget,
{
pub scale: f64,
pub inner: T,
}
impl<T> Widget for Scale<T>
where
T: Widget,
{
#[inline]
fn draw(&self, canvas: &mut Canvas) {
canvas.scale(self.scale);
self.inner.draw(canvas);
canvas.scale(1.0 / self.scale);
}
}
// ----------------------------------------------------------------
// Rotate
// ----------------------------------------------------------------
pub struct Rotate<T>
where
T: Widget,
{
pub angle: f64,
pub inner: T,
}
impl<T> Widget for Rotate<T>
where
T: Widget,
{
#[inline]
fn draw(&self, canvas: &mut Canvas) {
canvas.rotate(self.angle);
self.inner.draw(canvas);
canvas.rotate(-self.angle);
}
}
pub struct Inspect<T, F>
where
T: Widget,
F: Fn(),
{
pub inner: T,
pub f: F,
}
impl<T, F> Widget for Inspect<T, F>
where
T: Widget,
F: Fn(),
{
#[inline]
fn draw(&self, canvas: &mut Canvas) {
if canvas.debug {
(self.f)();
}
self.inner.draw(canvas);
}
}
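// Hypothetical composition sketch, added for illustration only. `my_widget` stands in for
// any value implementing `Widget`, and the field syntax for `Vec2` is assumed; the point
// is that the wrappers nest, applying their effect before drawing and undoing it after.
//
// let decorated = Translate {
//     translation: Vec2 { x: 10.0, y: 20.0 },
//     inner: Scale { scale: 2.0, inner: my_widget },
// };
// decorated.draw(&mut canvas); // translate, scale, draw, then restore both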
| true |
10bd1f652916b53c03fcd2ef2828d88b56e40bfa
|
Rust
|
shakyShane/dsa
|
/src/balanced_recursive.rs
|
UTF-8
| 1,220 | 3.703125 | 4 |
[] |
no_license
|
use std::str::Chars;
fn balanced_recursive(input: &str) -> bool {
expect(None, &mut input.chars())
}
fn expect(end: Option<char>, input: &mut Chars) -> bool {
    loop {
        let c = input.next();
        let good = match c {
            // An opening bracket starts a nested level that must end with its partner.
            Some('(') => expect(Some(')'), input),
            Some('[') => expect(Some(']'), input),
            Some('{') => expect(Some('}'), input),
            // A closing bracket (or the end of input) must be exactly what this level expects.
            Some(')') | Some(']') | Some('}') | None => {
                return end == c;
            }
            _ => true, // any other char
        };
        if !good {
            return false;
        }
    }
}
#[test]
fn test_balanced_recursive() {
assert_eq!(balanced_recursive("[]"), true);
assert_eq!(balanced_recursive("["), false);
assert_eq!(balanced_recursive("(())"), true);
assert_eq!(balanced_recursive("((()"), false);
assert_eq!(balanced_recursive(")(())"), false);
assert_eq!(balanced_recursive("))))"), false);
assert_eq!(balanced_recursive("(()))("), false);
assert_eq!(balanced_recursive("([])"), true);
assert_eq!(balanced_recursive("([[[]]])"), true);
assert_eq!(balanced_recursive("([[[00]])"), false);
assert_eq!(balanced_recursive("([[[{0}]]])"), true);
}
| true |
074944b37af08045b7c576b2a1be84c1ca5e7636
|
Rust
|
madmax28/aoc2019
|
/src/day16/mod.rs
|
UTF-8
| 2,017 | 3.203125 | 3 |
[] |
no_license
|
#[derive(Debug)]
enum Error {
InvalidInput,
}
const BASE: &[i32] = &[0, 1, 0, -1];
fn fft(values: &mut Vec<i32>, skip: usize) {
let init = values.clone();
let total_len = init.len() + skip;
for idx in 0..init.len() {
let phase = skip + idx;
if phase <= total_len / 3 {
values[idx] = init[idx..]
.iter()
.enumerate()
.map(|(idx, &val)| {
val * BASE[(idx / (phase + 1) + 1) % BASE.len()]
})
.sum::<i32>()
.abs()
% 10;
} else {
            values[idx] = init[idx..]
                .iter()
                .take((phase + 1).min(total_len - phase))
                .sum::<i32>()
                % 10;
}
}
}
fn parse(input: &str) -> crate::Result<Vec<i32>> {
input
.lines()
.next()
.ok_or_else(|| crate::Error::boxed(Error::InvalidInput))?
.chars()
.map(|c| -> crate::Result<i32> {
Ok(c.to_digit(10)
.map(|n| n as i32)
.ok_or_else(|| crate::Error::boxed(Error::InvalidInput))?)
})
.collect::<Result<_, _>>()
}
pub fn part1(input: &str) -> crate::Result<i32> {
let mut nums = parse(input)?;
for _ in 0..100 {
fft(&mut nums, 0);
}
Ok(nums.iter().take(8).fold(0, |acc, n| acc * 10 + n))
}
pub fn part2(input: &str) -> crate::Result<i32> {
let nums = parse(input)?;
let offset = nums.iter().take(7).fold(0, |acc, n| acc * 10 + n) as usize;
let mut nums: Vec<i32> = {
let len = nums.len() * 10_000 - offset;
nums.into_iter().cycle().skip(offset).take(len).collect()
};
    for _ in 0..100 {
        // Solves in 4976.325 008 743 seconds \o/
        // fft(&mut nums, offset)
        // The offset lands in the second half of the signal, where every FFT pattern is a
        // run of ones, so one phase reduces to a running suffix sum modulo 10.
        for idx in (0..nums.len() - 1).rev() {
            nums[idx] = (nums[idx] + nums[idx + 1]) % 10;
        }
    }
Ok(nums.iter().take(8).fold(0, |acc, n| acc * 10 + n))
}
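// Small added check (not part of the original solution) of the suffix-sum shortcut used in
// part2: in the second half of the signal one FFT phase is a running suffix sum mod 10.
// Digits come from the well-known 12345678 example, whose first phase is 48226158.
#[cfg(test)]
mod suffix_sum_example {
    #[test]
    fn second_half_is_suffix_sums() {
        let mut tail = vec![5, 6, 7, 8]; // second half of 12345678
        for idx in (0..tail.len() - 1).rev() {
            tail[idx] = (tail[idx] + tail[idx + 1]) % 10;
        }
        assert_eq!(tail, vec![6, 1, 5, 8]); // second half of 48226158
    }
}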
| true |
cbb2031a7bd88fe5669cba1f7eec7300fc121986
|
Rust
|
ftsell/pixelflut
|
/rust/src/net/mod.rs
|
UTF-8
| 2,212 | 2.5625 | 3 |
[
"MIT"
] |
permissive
|
//!
//! Networking layer for pixelflut servers and clients as well as on-the-wire protocol handling
//!
use std::convert::TryFrom;
use anyhow::Result;
use bytes::{Buf, Bytes};
use crate::net::framing::Frame;
use crate::pixmap::traits::{PixmapBase, PixmapRead, PixmapWrite};
use crate::pixmap::SharedPixmap;
use crate::protocol::{Request, Response, StateEncodingAlgorithm};
use crate::state_encoding::SharedMultiEncodings;
pub mod framing;
pub mod tcp_server;
pub mod udp_server;
pub mod ws_server;
//static LOG_TARGET: &str = "pixelflut.net";
/// handle a request frame and return a response frame
fn handle_frame<P, B>(
input: Frame<B>,
pixmap: &SharedPixmap<P>,
encodings: &SharedMultiEncodings,
) -> Option<Frame<Bytes>>
where
P: PixmapBase + PixmapRead + PixmapWrite,
B: Buf,
{
// try parse the received frame as request
match Request::try_from(input) {
Err(e) => Some(Frame::new_from_string(e.to_string())),
Ok(request) => match handle_request(request, pixmap, encodings) {
Err(e) => Some(Frame::new_from_string(e.to_string())),
Ok(response) => response.map(|r| r.into()),
},
}
}
/// handle a request and return a response
fn handle_request<P>(
request: Request,
pixmap: &SharedPixmap<P>,
encodings: &SharedMultiEncodings,
) -> Result<Option<Response>>
where
P: PixmapBase + PixmapRead + PixmapWrite,
{
match request {
        Request::Size => {
            let size = pixmap.get_size()?;
            Ok(Some(Response::Size(size.0, size.1)))
        }
Request::Help(topic) => Ok(Some(Response::Help(topic))),
Request::PxGet(x, y) => Ok(Some(Response::Px(x, y, pixmap.get_pixel(x, y)?))),
Request::PxSet(x, y, color) => {
pixmap.set_pixel(x, y, color)?;
Ok(None)
}
Request::State(algorithm) => match algorithm {
StateEncodingAlgorithm::Rgb64 => Ok(Some(Response::State(
algorithm,
encodings.rgb64.lock().unwrap().clone(),
))),
StateEncodingAlgorithm::Rgba64 => Ok(Some(Response::State(
algorithm,
encodings.rgba64.lock().unwrap().clone(),
))),
},
}
}
| true |
7601f0df15931f43f89c71f4dd3c3073b8d197b4
|
Rust
|
MichelleJiam/AdventofCode2020
|
/D02/d02a.rs
|
UTF-8
| 748 | 3.265625 | 3 |
[] |
no_license
|
use std::fs::File;
use std::io::Read;
fn main() {
let mut file = File::open("d02-input").unwrap();
let mut input = String::new();
file.read_to_string(&mut input).unwrap();
let mut valid_pass = 0;
for line in input.lines() {
let line2 = &line.replace(":", " ");
let chunks: Vec<_> = line2.split_whitespace().collect();
// chunks[0] = range, chunks[1] = char, chunks[2] = password
let bounds: Vec<_> = chunks[0].split("-").collect();
// bounds[0] = lower limit, bounds[1] = upper limit
let count = chunks[2].matches(chunks[1]).count();
        if bounds[0].parse::<usize>().unwrap() <= count
            && count <= bounds[1].parse::<usize>().unwrap()
        {
            valid_pass += 1;
        }
}
println!("Number of valid passwords: {}", valid_pass);
}
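// Worked example added for clarity, following the parsing in main above: for an input
// line "1-3 a: abcde", `line2` becomes "1-3 a  abcde", `chunks` is ["1-3", "a", "abcde"],
// `bounds` is ["1", "3"], and `count` is 1, which lies within 1..=3, so the password
// counts as valid.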
| true |
6326210a496ac4f0d08dc7b41aad2b59269892e8
|
Rust
|
fulmicoton/irc-search-index
|
/src/index.rs
|
UTF-8
| 2,498 | 2.828125 | 3 |
[
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
use std::path::Path;
use std::io::prelude::*;
use std::io::BufReader;
use std::fs::File;
use std::time::Instant;
use tantivy::Index;
use tantivy::schema::*;
use walkdir::WalkDir;
use regex::Regex;
use errors::*;
lazy_static! {
static ref RE: Regex = Regex::new(r"(?x)
(?P<time>\d{2}:\d{2})\s
[+@&]?
\s*
(?P<nick>[^\s][^>]+)
>
\s
(?P<msg>.+)").unwrap();
static ref WS: Regex = Regex::new(r"\s+").unwrap();
}
pub fn build_index(index_path: &str, data_path: &str) -> Result<()> {
let mut schema_builder = SchemaBuilder::default();
schema_builder.add_text_field("time", TEXT | STORED);
schema_builder.add_text_field("nick", TEXT | STORED);
schema_builder.add_text_field("msg", TEXT | STORED);
let schema = schema_builder.build();
let index_path = Path::new(index_path);
let index = Index::create(index_path, schema.clone())?;
let mut index_writer = index.writer(500_000_000)?;
let time_field = schema.get_field("time").unwrap();
let nick_field = schema.get_field("nick").unwrap();
let msg_field = schema.get_field("msg").unwrap();
let mut count = 0;
println!("Indexing...");
let now = Instant::now();
for entry in WalkDir::new(data_path) {
let entry = entry.unwrap();
if entry.file_type().is_dir() {
continue;
}
let date = entry.path().file_stem().expect("Can't stem filename");
let date = date.to_string_lossy();
let file = File::open(entry.path())?;
let reader = BufReader::new(file);
for line in reader.lines() {
let line = line?;
let caps = match RE.captures(&line) {
Some(m) => m,
None => continue,
};
let datetime = format!("{} {}", date, &caps["time"]);
if WS.is_match(&caps["nick"]) {
continue;
}
let mut doc = Document::default();
doc.add_text(time_field.clone(), &datetime);
doc.add_text(nick_field.clone(), &caps["nick"]);
doc.add_text(msg_field.clone(), &caps["msg"]);
index_writer.add_document(doc);
count += 1;
}
}
println!("Indexing took {} seconds", now.elapsed().as_secs());
let now = Instant::now();
index_writer.commit().expect("Can't write index");
println!("Writing index took {} seconds", now.elapsed().as_secs());
println!("Indexed {} lines", count);
Ok(())
}
| true |
f837f8be6722c37e0bbf54bf4a339ea2ee8cac8f
|
Rust
|
ebkalderon/deck
|
/deck-core/src/manifest.rs
|
UTF-8
| 10,367 | 2.8125 | 3 |
[
"Apache-2.0",
"MIT"
] |
permissive
|
//! Reproducible package manifest data.
pub use self::sources::Source;
use std::collections::{BTreeMap, BTreeSet};
use std::fmt::{Display, Error as FmtError, Formatter, Result as FmtResult};
use std::str::FromStr;
use serde::{Deserialize, Serialize};
use toml::de::Error as DeserializeError;
use self::outputs::Outputs;
use self::sources::Sources;
use crate::hash::Hash;
use crate::id::{ManifestId, OutputId};
use crate::name::Name;
mod outputs;
mod sources;
/// The serializable `package` table in the manifest.
#[derive(Clone, Debug, Eq, Hash, PartialEq, Deserialize, Serialize)]
#[serde(rename_all = "kebab-case")]
struct Package {
name: Name,
version: String,
dependencies: BTreeSet<ManifestId>,
build_dependencies: BTreeSet<ManifestId>,
dev_dependencies: BTreeSet<ManifestId>,
}
/// A reproducible package manifest.
#[derive(Clone, Debug, Eq, Hash, PartialEq, Deserialize, Serialize)]
pub struct Manifest {
package: Package,
#[serde(default, skip_serializing_if = "BTreeMap::is_empty")]
env: BTreeMap<String, String>,
#[serde(rename = "output")]
outputs: Outputs,
#[serde(default, rename = "source", skip_serializing_if = "Sources::is_empty")]
sources: Sources,
}
impl Manifest {
/// Creates a `Manifest` with the given name, version, default output [`Hash`], and references.
///
/// [`Hash`]: ../struct.Hash.html
pub fn build<T, U>(name: T, version: T, default_output_hash: T, refs: U) -> ManifestBuilder
where
T: AsRef<str>,
U: IntoIterator<Item = OutputId>,
{
ManifestBuilder::new(name, version, default_output_hash, refs)
}
/// Computes the content-addressable ID of this manifest.
///
/// # Example
///
/// ```
/// # use deck_core::Manifest;
/// #
/// let manifest = Manifest::build("foo", "1.0.0", "fc3j3vub6kodu4jtfoakfs5xhumqi62m", None)
/// .finish()
/// .unwrap();
///
/// let id = manifest.compute_id();
/// assert_eq!(id, "[email protected]");
/// ```
#[inline]
pub fn compute_id(&self) -> ManifestId {
let name = self.package.name.clone();
let version = self.package.version.clone();
let hash = Hash::compute().input(&self.to_string()).finish();
ManifestId::new(name, version, hash)
}
/// Returns the name of the package.
///
/// This string is guaranteed not to be empty.
///
/// # Example
///
/// ```
/// # use deck_core::Manifest;
/// #
/// let manifest = Manifest::build("foo", "1.0.0", "fc3j3vub6kodu4jtfoakfs5xhumqi62m", None)
/// .finish()
/// .unwrap();
///
/// let name = manifest.name();
/// assert_eq!(name, "foo");
/// ```
#[inline]
pub fn name(&self) -> &str {
self.package.name.as_str()
}
/// Returns the semantic version of the package.
///
/// # Example
///
/// ```
/// # use deck_core::Manifest;
/// #
/// let manifest = Manifest::build("foo", "1.0.0", "fc3j3vub6kodu4jtfoakfs5xhumqi62m", None)
/// .finish()
/// .unwrap();
///
/// let version = manifest.version();
/// assert_eq!(version, "1.0.0");
/// ```
#[inline]
pub fn version(&self) -> &str {
&self.package.version
}
/// Iterates over the package's runtime dependencies.
#[inline]
pub fn dependencies(&self) -> impl Iterator<Item = &ManifestId> {
self.package.dependencies.iter()
}
/// Iterates over the package's build-time dependencies.
#[inline]
pub fn build_dependencies(&self) -> impl Iterator<Item = &ManifestId> {
self.package.build_dependencies.iter()
}
/// Iterates over the package's optional testing dependencies.
#[inline]
pub fn dev_dependencies(&self) -> impl Iterator<Item = &ManifestId> {
self.package.dev_dependencies.iter()
}
/// Iterates over the package builder's environment variables as key-value pairs.
#[inline]
pub fn env(&self) -> impl Iterator<Item = (&String, &String)> + '_ {
self.env.iter()
}
/// Iterates over the package's build outputs.
///
/// # Note
///
/// Every package is guaranteed to produce at least one default output and zero or more additional
/// outputs. When a manifest is built from source, all outputs are built together.
#[inline]
pub fn outputs(&self) -> impl Iterator<Item = OutputId> + '_ {
let name = self.package.name.clone();
let ver = self.package.version.clone();
self.outputs.iter_with(name, ver)
}
/// Iterates over the package's sources.
#[inline]
pub fn sources(&self) -> impl Iterator<Item = &Source> {
self.sources.iter()
}
}
impl Display for Manifest {
fn fmt(&self, fmt: &mut Formatter) -> FmtResult {
toml::to_string(self)
.map_err(|e| {
println!("couldn't display self: {}", e);
FmtError::default()
})
.and_then(|s| write!(fmt, "{}", s))
}
}
impl FromStr for Manifest {
type Err = DeserializeError;
#[inline]
fn from_str(s: &str) -> Result<Self, Self::Err> {
toml::from_str(s)
}
}
/// Builder for creating new `Manifest`s.
#[derive(Clone, Debug)]
pub struct ManifestBuilder {
package: Result<Package, ()>,
env: BTreeMap<String, String>,
sources: Sources,
outputs: Result<Outputs, ()>,
}
impl ManifestBuilder {
/// Creates a `Manifest` with the given name, version, default output [`Hash`], and references.
///
/// [`Hash`]: ../struct.Hash.html
pub fn new<T, U>(name: T, version: T, default_output_hash: T, refs: U) -> Self
where
T: AsRef<str>,
U: IntoIterator<Item = OutputId>,
{
let package = name.as_ref().parse().map(|name| Package {
name,
version: version.as_ref().into(),
dependencies: BTreeSet::new(),
build_dependencies: BTreeSet::new(),
dev_dependencies: BTreeSet::new(),
});
let outputs = default_output_hash
.as_ref()
.parse()
.map(|hash| Outputs::new(hash, refs));
ManifestBuilder {
package,
env: BTreeMap::new(),
sources: Sources::new(),
outputs,
}
}
/// Adds a runtime dependency on `id`.
pub fn dependency(mut self, id: ManifestId) -> Self {
if let Ok(ref mut p) = self.package {
p.dependencies.insert(id);
}
self
}
/// Adds a build dependency on `id`.
///
/// # Laziness
///
/// This kind of dependency is only downloaded when the package is being built from source.
/// Otherwise, the dependency is ignored. Artifacts from build dependencies cannot be linked to
/// at runtime.
pub fn build_dependency(mut self, id: ManifestId) -> Self {
if let Ok(ref mut p) = self.package {
p.build_dependencies.insert(id);
}
self
}
/// Adds a test-only dependency on `id`.
///
/// # Laziness
///
/// This kind of dependency is only downloaded when the package is being built from source and
/// running tests is enabled. Otherwise, the dependency is ignored. Artifacts from dev
/// dependencies cannot be linked to at runtime, and they are never included in the final
/// output.
pub fn dev_dependency(mut self, id: ManifestId) -> Self {
if let Ok(ref mut p) = self.package {
p.dev_dependencies.insert(id);
}
self
}
/// Declares an additional build output directory produced by this manifest.
///
/// Build output directories can accept other build outputs as refs, allowing them to be
/// symlinked into the directory structure for runtime dependencies.
///
/// By default, all manifests produce a single default output. This method allows for secondary
/// "named" outputs to be added with supplementary content, e.g. `doc` for HTML documentation,
/// `man` for man pages, `debug` for debug information, etc.
pub fn output<T>(mut self, name: Name, precomputed_hash: Hash, refs: T) -> Self
where
T: IntoIterator<Item = OutputId>,
{
if let Ok(ref mut out) = self.outputs {
out.append(name, precomputed_hash, refs);
}
self
}
/// Adds an external fetchable source to this manifest.
///
/// # Laziness
///
/// Sources are only downloaded when the package is being built from source. Otherwise, the
/// sources are essentially ignored.
pub fn source(mut self, source: Source) -> Self {
self.sources.insert(source);
self
}
/// Constructs and returns the new [`Manifest`].
///
/// If the package name is empty or contains invalid characters, or if the default output hash
/// is invalid, then this method will return `Err`.
///
/// [`Manifest`]: ./struct.Manifest.html
pub fn finish(self) -> Result<Manifest, ()> {
Ok(Manifest {
package: self.package?,
env: self.env,
outputs: self.outputs?,
sources: self.sources,
})
}
}
#[cfg(test)]
mod tests {
use super::*;
const MANIFEST: &'static str = r#"
[package]
name = "hello"
version = "1.2.3"
dependencies = ["[email protected]"]
build-dependencies = ["[email protected]"]
dev-dependencies = []
[env]
LANG = "C_ALL"
[[output]]
precomputed-hash = "fc3j3vub6kodu4jtfoakfs5xhumqi62m"
references = ["[email protected]:bin-fc3j3vub6kodu4jtfoakfs5xhumqi62m"]
[[output]]
name = "doc"
precomputed-hash = "fc3j3vub6kodu4jtfoakfs5xhumqi62m"
[[output]]
name = "man"
precomputed-hash = "fc3j3vub6kodu4jtfoakfs5xhumqi62m"
references = ["[email protected]:bin-fc3j3vub6kodu4jtfoakfs5xhumqi62m"]
[[source]]
uri = "https://www.example.com/hello.tar.gz"
hash = "1234567890abcdef"
"#;
#[test]
fn example_deserialize() {
let example: Manifest = MANIFEST.parse().expect("Failed to parse manifest");
println!("{}", example);
}
}
| true |
1a921fd919cfc7152ea42df29f4d0c2c8cc69ff6
|
Rust
|
southball/judge-controller
|
/src/util.rs
|
UTF-8
| 1,508 | 3.015625 | 3 |
[] |
no_license
|
use futures_util::stream::{Stream, StreamExt};
use std::io::Write;
pub async fn write_stream_to_file<'a, T>(
stream: &mut T,
path: &'a std::path::Path,
) -> Result<(), Box<dyn std::error::Error>>
where
T: Stream<Item = reqwest::Result<bytes::Bytes>> + std::marker::Unpin,
{
let mut file = std::fs::File::create(path).unwrap();
while let Some(Ok(item)) = stream.next().await {
        file.write_all(&item)?; // write_all guarantees the entire chunk is written
}
file.flush()?;
Ok(())
}
/// Unzip the zip file at zip_path to folder at folder_path.
pub async fn unzip<'a>(
zip_path: &'a std::path::Path,
folder_path: &'a std::path::Path,
) -> Result<(), Box<dyn std::error::Error>> {
log::info!(
"Extracting {} to {}...",
zip_path.to_str().unwrap(),
folder_path.to_str().unwrap()
);
let zip_file = std::fs::File::open(zip_path).unwrap();
let mut archive = zip::ZipArchive::new(zip_file).unwrap();
for i in 0..archive.len() {
let mut file = archive.by_index(i)?;
let filename = file.sanitized_name();
let target = folder_path.join(&filename);
        // Directory entries carry a trailing '/' in their archive entry name.
        if file.name().ends_with('/') {
std::fs::create_dir_all(&target)?;
} else {
if let Some(p) = target.parent() {
if !p.exists() {
std::fs::create_dir_all(&p)?;
}
}
let mut sink = std::fs::File::create(&target).unwrap();
std::io::copy(&mut file, &mut sink)?;
}
}
Ok(())
}
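// Illustrative caller sketch, added here as a comment. It assumes a tokio (or similar)
// async runtime and that `reqwest` is compiled with response streaming available; the
// URL and paths are placeholders.
//
// let response = reqwest::get("https://example.com/problem.zip").await?;
// let mut stream = response.bytes_stream();
// write_stream_to_file(&mut stream, std::path::Path::new("problem.zip")).await?;
// unzip(
//     std::path::Path::new("problem.zip"),
//     std::path::Path::new("problem_data"),
// )
// .await?;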
| true |
02ead2dd85173f697f302069c5fed79fe7eab8bc
|
Rust
|
TomaszWaszczyk/kernel-from-scratch
|
/src/gdt/segment_descriptor.rs
|
UTF-8
| 2,838 | 3.046875 | 3 |
[] |
no_license
|
/// Segment Descriptor
#[derive(Debug, Clone, Copy, Default)]
#[repr(C, packed)]
pub struct SegmentDescriptor {
lim0_15: u16,
base0_15: u16,
base16_23: u8,
access: u8,
lim16_19_flags: u8,
base24_31: u8,
}
impl SegmentDescriptor {
pub const fn new(base: u32, limit: u32, access: u8, flags: u8) -> SegmentDescriptor {
SegmentDescriptor {
lim0_15: (limit & 0xffff) as u16,
base0_15: (base & 0xffff) as u16,
base16_23: ((base & 0xff0000) >> 16) as u8,
access,
lim16_19_flags: ((limit & 0xf0000) >> 16) as u8 | (flags & 0xf) << 4,
base24_31: ((base & 0xff000000) >> 24) as u8,
}
}
fn base(&self) -> usize {
self.base0_15 as usize | (self.base16_23 as usize) << 16 | (self.base24_31 as usize) << 24
}
fn limit(&self) -> usize {
self.lim0_15 as usize | ((self.lim16_19_flags & 0xf) as usize) << 16
}
fn present(&self) -> bool {
self.access & 0x80 != 0
}
fn privilege(&self) -> u8 {
(self.access & 0x60) >> 5
}
fn desc_type(&self) -> bool {
self.access & 0x10 != 0
}
fn executable(&self) -> bool {
self.access & 0x8 != 0
}
fn direction_conforming(&self) -> bool {
self.access & 0x4 != 0
}
fn readable_writable(&self) -> bool {
self.access & 0x2 != 0
}
fn accessed(&self) -> bool {
self.access & 0x1 != 0
}
fn granularity(&self) -> bool {
self.lim16_19_flags & 0x80 != 0
}
fn operand_size(&self) -> bool {
self.lim16_19_flags & 0x40 != 0
}
fn long(&self) -> bool {
self.lim16_19_flags & 0x20 != 0
}
fn available(&self) -> bool {
self.lim16_19_flags & 0x10 != 0
}
}
use core::fmt;
impl fmt::Display for SegmentDescriptor {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "base: {:#010x}, limit: {:#07x}\npresent: {}, privilege: {}\ntype {}, dc: {}, rw: {}\naccessed: {}, granularity: {}, size: {}, long: {}, available {}",
self.base(), self.limit(), self.present(), self.privilege(),
match self.desc_type() {
false => "System",
true => match self.executable() {
false => "Data",
true => "Code",
}
},
self.direction_conforming(), self.readable_writable(), self.accessed(),
match self.granularity() {
false => "1B",
true => "4KiB",
},
match self.operand_size(){
false => "16bit",
true => "32bit",
},
self.long(), self.available())
}
}
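// Added sanity-check example (not in the original kernel code): a classic flat ring-0
// code segment — base 0, limit 0xFFFFF, access byte 0x9A, flags 0xC — round-trips
// through the accessors as expected.
#[cfg(test)]
mod descriptor_example {
    use super::*;

    #[test]
    fn flat_code_segment() {
        let code = SegmentDescriptor::new(0, 0xFFFFF, 0x9A, 0xC);
        assert_eq!(code.base(), 0);
        assert_eq!(code.limit(), 0xFFFFF);
        assert!(code.present());
        assert_eq!(code.privilege(), 0);
        assert!(code.executable());
        assert!(code.granularity()); // 4 KiB granularity, so the segment spans 4 GiB
    }
}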
| true |
7fce878391638b4b0fb802cf1bbe2ef125de9060
|
Rust
|
MaaxGr/latex-toc-markdown
|
/src/toc_formatter.rs
|
UTF-8
| 3,036 | 3.578125 | 4 |
[] |
no_license
|
use regex::Regex;
pub fn line_to_md(line: &str) -> String {
let toc_layer = get_toc_layer(line);
if line.contains("nonumberline") {
let regex = Regex::new(r"\\contentsline \{[^{]+}\{\\nonumberline ([^}]+)}.+").unwrap();
let caps = regex.captures(line).unwrap();
let text = caps.get(1).unwrap().as_str();
return format_text_with_layer(toc_layer, text)
}
if line.contains("numberline") {
let regex = Regex::new(r"\\contentsline \{[^{]+}\{\\numberline \{(\d+[.*\d]*)}([^}]+)}.+").unwrap();
let caps = regex.captures(line).unwrap();
let number = caps.get(1).unwrap().as_str();
let text = caps.get(2).unwrap().as_str();
return format_text_with_layer(toc_layer, format!("{} {}", number, text).as_str())
} else {
let regex = Regex::new(r"\\contentsline \{[^{]+}\{([^{]+)}.+").unwrap();
let caps = regex.captures(line).unwrap();
let text = caps.get(1).unwrap().as_str();
return format_text_with_layer(toc_layer, text)
}
}
fn get_toc_layer(line: &str) -> usize {
if line.contains("{chapter}") {
return 1
}
if line.contains("{section}") {
return 2
}
if line.contains("{subsection}") {
return 3
}
if line.contains("{subsubsection}") {
return 4
}
if line.contains("{paragraph}") {
return 5
}
return 0
}
fn format_text_with_layer(layer: usize, text: &str) -> String {
let spaces = (layer - 1) * 2;
return format!("{}* {}", " ".repeat(spaces), text)
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn line_to_md_chapter() {
let expected = "* Abbildungsverzeichnis";
let subject_under_test = "\\contentsline {chapter}{Abbildungsverzeichnis}{4}{chapter*.2}%";
assert_eq!(expected, line_to_md(subject_under_test));
}
#[test]
fn line_to_md_chapter_numberline() {
let expected = "* 1 Einführung";
let subject_under_test = "\\contentsline {chapter}{\\numberline {1}Einführung}{8}{chapter.1}%";
assert_eq!(expected, line_to_md(subject_under_test));
}
#[test]
fn line_to_md_section_numberline() {
let expected = " * 1.1 Einleitung";
let subject_under_test = "\\contentsline {section}{\\numberline {1.1}Einleitung}{8}{section.1.1}%";
assert_eq!(expected, line_to_md(subject_under_test));
}
#[test]
fn line_to_md_subsection_numberline() {
let expected = " * 2.4.1 Allgemeines";
let subject_under_test = "\\contentsline {subsection}{\\numberline {2.4.1}Allgemeines}{13}{subsection.2.4.1}%";
assert_eq!(expected, line_to_md(subject_under_test));
}
#[test]
fn line_to_md_paragraph_nonumberline() {
let expected = " * Auswahl des Webservers";
let subject_under_test = "\\contentsline {paragraph}{\\nonumberline Auswahl des Webservers}{31}{section*.7}%";
assert_eq!(expected, line_to_md(subject_under_test));
}
}
| true |
6917c517d2f726b3bb37b0961f43aba5ea880105
|
Rust
|
IThawk/rust-project
|
/rust-master/src/test/mir-opt/simplify_cfg.rs
|
UTF-8
| 1,179 | 2.53125 | 3 |
[
"MIT",
"LicenseRef-scancode-other-permissive",
"Apache-2.0",
"BSD-3-Clause",
"BSD-2-Clause",
"NCSA"
] |
permissive
|
// Test that the goto chain starting from bb0 is collapsed.
fn main() {
loop {
if bar() {
break;
}
}
}
#[inline(never)]
fn bar() -> bool {
true
}
// END RUST SOURCE
// START rustc.main.SimplifyCfg-initial.before.mir
// bb0: {
// goto -> bb1;
// }
// bb1: {
// falseUnwind -> [real: bb3, cleanup: bb4];
// }
// ...
// bb11: {
// ...
// goto -> bb1;
// }
// END rustc.main.SimplifyCfg-initial.before.mir
// START rustc.main.SimplifyCfg-initial.after.mir
// bb0: {
// falseUnwind -> [real: bb1, cleanup: bb2];
// }
// ...
// bb5: {
// ...
// goto -> bb0;
// }
// END rustc.main.SimplifyCfg-initial.after.mir
// START rustc.main.SimplifyCfg-early-opt.before.mir
// bb0: {
// goto -> bb1;
// }
// bb1: {
// StorageLive(_2);
// _2 = const bar() -> bb3;
// }
// END rustc.main.SimplifyCfg-early-opt.before.mir
// START rustc.main.SimplifyCfg-early-opt.after.mir
// bb0: {
// StorageLive(_2);
// _2 = const bar() -> bb1;
// }
// END rustc.main.SimplifyCfg-early-opt.after.mir
| true |
1afe35b5830e3412e73202eef174aaae9b0c6122
|
Rust
|
IThawk/rust-project
|
/rust-master/src/test/ui/dropck/dropck-union.rs
|
UTF-8
| 748 | 2.921875 | 3 |
[
"MIT",
"LicenseRef-scancode-other-permissive",
"Apache-2.0",
"BSD-3-Clause",
"BSD-2-Clause",
"NCSA"
] |
permissive
|
#![feature(untagged_unions)]
use std::cell::Cell;
use std::ops::Deref;
use std::mem::ManuallyDrop;
union Wrap<T> { x: ManuallyDrop<T> }
impl<T> Drop for Wrap<T> {
fn drop(&mut self) {
unsafe { std::ptr::drop_in_place(&mut *self.x as *mut T); }
}
}
impl<T> Wrap<T> {
fn new(x: T) -> Self {
Wrap { x: ManuallyDrop::new(x) }
}
}
impl<T> Deref for Wrap<T> {
type Target = T;
#[inline]
fn deref(&self) -> &Self::Target {
unsafe {
&self.x
}
}
}
struct C<'a>(Cell<Option<&'a C<'a>>>);
impl<'a> Drop for C<'a> {
fn drop(&mut self) {}
}
fn main() {
let v : Wrap<C> = Wrap::new(C(Cell::new(None)));
v.0.set(Some(&v)); //~ ERROR: `v` does not live long enough
}
| true |
d7fb63054855787e65afb92107e9b608c03d439d
|
Rust
|
oxidecomputer/openapi-generator
|
/samples/client/petstore/rust/reqwest/fileResponseTest/src/apis/mod.rs
|
UTF-8
| 876 | 2.59375 | 3 |
[
"Apache-2.0"
] |
permissive
|
use reqwest;
use serde_json;
#[derive(Debug, Clone)]
pub struct ResponseContent<T> {
pub status: reqwest::StatusCode,
pub content: String,
pub entity: Option<T>,
}
#[derive(Debug)]
pub enum Error<T> {
Reqwest(reqwest::Error),
Serde(serde_json::Error),
Io(std::io::Error),
ResponseError(ResponseContent<T>),
}
impl <T> From<reqwest::Error> for Error<T> {
fn from(e: reqwest::Error) -> Self {
Error::Reqwest(e)
}
}
impl <T> From<serde_json::Error> for Error<T> {
fn from(e: serde_json::Error) -> Self {
Error::Serde(e)
}
}
impl <T> From<std::io::Error> for Error<T> {
fn from(e: std::io::Error) -> Self {
Error::Io(e)
}
}
pub fn urlencode<T: AsRef<str>>(s: T) -> String {
::url::form_urlencoded::byte_serialize(s.as_ref().as_bytes()).collect()
}
pub mod default_api;
pub mod configuration;
| true |
1695ca83aaf896972cb619f59dacccbc34dcb5e1
|
Rust
|
kyleburton/sandbox
|
/examples/rust/learn-rust/arrays-and-slices/src/main.rs
|
UTF-8
| 982 | 3.53125 | 4 |
[] |
no_license
|
use std::mem;
fn analyze_slice(slice: &[i32]) {
println!("First element of the slice: {}", slice[0]);
println!("The slice has {} elements", slice.len());
}
fn main() {
let xs: [i32; 5] = [1, 2, 3, 4, 5];
let ys: [i32; 500] = [0; 500];
println!("First element of the xs array: {}", xs[0]);
println!("Second element of the xs array: {}", xs[1]);
println!("Number of elements in the xs array: {}", xs.len());
println!("xs occupies {} bytes", mem::size_of_val(&xs));
println!("Borrow the whole array as a slice.");
analyze_slice(&xs);
println!("Borrow a section of the array as a slice.");
analyze_slice(&ys[1 .. 4]);
let empty_array: [u32; 0] = [];
assert_eq!(&empty_array, &[]);
assert_eq!(&empty_array, &[][..]);
for ii in 0..xs.len() +1 {
match xs.get(ii) {
Some(xval) => println!("{}: {}", ii, xval),
None => println!("Slow Down! {} is too far!", ii),
}
}
}
| true |
3c751a68f3476c18e965737dd31ade5c617fc741
|
Rust
|
SimonBartonPSU/Fractal-Generator
|
/src/auto_random.rs
|
UTF-8
| 1,382 | 2.9375 | 3 |
[
"MIT"
] |
permissive
|
// Copyright © 2019 Liam Rotchford, Simon Barton
//! Automatic fractal generation for those who wish to skip the menu system.
use crate::barnsley::*;
use crate::julia_sets::*;
use crate::mandelbrot::*;
use crate::util::*;
use rand::Rng;
/// str literals for randomly selecting a fractal
const FRACTALS: [&str; 4] = ["barnsley", "mandelbrot", "julia", "multi-julia"];
/// Ultimate automation of the fractal generation process.
/// This does all of work of deciding fractal properties for a user
/// including which fractal to use, what colors, and what transformations
pub fn auto_random(num_to_make: usize, filename: &str) {
for i in 0..num_to_make {
let fractal_index = rand::thread_rng().gen_range(0, 4);
let fractal = FRACTALS[fractal_index];
let filename = filename.to_owned() + &i.to_string() + ".png";
let mut scheme = Scheme {
fractal: fractal.to_string(),
..Default::default()
};
randomize(&mut scheme);
match fractal {
"barnsley" => barnsley_fern(1000, 1000, &filename, &mut scheme),
"julia" | "multi-julia" => julia_fractal(fractal, 1000, 1000, &filename, &scheme),
"mandelbrot" => mandelbrot_fractal(1000, 1000, &filename, &mut scheme),
_ => println!("Unsupported fractal type"),
}
random_transforms(&scheme, &filename);
}
}
| true |
533e5fef19c2760891310995518599301ca1f1f8
|
Rust
|
alessiofino/MySearch
|
/src/main.rs
|
UTF-8
| 5,863 | 2.6875 | 3 |
[] |
no_license
|
use std::{char, fs::File, io::BufReader, time::Instant};
use clap::{App, Arg};
use log::error;
use mysearch::index::Index;
use serde_json::Value;
use ncurses::*;
extern crate jemallocator;
#[global_allocator]
static ALLOC: jemallocator::Jemalloc = jemallocator::Jemalloc;
struct CursedPrinter {
attributes: Vec<String>,
}
impl CursedPrinter {
fn new(attr: Option<&str>) -> Self {
let attributes = if let Some(attr) = attr {
attr.split(",").map(|s| s.to_owned()).collect()
} else {
vec![]
};
Self { attributes }
}
fn print(&self, value: Value) {
if self.attributes.is_empty() || !value.is_object() {
addstr(&format!("{:#}", value));
} else {
let obj = value.as_object().unwrap();
for attr in &self.attributes {
if let Some(value) = obj.get(attr) {
attron(A_BOLD());
addstr(&attr);
attroff(A_BOLD());
addstr(" : ");
addstr(&value.to_string());
addstr("\n");
}
}
}
}
}
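// Small added example of how the attribute filter is parsed; not part of the original
// binary. Passing `--attributes title,author` yields two attribute names, while omitting
// the flag leaves the list empty so whole documents are printed.
#[cfg(test)]
mod printer_attributes_example {
    use super::*;

    #[test]
    fn attribute_list_is_split_on_commas() {
        let printer = CursedPrinter::new(Some("title,author"));
        assert_eq!(printer.attributes, vec!["title".to_string(), "author".to_string()]);
        assert!(CursedPrinter::new(None).attributes.is_empty());
    }
}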
fn main() {
let matches = App::new("MySearch")
.version("1.0")
.author("Alessio Fino. <[email protected]>")
.about("Command line utility to perform text search in JSON documents")
.arg(
Arg::with_name("index")
.long("index")
.value_name("JSON_FILE")
.help("Index a JSON document")
.takes_value(true),
)
.arg(
Arg::with_name("no-curses")
.long("no-curses")
.value_name("SEARCH_QUERY")
.help("Do a single search without ncurses TUI")
.takes_value(true),
)
.arg(
Arg::with_name("attributes")
.long("attributes")
.short("a")
.value_name("ATTRIBUTE_LIST")
.help("Attributes of the json documents to show in the results")
.takes_value(true),
)
.get_matches();
let printer = CursedPrinter::new(matches.value_of("attributes"));
if let Some(json_file_path) = matches.value_of("index") {
let file = match File::open(json_file_path) {
Ok(file) => file,
Err(err) => {
error!("Error opening json file: {:?}", err);
return;
}
};
let mut reader = BufReader::new(file);
let documents: Vec<Value> = match serde_json::from_reader(&mut reader) {
Ok(documents) => documents,
Err(err) => {
error!("Error deserializing json file: {:?}", err);
return;
}
};
Index::create_index(documents);
return;
}
match Index::open() {
Ok(index) => {
if let Some(query) = matches.value_of("no-curses") {
single_search(index, query);
} else {
ncur(index, printer);
}
}
Err(err) => {
error!("Error opening index: {:?}", err);
}
}
}
fn single_search(mut index: Index, query: &str) {
env_logger::Builder::from_env(env_logger::Env::default().default_filter_or("info")).init();
let search_results = index.search(query, 10);
let mut output_string = String::new();
let mut result_counter = 0;
'outer: for (_typo, results) in search_results.iter() {
for (_distance, id) in results {
let document = index.fetch_document(*id).unwrap();
output_string += &format!("{:#}", document);
result_counter += 1;
if result_counter >= 10 {
break 'outer;
}
}
}
println!("{}", output_string);
}
fn ncur(mut index: Index, printer: CursedPrinter) {
initscr();
/* Allow for extended keyboard (like F1). */
keypad(stdscr(), true);
noecho();
clear();
attron(A_REVERSE());
    addstr(&format!("Exit: F1 ||\n"));
attroff(A_REVERSE());
attron(A_BOLD());
addstr("Search: ");
attroff(A_BOLD());
refresh();
let mut text_query = String::new();
loop {
let ch = getch();
match ch {
KEY_F1 => {
endwin();
return;
}
KEY_ENTER => {}
KEY_BACKSPACE | KEY_DC | 127 => {
text_query.pop();
}
KEY_DL => {
text_query.clear();
}
_ => {
let c = char::from_u32(ch as u32).expect("Invalid char");
text_query.push(c);
}
}
let start_instant = Instant::now();
let search_results = index.search(&text_query, 10);
let elapsed = start_instant.elapsed().as_micros();
let legend = format!(
"Exit: F1 or Ctrl+C || Found {} results in {}us",
search_results
.iter()
.map(|(_, ids)| ids.len())
.sum::<usize>(),
elapsed
);
clear();
attron(A_REVERSE());
addstr(&legend);
attroff(A_REVERSE());
attron(A_BOLD());
addstr("\nSearch: ");
attroff(A_BOLD());
addstr(&text_query);
addstr("\n");
let mut result_counter = 0;
'outer: for (_typo, results) in search_results.iter() {
for (_distance, id) in results {
let document = index.fetch_document(*id).unwrap();
addstr("\n");
printer.print(document);
result_counter += 1;
if result_counter >= 10 {
break 'outer;
}
}
}
refresh();
}
}
| true |