blob_id
stringlengths
40
40
language
stringclasses
1 value
repo_name
stringlengths
5
140
path
stringlengths
5
183
src_encoding
stringclasses
6 values
length_bytes
int64
12
5.32M
score
float64
2.52
4.94
int_score
int64
3
5
detected_licenses
listlengths
0
47
license_type
stringclasses
2 values
text
stringlengths
12
5.32M
download_success
bool
1 class
9aa82a77d10c2f8e826aef619d50f6d1753ed6ee
Rust
iamsubhranil/rexpr
/src/eval.rs
UTF-8
1,527
3.640625
4
[]
no_license
use crate::builtin::Builtin; use crate::lexer::TokenType; use crate::parser::Node; pub fn eval(tree: &Node) -> f64 { match tree { Node::Literal(val) => *val, Node::Operator(left, operator, right) => { let leftval = eval(left.as_ref()); let rightval = eval(right.as_ref()); match operator { TokenType::Plus => leftval + rightval, TokenType::Minus => leftval - rightval, TokenType::Star => leftval * rightval, TokenType::Backslash => leftval / rightval, TokenType::Cap => leftval.powf(rightval), TokenType::Percentage => leftval % rightval, _ => panic!("Invalid operator {:?}!", operator), } } Node::Function(func, args) => { let argvalues = args.iter().map(|a| eval(a)).collect::<Vec<f64>>(); match func { TokenType::Builtin(idx) => Builtin::exec_builtin(*idx, &argvalues), _ => panic!("Invalid function {:?}!", func), } } } } #[cfg(test)] pub mod tests { use super::eval; use crate::parser::Parser; pub fn expect_eq(s: &str, val: f64) { assert_eq!(eval(&Parser::new(s).parse()), val); } #[test] fn expressions() { expect_eq("1 + 2 * 3 / 4", 2.5); expect_eq("2 ^ 2 ^ 3", 256.0); expect_eq("1 + 2 - 3 * 4 / 5 ^ 6", 2.999232); expect_eq("(((((1 + 2) * 3) - 4) / 5) ^ 6)", 1.0); } }
true
6faf4a7d3117e26f70f4cac5946f5bac9760c55e
Rust
eugeneia/rush
/vendor/once_cell/src/imp_std.rs
UTF-8
9,758
3.140625
3
[ "Apache-2.0", "MIT" ]
permissive
// There's a lot of scary concurrent code in this module, but it is copied from // `std::sync::Once` with two changes: // * no poisoning // * init function can fail use std::{ cell::{Cell, UnsafeCell}, marker::PhantomData, panic::{RefUnwindSafe, UnwindSafe}, sync::atomic::{AtomicBool, AtomicUsize, Ordering}, thread::{self, Thread}, }; #[derive(Debug)] pub(crate) struct OnceCell<T> { // This `state` word is actually an encoded version of just a pointer to a // `Waiter`, so we add the `PhantomData` appropriately. state_and_queue: AtomicUsize, _marker: PhantomData<*mut Waiter>, // FIXME: switch to `std::mem::MaybeUninit` once we are ready to bump MSRV // that far. It was stabilized in 1.36.0, so, if you are reading this and // it's higher than 1.46.0 outside, please send a PR! ;) (and do the same // for `Lazy`, while we are at it). pub(crate) value: UnsafeCell<Option<T>>, } // Why do we need `T: Send`? // Thread A creates a `OnceCell` and shares it with // scoped thread B, which fills the cell, which is // then destroyed by A. That is, destructor observes // a sent value. unsafe impl<T: Sync + Send> Sync for OnceCell<T> {} unsafe impl<T: Send> Send for OnceCell<T> {} impl<T: RefUnwindSafe + UnwindSafe> RefUnwindSafe for OnceCell<T> {} impl<T: UnwindSafe> UnwindSafe for OnceCell<T> {} // Three states that a OnceCell can be in, encoded into the lower bits of `state` in // the OnceCell structure. const INCOMPLETE: usize = 0x0; const RUNNING: usize = 0x1; const COMPLETE: usize = 0x2; // Mask to learn about the state. All other bits are the queue of waiters if // this is in the RUNNING state. const STATE_MASK: usize = 0x3; // Representation of a node in the linked list of waiters in the RUNNING state. #[repr(align(4))] // Ensure the two lower bits are free to use as state bits. struct Waiter { thread: Cell<Option<Thread>>, signaled: AtomicBool, next: *const Waiter, } // Head of a linked list of waiters. // Every node is a struct on the stack of a waiting thread. 
// Will wake up the waiters when it gets dropped, i.e. also on panic. struct WaiterQueue<'a> { state_and_queue: &'a AtomicUsize, set_state_on_drop_to: usize, } impl<T> OnceCell<T> { pub(crate) const fn new() -> OnceCell<T> { OnceCell { state_and_queue: AtomicUsize::new(INCOMPLETE), _marker: PhantomData, value: UnsafeCell::new(None), } } /// Safety: synchronizes with store to value via Release/(Acquire|SeqCst). #[inline] pub(crate) fn is_initialized(&self) -> bool { // An `Acquire` load is enough because that makes all the initialization // operations visible to us, and, this being a fast path, weaker // ordering helps with performance. This `Acquire` synchronizes with // `SeqCst` operations on the slow path. self.state_and_queue.load(Ordering::Acquire) == COMPLETE } /// Safety: synchronizes with store to value via SeqCst read from state, /// writes value only once because we never get to INCOMPLETE state after a /// successful write. #[cold] pub(crate) fn initialize<F, E>(&self, f: F) -> Result<(), E> where F: FnOnce() -> Result<T, E>, { let mut f = Some(f); let mut res: Result<(), E> = Ok(()); let slot = &self.value; initialize_inner(&self.state_and_queue, &mut || { let f = f.take().unwrap(); match f() { Ok(value) => { unsafe { *slot.get() = Some(value) }; true } Err(e) => { res = Err(e); false } } }); res } } // Corresponds to `std::sync::Once::call_inner` // Note: this is intentionally monomorphic fn initialize_inner(my_state_and_queue: &AtomicUsize, init: &mut dyn FnMut() -> bool) -> bool { let mut state_and_queue = my_state_and_queue.load(Ordering::Acquire); loop { match state_and_queue { COMPLETE => return true, INCOMPLETE => { let old = my_state_and_queue.compare_and_swap( state_and_queue, RUNNING, Ordering::Acquire, ); if old != state_and_queue { state_and_queue = old; continue; } let mut waiter_queue = WaiterQueue { state_and_queue: my_state_and_queue, set_state_on_drop_to: INCOMPLETE, // Difference, std uses `POISONED` }; let success = init(); // 
Difference, std always uses `COMPLETE` waiter_queue.set_state_on_drop_to = if success { COMPLETE } else { INCOMPLETE }; return success; } _ => { assert!(state_and_queue & STATE_MASK == RUNNING); wait(&my_state_and_queue, state_and_queue); state_and_queue = my_state_and_queue.load(Ordering::Acquire); } } } } // Copy-pasted from std exactly. fn wait(state_and_queue: &AtomicUsize, mut current_state: usize) { loop { if current_state & STATE_MASK != RUNNING { return; } let node = Waiter { thread: Cell::new(Some(thread::current())), signaled: AtomicBool::new(false), next: (current_state & !STATE_MASK) as *const Waiter, }; let me = &node as *const Waiter as usize; let old = state_and_queue.compare_and_swap(current_state, me | RUNNING, Ordering::Release); if old != current_state { current_state = old; continue; } while !node.signaled.load(Ordering::Acquire) { thread::park(); } break; } } // Copy-pasted from std exactly. impl Drop for WaiterQueue<'_> { fn drop(&mut self) { let state_and_queue = self.state_and_queue.swap(self.set_state_on_drop_to, Ordering::AcqRel); assert_eq!(state_and_queue & STATE_MASK, RUNNING); unsafe { let mut queue = (state_and_queue & !STATE_MASK) as *const Waiter; while !queue.is_null() { let next = (*queue).next; let thread = (*queue).thread.replace(None).unwrap(); (*queue).signaled.store(true, Ordering::Release); queue = next; thread.unpark(); } } } } // These test are snatched from std as well. 
#[cfg(test)] mod tests { use std::panic; use std::{sync::mpsc::channel, thread}; use super::OnceCell; impl<T> OnceCell<T> { fn init(&self, f: impl FnOnce() -> T) { enum Void {} let _ = self.initialize(|| Ok::<T, Void>(f())); } } #[test] fn smoke_once() { static O: OnceCell<()> = OnceCell::new(); let mut a = 0; O.init(|| a += 1); assert_eq!(a, 1); O.init(|| a += 1); assert_eq!(a, 1); } #[test] #[cfg_attr(miri, ignore)] // miri doesn't support threads fn stampede_once() { static O: OnceCell<()> = OnceCell::new(); static mut RUN: bool = false; let (tx, rx) = channel(); for _ in 0..10 { let tx = tx.clone(); thread::spawn(move || { for _ in 0..4 { thread::yield_now() } unsafe { O.init(|| { assert!(!RUN); RUN = true; }); assert!(RUN); } tx.send(()).unwrap(); }); } unsafe { O.init(|| { assert!(!RUN); RUN = true; }); assert!(RUN); } for _ in 0..10 { rx.recv().unwrap(); } } #[test] fn poison_bad() { static O: OnceCell<()> = OnceCell::new(); // poison the once let t = panic::catch_unwind(|| { O.init(|| panic!()); }); assert!(t.is_err()); // we can subvert poisoning, however let mut called = false; O.init(|| { called = true; }); assert!(called); // once any success happens, we stop propagating the poison O.init(|| {}); } #[test] #[cfg_attr(miri, ignore)] // miri doesn't support threads fn wait_for_force_to_finish() { static O: OnceCell<()> = OnceCell::new(); // poison the once let t = panic::catch_unwind(|| { O.init(|| panic!()); }); assert!(t.is_err()); // make sure someone's waiting inside the once via a force let (tx1, rx1) = channel(); let (tx2, rx2) = channel(); let t1 = thread::spawn(move || { O.init(|| { tx1.send(()).unwrap(); rx2.recv().unwrap(); }); }); rx1.recv().unwrap(); // put another waiter on the once let t2 = thread::spawn(|| { let mut called = false; O.init(|| { called = true; }); assert!(!called); }); tx2.send(()).unwrap(); assert!(t1.join().is_ok()); assert!(t2.join().is_ok()); } #[test] #[cfg(target_pointer_width = "64")] fn test_size() { use 
std::mem::size_of; assert_eq!(size_of::<OnceCell<u32>>(), 4 * size_of::<u32>()); } }
true
cd2f512755d011a4f5b3cfd56b083478f851d1a4
Rust
aspires/lucet
/lucet-idl/src/parser.rs
UTF-8
33,262
2.921875
3
[ "LLVM-exception", "Apache-2.0" ]
permissive
use super::lexer::{Keyword, LexError, Lexer, LocatedError, LocatedToken, Token}; use super::types::{AtomType, Attr, Location}; use std::error::Error; use std::fmt; #[derive(Debug, PartialEq, Eq, Clone)] pub enum SyntaxDecl { Struct { name: String, members: Vec<StructMember>, attrs: Vec<Attr>, location: Location, }, TaggedUnion { name: String, variants: Vec<UnionVariant>, attrs: Vec<Attr>, location: Location, }, Enum { name: String, variants: Vec<EnumVariant>, attrs: Vec<Attr>, location: Location, }, Alias { name: String, what: SyntaxRef, attrs: Vec<Attr>, location: Location, }, } impl SyntaxDecl { pub fn name(&self) -> &str { match self { SyntaxDecl::Struct { name, .. } => &name, SyntaxDecl::TaggedUnion { name, .. } => &name, SyntaxDecl::Enum { name, .. } => &name, SyntaxDecl::Alias { name, .. } => &name, } } pub fn location(&self) -> &Location { match self { SyntaxDecl::Struct { location, .. } => &location, SyntaxDecl::TaggedUnion { location, .. } => &location, SyntaxDecl::Enum { location, .. } => &location, SyntaxDecl::Alias { location, .. 
} => &location, } } } #[derive(Debug, PartialEq, Eq, Clone)] pub enum SyntaxRef { Atom { atom: AtomType, location: Location, }, Ptr { to: Box<SyntaxRef>, location: Location, }, Name { name: String, location: Location, }, } #[derive(Debug, PartialEq, Eq, Clone)] pub struct StructMember { pub name: String, pub type_: SyntaxRef, pub attrs: Vec<Attr>, pub location: Location, } #[derive(Debug, PartialEq, Eq, Clone)] pub struct UnionVariant { pub name: String, pub type_: Option<SyntaxRef>, pub attrs: Vec<Attr>, pub location: Location, } #[derive(Debug, PartialEq, Eq, Clone)] pub struct EnumVariant { pub name: String, pub attrs: Vec<Attr>, pub location: Location, } #[derive(Debug, PartialEq, Eq, Clone)] pub struct ParseError { pub location: Location, pub message: String, } impl fmt::Display for ParseError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!( f, "Parse error at line {} column {}: {}", self.location.line, self.location.column, self.message ) } } impl Error for ParseError { fn description(&self) -> &str { "Parse error" } } macro_rules! parse_err { ($loc:expr, $msg: expr ) => { Err(ParseError { location: $loc.clone(), message: $msg.to_string(), }) }; ($loc:expr, $fmt:expr, $( $arg:expr),+ ) => { Err(ParseError { location: $loc.clone(), message: format!( $fmt, $( $arg ),+ ), }) }; } macro_rules! 
err_ctx { ($ctx:expr, $res:expr) => { match $res { Ok(a) => Ok(a), Err(ParseError { location, message }) => Err(ParseError { location, message: format!("in {}:\n{}", $ctx, message), }), } }; } pub struct Parser<'a> { lex: Lexer<'a>, lookahead: Option<Token<'a>>, pub lex_error: Option<LexError>, location: Location, } impl<'a> Parser<'a> { pub fn new(text: &'a str) -> Parser { Parser { lex: Lexer::new(text), lookahead: None, lex_error: None, location: Location { line: 0, column: 0 }, } } fn consume(&mut self) -> Token<'a> { self.lookahead.take().expect("no token to consume") } fn token(&mut self) -> Option<Token<'a>> { while self.lookahead == None { match self.lex.next() { Some(Ok(LocatedToken { token, location })) => { self.location = location; self.lookahead = Some(token) } Some(Err(LocatedError { error, location })) => { self.location = location; self.lex_error = Some(error); break; } None => break, } } self.lookahead } fn match_token(&mut self, want: Token<'a>, err_msg: &str) -> Result<Token<'a>, ParseError> { if self.token() == Some(want) { Ok(self.consume()) } else { parse_err!(self.location, err_msg) } } fn match_a_word(&mut self, err_msg: &str) -> Result<&'a str, ParseError> { match self.token() { Some(Token::Word(text)) => { self.consume(); Ok(text) } t => parse_err!(self.location, "{}, got {:?}", err_msg, t), } } fn match_attr_body(&mut self) -> Result<Attr, ParseError> { let location = self.location; self.match_token(Token::LBracket, "expected attribute start [")?; let key = self.match_a_word("expected attribute key")?; self.match_token(Token::Equals, "expected =")?; let val = match self.token() { Some(Token::Word(text)) => text, Some(Token::Quote(text)) => text, _ => parse_err!(self.location, "expected word or quoted string")?, }; self.consume(); self.match_token(Token::RBracket, "expected ]")?; Ok(Attr::new(key, val, location)) } fn match_struct_body(&mut self) -> Result<Vec<StructMember>, ParseError> { let mut members = Vec::new(); let mut attrs = 
Vec::new(); loop { match self.token() { Some(Token::RBrace) => { self.consume(); break; } Some(Token::Hash) => { self.consume(); attrs.push(self.match_attr_body()?); } Some(Token::Word(member_name)) => { let location = self.location; self.consume(); self.match_token(Token::Colon, "expected :")?; let member_ref = self.match_ref("expected member type")?; members.push(StructMember { name: member_name.to_string(), type_: member_ref, attrs: attrs.clone(), location, }); attrs.clear(); match self.token() { Some(Token::Comma) => { self.consume(); continue; } Some(Token::RBrace) => { self.consume(); break; } _ => parse_err!(self.location, "in struct body:\nexpected , or }}")?, } } _ => parse_err!(self.location, "in struct body:\nexpected member name or }}")?, } } Ok(members) } fn match_tagged_union_body(&mut self) -> Result<Vec<UnionVariant>, ParseError> { let mut variants = Vec::new(); let mut attrs = Vec::new(); loop { match self.token() { Some(Token::RBrace) => { self.consume(); break; } Some(Token::Hash) => { self.consume(); attrs.push(self.match_attr_body()?); } Some(Token::Word(variant_name)) => { let location = self.location; self.consume(); self.match_token(Token::Colon, "expected :")?; let type_ = match self.token() { Some(Token::LPar) => { self.consume(); self.match_token(Token::RPar, "expected )")?; None } _ => Some(self.match_ref("expected member type or ()")?), }; variants.push(UnionVariant { name: variant_name.to_owned(), type_, attrs: attrs.clone(), location, }); attrs.clear(); match self.token() { Some(Token::Comma) => { self.consume(); continue; } Some(Token::RBrace) => { self.consume(); break; } _ => parse_err!(self.location, "expected , or }}")?, } } _ => parse_err!(self.location, "expected variant")?, } } Ok(variants) } fn match_enum_body(&mut self) -> Result<Vec<EnumVariant>, ParseError> { let mut names = Vec::new(); let mut attrs = Vec::new(); loop { match self.token() { Some(Token::RBrace) => { self.consume(); break; } Some(Token::Hash) => { 
self.consume(); attrs.push(self.match_attr_body()?); } Some(Token::Word(name)) => { let location = self.location; self.consume(); names.push(EnumVariant { name: name.to_owned(), attrs: attrs.clone(), location, }); attrs.clear(); match self.token() { Some(Token::Comma) => { self.consume(); continue; } Some(Token::RBrace) => { self.consume(); break; } _ => parse_err!(self.location, "expected , or }}")?, } } _ => parse_err!(self.location, "expected variant")?, } } Ok(names) } pub fn match_decl(&mut self, err_msg: &str) -> Result<Option<SyntaxDecl>, ParseError> { let mut attrs = Vec::new(); loop { match self.token() { Some(Token::Keyword(Keyword::Struct)) => { let location = self.location; self.consume(); let name = err_ctx!(err_msg, self.match_a_word("expected struct name"))?; err_ctx!(err_msg, self.match_token(Token::LBrace, "expected {"))?; let members = err_ctx!(err_msg, self.match_struct_body())?; return Ok(Some(SyntaxDecl::Struct { name: name.to_owned(), members, attrs, location, })); } Some(Token::Keyword(Keyword::TaggedUnion)) => { let location = self.location; self.consume(); let name = err_ctx!(err_msg, self.match_a_word("expected tagged union name"))?; err_ctx!(err_msg, self.match_token(Token::LBrace, "expected {"))?; let variants = err_ctx!(err_msg, self.match_tagged_union_body())?; return Ok(Some(SyntaxDecl::TaggedUnion { name: name.to_owned(), variants, attrs, location, })); } Some(Token::Keyword(Keyword::Enum)) => { let location = self.location; self.consume(); let name = err_ctx!(err_msg, self.match_a_word("expected enum name"))?; err_ctx!(err_msg, self.match_token(Token::LBrace, "expected {"))?; let variants = err_ctx!(err_msg, self.match_enum_body())?; return Ok(Some(SyntaxDecl::Enum { name: name.to_owned(), variants, attrs, location, })); } Some(Token::Keyword(Keyword::Type)) => { let location = self.location; self.consume(); let name = err_ctx!(err_msg, self.match_a_word("expected type name"))?; err_ctx!(err_msg, self.match_token(Token::Equals, 
"expected ="))?; let what = self.match_ref("type value")?; return Ok(Some(SyntaxDecl::Alias { name: name.to_owned(), what, attrs, location, })); } Some(Token::Hash) => { self.consume(); attrs.push(self.match_attr_body()?); continue; } Some(_) => return parse_err!(self.location, "expected keyword or attribute"), None => return Ok(None), } } } pub fn match_decls(&mut self) -> Result<Vec<SyntaxDecl>, ParseError> { let mut decls = Vec::new(); loop { match self.match_decl("declaration") { Ok(Some(decl)) => decls.push(decl), Ok(None) => break, Err(e) => Err(e)?, } } Ok(decls) } fn match_ref(&mut self, err_msg: &str) -> Result<SyntaxRef, ParseError> { match self.token() { Some(Token::Atom(atom)) => { let location = self.location; self.consume(); Ok(SyntaxRef::Atom { atom, location }) } Some(Token::Word(name)) => { let location = self.location; self.consume(); Ok(SyntaxRef::Name { name: name.to_owned(), location, }) } Some(Token::Star) => { let location = self.location; self.consume(); let ref_of = self.match_ref(err_msg)?; Ok(SyntaxRef::Ptr { to: Box::new(ref_of), location, }) } _ => err_ctx!( err_msg, parse_err!(self.location, "expected atom, ref, or type name") ), } } } #[cfg(test)] mod tests { use super::*; #[test] fn structs() { let mut parser = Parser::new("struct foo {}"); assert_eq!( parser .match_decl("empty struct") .expect("valid parse") .expect("valid decl"), SyntaxDecl::Struct { name: "foo".to_string(), members: Vec::new(), attrs: Vec::new(), location: Location { line: 1, column: 0 }, } ); let mut parser = Parser::new("struct foo {a: i32 }"); // column ruler: 0 7 12 15 assert_eq!( parser .match_decl("foo a i32") .expect("valid parse") .expect("valid decl"), SyntaxDecl::Struct { name: "foo".to_string(), members: vec![StructMember { name: "a".to_owned(), type_: SyntaxRef::Atom { atom: AtomType::I32, location: Location { line: 1, column: 15, }, }, attrs: Vec::new(), location: Location { line: 1, column: 12, }, }], attrs: Vec::new(), location: Location { line: 1, 
column: 0 }, } ); let mut parser = Parser::new("struct foo {b: i32, }"); // 0 7 12 15 assert_eq!( parser .match_decl("foo b i32 with trailing comma") .expect("valid parse") .expect("valid decl"), SyntaxDecl::Struct { name: "foo".to_string(), members: vec![StructMember { name: "b".to_owned(), type_: SyntaxRef::Atom { atom: AtomType::I32, location: Location { line: 1, column: 15, }, }, attrs: Vec::new(), location: Location { line: 1, column: 12, }, }], attrs: Vec::new(), location: Location { line: 1, column: 0 }, } ); let mut parser = Parser::new("struct c { d: f64, e: *u8 }"); // 0 7 11 14 19 22 assert_eq!( parser .match_decl("struct c") .expect("valid parse") .expect("valid decl"), SyntaxDecl::Struct { name: "c".to_string(), members: vec![ StructMember { name: "d".to_owned(), type_: SyntaxRef::Atom { atom: AtomType::F64, location: Location { line: 1, column: 14, }, }, attrs: Vec::new(), location: Location { line: 1, column: 11, }, }, StructMember { name: "e".to_owned(), type_: SyntaxRef::Ptr { to: Box::new(SyntaxRef::Atom { atom: AtomType::U8, location: Location { line: 1, column: 23, }, }), location: Location { line: 1, column: 22, }, }, attrs: Vec::new(), location: Location { line: 1, column: 19, }, }, ], attrs: Vec::new(), location: Location { line: 1, column: 0 }, } ); // Test out attributes: let mut parser = Parser::new("#[key1=val1] struct foo {}"); assert_eq!( parser .match_decl("empty struct") .expect("valid parse") .expect("valid decl"), SyntaxDecl::Struct { name: "foo".to_string(), members: Vec::new(), attrs: vec![Attr::new("key1", "val1", Location { line: 1, column: 0 })], location: Location { line: 1, column: 13, }, } ); let mut parser = Parser::new("#[key2=\"1 value with spaces!\"]\nstruct foo {}"); assert_eq!( parser .match_decl("empty struct") .expect("valid parse") .expect("valid decl"), SyntaxDecl::Struct { name: "foo".to_string(), members: Vec::new(), attrs: vec![Attr::new( "key2", "1 value with spaces!", Location { line: 1, column: 0 }, )], 
location: Location { line: 2, column: 0 }, } ); let mut parser = Parser::new("#[key1=val1]\n\t#[key2 = \"val2\" ]\nstruct foo {}"); assert_eq!( parser .match_decl("empty struct") .expect("valid parse") .expect("valid decl"), SyntaxDecl::Struct { name: "foo".to_string(), members: Vec::new(), attrs: vec![ Attr::new("key1", "val1", Location { line: 1, column: 0 }), Attr::new("key2", "val2", Location { line: 2, column: 8 }), ], location: Location { line: 3, column: 0 }, } ); let mut parser = Parser::new("struct foo {\n\t#[key=val]\n\tmem: f32,\n}"); assert_eq!( parser .match_decl("empty struct") .expect("valid parse") .expect("valid decl"), SyntaxDecl::Struct { name: "foo".to_string(), members: vec![StructMember { name: "mem".to_owned(), type_: SyntaxRef::Atom { atom: AtomType::F32, location: Location { line: 3, column: 13, }, }, attrs: vec![Attr::new("key", "val", Location { line: 2, column: 8 })], location: Location { line: 3, column: 8 }, }], attrs: Vec::new(), // location: Location { line: 1, column: 0 }, } ); } #[test] fn tagged_unions() { let mut parser = Parser::new("taggedunion foo {}"); assert_eq!( parser .match_decl("empty tagged union") .expect("valid parse") .expect("valid decl"), SyntaxDecl::TaggedUnion { name: "foo".to_owned(), variants: Vec::new(), attrs: Vec::new(), location: Location { line: 1, column: 0 }, }, ); let mut parser = Parser::new("taggedunion bar {a: (), }"); // 0 12 17 assert_eq!( parser .match_decl("tagged union, trailing comma") .expect("valid parse") .expect("valid decl"), SyntaxDecl::TaggedUnion { name: "bar".to_owned(), variants: vec![UnionVariant { name: "a".to_owned(), type_: None, attrs: Vec::new(), location: Location { line: 1, column: 17, }, }], attrs: Vec::new(), location: Location { line: 1, column: 0 }, }, ); let mut parser = Parser::new("taggedunion bat {a : ( ) }"); // 0 12 17 assert_eq!( parser .match_decl("tagged union, no trailing comma") .expect("valid parse") .expect("valid decl"), SyntaxDecl::TaggedUnion { name: 
"bat".to_owned(), variants: vec![UnionVariant { name: "a".to_owned(), type_: None, attrs: Vec::new(), location: Location { line: 1, column: 17, }, }], attrs: Vec::new(), location: Location { line: 1, column: 0 }, }, ); let mut parser = Parser::new("taggedunion baz {a:(), b:f32, }"); // 0 12 17 23 assert_eq!( parser .match_decl("2 member tagged union, trailing comma") .expect("valid parse") .expect("valid decl"), SyntaxDecl::TaggedUnion { name: "baz".to_owned(), variants: vec![ UnionVariant { name: "a".to_owned(), type_: None, attrs: Vec::new(), location: Location { line: 1, column: 17, }, }, UnionVariant { name: "b".to_owned(), type_: Some(SyntaxRef::Atom { atom: AtomType::F32, location: Location { line: 1, column: 25, }, }), attrs: Vec::new(), location: Location { line: 1, column: 23, }, }, ], attrs: Vec::new(), location: Location { line: 1, column: 0 }, }, ); let mut parser = Parser::new("taggedunion acab {a:(), b: something_else }"); // 0 12 18 24 27 assert_eq!( parser .match_decl("2 member tagged union, no trailing comma") .expect("valid parse") .expect("valid decl"), SyntaxDecl::TaggedUnion { name: "acab".to_owned(), variants: vec![ UnionVariant { name: "a".to_owned(), type_: None, attrs: Vec::new(), location: Location { line: 1, column: 18, }, }, UnionVariant { name: "b".to_owned(), type_: Some(SyntaxRef::Name { name: "something_else".to_owned(), location: Location { line: 1, column: 27, }, }), attrs: Vec::new(), location: Location { line: 1, column: 24, }, }, ], attrs: Vec::new(), location: Location { line: 1, column: 0 }, }, ); let mut parser = Parser::new("#[attr1=ftp]\ntaggedunion acab {\n#[yes=\"all of them\"] are_complicit: (),\n#[especially= PPB\n]and_always: lie}"); assert_eq!( parser .match_decl("2 member tagged union, no trailing comma, with attributes") .expect("valid parse") .expect("valid decl"), SyntaxDecl::TaggedUnion { name: "acab".to_owned(), variants: vec![ UnionVariant { name: "are_complicit".to_owned(), type_: None, attrs: vec![Attr::new( 
"yes", "all of them", Location { line: 3, column: 0 }, )], location: Location { line: 3, column: 21, }, }, UnionVariant { name: "and_always".to_owned(), type_: Some(SyntaxRef::Name { name: "lie".to_owned(), location: Location { line: 5, column: 13, }, }), attrs: vec![Attr::new( "especially", "PPB", Location { line: 4, column: 0 }, )], location: Location { line: 5, column: 1 }, }, ], attrs: vec![Attr::new("attr1", "ftp", Location { line: 1, column: 0 })], location: Location { line: 2, column: 0 }, }, ); } #[test] fn enums() { let mut parser = Parser::new("enum foo {}"); // 0 5 assert_eq!( parser .match_decl("empty enum") .expect("valid parse") .expect("valid decl"), SyntaxDecl::Enum { name: "foo".to_owned(), variants: Vec::new(), attrs: Vec::new(), location: Location { line: 1, column: 0 }, }, ); let mut parser = Parser::new("enum foo {first,}"); // 0 5 10 assert_eq!( parser .match_decl("one entry enum, trailing comma") .expect("valid parse") .expect("valid decl"), SyntaxDecl::Enum { name: "foo".to_owned(), variants: vec![EnumVariant { name: "first".to_owned(), attrs: Vec::new(), location: Location { line: 1, column: 10, }, }], attrs: Vec::new(), location: Location { line: 1, column: 0 }, }, ); let mut parser = Parser::new("enum bar {first}"); // 0 5 10 assert_eq!( parser .match_decl("one entry enum, no trailing comma") .expect("valid parse") .expect("valid decl"), SyntaxDecl::Enum { name: "bar".to_owned(), variants: vec![EnumVariant { name: "first".to_owned(), attrs: Vec::new(), location: Location { line: 1, column: 10, }, }], attrs: Vec::new(), location: Location { line: 1, column: 0 }, }, ); let mut parser = Parser::new("enum baz { one, two, three\n, four, }"); // 0 5 11 16 21 0 2 assert_eq!( parser .match_decl("four entry enum, trailing comma") .expect("valid parse") .expect("valid decl"), SyntaxDecl::Enum { name: "baz".to_owned(), variants: vec![ EnumVariant { name: "one".to_owned(), attrs: Vec::new(), location: Location { line: 1, column: 11, }, }, EnumVariant 
{ name: "two".to_owned(), attrs: Vec::new(), location: Location { line: 1, column: 16, }, }, EnumVariant { name: "three".to_owned(), attrs: Vec::new(), location: Location { line: 1, column: 21, }, }, EnumVariant { name: "four".to_owned(), attrs: Vec::new(), location: Location { line: 2, column: 2 }, }, ], attrs: Vec::new(), location: Location { line: 1, column: 0 }, }, ); } }
true
0031cc31dd6c335609a0390e7645c3d696d41e4e
Rust
emgre/agc
/agc/src/cpu/mod.rs
UTF-8
12,406
3
3
[ "MIT", "LicenseRef-scancode-unknown-license-reference" ]
permissive
use registers::BranchRegister; use crate::cpu::instructions::*; use crate::cpu::registers::{AddressRegister, MemoryAddress, SequenceRegister}; use crate::memory::{ErasableStorage, FixedStorage, MemoryWord}; use crate::word::*; mod control_pulses; mod instructions; mod registers; #[derive(Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd)] pub enum TimePulse { T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, } impl TimePulse { fn next(&self) -> Self { match self { Self::T1 => Self::T2, Self::T2 => Self::T3, Self::T3 => Self::T4, Self::T4 => Self::T5, Self::T5 => Self::T6, Self::T6 => Self::T7, Self::T7 => Self::T8, Self::T8 => Self::T9, Self::T9 => Self::T10, Self::T10 => Self::T11, Self::T11 => Self::T12, Self::T12 => Self::T1, } } } impl From<TimePulse> for usize { fn from(from: TimePulse) -> usize { match from { TimePulse::T1 => 1, TimePulse::T2 => 2, TimePulse::T3 => 3, TimePulse::T4 => 4, TimePulse::T5 => 5, TimePulse::T6 => 6, TimePulse::T7 => 7, TimePulse::T8 => 8, TimePulse::T9 => 9, TimePulse::T10 => 10, TimePulse::T11 => 11, TimePulse::T12 => 12, } } } pub struct Cpu { // These registers are visible to the programmer /// Accumulator pub a: W16, /// Low-order product pub l: W16, /// Return address register pub q: W16, /// Program counter pub z: W16, pub ebank: W3, pub fbank: W5, // These registers are hidden to the programmer. 
They are only used by control pulses /// Buffer register pub b: W16, /// Memory buffer register pub g: W16, /// Memory address register pub s: AddressRegister, /// Sequence register pub sq: SequenceRegister, /// Stage counter pub st: W3, /// Arithmetic X pub x: W16, /// Arithmetic Y pub y: W16, /// Carry flip-flop pub ci: bool, /// Branch register (used to make decision) pub br: BranchRegister, // Storage of the computer /// Erasable (read-write) memory storage erasable_storage: ErasableStorage, /// Fixed (read-only) memory storage fixed_storage: FixedStorage, // Emulation parameters pub current_timepulse: TimePulse, /// Value of S after T1 /// /// This is necessary because even if another address is written /// to the S register, the original address is used when writing /// back to erasable memory current_s: AddressRegister, /// At next T12, read the next instruction into register SQ /// /// This is generated by control pulse NISQ. nisq: bool, /// Signals that the next instruction is extended. /// /// This is the FUTEXT flip-flop. ext: bool, /// Value of ST at next MCT next_st: W3, inhibit_interrupts: bool, } impl Cpu { /// Create a CPU from a fixed storage ROM /// /// All internal parameters will be initialized to zero and /// the CPU will be reset, ready to perform a GOJAM. 
pub fn new(fixed_storage: FixedStorage) -> Self { Cpu { a: W16::zero(), l: W16::zero(), q: W16::zero(), z: W16::zero(), ebank: W3::zero(), fbank: W5::zero(), b: W16::zero(), g: W16::zero(), s: AddressRegister::zero(), sq: SequenceRegister::new(W6::zero(), false), st: W3::from(0o1), x: W16::zero(), y: W16::zero(), ci: false, br: BranchRegister::new(), erasable_storage: ErasableStorage::new(), fixed_storage, current_timepulse: TimePulse::T1, current_s: AddressRegister::zero(), nisq: false, ext: false, next_st: W3::zero(), inhibit_interrupts: false, } } pub fn current_subinstruction(&self) -> &'static Subinstruction { // STD2 is always executed if ST = 0b010 if self.st == W3::from(0b010) { return &STD2; } if !self.sq.is_extended() { // Non-extended subinstructions match self.sq.order_code().as_u16() { 0b000 => match self.st.as_u16() { 0b000 => &TC0, 0b001 => &GOJ1, _ => panic!("opcode {} with st {} does not exist", self.sq, self.st), }, 0b001 => match self.sq.quarter_code().as_u16() { 0b00 => unimplemented!("opcode {}", self.sq), 0b01 | 0b10 | 0b11 => &TCF0, _ => panic!("opcode {} with st {} does not exist", self.sq, self.st), }, 0b010 => match self.sq.quarter_code().as_u16() { 0b00 => unimplemented!("opcode {}", self.sq), 0b01 => unimplemented!("opcode {}", self.sq), 0b10 => &INCR0, 0b11 => unimplemented!("opcode {}", self.sq), _ => panic!("opcode {} with st {} does not exist", self.sq, self.st), }, 0b011 => &CA0, 0b100 => &CS0, 0b101 => match self.sq.quarter_code().as_u16() { 0b00 => unimplemented!("opcode {}", self.sq), 0b01 => unimplemented!("opcode {}", self.sq), 0b10 => &TS0, 0b11 => &XCH0, _ => unimplemented!("opcode {}", self.sq), }, 0b110 => unimplemented!("opcode {}", self.sq), 0b111 => unimplemented!("opcode {}", self.sq), _ => panic!("opcode {} does not exist", self.sq), } } else { // Extended subinstructions match self.sq.order_code().as_u16() { 0b000 => match self.sq.peripheral_code().as_u16() { 0b000 => unimplemented!("opcode {}", self.sq), 0b001 => 
&WRITE0, 0b010 => unimplemented!("opcode {}", self.sq), 0b011 => unimplemented!("opcode {}", self.sq), 0b100 => unimplemented!("opcode {}", self.sq), 0b101 => unimplemented!("opcode {}", self.sq), 0b110 => unimplemented!("opcode {}", self.sq), 0b111 => unimplemented!("opcode {}", self.sq), _ => panic!("opcode {} does not exist", self.sq), } 0b001 => unimplemented!("opcode {}", self.sq), 0b010 => unimplemented!("opcode {}", self.sq), 0b011 => unimplemented!("opcode {}", self.sq), 0b100 => unimplemented!("opcode {}", self.sq), 0b101 => unimplemented!("opcode {}", self.sq), 0b110 => unimplemented!("opcode {}", self.sq), 0b111 => unimplemented!("opcode {}", self.sq), _ => panic!("opcode {} does not exist", self.sq), } } } fn execute_control_pulses(&mut self, t: TimePulse) { let actions = self.current_subinstruction().actions(t); let br = self.br; let mut wl = W16::zero(); for action in actions.iter().filter(|action| action.execute(br)) { wl |= (action.control_pulse().exec_write_wl)(self); } for action in actions.iter().filter(|action| action.execute(br)) { (action.control_pulse().exec_read_wl)(self, wl); } } /// Run a single step, i.e. 
a single action pub fn step_control_pulse(&mut self) { // Execute the control pulses self.execute_control_pulses(self.current_timepulse); // Execute additional task match self.current_timepulse { TimePulse::T1 => { // Save S value self.current_s = self.s; } TimePulse::T4 => { // Perform erasable memory read match self.current_s.address() { MemoryAddress::UnswitchedErasableMemory(bank, address) => { self.g |= self .erasable_storage .read(bank, address) .as_register_value(); } MemoryAddress::SwitchedErasableMemory(address) => { self.g |= self .erasable_storage .read(self.ebank, address) .as_register_value(); } _ => (), }; } TimePulse::T6 => { // Perform fixed memory read match self.current_s.address() { MemoryAddress::UnswitchedFixedMemory(bank, address) => { self.g |= self .fixed_storage .read(bank.into(), address) .as_register_value(); } MemoryAddress::SwitchedFixedMemory(address) => { // TODO: take into account super-bit self.g |= self .fixed_storage .read(self.fbank.into(), address) .as_register_value(); } _ => (), }; } TimePulse::T10 => { // Perform erasable memory write match self.current_s.address() { MemoryAddress::UnswitchedErasableMemory(bank, address) => { self.erasable_storage.write( bank, address, MemoryWord::with_proper_parity(self.g.into()), ); } MemoryAddress::SwitchedErasableMemory(address) => { self.erasable_storage.write( self.ebank, address, MemoryWord::with_proper_parity(self.g.into()), ); } _ => (), }; } TimePulse::T12 => { // Set stage counter self.st = self.next_st; self.next_st = W3::zero(); // If NISQ was triggered, load next instruction into SQ // This is the equivalent of control pulses RB and WSQ // TODO: should also re-enable some interrupts if self.nisq { self.sq = SequenceRegister::new(W6::from(self.b >> 9), self.ext); self.nisq = false; // Reset FUTEXT only once we have executed the extended instruction // (instructions with ST=0b010 are _not_ extended instructions) if self.st != W3::from(0b010) { self.ext = false; } } // Reset the 
carry flip-flop self.ci = false; } _ => (), } // Increment timepulse counter self.current_timepulse = self.current_timepulse.next(); } /// Run a single subinstruction, i.e. a single MCT pub fn step_subinstruction(&mut self) { // execute at least one control pulse self.step_control_pulse(); // continue until we read T1 while self.current_timepulse != TimePulse::T1 { self.step_control_pulse(); } } pub fn current_subsintruction_name(&self) -> &'static str { self.current_subinstruction().name } // Read content of the adder unit fn u(&self) -> W16 { // TODO: do the actual calculation here, this is way too imprecise let mut result = self.x.as_u16() + self.y.as_u16(); if self.ci { result += 1; } W16::from(result) } }
true
c348084a5b1c0b3295abc5e69b07d31c6934351a
Rust
ticki/libstd
/src/fs.rs
UTF-8
11,723
2.734375
3
[]
no_license
use core_collections::borrow::ToOwned; use io::{self, BufRead, BufReader, Read, Error, Result, Write, Seek, SeekFrom}; use os::unix::io::{AsRawFd, FromRawFd, IntoRawFd, RawFd}; use mem; use path::{PathBuf, Path}; use string::String; use sys_common::AsInner; use vec::Vec; use syscall::{open, dup, close, fpath, fstat, ftruncate, read, write, lseek, fsync, mkdir, rmdir, unlink}; use syscall::{O_RDWR, O_RDONLY, O_WRONLY, O_APPEND, O_CREAT, O_TRUNC, MODE_DIR, MODE_FILE, MODE_PERM, SEEK_SET, SEEK_CUR, SEEK_END, Stat}; /// A Unix-style file #[derive(Debug)] pub struct File { /// The id for the file fd: usize, } impl File { /// Open a new file using a path pub fn open<P: AsRef<Path>>(path: P) -> Result<File> { let path_str = path.as_ref().as_os_str().as_inner(); open(path_str, O_RDONLY).map(|fd| unsafe { File::from_raw_fd(fd) }).map_err(|x| Error::from_sys(x)) } /// Create a new file using a path pub fn create<P: AsRef<Path>>(path: P) -> Result<File> { let path_str = path.as_ref().as_os_str().as_inner(); open(path_str, O_CREAT | O_RDWR | O_TRUNC | 0o664).map(|fd| unsafe { File::from_raw_fd(fd) }).map_err(|x| Error::from_sys(x)) } /// Duplicate the file pub fn dup(&self, buf: &[u8]) -> Result<File> { dup(self.fd, buf).map(|fd| unsafe { File::from_raw_fd(fd) }).map_err(|x| Error::from_sys(x)) } /// Get information about a file pub fn metadata(&self) -> Result<Metadata> { let mut stat = Stat::default(); try!(fstat(self.fd, &mut stat).map_err(|x| Error::from_sys(x))); Ok(Metadata { stat: stat }) } /// Get the canonical path of the file pub fn path(&self) -> Result<PathBuf> { let mut buf: [u8; 4096] = [0; 4096]; match fpath(self.fd, &mut buf) { Ok(count) => Ok(PathBuf::from(unsafe { String::from_utf8_unchecked(Vec::from(&buf[0..count])) })), Err(err) => Err(Error::from_sys(err)), } } /// Flush the file data and metadata pub fn sync_all(&mut self) -> Result<()> { fsync(self.fd).and(Ok(())).map_err(|x| Error::from_sys(x)) } /// Flush the file data pub fn sync_data(&mut self) -> 
Result<()> { fsync(self.fd).and(Ok(())).map_err(|x| Error::from_sys(x)) } /// Truncates the file pub fn set_len(&self, size: u64) -> Result<()> { ftruncate(self.fd, size as usize).and(Ok(())).map_err(|x| Error::from_sys(x)) } } impl AsRawFd for File { fn as_raw_fd(&self) -> RawFd { self.fd } } impl FromRawFd for File { unsafe fn from_raw_fd(fd: RawFd) -> Self { File { fd: fd } } } impl IntoRawFd for File { fn into_raw_fd(self) -> RawFd { let fd = self.fd; mem::forget(self); fd } } impl Read for File { fn read(&mut self, buf: &mut [u8]) -> Result<usize> { read(self.fd, buf).map_err(|x| Error::from_sys(x)) } } impl Write for File { fn write(&mut self, buf: &[u8]) -> Result<usize> { write(self.fd, buf).map_err(|x| Error::from_sys(x)) } fn flush(&mut self) -> Result<()> { fsync(self.fd).and(Ok(())).map_err(|x| Error::from_sys(x)) } } impl Seek for File { /// Seek a given position fn seek(&mut self, pos: SeekFrom) -> Result<u64> { let (whence, offset) = match pos { SeekFrom::Start(offset) => (SEEK_SET, offset as isize), SeekFrom::Current(offset) => (SEEK_CUR, offset as isize), SeekFrom::End(offset) => (SEEK_END, offset as isize), }; lseek(self.fd, offset, whence).map(|position| position as u64).map_err(|x| Error::from_sys(x)) } } impl Drop for File { fn drop(&mut self) { let _ = close(self.fd); } } #[derive(Copy, Clone, Eq, PartialEq)] pub struct FileType { dir: bool, file: bool, } impl FileType { pub fn is_dir(&self) -> bool { self.dir } pub fn is_file(&self) -> bool { self.file } pub fn is_symlink(&self) -> bool { false } } impl ::os::unix::fs::FileTypeExt for FileType { fn is_block_device(&self) -> bool { false } fn is_char_device(&self) -> bool { false } fn is_fifo(&self) -> bool { false } fn is_socket(&self) -> bool { false } } pub struct OpenOptions { read: bool, write: bool, append: bool, create: bool, truncate: bool, mode: u16, } impl OpenOptions { pub fn new() -> OpenOptions { OpenOptions { read: false, write: false, append: false, create: false, truncate: 
false, mode: 0, } } pub fn read(&mut self, read: bool) -> &mut OpenOptions { self.read = read; self } pub fn write(&mut self, write: bool) -> &mut OpenOptions { self.write = write; self } pub fn append(&mut self, append: bool) -> &mut OpenOptions { self.append = append; self } pub fn create(&mut self, create: bool) -> &mut OpenOptions { self.create = create; self } pub fn truncate(&mut self, truncate: bool) -> &mut OpenOptions { self.truncate = truncate; self } pub fn open<P: AsRef<Path>>(&self, path: P) -> Result<File> { let mut flags = 0; if self.read && self.write { flags |= O_RDWR; } else if self.read { flags |= O_RDONLY; } else if self.write { flags |= O_WRONLY; } if self.append { flags |= O_APPEND; } if self.create { flags |= O_CREAT; } if self.truncate { flags |= O_TRUNC; } flags |= (self.mode & MODE_PERM) as usize; let path_str = path.as_ref().as_os_str().as_inner(); open(path_str, flags).map(|fd| unsafe { File::from_raw_fd(fd) }).map_err(|x| Error::from_sys(x)) } } impl ::os::unix::fs::OpenOptionsExt for OpenOptions { fn mode(&mut self, mode: u32) -> &mut Self { self.mode = mode as u16; self } } pub struct Metadata { stat: Stat } impl Metadata { pub fn file_type(&self) -> FileType { FileType { dir: self.stat.st_mode & MODE_DIR == MODE_DIR, file: self.stat.st_mode & MODE_FILE == MODE_FILE } } pub fn is_dir(&self) -> bool { self.stat.st_mode & MODE_DIR == MODE_DIR } pub fn is_file(&self) -> bool { self.stat.st_mode & MODE_FILE == MODE_FILE } pub fn len(&self) -> u64 { self.stat.st_size } pub fn permissions(&self) -> Permissions { Permissions { mode: self.stat.st_mode & MODE_PERM } } } impl ::os::unix::fs::MetadataExt for Metadata { fn mode(&self) -> u32 { self.stat.st_mode as u32 } fn uid(&self) -> u32 { self.stat.st_uid } fn gid(&self) -> u32 { self.stat.st_gid } fn size(&self) -> u64 { self.stat.st_size } } pub struct Permissions { mode: u16 } impl Permissions { pub fn readonly(&self) -> bool { self.mode & 0o222 == 0 } pub fn set_readonly(&mut self, 
readonly: bool) { if readonly { self.mode &= !0o222; } else { self.mode |= 0o222; } } } impl ::os::unix::fs::PermissionsExt for Permissions { fn mode(&self) -> u32 { self.mode as u32 } fn set_mode(&mut self, mode: u32) { self.mode = mode as u16; } fn from_mode(mode: u32) -> Self { Permissions { mode: mode as u16 } } } pub struct DirEntry { path: PathBuf, } impl DirEntry { pub fn file_name(&self) -> &Path { unsafe { mem::transmute(self.path.file_name().unwrap().to_str().unwrap()) } } pub fn file_type(&self) -> Result<FileType> { self.metadata().map(|metadata| metadata.file_type()) } pub fn metadata(&self) -> Result<Metadata> { metadata(&self.path) } pub fn path(&self) -> PathBuf { self.path.clone() } } pub struct ReadDir { path: PathBuf, file: BufReader<File>, } impl Iterator for ReadDir { type Item = Result<DirEntry>; fn next(&mut self) -> Option<Result<DirEntry>> { let mut name = String::new(); match self.file.read_line(&mut name) { Ok(0) => None, Ok(_) => { if name.ends_with('\n') { name.pop(); } let mut path = self.path.clone(); path.push(name); Some(Ok(DirEntry { path: path })) }, Err(err) => Some(Err(err)) } } } /// Find the canonical path of a file pub fn canonicalize<P: AsRef<Path>>(path: P) -> Result<PathBuf> { match File::open(path) { Ok(file) => { match file.path() { Ok(realpath) => Ok(realpath), Err(err) => Err(err) } }, Err(err) => Err(err) } } /// Get information about a file pub fn metadata<P: AsRef<Path>>(path: P) -> Result<Metadata> { try!(File::open(path)).metadata() } /// Get information about a file without following symlinks /// Warning: Redox does not currently support symlinks pub fn symlink_metadata<P: AsRef<Path>>(path: P) -> Result<Metadata> { metadata(path) } /// Create a new directory, using a path /// The default mode of the directory is 775 pub fn create_dir<P: AsRef<Path>>(path: P) -> Result<()> { let path_str = path.as_ref().as_os_str().as_inner(); mkdir(path_str, 0o775).and(Ok(())).map_err(|x| Error::from_sys(x)) } /// Recursively 
create a directory and all of its parent components if they are missing. pub fn create_dir_all<P: AsRef<Path>>(path: P) -> Result<()> { if let Some(parent) = path.as_ref().parent() { try!(create_dir_all(&parent)); } if let Err(_err) = metadata(&path) { try!(create_dir(&path)); } Ok(()) } /// Copy the contents of one file to another pub fn copy<P: AsRef<Path>, Q: AsRef<Path>>(from: P, to: Q) -> Result<u64> { let mut infile = try!(File::open(from)); let mut outfile = try!(File::create(to)); io::copy(&mut infile, &mut outfile) } /// Rename a file or directory to a new name pub fn rename<P: AsRef<Path>, Q: AsRef<Path>>(from: P, to: Q) -> Result<()> { try!(copy(Path::new(from.as_ref()), to)); remove_file(from) } /// Return an iterator over the entries within a directory pub fn read_dir<P: AsRef<Path>>(path: P) -> Result<ReadDir> { let path_buf = path.as_ref().to_owned(); File::open(&path_buf).map(|file| ReadDir { path: path_buf, file: BufReader::new(file) }) } /// Removes an existing, empty directory pub fn remove_dir<P: AsRef<Path>>(path: P) -> Result<()> { let path_str = path.as_ref().as_os_str().as_inner(); rmdir(path_str).and(Ok(())).map_err(|x| Error::from_sys(x)) } /// Removes a directory at this path, after removing all its contents. Use carefully! pub fn remove_dir_all<P: AsRef<Path>>(path: P) -> Result<()> { for child in try!(read_dir(&path)) { let child = try!(child); if try!(child.file_type()).is_dir() { try!(remove_dir_all(&child.path())); } else { try!(remove_file(&child.path())); } } remove_dir(path) } /// Removes a file from the filesystem pub fn remove_file<P: AsRef<Path>>(path: P) -> Result<()> { let path_str = path.as_ref().as_os_str().as_inner(); unlink(path_str).and(Ok(())).map_err(|x| Error::from_sys(x)) }
true
fe023969113d9d4600c3a491ab4cc778166594c3
Rust
Open-Source-Projects-2021/patternfly-yew-quickstart
/src/components/form.rs
UTF-8
2,813
2.65625
3
[ "Apache-2.0" ]
permissive
use crate::{example, example::ExamplePage}; use patternfly_yew::*; use yew::prelude::*; pub struct FormExample {} impl Component for FormExample { type Message = (); type Properties = (); fn create(_props: Self::Properties, _link: ComponentLink<Self>) -> Self { Self {} } fn update(&mut self, _msg: Self::Message) -> ShouldRender { true } fn change(&mut self, _props: Self::Properties) -> ShouldRender { false } fn view(&self) -> Html { let example = example! {"Text Input" => <Form> <FormGroup label="Normal"> <TextInput/> </FormGroup> </Form> }; html! { <> <ExamplePage title="Form"> <Form> <FormGroup label="Test"> <Button label="Click me" variant=Variant::Primary/> </FormGroup> <FormGroup label="Test" required=true helper_text="Some help for you."> <Button label="Click me too" variant=Variant::Secondary/> </FormGroup> </Form> { example } <h2>{"More"}</h2> <Form> <FormGroup label="Normal"> <TextInput/> </FormGroup> <FormGroup label="Success"> <TextInput state=InputState::Success/> </FormGroup> <FormGroup label="Warning"> <TextInput state=InputState::Warning/> </FormGroup> <FormGroup label="Error"> <TextInput state=InputState::Error/> </FormGroup> <FormGroup label="Read Only"> <TextInput readonly=true/> </FormGroup> <FormGroup label="Disabled"> <TextInput disabled=true/> </FormGroup> <FormGroup label="Search"> <TextInput icon=TextInputIcon::Search/> </FormGroup> <FormGroup label="Calendar"> <TextInput icon=TextInputIcon::Calendar/> </FormGroup> <FormGroup label="Clock"> <TextInput icon=TextInputIcon::Clock/> </FormGroup> </Form> </ExamplePage> </> } } }
true
3b36b6a4310298b14cf5ded7e8f4d398d37afa02
Rust
Raz-Hemo/SpaceWarSupreme
/src/engine/systems/static_skybox.rs
UTF-8
1,392
2.703125
3
[]
no_license
use crate::engine::prelude::*; use specs::ReadStorage; use crate::engine::components::StaticSkyboxComponent; pub struct StaticSkyboxSystem { skybox: Option<String>, last_multi_skybox_warning: Option<std::time::Instant>, } impl StaticSkyboxSystem { pub fn new() -> StaticSkyboxSystem { StaticSkyboxSystem { skybox: None, last_multi_skybox_warning: None, } } pub fn get_and_flush(&mut self) -> Option<String> { std::mem::replace(&mut self.skybox, None) } } impl<'a> specs::System<'a> for StaticSkyboxSystem { type SystemData = ReadStorage<'a, StaticSkyboxComponent>; fn run(&mut self, skyboxes: Self::SystemData) { use specs::Join; for skybox in skyboxes.join() { if !skybox.visible { continue; } if self.skybox.is_some() { if self.last_multi_skybox_warning.is_none() || self.last_multi_skybox_warning.unwrap().elapsed().as_secs_f32() > consts::MULTI_SKYBOX_WARNING_INTERVAL_SECONDS { log::warning("Multiple skyboxes are visible. This hurts performance."); self.last_multi_skybox_warning = Some(std::time::Instant::now()); } break; } self.skybox = Some(skybox.skybox.clone()); } } }
true
bedca2e143bccf053784b9e09c62ad6489b23bf0
Rust
Amanieu/thread_local-rs
/src/cached.rs
UTF-8
4,451
3.234375
3
[ "LicenseRef-scancode-unknown-license-reference", "MIT", "Apache-2.0" ]
permissive
#![allow(deprecated)] use super::{IntoIter, IterMut, ThreadLocal}; use std::fmt; use std::panic::UnwindSafe; use std::usize; /// Wrapper around [`ThreadLocal`]. /// /// This used to add a fast path for a single thread, however that has been /// obsoleted by performance improvements to [`ThreadLocal`] itself. #[deprecated(since = "1.1.0", note = "Use `ThreadLocal` instead")] pub struct CachedThreadLocal<T: Send> { inner: ThreadLocal<T>, } impl<T: Send> Default for CachedThreadLocal<T> { fn default() -> CachedThreadLocal<T> { CachedThreadLocal::new() } } impl<T: Send> CachedThreadLocal<T> { /// Creates a new empty `CachedThreadLocal`. #[inline] pub fn new() -> CachedThreadLocal<T> { CachedThreadLocal { inner: ThreadLocal::new(), } } /// Returns the element for the current thread, if it exists. #[inline] pub fn get(&self) -> Option<&T> { self.inner.get() } /// Returns the element for the current thread, or creates it if it doesn't /// exist. #[inline] pub fn get_or<F>(&self, create: F) -> &T where F: FnOnce() -> T, { self.inner.get_or(create) } /// Returns the element for the current thread, or creates it if it doesn't /// exist. If `create` fails, that error is returned and no element is /// added. #[inline] pub fn get_or_try<F, E>(&self, create: F) -> Result<&T, E> where F: FnOnce() -> Result<T, E>, { self.inner.get_or_try(create) } /// Returns a mutable iterator over the local values of all threads. /// /// Since this call borrows the `ThreadLocal` mutably, this operation can /// be done safely---the mutable borrow statically guarantees no other /// threads are currently accessing their associated values. #[inline] pub fn iter_mut(&mut self) -> CachedIterMut<T> { CachedIterMut { inner: self.inner.iter_mut(), } } /// Removes all thread-specific values from the `ThreadLocal`, effectively /// reseting it to its original state. 
/// /// Since this call borrows the `ThreadLocal` mutably, this operation can /// be done safely---the mutable borrow statically guarantees no other /// threads are currently accessing their associated values. #[inline] pub fn clear(&mut self) { self.inner.clear(); } } impl<T: Send> IntoIterator for CachedThreadLocal<T> { type Item = T; type IntoIter = CachedIntoIter<T>; fn into_iter(self) -> CachedIntoIter<T> { CachedIntoIter { inner: self.inner.into_iter(), } } } impl<'a, T: Send + 'a> IntoIterator for &'a mut CachedThreadLocal<T> { type Item = &'a mut T; type IntoIter = CachedIterMut<'a, T>; fn into_iter(self) -> CachedIterMut<'a, T> { self.iter_mut() } } impl<T: Send + Default> CachedThreadLocal<T> { /// Returns the element for the current thread, or creates a default one if /// it doesn't exist. pub fn get_or_default(&self) -> &T { self.get_or(T::default) } } impl<T: Send + fmt::Debug> fmt::Debug for CachedThreadLocal<T> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "ThreadLocal {{ local_data: {:?} }}", self.get()) } } impl<T: Send + UnwindSafe> UnwindSafe for CachedThreadLocal<T> {} /// Mutable iterator over the contents of a `CachedThreadLocal`. #[deprecated(since = "1.1.0", note = "Use `IterMut` instead")] pub struct CachedIterMut<'a, T: Send + 'a> { inner: IterMut<'a, T>, } impl<'a, T: Send + 'a> Iterator for CachedIterMut<'a, T> { type Item = &'a mut T; #[inline] fn next(&mut self) -> Option<&'a mut T> { self.inner.next() } #[inline] fn size_hint(&self) -> (usize, Option<usize>) { self.inner.size_hint() } } impl<'a, T: Send + 'a> ExactSizeIterator for CachedIterMut<'a, T> {} /// An iterator that moves out of a `CachedThreadLocal`. 
#[deprecated(since = "1.1.0", note = "Use `IntoIter` instead")] pub struct CachedIntoIter<T: Send> { inner: IntoIter<T>, } impl<T: Send> Iterator for CachedIntoIter<T> { type Item = T; #[inline] fn next(&mut self) -> Option<T> { self.inner.next() } #[inline] fn size_hint(&self) -> (usize, Option<usize>) { self.inner.size_hint() } } impl<T: Send> ExactSizeIterator for CachedIntoIter<T> {}
true
bb1d6953882aac37920ac69fcb45854317e98d8a
Rust
RichardlL/flac
/src/utility/types.rs
UTF-8
7,444
3.203125
3
[ "BSD-3-Clause" ]
permissive
use nom::{Err, IResult, Needed}; use std::io; use std::io::Read; use std::ptr; use std::cmp; use super::StreamProducer; #[derive(Debug)] pub enum ErrorKind { IO(io::Error), Incomplete(usize), Continue, EndOfInput, Unknown, } /// Structure that hold a slice of bytes. pub struct ByteStream<'a> { offset: usize, bytes: &'a [u8], } impl<'a> ByteStream<'a> { /// Construct a `ByteStream` based on the passed in byte slice. pub fn new(bytes: &'a [u8]) -> Self { ByteStream { offset: 0, bytes: bytes, } } /// Return the number of bytes that haven't been consumed yet. #[inline] pub fn len(&self) -> usize { self.bytes.len() - self.offset } /// Return true if the stream contains no more bytes. #[inline] pub fn is_empty(&self) -> bool { self.len() == 0 } } impl<'a> StreamProducer for ByteStream<'a> { fn parse<F, T>(&mut self, f: F) -> Result<T, ErrorKind> where F: FnOnce(&[u8]) -> IResult<&[u8], T> { if self.is_empty() { return Err(ErrorKind::EndOfInput); } match f(&self.bytes[self.offset..]) { IResult::Done(i, o) => { self.offset += self.len() - i.len(); Ok(o) } IResult::Incomplete(n) => { let mut needed = self.len(); if let Needed::Size(size) = n { needed = size; } Err(ErrorKind::Incomplete(needed)) } IResult::Error(_) => Err(ErrorKind::Unknown), } } } // Growable buffer of bytes. // // Mainly used to the `ReadStream` structure but can be used seperately for // manually filling with some `Read` source. pub struct Buffer { data: Vec<u8>, filled: usize, offset: usize, } impl Buffer { // Default constructor for `Buffer` pub fn new() -> Self { Self::with_capacity(1024) } // Explicitly set the buffer capacity. pub fn with_capacity(capacity: usize) -> Self { let mut buffer = Vec::with_capacity(capacity); unsafe { buffer.set_len(capacity); } Buffer { data: buffer, filled: 0, offset: 0, } } // Return the number of read bytes that haven't been consumed yet. #[inline] pub fn len(&self) -> usize { self.filled - self.offset } // Return true if buffer contains no more bytes. 
#[inline] pub fn is_empty(&self) -> bool { self.len() == 0 } // The set length of the unlining buffer. #[inline] pub fn capacity(&self) -> usize { self.data.len() } // Return a reference to the slice of unread bytes. pub fn as_slice(&self) -> &[u8] { &self.data[self.offset..self.filled] } // Fill the buffer with bytes from a `Read` source. pub fn fill<R: Read>(&mut self, reader: &mut R) -> io::Result<usize> { reader.read(&mut self.data[self.filled..]).map(|consumed| { self.filled += consumed; consumed }) } // Resize the current buffer // // This will only allocate data when the size requests is larger than the // current capacity of the buffer, otherwise it moves the currently filled // data to the beginning of the buffer. pub fn resize(&mut self, size: usize) { if size > self.data.capacity() { self.data.reserve(size); let capacity = self.data.capacity(); unsafe { self.data.set_len(capacity); } } if self.data.len() - self.filled < size { let length = self.filled - self.offset; let mut_ptr = self.data.as_mut_ptr(); unsafe { let offset_ptr = self.data.as_ptr().offset(self.offset as isize); ptr::copy(offset_ptr, mut_ptr, length); } self.filled -= self.offset; self.offset = 0; } } // Move the offset by the amount of consumed bytes. pub fn consume(&mut self, consumed: usize) { self.offset += consumed; } } #[derive(Debug, Clone, Copy, PartialEq, Eq)] enum ParserState { Incomplete, EndOfInput, } fn fill<R: Read>(buffer: &mut Buffer, reader: &mut R, needed: usize) -> io::Result<usize> { let mut read = 0; if buffer.len() < needed { buffer.resize(needed); while buffer.len() < needed { let size_read = try!(buffer.fill(reader)); if size_read > 0 { read += size_read; } else { break; } } } Ok(read) } /// Structure that hold a reader for a source of bytes. pub struct ReadStream<R: Read> { reader: R, buffer: Buffer, needed: usize, state: ParserState, } impl<R> ReadStream<R> where R: Read { /// Constructor for `ReadStream` based on a `Read` source. 
pub fn new(reader: R) -> Self { ReadStream { reader: reader, buffer: Buffer::new(), needed: 0, state: ParserState::Incomplete, } } // Fill the stream with bytes from a `Read` source. fn fill(&mut self) -> io::Result<usize> { let needed = cmp::max(1, self.needed); fill(&mut self.buffer, &mut self.reader, needed).map(|consumed| { if self.buffer.len() < needed { self.state = ParserState::EndOfInput; } consumed }) } } fn from_iresult<T>(buffer: &Buffer, result: IResult<&[u8], T>) -> Result<(usize, T), ErrorKind> { match result { IResult::Done(i, o) => Ok((buffer.len() - i.len(), o)), IResult::Incomplete(n) => { let mut needed = buffer.capacity(); if let Needed::Size(size) = n { needed = size; } Err(ErrorKind::Incomplete(needed)) } IResult::Error(_) => Err(ErrorKind::Unknown), } } impl<R> StreamProducer for ReadStream<R> where R: Read { fn parse<F, T>(&mut self, f: F) -> Result<T, ErrorKind> where F: FnOnce(&[u8]) -> IResult<&[u8], T> { if self.state != ParserState::EndOfInput { try!(self.fill().map_err(ErrorKind::IO)); } let mut buffer = &mut self.buffer; if buffer.is_empty() { self.state = ParserState::EndOfInput; return Err(ErrorKind::EndOfInput); } let result = { let iresult = f(buffer.as_slice()); from_iresult(&buffer, iresult) }; match result { Ok((consumed, o)) => { buffer.consume(consumed); Ok(o) } Err(kind) => { if let ErrorKind::Incomplete(needed) = kind { self.needed = needed; Err(ErrorKind::Continue) } else { Err(kind) } } } } } #[cfg(test)] mod tests { use super::*; use utility::StreamProducer; use nom::be_u32; #[test] fn test_buffer() { let mut buffer = Buffer::new(); let bytes = b"Hello World"; let mut reader = &bytes[..]; assert!(buffer.is_empty()); assert_eq!(buffer.capacity(), 1024); let bytes_read = buffer.fill(&mut reader).unwrap_or(0); let bytes_len = bytes.len(); assert_eq!(bytes_read, bytes_len); assert_eq!(buffer.len(), bytes_len); assert_eq!(buffer.as_slice(), bytes); buffer.resize(512); assert_eq!(buffer.capacity(), 1024); } #[test] fn 
test_byte_stream() { let bytes = b"Hello World"; let mut stream = ByteStream::new(bytes); assert_eq!(stream.len(), bytes.len()); let result = stream.parse(be_u32).unwrap_or(0); assert_eq!(result, 1214606444); assert_eq!(stream.len(), 7); } #[test] fn test_read_stream() { let bytes = b"Hello World"; let mut stream = ReadStream::new(&bytes[..]); let result = stream.parse(be_u32).unwrap_or(0); assert_eq!(result, 1214606444) } }
true
f48b160c1d257ac6e164d91a531313cf127aaffe
Rust
leondejong/rust-playground
/general/collections/src/tree.rs
UTF-8
8,172
3.375
3
[ "Apache-2.0" ]
permissive
use chrono::prelude::*; use rand::distributions::Alphanumeric; use rand::{thread_rng, Rng}; use std::collections::BTreeMap; use std::default::Default; cfg_if::cfg_if! { if #[cfg(test)] { fn now() -> DateTime<Utc> { Utc.with_ymd_and_hms(1234, 5, 6, 7, 8, 9).unwrap() } } else { fn now() -> DateTime<Utc> { Utc::now() } } } fn random(length: usize) -> String { thread_rng() .sample_iter(&Alphanumeric) .take(length) .map(char::from) .collect::<String>() } #[derive(Clone, Debug, PartialEq)] pub struct Item { id: String, name: String, content: String, created: DateTime<Utc>, updated: DateTime<Utc>, active: bool, } impl Item { pub fn new(id: &str, name: &str, content: &str, active: bool) -> Self { let datetime = now(); Self { id: id.into(), name: name.into(), content: content.into(), created: datetime, updated: datetime, active, } } pub fn update(&mut self, name: &str, content: &str, active: bool) -> &Self { self.name = name.into(); self.content = content.into(); self.updated = now(); self.active = active; self } pub fn id(&self) -> &str { &self.id } pub fn name(&self) -> &str { &self.name } pub fn content(&self) -> &str { &self.content } pub fn active(&self) -> bool { self.active } } #[derive(Clone, Debug, PartialEq)] pub struct List { items: BTreeMap<String, Item>, } impl List { pub fn new() -> Self { Self { items: BTreeMap::new(), } } pub fn all(&self) -> Vec<&Item> { self.items.values().collect() } pub fn one(&self, id: &str) -> Option<&Item> { self.items.get(id) } pub fn add(&mut self, name: &str, content: &str, active: bool) -> Option<&Item> { let id = random(16); let item = Item::new(&id, name, content, active); self.items.insert(id.clone(), item); Some(&self.items[&id]) } pub fn update(&mut self, id: &str, name: &str, content: &str, active: bool) -> Option<&Item> { if let Some(item) = self.items.get_mut(id) { Some(item.update(name, content, active)) } else { None } } pub fn remove(&mut self, id: &str) -> Option<Item> { self.items.remove(id) } } impl Default for List 
{ fn default() -> Self { Self::new() } } #[cfg(test)] mod test { use super::*; #[test] fn item_new() { let datetime = now(); let reference = Item { id: "id".into(), name: "name".into(), content: "content".into(), created: datetime, updated: datetime, active: true, }; let item = Item::new("id", "name", "content", true); assert_eq!(reference, item); } #[test] fn item_update() { let datetime = now(); let reference = Item { id: "id".into(), name: "name".into(), content: "content".into(), created: datetime, updated: datetime, active: true, }; let mut item = Item { id: "id".into(), name: "n".into(), content: "c".into(), created: datetime, updated: datetime, active: true, }; item.update("name", "content", true); assert_eq!(reference, item); } #[test] fn item_get() { let datetime = now(); let item = Item { id: "id".into(), name: "name".into(), content: "content".into(), created: datetime, updated: datetime, active: true, }; assert_eq!(item.id(), "id"); assert_eq!(item.name(), "name"); assert_eq!(item.content(), "content"); assert_eq!(item.active(), true); } #[test] fn list_new() { let reference = List { items: BTreeMap::new(), }; let list = List::new(); assert_eq!(reference, list); } #[test] fn list_default() { let reference = List { items: BTreeMap::new(), }; let list = List::default(); assert_eq!(reference, list); } #[test] fn list_get() { let datetime = now(); let mut items = BTreeMap::new(); let item = Item { id: "id".into(), name: "name".into(), content: "content".into(), created: datetime, updated: datetime, active: true, }; items.insert("id".into(), item.clone()); let list = List { items }; assert_eq!(list.one("id"), Some(&item)); } #[test] fn list_all() { let datetime = now(); let mut items = BTreeMap::new(); let item1 = Item { id: "id1".into(), name: "name1".into(), content: "content1".into(), created: datetime, updated: datetime, active: true, }; let item2 = Item { id: "id2".into(), name: "name2".into(), content: "content2".into(), created: datetime, updated: 
datetime, active: true, }; let item3 = Item { id: "id3".into(), name: "name3".into(), content: "content3".into(), created: datetime, updated: datetime, active: true, }; items.insert("id1".into(), item1.clone()); items.insert("id2".into(), item2.clone()); items.insert("id3".into(), item3.clone()); let list = List { items }; assert_eq!(list.all(), vec![&item1, &item2, &item3]); } #[test] fn list_add() { let datetime = now(); let mut list = List { items: BTreeMap::new(), }; let item = list.add("name", "content", true).unwrap().clone(); let mut items = BTreeMap::new(); let item = Item { id: item.id().into(), name: "name".into(), content: "content".into(), created: datetime, updated: datetime, active: true, }; items.insert(item.id().into(), item); let reference = List { items }; assert_eq!(reference, list); } #[test] fn list_update() { let datetime = now(); let mut reference_items = BTreeMap::new(); let mut items = BTreeMap::new(); let reference_item = Item { id: "id".into(), name: "name".into(), content: "content".into(), created: datetime, updated: datetime, active: true, }; let item = Item { id: "id".into(), name: "n".into(), content: "c".into(), created: datetime, updated: datetime, active: false, }; reference_items.insert("id".into(), reference_item); items.insert("id".into(), item); let reference_list = List { items: reference_items, }; let mut list = List { items }; list.update("id".into(), "name", "content", true); assert_eq!(reference_list, list); } #[test] fn list_remove() { let datetime = now(); let mut items = BTreeMap::new(); let item = Item { id: "id".into(), name: "name".into(), content: "content".into(), created: datetime, updated: datetime, active: true, }; items.insert("id".into(), item); let reference = List { items: BTreeMap::new(), }; let mut list = List { items }; list.remove("id".into()); assert_eq!(reference, list); } }
true
a66a8c5b68e00e9b4d5086e360a5722ca020a83d
Rust
cesarb/clear_on_drop
/src/hide.rs
UTF-8
3,063
3.1875
3
[ "Apache-2.0", "MIT", "LicenseRef-scancode-unknown-license-reference" ]
permissive
//! Prevent agressive code removal optimizations. //! //! The functions in this module "hide" a variable from the optimizer, //! so that it believes the variable has been read and/or modified in //! unpredictable ways, while in fact nothing happened. //! //! Inspired by/based on Linux kernel's `OPTIMIZER_HIDE_VAR`, which in //! turn was based on the earlier `RELOC_HIDE` macro. /// Make the optimizer believe the memory pointed to by `ptr` is read /// and modified arbitrarily. #[inline] pub fn hide_mem<T: ?Sized>(ptr: &mut T) { hide_mem_impl(ptr); } /// Make the optimizer believe the pointer returned by this function is /// possibly unrelated (except for the lifetime) to `ptr`. #[inline] pub fn hide_ptr<P>(mut ptr: P) -> P { hide_mem::<P>(&mut ptr); ptr } pub use self::impls::hide_mem_impl; // On nightly, inline assembly can be used. #[cfg(feature = "nightly")] mod impls { use core::arch::asm; trait HideMemImpl { fn hide_mem_impl(ptr: *mut Self); } impl<T: ?Sized> HideMemImpl for T { #[inline] default fn hide_mem_impl(ptr: *mut Self) { unsafe { //llvm_asm!("" : : "r" (ptr as *mut u8) : "memory"); asm!("/* {0} */", in(reg) (ptr as *mut u8), options(nostack)); } } } impl<T: Sized> HideMemImpl for T { #[inline] fn hide_mem_impl(ptr: *mut Self) { unsafe { //llvm_asm!("" : "=*m" (ptr) : "*0" (ptr)); asm!("/* {0} */", in(reg) ptr, options(nostack)); } } } #[inline] pub fn hide_mem_impl<T: ?Sized>(ptr: *mut T) { HideMemImpl::hide_mem_impl(ptr) } } // When a C compiler is available, a dummy C function can be used. #[cfg(not(feature = "no_cc"))] mod impls { extern "C" { fn clear_on_drop_hide(ptr: *mut u8) -> *mut u8; } #[inline] pub fn hide_mem_impl<T: ?Sized>(ptr: *mut T) { unsafe { clear_on_drop_hide(ptr as *mut u8); } } } // When neither is available, pretend the pointer is sent to a thread, // and hope this is enough to confuse the optimizer. 
#[cfg(all(feature = "no_cc", not(feature = "nightly")))] mod impls { use core::sync::atomic::{AtomicUsize, Ordering}; #[inline(never)] pub fn hide_mem_impl<T: ?Sized>(ptr: *mut T) { static DUMMY: AtomicUsize = AtomicUsize::new(0); DUMMY.store(ptr as *mut u8 as usize, Ordering::Release); } } #[cfg(test)] mod tests { struct Place { data: [u32; 4], } const DATA: [u32; 4] = [0x01234567, 0x89abcdef, 0xfedcba98, 0x76543210]; #[test] fn hide_mem() { let mut place = Place { data: DATA }; super::hide_mem(&mut place); assert_eq!(place.data, DATA); } #[test] fn hide_ptr() { let mut place = Place { data: DATA }; let before = &mut place as *mut _; let after = super::hide_ptr(&mut place); assert_eq!(before, after as *mut _); assert_eq!(after.data, DATA); } }
true
3f06e5814d0895143d6c0e5cdfe0803ed7a5d182
Rust
kuviman/codevisual
/codevisual/src/asset/simple.rs
UTF-8
1,435
2.625
3
[ "MIT" ]
permissive
use *; pub struct SimpleAssetFuture<T> { handle: Arc<Mutex<Option<Result<T, Error>>>>, } impl<T> SimpleAssetFuture<T> { pub fn new() -> Self { Self { handle: Arc::new(Mutex::new(None)), } } pub fn get_handle(&self) -> Arc<Mutex<Option<Result<T, Error>>>> { self.handle.clone() } } impl<T> AssetFuture for SimpleAssetFuture<T> { type Output = T; fn is_loaded(&self) -> Result<bool, Error> { let lock = self.handle.lock().unwrap(); match *lock { Some(Ok(_)) => Ok(true), Some(Err(ref e)) => bail!("{}", e), None => Ok(false), } } fn unwrap(&self) -> Result<T, Error> { let mut lock = self.handle.lock().unwrap(); mem::replace(&mut *lock, None).unwrap() } } pub struct MapAssetFuture<I, F> { inner: I, f: RefCell<Option<F>>, } impl<I, F> MapAssetFuture<I, F> { pub fn new(inner: I, f: F) -> Self { Self { inner, f: RefCell::new(Some(f)), } } } impl<I: AssetFuture, U, F> AssetFuture for MapAssetFuture<I, F> where F: FnOnce(I::Output) -> Result<U, Error>, { type Output = U; fn is_loaded(&self) -> Result<bool, Error> { self.inner.is_loaded() } fn unwrap(&self) -> Result<U, Error> { let f = mem::replace(&mut *self.f.borrow_mut(), None).unwrap(); f(self.inner.unwrap()?) } }
true
8b293ae96a544a5ba7a0096b228209ce603eaa67
Rust
mbilker/kbinxml-rs
/kbinxml/src/error.rs
UTF-8
4,373
2.515625
3
[ "MIT" ]
permissive
use std::error::Error; use std::io; use std::num::{ParseFloatError, ParseIntError}; use std::result::Result as StdResult; use quick_xml::Error as QuickXmlError; use rustc_hex::FromHexError; use snafu::Snafu; use crate::byte_buffer::ByteBufferError; use crate::encoding_type::EncodingError; use crate::node_types::StandardType; use crate::reader::ReaderError; use crate::sixbit::SixbitError; use crate::text_reader::TextReaderError; use crate::value::Value; use crate::writer::WriterError; pub type Result<T> = StdResult<T, KbinError>; #[derive(Debug, Snafu)] #[snafu(visibility(pub(crate)))] pub enum KbinError { #[snafu(display("Unable to read bytes or not enough data read"))] DataConvert { source: io::Error }, #[snafu(display("No node collection found"))] NoNodeCollection, #[snafu(display( "Size Mismatch, type: {}, expected size: {}, actual size: {}", node_type, expected, actual ))] SizeMismatch { node_type: &'static str, expected: usize, actual: usize, }, #[snafu(display("Unable to interpret input as {}", node_type))] StringParse { node_type: &'static str, source: Box<dyn Error + Send + Sync>, }, #[snafu(display("Unable to interpret integer input as {}", node_type))] StringParseInt { node_type: &'static str, source: ParseIntError, }, #[snafu(display("Unable to interpret float input as {}", node_type))] StringParseFloat { node_type: &'static str, source: ParseFloatError, }, #[snafu(display("Unable to convert from hexadecimal"))] Hex { source: FromHexError }, #[snafu(display("Type mismatch, expected: {}, found: {}", expected, found))] TypeMismatch { expected: StandardType, found: StandardType, }, #[snafu(display("Value mismatch, expected {}, but found {:?}", node_type, value))] ValueTypeMismatch { node_type: StandardType, value: Value, }, #[snafu(display("Value mismatch, expected an array, but found {:?}", value))] ExpectedValueArray { value: Value }, #[snafu(display("Invalid input for boolean: {}", input))] InvalidBooleanInput { input: u8 }, #[snafu(display("Invalid node 
type for operation: {:?}", node_type))] InvalidNodeType { node_type: StandardType }, #[snafu(display("Invalid state"))] InvalidState, #[snafu(display("Failed to handle byte buffer operation"))] ByteBuffer { #[snafu(backtrace)] source: ByteBufferError, }, #[snafu(display("Failed to handle string encoding operation"))] Encoding { #[snafu(backtrace)] source: EncodingError, }, #[snafu(display("Failed to handle sixbit string operation"))] Sixbit { #[snafu(backtrace)] source: SixbitError, }, #[snafu(display("Failed to read binary XML"))] Reader { #[snafu(backtrace)] source: ReaderError, }, #[snafu(display("Failed to write binary XML"))] Writer { #[snafu(backtrace)] source: WriterError, }, #[snafu(display("Failed to read text XML"))] TextReader { #[snafu(backtrace)] source: TextReaderError, }, #[snafu(display("Error handling XML"))] XmlError { source: QuickXmlError }, } impl From<ByteBufferError> for KbinError { #[inline] fn from(source: ByteBufferError) -> Self { KbinError::ByteBuffer { source } } } impl From<EncodingError> for KbinError { #[inline] fn from(source: EncodingError) -> Self { KbinError::Encoding { source } } } impl From<SixbitError> for KbinError { #[inline] fn from(source: SixbitError) -> Self { KbinError::Sixbit { source } } } impl From<ReaderError> for KbinError { #[inline] fn from(source: ReaderError) -> Self { KbinError::Reader { source } } } impl From<WriterError> for KbinError { #[inline] fn from(source: WriterError) -> Self { KbinError::Writer { source } } } impl From<TextReaderError> for KbinError { #[inline] fn from(source: TextReaderError) -> Self { KbinError::TextReader { source } } } impl From<QuickXmlError> for KbinError { #[inline] fn from(source: QuickXmlError) -> Self { KbinError::XmlError { source } } }
true
955062f933ef9702cb655250855638647c40e566
Rust
dirvine/RustNN
/tests/xor.rs
UTF-8
877
2.734375
3
[ "Apache-2.0" ]
permissive
#![feature(std_misc)] extern crate nn; use std::num::Float; use nn::{NN, HaltCondition}; use std::time::Duration; #[test] fn xor_timed() { // create examples of the xor function let examples = [ (vec![0f64, 0f64], vec![0f64]), (vec![0f64, 1f64], vec![1f64]), (vec![1f64, 0f64], vec![1f64]), (vec![1f64, 1f64], vec![0f64]), ]; // create a new neural network let mut net = NN::new(&[2,3,1]); // train the network net.train(&examples) .halt_condition( HaltCondition::Timer(Duration::seconds(10)) ) .log_interval(None) .momentum(0.1) .go(); // test the trained network for &(ref inputs, ref outputs) in examples.iter() { let results = net.run(inputs); let (result, key) = (Float::round(results[0]), outputs[0]); assert!(result == key); } }
true
2c6f3ba3994c5f65014a34a96b6f38073573ef07
Rust
TehPers/AdventOfCode
/years/aoc2020/src/day03/main.rs
UTF-8
941
3.03125
3
[ "MIT" ]
permissive
use anyhow::Context; use std::io::{BufRead, BufReader}; const INPUT: &[u8] = include_bytes!("input.txt"); fn count_trees(lines: &Vec<String>, right: usize, down: usize) -> usize { lines .iter() .step_by(down) .enumerate() .map(|(i, line)| line.as_bytes()[(i * right) % line.len()]) .filter(|&c| c == b'#') .count() } fn solve(lines: &Vec<String>, steps: Vec<(usize, usize)>) -> usize { steps .into_iter() .map(|(right, down)| count_trees(&lines, right, down)) .product() } fn main() -> anyhow::Result<()> { let input = BufReader::new(INPUT); let lines = input .lines() .collect::<Result<Vec<_>, _>>() .context("failure reading input file")?; println!("part 1: {}", solve(&lines, vec![(3, 1)])); println!( "part 2: {}", solve(&lines, vec![(1, 1), (3, 1), (5, 1), (7, 1), (1, 2)]) ); Ok(()) }
true
d297c85f9b5cb24ac4302efa7910a9053314ff5d
Rust
tuplecats/docker-client
/src/image/create/request.rs
UTF-8
2,988
3.09375
3
[ "MIT" ]
permissive
#[derive(Default)] pub struct RequestBuilder { from_image: String, from_src: String, repo: String, tag: String, message: String, platform: String } impl RequestBuilder { pub fn new() -> RequestBuilder { RequestBuilder::default() } pub fn image<T>(mut self, image: T) -> Self where T: Into<String> { self.from_image = image.into(); self } pub fn source<T>(mut self, source: T) -> Self where T: Into<String> { self.from_src = source.into(); self } pub fn repo<T>(mut self, repo: T) -> Self where T: Into<String> { self.repo = repo.into(); self } pub fn tag<T>(mut self, tag: T) -> Self where T: Into<String> { self.tag = tag.into(); self } pub fn message<T>(mut self, message: T) -> Self where T: Into<String> { self.message = message.into(); self } pub fn platform<T>(mut self, platform: T) -> Self where T: Into<String> { self.platform = platform.into(); self } pub fn build(self) -> Request { Request { from_image: self.from_image, from_src: self.from_src, repo: self.repo, tag: match self.tag.as_str() { "" => String::from("latest"), _=> self.tag.clone() }, message: self.message, platform: self.platform } } } pub struct Request { from_image: String, from_src: String, repo: String, tag: String, message: String, platform: String } impl Request { pub fn image(&self) -> &str { &self.from_image } pub fn source(&self) -> &str { &self.from_src } pub fn repo(&self) -> &str { &self.repo } pub fn tag(&self) -> &str { &self.tag } pub fn message(&self) -> &str { &self.message } pub fn platform(&self) -> &str { &self.platform } pub fn get_path(&self) -> String { let mut path = String::from("/images/create?"); if !self.from_image.is_empty() { path.push_str(format!("{}={}&", "fromImage", self.from_image).as_str()); } if !self.from_src.is_empty() { path.push_str(format!("{}={}&", "fromSrc", self.from_src).as_str()); } if !self.repo.is_empty() { path.push_str(format!("{}={}&", "repo", self.repo).as_str()); } if !self.tag.is_empty() { path.push_str(format!("{}={}&", "tag", self.tag).as_str()); } 
if !self.message.is_empty() { path.push_str(format!("{}={}&", "message", self.message).as_str()); } if !self.platform.is_empty() { path.push_str(format!("{}={}&", "platform", self.platform).as_str()); } path.pop(); path } }
true
9c60f25d60fea6d659367c2cdae3051b981f8d02
Rust
Pamplemousse/mathematical_expression_evaluation
/src/parser/mod.rs
UTF-8
7,274
3.640625
4
[]
no_license
use tokenizer::token::*; pub fn parse(tokens: Vec<Token>) -> Vec<Token> { let mut ast: Vec<Token> = Vec::new(); let mut operator_stack: Vec<Token> = Vec::new(); for token in tokens { match token { Token::Literal(_) => ast.push(token), Token::Operator(operator) => { while !operator_stack.is_empty() { let top_token: Token = operator_stack.last().unwrap().clone(); match top_token { Token::LeftParenthesis => break, Token::Operator(top_operator) => { if top_operator > operator { operator_stack.pop(); ast.push(Token::Operator(top_operator)); } else { break; } } _ => panic!("Dev error: The operator_stack should only hold LeftParentheses and Operators."), } } operator_stack.push(Token::Operator(operator)); }, Token::LeftParenthesis => operator_stack.push(token), Token::RightParenthesis => { while !operator_stack.is_empty() { let top_token: Token = operator_stack.last().unwrap().clone(); match top_token { Token::LeftParenthesis => { operator_stack.pop(); break; }, Token::Operator(top_operator) => { operator_stack.pop(); ast.push(Token::Operator(top_operator)); } _ => panic!("Dev error: The operator_stack should only hold LeftParentheses and Operators."), } } }, } } operator_stack.reverse(); ast.append(&mut operator_stack); ast } #[cfg(test)] mod tests { use super::*; use tokenizer::tokenize; use tokenizer::token::literal::Literal; use tokenizer::token::operator::Operator; #[test] fn parse_with_empty_list_of_tokens() { let tokens: Vec<Token> = tokenize(""); let result: Vec<Token> = parse(tokens); let expected_result: Vec<Token> = [ ].to_vec(); assert_eq!(result, expected_result); } #[test] fn parse_with_a_two_literals_multiplication() { let tokens: Vec<Token> = tokenize("22*3"); let result: Vec<Token> = parse(tokens); let expected_result: Vec<Token> = [ Token::Literal(Literal::from(String::from("22"))), Token::Literal(Literal::from(String::from("3"))), Token::Operator(Operator::Times) ].to_vec(); assert_eq!(result, expected_result); } #[test] fn 
parse_with_multiplication_and_addition() { let tokens: Vec<Token> = tokenize("22*3+2"); let result: Vec<Token> = parse(tokens); let expected_result: Vec<Token> = [ Token::Literal(Literal::from(String::from("22"))), Token::Literal(Literal::from(String::from("3"))), Token::Operator(Operator::Times), Token::Literal(Literal::from(String::from("2"))), Token::Operator(Operator::Plus) ].to_vec(); assert_eq!(result, expected_result); } #[test] fn parse_with_addition_and_multiplication() { let tokens: Vec<Token> = tokenize("22+3*2"); let result: Vec<Token> = parse(tokens); let expected_result: Vec<Token> = [ Token::Literal(Literal::from(String::from("22"))), Token::Literal(Literal::from(String::from("3"))), Token::Literal(Literal::from(String::from("2"))), Token::Operator(Operator::Times), Token::Operator(Operator::Plus) ].to_vec(); assert_eq!(result, expected_result); } #[test] fn parse_with_multiplication_and_addition_and_multiplication() { let tokens: Vec<Token> = tokenize("22*3+2*3"); let result: Vec<Token> = parse(tokens); let expected_result: Vec<Token> = [ Token::Literal(Literal::from(String::from("22"))), Token::Literal(Literal::from(String::from("3"))), Token::Operator(Operator::Times), Token::Literal(Literal::from(String::from("2"))), Token::Literal(Literal::from(String::from("3"))), Token::Operator(Operator::Times), Token::Operator(Operator::Plus) ].to_vec(); assert_eq!(result, expected_result); } #[test] fn parse_with_addition_and_multiplication_and_addition() { let tokens: Vec<Token> = tokenize("22+3*2+3"); let result: Vec<Token> = parse(tokens); let expected_result: Vec<Token> = [ Token::Literal(Literal::from(String::from("22"))), Token::Literal(Literal::from(String::from("3"))), Token::Literal(Literal::from(String::from("2"))), Token::Operator(Operator::Times), Token::Literal(Literal::from(String::from("3"))), Token::Operator(Operator::Plus), Token::Operator(Operator::Plus) ].to_vec(); assert_eq!(result, expected_result); } #[test] fn 
parse_with_multiplication_and_addition_and_addition() { let tokens: Vec<Token> = tokenize("22*3+2+3"); let result: Vec<Token> = parse(tokens); let expected_result: Vec<Token> = [ Token::Literal(Literal::from(String::from("22"))), Token::Literal(Literal::from(String::from("3"))), Token::Operator(Operator::Times), Token::Literal(Literal::from(String::from("2"))), Token::Literal(Literal::from(String::from("3"))), Token::Operator(Operator::Plus), Token::Operator(Operator::Plus) ].to_vec(); assert_eq!(result, expected_result); } #[test] fn parse_with_addition_and_addition_and_multiplication() { let tokens: Vec<Token> = tokenize("22+3+2*3"); let result: Vec<Token> = parse(tokens); let expected_result: Vec<Token> = [ Token::Literal(Literal::from(String::from("22"))), Token::Literal(Literal::from(String::from("3"))), Token::Literal(Literal::from(String::from("2"))), Token::Literal(Literal::from(String::from("3"))), Token::Operator(Operator::Times), Token::Operator(Operator::Plus), Token::Operator(Operator::Plus) ].to_vec(); assert_eq!(result, expected_result); } #[test] fn parse_with_parentheses() { let tokens: Vec<Token> = tokenize("(22+3)*2"); let result: Vec<Token> = parse(tokens); let expected_result: Vec<Token> = [ Token::Literal(Literal::from(String::from("22"))), Token::Literal(Literal::from(String::from("3"))), Token::Operator(Operator::Plus), Token::Literal(Literal::from(String::from("2"))), Token::Operator(Operator::Times), ].to_vec(); assert_eq!(result, expected_result); } }
true
142081df5dc75c6563bac17d12677bbbbc3f9f54
Rust
jkremser/rustlings
/exercises/option/option1.rs
UTF-8
433
3.21875
3
[ "MIT" ]
permissive
// option1.rs // Make me compile! Execute `rustlings hint option1` for hints // you can modify anything EXCEPT for this function's sig fn print_number(maybe_number: Option<u16>) { println!("printing: {}", maybe_number.unwrap()); } fn main() { print_number(Some(13)); print_number(Some(99)); let numbers: Vec<Option<u16>> = (0..5).into_iter().map(|x| { Some(((x * 1235) + 2) / (4 * 16)) }).collect(); }
true
4d5a9979493c7be4e2c6941765c991412add5566
Rust
AyeGill/heron
/examples/quickstart.rs
UTF-8
1,329
2.859375
3
[ "MIT" ]
permissive
use bevy::prelude::*; use heron::prelude::*; #[bevy_main] fn main() { App::build() .add_plugins(DefaultPlugins) .add_plugin(PhysicsPlugin::default()) // Add the Heron plugin .insert_resource(Gravity::from(Vec3::new(0.0, -300.0, 0.0))) // Define gravity .add_startup_system(spawn.system()) .run(); } fn spawn(mut commands: Commands, mut materials: ResMut<Assets<ColorMaterial>>) { // Ensure we can see things commands.spawn_bundle(OrthographicCameraBundle::new_2d()); // the size of our sprite let size = Vec2::new(30.0, 30.0); commands // here we add a Sprite. We can add any bundle of our choice; the // only required component is a GlobalTransform .spawn_bundle(SpriteBundle { sprite: Sprite::new(size), material: materials.add(Color::GREEN.into()), transform: Transform::from_translation(Vec3::new(0.0, 200.0, 0.0)), ..Default::default() }) // Make it a physics body, by adding the RigidBody component .insert(RigidBody::Dynamic) // Attach a collision shape .insert(CollisionShape::Cuboid { // let the size be consistent with our sprite half_extends: size.extend(0.0) / 2.0, border_radius: None, }); }
true
0ab751f8d392eeb94309806a3ed9527e3a70e866
Rust
angristan/rlxc
/src/lxc.rs
UTF-8
4,863
2.96875
3
[]
no_license
//! Rust wrapper for `struct lxc_container`. Implements methods to control //! containers. use failure::*; use std::ffi::CStr; use std::ffi::CString; use std::os::raw::{c_char, c_int}; use std::path::Path; use std::ptr; use std::time::Duration; use crate::util::ffi::{AllocatedStringArrayIter, ToCString}; mod attach_options; pub use attach_options::*; /// The main container handle. This implements the methods for `struct /// lxc_container`. pub struct Lxc { handle: *mut lxc_sys::lxc_container, } /// Get an iterator over all containers defined in the given `path`. This is a /// wrapper for liblxc's `list_all_containers` function. pub fn list_all_containers<T: AsRef<Path>>( path: T, ) -> Result<AllocatedStringArrayIter, Error> { let cpath = path.as_ref().to_c_string()?; let mut names: *mut *mut c_char = ptr::null_mut(); let nr = unsafe { lxc_sys::list_all_containers( cpath.as_ptr(), &mut names, ptr::null_mut(), ) }; if nr < 0 { bail!("failed to list containers"); } Ok(AllocatedStringArrayIter::new(names, nr as usize)) } /// Returns the currently used liblxc's version string. pub fn get_version() -> &'static str { let cstr: &CStr = unsafe { CStr::from_ptr(lxc_sys::lxc_get_version()) }; cstr.to_str().unwrap_or("unknown") } pub fn get_global_config_item(key: &str) -> Result<&'static str, Error> { let ckey = CString::new(key).unwrap(); let cstr: &CStr = unsafe { CStr::from_ptr(lxc_sys::lxc_get_global_config_item(ckey.as_ptr())) }; if cstr.as_ptr().is_null() { bail!("failed to find value of {}", key); } Ok(cstr.to_str().unwrap()) } pub fn get_default_path() -> &'static str { let path = match get_global_config_item("lxc.lxcpath") { Ok(s) => s, Err(_) => return "", }; path } impl Lxc { /// Create a new container handler for the container of the given `name` /// residing under the provided `path`. 
pub fn new(name: &str, path: &str) -> Result<Lxc, Error> { let cname = CString::new(name).unwrap(); let cpath = CString::new(path).unwrap(); let handle = unsafe { lxc_sys::lxc_container_new(cname.as_ptr(), cpath.as_ptr()) }; if handle.is_null() { bail!("failed to allocate new container"); } Ok(Lxc { handle }) } /// Attempt to start the container. If `stub` is true, the container's /// `lxc.execute.cmd` is executed instead of `lxc.init.cmd`. pub fn start(&self, stub: bool) -> Result<(), Error> { let useinit = if stub { 1 } else { 0 }; let started = unsafe { (*self.handle).start.unwrap()(self.handle, useinit, ptr::null()) }; if !started { bail!("failed to start container"); } Ok(()) } /// Atetmpt to shutdown a container with a timeout. pub fn shutdown(&self, timeout: Option<Duration>) -> Result<(), Error> { let timeout: c_int = match timeout { Some(to) => { let secs = to.as_secs(); // seconds can be large... if secs > (!(0 as c_int)) as u64 { bail!("timeout too large"); } secs as _ } None => -1, }; let down = unsafe { (*self.handle).shutdown.unwrap()(self.handle, timeout) }; if !down { bail!("failed to shutdown container"); } Ok(()) } /// Attempt to stop a running container. pub fn stop(&self) -> Result<(), Error> { let stopped = unsafe { (*self.handle).stop.unwrap()(self.handle) }; if !stopped { bail!("failed to start container"); } Ok(()) } /// Determine if the caller may control the container. pub fn may_control(&self) -> bool { unsafe { (*self.handle).may_control.unwrap()(self.handle) } } /// Determine if the container is running. pub fn is_running(&self) -> bool { unsafe { (*self.handle).is_running.unwrap()(self.handle) } } /// Try to run a program inside the container. 
pub fn attach_run_wait( &self, options: &mut AttachOptions, program: &str, argv: Vec<&str>, ) -> i32 { let cprogram = CString::new(program).unwrap(); let cargv: Vec<_> = argv.iter().map(|arg| CString::new(*arg).unwrap()).collect(); let mut args: Vec<_> = cargv.iter().map(|arg| arg.as_ptr()).collect(); args.push(std::ptr::null()); unsafe { (*self.handle).attach_run_wait.unwrap()( self.handle, options.raw(), cprogram.as_ptr(), args.as_ptr(), ) } } }
true
9dccce9d74973558dec4ed7ad8c25cc1480081e6
Rust
seanwallawalla-forks/nushell
/crates/nu-command/src/commands/dataframe/melt.rs
UTF-8
3,121
2.859375
3
[ "MIT" ]
permissive
use crate::{commands::dataframe::utils::parse_polars_error, prelude::*}; use nu_engine::WholeStreamCommand; use nu_errors::ShellError; use nu_protocol::{dataframe::NuDataFrame, Signature, SyntaxShape, Value}; use super::utils::convert_columns; pub struct DataFrame; impl WholeStreamCommand for DataFrame { fn name(&self) -> &str { "dataframe melt" } fn usage(&self) -> &str { "[DataFrame] Unpivot a DataFrame from wide to long format" } fn signature(&self) -> Signature { Signature::build("dataframe melt") .required("id_columns", SyntaxShape::Table, "Id columns for melting") .rest(SyntaxShape::Any, "columns used as value columns") } fn run(&self, args: CommandArgs) -> Result<OutputStream, ShellError> { command(args) } fn examples(&self) -> Vec<Example> { vec![Example { description: "melt dataframe", example: "[[a b]; [a 2] [b 4] [a 6]] | dataframe to-df | dataframe melt a b", result: None, }] } } fn command(mut args: CommandArgs) -> Result<OutputStream, ShellError> { let tag = args.call_info.name_tag.clone(); let id_col: Vec<Value> = args.req(0)?; let val_col: Vec<Value> = args.rest(1)?; let (id_col_string, id_col_span) = convert_columns(&id_col, &tag)?; let (val_col_string, val_col_span) = convert_columns(&val_col, &tag)?; let df = NuDataFrame::try_from_stream(&mut args.input, &tag.span)?; check_column_datatypes(df.as_ref(), &id_col_string, &id_col_span)?; check_column_datatypes(df.as_ref(), &val_col_string, &val_col_span)?; let res = df .as_ref() .melt(&id_col_string, &val_col_string) .map_err(|e| parse_polars_error::<&str>(&e, &tag.span, None))?; Ok(OutputStream::one(NuDataFrame::dataframe_to_value(res, tag))) } fn check_column_datatypes<T: AsRef<str>>( df: &polars::prelude::DataFrame, cols: &[T], col_span: &Span, ) -> Result<(), ShellError> { if cols.is_empty() { return Err(ShellError::labeled_error( "Merge error", "empty column list", col_span, )); } // Checking if they are same type if cols.len() > 1 { for w in cols.windows(2) { let l_series = df 
.column(w[0].as_ref()) .map_err(|e| parse_polars_error::<&str>(&e, &col_span, None))?; let r_series = df .column(w[1].as_ref()) .map_err(|e| parse_polars_error::<&str>(&e, &col_span, None))?; if l_series.dtype() != r_series.dtype() { return Err(ShellError::labeled_error_with_secondary( "Merge error", "found different column types in list", col_span, format!( "datatypes {} and {} are incompatible", l_series.dtype(), r_series.dtype() ), col_span, )); } } } Ok(()) }
true
1dcb3c905ca12df54eb1933f4b8154581dc5a108
Rust
dezuman/narm
/src/bitmanip.rs
UTF-8
3,104
3.6875
4
[ "MIT" ]
permissive
/// Bit manipulation helper functions for integer types pub trait BitManipulation{ /// gets the value of the bit at the specified index fn get_bit(&self, index: u8) -> bool; /// sets the value of the bit at the specified index fn set_bit(&mut self, index: u8, value: bool); /// gets the value of the bit at the specified index, with the value being treated as a big endian integer fn get_bit_big_endian(&self, index: u8) -> bool; } pub trait IntAlign{ fn align4(&self) -> u32; } impl IntAlign for u32{ fn align4(&self) -> u32{ (*self) & (!0b11) } } impl BitManipulation for u32{ fn get_bit(&self, index: u8) -> bool{ self & (1 << index) > 0 } fn get_bit_big_endian(&self, index: u8) -> bool{ self & (1 << (31 - index)) > 0 } fn set_bit(&mut self, index: u8, value: bool){ if value{ *self = *self | (1 << index); }else{ *self = *self & (0xFFFFFFFF ^ (1 << index)); } } } impl BitManipulation for u64{ fn get_bit(&self, index: u8) -> bool{ self & (1 << index) > 0 } fn get_bit_big_endian(&self, index: u8) -> bool{ self & (1 << (63 - index)) > 0 } fn set_bit(&mut self, index: u8, value: bool){ if value{ *self = *self | (1 << index); }else{ *self = *self & (0xFFFFFFFFFFFFFFFF ^ (1 << index)); } } } impl BitManipulation for u16{ fn get_bit(&self, index: u8) -> bool{ *self & (1 << index) > 0 } fn get_bit_big_endian(&self, index: u8) -> bool{ self & (1 << (15 - index)) > 0 } fn set_bit(&mut self, index: u8, value: bool){ if value{ *self = *self | (1 << index); }else{ *self = *self & (0xFFFF ^ (1 << index)); } } } impl BitManipulation for u8{ fn get_bit(&self, index: u8) -> bool{ self & (1 << index) > 0 } fn get_bit_big_endian(&self, index: u8) -> bool{ self & (1 << (7 - index)) > 0 } fn set_bit(&mut self, index: u8, value: bool){ if value{ *self = *self | (1 << index); }else{ *self = *self & (0xFF ^ (1 << index)); } } } // Source: https://github.com/archshift/bitutils-rs MIT licensed /// Sign extend a `size`-bit number (stored in a u32) to an i32. 
/// /// let i5bit = 0b11110; /// let i32bit = narm::bitmanip::sign_extend32(i5bit, 5); /// assert_eq!(i32bit, -2); /// #[inline] pub fn sign_extend32(data: u32, size: u32) -> i32 { assert!(size > 0 && size <= 32); ((data << (32 - size)) as i32) >> (32 - size) } #[cfg(test)] mod tests{ use super::*; #[test] fn test_bits(){ let tmp = 0b0000_1000u8; assert!(!tmp.get_bit(0)); assert!(tmp.get_bit(3)); assert!(tmp.get_bit_big_endian(4)); let mut tmp2 = tmp; tmp2.set_bit(2, true); tmp2.set_bit(3, false); tmp2.set_bit(0, true); assert!(tmp2 == 0b0000_0101); assert!(tmp2.get_bit(0)); assert!(tmp2.get_bit_big_endian(7)); } }
true
e3513e005371ec4076b2d3302a8469ee419bf683
Rust
peterallin/rideways
/sdl_input/src/control_state.rs
UTF-8
1,446
3.25
3
[]
no_license
#[derive(Debug, Copy, Clone, Default)] pub struct ControlState { pub left: bool, pub right: bool, pub up: bool, pub down: bool, pub fire: bool, } impl ControlState { pub fn new() -> Self { ControlState { left: false, right: false, up: false, down: false, fire: false, } } pub fn update(&mut self, event: &sdl2::event::Event) { match event { sdl2::event::Event::KeyUp { keycode: Some(key), .. } => match key { sdl2::keyboard::Keycode::W => self.up = false, sdl2::keyboard::Keycode::A => self.left = false, sdl2::keyboard::Keycode::S => self.down = false, sdl2::keyboard::Keycode::D => self.right = false, sdl2::keyboard::Keycode::Return => self.fire = false, _ => {} }, sdl2::event::Event::KeyDown { keycode: Some(key), .. } => match key { sdl2::keyboard::Keycode::W => self.up = true, sdl2::keyboard::Keycode::A => self.left = true, sdl2::keyboard::Keycode::S => self.down = true, sdl2::keyboard::Keycode::D => self.right = true, sdl2::keyboard::Keycode::Return => self.fire = true, _ => {} }, _ => {} } } }
true
caaecd4481920ba7231bebdf3c49ea15929582ca
Rust
prz23/zinc
/zinc-types/src/instructions/evaluation_stack/push.rs
UTF-8
1,172
3.203125
3
[ "Apache-2.0" ]
permissive
//! //! The `push constant` instruction. //! use std::fmt; use num::BigInt; use serde::Deserialize; use serde::Serialize; use crate::data::r#type::scalar::Type as ScalarType; use crate::instructions::Instruction; /// /// The `push constant` instruction. /// #[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] pub struct Push { /// The constant value. pub value: BigInt, /// The constant type. pub scalar_type: ScalarType, } impl Push { /// /// A shortcut constructor. /// pub fn new(value: BigInt, scalar_type: ScalarType) -> Self { Self { value, scalar_type } } /// /// A shortcut constructor. /// pub fn new_field(value: BigInt) -> Self { Self::new(value, ScalarType::Field) } /// /// If the instruction is for the debug mode only. /// pub fn is_debug(&self) -> bool { false } } impl Into<Instruction> for Push { fn into(self) -> Instruction { Instruction::Push(self) } } impl fmt::Display for Push { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "push {} as {}", self.value, self.scalar_type) } }
true
c3e266432c06ad60fc6b7e627ab73cf92719ab3d
Rust
ozankasikci/rust-music-theory
/src/chord/errors.rs
UTF-8
651
2.84375
3
[ "MIT" ]
permissive
use crate::note::NoteError; use std::error; use std::fmt; /// An error while parsing a chord. #[derive(Debug, Clone)] pub enum ChordError { InvalidRegex, } impl fmt::Display for ChordError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "Invalid Regex!") } } impl error::Error for ChordError {} impl From<NoteError> for ChordError { fn from(e: NoteError) -> Self { match e { _ => ChordError::InvalidRegex, } } } impl From<regex::Error> for ChordError { fn from(e: regex::Error) -> Self { match e { _ => ChordError::InvalidRegex, } } }
true
805aaff103b7f0f2de559b87c6d0ac234ed1faf2
Rust
shaddyshad/inable_nav
/src/parser/interface.rs
UTF-8
2,921
3.453125
3
[]
no_license
use std::borrow::Cow; use regex::Regex; // tokens #[derive(Debug, Eq, PartialEq)] pub enum Token { ParseError(Cow<'static, str>), TagToken(Tag) } // token kinds #[derive(Debug, Eq, PartialEq, Copy, Clone)] pub enum TagKind { StartTag, EndTag } #[derive(Debug, Eq, PartialEq)] pub struct Attribute { pub name: String, pub value: String } #[derive(Debug, Eq, PartialEq)] pub struct Tag { pub kind: TagKind, pub name: TagName, pub attributes: Vec<Attribute>, pub is_self_closing: bool, pub value: Option<String> } #[derive(Debug, Eq, PartialEq, Clone)] pub struct TagName(pub String); impl TagName { pub fn new(name: &str) -> Self { TagName(name.to_string()) } pub fn get(&self) -> &str { &self.0 } } // Token sink result #[derive(Eq, PartialEq, Debug, Clone)] pub enum SinkResult { Continue, } //check if is an answer fn matches<'a>(name: &'a str, pattern: &'static str) -> bool { let re = Regex::new(pattern).unwrap(); return re.is_match(name); } impl Tag { // get the name of this tag pub fn name(&self) -> &TagName { &self.name } // get the value from the tag pub fn value(&self) -> Option<String> { self.value.clone() } // check if it is an opening tag pub fn is_start_tag(&self) -> bool{ self.kind == TagKind::StartTag } pub fn is_end_tag(&self) -> bool { !self.is_start_tag() } // section_number holds the section name so we can replace pub fn is_section_name(&self) -> bool { matches( &self.name.get(), "section_number" ) } //check if it is a document tag pub fn is_document(&self) -> bool { let doc_tag = "xml"; matches( &self.name.get(), doc_tag ) } pub fn is_root(&self) -> bool { matches( &self.name.get(), "root" ) } pub fn is_question_number(&self) -> bool { matches( &self.name.get(), r"^question_number$" ) } pub fn is_section(&self) -> bool { matches( &self.name.get(), "SECTION_" ) } pub fn is_question(&self) -> bool { matches( &self.name.get(), "^question$" ) } pub fn is_item(&self) -> bool { matches( &self.name.get(), "item" ) } pub fn is_instructions(&self) -> bool { matches( 
&self.name.get(), "instructions" ) } // meta_data pub fn is_meta(&self) -> bool { matches( &self.name.get(), "meta_data" ) } // new page pub fn is_page(&self) -> bool { matches( &self.name.get(), r"^page_\d{1}$" ) } }
true
a8c38f6d59853b036ed6e55dc4d39b3598230799
Rust
llxzy/adventofcode2020
/rust/src/day3.rs
UTF-8
988
3.453125
3
[]
no_license
use std::fs::read_to_string; fn count_trees(right: usize, down: usize, lines: &String) -> i64 { let mut tree_count: i64 = 0; let mut current_pos = 0; let l: Vec<&str> = lines.lines().collect(); let length = l[0].len(); for i in (0..(l.len()-1)).step_by(down as usize) { let new_pos = (current_pos + right) % length; if l[i+down].chars().nth(new_pos).unwrap() == '#' { tree_count += 1; } current_pos = new_pos; } tree_count } fn count_all(lines: &String) -> i64 { let mut total = 1; let directions = [ (1, 1), (3, 1), (5, 1), (7, 1), (1, 2) ]; for (x, y) in &directions { total = total * count_trees(*x, *y, lines); } total } pub fn main() { let lines = read_to_string("./src/day3_input.txt").unwrap(); println!("Day 3, part 1 answer: {}", count_trees(3, 1, &lines)); println!("Day 3, part 2 answer: {}", count_all(&lines)); }
true
23a55de4919e049871a37d7ecebbba3f2a4f6eac
Rust
EmbarkStudios/rust-gpu
/crates/spirv-builder/src/depfile.rs
UTF-8
4,564
3.09375
3
[ "Apache-2.0", "MIT", "LicenseRef-scancode-unknown-license-reference" ]
permissive
//! Reading Makefile-style dependency files. //! Taken with permission from <https://github.com/m-ou-se/ninj/blob/master/lib/depfile/mod.rs> use raw_string::{RawStr, RawString}; use std::fs::File; use std::io::{BufRead, BufReader, Error, ErrorKind, Read}; use std::mem::{replace, take}; use std::path::Path; /// Read a Makfile-style dependency file. /// /// `f` is called for every target. The first argument is the target, the /// second is the list of dependencies. pub fn read_deps_file( file_name: &Path, f: impl FnMut(RawString, Vec<RawString>) -> Result<(), Error>, ) -> Result<(), Error> { let file = File::open(file_name) .map_err(|e| Error::new(e.kind(), format!("Unable to read {file_name:?}: {e}")))?; read_deps_file_from(file, f) } /// Read a Makfile-style dependency file. /// /// `f` is called for every target. The first argument is the target, the /// second is the list of dependencies. pub fn read_deps_file_from( file: impl Read, mut f: impl FnMut(RawString, Vec<RawString>) -> Result<(), Error>, ) -> Result<(), Error> { let mut file = BufReader::new(file); let mut state = State::default(); let mut line = RawString::new(); loop { line.clear(); if file.read_until(b'\n', line.as_mut_bytes())? == 0 { break; } if line.last() == Some(b'\n') { line.pop(); } if cfg!(windows) && line.last() == Some(b'\r') { line.pop(); } let mut write_offset = 0; let mut read_offset = 0; loop { match memchr::memchr2(b' ', b'\\', line[read_offset..].as_bytes()) .map(|i| i + read_offset) { Some(i) if line[i] == b'\\' && i + 1 == line.len() => { // Backslash at the end of the line state.add_part(&line[write_offset..i]); state.finish_path()?; break; } Some(i) if line[i] == b'\\' => { // Backslash before character. let c = line[i + 1]; match c { b' ' | b'\\' | b'#' | b'*' | b'[' | b']' | b'|' => { // Escaped character. Drop the '\'. state.add_part(&line[write_offset..i]); write_offset = i + 1; } _ => (), // Keep the '\'. } read_offset = i + 2; } Some(i) => { // A space. 
debug_assert_eq!(line[i], b' '); state.add_part(&line[write_offset..i]); state.finish_path()?; write_offset = i + 1; read_offset = i + 1; } None => { // End of the line. state.add_part(&line[write_offset..]); state.finish_deps(&mut f)?; break; } } } } if state.target.is_none() { Ok(()) } else { Err(Error::new(ErrorKind::InvalidData, "Unexpected end of file")) } } #[derive(Default)] struct State { /// The (incomplete) path we're currently reading. path: RawString, /// The target, once we've finished reading it. target: Option<RawString>, /// The rest of the paths we've finished reading. deps: Vec<RawString>, } impl State { fn add_part(&mut self, s: &RawStr) { self.path.push_str(s); } fn finish_path(&mut self) -> Result<(), Error> { if !self.path.is_empty() { let mut path = replace(&mut self.path, RawString::new()); if self.target.is_none() && path.last() == Some(b':') { path.pop(); self.target = Some(path); } else if self.target.is_none() { return Err(Error::new( ErrorKind::InvalidData, "Rule in dependency file has multiple outputs", )); } else { self.deps.push(path); } } Ok(()) } fn finish_deps( &mut self, f: &mut impl FnMut(RawString, Vec<RawString>) -> Result<(), Error>, ) -> Result<(), Error> { self.finish_path()?; if let Some(target) = self.target.take() { f(target, take(&mut self.deps))?; } Ok(()) } }
true
f6aacb79ee681fafe9bda40c83f7f28a74c0a73c
Rust
Azure/azure-sdk-for-rust
/services/mgmt/serialconsole/src/package_2018_05/models.rs
UTF-8
7,086
2.59375
3
[ "LicenseRef-scancode-generic-cla", "MIT", "LGPL-2.1-or-later" ]
permissive
#![allow(non_camel_case_types)] #![allow(unused_imports)] use serde::de::{value, Deserializer, IntoDeserializer}; use serde::{Deserialize, Serialize, Serializer}; use std::str::FromStr; #[doc = "An error response from the service."] #[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)] pub struct CloudError { #[doc = "An error response from the Batch service."] #[serde(default, skip_serializing_if = "Option::is_none")] pub error: Option<CloudErrorBody>, } impl CloudError { pub fn new() -> Self { Self::default() } } #[doc = "An error response from the Batch service."] #[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)] pub struct CloudErrorBody { #[doc = "An identifier for the error. Codes are invariant and are intended to be consumed programmatically."] #[serde(default, skip_serializing_if = "Option::is_none")] pub code: Option<String>, #[doc = "A message describing the error, intended to be suitable for display in a user interface."] #[serde(default, skip_serializing_if = "Option::is_none")] pub message: Option<String>, #[doc = "The target of the particular error. 
For example, the name of the property in error."] #[serde(default, skip_serializing_if = "Option::is_none")] pub target: Option<String>, #[doc = "A list of additional details about the error."] #[serde( default, deserialize_with = "azure_core::util::deserialize_null_as_default", skip_serializing_if = "Vec::is_empty" )] pub details: Vec<CloudErrorBody>, } impl CloudErrorBody { pub fn new() -> Self { Self::default() } } #[doc = "Returns whether or not Serial Console is disabled."] #[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)] pub struct DisableSerialConsoleResult { #[doc = "Whether or not Serial Console is disabled."] #[serde(default, skip_serializing_if = "Option::is_none")] pub disabled: Option<bool>, } impl DisableSerialConsoleResult { pub fn new() -> Self { Self::default() } } #[doc = "Returns whether or not Serial Console is disabled (enabled)."] #[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)] pub struct EnableSerialConsoleResult { #[doc = "Whether or not Serial Console is disabled (enabled)."] #[serde(default, skip_serializing_if = "Option::is_none")] pub disabled: Option<bool>, } impl EnableSerialConsoleResult { pub fn new() -> Self { Self::default() } } #[doc = "Error saying that the provided subscription could not be found"] #[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)] pub struct GetSerialConsoleSubscriptionNotFound { #[doc = "Error code"] #[serde(default, skip_serializing_if = "Option::is_none")] pub code: Option<String>, #[doc = "Subscription not found message"] #[serde(default, skip_serializing_if = "Option::is_none")] pub message: Option<String>, } impl GetSerialConsoleSubscriptionNotFound { pub fn new() -> Self { Self::default() } } #[doc = "The resource model definition for a ARM proxy resource. 
It will have everything other than required location and tags"] #[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)] pub struct ProxyResource { #[serde(flatten)] pub resource: Resource, } impl ProxyResource { pub fn new() -> Self { Self::default() } } #[doc = "The Resource model definition."] #[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)] pub struct Resource { #[doc = "Resource Id"] #[serde(default, skip_serializing_if = "Option::is_none")] pub id: Option<String>, #[doc = "Resource name"] #[serde(default, skip_serializing_if = "Option::is_none")] pub name: Option<String>, #[doc = "Resource type"] #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")] pub type_: Option<String>, } impl Resource { pub fn new() -> Self { Self::default() } } #[doc = "Serial Console operations"] #[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)] pub struct SerialConsoleOperations { #[doc = "A list of Serial Console operations"] #[serde( default, deserialize_with = "azure_core::util::deserialize_null_as_default", skip_serializing_if = "Vec::is_empty" )] pub value: Vec<serde_json::Value>, } impl SerialConsoleOperations { pub fn new() -> Self { Self::default() } } #[doc = "Returns whether or not Serial Console is disabled."] #[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)] pub struct SerialConsoleStatus { #[doc = "Whether or not Serial Console is disabled."] #[serde(default, skip_serializing_if = "Option::is_none")] pub disabled: Option<bool>, } impl SerialConsoleStatus { pub fn new() -> Self { Self::default() } } #[doc = "Represents the serial port of the parent resource."] #[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)] pub struct SerialPort { #[serde(flatten)] pub proxy_resource: ProxyResource, #[doc = "The properties of the serial port."] #[serde(default, skip_serializing_if = "Option::is_none")] pub properties: Option<SerialPortProperties>, } impl SerialPort { pub fn new() 
-> Self { Self::default() } } #[doc = "Returns a connection string to the serial port of the resource."] #[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)] pub struct SerialPortConnectResult { #[doc = "Connection string to the serial port of the resource."] #[serde(rename = "connectionString", default, skip_serializing_if = "Option::is_none")] pub connection_string: Option<String>, } impl SerialPortConnectResult { pub fn new() -> Self { Self::default() } } #[doc = "The list serial ports operation response."] #[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)] pub struct SerialPortListResult { #[doc = "The list of serial ports."] #[serde( default, deserialize_with = "azure_core::util::deserialize_null_as_default", skip_serializing_if = "Vec::is_empty" )] pub value: Vec<SerialPort>, } impl SerialPortListResult { pub fn new() -> Self { Self::default() } } #[doc = "The properties of the serial port."] #[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)] pub struct SerialPortProperties { #[doc = "Specifies whether the port is enabled for a serial console connection."] #[serde(default, skip_serializing_if = "Option::is_none")] pub state: Option<serial_port_properties::State>, } impl SerialPortProperties { pub fn new() -> Self { Self::default() } } pub mod serial_port_properties { use super::*; #[doc = "Specifies whether the port is enabled for a serial console connection."] #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub enum State { #[serde(rename = "enabled")] Enabled, #[serde(rename = "disabled")] Disabled, } }
true
fc4dce424ed79e6437895ed6e3063f6fadfdaf28
Rust
HelveticaScenario/wasm-sketches
/crate/src/sketches/erase2.rs
UTF-8
5,892
2.6875
3
[ "MIT", "Apache-2.0" ]
permissive
use crate::pico::*; use crate::sketch::*; use std::cell::RefCell; use std::cmp; pub struct Erase2 { pub last_mouse: Option<Point>, pub radius: i32, pub count: u32, } impl Sketch for Erase2 { fn new() -> Erase2 { set_dimensions(1024, 1024); set_target(0); cls(0); set_target(1); cls(0); set_target(2); cls(0); let mut screen = screen(1); let real_width = WIDTH(); let real_height = HEIGHT(); // for y in 0..height { // for x in 0..width { // let i = y * width + x; // let num: u8 = rand::random(); // screen[i] = (num % 15) + 1; // } // } let width = 128; let width_mult = real_width / width; let height = 128; let height_mult = real_height / height; for y in 0..height { for x in 0..width { let x0 = (x * width_mult); let y0 = (y * height_mult); let x1 = x0 + width_mult; let y1 = y0 + height_mult; let c = (x + y) % 16; rect_fill(x0 as i32, y0 as i32, x1 as i32, y1 as i32, c as i32); } } // for y in 0..height { // for x in 0..width { // let i = y * width + x; // screen[i] = (((x / 8) + (y / 8) as usize) % 16) as u8; // } // } palt(0, false); palt(16, true); Erase2 { last_mouse: None, radius: 10, count: 0, } } fn update(&mut self, new_time: f32, old_time: f32) { set_target(1); self.count += 1; self.count = self.count % (16 * 8); let offset = self.count as usize; { let real_width = WIDTH(); let real_height = HEIGHT(); let width = 128; let width_mult = real_width / width; let height = 128; let height_mult = real_height / height; for y in 0..height { for x in 0..width { let x0 = (x * width_mult); let y0 = (y * height_mult); let x1 = x0 + width_mult; let y1 = y0 + height_mult; let c = (x + y + (offset / 2) as usize) % 16; rect_fill(x0 as i32, y0 as i32, x1 as i32, y1 as i32, c as i32); } } } set_target(2); let mouse_pos = get_mouse_pos(); if let Some(Point { x: new_x, y: new_y }) = mouse_pos { // cls(0); if let Some(Point { x: last_x, y: last_y, }) = self.last_mouse { if new_x == last_x && new_y == last_y { circ_fill(new_x, new_y, self.radius, 16); } else { // let mut x = (new_x - 
last_x) as f32; // let mut y = (new_y - last_y) as f32; // let mag = ((x * x).abs() + (y * y).abs()).sqrt(); // x /= mag; // y /= mag; // let (norm_x, norm_y) = (y, (-x)); // let (norm_x, norm_y) = ( // (norm_x * self.radius as f32) as i32, // (norm_y * self.radius as f32) as i32, // ); // tri_fill( // last_x - norm_x, // last_y - norm_y, // new_x - norm_x, // new_y - norm_y, // last_x + norm_x, // last_y + norm_y, // 1, // ); // tri_fill( // new_x - norm_x, // new_y - norm_y, // last_x + norm_x, // last_y + norm_y, // new_x + norm_x, // new_y + norm_y, // 1, // ); // circ_fill(new_x, new_y, self.radius, 1); // circ_fill(last_x, last_y, self.radius, 1); fat_line(last_x, last_y, new_x, new_y, self.radius, true, 16); } } else { circ_fill(new_x, new_y, self.radius, 16); } self.last_mouse = Some(Point { x: new_x, y: new_y }); } else { if let Some(Point { x, y }) = self.last_mouse { self.last_mouse = None; } } set_target(0); { let real_width = WIDTH(); let real_height = HEIGHT(); let width = 128; let width_mult = real_width / width; let height = 128; let height_mult = real_height / height; for y in 0..height { for x in 0..width { let x0 = (x * width_mult); let y0 = (y * height_mult); let x1 = x0 + width_mult; let y1 = y0 + height_mult; let v = (y as i32) * -2 + x as i32 + (offset / 4) as i32; let c = wrap_byte(v) % 16; rect_fill(x0 as i32, y0 as i32, x1 as i32, y1 as i32, c as i32); } } } // copy_screen(1, 0); // copy_screen_with_transparency(2, 0); copy_screen_with_transparency_mask(1, 0, 2); } } pub fn new() -> Box<RefCell<Sketch>> { Box::new(RefCell::new(Erase2::new())) as Box<RefCell<Sketch>> } pub static sketch: SketchDescriptor = SketchDescriptor { name: "Erase 2", constructor: &new, mobile: true, desktop: true, public: true, url: "erase-2", };
true
a962213398e933eef616611b7be5b54e8bafd7b4
Rust
Fanaen/web-tracing
/apps/wasm-module/src/wasm_api.rs
UTF-8
9,831
2.8125
3
[ "MIT" ]
permissive
use wasm_bindgen::prelude::*; use nalgebra_glm::Vec3; use nalgebra_glm::sqrt; use crate::pathtracer::camera::{Camera}; use crate::pathtracer::PathTracer; use crate::utils::set_panic_hook; use crate::pathtracer::material::LambertianMaterial; use crate::pathtracer::hit::HitableShape; use crate::pathtracer::math::saturate; use crate::pathtracer::sphere::Sphere; use crate::pathtracer::triangle::Triangle; use crate::pathtracer::pointlight::PointLight; #[wasm_bindgen] pub struct Context { pub camera_pos: Vector3, pub camera_rotation: Vector3, pub camera_fov: f32, pub sample_per_pixel: u16, pathtracer: PathTracer } #[wasm_bindgen] impl Context { pub fn new() -> Context { set_panic_hook(); let camera = Camera::new( Vec3::new(0.0, 0.0, 0.0), Vec3::new(0.0, 0.0, 0.0), 45., 320, 160, ); let pathtracer = PathTracer::new(camera, 1); Context { camera_pos: Vector3::new(0.0, 0.0, 0.0), camera_rotation: Vector3::new(0.0, 0.0, 0.0), camera_fov: 0.0, sample_per_pixel: 1, pathtracer } } pub fn draw( &mut self, tile_x: u32, tile_y: u32, tile_size: u32, width: u32, height: u32, ) -> Result<Vec<u8>, JsValue> { let camera = Camera::new( self.camera_pos.into(), self.camera_rotation.into(), self.camera_fov, width, height, ); self.pathtracer.camera = camera; self.pathtracer.samples = self.sample_per_pixel; // Call the pathtracer once per pixel and build the image let data_size = (tile_size * tile_size) as usize; let mut data = Vec::with_capacity(data_size); for y in (tile_y..(tile_y + tile_size)).rev() { for x in tile_x..(tile_x + tile_size) { let col = self.pathtracer.compute_pixel(x, y); let better_color = saturate(sqrt(&col)); data.push((255.99 * better_color.x) as u8); data.push((255.99 * better_color.y) as u8); data.push((255.99 * better_color.z) as u8); data.push(255); } } Ok(data) } /// Create a new light or edit an existing one. pub fn create_or_edit_light(&mut self, id: u32, x: f32, y: f32, z: f32, intensity: f32) { // Check if the light already exists. 
match self.pathtracer.lights.find(id) { // Edit the light. Some(light) => { light.position.x = x; light.position.y = y; light.position.z = z; light.intensity = intensity; }, // Create a new light. None => { let light = PointLight::new( id, Vec3::new(x, y, z), intensity); self.pathtracer.lights.add(light.into()); } } } pub fn remove_light(&mut self, id: u32) { self.pathtracer.lights.remove(id); } pub fn add_sphere(&mut self, id: u32, x: f32, y: f32, z: f32, radius: f32) { self.pathtracer.world.add(Sphere::new( id, Vec3::new(x, y, z), radius, LambertianMaterial { albedo: Vec3::new(0.5, 0.5, 0.5), }.into(), ).into()); } pub fn update_sphere(&mut self, id: u32, x: f32, y: f32, z: f32, radius: f32) -> bool { if let Some(shape) = self.pathtracer.world.find(id) { match shape { HitableShape::Sphere(sphere) => { sphere.center.x = x; sphere.center.y = y; sphere.center.z = z; sphere.radius = radius; }, _ => () } true } else { false } } pub fn remove_sphere(&mut self, id: u32) { self.pathtracer.world.remove(id); } pub fn add_triangle(&mut self, id: u32, a_x: f32, a_y: f32, a_z: f32, b_x: f32, b_y: f32, b_z: f32, c_x: f32, c_y: f32, c_z: f32) { self.pathtracer.world.add(Triangle::new( id, Vec3::new(a_x, a_y, a_z), Vec3::new(b_x, b_y, b_z), Vec3::new(c_x, c_y, c_z), LambertianMaterial { albedo: Vec3::new(0.5, 0.5, 0.5), }.into(), ).into()); } pub fn update_triangle(&mut self, id: u32, a_x: f32, a_y: f32, a_z: f32, b_x: f32, b_y: f32, b_z: f32, c_x: f32, c_y: f32, c_z: f32 ) -> bool { if let Some(shape) = self.pathtracer.world.find(id) { match shape { HitableShape::Triangle(triangle) => { triangle.vertex_a.x = a_x; triangle.vertex_a.y = a_y; triangle.vertex_a.z = a_z; triangle.vertex_b.x = b_x; triangle.vertex_b.y = b_y; triangle.vertex_b.z = b_z; triangle.vertex_c.x = c_x; triangle.vertex_c.y = c_y; triangle.vertex_c.z = c_z; }, _ => () } true } else { false } } pub fn remove_triangle(&mut self, id: u32) { self.pathtracer.world.remove(id); } pub fn add_model(&mut self, id: u32, 
x: f32, y: f32, z: f32, vertices: Vec<f32>, triangles: Vec<u16>) { self.remove_model(id); let pos = Vec3::new(x, y, z); for vertex in vertices.chunks(9) { assert_eq!(vertex.len(), 9); self.pathtracer.world.add(Triangle::new( id, Vec3::new(vertex[0], vertex[1],vertex[2]), Vec3::new(vertex[3], vertex[4],vertex[5]), Vec3::new(vertex[6], vertex[7],vertex[8]), LambertianMaterial { albedo: Vec3::new(0.5, 0.5, 0.5), }.into(), ).into()); } log(self.pathtracer.world.stats().as_str()); // for triangle in triangles.chunks(3) { // assert_eq!(triangle.len(), 3); // self.pathtracer.world.add(Triangle::new( // id, // extract_triangle(&vertices, triangle[0]), // extract_triangle(&vertices, triangle[1]), // extract_triangle(&vertices, triangle[2]), // LambertianMaterial { // albedo: Vec3::new(0.5, 0.5, 0.5), // }.into(), // ).into()); // } } pub fn update_model(&mut self, id: u32, x: f32, y: f32, z: f32, vertices: Vec<f32>, triangles: Vec<u16>) -> bool { self.remove_model(id); self.add_model(id, x, y, z, vertices, triangles); log(self.pathtracer.world.stats().as_str()); true } pub fn remove_model(&mut self, id: u32) { self.pathtracer.world.remove(id); } pub fn set_lambert(&mut self, id: u32, r: u32, g: u32, b: u32) -> bool { if let Some(shape) = self.pathtracer.world.find(id) { match shape { HitableShape::Sphere(sphere) => { sphere.material = LambertianMaterial { albedo: Vec3::new(r as f32 / 255.9, g as f32 / 255.9, b as f32 / 255.9), }.into(); }, HitableShape::Triangle(triangle) => { triangle.material = LambertianMaterial { albedo: Vec3::new(r as f32 / 255.9, g as f32 / 255.9, b as f32 / 255.9), }.into(); }, } true } else { false } } } fn extract_triangle(vertices: &Vec<f32>, index: u16) -> Vec3 { let index = index as usize; Vec3::new( vertices[index], vertices[index + 1], vertices[index + 2], ) } /// Wraps around the Vec3 struct from nalgebra for wasm-bindgen #[wasm_bindgen] #[derive(Debug, PartialEq, Clone, Copy)] pub struct Vector3 { pub x: f32, pub y: f32, pub z: f32, } 
#[wasm_bindgen] impl Vector3 { pub fn new(x: f32, y: f32, z: f32) -> Vector3 { Vector3 { x, y, z } } } impl From<Vec3> for Vector3 { fn from(vec: Vec3) -> Self { Vector3::new(vec.x, vec.y, vec.z) } } impl Into<Vec3> for Vector3 { fn into(self) -> Vec3 { Vec3::new(self.x, self.y, self.z) } } #[wasm_bindgen] extern "C" { // Use `js_namespace` here to bind `console.log(..)` instead of just // `log(..)` #[wasm_bindgen(js_namespace = console)] fn log(s: &str); // The `console.log` is quite polymorphic, so we can bind it with multiple // signatures. Note that we need to use `js_name` to ensure we always call // `log` in JS. #[wasm_bindgen(js_namespace = console, js_name = log)] fn log_u32(a: u32); // Multiple arguments too! #[wasm_bindgen(js_namespace = console, js_name = log)] fn log_many(a: &str, b: &str); }
true
ac1db22103c48c5d329041524e3863dc7146c7ce
Rust
sriniv27/rust-task-parser
/src/main.rs
UTF-8
1,105
3
3
[]
no_license
#![allow(unused)] use chrono::*; use csv; use serde::{Deserialize, Serialize}; use std::error::Error; use std::fs::File; use std::io::prelude::*; use std::process; // The `record` string is a 6 element string where the elements are // ordered as: // ID,Status,Subject,CreatedOn,ModifiedOn,CompletedOn // type Task = (String, String, String, String, String, String); #[derive(Debug,Serialize, Deserialize)] struct Task { id: String, status: Status, subject: String, created_on : String, modified_on: String, completed_on:String } #[derive(Deserialize, Serialize, Debug)] enum Status{ NotStarted, InProgress, Complete } fn main_func() -> Result<(), Box<dyn Error>> { let filename = "statusUpdate.csv"; let mut data_reader = csv::Reader::from_path(filename).expect("could not read from file"); let mut iter = data_reader.deserialize(); if let Some(res) = iter.next() { let task_item: Task = res?; } Ok(()) } fn main() { if let Err(err) = main_func() { println!("ERROR: {}", err); process::exit(1); } }
true
c07db2df695cde0288c281dd50edb56188dd3f35
Rust
PistonDevelopers/turbine
/reactive/src/ptr.rs
UTF-8
2,106
3.40625
3
[ "MIT", "Apache-2.0" ]
permissive
//! Pointers to higher order structures. use std::marker::PhantomData; /// Points to bool. #[derive(Copy, Clone, Debug, PartialEq, Eq)] pub struct Bool(usize); /// Points to a scalar. #[derive(Copy, Clone, Debug, PartialEq, Eq)] pub struct Point1<T>(pub(crate) usize, PhantomData<T>); /// Points to a 2D point. #[derive(Copy, Clone, Debug, PartialEq, Eq)] pub struct Point2<T>(pub(crate) usize, PhantomData<T>); /// Points to a 3D point. #[derive(Copy, Clone, Debug, PartialEq, Eq)] pub struct Point3<T>(pub(crate) usize, PhantomData<T>); /// Points to a 4D point. #[derive(Copy, Clone, Debug, PartialEq, Eq)] pub struct Point4<T>(pub(crate) usize, PhantomData<T>); /// Points to a spline. #[derive(Copy, Clone, Debug, PartialEq, Eq)] pub struct Spline<T>(usize, PhantomData<T>); /// Points to a surface. #[derive(Copy, Clone, Debug, PartialEq, Eq)] pub struct Surface<T>(usize, PhantomData<T>); /// Points to a color. #[derive(Copy, Clone, Debug, PartialEq, Eq)] pub struct Color(usize); /// Points to a color spline. #[derive(Copy, Clone, Debug, PartialEq, Eq)] pub struct ColorSpline(usize); /// Points to a bone. #[derive(Copy, Clone, Debug, PartialEq, Eq)] pub struct Bone<T>(usize, PhantomData<T>); macro_rules! 
from_impl { ($point:ident) => { impl<T> From<usize> for $point<T> { fn from(val: usize) -> $point<T> {$point(val, PhantomData)} } impl<T> From<$point<T>> for usize { fn from(val: $point<T>) -> usize {val.0} } } } from_impl!{Point1} from_impl!{Point2} from_impl!{Point3} from_impl!{Point4} from_impl!{Spline} from_impl!{Surface} from_impl!{Bone} impl From<usize> for Bool { fn from(val: usize) -> Bool {Bool(val)} } impl From<Bool> for usize { fn from(val: Bool) -> usize {val.0} } impl From<usize> for Color { fn from(val: usize) -> Color {Color(val)} } impl From<Color> for usize { fn from(val: Color) -> usize {val.0} } impl From<usize> for ColorSpline { fn from(val: usize) -> ColorSpline {ColorSpline(val)} } impl From<ColorSpline> for usize { fn from(val: ColorSpline) -> usize {val.0} }
true
21eeb4ebce0bdfa2e3f70fe32d714d08e0384b0f
Rust
kaj/rsass
/rsass/tests/spec/core_functions/selector/nest/list.rs
UTF-8
3,544
2.546875
3
[ "MIT", "Apache-2.0" ]
permissive
//! Tests auto-converted from "sass-spec/spec/core_functions/selector/nest/list.hrx" #[allow(unused)] fn runner() -> crate::TestRunner { super::runner().with_cwd("list") } mod list { #[allow(unused)] use super::runner; #[test] fn test_final() { assert_eq!( runner().ok("a {b: selector-nest(\"c\", \"d, e\")}\n"), "a {\ \n b: c d, c e;\ \n}\n" ); } #[test] fn initial() { assert_eq!( runner().ok("a {b: selector-nest(\"c, d\", \"e\")}\n"), "a {\ \n b: c e, d e;\ \n}\n" ); } #[test] fn many() { assert_eq!( runner() .ok("a {b: selector-nest(\"c, d\", \"e, f\", \"g, h\")}\n"), "a {\ \n b: c e g, c e h, c f g, c f h, d e g, d e h, d f g, d f h;\ \n}\n" ); } mod parent { #[allow(unused)] use super::runner; #[test] fn alone() { assert_eq!( runner().ok("a {b: selector-nest(\"c, d\", \"&\")}\n"), "a {\ \n b: c, d;\ \n}\n" ); } #[test] fn complex() { assert_eq!( runner().ok("a {b: selector-nest(\"c, d\", \"e &.f\")}\n"), "a {\ \n b: e c.f, e d.f;\ \n}\n" ); } #[test] fn compound() { assert_eq!( runner().ok("a {b: selector-nest(\"c, d\", \"&.e\")}\n"), "a {\ \n b: c.e, d.e;\ \n}\n" ); } #[test] fn in_one_complex() { assert_eq!( runner().ok("a {b: selector-nest(\"c, d\", \"&.e, f\")}\n"), "a {\ \n b: c.e, c f, d.e, d f;\ \n}\n" ); } #[test] #[ignore] // wrong result fn multiple() { assert_eq!( runner().ok("a {b: selector-nest(\"c, d\", \"&.e &.f\")}\n"), "a {\ \n b: c.e c.f, c.e d.f, d.e c.f, d.e d.f;\ \n}\n" ); } mod selector_pseudo { #[allow(unused)] use super::runner; #[test] #[ignore] // wrong result fn is() { assert_eq!( runner() .ok("a {b: selector-nest(\"c, d\", \":is(&)\")}\n"), "a {\ \n b: :is(c, d);\ \n}\n" ); } #[test] #[ignore] // wrong result fn matches() { assert_eq!( runner().ok( "a {b: selector-nest(\"c, d\", \":matches(&)\")}\n" ), "a {\ \n b: :matches(c, d);\ \n}\n" ); } #[test] #[ignore] // wrong result fn test_where() { assert_eq!( runner().ok( "a {b: selector-nest(\"c, d\", \":where(&)\")}\n" ), "a {\ \n b: :where(c, d);\ \n}\n" ); } } #[test] fn suffix() { 
assert_eq!( runner().ok("a {b: selector-nest(\"c, d\", \"&e\")}\n"), "a {\ \n b: ce, de;\ \n}\n" ); } } }
true
bb3118124db3f70a5ab071c4a11f36b332c10395
Rust
lightclient/nushell
/crates/nu-cli/src/commands/run_alias.rs
UTF-8
3,270
2.5625
3
[ "MIT" ]
permissive
use crate::commands::classified::block::run_block; use crate::commands::WholeStreamCommand; use crate::prelude::*; use derive_new::new; use nu_errors::ShellError; use nu_protocol::{hir::Block, ReturnSuccess, Signature, SyntaxShape}; #[derive(new, Clone)] pub struct AliasCommand { name: String, args: Vec<String>, block: Block, } #[async_trait] impl WholeStreamCommand for AliasCommand { fn name(&self) -> &str { &self.name } fn signature(&self) -> Signature { let mut alias = Signature::build(&self.name); for arg in &self.args { alias = alias.optional(arg, SyntaxShape::Any, ""); } alias } fn usage(&self) -> &str { "" } async fn run( &self, args: CommandArgs, registry: &CommandRegistry, ) -> Result<OutputStream, ShellError> { let tag = args.call_info.name_tag.clone(); let call_info = args.call_info.clone(); let registry = registry.clone(); let block = self.block.clone(); let alias_command = self.clone(); let mut context = Context::from_args(&args, &registry); let input = args.input; let stream = async_stream! 
{ let mut scope = call_info.scope.clone(); let evaluated = call_info.evaluate(&registry).await?; if let Some(positional) = &evaluated.args.positional { for (pos, arg) in positional.iter().enumerate() { scope.vars.insert(alias_command.args[pos].to_string(), arg.clone()); } } let result = run_block( &block, &mut context, input, &scope.it, &scope.vars, &scope.env, ).await; match result { Ok(stream) if stream.is_empty() => { yield Err(ShellError::labeled_error( "Expected a block", "alias needs a block", tag, )); } Ok(mut stream) => { // We collect first to ensure errors are put into the context while let Some(result) = stream.next().await { yield Ok(ReturnSuccess::Value(result)); } let errors = context.get_errors(); if let Some(x) = errors.first() { yield Err(ShellError::labeled_error_with_secondary( "Alias failed to run", "alias failed to run", tag.clone(), x.to_string(), tag )); } } Err(e) => { yield Err(ShellError::labeled_error_with_secondary( "Alias failed to run", "alias failed to run", tag.clone(), e.to_string(), tag )); } } }; Ok(stream.to_output_stream()) } }
true
9d0800813492d93e35047326c19447287bfca98f
Rust
AlephAlpha/rlifesrc
/lib/src/error.rs
UTF-8
836
2.875
3
[ "MIT" ]
permissive
//! All kinds of errors in this crate. use crate::cells::{Coord, State}; use ca_rules::ParseRuleError; use displaydoc::Display; use thiserror::Error; /// All kinds of errors in this crate. #[derive(Clone, Debug, PartialEq, Eq, Display, Error)] pub enum Error { /// Unable to set cell at {0:?}. SetCellError(Coord), /// Invalid rule: {0:?}. ParseRuleError(#[from] ParseRuleError), /// B0S8 rules are not supported yet. Please use the inverted rule. B0S8Error, /// Symmetry or transformation requires the world to be square. SquareWorldError, /// Symmetry or transformation requires the world to have no diagonal width. DiagonalWidthError, /// Width / height / period should be positive. NonPositiveError, /// Cell at {0:?} has invalid state: {1:?}. InvalidState(Coord, State), }
true
4c584fe6661c89861c9e28d14eff7d9d6301c04e
Rust
virtualgraham/wasm-graph-app
/src/graph/memstore/quadstore.rs
UTF-8
15,991
2.703125
3
[]
no_license
use crate::graph::value::Value; use crate::graph::refs::{Size, Ref, Namer, Content}; use crate::graph::iterator::{Shape, Null}; use crate::graph::quad::{QuadStore, Quad, Direction, Stats, Delta, IgnoreOptions, Procedure}; use std::rc::Rc; use std::cell::RefCell; use std::collections::HashMap; use std::collections::BTreeMap; use std::collections::BTreeSet; use super::iterator::MemStoreIterator; use super::all_iterator::MemStoreAllIterator; use std::sync::{Arc, RwLock}; use std::ops::Bound; pub struct InternalMemStore { vals: HashMap<Value, i64>, // value to value_id quads: HashMap<InternalQuad, i64>, // quad to quad_id prim: BTreeMap<i64, Primitive>, // value_id or quad_id to value or quad index: QuadDirectionIndex, // value_id and direction to quad id last: i64, // keeps track of ids for values and quads horizon: i64 // keeps track of ids for transactions } impl InternalMemStore { fn new() -> InternalMemStore { InternalMemStore { vals: HashMap::new(), quads: HashMap::new(), prim: BTreeMap::new(), index: QuadDirectionIndex::new(), last: 0, horizon: 0 } } fn add_primitive(&mut self, mut p: Primitive) -> i64 { self.last += 1; let id = self.last; p.id = id; p.refs = 1; self.prim.insert(id, p); return id } fn resolve_val(&mut self, v: &Value, add: bool) -> Option<i64> { if let Value::None = v { return None } let id = self.vals.get(v); if id.is_some() || !add { // if the value exsists and we are adding it, increment refs if id.is_some() && add { self.prim.get_mut(id.unwrap()).as_mut().unwrap().refs += 1; } // return val_id return id.map(|x| *x) } // value is new and we are adding it let id = self.add_primitive(Primitive::new_value(v.clone())); self.vals.insert(v.clone(), id); return Some(id) } fn resolve_quad(&mut self, q: &Quad, add: bool) -> Option<InternalQuad> { let mut p = InternalQuad{s: 0, p: 0, o: 0, l: 0}; // find all value ids for each direction of quad for dir in Direction::iterator() { let v = q.get(dir); if let Value::None = v { continue } let vid = 
self.resolve_val(v, add); if let Some(i) = vid { p.set_dir(dir, i); } else { // if any value is not found or undefined return zero value internal quad return None } } return Some(p) } fn find_quad(&mut self, q: &Quad) -> Option<i64> { let quad = self.resolve_quad(q, false); if let Some(q) = quad { if let Some(id) = self.quads.get(&q) { return Some(*id) } } None } fn delete_quad_nodes(&mut self, q: &InternalQuad) { for dir in Direction::iterator() { let id = q.dir(dir); if id == 0 { continue } let mut delete = false; if let Some(p) = self.prim.get_mut(&id) { p.refs -= 1; if p.refs < 0 { panic!("remove of delete node"); } else if p.refs == 0 { delete = true; } } if delete { self.delete(id); } } } fn resolve_quad_default(&mut self, q: &Quad, add: bool) -> InternalQuad { match self.resolve_quad(q, add) { Some(q) => q, None => InternalQuad{s: 0, p: 0, o: 0, l: 0} } } fn delete(&mut self, id: i64) -> bool { let mut quad:Option<InternalQuad> = None; if let Some(p) = self.prim.get(&id) { if p.is_node() { self.vals.remove(p.unwrap_value()); } else { quad = Some(p.unwrap_quad().clone()); } } else { return false } self.prim.remove(&id); if let Some(q) = quad { for d in Direction::iterator() { self.index.remove(&q.dir(d), d, &id); } self.quads.remove(&q); self.delete_quad_nodes(&q); } return true } fn add_quad(&mut self, q: Quad) -> i64 { // get value_ids for each direction let p = self.resolve_quad_default(&q, false); // get quad id let id = self.quads.get(&p); // if id already exsists, the quad therefor exsists already. 
return the id if let Some(i) = id { return *i } // get value_ids for each direction, this time inserting the values as neccecery let p = self.resolve_quad_default(&q, true); // add value primitive let pr = Primitive::new_quad(p.clone()); let id = self.add_primitive(pr); // add quad self.quads.insert(p.clone(), id); // add to index for d in Direction::iterator() { self.index.insert(p.dir(d), d, id); } return id; } fn lookup_val(&self, id: &i64) -> Option<Value> { match self.prim.get(id) { Some(p) => { match &p.content { PrimitiveContent::Value(v) => Some(v.clone()), _ => None } }, None => None } } fn internal_quad(&self, r: &Ref) -> Option<InternalQuad> { let key = if let Some(k) = r.key() { if let Some(i) = k.as_i64().as_ref() { self.prim.get(i) } else { None } } else { None }; match key { Some(p) => { match &p.content { PrimitiveContent::Quad(q) => Some(q.clone()), _ => None } }, None => None } } fn lookup_quad_dirs(&self, p: InternalQuad) -> Quad { let mut q = Quad::new_undefined_vals(); for dir in Direction::iterator() { let vid = p.dir(dir); if vid == 0 { continue } let val = self.lookup_val(&vid); if let Some(v) = val { q.set_val(dir, v); } } return q } // fn get_val(&self, v: &Value) -> Option<&i64> { // self.vals.get(v) // } // fn get_index(&self, d: &Direction, value_id: &i64) -> BTreeSet<i64> { // self.index.get(d, value_id) // } } pub trait PrimStore { fn len(&self) -> usize; fn get(&self, key: &i64) -> Option<&Primitive>; fn iter(&self) -> std::collections::btree_map::Iter<'_, i64, Primitive>; fn range(&self, bounds: (Bound<i64>, Bound<i64>)) -> std::collections::btree_map::Range<'_, i64, Primitive>; } impl PrimStore for InternalMemStore { fn len(&self) -> usize { self.prim.len() } fn get(&self, key: &i64) -> Option<&Primitive> { self.prim.get(key) } fn iter(&self) -> std::collections::btree_map::Iter<'_, i64, Primitive> { self.prim.iter() } fn range(&self, range: (Bound<i64>, Bound<i64>)) -> std::collections::btree_map::Range<'_, i64, Primitive> { 
self.prim.range(range) } } pub struct MemStore { store: Arc<RwLock<InternalMemStore>> } impl MemStore { pub fn new() -> MemStore { MemStore { store: Arc::new(RwLock::new(InternalMemStore::new())) } } } impl Namer for MemStore { fn value_of(&self, v: &Value) -> Option<Ref> { let datastore = self.store.read().unwrap(); if let Value::None = v { return None } let id = datastore.vals.get(v); match id { Some(i) => Some(Ref { k: Value::from(*i), content: Content::None }), None => None } } fn name_of(&self, key: &Ref) -> Option<Value> { let datastore = self.store.read().unwrap(); if let Content::Value(v) = &key.content { return Some(v.clone()) } let n = if let Some(k) = key.key() { k.as_i64() } else { None }; if let Some(i) = n { return datastore.lookup_val(&i) } else { return None } } } impl QuadStore for MemStore { fn quad(&self, r: &Ref) -> Option<Quad> { let datastore = self.store.read().unwrap(); let quad = datastore.internal_quad(r); match quad { Some(q) => Some(datastore.lookup_quad_dirs(q)), None => None } } fn quad_iterator(&self, d: &Direction, r: &Ref) -> Rc<RefCell<dyn Shape>> { let datastore = self.store.read().unwrap(); let id = if let Some(k) = r.key() { k.as_i64() } else { None }; if let Some(i) = id { let quad_ids = datastore.index.get(d, &i); if !quad_ids.is_empty() { return MemStoreIterator::new(Rc::new(quad_ids), d.clone()) } } Null::new() } fn quad_iterator_size(&self, d: &Direction, r: &Ref) -> Result<Size, String> { let datastore = self.store.read().unwrap(); let id = if let Some(k) = r.key() { k.as_i64() } else { None }; if let Some(i) = id { let quad_ids = datastore.index.get(d, &i); return Ok(Size{value: quad_ids.len() as i64, exact: true}) } return Ok(Size{value: 0, exact: true}) } fn quad_direction(&self, r: &Ref, d: &Direction) -> Option<Ref> { let datastore = self.store.read().unwrap(); let quad = datastore.internal_quad(r); println!("memstore quad_direction quad {:?}", quad); match quad { Some(q) => { let id = q.dir(d); if id == 0 { // The 
quad exsists, but the value is none return Some(Ref::none()) } return Some(Ref { k: Value::from(id), content: Content::None }) } // the quad does not exsist None => None } } fn stats(&self, exact: bool) -> Result<Stats, String> { let datastore = self.store.read().unwrap(); Ok(Stats { nodes: Size { value: datastore.vals.len() as i64, exact: true }, quads: Size { value: datastore.quads.len() as i64, exact: true } }) } fn apply_deltas(&mut self, deltas: Vec<Delta>, ignore_opts: &IgnoreOptions) -> Result<(), String> { let mut datastore = self.store.write().unwrap(); if !ignore_opts.ignore_dup || !ignore_opts.ignore_missing { for d in &deltas { match d.action { Procedure::Add => { if !ignore_opts.ignore_dup { if let Some(_) = datastore.find_quad(&d.quad) { return Err("ErrQuadExists".into()) } } }, Procedure::Delete => { if !ignore_opts.ignore_missing { if let Some(_) = datastore.find_quad(&d.quad) { } else { return Err("ErrQuadNotExist".into()) } } }, } } } for d in &deltas { match &d.action { Procedure::Add => { datastore.add_quad(d.quad.clone()); }, Procedure::Delete => { if let Some(id) = datastore.find_quad(&d.quad) { datastore.delete(id); } } } } datastore.horizon += 1; Ok(()) } fn nodes_all_iterator(&self) -> Rc<RefCell<dyn Shape>> { let datastore = self.store.read().unwrap(); MemStoreAllIterator::new(self.store.clone(), datastore.last, true) } fn quads_all_iterator(&self) -> Rc<RefCell<dyn Shape>> { let datastore = self.store.read().unwrap(); MemStoreAllIterator::new(self.store.clone(), datastore.last, false) } fn close(&self) -> Option<String> { None } } #[derive(Clone, Debug, Eq, PartialEq, Hash, Ord, PartialOrd)] pub struct QuadDirectionKey { direction: i8, value_id: i64, quad_id: i64, } impl QuadDirectionKey { pub fn new(value_id: i64, direction: &Direction, quad_id: i64) -> QuadDirectionKey { QuadDirectionKey { direction: direction.to_byte(), value_id, quad_id } } } struct QuadDirectionIndex { index: BTreeSet<QuadDirectionKey>, } impl QuadDirectionIndex { fn 
new() -> QuadDirectionIndex { QuadDirectionIndex { index: BTreeSet::new() } } // get all quad_ids that have the given value_id at the given location fn get(&self, d: &Direction, value_id: &i64) -> BTreeSet<i64> { let lower_bound = QuadDirectionKey::new(value_id.clone(), d, 0); self.index.range(lower_bound..).take_while(|k| { k.value_id == *value_id }).map(|k| k.quad_id).collect() } fn insert(&mut self, value_id: i64, d: &Direction, quad_id: i64) { self.index.insert(QuadDirectionKey::new(value_id, d, quad_id)); } fn remove(&mut self, value_id: &i64, d: &Direction, quad_id: &i64) { self.index.remove(&QuadDirectionKey::new(value_id.clone(), d, quad_id.clone())); } } pub enum PrimitiveContent { Value(Value), Quad(InternalQuad) } pub struct Primitive { pub id: i64, pub refs: i32, pub content: PrimitiveContent } impl Primitive { pub fn new_value(v: Value) -> Primitive { Primitive { id: 0, content: PrimitiveContent::Value(v), refs: 0 } } pub fn new_quad(q: InternalQuad) -> Primitive { Primitive { id: 0, content: PrimitiveContent::Quad(q), refs: 0 } } pub fn unwrap_value(&self) -> &Value { if let PrimitiveContent::Value(v) = &self.content { return &v } else { panic!("Primitive does not contain value") } } pub fn unwrap_quad(&self) -> &InternalQuad { if let PrimitiveContent::Quad(q) = &self.content { return &q } else { panic!("Primitive does not contain quad") } } pub fn is_node(&self) -> bool { if let PrimitiveContent::Value(_) = self.content { return true } return false } } #[derive(PartialEq, Hash, Clone, Debug)] pub struct InternalQuad { s: i64, p: i64, o: i64, l: i64, } impl Eq for InternalQuad {} impl InternalQuad { fn dir(&self, dir: &Direction) -> i64 { match dir { Direction::Subject => self.s, Direction::Predicate => self.p, Direction::Object => self.o, Direction::Label => self.l } } fn set_dir(&mut self, dir: &Direction, vid: i64) { match dir { Direction::Subject => self.s = vid, Direction::Predicate => self.p = vid, Direction::Object => self.o = vid, 
Direction::Label => self.l = vid, }; } }
true
7fb8ee3b1f71bbb43facc6c050f19f8d45d352de
Rust
Intrebute/MysteryParsing
/src/lib.rs
UTF-8
5,049
2.625
3
[]
no_license
use std::collections::HashMap; use indenty::{RoseTree, tree}; use nom::{ branch::alt, bytes::complete::tag, character::complete::{line_ending, not_line_ending, space0}, combinator::map, IResult, multi::many0, sequence::terminated, }; pub mod dialogue_action; #[derive(Debug, PartialEq)] pub enum Error<PE> { NomError(nom::Err<PE>), IndentError(indenty::IndentationError), GenericError(String), } impl<PE> From<nom::Err<PE>> for Error<PE> { fn from(e: nom::Err<PE>) -> Self { Error::NomError(e) } } impl<PE> From<indenty::IndentationError> for Error<PE> { fn from(e: indenty::IndentationError) -> Self { Error::IndentError(e) } } impl<PE> From<&str> for Error<PE> { fn from(i: &str) -> Self { Error::GenericError(i.into()) } } impl<PE> From<String> for Error<PE> { fn from(i: String) -> Self { Error::GenericError(i) } } pub type DialogueMap = HashMap<String, Vec<DialogueAction>>; pub enum DialogueAction { Say(Vec<String>), Ask(Question), Still(CharacterStill), } pub struct CharacterStill { pub character: String, pub still: String, } pub struct Answer { pub option_text: String, pub next_action: TagOrBlock, } pub struct Question { pub question: String, pub answers: Vec<Answer>, } pub enum TagOrBlock { Tag(String), Block(Vec<DialogueAction>), } pub struct TagBlock { pub tag: String, pub block: Vec<DialogueAction>, } fn hash_tag(input: &str) -> IResult<&str, &str> { let (input, rest) = tag("#")(input)?; Ok((input, rest)) } fn empty_line(input: &str) -> IResult<&str, ()> { let (input, _) = space0(input)?; let (input, _) = line_ending(input)?; Ok((input, ())) } fn indented_line(input: &str) -> IResult<&str, (&str, &str)> { let (input, indentation) = space0(input)?; let (input, line) = terminated(not_line_ending, line_ending)(input)?; Ok((input, (indentation, line))) } fn dense_lines(input: &str) -> IResult<&str, Vec<(&str, &str)>> { map( many0(alt(( map(empty_line, |()| None), map(indented_line, |l| Some(l)), ))), |mut v| v.drain(..).flatten().collect(), )(input) } pub fn 
forestify(input: &str) -> Result<Vec<RoseTree<&str>>, Error<(&str, nom::error::ErrorKind)>> { let (_, dlines) = dense_lines(input)?; Ok(RoseTree::from_prefixables(dlines.into_iter())?) } pub trait Parser<I, O, E>: Fn(I) -> Result<O, E> {} impl<I, O, E, T> Parser<I, O, E> for T where T: Fn(I) -> Result<O, E> {} mod tree_parsing { use indenty::RoseTree; use nom::{Compare, InputTake}; use nom::error::ParseError; use crate::{DialogueAction, Error, Parser}; pub type TResult<I, O, E = String> = Result<O, Error<(I, E)>>; pub fn node<T, E>(tree: &RoseTree<T>) -> TResult<RoseTree<T>, &T, E> { if tree.children.len() == 0 { Ok(&tree.value) } else { Err("node should not have children".into()) } } pub fn all_children<'t, T: 't, O, P, E>(p: P) -> impl Parser<&'t RoseTree<T>, Vec<O>, E> where P: Parser<&'t RoseTree<T>, O, E>, { move |tree: &'t RoseTree<T>| -> Result<Vec<O>, E> { let mut result = vec![]; for c in &tree.children { match p(&c) { Ok(o) => { result.push(o); } Err(e) => { return Err(e); } } } Ok(result) } } pub fn vector_tree<'t, T: 't, E>( tree: &RoseTree<T>, ) -> TResult<&RoseTree<T>, (&T, Vec<&T>), E> { let children: Vec<&T> = all_children(node)(tree).map_err(|_: Error<(RoseTree<T>, E)>| { Error::GenericError("all children should be nodes".into()) })?; Ok((&tree.value, children)) } pub fn say<'t, E: ParseError<&'t str>>( tree: &'t RoseTree<&'t str>, ) -> TResult<&'t RoseTree<&'t str>, DialogueAction, E> { use nom::bytes::complete::tag; use nom::IResult; let (head, contents) = vector_tree(tree)?; // let r: IResult<&str, &str> = tag("Say:")(*head); tag("Say:")(*head) .map(|_| { DialogueAction::Say( contents .into_iter() .map(|l| l.to_string()) .collect::<Vec<String>>(), ) }) .map_err(|_: nom::Err<nom::error::VerboseError<&str>>| { Error::GenericError("SAY".to_string()) }) } } #[cfg(test)] mod tests { use super::*; #[test] fn it_works() { let i = tree![0 => tree![2], tree![3], tree![5]]; let r: tree_parsing::TResult<&RoseTree<i32>, (&i32, Vec<&i32>), String> = 
tree_parsing::vector_tree(&i); assert_eq!(r, Ok((&0, vec![&2, &3, &5]))); } }
true
fb18747f2a0f44edd35fa6d13463fbb74668c320
Rust
vnermolaev/trace
/examples/example_format.rs
UTF-8
345
2.921875
3
[]
no_license
use trace::trace; fn main() { env_logger::init(); foo(1, 2); } #[trace(a = "received {:?}")] fn foo(a: i32, b: i32) { println!("I'm in foo!"); bar((a, b)); } #[trace(res = "returning {:?}", disable(b))] fn bar((a, b): (i32, i32)) -> i32 { println!("I'm in bar!"); if a == 1 { 2 } else { b } }
true
b1247dbf11be9a641c06fc031a24de20f0bf9f8f
Rust
BurntSushi/rust-snappy
/src/bytes.rs
UTF-8
3,933
3.125
3
[ "BSD-3-Clause" ]
permissive
use std::convert::TryInto; use std::io; /// Read a u16 in little endian format from the beginning of the given slice. /// This panics if the slice has length less than 2. pub fn read_u16_le(slice: &[u8]) -> u16 { u16::from_le_bytes(slice[..2].try_into().unwrap()) } /// Read a u24 (returned as a u32 with the most significant 8 bits always set /// to 0) in little endian format from the beginning of the given slice. This /// panics if the slice has length less than 3. pub fn read_u24_le(slice: &[u8]) -> u32 { slice[0] as u32 | (slice[1] as u32) << 8 | (slice[2] as u32) << 16 } /// Read a u32 in little endian format from the beginning of the given slice. /// This panics if the slice has length less than 4. pub fn read_u32_le(slice: &[u8]) -> u32 { u32::from_le_bytes(slice[..4].try_into().unwrap()) } /// Like read_u32_le, but from an io::Read implementation. If io::Read does /// not yield at least 4 bytes, then this returns an unexpected EOF error. pub fn io_read_u32_le<R: io::Read>(mut rdr: R) -> io::Result<u32> { let mut buf = [0; 4]; rdr.read_exact(&mut buf)?; Ok(u32::from_le_bytes(buf)) } /// Write a u16 in little endian format to the beginning of the given slice. /// This panics if the slice has length less than 2. pub fn write_u16_le(n: u16, slice: &mut [u8]) { assert!(slice.len() >= 2); let bytes = n.to_le_bytes(); slice[0] = bytes[0]; slice[1] = bytes[1]; } /// Write a u24 (given as a u32 where the most significant 8 bits are ignored) /// in little endian format to the beginning of the given slice. This panics /// if the slice has length less than 3. pub fn write_u24_le(n: u32, slice: &mut [u8]) { slice[0] = n as u8; slice[1] = (n >> 8) as u8; slice[2] = (n >> 16) as u8; } /// Write a u32 in little endian format to the beginning of the given slice. /// This panics if the slice has length less than 4. 
pub fn write_u32_le(n: u32, slice: &mut [u8]) { assert!(slice.len() >= 4); let bytes = n.to_le_bytes(); slice[0] = bytes[0]; slice[1] = bytes[1]; slice[2] = bytes[2]; slice[3] = bytes[3]; } /// https://developers.google.com/protocol-buffers/docs/encoding#varints pub fn write_varu64(data: &mut [u8], mut n: u64) -> usize { let mut i = 0; while n >= 0b1000_0000 { data[i] = (n as u8) | 0b1000_0000; n >>= 7; i += 1; } data[i] = n as u8; i + 1 } /// https://developers.google.com/protocol-buffers/docs/encoding#varints pub fn read_varu64(data: &[u8]) -> (u64, usize) { let mut n: u64 = 0; let mut shift: u32 = 0; for (i, &b) in data.iter().enumerate() { if b < 0b1000_0000 { return match (b as u64).checked_shl(shift) { None => (0, 0), Some(b) => (n | b, i + 1), }; } match ((b as u64) & 0b0111_1111).checked_shl(shift) { None => return (0, 0), Some(b) => n |= b, } shift += 7; } (0, 0) } /// Does an unaligned load of a little endian encoded u32. /// /// This is unsafe because `data` must point to some memory of size at least 4. pub unsafe fn loadu_u32_le(data: *const u8) -> u32 { loadu_u32_ne(data).to_le() } /// Does an unaligned load of a native endian encoded u32. /// /// This is unsafe because `data` must point to some memory of size at least 4. pub unsafe fn loadu_u32_ne(data: *const u8) -> u32 { (data as *const u32).read_unaligned() } /// Does an unaligned load of a little endian encoded u64. /// /// This is unsafe because `data` must point to some memory of size at least 8. pub unsafe fn loadu_u64_le(data: *const u8) -> u64 { loadu_u64_ne(data).to_le() } /// Does an unaligned load of a native endian encoded u64. /// /// This is unsafe because `data` must point to some memory of size at least 8. pub unsafe fn loadu_u64_ne(data: *const u8) -> u64 { (data as *const u64).read_unaligned() }
true
3e7b724f15491f20724ef2c93dbc2499ef163a42
Rust
rschifflin/networking
/gudp/src/service/builder.rs
UTF-8
1,875
2.890625
3
[]
no_license
use std::io; use std::net::SocketAddr; use clock::Clock; use super::{Conf, Service}; // NOTE: If we had generic specialization, this would not need 2 separate structs // NOTE: If we had a delegate pattern, we wouldnt have to have two identical impls // Instead, we make two separate structs and macro-ize their shared code pub struct Builder { conf: Conf } pub struct ClockedBuilder<C: 'static + Clock + Send> { clock: C, conf: Conf } macro_rules! impl_builder { ( $builder:ty ) => { pub fn example(mut self, example: usize) -> $builder { self.conf.example = example; self } pub fn on_packet_sent(mut self, f: Box<dyn FnMut((SocketAddr, SocketAddr), &[u8], u32) + Send>) -> $builder { self.conf.on_packet_sent = Some(f); self } pub fn on_packet_acked(mut self, f: Box<dyn FnMut((SocketAddr, SocketAddr), u32) + Send>) -> $builder { self.conf.on_packet_acked = Some(f); self } pub fn on_packet_lost(mut self, f: Box<dyn FnMut((SocketAddr, SocketAddr), u32) + Send>) -> $builder { self.conf.on_packet_lost = Some(f); self } } } // Default case with system clock impl Builder { impl_builder!(Builder); pub fn new() -> Builder { Builder { conf: Conf::default() } } pub fn clock<C: 'static + Clock + Send>(self, clock: C) -> ClockedBuilder<C> { ClockedBuilder { conf: self.conf, clock } } pub fn build(self) -> io::Result<Service> { Service::initialize(self.conf) } } // Custom clock case impl <C: 'static + Clock + Send> ClockedBuilder<C> { impl_builder!(ClockedBuilder<C>); pub fn clock<C2: 'static + Clock + Send>(self, clock: C2) -> ClockedBuilder<C2> { ClockedBuilder { conf: self.conf, clock } } pub fn build(self) -> io::Result<Service> { Service::initialize_with_clock(self.conf, self.clock) } }
true
16278d66a9e7779927f8873792859ae7799da97a
Rust
theseus-os/Theseus
/libs/debugit/src/lib.rs
UTF-8
1,217
3.296875
3
[ "MIT" ]
permissive
//! Use debug printlns, without the trait bounds (using specialization to //! find the right impl anyway). #![no_std] #![allow(incomplete_features)] #![feature(specialization)] #[cfg(test)] #[macro_use] extern crate std; use core::fmt; /// Formats the given argument using its `Debug` trait definition /// and returns the `core::fmt::Arguments` containing its Debug output, /// iff the argument's type implements the Debug trait. /// /// If it does *not* implement the Debug trait, then the type's name is printed instead. /// /// # Examples /// ``` /// #[macro_use] extern crate debugit; /// /// println!("{}", debugit!(my_struct)); /// ``` #[macro_export] macro_rules! debugit { ($value:expr) => { format_args!("{:?}", $crate::DebugIt(&$value)) } } /// A helper type for using with the `debugit!()` macro. #[derive(Copy, Clone)] pub struct DebugIt<T>(pub T); impl<T> fmt::Debug for DebugIt<T> { default fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{{ non-Debug: {} }}", core::any::type_name::<T>()) } } impl<T> fmt::Debug for DebugIt<T> where T: fmt::Debug { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { self.0.fmt(f) } }
true
b1f25a87e7cc32540b61c9ef08966a563813054e
Rust
mitermayer/rsocket-rust
/rsocket/src/payload/setup.rs
UTF-8
3,308
2.953125
3
[ "Apache-2.0" ]
permissive
use crate::frame::Setup; use crate::utils::DEFAULT_MIME_TYPE; use bytes::Bytes; use std::time::Duration; #[derive(Debug)] pub struct SetupPayload { m: Option<Bytes>, d: Option<Bytes>, keepalive: (Duration, Duration), mime_m: Option<String>, mime_d: Option<String>, } #[derive(Debug)] pub struct SetupPayloadBuilder { inner: SetupPayload, } impl SetupPayload { pub fn builder() -> SetupPayloadBuilder { SetupPayloadBuilder::new() } } impl SetupPayloadBuilder { fn new() -> SetupPayloadBuilder { SetupPayloadBuilder { inner: SetupPayload { m: None, d: None, keepalive: (Duration::from_secs(20), Duration::from_secs(90)), mime_m: Some(String::from(DEFAULT_MIME_TYPE)), mime_d: Some(String::from(DEFAULT_MIME_TYPE)), }, } } pub fn set_metadata(mut self, metadata: Bytes) -> Self { self.inner.m = Some(metadata); self } pub fn set_metadata_utf8(self, metadata: &str) -> Self { self.set_metadata(Bytes::from(String::from(metadata))) } pub fn set_data(mut self, data: Bytes) -> Self { self.inner.d = Some(data); self } pub fn set_data_utf8(self, data: &str) -> Self { self.set_data(Bytes::from(String::from(data))) } pub fn set_keepalive( mut self, tick_period: Duration, ack_timeout: Duration, missed_acks: u64, ) -> Self { let lifetime_mills = (ack_timeout.as_millis() as u64) * missed_acks; self.inner.keepalive = (tick_period, Duration::from_millis(lifetime_mills)); self } pub fn set_data_mime_type(mut self, mime: &str) -> Self { self.inner.mime_d = Some(String::from(mime)); self } pub fn set_metadata_mime_type(mut self, mime: &str) -> Self { self.inner.mime_m = Some(String::from(mime)); self } pub fn build(self) -> SetupPayload { self.inner } } impl SetupPayload { pub fn metadata(&self) -> &Option<Bytes> { &self.m } pub fn data(&self) -> &Option<Bytes> { &self.d } pub fn split(self) -> (Option<Bytes>, Option<Bytes>) { (self.d, self.m) } pub fn keepalive_interval(&self) -> Duration { self.keepalive.0 } pub fn keepalive_lifetime(&self) -> Duration { self.keepalive.1 } pub fn 
metadata_mime_type(&self) -> &Option<String> { &self.mime_m } pub fn data_mime_type(&self) -> &Option<String> { &self.mime_d } } impl From<Setup> for SetupPayload { fn from(input: Setup) -> SetupPayload { let mut bu = SetupPayload::builder(); // TODO: fill other properties. bu = bu.set_data_mime_type(input.get_mime_data()); bu = bu.set_metadata_mime_type(input.get_mime_metadata()); // bu.set_data_mime_type(String::input.get_mime_data()); let ka = (input.get_keepalive(), input.get_lifetime()); let (d, m) = input.split(); if let Some(b) = d { bu = bu.set_data(b); } if let Some(b) = m { bu = bu.set_metadata(b); } let mut pa = bu.build(); pa.keepalive = ka; pa } }
true
53c21d8fd7649f313089c74138a7791841b3ea21
Rust
suclogger/leetcode-rust
/chapter_5/sort-colors/src/main.rs
UTF-8
568
3.28125
3
[]
no_license
fn main() { } pub fn sort_colors(nums: &mut Vec<i32>) { let mut l = 0; let mut r = nums.len() - 1; let mut idx = 0; while idx <= r && r > 0 { if nums[idx] == 2 { Self::swap(nums, idx, r); if r > 0 { r-=1; } } else if nums[idx] == 0 { Self::swap(nums, idx, l); l+=1; idx+=1; } else { idx+=1; } } } fn swap(nums: &mut Vec<i32>, l: usize, r: usize) { let tmp = nums[l]; nums[l] = nums[r]; nums[r] = tmp; }
true
a1326e9190c567c0ed629df8bfed51f7c227acb3
Rust
TyPR124/winping
/src/pinger.rs
UTF-8
10,686
2.71875
3
[ "MIT", "Apache-2.0" ]
permissive
use winapi::{ shared::{ minwindef::TRUE, ntdef::{HANDLE, NULL}, ws2def::AF_INET6, ws2ipdef::SOCKADDR_IN6, }, um::{ handleapi::INVALID_HANDLE_VALUE, icmpapi::{ Icmp6CreateFile, Icmp6SendEcho2, IcmpCloseHandle, IcmpCreateFile, IcmpSendEcho, IcmpSendEcho2Ex, }, ipexport::{IP_FLAG_DF, IP_SUCCESS}, }, }; #[cfg(target_pointer_width = "32")] use winapi::um::ipexport::IP_OPTION_INFORMATION; #[cfg(target_pointer_width = "64")] use winapi::um::ipexport::IP_OPTION_INFORMATION32 as IP_OPTION_INFORMATION; use std::{ fmt::{self, Debug, Display, Formatter}, net::{IpAddr, Ipv4Addr, Ipv6Addr}, sync::Arc, }; use crate::{ util::{windows_ipv4, windows_ipv6}, Buffer, Error, }; struct Handles { v4: HANDLE, v6: HANDLE, } /// A pair of IP (v4 or v6) addresses, source and destination. #[derive(Copy, Clone, Eq, PartialEq, Debug, Hash)] pub enum IpPair { V4 { src: Ipv4Addr, dst: Ipv4Addr }, V6 { src: Ipv6Addr, dst: Ipv6Addr }, } /// A pinger that blocks when sending. #[derive(Clone)] pub struct Pinger { handles: Arc<Handles>, ttl: u8, df: bool, timeout: u32, } /// An error when creating a Pinger. pub enum CreateError { /// The ICMPv4 handle could not be created. NoV4(Pinger), /// The ICMPv6 handle could not be created. NoV6(Pinger), /// Neither handle could be created. None, } impl Debug for CreateError { fn fmt(&self, out: &mut Formatter) -> fmt::Result { write!( out, "{}", match self { CreateError::None => "Failed to create ICMP V4 and V6 handles", CreateError::NoV4(_) => "Failed to create ICMP V4 handle", CreateError::NoV6(_) => "Failed to create ICMP V6 handle", } ) } } impl Display for CreateError { fn fmt(&self, out: &mut Formatter) -> fmt::Result { Debug::fmt(self, out) } } impl std::error::Error for CreateError {} impl Pinger { /// Creates a new Pinger. /// If one ICMP handle (v4 or v6) fails to initialize, /// this will return the Pinger embedded in an error. The /// Pinger is still usable in this state, but sending /// on the failed version will return an Error. 
/// If both v4 and v6 fail, the pinger is not embedded /// in the error. pub fn new() -> Result<Self, CreateError> { let (v4, v6) = unsafe { (IcmpCreateFile(), Icmp6CreateFile()) }; let ret = Self { handles: Arc::new(Handles { v4, v6 }), ttl: 255, df: false, timeout: 2000, }; match (v4, v6) { (INVALID_HANDLE_VALUE, INVALID_HANDLE_VALUE) => Err(CreateError::None), (INVALID_HANDLE_VALUE, _) => Err(CreateError::NoV6(ret)), (_, INVALID_HANDLE_VALUE) => Err(CreateError::NoV4(ret)), (_, _) => Ok(ret), } } /// Creates a new Pinger, ignoring v6 failures. If you want to use /// both v4 and v6, use new() instead. pub fn new_v4() -> Option<Self> { match Self::new() { Ok(ret) | Err(CreateError::NoV6(ret)) => Some(ret), _ => None, } } /// Creates a new Pinger, ignoring v4 failures. If you want to use /// both v4 and v6, use new() instead. pub fn new_v6() -> Option<Self> { match Self::new() { Ok(ret) | Err(CreateError::NoV4(ret)) => Some(ret), _ => None, } } /// Sets the IP TTL for future requets. pub fn set_ttl(&mut self, ttl: u8) { self.ttl = ttl; } /// Gets the current IP TTL value. pub fn ttl(&self) -> u8 { self.ttl } /// Sets the IP Don't Fragment bit for future requests. pub fn set_df(&mut self, df: bool) { self.df = df; } /// Gets the current IP Don't Fragment bit. pub fn df(&self) -> bool { self.df } /// Sets the timeout, in milliseconds, for future requests. pub fn set_timeout(&mut self, timeout: u32) { self.timeout = timeout; } /// Gets the current timeout in milliseconds. pub fn timeout(&self) -> u32 { self.timeout } #[inline] fn make_ip_opts(&self) -> IP_OPTION_INFORMATION { IP_OPTION_INFORMATION { Ttl: self.ttl, Flags: if self.df { IP_FLAG_DF } else { 0 }, ..Default::default() } } /// Send an ICMPv4 request to the destination address. On success, returns the round trip time in milliseconds. 
pub fn send4(&self, dst: Ipv4Addr, buf: &mut Buffer) -> Result<u32, Error> { buf.init_for_send(); let ret = unsafe { IcmpSendEcho( self.handles.v4, windows_ipv4(dst), buf.request_data_ptr(), buf.request_data_len(), &mut self.make_ip_opts(), buf.reply_data_ptr(), buf.reply_data_len(), self.timeout, ) }; if ret == 0 { Err(Error::from_lasterror()) } else { let reply = buf.as_echo_reply().unwrap(); let (status, rtt) = (reply.Status, reply.RoundTripTime); buf.set_filled4(); if status == IP_SUCCESS { Ok(rtt) } else { Err(Error::from_iperror(status)) } } } /// Sends an ICMPv4 request from the source address to the destination address. On success, returns the round trip time in milliseconds. pub fn send4_from(&self, src: Ipv4Addr, dst: Ipv4Addr, buf: &mut Buffer) -> Result<u32, Error> { buf.init_for_send(); let ret = unsafe { IcmpSendEcho2Ex( self.handles.v4, NULL, // Event NULL as _, // ApcRoutine NULL, // ApcContext windows_ipv4(src), windows_ipv4(dst), buf.request_data_ptr(), buf.request_data_len(), &mut self.make_ip_opts(), buf.reply_data_ptr(), buf.reply_data_len(), self.timeout, ) }; if ret == 0 { Err(Error::from_lasterror()) } else { let reply = buf.as_echo_reply().unwrap(); let (status, rtt) = (reply.Status, reply.RoundTripTime); buf.set_filled4(); if status == IP_SUCCESS { Ok(rtt) } else { Err(Error::from_iperror(status)) } } } /// Sends an ICMPv6 request to the destination address. On success, returns the round trip time in milliseconds. 
pub fn send6(&self, dst: Ipv6Addr, buf: &mut Buffer) -> Result<u32, Error> { let mut dst = SOCKADDR_IN6 { sin6_family: AF_INET6 as _, sin6_addr: windows_ipv6(dst), ..Default::default() }; buf.init_for_send(); let ret = unsafe { Icmp6SendEcho2( self.handles.v6, NULL, // Event NULL as _, // ApcRoutine NULL, // ApcContext &mut SOCKADDR_IN6::default(), &mut dst, buf.request_data_ptr(), buf.request_data_len(), &mut self.make_ip_opts(), buf.reply_data_ptr(), buf.reply_data_len(), self.timeout, ) }; if ret == 0 { Err(Error::from_lasterror()) } else { let reply = buf.as_echo_reply6().unwrap(); let (status, rtt) = (reply.Status, reply.RoundTripTime as u32); buf.set_filled6(); if status == IP_SUCCESS { Ok(rtt) } else { Err(Error::from_iperror(status)) } } } /// Sends an ICMPv6 request from the source address to the destination address. On success, returns the round trip time in milliseconds. pub fn send6_from(&self, src: Ipv6Addr, dst: Ipv6Addr, buf: &mut Buffer) -> Result<u32, Error> { let mut dst = SOCKADDR_IN6 { sin6_family: AF_INET6 as _, sin6_addr: windows_ipv6(dst), ..Default::default() }; let mut src = SOCKADDR_IN6 { sin6_family: AF_INET6 as _, sin6_addr: windows_ipv6(src), ..Default::default() }; buf.init_for_send(); let ret = unsafe { Icmp6SendEcho2( self.handles.v6, NULL, // Event NULL as _, // ApcRoutine NULL, // ApcContext &mut src, &mut dst, buf.request_data_ptr(), buf.request_data_len(), &mut self.make_ip_opts(), buf.reply_data_ptr(), buf.reply_data_len(), self.timeout, ) }; if ret == 0 { Err(Error::from_lasterror()) } else { let reply = buf.as_echo_reply6().unwrap(); let (status, rtt) = (reply.Status, reply.RoundTripTime as u32); buf.set_filled6(); if status == IP_SUCCESS { Ok(rtt) } else { Err(Error::from_iperror(status)) } } } /// Sends an ICMP request to the destination address. Supports both v4 and v6. On success, returns the round trip time in milliseconds. 
pub fn send(&self, dst: IpAddr, buf: &mut Buffer) -> Result<u32, Error> { match dst { IpAddr::V4(ip) => self.send4(ip, buf), IpAddr::V6(ip) => self.send6(ip, buf), } } /// Sends an ICMP request from the source address to the destination address. Supports both v4 and v6. On success, returns the round trip time in milliseconds. pub fn send_from(&mut self, src_dst_pair: IpPair, buf: &mut Buffer) -> Result<u32, Error> { match src_dst_pair { IpPair::V4 { src, dst } => self.send4_from(src, dst, buf), IpPair::V6 { src, dst } => self.send6_from(src, dst, buf), } } } impl Drop for Handles { fn drop(&mut self) { if self.v4 != INVALID_HANDLE_VALUE { let ret = unsafe { IcmpCloseHandle(self.v4) }; debug_assert_eq!(TRUE, ret); } if self.v6 != INVALID_HANDLE_VALUE { let ret = unsafe { IcmpCloseHandle(self.v6) }; debug_assert_eq!(TRUE, ret); } } }
true
eec28ec9c362d4f0d0d45f22b9763b7e8d90998b
Rust
KanoczTomas/bitcoin_playground
/src/traits/gen_rand_u256.rs
UTF-8
945
3.09375
3
[]
no_license
use crate::types::U256; use rand::Rng; //inspired by https://github.com/rust-num/num-bigint/blob/master/src/bigrand.rs /// Trait to generate random U256 numbers pub trait GenRandU256 { /// Generate a random U256. fn gen_u256(&mut self) -> U256; /// Generate a random `U256` within the given range. The lower /// bound is inclusive; the upper bound is exclusive. Fails when /// the upper bound is not greater than the lower bound. fn gen_u256_range(&mut self, lbound: &U256, ubound: &U256) -> U256; } impl<R: Rng + ?Sized> GenRandU256 for R { fn gen_u256(&mut self) -> U256 { let mut data = [0u64; 4]; self.fill(&mut data); U256(data) } fn gen_u256_range(&mut self, low: &U256, high: &U256) -> U256 { let mut num; loop { num = self.gen_u256(); if num >= *low && num < *high { break; } } num } }
true
04bef5675553e4819d8ab7abc795024d1621da3f
Rust
tenx-tech/prost
/benches/varint.rs
UTF-8
2,953
2.765625
3
[ "Apache-2.0", "LicenseRef-scancode-unknown-license-reference" ]
permissive
#![feature(test)] extern crate bytes; extern crate prost; extern crate test; use bytes::IntoBuf; use prost::encoding::{ decode_varint, encode_varint, encoded_len_varint, }; macro_rules! varint_bench { ($encode_name:ident, $decode_name:ident, $encoded_len_name: ident, $encode:expr) => { #[bench] fn $encode_name(b: &mut test::Bencher) { let mut buf = Vec::with_capacity(100 * 10); b.iter(|| { buf.clear(); $encode(&mut buf); test::black_box(&buf[..]); }); b.bytes = 100 * 8; } #[bench] fn $decode_name(b: &mut test::Bencher) { let mut buf = Vec::with_capacity(100 * 10); $encode(&mut buf); let buf = &buf[..]; let mut values = [0u64; 100]; b.iter(|| { let mut buf = buf.into_buf(); for i in 0..100 { values[i] = decode_varint(&mut buf).unwrap(); } test::black_box(&values[..]); }); b.bytes = 100 * 8; } #[bench] fn $encoded_len_name(b: &mut test::Bencher) { let mut values = [0u64; 100]; { let mut buf = Vec::with_capacity(100 * 10); $encode(&mut buf); let mut buf = (&buf[..]).into_buf(); for i in 0..100 { values[i] = decode_varint(&mut buf).unwrap(); } } b.iter(|| { let mut sum = 0; for &value in values.iter() { sum += encoded_len_varint(value); } test::black_box(sum); }); b.bytes = 100 * 8; } } } /// Benchmark encoding and decoding 100 varints of mixed width (average 5.5 bytes). varint_bench!(encode_varint_mixed, decode_varint_mixed, encoded_len_varint_mixed, |ref mut buf| { for width in 0..10 { let exponent = width * 7; for offset in 0..10 { encode_varint(offset + (1 << exponent), buf); } } }); /// Benchmark encoding and decoding 100 small (1 byte) varints. varint_bench!(encode_varint_small, decode_varint_small, encoded_len_varint_small, |ref mut buf| { for value in 0..100 { encode_varint(value, buf); } }); /// Benchmark encoding and decoding 100 medium (5 byte) varints. 
varint_bench!(encode_varint_medium, decode_varint_medium, encoded_len_varint_medium, |ref mut buf| { let start = 1 << 28; for value in start..start + 100 { encode_varint(value, buf); } }); /// Benchmark encoding and decoding 100 large (10 byte) varints. varint_bench!(encode_varint_large, decode_varint_large, encoded_len_varint_large, |ref mut buf| { let start = 1 << 63; for value in start..start + 100 { encode_varint(value, buf); } });
true
ad249465f216c79cdfae119a97e164f0282c2585
Rust
adam0000345/sentry-rustdemo
/src/test.rs
UTF-8
4,896
2.515625
3
[]
no_license
extern crate actix_web; extern crate sentry; extern crate sentry_actix; #[macro_use] extern crate failure; #[macro_use] extern crate lazy_static; use std::num::ParseIntError; use std::env; use std::collections::HashMap; use sentry::integrations::failure::capture_error; use sentry::{configure_scope, User}; use actix_web::{http}; use actix_web::Json; use actix_web::Result; use serde::Deserialize; use serde::Serialize; use sentry::protocol::value::to_value; use std::sync::Mutex; use sentry::integrations::panic::register_panic_handler; lazy_static! { #[derive(Copy, Deserialize, Clone, Debug)] static ref HASHMAP: Mutex<HashMap<&'static str, u32>> = { let mut Inventory = HashMap::new(); Inventory.insert("wrench", 1); Inventory.insert("nails", 1); Inventory.insert("hammer", 1); Mutex::new(Inventory) }; } use actix_web::{server, App, HttpRequest, HttpResponse}; use sentry_actix::SentryMiddleware; fn multiply_new(first_number_str: &str, second_number_str: &str) -> Result<i32, ParseIntError> { let first_number: i32 = first_number_str.parse()?; let second_number: i32 = second_number_str.parse()?; Ok(first_number * second_number) } fn handled_new(_req: &HttpRequest) -> HttpResponse { let first = "t"; let second = "2"; let result = match multiply_new(first, second) { Ok(result) => result, Err(err) => { let foo = err.into(); capture_error(&foo); let result: HttpResponse = "try again".to_string().into(); return result; } }; let result: HttpResponse = (format!("{} * {} => {}", first, second, result)).into(); return result; } fn fakedatabseapp(_req: &HttpRequest) -> HttpResponse{ panic!("Unhandled request!"); } #[derive(Deserialize, Clone, Debug)] struct CardSubmittedPayload { card_id: i64, } #[derive(Serialize, Deserialize, Clone, Debug)] struct Item { id: String, name: String, price: f64, img: String, } #[derive(Serialize, Clone, Debug, Deserialize)] struct CheckoutPayload { email: String, cart: Vec<Item>, } fn process_order(cart: &Vec<Item>) -> HttpResponse { let mut map = 
HASHMAP.lock().unwrap(); println!("The entry for `0` is \"{:?}\".", map.get("foo")); for cartitem in cart.iter() { if map.get(cartitem.id.as_str()).map(|id| id <= &0).unwrap_or(false) { let mut string = String::new(); string.push_str("Not enough inventory for "); string.push_str(&cartitem.id); configure_scope(|scope| { scope.set_extra("inventory", to_value(map.clone()).unwrap()); }); capture_error(&format_err!("Error: {}", string)); let result: HttpResponse = string.to_string().into(); return result; } else if map.get(cartitem.id.as_str()).map(|id| id > &0).unwrap_or(false) { if let Some (id) = map.get_mut(cartitem.id.as_str()) { *id -= 1; println!("Success: {:?} was purchased, remaining stock is {:?}", cartitem.id, cartitem.id.as_str()); } else { false; } } } let result: HttpResponse = (format!("Everything ok")).into(); return result; } fn checkout(req: HttpRequest, body: Json<CheckoutPayload>) -> HttpResponse { configure_scope(|scope| { scope.set_user(Some(User { email: Some((*body.email).to_string()), ..Default::default() })); let mut string = String::new(); string.push_str(req.headers().get("X-Transaction-ID").unwrap().to_str().unwrap()); scope.set_tag("transaction_id", string); string = String::new(); string.push_str(req.headers().get("X-Session-ID").unwrap().to_str().unwrap()); scope.set_tag("session_id", string); string = String::new(); }); return process_order(&body.cart); } fn main() { register_panic_handler(); let _guard = sentry::init("https://[email protected]/5250920"); env::set_var("RUST_BACKTRACE", "1"); server::new(|| { App::new().middleware(SentryMiddleware::new()) .resource("/handled_new",|r| r.method(http::Method::GET).f(handled_new)) .resource("/unhandled",|r| r.method(http::Method::GET).f(fakedatabseapp)) .resource("/checkout", |r| r.method(http::Method::POST).with(checkout))}).bind("127.0.0.1:3001") .unwrap() .run(); sentry::integrations::panic::register_panic_handler(); }
true
2c372d0d915cb516ec8e6dc135a6fdc27b2d7091
Rust
jazzay/tauri
/tauri/src/salt.rs
UTF-8
1,437
3.03125
3
[ "CC-BY-NC-ND-4.0", "MIT" ]
permissive
use std::sync::Mutex; use lazy_static::lazy_static; use uuid::Uuid; use web_view::WebView; struct Salt { value: String, one_time: bool, } lazy_static! { static ref SALTS: Mutex<Vec<Salt>> = Mutex::new(vec![]); } pub fn generate() -> String { let salt = Uuid::new_v4(); SALTS .lock() .expect("Failed to lock Salt mutex: generate()") .push(Salt { value: salt.to_string(), one_time: true, }); salt.to_string() } pub fn generate_static() -> String { let salt = Uuid::new_v4(); SALTS .lock() .expect("Failed to lock SALT mutex: generate_static()") .push(Salt { value: salt.to_string(), one_time: false, }); salt.to_string() } pub fn is_valid(salt: String) -> bool { let mut salts = SALTS.lock().expect("Failed to lock Salt mutex: is_valid()"); match salts.iter().position(|s| s.value == salt) { Some(index) => { if salts[index].one_time { salts.remove(index); } true } None => false, } } pub fn validate<T: 'static>( webview: &mut WebView<'_, T>, salt: String, callback: String, error: String, ) { let response = if is_valid(salt) { Ok("'VALID'".to_string()) } else { Err("'INVALID SALT'".to_string()) }; let callback_string = crate::api::rpc::format_callback_result(response, callback, error); webview .eval(callback_string.as_str()) .expect("Failed to eval JS from validate()"); }
true
0ed73b1de5c45fe8f35e8130d00d0b93b2a3ce7d
Rust
pijamarda/project-euler
/src/main.rs
UTF-8
5,116
3.78125
4
[]
no_license
/* Project Euler Problems: https://projecteuler.net */ fn main() { println!("Project Euler"); /* problem1(1000); problem2(4000000); problem3(600851475143); problem4(56); problem5(20); */ problem4(5677848); } /* Multiples of 3 and 5 If we list all the natural numbers below 10 that are multiples of 3 or 5, we get 3, 5, 6 and 9. The sum of these multiples is 23. Find the sum of all the multiples of 3 or 5 below 1000. */ #[allow(dead_code)] fn problem1(num: i32) { let mut sum = 0; for i in 1..num { if (i % 3 == 0) || (i % 3 == 0) { sum += i; } } println!("The sum of all the multiples of 3 or 5 below {} is:",num); println!("{}",sum); } /* Even Fibonacci numbers Each new term in the Fibonacci sequence is generated by adding the previous two terms. By starting with 1 and 2, the first 10 terms will be: 1, 2, 3, 5, 8, 13, 21, 34, 55, 89, ... By considering the terms in the Fibonacci sequence whose values do not exceed four million, find the sum of the even-valued terms. */ #[allow(dead_code)] fn problem2(max: i32) { let mut n1 = 1; let mut n2 = 2; let mut suma = 0; let mut even_sum = 2; while suma < max { suma = n1 + n2; n1 = n2; n2 = suma; if (suma % 2 == 0) && (suma < max) { //print!("{} ", suma); even_sum += suma; } } println!("The sum of the even-valued terms that not exceed {} is: ", max); println!("{}", even_sum); } /* Largest prime factor The prime factors of 13195 are 5, 7, 13 and 29. What is the largest prime factor of the number 600.851.475.143 ? */ #[allow(dead_code)] fn problem3(number_original: u64) { let mut largest: u64 = 0; let mut i:u64 = 2; let mut number = number_original; while i <= number { //println!("Trying: {}",i); if number % i == 0 { if is_prime_u64(i) { //print!("{} ",i); largest = i; number = number / i; } } i += 1; } println!("The largest prime factor of {} is:", number_original); println!("{}", largest); } /* Largest palindrome product A palindromic number reads the same both ways. 
The largest palindrome made from the product of two 2-digit numbers is 9009 = 91 × 99. Find the largest palindrome made from the product of two 3-digit numbers. */ #[allow(dead_code)] fn problem4(num: i32) { check_palindrome(num); } /* Smallest multiple Problem 5 2520 is the smallest number that can be divided by each of the numbers from 1 to 10 without any remainder. What is the smallest positive number that is evenly divisible by all of the numbers from 1 to 20? */ #[allow(dead_code)] fn problem5(max: i32) { let mut found = false; let mut number = 1; let mut i = 1; while !found { while i <= max { if number % i == 0 { i += 1; } else { i = 1; number += 1; continue; } } found = true; println!("The smallest positive number that is evenly divisible by all of the numbers from 1 to {} is:", max); println!("{}", number); } } fn check_palindrome(num: i32) -> bool { let mut is_palindrome = false; let num_text = num.to_string(); let mut num_text_vec = num_text.chars(); let mut num_text_vec_reverse = num_text.chars().rev(); println!("{}", num_text); let mut len_word = num_text.len(); if len_word % 2 == 0 { len_word = len_word / 2; } else { len_word = (len_word - 1 ) / 2; } println!("{}",len_word); while len_word > 0 { let result1 = num_text_vec.next(); let result2 = num_text_vec_reverse.next(); println!("{:?} = {:?}", result1, result2); len_word = len_word - 1; } println!(""); if is_palindrome { is_palindrome=true; } return is_palindrome; } // Used to help on problem 2 #[allow(dead_code)] fn fibonacci(max: i32) { let mut n1 = 1; let mut n2 = 2; let mut suma = 0; print!("{} ", n1); print!("{} ", n2); while suma < max { suma = n1 + n2; n1 = n2; n2 = suma; if suma < max { print!("{} ", suma); } } println!(""); } #[allow(dead_code)] fn is_prime(number: i32) -> bool { let mut prime: bool = true; for i in 2..(number - 1) { if number % i == 0 { prime = false; } } return prime; } fn is_prime_u64(number: u64) -> bool { let mut prime: bool = true; for i in 2..number/2 { //print!("{} 
", i); if number % i == 0 { prime = false; break; } } return prime; }
true
d03f1fb1c67c5caff5a319067009b338ee7463a5
Rust
bcourtine/tmdb-client-rs
/tests/movie_api_tests.rs
UTF-8
1,839
2.71875
3
[]
no_license
use tmdb_client::apis::client::APIClient; #[test] fn movie_by_id_should_give_results() { let client = APIClient::new_from_env(); let result = client.movies_api().get_movie_details(19995, None, None, None); let movie = result.expect("Error querying movie 19995 (Avatar)"); assert_eq!(movie.title, Some("Avatar".to_owned())); // Append lists should not be valuated. assert!(movie.credits.is_none()); assert!(movie.videos.is_none()); assert!(movie.images.is_none()); assert!(movie.release_dates.is_none()); assert!(movie.translations.is_none()); assert!(movie.keywords.is_none()); assert!(movie.reviews.is_none()); assert!(movie.external_ids.is_none()); } #[test] fn appends_to_movie_by_id_should_be_valuated() { let client = APIClient::new_from_env(); let result = client.movies_api().get_movie_details( 19995, None, None, Some("credits,videos,images,release_dates,translations,keywords,reviews,external_ids"), ); let movie = result.expect("Error querying movie 19995 (Avatar)"); assert_eq!(movie.title, Some("Avatar".to_owned())); // Append lists should be valuated. assert!(movie.credits.is_some()); assert!(movie.videos.is_some()); assert!(movie.images.is_some()); assert!(movie.release_dates.is_some()); assert!(movie.translations.is_some()); assert!(movie.keywords.is_some()); assert!(movie.reviews.is_some()); assert!(movie.external_ids.is_some()); } #[test] fn movie_external_ids_should_be_valuated() { let client = APIClient::new_from_env(); let result = client.movies_api().get_movie_external_ids(19995); let external_ids = result.expect("Error querying movie 19995 (Avatar)"); assert_eq!(external_ids.id, Some(19995)); assert!(external_ids.imdb_id.is_some()); }
true
4c7e0e5aee6725db281d0f5ecf876e3e6bb82411
Rust
YuhanLiin/msp430fr2355-quickstart
/src/gpio.rs
UTF-8
14,759
2.515625
3
[ "MIT", "Apache-2.0", "LicenseRef-scancode-unknown-license-reference" ]
permissive
use core::marker::PhantomData; use msp430fr2355 as pac; pub trait PmmExt { fn freeze(self) -> Pmm; } pub struct Pmm(()); impl PmmExt for pac::PMM { fn freeze(self) -> Pmm { self.pm5ctl0.write(|w| w.locklpm5().locklpm5_0()); Pmm(()) } } pub trait GpioExt { type Gpio; fn constrain(self) -> Self::Gpio; } pub struct Output<TOK>(PhantomData<TOK>); pub struct Input<PULL, INTR>(PhantomData<PULL>, PhantomData<INTR>); pub struct Alternate1; pub struct Alternate2; pub struct Alternate3; pub struct Pullup; pub struct Pulldown; pub struct Floating; pub struct Unknown; pub struct Enabled; pub struct Disabled; pub struct Locked; pub struct Unlocked; pub struct NoOutToken; pub struct OutToken<'out>(PhantomData<&'out POUT>); pub trait ConvertToInput {} impl<TOK> ConvertToInput for Output<TOK> {} impl ConvertToInput for Unknown {} pub trait ConvertToOutput {} impl<PULL> ConvertToOutput for Input<PULL, Disabled> {} impl ConvertToOutput for Unknown {} pub trait Known {} impl Known for Floating {} impl Known for Pulldown {} impl Known for Pullup {} pub struct P1<DIR, LOCK> { periph: pac::P1, _dir: PhantomData<DIR>, _lock: PhantomData<LOCK>, } macro_rules! 
make_periph { ($Px:ident, $periph:expr) => { $Px { periph: $periph, _dir: PhantomData, _lock: PhantomData, } }; ($Px:ident) => { $Px { _dir: PhantomData, _lock: PhantomData, } }; } impl GpioExt for pac::P1 { type Gpio = P1<Unknown, Locked>; fn constrain(self) -> Self::Gpio { self.p1sel0.write(|w| unsafe { w.bits(0) }); self.p1sel1.write(|w| unsafe { w.bits(0) }); make_periph!(P1, self) } } impl<PULL, LOCK> P1<Input<PULL, Disabled>, LOCK> { pub fn pulldown(self) -> P1<Input<Pulldown, Disabled>, LOCK> { self.periph.p1out.write(|w| unsafe { w.bits(0x00) }); self.periph.p1ren.write(|w| unsafe { w.bits(0xFF) }); make_periph!(P1, self.periph) } pub fn pullup(self) -> P1<Input<Pullup, Disabled>, LOCK> { self.periph.p1out.write(|w| unsafe { w.bits(0xFF) }); self.periph.p1ren.write(|w| unsafe { w.bits(0xFF) }); make_periph!(P1, self.periph) } pub fn float(self) -> P1<Input<Floating, Disabled>, LOCK> { self.periph.p1ren.write(|w| unsafe { w.bits(0x00) }); make_periph!(P1, self.periph) } } impl<PULL: Known> P1<Input<PULL, Disabled>, Unlocked> { pub fn enable_intr_rising_edge(self) -> P1<Input<PULL, Enabled>, Unlocked> { self.periph.p1ies.write(|w| unsafe { w.bits(0x00) }); self.periph.p1ifg.write(|w| unsafe { w.bits(0x00) }); self.periph.p1ie.write(|w| unsafe { w.bits(0xFF) }); make_periph!(P1, self.periph) } pub fn enable_intr_falling_edge(self) -> P1<Input<PULL, Enabled>, Unlocked> { self.periph.p1ies.write(|w| unsafe { w.bits(0xFF) }); self.periph.p1ifg.write(|w| unsafe { w.bits(0x00) }); self.periph.p1ie.write(|w| unsafe { w.bits(0xFF) }); make_periph!(P1, self.periph) } } impl<PULL> P1<Input<PULL, Enabled>, Unlocked> { pub fn disable_intr(self) -> P1<Input<PULL, Disabled>, Unlocked> { self.periph.p1ie.write(|w| unsafe { w.bits(0x00) }); make_periph!(P1, self.periph) } } impl<PULL: Known, INTR> P1<Input<PULL, INTR>, Unlocked> { pub fn read(&self) -> u8 { self.periph.p1in.read().bits() } pub fn clear_intr(&mut self) { self.periph.p1ifg.write(|w| unsafe { w.bits(0x00) }); } 
pub fn set_intr(&mut self) { self.periph.p1ifg.write(|w| unsafe { w.bits(0xFF) }); } } // Don't need POUT token because we own the whole register already impl P1<Output<NoOutToken>, Unlocked> { pub fn write(&mut self, val: u8) { self.periph.p1out.write(|w| unsafe { w.bits(val) }); } pub fn toggle(&mut self) { self.periph.p1out.modify(|r, w| unsafe { w.bits(r.bits()) }); } } impl<DIR: ConvertToInput, LOCK> P1<DIR, LOCK> { pub fn to_input(self) -> P1<Input<Unknown, Disabled>, LOCK> { self.periph.p1dir.write(|w| unsafe { w.bits(0x00) }); make_periph!(P1, self.periph) } } impl<DIR: ConvertToOutput, LOCK> P1<DIR, LOCK> { pub fn to_output(self) -> P1<Output<NoOutToken>, LOCK> { self.periph.p1dir.write(|w| unsafe { w.bits(0xFF) }); make_periph!(P1, self.periph) } } impl<DIR> P1<DIR, Locked> { pub fn unlock(self, _lock: &Pmm) -> P1<DIR, Unlocked> { make_periph!(P1, self.periph) } } pub struct PSEL(()); pub struct POUT(()); impl<DIR, LOCK> P1<DIR, LOCK> { pub fn split(self) -> Parts<DIR, DIR, LOCK> { Parts { psel: PSEL(()), pout: POUT(()), p1_0: make_periph!(P1_0), p1_1: make_periph!(P1_1), } } } pub struct Parts<DIR0, DIR1, LOCK> { pub psel: PSEL, pub pout: POUT, pub p1_0: P1_0<DIR0, LOCK>, pub p1_1: P1_1<DIR1, LOCK>, } // P1.0 pub struct P1_0<DIR, LOCK> { _dir: PhantomData<DIR>, _lock: PhantomData<LOCK>, } impl<PULL: Known, INTR> P1_0<Input<PULL, INTR>, Unlocked> { pub fn read(&self) -> bool { unsafe { &*pac::P1::ptr() }.p1in.read().bits() & (1 << 0) != 0 } } impl P1_0<Output<NoOutToken>, Unlocked> { pub fn enable<'out>(self, _pout: &'out POUT) -> P1_0<Output<OutToken<'out>>, Unlocked> { make_periph!(P1_0) } } impl<'out> P1_0<Output<OutToken<'out>>, Unlocked> { pub fn set_bit(&mut self) { unsafe { &*pac::P1::ptr() } .p1out .modify(|r, w| unsafe { w.bits(r.bits() | 1) }); } pub fn clear_bit(&mut self) { unsafe { &*pac::P1::ptr() } .p1out .modify(|r, w| unsafe { w.bits(r.bits() & !1) }); } } impl<DIR> P1_0<DIR, Locked> { pub fn unlock(self, _lock: &Pmm) -> P1_0<DIR, 
Unlocked> { make_periph!(P1_0) } } impl<DIR, LOCK> P1_0<DIR, LOCK> { pub fn alternate1(self, _psel: &PSEL) -> P1_0<Alternate1, LOCK> { let periph = unsafe { &*pac::P1::ptr() }; periph.p1sel0.modify(|r, w| unsafe { w.bits(r.bits() | 1) }); periph .p1sel1 .modify(|r, w| unsafe { w.bits(r.bits() & !1) }); make_periph!(P1_0) } pub fn alternate2(self, _psel: &PSEL) -> P1_0<Alternate2, LOCK> { let periph = unsafe { &*pac::P1::ptr() }; periph .p1sel0 .modify(|r, w| unsafe { w.bits(r.bits() & !1) }); periph.p1sel1.modify(|r, w| unsafe { w.bits(r.bits() | 1) }); make_periph!(P1_0) } pub fn alternate3(self, _psel: &PSEL) -> P1_0<Alternate3, LOCK> { let periph = unsafe { &*pac::P1::ptr() }; periph.p1sel0.modify(|r, w| unsafe { w.bits(r.bits() | 1) }); periph.p1sel1.modify(|r, w| unsafe { w.bits(r.bits() | 1) }); make_periph!(P1_0) } } // P1.1 pub struct P1_1<DIR, LOCK> { _dir: PhantomData<DIR>, _lock: PhantomData<LOCK>, } impl<PULL: Known, INTR> P1_1<Input<PULL, INTR>, Unlocked> { pub fn read(&self) -> bool { unsafe { &*pac::P1::ptr() }.p1in.read().bits() & (1 << 0) != 0 } } impl P1_1<Output<NoOutToken>, Unlocked> { pub fn enable<'out>(self, _pout: &'out POUT) -> P1_1<Output<OutToken<'out>>, Unlocked> { make_periph!(P1_1) } } impl<'out> P1_1<Output<OutToken<'out>>, Unlocked> { pub fn set_bit(&mut self) { unsafe { &*pac::P1::ptr() } .p1out .modify(|r, w| unsafe { w.bits(r.bits() | 1) }); } pub fn clear_bit(&mut self) { unsafe { &*pac::P1::ptr() } .p1out .modify(|r, w| unsafe { w.bits(r.bits() & !1) }); } } impl<DIR> P1_1<DIR, Locked> { pub fn unlock(self, _lock: &Pmm) -> P1_1<DIR, Unlocked> { make_periph!(P1_1) } } impl<DIR, LOCK> P1_1<DIR, LOCK> { pub fn alternate1(self, _psel: &PSEL) -> P1_1<Alternate1, LOCK> { let periph = unsafe { &*pac::P1::ptr() }; periph.p1sel0.modify(|r, w| unsafe { w.bits(r.bits() | 1) }); periph .p1sel1 .modify(|r, w| unsafe { w.bits(r.bits() & !1) }); make_periph!(P1_1) } pub fn alternate2(self, _psel: &PSEL) -> P1_1<Alternate2, LOCK> { let periph = 
unsafe { &*pac::P1::ptr() }; periph .p1sel0 .modify(|r, w| unsafe { w.bits(r.bits() & !1) }); periph.p1sel1.modify(|r, w| unsafe { w.bits(r.bits() | 1) }); make_periph!(P1_1) } pub fn alternate3(self, _psel: &PSEL) -> P1_1<Alternate3, LOCK> { let periph = unsafe { &*pac::P1::ptr() }; periph.p1sel0.modify(|r, w| unsafe { w.bits(r.bits() | 1) }); periph.p1sel1.modify(|r, w| unsafe { w.bits(r.bits() | 1) }); make_periph!(P1_1) } } /**************************************************************************/ // Proxy stuff starts here trait WritePdir { fn pdir_on(&self) -> bool; } trait WritePout { fn pout_on(&self) -> bool; } trait WritePren { fn pren_on(&self) -> bool; } macro_rules! make_proxy { ($Px:ident) => { $Px { _dir: PhantomData, _lock: PhantomData, out: false, } }; ($Px:ident, $out:expr) => { $Px { _dir: PhantomData, _lock: PhantomData, out: $out, } }; } pub struct P1_0Proxy<DIR, LOCK> { _dir: PhantomData<DIR>, _lock: PhantomData<LOCK>, out: bool, } impl<DIR, LOCK> WritePdir for P1_0Proxy<DIR, LOCK> { default fn pdir_on(&self) -> bool { false } } impl<DIR, LOCK> WritePout for P1_0Proxy<DIR, LOCK> { default fn pout_on(&self) -> bool { false } } impl<DIR, LOCK> WritePren for P1_0Proxy<DIR, LOCK> { default fn pren_on(&self) -> bool { false } } impl<LOCK> WritePout for P1_0Proxy<Output<NoOutToken>, LOCK> { fn pout_on(&self) -> bool { self.out } } impl<INTR, LOCK> WritePout for P1_0Proxy<Input<Pullup, INTR>, LOCK> { fn pout_on(&self) -> bool { true } } impl<LOCK> WritePdir for P1_0Proxy<Output<NoOutToken>, LOCK> { fn pdir_on(&self) -> bool { true } } impl<INTR, LOCK> WritePren for P1_0Proxy<Input<Pullup, INTR>, LOCK> { fn pren_on(&self) -> bool { true } } impl<INTR, LOCK> WritePren for P1_0Proxy<Input<Pulldown, INTR>, LOCK> { fn pren_on(&self) -> bool { true } } impl<PULL, LOCK> P1_0Proxy<Input<PULL, Disabled>, LOCK> { pub fn pulldown(self) -> P1_0Proxy<Input<Pulldown, Disabled>, LOCK> { make_proxy!(P1_0Proxy) } pub fn pullup(self) -> P1_0Proxy<Input<Pullup, 
Disabled>, LOCK> { make_proxy!(P1_0Proxy) } pub fn float(self) -> P1_0Proxy<Input<Floating, Disabled>, LOCK> { make_proxy!(P1_0Proxy) } } impl<DIR: ConvertToInput, LOCK> P1_0Proxy<DIR, LOCK> { pub fn to_input(self) -> P1_0Proxy<Input<Unknown, Disabled>, LOCK> { make_proxy!(P1_0Proxy) } } impl<DIR: ConvertToOutput, LOCK> P1_0Proxy<DIR, LOCK> { pub fn to_output(self) -> P1_0Proxy<Output<NoOutToken>, LOCK> { make_proxy!(P1_0Proxy, false) } } impl P1_0Proxy<Output<NoOutToken>, Unlocked> { pub fn on(self) -> P1_0Proxy<Output<NoOutToken>, Unlocked> { make_proxy!(P1_0Proxy, true) } pub fn off(self) -> P1_0Proxy<Output<NoOutToken>, Unlocked> { make_proxy!(P1_0Proxy, false) } } pub struct P1_1Proxy<DIR, LOCK> { _dir: PhantomData<DIR>, _lock: PhantomData<LOCK>, out: bool, } impl<DIR, LOCK> WritePdir for P1_1Proxy<DIR, LOCK> { default fn pdir_on(&self) -> bool { false } } impl<DIR, LOCK> WritePout for P1_1Proxy<DIR, LOCK> { default fn pout_on(&self) -> bool { false } } impl<DIR, LOCK> WritePren for P1_1Proxy<DIR, LOCK> { default fn pren_on(&self) -> bool { false } } impl<LOCK> WritePout for P1_1Proxy<Output<NoOutToken>, LOCK> { fn pout_on(&self) -> bool { self.out } } impl<INTR, LOCK> WritePout for P1_1Proxy<Input<Pullup, INTR>, LOCK> { fn pout_on(&self) -> bool { true } } impl<LOCK> WritePdir for P1_1Proxy<Output<NoOutToken>, LOCK> { fn pdir_on(&self) -> bool { true } } impl<INTR, LOCK> WritePren for P1_1Proxy<Input<Pullup, INTR>, LOCK> { fn pren_on(&self) -> bool { true } } impl<INTR, LOCK> WritePren for P1_1Proxy<Input<Pulldown, INTR>, LOCK> { fn pren_on(&self) -> bool { true } } impl<PULL, LOCK> P1_1Proxy<Input<PULL, Disabled>, LOCK> { pub fn pulldown(self) -> P1_1Proxy<Input<Pulldown, Disabled>, LOCK> { make_proxy!(P1_1Proxy) } pub fn pullup(self) -> P1_1Proxy<Input<Pullup, Disabled>, LOCK> { make_proxy!(P1_1Proxy) } pub fn float(self) -> P1_1Proxy<Input<Floating, Disabled>, LOCK> { make_proxy!(P1_1Proxy) } } impl<DIR: ConvertToInput, LOCK> P1_1Proxy<DIR, LOCK> { pub fn 
to_input(self) -> P1_1Proxy<Input<Unknown, Disabled>, LOCK> { make_proxy!(P1_1Proxy) } } impl<DIR: ConvertToOutput, LOCK> P1_1Proxy<DIR, LOCK> { pub fn to_output(self) -> P1_1Proxy<Output<NoOutToken>, LOCK> { make_proxy!(P1_1Proxy, false) } } impl P1_1Proxy<Output<NoOutToken>, Unlocked> { pub fn on(self) -> P1_1Proxy<Output<NoOutToken>, Unlocked> { make_proxy!(P1_1Proxy, true) } pub fn off(self) -> P1_1Proxy<Output<NoOutToken>, Unlocked> { make_proxy!(P1_1Proxy, false) } } pub struct BatchParts<DIR0, DIR1, LOCK> { pub p1_0: P1_0Proxy<DIR0, LOCK>, pub p1_1: P1_1Proxy<DIR1, LOCK>, } impl<DIR0, DIR1, LOCK> Parts<DIR0, DIR1, LOCK> { pub fn batch(self) -> BatchParts<DIR0, DIR1, LOCK> { BatchParts { p1_0: make_proxy!(P1_0Proxy), p1_1: make_proxy!(P1_1Proxy), } } } impl<DIR0, DIR1, LOCK> BatchParts<DIR0, DIR1, LOCK> { pub fn write(self) -> Parts<DIR0, DIR1, LOCK> { let mut pdir: u8 = 0; let mut pout: u8 = 0; let mut pren: u8 = 0; pdir |= self.p1_0.pdir_on() as u8; pout |= self.p1_0.pout_on() as u8; pren |= self.p1_0.pren_on() as u8; pdir |= (self.p1_1.pdir_on() as u8) << 1; pout |= (self.p1_1.pout_on() as u8) << 1; pren |= (self.p1_1.pren_on() as u8) << 1; let p1 = unsafe { &*pac::P1::ptr() }; p1.p1dir.write(|w| unsafe { w.bits(pdir) }); p1.p1out.write(|w| unsafe { w.bits(pout) }); p1.p1ren.write(|w| unsafe { w.bits(pren) }); Parts { psel: PSEL(()), pout: POUT(()), p1_0: make_periph!(P1_0), p1_1: make_periph!(P1_1), } } }
true
dd0f01346d9f0bfe270e6ebf72065cf8d26da156
Rust
gottstech/grinrelay
/src/broker/stomp/frame.rs
UTF-8
5,821
2.859375
3
[ "Apache-2.0" ]
permissive
#![macro_use] use bytes::BytesMut; use rustc_serialize::hex::ToHex; use serde::{Serialize, Serializer}; use std::fmt; use std::fmt::Formatter; use std::str::from_utf8; use super::header::*; use super::subscription::AckMode; #[derive(Copy, Clone, Debug, Serialize)] pub enum Command { Send, Subscribe, Unsubscribe, Begin, Commit, Abort, Ack, Nack, Disconnect, Connect, Stomp, Connected, Message, Receipt, Error, } #[macro_export] macro_rules! header_list [ ($($header: expr), *) => ({ let header_list = HeaderList::new(); $(header_list.push($header);)* header_list }); ($($key:expr => $value: expr), *) => ({ let mut header_list = HeaderList::new(); $(header_list.push(Header::new($key, $value));)* header_list }) ]; impl Command { pub fn as_str(&self) -> &'static str { use self::Command::*; match *self { Send => "SEND", Subscribe => "SUBSCRIBE", Unsubscribe => "UNSUBSCRIBE", Begin => "BEGIN", Commit => "COMMIT", Abort => "ABORT", Ack => "ACK", Nack => "NACK", Disconnect => "DISCONNECT", Connect => "CONNECT", Stomp => "STOMP", Connected => "CONNECTED", Message => "MESSAGE", Receipt => "RECEIPT", Error => "ERROR", } } } impl fmt::Display for Command { fn fmt(&self, f: &mut Formatter) -> fmt::Result { write!(f, "{}", self.as_str()) } } pub trait ToFrameBody { fn to_frame_body<'a>(&'a self) -> &'a [u8]; } impl<'b> ToFrameBody for &'b [u8] { fn to_frame_body<'a>(&'a self) -> &'a [u8] { self } } impl<'b> ToFrameBody for &'b str { fn to_frame_body<'a>(&'a self) -> &'a [u8] { self.as_bytes() } } impl ToFrameBody for String { fn to_frame_body<'a>(&'a self) -> &'a [u8] { self.as_str().as_bytes() } } #[derive(Clone, Debug, Serialize)] pub struct Frame { pub command: Command, pub headers: HeaderList, #[serde(serialize_with = "vec_to_hex")] pub body: Vec<u8>, } /// Serializes `Vec<u8>` to a lowercase hex string. 
pub fn vec_to_hex<T, S>(buffer: &T, serializer: S) -> Result<S::Ok, S::Error> where T: AsRef<[u8]>, S: Serializer, { serializer.serialize_str(&buffer.as_ref().to_hex()) } #[derive(Debug)] pub enum Transmission { HeartBeat, CompleteFrame(Frame), } impl Transmission { pub fn write(&self, out: &mut BytesMut) { match *self { Transmission::HeartBeat => out.extend("\n".as_bytes()), Transmission::CompleteFrame(ref frame) => frame.write(out), } } } impl fmt::Display for Frame { fn fmt(&self, f: &mut Formatter) -> fmt::Result { let space_required = self.count_bytes(); let mut frame_string = String::with_capacity(space_required); // Faster to just allocate? frame_string.push_str(self.command.as_str()); frame_string.push_str("\n"); for header in self.headers.iter() { frame_string.push_str(&header.get_raw()); frame_string.push_str("\n"); } frame_string.push_str("\n"); let body_string: &str = match from_utf8(self.body.as_ref()) { Ok(ref s) => *s, Err(_) => "<Binary content>", // Space is wasted in this case. Could shrink to fit? 
}; frame_string.push_str(body_string); write!(f, "{}", frame_string) } } impl Frame { fn empty(command: Command, headers: HeaderList) -> Self { Self { command, headers, body: Vec::new(), } } fn count_bytes(&self) -> usize { let mut space_required: usize = 0; // Add one to space calculations to make room for '\n' space_required += self.command.as_str().len() + 1; space_required += self .headers .iter() .fold(0, |length, header| length + header.get_raw().len() + 1); space_required += 1; // Newline at end of headers space_required += self.body.len(); space_required } pub fn write(&self, out: &mut BytesMut) { trace!( "Sending frame:\n{}", serde_json::to_string_pretty(&self).unwrap() ); out.extend(self.command.as_str().as_bytes()); out.extend("\n".as_bytes()); for header in self.headers.iter() { out.extend(header.get_raw().as_bytes()); out.extend("\n".as_bytes()); } out.extend("\n".as_bytes()); out.extend(&self.body); out.extend(&[0]); } pub fn connect(tx_heartbeat_ms: u32, rx_heartbeat_ms: u32) -> Self { let heart_beat = format!("{},{}", tx_heartbeat_ms, rx_heartbeat_ms); Self::empty( Command::Connect, header_list![ ACCEPT_VERSION => "1.2", HEART_BEAT => heart_beat.as_ref(), CONTENT_LENGTH => "0" ], ) } pub fn disconnect() -> Self { Self::empty( Command::Disconnect, header_list![ RECEIPT => "msg/disconnect" ], ) } pub fn subscribe(subscription_id: &str, destination: &str, ack_mode: AckMode) -> Self { Self::empty( Command::Subscribe, header_list![ DESTINATION => destination, ID => subscription_id, ACK => ack_mode.as_str() ], ) } pub fn unsubscribe(subscription_id: &str) -> Self { Self::empty( Command::Unsubscribe, header_list![ ID => subscription_id ], ) } pub fn ack(ack_id: &str) -> Self { Self::empty( Command::Ack, header_list![ ID => ack_id ], ) } pub fn nack(message_id: &str) -> Self { Self::empty( Command::Nack, header_list![ ID => message_id ], ) } pub fn send(destination: &str, body: &[u8]) -> Self { Self { command: Command::Send, headers: header_list![ 
DESTINATION => destination, CONTENT_LENGTH => body.len().to_string().as_ref() ], body: body.into(), } } pub fn begin(transaction_id: &str) -> Self { Self::empty( Command::Begin, header_list![ TRANSACTION => transaction_id ], ) } pub fn abort(transaction_id: &str) -> Self { Self::empty( Command::Abort, header_list![ TRANSACTION => transaction_id ], ) } pub fn commit(transaction_id: &str) -> Self { Self::empty( Command::Commit, header_list![ TRANSACTION => transaction_id ], ) } }
true
2f883d48ac9e111cd61b9468229321bc8b25c2af
Rust
paxyqi/cs140e
/os/kernel/src/shell.rs
UTF-8
3,582
3.484375
3
[]
no_license
//use std::path::{Path,PathBuf}; use stack_vec::StackVec; use console::{kprint, kprintln, CONSOLE}; use std::str; use std::io::Write; /// Error type for `Command` parse failures. #[derive(Debug)] enum Error { Empty, TooManyArgs } /// A structure representing a single shell command. struct Command<'a> { args: StackVec<'a, &'a str> } impl<'a> Command<'a> { /// Parse a command from a string `s` using `buf` as storage for the /// arguments. /// /// # Errors /// /// If `s` contains no arguments, returns `Error::Empty`. If there are more /// arguments than `buf` can hold, returns `Error::TooManyArgs`. fn parse(s: &'a str, buf: &'a mut [&'a str]) -> Result<Command<'a>, Error> { let mut args = StackVec::new(buf); for arg in s.split(' ').filter(|a| !a.is_empty()) { args.push(arg).map_err(|_| Error::TooManyArgs)?; } if args.is_empty() { return Err(Error::Empty); } Ok(Command { args }) } /// Returns this command's path. This is equivalent to the first argument. fn path(&self) -> &str { self.args[0] } fn execute(&self)->bool{ match self.path(){ "echo"=>handle_echo(&self.args[1..]), path=>kprintln!("Unknown command:{}",path) } true } } fn handle_echo(args: &[&str]) { let len = args.len(); if len > 0 { for s in args[..len - 1].iter() { kprint!("{} ", s); } kprintln!("{}", args[len - 1]); } } const BELL: u8 = 7; const BACKSPACE: u8 = 8; const DELETE: u8 = 127; /// Starts a shell using `prefix` as the prefix for each line. This function /// never returns: it is perpetually in a shell loop. 
pub fn shell(prefix: &str){ //let mut working_dir=PathBuf::from("/"); loop{ let mut buf_storage=[0u8;512]; let mut buf=StackVec::new(&mut buf_storage); kprint!("{}",prefix); loop{ let byte = CONSOLE.lock().read_byte(); if byte == b'\r' || byte == b'\n'{ let mut command_storage:[&str;64]=["";64]; let result = Command::parse(str::from_utf8(buf.into_slice()).unwrap(),&mut command_storage); kprint!("\n"); match result{ Err(Error::TooManyArgs)=>{ kprintln!("error:too mant arguments"); }, Err(Error::Empty)=>{ //no command, ignore }, Ok(command)=>{ if !command.execute(){ return; } }, } break }else{ let mut console = CONSOLE.lock(); if byte == BACKSPACE || byte ==DELETE{ if buf.pop()==None{ console.write_byte(BELL); }else{ console.write(&[BACKSPACE,b' ',BACKSPACE]).expect("write"); } }else if byte<32 || byte ==255{ console.write_byte(BELL); }else{ if buf.push(byte).is_err(){ console.write_byte(BELL); }else{ console.write_byte(byte); } } } } } }
true
fc8ebbc6c8b9dcd791e9d65dfa272d3314f35406
Rust
ChrisMacNaughton/xfs-rs
/src/lib.rs
UTF-8
35,430
2.5625
3
[ "MIT", "LicenseRef-scancode-unknown-license-reference", "Apache-2.0" ]
permissive
#![recursion_limit="100"] #[macro_use] extern crate nom; use std::error; use std::fmt; use std::fs::File; use std::io; use std::io::prelude::*; use std::str::FromStr; use self::nom::{le_u8, is_digit, space, newline}; #[cfg(test)] mod tests { use nom; #[test] fn it_parses_a_u32() { let input = b"12345"; let result = super::take_u32(input); match result { nom::IResult::Done(_, f) => assert_eq!(f, 12345u32), _ => unreachable!(), } } #[test] fn it_parses_a_u32_with_whitespace() { let input = b"12345 "; let result = super::take_u32(input); match result { nom::IResult::Done(_, f) => assert_eq!(f, 12345u32), _ => unreachable!(), } } #[test] fn it_parses_extent_allocation() { let example_output = b"extent_alloc 4260849 125170297 4618726 131131897"; match super::extent_alloc(example_output) { nom::IResult::Done(_, result) => { assert_eq!(result.allocated_extents, 4260849); assert_eq!(result.freed_blocks, 131131897); } _ => unreachable!(), } } #[test] fn it_parses_allocation_btree() { let example_output = b"abt 29491162 337391304 11257328 11133039"; match super::abt(example_output) { nom::IResult::Done(_, result) => { assert_eq!(result.inserts, 11257328); } _ => unreachable!(), } } #[test] fn it_parses_block_mapping() { let example_output = b"blk_map 381213360 115456141 10903633 69612322 7448401 507596777 0"; match super::blk_map(example_output) { nom::IResult::Done(_, result) => { assert_eq!(result.list_delete, 7448401); } _ => unreachable!(), } } #[test] fn it_parses_block_map_btree() { let example_output = b"bmbt 771328 6236258 602114 86646"; match super::bmbt(example_output) { nom::IResult::Done(_, result) => { assert_eq!(result.deletes, 86646); } _ => unreachable!(), } } #[test] fn it_parses_directory_operations() { let example_output = b"dir 21253907 6921870 6969079 779205554"; match super::dir(example_output) { nom::IResult::Done(_, result) => { assert_eq!(result.lookups, 21253907); } _ => unreachable!(), } } #[test] fn it_parses_transactions() { let example_output = 
b"trans 126946406 38184616 6342392"; match super::trans(example_output) { nom::IResult::Done(_, result) => { assert_eq!(result.waited, 126946406); } _ => unreachable!(), } } #[test] fn it_parses_inode_operations() { let example_output = b"ig 17754368 2019571 102 15734797 0 15672217 3962470"; match super::ig(example_output) { nom::IResult::Done(_, result) => { assert_eq!(result.cache_lookups, 17754368); } _ => unreachable!(), } } #[test] fn it_parses_log_operations() { let example_output = b"log 129491915 3992515264 458018 153771989 127040250"; match super::log(example_output) { nom::IResult::Done(_, result) => { assert_eq!(result.log_writes, 129491915); } _ => unreachable!(), } } #[test] fn it_parses_tail_pushing_stats() { let example_output = b"push_ail 171473415 0 6896837 3324292 8069877 65884 1289485 0 22535 7337"; match super::push_ail(example_output) { nom::IResult::Done(_, result) => { assert_eq!(result.logspace, 171473415); } _ => unreachable!(), } } #[test] fn it_parses_io_map_write_convert() { let example_output = b"xstrat 4140059 0"; match super::xstrat(example_output) { nom::IResult::Done(_, result) => { assert_eq!(result.quick, 4140059); } _ => unreachable!(), } } #[test] fn it_parses_read_write_stats() { let example_output = b"rw 1595677950 1046884251"; match super::rw(example_output) { nom::IResult::Done(_, result) => { assert_eq!(result.write, 1595677950); } _ => unreachable!(), } } #[test] fn it_parses_attribute_operations() { let example_output = b"attr 194724197 0 7 0"; match super::attr(example_output) { nom::IResult::Done(_, result) => { assert_eq!(result.get, 194724197); } _ => unreachable!(), } } #[test] fn it_parses_inode_clustering() { let example_output = b"icluster 20772185 2488203 13909520"; match super::icluster(example_output) { nom::IResult::Done(_, result) => { assert_eq!(result.count, 20772185); } _ => unreachable!(), } } #[test] fn it_parses_vnode_statistics() { let example_output = b"vnodes 62578 15959666 0 0 15897088 15897088 
15897088 0"; match super::vnodes(example_output) { nom::IResult::Done(_, result) => { assert_eq!(result.active, 62578); } _ => unreachable!(), } } #[test] fn it_parses_buf_statistics() { let example_output = b"buf 2090581631 1972536890 118044776 225145 9486625 0 0 2000152616 809762"; match super::buf(example_output) { nom::IResult::Done(_, result) => { assert_eq!(result.get, 2090581631); } _ => unreachable!(), } } #[test] fn it_parses_extended_precision_counters() { let example_output = b"xpc 6908312903680 67735504884757 19760115252482"; match super::xpc(example_output) { nom::IResult::Done(_, result) => { assert_eq!(result.xstrat_bytes, 6908312903680); } _ => unreachable!(), } } #[test] fn it_parses_debug() { let example_output = b"debug 0"; match super::debug(example_output) { nom::IResult::Done(_, result) => { assert_eq!(result, false); } _ => unreachable!(), } } #[test] fn it_parses_example() { let example_output = b"extent_alloc 4260849 125170297 4618726 131131897 abt 29491162 337391304 11257328 11133039 blk_map 381213360 115456141 10903633 69612322 7448401 507596777 0 bmbt 771328 6236258 602114 86646 dir 21253907 6921870 6969079 779205554 trans 126946406 38184616 6342392 ig 17754368 2019571 102 15734797 0 15672217 3962470 log 129491915 3992515264 458018 153771989 127040250 push_ail 171473415 0 6896837 3324292 8069877 65884 1289485 0 22535 7337 xstrat 4140059 0 rw 1595677950 1046884251 attr 194724197 0 7 0 icluster 20772185 2488203 13909520 vnodes 62578 15959666 0 0 15897088 15897088 15897088 0 buf 2090581631 1972536890 118044776 225145 9486625 0 0 2000152616 809762 xpc 6908312903680 67735504884757 19760115252482 debug 0"; let result = super::parse(example_output).unwrap(); assert_eq!(result.extent_allocation.freed_extents, 4618726); assert_eq!(result.allocation_btree.lookups, 29491162); assert_eq!(result.block_mapping.unmap, 10903633); assert_eq!(result.block_map_btree.inserts, 602114); assert_eq!(result.directory_operations.get_dents, 779205554); 
assert_eq!(result.transactions.empty, 6342392); assert_eq!(result.inode_operations.inode_attr_changes, 3962470); assert_eq!(result.log_operations.force_sleep, 127040250); assert_eq!(result.tail_pushing_stats.push_ail_flush, 7337); assert_eq!(result.io_map_write_convert.split, 0); assert_eq!(result.read_write_stats.read, 1046884251); assert_eq!(result.attribute_operations.list, 0); assert_eq!(result.inode_clustering.flushinode, 13909520); assert_eq!(result.vnode_statistics.free, 0); assert_eq!(result.buf_statistics.get_read, 809762); assert_eq!(result.extended_precision_counters.read_bytes, 19760115252482); assert_eq!(result.debug, false); } #[test] fn it_parses_newer_version_with_extra_fields() { let example_output = b"extent_alloc 0 0 0 0 abt 0 0 0 0 blk_map 0 0 0 0 0 0 0 bmbt 0 0 0 0 dir 0 0 0 0 trans 0 0 0 ig 0 0 0 0 0 0 0 log 0 0 0 0 0 push_ail 0 0 0 0 0 0 0 0 0 0 xstrat 0 0 rw 0 0 attr 0 0 0 0 icluster 0 0 0 vnodes 0 0 0 0 0 0 0 0 buf 0 0 0 0 0 0 0 0 0 abtb2 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 abtc2 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 bmbt2 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 ibt2 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 fibt2 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 qm 0 0 0 0 0 0 0 0 xpc 0 0 0 debug 0 "; let result = super::parse(example_output).unwrap(); assert_eq!(result.extent_allocation.freed_extents, 0); assert_eq!(result.allocation_btree.lookups, 0); assert_eq!(result.block_mapping.unmap, 0); assert_eq!(result.block_map_btree.inserts, 0); assert_eq!(result.directory_operations.get_dents, 0); assert_eq!(result.transactions.empty, 0); assert_eq!(result.inode_operations.inode_attr_changes, 0); assert_eq!(result.log_operations.force_sleep, 0); assert_eq!(result.tail_pushing_stats.push_ail_flush, 0); assert_eq!(result.io_map_write_convert.split, 0); assert_eq!(result.read_write_stats.read, 0); assert_eq!(result.attribute_operations.list, 0); assert_eq!(result.inode_clustering.flushinode, 0); assert_eq!(result.vnode_statistics.free, 0); assert_eq!(result.buf_statistics.get_read, 0); 
assert_eq!(result.extended_precision_counters.read_bytes, 0); assert_eq!(result.debug, false); } } pub struct XfsStat { pub extent_allocation: ExtentAllocation, pub allocation_btree: AllocationBTree, pub block_mapping: BlockMapping, pub block_map_btree: BlockMapBTree, pub directory_operations: DirectoryOperations, pub transactions: Transactions, pub inode_operations: InodeOperations, pub log_operations: LogOperations, pub tail_pushing_stats: TailPushingStats, pub io_map_write_convert: IoMapWriteConvert, pub read_write_stats: ReadWriteStats, pub attribute_operations: AttributeOperations, pub inode_clustering: InodeClustering, pub vnode_statistics: VnodeStatistics, pub buf_statistics: BufStatistics, pub extended_precision_counters: ExtendedPrecisionCounters, pub debug: bool, } pub struct ExtentAllocation { /// Number of file system extents allocated over all XFS filesystems. pub allocated_extents: u32, /// Number of file system blocks allocated over all XFS filesystems. pub allocated_blocks: u32, /// Number of file system extents freed over all XFS filesystems. pub freed_extents: u32, /// Number of file system blocks freed over all XFS filesystems. pub freed_blocks: u32, } pub struct AllocationBTree { /// Number of lookup operations in XFS filesystem allocation btrees. pub lookups: u32, /// Number of compares in XFS filesystem allocation btree lookups. pub compares: u32, /// Number of extent records inserted into XFS filesystem allocation btrees. pub inserts: u32, /// Number of extent records deleted from XFS filesystem allocation btrees. pub deletes: u32, } pub struct BlockMapping { /// Number of block map for read operations performed on XFS files. pub map_read: u32, /// Number of block map for write operations performed on XFS files. pub map_write: u32, /// Number of block unmap (delete) operations performed on XFS files. pub unmap: u32, /// Number of extent list insertion operations for XFS files. 
pub list_insert: u32, /// Number of extent list deletion operations for XFS files. pub list_delete: u32, /// Number of extent list lookup operations for XFS files. pub list_lookup: u32, /// Number of extent list comparisons in XFS extent list lookups. pub list_compare: u32, } pub struct BlockMapBTree { /// Number of block map btree lookup operations on XFS files. pub lookups: u32, /// Number of block map btree compare operations in XFS block map lookups. pub compares: u32, /// Number of block map btree records inserted for XFS files. pub inserts: u32, /// Number of block map btree records deleted for XFS files. pub deletes: u32, } pub struct DirectoryOperations { /// This is a count of the number of file name directory lookups in XFS /// filesystems. It counts only those lookups which miss in the operating /// system's directory name lookup cache and must search the real directory /// structure for the name in question. The count is incremented once for each /// level of a pathname search that results in a directory lookup. pub lookups: u32, /// This is the number of times a new directory entry was created in XFS filesystems. Each time that a new file, directory, link, symbolic link, or special file is created in the directory hierarchy the count is incremented. pub creates: u32, /// This is the number of times an existing directory entry was removed in XFS filesystems. Each time that a file, directory, link, symbolic link, or special file is removed from the directory hierarchy the count is incremented. pub removes: u32, /// This is the number of times the XFS directory getdents operation was performed. The getdents operation is used by programs to read the contents of directories in a file system independent fashion. This count corresponds exactly to the number of times the getdents(2) system call was successfully used on an XFS directory. 
pub get_dents: u32, } pub struct Transactions { /// This is the number of meta-data transactions which waited to be committed to the on-disk log before allowing the process performing the transaction to continue. These transactions are slower and more expensive than asynchronous transactions, because they force the in memory log buffers to be forced to disk more often and they wait for the completion of the log buffer writes. Synchronous transactions include file truncations and all directory updates when the file system is mounted with the 'wsync' option. pub waited: u32, /// This is the number of meta-data transactions which did not wait to be committed to the on-disk log before allowing the process performing the transaction to continue. These transactions are faster and more efficient than synchronous transactions, because they commit their data to the in memory log buffers without forcing those buffers to be written to disk. This allows multiple asynchronous transactions to be committed to disk in a single log buffer write. Most transactions used in XFS file systems are asynchronous. pub async: u32, /// This is the number of meta-data transactions which did not actually change anything. These are transactions which were started for some purpose, but in the end it turned out that no change was necessary. pub empty: u32, } pub struct InodeOperations { /// This is the number of times the operating system looked for an XFS inode in the inode cache. Whether the inode was found in the cache or needed to be read in from the disk is not indicated here, but this can be computed from the ig_found and ig_missed counts. pub cache_lookups: u32, /// This is the number of times the operating system looked for an XFS inode in the inode cache and found it. The closer this count is to the ig_attempts count the better the inode cache is performing. 
pub cache_hits: u32, /// This is the number of times the operating system looked for an XFS inode in the inode cache and saw that it was there but was unable to use the in memory inode because it was being recycled by another process. pub cache_recycle: u32, /// This is the number of times the operating system looked for an XFS inode in the inode cache and the inode was not there. The further this count is from the ig_attempts count the better. pub cache_missed: u32, /// This is the number of times the operating system looked for an XFS inode in the inode cache and found that it was not there but upon attempting to add the inode to the cache found that another process had already inserted it. pub cache_dup: u32, /// This is the number of times the operating system recycled an XFS inode from the inode cache in order to use the memory for that inode for another purpose. Inodes are recycled in order to keep the inode cache from growing without bound. If the reclaim rate is high it may be beneficial to raise the vnode_free_ratio kernel tunable variable to increase the size of the inode cache. pub cache_reclaime: u32, /// This is the number of times the operating system explicitly changed the attributes of an XFS inode. For example, this could be to change the inode's owner, the inode's size, or the inode's timestamps. pub inode_attr_changes: u32, } pub struct LogOperations { /// This variable counts the number of log buffer writes going to the physical log partitions of all XFS filesystems. Log data traffic is proportional to the level of meta-data updating. Log buffer writes get generated when they fill up or external syncs occur. pub log_writes: u32, /// This variable counts (in 512-byte units) the information being written to the physical log partitions of all XFS filesystems. Log data traffic is proportional to the level of meta-data updating. The rate with which log data gets written depends on the size of internal log buffers and disk write speed. 
Therefore, filesystems with very high meta-data updating may need to stripe the log partition or put the log partition on a separate drive. pub log_blocks: u32, /// This variable keeps track of times when a logged transaction can not get any log buffer space. When this occurs, all of the internal log buffers are busy flushing their data to the physical on-disk log. pub noiclogs: u32, /// The number of times the in-core log is forced to disk. It is equivalent to the number of successful calls to the function xfs_log_force(). pub log_forced: u32, /// Value exported from the xs_log_force_sleep field of struct xfsstats. pub force_sleep: u32, } pub struct TailPushingStats { /// Value from the xs_try_logspace field of struct xfsstats. pub logspace: u32, /// Value from the xs_sleep_logspace field of struct xfsstats. pub sleep_logspace: u32, /// The number of times the tail of the AIL is moved forward. It is equivalent to the number of successful calls to the function xfs_trans_push_ail(). pub push_ails: u32, /// Value from xs_push_ail_success field of struct xfsstats. pub push_ail_success: u32, /// Value from xs_push_ail_pushbuf field of struct xfsstats. pub push_ail_pushbuf: u32, /// Value from xs_push_ail_pinned field of struct xfsstats. pub push_ail_pinned: u32, /// Value from xs_push_ail_locked field of struct xfsstats. pub push_ail_locked: u32, /// Value from xs_push_ail_flushing field of struct xfsstats. pub push_ail_flushing: u32, /// Value from xs_push_ail_restarts field of struct xfsstats. pub push_ail_restarts: u32, /// Value from xs_push_ail_flush field of struct xfsstats. pub push_ail_flush: u32, } pub struct IoMapWriteConvert { /// This is the number of buffers flushed out by the XFS flushing daemons which are written to contiguous space on disk. The buffers handled by the XFS daemons are delayed allocation buffers, so this count gives an indication of the success of the XFS daemons in allocating contiguous disk space for the data being flushed to disk. 
pub quick: u32, /// This is the number of buffers flushed out by the XFS flushing daemons which are written to non-contiguous space on disk. The buffers handled by the XFS daemons are delayed allocation buffers, so this count gives an indication of the failure of the XFS daemons in allocating contiguous disk space for the data being flushed to disk. Large values in this counter indicate that the file system has become fragmented. pub split: u32, } pub struct ReadWriteStats { /// This is the number of write(2) system calls made to files in XFS file systems. pub write: u32, /// This is the number of read(2) system calls made to files in XFS file systems. pub read: u32, } pub struct AttributeOperations { /// The number of "get" operations performed on extended file attributes within XFS filesystems. The "get" operation retrieves the value of an extended attribute. pub get: u32, /// The number of "set" operations performed on extended file attributes within XFS filesystems. The "set" operation creates and sets the value of an extended attribute. pub set: u32, /// The number of "remove" operations performed on extended file attributes within XFS filesystems. The "remove" operation deletes an extended attribute. pub remove: u32, /// The number of "list" operations performed on extended file attributes within XFS filesystems. The "list" operation retrieves the set of extended attributes associated with a file. pub list: u32, } pub struct InodeClustering { /// This is the number of calls to xfs_iflush which gets called when an inode is being flushed (such as by bdflush or tail pushing). xfs_iflush searches for other inodes in the same cluster which are dirty and flushable. pub count: u32, /// Value from xs_icluster_flushcnt field of struct xfsstats. pub flushcnt: u32, /// This is the number of times that the inode clustering was not able to flush anything but the one inode it was called with. 
pub flushinode: u32, } pub struct VnodeStatistics { /// Number of vnodes not on free lists. pub active: u32, /// Number of times vn_alloc called. pub alloc: u32, /// Number of times vn_get called. pub get: u32, /// Number of times vn_hold called. pub hold: u32, /// Number of times vn_rele called. pub rele: u32, /// Number of times vn_reclaim called. pub reclaim: u32, /// Number of times vn_remove called. pub remove: u32, /// Number of times vn_free called. pub free: u32, } pub struct BufStatistics { pub get: u32, pub create: u32, pub get_locked: u32, pub get_locked_waited: u32, pub busy_locked: u32, pub miss_locked: u32, pub page_retries: u32, pub page_found: u32, pub get_read: u32, } pub struct ExtendedPrecisionCounters { /// This is a count of bytes of file data flushed out by the XFS flushing daemons. pub xstrat_bytes: u64, /// This is a count of bytes written via write(2) system calls to files in XFS file systems. It can be used in conjunction with the write_calls count to calculate the average size of the write operations to files in XFS file systems. pub write_bytes: u64, /// This is a count of bytes read via read(2) system calls to files in XFS file systems. It can be used in conjunction with the read_calls count to calculate the average size of the read operations to files in XFS file systems. pub read_bytes: u64, } #[derive(Debug)] pub enum XfsError { /// We encounter an error reading from /proc/fs/xfs/stat Io(io::Error), /// We don't have enough information for a complete parse Incomplete, /// We encounter an error with the data wer're parsing Parse, } impl fmt::Display for XfsError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match *self { // Both underlying errors already impl `Display`, so we defer to // their implementations. 
XfsError::Io(ref err) => write!(f, "IO error: {}", err), XfsError::Incomplete => write!(f, "Not enough data for XFS parse"), XfsError::Parse => write!(f, "Parse error"), } } } impl error::Error for XfsError { fn description(&self) -> &str { // Both underlying errors already impl `Error`, so we defer to their // implementations. match *self { XfsError::Io(ref err) => err.description(), XfsError::Incomplete => "There is not enough data for parsing", XfsError::Parse => "There was an error parsing", } } fn cause(&self) -> Option<&error::Error> { match *self { // N.B. Both of these implicitly cast `err` from their concrete // types (either `&io::Error` or `&num::ParseIntError`) // to a trait object `&Error`. This works because both error types // implement `Error`. XfsError::Io(ref err) => Some(err), XfsError::Incomplete => None, XfsError::Parse => None, } } } impl From<io::Error> for XfsError { fn from(err: io::Error) -> XfsError { XfsError::Io(err) } } pub fn parse(input: &[u8]) -> Result<XfsStat, XfsError> { match xfs_stat(input) { nom::IResult::Done(_, stat) => Ok(stat), nom::IResult::Error(_) => { Err(XfsError::Parse) }, nom::IResult::Incomplete(_) => { Err(XfsError::Incomplete) }, // _ => None, } } pub fn read() -> Result<String, XfsError> { let mut f = try!(File::open("/proc/fs/xfs/stat")); let mut s = String::new(); try!(f.read_to_string(&mut s)); Ok(s) } pub fn get() -> Result<XfsStat, XfsError> { let contents = try!(read()); parse((&contents[..]).as_bytes()) } named!(xfs_stat <XfsStat>, chain!( extent_alloc: extent_alloc ~ newline ~ abt: abt ~ newline ~ blk_map: blk_map ~ newline ~ block_map_btree: bmbt ~ newline ~ directory_operations: dir ~ newline ~ transactions: trans ~ newline ~ inode_operations: ig ~ newline ~ log_operations: log ~ newline ~ tail_pushing_stats: push_ail ~ newline ~ io_map_write_convert: xstrat ~ newline ~ read_write_stats: rw ~ newline ~ attribute_operations: attr ~ newline ~ inode_clustering: icluster ~ newline ~ vnode_statistics: 
vnodes ~ newline ~ buf_statistics: buf ~ newline ~ extended_precision_counters: xpc ~ newline ~ dbg: debug, || { XfsStat { extent_allocation: extent_alloc, allocation_btree: abt, block_mapping: blk_map, block_map_btree: block_map_btree, directory_operations: directory_operations, transactions: transactions, inode_operations: inode_operations, log_operations:log_operations, tail_pushing_stats: tail_pushing_stats, io_map_write_convert: io_map_write_convert, read_write_stats: read_write_stats, attribute_operations: attribute_operations, inode_clustering: inode_clustering, vnode_statistics: vnode_statistics, buf_statistics: buf_statistics, extended_precision_counters: extended_precision_counters, debug: dbg, } } ) ); named!(debug <bool>, chain!( tag!("debug") ~ space ~ dbg: le_u8, || { if dbg == 1 { true } else { false } } ) ); named!(take_u32 <u32>, chain!( uint_slice: take_while!(is_digit) ~ opt!(space), || { let int_str = String::from_utf8_lossy(uint_slice); u32::from_str(&int_str[..]).unwrap() } ) ); named!(take_u64 <u64>, chain!( uint_slice: take_while!(is_digit) ~ opt!(space), || { let int_str = String::from_utf8_lossy(uint_slice); u64::from_str(&int_str[..]).unwrap() } ) ); named!(extent_alloc <ExtentAllocation>, chain!( tag!("extent_alloc") ~ space ~ allocx: take_u32 ~ allocb: take_u32 ~ freex: take_u32 ~ freeb: take_u32, || { ExtentAllocation { allocated_extents: allocx, allocated_blocks: allocb, freed_extents: freex, freed_blocks: freeb, } } ) ); named!(abt <AllocationBTree>, chain!( tag!("abt") ~ space ~ lookups: take_u32 ~ compares: take_u32 ~ inserts: take_u32 ~ deletes: take_u32, || { AllocationBTree { lookups: lookups, compares: compares, inserts: inserts, deletes: deletes, } } ) ); named!(blk_map <BlockMapping>, chain!( tag!("blk_map") ~ space ~ map_read: take_u32 ~ map_write: take_u32 ~ unmap: take_u32 ~ list_insert: take_u32 ~ list_delete: take_u32 ~ list_lookup: take_u32 ~ list_compare: take_u32, ||{ BlockMapping { map_read: map_read, map_write: 
map_write, unmap: unmap, list_insert: list_insert, list_delete: list_delete, list_lookup: list_lookup, list_compare: list_compare, } } ) ); named!(bmbt <BlockMapBTree>, chain!( tag!("bmbt") ~ space ~ lookup: take_u32 ~ compare: take_u32 ~ insrec: take_u32 ~ delrec: take_u32, || { BlockMapBTree { lookups: lookup, compares: compare, inserts: insrec, deletes: delrec, } } ) ); named!(dir <DirectoryOperations>, chain!( tag!("dir") ~ space ~ lookups: take_u32 ~ creates: take_u32 ~ removes: take_u32 ~ get_dents: take_u32, || { DirectoryOperations { lookups: lookups, creates: creates, removes: removes, get_dents: get_dents, } } ) ); named!(trans <Transactions>, chain!( tag!("trans") ~ space ~ waited: take_u32 ~ async: take_u32 ~ empty: take_u32, ||{ Transactions { waited: waited, async: async, empty: empty, } } ) ); named!(ig <InodeOperations>, chain!( tag!("ig") ~ space ~ cache_lookups: take_u32 ~ cache_hits: take_u32 ~ cache_recycle: take_u32 ~ cache_missed: take_u32 ~ cache_dup: take_u32 ~ cache_reclaime: take_u32 ~ inode_attr_changes: take_u32, || { InodeOperations { cache_lookups: cache_lookups, cache_hits: cache_hits, cache_recycle: cache_recycle, cache_missed: cache_missed, cache_dup: cache_dup, cache_reclaime: cache_reclaime, inode_attr_changes: inode_attr_changes, } } ) ); named!(log <LogOperations>, chain!( tag!("log") ~ space ~ log_writes: take_u32 ~ log_blocks: take_u32 ~ noiclogs: take_u32 ~ log_forced: take_u32 ~ force_sleep: take_u32, || { LogOperations { log_writes: log_writes, log_blocks: log_blocks, noiclogs: noiclogs, log_forced: log_forced, force_sleep: force_sleep, } } ) ); named!(push_ail <TailPushingStats>, chain!( tag!("push_ail") ~ space ~ logspace: take_u32 ~ sleep_logspace: take_u32 ~ push_ails: take_u32 ~ push_ail_success: take_u32 ~ push_ail_pushbuf: take_u32 ~ push_ail_pinned: take_u32 ~ push_ail_locked: take_u32 ~ push_ail_flushing: take_u32 ~ push_ail_restarts: take_u32 ~ push_ail_flush: take_u32, || { TailPushingStats { logspace: logspace, 
sleep_logspace: sleep_logspace, push_ails: push_ails, push_ail_success: push_ail_success, push_ail_pushbuf: push_ail_pushbuf, push_ail_pinned: push_ail_pinned, push_ail_locked: push_ail_locked, push_ail_flushing: push_ail_flushing, push_ail_restarts: push_ail_restarts, push_ail_flush: push_ail_flush, } } ) ); named!(xstrat <IoMapWriteConvert>, chain!( tag!("xstrat") ~ space ~ quick: take_u32 ~ split: take_u32, || { IoMapWriteConvert { quick: quick, split: split, } } ) ); named!(rw <ReadWriteStats>, chain!( tag!("rw") ~ space ~ write: take_u32 ~ read: take_u32, || { ReadWriteStats { write: write, read: read, } } ) ); named!(attr <AttributeOperations>, chain!( tag!("attr") ~ space ~ get: take_u32 ~ set: take_u32 ~ remove: take_u32 ~ list: take_u32, || { AttributeOperations { get: get, set: set, remove: remove, list: list, } } ) ); named!(icluster <InodeClustering>, chain!( tag!("icluster") ~ space ~ count: take_u32 ~ flushcnt: take_u32 ~ flushinode: take_u32, || { InodeClustering { count: count, flushcnt: flushcnt, flushinode: flushinode, } } ) ); named!(vnodes <VnodeStatistics>, chain!( tag!("vnodes") ~ space ~ active: take_u32 ~ alloc: take_u32 ~ get: take_u32 ~ hold: take_u32 ~ rele: take_u32 ~ reclaim: take_u32 ~ remove: take_u32 ~ free: take_u32, || { VnodeStatistics { active: active, alloc: alloc, get: get, hold: hold, rele: rele, reclaim: reclaim, remove: remove, free: free, } } ) ); named!(buf <BufStatistics>, chain!( tag!("buf") ~ space ~ get: take_u32 ~ create: take_u32 ~ get_locked: take_u32 ~ get_locked_waited: take_u32 ~ busy_locked: take_u32 ~ miss_locked: take_u32 ~ page_retries: take_u32 ~ page_found: take_u32 ~ get_read: take_u32, || { BufStatistics { get: get, create: create, get_locked: get_locked, get_locked_waited: get_locked_waited, busy_locked: busy_locked, miss_locked: miss_locked, page_retries: page_retries, page_found: page_found, get_read: get_read, } } ) ); named!(xpc <ExtendedPrecisionCounters>, chain!( take_until!("xpc") ~ tag!("xpc") ~ 
space ~ xstrat_bytes: take_u64 ~ write_bytes: take_u64 ~ read_bytes: take_u64, ||{ ExtendedPrecisionCounters { xstrat_bytes: xstrat_bytes, write_bytes: write_bytes, read_bytes: read_bytes, } } ) );
true
86cb9aeb149a3cfd9dd529539ef7a2fd97d66db2
Rust
mathw/adventofcode
/aoc2020/src/day6/mod.rs
UTF-8
1,715
3.390625
3
[]
no_license
use crate::dayerror::DayError;
use std::collections::HashSet;

/// Solve part 1 against the bundled puzzle input.
pub fn part1() -> Result<String, DayError> {
    Ok(format!("The answer is {}", do_part1(include_str!("input.txt"))))
}

/// Solve part 2 against the bundled puzzle input.
pub fn part2() -> Result<String, DayError> {
    Ok(format!("The answer is {}", do_part2(include_str!("input.txt"))))
}

/// Part 1: sum, over all groups, of the distinct questions anyone answered.
fn do_part1(input: &str) -> usize {
    all_sets(input).map(|group| group.len()).sum()
}

/// Part 2: sum, over all groups, of the questions *everyone* answered.
fn do_part2(input: &str) -> usize {
    input
        .split("\n\n")
        .map(|group| group_intersection(group_to_person_sets(group)).len())
        .sum()
}

/// Distinct answer characters across a whole group (blank lines ignored
/// because only alphabetic characters are kept).
fn group_to_sets(s: &str) -> HashSet<char> {
    let mut answers = HashSet::new();
    for c in s.chars() {
        if c.is_alphabetic() {
            answers.insert(c);
        }
    }
    answers
}

/// Lazily yield one answer-set per blank-line-separated group.
fn all_sets<'a>(s: &'a str) -> impl Iterator<Item = HashSet<char>> + 'a {
    s.split("\n\n").map(group_to_sets)
}

/// One answer-set per person (per line) within a group.
fn group_to_person_sets(s: &str) -> Vec<HashSet<char>> {
    s.lines()
        .map(|line| line.chars().filter(|c| c.is_alphabetic()).collect())
        .collect()
}

/// Intersection of every person's answers; empty set for an empty group.
fn group_intersection(people: impl IntoIterator<Item = HashSet<char>>) -> HashSet<char> {
    let mut sets = people.into_iter();
    match sets.next() {
        None => HashSet::new(),
        Some(mut acc) => {
            for set in sets {
                acc.retain(|c| set.contains(c));
            }
            acc
        }
    }
}

#[test]
fn test_group_to_set() {
    let group = "a bc d d a";
    let set = group_to_sets(group);
    let expected_set: HashSet<char> = vec!['a', 'b', 'c', 'd'].into_iter().collect();
    assert_eq![set, expected_set];
}
true
f11d13c59979f2759aa01001f6dc8afb2b1864f8
Rust
HadrienG2/weave-parallel-benchmarks-rs
/src/bin/fib.rs
UTF-8
991
3.234375
3
[]
no_license
fn fib(n: usize) -> usize { if n < 2 { return n; } let (x, y) = if cfg!(feature = "idiomatic") { // Idiomatic mode, use optimized Rayon path for the common-case // scenario of binary fork-join parallelization rayon::join(|| fib(n-1), || fib(n-2)) } else { // Non-idiomatic mode, strictly imitate the Weave version by // explicitly spawning one task. This will be quite slow as "scope" // has not received as much optimization love as "join"... let (mut x, mut y) = (0, 0); rayon::scope(|s| { s.spawn(|_| x = fib(n - 1)); y = fib(n - 2); }); (x, y) }; x + y } fn main() { let mut args = std::env::args().skip(1); let n = args.next() .map(|s| s.parse().expect("Expected fibonacci number")) .unwrap_or(40); println!("fib({}) = {}", n, fib(n)); }
true
336fcffe448b46dd2eefbdfb431aa6cbf07e9c0f
Rust
anderoo/game-of-life
/src/universe.rs
UTF-8
3,558
3.421875
3
[]
no_license
use super::cell::{Cell, CellKind}; use std::fmt; #[derive(Clone, PartialEq, Debug)] pub struct Universe { pub size: usize, pub cells: Vec<Vec<Cell>> } impl fmt::Display for Universe { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { for i in 0..self.size { for j in 0..self.size { write!(f, "{}", self.cells[i][j]).expect("Unable to write Cell"); } write!(f, "\n").expect("Unable to write newline"); } Ok(()) } } impl Universe { pub fn new(size: usize) -> Self { let mut cells: Vec<Vec<Cell>> = vec![vec![]; size]; for i in 0..size { for _ in 0..size { cells[i].push(Cell::new()); } } Universe { size, cells } } pub fn tick(&mut self) { let prev = self.cells.clone(); for i in 0..self.size { for j in 0..self.size { self.cell_tick(&prev, i, j); } } } pub fn alive_cells(&self) -> i32 { self.count_cell_kinds(&CellKind::ALIVE) } pub fn dead_cells(&self) -> i32 { self.count_cell_kinds(&CellKind::DEAD) } fn count_cell_kinds(&self, kind: &CellKind) -> i32 { self.cells.iter() .map(|r| r.iter().filter(|c| c.kind.eq(kind)).count() as i32) .sum() } fn cell_tick(&mut self, prev: &Vec<Vec<Cell>>, i: usize, j: usize) { let size = self.size as i32; let neighbours: [(i32, i32); 8] = [ (-1, -1), (-1, 0), (-1, 1), // Top row (1, 1), (1, 0), (1, -1), // Bottom row (0, 1), (0, -1), // Center row ]; let alive_neighbours = neighbours.iter() .map(|(x, y)| ((i as i32) + x, (j as i32) + y)) .filter(|(x, y)| x >= &0 && y >= &0 && x <= &(size - 1) && y <= &(size - 1)) .filter(|(x, y)| prev[*x as usize][*y as usize].kind == CellKind::ALIVE) .count(); self.cells[i][j].next(alive_neighbours as i32); } } #[cfg(test)] mod tests { use super::*; fn base_universe() -> Universe { Universe { size: 3, cells: vec![ vec![dead_cell(), alive_cell(), dead_cell()], vec![dead_cell(), dead_cell(), alive_cell()], vec![alive_cell(), alive_cell(), alive_cell()], ], } } fn alive_cell() -> Cell { Cell { kind: CellKind::ALIVE } } fn dead_cell() -> Cell { Cell { kind: CellKind::DEAD } } #[test] fn test_tick() { let mut 
universe = base_universe(); let expected = Universe { size: 3, cells: vec![ vec![dead_cell(), dead_cell(), dead_cell()], vec![alive_cell(), dead_cell(), alive_cell()], vec![dead_cell(), alive_cell(), alive_cell()], ], }; universe.tick(); assert_eq!(universe, expected); } #[test] fn test_alive_cells() { let universe = base_universe(); assert_eq!(universe.alive_cells(), 5); } #[test] fn test_dead_cells() { let universe = base_universe(); assert_eq!(universe.dead_cells(), 4); } #[test] fn test_fmt() { let universe = base_universe(); let expected = "\ ◻ ◼ ◻ \n\ ◻ ◻ ◼ \n\ ◼ ◼ ◼ \n\ "; assert_eq!(format!("{}", universe), expected); } }
true
f8b1eb816727f9784d0dd0d4ec762deee2c829a7
Rust
esbraff/moonlight-lang
/src/parser.rs
UTF-8
5,711
3.421875
3
[]
no_license
use expressions::Expression;
use tokens::TokenType;
use tokens::Token;

/// Recursive-descent parser turning a flat token list into expression
/// trees (one tree per top-level expression, collected in `output`).
pub struct Parser<'a> {
    pub input: &'a Vec<Token>,
    pub output: Vec<Box<Expression>>,
    position: usize, // index of the next token to consume
    length: usize,   // total number of input tokens (was misspelled `lenght`)
}

impl<'a> Parser<'a> {
    /// Create a parser over `input`; call [`Parser::parse`] to fill `output`.
    pub fn new(input: &'a Vec<Token>) -> Parser {
        Parser {
            input: input,
            output: Vec::new(),
            position: 0,
            length: input.len()
        }
    }

    /// Consume the current token iff it has the given type.
    /// Returns `true` (and advances the cursor) on a match.
    fn match_type(&mut self, token_type: TokenType) -> bool {
        let curr_token = self.peek(0);
        if curr_token.token_type != token_type {
            return false;
        }
        self.position += 1;
        true
    }

    /// Look at the token `relative_position` steps away from the cursor;
    /// negative values look backwards. Out-of-range reads yield EOF.
    fn peek(&self, relative_position: i32) -> Token {
        let pos = (self.position as i32 + relative_position) as usize;
        if pos >= self.length {
            return Token::new(TokenType::EOF, String::new());
        }
        self.input[pos].clone()
    }

    /// Entry point for a single expression (lowest precedence level).
    fn expression(&mut self) -> Box<Expression> {
        self.additive()
    }

    /// `+` / `-`, left-associative: loop at this level, recurse one level
    /// down for each right-hand side.
    fn additive(&mut self) -> Box<Expression> {
        let mut expr = self.multiplicative();

        loop {
            if self.match_type(TokenType::Add) {
                expr = Box::new(Expression::Binary(TokenType::Add, expr, self.multiplicative()));
                continue;
            }

            if self.match_type(TokenType::Substract) {
                expr = Box::new(Expression::Binary(TokenType::Substract, expr, self.multiplicative()));
                continue;
            }

            break;
        }

        expr
    }

    /// `*` / `/`, left-associative.
    ///
    /// BUGFIX: each right-hand side is now parsed with `unary()`, not a
    /// recursive `multiplicative()` call. The old recursion made these
    /// operators right-associative, so `8 / 4 / 2` parsed as
    /// `8 / (4 / 2)` = 4 instead of `(8 / 4) / 2` = 1. `additive()` above
    /// already used the correct loop + next-lower-level pattern.
    fn multiplicative(&mut self) -> Box<Expression> {
        let mut expr = self.unary();

        loop {
            if self.match_type(TokenType::Multiply) {
                expr = Box::new(Expression::Binary(TokenType::Multiply, expr, self.unary()));
                continue;
            }

            if self.match_type(TokenType::Divide) {
                expr = Box::new(Expression::Binary(TokenType::Divide, expr, self.unary()));
                continue;
            }

            break;
        }

        expr
    }

    /// Unary `+` / `-` prefix operators.
    fn unary(&mut self) -> Box<Expression> {
        if self.match_type(TokenType::Substract) {
            return Box::new(Expression::Unary(TokenType::Substract, self.primary()));
        }

        if self.match_type(TokenType::Add) {
            return Box::new(Expression::Unary(TokenType::Add, self.primary()));
        }

        self.primary()
    }

    /// Literals, variables, function definitions/calls, blocks and
    /// parenthesised sub-expressions.
    fn primary(&mut self) -> Box<Expression> {
        let mut curr_token = self.peek(0);

        // `{ expr* }` block
        if self.match_type(TokenType::LeftBrace) {
            let mut exprs = Vec::new();
            while !self.match_type(TokenType::RightBrace) {
                let expr = self.expression();
                exprs.push(expr);
            }
            return Box::new(Expression::Block(exprs));
        }

        // function literal: optional `( name* )` argument list, then a body
        if self.match_type(TokenType::Func) {
            let mut args = Vec::new();
            if self.match_type(TokenType::LeftParen) {
                loop {
                    curr_token = self.peek(0);
                    if self.match_type(TokenType::VariableKey) {
                        args.push(curr_token.data);
                    } else if self.match_type(TokenType::RightParen) {
                        break;
                    } else {
                        panic!(
                            "Expected {:?} or {:?}, found {:?}",
                            TokenType::RightParen,
                            TokenType::VariableKey,
                            curr_token.token_type
                        );
                    }
                }
            }
            let expr = self.expression();
            return Box::new(Expression::Function(expr, args));
        }

        if self.match_type(TokenType::Null) {
            return Box::new(Expression::Null);
        }

        if self.match_type(TokenType::Number) {
            return Box::new(Expression::NumberValue(curr_token.data.parse().unwrap()));
        }

        if self.match_type(TokenType::StringValue) {
            return Box::new(Expression::StringValue(curr_token.data));
        }

        if self.match_type(TokenType::VariableKey) {
            // `name = expr` assignment
            if self.match_type(TokenType::Setter) {
                let var_key_offset = -2; // the variable name sits two tokens back
                return Box::new(Expression::SetVariable(self.peek(var_key_offset).data, self.expression()));
            }

            // removal: clears the variable by assigning Null
            if self.match_type(TokenType::Remover) {
                let var_key_offset = -2;
                return Box::new(Expression::SetVariable(self.peek(var_key_offset).data, Box::new(Expression::Null)));
            }

            // `name(arg*)` function call
            if self.match_type(TokenType::LeftParen) {
                let mut args = Vec::new();
                while !self.match_type(TokenType::RightParen) {
                    args.push(self.expression());
                }
                return Box::new(Expression::CallFunc(curr_token.data, args));
            }

            return Box::new(Expression::GetVariable(curr_token.data));
        }

        if self.match_type(TokenType::HexNumber) {
            return Box::new(Expression::NumberValue(i64::from_str_radix(&curr_token.data, 16).unwrap() as f64));
        }

        // `( expr )` grouping
        if self.match_type(TokenType::LeftParen) {
            let expr = self.expression();
            self.match_type(TokenType::RightParen);
            return expr;
        }

        panic!("unknown expr");
    }

    /// Parse the whole input, pushing one tree per expression into `output`.
    pub fn parse(&mut self) {
        while !self.match_type(TokenType::EOF) {
            let expr = self.expression();
            self.output.push(expr);
        }
    }
}
true
203986d5d31ee21636e65551882f1a82bad0c04e
Rust
rustasync/runtime
/src/net/udp.rs
UTF-8
14,207
3.59375
4
[ "Apache-2.0", "LicenseRef-scancode-unknown-license-reference", "MIT" ]
permissive
//! Asynchronous UDP bindings. //! //! To create a bi-directional UDP socket use [`UdpSocket::bind`]. Sending data from the socket is //! done by using [`send_to`] which returns the [`SendTo`] future. Reading data from the socket is //! done by using [`recv_from`] which returns the [`RecvFrom`] future. //! //! [`UdpSocket::bind`]: struct.UdpSocket.html#method.bind //! [`send_to`]: struct.UdpSocket.html#method.send_to //! [`recv_from`]: struct.UdpSocket.html#method.recv_from //! [`RecvFrom`]: struct.RecvFrom.html //! [`SendTo`]: struct.SendTo.html use futures::prelude::*; use std::io; use std::net::{Ipv4Addr, Ipv6Addr, SocketAddr, ToSocketAddrs}; use std::pin::Pin; use std::task::{Context, Poll}; /// A UDP socket. /// /// After creating a `UdpSocket` by [`bind`]ing it to a socket address, data can be [sent to] and /// [received from] any other socket address. /// /// As stated in the User Datagram Protocol's specification in [IETF RFC 768], UDP is an unordered, /// unreliable protocol. Refer to [`TcpListener`] and [`TcpStream`] for async TCP primitives, and /// [`std::net`] for synchronous networking primitives. 
/// /// [`bind`]: #method.bind /// [received from]: #method.recv_from /// [sent to]: #method.send_to /// [`TcpListener`]: ../struct.TcpListener.html /// [`TcpStream`]: ../struct.TcpStream.html /// [`std::net`]: https://doc.rust-lang.org/std/net/index.html /// [IETF RFC 768]: https://tools.ietf.org/html/rfc768 /// /// ## Examples /// ```no_run /// use runtime::net::UdpSocket; /// /// #[runtime::main] /// async fn main() -> std::io::Result<()> { /// let mut socket = UdpSocket::bind("127.0.0.1:8080")?; /// let mut buf = vec![0u8; 1024]; /// /// println!("Listening on {}", socket.local_addr()?); /// /// loop { /// let (recv, peer) = socket.recv_from(&mut buf).await?; /// let sent = socket.send_to(&buf[..recv], &peer).await?; /// println!("Sent {} out of {} bytes to {}", sent, recv, peer); /// } /// } /// ``` #[derive(Debug)] pub struct UdpSocket { inner: Pin<Box<dyn runtime_raw::UdpSocket>>, } impl UdpSocket { /// Creates a UDP socket from the given address. /// /// Binding with a port number of 0 will request that the OS assigns a port to this socket. The /// port allocated can be queried via the [`local_addr`] method. /// /// [`local_addr`]: #method.local_addr /// /// # Examples /// /// ```no_run /// use runtime::net::UdpSocket; /// /// # fn main() -> Result<(), Box<dyn std::error::Error>> { /// let socket = UdpSocket::bind("127.0.0.1:0")?; /// # Ok(()) /// # } /// ``` pub fn bind<A: ToSocketAddrs>(addr: A) -> io::Result<Self> { let mut last_err = None; for addr in addr.to_socket_addrs()? { match runtime_raw::current_runtime().bind_udp_socket(&addr) { Ok(inner) => return Ok(UdpSocket { inner }), Err(e) => last_err = Some(e), } } Err(last_err.unwrap_or_else(|| { io::Error::new( io::ErrorKind::InvalidInput, "could not resolve to any addresses", ) })) } /// Returns the local address that this listener is bound to. /// /// This can be useful, for example, when binding to port 0 to figure out which port was /// actually bound. 
/// /// # Examples /// /// ```no_run /// use runtime::net::UdpSocket; /// /// # fn main() -> Result<(), Box<dyn std::error::Error>> { /// let socket = UdpSocket::bind("127.0.0.1:0")?; /// println!("Address: {:?}", socket.local_addr()); /// # Ok(()) /// # } /// ``` pub fn local_addr(&self) -> io::Result<SocketAddr> { self.inner.local_addr() } /// Sends data on the socket to the given address. /// /// On success, returns the number of bytes written. /// /// # Examples /// /// ```no_run /// # use std::error::Error; /// use runtime::net::UdpSocket; /// /// const THE_MERCHANT_OF_VENICE: &[u8] = b" /// If you prick us, do we not bleed? /// If you tickle us, do we not laugh? /// If you poison us, do we not die? /// And if you wrong us, shall we not revenge? /// "; /// /// # async fn send_data() -> Result<(), Box<dyn Error + 'static>> { /// let mut socket = UdpSocket::bind("127.0.0.1:0")?; /// /// let addr = "127.0.0.1:7878"; /// let sent = socket.send_to(THE_MERCHANT_OF_VENICE, &addr).await?; /// println!("Sent {} bytes to {}", sent, addr); /// # Ok(()) /// # } /// ``` pub fn send_to<'socket, 'buf, A: ToSocketAddrs>( &'socket mut self, buf: &'buf [u8], addr: A, ) -> SendToFuture<'socket, 'buf> { let addr = addr .to_socket_addrs() .map(|mut iter| iter.next()) .transpose(); SendToFuture { buf, addr, socket: self, } } /// Receives data from the socket. /// /// On success, returns the number of bytes read and the origin. 
/// /// # Examples /// /// ```no_run /// # use std::error::Error; /// use runtime::net::UdpSocket; /// /// # async fn recv_data() -> Result<Vec<u8>, Box<dyn Error + 'static>> { /// let mut socket = UdpSocket::bind("127.0.0.1:0")?; /// /// let mut buf = vec![0; 1024]; /// let (recv, peer) = socket.recv_from(&mut buf).await?; /// println!("Received {} bytes from {}", recv, peer); /// # Ok(buf) /// # } /// ``` pub fn recv_from<'socket, 'buf>( &'socket mut self, buf: &'buf mut [u8], ) -> RecvFromFuture<'socket, 'buf> { RecvFromFuture { buf, socket: self } } /// Gets the value of the `SO_BROADCAST` option for this socket. /// /// For more information about this option, see [`set_broadcast`]. /// /// [`set_broadcast`]: #method.set_broadcast pub fn broadcast(&self) -> io::Result<bool> { self.inner.broadcast() } /// Sets the value of the `SO_BROADCAST` option for this socket. /// /// When enabled, this socket is allowed to send packets to a broadcast /// address. pub fn set_broadcast(&self, on: bool) -> io::Result<()> { self.inner.set_broadcast(on) } /// Gets the value of the `IP_MULTICAST_LOOP` option for this socket. /// /// For more information about this option, see [`set_multicast_loop_v4`]. /// /// [`set_multicast_loop_v4`]: #method.set_multicast_loop_v4 pub fn multicast_loop_v4(&self) -> io::Result<bool> { self.inner.multicast_loop_v4() } /// Sets the value of the `IP_MULTICAST_LOOP` option for this socket. /// /// If enabled, multicast packets will be looped back to the local socket. /// /// # Note /// /// This may not have any affect on IPv6 sockets. pub fn set_multicast_loop_v4(&self, on: bool) -> io::Result<()> { self.inner.set_multicast_loop_v4(on) } /// Gets the value of the `IP_MULTICAST_TTL` option for this socket. /// /// For more information about this option, see [`set_multicast_ttl_v4`]. 
/// /// [`set_multicast_ttl_v4`]: #method.set_multicast_ttl_v4 pub fn multicast_ttl_v4(&self) -> io::Result<u32> { self.inner.multicast_ttl_v4() } /// Sets the value of the `IP_MULTICAST_TTL` option for this socket. /// /// Indicates the time-to-live value of outgoing multicast packets for /// this socket. The default value is 1 which means that multicast packets /// don't leave the local network unless explicitly requested. /// /// # Note /// /// This may not have any affect on IPv6 sockets. pub fn set_multicast_ttl_v4(&self, ttl: u32) -> io::Result<()> { self.inner.set_multicast_ttl_v4(ttl) } /// Gets the value of the `IPV6_MULTICAST_LOOP` option for this socket. /// /// For more information about this option, see [`set_multicast_loop_v6`]. /// /// [`set_multicast_loop_v6`]: #method.set_multicast_loop_v6 pub fn multicast_loop_v6(&self) -> io::Result<bool> { self.inner.multicast_loop_v6() } /// Sets the value of the `IPV6_MULTICAST_LOOP` option for this socket. /// /// Controls whether this socket sees the multicast packets it sends itself. /// /// # Note /// /// This may not have any affect on IPv4 sockets. pub fn set_multicast_loop_v6(&self, on: bool) -> io::Result<()> { self.inner.set_multicast_loop_v6(on) } /// Gets the value of the `IP_TTL` option for this socket. /// /// For more information about this option, see [`set_ttl`]. /// /// [`set_ttl`]: #method.set_ttl pub fn ttl(&self) -> io::Result<u32> { self.inner.ttl() } /// Sets the value for the `IP_TTL` option on this socket. /// /// This value sets the time-to-live field that is used in every packet sent /// from this socket. pub fn set_ttl(&self, ttl: u32) -> io::Result<()> { self.inner.set_ttl(ttl) } /// Executes an operation of the `IP_ADD_MEMBERSHIP` type. /// /// This function specifies a new multicast group for this socket to join. The address must be /// a valid multicast address, and `interface` is the address of the local interface with which /// the system should join the multicast group. 
If it's equal to `INADDR_ANY` then an /// appropriate interface is chosen by the system. /// /// # Examples /// /// ```rust,no_run /// use runtime::net::UdpSocket; /// use std::net::Ipv4Addr; /// /// # #[runtime::main] /// # async fn main() -> Result<(), Box<dyn std::error::Error + Send + Sync + 'static>> { /// let interface = Ipv4Addr::new(0, 0, 0, 0); /// let mdns_addr = Ipv4Addr::new(224, 0, 0, 123); /// /// let socket = UdpSocket::bind("127.0.0.1:0")?; /// socket.join_multicast_v4(&mdns_addr, &interface)?; /// # Ok(()) } /// ``` pub fn join_multicast_v4(&self, multiaddr: &Ipv4Addr, interface: &Ipv4Addr) -> io::Result<()> { self.inner.join_multicast_v4(multiaddr, interface) } /// Executes an operation of the `IPV6_ADD_MEMBERSHIP` type. /// /// This function specifies a new multicast group for this socket to join. The address must be /// a valid multicast address, and `interface` is the index of the interface to join/leave (or /// 0 to indicate any interface). /// /// # Examples /// /// ```rust,no_run /// use runtime::net::UdpSocket; /// use std::net::{Ipv6Addr, SocketAddr}; /// /// # #[runtime::main] /// # async fn main() -> Result<(), Box<dyn std::error::Error + Send + Sync + 'static>> { /// let socket_addr = SocketAddr::new(Ipv6Addr::new(0, 0, 0, 0, 0, 0, 0, 0).into(), 0); /// let mdns_addr = Ipv6Addr::new(0xFF02, 0, 0, 0, 0, 0, 0, 0x0123) ; /// let socket = UdpSocket::bind(&socket_addr)?; /// /// socket.join_multicast_v6(&mdns_addr, 0)?; /// # Ok(()) } /// ``` pub fn join_multicast_v6(&self, multiaddr: &Ipv6Addr, interface: u32) -> io::Result<()> { self.inner.join_multicast_v6(multiaddr, interface) } /// Executes an operation of the `IP_DROP_MEMBERSHIP` type. /// /// For more information about this option, see [`join_multicast_v4`]. 
/// /// [`join_multicast_v4`]: #method.join_multicast_v4 pub fn leave_multicast_v4(&self, multiaddr: &Ipv4Addr, interface: &Ipv4Addr) -> io::Result<()> { self.inner.leave_multicast_v4(multiaddr, interface) } /// Executes an operation of the `IPV6_DROP_MEMBERSHIP` type. /// /// For more information about this option, see [`join_multicast_v6`]. /// /// [`join_multicast_v6`]: #method.join_multicast_v6 pub fn leave_multicast_v6(&self, multiaddr: &Ipv6Addr, interface: u32) -> io::Result<()> { self.inner.leave_multicast_v6(multiaddr, interface) } } /// The future returned by [`UdpSocket::send_to`]. /// /// On success, returns the number of bytes written. /// /// [`UdpSocket::send_to`]: struct.UdpSocket.html#method.send_to #[must_use = "futures do nothing unless you `.await` or poll them"] #[derive(Debug)] pub struct SendToFuture<'socket, 'buf> { /// The open socket we use to send the message from. socket: &'socket mut UdpSocket, /// The message we're trying to send. buf: &'buf [u8], /// The address we'll try to connect to. addr: Option<io::Result<SocketAddr>>, } impl<'socket, 'buf> Future for SendToFuture<'socket, 'buf> { type Output = io::Result<usize>; fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> { let SendToFuture { socket, buf, addr } = &mut *self; let addr = match addr.take() { Some(addr) => addr?, None => { let err_msg = "no addresses to send data to"; let err = io::Error::new(io::ErrorKind::InvalidInput, err_msg); return Poll::Ready(Err(err)); } }; let poll = socket.inner.as_mut().poll_send_to(cx, buf, &addr); self.addr = Some(Ok(addr)); poll } } /// The future returned by [`UdpSocket::recv_from`]. /// /// On success, returns the number of bytes read and the origin. 
/// /// [`UdpSocket::recv_from`]: struct.UdpSocket.html#method.recv_from #[must_use = "futures do nothing unless you `.await` or poll them"] #[derive(Debug)] pub struct RecvFromFuture<'socket, 'buf> { socket: &'socket mut UdpSocket, buf: &'buf mut [u8], } impl<'socket, 'buf> Future for RecvFromFuture<'socket, 'buf> { type Output = io::Result<(usize, SocketAddr)>; fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> { let RecvFromFuture { socket, buf } = &mut *self; socket.inner.as_mut().poll_recv_from(cx, buf) } } #[cfg(unix)] mod sys { use super::UdpSocket; use std::os::unix::prelude::*; impl AsRawFd for UdpSocket { fn as_raw_fd(&self) -> RawFd { self.inner.as_raw_fd() } } }
true
3bc002fd563a6e3f5c744b3c30fd1e3632989fae
Rust
tailhook/nginx-config
/src/access.rs
UTF-8
1,357
2.734375
3
[ "MIT", "Apache-2.0" ]
permissive
use std::net::IpAddr; use combine::{Parser}; use combine::{choice}; use combine::error::StreamError; use combine::easy::Error; use ast::{Item, Source}; use helpers::{semi, ident, string}; use tokenizer::{TokenStream, Token}; fn parse_source<'a>(val: Token<'a>) -> Result<Source, Error<Token<'a>, Token<'a>>> { let value = val.value; if value == "all" { return Ok(Source::All); } else if value == "unix:" { return Ok(Source::Unix); } let mut pair = value.splitn(2, '/'); let addr = pair.next().unwrap().parse::<IpAddr>()?; if let Some(net) = pair.next() { let subnet = net.parse::<u8>() .map_err(|e| Error::unexpected_message( format!("invalid subnet: {}", e)))?; return Ok(Source::Network(addr, subnet)); } else { return Ok(Source::Ip(addr)); } } fn allow<'a>() -> impl Parser<Output=Item, Input=TokenStream<'a>> { ident("allow") .with(string()) .and_then(parse_source) .skip(semi()) .map(Item::Allow) } fn deny<'a>() -> impl Parser<Output=Item, Input=TokenStream<'a>> { ident("deny") .with(string()) .and_then(parse_source) .skip(semi()) .map(Item::Deny) } pub fn directives<'a>() -> impl Parser<Output=Item, Input=TokenStream<'a>> { choice(( allow(), deny(), )) }
true
480b1772af8f8d135649b852eb74be8e9e2e82fa
Rust
haetze/DemoRust
/projects/schiffeversenken/src/main.rs
UTF-8
3,630
2.9375
3
[]
no_license
mod data;

extern crate futures_await as futures;
#[macro_use]
extern crate tokio_core;

use std::{env, io};
use std::net::SocketAddr;

use futures::{Future, Poll};
use tokio_core::net::UdpSocket;
use tokio_core::reactor::Core;

// UDP matchmaking server: tracks a list of players (name -> IP) and answers
// space-separated text commands: "available <name>", "request_list",
// "request_player <name>".
struct Server {
    socket: UdpSocket,
    buf: Vec<u8>,
    // Pending datagram (length, sender) still to be answered; `None` when
    // we are ready to receive the next one.
    to_send: Option<(usize, SocketAddr)>,
    playerlist: data::Playerlist,
}

impl Future for Server {
    type Item = ();
    type Error = io::Error;

    // Single state machine: answer the pending datagram (if any), then try
    // to receive the next one. `try_nb!` returns NotReady on WouldBlock, so
    // re-entry resumes with the same `to_send` still set.
    fn poll(&mut self) -> Poll<(), io::Error> {
        loop {
            if let Some((size, peer)) = self.to_send {
                // Split the datagram on spaces into UTF-8 words.
                // NOTE(review): `unwrap` panics on non-UTF-8 input — confirm
                // clients are trusted, otherwise this kills the server.
                let words: Vec<_> = self.buf[..size].split(|e| *e == b' ')
                    .map(|b| String::from_utf8(b.to_vec()).unwrap())
                    .collect();
                println!("{:?}", words);
                match &words[0].trim() as &str {
                    // Register a player name; "226" presumably means
                    // "name taken", "200" success — confirm protocol codes.
                    "available" => {
                        if self.playerlist.exists(&words[1]) {
                            try_nb!(self.socket.send_to(b"226\n", &peer));
                        } else {
                            let ip_string = format!("{}", peer.ip());
                            self.playerlist.add(&words[1].trim().to_string(), &ip_string);
                            try_nb!(self.socket.send_to(b"200", &peer));
                        }
                    },
                    // Send "201 <count>" followed by one datagram per name.
                    "request_list" => {
                        let mut s = format!("201 {}", self.playerlist.list.len());
                        try_nb!(self.socket.send_to(s.as_bytes(), &peer));
                        for player in &self.playerlist.list {
                            let s = format!("{}", player.name);
                            try_nb!(self.socket.send_to(s.as_bytes(), &peer));
                        }
                    },
                    // Look up a player's IP: "202 <ip>" or "404" if unknown.
                    "request_player" => {
                        match self.playerlist.find(&words[1].trim().to_string()) {
                            None => {
                                try_nb!(self.socket.send_to(b"404", &peer));
                            },
                            Some(player) => {
                                let s = format!("202 {}", player.ip);
                                println!("{}", s);
                                try_nb!(self.socket.send_to(s.as_bytes(), &peer));
                            },
                        };
                    },
                    _ => {
                        try_nb!(self.socket.send_to(b"no match", &peer));
                    },
                };
                self.to_send = None;
            }

            // If we're here then `to_send` is `None`, so we take a look for the
            // next message we're going to echo back.
            self.to_send = Some(try_nb!(self.socket.recv_from(&mut self.buf)));
        }
    }
}

fn main() {
    println!("Hello, world!");
    // Listen address from argv[1], defaulting to a LAN address.
    let addr = env::args().nth(1).unwrap_or("10.0.0.12:8080".to_string());
    let addr = addr.parse::<SocketAddr>().unwrap();

    // Create the event loop that will drive this server, and also bind the
    // socket we'll be listening to.
    let mut l = Core::new().unwrap();
    let handle = l.handle();
    let socket = UdpSocket::bind(&addr, &handle).unwrap();
    println!("Listening on: {}", socket.local_addr().unwrap());

    // Next we'll create a future to spawn (the one we defined above) and then
    // we'll run the event loop by running the future.
    l.run(Server {
        socket: socket,
        buf: vec![0; 1024],
        to_send: None,
        playerlist: data::Playerlist::new(),
    }).unwrap();
}
true
503932c72ce6cb1aeb016e0774f942e22a912002
Rust
wartyz/spectrum
/src/hardware.rs
UTF-8
2,783
3.03125
3
[]
no_license
#![allow(non_snake_case)]

// Screen dimensions in pixels (at 2x scale) for each supported machine.
pub const ANCHO_PANTALLA_GB: usize = 160 * 2;
pub const ALTO_PANTALLA_GB: usize = 144 * 2;
pub const ANCHO_PANTALLA_Z80: usize = 0;
pub const ALTO_PANTALLA_Z80: usize = 0;

use crate::cpu::CPU;
use crate::procesador::PROCESADOR;

// Here the hardware's interactions with the memory and the CPU are defined.

// Which machine-specific hardware path to run each tick.
#[derive(Copy, Clone)]
pub enum CHIPSET {
    ChipSetZ80,
    ChipSetSharpLr35902,
}

// Z80-specific hardware state (none yet; placeholder).
struct HwZ80 {}

impl HwZ80 {
    pub fn new() -> HwZ80 {
        HwZ80 {}
    }
}

// Sharp LR35902 (Game Boy) hardware state.
struct HwSharpLr35902 {
    // Current LCD scanline counter, mirrored into memory at 0xFF44 (LY).
    ly: u8,
}

impl HwSharpLr35902 {
    pub fn new() -> HwSharpLr35902 {
        HwSharpLr35902 {
            ly: 0,
        }
    }
}

// Owns the pixel viewport and dispatches per-machine hardware emulation.
pub struct Hardware {
    chipset: CHIPSET,
    ancho_pixels: usize,
    alto_pixels: usize,
    hw_z80: HwZ80,
    hw_sharp_lr_35902: HwSharpLr35902,
    viewport: Vec<u32>,
}

impl Hardware {
    // Builds the hardware variant matching the CPU's processor type.
    pub fn new(cpu: &CPU) -> Hardware {
        match cpu.procesador {
            PROCESADOR::SharpLr35902 => {
                Hardware {
                    chipset: CHIPSET::ChipSetSharpLr35902,
                    ancho_pixels: ANCHO_PANTALLA_GB,
                    alto_pixels: ALTO_PANTALLA_GB,
                    hw_z80: HwZ80::new(),
                    hw_sharp_lr_35902: HwSharpLr35902::new(),
                    // NOTE(review): filled with 60 here but 0 for the Z80 —
                    // presumably an arbitrary initial pixel value; confirm.
                    viewport: vec![60; ANCHO_PANTALLA_GB * ALTO_PANTALLA_GB],
                }
            }
            PROCESADOR::Z80 => {
                Hardware {
                    chipset: CHIPSET::ChipSetZ80,
                    ancho_pixels: ANCHO_PANTALLA_Z80,
                    alto_pixels: ALTO_PANTALLA_Z80,
                    hw_z80: HwZ80::new(),
                    hw_sharp_lr_35902: HwSharpLr35902::new(),
                    viewport: vec![0; ANCHO_PANTALLA_Z80 * ALTO_PANTALLA_Z80],
                }
            }
            // Unreachable while PROCESADOR has only these two variants.
            _ => panic!("Procesador no reconocido en hardware")
        }
    }

    pub fn get_viewport(&mut self) -> &Vec<u32> {
        &self.viewport
    }

    pub fn get_ancho_pixels(&mut self) -> usize {
        self.ancho_pixels
    }

    pub fn get_alto_pixels(&mut self) -> usize {
        self.alto_pixels
    }

    // Run one hardware step for the machine selected at construction time.
    pub fn ejecuta_hardware(&mut self, cpu: &mut CPU) {
        match self.chipset {
            CHIPSET::ChipSetSharpLr35902 => self.hardware_GB(cpu),
            CHIPSET::ChipSetZ80 => self.hardware_Z80(cpu),
            // Unreachable while CHIPSET has only these two variants.
            _ => panic!("Chipset no reconocido en hardware")
        };
    }

    // Game Boy hardware step: advance the scanline counter roughly every
    // 20 CPU ticks, wrap after line 153, and mirror it into 0xFF44 (LY).
    // NOTE(review): the %20 cadence looks approximate — confirm against
    // real LCD timing.
    pub fn hardware_GB(&mut self, cpu: &mut CPU) {
        if cpu.t % 20 == 1 {
            self.hw_sharp_lr_35902.ly += 1;
        }

        if self.hw_sharp_lr_35902.ly == 154 {
            self.hw_sharp_lr_35902.ly = 0;
        }

        cpu.mem.escribe_byte_en_mem(0xFF44, self.hw_sharp_lr_35902.ly);
    }

    // Z80 hardware step: not implemented yet (stub).
    pub fn hardware_Z80(&mut self, cpu: &mut CPU) {}
}
true
2cca767cd3af9d032ddd1ee61ebae0aa70068b79
Rust
cnsr/rust-snake
/src/lib/snake.rs
UTF-8
1,728
2.9375
3
[]
no_license
use crate::lib::types::{Cell, SnakeHead, Grid, SnakeSegment, Food}; use crate::lib::grid_init; extern crate rand; use rand::Rng; pub fn change_grid(mut grid: Grid, head: &SnakeHead, dimensions: (u32, u32)) -> Grid{ let color: Cell = Cell { red: head.color.red, green: head.color.green, blue: head.color.green, }; grid.grid[head.row as usize][head.column as usize] = color; grid } pub fn init_snake(direction: (i32, i32)) -> SnakeHead { let mut head: SnakeHead = SnakeHead { row: 24, column: 24, color: Cell {red: 255_u8, green: 0_u8, blue: 0_u8}, len: 24, // if > 26 will panic body: Vec::new(), direction: direction, }; head.init(); head } pub fn init_food() -> Food { let mut rng = rand::thread_rng(); let mut food: Food = Food { score: 1, row: rng.gen_range(0, 60), col: rng.gen_range(0, 60), color: Cell {red: 0_u8, green: 255_u8, blue: 0_u8}, }; if !(food.row % 2 == 0) { food.row += 1; } if !(food.col % 2 == 0) { food.col += 1; } food } pub fn draw_body(head: &mut SnakeHead, mut grid: Grid, direction: (i32, i32)) -> Grid { grid.reset(); let cell_color: Cell = Cell{red: 0_u8, green: 196_u8, blue: 0_u8}; let mut prev: (i32, i32); for x in (0..head.len).rev() { if x >= 1 { prev = (head.body[x as usize].row, head.body[x as usize].column); } else { prev = (head.row, head.column); } grid.grid[prev.0 as usize][prev.1 as usize] = head.body[x as usize].color; } grid } pub fn draw_food(food: &mut Food, mut grid: Grid) -> Grid { food.draw(grid) }
true
6fd0b7fc0582e1b37808ee6481aee9fef1fd634d
Rust
RonaldColyar/RustOOPParadigmTest
/src/main.rs
UTF-8
1,150
3.5625
4
[]
no_license
/// A basic account record holding credentials and contact details.
struct User {
    username: String,
    password: String,
    display_name: String,
    phone_number: String,
    age: String,
}

impl User {
    /// Construct a `User` from borrowed string slices, storing owned copies.
    pub fn new(user: &str, password: &str, display_name: &str, pn: &str, age: &str) -> User {
        User {
            username: user.to_owned(),
            password: password.to_owned(),
            display_name: display_name.to_owned(),
            phone_number: pn.to_owned(),
            age: age.to_owned(),
        }
    }

    /// Replace the display name.
    pub fn set_name(&mut self, new_name: &str) {
        self.display_name = new_name.to_owned();
    }

    /// Replace the password.
    pub fn change_password(&mut self, new_password: &str) {
        self.password = new_password.to_owned();
    }

    /// Replace the username.
    pub fn change_username(&mut self, new_username: &str) {
        self.username = new_username.to_owned();
    }

    /// Replace the phone number.
    pub fn change_phone_number(&mut self, new_num: &str) {
        self.phone_number = new_num.to_owned();
    }
}

fn main() {
    let mut data: User = User::new(
        "ron",
        "000",
        "theking",
        "1223234332",
        "45",
    );
}
true
c94b604504157bc046abe9f36e1535588c2ae0e5
Rust
maxtnuk/My_workspace
/rust/do_myself/sqrt/src/main.rs
UTF-8
135
3.015625
3
[]
no_license
fn main() {
    // Hypotenuse of a 55-by-40 right triangle: sum the squares, then root.
    let a: f64 = 55.0;
    let b: f64 = 40.0;
    let sum_of_squares = a * a + b * b;
    println!("result is {}", sum_of_squares.sqrt());
}
true
945466e3f183e6d66f17e37bde03231285a2c29e
Rust
kjn-void/advent-of-code-2019
/src/day18/mod.rs
UTF-8
8,246
2.84375
3
[ "MIT" ]
permissive
use super::vec2d::*;
use super::Solution;
use std::cmp::Ordering;
use std::collections::{BinaryHeap, HashMap, HashSet, VecDeque};

// Keys and doors are encoded as one-hot bits in a `u32`:
// `1 << (letter - 'a')` for keys and `1 << (letter - 'A')` for doors.

type Walls = HashSet<Vec2D>;
type Map = HashMap<Vec2D, u32>;
type KeyDistMatrix = HashMap<u32, Vec<KeyDistance>>;

// One search state: a robot standing on a key with a set of collected keys.
#[derive(Debug)]
struct Robot {
    at_key: u32,    // key the robot is currently standing at
    have_keys: u32, // bitfield, bit 1 << (key - 'a') is set if the key is collected
    steps: Distance,
}

// The comparison impls below look only at `steps`, and `Ord`/`PartialOrd`
// are deliberately reversed (`other` compared against `self`) so that the
// max-heap `BinaryHeap<Robot>` pops the robot with the *fewest* steps
// first, i.e. acts as a Dijkstra-style min-priority queue. `PartialEq`
// matches this: two robots are "equal" when their step counts are equal.
impl Eq for Robot {}

impl PartialEq for Robot {
    fn eq(&self, other: &Self) -> bool {
        self.steps == other.steps
    }
}

impl Ord for Robot {
    fn cmp(&self, other: &Self) -> Ordering {
        other.steps.cmp(&self.steps)
    }
}

impl PartialOrd for Robot {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(other.steps.cmp(&self.steps))
    }
}

// A precomputed edge in the key-to-key distance graph.
#[derive(Debug)]
struct KeyDistance {
    steps: Distance,
    to: u32,    // key at the end of this edge
    keys: u32,  // bitfield, 1 << (key - 'a') is set if the key is along the path
    doors: u32, // bitfield, 1 << (door - 'A') is set if the door is along the path
}

// True when bitfield `item_bag` contains (any bit of) `item`.
fn has_item(item: u32, item_bag: u32) -> bool {
    (item & item_bag) != 0
}

// Lower-case map tiles are keys...
fn is_key(tile: char) -> bool {
    tile.is_ascii_lowercase()
}

fn key(tile: char) -> u32 {
    1 << (tile as usize - 'a' as usize) as u32
}

// ...and upper-case tiles are the matching doors.
fn is_door(tile: char) -> bool {
    tile.is_ascii_uppercase()
}

fn door(tile: char) -> u32 {
    1 << (tile as u32 - 'A' as u32)
}

// Returns the distance from 'key' to all other keys and what doors are along
// the route.
fn distances(origin: Vec2D, walls: &Walls, keys: &Map, doors: &Map) -> Vec<KeyDistance> { let mut visited = HashSet::new(); let mut queue = VecDeque::new(); let mut edges = Vec::new(); queue.push_back((origin, 0, 0, 0)); while let Some((pos, steps, have_keys, seen_doors)) = queue.pop_front() { visited.insert(pos); let candidates = [UP, DOWN, LEFT, RIGHT] .iter() .map(|&dir| pos + dir) .filter(|new_pos| !visited.contains(new_pos) && !walls.contains(new_pos)); for new_pos in candidates { let new_key = *keys.get(&new_pos).unwrap_or(&0); let new_steps = steps + 1; if new_key != 0 { edges.push(KeyDistance { to: new_key, steps: new_steps, keys: have_keys, doors: seen_doors, }); } queue.push_back(( new_pos, new_steps, have_keys | new_key, seen_doors | *doors.get(&new_pos).unwrap_or(&0), )) } } edges } fn dist_matrix(walls: &Walls, keys: &Map, doors: &Map) -> KeyDistMatrix { let mut hm = KeyDistMatrix::new(); for (&pos, &key) in keys { hm.insert(key, distances(pos, walls, keys, doors)); } hm } fn push_reachables(pqueue: &mut BinaryHeap<Robot>, robot: &Robot, edges: &KeyDistMatrix) { for edge in edges.get(&robot.at_key).unwrap() { let visited = has_item(edge.to, robot.have_keys); let can_unlock_doors = (!robot.have_keys & edge.doors) == 0; let all_keys_collected = (!robot.have_keys & edge.keys) == 0; let new_steps = edge.steps + robot.steps; if !visited && can_unlock_doors && all_keys_collected { pqueue.push(Robot { at_key: edge.to, have_keys: robot.have_keys | edge.to, steps: new_steps, }); } } } fn collect_keys(start_pos: Vec2D, walls: &Walls, keys: &Map, doors: &Map) -> Distance { let all_keys = keys.values().fold(0, |keys, &key| keys | key); let dists = dist_matrix(walls, keys, doors); let mut pqueue = BinaryHeap::new(); let mut duplicate_route = HashSet::new(); // Add all keys reachable from the start position for edge in distances(start_pos, walls, keys, doors) { if edge.doors == 0 && edge.keys == 0 { pqueue.push(Robot { at_key: edge.to, have_keys: edge.to, steps: 
edge.steps, }); } } let mut min_steps = std::u32::MAX; while let Some(robot) = pqueue.pop() { if robot.steps > min_steps { break; } if robot.have_keys == all_keys { min_steps = robot.steps; } else if duplicate_route.insert((robot.at_key, robot.have_keys)) { push_reachables(&mut pqueue, &robot, &dists); } } min_steps } impl Solution for Day18 { fn part1(&self) -> String { collect_keys(self.start_pos, &self.walls, &self.keys, &self.doors).to_string() } fn part2(&self) -> String { let mut tot_distance = 0; // Shift map so that the original start position is at (0, 0), one // submap per quadrant let mut walls = self .walls .iter() .clone() .map(|&pos| pos - self.start_pos) .collect::<Walls>(); walls.extend(&[UP, DOWN, LEFT, RIGHT]); for &start_pos in &[UP + LEFT, UP + RIGHT, DOWN + LEFT, DOWN + RIGHT] { let keys = self .keys .clone() .into_iter() .map(|(pos, key)| (pos - self.start_pos, key)) .filter(|(pos, _)| pos.x() * start_pos.x() > 0 && pos.y() * start_pos.y() > 0) .collect::<Map>(); let doors = self .doors .clone() .into_iter() .map(|(pos, door)| (pos - self.start_pos, door)) .filter(|(pos, _)| pos.x() * start_pos.x() > 0 && pos.y() * start_pos.y() > 0) .filter(|(_, door)| keys.values().find(|&key| key == door) != None) .collect::<Map>(); tot_distance += collect_keys(start_pos, &walls, &keys, &doors); } tot_distance.to_string() } } // State required to solve day 18 pub struct Day18 { start_pos: Vec2D, walls: Walls, keys: Map, doors: Map, } pub fn solution(lines: Vec<&str>) -> Box<dyn Solution> { let mut keys = Map::new(); let mut doors = Map::new(); let mut walls = Walls::new(); let mut start_pos = None; for (y, line) in lines.iter().enumerate() { for (x, tile) in line.chars().enumerate() { let pos = Vec2D::from(x as Coord, y as Coord); if tile == '@' { start_pos = Some(pos); } else if tile == '#' { walls.insert(pos); } else if is_key(tile) { keys.insert(pos, key(tile)); } else if is_door(tile) { doors.insert(pos, door(tile)); } } } Box::new(Day18 { start_pos: 
start_pos.unwrap(), walls, keys, doors, }) } #[cfg(test)] mod tests { use super::*; #[test] fn d18_ex1() { let input = vec!["#########", "#[email protected]#", "#########"]; assert_eq!(solution(input).part1(), "8"); } #[test] fn d18_ex2() { let input = vec![ "########################", "#[email protected].#", "######################.#", "#d.....................#", "########################", ]; assert_eq!(solution(input).part1(), "86"); } #[test] fn d18_ex3() { let input = vec![ "#################", "#i.G..c...e..H.p#", "########.########", "#j.A..b...f..D.o#", "########@########", "#k.E..a...g..B.n#", "########.########", "#l.F..d...h..C.m#", "#################", ]; assert_eq!(solution(input).part1(), "136"); } #[test] fn d18_ex4() { let input = vec![ "########################", "#@..............ac.GI.b#", "###d#e#f################", "###A#B#C################", "###g#h#i################", "########################", ]; assert_eq!(solution(input).part1(), "81"); } }
true
8d18634be29e8396d34c9bb63f54c0195cc5ab79
Rust
alan-signal/utils
/pkcs8/src/traits.rs
UTF-8
8,993
2.921875
3
[ "MIT", "Apache-2.0", "LicenseRef-scancode-unknown-license-reference" ]
permissive
//! Traits for parsing objects from PKCS#8 encoded documents use crate::{PrivateKeyInfo, Result, SubjectPublicKeyInfo}; use core::convert::TryFrom; #[cfg(feature = "alloc")] use crate::{PrivateKeyDocument, PublicKeyDocument}; #[cfg(feature = "encryption")] use { crate::{EncryptedPrivateKeyDocument, EncryptedPrivateKeyInfo}, rand_core::{CryptoRng, RngCore}, }; #[cfg(feature = "pem")] use alloc::string::String; #[cfg(feature = "std")] use std::path::Path; #[cfg(any(feature = "pem", feature = "std"))] use zeroize::Zeroizing; /// Parse a private key object from a PKCS#8 encoded document. pub trait FromPrivateKey: Sized { /// Parse the [`PrivateKeyInfo`] from a PKCS#8-encoded document. fn from_pkcs8_private_key_info(private_key_info: PrivateKeyInfo<'_>) -> Result<Self>; /// Deserialize PKCS#8 private key from ASN.1 DER-encoded data /// (binary format). fn from_pkcs8_der(bytes: &[u8]) -> Result<Self> { Self::from_pkcs8_private_key_info(PrivateKeyInfo::try_from(bytes)?) } /// Deserialize encrypted PKCS#8 private key from ASN.1 DER-encoded data /// (binary format) and attempt to decrypt it using the provided password. #[cfg(feature = "encryption")] #[cfg_attr(docsrs, doc(cfg(feature = "encryption")))] fn from_pkcs8_encrypted_der(bytes: &[u8], password: impl AsRef<[u8]>) -> Result<Self> { EncryptedPrivateKeyInfo::try_from(bytes)? .decrypt(password) .and_then(|doc| Self::from_pkcs8_doc(&doc)) } /// Deserialize PKCS#8 private key from a [`PrivateKeyDocument`]. #[cfg(feature = "alloc")] #[cfg_attr(docsrs, doc(cfg(feature = "alloc")))] fn from_pkcs8_doc(doc: &PrivateKeyDocument) -> Result<Self> { Self::from_pkcs8_private_key_info(doc.private_key_info()) } /// Deserialize PKCS#8-encoded private key from PEM. 
/// /// Keys in this format begin with the following delimiter: /// /// ```text /// -----BEGIN PRIVATE KEY----- /// ``` #[cfg(feature = "pem")] #[cfg_attr(docsrs, doc(cfg(feature = "pem")))] fn from_pkcs8_pem(s: &str) -> Result<Self> { PrivateKeyDocument::from_pem(s).and_then(|doc| Self::from_pkcs8_doc(&doc)) } /// Deserialize encrypted PKCS#8-encoded private key from PEM and attempt /// to decrypt it using the provided password. /// /// Keys in this format begin with the following delimiter: /// /// ```text /// -----BEGIN ENCRYPTED PRIVATE KEY----- /// ``` #[cfg(all(feature = "encryption", feature = "pem"))] #[cfg_attr(docsrs, doc(cfg(feature = "encryption")))] #[cfg_attr(docsrs, doc(cfg(feature = "pem")))] fn from_pkcs8_encrypted_pem(s: &str, password: impl AsRef<[u8]>) -> Result<Self> { EncryptedPrivateKeyDocument::from_pem(s)? .decrypt(password) .and_then(|doc| Self::from_pkcs8_doc(&doc)) } /// Load PKCS#8 private key from an ASN.1 DER-encoded file on the local /// filesystem (binary format). #[cfg(feature = "std")] #[cfg_attr(docsrs, doc(cfg(feature = "std")))] fn read_pkcs8_der_file(path: impl AsRef<Path>) -> Result<Self> { PrivateKeyDocument::read_der_file(path).and_then(|doc| Self::from_pkcs8_doc(&doc)) } /// Load PKCS#8 private key from a PEM-encoded file on the local filesystem. #[cfg(all(feature = "pem", feature = "std"))] #[cfg_attr(docsrs, doc(cfg(feature = "pem")))] #[cfg_attr(docsrs, doc(cfg(feature = "std")))] fn read_pkcs8_pem_file(path: impl AsRef<Path>) -> Result<Self> { PrivateKeyDocument::read_pem_file(path).and_then(|doc| Self::from_pkcs8_doc(&doc)) } } /// Parse a public key object from an encoded SPKI document. pub trait FromPublicKey: Sized { /// Parse [`SubjectPublicKeyInfo`] into a public key object. fn from_spki(spki: SubjectPublicKeyInfo<'_>) -> Result<Self>; /// Deserialize object from ASN.1 DER-encoded [`SubjectPublicKeyInfo`] /// (binary format). 
fn from_public_key_der(bytes: &[u8]) -> Result<Self> { Self::from_spki(SubjectPublicKeyInfo::try_from(bytes)?) } /// Deserialize PKCS#8 private key from a [`PrivateKeyDocument`]. #[cfg(feature = "alloc")] #[cfg_attr(docsrs, doc(cfg(feature = "alloc")))] fn from_public_key_doc(doc: &PublicKeyDocument) -> Result<Self> { Self::from_spki(doc.spki()) } /// Deserialize PEM-encoded [`SubjectPublicKeyInfo`]. /// /// Keys in this format begin with the following delimiter: /// /// ```text /// -----BEGIN PUBLIC KEY----- /// ``` #[cfg(feature = "pem")] #[cfg_attr(docsrs, doc(cfg(feature = "pem")))] fn from_public_key_pem(s: &str) -> Result<Self> { PublicKeyDocument::from_pem(s).and_then(|doc| Self::from_public_key_doc(&doc)) } /// Load public key object from an ASN.1 DER-encoded file on the local /// filesystem (binary format). #[cfg(feature = "std")] #[cfg_attr(docsrs, doc(cfg(feature = "std")))] fn read_public_key_der_file(path: impl AsRef<Path>) -> Result<Self> { PublicKeyDocument::read_der_file(path).and_then(|doc| Self::from_public_key_doc(&doc)) } /// Load public key object from a PEM-encoded file on the local filesystem. #[cfg(all(feature = "pem", feature = "std"))] #[cfg_attr(docsrs, doc(cfg(feature = "pem")))] #[cfg_attr(docsrs, doc(cfg(feature = "std")))] fn read_public_key_pem_file(path: impl AsRef<Path>) -> Result<Self> { PublicKeyDocument::read_pem_file(path).and_then(|doc| Self::from_public_key_doc(&doc)) } } /// Serialize a private key object to a PKCS#8 encoded document. #[cfg(feature = "alloc")] #[cfg_attr(docsrs, doc(cfg(feature = "alloc")))] pub trait ToPrivateKey { /// Serialize a [`PrivateKeyDocument`] containing a PKCS#8-encoded private key. fn to_pkcs8_der(&self) -> Result<PrivateKeyDocument>; /// Create an [`EncryptedPrivateKeyDocument`] containing the ciphertext of /// a PKCS#8 encoded private key encrypted under the given `password`. 
#[cfg(feature = "encryption")] #[cfg_attr(docsrs, doc(cfg(feature = "encryption")))] fn to_pkcs8_encrypted_der( &self, rng: impl CryptoRng + RngCore, password: impl AsRef<[u8]>, ) -> Result<EncryptedPrivateKeyDocument> { self.to_pkcs8_der()?.encrypt(rng, password) } /// Serialize this private key as PEM-encoded PKCS#8. #[cfg(feature = "pem")] #[cfg_attr(docsrs, doc(cfg(feature = "pem")))] fn to_pkcs8_pem(&self) -> Result<Zeroizing<String>> { Ok(self.to_pkcs8_der()?.to_pem()) } /// Serialize this private key as an encrypted PEM-encoded PKCS#8 private /// key using the `provided` to derive an encryption key. #[cfg(all(feature = "encryption", feature = "pem"))] #[cfg_attr(docsrs, doc(cfg(feature = "encryption")))] #[cfg_attr(docsrs, doc(cfg(feature = "pem")))] fn to_pkcs8_encrypted_pem( &self, rng: impl CryptoRng + RngCore, password: impl AsRef<[u8]>, ) -> Result<Zeroizing<String>> { self.to_pkcs8_encrypted_der(rng, password) .map(|key| key.to_pem()) } /// Write ASN.1 DER-encoded PKCS#8 private key to the given path #[cfg(feature = "std")] #[cfg_attr(docsrs, doc(cfg(feature = "std")))] fn write_pkcs8_der_file(&self, path: impl AsRef<Path>) -> Result<()> { self.to_pkcs8_der()?.write_der_file(path) } /// Write ASN.1 DER-encoded PKCS#8 private key to the given path #[cfg(all(feature = "pem", feature = "std"))] #[cfg_attr(docsrs, doc(cfg(feature = "pem")))] #[cfg_attr(docsrs, doc(cfg(feature = "std")))] fn write_pkcs8_pem_file(&self, path: impl AsRef<Path>) -> Result<()> { self.to_pkcs8_der()?.write_pem_file(path) } } /// Serialize a public key object to a SPKI-encoded document. #[cfg(feature = "alloc")] #[cfg_attr(docsrs, doc(cfg(feature = "alloc")))] pub trait ToPublicKey { /// Serialize a [`PublicKeyDocument`] containing a SPKI-encoded public key. fn to_public_key_der(&self) -> Result<PublicKeyDocument>; /// Serialize this public key as PEM-encoded SPKI. 
#[cfg(feature = "pem")] #[cfg_attr(docsrs, doc(cfg(feature = "pem")))] fn to_public_key_pem(&self) -> Result<String> { Ok(self.to_public_key_der()?.to_pem()) } /// Write ASN.1 DER-encoded public key to the given path #[cfg(feature = "std")] #[cfg_attr(docsrs, doc(cfg(feature = "std")))] fn write_public_key_der_file(&self, path: impl AsRef<Path>) -> Result<()> { self.to_public_key_der()?.write_der_file(path) } /// Write ASN.1 DER-encoded public key to the given path #[cfg(all(feature = "pem", feature = "std"))] #[cfg_attr(docsrs, doc(cfg(feature = "pem")))] #[cfg_attr(docsrs, doc(cfg(feature = "std")))] fn write_public_key_pem_file(&self, path: impl AsRef<Path>) -> Result<()> { self.to_public_key_der()?.write_pem_file(path) } }
true
b989aecc9a5db5df9fae62259f64aa5f046ef36a
Rust
werner/bitprim-rust
/src/hash.rs
UTF-8
1,666
3.296875
3
[]
no_license
use std::cmp::Ordering; use hex_error::HexError; #[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] #[repr(C, packed)] pub struct Hash { pub hash: [u8; 32usize], } impl Hash { /* For compatibility with opaque collection that needs a builder */ pub fn new(raw: Hash) -> Hash { raw } pub fn to_hex(&self) -> String { self.hash .iter() .rev() .map(|b| format!("{:02x}", b)) .collect::<Vec<String>>() .join("") } pub fn from_hex(s: &str) -> Result<Hash, HexError> { if s.len() != 64 { return Err(HexError::BadLength(s.len())); } let bytes = s.as_bytes(); let mut ret = [0; 32]; for i in 0..32 { let hi = match bytes[2*i] { b @ b'0'...b'9' => (b - b'0') as u8, b @ b'a'...b'f' => (b - b'a' + 10) as u8, b @ b'A'...b'F' => (b - b'A' + 10) as u8, b => return Err(HexError::BadCharacter(b as char)) }; let lo = match bytes[2*i + 1] { b @ b'0'...b'9' => (b - b'0') as u8, b @ b'a'...b'f' => (b - b'a' + 10) as u8, b @ b'A'...b'F' => (b - b'A' + 10) as u8, b => return Err(HexError::BadCharacter(b as char)) }; ret[31 - i] = hi * 0x10 + lo; } Ok(Hash { hash: ret }) } } impl Ord for Hash { fn cmp(&self, other: &Hash) -> Ordering { self.hash.cmp(&other.hash) } } impl PartialOrd for Hash { fn partial_cmp(&self, other: &Hash) -> Option<Ordering> { Some(self.cmp(other)) } }
true
dfaa688d2a0f42c99415fd90cbc29c4e63763ad5
Rust
Soveu/random
/rust/child/main.rs
UTF-8
677
2.765625
3
[]
no_license
use std::io::{self, Read}; use std::process::{Command, Stdio}; use std::thread; fn main() -> io::Result<()> { let mut child = Command::new("./test") .stdout(Stdio::piped()) .stderr(Stdio::piped()) .stdin(Stdio::piped()) .spawn()?; let mut outbuf = String::new(); let mut errbuf = String::new(); thread::sleep_ms(1000); if let Some(err) = child.stderr.as_mut() { err.read_to_string(&mut errbuf)?; } if let Some(out) = child.stdout.as_mut() { out.read_to_string(&mut outbuf)?; } println!("out={:?} err={:?}", outbuf, errbuf); println!("exit status {:?}", child.wait()?); Ok(()) }
true
0d09ff715b8023760ef175ad6e7a47f5a4ab60f1
Rust
tizianocolagrossi/RustEx
/esercizi/lifetimes2.rs
UTF-8
327
3.203125
3
[]
no_license
// Fix the compilation error by changing the create_user function input to a slice string fn create_user<'a>(name: &'a str) -> User { User { name: name } } #[derive(Debug)] struct User<'b> { name: &'b str, } fn main (){ let user = create_user(&"test"); println!("{:?}",user); }
true
b597574cdb05f9bc2fedb21bbfbbe9e9e8aa2645
Rust
gnoliyil/fuchsia
/src/graphics/lib/compute/rive-rs/src/dyn_vec.rs
UTF-8
1,471
2.75
3
[ "BSD-2-Clause" ]
permissive
// Copyright 2021 The Fuchsia Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. use std::{cell::RefCell, cmp::Ordering}; #[derive(Debug)] pub struct DynVec<T> { vec: RefCell<Vec<T>>, } impl<T> DynVec<T> { pub fn new() -> Self { Self { vec: RefCell::new(Vec::new()) } } pub fn is_empty(&self) -> bool { self.vec.borrow().is_empty() } pub fn len(&self) -> usize { self.vec.borrow().len() } pub fn push(&self, val: T) { self.vec.borrow_mut().push(val); } pub fn truncate(&self, len: usize) { self.vec.borrow_mut().truncate(len); } pub fn sort_by<F>(&self, compare: F) where F: FnMut(&T, &T) -> Ordering, { self.vec.borrow_mut().sort_by(compare); } } impl<T: Clone> DynVec<T> { pub fn iter(&self) -> DynVecIter<'_, T> { DynVecIter { vec: &self.vec, index: 0 } } pub fn index(&self, index: usize) -> T { self.vec.borrow()[index].clone() } } impl<T> Default for DynVec<T> { fn default() -> Self { Self::new() } } pub struct DynVecIter<'v, T> { vec: &'v RefCell<Vec<T>>, index: usize, } impl<T: Clone> Iterator for DynVecIter<'_, T> { type Item = T; fn next(&mut self) -> Option<Self::Item> { let val = self.vec.borrow().get(self.index).cloned(); self.index += 1; val } }
true
7132f76ffbd43df456bf51598bd87b48802996c4
Rust
kabergstrom/lonely
/benches/ring.rs
UTF-8
3,707
2.90625
3
[]
no_license
#![feature(test)] extern crate test; use lonely::HeapRing; const ELEMENTS: usize = 100_000; const NUM_PRODUCERS: usize = 4; const ELEMENTS_PER_PRODUCER: usize = ELEMENTS / NUM_PRODUCERS; #[bench] fn ring_spsc(b: &mut test::Bencher) { let buffer = HeapRing::<u32>::new(1_000); let producer_buf = producer_thread(buffer.clone()); b.iter(|| { producer_buf.push(ELEMENTS as u32); let mut num_received = 0; while num_received != ELEMENTS { if let Some(_) = buffer.pop_single() { num_received += 1; } } assert_eq!(num_received, ELEMENTS); }); producer_buf.push(0); } fn producer_thread(output_buf: HeapRing<u32>) -> HeapRing<u32> { let producer_buf = HeapRing::new(4); let to_return = producer_buf.clone(); std::thread::spawn(move || loop { let to_produce = loop { if let Some(to_produce) = producer_buf.pop_single() { if to_produce == 0 { return; } else { break to_produce; } } }; for i in 0..to_produce { loop { if output_buf.push(i).is_none() { break; } } } }); to_return.push(1); // wait for thread to start while let None = to_return.pop_single() {} to_return } #[bench] fn ring_mpsc(b: &mut test::Bencher) { let buffer = HeapRing::<u32>::new(10_000); let mut producer_buffers = Vec::new(); for _ in 0..NUM_PRODUCERS { producer_buffers.push(producer_thread(buffer.clone())); } b.iter(|| { for producer in producer_buffers.iter() { producer.push(ELEMENTS_PER_PRODUCER as u32); } let mut num_received = 0; while num_received != ELEMENTS { if let Some(_) = buffer.pop_single() { num_received += 1; } } assert_eq!(num_received, ELEMENTS); }); for producer in producer_buffers.iter() { producer.push(0); } } #[bench] fn ring_mpsc_batch_pop(b: &mut test::Bencher) { ring_mpsc_batch_pop_bench(b, 10, NUM_PRODUCERS, ELEMENTS); } #[bench] fn ring_mpsc_batch_pop_1(b: &mut test::Bencher) { ring_mpsc_batch_pop_bench(b, 100, 1, ELEMENTS); } #[bench] fn ring_mpsc_batch_pop_2(b: &mut test::Bencher) { ring_mpsc_batch_pop_bench(b, 100, 2, ELEMENTS * 2); } #[bench] fn ring_mpsc_batch_pop_4(b: &mut test::Bencher) 
{ ring_mpsc_batch_pop_bench(b, 100, 4, ELEMENTS * 4); } #[bench] fn ring_mpsc_batch_pop_8(b: &mut test::Bencher) { ring_mpsc_batch_pop_bench(b, 100, 8, ELEMENTS * 8); } fn ring_mpsc_batch_pop_bench( b: &mut test::Bencher, batch_multiple: usize, num_producers: usize, num_elements: usize, ) { let elements_per_producer = num_elements / num_producers; let buffer = HeapRing::<u32>::new(10_000); let mut producer_buffers = Vec::new(); for _ in 0..num_producers { producer_buffers.push(producer_thread(buffer.clone())); } b.iter(|| { for producer in producer_buffers.iter() { producer.push(elements_per_producer as u32); } let mut num_received = 0; let batch_size = NUM_PRODUCERS * batch_multiple; let mut value_vec = Vec::with_capacity(batch_size); while num_received != elements_per_producer * num_producers { num_received += unsafe { buffer.pop_exact(batch_size, value_vec.as_mut_ptr()) }; } assert_eq!(num_received, elements_per_producer * num_producers); }); for producer in producer_buffers.iter() { producer.push(0); } }
true
72e7e87416662f9c18df4de57ef5815494db2a7b
Rust
qbox/rust-sdk
/qiniu-c/src/upload_response.rs
UTF-8
5,362
2.734375
3
[ "Apache-2.0" ]
permissive
use crate::utils::qiniu_ng_str_t; use libc::{c_void, size_t}; use qiniu_ng::storage::uploader::UploadResponse; use std::{ mem::transmute, ptr::{copy_nonoverlapping, null_mut}, }; use tap::TapOps; /// @brief 上传响应 /// @details /// 上传响应实例对上传响应中的响应体进行封装,提供一些辅助方法。 /// 当 `qiniu_ng_upload_response_t` 使用完毕后,请务必调用 `qiniu_ng_upload_response_free()` 方法释放内存 /// @note 该结构体内部状态不可变,因此可以跨线程使用 #[repr(C)] #[derive(Copy, Clone)] pub struct qiniu_ng_upload_response_t(*mut c_void); impl Default for qiniu_ng_upload_response_t { #[inline] fn default() -> Self { Self(null_mut()) } } impl qiniu_ng_upload_response_t { #[inline] pub fn is_null(self) -> bool { self.0.is_null() } } impl From<qiniu_ng_upload_response_t> for Option<Box<UploadResponse>> { fn from(upload_response: qiniu_ng_upload_response_t) -> Self { if upload_response.is_null() { None } else { Some(unsafe { Box::from_raw(transmute(upload_response)) }) } } } impl From<Option<Box<UploadResponse>>> for qiniu_ng_upload_response_t { fn from(upload_response: Option<Box<UploadResponse>>) -> Self { upload_response .map(|upload_response| upload_response.into()) .unwrap_or_default() } } impl From<Box<UploadResponse>> for qiniu_ng_upload_response_t { fn from(upload_response: Box<UploadResponse>) -> Self { unsafe { transmute(Box::into_raw(upload_response)) } } } /// @brief 获取上传响应中的对象名称 /// @param[in] upload_response 上传响应实例 /// @retval qiniu_ng_str_t 对象名称 /// @note 这里返回的 `qiniu_ng_str_t` 有可能封装的是 `NULL`,请调用 `qiniu_ng_str_is_null()` 进行判断 /// @warning 当 `qiniu_ng_str_t` 使用完毕后,请务必调用 `qiniu_ng_str_free()` 方法释放内存 #[no_mangle] pub extern "C" fn qiniu_ng_upload_response_get_key(upload_response: qiniu_ng_upload_response_t) -> qiniu_ng_str_t { let upload_response = Option::<Box<UploadResponse>>::from(upload_response).unwrap(); unsafe { qiniu_ng_str_t::from_optional_str_unchecked(upload_response.key()) }.tap(|_| { let _ = qiniu_ng_upload_response_t::from(upload_response); }) } /// @brief 获取上传响应中的校验和字段 /// @param[in] upload_response 上传响应实例 /// 
@param[out] result_ptr 提供内存地址用于返回校验和字段,如果传入 `NULL` 表示不获取 `result_ptr`。但如果该字段存在,返回值依然是 `true`,且不影响其他字段的获取 /// @param[out] result_size 用于返回校验和字段长度,如果传入 `NULL` 表示不获取 `result_size`。但如果该字段存在,返回值依然是 `true`,且不影响其他字段的获取。该字段一般返回的是 Etag,因此长度一般会等于 `ETAG_SIZE`。如果返回 `0`,则表明该校验和字段并不存在 #[no_mangle] pub extern "C" fn qiniu_ng_upload_response_get_hash( upload_response: qiniu_ng_upload_response_t, result_ptr: *mut c_void, result_size: *mut size_t, ) { let upload_response = Option::<Box<UploadResponse>>::from(upload_response).unwrap(); if let Some(hash) = upload_response.hash().map(|hash| hash.as_bytes()) { if let Some(result_size) = unsafe { result_size.as_mut() } { *result_size = hash.len(); } if let Some(result_ptr) = unsafe { result_ptr.as_mut() } { unsafe { copy_nonoverlapping(hash.as_ptr(), result_ptr as *mut c_void as *mut u8, hash.len()) }; } } else if let Some(result_size) = unsafe { result_size.as_mut() } { *result_size = 0; } let _ = qiniu_ng_upload_response_t::from(upload_response); } /// @brief 获取上传响应的字符串 /// @param[in] upload_response 上传响应实例 /// @retval qiniu_ng_str_t 上传响应字符串,一般是 JSON 格式的 /// @warning 当 `qiniu_ng_str_t` 使用完毕后,请务必调用 `qiniu_ng_str_free()` 方法释放内存 #[no_mangle] pub extern "C" fn qiniu_ng_upload_response_get_string(upload_response: qiniu_ng_upload_response_t) -> qiniu_ng_str_t { let upload_response = Option::<Box<UploadResponse>>::from(upload_response).unwrap(); unsafe { qiniu_ng_str_t::from_string_unchecked(upload_response.to_string()) }.tap(|_| { let _ = qiniu_ng_upload_response_t::from(upload_response); }) } /// @brief 释放上传响应实例 /// @param[in,out] upload_response 上传响应实例地址,释放完毕后该实例将不再可用 #[no_mangle] pub extern "C" fn qiniu_ng_upload_response_free(upload_response: *mut qiniu_ng_upload_response_t) { if let Some(upload_response) = unsafe { upload_response.as_mut() } { let _ = Option::<Box<UploadResponse>>::from(*upload_response); *upload_response = qiniu_ng_upload_response_t::default(); } } /// @brief 判断上传响应实例是否已经被释放 /// @param[in] upload_response 上传响应实例 /// 
@retval bool 如果返回 `true` 则表示上传响应实例已经被释放,该实例不再可用 #[no_mangle] pub extern "C" fn qiniu_ng_upload_response_is_freed(upload_response: qiniu_ng_upload_response_t) -> bool { upload_response.is_null() }
true
450a09082700d3557e76c5f26305e1f234757fb2
Rust
bahamas10/vsv
/src/commands/enable_disable.rs
UTF-8
1,599
2.609375
3
[ "MIT" ]
permissive
/* * Author: Dave Eddy <[email protected]> * Date: February 15, 2022 * License: MIT */ //! `vsv enable` and `vsv disable`. use anyhow::{ensure, Result}; use yansi::{Color, Style}; use crate::config; use crate::config::Config; use crate::runit::RunitService; /// Handle `vsv enable`. pub fn do_enable(cfg: &Config) -> Result<()> { _do_enable_disable(cfg) } /// Handle `vsv enable`. pub fn do_disable(cfg: &Config) -> Result<()> { _do_enable_disable(cfg) } /// Handle `vsv enable` and `vsv disable`. fn _do_enable_disable(cfg: &Config) -> Result<()> { ensure!(!cfg.operands.is_empty(), "at least one (1) service required"); let mut had_error = false; for name in &cfg.operands { let p = cfg.svdir.join(name); let svc = RunitService::new(name, &p); print!( "{} service {}... ", cfg.mode, Style::default().bold().paint(name) ); if !svc.valid() { println!("{}", Color::Red.paint("failed! service not valid")); had_error = true; continue; } let ret = match cfg.mode { config::ProgramMode::Enable => svc.enable(), config::ProgramMode::Disable => svc.disable(), _ => unreachable!(), }; match ret { Err(err) => { had_error = true; println!("{}", Color::Red.paint(format!("failed! {}", err))); } Ok(()) => println!("{}.", Color::Green.paint("done")), }; } ensure!(!had_error, "failed to modify service(s)"); Ok(()) }
true
d403bead26789937cec73ea706e6abc3e5332587
Rust
klasnordmark/advent-of-code
/day3/src/main.rs
UTF-8
4,078
3.5625
4
[]
no_license
use std::io; enum Direction { Up, Down, Right, Left, } #[derive(PartialEq)] enum Orientation { Clockwise, Counterclockwise, Colinear, } struct Segment { dir: Direction, length: u32, } #[derive(Clone, PartialEq, Copy)] struct Point { x: i32, y: i32, } fn segments_from_line(line: String) -> Vec<Segment> { let mut result: Vec<Segment> = Vec::new(); for val in line.split(',') { let dir: Direction = match val.chars().next() { Some('U') => Direction::Up, Some('D') => Direction::Down, Some('R') => Direction::Right, Some('L') => Direction::Left, _ => { println!("Invalid direction!"); break; } }; let length: u32 = match val[1..].trim().parse() { Ok(num) => num, Err(_) => { println!("Invalid length!"); break; } }; result.push(Segment { dir, length }); } result } fn endpoints_on_segments(wire: Vec<Segment>) -> Vec<(Point, Point)> { let mut result: Vec<(Point, Point)> = Vec::new(); let mut end_point = Point { x: 0, y: 0 }; for seg in wire { let start_point = end_point.clone(); match seg.dir { Direction::Up => end_point.y = end_point.y + seg.length as i32, Direction::Down => end_point.y = end_point.y - seg.length as i32, Direction::Right => end_point.x = end_point.x + seg.length as i32, Direction::Left => end_point.x = end_point.x - seg.length as i32, } result.push((start_point, end_point)); } result } fn get_orientation(p1: Point, p2: Point, p3: Point) -> Orientation { let result = (p2.y - p1.y) * (p3.x - p2.x) - (p2.x - p1.x) * (p3.y - p2.y); if result > 0 { return Orientation::Clockwise; } else if result < 0 { return Orientation::Counterclockwise; } else { return Orientation::Colinear; } } fn find_intersection(wire_a: (Point, Point), wire_b: (Point, Point)) -> Option<Point> { let o1 = get_orientation(wire_a.0, wire_a.1, wire_b.0); let o2 = get_orientation(wire_a.0, wire_a.1, wire_b.1); let o3 = get_orientation(wire_b.0, wire_b.1, wire_a.0); let o4 = get_orientation(wire_b.0, wire_b.1, wire_a.1); if (o1 != o2) && (o3 != o4) { if wire_a.0.x == wire_a.1.x { return Some(Point { 
x: wire_a.0.x, y: wire_b.0.y, }); } else { return Some(Point { x: wire_b.0.x, y: wire_a.0.y, }); } } None } fn main() { // first parse the input lines into vectors representing the wires let mut first_string = String::new(); let mut second_string = String::new(); io::stdin() .read_line(&mut first_string) .expect("Failed to read line."); io::stdin() .read_line(&mut second_string) .expect("Failed to read line."); let first_wire = segments_from_line(first_string); let second_wire = segments_from_line(second_string); // each wire in terms of it's endpoints let first_points = endpoints_on_segments(first_wire); let second_points = endpoints_on_segments(second_wire); // find intersections let mut intersections: Vec<Point> = Vec::new(); for first_seg in first_points { let points_temp = second_points.clone(); for second_seg in points_temp { match find_intersection(first_seg, second_seg) { Some(val) => intersections.push(val), None => continue, } } } // find common point with smallest Manhattan distance from origin let mut smallest_distance = std::i32::MAX; for pt in intersections { let distance = pt.x.abs() + pt.y.abs(); if distance < smallest_distance { smallest_distance = distance; } } println!("The smallest distance is: {}", smallest_distance); }
true
7edf43654063b1a7118d61c9ae203f405a051e75
Rust
JoshFourie/fft-rs
/src/two_radix/node.rs
UTF-8
12,584
3.15625
3
[ "MIT" ]
permissive
use num::Complex; use crate::two_radix::butterfly; #[derive(Debug, PartialEq)] pub enum NodeError<'a> { Stage(usize, usize), Index(usize, usize), Twiddle(&'a mut DecimationNode), Len(usize, usize), } impl<'a> NodeError<'a> { pub fn flip_node(node: &mut DecimationNode) { if node.twiddle==true { node.twiddle=false } else if node.twiddle==false { node.twiddle=true } } } #[derive(Debug, PartialEq, Copy, Clone)] pub struct DecimationNode { pub element: Complex<f64>, pub index: usize, pub stage: usize, pub twiddle: bool, } impl DecimationNode { pub fn new(element: Complex<f64>, stage: usize, index: usize, twiddle: bool) -> Self { Self{ element, index, stage, twiddle} } // takes two decimation nodes and returns two that have been correctly twiddled. pub fn map_butterflies(mut self, mut other: Self) -> (Self, Self) { let closure = |mut s: Self, mut o: Self| -> (Self, Self) { let big_n=(2 as usize).pow(s.stage as u32); let small_n=s.index; let (lhs, rhs)=butterfly( s.element, o.element, small_n, big_n ); s.element=lhs; o.element=rhs; (s, o) }; match Self::butterfly_check(&mut self, &mut other) { Ok(_) => { }, Err(err) => match err { NodeError::Stage(x, y) => panic!("self.stage: {} != other.stage: {}", x, y), NodeError::Index(x, y) => panic!("self.index: {} != other.index: {}", x, y), NodeError::Len(x, y) => panic!("self.len: {} != other.len: {}", x, y), NodeError::Twiddle(lhs) => { NodeError::flip_node(lhs); match Self::butterfly_check(&mut self, &mut other) { Err(e) => match e { NodeError::Twiddle(rhs) => NodeError::flip_node(rhs), _ => panic!("Unreachable Error: second pass through butterfly") }, Ok(_) => { }, } return closure(self, other) } } } closure(self, other) } pub fn butterfly_check<'a>(&'a mut self, other: &'a mut Self) -> Result<(), NodeError> { match self.stage == other.stage { true => {}, false => return Err(NodeError::Stage(self.stage, other.stage)), } match self.index == other.index { true => {}, false => return Err(NodeError::Index(self.index, other.index)) 
} match self.twiddle { true => return Err(NodeError::Twiddle(self)), false => { }, } match other.twiddle { true => { }, false => return Err(NodeError::Twiddle(self)), } Ok(()) } } #[cfg(test)] mod tests { use super::*; use itertools::Itertools; use crate::two_radix::{*, tests::seq_dft, leaf::DecimationLeaf, node::DecimationNode }; use rustfft::FFTplanner; use assert_approx_eq::assert_approx_eq; fn assert_approx_vec(x: std::vec::IntoIter<Complex<f64>>, y: std::vec::IntoIter<Complex<f64>>) { for (exp, test) in x.zip(y) { assert_approx_eq!(exp.re, test.re); assert_approx_eq!(exp.im, test.im); } } #[test] fn test_two_input_butterfly_map() { let a0 = DecimationNode::new(Complex::from(0.0), 1, 0, false); let a1 = DecimationNode::new(Complex::from(1.0), 1, 0, true); let leaf = DecimationLeaf::new(vec![a0], vec![a1], 3).transform(); let test = butterfly(a0.element, a1.element, 0, 8); let leaf_lhs = leaf.lhs[0].element; let leaf_rhs = leaf.rhs[0].element; assert_eq!((leaf_lhs, leaf_rhs), test); let (x, y) = test; let check = seq_dft(&vec![0.0, 1.0].into_iter().map(|n| Complex::<f64>::from(n)).collect_vec()); assert_approx_vec(check.into_iter(), vec![x, y].into_iter()); } #[test] fn test_four_input_butterfly_map() { let a0 = DecimationNode::new(Complex::from(0.0), 1, 0, false); let a1 = DecimationNode::new(Complex::from(1.0), 1, 0, true); let a2 = DecimationNode::new(Complex::from(2.0), 1, 0, false); let a3 = DecimationNode::new(Complex::from(3.0), 1, 0, true); /* Stage One */ let (stage_one_a0, stage_one_a2) = butterfly(a0.element, a2.element, 0, 2); let (stage_one_a1, stage_one_a3) = butterfly(a1.element, a3.element, 0, 2); /* Stage Two */ let (stage_two_a0, stage_two_a1) = butterfly(stage_one_a0, stage_one_a1, 0, 4); let (stage_two_a2, stage_two_a3) = butterfly(stage_one_a2, stage_one_a3, 1, 4); let stage_one_temp_leaf_lhs = DecimationLeaf::new( vec![a0], vec![a2], 1 ).transform(); let stage_one_temp_leaf_rhs = DecimationLeaf::new( vec![a1], vec![a3], 1 ).transform(); let 
stage_two_temp_leaf = DecimationLeaf::generate_parent( stage_one_temp_leaf_lhs, stage_one_temp_leaf_rhs ).transform(); let mut test = stage_two_temp_leaf.lhs.into_iter() .chain(stage_two_temp_leaf.rhs.into_iter()) .map(|node| node.element.round_to(1.0)) .collect::<Vec<_>>(); danielson_lanczos_pattern(&mut test, 2); let exp = vec![stage_two_a0, stage_two_a1, stage_two_a2, stage_two_a3].into_iter().map(|x|x.round_to(1.0)).collect::<Vec<_>>(); let mut check = seq_dft( &vec![0.0, 1.0, 2.0, 3.0].into_iter() .map(|n| Complex::from(n).round_to(1.0)) .collect::<Vec<_>>() ).into_iter() .map(|x|x.round_to(1.0)) .collect::<Vec<_>>(); danielson_lanczos_pattern(&mut check, 2); assert_approx_vec(test.clone().into_iter(), exp.into_iter()); assert_approx_vec(test.into_iter(), check.into_iter()); } #[test] fn test_eight_input_butterfly_map() { let a0 = DecimationNode::new(Complex::from(0.0), 1, 0, false); let a4 = DecimationNode::new(Complex::from(4.0), 1, 0, true); let a2 = DecimationNode::new(Complex::from(2.0), 1, 0, false); let a6 = DecimationNode::new(Complex::from(6.0), 1, 0, true); let a1 = DecimationNode::new(Complex::from(1.0), 1, 0, false); let a5 = DecimationNode::new(Complex::from(5.0), 1, 0, true); let a3 = DecimationNode::new(Complex::from(3.0), 1, 0, false); let a7 = DecimationNode::new(Complex::from(7.0), 1, 0, true); /* Stage One */ let (stg1_a0, stg1_a4)=butterfly(a0.element, a4.element, 0, 2); let (stg1_a2, stg1_a6)=butterfly(a2.element, a6.element, 0, 2); let (stg1_a1, stg1_a5)=butterfly(a1.element, a5.element, 0, 2); let (stg1_a3, stg1_a7)=butterfly(a3.element, a7.element, 0, 2); let leaf_stg1_even_lhs = DecimationLeaf::new(vec![a0], vec![a4], 1).transform(); let leaf_stg1_even_rhs = DecimationLeaf::new(vec![a2], vec![a6], 1).transform(); let leaf_stg1_odd_lhs = DecimationLeaf::new(vec![a1], vec![a5], 1).transform(); let leaf_stg1_odd_rhs = DecimationLeaf::new(vec![a3], vec![a7], 1).transform(); // Expected: // a0 is equivalent in both & a5 is equivalent 
assert_eq!(&stg1_a0, &leaf_stg1_even_lhs.lhs[0].element); assert_eq!(&stg1_a5, &leaf_stg1_odd_lhs.rhs[0].element); // twiddle only the rhs branch. assert_eq!(&true, &leaf_stg1_odd_lhs.rhs[0].twiddle); assert_eq!(&false, &leaf_stg1_even_lhs.lhs[0].twiddle); assert_eq!(&false, &leaf_stg1_odd_lhs.lhs[0].twiddle); assert_eq!(&true, &leaf_stg1_even_lhs.rhs[0].twiddle); assert!(&leaf_stg1_odd_rhs.rhs[0].twiddle != &leaf_stg1_odd_rhs.lhs[0].twiddle); assert!(&leaf_stg1_odd_lhs.rhs[0].twiddle != &leaf_stg1_odd_lhs.lhs[0].twiddle); assert!(&leaf_stg1_even_rhs.rhs[0].twiddle != &leaf_stg1_even_rhs.lhs[0].twiddle); assert!(&leaf_stg1_even_lhs.rhs[0].twiddle != &leaf_stg1_even_lhs.lhs[0].twiddle); // all stages are set to 1 so that we check N=2 when we called transform. for (w, (x, (y, z))) in leaf_stg1_even_lhs.clone() .into_iter() .zip( leaf_stg1_odd_lhs.clone().into_iter() .zip( leaf_stg1_even_rhs.clone() .into_iter() .zip( leaf_stg1_odd_rhs.clone().into_iter() ))) { assert!( w.stage==1 && x.stage==1 && y.stage==1 && z.stage==1) } /* Stage Two */ let (stg2_a0, stg2_a2)=butterfly(stg1_a0, stg1_a2, 0, 4); let (stg2_a4, stg2_a6)=butterfly(stg1_a4, stg1_a6, 1, 4); let (stg2_a1, stg2_a3)=butterfly(stg1_a1, stg1_a3, 0, 4); let (stg2_a5, stg2_a7)=butterfly(stg1_a5, stg1_a7, 1, 4); let leaf_stg2_lhs = DecimationLeaf::generate_parent( leaf_stg1_even_lhs, leaf_stg1_even_rhs ).transform(); let leaf_stg2_rhs = DecimationLeaf::generate_parent( leaf_stg1_odd_lhs, leaf_stg1_odd_rhs ).transform(); // Expected: // indexes are identical and correct when we zip let stg2_test_vec = vec![ (&stg2_a0, &stg2_a2), (&stg2_a4, &stg2_a6), (&stg2_a1, &stg2_a3), (&stg2_a5, &stg2_a7) ]; for (i, ((lhs, rhs), (lhs_num, rhs_num))) in leaf_stg2_lhs.lhs.iter() .zip( leaf_stg2_lhs.rhs.iter() ) .zip( stg2_test_vec.into_iter() ) .enumerate() { assert_eq!(i, lhs.index); assert_eq!(lhs.index, rhs.index); assert!(i+1 != lhs.index); assert_eq!(rhs.stage, 2); assert_eq!(lhs.stage, 2); assert_eq!(&lhs.element, 
lhs_num); assert_eq!(&rhs.element, rhs_num); assert_eq!(rhs.twiddle, true); assert_eq!(lhs.twiddle, false); } /* Stage Three */ let (stg3_a0, stg3_a1)=butterfly(stg2_a0, stg2_a1, 0, 8); let (stg3_a4, stg3_a5)=butterfly(stg2_a4, stg2_a5, 1, 8); let (stg3_a2, stg3_a3)=butterfly(stg2_a2, stg2_a3, 2, 8); let (stg3_a6, stg3_a7)=butterfly(stg2_a6, stg2_a7, 3, 8); /* Manual DecimationLeaf */ let leaf_stg3 = DecimationLeaf::generate_parent( leaf_stg2_lhs, leaf_stg2_rhs ).transform(); // Expected: for (i, (lhs, rhs)) in leaf_stg3.lhs.iter() .zip(leaf_stg3.rhs.iter()) .enumerate() { assert_eq!(i, lhs.index); assert_eq!(lhs.index, rhs.index); assert!(i+1 != lhs.index); assert_eq!(rhs.stage, 3); assert_eq!(lhs.stage, 3); assert_eq!(rhs.twiddle, true); assert_eq!(lhs.twiddle, false); } /* Test */ let test = leaf_stg3.lhs.into_iter() .chain(leaf_stg3.rhs.into_iter()) .map(|node| node.element) .collect::<Vec<_>>(); // We can't assert_eq! as the numbers are trivially different around 10 sig. fig. let mut exp = vec![stg3_a0, stg3_a1, stg3_a2, stg3_a3, stg3_a4, stg3_a5, stg3_a6, stg3_a7]; danielson_lanczos_pattern(&mut exp, 3); let mut input: Vec<Complex<f64>> = vec![0.0, 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0] .into_iter() .map_into::<Complex<f64>>() .collect_vec(); let mut output: Vec<Complex<f64>> = vec![0.0, 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0] .into_iter() .map_into::<Complex<f64>>() .collect_vec(); let mut planner = FFTplanner::new(false); let fft = planner.plan_fft(8); fft.process(&mut input, &mut output); let check = seq_dft( &vec![0.0, 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0].into_iter() .map(|n| Complex::from(n)) .collect::<Vec<_>>() ); assert_approx_vec(output.into_iter(), test.clone().into_iter()); assert_approx_vec(check.into_iter(), test.clone().into_iter()); assert_approx_vec(exp.into_iter(), test.into_iter()); } }
true
c89837b680fa9df08fd0217c6195bc7b37be0c58
Rust
darkwisebear/dorkfs
/src/utility/gitconfig.rs
UTF-8
11,512
2.859375
3
[ "MIT" ]
permissive
use std::hash::{Hash, Hasher}; use std::borrow::{Cow, Borrow}; use std::mem::replace; use std::collections::hash_map::{self, HashMap}; use std::path::PathBuf; use std::io::{Read, BufReader, BufRead}; use std::env; use std::str::FromStr; use failure::{bail, format_err, Fallible}; use regex::Regex; use lazy_static::lazy_static; lazy_static! { static ref SECTION_PARSER: Regex = Regex::new(r#"\[([[[:alnum:]]-.]+)(?:\s+"(.+)")?\s*]"#) .unwrap(); } #[derive(Clone, Debug)] struct SectionKey(Box<str>, Option<Box<str>>); #[derive(Clone, Debug, PartialEq)] struct BorrowedSectionKey<'a>(&'a str, Option<&'a str>); impl Hash for SectionKey { fn hash<H: Hasher>(&self, state: &mut H) { <Self as SectionDescriptor>::hash(self, state); } } impl PartialEq for SectionKey { fn eq(&self, other: &SectionKey) -> bool { self.to_descriptor().eq(&other.to_descriptor()) } } impl Eq for SectionKey {} trait SectionDescriptor { fn to_descriptor(&self) -> BorrowedSectionKey; fn hash<H: Hasher>(&self, state: &mut H) where Self: Sized { let desc = self.to_descriptor(); desc.0.hash(state); desc.1.hash(state); } } impl SectionDescriptor for SectionKey { fn to_descriptor(&self) -> BorrowedSectionKey { let s = self.1.as_ref().map(|s| s.as_ref()); BorrowedSectionKey(self.0.as_ref(), s) } } impl<'a> SectionDescriptor for BorrowedSectionKey<'a> { fn to_descriptor(&self) -> BorrowedSectionKey { self.clone() } } impl<'a> Borrow<dyn SectionDescriptor + 'a> for SectionKey { fn borrow(&self) -> &(dyn SectionDescriptor + 'a) { self as &dyn SectionDescriptor } } impl<'a> PartialEq for (dyn SectionDescriptor + 'a) { fn eq(&self, other: &dyn SectionDescriptor) -> bool { let lhs = self.to_descriptor(); let rhs = other.to_descriptor(); lhs.0 == rhs.0 && lhs.1 == rhs.1 } } impl<'a> Eq for (dyn SectionDescriptor + 'a) {} impl<'a> Hash for (dyn SectionDescriptor + 'a) { fn hash<H: Hasher>(&self, state: &mut H) { self.to_descriptor().hash(state); } } #[derive(Debug, Clone)] pub struct Section(HashMap<String, String>); 
impl Section { pub fn get_bool(&self, name: &str) -> Option<Fallible<bool>> { self.0.get(name) .map(|val| match val.as_str() { "yes" | "on" | "true" | "1" | "" => Ok(true), "no" | "off" | "false" | "0" => Ok(false), _ => bail!("Unable to parse {} as bool", val.as_str()) }) } pub fn get_integer(&self, name: &str) -> Option<Fallible<isize>> { self.0.get(name) .map(|val| isize::from_str(val.as_str()) .map_err(Into::into)) } pub fn get_path(&self, name: &str) -> Option<PathBuf> { let val = self.0.get(name)?; if val.starts_with("~/") || val.starts_with("~user/") { let mut path = PathBuf::from(env::var_os("HOME")?); let subpath = &val[val.find('/').unwrap()+1..]; path.push(subpath); Some(path) } else { Some(PathBuf::from(val)) } } pub fn get_str(&self, name: &str) -> Option<&str> { self.0.get(name).map(|s| s.as_str()) } pub fn extract_path(&mut self, name: &str) -> Option<PathBuf> { let val = self.0.remove(name)?; if val.starts_with("~/") || val.starts_with("~user/") { let mut path = PathBuf::from(env::var_os("HOME")?); let subpath = &val[val.find('/').unwrap()+1..]; path.push(subpath); Some(path) } else { Some(PathBuf::from(val)) } } pub fn extract_string(&mut self, name: &str) -> Option<String> { self.0.remove(name) } } #[derive(Debug, Clone)] pub struct GitConfig(HashMap<SectionKey, Section>); impl GitConfig { fn add_section(&mut self, section_name: Box<str>, subsection_name: Option<Box<str>>, section: HashMap<String, String>) { let section_key = SectionKey(section_name, subsection_name); self.0.insert(section_key, Section(section)); } fn decode_subsection(subsection: &str) -> String { let mut escape = false; let mut result = String::with_capacity(subsection.len()); for c in subsection.chars() { if !escape { if c == '\\' { escape = true; } else { result.push(c); } } else { escape = false; result.push(c); } } result } pub fn iter(&self) -> SectionIter { SectionIter(self.0.iter()) } pub fn into_iter(self) -> IntoSectionIter { IntoSectionIter(self.0.into_iter()) } pub fn 
from_reader<R: Read>(reader: R) -> Fallible<Self> { let reader = BufReader::new(reader); let mut config = GitConfig(HashMap::new()); let mut current_section_name: Option<Box<str>> = None; let mut current_subsection_name: Option<Box<str>> = None; let mut current_section = HashMap::new(); for line in reader.lines() { let line = line?; let line = line.split(|s| s == '#' || s == ';').next().unwrap().trim_start(); if line.starts_with('[') { // Store the old section if let Some(current_section_name) = current_section_name.take() { config.add_section(current_section_name, current_subsection_name.take(), replace(&mut current_section, HashMap::new())) } let section_captures = SECTION_PARSER.captures(line) .ok_or_else(|| format_err!("Cannot parse section header {}", line))?; current_section_name = section_captures.get(1) .map(|m| m.as_str().to_lowercase().into_boxed_str()); current_subsection_name = section_captures.get(2) .map(|m| Self::decode_subsection(m.as_str()).into_boxed_str()); } else if let Some(equal_index) = line.find('=') { let key = line[0..equal_index].trim_end(); let value = line[equal_index+1..].trim(); current_section.insert(key.to_string(), value.to_string()); } else { current_section.insert(line.trim_end().to_string(), "1".to_string()); } } if let Some(current_section_name) = current_section_name.take() { config.add_section(current_section_name, current_subsection_name.take(), current_section); } Ok(config) } pub fn get_section(&self, section: &str, subsection: Option<&str>) -> Option<&Section> { let section = if section.chars().any(char::is_uppercase) { Cow::Owned(section.to_lowercase()) } else { Cow::Borrowed(section) }; let borrowed_section_key = BorrowedSectionKey(section.as_ref(), subsection); self.0.get(&borrowed_section_key as &dyn SectionDescriptor) } } pub struct SectionIter<'a>(hash_map::Iter<'a, SectionKey, Section>); impl<'a> Iterator for SectionIter<'a> { type Item = (&'a str, Option<&'a str>, &'a Section); fn next(&mut self) -> 
Option<Self::Item> { self.0.next().map(|(k, v)| { (k.0.as_ref(), k.1.as_ref().map(Box::<str>::as_ref), v) }) } fn size_hint(&self) -> (usize, Option<usize>) { self.0.size_hint() } } pub struct IntoSectionIter(hash_map::IntoIter<SectionKey, Section>); impl Iterator for IntoSectionIter { type Item = (String, Option<String>, Section); fn next(&mut self) -> Option<Self::Item> { self.0.next().map(|(k, v)| { (k.0.into_string(), k.1.map(str::into_string), v) }) } fn size_hint(&self) -> (usize, Option<usize>) { self.0.size_hint() } } impl IntoIterator for GitConfig { type Item = (String, Option<String>, Section); type IntoIter = IntoSectionIter; fn into_iter(self) -> Self::IntoIter { self.into_iter() } } #[cfg(test)] mod test { use super::*; use std::path::Path; #[test] fn parse_git_config() { let sample = br#"# This is a test comment [core] autocrlf = true ; also with ; [commit] # Should work on a section as well # ; gpgSign = true [remote "origin"] path = ~/test/subdir # and as the last line"#; let config = GitConfig::from_reader(sample.as_ref()).unwrap(); let section = config.get_section("commit", None) .expect("Couldn't find section commit"); assert_eq!(true, section.get_bool("gpgSign") .expect("Value gpgSign not existing") .expect("Unable to parse value gpgSign")); let section = config.get_section("remote", Some("origin")) .expect("Unable to find remote origin"); env::set_var("HOME", "/home/example"); assert_eq!(Path::new("/home/example/test/subdir"), section.get_path("path") .expect("Couldn't find path of remote")); } #[test] fn section_is_case_insensitive() { let sample = br#"[aWeIRdSEcTioN] data = value"#; let config = GitConfig::from_reader(sample.as_ref()).unwrap(); config.get_section("aweirdsection", None).unwrap(); config.get_section("AWEIRDSECTION", None).unwrap(); config.get_section("AWEiRdsEcTIon", None).unwrap(); } #[test] fn missing_value_means_true() { let sample = br#"[section] data"#; let config = GitConfig::from_reader(&sample[..]).unwrap(); let 
section = config.get_section("section", None).unwrap(); assert_eq!(true, section.get_bool("data").unwrap().unwrap()); } #[test] fn subsection_name_escaping() { let sample = br#"[test "sec\\tion"] data = value [other "sec\"tion"] size = 15 [other "sec\5tion"] with = 8 "#; let config = GitConfig::from_reader(&sample[..]).unwrap(); config.get_section("test", Some("sec\\tion")).unwrap(); config.get_section("other", Some("sec\"tion")).unwrap(); config.get_section("other", Some("sec5tion")).unwrap(); } #[test] fn empty_section() { let sample = br#"[emptysection] [nonempty] some = data"#; let config = GitConfig::from_reader(&sample[..]).unwrap(); config.get_section("emptysection", None).unwrap(); let section = config.get_section("nonempty", None).unwrap(); assert_eq!("data", section.get_str("some").unwrap()); } #[test] fn section_and_subsection() { let sample = br#"[section] stuff = true [section "sub1"] data = narf [section "sub2"] sata = foo"#; let config = GitConfig::from_reader(&sample[..]).unwrap(); let section = config.get_section("section", None).unwrap(); let sub1 = config.get_section("section", Some("sub1")).unwrap(); let sub2 = config.get_section("section", Some("sub2")).unwrap(); assert_eq!(true, section.get_bool("stuff").unwrap().unwrap()); assert_eq!("narf", sub1.get_str("data").unwrap()); assert_eq!("foo", sub2.get_str("sata").unwrap()); } }
true
e6c0a9dbf5ccbddf73168cdf0dc171ab4937b7b5
Rust
uazu/qcell
/src/tlcell.rs
UTF-8
11,861
3.1875
3
[ "MIT", "LicenseRef-scancode-unknown-license-reference", "Apache-2.0" ]
permissive
use std::any::TypeId; use std::cell::UnsafeCell; use std::collections::HashSet; use std::marker::PhantomData; use super::Invariant; std::thread_local! { static SINGLETON_CHECK: std::cell::RefCell<HashSet<TypeId>> = std::cell::RefCell::new(HashSet::new()); } struct NotSendOrSync(*const ()); /// Borrowing-owner of zero or more [`TLCell`](struct.TLCell.html) /// instances. /// /// See [crate documentation](index.html). #[cfg_attr(docsrs, doc(cfg(feature = "std")))] pub struct TLCellOwner<Q: 'static> { // Use NotSendOrSync to disable Send and Sync, not_send_or_sync: PhantomData<NotSendOrSync>, // Use Invariant<Q> for invariant parameter typ: PhantomData<Invariant<Q>>, } impl<Q: 'static> Drop for TLCellOwner<Q> { fn drop(&mut self) { SINGLETON_CHECK.with(|set| set.borrow_mut().remove(&TypeId::of::<Q>())); } } impl<Q: 'static> Default for TLCellOwner<Q> { fn default() -> Self { TLCellOwner::new() } } impl<Q: 'static> TLCellOwner<Q> { /// Create the singleton owner instance. Each owner may be used /// to create many `TLCell` instances. There may be only one /// instance of this type per thread at any given time for each /// different marker type `Q`. This call panics if a second /// simultaneous instance is created. Since the owner is only /// valid to use in the thread it is created in, it does not /// support `Send` or `Sync`. pub fn new() -> Self { SINGLETON_CHECK.with(|set| { assert!(set.borrow_mut().insert(TypeId::of::<Q>()), "Illegal to create two TLCellOwner instances within the same thread with the same marker type parameter"); }); Self { not_send_or_sync: PhantomData, typ: PhantomData, } } /// Create a new cell owned by this owner instance. See also /// [`TLCell::new`]. /// /// [`TLCell::new`]: struct.TLCell.html pub fn cell<T>(&self, value: T) -> TLCell<Q, T> { TLCell::<Q, T>::new(value) } /// Borrow contents of a `TLCell` immutably (read-only). Many /// `TLCell` instances can be borrowed immutably at the same time /// from the same owner. 
#[inline] pub fn ro<'a, T: ?Sized>(&'a self, tc: &'a TLCell<Q, T>) -> &'a T { unsafe { &*tc.value.get() } } /// Borrow contents of a `TLCell` mutably (read-write). Only one /// `TLCell` at a time can be borrowed from the owner using this /// call. The returned reference must go out of scope before /// another can be borrowed. #[inline] pub fn rw<'a, T: ?Sized>(&'a mut self, tc: &'a TLCell<Q, T>) -> &'a mut T { unsafe { &mut *tc.value.get() } } /// Borrow contents of two `TLCell` instances mutably. Panics if /// the two `TLCell` instances point to the same memory. #[inline] pub fn rw2<'a, T: ?Sized, U: ?Sized>( &'a mut self, tc1: &'a TLCell<Q, T>, tc2: &'a TLCell<Q, U>, ) -> (&'a mut T, &'a mut U) { assert!( tc1 as *const _ as *const () as usize != tc2 as *const _ as *const () as usize, "Illegal to borrow same TLCell twice with rw2()" ); unsafe { (&mut *tc1.value.get(), &mut *tc2.value.get()) } } /// Borrow contents of three `TLCell` instances mutably. Panics if /// any pair of `TLCell` instances point to the same memory. #[inline] pub fn rw3<'a, T: ?Sized, U: ?Sized, V: ?Sized>( &'a mut self, tc1: &'a TLCell<Q, T>, tc2: &'a TLCell<Q, U>, tc3: &'a TLCell<Q, V>, ) -> (&'a mut T, &'a mut U, &'a mut V) { assert!( (tc1 as *const _ as *const () as usize != tc2 as *const _ as *const () as usize) && (tc2 as *const _ as *const () as usize != tc3 as *const _ as *const () as usize) && (tc3 as *const _ as *const () as usize != tc1 as *const _ as *const () as usize), "Illegal to borrow same TLCell twice with rw3()" ); unsafe { ( &mut *tc1.value.get(), &mut *tc2.value.get(), &mut *tc3.value.get(), ) } } } /// Cell whose contents is owned (for borrowing purposes) by a /// [`TLCellOwner`]. /// /// To borrow from this cell, use the borrowing calls on the /// [`TLCellOwner`] instance that shares the same marker type. Since /// there may be another indistinguishable [`TLCellOwner`] in another /// thread, `Sync` is not supported for this type. 
However it *is* /// possible to send the cell to another thread, which then allows its /// contents to be borrowed using the owner in that thread. /// /// See also [crate documentation](index.html). /// /// [`TLCellOwner`]: struct.TLCellOwner.html #[repr(transparent)] #[cfg_attr(docsrs, doc(cfg(feature = "std")))] pub struct TLCell<Q, T: ?Sized> { // Use Invariant<Q> for invariant parameter owner: PhantomData<Invariant<Q>>, // TLCell absolutely cannot be Sync, since otherwise you could send // two &TLCell's to two different threads, that each have their own // TLCellOwner<Q> instance and that could therefore both give out // a &mut T to the same T. // // However, it's fine to Send a TLCell to a different thread, because // you can only send something if nothing borrows it, so nothing can // be accessing its contents. After sending the TLCell, the original // TLCellOwner can no longer give access to the TLCell's contents since // TLCellOwner is !Send + !Sync. Only the TLCellOwner of the new thread // can give access to this TLCell's contents now. // // `UnsafeCell` already disables `Sync` and gives the right `Send` implementation. value: UnsafeCell<T>, } impl<Q, T> TLCell<Q, T> { /// Create a new `TLCell` owned for borrowing purposes by the /// `TLCellOwner` derived from the same marker type `Q`. #[inline] pub const fn new(value: T) -> TLCell<Q, T> { TLCell { owner: PhantomData, value: UnsafeCell::new(value), } } /// Destroy the cell and return the contained value /// /// Safety: Since this consumes the cell, there can be no other /// references to the cell or the data at this point. #[inline] pub fn into_inner(self) -> T { self.value.into_inner() } } impl<Q, T: ?Sized> TLCell<Q, T> { /// Borrow contents of this cell immutably (read-only). Many /// `TLCell` instances can be borrowed immutably at the same time /// from the same owner. 
#[inline] pub fn ro<'a>(&'a self, owner: &'a TLCellOwner<Q>) -> &'a T { owner.ro(self) } /// Borrow contents of this cell mutably (read-write). Only one /// `TLCell` at a time can be borrowed from the owner using this /// call. The returned reference must go out of scope before /// another can be borrowed. To mutably borrow from two or three /// cells at the same time, see [`TLCellOwner::rw2`] or /// [`TLCellOwner::rw3`]. #[inline] pub fn rw<'a>(&'a self, owner: &'a mut TLCellOwner<Q>) -> &'a mut T { owner.rw(self) } /// Returns a mutable reference to the underlying data /// /// Note that this is only useful at the beginning-of-life or /// end-of-life of the cell when you have exclusive access to it. /// Normally you'd use [`TLCell::rw`] or [`TLCellOwner::rw`] to /// get a mutable reference to the contents of the cell. /// /// Safety: This call borrows `TLCell` mutably which guarantees /// that we possess the only reference. This means that there can /// be no active borrows of other forms, even ones obtained using /// an immutable reference. 
#[inline] pub fn get_mut(&mut self) -> &mut T { self.value.get_mut() } } impl<Q: 'static, T: Default + ?Sized> Default for TLCell<Q, T> { fn default() -> Self { TLCell::new(T::default()) } } #[cfg(test)] mod tests { use super::{TLCell, TLCellOwner}; #[test] #[should_panic] fn tlcell_singleton_1() { struct Marker; let _owner1 = TLCellOwner::<Marker>::new(); let _owner2 = TLCellOwner::<Marker>::new(); // Panic here } #[test] fn tlcell_singleton_2() { struct Marker; let owner1 = TLCellOwner::<Marker>::new(); drop(owner1); let _owner2 = TLCellOwner::<Marker>::new(); } #[test] fn tlcell_singleton_3() { struct Marker1; struct Marker2; let _owner1 = TLCellOwner::<Marker1>::new(); let _owner2 = TLCellOwner::<Marker2>::new(); } #[test] fn tlcell() { struct Marker; type ACellOwner = TLCellOwner<Marker>; type ACell<T> = TLCell<Marker, T>; let mut owner = ACellOwner::new(); let c1 = ACell::new(100u32); let c2 = owner.cell(200u32); (*owner.rw(&c1)) += 1; (*owner.rw(&c2)) += 2; let c1ref = owner.ro(&c1); let c2ref = owner.ro(&c2); let total = *c1ref + *c2ref; assert_eq!(total, 303); } #[test] fn tlcell_threads() { struct Marker; type ACellOwner = TLCellOwner<Marker>; let mut _owner1 = ACellOwner::new(); std::thread::spawn(|| { let mut _owner2 = ACellOwner::new(); }) .join() .unwrap(); } #[test] fn tlcell_get_mut() { struct Marker; type ACellOwner = TLCellOwner<Marker>; type ACell<T> = TLCell<Marker, T>; let owner = ACellOwner::new(); let mut cell = ACell::new(100u32); let mut_ref = cell.get_mut(); *mut_ref = 50; let cell_ref = owner.ro(&cell); assert_eq!(*cell_ref, 50); } #[test] fn tlcell_into_inner() { struct Marker; type ACell<T> = TLCell<Marker, T>; let cell = ACell::new(100u32); assert_eq!(cell.into_inner(), 100); } #[test] fn tlcell_unsized() { struct Marker; type ACellOwner = TLCellOwner<Marker>; type ACell<T> = TLCell<Marker, T>; let mut owner = ACellOwner::new(); struct Squares(u32); struct Integers(u64); trait Series { fn step(&mut self); fn value(&self) -> u64; } impl 
Series for Squares { fn step(&mut self) { self.0 += 1; } fn value(&self) -> u64 { (self.0 as u64) * (self.0 as u64) } } impl Series for Integers { fn step(&mut self) { self.0 += 1; } fn value(&self) -> u64 { self.0 } } fn series(init: u32, is_squares: bool) -> Box<ACell<dyn Series>> { if is_squares { Box::new(ACell::new(Squares(init))) } else { Box::new(ACell::new(Integers(init as u64))) } } let own = &mut owner; let cell1 = series(4, false); let cell2 = series(7, true); let cell3 = series(3, true); assert_eq!(cell1.ro(own).value(), 4); cell1.rw(own).step(); assert_eq!(cell1.ro(own).value(), 5); assert_eq!(own.ro(&cell2).value(), 49); own.rw(&cell2).step(); assert_eq!(own.ro(&cell2).value(), 64); let (r1, r2, r3) = own.rw3(&cell1, &cell2, &cell3); r1.step(); r2.step(); r3.step(); assert_eq!(cell1.ro(own).value(), 6); assert_eq!(cell2.ro(own).value(), 81); assert_eq!(cell3.ro(own).value(), 16); let (r1, r2) = own.rw2(&cell1, &cell2); r1.step(); r2.step(); assert_eq!(cell1.ro(own).value(), 7); assert_eq!(cell2.ro(own).value(), 100); } }
true
99d7133a8b054427336d93de1d50bbae5011e90a
Rust
sagiegurari/duckscript
/duckscript_sdk/src/sdk/std/fs/cp/mod.rs
UTF-8
2,478
2.703125
3
[ "Apache-2.0", "LicenseRef-scancode-free-unknown" ]
permissive
use crate::utils::pckg; use duckscript::types::command::{Command, CommandResult}; use fs_extra::dir; use fsio::directory::create_parent; use std::fs; use std::path::Path; #[cfg(test)] #[path = "./mod_test.rs"] mod mod_test; #[derive(Clone)] pub(crate) struct CommandImpl { package: String, } impl Command for CommandImpl { fn name(&self) -> String { pckg::concat(&self.package, "CopyPath") } fn aliases(&self) -> Vec<String> { vec!["cp".to_string()] } fn help(&self) -> String { include_str!("help.md").to_string() } fn clone_and_box(&self) -> Box<dyn Command> { Box::new((*self).clone()) } fn run(&self, arguments: Vec<String>) -> CommandResult { if arguments.len() < 2 { CommandResult::Error("Paths not provided.".to_string()) } else { let source_path_str = &arguments[0]; let source_path = Path::new(source_path_str); if !source_path.exists() { CommandResult::Error("Path does not exist.".to_string()) } else { let source_file = source_path.is_file(); let target_path_str = &arguments[1]; if source_file { match create_parent(target_path_str) { Ok(_) => match fs::copy(source_path_str, target_path_str) { Ok(_) => CommandResult::Continue(Some("true".to_string())), Err(error) => CommandResult::Error(error.to_string()), }, Err(error) => CommandResult::Error(error.to_string()), } } else { match fsio::directory::create(target_path_str) { Ok(_) => { let options = dir::CopyOptions::new(); match dir::copy(source_path_str, target_path_str, &options) { Ok(_) => CommandResult::Continue(Some("true".to_string())), Err(error) => CommandResult::Error(error.to_string()), } } Err(error) => CommandResult::Error(error.to_string()), } } } } } } pub(crate) fn create(package: &str) -> Box<dyn Command> { Box::new(CommandImpl { package: package.to_string(), }) }
true
59ae2409acd67f9813f0b9193f76b3b8afc1b30f
Rust
DoumanAsh/yukikaze
/src/header/content_disposition.rs
UTF-8
16,006
3.453125
3
[ "Apache-2.0" ]
permissive
use percent_encoding::{utf8_percent_encode, percent_decode_str}; use crate::utils::enc::HEADER_VALUE_ENCODE_SET; use core::fmt; use core::str::FromStr; use std::error::Error; #[derive(Debug)] ///Filename parameter of `Content-Disposition` pub enum Filename { ///Regular `filename` Name(Option<String>), ///Extended `filename*` /// ///Charset is always UTF-8, because whatelse you need? /// ///Values: ///1. Optional language tag. ///2. Correctly percent encoded string Extended(Option<String>, String) } impl Filename { ///Returns default `Filename` with empty name field. pub fn new() -> Self { Filename::Name(None) } ///Creates file name. pub fn with_name(name: String) -> Self { Filename::Name(Some(name)) } ///Creates file name, and checks whether it should be encoded. /// ///Note that actual encoding would happen only when header is written. ///The value itself would remain unchanged in the `Filename`. pub fn with_encoded_name(name: std::borrow::Cow<'_, str>) -> Self { match name.is_ascii() { true => Self::with_name(name.into_owned()), false => match utf8_percent_encode(&name, HEADER_VALUE_ENCODE_SET).into() { std::borrow::Cow::Owned(encoded) => Self::with_extended(None, encoded), std::borrow::Cow::Borrowed(maybe_encoded) => match maybe_encoded == name { true => Self::with_extended(None, maybe_encoded.to_owned()), false => Self::with_name(name.into_owned()), } } } } #[inline] ///Creates extended file name. pub fn with_extended(lang: Option<String>, name: String) -> Self { Filename::Extended(lang, name) } #[inline] ///Returns whether filename is of extended type. pub fn is_extended(&self) -> bool { match self { Filename::Extended(_, _) => true, _ => false } } ///Returns file name, percent decoded if necessary. /// ///Note: expects to work with utf-8 only. 
pub fn name(&self) -> Option<std::borrow::Cow<'_, str>> { match self { Filename::Name(None) => None, Filename::Name(Some(ref name)) => Some(name.as_str().into()), Filename::Extended(_, name) => Some(percent_decode_str(&name).decode_utf8_lossy()), } } ///Consumes self and returns file name, if present. /// ///Note: expects to work with utf-8 only. pub fn into_name(self) -> Option<String> { match self { Filename::Name(None) => None, Filename::Name(Some(name)) => Some(name), Filename::Extended(_, name) => Some(percent_decode_str(&name).decode_utf8_lossy().into_owned()), } } } #[derive(Debug)] /// A `Content-Disposition` header, defined in [RFC6266](https://tools.ietf.org/html/rfc6266). /// /// The Content-Disposition response header field is used to convey /// additional information about how to process the response payload, and /// also can be used to attach additional metadata, such as the filename /// to use when saving the response payload locally. pub enum ContentDisposition { ///Tells that content should be displayed inside web page. Inline, ///Tells that content should be downloaded. Attachment(Filename), ///Tells that content is field of form with name and filename /// ///## Note /// ///This is an extension that can be used only inside of multipart ///body, it is not expected value for header. FormData(Option<String>, Filename) } fn split_into_two(text: &str, sep: char) -> (&str, &str) { match text.find(sep) { Some(end) => (&text[..end].trim_end(), &text[end+1..].trim_start()), None => (text, ""), } } macro_rules! 
parse_file_ext { ($param:ident) => {{ let mut parts = $param.splitn(3, '\''); //Should be utf-8, but since we parse from str, should be always utf-8 let _ = match parts.next() { Some(charset) => charset.to_owned(), None => continue }; let lang = parts.next().map(|lang| lang.to_owned()); let value = match parts.next() { Some(value) => value.to_owned(), None => continue }; Filename::Extended(lang, value) }} } #[derive(Debug)] pub enum ParseError { InvalidDispositionType, UnknownAttachmentParam, UnknownFormParam, } impl fmt::Display for ParseError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { &ParseError::InvalidDispositionType => f.write_str("Specified disposition type is not valid. Should be inline, attachment or form-data"), &ParseError::UnknownAttachmentParam => f.write_str("Form-data parameter is invalid. Allowed: filename[*]"), &ParseError::UnknownFormParam => f.write_str("Form-data parameter is invalid. Allowed: name, filename[*]"), } } } impl Error for ParseError { } impl FromStr for ContentDisposition { type Err = ParseError; fn from_str(text: &str) -> Result<Self, Self::Err> { const NAME: &str = "name"; const FILENAME: &str = "filename"; let text = text.trim(); let (disp_type, arg) = split_into_two(text, ';'); if disp_type.eq_ignore_ascii_case("inline") { Ok(ContentDisposition::Inline) } else if disp_type.eq_ignore_ascii_case("attachment") { let mut file_name = Filename::Name(None); for arg in arg.split(';').map(|arg| arg.trim()) { let (name, value) = split_into_two(arg, '='); if value.len() == 0 { continue; } if name.len() < FILENAME.len() { return Err(ParseError::UnknownAttachmentParam) } let prefix = &name[..FILENAME.len()]; if prefix.eq_ignore_ascii_case("filename") { let value = value.trim_matches('"'); if let Some(_) = name.rfind('*') { file_name = parse_file_ext!(value); break; } else { file_name = Filename::Name(Some(value.to_owned())); } } else { return Err(ParseError::UnknownAttachmentParam) } } 
Ok(ContentDisposition::Attachment(file_name)) } else if disp_type.eq_ignore_ascii_case("form-data") { let mut name_param = None; let mut file_name = Filename::Name(None); for arg in arg.split(';').map(|arg| arg.trim()) { let (name, value) = split_into_two(arg, '='); if value.len() == 0 { continue; } if name.eq_ignore_ascii_case(NAME) { name_param = Some(value.trim_matches('"').to_owned()); continue; } else if name.len() < FILENAME.len() { return Err(ParseError::UnknownFormParam) } let prefix = &name[..FILENAME.len()]; if prefix.eq_ignore_ascii_case("filename") { let value = value.trim_matches('"'); if let Some(_) = name.rfind('*') { file_name = parse_file_ext!(value); } else if !file_name.is_extended() { file_name = Filename::Name(Some(value.to_owned())); } } else { return Err(ParseError::UnknownFormParam) } } Ok(ContentDisposition::FormData(name_param, file_name)) } else { Err(ParseError::InvalidDispositionType) } } } impl fmt::Display for ContentDisposition { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { ContentDisposition::Inline => write!(f, "inline"), ContentDisposition::Attachment(file) => match file { Filename::Name(Some(name)) => write!(f, "attachment; filename=\"{}\"", name), Filename::Name(None) => write!(f, "attachment"), Filename::Extended(lang, value) => { write!(f, "attachment; filename*=utf-8'{}'{}", lang.as_ref().map(|lang| lang.as_str()).unwrap_or(""), value) }, }, ContentDisposition::FormData(None, file) => match file { Filename::Name(Some(name)) => write!(f, "form-data; filename=\"{}\"", name), Filename::Name(None) => write!(f, "form-data"), Filename::Extended(lang, value) => { write!(f, "form-data; filename*=utf-8'{}'{}", lang.as_ref().map(|lang| lang.as_str()).unwrap_or(""), value) }, }, ContentDisposition::FormData(Some(name), file) => match file { Filename::Name(Some(file_name)) => write!(f, "form-data; name=\"{}\"; filename=\"{}\"", name, file_name), Filename::Name(None) => write!(f, "form-data; name=\"{}\"", name), 
Filename::Extended(lang, value) => { write!(f, "form-data; name=\"{}\"; filename*=utf-8'{}'{}", name, lang.as_ref().map(|lang| lang.as_str()).unwrap_or(""), value) }, } } } } #[cfg(test)] mod tests { use percent_encoding::{percent_decode}; use super::{FromStr, ContentDisposition, Filename}; #[test] fn parse_file_name_extended_ascii() { const INPUT: &'static str = "rori.mp4"; let file_name = Filename::with_encoded_name(INPUT.into()); assert!(!file_name.is_extended()); } #[test] fn parse_file_name_extended_non_ascii() { const INPUT: &'static str = "ロリへんたい.mp4"; let file_name = Filename::with_encoded_name(INPUT.into()); assert!(file_name.is_extended()); } #[test] fn parse_inline_disp() { const INPUT: &'static str = "inline"; let result = ContentDisposition::from_str(INPUT).expect("To have inline Disposition"); let result = match result { ContentDisposition::Inline => result.to_string(), _ => panic!("Invalid Content Disposition") }; assert_eq!(result, INPUT); } #[test] fn parse_attach_disp_wo_filename() { const INPUT: &'static str = "attachment; filename"; let result = ContentDisposition::from_str(INPUT).expect("To have attachment Disposition"); let result_text = result.to_string(); match result { ContentDisposition::Attachment(file) => { match file { Filename::Name(name) => assert!(name.is_none()), _ => panic!("Wrong Filename type"), } }, _ => panic!("Invalid Content Disposition") } assert_eq!(result_text, "attachment"); } #[test] fn parse_attach_disp_w_filename() { const INPUT: &'static str = "attachment; filename=\"lolka.jpg\";filename=\"lolka2.jpg\""; let result = ContentDisposition::from_str(INPUT).expect("To have attachment Disposition"); let result_text = result.to_string(); match result { ContentDisposition::Attachment(file) => { match file { Filename::Name(name) => assert_eq!(name.expect("Filename value"), "lolka2.jpg"), _ => panic!("Wrong Filename type"), } }, _ => panic!("Invalid Content Disposition") } assert_eq!(result_text, "attachment; 
filename=\"lolka2.jpg\""); } #[test] fn parse_attach_disp_w_filename_ext() { const EXPECT_INPUT: &'static str = "attachment; filename*=utf-8'en'%C2%A3%20and%20%E2%82%AC%20rates"; const INPUT: &'static str = "attachment;\t filename*=UTF-8'en'%C2%A3%20and%20%E2%82%AC%20rates"; let result = ContentDisposition::from_str(INPUT).expect("To have attachment Disposition"); let result_text = result.to_string(); match result { ContentDisposition::Attachment(file) => { assert!(file.is_extended()); let expected_value = percent_decode("%C2%A3%20and%20%E2%82%AC%20rates".as_bytes()).decode_utf8_lossy(); let value = file.name().expect("To have file name"); assert_eq!(value, expected_value); }, _ => panic!("Invalid Content Disposition") } assert_eq!(result_text, EXPECT_INPUT); } #[test] fn parse_form_data() { const EXPECT_INPUT: &'static str = "form-data; name=\"lolka\"; filename=\"lolka.jpg\""; const INPUT: &'static str = "form-data;\t name=\"lolka\";filename=\"lolka.jpg\""; let result = ContentDisposition::from_str(INPUT).expect("To have form-data Disposition"); let result_text = result.to_string(); match result { ContentDisposition::FormData(name, file) => { assert_eq!(name.expect("To have form-data name"), "lolka"); match file { Filename::Name(name) => assert_eq!(name.expect("Filename value"), "lolka.jpg"), _ => panic!("Wrong Filename type"), } }, _ => panic!("Invalid Content Disposition") } assert_eq!(result_text, EXPECT_INPUT); } #[test] fn parse_form_data_wo_params() { const INPUT: &'static str = "form-data"; let result = ContentDisposition::from_str(INPUT).expect("To have form-data Disposition"); let result_text = result.to_string(); match result { ContentDisposition::FormData(name, file) => { assert!(name.is_none()); match file { Filename::Name(name) => assert!(name.is_none()), _ => panic!("Wrong Filename type"), } }, _ => panic!("Invalid Content Disposition") } assert_eq!(result_text, INPUT); } #[test] fn parse_form_data_wo_name() { const INPUT: &'static str = "form-data; 
filename=\"lolka.jpg\""; let result = ContentDisposition::from_str(INPUT).expect("To have form-data Disposition"); let result_text = result.to_string(); match result { ContentDisposition::FormData(name, file) => { assert!(name.is_none()); match file { Filename::Name(name) => assert_eq!(name.expect("Filename value"), "lolka.jpg"), _ => panic!("Wrong Filename type"), } }, _ => panic!("Invalid Content Disposition") } assert_eq!(result_text, INPUT); } #[test] fn parse_form_data_wo_filename() { const INPUT: &'static str = "form-data; name=\"lolka\""; let result = ContentDisposition::from_str(INPUT).expect("To have form-data Disposition"); let result_text = result.to_string(); match result { ContentDisposition::FormData(name, file) => { assert_eq!(name.expect("To have form-data name"), "lolka"); match file { Filename::Name(name) => assert!(name.is_none()), _ => panic!("Wrong Filename type"), } }, _ => panic!("Invalid Content Disposition") } assert_eq!(result_text, INPUT); } }
true
c8041aea4e22221aff28b9ff7ab4168881c90e17
Rust
ocadaruma/redis-hyperminhash
/src/redis/dense.rs
UTF-8
1,097
2.75
3
[ "Apache-2.0" ]
permissive
use crate::hyperminhash::{RegisterVector, NUM_REGISTERS}; use super::dma::CByteArray; use std::mem::size_of; /// RegisterVector impl which stores registers as 16-bit integer array. /// Each integer is stored in little endian. pub struct DenseVector { data: CByteArray, } impl DenseVector { pub const SINGLE_REGISTER_BYTES: usize = size_of::<u16>(); pub const DENSE_BYTES: usize = NUM_REGISTERS * DenseVector::SINGLE_REGISTER_BYTES; pub fn wrap(data: CByteArray) -> Self { Self { data, } } } impl RegisterVector for DenseVector { fn register_at(&self, idx: usize) -> u32 { let offset = idx * DenseVector::SINGLE_REGISTER_BYTES; let mut result = 0u16; result |= u16::from(self.data[offset ]); result |= u16::from(self.data[offset + 1]) << 8; u32::from(result) } fn set_register(&mut self, idx: usize, value: u32) { let offset = idx * DenseVector::SINGLE_REGISTER_BYTES; self.data[offset ] = ((value ) & 0xff) as u8; self.data[offset + 1] = ((value >> 8) & 0xff) as u8; } }
true
06ebdfe09aa4bcb8de1450a1d97441c430b455ee
Rust
Logicalshift/flowbetween
/cmdline/diag/src/console.rs
UTF-8
2,290
2.84375
3
[ "Apache-2.0" ]
permissive
use flo_commands::*; use tokio::fs; use tokio::io::{stdout, stderr, AsyncWrite, AsyncWriteExt}; use futures::prelude::*; use std::path::*; /// /// Sends the command output to the console /// pub fn run_console<InputStream>(command_output: InputStream) -> impl Future<Output=()>+Send where InputStream: Stream<Item=FloCommandOutput>+Send+Unpin { async move { let mut command_output = command_output; // The default command output stream is stdout. The messages are sent to stderr let mut output_stream: Box<dyn AsyncWrite+Send+Unpin> = Box::new(stdout()); let mut message_stream = stderr(); while let Some(input) = command_output.next().await { use self::FloCommandOutput::*; match input { BeginCommand(_cmd) => { } Message(msg) => { message_stream.write(msg.as_bytes()).await.unwrap(); message_stream.write("\n".as_bytes()).await.unwrap(); } BeginOutput(filename) => { output_stream = Box::new(fs::File::create(PathBuf::from(filename)).await.unwrap()); } Error(err) => { message_stream.write(err.as_bytes()).await.unwrap(); message_stream.write("\n".as_bytes()).await.unwrap(); } State(_state) => { } FinishCommand(_cmd) => { } StartTask(_task) => { } TaskProgress(_complete, _todo) => { } FinishTask => { } Failure(error) => { let msg = format!("ERROR: {}", error); message_stream.write(msg.as_bytes()).await.unwrap(); } Output(output) => { let bytes = output.as_bytes(); let mut pos = 0; while pos < bytes.len() { let remaining_bytes = &bytes[pos..bytes.len()]; let num_written = output_stream.write(remaining_bytes).await.unwrap(); pos += num_written; } } } } } }
true
f7f46a72fc116c9d1df9d04eadb7a9e636652d99
Rust
munagekar/cp
/leetcode/00012.rs
UTF-8
1,729
3.5
4
[]
no_license
// https://leetcode.com/problems/integer-to-roman/
// Table-driven greedy conversion (the dictionary-based lookup the original
// comment suggested): repeatedly emit the largest value that still fits,
// which also covers the subtractive forms (CM, CD, XC, XL, IX, IV).

// Declared so the file compiles standalone; the LeetCode judge normally
// supplies this type, so drop this line when pasting into the judge.
struct Solution;

impl Solution {
    /// Converts `num` (1..=3999 per the problem constraints) to a Roman numeral.
    pub fn int_to_roman(mut num: i32) -> String {
        // Values in descending order, subtractive pairs included.
        const TABLE: [(i32, &str); 13] = [
            (1000, "M"), (900, "CM"), (500, "D"), (400, "CD"),
            (100, "C"), (90, "XC"), (50, "L"), (40, "XL"),
            (10, "X"), (9, "IX"), (5, "V"), (4, "IV"), (1, "I"),
        ];

        let mut out = String::new();
        for &(value, symbol) in TABLE.iter() {
            // Append the symbol as many times as its value fits into `num`.
            while num >= value {
                out.push_str(symbol);
                num -= value;
            }
        }
        out
    }
}
true
cd34bc5bd57eeca47f665607bc33c19df5c88755
Rust
bosley/OPAS
/Rust/coal/lang/tests/expressions.rs
UTF-8
837
2.515625
3
[ "MIT" ]
permissive
extern crate coal_lang;

// Parser regression test: every source chunk below must be accepted by
// `coal_lang::from_source`; the first parse failure panics with the
// offending chunk and the parser's error.
#[test]
fn expressions () {
  let code_chunks = vec![
    r#" unit main{ static module other { def init() -> none {} def give_42() -> int { return 42; } } static module main { members { pi: float = 3.14159; } def init() -> none { local_var : int = 3 + 5 + 7.0 * (pi ** var); local_var = self.give_something(); } def give_something() -> int { return 3 + other::give_42().to_string().to_int().fail().arbitrary().length().access().is_okay(); } }}"#,
  ];

  for item in code_chunks.iter() {
    if let Err(e) = coal_lang::from_source(&item.to_string()) {
      panic!("Failed to parse code : \n\n {} \n-----------\n {:?}", item, e);
    }
  }
}
true
44a42e4aed728a71e04b880964c609c47ff02d61
Rust
ndrsllwngr/aussieplusplus
/src/ast/op.rs
UTF-8
2,466
3.34375
3
[]
no_license
use std::fmt::Display;

use crate::token::Kind;

/// Prefix (unary) operators of the language.
#[derive(Copy, Clone, Debug, PartialEq)]
pub enum UnaryOp {
    Bang,
    Minus,
}

impl From<Kind> for Option<UnaryOp> {
    fn from(kind: Kind) -> Self {
        // Only `!` and unary `-` map to a unary operator.
        let op = match kind {
            Kind::Bang => UnaryOp::Bang,
            Kind::Minus => UnaryOp::Minus,
            _ => return None,
        };
        Some(op)
    }
}

/// Infix (binary) operators of the language.
#[derive(Copy, Clone, Debug, PartialEq)]
pub enum BinaryOp {
    Plus,
    Minus,
    Multiply,
    Divide,
    Equal,
    NotEqual,
    Less,
    LessEqual,
    Greater,
    GreaterEqual,
    Modulo,
}

impl From<Kind> for Option<BinaryOp> {
    fn from(kind: Kind) -> Self {
        let op = match kind {
            Kind::Plus => BinaryOp::Plus,
            Kind::Minus => BinaryOp::Minus,
            Kind::Asterisk => BinaryOp::Multiply,
            Kind::Slash => BinaryOp::Divide,
            Kind::Equals => BinaryOp::Equal,
            Kind::BangEqual => BinaryOp::NotEqual,
            Kind::LeftBoomerang => BinaryOp::Less,
            Kind::LTE => BinaryOp::LessEqual,
            Kind::RightBoomerang => BinaryOp::Greater,
            Kind::GTE => BinaryOp::GreaterEqual,
            Kind::Modulo => BinaryOp::Modulo,
            _ => return None,
        };
        Some(op)
    }
}

impl Display for BinaryOp {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Render the conventional symbol for each operator.
        let symbol = match self {
            Self::Plus => "+",
            Self::Minus => "-",
            Self::Multiply => "*",
            Self::Divide => "/",
            Self::Equal => "==",
            Self::NotEqual => "!=",
            Self::Less => "<",
            Self::LessEqual => "<=",
            Self::Greater => ">",
            Self::GreaterEqual => ">=",
            Self::Modulo => "%",
        };
        write!(f, "{}", symbol)
    }
}

/// Short-circuiting logical operators.
#[derive(Copy, Clone, Debug, PartialEq)]
pub enum LogicalOp {
    And,
    Or,
}

impl From<Kind> for Option<LogicalOp> {
    fn from(kind: Kind) -> Self {
        let op = match kind {
            Kind::And => LogicalOp::And,
            Kind::Or => LogicalOp::Or,
            _ => return None,
        };
        Some(op)
    }
}

impl Display for LogicalOp {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let symbol = match self {
            Self::And => "&&",
            Self::Or => "||",
        };
        write!(f, "{}", symbol)
    }
}
true
11769aa5f74c1ffe5eef8383bd38d9391ddea7d7
Rust
NyxCode/Penrose-Polygon-Generator
/impossible-polygon/src/utils.rs
UTF-8
5,064
3.578125
4
[]
no_license
use crate::types::*; use std::f64::consts::PI; use svg::Document; use svg::node::element::Polygon; /// `get_wrap` gets an element from a `Vec` at a specific index. /// Instead of causing a 'index out of bounds' panic, the index is 'wrapped around', /// so `vec![1, 2, 3].get_wrap(3)` returns `1` pub trait GetWrapping<T> { fn get_wrap(&self, index: usize) -> &T; } impl <T> GetWrapping<T> for Vec<T> { fn get_wrap(&self, index: usize) -> &T { assert!(!self.is_empty()); &self[index % self.len()] } } /// Generates a regular polygon with n edges rotated by 'phi' radians. /// All points are 'radius' away from the origin. pub fn generate_regular_polygon(n: u32, radius: f64, phi: f64) -> Vec<Point> { let delta_angle = 2.0 * PI / n as f64; (0..n) .map(|n| n as f64 * delta_angle + phi) .map(|a| point(a.sin() * radius, a.cos() * radius)) .collect() } /// Returns the inner angle of a regular polygon with n edges. /// /// example: /// `inner_angle_of_regular_polygon(3)` -> `rad(60°)` /// `inner_angle_of_regular_polygon(4)` -> `rad(90°)` pub fn inner_angle_of_regular_polygon(n: u32) -> f64 { f64::from(n - 2) * PI / f64::from(n) } /// Calculates all intersections between two set of lines. /// Intersections between lines in one set are not returned. /// The calculated intersections are sorted by their angle on the unit circle. 
pub fn get_intersections(poly1: &Vec<Line>, poly2: &Vec<Line>) -> Vec<Point> { let mut points = vec![]; for line1 in poly1 { for line2 in poly2 { if let Some(intersection) = line1.intersect(*line2) { points.push(intersection); } } } let mut average = point(0.0, 0.0); for p in &points { average = point(average.x + p.x, average.y + p.y); } average = point(average.x / points.len() as f64, average.y / points.len() as f64); let mut p: Vec<_> = points.iter() .map(|point| (point, *point - average)) .map(|(point, avg)| (point, f64::atan2(avg.x, avg.y))) .collect(); p.sort_by(|(_, atan1), (_, atan2)| (*atan1).partial_cmp(atan2).unwrap()); p.iter().map(|(point, _)| **point).collect() } /// Calculate the radius of a polygon with the given side length and edges. /// /// example: /// a circle around a square with side length 1 has a radius of sqrt(2) /// `calculate_regular_polygon_radius(4, 1)` -> `sqrt(2)` pub fn calculate_regular_polygon_radius(n: u32, side_len: f64) -> f64 { let alpha = inner_angle_of_regular_polygon(n); (side_len * (alpha / 2.0).sin()) / (PI - alpha).sin() } /// Connects points, resulting in a `Vec` of `Line`s. 
/// /// `points[0]` is connected to `points[1]` /// `points[1]` is connected to `points[2]` /// `points.last()` is connected to `points[0]` pub fn connect_points(points: &Vec<Point>) -> Vec<Line> { assert!(points.len() >= 3); let mut lines = Vec::with_capacity(points.len()); let mut iterator = points.iter().peekable(); let first_point = iterator.next().unwrap(); let mut last_point = first_point; while iterator.peek().is_some() { let current_point = iterator.next().unwrap(); let line = line(*last_point, *current_point); lines.push(line); last_point = current_point; } let last_line = line(*last_point, *first_point); lines.push(last_line); lines } /// Moves every entry in `vec` one to the right while moving the last entry to the start pub fn rotate_vec<E>(vec: &mut Vec<E>) { let last_element = vec.pop().expect("vec is empty"); vec.insert(0, last_element); } /// Draws lines of the given color into the document pub fn draw_lines<S: Into<String> + Copy>(document: Document, lines: &Vec<Line>, color: S) -> Document { let mut doc = document; for line in lines { doc = doc.add(line.to_svg(color)) } doc } /// Draws a polygon of the given color described by the given points to the document pub fn draw_polygon<S: Into<String>>(document: Document, points: &Vec<Point>, color: S) -> Document { let points_str = points .iter() .map(|point| format!("{},{}", point.x, point.y)) .collect::<Vec<_>>() .join(" "); let polygon = Polygon::new() .set("points", points_str) .set("fill", color.into()); document.add(polygon) } /// sets the viewbox of the given document to completely display all given lines pub fn set_viewbox(document: Document, lines: &Vec<Line>) -> Document { let cords = lines.iter().flat_map(|line| vec![line.a, line.b]); let min_x = cords.clone().map(|cord| cord.x).fold(1. / 0., f64::min) * 1.1; let min_y = cords.clone().map(|cord| cord.y).fold(1. / 0., f64::min) * 1.1; let max_x = cords.clone().map(|cord| cord.x).fold(0. 
/ 0., f64::max) * 1.1; let max_y = cords.clone().map(|cord| cord.y).fold(0. / 0., f64::max) * 1.1; document.set("viewBox", (min_x, min_y, max_x - min_x, max_y - min_y)) }
true
3b18f8ed907637912c9b593bc479651838378e43
Rust
katyo/literium
/rust/backend/src/third/traits.rs
UTF-8
751
2.59375
3
[ "MIT" ]
permissive
use super::ThirdError;

use base::BoxFuture;

use std::borrow::Cow;

/// Third service interface
///
/// Abstraction over an external account provider: `S` is the state type
/// handed to each call and `A` the user-profile type it produces.
pub trait IsThirdService<S, A>: Sized {
    /// The name of provider
    ///
    /// Example: github, google, ...etc.
    fn service_name(&self) -> Cow<str>;

    /// Fetch user info
    ///
    /// Get user profile from third service using the given access token.
    fn get_user_profile(&self, state: &S, access_token: Cow<str>) -> BoxFuture<A, ThirdError>;
}

// Blanket impl: a shared reference to a service is itself a service,
// delegating both methods to the referenced value.
impl<'a, T, S, A> IsThirdService<S, A> for &'a T
where
    T: IsThirdService<S, A>,
{
    fn service_name(&self) -> Cow<str> {
        (*self).service_name()
    }

    fn get_user_profile(&self, state: &S, access_token: Cow<str>) -> BoxFuture<A, ThirdError> {
        (*self).get_user_profile(state, access_token)
    }
}
true
18039f6a976d574f0670596b5a6d45fff7100d2a
Rust
gerritsangel/lapin
/src/executor.rs
UTF-8
2,706
2.875
3
[ "MIT" ]
permissive
// Executor abstraction plus a default implementation that drives tasks on a
// small, lazily-grown pool of worker threads fed through an unbounded channel.
use crate::{thread::ThreadHandle, Result};

use async_task::Task;
use crossbeam_channel::{Receiver, Sender};
use parking_lot::Mutex;

use std::{fmt, future::Future, ops::Deref, pin::Pin, sync::Arc, thread::Builder as ThreadBuilder};

/// Anything that can run a pinned, boxed future to completion.
pub trait Executor: std::fmt::Debug + Send + Sync {
    fn spawn(&self, f: Pin<Box<dyn Future<Output = ()> + Send>>) -> Result<()>;
}

// Delegation so an `Arc<dyn Executor>` can be used wherever an `Executor` is expected.
impl Executor for Arc<dyn Executor> {
    fn spawn(&self, f: Pin<Box<dyn Future<Output = ()> + Send>>) -> Result<()> {
        self.deref().spawn(f)
    }
}

#[derive(Clone)]
pub struct DefaultExecutor {
    // Work queue; `None` is the shutdown sentinel that makes a worker exit.
    sender: Sender<Option<Task<()>>>,
    receiver: Receiver<Option<Task<()>>>,
    // Handles of the worker threads spawned so far (at most `max_threads`).
    threads: Arc<Mutex<Vec<ThreadHandle>>>,
    max_threads: usize,
}

impl DefaultExecutor {
    pub fn new(max_threads: usize) -> Self {
        let (sender, receiver) = crossbeam_channel::unbounded();
        let threads = Default::default();
        Self {
            sender,
            receiver,
            threads,
            max_threads,
        }
    }

    /// Spawns one additional worker thread unless `max_threads` has been
    /// reached. Called on every `spawn`, so the pool grows on demand.
    pub(crate) fn maybe_spawn_thread(&self) -> Result<()> {
        let mut threads = self.threads.lock();
        let id = threads.len() + 1;
        if id <= self.max_threads {
            let receiver = self.receiver.clone();
            threads.push(ThreadHandle::new(
                ThreadBuilder::new()
                    .name(format!("executor {}", id))
                    .spawn(move || {
                        // Run tasks until the `None` sentinel (sent on drop) arrives.
                        while let Ok(Some(task)) = receiver.recv() {
                            task.run();
                        }
                        Ok(())
                    })?,
            ));
        }
        Ok(())
    }
}

impl Default for DefaultExecutor {
    fn default() -> Self {
        Self::new(1)
    }
}

impl fmt::Debug for DefaultExecutor {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Channels and thread handles carry no useful debug info; show only the limit.
        f.debug_struct("DefaultExecutor")
            .field("max_threads", &self.max_threads)
            .finish()
    }
}

impl Executor for DefaultExecutor {
    fn spawn(&self, f: Pin<Box<dyn Future<Output = ()> + Send>>) -> Result<()> {
        self.maybe_spawn_thread()?;
        let sender = self.sender.clone();
        // `schedule` is invoked by async-task whenever the task must be polled
        // (initially and on wake-ups); it just re-queues the task for a worker.
        let schedule = move |task| sender.send(Some(task)).expect("executor failed");
        let (task, _) = async_task::spawn(f, schedule, ());
        task.schedule();
        Ok(())
    }
}

impl Drop for DefaultExecutor {
    fn drop(&mut self) {
        // NOTE(review): `DefaultExecutor` is `Clone` and this runs for every
        // clone — dropping any clone sends shutdown sentinels on the shared
        // channel; `try_lock` also silently skips cleanup if the lock is held.
        // Confirm both are intended.
        if let Some(threads) = self.threads.try_lock() {
            // One `None` per worker so each recv loop terminates...
            for _ in threads.iter() {
                let _ = self.sender.send(None);
            }
            // ...then join them all.
            for thread in threads.iter() {
                let _ = thread.wait("executor");
            }
        }
    }
}
true
775b75fcca2b889610e39c9ddfe73b3b86840d67
Rust
itto-ki/atcoder
/ABC/028/B.rs
UTF-8
482
3.28125
3
[]
no_license
/// Reads one line from stdin with surrounding whitespace trimmed.
fn read_string() -> String {
    let mut input = String::new();
    let _ = std::io::stdin().read_line(&mut input);
    input.trim().to_string()
}

/// Counts occurrences of the letters 'A'..='F' in `s`.
/// Characters outside that range are ignored instead of causing an
/// out-of-bounds index (the original indexed the array unchecked).
fn count_letters(s: &str) -> [usize; 6] {
    let mut counts = [0; 6];
    for c in s.chars() {
        if ('A'..='F').contains(&c) {
            counts[(c as u8 - b'A') as usize] += 1;
        }
    }
    counts
}

/// Formats the six counts as space-separated values on one line,
/// matching the original output (no trailing space).
fn format_counts(counts: &[usize; 6]) -> String {
    counts
        .iter()
        .map(|v| v.to_string())
        .collect::<Vec<_>>()
        .join(" ")
}

fn main() {
    let s = read_string();
    println!("{}", format_counts(&count_letters(&s)));
}
true
b02bb3df9b025a0dc6383ba3d36b5d1eb386e966
Rust
crypto-forks/rebuild-ethereum-in-rust
/rs/src/util/mod.rs
UTF-8
3,173
3.125
3
[ "LicenseRef-scancode-other-permissive" ]
permissive
use crate::account::Account; use crate::blockchain::block::U256; use crate::blockchain::blockchain::Blockchain; use crate::interpreter::OPCODE; use crate::store::state::State; use crate::transaction::tx::Transaction; use crate::transaction::tx_queue::TransactionQueue; use itertools::Itertools; use serde::{Deserialize, Serialize}; use sha3::{Digest, Keccak256}; #[derive(Debug, Clone, Serialize, Deserialize)] pub struct GlobalState { pub blockchain: Blockchain, pub tx_queue: TransactionQueue, pub miner_account: Account, } pub fn prep_state() -> GlobalState { let code = vec![ OPCODE::PUSH, OPCODE::VAL(10), OPCODE::PUSH, OPCODE::VAL(5), OPCODE::ADD, OPCODE::STOP, ]; println!("MINER ACCOUNT: "); let miner_account = Account::new(vec![]); println!("SMART CONTRACT ACCOUNT: "); let sc_account = Account::new(code); let tx = Transaction::create_transaction(Some(miner_account.clone()), None, 0, None, 100); let tx2 = Transaction::create_transaction(Some(sc_account), None, 0, None, 100); let mut global_state = GlobalState { blockchain: Blockchain::new(State::new()), tx_queue: TransactionQueue::new(), miner_account, }; global_state.tx_queue.add(tx); global_state.tx_queue.add(tx2); global_state } pub fn sort_characters<T>(data: &T) -> String where T: ?Sized + Serialize, { let s = serde_json::to_string(data).unwrap(); // println!("{:?}", s); s.chars().sorted().rev().collect::<String>() } /// Note we're specifically using keccak256 not sha3 /// read about the difference here - https://www.oreilly.com/library/view/mastering-ethereum/9781491971932/ch04.html (under cryptographic hash functions header) pub fn keccak_hash<T>(data: &T) -> String where T: ?Sized + Serialize, { let s = sort_characters(data); // println!("{:?}", s); let mut hasher = Keccak256::new(); hasher.update(s); let result = hasher.finalize(); let hex_r = hex::encode(result); // println!("{}", hex_r); hex_r } pub fn base16_to_base10(base16: &String) -> U256 { U256::from_str_radix(base16, 16).unwrap() } pub fn 
base10_to_base16(base10: U256) -> String { format!("{:x}", base10) } #[cfg(test)] mod tests { use super::*; #[derive(Debug, Serialize, Deserialize)] pub struct Headers { pub header: String, } /// as per https://www.oreilly.com/library/view/mastering-ethereum/9781491971932/ch04.html /// although because I'm serializing the string I was unable to pass complete nothing and instead passed "" /// verify here https://keccak-256.cloxy.net/ #[test] fn test_keccak_correct_algo() { let data: String = "".into(); assert_eq!( keccak_hash(&data), "2392a80f8a87b8cfde0aa5c84e199f163aae4c2a4c512d37598362ace687ad0c" ); } #[test] fn test_keccak_works() { let data = Headers { header: "abc".into(), }; assert_eq!( keccak_hash(&data), "2d30e1a63627cecd178fc7a3851069a65edc462839975a8449379b47bcf66953" ); } }
true
628c92045fe4cae203f6c94f21b7063e8594d98e
Rust
MrFlynn/saltine
/src/main.rs
UTF-8
2,174
2.953125
3
[ "MIT" ]
permissive
#[macro_use]
extern crate clap;

mod crypt;
mod generator;
mod thread;

use std::fs::File;
use std::io::{prelude::*, BufReader};

static DEFAULT_ALPHABET: &'static str = &"abcdefghijklmnopqrstuvwxyz";

/// Looks `username` up in a shadow-format file at `path` and returns its
/// `(hash, salt)` pair; both come back empty when the user is not found.
fn get_user_info(path: &str, username: &str) -> (String, String) {
    let reader = BufReader::new(File::open(path).unwrap());

    let mut hash = String::from("");
    let mut salt = String::from("");

    for line in reader.lines() {
        let content = String::from(line.unwrap());
        let fields: Vec<&str> = content.split_terminator(':').collect();

        if fields[0] != username {
            continue;
        }

        // Crypt field layout: $<algo>$<salt>$<hash> — after splitting on '$',
        // index 2 holds the salt and index 3 the hash.
        let crypt_parts: Vec<&str> = fields[1].split_terminator('$').collect();
        hash = String::from(crypt_parts[3]);
        salt = String::from(crypt_parts[2]);
        break;
    }

    (hash, salt)
}

fn main() {
    let matches = clap_app!(saltine =>
        (version: "0.1.0")
        (author: "Nick Pleatsikas <[email protected]>")
        (about: "Proof-of-concept MD5 password cracking tool.")
        (@arg FILE: +required "/etc/shadow file to crack.")
        (@arg USERNAME: --username -u +takes_value +required "Username to target.")
        (@arg THREADS: --threads -t +takes_value "Number of threads to run on. Defaults to 1.")
        (@arg SIZE: --("password-size") -s +takes_value "Size of password to crack. Defaults to 6 characters.")
        (@arg ALPHABET: --alphabet -a +takes_value "Character alphabet to use. Defaults to all lower case English characters.")
    ).get_matches();

    let filename: &str = matches.value_of("FILE").unwrap();
    let username: &str = matches.value_of("USERNAME").unwrap();

    let threads: u32 = matches
        .value_of("THREADS")
        .unwrap_or("1")
        .parse()
        .unwrap();
    let size: usize = matches
        .value_of("SIZE")
        .unwrap_or("6")
        .parse()
        .unwrap();
    let alphabet: &str = matches.value_of("ALPHABET").unwrap_or(DEFAULT_ALPHABET);

    let (hash, salt) = get_user_info(filename, username);
    thread::run(hash, salt, size, threads, alphabet);
}
true
638573b6e949edb87d8f30f434fd84abdc178c88
Rust
simon-frankau/advent_of_code_2018
/day20b/src/main.rs
UTF-8
10,835
3.453125
3
[ "MIT" ]
permissive
use std::collections::HashMap; use std::collections::HashSet; use std::io; use std::io::Read; use std::iter::Peekable; use std::slice::Iter; #[derive(Clone)] enum Match { Literal(char), Alternation(Vec<Match>), Concatenation(Vec<Match>), } impl std::fmt::Debug for Match { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { match self { Match::Literal(c) => write!(f, "{}", c)?, Match::Concatenation(xs) => { for x in xs.iter() { x.fmt(f)?; } } Match::Alternation(xs) => { // We could do precedence-based printing, but let's always put them in... let mut first = true; for x in xs.iter() { write!(f, "{}", if first {'('} else {'|'})?; first = false; x.fmt(f)?; } write!(f, ")")?; } } Ok(()) } } fn parse_regexp(iter: &mut Peekable<Iter<char>>) -> Match { // Current alternation, made of a sequence of concatentations. let mut alternatives = Vec::new(); // Current concatenation being built. let mut curr = Vec::new(); loop { let c = match iter.peek() { Some(c) => Some(*c), None => None, }; match c { Some('(') => { iter.next(); curr.push(parse_regexp(iter)); if iter.next() != Some(&')') { panic!("Imbalanced brackets"); } } Some('|') => { iter.next(); alternatives.push(Match::Concatenation(curr)); curr = Vec::new(); } Some(')') => break, None => break, Some(c) => { curr.push(Match::Literal(*c)); iter.next(); } } } alternatives.push(Match::Concatenation(curr)); Match::Alternation(alternatives) } //////////////////////////////////////////////////////////////////////// // This is the bit for problem 20a... // // This just cleans up the regexp tree, without understanding paths. fn opt_regexp(m: Match) -> Match { match m { Match::Alternation(xs) => { let xs: Vec<Match> = xs.into_iter().map(opt_regexp).collect(); if xs.len() == 1 { // Take first element, and discard rest. 
xs.into_iter().next().unwrap() } else { Match::Alternation(xs) } } Match::Concatenation(xs) => { let xs: Vec<Match> = xs.into_iter().map(opt_regexp).collect(); if xs.len() == 1 { // Take first element, and discard rest. xs.into_iter().next().unwrap() } else { Match::Concatenation(xs) } } Match::Literal(_) => m, } } // This removes obvious, basic back-tracking (back-tracking that // occurs only within a single concatenation of literals). fn opt_backtracks(m: Match) -> Match { match m { Match::Alternation(xs) => { Match::Alternation(xs.into_iter().map(opt_backtracks).collect()) } Match::Literal(_) => m, Match::Concatenation(xs) => { let mut xs = xs.into_iter().map(opt_backtracks).collect::<Vec<_>>(); let mut i = 0; while i + 1 < xs.len() { if if let (Match::Literal(a), Match::Literal(b)) = (&xs[i], &xs[i+1]) { match (a, b) { ('N', 'S') => true, ('S', 'N') => true, ('W', 'E') => true, ('E', 'W') => true, _ => false, } } else { false } { xs.drain(i..i+2); if i > 0 { i -= 1; } } else { i += 1; } } Match::Concatenation(xs) } } } // Is this an empty match? Used by opt_empties. fn is_empty(m: &Match) -> bool { match m { Match::Literal(_) => false, Match::Concatenation(xs) => xs.iter().all(is_empty), Match::Alternation(xs) => xs.len() > 0 && xs.iter().all(is_empty), } } // And this removes alternatives of thing from concatenations. It's a // specific optimisation, but seems key to this exercise. 
fn opt_empties(m: Match) -> Match { match m { Match::Alternation(xs) => { Match::Alternation(xs.into_iter().map(opt_empties).collect()) } Match::Literal(_) => m, Match::Concatenation(xs) => { Match::Concatenation(xs.into_iter().map(opt_empties).filter(|x| !is_empty(x)).collect()) } } } //////////////////////////////////////////////////////////////////////// // Problem 20b part // // Find the route to the turning point for a sequence of literals fn get_literal_partial(xs: &[Match]) -> Option<Vec<Match>> { if xs.len() == 0 { return None; } for elem in xs.iter().zip(xs.iter().rev()) { match elem { (Match::Literal('N'), Match::Literal('S')) => (), (Match::Literal('S'), Match::Literal('N')) => (), (Match::Literal('W'), Match::Literal('E')) => (), (Match::Literal('E'), Match::Literal('W')) => (), _ => return None, } } Some(xs.iter().take(xs.len() / 2).map(|x| (*x).clone()).collect()) } // Given a route that involves back-tracks, generate a list of routes // up to the turning-around point. e.g. NEWS -> NE. fn get_partials(m: &Match) -> Vec<Match> { match m { Match::Alternation(xs) => { let mut res = Vec::new(); for alternative in xs.iter() { res.extend(get_partials(alternative).into_iter()); } res } // A single literal will have no backtrackable parts. Match::Literal(_) => Vec::new(), Match::Concatenation(xs) => { match get_literal_partial(xs) { Some(x) => vec![Match::Concatenation(x)], None => { let mut res = Vec::new(); for i in 0..xs.len() { let partials = get_partials(&xs[i]); for partial in partials.into_iter() { let mut element = xs.iter().take(i).map(|x| (*x).clone()).collect::<Vec<Match>>(); element.push(partial); res.push(Match::Concatenation(element)); } } res } } } } } //////////////////////////////////////////////////////////////////////// // Generate all the possible strings. 
// fn generate_all(m: &Match) -> HashSet<String> { let mut res: HashSet<String> = HashSet::new(); match m { Match::Literal(x) => { res.insert(x.to_string()); () } Match::Alternation(xs) => { for x in xs.iter() { res.extend(generate_all(x).into_iter()); } } Match::Concatenation(xs) => { // Ugh. Cross products are potentially expensive. res.insert(String::new()); for x in xs.iter() { let to_cross = generate_all(x); add_cross_string(&mut res, &to_cross); } } } res } fn add_cross_string(lhs: &mut HashSet<String>, rhs: &HashSet<String>) { let mut res = HashSet::new(); for s1 in lhs.iter() { for s2 in rhs.iter() { let mut s = s1.clone(); s.push_str(&s2); res.insert(s); } } // This is where I'd like to swap lhs and res. lhs.clear(); lhs.extend(res.into_iter()); } // Generate all the incremental paths fn all_prefixes(strs: &HashSet<String>) -> HashSet<String> { let mut seen = HashSet::new(); for str in strs.iter() { for l in 0..str.len() { seen.insert(str.get(0..l+1).unwrap().to_string()); } } seen } // Given a path, generate the coordinates of its end point. fn get_coords(s: &str) -> (i32, i32) { let y = s.chars().map(|c| match c { 'N' => 1, 'S' => -1, _ => 0, }).sum(); let x = s.chars().map(|c| match c { 'E' => 1, 'W' => -1, _ => 0, }).sum(); (x, y) } // Build a mapping from coord to shortest route there. 
fn build_mapping(strs: &HashSet<String>) -> HashMap<(i32, i32), usize> { let mut map = HashMap::new(); for s in strs.iter() { let xy = get_coords(s); let l = s.len(); let e = map.entry(xy).or_insert(1000000); if l < *e { *e = l; } } map } // Count the long routes fn count_long(l: usize, mapping: &HashMap<(i32, i32), usize>) -> usize { mapping.iter().filter(|(_, l2)| **l2 >= l).count() } fn main() { let mut buffer = String::new(); io::stdin().read_to_string(&mut buffer).expect("Read error"); let chars = buffer.replace('^', "").replace('$', "").trim().chars().collect::<Vec<_>>(); // println!("{:?}\n", chars); let res = parse_regexp(&mut chars.iter().peekable()); // println!("{:?}\n", res); // All the backtracks form a trivial pattern, so we'll extract all // the routes up to a backtrack (plus original route). let mut partials = get_partials(&res); partials.push(res); // println!("{:?}\n", partials); // Then we'll eliminate the back-tracks, etc. let partials = partials.into_iter().map(|x| opt_empties(opt_backtracks(opt_regexp(x)))).collect::<Vec<_>>(); // println!("{:?}\n", partials); println!("{}\n", partials.len()); // And now build the regexp of doom. let regex = Match::Alternation(partials); let all = generate_all(&regex); // println!("{:?}\n", all); println!("{}\n", all.len()); // We have all the paths, now generate all the partial paths. let prefixes = all_prefixes(&all); println!("{}\n", prefixes.len()); // Some paths will overlap, so for each coordinate, find the shortest path there. let mapping = build_mapping(&prefixes); println!("{}\n", mapping.len()); // And find the count of coordinates over length 1000. println!("{}\n", count_long(1000, &mapping)); // My, that was really, really tedious. // If I'd known you could just generate all of the paths in // sensible time once you'd taken out the obvious // backtracking... *sigh*. }
true
97885ac75129bd13ac285829de7e50685b568fc5
Rust
DobromirM/rust-examples
/custom-future-test/src/main.rs
UTF-8
930
2.90625
3
[]
no_license
use async_std::task; use futures::task::{Context, Poll}; use std::future::Future; use std::time::Duration; use tokio::macros::support::Pin; #[tokio::main] async fn main() { let str = HelloFuture::new(hello()).await; println!("{}", str) } async fn hello() -> String { task::sleep(Duration::from_secs(1)).await; return "Hello, world!".to_string(); } struct HelloFuture<Fut> where Fut: Future<Output = String> + Unpin, { fut: Fut, } impl<Fut> HelloFuture<Pin<Box<Fut>>> where Fut: Future<Output = String>, { fn new(fut: Fut) -> HelloFuture<Pin<Box<Fut>>> { HelloFuture { fut: Box::pin(fut) } } } impl<Fut> Future for HelloFuture<Fut> where Fut: Future<Output = String> + Unpin, { type Output = String; fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> { let mut_self = self.get_mut(); Pin::new(&mut mut_self.fut).poll(cx) } }
true
a2c6c451bc6b33bec2f92b79026a8f7d979d8851
Rust
opp11/calcr
/src/lexer.rs
UTF-8
6,270
3.6875
4
[ "MIT" ]
permissive
use std::str::Chars; use std::iter::Peekable; use errors::{CalcrResult, CalcrError}; use token::Token; use token::TokVal::*; use token::OpKind::*; use token::DelimKind::*; pub fn lex_equation(eq: &String) -> CalcrResult<Vec<Token>> { let mut lexer = Lexer { pos: 0, iter: eq.chars().peekable(), }; lexer.lex_expression() } pub struct Lexer<'a> { pos: usize, iter: Peekable<Chars<'a>>, } impl<'a> Lexer<'a> { pub fn lex_expression(&mut self) -> CalcrResult<Vec<Token>> { let mut out = Vec::new(); loop { self.consume_whitespace(); let tok = match self.peek_char() { Some(ch) if ch.is_numeric() => try!(self.lex_number()), Some(ch) if ch.is_alphabetic() => try!(self.lex_name()), Some(_) => try!(self.lex_single_char()), None => break, }; out.push(tok); } Ok(out) } fn lex_number(&mut self) -> CalcrResult<Token> { let num_str = self.consume_while(|ch| ch.is_numeric() || ch == '.'); if let Ok(num) = num_str.parse::<f64>() { Ok(Token { val: Num(num), span: (self.pos - num_str.len(), self.pos), }) } else { Err(CalcrError { desc: format!("Invalid number: {}", num_str), span: Some((self.pos - num_str.len(), self.pos)), }) } } fn lex_name(&mut self) -> CalcrResult<Token> { let name_str = self.consume_while(|ch| ch.is_alphabetic() || ch.is_numeric()); let len = name_str.chars().count(); Ok(Token { val: Name(name_str), span: (self.pos - len, self.pos), }) } fn lex_single_char(&mut self) -> CalcrResult<Token> { let val = match self.consume_char() { '+' => Op(Plus), '-' => Op(Minus), '*' => Op(Mult), '/' => Op(Div), '^' => Op(Pow), '!' 
=> Op(Fact), '=' => Op(Assign), '√' => Name("sqrt".to_string()), '(' => OpenDelim(Paren), '[' => OpenDelim(Bracket), '{' => OpenDelim(Brace), ')' => CloseDelim(Paren), ']' => CloseDelim(Bracket), '}' => CloseDelim(Brace), '|' => AbsDelim, ch => return Err(CalcrError { desc: format!("Invalid char: {}", ch), span: Some((self.pos - 1, self.pos)), }), }; Ok(Token { val: val, span: (self.pos - 1, self.pos), }) } /// Peeks at the next `char` and returns `Some` if one was found, or `None` if none are left fn peek_char(&mut self) -> Option<char> { self.iter.peek().map(|ch| *ch) } /// Consumes a `char` - thereby advanding `pos` - and returns it /// /// # Panics /// This function panics if there are no more chars to consume fn consume_char(&mut self) -> char { let ch = self.iter.next(); self.pos += 1; ch.unwrap().to_lowercase().next().unwrap() } /// Consumes `char`s long as `pred` returns true and we are not eof /// /// The `char`s are returned as a `String`. Note that unlike `consume_char` this function will /// not panic. 
fn consume_while<F>(&mut self, pred: F) -> String where F: Fn(char) -> bool { let mut out = String::new(); loop { match self.peek_char() { Some(ch) if pred(ch) => out.push(self.consume_char()), _ => break, } } out } /// Consumes any current whitespace fn consume_whitespace(&mut self) { self.consume_while(|ch| ch.is_whitespace()); } } #[cfg(test)] mod tests { use super::lex_equation; use token::Token; use token::TokVal::*; use token::OpKind::*; use token::DelimKind::*; #[test] fn empty() { let eq = "".to_string(); let toks = lex_equation(&eq); assert_eq!(toks, Ok(vec!())); } #[test] fn single_char() { let eq = "2".to_string(); let toks = lex_equation(&eq); assert_eq!(toks, Ok(vec!(Token { val: Num(2.0), span: (0, 1) }))); } #[test] fn utf8() { let eq = "π𐍈".to_string(); let toks = lex_equation(&eq); assert_eq!(toks, Ok(vec!(Token { val: Name(eq), span: (0, 2) }))); } #[test] fn double_width() { let eq = "指事字假借".to_string(); let toks = lex_equation(&eq); assert_eq!(toks, Ok(vec!(Token { val: Name(eq), span: (0, 5) }))); } #[test] fn ops() { let eq = "+-*/!^".to_string(); let toks = lex_equation(&eq); assert_eq!(toks, Ok(vec!(Token { val: Op(Plus), span: (0,1) }, Token { val: Op(Minus), span: (1,2) }, Token { val: Op(Mult), span: (2,3) }, Token { val: Op(Div), span: (3,4) }, Token { val: Op(Fact), span: (4,5) }, Token { val: Op(Pow), span: (5,6) }))); } #[test] fn delims() { let eq = "|()[]{}".to_string(); let toks = lex_equation(&eq); assert_eq!(toks, Ok(vec!(Token { val: AbsDelim, span: (0,1) }, Token { val: OpenDelim(Paren), span: (1,2) }, Token { val: CloseDelim(Paren), span: (2,3) }, Token { val: OpenDelim(Bracket), span: (3,4) }, Token { val: CloseDelim(Bracket), span: (4,5) }, Token { val: OpenDelim(Brace), span: (5,6) }, Token { val: CloseDelim(Brace), span: (6,7) }))); } #[test] fn sqrt_single_char() { let eq = "√".to_string(); let toks = lex_equation(&eq); assert_eq!(toks, Ok(vec!(Token { val: Name("sqrt".to_string()), span: (0,1) }))); } #[test] fn 
invalid_char() { let eq = "?".to_string(); let err = lex_equation(&eq); assert!(err.is_err()); } }
true
47312686490d987bad2768aeaca02d2c00ffa248
Rust
aylei/leetcode-rust
/src/solution/s0208_implement_trie_prefix_tree.rs
UTF-8
2,517
3.75
4
[ "Apache-2.0" ]
permissive
/** * [208] Implement Trie (Prefix Tree) * * Implement a trie with insert, search, and startsWith methods. * * Example: * * * Trie trie = new Trie(); * * trie.insert("apple"); * trie.search("apple"); // returns true * trie.search("app"); // returns false * trie.startsWith("app"); // returns true * trie.insert("app"); * trie.search("app"); // returns true * * * Note: * * * You may assume that all inputs are consist of lowercase letters a-z. * All inputs are guaranteed to be non-empty strings. * * */ pub struct Solution {} // problem: https://leetcode.com/problems/implement-trie-prefix-tree/ // discuss: https://leetcode.com/problems/implement-trie-prefix-tree/discuss/?currentPage=1&orderBy=most_votes&query= // submission codes start here #[derive(Default)] struct Trie { is_end: bool, nodes: [Option<Box<Trie>>; 26], } /** * `&self` means the method takes an immutable reference. * If you need a mutable reference, change it to `&mut self` instead. */ impl Trie { /** Initialize your data structure here. */ fn new() -> Self { Default::default() } /** insert a word into the trie. */ fn insert(&mut self, word: String) { let mut curr = self; for i in word.chars().map(|ch| (ch as u8 - 'a' as u8) as usize) { curr = curr.nodes[i].get_or_insert_with(|| Box::new(Trie::new())); } curr.is_end = true; } /** Returns if the word is in the trie. */ fn search(&self, word: String) -> bool { self.find(word).map_or(false, |t| t.is_end) } /** Returns if there is any word in the trie that starts with the given prefix. 
*/ fn starts_with(&self, prefix: String) -> bool { self.find(prefix).is_some() } fn find(&self, word: String) -> Option<&Trie> { let mut curr = self; for i in word.chars().map(|ch| (ch as u8 - 'a' as u8) as usize) { curr = curr.nodes[i].as_ref()?; } Some(curr) } } // submission codes end #[cfg(test)] mod tests { use super::*; #[test] fn test_208() { let mut trie = Trie::new(); trie.insert("apple".to_owned()); assert_eq!(trie.search("apple".to_owned()), true); // returns true assert_eq!(trie.search("app".to_owned()), false); assert_eq!(trie.starts_with("app".to_owned()), true); // returns true trie.insert("app".to_owned()); assert_eq!(trie.search("app".to_owned()), true); // returns true } }
true
a610c77279e83975aa7ccdef6d737dee65441890
Rust
CalliEve/boa
/boa_engine/src/syntax/parser/expression/primary/array_initializer/mod.rs
UTF-8
4,580
2.890625
3
[ "MIT", "Unlicense" ]
permissive
//! Array initializer parsing. //! //! More information: //! - [MDN documentation][mdn] //! - [ECMAScript specification][spec] //! //! [mdn]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array //! [spec]: https://tc39.es/ecma262/#sec-array-initializer #[cfg(test)] mod tests; use crate::syntax::{ ast::{ node::{ArrayDecl, Node, Spread}, Punctuator, }, lexer::TokenKind, parser::{ expression::AssignmentExpression, AllowAwait, AllowYield, Cursor, ParseError, TokenParser, }, }; use boa_interner::Interner; use boa_profiler::Profiler; use std::io::Read; /// Parses an array literal. /// /// More information: /// - [MDN documentation][mdn] /// - [ECMAScript specification][spec] /// /// [mdn]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array /// [spec]: https://tc39.es/ecma262/#prod-ArrayLiteral #[derive(Debug, Clone, Copy)] pub(super) struct ArrayLiteral { allow_yield: AllowYield, allow_await: AllowAwait, } impl ArrayLiteral { /// Creates a new `ArrayLiteral` parser. 
pub(super) fn new<Y, A>(allow_yield: Y, allow_await: A) -> Self where Y: Into<AllowYield>, A: Into<AllowAwait>, { Self { allow_yield: allow_yield.into(), allow_await: allow_await.into(), } } } impl<R> TokenParser<R> for ArrayLiteral where R: Read, { type Output = ArrayDecl; fn parse( self, cursor: &mut Cursor<R>, interner: &mut Interner, ) -> Result<Self::Output, ParseError> { let _timer = Profiler::global().start_event("ArrayLiteral", "Parsing"); let mut elements = Vec::new(); let mut has_trailing_comma_spread = false; let mut next_comma = false; let mut last_spread = false; loop { let token = cursor.peek(0, interner)?.ok_or(ParseError::AbruptEnd)?; match token.kind() { TokenKind::Punctuator(Punctuator::CloseBracket) => { cursor.next(interner).expect("token disappeared"); break; } TokenKind::Punctuator(Punctuator::Comma) if next_comma => { cursor.next(interner).expect("token disappeared"); if last_spread { let token = cursor.peek(0, interner)?.ok_or(ParseError::AbruptEnd)?; if token.kind() == &TokenKind::Punctuator(Punctuator::CloseBracket) { has_trailing_comma_spread = true; } } next_comma = false; } TokenKind::Punctuator(Punctuator::Comma) => { cursor.next(interner).expect("token disappeared"); elements.push(Node::Empty); } TokenKind::Punctuator(Punctuator::Spread) if next_comma => { return Err(ParseError::unexpected( token.to_string(interner), token.span(), "expected comma or end of array", )); } TokenKind::Punctuator(Punctuator::Spread) => { cursor.next(interner).expect("token disappeared"); let node = AssignmentExpression::new(None, true, self.allow_yield, self.allow_await) .parse(cursor, interner)?; elements.push(Spread::new(node).into()); next_comma = true; last_spread = true; } _ if next_comma => { return Err(ParseError::unexpected( token.to_string(interner), token.span(), "expected comma or end of array", )); } _ => { let node = AssignmentExpression::new(None, true, self.allow_yield, self.allow_await) .parse(cursor, interner)?; elements.push(node); 
next_comma = true; last_spread = false; } } } if last_spread { if let Some(Node::Empty) = elements.last() { has_trailing_comma_spread = true; } } Ok(ArrayDecl::new(elements, has_trailing_comma_spread)) } }
true
2004453afff8647ccc72d2d7ef85235983f5defa
Rust
cryptoJLee/anchor-escrow
/programs/escrow/src/lib.rs
UTF-8
10,585
2.796875
3
[]
no_license
//! An example of an escrow program, inspired by PaulX tutorial seen here //! https://paulx.dev/blog/2021/01/14/programming-on-solana-an-introduction/ //! This example has some changes to implementation, but more or less should be the same overall //! Also gives examples on how to use some newer anchor features and CPI //! //! User (Initializer) constructs an escrow deal: //! - SPL token (X) they will offer and amount //! - SPL token (Y) count they want in return and amount //! - Program will take ownership of initializer's token X account //! //! Once this escrow is initialised, either: //! 1. User (Taker) can call the exchange function to exchange their Y for X //! - This will close the escrow account and no longer be usable //! OR //! 2. If no one has exchanged, the initializer can close the escrow account //! - Initializer will get back ownership of their token X account use anchor_lang::prelude::*; use anchor_spl::token::{self, CloseAccount, Mint, SetAuthority, TokenAccount, Transfer}; use spl_token::instruction::AuthorityType; declare_id!("Fg6PaFpoGXkYsidMpWTK6W2BeZ7FEfcYkg476zPFsLnS"); #[program] pub mod escrow { use super::*; const ESCROW_PDA_SEED: &[u8] = b"escrow"; pub fn initialize_escrow( ctx: Context<InitializeEscrow>, _vault_account_bump: u8, initializer_amount: u64, taker_amount: u64, ) -> ProgramResult { ctx.accounts.escrow_account.initializer_key = *ctx.accounts.initializer.key; ctx.accounts .escrow_account .initializer_deposit_token_account = *ctx .accounts .initializer_deposit_token_account .to_account_info() .key; ctx.accounts .escrow_account .initializer_receive_token_account = *ctx .accounts .initializer_receive_token_account .to_account_info() .key; ctx.accounts.escrow_account.initializer_amount = initializer_amount; ctx.accounts.escrow_account.taker_amount = taker_amount; let (vault_authority, _vault_authority_bump) = Pubkey::find_program_address(&[ESCROW_PDA_SEED], ctx.program_id); token::set_authority( 
ctx.accounts.into_set_authority_context(), AuthorityType::AccountOwner, Some(vault_authority), )?; token::transfer( ctx.accounts.into_transfer_to_pda_context(), ctx.accounts.escrow_account.initializer_amount, )?; Ok(()) } pub fn cancel_escrow(ctx: Context<CancelEscrow>) -> ProgramResult { let (_vault_authority, vault_authority_bump) = Pubkey::find_program_address(&[ESCROW_PDA_SEED], ctx.program_id); let authority_seeds = &[&ESCROW_PDA_SEED[..], &[vault_authority_bump]]; token::transfer( ctx.accounts .into_transfer_to_initializer_context() .with_signer(&[&authority_seeds[..]]), ctx.accounts.escrow_account.initializer_amount, )?; token::close_account( ctx.accounts .into_close_context() .with_signer(&[&authority_seeds[..]]), )?; Ok(()) } pub fn exchange(ctx: Context<Exchange>) -> ProgramResult { // Transferring from initializer to taker let (_vault_authority, vault_authority_bump) = Pubkey::find_program_address(&[ESCROW_PDA_SEED], ctx.program_id); let authority_seeds = &[&ESCROW_PDA_SEED[..], &[vault_authority_bump]]; token::transfer( ctx.accounts.into_transfer_to_initializer_context(), ctx.accounts.escrow_account.taker_amount, )?; token::transfer( ctx.accounts .into_transfer_to_taker_context() .with_signer(&[&authority_seeds[..]]), ctx.accounts.escrow_account.initializer_amount, )?; token::close_account( ctx.accounts .into_close_context() .with_signer(&[&authority_seeds[..]]), )?; Ok(()) } } #[derive(Accounts)] #[instruction(vault_account_bump: u8, initializer_amount: u64)] pub struct InitializeEscrow<'info> { #[account(mut, signer)] pub initializer: AccountInfo<'info>, pub mint: Account<'info, Mint>, #[account( init, seeds = [b"token-seed".as_ref()], bump = vault_account_bump, payer = initializer, token::mint = mint, token::authority = initializer, )] pub vault_account: Account<'info, TokenAccount>, #[account( mut, constraint = initializer_deposit_token_account.amount >= initializer_amount )] pub initializer_deposit_token_account: Account<'info, TokenAccount>, pub 
initializer_receive_token_account: Account<'info, TokenAccount>, #[account(zero)] pub escrow_account: ProgramAccount<'info, EscrowAccount>, pub system_program: AccountInfo<'info>, pub rent: Sysvar<'info, Rent>, pub token_program: AccountInfo<'info>, } #[derive(Accounts)] pub struct CancelEscrow<'info> { #[account(mut, signer)] pub initializer: AccountInfo<'info>, #[account(mut)] pub vault_account: Account<'info, TokenAccount>, pub vault_authority: AccountInfo<'info>, #[account(mut)] pub initializer_deposit_token_account: Account<'info, TokenAccount>, #[account( mut, constraint = escrow_account.initializer_key == *initializer.key, constraint = escrow_account.initializer_deposit_token_account == *initializer_deposit_token_account.to_account_info().key, close = initializer )] pub escrow_account: ProgramAccount<'info, EscrowAccount>, pub token_program: AccountInfo<'info>, } #[derive(Accounts)] pub struct Exchange<'info> { #[account(signer)] pub taker: AccountInfo<'info>, #[account(mut)] pub taker_deposit_token_account: Account<'info, TokenAccount>, #[account(mut)] pub taker_receive_token_account: Account<'info, TokenAccount>, #[account(mut)] pub initializer_deposit_token_account: Account<'info, TokenAccount>, #[account(mut)] pub initializer_receive_token_account: Account<'info, TokenAccount>, #[account(mut)] pub initializer: AccountInfo<'info>, #[account( mut, constraint = escrow_account.taker_amount <= taker_deposit_token_account.amount, constraint = escrow_account.initializer_deposit_token_account == *initializer_deposit_token_account.to_account_info().key, constraint = escrow_account.initializer_receive_token_account == *initializer_receive_token_account.to_account_info().key, constraint = escrow_account.initializer_key == *initializer.key, close = initializer )] pub escrow_account: ProgramAccount<'info, EscrowAccount>, #[account(mut)] pub vault_account: Account<'info, TokenAccount>, pub vault_authority: AccountInfo<'info>, pub token_program: AccountInfo<'info>, } 
#[account] pub struct EscrowAccount { pub initializer_key: Pubkey, pub initializer_deposit_token_account: Pubkey, pub initializer_receive_token_account: Pubkey, pub initializer_amount: u64, pub taker_amount: u64, } impl<'info> InitializeEscrow<'info> { fn into_transfer_to_pda_context(&self) -> CpiContext<'_, '_, '_, 'info, Transfer<'info>> { let cpi_accounts = Transfer { from: self .initializer_deposit_token_account .to_account_info() .clone(), to: self.vault_account.to_account_info().clone(), authority: self.initializer.clone(), }; CpiContext::new(self.token_program.clone(), cpi_accounts) } fn into_set_authority_context(&self) -> CpiContext<'_, '_, '_, 'info, SetAuthority<'info>> { let cpi_accounts = SetAuthority { account_or_mint: self.vault_account.to_account_info().clone(), current_authority: self.initializer.clone(), }; let cpi_program = self.token_program.to_account_info(); CpiContext::new(cpi_program, cpi_accounts) } } impl<'info> CancelEscrow<'info> { fn into_transfer_to_initializer_context( &self, ) -> CpiContext<'_, '_, '_, 'info, Transfer<'info>> { let cpi_accounts = Transfer { from: self.vault_account.to_account_info().clone(), to: self .initializer_deposit_token_account .to_account_info() .clone(), authority: self.vault_authority.clone(), }; let cpi_program = self.token_program.to_account_info(); CpiContext::new(cpi_program, cpi_accounts) } fn into_close_context(&self) -> CpiContext<'_, '_, '_, 'info, CloseAccount<'info>> { let cpi_accounts = CloseAccount { account: self.vault_account.to_account_info().clone(), destination: self.initializer.clone(), authority: self.vault_authority.clone(), }; let cpi_program = self.token_program.to_account_info(); CpiContext::new(cpi_program, cpi_accounts) } } impl<'info> Exchange<'info> { fn into_transfer_to_initializer_context( &self, ) -> CpiContext<'_, '_, '_, 'info, Transfer<'info>> { let cpi_accounts = Transfer { from: self.taker_deposit_token_account.to_account_info().clone(), to: self 
.initializer_receive_token_account .to_account_info() .clone(), authority: self.taker.clone(), }; let cpi_program = self.token_program.to_account_info(); CpiContext::new(cpi_program, cpi_accounts) } fn into_transfer_to_taker_context(&self) -> CpiContext<'_, '_, '_, 'info, Transfer<'info>> { let cpi_accounts = Transfer { from: self.vault_account.to_account_info().clone(), to: self.taker_receive_token_account.to_account_info().clone(), authority: self.vault_authority.clone(), }; CpiContext::new(self.token_program.clone(), cpi_accounts) } fn into_close_context(&self) -> CpiContext<'_, '_, '_, 'info, CloseAccount<'info>> { let cpi_accounts = CloseAccount { account: self.vault_account.to_account_info().clone(), destination: self.initializer.clone(), authority: self.vault_authority.clone(), }; CpiContext::new(self.token_program.clone(), cpi_accounts) } }
true
3790165662a27b5abd680852b4efe8eff686bd2d
Rust
Insynia/zgog-io-server
/src/player/mod.rs
UTF-8
4,093
2.734375
3
[ "Apache-2.0" ]
permissive
pub mod inventory; use std::sync::{Arc, RwLock}; use uuid::Uuid; use crate::coordinates::Coords; use crate::map::valid_spawn; use crate::player::inventory::Inventory; /// Player struct #[derive(Clone, Serialize, Deserialize)] pub struct Player { pub id: Uuid, pub name: String, pub position: Coords, pub orientation: Coords, pub velocity: Coords, pub inventory: Inventory, pub hitting: bool, } /// Player coords struct #[derive(Serialize, Deserialize)] pub struct PlayerState { pub position: Coords, pub orientation: Coords, pub velocity: Coords, pub hitting: bool, } /// NewPlyerInfos struct #[derive(Serialize, Deserialize)] pub struct NewPlayerInfos { pub name: String, } lazy_static! { pub static ref PLAYERS: Arc<RwLock<Vec<Player>>> = Arc::new(RwLock::new(vec![])); } pub fn add_player(id: Uuid, payload: Option<serde_json::Value>) -> Result<Player, String> { if let Some(payload) = payload { if let Ok(player) = serde_json::from_value::<NewPlayerInfos>(payload) { let player = Player { id, name: player.name.to_owned(), position: valid_spawn(), orientation: Coords::default(), velocity: Coords::default(), inventory: Inventory::default(), hitting: false, }; PLAYERS .write() .expect("Could not lock players mutex") .push(player.clone()); info!("New player \"{}\" with id \"{}\"", player.name, player.id); return Ok(player); } else { Err("Could not deserialize player infos for add_player".to_owned()) } } else { Err("No payload provided for add_player".to_owned()) } } pub fn remove_player(id: Uuid) { PLAYERS .write() .expect("Could not lock players mutex") .retain(|c| c.id != id); info!("Player with id \"{}\" removed", id); } pub fn move_player(id: Uuid, payload: Option<serde_json::Value>) -> Result<(), String> { if let Some(payload) = payload { if let Ok(state) = serde_json::from_value::<PlayerState>(payload) { if let Some(ref mut player) = PLAYERS .write() .expect("Could not lock players mutex") .iter_mut() .filter(|p| p.id == id) .collect::<Vec<_>>() .first_mut() { 
player.position.x = state.position.x; player.position.y = state.position.y; player.orientation.x = state.orientation.x; player.orientation.y = state.orientation.y; player.velocity.x = state.velocity.x; player.velocity.y = state.velocity.y; player.hitting = state.hitting; Ok(()) } else { Err("Player not found for move_player".to_owned()) } } else { Err("Could not deserialize coords for move_player".to_owned()) } } else { Err("No payload provided for move_player".to_owned()) } } use std::net::TcpStream; use websocket::result::WebSocketError; use websocket::sender::Writer; use websocket::OwnedMessage; use crate::communication::{OutgoingMessage, OutgoingMessageType}; pub fn send_hero(sender: &mut Writer<TcpStream>, player: Player) -> Result<(), WebSocketError> { sender.send_message(&OwnedMessage::Text( OutgoingMessage { _type: OutgoingMessageType::Hero, payload: Some(player), } .into(), )) } pub fn send_all_players(sender: &mut Writer<TcpStream>) -> Result<(), WebSocketError> { sender.send_message(&OwnedMessage::Text( OutgoingMessage { _type: OutgoingMessageType::AllPlayers, payload: Some( PLAYERS .read() .expect("Could not lock players mutex") .clone(), ), } .into(), )) }
true
9ea48aead327727227712928f490277ce59e540c
Rust
Jakobzs/rs-cache
/src/lib.rs
UTF-8
4,471
3.09375
3
[ "MIT", "LicenseRef-scancode-unknown-license-reference" ]
permissive
//! [RuneScape](https://oldschool.runescape.com/) cache api for basic
//! and simple cache interactions.
//!
//! # Features
//!
//! Currently rs-cache only supports OSRS with the features listed below.
//! This crate also contains tools to help you with implementing your own cache
//! if the currently supplied cache is insufficient for a specific use-case.
//!
//! The following features are currently provided:
//! - Reading from the cache.
//! - Huffman buffer access.
//! - Checksum with simple-to-use validation.
//! - Compression and decompression:
//!   - [Gzip](https://crates.io/crates/libflate)
//!   - [Bzip2](https://crates.io/crates/bzip2)
//! - Loaders
//!   - [`ItemLoader`](ldr/osrs/struct.ItemLoader.html)
//!   - [`NpcLoader`](ldr/osrs/struct.NpcLoader.html)
//!   - [`ObjectLoader`](ldr/osrs/struct.ObjectLoader.html)
//! - Utilities
//!   - Huffman decompressor.
//!   - Isaac randomizer.
//!   - Xtea decipher.
//!
//! Features to be implemented in the future:
//! - Writing data to the cache.
//! - RS3 protocol support. (including LZMA compression)
//!
//! # Quick Start
//!
//! The quickest and easiest way to get started is by using
//! [`OsrsCache`](type.OsrsCache.html).
//!
//! ```
//! use rscache::OsrsCache;
//!
//! # fn main() -> rscache::Result<()> {
//! let cache = OsrsCache::new("./data/cache")?;
//!
//! let index_id = 2; // Config index.
//! let archive_id = 10; // Archive containing item definitions.
//!
//! let buffer: Vec<u8> = cache.read(index_id, archive_id)?;
//!
//! # Ok(())
//! # }
//! ```
//!
//! # Cache interchangeability
//!
//! The internal storage and reading functionalities can be changed
//! by using the generic [`Cache`](struct.Cache.html) struct and choosing
//! a store implementation that fits a specific use-case.
//!
//! In the below example the [`FileStore`](struct.FileStore.html) holds a
//! handle to the main data file while the [`MemoryStore`](struct.MemoryStore.html)
//! parses the entire main data file into memory. If the main file is too large
//! for the `MemoryStore` you can opt into a `FileStore` to do reading through disk I/O.
//!
//! The type [`OsrsCache`](type.OsrsCache.html) is a type alias for `Cache<MemoryStore>`.
//!
//! ```
//! use rscache::{ Cache, store::FileStore };
//!
//! # fn main() -> rscache::Result<()> {
//! let cache = Cache::<FileStore>::new("./data/cache")?;
//!
//! let index_id = 2; // Config index.
//! let archive_id = 10; // Archive containing item definitions.
//!
//! let buffer: Vec<u8> = cache.read(index_id, archive_id)?;
//!
//! # Ok(())
//! # }
//! ```
//!
//! # Building a custom cache
//!
//! This crate supplies traits and helper functions to help implement
//! your own cache when the default cache doesn't do exactly what you need.
//!
//! See the [custom_cache](https://github.com/jimvdl/rs-cache/tree/master/examples)
//! example to help you get started.

#![deny(clippy::all, clippy::nursery)]
#![warn(
    clippy::clone_on_ref_ptr,
    clippy::redundant_clone,
    clippy::default_trait_access,
    clippy::expl_impl_clone_on_copy,
    clippy::explicit_into_iter_loop,
    clippy::explicit_iter_loop,
    clippy::filter_map,
    clippy::filter_map_next,
    clippy::find_map,
    clippy::get_unwrap,
    clippy::items_after_statements,
    clippy::large_digit_groups,
    clippy::map_flatten,
    clippy::match_same_arms,
    clippy::maybe_infinite_iter,
    clippy::mem_forget,
    clippy::missing_inline_in_public_items,
    clippy::multiple_inherent_impl,
    clippy::mut_mut,
    clippy::needless_continue,
    clippy::needless_pass_by_value,
    clippy::map_unwrap_or,
    clippy::pub_enum_variant_names,
    clippy::unused_self,
    clippy::similar_names,
    clippy::single_match_else,
    clippy::too_many_lines,
    clippy::type_repetition_in_bounds,
    clippy::unseparated_literal_suffix,
    clippy::used_underscore_binding
)]

#[macro_use]
pub mod util;
pub mod cache;
pub mod cksm;
pub mod idx;
pub mod arc;
pub mod ext;
pub mod error;
pub mod store;
pub mod codec;
pub mod def;
pub mod ldr;
pub mod sec;

/// Type alias for `Cache<MemoryStore>`.
pub type OsrsCache = Cache<store::MemoryStore>;

// Crate-root re-exports: `#[doc(inline)]` makes rustdoc render these items as
// if they were defined here, giving users a flat, discoverable API surface.

#[doc(inline)]
pub use error::Result;

#[doc(inline)]
pub use cache::{ Cache, CacheCore, CacheRead };

#[doc(inline)]
pub use cksm::Checksum;

#[doc(inline)]
pub use store::{ Store, FileStore, MemoryStore };

#[doc(inline)]
pub use ldr::Loader;

#[doc(inline)]
pub use def::Definition;
true