Dataset schema (one record per source file):

  blob_id             string   length 40-40
  language            string   1 value
  repo_name           string   length 5-140
  path                string   length 5-183
  src_encoding        string   6 values
  length_bytes        int64    range 12-5.32M
  score               float64  range 2.52-4.94
  int_score           int64    range 3-5
  detected_licenses   list     length 0-47
  license_type        string   2 values
  text                string   length 12-5.32M
  download_success    bool     1 value
blob_id: c0f5851a839fcd9abf82b551b7af97fbe616823d
language: Rust
repo_name: ciruz/next.js
path: /packages/next-swc/crates/next-core/src/env.rs
src_encoding: UTF-8
length_bytes: 2,347
score: 2.5625
int_score: 3
detected_licenses: [ "MIT" ]
license_type: permissive
text:
use anyhow::Result;
use indexmap::indexmap;
use turbopack_binding::{
    turbo::tasks_env::{
        CustomProcessEnvVc, EnvMapVc, FilterProcessEnvVc, ProcessEnv, ProcessEnvVc,
    },
    turbopack::env::EmbeddableProcessEnvVc,
};

use crate::next_config::NextConfigVc;

/// Creates a ProcessEnvVc safe to use in JS, by stringifying and encoding as
/// regular JS strings. Setting `client` to true will additionally filter the
/// env to just the keys that are acceptable for the client to access.
///
/// For now, it also injects overridden values as if they were real JS code, eg
/// an Object and not a String.
#[turbo_tasks::function]
pub async fn env_for_js(
    env: ProcessEnvVc,
    client: bool,
    next_config: NextConfigVc,
) -> Result<ProcessEnvVc> {
    let test_mode = env.read("__NEXT_TEST_MODE").await?;
    let test_mode = test_mode.as_deref().unwrap_or("");

    let env = if client {
        FilterProcessEnvVc::new(
            env,
            vec![
                "NEXT_PUBLIC_".to_string(),
                "NODE_ENV".to_string(),
                "PORT".to_string(),
            ],
        )
        .into()
    } else {
        // Server doesn't need to have env vars injected since it will have them in the
        // real process.env.
        EnvMapVc::cell(Default::default()).into()
    };

    let env =
        EmbeddableProcessEnvVc::new(CustomProcessEnvVc::new(env, next_config.env()).into()).into();

    let image_config = next_config.image_config().await?;
    let mut map = indexmap! {
        // We need to overload the __NEXT_IMAGE_OPTS to override the default remotePatterns field.
        // This allows us to support loading from remote hostnames until we properly support reading
        // the next.config.js file.
        "__NEXT_IMAGE_OPTS".to_string() => serde_json::to_string(&image_config)?,
    };

    let next_config = next_config.await?;

    if next_config.react_strict_mode.unwrap_or(false) {
        map.insert("__NEXT_STRICT_MODE".to_string(), "true".to_string());
    }
    if next_config.react_strict_mode.unwrap_or(true) {
        map.insert("__NEXT_STRICT_MODE_APP".to_string(), "true".to_string());
    }
    if !test_mode.is_empty() {
        map.insert("__NEXT_TEST_MODE".to_string(), "true".to_string());
    }

    Ok(CustomProcessEnvVc::new(env, EnvMapVc::cell(map)).into())
}
download_success: true
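The env.rs record above builds the client-visible environment by filtering against a small allowlist of prefixes ("NEXT_PUBLIC_", "NODE_ENV", "PORT"). The snippet below is a minimal, self-contained sketch of that allowlist idea in plain Rust; `filter_client_env` and the example values are invented for illustration and do not use the turbopack `FilterProcessEnvVc` machinery.

use std::collections::BTreeMap;

// Hypothetical helper mirroring the allowlist in `env_for_js`: only keys
// matching one of the listed prefixes survive into the client environment.
fn filter_client_env(all: &BTreeMap<String, String>) -> BTreeMap<String, String> {
    let allowed_prefixes = ["NEXT_PUBLIC_", "NODE_ENV", "PORT"];
    all.iter()
        .filter(|(key, _)| allowed_prefixes.iter().any(|p| key.starts_with(p)))
        .map(|(k, v)| (k.clone(), v.clone()))
        .collect()
}

fn main() {
    let mut env = BTreeMap::new();
    env.insert("NEXT_PUBLIC_API_URL".to_string(), "https://example.com/api".to_string());
    env.insert("DATABASE_PASSWORD".to_string(), "secret".to_string());
    env.insert("NODE_ENV".to_string(), "production".to_string());

    let client = filter_client_env(&env);
    assert!(client.contains_key("NEXT_PUBLIC_API_URL"));
    assert!(client.contains_key("NODE_ENV"));
    assert!(!client.contains_key("DATABASE_PASSWORD"));
}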
blob_id: 6ebb0a203940cf712a214aafff687772b1972759
language: Rust
repo_name: hsk/rucc
path: /src/lexer.rs
src_encoding: UTF-8
length_bytes: 25,350
score: 2.90625
int_score: 3
detected_licenses: [ "MIT" ]
license_type: permissive
text:
use std::fs::OpenOptions; use std::io::prelude::*; use std::iter; use std::str; use std::collections::VecDeque; use std::path; use std::process; use std::collections::{HashSet, HashMap}; use error; use parser; use MACRO_MAP; #[derive(Debug)] pub enum Macro { // Vec<Token> -> macro body Object(Vec<Token>), FuncLike(Vec<Token>), } #[derive(PartialEq, Debug, Clone)] pub enum TokenKind { MacroParam, Identifier, IntNumber, FloatNumber, String, Char, Symbol, Newline, } #[derive(PartialEq, Debug, Clone)] pub struct Token { pub kind: TokenKind, pub space: bool, // leading space pub val: String, pub macro_position: usize, pub hideset: HashSet<String>, pub line: i32, } impl Token { pub fn new(kind: TokenKind, val: &str, macro_position: usize, line: i32) -> Token { Token { kind: kind, space: false, val: val.to_string(), macro_position: macro_position, hideset: HashSet::new(), line: line, } } } pub struct Lexer<'a> { pub cur_line: i32, filename: String, peek: iter::Peekable<str::Chars<'a>>, peek_buf: VecDeque<char>, buf: VecDeque<VecDeque<Token>>, cond_stack: Vec<bool>, } impl<'a> Lexer<'a> { pub fn new(filename: String, input: &'a str) -> Lexer<'a> { let mut v: VecDeque<VecDeque<Token>> = VecDeque::new(); v.push_back(VecDeque::new()); Lexer { cur_line: 1, filename: filename.to_string(), peek: input.chars().peekable(), peek_buf: VecDeque::new(), buf: v, cond_stack: Vec::new(), } } pub fn get_filename(self) -> String { self.filename } fn peek_get(&mut self) -> Option<&char> { if self.peek_buf.len() > 0 { self.peek_buf.front() } else { self.peek.peek() } } fn peek_next(&mut self) -> char { if self.peek_buf.len() > 0 { self.peek_buf.pop_front().unwrap() } else { self.peek.next().unwrap() } } fn peek_unget(&mut self, ch: char) { self.peek_buf.push_back(ch); } fn peek_next_char_is(&mut self, ch: char) -> bool { let c = self.peek_next(); let nextc = self.peek_next(); self.peek_unget(c); self.peek_unget(nextc); nextc == ch } fn peek_char_is(&mut self, ch: char) -> bool { let line = self.cur_line; let errf = || -> Option<&char> { error::error_exit(line, format!("expected '{}'", ch).as_str()); }; let peekc = self.peek_get().or_else(errf).unwrap(); *peekc == ch } pub fn next_token_is(&mut self, expect: &str) -> bool { let next = self.peek(); if next.is_some() { let n = next.unwrap(); n.val == expect && n.kind != TokenKind::String && n.kind != TokenKind::Char } else { error::error_exit(self.cur_line, "expected a token but reach EOF") } } pub fn skip(&mut self, s: &str) -> bool { let next = self.get(); match next { Some(n) => { if n.val == s && n.kind != TokenKind::String && n.kind != TokenKind::Char { true } else { self.buf.back_mut().unwrap().push_back(n); false } } None => { error::error_exit(self.cur_line, format!("expect '{}' but reach EOF", s).as_str()) } } } pub fn expect_skip(&mut self, expect: &str) -> bool { if !self.skip(expect) { error::error_exit(self.cur_line, format!("expected '{}'", expect).as_str()); } true } pub fn unget(&mut self, t: Token) { self.buf.back_mut().unwrap().push_back(t); } pub fn unget_all(&mut self, mut tv: Vec<Token>) { tv.reverse(); for t in tv { self.unget(t); } } pub fn read_identifier(&mut self) -> Token { let mut ident = String::new(); loop { match self.peek_get() { Some(&c) => { match c { 'a'...'z' | 'A'...'Z' | '_' | '0'...'9' => ident.push(c), _ => break, } } _ => break, }; self.peek_next(); } Token::new(TokenKind::Identifier, ident.as_str(), 0, self.cur_line) } fn read_number_literal(&mut self) -> Token { let mut num = String::new(); let mut is_float = false; loop { 
match self.peek_get() { Some(&c) => { match c { '.' | '0'...'9' | 'a'...'z' | 'A'...'Z' => { num.push(c); if c == '.' { is_float = true; } } _ => break, } } _ => break, }; self.peek_next(); } if is_float { Token::new(TokenKind::FloatNumber, num.as_str(), 0, self.cur_line) } else { Token::new(TokenKind::IntNumber, num.as_str(), 0, self.cur_line) } } pub fn read_newline(&mut self) -> Token { self.peek_next(); self.cur_line += 1; Token::new(TokenKind::Newline, "", 0, self.cur_line) } pub fn read_symbol(&mut self) -> Token { let c = self.peek_next(); let mut sym = String::new(); sym.push(c); match c { '+' | '-' => { if self.peek_char_is('=') || self.peek_char_is('>') || self.peek_char_is('+') || self.peek_char_is('-') { sym.push(self.peek_next()); } } '*' | '/' | '%' | '=' | '^' | '!' => { if self.peek_char_is('=') { sym.push(self.peek_next()); } } '<' | '>' | '&' | '|' => { if self.peek_char_is(c) { sym.push(self.peek_next()); } if self.peek_char_is('=') { sym.push(self.peek_next()); } } '.' => { if self.peek_char_is('.') && self.peek_next_char_is('.') { sym.push(self.peek_next()); sym.push(self.peek_next()); } } _ => {} }; Token::new(TokenKind::Symbol, sym.as_str(), 0, self.cur_line) } fn read_string_literal(&mut self) -> Token { self.peek_next(); let mut s = String::new(); while !self.peek_char_is('\"') { s.push(self.peek_next()); } self.peek_next(); Token::new(TokenKind::String, s.as_str(), 0, self.cur_line) } fn read_char_literal(&mut self) -> Token { self.peek_next(); let mut s = String::new(); while !self.peek_char_is('\'') { s.push(self.peek_next()); } self.peek_next(); Token::new(TokenKind::Char, s.as_str(), 0, self.cur_line) } pub fn do_read_token(&mut self) -> Option<Token> { if !self.buf.back_mut().unwrap().is_empty() { return self.buf.back_mut().unwrap().pop_back(); } match self.peek_get() { Some(&c) => { match c { 'a'...'z' | 'A'...'Z' | '_' => Some(self.read_identifier()), ' ' | '\t' => { self.peek_next(); // set a leading space fn f(tok: Token) -> Option<Token> { let mut t = tok; t.space = true; Some(t) } self.do_read_token().and_then(f) } '0'...'9' => Some(self.read_number_literal()), '\"' => Some(self.read_string_literal()), '\'' => Some(self.read_char_literal()), '\n' => Some(self.read_newline()), '\\' => { while self.peek_next() != '\n' {} self.do_read_token() } '/' => { if self.peek_next_char_is('*') { self.peek_next(); // / self.peek_next(); // * while !(self.peek_char_is('*') && self.peek_next_char_is('/')) { self.peek_next(); } self.peek_next(); // * self.peek_next(); // / self.do_read_token() } else if self.peek_next_char_is('/') { self.peek_next(); // / self.peek_next(); // / while !self.peek_char_is('\n') { self.peek_next(); } // self.peek_next(); // \n self.do_read_token() } else { Some(self.read_symbol()) } } _ => Some(self.read_symbol()), } } None => None as Option<Token>, } } pub fn read_token(&mut self) -> Option<Token> { let t = self.do_read_token(); match t { Some(tok) => { match tok.kind { TokenKind::Newline => self.read_token(), _ => Some(tok), } } _ => t, } } fn expand_obj_macro(&mut self, name: String, body: &Vec<Token>) { let mut bdy: Vec<Token> = Vec::new(); for t in body { bdy.push(|| -> Token { let mut a = t.clone(); a.hideset.insert(name.to_string()); a }()); } self.unget_all(bdy); } fn read_one_arg(&mut self) -> Vec<Token> { let mut n = 0; let mut arg: Vec<Token> = Vec::new(); loop { let tok = self.read_token() .or_else(|| error::error_exit(self.cur_line, "expected macro args but reach EOF")) .unwrap(); if n == 0 { if tok.val == ")" { 
self.unget(tok); break; } else if tok.val == "," { break; } } match tok.val.as_str() { "(" => n += 1, ")" => n -= 1, _ => {} } arg.push(tok); } arg } fn stringize(&mut self, tokens: &Vec<Token>) -> Token { let mut string = String::new(); for token in tokens { string += format!("{}{}", (if token.space { " " } else { "" }), token.val).as_str(); } Token::new(TokenKind::String, string.as_str(), 0, self.cur_line) } fn expand_func_macro(&mut self, name: String, macro_body: &Vec<Token>) { // expect '(', (self.skip can't be used because skip uses 'self.get' that uses MACRO_MAP using Mutex let expect_bracket = self.read_token() .or_else(|| error::error_exit(self.cur_line, "expected '(' but reach EOF")) .unwrap(); if expect_bracket.val != "(" { error::error_exit(self.cur_line, "expected '('"); } let mut args: Vec<Vec<Token>> = Vec::new(); // read macro arguments loop { let maybe_bracket = self.read_token() .or_else(|| error::error_exit(self.cur_line, "expected ')' but reach EOF")) .unwrap(); if maybe_bracket.val == ")" { break; } else { self.unget(maybe_bracket); } args.push(self.read_one_arg()); } let mut expanded: Vec<Token> = Vec::new(); let mut is_stringize = false; let mut is_combine = false; for macro_tok in macro_body { // TODO: refine code if macro_tok.val == "#" { // means ## if is_stringize { is_stringize = false; is_combine = true; } else { is_stringize = true; } continue; } if macro_tok.kind == TokenKind::MacroParam { let position = macro_tok.macro_position; if is_stringize { expanded.push(self.stringize(&args[position])); is_stringize = false; } else if is_combine { let mut last = expanded.pop().unwrap(); for t in &args[position] { last.val += t.val.as_str(); } expanded.push(last); is_combine = false; } else { for t in &args[position] { let mut a = t.clone(); a.hideset.insert(name.to_string()); expanded.push(a); } } } else { let mut a = macro_tok.clone(); a.hideset.insert(name.to_string()); expanded.push(a); } } self.unget_all(expanded); } fn expand(&mut self, token: Option<Token>) -> Option<Token> { token.and_then(|tok| { let name = tok.val.clone(); if tok.hideset.contains(name.as_str()) || !MACRO_MAP.lock().unwrap().contains_key(name.as_str()) { Some(tok) } else { // if cur token is macro: match MACRO_MAP.lock().unwrap().get(name.as_str()).unwrap() { &Macro::Object(ref body) => self.expand_obj_macro(name, body), &Macro::FuncLike(ref body) => self.expand_func_macro(name, body), } self.get() } }) } pub fn get(&mut self) -> Option<Token> { let t = self.read_token(); let tok = match t { Some(tok) => { if tok.val == "#" { self.read_cpp_directive(); self.get() } else { Some(tok) } } _ => return t, }; self.expand(tok) } pub fn get_e(&mut self) -> Token { let tok = self.get(); if tok.is_none() { error::error_exit(self.cur_line, "expected a token, but reach EOF"); } tok.unwrap() } pub fn peek(&mut self) -> Option<Token> { let tok = self.get(); tok.clone() .and_then(|t| { self.unget(t.clone()); Some(t) }); tok } pub fn peek_e(&mut self) -> Token { let tok = self.peek(); if tok.is_none() { error::error_exit(self.cur_line, "expected a token, but reach EOF"); } tok.unwrap() } // for c preprocessor fn read_cpp_directive(&mut self) { let t = self.do_read_token(); // cpp directive match t.ok_or("error").unwrap().val.as_str() { "include" => self.read_include(), "define" => self.read_define(), "undef" => self.read_undef(), "if" => self.read_if(), "ifdef" => self.read_ifdef(), "ifndef" => self.read_ifndef(), "elif" => self.read_elif(), "else" => self.read_else(), _ => {} } } fn try_include(&mut self, 
filename: &str) -> Option<String> { let header_paths = vec!["./include/", "/include/", "/usr/include/", "/usr/include/linux/", "/usr/include/x86_64-linux-gnu/", "./include/", ""]; let mut real_filename = String::new(); let mut found = false; for header_path in header_paths { real_filename = format!("{}{}", header_path, filename); if path::Path::new(real_filename.as_str()).exists() { found = true; break; } } if found { Some(real_filename) } else { None } } fn read_include(&mut self) { // this will be a function let mut filename = String::new(); if self.skip("<") { while !self.skip(">") { println!("{}", filename); filename.push_str(self.do_read_token().unwrap().val.as_str()); } } let real_filename = match self.try_include(filename.as_str()) { Some(f) => f, _ => { println!("error: {}: not found '{}'", self.cur_line, filename); process::exit(-1) } }; println!("include filename: {}", real_filename); let mut file = OpenOptions::new() .read(true) .open(real_filename.to_string()) .unwrap(); let mut body = String::new(); file.read_to_string(&mut body); let mut lexer = Lexer::new(filename, body.as_str()); let mut v: Vec<Token> = Vec::new(); loop { match lexer.get() { Some(tok) => v.push(tok), None => break, } } self.unget_all(v); println!("end of: {}", real_filename); } fn read_define_obj_macro(&mut self, name: String) { println!("\tmacro: {}", name); let mut body: Vec<Token> = Vec::new(); print!("\tmacro body: "); loop { let c = self.do_read_token().unwrap(); if c.kind == TokenKind::Newline { break; } print!("{}{}", if c.space { " " } else { "" }, c.val); body.push(c); } println!(); self.register_obj_macro(name, body); } fn read_define_func_macro(&mut self, name: String) { print!("\tmacro: {}(", name); // read macro arguments let mut args: HashMap<String, usize> = HashMap::new(); let mut count = 0usize; loop { let arg = self.get() .or_else(|| { error::error_exit(self.cur_line, "expcted macro args"); }) .unwrap() .val; args.insert(arg, count); if self.skip(")") { break; } self.expect_skip(","); count += 1; } for (key, val) in args.clone() { print!("{}({}),", key, val); } println!(")"); let mut body: Vec<Token> = Vec::new(); print!("\tmacro body: "); loop { let tok = self.do_read_token().unwrap(); if tok.kind == TokenKind::Newline { break; } print!("{}{}", if tok.space { " " } else { "" }, tok.val); // if tok is a parameter of funclike macro, // the kind of tok is changed to MacroParam // and set macro_position let maybe_macro_name = tok.val.as_str(); if args.contains_key(maybe_macro_name) { let mut macro_param = tok.clone(); macro_param.kind = TokenKind::MacroParam; macro_param.macro_position = args.get(maybe_macro_name).unwrap().clone(); body.push(macro_param); } else { body.push(tok.clone()); } } println!(); self.register_funclike_macro(name, body); } fn read_define(&mut self) { let mcro = self.do_read_token().unwrap(); assert_eq!(mcro.kind, TokenKind::Identifier); let t = self.do_read_token().unwrap(); if !t.space && t.val.as_str() == "(" { self.read_define_func_macro(mcro.val); } else { self.unget(t); self.read_define_obj_macro(mcro.val); } } fn read_undef(&mut self) { let mcro = self.do_read_token().unwrap(); assert_eq!(mcro.kind, TokenKind::Identifier); MACRO_MAP.lock().unwrap().remove(mcro.val.as_str()); } fn register_obj_macro(&mut self, name: String, body: Vec<Token>) { MACRO_MAP .lock() .unwrap() .insert(name, Macro::Object(body)); } fn register_funclike_macro(&mut self, name: String, body: Vec<Token>) { MACRO_MAP .lock() .unwrap() .insert(name, Macro::FuncLike(body)); } fn 
read_defined_op(&mut self) -> Token { // TODO: add err handler let mut tok = self.do_read_token().unwrap(); if tok.val == "(" { tok = self.do_read_token().unwrap(); self.expect_skip(")"); } if MACRO_MAP.lock().unwrap().contains_key(tok.val.as_str()) { Token::new(TokenKind::IntNumber, "1", 0, self.cur_line) } else { Token::new(TokenKind::IntNumber, "0", 0, self.cur_line) } } fn read_intexpr_line(&mut self) -> Vec<Token> { let mut v: Vec<Token> = Vec::new(); loop { let mut tok = self.do_read_token() .or_else(|| error::error_exit(self.cur_line, "expect a token, but reach EOF")) .unwrap(); tok = self.expand(Some(tok)).unwrap(); if tok.kind == TokenKind::Newline { break; } else if tok.val == "defined" { v.push(self.read_defined_op()); } else if tok.kind == TokenKind::Identifier { // identifier in expr line is replaced with 0i v.push(Token::new(TokenKind::IntNumber, "0", 0, self.cur_line)); } else { v.push(tok); } } v } fn read_constexpr(&mut self) -> bool { let expr_line = self.read_intexpr_line(); self.buf.push_back(VecDeque::new()); self.unget(Token::new(TokenKind::Symbol, ";", 0, 0)); self.unget_all(expr_line); let node = parser::read_expr(self); self.buf.pop_back(); node.show(); println!(); node.eval_constexpr() != 0 } fn do_read_if(&mut self, cond: bool) { self.cond_stack.push(cond); if !cond { self.skip_cond_include(); } } fn read_if(&mut self) { let cond = self.read_constexpr(); self.do_read_if(cond); } fn read_ifdef(&mut self) { let mcro_name = self.do_read_token() .or_else(|| error::error_exit(self.cur_line, "expected macro")) .unwrap() .val; self.do_read_if((*MACRO_MAP.lock().unwrap()).contains_key(mcro_name.as_str())); } fn read_ifndef(&mut self) { let mcro_name = self.do_read_token() .or_else(|| error::error_exit(self.cur_line, "expected macro")) .unwrap() .val; self.do_read_if(!(*MACRO_MAP.lock().unwrap()).contains_key(mcro_name.as_str())); } fn read_elif(&mut self) { if self.cond_stack[self.cond_stack.len() - 1] || !self.read_constexpr() { self.skip_cond_include(); } else { self.cond_stack.pop(); self.cond_stack.push(true); } } fn read_else(&mut self) { if self.cond_stack[self.cond_stack.len() - 1] { self.skip_cond_include(); } } fn skip_cond_include(&mut self) { let mut nest = 0; let get_tok = |lex: &mut Lexer| -> Token { lex.do_read_token() .or_else(|| error::error_exit(lex.cur_line, "reach EOF")) .unwrap() }; loop { if get_tok(self).val != "#" { continue; } let tok = get_tok(self); if nest == 0 { match tok.val.as_str() { "else" | "elif" | "endif" => { let line = self.cur_line; self.unget(tok); self.unget(Token::new(TokenKind::Symbol, "#", 0, line)); return; } _ => {} } } match tok.val.as_str() { "if" | "ifdef" | "ifndef" => nest += 1, "endif" => nest -= 1, _ => {} } // TODO: if nest < 0 then? } } }
download_success: true
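The lexer.rs record above guards macro expansion with a per-token hideset: every token emitted while expanding a macro records that macro's name, and `expand` refuses to re-expand a token whose hideset already contains it. Below is a standalone sketch of that mechanism under simplified, invented types (`Tok`, `expand`, and the object-macro-only map are for illustration only; the real lexer also handles function-like macros, `#`/`##`, and re-scanning through its token buffer).

use std::collections::{HashMap, HashSet};

// Minimal sketch of the hideset idea: a self-referential definition such as
// `#define FOO FOO + 1` expands once instead of recursing forever.
#[derive(Clone, Debug)]
struct Tok {
    val: String,
    hideset: HashSet<String>,
}

fn expand(tok: &Tok, macros: &HashMap<String, Vec<String>>) -> Vec<Tok> {
    match macros.get(&tok.val) {
        // Already expanded under this name: emit the token unchanged.
        Some(_) if tok.hideset.contains(&tok.val) => vec![tok.clone()],
        // Not a macro at all: emit unchanged.
        None => vec![tok.clone()],
        // A macro: substitute its body, tagging each produced token with the
        // macro name, then re-scan the result (the real lexer does this via
        // `unget_all` followed by `get`).
        Some(body) => body
            .iter()
            .map(|v| {
                let mut t = Tok { val: v.clone(), hideset: tok.hideset.clone() };
                t.hideset.insert(tok.val.clone());
                t
            })
            .flat_map(|t| expand(&t, macros))
            .collect(),
    }
}

fn main() {
    let mut macros = HashMap::new();
    macros.insert(
        "FOO".to_string(),
        vec!["FOO".to_string(), "+".to_string(), "1".to_string()],
    );
    let tok = Tok { val: "FOO".to_string(), hideset: HashSet::new() };
    let out: Vec<String> = expand(&tok, &macros).into_iter().map(|t| t.val).collect();
    assert_eq!(out, vec!["FOO", "+", "1"]);
}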
blob_id: af36764468f8999eb0220aaaacc400ddca547e24
language: Rust
repo_name: bobmcwhirter/defmt
path: /defmt/src/encoding/mod.rs
src_encoding: UTF-8
length_bytes: 3,671
score: 3
int_score: 3
detected_licenses: [ "Apache-2.0", "MIT" ]
license_type: permissive
text:
#[cfg(all(feature = "encoding-raw", feature = "encoding-rzcobs"))] compile_error!("Multiple `encoding-*` features are enabled. You may only enable one."); #[cfg_attr(feature = "encoding-raw", path = "raw.rs")] #[cfg_attr(not(feature = "encoding-raw"), path = "rzcobs.rs")] mod inner; // This wrapper struct is to avoid copypasting the public docs in all the impls. /// Encode raw defmt frames for sending over the wire. /// /// defmt emits "log frames", which are sequences of bytes. The raw log frame data /// is then *encoded* prior to sending over the wire. /// /// `Encoder` will encode the frames according to the currently selected /// `encoding-*` Cargo feature. See `Cargo.toml` for the supported encodings /// and their tradeoffs. /// /// Encodings may perform two functions: /// /// - Framing: Adds extra data to allow the encoder to know when each frame starts /// and ends in the stream. Unframed log frames already contain enough information for /// the decoder to know when they end, so framing is optional. However, without framing /// the decoder must receive all bytes intact or it may "lose sync". With framing, it can /// recover from missing/corrupted data, and can start decoding from the "middle" of an /// already-running stream. /// - Compression: The frame data has rather low entropy (for example, it contains many /// zero bytes due to encoding all integers in fixed with, and will likely contain many /// repetitions). Compression can decrease the on-the-wire required bandwidth. /// /// defmt provides the `Encoder` separately instead of feeding already-encoded bytes /// to the `Logger` because `Logger` implementations may decide to allow /// concurrent logging from multiple "contexts" such as threads or interrupt /// priority levels. In this case, the Logger implementation needs to create one /// Encoder for each such context. pub struct Encoder { inner: inner::Encoder, } impl Encoder { /// Create a new `Encoder`. pub const fn new() -> Self { Self { inner: inner::Encoder::new(), } } /// Start encoding a log frame. /// /// `Logger` impls will typically call this from `acquire()`. /// /// You may only call `start_frame` when no frame is currently being encoded. /// Failure to do so may result in corrupted data on the wire. /// /// The `write` closure will be called with the encoded data that must /// be sent on the wire. It may be called zero, one, or multiple times. pub fn start_frame(&mut self, write: impl FnMut(&[u8])) { self.inner.start_frame(write) } /// Finish encoding a log frame. /// /// `Logger` impls will typically call this from `release()`. /// /// You may only call `end_frame` when a frame is currently being encoded. /// Failure to do so may result in corrupted data on the wire. /// /// The `write` closure will be called with the encoded data that must /// be sent on the wire. It may be called zero, one, or multiple times. pub fn end_frame(&mut self, write: impl FnMut(&[u8])) { self.inner.end_frame(write) } /// Write part of data for a log frame. /// /// `Logger` impls will typically call this from `write()`. /// /// You may only call `write` when a frame is currently being encoded. /// Failure to do so may result in corrupted data on the wire. /// /// The `write` closure will be called with the encoded data that must /// be sent on the wire. It may be called zero, one, or multiple times. pub fn write(&mut self, data: &[u8], write: impl FnMut(&[u8])) { self.inner.write(data, write) } }
download_success: true
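The doc comments in the encoding module above spell out the intended call sequence for a `Logger` implementation: `start_frame` from `acquire()`, `write` (possibly several times) from `write()`, and `end_frame` from `release()`, each receiving a closure that forwards encoded bytes to the transport. The sketch below shows that sequence in isolation; it assumes the struct is reachable as `defmt::Encoder` and uses a `Vec` in place of a real UART/RTT channel, so treat it as an illustration rather than a complete `Logger`.

// Sketch only: drives a single log frame through the Encoder described above.
fn send_one_frame(payload: &[u8]) -> Vec<u8> {
    let mut wire = Vec::new(); // stand-in for the real transport buffer
    let mut encoder = defmt::Encoder::new(); // assumes a crate-root re-export

    // Typically called from Logger::acquire().
    encoder.start_frame(|bytes| wire.extend_from_slice(bytes));
    // Typically called from Logger::write(), once per chunk of frame data.
    encoder.write(payload, |bytes| wire.extend_from_slice(bytes));
    // Typically called from Logger::release().
    encoder.end_frame(|bytes| wire.extend_from_slice(bytes));

    wire
}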
blob_id: 69806a9ea01f1a870fa621405b37d5f28c5c03bd
language: Rust
repo_name: maciejhirsz/logos
path: /logos/src/lib.rs
src_encoding: UTF-8
length_bytes: 13,793
score: 3.75
int_score: 4
detected_licenses: [ "MIT", "LicenseRef-scancode-unknown-license-reference", "Apache-2.0" ]
license_type: permissive
text:
//! <img src="https://raw.githubusercontent.com/maciejhirsz/logos/master/logos.svg?sanitize=true" alt="Logos logo" width="250" align="right"> //! //! # Logos //! //! _Create ridiculously fast Lexers._ //! //! **Logos** has two goals: //! //! + To make it easy to create a Lexer, so you can focus on more complex problems. //! + To make the generated Lexer faster than anything you'd write by hand. //! //! To achieve those, **Logos**: //! //! + Combines all token definitions into a single [deterministic state machine](https://en.wikipedia.org/wiki/Deterministic_finite_automaton). //! + Optimizes branches into [lookup tables](https://en.wikipedia.org/wiki/Lookup_table) or [jump tables](https://en.wikipedia.org/wiki/Branch_table). //! + Prevents [backtracking](https://en.wikipedia.org/wiki/ReDoS) inside token definitions. //! + [Unwinds loops](https://en.wikipedia.org/wiki/Loop_unrolling), and batches reads to minimize bounds checking. //! + Does all of that heavy lifting at compile time. //! //! ## Example //! //! ```rust //! use logos::Logos; //! //! #[derive(Logos, Debug, PartialEq)] //! #[logos(skip r"[ \t\n\f]+")] // Ignore this regex pattern between tokens //! enum Token { //! // Tokens can be literal strings, of any length. //! #[token("fast")] //! Fast, //! //! #[token(".")] //! Period, //! //! // Or regular expressions. //! #[regex("[a-zA-Z]+")] //! Text, //! } //! //! fn main() { //! let mut lex = Token::lexer("Create ridiculously fast Lexers."); //! //! assert_eq!(lex.next(), Some(Ok(Token::Text))); //! assert_eq!(lex.span(), 0..6); //! assert_eq!(lex.slice(), "Create"); //! //! assert_eq!(lex.next(), Some(Ok(Token::Text))); //! assert_eq!(lex.span(), 7..19); //! assert_eq!(lex.slice(), "ridiculously"); //! //! assert_eq!(lex.next(), Some(Ok(Token::Fast))); //! assert_eq!(lex.span(), 20..24); //! assert_eq!(lex.slice(), "fast"); //! //! assert_eq!(lex.next(), Some(Ok(Token::Text))); //! assert_eq!(lex.slice(), "Lexers"); //! assert_eq!(lex.span(), 25..31); //! //! assert_eq!(lex.next(), Some(Ok(Token::Period))); //! assert_eq!(lex.span(), 31..32); //! assert_eq!(lex.slice(), "."); //! //! assert_eq!(lex.next(), None); //! } //! ``` //! //! ### Callbacks //! //! **Logos** can also call arbitrary functions whenever a pattern is matched, //! which can be used to put data into a variant: //! //! ```rust //! use logos::{Logos, Lexer}; //! //! // Note: callbacks can return `Option` or `Result` //! fn kilo(lex: &mut Lexer<Token>) -> Option<u64> { //! let slice = lex.slice(); //! let n: u64 = slice[..slice.len() - 1].parse().ok()?; // skip 'k' //! Some(n * 1_000) //! } //! //! fn mega(lex: &mut Lexer<Token>) -> Option<u64> { //! let slice = lex.slice(); //! let n: u64 = slice[..slice.len() - 1].parse().ok()?; // skip 'm' //! Some(n * 1_000_000) //! } //! //! #[derive(Logos, Debug, PartialEq)] //! #[logos(skip r"[ \t\n\f]+")] //! enum Token { //! // Callbacks can use closure syntax, or refer //! // to a function defined elsewhere. //! // //! // Each pattern can have its own callback. //! #[regex("[0-9]+", |lex| lex.slice().parse().ok())] //! #[regex("[0-9]+k", kilo)] //! #[regex("[0-9]+m", mega)] //! Number(u64), //! } //! //! fn main() { //! let mut lex = Token::lexer("5 42k 75m"); //! //! assert_eq!(lex.next(), Some(Ok(Token::Number(5)))); //! assert_eq!(lex.slice(), "5"); //! //! assert_eq!(lex.next(), Some(Ok(Token::Number(42_000)))); //! assert_eq!(lex.slice(), "42k"); //! //! assert_eq!(lex.next(), Some(Ok(Token::Number(75_000_000)))); //! assert_eq!(lex.slice(), "75m"); //! //! 
assert_eq!(lex.next(), None); //! } //! ``` //! //! Logos can handle callbacks with following return types: //! //! | Return type | Produces | //! |--------------------------------------------------|-----------------------------------------------------------------------------------------------------| //! | `()` | `Ok(Token::Unit)` | //! | `bool` | `Ok(Token::Unit)` **or** `Err(<Token as Logos>::Error::default())` | //! | `Result<(), E>` | `Ok(Token::Unit)` **or** `Err(<Token as Logos>::Error::from(err))` | //! | `T` | `Ok(Token::Value(T))` | //! | `Option<T>` | `Ok(Token::Value(T))` **or** `Err(<Token as Logos>::Error::default())` | //! | `Result<T, E>` | `Ok(Token::Value(T))` **or** `Err(<Token as Logos>::Error::from(err))` | //! | [`Skip`](./struct.Skip.html) | _skips matched input_ | //! | [`Filter<T>`](./enum.Filter.html) | `Ok(Token::Value(T))` **or** _skips matched input_ | //! | [`FilterResult<T, E>`](./enum.FilterResult.html) | `Ok(Token::Value(T))` **or** `Err(<Token as Logos>::Error::from(err))` **or** _skips matched input_ | //! //! Callbacks can be also used to do perform more specialized lexing in place //! where regular expressions are too limiting. For specifics look at //! [`Lexer::remainder`](./struct.Lexer.html#method.remainder) and //! [`Lexer::bump`](./struct.Lexer.html#method.bump). //! //! ## Errors //! //! By default, **Logos** uses `()` as the error type, which means that it //! doesn't store any information about the error. //! This can be changed by using `#[logos(error = T)]` attribute on the enum. //! The type `T` can be any type that implements `Clone`, `PartialEq`, //! `Default` and `From<E>` for each callback's error type. //! //! ## Token disambiguation //! //! Rule of thumb is: //! //! + Longer beats shorter. //! + Specific beats generic. //! //! If any two definitions could match the same input, like `fast` and `[a-zA-Z]+` //! in the example above, it's the longer and more specific definition of `Token::Fast` //! that will be the result. //! //! This is done by comparing numeric priority attached to each definition. Every consecutive, //! non-repeating single byte adds 2 to the priority, while every range or regex class adds 1. //! Loops or optional blocks are ignored, while alternations count the shortest alternative: //! //! + `[a-zA-Z]+` has a priority of 1 (lowest possible), because at minimum it can match a single byte to a class. //! + `foobar` has a priority of 12. //! + `(foo|hello)(bar)?` has a priority of 6, `foo` being it's shortest possible match. #![cfg_attr(not(feature = "std"), no_std)] #![warn(missing_docs)] #![doc(html_logo_url = "https://maciej.codes/kosz/logos.png")] #[cfg(not(feature = "std"))] extern crate core as std; #[cfg(feature = "export_derive")] pub use logos_derive::Logos; use std::fmt::Debug; mod lexer; pub mod source; #[doc(hidden)] pub mod internal; pub use crate::lexer::{Lexer, Span, SpannedIter}; pub use crate::source::Source; /// Trait implemented for an enum representing all tokens. You should never have /// to implement it manually, use the `#[derive(Logos)]` attribute on your enum. pub trait Logos<'source>: Sized { /// Associated type `Extras` for the particular lexer. This can be set using /// `#[logos(extras = MyExtras)]` and accessed inside callbacks. type Extras; /// Source type this token can be lexed from. This will default to `str`, /// unless one of the defined patterns explicitly uses non-unicode byte values /// or byte slices, in which case that implementation will use `[u8]`. 
type Source: Source + ?Sized + 'source; /// Error type returned by the lexer. This can be set using /// `#[logos(error = MyError)]`. Defaults to `()` if not set. type Error: Default + Clone + PartialEq + Debug + 'source; /// The heart of Logos. Called by the `Lexer`. The implementation for this function /// is generated by the `logos-derive` crate. fn lex(lexer: &mut Lexer<'source, Self>); /// Create a new instance of a `Lexer` that will produce tokens implementing /// this `Logos`. fn lexer(source: &'source Self::Source) -> Lexer<'source, Self> where Self::Extras: Default, { Lexer::new(source) } /// Create a new instance of a `Lexer` with the provided `Extras` that will /// produce tokens implementing this `Logos`. fn lexer_with_extras( source: &'source Self::Source, extras: Self::Extras, ) -> Lexer<'source, Self> { Lexer::with_extras(source, extras) } } /// Type that can be returned from a callback, informing the `Lexer`, to skip /// current token match. See also [`logos::skip`](./fn.skip.html). /// /// # Example /// /// ```rust /// use logos::{Logos, Skip}; /// /// #[derive(Logos, Debug, PartialEq)] /// enum Token<'a> { /// // We will treat "abc" as if it was whitespace. /// // This is identical to using `logos::skip`. /// #[regex(" |abc", |_| Skip)] /// Ignored, /// /// #[regex("[a-zA-Z]+")] /// Text(&'a str), /// } /// /// let tokens: Vec<_> = Token::lexer("Hello abc world").collect(); /// /// assert_eq!( /// tokens, /// &[ /// Ok(Token::Text("Hello")), /// Ok(Token::Text("world")), /// ], /// ); /// ``` pub struct Skip; /// Type that can be returned from a callback, either producing a field /// for a token, or skipping it. /// /// # Example /// /// ```rust /// use logos::{Logos, Filter}; /// /// #[derive(Logos, Debug, PartialEq)] /// enum Token { /// #[regex(r"[ \n\f\t]+", logos::skip)] /// Ignored, /// /// #[regex("[0-9]+", |lex| { /// let n: u64 = lex.slice().parse().unwrap(); /// /// // Only emit a token if `n` is an even number /// match n % 2 { /// 0 => Filter::Emit(n), /// _ => Filter::Skip, /// } /// })] /// EvenNumber(u64) /// } /// /// let tokens: Vec<_> = Token::lexer("20 11 42 23 100 8002").collect(); /// /// assert_eq!( /// tokens, /// &[ /// Ok(Token::EvenNumber(20)), /// // skipping 11 /// Ok(Token::EvenNumber(42)), /// // skipping 23 /// Ok(Token::EvenNumber(100)), /// Ok(Token::EvenNumber(8002)) /// ] /// ); /// ``` pub enum Filter<T> { /// Emit a token with a given value `T`. Use `()` for unit variants without fields. Emit(T), /// Skip current match, analog to [`Skip`](./struct.Skip.html). Skip, } /// Type that can be returned from a callback, either producing a field /// for a token, skipping it, or emitting an error. /// /// # Example /// /// ```rust /// use logos::{Logos, FilterResult}; /// /// #[derive(Debug, PartialEq, Clone, Default)] /// enum LexingError { /// NumberParseError, /// NumberIsTen, /// #[default] /// Other, /// } /// /// impl From<std::num::ParseIntError> for LexingError { /// fn from(_: std::num::ParseIntError) -> Self { /// LexingError::NumberParseError /// } /// } /// /// #[derive(Logos, Debug, PartialEq)] /// #[logos(error = LexingError)] /// enum Token { /// #[regex(r"[ \n\f\t]+", logos::skip)] /// Ignored, /// /// #[regex("[0-9]+", |lex| { /// let n: u64 = lex.slice().parse().unwrap(); /// /// // Only emit a token if `n` is an even number. /// if n % 2 == 0 { /// // Emit an error if `n` is 10. 
/// if n == 10 { /// FilterResult::Error(LexingError::NumberIsTen) /// } else { /// FilterResult::Emit(n) /// } /// } else { /// FilterResult::Skip /// } /// })] /// NiceEvenNumber(u64) /// } /// /// let tokens: Vec<_> = Token::lexer("20 11 42 23 100 10").collect(); /// /// assert_eq!( /// tokens, /// &[ /// Ok(Token::NiceEvenNumber(20)), /// // skipping 11 /// Ok(Token::NiceEvenNumber(42)), /// // skipping 23 /// Ok(Token::NiceEvenNumber(100)), /// // error at 10 /// Err(LexingError::NumberIsTen), /// ] /// ); /// ``` pub enum FilterResult<T, E> { /// Emit a token with a given value `T`. Use `()` for unit variants without fields. Emit(T), /// Skip current match, analog to [`Skip`](./struct.Skip.html). Skip, /// Emit a `<Token as Logos>::ERROR` token. Error(E), } /// Predefined callback that will inform the `Lexer` to skip a definition. /// /// # Example /// /// ```rust /// use logos::Logos; /// /// #[derive(Logos, Debug, PartialEq)] /// enum Token<'a> { /// // We will treat "abc" as if it was whitespace /// #[regex(" |abc", logos::skip)] /// Ignored, /// /// #[regex("[a-zA-Z]+")] /// Text(&'a str), /// } /// /// let tokens: Vec<_> = Token::lexer("Hello abc world").collect(); /// /// assert_eq!( /// tokens, /// &[ /// Ok(Token::Text("Hello")), /// Ok(Token::Text("world")), /// ], /// ); /// ``` #[inline] pub fn skip<'source, Token: Logos<'source>>(_: &mut Lexer<'source, Token>) -> Skip { Skip } #[cfg(doctest)] mod test_readme { macro_rules! external_doc_test { ($x:expr) => { #[doc = $x] extern "C" {} }; } external_doc_test!(include_str!("../../README.md")); }
download_success: true
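The crate docs above state the disambiguation rule of thumb numerically: each consecutive literal byte adds 2, each range or class adds 1, loops and optional blocks add nothing, and an alternation counts its shortest branch ([a-zA-Z]+ scores 1, foobar scores 12, (foo|hello)(bar)? scores 6). The toy model below just reproduces that arithmetic; `Pat` and `priority` are invented for illustration and are not how logos represents or scores patterns internally. Here `x+` is read as `x` followed by an ignored loop, so its body counts once.

// Toy priority calculator matching the examples in the logos docs above.
enum Pat {
    Bytes(&'static str),  // consecutive literal bytes, e.g. "foobar"
    Class,                // a range or class such as [a-zA-Z]
    Plus(Box<Pat>),       // x+ -> body counts once, the loop itself is ignored
    Optional(Box<Pat>),   // x? or x* -> ignored
    Alt(Vec<Pat>),        // a|b -> shortest alternative
    Seq(Vec<Pat>),        // concatenation
}

fn priority(p: &Pat) -> usize {
    match p {
        Pat::Bytes(s) => 2 * s.len(),
        Pat::Class => 1,
        Pat::Plus(inner) => priority(inner),
        Pat::Optional(_) => 0,
        Pat::Alt(alts) => alts.iter().map(priority).min().unwrap_or(0),
        Pat::Seq(parts) => parts.iter().map(priority).sum(),
    }
}

fn main() {
    let class_plus = Pat::Plus(Box::new(Pat::Class)); // [a-zA-Z]+
    let foobar = Pat::Bytes("foobar");                // foobar
    let grouped = Pat::Seq(vec![                      // (foo|hello)(bar)?
        Pat::Alt(vec![Pat::Bytes("foo"), Pat::Bytes("hello")]),
        Pat::Optional(Box::new(Pat::Bytes("bar"))),
    ]);

    assert_eq!(priority(&class_plus), 1);
    assert_eq!(priority(&foobar), 12);
    assert_eq!(priority(&grouped), 6);
}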
blob_id: 33e545a021bfd7dc8317ca4e8f15e3fd8807abfd
language: Rust
repo_name: aidanhs/ayzim
path: /src/parser.rs
src_encoding: UTF-8
length_bytes: 39,063
score: 2.765625
int_score: 3
detected_licenses: []
license_type: no_license
text:
// Pure parsing. Calls methods on a Builder (template argument) to actually // construct the AST // // XXX All parsing methods assume they take ownership of the input string. This // lets them reuse parts of it. You will segfault if the input string cannot be // reused and written to. // RSTODO: nom parser? use std::collections::HashMap; use std::ptr; use std::slice; use std::str; use libc; use libc::{c_char, c_int}; use super::IString; use super::cashew::AstNode; use super::cashew::builder; use super::num::{f64tou32, isInteger32}; static KEYWORDS: &'static [IString] = issl![ "var", "const", "function", "if", "else", "do", "while", "for", "break", "continue", "return", "switch", "case", "default", "throw", "try", "catch", "finally", "true", "false", "null", "new", ]; static OP_CLASSES: &'static [OpClass] = &[ OpClass::new(issl!["."], false, OpClassTy::Binary), OpClass::new(issl!["!","~","+","-"], true, OpClassTy::Prefix), OpClass::new(issl!["*","/","%"], false, OpClassTy::Binary), OpClass::new(issl!["+","-"], false, OpClassTy::Binary), OpClass::new(issl!["<<",">>",">>>"], false, OpClassTy::Binary), OpClass::new(issl!["<","<=",">",">="], false, OpClassTy::Binary), OpClass::new(issl!["==","!="], false, OpClassTy::Binary), OpClass::new(issl!["&"], false, OpClassTy::Binary), OpClass::new(issl!["^"], false, OpClassTy::Binary), OpClass::new(issl!["|"], false, OpClassTy::Binary), OpClass::new(issl!["?",":"], true, OpClassTy::Tertiary), OpClass::new(issl!["="], true, OpClassTy::Binary), OpClass::new(issl![","], true, OpClassTy::Binary), ]; // RSTODO: convert into const fn when some of these become const fns, see // https://github.com/rust-lang/rust/issues/24111, then remove lazy_static? lazy_static! { static ref PRECEDENCES: Vec<HashMap<IString, usize>> = { let mut prec_builders: Vec<_> = (0..OpClassTy::Tertiary as usize+1).map(|_| HashMap::new() ).collect(); for (prec, oc) in OP_CLASSES.iter().enumerate() { for curr in oc.ops.iter() { let prev = prec_builders[oc.ty as usize].insert(curr.clone(), prec); assert!(prev.is_none()); } } prec_builders }; } // Used in hasChar, must be a cstring const OPERATOR_INITS: &'static [u8] = b"+-*/%<>&^|~=!,?:.\0"; const SEPARATORS: &'static [u8] = b"([;{}\0"; #[derive(Copy, Clone, PartialEq, Eq)] pub enum OpClassTy { Binary = 0, Prefix = 1, Tertiary = 2, } pub struct OpClass { ops: &'static [IString], rtl: bool, ty: OpClassTy, } impl OpClass { const fn new(ops: &'static [IString], rtl: bool, ty: OpClassTy) -> OpClass { OpClass { ops: ops, rtl: rtl, ty: ty } } pub fn getPrecedence(ty: OpClassTy, op: IString) -> usize { *PRECEDENCES[ty as usize].get(&op).unwrap() } pub fn getRtl(prec: usize) -> bool { OP_CLASSES[prec].rtl } } macro_rules! pp { { $p:ident += $off:expr } => {{ *$p = (*$p).offset($off as isize) }}; { $p:ident + $off:expr } => {{ (*$p).offset($off as isize) }}; { $p:ident[$off:expr] } => {{ *(*$p).offset($off as isize) }}; } macro_rules! p { { $p:ident += $off:expr } => {{ $p = $p.offset($off as isize) }}; { $p:ident + $off:expr } => {{ $p.offset($off as isize) }}; { $p:ident[$off:expr] } => {{ *$p.offset($off as isize) }}; } fn isIdentInit(x: u8) -> bool { (x >= b'a' && x <= b'z') || (x >= b'A' && x <= b'Z') || x == b'_' || x == b'$' } // RSTODO: use isDigit? 
pub fn isIdentPart(x: u8) -> bool { isIdentInit(x) || (x >= b'0' && x <= b'9') } fn isSpace(x: u8) -> bool { // space, tab, linefeed/newline or return x == 32 || x == 9 || x == 10 || x == 13 } fn isDigit(x: u8) -> bool { x >= b'0' && x <= b'9' } unsafe fn hasChar(mut list: *const u8, x: u8) -> bool { while p!{list[0]} != b'\0' { if p!{list[0]} == x { return true } p!{list+=1} } false } // not eq because float NaN is not reflexive #[derive(PartialEq)] enum FragData { Keyword(IString), Operator(IString), Ident(IString), String(IString), Int(f64), Double(f64), Separator(IString), } // https://github.com/rust-lang/rust/issues/32836 // An atomic fragment of something. Stops at a natural boundary. struct Frag { data: FragData, size: usize, } impl Frag { fn isNumber(&self) -> bool { match self.data { FragData::Int(_) | FragData::Double(_) => true, _ => false, } } fn getStr(&self) -> IString { match self.data { FragData::Keyword(ref s) | FragData::Operator(ref s) | FragData::Ident(ref s) | FragData::String(ref s) | FragData::Separator(ref s) => s.clone(), FragData::Int(_) | FragData::Double(_) => panic!(), } } fn parse(&self) -> AstNode { match self.data { FragData::Ident(ref s) => builder::makeName(s.clone()), FragData::String(ref s) => builder::makeString(s.clone()), FragData::Int(f) => builder::makeInt(f64tou32(f)), FragData::Double(f) => builder::makeDouble(f), _ => panic!(), } } unsafe fn from_str(mut src: *const u8) -> Frag { let start = src; let fragdata = if isIdentInit(p!{src[0]}) { // read an identifier or a keyword p!{src+=1}; while isIdentPart(*src) { p!{src+=1}; } let b = slice::from_raw_parts(start, src as usize - start as usize); let s = str::from_utf8_unchecked(b); let is = IString::from(s); if KEYWORDS.contains(&is) { FragData::Keyword(is) } else { FragData::Ident(is) } } else if isDigit(p!{src[0]}) || (p!{src[0]} == b'.' && isDigit(p!{src[1]})) { let num = if p!{src[0]} == b'0' && (p!{src[1]} == b'x' || p!{src[1]} == b'X') { // Explicitly parse hex numbers of form "0x...", because strtod // supports hex number strings only in C++11, and Visual Studio 2013 // does not yet support that functionality. p!{src+=2}; let mut num: f64 = 0f64; loop { if p!{src[0]} >= b'0' && p!{src[0]} <= b'9' { num *= 16f64; num += (p!{src[0]} - b'0') as f64; } else if p!{src[0]} >= b'a' && p!{src[0]} <= b'f' { num *= 16f64; num += (p!{src[0]} - b'a' + 10) as f64; } else if p!{src[0]} >= b'A' && p!{src[0]} <= b'F' { num *= 16f64; num += (p!{src[0]} - b'F' + 10) as f64; } else { break } p!{src+=1}; } num as f64 } else { let mut ptr = ptr::null_mut(); let num = libc::strtod(start as *const c_char, &mut ptr as *mut _); src = ptr as *const _; num }; // asm.js must have a '.' for double values. however, we also tolerate // uglify's tendency to emit without a '.' (and fix it later with a +). // for valid asm.js input, the '.' should be enough, and for uglify // in the emscripten optimizer pipeline, we use simple_ast where // INT/DOUBLE is quite the same at this point anyhow let b = slice::from_raw_parts(start, src as usize - start as usize); if !b.contains(&b'.') && isInteger32(num) { FragData::Int(num) } else { FragData::Double(num) } } else if hasChar(OPERATOR_INITS.as_ptr(), p!{src[0]}) { let is = match p!{src[0]} { b'!' => if p!{src[1]} == b'=' { is!("!=") } else { is!("!") }, b'%' => is!("%"), b'&' => is!("&"), b'*' => is!("*"), b'+' => is!("+"), b',' => is!(","), b'-' => is!("-"), b'.' 
=> is!("."), b'/' => is!("/"), b':' => is!(":"), b'<' => match p!{src[1]} { b'<' => is!("<<"), b'=' => is!("<="), _ => is!("<"), }, b'=' => if p!{src[1]} == b'=' { is!("==") } else { is!("=") }, b'>' => match p!{src[1]} { b'>' => if p!{src[2]} == b'>' { is!(">>>") } else { is!(">>") }, b'=' => is!(">="), _ => is!(">"), }, b'?' => is!("?"), b'^' => is!("^"), b'|' => is!("|"), b'~' => is!("~"), _ => unreachable!(), }; debug_assert!({ let b = is.as_bytes(); b == slice::from_raw_parts(start, b.len()) }); p!{src+=is.len()}; FragData::Operator(is) } else if hasChar(SEPARATORS.as_ptr(), p!{src[0]}) { let b = slice::from_raw_parts(src, 1); let s = str::from_utf8_unchecked(b); let is = IString::from(s); p!{src+=1}; FragData::Separator(is) } else if p!{src[0]} == b'"' || p!{src[0]} == b'\'' { let end = libc::strchr(p!{src+1} as *const c_char, p!{src[0]} as c_int) as *const _; let b = slice::from_raw_parts(p!{src+1}, end as usize - p!{src+1} as usize); let s = str::from_utf8_unchecked(b); let is = IString::from(s); src = end; p!{src+=1}; FragData::String(is) } else { // RSTODO //Parser::dump("frag parsing".as_ptr(), src); panic!() }; Frag { data: fragdata, size: src as usize - start as usize, } } } #[derive(Debug)] enum ExprElt { Node(AstNode), Op(IString), } // parser pub struct Parser { // This is a list of the current stack of node-operator-node-operator-etc. // this works by each parseExpression call appending to the vector; then // recursing out, and the toplevel sorts it all expressionPartsStack: Vec<Vec<ExprElt>>, allSource: *const u8, allSize: usize, } unsafe fn skipSpace(curr: &mut *const u8) { while pp!{curr[0]} != b'\0' { if isSpace(pp!{curr[0]}) { pp!{curr+=1}; continue } if pp!{curr[0]} == b'/' && pp!{curr[1]} == b'/' { pp!{curr+=2}; while pp!{curr[0]} != b'\0' && pp!{curr[0]} != b'\n' { pp!{curr+=1}; } if pp!{curr[0]} != b'\0' { pp!{curr+=1}; } continue } if pp!{curr[0]} == b'/' && pp!{curr[1]} == b'*' { pp!{curr+=2}; while pp!{curr[0]} != b'\0' && (pp!{curr[0]} != b'*' || pp!{curr[1]} != b'/') { pp!{curr+=1}; } pp!{curr+=2}; continue } return } } impl Parser { // Parses an element in a list of such elements, e.g. 
list of statements in a block, or list of parameters in a call unsafe fn parseElement(&mut self, src: &mut *const u8, seps: *const u8) -> AstNode { //dump("parseElement", src); skipSpace(src); let frag = Frag::from_str(*src); pp!{src+=frag.size}; match frag.data { FragData::Keyword(_) => self.parseAfterKeyword(&frag, src, seps), FragData::Ident(_) => self.parseAfterIdent(&frag, src, seps), FragData::String(_) | FragData::Int(_) | FragData::Double(_) => self.parseExpression(ExprElt::Node(frag.parse()), src, seps), FragData::Separator(s) => { let eenode = match s { is!("(") => self.parseAfterParen(src), is!("[") => self.parseAfterBrace(src), is!("{") => self.parseAfterCurly(src), _ => panic!(), }; self.parseExpression(ExprElt::Node(eenode), src, seps) }, FragData::Operator(s) => self.parseExpression(ExprElt::Op(s), src, seps), } } unsafe fn parseAfterKeyword(&mut self, frag: &Frag, src: &mut *const u8, seps: *const u8) -> AstNode { skipSpace(src); match frag.getStr() { is!("function") => self.parseFunction(src, seps), is!("var") | is!("const") => self.parseVar(src, seps, false), is!("return") => self.parseReturn(src, seps), is!("if") => self.parseIf(src, seps), is!("do") => self.parseDo(src, seps), is!("while") => self.parseWhile(src, seps), is!("break") => self.parseBreak(src, seps), is!("continue") => self.parseContinue(src, seps), is!("switch") => self.parseSwitch(src, seps), is!("new") => self.parseNew(src, seps), _ => panic!(), } } // RSTODO: remove seps? unsafe fn parseFunction(&mut self, src: &mut *const u8, _seps: *const u8) -> AstNode { let name_str = match Frag::from_str(*src) { Frag { data: FragData::Ident(s), size: n } => { pp!{src+=n}; s }, Frag { data: FragData::Separator(is!("(")), .. } => is!(""), _ => panic!(), }; let mut ret = builder::makeFunction(name_str); skipSpace(src); assert!(pp!{src[0]} == b'('); pp!{src+=1}; loop { skipSpace(src); if pp!{src[0]} == b')' { break } if let Frag { data: FragData::Ident(s), size: n } = Frag::from_str(*src) { pp!{src+=n}; builder::appendArgumentToFunction(&mut ret, s) } else { panic!() } skipSpace(src); match pp!{src[0]} { b')' => break, b',' => { pp!{src+=1}; continue }, _ => panic!(), } } pp!{src+=1}; builder::setBlockContent(&mut ret, self.parseBracketedBlock(src)); // TODO: parse expression? ret } // RSTODO: remove seps? 
unsafe fn parseVar(&mut self, src: &mut *const u8, _seps: *const u8, is_const: bool) -> AstNode { let mut ret = builder::makeVar(is_const); loop { skipSpace(src); if pp!{src[0]} == b';' { break } let name_str = if let Frag { data: FragData::Ident(s), size: n } = Frag::from_str(*src) { pp!{src+=n}; s } else { panic!() }; skipSpace(src); let mut value = None; if pp!{src[0]} == b'=' { pp!{src+=1}; skipSpace(src); value = Some(self.parseElement(src, b";,\0".as_ptr())) } builder::appendToVar(&mut ret, name_str, value); skipSpace(src); match pp!{src[0]} { b';' => break, b',' => { pp!{src+=1}; continue }, _ => panic!(), } } pp!{src+=1}; ret } unsafe fn parseReturn(&mut self, src: &mut *const u8, seps: *const u8) -> AstNode { skipSpace(src); let mut value = None; if !hasChar(seps, pp!{src[0]}) { value = Some(self.parseElement(src, seps)) } assert!(hasChar(seps, pp!{src[0]})); if pp!{src[0]} == b';' { pp!{src+=1} } builder::makeReturn(value) } unsafe fn parseIf(&mut self, src: &mut *const u8, seps: *const u8) -> AstNode { let condition = self.parseParenned(src); let ifTrue = self.parseMaybeBracketed(src, seps); skipSpace(src); let mut ifFalse = None; if !hasChar(seps, pp!{src[0]}) { let next = Frag::from_str(*src); if let Frag { data: FragData::Keyword(is!("else")), size: n } = next { pp!{src+=n}; ifFalse = Some(self.parseMaybeBracketed(src, seps)) } } builder::makeIf(condition, ifTrue, ifFalse) } unsafe fn parseDo(&mut self, src: &mut *const u8, seps: *const u8) -> AstNode { let body = self.parseMaybeBracketed(src, seps); skipSpace(src); let next = Frag::from_str(*src); if let Frag { data: FragData::Keyword(is!("while")), size: n } = next { pp!{src+=n}; } else { panic!() } let condition = self.parseParenned(src); builder::makeDo(body, condition) } unsafe fn parseWhile(&mut self, src: &mut *const u8, seps: *const u8) -> AstNode { let condition = self.parseParenned(src); let body = self.parseMaybeBracketed(src, seps); builder::makeWhile(condition, body) } // RSTODO: remove seps? unsafe fn parseBreak(&mut self, src: &mut *const u8, _seps: *const u8) -> AstNode { skipSpace(src); let next = Frag::from_str(*src); let mut arg = None; if let Frag { data: FragData::Ident(s), size: n } = next { pp!{src+=n}; arg = Some(s) } builder::makeBreak(arg) } // RSTODO: remove seps? unsafe fn parseContinue(&mut self, src: &mut *const u8, _seps: *const u8) -> AstNode { skipSpace(src); let next = Frag::from_str(*src); let mut arg = None; if let Frag { data: FragData::Ident(s), size: n } = next { pp!{src+=n}; arg = Some(s) } builder::makeContinue(arg) } // RSTODO: remove seps? 
unsafe fn parseSwitch(&mut self, src: &mut *const u8, _seps: *const u8) -> AstNode { let mut ret = builder::makeSwitch(self.parseParenned(src)); skipSpace(src); assert!(pp!{src[0]} == b'{'); pp!{src+=1}; loop { // find all cases and possibly a default skipSpace(src); if pp!{src[0]} == b'}' { break } let next = Frag::from_str(*src); match next { Frag { data: FragData::Keyword(is!("case")), size: n } => { pp!{src+=n}; skipSpace(src); let value = Frag::from_str(*src); let arg = if value.isNumber() { pp!{src+=value.size}; value.parse() } else { assert!(value.data == FragData::Operator(is!("-"))); pp!{src+=value.size}; skipSpace(src); let value2 = Frag::from_str(*src); assert!(value2.isNumber()); pp!{src+=value2.size}; builder::makePrefix(is!("-"), value2.parse()) }; builder::appendCaseToSwitch(&mut ret, arg); skipSpace(src); assert!(pp!{src[0]} == b':'); pp!{src+=1}; continue }, Frag { data: FragData::Keyword(is!("default")), size: n } => { pp!{src+=n}; builder::appendDefaultToSwitch(&mut ret); skipSpace(src); assert!(pp!{src[0]} == b':'); pp!{src+=1}; continue }, // otherwise, may be some keyword that happens to start a block // (e.g. case 1: _return_ 5) _ => () } // not case X: or default: or }, so must be some code skipSpace(src); let explicitBlock = pp!{src[0]} == b'{'; let subBlock = if explicitBlock { self.parseBracketedBlock(src) } else { self.parseBlock(src, b";}\0".as_ptr(), Some(is!("case")), Some(is!("default"))) }; builder::appendCodeToSwitch(&mut ret, subBlock, explicitBlock); } skipSpace(src); assert!(pp!{src[0]} == b'}'); pp!{src+=1}; ret } unsafe fn parseNew(&mut self, src: &mut *const u8, seps: *const u8) -> AstNode { builder::makeNew(self.parseElement(src, seps)) } // RSTODO unsafe fn parseAfterIdent(&mut self, frag: &Frag, src: &mut *const u8, seps: *const u8) -> AstNode { skipSpace(src); match pp!{src[0]} { b'(' => { let exprelt = ExprElt::Node(self.parseCall(frag.parse(), src)); self.parseExpression(exprelt, src, seps) }, b'[' => { let exprelt = ExprElt::Node(self.parseIndexing(frag.parse(), src)); self.parseExpression(exprelt, src, seps) }, b':' if self.expressionPartsStack.last().unwrap().is_empty() => { pp!{src+=1}; skipSpace(src); let inner = if pp!{src[0]} == b'{' { // context lets us know this is not an object, but a block self.parseBracketedBlock(src) } else { self.parseElement(src, seps) }; builder::makeLabel(frag.getStr(), inner) }, b'.' 
=> { let exprelt = ExprElt::Node(self.parseDotting(frag.parse(), src)); self.parseExpression(exprelt, src, seps) }, _ => self.parseExpression(ExprElt::Node(frag.parse()), src, seps), } } unsafe fn parseCall(&mut self, target: AstNode, src: &mut *const u8) -> AstNode { self.expressionPartsStack.push(vec![]); assert!(pp!{src[0]} == b'('); pp!{src+=1}; let mut ret = builder::makeCall(target); loop { skipSpace(src); if pp!{src[0]} == b')' { break } builder::appendToCall(&mut ret, self.parseElement(src, b",)\0".as_ptr())); skipSpace(src); if pp!{src[0]} == b')' { break } if pp!{src[0]} == b',' { pp!{src+=1}; continue } panic!() } pp!{src+=1}; assert!(self.expressionPartsStack.pop().unwrap().len() == 0); ret } unsafe fn parseIndexing(&mut self, target: AstNode, src: &mut *const u8) -> AstNode { self.expressionPartsStack.push(vec![]); assert!(pp!{src[0]} == b'['); pp!{src+=1}; let elt = self.parseElement(src, b"]\0".as_ptr()); let ret = builder::makeIndexing(target, elt); skipSpace(src); assert!(pp!{src[0]} == b']'); pp!{src+=1}; assert!(self.expressionPartsStack.pop().unwrap().len() == 0); ret } unsafe fn parseDotting(&mut self, target: AstNode, src: &mut *const u8) -> AstNode { assert!(pp!{src[0]} == b'['); pp!{src+=1}; if let Frag { data: FragData::Ident(s), size: n } = Frag::from_str(*src) { pp!{src+=n}; builder::makeDot(target, s) } else { panic!() } } unsafe fn parseAfterParen(&mut self, src: &mut *const u8) -> AstNode { self.expressionPartsStack.push(vec![]); skipSpace(src); let ret = self.parseElement(src, b")\0".as_ptr()); skipSpace(src); assert!(pp!{src[0]} == b')'); pp!{src+=1}; assert!(self.expressionPartsStack.pop().unwrap().len() == 0); ret } // RSTODO: needs expressionPartsStack pop? unsafe fn parseAfterBrace(&mut self, src: &mut *const u8) -> AstNode { self.expressionPartsStack.push(vec![]); let mut ret = builder::makeArray(); loop { skipSpace(src); assert!(pp!{src[0]} != b'\0'); if pp!{src[0]} == b']' { break } builder::appendToArray(&mut ret, self.parseElement(src, b",]\0".as_ptr())); skipSpace(src); if pp!{src[0]} == b']' { break } if pp!{src[0]} == b',' { pp!{src+=1}; continue } panic!() } pp!{src+=1}; ret } // RSTODO: needs expressionPartsStack pop? 
unsafe fn parseAfterCurly(&mut self, src: &mut *const u8) -> AstNode { self.expressionPartsStack.push(vec![]); let mut ret = builder::makeObject(); loop { skipSpace(src); assert!(pp!{src[0]} != b'\0'); if pp!{src[0]} == b'}' { break } let (s, n) = match Frag::from_str(*src) { // key Frag { data: FragData::Ident(s), size: n } | Frag { data: FragData::String(s), size: n } => (s, n), _ => panic!(), }; pp!{src+=n}; skipSpace(src); assert!(pp!{src[0]} == b':'); pp!{src+=1}; let value = self.parseElement(src, b",}\0".as_ptr()); builder::appendToObject(&mut ret, s, value); skipSpace(src); if pp!{src[0]} == b'}' { break } if pp!{src[0]} == b',' { pp!{src+=1}; continue } panic!() } pp!{src+=1}; ret } unsafe fn makeBinary(left: AstNode, op: IString, right: AstNode) -> AstNode { if op == is!(".") { builder::makeDotAstNode(left, right) } else { builder::makeBinary(left, op, right) } } unsafe fn parseExpression(&mut self, initial: ExprElt, src: &mut *const u8, seps: *const u8) -> AstNode { //dump("parseExpression", src); // RSTODO: this function is to make it less ugly to work around rust // lexical lifetimes fn getParts(s: &mut Parser) -> &mut Vec<ExprElt> { s.expressionPartsStack.last_mut().unwrap() } skipSpace(src); if pp!{src[0]} == b'\0' || hasChar(seps, pp!{src[0]}) { let parts = getParts(self); if !parts.is_empty() { // RSTODO: This is ridiculously unsafe but is needed because of // the way the expression stack works. When parseExpression is // called with an empty top level of the stack, the bit between // 'let top' and 'let last' begins the population of the stack // level. The 'let last = parseElement' then recursively calls // down back into parseExpression, building up the stack and // until we hit *this* line. All of the parseExpressions but // the top level one then return (because !top) and top level // sorts it all out. // Note that this ptr::read is crucially coupled with the // mem::forget near 'let last'. // https://github.com/kripken/cashew/commit/a2f527c1597cdbe0342cb4154465023159832518 parts.push(::std::ptr::read(&initial)); // cherry on top of the cake } let node = if let ExprElt::Node(n) = initial { n } else { panic!() }; return node } let top; if let ExprElt::Node(node) = initial { let next = Frag::from_str(*src); if let Frag { data: FragData::Operator(s), size: n } = next { let parts = getParts(self); top = parts.is_empty(); parts.push(ExprElt::Node(node)); pp!{src+=n}; parts.push(ExprElt::Op(s)) } else { let initial = ExprElt::Node(match pp!{src[0]} { b'(' => self.parseCall(node, src), b'[' => self.parseIndexing(node, src), _ => { //self.dump("bad parseExpression state", *src); panic!("bad parseExpression state") }, }); return self.parseExpression(initial, src, seps) } } else { let parts = getParts(self); top = parts.is_empty(); parts.push(initial) } let last = self.parseElement(src, seps); if !top { return last } ::std::mem::forget(last); let parts = getParts(self); // parts may have been invalidated by that call // we are the toplevel. 
sort it all out // collapse right to left, highest priority first //dumpParts(parts, 0); for ops in OP_CLASSES.iter() { // RSTODO: consider unifying rtl and ltr if ops.rtl { // right to left cfor!{let mut i = parts.len()-1; /* cond */; if i == 0 { break }, i -= 1; { let op = match parts[i] { ExprElt::Node(_) => continue, ExprElt::Op(ref op) => op.clone(), }; if !ops.ops.contains(&op) { continue } if ops.ty == OpClassTy::Binary && i > 0 && i < parts.len()-1 { let part2 = parts.remove(i+1); let part1 = parts.remove(i-1); let (n1, n2) = match (part1, part2) { (ExprElt::Node(n1), ExprElt::Node(n2)) => (n1, n2), _ => panic!("not both nodes in rtl binary"), }; // RSTODO: if assigned at i-1, only need one drain? parts[i-1] = ExprElt::Node(Self::makeBinary(n1, op, n2)); // RSTODO: could optimise here by decrementing to avoid // reprocessing? Note the unfortunate asymmetry with ltr } else if ops.ty == OpClassTy::Prefix && i < parts.len()-1 { if i > 0 { // cannot apply prefix operator if it would join // two nodes if let ExprElt::Node(_) = parts[i-1] { continue } } let n1 = match parts.remove(i+1) { ExprElt::Node(n1) => n1, _ => panic!("not node in rtl prefix"), }; parts[i] = ExprElt::Node(builder::makePrefix(op, n1)); } else if ops.ty == OpClassTy::Tertiary { // we must be at X ? Y : Z // ^ //dumpParts(parts, i); if op != is!(":") { continue } assert!(i < parts.len()-1 && i >= 3); match parts[i-2] { ExprElt::Op(is!("?")) => (), ExprElt::Op(_) => continue, ExprElt::Node(_) => panic!("node in rtl tertiary"), } let part3 = parts.remove(i+1); let part2 = parts.remove(i-1); let _ = parts.remove(i-2); let part1 = parts.remove(i-3); let (n1, n2, n3) = match (part1, part2, part3) { (ExprElt::Node(n1), ExprElt::Node(n2), ExprElt::Node(n3)) => (n1, n2, n3), _ => panic!("not all three nodes in rtl tertiary"), }; parts[i-3] = ExprElt::Node(builder::makeConditional(n1, n2, n3)); i = parts.len(); } // TODO: postfix }} } else { // left to right cfor!{let mut i = 0; i < parts.len(); i += 1; { let op = match parts[i] { ExprElt::Node(_) => continue, ExprElt::Op(ref op) => op.clone(), }; if !ops.ops.contains(&op) { continue } if ops.ty == OpClassTy::Binary && i > 0 && i < parts.len()-1 { let part2 = parts.remove(i+1); let part1 = parts.remove(i-1); let (n1, n2) = match (part1, part2) { (ExprElt::Node(n1), ExprElt::Node(n2)) => (n1, n2), _ => panic!("not both nodes in ltr binary"), }; // RSTODO: if assigned at i-1, only need one drain? parts[i-1] = ExprElt::Node(Self::makeBinary(n1, op, n2)); i -= 1; } else if ops.ty == OpClassTy::Prefix && i < parts.len()-1 { if i > 0 { // cannot apply prefix operator if it would join // two nodes if let ExprElt::Node(_) = parts[i-1] { continue } } let n1 = match parts.remove(i+1) { ExprElt::Node(n1) => n1, _ => panic!("not node in ltr prefix"), }; parts[i] = ExprElt::Node(builder::makePrefix(op, n1)); // allow a previous prefix operator to cascade i = if i > 2 { i-2 } else { 0 }; } // TODO: tertiary, postfix }} } } let part = parts.pop().unwrap(); assert!(parts.is_empty()); if let ExprElt::Node(n) = part { n } else { panic!() } } // Parses a block of code (e.g. 
a bunch of statements inside {,}, or the top level of o file) unsafe fn parseBlock(&mut self, src: &mut *const u8, seps: *const u8, keywordSep1: Option<IString>, keywordSep2: Option<IString>) -> AstNode { let mut block = builder::makeBlock(); //dump("parseBlock", src); loop { skipSpace(src); if pp!{src[0]} == b'\0' { break } if pp!{src[0]} == b';' { pp!{src+=1}; // skip a statement in this block continue } if hasChar(seps, pp!{src[0]}) { break } // RSTODO: combine these two conditions? if let Some(ref ks) = keywordSep1 { assert!(*ks != is!("")); let next = Frag::from_str(*src); if FragData::Keyword(ks.clone()) == next.data { break } } if let Some(ref ks) = keywordSep2 { assert!(*ks != is!("")); let next = Frag::from_str(*src); if FragData::Keyword(ks.clone()) == next.data { break } } let element = self.parseElementOrStatement(src, seps); builder::appendToBlock(&mut block, element); } block } unsafe fn parseBracketedBlock(&mut self, src: &mut *const u8) -> AstNode { skipSpace(src); assert!(pp!{src[0]} == b'{'); pp!{src+=1}; // the two are not symmetrical, ; is just internally separating, } is // the final one - parseBlock knows all this let block = self.parseBlock(src, b";}\0".as_ptr(), None, None); assert!(pp!{src[0]} == b'}'); pp!{src+=1}; block } unsafe fn parseElementOrStatement(&mut self, src: &mut *const u8, seps: *const u8) -> AstNode { skipSpace(src); if pp!{src[0]} == b';' { pp!{src+=1}; } if pp!{src[0]} == b'{' { // detect a trivial {} in a statement context let before = *src; pp!{src+=1}; skipSpace(src); if pp!{src[0]} == b'}' { pp!{src+=1}; // we don't need the brackets here, but oh well return builder::makeBlock() } *src = before; } let mut ret = self.parseElement(src, seps); skipSpace(src); if pp!{src[0]} == b';' { ret = builder::makeStatement(ret); pp!{src+=1}; } ret } unsafe fn parseMaybeBracketed(&mut self, src: &mut *const u8, seps: *const u8) -> AstNode { skipSpace(src); if pp!{src[0]} == b'{' { self.parseBracketedBlock(src) } else { self.parseElementOrStatement(src, seps) } } unsafe fn parseParenned(&mut self, src: &mut *const u8) -> AstNode { skipSpace(src); assert!(pp!{src[0]} == b'('); pp!{src+=1}; let ret = self.parseElement(src, b")\0".as_ptr()); skipSpace(src); assert!(pp!{src[0]} == b')'); pp!{src+=1}; ret } pub fn new() -> Parser { Parser { expressionPartsStack: vec![vec![]], allSource: ptr::null(), allSize: 0, } } pub unsafe fn parseToplevel(&mut self, src: *const u8) -> AstNode { self.allSource = src; self.allSize = libc::strlen(src as *const i8); let mut toplevel = builder::makeToplevel(); let mut cursrc = src; let block = self.parseBlock(&mut cursrc, b";\0".as_ptr(), None, None); builder::setBlockContent(&mut toplevel, block); toplevel } // Debugging // RSTODO //unsafe fn dump(&mut self, _msg: &str, _curr: *const u8) { // panic!() //} // static void dump(const char *where, char* curr) { // /* // printf("%s:\n=============\n", where); // for (int i = 0; i < allSize; i++) printf("%c", allSource[i] ? 
allSource[i] : '?'); // printf("\n"); // for (int i = 0; i < (curr - allSource); i++) printf(" "); // printf("^\n=============\n"); // */ // fprintf(stderr, "%s:\n==========\n", where); // int newlinesLeft = 2; // int charsLeft = 200; // while (*curr) { // if (*curr == '\n') { // newlinesLeft--; // if (newlinesLeft == 0) break; // } // charsLeft--; // if (charsLeft == 0) break; // fprintf(stderr, "%c", *curr++); // } // fprintf(stderr, "\n\n"); // } // RSTODO //unsafe fn dumpParts(_parts: Vec<Vec<ExprElt>>, _i: usize) { // panic!() //} // void dumpParts(ExpressionParts& parts, int i) { // printf("expressionparts: %d (at %d)\n", parts.size(), i); // printf("| "); // for (int i = 0; i < parts.size(); i++) { // if (parts[i].isNode) { // parts[i].getNode()->stringify(std::cout); // printf(" "); // } else { // printf(" _%s_ ", parts[i].getOp().str); // } // } // printf("|\n"); // } // }
true
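The parseExpression tail above collapses its parts stack one operator class at a time. Below is a standalone sketch of that collapse for left-to-right binary operators, using plain f64 nodes instead of the crate's AstNode; the Part enum, collapse_ltr and apply are illustrative names only, not part of the original code.

// Illustrative only: mirrors the ltr binary collapse in parseExpression,
// folding parts[i-1] op parts[i+1] into a single node for each matching operator.
#[derive(Debug)]
enum Part {
    Node(f64),
    Op(char),
}

fn collapse_ltr(mut parts: Vec<Part>, ops: &[char], apply: fn(f64, char, f64) -> f64) -> Vec<Part> {
    let mut i = 0;
    while i < parts.len() {
        let op = match parts[i] {
            Part::Node(_) => { i += 1; continue }
            Part::Op(op) => op,
        };
        if !ops.contains(&op) || i == 0 || i + 1 >= parts.len() {
            i += 1;
            continue;
        }
        let rhs = match parts.remove(i + 1) { Part::Node(n) => n, _ => panic!() };
        let lhs = match parts.remove(i - 1) { Part::Node(n) => n, _ => panic!() };
        // the folded node lands at i-1 and the next operator is now at i, so the
        // next iteration re-checks index i; chains like a-b-c fold left to right
        parts[i - 1] = Part::Node(apply(lhs, op, rhs));
    }
    parts
}

fn main() {
    // 1 + 2 * 3: the higher-priority class ('*') collapses first, then '+'
    let apply: fn(f64, char, f64) -> f64 = |l, op, r| match op {
        '+' => l + r,
        '*' => l * r,
        _ => unreachable!(),
    };
    let parts = vec![Part::Node(1.0), Part::Op('+'), Part::Node(2.0), Part::Op('*'), Part::Node(3.0)];
    let parts = collapse_ltr(parts, &['*'], apply);
    let parts = collapse_ltr(parts, &['+'], apply);
    println!("{:?}", parts); // [Node(7.0)]
}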
f19a0baf870b410c50ca09a55e5dd0bc227a5c90
Rust
12101111/nettest
/src/tcping.rs
UTF-8
1,389
2.828125
3
[]
no_license
use crate::{Measurement, Task};
use anyhow::{anyhow, Context, Result};
use log::info;
use std::net::{SocketAddr, TcpStream, ToSocketAddrs};
use std::time::{Duration, Instant};

pub struct TcpingTask {
    target: SocketAddr,
    timeout: Duration,
    seq: u16,
}

impl TcpingTask {
    pub fn new(addr: &str, timeout: u64) -> Result<TcpingTask> {
        let target = addr
            .to_socket_addrs()
            .context("Can't resolve IP address")?
            .next()
            .ok_or(anyhow!("Don't have IP address"))?;
        let format_target = target.to_string();
        if format_target != addr {
            info!("Ping to {} ({}) using TCP", addr, format_target);
        } else {
            info!("Ping to {} using TCP", format_target);
        }
        let timeout = Duration::from_secs(timeout);
        Ok(Self {
            target,
            timeout,
            seq: 0,
        })
    }
}

impl Task for TcpingTask {
    fn run(&mut self) -> Result<Measurement> {
        self.seq += 1;
        let start = Instant::now();
        let tcp = TcpStream::connect_timeout(&self.target, self.timeout)?;
        let time = start.elapsed();
        tcp.shutdown(std::net::Shutdown::Both)?;
        drop(tcp);
        info!(
            "Connected to {}: seq={} time={:?}",
            self.target, self.seq, time
        );
        Ok(Measurement::Time(time))
    }
}
true
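A minimal driver sketch for the TcpingTask file above. It assumes only what the file itself shows — Task::run() returning anyhow::Result<Measurement> and Measurement::Time wrapping a Duration — and that TcpingTask, Task and Measurement are imported from the crate (exact paths assumed); ping_three_times and the "example.com:443" target are hypothetical.

use anyhow::Result;

// Hypothetical helper: runs any Task a few times and prints the timing results.
fn ping_three_times(task: &mut impl Task) -> Result<()> {
    for _ in 0..3 {
        if let Measurement::Time(rtt) = task.run()? {
            println!("rtt = {:?}", rtt);
        }
    }
    Ok(())
}

fn main() -> Result<()> {
    // host:port plus a 5 second connect timeout, mirroring TcpingTask::new's signature
    let mut task = TcpingTask::new("example.com:443", 5)?;
    ping_three_times(&mut task)
}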
6730c434f57e8def618296beb6f9dfb10f5092bd
Rust
lattice0/rust-ffmpeg-1
/src/util/cvec.rs
UTF-8
992
2.96875
3
[ "WTFPL" ]
permissive
use std::{ptr, slice};
use libc::c_uchar;

/*
extern "C" {
    fn allocate_data(data_ptr: *mut *const c_uchar, data_len: *mut i32);
    fn deallocate_data(data_ptr: *const c_uchar);
}
*/
//fn deallocate_data(data_ptr: *const c_uchar);

/// Borrowed view over a C-allocated byte buffer; `Deref`s to `[c_uchar]`.
pub struct CVec {
    ptr: *const c_uchar,
    len: usize,
}

impl CVec {
    pub fn new(ptr: *const c_uchar, len: usize) -> CVec {
        CVec { ptr, len }
    }
}

impl std::ops::Deref for CVec {
    type Target = [c_uchar];

    fn deref(&self) -> &[c_uchar] {
        unsafe { slice::from_raw_parts(self.ptr, self.len) }
    }
}

impl Drop for CVec {
    fn drop(&mut self) {
        // Freeing is delegated to the C side; re-enable once the
        // `deallocate_data` extern above is hooked up again.
        //unsafe { deallocate_data(self.ptr) };
    }
}

// NOTE: with `allocate_data` commented out, `ptr` stays null and the
// assertion below fails; this function is inert until the FFI is restored.
fn get_vec() -> CVec {
    let mut ptr = ptr::null();
    let mut len = 0;
    unsafe {
        //allocate_data(&mut ptr, &mut len);
        assert!(!ptr.is_null());
        assert!(len >= 0);
        CVec {
            ptr,
            len: len as usize,
        }
    }
}
true
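A small sketch of how the CVec view above behaves, assuming CVec is in scope. In the file as written, Drop is a no-op because the deallocate_data extern is commented out, so wrapping memory owned on the Rust side is safe here.

fn main() {
    // Wrap memory we own; CVec only borrows the pointer and Derefs to a slice.
    let bytes = vec![1u8, 2, 3];
    let view = CVec::new(bytes.as_ptr(), bytes.len());
    assert_eq!(&view[..], &[1u8, 2, 3][..]);

    // Drop order matters once deallocate_data is re-enabled: at that point,
    // only wrap pointers that the C side allocated, or this would free foreign memory.
    drop(view);
    drop(bytes);
}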
0f68608edea1ee937f281cabf119c8495538753c
Rust
willcrichton/rapier
/src/dynamics/mass_properties.rs
UTF-8
16,276
2.90625
3
[ "Apache-2.0" ]
permissive
use crate::math::{AngVector, AngularInertia, Isometry, Point, Rotation, Vector}; use crate::utils; use num::Zero; use std::ops::{Add, AddAssign, Sub, SubAssign}; #[cfg(feature = "dim3")] use {na::Matrix3, std::ops::MulAssign}; #[derive(Copy, Clone, Debug, PartialEq)] #[cfg_attr(feature = "serde-serialize", derive(Serialize, Deserialize))] /// The local mass properties of a rigid-body. pub struct MassProperties { /// The center of mass of a rigid-body expressed in its local-space. pub local_com: Point<f32>, /// The inverse of the mass of a rigid-body. /// /// If this is zero, the rigid-body is assumed to have infinite mass. pub inv_mass: f32, /// The inverse of the principal angular inertia of the rigid-body. /// /// Components set to zero are assumed to be infinite along the corresponding principal axis. pub inv_principal_inertia_sqrt: AngVector<f32>, #[cfg(feature = "dim3")] /// The principal vectors of the local angular inertia tensor of the rigid-body. pub principal_inertia_local_frame: Rotation<f32>, } impl MassProperties { /// Initializes the mass properties with the given center-of-mass, mass, and angular inertia. /// /// The center-of-mass is specified in the local-space of the rigid-body. #[cfg(feature = "dim2")] pub fn new(local_com: Point<f32>, mass: f32, principal_inertia: f32) -> Self { let inv_mass = utils::inv(mass); let inv_principal_inertia_sqrt = utils::inv(principal_inertia.sqrt()); Self { local_com, inv_mass, inv_principal_inertia_sqrt, } } /// Initializes the mass properties from the given center-of-mass, mass, and principal angular inertia. /// /// The center-of-mass is specified in the local-space of the rigid-body. /// The principal angular inertia are the angular inertia along the coordinate axes in the local-space /// of the rigid-body. #[cfg(feature = "dim3")] pub fn new(local_com: Point<f32>, mass: f32, principal_inertia: AngVector<f32>) -> Self { Self::with_principal_inertia_frame(local_com, mass, principal_inertia, Rotation::identity()) } /// Initializes the mass properties from the given center-of-mass, mass, and principal angular inertia. /// /// The center-of-mass is specified in the local-space of the rigid-body. /// The principal angular inertia are the angular inertia along the coordinate axes defined by /// the `principal_inertia_local_frame` expressed in the local-space of the rigid-body. #[cfg(feature = "dim3")] pub fn with_principal_inertia_frame( local_com: Point<f32>, mass: f32, principal_inertia: AngVector<f32>, principal_inertia_local_frame: Rotation<f32>, ) -> Self { let inv_mass = utils::inv(mass); let inv_principal_inertia_sqrt = principal_inertia.map(|e| utils::inv(e.sqrt())); Self { local_com, inv_mass, inv_principal_inertia_sqrt, principal_inertia_local_frame, } } /// The world-space center of mass of the rigid-body. pub fn world_com(&self, pos: &Isometry<f32>) -> Point<f32> { pos * self.local_com } #[cfg(feature = "dim2")] /// The world-space inverse angular inertia tensor of the rigid-body. pub fn world_inv_inertia_sqrt(&self, _rot: &Rotation<f32>) -> AngularInertia<f32> { self.inv_principal_inertia_sqrt } #[cfg(feature = "dim3")] /// The world-space inverse angular inertia tensor of the rigid-body. 
pub fn world_inv_inertia_sqrt(&self, rot: &Rotation<f32>) -> AngularInertia<f32> { if !self.inv_principal_inertia_sqrt.is_zero() { let mut lhs = (rot * self.principal_inertia_local_frame) .to_rotation_matrix() .into_inner(); let rhs = lhs.transpose(); lhs.column_mut(0) .mul_assign(self.inv_principal_inertia_sqrt.x); lhs.column_mut(1) .mul_assign(self.inv_principal_inertia_sqrt.y); lhs.column_mut(2) .mul_assign(self.inv_principal_inertia_sqrt.z); let inertia = lhs * rhs; AngularInertia::from_sdp_matrix(inertia) } else { AngularInertia::zero() } } #[cfg(feature = "dim3")] /// Reconstructs the inverse angular inertia tensor of the rigid body from its principal inertia values and axes. pub fn reconstruct_inverse_inertia_matrix(&self) -> Matrix3<f32> { let inv_principal_inertia = self.inv_principal_inertia_sqrt.map(|e| e * e); self.principal_inertia_local_frame.to_rotation_matrix() * Matrix3::from_diagonal(&inv_principal_inertia) * self .principal_inertia_local_frame .inverse() .to_rotation_matrix() } #[cfg(feature = "dim3")] /// Reconstructs the angular inertia tensor of the rigid body from its principal inertia values and axes. pub fn reconstruct_inertia_matrix(&self) -> Matrix3<f32> { let principal_inertia = self.inv_principal_inertia_sqrt.map(|e| utils::inv(e * e)); self.principal_inertia_local_frame.to_rotation_matrix() * Matrix3::from_diagonal(&principal_inertia) * self .principal_inertia_local_frame .inverse() .to_rotation_matrix() } #[cfg(feature = "dim2")] pub(crate) fn construct_shifted_inertia_matrix(&self, shift: Vector<f32>) -> f32 { let i = utils::inv(self.inv_principal_inertia_sqrt * self.inv_principal_inertia_sqrt); if self.inv_mass != 0.0 { let mass = 1.0 / self.inv_mass; i + shift.norm_squared() * mass } else { i } } #[cfg(feature = "dim3")] pub(crate) fn construct_shifted_inertia_matrix(&self, shift: Vector<f32>) -> Matrix3<f32> { let matrix = self.reconstruct_inertia_matrix(); if self.inv_mass != 0.0 { let mass = 1.0 / self.inv_mass; let diag = shift.norm_squared(); let diagm = Matrix3::from_diagonal_element(diag); matrix + (diagm + shift * shift.transpose()) * mass } else { matrix } } /// Transform each element of the mass properties. pub fn transform_by(&self, m: &Isometry<f32>) -> Self { // NOTE: we don't apply the parallel axis theorem here // because the center of mass is also transformed. Self { local_com: m * self.local_com, inv_mass: self.inv_mass, inv_principal_inertia_sqrt: self.inv_principal_inertia_sqrt, #[cfg(feature = "dim3")] principal_inertia_local_frame: m.rotation * self.principal_inertia_local_frame, } } } impl Zero for MassProperties { fn zero() -> Self { Self { inv_mass: 0.0, inv_principal_inertia_sqrt: na::zero(), #[cfg(feature = "dim3")] principal_inertia_local_frame: Rotation::identity(), local_com: Point::origin(), } } fn is_zero(&self) -> bool { *self == Self::zero() } } impl Sub<MassProperties> for MassProperties { type Output = Self; #[cfg(feature = "dim2")] fn sub(self, other: MassProperties) -> Self { if self.is_zero() || other.is_zero() { return self; } let m1 = utils::inv(self.inv_mass); let m2 = utils::inv(other.inv_mass); let inv_mass = utils::inv(m1 - m2); let local_com = (self.local_com * m1 - other.local_com.coords * m2) * inv_mass; let i1 = self.construct_shifted_inertia_matrix(local_com - self.local_com); let i2 = other.construct_shifted_inertia_matrix(local_com - other.local_com); let inertia = i1 - i2; // NOTE: we drop the negative eigenvalues that may result from subtraction rounding errors. 
let inv_principal_inertia_sqrt = utils::inv(inertia.max(0.0).sqrt()); Self { local_com, inv_mass, inv_principal_inertia_sqrt, } } #[cfg(feature = "dim3")] fn sub(self, other: MassProperties) -> Self { if self.is_zero() || other.is_zero() { return self; } let m1 = utils::inv(self.inv_mass); let m2 = utils::inv(other.inv_mass); let inv_mass = utils::inv(m1 - m2); let local_com = (self.local_com * m1 - other.local_com.coords * m2) * inv_mass; let i1 = self.construct_shifted_inertia_matrix(local_com - self.local_com); let i2 = other.construct_shifted_inertia_matrix(local_com - other.local_com); let inertia = i1 - i2; let eigen = inertia.symmetric_eigen(); let principal_inertia_local_frame = Rotation::from_matrix_eps(&eigen.eigenvectors, 1.0e-6, 10, na::one()); let principal_inertia = eigen.eigenvalues; // NOTE: we drop the negative eigenvalues that may result from subtraction rounding errors. let inv_principal_inertia_sqrt = principal_inertia.map(|e| utils::inv(e.max(0.0).sqrt())); Self { local_com, inv_mass, inv_principal_inertia_sqrt, principal_inertia_local_frame, } } } impl SubAssign<MassProperties> for MassProperties { fn sub_assign(&mut self, rhs: MassProperties) { *self = *self - rhs } } impl Add<MassProperties> for MassProperties { type Output = Self; #[cfg(feature = "dim2")] fn add(self, other: MassProperties) -> Self { if self.is_zero() { return other; } else if other.is_zero() { return self; } let m1 = utils::inv(self.inv_mass); let m2 = utils::inv(other.inv_mass); let inv_mass = utils::inv(m1 + m2); let local_com = (self.local_com * m1 + other.local_com.coords * m2) * inv_mass; let i1 = self.construct_shifted_inertia_matrix(local_com - self.local_com); let i2 = other.construct_shifted_inertia_matrix(local_com - other.local_com); let inertia = i1 + i2; let inv_principal_inertia_sqrt = utils::inv(inertia.sqrt()); Self { local_com, inv_mass, inv_principal_inertia_sqrt, } } #[cfg(feature = "dim3")] fn add(self, other: MassProperties) -> Self { if self.is_zero() { return other; } else if other.is_zero() { return self; } let m1 = utils::inv(self.inv_mass); let m2 = utils::inv(other.inv_mass); let inv_mass = utils::inv(m1 + m2); let local_com = (self.local_com * m1 + other.local_com.coords * m2) * inv_mass; let i1 = self.construct_shifted_inertia_matrix(local_com - self.local_com); let i2 = other.construct_shifted_inertia_matrix(local_com - other.local_com); let inertia = i1 + i2; let eigen = inertia.symmetric_eigen(); let principal_inertia_local_frame = Rotation::from_matrix_eps(&eigen.eigenvectors, 1.0e-6, 10, na::one()); let principal_inertia = eigen.eigenvalues; let inv_principal_inertia_sqrt = principal_inertia.map(|e| utils::inv(e.sqrt())); Self { local_com, inv_mass, inv_principal_inertia_sqrt, principal_inertia_local_frame, } } } impl AddAssign<MassProperties> for MassProperties { fn add_assign(&mut self, rhs: MassProperties) { *self = *self + rhs } } impl approx::AbsDiffEq for MassProperties { type Epsilon = f32; fn default_epsilon() -> Self::Epsilon { f32::default_epsilon() } fn abs_diff_eq(&self, other: &Self, epsilon: Self::Epsilon) -> bool { #[cfg(feature = "dim2")] let inertia_is_ok = self .inv_principal_inertia_sqrt .abs_diff_eq(&other.inv_principal_inertia_sqrt, epsilon); #[cfg(feature = "dim3")] let inertia_is_ok = self .reconstruct_inverse_inertia_matrix() .abs_diff_eq(&other.reconstruct_inverse_inertia_matrix(), epsilon); inertia_is_ok && self.local_com.abs_diff_eq(&other.local_com, epsilon) && self.inv_mass.abs_diff_eq(&other.inv_mass, epsilon) && self 
.inv_principal_inertia_sqrt .abs_diff_eq(&other.inv_principal_inertia_sqrt, epsilon) } } impl approx::RelativeEq for MassProperties { fn default_max_relative() -> Self::Epsilon { f32::default_max_relative() } fn relative_eq( &self, other: &Self, epsilon: Self::Epsilon, max_relative: Self::Epsilon, ) -> bool { #[cfg(feature = "dim2")] let inertia_is_ok = self.inv_principal_inertia_sqrt.relative_eq( &other.inv_principal_inertia_sqrt, epsilon, max_relative, ); #[cfg(feature = "dim3")] let inertia_is_ok = self.reconstruct_inverse_inertia_matrix().relative_eq( &other.reconstruct_inverse_inertia_matrix(), epsilon, max_relative, ); inertia_is_ok && self .local_com .relative_eq(&other.local_com, epsilon, max_relative) && self .inv_mass .relative_eq(&other.inv_mass, epsilon, max_relative) } } #[cfg(test)] mod test { use super::MassProperties; use crate::geometry::ColliderBuilder; use crate::math::{Point, Rotation, Vector}; use approx::assert_relative_eq; use num::Zero; #[test] fn mass_properties_add_partial_zero() { let m1 = MassProperties { local_com: Point::origin(), inv_mass: 2.0, inv_principal_inertia_sqrt: na::zero(), #[cfg(feature = "dim3")] principal_inertia_local_frame: Rotation::identity(), }; let m2 = MassProperties { local_com: Point::origin(), inv_mass: 0.0, #[cfg(feature = "dim2")] inv_principal_inertia_sqrt: 1.0, #[cfg(feature = "dim3")] inv_principal_inertia_sqrt: Vector::new(1.0, 2.0, 3.0), #[cfg(feature = "dim3")] principal_inertia_local_frame: Rotation::identity(), }; let result = MassProperties { local_com: Point::origin(), inv_mass: 2.0, #[cfg(feature = "dim2")] inv_principal_inertia_sqrt: 1.0, #[cfg(feature = "dim3")] inv_principal_inertia_sqrt: Vector::new(1.0, 2.0, 3.0), #[cfg(feature = "dim3")] principal_inertia_local_frame: Rotation::identity(), }; assert_eq!(m1 + m2, result); assert_eq!(m2 + m1, result); } #[test] fn mass_properties_add_sub() { // Check that addition and subtraction of mass properties behave as expected. let c1 = ColliderBuilder::capsule_x(1.0, 2.0).build(); let c2 = ColliderBuilder::capsule_y(3.0, 4.0).build(); let c3 = ColliderBuilder::ball(5.0).build(); let m1 = c1.mass_properties(); let m2 = c2.mass_properties(); let m3 = c3.mass_properties(); let m1m2m3 = m1 + m2 + m3; assert_relative_eq!(m1 + m2, m2 + m1, epsilon = 1.0e-6); assert_relative_eq!(m1m2m3 - m1, m2 + m3, epsilon = 1.0e-6); assert_relative_eq!(m1m2m3 - m2, m1 + m3, epsilon = 1.0e-6); assert_relative_eq!(m1m2m3 - m3, m1 + m2, epsilon = 1.0e-6); assert_relative_eq!(m1m2m3 - (m1 + m2), m3, epsilon = 1.0e-6); assert_relative_eq!(m1m2m3 - (m1 + m3), m2, epsilon = 1.0e-6); assert_relative_eq!(m1m2m3 - (m2 + m3), m1, epsilon = 1.0e-6); assert_relative_eq!(m1m2m3 - m1 - m2, m3, epsilon = 1.0e-6); assert_relative_eq!(m1m2m3 - m1 - m3, m2, epsilon = 1.0e-6); assert_relative_eq!(m1m2m3 - m2 - m3, m1, epsilon = 1.0e-6); assert_relative_eq!( m1m2m3 - m1 - m2 - m3, MassProperties::zero(), epsilon = 1.0e-6 ); } }
true
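A usage sketch for the MassProperties type above, assuming the 3D ("dim3") feature and that the type and the math aliases (Isometry, Point, Vector) are imported from the crate; the exact import paths are assumed.

fn main() {
    // A body of mass 2.0 with its center of mass at the local origin.
    let props = MassProperties::new(Point::origin(), 2.0, Vector::new(0.1, 0.2, 0.3));

    // transform_by moves the center of mass but, as the comment in the source
    // notes, does not apply the parallel-axis theorem: mass and principal
    // inertia are carried over unchanged.
    let shift = Isometry::translation(1.0, 0.0, 0.0);
    let moved = props.transform_by(&shift);
    assert_eq!(moved.inv_mass, props.inv_mass);
    assert_eq!(moved.local_com, shift * props.local_com);
    assert_eq!(moved.inv_principal_inertia_sqrt, props.inv_principal_inertia_sqrt);
}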
d4d2d337cfce55b36f02d7cb0dafc25705c3d0e0
Rust
ges0909/rust-examples
/result/main.rs
UTF-8
640
3.921875
4
[]
no_license
fn myfunc(n: isize) -> Result<isize, String> {
    if n % 2 == 0 {
        return Result::Ok(n); // returns Ok
    }
    Result::Err(format!("odd number {}", n)) // returns Err
}

fn main() {
    // explicit match
    let r = match myfunc(2) {
        Ok(num) => num,                // r = num
        Err(msg) => panic!("{}", msg), // write message and exit
    };
    println!("{}", r);

    // match with unwrap() returning Ok
    let r = myfunc(4);
    println!("{}", r.unwrap()); // unwrap() is doing the match

    // match with unwrap() resulting in panic!
    let r = myfunc(3);
    println!("{}", r.unwrap()); // unwrap() and panic
}
true
14e23ed6c2f723e5b29d45af9200cb03f4f885b3
Rust
indo-dev-0/icu4x
/components/provider/src/lib.rs
UTF-8
3,870
2.90625
3
[ "Apache-2.0", "ICU", "MIT", "LicenseRef-scancode-unicode" ]
permissive
// This file is part of ICU4X. For terms of use, please see the file // called LICENSE at the top level of the ICU4X source tree // (online at: https://github.com/unicode-org/icu4x/blob/master/LICENSE ). //! `icu_provider` is one of the [`ICU4X`] components. //! //! It defines traits and structs for transmitting data through the ICU4X locale data pipeline. //! The primary trait is [`DataProvider`]. It has one method, which transforms a [`Request`] into //! a [`Response`]: //! //! ```ignore //! fn load(&self, req: &DataRequest) -> Result<DataResponse<'d>, DataError> //! ``` //! //! A Request contains a [`DataKey`] (a composition of a [`Category`] and sub-category, e.g., //! "plurals/cardinal@1") and [`DataEntry`] (a language identifier and optional variant, e.g., //! "fr") being requested. The Response contains the data payload corresponding to the Request. //! //! The most common types required for ICU4X [`DataProvider`] are included via the prelude: //! //! ``` //! use icu_provider::prelude::*; //! use std::any::TypeId; //! //! // Types included: //! println!("{:?}", TypeId::of::<dyn DataProvider>()); //! println!("{:?}", TypeId::of::<DataError>()); //! println!("{:?}", TypeId::of::<DataKey>()); //! println!("{:?}", TypeId::of::<DataEntry>()); //! println!("{:?}", TypeId::of::<DataCategory>()); //! println!("{:?}", TypeId::of::<DataRequest>()); //! println!("{:?}", TypeId::of::<DataResponse>()); //! println!("{:?}", TypeId::of::<DataResponseBuilder>()); //! ``` //! //! ## Types of Data Providers //! //! Any object implementing [`DataProvider`] can be used to supply ICU4X with locale data. ICU4X ships //! with some pre-built data providers: //! //! - [`FsDataProvider`](../icu_provider_fs/struct.FsDataProvider.html) reads structured data from the //! filesystem. It can also write out that filesystem structure. //! - [`CldrJsonDataProvider`](../icu_provider_cldr/transform/struct.CldrJsonDataProvider.html) reads structured //! data directly from CLDR source files. //! //! ## Iterable Data Providers //! //! Data providers can implement [`DataEntryCollection`], allowing them to be used via the //! auto-implemented trait [`IterableDataProvider`]. This allows iteration over all [`DataEntry`] //! instances supported for a certain key in the data provider. This can be useful when //! transforming data between storage formats. For more information, see the [`iter`] module. //! //! ## `InvariantDataProvider` //! //! For testing or development purposes, this crate also offers `InvariantDataProvider`, which //! returns fixed data that does not vary by locale. You must enable `InvariantDataProvider` via the //! `"invariant"` feature in your Cargo.toml file. //! //! [`ICU4X`]: ../icu/index.html //! [`DataProvider`]: prelude::DataProvider //! [`Request`]: prelude::DataRequest //! [`Response`]: prelude::DataResponse //! [`DataKey`]: prelude::DataKey //! [`Category`]: prelude::DataCategory //! [`DataEntry`]: prelude::DataEntry //! [`DataEntryCollection`]: iter::DataEntryCollection //! [`IterableDataProvider`]: iter::IterableDataProvider mod cloneable_any; mod data_entry; #[macro_use] mod data_key; mod data_provider; mod error; pub mod iter; pub mod structs; #[cfg(feature = "invariant")] mod invariant; #[cfg(feature = "invariant")] pub use invariant::InvariantDataProvider; pub mod prelude { //! Core selection of APIs and structures for `DataProvider`. 
pub use crate::data_entry::DataEntry; pub use crate::data_key::DataCategory; pub use crate::data_key::DataKey; pub use crate::data_provider::DataProvider; pub use crate::data_provider::DataRequest; pub use crate::data_provider::DataResponse; pub use crate::data_provider::DataResponseBuilder; pub use crate::error::Error as DataError; } // Also include the same symbols at the top level for selective inclusion pub use prelude::*;
true
364352193ea29f18ec16657d30f6d427a7d989f4
Rust
bodoni/font
/src/format/opentype/character.rs
UTF-8
2,140
2.828125
3
[ "Apache-2.0", "MIT" ]
permissive
use std::collections::HashMap; use std::io::Result; use std::ops::RangeInclusive; use opentype::truetype::character_mapping::{CharacterMapping, Encoding}; use opentype::truetype::GlyphID; use typeface::Tape; use crate::format::opentype::cache::Cache; /// Unicode code points. pub type Characters = Vec<RangeInclusive<u32>>; pub struct Mapping(HashMap<u32, GlyphID>); impl Mapping { pub fn new(character_mapping: &CharacterMapping) -> Result<Self> { for encoding in character_mapping.encodings.iter() { match encoding { Encoding::Format0(encoding) => return Ok(Self(encoding.mapping())), Encoding::Format4(encoding) => return Ok(Self(encoding.mapping())), Encoding::Format6(encoding) => return Ok(Self(encoding.mapping())), Encoding::Format12(encoding) => return Ok(Self(encoding.mapping())), _ => {} } } raise!("found no known character-to-glyph encoding") } #[inline] pub fn get(&self, character: char) -> Option<GlyphID> { self.0.get(&(character as u32)).copied() } } pub(crate) fn read<T: Tape>(cache: &mut Cache<T>) -> Result<Characters> { for encoding in cache.character_mapping()?.encodings.iter() { let ranges = match encoding { Encoding::Format0(encoding) => encoding.characters(), Encoding::Format4(encoding) => encoding.characters(), Encoding::Format6(encoding) => encoding.characters(), Encoding::Format12(encoding) => encoding.characters(), _ => continue, }; return Ok(compress(ranges)); } raise!("found no known character-to-glyph encoding") } fn compress(ranges: Vec<(u32, u32)>) -> Vec<RangeInclusive<u32>> { let mut result: Vec<RangeInclusive<u32>> = Vec::with_capacity(ranges.len()); for range in ranges { if let Some(last) = result.last_mut() { if last.end() + 1 == range.0 { *last = *last.start()..=range.1; continue; } } result.push(range.0..=range.1); } result }
true
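The private compress() helper above merges adjacent code-point ranges. Below is a unit-test sketch that could sit in the same file; the module and test names are hypothetical and not part of the source.

#[cfg(test)]
mod compress_tests {
    use super::compress;

    #[test]
    fn merges_adjacent_ranges_and_keeps_gaps() {
        // (0, 9) and (10, 20) touch, so they merge; (25, 30) stays separate.
        let ranges = vec![(0, 9), (10, 20), (25, 30)];
        assert_eq!(compress(ranges), vec![0..=20, 25..=30]);
    }
}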
79d79455e4e32b934785e15c2ef80ad476b1384d
Rust
mersinvald/akkadia
/akkadia-vfs/src/test.rs
UTF-8
8,706
2.78125
3
[]
no_license
use super::{VfsInternal, Change, FileLoader, File, Error, make_line_indices}; use Span; use span::{Row, Column}; use std::path::{Path, PathBuf}; struct MockFileLoader; impl FileLoader for MockFileLoader { fn read<U>(file_name: &Path) -> Result<File<U>, Error> { let text = format!("{}\nHello\nWorld\nHello, World!\n", file_name.display()); Ok(File { line_indices: make_line_indices(&text), text: text, changed: false, user_data: None, }) } fn write(file_name: &Path, file: &FileKind) -> Result<(), Error> { if file_name.display().to_string() == "foo" { assert_eq!(file.changed, true); assert_eq!(file.text, "foo\nHfooo\nWorld\nHello, World!\n"); } Ok(()) } } fn make_change(with_len: bool) -> Change { let (row_end, col_end, len) = if with_len { // If len is present, we shouldn't depend on row_end/col_end // at all, because they may be invalid. (0, 0, Some(3)) } else { (1, 4, None) }; Change::ReplaceText { span: Span::new( Row::new_zero_indexed(1), Row::new_zero_indexed(row_end), Column::new_zero_indexed(1), Column::new_zero_indexed(col_end), "foo", ), len: len, text: "foo".to_owned(), } } fn make_change_2(with_len: bool) -> Change { let (row_end, col_end, len) = if with_len { // If len is present, we shouldn't depend on row_end/col_end // at all, because they may be invalid. (0, 0, Some(4)) } else { (3, 2, None) }; Change::ReplaceText { span: Span::new( Row::new_zero_indexed(2), Row::new_zero_indexed(row_end), Column::new_zero_indexed(4), Column::new_zero_indexed(col_end), "foo", ), len: len, text: "aye carumba".to_owned(), } } fn test_has_changes(with_len: bool) { let vfs = VfsInternal::<MockFileLoader, ()>::new(); assert!(!vfs.has_changes()); vfs.load_file(&Path::new("foo")).unwrap(); assert!(!vfs.has_changes()); vfs.on_changes(&[make_change(with_len)]).unwrap(); assert!(vfs.has_changes()); vfs.file_saved(&Path::new("bar")).unwrap(); assert!(vfs.has_changes()); vfs.file_saved(&Path::new("foo")).unwrap(); assert!(!vfs.has_changes()); } #[test] fn test_has_changes_without_len() { test_has_changes(false) } #[test] fn test_has_changes_with_len() { test_has_changes(true) } #[test] fn test_cached_files() { let vfs = VfsInternal::<MockFileLoader, ()>::new(); assert!(vfs.get_cached_files().is_empty()); vfs.load_file(&Path::new("foo")).unwrap(); vfs.load_file(&Path::new("bar")).unwrap(); let files = vfs.get_cached_files(); assert!(files.len() == 2); assert!(files[Path::new("foo")] == "foo\nHello\nWorld\nHello, World!\n"); assert!(files[Path::new("bar")] == "bar\nHello\nWorld\nHello, World!\n"); } #[test] fn test_flush_file() { let vfs = VfsInternal::<MockFileLoader, ()>::new(); // Flushing an uncached-file should succeed. 
vfs.flush_file(&Path::new("foo")).unwrap(); vfs.load_file(&Path::new("foo")).unwrap(); vfs.flush_file(&Path::new("foo")).unwrap(); assert!(vfs.get_cached_files().is_empty()); } fn test_changes(with_len: bool) { let vfs = VfsInternal::<MockFileLoader, ()>::new(); vfs.on_changes(&[make_change(with_len)]).unwrap(); let files = vfs.get_cached_files(); assert!(files.len() == 1); assert!(files[&PathBuf::from("foo")] == "foo\nHfooo\nWorld\nHello, World!\n"); assert!( vfs.load_file(&Path::new("foo")) == Ok("foo\nHfooo\nWorld\nHello, World!\n".to_owned()) ); assert!( vfs.load_file(&Path::new("bar")) == Ok("bar\nHello\nWorld\nHello, World!\n".to_owned()) ); vfs.on_changes(&[make_change_2(with_len)]).unwrap(); let files = vfs.get_cached_files(); assert!(files.len() == 2); assert!(files[&PathBuf::from("foo")] == "foo\nHfooo\nWorlaye carumballo, World!\n"); assert!( vfs.load_file(&Path::new("foo")) == Ok("foo\nHfooo\nWorlaye carumballo, World!\n".to_owned()) ); } #[test] fn test_changes_without_len() { test_changes(false) } #[test] fn test_changes_with_len() { test_changes(true) } #[test] fn test_change_add_file() { let vfs = VfsInternal::<MockFileLoader, ()>::new(); let new_file = Change::AddFile { file: PathBuf::from("foo"), text: "Hello, World!".to_owned(), }; vfs.on_changes(&[new_file]).unwrap(); let files = vfs.get_cached_files(); assert_eq!(files.len(), 1); assert_eq!(files[&PathBuf::from("foo")], "Hello, World!"); } fn test_user_data(with_len: bool) { let vfs = VfsInternal::<MockFileLoader, i32>::new(); // New files have no user data. vfs.load_file(&Path::new("foo")).unwrap(); vfs.with_user_data(&Path::new("foo"), |u| { assert_eq!(u, Err(Error::NoUserDataForFile)); Ok(()) }).unwrap(); // Set and read data. vfs.set_user_data(&Path::new("foo"), Some(42)).unwrap(); vfs.with_user_data(&Path::new("foo"), |u| { assert_eq!(*u.unwrap().1, 42); Ok(()) }).unwrap(); assert_eq!( vfs.set_user_data(&Path::new("bar"), Some(42)), Err(Error::FileNotCached) ); // ensure_user_data should not be called if the userdata already exists. vfs.ensure_user_data(&Path::new("foo"), |_| panic!()) .unwrap(); // Test ensure_user_data is called. vfs.load_file(&Path::new("bar")).unwrap(); vfs.ensure_user_data(&Path::new("bar"), |_| Ok(1)).unwrap(); vfs.with_user_data(&Path::new("bar"), |u| { assert_eq!(*u.unwrap().1, 1); Ok(()) }).unwrap(); // compute and read data. vfs.with_user_data(&Path::new("foo"), |u| { assert_eq!(u.as_ref().unwrap().0, "foo\nHello\nWorld\nHello, World!\n"); *u.unwrap().1 = 43; Ok(()) }).unwrap(); vfs.with_user_data(&Path::new("foo"), |u| { assert_eq!(*u.unwrap().1, 43); Ok(()) }).unwrap(); assert_eq!( vfs.with_user_data(&Path::new("foo"), |u| { assert_eq!(*u.unwrap().1, 43); Err(Error::BadLocation): Result<(), Error> }), Err(Error::BadLocation) ); vfs.with_user_data(&Path::new("foo"), |u| { assert_eq!(*u.unwrap().1, 43); Ok(()) }).unwrap(); // Clear and read data. vfs.set_user_data(&Path::new("foo"), None).unwrap(); vfs.with_user_data(&Path::new("foo"), |u| { assert_eq!(u, Err(Error::NoUserDataForFile)); Ok(()) }).unwrap(); // Compute (clear) and read data. vfs.set_user_data(&Path::new("foo"), Some(42)).unwrap(); assert_eq!( vfs.with_user_data(&Path::new("foo"), |_| { Err(Error::NoUserDataForFile): Result<(), Error> }), Err(Error::NoUserDataForFile) ); vfs.with_user_data(&Path::new("foo"), |u| { assert_eq!(u, Err(Error::NoUserDataForFile)); Ok(()) }).unwrap(); // Flushing a file should clear user data. 
vfs.set_user_data(&Path::new("foo"), Some(42)).unwrap(); vfs.flush_file(&Path::new("foo")).unwrap(); vfs.load_file(&Path::new("foo")).unwrap(); vfs.with_user_data(&Path::new("foo"), |u| { assert_eq!(u, Err(Error::NoUserDataForFile)); Ok(()) }).unwrap(); // Recording a change should clear user data. vfs.set_user_data(&Path::new("foo"), Some(42)).unwrap(); vfs.on_changes(&[make_change(with_len)]).unwrap(); vfs.with_user_data(&Path::new("foo"), |u| { assert_eq!(u, Err(Error::NoUserDataForFile)); Ok(()) }).unwrap(); } #[test] fn test_user_data_without_len() { test_user_data(false) } #[test] fn test_user_data_with_len() { test_user_data(true) } fn test_write(with_len: bool) { let vfs = VfsInternal::<MockFileLoader, ()>::new(); vfs.on_changes(&[make_change(with_len)]).unwrap(); vfs.write_file(&Path::new("foo")).unwrap(); let files = vfs.get_cached_files(); assert!(files.len() == 1); let files = vfs.get_changes(); assert!(files.is_empty()); } #[test] fn test_write_without_len() { test_write(false) } #[test] fn test_write_with_len() { test_write(true) } #[test] fn test_clear() { let vfs = VfsInternal::<MockFileLoader, ()>::new(); vfs.load_file(&Path::new("foo")).unwrap(); vfs.load_file(&Path::new("bar")).unwrap(); assert!(vfs.get_cached_files().len() == 2); vfs.clear(); assert!(vfs.get_cached_files().is_empty()); } // TODO test with wide chars
true
48564b8ec94f9ba8ee348ca22f16d37d285e8357
Rust
garethkcjones/rays
/src/texture/chequer.rs
UTF-8
1,055
3.34375
3
[]
no_license
use super::Texture;
use crate::{Colour, Vec3};
use std::sync::Arc;

/**
 * Type for representing a chequered texture.
 */
#[derive(Debug)]
pub struct Chequer {
    scale: Vec3,
    even: Arc<dyn Texture>,
    odd: Arc<dyn Texture>,
}

impl Chequer {
    #[must_use]
    pub fn new(
        scale: Vec3,
        even: impl Into<Arc<dyn Texture>>,
        odd: impl Into<Arc<dyn Texture>>,
    ) -> Self {
        Self {
            scale,
            even: even.into(),
            odd: odd.into(),
        }
    }

    #[must_use]
    pub fn new_texture(
        scale: Vec3,
        even: impl Into<Arc<dyn Texture>>,
        odd: impl Into<Arc<dyn Texture>>,
    ) -> Arc<dyn Texture> {
        Arc::new(Self::new(scale, even, odd))
    }
}

impl Texture for Chequer {
    fn value(&self, u: f64, v: f64, p: Vec3) -> Colour {
        let p = p * self.scale;
        let sines = p.x().sin() * p.y().sin() * p.z().sin();
        if sines < 0.0 {
            self.odd.value(u, v, p)
        } else {
            self.even.value(u, v, p)
        }
    }
}
true
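The parity rule in Chequer::value above comes down to the sign of sin(x)·sin(y)·sin(z) after scaling. Below is a self-contained sketch of just that rule; cell_is_even is an illustrative name, and it assumes the Vec3 multiplication in the source is component-wise.

fn cell_is_even(p: (f64, f64, f64), scale: (f64, f64, f64)) -> bool {
    // mirrors `let p = p * self.scale;` followed by the sine-product sign test
    let (x, y, z) = (p.0 * scale.0, p.1 * scale.1, p.2 * scale.2);
    x.sin() * y.sin() * z.sin() >= 0.0
}

fn main() {
    use std::f64::consts::PI;
    // with a scale of pi per axis, the pattern flips once per unit step
    let scale = (PI, PI, PI);
    assert!(cell_is_even((0.5, 0.5, 0.5), scale));
    assert!(!cell_is_even((1.5, 0.5, 0.5), scale)); // one cell over along x: odd
}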
0dd5e99d568629b8a830ff94c5a6764902a4b936
Rust
void-dragon/runjit
/src/executor.rs
UTF-8
6,916
3.25
3
[]
no_license
//! //! AST executor //! use std::collections::BTreeMap; use std::rc::Rc; use std::cell::RefCell; use std::ops::Deref; use ast::*; use types::*; pub struct Context { pub parent: Option<Rc<Context>>, pub values: RefCell<BTreeMap<String, Rc<Value>>>, pub statements: Vec<Ast>, } impl Context { pub fn new() -> Rc<Context> { Rc::new(Context { parent: None, values: RefCell::new(BTreeMap::new()), statements: Vec::new(), }) } pub fn with_parent(parent: Rc<Context>) -> Rc<Context> { Rc::new(Context { parent: Some(parent), values: RefCell::new(BTreeMap::new()), statements: Vec::new(), }) } pub fn get(&self, name: &str) -> Option<Rc<Value>> { let vals = self.values.borrow(); let maybe = vals.get(name); match maybe { Some(m) => Some(m.clone()), None => { if let Some(ref parent) = self.parent { parent.get(name) } else { None } } } } pub fn get_by_ast(&self, var: &Vec<Rc<Ast>>) -> Option<Rc<Value>> { for i in var { match **i { Ast::Str(ref name) => {} Ast::Exp(_, _, _) => {} _ => {} } } None } pub fn set(&self, name: &str, val: Rc<Value>) { let mut vals = self.values.borrow_mut(); vals.insert(name.to_string(), val); } pub fn set_by_ast(&self, var: &Vec<Rc<Ast>>, val: Rc<Value>) {} } pub fn run(ctx: Rc<Context>, ast: Rc<Ast>) -> Result<Rc<Value>, String> { match ast.deref() { &Ast::Block(ref b) => block(ctx.clone(), b), _ => Err(String::from("unexpected ast element")), } } fn block(ctx: Rc<Context>, data: &Vec<Rc<Ast>>) -> Result<Rc<Value>, String> { for stmnt in data { let res = match stmnt.deref() { &Ast::Assign(ref name, ref ast) => assign(ctx.clone(), name.clone(), ast.clone()), &Ast::Call(ref name, ref ast) => call(ctx.clone(), name, ast), &Ast::If(ref exp, ref block, ref _else) => { _if(ctx.clone(), exp.clone(), &block, _else.clone()) } _ => Err(String::from("unexpected ast element")), }; if res.is_err() { return res; } } Ok(Rc::new(Value::Null)) } fn exp(ctx: Rc<Context>, ast: Rc<Ast>) -> Result<Rc<Value>, String> { match ast.deref() { &Ast::Str(ref data) => Ok(Rc::new(Value::String(data.clone()))), &Ast::Float(ref data) => Ok(Rc::new(Value::Float(*data))), &Ast::Lambda(ref params, ref stmnts) => { Ok(Rc::new(Value::Lambda(params.clone(), stmnts.clone()))) } &Ast::Var(ref tokens) => { ctx.get_by_ast(tokens).ok_or( String::from("unknown variable"), ) } &Ast::Exp(ref op, ref left, ref right) => { let l = exp(ctx.clone(), left.clone()); let r = exp(ctx.clone(), right.clone()); match l { Ok(v) => { if let Value::Float(lf) = *v { match r { Ok(v) => { if let Value::Float(rf) = *v { match op { &Operation::Add => Ok(Rc::new(Value::Float(lf + rf))), &Operation::Sub => Ok(Rc::new(Value::Float(lf - rf))), &Operation::Mul => Ok(Rc::new(Value::Float(lf * rf))), &Operation::Div => Ok(Rc::new(Value::Float(lf / rf))), &Operation::Mod => Ok(Rc::new(Value::Float(lf % rf))), &Operation::Eq => { if lf == rf { Ok(Rc::new(Value::Float(1.0))) } else { Ok(Rc::new(Value::Null)) } } &Operation::Neq => { if lf != rf { Ok(Rc::new(Value::Float(1.0))) } else { Ok(Rc::new(Value::Null)) } } _ => Err(String::from("unsupported operation")), } } else { Err(String::from("only can calculate numbers")) } } Err(e) => Err(e), } } else { Err(String::from("only can calculate numbers")) } } Err(e) => Err(e), } } _ => Err(format!("unexpected expression")), } } fn assign(ctx: Rc<Context>, name: Rc<Ast>, ast: Rc<Ast>) -> Result<Rc<Value>, String> { if let Ok(val) = exp(ctx.clone(), ast) { if let Ast::Var(ref tokens) = *name { ctx.set_by_ast(tokens, val); } } Ok(Rc::new(Value::Null)) } fn call(ctx: Rc<Context>, name: &str, ast: &Vec<Rc<Ast>>) 
-> Result<Rc<Value>, String> { let maybe = ctx.get(name); if let Some(val) = maybe { let params: Vec<Rc<Value>> = ast.iter() .map(|x| exp(ctx.clone(), x.clone()).unwrap()) .collect(); match *val { Value::Lambda(ref names, ref stmnts) => { let new_ctx = Context::with_parent(ctx); for i in 0..params.len() { new_ctx.set(&names[i], params[i].clone()); } block(new_ctx, stmnts) } Value::RustCall(ref rc) => rc.call(&params), _ => Err(String::from("unexpected value")), } } else { Err(String::from("unknown call of variable")) } } fn _if( ctx: Rc<Context>, ex: Rc<Ast>, blck: &Vec<Rc<Ast>>, el: Rc<Ast>, ) -> Result<Rc<Value>, String> { let res = exp(ctx.clone(), ex); match res { Ok(r) => { match *r { Value::Null => Ok(Rc::new(Value::Null)), _ => { let new_ctx = Context::with_parent(ctx); block(new_ctx, blck) } } } Err(e) => Err(e), } }
true
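A sketch of the scoping behaviour that Context::get and Context::set above implement: lookups walk the parent chain, and a local set shadows without touching the parent. It assumes Context and Value are in scope, e.g. from within the crate.

use std::rc::Rc;

fn main() {
    let globals = Context::new();
    globals.set("x", Rc::new(Value::Float(1.0)));

    // a child context resolves names through the parent chain...
    let locals = Context::with_parent(globals.clone());
    assert!(matches!(locals.get("x").as_deref(), Some(Value::Float(_))));

    // ...until it shadows them locally; the parent binding stays untouched
    locals.set("x", Rc::new(Value::Float(2.0)));
    assert!(matches!(globals.get("x").as_deref(), Some(Value::Float(v)) if *v == 1.0));
    assert!(matches!(locals.get("x").as_deref(), Some(Value::Float(v)) if *v == 2.0));
}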
c2ca4988c4cf1e2ddcecdf70cbdad3f9e7367d0e
Rust
HiraokaTakuya/bitboard_bench
/rust_version/src/main.rs
UTF-8
2,576
3.4375
3
[]
no_license
struct Square(i32); struct Bitboard { v: [u64; 2], } impl Iterator for Bitboard { type Item = Square; fn next(&mut self) -> Option<Self::Item> { self.pop_lsb() } } impl Bitboard { const ALL: Bitboard = Bitboard { v: [0x7fff_ffff_ffff_ffff, 0x7fff_ffff_ffff_ffff], }; fn value(&self, i: usize) -> u64 { self.v[i] } fn merge(&self) -> u64 { self.value(0) | self.value(1) } fn to_bool(&self) -> bool { self.merge() != 0 } fn pop_lsb_right_unchecked(&mut self) -> Square { let sq = Square(self.value(0).trailing_zeros() as i32); self.v[0] &= self.v[0] - 1; sq } fn pop_lsb_left_unchecked(&mut self) -> Square { let sq = Square(self.value(1).trailing_zeros() as i32 + 64); self.v[1] &= self.v[1] - 1; sq } fn pop_lsb_unchecked(&mut self) -> Square { if self.value(0) != 0 { return self.pop_lsb_right_unchecked(); } self.pop_lsb_left_unchecked() } fn pop_lsb(&mut self) -> Option<Square> { if self.to_bool() { Some(self.pop_lsb_unchecked()) } else { None } } } fn main() { const NUM_TRIALS: i64 = 30000000; let mut sum: u64 = 0; let start = std::time::Instant::now(); for _ in 0..NUM_TRIALS { let mut all_one: u64 = 0xffff_ffff_ffff_ffff; while all_one != 0 { sum += all_one.trailing_zeros() as u64; all_one &= all_one - 1; } } let end = start.elapsed(); let elapsed = (end.as_secs() * 1000) as i64 + end.subsec_millis() as i64; println!("u64 bench"); println!("elapsed: {} [msec]", elapsed); if elapsed != 0 { println!("times/s: {} [times/sec]", NUM_TRIALS * 1000 / elapsed); println!("sum: {}", sum); } let mut sum: u64 = 0; let start = std::time::Instant::now(); for _ in 0..NUM_TRIALS { let all_one = Bitboard::ALL; for sq in all_one { sum += sq.0 as u64; } //let mut all_one = Bitboard::ALL; //while all_one.to_bool() { // let sq = all_one.pop_lsb_unchecked(); // sum += sq.0 as u64; //} } let end = start.elapsed(); let elapsed = (end.as_secs() * 1000) as i64 + end.subsec_millis() as i64; println!("bitboard bench"); println!("elapsed: {} [msec]", elapsed); if elapsed != 0 { println!("times/s: {} [times/sec]", NUM_TRIALS * 1000 / elapsed); println!("sum: {}", sum); } }
true
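The Bitboard iterator above yields set-bit indices in ascending order, low 64-bit half before the high half. Below is a test sketch that could be added to the same main.rs (the struct's fields are private to that file, so it only compiles alongside it); the module and test names are hypothetical.

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn pop_lsb_yields_ascending_squares() {
        // bits 1 and 3 set in the low half, bit 0 set in the high half (square 64)
        let bb = Bitboard { v: [0b1010, 0b1] };
        let squares: Vec<i32> = bb.map(|sq| sq.0).collect();
        assert_eq!(squares, vec![1, 3, 64]);
    }
}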
83afdcc05baaed0558869a7d5e121347c9bb8cd2
Rust
scoopr/gfx
/src/hal/src/pso/mod.rs
UTF-8
6,289
2.703125
3
[ "Apache-2.0" ]
permissive
//! Raw Pipeline State Objects //! //! This module contains items used to create and manage a raw pipeline state object. Most users //! will want to use the typed and safe `PipelineState`. See the `pso` module inside the `gfx` //! crate. use {device, pass}; use std::error::Error; use std::fmt; mod compute; mod descriptor; mod graphics; mod input_assembler; mod output_merger; pub use self::compute::*; pub use self::descriptor::*; pub use self::graphics::*; pub use self::input_assembler::*; pub use self::output_merger::*; use Backend; /// Error types happening upon PSO creation on the device side. #[derive(Clone, Debug, PartialEq)] pub enum CreationError { /// Unknown other error. Other, /// Invalid subpass (not part of renderpass). InvalidSubpass(pass::SubpassId), /// Shader compilation error. Shader(device::ShaderError), } impl fmt::Display for CreationError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match *self { CreationError::InvalidSubpass(id) => write!(f, "{}: {:?}", self.description(), id), CreationError::Shader(ref err) => write!(f, "{}: {:?}", self.description(), err), _ => write!(f, "{}", self.description()), } } } impl Error for CreationError { fn description(&self) -> &str { match *self { CreationError::Other => "Unknown other error.", CreationError::InvalidSubpass(_) => "Invalid subpass index.", CreationError::Shader(_) => "Shader compilation error.", } } } bitflags!( /// Stages of the logical pipeline. /// /// The pipeline is structured as given the by the ordering of the flags. /// Some stages are queue type dependent. #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] pub struct PipelineStage: u32 { /// Beginning of the command queue. const TOP_OF_PIPE = 0x1; /// Indirect data consumption. const DRAW_INDIRECT = 0x2; /// Vertex data consumption. const VERTEX_INPUT = 0x4; /// Vertex shader execution. const VERTEX_SHADER = 0x8; /// Hull shader execution. const HULL_SHADER = 0x10; /// Domain shader execution. const DOMAIN_SHADER = 0x20; /// Geometry shader execution. const GEOMETRY_SHADER = 0x40; /// Fragment shader execution. const FRAGMENT_SHADER = 0x80; /// Stage of early depth and stencil test. const EARLY_FRAGMENT_TESTS = 0x100; /// Stage of late depth and stencil test. const LATE_FRAGMENT_TESTS = 0x200; /// Stage of final color value calculation. const COLOR_ATTACHMENT_OUTPUT = 0x400; /// Compute shader execution, const COMPUTE_SHADER = 0x800; /// Copy/Transfer command execution. const TRANSFER = 0x1000; /// End of the command queue. const BOTTOM_OF_PIPE = 0x2000; /// Read/Write access from host. /// (Not a real pipeline stage) const HOST = 0x4000; } ); bitflags!( /// Combination of different shader pipeline stages. #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] pub struct ShaderStageFlags: u16 { /// Vertex shader stage. const VERTEX = 0x1; /// Hull (tessellation) shader stage. const HULL = 0x2; /// Domain (tessellation) shader stage. const DOMAIN = 0x4; /// Geometry shader stage. const GEOMETRY = 0x8; /// Fragment shader stage. const FRAGMENT = 0x10; /// Compute shader stage. const COMPUTE = 0x20; /// All graphics pipeline shader stages. const GRAPHICS = Self::VERTEX.bits | Self::HULL.bits | Self::DOMAIN.bits | Self::GEOMETRY.bits | Self::FRAGMENT.bits; /// All shader stages. const ALL = Self::GRAPHICS.bits | Self::COMPUTE.bits; } ); //Note: this type is only needed for backends, not used anywhere within gfx_core. /// Which program stage this shader represents. 
#[allow(missing_docs)] #[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[repr(u8)] pub enum Stage { Vertex, Hull, Domain, Geometry, Fragment, Compute } /// Shader entry point. #[derive(Debug, Copy)] pub struct EntryPoint<'a, B: Backend> { /// Entry point name. pub entry: &'a str, /// Shader module reference. pub module: &'a B::ShaderModule, /// Specialization info. pub specialization: &'a [Specialization], } impl<'a, B: Backend> Clone for EntryPoint<'a, B> { fn clone(&self) -> Self { EntryPoint { entry: self.entry, module: self.module, specialization: self.specialization, } } } bitflags!( /// Pipeline creation flags. #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] pub struct PipelineCreationFlags: u32 { /// Disable pipeline optimizations. /// /// May speedup pipeline creation. const DISABLE_OPTIMIZATION = 0x1; /// Allow derivatives of the pipeline. /// /// Must be set when pipelines set the pipeline as base. const ALLOW_DERIVATIVES = 0x2; } ); /// #[derive(Debug)] pub enum BasePipeline<'a, P: 'a> { /// Referencing an existing pipeline as parent. Pipeline(&'a P), /// A pipeline in the same create pipelines call. /// /// The index of the parent must be lower than the index of the child. Index(usize), /// None, } /// pub type BaseGraphics<'a, B: Backend> = BasePipeline<'a, B::GraphicsPipeline>; /// pub type BaseCompute<'a, B: Backend> = BasePipeline<'a, B::ComputePipeline>; /// Specialization information for pipelines. #[derive(Debug, Clone)] pub struct Specialization { /// Constant identifier in shader source. pub id: u32, /// Value to override specialization constant. pub value: Constant, } /// Scalar specialization constant with value for overriding. #[derive(Debug, Clone)] pub enum Constant { /// Bool(bool), /// U32(u32), /// U64(u64), /// I32(i32), /// I64(i64), /// F32(f32), /// F64(f64), }
true
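A quick sketch of how the bitflags-generated ShaderStageFlags above composes; it assumes the type is imported from the crate's pso module.

fn main() {
    // stage masks combine with bit-or and are queried with contains()
    let stages = ShaderStageFlags::VERTEX | ShaderStageFlags::FRAGMENT;
    assert!(ShaderStageFlags::GRAPHICS.contains(stages));
    assert!(!stages.contains(ShaderStageFlags::COMPUTE));

    // ALL is defined in the source as the union of the graphics stages and COMPUTE
    assert_eq!(
        ShaderStageFlags::ALL,
        ShaderStageFlags::GRAPHICS | ShaderStageFlags::COMPUTE
    );
}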
dbfa6e28934ec9fd7520f68a5b98ea54166cec59
Rust
fitzgen/preduce
/src/reducers.rs
UTF-8
23,970
2.78125
3
[]
no_license
//! Concrete implementations of `preduce::traits::Reducer`. use error; use is_executable::IsExecutable; use preduce_ipc_types::{FastForwardRequest, NewRequest, NextOnInterestingRequest, NextRequest, ReduceRequest, Request}; use preduce_ipc_types::{FastForwardResponse, NewResponse, NextOnInterestingResponse, NextResponse, ReduceResponse, Response}; use serde_json; use std::any::Any; use std::borrow::Cow; use std::cell::RefCell; use std::io::{self, BufRead, Write}; use std::path; use std::process; use std::sync::Arc; use tempdir; use test_case::{self, TestCaseMethods}; use traits::Reducer; impl Reducer for Box<Reducer> { fn name(&self) -> Cow<str> { (**self).name() } fn clone_boxed(&self) -> Box<Reducer> where Self: 'static, { (**self).clone_boxed() } fn new_state(&mut self, seed: &test_case::Interesting) -> error::Result<Box<Any + Send>> { (**self).new_state(seed) } fn clone_state(&self, state: &Box<Any + Send>) -> Box<Any + Send> { (**self).clone_state(state) } fn next_state( &mut self, seed: &test_case::Interesting, prev_state: &Box<Any + Send>, ) -> error::Result<Option<Box<Any + Send>>> { (**self).next_state(seed, prev_state) } fn next_state_on_interesting( &mut self, new_seed: &test_case::Interesting, old_seed: &test_case::Interesting, prev_state: &Box<Any + Send>, ) -> error::Result<Option<Box<Any + Send>>> { (**self).next_state_on_interesting(new_seed, old_seed, prev_state) } fn fast_forward_states( &mut self, seed: &test_case::Interesting, n: usize, prev_state: &Box<Any + Send>, ) -> error::Result<Option<Box<Any + Send>>> { (**self).fast_forward_states(seed, n, prev_state) } fn reduce( &mut self, seed: &test_case::Interesting, state: &Box<Any + Send>, ) -> error::Result<Option<test_case::Candidate>> { (**self).reduce(seed, state) } } /// A test case reducer that is implemented as an external script. /// /// See the `preduce_ipc_types` crate's documentation for information on the IPC /// protocol. /// /// See the `preduce_reducer_script` crate's documentation for example reducer /// scripts. /// /// ### Example Rust Usage /// /// ``` /// extern crate preduce; /// use preduce::traits::Reducer; /// /// # fn main() { fn _foo() -> preduce::error::Result<()> { /// // Create a `Reducer` that is implemented by a child process running an /// // external script. /// let mut script = preduce::reducers::Script::new("path/to/reducer/script")?; /// /// # let some_seed_test_case = || unimplemented!(); /// // Get some known-interesting seed test case to create candidates from. /// let seed = some_seed_test_case(); /// /// // Get the initial state for the given seed. /// let mut state = script.new_state(&seed)?; /// /// while let Some(candidate) = script.reduce(&seed, &state)? { /// println!("Here is a candidate: {:?}", candidate); /// /// // Advance to the next state. Alternatively, if this candidate was /// // interesting, use `next_state_on_interesting`. /// state = match script.next_state(&seed, &state)? { /// None => break, /// Some(s) => s, /// }; /// } /// # Ok(()) } } /// ``` #[derive(Debug)] pub struct Script { program: path::PathBuf, out_dir: Option<Arc<tempdir::TempDir>>, counter: usize, child: Option<process::Child>, child_stdin: Option<io::BufWriter<process::ChildStdin>>, child_stdout: Option<io::BufReader<process::ChildStdout>>, strict: bool, } impl Script { /// Construct a reducer script with the given `program`. 
pub fn new<S>(program: S) -> error::Result<Script> where S: AsRef<path::Path>, { if !program.as_ref().is_file() { return Err(error::Error::DoesNotExist(program.as_ref().into())); } if !program.as_ref().is_executable() { return Err(error::Error::IsNotExecutable(program.as_ref().into())); } let program = program.as_ref().canonicalize()?; Ok(Script { program: program, out_dir: None, counter: 0, child: None, child_stdin: None, child_stdout: None, strict: false, }) } /// Enable or disable extra strict checks on the reducer script. /// /// For example, enforce that generated candidates are smaller than the /// seed. pub fn set_strict(&mut self, be_strict: bool) { self.strict = be_strict; } fn spawn_child(&mut self) -> error::Result<()> { assert!(self.out_dir.is_none()); assert!(self.child.is_none()); assert!(self.child_stdin.is_none()); assert!(self.child_stdout.is_none()); self.out_dir = Some(Arc::new(tempdir::TempDir::new("preduce-reducer-script")?)); let mut cmd = process::Command::new(&self.program); cmd.current_dir(self.out_dir.as_ref().unwrap().path()) .stdin(process::Stdio::piped()) .stdout(process::Stdio::piped()); let mut child = cmd.spawn()?; let stdin = child.stdin.take().unwrap(); self.child_stdin = Some(io::BufWriter::with_capacity(1024 * 256, stdin)); let stdout = child.stdout.take().unwrap(); self.child_stdout = Some(io::BufReader::new(stdout)); self.child = Some(child); Ok(()) } /// Attempt to nicely tell the child to stop by sending it an empty line to /// use as the next "seed", whereupon it should exit cleanly, thus cleaning /// up any resources it was using (e.g. temporary files). fn shutdown_child(&mut self) { if let Some(mut child) = self.child.take() { if (|| -> error::Result<()> { { let mut child_stdin = self.child_stdin.as_mut().unwrap(); serde_json::to_writer(&mut child_stdin, &Request::Shutdown)?; writeln!(&mut child_stdin)?; child_stdin.flush()?; } self.child_stdin = None; child.wait()?; Ok(()) })() .is_err() { self.kill_child(); } self.child_stdout = None; self.out_dir = None; } } fn kill_child(&mut self) { self.child_stdin = None; if let Some(mut child) = self.child.take() { let _ = child.kill(); let _ = child.wait(); } self.child_stdout = None; self.out_dir = None; } fn next_temp_file(&mut self) -> error::Result<test_case::TempFile> { let mut file_name = String::from("candidate"); file_name.push_str(&self.counter.to_string()); self.counter += 1; let file_path = path::PathBuf::from(file_name); test_case::TempFile::new(self.out_dir.as_ref().unwrap().clone(), file_path) } fn misbehaving_reducer_script<T>(&mut self, details: String) -> error::Result<T> { self.kill_child(); Err(error::Error::MisbehavingReducerScript(details)) } fn downcast(state: &Box<Any + Send>) -> &serde_json::Value { state.downcast_ref::<serde_json::Value>().unwrap() } fn request(&mut self, request: Request) -> error::Result<Response> { assert!(self.child.is_some()); assert!(self.child_stdout.is_some()); match (|| { let mut stdin = self.child_stdin.as_mut().unwrap(); serde_json::to_writer(&mut stdin, &request)?; write!(&mut stdin, "\n")?; stdin.flush()?; let stdout = self.child_stdout.as_mut().unwrap(); let mut line = String::new(); stdout.read_line(&mut line)?; let response: Response = serde_json::from_str(&line)?; Ok(response) })() { r @ Ok(_) => r, e @ Err(_) => { self.kill_child(); e } } } } impl Drop for Script { fn drop(&mut self) { self.shutdown_child(); } } impl Reducer for Script { fn name(&self) -> Cow<str> { self.program.to_string_lossy() } fn clone_boxed(&self) -> Box<Reducer> where 
Self: 'static, { Box::new(Script { program: self.program.clone(), out_dir: None, counter: 0, child: None, child_stdin: None, child_stdout: None, strict: self.strict, }) } fn new_state(&mut self, seed: &test_case::Interesting) -> error::Result<Box<Any + Send>> { if self.child.is_none() { self.spawn_child()?; } let response = self.request(Request::New(NewRequest { seed: seed.path().into(), }))?; match response { Response::New(NewResponse { state }) => Ok(Box::new(state)), otherwise => { let program = self.program.to_string_lossy().to_string(); self.misbehaving_reducer_script(format!( "Expected a `Response::New` in response to a `Request::New` request; \ got `{:?}` from '{}'", otherwise, program )) } } } fn clone_state(&self, state: &Box<Any + Send>) -> Box<Any + Send> { Box::new(Self::downcast(state).clone()) } fn next_state( &mut self, seed: &test_case::Interesting, state: &Box<Any + Send>, ) -> error::Result<Option<Box<Any + Send>>> { // It's possible that we killed the child for misbehaving since we // generated this state, so we can't assert that the child exists. if self.child.is_none() { self.spawn_child()?; } let state = Self::downcast(state); let response = self.request(Request::Next(NextRequest { seed: seed.path().into(), state: state.clone(), }))?; match response { Response::Next(NextResponse { next_state }) => { Ok(next_state.map(|ns| Box::new(ns) as Box<Any + Send>)) } otherwise => { let program = self.program.to_string_lossy().to_string(); self.misbehaving_reducer_script(format!( "Expected a `Response::Next` in response to a `Request::Next` request; \ got `{:?}` from '{}'", otherwise, program )) } } } fn next_state_on_interesting( &mut self, new_seed: &test_case::Interesting, old_seed: &test_case::Interesting, state: &Box<Any + Send>, ) -> error::Result<Option<Box<Any + Send>>> { if self.child.is_none() { self.spawn_child()?; } let state = Self::downcast(state); let response = self.request(Request::NextOnInteresting(NextOnInterestingRequest { new_seed: new_seed.path().into(), old_seed: old_seed.path().into(), state: state.clone(), }))?; match response { Response::NextOnInteresting(NextOnInterestingResponse { next_state }) => { Ok(next_state.map(|ns| Box::new(ns) as Box<Any + Send>)) } otherwise => { let program = self.program.to_string_lossy().to_string(); self.misbehaving_reducer_script(format!( "Expected a `Response::NextOnInteresting` in response to a \ `Request::NextOnInteresting` request; got `{:?}` from '{}'", otherwise, program )) } } } fn fast_forward_states( &mut self, seed: &test_case::Interesting, n: usize, state: &Box<Any + Send>, ) -> error::Result<Option<Box<Any + Send>>> { if self.child.is_none() { self.spawn_child()?; } let state = Self::downcast(state); let response = self.request(Request::FastForward(FastForwardRequest { seed: seed.path().into(), n, state: state.clone(), }))?; match response { Response::FastForward(FastForwardResponse { next_state }) => { Ok(next_state.map(|ns| Box::new(ns) as Box<Any + Send>)) } otherwise => { let program = self.program.to_string_lossy().to_string(); self.misbehaving_reducer_script(format!( "Expected a `Response::FastForward` in response to a `Request::FastForward` \ request; got `{:?}` from '{}'", otherwise, program )) } } } fn reduce( &mut self, seed: &test_case::Interesting, state: &Box<Any + Send>, ) -> error::Result<Option<test_case::Candidate>> { if self.child.is_none() { self.spawn_child()?; } let state = Self::downcast(state); let temp_file = self.next_temp_file()?; let response = 
self.request(Request::Reduce(ReduceRequest { seed: seed.path().into(), state: state.clone(), dest: temp_file.path().into(), }))?; match response { Response::Reduce(ReduceResponse { reduced: true }) => { if !temp_file.path().is_file() { let program = self.program.to_string_lossy().to_string(); return self.misbehaving_reducer_script(format!( "'{}' did not generate a test case file at {}", program, temp_file.path().display() )); } Ok(Some(test_case::Candidate::new( seed.clone(), self.program.to_string_lossy(), temp_file, )?)) } Response::Reduce(ReduceResponse { reduced: false }) => Ok(None), otherwise => { let program = self.program.to_string_lossy().to_string(); self.misbehaving_reducer_script(format!( "Expected a `Response::Reduce` in response to a `Request::Reduce` request; \ got {:?} from '{}'", otherwise, program )) } } } } /// A reducer which ends after the first `Ok(None)` or `Err`. /// /// Analogous to [`std::iter::Iterator::fuse`][iterfuse]. The `Fuse` combinator /// ensures that once a reducer has either yielded an error or signaled /// exhaustion, that it will always return `Ok(None)` forever after, until it is /// reconfigured with a new seed or state. /// /// [iterfuse]: https://doc.rust-lang.org/nightly/std/iter/trait.Iterator.html#method.fuse /// /// ### Example /// /// ``` /// extern crate preduce; /// use preduce::traits::Reducer; /// /// # fn main() { fn _foo() -> preduce::error::Result<()> { /// let script = preduce::reducers::Script::new("/path/to/some/reducer/script")?; /// let mut fused = preduce::reducers::Fuse::new(script); /// /// # let some_seed_test_case = || unimplemented!(); /// let seed = some_seed_test_case(); /// let mut state = fused.new_state(&seed)?; /// /// while let Some(candidate) = fused.reduce(&seed, &state)? { /// println!("A candidate is {:?}", candidate); /// /// // Advance to the next state. Alternatively, if this candidate was /// // interesting, use `next_state_on_interesting`. /// state = match fused.next_state(&seed, &state)? { /// None => break, /// Some(s) => s, /// }; /// } /// /// // This will always hold true until `fused` is reconfigured with some new /// // seed or state. /// assert_eq!(fused.reduce(&seed, &state).unwrap(), None); /// assert_eq!(fused.reduce(&seed, &state).unwrap(), None); /// assert_eq!(fused.reduce(&seed, &state).unwrap(), None); /// # Ok(()) } } /// ``` #[derive(Clone, Debug, PartialEq, Eq)] pub struct Fuse<R> { inner: R, } #[derive(Debug)] enum FuseState { Finished, NotFinished(Box<Any + Send>), } impl<R> Fuse<R> { /// Ensure that the given `reducer` ends after having emitted `Ok(None)` or /// `Err`. 
pub fn new(inner: R) -> Fuse<R> { Fuse { inner: inner } } fn downcast<'a, 'b>(&'a self, state: &'b Box<Any + Send>) -> &'b RefCell<FuseState> { state .downcast_ref::<RefCell<FuseState>>() .expect("Fuse::downcast given unexpected state") } } impl<R> Reducer for Fuse<R> where R: Reducer, { fn name(&self) -> Cow<str> { self.inner.name() } fn clone_boxed(&self) -> Box<Reducer> where Self: 'static, { Box::new(Fuse { inner: self.inner.clone_boxed(), }) } fn new_state(&mut self, seed: &test_case::Interesting) -> error::Result<Box<Any + Send>> { let inner = self.inner.new_state(seed)?; Ok(Box::new(RefCell::new(FuseState::NotFinished(inner)))) } fn clone_state(&self, state: &Box<Any + Send>) -> Box<Any + Send> { let state = self.downcast(state); let state = state.borrow(); match *state { FuseState::Finished => Box::new(RefCell::new(FuseState::Finished)), FuseState::NotFinished(ref inner) => Box::new(RefCell::new( FuseState::NotFinished(self.inner.clone_state(inner)), )), } } fn next_state( &mut self, seed: &test_case::Interesting, prev_state: &Box<Any + Send>, ) -> error::Result<Option<Box<Any + Send>>> { let prev_state = self.downcast(prev_state); let mut prev_state = prev_state.borrow_mut(); let result = match *prev_state { FuseState::Finished => return Ok(None), FuseState::NotFinished(ref inner) => self.inner.next_state(seed, inner), }; match result { Ok(Some(inner)) => Ok(Some(Box::new(RefCell::new(FuseState::NotFinished(inner))))), result @ Ok(None) | result @ Err(_) => { *prev_state = FuseState::Finished; result } } } fn next_state_on_interesting( &mut self, new_seed: &test_case::Interesting, old_seed: &test_case::Interesting, prev_state: &Box<Any + Send>, ) -> error::Result<Option<Box<Any + Send>>> { let prev_state = self.downcast(prev_state); let mut prev_state = prev_state.borrow_mut(); let result = match *prev_state { FuseState::Finished => return Ok(None), FuseState::NotFinished(ref inner) => { self.inner .next_state_on_interesting(new_seed, old_seed, inner) } }; match result { Ok(Some(inner)) => Ok(Some(Box::new(RefCell::new(FuseState::NotFinished(inner))))), result @ Ok(None) | result @ Err(_) => { *prev_state = FuseState::Finished; result } } } fn fast_forward_states( &mut self, seed: &test_case::Interesting, n: usize, prev_state: &Box<Any + Send>, ) -> error::Result<Option<Box<Any + Send>>> { let prev_state = self.downcast(prev_state); let mut prev_state = prev_state.borrow_mut(); let result = match *prev_state { FuseState::Finished => return Ok(None), FuseState::NotFinished(ref inner) => self.inner.fast_forward_states(seed, n, inner), }; match result { Ok(Some(inner)) => Ok(Some(Box::new(RefCell::new(FuseState::NotFinished(inner))))), result @ Ok(None) | result @ Err(_) => { *prev_state = FuseState::Finished; result } } } fn reduce( &mut self, seed: &test_case::Interesting, state: &Box<Any + Send>, ) -> error::Result<Option<test_case::Candidate>> { let state = self.downcast(state); let mut state = state.borrow_mut(); let result = match *state { FuseState::Finished => return Ok(None), FuseState::NotFinished(ref inner) => self.inner.reduce(seed, inner), }; match result { result @ Ok(Some(_)) => result, result @ Ok(None) | result @ Err(_) => { *state = FuseState::Finished; result } } } } #[cfg(test)] mod tests { use super::*; use std::borrow::Cow; use test_case; use traits::Reducer; #[test] fn fuse() { #[derive(Clone, Debug)] struct Erratic(usize); impl Reducer for Erratic { fn name(&self) -> Cow<str> { Cow::from("Erratic") } fn clone_boxed(&self) -> Box<Reducer> where Self: 'static, { 
Box::new(self.clone()) } fn new_state(&mut self, _: &test_case::Interesting) -> error::Result<Box<Any + Send>> { Ok(Box::new(())) } fn clone_state(&self, _: &Box<Any + Send>) -> Box<Any + Send> { Box::new(()) } fn next_state( &mut self, _seed: &test_case::Interesting, _prev_state: &Box<Any + Send>, ) -> error::Result<Option<Box<Any + Send>>> { Ok(Some(Box::new(()))) } fn next_state_on_interesting( &mut self, _new_seed: &test_case::Interesting, _old_seed: &test_case::Interesting, _prev_state: &Box<Any + Send>, ) -> error::Result<Option<Box<Any + Send>>> { Ok(Some(Box::new(()))) } fn reduce( &mut self, _seed: &test_case::Interesting, _state: &Box<Any + Send>, ) -> error::Result<Option<test_case::Candidate>> { let result = match self.0 % 3 { 0 => Ok(Some(test_case::Candidate::testing_only_new())), 1 => Ok(None), 2 => Err(error::Error::MisbehavingReducerScript("TEST".into())), _ => unreachable!(), }; self.0 += 1; result } } let seed = test_case::Interesting::testing_only_new(); let mut reducer = Erratic(0); let state = reducer.new_state(&seed).unwrap(); assert!(reducer.reduce(&seed, &state).unwrap().is_some()); assert!(reducer.reduce(&seed, &state).unwrap().is_none()); assert!(reducer.reduce(&seed, &state).is_err()); assert!(reducer.reduce(&seed, &state).unwrap().is_some()); let mut reducer = Fuse::new(Erratic(0)); let state = reducer.new_state(&seed).unwrap(); assert!(reducer.reduce(&seed, &state).unwrap().is_some()); assert!(reducer.reduce(&seed, &state).unwrap().is_none()); assert!(reducer.reduce(&seed, &state).unwrap().is_none()); assert!(reducer.reduce(&seed, &state).unwrap().is_none()); } #[test] fn not_executable() { match Script::new("./tests/fixtures/lorem-ipsum.txt") { Err(error::Error::IsNotExecutable(_)) => {} otherwise => { panic!("Expected Error::IsNotExecutable, found {:?}", otherwise); } } } }
true
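// A short usage sketch (an assumption, not part of the crate sources above):
// wrapping the external-process `Script` reducer in `Fuse`, so that once the
// child script errors or reports exhaustion it is never polled again. The
// script path is a placeholder.
extern crate preduce;
use preduce::reducers::{Fuse, Script};
use preduce::traits::Reducer;

fn fused_script_sketch() -> preduce::error::Result<()> {
    let script = Script::new("path/to/reducer/script")?;
    let reducer = Fuse::new(script);
    println!("running reducer: {}", reducer.name());
    Ok(())
}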
aa408b00f27242d9ea15c991ff1ac6fe99cb620e
Rust
PacktPublishing/Hands-On-Functional-Programming-in-Rust
/Chapter07/lazy_pattern.rs
UTF-8
8,499
3.09375
3
[ "MIT" ]
permissive
use std::rc::Rc; use std::sync::{Arc,Mutex}; #[derive(Clone)] struct LazyList<A: Clone> { buffer: Rc<Vec<A>>, index: usize } impl<A: Clone> LazyList<A> { fn new(buf: Vec<A>) -> LazyList<A> { LazyList { buffer: Rc::new(buf), index: 0 } } fn next(&self) -> Option<(LazyList<A>,A)> { if self.index < self.buffer.len() { let new_item = self.buffer[self.index].clone(); let new_index = self.index + 1; Some((LazyList { buffer: Rc::clone(&self.buffer), index: new_index },new_item)) } else { None } } } fn effects_bind<A,B,C,F,G>(f: F, g: G) -> impl Fn(A) -> C where F: Fn(A) -> B, G: Fn(B) -> C { move |a| g(f(a)) } fn launch_missiles(i: i32) -> i32 { println!("launching {} missiles", i); i } struct ReactiveUnit<St,A,B> { state: Arc<Mutex<St>>, event_handler: Arc<Fn(&mut St,A) -> B> } impl<St: 'static,A: 'static,B: 'static> ReactiveUnit<St,A,B> { fn new<F>(st: St, f: F) -> ReactiveUnit<St,A,B> where F: 'static + Fn(&mut St,A) -> B { ReactiveUnit { state: Arc::new(Mutex::new(st)), event_handler: Arc::new(f) } } fn bind<G,C>(&self, g: G) -> ReactiveUnit<St,A,C> where G: 'static + Fn(&mut St,B) -> C { let ev = Arc::clone(&self.event_handler); ReactiveUnit { state: Arc::clone(&self.state), event_handler: Arc::new(move |st: &mut St,a| { let r = ev(st,a); let r = g(st,r); r }) } } fn plus<St2: 'static,C: 'static>(&self, other: ReactiveUnit<St2,B,C>) -> ReactiveUnit<(Arc<Mutex<St>>,Arc<Mutex<St2>>),A,C> { let ev1 = Arc::clone(&self.event_handler); let st1 = Arc::clone(&self.state); let ev2 = Arc::clone(&other.event_handler); let st2 = Arc::clone(&other.state); ReactiveUnit { state: Arc::new(Mutex::new((st1,st2))), event_handler: Arc::new(move |stst: &mut (Arc<Mutex<St>>,Arc<Mutex<St2>>),a| { let mut st1 = stst.0.lock().unwrap(); let r = ev1(&mut st1, a); let mut st2 = stst.1.lock().unwrap(); let r = ev2(&mut st2, r); r }) } } fn apply(&self, a: A) -> B { let mut st = self.state.lock().unwrap(); (self.event_handler)(&mut st, a) } } fn main() { 2 + 3; || 2 + 3; let ll = LazyList::new(vec![1,2,3]); let (ll1,a1) = ll.next().expect("expect 1 item"); println!("lazy item 1: {}", a1); let (ll2,a2) = ll1.next().expect("expect 2 item"); println!("lazy item 2: {}", a2); let (ll3,a3) = ll2.next().expect("expect 3 item"); println!("lazy item 3: {}", a3); let (ll2,a2) = ll1.next().expect("expect 2 item"); println!("lazy item 2: {}", a2); let e1 = |i| { println!("before {}", i); i }; let e2 = |i| { launch_missiles(i) }; let e3 = |i| { println!("after {}", i); i }; let s = effects_bind(effects_bind(e1,e2),e3); s(22); println!("\nrender 1:"); let render1 = ReactiveUnit::new((),|(),()| { let html = r###"$('body').innerHTML = ' <header> <h3 data-section="1" class="active">Section 1</h3> <h3 data-section="2">Section 2</h3> <h3 data-section="3">Section 3</h3> </header> <div>page content</div> <footer>Copyright</footer> ';"###; html.to_string() }); println!("{}", render1.apply(())); println!("\nrender 2:"); let render2 = ReactiveUnit::new((),|(),section: usize| { let section_1 = r###"$('body').innerHTML = ' <header> <h3 data-section="1" class="active">Section 1</h3> <h3 data-section="2">Section 2</h3> <h3 data-section="3">Section 3</h3> </header> <div>section 1 content</div> <footer>Copyright</footer> ';"###; let section_2 = r###"$('body').innerHTML = ' <header> <h3 data-section="1">Section 1</h3> <h3 data-section="2" class="active">Section 2</h3> <h3 data-section="3">Section 3</h3> </header> <div>section 2 content</div> <footer>Copyright</footer> ';"###; let section_3 = r###"$('body').innerHTML = ' <header> <h3 
data-section="1">Section 1</h3> <h3 data-section="2">Section 2</h3> <h3 data-section="3" class="active">Section 3</h3> </header> <div>section 3 content</div> <footer>Copyright</footer> ';"###; if section==1 { section_1.to_string() } else if section==2 { section_2.to_string() } else if section==3 { section_3.to_string() } else { panic!("unknown section") } }); println!("{}", render2.apply(1)); println!("{}", render2.apply(2)); println!("{}", render2.apply(3)); let render3header = ReactiveUnit::new(None,|opsec: &mut Option<usize>,section: usize| { let section_1 = r###"$('header').innerHTML = ' <h3 data-section="1" class="active">Section 1</h3> <h3 data-section="2">Section 2</h3> <h3 data-section="3">Section 3</h3> ';"###; let section_2 = r###"$('header').innerHTML = ' <h3 data-section="1">Section 1</h3> <h3 data-section="2" class="active">Section 2</h3> <h3 data-section="3">Section 3</h3> ';"###; let section_3 = r###"$('header').innerHTML = ' <h3 data-section="1">Section 1</h3> <h3 data-section="2">Section 2</h3> <h3 data-section="3" class="active">Section 3</h3> ';"###; let changed = if section==1 { section_1 } else if section==2 { section_2 } else if section==3 { section_3 } else { panic!("invalid section") }; if let Some(sec) = *opsec { if sec==section { "" } else { *opsec = Some(section); changed } } else { *opsec = Some(section); changed } }); let render3content = ReactiveUnit::new(None,|opsec: &mut Option<usize>,section: usize| { let section_1 = r###"$('div#content').innerHTML = ' section 1 content ';"###; let section_2 = r###"$('div#content').innerHTML = ' section 2 content ';"###; let section_3 = r###"$('div#content').innerHTML = ' section 3 content ';"###; let changed = if section==1 { section_1 } else if section==2 { section_2 } else if section==3 { section_3 } else { panic!("invalid section") }; if let Some(sec) = *opsec { if sec==section { "" } else { *opsec = Some(section); changed } } else { *opsec = Some(section); changed } }); let render3 = ReactiveUnit::new((render3header,render3content), |(rheader,rcontent),section: usize| { let header = rheader.apply(section); let content = rcontent.apply(section); format!("{}{}", header, content) }); println!("section 1: {}", render3.apply(1)); println!("section 2: {}", render3.apply(2)); println!("section 2: {}", render3.apply(2)); println!("section 3: {}", render3.apply(3)); let database = ("hello world", 5, 2); let react1 = ReactiveUnit::new((database,render3), |(database,render),evt:(&str,&str)| { match evt { ("header button click",n) => render.apply(n.parse::<usize>().unwrap()), ("text submission",s) => { database.0 = s; format!("db.textfield1.set(\"{}\")",s) }, ("number 1 submission",n) => { database.1 += n.parse::<i32>().unwrap(); format!("db.numfield1.set(\"{}\")",database.1) }, ("number 2 submission",n) => { database.2 += n.parse::<i32>().unwrap(); format!("db.numfield2.set(\"{}\")",database.2) }, _ => "".to_string() } }); println!("react 1: {}", react1.apply(("header button click","2"))); println!("react 1: {}", react1.apply(("header button click","2"))); println!("react 1: {}", react1.apply(("text submission","abc def"))); println!("react 1: {}", react1.apply(("number 1 submission","123"))); println!("react 1: {}", react1.apply(("number 1 submission","234"))); println!("react 1: {}", react1.apply(("number 2 submission","333"))); println!("react 1: {}", react1.apply(("number 2 submission","222"))); }
true
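// A smaller composition sketch using the `ReactiveUnit` defined above (the
// closures and numbers are illustrative only): `bind` post-processes a
// handler's output while sharing the same state, and `apply` pushes an event
// through the pipeline.
fn reactive_unit_sketch() {
    // A running counter: state is the total, events are increments.
    let counter = ReactiveUnit::new(0u32, |count: &mut u32, step: u32| {
        *count += step;
        *count
    });
    // `bind` shares the counter's state and doubles whatever it reports.
    let doubled = counter.bind(|_count: &mut u32, total: u32| total * 2);
    assert_eq!(doubled.apply(1), 2); // total 1, doubled
    assert_eq!(doubled.apply(2), 6); // total 3 (state is shared), doubled
}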
74d550809aeee1417eb570131aa332a4b66acf3d
Rust
jvff/pegast
/src/rules/zero_or_more.rs
UTF-8
769
2.625
3
[]
no_license
use {
    crate::{input::Input, ParseError, PegAstNode},
    std::{borrow::Cow, iter},
};

impl<R: PegAstNode> PegAstNode for Vec<R> {
    fn parse(input: &mut impl Input) -> Result<Self, ParseError> {
        Ok(iter::from_fn(|| R::parse(input).ok()).collect())
    }

    fn parsed_string(&self) -> Cow<'_, str> {
        let mut string = String::new();

        for element in self {
            string.push_str(element.parsed_string().as_ref());
        }

        Cow::Owned(string)
    }

    fn parsed_string_length(&self) -> usize {
        self.iter().map(PegAstNode::parsed_string_length).sum()
    }

    fn expecting() -> Vec<String> {
        let mut expecting = R::expecting();
        expecting.push("or nothing".to_owned());
        expecting
    }
}
true
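// Standalone sketch of the same "zero or more" strategy (no pegast types, so
// `Input` is replaced by a plain peekable char iterator): keep running a
// fallible step until it declines, collecting every success.
use std::iter;
use std::iter::Peekable;
use std::str::Chars;

fn next_digit(input: &mut Peekable<Chars<'_>>) -> Option<char> {
    match input.peek() {
        Some(c) if c.is_ascii_digit() => input.next(),
        _ => None,
    }
}

fn zero_or_more_demo() {
    let mut input = "123abc".chars().peekable();
    let digits: Vec<char> = iter::from_fn(|| next_digit(&mut input)).collect();
    assert_eq!(digits, vec!['1', '2', '3']);
}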
e3abc5a65545fc43cd52f9e0547a977ef5e336ef
Rust
SINHASantos/tunapanel
/src/panel.rs
UTF-8
3,871
3
3
[ "MIT", "Apache-2.0" ]
permissive
//! Definition of a panel. use serde::de::DeserializeOwned; use templates::{HTML, HANDLEBARS}; /// Trait for panels. /// /// You should not need to implement this yourself. /// The `impl`s are generated by the `tunapanel!` macro. pub trait Panel: DeserializeOwned { fn title() -> &'static str; fn widgets() -> HTML; } #[derive(Serialize)] struct PanelFields { title: &'static str, } pub fn panel_html<P: Panel>() -> HTML { let fields = PanelFields { title: <P as Panel>::title(), }; let mut html = String::new(); html.push_str(&HANDLEBARS.render("header", &fields).unwrap()); html.push_str(&<P as Panel>::widgets()); html.push_str(&HANDLEBARS.render("footer", &fields).unwrap()); html } #[cfg(test)] mod test { use std::default::Default; use serde_json; use widget::Button; tunapanel! { #[title = "test panel"] #[derive(Debug)] struct Panel { #[label = "A float"] x: f32 = 0.0, #[label = "A string"] y: String = String::new(), } } #[test] fn direct_deserialize() { let s = r#" { "x": 3.4, "y": "hello" } "#; let d: Panel = serde_json::from_str(&s).unwrap(); assert_eq!(d.x, 3.4); assert_eq!(d.y, "hello"); } #[test] fn panel_html() { let html = super::panel_html::<Panel>(); assert!(html.contains(r#"<title>test panel</title>"#)); assert!(html.contains(r#"tunapanel_name="x""#)); assert!(html.contains(r#"tunapanel_name="y""#)); assert!(html.contains(r#"<td id="tunapanel_status"></td>"#)); assert!(html.contains(r#"https://code.jquery.com"#)); assert!(html.contains(r#"JSON.stringify(obj)"#)); } tunapanel! { #[title = "test panel"] struct EscTest { #[label = "Test \' escaping <<>"] x: f32 = 0.0, #[label = "& another `test%"] y: String = "Attribute \" escaping \'".to_owned(), } } #[test] fn escaping() { let html = super::panel_html::<EscTest>(); assert!(html.contains(r#"Test ' escaping &lt;&lt;&gt;"#)); assert!(html.contains(r#"&amp; another `test%"#)); assert!(html.contains(r#""Attribute &quot; escaping '""#)); } tunapanel! { #[title = "test panel"] struct Types { #[label = "u8"] f_u8: u8 = 0, #[label = "u16"] f_u16: u16 = 0, #[label = "u32"] f_u32: u32 = 0, #[label = "u64"] f_u64: u64 = 0, #[label = "usize"] f_usize: usize = 0, #[label = "i8"] f_i8: i8 = 0, #[label = "i16"] f_i16: i16 = 0, #[label = "i32"] f_i32: i32 = 0, #[label = "i64"] f_i64: i64 = 0, #[label = "isize"] f_isize: isize = 0, #[label = "f32"] f_f32: f32 = 0.0, #[label = "f64"] f_f64: f64 = 0.0, #[label = "bool"] f_bool: bool = false, #[label = "button"] f_button: Button = Button::new(), } } #[test] fn types() { let html = super::panel_html::<Types>(); for name in &["u8", "u16", "u32", "u64", "usize", "i8", "i16", "i32", "i64", "isize", "f32", "f64", "bool", "button"] { let search = format!(r#"tunapanel_name="f_{}""#, name); assert!(html.contains(&search)); } } mod inner { tunapanel! { #[title = "test panel"] pub struct Panel { #[label = "A float"] x: f32 = 0.0, } } } #[test] fn pub_panel() { let _p = inner::Panel::default(); } }
true
40705375880d340089571086d490da33c448d0ec
Rust
AntoineGagne/crypt3
/rust_src/crypt3_nif/src/bindings.rs
UTF-8
824
2.875
3
[ "BSD-2-Clause", "BSD-3-Clause" ]
permissive
use std::ffi::{CStr, CString};
use std::os::raw::c_char;

pub enum EncodedField {
    Key,
    Salt,
}

pub enum EncryptionError {
    Encoding(EncodedField),
    Decoding(String),
}

pub fn encrypt(key: String, salt: String) -> Result<String, EncryptionError> {
    // Keep the `CString`s alive in named bindings for the whole `crypt` call;
    // a pointer taken from a temporary `CString` dangles as soon as the
    // statement that created it ends.
    let key = CString::new(key).map_err(|_| EncryptionError::Encoding(EncodedField::Key))?;
    let salt = CString::new(salt).map_err(|_| EncryptionError::Encoding(EncodedField::Salt))?;
    unsafe {
        Ok(CStr::from_ptr(crypt(key.as_ptr(), salt.as_ptr()))
            .to_str()
            .map_err(|x| EncryptionError::Decoding(x.to_string()))?
            .to_owned())
    }
}

#[link(name = "crypt")]
extern "C" {
    fn crypt(key: *const c_char, salt: *const c_char) -> *const c_char;
}
true
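// Hedged usage sketch for the binding above. The salt format (here a SHA-512
// "$6$...$" prefix) and the plaintext are placeholders; what crypt(3) accepts
// depends on the platform's libcrypt.
fn encrypt_sketch() {
    match encrypt("correct horse".to_string(), "$6$somesalt$".to_string()) {
        Ok(hash) => println!("hashed: {}", hash),
        Err(_) => eprintln!("hashing failed"),
    }
}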
235004e719ed0bc788194148a40f019feaeb8d4a
Rust
theotherphil/rtiow
/src/camera.rs
UTF-8
1,881
3.203125
3
[]
no_license
use rand::prelude::*;
use std::f32::consts::PI;

use crate::ray::*;
use crate::vec3::*;

fn random_point_in_unit_disk() -> Vec3 {
    let mut rng = thread_rng();
    loop {
        let p = 2.0 * Vec3::new(rng.gen(), rng.gen(), 0.0) - Vec3::new(1.0, 1.0, 0.0);
        if dot(p, p) < 1.0 {
            return p;
        }
    }
}

#[derive(Debug)]
pub struct Camera {
    origin: Vec3,
    lower_left_corner: Vec3,
    horizontal: Vec3,
    vertical: Vec3,
    u: Vec3,
    v: Vec3,
    lens_radius: f32
}

impl Camera {
    pub fn new(
        look_from: Vec3,
        look_at: Vec3,
        view_up: Vec3,
        vfov: f32,
        aspect: f32,
        aperture: f32,
        focus_distance: f32
    ) -> Camera {
        let theta = vfov * PI / 180.0;
        let half_height = (theta / 2.0).tan();
        let half_width = aspect * half_height;

        let w = unit_vector(look_from - look_at);
        let u = unit_vector(cross(view_up, w));
        let v = cross(w, u);

        let lower_left_corner = look_from
            - half_width * focus_distance * u
            - half_height * focus_distance * v
            - w * focus_distance;

        Camera {
            origin: look_from,
            lower_left_corner: lower_left_corner,
            horizontal: 2.0 * half_width * focus_distance * u,
            vertical: 2.0 * half_height * focus_distance * v,
            u: u,
            v: v,
            lens_radius: aperture / 2.0
        }
    }

    pub fn get_ray(&self, u: f32, v: f32) -> Ray {
        //let rd = self.lens_radius * random_point_in_unit_disk();
        //let offset = self.u * rd.x()+ self.v * rd.y();
        Ray::new(
            self.origin,// + offset,
            self.lower_left_corner + u * self.horizontal + v * self.vertical - self.origin //- offset
        )
    }
}
true
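// Minimal driving sketch for the camera above (all constants are arbitrary):
// a 90-degree vertical FOV, 2:1 aspect camera at the origin looking down -z,
// asked for the ray through the centre of the image plane. `Vec3` comes from
// the crate's `vec3` module.
fn camera_sketch() {
    let camera = Camera::new(
        Vec3::new(0.0, 0.0, 0.0),  // look_from
        Vec3::new(0.0, 0.0, -1.0), // look_at
        Vec3::new(0.0, 1.0, 0.0),  // view_up
        90.0,                      // vfov in degrees
        2.0,                       // aspect ratio
        0.0,                       // aperture (defocus blur is commented out above)
        1.0,                       // focus distance
    );
    let _centre_ray = camera.get_ray(0.5, 0.5);
    println!("{:?}", camera);
}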
506cd68377aa8745061cffe49c611c9eb04e5b3a
Rust
rcore-os/arceos
/crates/slab_allocator/src/lib.rs
UTF-8
10,968
2.9375
3
[ "Apache-2.0", "AGPL-3.0-only", "LicenseRef-scancode-mulanpubl-2.0", "AGPL-3.0-or-later", "GPL-3.0-only", "MulanPSL-2.0", "LicenseRef-scancode-unknown-license-reference", "LicenseRef-scancode-mulanpsl-2.0-en" ]
permissive
//! Slab allocator for `no_std` systems. It uses multiple slabs with blocks of //! different sizes and a [buddy_system_allocator] for blocks larger than 4096 //! bytes. //! //! It's based on <https://github.com/weclaw1/slab_allocator>. //! //! [buddy_system_allocator]: https://docs.rs/buddy_system_allocator/latest/buddy_system_allocator/ #![feature(allocator_api)] #![no_std] extern crate alloc; extern crate buddy_system_allocator; use alloc::alloc::{AllocError, Layout}; use core::ptr::NonNull; #[cfg(test)] mod tests; mod slab; use slab::Slab; const SET_SIZE: usize = 64; const MIN_HEAP_SIZE: usize = 0x8000; enum HeapAllocator { Slab64Bytes, Slab128Bytes, Slab256Bytes, Slab512Bytes, Slab1024Bytes, Slab2048Bytes, Slab4096Bytes, BuddyAllocator, } /// A fixed size heap backed by multiple slabs with blocks of different sizes. /// Allocations over 4096 bytes are served by linked list allocator. pub struct Heap { slab_64_bytes: Slab<64>, slab_128_bytes: Slab<128>, slab_256_bytes: Slab<256>, slab_512_bytes: Slab<512>, slab_1024_bytes: Slab<1024>, slab_2048_bytes: Slab<2048>, slab_4096_bytes: Slab<4096>, buddy_allocator: buddy_system_allocator::Heap<32>, } impl Heap { /// Creates a new heap with the given `heap_start_addr` and `heap_size`. The start address must be valid /// and the memory in the `[heap_start_addr, heap_start_addr + heap_size)` range must not be used for /// anything else. /// /// # Safety /// This function is unsafe because it can cause undefined behavior if the /// given address is invalid. pub unsafe fn new(heap_start_addr: usize, heap_size: usize) -> Heap { assert!( heap_start_addr % 4096 == 0, "Start address should be page aligned" ); assert!( heap_size >= MIN_HEAP_SIZE, "Heap size should be greater or equal to minimum heap size" ); assert!( heap_size % MIN_HEAP_SIZE == 0, "Heap size should be a multiple of minimum heap size" ); Heap { slab_64_bytes: Slab::<64>::new(0, 0), slab_128_bytes: Slab::<128>::new(0, 0), slab_256_bytes: Slab::<256>::new(0, 0), slab_512_bytes: Slab::<512>::new(0, 0), slab_1024_bytes: Slab::<1024>::new(0, 0), slab_2048_bytes: Slab::<2048>::new(0, 0), slab_4096_bytes: Slab::<4096>::new(0, 0), buddy_allocator: { let mut buddy = buddy_system_allocator::Heap::<32>::new(); buddy.init(heap_start_addr, heap_size); buddy }, } } /// Adds memory to the heap. The start address must be valid /// and the memory in the `[mem_start_addr, mem_start_addr + heap_size)` range must not be used for /// anything else. /// /// # Safety /// This function is unsafe because it can cause undefined behavior if the /// given address is invalid. pub unsafe fn add_memory(&mut self, heap_start_addr: usize, heap_size: usize) { assert!( heap_start_addr % 4096 == 0, "Start address should be page aligned" ); assert!( heap_size % 4096 == 0, "Add Heap size should be a multiple of page size" ); self.buddy_allocator .add_to_heap(heap_start_addr, heap_start_addr + heap_size); } /// Adds memory to the heap. The start address must be valid /// and the memory in the `[mem_start_addr, mem_start_addr + heap_size)` range must not be used for /// anything else. /// In case of linked list allocator the memory can only be extended. /// /// # Safety /// This function is unsafe because it can cause undefined behavior if the /// given address is invalid. 
unsafe fn _grow(&mut self, mem_start_addr: usize, mem_size: usize, slab: HeapAllocator) { match slab { HeapAllocator::Slab64Bytes => self.slab_64_bytes.grow(mem_start_addr, mem_size), HeapAllocator::Slab128Bytes => self.slab_128_bytes.grow(mem_start_addr, mem_size), HeapAllocator::Slab256Bytes => self.slab_256_bytes.grow(mem_start_addr, mem_size), HeapAllocator::Slab512Bytes => self.slab_512_bytes.grow(mem_start_addr, mem_size), HeapAllocator::Slab1024Bytes => self.slab_1024_bytes.grow(mem_start_addr, mem_size), HeapAllocator::Slab2048Bytes => self.slab_2048_bytes.grow(mem_start_addr, mem_size), HeapAllocator::Slab4096Bytes => self.slab_4096_bytes.grow(mem_start_addr, mem_size), HeapAllocator::BuddyAllocator => self .buddy_allocator .add_to_heap(mem_start_addr, mem_start_addr + mem_size), } } /// Allocates a chunk of the given size with the given alignment. Returns a pointer to the /// beginning of that chunk if it was successful. Else it returns `Err`. /// This function finds the slab of lowest size which can still accommodate the given chunk. /// The runtime is in `O(1)` for chunks of size <= 4096, and `O(n)` when chunk size is > 4096, pub fn allocate(&mut self, layout: Layout) -> Result<usize, AllocError> { match Heap::layout_to_allocator(&layout) { HeapAllocator::Slab64Bytes => self .slab_64_bytes .allocate(layout, &mut self.buddy_allocator), HeapAllocator::Slab128Bytes => self .slab_128_bytes .allocate(layout, &mut self.buddy_allocator), HeapAllocator::Slab256Bytes => self .slab_256_bytes .allocate(layout, &mut self.buddy_allocator), HeapAllocator::Slab512Bytes => self .slab_512_bytes .allocate(layout, &mut self.buddy_allocator), HeapAllocator::Slab1024Bytes => self .slab_1024_bytes .allocate(layout, &mut self.buddy_allocator), HeapAllocator::Slab2048Bytes => self .slab_2048_bytes .allocate(layout, &mut self.buddy_allocator), HeapAllocator::Slab4096Bytes => self .slab_4096_bytes .allocate(layout, &mut self.buddy_allocator), HeapAllocator::BuddyAllocator => self .buddy_allocator .alloc(layout) .map(|ptr| ptr.as_ptr() as usize) .map_err(|_| AllocError), } } /// Frees the given allocation. `ptr` must be a pointer returned /// by a call to the `allocate` function with identical size and alignment. Undefined /// behavior may occur for invalid arguments, thus this function is unsafe. /// /// This function finds the slab which contains address of `ptr` and adds the blocks beginning /// with `ptr` address to the list of free blocks. /// This operation is in `O(1)` for blocks <= 4096 bytes and `O(n)` for blocks > 4096 bytes. /// /// # Safety /// This function is unsafe because it can cause undefined behavior if the /// given address is invalid. pub unsafe fn deallocate(&mut self, ptr: usize, layout: Layout) { match Heap::layout_to_allocator(&layout) { HeapAllocator::Slab64Bytes => self.slab_64_bytes.deallocate(ptr), HeapAllocator::Slab128Bytes => self.slab_128_bytes.deallocate(ptr), HeapAllocator::Slab256Bytes => self.slab_256_bytes.deallocate(ptr), HeapAllocator::Slab512Bytes => self.slab_512_bytes.deallocate(ptr), HeapAllocator::Slab1024Bytes => self.slab_1024_bytes.deallocate(ptr), HeapAllocator::Slab2048Bytes => self.slab_2048_bytes.deallocate(ptr), HeapAllocator::Slab4096Bytes => self.slab_4096_bytes.deallocate(ptr), HeapAllocator::BuddyAllocator => self .buddy_allocator .dealloc(NonNull::new(ptr as *mut u8).unwrap(), layout), } } /// Returns bounds on the guaranteed usable size of a successful /// allocation created with the specified `layout`. 
pub fn usable_size(&self, layout: Layout) -> (usize, usize) { match Heap::layout_to_allocator(&layout) { HeapAllocator::Slab64Bytes => (layout.size(), 64), HeapAllocator::Slab128Bytes => (layout.size(), 128), HeapAllocator::Slab256Bytes => (layout.size(), 256), HeapAllocator::Slab512Bytes => (layout.size(), 512), HeapAllocator::Slab1024Bytes => (layout.size(), 1024), HeapAllocator::Slab2048Bytes => (layout.size(), 2048), HeapAllocator::Slab4096Bytes => (layout.size(), 4096), HeapAllocator::BuddyAllocator => (layout.size(), layout.size()), } } /// Finds allocator to use based on layout size and alignment fn layout_to_allocator(layout: &Layout) -> HeapAllocator { if layout.size() > 4096 { HeapAllocator::BuddyAllocator } else if layout.size() <= 64 && layout.align() <= 64 { HeapAllocator::Slab64Bytes } else if layout.size() <= 128 && layout.align() <= 128 { HeapAllocator::Slab128Bytes } else if layout.size() <= 256 && layout.align() <= 256 { HeapAllocator::Slab256Bytes } else if layout.size() <= 512 && layout.align() <= 512 { HeapAllocator::Slab512Bytes } else if layout.size() <= 1024 && layout.align() <= 1024 { HeapAllocator::Slab1024Bytes } else if layout.size() <= 2048 && layout.align() <= 2048 { HeapAllocator::Slab2048Bytes } else { HeapAllocator::Slab4096Bytes } } /// Returns total memory size in bytes of the heap. pub fn total_bytes(&self) -> usize { self.slab_64_bytes.total_blocks() * 64 + self.slab_128_bytes.total_blocks() * 128 + self.slab_256_bytes.total_blocks() * 256 + self.slab_512_bytes.total_blocks() * 512 + self.slab_1024_bytes.total_blocks() * 1024 + self.slab_2048_bytes.total_blocks() * 2048 + self.slab_4096_bytes.total_blocks() * 4096 + self.buddy_allocator.stats_total_bytes() } /// Returns allocated memory size in bytes. pub fn used_bytes(&self) -> usize { self.slab_64_bytes.used_blocks() * 64 + self.slab_128_bytes.used_blocks() * 128 + self.slab_256_bytes.used_blocks() * 256 + self.slab_512_bytes.used_blocks() * 512 + self.slab_1024_bytes.used_blocks() * 1024 + self.slab_2048_bytes.used_blocks() * 2048 + self.slab_4096_bytes.used_blocks() * 4096 + self.buddy_allocator.stats_alloc_actual() } /// Returns available memory size in bytes. pub fn available_bytes(&self) -> usize { self.total_bytes() - self.used_bytes() } }
true
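// Illustrative sketch (the backing buffer and sizes are assumptions): hand the
// heap one page-aligned region of MIN_HEAP_SIZE bytes, then serve a 200-byte
// request, which falls into the 256-byte slab.
use alloc::alloc::Layout;

#[repr(align(4096))]
struct Backing([u8; 0x8000]);

static mut BACKING: Backing = Backing([0; 0x8000]);

unsafe fn heap_sketch() {
    let start = BACKING.0.as_ptr() as usize;
    let mut heap = Heap::new(start, 0x8000);

    let layout = Layout::from_size_align(200, 8).unwrap();
    let addr = heap.allocate(layout).expect("should fit in the 256-byte slab");
    heap.deallocate(addr, layout);
}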
ff3e387d1543cd5afb1198e11de7de23cac1b970
Rust
onelson/destiny2-api-rs
/codegen/src/models/destiny_entities_vendors_destiny_vendor_sale_item_component.rs
UTF-8
7,540
2.921875
3
[]
no_license
/* * Bungie.Net API * * These endpoints constitute the functionality exposed by Bungie.net, both for more traditional website functionality and for connectivity to Bungie video games and their related functionality. * * OpenAPI spec version: 2.0.0 * Contact: [email protected] * Generated by: https://github.com/swagger-api/swagger-codegen.git */ /// DestinyEntitiesVendorsDestinyVendorSaleItemComponent : Request this component if you want the details about an item being sold in relation to the character making the request: whether the character can buy it, whether they can afford it, and other data related to purchasing the item. Note that if you want instance, stats, etc... data for the item, you'll have to request additional components such as ItemInstances, ItemPerks etc... and acquire them from the DestinyVendorResponse's \"items\" property. #[derive(Debug, Serialize, Deserialize)] pub struct DestinyEntitiesVendorsDestinyVendorSaleItemComponent { /// The index into the DestinyVendorDefinition.itemList property. Note that this means Vendor data *is* Content Version dependent: make sure you have the latest content before you use Vendor data, or these indexes may mismatch. Most systems avoid this problem, but Vendors is one area where we are unable to reasonably avoid content dependency at the moment. #[serde(rename = "vendorItemIndex")] vendor_item_index: Option<i32>, /// The hash of the item being sold, as a quick shortcut for looking up the DestinyInventoryItemDefinition of the sale item. #[serde(rename = "itemHash")] item_hash: Option<i32>, /// A flag indicating whether the requesting character can buy the item, and if not the reasons why the character can't buy it. #[serde(rename = "saleStatus")] sale_status: Option<Object>, /// A summary of the current costs of the item. #[serde(rename = "costs")] costs: Option<Vec<::models::DestinyDestinyItemQuantity>>, /// If you can't buy the item due to a complex character state, these will be hashes for DestinyUnlockDefinitions that you can check to see messages regarding the failure (if the unlocks have human readable information: it is not guaranteed that Unlocks will have human readable strings, and your application will have to handle that) Prefer using failureIndexes instead. These are provided for informational purposes, but have largely been supplanted by failureIndexes. #[serde(rename = "requiredUnlocks")] required_unlocks: Option<Vec<i32>>, /// If any complex unlock states are checked in determining purchasability, these will be returned here along with the status of the unlock check. Prefer using failureIndexes instead. These are provided for informational purposes, but have largely been supplanted by failureIndexes. #[serde(rename = "unlockStatuses")] unlock_statuses: Option<Vec<::models::DestinyDestinyUnlockStatus>>, /// Indexes in to the \"failureStrings\" lookup table in DestinyVendorDefinition for the given Vendor. Gives some more reliable failure information for why you can't purchase an item. It is preferred to use these over requiredUnlocks and unlockStatuses: the latter are provided mostly in case someone can do something interesting with it that I didn't anticipate. #[serde(rename = "failureIndexes")] failure_indexes: Option<Vec<i32>> } impl DestinyEntitiesVendorsDestinyVendorSaleItemComponent { /// Request this component if you want the details about an item being sold in relation to the character making the request: whether the character can buy it, whether they can afford it, and other data related to purchasing the item. 
Note that if you want instance, stats, etc... data for the item, you'll have to request additional components such as ItemInstances, ItemPerks etc... and acquire them from the DestinyVendorResponse's \"items\" property. pub fn new() -> DestinyEntitiesVendorsDestinyVendorSaleItemComponent { DestinyEntitiesVendorsDestinyVendorSaleItemComponent { vendor_item_index: None, item_hash: None, sale_status: None, costs: None, required_unlocks: None, unlock_statuses: None, failure_indexes: None } } pub fn set_vendor_item_index(&mut self, vendor_item_index: i32) { self.vendor_item_index = Some(vendor_item_index); } pub fn with_vendor_item_index(mut self, vendor_item_index: i32) -> DestinyEntitiesVendorsDestinyVendorSaleItemComponent { self.vendor_item_index = Some(vendor_item_index); self } pub fn vendor_item_index(&self) -> Option<&i32> { self.vendor_item_index.as_ref() } pub fn reset_vendor_item_index(&mut self) { self.vendor_item_index = None; } pub fn set_item_hash(&mut self, item_hash: i32) { self.item_hash = Some(item_hash); } pub fn with_item_hash(mut self, item_hash: i32) -> DestinyEntitiesVendorsDestinyVendorSaleItemComponent { self.item_hash = Some(item_hash); self } pub fn item_hash(&self) -> Option<&i32> { self.item_hash.as_ref() } pub fn reset_item_hash(&mut self) { self.item_hash = None; } pub fn set_sale_status(&mut self, sale_status: Object) { self.sale_status = Some(sale_status); } pub fn with_sale_status(mut self, sale_status: Object) -> DestinyEntitiesVendorsDestinyVendorSaleItemComponent { self.sale_status = Some(sale_status); self } pub fn sale_status(&self) -> Option<&Object> { self.sale_status.as_ref() } pub fn reset_sale_status(&mut self) { self.sale_status = None; } pub fn set_costs(&mut self, costs: Vec<::models::DestinyDestinyItemQuantity>) { self.costs = Some(costs); } pub fn with_costs(mut self, costs: Vec<::models::DestinyDestinyItemQuantity>) -> DestinyEntitiesVendorsDestinyVendorSaleItemComponent { self.costs = Some(costs); self } pub fn costs(&self) -> Option<&Vec<::models::DestinyDestinyItemQuantity>> { self.costs.as_ref() } pub fn reset_costs(&mut self) { self.costs = None; } pub fn set_required_unlocks(&mut self, required_unlocks: Vec<i32>) { self.required_unlocks = Some(required_unlocks); } pub fn with_required_unlocks(mut self, required_unlocks: Vec<i32>) -> DestinyEntitiesVendorsDestinyVendorSaleItemComponent { self.required_unlocks = Some(required_unlocks); self } pub fn required_unlocks(&self) -> Option<&Vec<i32>> { self.required_unlocks.as_ref() } pub fn reset_required_unlocks(&mut self) { self.required_unlocks = None; } pub fn set_unlock_statuses(&mut self, unlock_statuses: Vec<::models::DestinyDestinyUnlockStatus>) { self.unlock_statuses = Some(unlock_statuses); } pub fn with_unlock_statuses(mut self, unlock_statuses: Vec<::models::DestinyDestinyUnlockStatus>) -> DestinyEntitiesVendorsDestinyVendorSaleItemComponent { self.unlock_statuses = Some(unlock_statuses); self } pub fn unlock_statuses(&self) -> Option<&Vec<::models::DestinyDestinyUnlockStatus>> { self.unlock_statuses.as_ref() } pub fn reset_unlock_statuses(&mut self) { self.unlock_statuses = None; } pub fn set_failure_indexes(&mut self, failure_indexes: Vec<i32>) { self.failure_indexes = Some(failure_indexes); } pub fn with_failure_indexes(mut self, failure_indexes: Vec<i32>) -> DestinyEntitiesVendorsDestinyVendorSaleItemComponent { self.failure_indexes = Some(failure_indexes); self } pub fn failure_indexes(&self) -> Option<&Vec<i32>> { self.failure_indexes.as_ref() } pub fn reset_failure_indexes(&mut 
self) { self.failure_indexes = None; } }
true
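// Builder-style usage sketch for the generated model above; the index and
// hash values are placeholders, not real Destiny content hashes.
fn sale_item_sketch() {
    let sale = DestinyEntitiesVendorsDestinyVendorSaleItemComponent::new()
        .with_vendor_item_index(12)
        .with_item_hash(347_366_834);
    assert_eq!(sale.vendor_item_index(), Some(&12));
    assert_eq!(sale.item_hash(), Some(&347_366_834));
}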
8785590146f5077fbceb054e74fffd5e4df9f7ad
Rust
mitchmindtree/RustFFT
/src/algorithm/good_thomas_algorithm.rs
UTF-8
6,625
3.15625
3
[ "MIT", "LicenseRef-scancode-unknown-license-reference", "Apache-2.0" ]
permissive
use std::sync::Arc; use num_complex::Complex; use common::{FFTnum, verify_length, verify_length_divisible}; use math_utils; use array_utils; use ::{Length, IsInverse, FFT}; /// Implementation of the [Good-Thomas Algorithm (AKA Prime Factor Algorithm)](https://en.wikipedia.org/wiki/Prime-factor_FFT_algorithm) /// /// This algorithm factors a size n FFT into n1 * n2, where GCD(n1, n2) == 1 /// /// Conceptually, this algorithm is very similar to the Mixed-Radix FFT, except because GCD(n1, n2) == 1 we can do some /// number theory trickery to reduce the number of floating-point multiplications and additions /// /// ~~~ /// // Computes a forward FFT of size 1200, using the Good-Thomas Algorithm /// use rustfft::algorithm::GoodThomasAlgorithm; /// use rustfft::{FFT, FFTplanner}; /// use rustfft::num_complex::Complex; /// use rustfft::num_traits::Zero; /// /// let mut input: Vec<Complex<f32>> = vec![Zero::zero(); 1200]; /// let mut output: Vec<Complex<f32>> = vec![Zero::zero(); 1200]; /// /// // we need to find an n1 and n2 such that n1 * n2 == 1200 and GCD(n1, n2) == 1 /// // n1 = 48 and n2 = 25 satisfies this /// let mut planner = FFTplanner::new(false); /// let inner_fft_n1 = planner.plan_fft(48); /// let inner_fft_n2 = planner.plan_fft(25); /// /// // the good-thomas FFT length will be inner_fft_n1.len() * inner_fft_n2.len() = 1200 /// let fft = GoodThomasAlgorithm::new(inner_fft_n1, inner_fft_n2); /// fft.process(&mut input, &mut output); /// ~~~ pub struct GoodThomasAlgorithm<T> { width: usize, // width_inverse: usize, width_size_fft: Arc<FFT<T>>, height: usize, // height_inverse: usize, height_size_fft: Arc<FFT<T>>, input_map: Box<[usize]>, output_map: Box<[usize]>, inverse: bool, } impl<T: FFTnum> GoodThomasAlgorithm<T> { /// Creates a FFT instance which will process inputs/outputs of size `n1_fft.len() * n2_fft.len()` /// /// GCD(n1.len(), n2.len()) must be equal to 1 pub fn new(n1_fft: Arc<FFT<T>>, n2_fft: Arc<FFT<T>>) -> Self { assert_eq!( n1_fft.is_inverse(), n2_fft.is_inverse(), "n1_fft and n2_fft must both be inverse, or neither. 
got n1 inverse={}, n2 inverse={}", n1_fft.is_inverse(), n2_fft.is_inverse()); let n1 = n1_fft.len(); let n2 = n2_fft.len(); // compute the nultiplicative inverse of n1 mod n2 and vice versa let (gcd, mut n1_inverse, mut n2_inverse) = math_utils::extended_euclidean_algorithm(n1 as i64, n2 as i64); assert!(gcd == 1, "Invalid input n1 and n2 to Good-Thomas Algorithm: ({},{}): Inputs must be coprime", n1, n2); // n1_inverse or n2_inverse might be negative, make it positive if n1_inverse < 0 { n1_inverse += n2 as i64; } if n2_inverse < 0 { n2_inverse += n1 as i64; } // NOTE: we are precomputing the input and output reordering indexes // benchmarking shows that it's 20-30% faster // If we wanted to optimize for memory use or setup time instead of multiple-FFT speed, // these can be computed at runtime let input_map: Vec<usize> = (0..n1 * n2) .map(|i| (i % n1, i / n1)) .map(|(x, y)| (x * n2 + y * n1) % (n1 * n2)) .collect(); let output_map: Vec<usize> = (0..n1 * n2) .map(|i| (i % n2, i / n2)) .map(|(y, x)| { (x * n2 * n2_inverse as usize + y * n1 * n1_inverse as usize) % (n1 * n2) }) .collect(); GoodThomasAlgorithm { inverse: n1_fft.is_inverse(), width: n1, width_size_fft: n1_fft, height: n2, height_size_fft: n2_fft, input_map: input_map.into_boxed_slice(), output_map: output_map.into_boxed_slice(), } } fn perform_fft(&self, input: &mut [Complex<T>], output: &mut [Complex<T>]) { // copy the input using our reordering mapping for (output_element, &input_index) in output.iter_mut().zip(self.input_map.iter()) { *output_element = input[input_index]; } // run FFTs of size `width` self.width_size_fft.process_multi(output, input); // transpose array_utils::transpose(self.width, self.height, input, output); // run FFTs of size 'height' self.height_size_fft.process_multi(output, input); // copy to the output, using our output redordeing mapping for (input_element, &output_index) in input.iter().zip(self.output_map.iter()) { output[output_index] = *input_element; } } } impl<T: FFTnum> FFT<T> for GoodThomasAlgorithm<T> { fn process(&self, input: &mut [Complex<T>], output: &mut [Complex<T>]) { verify_length(input, output, self.len()); self.perform_fft(input, output); } fn process_multi(&self, input: &mut [Complex<T>], output: &mut [Complex<T>]) { verify_length_divisible(input, output, self.len()); for (in_chunk, out_chunk) in input.chunks_mut(self.len()).zip(output.chunks_mut(self.len())) { self.perform_fft(in_chunk, out_chunk); } } } impl<T> Length for GoodThomasAlgorithm<T> { #[inline(always)] fn len(&self) -> usize { self.input_map.len() } } impl<T> IsInverse for GoodThomasAlgorithm<T> { #[inline(always)] fn is_inverse(&self) -> bool { self.inverse } } #[cfg(test)] mod unit_tests { use super::*; use std::sync::Arc; use test_utils::check_fft_algorithm; use algorithm::DFT; #[test] fn test_good_thomas() { //gcd(n, n+1) is guaranteed to be 1, so we can generate some test sizes by just passing in n, n + 1 for width in 2..20 { test_good_thomas_with_lengths(width, width - 1); test_good_thomas_with_lengths(width, width + 1); } //verify that it works correctly when width and/or height are 1 test_good_thomas_with_lengths(1, 10); test_good_thomas_with_lengths(10, 1); test_good_thomas_with_lengths(1, 1); } fn test_good_thomas_with_lengths(width: usize, height: usize) { let width_fft = Arc::new(DFT::new(width, false)) as Arc<FFT<f32>>; let height_fft = Arc::new(DFT::new(height, false)) as Arc<FFT<f32>>; let fft = GoodThomasAlgorithm::new(width_fft, height_fft); check_fft_algorithm(&fft, width * height, false); } }
true
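// Standalone illustration of the input re-indexing built in the constructor
// above, for the coprime pair n1 = 3, n2 = 4: index i maps to
// (x * n2 + y * n1) mod (n1 * n2) with x = i % n1 and y = i / n1, and the
// result is a permutation of 0..12.
fn good_thomas_input_map_demo() {
    let (n1, n2) = (3usize, 4usize);
    let input_map: Vec<usize> = (0..n1 * n2)
        .map(|i| (i % n1, i / n1))
        .map(|(x, y)| (x * n2 + y * n1) % (n1 * n2))
        .collect();
    assert_eq!(input_map, vec![0, 4, 8, 3, 7, 11, 6, 10, 2, 9, 1, 5]);
}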
be9f35df5ca322c6ab6c6d2b37768fac06206086
Rust
padenot/monome-rs
/examples/gradient.rs
UTF-8
1,018
2.546875
3
[ "MIT", "Apache-2.0" ]
permissive
extern crate env_logger;
extern crate monome;

use std::{thread, time};

use monome::Monome;

fn main() {
    env_logger::init();
    let mut monome = Monome::new("/prefix".to_string()).unwrap();

    println!("{:?}", monome);

    let mut v = [0; 64];
    let mut sp: isize = 1;
    let mut dir: isize = 1;
    loop {
        for i in 0..8 {
            for j in 0..8 {
                v[i * 8 + j] = (sp / ((i + 1) as isize)) as u8;
            }
        }
        let mut grid: Vec<u8> = vec![0; 128];
        for i in 0..8 {
            for j in 0..16 {
                grid[i * 16 + j] = (sp / ((i + 1) as isize)) as u8;
            }
        }

        // both methods are equivalent
        monome.set_all_intensity(&grid);

        monome.map(0, 0, &v);
        monome.map(8, 0, &v);

        sp += dir;
        if sp == 15 {
            dir = -1;
        }
        if sp == 1 {
            dir = 1;
        }

        let refresh = time::Duration::from_millis(100);
        thread::sleep(refresh);
    }
}
true
4b774fcd96443abc4e61625a9231c4a1c17f8b77
Rust
Gekkio/gb-hardware-db
/site/src/template/site_header.rs
UTF-8
2,908
2.53125
3
[ "MIT", "CC0-1.0" ]
permissive
// SPDX-FileCopyrightText: 2017-2023 Joonas Javanainen <[email protected]> // // SPDX-License-Identifier: MIT use gbhwdb_backend::Console; use percy_dom::{html, IterableNodes, View, VirtualNode}; use crate::site::SiteSection; #[derive(Copy, Clone, Debug, PartialEq, Eq)] pub struct SiteHeader { pub section: SiteSection, } impl View for SiteHeader { fn render(&self) -> VirtualNode { let consoles_class = match self.section { SiteSection::Consoles(_) => "active", SiteSection::Cartridges => "", }; let cartridges_class = match self.section { SiteSection::Consoles(_) => "", SiteSection::Cartridges => "active", }; html! { <header class="site-header"> <div class="site-header__primary"> <h1 class="site-header__title"> <a href="/"> {"Game Boy hardware database"} <aside>{"by Gekkio and contributors"}</aside> </a> </h1> <nav class="site-primary-nav"> <ul> <li class={consoles_class}> <a href="/">{"Consoles"}</a> </li> <li class={cartridges_class}> <a href="/cartridges">{"Game cartridges"}</a> </li> </ul> </nav> </div> <SecondaryNav section={self.section} /> </header> } } } #[derive(Copy, Clone, Debug, PartialEq, Eq)] struct SecondaryNav { pub section: SiteSection, } impl View for SecondaryNav { fn render(&self) -> VirtualNode { match self.section { SiteSection::Consoles(selected) => { html! { <nav class="site-secondary-nav"> <ul> { Console::ALL.iter().map(|&console| { let class = if Some(console) == selected { "active" } else { "" }; html! { <li class={class}> <a href={format!("/consoles/{}", console.id())}> <strong>{console.code()}</strong> <span class="name">{console.name()}</span> </a> </li> } }).collect::<Vec<_>>() } </ul> </nav> } } SiteSection::Cartridges => html! { <nav class="site-secondary-nav" /> }, } } }
true
d47a912895f2b01162b0ab34d0be9d97165fd5b1
Rust
fazo96/rust-game
/src/game/player.rs
UTF-8
2,914
3.140625
3
[]
no_license
use super::position::*; use super::{Game, GameState}; use super::Entity; use super::render::RenderInfo; use super::rpg::CharacterStats; use rustbox::{Color,Key}; #[derive(Clone)] pub struct Player { position: Position, cursor_position: Position, render_info: RenderInfo, character_stats: CharacterStats } impl Player { pub fn new(x: i32, y: i32) -> Player { Player { character_stats: CharacterStats::new(5, 5, 30), position: Position::new(x, y), cursor_position: Position::new(x, y), render_info: RenderInfo::new('@', Color::White) } } pub fn cursor_position(&self) -> &Position { &self.cursor_position } fn mov(&mut self, h: i32, v: i32, game: &Game) -> bool { let position = match game.state { GameState::InspectTiles => &mut self.cursor_position, _ => &mut self.position }; match game.current_state() { GameState::Gameplay => { position.move_relative_if_passable(h, v, game) }, GameState::InspectTiles => { position.move_relative(h, v); true }, _ => false } } } impl Entity for Player { fn current_position(&self) -> &Position { &self.position } fn position(&mut self) -> &mut Position { &mut self.position } fn render_info(&self) -> &RenderInfo { &self.render_info } fn name(&self) -> Option<&str> { Some("Player") } fn kind(&self) -> &str { "Human" } fn stats(&self) -> &CharacterStats { &self.character_stats } fn tick(&mut self, game: &mut Game) { match game.last_input_key { None => {}, Some(key) => match key { Key::Left => { self.mov(-1, 0, game); } Key::Right => { self.mov(1, 0, game); } Key::Up => { self.mov(0, -1, game); } Key::Down => { self.mov(0, 1, game); }, Key::Esc => { if game.state == GameState::Gameplay { game.state = GameState::Quit; } else { game.state = GameState::Gameplay; } }, Key::Char('v') => { game.state = match game.state { GameState::InspectTiles => GameState::Gameplay, _ => { self.cursor_position = self.position.clone(); GameState::InspectTiles } } } _ => {} } } } }
true
e4e2230712a4ad4283a567e1ce6aca83c896677f
Rust
cjburkey01/citymonopolis
/amazintosh_rs/src/render/shader.rs
UTF-8
9,646
2.921875
3
[ "MIT" ]
permissive
use super::inner_gl; use super::inner_gl::types::{GLchar, GLint, GLuint}; use crate::render::{Gl, RenderHandler}; use nalgebra::{Matrix2, Matrix3, Matrix4, Vector2, Vector3, Vector4}; use std::collections::HashMap; use std::ffi::CString; use std::fmt::{Debug, Display, Formatter}; macro_rules! gl_error_check { ($gl:expr, $handle:expr, $get_shader_iv:ident, $get_shader_info_log:ident, $error:ident) => {{ // Get the number of characters in the shader's info log to check if // there is an error and prevent unnecessary allocation of a bigger buffer. let info_log_length: GLint = { let mut v: GLint = 0; unsafe { $gl.0 .$get_shader_iv($handle, inner_gl::INFO_LOG_LENGTH, &mut v); } v }; // Check if the shader failed to compile if info_log_length > 0 { let error = unsafe { // Create a vector with the required length (including the ending null byte) let mut buffer: Vec<u8> = Vec::with_capacity(info_log_length as usize + 1); // Fill it with spaces buffer.extend([b' '].iter().cycle().take(info_log_length as usize)); // Get the error from OpenGL into the CString $gl.0.$get_shader_info_log( $handle, info_log_length, std::ptr::null_mut(), buffer.as_mut_ptr() as *mut GLchar, ); // Convert the pointer back into a CString and then return a // compiler error with an owned string CString::from_vec_with_nul(buffer) .map_err(|_| ShaderError::Unknown)? .to_str() .map_err(|_| ShaderError::Unknown)? .to_owned() }; // Return the error Err(ShaderError::$error(error.trim().to_owned())) } else { Ok(()) } }}; } pub trait ShaderHandler: Clone { fn uniform1f(&mut self, location: i32, value: f32); fn uniform2f(&mut self, location: i32, value: Vector2<f32>); fn uniform3f(&mut self, location: i32, value: Vector3<f32>); fn uniform4f(&mut self, location: i32, value: Vector4<f32>); fn uniform2x2f(&mut self, location: i32, value: Matrix2<f32>); fn uniform3x3f(&mut self, location: i32, value: Matrix3<f32>); fn uniform4x4f(&mut self, location: i32, value: Matrix4<f32>); } #[derive(Copy, Clone, Debug, PartialEq, Eq)] pub enum ShaderType { Vertex, Geometry, Fragment, } impl ShaderType { fn gl_enum(self) -> inner_gl::types::GLenum { match self { Self::Vertex => inner_gl::VERTEX_SHADER, Self::Geometry => inner_gl::GEOMETRY_SHADER, Self::Fragment => inner_gl::FRAGMENT_SHADER, } } } #[derive(Debug)] pub enum ShaderError { CreateShaderFailed, InvalidSourceString, Unknown, CompileError(String), CreateShaderProgramFailed, LinkError(String), ValidateError(String), InvalidUniformName(String), DuplicateUniform(String), } impl Display for ShaderError { fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { write!(f, "{:?}", self) } } pub struct Shader { gl: Gl, handle: GLuint, } impl Shader { pub fn create_shader( gl: &mut Gl, shader_type: ShaderType, source: &str, ) -> Result<Self, ShaderError> { // Create an instance of the shader to make sure that no matter what, // if this is dropped, it will be cleaned up. 
let shader = Self { gl: gl.clone(), handle: { // Create a GL shader and return an error if it fails let handle = unsafe { gl.0.CreateShader(shader_type.gl_enum()) }; if handle == 0 { return Err(ShaderError::CreateShaderFailed); } handle }, }; // Convert the input source into a format that the C OpenGL api can // understand let c_str = CString::new(source).map_err(|_| ShaderError::InvalidSourceString)?; unsafe { // Load the shader source into OpenGL gl.0.ShaderSource(shader.handle, 1, &c_str.as_ptr(), std::ptr::null()); // Try to compile the shader from the provided source gl.0.CompileShader(shader.handle); } // Check for compilation errors gl_error_check!( gl, shader.handle, GetShaderiv, GetShaderInfoLog, CompileError )?; // Return the shader because it was successfully compiled Ok(shader) } } impl Drop for Shader { // Automatically delete this shader when this struct is dropped. fn drop(&mut self) { println!("Dropping shader {}", self.handle); unsafe { self.gl.0.DeleteShader(self.handle); } } } pub trait ShaderUniformValue { fn uniform<RHType: RenderHandler>(&self, render_handler: &mut RHType, location: i32); } pub struct ShaderProgram { gl: Gl, handle: GLuint, uniforms: HashMap<String, GLint>, } impl ShaderProgram { pub fn from_shaders( gl: &mut Gl, vertex_shader: Option<Shader>, geometry_shader: Option<Shader>, fragment_shader: Option<Shader>, uniforms: Vec<&str>, ) -> Result<Self, ShaderError> { let program = Self { gl: gl.clone(), handle: { // Create a GL shader and return an error if it fails let handle = unsafe { gl.0.CreateProgram() }; if handle == 0 { return Err(ShaderError::CreateShaderProgramFailed); } handle }, uniforms: HashMap::new(), }; // Attach the shaders if they are provided macro_rules! attach_shader { ($shader:expr) => { // Use a reference so this macro doesn't consume the shader if let Some(shader) = &$shader { unsafe { gl.0.AttachShader(program.handle, shader.handle); } Some(shader.handle) } else { None } }; } let vs = attach_shader!(vertex_shader); let gs = attach_shader!(geometry_shader); let fs = attach_shader!(fragment_shader); // Link the program unsafe { gl.0.LinkProgram(program.handle); } // Check for link errors gl_error_check!( gl, program.handle, GetProgramiv, GetProgramInfoLog, ValidateError )?; // Detach the shaders so they can be deleted when this function // invocation ends macro_rules! detach_shader { ($shader:expr) => { if let Some(shader) = $shader { unsafe { gl.0.DetachShader(program.handle, shader); } } }; } detach_shader!(vs); detach_shader!(gs); detach_shader!(fs); /* The shaders will be dropped after this as they are no longer needed */ Ok({ // Get a mutable reference let mut program = program; for uniform in uniforms { let uniform_name = CString::new(uniform.as_bytes()) .map_err(|_| ShaderError::InvalidUniformName(uniform.to_owned()))?; let location: GLint = unsafe { gl.0.GetUniformLocation(program.handle, uniform_name.as_ptr()) }; if location < 0 { eprintln!("Unable to locate uniform \"{}\" in shader", uniform); continue; } let owned_key = uniform.to_owned(); match program.uniforms.insert(owned_key, location) { Some(_) => return Err(ShaderError::DuplicateUniform(uniform.to_owned())), None => {} } } // Return the program program }) } /// Checks whether this program could be executed given the current OpenGL /// application state. If there are errors that occur that are difficult /// to trace, this method may reveal what the issue is other than just /// "Invalid operation." 
pub fn validate(&mut self) -> Result<(), ShaderError> { // Validate the program unsafe { self.gl.0.ValidateProgram(self.handle); } // Check for validation errors gl_error_check!( self.gl, self.handle, GetProgramiv, GetProgramInfoLog, ValidateError ) } pub fn bind(&mut self) { unsafe { self.gl.0.UseProgram(self.handle); } } pub fn uniform<Value: ShaderUniformValue>(&mut self, uniform: &str, value: Value) { if let Some(location) = self.uniforms.get(uniform) { value.uniform(&mut self.gl, *location); } } } impl Drop for ShaderProgram { // Automatically delete the program fn drop(&mut self) { println!("Dropping program {}", self.handle); unsafe { self.gl.0.DeleteProgram(self.handle); } } }
true
78d29ccb4b41266a60591d02e40ff76d3bd269f2
Rust
compactcode/exercism
/rust/rotational-cipher/src/lib.rs
UTF-8
445
3.484375
3
[]
no_license
pub fn rotate(input: &str, key: u8) -> String { input .chars() .map(|c| rotate_char(c, key)) .collect() } fn rotate_char(c: char, key: u8) -> char { match c { 'a'...'z' => rotate_from_base(c as u8, 'a' as u8, key), 'A'...'Z' => rotate_from_base(c as u8, 'A' as u8, key), _ => c } } fn rotate_from_base(c: u8, base: u8, key: u8) -> char { ((c - base + key) % 26 + base) as char }
true
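A minimal usage sketch for the `rotate` function in the rotational-cipher record above. The crate name `rotational_cipher` is an assumption taken from the exercise path; the expected outputs follow directly from the `(c - base + key) % 26` arithmetic in the record.

use rotational_cipher::rotate;

fn main() {
    // ROT13 is its own inverse.
    assert_eq!(rotate("omg", 13), "bzt");
    assert_eq!(rotate(&rotate("omg", 13), 13), "omg");

    // Non-alphabetic characters pass through unchanged.
    assert_eq!(rotate("Testing 1 2 3", 4), "Xiwxmrk 1 2 3");
}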
2270702da79ab416afa9f4f8754f1ab5975ae738
Rust
irbis-labs/rsmorphy
/src/container/word.rs
UTF-8
619
3.484375
3
[ "Apache-2.0", "MIT", "LicenseRef-scancode-unknown-license-reference" ]
permissive
use std::sync::Arc; #[derive(Debug, Clone, PartialEq)] pub struct WordStruct { is_known: bool, word: Arc<String>, } impl WordStruct { pub fn new<W: Into<String>>(word: W, is_known: bool) -> Self { let word = Arc::new(word.into()); WordStruct { word, is_known } } pub fn known<W: Into<String>>(word: W) -> Self { WordStruct::new(word, true) } pub fn unknown<W: Into<String>>(word: W) -> Self { WordStruct::new(word, false) } pub fn is_known(&self) -> bool { self.is_known } pub fn word(&self) -> &str { &self.word } }
true
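A short usage sketch for `WordStruct` from the rsmorphy record above. The import path mirrors the file's location (`src/container/word.rs`) and is an assumption; the crate may re-export the type elsewhere.

use rsmorphy::container::word::WordStruct;

fn main() {
    let known = WordStruct::known("hello");
    let unknown = WordStruct::unknown("qwzzk");

    assert!(known.is_known());
    assert!(!unknown.is_known());
    assert_eq!(known.word(), "hello");

    // Clone is cheap: the inner String sits behind an Arc, and the derived
    // PartialEq compares both the word and the is_known flag.
    assert_eq!(known.clone(), known);
}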
1306d2509ed69b4ccea0c2154dd2e933f4807678
Rust
belfz/piston_snake
/src/lib.rs
UTF-8
3,455
2.890625
3
[ "MIT" ]
permissive
extern crate piston_window; extern crate rand; mod board; mod direction; mod food; mod snake; use board::Board; use direction::Direction; use food::Food; use snake::Snake; use piston_window::*; use std::time::Instant; const ONE_HUNDRED_MS: u32 = 100000000; static WHITE: [f32; 4] = [1.0, 1.0, 1.0, 1.0]; static RED: [f32; 4] = [1.0, 0.0, 0.0, 1.0]; static TITLE: &'static str = "Rusnake!"; fn draw_block<G: Graphics>(x: f64, y: f64, width: f64, context: &Context, graphics: &mut G) { rectangle([0.0, 0.0, 0.0, 1.0], [x, y, width, width], context.transform, graphics); } fn render(window: &mut PistonWindow, event: &piston_window::Event, game_board: &Board) { window.draw_2d(event, |context, graphics| { let &Snake { ref segments, .. } = game_board.get_snake(); let width = snake::SNAKE_SEGMENT_WIDTH as f64; let color = if game_board.is_game_running() { WHITE } else { RED }; clear(color, graphics); // render each segment of the snake for &(x, y) in segments.iter() { draw_block(x as f64, y as f64, width, &context, graphics); } // render food let &Food { x: food_x, y: food_y } = game_board.get_food(); draw_block(food_x as f64, food_y as f64, width, &context, graphics); }); } pub fn run(width: u32, height: u32) { let mut last_position_update_timestamp = Instant::now(); let mut next_direction = Direction::Right; let mut game_board = Board::new(width, height, Food::next_rand_food(width, height), Snake::new(50, 50, next_direction)); let mut window: PistonWindow = WindowSettings::new(TITLE, [game_board.width, game_board.height]) .exit_on_esc(true).build().unwrap(); while let Some(event) = window.next() { if let Some(Button::Keyboard(key)) = event.press_args() { next_direction = match key { Key::Right => Direction::Right, Key::Left => Direction::Left, Key::Up => Direction::Up, Key::Down => Direction::Down, Key::R => { if !game_board.is_game_running() { let direction = Direction::Right; game_board = Board::new(width, height, Food::next_rand_food(width, height), Snake::new(50, 50, direction)); direction } else { next_direction } }, _ => next_direction, }; } render(&mut window, &event, &game_board); if Instant::now().duration_since(last_position_update_timestamp).subsec_nanos() > ONE_HUNDRED_MS && game_board.is_game_running() { let mut snake = game_board.get_snake().advance(game_board.width as i32, game_board.height as i32); if snake.has_collision() { game_board = game_board.set_game_is_running(false); continue; } snake = snake.change_direction(next_direction); let (snake_head_x, snake_head_y) = game_board.get_snake().segments[0]; if snake_head_x == game_board.get_food().x as i32 && snake_head_y == game_board.get_food().y as i32 { snake = snake.eat_food(game_board.get_food(), game_board.width as i32, game_board.height as i32); let new_food = Food::next_rand_food(width, height); game_board = game_board.set_food(new_food); } game_board = game_board.set_snake(snake); last_position_update_timestamp = Instant::now(); } else { continue; } } }
true
e089f3b8d6753d4f01dda2453c00d7d28b7dc800
Rust
jonwingfield/atsamd09-rs
/atsamd09d14a/dmac/crcctrl/mod.rs
UTF-8
10,004
2.859375
3
[]
no_license
#[doc = r" Value read from the register"] pub struct R { bits: u16, } #[doc = r" Value to write to the register"] pub struct W { bits: u16, } impl super::CRCCTRL { #[doc = r" Modifies the contents of the register"] #[inline] pub fn modify<F>(&self, f: F) where for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W, { let bits = self.register.get(); let r = R { bits: bits }; let mut w = W { bits: bits }; f(&r, &mut w); self.register.set(w.bits); } #[doc = r" Reads the contents of the register"] #[inline] pub fn read(&self) -> R { R { bits: self.register.get(), } } #[doc = r" Writes to the register"] #[inline] pub fn write<F>(&self, f: F) where F: FnOnce(&mut W) -> &mut W, { let mut w = W::reset_value(); f(&mut w); self.register.set(w.bits); } #[doc = r" Writes the reset value to the register"] #[inline] pub fn reset(&self) { self.write(|w| w) } } #[doc = "Possible values of the field `CRCBEATSIZE`"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum CRCBEATSIZER { #[doc = "8-bit bus transfer"] BYTE, #[doc = "16-bit bus transfer"] HWORD, #[doc = "32-bit bus transfer"] WORD, #[doc = r" Reserved"] _Reserved(u8), } impl CRCBEATSIZER { #[doc = r" Value of the field as raw bits"] #[inline] pub fn bits(&self) -> u8 { match *self { CRCBEATSIZER::BYTE => 0, CRCBEATSIZER::HWORD => 1, CRCBEATSIZER::WORD => 2, CRCBEATSIZER::_Reserved(bits) => bits, } } #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _from(value: u8) -> CRCBEATSIZER { match value { 0 => CRCBEATSIZER::BYTE, 1 => CRCBEATSIZER::HWORD, 2 => CRCBEATSIZER::WORD, i => CRCBEATSIZER::_Reserved(i), } } #[doc = "Checks if the value of the field is `BYTE`"] #[inline] pub fn is_byte(&self) -> bool { *self == CRCBEATSIZER::BYTE } #[doc = "Checks if the value of the field is `HWORD`"] #[inline] pub fn is_hword(&self) -> bool { *self == CRCBEATSIZER::HWORD } #[doc = "Checks if the value of the field is `WORD`"] #[inline] pub fn is_word(&self) -> bool { *self == CRCBEATSIZER::WORD } } #[doc = "Possible values of the field `CRCPOLY`"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum CRCPOLYR { #[doc = "CRC-16 (CRC-CCITT)"] CRC16, #[doc = "CRC32 (IEEE 802.3)"] CRC32, #[doc = r" Reserved"] _Reserved(u8), } impl CRCPOLYR { #[doc = r" Value of the field as raw bits"] #[inline] pub fn bits(&self) -> u8 { match *self { CRCPOLYR::CRC16 => 0, CRCPOLYR::CRC32 => 1, CRCPOLYR::_Reserved(bits) => bits, } } #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _from(value: u8) -> CRCPOLYR { match value { 0 => CRCPOLYR::CRC16, 1 => CRCPOLYR::CRC32, i => CRCPOLYR::_Reserved(i), } } #[doc = "Checks if the value of the field is `CRC16`"] #[inline] pub fn is_crc16(&self) -> bool { *self == CRCPOLYR::CRC16 } #[doc = "Checks if the value of the field is `CRC32`"] #[inline] pub fn is_crc32(&self) -> bool { *self == CRCPOLYR::CRC32 } } #[doc = "Possible values of the field `CRCSRC`"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum CRCSRCR { #[doc = "No action"] NOACT, #[doc = "I/O interface"] IO, #[doc = r" Reserved"] _Reserved(u8), } impl CRCSRCR { #[doc = r" Value of the field as raw bits"] #[inline] pub fn bits(&self) -> u8 { match *self { CRCSRCR::NOACT => 0, CRCSRCR::IO => 1, CRCSRCR::_Reserved(bits) => bits, } } #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _from(value: u8) -> CRCSRCR { match value { 0 => CRCSRCR::NOACT, 1 => CRCSRCR::IO, i => CRCSRCR::_Reserved(i), } } #[doc = "Checks if the value of the field is `NOACT`"] #[inline] pub fn is_noact(&self) -> bool { *self == CRCSRCR::NOACT } #[doc = "Checks if the value of the field is `IO`"] #[inline] pub fn 
is_io(&self) -> bool { *self == CRCSRCR::IO } } #[doc = "Values that can be written to the field `CRCBEATSIZE`"] pub enum CRCBEATSIZEW { #[doc = "8-bit bus transfer"] BYTE, #[doc = "16-bit bus transfer"] HWORD, #[doc = "32-bit bus transfer"] WORD, } impl CRCBEATSIZEW { #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _bits(&self) -> u8 { match *self { CRCBEATSIZEW::BYTE => 0, CRCBEATSIZEW::HWORD => 1, CRCBEATSIZEW::WORD => 2, } } } #[doc = r" Proxy"] pub struct _CRCBEATSIZEW<'a> { w: &'a mut W, } impl<'a> _CRCBEATSIZEW<'a> { #[doc = r" Writes `variant` to the field"] #[inline] pub fn variant(self, variant: CRCBEATSIZEW) -> &'a mut W { unsafe { self.bits(variant._bits()) } } #[doc = "8-bit bus transfer"] #[inline] pub fn byte(self) -> &'a mut W { self.variant(CRCBEATSIZEW::BYTE) } #[doc = "16-bit bus transfer"] #[inline] pub fn hword(self) -> &'a mut W { self.variant(CRCBEATSIZEW::HWORD) } #[doc = "32-bit bus transfer"] #[inline] pub fn word(self) -> &'a mut W { self.variant(CRCBEATSIZEW::WORD) } #[doc = r" Writes raw bits to the field"] #[inline] pub unsafe fn bits(self, value: u8) -> &'a mut W { const MASK: u8 = 3; const OFFSET: u8 = 0; self.w.bits &= !((MASK as u16) << OFFSET); self.w.bits |= ((value & MASK) as u16) << OFFSET; self.w } } #[doc = "Values that can be written to the field `CRCPOLY`"] pub enum CRCPOLYW { #[doc = "CRC-16 (CRC-CCITT)"] CRC16, #[doc = "CRC32 (IEEE 802.3)"] CRC32, } impl CRCPOLYW { #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _bits(&self) -> u8 { match *self { CRCPOLYW::CRC16 => 0, CRCPOLYW::CRC32 => 1, } } } #[doc = r" Proxy"] pub struct _CRCPOLYW<'a> { w: &'a mut W, } impl<'a> _CRCPOLYW<'a> { #[doc = r" Writes `variant` to the field"] #[inline] pub fn variant(self, variant: CRCPOLYW) -> &'a mut W { unsafe { self.bits(variant._bits()) } } #[doc = "CRC-16 (CRC-CCITT)"] #[inline] pub fn crc16(self) -> &'a mut W { self.variant(CRCPOLYW::CRC16) } #[doc = "CRC32 (IEEE 802.3)"] #[inline] pub fn crc32(self) -> &'a mut W { self.variant(CRCPOLYW::CRC32) } #[doc = r" Writes raw bits to the field"] #[inline] pub unsafe fn bits(self, value: u8) -> &'a mut W { const MASK: u8 = 3; const OFFSET: u8 = 2; self.w.bits &= !((MASK as u16) << OFFSET); self.w.bits |= ((value & MASK) as u16) << OFFSET; self.w } } #[doc = "Values that can be written to the field `CRCSRC`"] pub enum CRCSRCW { #[doc = "No action"] NOACT, #[doc = "I/O interface"] IO, } impl CRCSRCW { #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _bits(&self) -> u8 { match *self { CRCSRCW::NOACT => 0, CRCSRCW::IO => 1, } } } #[doc = r" Proxy"] pub struct _CRCSRCW<'a> { w: &'a mut W, } impl<'a> _CRCSRCW<'a> { #[doc = r" Writes `variant` to the field"] #[inline] pub fn variant(self, variant: CRCSRCW) -> &'a mut W { unsafe { self.bits(variant._bits()) } } #[doc = "No action"] #[inline] pub fn noact(self) -> &'a mut W { self.variant(CRCSRCW::NOACT) } #[doc = "I/O interface"] #[inline] pub fn io(self) -> &'a mut W { self.variant(CRCSRCW::IO) } #[doc = r" Writes raw bits to the field"] #[inline] pub unsafe fn bits(self, value: u8) -> &'a mut W { const MASK: u8 = 63; const OFFSET: u8 = 8; self.w.bits &= !((MASK as u16) << OFFSET); self.w.bits |= ((value & MASK) as u16) << OFFSET; self.w } } impl R { #[doc = r" Value of the register as raw bits"] #[inline] pub fn bits(&self) -> u16 { self.bits } #[doc = "Bits 0:1 - CRC Beat Size"] #[inline] pub fn crcbeatsize(&self) -> CRCBEATSIZER { CRCBEATSIZER::_from({ const MASK: u8 = 3; const OFFSET: u8 = 0; ((self.bits >> OFFSET) & MASK as u16) as u8 }) } #[doc = 
"Bits 2:3 - CRC Polynomial Type"] #[inline] pub fn crcpoly(&self) -> CRCPOLYR { CRCPOLYR::_from({ const MASK: u8 = 3; const OFFSET: u8 = 2; ((self.bits >> OFFSET) & MASK as u16) as u8 }) } #[doc = "Bits 8:13 - CRC Input Source"] #[inline] pub fn crcsrc(&self) -> CRCSRCR { CRCSRCR::_from({ const MASK: u8 = 63; const OFFSET: u8 = 8; ((self.bits >> OFFSET) & MASK as u16) as u8 }) } } impl W { #[doc = r" Reset value of the register"] #[inline] pub fn reset_value() -> W { W { bits: 0 } } #[doc = r" Writes raw bits to the register"] #[inline] pub unsafe fn bits(&mut self, bits: u16) -> &mut Self { self.bits = bits; self } #[doc = "Bits 0:1 - CRC Beat Size"] #[inline] pub fn crcbeatsize(&mut self) -> _CRCBEATSIZEW { _CRCBEATSIZEW { w: self } } #[doc = "Bits 2:3 - CRC Polynomial Type"] #[inline] pub fn crcpoly(&mut self) -> _CRCPOLYW { _CRCPOLYW { w: self } } #[doc = "Bits 8:13 - CRC Input Source"] #[inline] pub fn crcsrc(&mut self) -> _CRCSRCW { _CRCSRCW { w: self } } }
true
16c16c81c3d2ace7155a2f90b14eb9caa6f6bedc
Rust
tokio-rs/tokio
/tokio/tests/uds_stream.rs
UTF-8
10,976
2.640625
3
[ "MIT" ]
permissive
#![cfg(feature = "full")] #![warn(rust_2018_idioms)] #![cfg(unix)] use std::io; use std::task::Poll; use tokio::io::{AsyncReadExt, AsyncWriteExt, Interest}; use tokio::net::{UnixListener, UnixStream}; use tokio_test::{assert_ok, assert_pending, assert_ready_ok, task}; use futures::future::{poll_fn, try_join}; #[tokio::test] async fn accept_read_write() -> std::io::Result<()> { let dir = tempfile::Builder::new() .prefix("tokio-uds-tests") .tempdir() .unwrap(); let sock_path = dir.path().join("connect.sock"); let listener = UnixListener::bind(&sock_path)?; let accept = listener.accept(); let connect = UnixStream::connect(&sock_path); let ((mut server, _), mut client) = try_join(accept, connect).await?; // Write to the client. client.write_all(b"hello").await?; drop(client); // Read from the server. let mut buf = vec![]; server.read_to_end(&mut buf).await?; assert_eq!(&buf, b"hello"); let len = server.read(&mut buf).await?; assert_eq!(len, 0); Ok(()) } #[tokio::test] async fn shutdown() -> std::io::Result<()> { let dir = tempfile::Builder::new() .prefix("tokio-uds-tests") .tempdir() .unwrap(); let sock_path = dir.path().join("connect.sock"); let listener = UnixListener::bind(&sock_path)?; let accept = listener.accept(); let connect = UnixStream::connect(&sock_path); let ((mut server, _), mut client) = try_join(accept, connect).await?; // Shut down the client AsyncWriteExt::shutdown(&mut client).await?; // Read from the server should return 0 to indicate the channel has been closed. let mut buf = [0u8; 1]; let n = server.read(&mut buf).await?; assert_eq!(n, 0); Ok(()) } #[tokio::test] async fn try_read_write() -> std::io::Result<()> { let msg = b"hello world"; let dir = tempfile::tempdir()?; let bind_path = dir.path().join("bind.sock"); // Create listener let listener = UnixListener::bind(&bind_path)?; // Create socket pair let client = UnixStream::connect(&bind_path).await?; let (server, _) = listener.accept().await?; let mut written = msg.to_vec(); // Track the server receiving data let mut readable = task::spawn(server.readable()); assert_pending!(readable.poll()); // Write data. 
client.writable().await?; assert_eq!(msg.len(), client.try_write(msg)?); // The task should be notified while !readable.is_woken() { tokio::task::yield_now().await; } // Fill the write buffer using non-vectored I/O loop { // Still ready let mut writable = task::spawn(client.writable()); assert_ready_ok!(writable.poll()); match client.try_write(msg) { Ok(n) => written.extend(&msg[..n]), Err(ref e) if e.kind() == io::ErrorKind::WouldBlock => { break; } Err(e) => panic!("error = {:?}", e), } } { // Write buffer full let mut writable = task::spawn(client.writable()); assert_pending!(writable.poll()); // Drain the socket from the server end using non-vectored I/O let mut read = vec![0; written.len()]; let mut i = 0; while i < read.len() { server.readable().await?; match server.try_read(&mut read[i..]) { Ok(n) => i += n, Err(ref e) if e.kind() == io::ErrorKind::WouldBlock => continue, Err(e) => panic!("error = {:?}", e), } } assert_eq!(read, written); } written.clear(); client.writable().await.unwrap(); // Fill the write buffer using vectored I/O let msg_bufs: Vec<_> = msg.chunks(3).map(io::IoSlice::new).collect(); loop { // Still ready let mut writable = task::spawn(client.writable()); assert_ready_ok!(writable.poll()); match client.try_write_vectored(&msg_bufs) { Ok(n) => written.extend(&msg[..n]), Err(ref e) if e.kind() == io::ErrorKind::WouldBlock => { break; } Err(e) => panic!("error = {:?}", e), } } { // Write buffer full let mut writable = task::spawn(client.writable()); assert_pending!(writable.poll()); // Drain the socket from the server end using vectored I/O let mut read = vec![0; written.len()]; let mut i = 0; while i < read.len() { server.readable().await?; let mut bufs: Vec<_> = read[i..] .chunks_mut(0x10000) .map(io::IoSliceMut::new) .collect(); match server.try_read_vectored(&mut bufs) { Ok(n) => i += n, Err(ref e) if e.kind() == io::ErrorKind::WouldBlock => continue, Err(e) => panic!("error = {:?}", e), } } assert_eq!(read, written); } // Now, we listen for shutdown drop(client); loop { let ready = server.ready(Interest::READABLE).await?; if ready.is_read_closed() { break; } else { tokio::task::yield_now().await; } } Ok(()) } async fn create_pair() -> (UnixStream, UnixStream) { let dir = assert_ok!(tempfile::tempdir()); let bind_path = dir.path().join("bind.sock"); let listener = assert_ok!(UnixListener::bind(&bind_path)); let accept = listener.accept(); let connect = UnixStream::connect(&bind_path); let ((server, _), client) = assert_ok!(try_join(accept, connect).await); (client, server) } macro_rules! assert_readable_by_polling { ($stream:expr) => { assert_ok!(poll_fn(|cx| $stream.poll_read_ready(cx)).await); }; } macro_rules! assert_not_readable_by_polling { ($stream:expr) => { poll_fn(|cx| { assert_pending!($stream.poll_read_ready(cx)); Poll::Ready(()) }) .await; }; } macro_rules! assert_writable_by_polling { ($stream:expr) => { assert_ok!(poll_fn(|cx| $stream.poll_write_ready(cx)).await); }; } macro_rules! assert_not_writable_by_polling { ($stream:expr) => { poll_fn(|cx| { assert_pending!($stream.poll_write_ready(cx)); Poll::Ready(()) }) .await; }; } #[tokio::test] async fn poll_read_ready() { let (mut client, mut server) = create_pair().await; // Initial state - not readable. assert_not_readable_by_polling!(server); // There is data in the buffer - readable. assert_ok!(client.write_all(b"ping").await); assert_readable_by_polling!(server); // Readable until calls to `poll_read` return `Poll::Pending`. 
let mut buf = [0u8; 4]; assert_ok!(server.read_exact(&mut buf).await); assert_readable_by_polling!(server); read_until_pending(&mut server); assert_not_readable_by_polling!(server); // Detect the client disconnect. drop(client); assert_readable_by_polling!(server); } #[tokio::test] async fn poll_write_ready() { let (mut client, server) = create_pair().await; // Initial state - writable. assert_writable_by_polling!(client); // No space to write - not writable. write_until_pending(&mut client); assert_not_writable_by_polling!(client); // Detect the server disconnect. drop(server); assert_writable_by_polling!(client); } fn read_until_pending(stream: &mut UnixStream) { let mut buf = vec![0u8; 1024 * 1024]; loop { match stream.try_read(&mut buf) { Ok(_) => (), Err(err) => { assert_eq!(err.kind(), io::ErrorKind::WouldBlock); break; } } } } fn write_until_pending(stream: &mut UnixStream) { let buf = vec![0u8; 1024 * 1024]; loop { match stream.try_write(&buf) { Ok(_) => (), Err(err) => { assert_eq!(err.kind(), io::ErrorKind::WouldBlock); break; } } } } #[tokio::test] async fn try_read_buf() -> std::io::Result<()> { let msg = b"hello world"; let dir = tempfile::tempdir()?; let bind_path = dir.path().join("bind.sock"); // Create listener let listener = UnixListener::bind(&bind_path)?; // Create socket pair let client = UnixStream::connect(&bind_path).await?; let (server, _) = listener.accept().await?; let mut written = msg.to_vec(); // Track the server receiving data let mut readable = task::spawn(server.readable()); assert_pending!(readable.poll()); // Write data. client.writable().await?; assert_eq!(msg.len(), client.try_write(msg)?); // The task should be notified while !readable.is_woken() { tokio::task::yield_now().await; } // Fill the write buffer loop { // Still ready let mut writable = task::spawn(client.writable()); assert_ready_ok!(writable.poll()); match client.try_write(msg) { Ok(n) => written.extend(&msg[..n]), Err(ref e) if e.kind() == io::ErrorKind::WouldBlock => { break; } Err(e) => panic!("error = {:?}", e), } } { // Write buffer full let mut writable = task::spawn(client.writable()); assert_pending!(writable.poll()); // Drain the socket from the server end let mut read = Vec::with_capacity(written.len()); let mut i = 0; while i < read.capacity() { server.readable().await?; match server.try_read_buf(&mut read) { Ok(n) => i += n, Err(ref e) if e.kind() == io::ErrorKind::WouldBlock => continue, Err(e) => panic!("error = {:?}", e), } } assert_eq!(read, written); } // Now, we listen for shutdown drop(client); loop { let ready = server.ready(Interest::READABLE).await?; if ready.is_read_closed() { break; } else { tokio::task::yield_now().await; } } Ok(()) } // https://github.com/tokio-rs/tokio/issues/3879 #[tokio::test] #[cfg(not(target_os = "macos"))] async fn epollhup() -> io::Result<()> { let dir = tempfile::Builder::new() .prefix("tokio-uds-tests") .tempdir() .unwrap(); let sock_path = dir.path().join("connect.sock"); let listener = UnixListener::bind(&sock_path)?; let connect = UnixStream::connect(&sock_path); tokio::pin!(connect); // Poll `connect` once. poll_fn(|cx| { use std::future::Future; assert_pending!(connect.as_mut().poll(cx)); Poll::Ready(()) }) .await; drop(listener); let err = connect.await.unwrap_err(); assert_eq!(err.kind(), io::ErrorKind::ConnectionReset); Ok(()) }
true
467c036ae586d515adc3c8ba142e800b7b754f00
Rust
SuneelFreimuth/boa
/boa/src/builtins/map/ordered_map.rs
UTF-8
4,174
3.3125
3
[ "MIT", "Unlicense" ]
permissive
use crate::gc::{custom_trace, Finalize, Trace}; use indexmap::{map::IntoIter, map::Iter, map::IterMut, IndexMap}; use std::{ collections::hash_map::RandomState, fmt::Debug, hash::{BuildHasher, Hash}, }; /// A newtype wrapping indexmap::IndexMap #[derive(Clone)] pub struct OrderedMap<K, V, S = RandomState>(IndexMap<K, V, S>) where K: Hash + Eq; impl<K: Eq + Hash + Trace, V: Trace, S: BuildHasher> Finalize for OrderedMap<K, V, S> {} unsafe impl<K: Eq + Hash + Trace, V: Trace, S: BuildHasher> Trace for OrderedMap<K, V, S> { custom_trace!(this, { for (k, v) in this.0.iter() { mark(k); mark(v); } }); } impl<K: Hash + Eq + Debug, V: Debug> Debug for OrderedMap<K, V> { fn fmt(&self, formatter: &mut std::fmt::Formatter<'_>) -> Result<(), std::fmt::Error> { self.0.fmt(formatter) } } impl<K: Hash + Eq, V> Default for OrderedMap<K, V> { fn default() -> Self { Self::new() } } impl<K, V> OrderedMap<K, V> where K: Hash + Eq, { pub fn new() -> Self { OrderedMap(IndexMap::new()) } pub fn with_capacity(capacity: usize) -> Self { OrderedMap(IndexMap::with_capacity(capacity)) } /// Return the number of key-value pairs in the map. /// /// Computes in **O(1)** time. pub fn len(&self) -> usize { self.0.len() } /// Returns true if the map contains no elements. /// /// Computes in **O(1)** time. pub fn is_empty(&self) -> bool { self.0.len() == 0 } /// Insert a key-value pair in the map. /// /// If an equivalent key already exists in the map: the key remains and /// retains in its place in the order, its corresponding value is updated /// with `value` and the older value is returned inside `Some(_)`. /// /// If no equivalent key existed in the map: the new key-value pair is /// inserted, last in order, and `None` is returned. /// /// Computes in **O(1)** time (amortized average). pub fn insert(&mut self, key: K, value: V) -> Option<V> { self.0.insert(key, value) } /// Remove the key-value pair equivalent to `key` and return /// its value. /// /// Like `Vec::remove`, the pair is removed by shifting all of the /// elements that follow it, preserving their relative order. /// **This perturbs the index of all of those elements!** /// /// Return `None` if `key` is not in map. /// /// Computes in **O(n)** time (average). pub fn remove(&mut self, key: &K) -> Option<V> { self.0.shift_remove(key) } /// Return a reference to the value stored for `key`, if it is present, /// else `None`. /// /// Computes in **O(1)** time (average). pub fn get(&self, key: &K) -> Option<&V> { self.0.get(key) } /// Get a key-value pair by index /// Valid indices are 0 <= index < self.len() /// Computes in O(1) time. pub fn get_index(&self, index: usize) -> Option<(&K, &V)> { self.0.get_index(index) } /// Return an iterator over the key-value pairs of the map, in their order pub fn iter(&self) -> Iter<'_, K, V> { self.0.iter() } /// Return `true` if an equivalent to `key` exists in the map. /// /// Computes in **O(1)** time (average). 
pub fn contains_key(&self, key: &K) -> bool { self.0.contains_key(key) } } impl<'a, K, V, S> IntoIterator for &'a OrderedMap<K, V, S> where K: Hash + Eq, S: BuildHasher, { type Item = (&'a K, &'a V); type IntoIter = Iter<'a, K, V>; fn into_iter(self) -> Self::IntoIter { self.0.iter() } } impl<'a, K, V, S> IntoIterator for &'a mut OrderedMap<K, V, S> where K: Hash + Eq, S: BuildHasher, { type Item = (&'a K, &'a mut V); type IntoIter = IterMut<'a, K, V>; fn into_iter(self) -> Self::IntoIter { self.0.iter_mut() } } impl<K, V, S> IntoIterator for OrderedMap<K, V, S> where K: Hash + Eq, S: BuildHasher, { type Item = (K, V); type IntoIter = IntoIter<K, V>; fn into_iter(self) -> IntoIter<K, V> { self.0.into_iter() } }
true
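The `OrderedMap` in the boa record above is a thin newtype over `indexmap::IndexMap`. Since the wrapper itself is internal to boa, here is a standalone sketch, using indexmap directly, of the ordering behaviour that its `remove` and `get_index` methods rely on.

use indexmap::IndexMap;

fn main() {
    let mut map: IndexMap<&str, i32> = IndexMap::new();
    map.insert("a", 1);
    map.insert("b", 2);
    map.insert("c", 3);

    // shift_remove (what OrderedMap::remove forwards to) keeps the relative
    // order of the remaining entries, at O(n) cost.
    assert_eq!(map.shift_remove("a"), Some(1));
    let keys: Vec<_> = map.keys().copied().collect();
    assert_eq!(keys, ["b", "c"]);

    // get_index reads pairs in insertion order.
    assert_eq!(map.get_index(0), Some((&"b", &2)));
}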
73d5d3a4251123e59cbc1b644b9d3c74eabb6bb5
Rust
isgasho/linux_commands_rewritten_in_rust
/src/bin/chmod.rs
UTF-8
958
2.796875
3
[]
no_license
//! chmod (GNU coreutils) #![warn(clippy::nursery, clippy::pedantic)] use linux_commands_rewritten_in_rust::errno::last_errno_message; fn main() { let args = std::env::args().collect::<Vec<_>>(); if args.len() != 3 { eprintln!("usage_example: chmod 777 main.rs"); return; } let permission_bits = args[1].as_bytes(); let user_permission = u32::from(permission_bits[0] - b'0'); assert!(user_permission <= 7); let group_permission = u32::from(permission_bits[1] - b'0'); assert!(group_permission <= 7); let other_permission = u32::from(permission_bits[2] - b'0'); assert!(other_permission <= 7); let permission = (user_permission << 6) | (group_permission << 3) | other_permission; let filename = std::ffi::CString::new(args[2].as_bytes()).unwrap(); let ret = unsafe { libc::chmod(filename.as_ptr(), permission) }; if ret == -1 { eprintln!("{}", last_errno_message()); } }
true
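The mode value passed to `libc::chmod` in the record above is built by packing three octal digits into one integer. A small self-contained check of that arithmetic follows; `to_mode` is a hypothetical helper name, not part of the original program.

fn to_mode(user: u32, group: u32, other: u32) -> u32 {
    (user << 6) | (group << 3) | other
}

fn main() {
    // "755" -> rwxr-xr-x, "644" -> rw-r--r--
    assert_eq!(to_mode(7, 5, 5), 0o755);
    assert_eq!(to_mode(6, 4, 4), 0o644);
    assert_eq!(to_mode(7, 7, 7), 0o777);
}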
c1d963b524fc795852f62d8c5684bba001321d8e
Rust
bundleofbytes/libstripe
/src/resources/core/customer_taxid.rs
UTF-8
3,078
2.71875
3
[ "MIT", "Apache-2.0" ]
permissive
use crate::resources::common::object::Object; use crate::resources::common::path::UrlPath; use crate::resources::core::customer::Customer; use crate::util::{Deleted, Expandable, List}; use crate::Client; #[derive(Serialize, Deserialize, Debug, PartialEq)] pub struct CustomerTaxID { pub id: String, pub object: Object, pub country: String, pub created: i64, pub customer: Expandable<Customer>, pub livemode: bool, #[serde(rename = "type")] pub taxid_type: TaxIDType, pub value: String, pub verification: TaxIDVerification, } #[derive(Serialize, Deserialize, Debug, PartialEq)] #[serde(rename_all = "snake_case")] pub enum TaxIDType { EuVat, NzGst, AuAbn, Unknown, } #[derive(Serialize, Deserialize, Debug, PartialEq)] pub struct TaxIDVerification { pub status: TaxIDVerificationStatus, pub verified_address: Option<String>, pub verified_name: Option<String>, } #[derive(Serialize, Deserialize, Debug, PartialEq)] #[serde(rename_all = "lowercase")] pub enum TaxIDVerificationStatus { Pending, Unavailable, Unverified, Verified, } #[derive(Default, Debug, Serialize, PartialEq)] pub struct CustomerTaxIDParam<'a> { #[serde(skip_serializing_if = "Option::is_none")] pub customer: Option<&'a str>, #[serde(skip_serializing_if = "Option::is_none")] #[serde(rename = "type")] pub taxid_type: Option<TaxIDType>, #[serde(skip_serializing_if = "Option::is_none")] pub value: Option<&'a str>, #[serde(skip_serializing_if = "Option::is_none")] pub expand: Option<Vec<&'a str>>, } #[derive(Default, Serialize, Debug, PartialEq)] pub struct CustomerTaxIDListParams<'a> { #[serde(skip_serializing_if = "Option::is_none")] pub customer: Option<&'a str>, #[serde(skip_serializing_if = "Option::is_none")] pub ending_before: Option<&'a str>, #[serde(skip_serializing_if = "Option::is_none")] pub limit: Option<i64>, #[serde(skip_serializing_if = "Option::is_none")] pub starting_after: Option<&'a str>, #[serde(skip_serializing_if = "Option::is_none")] pub expand: Option<Vec<&'a str>>, } impl CustomerTaxID { pub fn create<B: serde::Serialize>( client: &Client, customer: &str, param: B, ) -> crate::Result<Self> { client.post(UrlPath::Customers, vec![customer, "tax_ids"], param) } pub fn retrieve(client: &Client, customer: &str, id: &str) -> crate::Result<Self> { client.get( UrlPath::Customers, vec![customer, "tax_ids", id], serde_json::Map::new(), ) } pub fn delete(client: &Client, customer: &str, id: &str) -> crate::Result<Deleted> { client.delete( UrlPath::Customers, vec![customer, "tax_ids", id], serde_json::Map::new(), ) } pub fn list<B: serde::Serialize>( client: &Client, customer: &str, param: B, ) -> crate::Result<List<Self>> { client.get(UrlPath::Customers, vec![customer, "tax_ids"], param) } }
true
12980e07fe52d28ef8f51963637414ccdbfcc9de
Rust
jFransham/simple-game
/src/gameobjects/player/ship.rs
UTF-8
14,174
2.90625
3
[]
no_license
use ::gameobjects::*; use ::events::*; use ::view::*; use ::graphics::sprites::{ VisibleComponent, VisibleRect, Sprite, }; use ::time::*; use ::set::Intersects; use super::*; use super::command_builder::CommandBuilder; use std::convert::TryInto; use std::collections::HashMap; use sdl2::pixels::Color; use sdl2::render::Texture; /// The different states our ship might be in. In the image, they're ordered /// from left to right, then from top to bottom. #[derive(Clone, Copy, Hash, PartialEq, Eq, Debug)] pub enum ShipFrame { UpNorm, UpFast, UpSlow, MidNorm, MidFast, MidSlow, DownNorm, DownFast, DownSlow, } pub static ALL_FRAMES: [ShipFrame; 9] = [ ShipFrame::UpNorm, ShipFrame::UpFast, ShipFrame::UpSlow, ShipFrame::MidNorm, ShipFrame::MidFast, ShipFrame::MidSlow, ShipFrame::DownNorm, ShipFrame::DownFast, ShipFrame::DownSlow, ]; pub struct GunArgs { pub bounds: Bounds, } pub trait Gun: Sized { fn spawn_bullets( &mut self, args: GunArgs, _: GameTime ) -> Vec<SimpleObject>; fn next_weapon(&mut self) {} } pub struct ShipGun { pub kind: BulletKind, pub sine: SineGun, pub standard: StandardGun, } impl Gun for ShipGun { fn spawn_bullets( &mut self, args: GunArgs, time: GameTime ) -> Vec<SimpleObject> { if self.kind == BulletKind::Sine { self.sine.spawn_bullets(args, time) } else { self.standard.spawn_bullets(args, time) } } fn next_weapon(&mut self) { self.kind = if self.kind == BulletKind::Sine { BulletKind::Standard } else { BulletKind::Sine }; } } #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)] pub enum Direction { Neg, Zero, Pos, } impl Default for Direction { fn default() -> Self { Direction::Zero } } enum ShipCommand { Move([Direction; 2]), Fire, NextWeapon, } pub struct Ship<G: Gun/*, C: CommandBuilder<Self, ShipCommand>*/> { pub bounds: BoundingRect, //pub command_builder: C, pub gun: G, pub dir: [Direction; 2], pub sprites: HashMap<ShipFrame, Sprite<Texture>>, } impl<G: Gun> Ship<G> { fn get_control(up: bool, down: bool, left: bool, right: bool) -> [Direction; 2] { use ::fixed_size_iter::FixedSizeIntoMap; use self::Direction::*; [(left, right), (up, down)].map( |(a, b)| match (a, b) { (true, true) | (false, false) => Zero, (true, _) => Neg, (_, true) => Pos, } ) } fn movement_direction(dirs: [Direction; 2]) -> [f64; 2] { use self::Direction::*; use ::fixed_size_iter::FixedSizeIntoMap; Self::normalize( dirs.map(|d| match d { Neg => -1.0, Zero => 0.0, Pos => 1.0 }) ) } fn normalize([x, y]: [f64; 2]) -> [f64; 2] { match [x, y] { [0.0, 0.0] => [0.0, 0.0], [x, y] => { let len = (x*x + y*y).sqrt(); [x / len, y / len] }, } } #[allow(collapsible_if)] fn get_frame(vel: [Direction; 2]) -> ShipFrame { use self::ShipFrame::*; use self::Direction::*; use ::fixed_size_iter::FixedSizeIntoMap; match vel { [Neg, Neg ] => UpSlow, [Neg, Pos ] => DownSlow, [Neg, Zero] => MidSlow, [Pos, Neg ] => UpFast, [Pos, Pos ] => DownFast, [Pos, Zero] => MidFast, [Zero, Neg ] => UpNorm, [Zero, Pos ] => DownNorm, [Zero, Zero] => MidNorm, } } } impl<G: Gun> GameObject<Keys, Texture> for Ship<G> { fn update( &mut self, context: &mut Context<Keys>, time: GameTime ) -> Vec<GameAction<Keys, Texture>> { use ::fixed_size_iter::FixedSizeIntoMap; let player_speed = 230.0; let dt = time.elapsed.exact_seconds(); let (sw, sh) = context.renderer.output_size().map( |(a, b)| (a as f64, b as f64) ).unwrap(); self.dir = { let keys = &context.events.down; Self::get_control( keys.up, keys.down, keys.left, keys.right, ) }; let [dx, dy] = Self::movement_direction(self.dir).map( |a| a * dt * player_speed ); self.bounds.x += dx; 
self.bounds.y += dy; self.bounds = self.bounds.move_inside( &BoundingRect { width: sw, height: sh, .. Default::default() } ).unwrap(); if context.events.pressed.next_weapon { self.gun.next_weapon(); } if context.events.down.fire { vec![ GameAction::AddObjects( self.gun.spawn_bullets( GunArgs { bounds: self.bounds.into(), }, time ) ) ] } else { vec![] } } fn sprites(&self, _: GameTime) -> Vec<(VisibleComponent<Texture>, Dest)> { vec![ ( self.sprites[&Self::get_frame(self.dir)].clone().into(), self.bounds.try_into().unwrap(), ) ] } fn bounds(&self) -> Option<Bounds> { Some(self.bounds.into()) } } pub struct SineGun { pub last_ammo_at: u32, pub last_shot_at: u32, pub ammo_intervals: [u32; 3], pub ammo: u8, pub max_ammo: u8, } impl SineGun { pub fn new(now: u32) -> SineGun { let max = 10; SineGun { last_ammo_at: now, last_shot_at: 0, ammo_intervals: [1000, 700, 400], ammo: max, max_ammo: max, } } fn get_interval(&self) -> u32 { use ::gameobjects::MinMax; self.ammo_intervals[ (self.ammo_intervals.len() - 1).min(self.ammo as _) ] } } impl Gun for SineGun { fn spawn_bullets( &mut self, args: GunArgs, time: GameTime ) -> Vec<SimpleObject> { use ::gameobjects::MinMax; { let mut time_diff = time.total - self.last_ammo_at; let mut interval = self.get_interval(); while time_diff >= interval { self.last_ammo_at = time.total; time_diff -= interval; self.ammo = (self.ammo as u32 + 1).min(self.max_ammo as _) as _; interval = self.get_interval(); } } { let time_diff = time.total - self.last_shot_at; if time_diff >= 80 { self.last_shot_at = time.total; } else { return vec![]; } } if self.ammo == 0 { return vec![]; } else { self.ammo -= 1; } self.last_ammo_at = time.total; let cannons_x = args.bounds.left() + 30.0; let cannon1_y = args.bounds.top() + 6.0; let cannon2_y = args.bounds.bottom() - 10.0; vec![ box SineBullet::new( [cannons_x, cannon1_y], -90.0, time.total ), box SineBullet::new( [cannons_x, cannon2_y], 90.0, time.total ), ] } } pub struct StandardGun { pub last_shot_at: u32, } impl StandardGun { pub fn new(now: u32) -> StandardGun { StandardGun { last_shot_at: now, } } } impl Gun for StandardGun { fn spawn_bullets( &mut self, args: GunArgs, time: GameTime ) -> Vec<SimpleObject> { let time_diff = time.total - self.last_shot_at; if time_diff < 400 { return vec![]; } self.last_shot_at = time.total; let cannons_x = args.bounds.left() + 30.0; let cannon1_y = args.bounds.top() + 6.0; let cannon2_y = args.bounds.bottom() - 10.0; vec![ box Bullet::new( [cannons_x, cannon1_y], ), box Bullet::new( [cannons_x, cannon2_y], ), ] } } pub struct SineBullet { pub bounds: BoundingRect, pub born_at: u32, pub angular_velocity: f64, pub origin_y: f64, pub amplitude: f64, } impl SineBullet { pub fn new([x, y]: [f64; 2], amplitude: f64, now: u32) -> SineBullet { SineBullet { amplitude: amplitude, born_at: now, angular_velocity: 4.0, bounds: BoundingRect { x: x, y: y, width: 8.0, height: 4.0, }, origin_y: y, } } } impl GameObject<Keys, Texture> for SineBullet { fn update( &mut self, context: &mut Context<Keys>, time: GameTime ) -> Vec<GameAction<Keys, Texture>> { let velocity_x = 270.0; let time_alive = time.total - self.born_at; let (elapsed, alive_secs) = ( time.elapsed.exact_seconds(), time_alive.exact_seconds(), ); self.bounds.x += velocity_x * elapsed; self.bounds.y = self.origin_y + self.amplitude * ( self.angular_velocity * alive_secs ).sin(); let screen = context.renderer.output_size().map( |(w, h)| BoundingRect::default().with_size(w as _, h as _) ).unwrap(); if self.bounds.left() > screen.left() && 
self.bounds.right() < screen.right() { vec![] } else { vec![GameAction::Delete] } } fn sprites(&self, _: GameTime) -> Vec<(VisibleComponent<Texture>, Dest)> { vec![ ( VisibleRect(Color::RGB(230, 30, 30)).into(), self.bounds.try_into().unwrap(), ) ] } fn bounds(&self) -> Option<Bounds> { Some(self.bounds.into()) } fn on_hit(&self) -> Option<GameMessage<Keys, Texture>> { Some( GameMessage::Hit { other: self as _, info: DamageInfo { damage: 20, filter: DamageFilter::Player, }, } ) } fn receive_message<'a>( &'a mut self, ctx: &mut Context<Keys>, time: GameTime, m: GameMessage<'a, Keys, Texture> ) -> Vec<GameAction<Keys, Texture>> { if let GameMessage::Hit { info: DamageInfo { filter: DamageFilter::Player, .. }, .. } = m { vec![] } else { vec![ GameAction::Delete, GameAction::AddObjects( vec![ box Explosion::with_bounds( &mut ctx.renderer, time.total, BoundingRect { x: self.bounds.x, y: self.bounds.y, width: 10.0, height: 10.0, } ) ] ), ] } } } pub struct Bullet { pub bounds: BoundingRect, pub velocity: [f64; 2], } impl Bullet { pub fn new([x, y]: [f64; 2]) -> Bullet { Bullet { bounds: BoundingRect { x: x, y: y, width: 8.0, height: 4.0, }, velocity: [1800.0, 0.0], } } } impl GameObject<Keys, Texture> for Bullet { fn update( &mut self, context: &mut Context<Keys>, time: GameTime ) -> Vec<GameAction<Keys, Texture>> { let elapsed = time.elapsed.exact_seconds(); self.bounds.x += self.velocity[0] * elapsed; self.bounds.y += self.velocity[1] * elapsed; let screen = context.renderer.output_size().map( |(w, h)| BoundingRect::default().with_size(w as _, h as _) ).unwrap(); if self.bounds.intersects(&screen) { vec![] } else { vec![GameAction::Delete] } } fn sprites(&self, _: GameTime) -> Vec<(VisibleComponent<Texture>, Dest)> { vec![ ( VisibleRect(Color::RGB(230, 230, 30)).into(), self.bounds.try_into().unwrap(), ) ] } fn bounds(&self) -> Option<Bounds> { Some(self.bounds.into()) } fn receive_message<'a>( &'a mut self, ctx: &mut Context<Keys>, time: GameTime, m: GameMessage<'a, Keys, Texture> ) -> Vec<GameAction<Keys, Texture>> { if let GameMessage::Hit { info: DamageInfo { filter: DamageFilter::Player, .. }, .. } = m { vec![] } else { vec![ GameAction::Delete, GameAction::AddObjects( vec![ box Explosion::with_bounds( &mut ctx.renderer, time.total, BoundingRect { x: self.bounds.x, y: self.bounds.y, width: 10.0, height: 10.0, } ) ] ), ] } } fn on_hit(&self) -> Option<GameMessage<Keys, Texture>> { Some( GameMessage::Hit { other: self as _, info: DamageInfo { damage: 20, filter: DamageFilter::Player, }, } ) } }
true
5796a79eccd49ad1e80e67c3f08a1e0679fa2968
Rust
Palaxx/rust-polygon-map-generator
/src/terrains/snow.rs
UTF-8
215
2.703125
3
[]
no_license
use crate::terrains::terrain::Terrain; pub struct Snow { } impl Terrain for Snow { fn get_color(&self) -> [u8; 3] { [221,221,228] } fn get_name(&self) -> &'static str { "snow" } }
true
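A sketch of how the `Snow` terrain above is meant to be consumed through its `Terrain` trait. The `use crate::...` paths assume this sits inside the same crate (for example in its `main.rs`), matching the module layout shown in the record; the expected string follows from the color and name returned by the impl.

use crate::terrains::{snow::Snow, terrain::Terrain};

fn describe(t: &dyn Terrain) -> String {
    let [r, g, b] = t.get_color();
    format!("{} -> rgb({}, {}, {})", t.get_name(), r, g, b)
}

fn main() {
    assert_eq!(describe(&Snow {}), "snow -> rgb(221, 221, 228)");
}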
47b0da06291c8a4713214d272dff32da90bac62a
Rust
anchnk/delta
/src/options/option_value.rs
UTF-8
2,917
3.109375
3
[ "MIT" ]
permissive
use crate::config::delta_unreachable; /// A value associated with a Delta command-line option name. pub enum OptionValue { Boolean(bool), Float(f64), OptionString(Option<String>), String(String), Int(usize), } /// An OptionValue, tagged according to its provenance/semantics. pub enum ProvenancedOptionValue { GitConfigValue(OptionValue), DefaultValue(OptionValue), } impl From<bool> for OptionValue { fn from(value: bool) -> Self { OptionValue::Boolean(value) } } impl From<OptionValue> for bool { fn from(value: OptionValue) -> Self { match value { OptionValue::Boolean(value) => value, _ => delta_unreachable("Error converting OptionValue to bool."), } } } impl From<f64> for OptionValue { fn from(value: f64) -> Self { OptionValue::Float(value) } } impl From<OptionValue> for f64 { fn from(value: OptionValue) -> Self { match value { OptionValue::Float(value) => value, _ => delta_unreachable("Error converting OptionValue to f64."), } } } impl From<Option<String>> for OptionValue { fn from(value: Option<String>) -> Self { OptionValue::OptionString(value) } } impl From<OptionValue> for Option<String> { fn from(value: OptionValue) -> Self { match value { OptionValue::OptionString(value) => value, // HACK: See the comment in options::set::compute_line_numbers_mode(). That function // deliberately reads what is normally a boolean value ('line-numbers') as a string. // However options::get::get_option_value() can fall through to obtaining the value // from builtin_features, in which case an OptionValue::Boolean will be encountered. // See the comment in options::set::compute_line_numbers_mode() and docstring of // options::get::get_option_value(). OptionValue::Boolean(_) => None, _ => delta_unreachable("Error converting OptionValue to Option<String>."), } } } impl From<String> for OptionValue { fn from(value: String) -> Self { OptionValue::String(value) } } impl From<&str> for OptionValue { fn from(value: &str) -> Self { value.to_string().into() } } impl From<OptionValue> for String { fn from(value: OptionValue) -> Self { match value { OptionValue::String(value) => value, _ => delta_unreachable("Error converting OptionValue to String."), } } } impl From<usize> for OptionValue { fn from(value: usize) -> Self { OptionValue::Int(value) } } impl From<OptionValue> for usize { fn from(value: OptionValue) -> Self { match value { OptionValue::Int(value) => value, _ => delta_unreachable("Error converting OptionValue to usize."), } } }
true
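A sketch of the conversion round-trips defined on `OptionValue` above. The `use` path follows the file's location inside delta (`src/options/option_value.rs`) and is an assumption; the module may not be reachable outside the crate, so treat this as illustrative.

use crate::options::option_value::OptionValue;

fn main() {
    // bool -> OptionValue -> bool
    let v: OptionValue = true.into();
    assert!(bool::from(v));

    // &str -> OptionValue -> String
    let v: OptionValue = "side-by-side".into();
    assert_eq!(String::from(v), "side-by-side");

    // A mismatched conversion (e.g. a Float read back as usize) hits
    // delta_unreachable, so callers must know the expected variant.
}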
b650007e6ec551d812dfafbb1a5d34886356a8d9
Rust
algebraicdb/algebraicdb
/channel_stream/src/lib.rs
UTF-8
4,114
3.09375
3
[]
no_license
use pin_utils::pin_mut; use staticvec::StaticVec; use std::future::Future; use std::io::Write; use std::pin::Pin; use std::task::{Context, Poll}; use tokio::io::{self, AsyncRead, AsyncWrite}; use tokio::sync::mpsc; const CHANNEL_CAP: usize = 255; const BUFFER_CAP: usize = 255; type Chunk = StaticVec<u8, BUFFER_CAP>; pub struct Reader { buffer: Chunk, channel: mpsc::Receiver<Chunk>, } pub struct Writer { channel: mpsc::Sender<Chunk>, } /// Create a writer-reader pair pub fn pair() -> (Writer, Reader) { let (tx, rx) = mpsc::channel(CHANNEL_CAP); let r = Reader { buffer: Chunk::new(), channel: rx, }; let w = Writer { channel: tx }; (w, r) } impl AsyncRead for Reader { fn poll_read( mut self: Pin<&mut Self>, cx: &mut Context, mut buf: &mut [u8], ) -> Poll<io::Result<usize>> { if self.buffer.len() > 0 { let n = buf.write(&self.buffer).unwrap(); self.buffer.drain(..n); Poll::Ready(Ok(n)) } else { self.buffer = { let read = self.channel.recv(); pin_mut!(read); match read.poll(cx) { Poll::Ready(r) => match r { Some(chunk) => chunk, None => return Poll::Ready(Ok(0)), }, Poll::Pending => return Poll::Pending, } }; let n = buf.write(&self.buffer).unwrap(); self.buffer.drain(..n); Poll::Ready(Ok(n)) } } } impl AsyncWrite for Writer { fn poll_write( mut self: Pin<&mut Self>, cx: &mut Context, buf: &[u8], ) -> Poll<io::Result<usize>> { let mut chunk = StaticVec::new(); let n = chunk.write(buf).unwrap(); let write = self.channel.send(chunk.clone()); pin_mut!(write); match write.poll(cx) { Poll::Ready(r) => match r { Ok(()) => Poll::Ready(Ok(n)), Err(e) => Poll::Ready(Err(io::Error::new(io::ErrorKind::BrokenPipe, e))), }, Poll::Pending => Poll::Pending, } } fn poll_flush(self: Pin<&mut Self>, _cx: &mut Context) -> Poll<io::Result<()>> { Poll::Ready(Ok(())) } fn poll_shutdown(self: Pin<&mut Self>, _cx: &mut Context) -> Poll<io::Result<()>> { Poll::Ready(Ok(())) } } #[cfg(test)] mod tests { use super::pair; use tokio::io::{AsyncReadExt, AsyncWriteExt}; #[tokio::test] async fn test_big_streams() { let (mut writer, mut reader) = pair(); const MSG_COUNT: u64 = 1000000; let msg = |i: u8| [i, i.overflowing_add(2).0, i.overflowing_add(69).0]; tokio::spawn(async move { for i in 0..MSG_COUNT { let i = i as u8; let buf = msg(i); writer.write_all(&buf).await.unwrap(); } }); for i in 0..MSG_COUNT { let i = i as u8; let expected = msg(i); let mut buf = [0u8; 3]; reader.read_exact(&mut buf).await.unwrap(); assert_eq!(&buf, &expected); } } #[tokio::test] async fn test_many_streams() { for i in 0..255u8 { let (mut writer, mut reader) = pair(); tokio::spawn(async move { let buf = [i; 1000]; writer.write_all(&buf).await.unwrap(); }); let mut buf = vec![]; reader.read_to_end(&mut buf).await.unwrap(); assert_eq!(buf.len(), 1000); for i2 in buf { assert_eq!(i2, i); } } } #[tokio::test] async fn test_send_string() { let msg = b"Hello there good sir!\n"; let (mut writer, mut reader) = pair(); tokio::spawn(async move { writer.write_all(msg).await.unwrap(); }); let mut buf = vec![]; reader.read_to_end(&mut buf).await.unwrap(); assert_eq!(&buf, msg); } }
true
7731ae67c381924dc03405913665a844485c8853
Rust
BabylonAS/multi-ipc-test
/src/client.rs
UTF-8
3,961
2.890625
3
[ "Unlicense" ]
permissive
/* multi-ipc-test/src/client.rs: multi-program IPC test client * This is free and unencumbered software released into the public domain. * * Anyone is free to copy, modify, publish, use, compile, sell, or * distribute this software, either in source code form or as a compiled * binary, for any purpose, commercial or non-commercial, and by any * means. * * In jurisdictions that recognize copyright laws, the author or authors * of this software dedicate any and all copyright interest in the * software to the public domain. We make this dedication for the benefit * of the public at large and to the detriment of our heirs and * successors. We intend this dedication to be an overt act of * relinquishment in perpetuity of all present and future rights to this * software under copyright law. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. * IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR * OTHER DEALINGS IN THE SOFTWARE. * * For more information, please refer to <http://unlicense.org> */ extern crate ipc_channel; // Use the ~~proprietary~~ custom-designed data carrier struct, called a Packet. use packet::Packet; use ipc_channel::ipc::{self, IpcReceiver, IpcSender}; use std::env; use std::io::{stdin, Read}; use std::process::exit; // Main entry point fn main() { // Process command-line arguments. There should be at least one (server name). // Note: the first “argument”, args[0], is really the program’s name! So it technically requires two args, // but we’ll ignore args[0] for clarity. let args: Vec<String> = env::args().collect(); if args.len() < 2 { // Too few arguments, show the usage and exit. eprintln!("Error: must specify at least one argument\n\ Usage: {} <server-name> [quit]", args[0].to_owned()); exit(1); } // The server name. let serv_name = args[1].to_owned(); // Handle for stopping the server let stop: bool = if args.len() > 2 { // Print a warning if there are three or even more arguments. if args.len() > 3 { eprintln!("Warning: too many arguments ({}, expected 2), third and next ones will be ignored", args.len()-1); } // If we have two and the second is "stop", then accept it. Otherwise, print a warning. if args[2].eq(&String::from("stop")) { true } else { eprintln!("Warning: unrecognized second argument, it will be ignored"); false } } else { false }; // Read data from the standard input. You can simply pipe anything here via the shell. let mut data: String = String::new(); stdin().read_to_string(&mut data).expect("Input error!"); // Try to connect to the server, create an IPC channel and send a Packet // with the data, the stop switch, and the new channel’s sender that would // provide a callback to this client. let sender: IpcSender<Packet> = IpcSender::connect(serv_name.clone()) .expect("Can’t connect!"); let (tx, rx): (IpcSender<Packet>, IpcReceiver<Packet>) = ipc::channel() .expect("Can’t establish callback!"); sender.send(Packet { data, stop, sender: Some(Box::new(tx)) }) .expect("Can’t send the packet!"); println!("Packet successfully sent to {}", serv_name); // Receive the reply and print either the new server name or the farewell message. 
let reply = rx.recv().expect("Can’t receive reply!"); match reply.stop { false => println!("New server name: {}", reply.data), true => println!("Server is shutting down. {}", reply.data), }; }
true
2ba37fe753ec95cff881fe89a97de8976a7ddb7f
Rust
zxk7516/hellors
/src/bin/a3.rs
UTF-8
726
2.71875
3
[]
no_license
#![allow(warnings)] #[macro_use] extern crate crossbeam_channel; use crossbeam_channel::bounded; use crossbeam_utils::thread; fn main() { let people = vec!["Anna", "Bob", "Cody", "Dave", "Eva"]; let (s, r) = bounded(1); let seek = |name, s, r| { select! { recv(r) -> peer => println!("{} received a message from {}.", name, peer.unwrap()), send(s, name) -> _ => {}, } }; thread::scope(|scope| { for name in people { let (s, r) = (s.clone(), r.clone()); scope.spawn(move |_| seek(name, s, r)); } }) .unwrap(); if let Ok(name) = r.try_recv() { println!("No one received {}’s message.", name); } }
true
6e20212ae8fa4d00d8628942f5985c849ac8889d
Rust
pierreyoda/rustboycolor
/src/gpu/registers.rs
UTF-8
3,366
3.015625
3
[ "MIT" ]
permissive
use super::GpuMode; pub const CONTROL: usize = 0xFF40; // LCD Control pub const STAT: usize = 0xFF41; // LCD Controller status pub const SCY: usize = 0xFF42; pub const SCX: usize = 0xFF43; pub const LY: usize = 0xFF44; // read-only pub const LYC: usize = 0xFF45; pub const BGP: usize = 0xFF47; // ignored in CGB mode pub const OBP_0: usize = 0xFF48; // ignored in CGB mode pub const OBP_1: usize = 0xFF49; // ignored in CGB mode pub const WY: usize = 0xFF4A; pub const WX: usize = 0xFF4B; #[derive(Clone)] pub enum LcdControl { BgDisplayEnable = 0, ObjDisplayEnable = 1, ObjSize = 2, BgTileMapDisplaySelect = 3, BgWindowTileDataSelect = 4, WindowDisplayEnable = 5, WindowTileMapDisplaySelect = 6, LcdDisplayEnable = 7, } impl LcdControl { pub fn is_set(&self, register: u8) -> bool { let v = self.clone() as usize; ((register >> v) & 0x01) == 0x01 } } #[derive(Clone)] pub enum LcdControllerStatus { HBlankInterrupt = 3, VBlankInterrupt = 4, OamInterrupt = 5, LyCoincidenceInterrupt = 6, } impl LcdControllerStatus { pub fn is_set(&self, register: u8) -> bool { let v = self.clone() as usize; ((register >> v) & 0x01) == 0x01 } pub fn with_mode(register: u8, mode: GpuMode) -> u8 { (register & 0xFC) | (mode as u8) } /// Set the register's bit 2 to true if LYC=LY, false otherwise. pub fn with_coincidence_flag(register: u8, coincidence: bool) -> u8 { (register & 0xFB) | if coincidence { 0x04 } else { 0x00 } } } #[cfg(test)] mod test { use super::LcdControl::*; use super::LcdControllerStatus; use super::LcdControllerStatus::*; #[test] fn test_lcd_control_is_set() { assert!(BgDisplayEnable.is_set(1 << 0)); assert!(ObjDisplayEnable.is_set(1 << 1)); assert!(ObjSize.is_set(1 << 2)); assert!(BgTileMapDisplaySelect.is_set(1 << 3)); assert!(BgWindowTileDataSelect.is_set(1 << 4)); assert!(WindowDisplayEnable.is_set(1 << 5)); assert!(WindowTileMapDisplaySelect.is_set(1 << 6)); assert!(LcdDisplayEnable.is_set(1 << 7)); } #[test] fn test_lcdc_status_is_set() { assert!(HBlankInterrupt.is_set(1 << 3)); assert!(VBlankInterrupt.is_set(1 << 4)); assert!(OamInterrupt.is_set(1 << 5)); assert!(LyCoincidenceInterrupt.is_set(1 << 6)); } #[test] fn test_lcdc_status_with_mode() { use crate::gpu::GpuMode::*; let lcdc_status = 0b_0110_1011; assert_eq!( LcdControllerStatus::with_mode(lcdc_status, H_Blank), 0b_0110_1000 ); assert_eq!( LcdControllerStatus::with_mode(lcdc_status, V_Blank), 0b_0110_1001 ); assert_eq!( LcdControllerStatus::with_mode(lcdc_status, OAM_Read), 0b_0110_1010 ); assert_eq!( LcdControllerStatus::with_mode(lcdc_status, VRAM_Read), 0b_0110_1011 ); } #[test] fn test_lcdc_status_with_coincidence_flag() { assert_eq!( LcdControllerStatus::with_coincidence_flag(0b_1011_0011, true), 0b_1011_0111 ); assert_eq!( LcdControllerStatus::with_coincidence_flag(0b_1011_0111, false), 0b_1011_0011 ); } }
true
4fab0d24cc6865f5c3da7966578557c224c6d209
Rust
Aetf/fit2
/src/adaptor.rs
UTF-8
2,037
2.5625
3
[ "MIT", "Apache-2.0" ]
permissive
use futures::task::{Context, Poll}; pub use http; use hyper::{body::Bytes, http::HeaderValue, HeaderMap}; use lambda_http::Body as LambdaBody; use std::convert::Infallible; use tokio::macros::support::Pin; pub trait IntoHyperBody: Sized { fn into_bytes(self) -> Bytes; fn into_hyper_body(self) -> hyper::Body { self.into_bytes().into() } } pub trait IntoResp { fn into_resp(self) -> Response; } pub trait RequestExt { fn base_path(&self) -> &str; } pub mod prelude { pub use super::IntoHyperBody as _; pub use super::IntoResp; pub use super::RequestExt as _; pub use hyper::body::HttpBody as _; } impl IntoHyperBody for LambdaBody { fn into_bytes(self) -> Bytes { match self { LambdaBody::Empty => Bytes::new(), LambdaBody::Text(s) => s.into(), LambdaBody::Binary(b) => b.into(), } } } #[derive(Debug)] pub struct WrapperBody(Option<LambdaBody>); impl WrapperBody { pub fn empty() -> WrapperBody { Self(None) } pub fn from<T: Into<LambdaBody>>(data: T) -> WrapperBody { Self(Some(data.into())) } } impl Into<LambdaBody> for WrapperBody { fn into(self) -> LambdaBody { self.0.unwrap_or_default() } } impl hyper::body::HttpBody for WrapperBody { type Data = Bytes; type Error = Infallible; fn poll_data( mut self: Pin<&mut Self>, _cx: &mut Context<'_>, ) -> Poll<Option<Result<Self::Data, Self::Error>>> { let res = self.0.take().map(|b| Ok(b.into_bytes())); Poll::Ready(res) } fn poll_trailers( self: Pin<&mut Self>, _cx: &mut Context<'_>, ) -> Poll<Result<Option<HeaderMap<HeaderValue>>, Self::Error>> { Poll::Ready(Ok(None)) } } impl<T: Into<LambdaBody>> IntoResp for T { fn into_resp(self) -> Response { Response::new(WrapperBody::from(self)) } } pub type Body = WrapperBody; pub type Request = http::Request<hyper::Body>; pub type Response = http::Response<Body>;
true
ac827f697c2732b2f83365c175844f8f5b903bca
Rust
gnzlbg/is_sorted
/src/lib.rs
UTF-8
18,133
3.375
3
[ "Apache-2.0", "MIT", "LicenseRef-scancode-unknown-license-reference" ]
permissive
//! Extends `Iterator` with three algorithms, `is_sorted`, `is_sorted_by`, and //! `is_sorted_by_key` that check whether the elements of an `Iterator` are //! sorted in `O(N)` time and `O(1)` space. //! //! To enable explicitly-vectorized implementations enable the `unstable` //! nightly-only feature and use the typed comparators: `Increasing` and //! `Decreasing`. // If the `use_std` feature is not enable, compile for `no_std`: #![cfg_attr(not(feature = "use_std"), no_std)] // If the `unstable` feature is enabled, enable nightly-only features: #![cfg_attr( feature = "unstable", feature( specialization, fn_traits, unboxed_closures, stdsimd, align_offset ) )] #[allow(unused_imports, unused_macros)] #[cfg(not(feature = "use_std"))] use core as std; use std::cmp; #[cfg(feature = "unstable")] use std::{arch, mem, slice}; use cmp::Ordering; #[cfg(feature = "unstable")] mod ord; #[cfg(feature = "unstable")] pub use self::ord::*; #[cfg(feature = "unstable")] #[macro_use] mod macros; #[cfg(feature = "unstable")] mod signed; #[cfg(feature = "unstable")] mod unsigned; #[cfg(feature = "unstable")] mod floats; /// Extends `Iterator` with `is_sorted`, `is_sorted_by`, and /// `is_sorted_by_key`. pub trait IsSorted: Iterator { /// Returns `true` if the elements of the iterator are sorted in increasing /// order according to `<Self::Item as PartialOrd>::partial_cmp`. /// /// ``` /// # use is_sorted::IsSorted; /// let v = vec![0, 1, 2, 3]; /// assert!(IsSorted::is_sorted(&mut v.iter())); /// /// let v = vec![0, 1, 2, -1]; /// assert!(!IsSorted::is_sorted(&mut v.iter())); /// ``` #[inline] fn is_sorted(&mut self) -> bool where Self: Sized, Self::Item: PartialOrd, { #[cfg(feature = "unstable")] { self.is_sorted_by(Increasing) } #[cfg(not(feature = "unstable"))] { self.is_sorted_by(<Self::Item as PartialOrd>::partial_cmp) } } /// Returns `true` if the elements of the iterator /// are sorted according to the `compare` function. /// /// ``` /// # use std::cmp::Ordering; /// # use is_sorted::IsSorted; /// // Is an iterator sorted in decreasing order? /// fn decr<T: PartialOrd>(a: &T, b: &T) -> Option<Ordering> { /// a.partial_cmp(b).map(|v| v.reverse()) /// } /// /// let v = vec![3, 2, 1, 0]; /// assert!(IsSorted::is_sorted_by(&mut v.iter(), decr)); /// /// let v = vec![3, 2, 1, 4]; /// assert!(!IsSorted::is_sorted_by(&mut v.iter(), decr)); /// ``` #[inline] fn is_sorted_by<F>(&mut self, compare: F) -> bool where Self: Sized, F: FnMut(&Self::Item, &Self::Item) -> Option<Ordering>, { is_sorted_by_impl(self, compare) } /// Returns `true` if the elements of the iterator /// are sorted according to the `key` extraction function. /// /// ``` /// # use is_sorted::IsSorted; /// let v = vec![0_i32, -1, 2, -3]; /// assert!(IsSorted::is_sorted_by_key(&mut v.iter(), |v| v.abs())); /// /// let v = vec![0_i32, -1, 2, 0]; /// assert!(!IsSorted::is_sorted_by_key(&mut v.iter(), |v| v.abs())); /// ``` #[inline] fn is_sorted_by_key<F, B>(&mut self, mut key: F) -> bool where Self: Sized, B: PartialOrd, F: FnMut(&Self::Item) -> B, { IsSorted::is_sorted(&mut self.map(|v| key(&v))) } /// Returns the first unsorted pair of items in the iterator and its tail. 
/// /// ``` /// # use is_sorted::IsSorted; /// let v: &[i32] = &[0, 1, 2, 3, 4, 1, 2, 3]; /// let (first, tail) = v.iter().is_sorted_until_by(|a, b| a.partial_cmp(b)); /// assert_eq!(first, Some((&4, &1))); /// assert_eq!(tail.as_slice(), &[2, 3]); /// ``` #[inline] fn is_sorted_until_by<F>( self, compare: F, ) -> (Option<(Self::Item, Self::Item)>, Self) where Self: Sized, F: FnMut(&Self::Item, &Self::Item) -> Option<Ordering>, { is_sorted_until_by_impl(self, compare) } } // Blanket implementation for all types that implement `Iterator`: impl<I: Iterator> IsSorted for I {} // This function dispatch to the appropriate `is_sorted_by` implementation. #[inline] fn is_sorted_by_impl<I, F>(iter: &mut I, compare: F) -> bool where I: Iterator, F: FnMut(&I::Item, &I::Item) -> Option<Ordering>, { <I as IsSortedBy<F>>::is_sorted_by(iter, compare) } // This trait is used to provide specialized implementations of `is_sorted_by` // for different (Iterator,Cmp) pairs: trait IsSortedBy<F>: Iterator where F: FnMut(&Self::Item, &Self::Item) -> Option<Ordering>, { fn is_sorted_by(&mut self, compare: F) -> bool; } // This blanket implementation acts as the fall-back, and just forwards to the // scalar implementation of the algorithm. impl<I, F> IsSortedBy<F> for I where I: Iterator, F: FnMut(&I::Item, &I::Item) -> Option<Ordering>, { #[inline] #[cfg(feature = "unstable")] default fn is_sorted_by(&mut self, compare: F) -> bool { is_sorted_by_scalar_impl(self, compare) } #[inline] #[cfg(not(feature = "unstable"))] fn is_sorted_by(&mut self, compare: F) -> bool { is_sorted_by_scalar_impl(self, compare) } } /// Scalar `is_sorted_by` implementation. /// /// It just forwards to `Iterator::all`. #[inline] fn is_sorted_by_scalar_impl<I, F>(iter: &mut I, mut compare: F) -> bool where I: Iterator, F: FnMut(&I::Item, &I::Item) -> Option<Ordering>, { let first = iter.next(); if let Some(mut first) = first { return iter.all(|second| { if let Some(ord) = compare(&first, &second) { if ord != Ordering::Greater { first = second; return true; } } false }); } true } /// Scalar `is_sorted_by` implementation for slices. /// /// Avoids bound checks. #[cfg(feature = "unstable")] #[inline] fn is_sorted_by_scalar_slice_impl<'a, T, F>( iter: &mut slice::Iter<'a, T>, mut compare: F, ) -> bool where T: PartialOrd, F: FnMut(&&'a T, &&'a T) -> Option<Ordering>, { let s = iter.as_slice(); if s.len() < 2 { return true; } let mut first = unsafe { s.get_unchecked(0) }; for i in 1..s.len() { let second = unsafe { s.get_unchecked(i) }; if let Some(ord) = compare(&first, &second) { if ord != Ordering::Greater { first = second; continue; } } return false; } true } // This function dispatch to the appropriate `is_sorted_until_by` // implementation. #[inline] fn is_sorted_until_by_impl<I, F>( iter: I, compare: F, ) -> (Option<(I::Item, I::Item)>, I) where I: Iterator, F: FnMut(&I::Item, &I::Item) -> Option<Ordering>, { <I as IsSortedUntilBy<F>>::is_sorted_until_by(iter, compare) } // This trait is used to provide specialized implementations of // `is_sorted_until_by` for different (Iterator,Cmp) pairs: trait IsSortedUntilBy<F>: Iterator where F: FnMut(&Self::Item, &Self::Item) -> Option<Ordering>, { fn is_sorted_until_by( self, compare: F, ) -> (Option<(Self::Item, Self::Item)>, Self); } // This blanket implementation acts as the fall-back, and just forwards to the // scalar implementation of the algorithm. 
impl<I, F> IsSortedUntilBy<F> for I where I: Iterator, F: FnMut(&I::Item, &I::Item) -> Option<Ordering>, { #[inline] #[cfg(feature = "unstable")] default fn is_sorted_until_by( self, compare: F, ) -> (Option<(Self::Item, Self::Item)>, Self) { is_sorted_until_by_scalar_impl(self, compare) } #[inline] #[cfg(not(feature = "unstable"))] fn is_sorted_until_by( self, compare: F, ) -> (Option<(Self::Item, Self::Item)>, Self) { is_sorted_until_by_scalar_impl(self, compare) } } /// Scalar `is_sorted_until_by` implementation. #[inline] fn is_sorted_until_by_scalar_impl<I, F>( mut iter: I, mut compare: F, ) -> (Option<(I::Item, I::Item)>, I) where I: Iterator, F: FnMut(&I::Item, &I::Item) -> Option<Ordering>, { let first = iter.next(); if let Some(mut first) = first { loop { let next = iter.next(); if let Some(next) = next { if let Some(ord) = compare(&first, &next) { if ord != Ordering::Greater { first = next; continue; } } return (Some((first, next)), iter); } return (None, iter); } } (None, iter) } /// Scalar `is_sorted_until_by` implementation for slices. /// /// Avoids bounds check. #[cfg(feature = "unstable")] #[inline] fn is_sorted_until_by_scalar_slice_impl<'a, T, F>( iter: slice::Iter<'a, T>, mut compare: F, ) -> (Option<(&'a T, &'a T)>, slice::Iter<'a, T>) where T: PartialOrd, F: FnMut(&&'a T, &&'a T) -> Option<Ordering>, { let s = iter.as_slice(); if s.len() < 2 { return (None, unsafe { s.get_unchecked(s.len()..).iter() }); } let mut first = unsafe { s.get_unchecked(0) }; for i in 0..s.len() { let second = unsafe { s.get_unchecked(i) }; if let Some(ord) = compare(&first, &second) { if ord != Ordering::Greater { first = second; continue; } } return (Some((first, second)), unsafe { s.get_unchecked((i + 1)..).iter() }); } return (None, unsafe { s.get_unchecked(s.len()..).iter() }); } pub fn is_sorted_until_by<T, F>(slice: &[T], f: F) -> usize where for<'r, 's> F: FnMut(&'r &T, &'s &T) -> Option<Ordering>, { let (boundary, tail) = IsSorted::is_sorted_until_by(slice.iter(), f); match boundary { Some(_) => { debug_assert!(tail.as_slice().len() < slice.len()); slice.len() - tail.as_slice().len() - 1 } None => { debug_assert!(tail.as_slice().is_empty()); slice.len() } } } #[cfg(feature = "unstable")] impl<'a, T, F> IsSortedBy<F> for slice::Iter<'a, T> where T: PartialOrd, F: FnMut(&&'a T, &&'a T) -> Option<Ordering>, { #[inline] default fn is_sorted_by(&mut self, compare: F) -> bool { is_sorted_by_scalar_slice_impl(self, compare) } } #[cfg(feature = "unstable")] impl<'a, T, F> IsSortedUntilBy<F> for slice::Iter<'a, T> where T: PartialOrd, F: FnMut(&&'a T, &&'a T) -> Option<Ordering>, { #[inline] default fn is_sorted_until_by( self, compare: F, ) -> (Option<(&'a T, &'a T)>, Self) { return is_sorted_until_by_scalar_slice_impl(self, compare); } } /// Adds a specialization of the IsSortedBy trait for a slice iterator. /// /// The (feature,function) pairs must be listed in order of decreasing /// preference, that is, first pair will be preferred over second pair if its /// feature is enabled. macro_rules! 
is_sorted_by_slice_iter_x86 { ($id:ident, $cmp:path : $([$feature:tt, $function:path]),*) => { #[cfg(feature = "unstable")] #[cfg(any(target_arch = "x86", target_arch = "x86_64"))] #[cfg( any( // Either we have run-time feature detection: feature = "use_std", // Or the features are enabled at compile-time any($(target_feature = $feature),*) ) )] impl<'a> IsSortedBy<$cmp> for slice::Iter<'a, $id> { #[inline] fn is_sorted_by(&mut self, compare: $cmp) -> bool { // If we don't have run-time feature detection, we use // compile-time detection. This specialization only exists if // at least one of the features is actually enabled, so we don't // need a fallback here. #[cfg(not(feature = "use_std"))] unsafe { $( #[cfg(target_feature = $feature)] { return $function(self.as_slice()) == self.as_slice().len(); } )* } #[cfg(feature = "use_std")] { $( if is_x86_feature_detected!($feature) { return unsafe { $function(self.as_slice()) == self.as_slice().len() }; } )*; // If feature detection fails use scalar code: return is_sorted_by_scalar_slice_impl(self, compare); } } } #[cfg(feature = "unstable")] #[cfg(any(target_arch = "x86", target_arch = "x86_64"))] #[cfg( any( // Either we have run-time feature detection: feature = "use_std", // Or the features are enabled at compile-time any($(target_feature = $feature),*) ) )] impl<'a> IsSortedUntilBy<$cmp> for slice::Iter<'a, $id> { #[inline] fn is_sorted_until_by(self, compare: $cmp) -> (Option<(Self::Item,Self::Item)>, Self) { // If we don't have run-time feature detection, we use // compile-time detection. This specialization only exists if // at least one of the features is actually enabled, so we don't // need a fallback here. #[cfg(not(feature = "use_std"))] unsafe { $( #[cfg(target_feature = $feature)] { // The slice values until the index i returned by // the impl $function are sorted. 
The slice &[0..j] // for j > i might be sorted as well, so we just // proceed to call the scalar implementation on it: let s = self.as_slice(); let i = $function(s); return is_sorted_until_by_scalar_slice_impl(s[i..].iter(), compare); } )* } #[cfg(feature = "use_std")] { $( if is_x86_feature_detected!($feature) { let s = self.as_slice(); let i = unsafe { $function(s) }; return is_sorted_until_by_scalar_slice_impl(s[i..].iter(), compare); } )*; // If feature detection fails use scalar code: return is_sorted_until_by_scalar_slice_impl(self, compare); } } } } } is_sorted_by_slice_iter_x86!( i64, ord::types::Increasing : ["avx2", ::signed::avx2::is_sorted_lt_i64], ["sse4.2", ::signed::sse42::is_sorted_lt_i64] ); is_sorted_by_slice_iter_x86!( i64, ord::types::Decreasing : ["avx2", ::signed::avx2::is_sorted_gt_i64], ["sse4.2", ::signed::sse42::is_sorted_gt_i64] ); is_sorted_by_slice_iter_x86!( f64, ord::types::Increasing : ["avx", ::floats::avx::is_sorted_lt_f64], ["sse4.1", ::floats::sse41::is_sorted_lt_f64] ); is_sorted_by_slice_iter_x86!( f64, ord::types::Decreasing : ["avx", ::floats::avx::is_sorted_gt_f64], ["sse4.1", ::floats::sse41::is_sorted_gt_f64] ); is_sorted_by_slice_iter_x86!( i32, ord::types::Increasing : ["avx2", ::signed::avx2::is_sorted_lt_i32], ["sse4.1", ::signed::sse41::is_sorted_lt_i32] ); is_sorted_by_slice_iter_x86!( i32, ord::types::Decreasing : ["avx2", ::signed::avx2::is_sorted_gt_i32], ["sse4.1", ::signed::sse41::is_sorted_gt_i32] ); is_sorted_by_slice_iter_x86!( u32, ord::types::Increasing : ["avx2", ::unsigned::avx2::is_sorted_lt_u32], ["sse4.1", ::unsigned::sse41::is_sorted_lt_u32] ); is_sorted_by_slice_iter_x86!( u32, ord::types::Decreasing : ["avx2", ::unsigned::avx2::is_sorted_gt_u32], ["sse4.1", ::unsigned::sse41::is_sorted_gt_u32] ); is_sorted_by_slice_iter_x86!( f32, ord::types::Increasing : ["avx", ::floats::avx::is_sorted_lt_f32], ["sse4.1", ::floats::sse41::is_sorted_lt_f32] ); is_sorted_by_slice_iter_x86!( f32, ord::types::Decreasing : ["avx", ::floats::avx::is_sorted_gt_f32], ["sse4.1", ::floats::sse41::is_sorted_gt_f32] ); is_sorted_by_slice_iter_x86!( i16, ord::types::Increasing : ["avx2", ::signed::avx2::is_sorted_lt_i16], ["sse4.1", ::signed::sse41::is_sorted_lt_i16] ); is_sorted_by_slice_iter_x86!( i16, ord::types::Decreasing : ["avx2", ::signed::avx2::is_sorted_gt_i16], ["sse4.1", ::signed::sse41::is_sorted_gt_i16] ); is_sorted_by_slice_iter_x86!( u16, ord::types::Increasing : ["avx2", ::unsigned::avx2::is_sorted_lt_u16], ["sse4.1", ::unsigned::sse41::is_sorted_lt_u16] ); is_sorted_by_slice_iter_x86!( u16, ord::types::Decreasing : ["avx2", ::unsigned::avx2::is_sorted_gt_u16], ["sse4.1", ::unsigned::sse41::is_sorted_gt_u16] ); is_sorted_by_slice_iter_x86!( i8, ord::types::Increasing : ["avx2", ::signed::avx2::is_sorted_lt_i8], ["sse4.1", ::signed::sse41::is_sorted_lt_i8] ); is_sorted_by_slice_iter_x86!( i8, ord::types::Decreasing : ["avx2", ::signed::avx2::is_sorted_gt_i8], ["sse4.1", ::signed::sse41::is_sorted_gt_i8] ); is_sorted_by_slice_iter_x86!( u8, ord::types::Increasing : ["avx2", ::unsigned::avx2::is_sorted_lt_u8], ["sse4.1", ::unsigned::sse41::is_sorted_lt_u8] ); is_sorted_by_slice_iter_x86!( u8, ord::types::Decreasing : ["avx2", ::unsigned::avx2::is_sorted_gt_u8], ["sse4.1", ::unsigned::sse41::is_sorted_gt_u8] );
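
// --- Editor's note: illustrative sketch, not part of the original crate. ---
// How the public API above is typically exercised: `IsSorted::is_sorted` on a
// slice iterator (which may dispatch to a SIMD specialization on x86 when the
// `unstable` feature is enabled) and the free function `is_sorted_until_by`,
// which returns the index at which the sorted prefix ends.
#[cfg(test)]
mod editor_usage_sketch {
    use super::{is_sorted_until_by, IsSorted};

    #[test]
    fn sketch() {
        let sorted = [1_i32, 2, 3, 4];
        let unsorted = [1_i32, 3, 2, 4];
        assert!(IsSorted::is_sorted(&mut sorted.iter()));
        assert!(!IsSorted::is_sorted(&mut unsorted.iter()));
        // `unsorted[..2]` is sorted; the element at index 2 breaks the order.
        assert_eq!(is_sorted_until_by(&unsorted, |a, b| a.partial_cmp(b)), 2);
    }
}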
true
4c60d0bfab5ed63d064c3d28694de52d5f245011
Rust
rust-lang/rust
/tests/ui/did_you_mean/issue-114112.rs
UTF-8
203
2.765625
3
[ "Apache-2.0", "LLVM-exception", "NCSA", "BSD-2-Clause", "LicenseRef-scancode-unicode", "MIT", "LicenseRef-scancode-other-permissive" ]
permissive
enum E<T> { A(T) }

fn main() {
    match E::<i32>::A(1) {
        E<i32>::A(v) => { //~ ERROR generic args in patterns require the turbofish syntax
            println!("{v:?}");
        },
    }
}
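
// --- Editor's note: illustrative sketch, not part of the original test. ---
// The test above expects rustc to reject `E<i32>::A(v)` in a pattern. The
// accepted spelling uses the turbofish on the pattern path, exactly as on the
// scrutinee side:
fn accepted() {
    match E::<i32>::A(1) {
        E::<i32>::A(v) => println!("{v:?}"),
    }
}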
true
300c0893c18fef97990d2bf22f0b5e3d95f2f2db
Rust
kb10uy/derky
/weavy_crab/src/parser.rs
UTF-8
15,568
2.828125
3
[ "Apache-2.0" ]
permissive
use crate::{ mtl::{Material, MaterialProperty}, obj::{FaceIndexPair, Group, Object}, Error, Result, WavefrontObj, }; use std::{ collections::HashMap, io::{prelude::*, BufReader}, mem::{replace, take}, path::{Path, PathBuf}, str::FromStr, }; use log::warn; use ultraviolet::{Vec2, Vec3}; /// Represents the abstract data of a line in OBJ file. #[derive(Debug, Clone, PartialEq)] enum ObjCommand { /// `mtllib` MaterialLibrary(Box<Path>), /// `usemtl` UseMaterial(Box<str>), /// `o` Object(Option<Box<str>>), /// `g` Group(Option<Box<str>>), /// `v` Vertex(Vec3), /// `vt` VertexUv(Vec2), /// `vn` VertexNormal(Vec3), /// `f` Face(Box<[FaceIndexPair]>), /// Any other unknown keyword Unknown(Box<str>, Box<[Box<str>]>), } /// Represents the abstract data of a line in MTL file. #[derive(Debug, Clone, PartialEq)] enum MtlCommand { /// `newmtl` NewMaterial(Box<str>), /// Integer property Integer(Box<str>, u32), /// Float property Float(Box<str>, f32), /// Vector property Vector(Box<str>, Vec3), /// Path property Path(Box<str>, Box<Path>), /// Any other unknown keyword Unknown(Box<str>, Box<[Box<str>]>), } /// Represents the parser of OBJ/MTL. pub struct Parser<C, R> { include_function: Box<dyn FnMut(&Path, &C) -> Result<R>>, } impl<C, R: Read> Parser<C, R> { /// Creates an instance of `Parser`. /// # Parameters /// * `include_function` /// - An resolver closure/function for MTL file /// - When detects `mtllib` command, it tries to resolve the path of /// MTL file. The parser calls this resolver with detected path and context object, /// so you can return any `Read` instance or error. pub fn new(include_function: impl FnMut(&Path, &C) -> Result<R> + 'static) -> Parser<C, R> { Parser { include_function: Box::new(include_function), } } /// Parses the OBJ file. pub fn parse(&mut self, reader: impl Read, context: C) -> Result<WavefrontObj> { let mut reader = BufReader::new(reader); let mut line_buffer = String::with_capacity(1024); self.parse_impl(context, move || { loop { line_buffer.clear(); let read_size = reader.read_line(&mut line_buffer)?; if read_size == 0 { return Ok(None); } let trimmed = line_buffer.trim(); if trimmed == "" || trimmed.starts_with('#') { continue; } break; } let mut elements = line_buffer.trim().split_whitespace(); let keyword = elements .next() .expect("Each line should have at least one element"); let data: Vec<&str> = elements.collect(); let command = parse_obj_line(keyword, &data)?; Ok(Some(command)) }) } #[allow(unused_assignments)] fn parse_impl( &mut self, context: C, mut fetch_line: impl FnMut() -> Result<Option<ObjCommand>>, ) -> Result<WavefrontObj> { let mut materials = Default::default(); let mut current_material = None; let mut objects = vec![]; let mut object_name = Default::default(); let mut groups = vec![]; let mut group_name = Default::default(); let mut vertices = vec![]; let mut uvs = vec![]; let mut normals = vec![]; let mut faces = vec![]; let mut vo = 0; let mut to = 0; let mut no = 0; macro_rules! commit_group { ($n: expr) => { vo += vertices.len(); to += uvs.len(); no += normals.len(); let group = Group { name: replace(&mut group_name, $n), vertices: take(&mut vertices).into_boxed_slice(), texture_uvs: take(&mut uvs).into_boxed_slice(), normals: take(&mut normals).into_boxed_slice(), face_index_pairs: take(&mut faces).into_boxed_slice(), }; if group.face_index_pairs.len() > 0 { groups.push(group); } }; } macro_rules! 
commit_object { ($n: expr) => { let object = Object { name: replace(&mut object_name, $n), groups: take(&mut groups).into_boxed_slice(), }; if object.groups.len() > 0 { objects.push(object); } }; } while let Some(command) = fetch_line()? { match command { // mtllib ObjCommand::MaterialLibrary(path) => { let mtl_reader = (self.include_function)(&path, &context)?; materials = self.parse_mtl(mtl_reader)?; } // o ObjCommand::Object(name) => { commit_group!(None); commit_object!(name); } //g ObjCommand::Group(name) => { commit_group!(name); } // v ObjCommand::Vertex(vertex) => { vertices.push(vertex); } // vt ObjCommand::VertexUv(uv) => { uvs.push(uv); } // vn ObjCommand::VertexNormal(normal) => { normals.push(normal); } // f ObjCommand::Face(face) => { // TODO: チェックする let mut adjusted_face = vec![]; for FaceIndexPair(raw_v, raw_t, raw_n) in face.into_vec() { let adjusted_v = raw_v - vo; let adjusted_t = raw_t.map(|i| i - to); let adjusted_n = raw_n.map(|i| i - no); adjusted_face.push(FaceIndexPair(adjusted_v, adjusted_t, adjusted_n)) } faces.push((adjusted_face.into_boxed_slice(), current_material)); } // usemtl ObjCommand::UseMaterial(material_name) => { current_material = materials .iter() .position(|m| m.name() == &material_name[..]); } // unknown ObjCommand::Unknown(k, _) => { warn!("Unprocessable command: {:?}", k); } } } commit_group!(None); commit_object!(None); Ok(WavefrontObj { materials, objects: objects.into_boxed_slice(), }) } /// Parses MTL file. /// The reader will be wrapped with `BufReader`, so you don't have to /// do so. fn parse_mtl(&self, reader: impl Read) -> Result<Box<[Material]>> { let mut materials = vec![]; let mut properties = HashMap::new(); let mut name = String::new().into_boxed_str(); let mut reader = BufReader::new(reader); let mut line_buffer = String::with_capacity(1024); loop { line_buffer.clear(); let read_size = reader.read_line(&mut line_buffer)?; if read_size == 0 { break; } let trimmed = line_buffer.trim(); if trimmed == "" || trimmed.starts_with('#') { continue; } let mut elements = line_buffer.trim().split_whitespace(); let keyword = elements .next() .expect("Each line should have at least one element"); let data: Vec<&str> = elements.collect(); let command = parse_mtl_line(keyword, &data)?; match command { MtlCommand::NewMaterial(next_name) => { if !properties.is_empty() { let material = Material { name, properties }; materials.push(material); } properties = HashMap::new(); name = next_name; } MtlCommand::Vector(n, v) => { properties.insert(n.into(), MaterialProperty::Vector(v)); } MtlCommand::Float(n, v) => { properties.insert(n.into(), MaterialProperty::Float(v)); } MtlCommand::Integer(n, v) => { properties.insert(n.into(), MaterialProperty::Integer(v)); } MtlCommand::Path(n, v) => { properties.insert(n.into(), MaterialProperty::Path(v)); } MtlCommand::Unknown(keyword, _) => { warn!("Unsupported MTL keyword: {}", keyword); } } } let last_material = Material { name, properties }; materials.push(last_material); Ok(materials.into_boxed_slice()) } } /// Parses a line of OBJ file. 
fn parse_obj_line(keyword: &str, data: &[&str]) -> Result<ObjCommand> { let value = match keyword { "mtllib" => { let value = data.get(0).unwrap_or(&"").replace("\\\\", "\\"); let filename = PathBuf::from_str(&value).map_err(|_| Error::PathNotFound(value))?; ObjCommand::MaterialLibrary(filename.into_boxed_path()) } "usemtl" => { let material = data.get(0).ok_or(Error::NotEnoughData { expected: 1, found: 0, })?; ObjCommand::UseMaterial(material.to_string().into_boxed_str()) } "o" => { let name = data.get(0).map(|name| name.to_string().into_boxed_str()); ObjCommand::Object(name) } "g" => { let name = data.get(0).map(|name| name.to_string().into_boxed_str()); ObjCommand::Group(name) } "v" => { let value = take_vec3(data)?; ObjCommand::Vertex(value) } "vt" => { let value = take_vec2(data)?; ObjCommand::VertexUv(value) } "vn" => { let value = take_vec3(data)?; ObjCommand::VertexNormal(value) } "f" => { let face = parse_face(data)?; ObjCommand::Face(face) } _ => { let owned_data: Vec<_> = data .iter() .map(|s| s.to_string().into_boxed_str()) .collect(); ObjCommand::Unknown(keyword.into(), owned_data.into_boxed_slice()) } }; Ok(value) } /// Parses a line of MTL file. fn parse_mtl_line(keyword: &str, data: &[&str]) -> Result<MtlCommand> { let value = match keyword { "newmtl" => { let name = data.get(0).unwrap_or(&"").to_string(); MtlCommand::NewMaterial(name.into_boxed_str()) } "illum" => { let value = take_single(data)?; MtlCommand::Integer(keyword.into(), value) } k if k.starts_with("K") => { let value = take_vec3(data)?; MtlCommand::Vector(keyword.into(), value) } k if k.starts_with("N") => { let value = take_single(data)?; MtlCommand::Float(keyword.into(), value) } k if k.starts_with("map_") => { let value = data.get(0).unwrap_or(&"").replace("\\\\", "\\"); let value = PathBuf::from_str(&value).map_err(|_| Error::PathNotFound(value))?; MtlCommand::Path(keyword.into(), value.into_boxed_path()) } _ => { let owned_data: Vec<_> = data .iter() .map(|s| s.to_string().into_boxed_str()) .collect(); MtlCommand::Unknown(keyword.into(), owned_data.into_boxed_slice()) } }; Ok(value) } /// Parses a `f` command. fn parse_face(vertices: impl IntoIterator<Item = impl AsRef<str>>) -> Result<Box<[FaceIndexPair]>> { let not_enough = |c| Error::NotEnoughData { expected: 3, found: c, }; let mut index_pairs = vec![]; for vertex in vertices { let indices_str = vertex.as_ref().split('/'); let mut indices = indices_str.map(|s| { if s != "" { Some(s.parse::<usize>()) } else { None } }); let vertex_index = match indices.next() { Some(Some(Ok(v))) => v - 1, Some(Some(Err(_))) => return Err(Error::ParseError), Some(None) => return Err(Error::InvalidFaceVertex), None => return Err(not_enough(0)), }; let uv_index = match indices.next() { Some(Some(Ok(v))) => Some(v - 1), Some(Some(Err(_))) => return Err(Error::ParseError), Some(None) => None, None => None, }; let normal_index = match indices.next() { Some(Some(Ok(v))) => Some(v - 1), Some(Some(Err(_))) => return Err(Error::ParseError), Some(None) => None, None => None, }; index_pairs.push(FaceIndexPair(vertex_index, uv_index, normal_index)); } Ok(index_pairs.into_boxed_slice()) } /// Consumes the iterator and parses the first element. pub(crate) fn take_single<T: FromStr>(it: impl IntoIterator<Item = impl AsRef<str>>) -> Result<T> { let mut it = it.into_iter(); let first = it.next().ok_or_else(|| Error::NotEnoughData { found: 0, expected: 1, })?; let value = first.as_ref().parse().map_err(|_| Error::ParseError)?; Ok(value) } /// Consumes the iterator and parses into `Vec2`. 
pub(crate) fn take_vec2(it: impl IntoIterator<Item = impl AsRef<str>>) -> Result<Vec2> { let mut it = it.into_iter(); let first = it .next() .ok_or_else(|| Error::NotEnoughData { found: 0, expected: 2, }) .and_then(|s| s.as_ref().parse().map_err(|_| Error::ParseError))?; let second = it .next() .ok_or_else(|| Error::NotEnoughData { found: 1, expected: 2, }) .and_then(|s| s.as_ref().parse().map_err(|_| Error::ParseError))?; Ok(Vec2::new(first, second)) } /// Consumes the iterator and parses into `Vec3`. pub(crate) fn take_vec3(it: impl IntoIterator<Item = impl AsRef<str>>) -> Result<Vec3> { let mut it = it.into_iter(); let first = it .next() .ok_or_else(|| Error::NotEnoughData { found: 0, expected: 2, }) .and_then(|s| s.as_ref().parse().map_err(|_| Error::ParseError))?; let second = it .next() .ok_or_else(|| Error::NotEnoughData { found: 0, expected: 2, }) .and_then(|s| s.as_ref().parse().map_err(|_| Error::ParseError))?; let third = it .next() .ok_or_else(|| Error::NotEnoughData { found: 0, expected: 2, }) .and_then(|s| s.as_ref().parse().map_err(|_| Error::ParseError))?; Ok(Vec3::new(first, second, third)) }
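
// --- Editor's note: illustrative sketch, not part of the original file. ---
// Rough shape of how `Parser` might be driven. `parse_obj_sketch` is a
// hypothetical helper: the resolver closure here simply refuses `mtllib`
// requests by reusing `Error::PathNotFound`, and `std::io::Empty` only pins
// down the reader type `R`; a real resolver would open the MTL file relative
// to a base directory carried in the context value.
fn parse_obj_sketch(obj_source: &str) -> Result<WavefrontObj> {
    let mut parser = Parser::new(|path: &Path, _ctx: &()| -> Result<std::io::Empty> {
        Err(Error::PathNotFound(path.display().to_string()))
    });
    parser.parse(std::io::Cursor::new(obj_source.as_bytes()), ())
}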
true
ad04e7514ff6ba5b03db0ed2b196354e15d51980
Rust
crowlKats/devcode
/src/renderer/code_view/code.rs
UTF-8
7,314
2.875
3
[ "MIT" ]
permissive
use super::super::input::{max_line_length, Cursor}; use super::super::rectangle::Rectangle; use crate::renderer::Dimensions; use std::cell::RefCell; use std::rc::Rc; use wgpu_glyph::ab_glyph::FontArc; use wgpu_glyph::{GlyphPositioner, Layout, Section, SectionGeometry, Text}; use winit::dpi::{PhysicalPosition, PhysicalSize}; use winit::event::VirtualKeyCode; pub struct Code { font: FontArc, font_height: f32, text: Rc<RefCell<ropey::Rope>>, scroll_offset: PhysicalPosition<f64>, cursor: Cursor, max_line_length: f32, pub dimensions: Dimensions, highlight_config: Option<super::highlight::Config>, } impl Code { fn generate_glyph_text<'r>( &self, text: &'r ropey::Rope, start_line: usize, end_line: usize, ) -> Vec<Text<'r>> { let start_char = text.line_to_char(start_line); let end_char = text.line_to_char(end_line); if let Some(config) = &self.highlight_config { config .highlights .iter() .skip_while(|(_, end, _)| end <= &start_char) .take_while(|(_, end, _)| end <= &end_char) .flat_map(|(start, end, name)| { text .slice(start.max(&start_char)..end.min(&end_char)) .chunks() .map(move |c| { Text::new(c) .with_color( name.map(|n| n.color()).unwrap_or([0.9, 0.9, 0.9, 1.0]), ) .with_scale(self.font_height) }) }) .collect() } else { text .lines_at(start_line) .take(start_line - end_line) .flat_map(|line| { line.chunks().map(|text| { Text::new(text) .with_color([0.9, 0.9, 0.9, 1.0]) .with_scale(self.font_height) }) }) .collect() } } pub fn new( device: &wgpu::Device, screen_size: PhysicalSize<f32>, font: FontArc, font_height: f32, dimensions: Dimensions, text: Rc<RefCell<ropey::Rope>>, highlight_config: Option<super::highlight::Config>, ) -> Self { let cursor = Cursor::new( device, screen_size, Dimensions { width: 4.0, height: font_height, ..dimensions }, [0.68, 0.28, 0.26], Some(dimensions.into()), ); let max_line_length = max_line_length( text.borrow().lines().map(|s| s.to_string()), font.clone(), font_height, ); Self { font, font_height, text, scroll_offset: PhysicalPosition { x: 0.0, y: 0.0 }, cursor, max_line_length, dimensions, highlight_config, } } } impl super::super::input::TextInput for Code { fn input_special( &mut self, screen_size: PhysicalSize<f32>, key: VirtualKeyCode, ) { super::super::input::input_special( screen_size, key, &mut self.text.borrow_mut(), &mut self.cursor, self.font.clone(), self.font_height, PhysicalPosition { x: self.dimensions.x, y: 0.0, }, self.scroll_offset.cast(), ); // TODO: remove, shouldnt generate highglights when moving cursor around if let Some(config) = &mut self.highlight_config { config.generate(&self.text.borrow()); } } fn input_char(&mut self, screen_size: PhysicalSize<f32>, ch: char) { self.max_line_length = super::super::input::input_char( screen_size, ch, &mut self.text.borrow_mut(), &mut self.cursor, self.font.clone(), self.font_height, PhysicalPosition { x: self.dimensions.x, y: 0.0, }, self.scroll_offset.cast(), ); if let Some(config) = &mut self.highlight_config { config.generate(&self.text.borrow()); } } } impl super::super::RenderElement for Code { fn resize(&mut self, screen_size: PhysicalSize<f32>) { self.dimensions.width = screen_size.width - self.dimensions.x; self.cursor.rect.resize( screen_size.cast(), Dimensions { y: self.font_height - (self.cursor.row as f32 * self.font_height), ..self.cursor.rect.dimensions }, ); self.cursor.rect.region = Some(self.dimensions.into()); } fn scroll( &mut self, offset: PhysicalPosition<f64>, screen_size: PhysicalSize<f32>, ) { if offset.x.abs() > offset.y.abs() { self.scroll_offset.x = (self.scroll_offset.x 
- offset.x) .max((screen_size.width - self.max_line_length) as f64) // TODO .min(0.0); } else { self.scroll_offset.y = (self.scroll_offset.y + offset.y).min(0.0).max( -((self.text.borrow().len_lines() - 3) as f32 * self.font_height) as f64, ); } self.cursor.rect.resize( screen_size, Dimensions { x: self.dimensions.x + self.scroll_offset.x as f32 + self.cursor.x_offset, y: self.dimensions.y + self.scroll_offset.y as f32 + (self.cursor.row as f32 * self.font_height), ..self.cursor.rect.dimensions }, ); } fn click( &mut self, position: PhysicalPosition<f64>, _screen_size: PhysicalSize<f32>, ) { let line = ((position.y - self.scroll_offset.y) / self.font_height as f64) .floor() as usize; let layout = Layout::default_wrap(); let text = self.text.borrow(); let text_line = text.line(line); let string = text_line.to_string(); let section_glyphs = &layout.calculate_glyphs( &[self.font.clone()], &SectionGeometry { ..Default::default() }, &[Text::new(&string).with_scale(self.font_height)], ); let mut c = 0; for section_glyph in section_glyphs { c += 1; self.cursor.x_offset = section_glyph.glyph.position.x; if (position.x as f32) < section_glyph.glyph.position.x { c -= 1; break; } } self.cursor.row = line; self.cursor.column = c; } fn redraw( &mut self, glyph_brush: &mut wgpu_glyph::GlyphBrush<()>, device: &wgpu::Device, staging_belt: &mut wgpu::util::StagingBelt, encoder: &mut wgpu::CommandEncoder, target: &wgpu::TextureView, size: PhysicalSize<u32>, ) { let upper_bound = ((-self.scroll_offset.y) / self.font_height as f64).floor() as usize; let lower_bound = (upper_bound + (self.dimensions.height / self.font_height).ceil() as usize) .min(self.text.borrow().len_lines()); let text = self.text.borrow(); glyph_brush.queue(Section { screen_position: ( self.dimensions.x + self.scroll_offset.x as f32, -(((-self.scroll_offset.y as f32) % self.font_height) - self.dimensions.y), ), text: self.generate_glyph_text(&text, upper_bound, lower_bound), ..Section::default() }); glyph_brush .draw_queued_with_transform_and_scissoring( device, staging_belt, encoder, target, wgpu_glyph::orthographic_projection(size.width, size.height), self.dimensions.into(), ) .unwrap(); } fn get_rects(&self) -> Vec<&Rectangle> { vec![&self.cursor.rect] } fn get_elements(&mut self) -> Vec<&mut dyn super::super::RenderElement> { vec![] } fn get_dimensions(&self) -> Dimensions { self.dimensions } }
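
// --- Editor's note: illustrative sketch, not part of the original file. ---
// The visible-line window computed in `redraw` above, factored into a pure
// helper for clarity. The arguments mirror the fields used there: a
// non-positive vertical scroll offset, the widget height, the line height,
// and the rope's total line count.
fn visible_line_range(
    scroll_offset_y: f64,
    view_height: f32,
    font_height: f32,
    total_lines: usize,
) -> (usize, usize) {
    // Index of the first line whose slot intersects the top of the viewport.
    let upper = ((-scroll_offset_y) / font_height as f64).floor() as usize;
    // One past the last line that can fit in the viewport, clamped to the text.
    let lower = (upper + (view_height / font_height).ceil() as usize).min(total_lines);
    (upper, lower)
}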
true
187855326dec3df4d98462fd3b28a2978b801142
Rust
qryxip/atcoder
/dwacon5th-prelims/src/bin/b.rs
UTF-8
984
2.578125
3
[]
no_license
use proconio::input;
#[allow(unused_imports)]
use proconio::marker::*;
#[allow(unused_imports)]
use std::cmp::*;
#[allow(unused_imports)]
use std::collections::*;
#[allow(unused_imports)]
use std::f64::consts::*;

#[allow(unused)]
const INF: usize = std::usize::MAX / 4;
#[allow(unused)]
const M: usize = 1000000007;

fn main() {
    input! {
        n: usize,
        k: usize,
        a: [usize; n],
    }

    let mut acc = vec![0; n + 1];
    for i in 0..n {
        acc[i + 1] = acc[i] + a[i];
    }
    let mut b = vec![];
    for i in 1..=n {
        for j in 0..i {
            b.push(acc[i] - acc[j]);
        }
    }
    b.sort();
    let mut count = vec![0; 60];
    let mut result = 0;
    for &bj in &b {
        for i in 0..count.len() {
            if 1 << i & bj >= 1 {
                count[i] += 1;
            }
        }
        if (0..count.len()).all(|i| 1 << i & bj == 0 || count[i] >= k) {
            result = bj;
        }
    }
    println!("{}", result);
}
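
// --- Editor's note: illustrative sketch, not part of the original solution. ---
// `acc` above is a prefix-sum table: acc[i] = a[0] + ... + a[i-1], so
// acc[i] - acc[j] (for j < i) is the sum of the subarray a[j..i]. That is the
// identity the nested loops rely on when `b` enumerates every contiguous
// subarray sum.
#[test]
fn prefix_sum_identity_sketch() {
    let a = [3usize, 1, 4, 1, 5];
    let mut acc = vec![0; a.len() + 1];
    for i in 0..a.len() {
        acc[i + 1] = acc[i] + a[i];
    }
    // Sum of a[1..4] = 1 + 4 + 1 = 6.
    assert_eq!(acc[4] - acc[1], a[1..4].iter().sum::<usize>());
}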
true
4d869a2a359ce8595fbbcf64c60d0fbe5a569341
Rust
skrap/aoc_rust_2018
/src/bin/13a.rs
UTF-8
4,864
3.4375
3
[]
no_license
#[derive(Clone)]
enum Dir {
    Up,
    Down,
    Left,
    Right,
}

impl Dir {
    fn turn(&self, turn: &NextTurn) -> Dir {
        match turn {
            NextTurn::Left => {
                match self {
                    Dir::Up => Dir::Left,
                    Dir::Left => Dir::Down,
                    Dir::Down => Dir::Right,
                    Dir::Right => Dir::Up,
                }
            }
            NextTurn::Right => {
                match self {
                    Dir::Up => Dir::Right,
                    Dir::Right => Dir::Down,
                    Dir::Down => Dir::Left,
                    Dir::Left => Dir::Up,
                }
            }
            _ => {
                self.clone()
            }
        }
    }
}

#[derive(Clone)]
enum NextTurn {
    Left,
    Straight,
    Right,
}

#[derive(Clone)]
struct Cart {
    pos: (i32, i32),
    dir: Dir,
    next_turn: NextTurn,
}

impl Cart {
    fn new(pos: (i32, i32), dir: Dir) -> Cart {
        Cart { pos, dir, next_turn: NextTurn::Left }
    }

    fn play(&mut self, board: &Board) {
        match self.dir {
            Dir::Left => self.pos.0 -= 1,
            Dir::Right => self.pos.0 += 1,
            Dir::Up => self.pos.1 -= 1,
            Dir::Down => self.pos.1 += 1,
        }
        let track = &board[self.pos.1 as usize][self.pos.0 as usize];
        match track {
            Track::TurnUpL => {
                self.dir = match self.dir {
                    Dir::Up => Dir::Left,
                    Dir::Right => Dir::Down,
                    Dir::Left => Dir::Up,
                    Dir::Down => Dir::Right,
                }
            },
            Track::TurnUpR => {
                self.dir = match self.dir {
                    Dir::Up => Dir::Right,
                    Dir::Left => Dir::Down,
                    Dir::Down => Dir::Left,
                    Dir::Right => Dir::Up,
                }
            },
            Track::Cross => {
                self.dir = self.dir.turn(&self.next_turn);
                self.next_turn = match self.next_turn {
                    NextTurn::Left => {
                        NextTurn::Straight
                    },
                    NextTurn::Straight => {
                        NextTurn::Right
                    },
                    NextTurn::Right => {
                        NextTurn::Left
                    },
                };
            }
            Track::Empty => panic!("Cart derailed onto {:?}", self.pos),
            _ => ()
        }
    }
}

enum Track {
    Empty,   // ' '
    Horiz,   // -
    Vert,    // |
    TurnUpL, // \
    TurnUpR, // /
    Cross,   // +
}

type Board = Vec<Vec<Track>>;

fn main() {
    let input = include_bytes!("13_input");
    let mut board: Board = Vec::new();
    board.push(Vec::new());
    let mut carts: Vec<Cart> = Vec::new();
    let mut pos = (0, 0);
    for b in input.iter() {
        match b {
            b'\n' => {
                board.push(Vec::new());
                pos.1 += 1;
                pos.0 = 0;
            },
            _ => {
                board.last_mut().unwrap().push(match b {
                    b' ' => Track::Empty,
                    b'-' => Track::Horiz,
                    b'|' => Track::Vert,
                    b'/' => Track::TurnUpR,
                    b'\\' => Track::TurnUpL,
                    b'+' => Track::Cross,
                    b'^' => {
                        carts.push(Cart::new(pos, Dir::Up));
                        Track::Vert
                    },
                    b'>' => {
                        carts.push(Cart::new(pos, Dir::Right));
                        Track::Horiz
                    },
                    b'v' => {
                        carts.push(Cart::new(pos, Dir::Down));
                        Track::Vert
                    },
                    b'<' => {
                        carts.push(Cart::new(pos, Dir::Left));
                        Track::Horiz
                    }
                    _ => panic!("unknown byte {}", b),
                });
                pos.0 += 1;
            }
        }
    }

    while carts.len() > 1 {
        carts.sort_by(|a, b| a.pos.1.cmp(&b.pos.1).then(a.pos.0.cmp(&b.pos.0)).reverse());
        let mut next_carts: Vec<Cart> = Vec::new();
        while carts.len() > 0 {
            let mut cart = carts.pop().unwrap();
            cart.play(&mut board);
            if let Some(idx) = carts.iter().position(|c| c.pos == cart.pos) {
                println!("collision at {:?}", cart.pos);
                carts.remove(idx);
            } else if let Some(idx) = next_carts.iter().position(|c| c.pos == cart.pos) {
                println!("collision at {:?}", cart.pos);
                next_carts.remove(idx);
            } else {
                next_carts.push(cart);
            }
        }
        carts = next_carts;
    }
    println!("position of remaining cart: {:?}", carts[0].pos);
}
true
cb932a5819c5e23ca8aaf85a1e8adbb2470b3859
Rust
CNife/leetcode
/rust/finished/src/sqrtx.rs
UTF-8
246
3.265625
3
[]
no_license
pub fn my_sqrt(x: i32) -> i32 {
    (x as f64).sqrt().floor() as i32
}

#[test]
fn test() {
    let cases = vec![(0, 0), (1, 1), (4, 2), (8, 2), (2147395599, 46339)];
    for (x, expect) in cases {
        assert_eq!(my_sqrt(x), expect);
    }
}
true
f5761c5da135f52958d40729bacf2d17ba7642e6
Rust
RodionKovalenko/rustCrates
/src/mouse_clicks/click_events.rs
UTF-8
2,370
2.53125
3
[]
no_license
use enigo::*;
use std::process::Command;
use std::thread;
use std::time::Duration;
use std::time::Instant;
use webbrowser;
use rand::Rng;

pub fn simulate_click_events() {
    let mut enigo = Enigo::new();
    let mut now = Instant::now();
    let number_of_accounts: i32 = 7;
    let search_items = [
        "backpropagation alternatives",
        "how to get much money",
        "it is possible to get access to browser api?",
        "be healthy",
        "are fruits healthy",
        "what is our future?",
        "verschwörungstheorie existiert?",
        "recurrent neural network are still good?",
        "how to get a million dollar?",
        "statistics forever?",
        "why are we getting older?",
        "can we live forever?",
        "brave is the best browser?",
        "who am I?",
        "is it good to be faithful to your partner?",
        "why not give people enough money?"
    ];
    let mut rng = rand::thread_rng();
    let time = now.elapsed();
    println!("{:?}", time);

    loop {
        println!("time elapsed {:?}", time);

        if webbrowser::open("https://www.google.de/").is_ok() {
            thread::sleep(Duration::from_secs(2));
            enigo.key_click(Key::Return);
            enigo.key_sequence_parse(search_items[rng.gen_range(0, search_items.len())]);
            enigo.key_click(Key::Return);
            thread::sleep(Duration::from_secs(5));
            enigo.key_down(Key::Control);
            enigo.key_down(Key::F4);
            enigo.key_up(Key::Control);
            enigo.key_up(Key::F4);
        }

        let output = if cfg!(target_os = "windows") {
            Command::new("powershell")
                .args(&["/C", "(new-object -com shell.application).minimizeall();start myfile.bat -window Maximized"])
                .output()
                .expect("failed to execute process");
        } else {
            Command::new("sh")
                .arg("-c")
                .arg("echo hello")
                .output()
                .expect("failed to execute process");
        };

        for account in 0..number_of_accounts {
            println!("clicked at {:?}", time);
            now = Instant::now();
            thread::sleep(Duration::from_secs(2));
            enigo.mouse_move_to(1300, 730);
            enigo.mouse_down(MouseButton::Left);
            enigo.mouse_up(MouseButton::Left);
        }
        thread::sleep(Duration::from_secs(180));
    }
}
true
86f822d39139cb24bc254a8e2080dfbb9928ef33
Rust
stackcats/leetcode
/algorithms/easy/flood_fill.rs
UTF-8
925
3.046875
3
[ "MIT" ]
permissive
use std::collections::LinkedList;

impl Solution {
    pub fn flood_fill(mut image: Vec<Vec<i32>>, sr: i32, sc: i32, new_color: i32) -> Vec<Vec<i32>> {
        let mut queue = LinkedList::new();
        let mut opt = vec![vec![0; image[0].len()]; image.len()];
        queue.push_back((sr as usize, sc as usize));
        let old_color = image[sr as usize][sc as usize];
        while queue.len() > 0 {
            let (r, c) = queue.pop_front().unwrap();
            if opt[r][c] == 1 {
                continue;
            }
            opt[r][c] = 1;
            image[r][c] = new_color;
            // Wrapping subtraction keeps 0 - 1 from panicking in debug builds;
            // the wrapped (huge) index is rejected by the bounds checks below.
            let nexts = [
                (r.wrapping_sub(1), c),
                (r + 1, c),
                (r, c + 1),
                (r, c.wrapping_sub(1)),
            ];
            for n in &nexts {
                let (r, c) = *n;
                if r < image.len() && c < image[r].len() && image[r][c] == old_color {
                    queue.push_back((r, c));
                }
            }
        }
        image
    }
}
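
// --- Editor's note: illustrative sketch, not part of the original file. ---
// The canonical LeetCode example for the BFS above, assuming the usual
// harness declaration `struct Solution;` exists elsewhere. Starting at
// (1, 1), every 4-connected pixel with the original color 1 is repainted to
// 2; the bottom-right 1 stays because it is only diagonally connected.
#[test]
fn flood_fill_example_sketch() {
    let image = vec![vec![1, 1, 1], vec![1, 1, 0], vec![1, 0, 1]];
    let filled = Solution::flood_fill(image, 1, 1, 2);
    assert_eq!(filled, vec![vec![2, 2, 2], vec![2, 2, 0], vec![2, 0, 1]]);
}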
true
cad8af54498a14da9a72dba51df389867f97a3b9
Rust
theinfinitytimes/theinfinitytimes-api-rust
/src/models/user.rs
UTF-8
1,926
3.203125
3
[ "MIT" ]
permissive
use wither::Model;
use mongodb::bson::oid::ObjectId;
use serde::{Serialize, Deserialize};
use chrono::prelude::{DateTime, Utc};
use url_serde::SerdeUrl;
use bcrypt::{DEFAULT_COST, hash, verify};
use crate::theinfinitytimes_lib::{PreSaveMut};
use std::ops::Deref;

/// This is the user model, which is the base object
/// used to store user data. It is used to authenticate
/// users and grant the necessary permissions to access
/// some front-end functionality.
#[derive(Model, Serialize, Deserialize)]
#[model(collection_name="User")]
pub struct UserModel {
    #[serde(rename="_id", skip_serializing_if="Option::is_none")]
    pub id: Option<ObjectId>,
    pub givenName: String,
    pub familyName: String,
    pub age: u32,
    pub gender: String,
    #[model(index(index="asc", unique="true"))]
    /// The nickname of the user can also be
    /// used to sign in and should be unique
    pub nickname: String,
    #[model(index(index="asc", unique="true"))]
    pub email: String,
    /// Only the hash of the password is saved,
    /// stored as a string. We use bcrypt to
    /// generate the hash and, during login, compare
    /// the hash of the submitted password with the
    /// saved one
    pub userPassword: String,
    pub verifiedEmail: bool,
    pub memberSince: DateTime<Utc>,
    /// We store the profile pictures on a
    /// different server, so this is the URL pointing
    /// to that location
    pub profilePicture: SerdeUrl
}

impl PreSaveMut for UserModel {
    fn pre_save(&mut self) {
        let user = self;
        // As in the trait, a mutable reference to the object is passed in,
        // so there is no need to check whether `user` exists or is null;
        // a null pointer here would not compile in Rust.
        match hash(&user.userPassword, DEFAULT_COST) {
            Ok(v) => user.userPassword = v,
            Err(e) => panic!("{}", e)
        }
    }
}
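
// --- Editor's note: illustrative sketch, not part of the original file. ---
// Counterpart to `pre_save` above for the login path: compare a submitted
// plaintext password against the stored bcrypt hash. `check_password` is a
// hypothetical helper name, not an existing method of this project.
impl UserModel {
    pub fn check_password(&self, candidate: &str) -> bool {
        // `bcrypt::verify` re-hashes the candidate with the salt embedded in
        // the stored hash and reports whether the results match.
        verify(candidate, &self.userPassword).unwrap_or(false)
    }
}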
true
5ee090160a6a6e0384348591db2fda49aeffaf49
Rust
Iniesta8/aoc2019
/src/bin/day05.rs
UTF-8
457
2.640625
3
[]
no_license
use aoc2019::intcode::IntCodeCpu;
use std::io;

fn main() -> io::Result<()> {
    let mut code = String::new();
    io::stdin().read_line(&mut code)?;

    let mut cpu = IntCodeCpu::from_code(&code);
    cpu.input.push_back(1);
    cpu.run();
    println!("p1: {}", cpu.output.pop_back().unwrap());

    cpu = IntCodeCpu::from_code(&code);
    cpu.input.push_back(5);
    cpu.run();
    println!("p2: {}", cpu.output.pop_back().unwrap());

    Ok(())
}
true
a6b9cf8bcc5b3592388a4e64c542d3795a134789
Rust
remram44/dhstore
/src/memory_index.rs
UTF-8
18,747
3.015625
3
[]
no_license
//! Implementation of an object indexer that stores everything in memory. //! //! This loads all the objects from disk into memory. Objects added to the index //! are immediately written to disk as well. //! //! This is very inefficient and should be backed by proper database code at //! some point. use std::collections::{BTreeMap, HashMap, HashSet, VecDeque}; use std::fs::{self, File, OpenOptions}; use std::io; use std::mem::swap; use std::path::{PathBuf, Path}; use log::Level; use log::{debug, error, info, log_enabled, warn}; use crate::common::{HASH_STR_SIZE, Sort, ID, Dict, Object, ObjectData, Property, ObjectIndex}; use crate::errors::{self, Error}; use crate::serialize; /// Return value from a Policy for some object. pub enum PolicyDecision { Get, Keep, Drop, } /// A policy defines which objects are valid and which we want to keep. /// /// DHStore has a root configuration where the user defines what trees he wants /// to keep, schemas to validate those trees against, disk usage limits, etc. He /// can also set up delegations, to read in more policy objects recursively, ... /// /// A `Policy` object contains all this information for a specific place in the /// tree, and handles all the builtin, user-supplied, and recursive behaviors /// for the index. pub trait Policy { fn handle(&mut self, property: &str, object: Object) -> (PolicyDecision, Box<dyn Policy>); } /// Placeholder Policy that keeps everything. struct KeepPolicy; impl KeepPolicy { fn new() -> KeepPolicy { KeepPolicy } } impl Policy for KeepPolicy { fn handle(&mut self, property: &str, object: Object) -> (PolicyDecision, Box<dyn Policy>) { (PolicyDecision::Keep, Box::new(KeepPolicy)) } } /// Key of a reference, used in the backward reference map. /// /// A reference is a value, and can appear in both types of schema objects: in a /// dict, it is associated with a string key, and in a list, with an index. #[derive(PartialEq, Eq, Hash)] enum Backkey { /// Reference from a dict under this key. Key(String), /// Reference from a list from this index. 
Index(usize), } enum PermanodeType { Set, Single, } struct Permanode { sort: Sort, nodetype: PermanodeType, claims: BTreeMap<Property, ID>, } impl Permanode { fn index_claim(&mut self, claim: &Dict, permanode_id: &ID, claim_id: &ID) { // We require the claim to have the sort key let sort_value: &Property = match claim.get(self.sort.field()) { Some(ref prop) => prop, None => { debug!("Claim {} is invalid for permanode {}: \ missing sort key", claim_id, permanode_id); return; } }; // Currently, no validation is done; every claim is accepted // In the future, we'd have ways of checking a claim, such as public // key signatures (permanode has key, claim has signature) self.claims.insert(sort_value.clone(), claim_id.clone()); match self.nodetype { PermanodeType::Set => { // Keep the whole set of values // TODO: handle set deletion claims } PermanodeType::Single => { // Keep one value, the latest by sorting order if self.claims.len() > 1 { let mut map = BTreeMap::new(); swap(&mut self.claims, &mut map); let mut map = map.into_iter(); let (k, v) = match self.sort { Sort::Ascending(_) => map.next_back().unwrap(), Sort::Descending(_) => map.next().unwrap(), }; self.claims.insert(k, v); } } } } } fn insert_into_multimap<K: Clone + Eq + ::std::hash::Hash, V: Eq + ::std::hash::Hash>( multimap: &mut HashMap<K, HashSet<V>>, key: &K, value: V) { if let Some(set) = multimap.get_mut(key) { set.insert(value); return; } let mut set = HashSet::new(); set.insert(value); multimap.insert(key.clone(), set); } /// The in-memory index, that loads all objects from the disk on startup. pub struct MemoryIndex { /// Directory where objects are stored on disk. path: PathBuf, /// All objects, indexed by their ID. objects: HashMap<ID, Object>, /// Back references: value is all references pointing to the key. backlinks: HashMap<ID, HashSet<(Backkey, ID)>>, /// All claim objects, whether they are valid for permanode or not. claims: HashMap<ID, HashSet<ID>>, /// All permanodes, with valid associated claims. permanodes: HashMap<ID, Permanode>, root: ID, log: Option<ID>, policy: Box<dyn Policy>, } impl MemoryIndex { /// Reads all the objects from a directory into memory. pub fn open<P: AsRef<Path>>(path: P, root: ID) -> errors::Result<MemoryIndex> { let path = path.as_ref(); let mut index = MemoryIndex { path: path.to_path_buf(), objects: HashMap::new(), backlinks: HashMap::new(), claims: HashMap::new(), permanodes: HashMap::new(), root: root.clone(), log: None, policy: Box::new(KeepPolicy::new()), }; let dirlist = path.read_dir() .map_err(|e| ("Error listing objects directory", e))?; for first in dirlist { let first = first .map_err(|e| ("Error listing objects directory", e))?; let dirlist = first.path().read_dir() .map_err(|e| ("Error listing objects subdirectory", e))?; for second in dirlist { let second = second .map_err(|e| ("Error listing objects subdirectory", e))?; let filename = second.path(); // Read object let fp = File::open(filename) .map_err(|e| ("Error opening object", e))?; let object = match serialize::deserialize(fp) { Err(e) => { let mut path: PathBuf = first.file_name().into(); path.push(second.file_name()); error!("Error deserializing object: {:?}", path); return Err(("Error deserializing object", e).into()); } Ok(o) => o, }; index.insert_object_in_index(object); } } // Parse root config index.log = { let config = index.get_object(&root)? 
.ok_or(Error::CorruptedStore("Missing root object"))?; let config = match config.data { ObjectData::Dict(ref dict) => dict, _ => return Err(Error::CorruptedStore( "Root object is not a dict")), }; match config.get("log") { Some(&Property::Reference(ref id)) => { let log_obj = index.get_object(id)? .ok_or(Error::CorruptedStore("Missing log object"))?; match log_obj.data { ObjectData::Dict(_) => { debug!("Activated log: {}", id); } _ => { return Err(Error::CorruptedStore( "Log is not a permanode")); } } Some(id.clone()) } Some(_) => return Err(Error::CorruptedStore( "Log is not a reference")), None => None, } }; Ok(index) } pub fn create<'a, P: AsRef<Path>, I: Iterator<Item=&'a Object>>( path: P, objects: I) -> io::Result<()> { for object in objects { MemoryIndex::write_object(path.as_ref(), object)?; } Ok(()) } fn write_object(dir: &Path, object: &Object) -> io::Result<()> { let hashstr = object.id.str(); let mut path = dir.join(&hashstr[..4]); if !path.exists() { fs::create_dir(&path)?; } path.push(&hashstr[4..]); let mut fp = OpenOptions::new() .write(true) .create_new(true) .open(&path)?; serialize::serialize(&mut fp, object) } /// Utility to insert a new object in the store. /// /// Insert the object, indexing the back references, and parsing the object /// to handle permanodes. fn insert_object_in_index(&mut self, object: Object) { assert!(!self.objects.contains_key(&object.id)); { // Record reverse references // This is run on all values of type reference on the object, // whether it is a list or a dict let mut insert = |target: &ID, key: Backkey, source: ID| { if log_enabled!(Level::Debug) { match key { Backkey::Key(ref k) => { debug!("Reference {} -> {} ({})", source, target, k); } Backkey::Index(i) => { debug!("Reference {} -> {} ({})", source, target, i); } } } // Add backlink insert_into_multimap(&mut self.backlinks, target, (key, source)); }; // Go over the object, calling insert() above on all its values of // type reference match object.data { ObjectData::Dict(ref dict) => { for (k, v) in dict { if let Property::Reference(ref id) = *v { insert(id, Backkey::Key(k.clone()), object.id.clone()); } } } ObjectData::List(ref list) => { for (k, v) in list.into_iter().enumerate() { if let Property::Reference(ref id) = *v { insert(id, Backkey::Index(k), object.id.clone()); } } } } } // Check for special objects if let ObjectData::Dict(ref dict) = object.data { match dict.get("dhstore_kind") { Some(&Property::String(ref kind)) => match kind as &str { "permanode" => { info!("Found permanode: {}", object.id); self.index_permanode(&object); } "claim" => { info!("Found claim: {}", object.id); self.index_claim(&object); } kind => debug!("Found unknown kind {:?}", kind), }, Some(_) => { info!("Object has dhstore_kind with non-string value"); } None => {} } } // Now inserts the object self.objects.insert(object.id.clone(), object); } fn index_permanode(&mut self, permanode: &Object) { // Validate the permanode let ref id = permanode.id; let permanode = match permanode.data { ObjectData::Dict(ref d) => d, ObjectData::List(_) => { panic!("Invalid permanode {}: not a dict", id); } }; match permanode.get("random") { Some(&Property::String(ref s)) => { if s.len() != HASH_STR_SIZE { warn!("Invalid permanode {}: invalid random size {}", id, s.len()); return; } } _ => { warn!("Invalid permanode {}: missing random", id); return; } } let sort = match permanode.get("sort") { Some(&Property::String(ref s)) => match s.parse() { Ok(f) => f, Err(()) => { warn!("Invalid permanode {}: invalid sort", id); 
return; } }, _ => { warn!("Invalid permanode {}: invalid sort", id); return; } }; let nodetype = match permanode.get("type") { Some(&Property::String(ref s)) => match s as &str { "set" | "single" => PermanodeType::Set, _ => { warn!("Unknown permanode type {:?}, ignoring permanode {}", s, id); return; } }, None => PermanodeType::Single, Some(_) => { warn!("Invalid permanode {}: invalid type", id); return; } }; debug!("Permanode is well-formed, adding to index"); let mut node = Permanode { sort: sort, nodetype: nodetype, claims: BTreeMap::new() }; // Process claims if let Some(set) = self.claims.get(id) { for claim_id in set { let claim = self.objects.get(claim_id).unwrap(); let claim = match claim.data { ObjectData::Dict(ref d) => d, _ => panic!("Invalid claim {}: not a dict", claim_id), }; node.index_claim(claim, id, claim_id); } } // Insert the permanode in the index self.permanodes.insert(id.clone(), node); } fn index_claim(&mut self, claim: &Object) { // Validate the claim let id = &claim.id; let claim = match claim.data { ObjectData::Dict(ref d) => d, _ => panic!("Invalid claim {}: not a dict", id), }; let permanode = match (claim.get("node"), claim.get("value")) { (Some(&Property::Reference(ref r)), Some(&Property::Reference(_))) => r, _ => { warn!("Invalid claim {}: wrong content", id); return; } }; // Insert the claim in the index // Note that this means it is well-formed, not that it is valid; // validity needs to be checked with the permanode debug!("Claim is well-formed, adding to index"); insert_into_multimap(&mut self.claims, permanode, id.clone()); // If we have the permanode, index a valid claim if let Some(node) = self.permanodes.get_mut(permanode) { node.index_claim(claim, permanode, id); } } /// Common logic for `verify()` and `collect_garbage().` /// /// Goes over the tree of objects, checking for errors. If `collect` is /// true, unreferenced objects are deleted, and the set of referenced blobs /// is returned; else, an empty `HashSet` is returned. 
fn walk(&mut self, collect: bool) -> errors::Result<HashSet<ID>> { let mut alive = HashSet::new(); // ids let mut live_blobs = HashSet::new(); // ids let mut open = VecDeque::new(); // ids if self.objects.get(&self.root).is_none() { error!("Root is missing: {}", self.root); } else { open.push_front(self.root.clone()); } while let Some(id) = open.pop_front() { debug!("Walking, open={}, alive={}/{}, id={}", open.len(), alive.len(), self.objects.len(), id); let object = match self.objects.get(&id) { Some(o) => o, None => { info!("Don't have object {}", id); continue; } }; if alive.contains(&id) { debug!(" already alive"); continue; } alive.insert(id); let mut handle = |value: &Property| { match *value { Property::Reference(ref id) => { open.push_back(id.clone()); } Property::Blob(ref id) => { if collect { live_blobs.insert(id.clone()); } } _ => {} } }; match object.data { ObjectData::Dict(ref dict) => { debug!(" is dict, {} values", dict.len()); for v in dict.values() { handle(v); } } ObjectData::List(ref list) => { debug!(" is list, {} values", list.len()); for v in list { handle(v); } } } } info!("Found {}/{} live objects", alive.len(), self.objects.len()); if collect { let dead_objects = self.objects.keys() .filter(|id| !alive.contains(id)) .cloned() .collect::<Vec<_>>(); info!("Removing {} dead objects", dead_objects.len()); for id in dead_objects { self.objects.remove(&id); } } Ok(live_blobs) } } impl ObjectIndex for MemoryIndex { fn add(&mut self, data: ObjectData) -> errors::Result<ID> { let object = serialize::hash_object(data); let id = object.id.clone(); if !self.objects.contains_key(&id) { info!("Adding object to index: {}", id); MemoryIndex::write_object(&self.path, &object) .map_err(|e| ("Couldn't write object to disk", e))?; self.insert_object_in_index(object); } Ok(id) } fn get_object(&self, id: &ID) -> errors::Result<Option<&Object>> { Ok(self.objects.get(id)) } fn verify(&mut self) -> errors::Result<()> { self.walk(false).map(|_| ()) } fn collect_garbage(&mut self) -> errors::Result<HashSet<ID>> { self.walk(true) } }
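
// --- Editor's note: illustrative sketch, not part of the original file. ---
// Standalone illustration of the `PermanodeType::Single` rule implemented in
// `Permanode::index_claim` above: for an ascending sort, only the claim with
// the greatest sort key survives. `keep_latest` is a hypothetical helper, not
// an existing function of this crate.
fn keep_latest<K: Ord, V>(claims: &mut std::collections::BTreeMap<K, V>) {
    if claims.len() > 1 {
        // Mirror of the `next_back()` branch: drain the map and reinsert only
        // the entry with the greatest key.
        let map = std::mem::take(claims);
        if let Some((k, v)) = map.into_iter().next_back() {
            claims.insert(k, v);
        }
    }
}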
true
250971409d42faeaf6f106cf4406f2b1aca7c02b
Rust
automerge/automerge
/rust/automerge/src/storage/columns/column_specification.rs
UTF-8
7,705
3.046875
3
[ "MIT" ]
permissive
/// An implementation of column specifications as specified in [1] /// /// [1]: https://alexjg.github.io/automerge-storage-docs/#column-specifications #[derive(Eq, PartialEq, Clone, Copy)] pub(crate) struct ColumnSpec(u32); impl ColumnSpec { pub(crate) fn new(id: ColumnId, col_type: ColumnType, deflate: bool) -> Self { let mut raw = id.0 << 4; raw |= u8::from(col_type) as u32; if deflate { raw |= 0b00001000; } else { raw &= 0b11110111; } ColumnSpec(raw) } pub(crate) fn col_type(&self) -> ColumnType { self.0.to_be_bytes()[3].into() } pub(crate) fn id(&self) -> ColumnId { ColumnId(self.0 >> 4) } pub(crate) fn deflate(&self) -> bool { self.0 & 0b00001000 > 0 } pub(crate) fn deflated(&self) -> Self { Self::new(self.id(), self.col_type(), true) } pub(crate) fn inflated(&self) -> Self { Self::new(self.id(), self.col_type(), false) } pub(crate) fn normalize(&self) -> Normalized { Normalized(self.0 & 0b11110111) } } #[derive(PartialEq, PartialOrd)] pub(crate) struct Normalized(u32); impl std::fmt::Debug for ColumnSpec { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { write!( f, "ColumnSpec(id: {:?}, type: {}, deflate: {})", self.id(), self.col_type(), self.deflate() ) } } #[derive(Eq, PartialEq, Clone, Copy)] pub(crate) struct ColumnId(u32); impl ColumnId { pub(crate) const fn new(raw: u32) -> Self { ColumnId(raw) } } impl From<u32> for ColumnId { fn from(raw: u32) -> Self { Self(raw) } } impl std::fmt::Debug for ColumnId { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { self.0.fmt(f) } } /// The differente possible column types, as specified in [1] /// /// [1]: https://alexjg.github.io/automerge-storage-docs/#column-specifications #[derive(Eq, PartialEq, Clone, Copy, Debug)] pub(crate) enum ColumnType { Group, Actor, Integer, DeltaInteger, Boolean, String, ValueMetadata, Value, } impl std::fmt::Display for ColumnType { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { Self::Group => write!(f, "Group"), Self::Actor => write!(f, "Actor"), Self::Integer => write!(f, "Integer"), Self::DeltaInteger => write!(f, "DeltaInteger"), Self::Boolean => write!(f, "Boolean"), Self::String => write!(f, "String"), Self::ValueMetadata => write!(f, "ValueMetadata"), Self::Value => write!(f, "Value"), } } } impl From<u8> for ColumnType { fn from(v: u8) -> Self { let type_bits = v & 0b00000111; match type_bits { 0 => Self::Group, 1 => Self::Actor, 2 => Self::Integer, 3 => Self::DeltaInteger, 4 => Self::Boolean, 5 => Self::String, 6 => Self::ValueMetadata, 7 => Self::Value, _ => unreachable!(), } } } impl From<ColumnType> for u8 { fn from(ct: ColumnType) -> Self { match ct { ColumnType::Group => 0, ColumnType::Actor => 1, ColumnType::Integer => 2, ColumnType::DeltaInteger => 3, ColumnType::Boolean => 4, ColumnType::String => 5, ColumnType::ValueMetadata => 6, ColumnType::Value => 7, } } } impl From<u32> for ColumnSpec { fn from(raw: u32) -> Self { ColumnSpec(raw) } } impl From<ColumnSpec> for u32 { fn from(spec: ColumnSpec) -> Self { spec.0 } } impl From<[u8; 4]> for ColumnSpec { fn from(raw: [u8; 4]) -> Self { u32::from_be_bytes(raw).into() } } #[cfg(test)] mod tests { use super::*; #[test] fn column_spec_encoding() { struct Scenario { id: ColumnId, col_type: ColumnType, int_val: u32, } let scenarios = vec![ Scenario { id: ColumnId(7), col_type: ColumnType::Group, int_val: 112, }, Scenario { id: ColumnId(0), col_type: ColumnType::Actor, int_val: 1, }, Scenario { id: ColumnId(0), col_type: ColumnType::Integer, int_val: 2, }, Scenario { id: 
ColumnId(1), col_type: ColumnType::DeltaInteger, int_val: 19, }, Scenario { id: ColumnId(3), col_type: ColumnType::Boolean, int_val: 52, }, Scenario { id: ColumnId(1), col_type: ColumnType::String, int_val: 21, }, Scenario { id: ColumnId(5), col_type: ColumnType::ValueMetadata, int_val: 86, }, Scenario { id: ColumnId(5), col_type: ColumnType::Value, int_val: 87, }, ]; for (index, scenario) in scenarios.into_iter().enumerate() { let spec = ColumnSpec::new(scenario.id, scenario.col_type, false); let encoded_val = u32::from(spec); if encoded_val != scenario.int_val { panic!( "Scenario {} failed encoding: expected {} but got {}", index + 1, scenario.int_val, encoded_val ); } if spec.col_type() != scenario.col_type { panic!( "Scenario {} failed col type: expected {:?} but got {:?}", index + 1, scenario.col_type, spec.col_type() ); } if spec.deflate() { panic!( "Scenario {} failed: spec returned true for deflate, should have been false", index + 1 ); } if spec.id() != scenario.id { panic!( "Scenario {} failed id: expected {:?} but got {:?}", index + 1, scenario.id, spec.id() ); } let deflated = ColumnSpec::new(scenario.id, scenario.col_type, true); if deflated.id() != spec.id() { panic!("Scenario {} failed deflate id test", index + 1); } if deflated.col_type() != spec.col_type() { panic!("Scenario {} failed col type test", index + 1); } if !deflated.deflate() { panic!( "Scenario {} failed: when deflate bit set deflate returned false", index + 1 ); } let expected = scenario.int_val | 0b00001000; if expected != u32::from(deflated) { panic!( "Scenario {} failed deflate bit test, expected {} got {}", index + 1, expected, u32::from(deflated) ); } if deflated.normalize() != spec.normalize() { panic!("Scenario {} failed normalize test", index + 1); } } } }
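
// --- Editor's note: illustrative sketch, not part of the original file. ---
// Worked example of the bit layout encoded by `ColumnSpec::new` above: the
// column id occupies bits 4 and up, bit 3 is the deflate flag, and the low
// three bits are the column type.
#[test]
fn column_spec_bit_layout_sketch() {
    // id = 7 -> 0b0111_0000, type Group = 0, deflate clear => 112.
    let spec = ColumnSpec::new(ColumnId::new(7), ColumnType::Group, false);
    assert_eq!(u32::from(spec), 112);
    // `deflated` only flips bit 3: 112 | 0b0000_1000 = 120.
    assert_eq!(u32::from(spec.deflated()), 120);
}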
true
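The bit layout exercised by the tests above (column id in the upper 28 bits, a deflate flag at bit 3, the column type in the low 3 bits) can be checked with a short standalone sketch. The `encode` helper below is hypothetical and only mirrors the arithmetic of `ColumnSpec::new`; it is not part of the crate.

// Minimal sketch of the column-spec bit layout described above.
fn encode(id: u32, col_type: u8, deflate: bool) -> u32 {
    let mut raw = id << 4;                   // id occupies the upper 28 bits
    raw |= (col_type & 0b0000_0111) as u32;  // low 3 bits: column type
    if deflate {
        raw |= 0b0000_1000;                  // bit 3: deflate flag
    }
    raw
}

fn main() {
    // id 7, type Group (0), not deflated -> 7 << 4 = 112, matching the test table above.
    assert_eq!(encode(7, 0, false), 112);
    // Setting the deflate flag only flips bit 3.
    assert_eq!(encode(7, 0, true), 112 | 0b1000);
}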
d2b7a55f491a0446e167b2b431b544b007df61a4
Rust
pombredanne/rustfst
/rustfst/src/algorithms/lazy/cache/simple_hash_map_cache.rs
UTF-8
4,838
2.875
3
[ "MIT", "LicenseRef-scancode-unknown-license-reference", "Apache-2.0" ]
permissive
use std::collections::HashMap; use std::sync::Mutex; use crate::algorithms::lazy::cache::cache_internal_types::{CachedData, StartState}; use crate::algorithms::lazy::{CacheStatus, FstCache}; use crate::semirings::Semiring; use crate::{StateId, Trs, TrsVec, EPS_LABEL}; #[derive(Debug)] pub struct SimpleHashMapCache<W: Semiring> { // First option : has start been computed // Second option: value of the start state (possibly none) // The second element of each tuple is the number of known states. start: Mutex<CachedData<CacheStatus<StartState>>>, trs: Mutex<CachedData<HashMap<StateId, CacheTrs<W>>>>, final_weights: Mutex<CachedData<HashMap<StateId, Option<W>>>>, } #[derive(Debug, Clone)] pub struct CacheTrs<W: Semiring> { pub trs: TrsVec<W>, pub niepsilons: usize, pub noepsilons: usize, } impl<W: Semiring> SimpleHashMapCache<W> { pub fn clear(&self) { let mut data_start = self.start.lock().unwrap(); data_start.clear(); let mut data_trs = self.trs.lock().unwrap(); data_trs.clear(); let mut data_final_weights = self.final_weights.lock().unwrap(); data_final_weights.clear(); } } impl<W: Semiring> Clone for SimpleHashMapCache<W> { fn clone(&self) -> Self { Self { start: Mutex::new(self.start.lock().unwrap().clone()), trs: Mutex::new(self.trs.lock().unwrap().clone()), final_weights: Mutex::new(self.final_weights.lock().unwrap().clone()), } } } impl<W: Semiring> Default for SimpleHashMapCache<W> { fn default() -> Self { Self { start: Mutex::new(CachedData::default()), trs: Mutex::new(CachedData::default()), final_weights: Mutex::new(CachedData::default()), } } } impl<W: Semiring> FstCache<W> for SimpleHashMapCache<W> { fn get_start(&self) -> CacheStatus<Option<StateId>> { let res = self.start.lock().unwrap(); res.data } fn insert_start(&self, id: Option<StateId>) { let mut data = self.start.lock().unwrap(); if let Some(s) = id { data.num_known_states = std::cmp::max(data.num_known_states, s + 1); } data.data = CacheStatus::Computed(id); } fn get_trs(&self, id: usize) -> CacheStatus<TrsVec<W>> { match self.trs.lock().unwrap().data.get(&id) { Some(e) => CacheStatus::Computed(e.trs.shallow_clone()), None => CacheStatus::NotComputed, } } fn insert_trs(&self, id: usize, trs: TrsVec<W>) { let mut cached_data = self.trs.lock().unwrap(); let mut niepsilons = 0; let mut noepsilons = 0; for tr in trs.trs() { cached_data.num_known_states = std::cmp::max(cached_data.num_known_states, tr.nextstate + 1); if tr.ilabel == EPS_LABEL { niepsilons += 1; } if tr.olabel == EPS_LABEL { noepsilons += 1; } } cached_data.data.insert( id, CacheTrs { trs, niepsilons, noepsilons, }, ); } fn get_final_weight(&self, id: usize) -> CacheStatus<Option<W>> { match self.final_weights.lock().unwrap().data.get(&id) { Some(e) => CacheStatus::Computed(e.clone()), None => CacheStatus::NotComputed, } } fn insert_final_weight(&self, id: StateId, weight: Option<W>) { let mut cached_data = self.final_weights.lock().unwrap(); cached_data.num_known_states = std::cmp::max(cached_data.num_known_states, id + 1); cached_data.data.insert(id, weight); } fn num_known_states(&self) -> usize { let mut n = 0; n = std::cmp::max(n, self.start.lock().unwrap().num_known_states); n = std::cmp::max(n, self.trs.lock().unwrap().num_known_states); n = std::cmp::max(n, self.final_weights.lock().unwrap().num_known_states); n } fn num_trs(&self, id: usize) -> Option<usize> { let cached_data = self.trs.lock().unwrap(); cached_data.data.get(&id).map(|v| v.trs.len()) } fn num_input_epsilons(&self, id: usize) -> Option<usize> { let cached_data = 
self.trs.lock().unwrap(); cached_data.data.get(&id).map(|v| v.niepsilons) } fn num_output_epsilons(&self, id: usize) -> Option<usize> { let cached_data = self.trs.lock().unwrap(); cached_data.data.get(&id).map(|v| v.noepsilons) } fn len_trs(&self) -> usize { let cached_data = self.trs.lock().unwrap(); cached_data.data.len() } fn len_final_weights(&self) -> usize { let cached_data = self.final_weights.lock().unwrap(); cached_data.data.len() } }
true
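The cache above relies on wrapping each map in a `Mutex` so it can be populated through `&self`. A stripped-down sketch of that interior-mutability pattern, independent of the rustfst types (the names here are illustrative):

use std::collections::HashMap;
use std::sync::Mutex;

// Minimal sketch of the "fill a cache through &self" pattern used above.
struct TrsCache {
    trs: Mutex<HashMap<usize, Vec<u32>>>, // state id -> transitions (simplified)
}

impl TrsCache {
    fn get(&self, state: usize) -> Option<Vec<u32>> {
        self.trs.lock().unwrap().get(&state).cloned()
    }

    fn insert(&self, state: usize, trs: Vec<u32>) {
        self.trs.lock().unwrap().insert(state, trs);
    }
}

fn main() {
    let cache = TrsCache { trs: Mutex::new(HashMap::new()) };
    cache.insert(0, vec![1, 2, 3]); // works through a shared reference
    assert_eq!(cache.get(0), Some(vec![1, 2, 3]));
    assert_eq!(cache.get(1), None);
}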
01f783fb5a5840c7a3d28c311fca1e04c1e42e3c
Rust
Meptl/epd-waveshare
/src/color.rs
UTF-8
574
2.96875
3
[ "ISC" ]
permissive
#[derive(Clone, Copy, PartialEq, Debug)]
pub enum Color {
    Black,
    White,
}

impl From<u8> for Color {
    fn from(value: u8) -> Self {
        match value & 0x01 {
            0x00 => Color::Black,
            0x01 => Color::White,
            _ => unreachable!(),
        }
    }
}

impl Into<u8> for Color {
    fn into(self) -> u8 {
        match self {
            Color::White => 0xff,
            Color::Black => 0x00,
        }
    }
}

#[cfg(feature = "graphics")]
use embedded_graphics::prelude::*;

#[cfg(feature = "graphics")]
impl PixelColor for Color {}
true
5598125a711e0430b981920eb9e39b16ce1e5e95
Rust
meloket/move-parachain
/pallets/sp-mvm/src/event.rs
UTF-8
1,246
2.515625
3
[ "Unlicense" ]
permissive
use sp_std::prelude::*;

use frame_support::decl_event;
use move_vm::data::EventHandler;
use move_core_types::language_storage::TypeTag;

decl_event!(
    pub enum Event<T>
    where
        AccountId = <T as frame_system::Trait>::AccountId,
    {
        // Event documentation should end with an array that provides descriptive names for event parameters.
        /// Event provided by Move VM
        /// [guid, seq_num, message]
        MvmEvent(Vec<u8>, u64, Vec<u8>),
        // / [guid, seq_num, ty_tag, message]
        // Event(Vec<u8>, u64, TypeTag, Vec<u8>),
        /// Event about successful move-module publishing
        /// [account]
        ModulePublished(AccountId),
    }
);

pub struct EventWriter<F, E0>(F, core::marker::PhantomData<(E0,)>);

impl<F: Fn(RawEvent<E0>), E0> EventHandler for EventWriter<F, E0> {
    fn on_event(&self, guid: Vec<u8>, seq_num: u64, ty_tag: TypeTag, message: Vec<u8>) {
        debug!(
            "MVM Event: {:?} {:?} {:?} {:?}",
            guid, seq_num, ty_tag, message
        );
        // Emit an event:
        self.0(RawEvent::<E0>::MvmEvent(guid, seq_num, message))
    }
}

impl<F, E0> EventWriter<F, E0> {
    pub fn new(f: F) -> Self {
        Self(f, Default::default())
    }
}
true
b16be52672e0ee64cf5109ca8f25bbd885720d5d
Rust
MaikKlein/Rust-Learning-Game
/examples/demo1.rs
UTF-8
1,571
2.6875
3
[]
no_license
extern mod rlg;
extern mod sdl;

use rlg::game::*;

struct MyActor<'self> {
    actor: GameActor<'self>,
    b_dir: bool
}

impl<'self> MyActor<'self> {
    fn new(actor: GameActor<'self>) -> MyActor<'self> {
        MyActor { actor: actor, b_dir: false }
    }

    fn travel(&mut self) {
        if self.b_dir {
            self.actor.move_left();
            if self.actor.rect.x == 0 {
                self.b_dir = false;
            }
        } else {
            self.actor.move_right();
            if self.actor.rect.x == self.actor.lvl.width as i16 - self.actor.rect.w as i16 {
                self.b_dir = true;
            }
        }
    }
}

pub fn main() {
    do GameManager::new_game(~"my game") {
        let gm = GameManager::new(850, 250, 100);
        let actor1 = gm.spawn_actor(600, 100, 50, 50, 1, 255, 0, 0);
        let actor2 = gm.spawn_actor(400, 50, 50, 50, 2, 0, 255, 0);
        let mut myactor1 = MyActor::new(actor1);
        let mut myactor2 = MyActor::new(actor2);

        do GameManager::default_game_loop(gm.get_lvl()) {
            gm.get_lvl().screen.fill(sdl::video::RGBA(0, 0, 0, 255));
            myactor1.travel();
            myactor2.travel();
            myactor1.actor.redraw();
            myactor2.actor.redraw();
            gm.get_lvl().screen.flip();
        }
    }
}
true
c3e97404e091dd59ebbf8b3304fa42b872fa6812
Rust
Celeo/CoD_dice_roller
/src/commands/help.rs
UTF-8
1,036
3.21875
3
[ "MIT" ]
permissive
use serenity::{
    client::Context,
    framework::standard::{CommandResult, macros::command},
    model::channel::Message,
    utils::MessageBuilder,
};

const HELP: &str = "Chronicles of Darkness dice roller bot

To use, type '!roll # <mod>', where # is a positive number or 'chance', and <what> is one of:

* 9again - to re-roll 10s and 9s
* 8again - to re-roll 10s, 9s, and 8s
* no10again - to not re-roll any values

Note that the '<what>' portion is optional.

Examples:

* !roll 4
* !roll chance
* !roll 10 9again

You can also edit a character reference with the following commands:

* !stats print|show
* !stats edit <name> <value>
* !stats bulk name1=value1 name2=value2 name3=value3 ...

Then, you can roll using those references, like:

!character edit strength 3
!roll strength + 1 9again
";

#[command]
pub fn help(context: &mut Context, message: &Message) -> CommandResult {
    let response = MessageBuilder::new().push_codeblock(HELP, None).build();
    message.channel_id.say(&context.http, &response)?;
    Ok(())
}
true
0e0e94b2bcb5d0756a649b027e46435b9a76a85f
Rust
EarlGray/language-incubator
/js/slothjs/src/ast/stmt.rs
UTF-8
6,284
3.421875
3
[ "BSD-3-Clause" ]
permissive
//! AST definitions for JavaScript statements. //! //! The main struct here is [`Statement`], which wraps [`Stmt`] enum. use crate::prelude::*; use crate::source; use super::expr::{ Expression, FunctionExpression, Identifier, Pattern, }; /// `Statement` represents an [`Stmt`] together with its source span, if any. #[derive(Clone, Debug)] pub struct Statement { pub stmt: Stmt, pub loc: Option<Box<source::Location>>, } impl Statement { pub fn with_loc(self, loc: source::Location) -> Self { Statement { stmt: self.stmt, loc: Some(Box::new(loc)), } } } impl PartialEq for Statement { fn eq(&self, other: &Self) -> bool { self.stmt == other.stmt } } impl Eq for Statement {} impl<T> From<T> for Statement where Stmt: From<T>, { fn from(stmt: T) -> Self { Statement { stmt: Stmt::from(stmt), loc: None, } } } #[derive(Clone, Debug, PartialEq, Eq)] pub enum Stmt { Empty, Block(BlockStatement), Expr(ExpressionStatement), If(Box<IfStatement>), Switch(SwitchStatement), For(Box<ForStatement>), ForIn(Box<ForInStatement>), Return(ReturnStatement), Break(BreakStatement), Continue(ContinueStatement), Label(Box<LabelStatement>), Throw(ThrowStatement), Try(TryStatement), // TODO: move declarations out? Variable(VariableDeclaration), Function(FunctionDeclaration), } impl<E> From<E> for Stmt where Expression: From<E>, { fn from(expr: E) -> Stmt { Stmt::Expr(ExpressionStatement { expression: Expression::from(expr), }) } } impl From<VariableDeclaration> for Stmt { fn from(var: VariableDeclaration) -> Stmt { Stmt::Variable(var) } } impl From<BlockStatement> for Stmt { fn from(block: BlockStatement) -> Stmt { Stmt::Block(block) } } impl From<ReturnStatement> for Stmt { fn from(ret: ReturnStatement) -> Stmt { Stmt::Return(ret) } } // ============================================== #[derive(Clone, Debug, PartialEq, Eq)] pub struct ExpressionStatement { pub expression: Expression, } // ============================================== #[derive(Clone, Debug, PartialEq, Eq)] pub struct VariableDeclarator { pub name: Pattern, pub init: Option<Box<Expression>>, } #[derive(Copy, Clone, Debug, PartialEq, Eq)] pub enum DeclarationKind { Var, Let, Const, } #[derive(Clone, Debug, PartialEq, Eq)] pub struct VariableDeclaration { pub kind: DeclarationKind, pub declarations: Vec<VariableDeclarator>, } // ============================================== #[derive(Clone, Debug, PartialEq, Eq)] pub struct FunctionDeclaration { pub id: Identifier, // might be different from function.id pub function: FunctionExpression, } // ============================================== #[derive(Clone, Debug)] pub struct BlockStatement { pub body: Vec<Statement>, pub bindings: HashSet<Identifier>, } impl PartialEq for BlockStatement { fn eq(&self, other: &Self) -> bool { self.body == other.body } } impl Eq for BlockStatement {} impl From<Vec<Statement>> for BlockStatement { fn from(body: Vec<Statement>) -> BlockStatement { BlockStatement { body, bindings: HashSet::new(), } // TODO: bindings analysis } } // ============================================== #[derive(Clone, Debug, PartialEq, Eq)] pub struct IfStatement { pub test: Expression, pub consequent: Statement, pub alternate: Option<Statement>, } // ============================================== #[derive(Clone, Debug, PartialEq, Eq)] pub struct SwitchStatement { pub discriminant: Expression, pub cases: Vec<SwitchCase>, } #[derive(Clone, Debug, PartialEq, Eq)] pub struct SwitchCase { pub test: Option<Expression>, pub consequent: Vec<Statement>, } // ============================================== 
#[derive(Clone, Debug, PartialEq, Eq)] pub struct ForStatement { pub init: Statement, // Empty | VariableDeclaration | ExpressionStatement pub test: Option<Expression>, pub update: Option<Expression>, pub body: Statement, } #[derive(Clone, Debug, PartialEq, Eq)] pub struct ForInStatement { pub left: ForInTarget, pub right: Expression, pub body: Statement, } #[derive(Clone, Debug, PartialEq, Eq)] pub enum ForInTarget { Var(VariableDeclaration), Expr(Expression), } // ============================================== #[derive(Clone, Debug, PartialEq, Eq)] pub struct BreakStatement(pub Option<Identifier>); #[derive(Clone, Debug, PartialEq, Eq)] pub struct ContinueStatement(pub Option<Identifier>); #[derive(Clone, Debug, PartialEq, Eq)] pub struct LabelStatement(pub Identifier, pub Statement); // ============================================== #[derive(Clone, Debug, PartialEq, Eq)] pub struct ReturnStatement(pub Option<Expression>); // ============================================== #[derive(Clone, Debug, PartialEq, Eq)] pub struct ThrowStatement(pub Expression); #[derive(Clone, Debug, PartialEq, Eq)] pub struct TryStatement { pub block: BlockStatement, pub handler: Option<CatchClause>, pub finalizer: Option<BlockStatement>, } // ============================================== #[derive(Clone, Debug, PartialEq, Eq)] pub struct CatchClause { pub param: Pattern, pub body: BlockStatement, } /// make a [`BlockStatement`] pub fn block(body: Vec<Statement>) -> BlockStatement { BlockStatement::from(body) } /// make a [`VariableDeclaration`] from [(var1, value1), ...] pub fn var<'a>(it: impl Iterator<Item = &'a (&'a str, Expression)>) -> VariableDeclaration { let declarations = it .map(|(name, init)| VariableDeclarator { name: Identifier::from(*name), init: Some(Box::new(init.clone())), }) .collect(); VariableDeclaration { kind: DeclarationKind::Var, declarations, } } /// make an [`ExpressionStatement`](`expr`) pub fn expr(expr: Expression) -> Statement { Statement::from(expr) } /// make a [`ReturnStatement`](`expr`) pub fn return_(expr: Expression) -> Statement { Statement::from(ReturnStatement(Some(expr))) }
true
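`Statement` above wraps the `Stmt` enum with an optional source span, ignores the span in equality, and uses a blanket `From` impl so builders stay terse. A minimal self-contained sketch of the same pattern, with illustrative names standing in for the slothjs types:

// Standalone sketch of the wrapper-with-span pattern used above.
#[derive(Debug, PartialEq)]
enum Node {
    Number(f64),
}

#[derive(Debug)]
struct Spanned {
    node: Node,
    loc: Option<(u32, u32)>, // (line, column); not part of equality, like `Statement`
}

impl PartialEq for Spanned {
    fn eq(&self, other: &Self) -> bool {
        self.node == other.node
    }
}

impl From<f64> for Node {
    fn from(n: f64) -> Self {
        Node::Number(n)
    }
}

// Blanket conversion, mirroring `impl<T> From<T> for Statement where Stmt: From<T>`.
impl<T> From<T> for Spanned
where
    Node: From<T>,
{
    fn from(t: T) -> Self {
        Spanned { node: Node::from(t), loc: None }
    }
}

fn main() {
    let a = Spanned::from(1.0);
    let b = Spanned { node: Node::Number(1.0), loc: Some((3, 7)) };
    assert_eq!(a, b); // the span is ignored in comparisons
}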
c890c6c7c8f78ab8557fa858985b153f397c29c8
Rust
bznein/AoC-2019
/Day12/src/main.rs
UTF-8
2,687
3.5
4
[]
no_license
use num::integer::lcm; #[derive(Clone)] struct Moon { p: Vec<i64>, v: Vec<i64>, } impl Moon { pub fn new(p: Vec<i64>) -> Moon { Moon { p: p, v: vec![0, 0, 0], } } pub fn is_equal_dir(&self, m: &Moon, dir: usize) -> bool { self.p[dir] == m.p[dir] && self.v[dir] == m.v[dir] } fn potential_energy(&self) -> i64 { self.p.iter().map(|x| x.abs()).sum() } fn kinetic_energy(&self) -> i64 { self.v.iter().map(|x| x.abs()).sum() } pub fn total_energy(&self) -> i64 { self.potential_energy() * self.kinetic_energy() } pub fn compare_p_dir(&self, m: &Moon, dir: usize) -> i64 { if self.p[dir] < m.p[dir] { -1 } else if self.p[dir] > m.p[dir] { 1 } else { 0 } } } fn total_energy(moons: &Vec<Moon>) -> i64 { moons.iter().map(|x| x.total_energy()).sum() } fn is_equal_dir(moon1: &Vec<Moon>, moon2: &Vec<Moon>, dir: usize) -> bool { let mut result = true; for i in 0..moon1.len() { result = result && moon1[i].is_equal_dir(&moon2[i], dir); } result } fn universe_period_direction(mut moons: &mut Vec<Moon>, dir: usize) -> u64 { let initial_step = moons.clone(); let mut steps: u64 = 0; loop { step_moons(&mut moons, dir); steps += 1; if is_equal_dir(&initial_step, moons, dir) { return steps; } } } fn universe_period(moons: &Vec<Moon>) -> u64 { let steps = lcm( universe_period_direction(&mut moons.clone(), 0), universe_period_direction(&mut moons.clone(), 1), ); lcm(universe_period_direction(&mut moons.clone(), 2), steps) } fn step_moons(moons: &mut Vec<Moon>, dir: usize) { for i in 0..moons.len() { for j in i + 1..moons.len() { let v = moons[i].compare_p_dir(&moons[j], dir); moons[i].v[dir] -= v; moons[j].v[dir] += v; } } for i in 0..moons.len() { moons[i].p[dir] += moons[i].v[dir]; } } /* Note: I hardcode input here as I honestly do not care about parsing it, it's just not worth it */ fn main() { let mut moons = Vec::new(); moons.push(Moon::new(vec![-6, -5, -8])); moons.push(Moon::new(vec![0, -3, -13])); moons.push(Moon::new(vec![-15, 10, -11])); moons.push(Moon::new(vec![-3, -8, 3])); let period = universe_period(&moons.clone()); for _i in 0..1000 { step_moons(&mut moons, 0); step_moons(&mut moons, 1); step_moons(&mut moons, 2); } println!("Part 1 {}", total_energy(&moons)); println!("Part 2: {}", period); }
true
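The key observation in the code above is that the three axes evolve independently, so the period of the whole system is the least common multiple of the per-axis periods. A standalone sketch of that combination step, with hand-rolled `gcd`/`lcm` and made-up period values:

// Combining independent per-axis cycle lengths with lcm, as in `universe_period` above.
fn gcd(a: u64, b: u64) -> u64 {
    if b == 0 { a } else { gcd(b, a % b) }
}

fn lcm(a: u64, b: u64) -> u64 {
    a / gcd(a, b) * b
}

fn main() {
    let (px, py, pz) = (18, 28, 44); // hypothetical per-axis periods
    let total = lcm(lcm(px, py), pz);
    assert_eq!(total, 2772); // lcm(18, 28, 44) = 2772
}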
c5d934ee26830e246dd37ab736c26af41d8d860b
Rust
torkleyy/rocket_auth
/src/language/es.rs
UTF-8
683
2.65625
3
[ "Apache-2.0", "MIT" ]
permissive
use super::*;
use Error::*;

impl Language {
    pub fn es_message(error: Error) -> &'static str {
        match error {
            UnsafePasswordTooShort => "La clave debe tener al menos 8 caracteres.",
            UnauthenticatedClientError => "El cliente no esta autentificado.",
            UnauthorizedError => "No autorizado.",
            InvalidCredentialsError => "Su correo electrónico o contraseña es incorrecta.",
            UserNotFoundError => "No se encontró el usuario.",
            InvalidEmailAddressError => "Correo inválido.",
            EmailAlreadyExists => "Ese correo ya existe.",
            _ => "Error interno del servidor."
        }
    }
}
true
ba2190edd3768b4d94d17e2b7755217bfacf0441
Rust
qidian99/generals-rs
/src/core/common.rs
UTF-8
13,293
3.71875
4
[]
no_license
use std::collections::HashSet; pub type PlayerId = usize; /// Represent a player during a game. #[derive(Serialize, Clone, Debug, PartialEq)] pub struct Player { /// An integer that uniquely identifies each player during a game pub id: PlayerId, /// Number of tiles the player currently owns #[serde(skip_serializing_if = "has_no_tile")] pub owned_tiles: usize, /// Turn at which the player was defeated, if any #[serde(skip_serializing_if = "Option::is_none")] pub defeated_at: Option<usize>, } /// Small helper used by serde to avoid serializing the owned_tile field if the player does not own /// any tile. We try to keep the jsons as small as possible for network efficiency. fn has_no_tile(owned_tiles: &usize) -> bool { *owned_tiles == 0 } impl Player { /// Return a new undefeated player, with no owned tile. pub fn new(id: PlayerId) -> Self { Player { id, owned_tiles: 0, defeated_at: None, } } /// Return whether the player has been defeated already pub fn defeated(&self) -> bool { self.defeated_at.is_some() } /// Return whether the player can move. A player can move if it owns at least one tile, and if /// it has not been defeated. pub fn can_move(&self) -> bool { !self.defeated() && self.owned_tiles > 0 } } /// Represent an action a player can perform. #[derive(Copy, Clone, Debug, Deserialize, Serialize)] #[serde(tag = "type")] #[serde(rename_all = "lowercase")] pub enum Action { /// Resign Resign, /// Cancel all the moves already queued for the player #[serde(rename = "cancel_moves")] CancelMoves, /// Make a move from a tile to another Move(Move), } /// Represent a move from one tile to another. During a move, units are transfered from one tile to /// another adjacent tile. #[derive(Copy, Clone, Debug, Deserialize, Serialize)] pub struct Move { /// Player that is making the move. #[serde(skip)] pub player: PlayerId, /// Index of the tile from which troops are being moved. pub from: usize, /// Direction to which the troops are being moved. pub direction: Direction, } #[derive(Copy, Clone, Debug, PartialEq, Deserialize, Serialize)] #[serde(rename_all = "lowercase")] pub enum Direction { Right, Left, Up, Down, } #[derive(Copy, Clone, Debug, PartialEq)] /// Outcome of a move pub enum MoveOutcome { /// Outcome when a move resulted in a general being captured. The player ID is the ID of the /// defeated player. GeneralCaptured(PlayerId), /// Outcome when a move resulted in an open tile or a city tile being captured. If the tile /// was belonging to a different player than the one making the move, the player's ID is /// specified. TileCaptured(Option<PlayerId>), /// Outcome when a move did not result in a tile being captured. StatuQuo, } /// Represent the different types of open (ie non-mountain) tiles #[derive(Copy, Clone, PartialEq, Debug, Serialize)] #[serde(rename_all = "lowercase")] pub enum TileKind { /// A tile that contains a general General, /// A tile that contains a city City, /// A regular tile Open, /// A tile that contains a mountain Mountain, } /// Represent an open tile. Open tiles are tiles that are not mountains, ie tiles that players can /// conquer. #[derive(Clone, PartialEq, Debug, Serialize)] pub struct Tile { /// The ID of the player that currenlty owns the tile (a player own a tile if he/she has units /// occupying the tile). 
#[serde(skip_serializing_if = "Option::is_none")] owner: Option<PlayerId>, /// Number of units occupying the tile #[serde(skip_serializing_if = "has_no_unit")] units: u16, /// The type of tile (open, city or general) #[serde(skip_serializing_if = "is_open")] kind: TileKind, /// List of players that can see the tile. To be able to see an open tile, a player must own a /// tile that touches it. #[serde(skip)] visible_by: HashSet<PlayerId>, /// Players that had visibility on this tile when it changed. #[serde(skip)] dirty_for: HashSet<PlayerId>, } /// Small helper used by serde to avoid serializing the `kind` field if the tile if of type /// `TileKind::Open`. We try to keep the jsons as small as possible for network efficiency. fn is_open(kind: &TileKind) -> bool { *kind == TileKind::Open } /// Small helper used by serde to avoid serializing the `units` field if the tile does not have any /// units. We try to keep the jsons as small as possible for network efficiency. fn has_no_unit(units: &u16) -> bool { *units == 0 } impl Tile { /// Return a new open tile or the given type, with no owner, and no unit. pub fn new() -> Self { Tile { owner: None, units: 0, dirty_for: HashSet::new(), visible_by: HashSet::new(), kind: TileKind::Mountain, } } /// Return whether the tile is marked as visible by the given player. pub fn is_visible_by(&self, player: PlayerId) -> bool { self.visible_by.contains(&player) } /// Mark the tile as invisible for the given player pub fn hide_from(&mut self, player: PlayerId) { let was_visible = self.visible_by.remove(&player); if was_visible { self.dirty_for.insert(player); } } /// Mark the tile as visible for the given player, updating the source and destination tiles /// state if necessary (number of units, owner, etc.). pub fn reveal_to(&mut self, player: PlayerId) { self.visible_by.insert(player); self.dirty_for.insert(player); } /// Perform a move from a source tile to a destination tile. pub fn attack(&mut self, dst: &mut Tile) -> Result<MoveOutcome, InvalidMove> { if self.is_mountain() { return Err(InvalidMove::FromInvalidTile); } if dst.is_mountain() { return Err(InvalidMove::ToInvalidTile); } if self.units() < 2 { return Err(InvalidMove::NotEnoughUnits); } let attacker = self.owner.ok_or(InvalidMove::SourceTileNotOwned)?; let outcome = match dst.owner { // The destination tile belongs to someone else Some(defender) if defender != attacker => { // The defender has more units. if dst.units >= self.units - 1 { dst.units -= self.units - 1; MoveOutcome::StatuQuo } // The attacker has more units. Capture the tile. else { dst.units = self.units - 1 - dst.units; dst.owner = self.owner; // We're capturing a general if dst.kind == TileKind::General { // Turn the general into a regular city dst.kind = TileKind::City; MoveOutcome::GeneralCaptured(defender) } // We're capturing a regular tile else { MoveOutcome::TileCaptured(Some(defender)) } } } // The owner is the same for both tiles, just transfer the unit Some(_defender) => { dst.units += self.units - 1; MoveOutcome::StatuQuo } // The destination tile is not owned by anyone. None => { // The destination has more units, we can't capture it if dst.units >= self.units - 1 { dst.units -= self.units - 1; MoveOutcome::StatuQuo } else { dst.units = self.units - 1 - dst.units; dst.owner = self.owner; MoveOutcome::TileCaptured(None) } } }; // In any case, we always only leave 1 unit in the source tile // TODO: would be nice to support splitting the source tile units before moving. 
self.units = 1; self.set_dirty(); dst.set_dirty(); Ok(outcome) } /// Return the owner of the tile, if any pub fn owner(&self) -> Option<PlayerId> { self.owner } /// Return the number of units occupying the tile pub fn units(&self) -> u16 { self.units } /// Return whether the tile is open. A tile is open if it's not a city, a general or a /// mountain. pub fn is_open(&self) -> bool { self.kind == TileKind::Open } /// Return whether the tile is a general. pub fn is_general(&self) -> bool { self.kind == TileKind::General } /// Return whether the tile is a city. pub fn is_city(&self) -> bool { self.kind == TileKind::City } /// Return whether the tile is a mountain pub fn is_mountain(&self) -> bool { self.kind == TileKind::Mountain } /// Turn the tile into an open tile pub fn make_open(&mut self) { self.kind = TileKind::Open; self.set_dirty(); } pub fn set_dirty(&mut self) { for player_id in self.visible_by.iter() { self.dirty_for.insert(*player_id); } } /// Turn the tile into a general pub fn make_general(&mut self) { self.kind = TileKind::General; self.set_dirty(); } // // FIXME: unused for now, but that's because we don't have city yet // /// Turn the tile into a fortess. // pub fn make_city(&mut self) { // self.kind = TileKind::City; // self.set_dirty(); // } /// Turn the tile into a mountain. pub fn make_mountain(&mut self) { self.kind = TileKind::Mountain; self.set_dirty(); } /// Set the number of units occupying the tile pub fn set_units(&mut self, units: u16) { if self.is_mountain() { return; } self.units = units; self.set_dirty(); } /// Increment the number of units occupying the tile pub fn incr_units(&mut self, units: u16) { if self.is_mountain() { return; } self.units += units; self.set_dirty(); } /// Set the owner of the tile. To remove the existing owner, set the owner to `None`. pub fn set_owner(&mut self, player: Option<PlayerId>) { if self.is_mountain() { return; } // Mark the tile as dirty for the players that have visibility on the tile self.set_dirty(); // Mark the tile as dirty for the previous owner. As owner, it should have visibility on // the tile, so should have been added `dirty_for` already, but let's be safe, it's pretty // cheap. if let Some(owner) = self.owner { self.dirty_for.insert(owner); } self.owner = player; if let Some(owner) = self.owner { self.reveal_to(owner); } } /// Return whether the tile's state has changed. A tile state changes when its type, its owner, /// or the number of units occupying it changes. pub fn is_dirty(&self) -> bool { !self.dirty_for.is_empty() } pub fn is_dirty_for(&self, player_id: PlayerId) -> bool { self.dirty_for.contains(&player_id) } /// Mark the tile a clean. This should be called to acknoledge that the tile has been processed /// when after is was marked as dirty. pub fn set_clean(&mut self) { let _ = self.dirty_for.drain(); } } /// Represent an error that occurs when an invalid move is processed. #[derive(Debug, PartialEq, Eq)] pub enum InvalidMove { /// The source tile does not have enough units to perform the move. To be able to move from one /// tile, the tile must have at least two units. NotEnoughUnits, /// The destination tile is invalid (it can be a mountain or an out-of-grid tile. This occurs /// for instance if the source tile is on the top row, and the move is upward. ToInvalidTile, /// The source tile is either a mountain or out of the grid. FromInvalidTile, /// The source tile does not belong to the player making the move. A move can only be performed /// by a player. 
SourceTileNotOwned, } use std::error::Error; use std::fmt; impl Error for InvalidMove { fn description(&self) -> &str { match *self { InvalidMove::NotEnoughUnits => "not enough unit on the source tile", InvalidMove::ToInvalidTile => { "the destination tile is either a mountain or not on the map" } InvalidMove::FromInvalidTile => { "the source tile is either a mountain or not on the map" } InvalidMove::SourceTileNotOwned => { "the source tile does not belong to the player making the move" } } } fn cause(&self) -> Option<&Error> { None } } impl fmt::Display for InvalidMove { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "Invalid move: {}", self.description()) } }
true
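`Tile::attack` above always leaves one unit behind and compares the moved units against the defender's units. The arithmetic in isolation, as a standalone sketch that ignores ownership, generals and visibility:

// Unit arithmetic of `Tile::attack`: the attacker leaves 1 unit on the source tile,
// and the destination is captured only if the moved units strictly exceed the defenders.
fn resolve(attacker_units: u16, defender_units: u16) -> (u16, u16, bool) {
    let moved = attacker_units - 1; // one unit always stays behind
    if defender_units >= moved {
        // defender holds; it just loses `moved` units
        (1, defender_units - moved, false)
    } else {
        // tile captured; the surplus units occupy it
        (1, moved - defender_units, true)
    }
}

fn main() {
    assert_eq!(resolve(10, 4), (1, 5, true));  // 9 attack 4 -> captured with 5 units left
    assert_eq!(resolve(3, 7), (1, 5, false));  // 2 attack 7 -> defender keeps 5 units
}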
f460b803e8c6ae1a3a83ba03aea31aab113ac449
Rust
phlopsi/allocator
/src/u/v4.rs
UTF-8
3,886
2.9375
3
[ "MIT", "Apache-2.0", "LicenseRef-scancode-unknown-license-reference" ]
permissive
#[cfg(test)] mod tests; use crate::align128::Align128; use std::cell::UnsafeCell; use std::mem::MaybeUninit; use std::ops::Deref; use std::ops::DerefMut; use std::sync::atomic::AtomicBool; use std::sync::atomic::AtomicUsize; use std::sync::atomic::Ordering::{ AcqRel, Acquire, Relaxed, Release, SeqCst, }; #[derive(Default)] struct Mutex<T: ?Sized> { locked: AtomicBool, value: UnsafeCell<T>, } unsafe impl<T: ?Sized + Send> Send for Mutex<T> {} unsafe impl<T: ?Sized + Send> Sync for Mutex<T> {} impl<T> Mutex<T> { pub fn new(value: T) -> Self { Self { locked: AtomicBool::new(false), value: UnsafeCell::new(value), } } pub fn try_lock(&self) -> Option<MutexGuard<'_, T>> { if self .locked .compare_exchange(false, true, Acquire, Relaxed) .is_ok() { Some(MutexGuard { mutex: self }) } else { std::process::abort() } } } #[must_use = "if unused the Mutex will immediately unlock"] struct MutexGuard<'a, T> { mutex: &'a Mutex<T>, } impl<T> Drop for MutexGuard<'_, T> { fn drop(&mut self) { self.mutex.locked.store(false, Release); } } impl<T> Deref for MutexGuard<'_, T> { type Target = T; fn deref(&self) -> &T { unsafe { &*(self.mutex.value.get() as *const T) } } } impl<T> DerefMut for MutexGuard<'_, T> { fn deref_mut(&mut self) -> &mut T { unsafe { &mut *self.mutex.value.get() } } } pub struct Allocator<T> { storage: std::boxed::Box<[Align128<Mutex<MaybeUninit<T>>>]>, indices: parking_lot::Mutex<std::boxed::Box<[u16]>>, } impl<T> Allocator<T> { pub fn new(capacity: usize) -> Self { let mut storage = Vec::with_capacity(capacity); storage.resize_with(capacity, || { Align128(Mutex::new(MaybeUninit::uninit())) }); let storage = storage.into_boxed_slice(); let mut indices = Vec::with_capacity(capacity); indices.resize_with(capacity, Default::default); let indices = parking_lot::Mutex::new(indices.into_boxed_slice()); Self { storage, indices } } #[track_caller] pub fn box_it(&self, value: T) -> Box<'_, T> { self.box_it_with_index(value, 0) } #[track_caller] fn box_it_with_index(&self, value: T, index: usize) -> Box<'_, T> { let mut guard = self .storage .iter() .cycle() .skip(index) .find_map(|mutex| mutex.try_lock()) .unwrap(); guard.write(value); Box { guard } } pub fn thread_local(&self) -> AllocatorRef<'_, T> { let mut indices = self.indices.lock(); let (index, count) = indices .iter_mut() .enumerate() .min_by_key(|(_, count)| **count) .unwrap(); *count += 1; AllocatorRef { allocator: self, index, } } } pub struct AllocatorRef<'allocator, T> { allocator: &'allocator Allocator<T>, index: usize, } impl<T> AllocatorRef<'_, T> { #[track_caller] pub fn box_it(&self, value: T) -> Box<'_, T> { self.allocator.box_it_with_index(value, self.index) } } impl<T> Drop for AllocatorRef<'_, T> { fn drop(&mut self) { self.allocator.indices.lock()[self.index] -= 1; } } pub struct Box<'guard, T> { guard: MutexGuard<'guard, MaybeUninit<T>>, } impl<T> Deref for Box<'_, T> { type Target = T; fn deref(&self) -> &T { unsafe { self.guard.assume_init_ref() } } } impl<T> DerefMut for Box<'_, T> { fn deref_mut(&mut self) -> &mut T { unsafe { self.guard.assume_init_mut() } } } impl<T> Drop for Box<'_, T> { fn drop(&mut self) { unsafe { self.guard.assume_init_drop() }; } }
true
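The hand-rolled `Mutex` above is a non-blocking lock built from an `AtomicBool` plus an `UnsafeCell`. The acquire/release protocol can be shown on its own without any unsafe code:

use std::sync::atomic::{AtomicBool, Ordering::{Acquire, Relaxed, Release}};

// Standalone sketch of the compare_exchange-based `try_lock` above, without the payload.
struct Flag(AtomicBool);

impl Flag {
    fn try_acquire(&self) -> bool {
        // The false -> true transition succeeds for exactly one caller at a time.
        self.0.compare_exchange(false, true, Acquire, Relaxed).is_ok()
    }

    fn release(&self) {
        self.0.store(false, Release);
    }
}

fn main() {
    let flag = Flag(AtomicBool::new(false));
    assert!(flag.try_acquire());   // first acquire succeeds
    assert!(!flag.try_acquire());  // second one fails while held
    flag.release();
    assert!(flag.try_acquire());   // can be taken again after release
}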
27f76012d9c22c511aca35c73dea48a4532d34cb
Rust
Globidev/advent-of-code
/2018/rust/src/day13.rs
UTF-8
6,225
3.1875
3
[]
no_license
use hashbrown::HashSet; const RAW_INPUT: &[u8] = include_bytes!("../../inputs/day13.txt"); pub fn day13() -> ((u16, u16), (u16, u16)) { let (world, trains) = parse_input(RAW_INPUT); (part1(&world, &trains), part2(&world, &trains)) } pub fn part1(world: &World, trains: &Trains) -> (u16, u16) { let mut trains = trains.iter().cloned().collect(); loop { let crashes = tick(world, &mut trains); if let Some(&Position { x, y }) = crashes.first() { break (x, y) } } } pub fn part2(world: &World, trains: &Trains) -> (u16, u16) { let mut trains: Vec<_> = trains.iter().cloned().collect(); while trains.len() > 1 { tick(world, &mut trains); } match trains.first() { Some(&Train { pos: Position { x, y }, .. }) => (x, y), None => panic!("No more trains left") } } fn tick(world: &World, trains: &mut Trains) -> Collisions { let mut train_positions: HashSet<_> = trains.iter() .map(|train| train.pos.clone()) .collect(); let mut collisions = Collisions::new(); trains.sort_by_key(|train| (train.pos.y, train.pos.x)); for train in trains.iter_mut() { if collisions.contains(&train.pos) { continue } train_positions.remove(&train.pos); train.tick(world); if !train_positions.insert(train.pos.clone()) { collisions.push(train.pos.clone()); } } trains.retain(|train| !collisions.contains(&train.pos)); collisions } pub fn parse_input(input: &[u8]) -> (World, Trains) { let mut trains = Trains::with_capacity(32); let height = input.iter().position(|&c| c == b'\n').unwrap(); let rails: Vec<_> = input.split(|&c| c == b'\n') .flatten() .enumerate() .map(|(i, c)| { let mut add_train = |direction| { let x = i % height; let y = i / height; trains.push(Train { pos: Position { x: x as u16, y: y as u16 }, direction, turn_strategy: TurnStrategy::Left }) }; match c { b'^' => add_train(Direction::Up), b'v' => add_train(Direction::Down), b'<' => add_train(Direction::Left), b'>' => add_train(Direction::Right), _ => () } match c { b' ' => Rail::Empty, b'-' | b'|' => Rail::Straight, b'/' => Rail::CurveRight, b'\\' => Rail::CurveLeft, b'+' => Rail::Intersection, _ => Rail::Straight // Trains are only placed on straight lines } }) .collect(); (World { rails, height }, trains) } #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct Position { x: u16, y: u16 } #[derive(Debug)] pub enum Rail { Empty, Straight, CurveRight, CurveLeft, Intersection } #[derive(Debug)] pub struct World { rails: Vec<Rail>, height: usize } #[derive(Debug, Clone)] pub struct Train { pos: Position, direction: Direction, turn_strategy: TurnStrategy } type Trains = Vec<Train>; type Collisions = Vec<Position>; #[derive(Debug, Clone)] pub enum Direction { Up, Down, Left, Right } #[derive(Debug, Clone)] pub enum TurnStrategy { Left, Straight, Right } impl Position { fn moved(&self, direction: &Direction) -> Self { use self::Direction::*; let &Position { x, y } = self; match direction { Up => Position { x: x + 0, y: y - 1 }, Down => Position { x: x + 0, y: y + 1 }, Left => Position { x: x - 1, y: y + 0 }, Right => Position { x: x + 1, y: y + 0 }, } } } impl World { fn at(&self, pos: &Position) -> &Rail { &self.rails[pos.y as usize * self.height + pos.x as usize] } } impl Train { fn tick(&mut self, world: &World) { use self::Direction::*; let new_pos = self.pos.moved(&self.direction); let turn_direction = match world.at(&new_pos) { Rail::Straight => TurnStrategy::Straight, Rail::Intersection => self.turn_strategy.next(), Rail::CurveLeft => { match self.direction { Up | Down => TurnStrategy::Left, Left | Right => TurnStrategy::Right, } }, Rail::CurveRight => { match 
self.direction { Up | Down => TurnStrategy::Right, Left | Right => TurnStrategy::Left, } }, Rail::Empty => panic!("A train went off the tracks!"), }; let new_direction = self.direction.turned(&turn_direction); self.pos = new_pos; self.direction = new_direction; } } impl Direction { fn turned(&self, way: &TurnStrategy) -> Self { use self::Direction::*; use self::TurnStrategy::{Left as TurnLeft, Right as TurnRight}; match (way, self) { (TurnLeft, Up) => Left, (TurnLeft, Down) => Right, (TurnLeft, Left) => Down, (TurnLeft, Right) => Up, (TurnRight, Up) => Right, (TurnRight, Down) => Left, (TurnRight, Left) => Up, (TurnRight, Right) => Down, (_straight, current_direction) => current_direction.clone(), } } } impl TurnStrategy { fn next(&mut self) -> Self { use self::TurnStrategy::*; let next = match self { Left => Straight, Straight => Right, Right => Left, }; std::mem::replace(self, next) } } #[cfg(test)] mod tests { use super::*; #[test] fn p1() { let (world, trains) = parse_input(RAW_INPUT); assert_eq!(part1(&world, &trains), (38, 72)); } #[test] fn p2() { let (world, trains) = parse_input(RAW_INPUT); assert_eq!(part2(&world, &trains), (68, 27)); } }
true
8a033cdc1671ff9cec603fcc5c893f11124e9a73
Rust
iCodeIN/assert2ify
/assert2ify-macros/src/macro_parsing/assertion.rs
UTF-8
1,101
3.484375
3
[ "BSD-2-Clause" ]
permissive
use syn::Expr;

/// An intermediate structure which helps parsing assert use cases and variants
/// from the std lib and can translate them into assert2 assertions.
pub enum Assertion {
    /// The binary assertions `std::assert_eq!` and `std::assert_ne!`.
    /// Those are translated into the equivalent assertion of the assert2 crate.
    AssertBinary {
        lhs: Expr,
        operator: syn::BinOp,
        rhs: Expr,
    },
    /// Catch-all for any other kind of `std::assert!` macro
    /// which is not one of the above.
    /// This means any other kind of assertion on one expression,
    /// notably also assertions on binary expressions like `assert!(a==b)`
    /// or `assert!(v.len() < 5)`.
    AssertUnary { expr: Expr },
}

impl Assertion {
    /// Convenience constructor for binary assertions.
    pub fn new_binary(lhs: Expr, operator: syn::BinOp, rhs: Expr) -> Self {
        Self::AssertBinary { lhs, operator, rhs }
    }

    /// Convenience constructor for a general assertion on one argument.
    pub fn new_assert(expr: Expr) -> Self {
        Self::AssertUnary { expr }
    }
}
true
1d73217b83065a2ad8cf7bc0583fbe8833eda762
Rust
qeda/qeda-cli
/src/symbols/mod.rs
UTF-8
1,266
2.859375
3
[ "MIT" ]
permissive
mod capacitor;
mod ic;

use std::collections::HashMap;
use std::fmt::{self, Debug};

use crate::config::Config;
use crate::error::*;
use crate::symbol::Symbol;

use capacitor::CapacitorSymbol;
use ic::IcSymbol;

pub trait SymbolHandler {
    fn draw(&self, comp_cfg: &Config, lib_cfg: &Config) -> Result<Symbol>;
}

impl Debug for dyn SymbolHandler {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "SymbolHandler")
    }
}

#[derive(Debug)]
pub struct Symbols {
    handlers: HashMap<&'static str, Box<dyn SymbolHandler>>,
}

impl Symbols {
    /// Creates an empty `Symbols`.
    pub fn new() -> Symbols {
        let mut handlers: HashMap<&'static str, Box<dyn SymbolHandler>> = HashMap::new();
        handlers.insert("capacitor", Box::new(CapacitorSymbol::new()));
        handlers.insert("ic", Box::new(IcSymbol::new()));
        Symbols { handlers }
    }

    pub fn get_handler(&self, key: &str) -> Result<&dyn SymbolHandler> {
        self.handlers
            .get(key)
            .map(|v| v.as_ref())
            .ok_or_else(|| QedaError::InvalidSymbolType(key.to_string()).into())
    }
}

impl Default for Symbols {
    /// Creates an empty `Symbols`.
    #[inline]
    fn default() -> Self {
        Self::new()
    }
}
true
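`Symbols` above is a registry of boxed trait objects keyed by name. The same lookup pattern reduced to a self-contained sketch with illustrative types:

use std::collections::HashMap;

// Name -> boxed trait object registry, as used by `Symbols` above.
trait Handler {
    fn describe(&self) -> &'static str;
}

struct Capacitor;
impl Handler for Capacitor {
    fn describe(&self) -> &'static str {
        "capacitor"
    }
}

fn main() {
    let mut handlers: HashMap<&'static str, Box<dyn Handler>> = HashMap::new();
    handlers.insert("capacitor", Box::new(Capacitor));

    // Look up by key; `get_handler` above turns the None case into an error with ok_or_else.
    let found = handlers.get("capacitor").map(|h| h.describe());
    assert_eq!(found, Some("capacitor"));
    assert!(handlers.get("ic").is_none());
}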
a7be615f68d47cb1a8394b95619f09cbec42f7c1
Rust
AngelOnFira/advent-of-code
/2021/src/day18.rs
UTF-8
7,572
3.46875
3
[]
no_license
use itertools::Itertools; use regex::Regex; #[aoc_generator(day18)] pub fn input_generator(input: &str) -> Vec<String> { input.lines().map(|s| s.to_string()).collect() } fn test_explode(input: String) -> Option<String> { let mut bracket_count = 0; for (i, char) in input.chars().enumerate() { match char { '[' => bracket_count += 1, ']' => bracket_count -= 1, _ => (), } if bracket_count >= 5 && char == '[' { // read ahead to the next ']', and see if it matches the regex \[\d+,\d+\] let end_of_match = i + &input[i..].find(']').unwrap() + 1; let test_string = &input[i..end_of_match]; let left_string = &input[..i]; let right_string = &input[end_of_match..]; let re = Regex::new(r"\[(\d+),(\d+)\]").unwrap(); if re.is_match(test_string) { // Get the captures let captures = re.captures(test_string).unwrap(); let left_num = captures[1].parse::<usize>().unwrap(); let right_num = captures[2].parse::<usize>().unwrap(); return Some(format!( "{}0{}", add_to_last_num_in_string(left_string.to_string(), left_num), add_to_first_num_in_string(right_string.to_string(), right_num) )); } } } None } fn test_split(input: String) -> Option<String> { // Find the first place there is a number of 10 or more with regex let re = Regex::new(r"(\d{2,})").unwrap(); if let Some(cap) = re.captures(&input) { // Get the position of the capture let pos = input.find(&cap[1]).unwrap(); let num = cap[1].parse::<usize>().unwrap(); let left = &input[..pos]; let right = &input[pos + cap[1].len()..]; let new_pair = format!( "[{},{}]", (num as f64 / 2.0).floor(), (num as f64 / 2.0).ceil() ); return Some(format!("{}{}{}", left, new_pair, right)); } None } fn add_snailfish_numbers(left: String, right: String) -> String { format!("[{},{}]", left, right) } fn add_to_last_num_in_string(input: String, num: usize) -> String { let mut pos = input.len() - 1; let mut start_num_pos = 0; let mut end_num_pos = 0; let mut on_num = false; while pos > 0 { if input.chars().nth(pos).unwrap().is_numeric() { if on_num { end_num_pos = pos; } else { start_num_pos = pos; end_num_pos = pos; on_num = true; } } else { if on_num { break; } } pos -= 1; } if start_num_pos != 0 { let first_half = &input[..end_num_pos]; let second_half = &input[start_num_pos + 1..]; let new_num = &input[end_num_pos..start_num_pos + 1] .parse::<usize>() .unwrap() + num; return format!("{}{}{}", first_half, new_num, second_half); } input } fn add_to_first_num_in_string(input: String, num: usize) -> String { let mut pos = 0; let mut start_num_pos = 0; let mut end_num_pos = 0; let mut on_num = false; while pos < input.len() { if input.chars().nth(pos).unwrap().is_numeric() { if on_num { end_num_pos = pos; } else { start_num_pos = pos; end_num_pos = pos; on_num = true; } } else { if on_num { break; } } pos += 1; } if start_num_pos != 0 { let first_half = &input[..start_num_pos]; let second_half = &input[end_num_pos + 1..]; let new_num = &input[start_num_pos..end_num_pos + 1] .parse::<usize>() .unwrap() + num; return format!("{}{}{}", first_half, new_num, second_half); } input } fn calculate_first_magnitude(input: String) -> Option<String> { // The magnitude of a pair is 3 times the magnitude of its left element plus // 2 times the magnitude of its right element. The magnitude of a regular // number is just that number. // For example, the magnitude of [9,1] is 3*9 + 2*1 = 29; the magnitude of [1,9] // is 3*1 + 2*9 = 21. Magnitude calculations are recursive: the magnitude of // [[9,1],[1,9]] is 3*29 + 2*21 = 129. 
// Find the first pair of numbers in the input string let re = Regex::new(r"\[(\d+,\d+)\]").unwrap(); if let Some(cap) = re.captures(&input) { // Get the position of the capture let pos = input.find(&cap[1]).unwrap(); let nums: Vec<u64> = cap[1] .split(",") .map(|x| x.parse::<u64>().unwrap()) .collect(); let left = &input[..pos - 1]; let right = &input[pos + cap[1].len() + 1..]; let new_num = nums[0] * 3 + nums[1] * 2; return Some(format!("{}{}{}", left, new_num, right)); } None } #[aoc(day18, part1)] pub fn solve_part1(input: &Vec<String>) -> i32 { let final_snail = input .iter() .map(|s| s.to_string()) .reduce(|acc, line| { let mut new_line = add_snailfish_numbers(acc.to_string(), line.to_string()); let mut action = true; while action { action = false; if let Some(exploded_line) = test_explode(new_line.clone()) { action = true; new_line = exploded_line; continue; } if let Some(split_line) = test_split(new_line.clone()) { action = true; new_line = split_line; continue; } } new_line }) .unwrap(); let mut make_small = final_snail.clone(); let mut action = true; while action { action = false; if let Some(magnitude) = calculate_first_magnitude(make_small.clone()) { action = true; make_small = magnitude; } } make_small.parse::<i32>().unwrap() } #[aoc(day18, part2)] pub fn solve_part2(input: &Vec<String>) -> i32 { input .iter() .map(|s| s.to_string()) .permutations(2) .map(|perms| { let first = perms[0].clone(); let second = perms[1].clone(); let mut new_line = add_snailfish_numbers(first.to_string(), second.to_string()); let mut action = true; while action { action = false; if let Some(exploded_line) = test_explode(new_line.clone()) { action = true; new_line = exploded_line; continue; } if let Some(split_line) = test_split(new_line.clone()) { action = true; new_line = split_line; continue; } } let mut action = true; while action { action = false; if let Some(magnitude) = calculate_first_magnitude(new_line.clone()) { action = true; new_line = magnitude; } } // Convert into i32 new_line.parse::<i32>().unwrap() }) .max() .unwrap() }
true
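The magnitude rule quoted in the comments (three times the left element plus twice the right, applied recursively) is easier to see on a small tree than on the string-rewriting code above. A standalone sketch:

// magnitude(pair) = 3 * magnitude(left) + 2 * magnitude(right), as described above.
enum Snail {
    Num(u64),
    Pair(Box<Snail>, Box<Snail>),
}

fn magnitude(s: &Snail) -> u64 {
    match s {
        Snail::Num(n) => *n,
        Snail::Pair(l, r) => 3 * magnitude(l) + 2 * magnitude(r),
    }
}

fn main() {
    // [[9,1],[1,9]] -> 3*29 + 2*21 = 129, matching the worked example in the comments.
    let tree = Snail::Pair(
        Box::new(Snail::Pair(Box::new(Snail::Num(9)), Box::new(Snail::Num(1)))),
        Box::new(Snail::Pair(Box::new(Snail::Num(1)), Box::new(Snail::Num(9)))),
    );
    assert_eq!(magnitude(&tree), 129);
}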
341c8e6441d4f39e489b1d26ff469d9e49a2011b
Rust
bs-community/blessing-skin-shell
/src/shell/executable.rs
UTF-8
4,907
2.890625
3
[ "MIT" ]
permissive
use super::transform::Transformer; use super::{Arguments, Executables, Vars}; use crate::parser::ast::Parameters; use crate::stdio::Stdio; use crate::terminal::Terminal; use futures::channel::oneshot::{channel, Sender}; use js_sys::{Function, Promise, Reflect}; use std::cell::Cell; use std::rc::Rc; use wasm_bindgen::prelude::*; use wasm_bindgen_futures::{spawn_local, JsFuture}; pub enum Program { Builtin(Box<dyn Fn() -> Box<dyn Builtin>>), Internal(Box<dyn Fn() -> Box<dyn Internal>>), External(External), } pub trait Builtin { fn run( &self, terminal: &Terminal, executables: &mut Executables, globals: &mut Vars, arguments: Arguments, ); } pub trait Internal { fn run(&self, stdout: Rc<Stdio>, arguments: Arguments, exit: Sender<()>); } pub struct External { function: Function, } impl External { pub fn new(function: Function) -> Self { External { function } } pub fn run(&self, terminal: Rc<Terminal>, arguments: Vec<String>, exit: Sender<()>) { let stdio = Stdio::new(Rc::clone(&terminal)); let f = &self.function; let arguments = serde_wasm_bindgen::to_value(&arguments).expect("arguments conversion failed"); let result = match f.call2(&JsValue::NULL, &JsValue::from(stdio), &arguments) { Ok(value) => value, Err(e) => { let message = Reflect::get(&e, &JsValue::from("message")) .ok() .and_then(|message| message.as_string()) .unwrap_or_else(|| "unknown error".to_string()); terminal.write(&format!("{}\r\n", message)); if exit.send(()).is_err() { terminal.write("Program is hang up...Please refresh the page.\r\n"); } return; } }; spawn_local(async move { let future = JsFuture::from(Promise::resolve(&result)); if future.await.is_err() { terminal.write("\r\n"); }; terminal.write("\u{001b}[?25h"); exit.send(()).expect("sender failure"); }); } } pub struct Runner { running: Rc<Cell<bool>>, } impl Runner { pub fn new() -> Self { Runner { running: Rc::new(Cell::new(false)), } } pub fn is_running(&self) -> bool { self.running.get() } pub fn run_builtin( &self, program: Box<dyn Builtin>, parameters: Option<Parameters>, terminal: &Terminal, executables: &mut Executables, globals: &mut Vars, ) { let transformer = Transformer::new(&globals, false); let arguments = parameters .map(|p| transformer.transform(p)) .unwrap_or_default(); program.run(terminal, executables, globals, arguments); } pub fn run_internal( &mut self, program: Box<dyn Internal>, parameters: Option<Parameters>, globals: &Vars, stdio: Rc<Stdio>, ) { self.running.set(true); let transformer = Transformer::new(&globals, false); let arguments = parameters .map(|p| transformer.transform(p)) .unwrap_or_default(); let (sender, receiver) = channel::<()>(); program.run(Rc::clone(&stdio), arguments, sender); let running = Rc::clone(&self.running); spawn_local(async move { receiver.await.expect("channel receiver failure"); running.set(false); stdio.prompt(); }); } pub fn run_external( &mut self, program: &External, parameters: Option<Parameters>, terminal: Rc<Terminal>, globals: &Vars, stdio: Rc<Stdio>, ) { self.running.set(true); let transformer = Transformer::new(&globals, true); let arguments = parameters .map(|p| transformer.to_texts(transformer.transform(p))) .unwrap_or_default(); let (exit_sender, exit_receiver) = channel::<()>(); program.run(terminal, arguments, exit_sender); let running = Rc::clone(&self.running); spawn_local(async move { exit_receiver.await.expect("channel receiver failure"); running.set(false); stdio.prompt(); }); } } #[cfg(test)] mod tests { use super::*; use crate::programs::Echo; use std::collections::HashMap; #[test] fn 
run_builtin() { let terminal = Terminal::new(); let mut executables = HashMap::new(); let mut globals = HashMap::new(); let runner = Runner::new(); runner.run_builtin( Box::new(Echo::default()), None, &terminal, &mut executables, &mut globals, ); } }
true
5ccc4dbecd1eea03db48f42ce92893f8273080a2
Rust
reinterpretcat/vrp
/vrp-core/src/models/problem/fleet.rs
UTF-8
6,212
2.953125
3
[ "Apache-2.0" ]
permissive
#[cfg(test)] #[path = "../../../tests/unit/models/problem/fleet_test.rs"] mod fleet_test; use crate::models::common::*; use crate::utils::short_type_name; use hashbrown::{HashMap, HashSet}; use std::cmp::Ordering::Less; use std::fmt::{Debug, Formatter}; use std::hash::{Hash, Hasher}; use std::sync::Arc; /// Represents operating costs for driver and vehicle. #[derive(Clone, Debug)] pub struct Costs { /// A fixed cost to use an actor. pub fixed: f64, /// Cost per distance unit. pub per_distance: f64, /// Cost per driving time unit. pub per_driving_time: f64, /// Cost per waiting time unit. pub per_waiting_time: f64, /// Cost per service time unit. pub per_service_time: f64, } /// Represents driver detail (reserved for future use). #[derive(Clone, Hash, Eq, PartialEq)] pub struct DriverDetail {} /// Represents a driver, person who drives Vehicle. /// Introduced to allow the following scenarios: /// * reuse vehicle multiple times with different drivers /// * solve best driver-vehicle match problem. /// NOTE: At the moment, it is not used. pub struct Driver { /// Specifies operating costs for driver. pub costs: Costs, /// Dimensions which contains extra work requirements. pub dimens: Dimensions, /// Specifies driver details. pub details: Vec<DriverDetail>, } /// Specifies a vehicle place. #[derive(Clone, Hash, Eq, PartialEq)] pub struct VehiclePlace { /// Location of a place. pub location: Location, /// Time interval when vehicle is allowed to be at this place. pub time: TimeInterval, } /// Represents a vehicle detail (vehicle shift). #[derive(Clone, Hash, Eq, PartialEq)] pub struct VehicleDetail { /// A place where vehicle starts. pub start: Option<VehiclePlace>, /// A place where vehicle ends. pub end: Option<VehiclePlace>, } /// Represents a vehicle. pub struct Vehicle { /// A vehicle profile. pub profile: Profile, /// Specifies operating costs for vehicle. pub costs: Costs, /// Dimensions which contains extra work requirements. pub dimens: Dimensions, /// Specifies vehicle details. pub details: Vec<VehicleDetail>, } /// Represents an actor detail. #[derive(Clone, Hash, Eq, PartialEq)] pub struct ActorDetail { /// A place where actor's vehicle starts. pub start: Option<VehiclePlace>, /// A place where actor's vehicle ends. pub end: Option<VehiclePlace>, /// Time window when actor allowed to work. pub time: TimeWindow, } /// Represents an actor. pub struct Actor { /// A vehicle associated within actor. pub vehicle: Arc<Vehicle>, /// A driver associated within actor. pub driver: Arc<Driver>, /// Specifies actor detail. pub detail: ActorDetail, } impl Debug for Actor { fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { f.debug_struct(short_type_name::<Self>()) .field("vehicle", &self.vehicle.dimens.get_id().map(|id| id.as_str()).unwrap_or("undef")) .finish_non_exhaustive() } } /// A grouping function for collection of actors. pub type ActorGroupKeyFn = Box<dyn Fn(&[Arc<Actor>]) -> Box<dyn Fn(&Arc<Actor>) -> usize + Send + Sync>>; /// Represents available resources to serve jobs. pub struct Fleet { /// All fleet drivers. pub drivers: Vec<Arc<Driver>>, /// All fleet vehicles. pub vehicles: Vec<Arc<Vehicle>>, /// All fleet profiles. pub profiles: Vec<Profile>, /// All fleet actors. pub actors: Vec<Arc<Actor>>, /// A grouped actors. pub groups: HashMap<usize, HashSet<Arc<Actor>>>, } impl Fleet { /// Creates a new instance of `Fleet`. 
pub fn new(drivers: Vec<Arc<Driver>>, vehicles: Vec<Arc<Vehicle>>, group_key: ActorGroupKeyFn) -> Fleet { // TODO we should also consider multiple drivers to support smart vehicle-driver assignment. assert_eq!(drivers.len(), 1); assert!(!vehicles.is_empty()); let profiles: HashMap<usize, Profile> = vehicles.iter().map(|v| (v.profile.index, v.profile.clone())).collect(); let mut profiles = profiles.into_iter().collect::<Vec<_>>(); profiles.sort_by(|(a, _), (b, _)| a.partial_cmp(b).unwrap_or(Less)); let (_, profiles): (Vec<_>, Vec<_>) = profiles.into_iter().unzip(); let actors = vehicles .iter() .flat_map(|vehicle| { vehicle.details.iter().map(|detail| { Arc::new(Actor { vehicle: vehicle.clone(), driver: drivers.first().unwrap().clone(), detail: ActorDetail { start: detail.start.clone(), end: detail.end.clone(), time: TimeWindow { start: detail.start.as_ref().and_then(|s| s.time.earliest).unwrap_or(0.), end: detail.end.as_ref().and_then(|e| e.time.latest).unwrap_or(f64::MAX), }, }, }) }) }) .collect::<Vec<_>>(); let group_key = (*group_key)(&actors); let groups = actors.iter().cloned().fold(HashMap::new(), |mut acc, actor| { acc.entry((*group_key)(&actor)).or_insert_with(HashSet::new).insert(actor.clone()); acc }); Fleet { drivers, vehicles, profiles, actors, groups } } } impl Debug for Fleet { fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { f.debug_struct(short_type_name::<Self>()) .field("vehicles", &self.vehicles.len()) .field("drivers", &self.drivers.len()) .field("profiles", &self.profiles.len()) .field("actors", &self.actors.len()) .field("groups", &self.groups.len()) .finish() } } impl PartialEq<Actor> for Actor { fn eq(&self, other: &Actor) -> bool { std::ptr::eq(self, other) } } impl Eq for Actor {} impl Hash for Actor { fn hash<H: Hasher>(&self, state: &mut H) { let address = self as *const Actor; address.hash(state); } }
true
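`Fleet::new` above buckets actors by a caller-supplied key function with a fold into a `HashMap` of `HashSet`s. The grouping step in isolation, with plain integers standing in for `Arc<Actor>` and parity as a stand-in key:

use std::collections::{HashMap, HashSet};

// Grouping fold as used in `Fleet::new` above, with illustrative stand-in types.
fn main() {
    let actors = vec![1, 2, 3, 4, 5];
    let group_key = |a: &i32| (*a % 2) as usize;

    let groups: HashMap<usize, HashSet<i32>> =
        actors.iter().fold(HashMap::new(), |mut acc, actor| {
            acc.entry(group_key(actor)).or_insert_with(HashSet::new).insert(*actor);
            acc
        });

    assert_eq!(groups[&0], HashSet::from([2, 4]));
    assert_eq!(groups[&1], HashSet::from([1, 3, 5]));
}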
23a15760565a8b08858ad28df77fdeacd9c8ae7d
Rust
lamaboy2018/ritual
/qt_ritual/crate_templates/qt_core/tests/models_and_casts.rs
UTF-8
1,907
2.546875
3
[ "Apache-2.0", "LicenseRef-scancode-unknown-license-reference", "MIT" ]
permissive
use cpp_utils::{ConstRef, DynamicCast, Ref, StaticUpcast};
use qt_core::{
    ItemDataRole, QAbstractItemModel, QAbstractListModel, QAbstractTableModel, QString,
    QStringList, QStringListModel,
};

#[test]
fn models_and_casts() {
    unsafe {
        let mut string_list = QStringList::new();
        string_list.append(QString::from_std_str("text1").as_ref());
        string_list.append(QString::from_std_str("text2").as_ref());

        let mut string_list_model = QStringListModel::new4(string_list.as_ref());
        assert_eq!(string_list_model.row_count_0a(), 2);

        {
            let index = string_list_model.index_2a(0, 0);
            assert_eq!(
                string_list_model
                    .data_2a(index.as_ref(), ItemDataRole::DisplayRole.to_int())
                    .to_string()
                    .to_std_string(),
                "text1"
            );
        }
        {
            let index = string_list_model.index_2a(1, 0);
            assert_eq!(
                string_list_model
                    .data_2a(index.as_ref(), ItemDataRole::DisplayRole.to_int())
                    .to_string()
                    .to_std_string(),
                "text2"
            );
        }

        let mut abstract_model: Ref<QAbstractListModel> = string_list_model.static_upcast_mut();
        let abstract_model2: Ref<QAbstractItemModel> = abstract_model.static_upcast_mut();
        assert_eq!(abstract_model.row_count_0a(), 2);

        {
            let string_list_model_back: Ref<QStringListModel> = abstract_model
                .dynamic_cast_mut()
                .expect("dynamic_cast should be successful");
            assert_eq!(string_list_model_back.row_count_0a(), 2);
        }

        let table_model_attempt: Option<ConstRef<QAbstractTableModel>> =
            abstract_model2.dynamic_cast();
        assert!(table_model_attempt.is_none());
    }
}
true
8b6ebc9b457c53304b7adb104a455c7b8537b16e
Rust
666666t/mergesort-rust
/src/main.rs
UTF-8
2,963
3.484375
3
[]
no_license
#![feature(test)] extern crate test; use std::cmp::Ordering; extern crate rand; use rand::seq::SliceRandom; use rand::thread_rng; //Lines 1 and 2 are for test featuries. //Ordering is used for the merge function. //rand uses SliceRandom and thread_rng to shuffle the initial vector. fn main() { let mut sort_vec: Vec<i32> = (0..10000000).collect(); sort_vec.shuffle(&mut thread_rng()); sort_vec = merge_sort(sort_vec); println!("{:?}", sort_vec); } fn merge_sort(sort_vec: Vec<i32>) -> Vec<i32> { let mut vec_a: Vec<i32> = Vec::new(); let mut vec_b: Vec<i32> = Vec::new(); //Creates two buffer vectors. if sort_vec.len() > 2 { vec_a.extend(sort_vec.iter().take(sort_vec.len() / 2)); vec_b.extend(sort_vec.iter().skip(sort_vec.len() / 2)); vec_a = merge_sort(vec_a); vec_b = merge_sort(vec_b); merge(vec_a, vec_b) //Extends vec_a and vec_b with their respective half of vector, //then recursively merge sorts before merging the values. } else { if sort_vec.len() == 1 || sort_vec[0] < sort_vec[1] { sort_vec } else { vec![sort_vec[1], sort_vec[0]] } //Bottom level of recursion when vector len <= 2, //returns self if ordered, returns swapped values otherwise. } } fn merge(vec_1: Vec<i32>, vec_2: Vec<i32>) -> Vec<i32> { let mut vec_merged: Vec<i32> = Vec::new(); let mut left_index = 0; let mut right_index = 0; //Creates output buffer and 2 indices for merging. for _ in 0..(vec_1.len() + vec_2.len()) { if left_index == vec_1.len() { vec_merged.extend(vec_2.iter().skip(right_index)); return vec_merged; //If all of vector 1's values have been read, //push vector 2 values to output buffer and return. } else if right_index == vec_2.len() { vec_merged.extend(vec_1.iter().skip(left_index)); return vec_merged; //If all of vector 2's values have been read, //push vector 1 values to output buffer and return. } else { match vec_1[left_index].cmp(&vec_2[right_index]) { Ordering::Less | Ordering::Equal => { vec_merged.push(vec_1[left_index]); left_index += 1; } Ordering::Greater => { vec_merged.push(vec_2[right_index]); right_index += 1; } } //Compare both values at their current index, //push the lesser (or left, if equal) value to buffer, //increment the corresponding index. } } //This bit is a bit messy, need to find alternative to if/else chain. vec_merged } #[cfg(test)] mod tests { use super::*; use test::Bencher; #[bench] fn bench_merge(b: &mut Bencher) { b.iter(|| main()); } }
true
bddc58ccf92d7107625b89a84c235068adab8fc5
Rust
gnoliyil/fuchsia
/src/developer/ffx/plugins/setui/utils/src/lib.rs
UTF-8
4,592
2.921875
3
[ "BSD-2-Clause" ]
permissive
// Copyright 2021 The Fuchsia Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. //! This library contains helper functions for common operations throughout the setui ffx plugins. use anyhow::Error; use futures::{TryFutureExt, TryStream, TryStreamExt}; use std::fmt::Debug; use std::future::Future; /// Re-exported dependencies pulled used by macros #[doc(hidden)] pub mod macro_deps { pub use futures::TryStreamExt; } /// An abstraction over a stream result from a watch, or the string output /// from a get or set call. pub enum Either { Watch(StringTryStream), Set(String), Get(String), } pub type StringTryStream = Box<dyn TryStream<Ok = String, Error = Error, Item = Result<String, Error>> + Unpin>; pub type WatchOrSetResult = Result<Either, Error>; /// A utility function to convert a watch into a stream of responses. Relies /// on the output type of the watch call supporting `Debug` formatting. pub fn watch_to_stream<P, W, Fut, T, E>(proxy: P, watch_fn: W) -> StringTryStream where P: 'static, W: Fn(&P) -> Fut + 'static, Fut: Future<Output = Result<T, E>> + Unpin + 'static, T: Debug, E: Into<Error> + 'static, { formatted_watch_to_stream(proxy, watch_fn, |t| format!("{:#?}", t)) } /// A utility function to convert a watch into a stream of Responses. This variant /// allows specifying the formatting function based on the output type of the /// `watch` call. pub fn formatted_watch_to_stream<P, W, Fut, F, T, E>( proxy: P, watch_fn: W, formatting_fn: F, ) -> StringTryStream where P: 'static, W: Fn(&P) -> Fut + 'static, Fut: Future<Output = Result<T, E>> + Unpin + 'static, F: Fn(T) -> String + Clone + 'static, E: Into<Error> + 'static, { Box::new(futures::stream::try_unfold(proxy, move |proxy| { let formatting_fn = formatting_fn.clone(); watch_fn(&proxy) .map_ok(move |result| Some((formatting_fn(result), proxy))) .map_err(Into::into) })) } /// A utility function to display every output that comes from a watch stream. pub async fn print_results<S>(label: &str, mut stream: S) -> Result<(), Error> where S: TryStream<Ok = String, Error = Error> + Unpin, { println!("Watching `{}` in a loop. Press Ctrl+C to stop.", label); while let Some(output) = stream.try_next().await? { println!("{}", output); } Ok(()) } /// A utility function to manage outputting the results of either a watch or set /// call. pub async fn handle_mixed_result(label: &str, result: WatchOrSetResult) -> Result<(), Error> { Ok(match result? { Either::Watch(stream) => print_results(label, stream).await?, Either::Set(output) | Either::Get(output) => println!("{}: {}", label, output), }) } /// Validate that the results of the call are a successful watch and return the /// first result. #[macro_export] macro_rules! assert_watch { ($expr:expr) => { match $expr.await? { crate::utils::Either::Watch(mut stream) => { $crate::macro_deps::TryStreamExt::try_next(&mut stream) .await? .expect("Watch should have a result") } crate::utils::Either::Set(_) => { panic!("Did not expect a set result for a watch call") } crate::utils::Either::Get(_) => { panic!("Did not expect a get result for a watch call") } } }; } /// Validate that the results of the call are a successful set and return the result. #[macro_export] macro_rules! assert_set { ($expr:expr) => { match $expr.await? 
{ crate::utils::Either::Set(output) => output, crate::utils::Either::Watch(_) => { panic!("Did not expect a watch result for a set call") } crate::utils::Either::Get(_) => { panic!("Did not expect a get result for a set call") } } }; } /// Validate that the results of the call are a successful get and return the result. #[macro_export] macro_rules! assert_get { ($expr:expr) => { match $expr.await? { crate::utils::Either::Get(output) => output, crate::utils::Either::Watch(_) => { panic!("Did not expect a watch result for a get call") } crate::utils::Either::Set(_) => { panic!("Did not expect a set result for a get call") } } }; }
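A minimal sketch of the calling side of these helpers; `fake_set` and the "brightness" label are invented for illustration and are not part of the library.

// Hypothetical caller: build a WatchOrSetResult and hand it to handle_mixed_result.
async fn fake_set(value: f32) -> WatchOrSetResult {
    // A set call just reports the value it wrote.
    Ok(Either::Set(format!("{:?}", value)))
}

async fn run_example() -> Result<(), Error> {
    // Prints `brightness: 0.5` through the Either::Set arm.
    handle_mixed_result("brightness", fake_set(0.5).await).await
}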
true
ff4274104d38abb13b302c2a121ba6b218a39d12
Rust
Eonm/scaffold-rs
/src/path_parent/mod.rs
UTF-8
1,973
3.359375
3
[ "MIT" ]
permissive
pub fn replace_with_parent_dir (path: String) -> String { let splited_path = path.split("/").map(|p| p.to_string()).collect::<Vec<String>>(); let return_value : Vec<String> = splited_path.iter().fold(vec!(),|mut acc, p| { if p.contains("[*]") { match acc.clone().last() { Some(previous_dir) => { let substitution = if acc.len() > 1 { p.replace("[*]", previous_dir) } else { p.replace("[*]", "") }; acc.push(substitution.to_string()); }, None => { acc.push(p.replace("[*]", "")) } }; } else { acc.push(p.to_string()) } acc }); return_value.join("/") } #[cfg(test)] mod tests { use super::*; #[test] fn test_replace_with_parent_dir () { let input = "./dir1/file1"; assert_eq!(replace_with_parent_dir(input.to_string()), input); let input1 = "./dir1/[*]_file1"; assert_eq!(replace_with_parent_dir(input1.to_string()), "./dir1/dir1_file1"); } #[test] fn test_dont_replace_if_ancestor_dir_dont_exist () { let input = "./[*]dir1/file1"; assert_eq!(replace_with_parent_dir(input.to_string()), "./dir1/file1"); let input1 = "[*]/dir1/file1"; assert_eq!(replace_with_parent_dir(input1.to_string()), "/dir1/file1"); let input2 = "[*]/[*]dir1/file1"; assert_eq!(replace_with_parent_dir(input2.to_string()), "/dir1/file1"); } #[test] fn test_replace_multiple_parent_dirs() { let input = "./dir/[*]_file_1_[*]"; assert_eq!(replace_with_parent_dir(input.to_string()), "./dir/dir_file_1_dir"); let input1 = "./dir/sub_[*]/file_[*].txt"; assert_eq!(replace_with_parent_dir(input1.to_string()), "./dir/sub_dir/file_sub_dir.txt"); } }
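An illustrative call outside the test module; the path is made up.

fn example() {
    // "src" is the parent directory of the file, so it replaces the [*] marker.
    let expanded = replace_with_parent_dir("./project/src/[*]_mod.rs".to_string());
    assert_eq!(expanded, "./project/src/src_mod.rs");
}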
true
8732f0a138317d4c60d7c243e69b0eb4c76a9635
Rust
bevyengine/bevy
/crates/bevy_reflect/src/enums/mod.rs
UTF-8
18,999
3.234375
3
[ "Apache-2.0", "MIT", "Zlib", "LicenseRef-scancode-free-unknown", "LicenseRef-scancode-unknown-license-reference", "LicenseRef-scancode-other-permissive" ]
permissive
mod dynamic_enum; mod enum_trait; mod helpers; mod variants; pub use dynamic_enum::*; pub use enum_trait::*; pub use helpers::*; pub use variants::*; #[cfg(test)] mod tests { use crate as bevy_reflect; use crate::*; #[derive(Reflect, Debug, PartialEq)] enum MyEnum { A, B(usize, i32), C { foo: f32, bar: bool }, } #[test] fn should_get_enum_type_info() { let info = MyEnum::type_info(); if let TypeInfo::Enum(info) = info { assert!(info.is::<MyEnum>(), "expected type to be `MyEnum`"); assert_eq!(std::any::type_name::<MyEnum>(), info.type_name()); // === MyEnum::A === // assert_eq!("A", info.variant_at(0).unwrap().name()); assert_eq!("A", info.variant("A").unwrap().name()); if let VariantInfo::Unit(variant) = info.variant("A").unwrap() { assert_eq!("A", variant.name()); } else { panic!("Expected `VariantInfo::Unit`"); } // === MyEnum::B === // assert_eq!("B", info.variant_at(1).unwrap().name()); assert_eq!("B", info.variant("B").unwrap().name()); if let VariantInfo::Tuple(variant) = info.variant("B").unwrap() { assert!(variant.field_at(0).unwrap().is::<usize>()); assert!(variant.field_at(1).unwrap().is::<i32>()); } else { panic!("Expected `VariantInfo::Tuple`"); } // === MyEnum::C === // assert_eq!("C", info.variant_at(2).unwrap().name()); assert_eq!("C", info.variant("C").unwrap().name()); if let VariantInfo::Struct(variant) = info.variant("C").unwrap() { assert!(variant.field_at(0).unwrap().is::<f32>()); assert!(variant.field("foo").unwrap().is::<f32>()); } else { panic!("Expected `VariantInfo::Struct`"); } } else { panic!("Expected `TypeInfo::Enum`"); } } #[test] fn dynamic_enum_should_set_variant_fields() { // === Unit === // let mut value = MyEnum::A; let dyn_enum = DynamicEnum::from(MyEnum::A); value.apply(&dyn_enum); assert_eq!(MyEnum::A, value); // === Tuple === // let mut value = MyEnum::B(0, 0); let dyn_enum = DynamicEnum::from(MyEnum::B(123, 321)); value.apply(&dyn_enum); assert_eq!(MyEnum::B(123, 321), value); // === Struct === // let mut value = MyEnum::C { foo: 0.0, bar: false, }; let dyn_enum = DynamicEnum::from(MyEnum::C { foo: 1.23, bar: true, }); value.apply(&dyn_enum); assert_eq!( MyEnum::C { foo: 1.23, bar: true, }, value ); } #[test] fn partial_dynamic_enum_should_set_variant_fields() { // === Tuple === // let mut value = MyEnum::B(0, 0); let mut data = DynamicTuple::default(); data.insert(123usize); let mut dyn_enum = DynamicEnum::default(); dyn_enum.set_variant("B", data); value.apply(&dyn_enum); assert_eq!(MyEnum::B(123, 0), value); // === Struct === // let mut value = MyEnum::C { foo: 1.23, bar: false, }; let mut data = DynamicStruct::default(); data.insert("bar", true); let mut dyn_enum = DynamicEnum::default(); dyn_enum.set_variant("C", data); value.apply(&dyn_enum); assert_eq!( MyEnum::C { foo: 1.23, bar: true, }, value ); } #[test] fn dynamic_enum_should_apply_dynamic_enum() { let mut a = DynamicEnum::from(MyEnum::B(123, 321)); let b = DynamicEnum::from(MyEnum::B(123, 321)); // Sanity check that equality check works assert!( a.reflect_partial_eq(&b).unwrap_or_default(), "dynamic enums should be equal" ); a.set_variant("A", ()); assert!( !a.reflect_partial_eq(&b).unwrap_or_default(), "dynamic enums should not be equal" ); a.apply(&b); assert!(a.reflect_partial_eq(&b).unwrap_or_default()); } #[test] fn dynamic_enum_should_change_variant() { let mut value = MyEnum::A; // === MyEnum::A -> MyEnum::B === // let mut dyn_enum = DynamicEnum::from(MyEnum::B(123, 321)); value.apply(&dyn_enum); assert_eq!(MyEnum::B(123, 321), value); // === MyEnum::B -> MyEnum::C === // let mut 
data = DynamicStruct::default(); data.insert("foo", 1.23_f32); data.insert("bar", true); dyn_enum.set_variant("C", data); value.apply(&dyn_enum); assert_eq!( MyEnum::C { foo: 1.23, bar: true }, value ); // === MyEnum::C -> MyEnum::B === // let mut data = DynamicTuple::default(); data.insert(123_usize); data.insert(321_i32); dyn_enum.set_variant("B", data); value.apply(&dyn_enum); assert_eq!(MyEnum::B(123, 321), value); // === MyEnum::B -> MyEnum::A === // dyn_enum.set_variant("A", ()); value.apply(&dyn_enum); assert_eq!(MyEnum::A, value); } #[test] fn enum_should_iterate_fields() { // === Unit === // let value: &dyn Enum = &MyEnum::A; assert_eq!(0, value.field_len()); let mut iter = value.iter_fields(); assert!(iter.next().is_none()); // === Tuple === // let value: &dyn Enum = &MyEnum::B(123, 321); assert_eq!(2, value.field_len()); let mut iter = value.iter_fields(); assert!(iter .next() .and_then(|field| field.value().reflect_partial_eq(&123_usize)) .unwrap_or_default()); assert!(iter .next() .and_then(|field| field.value().reflect_partial_eq(&321_i32)) .unwrap_or_default()); // === Struct === // let value: &dyn Enum = &MyEnum::C { foo: 1.23, bar: true, }; assert_eq!(2, value.field_len()); let mut iter = value.iter_fields(); assert!(iter .next() .and_then(|field| field .value() .reflect_partial_eq(&1.23_f32) .and(field.name().map(|name| name == "foo"))) .unwrap_or_default()); assert!(iter .next() .and_then(|field| field .value() .reflect_partial_eq(&true) .and(field.name().map(|name| name == "bar"))) .unwrap_or_default()); } #[test] fn enum_should_return_correct_variant_type() { // === Unit === // let value = MyEnum::A; assert_eq!(VariantType::Unit, value.variant_type()); // === Tuple === // let value = MyEnum::B(0, 0); assert_eq!(VariantType::Tuple, value.variant_type()); // === Struct === // let value = MyEnum::C { foo: 1.23, bar: true, }; assert_eq!(VariantType::Struct, value.variant_type()); } #[test] fn enum_should_return_correct_variant_path() { // === Unit === // let value = MyEnum::A; assert_eq!( "bevy_reflect::enums::tests::MyEnum::A", value.variant_path() ); // === Tuple === // let value = MyEnum::B(0, 0); assert_eq!( "bevy_reflect::enums::tests::MyEnum::B", value.variant_path() ); // === Struct === // let value = MyEnum::C { foo: 1.23, bar: true, }; assert_eq!( "bevy_reflect::enums::tests::MyEnum::C", value.variant_path() ); } #[test] #[should_panic(expected = "`((usize, i32))` is not an enum")] fn applying_non_enum_should_panic() { let mut value = MyEnum::B(0, 0); let mut dyn_tuple = DynamicTuple::default(); dyn_tuple.insert((123_usize, 321_i32)); value.apply(&dyn_tuple); } #[test] fn should_skip_ignored_fields() { #[derive(Reflect, Debug, PartialEq)] enum TestEnum { A, B, C { #[reflect(ignore)] foo: f32, bar: bool, }, } if let TypeInfo::Enum(info) = TestEnum::type_info() { assert_eq!(3, info.variant_len()); if let VariantInfo::Struct(variant) = info.variant("C").unwrap() { assert_eq!( 1, variant.field_len(), "expected one of the fields to be ignored" ); assert!(variant.field_at(0).unwrap().is::<bool>()); } else { panic!("expected `VariantInfo::Struct`"); } } else { panic!("expected `TypeInfo::Enum`"); } } #[test] fn enum_should_allow_generics() { #[derive(Reflect, Debug, PartialEq)] enum TestEnum<T: FromReflect> { A, B(T), C { value: T }, } if let TypeInfo::Enum(info) = TestEnum::<f32>::type_info() { if let VariantInfo::Tuple(variant) = info.variant("B").unwrap() { assert!(variant.field_at(0).unwrap().is::<f32>()); } else { panic!("expected `VariantInfo::Struct`"); } if let 
VariantInfo::Struct(variant) = info.variant("C").unwrap() { assert!(variant.field("value").unwrap().is::<f32>()); } else { panic!("expected `VariantInfo::Struct`"); } } else { panic!("expected `TypeInfo::Enum`"); } let mut value = TestEnum::<f32>::A; // === Tuple === // let mut data = DynamicTuple::default(); data.insert(1.23_f32); let dyn_enum = DynamicEnum::new("B", data); value.apply(&dyn_enum); assert_eq!(TestEnum::B(1.23), value); // === Struct === // let mut data = DynamicStruct::default(); data.insert("value", 1.23_f32); let dyn_enum = DynamicEnum::new("C", data); value.apply(&dyn_enum); assert_eq!(TestEnum::C { value: 1.23 }, value); } #[test] fn enum_should_allow_struct_fields() { #[derive(Reflect, Debug, PartialEq)] enum TestEnum { A, B(TestStruct), C { value: TestStruct }, } #[derive(Reflect, Debug, PartialEq)] struct TestStruct(usize); let mut value = TestEnum::A; // === Tuple === // let mut data = DynamicTuple::default(); data.insert(TestStruct(123)); let dyn_enum = DynamicEnum::new("B", data); value.apply(&dyn_enum); assert_eq!(TestEnum::B(TestStruct(123)), value); // === Struct === // let mut data = DynamicStruct::default(); data.insert("value", TestStruct(123)); let dyn_enum = DynamicEnum::new("C", data); value.apply(&dyn_enum); assert_eq!( TestEnum::C { value: TestStruct(123) }, value ); } #[test] fn enum_should_allow_nesting_enums() { #[derive(Reflect, Debug, PartialEq)] enum TestEnum { A, B(OtherEnum), C { value: OtherEnum }, } #[derive(Reflect, Debug, PartialEq)] enum OtherEnum { A, B(usize), C { value: f32 }, } let mut value = TestEnum::A; // === Tuple === // let mut data = DynamicTuple::default(); data.insert(OtherEnum::B(123)); let dyn_enum = DynamicEnum::new("B", data); value.apply(&dyn_enum); assert_eq!(TestEnum::B(OtherEnum::B(123)), value); // === Struct === // let mut data = DynamicStruct::default(); data.insert("value", OtherEnum::C { value: 1.23 }); let dyn_enum = DynamicEnum::new("C", data); value.apply(&dyn_enum); assert_eq!( TestEnum::C { value: OtherEnum::C { value: 1.23 } }, value ); } #[test] fn enum_should_apply() { let mut value: Box<dyn Reflect> = Box::new(MyEnum::A); // === MyEnum::A -> MyEnum::A === // value.apply(&MyEnum::A); assert!(value.reflect_partial_eq(&MyEnum::A).unwrap_or_default()); // === MyEnum::A -> MyEnum::B === // value.apply(&MyEnum::B(123, 321)); assert!(value .reflect_partial_eq(&MyEnum::B(123, 321)) .unwrap_or_default()); // === MyEnum::B -> MyEnum::B === // value.apply(&MyEnum::B(321, 123)); assert!(value .reflect_partial_eq(&MyEnum::B(321, 123)) .unwrap_or_default()); // === MyEnum::B -> MyEnum::C === // value.apply(&MyEnum::C { foo: 1.23, bar: true, }); assert!(value .reflect_partial_eq(&MyEnum::C { foo: 1.23, bar: true }) .unwrap_or_default()); // === MyEnum::C -> MyEnum::C === // value.apply(&MyEnum::C { foo: 3.21, bar: false, }); assert!(value .reflect_partial_eq(&MyEnum::C { foo: 3.21, bar: false }) .unwrap_or_default()); // === MyEnum::C -> MyEnum::B === // value.apply(&MyEnum::B(123, 321)); assert!(value .reflect_partial_eq(&MyEnum::B(123, 321)) .unwrap_or_default()); // === MyEnum::B -> MyEnum::A === // value.apply(&MyEnum::A); assert!(value.reflect_partial_eq(&MyEnum::A).unwrap_or_default()); } #[test] fn enum_should_set() { let mut value: Box<dyn Reflect> = Box::new(MyEnum::A); // === MyEnum::A -> MyEnum::A === // value.set(Box::new(MyEnum::A)).unwrap(); assert!(value.reflect_partial_eq(&MyEnum::A).unwrap_or_default()); // === MyEnum::A -> MyEnum::B === // value.set(Box::new(MyEnum::B(123, 321))).unwrap(); 
assert!(value .reflect_partial_eq(&MyEnum::B(123, 321)) .unwrap_or_default()); // === MyEnum::B -> MyEnum::B === // value.set(Box::new(MyEnum::B(321, 123))).unwrap(); assert!(value .reflect_partial_eq(&MyEnum::B(321, 123)) .unwrap_or_default()); // === MyEnum::B -> MyEnum::C === // value .set(Box::new(MyEnum::C { foo: 1.23, bar: true, })) .unwrap(); assert!(value .reflect_partial_eq(&MyEnum::C { foo: 1.23, bar: true }) .unwrap_or_default()); // === MyEnum::C -> MyEnum::C === // value .set(Box::new(MyEnum::C { foo: 3.21, bar: false, })) .unwrap(); assert!(value .reflect_partial_eq(&MyEnum::C { foo: 3.21, bar: false }) .unwrap_or_default()); // === MyEnum::C -> MyEnum::B === // value.set(Box::new(MyEnum::B(123, 321))).unwrap(); assert!(value .reflect_partial_eq(&MyEnum::B(123, 321)) .unwrap_or_default()); // === MyEnum::B -> MyEnum::A === // value.set(Box::new(MyEnum::A)).unwrap(); assert!(value.reflect_partial_eq(&MyEnum::A).unwrap_or_default()); } #[test] fn enum_should_partial_eq() { #[derive(Reflect)] enum TestEnum { A, A1, B(usize), B1(usize), B2(usize, usize), C { value: i32 }, C1 { value: i32 }, C2 { value: f32 }, } let a: &dyn Reflect = &TestEnum::A; let b: &dyn Reflect = &TestEnum::A; assert!( a.reflect_partial_eq(b).unwrap_or_default(), "expected TestEnum::A == TestEnum::A" ); let a: &dyn Reflect = &TestEnum::A; let b: &dyn Reflect = &TestEnum::A1; assert!( !a.reflect_partial_eq(b).unwrap_or_default(), "expected TestEnum::A != TestEnum::A1" ); let a: &dyn Reflect = &TestEnum::B(123); let b: &dyn Reflect = &TestEnum::B(123); assert!( a.reflect_partial_eq(b).unwrap_or_default(), "expected TestEnum::B(123) == TestEnum::B(123)" ); let a: &dyn Reflect = &TestEnum::B(123); let b: &dyn Reflect = &TestEnum::B(321); assert!( !a.reflect_partial_eq(b).unwrap_or_default(), "expected TestEnum::B(123) != TestEnum::B(321)" ); let a: &dyn Reflect = &TestEnum::B(123); let b: &dyn Reflect = &TestEnum::B1(123); assert!( !a.reflect_partial_eq(b).unwrap_or_default(), "expected TestEnum::B(123) != TestEnum::B1(123)" ); let a: &dyn Reflect = &TestEnum::B(123); let b: &dyn Reflect = &TestEnum::B2(123, 123); assert!( !a.reflect_partial_eq(b).unwrap_or_default(), "expected TestEnum::B(123) != TestEnum::B2(123, 123)" ); let a: &dyn Reflect = &TestEnum::C { value: 123 }; let b: &dyn Reflect = &TestEnum::C { value: 123 }; assert!( a.reflect_partial_eq(b).unwrap_or_default(), "expected TestEnum::C{{value: 123}} == TestEnum::C{{value: 123}}" ); let a: &dyn Reflect = &TestEnum::C { value: 123 }; let b: &dyn Reflect = &TestEnum::C { value: 321 }; assert!( !a.reflect_partial_eq(b).unwrap_or_default(), "expected TestEnum::C{{value: 123}} != TestEnum::C{{value: 321}}" ); let a: &dyn Reflect = &TestEnum::C { value: 123 }; let b: &dyn Reflect = &TestEnum::C1 { value: 123 }; assert!( !a.reflect_partial_eq(b).unwrap_or_default(), "expected TestEnum::C{{value: 123}} != TestEnum::C1{{value: 123}}" ); let a: &dyn Reflect = &TestEnum::C { value: 123 }; let b: &dyn Reflect = &TestEnum::C2 { value: 1.23 }; assert!( !a.reflect_partial_eq(b).unwrap_or_default(), "expected TestEnum::C{{value: 123}} != TestEnum::C2{{value: 1.23}}" ); } }
true
fe9bd43b9b9e15c469edb663b42ba5e17e774e4e
Rust
shadow/shadow
/src/lib/vasi/src/lib.rs
UTF-8
4,297
2.90625
3
[ "LicenseRef-scancode-public-domain" ]
permissive
#![no_std] // https://github.com/rust-lang/rfcs/blob/master/text/2585-unsafe-block-in-unsafe-fn.md #![deny(unsafe_op_in_unsafe_fn)] // For use by code generated by vasi_macro::VirtualAddressSpaceIndependent; #[doc(hidden)] pub use static_assertions::assert_impl_all; pub use vasi_macro::VirtualAddressSpaceIndependent; /// A type implementing this trait guarantees that accessing instances /// of that type outside of their original virtual address space does not /// violate Rust's safety requirements. /// /// This is mostly equivalent to them being *self-contained*: not referring to /// memory outside of the range `[&self, &self + std::mem::sizeof<T>()]`. /// However, they *may* reference memory outside of that range if they somehow /// ensure that they only do so from inside the virtual address space where it /// is valid to do so. /// /// Types implementing this trait can definitely not contain references. If /// they contain pointers, the type is responsible for ensuring those pointers /// aren't dereferenced outside a virtual address space where they are valid. /// /// *Relative* pointers, e.g. as implemented in /// [rkyv](https://crates.io/crates/rkyv), are acceptable as long as they point /// within the boundaries of the enclosing type. /// /// # Safety /// /// The type must actually be self-contained, as above. pub unsafe trait VirtualAddressSpaceIndependent { /// Used by the derive macro to validate that fields are Vasi. const IGNORE: () = (); } // Types not containing any pointers are trivially VirtualAddressSpaceIndependent. unsafe impl VirtualAddressSpaceIndependent for i64 {} unsafe impl VirtualAddressSpaceIndependent for u64 {} unsafe impl VirtualAddressSpaceIndependent for i32 {} unsafe impl VirtualAddressSpaceIndependent for u32 {} unsafe impl VirtualAddressSpaceIndependent for i16 {} unsafe impl VirtualAddressSpaceIndependent for u16 {} unsafe impl VirtualAddressSpaceIndependent for i8 {} unsafe impl VirtualAddressSpaceIndependent for u8 {} unsafe impl VirtualAddressSpaceIndependent for usize {} unsafe impl VirtualAddressSpaceIndependent for isize {} unsafe impl VirtualAddressSpaceIndependent for bool {} // e.g. "This type has the same in-memory representation as the underlying integer type, u64" unsafe impl VirtualAddressSpaceIndependent for core::sync::atomic::AtomicU64 {} unsafe impl VirtualAddressSpaceIndependent for core::sync::atomic::AtomicI64 {} unsafe impl VirtualAddressSpaceIndependent for core::sync::atomic::AtomicU32 {} unsafe impl VirtualAddressSpaceIndependent for core::sync::atomic::AtomicI32 {} unsafe impl VirtualAddressSpaceIndependent for core::sync::atomic::AtomicU16 {} unsafe impl VirtualAddressSpaceIndependent for core::sync::atomic::AtomicI16 {} unsafe impl VirtualAddressSpaceIndependent for core::sync::atomic::AtomicU8 {} unsafe impl VirtualAddressSpaceIndependent for core::sync::atomic::AtomicI8 {} unsafe impl VirtualAddressSpaceIndependent for core::sync::atomic::AtomicBool {} // An array of T is [VirtualAddressSpaceIndependent] if its elements are. unsafe impl<T, const N: usize> VirtualAddressSpaceIndependent for [T; N] where T: VirtualAddressSpaceIndependent { } // PhantomData is intrinsically VirtualAddressSpaceIndependent. // // Conservatively only implement when T is VirtualAddressSpaceIndependent, // but unclear whether this restriction is necessary. unsafe impl<T> VirtualAddressSpaceIndependent for core::marker::PhantomData<T> where T: VirtualAddressSpaceIndependent { } // Cell is `repr(transparent)` around an `UnsafeCell<T>`. 
unsafe impl<T> VirtualAddressSpaceIndependent for core::cell::Cell<T> where T: VirtualAddressSpaceIndependent { } // UnsafeCell is `repr(transparent)` around a `T`. unsafe impl<T> VirtualAddressSpaceIndependent for core::cell::UnsafeCell<T> where T: VirtualAddressSpaceIndependent { } // ManuallyDrop is `repr(transparent)` around a `T`. unsafe impl<T> VirtualAddressSpaceIndependent for core::mem::ManuallyDrop<T> where T: VirtualAddressSpaceIndependent { } // MaybeUninit is `repr(transparent)` around a union of `()` and `ManuallyDrop<T>`. unsafe impl<T> VirtualAddressSpaceIndependent for core::mem::MaybeUninit<T> where T: VirtualAddressSpaceIndependent { } unsafe impl VirtualAddressSpaceIndependent for () {}
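A sketch of what using the re-exported derive might look like on a plain-data struct; SharedCounter is made up, and the derive is assumed here to do no more than check that every field implements the trait (which AtomicU64 and [u8; 4] do, per the impls above).

#[derive(VirtualAddressSpaceIndependent)]
#[repr(C)]
struct SharedCounter {
    // Both field types already have impls above, so the derive's
    // per-field assertions pass.
    hits: core::sync::atomic::AtomicU64,
    flags: [u8; 4],
}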
true
1f3067976293e5d1bb1f58a3a6cced1598e70209
Rust
SCappella/exercism
/rust/isogram/src/lib.rs
UTF-8
324
2.609375
3
[ "MIT" ]
permissive
pub fn check(candidate: &str) -> bool { let letters: Vec<_> = candidate .chars() .filter(|c| c.is_ascii_alphabetic()) .map(|c| c.to_ascii_lowercase()) .collect(); let letters_dedup: std::collections::BTreeSet<_> = letters.iter().collect(); letters.len() == letters_dedup.len() }
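An equivalent single-pass variant, for comparison: it stops at the first repeated letter instead of collecting all of them first.

pub fn check_short_circuit(candidate: &str) -> bool {
    let mut seen = std::collections::BTreeSet::new();
    candidate
        .chars()
        .filter(|c| c.is_ascii_alphabetic())
        .map(|c| c.to_ascii_lowercase())
        // insert returns false on a duplicate, and all() stops there.
        .all(|c| seen.insert(c))
}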
true
2fc71d095e024d67b54f9d0f6d33c2f8dffef1d7
Rust
hairyhum/ockam
/implementations/rust/ockam/ockam/src/profile/profile_change_event.rs
UTF-8
1,257
2.78125
3
[ "Apache-2.0", "LicenseRef-scancode-unknown-license-reference" ]
permissive
use crate::{EventIdentifier, ProfileChange, ProfileChangeProof}; pub type Changes = Vec<ProfileChange>; /// [`Profile`]s are modified using a change events mechanism. One event may have 1 or more [`ProfileChange`]s /// Proof is used to check whether this event comes from a party authorized to perform such an update /// Individual changes may include additional proofs, if needed #[derive(Clone, Debug)] pub struct ProfileChangeEvent { identifier: EventIdentifier, changes: Changes, proof: ProfileChangeProof, } impl ProfileChangeEvent { /// Unique [`EventIdentifier`] pub fn identifier(&self) -> &EventIdentifier { &self.identifier } /// Set of changes being applied pub fn changes(&self) -> &Changes { &self.changes } /// Proof is used to check whether this event comes from a party authorized to perform such an update /// Individual changes may include additional proofs, if needed pub fn proof(&self) -> &ProfileChangeProof { &self.proof } } impl ProfileChangeEvent { pub fn new(identifier: EventIdentifier, changes: Changes, proof: ProfileChangeProof) -> Self { ProfileChangeEvent { identifier, changes, proof, } } }
true
eaf13fc022de8e491368d9fc6c17ae29dd0311bb
Rust
mdinger/rules
/tests/range_set/symmetric_difference.rs
UTF-8
2,132
3.09375
3
[ "Apache-2.0", "MIT", "LicenseRef-scancode-unknown-license-reference" ]
permissive
use super::generate; #[test] fn partial_overlap() { let set = generate(vec![('3', '6')]); let low = generate(vec![('1', '4')]); let high = generate(vec![('5', '9')]); let other_low = generate(vec![('1', '2'), ('5', '6')]); let other_high = generate(vec![('3', '4'), ('7', '9')]); assert_eq!(set.symmetric_difference(&low), other_low); assert_eq!(set.symmetric_difference(&high), other_high); } #[test] fn subset() { let set = generate(vec![('2', '7')]); let subset = generate(vec![('3', '6')]); let exact_left = generate(vec![('2', '6')]); let exact_right = generate(vec![('3', '7')]); let other_subset = generate(vec![('2', '2'), ('7', '7')]); let other_left = generate(vec![('7', '7')]); let other_right = generate(vec![('2', '2')]); let other_both = generate(vec![]); assert_eq!(set.symmetric_difference(&subset), other_subset); assert_eq!(set.symmetric_difference(&exact_left), other_left); assert_eq!(set.symmetric_difference(&exact_right), other_right); assert_eq!(set.symmetric_difference(&set), other_both); } #[test] fn superset() { let set = generate(vec![('2', '7')]); let superset = generate(vec![('1', '8')]); let other = generate(vec![('1', '1'), ('8', '8')]); assert_eq!(set.symmetric_difference(&superset), other); } #[test] fn disjoint() { let set = generate(vec![('4', '5')]); let low = generate(vec![('1', '2')]); let high = generate(vec![('7', '8')]); let other_low = generate(vec![('1', '2'), ('4', '5')]); let other_high = generate(vec![('4', '5'), ('7', '8')]); assert_eq!(set.symmetric_difference(&low), other_low); assert_eq!(set.symmetric_difference(&high), other_high); } #[test] fn disjoint_extend() { let set = generate(vec![('3', '4')]); let low = generate(vec![('1', '2')]); let high = generate(vec![('5', '6')]); let other_low = generate(vec![('1', '4')]); let other_high = generate(vec![('3', '6')]); assert_eq!(set.symmetric_difference(&low), other_low); assert_eq!(set.symmetric_difference(&high), other_high); }
true
09596d21f6be2b7ff374d3dc8fb753abb1128e61
Rust
justinliew/leaderboard
/src/listings.rs
UTF-8
2,419
2.875
3
[]
no_license
use fastly::http::StatusCode; use fastly::{Error,Response}; use uuid::Uuid; use std::time::{Duration,SystemTime}; use serde::{Deserialize,Serialize}; use serde::de::DeserializeOwned; use crate::valkeyrie::{get_raw,write_raw}; #[derive(Serialize,Deserialize,Debug,PartialOrd,PartialEq,Clone)] pub struct ListingEntry { id: String, name: String, score: i32, rank: usize, // last_heartbeat: SystemTime, } impl ListingEntry { pub fn new(id: String, name: String, score: i32) -> Self { ListingEntry{ id:id, name:name, score:score, rank: 0, } } } pub fn write_listings(listings: Vec<ListingEntry>) -> Result<(),Error> { write_raw("lblistings", listings) } pub fn add_or_update_listing(id: &str, name: &str, score: i32) -> Result<Vec<ListingEntry>,Error> { match get_raw::<ListingEntry>("lblistings") { Ok(mut listings) => { match listings.iter_mut().find(|l| l.id == id) { Some(mut l) => { l.score = score; }, _ => { let listing = ListingEntry::new(id.to_string(), name.to_string() ,score); listings.push(listing); } } listings.sort_by(|l1,l2| l2.score.cmp(&l1.score)); for i in 0..listings.len() { listings[i].rank = i + 1; } Ok(listings) }, Err(e) => { println!("Error getting listings: {:?}", e); Err(e) } } } pub fn get_index_for_score(score: i32) -> Option<(usize, usize)> { match get_raw::<ListingEntry>("lblistings") { Ok(mut listings) => { for (index,listing) in listings.iter().enumerate() { if listing.score < score { return Some((index, listings.len())); } } return Some((match listings.len() { 0 => 0, _ => listings.len()-1, },listings.len())); }, _ => return None, } } pub fn get_listings(s: usize, num: usize) -> Result<Vec<ListingEntry>,Error> { match get_raw::<ListingEntry>("lblistings") { Ok(mut listings) => { if num == 0 { return Ok(listings); } let start = std::cmp::min(s,listings.len()); let end = std::cmp::min(start+num, listings.len()); Ok(listings[start..end].to_vec()) }, Err(e) => { Err(e) } } // let mut listings = vec![]; // listings.push(ListingEntry{id: "0".to_string(), name: "Jessica".to_string(), score: 200}); // listings.push(ListingEntry{id: "0".to_string(), name: "Esme".to_string(), score: 100}); // listings.push(ListingEntry{id: "0".to_string(), name: "Zayden".to_string(), score: 57}); // Ok(listings) }
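A sketch of how these functions compose inside a request handler; the id, name, and score are made up, and error handling is left to the caller.

fn example_update() -> Result<(), Error> {
    // Insert or update one player, which also re-sorts and re-ranks the board.
    let listings = add_or_update_listing("player-42", "Ada", 1300)?;
    // Persist the re-ranked listings back to the store.
    write_listings(listings)?;
    // First page of ten entries (num == 0 would return everything).
    let _top_ten = get_listings(0, 10)?;
    Ok(())
}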
true
998bce2685ff322330a444e64e19927b6e767a69
Rust
Michael-F-Bryan/MIPS
/src/bin/mips_emulator.rs
UTF-8
2,215
3.34375
3
[ "MIT" ]
permissive
//! A MIPS emulator written entirely in Rust. #![warn(missing_docs)] #![feature(plugin)] #![plugin(docopt_macros)] extern crate byteorder; extern crate rustc_serialize; extern crate docopt; #[macro_use] extern crate log; extern crate env_logger; extern crate mips; use mips::Processor; use std::fs::File; use std::io::Read; use std::process::exit; docopt!(Args derive Debug, " A MIPS emulator written in Rust. Usage: mips [options] <file> mips (-h | --help) mips --version Options: --version Print the version number and exit -h --help Print this help text -v --verbose Verbose output "); fn main() { let args: Args = Args::docopt().decode().unwrap_or_else(|e| e.exit()); env_logger::init().unwrap(); debug!("Command Line Arguments -> {:?}", args); if args.flag_version { println!("{} v{}", "mips", env!("CARGO_PKG_VERSION")); } else if !args.arg_file.is_empty() { run_program(args.arg_file.clone(), args); } exit(0); } /// Read in a filename, load it into the emulator's memory, and then step /// through instructions until the program either terminates or crashes. fn run_program(filename: String, args: Args) -> u32 { let mut data: Vec<u8> = Vec::new(); let mut f = File::open(&filename).expect("Unable to open file"); f.read_to_end(&mut data).expect("Unable to read data"); if args.flag_verbose { info!("Read {:#} bytes from {}", data.len(), filename); } // Create a new processor let mut cpu = Processor::new(); // And load the program into memory match cpu.load(data) { Ok(_) => { if args.flag_verbose { info!("Program loaded"); } } Err(e) => { error!("{}", e); return 1; } } // Now keep executing instructions until we hit an error let result = cpu.start(); if result.is_err() { error!("{}", result.unwrap_err()); } // Print the contents of the registers and the program counter if args.flag_verbose { println!("Program Counter: {}", cpu.program_counter() - 4); println!("{:?}", cpu.registers); } 0 }
true
b3c6689f0c7f9ec97e9895f63faac7eacd8a6bef
Rust
jieyouxu/propositional-tableau-solver-rs
/src/tableaux_solver/tableau.rs
UTF-8
4,263
3.90625
4
[]
no_license
//! A `Tableau` is a collection of `Theory`-ies. This corresponds to the entire propositional //! tableau tree, where each `Theory` is a branch (from the root node to each leaf). use std::collections::VecDeque; use crate::formula::PropositionalFormula; use super::Theory; /// A `Tableau` is a collection of `Theory`-ies. This corresponds to the entire propositional /// tableau tree, where each `Theory` is a branch (from the root node to each leaf). /// /// For example, given the tableau (tree) /// /// ```text /// (a^b) /// / \ /// a b /// ``` /// /// There are two branches (hence two `Theory`-ies): /// /// 1. `{ (a^b), a }` /// 2. `{ (a^b), b }` #[derive(Debug, Clone, PartialEq)] pub struct Tableau { theories: VecDeque<Theory>, } impl Tableau { /// Construct a new `Tableau` with no theories. pub fn new() -> Self { Self { theories: VecDeque::new(), } } /// Construct a `Tableau` with the starting root node being the given propositional formula. pub fn from_starting_propositional_formula(formula: PropositionalFormula) -> Self { let mut theories = VecDeque::new(); theories.push_back(Theory::from_propositional_formula(formula)); Self { theories } } /// Check if the `Tableau` contains no `Theory`-ies. pub fn is_empty(&self) -> bool { self.theories.is_empty() } /// Retrieve a `Theory` from the `Tableau`. pub fn pop_theory(&mut self) -> Option<Theory> { self.theories.pop_front() } /// Add a `Theory` to the `Tableau`. pub fn push_theory(&mut self, theory: Theory) { self.theories.push_back(theory) } /// Check if the `Tableau` already contains the `Theory`. pub fn contains(&self, theory: &Theory) -> bool { self.theories.contains(theory) } } #[cfg(test)] mod tests { use super::*; use crate::formula::Variable; use assert2::check; #[test] fn test_empty_construction() { let empty_tab = Tableau::new(); check!(empty_tab.is_empty()); } #[test] fn test_single_construction() { let mut single_tab = Tableau::from_starting_propositional_formula( PropositionalFormula::variable(Variable::new("a")), ); check!(!single_tab.is_empty()); check!(single_tab.pop_theory().unwrap().formulas().count() == 1); } #[test] fn test_push_theory() { let mut tab = Tableau::new(); check!(tab.is_empty()); tab.push_theory(Theory::from_propositional_formula( PropositionalFormula::variable(Variable::new("a")), )); check!(!tab.is_empty()); let theory = tab.pop_theory().unwrap(); check!( &PropositionalFormula::variable(Variable::new("a")) == theory.formulas().next().unwrap() ); } #[test] fn test_pop_theory() { let mut tab = Tableau::from_starting_propositional_formula(PropositionalFormula::variable( Variable::new("a"), )); check!(!tab.is_empty()); let theory = tab.pop_theory().unwrap(); check!( &PropositionalFormula::variable(Variable::new("a")) == theory.formulas().next().unwrap() ); } #[test] fn test_push_pop_theory() { let mut tab = Tableau::new(); tab.push_theory(Theory::from_propositional_formula( PropositionalFormula::variable(Variable::new("a")), )); let _ = tab.pop_theory(); check!(tab.is_empty()); } #[test] fn test_contains_theory() { let tab = Tableau::from_starting_propositional_formula(PropositionalFormula::variable( Variable::new("a"), )); check!(tab.contains(&Theory::from_propositional_formula( PropositionalFormula::variable(Variable::new("a")) ))); } #[test] fn test_does_not_contain_theory() { let tab = Tableau::from_starting_propositional_formula(PropositionalFormula::variable( Variable::new("a"), )); check!(!tab.contains(&Theory::from_propositional_formula( PropositionalFormula::variable(Variable::new("b")) ))); } }
true
c674910c72dbd97d215c00143ac77229148bd53b
Rust
danieldk/conllx-utils
/src/bin/conllx-projectivize.rs
UTF-8
1,525
2.703125
3
[]
no_license
use std::env::args; use std::io::BufWriter; use conllx::{Deprojectivize, HeadProjectivizer, Projectivize, WriteSentence}; use conllx_utils::or_exit; use getopts::Options; use stdinout::{Input, Output}; fn print_usage(program: &str, opts: Options) { let brief = format!("Usage: {} [options] [INPUT_FILE] [OUTPUT_FILE]", program); print!("{}", opts.usage(&brief)); } fn main() { let args: Vec<String> = args().collect(); let program = args[0].clone(); let mut opts = Options::new(); opts.optflag("h", "help", "print this help menu"); opts.optflag("d", "deproj", "deprojectivize (head strategy)"); let matches = or_exit(opts.parse(&args[1..])); if matches.opt_present("h") { print_usage(&program, opts); return; } if matches.free.len() > 2 { print_usage(&program, opts); return; } let projectivizer = HeadProjectivizer::new(); let deproj = matches.opt_present("d"); let input = Input::from(matches.free.get(0)); let reader = conllx::Reader::new(or_exit(input.buf_read())); let output = Output::from(matches.free.get(1)); let mut writer = conllx::Writer::new(BufWriter::new(or_exit(output.write()))); for sentence in reader { let sentence = or_exit(sentence); if deproj { or_exit(writer.write_sentence(&or_exit(projectivizer.deprojectivize(&sentence)))); } else { or_exit(writer.write_sentence(&or_exit(projectivizer.projectivize(&sentence)))); } } }
true
6288976cab381b1f0aba8a009c64477d12a87031
Rust
nambrosini/adventofcode
/2020/src/day14.rs
UTF-8
3,777
3.0625
3
[]
no_license
use std::collections::HashMap; #[aoc_generator(day14)] fn generator(input: &str) -> Vec<String> { input.lines().map(|l| l.to_owned()).collect() } fn get_masked_value(mask: &str, value: i64) -> i64 { let mut bits: Vec<i64> = vec![0; mask.len()]; let mut v = value; let mut counter = bits.len() - 1; while v > 0 { let bit = v & 1; bits[counter] = bit; v >>= 1; counter -= 1; } let mask: Vec<char> = mask.chars().collect(); for i in 0..mask.len() { if mask[i] == '1' { bits[i] = 1; } else if mask[i] == '0' { bits[i] = 0; } } let mut res: i64 = 0; let min = bits.iter().enumerate().find(|(_, &x)| x == 1).unwrap().0; for b in bits.iter().skip(min) { res <<= 1; res += b; } res } fn get_addresses(mask: &str, value: i64) -> Vec<i64> { let mut bits: Vec<char> = vec!['0'; mask.len()]; let mut v = value; let mut counter = bits.len() - 1; while v > 0 { let bit = v & 1; bits[counter] = if bit == 0 { '0' } else { '1' }; v >>= 1; counter -= 1; } let mask: Vec<char> = mask.chars().collect(); for i in 0..mask.len() { if mask[i] != '0' { bits[i] = mask[i]; } } let bits_x_pos: Vec<usize> = bits .iter() .enumerate() .filter(|(_, &x)| x == 'X') .map(|(i, _)| i) .collect(); let mut bits_results: Vec<Vec<char>> = vec![]; for i in 0..=(2usize.pow(bits_x_pos.len() as u32)) { let mut b = bits.clone(); for j in 0..bits_x_pos.len() { let v = (i >> j) & 1; let v = if v == 0 { '0' } else { '1' }; b[bits_x_pos[j]] = v; } bits_results.push(b.clone()); } let mut results: Vec<i64> = vec![]; for b in bits_results { let mut res = 0; let min = b.iter().enumerate().find(|(_, &x)| x == '1').unwrap().0; for &i in b.iter().skip(min) { res <<= 1; res += if i == '1' { 1 } else { 0 }; } results.push(res); } results } #[aoc(day14, part1)] fn part1(input: &[String]) -> i64 { let mut mem: HashMap<usize, i64> = HashMap::new(); let mut mask = ""; for l in input { let split: Vec<&str> = l.split(" = ").collect(); if split[0] == "mask" { mask = split[1]; } else { let index = split[0]; let index: usize = index[4..index.len() - 1].parse().unwrap(); let value: i64 = split[1].parse().unwrap(); let m = mem.entry(index).or_insert(0); *m = get_masked_value(mask, value); } } mem.values().sum() } #[aoc(day14, part2)] fn part2(input: &[String]) -> u64 { let mut mem: HashMap<usize, u64> = HashMap::new(); let mut mask = ""; for l in input { let split: Vec<&str> = l.split(" = ").collect(); if split[0] == "mask" { mask = split[1]; } else { let address: i64 = split[0][4..split[0].len() - 1].parse().unwrap(); let value: u64 = split[1].parse().unwrap(); let address = get_addresses(mask, address); for x in address { mem.insert(x as usize, value); } } } mem.values().sum() } #[test] fn test1() { let s = generator( "mask = XXXXXXXXXXXXXXXXXXXXXXXXXXXXX1XXXX0X mem[8] = 11 mem[7] = 101 mem[8] = 0", ); assert_eq!(part1(&s), 165); } #[test] fn test2() { let s = generator( "mask = 000000000000000000000000000000X1001X mem[42] = 100 mask = 00000000000000000000000000000000X0XX mem[26] = 1", ); assert_eq!(part2(&s), 208); }
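A worked example of the bit manipulation in get_masked_value, written as an extra test next to the existing ones and using the same mask as test1: 11 is 0b1011, the mask forces bit 6 to 1 and bit 1 to 0, giving 0b1001001 = 73.

#[test]
fn masked_value_example() {
    let mask = "XXXXXXXXXXXXXXXXXXXXXXXXXXXXX1XXXX0X";
    assert_eq!(get_masked_value(mask, 11), 73);
}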
true
14306a84637b08a08af03a1131c60f6c89ee5b44
Rust
SergeevPavel/adventofcode2019
/src/day5.rs
UTF-8
3,979
3.140625
3
[]
no_license
use std::fs::{read_to_string}; use std::ops::{Rem}; type Program = Vec<i32>; fn read_param(program: &Program, instr: &[i32], arg_num: u8) -> Result<i32, String> { assert!((arg_num as usize) < instr.len()); let op = instr[0]; let mode = (op / i32::pow(10, 2 + (arg_num - 1) as u32)).rem(10); match mode { 0 => Ok(program[instr[arg_num as usize] as usize]), 1 => Ok(instr[arg_num as usize]), m => Err(format!("Unknown mode: {:?}", m)) } } fn run(mut program: Vec<i32>, mut input: Vec<i32>) -> Result<(), String> { let mut pc = 0; while pc < program.len() { match program[pc].rem(100) { 1 | 2 => { if let instr@[_, _, _, _] = &program[pc..pc + 4] { let op = instr[0].rem(100); let p1 = read_param(&program, instr, 1)?; let p2 = read_param(&program, instr, 2)?; let r = instr[3]; // println!("p1 {:?} p2 {:?} r {:?}", p1, p2, r); match op { 1 => { program[r as usize] = p1 + p2; } 2 => { program[r as usize] = p1 * p2; } _ => unreachable!() } } else { return Err(format!("Unexpected end of program, not enough arguments on {:?}", pc)); } pc += 4; } 3 => { let p1 = program[pc + 1]; program[p1 as usize] = input.pop().ok_or("Not enough input parameters")?; pc += 2; } 4 => { println!("Out: {}", read_param(&program, &program[pc..pc + 2], 1)?); pc += 2; } 5 => { let instr = &program[pc..pc + 3]; let cond = read_param(&program, instr, 1)?; let addr = read_param(&program, instr, 2)?; if cond != 0 { pc = addr as usize; } else { pc += 3; } } 6 => { let instr = &program[pc..pc + 3]; let cond = read_param(&program, instr, 1)?; let addr = read_param(&program, instr, 2)?; if cond == 0 { pc = addr as usize; } else { pc += 3; } } 7 => { let instr = &program[pc..pc + 4]; let p1 = read_param(&program, instr, 1); let p2 = read_param(&program, instr, 2); let r = instr[3] as usize; if p1 < p2 { program[r] = 1; } else { program[r] = 0; } pc += 4; } 8 => { let instr = &program[pc..pc + 4]; let p1 = read_param(&program, instr, 1); let p2 = read_param(&program, instr, 2); let r = instr[3] as usize; if p1 == p2 { program[r] = 1; } else { program[r] = 0; } pc += 4; } 99 => { println!("Program halt"); break; } op@_ => { return Err(format!("Unknown opcode {:?} at {:?}", op, pc)); } } } return Ok(()); } fn main() -> Result<(), String> { let program: Vec<i32> = read_to_string("inputs/day5.txt").unwrap() .split(",") .map(|s| s.parse().unwrap_or_else(|_e| { println!("Unknown shit: {:?}", s); panic!() })) .collect(); // TODO reverse input vector run(program, vec![5]).unwrap(); Ok(()) }
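The TODO above notes that the input vector would need reversing, because opcode 3 consumes it with pop() from the back. A sketch under that assumption, using a VecDeque so inputs are read front to back; take_input is an invented helper, not part of the original.

use std::collections::VecDeque;

fn take_input(input: &mut VecDeque<i32>) -> Result<i32, String> {
    input
        .pop_front()
        .ok_or_else(|| "Not enough input parameters".to_string())
}

// Opcode 3 would then become:
//     program[p1 as usize] = take_input(&mut input)?;
// with `run` taking `mut input: VecDeque<i32>` and `main` passing
// `VecDeque::from(vec![5])`.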
true
f56665e5ea0585e747a2336034ae5669740b22c6
Rust
kirikaza/postgread
/src/msg/body/initial.rs
UTF-8
4,224
3.125
3
[]
no_license
use crate::msg::type_byte::TypeByte; use crate::msg::util::decode::*; use ::std::fmt::{self, Debug, Formatter}; #[derive(Clone, Debug, PartialEq)] pub enum Initial { Cancel(Cancel), TLS, Startup(Startup), } #[derive(Clone, Debug, PartialEq)] pub struct Cancel { pub process_id: u32, pub secret_key: u32, } #[derive(Clone, Debug, PartialEq)] pub struct Startup { pub version: Version, pub params: Vec<StartupParam>, } impl MsgDecode for Initial { const TYPE_BYTE_OPT: Option<TypeByte> = None; fn decode_body(bytes: &mut BytesSource) -> DecodeResult<Self> { match Version::decode(bytes)? { Version { major: 1234, minor: 5678 } => { let process_id = bytes.take_u32()?; let secret_key = bytes.take_u32()?; Ok(Self::Cancel(Cancel { process_id, secret_key })) }, Version { major: 1234, minor: 5679 } => Ok(Self::TLS), version => { let params = StartupParam::decode_many(bytes)?; Ok(Self::Startup(Startup { version, params })) } } } } #[derive(Clone, Debug, PartialEq)] pub struct Version { pub major: u16, pub minor: u16, } impl Version { fn decode(bytes: &mut BytesSource) -> DecodeResult<Self> { let major = bytes.take_u16()?; let minor = bytes.take_u16()?; Ok(Version { major, minor }) } } #[derive(Clone, PartialEq)] pub struct StartupParam { pub name: Vec<u8>, pub value: Vec<u8>, } impl StartupParam { pub fn new(name: Vec<u8>, value: Vec<u8>) -> Self { Self { name, value } } fn decode_many(bytes: &mut BytesSource) -> DecodeResult<Vec<Self>> { let mut params = vec![]; loop { let name = bytes.take_until_null()?; if name.is_empty() { break; } let value = bytes.take_until_null()?; params.push(StartupParam { name, value }); } Ok(params) } } impl Debug for StartupParam { fn fmt(&self, f: &mut Formatter) -> fmt::Result { f.debug_struct("StartupParam") .field( &String::from_utf8_lossy(&self.name), &String::from_utf8_lossy(&self.value)) .finish() } } #[cfg(test)] mod tests { use super::{Cancel, Initial, Startup, StartupParam, Version}; use crate::msg::util::test::*; #[test] fn cancel() { let bytes: &[u8] = &[ 4, 210, 22, 46, // 4*256+210=1234, 22*256+46=5678, these numbers instead of version mean "cancel" 0x1, 0x2, 0x3, 0x4, // process ID 0x5, 0x6, 0x7, 0x8, // secret key ]; assert_decode_ok(Initial::Cancel(Cancel { process_id: 0x01020304, secret_key: 0x05060708 }), bytes); } #[test] fn ssl() { let bytes: &[u8] = &[ 4, 210, 22, 47, // 4*256+210=1234, 22*256+47=5679, these numbers instead of version mean "TLS" ]; assert_decode_ok(Initial::TLS, bytes); } #[test] fn startup_without_params() { let bytes = &[ 0, 3, 0, 1, // version 0, // params ]; assert_decode_ok( Initial::Startup(Startup { version: Version { major: 3, minor: 1 }, params: vec![], }), bytes, ); } #[test] fn startup_with_params() { let mut bytes = vec![ 0, 3, 1, 0, // version ]; bytes.extend_from_slice(b"user\0root\0database\0postgres\0\0"); let bytes = bytes.as_slice(); assert_decode_ok( Initial::Startup(Startup { version: Version { major: 3, minor: 0x100 }, params: vec![ StartupParam { name: Vec::from(&b"user"[..]), value: Vec::from(&b"root"[..]), }, StartupParam { name: Vec::from(&b"database"[..]), value: Vec::from(&b"postgres"[..]), }, ], }), bytes, ); } }
true
0272d6ff100ceac00b21bc500d12e10393df945e
Rust
LinAGKar/advent-of-code-2019-rust
/day23a/src/main.rs
UTF-8
1,004
3.078125
3
[ "MIT" ]
permissive
fn main() { let mut input = String::new(); std::io::stdin().read_line(&mut input).unwrap(); let initial_memory: Vec<_> = input.trim().split(',').map(|x| x.parse().unwrap()).collect(); let mut computers: Vec<_> = (0..50).map(|i| { let mut computer = intcode::IntCode::new(initial_memory.clone()); computer.set_default_input(-1); computer.put_input(i); computer }).collect(); let mut packets = Vec::new(); 'outer: loop { for computer in &mut computers { computer.iterate(); if let Some(packet) = computer.get_outputs(3) { if packet[0] == 255 { println!("{}", packet[2]); break 'outer; } packets.push(packet); } } for packet in &packets { for &i in &packet[1..] { computers[packet[0] as usize].put_input(i); } } packets.clear(); } }
true
463e9d26b681bc721ad08d788c9d10f4b20e2f3a
Rust
disco0/watchexec
/lib/src/action/workingdata.rs
UTF-8
7,992
3.171875
3
[ "Apache-2.0" ]
permissive
use std::{ fmt, sync::{Arc, Weak}, time::Duration, }; use atomic_take::AtomicTake; use once_cell::sync::OnceCell; use tokio::{ process::Command, sync::{Mutex, OwnedMutexGuard}, }; use crate::{command::Shell, event::Event, filter::Filterer, handler::Handler}; use super::Outcome; /// The configuration of the [action][crate::action] worker. /// /// This is marked non-exhaustive so new configuration can be added without breaking. #[derive(Clone)] #[non_exhaustive] pub struct WorkingData { /// How long to wait for events to build up before executing an action. /// /// This is sometimes called "debouncing." We debounce on the trailing edge: an action is /// triggered only after that amount of time has passed since the first event in the cycle. The /// action is called with all the collected events in the cycle. pub throttle: Duration, /// The main handler to define: what to do when an action is triggered. /// /// This handler is called with the [`Action`] environment, which has a certain way of returning /// the desired outcome, check out the [`Action::outcome()`] method. The handler checks for the /// outcome as soon as the handler returns, which means that if the handler returns before the /// outcome is set, you'll get unexpected results. For this reason, it's a bad idea to use ex. a /// channel as the handler. /// /// If this handler is not provided, it defaults to a no-op, which does absolutely nothing, not /// even quit. Hence, you really need to provide a handler. /// /// It is possible to change the handler or any other configuration inside the previous handler. /// It's useful to know that the handlers are updated from this working data before any of them /// run in any given cycle, so changing the pre-spawn and post-spawn handlers from this handler /// will not affect the running action. pub action_handler: Arc<AtomicTake<Box<dyn Handler<Action> + Send>>>, /// A handler triggered before a command is spawned. /// /// This handler is called with the [`PreSpawn`] environment, which provides mutable access to /// the [`Command`] which is about to be run. See the notes on the [`PreSpawn::command()`] /// method for important information on what you can do with it. /// /// Returning an error from the handler will stop the action from processing further, and issue /// a [`RuntimeError`][crate::error::RuntimeError] to the error channel. pub pre_spawn_handler: Arc<AtomicTake<Box<dyn Handler<PreSpawn> + Send>>>, /// A handler triggered immediately after a command is spawned. /// /// This handler is called with the [`PostSpawn`] environment, which provides details on the /// spawned command, including its PID. /// /// Returning an error from the handler will drop the [`Child`][tokio::process::Child], which /// will terminate the command without triggering any of the normal Watchexec behaviour, and /// issue a [`RuntimeError`][crate::error::RuntimeError] to the error channel. pub post_spawn_handler: Arc<AtomicTake<Box<dyn Handler<PostSpawn> + Send>>>, /// Command to execute. /// /// When `shell` is [`Shell::None`], this is expected to be in “execvp(3)” format: first /// program, rest arguments. Otherwise, all elements will be joined together with a single space /// and passed to the shell. More control can then be obtained by providing a 1-element vec, and /// doing your own joining and/or escaping there. pub command: Vec<String>, /// Whether to use process groups (on Unix) or job control (on Windows) to run the command. /// /// This makes use of [command_group] under the hood. 
/// /// If you want to know whether a spawned command was run in a process group, you should use /// the value in [`PostSpawn`] instead of reading this one, as it may have changed in the /// meantime. pub grouped: bool, /// The shell to use to run the command. /// /// See the [`Shell`] enum documentation for more details. pub shell: Shell, /// The filterer implementation to use when filtering events. /// /// The default is a no-op, which will always pass every event. pub filterer: Arc<dyn Filterer>, } impl fmt::Debug for WorkingData { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_struct("WorkingData") .field("throttle", &self.throttle) .field("shell", &self.shell) .field("command", &self.command) .field("grouped", &self.grouped) .field("filterer", &self.filterer) .finish_non_exhaustive() } } impl Default for WorkingData { fn default() -> Self { Self { // set to 50ms here, but will remain 100ms on cli until 2022 throttle: Duration::from_millis(50), action_handler: Arc::new(AtomicTake::new(Box::new(()) as _)), pre_spawn_handler: Arc::new(AtomicTake::new(Box::new(()) as _)), post_spawn_handler: Arc::new(AtomicTake::new(Box::new(()) as _)), command: Vec::new(), shell: Shell::default(), grouped: true, filterer: Arc::new(()), } } } /// The environment given to the action handler. /// /// This deliberately does not implement Clone to make it hard to move it out of the handler, which /// you should not do. /// /// The [`Action::outcome()`] method is the only way to set the outcome of the action, and it _must_ /// be called before the handler returns. #[derive(Debug, Default)] pub struct Action { /// The collected events which triggered the action. pub events: Vec<Event>, pub(super) outcome: Arc<OnceCell<Outcome>>, } impl Action { pub(super) fn new(events: Vec<Event>) -> Self { Self { events, ..Self::default() } } /// Set the action's outcome. /// /// This takes `self` and `Action` is not `Clone`, so it's only possible to call it once. /// Regardless, if you _do_ manage to call it twice, it will do nothing beyond the first call. /// /// See the [`Action`] documentation about handlers to learn why it's a bad idea to clone or /// send it elsewhere, and what kind of handlers you cannot use. pub fn outcome(self, outcome: Outcome) { self.outcome.set(outcome).ok(); } } /// The environment given to the pre-spawn handler. /// /// This deliberately does not implement Clone to make it hard to move it out of the handler, which /// you should not do. /// /// The [`PreSpawn::command()`] method is the only way to mutate the command, and the mutex guard it /// returns _must_ be dropped before the handler returns. #[derive(Debug)] #[non_exhaustive] pub struct PreSpawn { /// The command which is about to be spawned. /// /// This is the final command, after the [`Shell`] has been applied. pub command: Vec<String>, command_w: Weak<Mutex<Command>>, } impl PreSpawn { pub(super) fn new(command: Command, cmd: Vec<String>) -> (Self, Arc<Mutex<Command>>) { let arc = Arc::new(Mutex::new(command)); ( Self { command: cmd, command_w: Arc::downgrade(&arc), }, arc.clone(), ) } /// Get write access to the command that will be spawned. /// /// Keeping the lock alive beyond the end of the handler may cause the command to be cancelled, /// but note no guarantees are made on this behaviour. Just don't do it. See the [`Action`] /// documentation about handlers for more. /// /// This will always return `Some()` under normal circumstances.
pub async fn command(&self) -> Option<OwnedMutexGuard<Command>> { if let Some(arc) = self.command_w.upgrade() { Some(arc.lock_owned().await) } else { None } } } /// The environment given to the post-spawn handler. /// /// This is Clone, as there's nothing (except returning an error) that can be done to the command /// now that it's spawned, as far as Watchexec is concerned. Nevertheless, you should return from /// this handler quickly, to avoid holding up anything else. #[derive(Clone, Debug)] #[non_exhaustive] pub struct PostSpawn { /// The final command the process was spawned with. pub command: Vec<String>, /// The process ID or the process group ID. pub id: u32, /// Whether the command was run in a process group. pub grouped: bool, }
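A sketch of filling in the public configuration fields described above; the three handlers keep their no-op defaults here, since wiring a real Handler is outside the scope of this snippet, and the command line shown is arbitrary.

fn example_working_data() -> WorkingData {
    let mut wd = WorkingData::default();
    // What to run; see the `command` field docs above for how `shell` affects it.
    wd.command = vec!["cargo".into(), "check".into()];
    wd.grouped = true;
    // Collect events for 100ms before each action.
    wd.throttle = Duration::from_millis(100);
    wd
}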
true
0d54c4b2155cbe96bb8f773ec3af615fa38ca256
Rust
bvaisvil/zenith
/src/util.rs
UTF-8
4,664
2.84375
3
[ "MIT" ]
permissive
#![allow(dead_code)] /** * Copyright 2019-2020, Benjamin Vaisvil and the zenith contributors */ use crate::constants::DEFAULT_TICK; use crossterm::{event, event::Event as CEvent, event::KeyCode as Key, event::KeyEvent}; use signal_hook::consts::signal::{SIGABRT, SIGINT, SIGTERM}; use signal_hook::iterator::Signals; use std::fs::{remove_file, File}; use std::io::Write; use std::path::{Path, PathBuf}; use std::sync::mpsc; use std::thread; use std::time::Duration; pub enum Event<I> { Input(I), Resize(u16, u16), Tick, Save, Terminate, } #[allow(dead_code)] pub struct Events { rx: mpsc::Receiver<Event<KeyEvent>>, input_handle: thread::JoinHandle<()>, tick_handle: thread::JoinHandle<()>, sig_handle: thread::JoinHandle<()>, } #[derive(Debug, Clone, Copy)] pub struct Config { pub exit_key: Key, pub tick_rate: Duration, } impl Default for Config { fn default() -> Config { Config { exit_key: Key::Char('q'), tick_rate: Duration::from_millis(DEFAULT_TICK), } } } impl Events { pub fn new(tick_rate: Duration) -> Events { Events::with_config(Config { tick_rate, exit_key: Key::Char('q'), }) } pub fn with_config(config: Config) -> Events { let (tx, rx) = mpsc::channel(); let input_handle = { let tx = tx.clone(); thread::spawn(move || loop { match event::read().expect("Couldn't read event") { CEvent::Key(key) => tx.send(Event::Input(key)).expect("Couldn't send event."), CEvent::Resize(cols, rows) => tx .send(Event::Resize(cols, rows)) .expect("Couldn't send event."), _ => (), // ignore } }) }; let tick_handle = { let tx = tx.clone(); thread::spawn(move || { let tx = tx.clone(); let mut count: u64 = 0; loop { tx.send(Event::Tick).expect("Couldn't send event."); count += 1; if count % 60 == 0 { tx.send(Event::Save).expect("Couldn't send event"); } thread::sleep(config.tick_rate); } }) }; let sig_handle = { let tx = tx; let mut signals = Signals::new(&[SIGINT, SIGTERM, SIGABRT]).expect("Couldn't create signal handler"); thread::spawn(move || { let tx = tx.clone(); for _sig in signals.forever() { tx.send(Event::Terminate) .expect("Couldn't send Terminate event."); } }) }; Events { rx, input_handle, tick_handle, sig_handle, } } pub fn next(&self) -> Result<Event<KeyEvent>, mpsc::RecvError> { self.rx.recv() } } /// Keeps a file open exclusively /// Removes the file when dropped pub struct Lockfile { file: File, path: PathBuf, } impl Lockfile { /// Tries to open the file creating if it does not exist /// Fails if zenith is already running using the same lockfile pub async fn new(main_pid: u32, path: &Path) -> Option<Self> { if is_zenith_running(path).await { debug!("{}", path.to_string_lossy()); return None; } let mut file = File::create(path).ok()?; file.write_all(main_pid.to_string().as_bytes()).ok()?; Some(Self { file, path: path.into(), }) } } impl Drop for Lockfile { fn drop(&mut self) { debug!("Removing Lock"); let res = remove_file(&self.path); if let Err(e) = res { error!( "Error deleting lockfile: path={}, error={:?}", self.path.display(), e ); } } } async fn is_zenith_running(path: &Path) -> bool { name_of_process_for_pidfile(path) .await .map_or(false, |name| name == "zenith") } async fn name_of_process_for_pidfile(path: &Path) -> Option<String> { let data = std::fs::read_to_string(path).ok()?; let pid: i32 = data.parse().ok()?; let process = heim::process::get(pid).await.ok()?; process.name().await.ok() } pub fn percent_of(numerator: u64, denominator: u64) -> f32 { if numerator == 0 || denominator == 0 { 0.0 } else { (numerator as f32 / denominator as f32) * 100.0 } }
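A sketch of the consuming side of Events, using only the variants defined above; what each arm does is up to the caller.

fn example_event_loop() {
    let events = Events::new(Duration::from_millis(DEFAULT_TICK));
    loop {
        match events.next() {
            Ok(Event::Input(key)) if key.code == Key::Char('q') => break,
            Ok(Event::Input(_)) => {}
            Ok(Event::Resize(_cols, _rows)) => { /* re-layout */ }
            Ok(Event::Tick) => { /* refresh metrics */ }
            Ok(Event::Save) => { /* persist state */ }
            Ok(Event::Terminate) | Err(_) => break,
        }
    }
}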
true
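Illustration only, not part of the zenith file above: a minimal sketch of a consumer loop for the Events helper, assuming it sits in the same module so that Events and Event are in scope; the handler bodies are placeholders.

fn run_event_loop(events: &Events) {
    loop {
        match events.next() {
            Ok(Event::Input(_key)) => { /* forward the key press to the UI layer */ }
            Ok(Event::Resize(_cols, _rows)) => { /* recompute the terminal layout */ }
            Ok(Event::Tick) => { /* refresh metrics at the configured tick rate */ }
            Ok(Event::Save) => { /* persist history; the tick thread sends this every 60 ticks */ }
            // a signal was caught or every sender hung up, so leave the loop
            Ok(Event::Terminate) | Err(_) => break,
        }
    }
}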
1e52197ac95019d525d3ef5d7c1b40ad3302569f
Rust
Tubbz-alt/lrad
/lrad-lib/src/config.rs
UTF-8
2,082
2.640625
3
[ "MIT" ]
permissive
use std::net::IpAddr; use std::collections::HashMap; use crate::dns::CloudflareConfig; use crate::ipfs::IpfsApiServerConfig; use std::fs::File; use std::io::{Read, Write}; use std::path::{Path, PathBuf}; use git2::Repository; use crate::error::Result; #[derive(Deserialize, Serialize, Default)] pub struct CliConfig { pub dns_provider: CloudflareConfig, pub ipfs_api_server: IpfsApiServerConfig, } impl CliConfig { fn config_path(repo: &Repository) -> Result<PathBuf> { let path = if !repo.is_bare() { repo.path() .parent() .expect(".git should always have a parent folder in a non-bare repo") } else { repo.path() }; let mut path = PathBuf::from(path); path.push(Path::new(".lrad.toml")); Ok(path) } pub fn try_from(repo: &Repository) -> Result<Self> { let mut file = File::open(Self::config_path(repo)?)?; let metadata = file.metadata()?; let mut buf = Vec::with_capacity(metadata.len() as usize); let _bytes_read = file.read_to_end(&mut buf)?; toml::from_slice(buf.as_slice()).map_err(|err| err.into()) } pub fn write(&self, repo: &Repository) -> Result<()> { let config_toml_str = toml::to_string(self).unwrap(); let mut file = File::create(Self::config_path(repo)?)?; file.write_all(config_toml_str.as_bytes())?; Ok(()) } } #[derive(Deserialize, Serialize)] pub struct DaemonConfig { /// e.g. git.lrad.io pub dns_record_name: String, pub port_map: HashMap<String, Vec<PortBinding>> } #[derive(Deserialize, Serialize)] pub struct PortBinding { pub host_ip: Option<IpAddr>, pub host_port: u16, } impl DaemonConfig { pub fn try_from(path: &Path) -> Result<Self> { let mut file = File::open(path)?; let metadata = file.metadata()?; let mut buf = Vec::with_capacity(metadata.len() as usize); let _bytes_read = file.read_to_end(&mut buf)?; toml::from_slice(buf.as_slice()).map_err(|err| err.into()) } }
true
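A hypothetical round trip through the CliConfig helpers above, shown purely as a usage sketch; the function name is invented, and it assumes an already-opened git2::Repository plus the crate-local Result alias from the same module.

fn reload_cli_config(repo: &git2::Repository) -> Result<CliConfig> {
    // try_from reads and parses .lrad.toml from the work tree (or the bare repo path)
    let cfg = CliConfig::try_from(repo)?;
    // write re-serializes the struct back to the same .lrad.toml location
    cfg.write(repo)?;
    Ok(cfg)
}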
dff1ad7e05d52fc0f4504dc4cc9d332782500928
Rust
atsamd-rs/atsamd
/hal/src/sercom/spi/pads_thumbv7em.rs
UTF-8
10,801
2.765625
3
[ "Apache-2.0", "MIT", "LicenseRef-scancode-unknown-license-reference" ]
permissive
//! Define a container for a set of SERCOM pads //! //! See the [spi module](super) documentation for more details on declaring and //! instantiating a [`Pads`] type. use core::marker::PhantomData; use crate::gpio::AnyPin; use crate::sercom::*; use crate::typelevel::{NoneT, Sealed}; use super::{Capability, Duplex, Rx, Tx}; //============================================================================= // Dipo //============================================================================= /// Map an [`OptionalPadNum`] to its corresponding `DIPO` value pub trait Dipo: OptionalPadNum { const DIPO: Option<u8>; } impl Dipo for NoneT { const DIPO: Option<u8> = None; } impl Dipo for Pad0 { const DIPO: Option<u8> = Some(0); } impl Dipo for Pad1 { const DIPO: Option<u8> = Some(1); } impl Dipo for Pad2 { const DIPO: Option<u8> = Some(2); } impl Dipo for Pad3 { const DIPO: Option<u8> = Some(3); } //============================================================================= // Dopo //============================================================================= /// Map an [`OptionalPadNum`] to its corresponding `DOPO` value pub trait Dopo: OptionalPadNum { const DOPO: Option<u8>; } impl Dopo for NoneT { const DOPO: Option<u8> = None; } impl Dopo for Pad0 { const DOPO: Option<u8> = Some(0); } impl Dopo for Pad1 { const DOPO: Option<u8> = Some(1); } impl Dopo for Pad3 { const DOPO: Option<u8> = Some(2); } //============================================================================= // DipoDopo //============================================================================= /// Configure the `DIPO` and `DOPO` fields based on a set of [`Pads`] pub trait DipoDopo: Sealed { const DIPO_DOPO: (u8, u8); } /// Lift the implementations of [`DipoDopo`] from implementations on /// [`OptionalPadNum`]s to the corresponding [`Pads`] types. impl<S, I, DI, DO, CK, SS> DipoDopo for Pads<S, I, DI, DO, CK, SS> where S: Sercom, I: IoSet, DI: OptionalPad, DO: OptionalPad, CK: OptionalPad, SS: OptionalPad, DI::PadNum: Dipo, DO::PadNum: Dopo, { const DIPO_DOPO: (u8, u8) = match (DI::PadNum::DIPO, DO::PadNum::DOPO) { (None, None) => (0, 2), (Some(dipo), None) => { let dopo = if dipo == 0 { 2 } else { 0 }; (dipo, dopo) } (None, Some(dopo)) => { let dipo = if dopo == 0 { 3 } else { 0 }; (dipo, dopo) } (Some(dipo), Some(dopo)) => (dipo, dopo), }; } //============================================================================= // Pads //============================================================================= /// Container for a set of SERCOM pads /// /// See the [spi module](super) documentation for more details on declaring and /// instantiating a [`Pads`] type. pub struct Pads<S, I, DI = NoneT, DO = NoneT, CK = NoneT, SS = NoneT> where S: Sercom, I: IoSet, DI: OptionalPad, DO: OptionalPad, CK: OptionalPad, SS: OptionalPad, { sercom: PhantomData<S>, ioset: PhantomData<I>, data_in: DI, data_out: DO, sclk: CK, ss: SS, } impl<S: Sercom, I: IoSet> Default for Pads<S, I> { fn default() -> Self { Self { sercom: PhantomData, ioset: PhantomData, data_in: NoneT, data_out: NoneT, sclk: NoneT, ss: NoneT, } } } impl<S, I, DI, DO, CK, SS> Pads<S, I, DI, DO, CK, SS> where S: Sercom, I: IoSet, DI: OptionalPad, DO: OptionalPad, CK: OptionalPad, SS: OptionalPad, { /// Set the `DI` pad /// /// In a [`MasterMode`], this is MISO. In [`Slave`] [`OpMode`], this is /// MOSI. 
/// /// [`MasterMode`]: super::MasterMode /// [`Slave`]: super::Slave /// [`OpMode`]: super::OpMode #[inline] pub fn data_in<Id>(self, pin: impl AnyPin<Id = Id>) -> Pads<S, I, Pad<S, Id>, DO, CK, SS> where Id: GetPad<S>, Id::PadNum: Dipo, Pad<S, Id>: InIoSet<I>, { Pads { sercom: self.sercom, ioset: self.ioset, data_in: pin.into().into_mode(), data_out: self.data_out, sclk: self.sclk, ss: self.ss, } } /// Set the `DO` pad /// /// In a [`MasterMode`], this is MOSI. In [`Slave`] [`OpMode`], this is /// MISO. /// /// [`MasterMode`]: super::MasterMode /// [`Slave`]: super::Slave /// [`OpMode`]: super::OpMode #[inline] pub fn data_out<Id>(self, pin: impl AnyPin<Id = Id>) -> Pads<S, I, DI, Pad<S, Id>, CK, SS> where Id: GetPad<S>, Id::PadNum: Dopo, Pad<S, Id>: InIoSet<I>, { Pads { sercom: self.sercom, ioset: self.ioset, data_in: self.data_in, data_out: pin.into().into_mode(), sclk: self.sclk, ss: self.ss, } } /// Set the `SCK` pad, which is always [`Pad1`] #[inline] pub fn sclk<Id>(self, pin: impl AnyPin<Id = Id>) -> Pads<S, I, DI, DO, Pad<S, Id>, SS> where Id: GetPad<S, PadNum = Pad1>, Pad<S, Id>: InIoSet<I>, { Pads { sercom: self.sercom, ioset: self.ioset, data_in: self.data_in, data_out: self.data_out, sclk: pin.into().into_mode(), ss: self.ss, } } /// Set the `SS` pad, which is always [`Pad2`] #[inline] pub fn ss<Id>(self, pin: impl AnyPin<Id = Id>) -> Pads<S, I, DI, DO, CK, Pad<S, Id>> where Id: GetPad<S, PadNum = Pad2>, Pad<S, Id>: InIoSet<I>, { Pads { sercom: self.sercom, ioset: self.ioset, data_in: self.data_in, data_out: self.data_out, sclk: self.sclk, ss: pin.into().into_mode(), } } /// Consume the [`Pads`] and return each individual /// [`Pin`](crate::gpio::Pin) #[inline] pub fn free(self) -> (DI, DO, CK, SS) { (self.data_in, self.data_out, self.sclk, self.ss) } } //============================================================================= // PadsFromIds //============================================================================= /// Define a set of [`Pads`] using [`PinId`]s instead of [`Pin`]s /// /// In some cases, it is more convenient to specify a set of `Pads` using /// `PinId`s rather than `Pin`s. This alias makes it easier to do so. /// /// The first two type parameters are the [`Sercom`] and [`IoSet`], while the /// remaining four are effectively [`OptionalPinId`]s representing the /// corresponding type parameters of [`Pads`], i.e. `DI`, `DO`, `CK` & `SS`. /// Each of the remaining type parameters defaults to [`NoneT`]. 
/// /// ``` /// use atsamd_hal::pac::Peripherals; /// use atsamd_hal::gpio::{PA08, PA09, Pins}; /// use atsamd_hal::sercom::{Sercom0, spi}; /// use atsamd_hal::sercom::pad::IoSet1; /// use atsamd_hal::typelevel::NoneT; /// /// pub type Pads = spi::PadsFromIds<Sercom0, IoSet1, PA08, NoneT, PA09>; /// /// pub fn create_pads() -> Pads { /// let peripherals = Peripherals::take().unwrap(); /// let pins = Pins::new(peripherals.PORT); /// spi::Pads::default().sclk(pins.pa09).data_in(pins.pa08) /// } /// ``` /// /// [`Pin`]: crate::gpio::Pin /// [`PinId`]: crate::gpio::PinId /// [`OptionalPinId`]: crate::gpio::OptionalPinId pub type PadsFromIds<S, I, DI = NoneT, DO = NoneT, CK = NoneT, SS = NoneT> = Pads< S, I, <DI as GetOptionalPad<S>>::Pad, <DO as GetOptionalPad<S>>::Pad, <CK as GetOptionalPad<S>>::Pad, <SS as GetOptionalPad<S>>::Pad, >; //============================================================================= // PadSet //============================================================================= /// Type-level function to recover the [`OptionalPad`] types from a generic set /// of [`Pads`] /// /// This trait is used as an interface between the [`Pads`] type and other /// types in this module. It acts as a [type-level function], returning the /// corresponding [`Sercom`] and [`OptionalPad`] types. It serves to cut down on /// the total number of type parameters needed in the [`Config`](super::Config) /// struct. The `Config` struct doesn't need access to the [`Pin`]s directly. /// Rather, it only needs to apply the [`SomePad`] trait bound when a `Pin` is /// required. The `PadSet` trait allows each `Config` struct to store an /// instance of `Pads` without itself being generic over all six type parameters /// of the `Pads` type. /// /// This trait is a simplified version of the [`AnyKind`] trait pattern. 
/// /// [`Pin`]: crate::gpio::Pin /// [type-level function]: crate::typelevel#type-level-functions /// [`AnyKind`]: crate::typelevel#anykind-trait-pattern pub trait PadSet: Sealed { type Sercom: Sercom; type IoSet: IoSet; type DataIn: OptionalPad; type DataOut: OptionalPad; type Sclk: OptionalPad; type SS: OptionalPad; } impl<S, I, DI, DO, CK, SS> Sealed for Pads<S, I, DI, DO, CK, SS> where S: Sercom, I: IoSet, DI: OptionalPad, DO: OptionalPad, CK: OptionalPad, SS: OptionalPad, { } impl<S, I, DI, DO, CK, SS> PadSet for Pads<S, I, DI, DO, CK, SS> where S: Sercom, I: IoSet, DI: OptionalPad, DO: OptionalPad, CK: OptionalPad, SS: OptionalPad, { type Sercom = S; type IoSet = I; type DataIn = DI; type DataOut = DO; type Sclk = CK; type SS = SS; } //============================================================================= // ValidPads //============================================================================= /// Marker trait for valid sets of [`Pads`] /// /// This trait labels sets of `Pads` that: /// - Specify [`SomePad`] for `CK` and at least one of `DI` or `DO` /// - Use a valid combination of [`PadNum`]s, so that the `Pads` implement /// [`DipoDopo`] pub trait ValidPads: PadSet + DipoDopo { type Capability: Capability; } impl<S, I, DI, CK, SS> ValidPads for Pads<S, I, DI, NoneT, CK, SS> where S: Sercom, I: IoSet, DI: SomePad, CK: SomePad, SS: OptionalPad, Pads<S, I, DI, NoneT, CK, SS>: DipoDopo, { type Capability = Rx; } impl<S, I, DO, CK, SS> ValidPads for Pads<S, I, NoneT, DO, CK, SS> where S: Sercom, I: IoSet, DO: SomePad, CK: SomePad, SS: OptionalPad, Pads<S, I, NoneT, DO, CK, SS>: DipoDopo, { type Capability = Tx; } impl<S, I, DI, DO, CK, SS> ValidPads for Pads<S, I, DI, DO, CK, SS> where S: Sercom, I: IoSet, DI: SomePad, DO: SomePad, CK: SomePad, SS: OptionalPad, Pads<S, I, DI, DO, CK, SS>: DipoDopo, { type Capability = Duplex; }
true
c0e8fe0b72cc4d203214b201be08f364a0b3c540
Rust
1r3n33/ucdp
/gateway/src/ucdp/dal/partners.rs
UTF-8
19,050
2.71875
3
[]
no_license
use crate::ucdp::dal::aerospike_dao::{AerospikeDao, AerospikeDaoBuilder, AerospikeDaoError}; use crate::ucdp::dal::ethereum_dao::{EthereumDao, EthereumDaoBuilder, EthereumDaoError}; use crate::ucdp::dal::in_memory_dao::{InMemoryDao, InMemoryDaoBuilder, InMemoryDaoError}; use async_trait::async_trait; use log::trace; use serde::{Deserialize, Serialize}; use std::str::FromStr; use thiserror::Error; use ucdp::config::Config; #[derive(Clone, Debug, Deserialize, Serialize)] pub struct Partner { pub name: String, pub enabled: bool, } #[derive(Error, Debug)] pub enum Error { #[error("config error")] Config(#[from] ucdp::config::Error), #[error("ethereum dao error")] EthereumDao(#[from] EthereumDaoError), #[error("aerospike dao error")] AerospikeDao(#[from] AerospikeDaoError), #[error("in memory dao error")] InMemoryDao(#[from] InMemoryDaoError), #[error("deserialization error")] Deserialization(#[from] serde_json::Error), #[error("unknown connector: {0}")] UnknownConnector(String), #[error("Parameter error: {0}")] Parameter(String), #[error("partner not found: {0}")] PartnerNotFound(String), } #[async_trait] pub trait PartnersDao: Send + Sync { async fn get_partner(&self, partner_id: &str) -> Result<Partner, Error>; async fn put_partner(&self, partner_id: &str, partner: &Partner); } struct EthereumPartnersDao<'a> { ethereum_dao: Box<dyn EthereumDao<'a, (web3::types::Address,), (Vec<u8>, bool, bool)>>, } #[async_trait] impl PartnersDao for EthereumPartnersDao<'_> { async fn get_partner(&self, partner_id: &str) -> Result<Partner, Error> { trace!("EthereumPartnersDao get {:?}", partner_id); let partner_address = web3::types::Address::from_str(partner_id) .map_err(|_| Error::Parameter("partner_id".into()))?; self.ethereum_dao .get((partner_address,)) .await .map(|(name, enabled, _)| Partner { name: String::from_utf8(name) .unwrap_or_default() // TODO: avoid unwrap .trim_end_matches(char::from(0)) .into(), enabled, }) .map_err(Error::EthereumDao) } async fn put_partner(&self, partner_id: &str, _: &Partner) { trace!("EthereumPartnersDao put {:?}", partner_id); unimplemented!() } } struct AerospikePartnersDao { aerospike_dao: Box<dyn AerospikeDao>, } #[async_trait] impl PartnersDao for AerospikePartnersDao { async fn get_partner(&self, partner_id: &str) -> Result<Partner, Error> { trace!("AerospikePartnersDao get {:?}", partner_id); self.aerospike_dao .get(partner_id) .await? .value .ok_or_else(|| Error::PartnerNotFound(partner_id.into())) .map(|bytes| { serde_json::from_slice::<Partner>(&bytes).map_err(Error::Deserialization) })? 
} async fn put_partner(&self, partner_id: &str, partner: &Partner) { trace!("AerospikePartnersDao put {:?}", partner_id); if let Ok(bytes) = serde_json::to_vec(partner) { self.aerospike_dao.put(partner_id, bytes).await; } } } struct InMemoryPartnersDao { in_memory_dao: Box<dyn InMemoryDao<String, Partner>>, } #[async_trait] impl PartnersDao for InMemoryPartnersDao { async fn get_partner(&self, partner_id: &str) -> Result<Partner, Error> { trace!("InMemoryPartnersDao get {:?}", partner_id); self.in_memory_dao .get(&String::from(partner_id)) .map(|res| res.value) .map_err(Error::InMemoryDao) } async fn put_partner(&self, partner_id: &str, partner: &Partner) { trace!("InMemoryPartnersDao put {:?}", partner_id); self.in_memory_dao .put(String::from(partner_id), partner.clone()) } } struct CachePartnersDao { cache_dao: Box<dyn PartnersDao>, underlying_dao: Box<dyn PartnersDao>, } #[async_trait] impl PartnersDao for CachePartnersDao { async fn get_partner(&self, partner_id: &str) -> Result<Partner, Error> { trace!("CachePartnersDao get {:?}", partner_id); match self.cache_dao.get_partner(partner_id).await { Err(_) => { let res = self.underlying_dao.get_partner(partner_id).await; if let Ok(partner) = res { self.cache_dao.put_partner(partner_id, &partner).await; Ok(partner) } else { res } } Ok(partner) => Ok(partner), } } async fn put_partner(&self, partner_id: &str, _: &Partner) { trace!("CachePartnersDao put {:?}", partner_id); unimplemented!() } } pub struct PartnersBuilder {} impl PartnersBuilder { fn build_dao(connector: &str, config: &Config) -> Result<Box<dyn PartnersDao>, Error> { match connector { "ethereum" => { let ethereum_dao = EthereumDaoBuilder::build(config, "partners")?; let dao = EthereumPartnersDao { ethereum_dao }; Ok(Box::new(dao)) } "aerospike" => { let aerospike_dao = AerospikeDaoBuilder::build(config)?; let dao = AerospikePartnersDao { aerospike_dao }; Ok(Box::new(dao)) } "in-memory" => { let in_memory_dao = InMemoryDaoBuilder::build(config)?; let dao = InMemoryPartnersDao { in_memory_dao }; Ok(Box::new(dao)) } connector => Err(Error::UnknownConnector(connector.to_string())), } } pub fn build(config: &Config) -> Result<Box<dyn PartnersDao>, Error> { let connectors = config.get_str_vec("data.partners.connectors")?; PartnersBuilder::build_rec(&connectors, config) } fn build_rec(connectors: &[String], config: &Config) -> Result<Box<dyn PartnersDao>, Error> { println!("{:?}", connectors); match connectors.len() { 0 => Err(Error::UnknownConnector("".into())), 1 => PartnersBuilder::build_dao(connectors[0].as_str(), config), _ => { let cache_dao = PartnersBuilder::build_dao(connectors[0].as_str(), config)?; let underlying_dao = PartnersBuilder::build_rec(&connectors[1..], config)?; let dao = CachePartnersDao { cache_dao, underlying_dao, }; Ok(Box::new(dao)) } } } } #[cfg(test)] mod tests { use crate::ucdp::dal::aerospike_dao::{AerospikeDao, AerospikeDaoError, AerospikeDaoResult}; use crate::ucdp::dal::ethereum_dao::{EthereumDao, EthereumDaoError}; use crate::ucdp::dal::in_memory_dao::{InMemoryDao, InMemoryDaoError, InMemoryDaoResult}; use crate::ucdp::dal::partners::{ AerospikePartnersDao, CachePartnersDao, Error, EthereumPartnersDao, InMemoryPartnersDao, }; use crate::ucdp::dal::PartnersDao; use crate::ucdp::dal::{Partner, PartnersBuilder}; use async_trait::async_trait; use std::time::SystemTime; use ucdp::config::Config; #[test] fn partnersbuilder_build_non_cached_ok_ethereum() { let mut config = config::Config::default(); let _ = config.set("data.partners.connectors", 
vec!["ethereum"]); let _ = config.set("ethereum.network", "http://ethereum"); let _ = config.set( "ethereum.contract", "0x0000000000000000000000000000000000000000", ); let config = Config::from(config); let res = PartnersBuilder::build(&config); assert!(res.is_ok()); } #[test] fn partnersbuilder_build_non_cached_ok_aerospike() { let mut config = config::Config::default(); let _ = config.set("data.partners.connectors", vec!["aerospike"]); let _ = config.set("aerospike.set", "default"); let _ = config.set("aerospike.host", "http://aerospike"); let config = Config::from(config); let res = PartnersBuilder::build(&config); assert!(res.is_ok()); } #[test] fn partnersbuilder_build_non_cached_ok_in_memory() { let mut config = config::Config::default(); let _ = config.set("data.partners.connectors", vec!["in-memory"]); let config = Config::from(config); let res = PartnersBuilder::build(&config); assert!(res.is_ok()); } #[test] fn partnersbuilder_build_cached_ok() { let mut config = config::Config::default(); let _ = config.set( "data.partners.connectors", vec!["in-memory", "aerospike", "ethereum"], ); let _ = config.set("aerospike.set", "default"); let _ = config.set("aerospike.host", "http://aerospike"); let _ = config.set("ethereum.network", "http://ethereum"); let _ = config.set( "ethereum.contract", "0x0000000000000000000000000000000000000000", ); let config = Config::from(config); let res = PartnersBuilder::build(&config); assert!(res.is_ok()); } #[test] fn partnersbuilder_build_err_unknown() { let mut config = config::Config::default(); let _ = config.set("data.partners.connectors", vec!["unknown"]); let config = Config::from(config); let res = PartnersBuilder::build(&config); assert!(res.is_err()); } #[test] fn partnersbuilder_build_err_unset() { let config = config::Config::default(); let config = Config::from(config); let res = PartnersBuilder::build(&config); assert!(res.is_err()); } struct PartnerEthereumDao {} #[async_trait] impl<'a> EthereumDao<'a, (web3::types::Address,), (Vec<u8>, bool, bool)> for PartnerEthereumDao { async fn get( &self, _: (web3::types::Address,), ) -> Result<(Vec<u8>, bool, bool), EthereumDaoError> { Ok(( vec![ 112, 97, 114, 116, 110, 101, 114, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ], true, true, )) } } struct ErrorEthereumDao {} #[async_trait] impl<'a> EthereumDao<'a, (web3::types::Address,), (Vec<u8>, bool, bool)> for ErrorEthereumDao { async fn get( &self, _: (web3::types::Address,), ) -> Result<(Vec<u8>, bool, bool), EthereumDaoError> { Err(EthereumDaoError::Parameter("".into())) } } impl PartialEq for Partner { fn eq(&self, other: &Self) -> bool { self.name == other.name && self.enabled == other.enabled } } #[actix_rt::test] async fn ethereum_partners_dao_get_partner_ok() { let ethereum_dao = Box::new(PartnerEthereumDao {}); let partners = Box::new(EthereumPartnersDao { ethereum_dao }); let partner = partners .get_partner("0x0000000000000000000000000000000000000000") .await .unwrap(); assert_eq!( partner, Partner { name: "partner".into(), enabled: true } ); } #[actix_rt::test] async fn ethereum_partners_dao_get_partner_err_dao() { let ethereum_dao = Box::new(ErrorEthereumDao {}); let partners = Box::new(EthereumPartnersDao { ethereum_dao }); let res = partners .get_partner("0x0000000000000000000000000000000000000000") .await; match res { Err(Error::EthereumDao(_)) => (), _ => unreachable!(), } } #[actix_rt::test] async fn ethereum_partners_dao_get_partner_err_parameter() { let ethereum_dao = Box::new(PartnerEthereumDao 
{}); let partners = Box::new(EthereumPartnersDao { ethereum_dao }); let res = partners.get_partner("not an address").await; if let Err(Error::Parameter(reason)) = res { assert_eq!(reason, "partner_id"); } else { unreachable!(); } } struct TestAerospikeDao {} #[async_trait] impl AerospikeDao for TestAerospikeDao { async fn get(&self, partner_id: &str) -> Result<AerospikeDaoResult, AerospikeDaoError> { match partner_id { "ok" => Ok(AerospikeDaoResult { value: Some( "{\"name\":\"partner\", \"enabled\":true}" .as_bytes() .to_vec(), ), ttl: None, }), "not found" => Ok(AerospikeDaoResult { value: None, ttl: None, }), "deserialization error" => Ok(AerospikeDaoResult { value: Some("{\"name\":\"partner\"...".as_bytes().to_vec()), ttl: None, }), _ => Err(AerospikeDaoError::ItemNotFound), } } async fn put(&self, _: &str, _: Vec<u8>) { unreachable!() } } #[actix_rt::test] async fn aerospike_partners_dao_get_partner_ok() { let aerospike_dao = Box::new(TestAerospikeDao {}); let partners = Box::new(AerospikePartnersDao { aerospike_dao }); let partner = partners.get_partner("ok").await.unwrap(); assert_eq!( partner, Partner { name: "partner".into(), enabled: true } ); } #[actix_rt::test] async fn aerospike_partners_dao_get_partner_err_not_found() { let aerospike_dao = Box::new(TestAerospikeDao {}); let partners = Box::new(AerospikePartnersDao { aerospike_dao }); let res = partners.get_partner("not found").await; if let Err(Error::PartnerNotFound(partner_id)) = res { assert_eq!(partner_id, "not found"); } else { unreachable!(); } } #[actix_rt::test] async fn aerospike_partners_dao_get_partner_err_deserialization() { let aerospike_dao = Box::new(TestAerospikeDao {}); let partners = Box::new(AerospikePartnersDao { aerospike_dao }); let res = partners.get_partner("deserialization error").await; match res { Err(Error::Deserialization(_)) => (), _ => unreachable!(), } } #[actix_rt::test] async fn aerospike_partners_dao_get_partner_err_dao() { let aerospike_dao = Box::new(TestAerospikeDao {}); let partners = Box::new(AerospikePartnersDao { aerospike_dao }); let res = partners.get_partner("dao error").await; match res { Err(Error::AerospikeDao(_)) => (), _ => unreachable!(), } } struct TestInMemoryDao {} #[async_trait] impl InMemoryDao<String, Partner> for TestInMemoryDao { fn get( &self, partner_id: &String, ) -> std::result::Result<InMemoryDaoResult<Partner>, InMemoryDaoError> { match partner_id.as_str() { "ok" => Ok(InMemoryDaoResult { value: Partner { name: "in-memory partner".into(), enabled: true, }, date: SystemTime::UNIX_EPOCH, }), _ => Err(InMemoryDaoError::ItemNotFound), } } fn put(&self, _: String, _: Partner) { unreachable!() } } #[actix_rt::test] async fn in_memory_partners_dao_get_partner_ok() { let in_memory_dao = Box::new(TestInMemoryDao {}); let partners = InMemoryPartnersDao { in_memory_dao }; let partner = partners.get_partner("ok").await.unwrap(); assert_eq!( partner, Partner { name: "in-memory partner".into(), enabled: true } ); } #[actix_rt::test] async fn in_memory_partners_dao_get_partner_error() { let in_memory_dao = Box::new(TestInMemoryDao {}); let partners = InMemoryPartnersDao { in_memory_dao }; let res = partners.get_partner("error").await; match res { Err(Error::InMemoryDao(_)) => (), _ => unreachable!(), } } struct CacheHitDao {} #[async_trait] impl PartnersDao for CacheHitDao { async fn get_partner(&self, _: &str) -> Result<Partner, Error> { Ok(Partner { name: "partner".into(), enabled: true, }) } async fn put_partner(&self, _: &str, _: &Partner) { unreachable!() } } struct 
UnreachableDao {} #[async_trait] impl PartnersDao for UnreachableDao { async fn get_partner(&self, _: &str) -> Result<Partner, Error> { unreachable!() } async fn put_partner(&self, _: &str, _: &Partner) { unreachable!() } } #[actix_rt::test] async fn partners_dao_cache_hit() { let cache_partners_dao = CachePartnersDao { cache_dao: Box::new(CacheHitDao {}), underlying_dao: Box::new(UnreachableDao {}), }; let partner = cache_partners_dao .get_partner("0x0000000000000000000000000000000000000000") .await .unwrap(); assert_eq!( partner, Partner { name: "partner".into(), enabled: true } ); } struct CacheMissDao {} #[async_trait] impl PartnersDao for CacheMissDao { async fn get_partner(&self, partner_id: &str) -> Result<Partner, Error> { Err(Error::PartnerNotFound(partner_id.into())) } async fn put_partner(&self, partner_id: &str, partner: &Partner) { assert_eq!(partner_id, "0x0000000000000000000000000000000000000000"); assert_eq!( *partner, Partner { name: "partner".into(), enabled: true } ); } } #[actix_rt::test] async fn partners_dao_cache_miss() { let cache_partners_dao = CachePartnersDao { cache_dao: Box::new(CacheMissDao {}), underlying_dao: Box::new(CacheHitDao {}), }; let partner = cache_partners_dao .get_partner("0x0000000000000000000000000000000000000000") .await .unwrap(); assert_eq!( partner, Partner { name: "partner".into(), enabled: true } ); } }
true
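Usage sketch only (the function name and connector list are invented for illustration): the builder and the PartnersDao trait above compose as below, with the config keys mirroring those exercised in the tests.

async fn lookup_partner(config: &ucdp::config::Config, partner_id: &str) -> Result<Partner, Error> {
    // e.g. data.partners.connectors = ["in-memory", "aerospike", "ethereum"]
    let dao = PartnersBuilder::build(config)?;
    // cache layers are consulted first; a miss falls through and back-fills the cache
    dao.get_partner(partner_id).await
}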
49cab71cfa2b492e190c9e47d04d93f525bda5f1
Rust
gperetin/clutch
/src/main.rs
UTF-8
2,820
2.875
3
[]
no_license
extern crate chrono; extern crate hipchat_client; extern crate serde; extern crate toml; #[macro_use] extern crate serde_derive; use std::env; use std::fs::File; use std::io::prelude::*; use std::io::{self, Write}; use std::path::Path; use chrono::prelude::*; use hipchat_client::Client as HipchatClient; use hipchat_client::message::Message; const DEFAULT_CONFIG_PATH: &str = "clutch.toml"; #[derive(Deserialize)] struct Config { token: String, origin: String, room: String, user: u64 } fn setup_client(config: &Config) -> HipchatClient { // We use clones because String is allocated on heap, doesn't have // a Copy trait, which means it wouldn't be usable after this call. // Consider removing .clone() calls if we don't need these strings // after this call // If HipchatClient can take a reference (&String), consider passing // a reference here. That's a way to refer to a value without taking // ownership. This is called borrowing, and such parameters can't // be mutated, unless annotated with &mut HipchatClient::new(config.origin.clone(), config.token.clone()) } fn print_message(message: Message) { let parsed_date = DateTime::parse_from_rfc3339(&message.date); let from: String = message.from.map(|x| x.name).unwrap_or_else(|| "Unknown".to_string()); println!("[{}] [{}]: {}", parsed_date.unwrap().with_timezone(&Local).format("%m/%d %H:%M").to_string(), from, message.message); } fn print_messages(messages: Vec<Message>) { // for_each consumes the iterator without the map(..).count() workaround messages.into_iter().for_each(print_message); } fn prompt_for_message() -> String { print!("Message: "); io::stdout().flush().unwrap(); let mut buffer = String::new(); io::stdin().read_line(&mut buffer).unwrap(); buffer } fn load_config(path: String) -> Config { let mut contents = String::new(); File::open(path) .unwrap_or_else(|e| panic!("{}", e)) .read_to_string(&mut contents) .unwrap_or_else(|e| panic!("{}", e)); toml::from_str(&contents).unwrap() } fn get_config_path(passed_in: Option<String>) -> String { let path = passed_in.unwrap_or_else(|| DEFAULT_CONFIG_PATH.to_string()); Path::new(&path) .canonicalize() .unwrap() .to_str() .unwrap() .to_string() } fn main() { let config_path = get_config_path(env::args().nth(1)); let config = load_config(config_path); let client = setup_client(&config); let messages = client .get_recent_history(&config.room) .unwrap() .items; println!("Messages for room {}", config.room); print_messages(messages); let message = prompt_for_message(); client.send_message(&config.room, message); }
true
6973fa5a2e8ab4638bf3f9205aa94a057b3d59e8
Rust
kedia-project/stellar-base-rs
/src/error.rs
UTF-8
3,763
2.765625
3
[ "Apache-2.0" ]
permissive
//! Error and Result definitions. use xdr_rs_serialize::error::Error as XdrError; pub type Result<T> = std::result::Result<T, Error>; #[derive(thiserror::Error, Debug)] pub enum Error { /// Error that can occur when parsing a key. #[error("invalid str key")] InvalidStrKey, /// Invalid version byte in key. #[error("invalid str key version byte")] InvalidStrKeyVersionByte, /// Invalid checksum in key. #[error("invalid str key checksum")] InvalidStrKeyChecksum, /// Invalid keypair seed. #[error("invalid seed")] InvalidSeed, /// Invalid Asset code. #[error("invalid asset code")] InvalidAssetCode, /// Invalid data value. #[error("invalid data value")] InvalidDataValue, /// Invalid signature. #[error("invalid signature")] InvalidSignature, /// Invalid signature hint. #[error("invalid signature hint")] InvalidSignatureHint, /// Invalid memo text: too long. #[error("memo text too long")] InvalidMemoText, /// Invalid memo hash: too long. #[error("memo hash too long")] InvalidMemoHash, /// Invalid memo return hash: too long. #[error("memo return hash too long")] InvalidMemoReturn, /// Error that can occur when parsing amounts from stroops. #[error("invalid stroops amount")] InvalidStroopsAmount, /// Error that can occur when converting stroops to unsigned amount. #[error("stroops amount is negative")] NegativeStroops, /// Error that can occur when converting an amount with more than 7 digits. #[error("invalid amount scale")] InvalidAmountScale, /// Error parsing price. #[error("parse price error")] ParsePriceError, /// Invalid network id: too long. #[error("invalid network id")] InvalidNetworkId, /// Invalid public key. #[error("invalid public key")] InvalidPublicKey, /// Invalid pre auth tx. #[error("invalid pre auth tx")] InvalidPreAuthTx, /// Invalid hash(x). #[error("invalid hash(x)")] InvalidHashX, /// Invalid time bounds. #[error("invalid time bounds")] InvalidTimeBounds, /// Invalid claimable balance id length. Length must be 32 bytes. #[error("invalid claimable balance id length")] InvalidClaimableBalanceIdLength, /// Error that can occur when parsing amounts. #[error("error parsing amount")] ParseAmountError(#[from] rust_decimal::Error), /// Error that occurs when building operations. #[error("error building operation")] InvalidOperation(String), /// Error that occurs when building a transaction with too many operations. #[error("transaction has too many operations")] TooManyOperations, /// Error that occurs when building a transaction with no operations. #[error("transaction has no operations")] MissingOperations, /// Transaction fee is too low. #[error("transaction fee too low")] TransactionFeeTooLow, /// Home domain is too long. #[error("home domain too long")] HomeDomainTooLong, /// Invalid account flags. #[error("invalid account flags")] InvalidAccountFlags, /// Invalid trust line flags. #[error("invalid trust line flags")] InvalidTrustLineFlags, /// Transaction fee overflow. #[error("transaction fee overflow")] TransactionFeeOverflow, /// Xdr serialization error #[error("xdr serialization error")] XdrError(XdrError), /// Invalid xdr claim predicate #[error("Invalid xdr claim predicate")] XdrClaimPredicateError, /// Base64 decode error #[error("base64 decode error")] Base64DecodeError(#[from] base64::DecodeError), /// Sodium init failed. #[error("sodium init failed")] SodiumInitFailed, }
true
c9fa9f5b013501543aaefdf82413d54e5892f1c1
Rust
damienstanton/learning-rust
/operator_overloading/src/main.rs
UTF-8
392
3.265625
3
[ "MIT" ]
permissive
use std::ops; struct Foo; struct Bar; #[derive(Debug, PartialEq)] struct FooBar; impl ops::Add<Bar> for Foo { type Output = FooBar; fn add(self, _rhs: Bar) -> FooBar { FooBar } } fn main() { println!("Foo + Bar = {:?}", Foo + Bar); } #[cfg(test)] mod tests { use super::*; #[test] fn foobar_addition() { assert_eq!(FooBar, Foo + Bar) } }
true
220161e79c9baab23ebd3ae72e5999de96b216f1
Rust
icewind1991/steam-vent
/src/serverlist.rs
UTF-8
3,051
2.703125
3
[]
no_license
use reqwest::{Client, Error}; use serde::Deserialize; use std::net::SocketAddr; use thiserror::Error; use tracing::debug; #[derive(Debug, Error)] pub enum ServerDiscoveryError { #[error("Failed to send discovery request: {0:#}")] Network(reqwest::Error), #[error("steam returned an empty server list")] NoServers, } impl From<reqwest::Error> for ServerDiscoveryError { fn from(value: Error) -> Self { ServerDiscoveryError::Network(value) } } #[derive(Default, Clone, Debug)] pub struct DiscoverOptions { web_client: Option<Client>, // todo: some smart cell based routing based on // https://raw.githubusercontent.com/SteamDatabase/SteamTracking/6d23ebb0070998ae851278cfae5f38832f4ac28d/ClientExtracted/steam/cached/CellMap.vdf cell: u8, } impl DiscoverOptions { pub fn with_web_client(self, web_client: Client) -> Self { DiscoverOptions { web_client: Some(web_client), ..self } } pub fn with_cell(self, cell: u8) -> Self { DiscoverOptions { cell, ..self } } } #[derive(Debug)] pub struct ServerList { servers: Vec<SocketAddr>, ws_servers: Vec<String>, } impl ServerList { pub async fn discover() -> Result<ServerList, ServerDiscoveryError> { Self::discover_with(DiscoverOptions::default()).await } pub async fn discover_with( options: DiscoverOptions, ) -> Result<ServerList, ServerDiscoveryError> { let client = options.web_client.unwrap_or_default(); let cell = options.cell; let response: ServerListResponse = client .get(&format!( "https://api.steampowered.com/ISteamDirectory/GetCMList/v1/?cellid={cell}" )) .send() .await? .json() .await?; if response.response.server_list.is_empty() { return Err(ServerDiscoveryError::NoServers); } Ok(response.into()) } pub fn pick(&self) -> SocketAddr { // todo: something smarter than always using the first entry let addr = *self.servers.first().unwrap(); debug!(addr = ?addr, "picked server from list"); addr } pub fn pick_ws(&self) -> String { // todo: something smarter than always using the first entry let addr = self.ws_servers.first().unwrap(); debug!(addr = ?addr, "picked websocket server from list"); format!("wss://{addr}/cmsocket/") } } impl From<ServerListResponse> for ServerList { fn from(value: ServerListResponse) -> Self { ServerList { servers: value.response.server_list, ws_servers: value.response.server_list_websockets, } } } #[derive(Debug, Deserialize)] struct ServerListResponse { response: ServerListResponseInner, } #[derive(Debug, Deserialize)] struct ServerListResponseInner { #[serde(rename = "serverlist")] server_list: Vec<SocketAddr>, #[serde(rename = "serverlist_websockets")] server_list_websockets: Vec<String>, }
true
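A hypothetical call site for the discovery API above (function name invented; an async runtime such as tokio is assumed):

async fn pick_endpoints() -> Result<(), ServerDiscoveryError> {
    // queries GetCMList with the default cell id and errors on an empty list
    let list = ServerList::discover().await?;
    let tcp_addr = list.pick();  // SocketAddr for a raw CM connection
    let ws_url = list.pick_ws(); // "wss://<host>/cmsocket/" for the websocket transport
    println!("cm server: {tcp_addr}, websocket: {ws_url}");
    Ok(())
}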