blob_id
stringlengths
40
40
language
stringclasses
1 value
repo_name
stringlengths
5
140
path
stringlengths
5
183
src_encoding
stringclasses
6 values
length_bytes
int64
12
5.32M
score
float64
2.52
4.94
int_score
int64
3
5
detected_licenses
listlengths
0
47
license_type
stringclasses
2 values
text
stringlengths
12
5.32M
download_success
bool
1 class
96cee0830f4b08273d730cbf4a85f14be1f16a2d
Rust
rgreenblatt/mesh
/src/mesh_operation/remesh.rs
UTF-8
4,845
2.546875
3
[]
no_license
use crate::mesh_operation::Operation; use crate::DataStructure; use crate::Vector3; use clap::Clap; use std::collections::HashSet; #[derive(Clap)] pub struct Remesh { iterations: u32, smoothing_weight: f32, #[clap(long = "no-collapse")] no_collapse: bool, #[clap(long = "no-flip")] no_flip: bool, } impl Operation for Remesh { #[allow(clippy::cognitive_complexity)] fn apply<D: DataStructure>(&self, mesh: &mut D) { for _ in 0..self.iterations { let mut total_edge_len = 0.0; let mut num_edges = 0; let get_edge_len = |mesh: &D, edge_idx| { let [l, r] = mesh.get_endpoints(edge_idx); let val = (mesh.get_position(l) - mesh.get_position(r)).norm(); debug_assert!(!val.is_nan()); val }; let mut edge_op = mesh.initial_edge(); while let Some(edge_idx) = edge_op { num_edges += 1; total_edge_len += get_edge_len(mesh, edge_idx); edge_op = mesh.next_edge(edge_idx); } let avg_edge_len = total_edge_len / (num_edges as f32); let mut to_split = Vec::new(); let mut to_collapse = Vec::new(); let midpoint = |mesh: &D, edge_idx| { let [l, r] = mesh.get_endpoints(edge_idx); (mesh.get_position(l) + mesh.get_position(r)) * 0.5 }; let mut edge_op = mesh.initial_edge(); while let Some(edge_idx) = edge_op { let edge_len = get_edge_len(mesh, edge_idx); if edge_len > (4.0 / 3.0) * avg_edge_len { to_split.push((edge_idx, midpoint(mesh, edge_idx))); } else if edge_len < (4.0 / 5.0) * avg_edge_len { to_collapse.push((edge_idx, midpoint(mesh, edge_idx))); } edge_op = mesh.next_edge(edge_idx); } for (edge_idx, new_pos) in to_split { let (new_vertex, _) = mesh.split_edge(edge_idx); debug_assert!(!new_pos[0].is_nan()); debug_assert!(!new_pos[1].is_nan()); debug_assert!(!new_pos[2].is_nan()); mesh.set_position(new_vertex, &new_pos); } if !self.no_collapse { let mut removed = HashSet::new(); let mut store_removed = Vec::new(); let mut store_modified = Vec::new(); for (edge_idx, new_pos) in to_collapse { if !removed.contains(&edge_idx) { if let Some(vertex_idx) = mesh.collapse_edge( edge_idx, &mut 
store_modified, &mut store_removed, ) { mesh.set_position(vertex_idx, &new_pos); } removed.extend(store_removed.iter().cloned()); removed.extend(store_modified.iter().map(|x| x.0)); } } } if !self.no_flip { let mut edge_op = mesh.initial_edge(); while let Some(edge_idx) = edge_op { if let ([l, r, top], Some(bottom)) = mesh.get_edge_neighbors(edge_idx) { let l_degree = mesh.degree(l) as i32; let r_degree = mesh.degree(r) as i32; let top_degree = mesh.degree(top) as i32; let bottom_degree = mesh.degree(bottom) as i32; let flip_dev = (l_degree - 7).abs() + (r_degree - 7).abs() + (top_degree - 5).abs() + (bottom_degree - 5).abs(); let no_flip_dev = (l_degree - 6).abs() + (r_degree - 6).abs() + (top_degree - 6).abs() + (bottom_degree - 6).abs(); if flip_dev < no_flip_dev && l_degree > 3 && r_degree > 3 { mesh.flip_edge(edge_idx); } } edge_op = mesh.next_edge(edge_idx); } } let mut vertex_op = mesh.initial_vertex(); let mut new_positions = Vec::new(); let mut neighbors = Vec::new(); let mut store = Vec::new(); while let Some(vertex_idx) = vertex_op { mesh.get_vertex_neighbors(vertex_idx, &mut neighbors); let centroid = neighbors .iter() .fold(Vector3::zeros(), |acc, other_vertex_idx| { acc + mesh.get_position(*other_vertex_idx) }) / neighbors.len() as f32; let orig_position = mesh.get_position(vertex_idx); let diff = centroid - orig_position; let normal = mesh.get_vertex_normal(vertex_idx, &mut store); let delta = diff - (normal.dot(&diff)) * normal; let new_position = if delta[0].is_nan() || delta[1].is_nan() || delta[2].is_nan() { orig_position } else { orig_position + self.smoothing_weight * delta }; new_positions.push((vertex_idx, new_position)); vertex_op = mesh.next_vertex(vertex_idx); } for (vertex_idx, new_position) in new_positions { mesh.set_position(vertex_idx, &new_position); } } } }
true
59e9add52c4649478b891e384583e5ca300d49be
Rust
adilhasan/catalogue
/src/models.rs
UTF-8
1,826
2.5625
3
[ "MIT" ]
permissive
// Library of models use std::{path::PathBuf}; use serde::{Serialize, Deserialize}; // Structure for the database DataFile #[derive(Debug)] pub struct DataFile { pub created: u64, pub path: PathBuf, pub size: u64, pub title: String, pub description: String, pub hash: String, pub publisher: String, pub extension: String, pub read: bool, } // Annotation to be uploaded to the database #[derive(Debug, Serialize, Deserialize)] pub struct Annotation { pub id: u64, pub title: String, pub description: String, pub publisher: String, pub read: bool, } // Annotation record downloaded from the database #[derive(Debug, Serialize, Deserialize)] pub struct AnnotationRecord { pub id: u64, pub path: PathBuf, pub title: String, pub description: String, pub publisher: String, pub read: bool, } // Constructor for the DataFile object impl DataFile { pub fn new() -> Self { Self { created: 0, path: PathBuf::new(), size: 0, title: String::new(), description: String::new(), hash: String::new(), publisher: String::new(), extension: String::new(), read: false, } } } // Config parameters #[derive(Debug, Deserialize)] pub struct Config { pub database: Database, } // Database table schema #[derive(Debug, Deserialize)] pub struct Database { pub file: PathBuf, pub get_path: String, pub create_catalogue: String, pub insert_catalogue: String, pub update_catalogue: String, pub all_annotation: String, pub before_annotation: String, pub after_annotation: String, pub interval_annotation: String, pub query_d: String, pub query_t: String, pub update_record: String, }
true
1b4bd6e16d87baa72f809835514f4f8b514b41be
Rust
g2xpf/glsl-linalg
/src/matrix/m2.rs
UTF-8
2,832
2.796875
3
[ "MIT" ]
permissive
use crate::float::Float; use crate::matrix::{FloatMatrix, FromVectors, IntoVectors, Matrix, M2}; use crate::numeric::Numeric; use crate::vector::{Vector, V2}; use std::ops::{Add, Deref, DerefMut, Div, Mul, Sub}; impl<T> Deref for M2<T> where T: Numeric, { type Target = [[T; 2]; 2]; fn deref(&self) -> &Self::Target { &self.0 } } impl<T> DerefMut for M2<T> where T: Numeric, { fn deref_mut(&mut self) -> &mut Self::Target { &mut self.0 } } impl<T> IntoVectors<(V2<T>, V2<T>)> for M2<T> where T: Numeric, { fn into_cols(&self) -> (V2<T>, V2<T>) { (V2([self[0][0], self[1][0]]), V2([self[0][1], self[1][1]])) } fn into_rows(&self) -> (V2<T>, V2<T>) { (V2([self[0][0], self[0][1]]), V2([self[1][0], self[1][1]])) } } impl<T> FromVectors<(V2<T>, V2<T>)> for M2<T> where T: Numeric, { fn from_cols(v: (V2<T>, V2<T>)) -> Self { let (r1, r2) = v; M2([[r1[0], r2[0]], [r1[1], r2[1]]]) } fn from_rows(v: (V2<T>, V2<T>)) -> Self { let (V2(c1), V2(c2)) = v; M2([c1, c2]) } } impl<T> Matrix for M2<T> where T: Numeric, { fn transpose(&mut self) { unsafe { (&mut self[0][1] as *mut T).swap(&mut self[1][0]); } } } impl<F> FloatMatrix<F> for M2<F> where F: Float, { fn determinant(&self) -> F { self[0][0] * self[1][1] - self[0][1] * self[1][0] } fn cofactor(&self) -> Self { M2([[self[1][1], -self[0][1]], [-self[1][0], self[0][0]]]) } } impl<T> Add for M2<T> where T: Numeric, { type Output = M2<T>; fn add(self, rhs: Self) -> Self::Output { M2([ [self[0][0] + rhs[0][0], self[0][1] + rhs[0][1]], [self[1][0] + rhs[1][0], self[1][1] + rhs[1][1]], ]) } } impl<T> Sub for M2<T> where T: Numeric, { type Output = M2<T>; fn sub(self, rhs: Self) -> Self::Output { M2([ [self[0][0] - rhs[0][0], self[0][1] - rhs[0][1]], [self[1][0] - rhs[1][0], self[1][1] - rhs[1][1]], ]) } } impl<T> Mul for M2<T> where T: Numeric, { type Output = M2<T>; fn mul(self, rhs: Self) -> Self::Output { let (c1, c2) = self.into_rows(); let (r1, r2) = rhs.into_cols(); M2([[c1.dot(r1), c1.dot(r2)], [c2.dot(r1), c2.dot(r2)]]) } } impl<T> 
Mul<V2<T>> for M2<T> where T: Numeric, { type Output = V2<T>; fn mul(self, rhs: V2<T>) -> Self::Output { let (c1, c2) = self.into_rows(); V2([c1.dot(rhs), c2.dot(rhs)]) } } impl<T> Div<T> for M2<T> where T: Numeric, { type Output = M2<T>; fn div(self, rhs: T) -> Self::Output { M2([ [self[0][0] / rhs, self[0][1] / rhs], [self[1][0] / rhs, self[1][1] / rhs], ]) } }
true
10b83b138f9fb41a84b7c6af266def48b95232a1
Rust
JelteF/derive_more
/impl/src/fmt/mod.rs
UTF-8
17,463
2.84375
3
[ "MIT" ]
permissive
//! Implementations of [`fmt`]-like derive macros. //! //! [`fmt`]: std::fmt #[cfg(feature = "debug")] pub(crate) mod debug; #[cfg(feature = "display")] pub(crate) mod display; mod parsing; use proc_macro2::TokenStream; use quote::ToTokens; use syn::{ parse::{Parse, ParseStream}, punctuated::Punctuated, spanned::Spanned as _, token, Ident, }; use crate::{parsing::Expr, utils::Either}; /// Representation of a macro attribute expressing additional trait bounds. #[derive(Debug, Default)] struct BoundsAttribute(Punctuated<syn::WherePredicate, syn::token::Comma>); impl BoundsAttribute { /// Errors in case legacy syntax is encountered: `bound = "..."`. fn check_legacy_fmt(input: ParseStream<'_>) -> syn::Result<()> { let fork = input.fork(); let path = fork .parse::<syn::Path>() .and_then(|path| fork.parse::<syn::token::Eq>().map(|_| path)); match path { Ok(path) if path.is_ident("bound") => fork .parse::<syn::Lit>() .ok() .and_then(|lit| match lit { syn::Lit::Str(s) => Some(s.value()), _ => None, }) .map_or(Ok(()), |bound| { Err(syn::Error::new( input.span(), format!("legacy syntax, use `bound({bound})` instead"), )) }), Ok(_) | Err(_) => Ok(()), } } } impl Parse for BoundsAttribute { fn parse(input: ParseStream<'_>) -> syn::Result<Self> { let _ = input.parse::<syn::Path>().and_then(|p| { if ["bound", "bounds", "where"] .into_iter() .any(|i| p.is_ident(i)) { Ok(p) } else { Err(syn::Error::new( p.span(), "unknown attribute, expected `bound(...)`", )) } })?; let content; syn::parenthesized!(content in input); content .parse_terminated(syn::WherePredicate::parse, token::Comma) .map(Self) } } /// Representation of a [`fmt`]-like attribute. /// /// [`fmt`]: std::fmt #[derive(Debug)] struct FmtAttribute { /// Interpolation [`syn::LitStr`]. /// /// [`syn::LitStr`]: struct@syn::LitStr lit: syn::LitStr, /// Optional [`token::Comma`]. comma: Option<token::Comma>, /// Interpolation arguments. 
args: Punctuated<FmtArgument, token::Comma>, } impl FmtAttribute { /// Returns an [`Iterator`] over bounded [`syn::Type`]s and trait names. fn bounded_types<'a>( &'a self, fields: &'a syn::Fields, ) -> impl Iterator<Item = (&'a syn::Type, &'static str)> { let placeholders = Placeholder::parse_fmt_string(&self.lit.value()); // We ignore unknown fields, as compiler will produce better error messages. placeholders.into_iter().filter_map(move |placeholder| { let name = match placeholder.arg { Parameter::Named(name) => self .args .iter() .find_map(|a| (a.alias()? == &name).then_some(&a.expr)) .map_or(Some(name), |expr| expr.ident().map(ToString::to_string))?, Parameter::Positional(i) => self .args .iter() .nth(i) .and_then(|a| a.expr.ident().filter(|_| a.alias.is_none()))? .to_string(), }; let unnamed = name.strip_prefix('_').and_then(|s| s.parse().ok()); let ty = match (&fields, unnamed) { (syn::Fields::Unnamed(f), Some(i)) => { f.unnamed.iter().nth(i).map(|f| &f.ty) } (syn::Fields::Named(f), None) => f.named.iter().find_map(|f| { f.ident.as_ref().filter(|s| **s == name).map(|_| &f.ty) }), _ => None, }?; Some((ty, placeholder.trait_name)) }) } /// Errors in case legacy syntax is encountered: `fmt = "...", (arg),*`. fn check_legacy_fmt(input: ParseStream<'_>) -> syn::Result<()> { let fork = input.fork(); let path = fork .parse::<syn::Path>() .and_then(|path| fork.parse::<syn::token::Eq>().map(|_| path)); match path { Ok(path) if path.is_ident("fmt") => (|| { let args = fork .parse_terminated( <Either<syn::Lit, syn::Ident>>::parse, token::Comma, ) .ok()? 
.into_iter() .enumerate() .filter_map(|(i, arg)| match arg { Either::Left(syn::Lit::Str(str)) => Some(if i == 0 { format!("\"{}\"", str.value()) } else { str.value() }), Either::Right(ident) => Some(ident.to_string()), _ => None, }) .collect::<Vec<_>>(); (!args.is_empty()).then_some(args) })() .map_or(Ok(()), |fmt| { Err(syn::Error::new( input.span(), format!( "legacy syntax, remove `fmt =` and use `{}` instead", fmt.join(", "), ), )) }), Ok(_) | Err(_) => Ok(()), } } } impl Parse for FmtAttribute { fn parse(input: ParseStream<'_>) -> syn::Result<Self> { Ok(Self { lit: input.parse()?, comma: input .peek(syn::token::Comma) .then(|| input.parse()) .transpose()?, args: input.parse_terminated(FmtArgument::parse, token::Comma)?, }) } } impl ToTokens for FmtAttribute { fn to_tokens(&self, tokens: &mut TokenStream) { self.lit.to_tokens(tokens); self.comma.to_tokens(tokens); self.args.to_tokens(tokens); } } /// Representation of a [named parameter][1] (`identifier '=' expression`) in /// in a [`FmtAttribute`]. /// /// [1]: https://doc.rust-lang.org/stable/std/fmt/index.html#named-parameters #[derive(Debug)] struct FmtArgument { /// `identifier =` [`Ident`]. alias: Option<(Ident, token::Eq)>, /// `expression` [`Expr`]. expr: Expr, } impl FmtArgument { /// Returns an `identifier` of the [named parameter][1]. 
/// /// [1]: https://doc.rust-lang.org/stable/std/fmt/index.html#named-parameters fn alias(&self) -> Option<&Ident> { self.alias.as_ref().map(|(ident, _)| ident) } } impl Parse for FmtArgument { fn parse(input: ParseStream) -> syn::Result<Self> { Ok(Self { alias: (input.peek(Ident) && input.peek2(token::Eq)) .then(|| Ok::<_, syn::Error>((input.parse()?, input.parse()?))) .transpose()?, expr: input.parse()?, }) } } impl ToTokens for FmtArgument { fn to_tokens(&self, tokens: &mut TokenStream) { if let Some((ident, eq)) = &self.alias { ident.to_tokens(tokens); eq.to_tokens(tokens); } self.expr.to_tokens(tokens); } } /// Representation of a [parameter][1] used in a [`Placeholder`]. /// /// [1]: https://doc.rust-lang.org/stable/std/fmt/index.html#formatting-parameters #[derive(Debug, Eq, PartialEq)] enum Parameter { /// [Positional parameter][1]. /// /// [1]: https://doc.rust-lang.org/stable/std/fmt/index.html#positional-parameters Positional(usize), /// [Named parameter][1]. /// /// [1]: https://doc.rust-lang.org/stable/std/fmt/index.html#named-parameters Named(String), } impl<'a> From<parsing::Argument<'a>> for Parameter { fn from(arg: parsing::Argument<'a>) -> Self { match arg { parsing::Argument::Integer(i) => Self::Positional(i), parsing::Argument::Identifier(i) => Self::Named(i.to_owned()), } } } /// Representation of a formatting placeholder. #[derive(Debug, PartialEq, Eq)] struct Placeholder { /// Formatting argument (either named or positional) to be used by this placeholder. arg: Parameter, /// [Width parameter][1], if present. /// /// [1]: https://doc.rust-lang.org/stable/std/fmt/index.html#width width: Option<Parameter>, /// [Precision parameter][1], if present. /// /// [1]: https://doc.rust-lang.org/stable/std/fmt/index.html#precision precision: Option<Parameter>, /// Name of [`std::fmt`] trait to be used for rendering this placeholder. trait_name: &'static str, } impl Placeholder { /// Parses [`Placeholder`]s from the provided formatting string. 
fn parse_fmt_string(s: &str) -> Vec<Self> { let mut n = 0; parsing::format_string(s) .into_iter() .flat_map(|f| f.formats) .map(|format| { let (maybe_arg, ty) = ( format.arg, format.spec.map(|s| s.ty).unwrap_or(parsing::Type::Display), ); let position = maybe_arg.map(Into::into).unwrap_or_else(|| { // Assign "the next argument". // https://doc.rust-lang.org/stable/std/fmt/index.html#positional-parameters n += 1; Parameter::Positional(n - 1) }); Self { arg: position, width: format.spec.and_then(|s| match s.width { Some(parsing::Count::Parameter(arg)) => Some(arg.into()), _ => None, }), precision: format.spec.and_then(|s| match s.precision { Some(parsing::Precision::Count(parsing::Count::Parameter( arg, ))) => Some(arg.into()), _ => None, }), trait_name: ty.trait_name(), } }) .collect() } } /// Representation of a [`fmt::Display`]-like derive macro attributes placed on a container (struct /// or enum variant). /// /// ```rust,ignore /// #[<fmt_trait>("<fmt_literal>", <fmt_args>)] /// #[bound(<bounds>)] /// ``` /// /// `#[<fmt_trait>(...)]` can be specified only once, while multiple /// `#[<fmt_trait>(bound(...))]` are allowed. #[derive(Debug, Default)] struct ContainerAttributes { /// Interpolation [`FmtAttribute`]. fmt: Option<FmtAttribute>, /// Addition trait bounds. bounds: BoundsAttribute, } impl ContainerAttributes { /// Parses [`ContainerAttributes`] from the provided [`syn::Attribute`]s. pub(super) fn parse_attrs( attrs: impl AsRef<[syn::Attribute]>, trait_name: &str, ) -> syn::Result<Self> { attrs .as_ref() .iter() .filter(|attr| attr.path().is_ident(trait_name_to_attribute_name(trait_name))) .try_fold(Self::default(), |mut attrs, attr| { match attr.parse_args::<ContainerAttribute>()? 
{ ContainerAttribute::Bounds(more) => { attrs.bounds.0.extend(more.0); } ContainerAttribute::Fmt(fmt) => { attrs.fmt.replace(fmt).map_or(Ok(()), |dup| Err(syn::Error::new( dup.span(), format!( "multiple `#[{}(\"...\", ...)]` attributes aren't allowed", trait_name_to_attribute_name(trait_name), ))))?; } }; Ok(attrs) }) } } /// Representation of a single [`fmt::Display`]-like derive macro attribute, placed on a container /// (struct or enum variant). #[derive(Debug)] enum ContainerAttribute { /// [`fmt`] attribute. Fmt(FmtAttribute), /// Addition trait bounds. Bounds(BoundsAttribute), } impl Parse for ContainerAttribute { fn parse(input: ParseStream<'_>) -> syn::Result<Self> { BoundsAttribute::check_legacy_fmt(input)?; FmtAttribute::check_legacy_fmt(input)?; if input.peek(syn::LitStr) { input.parse().map(Self::Fmt) } else { input.parse().map(Self::Bounds) } } } /// Matches the provided `trait_name` to appropriate [`Attribute::Fmt`] argument name. fn trait_name_to_attribute_name<T>(trait_name: T) -> &'static str where T: for<'a> PartialEq<&'a str>, { match () { _ if trait_name == "Binary" => "binary", _ if trait_name == "Debug" => "debug", _ if trait_name == "Display" => "display", _ if trait_name == "LowerExp" => "lower_exp", _ if trait_name == "LowerHex" => "lower_hex", _ if trait_name == "Octal" => "octal", _ if trait_name == "Pointer" => "pointer", _ if trait_name == "UpperExp" => "upper_exp", _ if trait_name == "UpperHex" => "upper_hex", _ => unimplemented!(), } } #[cfg(test)] mod fmt_attribute_spec { use itertools::Itertools as _; use quote::ToTokens; use syn; use super::FmtAttribute; fn assert<'a>(input: &'a str, parsed: impl AsRef<[&'a str]>) { let parsed = parsed.as_ref(); let attr = syn::parse_str::<FmtAttribute>(&format!("\"\", {}", input)).unwrap(); let fmt_args = attr .args .into_iter() .map(|arg| arg.into_token_stream().to_string()) .collect::<Vec<String>>(); fmt_args.iter().zip_eq(parsed).enumerate().for_each( |(i, (found, expected))| { assert_eq!( 
*expected, found, "Mismatch at index {i}\n\ Expected: {parsed:?}\n\ Found: {fmt_args:?}", ); }, ); } #[test] fn cases() { let cases = [ "ident", "alias = ident", "[a , b , c , d]", "counter += 1", "async { fut . await }", "a < b", "a > b", "{ let x = (a , b) ; }", "invoke (a , b)", "foo as f64", "| a , b | a + b", "obj . k", "for pat in expr { break pat ; }", "if expr { true } else { false }", "vector [2]", "1", "\"foo\"", "loop { break i ; }", "format ! (\"{}\" , q)", "match n { Some (n) => { } , None => { } }", "x . foo ::< T > (a , b)", "x . foo ::< T < [T < T >; if a < b { 1 } else { 2 }] >, { a < b } > (a , b)", "(a + b)", "i32 :: MAX", "1 .. 2", "& a", "[0u8 ; N]", "(a , b , c , d)", "< Ty as Trait > :: T", "< Ty < Ty < T >, { a < b } > as Trait < T > > :: T", ]; assert("", []); for i in 1..4 { for permutations in cases.into_iter().permutations(i) { let mut input = permutations.clone().join(","); assert(&input, &permutations); input.push(','); assert(&input, &permutations); } } } } #[cfg(test)] mod placeholder_parse_fmt_string_spec { use super::{Parameter, Placeholder}; #[test] fn indicates_position_and_trait_name_for_each_fmt_placeholder() { let fmt_string = "{},{:?},{{}},{{{1:0$}}}-{2:.1$x}{par:#?}{:width$}"; assert_eq!( Placeholder::parse_fmt_string(&fmt_string), vec![ Placeholder { arg: Parameter::Positional(0), width: None, precision: None, trait_name: "Display", }, Placeholder { arg: Parameter::Positional(1), width: None, precision: None, trait_name: "Debug", }, Placeholder { arg: Parameter::Positional(1), width: Some(Parameter::Positional(0)), precision: None, trait_name: "Display", }, Placeholder { arg: Parameter::Positional(2), width: None, precision: Some(Parameter::Positional(1)), trait_name: "LowerHex", }, Placeholder { arg: Parameter::Named("par".to_owned()), width: None, precision: None, trait_name: "Debug", }, Placeholder { arg: Parameter::Positional(2), width: Some(Parameter::Named("width".to_owned())), precision: None, trait_name: "Display", 
}, ], ); } }
true
002d1f774a898a58a53d5af5b1a44dad5bb523ee
Rust
xurtis/isolate
/src/main.rs
UTF-8
4,799
3.21875
3
[ "ISC" ]
permissive
//! `isolate` is a command line tool that encapsulates the behaviour provided by the //! [`unshare`](https://docs.rs/unshare) library. //! //! `isolate` uses a configuration file to construct what is essentially a lightweight container //! for the command that it then executes. //! //! # Configuration file //! //! The configuration file can be specified at the command line using the `-f` or `--config-file` //! flag. Alternatively, the following locations are searched in order: //! //! 1. `./isolate.toml` //! 1. `./.isolate.toml` //! 1. `~/.config/isolate.toml` //! 1. `~/.isolate.toml` //! 1. `/etc/isolate.toml` //! //! # Usage //! //! `isolate [--config-file <path>] <command>` extern crate docopt; extern crate serde; #[macro_use] extern crate serde_derive; extern crate toml; extern crate unshare; use std::env; use std::fs::File; use std::io::Read; use std::path::Path; use std::process::exit; use docopt::Docopt; use toml::de::from_str; fn main() { let args = Arguments::load(); if args.flag_default_config { print!("{}", DEFAULT_CONFIG); exit(0); } args.into_command().exec(); } const USAGE: &'static str = " Usage: isolate [--config-file <file>] <program> [<args>...] isolate [-v | -h | -d] Options: -f <file>, --config-file <file> Location of configuration file to use. -h, --help Show this help. -v, --version Show the version. -d, --default-config Dumpt the default configuration to stdout. "; #[derive(Deserialize)] struct Arguments { flag_config_file: Option<String>, flag_default_config: bool, arg_program: String, arg_args: Vec<String> } impl Arguments { /// Load arguments from the command line. fn load() -> Arguments { Docopt::new(USAGE) .unwrap_or_else(|e| e.exit()) .help(true) .version(Some(version())) .deserialize() .unwrap_or_else(|e| e.exit()) } /// Construct the command to execute. fn into_command(self) -> Command { let config = self.config(); Command::new(self.arg_program, self.arg_args, config) } /// Determine the path to configuration file. 
fn config(&self) -> Configuration { let text = if let Some(ref path) = self.find_config_path() { let mut file = File::open(path).expect("could not open configuration file"); let mut text = String::new(); file.read_to_string(&mut text).expect("could not read configuration file"); text } else { DEFAULT_CONFIG.to_string() }; from_str(&text).expect("could not parse configuration") } /// Determine the path of the configuration file. fn find_config_path(&self) -> Option<String> { if let Some(ref path) = self.flag_config_file { Some(path.clone()) } else { let paths = Arguments::default_config_paths(); for path in paths { if Path::new(&path).exists() { return Some(path) } } None } } /// Default configuration path list. fn default_config_paths() -> Vec<String> { let mut paths = vec![ "isolate.toml".to_string(), ".isolate.toml".to_string() ]; if let Ok(path) = env::var("HOME") { paths.push(format!("{}/.config/isolate.toml", path)); paths.push(format!("{}/.isolate.toml", path)); } paths.push("/etc/isolate.toml".to_string()); paths } } const DEFAULT_CONFIG: &'static str = include_str!("isolate.toml"); #[derive(Deserialize)] struct Configuration { } struct Command { program: String, arguments: Vec<String>, config: Configuration, } impl Command { /// COnstruct a new command fn new(program: String, args: Vec<String>, config: Configuration) -> Command { Command { program: program, arguments: args, config: config, } } /// Execute the given command. fn exec(&self) { unshare::Command::new(&self.program) .args(&self.arguments) .spawn() .expect("unable to spawn process") .wait() .expect("error in child process"); } } /// Construct the version string for the program. fn version() -> String { format!( "{} - {}\n{}\n\n{}", env!("CARGO_PKG_NAME"), env!("CARGO_PKG_VERSION"), env!("CARGO_PKG_AUTHORS"), env!("CARGO_PKG_DESCRIPTION"), ) } /// Test that the provided configuration file is valid. #[test] fn default_config() { assert!(from_str::<Configuration>(DEFAULT_CONFIG).is_ok()); }
true
71c653f12dfc5cfb4ca415b08b18a11ebbe3cdb5
Rust
aweary/swc
/crates/swc_css_minifier/src/compressor/unit/mod.rs
UTF-8
3,286
2.984375
3
[ "Apache-2.0", "LicenseRef-scancode-unknown-license-reference" ]
permissive
use swc_atoms::{js_word, JsWord}; pub fn get_duration_ratio(unit1: &JsWord, unit2: &JsWord) -> Option<f64> { // Duration ratio, see https://www.w3.org/TR/css-values-4/#time if unit1 == unit2 { return Some(1.0); } match *unit1 { js_word!("ms") => match *unit2 { js_word!("s") => Some(1000.0), _ => None, }, js_word!("s") => match *unit2 { js_word!("ms") => Some(0.001), _ => None, }, _ => None, } } pub fn get_frequency_ratio(unit1: &JsWord, unit2: &JsWord) -> Option<f64> { // Frequency ratio, see https://www.w3.org/TR/css-values-4/#frequency if unit1 == unit2 { return Some(1.0); } match *unit1 { js_word!("hz") => match *unit2 { js_word!("khz") => Some(1000.0), _ => None, }, js_word!("khz") => match *unit2 { js_word!("hz") => Some(0.001), _ => None, }, _ => None, } } pub fn get_absolute_length_ratio(unit1: &JsWord, unit2: &JsWord) -> Option<f64> { // For length ratio, see https://www.w3.org/TR/css-values-4/#absolute-lengths if unit1 == unit2 { return Some(1.0); } match *unit1 { js_word!("cm") => match *unit2 { js_word!("mm") => Some(0.1), js_word!("q") => Some(0.025), js_word!("in") => Some(2.54), _ => None, }, js_word!("mm") => match *unit2 { js_word!("cm") => Some(10.0), js_word!("q") => Some(0.25), js_word!("in") => Some(25.4), _ => None, }, js_word!("q") => match *unit2 { js_word!("cm") => Some(40.0), js_word!("mm") => Some(4.0), js_word!("in") => Some(101.6), _ => None, }, js_word!("pc") => match *unit2 { js_word!("in") => Some(6.0), js_word!("px") => Some(0.0625), _ => None, }, js_word!("pt") => match *unit2 { js_word!("in") => Some(72.0), js_word!("pc") => Some(12.0), js_word!("px") => Some(0.75), _ => None, }, js_word!("px") => match *unit2 { js_word!("in") => Some(96.0), js_word!("pc") => Some(16.0), _ => None, }, _ => None, } } pub fn get_resolution_ratio(unit1: &JsWord, unit2: &JsWord) -> Option<f64> { // Resolution ratio, see https://www.w3.org/TR/css-values-4/#resolution // "x" is an alias for "dppx" if unit1 == unit2 { return Some(1.0); } match *unit1 { 
js_word!("dpcm") => match *unit2 { js_word!("dpi") => Some(2.54), _ => None, }, js_word!("dppx") | js_word!("x") => match *unit2 { js_word!("dppx") | js_word!("x") => Some(1.0), js_word!("dpi") => Some(96.0), _ => None, }, _ => None, } } pub fn is_absolute_length(unit: &JsWord) -> bool { match *unit { js_word!("cm") | js_word!("mm") | js_word!("q") | js_word!("in") | js_word!("pc") | js_word!("pt") | js_word!("px") => true, _ => false, } }
true
a2da01809f29315bf7ff99b22c4368b55b25f776
Rust
killercup/pretty-hex
/src/pretty_hex.rs
UTF-8
3,217
3.328125
3
[ "MIT" ]
permissive
use alloc::{string::String, vec::Vec}; use core::fmt; use core::result::Result; /// Returns a `String` showing octets grouped in 4-byte words. pub fn simple_hex<T: AsRef<[u8]>>(source: &T) -> String { let mut writer = String::new(); simple_hex_write(&mut writer, source).unwrap_or(()); writer } /// Return a multi-line `String` complete with addressing, hex digits, and ASCII representation. pub fn pretty_hex<T: AsRef<[u8]>>(source: &T) -> String { let mut writer = String::new(); pretty_hex_write(&mut writer, source).unwrap_or(()); writer } const COLS: usize = 16; const CHNK: usize = 4; const NASCI: char = '.'; /// Dump hex octets grouped in 4-byte words to the writer. pub fn simple_hex_write<T, W>(writer: &mut W, source: &T) -> Result<(), fmt::Error> where T: AsRef<[u8]> , W: fmt::Write { for (i,x) in source.as_ref().iter().enumerate() { write!(writer, "{:02x}{}", x, match i+1 { n if n == source.as_ref().len() => "", n if n % CHNK == 0 => " ", _ => " ", } )?; } Ok(()) } /// Write multi-line dump complete with addressing, hex digits, and ASCII representation to the writer. pub fn pretty_hex_write<T, W>(writer: &mut W, source: &T) -> Result<(), fmt::Error> where T: AsRef<[u8]> , W: fmt::Write { writeln!(writer, "Length: {0} (0x{0:x}) bytes", source.as_ref().len())?; let chunks = source.as_ref().chunks(COLS); let lines = chunks.len(); for (i, row) in chunks.enumerate() { write!(writer, "{:04x}: ", i * COLS)?; simple_hex_write(writer, &row)?; let pad = COLS - row.len(); let pad = pad * 3 + pad / CHNK; for _ in 0..pad { writer.write_char(' ')?; } write!(writer, " ")?; for x in row { writer.write_char( if x.is_ascii() && !x.is_ascii_control() { (*x).into() } else { NASCI } )?; } if i+1 < lines { writeln!(writer, "")?; } } Ok(()) } /// Reference wrapper for use in arguments formatting. pub struct Hex<'a, T: 'a>(&'a T); impl<'a, T:'a + AsRef<[u8]>> fmt::Display for Hex<'a, T> { /// Formats the value by `simple_hex_write` using the given formatter. 
fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { simple_hex_write(f, self.0) } } impl<'a, T:'a + AsRef<[u8]>> fmt::Debug for Hex<'a, T> { /// Formats the value by `pretty_hex_write` using the given formatter. fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { pretty_hex_write(f, self.0) } } /// Allows generates hex dumps to a formatter. pub trait PrettyHex: Sized { /// Wrap self reference for use in `std::fmt::Display`/`std::fmt::Debug` formating as hex dumps. fn hex_dump<'a>(&'a self) -> Hex<'a, Self>; } impl PrettyHex for Vec<u8> { fn hex_dump<'a>(&'a self) -> Hex<'a, Self> { Hex(self) } } impl<'a> PrettyHex for &'a [u8] { fn hex_dump<'b>(&'b self) -> Hex<'b, Self> { Hex(self) } } impl<'a> PrettyHex for &'a mut [u8] { fn hex_dump<'b>(&'b self) -> Hex<'b, Self> { Hex(self) } }
true
7dc6db538dd3f6ee039c72b4a97052f07bba2fa5
Rust
rsaarelm/bloop
/examples/beep.rs
UTF-8
666
2.65625
3
[]
no_license
extern crate bloop; use std::sync::{Arc, Mutex}; use bloop::{Synth, Flick, Sample, Music, Primitive}; use std::f64::consts::PI; use std::{thread, time}; struct SineWave { pitch: f64, volume: f64, } impl Synth for SineWave { fn sample(&self, t: Flick) -> Sample { (self.volume * (t.0 as f64 * self.pitch / bloop::FLICKS_PER_SECOND as f64 * 2.0 * PI).sin() * 127.0) as Sample } } fn main() { let music = Music::Prim(Primitive::Note( Flick::from_seconds(5.0), SineWave { pitch: 440.0, volume: 0.5 })); bloop::spawn_cpal_player(Arc::new(Mutex::new(music))); thread::sleep(time::Duration::from_secs(5)); }
true
de0db21ffc3c29547ebde924652e880713ff41d2
Rust
manisenkov/advent-of-code
/rust/src/bin/2022_12.rs
UTF-8
3,199
3.125
3
[]
no_license
use std::collections::{HashMap, VecDeque}; use aoc::solution::Solution; struct Day2022_12 { dist_map: HashMap<(isize, isize), usize>, height_map: HashMap<(isize, isize), usize>, possible_starts: Vec<(isize, isize)>, start_point: (isize, isize), end_point: (isize, isize), } impl Solution<usize> for Day2022_12 { fn new() -> Day2022_12 { Day2022_12 { dist_map: HashMap::new(), height_map: HashMap::new(), possible_starts: vec![], start_point: (0, 0), end_point: (0, 0), } } fn init(&mut self, input: &str) { for (row, line) in input.lines().enumerate() { self.height_map .extend(line.trim().chars().enumerate().map(|(col, c)| { let pos = (row as isize, col as isize); let height = if c == 'S' { self.start_point = pos; 1 } else if c == 'E' { self.end_point = pos; 26 } else { (c as usize) - ('a' as usize) + 1 }; if c == 'a' || c == 'S' { self.possible_starts.push(pos); } (pos, height) })); } } fn part_one(&mut self) -> usize { let mut queue = VecDeque::<(isize, isize)>::from([self.end_point]); self.dist_map.insert(self.end_point, 0); while let Some(cur_pos) = queue.pop_front() { let cur_dist = self.dist_map[&cur_pos]; let cur_height = self.height_map[&cur_pos]; let to_visit: Vec<(isize, isize)> = vec![ (cur_pos.0 - 1, cur_pos.1), (cur_pos.0 + 1, cur_pos.1), (cur_pos.0, cur_pos.1 - 1), (cur_pos.0, cur_pos.1 + 1), ] .iter() .filter(|p| self.height_map.contains_key(p)) .filter(|p| self.height_map[p] + 1 >= cur_height) .map(|p| *p) .collect(); for target_pos in to_visit.iter() { let next_dist = cur_dist + 1; let target_dist = *self.dist_map.get(&target_pos).unwrap_or(&0x7fffffff); if next_dist < target_dist { self.dist_map.insert(*target_pos, next_dist); queue.push_back(*target_pos); } } } self.dist_map[&self.start_point] } fn part_two(&mut self) -> usize { self.possible_starts .iter() .map(|p| *self.dist_map.get(p).unwrap_or(&0x7fffffff)) .min() .unwrap() } } fn main() { let mut sol = Day2022_12::new(); sol.run_on_stdin() } #[cfg(test)] mod tests { use crate::Day2022_12; use 
aoc::solution::Solution; const TEST_INPUT: &str = include_str!("../../examples/2022_12.txt"); #[test] fn test_1() { let mut sol = Day2022_12::new(); sol.init(TEST_INPUT); assert_eq!(sol.part_one(), 31); assert_eq!(sol.part_two(), 29); } }
true
46c046b6b5c1f5fe63d765622d82de10cffd2e68
Rust
DrGodCarl/AdventOfCode
/2022/src/bin/day02.rs
UTF-8
2,759
3.359375
3
[]
no_license
use anyhow::Result; use parse_display::FromStr; use utils::read_lines; #[derive(FromStr, PartialEq, Debug, Clone, Copy)] enum Throw { #[from_str(regex = "[AX]")] Rock, #[from_str(regex = "[BY]")] Paper, #[from_str(regex = "[CZ]")] Scissors, } #[derive(FromStr, PartialEq, Debug, Clone, Copy)] enum Outcome { #[display("X")] Lose, #[display("Y")] Draw, #[display("Z")] Win, } #[derive(FromStr, PartialEq, Debug)] #[display("{opponent} {me}")] struct Round { opponent: Throw, me: Throw, } #[derive(FromStr, PartialEq, Debug)] #[display("{0} {1}")] struct RoundStrategy(Throw, Outcome); impl Throw { fn score(&self) -> u32 { match self { Throw::Rock => 1, Throw::Paper => 2, Throw::Scissors => 3, } } } impl Round { fn score(&self) -> u32 { self.me.score() + match (self.opponent, self.me) { (Throw::Rock, Throw::Paper) => 6, (Throw::Rock, Throw::Scissors) => 0, (Throw::Paper, Throw::Rock) => 0, (Throw::Paper, Throw::Scissors) => 6, (Throw::Scissors, Throw::Rock) => 6, (Throw::Scissors, Throw::Paper) => 0, _ => 3, } } } impl RoundStrategy { fn to_round(&self) -> Round { let to_throw = match self { RoundStrategy(_, Outcome::Draw) => self.0, RoundStrategy(Throw::Rock, Outcome::Lose) => Throw::Scissors, RoundStrategy(Throw::Rock, Outcome::Win) => Throw::Paper, RoundStrategy(Throw::Paper, Outcome::Lose) => Throw::Rock, RoundStrategy(Throw::Paper, Outcome::Win) => Throw::Scissors, RoundStrategy(Throw::Scissors, Outcome::Lose) => Throw::Paper, RoundStrategy(Throw::Scissors, Outcome::Win) => Throw::Rock, }; Round { opponent: self.0, me: to_throw, } } } fn part1(rounds: &[Round]) -> u32 { rounds.iter().map(|r| r.score()).sum() } fn part2(round_strats: &[RoundStrategy]) -> u32 { round_strats.iter().map(|r| r.to_round().score()).sum() } fn main() -> Result<()> { let rounds = read_lines("input/day02.txt")?; let result = part1(&rounds); println!("part 1: {}", result); let rounds_strats = read_lines("input/day02.txt")?; let result = part2(&rounds_strats); println!("part 2: {}", result); 
Ok(()) } #[test] fn test() -> Result<()> { let rounds = read_lines("input/test/day02.txt")?; let result = part1(&rounds); assert_eq!(result, 15); let rounds_strats = read_lines("input/test/day02.txt")?; let result = part2(&rounds_strats); assert_eq!(result, 12); Ok(()) }
true
67fd8ef0764e65987f01b8810294664f8e6708a0
Rust
RustAudio/cpal
/src/host/jack/stream.rs
UTF-8
18,081
2.53125
3
[ "Apache-2.0" ]
permissive
use crate::traits::StreamTrait; use crate::ChannelCount; use std::sync::atomic::{AtomicBool, Ordering}; use std::sync::{Arc, Mutex}; use crate::{ BackendSpecificError, Data, InputCallbackInfo, OutputCallbackInfo, PauseStreamError, PlayStreamError, SampleRate, StreamError, }; use super::JACK_SAMPLE_FORMAT; type ErrorCallbackPtr = Arc<Mutex<dyn FnMut(StreamError) + Send + 'static>>; pub struct Stream { // TODO: It might be faster to send a message when playing/pausing than to check this every iteration playing: Arc<AtomicBool>, async_client: jack::AsyncClient<JackNotificationHandler, LocalProcessHandler>, // Port names are stored in order to connect them to other ports in jack automatically input_port_names: Vec<String>, output_port_names: Vec<String>, } impl Stream { // TODO: Return error messages pub fn new_input<D, E>( client: jack::Client, channels: ChannelCount, data_callback: D, mut error_callback: E, ) -> Stream where D: FnMut(&Data, &InputCallbackInfo) + Send + 'static, E: FnMut(StreamError) + Send + 'static, { let mut ports = vec![]; let mut port_names: Vec<String> = vec![]; // Create ports for i in 0..channels { let port_try = client.register_port(&format!("in_{}", i), jack::AudioIn::default()); match port_try { Ok(port) => { // Get the port name in order to later connect it automatically if let Ok(port_name) = port.name() { port_names.push(port_name); } // Store the port into a Vec to move to the ProcessHandler ports.push(port); } Err(e) => { // If port creation failed, send the error back via the error_callback error_callback( BackendSpecificError { description: e.to_string(), } .into(), ); } } } let playing = Arc::new(AtomicBool::new(true)); let error_callback_ptr = Arc::new(Mutex::new(error_callback)) as ErrorCallbackPtr; let input_process_handler = LocalProcessHandler::new( vec![], ports, SampleRate(client.sample_rate() as u32), client.buffer_size() as usize, Some(Box::new(data_callback)), None, playing.clone(), Arc::clone(&error_callback_ptr), ); let 
notification_handler = JackNotificationHandler::new(error_callback_ptr); let async_client = client .activate_async(notification_handler, input_process_handler) .unwrap(); Stream { playing, async_client, input_port_names: port_names, output_port_names: vec![], } } pub fn new_output<D, E>( client: jack::Client, channels: ChannelCount, data_callback: D, mut error_callback: E, ) -> Stream where D: FnMut(&mut Data, &OutputCallbackInfo) + Send + 'static, E: FnMut(StreamError) + Send + 'static, { let mut ports = vec![]; let mut port_names: Vec<String> = vec![]; // Create ports for i in 0..channels { let port_try = client.register_port(&format!("out_{}", i), jack::AudioOut::default()); match port_try { Ok(port) => { // Get the port name in order to later connect it automatically if let Ok(port_name) = port.name() { port_names.push(port_name); } // Store the port into a Vec to move to the ProcessHandler ports.push(port); } Err(e) => { // If port creation failed, send the error back via the error_callback error_callback( BackendSpecificError { description: e.to_string(), } .into(), ); } } } let playing = Arc::new(AtomicBool::new(true)); let error_callback_ptr = Arc::new(Mutex::new(error_callback)) as ErrorCallbackPtr; let output_process_handler = LocalProcessHandler::new( ports, vec![], SampleRate(client.sample_rate() as u32), client.buffer_size() as usize, None, Some(Box::new(data_callback)), playing.clone(), Arc::clone(&error_callback_ptr), ); let notification_handler = JackNotificationHandler::new(error_callback_ptr); let async_client = client .activate_async(notification_handler, output_process_handler) .unwrap(); Stream { playing, async_client, input_port_names: vec![], output_port_names: port_names, } } /// Connect to the standard system outputs in jack, system:playback_1 and system:playback_2 /// This has to be done after the client is activated, doing it just after creating the ports doesn't work. 
pub fn connect_to_system_outputs(&mut self) { // Get the system ports let system_ports = self.async_client.as_client().ports( Some("system:playback_.*"), None, jack::PortFlags::empty(), ); // Connect outputs from this client to the system playback inputs for i in 0..self.output_port_names.len() { if i >= system_ports.len() { break; } match self .async_client .as_client() .connect_ports_by_name(&self.output_port_names[i], &system_ports[i]) { Ok(_) => (), Err(e) => println!("Unable to connect to port with error {}", e), } } } /// Connect to the standard system outputs in jack, system:capture_1 and system:capture_2 /// This has to be done after the client is activated, doing it just after creating the ports doesn't work. pub fn connect_to_system_inputs(&mut self) { // Get the system ports let system_ports = self.async_client.as_client().ports( Some("system:capture_.*"), None, jack::PortFlags::empty(), ); // Connect outputs from this client to the system playback inputs for i in 0..self.input_port_names.len() { if i >= system_ports.len() { break; } match self .async_client .as_client() .connect_ports_by_name(&system_ports[i], &self.input_port_names[i]) { Ok(_) => (), Err(e) => println!("Unable to connect to port with error {}", e), } } } } impl StreamTrait for Stream { fn play(&self) -> Result<(), PlayStreamError> { self.playing.store(true, Ordering::SeqCst); Ok(()) } fn pause(&self) -> Result<(), PauseStreamError> { self.playing.store(false, Ordering::SeqCst); Ok(()) } } struct LocalProcessHandler { /// No new ports are allowed to be created after the creation of the LocalProcessHandler as that would invalidate the buffer sizes out_ports: Vec<jack::Port<jack::AudioOut>>, in_ports: Vec<jack::Port<jack::AudioIn>>, sample_rate: SampleRate, buffer_size: usize, input_data_callback: Option<Box<dyn FnMut(&Data, &InputCallbackInfo) + Send + 'static>>, output_data_callback: Option<Box<dyn FnMut(&mut Data, &OutputCallbackInfo) + Send + 'static>>, // JACK audio samples are 
32-bit float (unless you do some custom dark magic) temp_input_buffer: Vec<f32>, temp_output_buffer: Vec<f32>, playing: Arc<AtomicBool>, creation_timestamp: std::time::Instant, /// This should not be called on `process`, only on `buffer_size` because it can block. error_callback_ptr: ErrorCallbackPtr, } impl LocalProcessHandler { fn new( out_ports: Vec<jack::Port<jack::AudioOut>>, in_ports: Vec<jack::Port<jack::AudioIn>>, sample_rate: SampleRate, buffer_size: usize, input_data_callback: Option<Box<dyn FnMut(&Data, &InputCallbackInfo) + Send + 'static>>, output_data_callback: Option< Box<dyn FnMut(&mut Data, &OutputCallbackInfo) + Send + 'static>, >, playing: Arc<AtomicBool>, error_callback_ptr: ErrorCallbackPtr, ) -> Self { // These may be reallocated in the `buffer_size` callback. let temp_input_buffer = vec![0.0; in_ports.len() * buffer_size]; let temp_output_buffer = vec![0.0; out_ports.len() * buffer_size]; LocalProcessHandler { out_ports, in_ports, sample_rate, buffer_size, input_data_callback, output_data_callback, temp_input_buffer, temp_output_buffer, playing, creation_timestamp: std::time::Instant::now(), error_callback_ptr, } } } fn temp_buffer_to_data(temp_input_buffer: &mut Vec<f32>, total_buffer_size: usize) -> Data { let slice = &temp_input_buffer[0..total_buffer_size]; let data = slice.as_ptr() as *mut (); let len = total_buffer_size; let data = unsafe { Data::from_parts(data, len, JACK_SAMPLE_FORMAT) }; data } impl jack::ProcessHandler for LocalProcessHandler { fn process(&mut self, _: &jack::Client, process_scope: &jack::ProcessScope) -> jack::Control { if !self.playing.load(Ordering::SeqCst) { return jack::Control::Continue; } // This should be equal to self.buffer_size, but the implementation will // work even if it is less. Will panic in `temp_buffer_to_data` if greater. 
let current_frame_count = process_scope.n_frames() as usize; // Get timestamp data let cycle_times = process_scope.cycle_times(); let current_start_usecs = match cycle_times { Ok(times) => times.current_usecs, Err(_) => { // jack was unable to get the current time information // Fall back to using Instants let now = std::time::Instant::now(); let duration = now.duration_since(self.creation_timestamp); duration.as_micros() as u64 } }; let start_cycle_instant = micros_to_stream_instant(current_start_usecs); let start_callback_instant = start_cycle_instant .add(frames_to_duration( process_scope.frames_since_cycle_start() as usize, self.sample_rate, )) .expect("`playback` occurs beyond representation supported by `StreamInstant`"); if let Some(input_callback) = &mut self.input_data_callback { // Let's get the data from the input ports and run the callback let num_in_channels = self.in_ports.len(); // Read the data from the input ports into the temporary buffer // Go through every channel and store its data in the temporary input buffer for ch_ix in 0..num_in_channels { let input_channel = &self.in_ports[ch_ix].as_slice(process_scope); for i in 0..current_frame_count { self.temp_input_buffer[ch_ix + i * num_in_channels] = input_channel[i]; } } // Create a slice of exactly current_frame_count frames let data = temp_buffer_to_data( &mut self.temp_input_buffer, current_frame_count * num_in_channels, ); // Create timestamp let frames_since_cycle_start = process_scope.frames_since_cycle_start() as usize; let duration_since_cycle_start = frames_to_duration(frames_since_cycle_start, self.sample_rate); let callback = start_callback_instant .add(duration_since_cycle_start) .expect("`playback` occurs beyond representation supported by `StreamInstant`"); let capture = start_callback_instant; let timestamp = crate::InputStreamTimestamp { callback, capture }; let info = crate::InputCallbackInfo { timestamp }; input_callback(&data, &info); } if let Some(output_callback) = &mut 
self.output_data_callback { let num_out_channels = self.out_ports.len(); // Create a slice of exactly current_frame_count frames let mut data = temp_buffer_to_data( &mut self.temp_output_buffer, current_frame_count * num_out_channels, ); // Create timestamp let frames_since_cycle_start = process_scope.frames_since_cycle_start() as usize; let duration_since_cycle_start = frames_to_duration(frames_since_cycle_start, self.sample_rate); let callback = start_callback_instant .add(duration_since_cycle_start) .expect("`playback` occurs beyond representation supported by `StreamInstant`"); let buffer_duration = frames_to_duration(current_frame_count, self.sample_rate); let playback = start_cycle_instant .add(buffer_duration) .expect("`playback` occurs beyond representation supported by `StreamInstant`"); let timestamp = crate::OutputStreamTimestamp { callback, playback }; let info = crate::OutputCallbackInfo { timestamp }; output_callback(&mut data, &info); // Deinterlace for ch_ix in 0..num_out_channels { let output_channel = &mut self.out_ports[ch_ix].as_mut_slice(process_scope); for i in 0..current_frame_count { output_channel[i] = self.temp_output_buffer[ch_ix + i * num_out_channels]; } } } // Continue as normal jack::Control::Continue } fn buffer_size(&mut self, _: &jack::Client, size: jack::Frames) -> jack::Control { // The `buffer_size` callback is actually called on the process thread, but // it does not need to be suitable for real-time use. Thus we can simply allocate // new buffers here. It is also fine to call the error callback. 
// Details: https://github.com/RustAudio/rust-jack/issues/137 let new_size = size as usize; if new_size != self.buffer_size { self.buffer_size = new_size; self.temp_input_buffer = vec![0.0; self.in_ports.len() * new_size]; self.temp_output_buffer = vec![0.0; self.out_ports.len() * new_size]; let description = format!("buffer size changed to: {}", new_size); if let Ok(mut mutex_guard) = self.error_callback_ptr.lock() { let err = &mut *mutex_guard; err(BackendSpecificError { description }.into()); } } jack::Control::Continue } } fn micros_to_stream_instant(micros: u64) -> crate::StreamInstant { let nanos = micros * 1000; let secs = micros / 1_000_000; let subsec_nanos = nanos - secs * 1_000_000_000; crate::StreamInstant::new(secs as i64, subsec_nanos as u32) } // Convert the given duration in frames at the given sample rate to a `std::time::Duration`. fn frames_to_duration(frames: usize, rate: crate::SampleRate) -> std::time::Duration { let secsf = frames as f64 / rate.0 as f64; let secs = secsf as u64; let nanos = ((secsf - secs as f64) * 1_000_000_000.0) as u32; std::time::Duration::new(secs, nanos) } /// Receives notifications from the JACK server. It is unclear if this may be run concurrent with itself under JACK2 specs /// so it needs to be Sync. 
struct JackNotificationHandler { error_callback_ptr: ErrorCallbackPtr, init_sample_rate_flag: Arc<AtomicBool>, } impl JackNotificationHandler { pub fn new(error_callback_ptr: ErrorCallbackPtr) -> Self { JackNotificationHandler { error_callback_ptr, init_sample_rate_flag: Arc::new(AtomicBool::new(false)), } } fn send_error(&mut self, description: String) { // This thread isn't the audio thread, it's fine to block if let Ok(mut mutex_guard) = self.error_callback_ptr.lock() { let err = &mut *mutex_guard; err(BackendSpecificError { description }.into()); } } } impl jack::NotificationHandler for JackNotificationHandler { fn shutdown(&mut self, _status: jack::ClientStatus, reason: &str) { self.send_error(format!("JACK was shut down for reason: {}", reason)); } fn sample_rate(&mut self, _: &jack::Client, srate: jack::Frames) -> jack::Control { match self.init_sample_rate_flag.load(Ordering::SeqCst) { false => { // One of these notifications is sent every time a client is started. self.init_sample_rate_flag.store(true, Ordering::SeqCst); jack::Control::Continue } true => { self.send_error(format!("sample rate changed to: {}", srate)); // Since CPAL currently has no way of signaling a sample rate change in order to make // all necessary changes that would bring we choose to quit. jack::Control::Quit } } } fn xrun(&mut self, _: &jack::Client) -> jack::Control { self.send_error(String::from("xrun (buffer over or under run)")); jack::Control::Continue } }
true
8156ff9c250759dff1e30cc22e3c2d84e2deb7a9
Rust
Oberacda/Borderlands2SaveEditor
/borderlands2/src/lib.rs
UTF-8
2,828
2.578125
3
[]
no_license
mod protos; mod hufman; extern crate protobuf; extern crate sha1; use std::fs; use std::fs::File; use std::io::Read; use std::convert::TryFrom; use protos::WillowTwoPlayerSaveGame::{WillowTwoPlayerSaveGame}; use hufman::hufman::decode; use sha1::{Sha1, Digest}; /// /// pub fn load_save(save_file_path: std::string::String) -> Result<WillowTwoPlayerSaveGame, std::io::Error> { let metadata = fs::metadata(&save_file_path)?; let _file_len = metadata.len(); let mut file = File::open(&save_file_path)?; let mut buffer = Vec::new(); file.read_to_end(&mut buffer)?; let buffer_checksum = &buffer[..20]; let buffer_data = &buffer[20..]; let mut hasher = Sha1::new(); hasher.input(&buffer_data); let res = hasher.result(); assert_eq!(res[..], buffer_checksum[..]); let mut uncompressed_size_bytes = [0; 4]; uncompressed_size_bytes.clone_from_slice(&buffer_data[..4]); let compressed_data = &buffer_data[4..]; let mut inner_size_bytes = [0; 4]; let mut magic_number_bytes = [0; 3]; let mut version_bytes = [0; 4]; let mut hash_bytes = [0; 4]; let mut inner_uncompressed_size_bytes = [0; 4]; unsafe { let uncompressed_size_int = std::mem::transmute::<[u8; 4], u32>(uncompressed_size_bytes).to_be() as u64; println!("Uncompressed size: {}", uncompressed_size_int); let uncompressed_size = usize::try_from(uncompressed_size_int).unwrap(); let uncompressed_data = minilzo::decompress(&compressed_data[..], uncompressed_size).unwrap(); inner_size_bytes.clone_from_slice(&uncompressed_data[..4]); let inner_size = std::mem::transmute::<[u8; 4], u32>(inner_size_bytes).to_be() as u64; magic_number_bytes.clone_from_slice(&uncompressed_data[4..7]); version_bytes.clone_from_slice(&uncompressed_data[7..11]); let version = std::mem::transmute::<[u8; 4], u32>(version_bytes).to_le() as u64; hash_bytes.clone_from_slice(&uncompressed_data[11..15]); let hash = std::mem::transmute::<[u8; 4], u32>(hash_bytes).to_le() as u64; inner_uncompressed_size_bytes.clone_from_slice(&uncompressed_data[15..19]); let 
inner_uncompressed_size = std::mem::transmute::<[u8; 4], i32>(inner_uncompressed_size_bytes ).to_le() as usize; let inner_compressed_data = &uncompressed_data[19..]; let inner_uncompressed_data = decode(&inner_compressed_data, inner_uncompressed_size); let save_game_res = protobuf::parse_from_bytes::<WillowTwoPlayerSaveGame>(inner_uncompressed_data.as_ref()); let save_game = save_game_res.unwrap(); return Ok(save_game); } } #[cfg(test)] mod tests { #[test] fn load_save_test() { let _save_file = super::load_save("./resources/Save0001.sav".to_string()); } }
true
65cd82c314248b760bcd019a481952fc39a2cac3
Rust
enricoschaaf/exercism
/rust/pascals-triangle/src/lib.rs
UTF-8
745
3.4375
3
[]
no_license
pub struct PascalsTriangle { rows: Vec<Vec<u32>>, } impl PascalsTriangle { pub fn new(row_count: u32) -> Self { let rows = (1..=row_count).fold(Vec::new(), |acc, c| match c { 1 => vec![vec![1]], _ => { let row = [ vec![1], acc.last() .unwrap() .windows(2) .map(|x| x.iter().sum()) .collect(), vec![1], ] .concat(); [acc, vec![row]].concat() } }); PascalsTriangle { rows } } pub fn rows(&self) -> Vec<Vec<u32>> { self.rows.clone() } }
true
e603ac00a01245c6f0ce3a5efc1a74afc7b91eaa
Rust
pac85/vulkano-events
/vulkano/src/render_pass/render_pass.rs
UTF-8
29,354
2.640625
3
[ "MIT", "LicenseRef-scancode-unknown-license-reference", "Apache-2.0" ]
permissive
// Copyright (c) 2016 The vulkano developers // Licensed under the Apache License, Version 2.0 // <LICENSE-APACHE or // https://www.apache.org/licenses/LICENSE-2.0> or the MIT // license <LICENSE-MIT or https://opensource.org/licenses/MIT>, // at your option. All files in the project carrying such // notice may not be copied, modified, or distributed except // according to those terms. use crate::check_errors; use crate::device::Device; use crate::device::DeviceOwned; use crate::format::FormatTy; use crate::image::ImageLayout; use crate::pipeline::shader::ShaderInterface; use crate::render_pass::AttachmentDesc; use crate::render_pass::LoadOp; use crate::render_pass::RenderPassDesc; use crate::render_pass::SubpassDesc; use crate::vk; use crate::Error; use crate::OomError; use crate::VulkanObject; use smallvec::SmallVec; use std::error; use std::fmt; use std::marker::PhantomData; use std::mem::MaybeUninit; use std::ptr; use std::sync::Arc; use std::sync::Mutex; /// An object representing the discrete steps in which rendering is done. /// /// A render pass in Vulkan is made up of three parts: /// - A list of attachments, which are image views that are inputs, outputs or intermediate stages /// in the rendering process. /// - One or more subpasses, which are the steps in which the rendering process, takes place, /// and the attachments that are used for each step. /// - Dependencies, which describe how the input and output data of each subpass is to be passed /// from one subpass to the next. /// /// In order to create a render pass, you must create a `RenderPassDesc` object that describes the /// render pass, then pass it to `RenderPass::new`. 
/// /// ``` /// use vulkano::render_pass::RenderPass; /// use vulkano::render_pass::RenderPassDesc; /// /// # let device: std::sync::Arc<vulkano::device::Device> = return; /// let desc = RenderPassDesc::empty(); /// let render_pass = RenderPass::new(device.clone(), desc).unwrap(); /// ``` /// /// This example creates a render pass with no attachment and one single subpass that doesn't draw /// on anything. While it's sometimes useful, most of the time it's not what you want. /// /// The easiest way to create a "real" render pass is to use the `single_pass_renderpass!` macro. /// /// ``` /// # #[macro_use] extern crate vulkano; /// # fn main() { /// # let device: std::sync::Arc<vulkano::device::Device> = return; /// use vulkano::format::Format; /// /// let render_pass = single_pass_renderpass!(device.clone(), /// attachments: { /// // `foo` is a custom name we give to the first and only attachment. /// foo: { /// load: Clear, /// store: Store, /// format: Format::R8G8B8A8Unorm, /// samples: 1, /// } /// }, /// pass: { /// color: [foo], // Repeat the attachment name here. /// depth_stencil: {} /// } /// ).unwrap(); /// # } /// ``` /// /// See the documentation of the macro for more details. TODO: put link here pub struct RenderPass { // The internal Vulkan object. render_pass: vk::RenderPass, // Device this render pass was created from. device: Arc<Device>, // Description of the render pass. desc: RenderPassDesc, // Cache of the granularity of the render pass. granularity: Mutex<Option<[u32; 2]>>, } impl RenderPass { /// Builds a new render pass. /// /// # Panic /// /// - Can panic if it detects some violations in the restrictions. Only inexpensive checks are /// performed. `debug_assert!` is used, so some restrictions are only checked in debug /// mode. 
    ///
    pub fn new(
        device: Arc<Device>,
        description: RenderPassDesc,
    ) -> Result<RenderPass, RenderPassCreationError> {
        // Raw function pointers into the Vulkan loader for this device.
        let vk = device.pointers();

        // If the first use of an attachment in this render pass is as an input attachment, and
        // the attachment is not also used as a color or depth/stencil attachment in the same
        // subpass, then loadOp must not be VK_ATTACHMENT_LOAD_OP_CLEAR
        debug_assert!(description.attachments().into_iter().enumerate().all(
            |(atch_num, attachment)| {
                if attachment.load != LoadOp::Clear {
                    return true;
                }

                for p in description.subpasses() {
                    // Used as a color attachment first: Clear is allowed.
                    if p.color_attachments
                        .iter()
                        .find(|&&(a, _)| a == atch_num)
                        .is_some()
                    {
                        return true;
                    }
                    // Used as the depth/stencil attachment first: Clear is allowed.
                    if let Some((a, _)) = p.depth_stencil {
                        if a == atch_num {
                            return true;
                        }
                    }
                    // First use is as an input attachment: Clear is forbidden.
                    if p.input_attachments
                        .iter()
                        .find(|&&(a, _)| a == atch_num)
                        .is_some()
                    {
                        return false;
                    }
                }

                true
            }
        ));

        // Translate each attachment description into the raw Vulkan struct.
        let attachments = description
            .attachments()
            .iter()
            .map(|attachment| {
                debug_assert!(attachment.samples.is_power_of_two());

                vk::AttachmentDescription {
                    flags: 0, // FIXME: may alias flag
                    format: attachment.format as u32,
                    samples: attachment.samples,
                    loadOp: attachment.load as u32,
                    storeOp: attachment.store as u32,
                    stencilLoadOp: attachment.stencil_load as u32,
                    stencilStoreOp: attachment.stencil_store as u32,
                    initialLayout: attachment.initial_layout as u32,
                    finalLayout: attachment.final_layout as u32,
                }
            })
            .collect::<SmallVec<[_; 16]>>();

        // We need to pass pointers to vkAttachmentReference structs when creating the render pass.
        // Therefore we need to allocate them in advance.
        //
        // This block allocates, for each pass, in order, all color attachment references, then all
        // input attachment references, then all resolve attachment references, then the depth
        // stencil attachment reference.
        //
        // NOTE: raw pointers into this SmallVec are taken below, so it must not be
        // mutated or moved until `vkCreateRenderPass` has returned.
        let attachment_references = description
            .subpasses()
            .iter()
            .flat_map(|pass| {
                // Performing some validation with debug asserts.
                // Resolve list must be empty or match the color list one-to-one.
                debug_assert!(
                    pass.resolve_attachments.is_empty()
                        || pass.resolve_attachments.len() == pass.color_attachments.len()
                );
                // Resolve targets must be single-sampled.
                debug_assert!(pass
                    .resolve_attachments
                    .iter()
                    .all(|a| attachments[a.0].samples == 1));
                // When resolving, the color sources must be multisampled.
                debug_assert!(
                    pass.resolve_attachments.is_empty()
                        || pass
                            .color_attachments
                            .iter()
                            .all(|a| attachments[a.0].samples > 1)
                );
                // Resolve target format must match its color source format.
                debug_assert!(
                    pass.resolve_attachments.is_empty()
                        || pass
                            .resolve_attachments
                            .iter()
                            .zip(pass.color_attachments.iter())
                            .all(|(r, c)| { attachments[r.0].format == attachments[c.0].format })
                );
                // An attachment used in the subpass must not also be preserved by it.
                debug_assert!(pass
                    .color_attachments
                    .iter()
                    .cloned()
                    .chain(pass.depth_stencil.clone().into_iter())
                    .chain(pass.input_attachments.iter().cloned())
                    .chain(pass.resolve_attachments.iter().cloned())
                    .all(|(a, _)| {
                        pass.preserve_attachments
                            .iter()
                            .find(|&&b| a == b)
                            .is_none()
                    }));
                // If an attachment is both input and color/depth, layouts must agree.
                debug_assert!(pass
                    .color_attachments
                    .iter()
                    .cloned()
                    .chain(pass.depth_stencil.clone().into_iter())
                    .all(|(atch, layout)| {
                        if let Some(r) = pass.input_attachments.iter().find(|r| r.0 == atch) {
                            r.1 == layout
                        } else {
                            true
                        }
                    }));

                let resolve = pass.resolve_attachments.iter().map(|&(offset, img_la)| {
                    debug_assert!(offset < attachments.len());
                    vk::AttachmentReference {
                        attachment: offset as u32,
                        layout: img_la as u32,
                    }
                });

                let color = pass.color_attachments.iter().map(|&(offset, img_la)| {
                    debug_assert!(offset < attachments.len());
                    vk::AttachmentReference {
                        attachment: offset as u32,
                        layout: img_la as u32,
                    }
                });

                let input = pass.input_attachments.iter().map(|&(offset, img_la)| {
                    debug_assert!(offset < attachments.len());
                    vk::AttachmentReference {
                        attachment: offset as u32,
                        layout: img_la as u32,
                    }
                });

                let depthstencil = if let Some((offset, img_la)) = pass.depth_stencil {
                    Some(vk::AttachmentReference {
                        attachment: offset as u32,
                        layout: img_la as u32,
                    })
                } else {
                    None
                }
                .into_iter();

                // Order matters: the pointer arithmetic below assumes
                // color, then input, then resolve, then depth/stencil.
                color.chain(input).chain(resolve).chain(depthstencil)
            })
            .collect::<SmallVec<[_; 16]>>();

        // Same as `attachment_references` but only for the preserve attachments.
        // This is separate because attachment references are u32s and not `vkAttachmentReference`
        // structs.
        let preserve_attachments_references = description
            .subpasses()
            .iter()
            .flat_map(|pass| {
                pass.preserve_attachments
                    .iter()
                    .map(|&offset| offset as u32)
            })
            .collect::<SmallVec<[_; 16]>>();

        // Now iterating over passes.
        let passes = unsafe {
            // `ref_index` and `preserve_ref_index` are increased during the loop and point to the
            // next element to use in respectively `attachment_references` and
            // `preserve_attachments_references`.
            let mut ref_index = 0usize;
            let mut preserve_ref_index = 0usize;
            let mut out: SmallVec<[_; 16]> = SmallVec::new();

            for pass in description.subpasses() {
                if pass.color_attachments.len() as u32
                    > device.physical_device().limits().max_color_attachments()
                {
                    return Err(RenderPassCreationError::ColorAttachmentsLimitExceeded);
                }

                // Pointers into `attachment_references`, consumed in the same
                // color/input/resolve/depth order the references were pushed in.
                let color_attachments = attachment_references.as_ptr().offset(ref_index as isize);
                ref_index += pass.color_attachments.len();
                let input_attachments = attachment_references.as_ptr().offset(ref_index as isize);
                ref_index += pass.input_attachments.len();
                let resolve_attachments = attachment_references.as_ptr().offset(ref_index as isize);
                ref_index += pass.resolve_attachments.len();

                let depth_stencil = if pass.depth_stencil.is_some() {
                    let a = attachment_references.as_ptr().offset(ref_index as isize);
                    ref_index += 1;
                    a
                } else {
                    ptr::null()
                };

                let preserve_attachments = preserve_attachments_references
                    .as_ptr()
                    .offset(preserve_ref_index as isize);
                preserve_ref_index += pass.preserve_attachments.len();

                out.push(vk::SubpassDescription {
                    flags: 0, // reserved
                    pipelineBindPoint: vk::PIPELINE_BIND_POINT_GRAPHICS,
                    inputAttachmentCount: pass.input_attachments.len() as u32,
                    pInputAttachments: if pass.input_attachments.is_empty() {
                        ptr::null()
                    } else {
                        input_attachments
                    },
                    colorAttachmentCount: pass.color_attachments.len() as u32,
                    pColorAttachments: if pass.color_attachments.is_empty() {
                        ptr::null()
                    } else {
                        color_attachments
                    },
                    pResolveAttachments: if pass.resolve_attachments.is_empty() {
                        ptr::null()
                    } else {
                        resolve_attachments
                    },
                    pDepthStencilAttachment: depth_stencil,
                    preserveAttachmentCount: pass.preserve_attachments.len() as u32,
                    pPreserveAttachments: if pass.preserve_attachments.is_empty() {
                        ptr::null()
                    } else {
                        preserve_attachments
                    },
                });
            }

            assert!(!out.is_empty());
            // If these assertions fail, there's a serious bug in the code above ^.
            debug_assert!(ref_index == attachment_references.len());
            debug_assert!(preserve_ref_index == preserve_attachments_references.len());

            out
        };

        // Translate the dependency descriptions into raw Vulkan structs.
        let dependencies = description
            .dependencies()
            .iter()
            .map(|dependency| {
                debug_assert!(
                    dependency.source_subpass as u32 == vk::SUBPASS_EXTERNAL
                        || dependency.source_subpass < passes.len()
                );
                debug_assert!(
                    dependency.destination_subpass as u32 == vk::SUBPASS_EXTERNAL
                        || dependency.destination_subpass < passes.len()
                );

                vk::SubpassDependency {
                    srcSubpass: dependency.source_subpass as u32,
                    dstSubpass: dependency.destination_subpass as u32,
                    srcStageMask: dependency.source_stages.into(),
                    dstStageMask: dependency.destination_stages.into(),
                    srcAccessMask: dependency.source_access.into(),
                    dstAccessMask: dependency.destination_access.into(),
                    dependencyFlags: if dependency.by_region {
                        vk::DEPENDENCY_BY_REGION_BIT
                    } else {
                        0
                    },
                }
            })
            .collect::<SmallVec<[_; 16]>>();

        // Finally build the Vulkan object; all the SmallVecs above must stay
        // alive and un-moved while this runs, since `infos` borrows raw pointers
        // into them.
        let render_pass = unsafe {
            let infos = vk::RenderPassCreateInfo {
                sType: vk::STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO,
                pNext: ptr::null(),
                flags: 0, // reserved
                attachmentCount: attachments.len() as u32,
                pAttachments: if attachments.is_empty() {
                    ptr::null()
                } else {
                    attachments.as_ptr()
                },
                subpassCount: passes.len() as u32,
                pSubpasses: if passes.is_empty() {
                    ptr::null()
                } else {
                    passes.as_ptr()
                },
                dependencyCount: dependencies.len() as u32,
                pDependencies: if dependencies.is_empty() {
                    ptr::null()
                } else {
                    dependencies.as_ptr()
                },
            };

            let mut output = MaybeUninit::uninit();
            check_errors(vk.CreateRenderPass(
                device.internal_object(),
                &infos,
                ptr::null(),
                output.as_mut_ptr(),
            ))?;
            output.assume_init()
        };

        Ok(RenderPass {
            device: device.clone(),
            render_pass,
            desc: description,
            // Granularity is queried lazily; see `granularity()`.
            granularity: Mutex::new(None),
        })
    }

    /// Builds a render pass with one subpass and no attachment.
    ///
    /// This method is useful for quick tests.
    #[inline]
    pub fn empty_single_pass(device: Arc<Device>) -> Result<RenderPass, RenderPassCreationError> {
        RenderPass::new(device, RenderPassDesc::empty())
    }

    /// Returns an opaque, borrow-checked handle to the raw Vulkan render pass.
    #[inline]
    pub fn inner(&self) -> RenderPassSys {
        RenderPassSys(self.render_pass, PhantomData)
    }

    /// Returns the granularity of this render pass.
    ///
    /// If the render area of a render pass in a command buffer is a multiple of this granularity,
    /// then the performance will be optimal. Performances are always optimal for render areas
    /// that cover the whole framebuffer.
    pub fn granularity(&self) -> [u32; 2] {
        // Cached after the first query.
        let mut granularity = self.granularity.lock().unwrap();

        if let Some(&granularity) = granularity.as_ref() {
            return granularity;
        }

        unsafe {
            let vk = self.device.pointers();
            let mut out = MaybeUninit::uninit();
            vk.GetRenderAreaGranularity(
                self.device.internal_object(),
                self.render_pass,
                out.as_mut_ptr(),
            );
            let out = out.assume_init();
            debug_assert_ne!(out.width, 0);
            debug_assert_ne!(out.height, 0);

            let gran = [out.width, out.height];
            *granularity = Some(gran);
            gran
        }
    }

    /// Returns the description of the render pass.
    #[inline]
    pub fn desc(&self) -> &RenderPassDesc {
        &self.desc
    }
}

unsafe impl DeviceOwned for RenderPass {
    #[inline]
    fn device(&self) -> &Arc<Device> {
        &self.device
    }
}

impl fmt::Debug for RenderPass {
    fn fmt(&self, fmt: &mut fmt::Formatter) -> Result<(), fmt::Error> {
        fmt.debug_struct("RenderPass")
            .field("raw", &self.render_pass)
            .field("device", &self.device)
            .field("desc", &self.desc)
            .finish()
    }
}

impl Drop for RenderPass {
    #[inline]
    fn drop(&mut self) {
        // Destroys the underlying Vulkan render pass object.
        unsafe {
            let vk = self.device.pointers();
            vk.DestroyRenderPass(self.device.internal_object(), self.render_pass, ptr::null());
        }
    }
}

/// Opaque object that represents the render pass' internals.
#[derive(Debug, Copy, Clone)]
pub struct RenderPassSys<'a>(vk::RenderPass, PhantomData<&'a ()>);

unsafe impl<'a> VulkanObject for RenderPassSys<'a> {
    type Object = vk::RenderPass;

    const TYPE: vk::ObjectType = vk::OBJECT_TYPE_RENDER_PASS;

    #[inline]
    fn internal_object(&self) -> vk::RenderPass {
        self.0
    }
}

/// Error that can happen when creating a render pass.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum RenderPassCreationError {
    /// Not enough memory.
    OomError(OomError),
    /// The maximum number of color attachments has been exceeded.
    ColorAttachmentsLimitExceeded,
}

impl error::Error for RenderPassCreationError {
    #[inline]
    fn source(&self) -> Option<&(dyn error::Error + 'static)> {
        match *self {
            RenderPassCreationError::OomError(ref err) => Some(err),
            _ => None,
        }
    }
}

impl fmt::Display for RenderPassCreationError {
    #[inline]
    fn fmt(&self, fmt: &mut fmt::Formatter) -> Result<(), fmt::Error> {
        write!(
            fmt,
            "{}",
            match *self {
                RenderPassCreationError::OomError(_) => "not enough memory available",
                RenderPassCreationError::ColorAttachmentsLimitExceeded => {
                    "the maximum number of color attachments has been exceeded"
                }
            }
        )
    }
}

impl From<OomError> for RenderPassCreationError {
    #[inline]
    fn from(err: OomError) -> RenderPassCreationError {
        RenderPassCreationError::OomError(err)
    }
}

impl From<Error> for RenderPassCreationError {
    #[inline]
    fn from(err: Error) -> RenderPassCreationError {
        // Only the two OOM variants are expected from `vkCreateRenderPass`;
        // anything else indicates a bug and panics.
        match err {
            err @ Error::OutOfHostMemory => RenderPassCreationError::OomError(OomError::from(err)),
            err @ Error::OutOfDeviceMemory => {
                RenderPassCreationError::OomError(OomError::from(err))
            }
            _ => panic!("unexpected error: {:?}", err),
        }
    }
}

/// Represents a subpass within a `RenderPass` object.
///
/// This struct doesn't correspond to anything in Vulkan. It is simply an equivalent to a
/// tuple of a render pass and subpass index. Contrary to a tuple, however, the existence of the
/// subpass is checked when the object is created. When you have a `Subpass` you are guaranteed
/// that the given subpass does exist.
#[derive(Debug, Clone)]
pub struct Subpass {
    render_pass: Arc<RenderPass>,
    subpass_id: u32,
}

impl Subpass {
    /// Returns a handle that represents a subpass of a render pass.
    #[inline]
    pub fn from(render_pass: Arc<RenderPass>, id: u32) -> Option<Subpass> {
        // Bounds-check the id so later indexing in `subpass_desc` cannot panic.
        if (id as usize) < render_pass.desc().subpasses().len() {
            Some(Subpass {
                render_pass,
                subpass_id: id,
            })
        } else {
            None
        }
    }

    /// Shortcut to this subpass' description.
    #[inline]
    fn subpass_desc(&self) -> &SubpassDesc {
        &self.render_pass.desc().subpasses()[self.subpass_id as usize]
    }

    /// Shortcut to the description of attachment number `atch_num`.
    #[inline]
    fn attachment_desc(&self, atch_num: usize) -> &AttachmentDesc {
        &self.render_pass.desc().attachments()[atch_num]
    }

    /// Returns the number of color attachments in this subpass.
    #[inline]
    pub fn num_color_attachments(&self) -> u32 {
        self.subpass_desc().color_attachments.len() as u32
    }

    /// Returns true if the subpass has a depth attachment or a depth-stencil attachment.
    #[inline]
    pub fn has_depth(&self) -> bool {
        let subpass_desc = self.subpass_desc();
        let atch_num = match subpass_desc.depth_stencil {
            Some((d, _)) => d,
            None => return false,
        };
        match self.attachment_desc(atch_num).format.ty() {
            FormatTy::Depth => true,
            FormatTy::Stencil => false,
            FormatTy::DepthStencil => true,
            _ => unreachable!(),
        }
    }

    /// Returns true if the subpass has a depth attachment or a depth-stencil attachment whose
    /// layout is not `DepthStencilReadOnlyOptimal`.
    #[inline]
    pub fn has_writable_depth(&self) -> bool {
        let subpass_desc = self.subpass_desc();
        let atch_num = match subpass_desc.depth_stencil {
            Some((d, l)) => {
                // Read-only layout means the depth aspect cannot be written.
                if l == ImageLayout::DepthStencilReadOnlyOptimal {
                    return false;
                }
                d
            }
            None => return false,
        };
        match self.attachment_desc(atch_num).format.ty() {
            FormatTy::Depth => true,
            FormatTy::Stencil => false,
            FormatTy::DepthStencil => true,
            _ => unreachable!(),
        }
    }

    /// Returns true if the subpass has a stencil attachment or a depth-stencil attachment.
    #[inline]
    pub fn has_stencil(&self) -> bool {
        let subpass_desc = self.subpass_desc();
        let atch_num = match subpass_desc.depth_stencil {
            Some((d, _)) => d,
            None => return false,
        };
        match self.attachment_desc(atch_num).format.ty() {
            FormatTy::Depth => false,
            FormatTy::Stencil => true,
            FormatTy::DepthStencil => true,
            _ => unreachable!(),
        }
    }

    /// Returns true if the subpass has a stencil attachment or a depth-stencil attachment whose
    /// layout is not `DepthStencilReadOnlyOptimal`.
    #[inline]
    pub fn has_writable_stencil(&self) -> bool {
        let subpass_desc = self.subpass_desc();
        let atch_num = match subpass_desc.depth_stencil {
            Some((d, l)) => {
                // Read-only layout means the stencil aspect cannot be written.
                if l == ImageLayout::DepthStencilReadOnlyOptimal {
                    return false;
                }
                d
            }
            None => return false,
        };
        match self.attachment_desc(atch_num).format.ty() {
            FormatTy::Depth => false,
            FormatTy::Stencil => true,
            FormatTy::DepthStencil => true,
            _ => unreachable!(),
        }
    }

    /// Returns true if the subpass has any color or depth/stencil attachment.
    #[inline]
    pub fn has_color_or_depth_stencil_attachment(&self) -> bool {
        if self.num_color_attachments() >= 1 {
            return true;
        }
        let subpass_desc = self.subpass_desc();
        match subpass_desc.depth_stencil {
            Some((d, _)) => true,
            None => false,
        }
    }

    /// Returns the number of samples in the color and/or depth/stencil attachments. Returns `None`
    /// if there is no such attachment in this subpass.
    #[inline]
    pub fn num_samples(&self) -> Option<u32> {
        let subpass_desc = self.subpass_desc();

        // TODO: chain input attachments as well?
        subpass_desc
            .color_attachments
            .iter()
            .cloned()
            .chain(subpass_desc.depth_stencil.clone().into_iter())
            .filter_map(|a| self.render_pass.desc().attachments().get(a.0))
            .next()
            .map(|a| a.samples)
    }

    /// Returns the render pass of this subpass.
    #[inline]
    pub fn render_pass(&self) -> &Arc<RenderPass> {
        &self.render_pass
    }

    /// Returns the index of this subpass within the renderpass.
    #[inline]
    pub fn index(&self) -> u32 {
        self.subpass_id
    }

    /// Returns `true` if this subpass is compatible with the fragment output definition.
    // TODO: return proper error
    pub fn is_compatible_with(&self, shader_interface: &ShaderInterface) -> bool {
        self.render_pass
            .desc()
            .is_compatible_with_shader(self.subpass_id, shader_interface)
    }
}

impl From<Subpass> for (Arc<RenderPass>, u32) {
    #[inline]
    fn from(value: Subpass) -> (Arc<RenderPass>, u32) {
        (value.render_pass, value.subpass_id)
    }
}

#[cfg(test)]
mod tests {
    use crate::format::Format;
    use crate::render_pass::RenderPass;
    use crate::render_pass::RenderPassCreationError;

    #[test]
    fn empty() {
        let (device, _) = gfx_dev_and_queue!();
        let _ = RenderPass::empty_single_pass(device).unwrap();
    }

    #[test]
    fn too_many_color_atch() {
        let (device, _) = gfx_dev_and_queue!();

        // The test only makes sense when 10 attachments exceed the device limit.
        if device.physical_device().limits().max_color_attachments() >= 10 {
            return; // test ignored
        }

        let rp = single_pass_renderpass! {
            device.clone(),
            attachments: {
                a1: { load: Clear, store: DontCare, format: Format::R8G8B8A8Unorm, samples: 1, },
                a2: { load: Clear, store: DontCare, format: Format::R8G8B8A8Unorm, samples: 1, },
                a3: { load: Clear, store: DontCare, format: Format::R8G8B8A8Unorm, samples: 1, },
                a4: { load: Clear, store: DontCare, format: Format::R8G8B8A8Unorm, samples: 1, },
                a5: { load: Clear, store: DontCare, format: Format::R8G8B8A8Unorm, samples: 1, },
                a6: { load: Clear, store: DontCare, format: Format::R8G8B8A8Unorm, samples: 1, },
                a7: { load: Clear, store: DontCare, format: Format::R8G8B8A8Unorm, samples: 1, },
                a8: { load: Clear, store: DontCare, format: Format::R8G8B8A8Unorm, samples: 1, },
                a9: { load: Clear, store: DontCare, format: Format::R8G8B8A8Unorm, samples: 1, },
                a10: { load: Clear, store: DontCare, format: Format::R8G8B8A8Unorm, samples: 1, }
            },
            pass: {
                color: [a1, a2, a3, a4, a5, a6, a7, a8, a9, a10],
                depth_stencil: {}
            }
        };

        match rp {
            Err(RenderPassCreationError::ColorAttachmentsLimitExceeded) => (),
            _ => panic!(),
        }
    }

    #[test]
    fn non_zero_granularity() {
        let (device, _) = gfx_dev_and_queue!();

        let rp = single_pass_renderpass! {
            device.clone(),
            attachments: {
                a: { load: Clear, store: DontCare, format: Format::R8G8B8A8Unorm, samples: 1, }
            },
            pass: {
                color: [a],
                depth_stencil: {}
            }
        }
        .unwrap();

        let granularity = rp.granularity();
        assert_ne!(granularity[0], 0);
        assert_ne!(granularity[1], 0);
    }
}
true
1411b016544fdecf360963a5260cfd137fae02b8
Rust
zphixon/froggi
/server/src/main.rs
UTF-8
3,461
2.9375
3
[]
no_license
use anyhow::{Context, Result};
use froggi::request::Request;
use froggi::response::{Item, ItemKind, Response, ResponseBuilder, ResponseKind};

use std::collections::HashMap;
use std::io::Write;
use std::net::{TcpListener, TcpStream};

/// Serves a single client connection: parse one request from the stream and
/// answer with the cached page bytes, or the canned "not found" response.
///
/// NOTE(review): the unwraps here panic the spawned thread on a malformed
/// request or a broken connection — consider logging and returning instead.
fn handle_client(mut stream: TcpStream, page_store: &PageStore) {
    let request = Request::from_bytes(&mut stream).unwrap();
    println!("request: {:?}", request);

    match page_store.page(request.request()) {
        Some(page) => stream.write_all(page).unwrap(),
        None => stream.write_all(page_store.not_found()).unwrap(),
    }
}

// TODO we're accidentally caching the ID
/// In-memory cache of pre-serialized responses, keyed by page file name,
/// plus a prebuilt "not found" error response.
struct PageStore {
    page_cache: HashMap<String, Vec<u8>>,
    not_found: Vec<u8>,
}

impl PageStore {
    /// Creates an empty store and pre-serializes the "not found" response once.
    fn new() -> PageStore {
        PageStore {
            page_cache: HashMap::new(),
            not_found: ResponseBuilder::default()
                .page(String::from("('not found')"))
                .kind(ResponseKind::Error)
                .build()
                .unwrap()
                .bytes(),
        }
    }

    /// Serializes `response` and caches it under `name`.
    fn add_page(&mut self, name: String, response: Response) {
        self.page_cache.insert(name, response.bytes());
    }

    /// Looks up the cached bytes for `request` (a page file name).
    fn page(&self, request: &str) -> Option<&Vec<u8>> {
        self.page_cache.get(request)
    }

    /// The pre-serialized "not found" response.
    fn not_found(&self) -> &[u8] {
        &self.not_found
    }
}

/// Loads every `.fml` page from the `pages/` directory (relative to the
/// working directory), then serves them over TCP on port 11121, handling
/// each client on a scoped thread.
fn main() {
    let mut pages = PageStore::new();

    println!("reading pages");
    for item in std::fs::read_dir("pages").unwrap() {
        let item = item.unwrap();
        if item.metadata().unwrap().is_file()
            && item.file_name().to_str().unwrap().ends_with(".fml")
        {
            println!("{}", item.file_name().to_str().unwrap());
            pages.add_page(
                item.file_name().into_string().unwrap(),
                response_from_file(item.path()).unwrap(),
            );
        }
    }

    let listener = TcpListener::bind("0.0.0.0:11121").unwrap();
    println!(
        "listening at {}.
run this binary from froggi-server dir!",
        listener.local_addr().unwrap()
    );

    for stream in listener.incoming() {
        match stream {
            Ok(stream) => {
                println!("new client");
                // NOTE(review): `crossbeam::scope` joins before the next
                // accept, so clients are effectively served one at a time.
                crossbeam::scope(|s| {
                    s.spawn(|_| {
                        handle_client(stream, &pages);
                    });
                })
                .unwrap();
            }
            Err(e) => {
                println!("error {}", e);
            }
        }
    }
}

/// Parses an `.fml` page file into a `Response`, loading every item it
/// references (as image data) from files next to the page.
///
/// Returns an error with context if the page or any referenced item file
/// cannot be read, or if parsing fails.
fn response_from_file(path: impl AsRef<std::path::Path>) -> Result<Response> {
    // TODO this is kind of garbage
    let path = path.as_ref();
    let data =
        std::fs::read_to_string(&path).context(format!("could not read '{}'", path.display()))?;
    // Only the first parse error is reported.
    let page = froggi::markup::parse::parse(&data).map_err(|mut errs| errs.pop().unwrap())?;

    let item_names = page.item_names();
    let mut item_data = Vec::new();
    for name in item_names.iter() {
        item_data.push(
            std::fs::read(path.parent().unwrap().join(name))
                .context(format!("could not read file {}", name))?,
        );
    }

    let items = item_names
        .into_iter()
        .zip(item_data.into_iter())
        .map(|(name, data)| Item::new(name, ItemKind::Image, data))
        .collect();

    ResponseBuilder::default()
        .page(data)
        .items(items)
        .build()
        .map_err(|e| anyhow::anyhow!(e))
}
true
212a48e040683d64d1c2e87aa0e8ce6aced532cb
Rust
starcoinorg/starcoin
/vm/types/src/token/token_code.rs
UTF-8
5,831
2.734375
3
[ "Apache-2.0" ]
permissive
// Copyright (c) The Starcoin Core Contributors
// SPDX-License-Identifier: Apache-2.0

use crate::language_storage::TypeTag;
use crate::move_resource::MoveResource;
use crate::parser::parse_type_tag;
use crate::token::TOKEN_MODULE_NAME;
use anyhow::{bail, Result};
use move_core_types::account_address::AccountAddress;
use move_core_types::language_storage::StructTag;
use schemars::{self, JsonSchema};
use serde::de::Error;
use serde::{Deserialize, Deserializer, Serialize, Serializer};
use std::convert::{TryFrom, TryInto};
use std::fmt;
use std::str::FromStr;

/// Identifies a token type as `address::module::name`
/// (e.g. `0x1::STC::STC`), mirroring a Move struct tag.
#[derive(Debug, Ord, PartialOrd, Eq, PartialEq, Hash, Clone, JsonSchema)]
pub struct TokenCode {
    /// Token module's address
    #[schemars(with = "String")]
    pub address: AccountAddress,
    /// Token module's name
    pub module: String,
    /// Token's struct name
    pub name: String,
}

impl MoveResource for TokenCode {
    const MODULE_NAME: &'static str = TOKEN_MODULE_NAME;
    const STRUCT_NAME: &'static str = "TokenCode";
}

impl TokenCode {
    /// Creates a token code from its three components.
    pub fn new(address: AccountAddress, module: String, name: String) -> TokenCode {
        Self {
            address,
            module,
            name,
        }
    }
}

impl fmt::Display for TokenCode {
    // Renders as `address::module::name`; `FromStr` parses this format back.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}", self.address)?;
        write!(f, "::{}", self.module)?;
        write!(f, "::{}", self.name)
    }
}

impl TryFrom<TypeTag> for TokenCode {
    type Error = anyhow::Error;

    /// Fails for any non-struct type tag.
    fn try_from(value: TypeTag) -> Result<Self, Self::Error> {
        match value {
            TypeTag::Struct(struct_tag) => Ok(TokenCode::from(*struct_tag)),
            type_tag => bail!("{:?} is not a Token's type tag", type_tag),
        }
    }
}

impl From<StructTag> for TokenCode {
    fn from(struct_tag: StructTag) -> Self {
        // Split only twice so the `name` part keeps any generic type
        // parameters (e.g. `LiquidityToken<A, B>`) intact.
        let tag = struct_tag.to_string();
        let s: Vec<_> = tag.splitn(3, "::").collect();
        //this should not happen
        assert_eq!(s.len(), 3, "invalid struct tag format");
        Self::new(
            struct_tag.address,
            struct_tag.module.into_string(),
            s[2].to_string(),
        )
    }
}

impl FromStr for TokenCode {
    type Err = anyhow::Error;

    fn from_str(s: &str) -> Result<Self, Self::Err> {
        let type_tag = parse_type_tag(s)?;
        Self::try_from(type_tag)
    }
}

#[allow(clippy::from_over_into)]
impl TryInto<StructTag> for TokenCode {
    type Error = anyhow::Error;

    // Round-trips through the string form so that generic parameters embedded
    // in `name` are re-parsed into a proper struct tag.
    fn try_into(self) -> Result<StructTag, Self::Error> {
        match parse_type_tag(self.to_string().as_str())? {
            TypeTag::Struct(s) => Ok(*s),
            t => bail!("expect token code to be a struct tag, but receive {}", t),
        }
    }
}

impl TryInto<TypeTag> for TokenCode {
    type Error = anyhow::Error;

    fn try_into(self) -> Result<TypeTag, Self::Error> {
        Ok(TypeTag::Struct(Box::new(self.try_into()?)))
    }
}

impl<'de> Deserialize<'de> for TokenCode {
    // Human-readable formats (e.g. JSON) use the `address::module::name`
    // string; binary formats use a named 3-field container.
    fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        if deserializer.is_human_readable() {
            let s = <String>::deserialize(deserializer)?;
            TokenCode::from_str(&s).map_err(D::Error::custom)
        } else {
            // In order to preserve the Serde data model and help analysis tools,
            // make sure to wrap our value in a container with the same name
            // as the original type.
            #[derive(::serde::Deserialize)]
            #[serde(rename = "TokenCode")]
            struct Value {
                address: AccountAddress,
                module: String,
                name: String,
            }
            let value = Value::deserialize(deserializer)?;
            Ok(TokenCode::new(value.address, value.module, value.name))
        }
    }
}

impl Serialize for TokenCode {
    fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        if serializer.is_human_readable() {
            self.to_string().serialize(serializer)
        } else {
            // See comment in deserialize.
            serializer.serialize_newtype_struct(
                "TokenCode",
                &(self.address, self.module.clone(), self.name.clone()),
            )
        }
    }
}

#[cfg(test)]
mod test {
    use crate::language_storage::{StructTag, TypeTag};
    use crate::parser::parse_type_tag;
    use crate::token::stc::G_STC_TOKEN_CODE;
    use crate::token::token_code::TokenCode;
    use serde::{Deserialize, Serialize};
    use std::convert::TryInto;
    use std::str::FromStr;

    /// Round-trips a generic token code through string/tag conversions.
    #[test]
    fn test_token_code() {
        let token = "0x00000000000000000000000000000002::LiquidityToken::LiquidityToken<0x569ab535990a17ac9afd1bc57faec683::Ddd::Ddd, 0x569ab535990a17ac9afd1bc57faec683::Bot::Bot>";
        let tc = TokenCode::from_str(token).unwrap();
        let type_tag: StructTag = tc.clone().try_into().unwrap();
        assert_eq!(token.to_string(), tc.to_string());
        assert_eq!(
            parse_type_tag(token).unwrap(),
            TypeTag::Struct(Box::new(type_tag.clone()))
        );
        assert_eq!(tc, type_tag.try_into().unwrap());
    }

    #[derive(Serialize, Deserialize, Debug, Clone, PartialOrd, Eq, PartialEq)]
    struct Setting {
        default_token: TokenCode,
    }

    /// Round-trips through both a human-readable (JSON) and a binary (BCS) codec.
    #[test]
    fn test_token_serialize() {
        let setting = Setting {
            default_token: G_STC_TOKEN_CODE.clone(),
        };
        let json = serde_json::to_string(&setting).unwrap();
        let setting2: Setting = serde_json::from_str(json.as_str()).unwrap();
        assert_eq!(setting, setting2);
        let bytes = bcs_ext::to_bytes(&setting).unwrap();
        let setting3: Setting = bcs_ext::from_bytes(bytes.as_slice()).unwrap();
        assert_eq!(setting, setting3);
    }
}
true
5b198ce239da887da2c7e7095c32c495b43b0560
Rust
RustedTerrier/fanterra
/src/main.rs
UTF-8
3,847
3.203125
3
[ "BSD-3-Clause" ]
permissive
mod game; mod setup; use std::{env, io}; fn main() { // if 2 < 3 { // extern crate rodio; // } start_screen(); } fn start_screen() { // print!("{}{}", 27 as char, "[38;5;3m"); println!("AMMMMMMMMMMA AMMA AMA AMA AMMMMMMMMMMMMMA AMMMMMMMMMMA AMMMMMMA. AMMMMMMA. AMMA\n\ MMMMMMMMMMMV AMVVMA MMMA MMM VMMMMMMMMMMMMMV MMMMMMMMMMMV MMMMMMMMMA MMMMMMMMMA AMVVMA\n\ MMM AMV VMA MMMMA MMM MMM MMM MMM` `VMA MMM` `VMA AMV VMA\n\ MMM AMV VMA MMMVMA MMM MMM MMM MMM ;MM MMM ;MM AMV VMA\n\ MMMMMMMMA AMV VMA MMM VMA MMM MMM MMMMMMMMMA MMM. .AMV MMM. .AMV AMV VMA\n\ MMMMMMMMV MMMMMMMMMMMM MMM VMAMMM MMM MMMMMMMMMV MMMMMMMMMV MMMMMMMMMV MMMMMMMMMMMM\n\ MMM MMMMMMMMMMMM MMM VMMMM MMM MMM MMMMMMMMA` MMMMMMMMA` MMMMMMMMMMMM\n\ MMM MMM MMM MMM VMMM MMM MMMMMMMMMMMA MMM VMA MMM VMA MMM MMM\n\ VMV VMV VMV VMV VMV VMV VMMMMMMMMMMV VMV VMA VMV VMA VMV VMV\n\ "); // print!("{}{}", 27 as char, "[38;5;15m"); println!("Do you want to do? 1) Create a new game, 2) Play an existing game, or 3) Quit?"); let mut choice = String::new(); io::stdin() .read_line(&mut choice) .expect("Failed to read line"); if choice[0 .. choice.len() - 1] == String::from("1") { // If you want to create a new world, create a new world. let hme = env::var("HOME").unwrap(); let world_name = setup::create_world(hme); play_game(world_name); } if choice[0 .. choice.len() - 1] == String::from("2") { // If you want to play an existing game, do that. let mut worlds_string: String = setup::read_worlds(env::var("HOME").unwrap()) .into_iter() .collect(); worlds_string = worlds_string[0 .. 
worlds_string.len() - 1].to_string(); println!( "Choose from each world, with a corresponding number: \n\r{}", &worlds_string ); worlds_string = setup::read_worlds(env::var("HOME").unwrap()) .into_iter() .collect(); let mut world = String::new(); io::stdin() .read_line(&mut world) .expect("Something went wrong reading your input."); let worldnum = world.replace("\n", "").parse::<u32>().unwrap() - 1; let worldsplit: Vec<&str> = worlds_string.split("\n").collect(); world = worldsplit[worldnum as usize] .to_string() .replace(" | ", ""); world = world[2 ..].to_string(); play_game(world); } if choice[0 .. choice.len() - 1] == String::from("3") { // If you want to quit, quit. println!("Quiting..."); } } fn play_game(world: String) { let game = setup::setup_game(world, env::var("HOME").unwrap()); // pa1, is supposed to mean path1, I'm just lazy at typing. let mut game_data = game::start_game(game.seed, game.pa1, game.pa2, game.pa3); loop { game_data.area += 1; match game_data.path { | 1 => game_data.area = game::navigate_path(&game_data.p1, game_data.area), | 2 => game_data.area = game::navigate_path(&game_data.p2, game_data.area), | 3 => game_data.area = game::navigate_path(&game_data.p3, game_data.area), | _ => {} } if !game_data.game_state { break; } else { // game_data.game_state = false; } } }
true
7b15c79be388a5aaddf9549f7730a5565789047e
Rust
listboss/ex18
/src/main.rs
UTF-8
4,417
3.5
4
[]
no_license
struct Grid([[char; 100]; 100]); impl Copy for Grid {} impl Clone for Grid { fn clone(&self) -> Grid { *self } } impl Grid { fn new(input: &str) -> Grid { assert_eq!(input.lines().count(), 100); assert_eq!(100, input.lines().next().unwrap_or("").len()); fn initiate(grid: &mut Grid, input: &str) { input.lines() .enumerate() .map(|(x, line): (usize, &str)| { line.chars() .enumerate() .filter(|&(_, s)| s == '#') .map(|(y, s): (usize, char)| grid.0[x][y] = s) .count(); }) .count(); } let mut grid = Grid([['.'; 100]; 100]); // If input string is empty, return a blank grid with default value. if input.len() != 0 { initiate(&mut grid, input); } grid } fn set(&mut self, (i, j): (usize, usize), c: char) { assert!(['#', '.'].contains(&c)); assert!(i < self.0.len()); assert!(j < self.0.len()); self.0[i][j] = c; } } trait Light { fn step(&mut self, stuck_corners: bool); fn brightness(&self) -> usize; } impl Light for Grid { fn brightness(&self) -> usize { self.0 .iter() .map(|l| l.iter().filter(|&&c| c == '#').count()) .sum() } fn step(&mut self, stuck_corners: bool) { let mut new_grid = self.clone(); let nrows = self.0.len(); let ncols = nrows; for (i, row) in self.0.iter().enumerate() { let next_i = if i != nrows - 1 { Some(i + 1) } else { None }; let prev_i = if i != 0 { Some(i - 1) } else { None }; for (j, light) in row.iter().enumerate() { let next_j = if j != ncols - 1 { Some(j + 1) } else { None }; let prev_j = if j != 0 { Some(j - 1) } else { None }; let lights_on = [(prev_i, prev_j), (prev_i, Some(j)), (prev_i, next_j), (Some(i), prev_j), (Some(i), next_j), (next_i, prev_j), (next_i, Some(j)), (next_i, next_j)] .iter() .filter(|&&(i, j)| { i.is_some() && j.is_some() && self.0[i.unwrap()][j.unwrap()] == '#' }) .count(); match *light { '#' if ![2, 3].contains(&lights_on) => new_grid.0[i][j] = '.', '.' 
if lights_on == 3 => new_grid.0[i][j] = '#', _ => (), } } } if stuck_corners { new_grid.0[0][0] = '#'; new_grid.0[0][ncols - 1] = '#'; new_grid.0[nrows - 1][0] = '#'; new_grid.0[nrows - 1][ncols - 1] = '#'; } *self = new_grid; } } fn main() { let mut grid = Grid::new(include_str!("../input.txt")); for _ in 1..101 { grid.step(false); } println!("No of ON lighs: {}", grid.brightness()); let mut grid = Grid::new(include_str!("../input.txt")); grid.set((0, 0), '#'); grid.set((0, 99), '#'); grid.set((99, 0), '#'); grid.set((99, 99), '#'); for _ in 1..101 { grid.step(true); } println!("No of ON lighs with stuck corner: {}", grid.brightness()); } #[cfg(test)] mod tests { use super::*; #[test] fn initiate_grid() { let g = Grid::new(include_str!("../input.txt")); let expected = "..####.####.##.#.##....#...##....#..#....#..###..#...#..###.#####.....\ #####..##.#.#.#.#.#.##.####..."; let expected = expected.chars().collect::<Vec<char>>(); assert_eq!(expected[..], g.0[54][..]); let expected = "##.#.##.#...#.###.##.##.##.##..##.##...#..##.#..#######.#..#...#.#.##..#..\ ..##.#..####.###........#."; let expected = expected.chars().collect::<Vec<char>>(); assert_eq!(expected[..], g.0[90][..]); } #[test] fn getting_brightness() { let g = Grid::new(include_str!("../input.txt")); assert_eq!(5076, g.brightness()); } }
true
7140bc5fb1ab2f4a562cb5c1f950bd633cd8e735
Rust
yycdavid/tamago
/src/input.rs
UTF-8
15,520
2.890625
3
[ "MIT" ]
permissive
use crate::model::*; use egg::*; use itertools::Itertools; use std::collections::HashMap; const MAX_DIM: usize = 8; /// Struct for converting a model specified using our Rust interface to RecExpr /// /// The RecExpr is growed on the fly when member functions are called. Uses a /// Hashmap to store the map of scalar nodes to their indices into the RexExpr to /// avoid replication. #[derive(Default)] pub struct GraphConverter { rec_expr: RecExpr<Mdl>, scalar_map: HashMap<i32, Id>, name_gen: NameGen, } /// Struct for storing information of a tensor. This is passed between functions /// during graph creation. #[derive(Copy, Clone, Default)] pub struct TensorInfo { /// Id into the RecExpr constructed pub id: Id, /// Shape of the tensor. We deal with tensor up to MAX_DIM dimensions pub shape: [i32; MAX_DIM], /// Number of dimensions of this tensor pub n_dim: usize, } /// The APIs of GraphConverter are (intended to) match TASO's so that we can easily /// construct TASO graphs using this class impl GraphConverter { /// Gets the RexExpr after graph is constructed pub fn rec_expr(self) -> RecExpr<Mdl> { self.rec_expr } /// Takes in the parameters for the new input, construct the node in RexExpr, /// return the Id (index) of this input node in the RecExpr. This is the /// pattern for all these op functions. 
pub fn new_input(&mut self, dims: &[i32]) -> TensorInfo { let name = self.name_gen.new_input_name() + "@" + &dims.iter().join("_"); let node = Mdl::Var(Symbol::from(name)); let name_id = self.rec_expr.add(node); let new_node = Mdl::Input([name_id]); let (shape, n_dim) = self.shape_from_dim(dims); TensorInfo { id: self.rec_expr.add(new_node), shape, n_dim, } } pub fn new_weight(&mut self, dims: &[i32]) -> TensorInfo { let name = self.name_gen.new_weight_name() + "@" + &dims.iter().join("_"); let node = Mdl::Var(Symbol::from(name)); let name_id = self.rec_expr.add(node); let new_node = Mdl::Weight([name_id]); let (shape, n_dim) = self.shape_from_dim(dims); TensorInfo { id: self.rec_expr.add(new_node), shape, n_dim, } } pub fn conv2d( &mut self, inpt: TensorInfo, wght: TensorInfo, stride_h: i32, stride_w: i32, padding: i32, activation: i32, ) -> TensorInfo { let stride_h_id = self.add_or_get_val(stride_h); let stride_w_id = self.add_or_get_val(stride_w); let padding_id = self.add_or_get_val(padding); let activation_id = self.add_or_get_val(activation); let new_node = Mdl::Conv2d([ stride_h_id, stride_w_id, padding_id, activation_id, inpt.id, wght.id, ]); // Get shape let mut shape = [0; MAX_DIM]; let input_h = inpt.shape[2]; let input_w = inpt.shape[3]; let kernel_h = wght.shape[2]; let kernel_w = wght.shape[3]; let (output_h, output_w) = self.get_conv_shape( input_h, input_w, stride_h, stride_w, kernel_h, kernel_w, padding, ); shape[0] = inpt.shape[0]; shape[1] = wght.shape[0]; shape[2] = output_h; shape[3] = output_w; TensorInfo { id: self.rec_expr.add(new_node), shape: shape, n_dim: 4, } } pub fn dropout(&mut self, inpt: TensorInfo) -> TensorInfo { let new_node = Mdl::Dropout(inpt.id); TensorInfo { id: self.rec_expr.add(new_node), ..inpt } } pub fn relu(&mut self, inpt: TensorInfo) -> TensorInfo { let new_node = Mdl::Relu(inpt.id); TensorInfo { id: self.rec_expr.add(new_node), ..inpt } } pub fn tanh(&mut self, inpt: TensorInfo) -> TensorInfo { let new_node = 
Mdl::Tanh(inpt.id); TensorInfo { id: self.rec_expr.add(new_node), ..inpt } } pub fn sigmoid(&mut self, inpt: TensorInfo) -> TensorInfo { let new_node = Mdl::Sigmoid(inpt.id); TensorInfo { id: self.rec_expr.add(new_node), ..inpt } } pub fn batchnorm(&mut self, inpt: TensorInfo, scale: TensorInfo, bias: TensorInfo, mean: TensorInfo, var: TensorInfo) -> TensorInfo { let new_node = Mdl::BatchNorm([inpt.id, scale.id, bias.id, mean.id, var.id]); TensorInfo { id: self.rec_expr.add(new_node), ..inpt } } pub fn add(&mut self, inpt_1: TensorInfo, inpt_2: TensorInfo) -> TensorInfo { let new_node = Mdl::Ewadd([inpt_1.id, inpt_2.id]); TensorInfo { id: self.rec_expr.add(new_node), ..inpt_1 } } pub fn matmul(&mut self, inpt_1: TensorInfo, inpt_2: TensorInfo) -> TensorInfo { let activation = ACTNONE; let act_id = self.add_or_get_val(activation); let new_node = Mdl::Matmul([act_id, inpt_1.id, inpt_2.id]); let mut shape = inpt_1.shape; let n_dim = inpt_1.n_dim; shape[n_dim - 1] = inpt_2.shape[n_dim - 1]; TensorInfo { id: self.rec_expr.add(new_node), shape, n_dim, } } pub fn mul(&mut self, inpt_1: TensorInfo, inpt_2: TensorInfo) -> TensorInfo { let new_node = Mdl::Ewmul([inpt_1.id, inpt_2.id]); TensorInfo { id: self.rec_expr.add(new_node), ..inpt_1 } } pub fn concat( &mut self, axis: i32, ndim: i32, inpt_1: TensorInfo, inpt_2: TensorInfo, ) -> TensorInfo { // Only support concat of 2 inputs for now // To support more, pass in a slice and create more concat nodes here let axis_id = self.add_or_get_val(axis); let ndim_id = self.add_or_get_val(ndim); let new_node = Mdl::Concat([axis_id, ndim_id, inpt_1.id, inpt_2.id]); let mut shape = inpt_1.shape; let n_dim = inpt_1.n_dim; shape[axis as usize] += inpt_2.shape[axis as usize]; TensorInfo { id: self.rec_expr.add(new_node), shape, n_dim, } } pub fn concat_multi(&mut self, axis: i32, inputs: &[TensorInfo]) -> TensorInfo { let n_inputs = inputs.len(); // We can add supports for other number of inputs later when needed. 
// We need to add a new Concat op for each number of inputs assert!(n_inputs <= 5); let n_dim = inputs[0].n_dim; let axis_id = self.add_or_get_val(axis); let ndim_id = self.add_or_get_val(n_dim as i32); let new_node = match n_inputs { 2 => { Mdl::Concat([ axis_id, ndim_id, inputs[0].id, inputs[1].id, ]) } 3 => { Mdl::Concat3([ axis_id, ndim_id, inputs[0].id, inputs[1].id, inputs[2].id, ]) } 4 => { Mdl::Concat4([ axis_id, ndim_id, inputs[0].id, inputs[1].id, inputs[2].id, inputs[3].id, ]) } 5 => { Mdl::Concat5([ axis_id, ndim_id, inputs[0].id, inputs[1].id, inputs[2].id, inputs[3].id, inputs[4].id, ]) } _ => panic!("Number of input for concat not supported"), }; let mut shape = inputs[0].shape; shape[axis as usize] += (1..n_inputs) .map(|i| inputs[i].shape[axis as usize]) .sum::<i32>(); TensorInfo { id: self.rec_expr.add(new_node), shape, n_dim, } } pub fn maxpool2d( &mut self, inpt: TensorInfo, kernel_h: i32, kernel_w: i32, stride_h: i32, stride_w: i32, padding: i32, ) -> TensorInfo { let kernel_h_id = self.add_or_get_val(kernel_h); let kernel_w_id = self.add_or_get_val(kernel_w); let stride_h_id = self.add_or_get_val(stride_h); let stride_w_id = self.add_or_get_val(stride_w); let padding_id = self.add_or_get_val(padding); let activation = ACTNONE; let act_id = self.add_or_get_val(activation); let new_node = Mdl::Poolmax([ inpt.id, kernel_h_id, kernel_w_id, stride_h_id, stride_w_id, padding_id, act_id, ]); // Get shape let mut shape = [0; MAX_DIM]; let input_h = inpt.shape[2]; let input_w = inpt.shape[3]; let (output_h, output_w) = self.get_conv_shape( input_h, input_w, stride_h, stride_w, kernel_h, kernel_w, padding, ); shape[0] = inpt.shape[0]; shape[1] = inpt.shape[1]; shape[2] = output_h; shape[3] = output_w; TensorInfo { id: self.rec_expr.add(new_node), shape: shape, n_dim: 4, } } pub fn avgpool2d( &mut self, inpt: TensorInfo, kernel_h: i32, kernel_w: i32, stride_h: i32, stride_w: i32, padding: i32, ) -> TensorInfo { let kernel_h_id = 
self.add_or_get_val(kernel_h); let kernel_w_id = self.add_or_get_val(kernel_w); let stride_h_id = self.add_or_get_val(stride_h); let stride_w_id = self.add_or_get_val(stride_w); let padding_id = self.add_or_get_val(padding); let activation = ACTNONE; let act_id = self.add_or_get_val(activation); let new_node = Mdl::Poolavg([ inpt.id, kernel_h_id, kernel_w_id, stride_h_id, stride_w_id, padding_id, act_id, ]); // Get shape let mut shape = [0; MAX_DIM]; let input_h = inpt.shape[2]; let input_w = inpt.shape[3]; let (output_h, output_w) = self.get_conv_shape( input_h, input_w, stride_h, stride_w, kernel_h, kernel_w, padding, ); shape[0] = inpt.shape[0]; shape[1] = inpt.shape[1]; shape[2] = output_h; shape[3] = output_w; TensorInfo { id: self.rec_expr.add(new_node), shape: shape, n_dim: 4, } } pub fn enlarge(&mut self, inpt_1: TensorInfo, inpt_2: TensorInfo) -> TensorInfo { let mut shape = inpt_1.shape; shape[2] = inpt_2.shape[2]; shape[3] = inpt_2.shape[3]; let new_node = Mdl::Enlarge([inpt_1.id, inpt_2.id]); TensorInfo { id: self.rec_expr.add(new_node), shape: shape, n_dim: 4, } } pub fn split(&mut self, axis: i32, inpt: TensorInfo) -> (TensorInfo, TensorInfo) { let axis_id = self.add_or_get_val(axis); let split_node = Mdl::Split([axis_id, inpt.id]); let split_id = self.rec_expr.add(split_node); let split_0_node = Mdl::Split0(split_id); let split_0_id = self.rec_expr.add(split_0_node); let split_1_node = Mdl::Split1(split_id); let split_1_id = self.rec_expr.add(split_1_node); assert!(false, "Shape inference not implemented for split"); let out_0 = TensorInfo { id: split_0_id, shape: [0; MAX_DIM], n_dim: inpt.n_dim, }; let out_1 = TensorInfo { id: split_1_id, shape: [0; MAX_DIM], n_dim: inpt.n_dim, }; (out_0, out_1) } pub fn reshape(&mut self, inpt: TensorInfo, shape: &[i32]) -> TensorInfo { let shape_name = &shape.iter().join("_"); let node = Mdl::Var(Symbol::from(shape_name)); let shape_name_id = self.rec_expr.add(node); let new_node = Mdl::Reshape([inpt.id, 
shape_name_id]); let (shape_new, n_dim) = self.shape_from_dim(shape); TensorInfo { id: self.rec_expr.add(new_node), shape: shape_new, n_dim: n_dim, } } pub fn transpose(&mut self, inpt: TensorInfo, perm: &[i32], shuffle: bool) -> TensorInfo { let perm_name = &perm.iter().join("_"); let node = Mdl::Var(Symbol::from(perm_name)); let perm_name_id = self.rec_expr.add(node); let shuffle_val = if shuffle { SHUFFLE } else { NOSHUFFLE }; let shuffle_id = self.add_or_get_val(shuffle_val); let new_node = Mdl::Transpose([inpt.id, perm_name_id, shuffle_id]); let mut shape = [0; MAX_DIM]; let n_dim = inpt.n_dim; for (i, perm_i) in perm.iter().enumerate() { shape[i] = inpt.shape[*perm_i as usize]; } TensorInfo { id: self.rec_expr.add(new_node), shape, n_dim, } } pub fn noop(&mut self, inpt_1: TensorInfo, inpt_2: TensorInfo) -> TensorInfo { let new_node = Mdl::Noop([inpt_1.id, inpt_2.id]); TensorInfo { id: self.rec_expr.add(new_node), shape: [0; MAX_DIM], n_dim: inpt_1.n_dim, } } /// If a scalar value is in the RecExpr, gets the Id. Otherwise creates one. fn add_or_get_val(&mut self, val: i32) -> Id { match self.scalar_map.get(&val) { Some(id) => *id, None => { let node = Mdl::Num(val); let id = self.rec_expr.add(node); self.scalar_map.insert(val, id); id } } } fn shape_from_dim(&self, dims: &[i32]) -> ([i32; MAX_DIM], usize) { let mut shape = [0; MAX_DIM]; for (i, dim) in dims.iter().enumerate() { shape[i] = *dim; } (shape, dims.len()) } fn get_conv_shape( &self, input_h: i32, input_w: i32, stride_h: i32, stride_w: i32, kernel_h: i32, kernel_w: i32, padding: i32, ) -> (i32, i32) { if padding == PSAME { let output_h = (input_h + stride_h - 1) / stride_h; let output_w = (input_w + stride_w - 1) / stride_w; (output_h, output_w) } else { let output_h = (input_h - kernel_h) / stride_h + 1; let output_w = (input_w - kernel_w) / stride_w + 1; (output_h, output_w) } } } /// Struct for generating new names for weight tensors in the model /// /// Generates names like w1, w2... 
#[derive(Default)] pub struct NameGen { count_input: i32, count_weight: i32, } impl NameGen { pub fn new_weight_name(&mut self) -> String { let name = format!("w_{}", self.count_weight); self.count_weight += 1; name } pub fn new_input_name(&mut self) -> String { let name = format!("input_{}", self.count_input); self.count_input += 1; name } }
true
63838c9c198c236cff32a2fe28a0200595c5aa97
Rust
llwwns/pairing_heap
/src/main.rs
UTF-8
196
2.9375
3
[ "MIT" ]
permissive
use pairing_heap::PairingHeap; fn main() { let x = vec![5,2,1,7,4,6,9,2]; let h: PairingHeap<_> = x.into_iter().collect(); let y: Vec<_> = h.into_iter().collect(); println!("{:?}", y); }
true
ed134a5a9a779fdc7c2996cf471cfddec58b9ad2
Rust
hgzimmerman/rust_sms
/src/models/new_user_builders/realized_new_user_builder.rs
UTF-8
4,127
3.140625
3
[]
no_license
use super::NewUserBuilder; use diesel; use diesel::pg::PgConnection; use diesel::prelude::*; use models::users::{NewUser, UserState}; use super::user_builder_state::UserBuilderState; #[derive(Clone, Debug)] pub struct RealizedNewUserBuilder { pub phone_number: String, first_name: Option<String>, last_name: Option<String>, pub builder_state: UserBuilderState } impl RealizedNewUserBuilder { pub fn new(phone_number: String) -> RealizedNewUserBuilder { RealizedNewUserBuilder { first_name: None, last_name: None, phone_number: phone_number, builder_state: UserBuilderState::AwaitingFirstName } } pub fn build(self) -> Option<NewUser> { let first_name = match self.first_name { Some(first_name) => first_name, None => { error!("Tried to call build without setting the first name."); return None } }; let last_name = match self.last_name { Some(last_name) => last_name, None => { error!("Tried to call build without setting the last name."); return None } }; // Actually build the new user Some( NewUser { first_name: first_name, last_name: last_name, phone_number: self.phone_number, state: UserState::StartState.into() } ) } pub fn add_first_name(&mut self, first_name: String) { self.first_name = Some(first_name); } pub fn add_last_name(&mut self, last_name: String) { self.last_name = Some(last_name); } pub fn add_phone_number(&mut self, phone_number: String) { self.phone_number = phone_number; } pub fn get_printable_name(&self) -> String { let self_clone: RealizedNewUserBuilder = self.clone(); let first = self_clone.first_name.clone().unwrap(); let last = self_clone.last_name.clone().unwrap(); format!("{} {}", first, last) } pub fn db_insert(&self, connection: &PgConnection) { use schema::new_user_builders; let u: NewUserBuilder = self.clone().into(); diesel::insert(&u) .into(new_user_builders::table) .execute(connection) .expect("Error saving provisional user"); } pub fn db_update(&self, connection:&PgConnection) { use schema::new_user_builders; let u: NewUserBuilder = 
self.clone().into(); diesel::update(new_user_builders::table) .set(&u) .execute(connection) .expect("Error updating"); } pub fn get_by_phone_number(searched_phone_number: &String, connection: &PgConnection) -> Option<RealizedNewUserBuilder> { use schema::new_user_builders::dsl::*; let phone_num: String = searched_phone_number.clone(); let results = new_user_builders.filter(phone_number.eq(phone_num)) .limit(1) .load::<NewUserBuilder>(connection) .expect("ERR loading users"); // get the only element in the results match results.iter().last() { Some(user_builder) => Some(RealizedNewUserBuilder::from(user_builder.clone())), // Clone to get ownership, then convert. None => None } } } impl Into<NewUserBuilder> for RealizedNewUserBuilder { fn into(self) -> NewUserBuilder { NewUserBuilder { phone_number: self.phone_number, first_name: self.first_name, last_name: self.last_name, builder_state: self.builder_state.into(), } } } impl From<NewUserBuilder> for RealizedNewUserBuilder { fn from(new_user_builder: NewUserBuilder) -> Self { RealizedNewUserBuilder { phone_number: new_user_builder.phone_number, first_name: new_user_builder.first_name, last_name: new_user_builder.last_name, builder_state: new_user_builder.builder_state.into(), } } }
true
70f3701ce46b455fdc8e356993b4309f18397173
Rust
Psykopear/gameman
/src/gpu.rs
UTF-8
7,057
3.03125
3
[]
no_license
use crate::cpu::is_bit_set; /// Expose the memories of the GPU pub trait GPUMemoriesAccess { fn read_oam(&mut self, addr: u16) -> u8; fn write_oam(&mut self, addr: u16, byte: u8); fn read_vram(&mut self, addr: u16) -> u8; fn write_vram(&mut self, addr: u16, byte: u8); fn read_byte(&mut self, addr: u16) -> u8; fn write_byte(&mut self, addr: u16, byte: u8); } pub struct GPU { vram: [u8; 8192], oam: [u8; 256], buffer: [u8; 160 * 144], // every pixel can have 4 values (4 shades of grey) modeclock: u16, mode: u8, line: u8, control: u8, scroll_x: u8, scroll_y: u8, palette: u8, } impl GPUMemoriesAccess for GPU { fn read_oam(&mut self, addr: u16) -> u8 { self.oam[addr as usize] } fn write_oam(&mut self, addr: u16, byte: u8) { self.oam[addr as usize] = byte } fn read_vram(&mut self, addr: u16) -> u8 { self.vram[addr as usize] } fn write_vram(&mut self, addr: u16, byte: u8) { self.vram[addr as usize] = byte } fn read_byte(&mut self, addr: u16) -> u8 { match addr { 0xFF40 => self.control, 0xFF42 => self.scroll_y, 0xFF43 => self.scroll_x, 0xFF44 => self.line, _ => 0, } } fn write_byte(&mut self, addr: u16, byte: u8) { match addr { 0xFF40 => { self.control = byte; } 0xFF42 => { self.scroll_y = byte; } 0xFF43 => { self.scroll_x = byte; } 0xFF47 => { self.palette = byte; } _ => {} } } } impl GPU { pub fn new() -> Self { GPU { vram: [0; 8192], oam: [0; 256], buffer: [0; 160 * 144], modeclock: 0, mode: 2, line: 0, scroll_x: 0, scroll_y: 0, palette: 0, control: 0, } } pub fn get_buffer(&self) -> &[u8; 160 * 144] { return &self.buffer; } // draws a line on the buffer pub fn render_scan_to_buffer(&mut self) { // todo: reuse some calculations let (tiles_in_a_tilemap_row, tiles_in_a_screen_row, tile_size) = (32, 20, 8); let line_to_draw: usize = (self.line + self.scroll_y) as usize; let tilemap_row: usize = line_to_draw / tile_size; //todo: go back on top if line > 256 let pixel_row: usize = line_to_draw % tile_size; let tilemap0_offset = 0x9800 - 0x8000; for tile in 0..20 { // todo: 
right now only draws the first 20 tiles from the left, use scroll X let tilemap_index = tilemap0_offset + (tilemap_row * tiles_in_a_tilemap_row + tile) as usize; let pos = self.vram[tilemap_index]; let tile_in_tileset: usize = (2 * tile_size * (pos as usize) + (pixel_row as usize) * 2) as usize; // a tile pixel line is encoded in two consecutive bytes let byte_1 = self.vram[tile_in_tileset]; let byte_2 = self.vram[tile_in_tileset + 1]; for pixel in 0..8u8 { let ix = 7 - pixel; let high_bit: u8 = is_bit_set(ix, byte_2 as u16) as u8; let low_bit: u8 = is_bit_set(ix, byte_1 as u16) as u8; let color: u8 = (high_bit << 1) + low_bit; let index: usize = (self.line as usize * tiles_in_a_screen_row * tile_size) + (tile as usize) * tile_size + pixel as usize; self.buffer[index] = color; } } } // go forward based on the cpu's last operation clocks pub fn step(&mut self, t: u8) -> bool { self.modeclock += t as u16; let mut vblank_interrupt: bool = false; // todo: implement it as a state machine? match self.mode { // scanline, oam read mode 2 => { if self.modeclock >= 80 { self.modeclock = 0; self.mode = 3; } } // scanline, vram read mode 3 => { if self.modeclock >= 172 { // enter hblank mode self.modeclock = 0; self.mode = 0; self.render_scan_to_buffer(); } } // hblank 0 => { if self.modeclock >= 204 { self.modeclock = 0; self.line += 1; if self.line == 143 { // enter vblank mode self.mode = 1; } else { self.mode = 2; } } } // vblank (10 lines) 1 => { if self.modeclock >= 456 { self.modeclock = 0; self.line += 1; // restart if self.line > 153 { vblank_interrupt = true; self.mode = 2; self.line = 0; } } } _ => panic!("Sorry what?"), } vblank_interrupt } } #[cfg(test)] mod tests { use super::*; // test scroll_y write and read access, as well as the default value #[test] fn test_scroll_y() { let mut gpu = GPU::new(); assert_eq!(gpu.scroll_y, 0); gpu.write_byte(0xFF42, 1); assert_eq!(gpu.scroll_y, 1); assert_eq!(gpu.read_byte(0xFF42), 1); } // test scroll_x write and read access, 
as well as the default value #[test] fn test_scroll_x() { let mut gpu = GPU::new(); assert_eq!(gpu.scroll_x, 0); gpu.write_byte(0xFF43, 1); assert_eq!(gpu.scroll_x, 1); assert_eq!(gpu.read_byte(0xFF43), 1); } // test palette write and read access, as well as the default value #[test] fn test_palette() { let mut gpu = GPU::new(); // default value assert_eq!(gpu.palette, 0); gpu.write_byte(0xFF47, 1); assert_eq!(gpu.palette, 1); // no read access assert_eq!(gpu.read_byte(0xFF47), 0); } // test control write and read access, as well as the default value #[test] fn test_control() { let mut gpu = GPU::new(); assert_eq!(gpu.control, 0); gpu.write_byte(0xFF40, 1); assert_eq!(gpu.control, 1); assert_eq!(gpu.read_byte(0xFF40), 1); } // test line read and write access #[test] fn test_line() { let mut gpu = GPU::new(); assert_eq!(gpu.line, 0); gpu.write_byte(0xFF44, 1); // no write access assert_eq!(gpu.line, 0); gpu.line = 15; assert_eq!(gpu.read_byte(0xFF44), 15); } }
true
d3a11b9b431168c0d0e07049159bff7a28385b46
Rust
oldtree2008/met-io-rs
/src/app/monitor_config.rs
UTF-8
747
2.6875
3
[]
no_license
use crate::MetError; use serde::*; use serde_json; use std::fs::File; use std::io::{BufReader, Read}; /// 监控的配置信息。多个目录,一个目标 #[derive(Debug, Serialize, Deserialize)] pub struct MonitorConfig { pub source: Vec<Source>, pub destination: Destination, } #[derive(Debug, Serialize, Deserialize)] pub struct Source { pub data_type: Option<String>, pub path: String, } #[derive(Debug, Serialize, Deserialize)] pub struct Destination { pub data_type: Vec<String>, pub path: String, } pub fn get_config(fname: &str) -> Result<MonitorConfig, MetError> { let file = File::open(fname)?; let reader = BufReader::new(&file); let config = serde_json::from_reader(reader)?; Ok(config) }
true
31a5f35a3859610bb6d77cfb2fa49ed3e87c9da7
Rust
pjl123/aoc_2020
/day7/main.rs
UTF-8
4,498
3.375
3
[]
no_license
use std::fs; use std::collections::HashMap; use regex::Regex; #[derive(Clone)] enum Bag { Leaf, Container (ContainerDef, ContainerDef, ContainerDef, ContainerDef) } #[derive(Clone, Debug)] struct ContainerDef { content_key: String, quantity: usize } #[derive(Debug)] struct BagNode { key: String, children: Vec<BagNode> } fn main() { let contents: String = fs::read_to_string("C:\\Users\\lafat\\Documents\\projects\\advent_of_code\\day_7\\handy_haversacks\\input.txt") .expect("Something went wrong reading the file"); let entries: Vec<(String, Bag)> = contents.split("\r\n") .map(|line| get_entry(line)) .collect(); let mut def_map = HashMap::new(); for entry in entries { def_map.insert(entry.0, entry.1.clone()); } let count_gold = count_bag_children(&def_map["shiny gold"], &def_map) - 1; println!("Count of gold: {}", count_gold); } fn get_entry(line: &str) -> (String, Bag) { let line_split: Vec<&str> = line.split(" bags contain ").collect(); let key = line_split[0]; let mut container_defs = Vec::with_capacity(4); let container_regex = Regex::new(r"\b(\d*? ?[a-z]+? 
[a-z]+?)\b bag").unwrap(); for cap in container_regex.captures_iter(line_split[1]) { container_defs.push(get_container_def(cap[1].trim())) } if container_defs[0].quantity == 0 { return (String::from(key), Bag::Leaf); } return (String::from(key), Bag::Container ( ContainerDef{ content_key: String::from(container_defs[0].content_key.as_str()), quantity: container_defs[0].quantity }, if container_defs.len() > 1 { ContainerDef{ content_key: String::from(container_defs[1].content_key.as_str()), quantity: container_defs[1].quantity } } else { ContainerDef { content_key: String::from(""), quantity: 0 } }, if container_defs.len() > 2 { ContainerDef{ content_key: String::from(container_defs[2].content_key.as_str()), quantity: container_defs[2].quantity } } else { ContainerDef { content_key: String::from(""), quantity: 0 } }, if container_defs.len() > 3 { ContainerDef{ content_key: String::from(container_defs[3].content_key.as_str()), quantity: container_defs[3].quantity } } else { ContainerDef { content_key: String::from(""), quantity: 0 } } )) } fn get_container_def(container_str: &str) -> ContainerDef { let content_key_regex = Regex::new(r"\b[a-z]+? 
[a-z]+?\b").unwrap(); let content_key = content_key_regex.find(container_str).unwrap().as_str(); let quantity: usize; if content_key == "no other" { quantity = 0; } else { let quantity_regex = Regex::new(r"\d+").unwrap(); quantity = quantity_regex.find(container_str).unwrap().as_str().parse().unwrap(); } return ContainerDef { content_key: String::from(content_key), quantity: quantity } } fn count_bag_children(bag: &Bag, bag_defs: &HashMap<String, Bag>) -> usize { match bag { Bag::Leaf => return 0, Bag::Container (bag_1, bag_2, bag_3, bag_4) => return get_bag_counts(bag_1, bag_2, bag_3, bag_4, bag_defs) } } fn get_bag_counts(bag_1: &ContainerDef, bag_2: &ContainerDef, bag_3: &ContainerDef, bag_4: &ContainerDef, bag_defs: &HashMap<String, Bag>) -> usize { return 1 + get_bag_count(bag_1, bag_defs) + get_bag_count(bag_2, bag_defs) + get_bag_count(bag_3, bag_defs) + get_bag_count(bag_4, bag_defs); } fn get_bag_count(bag: &ContainerDef, bag_defs: &HashMap<String, Bag>) -> usize{ print!("getting count for: {:?}", bag); if bag.quantity == 0 { return 0; } let bag_child_count = count_bag_children(&bag_defs[&bag.content_key], bag_defs); if bag_child_count == 0 { println!("...{}", bag.quantity); return bag.quantity; } println!("...{}", bag.quantity * bag_child_count); return bag.quantity * bag_child_count; }
true
c0248ff3593bd3b9cbf5f84b5f22f638589c0e92
Rust
solana-labs/solana
/core/src/banking_stage/immutable_deserialized_packet.rs
UTF-8
5,185
2.6875
3
[ "Apache-2.0" ]
permissive
use { solana_perf::packet::Packet, solana_runtime::transaction_priority_details::{ GetTransactionPriorityDetails, TransactionPriorityDetails, }, solana_sdk::{ feature_set, hash::Hash, message::Message, sanitize::SanitizeError, short_vec::decode_shortu16_len, signature::Signature, transaction::{ AddressLoader, SanitizedTransaction, SanitizedVersionedTransaction, VersionedTransaction, }, }, std::{cmp::Ordering, mem::size_of, sync::Arc}, thiserror::Error, }; #[derive(Debug, Error)] pub enum DeserializedPacketError { #[error("ShortVec Failed to Deserialize")] // short_vec::decode_shortu16_len() currently returns () on error ShortVecError(()), #[error("Deserialization Error: {0}")] DeserializationError(#[from] bincode::Error), #[error("overflowed on signature size {0}")] SignatureOverflowed(usize), #[error("packet failed sanitization {0}")] SanitizeError(#[from] SanitizeError), #[error("transaction failed prioritization")] PrioritizationFailure, #[error("vote transaction failure")] VoteTransactionError, } #[derive(Debug, PartialEq, Eq)] pub struct ImmutableDeserializedPacket { original_packet: Packet, transaction: SanitizedVersionedTransaction, message_hash: Hash, is_simple_vote: bool, priority_details: TransactionPriorityDetails, } impl ImmutableDeserializedPacket { pub fn new(packet: Packet) -> Result<Self, DeserializedPacketError> { let versioned_transaction: VersionedTransaction = packet.deserialize_slice(..)?; let sanitized_transaction = SanitizedVersionedTransaction::try_from(versioned_transaction)?; let message_bytes = packet_message(&packet)?; let message_hash = Message::hash_raw_message(message_bytes); let is_simple_vote = packet.meta().is_simple_vote_tx(); // drop transaction if prioritization fails. 
let mut priority_details = sanitized_transaction .get_transaction_priority_details(packet.meta().round_compute_unit_price()) .ok_or(DeserializedPacketError::PrioritizationFailure)?; // set priority to zero for vote transactions if is_simple_vote { priority_details.priority = 0; }; Ok(Self { original_packet: packet, transaction: sanitized_transaction, message_hash, is_simple_vote, priority_details, }) } pub fn original_packet(&self) -> &Packet { &self.original_packet } pub fn transaction(&self) -> &SanitizedVersionedTransaction { &self.transaction } pub fn message_hash(&self) -> &Hash { &self.message_hash } pub fn is_simple_vote(&self) -> bool { self.is_simple_vote } pub fn priority(&self) -> u64 { self.priority_details.priority } pub fn compute_unit_limit(&self) -> u64 { self.priority_details.compute_unit_limit } // This function deserializes packets into transactions, computes the blake3 hash of transaction // messages, and verifies secp256k1 instructions. pub fn build_sanitized_transaction( &self, feature_set: &Arc<feature_set::FeatureSet>, votes_only: bool, address_loader: impl AddressLoader, ) -> Option<SanitizedTransaction> { if votes_only && !self.is_simple_vote() { return None; } let tx = SanitizedTransaction::try_new( self.transaction().clone(), *self.message_hash(), self.is_simple_vote(), address_loader, ) .ok()?; tx.verify_precompiles(feature_set).ok()?; Some(tx) } } impl PartialOrd for ImmutableDeserializedPacket { fn partial_cmp(&self, other: &Self) -> Option<Ordering> { Some(self.cmp(other)) } } impl Ord for ImmutableDeserializedPacket { fn cmp(&self, other: &Self) -> Ordering { self.priority().cmp(&other.priority()) } } /// Read the transaction message from packet data fn packet_message(packet: &Packet) -> Result<&[u8], DeserializedPacketError> { let (sig_len, sig_size) = packet .data(..) 
.and_then(|bytes| decode_shortu16_len(bytes).ok()) .ok_or(DeserializedPacketError::ShortVecError(()))?; sig_len .checked_mul(size_of::<Signature>()) .and_then(|v| v.checked_add(sig_size)) .and_then(|msg_start| packet.data(msg_start..)) .ok_or(DeserializedPacketError::SignatureOverflowed(sig_size)) } #[cfg(test)] mod tests { use { super::*, solana_sdk::{signature::Keypair, system_transaction}, }; #[test] fn simple_deserialized_packet() { let tx = system_transaction::transfer( &Keypair::new(), &solana_sdk::pubkey::new_rand(), 1, Hash::new_unique(), ); let packet = Packet::from_data(None, tx).unwrap(); let deserialized_packet = ImmutableDeserializedPacket::new(packet); assert!(deserialized_packet.is_ok()); } }
true
345b4fe905c7fcbf91cddcf2bd7541a4a2865101
Rust
ExPixel/argon2
/src/lib.rs
UTF-8
31,284
2.828125
3
[ "Apache-2.0" ]
permissive
//! Thin wrapper for the Argon2 C library. //! All public argon2 functions are mapped to functions with the `argon2` prefix //! and any leftover underscores after the prefix removed. //! e.g. `argon2_ctx` -> `ctx` and `argon2i_ctx` -> `i_ctx` #[allow(bad_style, dead_code)] mod sys; mod types; use std::convert::TryInto; use std::ffi::CStr; use types::{opt_slice_ptr_mut, opt_slice_len, opt_slice_ptr}; pub use self::types::*; /// Function that gives the string representation of an argon2 Variant. /// If the `uppercase` parameter is true, the name of the variant is returned with the first letter /// uppercased. pub fn type2string(variant: Variant, uppercase: bool) -> &'static str { unsafe { let uppercase_i = if uppercase { 1 } else { 0 }; let str_ptr = sys::argon2_type2string(variant.to_c(), uppercase_i); assert!(!str_ptr.is_null(), "null variant name."); let str_cstr = CStr::from_ptr(str_ptr); str_cstr.to_str().expect("Variant name is not valid UTF-8") } } /// Function that performs memory-hard hashing with certain degree of parallelism. pub fn ctx<C: TryInto<sys::Argon2_Context, Error = self::Error>>(context: C, variant: Variant) -> Result<(), Error> { unsafe { Error::check_code(sys::argon2_ctx(&mut context.try_into()?, variant.to_c()) as _) } } /// Argon2d: Version of Argon2 that picks memory blocks depending on the password and salt. Only /// for side-channel-free environment!! pub fn d_ctx<C: TryInto<sys::Argon2_Context, Error = self::Error>>(context: C) -> Result<(), Error> { unsafe { Error::check_code(sys::argon2d_ctx(&mut context.try_into()?)) } } /// Argon2i: Version of Argon2 that picks memory blocks /// independent on the password and salt. Good for side-channels, /// but worse with respect to tradeoff attacks if only one pass is used. 
pub fn i_ctx<C: TryInto<sys::Argon2_Context, Error = self::Error>>(context: C) -> Result<(), Error> { unsafe { Error::check_code(sys::argon2i_ctx(&mut context.try_into()?)) } } /// Argon2id: Version of Argon2 where the first half-pass over memory is /// password-independent, the rest are password-dependent (on the password and /// salt). OK against side channels (they reduce to 1/2-pass Argon2i), and /// better with respect to tradeoff attacks (similar to Argon2d). pub fn id_ctx<C: TryInto<sys::Argon2_Context, Error = self::Error>>(context: C) -> Result<(), Error> { unsafe { Error::check_code(sys::argon2id_ctx(&mut context.try_into()?)) } } /// Hashes a password with Argon2i, producing an encoded (string) hash. /// /// # Parameters /// - `t_cost`: Number of iterations /// - `m_cost`: Sets memory usage to m_cost kibibytes /// - `parallelism`: Number of threads and compute lanes /// - `pwd`: Slice containing the password. /// - `salt`: Slice containing the salt. /// - `hashlen`: Desired length of the hash in bytes. /// - `encoded`: Buffer where to write the encoded hash. /// /// # Notes /// /// - The different parallelism levels will give different results. pub fn i_hash_encoded( t_cost: u32, m_cost: u32, parallelism: u32, pwd: Option<&[u8]>, salt: Option<&[u8]>, hashlen: usize, encoded: &mut [u8]) -> Result<(), Error> { unsafe { Error::check_code( sys::argon2i_hash_encoded( t_cost, m_cost, parallelism, opt_slice_ptr(&pwd) as _, opt_slice_len(&pwd), opt_slice_ptr(&salt) as _, opt_slice_len(&salt), hashlen, encoded.as_mut_ptr() as _, encoded.len(), ) ) } } /// Hashes a password with Argon2i, producing a raw hash. /// /// # Parameters /// - `t_cost`: Number of iterations /// - `m_cost`: Sets memory usage to m_cost kibibytes /// - `parallelism`: Number of threads and compute lanes /// - `pwd`: Slice containing the password. /// - `salt`: Slice containing the salt. /// - `hash`: Buffer where to write the raw hash. 
/// /// # Notes /// /// - The different parallelism levels will give different results. pub fn i_hash_raw( t_cost: u32, m_cost: u32, parallelism: u32, pwd: Option<&[u8]>, salt: Option<&[u8]>, hash: &mut [u8]) -> Result<(), Error> { unsafe { Error::check_code( sys::argon2i_hash_raw( t_cost, m_cost, parallelism, opt_slice_ptr(&pwd) as _, opt_slice_len(&pwd), opt_slice_ptr(&salt) as _, opt_slice_len(&salt), hash.as_mut_ptr() as _, hash.len(), ) ) } } /// Hashes a password with Argon2d, producing an encoded (string) hash. /// /// # Parameters /// - `t_cost`: Number of iterations /// - `m_cost`: Sets memory usage to m_cost kibibytes /// - `parallelism`: Number of threads and compute lanes /// - `pwd`: Slice containing the password. /// - `salt`: Slice containing the salt. /// - `hashlen`: Desired length of the hash in bytes. /// - `encoded`: Buffer where to write the encoded hash. /// /// # Notes /// /// - The different parallelism levels will give different results. pub fn d_hash_encoded( t_cost: u32, m_cost: u32, parallelism: u32, pwd: Option<&[u8]>, salt: Option<&[u8]>, hashlen: usize, encoded: &mut [u8]) -> Result<(), Error> { unsafe { Error::check_code( sys::argon2d_hash_encoded( t_cost, m_cost, parallelism, opt_slice_ptr(&pwd) as _, opt_slice_len(&pwd), opt_slice_ptr(&salt) as _, opt_slice_len(&salt), hashlen, encoded.as_mut_ptr() as _, encoded.len(), ) ) } } /// Hashes a password with Argon2d, producing a raw hash. /// /// # Parameters /// - `t_cost`: Number of iterations /// - `m_cost`: Sets memory usage to m_cost kibibytes /// - `parallelism`: Number of threads and compute lanes /// - `pwd`: Slice containing the password. /// - `salt`: Slice containing the salt. /// - `hash`: Buffer where to write the raw hash. /// /// # Notes /// /// - The different parallelism levels will give different results. 
pub fn d_hash_raw( t_cost: u32, m_cost: u32, parallelism: u32, pwd: Option<&[u8]>, salt: Option<&[u8]>, hash: &mut [u8]) -> Result<(), Error> { unsafe { Error::check_code( sys::argon2d_hash_raw( t_cost, m_cost, parallelism, opt_slice_ptr(&pwd) as _, opt_slice_len(&pwd), opt_slice_ptr(&salt) as _, opt_slice_len(&salt), hash.as_mut_ptr() as _, hash.len(), ) ) } } /// Hashes a password with Argon2id, producing an encoded (string) hash. /// /// # Parameters /// - `t_cost`: Number of iterations /// - `m_cost`: Sets memory usage to m_cost kibibytes /// - `parallelism`: Number of threads and compute lanes /// - `pwd`: Slice containing the password. /// - `salt`: Slice containing the salt. /// - `hashlen`: Desired length of the hash in bytes. /// - `encoded`: Buffer where to write the encoded hash. /// /// # Notes /// /// - The different parallelism levels will give different results. pub fn id_hash_encoded( t_cost: u32, m_cost: u32, parallelism: u32, pwd: Option<&[u8]>, salt: Option<&[u8]>, hashlen: usize, encoded: &mut [u8]) -> Result<(), Error> { unsafe { Error::check_code( sys::argon2id_hash_encoded( t_cost, m_cost, parallelism, opt_slice_ptr(&pwd) as _, opt_slice_len(&pwd), opt_slice_ptr(&salt) as _, opt_slice_len(&salt), hashlen, encoded.as_mut_ptr() as _, encoded.len(), ) ) } } /// Hashes a password with Argon2id, producing a raw hash. /// /// # Parameters /// - `t_cost`: Number of iterations /// - `m_cost`: Sets memory usage to m_cost kibibytes /// - `parallelism`: Number of threads and compute lanes /// - `pwd`: Slice containing the password. /// - `salt`: Slice containing the salt. /// - `hash`: Buffer where to write the raw hash. /// /// # Notes /// /// - The different parallelism levels will give different results. 
pub fn id_hash_raw( t_cost: u32, m_cost: u32, parallelism: u32, pwd: Option<&[u8]>, salt: Option<&[u8]>, hash: &mut [u8]) -> Result<(), Error> { unsafe { Error::check_code( sys::argon2id_hash_raw( t_cost, m_cost, parallelism, opt_slice_ptr(&pwd) as _, opt_slice_len(&pwd), opt_slice_ptr(&salt) as _, opt_slice_len(&salt), hash.as_mut_ptr() as _, hash.len(), ) ) } } /// Generic Argon2 hash function. /// /// # Parameters /// - `t_cost`: Number of iterations /// - `m_cost`: Sets memory usage to m_cost kibibytes /// - `parallelism`: Number of threads and compute lanes /// - `pwd`: Slice containing the password. /// - `salt`: Slice containing the salt. /// - `hash`: Buffer where to write the raw hash. /// - `encoded`: Buffer where to write the encoded hash (as a string). /// - `variant`: The variant (type) of Argon2 to use. /// - `version`: The version of the Argon2 algorithm to use. /// /// # Notes /// /// - The different parallelism levels will give different results. pub fn hash( t_cost: u32, m_cost: u32, parallelism: u32, pwd: Option<&[u8]>, salt: Option<&[u8]>, mut hash: Option<&mut [u8]>, mut encoded: Option<&mut [u8]>, variant: Variant, version: Version) -> Result<(), Error> { unsafe { Error::check_code( sys::argon2_hash( t_cost, m_cost, parallelism, opt_slice_ptr(&pwd) as _, opt_slice_len(&pwd), opt_slice_ptr(&salt) as _, opt_slice_len(&salt), opt_slice_ptr_mut(&mut hash) as _, opt_slice_len(&hash), opt_slice_ptr_mut(&mut encoded) as _, opt_slice_len(&encoded), variant.to_c() as _, version.to_c() as _, ) ) } } /// Verifies a password against an encoded string using Argon2i. /// /// # Parameters /// - `encoded`: String encoding parameters, salt, hash. /// - `pwd`: Slice containing password. pub fn i_verify(encoded: &CStr, pwd: Option<&[u8]>) -> Result<(), Error> { unsafe { Error::check_code( sys::argon2i_verify( encoded.as_ptr() as _, opt_slice_ptr(&pwd) as _, opt_slice_len(&pwd), ) ) } } /// Verifies a password against an encoded string using Argon2d. 
/// /// # Parameters /// - `encoded`: String encoding parameters, salt, hash. /// - `pwd`: Slice containing password. pub fn d_verify(encoded: &CStr, pwd: Option<&[u8]>) -> Result<(), Error> { unsafe { Error::check_code( sys::argon2d_verify( encoded.as_ptr() as _, opt_slice_ptr(&pwd) as _, opt_slice_len(&pwd), ) ) } } /// Verifies a password against an encoded string using Argon2id. /// /// # Parameters /// - `encoded`: String encoding parameters, salt, hash. /// - `pwd`: Slice containing password. pub fn id_verify(encoded: &CStr, pwd: Option<&[u8]>) -> Result<(), Error> { unsafe { Error::check_code( sys::argon2id_verify( encoded.as_ptr() as _, opt_slice_ptr(&pwd) as _, opt_slice_len(&pwd), ) ) } } /// Verifies a password against an encoded string. /// /// # Parameters /// - `encoded`: String encoding parameters, salt, hash. /// - `pwd`: Slice containing password. pub fn verify(encoded: &CStr, pwd: Option<&[u8]>, variant: Variant) -> Result<(), Error> { unsafe { Error::check_code( sys::argon2_verify( encoded.as_ptr() as _, opt_slice_ptr(&pwd) as _, opt_slice_len(&pwd), variant.to_c() as _, ) ) } } /// Verify if a given password is correct for Argon2d hashing. /// /// # Parameters /// /// - `context`: The current Argon2 context. /// - `hash`: The password hash to verify. The length of the hash must match the length of the out /// parameter in context. pub fn d_verify_ctx<C: TryInto<sys::Argon2_Context, Error = self::Error>>(context: C, hash: &[u8]) -> Result<(), Error> { let mut argon_context = context.try_into()?; if hash.len() as u32 != argon_context.outlen { return Err(Error::BadParam("hash.len")) } unsafe { Error::check_code( sys::argon2d_verify_ctx( &mut argon_context, hash.as_ptr() as _, ) ) } } /// Verify if a given password is correct for Argon2i hashing. /// /// # Parameters /// /// - `context`: The current Argon2 context. /// - `hash`: The password hash to verify. The length of the hash must match the length of the out /// parameter in context. 
pub fn i_verify_ctx<C: TryInto<sys::Argon2_Context, Error = self::Error>>(context: C, hash: &[u8]) -> Result<(), Error> { let mut argon_context = context.try_into()?; if hash.len() as u32 != argon_context.outlen { return Err(Error::BadParam("hash.len")) } unsafe { Error::check_code( sys::argon2i_verify_ctx( &mut argon_context, hash.as_ptr() as _, ) ) } } /// Verify if a given password is correct for Argon2id hashing. /// /// # Parameters /// /// - `context`: The current Argon2 context. /// - `hash`: The password hash to verify. The length of the hash must match the length of the out /// parameter in context. pub fn id_verify_ctx<C: TryInto<sys::Argon2_Context, Error = self::Error>>(context: C, hash: &[u8]) -> Result<(), Error> { let mut argon_context = context.try_into()?; if hash.len() as u32 != argon_context.outlen { return Err(Error::BadParam("hash.len")) } unsafe { Error::check_code( sys::argon2id_verify_ctx( &mut argon_context, hash.as_ptr() as _, ) ) } } /// Verify if a given password is correct for a given variant of Argon2 hashing. /// /// # Parameters /// /// - `context`: The current Argon2 context. /// - `hash`: The password hash to verify. The length of the hash must match the length of the out /// parameter in context. pub fn verify_ctx<C: TryInto<sys::Argon2_Context, Error = self::Error>>(context: C, hash: &[u8], variant: Variant) -> Result<(), Error> { let mut argon_context = context.try_into()?; if hash.len() as u32 != argon_context.outlen { return Err(Error::BadParam("hash.len")) } unsafe { Error::check_code( sys::argon2_verify_ctx( &mut argon_context, hash.as_ptr() as _, variant.to_c() as _, ) ) } } /// Get the associated error message for a given error code. 
pub fn error_message(code: ErrorCode) -> &'static str { unsafe { let str_ptr = sys::argon2_error_message(code.to_c()); if str_ptr.is_null() { "UNKNOWN_ERROR_CODE" } else { let str_cstr = CStr::from_ptr(str_ptr); str_cstr.to_str().expect("Variant name is not valid UTF-8") } } } /// Returns the encoded hash length for the given input parameters. /// /// # Parameters /// `t_cost`: Number of iterations. /// `m_cost`: Memory usage in kibibytes. /// `parallelism`: Number of threads; used to compute lanes. /// `saltlen`: Salt size in bytes. /// `hashlen`: Hash size in bytes. /// `variant`: The Argon2 Variant that we want the encoded length for. /// /// # Returns /// /// The encoded hash length in bytes. pub fn encodedlen( t_cost: u32, m_cost: u32, parallelism: u32, saltlen: u32, hashlen: u32, variant: Variant) -> usize { unsafe { sys::argon2_encodedlen(t_cost, m_cost, parallelism, saltlen, hashlen, variant.to_c() as _) } } /// Converts a slice of bytes to a CStr. /// Unlike CStr::from_bytes_with_nul this will stop at the first /// null byte instead of returning an error for interior null bytes. /// This will return an error if there are no null bytes at all. pub fn c_str(bytes: &[u8]) -> Result<&CStr, Error> { for (idx, b) in bytes.iter().enumerate() { if *b == 0 { return Ok(CStr::from_bytes_with_nul(&bytes[0..(idx + 1)]).expect("Failed CStr conversion.")); } } Err(Error::BadParam("bytes")) } /// Converts a slice of bytes to a CStr much like `c_str` except this will allocate a C string for /// you instead with a terminating null byte if one cannot be found inside of the given byte /// string. 
pub fn c_str_cow<'a>(bytes: &'a [u8]) -> std::borrow::Cow<'a, CStr> { for (idx, b) in bytes.iter().enumerate() { if *b == 0 { return std::borrow::Cow::Borrowed( CStr::from_bytes_with_nul(&bytes[0..(idx + 1)]) .expect("Failed CStr conversion.") ); } } std::borrow::Cow::Owned( std::ffi::CString::new(bytes).expect("Failed to create CString.") ) } #[cfg(test)] mod test { use super::*; /// Make sure that all variants have names. #[test] fn test_variant_names() { assert_eq!("argon2i", type2string(Variant::I, false)); assert_eq!("Argon2i", type2string(Variant::I, true)); assert_eq!("argon2d", type2string(Variant::D, false)); assert_eq!("Argon2d", type2string(Variant::D, true)); assert_eq!("argon2id", type2string(Variant::ID, false)); assert_eq!("Argon2id", type2string(Variant::ID, true)); } fn hex_conv(bytes: &[u8], hex_dest: &mut [u8]) { const DIGITS: &[u8] = b"0123456789abcdef"; for (idx, byte) in bytes.iter().enumerate() { hex_dest[(idx * 2)] = DIGITS[((*byte >> 4) as usize) & 0xF]; hex_dest[(idx * 2) + 1] = DIGITS[(*byte as usize) & 0xF]; } } fn str_conv(bytes: &[u8]) -> &str { std::str::from_utf8(bytes).expect("Bad UTF-8 conversion.") } fn tovec(a: &[u8]) -> Vec<u8> { let mut v = Vec::with_capacity(a.len()); v.extend_from_slice(a); return v } fn hashtest_bytes(version: Version, t: u32, m: u32, p: u32, pwd: &mut [u8], salt: &mut [u8], hexref: &mut [u8], mcfref: &mut [u8], variant: Variant) { const OUTLEN: usize = 32; const ENCODED_LEN: usize = 108; let mut out = [0u8; OUTLEN]; let mut hex_out = [0u8; OUTLEN * 2 + 4]; let mut encoded = [0u8; ENCODED_LEN]; println!("HASH TEST: $v={:?} t={}, m={}, p = {}, pass={}, salt={}", version, t, m, p, unsafe { std::str::from_utf8_unchecked(pwd) }, unsafe { std::str::from_utf8_unchecked(salt) },); hash(t, 1<<m, p, Some(pwd), Some(salt), Some(&mut out), Some(&mut encoded), variant, version).expect("Test hash failed."); hex_conv(&out, &mut hex_out); assert_eq!(str_conv(hexref), str_conv(&hex_out[0..(OUTLEN * 2)])); verify( 
c_str(&encoded).expect("bad C string."), Some(pwd), variant ).expect("Failed verify-1"); verify( &c_str_cow(&mcfref), Some(pwd), variant ).expect("Failed verify-1"); } fn hashtest(version: Version, t: u32, m: u32, p: u32, pwd: &str, salt: &str, hexref: &str, mcfref: &str, variant: Variant) { hashtest_bytes( version, t, m, p, &mut tovec(pwd.as_bytes()), &mut tovec(salt.as_bytes()), &mut tovec(hexref.as_bytes()), &mut tovec(mcfref.as_bytes()), variant); } macro_rules! check_error_code { ($Code:ident, $Value:expr) => { assert_eq!(Err(Error::Code(ErrorCode::$Code)), $Value) } } #[test] fn test_argon2i_0x10() { println!("Test Argon2i version number: 0x{:02X}", (Version::Version10).to_int()); hashtest(Version::Version10, 2, 16, 1, "password", "somesalt", "f6c4db4a54e2a370627aff3db6176b94a2a209a62c8e36152711802f7b30c694", "$argon2i$m=65536,t=2,p=1$c29tZXNhbHQ$9sTbSlTio3Biev89thdrlKKiCaYsjjYVJxGAL3swxpQ", Variant::I); hashtest(Version::Version10, 2, 18, 1, "password", "somesalt", "3e689aaa3d28a77cf2bc72a51ac53166761751182f1ee292e3f677a7da4c2467", "$argon2i$m=262144,t=2,p=1$c29tZXNhbHQ$Pmiaqj0op3zyvHKlGsUxZnYXURgvHuKS4/Z3p9pMJGc", Variant::I); hashtest(Version::Version10, 2, 8, 1, "password", "somesalt", "fd4dd83d762c49bdeaf57c47bdcd0c2f1babf863fdeb490df63ede9975fccf06", "$argon2i$m=256,t=2,p=1$c29tZXNhbHQ$/U3YPXYsSb3q9XxHvc0MLxur+GP960kN9j7emXX8zwY", Variant::I); hashtest(Version::Version10, 2, 8, 2, "password", "somesalt", "b6c11560a6a9d61eac706b79a2f97d68b4463aa3ad87e00c07e2b01e90c564fb", "$argon2i$m=256,t=2,p=2$c29tZXNhbHQ$tsEVYKap1h6scGt5ovl9aLRGOqOth+AMB+KwHpDFZPs", Variant::I); hashtest(Version::Version10, 1, 16, 1, "password", "somesalt", "81630552b8f3b1f48cdb1992c4c678643d490b2b5eb4ff6c4b3438b5621724b2", "$argon2i$m=65536,t=1,p=1$c29tZXNhbHQ$gWMFUrjzsfSM2xmSxMZ4ZD1JCytetP9sSzQ4tWIXJLI", Variant::I); hashtest(Version::Version10, 4, 16, 1, "password", "somesalt", "f212f01615e6eb5d74734dc3ef40ade2d51d052468d8c69440a3a1f2c1c2847b", 
"$argon2i$m=65536,t=4,p=1$c29tZXNhbHQ$8hLwFhXm6110c03D70Ct4tUdBSRo2MaUQKOh8sHChHs", Variant::I); hashtest(Version::Version10, 2, 16, 1, "differentpassword", "somesalt", "e9c902074b6754531a3a0be519e5baf404b30ce69b3f01ac3bf21229960109a3", "$argon2i$m=65536,t=2,p=1$c29tZXNhbHQ$6ckCB0tnVFMaOgvlGeW69ASzDOabPwGsO/ISKZYBCaM", Variant::I); hashtest(Version::Version10, 2, 16, 1, "password", "diffsalt", "79a103b90fe8aef8570cb31fc8b22259778916f8336b7bdac3892569d4f1c497", "$argon2i$m=65536,t=2,p=1$ZGlmZnNhbHQ$eaEDuQ/orvhXDLMfyLIiWXeJFvgza3vaw4kladTxxJc", Variant::I); } #[test] #[ignore] fn test_argon2i_0x10_large_ram() { hashtest(Version::Version10, 2, 20, 1, "password", "somesalt", "9690ec55d28d3ed32562f2e73ea62b02b018757643a2ae6e79528459de8106e9", "$argon2i$m=1048576,t=2,p=1$c29tZXNhbHQ$lpDsVdKNPtMlYvLnPqYrArAYdXZDoq5ueVKEWd6BBuk", Variant::I); } #[test] fn test_argon2i_0x10_errors() { // Handle an invalid encoding correctly (it is missing a $) check_error_code!(DecodingFail, verify(&c_str_cow(b"$argon2i$m=65536,t=2,p=1c29tZXNhbHQ$9sTbSlTio3Biev89thdrlKKiCaYsjjYVJxGAL3swxpQ"), Some(b"password"), Variant::I)); // Handle an invalid encoding correctly (it is missing a $) check_error_code!(DecodingFail, verify(&c_str_cow(b"$argon2i$m=65536,t=2,p=1$c29tZXNhbHQ9sTbSlTio3Biev89thdrlKKiCaYsjjYVJxGAL3swxpQ"), Some(b"password"), Variant::I)); // Handle an invalid encoding correctly (salt is too short) check_error_code!(SaltTooShort, verify(&c_str_cow(b"$argon2i$m=65536,t=2,p=1$$9sTbSlTio3Biev89thdrlKKiCaYsjjYVJxGAL3swxpQ"), Some(b"password"), Variant::I)); // Handle an invalid encoding correctly (the encoded password is "passwore") check_error_code!(VerifyMismatch, verify(&c_str_cow(b"$argon2i$m=65536,t=2,p=1$c29tZXNhbHQ$b2G3seW+uPzerwQQC+/E1K50CLLO7YXy0JRcaTuswRo"), Some(b"password"), Variant::I)); } #[test] fn test_argon2i_0x13() { println!("Test Argon2i version number: 0x{:02X}", (Version::Version13).to_int()); hashtest(Version::Version13, 2, 16, 1, "password", "somesalt", 
"c1628832147d9720c5bd1cfd61367078729f6dfb6f8fea9ff98158e0d7816ed0", "$argon2i$v=19$m=65536,t=2,p=1$c29tZXNhbHQ$wWKIMhR9lyDFvRz9YTZweHKfbftvj+qf+YFY4NeBbtA", Variant::I); hashtest(Version::Version13, 2, 18, 1, "password", "somesalt", "296dbae80b807cdceaad44ae741b506f14db0959267b183b118f9b24229bc7cb", "$argon2i$v=19$m=262144,t=2,p=1$c29tZXNhbHQ$KW266AuAfNzqrUSudBtQbxTbCVkmexg7EY+bJCKbx8s", Variant::I); hashtest(Version::Version13, 2, 8, 1, "password", "somesalt", "89e9029f4637b295beb027056a7336c414fadd43f6b208645281cb214a56452f", "$argon2i$v=19$m=256,t=2,p=1$c29tZXNhbHQ$iekCn0Y3spW+sCcFanM2xBT63UP2sghkUoHLIUpWRS8", Variant::I); hashtest(Version::Version13, 2, 8, 2, "password", "somesalt", "4ff5ce2769a1d7f4c8a491df09d41a9fbe90e5eb02155a13e4c01e20cd4eab61", "$argon2i$v=19$m=256,t=2,p=2$c29tZXNhbHQ$T/XOJ2mh1/TIpJHfCdQan76Q5esCFVoT5MAeIM1Oq2E", Variant::I); hashtest(Version::Version13, 1, 16, 1, "password", "somesalt", "d168075c4d985e13ebeae560cf8b94c3b5d8a16c51916b6f4ac2da3ac11bbecf", "$argon2i$v=19$m=65536,t=1,p=1$c29tZXNhbHQ$0WgHXE2YXhPr6uVgz4uUw7XYoWxRkWtvSsLaOsEbvs8", Variant::I); hashtest(Version::Version13, 4, 16, 1, "password", "somesalt", "aaa953d58af3706ce3df1aefd4a64a84e31d7f54175231f1285259f88174ce5b", "$argon2i$v=19$m=65536,t=4,p=1$c29tZXNhbHQ$qqlT1YrzcGzj3xrv1KZKhOMdf1QXUjHxKFJZ+IF0zls", Variant::I); hashtest(Version::Version13, 2, 16, 1, "differentpassword", "somesalt", "14ae8da01afea8700c2358dcef7c5358d9021282bd88663a4562f59fb74d22ee", "$argon2i$v=19$m=65536,t=2,p=1$c29tZXNhbHQ$FK6NoBr+qHAMI1jc73xTWNkCEoK9iGY6RWL1n7dNIu4", Variant::I); hashtest(Version::Version13, 2, 16, 1, "password", "diffsalt", "b0357cccfbef91f3860b0dba447b2348cbefecadaf990abfe9cc40726c521271", "$argon2i$v=19$m=65536,t=2,p=1$ZGlmZnNhbHQ$sDV8zPvvkfOGCw26RHsjSMvv7K2vmQq/6cxAcmxSEnE", Variant::I); } #[test] #[ignore] pub fn test_argon2i_0x13_large_ram() { hashtest(Version::Version13, 2, 20, 1, "password", "somesalt", "d1587aca0922c3b5d6a83edab31bee3c4ebaef342ed6127a55d19b2351ad1f41", 
"$argon2i$v=19$m=1048576,t=2,p=1$c29tZXNhbHQ$0Vh6ygkiw7XWqD7asxvuPE667zQu1hJ6VdGbI1GtH0E", Variant::I); } #[test] fn test_argon2i_0x13_errors() { // Handle an invalid encoding correctly (it is missing a $) check_error_code!(DecodingFail, verify( &c_str_cow(b"$argon2i$v=19$m=65536,t=2,p=1$c29tZXNhbHQwWKIMhR9lyDFvRz9YTZweHKfbftvj+qf+YFY4NeBbtA"), Some(b"password"), Variant::I)); // Handle an invalid encoding correctly (it is missing a $) check_error_code!(DecodingFail, verify( &c_str_cow(b"$argon2i$v=19$m=65536,t=2,p=1$c29tZXNhbHQwWKIMhR9lyDFvRz9YTZweHKfbftvj+qf+YFY4NeBbtA"), Some(b"password"), Variant::I)); // Handle an invalid encoding correctly (salt is too short) check_error_code!(SaltTooShort, verify( &c_str_cow(b"$argon2i$v=19$m=65536,t=2,p=1$$9sTbSlTio3Biev89thdrlKKiCaYsjjYVJxGAL3swxpQ"), Some(b"password"), Variant::I)); // Handle an invalid encoding correctly (the encoded password is "passwore") check_error_code!(VerifyMismatch, verify( &c_str_cow(b"$argon2i$v=19$m=65536,t=2,p=1$c29tZXNhbHQ$8iIuixkI73Js3G1uMbezQXD0b8LG4SXGsOwoQkdAQIM"), Some(b"password"), Variant::I)); } #[test] fn test_argon2id_0x13() { println!("Test Argon2id version number: 0x{:02X}", (Version::Version13).to_int()); hashtest(Version::Version13, 2, 16, 1, "password", "somesalt", "09316115d5cf24ed5a15a31a3ba326e5cf32edc24702987c02b6566f61913cf7", "$argon2id$v=19$m=65536,t=2,p=1$c29tZXNhbHQ$CTFhFdXPJO1aFaMaO6Mm5c8y7cJHAph8ArZWb2GRPPc", Variant::ID); hashtest(Version::Version13, 2, 18, 1, "password", "somesalt", "78fe1ec91fb3aa5657d72e710854e4c3d9b9198c742f9616c2f085bed95b2e8c", "$argon2id$v=19$m=262144,t=2,p=1$c29tZXNhbHQ$eP4eyR+zqlZX1y5xCFTkw9m5GYx0L5YWwvCFvtlbLow", Variant::ID); hashtest(Version::Version13, 2, 8, 1, "password", "somesalt", "9dfeb910e80bad0311fee20f9c0e2b12c17987b4cac90c2ef54d5b3021c68bfe", "$argon2id$v=19$m=256,t=2,p=1$c29tZXNhbHQ$nf65EOgLrQMR/uIPnA4rEsF5h7TKyQwu9U1bMCHGi/4", Variant::ID); hashtest(Version::Version13, 2, 8, 2, "password", "somesalt", 
"6d093c501fd5999645e0ea3bf620d7b8be7fd2db59c20d9fff9539da2bf57037", "$argon2id$v=19$m=256,t=2,p=2$c29tZXNhbHQ$bQk8UB/VmZZF4Oo79iDXuL5/0ttZwg2f/5U52iv1cDc", Variant::ID); hashtest(Version::Version13, 1, 16, 1, "password", "somesalt", "f6a5adc1ba723dddef9b5ac1d464e180fcd9dffc9d1cbf76cca2fed795d9ca98", "$argon2id$v=19$m=65536,t=1,p=1$c29tZXNhbHQ$9qWtwbpyPd3vm1rB1GThgPzZ3/ydHL92zKL+15XZypg", Variant::ID); hashtest(Version::Version13, 4, 16, 1, "password", "somesalt", "9025d48e68ef7395cca9079da4c4ec3affb3c8911fe4f86d1a2520856f63172c", "$argon2id$v=19$m=65536,t=4,p=1$c29tZXNhbHQ$kCXUjmjvc5XMqQedpMTsOv+zyJEf5PhtGiUghW9jFyw", Variant::ID); hashtest(Version::Version13, 2, 16, 1, "differentpassword", "somesalt", "0b84d652cf6b0c4beaef0dfe278ba6a80df6696281d7e0d2891b817d8c458fde", "$argon2id$v=19$m=65536,t=2,p=1$c29tZXNhbHQ$C4TWUs9rDEvq7w3+J4umqA32aWKB1+DSiRuBfYxFj94", Variant::ID); hashtest(Version::Version13, 2, 16, 1, "password", "diffsalt", "bdf32b05ccc42eb15d58fd19b1f856b113da1e9a5874fdcc544308565aa8141c", "$argon2id$v=19$m=65536,t=2,p=1$ZGlmZnNhbHQ$vfMrBczELrFdWP0ZsfhWsRPaHppYdP3MVEMIVlqoFBw", Variant::ID); } #[test] fn test_common_error_states() { const OUTLEN: usize = 32; let mut out = [0u8; OUTLEN]; check_error_code!(MemoryTooLittle, hash(2, 1, 1, Some(b"password"), Some(b"diffsalt"), Some(&mut out), None, Variant::ID, Version::Version13)); check_error_code!(SaltTooShort, hash(2, 1 << 12, 1, Some(b"password"), Some(b"s"), Some(&mut out), None, Variant::ID, Version::Version13)); // @NOTE This test is missing because it's not possible to pass a mismatched length/pointer // pair to this function :) // // ret = argon2_hash(2, 1 << 12, 1, NULL, strlen("password"), // "diffsalt", strlen("diffsalt"), // out, OUT_LEN, NULL, 0, Argon2_id, version); // // It would look something like this: // // check_error_code!(PwdPtrMismatch, hash(2, 1 << 12, 1, // Some(b"password"), Some(b"diffsalt"), // Some(&mut out), None, // Variant::ID, Version::Version13)); } }
true
6acaff9a664119c50fc0e05f49b68defb0c67c87
Rust
Tavy7/Table
/src/main.rs
UTF-8
2,538
3.1875
3
[]
no_license
use std::io; mod game; mod dice; fn select_index(table: &Vec<i16>, current_player: &char) -> usize{ let ok; game::print_table(&table); print!("Introduceti piesa de mutat: "); io::Write::flush(&mut io::stdout()).expect("flush failed!"); let mut input = String::new(); io::stdin().read_line(&mut input).expect("Eroare la citire!"); let numar: usize = input.trim().parse().expect("Inputul trebuie sa fie un numar!"); ok = game::validate_choice(numar, &table, &current_player);//verifica daca piesa e buna if ok == true{ return numar; } return 30; } fn switch_player(current_player: char) -> char{ if current_player == 'n'{ return 'a'; } return 'n'; } fn main() { let mut table = game::create_table(); println!("Decidem cine incepe."); let mut current_player = dice::start_player();//char = 'a' sau 'n' let mut dice; loop{ dice = dice::roll_dice(); for _i in 0..dice.2 / 2{//dice.2 e numarul de mutari, impartit la 2 deoarece in loop mutam pentru ambele zaruri let mut poz; let mut selected_piece; loop{ println!("\nJucator curent: {}.", current_player); println!("Mutam cu valoarea: {}.", dice.0); selected_piece = select_index(&table, &current_player); if current_player == 'a'{ if game::validate_move(selected_piece + dice.0, &table, &current_player, selected_piece){ poz = selected_piece + dice.0; break; } } if current_player == 'n'{ if game::validate_move(selected_piece - dice.0, &table, &current_player, selected_piece){ poz = selected_piece - dice.0; break; } } } //efectueaza mutare table = game::make_move(selected_piece, poz, table, &current_player); loop{ println!("\nJucator curent: {}.", current_player); println!("Mutam cu {}.", dice.1); selected_piece = select_index(&table, &current_player); if selected_piece == 30{ continue; } if current_player == 'a'{ if game::validate_move(selected_piece + dice.1, &table, &current_player, selected_piece){ poz = selected_piece + dice.1; break; } } if current_player == 'n'{ if game::validate_move(selected_piece - dice.1, &table, &current_player, 
selected_piece){ poz = selected_piece - dice.1; break; } } } //efectueaza mutare table = game::make_move(selected_piece, poz, table, &current_player); } if game::check_final(&table, current_player) == current_player{ break; } current_player = switch_player(current_player); } }
true
848624cb643f2533003c85856c914467c1bd6312
Rust
bobbobbio/Adevnt-of-Code-2020
/three/src/main.rs
UTF-8
2,178
3.4375
3
[]
no_license
use std::io::{self, BufRead}; use std::str::FromStr; #[derive(Debug)] enum Error { Io(io::Error), Parse(String), } impl From<io::Error> for Error { fn from(e: io::Error) -> Self { Self::Io(e) } } type Result<T> = std::result::Result<T, Error>; fn parse_lines<R: BufRead, T: FromStr>(lines: R) -> Result<Vec<T>> where Error: From<<T as FromStr>::Err>, { let mut values = vec![]; for maybe_line in lines.lines() { values.push(maybe_line?.parse()?); } Ok(values) } #[derive(PartialEq, Clone, Copy)] enum Tile { Tree, Nothing, } impl FromStr for Tile { type Err = Error; fn from_str(input: &str) -> Result<Self> { match input { "." => Ok(Self::Nothing), "#" => Ok(Self::Tree), t => Err(Error::Parse(format!("bad tile {}", t))), } } } struct Row(Vec<Tile>); impl FromStr for Row { type Err = Error; fn from_str(input: &str) -> Result<Self> { Ok(Self( input .chars() .map(|c| c.to_string().parse()) .collect::<Result<_>>()?, )) } } struct Field { rows: Vec<Row>, } impl Field { fn height(&self) -> usize { self.rows.len() } fn get(&self, x: usize, y: usize) -> Tile { let row = &self.rows[y]; row.0[x % row.0.len()] } } fn count_trees(field: &Field, slope: (usize, usize)) -> usize { let mut trees = 0; let (mut x, mut y) = (0, 0); while y < field.height() { if field.get(x, y) == Tile::Tree { trees += 1; } x += slope.0; y += slope.1; } trees } fn part_one(field: &Field) { println!("{}", count_trees(field, (3, 1))); } fn part_two(field: &Field) { let slopes = [(1, 1), (3, 1), (5, 1), (7, 1), (1, 2)]; let answer: usize = slopes.iter().map(|&s| count_trees(field, s)).product(); println!("{}", answer); } fn main() -> Result<()> { let rows: Vec<Row> = parse_lines(io::stdin().lock())?; let field = Field { rows }; println!("Part 1"); part_one(&field); println!("Part 2"); part_two(&field); Ok(()) }
true
8d21f5ab0c98c13e87978bdf96252c4a24d4b82a
Rust
wzekin/BUPT-Projects
/Data-Structure/algorithm/src/way.rs
UTF-8
8,595
3.921875
4
[]
no_license
//! 程序的way模块, 用来储存和计算交通方式的各种信息 //! //! # examples //! ``` rust //! use std::rc::Rc; //! use data_structure::city::*; //! use data_structure::way::*; //! //! let city_0 = Rc::new(City::new(0, RiskLevel::Low)); //! let city_1 = Rc::new(City::new(1, RiskLevel::Low)); //! let way_0 = Rc::new(Way::new(0, Type::Air, city_0.clone(), city_1.clone(), 3, 4)); //! ``` use crate::city::*; use getset::Getters; use std::rc::Rc; use wasm_bindgen::prelude::*; /// 交通工具的种类,一共分为三种 /// 1. Car(汽车) /// 2. Train(火车) /// 3. Air(飞机) #[wasm_bindgen] #[derive(Debug, Clone, Copy)] pub enum Type { /// 汽车 Car, /// 火车 Train, /// 飞机 Air, } //交通方式类, 表示交通方式的各种信息 #[wasm_bindgen] #[derive(Debug, Getters, Clone)] pub struct Way { ///返回交通方式的唯一id #[getset(get = "pub")] id: i32, ///返回交通方式的种类 #[getset(get = "pub")] way_type: Type, ///返回交通方式的起始城市 #[getset(get = "pub")] start_city: Rc<City>, ///返回交通方式的终点城市 #[getset(get = "pub")] end_city: Rc<City>, ///返回交通方式花费的时间 #[getset(get = "pub")] cost_time: i32, ///返回交通方式花费的开始时间 /// /// 取值为0-23 #[getset(get = "pub")] start_time: i32, } impl PartialEq for Way { fn eq(&self, other: &Self) -> bool { self.id == other.id } } impl Eq for Way {} impl Way { /// Way 的构造函数,创建一个新Way /// /// # Arguments /// * `id` - 交通方式的唯一id /// * `type` - 交通方式的种类 /// * `start_city` - 交通方式的起始城市 /// * `end_city` - 交通方式的终点城市 /// * `cost_time` - 交通方式花费的时间 /// * `start_time` - 交通方式的出发时间 pub fn new( id: i32, way_type: Type, start_city: Rc<City>, end_city: Rc<City>, cost_time: i32, start_time: i32, ) -> Way { Way { id, way_type, start_city, end_city, cost_time, start_time, } } /// 返回交通方式的风险权重系数 /// * 汽车 => 2 /// * 火车 => 5 /// * 飞机 => 9 pub fn risk(&self) -> f64 { match &self.way_type { Type::Car => 2.0, Type::Train => 5.0, Type::Air => 9.0, } } } #[cfg(target_arch = "wasm32")] #[wasm_bindgen] impl Way { ///返回交通方式的唯一id #[wasm_bindgen(getter = id)] pub fn get_id_ts_binding(&self) -> i32 { self.id } ///返回交通方式的种类 #[wasm_bindgen(getter = type)] pub fn get_type_ts_binding(&self) -> Type { self.way_type } 
///返回交通方式的风险 #[wasm_bindgen(getter = risk)] pub fn get_risk_ts_binding(&self) -> f64 { self.risk() } ///返回交通方式的起始城市 #[wasm_bindgen(getter = start_city)] pub fn get_start_city_ts_binding(&self) -> City { unsafe { (*Rc::into_raw(self.start_city.clone())).clone() } } ///返回交通方式的终点城市 #[wasm_bindgen(getter = end_city)] pub fn get_end_city_ts_binding(&self) -> City { unsafe { (*Rc::into_raw(self.end_city.clone())).clone() } } ///返回交通方式花费的时间 #[wasm_bindgen(getter = cost_time)] pub fn get_cost_time_ts_binding(&self) -> i32 { self.cost_time } ///返回交通方式花费的开始时间 /// /// 取值为0-23 #[wasm_bindgen(getter = start_time)] pub fn get_start_time_ts_binding(&self) -> i32 { self.start_time } } #[cfg(test)] mod tests { use super::*; #[test] fn get_air_risk() { let city_0 = Rc::new(City::new(0, String::new(), RiskLevel::Low)); let city_1 = Rc::new(City::new(1, String::new(), RiskLevel::Low)); let way_air = Rc::new(Way::new(1, Type::Air, city_0.clone(), city_1.clone(), 3, 4)); assert_eq!(way_air.risk(), 9.0); } #[test] fn get_car_risk() { let city_0 = Rc::new(City::new(0, String::new(), RiskLevel::Low)); let city_1 = Rc::new(City::new(1, String::new(), RiskLevel::Low)); let way_car = Rc::new(Way::new(1, Type::Car, city_0.clone(), city_1.clone(), 3, 4)); assert_eq!(way_car.risk(), 2.0); } #[test] fn get_train_risk() { let city_0 = Rc::new(City::new(0, String::new(), RiskLevel::Low)); let city_1 = Rc::new(City::new(1, String::new(), RiskLevel::Low)); let way_train = Rc::new(Way::new( 1, Type::Train, city_0.clone(), city_1.clone(), 3, 4, )); assert_eq!(way_train.risk(), 5.0); } } #[cfg(target_arch = "wasm32")] #[cfg(test)] mod tests_ts_binding { use super::*; use wasm_bindgen_test::*; #[wasm_bindgen_test] fn get_id_0() { let city_0 = Rc::new(City::new(0, String::new(), RiskLevel::Low)); let city_1 = Rc::new(City::new(1, String::new(), RiskLevel::Low)); let way_0 = Rc::new(Way::new(0, Type::Air, city_0.clone(), city_1.clone(), 3, 4)); assert_eq!(way_0.get_id_ts_binding(), 0); } 
#[wasm_bindgen_test] fn get_id_1() { let city_0 = Rc::new(City::new(0, String::new(), RiskLevel::Low)); let city_1 = Rc::new(City::new(1, String::new(), RiskLevel::Low)); let way_1 = Rc::new(Way::new(1, Type::Air, city_0.clone(), city_1.clone(), 3, 4)); assert_eq!(way_1.get_id_ts_binding(), 1); } #[wasm_bindgen_test] fn get_air_risk() { let city_0 = Rc::new(City::new(0, String::new(), RiskLevel::Low)); let city_1 = Rc::new(City::new(1, String::new(), RiskLevel::Low)); let way_air = Rc::new(Way::new(1, Type::Air, city_0.clone(), city_1.clone(), 3, 4)); assert_eq!(way_air.get_risk_ts_binding(), 9.0); } #[wasm_bindgen_test] fn get_car_risk() { let city_0 = Rc::new(City::new(0, String::new(), RiskLevel::Low)); let city_1 = Rc::new(City::new(1, String::new(), RiskLevel::Low)); let way_car = Rc::new(Way::new( 1, Type::Car, city_0.clone(), city_1.clone(), 3, 4, )); assert_eq!(way_car.get_risk_ts_binding(), 2.0); } #[wasm_bindgen_test] fn get_train_risk() { let city_0 = Rc::new(City::new(0, String::new(), RiskLevel::Low)); let city_1 = Rc::new(City::new(1, String::new(), RiskLevel::Low)); let way_train = Rc::new(Way::new( 1, Type::Train, city_0.clone(), city_1.clone(), 3, 4, )); assert_eq!(way_train.get_risk_ts_binding(), 5.0); } #[wasm_bindgen_test] fn get_start_city() { let city_0 = Rc::new(City::new(0, String::new(), RiskLevel::Low)); let city_1 = Rc::new(City::new(1, String::new(), RiskLevel::Low)); let way_car = Rc::new(Way::new(1, Type::Car, city_0.clone(), city_1.clone(), 3, 4)); assert_eq!( way_car.get_start_city_ts_binding().get_id_ts_binding(), city_0.get_id_ts_binding() ); } #[wasm_bindgen_test] fn get_end_city() { let city_0 = Rc::new(City::new(0, String::new(), RiskLevel::Low)); let city_1 = Rc::new(City::new(1, String::new(), RiskLevel::Low)); let way_car = Rc::new(Way::new(1, Type::Car, city_0.clone(), city_1.clone(), 3, 4)); assert_eq!( way_car.get_end_city_ts_binding().get_id_ts_binding(), city_1.get_id_ts_binding() ); } #[wasm_bindgen_test] fn 
get_cost_time() { let city_0 = Rc::new(City::new(0, String::new(), RiskLevel::Low)); let city_1 = Rc::new(City::new(1, String::new(), RiskLevel::Low)); let way_car = Rc::new(Way::new(1, Type::Car, city_0.clone(), city_1.clone(), 3, 4)); assert_eq!(way_car.get_cost_time_ts_binding(), 3); } #[wasm_bindgen_test] fn get_start_time() { let city_0 = Rc::new(City::new(0, String::new(), RiskLevel::Low)); let city_1 = Rc::new(City::new(1, String::new(), RiskLevel::Low)); let way_car = Rc::new(Way::new(1, Type::Car, city_0.clone(), city_1.clone(), 3, 4)); assert_eq!(way_car.get_start_time_ts_binding(), 4); } }
true
f7d3f039a482ff32a8fb523eb5e273e0d3b4e071
Rust
Tsuguri/just
/just_core/src/transform.rs
UTF-8
1,164
2.984375
3
[]
no_license
use crate::math::*; use std::cell::RefCell; pub trait Ident { fn empty() -> Self; } impl Ident for Matrix { fn empty() -> Self { Matrix::IDENTITY } } impl Ident for Quat { fn empty() -> Self { Quat::IDENTITY } } #[derive(Copy, Clone, Debug, PartialEq)] pub struct ItemState<T: Ident> { pub changed: bool, pub item: T, } pub type MatrixState = ItemState<Matrix>; impl<T: Ident> ItemState<T> { fn new() -> Self { Self { changed: true, item: T::empty(), } } } #[derive(Clone, Debug, PartialEq)] pub struct Transform { pub position: Vec3, pub rotation: Quat, pub scale: Vec3, pub local_matrix: RefCell<MatrixState>, pub global_matrix: RefCell<MatrixState>, } unsafe impl Send for Transform {} unsafe impl Sync for Transform {} impl Transform { pub fn new() -> Self { Transform { position: Vec3::ZERO, scale: Vec3::new(1.0, 1.0, 1.0), rotation: Quat::IDENTITY, local_matrix: RefCell::new(MatrixState::new()), global_matrix: RefCell::new(MatrixState::new()), } } }
true
103b9c5bf33b6ba42917ad50730c56f961706582
Rust
nimiq/core-rs-albatross
/consensus/src/sync/light/sync.rs
UTF-8
7,351
2.609375
3
[ "Apache-2.0" ]
permissive
use std::{ collections::{HashMap, VecDeque}, sync::Arc, task::Waker, }; use futures::{future::BoxFuture, stream::FuturesUnordered, FutureExt}; use nimiq_block::Block; use nimiq_blockchain_proxy::BlockchainProxy; use nimiq_hash::Blake2bHash; use nimiq_network_interface::{ network::{CloseReason, Network, SubscribeEvents}, request::RequestError, }; use nimiq_primitives::task_executor::TaskExecutor; use nimiq_zkp_component::{ types::{Error, ZKPRequestEvent}, zkp_component::ZKPComponentProxy, }; use crate::{messages::Checkpoint, sync::syncer::MacroSync}; #[derive(Clone)] /// This struct is used to request Epochs IDs (hashes) from other peers /// in order to determine their macro chain state relative to us pub(crate) struct EpochIds<T> { /// Indicates if the latest epoch id that was queried was found in the peer's chain pub locator_found: bool, /// The most recent epoch ids (hashes) pub ids: Vec<Blake2bHash>, /// The most recent checkpoint block in the latest epoch (if any) pub checkpoint: Option<Checkpoint>, /// Epoch number corresponding to the first hash in ids pub first_epoch_number: usize, /// The sender that created this struct pub sender: T, } impl<T> EpochIds<T> { #[inline] pub(crate) fn checkpoint_epoch_number(&self) -> usize { self.first_epoch_number + self.ids.len() } #[inline] pub(crate) fn last_epoch_number(&self) -> usize { self.checkpoint_epoch_number().saturating_sub(1) } } /// This struct is used to track all the macro requests sent to a particular peer pub struct PeerMacroRequests { /// Number of requests that have been fulfilled completed_requests: usize, /// A Queue used to track the requests that have been sent, and their respective result queued_requests: VecDeque<(Blake2bHash, Option<Block>)>, } impl PeerMacroRequests { pub fn new() -> Self { Self { completed_requests: 0, queued_requests: VecDeque::new(), } } // Pushes a new request into the queue pub fn push_request(&mut self, block_hash: Blake2bHash) { self.queued_requests.push_back((block_hash, 
None)) } // Pops a request from the queue pub fn pop_request(&mut self) -> Option<(Blake2bHash, Option<Block>)> { self.queued_requests.pop_front() } // Returns true if the request was updated, false in case the request was not found pub fn update_request(&mut self, block: Block) -> bool { let position = self .queued_requests .iter() .position(|(hash, _)| *hash == block.hash()); if let Some(position) = position { if self.queued_requests[position].1.is_none() { // A fulfilled request is only count once self.completed_requests += 1; } // We update our block request. // Note: If we receive a response more than once, we use the latest let block_hash = block.hash(); log::trace!(%block_hash, "Updating block request"); self.queued_requests[position] = (block_hash, Some(block)); true } else { log::trace!("Received a response for a block that we didn't expect"); false } } // Returns true if all the requests have been completed pub fn is_ready(&self) -> bool { self.queued_requests.len() == self.completed_requests } } /// The LightMacroSync is one type of MacroSync and it is essentially a stream, /// that operates on a per peer basis, emitting peers either as Outdated or Good. /// To do this, it will: /// 1. Request the latest ZKP from a peer /// 2. Request epoch IDs from the peer /// 3. 
Request the last (if any) election or checkpoint blocks /// If during the process, a peer is deemed as outdated, then it is emitted pub struct LightMacroSync<TNetwork: Network> { /// The blockchain pub(crate) blockchain: BlockchainProxy, /// Reference to the network pub(crate) network: Arc<TNetwork>, /// Stream for peer joined and peer left events pub(crate) network_event_rx: SubscribeEvents<TNetwork::PeerId>, /// Used to track the macro requests on a per peer basis pub(crate) peer_requests: HashMap<TNetwork::PeerId, PeerMacroRequests>, /// The stream for epoch ids requests pub(crate) epoch_ids_stream: FuturesUnordered<BoxFuture<'static, Option<EpochIds<TNetwork::PeerId>>>>, /// Reference to the ZKP proxy used to interact with the ZKP component pub(crate) zkp_component_proxy: ZKPComponentProxy<TNetwork>, /// ZKP related requests (proofs) pub(crate) zkp_requests: FuturesUnordered<BoxFuture<'static, (Result<ZKPRequestEvent, Error>, TNetwork::PeerId)>>, /// Block requests pub(crate) block_headers: FuturesUnordered< BoxFuture<'static, (Result<Option<Block>, RequestError>, TNetwork::PeerId)>, >, /// Minimum distance to light sync in #blocks from the peers head. 
pub(crate) full_sync_threshold: u32, /// Task executor to be compatible with wasm and not wasm environments, pub(crate) executor: Box<dyn TaskExecutor + Send + 'static>, /// Waker used for the poll next function pub(crate) waker: Option<Waker>, } impl<TNetwork: Network> LightMacroSync<TNetwork> { pub fn new( blockchain: BlockchainProxy, network: Arc<TNetwork>, network_event_rx: SubscribeEvents<TNetwork::PeerId>, zkp_component_proxy: ZKPComponentProxy<TNetwork>, full_sync_threshold: u32, executor: impl TaskExecutor + Send + 'static, ) -> Self { Self { blockchain, network, network_event_rx, peer_requests: HashMap::new(), epoch_ids_stream: FuturesUnordered::new(), zkp_component_proxy, zkp_requests: FuturesUnordered::new(), waker: None, executor: Box::new(executor), full_sync_threshold, block_headers: Default::default(), } } pub fn peers(&self) -> impl Iterator<Item = &TNetwork::PeerId> { self.peer_requests.keys() } pub fn remove_peer_requests(&mut self, peer_id: TNetwork::PeerId) { self.peer_requests.remove(&peer_id); } pub fn disconnect_peer(&mut self, peer_id: TNetwork::PeerId, reason: CloseReason) { // Remove all pending peer requests (if any) self.remove_peer_requests(peer_id); let network = Arc::clone(&self.network); // We disconnect from this peer self.executor.exec(Box::pin({ async move { network.disconnect_peer(peer_id, reason).await; } })); } } impl<TNetwork: Network> MacroSync<TNetwork::PeerId> for LightMacroSync<TNetwork> { fn add_peer(&self, peer_id: TNetwork::PeerId) { info!(%peer_id, "Requesting zkp from peer"); self.zkp_requests .push(Self::request_zkps(self.zkp_component_proxy.clone(), peer_id).boxed()); // Pushing the future to FuturesUnordered above does not wake the task that // polls `epoch_ids_stream`. Therefore, we need to wake the task manually. if let Some(waker) = &self.waker { waker.wake_by_ref(); } } }
true
c1f3aa46b014211f832c05624c10e7695ea4e2d2
Rust
cognivore/icfpc2004-tbd
/src/dump_trace.rs
UTF-8
3,034
2.96875
3
[]
no_license
use std::io::Write; use crate::neurology::parse_ant; use crate::cartography::{ World, }; use crate::geography::{ MapToken, }; use crate::geometry::{ Pos }; use crate::biology::Color::*; use crate::geography::MapToken::*; pub fn dump_world(world : World, count : usize, w: &mut dyn Write) { writeln!(w).unwrap(); writeln!(w, "After round {}...", count).unwrap(); let mut v: Vec<_> = world.data.into_iter().collect(); v.sort_by(|(Pos{x,y},_),(Pos{x : a,y : b},_)| (Pos{x : *y,y : *x}).cmp(&(Pos{x : *b,y : *a}))); for (Pos{x,y},v) in v { writeln!(w, "cell ({}, {}): {}", x,y, pp(v)).unwrap(); } } fn pp(t : MapToken) -> String { match t { Rock => "rock".to_string(), Clear(cont) => { let mut res = String::new(); if cont.food.0 > 0 { res = format!("{} food; ",cont.food.0); } match cont.anthill { Some(Black) => {res.push_str("black hill; ");} Some(Red) => {res.push_str("red hill; ");} _ => {} } if let Some(rm) = cont.markers.0.get(&Red) { if !rm.is_empty() { res.push_str("red marks: "); let bits : Vec<usize> = rm.into_iter().collect(); for b in bits { res.push_str(&b.to_string()); } res.push_str("; "); } } if let Some(rm) = cont.markers.0.get(&Black) { if !rm.is_empty() { res.push_str("black marks: "); let bits : Vec<usize> = rm.into_iter().collect(); for b in bits { res.push_str(&b.to_string()); } res.push_str("; "); } } if let Some(ant) = cont.ant { match ant.color { Red => {res.push_str("red");} Black => {res.push_str("black");} } let antfood = if ant.has_food { 1 } else { 0 }; res = format!("{} ant of id {}, dir {}, food {}, state {}, resting {}", res, ant.id, ant.direction as usize, antfood, ant.state.0, ant.resting); } res }, } } // ENTRY_POINT pub fn dump_ep() { let w = std::fs::read_to_string("data/tiny.world").unwrap(); let mut w = World::from_map_string(&w); let ant_brains = [ parse_ant(&std::fs::read_to_string("data/sample.ant").unwrap()), parse_ant(&std::fs::read_to_string("data/sample.ant").unwrap()), ]; let mut rng = crate::number_theory::Random::new(12345); let 
mut result = vec![]; writeln!(result, "random seed: 12345").unwrap(); for round in 0..=10000 { dump_world(w.clone(), round, &mut result); w.round(&ant_brains, &mut rng); } std::fs::write("outputs/my_dump", result).unwrap(); }
true
274877386ac7a1118b2d261e63006a0cec969973
Rust
tlan2/Mini_Games
/src/bj/card.rs
UTF-8
1,111
3.53125
4
[ "MIT" ]
permissive
// Tom Lancaster (c) Summer 2019 // // Blackjack - card.rs #[derive(Clone)] pub struct Card { pub suit: String, pub value: String } impl Card { pub fn score(&self) -> u32 { let number = match self.value.parse::<u32>() { Ok(x) => x, Err(_) => self.score_for_face_card() }; number } pub fn name(&self) -> String { return format!("{}_{}", &self.value, &self.suit); } fn score_for_face_card(&self) -> u32 { let score = match self.value.as_str() { "J" => 10, "Q" => 10, "K" => 10, "A" => 11, _ => 0 }; score } } #[test] fn test_score_for_numbers() { let card = Card { suit: "Hearts".into(), value: "2".into() }; assert_eq!(card.score(), 2); } #[test] fn test_score_for_face_card() { let card = Card { suit: "Clubs".into(), value: "J".into() }; assert_eq!(card.score(), 10); } #[test] fn test_name() { let card = Card { suit: "Hearts".into(), value: "10".into() }; assert_eq!(card.name(), String::from("10_Hearts")); }
true
a302ccd7375ab2ff585537c186043167fef3fce0
Rust
btabram/AdventOfCode2018
/16/src/main.rs
UTF-8
7,500
3.046875
3
[]
no_license
use std::fs; use std::collections::{HashMap, HashSet}; type ErrorHolder = Box<std::error::Error>; type OpcodeFn = Fn(&mut Processor, i32, i32, i32); type Instructions = HashMap<i32, &'static OpcodeFn>; #[derive(Debug, PartialEq, Eq, Clone, Copy)] struct Registers(i32, i32, i32, i32); #[derive(Debug)] struct Processor{ reg: Registers, } impl std::fmt::Display for Registers { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { write!(f, "[{}, {}, {}, {}]", self.0, self.1, self.2, self.3) } } // Macros for simple binary operations like add macro_rules! binaryr { ($name:ident, $op:tt) => { fn $name(&mut self, a: i32, b: i32, c: i32) { self.write(c, self.read(a) $op self.read(b)); } } } macro_rules! binaryi { ($name:ident, $op:tt) => { fn $name(&mut self, a: i32, b: i32, c: i32) { self.write(c, self.read(a) $op b); } } } // Macros for testing functions like equality testing macro_rules! testingir { ($name:ident, $op:tt) => { fn $name(&mut self, a: i32, b: i32, c: i32) { self.write(c, if a $op self.read(b) { 1 } else { 0 }); } } } macro_rules! testingri { ($name:ident, $op:tt) => { fn $name(&mut self, a: i32, b: i32, c: i32) { self.write(c, if self.read(a) $op b { 1 } else { 0 }); } } } macro_rules! 
testingrr { ($name:ident, $op:tt) => { fn $name(&mut self, a: i32, b: i32, c: i32) { self.write(c, if self.read(a) $op self.read(b) { 1 } else { 0 }); } } } impl Processor { fn read(&self, register: i32) -> i32 { match register { 0 => self.reg.0, 1 => self.reg.1, 2 => self.reg.2, 3 => self.reg.3, _ => unreachable!(), } } fn write(&mut self, register: i32, value: i32) { match register { 0 => self.reg.0 = value, 1 => self.reg.1 = value, 2 => self.reg.2 = value, 3 => self.reg.3 = value, _ => unreachable!(), } } binaryr!(addr, +); binaryi!(addi, +); binaryr!(mulr, *); binaryi!(muli, *); binaryr!(banr, &); binaryi!(bani, &); binaryr!(borr, |); binaryi!(bori, |); fn setr(&mut self, a: i32, _: i32, c: i32) { self.write(c, self.read(a)); } fn seti(&mut self, a: i32, _: i32, c: i32) { self.write(c, a); } testingir!(gtir, >); testingri!(gtri, >); testingrr!(gtrr, >); testingir!(eqir, ==); testingri!(eqri, ==); testingrr!(eqrr, ==); } #[derive(Debug)] struct TestCase { before: Registers, opcode: i32, a: i32, b: i32, c: i32, after: Registers, } fn try_instruction(t: &TestCase, f: &OpcodeFn) -> bool { let mut p = Processor { reg: t.before }; f(&mut p, t.a, t.b, t.c); p.reg == t.after } fn run_test_case(instructions: &Instructions, t: &TestCase) -> Vec<i32> { let mut matching = vec![]; for (k, f) in instructions { if try_instruction(t, f) { matching.push(*k); } } matching } fn s_to_i(s: &str) -> i32 { s.parse().expect("Failed to parse str as i32") } fn parse_test_case(line0: &str, line1: &str, line2: &str) -> TestCase { let beforev: Vec<_> = line0[9..19].split(", ").map(s_to_i).collect(); let before = Registers(beforev[0], beforev[1], beforev[2], beforev[3]); let input: Vec<_> = line1.split(" ").map(s_to_i).collect(); let opcode = input[0]; let a = input[1]; let b = input[2]; let c = input[3]; let afterv: Vec<_> = line2[9..19].split(", ").map(s_to_i).collect(); let after = Registers(afterv[0], afterv[1], afterv[2], afterv[3]); TestCase { before, opcode, a, b, c, after } } // 
Maintain my own map of IDs -> instructions fn get_instructions() -> Instructions { let mut instructions: Instructions = HashMap::new(); instructions.insert(0, &Processor::addr); instructions.insert(1, &Processor::addi); instructions.insert(2, &Processor::mulr); instructions.insert(3, &Processor::muli); instructions.insert(4, &Processor::banr); instructions.insert(5, &Processor::bani); instructions.insert(6, &Processor::borr); instructions.insert(7, &Processor::bori); instructions.insert(8, &Processor::setr); instructions.insert(9, &Processor::seti); instructions.insert(10, &Processor::gtir); instructions.insert(11, &Processor::gtri); instructions.insert(12, &Processor::gtrr); instructions.insert(13, &Processor::eqir); instructions.insert(14, &Processor::eqri); instructions.insert(15, &Processor::eqrr); instructions } #[derive(Debug)] struct Command { opcode: i32, a: i32, b: i32, c: i32, } fn main() -> Result<(), ErrorHolder> { let input = fs::read_to_string("input.txt")?; let lines: Vec<_> = input.lines().collect(); let mut test_cases = vec![]; let mut commands = vec![]; let mut i = 0; while i < lines.len() { let line = lines[i]; if line.contains("Before") { let tc = parse_test_case(lines[i], lines[i + 1], lines[i + 2]); test_cases.push(tc); i += 3; continue; } if !line.is_empty() { let command_values: Vec<_> = line.split(" ").map(s_to_i).collect(); let opcode = command_values[0]; let a = command_values[1]; let b = command_values[2]; let c = command_values[3]; commands.push(Command { opcode, a, b, c }); } i += 1; } let instructions = get_instructions(); let test = TestCase { before: Registers(3, 2, 1, 1), opcode: 9, a: 2, b: 1, c: 2, after: Registers(3, 2, 2, 1), }; assert_eq!(run_test_case(&instructions, &test).len(), 3); // Part 1 let mut opcode_matches = HashMap::new(); let mut gt3_count = 0; for t in &test_cases { let matches = run_test_case(&instructions, t); if matches.len() >= 3 { gt3_count += 1; } for m in matches { let entry = 
opcode_matches.entry(t.opcode).or_insert(HashSet::new()); entry.insert(m); } } println!("There are {} samples which match 3 or more opcodes!", gt3_count); // Work out the mapping between the opcodes in the input and our internal // IDs for the different instructions let mut opcode_to_ids = HashMap::new(); while opcode_to_ids.len() != 16 { let const_opcode_matches = opcode_matches.clone(); let known_mappings: Vec<_> = const_opcode_matches.iter().filter(|(_, v)| v.len() == 1).collect(); for (opcode, ids) in known_mappings { assert_eq!(ids.len(), 1); let known_id = ids.iter().next().unwrap(); opcode_to_ids.insert(*opcode, *known_id); opcode_matches.values_mut().for_each(|v| { v.remove(known_id); }); } } // Part 2 let mut p = Processor { reg: Registers(0, 0, 0, 0) }; for command in commands { let id = opcode_to_ids.get(&command.opcode).expect("Unexpected opcode"); let f = instructions.get(id).expect("Unexpected instruction ID"); f(&mut p, command.a, command.b, command.c); } println!("After executing the program the registers are {}", p.reg); Ok(()) }
true
10c96a4a0b164a44fb2b93ac81db2283a0a840dc
Rust
ocstl/project_euler
/src/bin/problem27.rs
UTF-8
1,127
3.484375
3
[]
no_license
use primal::{Primes, Sieve}; const MAX_VALUE: usize = 1000; const MAX_VALUE_ISIZE: isize = 1000; /// Find the product of the coefficients, a and b, for the quadratic expression that produces the /// maximum number of primes for consecutive values of n, starting with n=0. /// The formula: n^2 + a*n + b, where |a| < 1000 and |b| <= 1000. fn main() { // Maximum possible prime produced by the formula. let sieve = Sieve::new(2 * MAX_VALUE * MAX_VALUE + MAX_VALUE); let check_prime = |x: isize| x > 0 && sieve.is_prime(x as usize); // Since we start at 0, b has to be prime. // To get more than n = 0, 1 + a + b >= 2, so a <= 1 - b. let iter_b = Primes::all().take_while(|&x| x <= MAX_VALUE); let iter_a = |b: isize| ((1 - b)..MAX_VALUE_ISIZE); let answer = iter_b .flat_map(|b| iter_a(b as isize).map(move |a| (a, b as isize))) .map(|(a, b)| { let f = |x| x * x + a * x + b; let length = (0..).take_while(|&x| check_prime(f(x))).count(); (length, a * b) }) .max() .unwrap(); println!("Answer: {:?}", answer); }
true
890e4adcc5695c96e24e82be0e968bc19d93e900
Rust
SergejJurecko/native-ui
/src/api/separator.rs
UTF-8
1,014
2.6875
3
[ "Apache-2.0", "MIT" ]
permissive
use super::Opaque as ApiOpaque; use ui::UiImpl; use wrappers::Separator as ImplSeparator; #[derive(Copy, Clone)] pub struct Separator { op: ApiOpaque, // b: ImplSeparator, // gr: ::EvGroup, } impl Separator { pub fn new_horizontal(gr: ::EvGroup) -> Separator { let b = ImplSeparator::new_horizontal(); Self::new_int(b, gr) } pub fn new_vertical(gr: ::EvGroup) -> Separator { let b = ImplSeparator::new_vertical(); Self::new_int(b, gr) } fn new_int(b: ImplSeparator, gr: ::EvGroup) -> Separator { let id = UiImpl::new_widget(::ImplOpaque(::WidgetType::Separator, b.op.1), gr); Separator { op: ApiOpaque(::WidgetType::Separator, id), // b, // gr, } } } impl ::std::cmp::PartialEq for Separator { fn eq(&self, other: &Separator) -> bool { self.op.1 == other.op.1 } } impl AsRef<ApiOpaque> for Separator { fn as_ref(&self) -> &ApiOpaque { &self.op } }
true
69bbc9f20484e148c6e132d58d6f90bfbdd5889e
Rust
knokko/knukki-rs
/src/renderer/mod.rs
UTF-8
2,638
3.125
3
[]
no_license
use crate::RenderRegion; use std::cell::RefCell; mod core; #[cfg(feature = "golem_rendering")] mod golem_renderer; mod text; #[cfg(feature = "golem_rendering")] pub use golem_renderer::ShaderId; pub use text::*; /// This struct is used to render `Component`s (and the `Application`). A reference to an instance /// of this struct will be passed as parameter to every `render` method. /// /// ## Methods /// This struct has *core* methods and *feature* methods. The *core* methods will always be /// available, regardless of compile target and whether or not there is an actual render target. /// The *feature* methods are only available when the right crate feature is enabled. Currently, /// the `golem_rendering` feature is the only crate feature that adds *feature* methods. /// /// ## Usage /// `Component`s should use `#[cfg(feature = "golem_rendering")]` before code blocks that need to /// use *feature* methods. It is encouraged to always use the same *core* methods, regardless of /// which features are enabled: even though no real drawing will be done without crate features, it /// is still nice for unit testing. An example usage is shown below: /// ``` /// use knukki::*; /// /// fn render_stuff(renderer: &Renderer) { /// // Use the core push_viewport method /// renderer.push_viewport(0.2, 0.2, 0.8, 0.8, || { /// // Use the core clear method /// renderer.clear(Color::rgb(100, 100, 0)); /// /// #[cfg(feature = "golem_rendering")] /// { /// let context = renderer.get_context(); /// // Do some more complicated rendering using the golem context /// // Or use some other feature methods /// } /// }); /// } /// ``` /// ## Constructing instances /// The *wrapper* is responsible for constructing the `Renderer`(s). In production environments, it /// will construct a real `Renderer` from a `golem` `Context`. Unit tests can use the /// `test_renderer` function to easily construct a dummy `Renderer`. 
pub struct Renderer { #[cfg(feature = "golem_rendering")] context: golem::Context, #[cfg(feature = "golem_rendering")] storage: golem_renderer::GolemRenderStorage, text_renderer: TextRenderer, viewport_stack: RefCell<Vec<RenderRegion>>, scissor_stack: RefCell<Vec<RenderRegion>>, } #[cfg(test)] #[cfg(not(feature = "golem_rendering"))] pub(crate) fn test_renderer(initial_viewport: RenderRegion) -> Renderer { Renderer { text_renderer: TextRenderer::new(), viewport_stack: RefCell::new(vec![initial_viewport]), scissor_stack: RefCell::new(vec![initial_viewport]), } }
true
5e52b31990ff88032bda10d020d9da2e6e852122
Rust
decaf-lang/decaf-rs
/syntax/src/lib.rs
UTF-8
2,532
2.859375
3
[]
no_license
#![feature(proc_macro_hygiene)] // allow proc macro output macro definition pub mod ast; pub mod parser; pub mod parser_ll; pub mod ty; pub mod symbol; pub use ast::*; pub use ty::*; pub use symbol::*; // below are some helper functions for parser use common::{Loc, Errors, ErrorKind, NO_LOC}; // save a little typing than writing "Default::default()" pub(crate) fn dft<T: Default>() -> T { T::default() } pub(crate) fn mk_stmt(loc: Loc, kind: StmtKind) -> Stmt { Stmt { loc, kind } } pub(crate) fn mk_expr(loc: Loc, kind: ExprKind) -> Expr { Expr { loc, ty: dft(), kind } } pub(crate) fn mk_int_lit<'a, T>(loc: Loc, s: &'a str, error: &mut Errors<'a, T>) -> Expr<'a> { let val = if s.starts_with("0x") { i32::from_str_radix(&s[2..], 16) } else { s.parse() } .unwrap_or_else(|_| error.issue(loc, ErrorKind::IntTooLarge(s))); mk_expr(loc, val.into()) } // make a block from a single statement(which may already be a block) fn mk_block(s: Stmt) -> Block { if let StmtKind::Block(b) = s.kind { b } else { Block { loc: s.loc, stmt: vec![s], scope: dft() } } } pub(crate) trait VecExt: Sized { type Item; fn pushed(self, i: <Self as VecExt>::Item) -> Self; fn reversed(self) -> Self; } impl<T> VecExt for Vec<T> { type Item = T; fn pushed(mut self, i: Self::Item) -> Self { (self.push(i), self).1 } fn reversed(mut self) -> Self { (self.reverse(), self).1 } } // assume s begin with ", this is not checked pub(crate) fn check_str<'a, T>(s: &'a str, error: &mut Errors<'a, T>, mut loc: Loc) { if s.len() <= 1 || !s.ends_with('"') { error.issue(loc, ErrorKind::UnclosedStr(&s[1..])) } let s = &s[1..s.len() - 1]; loc.next_col(); let mut escape = NO_LOC; let mut idx = 0; for ch in s.chars() { idx += ch.len_utf8(); match ch { '\\' => escape = if escape == NO_LOC { loc } else { NO_LOC }, 'n' | 'r' | 't' | '"' => escape = NO_LOC, '\r' => continue, // just ignore _ => { if escape != NO_LOC { error.issue::<()>(escape, ErrorKind::InvalidEscape); escape = NO_LOC; } // for NewlineInStr error, the reported 
string segment is from beginning to(including) this '\n' // (though I don't think it is very sensible, I think reporting the whole string will be better) if ch == '\n' { error.issue(loc, ErrorKind::NewlineInStr(&s[0..idx])) } } } if ch == '\n' { loc.next_line(); } else { loc.next_col(); } } if escape != NO_LOC { error.issue(escape, ErrorKind::InvalidEscape) } }
true
2fb15b8be0219a31101209590f6a92a1e61a4bdf
Rust
hoodielive/rust
/2021/c_and_rust/lester/src/main.rs
UTF-8
275
3.046875
3
[]
no_license
use std::io::{self, Write}; use std::f64; fn main() { println!("Print something yo.. "); println!(); println!("Arguments can be referred to by their position: {0}, {1}! and {1}, {0}! are built from the same arguments", "dope", "fantastic"); println!(""); }
true
6f27868f631d553e2ef6b9af19ab66ff72be632c
Rust
nihn/savings_calculator
/src/main.rs
UTF-8
8,133
2.90625
3
[ "MIT" ]
permissive
use chrono::{Duration, NaiveDate}; use clap; use dialoguer::Confirm; use std::collections::HashSet; use structopt::StructOpt; use tokio; mod conversions; mod format; mod parse; mod statistics; #[derive(Debug, StructOpt)] #[structopt(about = "Simple script to parse and combine savings in multiple currencies")] struct SavingsCalc { #[structopt(subcommand)] cmd: Command, /// Format of outputted data #[structopt(long, possible_values = &format::Format::variants(), case_insensitive = true, default_value = "Table")] format: format::Format, } #[derive(Debug, StructOpt)] enum Command { /// Add data to our savings spreadsheet Add { /// Input csv file #[structopt(parse(try_from_str = parse::parse_from_str))] records: parse::Records, /// Date of the entry, if nothing is passed today will be used #[structopt(short, long, default_value = "today", value_name = "YYYY-MM-DD", parse(try_from_str = parse::parse_date_from_str))] date: NaiveDate, /// Amount along with currency name, e.g. 123.45GBP #[structopt(short, long, required = true)] value: Vec<parse::Value>, /// Do not write file, only show what the result would look like #[structopt(long)] dry_run: bool, }, /// Parse our saving spreadsheet and display data Show { /// Input csv file #[structopt(parse(try_from_str = parse::parse_from_str))] records: parse::Records, }, /// Parse and converse into other currencies Converse { /// Input csv file #[structopt(parse(try_from_str = parse::parse_from_str))] records: parse::Records, /// Exchange rate for date, pass `today` for Today date #[structopt(short, long, value_name = "YYYY-MM-DD", parse(try_from_str = parse::parse_date_from_str))] date: Option<NaiveDate>, #[structopt(parse(try_from_str = parse::parse_currency_from_str))] currency: parse::Currency, /// Add deltas between entries #[structopt(short = "D", long)] delta: bool, }, /// Calculate averages RollingAverage { /// Input csv file #[structopt(parse(try_from_str = parse::parse_from_str))] records: parse::Records, /// Over what 
period rolling average should be calculated #[structopt(default_value = "1 month", parse(try_from_str = parse::parse_duration_from_str))] period: Duration, /// Currency in which should averages be presented, if not passed due per currency averages #[structopt(short, long, parse(try_from_str = parse::parse_currency_from_str))] currency: Option<parse::Currency>, /// Exchange rate for date, pass `today` for Today date #[structopt(short = "E", long, value_name = "YYYY-MM-DD", parse(try_from_str = parse::parse_date_from_str))] exchange_rate_date: Option<NaiveDate>, /// Start date - first data point >= than this date will be used #[structopt(short, long, value_name = "YYYY-MM-DD", parse(try_from_str = parse::parse_date_from_str))] start_date: Option<NaiveDate>, /// End date - first data point <= than this date will be used #[structopt(short, long, value_name = "YYYY-MM-DD", parse(try_from_str = parse::parse_date_from_str))] end_date: Option<NaiveDate>, /// Show rolling average split into buckets, note that if there is not enough data points /// for given granurality results may be missing #[structopt(short, long, parse(try_from_str = parse::parse_duration_from_str))] buckets: Option<Duration>, /// Instead of doing per data point, calculate between first and last #[structopt(short = "S", long)] sum: bool, }, } #[tokio::main] async fn main() { let opt = SavingsCalc::from_args(); match opt.cmd { Command::Show { records } => { format::present_results(records, opt.format); } Command::Add { mut records, date, value, dry_run, } => { let currencies: HashSet<_> = value.iter().map(|v| &v.currency).collect(); if currencies.len() != value.len() { clap::Error::value_validation_auto("Duplicated currency passed!".into()).exit(); } let new_currencies = currencies.into_iter().fold(vec![], |mut acc, x| { if !records.currencies.contains(x) { acc.push(x); } acc }); if !new_currencies.is_empty() && !Confirm::new() .with_prompt(format!( "Currencies {:?} are new, are you sure you want to add 
them?", new_currencies )) .interact() .unwrap() { clap::Error::with_description("Aborting!".into(), clap::ErrorKind::InvalidValue) .exit(); } if records.records.iter().any(|r| r.date == date) { if !Confirm::new() .with_prompt(format!( "Date {} already present in dataset, do you want to modify it?", date )) .interact() .unwrap() { clap::Error::with_description( "Aborting!".into(), clap::ErrorKind::InvalidValue, ) .exit(); } } for value in value { records.set_value(&value, date); } if !dry_run { parse::update_csv_file(&records); } format::present_results(records, opt.format); } Command::Converse { records, date, currency, delta, } => { let mut records = conversions::get_conversions(records, currency, date) .await .unwrap(); if delta { let deltas = records .records .iter() .enumerate() .skip(1) .map(|(i, s)| parse::Record { date: s.date, savings: vec![ s.savings[0], s.savings[0] - records.records[i - 1].savings[0], ], }) .collect(); records = parse::Records { currencies: vec![ records.currencies.remove(0), parse::Currency("Delta".to_string()), ], records: deltas, filepath: records.filepath, }; } format::present_results(records, opt.format); } Command::RollingAverage { records, currency, period, exchange_rate_date, start_date, end_date, buckets, sum, } => { if let Some(buckets) = buckets { if buckets > period { clap::Error::value_validation_auto( "Buckets duration cannot be longer than period!".to_string(), ); } } let records = if let Some(currency) = currency { conversions::get_conversions(records, currency, exchange_rate_date) .await .unwrap() } else { records }; let averages = statistics::calculate_rolling_average( records, period, sum, buckets, start_date, end_date, ) .unwrap(); format::present_results(averages, opt.format); } }; }
true
d55e5b0b59b8005f77b65ada38e5dc5725b9660a
Rust
yangfengzzz/box2d-rs
/src/private/dynamics/b2_island_private.rs
UTF-8
15,631
2.640625
3
[ "MIT" ]
permissive
use crate::b2_body::*; use crate::b2_timer::*; use crate::b2_world_callbacks::*; use crate::b2_math::*; use crate::b2_time_step::*; use crate::b2_settings::*; use super::b2_island::*; use crate::private::dynamics::b2_contact_solver::*; /* Position Correction Notes ========================= i tried the several algorithms for position correction of the 2D revolute joint. i looked at these systems: - simple pendulum (1m diameter sphere on massless 5m stick) with initial angular velocity of 100 rad/s. - suspension bridge with 30 1m long planks of length 1m. - multi-link chain with 30 1m long links. Here are the algorithms: Baumgarte - A fraction of the position error is added to the velocity error. There is no separate position solver. Pseudo Velocities - After the velocity solver and position integration, the position error, Jacobian, and effective mass are recomputed. Then the velocity constraints are solved with pseudo velocities and a fraction of the position error is added to the pseudo velocity error. The pseudo velocities are initialized to zero and there is no warm-starting. After the position solver, the pseudo velocities are added to the positions. This is also called the First Order World method or the Position LCP method. Modified Nonlinear Gauss-Seidel (NGS) - Like Pseudo Velocities except the position error is re-computed for each constraint and the positions are updated after the constraint is solved. The radius vectors (aka Jacobians) are re-computed too (otherwise the algorithm has horrible instability). The pseudo velocity states are not needed because they are effectively zero at the beginning of each iteration. Since we have the current position error, we allow the iterations to terminate early if the error becomes smaller than B2_LINEAR_SLOP. Full NGS or just NGS - Like Modified NGS except the effective mass are re-computed each time a constraint is solved. 
Here are the results: Baumgarte - this is the cheapest algorithm but it has some stability problems, especially with the bridge. The chain links separate easily close to the root and they jitter as they struggle to pull together. This is one of the most common methods in the field. The big drawback is that the position correction artificially affects the momentum, thus leading to instabilities and false bounce. i used a bias factor of 0.2. A larger bias factor makes the bridge less stable, a smaller factor makes joints and contacts more spongy. Pseudo Velocities - the is more stable than the Baumgarte method. The bridge is stable. However, joints still separate with large angular velocities. Drag the simple pendulum in a circle quickly and the joint will separate. The chain separates easily and does not recover. i used a bias factor of 0.2. A larger value lead to the bridge collapsing when a heavy cube drops on it. Modified NGS - this algorithm is better in some ways than Baumgarte and Pseudo Velocities, but in other ways it is worse. The bridge and chain are much more stable, but the simple pendulum goes unstable at high angular velocities. Full NGS - stable in all tests. The joints display good stiffness. The bridge still sags, but this is better than infinite forces. Recommendations Pseudo Velocities are not really worthwhile because the bridge and chain cannot recover from joint separation. In other cases the benefit over Baumgarte is small. Modified NGS is not a robust method for the revolute joint due to the violent instability seen in the simple pendulum. Perhaps it is viable with other constraint types, especially scalar constraints where the effective mass is a scalar. This leaves Baumgarte and Full NGS. Baumgarte has small, but manageable instabilities and is very fast. i don't think we can escape Baumgarte, especially in highly demanding cases where high constraint fidelity is not needed. Full NGS is robust and easy on the eyes. 
i recommend this as an option for higher fidelity simulation and certainly for suspension bridges and long chains. Full NGS might be a good choice for ragdolls, especially motorized ragdolls where joint separation can be problematic. The number of NGS iterations can be reduced for better performance without harming robustness much. Each joint in a can be handled differently in the position solver. So i recommend a system where the user can select the algorithm on a per joint basis. i would probably default to the slower Full NGS and let the user select the faster Baumgarte method in performance critical scenarios. */ /* Cache Performance The Box2D solvers are dominated by cache misses. Data structures are designed to increase the number of cache hits. Much of misses are due to random access to body data. The constraint structures are iterated over linearly, which leads to few cache misses. The bodies are not accessed during iteration. Instead read only data, such as the mass values are stored with the constraints. The mutable data are the constraint impulses and the bodies velocities/positions. The impulses are held inside the constraint structures. The body velocities/positions are held in compact, temporary arrays to increase the number of cache hits. Linear and angular velocity are stored in a single array since multiple arrays lead to multiple misses. */ /* 2D Rotation R = [cos(theta) -sin(theta)] [sin(theta) cos(theta) ] thetaDot = omega Let q1 = cos(theta), q2 = sin(theta). R = [q1 -q2] [q2 q1] q1Dot = -thetaDot * q2 q2Dot = thetaDot * q1 q1_new = q1_old - dt * w * q2 q2_new = q2_old + dt * w * q1 then normalize. This might be faster than computing sin+cos. However, we can compute sin+cos of the same angle fast. */ pub(crate) fn solve<D: UserDataType>(this: &mut B2island<D>, profile: &mut B2Profile, step: &B2timeStep, gravity: B2vec2, allow_sleep: bool) { let mut timer = B2timer::default(); let h: f32 = step.dt; // Integrate velocities and apply damping. 
initialize the body state. for (i, b) in (&this.m_bodies).iter().enumerate() { let mut b = b.borrow_mut(); let c: B2vec2 = b.m_sweep.c; let a:f32 = b.m_sweep.a; let mut v:B2vec2 = b.m_linear_velocity; let mut w:f32 = b.m_angular_velocity; // Store positions for continuous collision. b.m_sweep.c0 = b.m_sweep.c; b.m_sweep.a0 = b.m_sweep.a; if b.m_type == B2bodyType::B2DynamicBody { // Integrate velocities. v += h * b.m_inv_mass * (b.m_gravity_scale * b.m_mass * gravity + b.m_force); w += h * b.m_inv_i * b.m_torque; // Apply damping. // ODE: dv/dt + c * v = 0 // Solution: v(t) = v0 * exp(-c * t) // Time step: v(t + dt) = v0 * exp(-c * (t + dt)) = v0 * exp(-c * t) * exp(-c * dt) = v * exp(-c * dt) // v2 = exp(-c * dt) * v1 // Pade approximation: // v2 = v1 * 1 / (1 + c * dt) v *= 1.0 / (1.0 + h * b.m_linear_damping); w *= 1.0 / (1.0 + h * b.m_angular_damping); } this.m_positions[i].c = c; this.m_positions[i].a = a; this.m_velocities[i].v = v; this.m_velocities[i].w = w; } timer.reset(); // Solver data let mut solver_data = B2solverData{ step : *step, //TODO_humman //positions : &mut this.m_positions, //velocities : &mut this.m_velocities, }; // initialize velocity constraints. 
let contact_solver_def = B2contactSolverDef { step : *step, //TODO_humman //contacts : &mut this.m_contacts, //positions : &mut this.m_positions, //velocities : &mut this.m_velocities, }; let mut contact_solver = B2contactSolver::new::<D>(&contact_solver_def, &this.m_contacts); contact_solver.initialize_velocity_constraints(&this.m_positions, &this.m_velocities, &this.m_contacts); if step.warm_starting { contact_solver.warm_start(&mut this.m_velocities); } for j in &this.m_joints { j.borrow_mut().init_velocity_constraints(&mut solver_data, &mut this.m_positions, &mut this.m_velocities); } profile.solve_init = timer.get_milliseconds(); // solve velocity constraints timer.reset(); for _i in 0..step.velocity_iterations { for joint in &this.m_joints { joint.borrow_mut().solve_velocity_constraints(&mut solver_data, &mut this.m_velocities); } contact_solver.solve_velocity_constraints(&mut this.m_velocities); } // Store impulses for warm starting contact_solver.store_impulses(&this.m_contacts); profile.solve_velocity = timer.get_milliseconds(); // Integrate positions for i in 0..this.m_bodies.len() { let mut c:B2vec2 = this.m_positions[i].c; let mut a:f32 = this.m_positions[i].a; let mut v:B2vec2 = this.m_velocities[i].v; let mut w:f32 = this.m_velocities[i].w; // Check for large velocities let translation:B2vec2 = h * v; if b2_dot(translation, translation) > B2_MAX_TRANSLATION_SQUARED { let ratio:f32 = B2_MAX_TRANSLATION / translation.length(); v *= ratio; } let rotation:f32 = h * w; if rotation * rotation > B2_MAX_ROTATION_SQUARED { let ratio:f32 = B2_MAX_ROTATION / b2_abs(rotation); w *= ratio; } // Integrate c += h * v; a += h * w; this.m_positions[i].c = c; this.m_positions[i].a = a; this.m_velocities[i].v = v; this.m_velocities[i].w = w; } // solve position constraints timer.reset(); let mut position_solved:bool = false; for _i in 0..step.position_iterations { let contacts_okay: bool = contact_solver.solve_position_constraints(&mut this.m_positions); let mut 
joints_okay: bool = true; for joint in &this.m_joints { let joint_okay: bool = joint.borrow_mut().solve_position_constraints(&mut solver_data, &mut this.m_positions); joints_okay = joints_okay && joint_okay; } if contacts_okay && joints_okay { // Exit early if the position errors are small. position_solved = true; break; } } // Copy state buffers back to the bodies for (i,body) in (&this.m_bodies).iter().enumerate() { let mut body = body.borrow_mut(); body.m_sweep.c = this.m_positions[i].c; body.m_sweep.a = this.m_positions[i].a; body.m_linear_velocity = this.m_velocities[i].v; body.m_angular_velocity = this.m_velocities[i].w; body.synchronize_transform(); } profile.solve_position = timer.get_milliseconds(); this.report(&contact_solver.m_velocity_constraints); if allow_sleep { let mut min_sleep_time:f32 = B2_MAX_FLOAT; let lin_tol_sqr:f32 = B2_LINEAR_SLEEP_TOLERANCE * B2_LINEAR_SLEEP_TOLERANCE; let ang_tol_sqr:f32 = B2_ANGULAR_SLEEP_TOLERANCE * B2_ANGULAR_SLEEP_TOLERANCE; for b in &this.m_bodies { let mut b = b.borrow_mut(); if b.get_type() == B2bodyType::B2StaticBody { continue; } if !b.m_flags.contains(BodyFlags::E_AUTO_SLEEP_FLAG) || b.m_angular_velocity * b.m_angular_velocity > ang_tol_sqr || b2_dot(b.m_linear_velocity, b.m_linear_velocity) > lin_tol_sqr { b.m_sleep_time = 0.0; min_sleep_time = 0.0; } else { b.m_sleep_time += h; min_sleep_time = b2_min(min_sleep_time, b.m_sleep_time); } } if min_sleep_time >= B2_TIME_TO_SLEEP && position_solved { for b in &this.m_bodies { b.borrow_mut().set_awake(false); } } } } pub(crate) fn solve_toi<D: UserDataType>(this: &mut B2island<D>, sub_step: &B2timeStep, toi_index_a: i32, toi_index_b: i32) { // initialize the body state. 
for (i,b) in this.m_bodies.iter().enumerate() { let b = b.borrow(); this.m_positions[i].c = b.m_sweep.c; this.m_positions[i].a = b.m_sweep.a; this.m_velocities[i].v = b.m_linear_velocity; this.m_velocities[i].w = b.m_angular_velocity; } let contact_solver_def = B2contactSolverDef{ // contact_solver_def.contacts = m_contacts; // contact_solver_def.count = m_contact_count; // contact_solver_def.allocator = m_allocator; step : *sub_step // contact_solver_def.positions = m_positions; // contact_solver_def.velocities = m_velocities; }; let mut contact_solver = B2contactSolver::new::<D>(&contact_solver_def, &this.m_contacts); // solve position constraints. for _i in 0..sub_step.position_iterations { let contacts_okay:bool = contact_solver.solve_toiposition_constraints(toi_index_a, toi_index_b, &mut this.m_positions); if contacts_okay { break; } } // #if 0 // // Is the new position really safe? // for (i32 i = 0; i < m_contact_count; ++i) // { // B2contact* c = m_contacts[i]; // B2fixture* f_a = c->get_fixture_a(); // B2fixture* f_b = c->get_fixture_b(); // b2_body* b_a = f_a->get_body(); // b2_body* b_b = f_b->get_body(); // i32 index_a = c->get_child_index_a(); // i32 index_b = c->get_child_index_b(); // B2distanceInput input; // input.proxy_a.set(f_a->get_shape(), index_a); // input.proxy_b.set(f_b->get_shape(), index_b); // input.transform_a = b_a->get_transform(); // input.transform_b = b_b->get_transform(); // input.use_radii = false; // B2distanceOutput output; // B2simplexCache cache; // cache.count = 0; // b2Distance(&output, &cache, &input); // if output.distance == 0 || cache.count == 3 // { // cache.count += 0; // } // } // #endif // Leap of faith to new safe state. 
this.m_bodies[toi_index_a as usize].borrow_mut().m_sweep.c0 = this.m_positions[toi_index_a as usize].c; this.m_bodies[toi_index_a as usize].borrow_mut().m_sweep.a0 = this.m_positions[toi_index_a as usize].a; this.m_bodies[toi_index_b as usize].borrow_mut().m_sweep.c0 = this.m_positions[toi_index_b as usize].c; this.m_bodies[toi_index_b as usize].borrow_mut().m_sweep.a0 = this.m_positions[toi_index_b as usize].a; // No warm starting is needed for TOI events because warm // starting impulses were applied in the discrete solver. contact_solver.initialize_velocity_constraints(&this.m_positions, &this.m_velocities, &this.m_contacts); // solve velocity constraints. for _i in 0..sub_step.velocity_iterations { contact_solver.solve_velocity_constraints(&mut this.m_velocities); } // Don't store the TOI contact forces for warm starting // because they can be quite large. let h: f32 = sub_step.dt; // Integrate positions for i in 0..this.m_bodies.len() { let mut c:B2vec2 = this.m_positions[i].c; let mut a:f32 = this.m_positions[i].a; let mut v:B2vec2 = this.m_velocities[i].v; let mut w:f32 = this.m_velocities[i].w; // Check for large velocities let translation:B2vec2 = h * v; if b2_dot(translation, translation) > B2_MAX_TRANSLATION_SQUARED { let ratio:f32 = B2_MAX_TRANSLATION / translation.length(); v *= ratio; } let rotation:f32 = h * w; if rotation * rotation > B2_MAX_ROTATION_SQUARED { let ratio:f32 = B2_MAX_ROTATION / b2_abs(rotation); w *= ratio; } // Integrate c += h * v; a += h * w; this.m_positions[i].c = c; this.m_positions[i].a = a; this.m_velocities[i].v = v; this.m_velocities[i].w = w; // Sync bodies let mut body = this.m_bodies[i].borrow_mut(); body.m_sweep.c = c; body.m_sweep.a = a; body.m_linear_velocity = v; body.m_angular_velocity = w; body.synchronize_transform(); } this.report(&contact_solver.m_velocity_constraints); } pub(crate) fn report<D: UserDataType>(this: &B2island<D>, constraints: &[B2contactVelocityConstraint]) { if this.m_listener.is_none() { 
return; } let listener = this.m_listener.as_ref().unwrap(); assert_eq!(this.m_contacts.len(), constraints.len()); for (i,c) in (&this.m_contacts).iter().enumerate() { let mut c = c.borrow_mut(); let vc = &constraints[i]; let mut impulse = B2contactImpulse::default(); impulse.count = vc.point_count; for j in 0..vc.point_count { impulse.normal_impulses[j as usize] = vc.points[j as usize].normal_impulse; impulse.tangent_impulses[j as usize] = vc.points[j as usize].tangent_impulse; } listener.borrow_mut().post_solve(&mut *c, &impulse); } }
true
b6ad647c66eb342561b37d80710f5c464b4395fb
Rust
jake-stewart/funtime_projects
/2021/rust_stuff/gui-is-a-sin/src/main.rs
UTF-8
4,679
3.140625
3
[ "MIT" ]
permissive
use crossterm::style::Color; use std::process::exit; use terminal_menu::{ activate, back_button, button, label, list, menu, mut_menu, numeric, run, scroll, string, submenu, wait_for_exit, }; use damo_fetch::display_screen; mod utils; // Creates function that holes the menus fn launch() { // sets var that contains the menu let menu = menu(vec![ // Creats the title menu label(""), label("GUI Is A Sin").colorize(Color::Red), label("============="), label(""), // Creats a sub-menu to launch damo_fetch from submenu( "Damo Fetch", vec![ label(""), label("Select a package manager"), label(""), // Allows user to select appropriate package system for there distro list( "Manager", vec!["apt", "dpkg", "dnf", "pacman", "rpm", "xbps"], ), button("Launch"), label(""), // A button that allows the user to return to previous menu back_button("back"), ], ), submenu( "Calculator", vec![ list("Method", vec!["+", "-", "*", "/"]), numeric("Num_1", 0.0, None, None, None), numeric("Num_1", 0.0, None, None, None), button("Calculate"), back_button("back"), ], ), // Creats a sub-menu containing information about the programe author submenu( "Author Details", vec![ label("My name damo and im cool"), label("Github: https://github.com/dam-0"), back_button("back"), ], ), // Creats a sub-menu explining why the programe was made submenu( "Design Philosophy", vec![ label("Programme was made"), label("to annoy Jake"), back_button("back"), ], ), label(""), button("Exit"), ]); // Creats loop to keep the menu up until the user decides to quit while mut_menu(&menu).selected_item_name() != "Exit" { run(&menu); if mut_menu(&menu).canceled() == true { exit(0) } { if mut_menu(&menu).selected_item_name() != "Exit" { // Launches damo_fetch if mut_menu(&menu) .get_submenu("Damo Fetch") .selected_item_name() == "Launch" { display_screen( mut_menu(&menu) .get_submenu("Damo Fetch") .selection_value("Manager"), ); println!(""); utils::pause(); // Clears the screen print!("{}[2J", 27 as char) }; if mut_menu(&menu) 
.get_submenu("Calculator") .selected_item_name() == "Calculate" { //let title = mut_menu(&menu).get_submenu("Calculator"); let first_num = mut_menu(&menu) .get_submenu("Calculator") .numeric_value("Num_1"); let second_num = mut_menu(&menu) .get_submenu("Calculator") .numeric_value("Num_2"); /*let first_num: i32 = user_first_num.trim().parse().ok().unwrap(); let second_num: i32 = user_second_num.trim().parse().ok().unwrap();*/ match mut_menu(&menu) .get_submenu("Calculator") .selection_value("Method") { "+" => println!("sum is: {}", first_num + second_num), "-" => println!("difference is: {}", first_num - second_num), "*" => println!("Multiply is: {}", first_num * second_num), "/" => println!("division is: {}", first_num / second_num), _ => println!("Choose something valid"), } println!(""); utils::pause(); // Clears the screen print!("{}[2J", 27 as char) } } } } } fn main() { launch(); } // damo_fetch 0.2 implemented // calculator implemented
true
3d889247ecf6f7161a71b66da856759e6ee826f7
Rust
cdouglass/samara
/src/lex/decl.rs
UTF-8
3,954
3.796875
4
[]
no_license
use std::iter::Iterator; use std::iter::Peekable; use std::str::Chars; #[derive(PartialEq)] #[derive(Debug)] #[derive(Clone)] pub enum Token { Open, Close, Separator, Eql, Arrow, Bool, Int, Unit, Sum(String), Var(String) } pub struct TokenStream<'a> { it: Peekable<Chars<'a>> } pub fn build_lexer(expr: &str) -> Peekable<TokenStream> { TokenStream { it: expr.chars().peekable(), }.peekable() } impl<'a> Iterator for TokenStream<'a> { type Item = Token; fn next(&mut self) -> Option<Token> { fn is_valid(c: char) -> bool { c.is_alphabetic() || c == '_' || c == '-' || c == '>' } let mut token = None; loop { let ch = self.it.peek().cloned(); match token { Some(Token::Close) | Some(Token::Separator) | Some(Token::Eql) | Some(Token::Arrow) | Some(Token::Bool) | Some(Token::Int) | Some(Token::Unit) => { break; }, Some(Token::Open) => { match ch { Some(')') => { token = Some(Token::Unit); self.it.next(); }, _ => { return token; } } }, Some(Token::Sum(ref mut s)) | Some(Token::Var(ref mut s)) => { match ch { Some(c) if is_valid(c) => { s.push(c); self.it.next(); }, _ => { break; } } }, None => { match ch { Some('(') => { token = Some(Token::Open) }, Some(')') => { token = Some(Token::Close) }, Some('=') => { token = Some(Token::Eql) }, Some('|') => { token = Some(Token::Separator) }, Some('-') => { self.it.next(); if let Some(&'>') = self.it.peek() { self.it.next(); token = Some(Token::Arrow); } } Some(c) => { if c.is_uppercase() { token = Some(Token::Sum(c.to_string())); } else if is_valid(c) { token = Some(Token::Var(c.to_string())); } }, None => { break; } } self.it.next(); } } } if let Some(Token::Sum(ref s)) = token { if s == "Bool" { return Some(Token::Bool); } else if s == "Int" { return Some(Token::Int); } else if s == "Unit" { return Some(Token::Unit); } } token } } #[cfg(test)] mod tests { use super::build_lexer; use super::Token; use super::Token::*; /* Helpers */ fn assert_tokens(decl: &str, expected: Vec<Token>) { let tokens : Vec<Token> = 
build_lexer(decl).collect(); assert_eq!(tokens, expected); } /* Tests */ #[test] fn test_individual_tokens() { assert_tokens("(", vec![Open]); assert_tokens(")", vec![Close]); assert_tokens("|", vec![Separator]); assert_tokens("=", vec![Eql]); assert_tokens("->", vec![Arrow]); assert_tokens("Bool", vec![Bool]); assert_tokens("Int", vec![Int]); assert_tokens("Unit", vec![Unit]); assert_tokens("Foo", vec![Sum(String::from("Foo"))]); assert_tokens("foo", vec![Var(String::from("foo"))]); } }
true
31a3f54c30bdc1c4786e60b673b90af334c0c3e5
Rust
Azure/azure-sdk-for-rust
/services/mgmt/databricks/src/package_2023_05_01/models.rs
UTF-8
77,256
2.625
3
[ "LicenseRef-scancode-generic-cla", "MIT", "LGPL-2.1-or-later" ]
permissive
#![allow(non_camel_case_types)] #![allow(unused_imports)] use serde::de::{value, Deserializer, IntoDeserializer}; use serde::{Deserialize, Serialize, Serializer}; use std::str::FromStr; #[doc = "Information about azure databricks accessConnector."] #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct AccessConnector { #[serde(flatten)] pub tracked_resource: TrackedResource, #[doc = "Managed service identity (system assigned and/or user assigned identities)"] #[serde(default, skip_serializing_if = "Option::is_none")] pub identity: Option<ManagedServiceIdentity>, #[doc = "Metadata pertaining to creation and last modification of the resource."] #[serde(rename = "systemData", default, skip_serializing_if = "Option::is_none")] pub system_data: Option<SystemData>, #[serde(default, skip_serializing_if = "Option::is_none")] pub properties: Option<AccessConnectorProperties>, } impl AccessConnector { pub fn new(tracked_resource: TrackedResource) -> Self { Self { tracked_resource, identity: None, system_data: None, properties: None, } } } #[doc = "List of azure databricks accessConnector."] #[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)] pub struct AccessConnectorListResult { #[doc = "The array of azure databricks accessConnector."] #[serde( default, deserialize_with = "azure_core::util::deserialize_null_as_default", skip_serializing_if = "Vec::is_empty" )] pub value: Vec<AccessConnector>, #[doc = "The URL to use for getting the next set of results."] #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")] pub next_link: Option<String>, } impl azure_core::Continuable for AccessConnectorListResult { type Continuation = String; fn continuation(&self) -> Option<Self::Continuation> { self.next_link.clone() } } impl AccessConnectorListResult { pub fn new() -> Self { Self::default() } } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)] pub struct AccessConnectorProperties { #[doc = "Provisioning status 
of the accessConnector."] #[serde(rename = "provisioningState", default, skip_serializing_if = "Option::is_none")] pub provisioning_state: Option<access_connector_properties::ProvisioningState>, } impl AccessConnectorProperties { pub fn new() -> Self { Self::default() } } pub mod access_connector_properties { use super::*; #[doc = "Provisioning status of the accessConnector."] #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] #[serde(remote = "ProvisioningState")] pub enum ProvisioningState { Deleted, Failed, Succeeded, #[serde(skip_deserializing)] UnknownValue(String), } impl FromStr for ProvisioningState { type Err = value::Error; fn from_str(s: &str) -> std::result::Result<Self, Self::Err> { Self::deserialize(s.into_deserializer()) } } impl<'de> Deserialize<'de> for ProvisioningState { fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error> where D: Deserializer<'de>, { let s = String::deserialize(deserializer)?; let deserialized = Self::from_str(&s).unwrap_or(Self::UnknownValue(s)); Ok(deserialized) } } impl Serialize for ProvisioningState { fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error> where S: Serializer, { match self { Self::Deleted => serializer.serialize_unit_variant("ProvisioningState", 0u32, "Deleted"), Self::Failed => serializer.serialize_unit_variant("ProvisioningState", 1u32, "Failed"), Self::Succeeded => serializer.serialize_unit_variant("ProvisioningState", 2u32, "Succeeded"), Self::UnknownValue(s) => serializer.serialize_str(s.as_str()), } } } } #[doc = "An update to an azure databricks accessConnector."] #[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)] pub struct AccessConnectorUpdate { #[doc = "Resource tags."] #[serde(default, skip_serializing_if = "Option::is_none")] pub tags: Option<serde_json::Value>, #[doc = "Managed service identity (system assigned and/or user assigned identities)"] #[serde(default, skip_serializing_if = "Option::is_none")] pub identity: 
Option<ManagedServiceIdentity>, } impl AccessConnectorUpdate { pub fn new() -> Self { Self::default() } } #[doc = "AddressSpace contains an array of IP address ranges that can be used by subnets of the virtual network."] #[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)] pub struct AddressSpace { #[doc = "A list of address blocks reserved for this virtual network in CIDR notation."] #[serde( rename = "addressPrefixes", default, deserialize_with = "azure_core::util::deserialize_null_as_default", skip_serializing_if = "Vec::is_empty" )] pub address_prefixes: Vec<String>, } impl AddressSpace { pub fn new() -> Self { Self::default() } } #[doc = "Provides details of the entity that created/updated the workspace."] #[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)] pub struct CreatedBy { #[doc = "The Object ID that created the workspace."] #[serde(default, skip_serializing_if = "Option::is_none")] pub oid: Option<String>, #[doc = "The Personal Object ID corresponding to the object ID above"] #[serde(default, skip_serializing_if = "Option::is_none")] pub puid: Option<String>, #[doc = "The application ID of the application that initiated the creation of the workspace. For example, Azure Portal."] #[serde(rename = "applicationId", default, skip_serializing_if = "Option::is_none")] pub application_id: Option<String>, } impl CreatedBy { pub fn new() -> Self { Self::default() } } pub type CreatedDateTime = time::OffsetDateTime; #[doc = "The object that contains details of encryption used on the workspace."] #[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)] pub struct Encryption { #[doc = "The encryption keySource (provider). 
Possible values (case-insensitive): Default, Microsoft.Keyvault"] #[serde(rename = "keySource", default, skip_serializing_if = "Option::is_none")] pub key_source: Option<encryption::KeySource>, #[doc = "The name of KeyVault key."] #[serde(rename = "KeyName", default, skip_serializing_if = "Option::is_none")] pub key_name: Option<String>, #[doc = "The version of KeyVault key."] #[serde(default, skip_serializing_if = "Option::is_none")] pub keyversion: Option<String>, #[doc = "The Uri of KeyVault."] #[serde(default, skip_serializing_if = "Option::is_none")] pub keyvaulturi: Option<String>, } impl Encryption { pub fn new() -> Self { Self::default() } } pub mod encryption { use super::*; #[doc = "The encryption keySource (provider). Possible values (case-insensitive): Default, Microsoft.Keyvault"] #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] #[serde(remote = "KeySource")] pub enum KeySource { Default, #[serde(rename = "Microsoft.Keyvault")] MicrosoftKeyvault, #[serde(skip_deserializing)] UnknownValue(String), } impl FromStr for KeySource { type Err = value::Error; fn from_str(s: &str) -> std::result::Result<Self, Self::Err> { Self::deserialize(s.into_deserializer()) } } impl<'de> Deserialize<'de> for KeySource { fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error> where D: Deserializer<'de>, { let s = String::deserialize(deserializer)?; let deserialized = Self::from_str(&s).unwrap_or(Self::UnknownValue(s)); Ok(deserialized) } } impl Serialize for KeySource { fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error> where S: Serializer, { match self { Self::Default => serializer.serialize_unit_variant("KeySource", 0u32, "Default"), Self::MicrosoftKeyvault => serializer.serialize_unit_variant("KeySource", 1u32, "Microsoft.Keyvault"), Self::UnknownValue(s) => serializer.serialize_str(s.as_str()), } } } impl Default for KeySource { fn default() -> Self { Self::Default } } } #[doc = "Encryption entities for databricks 
workspace resource."] #[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)] pub struct EncryptionEntitiesDefinition { #[doc = "The object that contains details of encryption used on the workspace."] #[serde(rename = "managedServices", default, skip_serializing_if = "Option::is_none")] pub managed_services: Option<EncryptionV2>, #[doc = "The object that contains details of encryption used on the workspace."] #[serde(rename = "managedDisk", default, skip_serializing_if = "Option::is_none")] pub managed_disk: Option<ManagedDiskEncryption>, } impl EncryptionEntitiesDefinition { pub fn new() -> Self { Self::default() } } #[doc = "The object that contains details of encryption used on the workspace."] #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct EncryptionV2 { #[doc = "The encryption keySource (provider). Possible values (case-insensitive): Microsoft.Keyvault"] #[serde(rename = "keySource")] pub key_source: encryption_v2::KeySource, #[doc = "Key Vault input properties for encryption."] #[serde(rename = "keyVaultProperties", default, skip_serializing_if = "Option::is_none")] pub key_vault_properties: Option<encryption_v2::KeyVaultProperties>, } impl EncryptionV2 { pub fn new(key_source: encryption_v2::KeySource) -> Self { Self { key_source, key_vault_properties: None, } } } pub mod encryption_v2 { use super::*; #[doc = "The encryption keySource (provider). 
Possible values (case-insensitive): Microsoft.Keyvault"] #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] #[serde(remote = "KeySource")] pub enum KeySource { #[serde(rename = "Microsoft.Keyvault")] MicrosoftKeyvault, #[serde(skip_deserializing)] UnknownValue(String), } impl FromStr for KeySource { type Err = value::Error; fn from_str(s: &str) -> std::result::Result<Self, Self::Err> { Self::deserialize(s.into_deserializer()) } } impl<'de> Deserialize<'de> for KeySource { fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error> where D: Deserializer<'de>, { let s = String::deserialize(deserializer)?; let deserialized = Self::from_str(&s).unwrap_or(Self::UnknownValue(s)); Ok(deserialized) } } impl Serialize for KeySource { fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error> where S: Serializer, { match self { Self::MicrosoftKeyvault => serializer.serialize_unit_variant("KeySource", 0u32, "Microsoft.Keyvault"), Self::UnknownValue(s) => serializer.serialize_str(s.as_str()), } } } #[doc = "Key Vault input properties for encryption."] #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct KeyVaultProperties { #[doc = "The Uri of KeyVault."] #[serde(rename = "keyVaultUri")] pub key_vault_uri: String, #[doc = "The name of KeyVault key."] #[serde(rename = "keyName")] pub key_name: String, #[doc = "The version of KeyVault key."] #[serde(rename = "keyVersion")] pub key_version: String, } impl KeyVaultProperties { pub fn new(key_vault_uri: String, key_name: String, key_version: String) -> Self { Self { key_vault_uri, key_name, key_version, } } } } #[doc = "A domain name or IP address the Workspace is reaching at."] #[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)] pub struct EndpointDependency { #[doc = "The domain name of the dependency."] #[serde(rename = "domainName", default, skip_serializing_if = "Option::is_none")] pub domain_name: Option<String>, #[doc = "The Ports used when connecting 
to domainName."] #[serde( rename = "endpointDetails", default, deserialize_with = "azure_core::util::deserialize_null_as_default", skip_serializing_if = "Vec::is_empty" )] pub endpoint_details: Vec<EndpointDetail>, } impl EndpointDependency { pub fn new() -> Self { Self::default() } } #[doc = "Connect information from the Workspace to a single endpoint."] #[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)] pub struct EndpointDetail { #[doc = "An IP Address that Domain Name currently resolves to."] #[serde(rename = "ipAddress", default, skip_serializing_if = "Option::is_none")] pub ip_address: Option<String>, #[doc = "The port an endpoint is connected to."] #[serde(default, skip_serializing_if = "Option::is_none")] pub port: Option<i32>, #[doc = "The time in milliseconds it takes for the connection to be created from the Workspace to this IpAddress at this Port."] #[serde(default, skip_serializing_if = "Option::is_none")] pub latency: Option<f64>, #[doc = "Whether it is possible to create a connection from the Workspace to this IpAddress at this Port."] #[serde(rename = "isAccessible", default, skip_serializing_if = "Option::is_none")] pub is_accessible: Option<bool>, } impl EndpointDetail { pub fn new() -> Self { Self::default() } } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct ErrorDetail { #[doc = "The error's code."] pub code: String, #[doc = "A human readable error message."] pub message: String, #[doc = "Indicates which property in the request is responsible for the error."] #[serde(default, skip_serializing_if = "Option::is_none")] pub target: Option<String>, } impl ErrorDetail { pub fn new(code: String, message: String) -> Self { Self { code, message, target: None, } } } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct ErrorInfo { #[doc = "A machine readable error code."] pub code: String, #[doc = "A human readable error message."] pub message: String, #[doc = "error details."] #[serde( default, 
deserialize_with = "azure_core::util::deserialize_null_as_default", skip_serializing_if = "Vec::is_empty" )] pub details: Vec<ErrorDetail>, #[doc = "Inner error details if they exist."] #[serde(default, skip_serializing_if = "Option::is_none")] pub innererror: Option<String>, } impl ErrorInfo { pub fn new(code: String, message: String) -> Self { Self { code, message, details: Vec::new(), innererror: None, } } } #[doc = "Contains details when the response code indicates an error."] #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct ErrorResponse { pub error: ErrorInfo, } impl azure_core::Continuable for ErrorResponse { type Continuation = String; fn continuation(&self) -> Option<Self::Continuation> { None } } impl ErrorResponse { pub fn new(error: ErrorInfo) -> Self { Self { error } } } #[doc = "The group information for creating a private endpoint on a workspace"] #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct GroupIdInformation { #[serde(flatten)] pub resource: Resource, #[doc = "The resource identifier."] #[serde(default, skip_serializing_if = "Option::is_none")] pub id: Option<String>, #[doc = "The resource name."] #[serde(default, skip_serializing_if = "Option::is_none")] pub name: Option<String>, #[doc = "The resource type."] #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")] pub type_: Option<String>, #[doc = "The properties for a group information object"] pub properties: GroupIdInformationProperties, } impl GroupIdInformation { pub fn new(properties: GroupIdInformationProperties) -> Self { Self { resource: Resource::default(), id: None, name: None, type_: None, properties, } } } #[doc = "The properties for a group information object"] #[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)] pub struct GroupIdInformationProperties { #[doc = "The group id"] #[serde(rename = "groupId", default, skip_serializing_if = "Option::is_none")] pub group_id: Option<String>, #[doc = "The 
required members for a specific group id"] #[serde( rename = "requiredMembers", default, deserialize_with = "azure_core::util::deserialize_null_as_default", skip_serializing_if = "Vec::is_empty" )] pub required_members: Vec<String>, #[doc = "The required DNS zones for a specific group id"] #[serde( rename = "requiredZoneNames", default, deserialize_with = "azure_core::util::deserialize_null_as_default", skip_serializing_if = "Vec::is_empty" )] pub required_zone_names: Vec<String>, } impl GroupIdInformationProperties { pub fn new() -> Self { Self::default() } } #[doc = "The object that contains details of encryption used on the workspace."] #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct ManagedDiskEncryption { #[doc = "The encryption keySource (provider). Possible values (case-insensitive): Microsoft.Keyvault"] #[serde(rename = "keySource")] pub key_source: managed_disk_encryption::KeySource, #[doc = "Key Vault input properties for encryption."] #[serde(rename = "keyVaultProperties")] pub key_vault_properties: managed_disk_encryption::KeyVaultProperties, #[doc = "Indicate whether the latest key version should be automatically used for Managed Disk Encryption."] #[serde(rename = "rotationToLatestKeyVersionEnabled", default, skip_serializing_if = "Option::is_none")] pub rotation_to_latest_key_version_enabled: Option<bool>, } impl ManagedDiskEncryption { pub fn new(key_source: managed_disk_encryption::KeySource, key_vault_properties: managed_disk_encryption::KeyVaultProperties) -> Self { Self { key_source, key_vault_properties, rotation_to_latest_key_version_enabled: None, } } } pub mod managed_disk_encryption { use super::*; #[doc = "The encryption keySource (provider). 
Possible values (case-insensitive): Microsoft.Keyvault"] #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] #[serde(remote = "KeySource")] pub enum KeySource { #[serde(rename = "Microsoft.Keyvault")] MicrosoftKeyvault, #[serde(skip_deserializing)] UnknownValue(String), } impl FromStr for KeySource { type Err = value::Error; fn from_str(s: &str) -> std::result::Result<Self, Self::Err> { Self::deserialize(s.into_deserializer()) } } impl<'de> Deserialize<'de> for KeySource { fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error> where D: Deserializer<'de>, { let s = String::deserialize(deserializer)?; let deserialized = Self::from_str(&s).unwrap_or(Self::UnknownValue(s)); Ok(deserialized) } } impl Serialize for KeySource { fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error> where S: Serializer, { match self { Self::MicrosoftKeyvault => serializer.serialize_unit_variant("KeySource", 0u32, "Microsoft.Keyvault"), Self::UnknownValue(s) => serializer.serialize_str(s.as_str()), } } } #[doc = "Key Vault input properties for encryption."] #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct KeyVaultProperties { #[doc = "The URI of KeyVault."] #[serde(rename = "keyVaultUri")] pub key_vault_uri: String, #[doc = "The name of KeyVault key."] #[serde(rename = "keyName")] pub key_name: String, #[doc = "The version of KeyVault key."] #[serde(rename = "keyVersion")] pub key_version: String, } impl KeyVaultProperties { pub fn new(key_vault_uri: String, key_name: String, key_version: String) -> Self { Self { key_vault_uri, key_name, key_version, } } } } #[doc = "The Managed Identity details for storage account."] #[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)] pub struct ManagedIdentityConfiguration { #[doc = "The objectId of the Managed Identity that is linked to the Managed Storage account."] #[serde(rename = "principalId", default, skip_serializing_if = "Option::is_none")] pub principal_id: 
Option<String>, #[doc = "The tenant Id where the Managed Identity is created."] #[serde(rename = "tenantId", default, skip_serializing_if = "Option::is_none")] pub tenant_id: Option<String>, #[doc = "The type of Identity created. It can be either SystemAssigned or UserAssigned."] #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")] pub type_: Option<String>, } impl ManagedIdentityConfiguration { pub fn new() -> Self { Self::default() } } #[doc = "Managed service identity (system assigned and/or user assigned identities)"] #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct ManagedServiceIdentity { #[doc = "The service principal ID of the system assigned identity. This property will only be provided for a system assigned identity."] #[serde(rename = "principalId", default, skip_serializing_if = "Option::is_none")] pub principal_id: Option<String>, #[doc = "The tenant ID of the system assigned identity. This property will only be provided for a system assigned identity."] #[serde(rename = "tenantId", default, skip_serializing_if = "Option::is_none")] pub tenant_id: Option<String>, #[doc = "Type of managed service identity (where both SystemAssigned and UserAssigned types are allowed)."] #[serde(rename = "type")] pub type_: ManagedServiceIdentityType, #[doc = "The set of user assigned identities associated with the resource. The userAssignedIdentities dictionary keys will be ARM resource ids in the form: '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identityName}. 
The dictionary values can be empty objects ({}) in requests."] #[serde(rename = "userAssignedIdentities", default, skip_serializing_if = "Option::is_none")] pub user_assigned_identities: Option<UserAssignedIdentities>, } impl ManagedServiceIdentity { pub fn new(type_: ManagedServiceIdentityType) -> Self { Self { principal_id: None, tenant_id: None, type_, user_assigned_identities: None, } } } #[doc = "Type of managed service identity (where both SystemAssigned and UserAssigned types are allowed)."] #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] #[serde(remote = "ManagedServiceIdentityType")] pub enum ManagedServiceIdentityType { None, SystemAssigned, UserAssigned, #[serde(rename = "SystemAssigned,UserAssigned")] SystemAssignedUserAssigned, #[serde(skip_deserializing)] UnknownValue(String), } impl FromStr for ManagedServiceIdentityType { type Err = value::Error; fn from_str(s: &str) -> std::result::Result<Self, Self::Err> { Self::deserialize(s.into_deserializer()) } } impl<'de> Deserialize<'de> for ManagedServiceIdentityType { fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error> where D: Deserializer<'de>, { let s = String::deserialize(deserializer)?; let deserialized = Self::from_str(&s).unwrap_or(Self::UnknownValue(s)); Ok(deserialized) } } impl Serialize for ManagedServiceIdentityType { fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error> where S: Serializer, { match self { Self::None => serializer.serialize_unit_variant("ManagedServiceIdentityType", 0u32, "None"), Self::SystemAssigned => serializer.serialize_unit_variant("ManagedServiceIdentityType", 1u32, "SystemAssigned"), Self::UserAssigned => serializer.serialize_unit_variant("ManagedServiceIdentityType", 2u32, "UserAssigned"), Self::SystemAssignedUserAssigned => { serializer.serialize_unit_variant("ManagedServiceIdentityType", 3u32, "SystemAssigned,UserAssigned") } Self::UnknownValue(s) => serializer.serialize_str(s.as_str()), } } } #[doc = "REST API 
operation"] #[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)] pub struct Operation { #[doc = "Operation name: {provider}/{resource}/{operation}"] #[serde(default, skip_serializing_if = "Option::is_none")] pub name: Option<String>, #[doc = "The object that represents the operation."] #[serde(default, skip_serializing_if = "Option::is_none")] pub display: Option<operation::Display>, } impl Operation { pub fn new() -> Self { Self::default() } } pub mod operation { use super::*; #[doc = "The object that represents the operation."] #[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)] pub struct Display { #[doc = "Service provider: ex Microsoft.Databricks"] #[serde(default, skip_serializing_if = "Option::is_none")] pub provider: Option<String>, #[doc = "Resource on which the operation is performed."] #[serde(default, skip_serializing_if = "Option::is_none")] pub resource: Option<String>, #[doc = "Operation type: Read, write, delete, etc."] #[serde(default, skip_serializing_if = "Option::is_none")] pub operation: Option<String>, #[doc = "Description for the resource operation."] #[serde(default, skip_serializing_if = "Option::is_none")] pub description: Option<String>, } impl Display { pub fn new() -> Self { Self::default() } } } #[doc = "Result of the request to list Resource Provider operations. 
It contains a list of operations and a URL link to get the next set of results."] #[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)] pub struct OperationListResult { #[doc = "List of Resource Provider operations supported by the Resource Provider resource provider."] #[serde( default, deserialize_with = "azure_core::util::deserialize_null_as_default", skip_serializing_if = "Vec::is_empty" )] pub value: Vec<Operation>, #[doc = "URL to get the next set of operation list results if there are any."] #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")] pub next_link: Option<String>, } impl azure_core::Continuable for OperationListResult { type Continuation = String; fn continuation(&self) -> Option<Self::Continuation> { self.next_link.clone() } } impl OperationListResult { pub fn new() -> Self { Self::default() } } #[doc = "Egress endpoints which Workspace connects to for common purposes."] #[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)] pub struct OutboundEnvironmentEndpoint { #[doc = "The category of endpoints accessed by the Workspace, e.g. 
azure-storage, azure-mysql, etc."] #[serde(default, skip_serializing_if = "Option::is_none")] pub category: Option<String>, #[doc = "The endpoints that Workspace connect to"] #[serde( default, deserialize_with = "azure_core::util::deserialize_null_as_default", skip_serializing_if = "Vec::is_empty" )] pub endpoints: Vec<EndpointDependency>, } impl OutboundEnvironmentEndpoint { pub fn new() -> Self { Self::default() } } pub type OutboundEnvironmentEndpointCollection = Vec<OutboundEnvironmentEndpoint>; #[doc = "The current provisioning state."] #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] #[serde(remote = "PeeringProvisioningState")] pub enum PeeringProvisioningState { Succeeded, Updating, Deleting, Failed, #[serde(skip_deserializing)] UnknownValue(String), } impl FromStr for PeeringProvisioningState { type Err = value::Error; fn from_str(s: &str) -> std::result::Result<Self, Self::Err> { Self::deserialize(s.into_deserializer()) } } impl<'de> Deserialize<'de> for PeeringProvisioningState { fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error> where D: Deserializer<'de>, { let s = String::deserialize(deserializer)?; let deserialized = Self::from_str(&s).unwrap_or(Self::UnknownValue(s)); Ok(deserialized) } } impl Serialize for PeeringProvisioningState { fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error> where S: Serializer, { match self { Self::Succeeded => serializer.serialize_unit_variant("PeeringProvisioningState", 0u32, "Succeeded"), Self::Updating => serializer.serialize_unit_variant("PeeringProvisioningState", 1u32, "Updating"), Self::Deleting => serializer.serialize_unit_variant("PeeringProvisioningState", 2u32, "Deleting"), Self::Failed => serializer.serialize_unit_variant("PeeringProvisioningState", 3u32, "Failed"), Self::UnknownValue(s) => serializer.serialize_str(s.as_str()), } } } #[doc = "The private endpoint property of a private endpoint connection"] #[derive(Clone, Debug, PartialEq, Serialize, 
Deserialize, Default)] pub struct PrivateEndpoint { #[doc = "The resource identifier."] #[serde(default, skip_serializing_if = "Option::is_none")] pub id: Option<String>, } impl PrivateEndpoint { pub fn new() -> Self { Self::default() } } #[doc = "The private endpoint connection of a workspace"] #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct PrivateEndpointConnection { #[doc = "The resource identifier."] #[serde(default, skip_serializing_if = "Option::is_none")] pub id: Option<String>, #[doc = "The resource name."] #[serde(default, skip_serializing_if = "Option::is_none")] pub name: Option<String>, #[doc = "The resource type."] #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")] pub type_: Option<String>, #[doc = "The properties of a private endpoint connection"] pub properties: PrivateEndpointConnectionProperties, } impl PrivateEndpointConnection { pub fn new(properties: PrivateEndpointConnectionProperties) -> Self { Self { id: None, name: None, type_: None, properties, } } } #[doc = "The properties of a private endpoint connection"] #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct PrivateEndpointConnectionProperties { #[doc = "The private endpoint property of a private endpoint connection"] #[serde(rename = "privateEndpoint", default, skip_serializing_if = "Option::is_none")] pub private_endpoint: Option<PrivateEndpoint>, #[doc = "GroupIds from the private link service resource."] #[serde( rename = "groupIds", default, deserialize_with = "azure_core::util::deserialize_null_as_default", skip_serializing_if = "Vec::is_empty" )] pub group_ids: Vec<String>, #[doc = "The current state of a private endpoint connection"] #[serde(rename = "privateLinkServiceConnectionState")] pub private_link_service_connection_state: PrivateLinkServiceConnectionState, #[doc = "The current provisioning state."] #[serde(rename = "provisioningState", default, skip_serializing_if = "Option::is_none")] pub 
provisioning_state: Option<PrivateEndpointConnectionProvisioningState>, } impl PrivateEndpointConnectionProperties { pub fn new(private_link_service_connection_state: PrivateLinkServiceConnectionState) -> Self { Self { private_endpoint: None, group_ids: Vec::new(), private_link_service_connection_state, provisioning_state: None, } } } #[doc = "The current provisioning state."] #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] #[serde(remote = "PrivateEndpointConnectionProvisioningState")] pub enum PrivateEndpointConnectionProvisioningState { Succeeded, Creating, Updating, Deleting, Failed, #[serde(skip_deserializing)] UnknownValue(String), } impl FromStr for PrivateEndpointConnectionProvisioningState { type Err = value::Error; fn from_str(s: &str) -> std::result::Result<Self, Self::Err> { Self::deserialize(s.into_deserializer()) } } impl<'de> Deserialize<'de> for PrivateEndpointConnectionProvisioningState { fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error> where D: Deserializer<'de>, { let s = String::deserialize(deserializer)?; let deserialized = Self::from_str(&s).unwrap_or(Self::UnknownValue(s)); Ok(deserialized) } } impl Serialize for PrivateEndpointConnectionProvisioningState { fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error> where S: Serializer, { match self { Self::Succeeded => serializer.serialize_unit_variant("PrivateEndpointConnectionProvisioningState", 0u32, "Succeeded"), Self::Creating => serializer.serialize_unit_variant("PrivateEndpointConnectionProvisioningState", 1u32, "Creating"), Self::Updating => serializer.serialize_unit_variant("PrivateEndpointConnectionProvisioningState", 2u32, "Updating"), Self::Deleting => serializer.serialize_unit_variant("PrivateEndpointConnectionProvisioningState", 3u32, "Deleting"), Self::Failed => serializer.serialize_unit_variant("PrivateEndpointConnectionProvisioningState", 4u32, "Failed"), Self::UnknownValue(s) => serializer.serialize_str(s.as_str()), } } 
} #[doc = "List of private link connections."] #[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)] pub struct PrivateEndpointConnectionsList { #[doc = "The list of returned private endpoint connection."] #[serde( default, deserialize_with = "azure_core::util::deserialize_null_as_default", skip_serializing_if = "Vec::is_empty" )] pub value: Vec<PrivateEndpointConnection>, #[doc = "The URL to get the next set of endpoint connections."] #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")] pub next_link: Option<String>, } impl azure_core::Continuable for PrivateEndpointConnectionsList { type Continuation = String; fn continuation(&self) -> Option<Self::Continuation> { self.next_link.clone() } } impl PrivateEndpointConnectionsList { pub fn new() -> Self { Self::default() } } #[doc = "The available private link resources for a workspace"] #[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)] pub struct PrivateLinkResourcesList { #[doc = "The list of available private link resources for a workspace"] #[serde( default, deserialize_with = "azure_core::util::deserialize_null_as_default", skip_serializing_if = "Vec::is_empty" )] pub value: Vec<GroupIdInformation>, #[doc = "The URL to get the next set of private link resources."] #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")] pub next_link: Option<String>, } impl azure_core::Continuable for PrivateLinkResourcesList { type Continuation = String; fn continuation(&self) -> Option<Self::Continuation> { self.next_link.clone() } } impl PrivateLinkResourcesList { pub fn new() -> Self { Self::default() } } #[doc = "The current state of a private endpoint connection"] #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct PrivateLinkServiceConnectionState { #[doc = "The status of a private endpoint connection"] pub status: private_link_service_connection_state::Status, #[doc = "The description for the current state of a private 
endpoint connection"] #[serde(default, skip_serializing_if = "Option::is_none")] pub description: Option<String>, #[doc = "Actions required for a private endpoint connection"] #[serde(rename = "actionsRequired", default, skip_serializing_if = "Option::is_none")] pub actions_required: Option<String>, } impl PrivateLinkServiceConnectionState { pub fn new(status: private_link_service_connection_state::Status) -> Self { Self { status, description: None, actions_required: None, } } } pub mod private_link_service_connection_state { use super::*; #[doc = "The status of a private endpoint connection"] #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] #[serde(remote = "Status")] pub enum Status { Pending, Approved, Rejected, Disconnected, #[serde(skip_deserializing)] UnknownValue(String), } impl FromStr for Status { type Err = value::Error; fn from_str(s: &str) -> std::result::Result<Self, Self::Err> { Self::deserialize(s.into_deserializer()) } } impl<'de> Deserialize<'de> for Status { fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error> where D: Deserializer<'de>, { let s = String::deserialize(deserializer)?; let deserialized = Self::from_str(&s).unwrap_or(Self::UnknownValue(s)); Ok(deserialized) } } impl Serialize for Status { fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error> where S: Serializer, { match self { Self::Pending => serializer.serialize_unit_variant("Status", 0u32, "Pending"), Self::Approved => serializer.serialize_unit_variant("Status", 1u32, "Approved"), Self::Rejected => serializer.serialize_unit_variant("Status", 2u32, "Rejected"), Self::Disconnected => serializer.serialize_unit_variant("Status", 3u32, "Disconnected"), Self::UnknownValue(s) => serializer.serialize_str(s.as_str()), } } } } #[doc = "Provisioning status of the workspace."] #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] #[serde(remote = "ProvisioningState")] pub enum ProvisioningState { Accepted, Running, Ready, Creating, 
Created, Deleting, Deleted, Canceled, Failed, Succeeded, Updating, #[serde(skip_deserializing)] UnknownValue(String), } impl FromStr for ProvisioningState { type Err = value::Error; fn from_str(s: &str) -> std::result::Result<Self, Self::Err> { Self::deserialize(s.into_deserializer()) } } impl<'de> Deserialize<'de> for ProvisioningState { fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error> where D: Deserializer<'de>, { let s = String::deserialize(deserializer)?; let deserialized = Self::from_str(&s).unwrap_or(Self::UnknownValue(s)); Ok(deserialized) } } impl Serialize for ProvisioningState { fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error> where S: Serializer, { match self { Self::Accepted => serializer.serialize_unit_variant("ProvisioningState", 0u32, "Accepted"), Self::Running => serializer.serialize_unit_variant("ProvisioningState", 1u32, "Running"), Self::Ready => serializer.serialize_unit_variant("ProvisioningState", 2u32, "Ready"), Self::Creating => serializer.serialize_unit_variant("ProvisioningState", 3u32, "Creating"), Self::Created => serializer.serialize_unit_variant("ProvisioningState", 4u32, "Created"), Self::Deleting => serializer.serialize_unit_variant("ProvisioningState", 5u32, "Deleting"), Self::Deleted => serializer.serialize_unit_variant("ProvisioningState", 6u32, "Deleted"), Self::Canceled => serializer.serialize_unit_variant("ProvisioningState", 7u32, "Canceled"), Self::Failed => serializer.serialize_unit_variant("ProvisioningState", 8u32, "Failed"), Self::Succeeded => serializer.serialize_unit_variant("ProvisioningState", 9u32, "Succeeded"), Self::Updating => serializer.serialize_unit_variant("ProvisioningState", 10u32, "Updating"), Self::UnknownValue(s) => serializer.serialize_str(s.as_str()), } } } #[doc = "The core properties of ARM resources"] #[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)] pub struct Resource { #[doc = "Fully qualified resource Id for the resource. 
Ex - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}"] #[serde(default, skip_serializing_if = "Option::is_none")] pub id: Option<String>, #[doc = "The name of the resource"] #[serde(default, skip_serializing_if = "Option::is_none")] pub name: Option<String>, #[doc = "The type of the resource. Ex- Microsoft.Compute/virtualMachines or Microsoft.Storage/storageAccounts."] #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")] pub type_: Option<String>, } impl Resource { pub fn new() -> Self { Self::default() } } #[doc = "SKU for the resource."] #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct Sku { #[doc = "The SKU name."] pub name: String, #[doc = "The SKU tier."] #[serde(default, skip_serializing_if = "Option::is_none")] pub tier: Option<String>, } impl Sku { pub fn new(name: String) -> Self { Self { name, tier: None } } } #[doc = "The resource model definition for a ARM tracked top level resource"] #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct TrackedResource { #[serde(flatten)] pub resource: Resource, #[doc = "Resource tags."] #[serde(default, skip_serializing_if = "Option::is_none")] pub tags: Option<serde_json::Value>, #[doc = "The geo-location where the resource lives"] pub location: String, } impl TrackedResource { pub fn new(location: String) -> Self { Self { resource: Resource::default(), tags: None, location, } } } #[doc = "The set of user assigned identities associated with the resource. The userAssignedIdentities dictionary keys will be ARM resource ids in the form: '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identityName}. 
The dictionary values can be empty objects ({}) in requests."] #[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)] pub struct UserAssignedIdentities {} impl UserAssignedIdentities { pub fn new() -> Self { Self::default() } } #[doc = "User assigned identity properties"] #[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)] pub struct UserAssignedIdentity { #[doc = "The principal ID of the assigned identity."] #[serde(rename = "principalId", default, skip_serializing_if = "Option::is_none")] pub principal_id: Option<String>, #[doc = "The client ID of the assigned identity."] #[serde(rename = "clientId", default, skip_serializing_if = "Option::is_none")] pub client_id: Option<String>, } impl UserAssignedIdentity { pub fn new() -> Self { Self::default() } } #[doc = "Peerings in a VirtualNetwork resource"] #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct VirtualNetworkPeering { #[doc = "Properties of the virtual network peering."] pub properties: VirtualNetworkPeeringPropertiesFormat, #[doc = "Name of the virtual network peering resource"] #[serde(default, skip_serializing_if = "Option::is_none")] pub name: Option<String>, #[doc = "Resource ID."] #[serde(default, skip_serializing_if = "Option::is_none")] pub id: Option<String>, #[doc = "type of the virtual network peering resource"] #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")] pub type_: Option<String>, } impl VirtualNetworkPeering { pub fn new(properties: VirtualNetworkPeeringPropertiesFormat) -> Self { Self { properties, name: None, id: None, type_: None, } } } #[doc = "Gets all virtual network peerings under a workspace."] #[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)] pub struct VirtualNetworkPeeringList { #[doc = "List of virtual network peerings on workspace."] #[serde( default, deserialize_with = "azure_core::util::deserialize_null_as_default", skip_serializing_if = "Vec::is_empty" )] pub value: 
Vec<VirtualNetworkPeering>, #[doc = "URL to get the next set of virtual network peering list results if there are any."] #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")] pub next_link: Option<String>, } impl azure_core::Continuable for VirtualNetworkPeeringList { type Continuation = String; fn continuation(&self) -> Option<Self::Continuation> { self.next_link.clone() } } impl VirtualNetworkPeeringList { pub fn new() -> Self { Self::default() } } #[doc = "Properties of the virtual network peering."] #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct VirtualNetworkPeeringPropertiesFormat { #[doc = "Whether the VMs in the local virtual network space would be able to access the VMs in remote virtual network space."] #[serde(rename = "allowVirtualNetworkAccess", default, skip_serializing_if = "Option::is_none")] pub allow_virtual_network_access: Option<bool>, #[doc = "Whether the forwarded traffic from the VMs in the local virtual network will be allowed/disallowed in remote virtual network."] #[serde(rename = "allowForwardedTraffic", default, skip_serializing_if = "Option::is_none")] pub allow_forwarded_traffic: Option<bool>, #[doc = "If gateway links can be used in remote virtual networking to link to this virtual network."] #[serde(rename = "allowGatewayTransit", default, skip_serializing_if = "Option::is_none")] pub allow_gateway_transit: Option<bool>, #[doc = "If remote gateways can be used on this virtual network. If the flag is set to true, and allowGatewayTransit on remote peering is also true, virtual network will use gateways of remote virtual network for transit. Only one peering can have this flag set to true. This flag cannot be set if virtual network already has a gateway."] #[serde(rename = "useRemoteGateways", default, skip_serializing_if = "Option::is_none")] pub use_remote_gateways: Option<bool>, #[doc = " The remote virtual network should be in the same region. 
See here to learn more (https://docs.microsoft.com/en-us/azure/databricks/administration-guide/cloud-configurations/azure/vnet-peering)."] #[serde(rename = "databricksVirtualNetwork", default, skip_serializing_if = "Option::is_none")] pub databricks_virtual_network: Option<virtual_network_peering_properties_format::DatabricksVirtualNetwork>, #[doc = "AddressSpace contains an array of IP address ranges that can be used by subnets of the virtual network."] #[serde(rename = "databricksAddressSpace", default, skip_serializing_if = "Option::is_none")] pub databricks_address_space: Option<AddressSpace>, #[doc = " The remote virtual network should be in the same region. See here to learn more (https://docs.microsoft.com/en-us/azure/databricks/administration-guide/cloud-configurations/azure/vnet-peering)."] #[serde(rename = "remoteVirtualNetwork")] pub remote_virtual_network: virtual_network_peering_properties_format::RemoteVirtualNetwork, #[doc = "AddressSpace contains an array of IP address ranges that can be used by subnets of the virtual network."] #[serde(rename = "remoteAddressSpace", default, skip_serializing_if = "Option::is_none")] pub remote_address_space: Option<AddressSpace>, #[doc = "The status of the virtual network peering."] #[serde(rename = "peeringState", default, skip_serializing_if = "Option::is_none")] pub peering_state: Option<virtual_network_peering_properties_format::PeeringState>, #[doc = "The current provisioning state."] #[serde(rename = "provisioningState", default, skip_serializing_if = "Option::is_none")] pub provisioning_state: Option<PeeringProvisioningState>, } impl VirtualNetworkPeeringPropertiesFormat { pub fn new(remote_virtual_network: virtual_network_peering_properties_format::RemoteVirtualNetwork) -> Self { Self { allow_virtual_network_access: None, allow_forwarded_traffic: None, allow_gateway_transit: None, use_remote_gateways: None, databricks_virtual_network: None, databricks_address_space: None, remote_virtual_network, 
remote_address_space: None, peering_state: None, provisioning_state: None, } } } pub mod virtual_network_peering_properties_format { use super::*; #[doc = " The remote virtual network should be in the same region. See here to learn more (https://docs.microsoft.com/en-us/azure/databricks/administration-guide/cloud-configurations/azure/vnet-peering)."] #[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)] pub struct DatabricksVirtualNetwork { #[doc = "The Id of the databricks virtual network."] #[serde(default, skip_serializing_if = "Option::is_none")] pub id: Option<String>, } impl DatabricksVirtualNetwork { pub fn new() -> Self { Self::default() } } #[doc = " The remote virtual network should be in the same region. See here to learn more (https://docs.microsoft.com/en-us/azure/databricks/administration-guide/cloud-configurations/azure/vnet-peering)."] #[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)] pub struct RemoteVirtualNetwork { #[doc = "The Id of the remote virtual network."] #[serde(default, skip_serializing_if = "Option::is_none")] pub id: Option<String>, } impl RemoteVirtualNetwork { pub fn new() -> Self { Self::default() } } #[doc = "The status of the virtual network peering."] #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] #[serde(remote = "PeeringState")] pub enum PeeringState { Initiated, Connected, Disconnected, #[serde(skip_deserializing)] UnknownValue(String), } impl FromStr for PeeringState { type Err = value::Error; fn from_str(s: &str) -> std::result::Result<Self, Self::Err> { Self::deserialize(s.into_deserializer()) } } impl<'de> Deserialize<'de> for PeeringState { fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error> where D: Deserializer<'de>, { let s = String::deserialize(deserializer)?; let deserialized = Self::from_str(&s).unwrap_or(Self::UnknownValue(s)); Ok(deserialized) } } impl Serialize for PeeringState { fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, 
S::Error> where S: Serializer, { match self { Self::Initiated => serializer.serialize_unit_variant("PeeringState", 0u32, "Initiated"), Self::Connected => serializer.serialize_unit_variant("PeeringState", 1u32, "Connected"), Self::Disconnected => serializer.serialize_unit_variant("PeeringState", 2u32, "Disconnected"), Self::UnknownValue(s) => serializer.serialize_str(s.as_str()), } } } } #[doc = "Information about workspace."] #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct Workspace { #[serde(flatten)] pub tracked_resource: TrackedResource, #[doc = "The workspace properties."] pub properties: WorkspaceProperties, #[doc = "SKU for the resource."] #[serde(default, skip_serializing_if = "Option::is_none")] pub sku: Option<Sku>, #[doc = "Metadata pertaining to creation and last modification of the resource."] #[serde(rename = "systemData", default, skip_serializing_if = "Option::is_none")] pub system_data: Option<SystemData>, } impl Workspace { pub fn new(tracked_resource: TrackedResource, properties: WorkspaceProperties) -> Self { Self { tracked_resource, properties, sku: None, system_data: None, } } } #[doc = "The value which should be used for this field."] #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct WorkspaceCustomBooleanParameter { #[doc = "Provisioning status of the workspace."] #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")] pub type_: Option<WorkspaceCustomParameterType>, #[doc = "The value which should be used for this field."] pub value: bool, } impl WorkspaceCustomBooleanParameter { pub fn new(value: bool) -> Self { Self { type_: None, value } } } #[doc = "The value which should be used for this field."] #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct WorkspaceCustomObjectParameter { #[doc = "Provisioning status of the workspace."] #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")] pub type_: Option<WorkspaceCustomParameterType>, #[doc = 
"The value which should be used for this field."] pub value: serde_json::Value, } impl WorkspaceCustomObjectParameter { pub fn new(value: serde_json::Value) -> Self { Self { type_: None, value } } } #[doc = "Provisioning status of the workspace."] #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] #[serde(remote = "WorkspaceCustomParameterType")] pub enum WorkspaceCustomParameterType { Bool, Object, String, #[serde(skip_deserializing)] UnknownValue(String), } impl FromStr for WorkspaceCustomParameterType { type Err = value::Error; fn from_str(s: &str) -> std::result::Result<Self, Self::Err> { Self::deserialize(s.into_deserializer()) } } impl<'de> Deserialize<'de> for WorkspaceCustomParameterType { fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error> where D: Deserializer<'de>, { let s = String::deserialize(deserializer)?; let deserialized = Self::from_str(&s).unwrap_or(Self::UnknownValue(s)); Ok(deserialized) } } impl Serialize for WorkspaceCustomParameterType { fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error> where S: Serializer, { match self { Self::Bool => serializer.serialize_unit_variant("WorkspaceCustomParameterType", 0u32, "Bool"), Self::Object => serializer.serialize_unit_variant("WorkspaceCustomParameterType", 1u32, "Object"), Self::String => serializer.serialize_unit_variant("WorkspaceCustomParameterType", 2u32, "String"), Self::UnknownValue(s) => serializer.serialize_str(s.as_str()), } } } #[doc = "Custom Parameters used for Cluster Creation."] #[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)] pub struct WorkspaceCustomParameters { #[doc = "The Value."] #[serde(rename = "amlWorkspaceId", default, skip_serializing_if = "Option::is_none")] pub aml_workspace_id: Option<WorkspaceCustomStringParameter>, #[doc = "The Value."] #[serde(rename = "customVirtualNetworkId", default, skip_serializing_if = "Option::is_none")] pub custom_virtual_network_id: 
Option<WorkspaceCustomStringParameter>, #[doc = "The Value."] #[serde(rename = "customPublicSubnetName", default, skip_serializing_if = "Option::is_none")] pub custom_public_subnet_name: Option<WorkspaceCustomStringParameter>, #[doc = "The Value."] #[serde(rename = "customPrivateSubnetName", default, skip_serializing_if = "Option::is_none")] pub custom_private_subnet_name: Option<WorkspaceCustomStringParameter>, #[doc = "The value which should be used for this field."] #[serde(rename = "enableNoPublicIp", default, skip_serializing_if = "Option::is_none")] pub enable_no_public_ip: Option<WorkspaceCustomBooleanParameter>, #[doc = "The Value."] #[serde(rename = "loadBalancerBackendPoolName", default, skip_serializing_if = "Option::is_none")] pub load_balancer_backend_pool_name: Option<WorkspaceCustomStringParameter>, #[doc = "The Value."] #[serde(rename = "loadBalancerId", default, skip_serializing_if = "Option::is_none")] pub load_balancer_id: Option<WorkspaceCustomStringParameter>, #[doc = "The Value."] #[serde(rename = "natGatewayName", default, skip_serializing_if = "Option::is_none")] pub nat_gateway_name: Option<WorkspaceCustomStringParameter>, #[doc = "The Value."] #[serde(rename = "publicIpName", default, skip_serializing_if = "Option::is_none")] pub public_ip_name: Option<WorkspaceCustomStringParameter>, #[doc = "The value which should be used for this field."] #[serde(rename = "prepareEncryption", default, skip_serializing_if = "Option::is_none")] pub prepare_encryption: Option<WorkspaceCustomBooleanParameter>, #[doc = "The object that contains details of encryption used on the workspace."] #[serde(default, skip_serializing_if = "Option::is_none")] pub encryption: Option<WorkspaceEncryptionParameter>, #[doc = "The value which should be used for this field."] #[serde(rename = "requireInfrastructureEncryption", default, skip_serializing_if = "Option::is_none")] pub require_infrastructure_encryption: Option<WorkspaceCustomBooleanParameter>, #[doc = "The 
Value."] #[serde(rename = "storageAccountName", default, skip_serializing_if = "Option::is_none")] pub storage_account_name: Option<WorkspaceCustomStringParameter>, #[doc = "The Value."] #[serde(rename = "storageAccountSkuName", default, skip_serializing_if = "Option::is_none")] pub storage_account_sku_name: Option<WorkspaceCustomStringParameter>, #[doc = "The Value."] #[serde(rename = "vnetAddressPrefix", default, skip_serializing_if = "Option::is_none")] pub vnet_address_prefix: Option<WorkspaceCustomStringParameter>, #[doc = "The value which should be used for this field."] #[serde(rename = "resourceTags", default, skip_serializing_if = "Option::is_none")] pub resource_tags: Option<WorkspaceCustomObjectParameter>, } impl WorkspaceCustomParameters { pub fn new() -> Self { Self::default() } } #[doc = "The Value."] #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct WorkspaceCustomStringParameter { #[doc = "Provisioning status of the workspace."] #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")] pub type_: Option<WorkspaceCustomParameterType>, #[doc = "The value which should be used for this field."] pub value: String, } impl WorkspaceCustomStringParameter { pub fn new(value: String) -> Self { Self { type_: None, value } } } #[doc = "The object that contains details of encryption used on the workspace."] #[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)] pub struct WorkspaceEncryptionParameter { #[doc = "Provisioning status of the workspace."] #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")] pub type_: Option<WorkspaceCustomParameterType>, #[doc = "The object that contains details of encryption used on the workspace."] #[serde(default, skip_serializing_if = "Option::is_none")] pub value: Option<Encryption>, } impl WorkspaceEncryptionParameter { pub fn new() -> Self { Self::default() } } #[doc = "List of workspaces."] #[derive(Clone, Debug, PartialEq, Serialize, Deserialize, 
Default)] pub struct WorkspaceListResult { #[doc = "The array of workspaces."] #[serde( default, deserialize_with = "azure_core::util::deserialize_null_as_default", skip_serializing_if = "Vec::is_empty" )] pub value: Vec<Workspace>, #[doc = "The URL to use for getting the next set of results."] #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")] pub next_link: Option<String>, } impl azure_core::Continuable for WorkspaceListResult { type Continuation = String; fn continuation(&self) -> Option<Self::Continuation> { self.next_link.clone() } } impl WorkspaceListResult { pub fn new() -> Self { Self::default() } } #[doc = "The workspace properties."] #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct WorkspaceProperties { #[doc = "The managed resource group Id."] #[serde(rename = "managedResourceGroupId")] pub managed_resource_group_id: String, #[doc = "Custom Parameters used for Cluster Creation."] #[serde(default, skip_serializing_if = "Option::is_none")] pub parameters: Option<WorkspaceCustomParameters>, #[doc = "Provisioning status of the workspace."] #[serde(rename = "provisioningState", default, skip_serializing_if = "Option::is_none")] pub provisioning_state: Option<ProvisioningState>, #[doc = "The blob URI where the UI definition file is located."] #[serde(rename = "uiDefinitionUri", default, skip_serializing_if = "Option::is_none")] pub ui_definition_uri: Option<String>, #[doc = "The workspace provider authorizations."] #[serde( default, deserialize_with = "azure_core::util::deserialize_null_as_default", skip_serializing_if = "Vec::is_empty" )] pub authorizations: Vec<WorkspaceProviderAuthorization>, #[doc = "Provides details of the entity that created/updated the workspace."] #[serde(rename = "createdBy", default, skip_serializing_if = "Option::is_none")] pub created_by: Option<CreatedBy>, #[doc = "Provides details of the entity that created/updated the workspace."] #[serde(rename = "updatedBy", default, 
skip_serializing_if = "Option::is_none")] pub updated_by: Option<CreatedBy>, #[doc = "The date and time stamp when the workspace was created."] #[serde(rename = "createdDateTime", default, skip_serializing_if = "Option::is_none")] pub created_date_time: Option<CreatedDateTime>, #[doc = "The unique identifier of the databricks workspace in databricks control plane."] #[serde(rename = "workspaceId", default, skip_serializing_if = "Option::is_none")] pub workspace_id: Option<String>, #[doc = "The workspace URL which is of the format 'adb-{workspaceId}.{random}.azuredatabricks.net'"] #[serde(rename = "workspaceUrl", default, skip_serializing_if = "Option::is_none")] pub workspace_url: Option<String>, #[doc = "The Managed Identity details for storage account."] #[serde(rename = "storageAccountIdentity", default, skip_serializing_if = "Option::is_none")] pub storage_account_identity: Option<ManagedIdentityConfiguration>, #[doc = "The Managed Identity details for storage account."] #[serde(rename = "managedDiskIdentity", default, skip_serializing_if = "Option::is_none")] pub managed_disk_identity: Option<ManagedIdentityConfiguration>, #[doc = "The resource Id of the managed disk encryption set."] #[serde(rename = "diskEncryptionSetId", default, skip_serializing_if = "Option::is_none")] pub disk_encryption_set_id: Option<String>, #[doc = "Encryption properties for databricks workspace"] #[serde(default, skip_serializing_if = "Option::is_none")] pub encryption: Option<workspace_properties::Encryption>, #[doc = "Private endpoint connections created on the workspace"] #[serde( rename = "privateEndpointConnections", default, deserialize_with = "azure_core::util::deserialize_null_as_default", skip_serializing_if = "Vec::is_empty" )] pub private_endpoint_connections: Vec<PrivateEndpointConnection>, #[doc = "The network access type for accessing workspace. 
Set value to disabled to access workspace only via private link."] #[serde(rename = "publicNetworkAccess", default, skip_serializing_if = "Option::is_none")] pub public_network_access: Option<workspace_properties::PublicNetworkAccess>, #[doc = "Gets or sets a value indicating whether data plane (clusters) to control plane communication happen over private endpoint. Supported values are 'AllRules' and 'NoAzureDatabricksRules'. 'NoAzureServiceRules' value is for internal use only."] #[serde(rename = "requiredNsgRules", default, skip_serializing_if = "Option::is_none")] pub required_nsg_rules: Option<workspace_properties::RequiredNsgRules>, } impl WorkspaceProperties { pub fn new(managed_resource_group_id: String) -> Self { Self { managed_resource_group_id, parameters: None, provisioning_state: None, ui_definition_uri: None, authorizations: Vec::new(), created_by: None, updated_by: None, created_date_time: None, workspace_id: None, workspace_url: None, storage_account_identity: None, managed_disk_identity: None, disk_encryption_set_id: None, encryption: None, private_endpoint_connections: Vec::new(), public_network_access: None, required_nsg_rules: None, } } } pub mod workspace_properties { use super::*; #[doc = "Encryption properties for databricks workspace"] #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct Encryption { #[doc = "Encryption entities for databricks workspace resource."] pub entities: EncryptionEntitiesDefinition, } impl Encryption { pub fn new(entities: EncryptionEntitiesDefinition) -> Self { Self { entities } } } #[doc = "The network access type for accessing workspace. 
Set value to disabled to access workspace only via private link."] #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] #[serde(remote = "PublicNetworkAccess")] pub enum PublicNetworkAccess { Enabled, Disabled, #[serde(skip_deserializing)] UnknownValue(String), } impl FromStr for PublicNetworkAccess { type Err = value::Error; fn from_str(s: &str) -> std::result::Result<Self, Self::Err> { Self::deserialize(s.into_deserializer()) } } impl<'de> Deserialize<'de> for PublicNetworkAccess { fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error> where D: Deserializer<'de>, { let s = String::deserialize(deserializer)?; let deserialized = Self::from_str(&s).unwrap_or(Self::UnknownValue(s)); Ok(deserialized) } } impl Serialize for PublicNetworkAccess { fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error> where S: Serializer, { match self { Self::Enabled => serializer.serialize_unit_variant("PublicNetworkAccess", 0u32, "Enabled"), Self::Disabled => serializer.serialize_unit_variant("PublicNetworkAccess", 1u32, "Disabled"), Self::UnknownValue(s) => serializer.serialize_str(s.as_str()), } } } #[doc = "Gets or sets a value indicating whether data plane (clusters) to control plane communication happen over private endpoint. Supported values are 'AllRules' and 'NoAzureDatabricksRules'. 
'NoAzureServiceRules' value is for internal use only."] #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] #[serde(remote = "RequiredNsgRules")] pub enum RequiredNsgRules { AllRules, NoAzureDatabricksRules, NoAzureServiceRules, #[serde(skip_deserializing)] UnknownValue(String), } impl FromStr for RequiredNsgRules { type Err = value::Error; fn from_str(s: &str) -> std::result::Result<Self, Self::Err> { Self::deserialize(s.into_deserializer()) } } impl<'de> Deserialize<'de> for RequiredNsgRules { fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error> where D: Deserializer<'de>, { let s = String::deserialize(deserializer)?; let deserialized = Self::from_str(&s).unwrap_or(Self::UnknownValue(s)); Ok(deserialized) } } impl Serialize for RequiredNsgRules { fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error> where S: Serializer, { match self { Self::AllRules => serializer.serialize_unit_variant("RequiredNsgRules", 0u32, "AllRules"), Self::NoAzureDatabricksRules => serializer.serialize_unit_variant("RequiredNsgRules", 1u32, "NoAzureDatabricksRules"), Self::NoAzureServiceRules => serializer.serialize_unit_variant("RequiredNsgRules", 2u32, "NoAzureServiceRules"), Self::UnknownValue(s) => serializer.serialize_str(s.as_str()), } } } } #[doc = "The workspace provider authorization."] #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct WorkspaceProviderAuthorization { #[doc = "The provider's principal identifier. This is the identity that the provider will use to call ARM to manage the workspace resources."] #[serde(rename = "principalId")] pub principal_id: String, #[doc = "The provider's role definition identifier. This role will define all the permissions that the provider must have on the workspace's container resource group. 
This role definition cannot have permission to delete the resource group."] #[serde(rename = "roleDefinitionId")] pub role_definition_id: String, } impl WorkspaceProviderAuthorization { pub fn new(principal_id: String, role_definition_id: String) -> Self { Self { principal_id, role_definition_id, } } } #[doc = "An update to a workspace."] #[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)] pub struct WorkspaceUpdate { #[doc = "Resource tags."] #[serde(default, skip_serializing_if = "Option::is_none")] pub tags: Option<serde_json::Value>, } impl WorkspaceUpdate { pub fn new() -> Self { Self::default() } } #[doc = "Metadata pertaining to creation and last modification of the resource."] #[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)] pub struct SystemData { #[doc = "The identity that created the resource."] #[serde(rename = "createdBy", default, skip_serializing_if = "Option::is_none")] pub created_by: Option<String>, #[doc = "The type of identity that created the resource."] #[serde(rename = "createdByType", default, skip_serializing_if = "Option::is_none")] pub created_by_type: Option<system_data::CreatedByType>, #[doc = "The timestamp of resource creation (UTC)."] #[serde(rename = "createdAt", default, with = "azure_core::date::rfc3339::option")] pub created_at: Option<time::OffsetDateTime>, #[doc = "The identity that last modified the resource."] #[serde(rename = "lastModifiedBy", default, skip_serializing_if = "Option::is_none")] pub last_modified_by: Option<String>, #[doc = "The type of identity that last modified the resource."] #[serde(rename = "lastModifiedByType", default, skip_serializing_if = "Option::is_none")] pub last_modified_by_type: Option<system_data::LastModifiedByType>, #[doc = "The timestamp of resource last modification (UTC)"] #[serde(rename = "lastModifiedAt", default, with = "azure_core::date::rfc3339::option")] pub last_modified_at: Option<time::OffsetDateTime>, } impl SystemData { pub fn new() -> Self { 
Self::default() } } pub mod system_data { use super::*; #[doc = "The type of identity that created the resource."] #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] #[serde(remote = "CreatedByType")] pub enum CreatedByType { User, Application, ManagedIdentity, Key, #[serde(skip_deserializing)] UnknownValue(String), } impl FromStr for CreatedByType { type Err = value::Error; fn from_str(s: &str) -> std::result::Result<Self, Self::Err> { Self::deserialize(s.into_deserializer()) } } impl<'de> Deserialize<'de> for CreatedByType { fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error> where D: Deserializer<'de>, { let s = String::deserialize(deserializer)?; let deserialized = Self::from_str(&s).unwrap_or(Self::UnknownValue(s)); Ok(deserialized) } } impl Serialize for CreatedByType { fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error> where S: Serializer, { match self { Self::User => serializer.serialize_unit_variant("CreatedByType", 0u32, "User"), Self::Application => serializer.serialize_unit_variant("CreatedByType", 1u32, "Application"), Self::ManagedIdentity => serializer.serialize_unit_variant("CreatedByType", 2u32, "ManagedIdentity"), Self::Key => serializer.serialize_unit_variant("CreatedByType", 3u32, "Key"), Self::UnknownValue(s) => serializer.serialize_str(s.as_str()), } } } #[doc = "The type of identity that last modified the resource."] #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] #[serde(remote = "LastModifiedByType")] pub enum LastModifiedByType { User, Application, ManagedIdentity, Key, #[serde(skip_deserializing)] UnknownValue(String), } impl FromStr for LastModifiedByType { type Err = value::Error; fn from_str(s: &str) -> std::result::Result<Self, Self::Err> { Self::deserialize(s.into_deserializer()) } } impl<'de> Deserialize<'de> for LastModifiedByType { fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error> where D: Deserializer<'de>, { let s = 
String::deserialize(deserializer)?; let deserialized = Self::from_str(&s).unwrap_or(Self::UnknownValue(s)); Ok(deserialized) } } impl Serialize for LastModifiedByType { fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error> where S: Serializer, { match self { Self::User => serializer.serialize_unit_variant("LastModifiedByType", 0u32, "User"), Self::Application => serializer.serialize_unit_variant("LastModifiedByType", 1u32, "Application"), Self::ManagedIdentity => serializer.serialize_unit_variant("LastModifiedByType", 2u32, "ManagedIdentity"), Self::Key => serializer.serialize_unit_variant("LastModifiedByType", 3u32, "Key"), Self::UnknownValue(s) => serializer.serialize_str(s.as_str()), } } } }
true
75ba401ee4260e24a350903c391f0c8921bc34fc
Rust
mingyli/aoc2018
/src/day5.rs
UTF-8
1,833
3.09375
3
[]
no_license
use std::collections::HashSet;
use std::io::{self, BufRead};
use std::iter::FromIterator;

use super::answer::Answer;

/// Read one input line from `reader` and keep only the ASCII-alphabetic
/// polymer units, discarding the trailing newline and any stray bytes.
///
/// I/O errors are propagated to the caller instead of panicking.
fn read_polymer<R: BufRead>(reader: &mut R) -> io::Result<Vec<char>> {
    let mut buffer = String::new();
    reader.read_line(&mut buffer)?;
    Ok(buffer
        .chars()
        .filter(|ch| ch.is_ascii_alphabetic())
        .collect())
}

/// Fully react a polymer: adjacent units of the same letter but opposite
/// case ("aA", "Bb", ...) annihilate, repeatedly, until stable.
///
/// Single left-to-right pass with an explicit stack: each incoming unit
/// either cancels the unit on top of the stack or is pushed. The stack is
/// a `Vec<char>` so inspecting the top element is O(1); the previous
/// `String`-based version used `stack.chars().last()`, which traverses the
/// whole string each iteration (the default `Iterator::last` consumes the
/// iterator) and made the reaction quadratic overall.
fn react(chars: &[char]) -> String {
    let mut stack: Vec<char> = Vec::with_capacity(chars.len());
    for &ch in chars {
        match stack.last() {
            // Same letter, opposite case: the pair reacts and vanishes.
            Some(&top) if top != ch && top.eq_ignore_ascii_case(&ch) => {
                stack.pop();
            }
            _ => stack.push(ch),
        }
    }
    stack.into_iter().collect()
}

/// Part A: length of the fully reacted polymer.
pub fn day5a<R: BufRead>(reader: &mut R) -> io::Result<Answer> {
    let chars = read_polymer(reader)?;
    Ok(Answer::US(react(&chars).len()))
}

/// React the polymer with every unit of type `removed` (given lowercase)
/// deleted first, regardless of case.
fn filtered_react(removed: char, chars: &[char]) -> String {
    let filtered: Vec<char> = chars
        .iter()
        .copied()
        .filter(|ch| ch.to_ascii_lowercase() != removed)
        .collect();
    react(&filtered)
}

/// Part B: shortest reacted polymer achievable by removing all units of
/// exactly one type. Returns 0 for an empty polymer instead of panicking.
pub fn day5b<R: BufRead>(reader: &mut R) -> io::Result<Answer> {
    let chars = read_polymer(reader)?;
    // Candidate unit types are only those actually present in the input.
    let lexicon: HashSet<char> = chars.iter().map(|ch| ch.to_ascii_lowercase()).collect();
    // Map each candidate to its reacted length and take the minimum, so the
    // winning reaction is not recomputed a second time.
    let best = lexicon
        .iter()
        .map(|&ch| filtered_react(ch, &chars).len())
        .min()
        .unwrap_or(0);
    Ok(Answer::US(best))
}
true
99310a67583d5b8c323545996246e21007b51898
Rust
caesarchad/DeCore
/interface/src/keymaker.rs
UTF-8
3,444
3.109375
3
[ "Apache-2.0" ]
permissive
//! The `signature` module provides functionality for public, and private keys.
use rand::{rngs::OsRng, RngCore, Rng, SeedableRng};
use rand_chacha::ChaChaRng;
use rayon::prelude::*;
use crate::signature::Keypair;
use serde::{Deserialize, Serialize};

/// Deterministic key factory: derives ed25519 keypairs from a fixed 32-byte
/// seed through the ChaCha stream-cipher RNG, so the same seed always yields
/// the same sequence of seeds and keypairs (see the determinism tests below).
pub struct ChaKeys {
    // Seeded, deterministic RNG; every seed/keypair is drawn from this stream.
    generator: ChaChaRng,
}

impl ChaKeys {
    /// Builds a generator whose entire output is determined by `seed`.
    pub fn new(seed: [u8; 32]) -> ChaKeys {
        let generator = ChaChaRng::from_seed(seed);
        ChaKeys { generator }
    }

    /// Draws the next 32-byte seed from the deterministic stream.
    fn chacha_seed(&mut self) -> [u8; 32] {
        let mut seed = [0u8; 32];
        self.generator.fill(&mut seed);
        seed
    }

    /// Draws `n` seeds sequentially; the order is part of the deterministic
    /// contract, so this stays single-threaded.
    fn chacha_seed_vec(&mut self, n: u64) -> Vec<[u8; 32]> {
        (0..n).map(|_| self.chacha_seed()).collect()
    }

    /// Generates one ed25519 keypair directly from the stream.
    pub fn ed25519_keypair(&mut self) -> Keypair {
        Keypair::generate(&mut self.generator)
    }

    /// Generates `n` keypairs: the per-key seeds are drawn sequentially
    /// (deterministic), then key derivation fans out across threads with
    /// rayon, each worker reseeding its own `ChaChaRng`.
    pub fn ed25519_keypair_vec(&mut self, n: u64) -> Vec<Keypair> {
        self.chacha_seed_vec(n)
            .into_par_iter()
            .map(|seed| Keypair::generate(&mut ChaChaRng::from_seed(seed)))
            .collect()
    }
}

/// Non-deterministic key factory backed by the operating system's entropy
/// source (`OsRng`); the API mirrors `ChaKeys` but produces fresh randomness.
pub struct MachineKeys {
    generator: OsRng,
}

impl MachineKeys {
    /// Opens the OS entropy source.
    ///
    /// Panics if the OS RNG cannot be accessed — there is no sane fallback
    /// for key generation without entropy.
    pub fn new() -> MachineKeys {
        let generator = OsRng::new().expect("can't access OsRng");
        MachineKeys { generator }
    }

    /// Draws a fresh 32-byte seed from OS entropy.
    fn machine_seed(&mut self) -> [u8; 32] {
        let mut seed = [0u8; 32];
        self.generator.fill_bytes(&mut seed);
        seed
    }

    /// Draws `n` fresh seeds from OS entropy.
    fn machine_seed_vec(&mut self, n: u64) -> Vec<[u8; 32]> {
        (0..n).map(|_| self.machine_seed()).collect()
    }

    /// Generates one ed25519 keypair directly from OS entropy.
    pub fn ed25519_keypair(&mut self) -> Keypair {
        Keypair::generate(&mut self.generator)
    }

    /// Generates `n` keypairs in parallel: seeds come from OS entropy, then
    /// each rayon worker derives its key through a seeded `StdRng`.
    pub fn ed25519_keypair_vec(&mut self, n: u64) -> Vec<Keypair> {
        self.machine_seed_vec(n)
            .into_par_iter()
            .map(|seed| Keypair::generate(&mut rand::rngs::StdRng::from_seed(seed)))
            .collect()
    }
}

/// Logger settings; serde round-trippable, and `#[serde(default)]` lets any
/// missing field fall back to the `Default` impl below.
#[derive(Clone, Debug, Deserialize, PartialEq, Serialize)]
#[serde(default)]
pub struct LoggerConfig {
    // Use async logging
    pub is_async: bool,
    // chan_size of slog async drain for node logging.
    pub chan_size: usize,
}

impl Default for LoggerConfig {
    fn default() -> LoggerConfig {
        LoggerConfig {
            is_async: true,
            chan_size: 256,
        }
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    pub use crate::bvm_address::BvmAddr;
    use crate::signature::KeypairUtil;
    use std::collections::HashSet;

    // Same seed => identical seed stream, over many draws.
    #[test]
    fn test_new_key_is_deterministic() {
        let seed = [0u8; 32];
        let mut gen0 = ChaKeys::new(seed);
        let mut gen1 = ChaKeys::new(seed);
        for _ in 0..100 {
            assert_eq!(gen0.chacha_seed().to_vec(), gen1.chacha_seed().to_vec());
        }
    }

    // Same seed => byte-identical first keypair.
    #[test]
    fn test_gen_keypair_is_deterministic() {
        let seed = [0u8; 32];
        let mut gen0 = ChaKeys::new(seed);
        let mut gen1 = ChaKeys::new(seed);
        assert_eq!(
            gen0.ed25519_keypair().to_bytes().to_vec(),
            gen1.ed25519_keypair().to_bytes().to_vec()
        );
    }

    // Helper: derive `n` addresses from a seed (set form ignores ordering).
    fn gen_n_addresss(seed: [u8; 32], n: u64) -> HashSet<BvmAddr> {
        ChaKeys::new(seed)
            .ed25519_keypair_vec(n)
            .into_iter()
            .map(|x| x.address())
            .collect()
    }

    // Parallel bulk generation must still be seed-deterministic.
    #[test]
    fn test_gen_n_addresss_deterministic() {
        let seed = [0u8; 32];
        assert_eq!(gen_n_addresss(seed, 50), gen_n_addresss(seed, 50));
    }
}
true
c2180196c276cd016f3996523606e1278fa34223
Rust
SamirJoshi/NumRu
/src/math/arithmetic.rs
UTF-8
9,740
3.625
4
[]
no_license
//! Arithmetics module implements Numpy routines listed [here](https://docs.scipy.org/doc/numpy/reference/routines.math.html#arithmetic-operations)
//!
//! add, multiply, divide and subtract already handled by ndarray lib
use ndarray::*;
use ndarray_parallel::prelude::*;
use num_traits;
use std;
use std::{fmt::Debug, marker::Copy};

/// Return the reciprocal of the argument, element-wise.
/// Calculates 1/x.
///
/// NOTE: for integer element types `1 / x` is integer division, so every
/// element with magnitude greater than 1 maps to 0 (see the unit test below).
///
/// # Examples
/// ```
/// # #[macro_use]
/// # extern crate ndarray;
/// # extern crate num_ru;
/// use ndarray::*;
/// use num_ru::math::arithmetic::*;
/// # fn main(){
/// let input_arr = array![1.0, 2.0, 4.0];
/// let expected_arr = array![1.0, 0.5, 0.25];
/// assert_eq!(input_arr.reciprocal(), expected_arr);
/// # }
/// ```
pub trait Reciprocal<T, D>
where
    D: Dimension,
{
    fn reciprocal(&self) -> Array<T, D>;
}

// Generates a `Reciprocal` impl for `Array<$t, D>` for each listed
// element type, mapping every element x to `1 as $t / x`.
macro_rules! impl_Reciprocal {
    (for $($t:ty),+) => {
        $(impl<D: Dimension> Reciprocal<$t, D> for Array<$t, D> {
            fn reciprocal(&self) -> Array<$t, D> {
                self.mapv(|x| 1 as $t / x)
            }
        })*
    };
}

impl_Reciprocal!{ for usize, u8, u16, u32, u64, u128 }
impl_Reciprocal!{ for isize, i8, i16, i32, i64, i128 }
impl_Reciprocal!{ for f32, f64 }

/// Sign-related element-wise operations (positive / absolute / negative)
/// for signed element types.
pub trait NumRuSigned {
    fn positive(&self) -> Self;
    fn absolute(&self) -> Self;
    fn negative(&self) -> Self;
}

// Sequential implementation for owned `Array`s.
impl<A: Debug + Copy + num_traits::Signed, D: Dimension> NumRuSigned for Array<A, D> {
    /// Returns the numerical positive, element-wise of an ndarray Array
    ///
    /// Same as absolute value
    ///
    /// # Examples
    /// ```
    /// # #[macro_use]
    /// # extern crate ndarray;
    /// # extern crate num_ru;
    /// use ndarray::*;
    /// use num_ru::math::arithmetic::*;
    /// # fn main(){
    /// let arr = array![[[-5.0, 6.0], [7.0, -1.0]], [[1.0, -2.0], [-3.0, -4.0]]];
    /// let old_arr = array![[[-5.0, 6.0], [7.0, -1.0]], [[1.0, -2.0], [-3.0, -4.0]]];
    /// let expected_arr = array![[[5.0, 6.0], [7.0, 1.0]], [[1.0, 2.0], [3.0, 4.0]]];
    /// assert_eq!(arr.positive(), expected_arr);
    /// # }
    /// ```
    fn positive(&self) -> Self {
        self.mapv(|x| x.abs())
    }

    /// Returns the absolute value, element-wise of an ndarray Array
    ///
    /// Same as positive
    ///
    /// # Examples
    /// ```
    /// # #[macro_use]
    /// # extern crate ndarray;
    /// # extern crate num_ru;
    /// use ndarray::*;
    /// use num_ru::math::arithmetic::*;
    /// # fn main(){
    /// let arr = array![[[-5.0, 6.0], [7.0, -1.0]], [[1.0, -2.0], [-3.0, -4.0]]];
    /// let expected_arr = array![[[5.0, 6.0], [7.0, 1.0]], [[1.0, 2.0], [3.0, 4.0]]];
    /// assert_eq!(arr.absolute(), expected_arr);
    /// # }
    /// ```
    fn absolute(&self) -> Self {
        // Identical to `positive`; kept for NumPy-style naming parity.
        self.positive()
    }

    /// Returns the negative, element-wise of an ndarray Array
    ///
    /// # Examples
    /// ```
    /// # #[macro_use]
    /// # extern crate ndarray;
    /// # extern crate num_ru;
    /// use ndarray::*;
    /// use num_ru::math::arithmetic::*;
    /// # fn main(){
    /// let arr = array![[[-5.0, 6.0], [7.0, -1.0]], [[1.0, -2.0], [-3.0, -4.0]]];
    /// let expected_arr = array![[[5.0, -6.0], [-7.0, 1.0]], [[-1.0, 2.0], [3.0, 4.0]]];
    /// assert_eq!(arr.negative(), expected_arr);
    /// # }
    /// ```
    fn negative(&self) -> Self {
        self.mapv(|x| x.neg())
    }
}

// Parallel (rayon-backed) implementation for shared `ArcArray`s; the
// `Sync + Send` bounds are required by `par_apply`.
impl<A: Debug + Copy + num_traits::Signed + std::marker::Sync + std::marker::Send, D: Dimension>
    NumRuSigned for ArcArray<A, D>
{
    /// Returns the numerical positive, element-wise of an ndarray ArcArray
    ///
    /// Same as absolute
    ///
    /// # Examples
    /// ```
    /// # #[macro_use]
    /// # extern crate ndarray;
    /// # extern crate num_ru;
    /// use ndarray::*;
    /// use num_ru::math::arithmetic::*;
    /// # fn main(){
    /// let arr = array![[[-5.0, 6.0], [7.0, -1.0]], [[1.0, -2.0], [-3.0, -4.0]]].into_shared();
    /// let expected_arr = array![[[5.0, 6.0], [7.0, 1.0]], [[1.0, 2.0], [3.0, 4.0]]].into_shared();
    /// assert_eq!(arr.positive(), expected_arr);
    /// # }
    /// ```
    fn positive(&self) -> Self {
        // Clone first so the parallel writes go into a fresh array while the
        // source is read in lockstep.
        let mut pos_arr = self.clone();
        Zip::from(&mut pos_arr).and(self).par_apply(|pos_arr, &arr| {
            *pos_arr = arr.abs();
        });
        pos_arr
    }

    /// Returns the absolute value, element-wise of an ndarray ArcArray
    ///
    /// Same as positive
    ///
    /// # Examples
    /// ```
    /// # #[macro_use]
    /// # extern crate ndarray;
    /// # extern crate num_ru;
    /// use ndarray::*;
    /// use num_ru::math::arithmetic::*;
    /// # fn main(){
    /// let arr = array![[[-5.0, 6.0], [7.0, -1.0]], [[1.0, -2.0], [-3.0, -4.0]]].into_shared();
    /// let expected_arr = array![[[5.0, 6.0], [7.0, 1.0]], [[1.0, 2.0], [3.0, 4.0]]].into_shared();
    /// assert_eq!(arr.absolute(), expected_arr);
    /// # }
    /// ```
    fn absolute(&self) -> Self {
        self.positive()
    }

    /// Returns the negative, element-wise of an ndarray ArcArray
    ///
    /// # Examples
    /// ```
    /// # #[macro_use]
    /// # extern crate ndarray;
    /// # extern crate num_ru;
    /// use ndarray::*;
    /// use num_ru::math::arithmetic::*;
    /// # fn main(){
    /// let arr = array![[[-5.0, 6.0], [7.0, -1.0]], [[1.0, -2.0], [-3.0, -4.0]]].into_shared();
    /// let expected_arr = array![[[5.0, -6.0], [-7.0, 1.0]], [[-1.0, 2.0], [3.0, 4.0]]].into_shared();
    /// assert_eq!(arr.negative(), expected_arr);
    /// # }
    /// ```
    fn negative(&self) -> Self {
        let mut neg_arr = self.clone();
        Zip::from(&mut neg_arr).and(self).par_apply(|neg_arr, &arr| {
            *neg_arr = arr.neg();
        });
        neg_arr
    }
}

/// First array elements raised to powers from second array, element-wise.
///
/// # Examples
/// ```
/// # #[macro_use]
/// # extern crate ndarray;
/// # extern crate num_ru;
/// use ndarray::*;
/// use num_ru::math::arithmetic::*;
///
/// # fn main(){
/// let arr1 = array![2.0, 3.0, 4.0];
/// let arr2 = array![-1.0, 2.0, 2.5];
/// let arr3 = array![0.5, 9.0, 32.0];
/// assert_eq!(arr1.power(&arr2), arr3);
/// # }
/// ```
pub trait Power<A, B, D>
where
    D: Dimension,
{
    fn power(&self, arr_pow: &Array<B, D>) -> Array<A, D>;
}

// Generates a `Power` impl per (base type $t, exponent type $t2, method
// $pow) triple: integer types use `pow(u32)`, floats use `powf`.
macro_rules! impl_Power {
    (for $($t:ty, $t2:ty, $pow:ident),+) => {
        $( impl<D: Dimension> Power<$t, $t2, D> for Array<$t, D> {
            fn power(&self, arr_pow: &Array<$t2, D>) -> Array<$t, D> {
                let mut res = Array::from_elem(self.dim(), 0 as $t);
                Zip::from(&mut res)
                    .and(self)
                    .and(arr_pow)
                    .apply(|x, &y, &z| {
                        *x = y.$pow(z);
                    });
                res
            }
        } )*
    };
}

impl_Power!{ for usize, u32, pow, u8, u32, pow, u16, u32, pow, u32, u32, pow, u64, u32, pow, u128, u32, pow }
impl_Power!{ for isize, u32, pow, i8, u32, pow, i16, u32, pow, i32, u32, pow, i64, u32, pow, i128, u32, pow }
impl_Power!{ for f32, f32, powf, f64, f64, powf }

/// Return element-wise remainder of division.
///
/// # Examples
/// ```
/// # #[macro_use]
/// # extern crate ndarray;
/// # extern crate num_ru;
/// use ndarray::*;
/// use num_ru::math::arithmetic::*;
///
/// # fn main(){
/// let arr1 = array![10.0, 11.0, 12.0];
/// let arr2 = array![3.0, 4.0, 5.0];
/// let arr3 = array![1.0, 3.0, 2.0];
/// assert_eq!(arr1.remainder(&arr2), arr3);
/// # }
/// ```
pub trait Remainder<T, D>
where
    D: Dimension,
{
    fn remainder(&self, arr2: &Array<T, D>) -> Array<T, D>;
}

// Generates a `Remainder` impl for each listed element type using the
// `%` operator (truncated remainder, matching Rust semantics).
macro_rules! impl_Remainder {
    (for $($t:ty),+) => {
        $(impl<D: Dimension> Remainder<$t, D> for Array<$t, D> {
            fn remainder(&self, arr2: &Array<$t, D>) -> Array<$t, D> {
                let mut res = Array::from_elem(self.dim(), 0 as $t);
                Zip::from(&mut res)
                    .and(self)
                    .and(arr2)
                    .apply(|x, &y, &z| {
                        *x = &y % &z;
                    });
                res
            }
        })*
    };
}

impl_Remainder!{ for usize, u8, u16, u32, u64, u128 }
impl_Remainder!{ for isize, i8, i16, i32, i64, i128 }
impl_Remainder!{ for f32, f64 }

// TODO: divmod — return element-wise quotient and remainder simultaneously.

#[cfg(test)]
mod arithmetic_tests {
    use super::{NumRuSigned, Power, Reciprocal, Remainder};

    #[test]
    fn positive_test() {
        let input_arr = array![1.0, 0.0, -1.0];
        let expected_arr = array![1.0, 0.0, 1.0];
        assert_eq!(input_arr.positive(), expected_arr);
    }

    #[test]
    fn negative_test() {
        let input_arr = array![1.0, 0.0, -1.0];
        let expected_arr = array![-1.0, 0.0, 1.0];
        assert_eq!(input_arr.negative(), expected_arr);
    }

    #[test]
    fn reciprocal_test() {
        let input_arr = array![1.0, 2.0, 4.0];
        let expected_arr = array![1.0, 0.5, 0.25];
        assert_eq!(input_arr.reciprocal(), expected_arr);
        // Integer reciprocal truncates: 1/2 and 1/4 are 0.
        let input_arr_2 = array![1, 2, 4];
        let expected_arr_2 = array![1, 0, 0];
        assert_eq!(input_arr_2.reciprocal(), expected_arr_2);
    }

    #[test]
    fn remainder_test() {
        let arr1 = array![10.0, 11.0, 12.0];
        let arr2 = array![3.0, 4.0, 5.0];
        let arr3 = array![1.0, 3.0, 2.0];
        assert_eq!(arr1.remainder(&arr2), arr3);
    }

    #[test]
    fn power_test() {
        let arr1 = array![2.0, 3.0, 4.0];
        let arr2 = array![-1.0, 2.0, 2.5];
        let arr3 = array![0.5, 9.0, 32.0];
        assert_eq!(arr1.power(&arr2), arr3);
    }
}
true
ee94ee0fcc7cac39de90cf677d66fbb6e3b9f74f
Rust
isgasho/ssf
/ssf-llvm/src/initializer_configuration.rs
UTF-8
503
2.859375
3
[ "MIT" ]
permissive
pub struct InitializerConfiguration { name: String, dependent_initializer_names: Vec<String>, } impl InitializerConfiguration { pub fn new(name: impl Into<String>, dependent_initializer_names: Vec<String>) -> Self { Self { name: name.into(), dependent_initializer_names, } } pub fn name(&self) -> &str { &self.name } pub fn dependent_initializer_names(&self) -> &[String] { &self.dependent_initializer_names } }
true
74962ab0ce8b6796010a69c0c24900e81c5bd842
Rust
beanz/adventofcode-2020
/day18/src/lib.rs
UTF-8
3,696
3.28125
3
[]
no_license
use aoc2020::parse; use lalrpop_util::lalrpop_mod; use std::{ ops::{Add, Mul}, path::Path, str::FromStr, }; use thiserror::Error; lalrpop_mod!(parser); #[derive(Debug, Clone, Copy, PartialEq, Eq)] pub enum Operation { Add, Mul, } impl Operation { fn apply_to<T>(self, a: T, b: T) -> T where T: Add<Output = T> + Mul<Output = T>, { match self { Operation::Add => a + b, Operation::Mul => a * b, } } } impl Default for Operation { fn default() -> Self { Operation::Add } } #[derive(Debug, Clone, PartialEq, Eq)] pub enum Value { Literal(i64), Expression(Box<Expr>), } impl Value { pub fn value(&self) -> i64 { match self { Value::Literal(n) => *n, Value::Expression(e) => e.value(), } } // reduce this value to a literal using advanced math fn reduce_advanced(&self) -> i64 { match self { Value::Literal(n) => *n, Value::Expression(expr) => expr.evaluate_advanced(), } } } impl<T> From<T> for Value where i64: From<T>, { fn from(t: T) -> Self { Value::Literal(t.into()) } } #[derive(Debug, Clone, PartialEq, Eq)] pub struct Term { operation: Operation, value: Value, } impl Term { fn apply_to(&self, value: i64) -> i64 { self.operation.apply_to(self.value.value(), value) } } impl From<Value> for Term { fn from(value: Value) -> Self { Term { operation: Operation::default(), value, } } } #[derive(Debug, Clone, PartialEq, Eq)] pub struct Expr { terms: Vec<Term>, } impl FromStr for Expr { type Err = Error; fn from_str(s: &str) -> Result<Self, Self::Err> { parser::ExprParser::new() .parse(s) .map_err(|e| Error::Parse(Box::new(e.map_token(|t| t.to_string())))) } } impl Expr { fn evaluate(terms: &[Term]) -> i64 { terms.iter().fold(0, |acc, elem| elem.apply_to(acc)) } pub fn value(&self) -> i64 { Self::evaluate(&self.terms) } /// In "advanced" math, addition is evaluated _before_ multiplication. 
pub fn evaluate_advanced(&self) -> i64 { // convert to a list of multiplications let remaining_terms = self.terms.iter().fold(Vec::new(), |mut remaining, term| { // first term if remaining.is_empty() { remaining.push(term.value.reduce_advanced()); return remaining; } match term.operation { Operation::Add => { // fold this term's value into the final term of remaining let last = remaining.len() - 1; remaining[last] += term.value.reduce_advanced(); } Operation::Mul => { remaining.push(term.value.reduce_advanced()); } } remaining }); // evaluate that list remaining_terms.iter().product() } } pub fn part1(input: &Path) -> Result<(), Error> { let sum = parse::<Expr>(input)?.map(|expr| expr.value()).sum::<i64>(); println!("sum of expressions: {}", sum); Ok(()) } pub fn part2(input: &Path) -> Result<(), Error> { let sum = parse::<Expr>(input)? .map(|expr| expr.evaluate_advanced()) .sum::<i64>(); println!("sum of expressions (advanced): {}", sum); Ok(()) } #[derive(Debug, Error)] pub enum Error { #[error(transparent)] Io(#[from] std::io::Error), #[error("parse error")] Parse(#[source] Box<dyn std::error::Error + Send + Sync>), }
true
817fe9451c8d8caa627c8e4300a8177706124034
Rust
mars-research/acpi
/acpi/src/platform/mod.rs
UTF-8
3,912
2.96875
3
[ "Apache-2.0", "MIT" ]
permissive
pub mod address; pub mod interrupt; use crate::{fadt::Fadt, madt::Madt, AcpiError, AcpiHandler, AcpiTables, PowerProfile}; use address::GenericAddress; use interrupt::InterruptModel; #[derive(Clone, Copy, Debug, PartialEq, Eq)] pub enum ProcessorState { /// A processor in this state is unusable, and you must not attempt to bring it up. Disabled, /// A processor waiting for a SIPI (Startup Inter-processor Interrupt) is currently not active, /// but may be brought up. WaitingForSipi, /// A Running processor is currently brought up and running code. Running, } #[derive(Clone, Copy, Debug, PartialEq, Eq)] pub struct Processor { /// Corresponds to the `_UID` object of the processor's `Device`, or the `ProcessorId` field of the `Processor` /// object, in AML. pub processor_uid: u32, /// The ID of the local APIC of the processor. Will be less than `256` if the APIC is being used, but can be /// greater than this if the X2APIC is being used. pub local_apic_id: u32, /// The state of this processor. Check that the processor is not `Disabled` before attempting to bring it up! pub state: ProcessorState, /// Whether this processor is the Bootstrap Processor (BSP), or an Application Processor (AP). /// When the bootloader is entered, the BSP is the only processor running code. To run code on /// more than one processor, you need to "bring up" the APs. pub is_ap: bool, } // pub struct ProcessorInfo { // pub boot_processor: Processor, // /// Application processors should be brought up in the order they're defined in this list. // pub application_processors: Vec<Processor>, // } /// Information about the ACPI Power Management Timer (ACPI PM Timer). pub struct PmTimer { /// A generic address to the register block of ACPI PM Timer. pub base: GenericAddress, /// This field is `true` if the hardware supports 32-bit timer, and `false` if the hardware supports 24-bit timer. 
pub supports_32bit: bool, } impl PmTimer { pub fn new(fadt: &Fadt) -> Result<Option<PmTimer>, AcpiError> { match fadt.pm_timer_block()? { Some(base) => Ok(Some(PmTimer { base, supports_32bit: { fadt.flags }.pm_timer_is_32_bit() })), None => Ok(None), } } } /// `PlatformInfo` allows the collection of some basic information about the platform from some of the fixed-size /// tables in a nice way. It requires access to the `FADT` and `MADT`. It is the easiest way to get information /// about the processors and interrupt controllers on a platform. pub struct PlatformInfo { pub power_profile: PowerProfile, pub interrupt_model: InterruptModel, /// On `x86_64` platforms that support the APIC, the processor topology must also be inferred from the /// interrupt model. That information is stored here, if present. // pub processor_info: Option<ProcessorInfo>, pub pm_timer: Option<PmTimer>, /* * TODO: we could provide a nice view of the hardware register blocks in the FADT here. */ } impl PlatformInfo { pub fn new<H>(tables: &AcpiTables<H>) -> Result<PlatformInfo, AcpiError> where H: AcpiHandler, { let fadt = unsafe { tables .get_sdt::<Fadt>(crate::sdt::Signature::FADT)? .ok_or(AcpiError::TableMissing(crate::sdt::Signature::FADT))? }; let power_profile = fadt.power_profile(); let madt = unsafe { tables.get_sdt::<Madt>(crate::sdt::Signature::MADT)? }; let interrupt_model = match madt { Some(madt) => madt.parse_interrupt_model()?, None => InterruptModel::Unknown, }; let pm_timer = PmTimer::new(&fadt)?; // Ok(PlatformInfo { power_profile, interrupt_model, processor_info, pm_timer }) Ok(PlatformInfo { power_profile, interrupt_model, pm_timer }) } }
true
b4fa254119564dbacdf34562383f1834147cf384
Rust
rwuttke/lexgen
/tests/lua_5_1.rs
UTF-8
16,092
3.28125
3
[ "MIT" ]
permissive
// A Lua 5.1 lexer. We use this as // // - An example: this file is linked from README // // - A test: `test_data` contains all Lua files in Lua 5.1 source distribution, we lex it using // this lexer as a test. // // - A benchmark: We also use `test_data` lexing time as a runtime benchmark. use lexgen::lexer; //////////////////////////////////////////////////////////////////////////////// // // // Lexer definition and tests // // // //////////////////////////////////////////////////////////////////////////////// #[derive(Debug, PartialEq, Eq, Clone)] enum Token<'input> { Plus, Minus, Star, Slash, Percent, Caret, Hash, EqEq, TildeEq, LtEq, GtEq, Lt, Gt, Eq, LParen, RParen, LBrace, RBrace, LBracket, RBracket, Semicolon, Colon, Comma, Dot, DotDot, DotDotDot, Keyword(Keyword), String(StringToken<'input>), Var(&'input str), Number(&'input str), // uninterpreted } /// Raw string tokens are borrowed from the input string. Interpreted strings are copied and owned. #[derive(Debug, PartialEq, Eq, Clone)] enum StringToken<'input> { Raw(&'input str), Interpreted(String), } #[derive(Debug, PartialEq, Eq, Clone)] enum Keyword { And, Break, Do, Else, ElseIf, End, False, For, Function, If, In, Local, Nil, Not, Or, Repeat, Return, Then, True, Until, While, } #[derive(Debug, Default, Clone)] struct LexerState { /// Number of opening `=`s seen when parsing a long string long_string_opening_eqs: usize, /// Number of closing `=`s seen when parsing a long string long_string_closing_eqs: usize, /// When parsing a short string, whether it's started with a double or single quote short_string_delim: Quote, /// Buffer for strings string_buf: String, /// When parsing a long string, whether we're inside a comment or not. When inside a comment we /// don't return a token. Otherwise we return a string. in_comment: bool, } #[derive(Debug, Clone, Copy, PartialEq, Eq)] enum Quote { Single, Double, } impl Default for Quote { fn default() -> Self { // arbitrary Quote::Single } } lexer! 
{ Lexer(LexerState) -> Token<'input>; let whitespace = [' ' '\t' '\n'] | "\r\n"; // > Names (also called identifiers) in Lua can be any string of letters, digits, and // > underscores, not beginning with a digit. This coincides with the definition of names in // > most languages. (The definition of letter depends on the current locale: any character // > considered alphabetic by the current locale can be used in an identifier.) let var_init = ['a'-'z' 'A'-'Z' '_']; let var_subseq = $var_init | ['0'-'9']; let digit = ['0'-'9']; let hex_digit = ['a'-'f' 'A'-'F' '0'-'9']; rule Init { $whitespace, "+" = Token::Plus, "-" = Token::Minus, "*" = Token::Star, "/" = Token::Slash, "%" = Token::Percent, "^" = Token::Caret, "#" = Token::Hash, "==" = Token::EqEq, "~=" = Token::TildeEq, "<=" = Token::LtEq, ">=" = Token::GtEq, "<" = Token::Lt, ">" = Token::Gt, "=" = Token::Eq, "(" = Token::LParen, ")" = Token::RParen, "{" = Token::LBrace, "}" = Token::RBrace, "]" = Token::RBracket, ";" = Token::Semicolon, ":" = Token::Colon, "," = Token::Comma, "." = Token::Dot, ".." = Token::DotDot, "..." 
= Token::DotDotDot, "and" = Token::Keyword(Keyword::And), "break" = Token::Keyword(Keyword::Break), "do" = Token::Keyword(Keyword::Do), "else" = Token::Keyword(Keyword::Else), "elseif" = Token::Keyword(Keyword::ElseIf), "end" = Token::Keyword(Keyword::End), "false" = Token::Keyword(Keyword::False), "for" = Token::Keyword(Keyword::For), "function" = Token::Keyword(Keyword::Function), "if" = Token::Keyword(Keyword::If), "in" = Token::Keyword(Keyword::In), "local" = Token::Keyword(Keyword::Local), "nil" = Token::Keyword(Keyword::Nil), "not" = Token::Keyword(Keyword::Not), "or" = Token::Keyword(Keyword::Or), "repeat" = Token::Keyword(Keyword::Repeat), "return" = Token::Keyword(Keyword::Return), "then" = Token::Keyword(Keyword::Then), "true" = Token::Keyword(Keyword::True), "until" = Token::Keyword(Keyword::Until), "while" = Token::Keyword(Keyword::While), '"' => |mut lexer| { lexer.state().short_string_delim = Quote::Double; lexer.state().string_buf.clear(); lexer.switch(LexerRule::String) }, '\'' => |mut lexer| { lexer.state().short_string_delim = Quote::Single; lexer.state().string_buf.clear(); lexer.switch(LexerRule::String) }, "[" => |mut lexer| { match lexer.peek() { Some('[') | Some('=') => { lexer.state().long_string_opening_eqs = 0; lexer.state().in_comment = false; lexer.switch(LexerRule::LongStringBracketLeft) } _ => lexer.return_(Token::LBracket), } }, "--" => |lexer| { lexer.switch(LexerRule::EnterComment) }, $var_init $var_subseq* => |lexer| { let match_ = lexer.match_(); lexer.return_(Token::Var(match_)) }, $digit+ ('.'? $digit+ (('e' | 'E') ('+'|'-')? $digit+)?)? 
=> |lexer| { let match_ = lexer.match_(); lexer.return_(Token::Number(match_)) }, "0x" $hex_digit+ => |lexer| { let match_ = lexer.match_(); lexer.return_(Token::Number(match_)) }, } rule LongStringBracketLeft { '=' => |mut lexer| { lexer.state().long_string_opening_eqs += 1; lexer.continue_() }, '[' => |lexer| lexer.switch(LexerRule::LongString), } rule LongString { ']' => |mut lexer| { lexer.state().long_string_closing_eqs = 0; lexer.switch(LexerRule::LongStringBracketRight) }, _ => |lexer| lexer.continue_(), } rule LongStringBracketRight { '=' => |mut lexer| { lexer.state().long_string_closing_eqs += 1; lexer.continue_() }, ']' => |mut lexer| { let state = lexer.state(); let in_comment = state.in_comment; let left_eqs = state.long_string_opening_eqs; let right_eqs = state.long_string_closing_eqs; if left_eqs == right_eqs { if in_comment { lexer.switch(LexerRule::Init) } else { let match_ = &lexer.match_[left_eqs + 2..lexer.match_.len() - right_eqs - 2]; lexer.switch_and_return(LexerRule::Init, Token::String(StringToken::Raw(match_))) } } else { lexer.state().long_string_closing_eqs = 0; lexer.continue_() } }, _ => |lexer| lexer.switch(LexerRule::LongString), } rule String { '"' => |mut lexer| { if lexer.state().short_string_delim == Quote::Double { let str = lexer.state().string_buf.clone(); lexer.switch_and_return(LexerRule::Init, Token::String(StringToken::Interpreted(str))) } else { lexer.state().string_buf.push('"'); lexer.continue_() } }, "'" => |mut lexer| { if lexer.state().short_string_delim == Quote::Single { let str = lexer.state().string_buf.clone(); lexer.switch_and_return(LexerRule::Init, Token::String(StringToken::Interpreted(str))) } else { lexer.state().string_buf.push('\''); lexer.continue_() } }, "\\a" => |mut lexer| { lexer.state().string_buf.push('\u{7}'); lexer.continue_() }, "\\b" => |mut lexer| { lexer.state().string_buf.push('\u{8}'); lexer.continue_() }, "\\f" => |mut lexer| { lexer.state().string_buf.push('\u{c}'); lexer.continue_() }, 
"\\n" => |mut lexer| { lexer.state().string_buf.push('\n'); lexer.continue_() }, "\\r" => |mut lexer| { lexer.state().string_buf.push('\r'); lexer.continue_() }, "\\t" => |mut lexer| { lexer.state().string_buf.push('\t'); lexer.continue_() }, "\\v" => |mut lexer| { lexer.state().string_buf.push('\u{b}'); lexer.continue_() }, "\\\\" => |mut lexer| { lexer.state().string_buf.push('\\'); lexer.continue_() }, "\\\"" => |mut lexer| { lexer.state().string_buf.push('"'); lexer.continue_() }, "\\'" => |mut lexer| { lexer.state().string_buf.push('\''); lexer.continue_() }, "\\\n" => |mut lexer| { lexer.state().string_buf.push('\n'); lexer.continue_() }, _ => |mut lexer| { let char = lexer.match_().chars().next_back().unwrap(); lexer.state().string_buf.push(char); lexer.continue_() }, } rule EnterComment { '[' => |mut lexer| { match lexer.peek() { Some('[') | Some('=') => { lexer.state().long_string_opening_eqs = 0; lexer.state().in_comment = true; lexer.switch(LexerRule::LongStringBracketLeft) } _ => lexer.switch(LexerRule::Comment), } }, _ => |lexer| lexer.switch(LexerRule::Comment), } rule Comment { '\n' => |lexer| lexer.switch(LexerRule::Init), _ => |lexer| lexer.continue_(), } } fn ignore_pos<A, E>(ret: Option<Result<(usize, A, usize), E>>) -> Option<Result<A, E>> { ret.map(|res| res.map(|(_, a, _)| a)) } #[test] fn lex_lua_number() { let mut lexer = Lexer::new("3 3.0 3.1416 314.16e-2 0.31416E1 0xff 0x56"); assert_eq!(ignore_pos(lexer.next()), Some(Ok(Token::Number("3")))); assert_eq!(ignore_pos(lexer.next()), Some(Ok(Token::Number("3.0")))); assert_eq!(ignore_pos(lexer.next()), Some(Ok(Token::Number("3.1416")))); assert_eq!( ignore_pos(lexer.next()), Some(Ok(Token::Number("314.16e-2"))) ); assert_eq!( ignore_pos(lexer.next()), Some(Ok(Token::Number("0.31416E1"))) ); assert_eq!(ignore_pos(lexer.next()), Some(Ok(Token::Number("0xff")))); assert_eq!(ignore_pos(lexer.next()), Some(Ok(Token::Number("0x56")))); } #[test] fn lex_lua_string() { let str = " \"test\" \"\\ 
test'\\\"\" "; let mut lexer = Lexer::new(str); assert_eq!( ignore_pos(lexer.next()), Some(Ok(Token::String(StringToken::Interpreted( "test".to_owned() )))) ); assert_eq!( ignore_pos(lexer.next()), Some(Ok(Token::String(StringToken::Interpreted( "\ntest'\"".to_owned() )))) ); } #[test] fn lex_lua_long_string() { let mut lexer = Lexer::new("[[ ]] [=[test]=] [=[ ]]"); assert_eq!( ignore_pos(lexer.next()), Some(Ok(Token::String(StringToken::Raw(" ")))) ); assert_eq!( ignore_pos(lexer.next()), Some(Ok(Token::String(StringToken::Raw("test")))), ); assert!(matches!(lexer.next(), Some(Err(_)))); } #[test] fn lex_lua_comment() { let mut lexer = Lexer::new( "-- test + --[[test test]]+ --[===[ ]=]===] + --[===[ ] ]===] + ", ); assert_eq!(ignore_pos(lexer.next()), Some(Ok(Token::Plus))); assert_eq!(ignore_pos(lexer.next()), Some(Ok(Token::Plus))); assert_eq!(ignore_pos(lexer.next()), Some(Ok(Token::Plus))); assert_eq!(ignore_pos(lexer.next()), Some(Ok(Token::Plus))); assert_eq!(ignore_pos(lexer.next()), None); } #[test] fn lex_lua_var() { let str = "ab ab1 ab_1_2 Aab"; let mut lexer = Lexer::new(str); assert_eq!(ignore_pos(lexer.next()), Some(Ok(Token::Var("ab")))); assert_eq!(ignore_pos(lexer.next()), Some(Ok(Token::Var("ab1")))); assert_eq!(ignore_pos(lexer.next()), Some(Ok(Token::Var("ab_1_2")))); assert_eq!(ignore_pos(lexer.next()), Some(Ok(Token::Var("Aab")))); } #[test] fn lex_lua_simple() { let lexer = Lexer::new( "+ - * / % ^ # == ~= <= >= < > = ( ) { } [ ] \ ; : , . .. ... 
and break do else elseif end \ false for function if in local nil not or repeat \ return then true until while n", ); let mut tokens: Vec<Token> = vec![]; for token in lexer { tokens.push(token.unwrap().1); } assert_eq!( tokens, vec![ Token::Plus, Token::Minus, Token::Star, Token::Slash, Token::Percent, Token::Caret, Token::Hash, Token::EqEq, Token::TildeEq, Token::LtEq, Token::GtEq, Token::Lt, Token::Gt, Token::Eq, Token::LParen, Token::RParen, Token::LBrace, Token::RBrace, Token::LBracket, Token::RBracket, Token::Semicolon, Token::Colon, Token::Comma, Token::Dot, Token::DotDot, Token::DotDotDot, Token::Keyword(Keyword::And), Token::Keyword(Keyword::Break), Token::Keyword(Keyword::Do), Token::Keyword(Keyword::Else), Token::Keyword(Keyword::ElseIf), Token::Keyword(Keyword::End), Token::Keyword(Keyword::False), Token::Keyword(Keyword::For), Token::Keyword(Keyword::Function), Token::Keyword(Keyword::If), Token::Keyword(Keyword::In), Token::Keyword(Keyword::Local), Token::Keyword(Keyword::Nil), Token::Keyword(Keyword::Not), Token::Keyword(Keyword::Or), Token::Keyword(Keyword::Repeat), Token::Keyword(Keyword::Return), Token::Keyword(Keyword::Then), Token::Keyword(Keyword::True), Token::Keyword(Keyword::Until), Token::Keyword(Keyword::While), Token::Var("n"), ] ); } #[test] fn lex_lua_windows_line_ending() { let mut lexer = Lexer::new("+\r\n+"); assert_eq!(ignore_pos(lexer.next()), Some(Ok(Token::Plus))); assert_eq!(ignore_pos(lexer.next()), Some(Ok(Token::Plus))); assert_eq!(ignore_pos(lexer.next()), None); } #[test] fn lex_lua_files() { let str = std::fs::read_to_string("tests/test_data").unwrap(); let mut lexer = Lexer::new(&str); let mut i = 0; while let Some(tok) = lexer.next() { assert!(tok.is_ok()); i += 1; } println!("{} tokens", i); }
true
a8a3932c4851fbdaf94162246c6c7a1bea12cc00
Rust
MantissaLabs/rust-cpuid
/src/lib.rs
UTF-8
8,851
2.96875
3
[ "MIT" ]
permissive
//! Rust bindings for [libpcuid](https://github.com/anrieff/libcpuid) //! CPU detection and feature extraction library. //! //! `rust-cpuid` provides a high-level interface for getting information //! about the features of the CPU that runs your code. All the essential //! work is done by the `libcpuid` C library and exposed through Rust's //! FFI mechanism as a simple and convenient API. //! //! # Available features //! //! * CPU vendor, brand and codename detection //! * information about number of cores (both physical and logical) //! * cache size //! * clock frequency //! //! # Installation //! //! First - download, and build libcpuid as [described in the //! readme](https://github.com/anrieff/libcpuid). Install it by running `make //! install` (you may want to run `ldconfig` afterwards). //! //! Add to your `Cargo.toml`: //! //! ```toml //! [dependencies] //! cpuid = "*" //! ``` //! //! # Example //! //! ```rust //! extern crate cpuid; //! //! fn main () { //! match cpuid::identify() { //! Ok(info) => { //! println!("Found: {} CPU, model: {}", info.vendor, info.codename); //! println!("The full brand string is: {}", info.brand); //! println!("Hardware AES support: {}", if info.has_feature(cpuid::CpuFeature::AES) { "yes" } else { "no" }); //! }, //! Err(err) => println!("cpuid error: {}", err), //! }; //! match cpuid::clock_frequency() { //! Some(frequency) => println!("CPU speed: {} MHz", frequency), //! None => println!("Couldn't get CPU speed."), //! }; //! } //! ``` extern crate libc; use std::ffi::CStr; use std::str; mod ffi; /// A struct holding information about CPU features. /// /// This data structure is returned by `identify()`. You can consult /// [libcpuid docs for cpu_id_t](http://libcpuid.sourceforge.net/doxy/structcpu__id__t.html) /// for more detailed descriptions of these fields. pub struct CpuInfo { /// CPU vendor string, for example *GenuineIntel*. pub vendor: String, /// Brand string, for example *Intel(R) Core(TM) i5-2410M CPU @ 2.30GHz*. 
pub brand: String, /// Brief CPU codename, such as *Sandy Bridge (Core i5)*. pub codename: String, /// Number of physical cores of the current CPU. pub num_cores: i32, /// Number of logical processors (may include HyperThreading or such). pub num_logical_cpus: i32, /// Total number of logical processors. pub total_logical_cpus: i32, /// L1 data cache size in kB. `Some(0)` if the CPU lacks cache, `None` if it /// couldn't be determined. pub l1_data_cache: Option<i32>, /// L1 instruction cache size in kB. `Some(0)` if the CPU lacks cache, /// `None` if it couldn't be determined. pub l1_instruction_cache: Option<i32>, /// L2 cache size in kB. `Some(0)` if the CPU lacks L2 cache, `None` if it /// couldn't be determined. pub l2_cache: Option<i32>, /// L3 cache size in kB. `Some(0)` if the CPU lacks L3 cache, `None` if it /// couldn't be determined. pub l3_cache: Option<i32>, flags: [u8; ffi::CPU_FLAGS_MAX], } /// CPU feature identifiers. #[derive(Clone, Copy)] pub enum CpuFeature { FloatingPointUnit = 0, VirtualModeExtension, DebugingExtension, PageSizeExtension, TimestampCounter, ModelSpecificRegisters, PhysicalAddressExtension, MachineCheckException, CMPXCHG8B, APIC, MemoryTypeRangeRegisters, SysenterSysexit, PageGlobalEnable, MachineCheckArchitecture, CMOV, PageAttributeTable, PageAddressExtension36bit, ProcessorSerialNumber, CLFLUSH, DebugStore, ACPI, MMX, FXSAVE, SSE, SSE2, SelfSnoop, HyperThreading, ThermalMonitor, IA64, PendingBreakEnable, SSE3, PCLMULQDQ, DebugStore64, MONITOR, CplQualifiedDebugStore, VirtualisationTechnology, SaferModeExceptions, EnhancedSpeedStep, ThermalMonitor2, SSSE3, ContextId, CMPXCHG16B, SendTaskPriorityMessages, PerformanceCapabilitiesMSR, DirectCacheAccess, SSE41, SSE42, SyscallSysret, ExecuteDisableBit, MOVBE, POPCNT, AES, XSAVE, OSXSAVE, AdvancedVectorExtensions, MMXExtensions, AMD3DNow, AMD3DNowExtended, NoExecuteBit, FXSAVEOptimizations, RDTSCP, LongMode, LAHFLongMode, CoreMultiProcessingLegacyMode, AMDSecureVirtualMachine, LZCNT, 
MisalignedSSE, SSE4a, PREFETCH, OsVisibleWorkaround, InstructionBasedSampling, SSE5, SKINIT, WatchdogTimer, TemperatureSensor, FrequencyIDControl, VoltageIDControl, THERMTRIP, AMDThermalControl, SoftwareThermalControl, Multiplier100Mhz, HardwarePstateControl, ConstantTSCTicks, XOP, FMA3, FMA4, TrailingBitManipulation, FPConvert16Bit, RDRAND, X2APIC, CorePerformanceBoost, MPERF, ProcessorFeedbackInterface, ProcessorAccumulator, AVX2, BMI1, BMI2, HardwareLockElision, RestrictedTransactionalMemory, AVX512F, AVX512DQ, AVX512PF, AVX512ER, AVX512CD, SHASupport, AVX512BW, AVX512VL, SGX, RDSEED, ArbitraryPrecision, NumCpuFeatures, } /// CPU SGX feature identifiers. #[derive(Clone, Copy)] pub enum CpuSGXFeature { IntelSGX1, IntelSGX2, NumSGXFeature, } impl CpuInfo { /// Checks if current CPU supports given feature. /// /// See `CpuFeature` for a list of available feature identifiers. pub fn has_feature(&self, feature: CpuFeature) -> bool { self.flags[feature as usize] == 1u8 } } /// Checks if the CPUID instruction is present. pub fn is_present() -> bool { unsafe { ffi::cpuid_present() == 1 } } /// Returns libcpuid version string. pub fn version() -> String { unsafe { let ptr = ffi::cpuid_lib_version(); let bytes = CStr::from_ptr(ptr).to_bytes(); str::from_utf8(bytes) .ok() .expect("Invalid UTF8 string") .to_string() } } /// Returns last libcpuid error string. pub fn error() -> String { unsafe { let ptr = ffi::cpuid_error(); let bytes = CStr::from_ptr(ptr).to_bytes(); str::from_utf8(bytes) .ok() .expect("Invalid UTF8 string") .to_string() } } /// Tries to identify the current CPU and its features. /// /// In case of successful detection, a `CpuInfo` struct is returned (wrapped /// with `Ok`) which contains all available data about the processor. /// If libcpuid encounters an error, `identify` returns an `Err` with /// the error message inside. 
pub fn identify() -> Result<CpuInfo, String> { let mut raw: ffi::cpu_raw_data_t = Default::default(); let raw_result = unsafe { ffi::cpuid_get_raw_data(&mut raw) }; if raw_result != 0 { return Err(error()); } let mut data: ffi::cpu_id_t = Default::default(); let identify_result = unsafe { ffi::cpu_identify(&mut raw, &mut data) }; if identify_result != 0 { Err(error()) } else { Ok(CpuInfo { vendor: String::from_utf8(data.vendor_str.iter().map(|&x| x as u8).collect()) .ok() .expect("Invalid vendor string"), brand: String::from_utf8(data.brand_str.iter().map(|&x| x as u8).collect()) .ok() .expect("Invalid brand string"), codename: String::from_utf8(data.cpu_codename.iter().map(|&x| x as u8).collect()) .unwrap_or_default(), num_cores: data.num_cores, num_logical_cpus: data.num_logical_cpus, total_logical_cpus: data.total_logical_cpus, l1_data_cache: if data.l1_data_cache != -1 { Some(data.l1_data_cache) } else { None }, l1_instruction_cache: if data.l1_instruction_cache != -1 { Some(data.l1_instruction_cache) } else { None }, l2_cache: if data.l2_cache != -1 { Some(data.l2_cache) } else { None }, l3_cache: if data.l3_cache != -1 { Some(data.l3_cache) } else { None }, flags: data.flags, }) } } /// Gets the CPU clock frequency in MHz. /// /// The underlying implementation uses several methods to discover CPU /// speed, including direct measurement. If all these methods fail, function /// returns `None`. pub fn clock_frequency() -> Option<i32> { let frequency = unsafe { ffi::cpu_clock() }; if frequency != -1 { Some(frequency) } else { None } } #[test] fn test_is_present() { assert!(is_present()); }
true
c21a03053f6373cf3ecf54b1700c27a7341a2119
Rust
drodil/op-api-rust-sdk
/src/apis/accounts.rs
UTF-8
2,299
3.09375
3
[ "MIT" ]
permissive
//! API implementation for //! [AccountsV3](https://op-developer.fi/docs/api/3Oo5zCujXGw2SGEi00skug/OP%20Accounts%20V3.0%20API) //! API use crate::model::accounts::*; use crate::options::Options; use crate::requests::Requests; use log::debug; use std::error::Error; use std::sync::Arc; /// Accounts client. /// /// This client is used to access the OP AccountsV3 API. pub struct AccountsApi { options: Arc<Options>, } impl AccountsApi { /// Creates new Accounts API. /// /// Bear in mind that this API is implemented to follow v3 so you must /// specify v3 as version for the Options. pub fn new(options: Arc<Options>) -> AccountsApi { AccountsApi { options } } /// Gets all accounts from the API and returns list of them. pub async fn accounts(&self) -> Result<AccountList, Box<dyn Error>> { let url = format!("/accounts/{}/accounts", self.options.version()); let response = Requests::get(&self.options, &url, None::<()>).await?; debug!("Accounts response: {:#?}", response); let accounts: AccountList = response.json().await?; Ok(accounts) } /// Gets single account from the API based on accountId. pub async fn account(&self, account_id: String) -> Result<Account, Box<dyn Error>> { let url = format!( "/accounts/{}/accounts/{}", self.options.version(), account_id ); let response = Requests::get(&self.options, &url, None::<()>).await?; debug!("Account response: {:#?}", response); let account: Account = response.json().await?; Ok(account) } /// Gets all transactions for a single account with account id /// with optional parameters for filtering the results. pub async fn transactions( &self, account_id: String, params: Option<TransactionParams>, ) -> Result<TransactionList, Box<dyn Error>> { let url = format!( "/accounts/{}/accounts/{}/transactions", self.options.version(), account_id ); let response = Requests::get(&self.options, &url, params).await?; debug!("Transactions response: {:#?}", response); let list: TransactionList = response.json().await?; Ok(list) } }
true
184c6685ddcf335d44d0322324893c5a74abd031
Rust
aktoro-rs/aktoro
/aktoro-raw/src/udp.rs
UTF-8
1,318
2.859375
3
[ "MIT" ]
permissive
use std::error; use std::net::SocketAddr; use std::net::ToSocketAddrs; use futures_core::Future; pub type UdpSocketSendTo<'s, E> = Box<dyn Future<Output = Result<usize, E>> + 's>; pub type UdpSocketRecv<'s, E> = Box<dyn Future<Output = Result<(usize, SocketAddr), E>> + 's>; pub trait UdpSocket: Unpin + Send + Sized { type Error: error::Error + Send + 'static; /// Tries to create a new UDP socket that /// will be bound to the given address. fn bind<A: ToSocketAddrs>(addr: A) -> Result<Self, Self::Error>; /// Returns the address that the socket /// is bound to. fn local_addr(&self) -> Result<SocketAddr, Self::Error>; /// Tries to send data to the given /// address, eventually returning a future /// that will resolve with the number of /// bytes sent. fn send_to<'s, A: ToSocketAddrs>( &'s mut self, buf: &'s [u8], addr: A, ) -> Result<UdpSocketSendTo<'s, Self::Error>, Self::Error>; /// Tries to receive data and to write it /// to the buffer, eventually returning a /// future that will resolve with the /// number of bytes received and the /// address of the data's sender. fn recv<'s>( &'s mut self, buf: &'s mut [u8], ) -> Result<UdpSocketRecv<'s, Self::Error>, Self::Error>; }
true
5e001abcd76d271748e1c29d65f9a3711a82fd82
Rust
AntonGepting/tmux-interface-rs
/src/commands/options/set_window_option_macro.rs
UTF-8
5,702
2.921875
3
[ "LicenseRef-scancode-unknown-license-reference", "MIT" ]
permissive
/// Builder-style front end for the tmux `set-window-option` command.
///
/// # Manual
///
/// tmux ^3.0:
/// ```text
/// (removed)
/// ```
///
/// tmux ^2.6:
/// ```text
/// set-window-option [-aFgoqu] [-t target-window] option value
/// (alias: setw)
/// ```
///
/// tmux ^1.9:
/// ```text
/// set-window-option [-agoqu] [-t target-window] option value
/// (alias: setw)
/// ```
///
/// tmux ^1.7:
/// ```text
/// set-window-option [-agqu] [-t target-window] option value
/// (alias: setw)
/// ```
///
/// tmux ^1.0:
/// ```text
/// set-window-option [-agu] [-t target-window] option value
/// (alias: setw)
/// ```
///
/// tmux ^0.8:
/// ```text
/// set-window-option [-gu] [-t target-window] option value
/// (alias: setw)
/// ```
#[macro_export]
macro_rules! set_window_option {
    // `[-a]`
    (@cmd ($cmd:expr) -a, $($tail:tt)*) => {{
        $crate::set_window_option!(@cmd ({ $cmd.append() }) $($tail)*)
    }};
    // `[-F]`
    (@cmd ($cmd:expr) -F, $($tail:tt)*) => {{
        $crate::set_window_option!(@cmd ({ $cmd.format() }) $($tail)*)
    }};
    // `[-g]`
    (@cmd ($cmd:expr) -g, $($tail:tt)*) => {{
        $crate::set_window_option!(@cmd ({ $cmd.global() }) $($tail)*)
    }};
    // `[-o]`
    (@cmd ($cmd:expr) -o, $($tail:tt)*) => {{
        $crate::set_window_option!(@cmd ({ $cmd.not_overwrite() }) $($tail)*)
    }};
    // `[-q]`
    (@cmd ($cmd:expr) -q, $($tail:tt)*) => {{
        $crate::set_window_option!(@cmd ({ $cmd.quiet() }) $($tail)*)
    }};
    // `[-u]`
    (@cmd ($cmd:expr) -u, $($tail:tt)*) => {{
        $crate::set_window_option!(@cmd ({ $cmd.unset() }) $($tail)*)
    }};
    // `[-t target-window]`
    (@cmd ($cmd:expr) -t $target_window:expr, $($tail:tt)*) => {{
        $crate::set_window_option!(@cmd ({ $cmd.target_window($target_window) }) $($tail)*)
    }};
    // `option`
    (@cmd ($cmd:expr) $option:expr, $($tail:tt)*) => {{
        $crate::set_window_option!(@cmd ({ $cmd.option($option) }) $($tail)*)
    }};
    // `value`
    // NOTE(review): this arm has the same `$_:expr,` pattern as the `option`
    // arm above; macro_rules! tries arms top-down, so this arm looks
    // unreachable — confirm how `value` is meant to be dispatched.
    (@cmd ($cmd:expr) $value:expr, $($tail:tt)*) => {{
        $crate::set_window_option!(@cmd ({ $cmd.value($value) }) $($tail)*)
    }};
    // NOTE(review): same `$_:expr,` pattern again; also appears unreachable.
    (@cmd ($cmd:expr) $shell_command:expr, $($tail:tt)*) => {{
        $crate::set_window_option!(@cmd ({ $cmd.shell_command($shell_command) }) $($tail)*)
    }};
    //(@cmd ($cmd:expr) -$unknown:tt, $($tail:tt)*) => {{
    //::std::compile_error!("unknown flag, option or parameter: {}", $unknown);
    //}};
    // No tokens left: yield the accumulated builder.
    (@cmd ($cmd:expr)) => {{
        $cmd
    }};
    // Empty invocation: start from a fresh builder.
    () => {{
        $crate::SetWindowOption::new()
    }};
    // Continue from an existing builder passed as `(builder), args...`.
    (($cmd:expr), $($tail:tt)*) => {{
        $crate::set_window_option!(@cmd ($cmd) $($tail)*,)
    }};
    // Fresh builder with arguments.
    ($($tail:tt)*) => {{
        $crate::set_window_option!(@cmd ({ $crate::SetWindowOption::new() }) $($tail)*,)
    }};
}

#[test]
fn set_window_option_macro() {
    use crate::TargetWindow;
    use std::borrow::Cow;

    // See the `# Manual` section on the macro definition above for the
    // per-tmux-version command syntax exercised here.
    let target_window = TargetWindow::Raw("1").to_string();

    // Build the command one flag at a time, gated on the same feature
    // flags that gate each option's availability.
    let set_window_option = set_window_option!();
    #[cfg(feature = "tmux_1_0")]
    let set_window_option = set_window_option!((set_window_option), -a);
    #[cfg(feature = "tmux_2_6")]
    let set_window_option = set_window_option!((set_window_option), -F);
    #[cfg(feature = "tmux_0_8")]
    let set_window_option = set_window_option!((set_window_option), -g);
    #[cfg(feature = "tmux_1_9")]
    let set_window_option = set_window_option!((set_window_option), -o);
    #[cfg(feature = "tmux_1_7")]
    let set_window_option = set_window_option!((set_window_option), -q);
    #[cfg(feature = "tmux_0_8")]
    let set_window_option = set_window_option!((set_window_option), -u);
    #[cfg(feature = "tmux_0_8")]
    let set_window_option = set_window_option!((set_window_option), -t &target_window);
    #[cfg(feature = "tmux_0_8")]
    let set_window_option = set_window_option!((set_window_option), "2");
    #[cfg(feature = "tmux_0_8")]
    let set_window_option = set_window_option!((set_window_option), "3");

    #[cfg(not(feature = "cmd_alias"))]
    let cmd = "set-window-option";
    #[cfg(feature = "cmd_alias")]
    let cmd = "setw";

    // Expected argv, assembled under the same feature gates.
    let mut s = Vec::new();
    s.push(cmd);
    #[cfg(feature = "tmux_1_0")]
    s.push("-a");
    #[cfg(feature = "tmux_2_6")]
    s.push("-F");
    #[cfg(feature = "tmux_0_8")]
    s.push("-g");
    #[cfg(feature = "tmux_1_9")]
    s.push("-o");
    #[cfg(feature = "tmux_1_7")]
    s.push("-q");
    #[cfg(feature = "tmux_0_8")]
    s.push("-u");
    #[cfg(feature = "tmux_0_8")]
    s.extend_from_slice(&["-t", "1"]);
    #[cfg(feature = "tmux_0_8")]
    s.push("2");
    #[cfg(feature = "tmux_0_8")]
    s.push("3");
    let s: Vec<Cow<str>> = s.into_iter().map(|a| a.into()).collect();

    let set_window_option = set_window_option.build().to_vec();

    assert_eq!(set_window_option, s);
}
true
1e3e5a65ea5e3cedb82215aafd8c10f2994c0a0e
Rust
drydoc-org/drydoc
/crates/drydoc-gen/src/fs2.rs
UTF-8
6,198
2.734375
3
[ "BSD-3-Clause" ]
permissive
// In-memory virtual file tree that can mirror, merge and write back
// real directory trees on disk.

use std::{collections::HashMap, path::{Path, PathBuf}, sync::Arc};
use std::io::{ErrorKind, Error, Result, Read};
use tokio::io::AsyncWriteExt;
// NOTE(review): `MmapOptions` is imported but never used below.
use memmap::{Mmap, MmapOptions};
use std::pin::Pin;
use std::future::Future;

/// A node of the virtual tree: either a file or a nested folder.
pub enum Entry {
    File(Arc<dyn File + Send + Sync>),
    Folder(Folder)
}

impl Entry {
    /// Returns the file handle if this entry is a file.
    pub fn as_file(&self) -> Option<&Arc<dyn File + Send + Sync>> {
        if let Self::File(file) = self {
            Some(file)
        } else {
            None
        }
    }

    /// Returns the folder if this entry is a folder.
    pub fn as_folder(&self) -> Option<&Folder> {
        if let Self::Folder(folder) = self {
            Some(folder)
        } else {
            None
        }
    }

    /// Mutable variant of [`Entry::as_folder`].
    pub fn as_folder_mut(&mut self) -> Option<&mut Folder> {
        if let Self::Folder(folder) = self {
            Some(folder)
        } else {
            None
        }
    }
}

impl From<Arc<dyn File + Send + Sync>> for Entry {
    fn from(value: Arc<dyn File + Send + Sync>) -> Self {
        Self::File(value)
    }
}

// Any concrete `File` implementation converts into a file entry.
impl<T: 'static + File + Send + Sync> From<T> for Entry {
    fn from(value: T) -> Self {
        Self::File(Arc::new(value))
    }
}

impl From<Folder> for Entry {
    fn from(value: Folder) -> Self {
        Self::Folder(value)
    }
}

/// A file whose bytes can be read in memory and written out to disk.
#[async_trait::async_trait]
pub trait File {
    /// Returns the file's contents as a byte slice.
    fn contents(&self) -> &[u8];
    /// Writes the contents to `path`, creating parent directories as needed.
    async fn write(&self, path: &PathBuf) -> Result<()>;
}

/// A purely in-memory file.
pub struct VirtFile {
    contents: Box<[u8]>,
}

impl VirtFile {
    /// Creates a virtual file from any byte-like value.
    pub fn new<C: Into<Box<[u8]>>>(contents: C) -> Self {
        Self { contents: contents.into() }
    }
}

/// A file backed by an on-disk file, memory-mapped for reading.
pub struct RealFile {
    // NOTE(review): the handle appears to be retained so the descriptor
    // outlives the mapping created from it — confirm against memmap's
    // lifetime requirements.
    file: std::fs::File,
    // `None` when the file is empty (zero-length files cannot be mapped).
    contents: Option<Mmap>,
}

impl RealFile {
    /// Opens `path` and memory-maps its contents (if non-empty).
    pub fn open<P: AsRef<Path>>(path: P) -> Result<Self> {
        let file = std::fs::File::open(path)?;
        Ok(Self {
            contents: if file.metadata()?.len() > 0 {
                // SAFETY-relevant: mapping a file that is mutated elsewhere
                // is undefined behavior per memmap's documentation.
                Some(unsafe { Mmap::map(&file)? })
            } else {
                None
            },
            file,
        })
    }
}

// Shared empty slice returned for zero-length (unmapped) files.
static EMPTY: &'static [u8] = &[];

#[async_trait::async_trait]
impl File for RealFile {
    fn contents(&self) -> &[u8] {
        if let Some(contents) = &self.contents {
            contents
        } else {
            EMPTY
        }
    }

    async fn write(&self, path: &PathBuf) -> Result<()> {
        if let Some(parent) = path.parent() {
            tokio::fs::create_dir_all(parent).await?;
        }
        let mut out_file = tokio::fs::File::create(path).await?;
        out_file.write_all(self.contents()).await?;
        Ok(())
    }
}

#[async_trait::async_trait]
impl File for VirtFile {
    fn contents(&self) -> &[u8] {
        &self.contents
    }

    async fn write(&self, path: &PathBuf) -> Result<()> {
        if let Some(parent) = path.parent() {
            tokio::fs::create_dir_all(parent).await?;
        }
        let mut out_file = tokio::fs::File::create(path).await?;
        out_file.write_all(&self.contents).await?;
        Ok(())
    }
}

/// A folder of named entries (files or sub-folders).
pub struct Folder {
    entries: HashMap<String, Entry>,
}

impl Folder {
    /// Creates an empty folder.
    pub fn new() -> Self {
        Self { entries: HashMap::new() }
    }

    /// Recursively reads a real directory into a virtual folder.
    ///
    /// Boxed/pinned because the async recursion would otherwise produce an
    /// infinitely-sized future type.
    pub fn read(path: PathBuf) -> Pin<Box<dyn Future<Output = Result<Self>> + Send>> {
        Box::pin(async move {
            let mut dir = tokio::fs::read_dir(path).await?;
            let mut entries: HashMap<String, Entry> = HashMap::new();
            while let Ok(Some(entry)) = dir.next_entry().await {
                let path = entry.path();
                let file_name = entry.file_name();
                // NOTE(review): panics on non-UTF-8 file names.
                let file_name_str = file_name.into_string().unwrap();
                entries.insert(file_name_str, if path.is_dir() {
                    Self::read(path).await?.into()
                } else {
                    RealFile::open(path)?.into()
                });
            }
            Ok(Self { entries })
        })
    }

    /// Inserts `entry` at the path given as a slice of components,
    /// creating intermediate folders; errors if the final component
    /// already exists or an intermediate component is not a folder.
    pub fn insert_path<N: AsRef<str>, E: Into<Entry>>(&mut self, path: &[N], entry: E) -> Result<()> {
        match path.split_first() {
            Some((first_component, rest)) => {
                let first_component_string = first_component.as_ref().to_string();
                if rest.is_empty() {
                    // Leaf component: must not clash with an existing entry.
                    if let Some(_) = self.entries.get(&first_component_string) {
                        Err(Error::new(ErrorKind::AlreadyExists, format!("{} already exists", &first_component_string)))
                    } else {
                        self.entries.insert(first_component_string, entry.into());
                        Ok(())
                    }
                } else {
                    // Intermediate component: descend or create the folder.
                    if let Some(inner_entry) = self.entries.get_mut(&first_component_string) {
                        if let Entry::Folder(folder) = inner_entry {
                            folder.insert_path(rest, entry)
                        } else {
                            Err(Error::new(ErrorKind::AlreadyExists, format!("{} is a folder", first_component.as_ref())))
                        }
                    } else {
                        let mut folder = Folder::new();
                        folder.insert_path(rest, entry)?;
                        self.entries.insert(first_component_string, folder.into());
                        Ok(())
                    }
                }
            },
            None => {
                Err(Error::new(ErrorKind::InvalidInput, "Path is empty"))
            }
        }
    }

    /// Inserts `entry` at a `/`-separated path string.
    pub fn insert<N: Into<String>, E: Into<Entry>>(&mut self, name: N, entry: E) -> Result<()> {
        let name: String = name.into();
        let entry: Entry = entry.into();
        let parts: Vec<&str> = name.split('/').collect();
        self.insert_path(parts.as_slice(), entry)
    }

    /// Merges `other` into `self`, recursing into folders that exist on
    /// both sides; errors when a name is a folder on one side only.
    pub fn merge(&mut self, other: Folder) -> Result<()> {
        for (name, entry) in other.entries {
            if let Some(self_entry) = self.entries.get_mut(&name) {
                if let Entry::Folder(self_entry) = self_entry {
                    if let Entry::Folder(entry) = entry {
                        self_entry.merge(entry)?;
                    } else {
                        return Err(Error::new(ErrorKind::AlreadyExists,format!("Expected {} to be a folder", &name)))
                    }
                } else {
                    return Err(Error::new(ErrorKind::AlreadyExists,format!("Expected {} to be a folder", &name)))
                }
            } else {
                self.entries.insert(name, entry);
            }
        }
        Ok(())
    }

    /// Recursively writes this folder's tree under `path` on disk.
    /// Boxed/pinned for the same async-recursion reason as [`Folder::read`].
    pub fn write_into(&self, mut path: PathBuf) -> Pin<Box<dyn Future<Output = Result<()>> + Send + '_>> {
        Box::pin(async move {
            tokio::fs::create_dir_all(&path).await?;
            for (name, entry) in self.entries.iter() {
                // Reuse one PathBuf: push the component, recurse, pop it.
                path.push(name);
                match entry {
                    Entry::File(file) => {
                        file.write(&path).await?;
                    },
                    Entry::Folder(folder) => {
                        folder.write_into(path.clone()).await?;
                    }
                }
                path.pop();
            }
            Ok(())
        })
    }
}
true
b667dde7cfe5c66cf4398f307671ee2c8f440135
Rust
siimsoni/aoc2020
/d21/src/lib.rs
UTF-8
4,662
2.796875
3
[]
no_license
//! Advent of Code 2020, day 21: allergen assessment.

extern crate rustc_hash;

mod tokenizer;

use rustc_hash::{FxHashMap, FxHashSet};
use std::io::BufRead;
use std::str::from_utf8;
use tokenizer::{TokenKind, Tokenizer};

// One entry per input line: (ingredient names, allergen names), both as
// raw byte strings.
type ParserResult = Box<[(FxHashSet<Box<[u8]>>, FxHashSet<Box<[u8]>>)]>;

/// Reads the whole input, tokenizes it, and builds the per-line
/// (ingredients, allergens) pairs.
///
/// Tokens carry only lengths, so `pos` is a byte cursor into the full
/// `input` buffer that advances by `token.len` for every token.
pub fn parse<R>(mut reader: R) -> ParserResult
where
    R: BufRead,
{
    let mut input = Vec::new();
    let mut tokenizer = Tokenizer::new();
    let mut result = Vec::new();
    // Read and tokenize in fixed 4 KiB pages, accumulating the raw bytes.
    let mut page: [u8; 4096] = [0; 4096];
    while let Ok(page_len) = reader.read(&mut page) {
        if page_len == 0 {
            break;
        }
        tokenizer.tokenize(&mut (page[..page_len].iter()));
        input.extend_from_slice(&page[..page_len]);
    }
    tokenizer.flush();
    let mut token_iter = tokenizer.tokens.iter();
    let mut pos = 0;
    while pos != input.len() {
        // Ingredients run until the opening parenthesis.
        let mut ingredients = FxHashSet::default();
        for token in &mut token_iter {
            match token.kind {
                TokenKind::Literal => {
                    ingredients.insert(Box::from(&input[pos..pos + token.len]));
                    pos += token.len;
                }
                TokenKind::ParenthesesOpen => {
                    pos += token.len;
                    break;
                }
                _ => {
                    pos += token.len;
                }
            }
        }
        // Allergens run until the closing parenthesis; the leading
        // "contains" keyword is skipped.
        let mut allergens = FxHashSet::default();
        for token in &mut token_iter {
            match token.kind {
                TokenKind::Literal => {
                    let slice = &input[pos..pos + token.len];
                    if slice != b"contains" {
                        allergens.insert(Box::from(slice));
                    }
                    pos += token.len;
                }
                TokenKind::ParenthesesClose => {
                    pos += token.len;
                    break;
                }
                _ => {
                    pos += token.len;
                }
            }
        }
        result.push((ingredients, allergens));
    }
    result.into_boxed_slice()
}

// Removes `ingredient` from every allergen's candidate set.
fn remove_ingredient(
    possible_ingredients_by_allergen: &mut FxHashMap<&[u8], FxHashSet<Box<[u8]>>>,
    ingredient: &[u8],
) {
    for possible_ingredients in possible_ingredients_by_allergen.values_mut() {
        possible_ingredients.remove(ingredient);
    }
}

/// Resolves which ingredient carries each allergen.
///
/// First intersects, per allergen, the ingredient sets of every line that
/// mentions it; then repeatedly assigns allergens whose candidate set has
/// shrunk to a single ingredient, removing that ingredient from all other
/// candidate sets, until no further assignment is possible.
fn map_ingredients(parsed: &ParserResult) -> FxHashMap<&[u8], Box<[u8]>> {
    let mut possible_ingredients_by_allergen: FxHashMap<&[u8], FxHashSet<Box<[u8]>>> =
        FxHashMap::default();
    for (ingredients, allergens) in parsed.iter() {
        for allergen in allergens {
            let possible_ingredients = possible_ingredients_by_allergen
                .entry(allergen)
                .or_insert_with(|| ingredients.clone());
            possible_ingredients.retain(|i| ingredients.contains(i));
        }
    }
    let mut mapping: FxHashMap<&[u8], Box<[u8]>> = FxHashMap::default();
    let mut mapped_in_iter = Vec::new();
    loop {
        for (allergen, possible_ingredients) in possible_ingredients_by_allergen.iter() {
            if possible_ingredients.len() == 1 {
                let ingredient = possible_ingredients.iter().next().unwrap();
                mapping.insert(allergen, ingredient.clone());
                mapped_in_iter.push(ingredient.clone());
            }
        }
        for ingredient in &mapped_in_iter {
            remove_ingredient(&mut possible_ingredients_by_allergen, ingredient);
        }
        // Fixed point: nothing new was assigned in this pass.
        if mapped_in_iter.is_empty() {
            break;
        }
        mapped_in_iter.clear();
    }
    mapping
}

/// Part 1: counts how many times ingredients that cannot contain any
/// allergen appear across all lines.
pub fn p1_solve(parsed: &ParserResult) -> Option<String> {
    let mapping = map_ingredients(parsed);
    let allergens: FxHashSet<&Box<[u8]>> =
        mapping.iter().map(|(_, ingredient)| ingredient).collect();
    Some(
        parsed
            .iter()
            .flat_map(|(ingredients, _)| ingredients)
            .filter(|i| !allergens.contains(i))
            .count()
            .to_string(),
    )
}

/// Part 2: the "canonical dangerous ingredient list" — the mapped
/// ingredients sorted by their allergen's name, joined with commas.
pub fn p2_solve(parsed: &ParserResult) -> Option<String> {
    let mapping = map_ingredients(parsed);
    let str_mapping: FxHashMap<&str, &str> = mapping
        .iter()
        .map(|(allergen, ingredient)| {
            (from_utf8(allergen).unwrap(), from_utf8(ingredient).unwrap())
        })
        .collect();
    let mut ordered: Vec<&str> = str_mapping.iter().map(|(allergen, _)| *allergen).collect();
    ordered.sort_unstable();
    let result = ordered
        .iter()
        .map(|allergen| *str_mapping.get(allergen).unwrap())
        .collect::<Vec<&str>>()
        .join(",");
    Some(result)
}
true
222dcdb3e9ec30a971ebf2f92af88ce915bb897f
Rust
Heinmci/EulerProject
/src/problems/problem_012.rs
UTF-8
1,532
3.53125
4
[]
no_license
/// Solves Project Euler problem 12: returns the first triangular number
/// that has at least `nb_divisors_wanted` divisors.
///
/// Odd triangular numbers are skipped, preserving the original
/// implementation's heuristic that they never reach the target first.
pub fn solve(nb_divisors_wanted: u32) -> u32 {
    let mut index = 1u32; // index n of the current triangular number
    let mut value = 1u32; // value of T(n)
    loop {
        index += 1;
        value += index; // T(n) = T(n - 1) + n

        // Heuristic kept from the original implementation: assume odd
        // triangular numbers always have fewer divisors, so skip them.
        if value % 2 != 0 {
            continue;
        }

        if get_nb_divisors(value) >= nb_divisors_wanted {
            return value;
        }
    }
}

/// Counts the divisors of `number` by trial division up to its square root.
///
/// Fixes two defects of the previous version:
/// * the f32 square root lost precision for numbers above 2^24;
/// * when floor(sqrt(number)) divided a non-square `number`, only one of
///   the divisor pair was counted (e.g. 6 was reported as having 3
///   divisors instead of 4: the cofactor 3 was missed).
///
/// Returns 0 for `number == 0` and 1 for `number == 1`.
fn get_nb_divisors(number: u32) -> u32 {
    let n = u64::from(number); // u64 so `i * i` cannot overflow
    let mut nb_divisors = 0;
    let mut i = 1u64;
    while i * i < n {
        if n % i == 0 {
            nb_divisors += 2; // i itself and n / i
        }
        i += 1;
    }
    if i * i == n {
        nb_divisors += 1; // an exact square root pairs with itself
    }
    nb_divisors
}

#[cfg(test)]
mod tests {
    use super::*;
    use std::time::Instant;

    #[test]
    fn test_correct_result() {
        // std::time::Instant replaces the deprecated `time::PreciseTime`.
        let start_time = Instant::now();
        let result = solve(500);
        println!("Problem 12 took {:?}.", start_time.elapsed());
        assert_eq!(result, 76576500);
    }
}
true
715c49124af10d05f5d406fa2b06e1f89e875dec
Rust
raa0121/taplo
/taplo/src/verify.rs
UTF-8
10,072
2.75
3
[ "MIT" ]
permissive
//! Bridges taplo's TOML DOM to the `verify` validation framework, mapping
//! each DOM node kind onto the validator callbacks and attaching source
//! spans for error reporting.

use crate::{dom::*, value::Value};
use verify::{
    span::{Span, Spanned},
    Error, Validate, ValidateMap, ValidateSeq,
};
use rowan::TextRange;
use std::{convert::TryFrom, ops::AddAssign};

/// Newtype over a `rowan` text range, used as the span type reported to
/// validators.
#[derive(Debug, Clone, Copy)]
#[repr(transparent)]
pub struct NodeSpan(pub TextRange);

impl Span for NodeSpan {}

impl From<TextRange> for NodeSpan {
    fn from(r: TextRange) -> Self {
        Self(r)
    }
}

impl AddAssign for NodeSpan {
    fn add_assign(&mut self, rhs: Self) {
        // We don't need hierarchy, so just set the new span as the current one.
        *self = rhs
    }
}

// Generates the common `Spanned` impl (span = the node's own text range)
// for several node types at once.
macro_rules! impl_spanned {
    ($($ident:ident),*) => {
        $(impl Spanned for $ident {
            type Span = NodeSpan;

            fn span(&self) -> Option<Self::Span> {
                Some(self.syntax().text_range().into())
            }
        })*
    };
}

impl_spanned!(Node, EntryNode, KeyNode, ValueNode, IntegerNode, StringNode);

// Don't highlight the entire document
impl Spanned for RootNode {
    type Span = NodeSpan;

    fn span(&self) -> Option<Self::Span> {
        Some(NodeSpan(TextRange::new(0.into(), 1.into())))
    }
}

impl Spanned for TableNode {
    type Span = NodeSpan;

    fn span(&self) -> Option<Self::Span> {
        Some(self.syntax().text_range().into())
    }
}

impl Spanned for ArrayNode {
    type Span = NodeSpan;

    fn span(&self) -> Option<Self::Span> {
        Some(self.syntax().text_range().into())
    }
}

impl Validate for Node {
    // Dispatches to the concrete node kind's implementation.
    fn validate<V: verify::Validator<Self::Span>>(&self, validator: V) -> Result<(), V::Error> {
        match self {
            Node::Root(inner) => inner.validate(validator),
            Node::Table(inner) => inner.validate(validator),
            Node::Key(inner) => inner.validate(validator),
            Node::Value(inner) => inner.validate(validator),
            Node::Array(inner) => inner.validate(validator),
            Node::Entry(_) => unimplemented!("entry key and value must be validated separately"),
        }
    }
}

impl Validate for RootNode {
    // Validates every top-level entry, accumulating all errors via
    // `AddAssign` instead of stopping at the first one.
    fn validate<V: verify::Validator<Self::Span>>(&self, validator: V) -> Result<(), V::Error> {
        let mut map = validator.validate_map(Some(self.entries().len()))?;

        let mut errs: Option<V::Error> = None;

        for (key, entry) in self.entries().iter() {
            if let Err(err) = map.validate_string_entry(key, entry.value()) {
                match &mut errs {
                    Some(errs) => {
                        *errs += err;
                    }
                    None => {
                        errs = Some(err);
                    }
                }
            }
        }

        if let Err(err) = map.end() {
            match &mut errs {
                Some(errs) => {
                    *errs += err;
                }
                None => errs = Some(err),
            }
        }

        match errs {
            Some(e) => Err(e),
            None => Ok(()),
        }
    }
}

impl Validate for TableNode {
    // Same error-accumulating map validation as `RootNode`, but first
    // attaches this table's span to the validator.
    fn validate<V: verify::Validator<Self::Span>>(&self, mut validator: V) -> Result<(), V::Error> {
        validator = validator.with_span(self.span());
        let mut map = validator.validate_map(Some(self.entries().len()))?;

        let mut errs: Option<V::Error> = None;

        for (key, entry) in self.entries().iter() {
            if let Err(err) = map.validate_string_entry(key, entry.value()) {
                match &mut errs {
                    Some(errs) => {
                        *errs += err;
                    }
                    None => {
                        errs = Some(err);
                    }
                }
            }
        }

        if let Err(err) = map.end() {
            match &mut errs {
                Some(errs) => {
                    *errs += err;
                }
                None => errs = Some(err),
            }
        }

        match errs {
            Some(e) => Err(e),
            None => Ok(()),
        }
    }
}

impl Validate for KeyNode {
    fn validate<V: verify::Validator<Self::Span>>(&self, validator: V) -> Result<(), V::Error> {
        // We assume that there are no dotted keys anymore at this point.
        validator.validate_str(
            self.keys_str()
                .next()
                .ok_or_else(|| V::Error::custom("no keys"))?,
        )
    }
}

impl Validate for ValueNode {
    // Converts the DOM value into a `Value` and forwards it to the
    // matching validator callback.
    fn validate<V: verify::Validator<Self::Span>>(&self, validator: V) -> Result<(), V::Error> {
        match self {
            ValueNode::Bool(v) => validator.validate_bool(
                Value::try_from(v.clone())
                    .map_err(|err| V::Error::custom(err.to_string()))?
                    .into_bool()
                    .ok_or_else(|| V::Error::custom("invalid value".to_string()))?,
            ),
            ValueNode::String(v) => validator.validate_str(
                &Value::try_from(v.clone())
                    .map_err(|err| V::Error::custom(err.to_string()))?
                    .into_string()
                    .ok_or_else(|| V::Error::custom("invalid value".to_string()))?,
            ),
            ValueNode::Integer(v) => {
                match Value::try_from(v.clone()).map_err(|err| V::Error::custom(err.to_string()))? {
                    // We try to use the smallest type,
                    // since some validators have size constraints,
                    // but we store everything as 64bits.
                    Value::UnsizedInteger(u) => {
                        if let Ok(v) = u8::try_from(u) {
                            validator.validate_u8(v)
                        } else if let Ok(v) = u16::try_from(u) {
                            validator.validate_u16(v)
                        } else if let Ok(v) = u32::try_from(u) {
                            validator.validate_u32(v)
                        } else {
                            validator.validate_u64(u)
                        }
                    }
                    Value::Integer(i) => {
                        if let Ok(v) = i8::try_from(i) {
                            validator.validate_i8(v)
                        } else if let Ok(v) = i16::try_from(i) {
                            validator.validate_i16(v)
                        } else if let Ok(v) = i32::try_from(i) {
                            validator.validate_i32(v)
                        } else {
                            validator.validate_i64(i)
                        }
                    }
                    _ => panic!("invalid value"),
                }
            }
            ValueNode::Float(v) => validator.validate_f64(
                Value::try_from(v.clone())
                    .map_err(|err| V::Error::custom(err.to_string()))?
                    .into_f64()
                    .ok_or_else(|| V::Error::custom("invalid value".to_string()))?,
            ),
            ValueNode::Array(v) => v.validate(validator),
            // Dates are validated as strings; the formatting depends on
            // which date backend (chrono or time) is compiled in.
            #[cfg(any(feature = "time", feature = "chrono"))]
            ValueNode::Date(v) => {
                let date = Value::try_from(v.clone())
                    .map_err(|err| V::Error::custom(err.to_string()))?
                    .into_date()
                    .ok_or_else(|| V::Error::custom("invalid value".to_string()))?;

                #[cfg(feature = "chrono")]
                match date {
                    crate::value::Date::OffsetDateTime(d) => validator.validate_str(&d.to_rfc3339()),
                    crate::value::Date::LocalDateTime(d) => validator.validate_str(&d.to_string()),
                    crate::value::Date::LocalDate(d) => validator.validate_str(&d.to_string()),
                    crate::value::Date::LocalTime(d) => validator.validate_str(&d.to_string()),
                }

                #[cfg(feature = "time")]
                match date {
                    crate::value::Date::OffsetDateTime(d) => {
                        validator.validate_str(&d.format(time::Format::Rfc3339))
                    }
                    crate::value::Date::LocalDateTime(d) => validator.validate_str(&d.to_string()),
                    crate::value::Date::LocalDate(d) => validator.validate_str(&d.to_string()),
                    crate::value::Date::LocalTime(d) => validator.validate_str(&d.to_string()),
                }
            }
            // Without a date backend, the date's string form is validated.
            #[cfg(all(not(feature = "time"), not(feature = "chrono")))]
            ValueNode::Date(d) => validator.validate_str(
                &Value::try_from(d.clone())
                    .map_err(|err| V::Error::custom(err.to_string()))?
                    .into_string()
                    .ok_or_else(|| V::Error::custom("invalid value".to_string()))?,
            ),
            ValueNode::Table(v) => v.validate(validator),
            ValueNode::Invalid(_) => Err(V::Error::custom("invalid node")),
            ValueNode::Empty => Err(V::Error::custom("empty value")),
        }
    }
}

impl Validate for ArrayNode {
    // Sequence counterpart of the map validation above, with the same
    // error-accumulation strategy.
    fn validate<V: verify::Validator<Self::Span>>(&self, mut validator: V) -> Result<(), V::Error> {
        validator = validator.with_span(self.span());
        let mut seq = validator.validate_seq(Some(self.items().len()))?;

        let mut errs: Option<V::Error> = None;

        for item in self.items() {
            if let Err(err) = seq.validate_element(item) {
                match &mut errs {
                    Some(errs) => {
                        *errs += err;
                    }
                    None => {
                        errs = Some(err);
                    }
                }
            }
        }

        if let Err(err) = seq.end() {
            match &mut errs {
                Some(errs) => {
                    *errs += err;
                }
                None => errs = Some(err),
            }
        }

        match errs {
            Some(e) => Err(e),
            None => Ok(()),
        }
    }
}
true
239b714df7a5498f0c02091c88d6046dcdf25894
Rust
aochagavia/RustCalc
/src/calc/statement.rs
UTF-8
637
3.203125
3
[]
no_license
use super::CalcResult;
use super::expression::Expression;
use super::environment::Environment;

/// The kind of a statement: a variable assignment or a function
/// definition.
pub enum StmtType {
    Assign,
    FuncDef,
}

/// A parsed statement: its kind, the name being bound, and the
/// right-hand-side expression.
pub struct Statement {
    pub stmt_type: StmtType,
    pub name: String,
    pub rhs: Expression
}

impl Statement {
    /// Executes the statement against `env`.
    ///
    /// For `Assign`, evaluates the right-hand side and binds the result
    /// to `name` in the environment. `FuncDef` is not implemented yet
    /// and panics when executed.
    pub fn exec(&self, env: &mut Environment) -> CalcResult<()> {
        match self.stmt_type {
            StmtType::Assign => {
                // `?` replaces the long-deprecated `try!` macro.
                let rhs = self.rhs.eval(env)?;
                env.set_var(&self.name, rhs);
                Ok(())
            }
            StmtType::FuncDef => {
                unimplemented!("function definitions are not supported yet")
            }
        }
    }
}
true
5313ed626c27f1e2da76a7ab7f6f1ca62fc53900
Rust
pm-twice/procon
/AOJ/ITP1/4/Q1_4D/src/main.rs
UTF-8
534
2.703125
3
[]
no_license
#![allow(non_snake_case)]
use std::io;

/// Reads an element count (unused) and a line of integers from stdin,
/// then prints "min max sum" of the values.
fn main() {
    let stdin = io::stdin();

    // First line: the element count; we derive the length from the data
    // itself, so the parsed value is intentionally discarded.
    let mut line = String::new();
    stdin.read_line(&mut line).ok();
    let _n = line.trim().parse::<i32>();

    // Second line: whitespace-separated integers.
    let mut line = String::new();
    stdin.read_line(&mut line).ok();
    let values: Vec<i64> = line
        .split_whitespace()
        .map(|tok| tok.parse::<i64>().unwrap())
        .collect();

    let smallest = values.iter().min().unwrap();
    let largest = values.iter().max().unwrap();
    let total: i64 = values.iter().sum();
    println!("{} {} {}", smallest, largest, total);
}
true
2938fe2d40c25f1d6a7e9db5cfebf7e26b86ada6
Rust
shota-tsuji/imagecli
/src/imagix/resize.rs
UTF-8
2,368
3.375
3
[]
no_license
//! 画像のresizeに関するモジュール use std::fs; use std::io; use std::path::PathBuf; use crate::imagix::error::ImageXError; /// 指定されたディレクトリ内の画像ファイルのパス一覧を返す関数 /// /// # Arguments /// * `src_folder` - 対象のディレクトリの絶対パス /// # Return /// * 画像ファイルのパス一覧あるいはImageXError pub fn get_image_files(src_folder: PathBuf) -> Result<Vec<PathBuf>, ImageXError> { let entries = fs::read_dir(src_folder) .map_err(|_e| ImageXError::UserInputError("Invalid source folder".to_string()))? .map(|res| res.map(|e| e.path())) .collect::<Result<Vec<_>, io::Error>>(); // mapを適用した後にエラーにならなかったものを通す let image_entries = entries? .into_iter() .filter(|r| { r.extension() == Some("JPG".as_ref()) || r.extension() == Some("jpg".as_ref()) || r.extension() == Some("PNG".as_ref()) || r.extension() == Some("png".as_ref()) }) .collect(); Ok(image_entries) } #[cfg(test)] mod tests { use super::*; use std::fs::File; #[test] // ディレクトリのパスを受け取るとそのディレクトリ内の画像ファイルのパスを含むVectorが返る fn test_get_image_files_1() { const TARGET_DIR: &'static str = "/tmp/test/images/"; let directory = PathBuf::from(TARGET_DIR); let file_image = PathBuf::from(TARGET_DIR.to_string() + "image_someone_1.jpg"); let file_text = PathBuf::from(TARGET_DIR.to_string() + "file.txt"); let _ = fs::create_dir_all(directory.as_path()); let _ = fs::remove_file(file_image.as_path()); let _ = File::create(file_image.as_path()); let _ = File::create(file_text.as_path()); assert_eq!(vec![file_image], get_image_files(directory).unwrap()); } #[test] // 空のディレクトリのパスを受け取ると空のVectorが返る fn test_get_image_files_2() { const TARGET_DIR: &'static str = "/tmp/test/empty/"; let directory = PathBuf::from(TARGET_DIR); let _ = fs::create_dir_all(directory.as_path()); let expected: Vec<PathBuf> = Vec::new(); assert_eq!(expected, get_image_files(directory).unwrap()); } }
true
7894e4cc935e79d7cac25636104df0f2344016c4
Rust
isgasho/alfred-workflows-rs
/datadog-workflow/src/workflow.rs
UTF-8
13,136
2.578125
3
[ "MIT" ]
permissive
use alfred::Item; use chrono::{DateTime, Utc}; use failure::{format_err, Error}; use reqwest::Client; use rusqlite::{types::ToSql, Connection, NO_PARAMS}; use serde::Deserialize; use std::str; const APPLICATION_KEY: &str = "application_key"; const API_KEY: &str = "api_key"; pub struct DatadogWorkflow { conn: Connection, } impl DatadogWorkflow { pub fn create() -> Result<Self, Error> { let conn = alfred_workflow::open_database_or_else("datadog", DatadogWorkflow::create_tables)?; Ok(DatadogWorkflow { conn }) } fn create_tables(conn: &Connection) -> Result<(), Error> { conn.execute( "CREATE TABLE IF NOT EXISTS config ( key TEXT NOT NULL PRIMARY KEY, value TEXT NOT NULL );", NO_PARAMS, )?; conn.execute( "CREATE TABLE IF NOT EXISTS timeboards ( id TEXT NOT NULL PRIMARY KEY, title TEXT NOT NULL, description TEXT NOT NULL, url TEXT NOT NULL, modified INTEGER NOT NULL );", NO_PARAMS, )?; conn.execute( "CREATE INDEX IF NOT EXISTS idx_timeboards_title_modified ON timeboards (title, modified);", NO_PARAMS, )?; conn.execute( "CREATE TABLE IF NOT EXISTS screenboards ( id INTEGER NOT NULL PRIMARY KEY, title TEXT NOT NULL, description TEXT NOT NULL, url TEXT NOT NULL, modified INTEGER NOT NULL );", NO_PARAMS, )?; conn.execute( "CREATE INDEX IF NOT EXISTS idx_screenboards_title_modified ON screenboards (title, modified);", NO_PARAMS, )?; conn.execute( "CREATE TABLE IF NOT EXISTS monitors ( id INTEGER NOT NULL PRIMARY KEY, name TEXT NOT NULL, url TEXT NOT NULL, modified INTEGER NOT NULL );", NO_PARAMS, )?; conn.execute( "CREATE INDEX IF NOT EXISTS idx_monitors_name_modified ON monitors (name, modified);", NO_PARAMS, )?; conn.execute( "CREATE TABLE IF NOT EXISTS monitor_tags ( id INTEGER NOT NULL, name TEXT NOT NULL, CONSTRAINT fk_monitors FOREIGN KEY (id) REFERENCES monitors(id) ON DELETE CASCADE );", NO_PARAMS, )?; conn.execute( "CREATE INDEX IF NOT EXISTS idx_monitor_tags_id ON monitor_tags (id);", NO_PARAMS, )?; conn.execute( "CREATE INDEX IF NOT EXISTS idx_monitor_tags_name ON 
monitor_tags (name);", NO_PARAMS, )?; Ok(()) } pub fn set_application_key(&self, key: &str) -> Result<(), Error> { self.set_key(APPLICATION_KEY, key) } pub fn set_api_key(&self, key: &str) -> Result<(), Error> { self.set_key(API_KEY, key) } fn set_key(&self, name: &str, key: &str) -> Result<(), Error> { self.conn .execute( "INSERT INTO config (key, value) VALUES (?1, ?2) ON CONFLICT(key) DO UPDATE SET value=excluded.value", &[name, key], ) .map(|_|Ok(())) .map_err(|e| format_err!("failed to insert application key: {}", e))? } pub fn refresh_cache(&mut self) -> Result<(), Error> { let mut stmt = self.conn.prepare("SELECT value FROM config WHERE key=?1")?; let application_key: String = stmt.query_row(&[APPLICATION_KEY], |row| row.get(0))?; let api_key: String = stmt.query_row(&[API_KEY], |row| row.get(0))?; stmt.finalize()?; let client = reqwest::Client::new(); self.refresh_timeboards(&client, &application_key, &api_key)?; self.refresh_screenboards(&client, &application_key, &api_key)?; self.refresh_monitors(&client, &application_key, &api_key)?; // since this workflow is READ heavy, let's optimize the SQLite indexes and DB self.conn .execute("VACUUM;", NO_PARAMS) .map(|_| Ok(())) .map_err(|e| format_err!("failed to VACCUM database: {}", e))? 
} fn refresh_timeboards( &mut self, client: &Client, app_key: &str, api_key: &str, ) -> Result<(), Error> { self.conn .execute("DELETE FROM timeboards;", NO_PARAMS) .map_err(|e| format_err!("failed to delete timeboards: {}", e))?; #[derive(Debug, Deserialize)] struct Dashboards { #[serde(rename = "dashes")] boards: Vec<Dashboard>, } #[derive(Debug, Deserialize)] struct Dashboard { id: String, title: String, description: Option<String>, modified: DateTime<Utc>, } let tx = self.conn.transaction()?; let mut stmt = tx.prepare("INSERT INTO timeboards (id, title, description, url, modified) VALUES (?1, ?2, ?3, ?4, ?5)")?; for board in client .get("https://api.datadoghq.com/api/v1/dash") .query(&[(APPLICATION_KEY, app_key), (API_KEY, api_key)]) .send()? .json::<Dashboards>()? .boards { let url = format!("https://segment.datadoghq.com/dash/{}", board.id); stmt.execute(&[ &board.id as &ToSql, &board.title, &board.description.unwrap_or_default(), &url, &board.modified.timestamp(), ])?; } stmt.finalize()?; tx.commit() .map_err(|e| format_err!("failed to commit timeboards transaction: {}", e))?; Ok(()) } fn refresh_screenboards( &mut self, client: &Client, app_key: &str, api_key: &str, ) -> Result<(), Error> { self.conn .execute("DELETE FROM screenboards;", NO_PARAMS) .map_err(|e| format_err!("failed to delete screenboards: {}", e))?; #[derive(Debug, Deserialize)] struct ScreenBoards { #[serde(rename = "screenboards")] boards: Vec<ScreenBoard>, } #[derive(Debug, Deserialize)] struct ScreenBoard { id: i32, title: String, description: Option<String>, modified: DateTime<Utc>, } let tx = self.conn.transaction()?; let mut stmt = tx.prepare("INSERT INTO screenboards (id, title, description, url, modified) VALUES (?1, ?2, ?3, ?4, ?5)")?; for board in client .get("https://api.datadoghq.com/api/v1/screen") .query(&[(APPLICATION_KEY, app_key), (API_KEY, api_key)]) .send()? .json::<ScreenBoards>()? 
.boards { let url = format!("https://segment.datadoghq.com/screen/{}", board.id); stmt.execute(&[ &board.id as &ToSql, &board.title, &board.description.unwrap_or_default(), &url, &board.modified.timestamp(), ])?; } stmt.finalize()?; tx.commit() .map_err(|e| format_err!("failed to commit screenboards transaction: {}", e))?; Ok(()) } fn refresh_monitors( &mut self, client: &Client, app_key: &str, api_key: &str, ) -> Result<(), Error> { self.conn .execute("DELETE FROM monitors;", NO_PARAMS) .map_err(|e| format_err!("failed to delete monitors: {}", e))?; #[derive(Debug, Deserialize)] struct Monitor { id: i32, name: String, tags: Vec<String>, modified: DateTime<Utc>, } let tx = self.conn.transaction()?; let mut stmt_monitor = tx.prepare("INSERT INTO monitors (id, name, url, modified) VALUES (?1, ?2, ?3, ?4)")?; let mut stmt_tags = tx.prepare("INSERT INTO monitor_tags (id, name) VALUES (?1, ?2)")?; for monitor in client .get("https://api.datadoghq.com/api/v1/monitor") .query(&[(APPLICATION_KEY, app_key), (API_KEY, api_key)]) .send()? .json::<Vec<Monitor>>()? { let url = format!("https://segment.datadoghq.com/monitors/{}", monitor.id); stmt_monitor.execute(&[ &monitor.id as &ToSql, &monitor.name, &url, &monitor.modified.timestamp(), ])?; for tag in monitor.tags { stmt_tags.execute(&[&monitor.id as &ToSql, &tag])?; } } stmt_monitor.finalize()?; stmt_tags.finalize()?; tx.commit() .map_err(|e| format_err!("failed to commit screenboards transaction: {}", e))?; Ok(()) } pub fn query_timeboards<'items>(&self, title: &str) -> Result<Vec<Item<'items>>, Error> { let query = format!("%{}%", title); self.conn.prepare( "SELECT title, description, url FROM timeboards WHERE title LIKE ? ORDER BY modified DESC LIMIT 10", )?.query_map(&[&query], |row| { let title: String = row.get(0); let description: String = row.get(1); let url: String = row.get(2); alfred::ItemBuilder::new(title.clone()) .subtitle(description) .autocomplete(title) .arg(format!("open {}", url)) .into_item() })? 
.collect::<Result<Vec<_>, _>>() .map_err(|e| format_err!("failed querying timeboards: {}", e)) } pub fn query_screenboards<'items>(&self, title: &str) -> Result<Vec<Item<'items>>, Error> { let query = format!("%{}%", title); self.conn.prepare( "SELECT title, description, url FROM screenboards WHERE title LIKE ? ORDER BY modified DESC LIMIT 10", )?.query_map(&[&query], |row| { let title: String = row.get(0); let description: String = row.get(1); let url: String = row.get(2); alfred::ItemBuilder::new(title.clone()) .subtitle(description) .autocomplete(title) .arg(format!("open {}", url)) .into_item() })? .collect::<Result<Vec<_>, _>>() .map_err(|e| format_err!("failed querying screenboards: {}", e)) } pub fn query_dashboards<'items>(&self, title: &str) -> Result<Vec<Item<'items>>, Error> { let query = format!("%{}%", title); self.conn .prepare( "SELECT title, description, url, modified FROM timeboards WHERE title LIKE ?1 UNION ALL SELECT title, description, url, modified FROM screenboards WHERE title LIKE ?1 ORDER BY modified LIMIT 10", )? .query_map(&[&query], |row| { let title: String = row.get(0); let description: String = row.get(1); let url: String = row.get(2); alfred::ItemBuilder::new(title.clone()) .subtitle(description) .autocomplete(title) .arg(format!("open {}", url)) .into_item() })? .collect::<Result<Vec<_>, _>>() .map_err(|e| format_err!("failed querying dashboards: {}", e)) } pub fn query_monitors<'items>( &self, name: &str, tag: Option<&str>, ) -> Result<Vec<Item<'items>>, Error> { let query = format!("%{}%", name); let tag_query: String; let mut params: Vec<&ToSql> = vec![&query]; let mut select = "SELECT m.name, m.url FROM monitors m ".to_owned(); match tag { Some(ref t) => { select += "LEFT JOIN monitor_tags t ON t.id = m.id WHERE m.name LIKE ? AND t.name LIKE ? "; tag_query = format!("{}%", t); params.push(&tag_query); } _ => select += "WHERE m.name LIKE ? ", } select += "ORDER BY m.modified DESC LIMIT 10"; self.conn .prepare(&select)? 
.query_map(&params, |row| { let name: String = row.get(0); let url: String = row.get(1); alfred::ItemBuilder::new(name.clone()) .subtitle(name.clone()) .autocomplete(name) .arg(format!("open {}", url)) .into_item() })? .collect::<Result<Vec<_>, _>>() .map_err(|e| format_err!("failed querying monitors: {}", e)) } }
true
90f3226cb3d112ac3c223e2c2cd6eabd75fa4525
Rust
sugyan/leetcode
/problems/1008-construct-binary-search-tree-from-preorder-traversal/lib.rs
UTF-8
1,231
3.390625
3
[]
no_license
use std::cell::RefCell; use std::rc::Rc; use utils::TreeNode; pub struct Solution; impl Solution { pub fn bst_from_preorder(preorder: Vec<i32>) -> Option<Rc<RefCell<TreeNode>>> { Self::helper(&preorder) } fn helper(v: &[i32]) -> Option<Rc<RefCell<TreeNode>>> { if let Some(&first) = v.first() { let i = v.iter().position(|&e| e > first).unwrap_or(v.len()); Some(Rc::new(RefCell::new(TreeNode { val: first, left: Self::helper(&v[1..i]), right: Self::helper(&v[i..]), }))) } else { None } } } #[cfg(test)] mod tests { use super::*; use utils::to_tree; #[test] fn example_1() { assert_eq!( to_tree(vec![ Some(8), Some(5), Some(10), Some(1), Some(7), None, Some(12) ]), Solution::bst_from_preorder(vec![8, 5, 1, 7, 10, 12]) ) } #[test] fn example_2() { assert_eq!( to_tree(vec![Some(1), None, Some(3)]), Solution::bst_from_preorder(vec![1, 3]) ) } }
true
e6d81968b6bc3050e543428ef104aca95edd1243
Rust
fbucek/nafta
/tests/migrations_test.rs
UTF-8
2,003
2.953125
3
[ "MIT" ]
permissive
// Database #[macro_use] extern crate diesel; #[macro_use] extern crate diesel_migrations; extern crate nafta; use diesel::dsl::insert_into; use diesel::prelude::*; // Macro needed for: embedded_migrations::run(&conn) embed_migrations!("tests/migrations"); // Define schema for Posts table! { posts (id) { id -> Integer, title -> Text, body -> Text, } } mod tests { //#[cfg_attr(test, macro_use)] use super::*; #[derive(Queryable)] struct Post { id: i32, title: String, body: String, } #[derive(Insertable)] #[table_name = "posts"] struct NewPost<'a> { title: &'a str, body: &'a str, } #[test] fn test_init() { use super::posts::dsl::*; let test_db = nafta::sqlite::TestDb::new(); // Path with database must exists let path = test_db.db_path.to_owned(); assert!(path.exists()); let conn = test_db .conn() .expect("Not possible to get pooled connection"); embedded_migrations::run(&conn).expect("Migration not possible to run"); let new_post = NewPost { title: "new post", body: "not empty body", }; let inserted = insert_into(posts) .values(&new_post) .execute(&conn) .expect("Not possible to insert new post into database"); assert_eq!(inserted, 1); // Get all values -> Must be only 1 let last_post = posts .order(id.desc()) .get_result::<Post>(&conn) .expect("Not possible to query Posts"); assert_eq!(last_post.id, 1); assert_eq!(last_post.title, "new post"); assert_eq!(last_post.body, "not empty body"); // Path after TestDb is drop must not exists drop( conn); // First connection which holds ref to Pool must be droped!!! drop(test_db); // Drop TestDb assert!(!path.exists()); } }
true
701427a4ab0741c3a5875d83c6b65ee3d74d6c8c
Rust
ParkMyCar/twilioc-rs
/src/client.rs
UTF-8
3,221
2.984375
3
[]
no_license
use std::time::Duration; use actix_rt::System; use actix_web::client::{Client, ClientBuilder, Connector}; use futures::future::{lazy, Future}; use openssl::ssl::{SslConnector, SslMethod}; use serde_derive::{Deserialize, Serialize}; use crate::config::Config; use crate::regex::valid_number_re; pub struct TwilioClient { account_sid: String, client: Client, from: Option<String>, } impl TwilioClient { pub fn new(config: Config) -> TwilioClient { // Setup SSL Connection for the Actix Client let ssl_connector = SslConnector::builder(SslMethod::tls()) .expect("Unable to build SSL connector!") .build(); let connector = Connector::new() .ssl(ssl_connector) .timeout(Duration::from_secs(5)) .finish(); // Get Twilio Keys to setup HTTP Basic Auth header let (account_sid, auth_token) = match config.keys { Some(keys) => (keys.account_sid.unwrap(), keys.auth_token.unwrap()), None => panic!("In order to create a Twilio Client you need to provide an account_sid, and auth_token with your config"), }; // Create Actix Client with SSL and auth let client = ClientBuilder::default() .basic_auth(&account_sid, Some(auth_token.as_str())) .connector(connector) .finish(); // Unwrap UserPrefs let from = match config.user_prefs { Some(prefs) => match prefs.from { Some(from) => { assert!( valid_number_re(from.as_str()), "The default from number provided is not a valid number!" ); Some(from) } None => None, }, None => None, }; TwilioClient { account_sid, client, from, } } pub fn send_sms(&self, to: String, body: String, from: Option<String>) { assert!( from.is_some() || self.from.is_some(), "You need to specify a default 'from' in your config, or you need to provide one as an argument!" 
); let from = from.unwrap_or(self.from.as_ref().unwrap().clone()); let sms_form = SMSForm::new(to, from, body); let url = format!( "https://api.twilio.com/2010-04-01/Accounts/{}/Messages.json", &self.account_sid ); System::new("test") .block_on(lazy(|| { self.client .post(url) .send_form(&sms_form) .map_err(|err| (println!("{:?}", err))) .and_then(|response| { println!("{:?}", response); Ok(()) }) })) .unwrap(); } } #[allow(non_snake_case)] #[derive(Debug, Deserialize, Serialize)] struct SMSForm { To: String, From: String, Body: String, } impl SMSForm { pub fn new(to: String, from: String, body: String) -> SMSForm { SMSForm { To: to, From: from, Body: body, } } }
true
fc1838549c668d818d31f1d388aec80beda72396
Rust
battila7/megtanulok-rustban-programozni
/exercism-rust-track/02-medium/42-isbn-verifier/src/lib.rs
UTF-8
780
3.234375
3
[ "MIT" ]
permissive
pub fn is_valid_isbn(isbn: &str) -> bool { let without_hyphens = isbn.replace("-", ""); if without_hyphens.len() != 10 { return false } let (code_as_str, checksum_as_str) = without_hyphens.split_at(without_hyphens.len() - 1); if !code_as_str.chars().all(|ch| ch.is_ascii_digit()) { return false } let check_character = checksum_as_str.chars().next().unwrap(); if !check_character.is_ascii_digit() && check_character != 'X' { return false } let check = check_character.to_digit(10).unwrap_or(10) as usize; let sum: usize = code_as_str.chars() .map(|ch| ch.to_digit(10).unwrap() as usize) .enumerate() .map(|pair| (10 - pair.0) * pair.1) .sum(); (sum + check) % 11 == 0 }
true
8caee0db962091353c28bb1a14f1095baca1c06a
Rust
darkmusic/gerg-ui
/src/colors.rs
UTF-8
9,473
3.359375
3
[]
no_license
use hex::FromHex; use phf::phf_map; enum ConversionType { FromHexTriplet, FromRgbDecimal, //FromRgbPercent, FromName, } pub fn parse_color(s: &String) -> u32 { let conversion_type = determine_conversion_type(&s); let result = match conversion_type { ConversionType::FromHexTriplet => from_hex_triplet(s), ConversionType::FromRgbDecimal => from_rgb_decimal(s), ConversionType::FromName => from_name(s), }; result } fn determine_conversion_type(s: &String) -> ConversionType { let s = s.trim(); if s.starts_with('#') { return ConversionType::FromHexTriplet; } let s = s.split(';').collect::<Vec<&str>>(); if s.len() >= 3 { return ConversionType::FromRgbDecimal; } return ConversionType::FromName; } fn from_hex_triplet(s: &String) -> u32 { let mut s2 = s.clone(); s2.remove(0); let buffer = <[u8;3]>::from_hex(s2); let result = match buffer { Ok(bytes) => { ((bytes[0] as u32) << 0) + ((bytes[1] as u32) << 8) + ((bytes[2] as u32) << 16) + (255 << 24) }, Err(_) => panic!("[{}] is not a hex value.", s), }; result } fn from_rgb_decimal(s: &String) -> u32 { let split = s.trim().split(';').collect::<Vec<&str>>(); if split.len() == 3 { let value1 = split[0].trim().parse::<u32>().unwrap(); // red let value2 = split[1].trim().parse::<u32>().unwrap(); // green let value3 = split[2].trim().parse::<u32>().unwrap(); // blue let value4: u32 = 255; let result = (value1 << 0) + (value2 << 8) + (value3 << 16) + (value4 << 24); return result; } else if split.len() == 4 { let value1 = split[0].trim().parse::<u32>().unwrap(); // red let value2 = split[1].trim().parse::<u32>().unwrap(); // green let value3 = split[2].trim().parse::<u32>().unwrap(); // blue let value4 = split[3].trim().parse::<u32>().unwrap(); // alpha let result = (value1 << 0) + (value2 << 8) + (value3 << 16) + (value4 << 24); return result; } else { panic!("Could not parse [{}]", s); } } fn from_name(s: &String) -> u32 { let col = COLORS.get(s.to_lowercase().as_str()).cloned(); match col { Some(c) => return 
from_hex_triplet(&c.to_string()), None => panic!("Color [{}] unknown.", s) } } static COLORS: phf::Map<&'static str, &str> = phf_map! { "air force blue" => "#5d8aa8", "alice blue" => "#f0f8ff", "alizarin crimson" => "#e32636", "almond" => "#efdecd", "amaranth" => "#e52b50", "amber" => "#ffbf00", "american rose" => "#ff033e", "amethyst" => "#9966cc", "android green" => "#a4c639", "anti-flash white" => "#f2f3f4", "antique brass" => "#cd9575", "antique fuchsia" => "#915c83", "antique white" => "#faebd7", "ao" => "#008000", "apple green" => "#8db600", "apricot" => "#fbceb1", "aqua" => "#00ffff", "aquamarine" => "#7fffd4", "army green" => "#4b5320", "arylide yellow" => "#e9d66b", "ash gray" => "#b2beb5", "asparagus" => "#87a96b", "atomic tangerine" => "#ff9966", "auburn" => "#a52a2a", "aureolin" => "#fdee00", "aurometalsaurus" => "#6e7f80", "awesome" => "#ff2052", "azure" => "#007fff", "azure mist/web" => "#f0ffff", "baby blue" => "#89cff0", "baby blue eyes" => "#a1caf1", "baby pink" => "#f4c2c2", "ball blue" => "#21abcd", "banana mania" => "#fae7b5", "banana yellow" => "#ffe135", "battleship gray" => "#848482", "bazaar" => "#98777b", "beau blue" => "#bcd4e6", "beaver" => "#9f8170", "beige" => "#f5f5dc", "bisque" => "#ffe4c4", "bistre" => "#3d2b1f", "bittersweet" => "#fe6f5e", "black" => "#000000", "blanched almond" => "#ffebcd", "bleu de france" => "#318ce7", "blizzard blue" => "#ace5ee", "blond" => "#faf0be", "blue" => "#0000ff", "blue bell" => "#a2a2d0", "blue gray" => "#6699cc", "blue green" => "#0d98ba", "blue purple" => "#8a2be2", "blue violet" => "#8a2be2", "blush" => "#de5d83", "bole" => "#79443b", "bondi blue" => "#0095b6", "bone" => "#e3dac9", "boston university red" => "#cc0000", "bottle green" => "#006a4e", "boysenberry" => "#873260", "brandeis blue" => "#0070ff", "brass" => "#b5a642", "brick red" => "#cb4154", "bright cerulean" => "#1dacd6", "bright green" => "#66ff00", "bright lavender" => "#bf94e4", "bright maroon" => "#c32148", "bright pink" => 
"#ff007f", "bright turquoise" => "#08e8de", "bright ube" => "#d19fe8", "brilliant lavender" => "#f4bbff", "brilliant rose" => "#ff55a3", "brink pink" => "#fb607f", "british racing green" => "#004225", "bronze" => "#cd7f32", "brown" => "#a52a2a", "bubble gum" => "#ffc1cc", "bubbles" => "#e7feff", "buff" => "#f0dc82", "bulgarian rose" => "#480607", "burgundy" => "#800020", "burlywood" => "#deb887", "burnt orange" => "#cc5500", "burnt sienna" => "#e97451", "burnt umber" => "#8a3324", "byzantine" => "#bd33a4", "byzantium" => "#702963", "cg blue" => "#007aa5", "cg red" => "#e03c31", "cadet" => "#536872", "cadet blue" => "#5f9ea0", "cadet gray" => "#91a3b0", "cadmium green" => "#006b3c", "cadmium orange" => "#ed872d", "cadmium red" => "#e30022", "cadmium yellow" => "#fff600", "cafe au lait" => "#a67b5b", "cafe noir" => "#4b3621", "cal poly pomona green" => "#1e4d2b", "cambridge blue" => "#a3c1ad", "camel" => "#c19a6b", "camouflage green" => "#78866b", "canary" => "#ffff99", "canary yellow" => "#ffef00", "candy apple red" => "#ff0800", "candy pink" => "#e4717a", "capri" => "#00bfff", "caput mortuum" => "#592720", "cardinal" => "#c41e3a", "caribbean green" => "#00cc99", "carmine" => "#ff0040", "carmine pink" => "#eb4c42", "carmine red" => "#ff0038", "carnation pink" => "#ffa6c9", "carnelian" => "#b31b1b", "carolina blue" => "#99badd", "carrot orange" => "#ed9121", "celadon" => "#ace1af", "celeste" => "#b2ffff", "celestial blue" => "#4997d0", "cerise" => "#de3163", "cerise pink" => "#ec3b83", "cerulean" => "#007ba7", "cerulean blue" => "#2a52be", "chamoisee" => "#a0785a", "champagne" => "#fad6a5", "charcoal" => "#36454f", "chartreuse" => "#7fff00", "cherry" => "#de3163", "cherry blossom pink" => "#ffb7c5", "chestnut" => "#cd5c5c", "chocolate" => "#d2691e", "chrome yellow" => "#ffa700", "cinereous" => "#98817b", "cinnabar" => "#e34234", "cinnamon" => "#d2691e", "citrine" => "#e4d00a", "classic rose" => "#fbcce7", "cobalt" => "#0047ab", "cocoa brown" => "#d2691e", "coffee" => 
"#6f4e37", "columbia blue" => "#9bddff", "cool black" => "#002e63", "cool gray" => "#8c92ac", "copper" => "#b87333", "copper rose" => "#996666", "coquelicot" => "#ff3800", "coral" => "#ff7f50", "coral pink" => "#f88379", "coral red" => "#ff4040", "cordovan" => "#893f45", "corn" => "#fbec5d", "cornell red" => "#b31b1b", "cornflower" => "#9aceeb", "cornflower blue" => "#6495ed", "cornsilk" => "#fff8dc", "cosmic latte" => "#fff8e7", "cotton candy" => "#ffbcd9", "cream" => "#fffdd0", "crimson" => "#dc143c", "crimson red" => "#990000", "crimson glory" => "#be0032", "cyan" => "#00ffff", "daffodil" => "#ffff31", "dandelion" => "#f0e130", "dark blue" => "#00008b", "dark brown" => "#654321", "dark byzantium" => "#5d3954", "dark candy apple red" => "#a40000", "dark cerulean" => "#08457e", "dark chestnut" => "#986960", "dark coral" => "#cd5b45", "dark cyan" => "#008b8b", "dark electric blue" => "#536878", "dark goldenrod" => "#b8860b", "dark gray" => "#a9a9a9", "dark green" => "#013220", "dark jungle green" => "#1a2421", "dark khaki" => "#bdb76b", "dark lava" => "#483c32", "dark lavender" => "#734f96", "dark magenta" => "#8b008b", "dark midnight blue" => "#003366", "dark olive green" => "#556b2f", "dark orange" => "#ff8c00", "dark orchid" => "#9932cc", "dark pastel blue" => "#779ecb", "dark pastel green" => "#03c03c", "dark pastel purple" => "#966fd6", "dark pastel red" => "#c23b22", "dark pink" => "#e75480", "dark powder blue" => "#003399", "dark raspberry" => "#872657", "dark red" => "#8b0000", "dark salmon" => "#e9967a", "dark scarlet" => "#560319", "dark sea green" => "#8fbc8f", "dark sienna" => "#3c1414", "dark slate blue" => "#483d8b", "dark slate gray" => "#2f4f4f", "dark spring green" => "#177245", "dark tan" => "#918151", "dark tangerine" => "#ffa812", "dark taupe" => "#483c32", "dark terra cotta" => "#cc4e5c", "dark turquoise" => "#00ced1", "dark violet" => "#9400d3", "dartmouth green" => "#00693e", "davy gray" => "#555555", "fuchsia" => "#ff00ff", "gray" => 
"#808080", "green" => "#00ff00", "magenta" => "#ff00ff", "maroon" => "#800000", "navy blue" => "#000080", "olive" => "#808000", "purple" => "#800080", "red" => "#ff0000", "silver" => "#c0c0c0", "teal" => "#008080", "white" => "#ffffff", "yellow" => "#ffff00", };
true
966c6b84ef1189bcb621064bad390591ac6c9c3f
Rust
jianantian/wgpu-rust-renderer
/src/math/matrix3gpu.rs
UTF-8
922
3.171875
3
[ "MIT" ]
permissive
const ELEMENT_NUM: usize = 12; type Elements = [f32; ELEMENT_NUM]; pub struct Matrix3GPU { } impl Matrix3GPU { pub fn create() -> Elements { let mut elements = [0.0; ELEMENT_NUM]; Self::identity(&mut elements); elements } pub fn identity(m: &mut Elements) -> &mut Elements { m[0] = 1.0; m[1] = 0.0; m[2] = 0.0; m[3] = 0.0; m[4] = 0.0; m[5] = 1.0; m[6] = 0.0; m[7] = 0.0; m[8] = 0.0; m[9] = 0.0; m[10] = 1.0; m[11] = 0.0; m } pub fn copy<'a>(m: &'a mut Elements, src: &'a Elements) -> &'a mut Elements { for i in 0..ELEMENT_NUM { m[i] = src[i]; } m } pub fn copy_from_matrix3<'a>(m: &'a mut Elements, src: &'a [f32; 9]) -> &'a mut Elements { // @TODO: Use loop? m[0] = src[0]; m[1] = src[1]; m[2] = src[2]; m[3] = 0.0; m[4] = src[3]; m[5] = src[4]; m[6] = src[5]; m[7] = 0.0; m[8] = src[6]; m[9] = src[7]; m[10] = src[8]; m[11] = 0.0; m } }
true
f52720fbf36d1d2955138eea1478aaf182c7212c
Rust
dannymcgee/lox
/packages/vm/src/vector/into_iter.rs
UTF-8
783
2.765625
3
[ "MIT" ]
permissive
use std::{mem, ptr}; use super::Vector; pub struct IntoIter<T> { _inner: Vector<T>, start: *const T, end: *const T, } impl<T> IntoIterator for Vector<T> { type Item = T; type IntoIter = IntoIter<T>; fn into_iter(self) -> IntoIter<T> { unsafe { let vec = ptr::read(&self); let len = self.len; mem::forget(self); IntoIter { start: vec.ptr(), end: vec.ptr().add(len), _inner: vec, } } } } impl<T> Iterator for IntoIter<T> { type Item = T; fn next(&mut self) -> Option<Self::Item> { if self.start == self.end { None } else { unsafe { let result = ptr::read(self.start); self.start = self.start.offset(1); Some(result) } } } } impl<T> Drop for IntoIter<T> { fn drop(&mut self) { for _ in &mut *self {} } }
true
ecb82d56eb866d989a82c246be61656725d63a7c
Rust
finalfire/imnotprovable
/src/dx9.rs
UTF-8
2,517
2.796875
3
[]
no_license
mod dx; use dx::{*}; use nannou::prelude::*; fn view(app: &App, frame: Frame) { // do not write after the first frame if app.elapsed_frames() > 1 { return; } // Begin drawing let draw = app.draw(); let colors: Vec<Rgb> = [ "0fa3b1","d9e5d6","eddea4","f7a072","ff9b42", "7d4e57", "d66853", "4357ad","48a9a6","e4dfda","d4b483","c1666b" ].iter().map(|c| shex2dec(c)).collect(); let transparent = rgba(1.0, 1.0, 1.0, 0.0); draw.background().color(shex2dec("f3f5eb")); let start_x = -194.; let start_y = 188.; let size = 33.; let offset = 31.0; for k in 0..5 { for i in 0..8 { for j in 0..8 { let dmt = (i as f32 / size) * random_f32() * 50.; println!("{}, {}, {}", i, size, dmt); let px = start_x + (size * j as f32) + (offset * j as f32);// + dmt; let py = start_y - (size * i as f32) - (offset * i as f32); let rnd_color = pick_color(&colors); let alpha = random_range(0.5, 1.0); let curr_angle = if true { 0. } else { random_range(-90., 90.) }; let curr_size = if k == 0 { size } else { size * (1. / k as f32) }; let curr_weight = if true { random_range(0.90, 0.99) } else { random_range(0.25, 0.85) }; draw.rect() .x_y(px, py) .w_h(curr_size, curr_size) .color(transparent) .z_degrees(curr_angle) .stroke_color(rgba(rnd_color.red, rnd_color.green, rnd_color.blue, alpha)) .stroke_weight(curr_weight); } } } // Write the result of our drawing to the window's frame. draw.to_frame(app, &frame).unwrap(); // Capture the frame! if app.elapsed_frames() == 1 { let file_path = captured_frame_path(app, &frame); app.main_window().capture_frame(file_path); } } fn captured_frame_path(app: &App, frame: &Frame) -> std::path::PathBuf { // Create a path that we want to save this frame to. app.project_path() .expect("failed to locate `project_path`") // Name each file after the number of the frame. .join(format!("{:03}", frame.nth())) // The extension will be PNG. We also support tiff, bmp, gif, jpeg, webp and some others. .with_extension("png") } fn main() { nannou::sketch(view).size(768,768).run(); }
true
88f2193f73d191b8bf8898cef7dbd927faae92f1
Rust
dwalker109/aoc-2015
/day01/src/main.rs
UTF-8
1,201
3.21875
3
[]
no_license
use itertools::FoldWhile::{Continue, Done}; use itertools::Itertools; fn main() { let p1 = part1("./input"); let p2 = part2("./input"); println!("Part 1: {}", p1); println!("Part 2: {}", p2); } fn part1(path: &str) -> isize { let dat = std::fs::read_to_string(path).unwrap(); let floor = dat.split("").fold(0, |acc, elem| match elem { "(" => return acc + 1, ")" => return acc - 1, _ => acc, }); floor } fn part2(path: &str) -> usize { let dat = std::fs::read_to_string(path).unwrap(); let pos = dat .split("") .enumerate() .fold_while(0 as isize, |mut acc, (pos, elem)| { match elem { "(" => acc += 1, ")" => acc -= 1, _ => (), } if acc == -1 { return Done(pos as isize); } else { Continue(acc) } }) .into_inner(); pos as usize } #[cfg(test)] mod test { use super::*; #[test] fn test_part1() { assert_eq!(part1("./input"), 138); } #[test] fn test_part2() { assert_eq!(part2("./input"), 1771); } }
true
4fc94ff262f837398e03d7d53d2390ec05c7f6e4
Rust
konradsz/adventofcode2020
/day09/src/main.rs
UTF-8
1,364
3.265625
3
[]
no_license
use itertools::Itertools; use std::fs; fn part_1(numbers: &[usize]) -> usize { const PREAMBLE_SIZE: usize = 25; for i in PREAMBLE_SIZE..numbers.len() { let preamble = &numbers[i - PREAMBLE_SIZE..i]; let mut perms = preamble.iter().permutations(2); if perms .find(|p| p.iter().cloned().sum::<usize>() == numbers[i]) .is_none() { return numbers[i]; } } unreachable!() } fn part_2(numbers: &[usize]) -> usize { const SUM: usize = 133_015_568; for starting_index in 0..numbers.len() { let mut current_sum = 0; for current_index in starting_index..numbers.len() { current_sum += numbers[current_index]; if current_sum == SUM { let slice = &numbers[starting_index..current_index]; let min = slice.iter().min().unwrap(); let max = slice.iter().max().unwrap(); return min + max; } else if current_sum > SUM { break; } } } unreachable!() } fn main() { let input = fs::read_to_string("input").expect("file not found"); let numbers: Vec<usize> = input.lines().map(|l| l.parse::<usize>().unwrap()).collect(); assert_eq!(part_1(&numbers), 133_015_568); assert_eq!(part_2(&numbers), 16_107_959); }
true
d2243e8e3ea1c94e6a5abd4685c5ad0b6c978b96
Rust
alyssais/hyperx
/src/common/str.rs
UTF-8
471
3
3
[ "MIT" ]
permissive
use std::ops::Deref; use std::str; use bytes::Bytes; #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct ByteStr(Bytes); impl ByteStr { pub fn as_str(&self) -> &str { unsafe { str::from_utf8_unchecked(self.0.as_ref()) } } } impl Deref for ByteStr { type Target = str; fn deref(&self) -> &str { self.as_str() } } impl<'a> From<&'a str> for ByteStr { fn from(s: &'a str) -> ByteStr { ByteStr(Bytes::from(s)) } }
true
8cb9864fcc1604eb6a981f4a05f36cb3dd9799b4
Rust
DiemoHeuer/rust-meetup-rostock
/2021-11-02/demos/warp-demo/src/api/error.rs
UTF-8
319
2.578125
3
[]
no_license
use serde_derive::Deserialize;
use serde_derive::Serialize;
use thiserror::Error;

/// Demo API error type. `thiserror` derives `Display`/`std::error::Error`
/// from the `#[error]` attributes; serde derives let the error be
/// (de)serialized in responses.
#[derive(Debug, Deserialize, Error, Serialize)]
pub enum Error {
    #[error("This is custom error 1.")]
    CustomError1,
    #[error("This is custom error 2.")]
    CustomError2,
}

// Marker impl so these errors can flow through warp's rejection system
// (`warp::reject::custom`) and be recovered in a rejection handler.
impl warp::reject::Reject for Error {}
true
fd4d5dae3cc4c44cd4d203699ab2480f0347bac5
Rust
h4hany/leetcode
/python_solutions/1417.reformat-the-string.rs
UTF-8
5,612
3.25
3
[]
no_license
/*
 * @lc app=leetcode id=1417 lang=rust
 *
 * [1417] Reformat The String
 * https://leetcode.com/problems/reformat-the-string/description/
 *
 * Given an alphanumeric string s (lowercase English letters and digits),
 * return a permutation of s in which no two adjacent characters are of the
 * same type (letter/digit), or "" if that is impossible.
 *
 * Examples:
 *   "a0b1c2"     -> "0a1b2c"  (any valid interleaving is accepted)
 *   "leetcode"   -> ""        (letters only)
 *   "1229857369" -> ""        (digits only)
 *   "covid2019"  -> "c2o0v1i9d"
 *   "ab123"      -> "1a2b3"
 *
 * Constraints: 1 <= s.length <= 500, lowercase letters and digits only.
 */
impl Solution {
    /// Interleaves digits and letters so that no two adjacent characters
    /// share a type; returns "" when the counts differ by more than one.
    ///
    /// Strategy: write digits into odd slots (1, 3, 5, ...) and letters into
    /// even slots (2, 4, 6, ...) of a scratch buffer pre-filled with the
    /// sentinel '#' (safe: the input is alphanumeric, so '#' never occurs),
    /// then drop the unused sentinel slots. Slot 0 is reserved so that when
    /// there is one more letter than digits, the last letter can be moved to
    /// the front to restore alternation.
    pub fn reformat(s: String) -> String {
        // Input is ASCII per the constraints, so byte length == char count.
        let n = s.len();
        let mut cache: Vec<char> = vec!['#'; n * 2 + 2];
        // `i` is the next free digit slot, `j` the next free letter slot.
        let (mut i, mut j) = (1_i32, 2_i32);
        for c in s.chars() {
            if c.is_numeric() {
                cache[i as usize] = c;
                i += 2;
            } else {
                cache[j as usize] = c;
                j += 2;
            }
        }
        // With d digits and l letters: i = 1 + 2d and j = 2 + 2l, so
        // i - j >= 3  <=>  d - l >= 2, and j - i > 3  <=>  l - d >= 2.
        // Either way the counts differ by 2+ and no arrangement exists.
        if i - j >= 3 || j - i > 3 {
            return "".to_string();
        }
        // Here |i - j| > 1 means l = d + 1: move the last letter (slot
        // j - 2) into reserved slot 0 so the result starts AND ends with a
        // letter.
        if (i - j).abs() > 1 {
            cache.swap(0, j as usize - 2);
        }
        cache.iter().filter(|c| **c != '#').collect::<String>()
    }
}

/// Receiver type for the LeetCode-style `impl Solution` block. This was
/// commented out in the original, which made the file fail to compile on
/// its own.
pub struct Solution;

use std::cmp::max;
use std::cmp::min;
use std::collections::HashMap;
use std::collections::HashSet;
use std::fmt::Debug;
use std::hash::Hash;
use std::iter::FromIterator;
// use std::collections::VecDeque;
// use std::collections::BTreeMap;
use std::any::type_name;
use std::collections::BinaryHeap;

/// Grab-bag of helpers shared across solutions in this repository.
pub struct Helper;
impl Helper {
    /// Convert a vector of &str to a vector of String for coding convenience.
    pub fn stringify(str_vector: Vec<&str>) -> Vec<String> {
        str_vector
            .iter()
            .map(|c| c.to_string())
            .collect::<Vec<String>>()
    }
}

/// Prints the compile-time type of a variable (debugging aid).
pub fn print_type_of<T>(_: &T) {
    println!("{}", std::any::type_name::<T>())
}

/// Prints every key/value pair of a map (debugging aid; order is arbitrary).
#[allow(dead_code)]
pub fn print_map<K: Debug + Eq + Hash, V: Debug>(map: &HashMap<K, V>) {
    for (k, v) in map.iter() {
        println!("{:?}: {:?}", k, v);
    }
}

#[allow(dead_code)]
pub fn say_vec(nums: Vec<i32>) {
    println!("{:?}", nums);
}

/// Counts how many times each character occurs in `s`.
#[allow(dead_code)]
pub fn char_frequency(s: String) -> HashMap<char, i32> {
    let mut res: HashMap<char, i32> = HashMap::new();
    for c in s.chars() {
        *res.entry(c).or_insert(0) += 1;
    }
    res
}

/// Counts how many times each value occurs in `arr`.
#[allow(dead_code)]
pub fn vec_counter(arr: Vec<i32>) -> HashMap<i32, i32> {
    let mut c = HashMap::new();
    for n in arr {
        *c.entry(n).or_insert(0) += 1;
    }
    c
}

#[allow(dead_code)]
pub fn vec_to_hashset(arr: Vec<i32>) -> HashSet<i32> {
    HashSet::from_iter(arr.iter().cloned())
}

/// Converts 0 to 'a', 1 to 'b', ... 25 to 'z'. Panics outside 0..=25.
#[allow(dead_code)]
pub fn int_to_char(n: i32) -> char {
    assert!(n >= 0 && n <= 25);
    (n as u8 + 'a' as u8) as char
}

#[allow(dead_code)]
fn sayi32(i: i32) {
    println!("{}", i);
}

#[allow(dead_code)]
fn sayi32_arr(arr: &Vec<i32>) {
    println!("{:?}", arr);
}

/// Index of the first element >= `target` (Python `bisect.bisect_left`
/// semantics). Fixed: the original initialized `hi = arr.len() - 1`, which
/// underflowed (panicked) on an empty vector and returned `len - 1` instead
/// of `len` when `target` is greater than every element.
#[allow(dead_code)]
pub fn bisect_left(arr: &Vec<i32>, target: i32) -> usize {
    let (mut lo, mut hi) = (0, arr.len());
    while lo < hi {
        let mid = (lo + hi) / 2;
        if arr[mid] < target {
            lo = mid + 1;
        } else {
            hi = mid;
        }
    }
    lo
}

/// Index just past the last element <= `target` (Python
/// `bisect.bisect_right` semantics). Fixed the same empty-vector underflow
/// as `bisect_left`; results for non-empty input are unchanged.
#[allow(dead_code)]
pub fn bisect_right(arr: &Vec<i32>, target: i32) -> usize {
    let (mut lo, mut hi) = (0, arr.len());
    while lo < hi {
        let mid = (lo + hi) / 2;
        if arr[mid] <= target {
            lo = mid + 1;
        } else {
            hi = mid;
        }
    }
    lo
}

/// 1-indexed Fenwick (binary indexed) tree over i32 point updates and
/// prefix sums.
pub struct FenwickTree {
    vals: Vec<i32>,
}

impl FenwickTree {
    /// Creates a tree supporting indices 1..=size.
    pub fn new(size: usize) -> FenwickTree {
        FenwickTree {
            vals: vec![0; size + 1],
        }
    }

    /// Adds `val` at position `i` (1-indexed).
    pub fn update(&mut self, mut i: usize, val: i32) {
        let size = self.vals.len();
        while i < size {
            self.vals[i] += val;
            // `i & (!i + 1)` isolates the lowest set bit of `i`.
            i += i & (!i + 1);
        }
    }

    /// Returns the prefix sum of positions 1..=i.
    pub fn get(&mut self, mut i: usize) -> i32 {
        let mut res = 0;
        while i > 0 {
            res += self.vals[i];
            i -= i & (!i + 1);
        }
        res
    }
}

#[allow(dead_code)]
fn get_vector_sum(a: &Vec<i32>) -> i32 {
    a.iter().sum()
}

#[allow(dead_code)]
fn get_vector_product(a: &Vec<i32>) -> i32 {
    a.iter().product()
}
true
7da32e735c4a5e5fbcf35040a8c47b8a3c0412b6
Rust
dakom/awsm-renderer
/crate/src/camera/traits.rs
UTF-8
3,069
2.9375
3
[ "MIT", "Apache-2.0", "BSD-2-Clause" ]
permissive
use crate::prelude::*;
use nalgebra::{Isometry3, Matrix4, Point2, Point3, Point4, Vector2, Vector3};
use std::borrow::Cow;

/// Minimal camera interface: world placement plus the matrices that map
/// world coordinates to device coordinates.
pub trait CameraBase {
    /// The camera position in world space.
    fn position(&self) -> Vector3<f64>;

    /// Inverse of the combined `projection * view` matrix.
    fn projection_view_inverse(&self) -> &Matrix4<f64>;

    /// The view matrix
    fn view(&self) -> &Matrix4<f64>;

    /// The projection matrix
    fn projection(&self) -> &Matrix4<f64>;

    /// Recomputes the projection for a new viewport size, in pixels.
    /// (The original doc comment here said "The projection matrix" — a
    /// copy-paste leftover from the method above.)
    fn update_viewport(&mut self, width: u32, height: u32);
}

/// Trait to help consolidate cameras, but not strictly required
pub trait CameraExt: CameraBase {
    /*
     * Transformation-related methods.
     */
    /// The camera position.
    fn eye(&self) -> Point3<f64>; // FIXME: should this be here?

    /// The transformation applied by the camera to transform a point in world coordinates to
    /// a point in device coordinates.
    /// same as projection * view
    fn transformation(&self) -> &Matrix4<f64>;

    /// The transformation applied by the camera to transform point in device coordinates to a
    /// point in world coordinate.
    fn inverse_transformation(&self) -> &Matrix4<f64>;

    /// The clipping planes, aka. (`znear`, `zfar`).
    fn clip_planes(&self) -> (f64, f64); // FIXME: should this be here?

    /*
     * Update & upload
     */
    // NOTE(review): a stale doc comment here read "Upload the camera view
    // and projection to the gpu…" but no such method exists in this trait —
    // presumably removed at some point; dropped so it does not misdocument
    // `project`.

    /// Converts a 3d point to 2d screen coordinates, assuming the screen has
    /// the size `size`.
    ///
    /// Panics if the transformed point lands at w == 0
    /// (`from_homogeneous` returns `None` and is unwrapped).
    ///
    /// NOTE(review): `unproject` flips the window y axis
    /// (`-window_coord.y`) but this method does not — the two are only
    /// inverses if `project` callers use a bottom-left window origin.
    /// Confirm the intended convention.
    fn project(&self, world_coord: &Point3<f64>, size: &Vector2<f64>) -> Vector2<f64> {
        let h_world_coord = world_coord.to_homogeneous();
        let h_normalized_coord = self.transformation() * h_world_coord;

        // Perspective divide back to normalized device coordinates.
        let normalized_coord = Point3::from_homogeneous(h_normalized_coord).unwrap();

        Vector2::new(
            (1.0 + normalized_coord.x) * size.x / 2.0,
            (1.0 + normalized_coord.y) * size.y / 2.0,
        )
    }

    /// Converts a point in 2d screen coordinates to a ray (a 3d position and a direction).
    ///
    /// The screen is assumed to have a size given by `size`.
    fn unproject(
        &self,
        window_coord: &Point2<f64>,
        size: &Vector2<f64>,
    ) -> (Point3<f64>, Vector3<f64>) {
        // Window pixels -> NDC, flipping y (window y grows downward).
        let normalized_coord = Point2::new(
            2.0 * window_coord.x / size.x - 1.0,
            2.0 * -window_coord.y / size.y + 1.0,
        );

        // Ray endpoints on the near (z = -1) and far (z = +1) clip planes.
        let normalized_begin = Point4::new(normalized_coord.x, normalized_coord.y, -1.0, 1.0);
        let normalized_end = Point4::new(normalized_coord.x, normalized_coord.y, 1.0, 1.0);

        let cam = self.inverse_transformation();

        let h_unprojected_begin = cam * normalized_begin;
        let h_unprojected_end = cam * normalized_end;

        // Panics if either point comes back with w == 0.
        let unprojected_begin = Point3::from_homogeneous(h_unprojected_begin.coords).unwrap();
        let unprojected_end = Point3::from_homogeneous(h_unprojected_end.coords).unwrap();

        (
            unprojected_begin,
            (unprojected_end - unprojected_begin).normalize(),
        )
    }
}
true
4b262e4553e9e6ab12af143484ab812d97a3adb9
Rust
doubt72/doubtful
/src/tokenizer.rs
UTF-8
2,741
3.578125
4
[]
no_license
// Super simple tokenizer/scanner: use encoding::Token; fn next_token(chars: &Vec<char>, start: usize) -> (Token, usize) { let reserved = [':', ';', ',', '(', ')', '[', ']', '{', '}', '"', '#']; let mut index = start; let mut c = chars[index]; while c.is_whitespace() { if index == chars.len() - 1 { // EOF is only returned with trailing whitespace (or closing comment), but // we need to return something when there's no "real" token left to return return (Token::EOF, index + 1); } index += 1; c = chars[index]; } let from = index; match c { ':' => return (Token::Colon, index + 1), ';' => return (Token::Semicolon, index + 1), ',' => return (Token::Comma, index + 1), '(' => return (Token::OpenParen, index + 1), ')' => return (Token::CloseParen, index + 1), '[' => return (Token::OpenBracket, index + 1), ']' => return (Token::CloseBracket, index + 1), '{' => return (Token::OpenBrace, index + 1), '}' => return (Token::CloseBrace, index + 1), '#' => { index += 1; c = chars[index]; while index < chars.len() - 1 && c != '\n' && c != '\r' { index += 1; c = chars[index]; } // This is a comment, so we return the next token after it next_token(&chars, index) } '"' => { index += 1; c = chars[index]; while index < chars.len() - 1 && c != '"' { index += 1; c = chars[index]; } let s = chars[from + 1..index].iter().cloned().collect(); if c != '"' { // TODO: Do this in a more controlled way panic!("Unterminated string in source: {}", s); } (Token::String(s), index + 1) } _ => { while index < chars.len() - 1 && !c.is_whitespace() && !reserved.contains(&c) { index += 1; c = chars[index]; } let s:String = chars[from..index].iter().cloned().collect(); if s == "true" { return (Token::True, index); } else if s == "false" { return (Token::False, index); } else if s == "nil" { return (Token::Nil, index); } match s.parse::<i64>() { Ok(n) => (Token::Integer(n), index), _ => { match s.parse::<f64>() { Ok(n) => (Token::Float(n), index), _ => (Token::ID(s), index), } }, } }, } } pub fn tokenize(s: 
&str) -> Vec<Token> { let chars:Vec<char> = s.chars().collect(); let mut tokens = Vec::new(); let mut index = 0; while index < chars.len() { let (token, change) = next_token(&chars, index); index = change; // For debugging: //println!("{}:{:?}", index, token); tokens.push(token); } tokens }
true
66feb7e77dc8d0513282ea724e109d213ae20634
Rust
GameRuiner/adventofcode
/src/day6.rs
UTF-8
1,792
3.359375
3
[]
no_license
use std::collections::HashSet;

/// Advent of Code 2020, day 6.
///
/// `groups` is the puzzle input as lines; blank lines separate groups of
/// people. Returns the part-1 answer (sum over groups of the number of
/// questions *anyone* answered "yes" to) and prints the part-2 answer
/// (questions *everyone* answered "yes" to) to stdout, as before.
#[allow(dead_code)]
pub fn custom_customs(groups: &[String]) -> usize {
    let mut count = 0; // part 1 running total
    let mut count2 = 0; // part 2 running total
    let mut group = String::new(); // all answers in the group, concatenated
    let mut group2: Vec<&String> = Vec::new(); // one entry per person
    for line in groups {
        if line.is_empty() {
            // Blank line closes out the current group.
            count += answer_count(&group);
            count2 += answer_count2(&group2);
            group.clear();
            group2.clear();
        } else {
            group.push_str(line);
            group2.push(line);
        }
    }
    // The input need not end with a blank line, so flush the final group.
    count += answer_count(&group);
    count2 += answer_count2(&group2);
    println!("{}", count2);
    count
}

/// Number of distinct questions answered "yes" by *anyone* in the group
/// (`group` is every person's answers concatenated together).
fn answer_count(group: &String) -> usize {
    group.chars().collect::<HashSet<char>>().len()
}

/// Number of questions answered "yes" by *everyone* in the group.
/// Returns 0 for an empty group — the original indexed `group[0]` and
/// panicked when the input ended with a blank line.
fn answer_count2(group: &[&String]) -> usize {
    let mut people = group.iter();
    let mut answers: HashSet<char> = match people.next() {
        Some(first) => first.chars().collect(),
        None => return 0,
    };
    for person in people {
        let person_answers: HashSet<char> = person.chars().collect();
        answers.retain(|c| person_answers.contains(c));
    }
    answers.len()
}

// (Test names were swapped in the original: each now exercises the
// function it is named after.)
#[test]
fn test_answer_count() {
    assert_eq!(answer_count(&"abc".to_string()), 3);
    assert_eq!(answer_count(&"abac".to_string()), 3);
    assert_eq!(answer_count(&"aaaa".to_string()), 1);
    assert_eq!(answer_count(&"b".to_string()), 1);
}

#[test]
fn test_custom_customs() {
    assert_eq!(
        custom_customs(&["abcx".to_string(), "abcy".to_string(), "abcz".to_string()]),
        6
    );
}
true
d686fd97569c439a15916305f9d1886461d8f22d
Rust
nilsmartel/iced
/native/src/widget/scrollable.rs
UTF-8
28,899
2.75
3
[ "MIT" ]
permissive
//! Navigate an endless amount of content with a scrollbar. use crate::event::{self, Event}; use crate::layout; use crate::mouse; use crate::overlay; use crate::renderer; use crate::touch; use crate::widget; use crate::widget::operation::{self, Operation}; use crate::widget::tree::{self, Tree}; use crate::{ Background, Clipboard, Color, Command, Element, Layout, Length, Point, Rectangle, Shell, Size, Vector, Widget, }; use std::{f32, u32}; pub use iced_style::scrollable::StyleSheet; pub mod style { //! The styles of a [`Scrollable`]. //! //! [`Scrollable`]: crate::widget::Scrollable pub use iced_style::scrollable::{Scrollbar, Scroller}; } /// A widget that can vertically display an infinite amount of content with a /// scrollbar. #[allow(missing_debug_implementations)] pub struct Scrollable<'a, Message, Renderer> where Renderer: crate::Renderer, Renderer::Theme: StyleSheet, { id: Option<Id>, height: Length, scrollbar_width: u16, scrollbar_margin: u16, scroller_width: u16, content: Element<'a, Message, Renderer>, on_scroll: Option<Box<dyn Fn(f32) -> Message + 'a>>, style: <Renderer::Theme as StyleSheet>::Style, } impl<'a, Message, Renderer> Scrollable<'a, Message, Renderer> where Renderer: crate::Renderer, Renderer::Theme: StyleSheet, { /// Creates a new [`Scrollable`]. pub fn new(content: impl Into<Element<'a, Message, Renderer>>) -> Self { Scrollable { id: None, height: Length::Shrink, scrollbar_width: 10, scrollbar_margin: 0, scroller_width: 10, content: content.into(), on_scroll: None, style: Default::default(), } } /// Sets the [`Id`] of the [`Scrollable`]. pub fn id(mut self, id: Id) -> Self { self.id = Some(id); self } /// Sets the height of the [`Scrollable`]. pub fn height(mut self, height: Length) -> Self { self.height = height; self } /// Sets the scrollbar width of the [`Scrollable`] . /// Silently enforces a minimum value of 1. 
pub fn scrollbar_width(mut self, scrollbar_width: u16) -> Self { self.scrollbar_width = scrollbar_width.max(1); self } /// Sets the scrollbar margin of the [`Scrollable`] . pub fn scrollbar_margin(mut self, scrollbar_margin: u16) -> Self { self.scrollbar_margin = scrollbar_margin; self } /// Sets the scroller width of the [`Scrollable`] . /// /// It silently enforces a minimum value of 1. pub fn scroller_width(mut self, scroller_width: u16) -> Self { self.scroller_width = scroller_width.max(1); self } /// Sets a function to call when the [`Scrollable`] is scrolled. /// /// The function takes the new relative offset of the [`Scrollable`] /// (e.g. `0` means top, while `1` means bottom). pub fn on_scroll(mut self, f: impl Fn(f32) -> Message + 'a) -> Self { self.on_scroll = Some(Box::new(f)); self } /// Sets the style of the [`Scrollable`] . pub fn style( mut self, style: impl Into<<Renderer::Theme as StyleSheet>::Style>, ) -> Self { self.style = style.into(); self } } impl<'a, Message, Renderer> Widget<Message, Renderer> for Scrollable<'a, Message, Renderer> where Renderer: crate::Renderer, Renderer::Theme: StyleSheet, { fn tag(&self) -> tree::Tag { tree::Tag::of::<State>() } fn state(&self) -> tree::State { tree::State::new(State::new()) } fn children(&self) -> Vec<Tree> { vec![Tree::new(&self.content)] } fn diff(&self, tree: &mut Tree) { tree.diff_children(std::slice::from_ref(&self.content)) } fn width(&self) -> Length { self.content.as_widget().width() } fn height(&self) -> Length { self.height } fn layout( &self, renderer: &Renderer, limits: &layout::Limits, ) -> layout::Node { layout( renderer, limits, Widget::<Message, Renderer>::width(self), self.height, u32::MAX, |renderer, limits| { self.content.as_widget().layout(renderer, limits) }, ) } fn operate( &self, tree: &mut Tree, layout: Layout<'_>, operation: &mut dyn Operation<Message>, ) { let state = tree.state.downcast_mut::<State>(); operation.scrollable(state, self.id.as_ref().map(|id| &id.0)); 
operation.container(None, &mut |operation| { self.content.as_widget().operate( &mut tree.children[0], layout.children().next().unwrap(), operation, ); }); } fn on_event( &mut self, tree: &mut Tree, event: Event, layout: Layout<'_>, cursor_position: Point, renderer: &Renderer, clipboard: &mut dyn Clipboard, shell: &mut Shell<'_, Message>, ) -> event::Status { update( tree.state.downcast_mut::<State>(), event, layout, cursor_position, clipboard, shell, self.scrollbar_width, self.scrollbar_margin, self.scroller_width, &self.on_scroll, |event, layout, cursor_position, clipboard, shell| { self.content.as_widget_mut().on_event( &mut tree.children[0], event, layout, cursor_position, renderer, clipboard, shell, ) }, ) } fn draw( &self, tree: &Tree, renderer: &mut Renderer, theme: &Renderer::Theme, style: &renderer::Style, layout: Layout<'_>, cursor_position: Point, _viewport: &Rectangle, ) { draw( tree.state.downcast_ref::<State>(), renderer, theme, layout, cursor_position, self.scrollbar_width, self.scrollbar_margin, self.scroller_width, self.style, |renderer, layout, cursor_position, viewport| { self.content.as_widget().draw( &tree.children[0], renderer, theme, style, layout, cursor_position, viewport, ) }, ) } fn mouse_interaction( &self, tree: &Tree, layout: Layout<'_>, cursor_position: Point, _viewport: &Rectangle, renderer: &Renderer, ) -> mouse::Interaction { mouse_interaction( tree.state.downcast_ref::<State>(), layout, cursor_position, self.scrollbar_width, self.scrollbar_margin, self.scroller_width, |layout, cursor_position, viewport| { self.content.as_widget().mouse_interaction( &tree.children[0], layout, cursor_position, viewport, renderer, ) }, ) } fn overlay<'b>( &'b self, tree: &'b mut Tree, layout: Layout<'_>, renderer: &Renderer, ) -> Option<overlay::Element<'b, Message, Renderer>> { self.content .as_widget() .overlay( &mut tree.children[0], layout.children().next().unwrap(), renderer, ) .map(|overlay| { let bounds = layout.bounds(); let content_layout = 
layout.children().next().unwrap(); let content_bounds = content_layout.bounds(); let offset = tree .state .downcast_ref::<State>() .offset(bounds, content_bounds); overlay.translate(Vector::new(0.0, -(offset as f32))) }) } } impl<'a, Message, Renderer> From<Scrollable<'a, Message, Renderer>> for Element<'a, Message, Renderer> where Message: 'a, Renderer: 'a + crate::Renderer, Renderer::Theme: StyleSheet, { fn from( text_input: Scrollable<'a, Message, Renderer>, ) -> Element<'a, Message, Renderer> { Element::new(text_input) } } /// The identifier of a [`Scrollable`]. #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct Id(widget::Id); impl Id { /// Creates a custom [`Id`]. pub fn new(id: impl Into<std::borrow::Cow<'static, str>>) -> Self { Self(widget::Id::new(id)) } /// Creates a unique [`Id`]. /// /// This function produces a different [`Id`] every time it is called. pub fn unique() -> Self { Self(widget::Id::unique()) } } /// Produces a [`Command`] that snaps the [`Scrollable`] with the given [`Id`] /// to the provided `percentage`. pub fn snap_to<Message: 'static>(id: Id, percentage: f32) -> Command<Message> { Command::widget(operation::scrollable::snap_to(id.0, percentage)) } /// Computes the layout of a [`Scrollable`]. pub fn layout<Renderer>( renderer: &Renderer, limits: &layout::Limits, width: Length, height: Length, max_height: u32, layout_content: impl FnOnce(&Renderer, &layout::Limits) -> layout::Node, ) -> layout::Node { let limits = limits.max_height(max_height).width(width).height(height); let child_limits = layout::Limits::new( Size::new(limits.min().width, 0.0), Size::new(limits.max().width, f32::INFINITY), ); let content = layout_content(renderer, &child_limits); let size = limits.resolve(content.size()); layout::Node::with_children(size, vec![content]) } /// Processes an [`Event`] and updates the [`State`] of a [`Scrollable`] /// accordingly. 
pub fn update<Message>( state: &mut State, event: Event, layout: Layout<'_>, cursor_position: Point, clipboard: &mut dyn Clipboard, shell: &mut Shell<'_, Message>, scrollbar_width: u16, scrollbar_margin: u16, scroller_width: u16, on_scroll: &Option<Box<dyn Fn(f32) -> Message + '_>>, update_content: impl FnOnce( Event, Layout<'_>, Point, &mut dyn Clipboard, &mut Shell<'_, Message>, ) -> event::Status, ) -> event::Status { let bounds = layout.bounds(); let is_mouse_over = bounds.contains(cursor_position); let content = layout.children().next().unwrap(); let content_bounds = content.bounds(); let scrollbar = scrollbar( state, scrollbar_width, scrollbar_margin, scroller_width, bounds, content_bounds, ); let is_mouse_over_scrollbar = scrollbar .as_ref() .map(|scrollbar| scrollbar.is_mouse_over(cursor_position)) .unwrap_or(false); let event_status = { let cursor_position = if is_mouse_over && !is_mouse_over_scrollbar { Point::new( cursor_position.x, cursor_position.y + state.offset(bounds, content_bounds) as f32, ) } else { // TODO: Make `cursor_position` an `Option<Point>` so we can encode // cursor availability. // This will probably happen naturally once we add multi-window // support. Point::new(cursor_position.x, -1.0) }; update_content( event.clone(), content, cursor_position, clipboard, shell, ) }; if let event::Status::Captured = event_status { return event::Status::Captured; } if is_mouse_over { match event { Event::Mouse(mouse::Event::WheelScrolled { delta }) => { match delta { mouse::ScrollDelta::Lines { y, .. } => { // TODO: Configurable speed (?) state.scroll(y * 60.0, bounds, content_bounds); } mouse::ScrollDelta::Pixels { y, .. } => { state.scroll(y, bounds, content_bounds); } } notify_on_scroll( state, on_scroll, bounds, content_bounds, shell, ); return event::Status::Captured; } Event::Touch(event) => { match event { touch::Event::FingerPressed { .. } => { state.scroll_box_touched_at = Some(cursor_position); } touch::Event::FingerMoved { .. 
} => { if let Some(scroll_box_touched_at) = state.scroll_box_touched_at { let delta = cursor_position.y - scroll_box_touched_at.y; state.scroll(delta, bounds, content_bounds); state.scroll_box_touched_at = Some(cursor_position); notify_on_scroll( state, on_scroll, bounds, content_bounds, shell, ); } } touch::Event::FingerLifted { .. } | touch::Event::FingerLost { .. } => { state.scroll_box_touched_at = None; } } return event::Status::Captured; } _ => {} } } if state.is_scroller_grabbed() { match event { Event::Mouse(mouse::Event::ButtonReleased(mouse::Button::Left)) | Event::Touch(touch::Event::FingerLifted { .. }) | Event::Touch(touch::Event::FingerLost { .. }) => { state.scroller_grabbed_at = None; return event::Status::Captured; } Event::Mouse(mouse::Event::CursorMoved { .. }) | Event::Touch(touch::Event::FingerMoved { .. }) => { if let (Some(scrollbar), Some(scroller_grabbed_at)) = (scrollbar, state.scroller_grabbed_at) { state.scroll_to( scrollbar.scroll_percentage( scroller_grabbed_at, cursor_position, ), bounds, content_bounds, ); notify_on_scroll( state, on_scroll, bounds, content_bounds, shell, ); return event::Status::Captured; } } _ => {} } } else if is_mouse_over_scrollbar { match event { Event::Mouse(mouse::Event::ButtonPressed(mouse::Button::Left)) | Event::Touch(touch::Event::FingerPressed { .. }) => { if let Some(scrollbar) = scrollbar { if let Some(scroller_grabbed_at) = scrollbar.grab_scroller(cursor_position) { state.scroll_to( scrollbar.scroll_percentage( scroller_grabbed_at, cursor_position, ), bounds, content_bounds, ); state.scroller_grabbed_at = Some(scroller_grabbed_at); notify_on_scroll( state, on_scroll, bounds, content_bounds, shell, ); return event::Status::Captured; } } } _ => {} } } event::Status::Ignored } /// Computes the current [`mouse::Interaction`] of a [`Scrollable`]. 
pub fn mouse_interaction( state: &State, layout: Layout<'_>, cursor_position: Point, scrollbar_width: u16, scrollbar_margin: u16, scroller_width: u16, content_interaction: impl FnOnce( Layout<'_>, Point, &Rectangle, ) -> mouse::Interaction, ) -> mouse::Interaction { let bounds = layout.bounds(); let content_layout = layout.children().next().unwrap(); let content_bounds = content_layout.bounds(); let scrollbar = scrollbar( state, scrollbar_width, scrollbar_margin, scroller_width, bounds, content_bounds, ); let is_mouse_over = bounds.contains(cursor_position); let is_mouse_over_scrollbar = scrollbar .as_ref() .map(|scrollbar| scrollbar.is_mouse_over(cursor_position)) .unwrap_or(false); if is_mouse_over_scrollbar || state.is_scroller_grabbed() { mouse::Interaction::Idle } else { let offset = state.offset(bounds, content_bounds); let cursor_position = if is_mouse_over && !is_mouse_over_scrollbar { Point::new(cursor_position.x, cursor_position.y + offset as f32) } else { Point::new(cursor_position.x, -1.0) }; content_interaction( content_layout, cursor_position, &Rectangle { y: bounds.y + offset as f32, ..bounds }, ) } } /// Draws a [`Scrollable`]. 
pub fn draw<Renderer>( state: &State, renderer: &mut Renderer, theme: &Renderer::Theme, layout: Layout<'_>, cursor_position: Point, scrollbar_width: u16, scrollbar_margin: u16, scroller_width: u16, style: <Renderer::Theme as StyleSheet>::Style, draw_content: impl FnOnce(&mut Renderer, Layout<'_>, Point, &Rectangle), ) where Renderer: crate::Renderer, Renderer::Theme: StyleSheet, { let bounds = layout.bounds(); let content_layout = layout.children().next().unwrap(); let content_bounds = content_layout.bounds(); let offset = state.offset(bounds, content_bounds); let scrollbar = scrollbar( state, scrollbar_width, scrollbar_margin, scroller_width, bounds, content_bounds, ); let is_mouse_over = bounds.contains(cursor_position); let is_mouse_over_scrollbar = scrollbar .as_ref() .map(|scrollbar| scrollbar.is_mouse_over(cursor_position)) .unwrap_or(false); let cursor_position = if is_mouse_over && !is_mouse_over_scrollbar { Point::new(cursor_position.x, cursor_position.y + offset as f32) } else { Point::new(cursor_position.x, -1.0) }; if let Some(scrollbar) = scrollbar { renderer.with_layer(bounds, |renderer| { renderer.with_translation( Vector::new(0.0, -(offset as f32)), |renderer| { draw_content( renderer, content_layout, cursor_position, &Rectangle { y: bounds.y + offset as f32, ..bounds }, ); }, ); }); let style = if state.is_scroller_grabbed() { theme.dragging(style) } else if is_mouse_over_scrollbar { theme.hovered(style) } else { theme.active(style) }; let is_scrollbar_visible = style.background.is_some() || style.border_width > 0.0; renderer.with_layer( Rectangle { width: bounds.width + 2.0, height: bounds.height + 2.0, ..bounds }, |renderer| { if is_scrollbar_visible { renderer.fill_quad( renderer::Quad { bounds: scrollbar.bounds, border_radius: style.border_radius, border_width: style.border_width, border_color: style.border_color, }, style .background .unwrap_or(Background::Color(Color::TRANSPARENT)), ); } if is_mouse_over || state.is_scroller_grabbed() || 
is_scrollbar_visible { renderer.fill_quad( renderer::Quad { bounds: scrollbar.scroller.bounds, border_radius: style.scroller.border_radius, border_width: style.scroller.border_width, border_color: style.scroller.border_color, }, style.scroller.color, ); } }, ); } else { draw_content( renderer, content_layout, cursor_position, &Rectangle { y: bounds.y + offset as f32, ..bounds }, ); } } fn scrollbar( state: &State, scrollbar_width: u16, scrollbar_margin: u16, scroller_width: u16, bounds: Rectangle, content_bounds: Rectangle, ) -> Option<Scrollbar> { let offset = state.offset(bounds, content_bounds); if content_bounds.height > bounds.height { let outer_width = scrollbar_width.max(scroller_width) + 2 * scrollbar_margin; let outer_bounds = Rectangle { x: bounds.x + bounds.width - outer_width as f32, y: bounds.y, width: outer_width as f32, height: bounds.height, }; let scrollbar_bounds = Rectangle { x: bounds.x + bounds.width - f32::from(outer_width / 2 + scrollbar_width / 2), y: bounds.y, width: scrollbar_width as f32, height: bounds.height, }; let ratio = bounds.height / content_bounds.height; let scroller_height = bounds.height * ratio; let y_offset = offset as f32 * ratio; let scroller_bounds = Rectangle { x: bounds.x + bounds.width - f32::from(outer_width / 2 + scroller_width / 2), y: scrollbar_bounds.y + y_offset, width: scroller_width as f32, height: scroller_height, }; Some(Scrollbar { outer_bounds, bounds: scrollbar_bounds, scroller: Scroller { bounds: scroller_bounds, }, }) } else { None } } fn notify_on_scroll<Message>( state: &State, on_scroll: &Option<Box<dyn Fn(f32) -> Message + '_>>, bounds: Rectangle, content_bounds: Rectangle, shell: &mut Shell<'_, Message>, ) { if content_bounds.height <= bounds.height { return; } if let Some(on_scroll) = on_scroll { shell.publish(on_scroll( state.offset.absolute(bounds, content_bounds) / (content_bounds.height - bounds.height), )); } } /// The local state of a [`Scrollable`]. 
#[derive(Debug, Clone, Copy)]
pub struct State {
    /// Where on the scroller (fraction 0..1 of its height) an in-progress
    /// mouse drag grabbed it, if any.
    scroller_grabbed_at: Option<f32>,
    /// Last touch position inside the scroll box, if a touch drag is active.
    scroll_box_touched_at: Option<Point>,
    /// Current scroll position (pixels or fraction; see [`Offset`]).
    offset: Offset,
}

impl Default for State {
    fn default() -> Self {
        Self {
            scroller_grabbed_at: None,
            scroll_box_touched_at: None,
            offset: Offset::Absolute(0.0),
        }
    }
}

impl operation::Scrollable for State {
    // Bridges the widget-operation trait to the inherent `State::snap_to`.
    fn snap_to(&mut self, percentage: f32) {
        State::snap_to(self, percentage);
    }
}

/// The vertical scrolling offset of a [`Scrollable`].
#[derive(Debug, Clone, Copy)]
enum Offset {
    /// Offset in pixels from the top of the content.
    Absolute(f32),
    /// Offset as a fraction (0 = top, 1 = bottom) of the scrollable range;
    /// stays relative across content resizes until `unsnap` pins it.
    Relative(f32),
}

impl Offset {
    /// Resolves this offset to pixels, clamped so the view can never scroll
    /// past the end of the content (never negative, never beyond the hidden
    /// portion of the content).
    fn absolute(self, bounds: Rectangle, content_bounds: Rectangle) -> f32 {
        match self {
            Self::Absolute(absolute) => {
                let hidden_content =
                    (content_bounds.height - bounds.height).max(0.0);
                absolute.min(hidden_content)
            }
            Self::Relative(percentage) => {
                ((content_bounds.height - bounds.height) * percentage).max(0.0)
            }
        }
    }
}

impl State {
    /// Creates a new [`State`] with the scrollbar located at the top.
    pub fn new() -> Self {
        State::default()
    }

    /// Apply a scrolling offset to the current [`State`], given the bounds of
    /// the [`Scrollable`] and its contents.
    pub fn scroll(
        &mut self,
        delta_y: f32,
        bounds: Rectangle,
        content_bounds: Rectangle,
    ) {
        // Nothing to scroll if the content already fits in the viewport.
        if bounds.height >= content_bounds.height {
            return;
        }

        // Positive `delta_y` scrolls towards the top; clamp to the valid
        // pixel range [0, hidden content height].
        self.offset = Offset::Absolute(
            (self.offset.absolute(bounds, content_bounds) - delta_y)
                .max(0.0)
                .min((content_bounds.height - bounds.height) as f32),
        );
    }

    /// Scrolls the [`Scrollable`] to a relative amount.
    ///
    /// `0` represents scrollbar at the top, while `1` represents scrollbar at
    /// the bottom.
    pub fn scroll_to(
        &mut self,
        percentage: f32,
        bounds: Rectangle,
        content_bounds: Rectangle,
    ) {
        self.snap_to(percentage);
        self.unsnap(bounds, content_bounds);
    }

    /// Snaps the scroll position to a relative amount.
    ///
    /// `0` represents scrollbar at the top, while `1` represents scrollbar at
    /// the bottom.
    pub fn snap_to(&mut self, percentage: f32) {
        // Clamp the percentage to [0, 1].
        self.offset = Offset::Relative(percentage.max(0.0).min(1.0));
    }

    /// Unsnaps the current scroll position, if snapped, given the bounds of the
    /// [`Scrollable`] and its contents (converts a relative offset back to an
    /// absolute pixel offset).
    pub fn unsnap(&mut self, bounds: Rectangle, content_bounds: Rectangle) {
        self.offset =
            Offset::Absolute(self.offset.absolute(bounds, content_bounds));
    }

    /// Returns the current scrolling offset of the [`State`], given the bounds
    /// of the [`Scrollable`] and its contents.
    pub fn offset(&self, bounds: Rectangle, content_bounds: Rectangle) -> u32 {
        self.offset.absolute(bounds, content_bounds) as u32
    }

    /// Returns whether the scroller is currently grabbed or not.
    pub fn is_scroller_grabbed(&self) -> bool {
        self.scroller_grabbed_at.is_some()
    }

    /// Returns whether the scroll box is currently touched or not.
    pub fn is_scroll_box_touched(&self) -> bool {
        self.scroll_box_touched_at.is_some()
    }
}

/// The scrollbar of a [`Scrollable`].
#[derive(Debug)]
struct Scrollbar {
    /// The outer bounds of the scrollable, including the [`Scrollbar`] and
    /// [`Scroller`].
    outer_bounds: Rectangle,

    /// The bounds of the [`Scrollbar`].
    bounds: Rectangle,

    /// The bounds of the [`Scroller`].
    scroller: Scroller,
}

impl Scrollbar {
    // Hit-testing uses the outer bounds (bar plus margin), giving a larger,
    // easier-to-hit target than the bar itself.
    fn is_mouse_over(&self, cursor_position: Point) -> bool {
        self.outer_bounds.contains(cursor_position)
    }

    /// Starts a drag: returns where on the scroller the grab happened as a
    /// fraction of its height (0 = top, 1 = bottom), or `0.5` (its midpoint)
    /// when the click landed on the bar outside the scroller. `None` if the
    /// cursor is outside the scrollbar entirely.
    fn grab_scroller(&self, cursor_position: Point) -> Option<f32> {
        if self.outer_bounds.contains(cursor_position) {
            Some(if self.scroller.bounds.contains(cursor_position) {
                (cursor_position.y - self.scroller.bounds.y)
                    / self.scroller.bounds.height
            } else {
                0.5
            })
        } else {
            None
        }
    }

    /// Converts a cursor position during a drag into a scroll percentage,
    /// keeping the originally grabbed point of the scroller under the cursor.
    fn scroll_percentage(
        &self,
        grabbed_at: f32,
        cursor_position: Point,
    ) -> f32 {
        (cursor_position.y
            - self.bounds.y
            - self.scroller.bounds.height * grabbed_at)
            / (self.bounds.height - self.scroller.bounds.height)
    }
}

/// The handle of a [`Scrollbar`].
#[derive(Debug, Clone, Copy)]
struct Scroller {
    /// The bounds of the [`Scroller`].
    bounds: Rectangle,
}
true
d3552ef5420e3214b9f23cde171811eed95970a3
Rust
Pear0/rustos
/kern/src/traps/irq.rs
UTF-8
4,012
2.515625
3
[]
no_license
// Per-interrupt handler registration and dispatch tables (global + per-core).
use alloc::boxed::Box;
use pi::interrupt::{Interrupt, CoreInterrupt};
use crate::mutex::Mutex;
use crate::traps::KernelTrapFrame;
use crate::smp;
use core::time::Duration;
use crate::process::ProcessImpl;

/// A registered interrupt handler: a boxed closure invoked with the trap
/// frame of the interrupted context. `Send` because it may run on any core.
pub type IrqHandler<T> = Box<dyn FnMut(&mut T) + Send>;

/// Simple invocation statistics kept per interrupt line.
#[derive(Copy, Clone, Default, Debug)]
pub struct IrqStats {
    /// Number of times this interrupt has been invoked (wraps on overflow).
    pub count: u32,
}

/// A single dispatch-table slot: an optional handler plus its statistics.
struct IrqEntry<T> {
    handler: Option<IrqHandler<T>>,
    stats: IrqStats,
}

impl<T> IrqEntry<T> {
    /// An empty slot: no handler, zeroed stats.
    fn new() -> Self {
        IrqEntry {
            handler: None,
            stats: Default::default(),
        }
    }

    /// Bumps the invocation counter; wrapping add so overflow never panics.
    fn record_stats(&mut self, _tf: &T) {
        self.stats.count = self.stats.count.wrapping_add(1);
    }
}

// Dispatch tables: one slot per global interrupt and one per core-local
// interrupt line.
type IrqHandlers<T> = [IrqEntry<T>; Interrupt::MAX];
type CoreIrqHandlers<T> = [IrqEntry<T>; CoreInterrupt::MAX];

// `IrqEntry` owns a boxed closure, so the array cannot use `[x; N]` syntax
// and must be spelled out. The 12-element literal fixes CoreInterrupt::MAX
// at 12 — confirm against the `pi` crate if that ever changes.
fn new_core_irqs<T>() -> CoreIrqHandlers<T> {
    [IrqEntry::new(), IrqEntry::new(), IrqEntry::new(), IrqEntry::new(),
     IrqEntry::new(), IrqEntry::new(), IrqEntry::new(), IrqEntry::new(),
     IrqEntry::new(), IrqEntry::new(), IrqEntry::new(), IrqEntry::new()]
}

/// Per-core dispatch tables, one lock-protected (initially `None`) table per
/// core.
struct CoreIrq<T> {
    handlers: [Mutex<Option<CoreIrqHandlers<T>>>; smp::MAX_CORES],
}

/// Global + per-core interrupt dispatch. Field `.0` is the global table,
/// field `.1` holds the per-core tables; both stay `None` until
/// `initialize()` allocates them.
pub struct Irq<T: ProcessImpl>(Mutex<Option<IrqHandlers<T::Frame>>>, CoreIrq<T::Frame>);

impl<T: ProcessImpl> Irq<T> {
    /// `const` constructor usable in a `static`; all tables are `None` until
    /// `initialize()` runs. The four-element literal implies
    /// `smp::MAX_CORES == 4`.
    pub const fn uninitialized() -> Irq<T> {
        Irq(mutex_new!(None), CoreIrq {
            handlers: [
                mutex_new!(None), mutex_new!(None), mutex_new!(None), mutex_new!(None)
            ]
        })
    }

    /// Allocates the empty dispatch tables. Must run (once) before any
    /// register/invoke call — those `unwrap()` the `Option`. The nine-element
    /// literal implies `Interrupt::MAX == 9`.
    pub fn initialize(&self) {
        *m_lock!(self.0) = Some([
            IrqEntry::new(), IrqEntry::new(), IrqEntry::new(),
            IrqEntry::new(), IrqEntry::new(), IrqEntry::new(),
            IrqEntry::new(), IrqEntry::new(), IrqEntry::new(),
        ]);
        for core in self.1.handlers.iter() {
            *m_lock!(core) = Some(new_core_irqs());
        }
    }

    /// Register an irq handler for an interrupt, replacing any previous one.
    /// The caller should assure that `initialize()` has been called before calling this function.
    pub fn register(&self, int: Interrupt, handler: IrqHandler<T::Frame>) {
        m_lock!(self.0).as_mut().unwrap()[Interrupt::to_index(int)].handler = Some(handler);
    }

    /// Register a handler for a core-local interrupt line on the given core.
    pub fn register_core(&self, core: usize, int: CoreInterrupt, handler: IrqHandler<T::Frame>) {
        m_lock!(self.1.handlers[core]).as_mut().unwrap()[int as usize].handler = Some(handler);
    }

    /// Executes an irq handler for the given interrupt. Returns `true` iff a
    /// handler was registered; statistics are recorded either way.
    /// The caller should assure that `initialize()` has been called before calling this function.
    pub fn invoke(&self, int: Interrupt, tf: &mut T::Frame) -> bool {
        let lock = &mut m_lock!(self.0);
        let entry = &mut lock.as_mut().unwrap()[Interrupt::to_index(int)];
        entry.record_stats(tf);
        if let Some(handler) = &mut entry.handler {
            handler(tf);
            true
        } else {
            false
        }
    }

    /// Core-local counterpart of `invoke`.
    pub fn invoke_core(&self, core: usize, int: CoreInterrupt, tf: &mut T::Frame) -> bool {
        let lock = &mut m_lock!(self.1.handlers[core]);
        let entry = &mut lock.as_mut().unwrap()[int as usize];
        entry.record_stats(tf);
        if let Some(handler) = &mut entry.handler {
            handler(tf);
            true
        } else {
            false
        }
    }

    /// Snapshot of the global per-interrupt stats. Returns `None` if the
    /// table lock cannot be acquired within 1 ms — keeps diagnostics paths
    /// from deadlocking against an interrupt holding the lock.
    pub fn get_stats(&self) -> Option<[IrqStats; Interrupt::MAX]> {
        let mut stats = [IrqStats::default(); Interrupt::MAX];
        for (i, entry) in m_lock_timeout!(self.0, Duration::from_millis(1))?.as_ref().unwrap().into_iter().enumerate() {
            stats[i] = entry.stats;
        }
        Some(stats)
    }

    /// Snapshot of one core's core-local interrupt stats; `None` on lock
    /// timeout (same 1 ms policy as `get_stats`).
    pub fn get_stats_core(&self, core: usize) -> Option<[IrqStats; CoreInterrupt::MAX]> {
        let mut stats = [IrqStats::default(); CoreInterrupt::MAX];
        for (i, entry) in m_lock_timeout!(self.1.handlers[core], Duration::from_millis(1))?.as_ref().unwrap().into_iter().enumerate() {
            stats[i] = entry.stats;
        }
        Some(stats)
    }
}
true
0ec843e853a7b04f85711bd215d6bbc84887e17f
Rust
1Password/typeshare
/core/src/parser.rs
UTF-8
26,003
2.859375
3
[ "Apache-2.0", "LicenseRef-scancode-unknown-license-reference", "LicenseRef-scancode-free-unknown", "MIT" ]
permissive
use crate::rust_types::FieldDecorator; use crate::{ language::SupportedLanguage, rename::RenameExt, rust_types::{ Id, RustEnum, RustEnumShared, RustEnumVariant, RustEnumVariantShared, RustField, RustItem, RustStruct, RustType, RustTypeAlias, RustTypeParseError, }, }; use proc_macro2::{Ident, Span}; use std::collections::BTreeSet; use std::{ collections::{HashMap, HashSet}, convert::TryFrom, }; use syn::{Attribute, Fields, ItemEnum, ItemStruct, ItemType}; use syn::{GenericParam, Meta, NestedMeta}; use thiserror::Error; // TODO: parsing is very opinionated and makes some decisions that should be // getting made at code generation time. Fix this. const SERDE: &str = "serde"; const TYPESHARE: &str = "typeshare"; /// The results of parsing Rust source input. #[derive(Default, Debug)] pub struct ParsedData { /// Structs defined in the source pub structs: Vec<RustStruct>, /// Enums defined in the source pub enums: Vec<RustEnum>, /// Type aliases defined in the source pub aliases: Vec<RustTypeAlias>, } impl ParsedData { /// Add the parsed data from `other` to `self`. pub fn add(&mut self, mut other: Self) { self.structs.append(&mut other.structs); self.enums.append(&mut other.enums); self.aliases.append(&mut other.aliases); } fn push_rust_thing(&mut self, rust_thing: RustItem) { match rust_thing { RustItem::Struct(s) => self.structs.push(s), RustItem::Enum(e) => self.enums.push(e), RustItem::Alias(a) => self.aliases.push(a), } } } /// Errors that can occur while parsing Rust source input. 
#[derive(Debug, Error)] #[allow(missing_docs)] pub enum ParseError { #[error("{0}")] SynError(#[from] syn::Error), #[error("failed to parse a rust type: {0}")] RustTypeParseError(#[from] RustTypeParseError), #[error("unsupported language encountered: {0}")] UnsupportedLanguage(String), #[error("unsupported type encountered: {0}")] UnsupportedType(String), #[error("tuple structs with more than one field are currently unsupported")] ComplexTupleStruct, #[error("multiple unnamed associated types are not currently supported")] MultipleUnnamedAssociatedTypes, #[error("the serde tag attribute is not supported for non-algebraic enums: {enum_ident}")] SerdeTagNotAllowed { enum_ident: String }, #[error("the serde content attribute is not supported for non-algebraic enums: {enum_ident}")] SerdeContentNotAllowed { enum_ident: String }, #[error("serde tag attribute needs to be specified for algebraic enum {enum_ident}. e.g. #[serde(tag = \"type\", content = \"content\")]")] SerdeTagRequired { enum_ident: String }, #[error("serde content attribute needs to be specified for algebraic enum {enum_ident}. e.g. #[serde(tag = \"type\", content = \"content\")]")] SerdeContentRequired { enum_ident: String }, #[error("the serde flatten attribute is not currently supported")] SerdeFlattenNotAllowed, } /// Parse the given Rust source string into `ParsedData`. 
pub fn parse(input: &str) -> Result<ParsedData, ParseError> { let mut parsed_data = ParsedData::default(); // We will only produce output for files that contain the `#[typeshare]` // attribute, so this is a quick and easy performance win if !input.contains("typeshare") { return Ok(parsed_data); } // Parse and process the input, ensuring we parse only items marked with // `#[typeshare] let source = syn::parse_file(input)?; for item in flatten_items(source.items.iter()) { match item { syn::Item::Struct(s) if has_typeshare_annotation(&s.attrs) => { parsed_data.push_rust_thing(parse_struct(s)?); } syn::Item::Enum(e) if has_typeshare_annotation(&e.attrs) => { parsed_data.push_rust_thing(parse_enum(e)?); } syn::Item::Type(t) if has_typeshare_annotation(&t.attrs) => { parsed_data.aliases.push(parse_type_alias(t)?); } _ => {} } } Ok(parsed_data) } /// Given an iterator over items, will return an iterator that flattens the contents of embedded /// module items into the iterator. fn flatten_items<'a>( items: impl Iterator<Item = &'a syn::Item>, ) -> impl Iterator<Item = &'a syn::Item> { items.flat_map(|item| { match item { syn::Item::Mod(syn::ItemMod { content: Some((_, items)), .. }) => flatten_items(items.iter()).collect(), item => vec![item], } .into_iter() }) } /// Parses a struct into a definition that more succinctly represents what /// typeshare needs to generate code for other languages. /// /// This function can currently return something other than a struct, which is a /// hack. fn parse_struct(s: &ItemStruct) -> Result<RustItem, ParseError> { let serde_rename_all = serde_rename_all(&s.attrs); let generic_types = s .generics .params .iter() .filter_map(|param| match param { GenericParam::Type(type_param) => Some(type_param.ident.to_string()), _ => None, }) .collect(); // Check if this struct should be parsed as a type alias. // TODO: we shouldn't lie and return a type alias when parsing a struct. 
this // is a temporary hack if let Some(ty) = get_serialized_as_type(&s.attrs) { return Ok(RustItem::Alias(RustTypeAlias { id: get_ident(Some(&s.ident), &s.attrs, &None), r#type: ty.parse()?, comments: parse_comment_attrs(&s.attrs), generic_types, })); } Ok(match &s.fields { // Structs Fields::Named(f) => { let fields = f .named .iter() .filter(|field| !is_skipped(&field.attrs)) .map(|f| { let ty = if let Some(ty) = get_field_type_override(&f.attrs) { ty.parse()? } else { RustType::try_from(&f.ty)? }; if serde_flatten(&f.attrs) { return Err(ParseError::SerdeFlattenNotAllowed); } let has_default = serde_default(&f.attrs); let decorators = get_field_decorators(&f.attrs); Ok(RustField { id: get_ident(f.ident.as_ref(), &f.attrs, &serde_rename_all), ty, comments: parse_comment_attrs(&f.attrs), has_default, decorators, }) }) .collect::<Result<_, ParseError>>()?; RustItem::Struct(RustStruct { id: get_ident(Some(&s.ident), &s.attrs, &None), generic_types, fields, comments: parse_comment_attrs(&s.attrs), decorators: get_decorators(&s.attrs), }) } // Tuple structs Fields::Unnamed(f) => { if f.unnamed.len() > 1 { return Err(ParseError::ComplexTupleStruct); } let f = &f.unnamed[0]; let ty = if let Some(ty) = get_field_type_override(&f.attrs) { ty.parse()? } else { RustType::try_from(&f.ty)? }; RustItem::Alias(RustTypeAlias { id: get_ident(Some(&s.ident), &s.attrs, &None), r#type: ty, comments: parse_comment_attrs(&s.attrs), generic_types, }) } // Unit structs or `None` Fields::Unit => RustItem::Struct(RustStruct { id: get_ident(Some(&s.ident), &s.attrs, &None), generic_types, fields: vec![], comments: parse_comment_attrs(&s.attrs), decorators: get_decorators(&s.attrs), }), }) } /// Parses an enum into a definition that more succinctly represents what /// typeshare needs to generate code for other languages. /// /// This function can currently return something other than an enum, which is a /// hack. 
fn parse_enum(e: &ItemEnum) -> Result<RustItem, ParseError> { let generic_types = e .generics .params .iter() .filter_map(|param| match param { GenericParam::Type(type_param) => Some(type_param.ident.to_string()), _ => None, }) .collect(); let serde_rename_all = serde_rename_all(&e.attrs); // TODO: we shouldn't lie and return a type alias when parsing an enum. this // is a temporary hack if let Some(ty) = get_serialized_as_type(&e.attrs) { return Ok(RustItem::Alias(RustTypeAlias { id: get_ident(Some(&e.ident), &e.attrs, &None), r#type: ty.parse()?, comments: parse_comment_attrs(&e.attrs), generic_types, })); } let original_enum_ident = e.ident.to_string(); // Grab the `#[serde(tag = "...", content = "...")]` values if they exist let maybe_tag_key = get_tag_key(&e.attrs); let maybe_content_key = get_content_key(&e.attrs); // Parse all of the enum's variants let variants = e .variants .iter() // Filter out variants we've been told to skip .filter(|v| !is_skipped(&v.attrs)) .map(|v| parse_enum_variant(v, &serde_rename_all)) .collect::<Result<Vec<_>, _>>()?; // Check if the enum references itself recursively in any of its variants let is_recursive = variants.iter().any(|v| match v { RustEnumVariant::Unit(_) => false, RustEnumVariant::Tuple { ty, .. } => ty.contains_type(&original_enum_ident), RustEnumVariant::AnonymousStruct { fields, .. 
} => fields .iter() .any(|f| f.ty.contains_type(&original_enum_ident)), }); let shared = RustEnumShared { id: get_ident(Some(&e.ident), &e.attrs, &None), comments: parse_comment_attrs(&e.attrs), variants, decorators: get_decorators(&e.attrs), generic_types, is_recursive, }; // Figure out if we're dealing with a unit enum or an algebraic enum if shared .variants .iter() .all(|v| matches!(v, RustEnumVariant::Unit(_))) { // All enum variants are unit-type if maybe_tag_key.is_some() { return Err(ParseError::SerdeTagNotAllowed { enum_ident: original_enum_ident, }); } if maybe_content_key.is_some() { return Err(ParseError::SerdeContentNotAllowed { enum_ident: original_enum_ident, }); } Ok(RustItem::Enum(RustEnum::Unit(shared))) } else { // At least one enum variant is either a tuple or an anonymous struct let tag_key = maybe_tag_key.ok_or_else(|| ParseError::SerdeTagRequired { enum_ident: original_enum_ident.clone(), })?; let content_key = maybe_content_key.ok_or_else(|| ParseError::SerdeContentRequired { enum_ident: original_enum_ident.clone(), })?; Ok(RustItem::Enum(RustEnum::Algebraic { tag_key, content_key, shared, })) } } /// Parse an enum variant. fn parse_enum_variant( v: &syn::Variant, enum_serde_rename_all: &Option<String>, ) -> Result<RustEnumVariant, ParseError> { let shared = RustEnumVariantShared { id: get_ident(Some(&v.ident), &v.attrs, enum_serde_rename_all), comments: parse_comment_attrs(&v.attrs), }; // Get the value of `#[serde(rename_all)]` for this specific variant rather // than the overall enum // // The value of the attribute for the enum overall does not apply to enum // variant fields. 
let variant_serde_rename_all = serde_rename_all(&v.attrs); match &v.fields { syn::Fields::Unit => Ok(RustEnumVariant::Unit(shared)), syn::Fields::Unnamed(associated_type) => { if associated_type.unnamed.len() > 1 { return Err(ParseError::MultipleUnnamedAssociatedTypes); } let first_field = associated_type.unnamed.first().unwrap(); let ty = if let Some(ty) = get_field_type_override(&first_field.attrs) { ty.parse()? } else { RustType::try_from(&first_field.ty)? }; Ok(RustEnumVariant::Tuple { ty, shared }) } syn::Fields::Named(fields_named) => Ok(RustEnumVariant::AnonymousStruct { fields: fields_named .named .iter() .map(|f| { let field_type = if let Some(ty) = get_field_type_override(&f.attrs) { ty.parse()? } else { RustType::try_from(&f.ty)? }; let has_default = serde_default(&f.attrs); let decorators = get_field_decorators(&f.attrs); Ok(RustField { id: get_ident(f.ident.as_ref(), &f.attrs, &variant_serde_rename_all), ty: field_type, comments: parse_comment_attrs(&f.attrs), has_default, decorators, }) }) .collect::<Result<Vec<_>, ParseError>>()?, shared, }), } } /// Parses a type alias into a definition that more succinctly represents what /// typeshare needs to generate code for other languages. fn parse_type_alias(t: &ItemType) -> Result<RustTypeAlias, ParseError> { let ty = if let Some(ty) = get_serialized_as_type(&t.attrs) { ty.parse()? } else { RustType::try_from(t.ty.as_ref())? 
}; let generic_types = t .generics .params .iter() .filter_map(|param| match param { GenericParam::Type(type_param) => Some(type_param.ident.to_string()), _ => None, }) .collect(); Ok(RustTypeAlias { id: get_ident(Some(&t.ident), &t.attrs, &None), r#type: ty, comments: parse_comment_attrs(&t.attrs), generic_types, }) } // Helpers /// Parses any comment out of the given slice of attributes fn parse_comment_attrs(attrs: &[Attribute]) -> Vec<String> { const DOC_ATTR: &str = "doc"; attrs .iter() .map(Attribute::parse_meta) .filter_map(Result::ok) .filter_map(|attr| match attr { Meta::NameValue(name_value) => { if let Some(ident) = name_value.path.get_ident() { if *ident == DOC_ATTR { Some(name_value.lit) } else { None } } else { None } } _ => None, }) .filter_map(literal_as_string) .map(|string| string.trim().into()) .collect() } /// Checks the given attrs for `#[typeshare]` fn has_typeshare_annotation(attrs: &[syn::Attribute]) -> bool { let typeshare_ident = Ident::new("typeshare", Span::call_site()); for a in attrs { if let Some(segment) = a.path.segments.iter().next() { if segment.ident == typeshare_ident { return true; } } } false } fn get_ident( ident: Option<&proc_macro2::Ident>, attrs: &[syn::Attribute], rename_all: &Option<String>, ) -> Id { let original = ident.map_or("???".to_string(), |id| id.to_string().replace("r#", "")); let mut renamed = rename_all_to_case(original.clone(), rename_all); if let Some(s) = serde_rename(attrs) { renamed = s; } Id { original, renamed } } fn rename_all_to_case(original: String, case: &Option<String>) -> String { match case { None => original, Some(value) => match value.as_str() { "lowercase" => original.to_lowercase(), "UPPERCASE" => original.to_uppercase(), "PascalCase" => original.to_pascal_case(), "camelCase" => original.to_camel_case(), "snake_case" => original.to_snake_case(), "SCREAMING_SNAKE_CASE" => original.to_screaming_snake_case(), "kebab-case" => original.to_kebab_case(), "SCREAMING-KEBAB-CASE" => 
original.to_screaming_kebab_case(), _ => original, }, } } fn literal_as_string(lit: syn::Lit) -> Option<String> { match lit { syn::Lit::Str(str) => Some(str.value()), _ => None, } } fn get_typeshare_name_value_meta_items<'a>( attrs: &'a [syn::Attribute], name: &'a str, ) -> impl Iterator<Item = syn::Lit> + 'a { attrs.iter().flat_map(move |attr| { get_typeshare_meta_items(attr) .iter() .filter_map(|arg| match arg { NestedMeta::Meta(Meta::NameValue(name_value)) => { if let Some(ident) = name_value.path.get_ident() { if *ident == name { Some(name_value.lit.clone()) } else { None } } else { None } } _ => None, }) .collect::<Vec<_>>() }) } fn get_serde_name_value_meta_items<'a>( attrs: &'a [syn::Attribute], name: &'a str, ) -> impl Iterator<Item = syn::Lit> + 'a { attrs.iter().flat_map(move |attr| { get_serde_meta_items(attr) .iter() .filter_map(|arg| match arg { NestedMeta::Meta(Meta::NameValue(name_value)) => { if let Some(ident) = name_value.path.get_ident() { if *ident == name { Some(name_value.lit.clone()) } else { None } } else { None } } _ => None, }) .collect::<Vec<_>>() }) } fn get_serialized_as_type(attrs: &[syn::Attribute]) -> Option<String> { get_typeshare_name_value_meta_items(attrs, "serialized_as") .next() .and_then(literal_as_string) } fn get_field_type_override(attrs: &[syn::Attribute]) -> Option<String> { get_typeshare_name_value_meta_items(attrs, "serialized_as") .next() .and_then(literal_as_string) } /// Checks the struct or enum for decorators like `#[typeshare(typescript(readonly)]` /// Takes a slice of `syn::Attribute`, returns a `HashMap<language, BTreeSet<decorator>>`, where `language` is `SupportedLanguage` /// and `decorator` is `FieldDecorator`. Field decorators are ordered in a `BTreeSet` for consistent code generation. 
fn get_field_decorators( attrs: &[Attribute], ) -> HashMap<SupportedLanguage, BTreeSet<FieldDecorator>> { let languages: HashSet<SupportedLanguage> = SupportedLanguage::all_languages().collect(); attrs .iter() .flat_map(get_typeshare_meta_items) .flat_map(|meta| { if let NestedMeta::Meta(Meta::List(list)) = meta { Some(list) } else { None } }) .flat_map(|list| match list.path.get_ident() { Some(ident) if languages.contains(&ident.try_into().unwrap()) => { Some((ident.try_into().unwrap(), list.nested)) } _ => None, }) .map(|(language, list)| { ( language, list.into_iter().filter_map(|nested| match nested { NestedMeta::Meta(Meta::Path(path)) if path.segments.len() == 1 => { Some(FieldDecorator::Word(path.get_ident()?.to_string())) } NestedMeta::Meta(Meta::NameValue(name_value)) => { Some(FieldDecorator::NameValue( name_value.path.get_ident()?.to_string(), literal_as_string(name_value.lit)?, )) } // TODO: this should throw a visible error since it suggests a malformed // attribute. _ => None, }), ) }) .fold(HashMap::new(), |mut acc, (language, decorators)| { acc.entry(language).or_default().extend(decorators); acc }) } /// Checks the struct or enum for decorators like `#[typeshare(swift = "Codable, Equatable")]` /// Takes a slice of `syn::Attribute`, returns a `HashMap<language, Vec<decoration_words>>`, where `language` is `SupportedLanguage` and `decoration_words` is `String` fn get_decorators(attrs: &[syn::Attribute]) -> HashMap<SupportedLanguage, Vec<String>> { // The resulting HashMap, Key is the language, and the value is a vector of decorators words that will be put onto structures let mut out: HashMap<SupportedLanguage, Vec<String>> = HashMap::new(); for value in get_typeshare_name_value_meta_items(attrs, "swift").filter_map(literal_as_string) { let decorators: Vec<String> = value.split(',').map(|s| s.trim().to_string()).collect(); // lastly, get the entry in the hashmap output and extend the value, or insert what we have already found let decs = 
out.entry(SupportedLanguage::Swift).or_insert_with(Vec::new); decs.extend(decorators); // Sorting so all the added decorators will be after the normal ([`String`], `Codable`) in alphabetical order decs.sort_unstable(); decs.dedup(); //removing any duplicates just in case } //return our hashmap mapping of language -> Vec<decorators> out } fn get_tag_key(attrs: &[syn::Attribute]) -> Option<String> { get_serde_name_value_meta_items(attrs, "tag") .next() .and_then(literal_as_string) } fn get_content_key(attrs: &[syn::Attribute]) -> Option<String> { get_serde_name_value_meta_items(attrs, "content") .next() .and_then(literal_as_string) } fn serde_rename(attrs: &[syn::Attribute]) -> Option<String> { get_serde_name_value_meta_items(attrs, "rename") .next() .and_then(literal_as_string) } fn serde_rename_all(attrs: &[syn::Attribute]) -> Option<String> { get_serde_name_value_meta_items(attrs, "rename_all") .next() .and_then(literal_as_string) } fn serde_attr(attrs: &[syn::Attribute], ident: &Ident) -> bool { attrs.iter().any(|attr| { get_serde_meta_items(attr).iter().any(|arg| match arg { NestedMeta::Meta(Meta::Path(path)) => { if let Some(this_ident) = path.get_ident() { *this_ident == *ident } else { false } } _ => false, }) }) } fn serde_default(attrs: &[syn::Attribute]) -> bool { serde_attr(attrs, &Ident::new("default", Span::call_site())) } fn serde_flatten(attrs: &[syn::Attribute]) -> bool { serde_attr(attrs, &Ident::new("flatten", Span::call_site())) } // TODO: for now, this is a workaround until we can integrate serde_derive_internal // into our parser. 
/// Returns all arguments passed into `#[serde(...)]` attributes pub fn get_serde_meta_items(attr: &syn::Attribute) -> Vec<NestedMeta> { if attr.path.get_ident().is_none() || *attr.path.get_ident().unwrap() != SERDE { return Vec::default(); } match attr.parse_meta() { Ok(Meta::List(meta)) => meta.nested.into_iter().collect(), _ => Vec::new(), } } /// Returns all arguments passed into `#[typeshare(...)]` attributes pub fn get_typeshare_meta_items(attr: &syn::Attribute) -> Vec<NestedMeta> { if attr.path.get_ident().is_none() || *attr.path.get_ident().unwrap() != TYPESHARE { return Vec::default(); } match attr.parse_meta() { Ok(Meta::List(meta)) => meta.nested.into_iter().collect(), _ => Vec::new(), } } // `#[typeshare(skip)]` or `#[serde(skip)]` fn is_skipped(attrs: &[syn::Attribute]) -> bool { let skip = Ident::new("skip", Span::call_site()); attrs.iter().any(|attr| { get_serde_meta_items(attr) .into_iter() .chain(get_typeshare_meta_items(attr).into_iter()) .any(|arg| match arg { NestedMeta::Meta(Meta::Path(path)) => { if let Some(ident) = path.get_ident() { *ident == skip } else { false } } _ => false, }) }) } #[test] fn test_rename_all_to_case() { let test_word = "test_case"; let tests = [ ("lowercase", "test_case"), ("UPPERCASE", "TEST_CASE"), ("PascalCase", "TestCase"), ("camelCase", "testCase"), ("snake_case", "test_case"), ("SCREAMING_SNAKE_CASE", "TEST_CASE"), ("kebab-case", "test-case"), ("SCREAMING-KEBAB-CASE", "TEST-CASE"), ("invalid case", "test_case"), ]; for test in tests { assert_eq!( rename_all_to_case(test_word.to_string(), &Some(test.0.to_string())), test.1 ); } } /// Removes `-` characters from identifiers pub(crate) fn remove_dash_from_identifier(name: &str) -> String { // Dashes are not valid in identifiers, so we map them to underscores name.replace('-', "_") }
true
8a03591cb3140bc37fc23554506d393cd3843411
Rust
ytakhs/leetcode-rs
/examples/archives/best_time_to_buy_and_sell_stock.rs
UTF-8
672
3.59375
4
[]
no_license
struct Solution {}

impl Solution {
    /// Maximum profit achievable from one buy followed by one later sell.
    ///
    /// Returns 0 when no profitable transaction exists, including for an
    /// empty price list.
    pub fn max_profit(prices: Vec<i32>) -> i32 {
        // A single fold replaces the manual buy/sell bookkeeping: track the
        // cheapest price seen so far and the best profit obtainable by
        // selling at the current price.
        let (_cheapest, best) = prices
            .into_iter()
            .fold((std::i32::MAX, 0), |(cheapest, best), price| {
                let cheapest = cheapest.min(price);
                // `price - cheapest` is never negative because `cheapest`
                // already includes `price`.
                (cheapest, best.max(price - cheapest))
            });
        best
    }
}

fn main() {
    assert_eq!(Solution::max_profit(vec![7, 1, 5, 3, 6, 4]), 5);
    assert_eq!(Solution::max_profit(vec![7, 5, 6, 1, 3, 6, 4]), 5);
}
true
8d61b8b05ecef8ac4406ad9e7581ffdcb256d9c0
Rust
slayfer-dev/Challenges-sol
/Programming/Codeabbey/019/Idleless.rs
UTF-8
939
3.03125
3
[]
no_license
// Author Idleless
use std::io;

/// Opening bracket characters.
const OPEN: &str = "([{<";
/// Closing bracket characters.
const CLOSE: &str = ")]}>";

/// Returns the opening bracket that pairs with the closing bracket `c`,
/// or `None` if `c` is not a closing bracket.
fn matching_open(c: char) -> Option<char> {
    match c {
        ')' => Some('('),
        ']' => Some('['),
        '}' => Some('{'),
        '>' => Some('<'),
        _ => None,
    }
}

/// Checks whether every bracket in `line` is properly matched and nested.
/// Non-bracket characters are ignored.
fn is_balanced(line: &str) -> bool {
    let mut stack = Vec::new();
    for c in line.chars() {
        if OPEN.contains(c) {
            stack.push(c);
        } else if CLOSE.contains(c) {
            // A closer must pair with the most recent unmatched opener; an
            // empty stack (`pop` returns `None`) also fails this comparison.
            if stack.pop() != matching_open(c) {
                return false;
            }
        }
    }
    // Any leftover openers are unmatched.
    stack.is_empty()
}

fn main() {
    // First line of input: the number of test cases.
    let mut cases = String::new();
    io::stdin().read_line(&mut cases).unwrap();
    let cases = cases.trim().parse::<i32>().unwrap();

    // One line per case; print 1 for balanced, 0 otherwise, space separated.
    for _ in 0..cases {
        let mut input_line = String::new();
        io::stdin().read_line(&mut input_line).unwrap();
        print!("{} ", if is_balanced(&input_line) { 1 } else { 0 });
    }
    println!();
}
true
8b9324a477183cf660b30169a730f9b3b343763b
Rust
spearman/enet-rs
/src/host.rs
UTF-8
8,997
2.71875
3
[ "Apache-2.0" ]
permissive
use {std, ll}; use {peer, Address, EnetDrop, Event, Packet, Peer, MAX_PEERS, MAX_CHANNEL_COUNT}; //////////////////////////////////////////////////////////////////////////////// // structs // //////////////////////////////////////////////////////////////////////////////// /// An ENet host for communicating with peers. /// /// A 'Host' cannot be sent accross threads but will keep Enet #[derive(Clone, Debug)] pub struct Host { hostdrop : std::rc::Rc <HostDrop> } #[derive(Debug, PartialEq)] pub (crate) struct HostDrop { raw : *mut ll::ENetHost, enetdrop : std::sync::Arc <EnetDrop> } //////////////////////////////////////////////////////////////////////////////// // enums // //////////////////////////////////////////////////////////////////////////////// #[derive(Debug)] pub enum Error { /// Error from `service()` ServiceError, /// Error from `check_events()` DispatchError } #[derive(Clone, Debug)] pub enum CreateError { /// Maximum peer count is enet::MAX_PEERS (4096) TooManyPeers (u32), /// Maximum channel count is enet::MAX_CHANNEL_COUNT (255) TooManyChannels (u32), ReturnedNull } //////////////////////////////////////////////////////////////////////////////// // impls // //////////////////////////////////////////////////////////////////////////////// impl Host { pub (crate) fn new ( address : Option <Address>, peer_count : u32, channel_limit : Option <u32>, incoming_bandwidth : Option <u32>, outgoing_bandwidth : Option <u32>, enetdrop : std::sync::Arc <EnetDrop> ) -> Result <Self, CreateError> { if MAX_PEERS < peer_count { return Err (CreateError::TooManyPeers (peer_count)) } let channel_limit = channel_limit.unwrap_or (0); if MAX_CHANNEL_COUNT < channel_limit { return Err (CreateError::TooManyChannels (channel_limit)) } let host; match address { Some (a) => unsafe { host = ll::enet_host_create ( a.raw(), peer_count as usize, channel_limit as usize, incoming_bandwidth.unwrap_or (0), outgoing_bandwidth.unwrap_or (0) ); if host.is_null() { return Err 
(CreateError::ReturnedNull) } }, None => unsafe { host = ll::enet_host_create ( std::ptr::null(), peer_count as usize, channel_limit as usize, incoming_bandwidth.unwrap_or (0), outgoing_bandwidth.unwrap_or (0) ); if host.is_null() { return Err (CreateError::ReturnedNull) } } } // end match address Ok (Host { hostdrop: std::rc::Rc::new (HostDrop { raw: host, enetdrop }) }) } // end new #[inline] pub unsafe fn raw (&self) -> *mut ll::ENetHost { self.hostdrop.raw() } /// Number of peers allocated for this host #[inline] pub fn peer_count (&self) -> usize { unsafe { (*self.raw()).peerCount } } /// Number of connected peers #[inline] pub fn connected_peers (&self) -> usize { unsafe { (*self.raw()).connectedPeers } } /// Maximum number of channels for incoming connections #[inline] pub fn channel_limit (&self) -> usize { unsafe { (*self.raw()).channelLimit } } /// Total UDP packets sent. /// /// User must reset to prevent overflow. #[inline] pub fn total_sent_packets (&self) -> u32 { unsafe { (*self.raw()).totalSentPackets } } pub fn reset_total_sent_packets (&mut self) { unsafe { (*self.raw()).totalSentPackets = 0; } } /// Total bytes sent. /// /// User must reset to prevent overflow. #[inline] pub fn total_sent_data (&self) -> u32 { unsafe { (*self.raw()).totalSentPackets } } pub fn reset_total_sent_data (&mut self) { unsafe { (*self.raw()).totalSentData = 0; } } /// Total UDP packets received. /// /// User must reset to prevent overflow. #[inline] pub fn total_received_packets (&self) -> u32 { unsafe { (*self.raw()).totalReceivedPackets } } pub fn reset_total_received_packets (&mut self) { unsafe { (*self.raw()).totalReceivedPackets = 0; } } /// Total bytes received. /// /// User must reset to prevent overflow. #[inline] pub fn total_received_data (&self) -> u32 { unsafe { (*self.raw()).totalReceivedPackets } } pub fn reset_total_received_data (&mut self) { unsafe { (*self.raw()).totalReceivedData = 0; } } /// Initiate a connection with a remote host. 
/// /// When connecting to a peer with the `host.connect()` method, a `Peer` /// representing the connection will be created in the `PeerState::Connecting` /// state: /// ``` /// # use enet::Address; /// # let enet = enet::initialize().unwrap(); /// # let mut client = enet.client_host_create (1, None, None).unwrap(); /// let mut peer = client.connect (&Address::localhost (12345), 2, 0); /// ``` /// where the second argument (`2`) is the number of channels to allocate to /// the connection and the third argument (`0`) is an internal `data : u32` /// that can be used by the application. /// /// After receipt of a `Connect` event, the peer is ready to use. /// /// *Note*: after receipt of the `Connect` event on the host that originated /// the connection request, a call to `flush()` or `service()` is required to /// *acknowledge* the connection has succeeded in order to generate the /// corresponding `Connect` event on the server end. /// /// That connection will now be 'in use' until the peer is changed to the /// `PeerState::Disconnected` state. /// /// Note that `Host`s can connect *mutually* (host A connected to host B, and /// host B connected to host A), or *multiply* (host A connected to host B /// more than 1 time), and each connection will have its own `Peer` structure /// in each host A and B. pub fn connect (&mut self, address : &Address, channel_count : u8, data : u32) -> Result <Peer, peer::ConnectError> { unsafe { if self.peer_count() <= self.connected_peers() { return Err (peer::ConnectError::NoPeersAvailable) } let peer = ll::enet_host_connect ( self.raw(), address.raw(), channel_count as usize, data ); if peer.is_null() { return Err (peer::ConnectError::Failure) } Ok (Peer::from_raw(peer, self.hostdrop.clone())) } } /// Waits for events on the host specified and shuttles packets between the /// host and its peers. /// /// `timeout` is the number of milliseconds that ENet should wait for events. 
pub fn service (&mut self, timeout : u32) -> Result <Option <Event>, Error> { let event = unsafe { let event = std::mem::MaybeUninit::<ll::ENetEvent>::uninit().as_mut_ptr(); if ll::enet_host_service (self.hostdrop.raw, event, timeout) < 0 { return Err (Error::ServiceError) } *event }; Ok (Event::from_ll (event, self.hostdrop.clone())) } // end service /// Checks for any queued events on the host and dispatches one if available #[inline] pub fn check_events (&mut self) -> Result <Option <Event>, Error> { let event = unsafe { let event = std::mem::MaybeUninit::<ll::ENetEvent>::uninit().as_mut_ptr(); if ll::enet_host_check_events (self.hostdrop.raw, event) < 0 { return Err (Error::DispatchError) } *event }; Ok (Event::from_ll (event, self.hostdrop.clone())) } /// Send any queued messages without dispatching events #[inline] pub fn flush (&mut self) { unsafe { ll::enet_host_flush (self.hostdrop.raw) } } /// Queue a packet to be sent to all peers associated with the host pub fn broadcast (&mut self, channel_id : u8, packet : Packet) { unsafe { let raw; match packet { Packet::Allocate { bytes, flags } => { raw = ll::enet_packet_create ( bytes.as_ptr() as *const std::os::raw::c_void, bytes.len() as usize, flags.bits() ); } Packet::NoAllocate { bytes, flags } => { raw = ll::enet_packet_create ( bytes.as_ptr() as *const std::os::raw::c_void, bytes.len() as usize, flags.bits() | (ll::_ENetPacketFlag_ENET_PACKET_FLAG_NO_ALLOCATE as u32) ); } } ll::enet_host_broadcast (self.raw(), channel_id, raw) } } } // end impl Host impl HostDrop { #[inline] pub unsafe fn raw (&self) -> *mut ll::ENetHost { self.raw } } impl Drop for HostDrop { #[inline] fn drop (&mut self) { unsafe { ll::enet_host_destroy (self.raw) } } }
true
bdd709be506ee10b27dcc39560d0b388b37c4c69
Rust
udoprog/genco
/src/lang/csharp/mod.rs
UTF-8
7,643
3.265625
3
[ "MIT", "Apache-2.0" ]
permissive
//! Specialization for Csharp code generation. //! //! # String Quoting in C# //! //! Since C# uses UTF-16 internally, but literal strings support C-style family //! of escapes. //! //! See [c_family_write_quoted][super::c_family_write_quoted]. //! //! ```rust //! use genco::prelude::*; //! //! # fn main() -> genco::fmt::Result { //! let toks: csharp::Tokens = quote!("start π 😊 \n \x7f end"); //! assert_eq!("\"start \\u03c0 \\U0001f60a \\n \\x7f end\"", toks.to_string()?); //! # Ok(()) //! # } //! ``` mod block_comment; mod comment; use crate as genco; use crate::fmt; use crate::quote_in; use crate::tokens::ItemStr; use std::collections::{BTreeSet, HashMap, HashSet}; use std::fmt::Write as _; pub use self::block_comment::BlockComment; pub use self::comment::Comment; /// Tokens container specialization for C#. pub type Tokens = crate::Tokens<Csharp>; impl_lang! { /// Language specialization for C#. pub Csharp { type Config = Config; type Format = Format; type Item = Import; fn write_quoted(out: &mut fmt::Formatter<'_>, input: &str) -> fmt::Result { // From: https://csharpindepth.com/articles/Strings super::c_family_write_quoted(out, input) } fn format_file( tokens: &Tokens, out: &mut fmt::Formatter<'_>, config: &Self::Config, ) -> fmt::Result { let mut file: Tokens = Tokens::new(); let mut format = Format::default(); Self::imports(&mut file, tokens, config, &mut format.imported_names); if let Some(namespace) = &config.namespace { quote_in! 
{ file => namespace $namespace { $tokens } } file.format(out, config, &format)?; } else { file.format(out, config, &format)?; tokens.format(out, config, &format)?; } Ok(()) } } Import { fn format(&self, out: &mut fmt::Formatter<'_>, config: &Config, format: &Format) -> fmt::Result { { let qualified = self.qualified || is_qualified(config, format, &self.namespace, &self.name); if qualified { out.write_str(&self.namespace)?; out.write_str(SEP)?; } } out.write_str(&self.name)?; return Ok(()); fn is_qualified(config: &Config, format: &Format, namespace: &str, name: &str) -> bool { // Name is in current namespace. No need to qualify. if let Some(config) = &config.namespace { if &**config == namespace { return false; } } if let Some(imported) = format.imported_names.get(name) { // a conflicting name is in the namespace. if imported != namespace { return true; } } false } } } } /// Separator between types and modules in C#. const SEP: &str = "."; /// State using during formatting of C# language items. #[derive(Debug, Default)] pub struct Format { /// Keeping track of names which have been imported, do determine whether /// their use has to be qualified or not. /// /// A missing name means that it has to be used in a qualified manner. imported_names: HashMap<String, String>, } /// Config data for Csharp formatting. #[derive(Debug, Default)] pub struct Config { /// namespace to use. namespace: Option<ItemStr>, } impl Config { /// Set the namespace name to build. pub fn with_namespace<N>(self, namespace: N) -> Self where N: Into<ItemStr>, { Self { namespace: Some(namespace.into()), } } } /// The import of a C# type `using System.IO;`. /// /// Created through the [import()] function. #[derive(Debug, Clone, Hash, PartialOrd, Ord, PartialEq, Eq)] pub struct Import { /// namespace of the class. namespace: ItemStr, /// Name of class. name: ItemStr, /// Use as qualified type. 
qualified: bool, } impl Import { /// Make this type into a qualified type that is always used with a /// namespace. pub fn qualified(self) -> Self { Self { qualified: true, ..self } } } impl Csharp { fn imports( out: &mut Tokens, tokens: &Tokens, config: &Config, imported_names: &mut HashMap<String, String>, ) { let mut modules = BTreeSet::new(); for import in tokens.walk_imports() { modules.insert((&*import.namespace, &*import.name)); } if modules.is_empty() { return; } let mut imported = HashSet::new(); for (namespace, name) in modules { if Some(namespace) == config.namespace.as_deref() { continue; } match imported_names.get(name) { // already imported... Some(existing) if existing == namespace => continue, // already imported, as something else... Some(_) => continue, _ => {} } if !imported.contains(namespace) { quote_in!(*out => using $namespace;); out.push(); imported.insert(namespace); } imported_names.insert(name.to_string(), namespace.to_string()); } out.line(); } } /// The import of a C# type `using System.IO;`. /// /// # Examples /// /// ``` /// use genco::prelude::*; /// /// let a = csharp::import("Foo.Bar", "A"); /// let b = csharp::import("Foo.Bar", "B"); /// let ob = csharp::import("Foo.Baz", "B"); /// /// let toks: Tokens<Csharp> = quote! { /// $a /// $b /// $ob /// }; /// /// assert_eq!( /// vec![ /// "using Foo.Bar;", /// "", /// "A", /// "B", /// "Foo.Baz.B", /// ], /// toks.to_file_vec()? /// ); /// # Ok::<_, genco::fmt::Error>(()) /// ``` pub fn import<P, N>(namespace: P, name: N) -> Import where P: Into<ItemStr>, N: Into<ItemStr>, { Import { namespace: namespace.into(), name: name.into(), qualified: false, } } /// Format a doc comment where each line is preceeded by `///`. /// /// # Examples /// /// ``` /// use genco::prelude::*; /// use std::iter; /// /// let toks = quote! 
{ /// $(csharp::block_comment(vec!["Foo"])) /// $(csharp::block_comment(iter::empty::<&str>())) /// $(csharp::block_comment(vec!["Bar"])) /// }; /// /// assert_eq!( /// vec![ /// "/// Foo", /// "/// Bar", /// ], /// toks.to_file_vec()? /// ); /// # Ok::<_, genco::fmt::Error>(()) /// ``` pub fn block_comment<T>(comment: T) -> BlockComment<T> where T: IntoIterator, T::Item: Into<ItemStr>, { BlockComment(comment) } /// Format a doc comment where each line is preceeded by `//`. /// /// # Examples /// /// ``` /// use genco::prelude::*; /// /// let toks = quote! { /// $(csharp::comment(&["Foo"])) /// $(csharp::comment(&["Bar"])) /// }; /// /// assert_eq!( /// vec![ /// "// Foo", /// "// Bar", /// ], /// toks.to_file_vec()? /// ); /// # Ok::<_, genco::fmt::Error>(()) /// ``` pub fn comment<T>(comment: T) -> Comment<T> where T: IntoIterator, T::Item: Into<ItemStr>, { Comment(comment) }
true
9799919a373e6b68a3dad5b0099addcf2bf20c27
Rust
hajifkd/diffmerge
/src/bin/merge.rs
UTF-8
604
2.625
3
[]
no_license
use diffmerge::merge;
use std::env;
use std::fs::read;

/// CLI entry point: performs a three-way merge of <BRANCH1> and <BRANCH2>
/// against their common <ANCESTOR> and prints the result to stdout.
fn main() -> Result<(), Box<dyn std::error::Error + 'static>> {
    let args: Vec<String> = env::args().collect();
    if args.len() != 4 {
        println!("Usage {} <ANCESTOR> <BRANCH1> <BRANCH2>", &args[0]);
        return Ok(());
    }

    // Read a file and decode its contents as UTF-8.
    let load = |path: &str| -> Result<String, Box<dyn std::error::Error + 'static>> {
        Ok(String::from_utf8(read(path)?)?)
    };

    let ancestor = load(&args[1])?;
    let branch1 = load(&args[2])?;
    let branch2 = load(&args[3])?;

    let mut merged = merge(&ancestor, &branch1, &branch2);
    // Label conflict markers with the two branch file names.
    merged.set_names(&args[2], &args[3]);
    println!("{}", merged);
    Ok(())
}
true
57d7bd4c2adeca8ad473d1c69b33405836402143
Rust
santhoshselvan/advent-of-code
/2019/src/bin/day11.rs
UTF-8
2,878
3.28125
3
[]
no_license
use std::str::Lines;
use intcode::IntCode;
use std::collections::HashMap;
use plotlib::scatter::Scatter;
use plotlib::view::ContinuousView;
use plotlib::scatter;
use plotlib::style::Point;
use plotlib::page::Page;

/// A hull panel addressed by (x, y) grid coordinates.
type Panel = (i32, i32);

// Robot headings, numbered clockwise so a turn is modular arithmetic.
const UP: i32 = 0;
const RIGHT: i32 = 1;
const DOWN: i32 = 2;
const LEFT: i32 = 3;

// Panel colours as exchanged with the Intcode program.
const BLACK: i64 = 0;
const WHITE: i64 = 1;

/// Advent of Code 2019 day 11: run the hull-painting robot over the
/// puzzle input and render the painted registration identifier.
pub fn main() {
    let contents = include_str!("../../data/eleven.data");
    // fix: `paint_panels` returns (), so the previous
    // `let intcode = paint_panels(contents);` was a dead, misleadingly
    // named binding.
    paint_panels(contents);
}

/// Drives the Intcode brain: feeds it the colour under the robot, paints
/// the panel with the first output, turns and steps with the second, and
/// plots the white panels once the program stops producing output.
fn paint_panels(contents: &str) {
    let mut panels: HashMap<Panel, i64> = HashMap::new();
    let mut current_panel = (0, 0);
    let mut current_direction = UP;
    // Part 2: the robot starts on a single white panel.
    panels.insert(current_panel, WHITE);
    let mut intcode = IntCode::initialize(contents, None, true);
    loop {
        // Panels never painted read as black.
        let current_color = panels.get(&current_panel).copied().unwrap_or(BLACK);
        intcode.set_input(current_color);
        intcode.execute();
        if !intcode.has_output() {
            // Program halted: report and plot the white panels.
            println!("No output exiting: {:?}", panels);
            println!("count {}", panels.len());
            let data2 = panels
                .iter()
                .filter(|&x| *x.1 == 1)
                .map(|x| (*x.0))
                .map(|x| (x.0 as f64, x.1 as f64))
                .collect::<Vec<(f64, f64)>>();
            let scatter = Scatter::from_slice(&data2)
                .style(scatter::Style::new().colour("#35C788"));
            let v = ContinuousView::new()
                .add(&scatter)
                .x_range(-44., 44.)
                .y_range(-22., 22.);
            // fix: the Result from save() was silently dropped with a
            // must_use warning; keep the best-effort behaviour explicitly.
            let _ = Page::single(&v).save("./src/out/part11.svg");
            println!("{}", Page::single(&v).to_text().unwrap());
            break;
        }
        // First output: colour to paint the current panel.
        panels.insert(current_panel, intcode.take_output()[0]);
        intcode.execute();
        // Second output: turn command, then step forward.
        let direction_command = intcode.take_output()[0];
        current_direction = turn_direction(current_direction, direction_command);
        current_panel = move_panel(&current_panel, current_direction);
    }
}

/// One grid step forward in the given heading.
fn move_panel(current_panel: &(i32, i32), current_direction: i32) -> (i32, i32) {
    match current_direction {
        UP => (current_panel.0, current_panel.1 + 1),
        RIGHT => (current_panel.0 + 1, current_panel.1),
        DOWN => (current_panel.0, current_panel.1 - 1),
        LEFT => (current_panel.0 - 1, current_panel.1),
        _ => panic!(),
    }
}

/// Applies a turn command (0 = left, 1 = right) to the current heading.
fn turn_direction(current_direction: i32, next_direction: i64) -> i32 {
    match next_direction {
        0 => (4 + current_direction - 1) % 4,
        1 => (4 + current_direction + 1) % 4,
        _ => unreachable!(),
    }
}

// fix: the module previously used an invalid inner `#![test]` attribute;
// `#[cfg(test)]` on the module plus `#[test]` on each function is correct.
#[cfg(test)]
mod tests {
    #[allow(unused_imports)]
    use crate::paint_panels;

    #[test]
    fn paints_panels() {}
}
true
6910f04f7c8e8a56ba4bc60225553720b47f6c67
Rust
MiyamonY/atcoder
/codefestival/2018/final/a/01/src/main.rs
UTF-8
2,266
3
3
[]
no_license
// Competitive-programming solution (AtCoder CODE FESTIVAL 2018 Final, A).
// `scan!` is a stdin-reading macro family: `scan!()` -> trimmed line,
// `scan!(;;)` -> whitespace tokens, `scan!(T)` -> one parsed value,
// `scan!(T1, T2, ...)` -> parsed tuple, `scan!(T;;)` -> parsed vector of one
// line, and the `; n` / `;; n` arms repeat a scan n times.
// main(): reads n vertices and m weighted edges, tallies edge-weight counts
// per endpoint vertex, then for each vertex counts pairs of incident edges
// whose weights sum to 2540 (the self-paired weight 2540/2 uses n*(n-1));
// the final total is halved because each unordered pair is counted twice.
// NOTE(review): `for (&k, &n) in g` shadows the outer `n` and `m` bindings —
// intentional-looking but worth confirming against the original contest code.
// The collapsed one-line layout below is exactly as stored in this data row.
use std::collections::HashMap; #[allow(unused_macros)] macro_rules! scan { () => { { let mut line: String = String::new(); std::io::stdin().read_line(&mut line).unwrap(); line.trim().to_string() } }; (;;) => { { let mut line: String = String::new(); std::io::stdin().read_line(&mut line).unwrap(); line.trim().split_whitespace().map(|s| s.to_string()).collect::<Vec<String>>() } }; (;;$n:expr) => { { (0..$n).map(|_| scan!()).collect::<Vec<_>>() } }; ($t:ty) => { { let mut line: String = String::new(); std::io::stdin().read_line(&mut line).unwrap(); line.trim().parse::<$t>().unwrap() } }; ($($t:ty),*) => { { let mut line: String = String::new(); std::io::stdin().read_line(&mut line).unwrap(); let mut iter = line.split_whitespace(); ( $(iter.next().unwrap().parse::<$t>().unwrap(),)* ) } }; ($t:ty;;) => { { let mut line: String = String::new(); std::io::stdin().read_line(&mut line).unwrap(); line.split_whitespace() .map(|t| t.parse::<$t>().unwrap()) .collect::<Vec<_>>() } }; ($t:ty;;$n:expr) => { (0..$n).map(|_| scan!($t;;)).collect::<Vec<_>>() }; ($t:ty; $n:expr) => { (0..$n).map(|_| scan!($t) ).collect::<Vec<_>>() }; ($($t:ty),*; $n:expr) => { (0..$n).map(|_| scan!($($t),*) ).collect::<Vec<_>>() }; } fn main() { let (n, m) = scan!(usize, usize); let mut graph = vec![HashMap::new(); n + 1]; for _ in 0..m { let (a, b, l) = scan!(usize, usize, i64); *graph[a].entry(l).or_insert(0) += 1; *graph[b].entry(l).or_insert(0) += 1; } let mut ans = 0; for g in graph.iter().skip(1) { for (&k, &n) in g { if let Some(m) = g.get(&(2540 - k)) { ans += if k == 2540 / 2 { n * (n - 1) } else { n * m } } } } println!("{}", ans / 2); }
true
b24fe77cf601f04e632fcde29fef1f50862a0771
Rust
hw-standalonecomplex/file-lock
/src/util.rs
UTF-8
2,236
3.46875
3
[ "Apache-2.0", "MIT", "LicenseRef-scancode-unknown-license-reference" ]
permissive
use std::str::FromStr;
use std::fmt;
use std::error::Error;

/// Represents the kind of lock (e.g. *blocking*, *non-blocking*)
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum Kind {
    /// Attempt a lock without blocking the call
    NonBlocking,
    /// Attempt a lock and return from the call once the lock was obtained.
    Blocking,
}

/// Represents a file access mode, e.g. read or write
#[derive(Clone, Debug, Eq, PartialEq)]
pub enum Mode {
    /// Use this to obtain a shared lock, i.e. there may be any amount of readers
    /// at the same time.
    Read,
    /// Used to indicate an exclusive lock, i.e. there may only be one writer at a time.
    Write,
}

impl AsRef<str> for Kind {
    /// String form: "nowait" / "wait" — the same tokens `FromStr` accepts.
    fn as_ref(&self) -> &str {
        match *self {
            Kind::NonBlocking => "nowait",
            Kind::Blocking => "wait",
        }
    }
}

impl FromStr for Kind {
    type Err = ParseError;

    /// Parses "nowait" / "wait"; anything else is a `ParseError`.
    fn from_str(input: &str) -> Result<Self, Self::Err> {
        match input {
            "nowait" => Ok(Kind::NonBlocking),
            "wait" => Ok(Kind::Blocking),
            _ => Err(ParseError(format!("Unknown Kind: {}", input))),
        }
    }
}

impl AsRef<str> for Mode {
    /// String form: "read" / "write" — the same tokens `FromStr` accepts.
    fn as_ref(&self) -> &str {
        match *self {
            Mode::Read => "read",
            Mode::Write => "write",
        }
    }
}

impl FromStr for Mode {
    type Err = ParseError;

    /// Parses "read" / "write"; anything else is a `ParseError`.
    fn from_str(input: &str) -> Result<Self, Self::Err> {
        match input {
            "read" => Ok(Mode::Read),
            "write" => Ok(Mode::Write),
            _ => Err(ParseError(format!("Unknown Mode: {}", input))),
        }
    }
}

// fix: implement `From` rather than `Into` (clippy `from_over_into`); the
// std blanket impl still provides `Into<i32>` to existing callers, and
// `i32::from(...)` now works as well. Read = 0, Write = 1 as before.
impl From<Mode> for i32 {
    fn from(mode: Mode) -> i32 {
        match mode {
            Mode::Read => 0,
            Mode::Write => 1,
        }
    }
}

// NonBlocking = 0, Blocking = 1 as before.
impl From<Kind> for i32 {
    fn from(kind: Kind) -> i32 {
        match kind {
            Kind::NonBlocking => 0,
            Kind::Blocking => 1,
        }
    }
}

/// Error produced when parsing a `Kind` or `Mode` fails; the payload is a
/// human-readable message naming the rejected input.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct ParseError(String);

impl Error for ParseError {
    // Kept for backward compatibility although `description` is deprecated;
    // `Display` below is the preferred rendering.
    #[allow(deprecated)]
    fn description(&self) -> &str {
        &self.0
    }
}

impl fmt::Display for ParseError {
    fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
        self.0.fmt(f)
    }
}
true
5142a5f6cd623640ce97f6bc74c936c718861926
Rust
TheRealSyler/rust-audio
/src/waves.rs
UTF-8
976
3.765625
4
[]
no_license
/// The oscillator shapes this module can synthesise.
pub enum WaveType {
    Sine,
    Saw,
    SawReversed,
    Square,
    Triangle,
}

/// Samples the selected waveform for one clock tick.
///
/// `frequency` is the oscillator frequency, `sample_clock` counts emitted
/// samples, and `sample_rate` is samples per second. The result lies in
/// [-1.0, 1.0].
pub fn wave(wave_type: WaveType, frequency: f32, sample_clock: usize, sample_rate: f32) -> f32 {
    // Normalised position within the current cycle, in [0, 1).
    let cycle_pos = (frequency * sample_clock as f32 / sample_rate) % 1.0;
    match wave_type {
        WaveType::Sine => sine_at(cycle_pos),
        // Falling ramp from 1.0 down to just above 0.0.
        WaveType::Saw => 1.0 - cycle_pos,
        // Rising ramp: the phase itself.
        WaveType::SawReversed => cycle_pos,
        // Sign of the sine decides the square level; exactly 0.0 maps to -1.0.
        WaveType::Square => {
            if sine_at(cycle_pos) > 0.0 {
                1.0
            } else {
                -1.0
            }
        }
        // Peak of 1.0 at mid-cycle, -1.0 at the cycle boundaries.
        WaveType::Triangle => 1.0 - (cycle_pos - 0.5).abs() * 4.0,
    }
}

/// Sine value for a normalised phase in [0, 1).
#[inline]
fn sine_at(cycle_pos: f32) -> f32 {
    (cycle_pos * 2.0 * std::f32::consts::PI).sin()
}
true
24eb2bee6a031690b38c5d6a64e942ec73175b11
Rust
m9s/xmc1000
/xmc1000/src/pau/avail2/mod.rs
UTF-8
9,403
2.671875
3
[]
no_license
// svd2rust-GENERATED read accessor for the XMC1000 PAU AVAIL2 register:
// `R` wraps the raw u32 read via `super::AVAIL2`, and each availability bit
// gets a two-variant enum (VALUE1 = peripheral absent, VALUE2 = present) —
// AVAIL0..AVAIL3 (CC80/CCU80 kernel slices), AVAIL12 (POSIF0), AVAIL15
// (BCCU0). Machine-generated: do not hand-edit; regenerate from the SVD.
// NOTE(review): this data row splits the text mid string-literal across
// physical lines, so the content below is kept byte-identical.
#[doc = r" Value read from the register"] pub struct R { bits: u32, } impl super::AVAIL2 { #[doc = r" Reads the contents of the register"] #[inline] pub fn read(&self) -> R { R { bits: self.register.get(), } } } #[doc = "Possible values of the field `AVAIL0`"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum AVAIL0R { #[doc = "CC80 and CCU80 kernel SFRs are not available."] VALUE1, #[doc = "CC80 and CCU80 kernel SFRs are available."] VALUE2, } impl AVAIL0R { #[doc = r" Returns `true` if the bit is clear (0)"] #[inline] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline] pub fn bit_is_set(&self) -> bool { self.bit() } #[doc = r" Value of the field as raw bits"] #[inline] pub fn bit(&self) -> bool { match *self { AVAIL0R::VALUE1 => false, AVAIL0R::VALUE2 => true, } } #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _from(value: bool) -> AVAIL0R { match value { false => AVAIL0R::VALUE1, true => AVAIL0R::VALUE2, } } #[doc = "Checks if the value of the field is `VALUE1`"] #[inline] pub fn is_value1(&self) -> bool { *self == AVAIL0R::VALUE1 } #[doc = "Checks if the value of the field is `VALUE2`"] #[inline] pub fn is_value2(&self) -> bool { *self == AVAIL0R::VALUE2 } } #[doc = "Possible values of the field `AVAIL1`"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum AVAIL1R { #[doc = "CC81 is not available."] VALUE1, #[doc = "CC81 is available."] VALUE2, } impl AVAIL1R { #[doc = r" Returns `true` if the bit is clear (0)"] #[inline] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline] pub fn bit_is_set(&self) -> bool { self.bit() } #[doc = r" Value of the field as raw bits"] #[inline] pub fn bit(&self) -> bool { match *self { AVAIL1R::VALUE1 => false, AVAIL1R::VALUE2 => true, } } #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _from(value: bool) -> AVAIL1R { match value { false => AVAIL1R::VALUE1, true => AVAIL1R::VALUE2, } } #[doc = "Checks 
if the value of the field is `VALUE1`"] #[inline] pub fn is_value1(&self) -> bool { *self == AVAIL1R::VALUE1 } #[doc = "Checks if the value of the field is `VALUE2`"] #[inline] pub fn is_value2(&self) -> bool { *self == AVAIL1R::VALUE2 } } #[doc = "Possible values of the field `AVAIL2`"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum AVAIL2R { #[doc = "CC82 is not available."] VALUE1, #[doc = "CC82 is available."] VALUE2, } impl AVAIL2R { #[doc = r" Returns `true` if the bit is clear (0)"] #[inline] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline] pub fn bit_is_set(&self) -> bool { self.bit() } #[doc = r" Value of the field as raw bits"] #[inline] pub fn bit(&self) -> bool { match *self { AVAIL2R::VALUE1 => false, AVAIL2R::VALUE2 => true, } } #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _from(value: bool) -> AVAIL2R { match value { false => AVAIL2R::VALUE1, true => AVAIL2R::VALUE2, } } #[doc = "Checks if the value of the field is `VALUE1`"] #[inline] pub fn is_value1(&self) -> bool { *self == AVAIL2R::VALUE1 } #[doc = "Checks if the value of the field is `VALUE2`"] #[inline] pub fn is_value2(&self) -> bool { *self == AVAIL2R::VALUE2 } } #[doc = "Possible values of the field `AVAIL3`"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum AVAIL3R { #[doc = "CC83 is not available."] VALUE1, #[doc = "CC83 is available."] VALUE2, } impl AVAIL3R { #[doc = r" Returns `true` if the bit is clear (0)"] #[inline] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline] pub fn bit_is_set(&self) -> bool { self.bit() } #[doc = r" Value of the field as raw bits"] #[inline] pub fn bit(&self) -> bool { match *self { AVAIL3R::VALUE1 => false, AVAIL3R::VALUE2 => true, } } #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _from(value: bool) -> AVAIL3R { match value { false => AVAIL3R::VALUE1, true => AVAIL3R::VALUE2, } } #[doc = "Checks if the value of the 
field is `VALUE1`"] #[inline] pub fn is_value1(&self) -> bool { *self == AVAIL3R::VALUE1 } #[doc = "Checks if the value of the field is `VALUE2`"] #[inline] pub fn is_value2(&self) -> bool { *self == AVAIL3R::VALUE2 } } #[doc = "Possible values of the field `AVAIL12`"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum AVAIL12R { #[doc = "POSIF0 is not available."] VALUE1, #[doc = "POSIF0 is available."] VALUE2, } impl AVAIL12R { #[doc = r" Returns `true` if the bit is clear (0)"] #[inline] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline] pub fn bit_is_set(&self) -> bool { self.bit() } #[doc = r" Value of the field as raw bits"] #[inline] pub fn bit(&self) -> bool { match *self { AVAIL12R::VALUE1 => false, AVAIL12R::VALUE2 => true, } } #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _from(value: bool) -> AVAIL12R { match value { false => AVAIL12R::VALUE1, true => AVAIL12R::VALUE2, } } #[doc = "Checks if the value of the field is `VALUE1`"] #[inline] pub fn is_value1(&self) -> bool { *self == AVAIL12R::VALUE1 } #[doc = "Checks if the value of the field is `VALUE2`"] #[inline] pub fn is_value2(&self) -> bool { *self == AVAIL12R::VALUE2 } } #[doc = "Possible values of the field `AVAIL15`"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum AVAIL15R { #[doc = "BCCU0 is not available."] VALUE1, #[doc = "BCCU0 is available."] VALUE2, } impl AVAIL15R { #[doc = r" Returns `true` if the bit is clear (0)"] #[inline] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline] pub fn bit_is_set(&self) -> bool { self.bit() } #[doc = r" Value of the field as raw bits"] #[inline] pub fn bit(&self) -> bool { match *self { AVAIL15R::VALUE1 => false, AVAIL15R::VALUE2 => true, } } #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _from(value: bool) -> AVAIL15R { match value { false => AVAIL15R::VALUE1, true => AVAIL15R::VALUE2, } } #[doc = "Checks if the value of 
the field is `VALUE1`"] #[inline] pub fn is_value1(&self) -> bool { *self == AVAIL15R::VALUE1 } #[doc = "Checks if the value of the field is `VALUE2`"] #[inline] pub fn is_value2(&self) -> bool { *self == AVAIL15R::VALUE2 } } impl R { #[doc = r" Value of the register as raw bits"] #[inline] pub fn bits(&self) -> u32 { self.bits } #[doc = "Bit 0 - CC80 and CCU80 kernel SFRs Availability Flag"] #[inline] pub fn avail0(&self) -> AVAIL0R { AVAIL0R::_from({ const MASK: bool = true; const OFFSET: u8 = 0; ((self.bits >> OFFSET) & MASK as u32) != 0 }) } #[doc = "Bit 1 - CC81 Availability Flag"] #[inline] pub fn avail1(&self) -> AVAIL1R { AVAIL1R::_from({ const MASK: bool = true; const OFFSET: u8 = 1; ((self.bits >> OFFSET) & MASK as u32) != 0 }) } #[doc = "Bit 2 - CC82 Availability Flag"] #[inline] pub fn avail2(&self) -> AVAIL2R { AVAIL2R::_from({ const MASK: bool = true; const OFFSET: u8 = 2; ((self.bits >> OFFSET) & MASK as u32) != 0 }) } #[doc = "Bit 3 - CC83 Availability Flag"] #[inline] pub fn avail3(&self) -> AVAIL3R { AVAIL3R::_from({ const MASK: bool = true; const OFFSET: u8 = 3; ((self.bits >> OFFSET) & MASK as u32) != 0 }) } #[doc = "Bit 12 - POSIF0 Availability Flag"] #[inline] pub fn avail12(&self) -> AVAIL12R { AVAIL12R::_from({ const MASK: bool = true; const OFFSET: u8 = 12; ((self.bits >> OFFSET) & MASK as u32) != 0 }) } #[doc = "Bit 15 - BCCU0 Availability Flag"] #[inline] pub fn avail15(&self) -> AVAIL15R { AVAIL15R::_from({ const MASK: bool = true; const OFFSET: u8 = 15; ((self.bits >> OFFSET) & MASK as u32) != 0 }) } }
true
4ce1d8c4b97c694c3a5bc38f07e5851ac0013223
Rust
mflinn92/rusty-engine
/src/dom.rs
UTF-8
3,659
3.765625
4
[]
no_license
use std::collections::HashMap;

/// Attribute name -> value map carried by element nodes.
pub type AttrMap = HashMap<String, String>;

/// A node in the DOM tree.
#[derive(Debug)]
pub struct Node {
    // data common to all nodes
    children: Vec<Node>,
    // data specific to a node type
    node_type: NodeType,
}

impl Node {
    /// Builds a leaf text node holding `data`.
    pub fn new_text(data: String) -> Self {
        Node {
            children: Vec::new(),
            node_type: NodeType::Text(data),
        }
    }

    /// Builds an element node with the given tag name, attributes and children.
    pub fn new_element(name: String, attrs: AttrMap, children: Vec<Node>) -> Self {
        Node {
            children,
            node_type: NodeType::Element(ElementData {
                tag_name: name,
                attributes: attrs,
            }),
        }
    }

    /// Child nodes, in document order.
    pub fn children(&self) -> &Vec<Node> {
        &self.children
    }

    /// A label for the node kind: "text" or "element".
    ///
    /// Always `Some` for the current variants; the `Option` return type is
    /// kept for interface stability with existing callers.
    pub fn node_type(&self) -> Option<String> {
        match self.node_type {
            NodeType::Text(_) => Some("text".to_string()),
            NodeType::Element(_) => Some("element".to_string()),
        }
    }

    /// Returns the element's tag name, or `None` for text nodes.
    pub fn get_tag(&self) -> Option<String> {
        match &self.node_type {
            NodeType::Element(elem) => Some(elem.tag_name.clone()),
            _ => None,
        }
    }

    /// Returns the text content, or `None` for element nodes.
    pub fn get_text(&self) -> Option<String> {
        match &self.node_type {
            NodeType::Text(data) => Some(data.to_string()),
            _ => None,
        }
    }
}

/// The variant-specific payload of a node.
#[derive(Debug)]
enum NodeType {
    Text(String),
    Element(ElementData),
}

/// Tag name plus attributes for an element node.
#[derive(Debug)]
struct ElementData {
    tag_name: String,
    attributes: AttrMap,
}

// fix: the module previously lacked `#[cfg(test)]`, so the tests (and their
// imports) were compiled into every build.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_new_text_node() {
        let data = String::from("This is some text");
        let node = Node::new_text(data.clone());
        // node should have no children
        assert!(node.children.is_empty());
        // fix: the old `if let` silently passed when the variant was wrong;
        // fail loudly instead.
        match node.node_type {
            NodeType::Text(got) => assert_eq!(got, data),
            _ => panic!("Expected text node, found unexpected node type"),
        }
    }

    #[test]
    fn test_new_element_node() {
        let mut attrs = AttrMap::new();
        let test_key = "test_key";
        let test_attr = "test attribute";
        attrs.insert(String::from(test_key), String::from(test_attr));
        let tag_name = String::from("test_elem");
        let node = Node::new_element(tag_name, attrs, Vec::new());
        // should have no children
        assert!(node.children.is_empty());
        // Check element data; fail loudly on the wrong variant.
        match node.node_type {
            NodeType::Element(elem) => {
                assert_eq!(elem.tag_name, "test_elem");
                assert_eq!(
                    elem.attributes.get(test_key).map(String::as_str),
                    Some(test_attr)
                );
            }
            _ => panic!("Expected element node, found unexpected node type"),
        }
    }

    #[test]
    fn test_dom_tree() {
        // create a child node
        let data = String::from("This is some text");
        let text_node = Node::new_text(data.clone());
        // Set up root node
        let mut attrs = AttrMap::new();
        attrs.insert(String::from("test_key"), String::from("test attribute"));
        let node = Node::new_element(String::from("test_elem"), attrs, vec![text_node]);
        // confirm ability to access child through parent
        assert_eq!(node.children.len(), 1);
        let child = node.children.get(0).unwrap();
        match &child.node_type {
            NodeType::Text(text) => assert_eq!(text, &data),
            _ => panic!("Expected text node, found unexpected node type"),
        }
    }
}
true
001d0980c9c3c61e53a9c2977efa744c68fbe257
Rust
Spxg/library-tool
/src/opt.rs
UTF-8
888
2.890625
3
[]
no_license
// structopt-derive CLI definition for `library_tool`.
// `Opt` is the top-level parser with a required subcommand. `Command` has:
//   login <username> <password>  — login account
//   library <in|out>             — in/out library; the `in` arm takes
//                                  `--api/-a` (u8, default "1").
// NOTE(review): stored collapsed on one line in this data row; kept verbatim.
use structopt_derive::*; #[derive(StructOpt)] #[structopt(name = "library_tool")] pub struct Opt { #[structopt(subcommand)] pub cmd: Command } #[derive(StructOpt)] pub enum Command { #[structopt(about = "login account", usage = "library-tool login [username] [password]")] Login { #[structopt(help = "username")] username: String, #[structopt(help = "password")] password: String, }, #[structopt(about = "in/out library", usage = "library-tool library [in/out] [--api/-a]")] Library { #[structopt(subcommand)] op: InOrOut }, } #[derive(StructOpt)] pub enum InOrOut { #[structopt(help = "in", about = "in to library")] In { #[structopt(help = "api", short = "a", long = "api", default_value = "1")] api: u8 }, #[structopt(help = "out", about = "out from library")] Out }
true
5b9bd087faa4d9cfaa63893c39bce216a44e3520
Rust
baloo/pm-firmware
/src/utils/io/write_all.rs
UTF-8
1,116
2.625
3
[]
no_license
// no_std-style async helper: drive a custom `Write` trait until a whole
// buffer is flushed. `Error<T>` distinguishes a zero-byte write
// (`WriteZero`) from the sink's own error (`Other`). `readyd!` mirrors the
// futures `ready!` macro but also takes a position expression for the
// (commented-out) debug print. `write_all` returns a `poll_fn` future that
// loops `poll_write` from the current offset, bailing out with
// `Error::WriteZero` if the sink ever accepts 0 bytes.
// NOTE(review): stored collapsed on one line in this data row; the `//`
// comment inside `readyd!` means this row is not re-parseable as-is —
// content kept byte-identical.
use core::pin::Pin; use futures_core::{future::Future, task::Poll}; use futures_util::{future::poll_fn, ready}; use super::Write; #[derive(Debug)] pub enum Error<T> { WriteZero, Other(T), } impl<T> From<T> for Error<T> { fn from(err: T) -> Self { Error::Other(err) } } #[macro_export] macro_rules! readyd { ($position:expr, $e:expr $(,)?) => { match $e { Poll::Ready(t) => t, Poll::Pending => { //hprintln!("n{:?}", $position); return Poll::Pending; } } }; } pub fn write_all<'a, W: Write + 'a>( mut this: Pin<&'a mut W>, buf: impl AsRef<[u8]> + 'a, ) -> impl Future<Output = Result<(), Error<W::Error>>> + 'a { let mut position = 0; poll_fn(move |cx| { let buf = buf.as_ref(); while position < buf.len() { let amount = readyd!(position, this.as_mut().poll_write(cx, &buf[position..]))?; position += amount; if amount == 0 { Err(Error::WriteZero)?; } } Poll::Ready(Ok(())) }) }
true
5362459498932fee440016481fb563964834a7e0
Rust
MORTAL2000/titanion
/src/ttn/preference.rs
UTF-8
2,891
2.5625
3
[ "BSD-3-Clause", "BSD-2-Clause" ]
permissive
// Titanion (D-to-Rust port) high-score preferences: persists a per-mode
// top-10 score table (`RANKING_NUM` = 10 rows x `MODE_NUM` = 3 modes) plus
// the last selected mode in "ttn.prf", guarded by `VERSION_NUM` = 30.
// The generic `read`/`write` helpers are unimplemented TODO stubs over the
// dummy `File` type from `ttn::dummy` (the original D code is kept in the
// block comment). `recordResult` inserts a score into its mode's table,
// shifting lower entries down; `load` panics on a version mismatch.
// NOTE(review): stored collapsed across two physical lines in this data
// row; inline `//` comments make it non-re-parseable as-is — content kept
// byte-identical, including the D-era camelCase method names.
/* * Copyright 2006 Kenta Cho. Some rights reserved. */ use ttn::dummy::*; /* fn read(T)(fd : File, dst &T) { let count = fd.read ((cast(void*) &dst)[0..int.sizeof]); //assert (count is int.sizeof); } fn write(T)(File fd, T* dst) { let count = fd.write ((cast(void*) &dst)[0..int.sizeof]); //assert (count is int.sizeof); }*/ fn read<T>(fd : &File, dst : &mut T) { //TODO } fn write<T>(fd : &File, dst : &T) { //TODO } pub const RANKING_NUM : usize = 10; const MODE_NUM : usize = 3; const VERSION_NUM : i32 = 30; const PREF_FILE_NAME : &'static str = "ttn.prf"; /* * Load/Save/Record a high score table. */ pub struct Preference { //preference : src.util.preference.Preference: _highScore : [[i32; RANKING_NUM]; MODE_NUM], _lastMode : i32, } impl Preference { pub fn new() -> Self { Preference { _lastMode : 2, _highScore : [ [100000, 90000, 80000, 70000, 60000, 50000, 40000, 30000, 20000, 10000], [100000, 90000, 80000, 70000, 60000, 50000, 40000, 30000, 20000, 10000], [100000, 90000, 80000, 70000, 60000, 50000, 40000, 30000, 20000, 10000] ], } } pub fn load(&mut self) { //let fd : File; //try { let fd = File::new(PREF_FILE_NAME, FileReadExisting); let mut ver : i32; read::<i32>(&fd, &mut ver); if ver != VERSION_NUM { panic!("Wrong version num"); } read::<i32>(&fd, &mut self._lastMode); for j in 0..MODE_NUM { for i in 0..RANKING_NUM { read::<i32>(&fd, &mut self._highScore[j][i]); } } //} catch (Object e) { // init(); //} finally { //if fd { fd.close(); //} //} } /* fn init(&mut self) { self._lastMode = 2; for j in 0..MODE_NUM { for i in 0..RANKING_NUM { self._highScore[j][i] = (10 - i) * 10000; } } } */ pub fn save(&mut self) { let fd = File::new(PREF_FILE_NAME, FileWriteCreate); write::<i32>(&fd, &VERSION_NUM); write::<i32>(&fd, &self._lastMode); for j in 0..MODE_NUM { for i in 0..RANKING_NUM { write::<i32>(&fd, &self._highScore[j][i]); } } fd.close(); } pub fn setMode(&mut self, mode : i32) { self._lastMode = mode; } pub fn recordResult(&mut self, score : i32, mode : i32) { 
self.setMode(mode); for i in 0..RANKING_NUM { if score > self._highScore[mode as usize][i] { for j in ((i+1)..RANKING_NUM).rev() { self._highScore[mode as usize][j] = self._highScore[mode as usize][j - 1]; } self._highScore[mode as usize][i] = score; return; } } } pub fn highScore(&self) -> &[[i32; RANKING_NUM]; MODE_NUM] { &self._highScore } pub fn lastMode(&self) -> i32 { self._lastMode } }
true
eb506f123338e8e4027d83201f70646c348c921c
Rust
siikamiika/gba
/src/main.rs
UTF-8
2,535
2.828125
3
[]
no_license
// Game Boy Advance emulator entry point (nightly crate: feature gate for
// int_to_from_bytes). main(): takes BIOS and ROM paths from argv[1] and
// argv[2], constructs a `gba::Gba`, and single-steps it with a run of
// `tick()` calls for manual testing. The trailing commented-out `test()`
// exercised CPU register banking, ARM instruction decoding, and the memory
// map (word/halfword/byte reads and writes) plus a crude speed loop.
// NOTE(review): argv is indexed without a length check, so running with no
// arguments panics — acceptable for a dev harness, worth confirming.
// Stored collapsed across two physical lines in this data row; inline `//`
// comments make it non-re-parseable as-is — content kept byte-identical.
#![feature(int_to_from_bytes)] use std::thread::sleep; use std::time::Duration; use std::env::args; mod gba; mod memory; mod cpu; mod registers; mod arm_instructions; use self::registers::Register::*; use self::cpu::Mode::*; use self::arm_instructions::decode_instruction; fn main() { // command line arguments let args: Vec<_> = args().collect(); let bios = &args[1]; let rom = &args[2]; // instantiate GBA let mut gba = gba::Gba::new(bios, rom); // testing gba.tick(); gba.tick(); gba.tick(); gba.tick(); gba.tick(); gba.tick(); gba.tick(); gba.tick(); gba.tick(); gba.tick(); gba.tick(); gba.tick(); gba.tick(); gba.tick(); gba.tick(); gba.tick(); gba.tick(); gba.tick(); gba.tick(); gba.tick(); gba.tick(); gba.tick(); gba.tick(); gba.tick(); gba.tick(); gba.tick(); gba.tick(); gba.tick(); } // fn test() { // // cpu testing // let mut arm7tdmi = cpu::ARM7TDMI::new(); // // write to register R8 in user mode and check written value // // arm7tdmi.write_register(999, R8); // // assert_eq!(arm7tdmi.read_register(R8), 999); // // set fast interrupt mode and ensure that the value of R8 has changed // // arm7tdmi.set_mode(Fiq); // // assert_ne!(arm7tdmi.read_register(R8), 999); // // instruction testing // println!("{:?}", decode_instruction(0b0001_0010_1111_1111_1111_1001_0000)); // println!("{:?}", decode_instruction(0xea000018)); // bios first word // // memory testing // let mut mem = memory::Memory::new(); // // everything gets initialized as 0 // assert_eq!(mem.read_word(0x02000000), 0); // // test word write and read // mem.write_word(1234, 0x02000000); // assert_eq!(mem.read_word(0x02000000), 1234); // // test halfword write and read // mem.write_halfword(1234, 0x02000004); // assert_eq!(mem.read_halfword(0x02000004), 1234); // // test byte write and read // mem.write_byte(0, 0x02000000); // assert_eq!(mem.read_byte(0x02000000), 0); // // should panic // // println!("{}", mem.read_word(0x01ffffff)); // // mem.write_word(1234, 0x0203FFFF); // // speed testing // for _ in 
0..100_000_000 { // mem.read_word(0x08000000); // // mem.read_byte(0x08000000); // // mem.write_word(0xccddeeff, 0x02000000); // // arm7tdmi.write_register(999, R0); // // arm7tdmi.read_register(R0); // } // }
true
99b43fb4a1c96203a8943a254369efc5fc57403f
Rust
serabis-solutions/serabis
/condition-checker/src/tables/agents.rs
UTF-8
714
2.59375
3
[]
no_license
use super::Table;
use r2d2_postgres::{PostgresConnectionManager};
use r2d2::{Pool};

/// Accessor for the `agents` database table, backed by an r2d2 pool.
#[derive(Debug)]
pub struct Agents {
    pool: Pool<PostgresConnectionManager>,
}

/// One row of the `agents` table.
#[derive(Debug)]
pub struct Agent {
    id: i32,
}

impl Agents {
    /// Fetches every agent id from the database.
    ///
    /// Panics if a connection cannot be checked out or the query fails.
    pub fn get_agents(&self) -> Vec<Agent> {
        // fix: `Pool::get` borrows the pool, so the previous
        // `self.pool.clone().get()` cloned the pool handle for nothing.
        let conn = self.pool.get().unwrap();
        conn.query("SELECT id FROM agents", &[])
            .unwrap()
            .iter()
            .map(|row| Agent { id: row.get(0) })
            .collect()
    }
}

impl Table for Agents {
    /// Wraps an existing connection pool.
    fn new(pool: Pool<PostgresConnectionManager>) -> Self {
        Agents { pool }
    }
}
true
5d371a2691bc53084e70c5cc5a49bcecc3615ddf
Rust
aaron-human/RoboExplore
/rust/src/geo/line_segment.rs
UTF-8
16,092
3.59375
4
[]
no_license
use super::consts::*; use super::common::*; use super::vec2::*; use super::range::*; use super::bounds2::*; /// A line segment. #[derive(Debug, Clone)] pub struct LineSegment { pub start : Vec2, // One of the end points. pub end : Vec2, // The other end point. pub length : f32, pub direction : Vec2, // The direction from start to end. Always unit length, unless the line is just a point (then it's zero length as that makes intersection checking easier). } // TODO: Make the above not pub... (Don't want a user would modifing them!) /// All possible results of two line segments intersecting. #[derive(Debug)] pub enum LineSegmentIntersection { Point(Vec2), // A single point. Many(LineSegment), // A range of points because the line segments share an entire line-segment worth of points. None, // No intersection. } impl LineSegmentIntersection { /// A simple way to check for "no intersection". Mainly for the automated tests. pub fn is_none(&self) -> bool { match self { LineSegmentIntersection::None => true, _ => false, } } } impl LineSegment { /// Creates a line segment with the given end points. pub fn new(start : &Vec2, end : &Vec2) -> LineSegment { let delta = end - start; let mut length = delta.length(); if length < EPSILON { length = 0.0; } LineSegment { start: start.clone(), end: end.clone(), length, direction: if 0.0 == length { Vec2::zero() } else { delta.norm() }, } } /// Check if two lines overlap. Tries to be efficient and doesn't find where they overlap. pub fn check_if_intersects_with_line_segment(&self, other : &LineSegment) -> bool { // If the bounding boxes don't even overlap, then they definitely don't intersect. if !Bounds2::from_points(&self.start, &self.end).overlaps(&Bounds2::from_points(&other.start, &other.end)) { return false; } // If each pair of end points is on either side of the opposite line, then they intersect. 
let other_start_to_self_start = &self.start - &other.start; let other_start_to_self_end = &self.end - &other.start; let self_start_to_other_start = &other.start - &self.start; let self_start_to_other_end = &other.end - &self.start; let self_start_side = sign(other.direction.ext(&other_start_to_self_start)); let self_end_side = sign(other.direction.ext(&other_start_to_self_end)); let other_start_side = sign(self.direction.ext( &self_start_to_other_start)); let other_end_side = sign(self.direction.ext( &self_start_to_other_end)); // Note having one zero should be fine: it just means one of the end points is on the other line. if self_start_side != self_end_side && other_start_side != other_end_side { return true; } // One last way could be intersecting: if both lines are colinear. At that point all "sides" would be 0. if 0.0 == self_start_side && 0.0 == self_end_side && 0.0 == other_start_side && 0.0 == other_end_side { // At this point, use dot product to see if any of the start/end points are between the other line's. let mut along; along = other.direction.dot(&other_start_to_self_start); if -EPSILON < along && along - other.length < EPSILON { return true; } along = other.direction.dot(&other_start_to_self_end); if -EPSILON < along && along - other.length < EPSILON { return true; } along = self.direction.dot( &self_start_to_other_start); if -EPSILON < along && along - self.length < EPSILON { return true; } along = self.direction.dot( &self_start_to_other_end); if -EPSILON < along && along - self.length < EPSILON { return true; } } false // If all else fails, then they're not intersecting. } /// Gets the shortest distance to a point from somewhere on this line segment. 
pub fn shortest_distance_to_point(&self, point : &Vec2) -> f32 { let offset = point - &self.start; let along = self.direction.dot(&offset); println!("along: {:?} vs {:?}", along, self.length); if -EPSILON < along && along - self.length < EPSILON { self.direction.ext(&offset).abs() } else { // Must be one of the end points. (if 0.0 > along { offset } else { point - &self.end }).length() } } /// Find the intersection between two line segments (if one exists). pub fn find_intersection_with_line_segment(&self, other : &LineSegment) -> LineSegmentIntersection { // If the bounding boxes don't even overlap, then they definitely don't intersect. if !Bounds2::from_points(&self.start, &self.end).overlaps(&Bounds2::from_points(&other.start, &other.end)) { return LineSegmentIntersection::None; } // To find the probable point of intersection get the perpendicular distance from this line segment to the other's starting point. // Then convert the other line segment's "direction" into a value that decides how quickly it moves toward/away from this line segment when tranveling from its start to end. // Use that to figure out where the segments would have to intersect. let start_offset = &other.start - &self.start; let start_perp_dist = self.direction.ext(&start_offset); let perp_direction = self.direction.ext(&other.direction); // If the lines are parallel, things degenerate quickly. if perp_direction.abs() < EPSILON { // If they're not fully colinear lines, then no intersection. if EPSILON < start_perp_dist.abs() { return LineSegmentIntersection::None; } // Otherwise the lines are on the same infinite line, and must overlap because their bounding boxes do. // Get the signed direction to all start/end points using this.start using this.direction. let self_range = Range::from_values(0.0, self.length); // self.start is obviously at 0.0, self.end is at self.length since self.direction is unit length. 
let other_range = Range::from_values( self.direction.dot(start_offset), self.direction.dot(&other.end - &self.start), ); let overlap = self_range.intersect(other_range); let hit_start = self.direction.scale(overlap.min().unwrap()) + &self.start; let hit_end = self.direction.scale(overlap.max().unwrap()) + &self.start; return if (&hit_end - &hit_start).length() < EPSILON { LineSegmentIntersection::Point(hit_start) } else { LineSegmentIntersection::Many(LineSegment::new(&hit_start, &hit_end)) } } // Otherwise, they're not parallel, and there's one (possible) point of intersection where: 0 = start_perp_dist + perp_direction * t let t = -start_perp_dist / perp_direction; // If the time is negative, then it's before the other line segment's start, so no intersection. if t < -EPSILON { return LineSegmentIntersection::None; } let possible = other.direction.scale(t) + &other.start; // Last check to see if the point is between the start and end of both line segments. let self_along = self.direction.dot( &possible - &self.start); let other_along = other.direction.dot(&possible - &other.start); if -EPSILON > self_along || EPSILON < self_along - self.length || -EPSILON > other_along || EPSILON < other_along - other.length { return LineSegmentIntersection::None; // Past one of the end points. } // At this point, it's definitely a valid intersection. // Do a little snapping (to end points) if it goes past them. if 0.0 <= self_along - self.length { LineSegmentIntersection::Point(self.end.clone()) } else if 0.0 >= self_along { LineSegmentIntersection::Point(self.start.clone()) } else if 0.0 <= other_along - other.length { LineSegmentIntersection::Point(other.end.clone()) } else if 0.0 >= other_along { LineSegmentIntersection::Point(other.start.clone()) } else { LineSegmentIntersection::Point(possible) } } /// Gets the end point that doesn't match the one passed in. 
pub fn get_other_end_point<'a>(&'a self, check : &Vec2) -> &'a Vec2 {
    // Returns `end` when `check` matches `start` (within EPSILON), otherwise `start`.
    // Fixed: borrow self.start for the subtraction, matching the `&Vec2 - &Vec2`
    // form used everywhere else in this file instead of moving/copying the field.
    if (&self.start - check).length() < EPSILON {
        &self.end
    } else {
        &self.start
    }
}
}

#[cfg(test)]
mod test_intersection {
    use super::*;
    use crate::{assert_about_eq, assert_vec2_about_eq};

    /// Exercises check_if_intersects_with_line_segment() across its branches:
    /// disjoint boxes, plain crossings, touching (T-shape), colinear overlap,
    /// degenerate point-sized segments, and same-side rejection.
    #[test]
    fn overlaps() {
        // Bounding boxes don't overlap.
        assert!(!LineSegment::new(
            &Vec2::new(0.0, 1.0),
            &Vec2::new(1.0, 0.0),
        ).check_if_intersects_with_line_segment(&LineSegment::new(
            &Vec2::new(2.0, 3.0),
            &Vec2::new(3.0, 2.0),
        )));
        // Normal perpendicular where points are clearly on opposite sides of other line.
        assert!(LineSegment::new(
            &Vec2::new( 0.0, 1.0),
            &Vec2::new( 1.0, 0.0),
        ).check_if_intersects_with_line_segment(&LineSegment::new(
            &Vec2::new( 2.0, 2.0),
            &Vec2::new(-2.0,-2.0),
        )));
        // T-shape, where one point is on the other line. Swap around for 4 possible point orderings.
        assert!(LineSegment::new(
            &Vec2::new(1.0, 1.0),
            &Vec2::new(3.0, 5.0),
        ).check_if_intersects_with_line_segment(&LineSegment::new(
            &Vec2::new(0.0, 3.0),
            &Vec2::new(2.0, 3.0),
        )));
        assert!(LineSegment::new(
            &Vec2::new(1.0, 1.0),
            &Vec2::new(3.0, 5.0),
        ).check_if_intersects_with_line_segment(&LineSegment::new(
            &Vec2::new(2.0, 3.0),
            &Vec2::new(0.0, 3.0),
        )));
        assert!(LineSegment::new(
            &Vec2::new(0.0, 3.0),
            &Vec2::new(2.0, 3.0),
        ).check_if_intersects_with_line_segment(&LineSegment::new(
            &Vec2::new(1.0, 1.0),
            &Vec2::new(3.0, 5.0),
        )));
        assert!(LineSegment::new(
            &Vec2::new(2.0, 3.0),
            &Vec2::new(0.0, 3.0),
        ).check_if_intersects_with_line_segment(&LineSegment::new(
            &Vec2::new(1.0, 1.0),
            &Vec2::new(3.0, 5.0),
        )));
        // Then try colinear with all 4 possible combinations.
        assert!(LineSegment::new(
            &Vec2::new(1.0, 1.0),
            &Vec2::new(3.0, 3.0),
        ).check_if_intersects_with_line_segment(&LineSegment::new(
            &Vec2::new(0.0, 0.0),
            &Vec2::new(2.0, 2.0),
        )));
        assert!(LineSegment::new(
            &Vec2::new(1.0, 1.0),
            &Vec2::new(3.0, 3.0),
        ).check_if_intersects_with_line_segment(&LineSegment::new(
            &Vec2::new(2.0, 2.0),
            &Vec2::new(0.0, 0.0),
        )));
        assert!(LineSegment::new(
            &Vec2::new(3.0, 3.0),
            &Vec2::new(1.0, 1.0),
        ).check_if_intersects_with_line_segment(&LineSegment::new(
            &Vec2::new(0.0, 0.0),
            &Vec2::new(2.0, 2.0),
        )));
        assert!(LineSegment::new(
            &Vec2::new(3.0, 3.0),
            &Vec2::new(1.0, 1.0),
        ).check_if_intersects_with_line_segment(&LineSegment::new(
            &Vec2::new(2.0, 2.0),
            &Vec2::new(0.0, 0.0),
        )));
        // Try a degenerate case where one line is a point. Have it: on the line,
        // beyond the end point of the line, and way off the line.
        assert!(LineSegment::new(
            &Vec2::new(3.0, 3.0),
            &Vec2::new(1.0, 1.0),
        ).check_if_intersects_with_line_segment(&LineSegment::new(
            &Vec2::new(2.0, 2.0),
            &Vec2::new(2.0, 2.0),
        )));
        assert!(!LineSegment::new(
            &Vec2::new(3.0, 3.0),
            &Vec2::new(1.0, 1.0),
        ).check_if_intersects_with_line_segment(&LineSegment::new(
            &Vec2::new(4.0, 4.0),
            &Vec2::new(4.0, 4.0),
        )));
        assert!(!LineSegment::new(
            &Vec2::new(3.0, 3.0),
            &Vec2::new(1.0, 1.0),
        ).check_if_intersects_with_line_segment(&LineSegment::new(
            &Vec2::new(6.0, 3.0),
            &Vec2::new(6.0, 3.0),
        )));
        // Check that the "on same side" checking is right.
        assert!(!LineSegment::new(
            &Vec2::new(1.0, 1.0),
            &Vec2::new(10.0, 10.0),
        ).check_if_intersects_with_line_segment(&LineSegment::new(
            &Vec2::new(5.5, 4.5),
            &Vec2::new(6.0, 4.0),
        )));
    }

    /// Covers both branches of shortest_distance_to_point(): projection inside
    /// the segment (perpendicular distance) and past either end point.
    #[test]
    fn check_shortest_distance() {
        let line = LineSegment::new(&Vec2::new(1.0, 1.0), &Vec2::new(3.0, 3.0));
        assert_about_eq!(2.0_f32.sqrt(), line.shortest_distance_to_point(&Vec2::new(3.0, 1.0)));
        assert_about_eq!(1.0, line.shortest_distance_to_point(&Vec2::new(4.0, 3.0)));
        assert_about_eq!(1.0, line.shortest_distance_to_point(&Vec2::new(1.0, 0.0)));
        assert_about_eq!(2.0_f32.sqrt(), line.shortest_distance_to_point(&Vec2::new(0.0, 0.0)));
    }

    /// Asserts that a LineSegmentIntersection is a Point() type, and that the
    /// point passes an assert_vec2_about_eq!().
    macro_rules! assert_intersection_is_point {
        ( $result:expr , $point:expr ) => {
            let result = $result;
            if let LineSegmentIntersection::Point(hit) = result {
                assert_vec2_about_eq!(hit, $point);
            } else {
                panic!("Expected single point intersection but got {:?}", result);
            }
        };
    }

    /// Simple white-box testing for the find_intersection_with_line_segment() function.
    #[test]
    fn check_find_intersection_with_line_segment() {
        // Check when the bounding boxes don't even overlap.
        assert!(LineSegment::new(
            &Vec2::new(1.0, 1.0),
            &Vec2::new(0.0, 0.0),
        ).find_intersection_with_line_segment(&LineSegment::new(
            &Vec2::new(1.0,-1.0),
            &Vec2::new(5.0,-5.0),
        )).is_none());
        // Check parallel but not on same "infinite line".
        assert!(LineSegment::new(
            &Vec2::new(1.0, 1.0),
            &Vec2::new(5.0, 5.0),
        ).find_intersection_with_line_segment(&LineSegment::new(
            &Vec2::new(1.0, 2.0),
            &Vec2::new(5.0, 6.0),
        )).is_none());
        // Check on same "infinite line" and hits at a point vs over a range of points.
        assert_intersection_is_point!(LineSegment::new(
            &Vec2::new(5.0, 5.0),
            &Vec2::new(1.0, 1.0),
        ).find_intersection_with_line_segment(&LineSegment::new(
            &Vec2::new(5.0, 5.0),
            &Vec2::new(6.0, 6.0),
        )), Vec2::new(5.0, 5.0));
        let result = LineSegment::new(
            &Vec2::new(5.0, 5.0),
            &Vec2::new(1.0, 1.0),
        ).find_intersection_with_line_segment(&LineSegment::new(
            &Vec2::new(4.0, 4.0),
            &Vec2::new(6.0, 6.0),
        ));
        if let LineSegmentIntersection::Many(segment) = result {
            // Note: the order really doesn't matter here.
            assert_vec2_about_eq!(segment.start, Vec2::new(5.0, 5.0));
            assert_vec2_about_eq!(segment.end, Vec2::new(4.0, 4.0));
        } else {
            panic!("Expected single multipoint intersection but got {:?}", result);
        }
        // Check could intersect except past one of the line segments' start or end points.
        // Shuffle the values around to check different start/end point combinations.
        assert!(LineSegment::new(
            &Vec2::new(1.0, 1.0),
            &Vec2::new(5.0, 5.0),
        ).find_intersection_with_line_segment(&LineSegment::new(
            &Vec2::new(2.5, 1.5),
            &Vec2::new(5.0, 0.0),
        )).is_none());
        assert!(LineSegment::new(
            &Vec2::new(1.0, 1.0),
            &Vec2::new(5.0, 5.0),
        ).find_intersection_with_line_segment(&LineSegment::new(
            &Vec2::new(5.0, 0.0),
            &Vec2::new(2.5, 1.5),
        )).is_none());
        // Fixed: this case previously duplicated the first combination; it now
        // reverses the first segment so all four orderings are actually covered.
        assert!(LineSegment::new(
            &Vec2::new(5.0, 5.0),
            &Vec2::new(1.0, 1.0),
        ).find_intersection_with_line_segment(&LineSegment::new(
            &Vec2::new(2.5, 1.5),
            &Vec2::new(5.0, 0.0),
        )).is_none());
        assert!(LineSegment::new(
            &Vec2::new(5.0, 5.0),
            &Vec2::new(1.0, 1.0),
        ).find_intersection_with_line_segment(&LineSegment::new(
            &Vec2::new(5.0, 0.0),
            &Vec2::new(2.5, 1.5),
        )).is_none());
        // Check simple intersection in the middle of both segments.
        assert_intersection_is_point!(LineSegment::new(
            &Vec2::new(5.0, 5.0),
            &Vec2::new(1.0, 1.0),
        ).find_intersection_with_line_segment(&LineSegment::new(
            &Vec2::new(5.0, 0.0),
            &Vec2::new(0.0, 5.0),
        )), Vec2::new(2.5, 2.5));
        // Check intersection at the end points (all 4).
        assert_intersection_is_point!(LineSegment::new(
            &Vec2::new(3.0, 3.0),
            &Vec2::new(1.0, 1.0),
        ).find_intersection_with_line_segment(&LineSegment::new(
            &Vec2::new(3.0, 3.0),
            &Vec2::new(5.0, 5.0),
        )), Vec2::new(3.0, 3.0));
        assert_intersection_is_point!(LineSegment::new(
            &Vec2::new(3.0, 3.0),
            &Vec2::new(1.0, 1.0),
        ).find_intersection_with_line_segment(&LineSegment::new(
            &Vec2::new(5.0, 5.0),
            &Vec2::new(3.0, 3.0),
        )), Vec2::new(3.0, 3.0));
        // Fixed: this case previously duplicated the first end-point combination;
        // it now reverses the first segment so all four orderings are covered.
        assert_intersection_is_point!(LineSegment::new(
            &Vec2::new(1.0, 1.0),
            &Vec2::new(3.0, 3.0),
        ).find_intersection_with_line_segment(&LineSegment::new(
            &Vec2::new(3.0, 3.0),
            &Vec2::new(5.0, 5.0),
        )), Vec2::new(3.0, 3.0));
        assert_intersection_is_point!(LineSegment::new(
            &Vec2::new(1.0, 1.0),
            &Vec2::new(3.0, 3.0),
        ).find_intersection_with_line_segment(&LineSegment::new(
            &Vec2::new(5.0, 5.0),
            &Vec2::new(3.0, 3.0),
        )), Vec2::new(3.0, 3.0));
        // Could also check rounding behavior, but that's mostly just to limit
        // rounding error propagation... Eh, not too important.
    }
}
true
aaad57d0815e11c27dd02744ea989b3bc9b173d4
Rust
seanyeh/ProjectEuler
/rust/004.rs
UTF-8
857
3.703125
4
[]
no_license
//A palindromic number reads the same both ways. The largest palindrome made
//from the product of two 2-digit numbers is 9009 = 91 × 99.
//
//Find the largest palindrome made from the product of two 3-digit numbers.

/// Returns true if `s` reads the same forwards and backwards.
///
/// Compares raw bytes, which is correct for the ASCII digit strings this
/// program feeds it. An empty string counts as a palindrome (the original
/// `len - 1` two-pointer version underflowed on empty input).
fn is_palindrome(s: &str) -> bool {
    let bytes = s.as_bytes();
    bytes.iter().eq(bytes.iter().rev())
}

/// Returns the largest palindromic product `i * j` with
/// `start <= i <= j <= finish`, or 0 if none exists.
///
/// Modernized from pre-1.0 Rust (`uint`, `range()`, `char_len()` no longer
/// exist); the inclusive upper bound `finish` is preserved from the original
/// `range(start, finish + 1)`.
fn get_palindromes(start: u64, finish: u64) -> u64 {
    let mut max = 0;
    for i in start..=finish {
        // j starts at i so each unordered pair is examined exactly once.
        for j in i..=finish {
            let prod = i * j;
            // Only pay for the string conversion when prod could be a new maximum.
            if prod > max && is_palindrome(&prod.to_string()) {
                max = prod;
            }
        }
    }
    max
}

fn main() {
    println!("{}", get_palindromes(900, 1000));
}
true