blob_id
stringlengths 40
40
| language
stringclasses 1
value | repo_name
stringlengths 5
140
| path
stringlengths 5
183
| src_encoding
stringclasses 6
values | length_bytes
int64 12
5.32M
| score
float64 2.52
4.94
| int_score
int64 3
5
| detected_licenses
listlengths 0
47
| license_type
stringclasses 2
values | text
stringlengths 12
5.32M
| download_success
bool 1
class |
---|---|---|---|---|---|---|---|---|---|---|---|
0df28564025086ff80615ae9ea77e0dc3bc05709
|
Rust
|
anderspitman/battle_beetles
|
/src/simulation/fight_simulation.rs
|
UTF-8
| 2,204 | 3.078125 | 3 |
[] |
no_license
|
use simulation::Simulate;
use game::{Game, FieldState, State};
/// Represents a single fight, without generations.
///
/// `T` is an observer invoked with the field state after each tick;
/// `U` reports whether the fight has finished.
pub struct FightSimulation<'a, T: Fn(&FieldState), U: Fn(&FieldState) -> bool> {
    // Exclusive handle to the game world being simulated.
    game: &'a mut Game,
    // Optional per-tick observer; set via `set_tick_callback`.
    tick_callback: Option<T>,
    // Returns true once the simulation loop should stop.
    check_done_callback: U,
}
impl<'a, T: Fn(&FieldState), U: Fn(&FieldState) -> bool> FightSimulation<'a, T, U> {
    /// Creates a simulation over `game`; `check_done_callback` decides when
    /// `run` terminates. No tick callback is installed initially.
    pub fn new(game: &'a mut Game, check_done_callback: U) -> FightSimulation<'a, T, U> {
        FightSimulation {
            game,
            tick_callback: None,
            // Field-init shorthand (was `check_done_callback: check_done_callback`).
            check_done_callback,
        }
    }

    /// Installs the per-tick observer and immediately invokes it once so the
    /// observer sees the initial field state.
    pub fn set_tick_callback(&mut self, tick_callback: T) {
        self.tick_callback = Some(tick_callback);
        if let Some(cb) = self.tick_callback.as_ref() {
            cb(&self.game.field_state);
        }
    }
}
impl<'a, T: Fn(&FieldState), U: Fn(&FieldState) -> bool> Simulate<T> for FightSimulation<'a, T, U> {
    /// Runs the fight loop until `check_done_callback` reports completion.
    /// Each pass orders every idle beetle to attack its closest enemy,
    /// advances the game one tick, then notifies the tick observer.
    fn run(&mut self) {
        // TODO: get rid of clone somehow
        let beetles = self.game.field_state.beetles.clone();
        while !(self.check_done_callback)(&self.game.field_state) {
            for beetle in beetles.values() {
                if beetle.current_state == State::Idle {
                    if let Some(closest_beetle_id) = self.game.find_closest_enemy(&beetle) {
                        // Select -> interact -> deselect mirrors the UI command flow.
                        self.game.select_beetle(beetle.id);
                        self.game.selected_interact_command(closest_beetle_id);
                        self.game.deselect_all_beetles();
                    } else {
                        println!("no enemies for {}", beetle.id);
                    }
                } else {
                    println!("not idle");
                }
            }
            self.game.tick();
            if let Some(tick_callback) = self.get_tick_callback() {
                tick_callback(&self.game.field_state);
            }
        }
    }

    /// Borrows the installed tick callback, if any.
    fn get_tick_callback(&self) -> Option<&T> {
        // `Option::as_ref` replaces the hand-written match with `Some(&cb)`.
        self.tick_callback.as_ref()
    }
}
| true |
3e441da183b5f1035aa7c5eecade3b052a4fb05e
|
Rust
|
seguidor777/dcoder-solutions
|
/medium/the_game_of_power.rs
|
UTF-8
| 694 | 2.96875 | 3 |
[] |
no_license
|
use std::io::{self, BufRead};
/// Minimum distance from `n` to a perfect square: 0 when `n` is itself a
/// square, otherwise the smaller of the gaps to the squares below and above.
fn power_gap(n: u16) -> u16 {
    let n_sqrt = (n as f64).sqrt();
    let n_sqrt_floor = n_sqrt.floor() as u16;
    if n_sqrt == n_sqrt_floor.into() {
        return 0;
    }
    // Widen to u32: for n near u16::MAX, (floor + 1)^2 = 65536 would
    // overflow u16 (the original panicked in debug builds for n = 65535).
    let lower = n_sqrt_floor as u32;
    let n32 = n as u32;
    let min = (n32 - lower * lower).min((lower + 1) * (lower + 1) - n32);
    min as u16
}

fn main() {
    let stdin = io::stdin();
    let mut lines = stdin.lock().lines();
    let mut input = lines.next().unwrap().expect("cannot read T");
    let t: u8 = input.parse().expect("cannot parse T");
    for _ in 0..t {
        // Error message fixed: this line carries N, not T.
        input = lines.next().unwrap().expect("cannot read N");
        let n: u16 = input.parse().expect("cannot parse N");
        println!("{}", power_gap(n));
    }
}
| true |
81a1842ac2afd5ecc6d329678e1d228f0708dbea
|
Rust
|
blittable/rust-top-down
|
/exercise_solutions/fs_homework/src/main.rs
|
UTF-8
| 3,532 | 3 | 3 |
[] |
no_license
|
#![allow(dead_code)]
#![allow(warnings)]
use std::fs;
use std::io;
use std::io::prelude::*;
use serde::{Serialize, Deserialize};
use array2d::Array2D;
/// An in-memory BMP image plus a decoded pixel grid.
#[derive(Debug)]
struct Bitmap<'a> {
    // The complete file bytes, header included.
    raw: Vec<u8>,
    // Pixel rows; each pixel is a (b, g, r) triple as stored in the file.
    content: Vec<Vec<(u8, u8, u8)>>,
    // Byte offset where pixel data begins (everything before is header).
    seek: usize,
    // Output name stem; the result is written to "<name>.bmp".
    name: &'a str
}
impl<'a> Bitmap<'a> {
    // Header byte offsets. These match the standard BMP layout
    // (pixel-data offset at 10, width at 18, height at 22);
    // "weight" appears to be a misspelling of "width" — TODO confirm.
    const content_offset: usize = 10;
    const weight_offset: usize = 18;
    const height_offset: usize = 22;
    /// Parses `content` as a BMP file and splits the pixel data into rows.
    ///
    /// NOTE(review): each field slice is 3 bytes but is deserialized as an
    /// i16 (2 bytes); bincode appears to tolerate the trailing byte — verify.
    fn new(name: &'a str, content: Vec<u8>) -> Self {
        let weight: i16 = bincode::deserialize(&content[Bitmap::weight_offset..Bitmap::weight_offset+3]).unwrap();
        let height: i16 = bincode::deserialize(&content[Bitmap::height_offset..Bitmap::height_offset+3]).unwrap();
        let offset: i16 = bincode::deserialize(&content[Bitmap::content_offset..Bitmap::content_offset+3]).unwrap();
        let seek: usize = offset as usize;
        // Pixel payload starts at `seek`; 3 bytes per pixel (b, g, r).
        let mut data = content[seek..].to_vec();
        let mut rows = Vec::new();
        let mut columns = Vec::new();
        for i in 0..data.len()/3 {
            let start = (i * 3);
            let len = columns.len() as i16;
            // Start a new row once the current one reaches the image width.
            if len == weight
            {
                rows.push(columns);
                columns = Vec::new();
            }
            columns.push((data[start], data[start + 1], data[start + 2]));
        }
        // Push the final (possibly partial) row.
        rows.push(columns);
        Bitmap {
            raw: content,
            content: rows,
            seek: seek,
            name: name
        }
    }
    /// Runs `filter` over the pixel grid and rebuilds the bitmap with the
    /// original header bytes followed by the filtered pixels.
    fn apply(self, filter: Box<dyn Filter>) -> Self {
        let mut meta = self.raw[..self.seek].to_vec();
        // Flatten filtered (b, g, r) triples back into raw bytes.
        let mut translated = filter.apply(self.content)
            .iter()
            .flat_map(|&(b, g, r)| vec![b, g, r])
            .collect::<Vec<_>>();
        let mut result = Vec::new();
        result.append(&mut meta);
        result.append(&mut translated);
        // Re-parse so `content`/`seek` stay consistent with the new bytes.
        Self::new(self.name, result)
    }
}
/// A pixel-grid transformation: consumes rows of (b, g, r) pixels and
/// returns a flattened pixel sequence.
trait Filter {
    fn apply(&self, data: Vec<Vec<(u8, u8, u8)>>) -> Vec<(u8, u8, u8)>;
}

/// Inverts every color channel (photographic negative).
struct InverseColorFilter {}

impl Filter for InverseColorFilter {
    fn apply(&self, data: Vec<Vec<(u8, u8, u8)>>) -> Vec<(u8, u8, u8)> {
        let mut inverted = Vec::new();
        for row in &data {
            for &(b, g, r) in row {
                // Bitwise NOT on a u8 is 255 - value, i.e. channel inversion.
                inverted.push((!b, !g, !r));
            }
        }
        inverted
    }
}
/// Mirror axis for `FlipFilter`.
/// NOTE: `Holizontal` is a misspelling of "Horizontal", kept because it is
/// part of the public variant name used by callers.
enum Flip {
    Holizontal,
    Vertical
}
/// Mirrors the image along the configured axis.
struct FlipFilter {
    flip: Flip
}
impl Filter for FlipFilter {
    /// Mirrors the pixel grid and returns the pixels flattened row-major.
    fn apply(&self, data: Vec<Vec<(u8, u8, u8)>>) -> Vec<(u8, u8, u8)> {
        match self.flip {
            // Vertical flip: reverse the row order, keep each row intact.
            Flip::Vertical => data
                .iter()
                .rev()
                .flat_map(|row| row.iter())
                .copied() // replaces `.map(|&t| t)`
                .collect(),
            // Horizontal flip: keep row order, reverse pixels within rows.
            // (Explicit variant instead of `_` so adding a variant is a
            // compile error here rather than silently falling through.)
            Flip::Holizontal => data
                .iter()
                .flat_map(|row| row.iter().rev())
                .copied()
                .collect(),
        }
    }
}
fn main() {
    // Read the source bitmap, invert its colors, then flip both ways,
    // and write the result to "<name>.bmp".
    // (Removed the unused `let fs = fs::OpenOptions::new();` local and the
    // redundant borrow + `to_vec` clone of the file contents.)
    let content = fs::read("bird.bmp").expect("failed to read bird.bmp");
    let bmp = Bitmap::new("002", content)
        .apply(Box::new(InverseColorFilter {}))
        .apply(Box::new(FlipFilter {
            flip: Flip::Holizontal
        }))
        .apply(Box::new(FlipFilter {
            flip: Flip::Vertical
        }));
    let mut fp = fs::OpenOptions::new()
        .create(true)
        .write(true)
        .open(format!("{}.bmp", bmp.name))
        .unwrap();
    // Propagate write failures instead of silently dropping the Result.
    fp.write_all(&bmp.raw).expect("failed to write bitmap");
}
| true |
0f7aa82b548a7e4ea8e14ff31e819aa6e86f4f05
|
Rust
|
petanix/clvm_rs
|
/src/gen/validation_error.rs
|
UTF-8
| 1,912 | 2.5625 | 3 |
[
"Apache-2.0"
] |
permissive
|
use crate::allocator::{Allocator, NodePtr, SExp};
/// The distinct validation failures a spend bundle condition can produce.
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum ErrorCode {
    // Malformed condition arguments.
    NegativeAmount,
    InvalidConditionOpcode,
    InvalidParentId,
    InvalidPuzzleHash,
    InvalidPubkey,
    InvalidMessage,
    InvalidCondition,
    InvalidCoinAmount,
    InvalidCoinAnnouncement,
    InvalidPuzzleAnnouncement,
    // Time/height lock assertions that did not hold.
    AssertHeightAbsolute,
    AssertHeightRelative,
    AssertSecondsAbsolute,
    AssertSecondsRelative,
    // Self-referential assertions that did not hold.
    AssertMyAmountFailed,
    AssertMyPuzzlehashFailed,
    AssertMyParentIdFailed,
    AssertMyCoinIdFailed,
    AssertPuzzleAnnouncementFailed,
    AssertCoinAnnouncementFailed,
    ReserveFeeConditionFailed,
    // Structural failures across the whole bundle.
    DuplicateOutput,
    DoubleSpend,
    CostExceeded,
}
/// A validation failure: the offending node plus the reason code.
#[derive(Debug, Clone, Copy, PartialEq)]
pub struct ValidationErr(pub NodePtr, pub ErrorCode);
// helper functions that fail with ValidationErr

/// Left element of the pair at `n`, or `InvalidCondition` if `n` is an atom.
pub fn first(a: &Allocator, n: NodePtr) -> Result<NodePtr, ValidationErr> {
    if let SExp::Pair(left, _) = a.sexp(n) {
        Ok(left)
    } else {
        Err(ValidationErr(n, ErrorCode::InvalidCondition))
    }
}
/// Right element of the pair at `n`, or `InvalidCondition` if `n` is an atom.
pub fn rest(a: &Allocator, n: NodePtr) -> Result<NodePtr, ValidationErr> {
    if let SExp::Pair(_, right) = a.sexp(n) {
        Ok(right)
    } else {
        Err(ValidationErr(n, ErrorCode::InvalidCondition))
    }
}
/// Both halves of the pair at `n`, or `None` if `n` is an atom.
pub fn pair(a: &Allocator, n: NodePtr) -> Option<(NodePtr, NodePtr)> {
    if let SExp::Pair(left, right) = a.sexp(n) {
        Some((left, right))
    } else {
        None
    }
}
/// Like `pair`, but failing with `InvalidCondition` instead of `None`.
pub fn next(a: &Allocator, n: NodePtr) -> Result<(NodePtr, NodePtr), ValidationErr> {
    if let SExp::Pair(left, right) = a.sexp(n) {
        Ok((left, right))
    } else {
        Err(ValidationErr(n, ErrorCode::InvalidCondition))
    }
}
/// The atom bytes at `n`, or the caller-supplied `code` if `n` is a pair.
pub fn atom(a: &Allocator, n: NodePtr, code: ErrorCode) -> Result<&[u8], ValidationErr> {
    if let SExp::Atom(_) = a.sexp(n) {
        Ok(a.atom(n))
    } else {
        Err(ValidationErr(n, code))
    }
}
| true |
b57eeb18750eaa9056b24f121754e1d15f214bfe
|
Rust
|
developers-algorithm-study/mjy9088
|
/acmicpc.net/10828/src/main.rs
|
UTF-8
| 804 | 3.140625 | 3 |
[] |
no_license
|
use std::io;
/// Applies one stack command; returns the line to print, if the command
/// produces output. Panics on an unknown command (matching the original).
fn execute(stack: &mut Vec<i32>, argv: &[&str]) -> Option<String> {
    match argv[0] {
        "push" => {
            stack.push(argv[1].parse::<i32>().unwrap());
            None
        }
        "pop" => Some(stack.pop().unwrap_or(-1).to_string()),
        "size" => Some(stack.len().to_string()),
        // `is_empty` replaces the non-idiomatic `len() == 0` comparison.
        "empty" => Some(if stack.is_empty() { "1" } else { "0" }.to_string()),
        "top" => Some(stack.last().unwrap_or(&-1).to_string()),
        _ => panic!("Invalid input"),
    }
}

fn main() {
    // BOJ 10828: read a command count, then run each stack command,
    // printing whatever output the command produces.
    let mut line = String::new();
    io::stdin().read_line(&mut line).unwrap();
    let command_count = line.trim().parse::<usize>().unwrap();
    let mut stack = Vec::<i32>::new();
    for _ in 0..command_count {
        let mut line = String::new();
        io::stdin().read_line(&mut line).unwrap();
        let argv = line.trim().split(' ').collect::<Vec<_>>();
        if let Some(output) = execute(&mut stack, &argv) {
            println!("{}", output);
        }
    }
}
| true |
263e4ad7de3f6529c3b44ce4aad26ffc9e9bf206
|
Rust
|
vvvy/webhdfs-rs
|
/src/error.rs
|
UTF-8
| 8,582 | 3.015625 | 3 |
[
"Apache-2.0"
] |
permissive
|
use std::borrow::Cow;
use std::fmt::{Display, Formatter, Result as FmtResult};
pub use std::result::Result as StdResult;
/// Crate-wide result type: defaults the error to this module's `Error`.
pub type Result<T> = StdResult<T, Error>;
/// The lower-level failure wrapped by `Error` — one variant per library
/// this crate can fail through, plus a few domain-specific cases.
#[derive(Debug)]
pub enum Cause {
    // No underlying cause; the message alone describes the error.
    None,
    Hyper(hyper::Error),
    HyperHeaderToStr(hyper::header::ToStrError),
    MimeFromStr(mime::FromStrError),
    SerdeJson(serde_json::Error),
    SerdeToml(toml::de::Error),
    Http(http::Error),
    HttpInvalidUri(http::uri::InvalidUri),
    HttpInvalidUriParts(http::uri::InvalidUriParts),
    Io(std::io::Error),
    Tls(native_tls::Error),
    //IntConversion(std::num::TryFromIntError),
    // Error reported by the remote WebHDFS server.
    RemoteException(crate::datatypes::RemoteException),
    // HTTP redirect captured as an error: (status code, Location header).
    HttpRedirect(u16, String),
    Timeout
}
/// Crate error: an optional human-readable message plus an underlying `Cause`.
#[derive(Debug)]
pub struct Error {
    // Context message; `with_msg_prepended` accumulates lines newest-first.
    msg: Option<Cow<'static, str>>,
    cause: Cause
}
impl Error {
    /// Builds an error from an optional message and an underlying cause.
    pub fn new(msg: Option<Cow<'static, str>>, cause: Cause) -> Self { Error { msg, cause } }
    /// Builds a message-less error from a cause alone.
    pub fn anon(cause: Cause) -> Self { Self::new(None, cause) }
    /// Returns this error with `msg` prepended (newline-separated) to any
    /// existing message; the cause is preserved.
    pub fn with_msg_prepended(self, msg: Cow<'static, str>) -> Self {
        Error {
            msg: Some(match self.msg {
                Some(m) => msg + "\n" + m,
                None => msg
            }),
            cause: self.cause
        }
    }
    /// Application-level error from a static message (no cause).
    pub fn app_c(msg: &'static str) -> Self { Error::new(Some(Cow::Borrowed(msg)), Cause::None) }
    /// Application-level error from an owned message (no cause).
    pub fn app_s(msg: String) -> Self { Error::new(Some(Cow::Owned(msg)), Cause::None) }
    /// The attached message, or `"GENERIC"` when none was set.
    pub fn msg_s(&self) -> &str {
        // `as_deref` replaces the hand-written match returning `&m`
        // (a needless double reference through the Cow).
        self.msg.as_deref().unwrap_or("GENERIC")
    }
    /// The underlying cause.
    pub fn cause(&self) -> &Cause { &self.cause }
    /// Wraps an HTTP redirect (status + Location) as an error.
    pub fn from_http_redirect(status: u16, location: String) -> Self {
        Self::new(None, Cause::HttpRedirect(status, location))
    }
    /// Recovers the redirect data, or returns the error unchanged on the
    /// `Err` side when the cause is not a redirect.
    pub fn to_http_redirect(self) -> Result<(u16, String)> {
        match self.cause {
            Cause::HttpRedirect(code, location) => Ok((code, location)),
            other => Err(Self::new(self.msg, other))
        }
    }
    //pub fn timeout() -> Self { Self::new(None, Cause::Timeout) }
    /// Timeout error with a static message.
    pub fn timeout_c(msg: &'static str) -> Self { Self::new(Some(Cow::Borrowed(msg)), Cause::Timeout) }
}
impl Display for Error {
    /// Renders the message followed by a one-line description of the cause.
    /// The exact strings are part of the crate's observable output — keep
    /// them stable.
    fn fmt(&self, f: &mut Formatter<'_>) -> FmtResult {
        write!(f, "AppError: {}", self.msg_s())?;
        match &self.cause {
            Cause::Hyper(e) => write!(f, "; caused by hyper::error::Error: {}", e),
            Cause::HyperHeaderToStr(e) => write!(f, "; caused by hyper::header::ToStrError: {}", e),
            Cause::MimeFromStr(e) => write!(f, "; caused by mime::FromStrError: {}", e),
            Cause::SerdeJson(e) => write!(f, "; caused by serde_json::Error: {}", e),
            Cause::SerdeToml(e) => write!(f, "; caused by toml::de::Error: {}", e),
            Cause::Http(e) => write!(f, "; caused by http::Error: {}", e),
            Cause::HttpInvalidUri(e) => write!(f, "; caused by http::uri::InvalidUri: {}", e),
            Cause::HttpInvalidUriParts(e) => write!(f, "; caused by http::uri::InvalidUriParts: {}", e),
            Cause::Io(e) => write!(f, "; caused by IoError: {}", e),
            Cause::Tls(e) => write!(f, "; caused by native_tls::Error: {}", e),
            //Cause::IntConversion(e) => write!(f, "; caused by std::num::TryFromIntError: {}", e),
            Cause::RemoteException(e) => write!(f, "; caused by RemoteException {}", e),
            Cause::HttpRedirect(code, location) => write!(f, "; caused by HTTP redirect {} {}", code, location),
            Cause::Timeout => write!(f, "; caused by Timeout"),
            // No cause: the "AppError: <msg>" prefix is the whole output.
            Cause::None => Ok(())
        }
    }
}
impl std::error::Error for Error {
    /// Exposes the wrapped lower-level error for `Error::source` chains;
    /// domain-specific causes carry no std error and yield `None`.
    fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
        match &self.cause {
            Cause::Hyper(e) => Some(e),
            Cause::HyperHeaderToStr(e) => Some(e),
            Cause::MimeFromStr(e) => Some(e),
            Cause::SerdeJson(e) => Some(e),
            Cause::SerdeToml(e) => Some(e),
            Cause::Http(e) => Some(e),
            Cause::HttpInvalidUri(e) => Some(e),
            Cause::HttpInvalidUriParts(e) => Some(e),
            Cause::Io(e) => Some(e),
            Cause::Tls(e) => Some(e),
            //Cause::IntConversion(e) => Some(e),
            Cause::RemoteException(e) => Some(e),
            // Variants without an underlying std error, grouped explicitly
            // (no `_` so new variants force a decision here).
            Cause::HttpRedirect(..) | Cause::Timeout | Cause::None => None,
        }
    }
}
// `app_error!` builds an application `Error`, optionally wrapping a cause:
//   app_error!(generic "msg" [, args...])      — message-only error
//   app_error!((cause=e) "msg" [, args...])    — annotate `e` with a message
// With cfg `panic_on_error`, every form panics immediately instead of
// constructing a value — useful for fail-fast debugging builds.
#[cfg(panic_on_error)]
macro_rules! app_error {
    (generic $s:expr, $($arg:expr),+) => { panic!(format!($s,$($arg),+)) };
    (generic $s:expr) => { panic!($s) };
    ((cause=$c:expr) $s:expr, $($arg:expr),+) => { panic!($c.into_with(std::borrow::Cow::Owned(format!($s,$($arg),+))).to_string()) };
    ((cause=$c:expr) $s:expr) => { panic!($c.into_with(std::borrow::Cow::Borrowed($s)).to_string()) };
}
// Normal build: the same forms construct `Error` values.
#[cfg(not(panic_on_error))]
macro_rules! app_error {
    (generic $s:expr, $($arg:expr),+) => { crate::error::Error::app_s(format!($s,$($arg),+)) };
    (generic $s:expr) => { crate::error::Error::app_c($s) };
    ((cause=$c:expr) $s:expr, $($arg:expr),+) => { $c.into_with(std::borrow::Cow::Owned(format!($s,$($arg),+))) };
    ((cause=$c:expr) $s:expr) => { $c.into_with(std::borrow::Cow::Borrowed($s)) };
}
/// Conversion into `Error` with an attached message; implemented for
/// `Error` itself and (via the conversion macros below) for every
/// wrappable third-party error type.
pub trait IntoErrorAnnotated: Sized {
    fn into_with(self, msg: Cow<'static, str>) -> Error;
    /// Static-message convenience wrapper around `into_with`.
    fn into_with_c(self, msg: &'static str) -> Error { self.into_with(Cow::Borrowed(msg)) }
    /// Owned-message convenience wrapper around `into_with`.
    fn into_with_s(self, msg: String) -> Error { self.into_with(Cow::Owned(msg)) }
}
impl IntoErrorAnnotated for Error {
    // Annotating an existing Error prepends the new message.
    fn into_with(self, msg: Cow<'static, str>) -> Error { self.with_msg_prepended(msg) }
}
/// Result adapters that annotate the error side with a message.
pub trait AnnotateError<T>: Sized {
    /// a shortcut for `.map_err(|x| app_err((cause=x) "...")
    fn aerr(self, msg: &'static str) -> Result<T>;
    /// a shortcut for `.map_err(|x| app_err((cause=x) msg), with msg lazily evaluated
    fn aerr_f(self, msg_f: impl FnOnce() -> String) -> Result<T>;
}
impl<T, E> AnnotateError<T> for std::result::Result<T, E> where E: IntoErrorAnnotated {
    fn aerr(self, msg: &'static str) -> Result<T> {
        self.map_err(|e| e.into_with(Cow::Borrowed(msg)))
    }
    fn aerr_f(self, msg_f: impl FnOnce() -> String) -> Result<T> {
        // Closure form: the message is only built when an error occurs.
        self.map_err(|e| e.into_with(Cow::Owned(msg_f())))
    }
}
// For `Variant(Type)`: generates `From<Type> for Error` plus
// `IntoErrorAnnotated for Type`, so `?` and `.aerr(..)` both work.
// Under cfg `panic_on_error`, `From` panics instead (fail-fast builds).
macro_rules! error_conversion {
    ($f:ident($t:ty)) => {
        impl From<$t> for Error {
            #[cfg(panic_on_error)]
            fn from(e: $t) -> Self { panic!(Error::anon(Cause::$f(e)).to_string()) }
            #[cfg(not(panic_on_error))]
            fn from(e: $t) -> Self { Error::anon(Cause::$f(e)) }
        }
        impl IntoErrorAnnotated for $t {
            fn into_with(self, msg: Cow<'static, str>) -> Error {
                Error::new(Some(msg), Cause::$f(self))
            }
        }
    };
}
// Same as above for unit cause variants: the source value is discarded
// and only the variant tag (`Cause::$f`) is kept.
macro_rules! error_conversion_noarg {
    ($f:ident($t:ty)) => {
        impl From<$t> for Error {
            #[cfg(panic_on_error)]
            fn from(_: $t) -> Self { panic!(Error::anon(Cause::$f).to_string()) }
            #[cfg(not(panic_on_error))]
            fn from(_: $t) -> Self { Error::anon(Cause::$f) }
        }
        impl IntoErrorAnnotated for $t {
            fn into_with(self, msg: Cow<'static, str>) -> Error {
                Error::new(Some(msg), Cause::$f)
            }
        }
    };
}
// Batch forms so all conversions can be declared in one invocation each.
macro_rules! error_conversions {
    ($($f:ident($t:ty)),+) => { $(error_conversion!{$f($t)})+ }
}
macro_rules! error_conversions_noarg {
    ($($f:ident($t:ty)),+) => { $(error_conversion_noarg!{$f($t)})+ }
}
// Declare the wrappable error types; each entry must match a `Cause`
// variant of the same name and payload type.
error_conversions!{
    Hyper(hyper::Error),
    HyperHeaderToStr(hyper::header::ToStrError),
    MimeFromStr(mime::FromStrError),
    SerdeJson(serde_json::Error),
    SerdeToml(toml::de::Error),
    Http(http::Error),
    HttpInvalidUri(http::uri::InvalidUri),
    HttpInvalidUriParts(http::uri::InvalidUriParts),
    Io(std::io::Error),
    Tls(native_tls::Error),
    //IntConversion(std::num::TryFromIntError),
    RemoteException(crate::datatypes::RemoteException)
}
// `Elapsed` carries no useful payload; map it to the unit `Timeout` cause.
error_conversions_noarg!{
    Timeout(tokio::time::error::Elapsed)
}
impl From<Error> for std::io::Error {
    /// Converts back to an `io::Error` for APIs that require one.
    /// A bare Io cause is unwrapped losslessly; timeouts map to `TimedOut`;
    /// everything else is boxed as an `Other` error. Arm order matters:
    /// the specific `msg`/`cause` shapes must precede the catch-all.
    fn from(e: Error) -> Self {
        use std::io::{Error as IoError, ErrorKind as IoErrorKind };
        match e {
            Error { msg: None, cause: Cause::Io(io) } => io,
            Error { msg: Some(m), cause: Cause::Timeout } => IoError::new(IoErrorKind::TimedOut, m),
            Error { msg: None, cause: Cause::Timeout } => IoError::from(IoErrorKind::TimedOut),
            other => IoError::new(std::io::ErrorKind::Other, other)
        }
    }
}
| true |
400b976f90240308e29f021a76d8fcbf3f82d763
|
Rust
|
kasaranenikarthik/openctf
|
/src/config.rs
|
UTF-8
| 760 | 3.21875 | 3 |
[] |
no_license
|
use std::path::PathBuf;
use failure::Error;
/// Represents a configuration for an OpenCTF server instance.
///
/// Construct via [`Config::default`] or [`Config::from_file`].
pub struct Config {
    /// The host on which to listen (default: '0.0.0.0')
    pub host: String,
    /// The port on which to listen (default: 4401)
    pub port: u16,
}
impl Config {
    /// Generates the default configuration for an OpenCTF server instance.
    /// This will also read from environment variables as necessary.
    pub fn default() -> Config {
        // TODO: use environment variables
        let host = String::from("0.0.0.0");
        Config { host, port: 4401 }
    }

    /// Loads a configuration from the given file path.
    pub fn from_file(_path: &PathBuf) -> Result<Config, Error> {
        // TODO: parse a config from file
        Ok(Self::default())
    }
}
| true |
d212eb511e38b9e317e98573591546ce23c851da
|
Rust
|
Twinklebear/ispc-rs
|
/examples/ddvol/src/vol.rs
|
UTF-8
| 2,113 | 2.828125 | 3 |
[
"MIT"
] |
permissive
|
use crate::ddvol;
use crate::empty_handle;
use crate::tfn::TransferFunction;
use crate::vec3::Vec3i;
use crate::ISPCHandle;
/// A volume dataset being rendered with its ISPC handle
pub struct Volume {
    // Opaque handle to the ISPC-side volume; released in `Drop`.
    ispc_handle: ISPCHandle,
    // Owned so the ISPC equivalent stays alive while the volume uses it.
    tfn: TransferFunction,
}
impl Volume {
    /// Create a new volume with the desired dimensions. Enough room will be allocated to
    /// store `dimensions.x * dimensions.y * dimensions.z` voxels.
    pub fn new(dimensions: Vec3i) -> Volume {
        let mut vol = empty_handle();
        // Default transfer function until the caller overrides it.
        let tfn = TransferFunction::cool_warm();
        unsafe {
            // SAFETY: pointers are to live locals; `make_volume` is expected
            // to fill `vol` with a valid handle — TODO confirm ISPC contract.
            ddvol::make_volume(
                &mut vol as *mut ISPCHandle,
                &dimensions as *const Vec3i,
                tfn.ispc_equiv(),
            );
        }
        Volume {
            ispc_handle: vol,
            tfn,
        }
    }
    /// Set the transfer function used by the volume, overriding the default cool/warm.
    pub fn set_transfer_function(&mut self, tfn: TransferFunction) {
        // Store first so the ISPC equiv passed below outlives the call.
        self.tfn = tfn;
        unsafe {
            ddvol::volume_set_transfer_function(self.ispc_handle, self.tfn.ispc_equiv());
        }
    }
    /// Change the isovalue being rendered. Setting to a value less than 0 will turn off
    /// the isosurface.
    pub fn set_isovalue(&mut self, isovalue: f32) {
        unsafe {
            ddvol::volume_set_isovalue(self.ispc_handle, isovalue);
        }
    }
    /// Set a region of voxel data for the volume.
    ///
    /// `region` must contain exactly `size.x * size.y * size.z` voxels
    /// (checked below); `start` is the destination offset in the volume.
    pub fn set_region(&mut self, region: &[f32], start: Vec3i, size: Vec3i) {
        assert_eq!(region.len(), (size.x * size.y * size.z) as usize);
        unsafe {
            // SAFETY: the assert above guarantees the pointer covers the
            // voxel count implied by `size`.
            ddvol::set_region(
                self.ispc_handle,
                region.as_ptr(),
                &start as *const Vec3i,
                &size as *const Vec3i,
            );
        }
    }
    /// Raw ISPC handle for passing to other ISPC entry points.
    pub fn ispc_equiv(&self) -> ISPCHandle {
        self.ispc_handle
    }
}
impl Drop for Volume {
    fn drop(&mut self) {
        // Only release the ISPC-side volume if one was actually created.
        if !self.ispc_handle.is_null() {
            unsafe {
                ddvol::drop_volume(self.ispc_handle);
            }
        }
    }
}
| true |
13c6418a39f30a6e747a32cef049c9e3f90e1047
|
Rust
|
hbeimf/rust_demo
|
/my_library/src/main.rs
|
UTF-8
| 926 | 2.984375 | 3 |
[] |
no_license
|
// extern crate my_library;
// fn main() {
// my_library::public_function();
// my_library::indirect_access();
// }
// ============================
// extern crate my_library;
// fn main() {
// println!("Hello in English: {}",my_library::english::greetings::hello());
// println!("Goodbye in English: {}", my_library::english::farewells::goodbye());
// println!("Hello in Chinese: {}", my_library::chinese::greetings::hello());
// println!("Goodbye in Chinese: {}", my_library::chinese::farewells::goodbye());
// }
// ==============
extern crate my_library;
fn main() {
    // Exercise the library's nested-module API: a greeting and a farewell
    // in each supported language.
    println!("Hello in English: {}", my_library::english::greetings::hello());
    println!("Goodbye in English: {}", my_library::english::farewells::goodbye());
    println!("Hello in Chinese: {}", my_library::chinese::greetings::hello());
    println!("Goodbye in Chinese: {}", my_library::chinese::farewells::goodbye());
}
| true |
903d7c360b2fa53b8ac67fb6b58cd5580359dc67
|
Rust
|
saibatizoku/font8x8-rs
|
/src/unicode.rs
|
UTF-8
| 4,013 | 3.34375 | 3 |
[
"MIT"
] |
permissive
|
//! Unicode support for 8x8 fonts.
pub use super::basic::{BasicFonts, BASIC_UNICODE};
pub use super::block::{BlockFonts, BLOCK_UNICODE};
pub use super::box_chars::{BoxFonts, BOX_UNICODE};
pub use super::greek::{GreekFonts, GREEK_UNICODE};
pub use super::hiragana::{HiraganaFonts, HIRAGANA_UNICODE};
pub use super::latin::{LatinFonts, LATIN_UNICODE};
use super::legacy::NOTHING_TO_DISPLAY;
pub use super::misc::{MiscFonts, MISC_UNICODE};
pub use super::sga::{SgaFonts, SGA_UNICODE};
#[cfg(feature = "std")]
pub use std::string::FromUtf16Error;
/// A single 8x8 font which supports `UTF-16` encoding/decoding.
///
/// Tuple fields: `.0` is the character, `.1` its 8-byte bitmap (one byte
/// per row).
#[derive(Clone, Copy, Debug, PartialEq)]
pub struct FontUnicode(pub char, pub [u8; 8]);
impl FontUnicode {
    /// Return the char value
    pub fn char(&self) -> char {
        self.0
    }
    /// Return the `[u8; 8]`-representation for this font.
    pub fn byte_array(&self) -> [u8; 8] {
        self.1
    }
    /// Return a result with the corresponding `String` for the font.
    ///
    /// NOTE(review): this inherent method shadows `ToString::to_string`;
    /// kept because it is public API.
    #[cfg(feature = "std")]
    pub fn to_string(&self) -> String {
        self.0.to_string()
    }
    /// Returns a `bool` indicating whether this font renders as a whitespace (all `0`).
    pub fn is_whitespace(&self) -> bool {
        self.1 == NOTHING_TO_DISPLAY
    }
    /// Consumes the current `FontUnicode` and returns the inner `(char, [u8; 8])` tuple.
    pub fn into_inner(self) -> (char, [u8; 8]) {
        self.into()
    }
}
// `From` impls rather than hand-written `Into` (clippy `from_over_into`):
// the std blanket impl still provides the matching `Into` conversions, so
// all existing `.into()` call sites keep compiling unchanged.
impl From<FontUnicode> for char {
    /// Extracts the character of the font.
    fn from(font: FontUnicode) -> char {
        font.0
    }
}
impl From<FontUnicode> for [u8; 8] {
    /// Extracts the 8-byte rendering of the font.
    fn from(font: FontUnicode) -> [u8; 8] {
        font.1
    }
}
impl From<FontUnicode> for (char, [u8; 8]) {
    /// Splits the font into its `(char, bytes)` pair.
    fn from(font: FontUnicode) -> (char, [u8; 8]) {
        (font.0, font.1)
    }
}
/// A trait for collections of `FontUnicode`, which provide methods for retrieving
/// the `Option<[u8; 8]>`, using the corresponding `char` as key.
pub trait UnicodeFonts {
    /// Byte rendering for `key`, if the collection contains it.
    fn get(&self, key: char) -> Option<[u8; 8]>;
    /// Full font entry for `key`, if the collection contains it.
    fn get_font(&self, key: char) -> Option<FontUnicode>;
    /// Iterator over every font in the collection.
    fn iter(&self) -> ::core::slice::Iter<FontUnicode>;
    // Presumably renders the whole set for inspection — std-only helper.
    #[cfg(feature = "std")]
    fn print_set(&self);
    /// Collects the set into `(char, font)` pairs — std-only helper.
    #[cfg(feature = "std")]
    fn to_vec(&self) -> Vec<(char, FontUnicode)>;
}
#[cfg(test)]
mod tests {
    use super::*;
    // Conversion coverage: each `Into` target of FontUnicode.
    #[test]
    fn font_unicode_converts_into_char() {
        let my_font = FontUnicode('á', [110u8; 8]);
        let ch: char = my_font.into();
        assert_eq!(ch, 'á');
    }
    #[test]
    fn font_unicode_converts_into_byte_array() {
        let my_font = FontUnicode('C', NOTHING_TO_DISPLAY);
        let byte_array: [u8; 8] = my_font.into();
        assert_eq!(byte_array, NOTHING_TO_DISPLAY);
    }
    #[test]
    fn font_unicode_converts_into_inner_tuple() {
        let my_font = FontUnicode('Á', [110u8; 8]);
        let font_tuple: (char, [u8; 8]) = my_font.into();
        assert_eq!(font_tuple, ('Á', [110u8; 8]));
    }
    // Accessor coverage: char(), byte_array(), to_string(), is_whitespace().
    #[test]
    fn font_unicode_api_method_unicode_returns_char() {
        let my_font = FontUnicode('ñ', [0x20; 8]);
        assert_eq!(my_font.char(), 'ñ');
    }
    #[test]
    fn font_unicode_api_method_byte_array_returns_array_with_8_bytes() {
        let my_font = FontUnicode('Ñ', [0x20; 8]);
        assert_eq!(my_font.byte_array(), [0x20; 8]);
    }
    #[cfg(feature = "std")]
    #[test]
    fn font_unicode_api_method_to_string_returns_string_from_unicode() {
        let my_font = FontUnicode('Ñ', [0x20; 8]);
        assert_eq!(my_font.to_string(), "Ñ".to_string());
    }
    #[cfg(feature = "std")]
    #[test]
    fn font_unicode_api_method_is_whitespace_returns_bool() {
        let my_font = FontUnicode('Ñ', [0x20; 8]);
        assert_eq!(my_font.is_whitespace(), false);
        // All-zero bitmap counts as whitespace.
        let my_font = FontUnicode('Ñ', NOTHING_TO_DISPLAY);
        assert!(my_font.is_whitespace());
    }
    #[test]
    fn font_unicode_api_method_into_inner_returns_inner_tuple() {
        let my_font = FontUnicode('Á', [110u8; 8]);
        assert_eq!(my_font.into_inner(), ('Á', [110u8; 8]));
    }
}
| true |
2b526bd4fc8b06e88d90dce379b29071254810c9
|
Rust
|
KBryan/canvas
|
/packages/canvas/src-native/canvas-native/canvas-core/src/common/text_decoder.rs
|
UTF-8
| 3,068 | 2.578125 | 3 |
[
"Apache-2.0"
] |
permissive
|
use std::ffi::{CStr, CString};
use std::os::raw::{c_char, c_longlong};
use std::ptr::null;
use encoding_rs::UTF_8;
use libc::size_t;
/// FFI wrapper around an `encoding_rs` decoder, exposed to C callers via
/// raw-pointer handles (see `release` / the `text_decoder_*` free functions).
#[repr(C)]
pub struct TextDecoder {
    decoder: &'static encoding_rs::Encoding,
}
impl TextDecoder {
    /// Builds a decoder from a C-string encoding label, falling back to
    /// UTF-8 when the pointer text is not valid UTF-8 or the label is
    /// unknown.
    ///
    /// SAFETY expectation: `decoding` must be a valid NUL-terminated C
    /// string — TODO confirm all callers guarantee this.
    pub fn new(decoding: *const c_char) -> Self {
        let decoding = unsafe { CStr::from_ptr(decoding) }
            .to_str()
            .unwrap_or("utf-8");
        let decoder = encoding_rs::Encoding::for_label(decoding.as_bytes())
            .unwrap_or(UTF_8.output_encoding());
        Self { decoder }
    }
    /// Views `utf8_src` as a &str up to the first NUL byte (or the whole
    /// slice if none). Caller must guarantee the bytes are valid UTF-8.
    unsafe fn str_from_u8_nul_utf8_unchecked(utf8_src: &[u8]) -> &str {
        let nul_range_end = utf8_src
            .iter()
            .position(|&c| c == b'\0')
            .unwrap_or(utf8_src.len()); // default to length if no `\0` present
        ::std::str::from_utf8_unchecked(&utf8_src[0..nul_range_end])
    }
    /// Decodes `len` bytes at `data` and returns a newly allocated C string.
    /// Ownership of the returned pointer transfers to the caller
    /// (`CString::into_raw`); returns null if the decoded text contains an
    /// interior NUL that `CString::new` rejects.
    pub fn decode(&mut self, data: *const u8, len: size_t) -> *const c_char {
        let txt = unsafe { std::slice::from_raw_parts(data, len) };
        let (res, _) = self.decoder.decode_with_bom_removal(txt);
        // Truncate at the first NUL so CString::new below can succeed.
        let ptr = unsafe { TextDecoder::str_from_u8_nul_utf8_unchecked(res.as_bytes()) };
        let result = CString::new(ptr);
        match result {
            Ok(result) => result.into_raw(),
            Err(err) => {
                dbg!("error {:?}", err.to_string());
                null()
            }
        }
    }
    /*
    pub fn decode(&mut self, data: *const u8, len: size_t) -> *const c_char {
        let txt = unsafe { std::slice::from_raw_parts(data, len) };
        let decoder = self.decoder.new_decoder_with_bom_removal();
        let result = self.decoder.decode_with_bom_removal(txt);
        let raw = result.0;
        let string = String::from(raw);
        let result = CString::new(string);
        match result {
            Ok(result) => result.into_raw(),
            Err(err) => {
                dbg!("error {:?}", err.to_string());
                null()
            }
        }
    }
    */
    /// Canonical name of the active encoding, as a caller-owned C string.
    pub fn encoding(&self) -> *const c_char {
        CString::new(self.decoder.name()).unwrap().into_raw()
    }
    /// Reclaims and drops a decoder previously leaked to C as an integer
    /// handle. Zero is treated as "no decoder".
    pub fn release(ptr: c_longlong) {
        if ptr != 0 {
            let _: Box<TextDecoder> = unsafe { Box::from_raw(ptr as *mut _) };
        }
    }
}
/// C-facing: returns the encoding name for the decoder handle, or null for
/// a zero handle.
pub(crate) fn text_decoder_get_encoding(decoder: c_longlong) -> *const c_char {
    if decoder != 0 {
        // Reconstruct the Box to call into it, then leak it again with
        // into_raw so the handle stays valid for the caller.
        let decoder: Box<TextDecoder> = unsafe { Box::from_raw(decoder as *mut _) };
        let encoding = decoder.encoding();
        Box::into_raw(decoder);
        return encoding;
    }
    null()
}
/// C-facing: decodes `len` bytes at `data` with the decoder handle, or
/// returns null for a zero handle. The returned string is caller-owned.
pub(crate) fn text_decoder_decode(
    decoder: c_longlong,
    data: *const u8,
    len: size_t,
) -> *const c_char {
    if decoder != 0 {
        // Borrow-via-Box dance: take ownership temporarily, then leak the
        // Box again so the handle remains valid.
        let mut decoder: Box<TextDecoder> = unsafe { Box::from_raw(decoder as *mut _) };
        let decoded = decoder.decode(data, len);
        Box::into_raw(decoder);
        return decoded;
    }
    null()
}
/// C-facing: destroys the decoder behind `decoder`, invalidating the handle.
#[allow(unused)]
pub(crate) fn free_text_decoder(decoder: i64) {
    TextDecoder::release(decoder);
}
| true |
1f5d0074f0d49fc0eefe4939374dd275ce638fa5
|
Rust
|
scottschroeder/testris
|
/src/transform.rs
|
UTF-8
| 3,504 | 3.6875 | 4 |
[] |
no_license
|
use na;
// Integer 2D point and 2x2 matrix used for grid-aligned piece rotations.
pub type Point = na::Point2<i32>;
pub type Matrix = na::Matrix2<i32>;
/// Cardinal orientation of a piece.
// Copy/PartialEq/Eq added (backward compatible): the variants carry no
// data, and equality lets tests compare orientations directly.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum Orientation {
    North,
    East,
    South,
    West,
}

/// Direction of a quarter-turn rotation.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum RotationDirection {
    Clockwise,
    CounterClockwise,
}

/// Returns the orientation reached by turning `orient` one quarter-turn in
/// the given `rotation` direction.
pub fn rotate(orient: &Orientation, rotation: &RotationDirection) -> Orientation {
    // Flattened tuple match replaces the original nested matches; every
    // (direction, orientation) pair is listed, so the match stays exhaustive.
    match (rotation, orient) {
        (RotationDirection::Clockwise, Orientation::North) => Orientation::East,
        (RotationDirection::Clockwise, Orientation::East) => Orientation::South,
        (RotationDirection::Clockwise, Orientation::South) => Orientation::West,
        (RotationDirection::Clockwise, Orientation::West) => Orientation::North,
        (RotationDirection::CounterClockwise, Orientation::North) => Orientation::West,
        (RotationDirection::CounterClockwise, Orientation::East) => Orientation::North,
        (RotationDirection::CounterClockwise, Orientation::South) => Orientation::East,
        (RotationDirection::CounterClockwise, Orientation::West) => Orientation::South,
    }
}
/// Applies the rotation associated with `orientation` to `point`.
pub fn transform(point: &Point, orientation: &Orientation) -> Point {
    rotation_matrix(orientation) * *point
}
/// 2x2 integer rotation matrix for the orientation: North is the identity,
/// East/West are opposite quarter-turns, South is a half-turn.
fn rotation_matrix(o: &Orientation) -> Matrix {
    match *o {
        Orientation::North => Matrix::new(1, 0, 0, 1),
        Orientation::East => Matrix::new(0, 1, -1, 0),
        Orientation::South => Matrix::new(-1, 0, 0, -1),
        Orientation::West => Matrix::new(0, -1, 1, 0),
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    // `#[test]` was missing here, so this assertion never ran.
    #[test]
    fn test_add() {
        let p1 = Point::new(1, 1);
        let p2 = Point::new(1, 2);
        let presult = Point::new(2, 3);
        assert_eq!(presult, p1 + p2.to_vector());
    }
    // Ring of eight boundary points around the origin; the transform tests
    // below rely on a quarter-turn mapping index i to (i + 2) mod 8.
    fn test_star() -> Vec<Point> {
        vec![
            Point::new(0, 1),
            Point::new(1, 1),
            Point::new(1, 0),
            Point::new(1, -1),
            Point::new(0, -1),
            Point::new(-1, -1),
            Point::new(-1, 0),
            Point::new(-1, 1),
        ]
    }
    #[test]
    /// This is a test that the crate works as expected
    fn point_with_identity() {
        let p = Point::new(1i32, 0i32);
        let identity = Matrix::new(1, 0, 0, 1);
        let p2 = identity * p;
        assert_eq!(p2, p);
    }
    #[test]
    fn transform_north() {
        // North is the identity transform.
        let rotate = Orientation::North;
        let test_points = test_star();
        for i in 0..test_points.len() {
            let p = test_points[i];
            assert_eq!(p, transform(&p, &rotate));
        }
    }
    #[test]
    fn transform_east() {
        let rotate = Orientation::East;
        let test_points = test_star();
        for i in 0..test_points.len() {
            let p = test_points[i];
            let p_east = test_points[(i + 2) % test_points.len()];
            assert_eq!(p_east, transform(&p, &rotate));
        }
    }
    #[test]
    fn transform_south() {
        let rotate = Orientation::South;
        let test_points = test_star();
        for i in 0..test_points.len() {
            let p = test_points[i];
            let p_south = test_points[(i + 4) % test_points.len()];
            assert_eq!(p_south, transform(&p, &rotate));
        }
    }
    #[test]
    fn transform_west() {
        let rotate = Orientation::West;
        let test_points = test_star();
        for i in 0..test_points.len() {
            let p = test_points[i];
            let p_west = test_points[(i + 6) % test_points.len()];
            assert_eq!(p_west, transform(&p, &rotate));
        }
    }
}
| true |
e34ecc519433e86e7d2d7dbf28f9756d368f5356
|
Rust
|
ToF-/yearly
|
/tdd_rust/yearly/src/transaction.rs
|
UTF-8
| 6,610 | 3.65625 | 4 |
[] |
no_license
|
use crate::{period::within,period::Period};
use crate::date::Date;
use std::collections::HashMap;
/// A single dated ledger entry.
pub struct Transaction {
    pub date: Date,
    /// Grouping key used by `total_per_category`.
    pub category: String,
    /// Free-form description; not used by the aggregation functions.
    pub label: String,
    /// Signed amount — presumably in minor currency units; TODO confirm.
    pub amount: i64,
}
/// Aggregated amounts for one category. `amounts` is `(current, previous)`
/// period totals; Ord (derived, field order) sorts by category first.
#[derive(PartialEq,Eq,Ord,PartialOrd)]
pub struct Total {
    pub category: String,
    pub amounts: (i64, i64),
}
/// Which report column a set of transactions is totalled into:
/// `Current` fills `amounts.0`, `Previous` fills `amounts.1`.
pub enum Column {
    Current,
    Previous
}
/// Sums transaction amounts per category, writing each category's total
/// into the tuple slot selected by `period` (`Current` -> `.0`,
/// `Previous` -> `.1`, the other slot staying 0), and returns the totals
/// sorted by category.
pub fn total_per_category(transactions: Vec<Transaction>, period: Column) -> Vec<Total> {
    let mut totals = HashMap::<String, (i64, i64)>::new();
    for transaction in &transactions {
        // Entry API: one hash lookup per transaction (the original did an
        // `entry` read followed by a separate `insert` of the new tuple).
        let amounts = totals.entry(transaction.category.clone()).or_insert((0, 0));
        match period {
            Column::Current => amounts.0 += transaction.amount,
            Column::Previous => amounts.1 += transaction.amount,
        }
    }
    // Move the map contents straight into Totals instead of cloning each
    // category into a push loop.
    let mut result: Vec<Total> = totals
        .into_iter()
        .map(|(category, amounts)| Total { category, amounts })
        .collect();
    result.sort();
    result
}
/// Keeps only the transactions whose date falls within `period`,
/// preserving their original order.
pub fn from_period(transactions: Vec<Transaction>, period: Period) -> Vec<Transaction> {
    transactions
        .into_iter()
        .filter(|transaction| within(period, transaction.date))
        .collect()
}
#[cfg(test)]
mod tests_transaction {
use super::*;
#[test]
fn total_per_category_on_an_empty_list_should_yield_an_empty_list() {
let transactions = Vec::<Transaction>::new();
let totals = total_per_category(transactions, Column::Current);
assert_eq!(totals.len(), 0);
}
#[test]
fn total_per_category_on_a_single_transaction_should_yield_the_transaction_amount() {
let mut transactions = Vec::<Transaction>::new();
transactions.push(Transaction {
date: Date::from_ymd(2020,02,29),
label: "some groceries".to_string(),
category: "Groceries".to_string(),
amount: 4807,
});
let totals = total_per_category(transactions, Column::Current);
assert_eq!(totals.len(), 1);
assert_eq!(totals[0].category, "Groceries");
assert_eq!(totals[0].amounts, (4807,0));
}
#[test]
fn total_on_one_category_for_several_transactions_should_yield_the_category_total() {
let mut transactions = Vec::<Transaction>::new();
transactions.push(Transaction { date: Date::from_ymd(2020,02,29), label: "some groceries".to_string(), category: "Groceries".to_string(), amount: 4807, });
transactions.push(Transaction { date: Date::from_ymd(2020,03,20), label: "other groceries".to_string(), category: "Groceries".to_string(), amount: 10000, });
let totals = total_per_category(transactions, Column::Current);
assert_eq!(totals.len(), 1);
assert_eq!(totals[0].category, "Groceries");
assert_eq!(totals[0].amounts, (14807,0));
}
#[test]
fn total_on_several_categories_for_several_transactions_should_yield_the_total_per_category_on_current_period() {
    // Mixed categories: each category is totalled independently, reported
    // in the Current slot of the (current, previous) pair.
    let txs = vec![
        Transaction { date: Date::from_ymd(2020, 02, 29), label: String::from("some groceries"), category: String::from("Groceries"), amount: 4807 },
        Transaction { date: Date::from_ymd(2020, 03, 20), label: String::from("other groceries"), category: String::from("Groceries"), amount: 10000 },
        Transaction { date: Date::from_ymd(2020, 01, 29), label: String::from("some taxes"), category: String::from("Taxes"), amount: 2000 },
        Transaction { date: Date::from_ymd(2020, 04, 20), label: String::from("other taxes"), category: String::from("Taxes"), amount: 20000 },
    ];
    let totals = total_per_category(txs, Column::Current);
    assert_eq!(totals.len(), 2);
    assert_eq!(totals[0].category, "Groceries");
    assert_eq!(totals[0].amounts, (14807, 0));
    assert_eq!(totals[1].category, "Taxes");
    assert_eq!(totals[1].amounts, (22000, 0));
}
#[test]
fn total_on_several_categories_for_several_transactions_should_yield_the_total_per_category_on_previous_period() {
    // Same data as the Current-period test, but totals land in the
    // Previous slot of the (current, previous) pair.
    let txs = vec![
        Transaction { date: Date::from_ymd(2020, 02, 29), label: String::from("some groceries"), category: String::from("Groceries"), amount: 4807 },
        Transaction { date: Date::from_ymd(2020, 03, 20), label: String::from("other groceries"), category: String::from("Groceries"), amount: 10000 },
        Transaction { date: Date::from_ymd(2020, 01, 29), label: String::from("some taxes"), category: String::from("Taxes"), amount: 2000 },
        Transaction { date: Date::from_ymd(2020, 04, 20), label: String::from("other taxes"), category: String::from("Taxes"), amount: 20000 },
    ];
    let totals = total_per_category(txs, Column::Previous);
    assert_eq!(totals.len(), 2);
    assert_eq!(totals[0].category, "Groceries");
    assert_eq!(totals[0].amounts, (0, 14807));
    assert_eq!(totals[1].category, "Taxes");
    assert_eq!(totals[1].amounts, (0, 22000));
}
#[test]
fn transactions_should_be_selected_given_a_period() {
    // Only transactions dated within [2020-01-01, 2020-02-29] survive
    // the selection; order of the input is preserved.
    let txs = vec![
        Transaction { date: Date::from_ymd(2020, 02, 29), label: String::from("some groceries"), category: String::from("Groceries"), amount: 4807 },
        Transaction { date: Date::from_ymd(2020, 03, 20), label: String::from("other groceries"), category: String::from("Groceries"), amount: 10000 },
        Transaction { date: Date::from_ymd(2020, 01, 29), label: String::from("some taxes"), category: String::from("Taxes"), amount: 2000 },
        Transaction { date: Date::from_ymd(2020, 04, 20), label: String::from("other taxes"), category: String::from("Taxes"), amount: 20000 },
    ];
    let period = (Date::from_ymd(2020, 01, 01), Date::from_ymd(2020, 02, 29));
    let selection = from_period(txs, period);
    assert_eq!(selection.len(), 2);
    assert_eq!(selection[0].label, "some groceries");
    assert_eq!(selection[1].label, "some taxes");
}
}
| true |
aeaaf8e978e18e777233b1c7e8f00c8a31823ef3
|
Rust
|
natsutan/study_rust
|
/slib_cook/chap1/src/bin/random.rs
|
UTF-8
| 561 | 3.171875 | 3 |
[] |
no_license
|
extern crate rand;
// Small demo of the `rand` crate: free-function `random`, trait-based
// `Rng` methods, and ranged generation.
fn main() {
    // Turbofish vs. annotated-binding forms of `rand::random`.
    let first = rand::random::<i32>();
    let second: i32 = rand::random();
    println!("rand {} {}", first, second);

    let letter: char = rand::random();
    println!("rand {}", letter);

    use rand::Rng;
    let mut rng = rand::thread_rng();
    // Ten coin flips; `gen()` is inferred to produce a bool here.
    for _ in 0..10 {
        if rng.gen() {
            println!("success!");
        }
    }

    // Ranged generation: an integer and a float.
    let ranged_int = rng.gen_range(0, 10);
    let ranged_float = rng.gen_range(0.0, 1.0);
    println!("random {} {} ", ranged_int, ranged_float);
}
| true |
834268e39ee44e87dde3f748bf49337a13327e6f
|
Rust
|
TheArcadiaGroup/casper-staking-rewards-core
|
/tests/src/erc20.rs
|
UTF-8
| 6,302 | 2.78125 | 3 |
[] |
no_license
|
use casper_engine_test_support::{Code, Hash, SessionBuilder, TestContext, TestContextBuilder};
use casper_types::{AsymmetricType, CLTyped, Key, PublicKey, RuntimeArgs, U256, U512, account::AccountHash, bytesrepr::FromBytes, runtime_args};
// contains methods that can simulate a real-world deployment (storing the contract in the blockchain)
// and transactions to invoke the methods in the contract.
pub mod token_cfg {
use super::*;
pub const NAME: &str = "ERC20";
pub const SYMBOL: &str = "ERC";
pub const DECIMALS: u8 = 8;
pub fn total_supply() -> U256 {
1_000.into()
}
}
/// Account hash of the account submitting a deploy.
pub struct Sender(pub AccountHash);
/// Test harness wrapping a deployed ERC20 contract instance.
pub struct Token {
pub name: String,
pub symbol: String,
// In-memory execution context holding the deployed contract state.
context: TestContext,
// Test account hashes; `deployed` funds `ali` and `bob` only.
pub ali: AccountHash,
pub bob: AccountHash,
pub joe: AccountHash,
}
/// Renders a `Key` in the string form used for dictionary lookups:
/// account display for accounts, hex for hash keys, a sentinel otherwise.
fn key_to_str(key: &Key) -> String {
match key {
Key::Account(account) => account.to_string(),
Key::Hash(package) => hex::encode(package),
_ => "UnexpectedKeyVariant".to_string()
}
}
impl Token {
/// Deploys "erc20.wasm" into a fresh test context under `ali`'s authority.
/// `ali` and `bob` are funded; `joe` is only an extra account hash.
pub fn deployed(name: &str, symbol: &str) -> Token {
// Deterministic keys so test runs are reproducible.
let ali = PublicKey::ed25519_from_bytes([3u8; 32]).unwrap();
let bob = PublicKey::ed25519_from_bytes([6u8; 32]).unwrap();
let joe = PublicKey::ed25519_from_bytes([9u8; 32]).unwrap();
let mut context = TestContextBuilder::new()
.with_public_key(ali.clone(), U512::from(500_000_000_000_000_000u64))
.with_public_key(bob.clone(), U512::from(500_000_000_000_000_000u64))
.build();
let session_code = Code::from("erc20.wasm");
let session_args = runtime_args! {
"token_name" => name,
"token_symbol" => symbol,
"token_decimals" => token_cfg::DECIMALS,
"token_total_supply" => token_cfg::total_supply()
};
let session = SessionBuilder::new(session_code, session_args)
.with_address((&ali).to_account_hash())
.with_authorization_keys(&[ali.to_account_hash()])
.build();
context.run(session);
Token {
name: name.to_string(),
symbol: symbol.to_string(),
context,
ali: ali.to_account_hash(),
bob: bob.to_account_hash(),
joe: joe.to_account_hash(),
}
}
/// Looks up the contract hash stored under "<name>_hash" in `ali`'s named keys.
///
/// Panics if the key is missing or has the wrong type.
pub fn contract_hash(&self) -> Hash {
self.context
.query(self.ali, &[format!("{}_hash", self.name)])
.unwrap_or_else(|_| panic!("{} contract not found", self.name))
.into_t()
.unwrap_or_else(|_| panic!("{} has wrong type", self.name))
}
/// query a contract's named key.
fn query_contract<T: CLTyped + FromBytes>(&self, name: &str) -> Option<T> {
match self
.context
.query(self.ali, &[self.name.clone(), name.to_string()])
{
Err(_) => None,
Ok(maybe_value) => {
let value = maybe_value
.into_t()
.unwrap_or_else(|_| panic!("{} is not expected type.", name));
Some(value)
}
}
}
/// query a contract's dictionary's key.
fn query_contract_dictionary<T: CLTyped + FromBytes>(
&self,
key: AccountHash,
context: &TestContext,
dictionary_name: String,
name: String,
) -> Option<T> {
match context.query_dictionary_item(key.into(), Some(dictionary_name), name.clone()) {
Err(_) => None,
Ok(maybe_value) => {
let value = maybe_value
.into_t()
.unwrap_or_else(|_| panic!("{} is not the expected type.", name));
Some(value)
}
}
}
/// call a contract's specific entry point.
fn call(&mut self, sender: Sender, method: &str, args: RuntimeArgs) {
let Sender(address) = sender;
let code = Code::Hash(self.contract_hash(), method.to_string());
let session = SessionBuilder::new(code, args)
.with_address(address)
.with_authorization_keys(&[address])
.build();
self.context.run(session);
}
/// Token name as stored in the "token_metadata" dictionary.
pub fn name(&self) -> String {
self.query_contract_dictionary(
self.ali,
&self.context,
"token_metadata".to_string(),
"name".to_string()
).unwrap()
}
/// Token symbol as stored in the "token_metadata" dictionary.
pub fn symbol(&self) -> String {
self.query_contract_dictionary(
self.ali,
&self.context,
"token_metadata".to_string(),
"symbol".to_string()
).unwrap()
}
/// Token decimals as stored in the "token_metadata" dictionary.
pub fn decimals(&self) -> u8 {
self.query_contract_dictionary(
self.ali,
&self.context,
"token_metadata".to_string(),
"decimals".to_string()
).unwrap()
}
/// Balance of `account` from the "balances" dictionary; panics if absent.
pub fn balance_of(&self, account: Key) -> U256 {
//let key = format!("balances_{}", account);
self.query_contract_dictionary(
self.ali,
&self.context,
"balances".to_string(),
key_to_str(&account)
).unwrap()
}
/// Approved allowance from `owner` to `spender`; zero when never set.
// NOTE(review): unlike `balance_of`, this still reads a named key rather
// than a dictionary — confirm this matches the contract's storage layout.
pub fn allowance(&self, owner: Key, spender: Key) -> U256 {
let key = format!("allowances_{}_{}", owner, spender);
self.query_contract(&key).unwrap_or_default()
}
/// Invokes the contract's "transfer" entry point as `sender`.
pub fn transfer(&mut self, recipient: Key, amount: U256, sender: Sender) {
self.call(
sender,
"transfer",
runtime_args! {
"recipient" => recipient,
"amount" => amount
},
);
}
/// Invokes the contract's "approve" entry point as `sender`.
pub fn approve(&mut self, spender: Key, amount: U256, sender: Sender) {
self.call(
sender,
"approve",
runtime_args! {
"spender" => spender,
"amount" => amount
},
);
}
/// Invokes the contract's "transfer_from" entry point as `sender`.
pub fn transfer_from(
&mut self,
owner: Key,
recipient: Key,
amount: U256,
sender: Sender,
) {
self.call(
sender,
"transfer_from",
runtime_args! {
"owner" => owner,
"recipient" => recipient,
"amount" => amount
},
);
}
}
| true |
3736e07c5b8ed2dc6ca4c009e24b4005ccada796
|
Rust
|
tianenchen/leetcode-rs
|
/src/leetcode/compress_string.rs
|
UTF-8
| 836 | 3.375 | 3 |
[] |
no_license
|
struct Solution;

impl Solution {
    /// Run-length encodes `s` (e.g. "aabccc" -> "a2b1c3") and returns the
    /// encoded form only when it is *strictly* shorter than the original;
    /// otherwise the original string is returned unchanged.
    ///
    /// Fix: the previous implementation appended a sentinel `'0'` to force a
    /// final flush. That sentinel collides with inputs whose last run is made
    /// of `'0'` characters (e.g. "a00" compressed to "a1", losing the zeros).
    /// This version flushes the last run explicitly instead.
    pub fn compress_string(s: String) -> String {
        if s.is_empty() {
            return String::new();
        }
        let mut encoded = String::new();
        let mut chars = s.chars();
        // Non-empty input guarantees a first character.
        let mut run_char = chars.next().unwrap();
        let mut run_len: usize = 1;
        for c in chars {
            if c == run_char {
                run_len += 1;
            } else {
                // Run ended: emit "<char><count>" and start a new run.
                encoded.push(run_char);
                encoded.push_str(&run_len.to_string());
                run_char = c;
                run_len = 1;
            }
        }
        // Flush the final run (no sentinel needed).
        encoded.push(run_char);
        encoded.push_str(&run_len.to_string());
        // Keep whichever representation is shorter; ties favor the original.
        if encoded.len() < s.len() {
            encoded
        } else {
            s
        }
    }
}
#[test]
fn check() {
// The compressed form "a3b2c1d2e1" (10 bytes) is longer than the input
// (9 bytes), so the original string must be returned unchanged.
assert_eq!(
Solution::compress_string("aaabbcdde".to_string()),
"aaabbcdde".to_string()
);
}
| true |
2f37bc880f8850a3462a7ffd7b2d884eee73354e
|
Rust
|
KFBI1706/kodekalender
|
/2019/day08/main.rs
|
UTF-8
| 1,712 | 3.1875 | 3 |
[] |
no_license
|
use std::fs;
/// Parses up to 10 wheels of 4 instructions each from `wheels_string`.
///
/// Rows are newline-separated; instructions within a row are separated by
/// ", ". Slots that are not filled keep the placeholder "s".
///
/// Fix: the previous version indexed `wheels[y][x]` with an unbounded `x`,
/// so any row containing more than 4 instructions panicked with an
/// out-of-bounds access. Surplus rows/instructions are now ignored.
fn parse_wheels(wheels_string: &str) -> Vec<Vec<&str>> {
    let mut wheels: Vec<Vec<&str>> = vec![vec!["s"; 4]; 10];
    for (y, row) in wheels_string.split('\n').take(10).enumerate() {
        for (x, ins) in row.split(", ").take(4).enumerate() {
            wheels[y][x] = ins;
        }
    }
    wheels
}
// Drives the wheel machine: repeatedly executes the instruction at
// wheels[coins % 10][counts[coins % 10] % 4] until "STOPP" is reached.
fn main() {
let text = fs::read_to_string("wheels.txt").expect("Failed to read file");
let wheels = parse_wheels(&text);
// counts[w] = how many times wheel w has been stepped.
let mut counts = vec![0; wheels.len()];
let mut coins = 6;
let mut ins = wheels[coins%10][counts[coins%10]%4];
while ins != "STOPP" {
counts[coins%10] += 1;
match ins {
// Advance every odd-indexed wheel by one step.
"ROTERODDE" => {
for i in (1..10).step_by(2) {
counts[i] += 1;
}
},
// NOTE(review): this arm looks unfinished. It iterates the characters of
// the instruction *name* ("TREKK1FRAODDE"), not the coin count; `mul` is
// never used; the bare `new_coins` expression discards its value; and
// `new_coins *= -1` / `to_digit(10).unwrap()` on non-digit characters
// would not compile/would panic. TODO: confirm intended semantics
// ("subtract 1 from odd digits"?) before relying on this branch.
"TREKK1FRAODDE" => {
let mut new_coins = coins;
let mut mul = 1;
for (i,c) in ins.to_string().chars().rev().enumerate() {
if c == '-' {
new_coins *= -1
}
if c.to_digit(10).unwrap() % 2 == 1 {
new_coins
}
}
},
// Multiply coins by its own most significant digit (sign stripped).
"GANGEMSD" => coins *= coins.to_string().replace("-","").chars().take(1).collect::<String>().parse::<usize>().unwrap(),
"MINUS1" => coins -= 1,
"MINUS9" => coins -= 9,
"PLUSS4" => coins += 4,
// Unknown instruction: print it and bail out.
_ => {
println!("{}", ins);
return
}
}
println!("{}",coins);
ins = wheels[coins%10][counts[coins%10]%4];
}
}
| true |
3a544608eba4767844b21d9e3a3f877afeef66b1
|
Rust
|
mybbsky2012/multipart-stream-rs
|
/fuzz/fuzz_targets/parse.rs
|
UTF-8
| 1,736 | 2.5625 | 3 |
[
"MIT",
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
] |
permissive
|
#![no_main]
use futures::{task::Poll, Stream, StreamExt};
use libfuzzer_sys::fuzz_target;
fuzz_target!(|data: &[u8]| {
// Look for panics or obvious misbehavior in the parsing implementation.
// Divide data into parts (by a u8 length prefix) and feed them over a channel, polling the
// stream after each until it's pending. When there are no more full parts, just break.
// Capacity 0 forces lock-step feeding: each send must be drained by a poll.
let (mut tx, rx) = futures::channel::mpsc::channel(0);
// "foo" is the multipart boundary string used by the parser under test.
let stream = multipart_stream::parser::parse(rx.map(Ok::<_, std::convert::Infallible>), "foo");
futures::pin_mut!(stream);
let waker = futures::task::noop_waker();
let mut cx = futures::task::Context::from_waker(&waker);
let mut data = data;
while data.len() > 1 {
let next_part_size = usize::from(data[0]);
if data.len() < 1 + next_part_size {
break;
}
let next_part = bytes::Bytes::copy_from_slice(&data[1..1 + next_part_size]);
tx.try_send(next_part).expect("previous stream poll should have emptied the channel");
// Drain the parser; a parse error ends the fuzz case, but the stream must
// not terminate while the sender is still alive.
while let Poll::Ready(r) = stream.as_mut().poll_next(&mut cx) {
match r {
Some(Err(_)) => return,
None => panic!("tx hasn't ended => stream shouldn't end"),
_ => {}
}
}
data = &data[1 + next_part_size..];
}
// Handle end of stream.
drop(tx);
match stream.poll_next(&mut cx) {
Poll::Pending => panic!("tx has ended => stream shouldn't be pending"),
Poll::Ready(Some(Err(_))) => return, // an error about an unfinished part is normal
Poll::Ready(Some(Ok(_))) => panic!("tx has ended => stream shouldn't have more data"),
Poll::Ready(None) => {}
}
});
| true |
7065d8198e0b5582bedcda742d7150a71166cac2
|
Rust
|
vstroebel/qd_html
|
/src/tests/dom.rs
|
UTF-8
| 1,906 | 3.3125 | 3 |
[
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference",
"MIT"
] |
permissive
|
use crate::dom::*;
#[test]
fn element() {
    // A freshly created element carries its name and nothing else.
    let elem = Element::new("test");
    assert_eq!(elem.name, "test");
    assert_eq!(elem.attributes.len(), 0);
    assert_eq!(elem.nodes.len(), 0);
}
#[test]
fn add_element() {
    // Appending a child element stores it as a Node::Element.
    let mut parent = Element::new("test");
    parent.add_element(Element::new("test2"));
    assert_eq!(parent.nodes.len(), 1);
    match &parent.nodes[0] {
        Node::Element(child) => assert_eq!(child.name, "test2"),
        _ => panic!("Node of wrong type:{:?}", parent),
    }
}
#[test]
fn add_text() {
    // Appending text stores it as a Node::Text with the given content.
    let mut parent = Element::new("test");
    parent.add_text("test2");
    assert_eq!(parent.nodes.len(), 1);
    match &parent.nodes[0] {
        Node::Text(text) => assert_eq!(text.content, "test2"),
        _ => panic!("Node of wrong type:{:?}", parent),
    }
}
#[test]
fn add_comment() {
    // Appending a comment stores it as a Node::Comment with the given content.
    let mut parent = Element::new("test");
    parent.add_comment("test2");
    assert_eq!(parent.nodes.len(), 1);
    match &parent.nodes[0] {
        Node::Comment(comment) => assert_eq!(comment.content, "test2"),
        _ => panic!("Node of wrong type:{:?}", parent),
    }
}
#[test]
fn add_cdata() {
    // Appending CDATA stores it as a Node::CData with the given content.
    let mut parent = Element::new("test");
    parent.add_cdata("test2");
    assert_eq!(parent.nodes.len(), 1);
    match &parent.nodes[0] {
        Node::CData(cdata) => assert_eq!(cdata.content, "test2"),
        _ => panic!("Node of wrong type:{:?}", parent),
    }
}
#[test]
fn get_attribute_value() {
    let mut elem = Element::new("test");
    elem.set_attribute("attr1", "v1");
    elem.set_attribute("attr2", "v2");
    elem.set_bool_attribute("attr3");
    // Valued attributes report Some(value); boolean and unknown ones None.
    assert_eq!(elem.get_attribute_value("attr1"), Some("v1"));
    assert_eq!(elem.get_attribute_value("attr2"), Some("v2"));
    assert_eq!(elem.get_attribute_value("attr3"), None);
    assert_eq!(elem.get_attribute_value("notexists"), None);
    // has_attribute is true for valued and boolean attributes alike.
    assert!(elem.has_attribute("attr1"));
    assert!(elem.has_attribute("attr2"));
    assert!(elem.has_attribute("attr3"));
    assert!(!elem.has_attribute("notexists"));
}
| true |
1dd9b8529af496bf39c70d072c5cadffc1da4765
|
Rust
|
loganmzz/TrickTrackTruck-planning-importer-Rust
|
/src/parser/impl/test.rs
|
UTF-8
| 3,750 | 2.84375 | 3 |
[
"MIT"
] |
permissive
|
use chrono::prelude::*;
use model::{PackageInfo, Rotation};
use super::Error;
use ::tricktracktruck_macros::assert_that;
// "AB012" = 2-char package type + 3-digit count.
#[test]
#[allow(non_snake_case)]
fn test_packageinfo_parse_AB012() {
let packageinfo = PackageInfo::parse("AB012").unwrap();
assert_that!(packageinfo.package_type; equals "AB");
assert_that!(packageinfo.count ; equals 12);
}
// 4 characters instead of the required 5 must yield InvalidLength.
#[test]
#[allow(non_snake_case)]
fn test_packageinfo_parse_AB01() {
match PackageInfo::parse("AB01").err().unwrap() {
Error::InvalidLength { actual: actual_length, expected: expected_length } => {
assert_that!(actual_length ; equals 4);
assert_that!(expected_length; equals 5);
},
e => assert!(false, "Should result in invalid length: {:?}", e),
}
}
// A non-numeric count ("A01") must yield an IntParse error.
#[test]
#[allow(non_snake_case)]
fn test_packageinfo_parse_ABA01() {
match PackageInfo::parse("ABA01").err().unwrap() {
Error::IntParse(_) => (),
e => assert!(false, "Should result in int parsing error: {:?}", e),
}
}
// Rotation lines must be exactly 120 characters; too short fails.
#[test]
fn test_rotation_parse_empty() {
match Rotation::parse("").err().unwrap() {
Error::InvalidLength { actual: actual_length, expected: expected_length } => {
assert_that!(actual_length ; equals 0);
assert_that!(expected_length; equals 120);
},
e => assert!(false, "Should result in invalid length error: {:?}", e),
}
}
// ...and too long (130 characters) fails the same way.
#[test]
fn test_rotation_parse_130_characters_long() {
match Rotation::parse("0123456789112345678921234567893123456789412345678951234567896123456789712345678981234567899123456789012345678911234567892123456789").err().unwrap() {
Error::InvalidLength { actual: actual_length, expected: expected_length } => {
assert_that!(actual_length ; equals 130);
assert_that!(expected_length; equals 120);
},
e => assert!(false, "Should result in invalid length error: {:?}", e),
}
}
// Happy path: a fixed-width 120-character line is decoded field by field
// (id, period, weekday flags, vehicle, driver, then packages).
#[test]
fn test_rotation_parse_sample() {
let line = "01234567892017031320170319YYYYYNN01234567890123456789ABCDEFGHIJKLMNOPQ01234567890123456789ABCDEFGHIJAB012CD345EF678 ";
let rotation = match Rotation::parse(line) {
Ok(rotation) => rotation,
Err(e) => { println!("{:?}", e); panic!("Parsing error"); }
};
assert_that!(rotation.id ; equals "0123456789");
assert_that!(rotation.period_start ; equals NaiveDate::from_ymd(2017, 03, 13));
assert_that!(rotation.period_end ; equals NaiveDate::from_ymd(2017, 03, 19));
// "YYYYYNN" maps to Mon..Sun flags.
assert_that!(rotation.days[&Weekday::Mon] ; equals true);
assert_that!(rotation.days[&Weekday::Tue] ; equals true);
assert_that!(rotation.days[&Weekday::Wed] ; equals true);
assert_that!(rotation.days[&Weekday::Thu] ; equals true);
assert_that!(rotation.days[&Weekday::Fri] ; equals true);
assert_that!(rotation.days[&Weekday::Sat] ; equals false);
assert_that!(rotation.days[&Weekday::Sun] ; equals false);
assert_that!(rotation.vehicle_id ; equals "0123456789");
assert_that!(rotation.vehicle_type ; equals "0123456789ABCDEFGHIJKLMNOPQ");
assert_that!(rotation.driver_id ; equals "0123456789");
assert_that!(rotation.driver_name ; equals "0123456789ABCDEFGHIJ");
// Trailing packages: each is a 2-char type plus a 3-digit count.
assert_that!(rotation.packages[0].package_type; equals "AB");
assert_that!(rotation.packages[0].count ; equals 12);
assert_that!(rotation.packages[1].package_type; equals "CD");
assert_that!(rotation.packages[1].count ; equals 345);
assert_that!(rotation.packages[2].package_type; equals "EF");
assert_that!(rotation.packages[2].count ; equals 678);
}
| true |
8e5b1d7d660dca4c221e44611353e21b8bbfac3f
|
Rust
|
Nercury/memur
|
/src/traits.rs
|
UTF-8
| 1,284 | 2.8125 | 3 |
[
"MIT",
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
use crate::{Arena, List, UploadError, Array};
/// Iterator extension that collects items into `Arena`-allocated
/// collections (`List` / `Array`), propagating allocation failures.
pub trait MemurIterator: Iterator {
/// Collects every item into an arena-backed `List`.
fn collect_list(self, arena: &Arena) -> Result<List<Self::Item>, UploadError>;
/// Collects `Result` items into a `List`, short-circuiting on the first `Err`.
fn collect_result_list<I, E>(self, arena: &Arena) -> Result<List<I>, E>
where
Self: Iterator<Item=Result<I, E>>,
E: From<UploadError>;
/// Collects into an arena-backed `Array`; requires a known length.
fn collect_array(self, arena: &Arena) -> Result<Array<Self::Item>, UploadError> where Self: ExactSizeIterator;
}
// Blanket implementation: every iterator gains the arena-collect methods.
impl<Q: Iterator> MemurIterator for Q {
fn collect_list(self, arena: &Arena) -> Result<List<Self::Item>, UploadError>
{
let mut list = List::new(arena)?;
for i in self {
list.push(i)?;
}
Ok(list)
}
fn collect_result_list<I, E>(self, arena: &Arena) -> Result<List<I>, E>
where
Self: Iterator<Item=Result<I, E>>,
E: From<UploadError>
{
let mut list = List::new(arena)?;
for mi in self {
// `?` on the item first (caller's E), then on the arena push
// (converted via E: From<UploadError>).
let i = mi?;
list.push(i)?;
}
Ok(list)
}
fn collect_array(self, arena: &Arena) -> Result<Array<Self::Item>, UploadError> where Q: ExactSizeIterator {
Array::new(arena, self)
}
}
| true |
ad603e54a75f6d3e5c63e300c3c1542be26f5183
|
Rust
|
critiqjo/cs747
|
/bandit-agent/src/server_ifx.rs
|
UTF-8
| 1,026 | 2.984375 | 3 |
[] |
no_license
|
use std::net::TcpStream;
use std::io::{Read, Write};
/// Blocking TCP client for the bandit server: sends an arm index and
/// reads back a small ASCII reward.
pub struct ServerIfx {
stream: TcpStream,
// Reusable receive buffer; replies are expected to fit in 4 bytes.
read_buf: Vec<u8>,
}
impl ServerIfx {
    /// Connects to the bandit server at `address` (e.g. "127.0.0.1:5000")
    /// and prepares a small reusable read buffer.
    ///
    /// # Panics
    /// Panics if the TCP connection cannot be established.
    pub fn new(address: &str) -> ServerIfx {
        ServerIfx {
            stream: match TcpStream::connect(address) {
                Ok(stream) => {
                    // Block indefinitely on reads; the server controls pacing.
                    let _ = stream.set_read_timeout(None);
                    stream
                }
                Err(_) => panic!("Could not connect to server!"),
            },
            read_buf: vec![0u8; 4],
        }
    }

    /// Sends the chosen `arm` index (newline-terminated decimal) and reads
    /// the reward back as a `u8`.
    ///
    /// Fixes over the previous version: a short/failed write is no longer
    /// silently ignored (`write_all` + explicit panic), and a zero-length
    /// read (server closed the connection) no longer underflows the old
    /// `read_buf[..len-1]` slice. Trailing CR/LF is stripped with
    /// `trim_end` instead of assuming exactly one `'\n'`.
    ///
    /// # Panics
    /// Panics on socket write/read failure, a closed connection, or a reply
    /// that is not a valid `u8`.
    pub fn pull_arm(&mut self, arm: usize) -> u8 {
        self.stream
            .write_all((arm.to_string() + "\n").as_bytes())
            .expect("Socket stream write failed!");
        match self.stream.read(self.read_buf.as_mut_slice()) {
            // len == 0 means EOF: the server closed the connection.
            Ok(0) => panic!("Server closed the connection!"),
            Ok(len) => {
                let reply = std::str::from_utf8(&self.read_buf[..len])
                    .unwrap_or_else(|_| panic!("Bad stream!"));
                reply
                    .trim_end()
                    .parse::<u8>()
                    .unwrap_or_else(|_| panic!("Bad reward!"))
            }
            Err(_) => panic!("Socket stream read failed!"),
        }
    }
}
| true |
49065dd28b300c9f369e90be01dba39b44b84016
|
Rust
|
mun-lang/mun
|
/crates/mun_codegen/src/ir/dispatch_table.rs
|
UTF-8
| 15,386 | 2.5625 | 3 |
[
"MIT",
"Apache-2.0"
] |
permissive
|
use crate::module_group::ModuleGroup;
use crate::type_info::{HasStaticTypeId, TypeId};
use crate::{intrinsics::Intrinsic, ir::function, ir::ty::HirTypeCache};
use inkwell::values::CallableValue;
use inkwell::{
context::Context,
module::Module,
targets::TargetData,
types::{BasicTypeEnum, FunctionType},
values::BasicValueEnum,
};
use mun_hir::{Body, Expr, ExprId, HirDatabase, InferenceResult};
use rustc_hash::FxHashSet;
use std::{
collections::{BTreeMap, HashMap},
sync::Arc,
};
/// A dispatch table in IR is a struct that contains pointers to all functions that are called from
/// code. In C terms it looks something like this:
/// ```c
/// struct DispatchTable {
/// int(*foo)(int, int);
/// // .. etc
/// } dispatchTable;
/// ```
///
/// The dispatch table is used to add a patchable indirection when calling a function from IR. The
/// DispatchTable is exposed to the Runtime which fills the structure with valid pointers to
/// functions. This basically enables all hot reloading within Mun.
#[derive(Debug, Eq, PartialEq)]
pub struct DispatchTable<'ink> {
// The LLVM context in which all LLVM types live
context: &'ink Context,
// The target for which to create the dispatch table
target: TargetData,
// This contains the functions that map to the DispatchTable struct fields
function_to_idx: HashMap<mun_hir::Function, usize>,
// Prototype to function index
prototype_to_idx: HashMap<FunctionPrototype, usize>,
// This contains an ordered list of all the functions in the dispatch table
entries: Vec<DispatchableFunction>,
// Contains a reference to the global value containing the DispatchTable
table_ref: Option<inkwell::values::GlobalValue<'ink>>,
// The IR struct type of the dispatch table global, if one was generated
table_type: Option<inkwell::types::StructType<'ink>>,
}
/// A `FunctionPrototype` defines a unique signature that can be added to the dispatch table.
#[derive(Debug, Clone, Eq, PartialEq, Hash, Ord, PartialOrd)]
pub struct FunctionPrototype {
// Fully-qualified function name
pub name: String,
// Argument type ids, in declaration order
pub arg_types: Vec<Arc<TypeId>>,
pub ret_type: Arc<TypeId>,
}
/// A `DispatchableFunction` is an entry in the dispatch table that may or may not be pointing to an
/// existing mun_hir function.
#[derive(Debug, Clone, Eq, PartialEq)]
pub struct DispatchableFunction {
pub prototype: FunctionPrototype,
// `None` for intrinsics; `Some` when the entry corresponds to a HIR function
pub mun_hir: Option<mun_hir::Function>,
}
impl<'ink> DispatchTable<'ink> {
/// Returns whether the `DispatchTable` contains the specified `function`.
pub fn contains(&self, function: mun_hir::Function) -> bool {
self.function_to_idx.contains_key(&function)
}
/// Returns a slice containing all the functions in the dispatch table.
pub fn entries(&self) -> &[DispatchableFunction] {
&self.entries
}
/// Generate a function lookup through the DispatchTable, equivalent to something along the
/// lines of: `dispatchTable[i]`, where i is the index of the function and `dispatchTable` is a
/// struct
///
/// Panics if `function` was never registered in the table.
pub fn gen_function_lookup(
&self,
db: &dyn HirDatabase,
table_ref: Option<inkwell::values::GlobalValue<'ink>>,
builder: &inkwell::builder::Builder<'ink>,
function: mun_hir::Function,
) -> CallableValue<'ink> {
let function_name = function.name(db).to_string();
// Get the index of the function
let index = *self
.function_to_idx
.get(&function)
.expect("unknown function");
self.gen_function_lookup_by_index(table_ref, builder, &function_name, index)
}
/// Generates a function lookup through the DispatchTable, equivalent to something along the
/// lines of: `dispatchTable[i]`, where i is the index of the intrinsic and `dispatchTable` is a
/// struct
///
/// Panics if the intrinsic's prototype was never registered in the table.
pub fn gen_intrinsic_lookup(
&self,
table_ref: Option<inkwell::values::GlobalValue<'ink>>,
builder: &inkwell::builder::Builder<'ink>,
intrinsic: &impl Intrinsic,
) -> CallableValue<'ink> {
let prototype = intrinsic.prototype();
// Get the index of the intrinsic
let index = *self
.prototype_to_idx
.get(&prototype)
.expect("unknown function");
self.gen_function_lookup_by_index(table_ref, builder, &prototype.name, index)
}
/// Generates a function lookup through the DispatchTable, equivalent to something along the
/// lines of: `dispatchTable[i]`, where i is the index and `dispatchTable` is a struct
fn gen_function_lookup_by_index(
&self,
table_ref: Option<inkwell::values::GlobalValue<'ink>>,
builder: &inkwell::builder::Builder<'ink>,
function_name: &str,
index: usize,
) -> CallableValue<'ink> {
// Get the internal table reference
let table_ref = table_ref.expect("no dispatch table defined");
// Create an expression that finds the associated field in the table and returns this as a pointer access
// (a struct GEP into the global, followed by a load of the function pointer).
let ptr_to_function_ptr = builder
.build_struct_gep(
table_ref.as_pointer_value(),
index as u32,
&format!("{function_name}_ptr_ptr"),
)
.unwrap_or_else(|_| {
panic!("could not get {function_name} (index: {index}) from dispatch table")
});
builder
.build_load(ptr_to_function_ptr, &format!("{function_name}_ptr"))
.into_pointer_value()
.try_into()
.expect("Pointer value is not a valid function pointer.")
}
/// Returns the value that represents the dispatch table in IR or `None` if no table was
/// generated.
pub fn global_value(&self) -> Option<&inkwell::values::GlobalValue<'ink>> {
self.table_ref.as_ref()
}
/// Returns the IR type of the dispatch table's global value, if it exists.
pub fn ty(&self) -> Option<inkwell::types::StructType<'ink>> {
self.table_type
}
}
/// A struct that can be used to build the dispatch table from HIR.
pub(crate) struct DispatchTableBuilder<'db, 'ink, 't> {
db: &'db dyn HirDatabase,
// The LLVM context in which all LLVM types live
context: &'ink Context,
// The module in which all values live
module: &'t Module<'ink>,
// The target for which to create the dispatch table
target_data: TargetData,
// Converts HIR ty's to inkwell types
hir_types: &'t HirTypeCache<'db, 'ink>,
// This contains the functions that map to the DispatchTable struct fields
function_to_idx: HashMap<mun_hir::Function, usize>,
// Prototype to function index
prototype_to_idx: HashMap<FunctionPrototype, usize>,
// These are *all* called functions in the modules
entries: Vec<TypedDispatchableFunction<'ink>>,
// Contains a reference to the global value containing the DispatchTable
table_ref: Option<inkwell::values::GlobalValue<'ink>>,
// This is the actual DispatchTable type
table_type: inkwell::types::StructType<'ink>,
// The group of modules for which the dispatch table is being build
module_group: &'t ModuleGroup,
// The set of modules that is referenced
referenced_modules: FxHashSet<mun_hir::Module>,
}
/// Pairs a dispatch-table entry with its LLVM function type.
struct TypedDispatchableFunction<'ink> {
function: DispatchableFunction,
ir_type: FunctionType<'ink>,
}
impl<'db, 'ink, 't> DispatchTableBuilder<'db, 'ink, 't> {
/// Creates a new builder that can generate a dispatch function.
/// Intrinsics, when present, are registered first and in `BTreeMap` order,
/// so they occupy the leading slots of the table.
pub fn new(
context: &'ink Context,
target_data: TargetData,
db: &'db dyn HirDatabase,
module: &'t Module<'ink>,
intrinsics: &BTreeMap<FunctionPrototype, FunctionType<'ink>>,
hir_types: &'t HirTypeCache<'db, 'ink>,
module_group: &'t ModuleGroup,
) -> Self {
let mut table = Self {
db,
context,
module,
target_data,
function_to_idx: Default::default(),
prototype_to_idx: Default::default(),
entries: Default::default(),
table_ref: None,
// Body is filled in later by `build`, once all entries are known.
table_type: context.opaque_struct_type("DispatchTable"),
hir_types,
module_group,
referenced_modules: FxHashSet::default(),
};
if !intrinsics.is_empty() {
table.ensure_table_ref();
// Use a `BTreeMap` to guarantee deterministically ordered output
for (prototype, ir_type) in intrinsics.iter() {
let index = table.entries.len();
table.entries.push(TypedDispatchableFunction {
function: DispatchableFunction {
prototype: prototype.clone(),
mun_hir: None,
},
ir_type: *ir_type,
});
table.prototype_to_idx.insert(prototype.clone(), index);
}
}
table
}
/// Creates the global dispatch table in the module if it does not exist.
fn ensure_table_ref(&mut self) {
if self.table_ref.is_none() {
self.table_ref = Some(
self.module
.add_global(self.table_type, None, "dispatchTable"),
)
}
}
/// Collects call expression from the given expression and sub expressions.
fn collect_expr(&mut self, expr_id: ExprId, body: &Arc<Body>, infer: &InferenceResult) {
let expr = &body[expr_id];
// If this expression is a call, store it in the dispatch table
if let Expr::Call { callee, .. } = expr {
match infer[*callee].as_callable_def() {
Some(mun_hir::CallableDef::Function(def)) => {
if self.module_group.should_runtime_link_fn(self.db, def) {
let fn_module = def.module(self.db);
// Track cross-module-group, non-extern callees as dependencies.
if !def.is_extern(self.db) && !self.module_group.contains(fn_module) {
self.referenced_modules.insert(fn_module);
}
self.collect_fn_def(def);
}
}
// Struct "calls" are constructors and need no dispatch entry.
Some(mun_hir::CallableDef::Struct(_)) => (),
None => panic!("expected a callable expression"),
}
}
// Recurse further
expr.walk_child_exprs(|expr_id| self.collect_expr(expr_id, body, infer));
}
/// Collects function call expression from the given expression.
#[allow(clippy::map_entry)]
pub fn collect_fn_def(&mut self, function: mun_hir::Function) {
self.ensure_table_ref();
// If the function is not yet contained in the table, add it
if !self.function_to_idx.contains_key(&function) {
let name = function.full_name(self.db);
let hir_type = function.ty(self.db);
let sig = hir_type.callable_sig(self.db).unwrap();
let ir_type = self.hir_types.get_function_type(function);
let arg_types = sig
.params()
.iter()
.map(|arg| self.hir_types.type_id(arg))
.collect();
// Unit returns are represented by the static `()` type id.
let ret_type = if !sig.ret().is_empty() {
self.hir_types.type_id(sig.ret())
} else {
<()>::type_id().clone()
};
let prototype = FunctionPrototype {
name,
arg_types,
ret_type,
};
let index = self.entries.len();
self.entries.push(TypedDispatchableFunction {
function: DispatchableFunction {
prototype: prototype.clone(),
mun_hir: Some(function),
},
ir_type,
});
self.prototype_to_idx.insert(prototype, index);
self.function_to_idx.insert(function, index);
}
}
/// Collect all the call expressions from the specified body with the given type inference
/// result.
pub fn collect_body(&mut self, body: &Arc<Body>, infer: &InferenceResult) {
self.collect_expr(body.body_expr(), body, infer);
}
/// Builds the final DispatchTable with all *called* functions from within the module
/// # Parameters
/// * **functions**: Mapping of *defined* Mun functions to their respective IR values.
/// Returns the `DispatchTable` and a set of dependencies for the module.
pub fn build(self) -> (DispatchTable<'ink>, FxHashSet<mun_hir::Module>) {
// Construct the table body from all the entries in the dispatch table
// (one function-pointer field per entry, in registration order).
let table_body: Vec<BasicTypeEnum> = self
.entries
.iter()
.map(|f| f.ir_type.ptr_type(inkwell::AddressSpace::default()).into())
.collect();
// We can fill in the DispatchTable body, i.e: struct DispatchTable { <this part> };
self.table_type.set_body(&table_body, false);
// Create a default initializer for function that are already known
if let Some(table_ref) = self.table_ref {
let values: Vec<BasicValueEnum> = self
.entries
.iter()
.enumerate()
// Maps over all HIR functions
.map(|(i, entry)| {
let function_type = table_body[i].into_pointer_type();
// Find the associated IR function if it exists
match entry.function.mun_hir {
// Case external function: Convert to typed null for the given function
None => function_type.const_null(),
// Case external function, or function from another module
Some(f) => {
if f.is_extern(self.db)
|| !self.module_group.contains(f.module(self.db))
{
// If the function is externally defined (i.e. it's an `extern`
// function or it's defined in another module) don't initialize.
function_type.const_null()
} else {
// Otherwise generate a function prototype
function::gen_prototype(self.db, self.hir_types, f, self.module)
.as_global_value()
.as_pointer_value()
}
}
}
.into()
})
.collect();
// Set the initialize for the global value
table_ref.set_initializer(&self.table_type.const_named_struct(&values));
}
let table_type = self.table_ref.map(|_| self.table_type);
(
DispatchTable {
context: self.context,
target: self.target_data,
function_to_idx: self.function_to_idx,
prototype_to_idx: self.prototype_to_idx,
table_ref: self.table_ref,
table_type,
entries: self
.entries
.into_iter()
.map(|entry| entry.function)
.collect(),
},
self.referenced_modules,
)
}
}
| true |
5604598ccffd4ed2387a9899092a9631f5e3781b
|
Rust
|
dgeene/statistics
|
/src/main.rs
|
UTF-8
| 640 | 3.203125 | 3 |
[] |
no_license
|
/*
* Objectives
*
* 1. sort the values
* 2. find the mean, median
* 3. find the std dev
* 4. find the iqr
* 5. read from file
*
*
* wishlist
* - read from xml
*
*/
fn main() {
    // Sample data set used to exercise the statistics routines.
    let input = [50, 61, 44, 68, 72, 75, 64, 76, 84, 102, 86, 94];
    sort(&input);
}
/// Prints the smallest value in `unsorted`.
///
/// Fixes over the previous version: the old loop compared `val < elem`
/// (an `i32` against an `&i32`, which does not type-check, and whose
/// condition would have selected the *largest* element — contradicting
/// the "smallest elem" message), and allocated an unused `sorted` vector.
/// Empty input is now reported instead of panicking on `unsorted[0]`.
///
/// NOTE(review): the name `sort` is a misnomer (nothing is sorted yet);
/// kept unchanged so existing callers keep working.
fn sort(unsorted: &[i32]) {
    match unsorted.iter().min() {
        Some(smallest) => println!("smallest elem: {}", smallest),
        None => println!("smallest elem: none (empty input)"),
    }
}
| true |
1a9fc8c832a4c67882c238630dc6e2913b166ef7
|
Rust
|
confessore/yes
|
/src/main.rs
|
UTF-8
| 407 | 3.03125 | 3 |
[] |
no_license
|
use std::env;
use std::ops::Add;
/// Mimics Unix `yes(1)`: repeat the command-line arguments (or "y" when
/// none are given) forever, one line per iteration.
fn main() {
    // Skip argv[0] (the program name), as the original's `remove(0)` did.
    let args: Vec<String> = env::args().skip(1).collect();
    if args.is_empty() {
        // Classic `yes` output. The original allocated a fresh String
        // ("y".to_owned()) on every iteration; a literal avoids that.
        loop {
            println!("y");
        }
    } else {
        // Build the output line once, outside the loop. `join(" ")`
        // followed by `trim` produces exactly what the original's
        // concat-with-trailing-space + `trim` produced.
        let joined = args.join(" ");
        let line = joined.trim();
        loop {
            println!("{}", line);
        }
    }
}
| true |
e68777daedc8f0596d424283ec56d1dace839ad9
|
Rust
|
stormtracks/rust-examples
|
/redisasync/examples/setpc.rs
|
UTF-8
| 1,543 | 3.109375 | 3 |
[
"MIT"
] |
permissive
|
// This uses paired_connect instead of connect
// which is shown in the example set.rs
use redis_async::{client, resp_array};
use std::net::{IpAddr, Ipv4Addr, SocketAddr};
#[tokio::main]
async fn main() {
    // Address of the local Redis server this example talks to.
    let addr = SocketAddr::new(IpAddr::V4(Ipv4Addr::new(127, 0, 0, 1)), 6379);
    assert_eq!("127.0.0.1:6379".parse(), Ok(addr));
    // A paired connection multiplexes many requests over one socket.
    let connection = client::paired_connect(&addr)
        .await
        .expect("Cannot open connection");
    // Queue a PING, a fire-and-forget SET, and a GET; await the replies
    // afterwards so the commands pipeline on the wire.
    let res_f = connection.send(resp_array!["PING", "Rick"]);
    connection.send_and_forget(resp_array!["SET", "x", "1234"]);
    let wait_f = connection.send(resp_array!["GET", "x"]);
    let result_1: String = res_f.await.expect("Cannot read result of first thing");
    let result_2: String = wait_f.await.expect("Cannot read result of second thing");
    assert_eq!(result_1, "Rick");
    assert_eq!(result_2, "1234");
    // Turn a terminal-style command string into the Vec that feeds
    // resp_array!. The original piped the iterator through a no-op
    // `.map(|item| item)`; a plain `collect` does the same job.
    let myvec = "set y 5678".split_whitespace().collect::<Vec<_>>();
    assert_eq!(myvec, vec!["set", "y", "5678"]);
    let command = resp_array![].append(myvec);
    connection.send_and_forget(command);
    let wait_g = connection.send(resp_array!["GET", "y"]);
    let result_3: String = wait_g.await.expect("Cannot read result of third thing");
    assert_eq!(result_3, "5678");
}
| true |
4b94bf5d1bcb3f05dbeea5a8c7ef306bab37022f
|
Rust
|
DeTeam/advent-2018
|
/day4/src/main.rs
|
UTF-8
| 4,733 | 2.96875 | 3 |
[] |
no_license
|
#[macro_use]
extern crate nom;
extern crate chrono;
use chrono::prelude::*;
use nom::digit;
use std::collections::HashMap;
use std::fs::File;
use std::io::prelude::*;
use std::str::FromStr;
// One event from the guard-duty log (Advent of Code 2018, day 4).
#[derive(Debug)]
enum Action {
    ShiftStarted(i32), // payload: guard id
    FellAsleep,
    WokeUp,
}
// A parsed input line: normalized timestamp plus the event.
#[derive(Debug)]
struct ParsedLine {
    date: NaiveDate,
    minute: i32, // minute past midnight; 23:xx events are normalized to 0
    action: Action,
}
// Aggregated sleep statistics for one guard.
#[derive(Debug)]
struct Guard {
    id: i32,
    minutes: HashMap<i32, i32>, // minute-of-hour -> times asleep at that minute
    total_asleep: i32,
}
// nom parser for the action part of a record: "wakes" / "falls" /
// "Guard #<id>" (the id digits are parsed into an i32).
named!(process_action<&str,Action>,
    alt_complete!(
        tag!("wakes") => { |_| Action::WokeUp } |
        tag!("falls") => { |_| Action::FellAsleep } |
        do_parse!(
            tag!("Guard #") >>
            guard: map_res!(digit, |x| FromStr::from_str(x)) >>
            (
                Action::ShiftStarted(guard)
            )
        )
    )
);
// Attributes an event to its shift's calendar date: anything logged in
// the 23:00 hour belongs to the *next* day's shift.
// NOTE(review): panics on a malformed date string (unwrap); assumes only
// hour "23" needs shifting, as in the puzzle input — confirm for other data.
fn normalize_date(date: &str, hour: &str) -> NaiveDate {
    let current_date: NaiveDate = FromStr::from_str(date).unwrap();
    match hour {
        "23" => current_date.succ(),
        _ => current_date,
    }
}
// Normalizes an event's minute: events in the 23:00 hour count as
// minute 0 of the (shifted) next day; everything else keeps its
// recorded minute. Panics if `minute` is not a valid integer.
fn normalize_minute(hour: &str, minute: &str) -> i32 {
    if hour == "23" {
        0
    } else {
        FromStr::from_str(minute).unwrap()
    }
}
// nom parser for one full log line, e.g. "[1518-11-01 00:05] falls asleep".
// The fixed-width timestamp is sliced with take! and normalized through
// normalize_date / normalize_minute before the action is parsed.
named!(process_line<&str,ParsedLine>,
    ws!(
    do_parse!(
        tag!("[") >>
        date: take!(10) >>
        hour: take!(2) >>
        tag!(":") >>
        minute: take!(2) >>
        tag!("]") >>
        action: process_action >>
        (
            ParsedLine {
                date: normalize_date(date, hour),
                minute: normalize_minute(hour, minute),
                action: action,
            }
        )
    )
    )
);
// Returns (minute, count) for the minute this guard was most often
// asleep, or (0, 0) for a guard who never slept. Ties are resolved
// arbitrarily because HashMap iteration order is unspecified.
fn sleepy_minute(g: &Guard) -> (&i32, &i32) {
    let mut minutes = g.minutes.iter().collect::<Vec<_>>();
    // Sort descending by count, then take the head.
    minutes.sort_by(|a, b| b.1.cmp(a.1));
    *minutes.get(0).unwrap_or(&(&0, &0))
}
// Solves both puzzle strategies from the raw input text:
//   1. replay the (sorted) log, accumulating per-guard sleep tallies;
//   2. print strategy 1 (sleepiest guard x his sleepiest minute) and
//      strategy 2 (guard most frequently asleep on the same minute).
fn task(s: &str) {
    let mut guards: HashMap<i32, Guard> = HashMap::new();
    // State threaded through the replay: the guard currently on duty
    // and the minute he last fell asleep (None while awake).
    let mut guard_id = None;
    let mut start = None;
    // Lines sort lexicographically into chronological order because the
    // timestamp prefix is fixed-width.
    let mut lines = s.lines().collect::<Vec<_>>();
    lines.sort();
    let logs = lines
        .iter()
        .filter_map(|line| process_line(line).ok())
        .map(|x| x.1);
    for log in logs {
        match log.action {
            Action::ShiftStarted(id) => {
                guards.entry(id).or_insert(Guard {
                    id,
                    minutes: HashMap::new(),
                    total_asleep: 0,
                });
                guard_id = Some(id);
            }
            Action::FellAsleep => {
                start = Some(log.minute);
            }
            Action::WokeUp => {
                // Only credit sleep when both a guard and a start minute
                // are known; the interval is [start, wake).
                let pair = start.and_then(|s| guard_id.and_then(|i| Some((i, s))));
                match pair {
                    Some((id, start_m)) => {
                        let end = log.minute;
                        start = None;
                        let range = start_m..end;
                        for m in range {
                            guards.entry(id).and_modify(|e| {
                                e.minutes.entry(m).and_modify(|m| *m += 1).or_insert(1);
                                e.total_asleep += 1;
                            });
                        }
                    }
                    _ => {}
                }
            }
        }
    }
    {
        // Strategy 1: guard with the most total minutes asleep.
        let mut scores = guards.values().clone().into_iter().collect::<Vec<_>>();
        scores.sort_by(|a, b| b.total_asleep.cmp(&a.total_asleep));
        let sleepy_guard = scores.get(0).unwrap();
        let lucky_minute = sleepy_minute(sleepy_guard).0;
        println!("Strategy 1");
        println!(
            "Sleepy guard: {:?}, lucky minute: {:?}, result: {:?}",
            sleepy_guard.id,
            lucky_minute,
            sleepy_guard.id * lucky_minute
        );
    }
    {
        // Strategy 2: guard whose single sleepiest minute has the
        // highest count across all guards.
        let mut scores = guards.values().clone().into_iter().collect::<Vec<_>>();
        scores.sort_by(|a, b| sleepy_minute(b).1.cmp(&sleepy_minute(a).1));
        let sleepy_guard = scores.get(0).unwrap();
        let m = sleepy_minute(sleepy_guard);
        let lucky_minute = m.0;
        let lucky_time = m.1;
        println!("Strategy 2");
        println!(
            "Sleepy guard: {:?}, lucky minute: {:?}, time: {:?}, result: {:?}",
            sleepy_guard.id,
            lucky_minute,
            lucky_time,
            sleepy_guard.id * lucky_minute
        );
    }
}
fn main() {
    // Read the puzzle input into memory, then run both strategies.
    let mut contents = String::new();
    File::open("input.txt")
        .expect("File not found")
        .read_to_string(&mut contents)
        .expect("something went wrong reading the file");
    task(&contents);
}
| true |
3ef5902bd14c23f3e2ce69b189a4285895627f67
|
Rust
|
qinxiaoguang/rs-lc
|
/src/solution/l152.rs
|
UTF-8
| 2,285 | 3.28125 | 3 |
[] |
no_license
|
/*
 * @lc app=leetcode.cn id=152 lang=rust
 *
 * [152] Maximum Product Subarray
 *
 * https://leetcode-cn.com/problems/maximum-product-subarray/description/
 *
 * Given an integer array `nums`, find the contiguous subarray (with at
 * least one element) that has the largest product, and return that
 * product.
 *
 * Example 1: [2,3,-2,4]  -> 6   (subarray [2,3])
 * Example 2: [-2,0,-1]   -> 0   ([-2,-1] is not a contiguous subarray)
 */
pub struct Solution {}

// @lc code=start
impl Solution {
    /// Returns the maximum product over all non-empty contiguous
    /// subarrays of `nums`.
    ///
    /// Classic running min/max DP: for each position keep the largest
    /// and the smallest product of a subarray *ending* there — a
    /// negative element can flip the smallest into the new largest.
    /// The original kept whole `min_dp`/`max_dp` vectors and noted in a
    /// comment that O(1) state suffices; this version does exactly
    /// that. Intermediate products stay in i64 (as before) to avoid
    /// i32 overflow.
    ///
    /// # Panics
    /// Panics when `nums` is empty (matching the original's unwrap).
    pub fn max_product(nums: Vec<i32>) -> i32 {
        let mut best: Option<i64> = None;
        let mut max_end = 1i64; // largest product ending at the previous index
        let mut min_end = 1i64; // smallest product ending at the previous index
        for &x in &nums {
            let x = i64::from(x);
            // A subarray ending here either starts here (x alone) or
            // extends the best/worst subarray ending one step back.
            let candidates = [x, max_end * x, min_end * x];
            max_end = candidates.iter().copied().max().unwrap();
            min_end = candidates.iter().copied().min().unwrap();
            best = Some(best.map_or(max_end, |b| b.max(max_end)));
        }
        best.expect("nums must be non-empty") as i32
    }
}
// @lc code=end
#[cfg(test)]
mod test {
    use super::*;
    // Sanity checks using the two examples from the problem statement.
    #[test]
    fn test_l152() {
        assert_eq!(Solution::max_product(vec![2, 3, -2, 4]), 6);
        assert_eq!(Solution::max_product(vec![-2, 0, -1]), 0);
    }
}
| true |
89834da1124cb2f934f580cbf16c2654f38d369d
|
Rust
|
zbraniecki/tinystr
|
/tests/main.rs
|
UTF-8
| 17,817 | 3.203125 | 3 |
[
"Apache-2.0",
"MIT"
] |
permissive
|
use rand::SeedableRng;
use rand_distr::{Alphanumeric, Distribution, Uniform};
use rand_pcg::Lcg64Xsh32;
use std::fmt::Write;
use std::mem::size_of;
use std::ops::Deref;
use std::ops::RangeInclusive;
use tinystr::{tinystr16, tinystr4, tinystr8, Error, TinyStr16, TinyStr4, TinyStr8};
#[cfg(any(feature = "std", feature = "alloc"))]
use tinystr::TinyStrAuto;
/// Generates an array of random alphanumeric strings.
///
/// - length = range of lengths for the strings (chosen uniformly at random)
/// - count = number of strings to generate
///
/// Seeds are fixed, so every test run sees the same pseudo-random data.
fn random_alphanums(lengths: RangeInclusive<usize>, count: usize) -> Vec<String> {
    // Lcg64Xsh32 is a small, fast PRNG.
    let mut rng1 = Lcg64Xsh32::seed_from_u64(2021);
    let mut rng2 = Lcg64Xsh32::seed_from_u64(rand::Rng::gen(&mut rng1));
    let alpha_dist = Alphanumeric;
    let len_dist = Uniform::from(lengths);
    // rng1 drives the lengths, rng2 the characters, keeping the two
    // sample streams independent.
    len_dist
        .sample_iter(&mut rng1)
        .take(count)
        .map(|len| {
            (&alpha_dist)
                .sample_iter(&mut rng2)
                .take(len)
                .map(char::from)
                .collect::<String>()
        })
        .collect()
}
// Struct sizes are part of tinystr's contract: each TinyStrN fits in an
// N-byte integer.
#[test]
fn tiny_sizes() {
    assert_eq!(4, size_of::<TinyStr4>());
    assert_eq!(8, size_of::<TinyStr8>());
    assert_eq!(16, size_of::<TinyStr16>());
    #[cfg(target_pointer_width = "64")]
    assert_eq!(24, size_of::<String>());
    // Note: TinyStrAuto is size 32 even when a smaller TinyStr type is used
    #[cfg(all(target_pointer_width = "64", any(feature = "std", feature = "alloc")))]
    assert_eq!(32, size_of::<TinyStrAuto>());
}
#[test]
fn tiny4_basic() {
    let s: TinyStr4 = "abc".parse().unwrap();
    assert_eq!(s.deref(), "abc");
}
#[test]
fn tiny4_from_bytes() {
let s = TinyStr4::from_bytes(b"abc").unwrap();
assert_eq!(s.deref(), "abc");
assert_eq!(
TinyStr4::from_bytes(&[0, 159, 146, 150]),
Err(Error::NonAscii)
);
assert_eq!(TinyStr4::from_bytes(&[]), Err(Error::InvalidSize));
assert_eq!(TinyStr4::from_bytes(&[0]), Err(Error::InvalidNull));
}
#[test]
fn tiny4_size() {
assert_eq!("".parse::<TinyStr4>(), Err(Error::InvalidSize));
assert!("1".parse::<TinyStr4>().is_ok());
assert!("12".parse::<TinyStr4>().is_ok());
assert!("123".parse::<TinyStr4>().is_ok());
assert!("1234".parse::<TinyStr4>().is_ok());
assert_eq!("12345".parse::<TinyStr4>(), Err(Error::InvalidSize));
assert_eq!("123456789".parse::<TinyStr4>(), Err(Error::InvalidSize));
}
#[test]
fn tiny4_null() {
assert_eq!("a\u{0}b".parse::<TinyStr4>(), Err(Error::InvalidNull));
}
#[test]
fn tiny4_new_unchecked() {
let reference: TinyStr4 = "en".parse().unwrap();
let uval: u32 = reference.into();
let s = unsafe { TinyStr4::new_unchecked(uval) };
assert_eq!(s, reference);
assert_eq!(s, "en");
}
#[test]
fn tiny4_nonascii() {
assert_eq!("\u{4000}".parse::<TinyStr4>(), Err(Error::NonAscii));
}
#[test]
fn tiny4_alpha() {
let s: TinyStr4 = "@aZ[".parse().unwrap();
assert!(!s.is_ascii_alphabetic());
assert!(!s.is_ascii_alphanumeric());
assert_eq!(s.to_ascii_uppercase().as_str(), "@AZ[");
assert_eq!(s.to_ascii_lowercase().as_str(), "@az[");
assert!("abYZ".parse::<TinyStr4>().unwrap().is_ascii_alphabetic());
assert!("abYZ".parse::<TinyStr4>().unwrap().is_ascii_alphanumeric());
assert!("a123".parse::<TinyStr4>().unwrap().is_ascii_alphanumeric());
assert!(!"a123".parse::<TinyStr4>().unwrap().is_ascii_alphabetic());
}
#[test]
fn tiny4_numeric() {
let s: TinyStr4 = "@aZ[".parse().unwrap();
assert!(!s.is_ascii_numeric());
assert!("0123".parse::<TinyStr4>().unwrap().is_ascii_numeric());
}
#[test]
fn tiny4_titlecase() {
assert_eq!(
"abcd"
.parse::<TinyStr4>()
.unwrap()
.to_ascii_titlecase()
.as_str(),
"Abcd"
);
assert_eq!(
"ABCD"
.parse::<TinyStr4>()
.unwrap()
.to_ascii_titlecase()
.as_str(),
"Abcd"
);
assert_eq!(
"aBCD"
.parse::<TinyStr4>()
.unwrap()
.to_ascii_titlecase()
.as_str(),
"Abcd"
);
assert_eq!(
"A123"
.parse::<TinyStr4>()
.unwrap()
.to_ascii_titlecase()
.as_str(),
"A123"
);
assert_eq!(
"123a"
.parse::<TinyStr4>()
.unwrap()
.to_ascii_titlecase()
.as_str(),
"123a"
);
}
#[test]
fn tiny4_ord() {
let mut v: Vec<TinyStr4> = vec![
tinystr4!("zh"),
tinystr4!("aab"),
tinystr4!("zzy"),
tinystr4!("fr"),
];
v.sort();
assert_eq!(Some("aab"), v.get(0).map(TinyStr4::as_str));
assert_eq!(Some("fr"), v.get(1).map(TinyStr4::as_str));
assert_eq!(Some("zh"), v.get(2).map(TinyStr4::as_str));
assert_eq!(Some("zzy"), v.get(3).map(TinyStr4::as_str));
}
/// Test consistency of TinyStr Ord with String
#[test]
fn tinystr4_ord_consistency() {
let mut string_vec = random_alphanums(2..=4, 100);
let mut tinystr_vec: Vec<TinyStr4> = string_vec.iter().map(|s| s.parse().unwrap()).collect();
string_vec.sort();
tinystr_vec.sort();
assert_eq!(
string_vec,
tinystr_vec
.iter()
.map(|s| s.as_str().to_string())
.collect::<Vec<String>>()
);
}
#[test]
fn tiny4_eq() {
let s1: TinyStr4 = "en".parse().unwrap();
let s2: TinyStr4 = "fr".parse().unwrap();
let s3: TinyStr4 = "en".parse().unwrap();
assert_eq!(s1, s3);
assert_ne!(s1, s2);
}
#[test]
fn tiny4_display() {
let s: TinyStr4 = "abcd".parse().unwrap();
let mut result = String::new();
write!(result, "{}", s).unwrap();
assert_eq!(result, "abcd");
assert_eq!(format!("{}", s), "abcd");
assert_eq!(format!("{:>8}", s), format!("{:>8}", result));
}
#[test]
fn tiny4_debug() {
let s: TinyStr4 = "abcd".parse().unwrap();
assert_eq!(format!("{:#?}", s), "\"abcd\"");
}
// ---- TinyStr8: same contract as TinyStr4, capacity 8 bytes ----
#[test]
fn tiny8_basic() {
    let s: TinyStr8 = "abcde".parse().unwrap();
    assert_eq!(s.deref(), "abcde");
}
#[test]
fn tiny8_from_bytes() {
    let s = TinyStr8::from_bytes(b"abcde").unwrap();
    assert_eq!(s.deref(), "abcde");
    assert_eq!(
        TinyStr8::from_bytes(&[0, 159, 146, 150]),
        Err(Error::NonAscii)
    );
    assert_eq!(TinyStr8::from_bytes(&[]), Err(Error::InvalidSize));
    assert_eq!(TinyStr8::from_bytes(&[0]), Err(Error::InvalidNull));
}
// Valid lengths are 1..=8.
#[test]
fn tiny8_size() {
    assert_eq!("".parse::<TinyStr8>(), Err(Error::InvalidSize));
    assert!("1".parse::<TinyStr8>().is_ok());
    assert!("12".parse::<TinyStr8>().is_ok());
    assert!("123".parse::<TinyStr8>().is_ok());
    assert!("1234".parse::<TinyStr8>().is_ok());
    assert!("12345".parse::<TinyStr8>().is_ok());
    assert!("123456".parse::<TinyStr8>().is_ok());
    assert!("1234567".parse::<TinyStr8>().is_ok());
    assert!("12345678".parse::<TinyStr8>().is_ok());
    assert_eq!("123456789".parse::<TinyStr8>(), Err(Error::InvalidSize));
}
#[test]
fn tiny8_null() {
    assert_eq!("a\u{0}b".parse::<TinyStr8>(), Err(Error::InvalidNull));
}
// Round-trip through the raw u64 representation.
#[test]
fn tiny8_new_unchecked() {
    let reference: TinyStr8 = "Windows".parse().unwrap();
    let uval: u64 = reference.into();
    let s = unsafe { TinyStr8::new_unchecked(uval) };
    assert_eq!(s, reference);
    assert_eq!(s, "Windows");
}
#[test]
fn tiny8_nonascii() {
    assert_eq!("\u{4000}".parse::<TinyStr8>(), Err(Error::NonAscii));
}
#[test]
fn tiny8_alpha() {
    let s: TinyStr8 = "@abcXYZ[".parse().unwrap();
    assert!(!s.is_ascii_alphabetic());
    assert!(!s.is_ascii_alphanumeric());
    assert_eq!(s.to_ascii_uppercase().as_str(), "@ABCXYZ[");
    assert_eq!(s.to_ascii_lowercase().as_str(), "@abcxyz[");
    assert!("abcXYZ".parse::<TinyStr8>().unwrap().is_ascii_alphabetic());
    assert!("abcXYZ"
        .parse::<TinyStr8>()
        .unwrap()
        .is_ascii_alphanumeric());
    assert!(!"abc123".parse::<TinyStr8>().unwrap().is_ascii_alphabetic());
    assert!("abc123"
        .parse::<TinyStr8>()
        .unwrap()
        .is_ascii_alphanumeric());
}
#[test]
fn tiny8_numeric() {
    let s: TinyStr8 = "@abcXYZ[".parse().unwrap();
    assert!(!s.is_ascii_numeric());
    assert!("01234567".parse::<TinyStr8>().unwrap().is_ascii_numeric());
}
#[test]
fn tiny8_titlecase() {
    assert_eq!(
        "abcdabcd"
            .parse::<TinyStr8>()
            .unwrap()
            .to_ascii_titlecase()
            .as_str(),
        "Abcdabcd"
    );
    assert_eq!(
        "ABCDABCD"
            .parse::<TinyStr8>()
            .unwrap()
            .to_ascii_titlecase()
            .as_str(),
        "Abcdabcd"
    );
    assert_eq!(
        "aBCDaBCD"
            .parse::<TinyStr8>()
            .unwrap()
            .to_ascii_titlecase()
            .as_str(),
        "Abcdabcd"
    );
    assert_eq!(
        "A123a123"
            .parse::<TinyStr8>()
            .unwrap()
            .to_ascii_titlecase()
            .as_str(),
        "A123a123"
    );
    assert_eq!(
        "123a123A"
            .parse::<TinyStr8>()
            .unwrap()
            .to_ascii_titlecase()
            .as_str(),
        "123a123a"
    );
}
#[test]
fn tiny8_ord() {
    let mut v: Vec<TinyStr8> = vec![
        tinystr8!("nedis"),
        tinystr8!("macos"),
        tinystr8!("zzy"),
        tinystr8!("aab"),
    ];
    v.sort();
    assert_eq!(Some("aab"), v.get(0).map(TinyStr8::as_str));
    assert_eq!(Some("macos"), v.get(1).map(TinyStr8::as_str));
    assert_eq!(Some("nedis"), v.get(2).map(TinyStr8::as_str));
    assert_eq!(Some("zzy"), v.get(3).map(TinyStr8::as_str));
}
/// Test consistency of TinyStr Ord with String
#[test]
fn tinystr8_ord_consistency() {
    let mut string_vec = random_alphanums(3..=8, 100);
    let mut tinystr_vec: Vec<TinyStr8> = string_vec.iter().map(|s| s.parse().unwrap()).collect();
    string_vec.sort();
    tinystr_vec.sort();
    assert_eq!(
        string_vec,
        tinystr_vec
            .iter()
            .map(|s| s.as_str().to_string())
            .collect::<Vec<String>>()
    );
}
#[test]
fn tiny8_eq() {
    let s1: TinyStr8 = "windows".parse().unwrap();
    let s2: TinyStr8 = "mac".parse().unwrap();
    let s3: TinyStr8 = "windows".parse().unwrap();
    assert_eq!(s1, s3);
    assert_ne!(s1, s2);
}
#[test]
fn tiny8_display() {
    let s: TinyStr8 = "abcdef".parse().unwrap();
    let mut result = String::new();
    write!(result, "{}", s).unwrap();
    assert_eq!(result, "abcdef");
    assert_eq!(format!("{}", s), "abcdef");
    assert_eq!(format!("{:>8}", s), format!("{:>8}", result));
}
#[test]
fn tiny8_debug() {
    let s: TinyStr8 = "abcdef".parse().unwrap();
    assert_eq!(format!("{:#?}", s), "\"abcdef\"");
}
// ---- TinyStr16: same contract again, capacity 16 bytes ----
#[test]
fn tiny16_from_bytes() {
    let s = TinyStr16::from_bytes(b"abcdefghijk").unwrap();
    assert_eq!(s.deref(), "abcdefghijk");
    assert_eq!(
        TinyStr16::from_bytes(&[0, 159, 146, 150]),
        Err(Error::NonAscii)
    );
    assert_eq!(TinyStr16::from_bytes(&[]), Err(Error::InvalidSize));
    assert_eq!(TinyStr16::from_bytes(&[0]), Err(Error::InvalidNull));
}
// Valid lengths are 1..=16.
#[test]
fn tiny16_size() {
    assert_eq!("".parse::<TinyStr16>(), Err(Error::InvalidSize));
    assert!("1".parse::<TinyStr16>().is_ok());
    assert!("12".parse::<TinyStr16>().is_ok());
    assert!("123".parse::<TinyStr16>().is_ok());
    assert!("1234".parse::<TinyStr16>().is_ok());
    assert!("12345".parse::<TinyStr16>().is_ok());
    assert!("123456".parse::<TinyStr16>().is_ok());
    assert!("1234567".parse::<TinyStr16>().is_ok());
    assert!("12345678".parse::<TinyStr16>().is_ok());
    assert!("123456781".parse::<TinyStr16>().is_ok());
    assert!("1234567812".parse::<TinyStr16>().is_ok());
    assert!("12345678123".parse::<TinyStr16>().is_ok());
    assert!("123456781234".parse::<TinyStr16>().is_ok());
    assert!("1234567812345".parse::<TinyStr16>().is_ok());
    assert!("12345678123456".parse::<TinyStr16>().is_ok());
    assert!("123456781234567".parse::<TinyStr16>().is_ok());
    assert!("1234567812345678".parse::<TinyStr16>().is_ok());
    assert_eq!(
        "12345678123456789".parse::<TinyStr16>(),
        Err(Error::InvalidSize)
    );
}
#[test]
fn tiny16_null() {
    assert_eq!("a\u{0}b".parse::<TinyStr16>(), Err(Error::InvalidNull));
}
// Round-trip through the raw u128 representation.
#[test]
fn tiny16_new_unchecked() {
    let reference: TinyStr16 = "WindowsCE/ME/NT".parse().unwrap();
    let uval: u128 = reference.into();
    let s = unsafe { TinyStr16::new_unchecked(uval) };
    assert_eq!(s, reference);
    assert_eq!(s, "WindowsCE/ME/NT");
}
#[test]
fn tiny16_nonascii() {
    assert_eq!("\u{4000}".parse::<TinyStr16>(), Err(Error::NonAscii));
}
#[test]
fn tiny16_alpha() {
    let s: TinyStr16 = "@abcdefgTUVWXYZ[".parse().unwrap();
    assert!(!s.is_ascii_alphabetic());
    assert!(!s.is_ascii_alphanumeric());
    assert_eq!(s.to_ascii_uppercase().as_str(), "@ABCDEFGTUVWXYZ[");
    assert_eq!(s.to_ascii_lowercase().as_str(), "@abcdefgtuvwxyz[");
    assert!("abcdefgTUVWXYZ"
        .parse::<TinyStr16>()
        .unwrap()
        .is_ascii_alphabetic());
    assert!("abcdefgTUVWXYZ"
        .parse::<TinyStr16>()
        .unwrap()
        .is_ascii_alphanumeric());
    assert!(!"abcdefg0123456"
        .parse::<TinyStr16>()
        .unwrap()
        .is_ascii_alphabetic());
    assert!("abcdefgTUVWXYZ"
        .parse::<TinyStr16>()
        .unwrap()
        .is_ascii_alphanumeric());
}
#[test]
fn tiny16_numeric() {
    let s: TinyStr16 = "@abcdefgTUVWXYZ[".parse().unwrap();
    assert!(!s.is_ascii_numeric());
    assert!("0123456789"
        .parse::<TinyStr16>()
        .unwrap()
        .is_ascii_numeric());
}
#[test]
fn tiny16_titlecase() {
    assert_eq!(
        "abcdabcdabcdabcd"
            .parse::<TinyStr16>()
            .unwrap()
            .to_ascii_titlecase()
            .as_str(),
        "Abcdabcdabcdabcd"
    );
    assert_eq!(
        "ABCDABCDABCDABCD"
            .parse::<TinyStr16>()
            .unwrap()
            .to_ascii_titlecase()
            .as_str(),
        "Abcdabcdabcdabcd"
    );
    assert_eq!(
        "aBCDaBCDaBCDaBCD"
            .parse::<TinyStr16>()
            .unwrap()
            .to_ascii_titlecase()
            .as_str(),
        "Abcdabcdabcdabcd"
    );
    assert_eq!(
        "A123a123A123a123"
            .parse::<TinyStr16>()
            .unwrap()
            .to_ascii_titlecase()
            .as_str(),
        "A123a123a123a123"
    );
    assert_eq!(
        "123a123A123a123A"
            .parse::<TinyStr16>()
            .unwrap()
            .to_ascii_titlecase()
            .as_str(),
        "123a123a123a123a"
    );
}
#[test]
fn tiny16_ord() {
    let mut v: Vec<TinyStr16> = vec![
        tinystr16!("nedis_xxxx"),
        tinystr16!("macos_xxxx"),
        tinystr16!("xxxxxxxx_b"),
        tinystr16!("xxxxxxxx_aa"),
        tinystr16!("zzy"),
        tinystr16!("aab"),
    ];
    v.sort();
    assert_eq!(Some("aab"), v.get(0).map(TinyStr16::as_str));
    assert_eq!(Some("macos_xxxx"), v.get(1).map(TinyStr16::as_str));
    assert_eq!(Some("nedis_xxxx"), v.get(2).map(TinyStr16::as_str));
    assert_eq!(Some("xxxxxxxx_aa"), v.get(3).map(TinyStr16::as_str));
    assert_eq!(Some("xxxxxxxx_b"), v.get(4).map(TinyStr16::as_str));
    assert_eq!(Some("zzy"), v.get(5).map(TinyStr16::as_str));
}
/// Test consistency of TinyStr Ord with String
#[test]
fn tinystr16_ord_consistency() {
    let mut string_vec = random_alphanums(1..=16, 100);
    let mut tinystr_vec: Vec<TinyStr16> = string_vec.iter().map(|s| s.parse().unwrap()).collect();
    string_vec.sort();
    tinystr_vec.sort();
    assert_eq!(
        string_vec,
        tinystr_vec
            .iter()
            .map(|s| s.as_str().to_string())
            .collect::<Vec<String>>()
    );
}
#[test]
fn tiny16_eq() {
    let s1: TinyStr16 = "windows98SE".parse().unwrap();
    let s2: TinyStr16 = "mac".parse().unwrap();
    let s3: TinyStr16 = "windows98SE".parse().unwrap();
    assert_eq!(s1, s3);
    assert_ne!(s1, s2);
}
#[test]
fn tiny16_display() {
    let s: TinyStr16 = "abcdefghijkl".parse().unwrap();
    let mut result = String::new();
    write!(result, "{}", s).unwrap();
    assert_eq!(result, "abcdefghijkl");
    assert_eq!(format!("{}", s), "abcdefghijkl");
    assert_eq!(format!("{:>14}", s), format!("{:>14}", result));
}
#[test]
fn tiny16_debug() {
    let s: TinyStr16 = "abcdefghijkl".parse().unwrap();
    assert_eq!(format!("{:#?}", s), "\"abcdefghijkl\"");
}
// The error type must integrate with std error handling.
#[cfg(feature = "std")]
#[test]
fn supports_std_error() {
    let e = "\u{4000}".parse::<TinyStr8>().unwrap_err();
    let _: &dyn std::error::Error = &e;
}
// ---- TinyStrAuto (heap fallback for long strings) and const macros ----
#[cfg(any(feature = "std", feature = "alloc"))]
#[test]
fn tinyauto_basic() {
    let s1: TinyStrAuto = "abc".parse().unwrap();
    assert_eq!(s1, "abc");
    let s2: TinyStrAuto = "veryveryveryveryverylong".parse().unwrap();
    assert_eq!(s2, "veryveryveryveryverylong");
}
#[cfg(any(feature = "std", feature = "alloc"))]
#[test]
fn tinyauto_nonascii() {
    assert_eq!("\u{4000}".parse::<TinyStrAuto>(), Err(Error::NonAscii));
    assert_eq!(
        "veryveryveryveryverylong\u{4000}".parse::<TinyStrAuto>(),
        Err(Error::NonAscii)
    );
}
// The tinystr8! macro must be usable in const context.
#[cfg(feature = "macros")]
const TS: TinyStr8 = tinystr::macros::tinystr8!("test");
#[cfg(feature = "macros")]
#[test]
fn tinystr_macros() {
    use tinystr::macros::*;
    let x: TinyStr8 = "test".parse().unwrap();
    assert_eq!(TS, x);
    let x: TinyStr4 = "foo".parse().unwrap();
    assert_eq!(tinystr4!("foo"), x);
    let x: TinyStr8 = "barbaz".parse().unwrap();
    assert_eq!(tinystr8!("barbaz"), x);
    let x: TinyStr16 = "metamorphosis".parse().unwrap();
    assert_eq!(tinystr16!("metamorphosis"), x);
}
| true |
ed830cea1342afa385f25d36db9d406c7ce05a8c
|
Rust
|
csotello/CuteDogStash
|
/src/pages/home.rs
|
UTF-8
| 2,120 | 3.046875 | 3 |
[
"MIT"
] |
permissive
|
use crate::components::Post;
use crate::utils::*;
use db::*;
use yew::prelude::*;
// Messages the Home page can emit; `update` forwards each one to the
// parent callback of the same name in Props.
pub enum Msg {
    Rate(u64, String, u8, String), // (post id, author, stars, comment)
    Edit(u64),                     // post id
    Delete(u64),                   // post id
}
#[derive(Properties, Clone)]
pub struct Props {
    pub error: bool, // when true, an "Error" paragraph is rendered
    pub db: Data,
    pub user: Option<User>,
    pub rate: Callback<(u64, String, u8, String)>,
    pub edit: Callback<u64>,
    pub delete: Callback<u64>,
}
// Home page component: renders the list of posts from `props.db` and
// bubbles user actions up through the Props callbacks.
pub struct Home {
    link: ComponentLink<Self>,
    props: Props,
}
// Yew component lifecycle for the Home page.
impl Component for Home {
    type Message = Msg;
    type Properties = Props;
    fn create(props: Self::Properties, link: ComponentLink<Self>) -> Self {
        Self { link, props }
    }
    // Every message is simply forwarded to the matching parent callback;
    // returning true requests a re-render.
    fn update(&mut self, msg: Self::Message) -> ShouldRender {
        match msg {
            Msg::Rate(id, author, stars, comment) => {
                self.props.rate.emit((id, author, stars, comment));
            }
            Msg::Delete(id) => {
                self.props.delete.emit(id);
            }
            Msg::Edit(id) => {
                self.props.edit.emit(id);
            }
        }
        true
    }
    // New props always replace the old ones and trigger a re-render.
    fn change(&mut self, props: Self::Properties) -> ShouldRender {
        self.props = props;
        true
    }
    fn view(&self) -> Html {
        // Create each post tag
        let map_post = |post: &db::Post| {
            let rate = self.link.callback(|(post_id, author, stars, comment)| {
                Msg::Rate(post_id, author, stars, comment)
            });
            let delete = self.link.callback(|id| {
                log("Deleting Post".to_string());
                Msg::Delete(id)
            });
            let edit = self.link.callback(|id| {
                log("Editing post".to_string());
                Msg::Edit(id)
            });
            html! {
                <Post post=post rate=rate delete=delete user=&self.props.user edit=edit/>
            }
        };
        html! {
            <div>
                <br/>
                {if self.props.error{html! {<p>{"Error"}</p>}} else {html!{}}}
                {for self.props.db.posts.iter().map(map_post)}
            </div>
        }
    }
}
| true |
59f5cc860517da37242e304673373c1967c38d21
|
Rust
|
arekfu/carlo
|
/src/carlo/mod.rs
|
UTF-8
| 5,563 | 2.546875 | 3 |
[] |
no_license
|
mod irc;
mod jenkins;
use std::time::Instant;
use std::sync::mpsc;
use std::sync::Arc;
use std::thread;
use ::irc::client::prelude::{Client, ClientExt, Command, IrcClient};
use ::irc::proto::message::Message;
use ::irc::proto::ChannelExt;
use self::irc::IrcListener;
use self::jenkins::cache::Name;
use self::jenkins::{BuildDuration, BuildNumber, BuildUrl, JListener};
use crate::config::Config;
// The bot's top-level state.
#[derive(Debug)]
pub struct Carlo {
    start_time: Instant,            // used to answer the "uptime" command
    client: Arc<IrcClient>,         // shared with the IRC listener thread
    jenkins_config: Option<Config>, // None when jenkins.toml is absent or unreadable
}
// Events fed into the main loop by the listener threads.
#[derive(Debug)]
pub enum Event {
    IncomingIrcMessage(Message),
    // A Jenkins job changed state:
    // (server, job name, result, build number, duration, url, channels to notify)
    UpdatedJob(
        String,
        Name,
        String,
        BuildNumber,
        BuildDuration,
        BuildUrl,
        Vec<String>,
    ),
}
impl Carlo {
    // Builds a bot from irc.toml (mandatory) and jenkins.toml (optional;
    // a read failure is logged and Jenkins polling is disabled).
    pub fn new() -> Carlo {
        debug!("New Carlo instance");
        Carlo {
            start_time: Instant::now(),
            client: Arc::new(IrcClient::new("irc.toml").expect("Could not find irc.toml file")),
            jenkins_config: Config::from_file("jenkins.toml")
                .map_err(|err| warn!("Config could not be read: {}", err))
                .ok(),
        }
    }
    // Main loop: spawns the IRC listener (and the Jenkins listener when
    // configured), both feeding Events into one mpsc channel, then
    // handles events and sends the resulting IRC messages.
    pub fn run(&mut self) {
        let (tx, rx) = mpsc::channel();
        debug!("Identifying with server");
        self.client.identify().unwrap();
        let mut handles = Vec::new();
        let irclistener = IrcListener::new(self.client.clone(), tx.clone());
        handles.push(thread::spawn(move || irclistener.listen()));
        if let Some(config) = self.jenkins_config.take() {
            let mut jlistener = JListener::new(tx.clone());
            handles.push(thread::spawn(move || jlistener.listen(config)));
        }
        // Blocks until every sender is dropped.
        rx.iter().for_each(|event| {
            self.handle(event).into_iter().for_each(|message| {
                info!("Sending {}", message);
                self.client.send(message).unwrap();
            });
        });
        handles
            .into_iter()
            .for_each(|handle| handle.join().unwrap());
    }
    // Dispatches one event to the appropriate handler; returns the IRC
    // messages to send in response (possibly none).
    fn handle(&self, event: Event) -> Vec<Message> {
        debug!("Handling event {:?}", event);
        match event {
            Event::IncomingIrcMessage(message) => self.handle_irc(message),
            Event::UpdatedJob(server, name, result, number, duration, url, notify) => {
                self.handle_updated_job(server, name, result, number, duration, url, notify)
            }
        }
    }
    // Reacts to PRIVMSGs that are either direct messages or channel
    // messages prefixed with the bot's nickname; everything else is ignored.
    fn handle_irc(&self, message: Message) -> Vec<Message> {
        debug!("Handling Irc message {:?}", message);
        let cmd_prefix = self.client.current_nickname().to_string();
        match &message.command {
            Command::PRIVMSG(channel, msg) => {
                if !channel.is_channel_name() || msg.trim_start().starts_with(&cmd_prefix) {
                    let reply_to = message.response_target().unwrap().to_string();
                    let source_nick = message.source_nickname().unwrap_or("");
                    self.process_msg(&source_nick, &reply_to, &msg)
                } else {
                    Vec::new()
                }
            }
            _ => Vec::new(),
        }
    }
    // Announces a finished Jenkins build to each channel in `notify`;
    // failures additionally include the build URL.
    fn handle_updated_job(
        &self,
        server: String,
        name: Name,
        result: String,
        number: BuildNumber,
        duration: BuildDuration,
        url: BuildUrl,
        notify: Vec<String>,
    ) -> Vec<Message> {
        debug!(
            "Handling Job update {:?}:{:?}:{:?}:{:?}:{:?}:{:?}:{:?}",
            server, name, result, number, duration, url, notify
        );
        notify
            .into_iter()
            .map(|dest| {
                let reply = if result == "SUCCESS" {
                    format!(
                        "Build #{} for job '{}' on '{}'! Result: {}",
                        number, name, server, result
                    )
                } else {
                    format!(
                        "Build #{} for job '{}' on '{}'! Result: {}, URL: {}",
                        number, name, server, result, url
                    )
                };
                let cmd = Command::PRIVMSG(dest, reply);
                Message::from(cmd)
            }).collect()
    }
    // Implements the chat commands: "uptime" (anyone) and
    // "say <channel> <text>" (bot owner only).
    fn process_msg(&self, source_nick: &str, reply_to: &str, incoming: &str) -> Vec<Message> {
        if incoming.contains("uptime") {
            info!(
                "\"uptime\" command received from {} on {}",
                source_nick, reply_to
            );
            let reply = format!("uptime = {} seconds", self.start_time.elapsed().as_secs());
            let cmd = Command::PRIVMSG(reply_to.to_string(), reply);
            return vec![Message::from(cmd)];
        } else if incoming.starts_with("say ") {
            info!(
                "\"say\" command received from {} on {}",
                source_nick, reply_to
            );
            if !self.client.config().is_owner(source_nick) {
                return Vec::new();
            }
            // Split "say <target> <message>" into target and message.
            let v: Vec<&str> = incoming[4..].trim().splitn(2, ' ').collect();
            if v.len() <= 1 {
                debug!("\"say\" command has no message, not doing anything");
                return Vec::new();
            } else {
                let chan = v[0].to_string();
                let reply = v[1].trim().to_string();
                let cmd = Command::PRIVMSG(chan, reply);
                return vec![Message::from(cmd)];
            }
        } else {
            debug!("unrecognized command: {}", incoming);
        }
        Vec::new()
    }
}
| true |
798c9c36aab0cc448699f3efd8c739095d79eb72
|
Rust
|
wolfgang-wiedermann/staticjson
|
/src/model.rs
|
UTF-8
| 10,189 | 2.625 | 3 |
[] |
no_license
|
use std::collections::HashSet;
/*
* This file contains the datastructures of the staticjson tool
*/
// Code-generation targets supported by the tool.
#[derive(Clone, Debug)]
pub enum TargetLanguage {
    C, RUST, HTMLDOC, JSVALIDATE, JAXRS, JAVACLIENT, JQUERY, KNOCKOUT, DOTNET, DOTNET_TYPES, DOTNET_INTERFACES
}
// States of the hand-written staticjson parser's state machine.
#[derive(Clone, Debug)]
pub enum ParserState {
    INITIAL, INTYPENAME, INTYPE,
    INTYPEPARAMNAME,
    INTYPEPARAMVALUE,
    INTYPEPARAMSTRING,
    OUTOFFTYPEPARAMLIST,
    INATTRIBUTENAME, INATTRIBUTETYPE,
    INATTRIBUTEARRAY,
    INATTRIBUTEPARAMLIST,
    INATTRIBUTEPARAMNAME,
    INATTRIBUTEPARAMVALUE,
    INATTRIBUTEPARAMSTRING,
    INOUTERCMT, // in comment outside of typedefinition
    ININNERCMT, // in comment inside of typedefinition
    // Special States for Interface-Definitions
    ININTERFACECMT,
    ININTERFACENAME,
    ININTERFACEPARAMNAME,
    ININTERFACEPARAMVALUE,
    ININTERFACEPARAMSTRING,
    OUTOFINTERFACEPARAMLIST,
    INFUNCTIONNAME, INFUNCTIONRETURNTYPE,
    INFUNCTIONRETURNTYPEARRAY, BEHINDFUNCTIONRETURNTYPEARRAY,
    INFUNCTIONPARAMNAME,
    INFUNCTIONPARAMTYPE, INFUNCTION,
    INFUNCTIONPARAMTYPEARRAY, BEHINDFUNCTIONPARAMTYPEARRAY,
    // Special: Function Params can have Parameters
    INFUNCTIONPARAMPARAMNAME, INFUNCTIONPARAMPARAMVALUE,
    INFUNCTIONPARAMPARAMSTRING, INFUNCTIONPARAMPARAMLIST,
    // --
    INFUNCTIONATTRIBUTENAME,
    INFUNCTIONATTRIBUTEVALUE,
    INFUNCTIONATTRIBUTESTRING,
    // End of special States for Interface Definitions
}
// Position within the current token (used together with ParserState).
#[derive(Clone, Debug)]
pub enum ParserSubState {
    LEADINGBLANKS, // Fuehrende Leerzeichen
    VALUE, // Wert
    TRAILINGBLANKS, // Auf den Wert folgende Leerzeichen
}
// Parsed command-line options for one tool invocation.
#[derive(Clone, Debug)]
pub struct CommandlineOptions {
    pub filename:String,
    pub target_language:TargetLanguage,
    pub target_folder:String,
    pub debug:bool
}
// Everything the parser produced: all type and interface definitions,
// plus a set of defined type names for O(1) lookup.
#[derive(Clone, Debug)]
pub struct ParserResult {
    pub types: Box<Vec<Box<Type>>>,
    pub typenames: HashSet<String>,
    pub interfaces: Box<Vec<Box<Interface>>>
}
impl ParserResult {
    // This function detects whether a given typename is defined
    // by the parsed staticjson code or not.
    pub fn is_defined_typename(&self, typename:&str) -> bool {
        return self.typenames.contains(typename);
    }
}
/// A single `name = value` pair attached to a type, attribute,
/// interface or function in a staticjson definition.
#[derive(Clone, Debug)]
pub struct Parameter {
    pub name: String,
    pub value: String,
}

/// One attribute (field) of a staticjson type.
#[derive(Clone, Debug)]
pub struct Attribute {
    pub name: String,
    pub attribute_type: String,
    pub is_array: bool,
    pub params: Vec<Box<Parameter>>,
}

impl Attribute {
    /// True when a parameter with the given name exists on this attribute.
    pub fn is_param_present(&self, param_name: &str) -> bool {
        self.params.iter().any(|p| p.name == param_name)
    }

    /// True when the first parameter named `param_name` carries exactly
    /// `param_value`; false when absent or different.
    pub fn is_param_value_present(&self, param_name: &str, param_value: &str) -> bool {
        match self.params.iter().find(|p| p.name == param_name) {
            Some(p) => p.value == param_value,
            None => false,
        }
    }

    /// The value of the first parameter named `param_name`, or an empty
    /// string when no such parameter exists.
    pub fn get_param_value(&self, param_name: &str) -> String {
        self.params
            .iter()
            .find(|p| p.name == param_name)
            .map(|p| p.value.clone())
            .unwrap_or_default()
    }
}

/// A staticjson type definition: a named set of attributes plus
/// type-level parameters.
#[derive(Clone, Debug)]
pub struct Type {
    pub typename: String,
    pub attributes: Vec<Box<Attribute>>,
    pub params: Vec<Box<Parameter>>,
}

impl Type {
    /// An empty type with no name, attributes or parameters.
    pub fn new() -> Type {
        Type {
            typename: String::new(),
            attributes: Vec::new(),
            params: Vec::new(),
        }
    }

    /// True for the built-in scalar type names of the staticjson language.
    pub fn is_basic_type(name: &str) -> bool {
        const BASIC: [&str; 12] = [
            "string", "int", "decimal", "byte", "bool", "char",
            "uint", "long", "ulong", "date", "time", "datetime",
        ];
        BASIC.contains(&name)
    }

    /// True when a parameter with the given name exists on this type.
    pub fn is_param_present(&self, param_name: &str) -> bool {
        self.params.iter().any(|p| p.name == param_name)
    }

    /// True when the first parameter named `param_name` carries exactly
    /// `param_value`; false when absent or different.
    pub fn is_param_value_present(&self, param_name: &str, param_value: &str) -> bool {
        match self.params.iter().find(|p| p.name == param_name) {
            Some(p) => p.value == param_value,
            None => false,
        }
    }

    /// The value of the first parameter named `param_name`, or an empty
    /// string when no such parameter exists.
    pub fn get_param_value(&self, param_name: &str) -> String {
        self.params
            .iter()
            .find(|p| p.name == param_name)
            .map(|p| p.value.clone())
            .unwrap_or_default()
    }

    /// True when any attribute of this type carries a parameter with
    /// the given name.
    pub fn is_attribute_param_present(&self, param_name: &str) -> bool {
        self.attributes.iter().any(|a| a.is_param_present(param_name))
    }
}
/// A staticjson interface: a named group of remote functions.
#[derive(Clone, Debug)]
pub struct Interface {
    pub name: String,
    pub functions: Vec<Box<Function>>,
    pub params: Vec<Box<Parameter>>
}

impl Interface {
    /// Creates an empty interface with no name, functions or params.
    pub fn new() -> Interface {
        Interface {
            name: String::new(),
            functions: Vec::new(),
            params: Vec::new()
        }
    }

    /// Checks if a param with the given name is present in `params`.
    pub fn is_param_present(&self, param_name: &str) -> bool {
        self.params.iter().any(|p| p.name == param_name)
    }

    /// Checks if the first param with the given name has the given value.
    pub fn is_param_value_present(&self, param_name: &str, param_value: &str) -> bool {
        self.params
            .iter()
            .find(|p| p.name == param_name)
            .map_or(false, |p| p.value == param_value)
    }

    /// Gets the value of the parameter with the given name, or an empty
    /// string when no such parameter exists.
    pub fn get_param_value(&self, param_name: &str) -> String {
        self.params
            .iter()
            .find(|p| p.name == param_name)
            .map_or_else(String::new, |p| p.value.clone())
    }

    /// True when any function of this interface carries the given attribute.
    pub fn is_function_attribute_present(&self, attr_name: &str) -> bool {
        self.functions.iter().any(|f| f.is_attribute_present(attr_name))
    }

    /// True when any function carries the given attribute with the
    /// given value.
    pub fn is_function_attribute_value_present(&self, attr_name: &str, attr_value: &str) -> bool {
        self.functions
            .iter()
            .any(|f| f.is_attribute_value_present(attr_name, attr_value))
    }

    /// True when any function returns a non-void, non-basic
    /// (user-defined) type.
    pub fn has_function_with_complex_returntype(&self) -> bool {
        self.functions
            .iter()
            .any(|f| f.returntype != "void" && !Type::is_basic_type(&f.returntype))
    }
}
/// A single function of a staticjson interface.
#[derive(Clone, Debug)]
pub struct Function {
    pub name: String,
    pub returntype: String,
    pub returntype_is_array: bool,
    pub params: Vec<Box<FunctionParameter>>,
    pub attributes: Vec<Box<Parameter>>
}

impl Function {
    /// Creates an empty function with no name, params or attributes.
    pub fn new() -> Function {
        Function {
            name: String::new(),
            returntype: String::new(),
            returntype_is_array: false,
            params: Vec::new(),
            attributes: Vec::new()
        }
    }

    // Shared predicate: a parameter must be serialized as a JSON body
    // object when it is neither `void` nor bound to the query string or
    // the URL path.
    fn is_serialized_param(fp: &FunctionParameter) -> bool {
        fp.typename != "void"
            && !fp.is_param_present("query-param")
            && !fp.is_param_present("path-param")
    }

    /// Checks if an attribute with the given name is present.
    pub fn is_attribute_present(&self, param_name: &str) -> bool {
        self.attributes.iter().any(|p| p.name == param_name)
    }

    /// Checks if the first attribute with the given name has the given value.
    pub fn is_attribute_value_present(&self, param_name: &str, param_value: &str) -> bool {
        self.attributes
            .iter()
            .find(|p| p.name == param_name)
            .map_or(false, |p| p.value == param_value)
    }

    /// Gets the value of the attribute with the given name, or an empty
    /// string when no such attribute exists.
    pub fn get_attribute_value(&self, attr_name: &str) -> String {
        self.attributes
            .iter()
            .find(|attr| attr.name == attr_name)
            .map_or_else(String::new, |attr| attr.value.clone())
    }

    /// True when any parameter has a non-void, non-basic
    /// (user-defined) type.
    pub fn has_complex_functionparam(&self) -> bool {
        self.params
            .iter()
            .any(|fp| fp.typename != "void" && !Type::is_basic_type(&fp.typename))
    }

    /// Checks if the function has a param which must be serialized as a
    /// JSON object.
    pub fn has_serialized_functionparam(&self) -> bool {
        self.params.iter().any(|fp| Self::is_serialized_param(fp))
    }

    /// Name of the first param which must be serialized as a JSON
    /// object, or "#UNKNOWN#" when there is none.
    pub fn get_serialized_functionparam_name(&self) -> String {
        self.params
            .iter()
            .find(|fp| Self::is_serialized_param(fp))
            .map_or_else(|| "#UNKNOWN#".to_string(), |fp| fp.name.clone())
    }
}
/// A single parameter of a staticjson interface function.
#[derive(Clone, Debug)]
pub struct FunctionParameter {
    pub name: String,
    // Type name of the parameter (basic or user-defined).
    pub typename: String,
    // True when the parameter is an array of `typename`.
    pub is_array: bool,
    pub params: Vec<Box<Parameter>>
}

impl FunctionParameter {
    /// Checks if a param with the given name is present in `params`.
    pub fn is_param_present(&self, param_name: &str) -> bool {
        self.params.iter().any(|p| p.name == param_name)
    }

    /// Checks if the first param with the given name has the given value.
    pub fn is_param_value_present(&self, param_name: &str, param_value: &str) -> bool {
        self.params
            .iter()
            .find(|p| p.name == param_name)
            .map_or(false, |p| p.value == param_value)
    }

    /// Gets the value of the parameter with the given name, or an empty
    /// string when no such parameter exists.
    pub fn get_param_value(&self, param_name: &str) -> String {
        self.params
            .iter()
            .find(|param| param.name == param_name)
            .map_or_else(String::new, |param| param.value.clone())
    }
}
/// Aggregates what a code-generation step needs: the borrowed
/// command-line options and the accumulated generated code.
pub struct GeneralModel<'a> {
    // Generator options (target language, output folder, ...).
    pub options:&'a CommandlineOptions,
    // Buffer of generated source code.
    pub code:String,
}
| true |
8c857855a8485b33dc61fb3edb1a370a3992fa30
|
Rust
|
bojand/infer
|
/src/lib.rs
|
UTF-8
| 16,877 | 3.15625 | 3 |
[
"MIT"
] |
permissive
|
/*!
Small crate to infer file and MIME type by checking the
[magic number](https://en.wikipedia.org/wiki/Magic_number_(programming)) signature.
# Examples
### Get the type of a buffer
```rust
let buf = [0xFF, 0xD8, 0xFF, 0xAA];
let kind = infer::get(&buf).expect("file type is known");
assert_eq!(kind.mime_type(), "image/jpeg");
assert_eq!(kind.extension(), "jpg");
assert_eq!(kind.matcher_type(), infer::MatcherType::Image);
```
### Check file type by path
```rust
# #[cfg(feature = "std")]
# fn run() {
let kind = infer::get_from_path("testdata/sample.jpg")
.expect("file read successfully")
.expect("file type is known");
assert_eq!(kind.mime_type(), "image/jpeg");
assert_eq!(kind.extension(), "jpg");
# }
```
### Check for specific type
```rust
let buf = [0xFF, 0xD8, 0xFF, 0xAA];
assert!(infer::image::is_jpeg(&buf));
```
### Check for specific type class
```rust
let buf = [0xFF, 0xD8, 0xFF, 0xAA];
assert!(infer::is_image(&buf));
```
### Adds a custom file type matcher
Here we actually need to use the `Infer` struct to be able to declare custom matchers.
```rust
# #[cfg(feature = "alloc")]
# fn run() {
fn custom_matcher(buf: &[u8]) -> bool {
return buf.len() >= 3 && buf[0] == 0x10 && buf[1] == 0x11 && buf[2] == 0x12;
}
let mut info = infer::Infer::new();
info.add("custom/foo", "foo", custom_matcher);
let buf = [0x10, 0x11, 0x12, 0x13];
let kind = info.get(&buf).unwrap();
assert_eq!(kind.mime_type(), "custom/foo");
assert_eq!(kind.extension(), "foo");
# }
```
*/
#![crate_name = "infer"]
#![doc(html_root_url = "https://docs.rs/infer/latest")]
#![forbid(unsafe_code)]
#![cfg_attr(not(feature = "std"), no_std)]
#[cfg(feature = "alloc")]
extern crate alloc;
mod map;
mod matchers;
#[cfg(feature = "alloc")]
use alloc::vec::Vec;
use core::fmt;
#[cfg(feature = "std")]
use std::fs::File;
#[cfg(feature = "std")]
use std::io::{self, Read};
#[cfg(feature = "std")]
use std::path::Path;
pub use map::MatcherType;
use map::{WrapMatcher, MATCHER_MAP};
/// All the supported matchers categorized and exposed as functions
pub use matchers::*;
/// Matcher function
pub type Matcher = fn(buf: &[u8]) -> bool;
/// Generic information for a type: its category, MIME type, file
/// extension and the matcher function that recognizes it.
#[derive(Copy, Clone)]
pub struct Type {
    // Broad category (Image, Audio, Archive, ...).
    matcher_type: MatcherType,
    // Canonical MIME type, e.g. "image/jpeg".
    mime_type: &'static str,
    // Canonical file extension, e.g. "jpg".
    extension: &'static str,
    // Wrapped matcher function (omitted from Debug and PartialEq below).
    matcher: WrapMatcher,
}
impl Type {
    // Const constructor used to build the static built-in matcher table.
    pub(crate) const fn new_static(
        matcher_type: MatcherType,
        mime_type: &'static str,
        extension: &'static str,
        matcher: WrapMatcher,
    ) -> Self {
        Self {
            matcher_type,
            mime_type,
            extension,
            matcher,
        }
    }
    /// Returns a new `Type` with matcher and extension.
    pub fn new(
        matcher_type: MatcherType,
        mime_type: &'static str,
        extension: &'static str,
        matcher: Matcher,
    ) -> Self {
        Self::new_static(matcher_type, mime_type, extension, WrapMatcher(matcher))
    }
    /// Returns the type of matcher
    ///
    /// # Examples
    ///
    /// ```rust
    /// let info = infer::Infer::new();
    /// let buf = [0xFF, 0xD8, 0xFF, 0xAA];
    /// let kind = info.get(&buf).expect("file type is known");
    ///
    /// assert_eq!(kind.matcher_type(), infer::MatcherType::Image);
    /// ```
    pub const fn matcher_type(&self) -> MatcherType {
        self.matcher_type
    }
    /// Returns the mime type
    pub const fn mime_type(&self) -> &'static str {
        self.mime_type
    }
    /// Returns the file extension
    pub const fn extension(&self) -> &'static str {
        self.extension
    }
    /// Checks if buf matches this Type by invoking the wrapped matcher.
    fn matches(&self, buf: &[u8]) -> bool {
        (self.matcher.0)(buf)
    }
}
impl fmt::Debug for Type {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // The `matcher` function pointer is omitted from the debug
        // output; it has no useful textual representation.
        f.debug_struct("Type")
            .field("matcher_type", &self.matcher_type)
            .field("mime_type", &self.mime_type)
            .field("extension", &self.extension)
            .finish()
    }
}
impl fmt::Display for Type {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // Displays as the MIME type; delegating to the str impl keeps
        // caller formatting flags (width, alignment, ...) honored.
        fmt::Display::fmt(self.mime_type, f)
    }
}
impl PartialEq for Type {
    fn eq(&self, other: &Self) -> bool {
        // Equality compares the descriptive fields only; the matcher
        // function is not part of the comparison.
        self.matcher_type == other.matcher_type
            && self.mime_type == other.mime_type
            && self.extension == other.extension
    }
}
/// Infer allows to use a custom set of `Matcher`s for infering a MIME type.
///
/// Most operations can be done by using the _top level functions_, but when custom matchers
/// are needed every call has to go through the `Infer` struct to be able
/// to see the custom matchers.
pub struct Infer {
    // User-registered custom matchers; storage requires the `alloc`
    // feature, otherwise only the built-in table is available.
    #[cfg(feature = "alloc")]
    mmap: Vec<Type>,
}
impl Infer {
    /// Initialize a new instance of the infer struct.
    pub const fn new() -> Infer {
        // With `alloc` the instance carries an (initially empty) list
        // of custom matchers; without it the struct is zero-sized.
        #[cfg(feature = "alloc")]
        return Infer { mmap: Vec::new() };
        #[cfg(not(feature = "alloc"))]
        return Infer {};
    }
    // Iterates custom matchers first (when `alloc` is enabled), then
    // the built-in MATCHER_MAP table.
    fn iter_matchers(&self) -> impl Iterator<Item = &Type> {
        let mmap = MATCHER_MAP.iter();
        #[cfg(feature = "alloc")]
        return self.mmap.iter().chain(mmap);
        #[cfg(not(feature = "alloc"))]
        return mmap;
    }
    /// Returns the file type of the buffer.
    ///
    /// # Examples
    ///
    /// ```rust
    /// let info = infer::Infer::new();
    /// let buf = [0xFF, 0xD8, 0xFF, 0xAA];
    /// let kind = info.get(&buf).expect("file type is known");
    ///
    /// assert_eq!(kind.mime_type(), "image/jpeg");
    /// assert_eq!(kind.extension(), "jpg");
    /// ```
    pub fn get(&self, buf: &[u8]) -> Option<Type> {
        // First matcher accepting the buffer wins.
        self.iter_matchers().find(|kind| kind.matches(buf)).copied()
    }
    /// Returns the file type of the file given a path.
    ///
    /// # Examples
    ///
    /// See [`get_from_path`](./fn.get_from_path.html).
    #[cfg(feature = "std")]
    pub fn get_from_path<P: AsRef<Path>>(&self, path: P) -> io::Result<Option<Type>> {
        let file = File::open(path)?;
        // Only the first 8192 bytes are read for signature detection;
        // the buffer capacity is pre-sized from file metadata when
        // available (0 on metadata failure, letting the Vec grow).
        let limit = file
            .metadata()
            .map(|m| std::cmp::min(m.len(), 8192) as usize + 1)
            .unwrap_or(0);
        let mut bytes = Vec::with_capacity(limit);
        file.take(8192).read_to_end(&mut bytes)?;
        Ok(self.get(&bytes))
    }
    /// Determines whether a buffer is of given extension.
    ///
    /// # Examples
    ///
    /// See [`is`](./fn.is.html).
    pub fn is(&self, buf: &[u8], extension: &str) -> bool {
        self.iter_matchers()
            .any(|kind| kind.extension() == extension && kind.matches(buf))
    }
    /// Determines whether a buffer is of given mime type.
    ///
    /// # Examples
    ///
    /// See [`is_mime`](./fn.is_mime.html).
    pub fn is_mime(&self, buf: &[u8], mime_type: &str) -> bool {
        self.iter_matchers()
            .any(|kind| kind.mime_type() == mime_type && kind.matches(buf))
    }
    /// Returns whether an extension is supported.
    ///
    /// # Examples
    ///
    /// See [`is_supported`](./fn.is_supported.html).
    pub fn is_supported(&self, extension: &str) -> bool {
        self.iter_matchers()
            .any(|kind| kind.extension() == extension)
    }
    /// Returns whether a mime type is supported.
    ///
    /// # Examples
    ///
    /// See [`is_mime_supported`](./fn.is_mime_supported.html).
    pub fn is_mime_supported(&self, mime_type: &str) -> bool {
        self.iter_matchers()
            .any(|kind| kind.mime_type() == mime_type)
    }
    /// Determines whether a buffer is an application type.
    ///
    /// # Examples
    ///
    /// See [`is_app`](./fn.is_app.html).
    pub fn is_app(&self, buf: &[u8]) -> bool {
        self.is_type(buf, MatcherType::App)
    }
    /// Determines whether a buffer is an archive type.
    ///
    /// # Examples
    ///
    /// See [`is_archive`](./fn.is_archive.html).
    pub fn is_archive(&self, buf: &[u8]) -> bool {
        self.is_type(buf, MatcherType::Archive)
    }
    /// Determines whether a buffer is an audio type.
    ///
    /// # Examples
    ///
    /// See [`is_audio`](./fn.is_audio.html).
    pub fn is_audio(&self, buf: &[u8]) -> bool {
        self.is_type(buf, MatcherType::Audio)
    }
    /// Determines whether a buffer is a book type.
    ///
    /// # Examples
    ///
    /// See [`is_book`](./fn.is_book.html).
    pub fn is_book(&self, buf: &[u8]) -> bool {
        self.is_type(buf, MatcherType::Book)
    }
    /// Determines whether a buffer is a document type.
    ///
    /// # Examples
    ///
    /// See [`is_document`](./fn.is_document.html).
    pub fn is_document(&self, buf: &[u8]) -> bool {
        self.is_type(buf, MatcherType::Doc)
    }
    /// Determines whether a buffer is a font type.
    ///
    /// # Examples
    ///
    /// See [`is_font`](./fn.is_font.html).
    pub fn is_font(&self, buf: &[u8]) -> bool {
        self.is_type(buf, MatcherType::Font)
    }
    /// Determines whether a buffer is an image type.
    ///
    /// # Examples
    ///
    /// See [`is_image`](./fn.is_image.html).
    pub fn is_image(&self, buf: &[u8]) -> bool {
        self.is_type(buf, MatcherType::Image)
    }
    /// Determines whether a buffer is a video type.
    ///
    /// # Examples
    ///
    /// See [`is_video`](./fn.is_video.html).
    pub fn is_video(&self, buf: &[u8]) -> bool {
        self.is_type(buf, MatcherType::Video)
    }
    /// Determines whether a buffer is one of the custom types added.
    ///
    /// # Examples
    ///
    /// ```rust
    /// # #[cfg(feature = "alloc")]
    /// # fn run() {
    /// fn custom_matcher(buf: &[u8]) -> bool {
    ///     return buf.len() >= 3 && buf[0] == 0x10 && buf[1] == 0x11 && buf[2] == 0x12;
    /// }
    ///
    /// let mut info = infer::Infer::new();
    /// info.add("custom/foo", "foo", custom_matcher);
    /// let buf = [0x10, 0x11, 0x12, 0x13];
    /// assert!(info.is_custom(&buf));
    /// # }
    /// ```
    pub fn is_custom(&self, buf: &[u8]) -> bool {
        self.is_type(buf, MatcherType::Custom)
    }
    /// Adds a custom matcher.
    ///
    /// Custom matchers are matched in order of addition and before
    /// the default set of matchers.
    ///
    /// # Examples
    ///
    /// ```rust
    /// fn custom_matcher(buf: &[u8]) -> bool {
    ///     return buf.len() >= 3 && buf[0] == 0x10 && buf[1] == 0x11 && buf[2] == 0x12;
    /// }
    ///
    /// let mut info = infer::Infer::new();
    /// info.add("custom/foo", "foo", custom_matcher);
    /// let buf = [0x10, 0x11, 0x12, 0x13];
    /// let kind = info.get(&buf).expect("file type is known");
    ///
    /// assert_eq!(kind.mime_type(), "custom/foo");
    /// assert_eq!(kind.extension(), "foo");
    /// ```
    #[cfg(feature = "alloc")]
    pub fn add(&mut self, mime_type: &'static str, extension: &'static str, m: Matcher) {
        self.mmap.push(Type::new_static(
            MatcherType::Custom,
            mime_type,
            extension,
            WrapMatcher(m),
        ));
    }
    // Shared implementation for the `is_app`/`is_image`/... helpers.
    fn is_type(&self, buf: &[u8], matcher_type: MatcherType) -> bool {
        self.iter_matchers()
            .any(|kind| kind.matcher_type() == matcher_type && kind.matches(buf))
    }
}
impl Default for Infer {
fn default() -> Self {
Infer::new()
}
}
// Process-wide default `Infer` instance (no custom matchers) backing
// the module-level convenience functions below.
static INFER: Infer = Infer::new();
/// Returns the file type of the buffer.
///
/// # Examples
///
/// ```rust
/// let info = infer::Infer::new();
/// let buf = [0xFF, 0xD8, 0xFF, 0xAA];
/// let kind = info.get(&buf).expect("file type is known");
///
/// assert_eq!(kind.mime_type(), "image/jpeg");
/// assert_eq!(kind.extension(), "jpg");
/// ```
pub fn get(buf: &[u8]) -> Option<Type> {
    INFER.get(buf)
}
/// Returns the file type of the file given a path.
///
/// # Errors
///
/// Returns an error if we fail to read the path.
///
/// # Examples
///
/// ```rust
/// let kind = infer::get_from_path("testdata/sample.jpg")
///     .expect("file read successfully")
///     .expect("file type is known");
///
/// assert_eq!(kind.mime_type(), "image/jpeg");
/// assert_eq!(kind.extension(), "jpg");
/// ```
#[cfg(feature = "std")]
pub fn get_from_path<P: AsRef<Path>>(path: P) -> io::Result<Option<Type>> {
    INFER.get_from_path(path)
}
/// Determines whether a buffer is of given extension.
///
/// # Examples
///
/// ```rust
/// let buf = [0xFF, 0xD8, 0xFF, 0xAA];
/// assert!(infer::is(&buf, "jpg"));
/// ```
pub fn is(buf: &[u8], extension: &str) -> bool {
    INFER.is(buf, extension)
}
/// Determines whether a buffer is of given mime type.
///
/// # Examples
///
/// ```rust
/// let buf = [0xFF, 0xD8, 0xFF, 0xAA];
/// assert!(infer::is_mime(&buf, "image/jpeg"));
/// ```
pub fn is_mime(buf: &[u8], mime_type: &str) -> bool {
    INFER.is_mime(buf, mime_type)
}
/// Returns whether an extension is supported.
///
/// # Examples
///
/// ```rust
/// assert!(infer::is_supported("jpg"));
/// ```
pub fn is_supported(extension: &str) -> bool {
    INFER.is_supported(extension)
}
/// Returns whether a mime type is supported.
///
/// # Examples
///
/// ```rust
/// assert!(infer::is_mime_supported("image/jpeg"));
/// ```
pub fn is_mime_supported(mime_type: &str) -> bool {
    INFER.is_mime_supported(mime_type)
}
/// Determines whether a buffer is an application type.
///
/// # Examples
///
/// ```rust
/// use std::fs;
/// assert!(infer::is_app(&fs::read("testdata/sample.wasm").unwrap()));
/// ```
pub fn is_app(buf: &[u8]) -> bool {
    INFER.is_app(buf)
}
/// Determines whether a buffer is an archive type.
///
/// # Examples
///
/// ```rust
/// use std::fs;
/// assert!(infer::is_archive(&fs::read("testdata/sample.pdf").unwrap()));
/// ```
pub fn is_archive(buf: &[u8]) -> bool {
    INFER.is_archive(buf)
}
/// Determines whether a buffer is an audio type.
///
/// # Examples
///
/// ```rust
/// // mp3
/// let v = [0xff, 0xfb, 0x90, 0x44, 0x00];
/// assert!(infer::is_audio(&v));
/// ```
pub fn is_audio(buf: &[u8]) -> bool {
    INFER.is_audio(buf)
}
/// Determines whether a buffer is a book type.
///
/// # Examples
///
/// ```rust
/// use std::fs;
/// assert!(infer::is_book(&fs::read("testdata/sample.epub").unwrap()));
/// ```
pub fn is_book(buf: &[u8]) -> bool {
    INFER.is_book(buf)
}
/// Determines whether a buffer is a document type.
///
/// # Examples
///
/// ```rust
/// use std::fs;
/// assert!(infer::is_document(&fs::read("testdata/sample.docx").unwrap()));
/// ```
pub fn is_document(buf: &[u8]) -> bool {
    INFER.is_document(buf)
}
/// Determines whether a buffer is a font type.
///
/// # Examples
///
/// ```rust
/// use std::fs;
/// assert!(infer::is_font(&fs::read("testdata/sample.ttf").unwrap()));
/// ```
pub fn is_font(buf: &[u8]) -> bool {
    INFER.is_font(buf)
}
/// Determines whether a buffer is an image type.
///
/// # Examples
///
/// ```rust
/// let v = [0xFF, 0xD8, 0xFF, 0xAA];
/// assert!(infer::is_image(&v));
/// ```
pub fn is_image(buf: &[u8]) -> bool {
    INFER.is_image(buf)
}
/// Determines whether a buffer is a video type.
///
/// # Examples
///
/// ```rust
/// use std::fs;
/// assert!(infer::is_video(&fs::read("testdata/sample.mov").unwrap()));
/// ```
pub fn is_video(buf: &[u8]) -> bool {
    INFER.is_video(buf)
}
#[cfg(test)]
mod tests {
    #[cfg(feature = "alloc")]
    use super::Infer;
    // An empty buffer must match no signature.
    #[test]
    fn test_get_unknown() {
        let buf = [];
        assert!(crate::get(&buf).is_none());
    }
    // JPEG magic number FF D8 FF is recognized.
    #[test]
    fn test_get_jpeg() {
        let buf = [0xFF, 0xD8, 0xFF, 0xAA];
        let kind = crate::get(&buf).expect("file type is known");
        assert_eq!(kind.extension(), "jpg");
        assert_eq!(kind.mime_type(), "image/jpeg");
    }
    #[test]
    fn test_matcher_type() {
        let buf = [0xFF, 0xD8, 0xFF, 0xAA];
        let kind = crate::get(&buf).expect("file type is known");
        assert_eq!(kind.matcher_type(), crate::MatcherType::Image);
    }
    // Custom matchers must win over the built-in ones for the same bytes.
    #[cfg(feature = "alloc")]
    #[test]
    fn test_custom_matcher_ordering() {
        // overrides jpeg matcher
        fn foo_matcher(buf: &[u8]) -> bool {
            buf.len() > 2 && buf[0] == 0xFF && buf[1] == 0xD8 && buf[2] == 0xFF
        }
        // overrides png matcher
        fn bar_matcher(buf: &[u8]) -> bool {
            buf.len() > 3 && buf[0] == 0x89 && buf[1] == 0x50 && buf[2] == 0x4E && buf[3] == 0x47
        }
        let mut info = Infer::new();
        info.add("custom/foo", "foo", foo_matcher);
        info.add("custom/bar", "bar", bar_matcher);
        let buf_foo = &[0xFF, 0xD8, 0xFF];
        let typ = info.get(buf_foo).expect("type is matched");
        assert_eq!(typ.mime_type(), "custom/foo");
        assert_eq!(typ.extension(), "foo");
        let buf_bar = &[0x89, 0x50, 0x4E, 0x47];
        let typ = info.get(buf_bar).expect("type is matched");
        assert_eq!(typ.mime_type(), "custom/bar");
        assert_eq!(typ.extension(), "bar");
    }
}
| true |
89d63922287657a6c4f61888c95b74900750d4dd
|
Rust
|
SrimantaBarua/bed
|
/src/common/mod.rs
|
UTF-8
| 1,063 | 2.59375 | 3 |
[
"MIT"
] |
permissive
|
// (C) 2020 Srimanta Barua <[email protected]>
mod rope;
pub(crate) use {
rope::rope_is_grapheme_boundary, rope::rope_next_grapheme_boundary, rope::rope_trim_newlines,
rope::RopeGraphemes,
};
// Types for euclid: zero-sized unit tags that distinguish coordinate
// spaces at the type level (no runtime data).
pub(crate) struct DPI;
pub struct PixelSize;
pub(crate) struct TextureSize;
/// Converts `spath` to an absolute path string.
///
/// - Absolute paths are returned unchanged.
/// - Paths whose first component is `~` are resolved against the
///   user's home directory.
/// - Everything else is resolved against the current working directory.
///
/// # Panics
/// Panics if the home/current directory cannot be determined or if the
/// resulting path is not valid UTF-8.
pub(crate) fn abspath(spath: &str) -> String {
    let path = std::path::Path::new(spath);
    if path.is_absolute() {
        spath.to_owned()
    } else if path.starts_with("~") {
        let mut home_dir = directories::BaseDirs::new()
            .expect("failed to get base directories")
            .home_dir()
            .to_owned();
        // `starts_with`/`strip_prefix` work on whole components, so this
        // matches "~" and "~/..." but not "~user".
        home_dir.push(path.strip_prefix("~").expect("failed to strip '~' prefix"));
        home_dir
            .to_str()
            .expect("failed to convert path to string")
            .to_owned()
    } else {
        let mut wdir = std::env::current_dir().expect("failed to get current directory");
        wdir.push(spath);
        wdir.to_str()
            .expect("failed to convert path to string")
            .to_owned()
    }
}
| true |
06ce0e534f0c0d97f753dd2394af64fd1b168400
|
Rust
|
lemunozm/packet-meta-classifier
|
/classifiers/internet/examples/real_traffic/main.rs
|
UTF-8
| 3,413 | 2.71875 | 3 |
[] |
no_license
|
use internet::{
self,
http::expression::{Http, HttpHeader, HttpMethod},
ip::expression::IpVersion,
tcp::expression::Tcp,
udp::expression::Udp,
Config,
};
use pmc_core::engine::{ClassifierEngine, Rule};
use pmc_core::expression::Expr;
use pmc_core::packet::{Direction, Packet};
use mac_address::mac_address_by_name;
use pcap::{Active, Capture, Device, Linktype};
/// Sniffs live traffic from the interface named as the first CLI
/// argument and prints the classification of every captured packet.
fn main() {
    let args: Vec<String> = std::env::args().collect();
    let interface = args.get(1).expect("An interface must be specified");
    let mut network = NetworkInspector::new(interface);
    println!("Sniffing from {} interface...", interface);
    // NOTE(review): rules appear ordered from most to least specific;
    // confirm with ClassifierEngine docs whether the first match wins.
    let mut classifier = ClassifierEngine::new(
        internet::loader(),
        Config::default(),
        vec![
            Rule::new("example.com", Expr::value(HttpHeader("Host", "example.com"))),
            Rule::new("Get", Expr::value(HttpMethod::Get)),
            Rule::new("Post/Put", Expr::value(HttpMethod::Post) | Expr::value(HttpMethod::Put)),
            Rule::new("Http", Expr::value(Http)),
            Rule::new("Tcp", Expr::value(Tcp)),
            Rule::new("Udp", Expr::value(Udp)),
            Rule::new("Ipv4", Expr::value(IpVersion::V4)),
            Rule::new("Ipv6", Expr::value(IpVersion::V6)),
        ],
    );
    loop {
        // `next()` yields None for frames that are not IP-over-Ethernet
        // or do not involve this interface's MAC; those are skipped.
        if let Some(packet) = network.next() {
            let classification = classifier.classify_packet(packet);
            println!(
                "{} bytes classified as: {}",
                classification.payload_bytes, classification.rule_tag
            );
        }
    }
}
/// Wraps a live pcap capture and tags each frame with its direction
/// relative to the sniffed interface's MAC address.
struct NetworkInspector {
    capture: Capture<Active>,
    // MAC address of the sniffed interface, used to infer direction.
    interface_mac: [u8; 6],
}
impl NetworkInspector {
    /// Opens a live capture on the named interface.
    ///
    /// Panics if the interface does not exist, cannot be opened (raw
    /// capture needs elevated capabilities), is not Ethernet, or has
    /// no MAC address.
    fn new(interface: &str) -> Self {
        let device = Device::list()
            .unwrap()
            .into_iter()
            .find(|device| &device.name == interface)
            .unwrap();
        // immediate_mode delivers packets as they arrive instead of
        // waiting for the capture buffer to fill.
        let capture = Capture::from_device(device)
            .unwrap()
            .immediate_mode(true)
            .open()
            .expect(
                "You need root capabilities to run this example.\n\
                Try: 'sudo setcap cap_net_raw,cap_net_admin=eip <this-binary>'.\n\
                Error",
            );
        assert!(
            capture.get_datalink() == Linktype::ETHERNET,
            "The specified interface must be of type Ethernet"
        );
        let interface_mac = match mac_address_by_name(interface) {
            Ok(Some(interface_mac)) => interface_mac.bytes(),
            _ => panic!("The specified interface has no MAC address"),
        };
        NetworkInspector {
            capture,
            interface_mac,
        }
    }

    /// Blocks for the next captured frame; returns the IP payload with
    /// its direction, or None when the frame is not IP or does not
    /// involve this interface.
    fn next(&mut self) -> Option<Packet<'_>> {
        let pcap_packet = self.capture.next().unwrap();
        // Ethernet EtherType at bytes 12..14: 0x0800 = IPv4, 0x86DD = IPv6.
        if matches!(pcap_packet.data[12..14], [0x08, 0x00] | [0x86, 0xdd]) {
            // Destination MAC is bytes 0..6, source MAC bytes 6..12:
            // frames addressed to us are downlink, frames we sent are uplink.
            let direction = if pcap_packet.data[0..6] == self.interface_mac {
                Direction::Downlink
            } else if pcap_packet.data[6..12] == self.interface_mac {
                Direction::Uplink
            } else {
                // The message do not belong to the expected interface
                return None;
            };
            // IP packet over ethernet: strip the 14-byte Ethernet header.
            return Some(Packet {
                data: &pcap_packet.data[14..],
                direction,
            });
        }
        None
    }
}
| true |
baf0de6224304b4a01f36d8ff9fa8b54a1401aef
|
Rust
|
tobiasbu/rust
|
/src/test/ui/nll/guarantor-issue-46974.rs
|
UTF-8
| 999 | 2.828125 | 3 |
[
"BSD-3-Clause",
"NCSA",
"LicenseRef-scancode-other-permissive",
"ISC",
"Apache-2.0",
"BSD-2-Clause",
"MIT"
] |
permissive
|
// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Test that NLL analysis propagates lifetimes correctly through
// field accesses, Box accesses, etc.
#![feature(nll)]
// `t` mutably reborrows `*s` and `x` borrows through `t`, so the
// assignment to `*s` while `x` is still live must be rejected by NLL.
fn foo(s: &mut (i32,)) -> i32 {
    let t = &mut *s; // this borrow should last for the entire function
    let x = &t.0;
    *s = (2,); //~ ERROR cannot assign to `*s`
    *x
}

// Returning a reference derived from a non-'static input as `&'static`
// must be rejected.
fn bar(s: &Box<(i32,)>) -> &'static i32 {
    // FIXME(#46983): error message should be better
    &s.0 //~ ERROR free region `` does not outlive free region `'static`
}

fn main() {
    foo(&mut (0,));
    bar(&Box::new((1,)));
}
| true |
15ad578f1361d72470660d0df185c40b62c861b7
|
Rust
|
youssefhabri/zero2-rs
|
/src/core/checks.rs
|
UTF-8
| 794 | 2.640625 | 3 |
[] |
no_license
|
use serenity::framework::standard::{macros::check, Reason};
use serenity::model::prelude::Message;
use serenity::prelude::Context;
use super::consts::OWNER_ID;
/// Command check that passes only for the hard-coded bot owner
/// (`OWNER_ID`).
#[check]
#[name = "Owner"]
async fn owner_check(_: &Context, msg: &Message) -> Result<(), Reason> {
    if msg.author.id == OWNER_ID {
        return Ok(());
    }
    Err(Reason::User("User is not Mittens".to_string()))
}

/// Command check that passes when the invoking guild member has the
/// Administrator permission.
#[check]
#[name = "Admin"]
async fn admin_check(context: &Context, message: &Message) -> Result<(), Reason> {
    // Fails closed: any lookup failure (no member / no permissions)
    // falls through to the error below.
    if let Ok(member) = message.member(&context).await {
        if let Ok(permissions) = member.permissions(&context) {
            if permissions.administrator() {
                return Ok(());
            }
        }
    }
    Err(Reason::User("User lacked admin permission.".to_string()))
}
| true |
c98ad6da40f44d0adfcd518e77952c8004f63191
|
Rust
|
stivenson/Rust-Study
|
/basic-mutations/src/main.rs
|
UTF-8
| 175 | 3.6875 | 4 |
[] |
no_license
|
/// Small exercise demonstrating mutable bindings and mutable references.
fn main() {
    println!("Mutation Exercises");

    // Start from a mutable binding and bump it by ten.
    let mut x = 5;
    x += 10;

    // Take a mutable reference; printing through it auto-dereferences.
    let y = &mut x;
    println!("The value of 'y' is {}", y);
    // The borrow above has ended, so `x` is readable again.
    println!("The value of 'x' is {}", x);
}
| true |
4d7d3aa8efdcfdabdde6a28b21ef5e967e0d2166
|
Rust
|
Youka/gl32
|
/tests/gl32_tests.rs
|
UTF-8
| 2,205 | 2.90625 | 3 |
[
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
] |
permissive
|
#[cfg(test)]
mod gl32_tests {
    // Imports
    use glutin::*;
    use glutin::dpi::*;
    // Creates a window, loads GL functions, clears the screen once and
    // exits (the event loop breaks on the first iteration).
    #[test]
    fn test_gl_window() {
        // Create OpenGL window
        let mut events_loop = EventsLoop::new();
        let gl_window = GlWindow::new(
            WindowBuilder::new()
                .with_title("GL32 test window!")
                .with_dimensions(LogicalSize::new(1280.0, 720.0)),
            ContextBuilder::new()
                .with_vsync(true),
            &events_loop
        ).expect("Couldn't create simple GL window!");
        // Initialize context (activate context & load modern GL functions)
        unsafe {
            gl_window.make_current().expect("Couldn't make GL context current!");
        }
        let gl = gl32::Gl::load_with(|symbol| gl_window.get_proc_address(symbol) as *const _);
        // Run event loop
        events_loop.run_forever(|event| {
            // Window event?
            if let Event::WindowEvent { event, .. } = event {
                // Window closed
                if let WindowEvent::CloseRequested = event {
                    return ControlFlow::Break;
                }
            }
            // Draw! (clear to solid green)
            unsafe {
                gl.ClearColor(0.0, 1.0, 0.0, 1.0);
                gl.Clear(gl32::COLOR_BUFFER_BIT);
            }
            // Update screen
            gl_window.swap_buffers().expect("Couldn't swap GL pixel buffers!");
            // Continue loop (not!)
            ControlFlow::Break
            //ControlFlow::Continue
        });
    }
    // Verifies an OpenGL 3.2 core profile context can be created, using
    // a minimal invisible window.
    #[test]
    fn test_gl32_available() {
        GlWindow::new(
            // Minimal invisible window (at least required for offscreen rendering)
            WindowBuilder::new()
                .with_dimensions(LogicalSize::new(1.0, 1.0))
                .with_visibility(false),
            // Request powerful enough OpenGL profile
            ContextBuilder::new()
                .with_gl(GlRequest::Specific(Api::OpenGl, (3, 2)))
                .with_gl_profile(GlProfile::Core),
            // Just required for window building
            &EventsLoop::new()
        ).expect("Couldn't initialize simple window with GL requirements!");
    }
}
| true |
90324e376ea07a694d7fa77428535e58cefbace0
|
Rust
|
SINHASantos/rust-rocks
|
/examples/counters.rs
|
UTF-8
| 2,399 | 3.015625 | 3 |
[
"Apache-2.0"
] |
permissive
|
extern crate rocks;
use rocks::prelude::*;
const DB_PATH: &str = "./data.merge_op";
/// RocksDB merge operator that interprets operands as big-endian u64
/// counters and adds them.
pub struct UInt64AddOperator;
/// Decodes a big-endian `u64` from `value`.
///
/// Matches the original shift-fold semantics: a slice shorter than 8
/// bytes is treated as the most-significant bytes (low bytes zero).
/// Panics if `value` is longer than 8 bytes.
fn deserialize(value: &[u8]) -> u64 {
    let mut buf = [0u8; 8];
    buf[..value.len()].copy_from_slice(value);
    u64::from_be_bytes(buf)
}
/// Encodes `value` as its 8 big-endian bytes.
fn serialize(value: u64) -> Vec<u8> {
    Vec::from(value.to_be_bytes())
}
impl AssociativeMergeOperator for UInt64AddOperator {
    /// Adds the big-endian u64 operand to the existing counter value,
    /// treating a missing existing value as 0.
    fn merge(&self, key: &[u8], existing_value: Option<&[u8]>, value: &[u8], _logger: &Logger) -> Option<Vec<u8>> {
        // Debug trace of every merge invocation (example code).
        println!(
            "merge: key = {:?} existing_value = {:?} value = {:?}",
            key, existing_value, value
        );
        // assuming 0 if no existing value
        let existing = existing_value.map(|raw| deserialize(raw)).unwrap_or_default();
        let oper = deserialize(value);
        // NOTE(review): plain `+` panics on overflow in debug builds;
        // consider wrapping_add/checked_add if counters can grow large.
        let new = existing + oper;
        return Some(serialize(new));
    }
}
/// Counter store built on RocksDB's merge operator: increments are
/// written as merges instead of read-modify-write round trips.
pub struct MergeBasedCounters {
    db: DB,
}
impl MergeBasedCounters {
    /// Wraps an already-opened DB (which must have the add-merge
    /// operator configured).
    pub fn new(db: DB) -> Self {
        MergeBasedCounters { db }
    }

    /// Increments `key` by `value` via a RocksDB Merge.
    // NOTE(review): write errors are silently discarded here but
    // `remove` unwraps — error handling is inconsistent across methods.
    pub fn add(&self, key: &str, value: u64) {
        let serialized = serialize(value);
        let _ = self
            .db
            .merge(WriteOptions::default_instance(), key.as_bytes(), &serialized);
    }

    /// Reads the current counter value; None when the key is missing
    /// or the read fails.
    pub fn get(&self, key: &str) -> Option<u64> {
        self.db
            .get(ReadOptions::default_instance(), key.as_bytes())
            .map(|raw| deserialize(&*raw))
            .ok()
    }

    /// mapped to a RocksDB Delete
    pub fn remove(&self, key: &str) {
        self.db
            .delete(WriteOptions::default_instance(), key.as_bytes())
            .unwrap();
    }

    /// mapped to a RocksDB Put (overwrites the counter unconditionally)
    pub fn set(&self, key: &str, value: u64) {
        let serialized = serialize(value);
        let _ = self
            .db
            .put(WriteOptions::default_instance(), key.as_bytes(), &serialized);
    }
}
/// Opens (or creates) the example DB with the u64-add merge operator
/// installed, bumps counter "a" by 5, and prints it.
fn main() {
    let db = DB::open(
        Options::default()
            .map_db_options(|db| db.create_if_missing(true))
            .map_cf_options(|cf| cf.associative_merge_operator(Box::new(UInt64AddOperator))),
        DB_PATH,
    )
    .unwrap();
    let counters = MergeBasedCounters::new(db);
    // counters.remove("a");
    counters.add("a", 5);
    println!("val => {:?}", counters.get("a"));
    // counters.set("a", 100);
    // println!("val => {:?}", counters.get("a"));
}
| true |
c47ffc283161cb6bed71ad507540094f0c761031
|
Rust
|
totetmatt/adventofcode-2020
|
/rust/aoc/src/day08.rs
|
UTF-8
| 2,706 | 3.140625 | 3 |
[] |
no_license
|
use std::collections::HashSet;
use std::fs;
/// Advent of Code 2020 day 8: parses a tiny jmp/nop/acc program and
/// runs it with one-instruction-flip backtracking (part 2), printing
/// the final accumulator.
// NOTE(review): the input path uses Windows separators and the parser
// splits on "\r\n", so this only works with CRLF input on Windows.
pub fn day08() {
    // An instruction with its signed operand.
    enum Op {
        Jmp(i32),
        Nop(i32),
        Acc(i32),
        Err(),
    }
    // Execution outcome: Ok = program ran off the end, Error = a loop
    // (or bad opcode) was detected; both carry an accumulator value.
    enum Run {
        Ok(i32),
        Error(i32),
    }
    let contents =
        fs::read_to_string("..\\..\\input08").expect("Something went wrong reading the file");
    let prog = contents
        .split("\r\n")
        .map(|x| {
            let mut r = x.chars();
            let rr = r.by_ref();
            // take_while consumes up to and including the separating
            // space, leaving the signed operand in the iterator.
            let k = rr.take_while(|&x| x != ' ').collect::<String>();
            let v = rr.collect::<String>().parse::<i32>().unwrap();
            match k.as_str() {
                "jmp" => Op::Jmp(v),
                "nop" => Op::Nop(v),
                "acc" => Op::Acc(v),
                _ => Op::Err(),
            }
        })
        .collect::<Vec<Op>>();
    // Recursive interpreter. `visited` detects loops; `changed` is true
    // once a single jmp<->nop flip has been spent on this path.
    fn run(
        prog: &Vec<Op>,
        pointer: i32,
        accumulator: i32,
        visited: &HashSet<usize>,
        changed: bool,
    ) -> Run {
        let pointer: i32 = pointer;
        let accumulator: i32 = accumulator;
        // Each frame works on its own copy of the visited set.
        let mut visited: HashSet<usize> = visited.iter().copied().collect();
        if !visited.contains(&(pointer as usize)) && (pointer as usize) < prog.len() {
            visited.insert(pointer as usize);
            return match prog[pointer as usize] {
                // On failure, retry once with this jmp treated as a nop.
                Op::Jmp(i) => match run(prog, pointer + i, accumulator, &visited, changed) {
                    Run::Error(i) => {
                        if changed {
                            Run::Error(i)
                        } else {
                            run(prog, pointer + 1, accumulator, &visited, true)
                        }
                    }
                    Run::Ok(i) => Run::Ok(i),
                },
                // On failure, retry once with this nop treated as a jmp.
                // NOTE(review): here `Run::Error(i)` returns the nop's
                // OPERAND (outer `i`), unlike the Jmp arm which
                // propagates the inner error accumulator — likely a bug.
                Op::Nop(i) => match run(prog, pointer + 1, accumulator, &visited, changed) {
                    Run::Error(_) => {
                        if changed {
                            Run::Error(i)
                        } else {
                            run(prog, pointer + i, accumulator, &visited, true)
                        }
                    }
                    Run::Ok(i) => Run::Ok(i),
                },
                Op::Acc(i) => run(prog, pointer + 1, accumulator + i, &visited, changed),
                Op::Err() => Run::Error(-1),
            };
        } else {
            // Revisited instruction => loop => Error; past the end => Ok.
            if (pointer as usize) < prog.len() {
                Run::Error(accumulator)
            } else {
                Run::Ok(accumulator)
            }
        }
    }
    match run(&prog, 0, 0, &HashSet::<usize>::new(), false) {
        Run::Ok(i) => println!("OK {:?}", i),
        Run::Error(i) => println!("Error {:?}", i),
    }
}
| true |
59705f803a44fa3b907901e72d731a23757e62ec
|
Rust
|
Aino-io/aino-agent-rust
|
/src/lib.rs
|
UTF-8
| 2,797 | 2.9375 | 3 |
[
"Apache-2.0"
] |
permissive
|
//! [`Aino.io`](https://aino.io) agent for the Rust programming language.
//!
//! [`Aino.io`](http://aino.io) is an analytics and monitoring tool for integrated enterprise applications and digital
//! business processes. Aino.io can help organizations manage, develop, and run the digital parts of their day-to-day
//! business. Read more from our [web pages](http://aino.io).
//!
//! Aino.io works by analyzing transactions between enterprise applications and other pieces of software.
//! This Agent helps to store data about the transactions to Aino.io platform using Aino.io Data API (version 2.0).
//! See [API documentation](http://www.aino.io/api) for detailed information about the API.
//!
//! #### Example
//! ```no_run
//! use std::time::SystemTime;
//!
//! // Load the configuration
//! let config = ainoio_agent::AinoConfig::new()?;
//!
//! // Start the Aino agent
//! // This must be called exactly once before any transactions are sent
//! ainoio_agent::start(config)?;
//!
//! let timestamp = SystemTime::now().duration_since(SystemTime::UNIX_EPOCH).unwrap();
//!
//! // Create transaction object
//! let mut transaction = ainoio_agent::Transaction::new("From Application".to_string(),
//! "To Application".to_string(), "Operation".to_string(), ainoio_agent::Status::Success,
//! timestamp.as_millis(), "flow id".to_string(), "Integration Segment".to_string());
//! transaction.message = Some("Data transfer successful.".to_string());
//! transaction.payload_type = Some("Product Update".to_string());
//!
//! let metadata = ainoio_agent::TransactionMetadata::new("Card API".to_string(), "https://somecardsystem.com".to_string());
//! transaction.add_metadata(metadata);
//!
//! let id = ainoio_agent::TransactionId::new("OrderId".to_string(), vec!["123456".to_string(), "xxasd".to_string()]);
//! transaction.add_id(id);
//!
//! // Add the transaction into the queue, it will be sent after `send_interval' has elapsed at the latests
//! ainoio_agent::add_transaction(transaction).expect("Failed to add transaction to the send queue.");
//!
//! # Ok::<(), ainoio_agent::AinoError>(())
//! ```
#[macro_use]
extern crate serde_derive;
#[macro_use]
extern crate lazy_static;
mod aino_agent;
mod aino_config;
mod status;
mod transaction;
pub use aino_agent::*;
pub use aino_config::*;
pub use status::*;
pub use transaction::*;
use std::error::Error;
use std::fmt;
/// Error object for [`Aino.io`](https://aino.io) agent
///
/// Wraps a plain message string; the stored message is exactly what
/// `Display` (and therefore `to_string()`) renders.
#[derive(Debug)]
pub struct AinoError {
    msg: String,
}

impl AinoError {
    /// Construct a new `AinoError` carrying `msg`.
    pub fn new(msg: String) -> Self {
        Self { msg }
    }
}

impl fmt::Display for AinoError {
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        write!(fmt, "{}", self.msg)
    }
}

impl Error for AinoError {}
| true |
541d8bf07113baf8f1e7261f747b9663ba5b1a2d
|
Rust
|
leeduckgo/WeDPR-Lab-Core-Web-Version
|
/crypto/src/utils.rs
|
UTF-8
| 4,359 | 2.6875 | 3 |
[
"Apache-2.0"
] |
permissive
|
// Copyright 2020 WeDPR Lab Project Authors. Licensed under Apache-2.0.
//! Common utility functions.
use crate::constant::RISTRETTO_POINT_SIZE_IN_BYTES;
use bulletproofs::RangeProof;
use curve25519_dalek::{
ristretto::{CompressedRistretto, RistrettoPoint},
scalar::Scalar,
traits::MultiscalarMul,
};
use wedpr_utils::error::WedprError;
use crate::{
coder::Coder,
constant::{CODER, HASH},
hash::Hash,
};
use std::convert::TryInto;
/// Converts bytes to an encoded string.
///
/// Encoding scheme is whatever the crate-wide `CODER` implements
/// (see `constant::CODER`); inverse of [`string_to_bytes`].
pub fn bytes_to_string<T: ?Sized + AsRef<[u8]>>(input: &T) -> String {
    CODER.encode(input)
}
/// Converts an encoded string to a bytes vector.
///
/// Fails with the coder's error when `input` is not valid for the
/// crate-wide `CODER` encoding.
pub fn string_to_bytes(input: &str) -> Result<Vec<u8>, WedprError> {
    CODER.decode(input)
}
/// Converts Scalar to an encoded string.
///
/// Serializes the scalar's 32-byte representation through the
/// crate-wide coder; inverse of [`string_to_scalar`].
pub fn scalar_to_string(number: &Scalar) -> String {
    bytes_to_string(&number.to_bytes())
}
/// Converts an encoded string to Scalar.
///
/// The input must decode (via the crate-wide coder) to exactly 32 bytes;
/// anything else yields `WedprError::FormatError`. A failed decode is
/// also logged via `wedpr_println!`, matching the original behaviour.
pub fn string_to_scalar(num: &str) -> Result<Scalar, WedprError> {
    let decoded = string_to_bytes(num).map_err(|_| {
        wedpr_println!("string_to_scalar failed, string: {}", num);
        WedprError::FormatError
    })?;
    let bytes32 = to_bytes32_slice(&decoded)?;
    Ok(Scalar::from_bits(*bytes32))
}
/// Converts RistrettoPoint to an encoded string.
///
/// Compresses the point first, then encodes the compressed bytes;
/// inverse of [`string_to_point`].
pub fn point_to_string(point: &RistrettoPoint) -> String {
    bytes_to_string(&point.compress().to_bytes())
}
/// Converts an encoded string to RistrettoPoint.
///
/// Fails with `WedprError::FormatError` when the string does not decode,
/// when the decoded length is not the expected compressed-point size
/// (`RISTRETTO_POINT_SIZE_IN_BYTES`), or when the bytes do not
/// decompress to a valid Ristretto point.
pub fn string_to_point(point: &str) -> Result<RistrettoPoint, WedprError> {
    let decode_tmp = string_to_bytes(point)?;
    // Length check before handing the bytes to the decompressor.
    if decode_tmp.len() != RISTRETTO_POINT_SIZE_IN_BYTES {
        wedpr_println!("string_to_point decode failed");
        return Err(WedprError::FormatError);
    }
    // Decompression validates that the bytes encode a real curve point.
    let point_value =
        match CompressedRistretto::from_slice(&decode_tmp).decompress() {
            Some(v) => v,
            None => {
                wedpr_println!(
                    "string_to_point decompress CompressedRistretto failed"
                );
                return Err(WedprError::FormatError);
            },
        };
    Ok(point_value)
}
/// Converts RangeProof to an encoded string.
///
/// Serializes the proof bytes through the crate-wide coder.
pub fn rangeproof_to_string(proof: &RangeProof) -> String {
    bytes_to_string(&proof.to_bytes())
}
/// Converts an arbitrary string to Scalar.
/// Hashes the input first (crate-wide `HASH`), then reduces the 32-byte
/// digest into a Scalar modulo the group order.
pub fn hash_to_scalar(value: &str) -> Scalar {
    let digest = HASH.hash(value);
    let mut bytes = [0u8; 32];
    bytes.copy_from_slice(&digest);
    Scalar::from_bytes_mod_order(bytes)
}
/// Gets a random Scalar.
///
/// Uses the thread-local RNG from the `rand` crate.
pub fn get_random_scalar() -> Scalar {
    Scalar::random(&mut rand::thread_rng())
}
/// Makes a commitment for value in point format.
///
/// Computes `value * value_basepoint + blinding * blinding_basepoint`
/// (a Pedersen-style commitment) in a single multiscalar
/// multiplication.
pub fn make_commitment_point(
    value: u64,
    blinding: &Scalar,
    value_basepoint: &RistrettoPoint,
    blinding_basepoint: &RistrettoPoint,
) -> RistrettoPoint
{
    RistrettoPoint::multiscalar_mul(&[Scalar::from(value), *blinding], &[
        *value_basepoint,
        *blinding_basepoint,
    ])
}
// Private utility functions.

/// Extracts a slice of &[u8; 32] from the given slice.
///
/// Errors with `WedprError::FormatError` when the input slice is not
/// exactly 32 bytes long (the `TryInto` conversion enforces the length).
fn to_bytes32_slice(barry: &[u8]) -> Result<&[u8; 32], WedprError> {
    barry.try_into().map_err(|_| WedprError::FormatError)
}
#[cfg(test)]
mod tests {
    use super::*;
    #[test]
    fn test_scalar_conversion() {
        // Round-trip a random scalar through the string encoding.
        let num = get_random_scalar();
        let num_str = scalar_to_string(&num);
        let recovered_num = string_to_scalar(&num_str).unwrap();
        assert_eq!(num, recovered_num);
        // Invalid encodings must surface as FormatError.
        let bad_str = "bad";
        assert_eq!(
            WedprError::FormatError,
            string_to_scalar(bad_str).unwrap_err()
        );
    }
    #[test]
    pub fn test_bytes_conversion() {
        // Round-trip an ASCII string through bytes and back.
        let str = "test";
        let bytes = string_to_bytes(&str).unwrap();
        let recovered_str = bytes_to_string(&bytes);
        assert_eq!(str, recovered_str);
    }
    #[test]
    pub fn test_point_conversion() {
        // Round-trip the default point through the string encoding.
        let point = RistrettoPoint::default();
        let point_str = point_to_string(&point);
        let recovered_point = string_to_point(&point_str).unwrap();
        assert_eq!(point, recovered_point);
    }
}
| true |
6776d0bb0fb27afde27b36f7a3f0a1946b7ae37a
|
Rust
|
thomasantony/sudoku-rs
|
/src/main.rs
|
UTF-8
| 3,430 | 3.15625 | 3 |
[] |
no_license
|
mod utils;
use varisat::{Lit};
use varisat::solver::Solver;
use itertools::{iproduct};
use varisat::{CnfFormula, ExtendFormula};
use utils::SudokuGrid;
use std::iter::FromIterator;
/// Maps a (row, col, value) triple (all 0-based, each in 0..9) to a
/// unique positive SAT literal: one Boolean variable per
/// cell/value combination, indexed as `row*81 + col*9 + value`.
fn lit_from_value(row: usize, col: usize, value: usize) -> Lit {
    Lit::from_index(row * 9 * 9 + col * 9 + value, true)
}
/// Builds a CNF formula asserting that exactly one of `literals` is true:
/// a single "at least one" clause over all literals, plus a pairwise
/// "at most one" clause (`!a ∨ !b`) for every pair of literals.
///
/// Improvements: accepts any slice (`&[Lit]` instead of `&Vec<Lit>`;
/// existing `&Vec` callers still compile via deref coercion) and drops
/// the redundant `.clone()` calls — `Lit` is `Copy`.
fn exactly_once(literals: &[Lit]) -> CnfFormula {
    let mut formula = CnfFormula::new();
    // At least one of the literals must hold.
    formula.add_clause(literals);
    // At most one: forbid every pair from holding simultaneously.
    for (i, &lit1) in literals.iter().enumerate() {
        for &lit2 in &literals[i + 1..] {
            formula.add_clause(&[!lit1, !lit2]);
        }
    }
    formula
}
/// Emits one unit clause per pre-filled cell, pinning that cell's SAT
/// variable to the given value (values are stored 1-based on the board
/// and 0-based in the literal encoding, hence the `- 1`).
fn literals_from_board(board: &SudokuGrid) -> CnfFormula {
    let mut formula = CnfFormula::new();
    for ((row, col), cell) in board.iter() {
        // Empty cells contribute no constraint.
        if let Some(value) = cell {
            formula.add_clause(&[lit_from_value(*row, *col, *value - 1)]);
        }
    }
    formula
}
/// Rebuilds a [`SudokuGrid`] from a satisfying assignment: keeps only
/// the positive literals and inverts the `row*81 + col*9 + value`
/// index mapping of `lit_from_value`, restoring 1-based cell values.
fn board_from_solution(model: Vec<Lit>) -> SudokuGrid
{
    let grid_cells = model.iter()
        .filter(|l| l.is_positive())
        .map(|lit|{
            let index = lit.index();
            // Invert the encoding used by `lit_from_value`.
            let row = index / 81;
            let col = (index % 81) / 9;
            let value = (index % 81) % 9;
            // Stored 0-based in the encoding, 1-based on the board.
            ((row, col), Some(value+1))
        });
    SudokuGrid::from_iter(grid_cells)
}
/// Solves a hard-coded Sudoku puzzle by encoding it as SAT and handing
/// the formula to the `varisat` solver, then printing the solution.
fn main() {
    let puzzle_str = "..3.2.6..9..3.5..1..18.64....81.29..7.......8..67.82....26.95..8..2.3..9..5.1.3..";
    let puzzle = utils::parse_grid(puzzle_str.to_string());
    println!("Puzzle:");
    utils::display_grid(&puzzle);
    let mut solver = Solver::new();
    // Each row has all numbers from 1..9 exactly once
    for (row, value) in iproduct!(0..9, 0..9)
    {
        let mut literals: Vec<Lit> = Vec::new();
        for col in 0..9 {
            literals.push(lit_from_value(row, col, value));
        }
        solver.add_formula(& exactly_once(&literals));
    }
    // Each column has all numbers from 1..9 exactly once
    // NOTE(review): unlike rows, columns only get the "at least once"
    // clause here (no pairwise at-most-one constraints). Combined with
    // the per-cell exactly-once constraints below this may still encode
    // Sudoku correctly, but the asymmetry with the row loop looks
    // unintentional — confirm.
    for (col, value) in iproduct!(0..9, 0..9)
    {
        let mut literals: Vec<Lit> = Vec::new();
        for row in 0..9 {
            literals.push(lit_from_value(row, col, value));
        }
        solver.add_clause(literals.as_slice());
    }
    // Each box has all numbers from 1..9 exactly once
    // NOTE(review): boxes also get only the "at least once" clause —
    // same asymmetry as the column loop above.
    for value in 0..9
    {
        // (r, c) ranges over the top-left corner of each 3x3 box.
        for (r, c) in iproduct!(&[0,3,6], &[0,3,6])
        {
            let mut literals: Vec<Lit> = Vec::new();
            for (rr, cc) in iproduct!(&[0, 1, 2],
                                      &[0, 1, 2])
            {
                let row = (r + rr) as usize;
                let col = (c + cc) as usize;
                literals.push(lit_from_value(row, col, value));
            }
            solver.add_clause(literals.as_slice());
        }
    }
    // Each number only once
    // (i.e. each cell holds exactly one value — full exactly-once here.)
    for (row, col) in iproduct!(0..9, 0..9)
    {
        let mut literals: Vec<Lit> = Vec::new();
        for value in 0..9
        {
            literals.push(lit_from_value(row, col, value));
        }
        solver.add_formula(& exactly_once(&literals));
    }
    // Add in pre-filled numbers
    solver.add_formula(& literals_from_board(&puzzle));
    solver.solve().unwrap();
    let model = solver.model().unwrap(); // None if solve didn't return Ok(true)
    println!("\nSolution:");
    let solution = board_from_solution(model);
    utils::display_grid(&solution);
}
| true |
1a8761b1c95c07c4ac1af0f6f4bcced15a74e329
|
Rust
|
drewet/fragments
|
/examples/generators.rs
|
UTF-8
| 823 | 3.671875 | 4 |
[
"MIT"
] |
permissive
|
extern crate fragments;
use fragments::Template;
use std::fmt::Show;
use std::fmt;
//This function will just concatenate the arguments.
//I expect you to make cooler generators, yourself ;)
//NOTE(review): this example targets pre-1.0 Rust (`Show`, `from_str`
//as a free function) and will not compile on modern Rust.
fn join(parts: &Vec<String>, f: &mut fmt::Formatter) -> fmt::Result {
    parts.concat().fmt(f)
}
// Demonstrates placeholder substitution plus a custom generator
// (`[[+join ...]]`) on a `fragments::Template`.
fn main() {
    //Create a new Template from a string
    let mut template: Template = from_str("Hello, [[:name]]! Is it written as 'white space' or '[[+join white space]]'?").unwrap();
    //Insert something into the `name` placeholder
    template.insert("name", "Peter");
    //Functions with the signature `fn(&Vec<String>) -> Box<Show>` will automatically implement the `Generator` trait
    template.insert_generator("join", join);
    //Result: "Hello, Peter! Is it written as 'white space' or 'whitespace'?"
    println!("Result: '{}'", template);
}
| true |
38f150d93c011ed954a4d9b7e6e214b82392a73c
|
Rust
|
l1npengtul/nokhwa
|
/nokhwa-core/src/traits.rs
|
UTF-8
| 18,919 | 2.59375 | 3 |
[
"Apache-2.0"
] |
permissive
|
/*
* Copyright 2022 l1npengtul <[email protected]> / The Nokhwa Contributors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
use crate::{
buffer::Buffer,
error::NokhwaError,
format_filter::FormatFilter,
frame_format::SourceFrameFormat,
types::{
ApiBackend, CameraControl, CameraFormat, CameraInfo, ControlValueSetter,
KnownCameraControl, Resolution,
},
};
use std::{borrow::Cow, collections::HashMap};
/// Ties an implementation to the [`ApiBackend`] variant that
/// identifies it.
pub trait Backend {
    /// The backend variant this implementation reports.
    const BACKEND: ApiBackend;
}
/// This trait is for any backend that allows you to grab and take frames from a camera.
/// Many of the backends are **blocking**, if the camera is occupied the library will block while it waits for it to become available.
///
/// **Note**:
/// - Backends, if not provided with a camera format, will be spawned with 640x480@15 FPS, MJPEG [`CameraFormat`].
/// - Behaviour can differ from backend to backend. While the Camera struct abstracts most of this away, if you plan to use the raw backend structs please read the `Quirks` section of each backend.
/// - If you call [`stop_stream()`](CaptureTrait::stop_stream()), you will usually need to call [`open_stream()`](CaptureTrait::open_stream()) to get more frames from the camera.
pub trait CaptureTrait {
    /// Initialize the camera, preparing it for use, with a random format (usually the first one).
    fn init(&mut self) -> Result<(), NokhwaError>;
    /// Initialize the camera, preparing it for use, with a format that fits the supplied [`FormatFilter`].
    fn init_with_format(&mut self, format: FormatFilter) -> Result<CameraFormat, NokhwaError>;
    /// Returns the current backend used.
    fn backend(&self) -> ApiBackend;
    /// Gets the camera information such as Name and Index as a [`CameraInfo`].
    fn camera_info(&self) -> &CameraInfo;
    /// Forcefully refreshes the stored camera format, bringing it into sync with "reality" (current camera state)
    /// # Errors
    /// If the camera can not get its most recent [`CameraFormat`]. this will error.
    fn refresh_camera_format(&mut self) -> Result<(), NokhwaError>;
    /// Gets the current [`CameraFormat`]. This will force refresh to the current latest if it has changed.
    fn camera_format(&self) -> Option<CameraFormat>;
    /// Will set the current [`CameraFormat`]
    /// This will reset the current stream if used while stream is opened.
    ///
    /// This will also update the cache.
    /// # Errors
    /// If you started the stream and the camera rejects the new camera format, this will return an error.
    fn set_camera_format(&mut self, new_fmt: CameraFormat) -> Result<(), NokhwaError>;
    /// A hashmap of [`Resolution`]s mapped to framerates. Not sorted!
    /// # Errors
    /// This will error if the camera is not queryable or a query operation has failed. Some backends will error this out as a Unsupported Operation ([`UnsupportedOperationError`](NokhwaError::UnsupportedOperationError)).
    fn compatible_list_by_resolution(
        &mut self,
        fourcc: SourceFrameFormat,
    ) -> Result<HashMap<Resolution, Vec<u32>>, NokhwaError>;
    /// Gets the compatible [`CameraFormat`] of the camera
    /// # Errors
    /// If it fails to get, this will error.
    fn compatible_camera_formats(&mut self) -> Result<Vec<CameraFormat>, NokhwaError> {
        // Default implementation: enumerate the cross product of every
        // supported frame format with its (resolution, framerate) pairs.
        let mut compatible_formats = vec![];
        for fourcc in self.compatible_fourcc()? {
            for (resolution, fps_list) in self.compatible_list_by_resolution(fourcc)? {
                for fps in fps_list {
                    compatible_formats.push(CameraFormat::new(resolution, fourcc, fps));
                }
            }
        }
        Ok(compatible_formats)
    }
    /// A Vector of compatible [`FrameFormat`]s. Will only return 2 elements at most.
    /// # Errors
    /// This will error if the camera is not queryable or a query operation has failed. Some backends will error this out as a Unsupported Operation ([`UnsupportedOperationError`](NokhwaError::UnsupportedOperationError)).
    fn compatible_fourcc(&mut self) -> Result<Vec<SourceFrameFormat>, NokhwaError>;
    /// Gets the current camera resolution (See: [`Resolution`], [`CameraFormat`]). This will force refresh to the current latest if it has changed.
    fn resolution(&self) -> Option<Resolution>;
    /// Will set the current [`Resolution`]
    /// This will reset the current stream if used while stream is opened.
    ///
    /// This will also update the cache.
    /// # Errors
    /// If you started the stream and the camera rejects the new resolution, this will return an error.
    fn set_resolution(&mut self, new_res: Resolution) -> Result<(), NokhwaError>;
    /// Gets the current camera framerate (See: [`CameraFormat`]). This will force refresh to the current latest if it has changed.
    fn frame_rate(&self) -> Option<u32>;
    /// Will set the current framerate
    /// This will reset the current stream if used while stream is opened.
    ///
    /// This will also update the cache.
    /// # Errors
    /// If you started the stream and the camera rejects the new framerate, this will return an error.
    fn set_frame_rate(&mut self, new_fps: u32) -> Result<(), NokhwaError>;
    /// Gets the current camera's frame format (See: [`FrameFormat`], [`CameraFormat`]). This will force refresh to the current latest if it has changed.
    fn frame_format(&self) -> SourceFrameFormat;
    /// Will set the current [`FrameFormat`]
    /// This will reset the current stream if used while stream is opened.
    ///
    /// This will also update the cache.
    /// # Errors
    /// If you started the stream and the camera rejects the new frame format, this will return an error.
    fn set_frame_format(&mut self, fourcc: SourceFrameFormat)
        -> Result<(), NokhwaError>;
    /// Gets the value of [`KnownCameraControl`].
    /// # Errors
    /// If the `control` is not supported or there is an error while getting the camera control values (e.g. unexpected value, too high, etc)
    /// this will error.
    fn camera_control(&self, control: KnownCameraControl) -> Result<CameraControl, NokhwaError>;
    /// Gets the current supported list of [`KnownCameraControl`]
    /// # Errors
    /// If the list cannot be collected, this will error. This can be treated as a "nothing supported".
    fn camera_controls(&self) -> Result<Vec<CameraControl>, NokhwaError>;
    /// Sets the control to `control` in the camera.
    /// Usually, the pipeline is calling [`camera_control()`](CaptureTrait::camera_control), getting a camera control that way
    /// then calling [`value()`](CameraControl::value()) to get a [`ControlValueSetter`] and setting the value that way.
    /// # Errors
    /// If the `control` is not supported, the value is invalid (less than min, greater than max, not in step), or there was an error setting the control,
    /// this will error.
    fn set_camera_control(
        &mut self,
        id: KnownCameraControl,
        value: ControlValueSetter,
    ) -> Result<(), NokhwaError>;
    /// Will open the camera stream with set parameters. This will be called internally if you try and call [`frame()`](CaptureTrait::frame()) before you call [`open_stream()`](CaptureTrait::open_stream()).
    /// # Errors
    /// If the specific backend fails to open the camera (e.g. already taken, busy, doesn't exist anymore) this will error.
    fn open_stream(&mut self) -> Result<(), NokhwaError>;
    /// Checks if stream if open. If it is, it will return true.
    fn is_stream_open(&self) -> bool;
    /// Will get a frame from the camera as a [`Buffer`]. Depending on the backend, if you have not called [`open_stream()`](CaptureTrait::open_stream()) before you called this,
    /// it will either return an error.
    /// # Errors
    /// If the backend fails to get the frame (e.g. already taken, busy, doesn't exist anymore), the decoding fails (e.g. MJPEG -> u8), or [`open_stream()`](CaptureTrait::open_stream()) has not been called yet,
    /// this will error.
    fn frame(&mut self) -> Result<Buffer, NokhwaError>;
    /// Will get a frame from the camera **without** any processing applied, meaning you will usually get a frame you need to decode yourself.
    /// # Errors
    /// If the backend fails to get the frame (e.g. already taken, busy, doesn't exist anymore), or [`open_stream()`](CaptureTrait::open_stream()) has not been called yet, this will error.
    fn frame_raw(&mut self) -> Result<Cow<[u8]>, NokhwaError>;
    // Disabled wgpu texture-upload helper, kept for reference; gated
    // behind the (currently commented-out) `wgpu-types` feature.
    // #[cfg(feature = "wgpu-types")]
    // #[cfg_attr(feature = "docs-features", doc(cfg(feature = "wgpu-types")))]
    // /// Directly copies a frame to a Wgpu texture. This will automatically convert the frame into a RGBA frame.
    // /// # Errors
    // /// If the frame cannot be captured or the resolution is 0 on any axis, this will error.
    // fn frame_texture<'a>(
    //     &mut self,
    //     device: &WgpuDevice,
    //     queue: &WgpuQueue,
    //     label: Option<&'a str>,
    // ) -> Result<WgpuTexture, NokhwaError> {
    //     use crate::pixel_format::RgbAFormat;
    //     use std::num::NonZeroU32;
    //     let frame = self.frame()?.decode_image::<RgbAFormat>()?;
    //
    //     let texture_size = Extent3d {
    //         width: frame.width(),
    //         height: frame.height(),
    //         depth_or_array_layers: 1,
    //     };
    //
    //     let texture = device.create_texture(&TextureDescriptor {
    //         label,
    //         size: texture_size,
    //         mip_level_count: 1,
    //         sample_count: 1,
    //         dimension: TextureDimension::D2,
    //         format: TextureFormat::Rgba8UnormSrgb,
    //         usage: TextureUsages::TEXTURE_BINDING | TextureUsages::COPY_DST,
    //     });
    //
    //     let width_nonzero = match NonZeroU32::try_from(4 * frame.width()) {
    //         Ok(w) => Some(w),
    //         Err(why) => return Err(NokhwaError::ReadFrameError(why.to_string())),
    //     };
    //
    //     let height_nonzero = match NonZeroU32::try_from(frame.height()) {
    //         Ok(h) => Some(h),
    //         Err(why) => return Err(NokhwaError::ReadFrameError(why.to_string())),
    //     };
    //
    //     queue.write_texture(
    //         ImageCopyTexture {
    //             texture: &texture,
    //             mip_level: 0,
    //             origin: wgpu::Origin3d::ZERO,
    //             aspect: TextureAspect::All,
    //         },
    //         &frame,
    //         ImageDataLayout {
    //             offset: 0,
    //             bytes_per_row: width_nonzero,
    //             rows_per_image: height_nonzero,
    //         },
    //         texture_size,
    //     );
    //
    //     Ok(texture)
    // }
    /// Will drop the stream.
    /// # Errors
    /// Please check the `Quirks` section of each backend.
    fn stop_stream(&mut self) -> Result<(), NokhwaError>;
}
/// Lets any concrete (`'static`) capture backend be converted into a
/// boxed trait object via `.into()` / `From::from`.
impl<T> From<T> for Box<dyn CaptureTrait>
where
    T: CaptureTrait + 'static,
{
    fn from(backend: T) -> Self {
        Box::new(backend)
    }
}
#[cfg(feature = "async")]
#[cfg_attr(feature = "async", async_trait::async_trait)]
/// Async companion to [`CaptureTrait`]: mirrors its fallible methods
/// under `*_async` names (requires the `async` feature).
pub trait AsyncCaptureTrait: CaptureTrait {
    /// Initialize the camera, preparing it for use, with a random format (usually the first one).
    async fn init_async(&mut self) -> Result<(), NokhwaError>;
    /// Initialize the camera, preparing it for use, with a format that fits the supplied [`FormatFilter`].
    async fn init_with_format_async(&mut self, format: FormatFilter)
        -> Result<CameraFormat, NokhwaError>;
    /// Forcefully refreshes the stored camera format, bringing it into sync with "reality" (current camera state)
    /// # Errors
    /// If the camera can not get its most recent [`CameraFormat`]. this will error.
    async fn refresh_camera_format_async(&mut self) -> Result<(), NokhwaError>;
    /// Will set the current [`CameraFormat`]
    /// This will reset the current stream if used while stream is opened.
    ///
    /// This will also update the cache.
    /// # Errors
    /// If you started the stream and the camera rejects the new camera format, this will return an error.
    async fn set_camera_format_async(&mut self, new_fmt: CameraFormat) -> Result<(), NokhwaError>;
    /// A hashmap of [`Resolution`]s mapped to framerates. Not sorted!
    /// # Errors
    /// This will error if the camera is not queryable or a query operation has failed. Some backends will error this out as a Unsupported Operation ([`UnsupportedOperationError`](NokhwaError::UnsupportedOperationError)).
    async fn compatible_list_by_resolution_async(
        &mut self,
        fourcc: SourceFrameFormat,
    ) -> Result<HashMap<Resolution, Vec<u32>>, NokhwaError>;
    /// Gets the compatible [`CameraFormat`] of the camera
    /// # Errors
    /// If it fails to get, this will error.
    async fn compatible_camera_formats_async(&mut self) -> Result<Vec<CameraFormat>, NokhwaError>;
    /// A Vector of compatible [`FrameFormat`]s. Will only return 2 elements at most.
    /// # Errors
    /// This will error if the camera is not queryable or a query operation has failed. Some backends will error this out as a Unsupported Operation ([`UnsupportedOperationError`](NokhwaError::UnsupportedOperationError)).
    async fn compatible_fourcc_async(&mut self) -> Result<Vec<SourceFrameFormat>, NokhwaError>;
    /// Will set the current [`Resolution`]
    /// This will reset the current stream if used while stream is opened.
    ///
    /// This will also update the cache.
    /// # Errors
    /// If you started the stream and the camera rejects the new resolution, this will return an error.
    async fn set_resolution_async(&mut self, new_res: Resolution) -> Result<(), NokhwaError>;
    /// Will set the current framerate
    /// This will reset the current stream if used while stream is opened.
    ///
    /// This will also update the cache.
    /// # Errors
    /// If you started the stream and the camera rejects the new framerate, this will return an error.
    async fn set_frame_rate_async(&mut self, new_fps: u32) -> Result<(), NokhwaError>;
    /// Will set the current [`FrameFormat`]
    /// This will reset the current stream if used while stream is opened.
    ///
    /// This will also update the cache.
    /// # Errors
    /// If you started the stream and the camera rejects the new frame format, this will return an error.
    async fn set_frame_format_async(
        &mut self,
        fourcc: SourceFrameFormat,
    ) -> Result<(), NokhwaError>;
    /// Gets the value of [`KnownCameraControl`].
    /// # Errors
    /// If the `control` is not supported or there is an error while getting the camera control values (e.g. unexpected value, too high, etc)
    /// this will error.
    async fn camera_control_async(&self, control: KnownCameraControl) -> Result<CameraControl, NokhwaError>;
    /// Gets the current supported list of [`KnownCameraControl`]
    /// # Errors
    /// If the list cannot be collected, this will error. This can be treated as a "nothing supported".
    async fn camera_controls_async(&self) -> Result<Vec<CameraControl>, NokhwaError>;
    /// Sets the control to `control` in the camera.
    /// Usually, the pipeline is calling [`camera_control()`](CaptureTrait::camera_control), getting a camera control that way
    /// then calling [`value()`](CameraControl::value()) to get a [`ControlValueSetter`] and setting the value that way.
    /// # Errors
    /// If the `control` is not supported, the value is invalid (less than min, greater than max, not in step), or there was an error setting the control,
    /// this will error.
    async fn set_camera_control_async(
        &mut self,
        id: KnownCameraControl,
        value: ControlValueSetter,
    ) -> Result<(), NokhwaError>;
    /// Will open the camera stream with set parameters. This will be called internally if you try and call [`frame()`](CaptureTrait::frame()) before you call [`open_stream()`](CaptureTrait::open_stream()).
    /// # Errors
    /// If the specific backend fails to open the camera (e.g. already taken, busy, doesn't exist anymore) this will error.
    async fn open_stream_async(&mut self) -> Result<(), NokhwaError>;
    /// Will get a frame from the camera as a [`Buffer`]. Depending on the backend, if you have not called [`open_stream()`](CaptureTrait::open_stream()) before you called this,
    /// it will either return an error.
    /// # Errors
    /// If the backend fails to get the frame (e.g. already taken, busy, doesn't exist anymore), the decoding fails (e.g. MJPEG -> u8), or [`open_stream()`](CaptureTrait::open_stream()) has not been called yet,
    /// this will error.
    async fn frame_async(&mut self) -> Result<Buffer, NokhwaError>;
    /// Will get a frame from the camera **without** any processing applied, meaning you will usually get a frame you need to decode yourself.
    /// # Errors
    /// If the backend fails to get the frame (e.g. already taken, busy, doesn't exist anymore), or [`open_stream()`](CaptureTrait::open_stream()) has not been called yet, this will error.
    async fn frame_raw_async(&mut self) -> Result<Cow<[u8]>, NokhwaError>;
    /// Will drop the stream.
    /// # Errors
    /// Please check the `Quirks` section of each backend.
    async fn stop_stream_async(&mut self) -> Result<(), NokhwaError>;
}
#[cfg(feature = "async")]
/// Lets any concrete (`'static`) async capture backend be converted
/// into a boxed trait object via `.into()` / `From::from`.
impl<T> From<T> for Box<dyn AsyncCaptureTrait>
where
    T: AsyncCaptureTrait + 'static,
{
    fn from(backend: T) -> Self {
        Box::new(backend)
    }
}
/// Convenience extension: grab a single frame regardless of stream state.
pub trait OneShot: CaptureTrait {
    /// Returns one frame. If the stream is already open this just reads
    /// a frame; otherwise it opens the stream, reads one frame, and
    /// stops the stream again before returning.
    /// # Errors
    /// Propagates any error from `open_stream`, `frame`, or `stop_stream`.
    fn one_shot(&mut self) -> Result<Buffer, NokhwaError> {
        if self.is_stream_open() {
            self.frame()
        } else {
            self.open_stream()?;
            let frame = self.frame()?;
            self.stop_stream()?;
            Ok(frame)
        }
    }
}
#[cfg(feature = "async")]
#[cfg_attr(feature = "async", async_trait::async_trait)]
/// Async counterpart of [`OneShot`]: grab a single frame regardless of
/// stream state (requires the `async` feature).
pub trait AsyncOneShot: AsyncCaptureTrait {
    /// Returns one frame. If the stream is already open this just reads
    /// a frame; otherwise it opens the stream, reads one frame, and
    /// stops the stream again before returning.
    /// # Errors
    /// Propagates any error from the underlying async stream/frame calls.
    async fn one_shot(&mut self) -> Result<Buffer, NokhwaError> {
        if self.is_stream_open() {
            self.frame_async().await
        } else {
            self.open_stream_async().await?;
            let frame = self.frame_async().await?;
            self.stop_stream_async().await?;
            Ok(frame)
        }
    }
}
/// Marker trait, presumably identifying virtual (non-hardware) backends.
/// NOTE(review): currently empty — its semantics are not defined in
/// this file; confirm intended use with implementors.
pub trait VirtualBackendTrait {}
| true |
2758cabb9cdc99e65d8594d3bd1a9acb204f9e94
|
Rust
|
TPackard/tock
|
/kernel/src/capabilities.rs
|
UTF-8
| 2,455 | 3.84375 | 4 |
[
"MIT",
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
] |
permissive
|
//! Special restricted capabilities.
//!
//! Rust provides a mechanism for restricting certain operations to only be used
//! by trusted code through the `unsafe` keyword. This is very useful, but
//! doesn't provide very granular access: code can either access _all_ `unsafe`
//! things, or none.
//!
//! Capabilities are the mechanism in Tock that provides more granular access.
//! For sensitive operations (e.g. operations that could violate isolation)
//! callers must have a particular capability. The type system ensures that the
//! caller does in fact have the capability, and `unsafe` is used to ensure that
//! callers cannot create the capability type themselves.
//!
//! Capabilities are passed to modules from trusted code (i.e. code that can
//! call `unsafe`).
//!
//! Capabilities are expressed as `unsafe` traits. Only code that can use
//! `unsafe` mechanisms can instantiate an object that provides an `unsafe`
//! trait. Functions that require certain capabilities require that they are
//! passed an object that provides the correct capability trait. The object
//! itself does not have to be marked `unsafe`.
//!
//! Creating an object that expresses a capability is straightforward:
//!
//! ```
//! use kernel::capabilities::ProcessManagementCapability;
//!
//! struct ProcessMgmtCap;
//! unsafe impl ProcessManagementCapability for ProcessMgmtCap {}
//! ```
//!
//! Now anything that has a ProcessMgmtCap can call any function that requires
//! the `ProcessManagementCapability` capability.
//!
//! Requiring a certain capability is also straightforward:
//!
//! ```ignore
//! pub fn manage_process<C: ProcessManagementCapability>(_c: &C) {
//! unsafe {
//! ...
//! }
//! }
//! ```
//!
//! Anything that calls `manage_process` must have a reference to some object
//! that provides the `ProcessManagementCapability` trait, which proves that it
//! has the correct capability.
/// The `ProcessManagementCapability` allows the holder to control
/// process execution, such as related to creating, restarting, and
/// otherwise managing processes.
///
/// Only code that can write `unsafe impl` can mint this capability
/// (see the module documentation above).
pub unsafe trait ProcessManagementCapability {}
/// The `MainLoopCapability` capability allows the holder to start executing
/// the main scheduler loop in Tock.
///
/// Only code that can write `unsafe impl` can mint this capability
/// (see the module documentation above).
pub unsafe trait MainLoopCapability {}
/// The `MemoryAllocationCapability` capability allows the holder to allocate
/// memory, for example by creating grants.
///
/// Only code that can write `unsafe impl` can mint this capability
/// (see the module documentation above).
pub unsafe trait MemoryAllocationCapability {}
| true |
568eff1aae4ec88b46905a3794e967655798e128
|
Rust
|
daviscai/crab
|
/src/view/app.rs
|
UTF-8
| 1,604 | 2.84375 | 3 |
[
"MIT"
] |
permissive
|
use yew::prelude::*;
use crate::component::base::{boxes::Boxes, boxes::Hovered, theme::Theme};
// #[derive(Properties, PartialEq, Clone)]
// pub struct AppProps {
// }
/// Root application component.
#[derive(Debug)]
pub struct App {
    // Name of the active theme passed to `Theme::get_theme_class`
    // (set to "bootstrap" in `create`).
    theme: String,
    // yew component link; stored by `create` but not read elsewhere
    // in this view.
    link: ComponentLink<Self>,
}
/// Messages handled by [`App::update`].
#[derive(Debug)]
pub enum Msg {
    Hover
}
impl Component for App {
    type Message = Msg;
    type Properties = ();
    // Builds the component with a fixed theme; no props are used.
    fn create(_: Self::Properties, link: ComponentLink<Self>) -> Self {
        App {
            // Set the theme style; currently only "bootstrap" is used here.
            theme: "bootstrap".into(),
            link: link
        }
    }
    // Logs the incoming message and always requests a re-render.
    fn update(&mut self, msg: Self::Message) -> ShouldRender {
        log::info!("====11: {:?}", msg);
        true
    }
    // Props are `()`, so a props change never triggers a re-render.
    fn change(&mut self, _: Self::Properties) -> bool {
        false
    }
    // Renders a themed container with a three-column row and a filler box.
    fn view(&self) -> Html {
        let theme_class = Theme::get_theme_class(self.theme.as_str());
        html! {
            <div id="container" class={theme_class}>
                <Boxes class="container-sm">
                    <div class="row">
                        <div class="col-4">
                        { "One of three columns" }
                        </div>
                        <div class="col-4">
                        { "One of three columns" }
                        </div>
                        <div class="col-4">
                        { "One of three columns" }
                        </div>
                    </div>
                    <Boxes style="background:#eee" width="100%" height="50px" />
                </Boxes>
            </div>
        }
    }
}
| true |
4ee41ea610c8ff3eefcbf48faa1a3da49ae39e4e
|
Rust
|
davbo/cryptopals
|
/src/set3/challenge17.rs
|
UTF-8
| 4,260 | 2.78125 | 3 |
[] |
no_license
|
extern crate rand;
extern crate rustc_serialize;
extern crate openssl;
use self::openssl::symm::Mode;
use set2::challenge10::cbc_mode;
const BLOCK_LENGTH: usize = 16;
const INPUT_STRINGS: [&'static str; 10] = [
    "MDAwMDAwTm93IHRoYXQgdGhlIHBhcnR5IGlzIGp1bXBpbmc=",
    "MDAwMDAxV2l0aCB0aGUgYmFzcyBraWNrZWQgaW4gYW5kIHRoZSBWZWdhJ3MgYXJlIHB1bXBpbic=",
    "MDAwMDAyUXVpY2sgdG8gdGhlIHBvaW50LCB0byB0aGUgcG9pbnQsIG5vIGZha2luZw==",
    "MDAwMDAzQ29va2luZyBNQydzIGxpa2UgYSBwb3VuZCBvZiBiYWNvbg==",
    "MDAwMDA0QnVybmluZyAnZW0sIGlmIHlvdSBhaW4ndCBxdWljayBhbmQgbmltYmxl",
    "MDAwMDA1SSBnbyBjcmF6eSB3aGVuIEkgaGVhciBhIGN5bWJhbA==",
    "MDAwMDA2QW5kIGEgaGlnaCBoYXQgd2l0aCBhIHNvdXBlZCB1cCB0ZW1wbw==",
    "MDAwMDA3SSdtIG9uIGEgcm9sbCwgaXQncyB0aW1lIHRvIGdvIHNvbG8=",
    "MDAwMDA4b2xsaW4nIGluIG15IGZpdmUgcG9pbnQgb2g=",
    "MDAwMDA5aXRoIG15IHJhZy10b3AgZG93biBzbyBteSBoYWlyIGNhbiBibG93",
];
/// Applies PKCS#7 padding to `input`.
///
/// Always appends between 1 and `BLOCK_LENGTH` bytes, each equal to the
/// number of padding bytes; a block-aligned input gains a full block of
/// `16u8`. (The previous `padding_length == 0` branch was unreachable:
/// `BLOCK_LENGTH - (len % BLOCK_LENGTH)` is always in `1..=BLOCK_LENGTH`,
/// and the else branch already produced the correct full-block padding.)
fn pad(mut input: Vec<u8>) -> Vec<u8> {
    let padding_length = BLOCK_LENGTH - (input.len() % BLOCK_LENGTH);
    let input_len = input.len();
    input.resize(input_len + padding_length, padding_length as u8);
    input
}
/// Decrypts `ciphertext` under `key`/`iv` and reports whether the result
/// ends in valid PKCS#7 padding — the padding oracle being attacked.
fn decrypt_and_check_padding(ciphertext: Vec<u8>, key: &[u8], iv: &[u8]) -> bool {
    let decrypted = cbc_mode(ciphertext, key, iv, Mode::Decrypt);
    // NOTE(review): indexing panics if `decrypted` is empty; `cbc_mode`
    // output is assumed non-empty here — confirm for defensive use.
    let final_byte = decrypted[decrypted.len()-1];
    let expected_padding = vec![final_byte; final_byte as usize];
    // Check if padding is empty (e.g. padded byte is 0)
    !expected_padding.is_empty() && decrypted.ends_with(&expected_padding)
}
/// Tries one `guess` for the intermediate byte at `target_index` of
/// `attacking_block` by forging an IV that yields valid padding iff the
/// guess is right. Returns `Ok(intermediate_byte)` on a padding hit,
/// `Err(guess)` otherwise.
fn test_byte(guess: u8, target_index: usize, attacking_block: Vec<u8>, discovered_bytes: &Vec<u8>, key: &[u8]) -> Result<u8, u8> {
    let mut guess_block = vec![0;target_index];
    // Padding value the forged IV must make the plaintext tail decrypt to.
    let expected_padding_byte: u8 = BLOCK_LENGTH as u8 - target_index as u8;
    guess_block.push(guess);
    // Fill the tail of the forged IV from already-recovered intermediate
    // bytes so each suffix position decrypts to `expected_padding_byte`.
    for val in discovered_bytes.iter().rev() {
        if guess_block.len() == BLOCK_LENGTH {
            break;
        }
        guess_block.push(val ^ expected_padding_byte);
    }
    if decrypt_and_check_padding(attacking_block.to_vec(), &key, &guess_block) {
        Ok(guess ^ expected_padding_byte)
    } else {
        Err(guess)
    }
}
#[test]
fn pkcs7_padding() {
    // 14 bytes -> 2 bytes of 0x02; aligned inputs gain a full block of 16s.
    assert_eq!(vec![1,2,3,4,5,6,7,8,9,10,11,12,13,14,2,2], pad(vec![1,2,3,4,5,6,7,8,9,10,11,12,13,14]));
    assert_eq!(vec![1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16], pad(vec![1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16]));
    assert_eq!(vec![1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15], pad(vec![1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17]));
}
// CBC padding-oracle attack: recover the plaintext of a random input
// string using only `decrypt_and_check_padding` as the oracle.
#[test]
fn challenge17() {
    use self::rustc_serialize::base64::FromBase64;
    use self::rand::{thread_rng, Rng};
    use set1::challenge2::fixed_xor;
    let mut rng = thread_rng();
    let aes_key = rng.gen::<[u8;16]>();
    let iv = rng.gen::<[u8;16]>();
    let choices = &INPUT_STRINGS;
    let input_string = rng.choose(choices).unwrap().from_base64().unwrap();
    let padded_input = pad(input_string);
    let ciphertext = cbc_mode(padded_input.clone(), &aes_key, &iv, Mode::Encrypt);
    assert!(decrypt_and_check_padding(ciphertext.clone(), &aes_key, &iv));
    // Recover the AES "intermediate" state byte-by-byte, last block first.
    let mut intermediary: Vec<u8> = vec![];
    for chunk in ciphertext.chunks(BLOCK_LENGTH).rev() {
        for target_index in (0..BLOCK_LENGTH).rev() {
            let mut options = vec![];
            for guess in u8::min_value()..u8::max_value() {
                match test_byte(guess, target_index, chunk.to_vec(), &intermediary, &aes_key) {
                    Ok(v) => options.push(v),
                    Err(_) => continue,
                }
            }
            if options.is_empty() {
                panic!("no value for {}", target_index)
            } else if options.len() == 1 {
                intermediary.push(options[0])
            } else {
                panic!("More than 1 option with valid padding, entirely possible to happen but not handled here.")
            }
        }
    }
    println!("Intermediary len: {:?}, Ciphertext len: {:?}", intermediary.len(), ciphertext.len());
    // Plaintext block i = intermediate(block i+1) XOR ciphertext(block i);
    // the first ciphertext block pairs with the IV, so it is skipped here.
    let mut cipher_iter = ciphertext.chunks(BLOCK_LENGTH).rev();
    cipher_iter.next();
    let mut plaintext_blocks: Vec<Vec<u8>> = vec![];
    for (cipher_block, intermediary_block) in cipher_iter.zip(intermediary.chunks(BLOCK_LENGTH)) {
        // Intermediary bytes were pushed in reverse order within each block.
        let mut inter_block = intermediary_block.clone().to_owned();
        inter_block.reverse();
        plaintext_blocks.insert(0, fixed_xor(inter_block.as_slice(), cipher_block));
    }
    let plaintext: Vec<u8> = plaintext_blocks.concat();
    println!("{:?}", plaintext);
    assert_eq!(padded_input[padded_input.len()-plaintext.len()..].to_vec(), plaintext);
}
| true |
9d5494ddd1b6542faa089f661a5876facfdcd2c9
|
Rust
|
grenade/poloniex-rs
|
/src/types/cancel_order_response.rs
|
UTF-8
| 864 | 3.265625 | 3 |
[
"MIT"
] |
permissive
|
use super::deserialize::{number_to_bool, string_to_f64};
/// Response payload of Poloniex's `cancelOrder` endpoint.
#[derive(Debug, Clone, Deserialize)]
pub struct CancelOrderResponse {
    /// Wire field `success` arrives as 0/1 and is decoded to a bool.
    #[serde(rename = "success")]
    #[serde(deserialize_with = "number_to_bool")]
    pub is_success: bool,
    /// Arrives as a decimal string (e.g. "0.01000000") and is parsed to f64.
    #[serde(deserialize_with = "string_to_f64")]
    pub amount: f64,
    /// Human-readable status message.
    pub message: String
}
#[cfg(test)]
mod tests {
    use super::*;
    use serde_json;
    // Round-trips a representative API response through serde, checking
    // the numeric-bool and string-float conversions.
    #[test]
    fn test_deserialize() {
        let json = r#"
    {
      "success":1,
      "amount":"0.01000000",
      "message":"Order #332042333440 canceled."
    }
    "#;
        let resp: CancelOrderResponse = serde_json::from_str(json).unwrap();
        assert_eq!(resp.is_success, true);
        assert_eq!(resp.amount, 0.01);
        assert_eq!(resp.message, "Order #332042333440 canceled.");
    }
}
| true |
e700af4e00228a4fb876b654d8beb87a3fbf1ffb
|
Rust
|
ferristseng/rust-lifx
|
/src/header.rs
|
UTF-8
| 5,819 | 2.703125 | 3 |
[
"MIT"
] |
permissive
|
use rustc_serialize::{Decodable, Decoder, Encodable, Encoder};
/// LIFX message header; serializes to 36 bytes (see `Header::mem_size`).
#[derive(Debug, Eq, PartialEq)]
pub struct Header {
    // Message size in bytes.
    size: u16,
    // 2-bit origin indicator (always 0 via `new`).
    origin: u8,
    // Packed as bit 13 of the protocol word when encoding.
    tagged: bool,
    // Packed as bit 12 of the protocol word when encoding.
    addressable: bool,
    // 12-bit protocol number (1024 via `new`).
    protocol: u16,
    source: u32,
    target: u64,
    ack_required: bool,
    res_required: bool,
    sequence: u8,
    typ: u16,
}
impl Header {
    /// Builds a header with the protocol constants fixed: origin 0,
    /// `addressable` set, and protocol number 1024.
    #[inline]
    pub fn new(
        size: u16,
        tagged: bool,
        source: u32,
        target: u64,
        ack_required: bool,
        res_required: bool,
        sequence: u8,
        typ: u16,
    ) -> Header {
        Header {
            size,
            origin: 0,
            tagged,
            addressable: true,
            protocol: 1024,
            source,
            target,
            ack_required,
            res_required,
            sequence,
            typ,
        }
    }
    /// Target device address.
    #[inline(always)]
    pub fn target(&self) -> u64 {
        self.target
    }
    /// Message type code.
    #[inline(always)]
    pub fn typ(&self) -> u16 {
        self.typ
    }
    /// Declared message size in bytes.
    #[inline(always)]
    pub fn size(&self) -> u16 {
        self.size
    }
    /// Serialized size of the header itself: 36 bytes.
    #[inline(always)]
    pub fn mem_size() -> u16 {
        36
    }
}
impl Default for Header {
    // Zeroed header: tagged, ack and response both requested, type 0.
    fn default() -> Header {
        Header::new(0, true, 0, 0, true, true, 0, 0)
    }
}
impl Encodable for Header {
    /// Serializes the header into the LIFX wire layout: frame, frame
    /// address, then protocol header, with flag bits packed into shared
    /// words.
    fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> {
        s.emit_struct("Header", 36, |s| {
            // FRAME
            try!(s.emit_struct_field("size", 0, |s| s.emit_u16(self.size)));
            try!(
                s.emit_struct_field("origin_tagged_addressable_protocol", 1, |s| {
                    // NOTE(review): the decoder extracts origin from bits
                    // 14-15, but here `origin` is OR'd in unshifted. Harmless
                    // while `origin` is always 0 (see `new`), but asymmetric.
                    let mut value = self.origin as u16;
                    if self.tagged {
                        value |= 0b0010_0000_0000_0000;
                    }
                    if self.addressable {
                        value |= 0b0001_0000_0000_0000;
                    }
                    s.emit_u16(self.protocol | (value as u16))
                })
            );
            try!(s.emit_struct_field("source", 2, |s| s.emit_u32(self.source)));
            // FRAME ADDRESS
            try!(s.emit_struct_field("target", 3, |s| s.emit_u64(self.target)));
            // Six reserved zero bytes.
            try!(s.emit_struct_field("res0", 4, |s| s.emit_seq(6, |s| {
                for i in 0..6 {
                    try!(s.emit_seq_elt(i, |s| s.emit_u8(0)))
                }
                Ok(())
            })));
            // ack_required is bit 1, res_required bit 0; upper bits reserved.
            try!(s.emit_struct_field("res1_ackreq_resreq", 5, |s| {
                let mut value: u8 = 0;
                if self.ack_required {
                    value |= 0b0000_0010;
                }
                if self.res_required {
                    value |= 0b0000_0001;
                }
                s.emit_u8(value)
            }));
            try!(s.emit_struct_field("sequence", 6, |s| s.emit_u8(self.sequence)));
            // PROTOCOL HEADER
            try!(s.emit_struct_field("res2", 7, |s| s.emit_u64(0)));
            try!(s.emit_struct_field("type", 8, |s| s.emit_u16(self.typ)));
            try!(s.emit_struct_field("res3", 9, |s| s.emit_u16(0)));
            Ok(())
        })
    }
}
impl Decodable for Header {
    /// Reads a header from the LIFX wire layout, unpacking the flag bits
    /// out of the shared words; reserved fields are read and discarded.
    fn decode<D: Decoder>(d: &mut D) -> Result<Self, D::Error> {
        let mut header: Header = Default::default();
        try!(d.read_struct("Header", 36, |d| {
            // FRAME
            try!(
                d.read_struct_field("size", 0, |d| d.read_u16().and_then(|v| {
                    header.size = v;
                    Ok(())
                }))
            );
            try!(
                d.read_struct_field(
                    "origin_tagged_addressable_protocol",
                    1,
                    |d| d.read_u16().and_then(|v| {
                        // origin: bits 14-15; tagged: bit 13; addressable:
                        // bit 12; protocol: low 12 bits.
                        header.origin = ((v & 0b1100_0000_0000_0000) >> 14) as u8;
                        header.tagged = (v & 0b0010_0000_0000_0000) > 0;
                        header.addressable = (v & 0b001_0000_0000_0000) > 0;
                        header.protocol = v & 0b0000_1111_1111_1111;
                        Ok(())
                    })
                )
            );
            try!(
                d.read_struct_field("source", 2, |d| d.read_u32().and_then(|v| {
                    header.source = v;
                    Ok(())
                }))
            );
            // FRAME ADDRESS
            try!(
                d.read_struct_field("target", 3, |d| d.read_u64().and_then(|v| {
                    header.target = v;
                    Ok(())
                }))
            );
            // Six reserved bytes, read and ignored.
            try!(d.read_struct_field("res0", 4, |d| d.read_seq(|d, _| {
                for i in 0..6 {
                    try!(d.read_seq_elt(i, |d| d.read_u8()));
                }
                Ok(())
            })));
            try!(
                d.read_struct_field("res1_ackreq_resreq", 5, |d| d.read_u8().and_then(
                    |v| {
                        header.ack_required = v & 0b0000_0010 > 0;
                        header.res_required = v & 0b0000_0001 > 0;
                        Ok(())
                    }
                ))
            );
            try!(
                d.read_struct_field("sequence", 6, |d| d.read_u8().and_then(|v| {
                    header.sequence = v;
                    Ok(())
                }))
            );
            // PROTOCOL HEADER (res2/res3 are reserved and discarded)
            try!(d.read_struct_field("res2", 7, |d| d.read_u64()));
            try!(
                d.read_struct_field("type", 8, |d| d.read_u16().and_then(|v| {
                    header.typ = v;
                    Ok(())
                }))
            );
            try!(d.read_struct_field("res3", 9, |d| d.read_u16()));
            Ok(())
        }));
        Ok(header)
    }
}
// Encoding must match a known-good 36-byte capture.
#[test]
fn test_header_encode_correctness() {
    use serialize;
    let correct = [
        0x24, 0x0, 0x0, 0x34, 0x29, 0xb9, 0x36, 0xa9, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
        0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
        0x0, 0x2, 0x0, 0x0, 0x0,
    ];
    let header: Header = Header::new(36, true, 2838935849, 0, false, true, 0, 2);
    assert_eq!(&correct[..], &serialize::encode(&header).unwrap()[..])
}
// JSON round-trip preserves all fields.
#[test]
fn test_encode_decode_json() {
    use rustc_serialize::json;
    let header: Header = Header::new(128, true, 256, 1000, false, false, 1, 12);
    let encode = json::as_pretty_json(&header).to_string();
    let decode: Header = json::decode(&encode[..]).unwrap();
    assert_eq!(decode, header);
}
// Binary round-trip through the crate's serializer preserves all fields.
#[test]
fn test_encode_decode_serializer() {
    use serialize;
    let header: Header = Header::new(128, true, 256, 1000, false, false, 1, 12);
    let encode = serialize::encode(&header).unwrap();
    let decode: Header = serialize::decode(&encode[..]).unwrap();
    assert_eq!(decode, header);
}
| true |
8594d4382f41cc5bb823c3abcae865748f14cb2e
|
Rust
|
jiayihu/akri
|
/agent/src/protocols/opcua/discovery_handler.rs
|
UTF-8
| 2,163 | 2.515625 | 3 |
[
"MIT"
] |
permissive
|
use super::super::{DiscoveryHandler, DiscoveryResult};
use super::{discovery_impl::do_standard_discovery, OPCUA_DISCOVERY_URL_LABEL};
use akri_shared::akri::configuration::{OpcuaDiscoveryHandlerConfig, OpcuaDiscoveryMethod};
use async_trait::async_trait;
use failure::Error;
/// `OpcuaDiscoveryHandler` discovers the OPC UA server instances as described by the `discovery_handler_config.opcua_discovery_method`
/// and the filter `discover_handler_config.application_names`. The instances it discovers are always shared.
#[derive(Debug)]
pub struct OpcuaDiscoveryHandler {
    // Owned copy of the OPC UA discovery configuration (method + filters).
    discovery_handler_config: OpcuaDiscoveryHandlerConfig,
}
impl OpcuaDiscoveryHandler {
    /// Builds a handler from a borrowed configuration, storing its own copy.
    pub fn new(discovery_handler_config: &OpcuaDiscoveryHandlerConfig) -> Self {
        let discovery_handler_config = discovery_handler_config.clone();
        OpcuaDiscoveryHandler { discovery_handler_config }
    }
}
#[async_trait]
impl DiscoveryHandler for OpcuaDiscoveryHandler {
    /// Discovers OPC UA servers per the configured discovery method and
    /// application-name filter, returning one `DiscoveryResult` per
    /// discovered DiscoveryURL. All results are shared (see `are_shared`).
    async fn discover(&self) -> Result<Vec<DiscoveryResult>, Error> {
        let discovery_urls: Vec<String> =
            match &self.discovery_handler_config.opcua_discovery_method {
                OpcuaDiscoveryMethod::standard(standard_opcua_discovery) => do_standard_discovery(
                    standard_opcua_discovery.discovery_urls.clone(),
                    self.discovery_handler_config.application_names.clone(),
                ),
                // No other discovery methods implemented yet
            };
        // Build DiscoveryResult for each server discovered
        Ok(discovery_urls
            .into_iter()
            .map(|discovery_url| {
                // Each result carries its DiscoveryURL as a property so
                // consumers can reach the server.
                let mut properties = std::collections::HashMap::new();
                trace!(
                    "discover - found OPC UA server at DiscoveryURL {}",
                    discovery_url
                );
                properties.insert(OPCUA_DISCOVERY_URL_LABEL.to_string(), discovery_url.clone());
                DiscoveryResult::new(&discovery_url, properties, self.are_shared().unwrap())
            })
            .collect::<Vec<DiscoveryResult>>())
    }
    /// OPC UA server instances are always shared across nodes.
    fn are_shared(&self) -> Result<bool, Error> {
        Ok(true)
    }
}
| true |
5b5189b9573fe557786130e41396d2587faeb686
|
Rust
|
jj-jabb/pathtrace-rs
|
/src/collision.rs
|
UTF-8
| 1,920 | 3.3125 | 3 |
[
"MIT",
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
use material::Material;
use vmath::{dot, Vec3};
/// A ray with an origin point and (not necessarily normalized) direction.
#[derive(Clone, Copy, Debug)]
pub struct Ray {
    pub origin: Vec3,
    pub direction: Vec3,
}
/// Free-function constructor mirroring `Ray::new`.
#[inline]
pub fn ray(origin: Vec3, direction: Vec3) -> Ray {
    Ray { origin, direction }
}
impl Ray {
    /// Constructor equivalent to the free `ray` helper.
    #[inline]
    #[allow(dead_code)]
    pub fn new(origin: Vec3, direction: Vec3) -> Ray {
        Ray { direction, origin }
    }
    /// Point reached after travelling `t` units along the direction vector.
    #[inline]
    pub fn point_at_parameter(&self, t: f32) -> Vec3 {
        let offset = t * self.direction;
        self.origin + offset
    }
}
/// Result of a successful ray-object intersection.
#[derive(Clone, Copy, Debug)]
pub struct RayHit {
    // Ray parameter at the hit point.
    pub t: f32,
    pub point: Vec3,
    // Unit surface normal at the hit point (for spheres: radial direction).
    pub normal: Vec3,
}
// #[derive(Clone, Copy, Debug, Serialize, Deserialize)]
#[derive(Clone, Copy, Debug)]
pub struct Sphere {
    pub centre: Vec3,
    pub radius: f32,
}
/// Builds a sphere paired with its material.
#[inline]
pub fn sphere(centre: Vec3, radius: f32, material: Material) -> (Sphere, Material) {
    (Sphere { centre, radius }, material)
}
impl Sphere {
    /// Ray-sphere intersection via the quadratic formula (with the usual
    /// factor-of-2 simplification on `b`). Returns the nearest hit whose
    /// parameter lies strictly inside `(t_min, t_max)`, or `None`.
    pub fn hit(&self, ray: &Ray, t_min: f32, t_max: f32) -> Option<RayHit> {
        let oc = ray.origin - self.centre;
        let a = dot(ray.direction, ray.direction);
        let b = dot(oc, ray.direction);
        let c = dot(oc, oc) - self.radius * self.radius;
        let discriminant = b * b - a * c;
        if discriminant <= 0.0 {
            // Ray misses (or grazes) the sphere.
            return None;
        }
        let root = discriminant.sqrt();
        // Try the near root first, then the far root.
        for &t in &[(-b - root) / a, (-b + root) / a] {
            if t < t_max && t > t_min {
                let point = ray.point_at_parameter(t);
                let normal = (point - self.centre) / self.radius;
                return Some(RayHit { t, point, normal });
            }
        }
        None
    }
}
| true |
4849f2c3f262f450f989ff2e68f1606b16cb75d6
|
Rust
|
timsaucer/RustDDS
|
/src/messages/submessages/submessage_header.rs
|
UTF-8
| 4,149 | 3.015625 | 3 |
[
"Apache-2.0"
] |
permissive
|
use crate::messages::submessages::submessage_flag::*;
use crate::messages::submessages::submessage_kind::SubmessageKind;
use speedy::{Context, Endianness, Readable, Reader, Writable, Writer};
/// RTPS submessage header: kind byte, flags byte, and 16-bit length.
#[derive(Debug, PartialEq, Clone, Copy)] // This is only 32 bits, so better Copy
pub struct SubmessageHeader {
    pub kind: SubmessageKind,
    pub flags: u8, // This must be able to contain any combination of flags; bit 0 selects endianness.
    pub content_length: u16, // Note that 0 is a special value, see spec 9.4.5.1.3
}
impl<'a, C: Context> Readable<'a, C> for SubmessageHeader {
    /// Reads kind and flags, then reads the length with the endianness
    /// selected by the flags byte (bit 0), not by the reader's context.
    #[inline]
    fn read_from<R: Reader<'a, C>>(reader: &mut R) -> Result<Self, C::Error> {
        let kind: SubmessageKind = reader.read_value()?;
        let flags: u8 = reader.read_value()?;
        let content_length = match endianness_flag(flags) {
            // Speedy does not make this too easy. There seems to be no convenient way to
            // read u16 when endianness is decided at run-time.
            Endianness::LittleEndian => u16::from_le_bytes([reader.read_u8()?, reader.read_u8()?]),
            Endianness::BigEndian => u16::from_be_bytes([reader.read_u8()?, reader.read_u8()?]),
        };
        Ok(SubmessageHeader {
            kind,
            flags,
            content_length,
        })
    }
    #[inline]
    fn minimum_bytes_needed() -> usize {
        std::mem::size_of::<Self>()
    }
}
impl<C: Context> Writable<C> for SubmessageHeader {
    /// Writes kind and flags, then the length byte-by-byte in the
    /// endianness selected by the flags byte (mirrors `read_from`).
    #[inline]
    fn write_to<T: ?Sized + Writer<C>>(&self, writer: &mut T) -> Result<(), C::Error> {
        writer.write_value(&self.kind)?;
        writer.write_value(&self.flags)?;
        match endianness_flag(self.flags) {
            // matching via writer.context().endianness() panics
            speedy::Endianness::LittleEndian => {
                writer.write_u8(self.content_length as u8)?;
                writer.write_u8((self.content_length >> 8) as u8)?;
            }
            speedy::Endianness::BigEndian => {
                writer.write_u8((self.content_length >> 8) as u8)?;
                writer.write_u8(self.content_length as u8)?;
            }
        };
        Ok(())
    }
}
#[cfg(test)]
mod tests {
    use enumflags2::BitFlags;
    use super::*;
    // `serialization_test!` is a crate-local macro: each case checks that
    // the value serializes to the given `le`/`be` byte strings and back.
    // The byte strings are equal per case because this header encodes its
    // own endianness in the flags byte.
    serialization_test!( type = SubmessageHeader,
    {
        submessage_header_big_endian_flag,
        SubmessageHeader {
            kind: SubmessageKind::ACKNACK,
            flags: BitFlags::<ACKNACK_Flags>::from_endianness(Endianness::BigEndian).bits(),
            content_length: 42,
        },
        le = [0x06, 0x00, 0x00, 0x2A],
        be = [0x06, 0x00, 0x00, 0x2A]
    },
    {
        submessage_header_little_endian_flag,
        SubmessageHeader {
            kind: SubmessageKind::ACKNACK,
            flags: BitFlags::<ACKNACK_Flags>::from_endianness(Endianness::LittleEndian).bits(),
            content_length: 42,
        },
        le = [0x06, 0x01, 0x2A, 0x00],
        be = [0x06, 0x01, 0x2A, 0x00]
    },
    {
        submessage_header_big_endian_2_bytes_length,
        SubmessageHeader {
            kind: SubmessageKind::ACKNACK,
            flags: BitFlags::<ACKNACK_Flags>::from_endianness(Endianness::BigEndian).bits(),
            content_length: 258,
        },
        le = [0x06, 0x00, 0x01, 0x02],
        be = [0x06, 0x00, 0x01, 0x02]
    },
    {
        submessage_header_little_endian_2_bytes_length,
        SubmessageHeader {
            kind: SubmessageKind::ACKNACK,
            flags: BitFlags::<ACKNACK_Flags>::from_endianness(Endianness::LittleEndian).bits(),
            content_length: 258,
        },
        le = [0x06, 0x01, 0x02, 0x01],
        be = [0x06, 0x01, 0x02, 0x01]
    },
    {
        submessage_header_wireshark,
        SubmessageHeader {
            kind: SubmessageKind::INFO_TS,
            flags: BitFlags::<INFOTIMESTAMP_Flags>::from_endianness(Endianness::LittleEndian).bits(),
            content_length: 8,
        },
        le = [0x09, 0x01, 0x08, 0x00],
        be = [0x09, 0x01, 0x08, 0x00]
    },
    {
        submessage_header_gap,
        SubmessageHeader {
            kind: SubmessageKind::GAP,
            flags: BitFlags::<GAP_Flags>::from_endianness(Endianness::LittleEndian).bits(),
            content_length: 7,
        },
        le = [0x08, 0x01, 0x07, 0x00],
        be = [0x08, 0x01, 0x07, 0x00]
        //TODO: Where is the flags value 0x03 from? RTPS 2.3 spec 9.4.5.5 shows only Endianness bit is legal.
    });
}
| true |
f666e89a11c2703abd6a999c61a888990dc2a519
|
Rust
|
BurntSushi/ucd-generate
|
/ucd-parse/src/unicode_data.rs
|
UTF-8
| 25,815 | 3.046875 | 3 |
[
"Apache-2.0",
"MIT"
] |
permissive
|
use std::path::Path;
use crate::{
common::{Codepoint, CodepointIter, UcdFile, UcdFileByCodepoint},
error::Error,
};
/// Represents a single row in the `UnicodeData.txt` file.
///
/// These fields were taken from UAX44, Table 9, as part of the documentation
/// for the
/// [`UnicodeData.txt` file](https://www.unicode.org/reports/tr44/#UnicodeData.txt).
#[derive(Clone, Debug, Default, Eq, PartialEq)]
pub struct UnicodeData {
    /// The codepoint corresponding to this row.
    pub codepoint: Codepoint,
    /// The name of this codepoint.
    pub name: String,
    /// The "general category" of this codepoint.
    pub general_category: String,
    /// The class of this codepoint used in the Canonical Ordering Algorithm.
    ///
    /// Note that some classes map to a particular symbol. See
    /// [UAX44, Table 15](https://www.unicode.org/reports/tr44/#Canonical_Combining_Class_Values).
    pub canonical_combining_class: u8,
    /// The bidirectional class of this codepoint.
    ///
    /// Possible values are listed in
    /// [UAX44, Table 13](https://www.unicode.org/reports/tr44/#Bidi_Class_Values).
    pub bidi_class: String,
    /// The decomposition mapping for this codepoint. This includes its
    /// formatting tag (if present).
    pub decomposition: UnicodeDataDecomposition,
    /// A decimal numeric representation of this codepoint, if it has the
    /// property `Numeric_Type=Decimal`.
    pub numeric_type_decimal: Option<u8>,
    /// A decimal numeric representation of this codepoint, if it has the
    /// property `Numeric_Type=Digit`. Note that while this field is still
    /// populated for existing codepoints, no new codepoints will have this
    /// field populated.
    pub numeric_type_digit: Option<u8>,
    /// A decimal or rational numeric representation of this codepoint, if it
    /// has the property `Numeric_Type=Numeric`.
    pub numeric_type_numeric: Option<UnicodeDataNumeric>,
    /// A boolean indicating whether this codepoint is "mirrored" in
    /// bidirectional text.
    pub bidi_mirrored: bool,
    /// The "old" Unicode 1.0 or ISO 6429 name of this codepoint. Note that
    /// this field is empty unless it is significantly different from
    /// the `name` field.
    pub unicode1_name: String,
    /// The ISO 10646 comment field. This no longer contains any non-NULL
    /// values.
    pub iso_comment: String,
    /// This codepoint's simple uppercase mapping, if it exists.
    pub simple_uppercase_mapping: Option<Codepoint>,
    /// This codepoint's simple lowercase mapping, if it exists.
    pub simple_lowercase_mapping: Option<Codepoint>,
    /// This codepoint's simple titlecase mapping, if it exists.
    pub simple_titlecase_mapping: Option<Codepoint>,
}
impl UcdFile for UnicodeData {
    /// `UnicodeData.txt`, relative to the UCD directory root.
    fn relative_file_path() -> &'static Path {
        Path::new("UnicodeData.txt")
    }
}
impl UcdFileByCodepoint for UnicodeData {
    /// Iterates over the single codepoint on this row.
    fn codepoints(&self) -> CodepointIter {
        self.codepoint.into_iter()
    }
}
impl UnicodeData {
    /// Returns true if and only if this record corresponds to the start of
    /// a range (a name like `<Hangul Syllable, First>`).
    pub fn is_range_start(&self) -> bool {
        let name = &self.name;
        name.starts_with('<') && name.ends_with('>') && name.contains("First")
    }
    /// Returns true if and only if this record corresponds to the end of
    /// a range (a name like `<Hangul Syllable, Last>`).
    pub fn is_range_end(&self) -> bool {
        let name = &self.name;
        name.starts_with('<') && name.ends_with('>') && name.contains("Last")
    }
}
impl std::str::FromStr for UnicodeData {
    type Err = Error;
    /// Parses one semicolon-delimited `UnicodeData.txt` row (15 fields).
    fn from_str(line: &str) -> Result<UnicodeData, Error> {
        let re_parts = regex!(
            r"(?x)
            ^
            ([A-Z0-9]+);  #  1; codepoint
            ([^;]+);      #  2; name
            ([^;]+);      #  3; general category
            ([0-9]+);     #  4; canonical combining class
            ([^;]+);      #  5; bidi class
            ([^;]*);      #  6; decomposition
            ([0-9]*);     #  7; numeric type decimal
            ([0-9]*);     #  8; numeric type digit
            ([-0-9/]*);   #  9; numeric type numeric
            ([YN]);       # 10; bidi mirrored
            ([^;]*);      # 11; unicode1 name
            ([^;]*);      # 12; ISO comment
            ([^;]*);      # 13; simple uppercase mapping
            ([^;]*);      # 14; simple lowercase mapping
            ([^;]*)       # 15; simple titlecase mapping
            $
            ",
        );
        let caps = match re_parts.captures(line.trim()) {
            Some(caps) => caps,
            None => return err!("invalid UnicodeData line"),
        };
        let capget = |n| caps.get(n).unwrap().as_str();
        let mut data = UnicodeData::default();
        data.codepoint = capget(1).parse()?;
        data.name = capget(2).to_string();
        data.general_category = capget(3).to_string();
        data.canonical_combining_class = match capget(4).parse() {
            Ok(n) => n,
            Err(err) => {
                return err!(
                    "failed to parse canonical combining class '{}': {}",
                    capget(4),
                    err
                )
            }
        };
        data.bidi_class = capget(5).to_string();
        // An empty decomposition field means the codepoint decomposes to
        // itself (canonically).
        if !caps[6].is_empty() {
            data.decomposition = caps[6].parse()?;
        } else {
            data.decomposition.push(data.codepoint)?;
        }
        if !capget(7).is_empty() {
            data.numeric_type_decimal = Some(match capget(7).parse() {
                Ok(n) => n,
                Err(err) => {
                    return err!(
                        "failed to parse numeric type decimal '{}': {}",
                        capget(7),
                        err
                    )
                }
            });
        }
        if !capget(8).is_empty() {
            data.numeric_type_digit = Some(match capget(8).parse() {
                Ok(n) => n,
                Err(err) => {
                    return err!(
                        "failed to parse numeric type digit '{}': {}",
                        capget(8),
                        err
                    )
                }
            });
        }
        if !capget(9).is_empty() {
            data.numeric_type_numeric = Some(capget(9).parse()?);
        }
        data.bidi_mirrored = capget(10) == "Y";
        data.unicode1_name = capget(11).to_string();
        data.iso_comment = capget(12).to_string();
        if !capget(13).is_empty() {
            data.simple_uppercase_mapping = Some(capget(13).parse()?);
        }
        if !capget(14).is_empty() {
            data.simple_lowercase_mapping = Some(capget(14).parse()?);
        }
        if !capget(15).is_empty() {
            data.simple_titlecase_mapping = Some(capget(15).parse()?);
        }
        Ok(data)
    }
}
impl std::fmt::Display for UnicodeData {
    /// Re-serializes the row in `UnicodeData.txt` format: 15 fields,
    /// 14 semicolons, `Option` fields rendered empty when `None`.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "{};", self.codepoint)?;
        write!(f, "{};", self.name)?;
        write!(f, "{};", self.general_category)?;
        write!(f, "{};", self.canonical_combining_class)?;
        write!(f, "{};", self.bidi_class)?;
        // A canonical self-decomposition was synthesized during parsing and
        // must round-trip back to an empty field.
        if self.decomposition.is_canonical()
            && self.decomposition.mapping() == &[self.codepoint]
        {
            write!(f, ";")?;
        } else {
            write!(f, "{};", self.decomposition)?;
        }
        if let Some(n) = self.numeric_type_decimal {
            write!(f, "{};", n)?;
        } else {
            write!(f, ";")?;
        }
        if let Some(n) = self.numeric_type_digit {
            write!(f, "{};", n)?;
        } else {
            write!(f, ";")?;
        }
        if let Some(n) = self.numeric_type_numeric {
            write!(f, "{};", n)?;
        } else {
            write!(f, ";")?;
        }
        write!(f, "{};", if self.bidi_mirrored { "Y" } else { "N" })?;
        write!(f, "{};", self.unicode1_name)?;
        write!(f, "{};", self.iso_comment)?;
        if let Some(cp) = self.simple_uppercase_mapping {
            write!(f, "{};", cp)?;
        } else {
            write!(f, ";")?;
        }
        if let Some(cp) = self.simple_lowercase_mapping {
            write!(f, "{};", cp)?;
        } else {
            write!(f, ";")?;
        }
        // Last field: no trailing semicolon.
        if let Some(cp) = self.simple_titlecase_mapping {
            write!(f, "{}", cp)?;
        }
        Ok(())
    }
}
/// Represents a decomposition mapping of a single row in the
/// `UnicodeData.txt` file.
#[derive(Clone, Debug, Default, Eq, PartialEq)]
pub struct UnicodeDataDecomposition {
    /// The formatting tag associated with this mapping, if present.
    pub tag: Option<UnicodeDataDecompositionTag>,
    /// The number of codepoints in this mapping.
    pub len: usize,
    /// The codepoints in the mapping. Entries beyond `len` in the mapping
    /// are always U+0000. If no mapping was present, then this always contains
    /// a single codepoint corresponding to this row's character.
    /// (18 is the maximum decomposition length permitted by UAX44.)
    pub mapping: [Codepoint; 18],
}
impl UnicodeDataDecomposition {
/// Create a new decomposition mapping with the given tag and codepoints.
///
/// If there are too many codepoints, then an error is returned.
pub fn new(
tag: Option<UnicodeDataDecompositionTag>,
mapping: &[Codepoint],
) -> Result<UnicodeDataDecomposition, Error> {
let mut x = UnicodeDataDecomposition::default();
x.tag = tag;
for &cp in mapping {
x.push(cp)?;
}
Ok(x)
}
/// Add a new codepoint to this decomposition's mapping.
///
/// If the mapping is already full, then this returns an error.
pub fn push(&mut self, cp: Codepoint) -> Result<(), Error> {
if self.len >= self.mapping.len() {
return err!(
"invalid decomposition mapping (too many codepoints)"
);
}
self.mapping[self.len] = cp;
self.len += 1;
Ok(())
}
/// Return the mapping as a slice of codepoints. The slice returned
/// has length equivalent to the number of codepoints in this mapping.
pub fn mapping(&self) -> &[Codepoint] {
&self.mapping[..self.len]
}
/// Returns true if and only if this decomposition mapping is canonical.
pub fn is_canonical(&self) -> bool {
self.tag.is_none()
}
}
impl std::str::FromStr for UnicodeDataDecomposition {
    type Err = Error;
    /// Parses a decomposition field: an optional `<tag>` followed by
    /// whitespace-separated uppercase-hex codepoints.
    fn from_str(s: &str) -> Result<UnicodeDataDecomposition, Error> {
        let re_with_tag =
            regex!(r"^(?:<(?P<tag>[^>]+)>)?\s*(?P<chars>[\s0-9A-F]+)$");
        let re_chars = regex!(r"[0-9A-F]+");
        if s.is_empty() {
            return err!(
                "expected non-empty string for \
                 UnicodeDataDecomposition value"
            );
        }
        let caps = match re_with_tag.captures(s) {
            Some(caps) => caps,
            None => return err!("invalid decomposition value"),
        };
        let mut decomp = UnicodeDataDecomposition::default();
        // If a tag is present, restrict codepoint scanning to the part
        // after it; otherwise scan the whole input.
        let mut codepoints = s;
        if let Some(m) = caps.name("tag") {
            decomp.tag = Some(m.as_str().parse()?);
            codepoints = &caps["chars"];
        }
        for m in re_chars.find_iter(codepoints) {
            let cp = m.as_str().parse()?;
            decomp.push(cp)?;
        }
        Ok(decomp)
    }
}
impl std::fmt::Display for UnicodeDataDecomposition {
    /// Writes the optional `<tag> ` prefix followed by the codepoints,
    /// space-separated.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        if let Some(ref tag) = self.tag {
            write!(f, "<{}> ", tag)?;
        }
        for (i, cp) in self.mapping().iter().enumerate() {
            if i > 0 {
                write!(f, " ")?;
            }
            write!(f, "{}", cp)?;
        }
        Ok(())
    }
}
/// The formatting tag on a decomposition mapping.
///
/// This is taken from
/// [UAX44, Table 14](https://www.unicode.org/reports/tr44/#Character_Decomposition_Mappings).
#[derive(Clone, Debug, Eq, PartialEq)]
pub enum UnicodeDataDecompositionTag {
    /// `<font>`
    Font,
    /// `<noBreak>`
    NoBreak,
    /// `<initial>`
    Initial,
    /// `<medial>`
    Medial,
    /// `<final>`
    Final,
    /// `<isolated>`
    Isolated,
    /// `<circle>`
    Circle,
    /// `<super>`
    Super,
    /// `<sub>`
    Sub,
    /// `<vertical>`
    Vertical,
    /// `<wide>`
    Wide,
    /// `<narrow>`
    Narrow,
    /// `<small>`
    Small,
    /// `<square>`
    Square,
    /// `<fraction>`
    Fraction,
    /// `<compat>`
    Compat,
}
impl std::str::FromStr for UnicodeDataDecompositionTag {
    type Err = Error;
    /// Parses the tag name as it appears between angle brackets, e.g.
    /// `noBreak` (case-sensitive, matching UAX44 spelling).
    fn from_str(s: &str) -> Result<UnicodeDataDecompositionTag, Error> {
        use self::UnicodeDataDecompositionTag::*;
        Ok(match s {
            "font" => Font,
            "noBreak" => NoBreak,
            "initial" => Initial,
            "medial" => Medial,
            "final" => Final,
            "isolated" => Isolated,
            "circle" => Circle,
            "super" => Super,
            "sub" => Sub,
            "vertical" => Vertical,
            "wide" => Wide,
            "narrow" => Narrow,
            "small" => Small,
            "square" => Square,
            "fraction" => Fraction,
            "compat" => Compat,
            _ => return err!("invalid decomposition formatting tag: {}", s),
        })
    }
}
impl std::fmt::Display for UnicodeDataDecompositionTag {
    /// Writes the tag name without angle brackets (inverse of `FromStr`).
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        use self::UnicodeDataDecompositionTag::*;
        let s = match *self {
            Font => "font",
            NoBreak => "noBreak",
            Initial => "initial",
            Medial => "medial",
            Final => "final",
            Isolated => "isolated",
            Circle => "circle",
            Super => "super",
            Sub => "sub",
            Vertical => "vertical",
            Wide => "wide",
            Narrow => "narrow",
            Small => "small",
            Square => "square",
            Fraction => "fraction",
            Compat => "compat",
        };
        write!(f, "{}", s)
    }
}
/// A numeric value corresponding to characters with `Numeric_Type=Numeric`.
///
/// A numeric value can either be a signed integer or a rational number.
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub enum UnicodeDataNumeric {
    /// An integer.
    Integer(i64),
    /// A rational number. The first is the numerator and the latter is the
    /// denominator.
    Rational(i64, i64),
}
impl std::str::FromStr for UnicodeDataNumeric {
    type Err = Error;
    /// Parses a numeric field: either a plain signed integer or a rational
    /// written as `numerator/denominator`.
    ///
    /// Returns an error for empty input or any non-integer component.
    fn from_str(s: &str) -> Result<UnicodeDataNumeric, Error> {
        if s.is_empty() {
            return err!(
                "expected non-empty string for UnicodeDataNumeric value"
            );
        }
        if let Some(pos) = s.find('/') {
            // Rational: split on the first '/'.
            let (snum, sden) = (&s[..pos], &s[pos + 1..]);
            let num = match snum.parse() {
                Ok(num) => num,
                Err(err) => {
                    return err!(
                        "invalid integer numerator '{}': {}",
                        snum,
                        err
                    );
                }
            };
            let den = match sden.parse() {
                Ok(den) => den,
                Err(err) => {
                    return err!(
                        "invalid integer denominator '{}': {}",
                        sden,
                        err
                    );
                }
            };
            Ok(UnicodeDataNumeric::Rational(num, den))
        } else {
            // Plain integer. (Fix: this arm previously reported
            // "invalid integer denominator" even though no denominator is
            // involved, and bound the parsed value as `den`.)
            match s.parse() {
                Ok(n) => Ok(UnicodeDataNumeric::Integer(n)),
                Err(err) => {
                    return err!("invalid integer '{}': {}", s, err);
                }
            }
        }
    }
}
impl std::fmt::Display for UnicodeDataNumeric {
    /// Writes integers bare and rationals as `num/den` (inverse of `FromStr`).
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            UnicodeDataNumeric::Integer(n) => write!(f, "{}", n),
            UnicodeDataNumeric::Rational(num, den) => write!(f, "{}/{}", num, den),
        }
    }
}
/// An iterator adapter that expands rows in `UnicodeData.txt`.
///
/// Throughout `UnicodeData.txt`, some assigned codepoints are not explicitly
/// represented. Instead, they are represented by a pair of rows, indicating
/// a range of codepoints with the same properties. For example, the Hangul
/// syllable codepoints are represented by these two rows:
///
/// ```ignore
/// AC00;<Hangul Syllable, First>;Lo;0;L;;;;;N;;;;;
/// D7A3;<Hangul Syllable, Last>;Lo;0;L;;;;;N;;;;;
/// ```
///
/// This iterator will wrap any iterator of `UnicodeData` and, when a range of
/// Unicode codepoints is found, it will be expanded to the appropriate
/// sequence of `UnicodeData` values. Note that all such expanded records will
/// have an empty name.
pub struct UnicodeDataExpander<I: Iterator> {
    /// The underlying iterator.
    it: std::iter::Peekable<I>,
    /// A pending range of codepoints to emit before pulling further rows
    /// from the underlying iterator. An exhausted range (e.g. the initial
    /// `0..0`) means nothing is pending.
    range: CodepointRange,
}
/// An in-progress expansion of a `First`/`Last` row pair. Iterating yields
/// one `UnicodeData` record per codepoint in `range`, each cloned from
/// `start_record` with the codepoint substituted and the name blanked.
struct CodepointRange {
    /// The codepoint range.
    range: std::ops::Range<u32>,
    /// The start record. All subsequent records in this range are generated
    /// by cloning this and updating the codepoint/name.
    start_record: UnicodeData,
}
impl<I: Iterator<Item = UnicodeData>> UnicodeDataExpander<I> {
    /// Create a new iterator that expands pairs of `UnicodeData` range
    /// records. All other records are passed through as-is.
    pub fn new<T>(it: T) -> UnicodeDataExpander<I>
    where
        T: IntoIterator<IntoIter = I, Item = I::Item>,
    {
        UnicodeDataExpander {
            it: it.into_iter().peekable(),
            // Start with an already-exhausted range (0..0) so the first call
            // to `next` immediately draws from the underlying iterator.
            range: CodepointRange {
                range: 0..0,
                start_record: UnicodeData::default(),
            },
        }
    }
}
impl<I: Iterator<Item = UnicodeData>> Iterator for UnicodeDataExpander<I> {
    type Item = UnicodeData;
    fn next(&mut self) -> Option<UnicodeData> {
        // Drain any in-progress range expansion before touching new rows.
        if let Some(udata) = self.range.next() {
            return Some(udata);
        }
        let row1 = match self.it.next() {
            None => return None,
            Some(row1) => row1,
        };
        // Pass the row through untouched unless it starts a range AND the
        // following row (peeked, not consumed) ends one.
        if !row1.is_range_start()
            || !self.it.peek().map_or(false, |row2| row2.is_range_end())
        {
            return Some(row1);
        }
        // Consume the range-end row. The `+ 1` converts the inclusive end
        // codepoint into an exclusive `Range` bound.
        let row2 = self.it.next().unwrap();
        self.range = CodepointRange {
            range: row1.codepoint.value()..(row2.codepoint.value() + 1),
            start_record: row1,
        };
        // Recurse to emit the first record of the freshly created range.
        self.next()
    }
}
impl Iterator for CodepointRange {
    type Item = UnicodeData;
    /// Yields one record per remaining codepoint: the template record with
    /// the codepoint substituted and the name left empty.
    fn next(&mut self) -> Option<UnicodeData> {
        self.range.next().map(|cp| UnicodeData {
            codepoint: Codepoint::from_u32(cp).unwrap(),
            name: "".to_string(),
            ..self.start_record.clone()
        })
    }
}
#[cfg(test)]
mod tests {
    use crate::common::Codepoint;
    use super::{
        UnicodeData, UnicodeDataDecomposition, UnicodeDataDecompositionTag,
        UnicodeDataNumeric,
    };
    // Convenience constructor for a `Codepoint` from a raw scalar value.
    fn codepoint(n: u32) -> Codepoint {
        Codepoint::from_u32(n).unwrap()
    }
    // Shorthand for building owned `String`s in the fixtures below.
    fn s(string: &str) -> String {
        string.to_string()
    }
    // A compatibility decomposition (`<compat> ...`) mapping to several
    // codepoints.
    #[test]
    fn parse1() {
        let line = "249D;PARENTHESIZED LATIN SMALL LETTER B;So;0;L;<compat> 0028 0062 0029;;;;N;;;;;\n";
        let data: UnicodeData = line.parse().unwrap();
        assert_eq!(
            data,
            UnicodeData {
                codepoint: codepoint(0x249d),
                name: s("PARENTHESIZED LATIN SMALL LETTER B"),
                general_category: s("So"),
                canonical_combining_class: 0,
                bidi_class: s("L"),
                decomposition: UnicodeDataDecomposition::new(
                    Some(UnicodeDataDecompositionTag::Compat),
                    &[codepoint(0x28), codepoint(0x62), codepoint(0x29)],
                )
                .unwrap(),
                numeric_type_decimal: None,
                numeric_type_digit: None,
                numeric_type_numeric: None,
                bidi_mirrored: false,
                unicode1_name: s(""),
                iso_comment: s(""),
                simple_uppercase_mapping: None,
                simple_lowercase_mapping: None,
                simple_titlecase_mapping: None,
            }
        );
    }
    // A control character carrying a Unicode 1.0 name in field 10.
    #[test]
    fn parse2() {
        let line = "000D;<control>;Cc;0;B;;;;;N;CARRIAGE RETURN (CR);;;;\n";
        let data: UnicodeData = line.parse().unwrap();
        assert_eq!(
            data,
            UnicodeData {
                codepoint: codepoint(0x000D),
                name: s("<control>"),
                general_category: s("Cc"),
                canonical_combining_class: 0,
                bidi_class: s("B"),
                decomposition: UnicodeDataDecomposition::new(
                    None,
                    &[codepoint(0x000D)]
                )
                .unwrap(),
                numeric_type_decimal: None,
                numeric_type_digit: None,
                numeric_type_numeric: None,
                bidi_mirrored: false,
                unicode1_name: s("CARRIAGE RETURN (CR)"),
                iso_comment: s(""),
                simple_uppercase_mapping: None,
                simple_lowercase_mapping: None,
                simple_titlecase_mapping: None,
            }
        );
    }
    // A fraction: the numeric field parses as a positive rational.
    #[test]
    fn parse3() {
        let line = "00BC;VULGAR FRACTION ONE QUARTER;No;0;ON;<fraction> 0031 2044 0034;;;1/4;N;FRACTION ONE QUARTER;;;;\n";
        let data: UnicodeData = line.parse().unwrap();
        assert_eq!(
            data,
            UnicodeData {
                codepoint: codepoint(0x00BC),
                name: s("VULGAR FRACTION ONE QUARTER"),
                general_category: s("No"),
                canonical_combining_class: 0,
                bidi_class: s("ON"),
                decomposition: UnicodeDataDecomposition::new(
                    Some(UnicodeDataDecompositionTag::Fraction),
                    &[codepoint(0x31), codepoint(0x2044), codepoint(0x34)],
                )
                .unwrap(),
                numeric_type_decimal: None,
                numeric_type_digit: None,
                numeric_type_numeric: Some(UnicodeDataNumeric::Rational(1, 4)),
                bidi_mirrored: false,
                unicode1_name: s("FRACTION ONE QUARTER"),
                iso_comment: s(""),
                simple_uppercase_mapping: None,
                simple_lowercase_mapping: None,
                simple_titlecase_mapping: None,
            }
        );
    }
    // A simple lowercase mapping in field 13.
    #[test]
    fn parse4() {
        let line = "0041;LATIN CAPITAL LETTER A;Lu;0;L;;;;;N;;;;0061;\n";
        let data: UnicodeData = line.parse().unwrap();
        assert_eq!(
            data,
            UnicodeData {
                codepoint: codepoint(0x0041),
                name: s("LATIN CAPITAL LETTER A"),
                general_category: s("Lu"),
                canonical_combining_class: 0,
                bidi_class: s("L"),
                decomposition: UnicodeDataDecomposition::new(
                    None,
                    &[codepoint(0x0041)]
                )
                .unwrap(),
                numeric_type_decimal: None,
                numeric_type_digit: None,
                numeric_type_numeric: None,
                bidi_mirrored: false,
                unicode1_name: s(""),
                iso_comment: s(""),
                simple_uppercase_mapping: None,
                simple_lowercase_mapping: Some(codepoint(0x0061)),
                simple_titlecase_mapping: None,
            }
        );
    }
    // A negative rational numeric value.
    #[test]
    fn parse5() {
        let line = "0F33;TIBETAN DIGIT HALF ZERO;No;0;L;;;;-1/2;N;;;;;\n";
        let data: UnicodeData = line.parse().unwrap();
        assert_eq!(
            data,
            UnicodeData {
                codepoint: codepoint(0x0F33),
                name: s("TIBETAN DIGIT HALF ZERO"),
                general_category: s("No"),
                canonical_combining_class: 0,
                bidi_class: s("L"),
                decomposition: UnicodeDataDecomposition::new(
                    None,
                    &[codepoint(0x0F33)]
                )
                .unwrap(),
                numeric_type_decimal: None,
                numeric_type_digit: None,
                numeric_type_numeric: Some(UnicodeDataNumeric::Rational(
                    -1, 2
                )),
                bidi_mirrored: false,
                unicode1_name: s(""),
                iso_comment: s(""),
                simple_uppercase_mapping: None,
                simple_lowercase_mapping: None,
                simple_titlecase_mapping: None,
            }
        );
    }
    // The Hangul First/Last pair expands to the full syllable range:
    // 0xD7A3 - 0xAC00 + 1 = 11172 records, plus the two ordinary rows.
    #[test]
    fn expander() {
        use super::UnicodeDataExpander;
        use crate::common::UcdLineParser;
        let data = "\
ABF9;MEETEI MAYEK DIGIT NINE;Nd;0;L;;9;9;9;N;;;;;
AC00;<Hangul Syllable, First>;Lo;0;L;;;;;N;;;;;
D7A3;<Hangul Syllable, Last>;Lo;0;L;;;;;N;;;;;
D7B0;HANGUL JUNGSEONG O-YEO;Lo;0;L;;;;;N;;;;;
";
        let records = UcdLineParser::new(None, data.as_bytes())
            .collect::<Result<Vec<_>, _>>()
            .unwrap();
        assert_eq!(UnicodeDataExpander::new(records).count(), 11174);
    }
}
| true |
50c1ea0dfc4a20242a10afed23a46f85819f218b
|
Rust
|
dfrankland/mk20d7
|
/src/fmc/pfb0cr/mod.rs
|
UTF-8
| 28,239 | 2.734375 | 3 |
[
"MIT"
] |
permissive
|
#[doc = r" Value read from the register"]
pub struct R {
bits: u32,
}
#[doc = r" Value to write to the register"]
pub struct W {
bits: u32,
}
impl super::PFB0CR {
#[doc = r" Modifies the contents of the register"]
#[inline]
pub fn modify<F>(&self, f: F)
where
for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,
{
let bits = self.register.get();
let r = R { bits: bits };
let mut w = W { bits: bits };
f(&r, &mut w);
self.register.set(w.bits);
}
#[doc = r" Reads the contents of the register"]
#[inline]
pub fn read(&self) -> R {
R {
bits: self.register.get(),
}
}
#[doc = r" Writes to the register"]
#[inline]
pub fn write<F>(&self, f: F)
where
F: FnOnce(&mut W) -> &mut W,
{
let mut w = W::reset_value();
f(&mut w);
self.register.set(w.bits);
}
#[doc = r" Writes the reset value to the register"]
#[inline]
pub fn reset(&self) {
self.write(|w| w)
}
}
#[doc = "Possible values of the field `B0SEBE`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum B0SEBER {
#[doc = "Single entry buffer is disabled."]
_0,
#[doc = "Single entry buffer is enabled."]
_1,
}
impl B0SEBER {
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
match *self {
B0SEBER::_0 => false,
B0SEBER::_1 => true,
}
}
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _from(value: bool) -> B0SEBER {
match value {
false => B0SEBER::_0,
true => B0SEBER::_1,
}
}
#[doc = "Checks if the value of the field is `_0`"]
#[inline]
pub fn is_0(&self) -> bool {
*self == B0SEBER::_0
}
#[doc = "Checks if the value of the field is `_1`"]
#[inline]
pub fn is_1(&self) -> bool {
*self == B0SEBER::_1
}
}
#[doc = "Possible values of the field `B0IPE`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum B0IPER {
#[doc = "Do not prefetch in response to instruction fetches."]
_0,
#[doc = "Enable prefetches in response to instruction fetches."]
_1,
}
impl B0IPER {
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
match *self {
B0IPER::_0 => false,
B0IPER::_1 => true,
}
}
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _from(value: bool) -> B0IPER {
match value {
false => B0IPER::_0,
true => B0IPER::_1,
}
}
#[doc = "Checks if the value of the field is `_0`"]
#[inline]
pub fn is_0(&self) -> bool {
*self == B0IPER::_0
}
#[doc = "Checks if the value of the field is `_1`"]
#[inline]
pub fn is_1(&self) -> bool {
*self == B0IPER::_1
}
}
#[doc = "Possible values of the field `B0DPE`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum B0DPER {
#[doc = "Do not prefetch in response to data references."]
_0,
#[doc = "Enable prefetches in response to data references."]
_1,
}
impl B0DPER {
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
match *self {
B0DPER::_0 => false,
B0DPER::_1 => true,
}
}
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _from(value: bool) -> B0DPER {
match value {
false => B0DPER::_0,
true => B0DPER::_1,
}
}
#[doc = "Checks if the value of the field is `_0`"]
#[inline]
pub fn is_0(&self) -> bool {
*self == B0DPER::_0
}
#[doc = "Checks if the value of the field is `_1`"]
#[inline]
pub fn is_1(&self) -> bool {
*self == B0DPER::_1
}
}
#[doc = "Possible values of the field `B0ICE`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum B0ICER {
#[doc = "Do not cache instruction fetches."]
_0,
#[doc = "Cache instruction fetches."]
_1,
}
impl B0ICER {
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
match *self {
B0ICER::_0 => false,
B0ICER::_1 => true,
}
}
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _from(value: bool) -> B0ICER {
match value {
false => B0ICER::_0,
true => B0ICER::_1,
}
}
#[doc = "Checks if the value of the field is `_0`"]
#[inline]
pub fn is_0(&self) -> bool {
*self == B0ICER::_0
}
#[doc = "Checks if the value of the field is `_1`"]
#[inline]
pub fn is_1(&self) -> bool {
*self == B0ICER::_1
}
}
#[doc = "Possible values of the field `B0DCE`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum B0DCER {
#[doc = "Do not cache data references."]
_0,
#[doc = "Cache data references."]
_1,
}
impl B0DCER {
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
match *self {
B0DCER::_0 => false,
B0DCER::_1 => true,
}
}
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _from(value: bool) -> B0DCER {
match value {
false => B0DCER::_0,
true => B0DCER::_1,
}
}
#[doc = "Checks if the value of the field is `_0`"]
#[inline]
pub fn is_0(&self) -> bool {
*self == B0DCER::_0
}
#[doc = "Checks if the value of the field is `_1`"]
#[inline]
pub fn is_1(&self) -> bool {
*self == B0DCER::_1
}
}
#[doc = "Possible values of the field `CRC`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum CRCR {
#[doc = "LRU replacement algorithm per set across all four ways"]
_000,
#[doc = "Independent LRU with ways [0-1] for ifetches, [2-3] for data"]
_010,
#[doc = "Independent LRU with ways [0-2] for ifetches, [3] for data"]
_011,
#[doc = r" Reserved"]
_Reserved(u8),
}
impl CRCR {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bits(&self) -> u8 {
match *self {
CRCR::_000 => 0,
CRCR::_010 => 2,
CRCR::_011 => 3,
CRCR::_Reserved(bits) => bits,
}
}
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _from(value: u8) -> CRCR {
match value {
0 => CRCR::_000,
2 => CRCR::_010,
3 => CRCR::_011,
i => CRCR::_Reserved(i),
}
}
#[doc = "Checks if the value of the field is `_000`"]
#[inline]
pub fn is_000(&self) -> bool {
*self == CRCR::_000
}
#[doc = "Checks if the value of the field is `_010`"]
#[inline]
pub fn is_010(&self) -> bool {
*self == CRCR::_010
}
#[doc = "Checks if the value of the field is `_011`"]
#[inline]
pub fn is_011(&self) -> bool {
*self == CRCR::_011
}
}
#[doc = "Possible values of the field `B0MW`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum B0MWR {
#[doc = "32 bits"]
_00,
#[doc = "64 bits"]
_01,
#[doc = r" Reserved"]
_Reserved(u8),
}
impl B0MWR {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bits(&self) -> u8 {
match *self {
B0MWR::_00 => 0,
B0MWR::_01 => 1,
B0MWR::_Reserved(bits) => bits,
}
}
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _from(value: u8) -> B0MWR {
match value {
0 => B0MWR::_00,
1 => B0MWR::_01,
i => B0MWR::_Reserved(i),
}
}
#[doc = "Checks if the value of the field is `_00`"]
#[inline]
pub fn is_00(&self) -> bool {
*self == B0MWR::_00
}
#[doc = "Checks if the value of the field is `_01`"]
#[inline]
pub fn is_01(&self) -> bool {
*self == B0MWR::_01
}
}
#[doc = "Possible values of the field `CLCK_WAY`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum CLCK_WAYR {
#[doc = "Cache way is unlocked and may be displaced"]
_0,
#[doc = "Cache way is locked and its contents are not displaced"]
_1,
#[doc = r" Reserved"]
_Reserved(u8),
}
impl CLCK_WAYR {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bits(&self) -> u8 {
match *self {
CLCK_WAYR::_0 => 0,
CLCK_WAYR::_1 => 1,
CLCK_WAYR::_Reserved(bits) => bits,
}
}
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _from(value: u8) -> CLCK_WAYR {
match value {
0 => CLCK_WAYR::_0,
1 => CLCK_WAYR::_1,
i => CLCK_WAYR::_Reserved(i),
}
}
#[doc = "Checks if the value of the field is `_0`"]
#[inline]
pub fn is_0(&self) -> bool {
*self == CLCK_WAYR::_0
}
#[doc = "Checks if the value of the field is `_1`"]
#[inline]
pub fn is_1(&self) -> bool {
*self == CLCK_WAYR::_1
}
}
#[doc = r" Value of the field"]
pub struct B0RWSCR {
bits: u8,
}
impl B0RWSCR {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bits(&self) -> u8 {
self.bits
}
}
#[doc = "Values that can be written to the field `B0SEBE`"]
pub enum B0SEBEW {
#[doc = "Single entry buffer is disabled."]
_0,
#[doc = "Single entry buffer is enabled."]
_1,
}
impl B0SEBEW {
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _bits(&self) -> bool {
match *self {
B0SEBEW::_0 => false,
B0SEBEW::_1 => true,
}
}
}
#[doc = r" Proxy"]
pub struct _B0SEBEW<'a> {
w: &'a mut W,
}
impl<'a> _B0SEBEW<'a> {
#[doc = r" Writes `variant` to the field"]
#[inline]
pub fn variant(self, variant: B0SEBEW) -> &'a mut W {
{
self.bit(variant._bits())
}
}
#[doc = "Single entry buffer is disabled."]
#[inline]
pub fn _0(self) -> &'a mut W {
self.variant(B0SEBEW::_0)
}
#[doc = "Single entry buffer is enabled."]
#[inline]
pub fn _1(self) -> &'a mut W {
self.variant(B0SEBEW::_1)
}
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 0;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = "Values that can be written to the field `B0IPE`"]
pub enum B0IPEW {
#[doc = "Do not prefetch in response to instruction fetches."]
_0,
#[doc = "Enable prefetches in response to instruction fetches."]
_1,
}
impl B0IPEW {
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _bits(&self) -> bool {
match *self {
B0IPEW::_0 => false,
B0IPEW::_1 => true,
}
}
}
#[doc = r" Proxy"]
pub struct _B0IPEW<'a> {
w: &'a mut W,
}
impl<'a> _B0IPEW<'a> {
#[doc = r" Writes `variant` to the field"]
#[inline]
pub fn variant(self, variant: B0IPEW) -> &'a mut W {
{
self.bit(variant._bits())
}
}
#[doc = "Do not prefetch in response to instruction fetches."]
#[inline]
pub fn _0(self) -> &'a mut W {
self.variant(B0IPEW::_0)
}
#[doc = "Enable prefetches in response to instruction fetches."]
#[inline]
pub fn _1(self) -> &'a mut W {
self.variant(B0IPEW::_1)
}
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 1;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = "Values that can be written to the field `B0DPE`"]
pub enum B0DPEW {
#[doc = "Do not prefetch in response to data references."]
_0,
#[doc = "Enable prefetches in response to data references."]
_1,
}
impl B0DPEW {
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _bits(&self) -> bool {
match *self {
B0DPEW::_0 => false,
B0DPEW::_1 => true,
}
}
}
#[doc = r" Proxy"]
pub struct _B0DPEW<'a> {
w: &'a mut W,
}
impl<'a> _B0DPEW<'a> {
#[doc = r" Writes `variant` to the field"]
#[inline]
pub fn variant(self, variant: B0DPEW) -> &'a mut W {
{
self.bit(variant._bits())
}
}
#[doc = "Do not prefetch in response to data references."]
#[inline]
pub fn _0(self) -> &'a mut W {
self.variant(B0DPEW::_0)
}
#[doc = "Enable prefetches in response to data references."]
#[inline]
pub fn _1(self) -> &'a mut W {
self.variant(B0DPEW::_1)
}
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 2;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = "Values that can be written to the field `B0ICE`"]
pub enum B0ICEW {
#[doc = "Do not cache instruction fetches."]
_0,
#[doc = "Cache instruction fetches."]
_1,
}
impl B0ICEW {
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _bits(&self) -> bool {
match *self {
B0ICEW::_0 => false,
B0ICEW::_1 => true,
}
}
}
#[doc = r" Proxy"]
pub struct _B0ICEW<'a> {
w: &'a mut W,
}
impl<'a> _B0ICEW<'a> {
#[doc = r" Writes `variant` to the field"]
#[inline]
pub fn variant(self, variant: B0ICEW) -> &'a mut W {
{
self.bit(variant._bits())
}
}
#[doc = "Do not cache instruction fetches."]
#[inline]
pub fn _0(self) -> &'a mut W {
self.variant(B0ICEW::_0)
}
#[doc = "Cache instruction fetches."]
#[inline]
pub fn _1(self) -> &'a mut W {
self.variant(B0ICEW::_1)
}
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 3;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = "Values that can be written to the field `B0DCE`"]
pub enum B0DCEW {
#[doc = "Do not cache data references."]
_0,
#[doc = "Cache data references."]
_1,
}
impl B0DCEW {
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _bits(&self) -> bool {
match *self {
B0DCEW::_0 => false,
B0DCEW::_1 => true,
}
}
}
#[doc = r" Proxy"]
pub struct _B0DCEW<'a> {
w: &'a mut W,
}
impl<'a> _B0DCEW<'a> {
#[doc = r" Writes `variant` to the field"]
#[inline]
pub fn variant(self, variant: B0DCEW) -> &'a mut W {
{
self.bit(variant._bits())
}
}
#[doc = "Do not cache data references."]
#[inline]
pub fn _0(self) -> &'a mut W {
self.variant(B0DCEW::_0)
}
#[doc = "Cache data references."]
#[inline]
pub fn _1(self) -> &'a mut W {
self.variant(B0DCEW::_1)
}
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 4;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = "Values that can be written to the field `CRC`"]
pub enum CRCW {
#[doc = "LRU replacement algorithm per set across all four ways"]
_000,
#[doc = "Independent LRU with ways [0-1] for ifetches, [2-3] for data"]
_010,
#[doc = "Independent LRU with ways [0-2] for ifetches, [3] for data"]
_011,
}
impl CRCW {
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _bits(&self) -> u8 {
match *self {
CRCW::_000 => 0,
CRCW::_010 => 2,
CRCW::_011 => 3,
}
}
}
#[doc = r" Proxy"]
pub struct _CRCW<'a> {
w: &'a mut W,
}
impl<'a> _CRCW<'a> {
#[doc = r" Writes `variant` to the field"]
#[inline]
pub fn variant(self, variant: CRCW) -> &'a mut W {
unsafe { self.bits(variant._bits()) }
}
#[doc = "LRU replacement algorithm per set across all four ways"]
#[inline]
pub fn _000(self) -> &'a mut W {
self.variant(CRCW::_000)
}
#[doc = "Independent LRU with ways [0-1] for ifetches, [2-3] for data"]
#[inline]
pub fn _010(self) -> &'a mut W {
self.variant(CRCW::_010)
}
#[doc = "Independent LRU with ways [0-2] for ifetches, [3] for data"]
#[inline]
pub fn _011(self) -> &'a mut W {
self.variant(CRCW::_011)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub unsafe fn bits(self, value: u8) -> &'a mut W {
const MASK: u8 = 7;
const OFFSET: u8 = 5;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = "Values that can be written to the field `S_B_INV`"]
pub enum S_B_INVW {
#[doc = "Speculation buffer and single entry buffer are not affected."]
_0,
#[doc = "Invalidate (clear) speculation buffer and single entry buffer."]
_1,
}
impl S_B_INVW {
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _bits(&self) -> bool {
match *self {
S_B_INVW::_0 => false,
S_B_INVW::_1 => true,
}
}
}
#[doc = r" Proxy"]
pub struct _S_B_INVW<'a> {
w: &'a mut W,
}
impl<'a> _S_B_INVW<'a> {
#[doc = r" Writes `variant` to the field"]
#[inline]
pub fn variant(self, variant: S_B_INVW) -> &'a mut W {
{
self.bit(variant._bits())
}
}
#[doc = "Speculation buffer and single entry buffer are not affected."]
#[inline]
pub fn _0(self) -> &'a mut W {
self.variant(S_B_INVW::_0)
}
#[doc = "Invalidate (clear) speculation buffer and single entry buffer."]
#[inline]
pub fn _1(self) -> &'a mut W {
self.variant(S_B_INVW::_1)
}
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 19;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = "Values that can be written to the field `CINV_WAY`"]
pub enum CINV_WAYW {
#[doc = "No cache way invalidation for the corresponding cache"]
_0,
#[doc = "Invalidate cache way for the corresponding cache: clear the tag, data, and vld bits of ways selected"]
_1,
}
impl CINV_WAYW {
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _bits(&self) -> u8 {
match *self {
CINV_WAYW::_0 => 0,
CINV_WAYW::_1 => 1,
}
}
}
#[doc = r" Proxy"]
pub struct _CINV_WAYW<'a> {
w: &'a mut W,
}
impl<'a> _CINV_WAYW<'a> {
#[doc = r" Writes `variant` to the field"]
#[inline]
pub fn variant(self, variant: CINV_WAYW) -> &'a mut W {
unsafe { self.bits(variant._bits()) }
}
#[doc = "No cache way invalidation for the corresponding cache"]
#[inline]
pub fn _0(self) -> &'a mut W {
self.variant(CINV_WAYW::_0)
}
#[doc = "Invalidate cache way for the corresponding cache: clear the tag, data, and vld bits of ways selected"]
#[inline]
pub fn _1(self) -> &'a mut W {
self.variant(CINV_WAYW::_1)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub unsafe fn bits(self, value: u8) -> &'a mut W {
const MASK: u8 = 15;
const OFFSET: u8 = 20;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = "Values that can be written to the field `CLCK_WAY`"]
pub enum CLCK_WAYW {
#[doc = "Cache way is unlocked and may be displaced"]
_0,
#[doc = "Cache way is locked and its contents are not displaced"]
_1,
}
impl CLCK_WAYW {
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _bits(&self) -> u8 {
match *self {
CLCK_WAYW::_0 => 0,
CLCK_WAYW::_1 => 1,
}
}
}
#[doc = r" Proxy"]
pub struct _CLCK_WAYW<'a> {
w: &'a mut W,
}
impl<'a> _CLCK_WAYW<'a> {
#[doc = r" Writes `variant` to the field"]
#[inline]
pub fn variant(self, variant: CLCK_WAYW) -> &'a mut W {
unsafe { self.bits(variant._bits()) }
}
#[doc = "Cache way is unlocked and may be displaced"]
#[inline]
pub fn _0(self) -> &'a mut W {
self.variant(CLCK_WAYW::_0)
}
#[doc = "Cache way is locked and its contents are not displaced"]
#[inline]
pub fn _1(self) -> &'a mut W {
self.variant(CLCK_WAYW::_1)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub unsafe fn bits(self, value: u8) -> &'a mut W {
const MASK: u8 = 15;
const OFFSET: u8 = 24;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
impl R {
#[doc = r" Value of the register as raw bits"]
#[inline]
pub fn bits(&self) -> u32 {
self.bits
}
#[doc = "Bit 0 - Bank 0 Single Entry Buffer Enable"]
#[inline]
pub fn b0sebe(&self) -> B0SEBER {
B0SEBER::_from({
const MASK: bool = true;
const OFFSET: u8 = 0;
((self.bits >> OFFSET) & MASK as u32) != 0
})
}
#[doc = "Bit 1 - Bank 0 Instruction Prefetch Enable"]
#[inline]
pub fn b0ipe(&self) -> B0IPER {
B0IPER::_from({
const MASK: bool = true;
const OFFSET: u8 = 1;
((self.bits >> OFFSET) & MASK as u32) != 0
})
}
#[doc = "Bit 2 - Bank 0 Data Prefetch Enable"]
#[inline]
pub fn b0dpe(&self) -> B0DPER {
B0DPER::_from({
const MASK: bool = true;
const OFFSET: u8 = 2;
((self.bits >> OFFSET) & MASK as u32) != 0
})
}
#[doc = "Bit 3 - Bank 0 Instruction Cache Enable"]
#[inline]
pub fn b0ice(&self) -> B0ICER {
B0ICER::_from({
const MASK: bool = true;
const OFFSET: u8 = 3;
((self.bits >> OFFSET) & MASK as u32) != 0
})
}
#[doc = "Bit 4 - Bank 0 Data Cache Enable"]
#[inline]
pub fn b0dce(&self) -> B0DCER {
B0DCER::_from({
const MASK: bool = true;
const OFFSET: u8 = 4;
((self.bits >> OFFSET) & MASK as u32) != 0
})
}
#[doc = "Bits 5:7 - Cache Replacement Control"]
#[inline]
pub fn crc(&self) -> CRCR {
CRCR::_from({
const MASK: u8 = 7;
const OFFSET: u8 = 5;
((self.bits >> OFFSET) & MASK as u32) as u8
})
}
#[doc = "Bits 17:18 - Bank 0 Memory Width"]
#[inline]
pub fn b0mw(&self) -> B0MWR {
B0MWR::_from({
const MASK: u8 = 3;
const OFFSET: u8 = 17;
((self.bits >> OFFSET) & MASK as u32) as u8
})
}
#[doc = "Bits 24:27 - Cache Lock Way x"]
#[inline]
pub fn clck_way(&self) -> CLCK_WAYR {
CLCK_WAYR::_from({
const MASK: u8 = 15;
const OFFSET: u8 = 24;
((self.bits >> OFFSET) & MASK as u32) as u8
})
}
#[doc = "Bits 28:31 - Bank 0 Read Wait State Control"]
#[inline]
pub fn b0rwsc(&self) -> B0RWSCR {
let bits = {
const MASK: u8 = 15;
const OFFSET: u8 = 28;
((self.bits >> OFFSET) & MASK as u32) as u8
};
B0RWSCR { bits }
}
}
impl W {
#[doc = r" Reset value of the register"]
#[inline]
pub fn reset_value() -> W {
W { bits: 805437471 }
}
#[doc = r" Writes raw bits to the register"]
#[inline]
pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
self.bits = bits;
self
}
#[doc = "Bit 0 - Bank 0 Single Entry Buffer Enable"]
#[inline]
pub fn b0sebe(&mut self) -> _B0SEBEW {
_B0SEBEW { w: self }
}
#[doc = "Bit 1 - Bank 0 Instruction Prefetch Enable"]
#[inline]
pub fn b0ipe(&mut self) -> _B0IPEW {
_B0IPEW { w: self }
}
#[doc = "Bit 2 - Bank 0 Data Prefetch Enable"]
#[inline]
pub fn b0dpe(&mut self) -> _B0DPEW {
_B0DPEW { w: self }
}
#[doc = "Bit 3 - Bank 0 Instruction Cache Enable"]
#[inline]
pub fn b0ice(&mut self) -> _B0ICEW {
_B0ICEW { w: self }
}
#[doc = "Bit 4 - Bank 0 Data Cache Enable"]
#[inline]
pub fn b0dce(&mut self) -> _B0DCEW {
_B0DCEW { w: self }
}
#[doc = "Bits 5:7 - Cache Replacement Control"]
#[inline]
pub fn crc(&mut self) -> _CRCW {
_CRCW { w: self }
}
#[doc = "Bit 19 - Invalidate Prefetch Speculation Buffer"]
#[inline]
pub fn s_b_inv(&mut self) -> _S_B_INVW {
_S_B_INVW { w: self }
}
#[doc = "Bits 20:23 - Cache Invalidate Way x"]
#[inline]
pub fn cinv_way(&mut self) -> _CINV_WAYW {
_CINV_WAYW { w: self }
}
#[doc = "Bits 24:27 - Cache Lock Way x"]
#[inline]
pub fn clck_way(&mut self) -> _CLCK_WAYW {
_CLCK_WAYW { w: self }
}
}
| true |
b22a88239149a5b80d507d6e4080fb3d6fac4ada
|
Rust
|
jamiebrynes7/spatialos-sdk-rs
|
/spatialos-sdk-code-generator/src/generator.rs
|
UTF-8
| 18,890 | 2.765625 | 3 |
[
"MIT",
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
use crate::schema_bundle::*;
use heck::CamelCase;
use std::borrow::Cow;
use std::cell::RefCell;
use std::collections::{BTreeMap, BTreeSet, HashSet};
use std::rc::Rc;
/// Maps a schema primitive type to the name of the `Schema*` wrapper type
/// emitted into generated code.
///
/// # Panics
///
/// Panics if given `PrimitiveType::Invalid`.
fn primitive_type_name(primitive_type: &PrimitiveType) -> &'static str {
    match primitive_type {
        PrimitiveType::Invalid => panic!("Encountered invalid primitive."),
        PrimitiveType::Int32 => "SchemaInt32",
        PrimitiveType::Sint32 => "SchemaSint32",
        PrimitiveType::Sfixed32 => "SchemaSfixed32",
        PrimitiveType::Int64 => "SchemaInt64",
        PrimitiveType::Sint64 => "SchemaSint64",
        PrimitiveType::Sfixed64 => "SchemaSfixed64",
        PrimitiveType::Uint32 => "SchemaUint32",
        PrimitiveType::Fixed32 => "SchemaFixed32",
        PrimitiveType::Uint64 => "SchemaUint64",
        PrimitiveType::Fixed64 => "SchemaFixed64",
        PrimitiveType::Bool => "SchemaBool",
        PrimitiveType::Float => "SchemaFloat",
        PrimitiveType::Double => "SchemaDouble",
        PrimitiveType::String => "SchemaString",
        PrimitiveType::EntityId => "SchemaEntityId",
        PrimitiveType::Entity => "SchemaEntity",
        PrimitiveType::Bytes => "SchemaBytes",
    }
}
/// One node of the generated module tree. Each `Package` is rendered via the
/// referenced template into a Rust module containing that schema package's
/// definitions.
#[derive(Debug, Template)]
#[TemplatePath = "./src/generated_code_mod.tt.rs"]
struct Package {
    /// Shared handle to all parsed schema definitions.
    generated_code: Rc<RefCell<GeneratedCode>>,
    /// The name of this package.
    name: String,
    /// This package's dotted path, split into individual segments.
    path: Vec<String>,
    /// Child packages keyed by name.
    subpackages: BTreeMap<String, Package>,
    /// Qualified names of enums declared directly in this package.
    enums: BTreeSet<String>,
    /// Qualified names of types declared directly in this package.
    types: BTreeSet<String>,
    /// Qualified names of components declared directly in this package.
    components: BTreeSet<String>,
}
#[allow(clippy::needless_bool)]
impl Package {
fn new(generated_code: Rc<RefCell<GeneratedCode>>, name: &str, path: Vec<String>) -> Package {
Package {
generated_code,
name: name.to_string(),
path,
subpackages: BTreeMap::new(),
enums: BTreeSet::new(),
types: BTreeSet::new(),
components: BTreeSet::new(),
}
}
    /// Looks up a direct child package by its unqualified name part.
    pub fn get_subpackage(&self, package_part: &str) -> Option<&Package> {
        self.subpackages.get(package_part)
    }
    /// The nesting depth of this package: the number of path segments.
    fn depth(&self) -> usize {
        self.path.len()
    }
fn rust_name(&self, qualified_name: &str) -> String {
let tokens: Vec<&str> = qualified_name.split('.').collect();
tokens[self.path.len()..].join("_")
}
    /// Builds the fully-qualified Rust path for a schema identifier:
    /// `generated::<owning package path>::<relative name>`.
    fn rust_fqname(&self, qualified_name: &str) -> String {
        let gen_code = self.generated_code.borrow();
        // Resolve which package actually owns the identifier; the relative
        // name must be computed against that package, not `self`.
        let identifier_package = gen_code.get_package(qualified_name);
        [
            "generated".to_string(),
            identifier_package.path.join("::"),
            identifier_package.rust_name(qualified_name),
        ]
        .join("::")
    }
    /// Returns the Rust type name for a schema type reference: the static
    /// `Schema*` wrapper for primitives, or the fully-qualified generated
    /// path for enums and user-defined types.
    fn schema_type_name(&self, type_ref: &TypeReference) -> Cow<'static, str> {
        match type_ref {
            TypeReference::Primitive(prim) => primitive_type_name(prim).into(),
            TypeReference::Enum(name) => self.rust_fqname(name).into(),
            TypeReference::Type(name) => self.rust_fqname(name).into(),
        }
    }
fn field_type_name(&self, field_ty: &FieldDefinition_FieldType) -> Cow<'static, str> {
match field_ty {
FieldDefinition_FieldType::Singular { type_reference } => {
self.schema_type_name(type_reference)
}
FieldDefinition_FieldType::Option { inner_type } => {
if self.is_type_recursive(inner_type) {
format!("RecursiveOptional<{}>", self.schema_type_name(inner_type)).into()
} else {
format!("Optional<{}>", self.schema_type_name(inner_type)).into()
}
}
FieldDefinition_FieldType::List { inner_type } => {
format!("List<{}>", self.schema_type_name(inner_type)).into()
}
FieldDefinition_FieldType::Map {
key_type,
value_type,
} => format!(
"Map<{}, {}>",
self.schema_type_name(key_type),
self.schema_type_name(value_type),
)
.into(),
}
}
fn get_enum_definition(&self, qualified_name: &str) -> EnumDefinition {
self.generated_code
.borrow()
.enums
.get(&qualified_name.to_string())
.unwrap_or_else(|| panic!("Unable to find enum {}", qualified_name))
.clone()
}
fn get_type_definition(&self, qualified_name: &str) -> TypeDefinition {
self.generated_code
.borrow()
.types
.get(&qualified_name.to_string())
.unwrap_or_else(|| panic!("Unable to find type {}", qualified_name))
.clone()
}
fn get_component_definition(&self, qualified_name: &str) -> ComponentDefinition {
self.generated_code
.borrow()
.components
.get(&qualified_name.to_string())
.unwrap_or_else(|| panic!("Unable to find component {}", qualified_name))
.clone()
}
fn resolve_enum_reference(&self, qualified_name: &str) -> EnumDefinition {
self.generated_code
.borrow()
.resolve_enum_reference(qualified_name)
.clone()
}
fn resolve_type_reference(&self, qualified_name: &str) -> TypeDefinition {
self.generated_code
.borrow()
.resolve_type_reference(qualified_name)
.clone()
}
fn get_component_fields(&self, component: &ComponentDefinition) -> Vec<FieldDefinition> {
if let Some(ref data_definition) = component.data_definition {
let data_type = self.resolve_type_reference(&data_definition);
data_type.fields
} else {
component.fields.clone()
}
}
fn generate_rust_type_name(&self, value_type: &TypeReference) -> String {
match value_type {
TypeReference::Primitive(ref primitive) => match primitive {
PrimitiveType::Invalid => panic!("Encountered invalid primitive."),
PrimitiveType::Int32 | PrimitiveType::Sint32 | PrimitiveType::Sfixed32 => "i32",
PrimitiveType::Int64 | PrimitiveType::Sint64 | PrimitiveType::Sfixed64 => "i64",
PrimitiveType::Uint32 | PrimitiveType::Fixed32 => "u32",
PrimitiveType::Uint64 | PrimitiveType::Fixed64 => "u64",
PrimitiveType::Bool => "bool",
PrimitiveType::Float => "FloatOrd<f32>",
PrimitiveType::Double => "FloatOrd<f64>",
PrimitiveType::String => "String",
PrimitiveType::EntityId => "spatialos_sdk::EntityId",
PrimitiveType::Entity => "spatialos_sdk::entity::Entity",
PrimitiveType::Bytes => "Vec<u8>",
}
.to_string(),
TypeReference::Enum(ref enum_ref) => {
self.rust_fqname(&self.resolve_enum_reference(&enum_ref).qualified_name)
}
TypeReference::Type(ref type_ref) => {
self.rust_fqname(&self.resolve_type_reference(&type_ref).qualified_name)
}
}
}
fn generate_field_type(&self, field: &FieldDefinition) -> String {
match field.field_type {
FieldDefinition_FieldType::Singular { ref type_reference } => {
self.generate_rust_type_name(type_reference)
}
FieldDefinition_FieldType::Option { ref inner_type } => {
if self.is_type_recursive(inner_type) {
format!("Option<Box<{}>>", self.generate_rust_type_name(inner_type))
} else {
format!("Option<{}>", self.generate_rust_type_name(inner_type))
}
}
FieldDefinition_FieldType::List { ref inner_type } => {
format!("Vec<{}>", self.generate_rust_type_name(inner_type))
}
FieldDefinition_FieldType::Map {
ref key_type,
ref value_type,
} => format!(
"BTreeMap<{}, {}>",
self.generate_rust_type_name(key_type),
self.generate_rust_type_name(value_type)
),
}
}
fn is_type_recursive(&self, type_ref: &TypeReference) -> bool {
fn is_recursive(
gen_code: Rc<RefCell<GeneratedCode>>,
type_name: &str,
mut parent_types: HashSet<String>,
) -> bool {
let type_def = gen_code.borrow().resolve_type_reference(type_name).clone();
let direct_child_types: Vec<String> = type_def
.fields
.iter()
.filter_map(|f| {
if let FieldDefinition_FieldType::Singular { type_reference } = &f.field_type {
if let TypeReference::Type(name) = type_reference {
return Some(name.clone());
}
}
None
})
.collect();
let option_child_types: Vec<String> = type_def
.fields
.iter()
.filter_map(|f| {
if let FieldDefinition_FieldType::Option { inner_type } = &f.field_type {
if let TypeReference::Type(name) = inner_type {
return Some(name.clone());
}
}
None
})
.collect();
let found_recursive_field = direct_child_types
.iter()
.any(|name| parent_types.contains(name))
|| option_child_types
.iter()
.any(|name| parent_types.contains(name));
if found_recursive_field {
return true;
}
parent_types.insert(type_name.to_owned());
direct_child_types
.iter()
.any(|name| is_recursive(gen_code.clone(), name, parent_types.clone()))
|| option_child_types
.iter()
.any(|name| is_recursive(gen_code.clone(), name, parent_types.clone()))
}
if let TypeReference::Type(type_name) = type_ref {
let mut parents = HashSet::new();
parents.insert(type_name.to_owned());
return is_recursive(self.generated_code.clone(), type_name, parents);
}
false
}
fn field_needs_clone(&self, field: &FieldDefinition) -> bool {
match field.field_type {
FieldDefinition_FieldType::Singular { ref type_reference } => {
self.type_needs_clone(type_reference)
}
FieldDefinition_FieldType::Option { ref inner_type } => {
self.type_needs_clone(inner_type)
}
FieldDefinition_FieldType::List { .. } | FieldDefinition_FieldType::Map { .. } => true,
}
}
fn type_needs_clone(&self, type_ref: &TypeReference) -> bool {
match type_ref {
TypeReference::Primitive(ref primitive) => match primitive {
PrimitiveType::String | PrimitiveType::Bytes | PrimitiveType::Entity => true,
_ => false,
},
TypeReference::Enum(_) => false,
TypeReference::Type(_) => true,
}
}
// Generates an expression which serializes a field from an expression into a schema object. The generated
// expression should always have type ().
fn serialize_field(&self, field: &FieldDefinition, schema_object: &str) -> String {
format!(
"{}.add::<{}>({}, &self.{})",
schema_object,
self.field_type_name(&field.field_type),
field.field_id,
field.name,
)
}
// Generates an expression which deserializes a field from a schema field 'schema_field'.
fn deserialize_field(&self, field: &FieldDefinition, schema_field: &str) -> String {
format!(
"{}.get::<{}>({field}).map_err(Error::at_field::<Self>({field}))?",
schema_field,
self.field_type_name(&field.field_type),
field = field.field_id,
)
}
fn deserialize_update_field(&self, field: &FieldDefinition, update: &str) -> String {
format!(
"{}.get_field::<{}>({field}).map_err(Error::at_field::<Self>({field}))?",
update,
self.field_type_name(&field.field_type),
field = field.field_id,
)
}
fn deserialize_update_event(
&self,
event: &ComponentDefinition_EventDefinition,
update: &str,
) -> String {
format!(
"(0..update.events().object_count({event_index})).map(|i| {}.get_event::<{}>({event_index}, i)).collect::<Result<_>>()?",
update,
self.rust_fqname(&event.type_reference),
event_index = event.event_index
)
}
fn serialize_update_field(&self, field: &FieldDefinition, update: &str) -> String {
format!(
"{}.add_field::<{}>({}, &self.{})",
update,
self.field_type_name(&field.field_type),
field.field_id,
field.name,
)
}
fn serialize_update_event(
&self,
event: &ComponentDefinition_EventDefinition,
update: &str,
) -> String {
format!(
"for ev in &self.{} {{ {}.add_event::<{}>({}, ev); }}",
event.name,
update,
self.rust_fqname(&event.type_reference),
event.event_index
)
}
}
/// Registry of every definition parsed from the schema bundle, keyed by
/// fully qualified name, plus the root of the package tree.
#[derive(Debug)]
struct GeneratedCode {
    // Set once all files have been ingested (see `generate_code`).
    root_package: Option<Package>,
    packages: BTreeSet<String>,
    enums: BTreeMap<String, EnumDefinition>,
    types: BTreeMap<String, TypeDefinition>,
    components: BTreeMap<String, ComponentDefinition>,
}
impl GeneratedCode {
    /// Looks up a type definition by qualified name; panics if unknown.
    fn resolve_type_reference(&self, qualified_name: &str) -> &TypeDefinition {
        &self.types[qualified_name]
    }

    /// Looks up an enum definition by qualified name; panics if unknown.
    fn resolve_enum_reference(&self, qualified_name: &str) -> &EnumDefinition {
        &self.enums[qualified_name]
    }

    /// Walks the package tree along the dotted `qualified_name`, returning
    /// the deepest package that exists along that path.
    pub fn get_package(&self, qualified_name: &str) -> &Package {
        let mut package = self.root_package.as_ref().unwrap();
        for part in qualified_name.split('.') {
            match package.get_subpackage(part) {
                Some(subpackage) => package = subpackage,
                None => break,
            }
        }
        package
    }
}
// This function ensures that given a path ["example", "foo"] and the root package, it will create
// 2 packages with the following structure:
//   Package("root", [Package("example", [Package("foo", [])])])
//
// Returns a mutable reference to the innermost package on the path.
fn get_or_create_packages<'a>(package: &'a mut Package, path: &[&str]) -> &'a mut Package {
    if path.is_empty() {
        return package;
    }

    let package_name = path[0];
    // The child's path is the parent's path plus the child's own name.
    let mut package_path = package.path.clone();
    package_path.push(package_name.to_string());

    // Clone the registry handle up front so the closure below does not
    // borrow `package` while `subpackages` is mutably borrowed.
    let generated_code = Rc::clone(&package.generated_code);

    // Single map lookup via the entry API (the original did
    // `contains_key` + `insert` + `get_mut`, hashing the key three times).
    let child = package
        .subpackages
        .entry(package_name.to_string())
        .or_insert_with(|| Package::new(generated_code, package_name, package_path));

    // Recurse into the child and create deeper packages as needed.
    get_or_create_packages(child, &path[1..])
}
/// Recursively renders a package (via its `Template` `Display` impl) and all
/// of its subpackages into Rust source. The root package additionally gets
/// crate-level lint allowances prepended; non-root packages are wrapped in a
/// `pub mod` declaration.
fn generate_module(package: &Package) -> String {
    // Each submodule is rendered recursively and prefixed with a newline;
    // an empty subpackage map naturally yields the empty string, so the
    // original's special-case `is_empty` branch is unnecessary.
    let submodules: String = package
        .subpackages
        .values()
        .map(|pkg| format!("\n{}", generate_module(pkg)))
        .collect();
    // Passing `package` to format! causes the T4 template engine to generate output.
    let module_contents = format!("{}\n{}", package, submodules);

    // The only package with a depth of 0 is the root package.
    if package.depth() == 0 {
        // Generated code triggers many lints we cannot control; silence them
        // for the whole generated crate.
        let allow_warnings = [
            "#![allow(unused_imports)]",
            "#![allow(unreachable_code)]",
            "#![allow(unreachable_patterns)]",
            "#![allow(unused_variables)]",
            "#![allow(dead_code)]",
            "#![allow(non_camel_case_types)]",
            "#![allow(unused_mut)]",
            "#![allow(clippy::unreadable_literal)]",
            "#![allow(clippy::option_option)]",
        ]
        .join("\n");

        format!("{}\n\n{}", allow_warnings, module_contents)
    } else {
        format!("pub mod {} {{\n{}}}\n", package.name, module_contents)
    }
}
/// Entry point: ingests a parsed `SchemaBundle` into the shared registry and
/// package tree, then renders the whole tree to Rust source.
pub fn generate_code(bundle: SchemaBundle) -> String {
    // Start with an empty registry shared by every package node.
    let generated_code = Rc::new(RefCell::new(GeneratedCode {
        root_package: None,
        packages: BTreeSet::new(),
        enums: BTreeMap::new(),
        types: BTreeMap::new(),
        components: BTreeMap::new(),
    }));
    let mut root_package = Package::new(Rc::clone(&generated_code), "", vec![]);

    for file in bundle.schema_files {
        // Find (or lazily create) the package node this file belongs to.
        let path_parts: Vec<&str> = file.package.name.split('.').collect();
        let package = get_or_create_packages(&mut root_package, &path_parts);

        // Record every definition both on its owning package (name only)
        // and in the shared registry (name -> full definition).
        for type_def in file.types {
            let name = type_def.qualified_name.clone();
            package.types.insert(name.clone());
            generated_code.borrow_mut().types.insert(name, type_def);
        }
        for enum_def in file.enums {
            let name = enum_def.qualified_name.clone();
            package.enums.insert(name.clone());
            generated_code.borrow_mut().enums.insert(name, enum_def);
        }
        for component_def in file.components {
            let name = component_def.qualified_name.clone();
            package.components.insert(name.clone());
            generated_code
                .borrow_mut()
                .components
                .insert(name, component_def);
        }
    }

    generated_code.borrow_mut().root_package = Some(root_package);
    let generated_code_ref = generated_code.borrow();
    generate_module(generated_code_ref.root_package.as_ref().unwrap())
}
| true |
fd228d168e9f05ad2f43640c3dad259b6216850a
|
Rust
|
Rahix/avr-hal
|
/mcu/attiny-hal/src/simple_pwm.rs
|
UTF-8
| 7,269 | 2.53125 | 3 |
[
"Apache-2.0",
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
pub use avr_hal_generic::simple_pwm::{PwmPinOps, Prescaler};
#[cfg(any(feature = "attiny85",feature = "attiny84",feature="attiny88"))]
use crate::port::*;
#[cfg(feature = "attiny84")]
avr_hal_generic::impl_simple_pwm! {
    /// Use `TC0` for PWM (pins `PB2`, `PA7`)
    pub struct Timer0Pwm {
        timer: crate::pac::TC0,
        init: |tim, prescaler| {
            // Fast PWM waveform generation mode on TC0.
            tim.tccr0a.modify(|_r, w| w.wgm0().pwm_fast());
            // Select the clock source / prescaler for the timer.
            tim.tccr0b.modify(|_r, w| match prescaler {
                Prescaler::Direct => w.cs0().direct(),
                Prescaler::Prescale8 => w.cs0().prescale_8(),
                Prescaler::Prescale64 => w.cs0().prescale_64(),
                Prescaler::Prescale256 => w.cs0().prescale_256(),
                Prescaler::Prescale1024 => w.cs0().prescale_1024(),
            });
        },
        pins: {
            // `enable` is an identifier bound by the impl_simple_pwm! macro.
            PB2: {
                ocr: ocr0a,
                into_pwm: |tim| if enable {
                    // Connect OC0A: clear output on compare match.
                    tim.tccr0a.modify(|_r, w| w.com0a().match_clear());
                } else {
                    tim.tccr0a.modify(|_r, w| w.com0a().disconnected());
                },
            },
            PA7: {
                ocr: ocr0b,
                into_pwm: |tim| if enable {
                    tim.tccr0a.modify(|_r, w| w.com0b().match_clear());
                } else {
                    tim.tccr0a.modify(|_r, w| w.com0b().disconnected());
                },
            },
        },
    }
}

#[cfg(feature = "attiny84")]
avr_hal_generic::impl_simple_pwm! {
    /// Use `TC1` for PWM (pins `PA6`, 'PA5')
    pub struct Timer1Pwm {
        timer: crate::pac::TC1,
        init: |tim, prescaler| {
            // WGM bits are split across TCCR1A/TCCR1B on this 16-bit timer.
            tim.tccr1a.modify(|_, w| w.wgm1().bits(0b01));
            tim.tccr1b.modify(|_, w| w.wgm1().bits(0b01));

            tim.tccr1b.modify(|_r, w| match prescaler {
                Prescaler::Direct => w.cs1().direct(),
                Prescaler::Prescale8 => w.cs1().prescale_8(),
                Prescaler::Prescale64 => w.cs1().prescale_64(),
                Prescaler::Prescale256 => w.cs1().prescale_256(),
                Prescaler::Prescale1024 => w.cs1().prescale_1024(),
            });
        },
        pins: {
            PA6: {
                ocr: ocr1a,
                into_pwm: |tim| if enable {
                    // COM1A = 0b10: non-inverting compare output mode.
                    tim.tccr1a.modify(|_, w| w.com1a().bits(0b10));
                } else {
                    tim.tccr1a.modify(|_, w| w.com1a().disconnected());
                },
            },
            PA5: {
                ocr: ocr1b,
                into_pwm: |tim| if enable {
                    tim.tccr1a.modify(|_, w| w.com1b().bits(0b10));
                } else {
                    tim.tccr1a.modify(|_, w| w.com1b().disconnected());
                },
            },
        },
    }
}
#[cfg(feature = "attiny85")]
avr_hal_generic::impl_simple_pwm! {
    /// Use `TC0` for PWM (pins `PB0`, `PB1`)
    ///
    /// # Example
    /// ```
    /// let mut timer0 = Timer0Pwm::new(dp.TC0, Prescaler::Prescale64);
    ///
    /// let mut d0 = pins.d0.into_output().into_pwm(&mut timer0);
    /// let mut d1 = pins.d1.into_output().into_pwm(&mut timer0);
    ///
    /// d0.set_duty(128);
    /// d0.enable();
    /// ```
    pub struct Timer0Pwm {
        timer: crate::pac::TC0,
        init: |tim, prescaler| {
            // Fast PWM mode; clock source selected from `prescaler`.
            tim.tccr0a.modify(|_r, w| w.wgm0().pwm_fast());
            tim.tccr0b.modify(|_r, w| match prescaler {
                Prescaler::Direct => w.cs0().direct(),
                Prescaler::Prescale8 => w.cs0().prescale_8(),
                Prescaler::Prescale64 => w.cs0().prescale_64(),
                Prescaler::Prescale256 => w.cs0().prescale_256(),
                Prescaler::Prescale1024 => w.cs0().prescale_1024(),
            });
        },
        pins: {
            // `enable` is an identifier bound by the impl_simple_pwm! macro.
            PB0: {
                ocr: ocr0a,
                into_pwm: |tim| if enable {
                    tim.tccr0a.modify(|_r, w| w.com0a().match_clear());
                } else {
                    tim.tccr0a.modify(|_r, w| w.com0a().disconnected());
                },
            },
            PB1: {
                ocr: ocr0b,
                into_pwm: |tim| if enable {
                    tim.tccr0a.modify(|_r, w| w.com0b().match_clear());
                } else {
                    tim.tccr0a.modify(|_r, w| w.com0b().disconnected());
                },
            },
        },
    }
}

#[cfg(feature = "attiny85")]
avr_hal_generic::impl_simple_pwm! {
    /// Use `TC1` for PWM (pins `PB4`)
    ///
    /// # Example
    /// ```
    /// let mut timer1 = Timer1Pwm::new(dp.TC1, Prescaler::Prescale64);
    ///
    /// let mut d4 = pins.d4.into_output().into_pwm(&mut timer1);
    ///
    /// d4.set_duty(128);
    /// d4.enable();
    /// ```
    pub struct Timer1Pwm {
        timer: crate::pac::TC1,
        init: |tim, prescaler| {
            // TC1 on the attiny85 uses GTCCR for PWM-B enable and TCCR1 for
            // the clock select bits.
            tim.gtccr.modify(|_, w| w.pwm1b().bit(true));

            tim.tccr1.modify(|_r, w| match prescaler {
                Prescaler::Direct => w.cs1().direct(),
                Prescaler::Prescale8 => w.cs1().prescale_8(),
                Prescaler::Prescale64 => w.cs1().prescale_64(),
                Prescaler::Prescale256 => w.cs1().prescale_256(),
                Prescaler::Prescale1024 => w.cs1().prescale_1024(),
            });
        },
        pins: {
            PB4: {
                ocr: ocr1b,
                into_pwm: |tim| if enable {
                    tim.gtccr.modify(|_, w| w.com1b().bits(0b10));
                } else {
                    tim.gtccr.modify(|_, w| w.com1b().disconnected());
                },
            },
        },
    }
}

#[cfg(feature = "attiny88")]
avr_hal_generic::impl_simple_pwm! {
    /// Use `TC1` for PWM (pins `PB1`, 'PB2')
    ///
    /// # Example
    /// ```
    /// let mut timer1 = Timer1Pwm::new(dp.TC1, Prescaler::Prescale64);
    ///
    /// let mut d9 = pins.d9.into_output().into_pwm(&mut timer1);
    /// let mut d10 = pins.d10.into_output().into_pwm(&mut timer1);
    ///
    /// d9.set_duty(128);
    /// d9.enable();
    /// ```
    pub struct Timer1Pwm {
        timer: crate::pac::TC1,
        init: |tim, prescaler| {
            // WGM bits are split across TCCR1A/TCCR1B on this timer.
            tim.tccr1a.modify(|_, w| w.wgm1().bits(0b01));
            tim.tccr1b.modify(|_, w| w.wgm1().bits(0b01));

            tim.tccr1b.modify(|_r, w| match prescaler {
                Prescaler::Direct => w.cs1().direct(),
                Prescaler::Prescale8 => w.cs1().prescale_8(),
                Prescaler::Prescale64 => w.cs1().prescale_64(),
                Prescaler::Prescale256 => w.cs1().prescale_256(),
                Prescaler::Prescale1024 => w.cs1().prescale_1024(),
            });
        },
        pins: {
            PB1: {
                ocr: ocr1a,
                into_pwm: |tim| if enable {
                    tim.tccr1a.modify(|_, w| w.com1a().bits(0b10));
                } else {
                    tim.tccr1a.modify(|_, w| w.com1a().disconnected());
                },
            },
            PB2: {
                ocr: ocr1b,
                into_pwm: |tim| if enable {
                    tim.tccr1a.modify(|_, w| w.com1b().bits(0b10));
                } else {
                    tim.tccr1a.modify(|_, w| w.com1b().disconnected());
                },
            },
        },
    }
}
| true |
ccd8f604b47edd6533c8a5a89afbe2f7f2a48760
|
Rust
|
TheChurro/Manifold-Tracer
|
/src/math/ray.rs
|
UTF-8
| 1,449 | 3.171875 | 3 |
[
"MIT"
] |
permissive
|
use crate::math::geometry::aabb::AABBGeometry;
use crate::math::vectors::Vec3;
/// A ray with a normalized direction (see the constructors) and a cast time
/// (presumably for time-dependent scenes — confirm against camera code).
#[derive(Debug)]
pub struct Ray {
    pub origin: Vec3,
    pub direction: Vec3,
    pub cast_time: f32,
}
impl Ray {
    /// Builds a ray from `origin` along `direction` (normalized here),
    /// cast at time zero.
    pub fn new(origin: Vec3, direction: Vec3) -> Ray {
        let direction = direction.normalized();
        Ray {
            origin,
            direction,
            cast_time: 0.0,
        }
    }

    /// Builds a ray from `origin` aimed at `target`, cast at time zero.
    pub fn look_at(origin: Vec3, target: Vec3) -> Ray {
        let direction = (target - origin).normalized();
        Ray {
            origin,
            direction,
            cast_time: 0.0,
        }
    }

    /// Consumes the ray and returns it stamped with the given cast time.
    pub fn cast_at(self, time: f32) -> Ray {
        let mut ray = self;
        ray.cast_time = time;
        ray
    }

    /// The point reached after travelling `time` units along the ray.
    pub fn point_at_parameter(&self, time: f32) -> Vec3 {
        self.origin + time * self.direction
    }
}
/// The result of a successful ray-object intersection.
#[derive(Debug)]
pub struct RayHit {
    // Parameter along the ray at which the hit occurred (see
    // `Ray::point_at_parameter`).
    pub hit_fraction: f32,
    pub location: Vec3,
    pub normal: Vec3,
    // Surface parameterization coordinates at the hit point.
    pub u: f32,
    pub v: f32,
}
use std::fmt;

/// Human-readable one-line summary of a hit, for logging/debug output.
impl fmt::Display for RayHit {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(
            f,
            "RayHit {{ location: {} | normal: {} | uv: ({}, {}) }} at {}",
            self.location, self.normal, self.u, self.v, self.hit_fraction
        )
    }
}
/// Implemented by anything a ray can intersect.
pub trait RayCollidable {
    /// Returns a hit with parameter `t` restricted to `[t_min, t_max]`,
    /// or `None` if the ray misses within that interval.
    fn hit(&self, ray: &Ray, t_min: f32, t_max: f32) -> Option<RayHit>;
    /// Axis-aligned bounding box covering the object over the given time
    /// interval, or `None` if the object is unbounded.
    fn bounding_box(&self, t_min: f32, t_max: f32) -> Option<AABBGeometry>;
}
| true |
23495ccf920554265f389448e1bf8c6914dae8b3
|
Rust
|
tychedelia/franz
|
/xtask/src/generate_messages/spec.rs
|
UTF-8
| 6,727 | 2.640625 | 3 |
[] |
no_license
|
use std::fmt::{self, Display};
use std::str::FromStr;
use std::cmp;
use std::ops::RangeInclusive;
use serde::{Serialize, Deserialize};
use serde_plain::*;
use parse_display::{Display, FromStr};
/// Top-level message/header specification parsed from a Kafka JSON spec file.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct Spec {
    // Only request/response specs carry an API key; headers do not.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub api_key: Option<i16>,
    #[serde(rename = "type")]
    pub type_: SpecType,
    pub name: String,
    pub valid_versions: VersionSpec,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub flexible_versions: Option<VersionSpec>,
    pub fields: Vec<FieldSpec>,
    // Structs shared by several fields within the same spec file.
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub common_structs: Vec<StructSpec>,
}

/// A single field of a message or struct, including per-version behavior
/// (tagged, nullable, default value, ...).
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct FieldSpec {
    pub name: String,
    #[serde(rename = "type")]
    pub type_: TypeSpec,
    // Versions in which this field is present on the wire.
    pub versions: VersionSpec,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tag: Option<i32>,
    #[serde(default, skip_serializing_if = "VersionSpec::is_none")]
    pub tagged_versions: VersionSpec,
    #[serde(default, skip_serializing_if = "VersionSpec::is_none")]
    pub nullable_versions: VersionSpec,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub default: Option<serde_json::Value>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub ignorable: Option<bool>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub entity_type: Option<String>,
    // True when this field acts as the key of an implicit map entry.
    #[serde(default, skip_serializing_if = "std::ops::Not::not")]
    pub map_key: bool,
    // Human-readable description from the spec file.
    #[serde(default, skip_serializing_if = "str::is_empty")]
    pub about: String,
    // Inline struct fields, present when `type_` names a nested struct.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub fields: Option<Vec<FieldSpec>>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub flexible_versions: Option<VersionSpec>,
}

/// A named struct definition from the spec's `commonStructs` section.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct StructSpec {
    pub name: String,
    pub versions: VersionSpec,
    pub fields: Vec<FieldSpec>,
}

/// The kind of schema a spec file describes.
#[derive(Debug, Copy, Clone, Serialize, Deserialize, PartialEq, Eq)]
#[serde(rename_all = "camelCase")]
pub enum SpecType {
    Header,
    Request,
    Response,
}

/// A version constraint as written in spec files: `none`, an exact version
/// (`"2"`), an open range (`"2+"`), or a closed range (`"2-5"`).
/// Parsing/printing is driven by the `Display`/`FromStr` derives below.
#[derive(Debug, Copy, Clone, Display, FromStr, Eq, PartialEq)]
pub enum VersionSpec {
    #[display("none")]
    #[from_str(regex = r"none")]
    None,
    #[display("{0}")]
    #[from_str(regex = r"(?P<0>\d+)")]
    Exact(i16),
    #[display("{0}+")]
    #[from_str(regex = r"(?P<0>\d+)\+")]
    Since(i16),
    #[display("{0}-{1}")]
    #[from_str(regex = r"(?P<0>\d+)-(?P<1>\d+)")]
    Range(i16, i16),
}

// Serde support is routed through the Display/FromStr representations above.
derive_serialize_from_display!(VersionSpec);
derive_deserialize_from_str!(VersionSpec, "valid version specification");
impl VersionSpec {
    /// Intersects two version constraints, returning the constraint matched
    /// by exactly the versions matched by both. Non-overlapping inputs
    /// yield `None`. NOTE: the guard order below is significant — arms are
    /// tried top to bottom, so keep edits order-preserving.
    pub fn intersect(self, other: VersionSpec) -> VersionSpec {
        use VersionSpec::*;
        match (self, other) {
            // Identical exact versions.
            (Exact(a), Exact(b)) if a == b => Exact(a),
            // An exact version inside an open/closed range collapses to it.
            (Exact(a), Since(b)) | (Since(b), Exact(a)) if a >= b => Exact(a),
            (Exact(v), Range(a, b)) | (Range(a, b), Exact(v)) if v >= a && v <= b => Exact(v),
            // Two open ranges: keep the later start.
            (Since(a), Since(b)) => Since(cmp::max(a, b)),
            // Open range meeting a closed range exactly at its upper bound.
            (Since(v), Range(_, b)) | (Range(_, b), Since(v)) if b == v => Exact(v),
            (Since(v), Range(a, b)) | (Range(a, b), Since(v)) if b > v => Range(cmp::max(a, v), b),
            // Closed ranges touching at a single endpoint.
            (Range(_, b), Range(a, _)) if a == b => Exact(b),
            (Range(a, _), Range(_, b)) if a == b => Exact(a),
            // Properly overlapping closed ranges.
            (Range(a, b), Range(c, d)) if b > c && d > a => Range(cmp::max(a, c), cmp::min(b, d)),
            // No overlap.
            _ => None,
        }
    }

    /// True when every version matched by `other` is also matched by `self`.
    pub fn contains(self, other: VersionSpec) -> bool {
        other.intersect(self) == other
    }

    /// Converts to an inclusive range when the constraint is bounded;
    /// `None` (the Rust `Option`) for the unbounded `Since` variant.
    pub fn range(self) -> Option<RangeInclusive<i16>> {
        match self {
            // Deliberately empty range (1..=0 contains nothing).
            VersionSpec::None => Some(1..=0),
            VersionSpec::Exact(v) => Some(v..=v),
            VersionSpec::Since(_) => None,
            VersionSpec::Range(a, b) => Some(a..=b),
        }
    }
}
/// Primitive wire types a spec field may use. Serialized in camelCase, and
/// `Display`/`FromStr` are forwarded to the serde representation below.
#[derive(Debug, Copy, Clone, Serialize, Deserialize, Eq, PartialEq, Ord, PartialOrd)]
#[serde(rename_all = "camelCase")]
pub enum PrimitiveType {
    Bool,
    Int8,
    Int16,
    Int32,
    Int64,
    Float64,
    String,
    Bytes,
}

forward_display_to_serde!(PrimitiveType);
forward_from_str_to_serde!(PrimitiveType);
impl PrimitiveType {
    /// The Rust type used to represent this primitive in generated code.
    pub fn rust_name(&self) -> &str {
        match self {
            Self::Bool => "bool",
            Self::Int8 => "i8",
            Self::Int16 => "i16",
            Self::Int32 => "i32",
            Self::Int64 => "i64",
            Self::Float64 => "f64",
            Self::String => "StrBytes",
            Self::Bytes => "Bytes",
        }
    }

    /// The codec type used for wire (de)serialization. String/bytes switch
    /// to their compact encodings when `flexible` versions are in effect.
    pub fn name(&self, flexible: bool) -> &str {
        match self {
            Self::Bool => "types::Boolean",
            Self::Int8 => "types::Int8",
            Self::Int16 => "types::Int16",
            Self::Int32 => "types::Int32",
            Self::Int64 => "types::Int64",
            Self::Float64 => "types::Float64",
            Self::String => if flexible { "types::CompactString" } else { "types::String" },
            Self::Bytes => if flexible { "types::CompactBytes" } else { "types::Bytes" },
        }
    }

    /// Fixed-size primitives are `Copy`; `String`/`Bytes` own heap data.
    pub fn is_copy(&self) -> bool {
        // `matches!` replaces the original match-returning-bools
        // (the reason for the file's `needless_bool`-style patterns).
        !matches!(self, Self::String | Self::Bytes)
    }

    /// Exactly the variable-length (non-`Copy`) types have a compact form.
    pub fn has_compact_form(&self) -> bool {
        !self.is_copy()
    }
}
/// A field type: a primitive, a named struct, or an array of another type
/// (arrays may nest, e.g. `[][]int32`).
#[derive(Debug, Clone)]
pub enum TypeSpec {
    Primitive(PrimitiveType),
    Struct(String),
    Array(Box<TypeSpec>),
}
impl FromStr for TypeSpec {
    type Err = ();

    /// Parses a spec type string: a `[]` prefix denotes an array (applied
    /// recursively), known primitive names become `Primitive`, and anything
    /// else is treated as a struct name. Never fails in practice.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        let s = s.trim();
        Ok(if let Some(inner) = s.strip_prefix("[]") {
            // `strip_prefix` replaces the original starts_with + manual
            // slice (`&s[2..]`), avoiding the index arithmetic.
            Self::Array(Box::new(Self::from_str(inner)?))
        } else if let Ok(prim) = PrimitiveType::from_str(s) {
            Self::Primitive(prim)
        } else {
            Self::Struct(s.into())
        })
    }
}
/// Renders a type back into spec syntax (the inverse of `FromStr`).
impl Display for TypeSpec {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self {
            Self::Primitive(prim) => prim.fmt(f),
            Self::Struct(name) => name.fmt(f),
            Self::Array(inner) => write!(f, "[]{}", inner),
        }
    }
}

// Serde support is routed through the Display/FromStr pair above.
derive_serialize_from_display!(TypeSpec);
derive_deserialize_from_str!(TypeSpec, "valid type specification");
impl VersionSpec {
    /// True for the `None` variant; used by serde's `skip_serializing_if`
    /// to omit absent version constraints.
    pub fn is_none(&self) -> bool {
        // `matches!` replaces the original match-returning-bool.
        matches!(self, VersionSpec::None)
    }
}

impl Default for VersionSpec {
    /// Version fields omitted from the JSON spec default to `None`.
    fn default() -> Self {
        VersionSpec::None
    }
}
| true |
5cb212e8d39133b4ccf92a1f73ecd8b44a3580ff
|
Rust
|
kruschk/advent-of-code
|
/2015/day3/src/main.rs
|
UTF-8
| 1,300 | 3.390625 | 3 |
[
"MIT"
] |
permissive
|
use std::fs;
use std::io;
/// A house location on the integer grid. The derived `Ord` (x, then y) is
/// what `sort_unstable` + `dedup` rely on below.
#[derive(Clone, Copy, Debug, Eq, Ord, PartialEq, PartialOrd)]
struct Point2D {
    x: isize,
    y: isize,
}

/// Follows the move instructions (`^ v < >`), with Santa and Robo-Santa
/// taking strictly alternating turns (a turn passes even on an unrecognized
/// character, matching the original behavior), and returns the sorted,
/// deduplicated list of visited houses as `(x, y)` pairs. Both start at the
/// origin, which always counts as visited.
fn visited_houses(input: &str) -> Vec<(isize, isize)> {
    // movers[0] is Santa, movers[1] is Robo-Santa.
    let mut movers = [(0isize, 0isize); 2];
    let mut history = vec![movers[0], movers[1]];
    for (turn, c) in input.chars().enumerate() {
        let mover = &mut movers[turn % 2];
        match c {
            '>' => mover.0 += 1,
            '^' => mover.1 += 1,
            '<' => mover.0 -= 1,
            'v' => mover.1 -= 1,
            // Unknown character: no move recorded, but the turn still passes.
            _ => continue,
        }
        history.push(*mover);
    }
    history.sort_unstable();
    history.dedup();
    history
}

fn main() -> Result<(), io::Error> {
    let input = fs::read_to_string("input.txt")?;
    let houses = visited_houses(&input);
    // Preserve the original report format, which printed `Point2D` values
    // in the same (x, y) sort order.
    let history: Vec<Point2D> = houses.iter().map(|&(x, y)| Point2D { x, y }).collect();
    println!("Santa's location history: {:?}", history);
    println!("Houses visited: {}", history.len());
    Ok(())
}
| true |
3212823795adfadba9072779d6ba7764a555536c
|
Rust
|
Nugine/compiler-experiment
|
/src/bin/exp2.rs
|
UTF-8
| 5,869 | 2.875 | 3 |
[] |
no_license
|
use compiler_experiment::exp1::lexer::Lexer;
use compiler_experiment::exp1::tokens::Token;
use compiler_experiment::exp2::grammar::{Grammar, LL1Table};
use compiler_experiment::exp2::symbol::Symbol;
use compiler_experiment::exp2::DynResult;
use compiler_experiment::source_file::SourceFile;
use compiler_experiment::utils::str_join;
use std::fmt::Write as _;
use std::io::{stdin, stdout, BufRead, Write};
use std::{env, fmt, fs, process};
/// Command-line arguments for the exercise binary.
#[derive(Debug)]
struct Args {
    // Path of the source file to analyze.
    src_path: String,
}

/// Parses the command line: exactly one positional argument, the source path.
fn parse_args() -> Result<Args, String> {
    let mut argv = env::args().skip(1);
    let src_path = match argv.next() {
        Some(path) => path,
        None => return Err("missing argument: source path".into()),
    };
    if argv.next().is_some() {
        return Err("too many arguments".into());
    }
    Ok(Args { src_path })
}
/// Unwraps a `Result`, printing the error to stderr and terminating the
/// process with exit code 1 on failure.
fn exit_on_error<T, E: fmt::Display>(result: Result<T, E>) -> T {
    result.unwrap_or_else(|e| {
        eprintln!("error: {}", e);
        // `process::exit` returns `!`, which coerces to `T`.
        process::exit(1)
    })
}
/// Prints every production of the grammar, start symbol first, in the form
/// `NT -> rule | rule ;`.
fn print_grammar(grammar: &Grammar) {
    // Prints one non-terminal's productions on a single line.
    fn display(nt: &Symbol, g: &Grammar) {
        print!("{} -> ", nt);
        let rules = str_join(
            g.productions()[nt].iter().map(|r| Grammar::fmt_rule(r)),
            " | ",
        );
        println!("{} ;", rules);
    }
    // Start symbol first, then every other non-terminal.
    display(grammar.start_symbol(), grammar);
    for nt in grammar.non_terminals() {
        if nt != grammar.start_symbol() {
            display(nt, grammar);
        }
    }
}
/// Prints the FIRST set of every non-terminal, start symbol first.
/// `Grammar::calc_first` must have been called beforehand.
fn print_first(grammar: &Grammar) {
    let first = grammar.first().unwrap();
    let start = grammar.start_symbol();
    println!("FIRST({}) = {:?}", start, first[start]);
    for nt in grammar.non_terminals() {
        if nt != start {
            println!("FIRST({}) = {:?}", nt, first[nt]);
        }
    }
}
/// Prints the FOLLOW set of every non-terminal, start symbol first.
/// `Grammar::calc_follow` must have been called beforehand.
fn print_follow(grammar: &Grammar) {
    let follow = grammar.follow().unwrap();
    let start = grammar.start_symbol();
    // Fixed the "FOLLLOW" typo in the original report label.
    println!("FOLLOW({}) = {:?}", start, follow[start]);
    for nt in grammar.non_terminals() {
        if nt != start {
            println!("FOLLOW({}) = {:?}", nt, follow[nt]);
        }
    }
}
/// Prints the LL(1) predictive parsing table as a fixed-width grid:
/// one header row of terminals, then one row per non-terminal whose cells
/// contain either `NT->rule` or `error`.
fn print_ll1_table(grammar: &Grammar) {
    // Prints a single table row for non-terminal `nt`.
    fn display(nt: &Symbol, ll1_table: &LL1Table) {
        let mut s = String::new();
        write!(&mut s, "{:^8}|", nt).unwrap();
        for rule in ll1_table[nt].values() {
            let m;
            let w = match rule {
                Some(rule) => {
                    let rule = rule.iter().map(|sym| sym.to_string()).collect::<String>();
                    // `m` outlives the match arm so `&m` can be returned.
                    m = format!("{}->{}", nt, rule);
                    &m
                }
                None => "error",
            };
            write!(&mut s, "{:^8}|", w).unwrap();
        }
        println!("{}", s);
    }

    let ll1_table = grammar.ll1_table().unwrap();

    // Header row: terminal symbols taken from any row's column keys
    // (all rows share the same key set).
    let head = ll1_table
        .values()
        .next()
        .unwrap()
        .keys()
        .map(|sym| format!("{:^8}|", sym))
        .collect::<String>();

    println!("{:^8}|{}", "", head);

    // Start symbol row first, then every other non-terminal.
    display(grammar.start_symbol(), ll1_table);
    for nt in grammar.non_terminals() {
        if nt != grammar.start_symbol() {
            display(nt, ll1_table);
        }
    }
}
/// Prints a full report for the grammar: productions, FIRST sets, FOLLOW
/// sets, and either the LL(1) table or the detected LL(1) conflict.
/// Mutates `grammar` by computing and caching the FIRST/FOLLOW/table data.
/// (User-facing output is intentionally in Chinese.)
fn show_grammar(grammar: &mut Grammar) {
    println!("规格显示:");
    print_grammar(&grammar);
    println!();

    // FIRST sets must be computed before FOLLOW sets.
    grammar.calc_first();
    println!("FIRST集:");
    print_first(&grammar);
    println!();

    grammar.calc_follow();
    println!("FOLLOW集:");
    print_follow(&grammar);
    println!();

    // Building the table can fail if the grammar is not LL(1).
    if let Err(err) = grammar.calc_ll1_table() {
        println!("发现 LL(1) 分析冲突:");
        println!("{}", err);
        println!();
    } else {
        println!("LL(1) 预测分析表:");
        print_ll1_table(&grammar);
        println!();
    }
}
/// Flushes stdout (so a preceding prompt is visible), then reads one line
/// from stdin and returns it trimmed.
///
/// Returns an error — instead of the original `unwrap()` panic — when stdin
/// is closed before a line arrives (EOF).
fn input_line() -> DynResult<String> {
    stdout().lock().flush()?;
    let stdin = stdin();
    let stdin = stdin.lock();
    let line = match stdin.lines().next() {
        Some(line) => line?,
        None => return Err("unexpected end of input on stdin".into()),
    };
    Ok(line.trim().to_owned())
}
/// Converts lexer tokens to grammar symbols and runs the LL(1) predictive
/// analyzer, reporting success or failure on stdout. Token conversion
/// errors are propagated to the caller.
fn test_analyze(g: &Grammar, tokens: &[Token]) -> DynResult<()> {
    // `collect::<Result<...>>` short-circuits on the first conversion error,
    // replacing the original manual push loop.
    let input_symbols = tokens
        .iter()
        .map(Grammar::convert_token)
        .collect::<Result<Vec<_>, _>>()?;
    match g.analyze_predict(&input_symbols) {
        Ok(()) => println!("识别成功"),
        Err(err) => println!("识别失败:{:?}", err),
    }
    Ok(())
}
/// Experiment driver: parses the grammar file, shows it before and after
/// left-recursion elimination, then lexes a user-entered string and runs
/// the LL(1) predictive analysis on it.
/// (User-facing output is intentionally in Chinese.)
fn run(args: Args) -> DynResult<()> {
    let content = exit_on_error(fs::read_to_string(&args.src_path));
    let mut grammar = Grammar::parse(&content)?;

    // Report on the raw grammar (may contain left recursion).
    println!("/-------------------------没有消除左递归-----------------------------/");
    show_grammar(&mut grammar);
    println!("/------------------------------------------------------------------/\n\n");

    // Eliminate left recursion and report again.
    grammar = grammar.remove_left_recursion()?;
    println!("/-------------------------已经消除左递归-----------------------------/");
    show_grammar(&mut grammar);
    println!("/------------------------------------------------------------------/\n\n");

    // Lex the user's input string; lexical errors are fatal here.
    println!("/----------------------------词法分析-------------------------------/");
    print!("输入串:");
    let src = input_line()?;
    let lexer = Lexer::from_src(SourceFile::in_memory(&src));
    let (tokens, errors) = lexer.resolve();
    if !errors.is_empty() {
        panic!("词法错误:{:?}", errors);
    }
    println!("词法单元:{:#?}", tokens);
    println!("/------------------------------------------------------------------/\n\n");

    // Syntactic analysis over the token stream.
    println!("/----------------------------语法分析-------------------------------/");
    test_analyze(&grammar, &tokens)?;
    println!("/------------------------------------------------------------------/\n\n");

    Ok(())
}
/// Entry point: any argument or runtime error prints to stderr and exits 1.
fn main() {
    let args = exit_on_error(parse_args());
    exit_on_error(run(args))
}
| true |
3bc2f6c065861e9e6e788a14f1918df79f619b72
|
Rust
|
christophercurrie/project-euler
|
/src/euler001.rs
|
UTF-8
| 1,279 | 3.625 | 4 |
[] |
no_license
|
use std::collections::HashSet;
use std::iter::successors;
use itertools::{Itertools, merge};
/// Project Euler #1: sum of all natural numbers below `limit` that are
/// multiples of 3 or 5.
///
/// Replaces the original two `HashSet`-building loops with a single filtered
/// range — same result (the set deduplicated multiples of 15; `||` does the
/// same), no allocation. A `limit` <= 1 yields 0.
pub fn euler001_limit(limit: i32) -> i32 {
    (1..limit).filter(|n| n % 3 == 0 || n % 5 == 0).sum()
}
/// Iterator-combinator variant of `euler001_limit` using itertools:
/// merges the two (sorted) infinite streams of multiples of 3 and of 5,
/// drops consecutive duplicates (multiples of 15 appear in both streams),
/// and sums everything below `limit`.
fn euler001_limit2(limit: i32) -> i32 {
    let multiples_of = |i| successors(Some(i), move |n| Some(n + i));
    merge(multiples_of(3), multiples_of(5)).unique().take_while(|n| *n < limit).sum()
}
/// Prints the answer for the canonical limit of 1000.
pub fn euler001() {
    println!("Euler 001: {}", euler001_limit2(1000));
}
#[cfg(test)]
mod tests {
    use super::euler001_limit;

    // Expected values: 10 and 1000 are the Project Euler examples; the
    // others are regression anchors.
    #[test]
    fn test_limit_10() {
        assert_eq!(euler001_limit(10), 23);
    }
    #[test]
    fn test_limit_49() {
        assert_eq!(euler001_limit(49), 543);
    }
    #[test]
    fn test_limit_1000() {
        assert_eq!(euler001_limit(1_000), 233_168);
    }
    #[test]
    fn test_limit_8456() {
        assert_eq!(euler001_limit(8_456), 16_687_353);
    }
    #[test]
    fn test_limit_19564() {
        assert_eq!(euler001_limit(19_564), 89_301_183);
    }
}
| true |
e8b28b56efa6565d2a01b6d3c9cf1cf1d9b60ef2
|
Rust
|
cscheid/loom
|
/src/sampling.rs
|
UTF-8
| 1,857 | 3.203125 | 3 |
[] |
no_license
|
use vector;
use vector::Vec3;
use random::*;
//////////////////////////////////////////////////////////////////////////////
/// Uniformly samples a point strictly inside the unit sphere by rejection
/// sampling from the cube [-1, 1]^3.
pub fn random_in_unit_sphere() -> Vec3 {
    loop {
        let candidate = Vec3::new(rand_double() * 2.0 - 1.0,
                                  rand_double() * 2.0 - 1.0,
                                  rand_double() * 2.0 - 1.0);
        // Accept once the squared length falls inside the sphere.
        if candidate.dot(&candidate) < 1.0 {
            return candidate;
        }
    }
}
/// Uniformly random direction in 3-space: a uniform draw from the unit
/// ball, normalised to length 1 (normalisation preserves uniformity over
/// directions).
pub fn random_3d_direction() -> Vec3 {
    vector::unit_vector(&random_in_unit_sphere())
}
/// Rejection-samples a point uniformly from the open unit disk in the
/// z = 0 plane: draw from the enclosing square until a draw lands inside.
pub fn random_in_unit_disk() -> Vec3 {
    loop {
        let candidate = Vec3::new(rand_double() * 2.0 - 1.0,
                                  rand_double() * 2.0 - 1.0,
                                  0.0);
        if candidate.dot(&candidate) < 1.0 {
            return candidate;
        }
    }
}
/// t statistic of the sample mean against a hypothesised `mean`, computed
/// in one streaming pass using the population variance (divide by n).
///
/// Returns (E[X] - mean) / (sigma / sqrt(n)).
pub fn t_stat(itor: &mut std::iter::Iterator<Item=f64>,
              mean: f64) -> f64
{
    // One fold collects the sufficient statistics: (count, sum, sum of squares).
    let sufficient = itor.fold((0.0, 0.0, 0.0), |acc, next| {
        (acc.0+1.0, acc.1+next, acc.2+next*next)
    });
    let n = sufficient.0;
    let ex = sufficient.1 / n;
    // BUG FIX: the original computed `sufficient.2 * sufficient.2 / n`,
    // i.e. (sum of squares)^2 / n, instead of E[X^2] = (sum of squares) / n.
    let exx = sufficient.2 / n;
    let variance = exx - ex * ex;
    // Standard error of the mean is sigma / sqrt(n); the original divided by
    // sqrt(E[X^2]) rather than the standard deviation.
    (ex - mean) / (variance.sqrt() / n.sqrt())
}
/// Streaming average and standard deviation (population, divide-by-n) of an
/// iterator of f64 values, computed in a single pass.
///
/// Returns `(mean, std_dev)`.
pub fn avstdev(itor: &mut std::iter::Iterator<Item=f64>) ->
    (f64, f64)
{
    // (count, sum, sum of squares) in one fold.
    let sufficient = itor.fold((0.0, 0.0, 0.0), |acc, next| {
        (acc.0+1.0, acc.1+next, acc.2+next*next)
    });
    let n = sufficient.0;
    let ex = sufficient.1 / n;
    // BUG FIX: the original computed `sufficient.2 * sufficient.2 / n`,
    // squaring the sum of squares; E[X^2] is (sum of squares) / n.
    let exx = sufficient.2 / n;
    // Return the standard deviation, as the name promises (the original
    // returned the raw — and mis-computed — variance).
    (ex, (exx - ex * ex).sqrt())
}
// Smoke test for the disk sampler: prints the empirical means of the x and
// y coordinates over 10k samples. NOTE(review): no assertions — by symmetry
// both means should be near 0; consider asserting |mean| is small.
#[test]
fn it_works()
{
    let mut itor1 = (0..10000).map(|_| {
        random_in_unit_disk().x()
    });
    let mut itor2 = (0..10000).map(|_| {
        random_in_unit_disk().y()
    });
    let stats1 = avstdev(&mut itor1);
    let stats2 = avstdev(&mut itor2);
    println!("random unit disk sampling average in x: {}", stats1.0);
    println!("random unit disk sampling average in y: {}", stats2.0);
}
| true |
5516ba413aac60fa95d4e800cadbd67191a86de0
|
Rust
|
cda-group/arcon
|
/arcon/src/error/source.rs
|
UTF-8
| 743 | 2.71875 | 3 |
[
"Apache-2.0"
] |
permissive
|
use super::{ArconResult, Error};
use snafu::Snafu;
use std::fmt::Debug;
/// Nested result type for handling source errors: the outer `Result` layer
/// (from `ArconResult`) carries general Arcon errors, while the inner one
/// carries source-specific `SourceError`s.
pub type SourceResult<A> = ArconResult<std::result::Result<A, SourceError>>;
/// Enum containing every type of error that a source may encounter
#[derive(Debug, Snafu)]
pub enum SourceError {
    /// The incoming data did not match the expected schema.
    #[snafu(display("Schema Error Encountered {}", msg))]
    Schema { msg: String },
    /// The raw payload could not be parsed into a record.
    #[snafu(display("Failed to parse data {}", msg))]
    Parse { msg: String },
    /// Error surfaced by the Kafka client (only with the `kafka` feature).
    #[cfg(feature = "kafka")]
    #[snafu(display("Encountered a Kafka error {}", error.to_string()))]
    Kafka { error: rdkafka::error::KafkaError },
}
// Lifts a general Arcon `Error` into the *outer* layer of `SourceResult`,
// so `?`-style conversions mark the whole source operation as failed.
impl<A> From<Error> for SourceResult<A> {
    fn from(error: Error) -> Self {
        Err(error)
    }
}
| true |
04a9c3291108e507234a45aa0b4900c48c49c8db
|
Rust
|
volyx/advent2020-rust
|
/src/advent3/mod.rs
|
UTF-8
| 1,659 | 3.34375 | 3 |
[] |
no_license
|
use std::fs::File;
use std::io::{prelude::*, BufReader};
/// Advent of Code 2020 day 3: count the trees hit while sledding down the
/// grid on several slopes, then print the product of the per-slope counts.
///
/// Reads the grid from `advent3.txt`; panics on I/O failure or on any grid
/// character other than '.' or '#'.
pub fn solution() {
    let file = File::open("advent3.txt").unwrap();
    let reader = BufReader::new(file);

    // Parse the grid: '.' (open square) -> 0, '#' (tree) -> 1.
    let mut maze: Vec<Vec<usize>> = Vec::new();
    for line in reader.lines() {
        let raw_line = line.unwrap();
        let maze_line: Vec<usize> = raw_line
            .chars()
            .map(|c| match c {
                '.' => 0,
                '#' => 1,
                _ => {
                    println!("{:?}", c);
                    panic!()
                }
            })
            .collect();
        maze.push(maze_line);
    }

    /*
    Right 1, down 1.
    Right 3, down 1. (This is the slope you already checked.)
    Right 5, down 1.
    Right 7, down 1.
    Right 1, down 2.
    */
    let mut answer: i128 = 1;
    for (row_step, col_step) in vec![(1, 1), (1, 3), (1, 5), (1, 7), (2, 1)] {
        let mut i = 0;
        let mut j = 0;
        let mut count: i128 = 0;
        // `i + row_step < maze.len()` keeps the looked-up row in bounds even
        // when row_step > 1. The original `i < maze.len() - 1` could index
        // one row past the end for the (2, 1) slope on even-height grids.
        while i + row_step < maze.len() {
            let row = &maze[i + row_step];
            // The pattern repeats horizontally, hence the modulo on columns.
            if row[(j + col_step) % row.len()] == 1 {
                count += 1;
            }
            j += col_step;
            i += row_step;
        }
        answer *= count;
    }
    println!("{:?}", answer);
}
| true |
d7159b5a3cf1d5e065c7eb3dacc8800dc53ed3bc
|
Rust
|
kohbis/leetcode
|
/algorithms/0005.longest-palindromic-substring/solution.rs
|
UTF-8
| 878 | 3.484375 | 3 |
[] |
no_license
|
impl Solution {
    /// Returns the longest palindromic substring of `s`.
    ///
    /// Brute force over all (i, j) index pairs, skipping pairs that cannot
    /// beat the best length found so far. Assumes single-byte characters
    /// (per LeetCode constraints) so byte slicing `s[left..=right]` matches
    /// the char indices — TODO confirm for non-ASCII inputs.
    pub fn longest_palindrome(s: String) -> String {
        // Guard: the original `0..(s.len() - 1)` underflows (and panics in
        // debug builds) for an empty string; strings shorter than 2 chars
        // are trivially their own longest palindrome.
        if s.len() < 2 {
            return s;
        }
        let chars: Vec<char> = s.chars().collect();
        let (mut left, mut right) = (0, 0);
        let mut longest = 0;
        for i in 0..(s.len() - 1) {
            for j in (i + 1)..s.len() {
                // Skip spans that cannot improve on the current best length.
                if longest > j - i {
                    continue;
                }
                if Self::is_palindrome(&chars, i, j) {
                    longest = j - i;
                    left = i;
                    right = j;
                }
            }
        }
        s[left..=right].to_string()
    }

    /// Two-pointer palindrome check over `chars[left..=right]`.
    fn is_palindrome(chars: &Vec<char>, mut left: usize, mut right: usize) -> bool {
        while left < right {
            if chars[left] != chars[right] {
                return false;
            }
            left += 1;
            right -= 1;
        }
        true
    }
}
| true |
fd84ea3df2c1451a3cb6b16f0cf79b779a5a55f1
|
Rust
|
vincenthouyi/elf_rs
|
/src/lib.rs
|
UTF-8
| 3,376 | 3.28125 | 3 |
[
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
//! A simple no_std ELF file reader for ELF32 and ELF64.
//!
//! ## Minimal Example
//! ```ignore
//! use elf_rs::Elf;
//!
//! /// Minimal example. Works in `no_std`-contexts and the parsing
//! /// itself needs zero allocations.
//! fn main() {
//! let elf_bytes = include_bytes!("path/to/file.elf");
//! let elf = elf_rs::Elf::from_bytes(elf_bytes).unwrap();
//! let elf64 = match elf {
//! Elf::Elf64(elf) => elf,
//! _ => panic!("got Elf32, expected Elf64"),
//! };
//! let pr_hdrs = elf64.program_header_iter().collect::<Vec<_>>();
//! dbg!(pr_hdrs);
//! }
//! ```
#![no_std]
#![allow(non_camel_case_types)]
#[macro_use]
extern crate bitflags;
extern crate num_traits;
use core::mem::size_of;
mod elf;
mod elf_header;
mod program_header;
mod section_header;
pub use elf::{
Elf32, Elf64, ElfFile, ElfHeader, ProgramHeaderEntry, ProgramHeaderIter, SectionHeaderEntry,
SectionHeaderIter,
};
pub use elf_header::{
ElfAbi, ElfClass, ElfEndian, ElfHeader32, ElfHeader64, ElfHeaderRaw, ElfMachine, ElfType,
};
pub use program_header::{
ProgramHeader32, ProgramHeader64, ProgramHeaderFlags, ProgramHeaderRaw, ProgramType,
};
pub use section_header::{
SectionHeader32, SectionHeader64, SectionHeaderFlags, SectionHeaderRaw, SectionType,
};
/// Errors produced while validating and parsing an ELF image.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum Error {
    /// Input buffer is smaller than the smallest (ELF32) header.
    BufferTooShort,
    /// Input does not begin with the ELF magic bytes.
    InvalidMagic,
    /// The header's class byte is neither ELF32 nor ELF64.
    InvalidClass,
}
/// A parsed ELF image: class-erased wrapper over the 32- and 64-bit views.
#[derive(Debug)]
pub enum Elf<'a> {
    /// 32-bit ELF view borrowing the input buffer.
    Elf32(Elf32<'a>),
    /// 64-bit ELF view borrowing the input buffer.
    Elf64(Elf64<'a>),
}
impl<'a> Elf<'a> {
    /// Parses `elf_buf` as an ELF file, dispatching on the header's class
    /// field to produce either an `Elf32` or an `Elf64` wrapper.
    ///
    /// # Errors
    /// * `Error::BufferTooShort` — buffer cannot hold even an ELF32 header.
    /// * `Error::InvalidMagic` — buffer does not start with the ELF magic.
    /// * `Error::InvalidClass` — class byte is neither ELF32 nor ELF64.
    pub fn from_bytes(elf_buf: &'a [u8]) -> Result<Self, Error> {
        if elf_buf.len() < size_of::<ElfHeader32>() {
            return Err(Error::BufferTooShort);
        }
        if !elf_buf.starts_with(&elf_header::ELF_MAGIC) {
            return Err(Error::InvalidMagic);
        }
        // Peek at the header through a temporary ELF32 view to read the
        // class byte, then build the properly-typed wrapper.
        let tmp_elf = Elf32::new(elf_buf);
        match tmp_elf.elf_header().class() {
            // `.map(Elf::Elf64)` — direct variant constructor instead of the
            // original redundant closures.
            ElfClass::Elf64 => Elf64::from_bytes(elf_buf).map(Elf::Elf64),
            ElfClass::Elf32 => Elf32::from_bytes(elf_buf).map(Elf::Elf32),
            ElfClass::Unknown(_) => Err(Error::InvalidClass),
        }
    }
}
// `ElfFile` for the class-erased wrapper: every accessor simply forwards to
// the wrapped 32- or 64-bit implementation.
impl<'a> ElfFile for Elf<'a> {
    fn content(&self) -> &[u8] {
        match self {
            Elf::Elf32(e) => e.content(),
            Elf::Elf64(e) => e.content(),
        }
    }
    fn elf_header(&self) -> ElfHeader {
        match self {
            Elf::Elf32(e) => e.elf_header(),
            Elf::Elf64(e) => e.elf_header(),
        }
    }
    fn program_header_nth(&self, index: usize) -> Option<ProgramHeaderEntry> {
        match self {
            Elf::Elf32(e) => e.program_header_nth(index),
            Elf::Elf64(e) => e.program_header_nth(index),
        }
    }
    fn program_header_iter(&self) -> ProgramHeaderIter {
        match self {
            Elf::Elf32(e) => e.program_header_iter(),
            Elf::Elf64(e) => e.program_header_iter(),
        }
    }
    fn section_header_nth(&self, index: usize) -> Option<SectionHeaderEntry> {
        match self {
            Elf::Elf32(e) => e.section_header_nth(index),
            Elf::Elf64(e) => e.section_header_nth(index),
        }
    }
    fn section_header_iter(&self) -> SectionHeaderIter {
        match self {
            Elf::Elf32(e) => e.section_header_iter(),
            Elf::Elf64(e) => e.section_header_iter(),
        }
    }
}
| true |
1a27bc6ce5dcb7143a8c3e5280f32dd6a2031ee1
|
Rust
|
davidsullins/AdventOfRust2016
|
/src/bin/advent5.rs
|
UTF-8
| 3,377 | 3.53125 | 4 |
[
"MIT"
] |
permissive
|
// advent5.rs
// recovering door passwords use md5 hashes
extern crate md5;
use std::io;
use std::fmt::Write;
fn main() {
    // The door id arrives on stdin; strip the trailing newline before use.
    let mut buffer = String::new();
    io::stdin().read_line(&mut buffer).expect("Failed to read line");
    let door_id = buffer.trim();
    let (part1, part2) = (get_password(door_id), get_password2(door_id));
    println!("part 1 password: {}", part1);
    println!("part 2 password: {}", part2);
}
// ///////
// Part 1
/// Recovers the part-1 password: append increasing integers to `door_id`
/// and keep the first eight characters produced by the hash test, in order.
fn get_password(door_id: &str) -> String {
    let mut password = String::with_capacity(8);
    let mut guess = door_id.to_string();
    let len = door_id.len();
    let mut index: u64 = 0;
    while password.len() < 8 {
        // Reuse the guess buffer: rewind to the door id and append the index.
        guess.truncate(len);
        write!(guess, "{}", index).unwrap();
        if let Some(c) = get_password_character(&guess) {
            password.push(c);
        }
        index += 1;
    }
    password
}
// if md5 hash starts with 5 0's in hex, output the 6th hex digit
fn get_password_character(guess: &str) -> Option<char> {
    let md5sum = md5::compute(guess.as_bytes());
    // Five leading hex zeros == the first two bytes are zero and the high
    // nibble of the third byte is zero.
    if md5sum[0] == 0 && md5sum[1] == 0 && md5sum[2] >> 4 == 0 {
        Some(char_from_nibble(md5sum[2]))
    } else {
        None
    }
}
// output the char representing a hex digit
//
// Uses `std::char::from_digit` (radix 16 yields lowercase hex, matching the
// original hand-rolled arithmetic) instead of manual byte offsets.
fn char_from_nibble(nibble: u8) -> char {
    assert!(nibble <= 0xf);
    std::char::from_digit(u32::from(nibble), 16).expect("nibble is in 0..=0xf")
}
// ////////
// Part 2
fn get_password2(door_id: &str) -> String {
let mut guess = door_id.to_string();
let len = door_id.len();
let mut password = vec!['*'; 8];
let mut total_chars = 0;
let pw_iter = (0u64..).filter_map(|x| {
guess.truncate(len);
write!(guess, "{}", x).unwrap();
get_password_character2(&guess)
});
for (c, pos) in pw_iter {
if '*' == password[pos] {
password[pos] = c;
total_chars += 1;
if total_chars == 8 {
break;
}
}
}
password.into_iter().collect()
}
// like part 1 but the 6th hex digit is a position if < 8 and the 7th is the char
fn get_password_character2(guess: &str) -> Option<(char, usize)> {
    let md5sum = md5::compute(guess.as_bytes());
    // md5sum[2] <= 7 implies both "five leading hex zeros" and "position in 0..8".
    if md5sum[0] == 0 && md5sum[1] == 0 && md5sum[2] <= 7 {
        // High nibble of the fourth byte is the 7th hex digit: the character.
        Some((char_from_nibble((md5sum[3] & 0xf0) >> 4), md5sum[2] as usize))
    } else {
        None
    }
}
// //////
// Tests
// The `#[ignore]`d tests brute-force millions of md5 hashes and are slow;
// run them explicitly with `cargo test -- --ignored`.
#[test]
#[ignore]
fn test_get_password() {
    assert_eq!("18f47a30", get_password("abc"));
}
#[test]
fn test_get_password_character() {
    assert_eq!(None, get_password_character("abc3231928"));
    assert_eq!(Some('1'), get_password_character("abc3231929"));
    assert_eq!(Some('8'), get_password_character("abc5017308"));
    assert_eq!(Some('f'), get_password_character("abc5278568"));
}
#[test]
fn test_char_from_nibble() {
    assert_eq!('0', char_from_nibble(0));
    assert_eq!('9', char_from_nibble(9));
    assert_eq!('a', char_from_nibble(0xa));
    assert_eq!('f', char_from_nibble(0xf));
}
// Part 2
#[test]
#[ignore]
fn test_get_password2() {
    assert_eq!("05ace8e3", get_password2("abc"));
}
#[test]
fn test_get_password_character2() {
    assert_eq!(None, get_password_character2("abc3231928"));
    assert_eq!(Some(('5', 1)), get_password_character2("abc3231929"));
    assert_eq!(None, get_password_character2("abc5017308"));
    assert_eq!(None, get_password_character2("abc5278568"));
    assert_eq!(Some(('e', 4)), get_password_character2("abc5357525"));
}
| true |
c7b9c63db6feba282e7048ad4bfc35faa53a669f
|
Rust
|
jolestar/libra
|
/network/src/protocols/rpc/mod.rs
|
UTF-8
| 22,511 | 2.578125 | 3 |
[
"Apache-2.0"
] |
permissive
|
// Copyright (c) The Libra Core Contributors
// SPDX-License-Identifier: Apache-2.0
//! Implementation of the RPC protocol as per Libra wire protocol v1.
//!
//! Design:
//! -------
//!
//! RPC receives OutboundRpcRequest messages from upstream actors. The OutboundRpcRequest contains
//! the RPC protocol, raw request bytes, RPC timeout duration, and a channel over which the
//! response bytes can be sent back to the upstream.
//! For inbound RPC requests, RPC sends InboundRpcRequest notifications to upstream actors. The
//! InboundRpcRequest contains the RPC protocol, raw request bytes, and a channel over which the
//! upstream can send a response for the RPC.
//! Internally, the RPC actor consists of a single event loop. The event loop processes 4 kinds of
//! messages:
//! (1) outbound RPC requests received from upstream,
//! (2) notifications for inbound RpcRequest/RpcResponse from the Peer actor,
//! (3) completion notification for tasks processing inbound RPC, and
//! (4) completion notification for tasks processing outbound RPCs.
//! The tasks for inbound and outbound RPCs are spawned onto the same runtime as the one driving
//! the RPC event loop.
//!
//! Timeouts:
//! ---------
//! The tasks for inbound and outbound RPCs are also "wrapped" within timeouts to ensure that they
//! are not running forever. The outbound RPC timeout is specified by the upstream client, where as
//! the inbound RPC timeout is a configuration parameter for the RPC actor.
//!
//! Limits:
//! -------
//! We limit the number of pending inbound RPC tasks to ensure that resource usage is bounded for
//! inbound RPCs. For outbound RPCs, we log a warning when the limit is exceeded, but allow the RPC
//! to proceed.
//!
//! State
//! -------------
//! * For outbound RPCs, the RPC actors maintains a HashMap from the RequestId to a channel over
//! which inbound responses can be delivered to the task driving the request. Entries are removed
//! on completion of the task, which happens either on receipt of the response, or on
//! failure/timeout.
//! * The RPC actor also maintains a RequestIdGenerator for generating request ids for outbound
//! RPCs. The RequestIdGenerator increments the request id by 1 for each subsequent outbound RPC.
use crate::{
counters,
counters::{
CANCELED_LABEL, DECLINED_LABEL, FAILED_LABEL, RECEIVED_LABEL, REQUEST_LABEL,
RESPONSE_LABEL, SENT_LABEL,
},
peer::{PeerHandle, PeerNotification},
protocols::wire::messaging::v1::{
NetworkMessage, Priority, RequestId, RpcRequest, RpcResponse,
},
ProtocolId,
};
use bytes::Bytes;
use error::RpcError;
use futures::{
channel::oneshot,
future::{self, BoxFuture, FutureExt, TryFutureExt},
sink::SinkExt,
stream::{FuturesUnordered, StreamExt},
task::Context,
};
use libra_logger::prelude::*;
use libra_types::PeerId;
use std::{collections::HashMap, fmt::Debug, time::Duration};
pub mod error;
#[cfg(any(feature = "fuzzing", test))]
#[path = "fuzzing.rs"]
/// fuzzing module for the rpc protocol
pub mod fuzzing;
#[cfg(test)]
mod test;
/// A wrapper struct for an inbound rpc request and its associated context.
/// Delivered to upstream actors via [`RpcNotification::RecvRpc`].
#[derive(Debug)]
pub struct InboundRpcRequest {
    /// Rpc method identifier, e.g., `/libra/rpc/0.1.0/consensus/0.1.0`. This is used
    /// to dispatch the request to the corresponding client handler.
    pub protocol: ProtocolId,
    /// The serialized request data received from the sender.
    pub data: Bytes,
    /// Channel over which the rpc response is sent from the upper client layer
    /// to the rpc layer.
    ///
    /// The rpc actor holds onto the receiving end of this channel, awaiting the
    /// response from the upper layer. If there is an error in, e.g.,
    /// deserializing the request, the upper layer should send an [`RpcError`]
    /// down the channel to signify that there was an error while handling this
    /// rpc request. Currently, we just log these errors and drop the substream;
    /// in the future, we will send an error response to the peer and/or log any
    /// malicious behaviour.
    ///
    /// The upper client layer should be prepared for `res_tx` to be potentially
    /// disconnected when trying to send their response, as the rpc call might
    /// have timed out while handling the request.
    pub res_tx: oneshot::Sender<Result<Bytes, RpcError>>,
}
/// A wrapper struct for an outbound rpc request and its associated context.
/// Submitted to the [`Rpc`] actor over its `requests_rx` channel.
#[derive(Debug)]
pub struct OutboundRpcRequest {
    /// Rpc method identifier, e.g., `/libra/rpc/0.1.0/consensus/0.1.0`. This is the
    /// protocol we will negotiate our outbound substream to.
    pub protocol: ProtocolId,
    /// The serialized request data to be sent to the receiver.
    pub data: Bytes,
    /// Channel over which the rpc response is sent from the rpc layer to the
    /// upper client layer.
    ///
    /// If there is an error while performing the rpc protocol, e.g., the remote
    /// peer drops the connection, we will send an [`RpcError`] over the channel.
    pub res_tx: oneshot::Sender<Result<Bytes, RpcError>>,
    /// The timeout duration for the entire rpc call. If the timeout elapses, the
    /// rpc layer will send an [`RpcError::TimedOut`] error over the
    /// `res_tx` channel to the upper client layer.
    pub timeout: Duration,
}
/// Events sent from the [`Rpc`] actor to the
/// [`NetworkProvider`](crate::interface::NetworkProvider) actor.
#[derive(Debug)]
pub enum RpcNotification {
    /// A new inbound rpc request has been received from a remote peer.
    /// The wrapped request carries the channel for the upstream's response.
    RecvRpc(InboundRpcRequest),
}
/// In-flight tasks driving outbound RPCs; each resolves to the request id it
/// serviced so the main loop can clean up `pending_outbound_rpcs`.
type OutboundRpcTasks = FuturesUnordered<BoxFuture<'static, RequestId>>;
/// In-flight tasks servicing inbound RPCs; completion carries no payload.
type InboundRpcTasks = FuturesUnordered<BoxFuture<'static, ()>>;
// Wraps the task of request id generation. Request ids start at 0 and increment till they hit
// RequestId::MAX. After that, they wrap around to 0.
struct RequestIdGenerator {
    // The id that the next call to `next()` will hand out.
    next_id: RequestId,
    // Peer this generator belongs to; used only for log context.
    peer_id: PeerId,
}
impl RequestIdGenerator {
    /// Creates a generator for `peer_id` whose first issued id is 0.
    pub fn new(peer_id: PeerId) -> Self {
        Self {
            next_id: 0,
            peer_id,
        }
    }

    /// Returns the next request id; ids wrap back to 0 after `RequestId::MAX`.
    pub fn next(&mut self) -> RequestId {
        let request_id = self.next_id;
        // `wrapping_add` replaces the original `overflowing_add` match. The
        // increment yields 0 only on wrap-around, so logging on that value is
        // equivalent to logging on the overflow flag.
        self.next_id = self.next_id.wrapping_add(1);
        if self.next_id == 0 {
            info!(
                "Request ids with peer: {:?} wrapped around to 0",
                self.peer_id.short_str(),
            );
        }
        request_id
    }
}
/// The rpc actor. Owns all state for a single peer's inbound and outbound
/// RPCs; driven by the event loop in [`Rpc::start`].
pub struct Rpc {
    /// Channel to send requests to Peer.
    peer_handle: PeerHandle,
    /// Channel to receive requests from other upstream actors.
    requests_rx: channel::Receiver<OutboundRpcRequest>,
    /// Channel to receive notifications from Peer.
    peer_notifs_rx: channel::Receiver<PeerNotification>,
    /// Channels to send notifications to upstream actors.
    rpc_handler_tx: channel::Sender<RpcNotification>,
    /// The timeout duration for inbound rpc calls.
    inbound_rpc_timeout: Duration,
    /// Channels to send Rpc responses to pending outbound RPC tasks.
    pending_outbound_rpcs: HashMap<RequestId, (ProtocolId, oneshot::Sender<RpcResponse>)>,
    /// RequestId to use for next outbound RPC.
    request_id_gen: RequestIdGenerator,
    /// The maximum number of concurrent outbound rpc requests that we will
    /// service before back-pressure kicks in.
    max_concurrent_outbound_rpcs: u32,
    /// The maximum number of concurrent inbound rpc requests that we will
    /// service before back-pressure kicks in.
    max_concurrent_inbound_rpcs: u32,
}
impl Rpc {
    /// Create a new instance of the [`Rpc`] protocol actor.
    pub fn new(
        peer_handle: PeerHandle,
        requests_rx: channel::Receiver<OutboundRpcRequest>,
        peer_notifs_rx: channel::Receiver<PeerNotification>,
        rpc_handler_tx: channel::Sender<RpcNotification>,
        inbound_rpc_timeout: Duration,
        max_concurrent_outbound_rpcs: u32,
        max_concurrent_inbound_rpcs: u32,
    ) -> Self {
        Self {
            request_id_gen: RequestIdGenerator::new(peer_handle.peer_id()),
            peer_handle,
            requests_rx,
            peer_notifs_rx,
            rpc_handler_tx,
            inbound_rpc_timeout,
            pending_outbound_rpcs: HashMap::new(),
            max_concurrent_outbound_rpcs,
            max_concurrent_inbound_rpcs,
        }
    }
    /// Start the [`Rpc`] actor's event loop.
    ///
    /// The loop ends when the upstream `requests_rx` channel closes.
    pub async fn start(mut self) {
        let mut inbound_rpc_tasks = InboundRpcTasks::new();
        let mut outbound_rpc_tasks = OutboundRpcTasks::new();
        loop {
            ::futures::select! {
                notif = self.peer_notifs_rx.select_next_some() => {
                    self.handle_inbound_message(
                        notif,
                        &mut inbound_rpc_tasks,
                    );
                },
                maybe_req = self.requests_rx.next() => {
                    if let Some(req) = maybe_req {
                        self.handle_outbound_rpc(req, &mut outbound_rpc_tasks).await;
                    } else {
                        break;
                    }
                },
                // Inbound task completions carry no payload; they are polled
                // only to drive the tasks and free their slots.
                () = inbound_rpc_tasks.select_next_some() => {
                },
                request_id = outbound_rpc_tasks.select_next_some() => {
                    // Remove request_id from pending_outbound_rpcs if not already removed.
                    let _ = self.pending_outbound_rpcs.remove(&request_id);
                }
            }
        }
        info!(
            "Rpc actor terminated for peer: {}",
            self.peer_handle.peer_id().short_str()
        );
    }
    // Handle inbound message -- the message can be an inbound RPC request, or a response to a
    // pending outbound RPC request.
    fn handle_inbound_message(
        &mut self,
        notif: PeerNotification,
        inbound_rpc_tasks: &mut InboundRpcTasks,
    ) {
        match notif {
            PeerNotification::NewMessage(message) => {
                match message {
                    // This is a response to a pending outbound RPC.
                    NetworkMessage::RpcResponse(response) => {
                        self.handle_inbound_response(response);
                    }
                    // This is a new inbound RPC request.
                    NetworkMessage::RpcRequest(request) => {
                        self.handle_inbound_request(request, inbound_rpc_tasks);
                    }
                    _ => {
                        error!("Received non-RPC message from Peer actor: {:?}", message);
                    }
                }
            }
            notif => debug_assert!(
                false,
                "Received unexpected event from Peer: {:?}, expected NewMessage",
                notif
            ),
        }
    }
    // Handles inbound response by either forwarding response to task waiting for response, or by
    // dropping it if the task has already terminated.
    fn handle_inbound_response(&mut self, response: RpcResponse) {
        let peer_id = self.peer_handle.peer_id();
        let request_id = response.request_id;
        if let Some((protocol, response_tx)) = self.pending_outbound_rpcs.remove(&request_id) {
            trace!(
                "Waiting to notify outbound rpc task about inbound response for request_id {}",
                request_id
            );
            if let Err(e) = response_tx.send(response) {
                // Fixed typo in log message: "inbount" -> "inbound".
                warn!(
                    "Failed to handle inbound RPC response from peer: {} for protocol: {:?}. Error: {:?}",
                    peer_id.short_str(),
                    protocol,
                    e
                );
            } else {
                trace!(
                    "Done notifying outbound RPC task about inbound response for request_id {}",
                    request_id
                );
            }
        } else {
            // TODO: add ability to log protocol id as well
            info!(
                "Received response for expired request from {:?}. Discarding.",
                peer_id.short_str()
            )
        }
    }
    // Handle inbound request by spawning task (with timeout).
    fn handle_inbound_request(
        &mut self,
        request: RpcRequest,
        inbound_rpc_tasks: &mut InboundRpcTasks,
    ) {
        let notification_tx = self.rpc_handler_tx.clone();
        let peer_handle = self.peer_handle.clone();
        let peer_id_str = peer_handle.peer_id().short_str();
        if inbound_rpc_tasks.len() as u32 == self.max_concurrent_inbound_rpcs {
            // Increase counter of declined responses and log warning.
            counters::LIBRA_NETWORK_RPC_MESSAGES
                .with_label_values(&[RESPONSE_LABEL, DECLINED_LABEL])
                .inc();
            warn!(
                "Pending inbound RPCs are at limit ({}). Not processing new inbound rpc requests",
                self.max_concurrent_inbound_rpcs
            );
            return;
        }
        let timeout = self.inbound_rpc_timeout;
        // Handle request with timeout.
        let f = async move {
            if let Err(err) = tokio::time::timeout(
                timeout,
                handle_inbound_request_inner(notification_tx, request, peer_handle),
            )
            .map_err(Into::<RpcError>::into)
            // Flatten Result<Result<_, _>, _> from the timeout wrapper.
            .map(|r| r.and_then(|x| x))
            .await
            {
                // Log any errors.
                counters::LIBRA_NETWORK_RPC_MESSAGES
                    .with_label_values(&[RESPONSE_LABEL, FAILED_LABEL])
                    .inc();
                warn!(
                    "Error handling inbound rpc request from {}: {:?}",
                    peer_id_str, err
                );
            }
        };
        inbound_rpc_tasks.push(f.boxed());
    }
    /// Handle an outbound rpc request.
    ///
    /// Cancellation is done by the client dropping the receiver side of the [`req.res_tx`]
    /// oneshot channel. If the request is canceled, the rpc future is dropped and the request is
    /// canceled. Currently, we don't send a cancellation message to the remote peer.
    ///
    /// [`req.res_tx`]: OutboundRpcRequest::res_tx
    async fn handle_outbound_rpc(
        &mut self,
        req: OutboundRpcRequest,
        outbound_rpc_tasks: &mut OutboundRpcTasks,
    ) {
        // If we already have too many pending RPCs, return error immediately.
        if outbound_rpc_tasks.len() as u32 == self.max_concurrent_outbound_rpcs {
            warn!(
                "Pending outbound RPCs ({}) exceeding limit ({}).",
                outbound_rpc_tasks.len(),
                self.max_concurrent_outbound_rpcs,
            );
            let _result = req.res_tx.send(Err(RpcError::TooManyPending(
                self.max_concurrent_outbound_rpcs,
            )));
            return;
        }
        // Unpack request.
        let OutboundRpcRequest {
            protocol,
            data: req_data,
            timeout,
            mut res_tx,
            ..
        } = req;
        let peer_handle = self.peer_handle.clone();
        let peer_id_str = peer_handle.peer_id().short_str();
        // Generate and assign request id to this RPC.
        let request_id = self.request_id_gen.next();
        // Create channel over which response is delivered to future driving outbound RPC.
        let (response_tx, response_rx) = oneshot::channel();
        // Save send end of channel which moving receive end of the channel into the future.
        self.pending_outbound_rpcs
            .insert(request_id, (protocol, response_tx));
        let f = async move {
            // Wrap the outbound rpc protocol with the requested timeout window.
            let mut f_rpc_res = tokio::time::timeout(
                timeout,
                // Future to run the actual outbound rpc protocol.
                handle_outbound_rpc_inner(peer_handle, request_id, protocol, req_data, response_rx),
            )
            .map_err(Into::<RpcError>::into)
            .map(|r| r.and_then(|x| x))
            .boxed()
            .fuse();
            // If the rpc client drops their oneshot receiver, this future should
            // cancel the request.
            let mut f_rpc_cancel =
                future::poll_fn(|cx: &mut Context| res_tx.poll_canceled(cx)).fuse();
            futures::select! {
                res = f_rpc_res => {
                    // Log any errors.
                    if let Err(ref err) = res {
                        counters::LIBRA_NETWORK_RPC_MESSAGES
                            .with_label_values(&[REQUEST_LABEL, FAILED_LABEL])
                            .inc();
                        warn!(
                            "Error making outbound rpc request with request_id {} to {}: {:?}",
                            request_id, peer_id_str, err
                        );
                    }
                    // Propagate the results to the rpc client layer.
                    if res_tx.send(res).is_err() {
                        counters::LIBRA_NETWORK_RPC_MESSAGES
                            .with_label_values(&[REQUEST_LABEL, CANCELED_LABEL])
                            .inc();
                        info!("Rpc client canceled outbound rpc call to {}", peer_id_str);
                    }
                },
                // The rpc client canceled the request
                cancel = f_rpc_cancel => {
                    counters::LIBRA_NETWORK_RPC_MESSAGES
                        .with_label_values(&[REQUEST_LABEL, CANCELED_LABEL])
                        .inc();
                    info!("Rpc client canceled outbound rpc call to {}", peer_id_str);
                },
            }
            // Return the request_id for state management in the main event-loop.
            request_id
        };
        outbound_rpc_tasks.push(f.boxed());
    }
}
/// Drives a single outbound RPC to completion: serializes and sends the
/// request over `peer_handle`, then awaits the matched response on
/// `response_rx`, recording latency and byte counters along the way.
async fn handle_outbound_rpc_inner(
    mut peer_handle: PeerHandle,
    request_id: RequestId,
    protocol: ProtocolId,
    req_data: Bytes,
    response_rx: oneshot::Receiver<RpcResponse>,
) -> Result<Bytes, RpcError> {
    let req_len = req_data.len();
    let peer_id = peer_handle.peer_id();
    let peer_id_str = peer_id.to_string();
    // Create NetworkMessage to be sent over the wire.
    let request = NetworkMessage::RpcRequest(RpcRequest {
        request_id,
        // TODO: Use default priority for now. To be exposed via network API.
        priority: Priority::default(),
        protocol_id: protocol,
        raw_request: Vec::from(req_data.as_ref()),
    });
    // Send outbound request to peer_handle.
    trace!(
        "Sending outbound rpc request with request_id {} to peer: {:?}",
        request_id,
        peer_id_str
    );
    // Renamed from the original misspelled `prototol_id_descriptor`.
    let protocol_id_descriptor = protocol.as_str();
    // Start timer to collect RPC latency.
    let timer = counters::LIBRA_NETWORK_RPC_LATENCY
        .with_label_values(&[REQUEST_LABEL, protocol_id_descriptor, &peer_id_str])
        .start_timer();
    peer_handle.send_message(request, protocol).await?;
    // Collect counters for requests sent.
    counters::LIBRA_NETWORK_RPC_MESSAGES
        .with_label_values(&[REQUEST_LABEL, SENT_LABEL])
        .inc();
    counters::LIBRA_NETWORK_RPC_BYTES
        .with_label_values(&[REQUEST_LABEL, SENT_LABEL])
        .observe(req_len as f64);
    // Wait for listener's response.
    trace!(
        "Waiting to receive response for request_id {} from peer: {:?}",
        request_id,
        peer_id_str
    );
    let response = response_rx.await?;
    let latency = timer.stop_and_record();
    trace!(
        "Received response for request_id {} from peer: {:?} \
         with {:.6} seconds of latency. Request protocol_id: {}",
        request_id,
        peer_id_str,
        latency,
        protocol_id_descriptor
    );
    // Collect counters for received response.
    let res_data = response.raw_response;
    counters::LIBRA_NETWORK_RPC_MESSAGES
        .with_label_values(&[RESPONSE_LABEL, RECEIVED_LABEL])
        .inc();
    counters::LIBRA_NETWORK_RPC_BYTES
        .with_label_values(&[RESPONSE_LABEL, RECEIVED_LABEL])
        .observe(res_data.len() as f64);
    Ok(Bytes::from(res_data))
}
// Services one inbound RPC end-to-end: forwards the request to the upstream
// handler over `notification_tx`, awaits the handler's response on a fresh
// oneshot channel, and writes that response back to the peer, updating
// message/byte counters at each step.
async fn handle_inbound_request_inner(
    mut notification_tx: channel::Sender<RpcNotification>,
    request: RpcRequest,
    mut peer_handle: PeerHandle,
) -> Result<(), RpcError> {
    let req_data = request.raw_request;
    let request_id = request.request_id;
    let peer_id = peer_handle.peer_id();
    trace!(
        "Received inbound request with request_id {} from peer: {:?}",
        request_id,
        peer_id.short_str()
    );
    // Collect counters for received request.
    counters::LIBRA_NETWORK_RPC_MESSAGES
        .with_label_values(&[REQUEST_LABEL, RECEIVED_LABEL])
        .inc();
    counters::LIBRA_NETWORK_RPC_BYTES
        .with_label_values(&[REQUEST_LABEL, RECEIVED_LABEL])
        .observe(req_data.len() as f64);
    // Forward request to upper layer.
    let (res_tx, res_rx) = oneshot::channel();
    let notification = RpcNotification::RecvRpc(InboundRpcRequest {
        protocol: request.protocol_id,
        data: Bytes::from(req_data),
        res_tx,
    });
    notification_tx.send(notification).await?;
    // Wait for response from upper layer.
    trace!(
        "Waiting for upstream response for inbound request with request_id {} from peer: {:?}",
        request_id,
        peer_id.short_str()
    );
    // Double-`?`: the outer for a dropped channel, the inner for a handler error.
    let res_data = res_rx.await??;
    let res_len = res_data.len();
    // Send response to remote peer.
    trace!(
        "Sending response for request_id {} to peer: {:?}",
        request_id,
        peer_id.short_str()
    );
    // The response echoes the request's id and priority so the peer can match it.
    let response = RpcResponse {
        raw_response: Vec::from(res_data.as_ref()),
        request_id,
        priority: request.priority,
    };
    peer_handle
        .send_message(NetworkMessage::RpcResponse(response), request.protocol_id)
        .await?;
    // Collect counters for sent response.
    counters::LIBRA_NETWORK_RPC_MESSAGES
        .with_label_values(&[RESPONSE_LABEL, SENT_LABEL])
        .inc();
    counters::LIBRA_NETWORK_RPC_BYTES
        .with_label_values(&[RESPONSE_LABEL, SENT_LABEL])
        .observe(res_len as f64);
    Ok(())
}
| true |
4328d2b1e37bff01856f1e57d0c3e8551ca2b046
|
Rust
|
EddyXorb/rusty-the-frac
|
/src/mandelbrottest.rs
|
UTF-8
| 467 | 2.703125 | 3 |
[] |
no_license
|
use crate::complex;
pub struct MandelbrotTestResult {
pub iterations: u8,
pub is_in: bool,
}
pub fn is_in_mandelbrot_set(c: complex::Cx, max_count: u8) -> MandelbrotTestResult {
let mut counter: u8 = 0;
let mut z = complex::Cx::new(0.0, 0.0);
while z.abs() < 20.0 && counter < max_count {
z = (z * z) + c;
counter += 1;
}
MandelbrotTestResult {
iterations: counter,
is_in: counter == max_count,
}
}
| true |
9161f31b9c39ad166f4027881ce298dded9f4d2a
|
Rust
|
grogers0/advent_of_code
|
/2015/day9/src/main.rs
|
UTF-8
| 2,033 | 3.1875 | 3 |
[
"MIT"
] |
permissive
|
use std::collections::{BTreeMap, BTreeSet};
use std::io::{self, Read};
use regex::Regex;
use permutohedron::LexicalPermutation;
fn parse_distances(input: &str) -> BTreeMap<[String; 2], u32> {
let re = Regex::new("^([a-zA-Z]+) to ([a-zA-Z]+) = (\\d+)$").unwrap();
let mut distances = BTreeMap::new();
for line in input.lines() {
let cap = re.captures(line).unwrap();
let x = cap[1].to_string();
let y = cap[2].to_string();
let d = cap[3].parse().unwrap();
distances.insert([x.clone(), y.clone()], d);
distances.insert([y, x], d);
}
distances
}
fn trip_distance(route: &Vec<String>, distances: &BTreeMap<[String; 2], u32>) -> u32 {
let mut dist = 0;
for pair in route.windows(2) {
dist += distances.get(pair).unwrap();
}
dist
}
fn find_shortest_longest_routes(input: &str) -> (u32, u32) {
let distances = parse_distances(input);
let cities: BTreeSet<_> = distances.keys().flat_map(|[city1, city2]| vec![city1, city2]).cloned().collect();
let mut cities: Vec<_> = cities.into_iter().collect();
let mut min_dist = std::u32::MAX;
let mut max_dist = std::u32::MIN;
while {
let dist = trip_distance(&cities, &distances);
if dist < min_dist { min_dist = dist; }
if dist > max_dist { max_dist = dist; }
cities.next_permutation()
} { }
(min_dist, max_dist)
}
fn part1(input: &str) -> u32 {
find_shortest_longest_routes(input).0
}
fn part2(input: &str) -> u32 {
find_shortest_longest_routes(input).1
}
fn main() {
let mut input = String::new();
io::stdin().read_to_string(&mut input).unwrap();
println!("{}", part1(&input));
println!("{}", part2(&input));
}
#[cfg(test)]
mod tests {
use super::*;
const EX: &str = "\
London to Dublin = 464
London to Belfast = 518
Dublin to Belfast = 141";
#[test]
fn test_part1() {
assert_eq!(part1(EX), 605);
}
#[test]
fn test_part2() {
assert_eq!(part2(EX), 982);
}
}
| true |
d599d4bbe3543040fcb45dfec10eefcd728126be
|
Rust
|
gnoliyil/fuchsia
|
/src/virtualization/bin/vmm/device/virtio_vsock/src/port_manager.rs
|
UTF-8
| 17,650 | 2.671875 | 3 |
[
"BSD-2-Clause"
] |
permissive
|
// Copyright 2022 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
use {
crate::connection::VsockConnectionKey,
fuchsia_async as fasync, fuchsia_zircon as zx,
std::collections::{hash_map::Entry, HashMap, HashSet, VecDeque},
};
// Aliases to make the host/guest direction of a port explicit in signatures.
type HostPort = u32;
type GuestPort = u32;
// Ephemeral range taken from:
// https://www.iana.org/assignments/service-names-port-numbers/service-names-port-numbers.xhtml
const FIRST_EPHEMERAL_PORT: HostPort = 49152;
const LAST_EPHEMERAL_PORT: HostPort = 65535;
// This is an arbitrarily chosen length of time which can be adjusted up or down as needed.
const QUARANTINE_TIME: zx::Duration = zx::Duration::from_seconds(10);
// Per-host-port bookkeeping: whether a client is listening on the port, and
// which guest ports currently have a connection multiplexed over it.
struct HostPortInfo {
    has_listener: bool,
    guest_ports: HashSet<GuestPort>,
}
impl HostPortInfo {
    // An entry with no listener and no guest connections.
    fn new() -> Self {
        HostPortInfo { has_listener: false, guest_ports: HashSet::new() }
    }
}
// A connection awaiting the end of its quarantine; `available_time` is the
// instant after which its host/guest port pair may be reused.
#[derive(Clone, Copy, Debug)]
struct QuarantinedConnection {
    connection: VsockConnectionKey,
    available_time: fasync::Time,
}
impl QuarantinedConnection {
    fn new(connection: VsockConnectionKey) -> Self {
        // The quarantine ends QUARANTINE_TIME after creation.
        let available_time = fasync::Time::after(QUARANTINE_TIME);
        QuarantinedConnection { connection, available_time }
    }
}
pub struct PortManager {
    // Active ports tracked by the device. Multiple connections can be multiplexed over a single
    // host port, so a port is active as long as there is at least one connection or listener.
    active_ports: HashMap<HostPort, HostPortInfo>,
    // Connections that have been force shutdown (peer sends reset before the other side sent
    // shutdown) have not ended cleanly, so they are quarantined for a set amount of time to
    // prevent races on new, unrelated connections.
    quarantined_connections: VecDeque<QuarantinedConnection>,
    // The ephemeral port to start searching on, which is set to one past the last free port found.
    // This is used as a hint when searching for the next free port.
    ephemeral_port_start_search: HostPort,
}
impl PortManager {
    pub fn new() -> Self {
        PortManager {
            active_ports: HashMap::new(),
            quarantined_connections: VecDeque::new(),
            ephemeral_port_start_search: FIRST_EPHEMERAL_PORT,
        }
    }
    // Attempts to listen on a port. If a client is already listening on this port, returns
    // zx::Status::ALREADY_BOUND.
    pub fn add_listener(&mut self, port: HostPort) -> Result<(), zx::Status> {
        let entry = self.active_ports.entry(port).or_insert(HostPortInfo::new());
        if entry.has_listener {
            Err(zx::Status::ALREADY_BOUND)
        } else {
            entry.has_listener = true;
            Ok(())
        }
    }
    // Stops listening on a port. If there are no active connections, this port can immediately
    // be reused.
    //
    // Panics if no listener is registered on `port`: callers are expected to
    // only remove listeners they previously added, so a miss is a bug.
    pub fn remove_listener(&mut self, port: HostPort) {
        let result = match self.active_ports.entry(port) {
            Entry::Vacant(_) => Err(zx::Status::NOT_FOUND),
            Entry::Occupied(mut entry) => {
                if entry.get().has_listener {
                    entry.get_mut().has_listener = false;
                    if entry.get().guest_ports.is_empty() {
                        // There was a listener on this port without any connections, so the port
                        // can immediately be reused.
                        entry.remove_entry();
                    }
                    Ok(())
                } else {
                    Err(zx::Status::NOT_FOUND)
                }
            }
        };
        if result.is_err() {
            panic!("Attempted to stop listening on port {} which had no active listener", port);
        }
    }
    // Attempts to add a unique host/guest pair. If the connection already exists (including if the
    // connection is quarantined), returns zx::Status::ALREADY_EXISTS.
    pub fn add_connection(&mut self, connection: VsockConnectionKey) -> Result<(), zx::Status> {
        // Expire finished quarantines first so stale pairs don't block reuse.
        self.check_quarantined_connections();
        let entry = self.active_ports.entry(connection.host_port).or_insert(HostPortInfo::new());
        if entry.guest_ports.contains(&connection.guest_port) {
            Err(zx::Status::ALREADY_EXISTS)
        } else {
            entry.guest_ports.insert(connection.guest_port);
            Ok(())
        }
    }
    // Removes an active connection without quarantining it.
    //
    // Panics if the connection is not currently tracked.
    pub fn remove_connection(&mut self, connection: VsockConnectionKey) {
        if let Err(_) = self.remove_connection_from_active(connection) {
            panic!("Attempted to remove untracked connection: {:?}", connection);
        }
    }
    // Removes and quarantines a connection. This connection stays active until leaving quarantine,
    // so until then no duplicate connections can be made and this port pair cannot be reused.
    pub fn remove_connection_unclean(&mut self, connection: VsockConnectionKey) {
        self.quarantined_connections.push_back(QuarantinedConnection::new(connection));
    }
    // Attempts to find an unused port from the ephemeral range. If none are available, returns
    // zx::Status::NO_RESOURCES.
    pub fn find_unused_ephemeral_port(&mut self) -> Result<HostPort, zx::Status> {
        match self.find_unused_port_in_range(
            FIRST_EPHEMERAL_PORT,
            LAST_EPHEMERAL_PORT,
            self.ephemeral_port_start_search,
        ) {
            Ok(port) => {
                // Resume the next search one past the port just handed out,
                // wrapping at the end of the ephemeral range.
                self.ephemeral_port_start_search =
                    if port == LAST_EPHEMERAL_PORT { FIRST_EPHEMERAL_PORT } else { port + 1 };
                Ok(port)
            }
            err => err,
        }
    }
    // Allocates the first unused port between start and end, inclusive. Starts at hint which must
    // be within the defined range. Returns zx::Status::NO_RESOURCES if all ports are in use.
    fn find_unused_port_in_range(
        &mut self,
        start: HostPort,
        end: HostPort,
        hint: HostPort,
    ) -> Result<HostPort, zx::Status> {
        assert!(hint >= start && hint <= end);
        self.check_quarantined_connections();
        let mut current_port = hint;
        loop {
            if !self.active_ports.contains_key(&current_port) {
                return Ok(current_port);
            }
            // Wrap around at `end`; arriving back at `hint` means the whole
            // range was scanned and every port is in use.
            current_port = if current_port == end { start } else { current_port + 1 };
            if current_port == hint {
                return Err(zx::Status::NO_RESOURCES);
            }
        }
    }
    // Removes a connection from the active connection map, returning zx::Status::NOT_FOUND if the
    // connection was not present.
    fn remove_connection_from_active(
        &mut self,
        connection: VsockConnectionKey,
    ) -> Result<(), zx::Status> {
        match self.active_ports.entry(connection.host_port) {
            Entry::Vacant(_) => Err(zx::Status::NOT_FOUND),
            Entry::Occupied(mut entry) => {
                if entry.get().guest_ports.contains(&connection.guest_port) {
                    entry.get_mut().guest_ports.remove(&connection.guest_port);
                    if entry.get().guest_ports.is_empty() && !entry.get().has_listener {
                        // No listener and no remaining connections, so this port can be
                        // immediately reused.
                        entry.remove_entry();
                    }
                    Ok(())
                } else {
                    Err(zx::Status::NOT_FOUND)
                }
            }
        }
    }
    // Frees connections that have been quarantined for enough time. This should happen before
    // attempting to allocate ports.
    fn check_quarantined_connections(&mut self) {
        let now = fasync::Time::now();
        // Entries are pushed in quarantine order, so their available times are
        // non-decreasing front-to-back and scanning can stop at the first
        // entry that is still quarantined.
        while !self.quarantined_connections.is_empty() {
            let front = self.quarantined_connections.front().unwrap().clone();
            if front.available_time < now {
                if let Err(_) = self.remove_connection_from_active(front.connection) {
                    panic!(
                        "A quarantined connection was removed from active ports before its \
                        quarantine ended: {:?}",
                        front
                    );
                }
                self.quarantined_connections.pop_front();
            } else {
                break;
            }
        }
    }
}
#[cfg(test)]
mod tests {
use {
super::*,
fidl_fuchsia_virtualization::{DEFAULT_GUEST_CID, HOST_CID},
fuchsia_async::TestExecutor,
};
#[fuchsia::test]
async fn listen_on_ports() {
let mut port_manager = PortManager::new();
assert!(port_manager.add_listener(12345).is_ok());
assert!(port_manager.active_ports.get(&12345).unwrap().has_listener);
assert!(port_manager.add_listener(54321).is_ok());
assert!(port_manager.active_ports.get(&54321).unwrap().has_listener);
}
#[fuchsia::test]
async fn multiple_listen_on_single_port() {
let mut port_manager = PortManager::new();
assert!(port_manager.add_listener(12345).is_ok());
assert_eq!(port_manager.add_listener(12345).unwrap_err(), zx::Status::ALREADY_BOUND);
// The original listener is still set.
assert!(port_manager.active_ports.get(&12345).unwrap().has_listener);
}
#[fuchsia::test]
fn listen_unlisten_listen_on_port() {
let executor = TestExecutor::new_with_fake_time().unwrap();
executor.set_fake_time(fuchsia_async::Time::from_nanos(0));
let mut port_manager = PortManager::new();
// No need to progress time -- listeners are not quarantined when removed
// as the state is easily synchronized via FIDL channel.
assert!(port_manager.add_listener(12345).is_ok());
port_manager.remove_listener(12345);
assert!(port_manager.add_listener(12345).is_ok());
}
#[fuchsia::test]
async fn port_stays_active_after_unlistening_with_active_connections() {
let mut port_manager = PortManager::new();
assert!(port_manager.add_listener(12345).is_ok());
assert!(port_manager
.add_connection(VsockConnectionKey::new(HOST_CID, 12345, DEFAULT_GUEST_CID, 54321))
.is_ok());
port_manager.remove_listener(12345);
// Port is still active as there's an active connection.
assert!(port_manager.active_ports.contains_key(&12345));
}
#[fuchsia::test]
async fn clean_connection_shutdown_does_not_quarantine() {
let mut port_manager = PortManager::new();
assert!(port_manager
.add_connection(VsockConnectionKey::new(HOST_CID, 12345, DEFAULT_GUEST_CID, 54321))
.is_ok());
port_manager.remove_connection(VsockConnectionKey::new(
HOST_CID,
12345,
DEFAULT_GUEST_CID,
54321,
));
assert!(!port_manager.active_ports.contains_key(&12345));
assert!(port_manager.quarantined_connections.is_empty());
}
#[fuchsia::test]
async fn connection_pair_already_in_use() {
let mut port_manager = PortManager::new();
// All three of these are fine -- ports can be multiplexed but the connection pair must be
// unique.
assert!(port_manager
.add_connection(VsockConnectionKey::new(HOST_CID, 1, DEFAULT_GUEST_CID, 2))
.is_ok());
assert!(port_manager
.add_connection(VsockConnectionKey::new(HOST_CID, 1, DEFAULT_GUEST_CID, 3))
.is_ok());
assert!(port_manager
.add_connection(VsockConnectionKey::new(HOST_CID, 3, DEFAULT_GUEST_CID, 1))
.is_ok());
// This connection is a duplicate, and is thus rejected.
assert_eq!(
port_manager
.add_connection(VsockConnectionKey::new(HOST_CID, 1, DEFAULT_GUEST_CID, 2))
.unwrap_err(),
zx::Status::ALREADY_EXISTS
);
}
#[fuchsia::test]
fn port_stays_active_when_connection_quarantined() {
let executor = TestExecutor::new_with_fake_time().unwrap();
executor.set_fake_time(fuchsia_async::Time::from_nanos(0));
let mut port_manager = PortManager::new();
assert!(port_manager
.add_connection(VsockConnectionKey::new(HOST_CID, 1, DEFAULT_GUEST_CID, 2))
.is_ok());
port_manager.remove_connection_unclean(VsockConnectionKey::new(
HOST_CID,
1,
DEFAULT_GUEST_CID,
2,
));
// Still in quarantine.
assert_eq!(
port_manager
.add_connection(VsockConnectionKey::new(HOST_CID, 1, DEFAULT_GUEST_CID, 2))
.unwrap_err(),
zx::Status::ALREADY_EXISTS
);
}
#[fuchsia::test]
fn port_stays_active_when_no_connections_but_listener() {
let executor = TestExecutor::new_with_fake_time().unwrap();
executor.set_fake_time(fuchsia_async::Time::from_nanos(0));
let mut port_manager = PortManager::new();
assert!(port_manager
.add_connection(VsockConnectionKey::new(HOST_CID, 1, DEFAULT_GUEST_CID, 2))
.is_ok());
assert!(port_manager.add_listener(1).is_ok());
// Both connection and listener are on the same port.
assert_eq!(port_manager.active_ports.len(), 1);
assert_eq!(port_manager.active_ports.get(&1).unwrap().guest_ports.len(), 1);
port_manager.remove_connection_unclean(VsockConnectionKey::new(
HOST_CID,
1,
DEFAULT_GUEST_CID,
2,
));
// One nano after quarantine ends.
executor.set_fake_time(fuchsia_async::Time::after(
QUARANTINE_TIME + zx::Duration::from_nanos(1),
));
// Port is still in use due to the listener (need to check quarantined connections
// explicitly as this is usually only checked when calling a public function).
port_manager.check_quarantined_connections();
assert_eq!(port_manager.active_ports.len(), 1);
assert!(port_manager.active_ports.get(&1).unwrap().guest_ports.is_empty());
}
#[fuchsia::test]
fn connection_pair_recycled_after_quarantine() {
let executor = TestExecutor::new_with_fake_time().unwrap();
executor.set_fake_time(fuchsia_async::Time::from_nanos(0));
let mut port_manager = PortManager::new();
assert!(port_manager
.add_connection(VsockConnectionKey::new(HOST_CID, 1, DEFAULT_GUEST_CID, 2))
.is_ok());
port_manager.remove_connection_unclean(VsockConnectionKey::new(
HOST_CID,
1,
DEFAULT_GUEST_CID,
2,
));
// One nano after quarantine ends.
executor.set_fake_time(fuchsia_async::Time::after(
QUARANTINE_TIME + zx::Duration::from_nanos(1),
));
// Can re-use the now unquarantined connection.
assert!(port_manager
.add_connection(VsockConnectionKey::new(HOST_CID, 1, DEFAULT_GUEST_CID, 2))
.is_ok());
}
#[fuchsia::test]
async fn find_ephemeral_ports() {
let mut port_manager = PortManager::new();
let port = port_manager.find_unused_ephemeral_port().unwrap();
assert_eq!(port, FIRST_EPHEMERAL_PORT);
assert!(port_manager
.add_connection(VsockConnectionKey::new(HOST_CID, port, DEFAULT_GUEST_CID, 2))
.is_ok());
let port = port_manager.find_unused_ephemeral_port().unwrap();
assert_eq!(port, FIRST_EPHEMERAL_PORT + 1);
assert!(port_manager
.add_connection(VsockConnectionKey::new(HOST_CID, port, DEFAULT_GUEST_CID, 2))
.is_ok());
port_manager.remove_connection(VsockConnectionKey::new(
HOST_CID,
FIRST_EPHEMERAL_PORT,
DEFAULT_GUEST_CID,
2,
));
// Even though the first ephemeral port is now free, the port manager hints based on the
// last used ephemeral port.
let port = port_manager.find_unused_ephemeral_port().unwrap();
assert_eq!(port, FIRST_EPHEMERAL_PORT + 2);
assert!(port_manager
.add_connection(VsockConnectionKey::new(HOST_CID, port, DEFAULT_GUEST_CID, 2))
.is_ok());
}
#[fuchsia::test]
async fn no_unused_ports_in_range() {
let mut port_manager = PortManager::new();
// Use host ports 0 to 5, inclusive.
assert!(port_manager
.add_connection(VsockConnectionKey::new(HOST_CID, 0, DEFAULT_GUEST_CID, 2))
.is_ok());
assert!(port_manager
.add_connection(VsockConnectionKey::new(HOST_CID, 1, DEFAULT_GUEST_CID, 2))
.is_ok());
assert!(port_manager
.add_connection(VsockConnectionKey::new(HOST_CID, 2, DEFAULT_GUEST_CID, 2))
.is_ok());
assert!(port_manager
.add_connection(VsockConnectionKey::new(HOST_CID, 3, DEFAULT_GUEST_CID, 2))
.is_ok());
assert!(port_manager
.add_connection(VsockConnectionKey::new(HOST_CID, 4, DEFAULT_GUEST_CID, 2))
.is_ok());
assert!(port_manager
.add_connection(VsockConnectionKey::new(HOST_CID, 5, DEFAULT_GUEST_CID, 2))
.is_ok());
assert_eq!(
port_manager.find_unused_port_in_range(0, 5, 0).unwrap_err(),
zx::Status::NO_RESOURCES
);
}
}
| true |
cd6f7835601f902b12800d2441b317a361303cfb
|
Rust
|
bfops/rust-raytrace
|
/src/scene.rs
|
UTF-8
| 1,997 | 3.484375 | 3 |
[
"MIT"
] |
permissive
|
use prelude::*;
/// Result of a successful ray/object intersection test.
pub struct Collision<'a> {
    /// The object that was hit.
    pub object : &'a Object,
    /// Time of impact: the parameter `t` along the ray, in units of the ray
    /// direction (location = origin + toi * direction).
    pub toi : f32,
    /// World-space point where the ray meets the surface.
    pub location : Point,
    /// Unit surface normal at `location`.
    pub normal : Vector,
}
/// A sphere in the scene together with its material properties.
pub struct Object {
    pub center : Point,
    pub radius : f32,
    pub shininess : f32,
    pub emittance : f32,
    pub reflectance : f32,
    pub transmittance : f32,
    pub texture : Texture,
}
/// Combines two optional values: joins them with `f` when both are present,
/// returns whichever single value exists otherwise, and `None` when neither does.
fn either_or_join<T, F: FnOnce(T, T) -> T>(f: F, x: Option<T>, y: Option<T>) -> Option<T> {
    match (x, y) {
        (Some(a), Some(b)) => Some(f(a, b)),
        (only, None) => only,
        (None, only) => only,
    }
}
impl Object {
    /// Ray/sphere intersection. Returns the collision at the nearest
    /// non-negative time of impact along `ray`, or `None` if the ray misses.
    pub fn intersect_ray<'a>(&'a self, ray: &Ray) -> Option<Collision<'a>> {
        // Solve |origin + t*direction - center|^2 = radius^2 for t, i.e. the
        // quadratic a*t^2 + b*t + c = 0 with the coefficients below.
        // quadratic coefficients
        let a = dot(ray.direction, ray.direction);
        let to_center = ray.origin - self.center;
        let b = 2.0 * dot(to_center, ray.direction);
        let c = dot(to_center, to_center) - self.radius*self.radius;
        // discriminant
        let d = b*b - 4.0*a*c;
        if d < 0.0 {
            // No real roots: the ray misses the sphere entirely.
            return None;
        }
        let d = d.sqrt();
        let a = 2.0 * a;
        // The two roots; roots behind the ray origin (t < 0) are discarded.
        let s1 = (d - b) / a;
        let s1 = if s1 >= 0.0 { Some(s1) } else { None };
        let s2 = (-d - b) / a;
        let s2 = if s2 >= 0.0 { Some(s2) } else { None };
        // Keep the nearer of the surviving roots, if any.
        either_or_join(f32::min, s1, s2)
        .map(|toi| {
            let location = ray.origin + toi*ray.direction;
            Collision {
                object    : self,
                toi       : toi,
                location  : location,
                normal    : normalize(location - self.center),
            }
        })
    }
}
/// Surface appearance of an object. Currently only a uniform color.
pub enum Texture {
    SolidColor(RGB),
}
/// The scene: all objects plus a simple camera.
pub struct T {
    pub objects : Vec<Object>,
    /// Vertical field of view (units not shown here -- presumably radians;
    /// confirm at the call site).
    pub fovy    : f32,
    pub eye     : Point,
    pub look    : Vector,
    pub up      : Vector,
}
impl T {
    /// Translates the camera position by `v`.
    pub fn move_camera(&mut self, v: &Vector) {
        self.eye = self.eye + v;
    }
    /// Camera-space right axis, derived from `look` and `up`.
    pub fn x(&self) -> Vector {
        self.look.cross(self.up)
    }
    /// Camera-space up axis.
    pub fn y(&self) -> Vector {
        self.up
    }
    /// Camera-space forward axis.
    pub fn z(&self) -> Vector {
        self.look
    }
}
| true |
fb91e763848c99bf31b4f5d5a5b9f1e5e2ea56bb
|
Rust
|
Aaron1011/miri
|
/tests/compile-fail/ptr_eq_out_of_bounds_null.rs
|
UTF-8
| 290 | 2.734375 | 3 |
[
"Apache-2.0",
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
// Miri compile-fail test: comparing an out-of-bounds pointer against NULL must
// be rejected. The `//~ ERROR` annotation pins the expected diagnostic; do not
// alter the expression it is attached to.
fn main() {
    let b = Box::new(0);
    let x = (&*b as *const i32).wrapping_sub(0x800); // out-of-bounds
    // We cannot compare this with NULL. After all, this *could* be NULL (with the right base address).
    assert!(x != std::ptr::null()); //~ ERROR invalid arithmetic on pointers
}
| true |
54defa7d3a3e7676334b73b827b7ded5fa6b7dd6
|
Rust
|
ChezRD/blockchain-voting_2021_extracted
|
/blockchain/dit-blockchain-source/services/votings-service/src/api/voting_state.rs
|
UTF-8
| 917 | 2.515625 | 3 |
[] |
no_license
|
use exonum_rust_runtime::api::{self, ServiceApiState};
use crate::{
errors::Error,
schema::{
Voting,
},
enums::VotingState,
};
/// Query parameters for the voting-state endpoint.
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct VotingStateQuery {
    pub voting_id: String,
}
/// Response body: the voting's state rendered as a string.
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct VotingStateView {
    pub state: String
}
/// API handler: looks up the voting identified by `query.voting_id` and
/// returns its current state.
///
/// Fails with `Error::VotingDoesNotExist` when no such voting is stored.
pub async fn get_voting_state(
    state: ServiceApiState,
    query: VotingStateQuery,
) -> api::Result<VotingStateView> {
    let voting = Voting::get(state.service_data(), &query.voting_id)
        .ok_or_else(|| Error::VotingDoesNotExist)?;
    let voting_state = voting.get_state();
    Ok(VotingStateView {
        // Serialize the enum variant name verbatim for the JSON response.
        state: match voting_state {
            VotingState::Registration => "Registration".to_owned(),
            VotingState::InProcess => "InProcess".to_owned(),
            VotingState::Stopped => "Stopped".to_owned(),
            VotingState::Finished => "Finished".to_owned(),
        }
    })
}
| true |
f3f2a86514190e9671fa48b1c43d53dbada3042d
|
Rust
|
ClayCore/templates
|
/rust/src/logging/mod.rs
|
UTF-8
| 1,761 | 2.78125 | 3 |
[] |
no_license
|
mod errors;
use chrono::Local;
use colored::*;
use env_logger::Builder;
use errors::LoggingError;
use log::LevelFilter;
use std::io::Write;
// Instead of using `?` operator, we may want to use this
// to handle errors which may be worth logging.
// Behaves like `?`/`try!` but calls `log::error!` on the error before
// converting it and returning from the enclosing function.
#[allow(unused_macros)]
macro_rules! try_log {
    ($expr:expr) => {
        match $expr {
            ::std::result::Result::Ok(val) => val,
            ::std::result::Result::Err(err) => {
                log::error!("{}", err);
                return ::std::result::Result::Err(::std::convert::From::from(err));
            }
        }
    };
    ($expr:expr,) => {
        // Trailing-comma form delegates to the single-argument arm.
        $crate::logging::try_log!($expr)
    };
}
// Initializes `pretty_env_logger`,
// which reads the `RUST_LOG` env var from `.env` in project root
// NOTE(review): `Builder::new()` does not itself parse RUST_LOG, and the
// `.filter(None, LevelFilter::Trace)` call below forces the level to Trace
// unconditionally -- confirm whether RUST_LOG is really meant to be honored.
pub fn init() -> Result<(), LoggingError> {
    // Load `.env` so environment variables are available to the process.
    dotenv::dotenv()?;
    Builder::new()
        .format(|buf, record| {
            // Color-code the level name for terminal output.
            let level = match record.level() {
                log::Level::Error => format!("{}", format!("{}", record.level()).bright_red()),
                log::Level::Debug => format!("{}", format!("{}", record.level()).bright_blue()),
                log::Level::Info => format!("{}", format!("{}", record.level()).bright_green()),
                log::Level::Trace => format!("{}", format!("{}", record.level()).bright_magenta()),
                log::Level::Warn => format!("{}", format!("{}", record.level()).bright_yellow()),
            };
            // [date | time LEVEL target]: message
            writeln!(
                buf,
                "[{} {} {}]: {}",
                Local::now().format("%Y/%m/%d | %H:%M:%S"),
                level,
                record.target(),
                record.args(),
            )
        })
        .filter(None, LevelFilter::Trace)
        .init();
    Ok(())
}
| true |
fadbd5f0eaab4d155154420ff03706019298275c
|
Rust
|
JerTH/elfy
|
/src/numeric.rs
|
UTF-8
| 4,614 | 3.515625 | 4 |
[
"MIT"
] |
permissive
|
//! Types describing various simple value types that may be found in an ELF file
use std::io::{ Read, Seek };
use std::convert::TryInto;
use crate::{ Parslet, ParseElfResult, Descriptor, DataClass, DataFormat };
/// Represents a 16 bit half word in an ELF file
#[derive(PartialEq, Eq, Clone, Copy)]
pub struct Short(pub u16);
impl Short {
    /// Returns the contained `u16` as a `usize`, zero extending it
    pub fn as_usize(self) -> usize {
        self.0 as usize
    }
}
impl Parslet for Short {
    // Reads a half word honoring the byte order recorded in `descriptor`.
    // (`read_u16!` and friends are macros defined elsewhere in this crate.)
    fn parse<R: Read + Seek>(reader: &mut R, descriptor: &mut Descriptor) -> ParseElfResult<Self> {
        Ok(Short(read_u16!(reader, descriptor)))
    }
}
impl std::fmt::Debug for Short {
    // Shorts print as plain decimal.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "{}", self.0)
    }
}
/// Represents a 32 bit word in an ELF file
#[derive(PartialEq, Eq, Clone, Copy)]
pub struct Word(pub u32);
impl Word {
    /// Returns the contained `u32` as a `usize`, zero extending it if necessary
    pub fn as_usize(self) -> usize {
        self.0 as usize
    }
    /// Returns the contained 'u32' as a `u64`, zero extending it
    pub fn as_u64(self) -> u64 {
        self.0 as u64
    }
}
impl Parslet for Word {
    fn parse<R: Read + Seek>(reader: &mut R, descriptor: &mut Descriptor) -> ParseElfResult<Self> {
        Ok(Word(read_u32!(reader, descriptor)))
    }
}
impl std::fmt::Debug for Word {
    // Words print as plain decimal.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "{}", self.0)
    }
}
/// Used to represent both 32 and 64 bit sizes and offsets within an ELF file
#[derive(PartialEq, Eq, Clone, Copy)]
pub enum Size {
    /// The `Size` type for ELF32
    Elf32Size(u32),
    /// The `Size` type for ELF64
    Elf64Size(u64)
}
impl Size {
    /// Returns the contained value as `usize`
    ///
    /// # Panics
    ///
    /// This method panics if the contained value would not fit into a `usize` without truncation
    pub fn as_usize(&self) -> usize {
        match self {
            Size::Elf32Size(v) => (*v).try_into().expect("Unable to convert `Elf32Size` to `usize` without truncating"),
            Size::Elf64Size(v) => (*v).try_into().expect("Unable to convert `Elf64Size` to `usize` without truncating")
        }
    }
    /// Returns the contained value as a `u64`, zero extending it if necessary
    ///
    /// Unlike `as_usize`, this never panics: both variants always fit in a
    /// `u64`, so it no longer routes through `as_usize` (which would panic on
    /// 16/32-bit hosts for large `Elf64Size` values).
    pub fn as_u64(&self) -> u64 {
        match self {
            Size::Elf32Size(v) => u64::from(*v),
            Size::Elf64Size(v) => *v,
        }
    }
}
impl Parslet for Size {
    // The field width depends on the file's data class (ELF32 vs ELF64).
    fn parse<R: Read + Seek>(reader: &mut R, descriptor: &mut Descriptor) -> ParseElfResult<Self> {
        match descriptor.data_class()? {
            DataClass::Elf32 => Ok(Size::Elf32Size(read_u32!(reader, descriptor))),
            DataClass::Elf64 => Ok(Size::Elf64Size(read_u64!(reader, descriptor))),
        }
    }
}
impl std::fmt::Debug for Size {
    // Sizes print as plain decimal.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            Size::Elf32Size(v) => {
                write!(f, "{}", v)
            },
            Size::Elf64Size(v) => {
                write!(f, "{}", v)
            }
        }
    }
}
/// This struct is used to represent both 32 and 64 bit virtual or physical addresses in ELF files and process images
#[derive(PartialEq, Eq, Clone, Copy)]
pub enum Address {
    /// The `Address` type for ELF32
    Elf32Addr(u32),
    /// The `Address` type for ELF64
    Elf64Addr(u64)
}
impl Address {
    /// Returns the contained value as `usize`
    ///
    /// # Panics
    ///
    /// This method panics if the contained value would not fit into a `usize` without truncation
    pub fn as_usize(&self) -> usize {
        match self {
            Address::Elf32Addr(v) => (*v).try_into().expect("Unable to convert `Elf32Addr` to `usize` without truncating"),
            Address::Elf64Addr(v) => (*v).try_into().expect("Unable to convert `Elf64Addr` to `usize` without truncating")
        }
    }
}
impl Parslet for Address {
    // Like `Size`, the width depends on the file's data class.
    fn parse<R: Read + Seek>(reader: &mut R, descriptor: &mut Descriptor) -> ParseElfResult<Self> {
        match descriptor.data_class()? {
            DataClass::Elf32 => Ok(Address::Elf32Addr(read_u32!(reader, descriptor))),
            DataClass::Elf64 => Ok(Address::Elf64Addr(read_u64!(reader, descriptor))),
        }
    }
}
impl std::fmt::Debug for Address {
    // Addresses print as uppercase hexadecimal (e.g. 0x401000).
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            Address::Elf32Addr(v) => {
                write!(f, "{:#X}", v)
            },
            Address::Elf64Addr(v) => {
                write!(f, "{:#X}", v)
            }
        }
    }
}
| true |
1b3a88028a523b99a3e4e2901b5d5a8fa376428e
|
Rust
|
Deskbot/Advent-of-Code-2020
|
/src/day/day13.rs
|
UTF-8
| 2,979 | 3.5 | 4 |
[] |
no_license
|
use std::fs;
/// Entry point for day 13: loads the puzzle input file and prints both answers.
pub fn day13() {
    let input = fs::read_to_string("input/day13.txt").expect("input not found");
    println!("Part 1: {}", part1(&input));
    println!("Part 2: {}", part2(&input));
}
/// Part 1: find the first bus departing at or after the timestamp on the
/// input's first line; the answer is that bus's id times the minutes waited.
fn part1(input: &str) -> i64 {
    let mut lines = input.lines();
    let current_time: i64 = lines.next().unwrap().parse().unwrap();
    let buses = lines
        .next()
        .unwrap()
        .split(',')
        .filter(|&bus| bus != "x") // "x" marks an out-of-service slot
        .map(|bus| bus.parse::<i64>().unwrap());
    // Buses depart at multiples of their id, so the wait for bus `id` is
    // `(id - t % id) % id`. The trailing `% id` makes the wait 0 when a bus
    // departs exactly at `t` (the previous formula waited a full period in
    // that case).
    let (bus_id, wait_time) = buses
        .map(|id| (id, (id - current_time % id) % id))
        .min_by_key(|&(_, wait)| wait)
        .unwrap();
    bus_id * wait_time
}
/// Part 2: earliest timestamp `t` such that each listed bus (id at position
/// `offset` in the schedule line) departs at `t + offset`.
///
/// Solved with a CRT-style sieve: once the first k buses are satisfied, every
/// solution recurs with period equal to the product of their ids (the puzzle
/// ids are pairwise coprime), so we step by that product while fitting the
/// next bus. Unlike the previous version, this handles a schedule whose first
/// entry is "x" (the first bus is treated like any other, honoring its offset).
fn part2(input: &str) -> i64 {
    let schedule = input.lines().nth(1).expect("missing bus schedule line");
    // (offset, id) pairs for the in-service buses.
    let buses: Vec<(i64, i64)> = schedule
        .split(',')
        .enumerate()
        .filter_map(|(offset, id)| id.parse::<i64>().ok().map(|id| (offset as i64, id)))
        .collect();
    let mut time = 0i64;
    let mut step = 1i64;
    for (offset, bus_id) in buses {
        // Advance in strides that preserve every previously satisfied bus
        // until this bus also departs at `time + offset`.
        while (time + offset) % bus_id != 0 {
            time += step;
        }
        step *= bus_id;
    }
    time
}
// Examples from the AoC 2020 day 13 problem statement.
#[cfg(test)]
mod tests {
    use super::*;
    const EXAMPLE: &str = "939
7,13,x,x,59,x,31,19";
    #[test]
    fn part1_example() {
        assert_eq!(part1(EXAMPLE), 295);
    }
    #[test]
    fn part2_example() {
        assert_eq!(part2(EXAMPLE), 1068781);
    }
    // part2 ignores the first line, hence the placeholder "poop".
    #[test]
    fn part2_example2() {
        assert_eq!(part2("poop\n7,13"), 77);
    }
    #[test]
    fn part2_example3() {
        assert_eq!(part2("poop\n17,x,13,19"), 3417);
    }
    #[test]
    fn part2_example4() {
        assert_eq!(part2("poop\n67,7,59,61"), 754018);
    }
    #[test]
    fn part2_example5() {
        assert_eq!(part2("poop\n67,x,7,59,61"), 779210);
    }
    #[test]
    fn part2_example6() {
        assert_eq!(part2("poop\n67,7,x,59,61"), 1261476);
    }
    #[test]
    fn part2_example7() {
        assert_eq!(part2("poop\n1789,37,47,1889"), 1202161486);
    }
}
| true |
88e862950e70ead388b7c67d342f38de42b9e33c
|
Rust
|
Dano-Sato/Dano-Sato-s-code-jungle
|
/Rust/test.rs
|
UTF-8
| 107 | 2.8125 | 3 |
[] |
no_license
|
// Minimal hello-world example. `println!` is a macro, hence the trailing `!`.
fn main() {
    println!("Hello, world");
}
| true |
1f0ae2ec8c1ad23736b670b9925fd7a236c493bc
|
Rust
|
fujiehuang/leetcode-rust
|
/35.search-insert-position.rs
|
UTF-8
| 594 | 2.8125 | 3 |
[] |
no_license
|
/*
* @lc app=leetcode id=35 lang=rust
*
* [35] Search Insert Position
*/
// @lc code=start
impl Solution {
pub fn search_insert(nums: Vec<i32>, target: i32) -> i32 {
let mut lo = 0;
let mut hi = nums.len();
while lo < hi {
let mid = (lo + hi) / 2;
let val = *nums.get(mid).unwrap();
if (val == target) {
return mid as i32;
} else if (val < target) {
lo = mid + 1;
} else {
hi = mid;
}
}
lo as i32
}
}
// @lc code=end
| true |
48cf7473eeb6b0dce0943a7238b90a15e3cf9409
|
Rust
|
Techno-coder/lexica
|
/src/inference/inference.rs
|
UTF-8
| 6,135 | 2.8125 | 3 |
[
"MIT"
] |
permissive
|
use std::fmt;
use std::sync::Arc;
use crate::basic::Projection;
use crate::declaration::{ModulePath, StructurePath};
use crate::error::CompileError;
use crate::intrinsic::Intrinsic;
use crate::node::Permission;
use super::TypeEngine;
/// Errors produced during type inference; each variant carries the data
/// needed to render the user-facing diagnostic in the `Display` impl below.
#[derive(Debug, PartialEq)]
pub enum InferenceError {
    Unification(Arc<InferenceType>, Arc<InferenceType>),
    Recursive(InferenceType),
    Unresolved(TypeVariable),
    FunctionArity(usize, usize),
    UndefinedField(Arc<StructurePath>, Arc<str>),
    UndefinedMethod(StructurePath, Arc<str>),
    MissingField(Arc<StructurePath>, Arc<str>),
    ResolvedTemplate(Arc<str>, StructurePath),
    TemplateProjection(Projection),
    TemplateMethodCall(Arc<str>),
    TemplateUnification(Arc<InferenceType>, Arc<InferenceType>),
    Dereference(Arc<InferenceType>),
}
impl fmt::Display for InferenceError {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self {
            InferenceError::Unification(left, right) =>
                write!(f, "Types: {}, and: {}, do not match", left, right),
            InferenceError::Recursive(variable) =>
                write!(f, "Inference type: {}, is recursively defined", variable),
            InferenceError::Unresolved(variable) =>
                write!(f, "Inference type: {}, has not been resolved", variable),
            InferenceError::FunctionArity(expression, function) =>
                write!(f, "Expression arity: {}, is not equal to function arity: {}", expression, function),
            InferenceError::UndefinedField(structure, field) =>
                write!(f, "Field: {}, is not defined on structure: {}", field, structure),
            InferenceError::UndefinedMethod(structure, method) =>
                write!(f, "Method: {}, is not defined on structure: {}", method, structure),
            InferenceError::MissingField(structure, field) =>
                write!(f, "Structure: {}, is missing field: {}", structure, field),
            InferenceError::ResolvedTemplate(template, structure) =>
                write!(f, "Template: {}, cannot be resolved to a structure: {}", template, structure),
            InferenceError::TemplateProjection(projection) =>
                write!(f, "Projection: {:?}, cannot be performed on a template", projection),
            InferenceError::TemplateMethodCall(identifier) =>
                write!(f, "Method call: {}, cannot be performed on a template", identifier),
            InferenceError::TemplateUnification(left, right) =>
                write!(f, "Templates: {}, and: {}, cannot match", left, right),
            InferenceError::Dereference(inference) =>
                write!(f, "Dereference is not valid for type: {}", inference),
        }
    }
}
impl From<InferenceError> for CompileError {
    // Lets `?` lift inference errors into the general compile error type.
    fn from(error: InferenceError) -> Self {
        CompileError::Inference(error)
    }
}
/// An unresolved type variable, displayed as `$N`.
#[derive(Debug, Copy, Clone, Hash, Eq, PartialEq)]
pub struct TypeVariable(pub usize);
impl fmt::Display for TypeVariable {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let TypeVariable(variable) = self;
        write!(f, "${}", variable)
    }
}
/// A type as known to the inference engine: a concrete structure instance
/// (with type arguments), a reference, an unresolved variable, or a named
/// template parameter.
#[derive(Debug, Clone, Hash, Eq, PartialEq)]
pub enum InferenceType {
    Instance(StructurePath, Vec<Arc<InferenceType>>),
    Reference(Permission, Arc<InferenceType>),
    Variable(TypeVariable),
    Template(Arc<str>),
}
impl InferenceType {
    /// Occurs check: errors with `InferenceError::Recursive` if `variable`
    /// appears anywhere inside `self`, which would make the type infinite.
    pub fn occurs(&self, variable: TypeVariable) -> Result<(), InferenceError> {
        match self {
            // Recurse into type arguments / referents.
            InferenceType::Instance(_, variables) => variables.iter()
                .try_for_each(|type_variable| type_variable.occurs(variable)),
            InferenceType::Reference(_, inference) => inference.occurs(variable),
            InferenceType::Template(_) => Ok(()),
            InferenceType::Variable(type_variable) => {
                match type_variable == &variable {
                    true => Err(InferenceError::Recursive(self.clone())),
                    false => Ok(())
                }
            }
        }
    }
}
impl fmt::Display for InferenceType {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self {
            InferenceType::Variable(variable) => write!(f, "{}", variable),
            InferenceType::Template(variable) => write!(f, "${}", variable),
            InferenceType::Reference(permission, inference) =>
                write!(f, "{}{}", permission, inference),
            InferenceType::Instance(structure, variables) => {
                write!(f, "{}", structure)?;
                // Render type arguments as `<a, b, last>`; no brackets when empty.
                variables.split_last().map(|(last, slice)| {
                    write!(f, "<")?;
                    slice.iter().try_for_each(|variable| write!(f, "{}, ", variable))?;
                    write!(f, "{}>", last)
                }).unwrap_or(Ok(()))
            }
        }
    }
}
/// A resolved type: like `InferenceType` but with no type variables --
/// unresolved positions are collapsed into `Template`.
#[derive(Debug, Clone, PartialEq)]
pub enum TypeResolution {
    Instance(StructurePath, Vec<TypeResolution>),
    Reference(Permission, Box<TypeResolution>),
    Template,
}
impl TypeResolution {
    /// Identifies the intrinsic (built-in) type this resolution names, if
    /// any: a parameterless instance whose module path is the intrinsic one.
    pub fn intrinsic(&self) -> Option<Intrinsic> {
        match self {
            TypeResolution::Template | TypeResolution::Reference(_, _) => None,
            TypeResolution::Instance(StructurePath(path), parameters) => {
                let is_intrinsic = path.module_path == ModulePath::intrinsic();
                match is_intrinsic && parameters.is_empty() {
                    true => Intrinsic::parse(&path.identifier),
                    false => None,
                }
            }
        }
    }
    /// Converts this resolution back into an inference type, minting a fresh
    /// type variable for every `Template`.
    pub fn inference(&self, engine: &mut TypeEngine) -> Arc<InferenceType> {
        match self {
            TypeResolution::Template => engine.new_variable_type(),
            TypeResolution::Reference(permission, resolution) =>
                Arc::new(InferenceType::Reference(*permission, resolution.inference(engine))),
            TypeResolution::Instance(structure, resolutions) => {
                let inferences = resolutions.iter().map(|resolution|
                    resolution.inference(engine)).collect();
                Arc::new(InferenceType::Instance(structure.clone(), inferences))
            }
        }
    }
}
impl fmt::Display for TypeResolution {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self {
            TypeResolution::Template => write!(f, "$"),
            TypeResolution::Reference(permission, resolution) => match permission {
                Permission::Shared => write!(f, "&{}", resolution),
                Permission::Unique => write!(f, "~&{}", resolution),
            }
            TypeResolution::Instance(structure, resolutions) => {
                write!(f, "{}", structure)?;
                // Same `<...>` rendering as `InferenceType`.
                resolutions.split_last().map(|(last, slice)| {
                    write!(f, "<")?;
                    slice.iter().try_for_each(|resolution| write!(f, "{}, ", resolution))?;
                    write!(f, "{}>", last)
                }).unwrap_or(Ok(()))
            }
        }
    }
}
impl Intrinsic {
    // An intrinsic's inference type is a parameterless structure instance.
    pub fn inference(&self) -> Arc<InferenceType> {
        Arc::new(InferenceType::Instance(self.structure(), Vec::new()))
    }
}
| true |
1fd79fef0090e5f31628f58d956dfe1391eac626
|
Rust
|
DanielAckerson/rush
|
/src/main.rs
|
UTF-8
| 1,149 | 2.84375 | 3 |
[] |
no_license
|
extern crate pom;
mod parse;
use std::io::{self, Write};
use std::process::{self, Command};
use std::collections::HashMap;
// TODO: instead of working directly with path and args after parsing, generate a
// graph of tasks to execute and then execute them
/// Read-eval loop: prompt, read a line, parse it against the captured
/// environment, and execute the resulting command. EOF (zero bytes read)
/// ends the loop; a read error aborts the process.
fn main() {
    let env_vars: HashMap<String, String> = std::env::vars().collect();
    let mut input = String::new();
    loop {
        print!("$ ");
        io::stdout().flush().unwrap();
        let bytes_read = match io::stdin().read_line(&mut input) {
            Ok(n) => n,
            Err(_) => process::exit(1),
        };
        if bytes_read == 0 {
            break;
        }
        // Parse failures are silently ignored, matching a lenient shell.
        if let Ok(process) = parse::parse(&input, &env_vars) {
            let args: Vec<&str> = process.args.iter().map(AsRef::as_ref).collect();
            exec_input(&process.path, args);
        }
        input.clear();
    }
    println!("Bye!");
}
/// Spawns `path` with `args`, waits for it, and reports the exit code
/// (0 when the process was killed by a signal). Spawn errors are printed
/// to stderr instead of propagated.
fn exec_input(path: &str, args: Vec<&str>) {
    let spawned = Command::new(path).args(args).spawn();
    match spawned {
        Err(e) => eprintln!("{}", e),
        Ok(mut child) => {
            if let Ok(exit_status) = child.wait() {
                let code = exit_status.code().unwrap_or(0);
                println!("process exited with code {}", code);
            }
        }
    }
}
| true |
17dda4095b0e1000a1fa0eb0e312252356d91552
|
Rust
|
JosephCatrambone/facecapture
|
/src/expression_detector.rs
|
UTF-8
| 4,629 | 2.578125 | 3 |
[] |
no_license
|
use std::io;
use std::io::prelude::*;
use std::io::Read;
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use tch;
use tch::nn::ModuleT;
use std::borrow::BorrowMut;
// Keep these compatible with the expression detector.
const LATENT_SIZE:usize = 1024;
const DETECTOR_WIDTH:usize = 48;
const DETECTOR_HEIGHT:usize = 48;
/// A named expression together with the latent-space embedding captured
/// for it when the user registered the expression.
#[derive(Serialize, Deserialize)]
struct Expression {
    name: String,
    // Embedding of length LATENT_SIZE produced by the bundled model.
    latent_point: Vec<f32>,
}
/// Detects user-registered facial expressions by embedding face crops with
/// a bundled TorchScript model and comparing against stored embeddings.
pub struct ExpressionDetector {
    model: tch::CModule,
    model_store: tch::nn::VarStore,
    expressions: Vec<Expression>,
    //face_profile: DMatrix<f32>,
    // low-dim = sigma_inv * U_trans * q
}
impl ExpressionDetector {
pub fn new() -> Self {
let mut static_model_data:Vec<u8> = include_bytes!("../ml/expression_detector_cexport_cpu.pt").to_vec();
let m = match tch::CModule::load_data::<&[u8]>(&mut static_model_data.as_slice()) {
Ok(model) => model,
Err(e) => {
dbg!(e);
panic!("Goddamnit.");
}
};
//let m = tch::CModule::load("./ml/expression_detector_cpu.onnx").unwrap();
let vs = tch::nn::VarStore::new(tch::Device::Cpu);
ExpressionDetector {
model: m,
model_store: vs,
expressions: vec![]
}
}
pub fn add_expression(&mut self, image_width:u32, image_height:u32, image_data:&Vec<u8>, roi:(u32, u32, u32, u32), name:String) {
// Cut the region out, resize it to our face detector.
let mut face = image_and_roi_to_tensor(image_width, image_height, image_data, roi);
// Calculate embedding.
let tensor: tch::Tensor = self.model.forward_t(&face, false);
let embedding = (0..LATENT_SIZE).into_iter().map(|i| { tensor.double_value(&[0, i as i64]) as f32}).collect();
// Insert a new expression.
self.expressions.push(Expression{
name,
latent_point: embedding
});
}
pub fn get_expression_count(&self) -> usize { self.expressions.len() }
pub fn get_expressions(&self) -> Vec<String> {
let mut expression_list = vec![];
for xpr in &self.expressions {
expression_list.push(xpr.name.clone());
}
expression_list
}
pub fn get_expression_weights(&self, image_width:u32, image_height:u32, image_data:&Vec<u8>, roi:(u32, u32, u32, u32)) -> HashMap<String, f32> {
// Returns a list of { group: [expression 1: amount, expression 2: amount, expression 3: amount, ...], group 2: [expression 1: amount, ...]
let mut expression_list = HashMap::with_capacity(self.expressions.len());
// Extract the ROI from the given face.
let mut face = image_and_roi_to_tensor(image_width, image_height, image_data, roi);
// Embed the extracted face:
let tensor: tch::Tensor = self.model.forward_t(&face, false);
let embedding:Vec<f32> = (0..LATENT_SIZE).into_iter().map(|i| { tensor.double_value(&[0, i as i64]) as f32}).collect();
let mut embedding_magnitude = 0f32;
for i in 0..LATENT_SIZE {
embedding_magnitude += (embedding[i]*embedding[i]);
}
// Calc cosine product with all expressions.
for xpr in &self.expressions {
let mut similarity = 0f32;
let mut magnitude = 0.0f32;
for (idx, a) in xpr.latent_point.iter().enumerate() {
let a = *a;
let b = embedding[idx];
magnitude += a*a;
similarity += a*b;
}
expression_list.insert(xpr.name.clone(), (similarity/(magnitude*embedding_magnitude)) as f32);
}
expression_list
}
}
/// Samples the `roi` region (x, y, w, h) of a single-channel image into a
/// DETECTOR_WIDTH x DETECTOR_HEIGHT byte buffer using nearest-neighbour
/// scaling. `image_height` is unused; rows are addressed via `image_width`.
fn image_and_roi_to_vec(image_width: u32, image_height: u32, image_data: &Vec<u8>, roi: (u32, u32, u32, u32)) -> Vec<u8> {
    // Preallocate: output size is fixed, so avoid repeated reallocation.
    let mut result: Vec<u8> = Vec::with_capacity(DETECTOR_WIDTH * DETECTOR_HEIGHT);
    // Calculate the mapping from x/y in the final image to x/y in the ROI of the source image.
    // x goes from 0 -> DETECTOR_WIDTH. We want it to go from 0 -> ROI_w.
    // x * ROI_w/DETECTOR_WIDTH + roi_x
    let x_to_src = roi.2 as f32 / DETECTOR_WIDTH as f32;
    let y_to_src = roi.3 as f32 / DETECTOR_HEIGHT as f32;
    for y in 0..DETECTOR_HEIGHT {
        for x in 0..DETECTOR_WIDTH {
            let src_x = (x as f32 * x_to_src) as usize + roi.0 as usize;
            let src_y = (y as f32 * y_to_src) as usize + roi.1 as usize;
            result.push(image_data[src_x + src_y * image_width as usize]);
        }
    }
    result
}
// Given a source image of the form w,h,data and an roi with (x, y, w, h), extract a tensor.
// The result is a 1x1xH x W float tensor with pixel values scaled from bytes to [0, 1].
fn image_and_roi_to_tensor(image_width:u32, _image_height:u32, image_data:&Vec<u8>, roi:(u32, u32, u32, u32)) -> tch::Tensor {
    //let mut result = tch::Tensor::zeros(&[1, 1, DETECTOR_HEIGHT, DETECTOR_WIDTH], (tch::Kind::Float, tch::Device::Cpu));
    let data:Vec<f32> = image_and_roi_to_vec(image_width, _image_height, image_data, roi).iter().map(|f|{ *f as f32 / 255.0f32 }).collect();
    tch::Tensor::of_slice(data.as_slice()).view([1i64, 1i64, DETECTOR_HEIGHT as i64, DETECTOR_WIDTH as i64])
}
| true |
9bad71b9c91b7a750c260eff92df55571223c72a
|
Rust
|
xnnyygn/xdb
|
/xdb_executor/src/lib.rs
|
UTF-8
| 1,952 | 3.40625 | 3 |
[
"MIT"
] |
permissive
|
use std::sync::{Arc, Mutex};
use std::sync::mpsc;
use std::thread;
use std::thread::JoinHandle;
/// Object-safe stand-in for `FnOnce`, allowing a boxed closure to be
/// invoked (workaround from before `Box<dyn FnOnce()>` was callable).
trait FnBox {
    fn call_box(self: Box<Self>);
}
impl<F> FnBox for F
    where F: FnOnce() {
    // Move the closure out of the box and call it exactly once.
    fn call_box(self: Box<F>) {
        (*self)()
    }
}
/// Fixed-size pool of worker threads fed jobs through one mpsc channel.
pub struct ThreadPool {
    workers: Vec<Worker>,
    sender: mpsc::Sender<Message>,
}
/// One worker thread; `handle` is `take`n when the thread is joined.
struct Worker {
    handle: Option<JoinHandle<()>>,
}
impl Worker {
    /// Spawns a thread that keeps pulling messages off the shared channel,
    /// running jobs until a `Terminate` message arrives.
    fn run(receiver: Arc<Mutex<mpsc::Receiver<Message>>>) -> Worker {
        let handle = thread::spawn(move || loop {
            // The mutex guard is a temporary that is dropped at the end of
            // this statement, so the lock is released before the job runs.
            let message = receiver.lock().unwrap().recv().unwrap();
            match message {
                Message::Job(action) => action.call_box(),
                Message::Terminate => break,
            }
        });
        Worker { handle: Some(handle) }
    }
}
/// Messages the pool sends to its workers.
enum Message {
    /// A job for a worker to execute.
    Job(Box<FnBox + Send + 'static>),
    /// Tells the receiving worker to exit its loop.
    Terminate,
}
impl ThreadPool {
    /// Creates a pool with `n` worker threads sharing one receiver.
    ///
    /// n must > 0 (panics otherwise).
    pub fn new(n: usize) -> ThreadPool {
        assert!(n > 0);
        let (sender, receiver) = mpsc::channel();
        let receiver = Arc::new(Mutex::new(receiver));
        let workers = (0..n)
            .map(|_| Worker::run(Arc::clone(&receiver)))
            .collect();
        ThreadPool { workers, sender }
    }

    /// Queues `f` to run on one of the worker threads.
    pub fn execute<F>(&self, f: F)
        where F: FnOnce() + Send + 'static {
        let job = Message::Job(Box::new(f));
        self.sender.send(job).unwrap();
    }
}
impl Drop for ThreadPool {
    /// Sends one `Terminate` per worker, then joins every worker thread.
    fn drop(&mut self) {
        for _ in 0..self.workers.len() {
            self.sender.send(Message::Terminate).unwrap();
        }
        for worker in &mut self.workers {
            if let Some(h) = worker.handle.take() {
                // Bug fix: this used `unwrap_err()`, which panics whenever a
                // worker exits cleanly — `join()` returns `Ok(())` on normal
                // termination, so dropping the pool always panicked.
                h.join().unwrap();
            }
        }
    }
}
#[cfg(test)]
mod tests {
    // Placeholder sanity check; the pool itself is not exercised here.
    #[test]
    fn it_works() {
        assert_eq!(2 + 2, 4);
    }
}
| true |
ae9601cbbd0d93dd25432b867cbd7b7626d76132
|
Rust
|
gpace1/bo-tie
|
/examples/heart-rate-profile/src/advertise/privacy/host_privacy.rs
|
UTF-8
| 5,556 | 2.78125 | 3 |
[
"MIT"
] |
permissive
|
//! Privacy implemented by the Host
//!
//! When the Bluetooth Controller does not support the feature *LL Privacy*, the host must implement
//! the feature in order to support privacy.
use bo_tie::hci::{Connection, Host, HostChannelEnds};
use bo_tie::host::sm::{IdentityAddress, Keys};
use bo_tie::BluetoothDeviceAddress;
/// One entry of the host-side resolving list.
#[derive(Copy, Clone, PartialEq)]
struct ResolvingInformation {
    // this flag is used to skip this information if the device is already connected
    connected: bool,
    // Identity address of the peer device.
    peer_identity: IdentityAddress,
    // Peer's identity resolving key, used to resolve its private addresses.
    peer_irk: u128,
}
/// Address-resolving state kept by the Host when the Controller does not
/// support the LL Privacy feature.
pub struct HostPrivacy {
    // This is a list sorted by the peer identity
    resolving_list: Vec<ResolvingInformation>,
}
impl HostPrivacy {
    /// Creates an empty host-side resolving list.
    pub fn new() -> Self {
        let resolving_list = Vec::new();

        HostPrivacy { resolving_list }
    }

    /// Flags the entry for `peer_identity` as connected or disconnected.
    ///
    /// Panics if the identity has no entry in the resolving list.
    fn set_connected(&mut self, peer_identity: &IdentityAddress, is_connected: bool) {
        let index = self
            .resolving_list
            .binary_search_by(|entry| entry.peer_identity.cmp(&peer_identity))
            .expect("no information found for peer identity");

        self.resolving_list[index].connected = is_connected
    }

    /// Add to the hosts resolving list
    ///
    /// Inserts (or overwrites) the entry for the peer identity/IRK carried
    /// in `keys`. Panics if `keys` lacks a peer identity or peer IRK.
    pub fn add_to_resolving_list(&mut self, keys: &Keys) {
        let peer_identity = keys.get_peer_identity().unwrap();
        let peer_irk = keys.get_peer_irk().unwrap();

        match self
            .resolving_list
            .binary_search_by(|entry| entry.peer_identity.cmp(&peer_identity))
        {
            Err(index) => {
                // Not present yet: insert at the sorted position.
                let resolve_info = ResolvingInformation {
                    connected: false,
                    peer_identity,
                    peer_irk,
                };

                self.resolving_list.insert(index, resolve_info);
            }
            Ok(index) => {
                // Already present: refresh the stored identity and IRK.
                let entry = &mut self.resolving_list[index];

                entry.peer_identity = peer_identity;
                entry.peer_irk = peer_irk;
            }
        }
    }

    /// Remove a device from the resolving list
    ///
    /// If `hard_remove` is false then device information is not actually removed, instead the
    /// `connected` field is set to false.
    pub fn remove_device_from_resolving_list(&mut self, identity: &IdentityAddress, hard_remove: bool) {
        if !hard_remove {
            self.set_connected(identity, false);
        } else {
            if let Ok(index) = self
                .resolving_list
                .binary_search_by(|entry| entry.peer_identity.cmp(identity))
            {
                self.resolving_list.remove(index);
            }
        }
    }

    /// Clear the resolving list information in the Host
    pub fn clear_resolving_list(&mut self) {
        self.resolving_list.clear();
    }

    /// Creates a ticker whose first tick fires after `timeout` and which
    /// then repeats every `timeout`, for regenerating the RPA.
    pub fn set_timeout(&mut self, timeout: std::time::Duration) -> RpaInterval {
        RpaInterval {
            interval: tokio::time::interval_at(tokio::time::Instant::now() + timeout, timeout),
        }
    }

    /// Configure advertising when the host is performing Privacy
    pub async fn start_private_advertising<H: HostChannelEnds>(&mut self, host: &mut Host<H>) {
        set_advertising_parameters_private(host).await
    }

    /// Validate the Connection
    ///
    /// The connecting device must have a valid resolvable private address. Because the Controller
    /// does not perform private address resolving, any device can form a connection to this device
    /// in the controller.
    pub fn validate_connection<C>(&mut self, connection: &Connection<C>) -> Option<IdentityAddress> {
        for info in self.resolving_list.iter_mut() {
            // no need to check devices that are already connected
            if info.connected {
                continue;
            }

            // First entry whose IRK resolves the peer's address wins.
            if connection.get_peer_address().resolve(info.peer_irk) {
                info.connected = true;

                return Some(info.peer_identity);
            }
        }

        None
    }
}
/// Configures and (re)starts connectable undirected advertising using a
/// freshly generated resolvable private address as the own random address.
/// Advertising is disabled first so the parameters/address can be changed.
async fn set_advertising_parameters_private<H: HostChannelEnds>(host: &mut Host<H>) {
    use bo_tie::hci::commands::le::{
        set_advertising_data, set_advertising_parameters, set_random_address, set_scan_response_data, OwnAddressType,
    };

    let mut adv_prams = set_advertising_parameters::AdvertisingParameters::default();

    // New RPA derived from this device's IRK.
    let own_address = BluetoothDeviceAddress::new_resolvable(crate::security::Store::IRK);

    adv_prams.advertising_type =
        set_advertising_parameters::AdvertisingType::ConnectableAndScannableUndirectedAdvertising;

    adv_prams.peer_address_type = set_advertising_parameters::PeerAddressType::RandomAddress;

    adv_prams.own_address_type = OwnAddressType::RandomDeviceAddress;

    // Failure to disable (e.g. advertising was never on) is deliberately ignored.
    bo_tie::hci::commands::le::set_advertising_enable::send(host, false)
        .await
        .ok();

    set_advertising_data::send(host, None).await.unwrap();

    set_scan_response_data::send(host, None).await.unwrap();

    set_random_address::send(host, own_address).await.unwrap();

    set_advertising_parameters::send(host, adv_prams).await.unwrap();

    bo_tie::hci::commands::le::set_advertising_enable::send(host, true)
        .await
        .unwrap();
}
/// Periodic timer driving resolvable-private-address regeneration.
pub struct RpaInterval {
    interval: tokio::time::Interval,
}

impl RpaInterval {
    /// Waits for the next tick and yields a token to perform regeneration.
    pub async fn tick(&mut self) -> RegenRpa {
        self.interval.tick().await;

        RegenRpa
    }
}

/// Token returned by [`RpaInterval::tick`]; consuming it regenerates the RPA.
pub struct RegenRpa;

impl RegenRpa {
    /// Regenerates the RPA by re-running private advertising setup.
    pub async fn regen<H: HostChannelEnds>(self, host: &mut Host<H>) {
        set_advertising_parameters_private(host).await;
    }
}
| true |
4c756fb5fccbc19ed68db981ac1bd20ca1b4f181
|
Rust
|
zack37/rust-node-ffi
|
/src/project_euler/problem_29.rs
|
UTF-8
| 374 | 2.875 | 3 |
[] |
no_license
|
use std::collections::HashSet;
use num::bigint::BigUint;
use num::{pow, FromPrimitive};
/// Project Euler 29: counts the distinct values of a^b for
/// 2 <= a, b <= limit. A set removes collisions such as 2^4 == 4^2.
#[no_mangle]
pub extern fn distinct_powers(limit: usize) -> usize {
    let mut terms: HashSet<BigUint> = HashSet::new();
    for base in 2..=limit {
        let big_base = BigUint::from_usize(base).unwrap();
        for exp in 2..=limit {
            terms.insert(pow(big_base.clone(), exp));
        }
    }
    terms.len()
}
| true |
12a58aae063939ab858c9e97169b24c641873452
|
Rust
|
sabahtalateh/rust_by_example
|
/modules/src/bin/my_mod/nested_mod.rs
|
UTF-8
| 828 | 3.09375 | 3 |
[] |
no_license
|
/// Publicly visible everywhere the parent module is visible.
pub fn function() {
    println!("call my_mod::nested_mod::function()");
}

// Private: only callable from within this module.
#[allow(dead_code)]
fn private_function() {
    println!("call my_mod::nested_mod::private_function()");
}

// (in crate::my_mod) makes the function visible only within the
// specified crate path (here: anywhere inside my_mod)
pub(in crate::my_mod) fn public_function_in_my_mod() {
    print!("call my_mod::nested_mod::public_function_in_my_mod(), that\n> ");
    public_function_in_nested();
}

// (self) makes the function visible only within the module,
// the same as leaving the function private
pub(self) fn public_function_in_nested() {
    println!("call my_mod::nested_mod::public_function_in_nested()");
}

// (super) makes it visible only in the parent module
pub(super) fn public_function_in_super_mod() {
    println!("call my_mod::nested_mod::public_function_in_super_mod()");
}
| true |
bd728a16ecb28d4ea0af2ed8baa7d690bc8f302f
|
Rust
|
NirvanaNimbusa/revault_tx
|
/src/txouts.rs
|
UTF-8
| 5,400 | 2.671875 | 3 |
[
"BSD-3-Clause"
] |
permissive
|
//! Revault txouts
//! Wrappers around bitcoin's TxOut to statically check Revault transactions creation and ease
//! their PSBT management.
use crate::scripts::{CpfpDescriptor, EmergencyAddress, UnvaultDescriptor, VaultDescriptor};
use miniscript::{
bitcoin::{Script, TxOut},
MiniscriptKey, ToPublicKey,
};
use std::fmt;
/// A transaction output created by a Revault transaction.
///
/// `witness_script` is `None` for outputs whose script is controlled
/// externally (see the `EmergencyTxOut` / `FeeBumpTxOut` constructors).
pub trait RevaultTxOut: fmt::Debug + Clone + PartialEq {
    /// Get a reference to the inner txout
    fn txout(&self) -> &TxOut;
    /// Get the actual inner txout
    fn into_txout(self) -> TxOut;
    /// Get a reference to the inner witness script ("redeem Script of the witness program")
    fn witness_script(&self) -> &Option<Script>;
    /// Get the actual inner witness script ("redeem Script of the witness program")
    fn into_witness_script(self) -> Option<Script>;
}
// Generates a doc-commented wrapper struct holding a `TxOut` plus optional
// witness script, and a boilerplate `RevaultTxOut` impl for it.
macro_rules! implem_revault_txout {
    ( $struct_name:ident, $doc_comment:meta ) => {
        #[$doc_comment]
        #[derive(Debug, Clone, PartialEq)]
        pub struct $struct_name {
            txout: TxOut,
            witness_script: Option<Script>,
        }

        impl RevaultTxOut for $struct_name {
            fn txout(&self) -> &TxOut {
                &self.txout
            }

            fn into_txout(self) -> TxOut {
                self.txout
            }

            fn witness_script(&self) -> &Option<Script> {
                &self.witness_script
            }

            fn into_witness_script(self) -> Option<Script> {
                self.witness_script
            }
        }
    };
}
implem_revault_txout!(
    VaultTxOut,
    doc = "A vault transaction output. Used by the funding / deposit transactions, the cancel transactions, and the spend transactions (for the change)."
);

impl VaultTxOut {
    /// Create a new VaultTxOut out of the given Vault script descriptor
    pub fn new<ToPkCtx: Copy, Pk: MiniscriptKey + ToPublicKey<ToPkCtx>>(
        value: u64,
        script_descriptor: &VaultDescriptor<Pk>,
        to_pk_ctx: ToPkCtx,
    ) -> VaultTxOut {
        VaultTxOut {
            txout: TxOut {
                value,
                script_pubkey: script_descriptor.0.script_pubkey(to_pk_ctx),
            },
            witness_script: Some(script_descriptor.0.witness_script(to_pk_ctx)),
        }
    }
}

implem_revault_txout!(UnvaultTxOut, doc = "*The* unvault transaction output.");

impl UnvaultTxOut {
    /// Create a new UnvaultTxOut out of the given Unvault script descriptor
    pub fn new<ToPkCtx: Copy, Pk: MiniscriptKey + ToPublicKey<ToPkCtx>>(
        value: u64,
        script_descriptor: &UnvaultDescriptor<Pk>,
        to_pk_ctx: ToPkCtx,
    ) -> UnvaultTxOut {
        UnvaultTxOut {
            txout: TxOut {
                value,
                script_pubkey: script_descriptor.0.script_pubkey(to_pk_ctx),
            },
            witness_script: Some(script_descriptor.0.witness_script(to_pk_ctx)),
        }
    }
}

implem_revault_txout!(
    EmergencyTxOut,
    doc = "The Emergency Deep Vault, the destination of the emergency transactions fund."
);

impl EmergencyTxOut {
    /// Create a new EmergencyTxOut, note that we don't know the witness_script!
    pub fn new(address: EmergencyAddress, value: u64) -> EmergencyTxOut {
        EmergencyTxOut {
            txout: TxOut {
                script_pubkey: address.address().script_pubkey(),
                value,
            },
            witness_script: None,
        }
    }
}

implem_revault_txout!(
    CpfpTxOut,
    doc = "The output attached to the unvault transaction so that the fund managers can CPFP."
);

impl CpfpTxOut {
    /// Create a new CpfpTxOut out of the given Cpfp descriptor
    pub fn new<ToPkCtx: Copy, Pk: MiniscriptKey + ToPublicKey<ToPkCtx>>(
        value: u64,
        script_descriptor: &CpfpDescriptor<Pk>,
        to_pk_ctx: ToPkCtx,
    ) -> CpfpTxOut {
        CpfpTxOut {
            txout: TxOut {
                value,
                script_pubkey: script_descriptor.0.script_pubkey(to_pk_ctx),
            },
            witness_script: Some(script_descriptor.0.witness_script(to_pk_ctx)),
        }
    }
}

implem_revault_txout!(
    FeeBumpTxOut,
    doc = "The output spent by the revaulting transactions to bump their feerate"
);

impl FeeBumpTxOut {
    /// Create a new FeeBumpTxOut, note that it's managed externally so we don't need a witness
    /// Script.
    pub fn new(txout: TxOut) -> FeeBumpTxOut {
        FeeBumpTxOut {
            txout,
            witness_script: None,
        }
    }
}

implem_revault_txout!(
    ExternalTxOut,
    doc = "An untagged external output, as spent by the vault transaction or created by the spend transaction."
);

impl ExternalTxOut {
    /// Create a new ExternalTxOut, note that it's managed externally so we don't need a witness
    /// Script.
    pub fn new(txout: TxOut) -> ExternalTxOut {
        ExternalTxOut {
            txout,
            witness_script: None,
        }
    }
}

/// A spend transaction output can be either a change one (VaultTxOut) or a payee-controlled
/// one (ExternalTxOut).
pub enum SpendTxOut {
    /// The actual destination of the funds, many such output can be present in a Spend
    /// transaction
    Destination(ExternalTxOut),
    /// The change output, usually only one such output is present in a Spend transaction
    Change(VaultTxOut),
}
| true |
11cff5dd240fd6577b663a596eaaa28bb89008ce
|
Rust
|
starship/starship
|
/src/modules/character.rs
|
UTF-8
| 10,037 | 3.21875 | 3 |
[
"ISC"
] |
permissive
|
use super::{Context, Module, ModuleConfig, Shell};
use crate::configs::character::CharacterConfig;
use crate::formatter::StringFormatter;
/// Creates a module for the prompt character
///
/// The character segment prints an arrow character in a color dependent on the
/// exit-code of the last executed command:
/// - If the exit-code was "0", it will be formatted with `success_symbol`
///   (green arrow by default)
/// - If the exit-code was anything else, it will be formatted with
///   `error_symbol` (red arrow by default)
pub fn module<'a>(context: &'a Context) -> Option<Module<'a>> {
    // Normalized vi editing modes across the supported shells.
    enum ShellEditMode {
        Normal,
        Visual,
        Replace,
        ReplaceOne,
        Insert,
    }
    // Mode assumed when the shell/keymap combination is not recognized.
    const ASSUMED_MODE: ShellEditMode = ShellEditMode::Insert;
    // TODO: extend config to more modes

    let mut module = context.new_module("character");
    let config: CharacterConfig = CharacterConfig::try_load(module.config);

    let props = &context.properties;
    // Missing status code is treated as success.
    let exit_code = props.status_code.as_deref().unwrap_or("0");
    let keymap = props.keymap.as_str();
    let exit_success = exit_code == "0";

    // Match shell "keymap" names to normalized vi modes
    // NOTE: in vi mode, fish reports normal mode as "default".
    // Unfortunately, this is also the name of the non-vi default mode.
    // We do some environment detection in src/init.rs to translate.
    // The result: in non-vi fish, keymap is always reported as "insert"
    let mode = match (&context.shell, keymap) {
        (Shell::Fish, "default")
        | (Shell::Zsh, "vicmd")
        | (Shell::Cmd | Shell::PowerShell, "vi") => ShellEditMode::Normal,
        (Shell::Fish, "visual") => ShellEditMode::Visual,
        (Shell::Fish, "replace") => ShellEditMode::Replace,
        (Shell::Fish, "replace_one") => ShellEditMode::ReplaceOne,
        _ => ASSUMED_MODE,
    };

    // In insert mode the symbol additionally reflects the exit status.
    let symbol = match mode {
        ShellEditMode::Normal => config.vimcmd_symbol,
        ShellEditMode::Visual => config.vimcmd_visual_symbol,
        ShellEditMode::Replace => config.vimcmd_replace_symbol,
        ShellEditMode::ReplaceOne => config.vimcmd_replace_one_symbol,
        ShellEditMode::Insert => {
            if exit_success {
                config.success_symbol
            } else {
                config.error_symbol
            }
        }
    };

    let parsed = StringFormatter::new(config.format).and_then(|formatter| {
        formatter
            .map_meta(|variable, _| match variable {
                "symbol" => Some(symbol),
                _ => None,
            })
            .parse(None, Some(context))
    });

    module.set_segments(match parsed {
        Ok(segments) => segments,
        Err(error) => {
            log::warn!("Error in module `character`:\n{}", error);
            return None;
        }
    });

    Some(module)
}
#[cfg(test)]
mod test {
    // Rendering tests for the character module across exit statuses and
    // shell/keymap combinations.
    use crate::context::Shell;
    use crate::test::ModuleRenderer;
    use nu_ansi_term::Color;

    #[test]
    fn success_status() {
        let expected = Some(format!("{} ", Color::Green.bold().paint("❯")));

        // Status code 0
        let actual = ModuleRenderer::new("character").status(0).collect();
        assert_eq!(expected, actual);

        // No status code
        let actual = ModuleRenderer::new("character").collect();
        assert_eq!(expected, actual);
    }

    #[test]
    fn failure_status() {
        let expected = Some(format!("{} ", Color::Red.bold().paint("❯")));

        let exit_values = [1, 54321, -5000];

        for status in &exit_values {
            let actual = ModuleRenderer::new("character").status(*status).collect();
            assert_eq!(expected, actual);
        }
    }

    #[test]
    fn custom_symbol() {
        let expected_fail = Some(format!("{} ", Color::Red.bold().paint("✖")));
        let expected_success = Some(format!("{} ", Color::Green.bold().paint("➜")));

        let exit_values = [1, 54321, -5000];

        // Test failure values
        for status in &exit_values {
            let actual = ModuleRenderer::new("character")
                .config(toml::toml! {
                    [character]
                    success_symbol = "[➜](bold green)"
                    error_symbol = "[✖](bold red)"
                })
                .status(*status)
                .collect();
            assert_eq!(expected_fail, actual);
        }

        // Test success
        let actual = ModuleRenderer::new("character")
            .config(toml::toml! {
                [character]
                success_symbol = "[➜](bold green)"
                error_symbol = "[✖](bold red)"
            })
            .status(0)
            .collect();
        assert_eq!(expected_success, actual);
    }

    #[test]
    fn zsh_keymap() {
        let expected_vicmd = Some(format!("{} ", Color::Green.bold().paint("❮")));
        let expected_specified = Some(format!("{} ", Color::Green.bold().paint("V")));
        let expected_other = Some(format!("{} ", Color::Green.bold().paint("❯")));

        // zle keymap is vicmd
        let actual = ModuleRenderer::new("character")
            .shell(Shell::Zsh)
            .keymap("vicmd")
            .collect();
        assert_eq!(expected_vicmd, actual);

        // specified vicmd character
        let actual = ModuleRenderer::new("character")
            .config(toml::toml! {
                [character]
                vicmd_symbol = "[V](bold green)"
            })
            .shell(Shell::Zsh)
            .keymap("vicmd")
            .collect();
        assert_eq!(expected_specified, actual);

        // zle keymap is other
        let actual = ModuleRenderer::new("character")
            .shell(Shell::Zsh)
            .keymap("visual")
            .collect();
        assert_eq!(expected_other, actual);
    }

    #[test]
    fn fish_keymap() {
        let expected_vicmd = Some(format!("{} ", Color::Green.bold().paint("❮")));
        let expected_specified = Some(format!("{} ", Color::Green.bold().paint("V")));
        let expected_visual = Some(format!("{} ", Color::Yellow.bold().paint("❮")));
        let expected_replace = Some(format!("{} ", Color::Purple.bold().paint("❮")));
        let expected_replace_one = expected_replace.as_deref();
        let expected_other = Some(format!("{} ", Color::Green.bold().paint("❯")));

        // fish keymap is default
        let actual = ModuleRenderer::new("character")
            .shell(Shell::Fish)
            .keymap("default")
            .collect();
        assert_eq!(expected_vicmd, actual);

        // specified vicmd character
        let actual = ModuleRenderer::new("character")
            .config(toml::toml! {
                [character]
                vicmd_symbol = "[V](bold green)"
            })
            .shell(Shell::Fish)
            .keymap("default")
            .collect();
        assert_eq!(expected_specified, actual);

        // fish keymap is visual
        let actual = ModuleRenderer::new("character")
            .shell(Shell::Fish)
            .keymap("visual")
            .collect();
        assert_eq!(expected_visual, actual);

        // fish keymap is replace
        let actual = ModuleRenderer::new("character")
            .shell(Shell::Fish)
            .keymap("replace")
            .collect();
        assert_eq!(expected_replace, actual);

        // fish keymap is replace_one
        let actual = ModuleRenderer::new("character")
            .shell(Shell::Fish)
            .keymap("replace_one")
            .collect();
        assert_eq!(expected_replace_one, actual.as_deref());

        // fish keymap is other
        let actual = ModuleRenderer::new("character")
            .shell(Shell::Fish)
            .keymap("other")
            .collect();
        assert_eq!(expected_other, actual);
    }

    #[test]
    fn cmd_keymap() {
        let expected_vicmd = Some(format!("{} ", Color::Green.bold().paint("❮")));
        let expected_specified = Some(format!("{} ", Color::Green.bold().paint("V")));
        let expected_other = Some(format!("{} ", Color::Green.bold().paint("❯")));

        // cmd keymap is vi
        let actual = ModuleRenderer::new("character")
            .shell(Shell::Cmd)
            .keymap("vi")
            .collect();
        assert_eq!(expected_vicmd, actual);

        // specified vicmd character
        let actual = ModuleRenderer::new("character")
            .config(toml::toml! {
                [character]
                vicmd_symbol = "[V](bold green)"
            })
            .shell(Shell::Cmd)
            .keymap("vi")
            .collect();
        assert_eq!(expected_specified, actual);

        // cmd keymap is other
        let actual = ModuleRenderer::new("character")
            .shell(Shell::Cmd)
            .keymap("visual")
            .collect();
        assert_eq!(expected_other, actual);
    }

    #[test]
    fn powershell_keymap() {
        let expected_vicmd = Some(format!("{} ", Color::Green.bold().paint("❮")));
        let expected_specified = Some(format!("{} ", Color::Green.bold().paint("V")));
        let expected_other = Some(format!("{} ", Color::Green.bold().paint("❯")));

        // powershell keymap is vi
        let actual = ModuleRenderer::new("character")
            .shell(Shell::PowerShell)
            .keymap("vi")
            .collect();
        assert_eq!(expected_vicmd, actual);

        // specified vicmd character
        let actual = ModuleRenderer::new("character")
            .config(toml::toml! {
                [character]
                vicmd_symbol = "[V](bold green)"
            })
            .shell(Shell::PowerShell)
            .keymap("vi")
            .collect();
        assert_eq!(expected_specified, actual);

        // powershell keymap is other
        let actual = ModuleRenderer::new("character")
            .shell(Shell::PowerShell)
            .keymap("visual")
            .collect();
        assert_eq!(expected_other, actual);
    }
}
| true |
06d7824ef2d1724aec7142e073dbb86b20bc2bc9
|
Rust
|
growingspaghetti/project-euler
|
/rust/src/m36.rs
|
UTF-8
| 2,862 | 3.609375 | 4 |
[] |
no_license
|
// The decimal number, 585 = 10010010012 (binary), is palindromic in both bases.
// Find the sum of all numbers, less than one million, which are palindromic in base 10 and base 2.
// (Please note that the palindromic number, in either base, may not include leading zeros.)
/// Returns whether `a` is a base-10 palindrome.
///
/// Fast-path: even numbers not divisible by 11 are rejected without
/// reversing (callers in this module only pass odd candidates).
fn is_palindrome(a: u32) -> bool {
    if a % 2 == 0 && a % 11 != 0 {
        return false;
    }
    let mut remaining = a;
    let mut reversed = 0u32;
    while remaining > 0 {
        reversed = reversed * 10 + remaining % 10;
        remaining /= 10;
    }
    reversed == a
}
/// Returns whether `a` is a palindrome in base 2.
///
/// Even numbers are rejected immediately: a binary palindrome cannot end
/// in 0, since leading zeros are not allowed.
fn is_double_based_palindrome(a: u32) -> bool {
    if a % 2 == 0 {
        return false;
    }
    let mut remaining = a;
    let mut reversed = 0u32;
    while remaining > 0 {
        reversed = (reversed << 1) | (remaining & 1);
        remaining >>= 1;
    }
    reversed == a
}
// 4.9 ms
/// Sums every number below one million that is palindromic in both
/// base 10 and base 2.
/// ```rust
/// use self::project_euler::m36::double_based_palindromes_sum;
/// assert_eq!(double_based_palindromes_sum(), 872187);
/// ```
pub fn double_based_palindromes_sum() -> u32 {
    // Only odd candidates: a binary palindrome cannot end in 0.
    (1..1_000_000)
        .step_by(2)
        .filter(|&n| is_palindrome(n) && is_double_based_palindrome(n))
        .sum()
}
/// Returns `a` with its decimal digits reversed (trailing zeros drop,
/// e.g. 1200 -> 21).
fn reverse_digits(a: u32) -> u32 {
    let mut remaining = a;
    let mut reversed = 0u32;
    while remaining > 0 {
        reversed = reversed * 10 + remaining % 10;
        remaining /= 10;
    }
    reversed
}
/// Builds the even- and odd-digit-length palindromes seeded by `seed`.
///
/// The even palindrome mirrors all digits (12 -> 1221); the odd palindrome
/// shares the middle digit (12 -> 121).
fn generate_even_and_odd_palindromes(seed: u32) -> (u32, u32) {
    let mut even_pal = seed;
    let mut odd_pal = seed / 10;
    let mut digits = seed;
    while digits > 0 {
        let d = digits % 10;
        even_pal = even_pal * 10 + d;
        odd_pal = odd_pal * 10 + d;
        digits /= 10;
    }
    (even_pal, odd_pal)
}
// 10 -> rev -> 01 -> pal 11
// fn generate_even_digit_palindrome(n: u32) -> u32 {
// let mut n = reverse_digits(n);
// let mut p = n.clone();
// while n > 0 {
// p *= 10;
// p += n % 10;
// n /= 10;
// }
// p
// }
// fn generate_odd_digit_palindrome(n: u32) -> u32 {
// let mut n = reverse_digits(n);
// let mut p = n.clone();
// p /= 10;
// while n > 0 {
// p *= 10;
// p += n % 10;
// n /= 10;
// }
// p
// }
// 17 us
/// ```rust
/// use self::project_euler::m36::double_based_palindromes_sum_gen;
/// assert_eq!(double_based_palindromes_sum_gen(), 872187);
/// ```
pub fn double_based_palindromes_sum_gen() -> u32 {
let mut sum = 0u32;
let half = 10u32.pow(1_000_000f32.log10() as u32 / 2);
for n in 1..half {
let (ep, op) = generate_even_and_odd_palindromes(n);
// [ep, op]
// .iter()
// .filter(|&p| is_double_based_palindrome(*p))
// .for_each(|&p| sum += p);
if is_double_based_palindrome(ep) {
sum += ep;
}
if is_double_based_palindrome(op) {
sum += op;
}
}
sum
}
// if is_double_based_palindrome(ep) {
// sum += ep;
// }
// if is_double_based_palindrome(op) {
// sum += op;
// }
| true |
6ba696ffc173f08ff90ff59a61befe84a7a3fd93
|
Rust
|
Mortano/laz-rs-send-example
|
/src/main.rs
|
UTF-8
| 943 | 2.640625 | 3 |
[] |
no_license
|
#![feature(proc_macro_hygiene, decl_macro)]
#[macro_use]
extern crate rocket;
extern crate las;
use rocket::State;
use std::fs::File;
use std::sync::Mutex;
/**
* las::Reader internally holds a Box<dyn las::reader::PointReader>, a trait which does not implement 'Send',
* so we can't wrap the las::Reader in a Mutex to use it as shared state with Rocket.
*
* Internally, the las::reader::PointReader trait (or some of its implementations at least) refer to the
* LasZipDecompressor struct of the laz_rs crate, which is also not 'Send'
*/
// Shared application state: the LAZ reader guarded by a Mutex (see above).
type RocketState = Mutex<las::Reader>;
#[get("/test")]
fn dummy_handler(state: State<RocektState>) -> u64 {
let reader = state.lock().unwrap();
reader.header().number_of_points()
}
fn main() {
    // Opening the reader can fail (missing/corrupt file); panicking at
    // startup is acceptable for this example.
    let las_reader = las::Reader::from_path("test.laz").unwrap();
    rocket::ignite()
        .mount("/", routes![dummy_handler])
        // Wrap the reader in a Mutex so it can be managed as shared state.
        .manage(Mutex::new(las_reader))
        .launch();
}
| true |
ce593da4fc285cae857032812babe2d2e82f02b8
|
Rust
|
immunant/c2rust
|
/c2rust-refactor/src/transform/casts/tests.rs
|
UTF-8
| 3,724 | 2.65625 | 3 |
[
"BSD-3-Clause",
"Apache-2.0"
] |
permissive
|
use super::{check_double_cast, DoubleCastAction, SimpleTy};
use quickcheck::{quickcheck, Arbitrary, Gen};
use rand::Rng;
use z3::ast::{Ast, BV};
use z3::{Config, Context, SatResult, Solver};
/// Pointer width in bits (16, 32 or 64) used to size `Size`/`Pointer` types.
#[derive(Debug, Copy, Clone)]
#[repr(transparent)]
struct PointerWidth(usize);
impl Arbitrary for PointerWidth {
    // Uniformly pick one of the three supported pointer widths.
    fn arbitrary<G: Gen>(g: &mut G) -> Self {
        let x = g.gen_range(0, 3);
        PointerWidth([16, 32, 64][x])
    }
}
impl Arbitrary for SimpleTy {
    // Map 0..13 onto the SimpleTy variants: 0-3 unsigned ints of widths
    // 8/16/32/64, 4-7 the signed equivalents, then size/float/pointer.
    fn arbitrary<G: Gen>(g: &mut G) -> Self {
        let x = g.gen_range(0, 13);
        match x {
            0 | 1 | 2 | 3 => SimpleTy::Int([8, 16, 32, 64][x], false),
            4 | 5 | 6 | 7 => SimpleTy::Int([8, 16, 32, 64][x - 4], true),
            8 => SimpleTy::Size(false),
            9 => SimpleTy::Size(true),
            10 => SimpleTy::Float32,
            11 => SimpleTy::Float64,
            12 => SimpleTy::Pointer,
            // TODO: generate some Other's
            _ => unreachable!(),
        }
    }
}
/// Bit width of `ty`; `Size` and `Pointer` take the width from `pw`.
/// Panics on `SimpleTy::Other`, which has no defined width here.
fn ty_bit_width(ty: SimpleTy, pw: PointerWidth) -> u32 {
    match ty {
        SimpleTy::Int(w, _) => w as u32,
        SimpleTy::Size(_) | SimpleTy::Pointer => pw.0 as u32,
        SimpleTy::Float32 => 32,
        SimpleTy::Float64 => 64,
        SimpleTy::Other => unreachable!(), // FIXME
    }
}
/// Models an integer cast on a Z3 bitvector: truncate (extract) when
/// narrowing, sign-extend when widening from a signed source, and
/// zero-extend otherwise.
fn cast_bv<'bv>(bv: BV<'bv>, from_ty: SimpleTy, to_ty: SimpleTy, pw: PointerWidth) -> BV<'bv> {
    let from_width = ty_bit_width(from_ty, pw);
    let to_width = ty_bit_width(to_ty, pw);
    if to_width == from_width {
        bv
    } else if to_width < from_width {
        bv.extract(to_width - 1, 0)
    } else if from_ty.is_signed() {
        bv.sign_ext(to_width - from_width)
    } else {
        bv.zero_ext(to_width - from_width)
    }
}
/// Applies the chain of casts `tys[0] -> tys[1] -> ... -> tys[last]` to `bv`.
fn cast_tys<'bv>(bv: BV<'bv>, tys: &[SimpleTy], pw: PointerWidth) -> BV<'bv> {
    tys.windows(2).fold(bv, |y, w| cast_bv(y, w[0], w[1], pw))
}
// Per-thread Z3 configuration and context shared by all property tests.
thread_local!(static Z3_CONFIG: Config = Config::new());
thread_local!(static Z3_CONTEXT: Context = Z3_CONFIG.with(|cfg| Context::new(cfg)));
quickcheck! {
    // Verify `check_double_cast` using QuickCheck and Z3
    fn verify_double_cast(pw: PointerWidth, tys: Vec<SimpleTy>) -> bool {
        // A chain needs at least two types to contain a cast.
        if tys.len() <= 1 {
            return true;
        }
        Z3_CONTEXT.with(|ctx| {
            // Build a minimized list of types with double casts removed
            let mut min_tys = vec![tys[0].clone()];
            for ty in &tys[1..] {
                assert!(!min_tys.is_empty());
                if *ty == min_tys[min_tys.len() - 1] {
                    // Cast to the same type, ignore it
                    continue;
                }
                if min_tys.len() < 2 {
                    min_tys.push(ty.clone());
                    continue;
                }

                // Decide whether the last two casts plus `ty` collapse.
                let last2 = &min_tys[min_tys.len() - 2..];
                match check_double_cast(last2[0], last2[1], *ty) {
                    DoubleCastAction::RemoveBoth => {
                        min_tys.pop();
                    }
                    DoubleCastAction::RemoveInner => {
                        *min_tys.last_mut().unwrap() = ty.clone();
                    }
                    DoubleCastAction::KeepBoth => {
                        min_tys.push(ty.clone());
                    }
                }
            }

            let x = BV::new_const(&ctx, "x", ty_bit_width(tys[0], pw));
            let y = cast_tys(x.clone(), &tys[..], pw);
            let z = cast_tys(x, &min_tys[..], pw);

            // Check the full type list against the minimized one:
            // the negated equality must be unsatisfiable.
            let solver = Solver::new(&ctx);
            solver.assert(&z._eq(&y).not());
            solver.check() == SatResult::Unsat
        })
    }
}
| true |
dadb9dd25c548ac9c90c1d8f21c1e0366c6151f8
|
Rust
|
natepisarski/relationship-points
|
/src/emissary/emissary.rs
|
UTF-8
| 3,345 | 3.859375 | 4 |
[] |
no_license
|
/// Emissaries are data-transfer objects. They're designed to move data into a semi-permanent cache.
/// This could be the store of a Redux application, or just in-memory cache in a program.
///
/// Emissaries all have a key, the data, and a list of callbacks.
///
/// The key is a namespaced identifier for the root of the data. 'users.current' may be a key
/// for storing a user object.
///
/// The data is the data, which must be able to be serialized, that will be stored where the key
/// specifies.
///
/// The callbacks are strings, which have named variables with either ~ or @. These are the most complex
/// and variable part of the system. A callback like 'http://server.com/api/getUser?userName=~"users.current.email"'
/// will pull a stored emissary value and use it. A callback like 'http://server.com/api/getUser?userName=@"userName"'
/// will simply create a string where the variable must be filled in.
///
/// Callbacks do not need to be URLs. In a CQRS model, it could feed the string which initiates a query / command.
use std::vec::Vec;
extern crate serde;
extern crate serde_json;
/// Emissary is the trait that allows data to be transfered. It contains the data, whatever it may be,
/// a serialization key, and a list of callbacks.
pub trait Emissary {
    /// Whatever type of data is being stored
    type DataType;
    /// Where in a stateful cache this data is stored (i.e "accounts" "user.details")
    ///
    /// The parameter is now named: anonymous trait-method parameters are a
    /// deprecated Rust 2015 idiom and are rejected in the 2018 edition.
    fn serialization_key(&self, data: Self::DataType) -> String;
    /// A list of callbacks (i.e "@'application.baseUrl'/Accounts/GetAccount?userName=@'user.email')
    fn get_callbacks(&self) -> Vec<String>;
    /// Short tag identifying the transfer mechanism (e.g. "EMISSARY").
    fn data_transfer_type(&self) -> &'static str;
}
/// An emissary container is a structure that makes wrapping up one-off data structures as an
/// emissary easier. If you provide it with data, a key, and a list of callbacks (normally empty)
/// it will structure whatever the data is in the emissary format.
///
/// NOTE(review): the `T: serde::Serialize` bound on the struct definition is
/// required here by `#[derive(Serialize)]`; otherwise it would normally live
/// on the impl blocks instead.
#[derive(Serialize, Debug)]
pub struct EmissaryContainer<T: serde::Serialize> {
    /// The data, as long as it can be serialized.
    pub data: T,
    /// The serialization key. This creates the key property when serialized.
    pub key: String,
    /// A list of callbacks that this data can use to communicate to the layer sending it.
    pub callbacks: Vec<String>
}
}
/// All emissary containers are de-facto Emissary objects.
impl<T: serde::Serialize> Emissary for EmissaryContainer<T> {
    type DataType = T;
    /// Returns the stored key. The data argument is unused here, so it is
    /// underscore-prefixed to silence the unused-variable lint.
    fn serialization_key(&self, _data: T) -> String {
        self.key.clone()
    }
    /// Returns a copy of this container's callback strings.
    fn get_callbacks(&self) -> Vec<String> {
        self.callbacks.clone()
    }
    fn data_transfer_type(&self) -> &'static str {
        "EMISSARY"
    }
}
/// Given an emissary, serialize it with default serde serialization (bound to change).
///
/// Returns an empty string when serialization fails, preserving the original
/// silent-fallback behavior callers may rely on.
pub fn serialize_emissary<T: Emissary + serde::Serialize>(emissary: T) -> String {
    // `unwrap_or_default` keeps the "empty string on error" contract without
    // the unused `err` binding the previous match produced.
    serde_json::to_string(&emissary).unwrap_or_default()
}
/// Given enough data to make an emissary, this creates an emissary container which can immediately
/// be serialized.
pub fn create_emissary<T: serde::Serialize>(emissary_key: String, inner_data: T) -> EmissaryContainer<T> {
    EmissaryContainer {
        data: inner_data,
        // `emissary_key` is owned by this function; moving it avoids the
        // needless `.clone()` the original performed.
        key: emissary_key,
        callbacks: vec![]
    }
}
| true |
53911c5f37a12e32c457ffe1daf965f235349669
|
Rust
|
jack-t/contracts
|
/src/parse/tree.rs
|
UTF-8
| 1,022 | 3.140625 | 3 |
[] |
no_license
|
use crate::lex::Token;
use std::collections::HashMap;
/// AST node for an expression. Children are boxed to allow recursive
/// nesting while keeping each variant small.
#[derive(PartialEq, Debug)]
pub enum Expression {
    /// `lvalue = rvalue`.
    Assignment {
        lvalue: Box<Expression>,
        rvalue: Box<Expression>,
    },
    /// A bare identifier reference.
    Variable {
        name: String,
    },
    /// Integer literal.
    Int {
        value: u64,
    },
    /// String literal.
    Str {
        value: String,
    },
    /// Decimal (floating-point) literal.
    Dec {
        value: f64,
    },
    /// Character literal.
    Char {
        value: char,
    },
    /// `func(params...)`.
    FunctionCall {
        func: String,
        params: Vec<Box<Expression>>,
    },
    /// Infix operation; `op` is the operator token from the lexer.
    Binary {
        op: Token,
        lhs: Box<Expression>,
        rhs: Box<Expression>,
    },
}
/// AST node for a statement.
#[derive(PartialEq, Debug)]
pub enum Statement {
    /// An expression evaluated for its side effect.
    Expression {
        expression: Box<Expression>,
    },
    /// `if condition { true_statement } [else { false_statement }]`.
    Conditional {
        condition: Box<Expression>,
        true_statement: Box<Statement>,
        false_statement: Option<Box<Statement>>,
    },
    /// A `{ ... }` sequence of statements.
    Block {
        code: Vec<Statement>,
    },
    /// Function declaration: named parameters plus a body block.
    FuncDecl {
        name: String,
        params: Vec<String>,
        block: Box<Statement>,
    },
    /// Empty statement.
    NoOp,
    // func decl
}
| true |
670c0ac4924cbd46248055fa6d65c6babfb7eac7
|
Rust
|
kohbis/leetcode
|
/algorithms/1736.latest-time-by-replacing-hidden-digits/solution.rs
|
UTF-8
| 1,042 | 2.890625 | 3 |
[] |
no_license
|
impl Solution {
    /// Replaces each '?' in an "hh:mm" string with the digit that yields
    /// the latest valid 24-hour time.
    pub fn maximum_time(time: String) -> String {
        let d: Vec<char> = time.chars().collect();
        // Hour tens digit: '2' unless a known ones digit >= 4 forces '1'.
        let hour_tens = if d[0] != '?' {
            d[0]
        } else if d[1] == '?' || d[1].to_digit(10).unwrap() < 4 {
            '2'
        } else {
            '1'
        };
        // Hour ones digit: capped at '3' when the tens digit is '2'.
        let hour_ones = if d[1] != '?' {
            d[1]
        } else if hour_tens == '2' {
            '3'
        } else {
            '9'
        };
        // Minute digits: latest valid values are '5' and '9'.
        let min_tens = if d[3] != '?' { d[3] } else { '5' };
        let min_ones = if d[4] != '?' { d[4] } else { '9' };
        [hour_tens, hour_ones, ':', min_tens, min_ones]
            .iter()
            .collect()
    }
}
| true |
212b56edfb80d48c3a002704709a470b38396729
|
Rust
|
winstonewert/rust-rocks
|
/src/utilities.rs
|
UTF-8
| 1,715 | 2.625 | 3 |
[
"Apache-2.0"
] |
permissive
|
use std::ffi::{CStr, CString};
use std::ptr;
use crate::db::ColumnFamilyDescriptor;
use crate::options::{ColumnFamilyOptions, DBOptions};
use crate::to_raw::{FromRaw, ToRaw};
use crate::{Error, Result};
use rocks_sys as ll;
/// Loads the most recently written OPTIONS file under `path` and returns the
/// reconstructed `DBOptions` together with one `ColumnFamilyDescriptor` per
/// column family recorded in it.
///
/// # Errors
/// Propagates the status reported by the underlying FFI call.
pub fn load_latest_options(path: &str) -> Result<(DBOptions, Vec<ColumnFamilyDescriptor>)> {
    let cpath = CString::new(path).unwrap();
    let db_opt = DBOptions::default();
    let mut cf_descs_len = 0_usize;
    let mut status = ptr::null_mut();
    let mut cf_descs: Vec<ColumnFamilyDescriptor> = Vec::new();
    // Presumably fills `db_opt` through its raw pointer and returns a C array
    // of `cf_descs_len` descriptors; `status` carries any error — confirm
    // against the rocks_sys binding.
    let c_cf_descs =
        unsafe { ll::rocks_load_latest_options(cpath.as_ptr(), db_opt.raw(), &mut cf_descs_len, &mut status) };
    if let Err(error) = Error::from_ll(status) {
        return Err(error);
    }
    // Copy each descriptor's name and options out of the C array.
    for i in 0..cf_descs_len {
        let c_cf_desc = unsafe { *c_cf_descs.offset(i as _) };
        let name = unsafe { CStr::from_ptr(ll::rocks_column_family_descriptor_get_name(c_cf_desc)) };
        let cfopt =
            unsafe { ColumnFamilyOptions::from_ll(ll::rocks_column_family_descriptor_get_cfoptions(c_cf_desc)) };
        cf_descs.push(ColumnFamilyDescriptor::new(
            name.to_str().expect("non-utf8 cf name"),
            cfopt,
        ));
    }
    // Release the C-side descriptor array now that everything is copied out.
    unsafe { ll::rocks_load_options_destroy_cf_descs(c_cf_descs, cf_descs_len) };
    Ok((db_opt, cf_descs))
}
#[cfg(test)]
mod tests {
    use super::*;
    // Ignored by default: requires an existing RocksDB database at ./data.
    #[test]
    #[ignore]
    fn load_options() {
        let (dbopt, cf_descs) = load_latest_options("./data").unwrap();
        println!("db opt => {:?}", dbopt);
        for cf_desc in cf_descs {
            println!("name => {:?}", cf_desc.name());
            println!("opt =>\n{:?}", cf_desc.options());
        }
    }
}
| true |
604ee02ba9bcee4be842f1f46b6a403ddba57c73
|
Rust
|
freiguy1/rust-notes
|
/src/file_type/markdown.rs
|
UTF-8
| 5,304 | 2.53125 | 3 |
[
"MIT"
] |
permissive
|
use std::fs::{metadata, File};
use std::io::{Read, Write};
use std::path::{Path, PathBuf};
use pulldown_cmark::html;
use pulldown_cmark::Parser;
use serde::Serialize;
use crate::file_type::{create_parent_links, read_file, FileType, Link};
use crate::util::RelativeFrom;
/// Identifier used both as the file-type tag and the handlebars template name.
static TYPE_STR: &'static str = "markdown";
/// Factory that recognizes markdown source files and produces `Markdown` handlers.
pub struct MarkdownFactory;
impl crate::file_type::FileTypeFactory for MarkdownFactory {
    /// Creates a `Markdown` handler when `path` points to a regular file
    /// with a markdown extension (`.md`, `.markdown`, `.mkd`).
    fn try_create(&self, path: &Path) -> Option<Box<dyn FileType>> {
        let name = path.file_name().unwrap().to_str().unwrap();
        // `.expect` on the Result keeps the io::Error cause in the panic
        // message; `.ok().expect(..)` discarded it.
        let path_metadata = metadata(path).expect("Could not fetch file metadata");
        let is_valid = path_metadata.is_file()
            && (name.ends_with(".md") || name.ends_with(".markdown") || name.ends_with(".mkd"));
        if is_valid {
            let result = Markdown {
                path: PathBuf::from(path),
                type_str: TYPE_STR,
            };
            Some(Box::new(result))
        } else {
            None
        }
    }

    /// Verifies the required template files exist and registers the combined
    /// header + note + footer template under `TYPE_STR`.
    fn initialize(&self, app_context: &mut crate::AppContext<'_>) -> Result<(), &'static str> {
        let header_hbs_path = app_context.root_source.join("partials/header.hbs");
        if metadata(&header_hbs_path).is_err() {
            return Err("Missing partials/header.hbs");
        }
        let footer_hbs_path = app_context.root_source.join("partials/footer.hbs");
        if metadata(&footer_hbs_path).is_err() {
            return Err("Missing partials/footer.hbs");
        }
        let note_hbs_path = app_context.root_source.join("layouts/note.hbs");
        if metadata(&note_hbs_path).is_err() {
            return Err("Missing /layouts/note.hbs");
        }
        let header_hbs_contents = read_file(&header_hbs_path)?;
        let footer_hbs_contents = read_file(&footer_hbs_path)?;
        let note_hbs_contents = read_file(&note_hbs_path)?;
        app_context
            .handlebars
            .register_template_string(
                TYPE_STR,
                format!(
                    "{}\n{}\n{}",
                    header_hbs_contents, note_hbs_contents, footer_hbs_contents
                ),
            )
            .expect("Error registering header|note|footer template");
        Ok(())
    }
}
/// A single markdown note file on disk.
pub struct Markdown {
    /// Source path of the markdown file.
    path: PathBuf,
    /// Always `TYPE_STR`; reported through `get_type_str()`.
    type_str: &'static str,
}
impl FileType for Markdown {
    /// Builds the destination URL for this note: base url + the note's
    /// directory relative to the notes root + "<file stem>.html".
    fn get_url(&self, context: &crate::AppContext<'_>) -> String {
        let file_name = self.path.file_stem().unwrap().to_str().unwrap();
        let relative = self
            .path
            .my_relative_from(&context.root_notes)
            .expect("Problem parsing relative url");
        // Notes at the root of the tree get no directory component.
        let parent_relative = if relative
            .parent()
            .map_or_else(|| true, |p| p == Path::new("/") || p == Path::new(""))
        {
            String::new()
        } else {
            format!("{}/", relative.parent().unwrap().to_str().unwrap())
        };
        // Single format! call; the original nested a second format! for the
        // ".html" suffix with identical output.
        format!("{}{}{}.html", context.base_url, parent_relative, file_name)
    }

    /// Renders the markdown source through the registered handlebars template
    /// and writes the HTML next to its source-relative path under root_dest.
    fn convert(&self, context: &crate::AppContext<'_>) {
        let relative = self
            .path
            .my_relative_from(&context.root_notes)
            .expect("Problem parsing relative url");
        let file_name = relative.file_stem().unwrap().to_str().unwrap();
        // join() borrows, so no clone of root_dest is needed.
        let dest_file = context
            .root_dest
            .join(relative.parent().unwrap())
            .join(format!("{}.html", file_name));
        // One-call read; `.ok().unwrap()` on File::open discarded the io::Error.
        let source_contents =
            std::fs::read_to_string(&self.path).expect("Could not read markdown file");

        // Create Model
        let content = render_html(&source_contents);
        let parents = create_parent_links(&context.base_url, &relative, false);
        let model = MarkdownModel {
            name: String::from(file_name),
            parents,
            // `content` is already a String; re-formatting it was a no-op copy.
            content,
            base_url: context.base_url.clone(),
        };
        match context.handlebars.render(TYPE_STR, &model) {
            Ok(rendered) => {
                // Create File
                let mut file = File::create(&dest_file)
                    .expect("Could not create markdown html file");
                file.write_all(rendered.as_bytes())
                    .expect("Could not write html to file");
            }
            Err(why) => panic!("Error rendering markdown: {:?}", why),
        }
    }

    fn get_type_str(&self) -> &'static str {
        self.type_str
    }
}
/// Template model handed to handlebars when rendering a note page.
#[derive(Serialize)]
struct MarkdownModel {
    /// File stem of the source markdown file.
    name: String,
    /// Breadcrumb links produced by `create_parent_links`.
    parents: Vec<Link>,
    /// Rendered HTML body of the note.
    content: String,
    /// Site base URL copied from the app context.
    base_url: String,
}
// impl ToJson for MarkdownModel {
// fn to_json(&self) -> Json {
// Json::from_str(&json::encode(&self).unwrap()).unwrap()
// }
// }
/// Renders a markdown string to HTML using pulldown-cmark.
fn render_html(text: &str) -> String {
    // Markdown usually expands when rendered; 3/2 is a starting-capacity heuristic.
    let mut out = String::with_capacity(text.len() * 3 / 2);
    // `text` is already a &str; the original's `&text` was a needless double borrow.
    let parser = Parser::new(text);
    html::push_html(&mut out, parser);
    out
}
| true |
4edb1eb61374ddd1650589b7d53b00f63b76805e
|
Rust
|
fengcen/arthas
|
/arthas/src/tree/action/mod.rs
|
UTF-8
| 1,499 | 3.1875 | 3 |
[
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
/// Describes how a tree search should proceed from a node: whether to take
/// the current value, fold a subtree, and/or descend left or right.
#[derive(Default, Debug)]
pub struct SearchAction {
    pub take: bool,
    pub fold_left: bool,
    pub fold_right: bool,
    pub go_left: bool,
    pub go_right: bool,
}

impl SearchAction {
    /// A fresh action with every flag cleared.
    pub fn new() -> SearchAction {
        Self::default()
    }

    /// Combines two actions. Returns `None` when they disagree on travel
    /// direction; otherwise take/fold flags are AND-ed together and the
    /// shared directions are kept.
    pub fn merge(&self, other: &SearchAction) -> Option<SearchAction> {
        let same_direction =
            self.go_left == other.go_left && self.go_right == other.go_right;
        if !same_direction {
            return None;
        }
        Some(SearchAction {
            take: self.take && other.take,
            fold_left: self.fold_left && other.fold_left,
            fold_right: self.fold_right && other.fold_right,
            go_left: self.go_left,
            go_right: self.go_right,
        })
    }

    /// A search stops when it descends in neither direction.
    pub fn is_stopped(&self) -> bool {
        !(self.go_left || self.go_right)
    }

    // Builder-style setters: each sets one flag and returns self.

    pub fn take(mut self) -> SearchAction {
        self.take = true;
        self
    }

    pub fn go_right(mut self) -> SearchAction {
        self.go_right = true;
        self
    }

    pub fn go_left(mut self) -> SearchAction {
        self.go_left = true;
        self
    }

    pub fn fold_right(mut self) -> SearchAction {
        self.fold_right = true;
        self
    }

    pub fn fold_left(mut self) -> SearchAction {
        self.fold_left = true;
        self
    }
}
| true |
fa602627d2d94bf442242a0238a85256117288ab
|
Rust
|
S4ndyk/stomper
|
/libstomper/src/huffman/bitwriter.rs
|
UTF-8
| 3,395 | 3.9375 | 4 |
[] |
no_license
|
//! Tool for writing inidividual bits to a writer
use std::io::Write;
const BUFFER_SIZE: usize = 128;

/// Accumulates individual bits (least-significant bit first within each
/// byte) into a fixed buffer and flushes completed data to `writer`.
pub struct BitWriter<W: Write>{
    writer: W,
    buffer: [u8; BUFFER_SIZE],
    /// Index of the byte currently being filled.
    bytecount: usize,
    /// Bit position (0..=7) inside the current byte.
    bitpos: usize,
}

impl <W: Write> BitWriter<W> {
    pub fn new(writer: W) -> Self {
        BitWriter {
            writer,
            buffer: [0; BUFFER_SIZE],
            bytecount: 0,
            bitpos: 0,
        }
    }

    /// Writes over the next bit in the stream.
    pub fn write_bit(&mut self, set_bit: bool) {
        if set_bit {
            let mask = 1 << self.bitpos;
            self.buffer[self.bytecount] |= mask;
        }
        self.bitpos += 1;
        if self.bitpos > 7 {
            self.bitpos = 0;
            self.bytecount += 1;
        }
        // Flushes buffer if it is full
        if self.bytecount > BUFFER_SIZE - 1 {
            self.flush();
        }
    }

    /// Writes the bits accumulated so far and resets the buffer.
    ///
    /// Bug fix: the previous version always wrote the entire 128-byte buffer,
    /// so a final partial buffer was padded with spurious zero bytes. Only the
    /// bytes that actually hold data are written now, including a partially
    /// filled last byte (zero-padded in its unused high bits).
    pub fn flush(&mut self) {
        let len = self.bytecount + usize::from(self.bitpos > 0);
        self.writer
            .write_all(&self.buffer[..len])
            .expect("Could not write to all to writer");
        self.bitpos = 0;
        self.bytecount = 0;
        self.buffer = [0; BUFFER_SIZE];
    }

    /// Writes given string as individual bits.
    /// Panics if `s` contains characters other than 0 and 1.
    pub fn write_string(&mut self, s: String) {
        for c in s.bytes() {
            match c {
                b'1' => self.write_bit(true),
                b'0' => self.write_bit(false),
                _ => panic!("String contains characters other than 0 or 1"),
            }
        }
    }

    /// Flushes and returns inner writer
    #[allow(dead_code)]
    fn to_inner(mut self) -> W {
        self.flush();
        self.writer
    }
}
#[cfg(test)]
mod tests {
    use super::BitWriter;
    // Bits are written LSB-first within each byte: "100101" sets bit 0,
    // clears bits 1-2, sets bit 3, etc., so the byte reads 0b101001.
    #[test]
    fn writer_writes_correctly_no1() {
        let s = String::from("100101");
        let buf: Vec<u8> = Vec::new();
        let mut writer = BitWriter::new(buf);
        writer.write_string(s);
        let buf = writer.to_inner();
        assert_eq!(buf[0], 0b101001);
    }
    // 12 bits span two bytes; the second byte holds the trailing 4 bits.
    #[test]
    fn writer_writes_correctly_no2() {
        let s = String::from("111001011101");
        let buf: Vec<u8> = Vec::new();
        let mut writer = BitWriter::new(buf);
        writer.write_string(s);
        let buf = writer.to_inner();
        assert_eq!(buf[0], 0b10100111);
        assert_eq!(buf[1], 0b1011);
    }
    // Several write_string calls must concatenate seamlessly across
    // byte boundaries — same total bit stream as test no2.
    #[test]
    fn writer_writes_correctly_no3() {
        let s1 = String::from("1110");
        let s2 = String::from("01");
        let s3 = String::from("01110");
        let s4 = String::from("1");
        let buf: Vec<u8> = Vec::new();
        let mut writer = BitWriter::new(buf);
        writer.write_string(s1);
        writer.write_string(s2);
        writer.write_string(s3);
        writer.write_string(s4);
        let buf = writer.to_inner();
        assert_eq!(buf[0], 0b10100111);
        assert_eq!(buf[1], 0b1011);
    }
    // 3072 bits = 384 bytes = 3 full internal buffers; exercises the
    // automatic flush path without panicking or indexing out of bounds.
    #[test]
    fn writer_does_not_overflow() {
        let buf: Vec<u8> = Vec::new();
        let mut writer = BitWriter::new(buf);
        for i in 0..3072 {
            if i%3 == 0 {
                writer.write_bit(false);
            } else {
                writer.write_bit(true);
            }
        }
    }
}
| true |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.