blob_id (stringlengths: 40..40) | language (stringclasses: 1 value) | repo_name (stringlengths: 5..140) | path (stringlengths: 5..183) | src_encoding (stringclasses: 6 values) | length_bytes (int64: 12..5.32M) | score (float64: 2.52..4.94) | int_score (int64: 3..5) | detected_licenses (listlengths: 0..47) | license_type (stringclasses: 2 values) | text (stringlengths: 12..5.32M) | download_success (bool: 1 class) |
---|---|---|---|---|---|---|---|---|---|---|---|
82867c757316096b28899a7aae663b274cb822b5
|
Rust
|
redtankd/project-euler
|
/src/bin/00006.rs
|
UTF-8
| 155 | 3.09375 | 3 |
[] |
no_license
|
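// Project Euler 6: the difference between the square of the sum and the sum of the squares of 1..=100.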
fn main() {
let number = 1..101;
let sum = number.fold((0, 0), |(x1, x2), y| (x1 + y * y, x2 + y));
println!("{}", sum.1 * sum.1 - sum.0);
}
| true |
5b468cc6b88b81b026845bb01ef35cddeba636c5
|
Rust
|
gimli-rs/findshlibs
|
/src/unsupported.rs
|
UTF-8
| 1,846 | 2.703125 | 3 |
[
"MIT",
"Apache-2.0"
] |
permissive
|
//! The fallback implementation of the [SharedLibrary
//! trait](../trait.SharedLibrary.html) that does nothing.
use crate::Segment as SegmentTrait;
use crate::SharedLibrary as SharedLibraryTrait;
use crate::{Bias, IterationControl, SharedLibraryId, Svma};
use std::ffi::OsStr;
use std::marker::PhantomData;
use std::usize;
/// An unsupported segment
#[derive(Debug)]
pub struct Segment<'a> {
phantom: PhantomData<&'a SharedLibrary<'a>>,
}
impl<'a> SegmentTrait for Segment<'a> {
type SharedLibrary = SharedLibrary<'a>;
#[inline]
fn name(&self) -> &str {
unreachable!()
}
#[inline]
fn stated_virtual_memory_address(&self) -> Svma {
unreachable!()
}
#[inline]
fn len(&self) -> usize {
unreachable!()
}
}
/// An iterator over the segments of an unsupported shared library (always empty).
#[derive(Debug)]
pub struct SegmentIter<'a> {
phantom: PhantomData<&'a SharedLibrary<'a>>,
}
impl<'a> Iterator for SegmentIter<'a> {
type Item = Segment<'a>;
fn next(&mut self) -> Option<Self::Item> {
None
}
}
/// The fallback implementation of the [SharedLibrary
/// trait](../trait.SharedLibrary.html).
#[derive(Debug)]
pub struct SharedLibrary<'a> {
phantom: PhantomData<&'a SharedLibrary<'a>>,
}
impl<'a> SharedLibraryTrait for SharedLibrary<'a> {
type Segment = Segment<'a>;
type SegmentIter = SegmentIter<'a>;
#[inline]
fn name(&self) -> &OsStr {
unreachable!()
}
fn id(&self) -> Option<SharedLibraryId> {
unreachable!()
}
fn segments(&self) -> Self::SegmentIter {
SegmentIter {
phantom: PhantomData,
}
}
#[inline]
fn virtual_memory_bias(&self) -> Bias {
unreachable!()
}
fn each<F, C>(_f: F)
where
F: FnMut(&Self) -> C,
C: Into<IterationControl>,
{
}
}
| true |
096aab66c9258fd95cde71f97b53391fa23b74b7
|
Rust
|
seandewar/challenge-solutions
|
/leetcode/medium/deepest-leaves-sum.rs
|
UTF-8
| 998 | 3 | 3 |
[] |
no_license
|
// https://leetcode.com/problems/deepest-leaves-sum
//
// Complexity: runtime O(n), space O(n) [O(logn) if balanced].
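// Approach: one DFS tracks the maximum depth seen so far; reaching a deeper level resets the running sum before the node's value is added.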
use std::cell::RefCell;
use std::rc::Rc;
impl Solution {
pub fn deepest_leaves_sum(root: Option<Rc<RefCell<TreeNode>>>) -> i32 {
fn dfs(
node: Option<Rc<RefCell<TreeNode>>>,
depth: i32,
max_depth: &mut i32,
sum: &mut i32,
) {
if let Some(node) = node {
if depth > *max_depth {
*max_depth = depth;
*sum = 0;
}
let node = node.borrow();
if depth == *max_depth {
*sum += node.val;
}
dfs(node.left.clone(), depth + 1, max_depth, sum);
dfs(node.right.clone(), depth + 1, max_depth, sum);
}
}
let (mut max_depth, mut sum) = (0, 0);
dfs(root, 0, &mut max_depth, &mut sum);
sum
}
}
| true |
d9f192efa5c45089091870b0449653add2178c6a
|
Rust
|
Psychedelic/candid
|
/rust/candid/src/parser/token.rs
|
UTF-8
| 9,130 | 2.953125 | 3 |
[
"Apache-2.0",
"LLVM-exception"
] |
permissive
|
use lalrpop_util::ParseError;
use logos::{Lexer, Logos};
#[derive(Logos, Debug, Clone, PartialEq, Eq, Ord, PartialOrd)]
pub enum Token {
#[regex(r"[ \t\r\n]+", logos::skip)]
// line comment
#[regex("//[^\n]*", logos::skip)]
#[token("/*")]
StartComment,
#[error]
UnexpectedToken,
#[token("=")]
Equals,
#[token("(")]
LParen,
#[token(")")]
RParen,
#[token("{")]
LBrace,
#[token("}")]
RBrace,
#[token(";")]
Semi,
#[token(",")]
Comma,
#[token(".", priority = 10)]
Dot,
#[token(":")]
Colon,
#[token("->")]
Arrow,
#[token("null")]
Null,
#[token("vec")]
Vec,
#[token("record")]
Record,
#[token("variant")]
Variant,
#[token("func")]
Func,
#[token("service")]
Service,
#[token("oneway")]
Oneway,
#[token("query")]
Query,
#[token("blob")]
Blob,
#[token("type")]
Type,
#[token("import")]
Import,
#[token("opt")]
Opt,
#[token("==")]
TestEqual,
#[token("!=")]
NotEqual,
#[token("!:")]
NotDecode,
#[token("principal")]
Principal,
#[regex("[a-zA-Z_][a-zA-Z0-9_]*", |lex| lex.slice().to_string())]
Id(String),
#[token("\"")]
StartString,
// This token is not derived. Stores the unescaped string
Text(String),
#[regex("[+-]", |lex| lex.slice().chars().next())]
Sign(char),
#[regex("[0-9][_0-9]*", parse_number)]
Decimal(String),
#[regex("0[xX][0-9a-fA-F][_0-9a-fA-F]*", parse_number)]
Hex(String),
#[regex("[0-9]*\\.[0-9]*", parse_number)]
#[regex("[0-9]*(\\.[0-9]*)?[eE][+-]?[0-9]+", parse_number)]
Float(String),
#[regex("true|false", |lex| lex.slice().parse())]
Boolean(bool),
}
#[derive(Logos, Debug, Clone, PartialEq, Eq)]
enum Comment {
#[error]
Skip,
#[token("*/")]
End,
#[token("/*")]
Start,
}
#[derive(Logos, Debug, Clone, PartialEq, Eq)]
enum Text {
#[error]
Error,
#[regex(r#"[^\\"]+"#)]
Text,
#[regex(r"\\.")]
EscapeCharacter,
#[regex(r"\\u\{[0-9a-fA-F]+\}")]
Codepoint,
#[regex(r"\\[0-9a-fA-F][0-9a-fA-F]")]
Byte,
#[token("\"")]
EndString,
}
impl std::fmt::Display for Token {
fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(fmt, "{:?}", self)
}
}
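// Strips '_' digit separators and any leading hex prefix so later radix parsing sees bare digits.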
fn parse_number(lex: &mut Lexer<Token>) -> String {
let iter = lex.slice().chars().filter(|c| *c != '_');
if lex.slice().starts_with("0x") {
iter.skip(2).collect()
} else {
iter.collect()
}
}
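// `Tokenizer` drives the lexer below; `StartComment` and `StartString` temporarily morph it into the `Comment` / `Text` sub-lexers to handle nested block comments and string escapes.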
pub struct Tokenizer<'input> {
lex: Lexer<'input, Token>,
}
impl<'input> Tokenizer<'input> {
pub fn new(input: &'input str) -> Self {
let lex = Token::lexer(input);
Tokenizer { lex }
}
}
pub type Span = std::ops::Range<usize>;
#[derive(Clone, Debug, Eq, PartialEq)]
pub struct Spanned<T> {
pub span: Span,
pub value: T,
}
#[derive(Clone, Debug, Eq, PartialEq)]
pub struct LexicalError {
pub err: String,
pub span: Span,
}
impl std::fmt::Display for LexicalError {
fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
if self.span.start == 0 && self.span.end == 0 {
write!(fmt, "{}", self.err)
} else {
write!(fmt, "{} at {:?}", self.err, self.span)
}
}
}
impl LexicalError {
fn new<E: ToString>(err: E, span: Span) -> Self {
LexicalError {
err: err.to_string(),
span,
}
}
}
pub(crate) type ParserError = ParseError<usize, Token, LexicalError>;
pub fn error<E: ToString>(err: E) -> ParserError {
ParseError::User {
error: LexicalError::new(err, 0..0),
}
}
pub fn error2<E: ToString>(err: E, span: Span) -> ParserError {
ParseError::User {
error: LexicalError::new(err, span),
}
}
impl<'input> Iterator for Tokenizer<'input> {
type Item = Result<(usize, Token, usize), LexicalError>;
fn next(&mut self) -> Option<Self::Item> {
let token = self.lex.next()?;
let span = self.lex.span();
match token {
Token::UnexpectedToken => {
let err = format!("Unknown token {}", self.lex.slice());
Some(Err(LexicalError::new(err, span)))
}
Token::StartComment => {
let mut lex = self.lex.to_owned().morph::<Comment>();
let mut nesting = 1;
loop {
match lex.next() {
Some(Comment::Skip) => continue,
Some(Comment::End) => {
nesting -= 1;
if nesting == 0 {
break;
}
}
Some(Comment::Start) => nesting += 1,
None => {
return Some(Err(LexicalError::new(
"Unclosed comment",
span.start..lex.span().end,
)))
}
}
}
self.lex = lex.morph::<Token>();
self.next()
}
Token::StartString => {
let mut result = String::new();
let mut lex = self.lex.to_owned().morph::<Text>();
loop {
use self::Text::*;
match lex.next() {
Some(Text) => result += lex.slice(),
Some(EscapeCharacter) => match lex.slice().chars().nth(1).unwrap() {
'n' => result.push('\n'),
'r' => result.push('\r'),
't' => result.push('\t'),
'\\' => result.push('\\'),
'"' => result.push('"'),
'\'' => result.push('\''),
c => {
return Some(Err(LexicalError::new(
format!("Unknown escape character {}", c),
lex.span(),
)))
}
},
Some(Codepoint) => {
let slice = lex.slice();
let hex = &slice[3..slice.len() - 1];
match u32::from_str_radix(hex, 16)
.map_err(|_| {
LexicalError::new("Not a valid hex escape", lex.span())
})
.and_then(|c| {
std::char::from_u32(c).ok_or_else(|| {
LexicalError::new(
format!("Unicode escape out of range {}", hex),
lex.span(),
)
})
}) {
Ok(c) => result.push(c),
Err(e) => return Some(Err(e)),
}
}
Some(Byte) => {
let hex = &lex.slice()[1..];
match u8::from_str_radix(hex, 16) {
Ok(byte) => {
// According to https://webassembly.github.io/spec/core/text/values.html#strings
// \xx escape can break utf8 unicode.
let bytes = unsafe { result.as_mut_vec() };
bytes.push(byte);
}
Err(_) => {
return Some(Err(LexicalError::new(
"Not a valid hex escape",
lex.span(),
)))
}
}
}
Some(EndString) => break,
Some(Error) => {
return Some(Err(LexicalError::new(
format!("Unexpected string {}", lex.slice()),
lex.span(),
)))
}
None => {
return Some(Err(LexicalError::new(
"Unclosed string",
span.start..lex.span().end,
)))
}
}
}
self.lex = lex.morph::<Token>();
Some(Ok((span.start, Token::Text(result), self.lex.span().end)))
}
_ => Some(Ok((span.start, token, span.end))),
}
}
}
| true |
849b315a0fcd69956707d77957f34b1d37b6d9f1
|
Rust
|
defvar/toy
|
/pkg/toy-core/tests/value.rs
|
UTF-8
| 6,861 | 2.984375 | 3 |
[
"MIT"
] |
permissive
|
use chrono::{DateTime, Utc};
use toy_core::data::Value;
use toy_core::prelude::*;
#[test]
fn path() {
let v = map_value! {
"a" => 1,
"b" => 2,
"c" => map_value! {
"ca" => 31,
"cb" => 32,
},
"d" => seq_value![41,42,43]
};
assert_eq!(v.path("xxx"), None);
assert_eq!(v.path("a").unwrap(), &Value::from(1));
assert_eq!(v.path("c.ca").unwrap(), &Value::from(31));
assert_eq!(v.path("d.2").unwrap(), &Value::from(43));
}
#[test]
fn insert_by_path() {
let mut v = map_value! {
"a" => 1,
};
let expected_1 = map_value! {
"a" => 1,
"b" => 2,
};
let expected_2 = map_value! {
"a" => 1,
"b" => 2,
"c" => map_value! {
"ca" => 31,
},
};
assert_eq!(v.insert_by_path("b", Value::from(2)), None);
assert_eq!(v, expected_1);
assert_eq!(v.insert_by_path("c.ca", Value::from(31)), None);
assert_eq!(v, expected_2);
}
#[test]
fn insert_by_path_overwrite() {
let mut v = Value::from(1);
let expected_1 = map_value! {
"a" => 1,
};
assert_eq!(v.insert_by_path("a", Value::from(1)), None);
assert_eq!(v, expected_1);
assert_eq!(
v.insert_by_path("a", Value::from(2)).unwrap(),
Value::from(1)
);
}
macro_rules! pass_parse_integer {
($func: ident, $t: ident, $actual: expr, $expected: expr) => {
#[test]
fn $func() {
assert_eq!(Value::from($actual).parse_integer::<$t>(), Some($expected))
}
};
}
macro_rules! none_parse_integer {
($func: ident, $t: ident, $actual: expr) => {
#[test]
fn $func() {
assert_eq!(Value::from($actual).parse_integer::<$t>(), None)
}
};
}
///////////////////////////////////
// u8 /////////////////////////////
///////////////////////////////////
pass_parse_integer!(parse_u8_from_u8_0, u8, 0u8, 0u8);
pass_parse_integer!(parse_u8_from_u8_max, u8, u8::max_value(), u8::max_value());
pass_parse_integer!(parse_u8_from_u16_0, u8, 0u16, 0u8);
none_parse_integer!(parse_u8_from_u16_max, u8, u16::max_value());
pass_parse_integer!(parse_u8_from_u32_0, u8, 0u32, 0u8);
none_parse_integer!(parse_u8_from_u32_max, u8, u32::max_value());
pass_parse_integer!(parse_u8_from_u64_0, u8, 0u64, 0u8);
none_parse_integer!(parse_u8_from_u64_max, u8, u64::max_value());
////////////////////////////////////
// u16 /////////////////////////////
////////////////////////////////////
pass_parse_integer!(parse_u16_from_u8_0, u16, 0u8, 0u16);
pass_parse_integer!(
parse_u16_from_u8_max,
u16,
u8::max_value(),
u8::max_value() as u16
);
pass_parse_integer!(parse_u16_from_u16_0, u16, 0u16, 0u16);
pass_parse_integer!(
parse_u16_from_u16_max,
u16,
u16::max_value(),
u16::max_value()
);
pass_parse_integer!(parse_u16_from_u32_0, u16, 0u32, 0u16);
none_parse_integer!(parse_u16_from_u32_max, u16, u32::max_value());
pass_parse_integer!(parse_u16_from_u64_0, u16, 0u64, 0u16);
none_parse_integer!(parse_u16_from_u64_max, u16, u64::max_value());
////////////////////////////////////
// u32 /////////////////////////////
////////////////////////////////////
pass_parse_integer!(parse_u32_from_u8_0, u32, 0u8, 0u32);
pass_parse_integer!(
parse_u32_from_u8_max,
u32,
u8::max_value(),
u8::max_value() as u32
);
pass_parse_integer!(parse_u32_from_u16_0, u32, 0u16, 0u32);
pass_parse_integer!(
parse_u32_from_u16_max,
u32,
u16::max_value(),
u16::max_value() as u32
);
pass_parse_integer!(parse_u32_from_u32_0, u32, 0u32, 0u32);
pass_parse_integer!(
parse_u32_from_u32_max,
u32,
u32::max_value(),
u32::max_value()
);
pass_parse_integer!(parse_u32_from_u64_0, u32, 0u64, 0u32);
none_parse_integer!(parse_u32_from_u64_max, u32, u64::max_value());
////////////////////////////////////
// u64 /////////////////////////////
////////////////////////////////////
pass_parse_integer!(parse_u64_from_u8_0, u64, 0u8, 0u64);
pass_parse_integer!(
parse_u64_from_u8_max,
u64,
u8::max_value(),
u8::max_value() as u64
);
pass_parse_integer!(parse_u64_from_u16_0, u64, 0u16, 0u64);
pass_parse_integer!(
parse_u64_from_u16_max,
u64,
u16::max_value(),
u16::max_value() as u64
);
pass_parse_integer!(parse_u64_from_u32_0, u64, 0u32, 0u64);
pass_parse_integer!(
parse_u64_from_u32_max,
u64,
u32::max_value(),
u32::max_value() as u64
);
none_parse_integer!(parse_u64_from_u64_max, u64, u64::max_value());
#[test]
fn partial_eq_u64() {
let mut v = Value::from(1u64);
let other = 1u64;
assert_eq!(&v, other);
assert_eq!(&mut v, other);
assert_eq!(v, other);
assert_eq!(other, v);
}
#[test]
fn partial_eq_f64() {
let mut v = Value::from(1f64);
let other = 1f64;
assert_eq!(&v, other);
assert_eq!(&mut v, other);
assert_eq!(v, other);
assert_eq!(other, v);
}
#[test]
fn partial_eq_bool() {
let mut v = Value::from(true);
let other = true;
assert_eq!(&v, other);
assert_eq!(&mut v, other);
assert_eq!(v, other);
assert_eq!(other, v);
}
#[test]
fn partial_eq_str() {
let mut v = Value::from("aiueo");
let other = "aiueo";
assert_eq!(&v, other);
assert_eq!(&mut v, other);
assert_eq!(v, other);
assert_eq!(other, v);
}
#[test]
fn partial_eq_string() {
let v = Value::from("aiueo");
let other = "aiueo".to_string();
assert_eq!(v, other);
assert_eq!(other, v);
}
#[test]
fn ord_integer() {
let small = Value::Integer(1);
let big = Value::Integer(2);
assert_eq!(small < big, true);
}
#[test]
fn ord_integer_number() {
let small = Value::Integer(1);
let big = Value::Number(1.1f64);
assert_eq!(small < big, true);
}
#[test]
fn ord_integer_string() {
let small = Value::Integer(2);
let big = Value::String("1".to_string());
assert_eq!(small < big, true);
}
#[test]
fn ord_timestamp() {
let small = Value::TimeStamp(
DateTime::parse_from_rfc3339("1996-12-19T16:39:57+00:00")
.unwrap()
.with_timezone(&Utc),
);
let big = Value::TimeStamp(
DateTime::parse_from_rfc3339("1996-12-19T16:39:58+00:00")
.unwrap()
.with_timezone(&Utc),
);
assert_eq!(small < big, true);
}
#[test]
fn is_same_type() {
assert_eq!(Value::Integer(1).is_same_type(&Value::Integer(4)), true);
}
#[test]
fn as_same_type() {
assert_eq!(
Value::Integer(1).as_same_type(&Value::String("a".to_string())),
Some(Value::from("1"))
);
assert_eq!(
Value::String("1".to_string()).as_same_type(&Value::Integer(2)),
Some(Value::from(1))
);
}
#[test]
fn as_same_type_fail() {
assert_eq!(
Value::String("a".to_string()).as_same_type(&Value::Integer(2)),
None,
);
}
| true |
e1d4098d7dab4f264a779b7a1511c48958f5e896
|
Rust
|
bigkraig/opentelemetry-rust
|
/examples/async_fn.rs
|
UTF-8
| 2,728 | 2.9375 | 3 |
[
"Apache-2.0"
] |
permissive
|
//! Demonstrates using OpenTelemetry to instrument `async` functions.
//!
//! This is based on the [`hello_world`] example from `tokio`, and implements a
//! simple client that opens a TCP stream, writes "hello world\n", and closes
//! the connection.
//!
//! You can test this out by running:
//!
//! ncat -l 6142
//!
//! And then in a second terminal run:
//!
//! ncat -l 6143
//!
//! And then in a third terminal run:
//!
//! cargo run --example async_fn
//!
//! [`hello_world`]: https://github.com/tokio-rs/tokio/blob/132e9f1da5965530b63554d7a1c59824c3de4e30/tokio/examples/hello_world.rs
use opentelemetry::{
api::{trace::futures::Instrument, Provider, Tracer},
global, sdk,
};
use std::time::Duration;
use std::{error::Error, io, net::SocketAddr, thread};
use tokio::io::AsyncWriteExt;
use tokio::net::TcpStream;
async fn connect(addr: &SocketAddr) -> io::Result<TcpStream> {
let tracer = global::trace_provider().get_tracer("connector");
let span = tracer.start("Connecting", None);
TcpStream::connect(&addr).instrument(span).await
}
async fn write(stream: &mut TcpStream) -> io::Result<usize> {
let tracer = global::trace_provider().get_tracer("writer");
let span = tracer.start("Writing", None);
stream.write(b"hello world\n").instrument(span).await
}
async fn run(addr: &SocketAddr) -> io::Result<usize> {
let tracer = global::trace_provider().get_tracer("runner");
let span = tracer.start(&format!("running: {}", addr), None);
let mut stream = connect(addr).instrument(tracer.clone_span(&span)).await?;
write(&mut stream).instrument(span).await
}
fn init_tracer() -> thrift::Result<()> {
let exporter = opentelemetry_jaeger::Exporter::builder()
.with_agent_endpoint("127.0.0.1:6831".parse().unwrap())
.with_process(opentelemetry_jaeger::Process {
service_name: "trace-demo".to_string(),
tags: vec![],
})
.init()?;
let provider = sdk::Provider::builder()
.with_simple_exporter(exporter)
.with_config(sdk::Config {
default_sampler: Box::new(sdk::Sampler::Always),
..Default::default()
})
.build();
global::set_provider(provider);
Ok(())
}
#[tokio::main]
pub async fn main() -> Result<(), Box<dyn Error>> {
init_tracer()?;
let addr = "127.0.0.1:6142".parse()?;
let addr2 = "127.0.0.1:6143".parse()?;
let tracer = global::trace_provider().get_tracer("async_example");
let span = tracer.start("root", None);
let (run1, run2) = futures::future::join(run(&addr), run(&addr2))
.instrument(span)
.await;
run1?;
run2?;
thread::sleep(Duration::from_millis(250));
Ok(())
}
| true |
72f38a3f4db73c8d3a1a27917f0c76312204e692
|
Rust
|
Candunc/roosterteeth-rs
|
/src/lib.rs
|
UTF-8
| 1,389 | 3.453125 | 3 |
[
"Apache-2.0"
] |
permissive
|
/*!
RoosterTeeth-rs is a Rust wrapper for the RoosterTeeth VOD API. All requests are done through the [requests](./requests/struct.Requests.html) object.
Most requests take optional parameters to restrict results to a certain channel or to change how they are sorted.
In the following example, we grab the first page of episodes, with no restrictions and default sorting.
```no_run
use roosterteeth_rs::requests::{Requests, Credential};
let requests = Requests::new(Credential::Anonymous);
let episodes = requests.list_episodes(1, None, None);
println!("{}",episodes[0].attributes.title);
```
All of the returned values are documented in the following page as structs:
* [Channels](./structs/channels/struct.Channel.html)
* [Episodes](./structs/episodes/struct.Episode.html)
* [Seasons](./structs/seasons/struct.Season.html)
* [Series](./structs/series/struct.Series.html)
* [Videos](./structs/videos/struct.Video.html)
Please note the difference between an [Episode](./structs/episodes/struct.Episode.html) and a [Video](./structs/videos/struct.Video.html) struct.
An episode struct returns all the information about that episode, while a video struct is useful mainly for getting the m3u8 URLs and
will fail if you don't have permissions to watch the video (for example, if you aren't a FIRST member or the video isn't public).
*/
pub mod requests;
pub mod structs;
#[cfg(test)]
mod tests;
| true |
240c483e8af5620d9f5f4d2491b34e019947bceb
|
Rust
|
othelarian/candelabre
|
/candelabre-examples/src/luminance.rs
|
UTF-8
| 7,049 | 2.65625 | 3 |
[
"Apache-2.0"
] |
permissive
|
//! Example to show the usage of `CandlSurface` and `CandlManager` with
//! luminance as OpenGL backend. 'ESC' to close a window.
use candelabre_windowing::{
CandlCurrentWrapper, CandlDimension, CandlElement, CandlError,
CandlManager, CandlOptions, CandlWindow
};
use candelabre_windowing::glutin::event::{
ElementState, Event, KeyboardInput, VirtualKeyCode, WindowEvent
};
use candelabre_windowing::glutin::event_loop::{
ControlFlow, EventLoop, EventLoopWindowTarget
};
use candelabre_windowing::glutin::dpi::PhysicalSize;
use candelabre_windowing::glutin::monitor::VideoMode;
use candelabre_windowing::glutin::window::WindowId;
use luminance::context::GraphicsContext;
use luminance::framebuffer::Framebuffer;
use luminance::pipeline::PipelineState;
use luminance::shader::program::Program;
use luminance::render_state::RenderState;
use luminance::state::GraphicsState;
use luminance::tess::{Mode, Tess, TessBuilder};
use luminance::texture::{Dim2, Flat};
use luminance_derive::{Semantics, Vertex};
use std::cell::RefCell;
use std::collections::HashMap;
use std::rc::Rc;
// ============================================================================
// structure to handle the window and to enable the use of the manager
struct LumSurface {
ctx: Option<CandlCurrentWrapper>,
gfx_state: Rc<RefCell<GraphicsState>>
}
unsafe impl GraphicsContext for LumSurface {
fn state(&self) -> &Rc<RefCell<GraphicsState>> { &self.gfx_state }
}
impl CandlWindow for LumSurface {
fn ctx(&mut self) -> CandlCurrentWrapper { self.ctx.take().unwrap() }
fn ctx_ref(&self) -> &CandlCurrentWrapper { self.ctx.as_ref().unwrap() }
fn set_ctx(&mut self, nctx: CandlCurrentWrapper) { self.ctx = Some(nctx) }
fn swap_buffers(&mut self) {
if let CandlCurrentWrapper::PossiblyCurrent(ctx) = self.ctx.as_ref().unwrap() {
ctx.swap_buffers().unwrap();
}
}
fn resize(&mut self, nsize: PhysicalSize<u32>) {
if let CandlCurrentWrapper::PossiblyCurrent(ctx) = &self.ctx_ref() {
ctx.resize(nsize);
}
}
}
impl CandlElement<LumSurface> for LumSurface {
fn build<T>(
el: &EventLoopWindowTarget<T>,
video_mode: VideoMode,
dim: CandlDimension,
title: &str,
options: CandlOptions
) -> Result<LumSurface, CandlError> {
let ctx = LumSurface::init(el, video_mode, dim, title, options)?;
let ctx = Some(CandlCurrentWrapper::PossiblyCurrent(ctx));
//let gfx_state = Rc::new(RefCell::new(GraphicsState::new().unwrap()));
let gfx_state = unsafe {
Rc::new(RefCell::new(GraphicsState::new_multi_contexts().unwrap()))
};
Ok(LumSurface {ctx, gfx_state})
}
}
impl LumSurface {
fn back_buffer(&mut self) -> Framebuffer<Flat, Dim2, (), ()> {
match self.ctx() {
CandlCurrentWrapper::PossiblyCurrent(ctx) => {
let (w, h) = ctx.window().inner_size().into();
self.set_ctx(CandlCurrentWrapper::PossiblyCurrent(ctx));
Framebuffer::back_buffer(self, [w, h])
}
CandlCurrentWrapper::NotCurrent(_) => panic!()
}
}
}
// ============================================================================
// structure to handle data for the window (Tess, triangles, etc)
mod utils;
use utils::{FS, VS};
#[derive(Clone, Copy, Debug, Eq, PartialEq, Semantics)]
pub enum Semantics {
#[sem(name = "co", repr = "[f32; 2]", wrapper = "VertexPosition")]
Position,
#[sem(name = "color", repr = "[u8; 3]", wrapper = "VertexColor")]
Color
}
#[repr(C)]
#[derive(Clone, Copy, Debug, PartialEq, Vertex)]
#[vertex(sem = "Semantics")]
struct Vertex {
pos: VertexPosition,
#[vertex(normalized = "true")]
rgb: VertexColor
}
const TRIANGLE: [Vertex; 3] = [
Vertex {pos: VertexPosition::new([-0.5, 0.5]), rgb: VertexColor::new([0, 255, 0])},
Vertex {pos: VertexPosition::new([-0.0, 0.0]), rgb: VertexColor::new([255, 0, 0])},
Vertex {pos: VertexPosition::new([0.5, 0.5]), rgb: VertexColor::new([0, 0, 255])}
];
struct LumData {
pub tess: Tess,
pub program: Program<Semantics, (), ()>
}
impl LumData {
fn new(surface: &mut LumSurface) -> LumData {
let tess = TessBuilder::new(surface)
.add_vertices(TRIANGLE)
.set_mode(Mode::Triangle)
.build()
.unwrap();
let program = Program::<Semantics, (), ()>::from_strings(None, VS, None, FS)
.expect("program creation")
.ignore_warnings();
LumData { tess, program }
}
}
// ============================================================================
// main function
fn main() {
let el = EventLoop::new();
let mut win_manager: CandlManager<LumSurface, ()> = CandlManager::new();
let mut win_datas = HashMap::<WindowId, LumData>::default();
for idx in 0..3 {
let wid = &win_manager.create_window::<_, LumSurface>(
&el,
el.primary_monitor().video_modes().next().unwrap(),
CandlDimension::Classic(800, 400),
&format!("test luminance #{}", idx+1),
CandlOptions::default()
).unwrap();
win_datas.insert(
wid.clone(),
LumData::new(win_manager.get_current(wid.clone()).unwrap())
);
}
el.run(move |evt, _, ctrl_flow| {
match evt {
Event::LoopDestroyed => return,
Event::WindowEvent {event, window_id} => match event {
WindowEvent::Resized(physical_size) =>
win_manager.get_current(window_id).unwrap().resize(physical_size),
WindowEvent::CloseRequested
| WindowEvent::KeyboardInput {
input: KeyboardInput {
state: ElementState::Released,
virtual_keycode: Some(VirtualKeyCode::Escape),
..
}, ..
} => win_manager.remove_window(window_id),
_ => ()
}
Event::RedrawRequested(win_id) => {
let surface = win_manager.get_current(win_id.clone()).unwrap();
let back_buffer = surface.back_buffer();
let win_data = win_datas.get(&win_id).unwrap();
surface.pipeline_builder().pipeline(
&back_buffer,
&PipelineState::default(),
|_, mut shd_gate| {
shd_gate.shade(&win_data.program, |_, mut rdr_gate| {
rdr_gate.render(&RenderState::default(), |mut tess_gate| {
tess_gate.render(&win_data.tess);
});
});
}
);
surface.swap_buffers();
}
_ => ()
}
if win_manager.is_empty() { *ctrl_flow = ControlFlow::Exit }
else { *ctrl_flow = ControlFlow::Wait }
});
}
| true |
7e1c356839477d107cad8f36ca450bd30b8c15e9
|
Rust
|
fplust/rlox
|
/src/ast_printer.rs
|
UTF-8
| 1,781 | 3.421875 | 3 |
[] |
no_license
|
use crate::expr::{
Expr, Visitor, Binary, Grouping, Literal, Unary
};
use crate::tokentype::Literals;
pub struct AstPrinter;
impl AstPrinter {
pub fn print(&self, expr: &Expr) -> String {
expr.accept(self)
}
}
impl Visitor<String> for AstPrinter {
fn visit_binary_expr(&self, expr: &Binary) -> String {
format!(
"({} {} {})",
expr.operator.lexeme,
expr.left.accept(self),
expr.right.accept(self)
)
}
fn visit_grouping_expr(&self, expr: &Grouping) -> String {
format!("(group {})", expr.expression.accept(self))
}
fn visit_literal_expr(&self, expr: &Literal) -> String {
match expr.value {
Literals::NUMBER(n) => format!("{}", n),
Literals::STRING(ref s) => format!("{}", s),
Literals::BOOL(s) => format!("{}", s),
Literals::NIL(s) => format!("{:?}", s),
}
}
fn visit_unary_expr(&self, expr: &Unary) -> String {
format!(
"({} {})",
expr.operator.lexeme,
expr.right.accept(self)
)
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_print_ast() {
use crate::tokentype::TokenType;
use crate::token::Token;
let minus = Token::new(TokenType::MINUS, '-'.to_string(), None, 1);
let star = Token::new(TokenType::STAR, "*".to_string(), None, 1);
let num1 = Literals::NUMBER(123.0);
let num2 = Literals::NUMBER(45.67);
let expression = Binary::new(
Unary::new(minus, Literal::new(num1)),
star,
Grouping::new(Literal::new(num2)),
);
let printer = AstPrinter {};
println!("{}", printer.print(&expression));
}
}
| true |
22252cf4fa5247e6aeb1c92249770beebffead71
|
Rust
|
oxidecomputer/third-party-api-clients
|
/slack/src/rtm.rs
|
UTF-8
| 1,809 | 2.765625 | 3 |
[
"MIT"
] |
permissive
|
use crate::Client;
use crate::ClientResult;
pub struct Rtm {
pub client: Client,
}
impl Rtm {
#[doc(hidden)]
pub fn new(client: Client) -> Self {
Rtm { client }
}
/**
* This function performs a `GET` to the `/rtm.connect` endpoint.
*
* Starts a Real Time Messaging session.
*
* FROM: <https://api.slack.com/methods/rtm.connect>
*
* **Parameters:**
*
* * `token: &str` -- Authentication token. Requires scope: `rtm:stream`.
* * `batch_presence_aware: bool` -- Batch presence deliveries via subscription. Enabling changes the shape of `presence_change` events. See [batch presence](/docs/presence-and-status#batching).
* * `presence_sub: bool` -- Only deliver presence events when requested by subscription. See [presence subscriptions](/docs/presence-and-status#subscriptions).
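*
* A usage sketch (not from the upstream API docs; it assumes an already
* constructed `Rtm` value named `rtm`, whose creation is elided):
*
* ```ignore
* let resp = rtm.connect(true, false).await?;
* ```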
*/
pub async fn connect(
&self,
batch_presence_aware: bool,
presence_sub: bool,
) -> ClientResult<crate::Response<crate::types::RtmConnectSchema>> {
let mut query_args: Vec<(String, String)> = Default::default();
if batch_presence_aware {
query_args.push((
"batch_presence_aware".to_string(),
batch_presence_aware.to_string(),
));
}
if presence_sub {
query_args.push(("presence_sub".to_string(), presence_sub.to_string()));
}
let query_ = serde_urlencoded::to_string(&query_args).unwrap();
let url = self.client.url(&format!("/rtm.connect?{}", query_), None);
self.client
.get(
&url,
crate::Message {
body: None,
content_type: None,
},
)
.await
}
}
| true |
c9efdfedd0a43c96cdc292d9e77652efaab3a134
|
Rust
|
slagroom/aoc-2019
|
/05/rust/main.rs
|
UTF-8
| 6,514 | 3.21875 | 3 |
[] |
no_license
|
use std::collections::HashMap;
use std::error::Error;
use std::fmt;
use std::io;
use std::io::BufRead;
#[derive(Debug)]
struct MemoryAccessError {
address: usize,
}
impl MemoryAccessError {
fn new(address: &usize) -> MemoryAccessError {
return MemoryAccessError { address: *address };
}
}
impl fmt::Display for MemoryAccessError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
return write!(f, "Invalid memory access @ {}", self.address);
}
}
impl Error for MemoryAccessError {}
#[derive(Debug)]
struct InvalidInstructionError {
instruction: isize,
address: usize,
}
impl InvalidInstructionError {
fn new (instruction: &isize, address: &usize) -> InvalidInstructionError {
return InvalidInstructionError { instruction: *instruction, address: *address };
}
}
impl fmt::Display for InvalidInstructionError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
return write!(f, "Invalid instruction {} @ {}", self.instruction, self.address);
}
}
impl Error for InvalidInstructionError {}
struct IntcodeProcessor {
program_counter: usize,
memory: HashMap<usize, isize>,
read: fn() -> isize,
write: fn(isize),
}
impl IntcodeProcessor {
fn new(read: fn() -> isize, write: fn(isize)) -> IntcodeProcessor {
return IntcodeProcessor {
program_counter: 0,
memory: HashMap::new(),
read,
write,
};
}
fn store(&mut self, address: usize, word: isize) {
self.memory.insert(address, word);
}
fn fetch(&self, address: &usize) -> isize {
return self.memory.get(address)
.map(|v| *v)
.ok_or(MemoryAccessError::new(address))
.unwrap();
}
fn run(&mut self) {
loop {
match self.opcode() {
1 => self.add(),
2 => self.mul(),
3 => self.input(),
4 => self.output(),
5 => self.jnz(),
6 => self.jz(),
7 => self.lt(),
8 => self.eq(),
99 => break,
x => return Err(InvalidInstructionError::new(&x, &self.program_counter)).unwrap(),
}
}
}
fn instruction(&self) -> isize {
return self.fetch(&self.program_counter);
}
fn opcode(&self) -> isize {
return self.fetch(&self.program_counter) % 100;
}
fn add(&mut self) {
let arg1 = self.val_arg(1);
let arg2 = self.val_arg(2);
let dst = self.ref_arg(3);
self.store(dst as usize, arg1 + arg2);
self.program_counter += 4;
}
fn mul(&mut self) {
let arg1 = self.val_arg(1);
let arg2 = self.val_arg(2);
let dst = self.ref_arg(3);
self.store(dst as usize, arg1 * arg2);
self.program_counter += 4;
}
fn input(&mut self) {
let dst = self.ref_arg(1);
self.store(dst as usize, (self.read)());
self.program_counter += 2;
}
fn output(&mut self) {
let arg = self.val_arg(1);
(self.write)(arg);
self.program_counter += 2;
}
fn jnz(&mut self) {
let arg1 = self.val_arg(1);
let arg2 = self.val_arg(2);
self.program_counter = match arg1 {
0 => self.program_counter + 3,
_ => arg2 as usize,
}
}
fn jz(&mut self) {
let arg1 = self.val_arg(1);
let arg2 = self.val_arg(2);
self.program_counter = match arg1 {
0 => arg2 as usize,
_ => self.program_counter + 3,
}
}
fn lt(&mut self) {
let arg1 = self.val_arg(1);
let arg2 = self.val_arg(2);
let dst = self.ref_arg(3);
self.store(dst as usize, match arg1 < arg2 {
true => 1,
false => 0,
});
self.program_counter += 4;
}
fn eq(&mut self) {
let arg1 = self.val_arg(1);
let arg2 = self.val_arg(2);
let dst = self.ref_arg(3);
self.store(dst as usize, match arg1 == arg2 {
true => 1,
false => 0,
});
self.program_counter += 4;
}
fn val_arg(&self, position: usize) -> isize {
let value = self.fetch(&(self.program_counter + position));
let mode = self.arg_mode(position);
let arg = match mode {
0 => self.fetch(&(value as usize)), // position mode
1 => value, // immediate mode
x => panic!("invalid parameter mode {} (@ {})", x, self.program_counter),
};
return arg;
}
fn ref_arg(&self, position: usize) -> isize {
return match self.arg_mode(position) {
0 => self.fetch(&(self.program_counter + position)), // position mode
x => panic!("invalid parameter mode {} for ref arg (@ {})", x, self.program_counter),
}
}
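// Parameter modes are encoded in the decimal digits above the two-digit opcode; `position` selects the digit (0 = position mode, 1 = immediate mode).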
fn arg_mode(&self, position: usize) -> usize {
let mode_mask = 10_usize.pow(position as u32 + 1);
let shifted = self.instruction() as usize / mode_mask;
let mode = shifted % 10;
return mode;
}
}
#[derive(Debug)]
struct InputError {
data: String,
}
impl InputError {
fn new (data: &str) -> InputError {
return InputError { data: data.to_string() };
}
}
impl fmt::Display for InputError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
return write!(f, "Invalid input: {}", self.data);
}
}
impl Error for InputError {}
fn main() {
let program = io::stdin().lock().lines().next().unwrap().unwrap()
.split(",")
.map(|w| w.parse::<isize>().unwrap())
.collect::<Vec<_>>();
fn input() -> isize {
return io::stdin().lock().lines().next().unwrap().unwrap()
.parse::<isize>()
.or_else(|err| Err(InputError::new(&format!("{:?}", err))))
.unwrap();
}
fn output(val: isize) {
println!("{}", val);
}
let mut computer = IntcodeProcessor::new(input, output);
program.iter().enumerate()
.for_each(|(addr,word)| {
computer.store(addr, *word);
});
println!("part 1:");
computer.run();
let mut computer = IntcodeProcessor::new(input, output);
program.iter().enumerate()
.for_each(|(addr,word)| {
computer.store(addr, *word);
});
println!("\r\npart 2:");
computer.run();
}
| true |
a128dae736f916dacaa2b6bda7d8f437bcac7cdd
|
Rust
|
flegac/advent-2020
|
/src/day1.rs
|
UTF-8
| 3,002 | 3.171875 | 3 |
[] |
no_license
|
use std::cmp::Ordering;
use itertools::{Itertools, sorted};
use crate::utils::read_lines;
type Int = u32;
type Int2 = u64;
struct Input<'a> {
filename: &'a str,
target: Int,
}
const BASIC: Input = Input {
filename: "src/day1.txt",
target: 2020,
};
const HARD1: Input = Input {
filename: "src/bad_case.txt",
target: 600006,
};
const HARD2: Input = Input {
filename: "src/bad_case2.txt",
target: 6,
};
fn solve_fast(input: Input, n: usize) -> Option<Int2> {
let items = sorted(parse_file(&input.filename)).rev().collect_vec();
solve(&items, input.target, n)
}
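// Recursively chooses `n` entries from the descending-sorted slice that sum to `target` and returns their product (or None).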
fn solve(items: &[Int], target: Int, n: usize) -> Option<Int2> {
if n == 1 {
find_index(items, target).map(|index| items[index] as Int2)
} else {
(0..items.len())
.filter(|i| items[*i] <= target)
.map(|i| (i, solve(&items[i + 1..], target - items[i], n - 1)))
.find(|(_, x)| x.is_some())
.map(|(i, x)| x.map(|x| x * items[i] as Int2))
.flatten()
}
}
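// Binary search for `value` in a slice sorted in descending order.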
fn find_index(items: &[Int], value: Int) -> Option<usize> {
if items.is_empty() || items[items.len() - 1] > value || value > items[0] {
return None;
}
let j = items.len() / 2;
match items[j].cmp(&value) {
Ordering::Equal => { Some(j) }
Ordering::Less => { find_index(&items[0..j], value) }
Ordering::Greater => {
find_index(&items[j + 1..], value)
.map(|index| index + j + 1)
}
}
}
pub fn day1_benchmark() {
// println!("{} {}",
// solve1(2).expect("No solution"),
// solve1(3).expect("No solution"));
// timeit!({
// solve1(2);
// solve1(3);
// });
println!("{} {}",
solve_fast(BASIC, 2).expect("No solution"),
solve_fast(BASIC, 3).expect("No solution")
);
timeit!({
solve_fast(BASIC, 2);
solve_fast(BASIC, 3);
});
timeit!({
solve_fast(HARD1, 2);
solve_fast(HARD1, 3);
});
timeit!({
solve_fast(HARD2, 2);
solve_fast(HARD2, 3);
});
}
fn solve1(input: Input, n: usize) -> Option<Int2> {
parse_file(&input.filename)
.into_iter()
.combinations(n)
.filter(|vec| vec.iter().sum::<Int>() == input.target)
.find(|vec| vec.iter().sum::<Int>() == input.target)
.map(|vec| vec.iter().product::<Int>() as Int2)
}
fn solve2(input: Input, n: usize) -> Option<Int2> {
parse_file(&input.filename)
.iter()
.copied()
.tuple_combinations()
.find(|(a, b, c)| a + b + c == input.target)
.map(|(a, b, c)| (a * b * c) as Int2)
}
fn parse_file(filename: &str) -> Vec<Int> {
let mut all = vec![];
if let Ok(lines) = read_lines(filename) {
for line in lines {
if let Ok(value) = line {
all.push(value.parse::<Int>().unwrap());
}
}
}
return all;
}
| true |
b40a330eaf9ccafa1f908c7c89c477fa9db76568
|
Rust
|
stackcats/leetcode
|
/algorithms/easy/maximum_product_of_two_elements_in_an_array.rs
|
UTF-8
| 360 | 2.765625 | 3 |
[
"MIT"
] |
permissive
|
impl Solution {
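// One pass tracking the two largest values h1 >= h2; the answer is (h1 - 1) * (h2 - 1).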
pub fn max_product(nums: Vec<i32>) -> i32 {
let mut h1 = -1;
let mut h2 = -2;
for i in 0..nums.len() {
if nums[i] >= h1 {
h2 = h1;
h1 = nums[i];
} else if nums[i] > h2 {
h2 = nums[i];
}
}
(h1 - 1) * (h2 - 1)
}
}
| true |
be4f8b2a22252f61ccd8d8f3f28c929c0afd9a0b
|
Rust
|
xiyan128/codewar_archive
|
/6-kyu/a-rule-of-divisibility-by-13/rust/solution.rs
|
UTF-8
| 495 | 3.5 | 4 |
[] |
no_license
|
fn thirt(n: i64) -> i64{
let mut cache = n;
while cache != r_13(cache) {
cache = r_13(cache);
}
cache
}
fn digits(n: i64) -> Vec<i64> {
let (mut d, mut num) = (vec![], n.clone());
while num>0 {
d.push(num%10);
num /= 10;
}
d
}
fn r_13(n: i64) -> i64 {
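// Successive powers of 10 are congruent to 1, 10, 9, 12, 3, 4 (mod 13), repeating with period 6; weight each digit (least significant first) accordingly and sum.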
let mut seq = vec![1, 10, 9, 12, 3, 4];
let mut sum = 0;
for x in digits(n) {
sum += x * seq.first().unwrap();
seq.rotate_left(1);
}
sum
}
| true |
6b9f81031cca010b3b902df6619eba6e9ea60225
|
Rust
|
LucasPickering/gdlk
|
/crates/wasm/tests/test_wasm.rs
|
UTF-8
| 10,153 | 2.796875 | 3 |
[
"MIT"
] |
permissive
|
//! Integration tests for the GDLK Wasm API
#![deny(clippy::all)]
// Needed for the macro
#![allow(clippy::bool_assert_comparison)]
use gdlk_wasm::{
compile, HardwareSpec, LangValue, ProgramSpec, SourceElement, Span,
};
use maplit::hashmap;
use std::collections::HashMap;
use wasm_bindgen_test::wasm_bindgen_test;
/// Checks each portion of the given machine's state, and compares each field
/// to the corresponding expected value.
macro_rules! assert_machine_state {
(
$machine:expr,
program_counter = $program_counter:expr,
cycle_count = $cycle_count:expr,
terminated = $terminated:expr,
successful = $successful:expr,
input = $input:expr,
output = $output:expr,
registers = $registers:expr,
stacks = $stacks:expr,
error = $error:expr
$(,)?
) => {{
let m = &$machine;
assert_eq!(m.program_counter(), $program_counter, "program counter");
assert_eq!(m.cycle_count(), $cycle_count, "cycle count");
assert_eq!(m.terminated(), $terminated, "terminated");
assert_eq!(m.successful(), $successful, "successful");
assert_eq!(m.input(), $input as &[LangValue], "input");
assert_eq!(m.output(), $output as &[LangValue], "output");
assert_eq!(
m.wasm_registers()
.into_serde::<HashMap<String, LangValue>>()
.unwrap(),
$registers,
"registers"
);
assert_eq!(
m.wasm_stacks()
.into_serde::<HashMap<String, Vec<LangValue>>>()
.unwrap(),
$stacks,
"stacks"
);
assert_eq!(m.wasm_error(), $error, "error");
}};
}
#[wasm_bindgen_test]
fn test_compile_success() {
let result = compile(
&HardwareSpec {
num_registers: 1,
num_stacks: 2,
max_stack_length: 10,
},
&ProgramSpec::new(vec![1], vec![1]),
"
READ RX0
WRITE RX0
",
);
let compile_success = result.unwrap();
let instructions = compile_success.instructions();
assert_eq!(
instructions.into_serde::<Vec<SourceElement>>().unwrap(),
vec![
SourceElement {
text: "TODO".into(),
span: Span {
offset: 9,
length: 8,
start_line: 2,
start_col: 9,
end_line: 2,
end_col: 17
}
},
SourceElement {
text: "TODO".into(),
span: Span {
offset: 26,
length: 9,
start_line: 3,
start_col: 9,
end_line: 3,
end_col: 18
}
}
]
);
let machine = compile_success.machine();
// Test initial state
assert_machine_state!(
machine,
program_counter = 0,
cycle_count = 0,
terminated = false,
successful = false,
input = &[1],
output = &[],
registers = hashmap! {
"RLI".into() => 1,
"RS0".into() => 0,
"RS1".into() => 0,
"RX0".into() => 0,
},
stacks = hashmap! {
"S0".into() => vec![],
"S1".into() => vec![],
},
error = None
);
}
#[wasm_bindgen_test]
fn test_compile_errors() {
let result = compile(
&HardwareSpec::default(),
&ProgramSpec::default(),
"
READ RX1
PUSH 3 S0
",
);
let errors = result.unwrap_err();
assert_eq!(
errors.into_serde::<Vec<SourceElement>>().unwrap(),
vec![
SourceElement {
text:
"Validation error at 2:14: Invalid reference to register `RX1`"
.into(),
span: Span {
offset: 14,
length: 3,
start_line: 2,
start_col: 14,
end_line: 2,
end_col: 17,
}
},
SourceElement {
text: "Validation error at 3:16: Invalid reference to stack `S0`"
.into(),
span: Span {
offset: 33,
length: 2,
start_line: 3,
start_col: 16,
end_line: 3,
end_col: 18,
}
}
]
);
}
#[allow(clippy::cognitive_complexity)]
#[wasm_bindgen_test]
fn test_execute() {
let result = compile(
&HardwareSpec {
num_registers: 1,
num_stacks: 1,
max_stack_length: 10,
},
&ProgramSpec::new(vec![1, 2, 3], vec![1, 2, 3]),
"
START:
JEZ RLI END
READ RX0
PUSH RX0 S0
POP S0 RX0
WRITE RX0
JMP START
END:
",
);
let mut machine = result.unwrap().machine();
// Test initial state
assert_machine_state!(
machine,
program_counter = 0,
cycle_count = 0,
terminated = false,
successful = false,
input = &[1, 2, 3],
output = &[],
registers = hashmap! {
"RLI".into() => 3,
"RS0".into() => 0,
"RX0".into() => 0,
},
stacks = hashmap! {
"S0".into() => vec![],
},
error = None
);
// JEZ
assert!(machine.wasm_execute_next());
assert_machine_state!(
machine,
program_counter = 1,
cycle_count = 1,
terminated = false,
successful = false,
input = &[1, 2, 3],
output = &[],
registers = hashmap! {
"RLI".into() => 3,
"RS0".into() => 0,
"RX0".into() => 0,
},
stacks = hashmap! {
"S0".into() => vec![],
},
error = None
);
// READ
assert!(machine.wasm_execute_next());
assert_machine_state!(
machine,
program_counter = 2,
cycle_count = 2,
terminated = false,
successful = false,
input = &[2, 3],
output = &[],
registers = hashmap! {
"RLI".into() => 2,
"RS0".into() => 0,
"RX0".into() => 1,
},
stacks = hashmap! {
"S0".into() => vec![],
},
error = None
);
// PUSH
assert!(machine.wasm_execute_next());
assert_machine_state!(
machine,
program_counter = 3,
cycle_count = 3,
terminated = false,
successful = false,
input = &[2, 3],
output = &[],
registers = hashmap! {
"RLI".into() => 2,
"RS0".into() => 1,
"RX0".into() => 1,
},
stacks = hashmap! {
"S0".into() => vec![1],
},
error = None
);
// POP
assert!(machine.wasm_execute_next());
assert_machine_state!(
machine,
program_counter = 4,
cycle_count = 4,
terminated = false,
successful = false,
input = &[2, 3],
output = &[],
registers = hashmap! {
"RLI".into() => 2,
"RS0".into() => 0,
"RX0".into() => 1,
},
stacks = hashmap! {
"S0".into() => vec![],
},
error = None
);
// WRITE
assert!(machine.wasm_execute_next());
assert_machine_state!(
machine,
program_counter = 5,
cycle_count = 5,
terminated = false,
successful = false,
input = &[2, 3],
output = &[1],
registers = hashmap! {
"RLI".into() => 2,
"RS0".into() => 0,
"RX0".into() => 1,
},
stacks = hashmap! {
"S0".into() => vec![],
},
error = None
);
// JMP
assert!(machine.wasm_execute_next());
assert_machine_state!(
machine,
program_counter = 0,
cycle_count = 6,
terminated = false,
successful = false,
input = &[2, 3],
output = &[1],
registers = hashmap! {
"RLI".into() => 2,
"RS0".into() => 0,
"RX0".into() => 1,
},
stacks = hashmap! {
"S0".into() => vec![],
},
error = None
);
// Execute the rest of the program
while !machine.terminated() {
assert!(machine.wasm_execute_next());
}
// Check final state
assert_machine_state!(
machine,
program_counter = 6,
cycle_count = 19,
terminated = true,
successful = true,
input = &[],
output = &[1, 2, 3],
registers = hashmap! {
"RLI".into() => 0,
"RS0".into() => 0,
"RX0".into() => 3,
},
stacks = hashmap! {
"S0".into() => vec![],
},
error = None
);
}
#[wasm_bindgen_test]
fn test_runtime_error() {
let result = compile(
&HardwareSpec::default(),
&ProgramSpec::default(),
"READ RX0",
);
let mut machine = result.unwrap().machine();
assert!(machine.wasm_execute_next());
assert_machine_state!(
machine,
program_counter = 0,
cycle_count = 1,
terminated = true,
successful = false,
input = &[],
output = &[],
registers = hashmap! {
"RLI".into() => 0,
"RX0".into() => 0,
},
stacks = hashmap! {},
error = Some(SourceElement {
text: "Runtime error at 1:1: Read attempted on empty input".into(),
span: Span {
offset: 0,
length: 8,
start_line: 1,
start_col: 1,
end_line: 1,
end_col: 9
}
})
);
}
| true |
d26ad714ef6cc1b65960ee33c1dd6f9eb9a547d6
|
Rust
|
dowlandaiello/notedly
|
/modules/web/server/src/api/wrapper.rs
|
UTF-8
| 6,167 | 3.078125 | 3 |
[
"MIT"
] |
permissive
|
use actix_web::{client::Client, error, Error};
use serde::{Deserialize, Serialize};
use std::{default::Default, io};
/// A wrapper for each of the respective oauth provider APIs (Google, GitHub).
pub struct User {
/// The access token associated with the user
access_token: String,
/// Cached results for the /email GET
email: String,
/// The provider of the authorization service
provider: String,
/// The HTTP client used to make requests
client: Client,
}
/// A response from the GitHub API for the user's emails.
#[derive(Serialize, Deserialize, Debug)]
struct GitHubEmailResponse {
pub emails: Vec<GitHubEmail>,
}
impl GitHubEmailResponse {
/// Initializes a new GitHubEmailResponse with the given GitHub emails.
///
/// # Arguments
///
/// * `emails` - The emails contained in the response
pub fn new(emails: Vec<GitHubEmail>) -> Self {
Self { emails } // Return the new instance
}
/// Gets the most suitable email of the available GitHub emails.
pub fn best_email(&self) -> &GitHubEmail {
// We'll use the first email until we find a better one
let mut best: &GitHubEmail = &self.emails[0];
// Iterate through the emails
for i in 0..self.emails.len() {
let gh_email: &GitHubEmail = &self.emails[i]; // Get a reference to the email
// Check if the email is at least verified
if gh_email.verified {
best = gh_email; // Set the new best email
if gh_email.primary {
break; // Stop, use the best possible email
}
}
}
best // Return the best email
}
}
/// A GitHub preference regarding a user's email.
#[derive(Serialize, Deserialize, Debug)]
struct GitHubEmail {
/// The email from the response
pub email: String,
/// Whether or not the email has been verified
pub verified: bool,
/// Whether or not the email is a primary email
pub primary: bool,
/// The visibility of the email
#[serde(skip)]
visibility: String,
}
/// A GitHub user ID.
#[derive(Serialize, Deserialize)]
struct GitHubIDResponse {
/// The ID of the user
id: i32,
}
impl User {
/// Initializes a new user from the given access token and provider.
pub fn new(access_token: String, provider: String) -> Self {
Self {
email: "".to_owned(),
access_token,
provider,
client: Client::default(),
} // Return the new instance
}
/// Gets the URL of the account's oauth provider.
///
/// # Arguments
///
/// * `postfix` - A string appended to the end of the provider url
///
/// # Example
///
/// ```
/// use server::api::wrapper::User;
///
/// // A new GitHub user
/// let u = User::new("SOME_ACCESS_TOKEN".to_owned(), "github".to_owned());
///
/// println!("{}", u.provider_url("emails")); // => https://api.github.com/user/emails
/// ```
pub fn provider_url(&self, postfix: &str) -> String {
// Return the respective authentication URL
if self.provider == "google" {
// Idk some google stuff
format!(
"https://openidconnect.googleapis.com/v1/userinfo{}",
if postfix != "" {
format!("/{}", postfix)
} else {
"".to_owned()
}
)
} else {
// A URL for all of the information known about the user
format!(
"https://api.github.com/user{}",
if postfix != "" {
format!("/{}", postfix)
} else {
"".to_owned()
}
)
}
}
/// Gets the oauth ID of the user from the known provider.
pub async fn oauth_id(&self) -> Result<i32, Error> {
// Send a request asking for the oauth ID of the user with the matching oauth token, and
// await the response from the service
let mut response = self
.client
.get(self.provider_url("")) // Start the request
.set_header("Authorization", format!("Bearer {}", self.access_token)) // Tell GitHub which user we would like to get the ID of
.set_header("User-Agent", "Notedly") // Make sure GitHub sees this as a valid request
.send() // Send the request
.await?; // Await the response
if self.provider == "github" {
// Convert the general response to a GitHub response
let github_resp: GitHubIDResponse = response.json::<GitHubIDResponse>().await?;
// Return the identifier of the user as an owned string
Ok(github_resp.id)
} else {
Err(error::ErrorBadRequest(io::Error::new(
io::ErrorKind::Other,
"the provider does not exit",
))) // User did something bad
}
}
/// Gets the email of the user from the known provider.
pub async fn email(&mut self) -> Result<&str, Error> {
// Send a request asking for the email of the user with the matching oauth token, and await
// the response from the service
let mut response = self
.client
.get(self.provider_url("emails")) // Start the request
.set_header("Authorization", format!("Bearer {}", self.access_token)) // Tell GitHub which user we would like to get the email of by sending our token
.set_header("User-Agent", "Notedly") // Make sure GitHub sees this as a valid request
.send() // Send the request
.await?; // Yay async
Ok(if self.provider == "github" {
// Get the email from the GitHub response
self.email = GitHubEmailResponse::new(response.json::<Vec<GitHubEmail>>().await?)
.best_email()
.email
.clone();
&self.email // Return the user's email
} else {
""
// TODO: Add Google openID connect support
})
}
}
| true |
929f6b1e6881e2bdcc8db9747019211f982bee3f
|
Rust
|
zhaoshenglong/Leetcode
|
/dynamic_programming_practice/740_medimum_delete_and_earn.rs
|
UTF-8
| 989 | 3.140625 | 3 |
[] |
no_license
|
use std::collections::HashMap;
struct Solution;
impl Solution {
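// House-robber style DP over the sorted distinct values: taking a value v earns v * count(v) but rules out v - 1; dp[0]/dp[1] hold the best totals for the previous two positions.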
pub fn delete_and_earn(nums: Vec<i32>) -> i32 {
let mut num_cnt = HashMap::new();
for &num in &nums {
*num_cnt.entry(num).or_insert(0) += 1;
}
let mut uni_nums = Vec::new();
for &k in num_cnt.keys() {
uni_nums.push(k);
}
uni_nums.sort();
let n = uni_nums.len();
let mut dp = vec![0, uni_nums[0] * num_cnt.get(&uni_nums[0]).unwrap()];
for i in 1..n {
let mut tmp = dp[1];
let cnt = num_cnt.get(&uni_nums[i]).unwrap();
if uni_nums[i] > uni_nums[i - 1] + 1 {
tmp += uni_nums[i] * cnt;
} else {
tmp = tmp.max(dp[0] + uni_nums[i] * cnt);
}
dp[0] = dp[1];
dp[1] = tmp;
}
dp[1]
}
}
fn main() {
let nums = vec![2,2,3,3,3,4];
println!("{}", Solution::delete_and_earn(nums));
}
| true |
fb13570c908226bec2e3e64668ec94104872f7df
|
Rust
|
dobrite/dood-rs
|
/src/pixset.rs
|
UTF-8
| 3,771 | 3.359375 | 3 |
[] |
no_license
|
use std::collections::HashMap;
pub type TexCoords = [[f32; 2]; 4];
#[derive(Debug, Eq, PartialEq, Hash)]
pub enum Pix {
Dood,
Food,
UpArrow,
DownArrow,
RightArrow,
LeftArrow,
Wall,
Period,
Comma,
Quotes,
Apostrophe,
Colon,
SemiColon,
Empty,
}
pub struct Pixset {
pub tiles: HashMap<Pix, TexCoords>,
pub total_tiles: i32,
}
impl Pixset {
pub fn new(total_tiles: i32) -> Pixset {
// TODO fix
//let tiles = vec![
// Pix::DownArrow, Pix::LeftArrow, Pix::Dood, Pix::Food,
// Pix::UpArrow, Pix::RightArrow, Pix::None, Pix::None,
// Pix::None, Pix::None, Pix::None, Pix::None,
// Pix::None, Pix::None, Pix::None, Pix::None,
//];
//let tile_dim: i32 = (total_tiles as f32).sqrt() as i32;
//for y in (0..tile_dim) {
// for x in (0..tile_dim) {
// }
//}
let mut tiles: HashMap<Pix, TexCoords> = HashMap::new();
// TODO fix
tiles.insert(Pix::DownArrow, get_tex_coords(total_tiles, vec![0, 3]));
tiles.insert(Pix::LeftArrow, get_tex_coords(total_tiles, vec![1, 3]));
tiles.insert(Pix::Dood, get_tex_coords(total_tiles, vec![2, 3]));
tiles.insert(Pix::Food, get_tex_coords(total_tiles, vec![3, 3]));
tiles.insert(Pix::UpArrow, get_tex_coords(total_tiles, vec![0, 2]));
tiles.insert(Pix::RightArrow, get_tex_coords(total_tiles, vec![1, 2]));
tiles.insert(Pix::Wall, get_tex_coords(total_tiles, vec![2, 2]));
tiles.insert(Pix::Period, get_tex_coords(total_tiles, vec![3, 2]));
tiles.insert(Pix::Comma, get_tex_coords(total_tiles, vec![0, 1]));
tiles.insert(Pix::Quotes, get_tex_coords(total_tiles, vec![1, 1]));
tiles.insert(Pix::Apostrophe, get_tex_coords(total_tiles, vec![2, 1]));
tiles.insert(Pix::Colon, get_tex_coords(total_tiles, vec![3, 1]));
tiles.insert(Pix::SemiColon, get_tex_coords(total_tiles, vec![0, 0]));
tiles.insert(Pix::Empty, get_tex_coords(total_tiles, vec![1, 0]));
Pixset { tiles: tiles, total_tiles: total_tiles }
}
pub fn get(&self, pix: &Pix) -> [[f32; 2]; 4] {
*self.tiles.get(pix).expect("tile did not contain that pix")
}
}
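// Maps a tile's [column, row] grid position (row 0 at the top) onto the four corner UV coordinates of that tile within a square atlas of `total_tiles` tiles, in the order top-left, top-right, bottom-right, bottom-left.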
fn get_tex_coords(total_tiles: i32, loc: Vec<i32>) -> [[f32; 2]; 4] {
let tile_dim: f32 = (total_tiles as f32).sqrt();
let per_tile: f32 = 1.0 / tile_dim;
let top = 1.0 - loc[1] as f32 * per_tile;
let right = (loc[0] + 1) as f32 * per_tile;
let bottom = 1.0 - (loc[1] + 1) as f32 * per_tile;
let left = loc[0] as f32 * per_tile;
[
[left, top],
[right, top],
[right, bottom],
[left, bottom],
]
}
#[cfg(test)]
mod tests {
use super::get_tex_coords;
#[test]
fn it_gets_tex_coords_for_top_left() {
assert!(get_tex_coords(16, vec![0, 0]) == [
[0.0, 1.0],
[0.25, 1.0],
[0.25, 0.75],
[0.0, 0.75],
]);
}
#[test]
fn it_gets_tex_coords_for_top_right() {
assert!(get_tex_coords(16, vec![3, 0]) == [
[0.75, 1.0],
[1.0, 1.0],
[1.0, 0.75],
[0.75, 0.75],
]);
}
#[test]
fn it_gets_tex_coords_for_bottom_right() {
assert!(get_tex_coords(16, vec![3, 3]) == [
[0.75, 0.25],
[1.0, 0.25],
[1.0, 0.0],
[0.75, 0.0],
]);
}
#[test]
fn it_gets_tex_coords_for_bottom_left() {
assert!(get_tex_coords(16, vec![0, 3]) == [
[0.0, 0.25],
[0.25, 0.25],
[0.25, 0.0],
[0.0, 0.0],
]);
}
}
| true |
2240ef2622cb444df82383b68d9d82baf266d696
|
Rust
|
sno2/unilang
|
/src/models/statement.rs
|
UTF-8
| 4,205 | 3.4375 | 3 |
[
"MIT"
] |
permissive
|
pub use crate::{Language, ToCode};
#[derive(Debug)]
pub struct VariableInit {
pub name: Box<dyn ToCode>,
pub mutable: Option<bool>,
pub typ: Option<Box<dyn ToCode>>,
pub value: Box<dyn ToCode>,
}
impl std::default::Default for VariableInit {
fn default() -> Self {
Self {
name: Box::new("foo"),
mutable: None,
typ: None,
value: Box::new("bar"),
}
}
}
impl VariableInit {
pub fn with_name(mut self, name: impl ToCode + 'static) -> Self {
self.name = Box::new(name);
self
}
pub fn with_mutable(mut self, is_mutable: bool) -> Self {
self.mutable = Some(is_mutable);
self
}
pub fn with_type(mut self, typ: impl ToCode + 'static) -> Self {
self.typ = Some(Box::new(typ));
self
}
pub fn with_value(mut self, value: impl ToCode + 'static) -> Self {
self.value = Box::new(value);
self
}
}
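// Rendering summary (per the match below): Rust => `let [mut ]name[:type]=value;`, C++ => `type name=value;`, Python => `name[:type]=value` (type only when `include_types`), TypeScript => `let|const name[:type]=value;`.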
impl ToCode for VariableInit {
fn to_code(&self, language: Language) -> String {
let Self {
name,
mutable,
typ,
value,
} = self;
match language {
Language::Rust => format!(
"let {}{}{}={};",
match mutable {
Some(true) => {
String::from("mut ")
}
_ => String::new(),
},
name.to_code(language),
match typ {
Some(typ) => format!(":{}", typ.to_code(language)),
None => String::new(),
},
value.to_code(language)
),
Language::CPP => {
format!(
"{} {}={};",
typ.as_ref().unwrap().to_code(language),
name.to_code(language),
value.to_code(language)
)
}
Language::Python { include_types, .. } => {
format!(
"{}{}={}",
name.to_code(language),
if include_types && typ.is_some() {
format!(":{}", typ.as_ref().unwrap().to_code(language))
} else {
String::new()
},
value.to_code(language)
)
}
Language::TypeScript => {
format!(
"{} {}{}={};",
match mutable {
Some(true) => String::from("let"),
Some(false) | None => String::from("const"),
},
name.to_code(language),
match typ {
Some(typ) => format!(":{}", typ.to_code(language)),
None => String::new(),
},
value.to_code(language)
)
}
}
}
}
#[derive(Debug)]
pub struct AssignVariable<T: ToCode, F: ToCode>(T, F);
impl<T: ToCode, F: ToCode> ToCode for AssignVariable<T, F> {
fn to_code(&self, language: Language) -> String {
let Self(name, value) = self;
match language {
Language::Rust | Language::TypeScript | Language::CPP => {
format!("{}={};", name.to_code(language), value.to_code(language))
}
Language::Python { .. } => {
format!("{}={}", name.to_code(language), value.to_code(language))
}
}
}
}
#[derive(Debug)]
pub struct Return<T: ToCode>(pub Option<T>);
impl<T: ToCode> ToCode for Return<T> {
fn to_code(&self, language: Language) -> String {
let Self(expr) = self;
match language {
Language::Rust | Language::TypeScript | Language::CPP => match expr {
Some(expr) => format!("return {};", expr.to_code(language)),
None => String::from("return;"),
},
Language::Python { .. } => match expr {
Some(expr) => format!("return {}", expr.to_code(language)),
None => String::from("return"),
},
}
}
}
#[derive(Debug)]
pub enum Comment {
/// ## Notes
/// * inserts newline after comment if the language's comment spans rest of
/// the line
Regular(String),
MultiLine(String),
Doc(String),
}
impl ToCode for Comment {
fn to_code(&self, language: Language) -> String {
match self {
Self::Regular(content) => match language {
Language::Rust | Language::TypeScript | Language::CPP => format!("//{}\n", content),
Language::Python { .. } => format!("#{}", content),
},
Self::MultiLine(content) => match language {
Language::Rust | Language::TypeScript | Language::CPP => format!("/*{}*/", content),
Language::Python { .. } => format!("\"\"\"{}\"\"\"", content),
},
Self::Doc(content) => match language {
Language::Rust => format!("///{}\n", content),
Language::TypeScript => format!("/**{}*/", content),
Language::CPP => Self::Regular(content.clone()).to_code(language),
Language::Python { .. } => Self::MultiLine(content.clone()).to_code(language),
},
}
}
}
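#[cfg(test)]
mod tests {
	use super::*;

	// Illustrative checks of the `ToCode` impls above. The exact rendering of
	// names and values depends on the `ToCode` impl for `&str` (defined
	// elsewhere in the crate), so the variable test only asserts the parts
	// fixed by the format strings in this file.
	#[test]
	fn comment_rendering() {
		let comment = Comment::Doc(String::from(" Adds two numbers."));
		assert_eq!(comment.to_code(Language::Rust), "/// Adds two numbers.\n");
		assert_eq!(comment.to_code(Language::TypeScript), "/** Adds two numbers.*/");
	}

	#[test]
	fn variable_init_shape() {
		let init = VariableInit::default()
			.with_name("total")
			.with_type("i32")
			.with_value("0");
		let code = init.to_code(Language::Rust);
		assert!(code.starts_with("let "));
		assert!(code.ends_with(';'));
	}
}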
| true |
66144fd8aad274ea269ce9bedf1e19d076b1c919
|
Rust
|
theshortcut/advent-of-code
|
/2021/src/day3.rs
|
UTF-8
| 2,800 | 3.140625 | 3 |
[] |
no_license
|
use aoc_runner_derive::{aoc, aoc_generator};
#[derive(Clone)]
struct BinaryList(Vec<u32>);
impl BinaryList {
fn new() -> Self {
BinaryList(vec![])
}
fn digit_at(&self, position: usize) -> u32 {
self.0[position]
}
fn push(&mut self, i: u32) {
self.0.push(i)
}
}
impl Into<u32> for BinaryList {
fn into(self) -> u32 {
u32::from_str_radix(
self
.0
.iter()
.map(|digit| digit.to_string())
.collect::<Vec<String>>()
.join("")
.as_str(),
2,
)
.unwrap()
}
}
#[aoc_generator(day3)]
fn parse_input_day3(input: &str) -> Vec<BinaryList> {
input
.lines()
.map(|l| {
BinaryList(
l.trim()
.chars()
.map(|ch| ch.to_digit(10).unwrap())
.collect(),
)
})
.collect()
}
fn most_common_bit_at(lists: &Vec<BinaryList>, position: usize) -> u32 {
let (c0, c1) = lists
.iter()
.map(|b| b.digit_at(position))
.fold((0, 0), |(c0, c1), digit| match digit {
0 => (c0 + 1, c1),
_ => (c0, c1 + 1),
});
if c0 > c1 {
0
} else {
1
}
}
#[aoc(day3, part1)]
fn part1(i: &Vec<BinaryList>) -> u32 {
let (gamma, epsilon) = (0..i[0].0.len()).fold(
(BinaryList::new(), BinaryList::new()),
|(mut gamma, mut epsilon), idx| {
let most_common = most_common_bit_at(i, idx);
gamma.push(most_common);
epsilon.push(if most_common == 1 { 0 } else { 1 });
(gamma, epsilon)
},
);
let gamma_as_int: u32 = gamma.into();
let epsilon_as_int: u32 = epsilon.into();
gamma_as_int * epsilon_as_int
}
fn filter_by_commonality(
lists: &Vec<BinaryList>,
position: usize,
most_common: bool,
) -> BinaryList {
let most_common_digit = most_common_bit_at(lists, position);
let digit_to_match = if most_common {
most_common_digit
} else {
if most_common_digit == 0 {
1
} else {
0
}
};
let filtered: Vec<BinaryList> = lists
.iter()
.filter(|b| b.digit_at(position) == digit_to_match)
.cloned()
.collect();
if filtered.len() == 1 {
filtered[0].clone()
} else {
    filter_by_commonality(&filtered, position + 1, most_common)
}
}
#[aoc(day3, part2)]
fn part2(i: &Vec<BinaryList>) -> u32 {
  let o2 = filter_by_commonality(i, 0, true);
  let co2 = filter_by_commonality(i, 0, false);
let o2_as_int: u32 = o2.into();
let co2_as_int: u32 = co2.into();
o2_as_int * co2_as_int
}
#[cfg(test)]
mod tests {
use super::*;
const INPUT: &str = "00100
11110
10110
10111
10101
01111
00111
11100
10000
11001
00010
01010";
#[test]
fn part1_example() {
assert_eq!(part1(&parse_input_day3(INPUT)), 198)
}
#[test]
fn part2_example() {
assert_eq!(part2(&parse_input_day3(INPUT)), 230)
}
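  // Sanity-check the binary -> integer conversion both parts rely on:
  // the digits 1,0,1,1,0 read as 0b10110 = 22.
  #[test]
  fn binary_list_into_u32() {
    let value: u32 = BinaryList(vec![1, 0, 1, 1, 0]).into();
    assert_eq!(value, 22);
  }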
}
| true |
52717cd4a106cd65ace2e4baab180c5b23b2a57b
|
Rust
|
Azure/iot-identity-service
|
/mini-sntp/src/error.rs
|
UTF-8
| 3,437 | 2.625 | 3 |
[
"MIT"
] |
permissive
|
// Copyright (c) Microsoft. All rights reserved.
#[derive(Debug)]
pub enum Error {
BadServerResponse(BadServerResponseReason),
BindLocalSocket(std::io::Error),
ReceiveServerResponse(std::io::Error),
ResolveNtpPoolHostname(Option<std::io::Error>),
SendClientRequest(std::io::Error),
SetReadTimeoutOnSocket(std::io::Error),
SetWriteTimeoutOnSocket(std::io::Error),
}
impl std::fmt::Display for Error {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
Error::BadServerResponse(reason) => {
write!(f, "could not parse NTP server response: {}", reason)
}
Error::BindLocalSocket(_) => write!(f, "could not bind local UDP socket"),
Error::ReceiveServerResponse(err) => {
write!(f, "could not receive NTP server response: {}", err)
}
Error::ResolveNtpPoolHostname(Some(err)) => {
write!(f, "could not resolve NTP pool hostname: {}", err)
}
Error::ResolveNtpPoolHostname(None) => {
write!(f, "could not resolve NTP pool hostname: no addresses found")
}
Error::SendClientRequest(err) => {
write!(f, "could not send SNTP client request: {}", err)
}
Error::SetReadTimeoutOnSocket(_) => {
write!(f, "could not set read timeout on local UDP socket")
}
Error::SetWriteTimeoutOnSocket(_) => {
write!(f, "could not set write timeout on local UDP socket")
}
}
}
}
impl std::error::Error for Error {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
#[allow(clippy::match_same_arms)]
match self {
Error::BadServerResponse(_) => None,
Error::BindLocalSocket(err) => Some(err),
Error::ReceiveServerResponse(err) => Some(err),
Error::ResolveNtpPoolHostname(Some(err)) => Some(err),
Error::ResolveNtpPoolHostname(None) => None,
Error::SendClientRequest(err) => Some(err),
Error::SetReadTimeoutOnSocket(err) => Some(err),
Error::SetWriteTimeoutOnSocket(err) => Some(err),
}
}
}
#[derive(Clone, Copy, Debug)]
pub enum BadServerResponseReason {
LeapIndicator(u8),
OriginateTimestamp {
expected: chrono::DateTime<chrono::Utc>,
actual: chrono::DateTime<chrono::Utc>,
},
Mode(u8),
VersionNumber(u8),
}
impl std::fmt::Display for BadServerResponseReason {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
BadServerResponseReason::LeapIndicator(leap_indicator) => {
write!(f, "invalid value of leap indicator {}", leap_indicator)
}
BadServerResponseReason::OriginateTimestamp { expected, actual } => write!(
f,
"expected originate timestamp to be {} but it was {}",
expected, actual
),
BadServerResponseReason::Mode(mode) => {
write!(f, "expected mode to be 4 but it was {}", mode)
}
BadServerResponseReason::VersionNumber(version_number) => write!(
f,
"expected version number to be 3 but it was {}",
version_number
),
}
}
}
| true |
b52d1ab45a441e181b4541da2348a883666b5212
|
Rust
|
ryandbair/associatedtypes
|
/src/main.rs
|
UTF-8
| 1,219 | 3.3125 | 3 |
[] |
no_license
|
pub trait Request<'a> {
fn new(msg: &'a str) -> Self;
}
pub trait Response<'a> {
type Request: Request<'a>;
fn new(msg: &'a str, req: &'a Self::Request) -> Self;
}
pub trait Sink<'a> {
type Response: Response<'a>;
    fn write(&self, response: &Self::Response);
}
struct ARequest<'a> {
msg: &'a str,
}
impl<'a> Request<'a> for ARequest<'a> {
fn new(msg: &'a str) -> Self {
ARequest {
msg: msg,
}
}
}
struct AResponse<'a> {
msg: &'a str,
req: &'a ARequest<'a>,
}
impl<'a> Response<'a> for AResponse<'a> {
type Request = ARequest<'a>;
fn new(msg: &'a str, req: &'a Self::Request) -> Self {
AResponse {
msg: msg,
req: req,
}
}
}
struct ASink {}
impl<'a> Sink<'a> for ASink {
type Response = AResponse<'a>;
fn write(&self, response: &Self::Response) {}
}
fn main() {
println!("Hello, world!");
}
struct Encoder<'a, S: for<'b> Sink <'b> + 'a> {
sink: &'a S,
}
impl<'a, S: for<'b> Sink<'b> + 'a> Encoder<'a, S> {
fn encode(&self) {
let req = <S::Response as Response>::Request::new("test");
let resp = S::Response::new("test", &req);
self.sink.write(&resp)
}
}
| true |
a52d07e731d6f9c54e01982a5b3e835a4ee1ed75
|
Rust
|
aero530/fpapp
|
/src-tauri/src/accounts/src/inputs/expense.rs
|
UTF-8
| 908 | 2.640625 | 3 |
[] |
no_license
|
//! User input expense values
use serde::{Deserialize, Serialize};
use ts_rs::TS;
// use super::fixed_with_inflation;
/// used to populate account dropdown for expense type selection
#[derive(TS, Debug, Copy, Clone, Deserialize, Serialize, PartialEq)]
#[ts(export)]
#[serde(rename_all = "snake_case")]
pub enum ExpenseOptions {
/// fixed dollar amount
Fixed,
/// fixed dollar amount compensated for inflation from year start (ie dollar amount is in current dollars)
FixedWithInflation,
}
// impl ExpenseOptions {
// pub fn value(
// self,
// expense: f64,
// inflation: f64,
// duration: u32,
// ) -> f64 {
// match self {
// ExpenseOptions::Fixed => expense,
// ExpenseOptions::FixedWithInflation => {
// fixed_with_inflation(expense, inflation, duration)
// }
// }
// }
// }
| true |
514a9391fa1dce19559bcab6d6495761e9cd94fb
|
Rust
|
thundergolfer/goodreads-sh
|
/src/models.rs
|
UTF-8
| 7,093 | 3.296875 | 3 |
[
"MIT"
] |
permissive
|
use regex::Regex;
use std::fmt::{self, Display, Formatter};
use roxmltree::Node;
const MAX_DESC_LEN: usize = 20;
pub struct Shelf {
pub books: Vec<Book>,
}
#[derive(Clone, Debug)]
pub struct Book {
pub id: u32,
pub description: String,
pub title: String,
// Sometimes num_pages is missing from XML data.
pub num_pages: Option<u32>,
}
#[derive(Clone, Debug, Default)]
pub struct Author {
pub id: u32,
pub name: String,
}
impl Display for Book {
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
match self.description.len() {
0 => write!(f, "{}", self.title),
1..=MAX_DESC_LEN => write!(f, "{}: {}...", self.title, self.description),
_ => {
let shortened_desc = &self.description[..MAX_DESC_LEN];
write!(f, "{}: {}...", self.title, shortened_desc)
}
}
}
}
// TODO(Jonathon): These XML -> Models methods shouldn't be in this module
pub fn parse_book_search_results(
results_xml: &str,
) -> Result<Vec<(u32, String, String)>, roxmltree::Error> {
let mut book_results: Vec<(u32, String, String)> = Vec::new();
let doc = match roxmltree::Document::parse(results_xml) {
Ok(doc) => doc,
Err(e) => {
println!("Error: {}.", e);
return Err(e);
}
};
for node in doc.descendants() {
if node.is_element() && node.has_tag_name("best_book") {
match parse_search_result_from_best_book(node) {
Ok(r) => book_results.push(r),
Err(_err) => {}
}
}
}
Ok(book_results)
}
fn parse_search_result_from_best_book(
best_book_xml_node: Node,
) -> Result<(u32, String, String), roxmltree::Error> {
let mut result: (u32, String, String) = Default::default();
for child_node in best_book_xml_node.descendants() {
match child_node.tag_name().name() {
"id" => {
let id_str = child_node.text().unwrap_or("");
// Don't set it twice. The second is probably an author ID
if result.0 == 0 {
result.0 = id_str.parse::<u32>().unwrap();
}
}
"title" => {
result.1 = String::from(child_node.text().unwrap_or(""));
}
"author" => {
let author = parse_author_node(child_node).unwrap();
result.2 = author.name;
}
_ => {}
}
}
Ok(result)
}
fn parse_author_node(author_node: Node) -> Result<Author, roxmltree::Error> {
let mut author: Author = Default::default();
for child_node in author_node.descendants() {
match child_node.tag_name().name() {
"id" => {
author.id = child_node.text().unwrap().parse::<u32>().unwrap();
}
"name" => {
author.name = String::from(child_node.text().unwrap());
}
_ => {}
}
}
Ok(author)
}
pub fn parse_shelf(shelf_xml: &str) -> Result<Shelf, roxmltree::Error> {
let mut books: Vec<Book> = Vec::new();
let doc = match roxmltree::Document::parse(shelf_xml) {
Ok(doc) => doc,
Err(e) => {
println!("Error: {}.", e);
return Err(e);
}
};
for node in doc.descendants() {
if node.is_element() && node.has_tag_name("book") {
books.push(book_from_xml_node(node));
}
}
Ok(Shelf { books })
}
/// For some insane reason the 'id' field that appears in the XML is NOT
/// the id value that you want to use in API calls.
/// The usable ID value can only be found in URLs in the XML object.
/// This function can extract the ID from the '<link>' node.
fn extract_book_id_from_book_link(book_link: &str) -> Option<u32> {
let re: Regex = Regex::new(
r"(?x)
^https://www.goodreads.com/book/show/(?P<book_id>[\d]+)[\-|\.]
([[:word:]]+-)*
([[:word:]]+_)*
[[:word:]]+$
",
)
.expect("Regex should always compile");
re.captures(book_link)
.and_then(|cap| {
cap.name("book_id")
.map(|book_id| book_id.as_str())
.map(|book_id| book_id.parse::<u32>())
})
.and_then(|parse_res| match parse_res {
Ok(num) => Some(num),
Err(_err) => None,
})
}
fn book_from_xml_node(node: Node) -> Book {
let mut book = Book {
id: 0,
description: "".to_owned(),
title: "".to_owned(),
num_pages: None,
};
for child_node in node.descendants() {
match child_node.tag_name().name() {
"link" => {
let parent = child_node.parent();
// Don't attempt to parse the <link> node that is within the <author> node.
if parent.is_some() && parent.unwrap().tag_name().name() == "book" {
let link_txt = child_node.text().unwrap_or("");
let book_id = extract_book_id_from_book_link(link_txt);
book.id = book_id.expect(&format!(
"Could not get book id from <link> URL: {}",
link_txt
));
}
}
"description" => {
book.description = String::from(child_node.text().unwrap_or(""));
}
"title" => {
book.title = String::from(child_node.text().unwrap_or(""));
}
"num_pages" => {
book.num_pages = child_node.text().and_then(|txt| txt.parse::<u32>().ok());
}
_ => {}
}
}
book
}
#[cfg(test)]
mod tests {
use super::*;
use std::fs;
use std::io::prelude::*;
use std::path::PathBuf;
#[allow(dead_code)]
fn load_file(path: &PathBuf) -> String {
let mut file = fs::File::open(&path).unwrap();
let mut text = String::new();
file.read_to_string(&mut text).unwrap();
text
}
// #[test]
// fn test_parse_shelf() {
// let mut path = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
// path.push("src/api_responses/currently_reading_shelf_resp.xml");
// println!("{}", path.display());
// let text = load_file(&path);
// parse_shelf(&text);
// }
#[test]
fn test_extract_book_id_from_book_link() {
let cases = vec![
(
"https://www.goodreads.com/book/show/1234.The_First_Book",
1234 as u32,
),
(
"https://www.goodreads.com/book/show/4444-The-Second-Book",
4444 as u32,
),
(
"https://www.goodreads.com/book/show/934343-The_Book",
934343 as u32,
),
];
for (url, book_id) in cases.iter() {
let actual = extract_book_id_from_book_link(url);
assert_eq!(actual, Some(*book_id));
}
}
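    #[test]
    fn test_parse_book_search_results() {
        // Hand-written XML covering only the tags the parser reads
        // (<best_book> with <id>, <title> and an <author>); this is an assumed
        // minimal shape, not a captured Goodreads response.
        let xml = "<search><best_book><id>42</id><title>Some Book</title><author><id>7</id><name>Jane Doe</name></author></best_book></search>";
        let results = parse_book_search_results(xml).unwrap();
        assert_eq!(results.len(), 1);
        assert_eq!(
            results[0],
            (42, "Some Book".to_string(), "Jane Doe".to_string())
        );
    }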
}
| true |
87a4195e8d19f79a858d43836dc1685396ec2879
|
Rust
|
tlebrize/LearnRust
|
/chat/src/main.rs
|
UTF-8
| 2,981 | 2.84375 | 3 |
[] |
no_license
|
use std::collections::HashMap;
use std::sync::{Arc, Mutex, RwLock};
use tokio::{
io::{AsyncBufReadExt, AsyncWriteExt, BufReader},
net::TcpListener,
sync::broadcast,
};
#[tokio::main]
async fn main() {
// listen for new connections
let listener = TcpListener::bind("localhost:8000").await.unwrap();
let (tx, _rx) = broadcast::channel(10);
// create a map of address::name
let names = Arc::new(RwLock::new(HashMap::new()));
loop {
let (mut socket, addr) = listener.accept().await.unwrap();
// add a new empty name to the map
let mut map = names.write().unwrap();
map.insert(addr.to_string(), Mutex::<Option<String>>::new(None));
drop(map);
let data = Arc::clone(&names);
let tx = tx.clone();
let mut rx = tx.subscribe();
// spawn a new task for each socket
tokio::spawn(async move {
let (reader, mut writer) = socket.split();
let mut reader = BufReader::new(reader);
let mut line = String::new();
// ask each user its name
writer.write_all(b"Name?\n").await.unwrap();
// handles reading or writing
loop {
tokio::select! {
result = reader.read_line(&mut line) => {
if result.unwrap() == 0 {
break;
}
let map = data.write().unwrap();
// get the name of the sender
if let Some(name) = map.get(&addr.to_string()) {
let mut name = name.lock().unwrap();
match &*name {
// if set, send their message
Some(n) => {
tx.send((n.clone(), line.clone(), addr)).unwrap();
}
                                // otherwise set their name
None => {
let mut tmp_name = line.clone();
// remove /r/n
let len = tmp_name.len();
tmp_name.truncate(len - 2);
*name = Some(tmp_name);
}
}
}
line.clear();
}
result = rx.recv() => {
let (name, msg, other_addr) = result.unwrap();
// send the message to all users, prefixed with the name
if addr != other_addr {
                            let msg = name + ": " + &msg;
writer.write_all(msg.as_bytes()).await.unwrap();
}
}
}
}
});
}
}
| true |
64aeb3129d718ba6a2dc47da98a2c098c2732bae
|
Rust
|
surma/osci
|
/src/memory/readonlymemory.rs
|
UTF-8
| 1,244 | 3.75 | 4 |
[] |
no_license
|
//! Make a memory read-only.
use memory::Memory;
/// Wraps another `Memory` and discards all writes.
pub struct ReadOnlyMemory(Box<Memory>);
impl ReadOnlyMemory {
pub fn new(m: Box<Memory>) -> ReadOnlyMemory {
ReadOnlyMemory(m)
}
}
impl Memory for ReadOnlyMemory {
fn get(&self, addr: usize) -> i32 {
self.0.get(addr)
}
#[inline]
fn set(&mut self, _: usize, _: i32) {}
#[inline]
fn size(&self) -> usize {
self.0.size()
}
}
#[cfg(test)]
mod tests {
use memory::{Memory, SliceMemory};
#[test]
fn read() {
let sm = Box::new(SliceMemory::from_slice(Box::new([0, 1, 2, 3])));
let m = super::ReadOnlyMemory::new(sm);
assert_eq!(m.get(0), 0);
assert_eq!(m.get(3), 3);
}
#[test]
fn write() {
let sm = SliceMemory::from_slice(Box::new([0, 1, 2, 3]));
let mut m = super::ReadOnlyMemory::new(Box::new(sm));
m.set(0, 9);
assert_eq!(m.get(0), 0);
m.set(3, 4);
assert_eq!(m.get(3), 3);
}
#[test]
fn size() {
let sm = SliceMemory::from_slice(Box::new([0, 1, 2, 3]));
let m = super::ReadOnlyMemory::new(Box::new(sm));
assert_eq!(m.size(), 4);
}
}
| true |
61ec88568f8796640f5ce8152386579acc241a46
|
Rust
|
arrayfire/arrayfire-rust
|
/examples/acoustic_wave.rs
|
UTF-8
| 2,369 | 2.671875 | 3 |
[] |
permissive
|
use arrayfire::*;
use std::f64::consts::*;
fn main() {
set_device(0);
info();
acoustic_wave_simulation();
}
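// Map values from [-max, max] to [0, 1] so the field can be drawn as an image.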
fn normalise(a: &Array<f32>) -> Array<f32> {
(a / (max_all(&abs(a)).0 as f32 * 2.0f32)) + 0.5f32
}
fn acoustic_wave_simulation() {
// Speed of sound
let c: f32 = 0.1;
// Distance step
let dx: f32 = 0.5;
// Time step
let dt: f32 = 1.0;
// Grid size.
let nx: u64 = 1500;
let ny: u64 = 1500;
// Grid dimensions.
let dims = Dim4::new(&[nx, ny, 1, 1]);
// Pressure field
let mut p = constant::<f32>(0.0, dims);
// d(pressure)/dt field
let mut p_dot = p.clone();
// Laplacian (Del^2) convolution kernel.
let laplacian_values: [f32; 9] = [0.0, 1.0, 0.0, 1.0, -4.0, 1.0, 0.0, 1.0, 0.0];
let laplacian_kernel = Array::new(&laplacian_values, Dim4::new(&[3, 3, 1, 1])) / (dx * dx);
// Create a window to show the waves.
let mut win = Window::new(1000, 1000, "Waves".to_string());
// Hann windowed pulse.
let pulse_time: f32 = 100.0;
let centre_freq: f32 = 0.05;
let twopi = PI as f32 * 2.0;
// Number of samples in pulse.
let pulse_n = (pulse_time / dt).floor() as u64;
let i = range::<f32>(Dim4::new(&[pulse_n, 1, 1, 1]), 0);
let t = i.clone() * dt;
let hmg_wnd = cos(&(i * (twopi / pulse_n as f32))) * -0.46f32 + 0.54f32;
let wave = sin(&(&t * centre_freq * twopi));
let pulse = wave * hmg_wnd;
// Iteration count.
let mut it = 0;
while !win.is_closed() {
        // Convolve with the Laplacian to get the spatial second derivative.
let lap_p = convolve2(
&p,
&laplacian_kernel,
ConvMode::DEFAULT,
ConvDomain::SPATIAL,
);
// Calculate the updated pressure and d(pressure)/dt fields.
p_dot += lap_p * (c * dt);
p += &p_dot * dt;
if it < pulse_n {
// Location of the source.
let seqs = &[Seq::new(700.0, 800.0, 1.0), Seq::new(800.0, 800.0, 1.0)];
// Set the pressure there.
assign_seq(
&mut p,
seqs,
&index(&pulse, &[Seq::new(it as f64, it as f64, 1.0)]),
);
}
// Draw the image.
win.set_colormap(ColorMap::BLUE);
win.draw_image(&normalise(&p), None);
it += 1;
}
}
| true |
5f1bdd1e56ba02e3e614e7bd27b910af97586285
|
Rust
|
ericlass/xtracer
|
/src/random.rs
|
UTF-8
| 3,390 | 3.40625 | 3 |
[] |
no_license
|
use linear::Vector4F;
use rand::Rng;
const PI: f64 = 3.1415926535897932384626433;
pub struct Random {
}
impl Random {
pub fn new() -> Random {
Random {}
}
    //Create random number in range 0...u32.MAX
pub fn random(&mut self) -> u32 {
rand::thread_rng().gen()
}
//Create random number in range 0.0...1.0
pub fn random_f(&mut self) -> f64 {
rand::thread_rng().gen()
}
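    //Create num_samples x num_samples sample points on the unit square, one per
    //cell of a regular grid, each jittered by a small random offset
    //(stratified sampling).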
fn random_samples(&mut self, num_samples: u32) -> Vec<(f64, f64)> {
let sample_width = 1.0 / num_samples as f64;
let half_width = sample_width * 0.5;
let mut result = Vec::with_capacity((num_samples * num_samples) as usize);
for y in 0..num_samples {
for x in 0..num_samples {
let scatter = half_width * (self.random_f() - 0.5);
let offset = half_width + scatter;
let vx = (x as f64 * sample_width) + offset;
let vy = (y as f64 * sample_width) + offset;
result.push((vx, vy));
}
}
result
}
pub fn random_directions_in_hemisphere(&mut self, num_samples: u32, n: &Vector4F) -> Vec<Vector4F> {
//TODO: Do this inline and get &mut vec from outside to fill to avoid creating millions of vecs
let samples = self.random_samples(num_samples);
let mut result = Vec::new();
for sample in samples {
let u = sample.0;
let v = sample.1;
let theta = 2.0 * PI * u;
let phi = (2.0 * v - 1.0).acos();
let sin_phi = phi.sin();
let dir = Vector4F {
x: sin_phi * theta.cos(),
y: sin_phi * theta.sin(),
z: phi.cos(),
w: 1.0,
};
let pdotn = Vector4F::dot(&dir, n);
if pdotn < 0.0 {
result.push(dir.invert());
}
else {
result.push(dir);
}
}
result
}
//Creates point on unit sphere centered at (0,0,0) with radius 1.0.
fn random_point_on_unit_sphere(&mut self) -> Vector4F {
let u = self.random_f();
let v = self.random_f();
let theta = 2.0 * PI * u;
let phi = (2.0 * v - 1.0).acos();
let sin_phi = phi.sin();
Vector4F {
x: sin_phi * theta.cos(),
y: sin_phi * theta.sin(),
z: phi.cos(),
w: 1.0,
}
}
pub fn random_direction(&mut self) -> Vector4F {
self.random_point_on_unit_sphere()
}
//Create point on sphere centered at given pos and with given radius
pub fn random_point_on_sphere(&mut self, pos: &Vector4F, radius: f64) -> Vector4F {
let usp = self.random_point_on_unit_sphere();
Vector4F {
x: pos.x + (radius * usp.x),
y: pos.y + (radius * usp.y),
z: pos.z + (radius * usp.z),
w: 1.0,
}
}
//Create point on hemisphere centered at (0,0,0) and with radius 1.0. The direction of the top of the hemisphere is given by n.
pub fn random_point_on_hemisphere(&mut self, n: &Vector4F) -> Vector4F {
let usp = self.random_point_on_unit_sphere();
let pdotn = Vector4F::dot(&usp, n);
if pdotn < 0.0 {
usp.invert()
} else {
usp
}
}
}
| true |
7eb9a00e132c47cf0c86c18d3206953b7bccb91b
|
Rust
|
CasperLabs/clarity
|
/packages/sdk/test/keys-manager/contract/src/lib.rs
|
UTF-8
| 3,143 | 2.921875 | 3 |
[
"Apache-2.0"
] |
permissive
|
use casper_contract::{
contract_api::{account},
unwrap_or_revert::UnwrapOrRevert
};
use casper_types::{
account::{
AccountHash, Weight, ActionType, AddKeyFailure, RemoveKeyFailure, SetThresholdFailure, UpdateKeyFailure
}
};
mod errors;
mod api;
use errors::Error;
use api::Api;
pub fn execute() {
let result = match Api::from_args() {
Api::SetKeyWeight(key, weight) => set_key_weight(key, weight),
Api::SetDeploymentThreshold(threshold) => set_threshold(ActionType::Deployment, threshold),
Api::SetKeyManagementThreshold(threshold) => {
set_threshold(ActionType::KeyManagement, threshold)
}
Api::SetAll(deployment_threshold, key_management_threshold, accounts, weights) => {
for (account, weight) in accounts.iter().zip(weights) {
set_key_weight(account.clone(), weight).unwrap_or_revert();
}
set_threshold(ActionType::KeyManagement, key_management_threshold).unwrap_or_revert();
set_threshold(ActionType::Deployment, deployment_threshold).unwrap_or_revert();
Ok(())
}
};
result.unwrap_or_revert()
}
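/// A weight of zero is interpreted as "remove this key"; any non-zero weight
/// adds the key, or updates it if it is already associated with the account.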
fn set_key_weight(key: AccountHash, weight: Weight) -> Result<(), Error> {
if weight.value() == 0 {
remove_key_if_exists(key)
} else {
add_or_update_key(key, weight)
}
}
fn add_or_update_key(key: AccountHash, weight: Weight) -> Result<(), Error> {
match account::update_associated_key(key, weight) {
Ok(()) => Ok(()),
Err(UpdateKeyFailure::MissingKey) => add_key(key, weight),
Err(UpdateKeyFailure::PermissionDenied) => Err(Error::PermissionDenied),
Err(UpdateKeyFailure::ThresholdViolation) => Err(Error::ThresholdViolation),
}
}
fn add_key(key: AccountHash, weight: Weight) -> Result<(), Error> {
match account::add_associated_key(key, weight) {
Ok(()) => Ok(()),
Err(AddKeyFailure::MaxKeysLimit) => Err(Error::MaxKeysLimit),
Err(AddKeyFailure::DuplicateKey) => Err(Error::DuplicateKey), // Should never happen.
Err(AddKeyFailure::PermissionDenied) => Err(Error::PermissionDenied),
}
}
fn remove_key_if_exists(key: AccountHash) -> Result<(), Error> {
match account::remove_associated_key(key) {
Ok(()) => Ok(()),
Err(RemoveKeyFailure::MissingKey) => Ok(()),
Err(RemoveKeyFailure::PermissionDenied) => Err(Error::PermissionDenied),
Err(RemoveKeyFailure::ThresholdViolation) => Err(Error::ThresholdViolation),
}
}
fn set_threshold(permission_level: ActionType, threshold: Weight) -> Result<(), Error> {
match account::set_action_threshold(permission_level, threshold) {
Ok(()) => Ok(()),
Err(SetThresholdFailure::KeyManagementThreshold) => {
Err(Error::KeyManagementThresholdError)
}
Err(SetThresholdFailure::DeploymentThreshold) => Err(Error::DeploymentThresholdError),
Err(SetThresholdFailure::PermissionDeniedError) => Err(Error::PermissionDenied),
Err(SetThresholdFailure::InsufficientTotalWeight) => Err(Error::InsufficientTotalWeight),
}
}
| true |
09d6f0d347f7f4d48df547044fb1c7004e90c7af
|
Rust
|
robohouse-delft/show-image-rs
|
/src/backend/util/gpu_image.rs
|
UTF-8
| 2,791 | 3.125 | 3 |
[
"BSD-2-Clause"
] |
permissive
|
use crate::ImageInfo;
use crate::ImageView;
use crate::{Alpha, PixelFormat};
use super::buffer::create_buffer_with_value;
/// A GPU image buffer ready to be used with the rendering pipeline.
pub struct GpuImage {
name: String,
info: ImageInfo,
bind_group: wgpu::BindGroup,
_uniforms: wgpu::Buffer,
_data: wgpu::Buffer,
}
/// The uniforms associated with a [`GpuImage`].
#[derive(Debug, Copy, Clone)]
#[allow(unused)] // All fields are used by the GPU.
pub struct GpuImageUniforms {
format: u32,
width: u32,
height: u32,
stride_x: u32,
stride_y: u32,
}
impl GpuImage {
/// Create a [`GpuImage`] from an image buffer.
pub fn from_data(name: String, device: &wgpu::Device, bind_group_layout: &wgpu::BindGroupLayout, image: &ImageView) -> Self {
let info = image.info();
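		// Integer code for the pixel format as stored in the uniform buffer;
		// presumably these values must stay in sync with what the shader
		// expects (assumption - the shader source is not part of this file).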
let format = match info.pixel_format {
PixelFormat::Mono8 => 0,
PixelFormat::MonoAlpha8(Alpha::Unpremultiplied) => 1,
PixelFormat::MonoAlpha8(Alpha::Premultiplied) => 2,
PixelFormat::Bgr8 => 3,
PixelFormat::Bgra8(Alpha::Unpremultiplied) => 4,
PixelFormat::Bgra8(Alpha::Premultiplied) => 5,
PixelFormat::Rgb8 => 6,
PixelFormat::Rgba8(Alpha::Unpremultiplied) => 7,
PixelFormat::Rgba8(Alpha::Premultiplied) => 8,
};
let uniforms = GpuImageUniforms {
format,
width: info.size.x,
height: info.size.y,
stride_x: info.stride.x,
stride_y: info.stride.y,
};
let uniforms = create_buffer_with_value(
device,
Some(&format!("{}_uniforms_buffer", name)),
&uniforms,
wgpu::BufferUsages::UNIFORM,
);
use wgpu::util::DeviceExt;
let data = device.create_buffer_init(&wgpu::util::BufferInitDescriptor {
label: Some(&format!("{}_image_buffer", name)),
contents: image.data(),
usage: wgpu::BufferUsages::STORAGE,
});
let bind_group = device.create_bind_group(&wgpu::BindGroupDescriptor {
label: Some(&format!("{}_bind_group", name)),
layout: bind_group_layout,
entries: &[
wgpu::BindGroupEntry {
binding: 0,
resource: wgpu::BindingResource::Buffer(wgpu::BufferBinding {
buffer: &uniforms,
offset: 0,
size: None, // Use entire buffer.
}),
},
wgpu::BindGroupEntry {
binding: 1,
resource: wgpu::BindingResource::Buffer(wgpu::BufferBinding {
buffer: &data,
offset: 0,
size: None, // Use entire buffer.
}),
},
],
});
Self {
name,
info,
bind_group,
_uniforms: uniforms,
_data: data,
}
}
/// Get the name of the image.
#[allow(unused)]
pub fn name(&self) -> &str {
&self.name
}
/// Get the image info.
pub fn info(&self) -> &ImageInfo {
&self.info
}
/// Get the bind group that should be used to render the image with the rendering pipeline.
pub fn bind_group(&self) -> &wgpu::BindGroup {
&self.bind_group
}
}
| true |
77a290f9adae5af84a6d1d54f1d4d4befc8a18f7
|
Rust
|
viing937/codeforces
|
/src/1327C.rs
|
UTF-8
| 1,241 | 2.890625 | 3 |
[
"MIT"
] |
permissive
|
use std::io::{self, BufRead, BufReader, BufWriter, Write};
fn main() {
let mut stdin = BufReader::new(io::stdin());
let mut stdout = BufWriter::new(io::stdout());
let mut buffer = String::new();
stdin.read_line(&mut buffer).unwrap();
let buffer: Vec<i32> = buffer
.trim()
.split_ascii_whitespace()
.map(|x| x.parse().unwrap())
.collect();
let (n, m, k) = (buffer[0], buffer[1], buffer[2]);
for _ in 0..k {
let mut buffer = String::new();
stdin.read_line(&mut buffer).unwrap();
}
for _ in 0..k {
let mut buffer = String::new();
stdin.read_line(&mut buffer).unwrap();
}
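    // Strategy: every chip receives the same command sequence, so first gather
    // them all in one corner ((n-1) downs, (m-1) lefts), then snake through the
    // whole grid so each cell is visited at least once.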
let mut rls: Vec<String> = Vec::new();
for _ in 1..n {
rls.push("D".to_string());
}
for _ in 1..m {
rls.push("L".to_string());
}
for i in 0..n {
if i % 2 == 0 {
for _ in 1..m {
rls.push("R".to_string());
}
} else {
for _ in 1..m {
rls.push("L".to_string());
}
}
rls.push("U".to_string());
}
rls.pop();
writeln!(stdout, "{}", rls.len()).unwrap();
writeln!(stdout, "{}", rls.join("")).unwrap();
}
| true |
c084903b2062266d9baa76a91f5f7ae183aa0732
|
Rust
|
seeseemelk/gpg-tui
|
/src/widget/table.rs
|
UTF-8
| 5,055 | 3.109375 | 3 |
[
"MIT"
] |
permissive
|
use crate::widget::row::{ScrollAmount, ScrollDirection};
use tui::widgets::TableState as TuiState;
/// Table size mode.
#[derive(Clone, Debug, PartialEq)]
pub enum TableSize {
/// Normal sized table.
Normal,
/// Compact table with some rows truncated.
Compact,
/// Minimized table with all rows truncated.
Minimized,
}
impl TableSize {
/// Sets the table size to minimized.
pub fn set_minimized(&mut self, minimized: bool) {
*self = if minimized {
Self::Minimized
} else {
Self::Normal
}
}
/// Returns the next mode.
pub fn next(&self) -> Self {
match self {
Self::Normal => Self::Compact,
Self::Compact => Self::Minimized,
_ => Self::Normal,
}
}
}
/// State of the table widget.
#[derive(Clone, Debug)]
pub struct TableState {
/// State that can be modified by TUI.
pub tui: TuiState,
/// Scroll amount of the table.
pub scroll: ScrollAmount,
/// Table size.
pub size: TableSize,
/// Threshold value (width) for minimizing.
pub minimize_threshold: u16,
}
impl Default for TableState {
fn default() -> Self {
Self {
tui: TuiState::default(),
scroll: ScrollAmount::default(),
size: TableSize::Normal,
minimize_threshold: 90,
}
}
}
/// Table widget with TUI controlled states.
#[derive(Clone, Debug)]
pub struct StatefulTable<T: Clone> {
/// Default table items (for search functionality).
pub default_items: Vec<T>,
/// Table items.
pub items: Vec<T>,
/// Table state.
pub state: TableState,
}
impl<T: Clone> StatefulTable<T> {
/// Constructs a new instance of `StatefulTable`.
pub fn new(items: Vec<T>, mut state: TableState) -> StatefulTable<T> {
state.tui.select(Some(0));
Self {
default_items: items.clone(),
items,
state,
}
}
/// Construct a new `StatefulTable` with given items.
pub fn with_items(items: Vec<T>) -> StatefulTable<T> {
Self::new(items, TableState::default())
}
/// Returns the selected item.
pub fn selected(&self) -> Option<&T> {
self.items.get(self.state.tui.selected()?)
}
/// Selects the next item.
pub fn next(&mut self) {
let i = match self.state.tui.selected() {
Some(i) => {
if i >= self.items.len().checked_sub(1).unwrap_or(i) {
0
} else {
i + 1
}
}
None => 0,
};
self.state.tui.select(Some(i));
self.reset_scroll();
}
/// Selects the previous item.
pub fn previous(&mut self) {
let i = match self.state.tui.selected() {
Some(i) => {
if i == 0 {
self.items.len().checked_sub(1).unwrap_or(i)
} else {
i - 1
}
}
None => 0,
};
self.state.tui.select(Some(i));
self.reset_scroll();
}
/// Sets the scrolling state of the table row
/// depending on the given direction and offset.
pub fn scroll_row(&mut self, direction: ScrollDirection) {
match direction {
ScrollDirection::Up(value) => {
self.state.scroll.vertical = self
.state
.scroll
.vertical
.checked_sub(value)
.unwrap_or_default();
}
ScrollDirection::Right(value) => {
self.state.scroll.horizontal = self
.state
.scroll
.horizontal
.checked_add(value)
.unwrap_or(self.state.scroll.horizontal)
}
ScrollDirection::Down(value) => {
self.state.scroll.vertical = self
.state
.scroll
.vertical
.checked_add(value)
.unwrap_or(self.state.scroll.vertical)
}
ScrollDirection::Left(value) => {
self.state.scroll.horizontal = self
.state
.scroll
.horizontal
.checked_sub(value)
.unwrap_or_default();
}
_ => {}
}
}
/// Resets the items state.
pub fn reset_state(&mut self) {
self.items = self.default_items.clone();
self.state.tui.select(Some(0));
}
/// Resets the scroll state.
pub fn reset_scroll(&mut self) {
self.state.scroll = ScrollAmount::default();
}
}
#[cfg(test)]
mod tests {
use super::*;
use pretty_assertions::assert_eq;
#[test]
fn test_widget_table() {
let mut table =
StatefulTable::with_items(vec!["data1", "data2", "data3"]);
table.state.tui.select(Some(1));
assert_eq!(Some(&"data2"), table.selected());
table.next();
assert_eq!(Some(2), table.state.tui.selected());
table.previous();
assert_eq!(Some(1), table.state.tui.selected());
table.reset_scroll();
assert_eq!(
"ScrollAmount { vertical: 0, horizontal: 0 }",
&format!("{:?}", table.state.scroll)
);
table.scroll_row(ScrollDirection::Down(3));
table.scroll_row(ScrollDirection::Right(2));
assert_eq!(
"ScrollAmount { vertical: 3, horizontal: 2 }",
&format!("{:?}", table.state.scroll)
);
table.scroll_row(ScrollDirection::Up(1));
table.scroll_row(ScrollDirection::Left(1));
assert_eq!(
"ScrollAmount { vertical: 2, horizontal: 1 }",
&format!("{:?}", table.state.scroll)
);
table.reset_state();
assert_eq!(Some(0), table.state.tui.selected());
assert_eq!(table.default_items, table.items);
assert_eq!(TableSize::Normal, table.state.size);
table.state.size = TableSize::Minimized;
table.state.size.set_minimized(false);
assert_eq!(TableSize::Compact, table.state.size.next());
}
}
| true |
3a4d4c8bbf0344238f582e3427a839b03ddb2ea8
|
Rust
|
brandonedens/stm32l4x6
|
/src/adc1/sqr1.rs
|
UTF-8
| 6,547 | 2.71875 | 3 |
[
"Apache-2.0",
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
#[doc = r" Value read from the register"]
pub struct R {
bits: u32,
}
#[doc = r" Value to write to the register"]
pub struct W {
bits: u32,
}
impl super::SQR1 {
#[doc = r" Modifies the contents of the register"]
#[inline]
pub fn modify<F>(&self, f: F)
where
for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,
{
let bits = self.register.get();
let r = R { bits: bits };
let mut w = W { bits: bits };
f(&r, &mut w);
self.register.set(w.bits);
}
#[doc = r" Reads the contents of the register"]
#[inline]
pub fn read(&self) -> R {
R {
bits: self.register.get(),
}
}
#[doc = r" Writes to the register"]
#[inline]
pub fn write<F>(&self, f: F)
where
F: FnOnce(&mut W) -> &mut W,
{
let mut w = W::reset_value();
f(&mut w);
self.register.set(w.bits);
}
#[doc = r" Writes the reset value to the register"]
#[inline]
pub fn reset(&self) {
self.write(|w| w)
}
}
#[doc = r" Value of the field"]
pub struct SQ4R {
bits: u8,
}
impl SQ4R {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bits(&self) -> u8 {
self.bits
}
}
#[doc = r" Value of the field"]
pub struct SQ3R {
bits: u8,
}
impl SQ3R {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bits(&self) -> u8 {
self.bits
}
}
#[doc = r" Value of the field"]
pub struct SQ2R {
bits: u8,
}
impl SQ2R {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bits(&self) -> u8 {
self.bits
}
}
#[doc = r" Value of the field"]
pub struct SQ1R {
bits: u8,
}
impl SQ1R {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bits(&self) -> u8 {
self.bits
}
}
#[doc = r" Value of the field"]
pub struct L3R {
bits: u8,
}
impl L3R {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bits(&self) -> u8 {
self.bits
}
}
#[doc = r" Proxy"]
pub struct _SQ4W<'a> {
w: &'a mut W,
}
impl<'a> _SQ4W<'a> {
#[doc = r" Writes raw bits to the field"]
#[inline]
pub unsafe fn bits(self, value: u8) -> &'a mut W {
const MASK: u8 = 31;
const OFFSET: u8 = 24;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _SQ3W<'a> {
w: &'a mut W,
}
impl<'a> _SQ3W<'a> {
#[doc = r" Writes raw bits to the field"]
#[inline]
pub unsafe fn bits(self, value: u8) -> &'a mut W {
const MASK: u8 = 31;
const OFFSET: u8 = 18;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _SQ2W<'a> {
w: &'a mut W,
}
impl<'a> _SQ2W<'a> {
#[doc = r" Writes raw bits to the field"]
#[inline]
pub unsafe fn bits(self, value: u8) -> &'a mut W {
const MASK: u8 = 31;
const OFFSET: u8 = 12;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _SQ1W<'a> {
w: &'a mut W,
}
impl<'a> _SQ1W<'a> {
#[doc = r" Writes raw bits to the field"]
#[inline]
pub unsafe fn bits(self, value: u8) -> &'a mut W {
const MASK: u8 = 31;
const OFFSET: u8 = 6;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _L3W<'a> {
w: &'a mut W,
}
impl<'a> _L3W<'a> {
#[doc = r" Writes raw bits to the field"]
#[inline]
pub unsafe fn bits(self, value: u8) -> &'a mut W {
const MASK: u8 = 15;
const OFFSET: u8 = 0;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
impl R {
#[doc = r" Value of the register as raw bits"]
#[inline]
pub fn bits(&self) -> u32 {
self.bits
}
#[doc = "Bits 24:28 - SQ4"]
#[inline]
pub fn sq4(&self) -> SQ4R {
let bits = {
const MASK: u8 = 31;
const OFFSET: u8 = 24;
((self.bits >> OFFSET) & MASK as u32) as u8
};
SQ4R { bits }
}
#[doc = "Bits 18:22 - SQ3"]
#[inline]
pub fn sq3(&self) -> SQ3R {
let bits = {
const MASK: u8 = 31;
const OFFSET: u8 = 18;
((self.bits >> OFFSET) & MASK as u32) as u8
};
SQ3R { bits }
}
#[doc = "Bits 12:16 - SQ2"]
#[inline]
pub fn sq2(&self) -> SQ2R {
let bits = {
const MASK: u8 = 31;
const OFFSET: u8 = 12;
((self.bits >> OFFSET) & MASK as u32) as u8
};
SQ2R { bits }
}
#[doc = "Bits 6:10 - SQ1"]
#[inline]
pub fn sq1(&self) -> SQ1R {
let bits = {
const MASK: u8 = 31;
const OFFSET: u8 = 6;
((self.bits >> OFFSET) & MASK as u32) as u8
};
SQ1R { bits }
}
#[doc = "Bits 0:3 - L3"]
#[inline]
pub fn l3(&self) -> L3R {
let bits = {
const MASK: u8 = 15;
const OFFSET: u8 = 0;
((self.bits >> OFFSET) & MASK as u32) as u8
};
L3R { bits }
}
}
impl W {
#[doc = r" Reset value of the register"]
#[inline]
pub fn reset_value() -> W {
W { bits: 0 }
}
#[doc = r" Writes raw bits to the register"]
#[inline]
pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
self.bits = bits;
self
}
#[doc = "Bits 24:28 - SQ4"]
#[inline]
pub fn sq4(&mut self) -> _SQ4W {
_SQ4W { w: self }
}
#[doc = "Bits 18:22 - SQ3"]
#[inline]
pub fn sq3(&mut self) -> _SQ3W {
_SQ3W { w: self }
}
#[doc = "Bits 12:16 - SQ2"]
#[inline]
pub fn sq2(&mut self) -> _SQ2W {
_SQ2W { w: self }
}
#[doc = "Bits 6:10 - SQ1"]
#[inline]
pub fn sq1(&mut self) -> _SQ1W {
_SQ1W { w: self }
}
#[doc = "Bits 0:3 - L3"]
#[inline]
pub fn l3(&mut self) -> _L3W {
_L3W { w: self }
}
}
| true |
9ba9e26fce503f8338726b0e3d8aff2c6ce411d5
|
Rust
|
benjione/iot_webhook_server
|
/src/server.rs
|
UTF-8
| 3,253 | 3.25 | 3 |
[
"MIT"
] |
permissive
|
//! `ChatServer` is an actor. It maintains a list of connected client sessions
//! and manages available rooms. Peers send messages to other peers in the same
//! room through `ChatServer`.
use actix::prelude::*;
use std::collections::HashMap;
use serde::Deserialize;
use diesel::r2d2;
use diesel::SqliteConnection;
use crate::models::device;
use crate::models::user;
type Pool = r2d2::Pool<r2d2::ConnectionManager<SqliteConnection>>;
/// Chat server sends this message to the session
#[derive(Message)]
#[rtype(result = "()")]
pub struct Message(pub String);
/// Message for chat server communications
/// New chat session is created
#[derive(Message)]
#[rtype(String)]
pub struct Connect {
pub addr: Recipient<Message>,
pub email: String,
pub password: String,
pub object: String,
}
/// Session is disconnected
#[derive(Message)]
#[rtype(result = "()")]
pub struct Disconnect {
pub id: String,
}
/// Webhook payload to forward to a connected session
#[derive(Message, Deserialize)]
#[rtype(result = "()")]
pub struct SendWebhook {
pub id: String,
pub message: String,
}
/// `ChatServer` manages chat rooms and is responsible for coordinating chat
/// sessions. The implementation is super primitive.
pub struct ChatServer {
sessions: HashMap<String, Recipient<Message>>, // id -> addr
db: Pool,
}
impl ChatServer {
/// Send message to all users in the room
fn send_message(&self, id: String, message: &str) {
if let Some(addr) = self.sessions.get(&id) {
let _ = addr.do_send(Message(message.to_owned()));
}
}
pub fn new(db: Pool) -> Self {
ChatServer {
sessions: HashMap::new(),
db: db,
}
}
}
/// Make actor from `ChatServer`
impl Actor for ChatServer {
/// We are going to use simple Context, we just need ability to communicate
/// with other actors.
type Context = Context<Self>;
}
/// Handler for Connect message.
///
/// Register new session and assign unique id to this session
impl Handler<Connect> for ChatServer {
type Result = String;
fn handle(&mut self, msg: Connect, _: &mut Context<Self>) -> Self::Result {
let user = user::User::get_user_from_email(&msg.email, &self.db);
let user = match user {
Some(user) => user,
_ => return "none".to_string(),
};
let object = device::Device::get_device_and_register(user.id, msg.object, &self.db);
let object = match object {
Some(object) => object,
_ => return "none".to_string(),
};
self.sessions
.insert(object.registration_id.clone(), msg.addr);
println!("Someone joined");
return object.registration_id;
}
}
/// Handler for Disconnect message.
impl Handler<Disconnect> for ChatServer {
type Result = ();
fn handle(&mut self, msg: Disconnect, _: &mut Context<Self>) {
println!("Someone disconnected");
// remove address
self.sessions.remove(&msg.id);
device::Device::go_offline(&msg.id, &self.db)
}
}
impl Handler<SendWebhook> for ChatServer {
type Result = ();
fn handle(&mut self, msg: SendWebhook, _: &mut Context<Self>) {
self.send_message(msg.id, &msg.message)
}
}
| true |
52f1d4149b5c887b842de926cec42e2f363cd4d1
|
Rust
|
IvanPleshkov/RustExperiments
|
/render/src/gpu_texture.rs
|
UTF-8
| 4,113 | 3.0625 | 3 |
[
"Apache-2.0"
] |
permissive
|
use crate::command_buffer::CommandBuffer;
use crate::gpu_texture_format::GpuTextureFormat;
use std::sync::Arc;
pub struct GpuTexture {
pub id: GpuTextureIndex,
pub info: GpuTextureInfo,
}
pub struct GpuTextureIndex {
pub id: u64,
}
pub struct GpuTextureInfo {
pub name: String,
pub width: u64,
pub height: u64,
pub depth: u64,
pub texture_type: GpuTextureType,
pub format: GpuTextureFormat,
pub mips_count: u32,
pub array_size: u32,
pub samples: u32,
pub tiling: GpuTextureTiling,
pub is_transfer_src: bool,
pub is_transfer_dst: bool,
pub is_sampled: bool,
pub is_storage: bool,
}
pub enum GpuTextureType {
Texture1D,
Texture2D,
Texture3D,
Texture1DArray,
Texture2DArray,
Texture3DArray,
TextureCubemap,
DepthStencil,
}
pub enum GpuTextureTiling {
Linear,
Optimal,
}
pub struct GpuTextureInfoBuilder {
pub info: GpuTextureInfo,
}
impl GpuTexture {
pub fn new(command_buffer: &mut CommandBuffer, info: GpuTextureInfo) -> Arc<GpuTexture> {
Arc::new(GpuTexture {
id: GpuTextureIndex {
id: command_buffer.generate_unique_id(),
},
info: info,
})
}
}
impl GpuTextureInfo {
pub fn default() -> GpuTextureInfo {
GpuTextureInfo {
name: String::new(),
width: 1,
height: 1,
depth: 1,
texture_type: GpuTextureType::Texture2D,
format: GpuTextureFormat::R8G8B8A8Unorm,
mips_count: 1,
array_size: 1,
samples: 1,
tiling: GpuTextureTiling::Optimal,
is_transfer_src: true,
is_transfer_dst: true,
is_sampled: true,
is_storage: true,
}
}
pub fn builder() -> GpuTextureInfoBuilder {
GpuTextureInfoBuilder {
info: GpuTextureInfo::default(),
}
}
}
impl GpuTextureInfoBuilder {
pub fn new() -> GpuTextureInfoBuilder {
GpuTextureInfoBuilder {
info: GpuTextureInfo::default(),
}
}
pub fn build(self) -> GpuTextureInfo {
self.info
}
pub fn name<'a>(&'a mut self, name: &str) -> &'a mut Self {
self.info.name = String::from(name);
self
}
pub fn width<'a>(&'a mut self, width: u64) -> &'a mut Self {
self.info.width = width;
self
}
pub fn height<'a>(&'a mut self, height: u64) -> &'a mut Self {
self.info.height = height;
self
}
pub fn depth<'a>(&'a mut self, depth: u64) -> &'a mut Self {
self.info.depth = depth;
self
}
pub fn texture_type<'a>(&'a mut self, texture_type: GpuTextureType) -> &'a mut Self {
self.info.texture_type = texture_type;
self
}
pub fn format<'a>(&'a mut self, format: GpuTextureFormat) -> &'a mut Self {
self.info.format = format;
self
}
pub fn mips_count<'a>(&'a mut self, mips_count: u32) -> &'a mut Self {
self.info.mips_count = mips_count;
self
}
pub fn array_size<'a>(&'a mut self, array_size: u32) -> &'a mut Self {
self.info.array_size = array_size;
self
}
pub fn samples<'a>(&'a mut self, samples: u32) -> &'a mut Self {
self.info.samples = samples;
self
}
pub fn tiling<'a>(&'a mut self, tiling: GpuTextureTiling) -> &'a mut Self {
self.info.tiling = tiling;
self
}
pub fn is_transfer_src<'a>(&'a mut self, is_transfer_src: bool) -> &'a mut Self {
self.info.is_transfer_src = is_transfer_src;
self
}
pub fn is_transfer_dst<'a>(&'a mut self, is_transfer_dst: bool) -> &'a mut Self {
self.info.is_transfer_dst = is_transfer_dst;
self
}
pub fn is_sampled<'a>(&'a mut self, is_sampled: bool) -> &'a mut Self {
self.info.is_sampled = is_sampled;
self
}
pub fn is_storage<'a>(&'a mut self, is_storage: bool) -> &'a mut Self {
self.info.is_storage = is_storage;
self
}
}
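#[cfg(test)]
mod tests {
    use super::*;

    // Minimal check of the builder: the setters mutate the wrapped
    // GpuTextureInfo in place and build() hands it back by value.
    #[test]
    fn builder_sets_fields() {
        let mut builder = GpuTextureInfo::builder();
        builder
            .name("albedo")
            .width(256)
            .height(256)
            .mips_count(4)
            .texture_type(GpuTextureType::Texture2D);
        let info = builder.build();
        assert_eq!(info.name, "albedo");
        assert_eq!(info.width, 256);
        assert_eq!(info.height, 256);
        assert_eq!(info.mips_count, 4);
    }
}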
| true |
13ff24b6fb1199431e077d273a982dabc79d5339
|
Rust
|
PBertinJohannet/Siro
|
/src/equation.rs
|
UTF-8
| 19,069 | 3.140625 | 3 |
[] |
no_license
|
use lexer::EqLexer;
use parser::EqParser;
use std::collections::{HashMap, HashSet};
use std::fmt;
use std::mem;
use rand::random;
use std::iter::FromIterator;
use mccluskey::PrimeImplicant;
use mccluskey::mccluskey;
#[derive(Debug, Clone, PartialEq)]
pub enum Equation {
Sum(Box<Sum>),
Prod(Box<Prod>),
Not(Box<Not>),
Var(String),
}
impl Equation {
pub fn from(text: String) -> Self {
EqParser::new(EqLexer::new(text).get_tokens().unwrap()).parse()
}
pub fn remove_simplified(&mut self) {
match self {
&mut Equation::Sum(ref mut s) => s.remove_simplified(),
_ => (),
}
}
pub fn reconstruct(&mut self){
match self {
&mut Equation::Sum(ref mut s) => {
let mut new_rep = mem::replace(&mut s.already_simplified, vec![]);
s.inner.append(&mut new_rep);
for p in s.inner.iter_mut(){
p.reconstruct();
}
},
&mut Equation::Prod(ref mut p) => {
for p in p.inner.iter_mut(){
p.reconstruct();
}
},
&mut Equation::Not(ref mut n) => n.inner.reconstruct(),
_ => ()
}
}
pub fn len_removed(&mut self) -> usize{
match self {
&mut Equation::Sum(ref mut s) => s.already_simplified.len(),
_ => 0,
}
}
pub fn eval(&self, vars: &HashMap<String, bool>) -> bool {
match self {
&Equation::Sum(ref s) => s.inner.iter().any(|inner| inner.eval(&vars)),
&Equation::Prod(ref p) => p.inner.iter().all(|inner| inner.eval(&vars)),
&Equation::Not(ref n) => !n.inner.eval(&vars),
&Equation::Var(ref e) => *vars.get(e)
.unwrap_or_else(||panic!(format!("var not found : {}", e))),
}
}
    pub fn mccluskey(&mut self){
        match self {
            &mut Equation::Sum(ref mut s) => {
                s.inner = mccluskey(s.get_primes_implicants());
            }
            _ => ()
        }
    }
pub fn inners(&self) -> Vec<&Equation> {
match self {
&Equation::Sum(ref s) => s.inner.iter().collect(),
&Equation::Prod(ref p) => p.inner.iter().collect(),
&Equation::Not(ref n) => vec![&n.inner],
            _ => vec![self],
}
}
    /// Panics if the two equations disagree on any of `tests` random variable
    /// assignments, i.e. checks that their truth tables match.
pub fn compare_random_values(&self, other : &Equation, tests : usize) {
let vars = other.get_vars();
assert_eq!(vars.len(), self.get_vars().len());
for _ in 0..tests {
let vals = HashMap::from_iter(vars.iter().map(|&v|(v.clone(), random())));
assert_eq!(other.eval(&vals), self.eval(&vals));
}
}
pub fn into_inners(self) -> Vec<Equation> {
match self {
Equation::Sum(s) => s.inner,
Equation::Prod(p) => p.inner,
Equation::Not(n) => vec![n.inner],
v => vec![v],
}
}
pub fn complete_simplify(self) -> Self {
let ancient_nb_var = self.get_vars().len();
let mut old_self = self;
let mut new_self = old_self.clone().simplified();
while new_self != old_self {
//println!("self len : {}", format!("{}", new_self).len());
old_self = new_self;
new_self = old_self.clone().simplified();
new_self.remove_simplified();
}
new_self.reconstruct();
new_self.mccluskey();
new_self
}
pub fn is_product(&self) -> bool {
match self {
&Equation::Prod(_) => true,
_ => false,
}
}
pub fn is_simplified(&self, depth : usize) -> bool {
match self {
&Equation::Sum(ref s) => s
.inner
.iter()
.all(|inner| inner.is_simplified(depth + 1)),
&Equation::Prod(ref p) => p
.inner
.iter()
.all(|inner| inner.is_simplified(depth + 1)),
&Equation::Not(ref n) => n.inner.is_simplified(depth + 1),
&Equation::Var(_) => depth < 5,
}
}
/// Simplifies using simple basic rules.
pub fn simplified(self) -> Self {
match self {
Equation::Sum(s) => s.simplified(),
Equation::Prod(p) => p.simplified(),
Equation::Not(n) => n.simplified(),
v => v,
}
}
/// Returns a list of the names of the variables.
pub fn get_vars(&self) -> Vec<&String> {
let mut hs = HashSet::new();
match self {
&Equation::Sum(ref s) => s.get_vars(),
&Equation::Prod(ref p) => p.get_vars(),
&Equation::Not(ref n) => n.inner.get_vars(),
&Equation::Var(ref e) => vec![e],
}.into_iter()
.map(|var| hs.insert(var))
.for_each(drop);
let mut to_ret = hs.into_iter().collect::<Vec<&String>>();
to_ret.sort();
to_ret
}
/// Returns a list of the names of the variables.
pub fn get_only_var(&self) -> &String {
self.get_vars()[0]
}
/// Returns a list of the names of the variables.
pub fn get_owned_vars(&self) -> Vec<String> {
self.get_vars().iter().map(|&i|i.clone()).collect()
}
/// Returns the depth of the tree
pub fn depth(&self, so_far: usize) -> usize {
match self {
&Equation::Sum(ref s) => s
.inner
.iter()
.map(|inner| inner.depth(so_far + 1))
.max()
.unwrap_or(0),
&Equation::Prod(ref p) => p
.inner
.iter()
.map(|inner| inner.depth(so_far + 1))
.max()
.unwrap_or(0),
&Equation::Not(ref n) => n.inner.depth(so_far + 1),
&Equation::Var(_) => so_far + 1,
}
}
}
impl fmt::Display for Equation {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(
f,
"{}",
match self {
&Equation::Sum(ref s) => format!(
"({})",
s.inner
.iter()
.map(|inner| format!("{}", inner))
.collect::<Vec<String>>()
.join(" + ")
),
&Equation::Prod(ref p) => format!(
"({})",
p.inner
.iter()
.map(|inner| format!("{}", inner))
.collect::<Vec<String>>()
.join(" * ")
),
&Equation::Not(ref n) => format!("! {}", n.inner),
&Equation::Var(ref e) => e.to_string(),
}
)
}
}
#[derive(Debug, Clone, PartialEq)]
pub struct Sum {
inner: Vec<Equation>,
already_simplified : Vec<Equation>,
}
impl Sum {
pub fn new(inner: Vec<Equation>) -> Self {
Sum { inner: inner , already_simplified : vec![]}
}
    /// Builds the variable list and prime implicants that the Quine-McCluskey
    /// algorithm consumes.
pub fn get_primes_implicants(&self) -> (Vec<String>, Vec<PrimeImplicant>){
let vars = self.get_vars();
(vars.clone().into_iter().map(|i|i.clone()).collect(),
self.inner.iter().map(|ref i|PrimeImplicant::from_eq(i, &vars)).collect())
}
/// Returns a list of the names of the variables.
pub fn get_owned_vars(&self) -> Vec<String> {
self.get_vars().iter().map(|&i|i.clone()).collect()
}
/// Returns a list of the names of the variables.
pub fn get_vars(&self) -> Vec<&String> {
let mut hs = HashSet::new();
self.inner.iter().flat_map(|inner| inner.get_vars().into_iter()).into_iter()
.map(|var| hs.insert(var))
.for_each(drop);
let mut to_ret = hs.into_iter().collect::<Vec<&String>>();
to_ret.sort();
to_ret
}
/// This must be called at the top level only.
pub fn remove_simplified(&mut self) {
let mut not_simp = vec![];
let mut simp = vec![];
for i in self.inner.iter_mut(){
if i.is_simplified(0){
simp.push( i.clone());
} else {
not_simp.push(i.clone());
}
}
self.inner = not_simp;
self.already_simplified.append(&mut simp);
}
/// Sums can be simplified using two simple rules :
/// sum(a) = a
/// sum(a, sum(b, c)) = sum (a, b, c)
pub fn simplified(mut self) -> Equation {
let mut new_inner = vec![];
for old_i in self.inner {
let mut i = old_i.simplified();
match i {
Equation::Sum(ref mut s) => new_inner.append(&mut s.inner),
_ => new_inner.push(i),
};
}
self.inner = new_inner;
if self.inner.len() == 1 {
let ret = self.inner.into_iter().next().unwrap();
return ret.simplified();
} else {
return Equation::Sum(Box::new(self));
}
}
}
#[derive(Debug, Clone, PartialEq)]
pub struct Prod {
inner: Vec<Equation>,
}
impl Prod {
pub fn new(inner: Vec<Equation>) -> Self {
Prod { inner: inner }
}
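    /// Removes duplicated variables from the product: `a * a * !b` keeps a
    /// single `a`, while `a * !a` is contradictory and yields an empty vec.
    /// Assumes every inner term is already a `Var` or a `Not(Var)`.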
pub fn removed_doublons(&self) -> Vec<Equation>{
let mut inn = self.inner.clone();
inn.sort_by_key(|a|a.get_only_var().to_string());
let mut in_order_vars = inn.iter();
let mut prev = match in_order_vars.next(){
Some(p) => p,
_ => return vec![],
};
let mut new_inner = vec![];
while let Some(next) = in_order_vars.next(){
if next.get_only_var() == prev.get_only_var(){
// inside can only be Var or Not(Var) because it is simplified.
if mem::discriminant(next) != mem::discriminant(prev){
return vec![]
}
} else {
new_inner.push(prev.clone());
}
prev = next;
}
new_inner.push(prev.clone());
new_inner.clone()
}
pub fn factorise_for(mut self, i: usize) -> Equation {
let removed = self.inner.remove(i);
let mut new_sum_of_products = vec![];
for sub_sum_element in removed.into_inners() {
let mut new_inner = self.inner.clone();
new_inner.push(sub_sum_element);
new_sum_of_products.push(Equation::Prod(Box::new(Prod::new(new_inner))));
}
Equation::Sum(Box::new(Sum::new(new_sum_of_products))).complete_simplify()
}
pub fn flatten(mut self) -> Equation {
let mut new_inner = vec![];
for old_i in self.inner {
let mut i = old_i.simplified();
match i {
Equation::Prod(ref mut p) => new_inner.append(&mut p.inner),
_ => new_inner.push(i),
};
}
self.inner = new_inner;
if self.inner.len() == 1 {
let ret = self.inner.into_iter().next().unwrap();
return ret.simplified();
} else {
return Equation::Prod(Box::new(self));
}
}
    /// Products can be simplified in the same way as sums, but we can also factorise:
/// a * (B + c + d) * e => (a * e * B) + (a * e * c) + (a * e * d)
///
pub fn simplified(mut self) -> Equation {
self.inner = self
.inner
.into_iter()
.map(|inner| inner.simplified())
.collect();
for i in 0..self.inner.len() {
if mem::discriminant(&self.inner[i])
== mem::discriminant(&Equation::Sum(Box::new(Sum::new(vec![]))))
{
return self.factorise_for(i);
}
}
self.flatten()
}
/// Returns a list of the names of the variables.
pub fn get_vars(&self) -> Vec<&String> {
let mut hs = HashSet::new();
self.inner.iter().flat_map(|inner| inner.get_vars().into_iter()).into_iter()
.map(|var| hs.insert(var))
.for_each(drop);
let mut to_ret = hs.into_iter().collect::<Vec<&String>>();
to_ret.sort();
to_ret
}
}
#[derive(Debug, Clone, PartialEq)]
pub struct Not {
inner: Equation,
}
impl Not {
pub fn new(inner: Equation) -> Self {
Not { inner: inner }
}
/// Returns a list of the names of the variables.
pub fn get_only_var(&self) -> &String {
self.inner.get_vars()[0]
}
pub fn simplified(mut self) -> Equation {
self.inner = self.inner.complete_simplify();
match self.inner {
Equation::Not(box n) => n.inner.simplified(),
Equation::Sum(box s) => Equation::Prod(Box::new(Prod::new(
s.inner
.into_iter()
.map(|i| Equation::Not(Box::new(Not::new(i))))
.collect(),
))),
Equation::Prod(box s) => Equation::Sum(Box::new(Sum::new(
s.inner
.into_iter()
.map(|i| Equation::Not(Box::new(Not::new(i))))
.collect(),
))),
v => Equation::Not(Box::new(Not::new(v))),
}
}
}
#[cfg(test)]
mod tests_eval {
use super::*;
use equation::{Equation, Not, Prod, Sum};
use lexer::EqLexer;
#[test]
fn test_basics() {
let eq = Equation::from("a + b * c".to_string());
let mut vars = HashMap::new();
vars.insert("a".to_string(), false);
vars.insert("b".to_string(), false);
vars.insert("c".to_string(), true);
assert_eq!(eq.eval(&vars), false);
// with a to true it is always true
vars.insert("a".to_string(), true);
vars.insert("b".to_string(), false);
vars.insert("c".to_string(), true);
assert_eq!(eq.eval(&vars), true);
// with a to false and both true it is true
vars.insert("a".to_string(), false);
vars.insert("b".to_string(), true);
vars.insert("c".to_string(), true);
assert_eq!(eq.eval(&vars), true);
}
#[test]
fn test_more() {
let eq = Equation::from("I & !B | (A + B) and (c + a./y)".to_string());
let mut vars = HashMap::new();
// in this case it is true
vars.insert("I".to_string(), true);
vars.insert("B".to_string(), false);
vars.insert("A".to_string(), true);
vars.insert("c".to_string(), true);
vars.insert("a".to_string(), false);
vars.insert("y".to_string(), true);
assert_eq!(eq.eval(&vars), true);
// in this case it is false because the y is true
vars.insert("I".to_string(), true);
vars.insert("B".to_string(), true);
vars.insert("A".to_string(), false);
vars.insert("c".to_string(), false);
vars.insert("a".to_string(), true);
vars.insert("y".to_string(), true);
assert_eq!(eq.eval(&vars), false);
// setting it to false make it true
vars.insert("I".to_string(), true);
vars.insert("B".to_string(), true);
vars.insert("A".to_string(), false);
vars.insert("c".to_string(), false);
vars.insert("a".to_string(), true);
vars.insert("y".to_string(), false);
assert_eq!(eq.eval(&vars), true);
}
}
#[cfg(test)]
mod tests_get_var {
use super::*;
use equation::{Equation, Not, Prod, Sum};
use lexer::EqLexer;
#[test]
fn test_basics() {
let eq = Equation::from("a + b * c".to_string());
let mut vars = eq.get_vars();
vars.sort();
assert_eq!(vars, vec!["a", "b", "c"]);
}
#[test]
fn test_more() {
let eq = Equation::from("I & !B | (A + B) and (c + a./y)".to_string());
let mut vars = eq.get_vars();
vars.sort();
assert_eq!(vars, vec!["A", "B", "I", "a", "c", "y"]);
}
}
#[cfg(test)]
mod tests_depth {
use super::*;
use equation::{Equation, Not, Prod, Sum};
use lexer::EqLexer;
#[test]
fn test_basics() {
let eq = Equation::from("a + b * c".to_string());
assert_eq!(eq.depth(0), 3);
}
#[test]
fn test_more() {
let eq = Equation::from("I & !B | (A + B) and (c + a./y)".to_string());
assert_eq!(eq.depth(0), 6);
}
}
#[cfg(test)]
mod tests_simplify {
use super::*;
use equation::{Equation, Not, Prod, Sum};
use lexer::EqLexer;
#[test]
fn test_simplify_sum() {
let eq = Equation::from("a + (b + c + (a + j))".to_string());
let mut new_eq = eq.complete_simplify();
assert_eq!(format!("{}", new_eq), "(a + b + c + j)");
}
#[test]
fn test_factorise() {
let eq = Equation::from("a * (B + c + d) * e".to_string());
let new_eq = eq.complete_simplify();
assert_eq!(
format!("{}", new_eq),
"((B * a * e) + (a * c * e) + (a * d * e))"
);
}
#[test]
fn test_flatten_prod() {
let eq = Equation::from("a * (b * c * (a * j))".to_string());
let new_eq = eq.simplified();
assert_eq!(format!("{}", new_eq), "(a * b * c * a * j)");
}
#[test]
fn test_remove_par() {
let eq = Equation::from("(((a))) * ((b)) + (((c)))".to_string());
let new_eq = eq.simplified();
assert_eq!(format!("{}", new_eq), "((a * b) + c)");
}
#[test]
fn test_not_sum() {
let eq = Equation::from("!(a + b + c)".to_string());
let new_eq = eq.simplified();
assert_eq!(format!("{}", new_eq), "(! a * ! b * ! c)");
}
#[test]
fn test_not_not() {
let eq = Equation::from("!!(a + b + c)".to_string());
let new_eq = eq.simplified().simplified();
assert_eq!(format!("{}", new_eq), "(a + b + c)");
}
#[test]
fn test_not_prod() {
let eq = Equation::from("!(a * b * c)".to_string());
let new_eq = eq.simplified().simplified();
assert_eq!(format!("{}", new_eq), "(! a + ! b + ! c)");
}
#[test]
fn test_keep_same() {
let eq = Equation::from("b* a".to_string());
let new_eq = eq.simplified().simplified();
assert_eq!(format!("{}", new_eq), "(b * a)");
}
#[test]
fn combined_tests() {
let eq = Equation::from(" A. B + (!((C . B) + D ) . A) + B".to_string());
let new_eq = eq.clone().complete_simplify();
new_eq.compare_random_values(&eq, 1000);
let eq = Equation::from(" I & !B | (A + B) and (c + a./y)".to_string());
let new_eq = eq.clone().complete_simplify();
new_eq.compare_random_values(&eq, 1000);
let eq = Equation::from("!(!a*!f + !b*!c + !d*!e)".to_string());
let new_eq = eq.clone().complete_simplify();
new_eq.compare_random_values(&eq, 1000);
}
}
| true |
dda692deda14b883e254f799381656438d4be1fa
|
Rust
|
benzcash/zcash-android-wallet-sdk
|
/src/main/rust/utils.rs
|
UTF-8
| 1,198 | 2.546875 | 3 |
[
"Apache-2.0"
] |
permissive
|
use jni::{
descriptors::Desc,
errors::Result as JNIResult,
objects::{JClass, JObject, JString},
sys::{jobjectArray, jsize},
JNIEnv,
};
use std::ops::Deref;
pub(crate) mod exception;
pub(crate) fn java_string_to_rust(env: &JNIEnv<'_>, jstring: JString<'_>) -> String {
env.get_string(jstring)
.expect("Couldn't get Java string!")
.into()
}
pub(crate) fn rust_vec_to_java<'a, T, U, V, F, G>(
env: &JNIEnv<'a>,
data: Vec<T>,
element_class: U,
element_map: F,
empty_element: G,
) -> jobjectArray
where
U: Desc<'a, JClass<'a>>,
V: Deref<Target = JObject<'a>>,
F: Fn(&JNIEnv<'a>, T) -> JNIResult<V>,
G: Fn(&JNIEnv<'a>) -> JNIResult<V>,
{
    let jempty = empty_element(env).expect("Couldn't create empty Java element!");
let jret = env
.new_object_array(data.len() as jsize, element_class, *jempty)
.expect("Couldn't create Java array!");
for (i, elem) in data.into_iter().enumerate() {
let jelem = element_map(env, elem).expect("Couldn't map element to Java!");
env.set_object_array_element(jret, i as jsize, *jelem)
.expect("Couldn't set Java array element!");
}
jret
}
| true |
2000c5ca3d0d4d100d53ceecd1ffbc2a00d1dce4
|
Rust
|
SolarLiner/gargantua
|
/gargantua/src/raytrace.rs
|
UTF-8
| 9,449 | 2.84375 | 3 |
[] |
no_license
|
use color::Color;
use image::{DynamicImage, Pixel, Rgb};
use nalgebra::{
Isometry3, Perspective3, Point2, Point3, Translation3, Unit, UnitQuaternion, Vector2, Vector3,
};
use std::f64;
use crate::texture::{Texture, TextureFiltering, TextureMode};
use crate::utils::cartesian_to_spherical;
pub type Point = Point3<f64>;
pub type Vector = Vector3<f64>;
pub type TexCoords = Vector2<f64>;
#[derive(Clone, Debug)]
pub struct Ray {
pub origin: Point,
pub direction: Unit<Vector>,
}
#[derive(Clone)]
pub struct Sphere {
pub pos: Point,
pub radius: f64,
pub texture: Texture,
}
#[derive(Clone)]
pub struct Ring {
pub pos: Point,
pub radius: (f64, f64),
pub texture_top: Texture,
pub texture_bottom: Texture,
}
#[derive(Clone)]
pub struct Camera {
pub width: u32,
pub height: u32,
pub isometry: Isometry3<f64>,
pub perspective: Perspective3<f64>,
}
#[derive(Clone)]
pub struct Scene {
pub camera: Camera,
pub sphere: Sphere,
pub ring: Ring,
pub bgtex: Option<Texture>,
}
pub trait Intersectable {
fn intersect(&self, ray: &Ray) -> Option<f64>;
fn surface_normal(&self, hit: &Point) -> Unit<Vector>;
fn texture_coords(&self, hit: &Point) -> TexCoords;
}
pub trait Renderable {
fn render_px(&self, x: u32, y: u32) -> Color;
fn get_dimensions(&self) -> (u32, u32);
}
impl Intersectable for Sphere {
fn intersect(&self, ray: &Ray) -> Option<f64> {
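        // Solve |origin + t*direction - center|^2 = r^2 for t; the sign of the discriminant
        // (`under_sqrt`) tells us whether the ray misses, grazes or pierces the sphere.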
let oc = ray.origin - self.pos;
let loc = ray.direction.dot(&oc);
let under_sqrt = loc.powf(2.0) - (oc.dot(&oc) - self.radius.powf(2.0));
if under_sqrt < 0.0 {
return None;
} else if under_sqrt > 0.0 {
return Some((-loc + under_sqrt.sqrt()).min(-loc - under_sqrt.sqrt()));
}
        return Some(-loc); // tangent case: the single root of the quadratic is -loc
}
fn surface_normal(&self, hit: &Point) -> Unit<Vector> {
Unit::new_normalize(*hit - self.pos)
}
fn texture_coords(&self, hit: &Point) -> TexCoords {
let dir = *hit - self.pos;
let (_, theta, phi) = cartesian_to_spherical(&dir);
return TexCoords::new(theta / f64::consts::PI, 0.5 * phi / f64::consts::PI + 0.5);
}
}
impl Intersectable for Ring {
fn intersect(&self, ray: &Ray) -> Option<f64> {
match ray_plane(
&Ray {
origin: self.pos,
direction: Vector::z_axis(),
},
ray,
) {
Some(t) => {
let p = ray.origin + ray.direction.as_ref() * t;
let d2 = p.coords.dot(&p.coords);
if d2 > self.radius.0 * self.radius.0 || d2 < self.radius.1 * self.radius.1 {
return None;
} else {
return Some(t);
}
}
None => None,
}
}
fn surface_normal(&self, _hit: &Point) -> Unit<Vector> {
Vector::z_axis()
}
fn texture_coords(&self, hit: &Point) -> TexCoords {
let local = hit - self.pos;
let r = local.dot(&local).sqrt();
let theta = local.normalize().dot(&Vector::x_axis().as_ref());
return TexCoords::new(r * theta.cos(), r * theta.sin());
}
}
impl Camera {
pub fn new(width: u32, height: u32, fov: f64) -> Self {
Self {
width,
height,
perspective: Perspective3::new(
height as f64 / width as f64,
fov.to_radians(),
0.01,
200.0,
),
isometry: Isometry3::identity(),
}
}
pub fn create_primary(&self, x: u32, y: u32) -> Ray {
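        // Map the pixel to normalized device coordinates, unproject points on the near and far
        // planes, and build a world-space ray passing through both.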
let normalized = Point2::new(x as f64 / self.width as f64, y as f64 / self.height as f64);
let nds = normalized * 2.0 - Point2::new(1.0, 1.0);
let ndc_near = Point::new(nds.x, nds.y, -1.0);
let ndc_far = Point::new(nds.x, nds.y, 1.0);
let origin = self
.isometry
.transform_point(&self.perspective.unproject_point(&ndc_near));
let view_far = self
.isometry
.transform_point(&self.perspective.unproject_point(&ndc_far));
let direction = Unit::new_normalize(view_far - origin);
Ray { origin, direction }
}
pub fn set_position(&mut self, pos: Translation3<f64>) {
self.isometry = Isometry3::from_parts(pos, self.isometry.rotation);
}
pub fn set_rotation(&mut self, rot: UnitQuaternion<f64>) {
self.isometry = Isometry3::from_parts(self.isometry.translation, rot);
}
}
impl Scene {
pub fn set_camera(
&mut self,
trans: Option<Translation3<f64>>,
rot: Option<UnitQuaternion<f64>>,
fov: Option<f64>,
) {
match (trans, rot) {
(Some(t), Some(r)) => self.camera.isometry = Isometry3::from_parts(t, r),
(Some(t), None) => self.camera.set_position(t),
(None, Some(r)) => self.camera.set_rotation(r),
(None, None) => {}
};
if let Some(f) = fov {
self.camera.perspective.set_fovy(f.to_radians());
}
}
pub fn set_size(&mut self, width: u32, height: u32) {
self.camera.width = width;
self.camera.height = height;
self.camera
.perspective
.set_aspect(width as f64 / height as f64);
}
pub fn get_background(mut self) -> Texture {
if let Some(tex) = self.bgtex {
return tex;
} else {
self.bgtex = Self::create_bg_texture(50, 50).ok();
return self.bgtex.expect("Can't create background texture");
}
}
pub fn create_bg_texture(width: u32, height: u32) -> Result<Texture, &'static str> {
let mut bg_img = DynamicImage::new_rgb8(width, height);
match bg_img.as_mut_rgb8() {
Some(buf) => {
for (x, y, p) in buf.enumerate_pixels_mut() {
if (x + y) % 2 == 0 {
*p = Rgb::from_channels(255, 255, 0, 255);
} else {
*p = Rgb::from_channels(0, 255, 255, 255);
}
}
}
None => return Err("Couldn't create background texture"),
}
let bgtex = Texture(bg_img, TextureFiltering::Nearest, TextureMode::Repeat);
return Ok(bgtex);
}
}
impl Renderable for Scene {
fn render_px(&self, x: u32, y: u32) -> Color {
let this = self.clone();
let bgtex = this.get_background();
let ray = self.camera.create_primary(x, y);
self.ring
.intersect(&ray)
.map(|p| {
let hit = ray.origin + ray.direction.as_ref() * p;
let uv = self.ring.texture_coords(&hit);
return self.ring.texture_top.uv(uv);
})
.or_else(|| {
self.sphere.intersect(&ray).map(|p| {
let hit = ray.origin + ray.direction.as_ref() * p;
let uv = self.sphere.texture_coords(&hit);
return self.sphere.texture.uv(uv);
})
})
.or_else(|| {
let (_, theta, phi) = cartesian_to_spherical(&ray.direction);
let uv = TexCoords::new(theta / f64::consts::PI, 0.5 * phi / f64::consts::PI + 0.5);
return Some(bgtex.uv(uv));
})
.unwrap()
}
fn get_dimensions(&self) -> (u32, u32) {
(self.camera.width, self.camera.height)
}
}
pub mod render {
use super::Renderable;
use crate::utils::{color_to_rgba, DimIterator};
use image::{DynamicImage, GenericImageView};
// use rayon::prelude::*;
use rayon::ThreadPoolBuilder;
use std::sync::{mpsc, Arc, Mutex};
    type Reporter<'a> = &'a dyn Fn(f64, String);
pub fn render<'a, R: Renderable + Clone + Send + Sync + 'static>(
o: R,
r: Option<Reporter<'a>>,
) -> Result<DynamicImage, &'static str> {
let (width, height) = o.get_dimensions();
let num_threads = num_cpus::get().min(30); // Set an upper bound on the number of threads to not overwhelm the OS
let chunk_size = 32u32;
let num_columns = 1 + width / chunk_size;
let num_rows = 1 + height / chunk_size;
let pool = ThreadPoolBuilder::new()
.num_threads(num_threads)
.build()
.or(Err("Cannot setup threading"))?;
let (tx, rx) = mpsc::channel();
let osrc = Arc::new(o);
let misses = Arc::new(Mutex::new(0u32));
for cy in 0..num_rows {
for cx in 0..num_columns {
let x = chunk_size * cx;
let y = chunk_size * cy;
let x_size = chunk_size.min(width - x);
let y_size = chunk_size.min(height - y);
let ttx = tx.clone();
let this = Arc::clone(&osrc);
let m = Arc::clone(&misses);
pool.spawn(move || {
for (x, y) in DimIterator::create(x_size, y_size, x, y) {
match ttx.send((x, y, this.render_px(x, y))) {
Ok(_) => (),
Err(_) => {
let mut mref = m.lock().unwrap();
*mref += 1;
}
}
}
})
}
}
drop(tx);
let mut img = DynamicImage::new_rgba8(width, height);
match img.as_mut_rgba8() {
Some(buf) => {
let mut i: usize = 0;
let tot = width * height;
for (x, y, col) in rx.into_iter() {
if let Some(f) = r {
if i % 40 == 0 {
let nm = *misses.lock().unwrap();
if nm > 0 {
(*f)(
i as f64 / tot as f64,
format!("Raytracing ({} missed/overshot pixels)...", nm),
);
} else {
(*f)(i as f64 / tot as f64, format!("Raytracing..."));
}
}
}
i += 1;
if buf.in_bounds(x, y) {
buf.put_pixel(x, y, color_to_rgba(&col));
} else {
*misses.lock().unwrap() += 1;
}
}
let num_misses = *misses.lock().unwrap();
if num_misses > 0 {
println!("WARNING: Missed/Overshot {} pixels", num_misses);
}
Ok(img)
}
None => Err("Couldn't create image"),
}
}
}
fn ray_plane(plane: &Ray, ray: &Ray) -> Option<f64> {
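    // The plane is encoded as a `Ray` whose origin lies on the plane and whose direction is the
    // plane normal; the intersection parameter is ((p0 - o) . n) / (d . n), with None when the
    // ray is parallel to the plane.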
let ln = ray.direction.dot(&plane.direction);
if ln == 0.0 {
return None;
}
return Some((plane.origin - ray.origin).dot(&plane.direction) / ln);
}
#[cfg(test)]
mod tests {
use super::Camera;
use nalgebra::{Point3, Translation3, Vector3};
use approx::assert_relative_eq;
#[test]
fn camera_creates_primary() {
let mut cam = Camera::new(500, 500, 50.0);
cam.set_position(Translation3::new(0.0, 0.0, -0.01));
let ray = cam.create_primary(250, 250);
println!("{:?}", ray);
assert_relative_eq!(ray.origin, Point3::new(0.0, 0.0, 0.0), epsilon = 0.01);
assert_relative_eq!(
ray.direction.into_inner(),
Vector3::new(0.0, 0.0, -1.0),
epsilon = 0.01
);
}
}
| true |
0f913eea2a5e408077595fbfdcd708a8903463f8
|
Rust
|
rust-chainblock/blockchain0
|
/src/world.rs
|
UTF-8
| 688 | 3.015625 | 3 |
[] |
no_license
|
use crate::account::Account;
use crate::id::Id;
use crate::Error;
/// Snapshot of the world, not to have to rebuild it every time we query it.
pub trait WorldState {
/// Return an account that exists in the world, by its ID.
fn get_account_by_id(&self, id: &Id) -> Result<&Account, Error>;
/// Return a mutable reference to an account that exists in the world, by its ID.
fn get_account_by_id_mut(&mut self, id: &Id) -> Result<&mut Account, Error>;
/// Register a new account in the world.
fn add_account(&mut self, id: Id) -> Result<(), Error>;
/// Is the world in its genesis, i.e. are we currently creating that world?
fn is_genesis(&self) -> bool;
}
| true |
89fd751ae0dff1e765db14d2f58d631fcd13d6e5
|
Rust
|
ddimaria/rust-actix-starter
|
/src/handlers/health.rs
|
UTF-8
| 702 | 2.6875 | 3 |
[
"MIT"
] |
permissive
|
use crate::errors::ApiError;
use crate::helpers::respond_json;
use actix_web::web::Json;
#[derive(Debug, Deserialize, Serialize, PartialEq)]
pub struct HealthResponse {
pub status: String,
pub version: String,
}
/// Handler to get the liveness of the service
pub fn get_health() -> Result<Json<HealthResponse>, ApiError> {
respond_json(HealthResponse {
status: "ok".into(),
version: env!("CARGO_PKG_VERSION").into(),
})
}
#[cfg(test)]
mod tests {
use super::*;
use actix_web::test;
#[test]
fn test_get_health() {
let response = test::block_on(get_health()).unwrap();
assert_eq!(response.into_inner().status, "ok".to_string());
}
}
| true |
0c34615e2157f7ac00fcc030799af90599ad5283
|
Rust
|
shizonic/onefetch
|
/src/onefetch/cli_utils.rs
|
UTF-8
| 2,917 | 2.859375 | 3 |
[
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
use crate::onefetch::{ascii_art::AsciiArt, error::*, info::Info, language::Language};
use colored::Color;
use std::env;
use std::io::Write;
use strum::IntoEnumIterator;
pub struct Printer<W> {
writer: W,
info: Info,
}
impl<W: Write> Printer<W> {
pub fn new(writer: W, info: Info) -> Self {
Self { writer, info }
}
pub fn print(&mut self) -> Result<()> {
let center_pad = " ";
let info_str = format!("{}", &self.info);
let mut info_lines = info_str.lines();
let colors: Vec<Color> = Vec::new();
let mut buf = String::new();
if self.info.config.art_off {
buf.push_str(&info_str);
} else if let Some(custom_image) = &self.info.config.image {
buf.push_str(&self.info.config.image_backend.as_ref().unwrap().add_image(
info_lines.map(|s| format!("{}{}", center_pad, s)).collect(),
custom_image,
self.info.config.image_colors,
));
} else {
let mut logo_lines = if let Some(custom_ascii) = &self.info.config.ascii_input {
AsciiArt::new(custom_ascii, &colors, !self.info.config.no_bold)
} else {
AsciiArt::new(
self.get_ascii(),
&self.info.colors,
!self.info.config.no_bold,
)
};
loop {
match (logo_lines.next(), info_lines.next()) {
(Some(logo_line), Some(info_line)) => {
buf.push_str(&format!("{}{}{:^}\n", logo_line, center_pad, info_line))
}
(Some(logo_line), None) => buf.push_str(&format!("{}\n", logo_line)),
(None, Some(info_line)) => buf.push_str(&format!(
"{:<width$}{}{:^}\n",
"",
center_pad,
info_line,
width = logo_lines.width()
)),
(None, None) => {
buf.push('\n');
break;
}
}
}
}
write!(self.writer, "{}", buf)?;
Ok(())
}
fn get_ascii(&self) -> &str {
let language = if let Language::Unknown = self.info.config.ascii_language {
&self.info.dominant_language
} else {
&self.info.config.ascii_language
};
language.get_ascii_art()
}
}
pub fn print_supported_languages() -> Result<()> {
let iterator = Language::iter().filter(|x| *x != Language::Unknown);
for l in iterator {
println!("{}", l);
}
Ok(())
}
pub fn is_truecolor_terminal() -> bool {
env::var("COLORTERM")
.map(|colorterm| colorterm == "truecolor" || colorterm == "24bit")
.unwrap_or(false)
}
| true |
8e311265441287552c651fe7c73a14bbb3c55705
|
Rust
|
Defelo/AdventOfCode
|
/2022/01.rs
|
UTF-8
| 874 | 2.8125 | 3 |
[
"MIT"
] |
permissive
|
type Input = Vec<Vec<u32>>;
fn setup(input: &str) -> Input {
input
.trim()
.split("\n\n")
.map(|elf| elf.split_whitespace().map(|x| x.parse().unwrap()).collect())
.collect()
}
fn part1(input: &Input) -> u32 {
input.iter().map(|elf| elf.iter().sum()).max().unwrap()
}
fn part2(input: &Input) -> u32 {
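    // Fold over the elf totals, keeping the three largest values seen so far in (a, b, c),
    // ordered so that a >= b >= c.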
let (a, b, c) = input
.iter()
.map(|elf| elf.iter().sum())
.fold((0, 0, 0), |(a, b, c), x| {
if x > b {
if x > a {
(x, a, b)
} else {
(a, x, b)
}
} else if x > c {
(a, b, x)
} else {
(a, b, c)
}
});
a + b + c
}
aoc::main!(2022, 1);
aoc::example!(ex01, "01.1.txt", 24000, 45000);
aoc::test_input!("01.txt", 69912, 208180);
| true |
d2c40d0e1ed450d4743c4584965f730a7be2b7cb
|
Rust
|
vaind/objectbox-rust
|
/src/model.rs
|
UTF-8
| 6,292 | 2.953125 | 3 |
[
"Apache-2.0"
] |
permissive
|
use crate::{c, error::Error};
use std::{ffi, ptr};
pub type SchemaID = u32;
pub type SchemaUID = u64;
/// Model is used to define a database model. Use as a fluent interface (builder pattern)
pub struct Model {
c_ptr: *mut c::OBX_model,
error: Option<Error>,
}
pub struct Entity {
model: Model,
}
impl Model {
pub fn new() -> Model {
match c::new_mut(unsafe { c::obx_model() }) {
Ok(c_ptr) => Model { c_ptr, error: None },
Err(e) => Model {
c_ptr: ptr::null_mut(),
error: Some(e),
},
}
}
/// Create an entity.
pub fn entity(mut self, name: &str, id: SchemaID, uid: SchemaUID) -> Entity {
if self.error.is_none() {
let c_name = ffi::CString::new(name).unwrap();
self.error =
c::call(unsafe { c::obx_model_entity(self.c_ptr, c_name.as_ptr(), id, uid) }).err();
}
return Entity { model: self };
}
/// Inform the model about the last entity that was ever defined in the model.
pub fn last_entity_id(self, id: SchemaID, uid: SchemaUID) -> Model {
if self.error.is_none() {
unsafe { c::obx_model_last_entity_id(self.c_ptr, id, uid) }
}
return self;
}
/// Inform the model about the last index that was ever defined in the model.
pub fn last_index_id(self, id: SchemaID, uid: SchemaUID) -> Model {
if self.error.is_none() {
unsafe { c::obx_model_last_index_id(self.c_ptr, id, uid) }
}
return self;
}
/// Inform the model about the last relation that was ever defined in the model.
pub fn last_relation_id(self, id: SchemaID, uid: SchemaUID) -> Model {
if self.error.is_none() {
unsafe { c::obx_model_last_relation_id(self.c_ptr, id, uid) }
}
return self;
}
}
impl Entity {
/// Inform the model about the last property that was ever defined on the entity.
/// Finishes building the entity, returning the parent Model.
pub fn last_property_id(self, id: SchemaID, uid: SchemaUID) -> Model {
let mut model = self.model;
if model.error.is_none() {
model.error =
c::call(unsafe { c::obx_model_entity_last_property_id(model.c_ptr, id, uid) })
.err();
}
return model;
}
/// Create a property.
pub fn property(
mut self,
name: &str,
typ: c::OBXPropertyType,
flags: c::OBXPropertyFlags,
id: SchemaID,
uid: SchemaUID,
) -> Entity {
if self.model.error.is_none() {
let c_name = ffi::CString::new(name).unwrap();
self.model.error = c::call(unsafe {
c::obx_model_property(self.model.c_ptr, c_name.as_ptr(), typ, id, uid)
})
.err();
}
if flags > 0 && self.model.error.is_none() {
self.model.error =
c::call(unsafe { c::obx_model_property_flags(self.model.c_ptr, flags) }).err();
}
return self;
}
/// Declare an index on the last created property.
pub fn property_index(mut self, id: SchemaID, uid: SchemaUID) -> Entity {
if self.model.error.is_none() {
self.model.error =
c::call(unsafe { c::obx_model_property_index_id(self.model.c_ptr, id, uid) }).err();
}
return self;
}
/// Declare a to-one relation on the last created property.
/// No need to declare the index separately using property_index(), it's created automatically.
pub fn property_relation(
mut self,
target_entity_name: &str,
index_id: SchemaID,
index_uid: SchemaUID,
) -> Entity {
if self.model.error.is_none() {
let c_name = ffi::CString::new(target_entity_name).unwrap();
self.model.error = c::call(unsafe {
c::obx_model_property_relation(
self.model.c_ptr,
c_name.as_ptr(),
index_id,
index_uid,
)
})
.err();
}
return self;
}
/// Declare a standalone to-many relation between this entity and another one
pub fn relation(
mut self,
relation_id: SchemaID,
relation_uid: SchemaUID,
target_entity_id: SchemaID,
target_entity_uid: SchemaUID,
) -> Entity {
if self.model.error.is_none() {
self.model.error = c::call(unsafe {
c::obx_model_relation(
self.model.c_ptr,
relation_id,
relation_uid,
target_entity_id,
target_entity_uid,
)
})
.err();
}
return self;
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn model_builder_positive() {
let model = Model::new()
.entity("A", 1, 1)
.property(
"id",
c::OBXPropertyType_Long,
c::OBXPropertyFlags_ID,
1,
101,
)
.property("text", c::OBXPropertyType_String, 0, 2, 102)
.property_index(1, 1021)
.last_property_id(2, 102)
.entity("B", 2, 2)
.property(
"id",
c::OBXPropertyType_Long,
c::OBXPropertyFlags_ID,
1,
201,
)
.property("number", c::OBXPropertyType_Int, 0, 2, 202)
.last_property_id(2, 202)
.last_entity_id(2, 2)
.last_index_id(1, 1021);
assert!(model.error.is_none());
}
#[test]
fn model_builder_negative() {
let model = Model::new().entity("A", 1, 1).last_property_id(0, 0);
let expected_err = format!(
"{} Argument condition \"property_id\" not met",
c::OBX_ERROR_ILLEGAL_ARGUMENT
);
let actual_err = format!("{}", model.error.unwrap());
println!("expected: {}", &expected_err);
println!("actual: {}", &actual_err);
assert!(actual_err.starts_with(&expected_err));
}
}
| true |
9e9f567822a1f352ecd6da534f25e8df811ddb18
|
Rust
|
jackmott/advent2019
|
/day_13/src/main.rs
|
UTF-8
| 2,765 | 3.328125 | 3 |
[
"MIT"
] |
permissive
|
use intcomputer::*;
use std::collections::HashMap;
use std::fs;
use std::sync::mpsc::channel;
use std::sync::mpsc::SendError;
use std::thread;
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
struct Pos {
x: i64,
y: i64,
}
use Tile::*;
#[derive(PartialEq, Copy, Debug, Clone)]
enum Tile {
Empty,
Wall,
Block,
HorizontalPaddle,
Ball,
}
impl Tile {
fn from_int(i: i64) -> Tile {
match i {
0 => Empty,
1 => Wall,
2 => Block,
3 => HorizontalPaddle,
4 => Ball,
_ => panic!("invalid tile:{}", i),
}
}
}
fn main() -> Result<(), SendError<i64>> {
let digits: Vec<i64> = fs::read_to_string("input.txt")
.unwrap()
.trim()
.split(',')
.map(|s| s.parse::<i64>().unwrap())
.collect();
// PART 1
let (game_output, computer_input) = channel();
let (computer_output, game_input) = channel();
thread::spawn(move || {
// Create a computer and run it
let mut computer =
SuperComputer::new("Gamer".to_string(), digits, computer_output, computer_input);
computer.run();
});
let mut tiles = HashMap::new();
let mut paddle_pos = Pos { x: 0, y: 0 };
loop {
let x = match game_input.recv() {
Ok(x) => x, // x
Err(_) => break,
};
let y = match game_input.recv() {
Ok(y) => y, // y
Err(_) => {
break;
}
};
let pos = Pos { x, y };
match game_input.recv() {
Ok(input) => {
if x != -1 {
let tile = Tile::from_int(input);
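                    // Simple joystick AI: steer the paddle toward the ball
                    // (+1 if the ball is to the right, -1 if to the left, 0 if aligned).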
if tile == Ball {
let output = if paddle_pos.x < pos.x {
1
} else if paddle_pos.x > pos.x {
-1
} else {
0
};
match game_output.send(output) {
Ok(_) => continue,
Err(_) => {
break;
}
}
} else if tile == HorizontalPaddle {
paddle_pos = pos;
}
tiles.insert(Pos { x, y }, tile);
} else {
println!("score:{}", input);
}
}
Err(err) => {
println!("err on input:{}", err);
break;
}
}
}
// Part1
println!("{}", tiles.values().filter(|tile| **tile == Block).count());
Ok(())
}
| true |
d32b5286b459f43e259001c880647a3eb790c7d9
|
Rust
|
jeroenvervaeke/adventofcode-2019
|
/day_02/src/main.rs
|
UTF-8
| 5,545 | 3.609375 | 4 |
[] |
no_license
|
use std::error::Error;
use std::fs::File;
use std::io::{BufRead, BufReader};
fn main() -> Result<(), Box<dyn Error>> {
let file = File::open("day_02/input.txt")?;
let mut reader = BufReader::new(file);
let mut line = String::new();
reader.read_line(&mut line)?;
let intcode: Vec<u32> = line
.trim()
.split(',')
.map(|digit_str| digit_str.parse::<u32>().expect("Invalid input file"))
.collect();
let result_part_1 = run_part(&intcode, 12, 2)?;
println!("Part 1: {}", result_part_1);
let desired_result = 19690720;
let mut result_part_2 = None;
for noun in 0..100 {
for verb in 0..100 {
let result = run_part(&intcode, noun, verb)?;
if result == desired_result {
result_part_2 = Some((noun, verb))
}
}
}
match result_part_2 {
Some((noun, verb)) => println!("Part 2: Noun={}, Verb={}", noun, verb),
None => println!("Part 2: No result found"),
}
Ok(())
}
fn run_part(intcode: &Vec<u32>, noun: u32, verb: u32) -> Result<u32, Box<dyn Error>> {
let mut memory = intcode.clone();
memory[1] = noun;
memory[2] = verb;
run_intcode(memory.as_mut_slice())?;
Ok(memory[0])
}
#[derive(Debug, PartialEq)]
enum Op {
Add {
idx1: usize,
idx2: usize,
dst: usize,
},
Multiply {
idx1: usize,
idx2: usize,
dst: usize,
},
Exit,
}
impl Op {
fn op_len(&self) -> usize {
match self {
Op::Add { .. } | Op::Multiply { .. } => 4,
Op::Exit => 1,
}
}
}
fn parse_opcode(current: &[u32]) -> Option<Op> {
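    // Slice patterns let us match the opcode and its operands directly off the front of memory;
    // `None` means the remaining slice is too short or starts with an unknown opcode.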
match current {
[1, idx1, idx2, dst, ..] => Some(Op::Add {
idx1: *idx1 as usize,
idx2: *idx2 as usize,
dst: *dst as usize,
}),
[2, idx1, idx2, dst, ..] => Some(Op::Multiply {
idx1: *idx1 as usize,
idx2: *idx2 as usize,
dst: *dst as usize,
}),
[99, ..] => Some(Op::Exit),
_ => None,
}
}
fn run_intcode(memory: &mut [u32]) -> Result<(), &str> {
let mut idx = 0;
loop {
let op_code = parse_opcode(&memory[idx..]).ok_or("Invalid OP code")?;
match op_code {
Op::Add { idx1, idx2, dst } => memory[dst] = memory[idx1] + memory[idx2],
Op::Multiply { idx1, idx2, dst } => memory[dst] = memory[idx1] * memory[idx2],
Op::Exit => break,
}
idx += op_code.op_len();
}
Ok(())
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn parse_add_exact() {
let opcodes = [1, 2, 3, 4];
let op = parse_opcode(&opcodes);
assert_eq!(
op,
Some(Op::Add {
idx1: 2,
idx2: 3,
dst: 4
})
);
}
#[test]
fn parse_add_trailing() {
let opcodes = [1, 2, 3, 4, 5, 6];
let op = parse_opcode(&opcodes);
assert_eq!(
op,
Some(Op::Add {
idx1: 2,
idx2: 3,
dst: 4
})
);
}
#[test]
fn parse_add_too_short() {
let opcodes = [1, 2, 3];
let op = parse_opcode(&opcodes);
assert_eq!(op, None);
}
#[test]
fn parse_multiply_exact() {
let opcodes = [2, 3, 4, 5];
let op = parse_opcode(&opcodes);
assert_eq!(
op,
Some(Op::Multiply {
idx1: 3,
idx2: 4,
dst: 5
})
);
}
#[test]
fn parse_multiply_trailing() {
let opcodes = [2, 3, 4, 5, 6, 7, 8, 9, 10, 11];
let op = parse_opcode(&opcodes);
assert_eq!(
op,
Some(Op::Multiply {
idx1: 3,
idx2: 4,
dst: 5
})
);
}
#[test]
fn parse_exit_exact() {
let opcodes = [99];
let op = parse_opcode(&opcodes);
assert_eq!(op, Some(Op::Exit));
}
#[test]
fn parse_exit_trailing() {
let opcodes = [99, 100];
let op = parse_opcode(&opcodes);
assert_eq!(op, Some(Op::Exit));
}
#[test]
fn run_example_explained_in_text() {
let mut memory = [1, 9, 10, 3, 2, 3, 11, 0, 99, 30, 40, 50];
let result = run_intcode(&mut memory);
assert_eq!(result.is_ok(), true);
assert_eq!(memory, [3500, 9, 10, 70, 2, 3, 11, 0, 99, 30, 40, 50]);
}
#[test]
fn run_example_short_1() {
let mut memory = [1, 0, 0, 0, 99];
let result = run_intcode(&mut memory);
assert_eq!(result.is_ok(), true);
assert_eq!(memory, [2, 0, 0, 0, 99]);
}
#[test]
fn run_example_short_2() {
let mut memory = [2, 3, 0, 3, 99];
let result = run_intcode(&mut memory);
assert_eq!(result.is_ok(), true);
assert_eq!(memory, [2, 3, 0, 6, 99]);
}
#[test]
fn run_example_short_3() {
let mut memory = [2, 4, 4, 5, 99, 0];
let result = run_intcode(&mut memory);
assert_eq!(result.is_ok(), true);
assert_eq!(memory, [2, 4, 4, 5, 99, 9801]);
}
#[test]
fn run_example_short_4() {
let mut memory = [1, 1, 1, 4, 99, 5, 6, 0, 99];
let result = run_intcode(&mut memory);
assert_eq!(result.is_ok(), true);
assert_eq!(memory, [30, 1, 1, 4, 2, 5, 6, 0, 99]);
}
}
| true |
b2e426c99ebb5112acca24d605da4fbde2885c36
|
Rust
|
ElementsProject/rust-elements
|
/src/dynafed.rs
|
UTF-8
| 28,993 | 2.5625 | 3 |
[
"CC0-1.0"
] |
permissive
|
// Rust Elements Library
// Written in 2019 by
// Andrew Poelstra <[email protected]>
//
// To the extent possible under law, the author(s) have dedicated all
// copyright and related and neighboring rights to this software to
// the public domain worldwide. This software is distributed without
// any warranty.
//
// You should have received a copy of the CC0 Public Domain Dedication
// along with this software.
// If not, see <http://creativecommons.org/publicdomain/zero/1.0/>.
//
//! Dynamic Federations
use std::{fmt, io};
#[cfg(feature = "serde")]
use serde::{Deserialize, Deserializer, Serialize, Serializer};
#[cfg(feature = "serde")]
use serde::ser::{SerializeSeq, SerializeStruct};
use crate::encode::{self, Encodable, Decodable};
use crate::hashes::{Hash, sha256, sha256d};
use crate::Script;
/// ad-hoc struct to fmt in hex
struct HexBytes<'a>(&'a [u8]);
impl<'a> fmt::Display for HexBytes<'a> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
crate::hex::format_hex(self.0, f)
}
}
impl<'a> fmt::Debug for HexBytes<'a> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fmt::Display::fmt(&self, f)
}
}
#[cfg(feature = "serde")]
impl<'a> Serialize for HexBytes<'a> {
fn serialize<S: Serializer>(&self, s: S) -> Result<S::Ok, S::Error> {
if s.is_human_readable() {
s.collect_str(self)
} else {
s.serialize_bytes(self.0)
}
}
}
/// ad-hoc struct to fmt in hex
struct HexBytesArray<'a>(&'a [Vec<u8>]);
impl<'a> fmt::Display for HexBytesArray<'a> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "[")?;
for (i, e) in self.0.iter().enumerate() {
if i != 0 {
write!(f, ", ")?;
}
crate::hex::format_hex(&e[..], f)?;
}
write!(f, "]")
}
}
impl<'a> fmt::Debug for HexBytesArray<'a> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fmt::Display::fmt(&self, f)
}
}
#[cfg(feature = "serde")]
impl<'a> Serialize for HexBytesArray<'a> {
fn serialize<S: Serializer>(&self, s: S) -> Result<S::Ok, S::Error> {
let mut seq = s.serialize_seq(Some(self.0.len()))?;
for b in self.0 {
seq.serialize_element(&HexBytes(&b[..]))?;
}
seq.end()
}
}
/// Full dynafed parameters with all fields.
#[derive(Clone, Eq, Hash, PartialEq, PartialOrd, Ord)]
pub struct FullParams {
/// "scriptPubKey" used for block signing
signblockscript: Script,
/// Maximum, in bytes, of the size of a blocksigning witness
signblock_witness_limit: u32,
/// Untweaked `scriptPubKey` used for pegins
fedpeg_program: bitcoin::ScriptBuf,
/// For v0 fedpeg programs, the witness script of the untweaked
/// pegin address. For future versions, this data has no defined
/// meaning and will be considered "anyone can spend".
fedpegscript: Vec<u8>,
/// "Extension space" used by Liquid for PAK key entries
extension_space: Vec<Vec<u8>>,
}
impl FullParams {
/// Return the `extra root` of this params.
/// The extra root commits to the consensus parameters unrelated to
/// blocksigning: `fedpeg_program`, `fedpegscript` and `extension_space`.
fn extra_root(&self) -> sha256::Midstate {
fn serialize_hash<E: Encodable>(obj: &E) -> sha256d::Hash {
let mut engine = sha256d::Hash::engine();
obj.consensus_encode(&mut engine).expect("engines don't error");
sha256d::Hash::from_engine(engine)
}
let leaves = [
serialize_hash(&self.fedpeg_program).to_byte_array(),
serialize_hash(&self.fedpegscript).to_byte_array(),
serialize_hash(&self.extension_space).to_byte_array(),
];
crate::fast_merkle_root::fast_merkle_root(&leaves[..])
}
/// Calculate the root of this [FullParams].
pub fn calculate_root(&self) -> sha256::Midstate {
fn serialize_hash<E: Encodable>(obj: &E) -> sha256d::Hash {
let mut engine = sha256d::Hash::engine();
obj.consensus_encode(&mut engine).expect("engines don't error");
sha256d::Hash::from_engine(engine)
}
let leaves = [
serialize_hash(&self.signblockscript).to_byte_array(),
serialize_hash(&self.signblock_witness_limit).to_byte_array(),
];
let compact_root = crate::fast_merkle_root::fast_merkle_root(&leaves[..]);
let leaves = [
compact_root.to_byte_array(),
self.extra_root().to_byte_array(),
];
crate::fast_merkle_root::fast_merkle_root(&leaves[..])
}
/// Turns paramers into compact parameters.
/// This returns self for compact params and [None] for null ones.
pub fn into_compact(self) -> Params {
Params::Compact {
elided_root: self.extra_root(),
signblockscript: self.signblockscript,
signblock_witness_limit: self.signblock_witness_limit,
}
}
/// Format for [fmt::Debug].
fn fmt_debug(&self, f: &mut fmt::Formatter, name: &'static str) -> fmt::Result {
let mut s = f.debug_struct(name);
s.field("signblockscript", &HexBytes(&self.signblockscript[..]));
s.field("signblock_witness_limit", &self.signblock_witness_limit);
s.field("fedpeg_program", &HexBytes(self.fedpeg_program.as_ref()));
s.field("fedpegscript", &HexBytes(&self.fedpegscript[..]));
s.field("extension_space", &HexBytesArray(&self.extension_space));
s.finish()
}
#[cfg(feature = "serde")]
fn serde_serialize<S: Serializer>(&self, s: S, name: &'static str) -> Result<S::Ok, S::Error> {
let mut st = s.serialize_struct(name, 5)?;
st.serialize_field("signblockscript", &self.signblockscript)?;
st.serialize_field("signblock_witness_limit", &self.signblock_witness_limit)?;
st.serialize_field("fedpeg_program", &self.fedpeg_program)?;
st.serialize_field("fedpegscript", &HexBytes(&self.fedpegscript))?;
st.serialize_field("extension_space", &HexBytesArray(&self.extension_space))?;
st.end()
}
}
impl fmt::Debug for FullParams {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
self.fmt_debug(f, "FullParams")
}
}
impl Encodable for FullParams {
fn consensus_encode<S: io::Write>(&self, mut s: S) -> Result<usize, encode::Error> {
let ret = Encodable::consensus_encode(&self.signblockscript, &mut s)? +
Encodable::consensus_encode(&self.signblock_witness_limit, &mut s)? +
Encodable::consensus_encode(&self.fedpeg_program, &mut s)? +
Encodable::consensus_encode(&self.fedpegscript, &mut s)? +
Encodable::consensus_encode(&self.extension_space, &mut s)?;
Ok(ret)
}
}
impl Decodable for FullParams {
fn consensus_decode<D: io::Read>(mut d: D) -> Result<Self, encode::Error> {
Ok(FullParams {
signblockscript: Decodable::consensus_decode(&mut d)?,
signblock_witness_limit: Decodable::consensus_decode(&mut d)?,
fedpeg_program: Decodable::consensus_decode(&mut d)?,
fedpegscript: Decodable::consensus_decode(&mut d)?,
extension_space: Decodable::consensus_decode(&mut d)?,
})
}
}
/// Dynamic federations paramaters, as encoded in a block header
#[derive(Clone, Eq, Hash, PartialEq, PartialOrd, Ord)]
pub enum Params {
/// Null entry, used to signal "no vote" as a proposal
Null,
/// Compact params where the fedpeg data and extension space
/// are not included, and are assumed to be equal to the values
/// from the previous block
Compact {
/// "scriptPubKey" used for block signing
signblockscript: Script,
/// Maximum, in bytes, of the size of a blocksigning witness
signblock_witness_limit: u32,
/// Merkle root of extra data
elided_root: sha256::Midstate,
},
/// Full dynamic federations parameters
Full(FullParams),
}
impl fmt::Debug for Params {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
Params::Null => write!(f, "Null"),
Params::Compact { signblockscript, signblock_witness_limit, elided_root } => {
let mut s = f.debug_struct("Compact");
s.field("signblockscript", &HexBytes(&signblockscript[..]));
s.field("signblock_witness_limit", signblock_witness_limit);
s.field("elided_root", elided_root);
s.finish()
}
Params::Full(ref full) => full.fmt_debug(f, "Full"),
}
}
}
impl Params {
/// Check whether this is [Params::Null].
pub fn is_null(&self) -> bool {
match *self {
Params::Null => true,
Params::Compact { .. } => false,
Params::Full(..) => false,
}
}
/// Check whether this is [Params::Compact].
pub fn is_compact(&self) -> bool {
match *self {
Params::Null => false,
Params::Compact { .. } => true,
Params::Full(..) => false,
}
}
/// Check whether this is [Params::Full].
pub fn is_full(&self) -> bool {
match *self {
Params::Null => false,
Params::Compact { .. } => false,
Params::Full(..) => true,
}
}
/// Get the signblockscript. Is [None] for [Params::Null] params.
pub fn signblockscript(&self) -> Option<&Script> {
match *self {
Params::Null => None,
Params::Compact { ref signblockscript, ..} => Some(signblockscript),
Params::Full(ref f) => Some(&f.signblockscript),
}
}
/// Get the signblock_witness_limit. Is [None] for [Params::Null] params.
pub fn signblock_witness_limit(&self) -> Option<u32> {
match *self {
Params::Null => None,
Params::Compact { signblock_witness_limit, ..} => Some(signblock_witness_limit),
Params::Full(ref f) => Some(f.signblock_witness_limit),
}
}
/// Get the fedpeg_program. Is [None] for non-[Params::Full] params.
pub fn fedpeg_program(&self) -> Option<&bitcoin::ScriptBuf> {
match *self {
Params::Null => None,
Params::Compact { .. } => None,
Params::Full(ref f) => Some(&f.fedpeg_program),
}
}
/// Get the fedpegscript. Is [None] for non-[Params::Full] params.
pub fn fedpegscript(&self) -> Option<&Vec<u8>> {
match *self {
Params::Null => None,
Params::Compact { .. } => None,
Params::Full(ref f) => Some(&f.fedpegscript),
}
}
/// Get the extension_space. Is [None] for non-[Params::Full] params.
pub fn extension_space(&self) -> Option<&Vec<Vec<u8>>> {
match *self {
Params::Null => None,
Params::Compact { .. } => None,
Params::Full(ref f) => Some(&f.extension_space),
}
}
/// Get the elided_root. Is [None] for non-[Params::Compact] params.
pub fn elided_root(&self) -> Option<&sha256::Midstate> {
match *self {
Params::Null => None,
Params::Compact { ref elided_root, ..} => Some(elided_root),
Params::Full(..) => None,
}
}
/// Return the `extra root` of this params.
/// The extra root commits to the consensus parameters unrelated to
/// blocksigning: `fedpeg_program`, `fedpegscript` and `extension_space`.
fn extra_root(&self) -> sha256::Midstate {
match *self {
Params::Null => sha256::Midstate::from_byte_array([0u8; 32]),
Params::Compact { ref elided_root, .. } => *elided_root,
Params::Full(ref f) => f.extra_root(),
}
}
/// Calculate the root of this [Params].
pub fn calculate_root(&self) -> sha256::Midstate {
fn serialize_hash<E: Encodable>(obj: &E) -> sha256d::Hash {
let mut engine = sha256d::Hash::engine();
obj.consensus_encode(&mut engine).expect("engines don't error");
sha256d::Hash::from_engine(engine)
}
if self.is_null() {
return sha256::Midstate::from_byte_array([0u8; 32]);
}
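        // The overall root is a two-leaf merkle tree over the blocksigning ("compact") root and
        // the extra root (fedpeg program/script and extension space).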
let leaves = [
serialize_hash(self.signblockscript().unwrap()).to_byte_array(),
serialize_hash(&self.signblock_witness_limit().unwrap()).to_byte_array(),
];
let compact_root = crate::fast_merkle_root::fast_merkle_root(&leaves[..]);
let leaves = [
compact_root.to_byte_array(),
self.extra_root().to_byte_array(),
];
crate::fast_merkle_root::fast_merkle_root(&leaves[..])
}
/// Get the full params when this params are full.
pub fn full(&self) -> Option<&FullParams> {
match self {
Params::Null => None,
Params::Compact { .. } => None,
Params::Full(ref f) => Some(f),
}
}
/// Convert into the full params when this params are full.
pub fn into_full(self) -> Option<FullParams> {
match self {
Params::Null => None,
Params::Compact { .. } => None,
Params::Full(f) => Some(f),
}
}
/// Turns paramers into compact parameters.
/// This returns self for compact params and [None] for null ones.
pub fn into_compact(self) -> Option<Params> {
match self {
Params::Null => None,
s @ Params::Compact { .. } => Some(s),
Params::Full(f) => Some(f.into_compact()),
}
}
}
impl Default for Params {
fn default() -> Params {
Params::Null
}
}
#[cfg(feature = "serde")]
impl<'de> Deserialize<'de> for Params {
fn deserialize<D: Deserializer<'de>>(d: D) -> Result<Self, D::Error> {
use serde::de;
enum Enum {
Unknown,
SignblockScript,
SignblockWitnessLimit,
ElidedRoot,
FedpegProgram,
FedpegScript,
ExtSpace,
}
struct EnumVisitor;
impl<'de> de::Visitor<'de> for EnumVisitor {
type Value = Enum;
fn expecting(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.write_str("a field name")
}
fn visit_str<E: de::Error>(self, v: &str) -> Result<Self::Value, E> {
match v {
"signblockscript" => Ok(Enum::SignblockScript),
"signblock_witness_limit" => Ok(Enum::SignblockWitnessLimit),
"elided_root" => Ok(Enum::ElidedRoot),
"fedpeg_program" => Ok(Enum::FedpegProgram),
"fedpegscript" => Ok(Enum::FedpegScript),
"extension_space" => Ok(Enum::ExtSpace),
_ => Ok(Enum::Unknown),
}
}
}
impl<'de> Deserialize<'de> for Enum {
fn deserialize<D: Deserializer<'de>>(d: D) -> Result<Self, D::Error> {
d.deserialize_str(EnumVisitor)
}
}
struct Visitor;
impl<'de> de::Visitor<'de> for Visitor {
type Value = Params;
fn expecting(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.write_str("block header extra data")
}
fn visit_map<A>(self, mut map: A) -> Result<Self::Value, A::Error>
where
A: de::MapAccess<'de>,
{
/// Utility type to parse bytes from either hex or array notation.
struct HexBytes(Vec<u8>);
impl<'de> Deserialize<'de> for HexBytes {
fn deserialize<D: Deserializer<'de>>(d: D) -> Result<Self, D::Error> {
struct Visitor;
impl<'de> de::Visitor<'de> for Visitor {
type Value = HexBytes;
fn expecting(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.write_str("bytes in either hex or array format")
}
fn visit_str<E: de::Error>(self, v: &str) -> Result<Self::Value, E> {
use crate::hex::FromHex;
Ok(HexBytes(FromHex::from_hex(v).map_err(E::custom)?))
}
fn visit_bytes<E: de::Error>(self, v: &[u8]) -> Result<Self::Value, E> {
Ok(HexBytes(v.to_vec()))
}
fn visit_seq<A>(self, mut seq: A) -> Result<Self::Value, A::Error> where
A: de::SeqAccess<'de>,
{
let mut ret = if let Some(l) = seq.size_hint() {
Vec::with_capacity(l)
} else {
Vec::new()
};
while let Some(e) = seq.next_element()? {
ret.push(e);
}
Ok(HexBytes(ret))
}
}
d.deserialize_any(Visitor)
}
}
let mut signblockscript = None;
let mut signblock_witness_limit = None;
let mut elided_root = None;
let mut fedpeg_program = None;
let mut fedpegscript: Option<HexBytes> = None;
let mut extension_space: Option<Vec<HexBytes>> = None;
loop {
match map.next_key::<Enum>()? {
Some(Enum::Unknown) => {
map.next_value::<de::IgnoredAny>()?;
},
Some(Enum::SignblockScript) => {
signblockscript = Some(map.next_value()?);
},
Some(Enum::SignblockWitnessLimit) => {
signblock_witness_limit = Some(map.next_value()?);
},
Some(Enum::ElidedRoot) => {
elided_root = Some(map.next_value()?);
},
Some(Enum::FedpegProgram) => {
fedpeg_program = Some(map.next_value()?);
},
Some(Enum::FedpegScript) => {
fedpegscript = Some(map.next_value()?);
},
Some(Enum::ExtSpace) => {
extension_space = Some(map.next_value()?);
},
None => { break; }
}
}
match (
signblockscript,
signblock_witness_limit,
elided_root,
fedpeg_program,
fedpegscript,
extension_space,
) {
(
Some(signblockscript),
Some(signblock_witness_limit),
_,
Some(fedpeg_program),
Some(HexBytes(fedpegscript)),
Some(extension_space),
) => Ok(Params::Full(FullParams {
signblockscript,
signblock_witness_limit,
fedpeg_program,
fedpegscript,
extension_space: extension_space.into_iter().map(|h| h.0).collect(),
})),
(
Some(signblockscript),
Some(signblock_witness_limit),
Some(elided_root),
_,
_,
_
) => Ok(Params::Compact {
signblockscript,
signblock_witness_limit,
elided_root,
}),
// We should probably be stricter about errors here
_ => Ok(Params::Null),
}
}
}
static FIELDS: &[&str] = &[
"signblockscript",
"signblock_witness_limit",
"fedpeg_program",
"fedpegscript",
"extension_space",
"elided_root",
];
d.deserialize_struct("Params", FIELDS, Visitor)
}
}
#[cfg(feature = "serde")]
impl Serialize for Params {
fn serialize<S: Serializer>(&self, s: S) -> Result<S::Ok, S::Error> {
match *self {
Params::Null => {
let st = s.serialize_struct("Params", 0)?;
st.end()
},
Params::Compact {
ref signblockscript,
ref signblock_witness_limit,
ref elided_root,
} => {
let mut st = s.serialize_struct("Params", 3)?;
st.serialize_field("signblockscript", signblockscript)?;
st.serialize_field("signblock_witness_limit", signblock_witness_limit)?;
st.serialize_field("elided_root", elided_root)?;
st.end()
},
Params::Full(ref full) => full.serde_serialize(s, "Params"),
}
}
}
impl Encodable for Params {
fn consensus_encode<S: io::Write>(&self, mut s: S) -> Result<usize, encode::Error> {
Ok(match *self {
Params::Null => Encodable::consensus_encode(&0u8, &mut s)?,
Params::Compact {
ref signblockscript,
ref signblock_witness_limit,
ref elided_root,
} => {
Encodable::consensus_encode(&1u8, &mut s)? +
Encodable::consensus_encode(signblockscript, &mut s)? +
Encodable::consensus_encode(signblock_witness_limit, &mut s)? +
Encodable::consensus_encode(&elided_root.to_byte_array(), &mut s)?
},
Params::Full(ref f) => {
Encodable::consensus_encode(&2u8, &mut s)? +
Encodable::consensus_encode(f, &mut s)?
},
})
}
}
impl Decodable for Params {
fn consensus_decode<D: io::Read>(mut d: D) -> Result<Self, encode::Error> {
let ser_type: u8 = Decodable::consensus_decode(&mut d)?;
match ser_type {
0 => Ok(Params::Null),
1 => Ok(Params::Compact {
signblockscript: Decodable::consensus_decode(&mut d)?,
signblock_witness_limit: Decodable::consensus_decode(&mut d)?,
elided_root: sha256::Midstate::from_byte_array(Decodable::consensus_decode(&mut d)?),
}),
2 => Ok(Params::Full(Decodable::consensus_decode(&mut d)?)),
_ => Err(encode::Error::ParseFailed(
"bad serialize type for dynafed parameters"
)),
}
}
}
#[cfg(test)]
mod tests {
use std::fmt::{self, Write};
use crate::hashes::sha256;
use crate::hex::ToHex;
use crate::{BlockHash, TxMerkleNode};
use super::*;
#[test]
fn test_param_roots() {
// Taken from the following Elements Core test:
// CScript signblockscript(opcodetype(1));
// uint32_t signblock_wl(2);
// CScript fp_program(opcodetype(3));
// CScript fp_script(opcodetype(4));
// std::vector<std::vector<unsigned char>> ext{ {5, 6}, {7} };
//
// DynaFedParamEntry compact_entry = DynaFedParamEntry(signblockscript, signblock_wl);
// BOOST_CHECK_EQUAL(
// compact_entry.CalculateRoot().GetHex(),
// "dff5f3793abc06a6d75e80fe3cfd47406f732fa4ec9305960ae2a229222a1ad5"
// );
//
// DynaFedParamEntry full_entry =
// DynaFedParamEntry(signblockscript, signblock_wl, fp_program, fp_script, ext);
// BOOST_CHECK_EQUAL(
// full_entry.CalculateRoot().GetHex(),
// "175be2087ba7cc0e33348bef493bd3e34f31f64bf9226e5881ab310dafa432ff"
// );
//
// DynaFedParams params = DynaFedParams(compact_entry, full_entry);
// BOOST_CHECK_EQUAL(
// params.CalculateRoot().GetHex(),
// "e56cf79487952dfa85fe6a85829600adc19714ba6ab1157fdff02b25ae60cee2"
// );
let signblockscript: Script = vec![1].into();
let signblock_wl = 2;
let fp_program: bitcoin::ScriptBuf = vec![3].into();
let fp_script = vec![4];
let ext = vec![vec![5, 6], vec![7]];
let compact_entry = Params::Compact {
signblockscript: signblockscript.clone(),
signblock_witness_limit: signblock_wl,
elided_root: sha256::Midstate::from_byte_array([0; 32]),
};
assert_eq!(
compact_entry.calculate_root().to_hex(),
"f98f149fd11da6fbe26d0ee53cadd28372fa9eed2cb7080f41da7ca311531777"
);
let full_entry = Params::Full(FullParams {
signblockscript,
signblock_witness_limit: signblock_wl,
fedpeg_program: fp_program,
fedpegscript: fp_script,
extension_space: ext,
});
assert_eq!(
full_entry.calculate_root().to_hex(),
"8eb1b83cce69a3d8b0bfb7fbe77ae8f1d24b57a9cae047b8c0aba084ad878249"
);
let header = crate::block::BlockHeader{
ext: crate::block::ExtData::Dynafed {
current: compact_entry,
proposed: full_entry,
signblock_witness: vec![],
},
version: Default::default(),
prev_blockhash: BlockHash::all_zeros(),
merkle_root: TxMerkleNode::all_zeros(),
time: Default::default(),
height: Default::default(),
};
assert_eq!(
header.calculate_dynafed_params_root().unwrap().to_hex(),
"113160f76dc17fe367a2def79aefe06feeea9c795310c9e88aeedc23e145982e"
);
}
fn to_debug_string<O: fmt::Debug>(o: &O) -> String {
let mut s = String::new();
write!(&mut s, "{:?}", o).unwrap();
s
}
#[test]
fn into_compact_test() {
let full = FullParams {
signblockscript: vec![0x01, 0x02].into(),
signblock_witness_limit: 3,
fedpeg_program: vec![0x04, 0x05].into(),
fedpegscript: vec![0x06, 0x07],
extension_space: vec![vec![0x08, 0x09], vec![0x0a]],
};
let extra_root = full.extra_root();
let root = full.calculate_root();
let params = Params::Full(full.clone());
assert_eq!(params.full(), Some(&full));
assert_eq!(params.clone().into_full(), Some(full.clone()));
assert_eq!(params.extra_root(), extra_root);
assert_eq!(params.calculate_root(), root);
assert_eq!(
to_debug_string(¶ms),
"Full { signblockscript: 0102, signblock_witness_limit: 3, fedpeg_program: 0405, fedpegscript: 0607, extension_space: [0809, 0a] }",
);
let compact = params.into_compact().unwrap();
assert_eq!(
to_debug_string(&compact),
"Compact { signblockscript: 0102, signblock_witness_limit: 3, elided_root: 0xc3058c822b22a13bb7c47cf50d3f3c7817e7d9075ff55a7d16c85b9673e7e553 }",
);
assert_eq!(compact.calculate_root(), full.calculate_root());
assert_eq!(compact.elided_root(), Some(&extra_root));
assert_eq!(compact.extra_root(), extra_root);
}
#[cfg(feature = "serde")]
#[test]
fn test_serde_roundtrip() {
use serde_json;
let full = Params::Full(FullParams {
signblockscript: vec![0x01, 0x02].into(),
signblock_witness_limit: 3,
fedpeg_program: vec![0x04, 0x05].into(),
fedpegscript: vec![0x06, 0x07],
extension_space: vec![vec![0x08, 0x09], vec![0x0a]],
});
let encoded = serde_json::to_string(&full).unwrap();
let decoded: Params = serde_json::from_str(&encoded).unwrap();
assert_eq!(full, decoded);
// test old encoded format
let old_encoded = {
let s1 = encoded.replace("\"0607\"", "[6,7]");
assert_ne!(s1, encoded);
let s2 = s1.replace("\"0809\",\"0a\"", "[8,9],[10]");
assert_ne!(s2, s1);
s2
};
assert_ne!(old_encoded, encoded);
let decoded: Params = serde_json::from_str(&old_encoded).unwrap();
assert_eq!(full, decoded);
}
}
| true |
26ce2e58ebd616131c061cb279bba60256b0a61b
|
Rust
|
OrangeBacon/adventofcode2019
|
/src/intcode/instruction.rs
|
UTF-8
| 6,372 | 3.1875 | 3 |
[
"MIT"
] |
permissive
|
use std::process::exit;
use std::fmt;
use std::collections::HashMap;
use indexmap::map::IndexMap;
#[derive(Copy, Clone, Debug)]
pub enum ParameterMode {
Position,
Literal,
Relative,
Any,
Address,
}
#[derive(Debug)]
pub struct Environment {
pub variables: IndexMap<String, i64>,
pub labels: HashMap<String, usize>,
pub code: Vec<Instruction>,
pub line_num: usize,
pub label_counter: usize,
}
impl ParameterMode {
pub fn from_i64(val: i64) -> ParameterMode {
match val {
0 => ParameterMode::Position,
1 => ParameterMode::Literal,
2 => ParameterMode::Relative,
-1 => ParameterMode::Any,
-2 => ParameterMode::Address,
a => {
panic!("Unknown parameter mode: {}", a);
}
}
}
}
#[derive(Debug, Clone)]
pub enum Argument {
Literal(i64),
Variable(usize),
Address(String),
Relative(i64),
}
impl Argument {
pub fn emit(&self, output: &mut Vec<i64>, patches: &mut Vec<(usize, usize)>, env: &Environment) {
match self {
Argument::Literal(a) => output.push(*a),
Argument::Variable(a) => {
patches.push((output.len(), *a));
output.push(-1);
},
Argument::Address(a) => {
match env.labels.get(a) {
Some(loc) => output.push(*loc as i64),
None => {
println!("Undefined label {}", a);
exit(1);
}
}
},
Argument::Relative(a) => output.push(*a),
}
}
}
#[derive(Copy, Clone, Debug, PartialEq)]
pub enum OpCode {
Add,
Multiply,
Input,
Output,
JumpNotZero,
JumpZero,
LessThan,
EqualTo,
Halt,
RelativeAdjust,
Unknown,
Jump,
}
impl OpCode {
pub fn from_i64(val: i64) -> OpCode {
match val {
1 => OpCode::Add,
2 => OpCode::Multiply,
3 => OpCode::Input,
4 => OpCode::Output,
5 => OpCode::JumpNotZero,
6 => OpCode::JumpZero,
7 => OpCode::LessThan,
8 => OpCode::EqualTo,
9 => OpCode::RelativeAdjust,
99 => OpCode::Halt,
_ => OpCode::Unknown,
}
}
pub fn to_i64(&self) -> i64 {
match self {
OpCode::Add => 1,
OpCode::Multiply => 2,
OpCode::Input => 3,
OpCode::Output => 4,
OpCode::JumpNotZero => 5,
OpCode::JumpZero => 6,
OpCode::LessThan => 7,
OpCode::EqualTo => 8,
OpCode::RelativeAdjust => 9,
OpCode::Halt => 99,
            OpCode::Unknown | OpCode::Jump => {
panic!("Cannot write unknown OpCode");
}
}
}
pub fn is_virtual(&self) -> bool {
match self {
OpCode::Jump => true,
_ => false,
}
}
pub fn from_asm_name(name: &str) -> OpCode {
match name {
"add" => OpCode::Add,
"mul" => OpCode::Multiply,
"inp" => OpCode::Input,
"out" => OpCode::Output,
"jnz" => OpCode::JumpNotZero,
"jez" => OpCode::JumpZero,
"clt" => OpCode::LessThan,
"eql" => OpCode::EqualTo,
"rba" => OpCode::RelativeAdjust,
"hlt" => OpCode::Halt,
"jmp" => OpCode::Jump,
_ => OpCode::Unknown,
}
}
pub fn to_asm_name(&self) -> &str {
match self {
OpCode::Add => "add",
OpCode::Multiply => "mul",
OpCode::Input => "inp",
OpCode::Output => "out",
OpCode::JumpNotZero => "jnz",
OpCode::JumpZero => "jez",
OpCode::LessThan => "clt",
OpCode::EqualTo => "eql",
OpCode::RelativeAdjust => "rba",
OpCode::Halt => "hlt",
OpCode::Jump => "jmp",
OpCode::Unknown => {
unreachable!("Cannot output unknown opcode");
}
}
}
pub fn to_params(&self) -> Vec<ParameterMode> {
use ParameterMode::*;
match self {
OpCode::Add => vec![Any, Any, Position],
OpCode::Multiply => vec![Any, Any, Position],
OpCode::Input => vec![Position],
OpCode::Output => vec![Any],
OpCode::JumpNotZero => vec![Any, Address],
OpCode::JumpZero => vec![Any, Address],
OpCode::LessThan => vec![Any, Any, Position],
OpCode::EqualTo => vec![Any, Any, Position],
OpCode::RelativeAdjust => vec![Any],
OpCode::Halt => vec![],
OpCode::Jump => vec![Address],
OpCode::Unknown => vec![],
}
}
}
impl fmt::Display for OpCode {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.to_asm_name())
}
}
#[derive(Debug)]
pub struct Instruction {
pub opcode: OpCode,
pub args: Vec<Argument>,
}
impl Instruction {
pub fn new(opcode: OpCode, args: Vec<Argument>) -> Instruction {
Instruction {
opcode: opcode,
args: args,
}
}
pub fn emit(&self, output: &mut Vec<i64>, patches: &mut Vec<(usize, usize)>, env: &Environment) {
if !self.opcode.is_virtual() {
            let mut opcode = self.opcode.to_i64();
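            // Each argument's addressing mode is encoded as an extra decimal digit above the
            // two-digit opcode: position = 0, literal/immediate = 1, relative = 2.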
for (i, arg) in self.args.iter().enumerate() {
match arg {
Argument::Literal(_) => opcode += 10i64.pow(i as u32 + 2),
Argument::Variable(_) => (),
Argument::Address(_) => opcode += 10i64.pow(i as u32 + 2),
Argument::Relative(_) => opcode += 10i64.pow(i as u32 + 2)*2,
}
}
output.push(opcode);
for arg in &self.args {
arg.emit(output, patches, env);
}
return;
}
match self.opcode {
OpCode::Jump => {
Instruction::new(OpCode::JumpZero, vec![Argument::Literal(0), self.args[0].clone()])
.emit(output, patches, env);
},
_ => unreachable!(),
}
}
}
| true |
c2a832bf9a13de5c532ae559f1c5e88bbe03829d
|
Rust
|
Sgt-Forge/Interview-Studying
|
/1. Sorting/rust/selection_sort.rs
|
UTF-8
| 487 | 3.5 | 4 |
[] |
no_license
|
pub fn selection_sort(array: &mut [i32]){
let len = array.len();
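    // After each outer pass, the smallest remaining element is swapped into position
    // `sorted_ind`, growing the sorted prefix by one.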
for sorted_ind in 0..len {
let mut smallest = sorted_ind;
for unsorted_ind in (sorted_ind + 1)..len {
if array[unsorted_ind] < array[smallest]{
smallest = unsorted_ind
}
}
array.swap(smallest, sorted_ind);
}
}
fn main() {
let mut array: [i32; 5] = [5,4,3,2,1];
selection_sort(&mut array);
println!("Sorted: {:?}\n", array);
}
| true |
4cfd9ef81ece26e70c09dae4afc7d27452a2ffc4
|
Rust
|
baitcenter/routing
|
/src/main.rs
|
UTF-8
| 1,195 | 2.59375 | 3 |
[
"MIT"
] |
permissive
|
#[macro_use] extern crate log;
// #[macro_use] extern crate serde_derive;
#[macro_use] extern crate serenity;
extern crate dotenv;
extern crate pretty_env_logger;
// extern crate serde;
// extern crate serde_json;
use serenity::{
framework::StandardFramework,
model::{
channel::*,
event::*,
gateway::Ready
},
prelude::*,
client::Client,
http,
};
use dotenv::dotenv;
use std::env;
use std::fs::File;
struct Handler;
impl EventHandler for Handler {
fn ready(&self, _: Context, ready: Ready) {
info!("Connected as {}", ready.user.name);
}
fn resume(&self, _: Context, _: ResumedEvent) {
info!("Resumed");
}
}
fn main() {
dotenv().ok();
pretty_env_logger::init();
// Login with a bot token from the environment
let mut client =
Client::new(
&env::var("DISCORD_TOKEN").expect("Expected a discord token in the environment."),
Handler
).expect("Error creating client");
client.with_framework(StandardFramework::new()
.configure(|c| c.prefix("~"))
.cmd("ping", ping));
if let Err(why) = client.start() {
println!("An error occured while running the client: {:?}", why);
}
}
command!(ping(_context, message) {
let _ = message.reply("pong");
});
| true |
44578c8480faa63502f3772cdf0884ca536feb35
|
Rust
|
wfraser/parallel_reader
|
/src/lib.rs
|
UTF-8
| 7,961 | 3.359375 | 3 |
[
"MIT",
"Apache-2.0"
] |
permissive
|
#![deny(missing_docs, rust_2018_idioms)]
//! A utility for reading from a stream and processing it by chunks in parallel.
//!
//! See [`read_stream_and_process_chunks_in_parallel`]() for details.
use std::io::{self, Read};
use std::sync::{Arc, Mutex};
use std::sync::mpsc;
use std::thread;
/// An error during reading or processing of a stream.
#[derive(Debug)]
pub enum Error<E> {
/// An error occurred while reading from the source.
Read(io::Error),
/// An error was returned by a processing function.
Process {
/// The offset of the chunk that was being processed which led to the error.
chunk_offset: u64,
/// The error returned by the processing function.
error: E,
},
}
impl<E: std::fmt::Display> std::fmt::Display for Error<E> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
Error::Read(e) => write!(f, "error while reading: {}", e),
Error::Process { chunk_offset, error } => write!(f,
"error while processing data at chunk offset {}: {}", chunk_offset, error),
}
}
}
impl<E: std::error::Error + 'static> std::error::Error for Error<E> {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
Some(match self {
Error::Read(ref e) => e,
Error::Process { chunk_offset: _, ref error } => error,
})
}
}
fn start_worker_threads<E: Send + 'static>(
num_threads: usize,
work_rx: mpsc::Receiver<(u64, Vec<u8>)>,
job_tx: mpsc::Sender<(u64, E)>,
f: Arc<impl Fn(u64, &[u8]) -> Result<(), E> + Send + Sync + 'static>,
) -> Vec<thread::JoinHandle<()>> {
let mut threads = vec![];
let work_rx = Arc::new(Mutex::new(work_rx));
for _ in 0 .. num_threads {
let thread_work_rx = work_rx.clone();
let thread_job_tx = job_tx.clone();
let f = f.clone();
threads.push(thread::spawn(move || {
loop {
let (offset, data) = {
let rx = thread_work_rx.lock().unwrap();
match rx.recv() {
Ok(result) => result,
Err(_) => {
// Sender end of the channel disconnected. Main thread must be done
// and waiting for us.
return;
}
}
};
if let Err(e) = f(offset, &data) {
// Job returned an error. Pass it to the main loop so it can stop early.
thread_job_tx.send((offset, e)).unwrap();
}
}
}));
}
threads
}
/// Read from a stream and process it by chunks in parallel.
///
/// Reads are done sequentially on the current thread, in chunks of the given size, then the given
/// function is run on them in parallel on the given number of threads.
///
/// If any of the processing functions returns an error, reading and processing will be stopped as
/// soon as possible and the error returned to the caller. Note that because processing is
/// happening in parallel, it is possible for the processing and/or reading to go past the chunk
/// that causes an error, but it will stop soon thereafter.
///
/// Any read errors on the source will also stop further progress, but similarly, any ongoing
/// processing will need to finish before this function returns.
///
/// The error returned to the caller is only the first one encountered.
///
/// # Example:
/// ```
/// use parallel_reader::read_stream_and_process_chunks_in_parallel;
/// use std::sync::Arc;
/// use std::sync::atomic::{AtomicU64, Ordering::SeqCst};
/// use std::io::Cursor;
///
/// let source = Cursor::new(vec![0u8; 12345]);
/// let num_bytes = Arc::new(AtomicU64::new(0));
/// let num_bytes_clone = num_bytes.clone();
/// let result = read_stream_and_process_chunks_in_parallel(source, 1024, 4, Arc::new(
/// move |_offset, data: &[u8]| -> Result<(), ()> {
/// // Trivial worker: just sum up the number of bytes in the data.
/// num_bytes_clone.fetch_add(data.len() as u64, SeqCst);
/// Ok(())
/// }));
/// assert!(result.is_ok());
/// assert_eq!(12345, num_bytes.load(SeqCst));
/// ```
pub fn read_stream_and_process_chunks_in_parallel<E: Send + 'static>(
mut reader: impl Read,
chunk_size: usize,
num_threads: usize,
f: Arc<impl Fn(u64, &[u8]) -> Result<(), E> + Send + Sync + 'static>,
) -> Result<(), Error<E>> {
assert!(num_threads > 0, "non-zero number of threads required");
// Channel for sending work as (offset, data) pairs to the worker threads. It's bounded by the
// number of workers, to ensure we don't read ahead of the work too far.
let (work_tx, work_rx) = mpsc::sync_channel::<(u64, Vec<u8>)>(num_threads);
// If a job returns an error result, it will be sent to this channel. This is used to stop
// reading early if any job fails.
let (job_tx, job_rx) = mpsc::channel::<(u64, E)>();
// Start up workers.
let threads = start_worker_threads(num_threads, work_rx, job_tx, f);
// Read the file in chunks and pass work to worker threads.
let mut offset = 0u64;
let loop_result = loop {
// Check if any job sent anything. They only send if there's an error, so if we get
// something, stop the loop and pass the error up.
match job_rx.try_recv() {
Ok((chunk_offset, error)) => break Err(Error::Process { chunk_offset, error }),
Err(mpsc::TryRecvError::Empty) => (),
Err(mpsc::TryRecvError::Disconnected) => unreachable!("we hold the sender open"),
}
// TODO(wfraser) it'd be nice to re-use these buffers somehow
let mut buf = vec![0u8; chunk_size];
match large_read(&mut reader, &mut buf) {
Ok(0) => {
break Ok(());
}
Ok(n) => {
buf.truncate(n);
work_tx.send((offset, buf)).expect("failed to send work to threads");
offset += n as u64;
}
Err(e) => {
break Err(Error::Read(e));
}
}
};
// Close the work channel. This'll cause the workers to exit next time they try to recv from it.
drop(work_tx);
// Loop is finished; wait for outstanding jobs to stop.
for thread in threads {
thread.join().expect("failed to join on worker thread");
}
if let Err(e) = loop_result {
// Something stopped the loop prematurely: either a job failed or a read error occurred.
// Return this error.
return Err(e);
}
// Otherwise, the loop finished, but some job may have failed towards the end so check the
// channel as well.
match job_rx.recv() {
Ok((chunk_offset, error)) => {
// Some job returned an error.
Err(Error::Process { chunk_offset, error })
}
Err(mpsc::RecvError) => {
// No jobs returned any errors.
Ok(())
}
}
}
// std::io::Read::read() is not required to read the full buffer size if it is not available all at
// once. A common case when this happens is when reading from a pipe, where only a few KB are
// available for any given read. For our purposes, though, we really want full buffers until we hit
// EOF, so do multiple reads in a loop if necessary.
fn large_read(mut source: impl Read, buf: &mut [u8]) -> io::Result<usize> {
let mut total = 0;
loop {
match source.read(&mut buf[total ..]) {
Ok(0) => break,
Ok(n) => {
total += n;
if total == buf.len() {
break;
}
}
Err(e) if e.kind() == io::ErrorKind::Interrupted => continue, // retriable error
Err(e) => return Err(e),
}
}
Ok(total)
}
| true |
8bb4a18e000cb9bafb55bc22be757a41bc43bf45
|
Rust
|
boogerlad/rust-book2-exercises
|
/twelve_days_of_Christmas/src/main.rs
|
UTF-8
| 1,572 | 3.390625 | 3 |
[] |
no_license
|
fn main()
{
let days_gifts =
[
("first", "partridge in a pear tree"),
("second", "turtle doves"),
("third", "French hens"),
("fourth", "calling birds"),
("fifth", "golden rings"),
("sixth", "geese a-laying"),
("seventh", "swans a-swimming"),
("eighth", "maids a-milking"),
("ninth", "ladies dancing"),
("tenth", "lords a-leaping"),
("eleventh", "pipers piping"),
("twelfth", "drummers drumming")
];
//iter returns &tuple(&str, &str), enumerate returns a tuple of (int, &tuple(&str, &str))
for (i, &(day, _)) in days_gifts.iter().enumerate()
{
println!("On the {} day of Christmas", day);
println!("my true love sent to me:");
if i == 0
{
print!("a ");
}
else
{
//start from 1 because 0 is partridge in a pear tree and dealt with outside the loop
			//i + 1 because the second argument in a slice is exclusive
for (j, &(_, gift)) in days_gifts[1..i + 1].iter().enumerate().rev()
{
//j + 2 because indicies are zero based(+1), and
//the internal representation of slices is a pointer to first element and a length
//so even though the slice is starting at 1, j first value will be 0(+1)
//the source array indicies are not taken into account after slicing so .enumerate() will always start from 0
//if the starting value was 4 instead of 1 for example, we would need (+4)
//slicing essentially returns a new array
//array is pointer to first element and a length as well
//1+1=2
println!("{} {}", j + 2, gift);
}
print!("and a ");
}
println!("{}.", days_gifts[0].1);
}
}
| true |
81748e0a3336b9568a40912fdd09c41cf632a810
|
Rust
|
mcclellanmj/top-down
|
/src/main.rs
|
UTF-8
| 7,362 | 2.6875 | 3 |
[] |
no_license
|
// TODO: Make an input module that will allow code based input configuration
// FIXME: Figure out how to find the shortest path for rotation, currently in some quadrants it rotates the long
// way around
// TODO: Clean up the update code, at the minimum it needs to be separate functions, possibly separate mods
// TODO: Make a zero vector
// Should have options like whileDown(Keyboard::Key(W), MoveForward)
// onPress(Mouse::Button(1), FireWeapon)
extern crate piston;
extern crate vecmath;
extern crate graphics;
extern crate glutin_window;
extern crate opengl_graphics;
extern crate float;
// My crates
extern crate game_utils;
use piston::window::WindowSettings;
use piston::event_loop::*;
use piston::input::*;
use glutin_window::GlutinWindow as Window;
use opengl_graphics::{ GlGraphics, OpenGL };
use std::collections::{ HashMap, HashSet };
use float::Radians;
use game_utils::game_math::{vec2_is_zero, vec2_rotate, get_rotation};
#[derive(Debug)]
pub enum GameAction {
MoveForward(f32),
MoveBackward(f32),
MoveLeft(f32),
MoveRight(f32),
Trigger(bool)
}
struct KeyboardState {
keys_down: HashSet<Key>,
new_keys: HashSet<Key>
}
impl KeyboardState {
fn new() -> KeyboardState {
return KeyboardState {
keys_down: HashSet::new(),
new_keys: HashSet::new()
}
}
}
struct PlayerState {
velocity: vecmath::Vector2<f64>,
location: vecmath::Vector2<f64>,
max_speed: f64,
angle: f64
}
struct MouseState {
position: vecmath::Vector2<f64>
}
pub struct App {
gl: GlGraphics,
player_state: PlayerState,
keyboard_state: KeyboardState,
mouse_state: MouseState,
move_mapping: HashMap<Key, vecmath::Vector2<f64>> // TODO: this is input state/configuration?
}
impl App {
fn new(opengl: OpenGL, (width, height): (f64, f64)) -> App {
let mut move_mapping = HashMap::new();
move_mapping.insert(Key::W, [0.0, -1.0]);
move_mapping.insert(Key::S, [0.0, 1.0]);
move_mapping.insert(Key::A, [-1.0, 0.0]);
move_mapping.insert(Key::D, [1.0, 0.0]);
return App {
gl: GlGraphics::new(opengl),
mouse_state: MouseState {
position: [0.0, 0.0]
},
player_state: PlayerState {
velocity: [0.0, 0.0],
max_speed: 5.0,
location: [(width / 2.0) as f64, (height / 2.0) as f64],
angle: 0.0
},
keyboard_state: KeyboardState::new(),
move_mapping: move_mapping
}
}
fn render(&mut self, args: &RenderArgs) {
use graphics::*;
const BLACK: [f32; 4] = [0.0, 0.0, 0.0, 1.0];
        const GREEN: [f32; 4] = [0.0, 1.0, 0.0, 1.0];
const RECT_SIZE : f64 = 100.0;
let square = rectangle::square(0.0, 0.0, RECT_SIZE);
let player = &self.player_state;
let rotation = player.angle;
let x = player.location[0];
let y = player.location[1];
self.gl.draw(args.viewport(), |c, gl| {
// Clear the screen.
clear(BLACK, gl);
let transform = c.transform.trans(x, y)
.rot_rad(rotation)
.trans(-(RECT_SIZE / 2.0), -(RECT_SIZE / 2.0));
// Draw a box rotating around the middle of the screen.
            rectangle(GREEN, square, transform, gl);
});
}
fn update(&mut self, args: &UpdateArgs) {
let elapsed_time = args.dt;
let mut target_velocity = [0.0, 0.0];
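        // Sum the unit direction of every held movement key to get the desired travel direction.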
for key in self.keyboard_state.keys_down.iter() {
match self.move_mapping.get(&key) {
Some(x) => target_velocity = vecmath::vec2_add(target_velocity, *x),
None => {}
}
}
let desired_angle = get_rotation(self.player_state.location, self.mouse_state.position) + (90.0).deg_to_rad();
println!("Desired angle is {}", desired_angle);
if desired_angle != self.player_state.angle {
let direction = if desired_angle < self.player_state.angle {
-5.0
} else {
5.0
};
self.player_state.angle = self.player_state.angle + (direction * elapsed_time);
}
if !vec2_is_zero(target_velocity) {
target_velocity = vecmath::vec2_normalized(target_velocity);
}
target_velocity = vec2_rotate(vecmath::vec2_scale(target_velocity, self.player_state.max_speed), self.player_state.angle);
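        // With no input, decelerate toward rest; otherwise accelerate toward the
        // target velocity and clamp the result to max_speed.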
let new_velocity = if vec2_is_zero(target_velocity) {
if !vec2_is_zero(self.player_state.velocity) {
let reverse = vec2_rotate(self.player_state.velocity, (180.0).deg_to_rad());
let scaled = vecmath::vec2_scale(
vecmath::vec2_normalized(reverse),
4.0 * elapsed_time
);
vecmath::vec2_add(self.player_state.velocity, scaled)
} else {
[0.0, 0.0]
}
} else {
let acceleration = vecmath::vec2_scale(
vecmath::vec2_normalized(
target_velocity
),
5.0 * elapsed_time
);
let new_velocity = vecmath::vec2_add(self.player_state.velocity, acceleration);
if vecmath::vec2_len(new_velocity) > self.player_state.max_speed {
vecmath::vec2_scale(
vecmath::vec2_normalized(new_velocity),
self.player_state.max_speed
)
} else {
new_velocity
}
};
self.player_state.velocity = new_velocity;
self.player_state.location = vecmath::vec2_add(self.player_state.location, self.player_state.velocity);
self.keyboard_state.new_keys.clear();
}
fn handle_input(&mut self, input_event: &Input) {
match *input_event {
Input::Press(Button::Keyboard(key)) => {
let new_insert = self.keyboard_state.keys_down.insert(key);
if new_insert {
self.keyboard_state.new_keys.insert(key);
}
},
Input::Release(Button::Keyboard(key)) => {
self.keyboard_state.keys_down.remove(&key);
},
Input::Text(_) => {},
Input::Move(Motion::MouseCursor(x, y)) => self.mouse_state.position = [x, y],
_ => println!("Unhandled input {:?}", input_event),
}
}
}
fn main() {
// Change this to OpenGL::V2_1 if not working.
let opengl = OpenGL::V2_1;
// Create an Glutin window.
let mut window: Window = WindowSettings::new(
"top-down",
[640, 480]
)
.opengl(opengl)
.exit_on_esc(true)
.build()
.unwrap();
// Create a new game and run it.
let mut app = App::new(opengl, (640.0, 480.0));
let mut events = window.events();
while let Some(e) = events.next(&mut window) {
match e {
Event::Render(r) => app.render(&r),
Event::Update(u) => app.update(&u),
Event::Input(i) => app.handle_input(&i),
Event::AfterRender(_) => {},
Event::Idle(_) => {},
}
}
}
| true |
8faa93e585ffd15eeffb7da7c7a29ae6e745d4a0
|
Rust
|
rjsberry/nano
|
/nano-oneshot/tests/tests.rs
|
UTF-8
| 3,647 | 2.90625 | 3 |
[
"MIT",
"Apache-2.0"
] |
permissive
|
use std::sync::{Arc, Barrier};
use std::thread;
use std::time::Duration;
use nano_oneshot::{self, RecvError, RecvTimeoutError, SendError};
#[test]
fn oneshot() {
let (s, r) = nano_oneshot::channel();
s.send(128).expect("send");
assert_eq!(r.recv().expect("recv"), 128);
}
#[test]
fn oneshot_send_drop_receiver() {
let (s, r) = nano_oneshot::channel();
drop(r);
assert!(s.is_disconnected());
assert_eq!(s.send(128).unwrap_err(), SendError::Disconnected(128));
}
#[test]
fn oneshot_recv_drop_sender() {
let (s, r) = nano_oneshot::channel::<i32>();
drop(s);
assert!(r.is_disconnected());
assert_eq!(r.recv().unwrap_err(), RecvError::Disconnected);
}
#[test]
fn oneshot_recv_timeout_drop_sender() {
let (s, r) = nano_oneshot::channel::<i32>();
drop(s);
assert!(r.is_disconnected());
assert_eq!(
r.recv_timeout(Duration::from_secs(1)).unwrap_err(),
RecvTimeoutError::Disconnected
);
}
#[test]
fn oneshot_concurrent() {
let (s, r) = nano_oneshot::channel();
let handle = thread::spawn(move || s.send(128));
assert!(handle.join().expect("thread").is_ok());
assert_eq!(r.recv().expect("recv"), 128);
}
#[test]
fn oneshot_concurrent_timeout() {
let (s, r) = nano_oneshot::channel();
let b = Arc::new(Barrier::new(2));
let handle = {
let b = Arc::clone(&b);
thread::spawn(move || {
b.wait();
s.send(128)
})
};
assert!(r.recv_timeout(Duration::from_nanos(1)).is_err());
b.wait();
assert_eq!(
handle.join().expect("thread").unwrap_err(),
SendError::Disconnected(128)
);
}
#[test]
fn oneshot_concurrent_send_drop_receiver() {
let (s, r) = nano_oneshot::channel();
let b = Arc::new(Barrier::new(2));
let handle = {
let b = Arc::clone(&b);
thread::spawn(move || {
b.wait();
s.send(128)
})
};
drop(r);
b.wait();
assert_eq!(
handle.join().expect("thread").unwrap_err(),
SendError::Disconnected(128)
);
}
#[test]
fn oneshot_concurrent_recv_drop_sender() {
let (s, r) = nano_oneshot::channel::<i32>();
let b = Arc::new(Barrier::new(3));
let b2 = Arc::new(Barrier::new(2));
let handle = {
let b = Arc::clone(&b);
let b2 = Arc::clone(&b2);
thread::spawn(move || {
b.wait();
b2.wait();
drop(s);
})
};
let handle2 = {
let b = Arc::clone(&b);
thread::spawn(move || {
b.wait();
r.recv()
})
};
b.wait();
thread::sleep(Duration::from_millis(5));
b2.wait();
handle.join().expect("thread");
assert_eq!(
handle2.join().expect("thread").unwrap_err(),
RecvError::Disconnected
);
}
#[test]
fn oneshot_concurrent_recv_timeout_drop_sender() {
let (s, r) = nano_oneshot::channel::<i32>();
let b = Arc::new(Barrier::new(3));
let b2 = Arc::new(Barrier::new(2));
let handle = {
let b = Arc::clone(&b);
let b2 = Arc::clone(&b2);
thread::spawn(move || {
b.wait();
b2.wait();
drop(s);
})
};
let handle2 = {
let b = Arc::clone(&b);
thread::spawn(move || {
b.wait();
r.recv_timeout(Duration::from_secs(1))
})
};
b.wait();
thread::sleep(Duration::from_millis(5));
b2.wait();
handle.join().expect("thread");
assert_eq!(
handle2.join().expect("thread").unwrap_err(),
RecvTimeoutError::Disconnected
);
}
| true |
5472d5aeb6cb716518f4311c0750b2a8c9bef1ae
|
Rust
|
Cassin01/rubyst
|
/src/is/mod.rs
|
UTF-8
| 2,903 | 3.15625 | 3 |
[
"MIT"
] |
permissive
|
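// Character and keyword classification helpers used by the lexer/parser.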
use std::iter::Peekable;
use std::str::Chars;
pub fn is_this(cs: &mut Peekable<Chars>, f: &Fn(&char)->bool) -> bool {
match cs.peek() {
Some(c) => f(&c),
None => false,
}
}
pub fn is_num(c: &char) -> bool {
match c {
        '0'..='9' => true,
_ => false
}
}
pub fn is_alphabet(c: &char) -> bool {
match c {
        'a'..='z' => true,
'_' => true,
_ => false,
}
}
pub fn is_space(c: &char) -> bool {
match c {
' ' => true,
_ => false,
}
}
pub fn is_operator(c: &char) -> bool {
match c {
'*' => true,
'/' => true,
'%' => true,
'+' => true,
'-' => true,
'=' => true,
'!' => true,
'>' => true,
'<' => true,
_ => false,
}
}
pub fn is_first_bracket(c: &char) -> bool {
match c {
'(' => true,
_ => false,
}
}
pub fn is_second_bracket(c: &char) -> bool {
match c {
')' => true,
_ => false,
}
}
pub fn is_new_line(c: &char) -> bool {
match c {
'\n' => true,
_ => false,
}
}
pub fn is_quotation(c: &char) -> bool {
match c {
'"' => true,
_ => false,
}
}
pub fn is_operator_sums(s: &String) -> bool {
match s.as_str() {
"+" => true,
"-" => true,
_ => false,
}
}
pub fn is_operator_puroducts(s: &String) -> bool {
match s.as_str() {
"*" => true,
"/" => true,
"%" => true,
_ => false,
}
}
pub fn is_operator_pows(s: &String) -> bool {
match s.as_str() {
"**" => true,
_ => false,
}
}
pub fn is_operator_eqls(s: &String) -> bool {
match s.as_str() {
"==" => true,
"!=" => true,
">=" => true,
"<=" => true,
">" => true,
"<" => true,
_ => false,
}
}
pub fn is_operator_assign(s: &String) -> bool {
match s.as_str() {
"=" => true,
_ => false,
}
}
pub fn reserved_function(s: &String) -> bool {
match s.as_str() {
"if" => true,
"while" => true,
"begin" => true,
"case" => true,
_ => false,
}
}
pub fn reserved_if(s: &String) -> bool {
match s.as_str() {
"if" => true,
_ => false,
}
}
pub fn reserved_while(s: &String) -> bool {
match s.as_str() {
"while" => true,
_ => false,
}
}
pub fn reserved_begin(s: &String) -> bool {
match s.as_str() {
"begin" => true,
_ => false,
}
}
pub fn reserved_case(s: &String) -> bool {
match s.as_str() {
"case" => true,
_ => false,
}
}
pub fn in_closure(s: &String) -> bool {
match s.as_str() {
"case" | "while" | "begin" | "if" => true,
_ => false,
}
}
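// A minimal illustrative test of these helpers (a sketch added for clarity;
// not part of the original crate's test suite).
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn classifies_basic_characters() {
        assert!(is_num(&'7'));
        assert!(is_alphabet(&'a'));
        assert!(is_operator(&'+'));
        assert!(is_operator_sums(&"+".to_string()));
        assert!(!is_num(&'x'));
    }
}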
| true |
7614c7adf3a9d0c3a59c2a9333fc4bc40e0519d2
|
Rust
|
delneg/bitcoin-address-generator-api
|
/src/accounts/jobs/odd_registration_attempt.rs
|
UTF-8
| 1,900 | 3.03125 | 3 |
[
"MIT"
] |
permissive
|
use std::collections::HashMap;
use std::env::var;
use std::pin::Pin;
use std::future::Future;
use jelly::serde::{Deserialize, Serialize};
use jelly::anyhow::{anyhow, Error};
use jelly::email::Email;
use jelly::jobs::{DEFAULT_QUEUE, Job, JobState};
use crate::accounts::Account;
/// An email that gets sent if a user attempts to register
/// under an already registered email. We don't want to say
/// "this email exists already", as that reveals that a user has
/// registered for this service.
///
/// Instead we'll just send the registered account an email asking
/// if they meant to reset their password, and display to the user
/// registering the standard "verify" flow.
#[derive(Debug, Serialize, Deserialize)]
pub struct SendAccountOddRegisterAttemptEmail {
pub to: String
}
impl Job for SendAccountOddRegisterAttemptEmail {
type State = JobState;
type Future = Pin<Box<dyn Future<Output=Result<(), Error>> + Send>>;
const NAME: &'static str = "SendAccountOddRegisterAttemptEmailJob";
const QUEUE: &'static str = DEFAULT_QUEUE;
fn run(self, state: JobState) -> Self::Future {
Box::pin(async move {
let name = Account::fetch_name_from_email(&self.to, &state.pool).await.map_err(|e| {
anyhow!("Error fetching user name/email for odd registration attempt: {:?}", e)
})?;
let email = Email::new("odd-registration-attempt", &[self.to], {
let mut model = HashMap::new();
model.insert("preview", "Did you want to reset your password?".into());
model.insert("name", name);
model.insert("action_url", format!("{}/accounts/reset/", var("DOMAIN")
.expect("DOMAIN not set?")
));
model
});
email.send()?;
Ok(())
})
}
}
| true |
5553fd8415812b8ef862f9418c85e0e03dbed4e0
|
Rust
|
dalarson/2fa_attack
|
/otpgen/src/server.rs
|
UTF-8
| 1,982 | 2.5625 | 3 |
[] |
no_license
|
#![feature(proc_macro_hygiene)]
#![feature(decl_macro)]
#[macro_use]
extern crate rocket;
extern crate rocket_contrib;
use otpgen::AuthRequest;
use rocket::State;
use otpgen::otp_now;
use rocket_contrib::json::Json;
use std::sync::Mutex;
use crypto::aessafe::AesSafe256Decryptor;
use crypto::symmetriccipher::BlockDecryptor;
struct OtpState {
pub secret: [u8; 32],
pub last_otp: Mutex<u32>,
}
use rocket::http::Status;
#[post("/auth", data = "<req>")]
fn auth(state: State<OtpState>, req: Json<AuthRequest>) -> Status {
// println!("Got an auth request: {:?}", request);
println!("___________________\nNEW AUTH REQUEST RECEIVED");
let mut last_otp = state.last_otp.lock().unwrap();
// Generate our code
let generated_code = otp_now(&state.secret);
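    // A code that matches the last accepted one is rejected as already used ("burned"),
    // unless the OTP_DO_NOT_BURN environment variable is set (useful for testing).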
if generated_code == *last_otp && std::env::var("OTP_DO_NOT_BURN").is_err() {
println!("Current code has expired. Please wait for next code.");
return Status::from_code(401).unwrap();
}
println!("Expected code: {}", generated_code);
*last_otp = generated_code;
// Pull out the code given to us by the client and decrypt it
let decryptor = AesSafe256Decryptor::new(&state.secret);
let given_code = hex::decode(&req.otp).unwrap();
let mut decrypted_code_bytes = [0u8; 16];
decryptor.decrypt_block(&given_code, &mut decrypted_code_bytes);
let decrypted_code = u128::from_ne_bytes(decrypted_code_bytes);
println!("Decrypted code: {}", decrypted_code);
if decrypted_code as u32 == generated_code {
println!("Authentication successful.");
return Status::from_code(200).unwrap();
} else {
println!("Authentication denied.");
return Status::from_code(401).unwrap();
}
}
fn main() {
let state = OtpState {
secret: *include_bytes!("shared-secret"),
last_otp: Mutex::new(0),
};
rocket::ignite()
.mount("/", routes![auth])
.manage(state)
.launch();
}
| true |
9baeb7e548294f04126148fd371c952d1f0be1fb
|
Rust
|
galacticfungus/Egg
|
/egg/src/error/underlying.rs
|
UTF-8
| 1,781 | 3.109375 | 3 |
[
"MIT",
"Apache-2.0"
] |
permissive
|
use std::fmt;
use std::io;
use std::string;
#[derive(Debug)]
pub enum UnderlyingError {
Io(io::Error),
InvalidString(string::FromUtf8Error),
FailedConversion(std::num::TryFromIntError),
// TODO: Path fail may need to be more generic than this
PathFail(std::path::StripPrefixError),
}
impl UnderlyingError {
pub fn get_error(&self) -> &(dyn std::error::Error + 'static) {
match self {
UnderlyingError::InvalidString(error) => error,
UnderlyingError::Io(error) => error,
UnderlyingError::FailedConversion(error) => error,
UnderlyingError::PathFail(error) => error,
}
}
}
impl From<string::FromUtf8Error> for UnderlyingError {
fn from(error: string::FromUtf8Error) -> UnderlyingError {
UnderlyingError::InvalidString(error)
}
}
impl From<io::Error> for UnderlyingError {
fn from(error: io::Error) -> UnderlyingError {
UnderlyingError::Io(error)
}
}
impl From<std::num::TryFromIntError> for UnderlyingError {
fn from(error: std::num::TryFromIntError) -> UnderlyingError {
UnderlyingError::FailedConversion(error)
}
}
impl From<std::path::StripPrefixError> for UnderlyingError {
fn from(error: std::path::StripPrefixError) -> UnderlyingError {
UnderlyingError::PathFail(error)
}
}
impl std::fmt::Display for UnderlyingError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
UnderlyingError::InvalidString(error) => write!(f, "{}", error),
UnderlyingError::Io(error) => write!(f, "{}", error),
UnderlyingError::FailedConversion(error) => write!(f, "{}", error),
UnderlyingError::PathFail(error) => write!(f, "{}", error),
}
}
}
| true |
1fce706821158bbf316ae4a0680a17dba06141b4
|
Rust
|
orgarten/ndarray-linalg
|
/lax/src/tridiagonal.rs
|
UTF-8
| 8,313 | 3.09375 | 3 |
[
"LicenseRef-scancode-unknown-license-reference",
"MIT"
] |
permissive
|
//! Implement linear solver using LU decomposition
//! for tridiagonal matrix
use crate::{error::*, layout::*, *};
use cauchy::*;
use num_traits::Zero;
use std::ops::{Index, IndexMut};
/// Represents a tridiagonal matrix as 3 one-dimensional vectors.
///
/// ```text
/// [d0, u1, 0, ..., 0,
/// l1, d1, u2, ...,
/// 0, l2, d2,
/// ... ..., u{n-1},
/// 0, ..., l{n-1}, d{n-1},]
/// ```
#[derive(Clone, PartialEq)]
pub struct Tridiagonal<A: Scalar> {
/// layout of raw matrix
pub l: MatrixLayout,
/// (n-1) sub-diagonal elements of matrix.
pub dl: Vec<A>,
/// (n) diagonal elements of matrix.
pub d: Vec<A>,
/// (n-1) super-diagonal elements of matrix.
pub du: Vec<A>,
}
impl<A: Scalar> Tridiagonal<A> {
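    /// One-norm (maximum absolute column sum) of the matrix; computed before factorization
    /// and later passed to `*gtcon` when estimating the reciprocal condition number.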
fn opnorm_one(&self) -> A::Real {
let mut col_sum: Vec<A::Real> = self.d.iter().map(|val| val.abs()).collect();
for i in 0..col_sum.len() {
if i < self.dl.len() {
col_sum[i] += self.dl[i].abs();
}
if i > 0 {
col_sum[i] += self.du[i - 1].abs();
}
}
let mut max = A::Real::zero();
for &val in &col_sum {
if max < val {
max = val;
}
}
max
}
}
/// Represents the LU factorization of a tridiagonal matrix `A` as `A = P*L*U`.
#[derive(Clone, PartialEq)]
pub struct LUFactorizedTridiagonal<A: Scalar> {
/// A tridiagonal matrix which consists of
/// - l : layout of raw matrix
/// - dl: (n-1) multipliers that define the matrix L.
/// - d : (n) diagonal elements of the upper triangular matrix U.
/// - du: (n-1) elements of the first super-diagonal of U.
pub a: Tridiagonal<A>,
/// (n-2) elements of the second super-diagonal of U.
pub du2: Vec<A>,
/// The pivot indices that define the permutation matrix `P`.
pub ipiv: Pivot,
a_opnorm_one: A::Real,
}
impl<A: Scalar> Index<(i32, i32)> for Tridiagonal<A> {
type Output = A;
#[inline]
fn index(&self, (row, col): (i32, i32)) -> &A {
let (n, _) = self.l.size();
assert!(
std::cmp::max(row, col) < n,
"ndarray: index {:?} is out of bounds for array of shape {}",
[row, col],
n
);
match row - col {
0 => &self.d[row as usize],
1 => &self.dl[col as usize],
-1 => &self.du[row as usize],
_ => panic!(
"ndarray-linalg::tridiagonal: index {:?} is not tridiagonal element",
[row, col]
),
}
}
}
impl<A: Scalar> Index<[i32; 2]> for Tridiagonal<A> {
type Output = A;
#[inline]
fn index(&self, [row, col]: [i32; 2]) -> &A {
&self[(row, col)]
}
}
impl<A: Scalar> IndexMut<(i32, i32)> for Tridiagonal<A> {
#[inline]
fn index_mut(&mut self, (row, col): (i32, i32)) -> &mut A {
let (n, _) = self.l.size();
assert!(
std::cmp::max(row, col) < n,
"ndarray: index {:?} is out of bounds for array of shape {}",
[row, col],
n
);
match row - col {
0 => &mut self.d[row as usize],
1 => &mut self.dl[col as usize],
-1 => &mut self.du[row as usize],
_ => panic!(
"ndarray-linalg::tridiagonal: index {:?} is not tridiagonal element",
[row, col]
),
}
}
}
impl<A: Scalar> IndexMut<[i32; 2]> for Tridiagonal<A> {
#[inline]
fn index_mut(&mut self, [row, col]: [i32; 2]) -> &mut A {
&mut self[(row, col)]
}
}
/// Wraps `*gttrf`, `*gtcon` and `*gttrs`
pub trait Tridiagonal_: Scalar + Sized {
/// Computes the LU factorization of a tridiagonal `m x n` matrix `a` using
/// partial pivoting with row interchanges.
fn lu_tridiagonal(a: Tridiagonal<Self>) -> Result<LUFactorizedTridiagonal<Self>>;
fn rcond_tridiagonal(lu: &LUFactorizedTridiagonal<Self>) -> Result<Self::Real>;
fn solve_tridiagonal(
lu: &LUFactorizedTridiagonal<Self>,
bl: MatrixLayout,
t: Transpose,
b: &mut [Self],
) -> Result<()>;
}
macro_rules! impl_tridiagonal {
(@real, $scalar:ty, $gttrf:path, $gtcon:path, $gttrs:path) => {
impl_tridiagonal!(@body, $scalar, $gttrf, $gtcon, $gttrs, iwork);
};
(@complex, $scalar:ty, $gttrf:path, $gtcon:path, $gttrs:path) => {
impl_tridiagonal!(@body, $scalar, $gttrf, $gtcon, $gttrs, );
};
(@body, $scalar:ty, $gttrf:path, $gtcon:path, $gttrs:path, $($iwork:ident)*) => {
impl Tridiagonal_ for $scalar {
fn lu_tridiagonal(mut a: Tridiagonal<Self>) -> Result<LUFactorizedTridiagonal<Self>> {
let (n, _) = a.l.size();
let mut du2 = unsafe { vec_uninit( (n - 2) as usize) };
let mut ipiv = unsafe { vec_uninit( n as usize) };
// We have to calc one-norm before LU factorization
let a_opnorm_one = a.opnorm_one();
let mut info = 0;
unsafe { $gttrf(n, &mut a.dl, &mut a.d, &mut a.du, &mut du2, &mut ipiv, &mut info,) };
info.as_lapack_result()?;
Ok(LUFactorizedTridiagonal {
a,
du2,
ipiv,
a_opnorm_one,
})
}
fn rcond_tridiagonal(lu: &LUFactorizedTridiagonal<Self>) -> Result<Self::Real> {
let (n, _) = lu.a.l.size();
let ipiv = &lu.ipiv;
let mut work = unsafe { vec_uninit( 2 * n as usize) };
$(
let mut $iwork = unsafe { vec_uninit( n as usize) };
)*
let mut rcond = Self::Real::zero();
let mut info = 0;
unsafe {
$gtcon(
NormType::One as u8,
n,
&lu.a.dl,
&lu.a.d,
&lu.a.du,
&lu.du2,
ipiv,
lu.a_opnorm_one,
&mut rcond,
&mut work,
$(&mut $iwork,)*
&mut info,
);
}
info.as_lapack_result()?;
Ok(rcond)
}
fn solve_tridiagonal(
lu: &LUFactorizedTridiagonal<Self>,
b_layout: MatrixLayout,
t: Transpose,
b: &mut [Self],
) -> Result<()> {
let (n, _) = lu.a.l.size();
let ipiv = &lu.ipiv;
// Transpose if b is C-continuous
let mut b_t = None;
let b_layout = match b_layout {
MatrixLayout::C { .. } => {
b_t = Some(unsafe { vec_uninit( b.len()) });
transpose(b_layout, b, b_t.as_mut().unwrap())
}
MatrixLayout::F { .. } => b_layout,
};
let (ldb, nrhs) = b_layout.size();
let mut info = 0;
unsafe {
$gttrs(
t as u8,
n,
nrhs,
&lu.a.dl,
&lu.a.d,
&lu.a.du,
&lu.du2,
ipiv,
b_t.as_mut().map(|v| v.as_mut_slice()).unwrap_or(b),
ldb,
&mut info,
);
}
info.as_lapack_result()?;
if let Some(b_t) = b_t {
transpose(b_layout, &b_t, b);
}
Ok(())
}
}
};
} // impl_tridiagonal!
impl_tridiagonal!(@real, f64, lapack::dgttrf, lapack::dgtcon, lapack::dgttrs);
impl_tridiagonal!(@real, f32, lapack::sgttrf, lapack::sgtcon, lapack::sgttrs);
impl_tridiagonal!(@complex, c64, lapack::zgttrf, lapack::zgtcon, lapack::zgttrs);
impl_tridiagonal!(@complex, c32, lapack::cgttrf, lapack::cgtcon, lapack::cgttrs);
| true |
a69f7632f7a58ae33f28f2e43b0514dce610b8da
|
Rust
|
geom3trik/tuix_baseview
|
/examples/gain_widget.rs
|
UTF-8
| 993 | 2.515625 | 3 |
[] |
no_license
|
use tuix_baseview::Application;
use tuix::{Entity, Event, State, BuildHandler, EventHandler};
use tuix::style::{Length, Color};
use tuix::widgets::value_knob::*;
use tuix::widgets::control_knob::*;
static THEME: &str = include_str!("theme.css");
struct GainWidget {
control: Entity,
}
impl GainWidget {
pub fn new() -> Self {
GainWidget {
control: Entity::null(),
}
}
}
impl BuildHandler for GainWidget {
type Ret = Entity;
fn on_build(&mut self, state: &mut State, entity: Entity) -> Self::Ret {
self.control = ValueKnob::new("Gain", 1.0, 0.0, 1.0).build(state, entity, |builder| builder);
entity
}
}
impl EventHandler for GainWidget {
}
fn main() {
let mut app = Application::new(|win_desc, state, window| {
state.insert_theme(THEME);
GainWidget::new().build(state, window, |builder| builder);
win_desc.with_title("Hello GUI").with_inner_size(300,300)
});
}
| true |
37123aad056a947d741711fa8d5e136fbe688bea
|
Rust
|
luxrck/raytrace
|
/src/main.rs
|
UTF-8
| 9,911 | 2.890625 | 3 |
[
"MIT"
] |
permissive
|
#![feature(box_syntax)]
use std::env;
use std::f64;
use image::RgbImage;
use nalgebra::Vector3;
use rand::prelude::*;
use rayon::prelude::*;
#[derive(Copy, Clone, Debug)]
struct Ray {
origin: Vector3<f64>,
direction: Vector3<f64>,
color: Vector3<f64>,
}
impl Ray {
fn new(o: Vector3<f64>, d: Vector3<f64>, c: Option<Vector3<f64>>) -> Self {
let c = match c {
Some(c) => c,
None => Vector3::new(1.0, 1.0, 1.0),
};
Ray {
origin: o,
direction: d.normalize(),
color: c,
}
}
fn point(&self, t: f64) -> Vector3<f64> {
self.origin + t * self.direction
}
}
trait Hittable {
fn hit(&self, ray: &Ray) -> Option<Ray>;
}
struct World {
objects: Vec<Box<dyn Hittable>>,
}
impl Hittable for World {
fn hit(&self, ray: &Ray) -> Option<Ray> {
let mut nearest: Option<Ray> = None;
for obj in &self.objects {
nearest = match obj.hit(ray) {
Some(oray) => {
let nray = match nearest {
Some(nray) => {
if (oray.origin - nray.origin).dot(&ray.direction) < 0.0 {
oray
} else {
nray
}
}
None => oray,
};
Some(nray)
}
None => continue,
}
}
nearest
}
}
unsafe impl Send for World {}
unsafe impl Sync for World {}
trait Material {
fn scatter(&self, normal: Vector3<f64>, in_ray: &Ray) -> Ray;
}
#[derive(Copy, Clone)]
struct Lamberitian {
albedo: Vector3<f64>,
}
#[derive(Copy, Clone)]
struct Metal {
albedo: Vector3<f64>,
fuzz: f64,
}
impl Material for Lamberitian {
fn scatter(&self, normal: Vector3<f64>, in_ray: &Ray) -> Ray {
let mut rng = rand::thread_rng();
let normal = normal.normalize();
let direction = loop {
let d = Vector3::new(rng.gen::<f64>(), rng.gen::<f64>(), rng.gen::<f64>());
let p = 2.0 * d - Vector3::new(1.0, 1.0, 1.0);
if p.dot(&p) < 1.0 {
break normal + p;
}
};
Ray::new(
Vector3::new(0.0, 0.0, 0.0),
direction,
Some(self.albedo.component_mul(&in_ray.color)),
)
}
}
impl Material for Metal {
fn scatter(&self, normal: Vector3<f64>, in_ray: &Ray) -> Ray {
let mut rng = rand::thread_rng();
let normal = normal.normalize();
let in_direction = in_ray.direction;
// reflection vector direction
let mut direction = in_direction - 2.0 * in_direction.dot(&normal) * normal;
let rand_direction = loop {
let d = Vector3::new(rng.gen::<f64>(), rng.gen::<f64>(), rng.gen::<f64>());
let p = 2.0 * d - Vector3::new(1.0, 1.0, 1.0);
if p.dot(&p) < 1.0 {
break normal + p;
}
};
direction += self.fuzz * rand_direction;
Ray::new(
Vector3::new(0.0, 0.0, 0.0),
direction,
Some(self.albedo.component_mul(&in_ray.color)),
)
}
}
struct Sphere<M: Material> {
center: Vector3<f64>,
radius: f64,
material: M,
}
impl<M: Material> Hittable for Sphere<M> {
fn hit(&self, ray: &Ray) -> Option<Ray> {
let oc = ray.origin - self.center;
let a: f64 = ray.direction.dot(&ray.direction);
let b: f64 = oc.dot(&ray.direction);
let c: f64 = oc.dot(&oc) - self.radius * self.radius;
let delta = b * b - a * c;
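        // Half-b form of the quadratic: a real intersection exists only when the
        // discriminant is non-negative.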
if delta < 0.0 {
return None;
}
        let hitted = (-b - delta.sqrt()) / a;
        // in_ray hits object A -> calc t -> calc hit point p -> compute reflection ray oray.
        // When we compute t for the reflection ray on A, is it possible to get a non-zero
        // value of t due to loss of significance when computing p?
if hitted < 0.00001 {
return None;
}
let point = ray.point(hitted);
let normal = point - self.center;
let mut oray = self.material.scatter(normal, ray);
oray.origin = point;
Some(oray)
}
}
unsafe impl<M: Material> Send for Sphere<M> {}
unsafe impl<M: Material> Sync for Sphere<M> {}
struct Camera {
eye: Vector3<f64>,
xaxis: Vector3<f64>,
yaxis: Vector3<f64>,
origin: Vector3<f64>,
sampling: u32,
len_radius: f64,
}
impl Camera {
fn new(eye: Vector3<f64>, center: Vector3<f64>, xlen: f64, ylen: f64, aperture: f64) -> Self {
let vcenter = center - eye;
let zdepth = vcenter.dot(&vcenter).sqrt();
let vxaxis = xlen * zdepth * vcenter.cross(&Vector3::new(0.0, 1.0, 0.0)).normalize();
let vyaxis = ylen * zdepth * vxaxis.cross(&vcenter).normalize();
let vorigin = eye + vcenter - vxaxis / 2.0 - vyaxis / 2.0;
Camera {
eye: eye,
xaxis: vxaxis,
yaxis: vyaxis,
origin: vorigin,
sampling: 0,
len_radius: aperture / 2.0,
}
}
fn with_sampling(mut self, samples: u32) -> Self {
self.sampling = samples;
self
}
fn ray(&self, u: f64, v: f64) -> Ray {
let mut rng = rand::thread_rng();
let rand_direction = self.len_radius
* loop {
let d = Vector3::new(rng.gen::<f64>(), rng.gen::<f64>(), rng.gen::<f64>());
let p = 2.0 * d - Vector3::new(1.0, 1.0, 1.0);
if p.dot(&p) < 1.0 {
break p;
}
};
let offset = Vector3::new(rand_direction.x, 0.0, 0.0)
.component_mul(&self.xaxis.normalize())
+ Vector3::new(0.0, rand_direction.y, 0.0).component_mul(&self.yaxis.normalize());
Ray::new(
self.eye + offset,
self.origin + u * self.xaxis + v * self.yaxis - self.eye - offset,
None,
)
}
fn trace(&self, ray: Ray, world: &World) -> Vector3<f64> {
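        // Follow the ray for up to `max_trace` bounces, multiplying in each surface's
        // albedo; rays that escape pick up the sky gradient, rays that never escape go black.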
let sky_color = |d: &Vector3<f64>| {
let sky = Vector3::new(0.5, 0.7, 1.0);
let t = 0.5 * (d.y + 1.0);
(1.0 - t) * Vector3::new(1.0, 1.0, 1.0) + t * sky
};
let mut iray = ray;
let max_trace = 50;
let mut current_trace = 0;
let color = loop {
iray = match world.hit(&iray) {
Some(nray) => nray,
None => {
break iray.color.component_mul(&sky_color(&iray.direction));
}
};
current_trace += 1;
if current_trace > max_trace {
break Vector3::new(0.0, 0.0, 0.0);
}
};
color
}
fn render(&self, world: &World, resolution: (u32, u32)) -> Option<RgbImage> {
let (width, height) = resolution;
let render_pixel = |x: u32, y: u32, rng: &mut ThreadRng| {
let color = if self.sampling > 0 {
let mut color = Vector3::new(0.0, 0.0, 0.0);
for _ in 0..self.sampling {
let u: f64 = (x as f64 + rng.gen::<f64>()) / (width as f64);
let v: f64 = (y as f64 + rng.gen::<f64>()) / (height as f64);
color += self.trace(self.ray(u, v), world);
}
color /= self.sampling as f64;
color
} else {
let u: f64 = (x as f64) / (width as f64);
let v: f64 = (y as f64) / (height as f64);
self.trace(self.ray(u, v), world)
};
let r: u8 = (255.99 * color.x.sqrt()) as u8;
let g: u8 = (255.99 * color.y.sqrt()) as u8;
let b: u8 = (255.99 * color.z.sqrt()) as u8;
vec![r, g, b]
};
let buf: Vec<_> = (0..height)
.into_par_iter()
.map(|y| {
let mut rng = rand::thread_rng();
(0..width)
.into_iter()
.map(|x| render_pixel(x, height - y, &mut rng))
.flatten()
.collect::<Vec<_>>()
})
.flatten()
.collect();
RgbImage::from_raw(width, height, buf)
}
}
fn main() {
let eye = Vector3::new(-1.0, 3.0, 3.0);
let center = Vector3::new(0.0, 0.0, -1.0);
let cam = Camera::new(eye, center, 0.8, 0.4, 2.0).with_sampling(256);
let diffuse = Lamberitian {
albedo: Vector3::new(0.8, 0.3, 0.3),
};
let diffuse1 = Lamberitian {
albedo: Vector3::new(0.8, 0.8, 0.0),
};
let metal = Metal {
albedo: Vector3::new(0.8, 0.8, 0.8),
fuzz: 0.4,
};
let metal1 = Metal {
albedo: Vector3::new(0.8, 0.6, 0.2),
fuzz: 1.0,
};
let metal2 = Metal {
albedo: Vector3::new(0.6, 0.6, 0.6),
fuzz: 0.1,
};
let s1 = Sphere {
center: Vector3::new(-1.0, 0.0, -1.0),
radius: 0.5,
material: metal,
};
let s2 = Sphere {
center: Vector3::new(0.0, 0.0, -1.0),
radius: 0.5,
material: diffuse,
};
let s3 = Sphere {
center: Vector3::new(1.0, 0.0, -1.0),
radius: 0.5,
material: metal1,
};
let s4 = Sphere {
center: Vector3::new(0.0, -100.5, -1.0),
radius: 100.0,
material: diffuse1,
};
let s5 = Sphere {
center: Vector3::new(0.0, 0.5, -2.5),
radius: 1.0,
material: metal2,
};
let world = World {
objects: vec![box s1, box s2, box s3, box s4, box s5],
};
let result = cam.render(&world, (1000, 500)).unwrap();
result.save(env::args().nth(1).unwrap()).unwrap();
}
| true |
b154feeda5f57a53490fe1205f4e3f43aaa4cce2
|
Rust
|
JosefBertolini/RustTicTacToe
|
/src/gameboard.rs
|
UTF-8
| 1,922 | 3.1875 | 3 |
[] |
no_license
|
use crate::space::{Space};
pub struct Gameboard {
pub board: [[Space; 3]; 3],
}
impl Gameboard {
pub fn new() -> [[Space; 3]; 3] {
[[Space::EMPTY; 3]; 3]
}
pub fn place(&mut self, player_move: Space, row: i32, col: i32) -> bool {
if self.board[row as usize][col as usize] == Space::EMPTY {
self.board[row as usize][col as usize] = player_move;
return true
}
false
}
pub fn print_board(&self) {
println!("A - {}|{}|{}", self.board[0][0].string_form(), self.board[0][1].string_form(), self.board[0][2].string_form());
println!("B - {}|{}|{}", self.board[1][0].string_form(), self.board[1][1].string_form(), self.board[1][2].string_form());
println!("C - {}|{}|{}", self.board[2][0].string_form(), self.board[2][1].string_form(), self.board[2][2].string_form());
println!(" 1 2 3");
}
pub fn check_win(&self) -> (bool, Space) {
for row_index in 0..3 {
if self.board[row_index][0] != Space::EMPTY && self.board[row_index][0] == self.board[row_index][1] && self.board[row_index][0] == self.board[row_index][2] {
return (true, self.board[row_index][0])
}
}
for col_index in 0..3 {
if self.board[0][col_index] != Space::EMPTY && self.board[0][col_index] == self.board[1][col_index] && self.board[0][col_index] == self.board[2][col_index] {
return (true, self.board[0][col_index])
}
}
if self.board[0][0] != Space::EMPTY && self.board[0][0] == self.board[1][1] && self.board[0][0] == self.board[2][2] {
return (true, self.board[0][0])
}
else if self.board[0][2] != Space::EMPTY && self.board[0][2] == self.board[1][1] && self.board[0][2] == self.board[2][0] {
return (true, self.board[0][2])
}
(false, Space::EMPTY)
}
}
| true |
a08490e059ba798c0b3f226ff8beb11dfbdb5502
|
Rust
|
technetos/catalyst
|
/src/response.rs
|
UTF-8
| 814 | 2.921875 | 3 |
[
"Apache-2.0"
] |
permissive
|
use crate::error::Error;
use bytes::Bytes;
pub struct Response {
res: http::response::Builder,
data: Bytes,
}
impl Response {
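    /// Consumes the builder, producing the finished `http::Response` head plus the body bytes.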
pub(crate) fn into_inner(mut self) -> Result<(http::Response<()>, Bytes), Error> {
let response = self.res.body(())?;
Ok((response, self.data))
}
pub fn new() -> Response {
Response {
res: http::Response::builder(),
data: Bytes::default(),
}
}
pub fn status(mut self, status: http::StatusCode) -> Response {
self.res.status(status);
self
}
pub fn content_type(mut self, content_ty: &str) -> Response {
self.res.header("Content-Type", content_ty);
self
}
pub fn body(mut self, body: Bytes) -> Response {
self.data = body;
self
}
}
| true |
096d1ea1baafe759ec96babe4119ed22ccfcbeca
|
Rust
|
afonsolage/craft-world
|
/src/states/world.rs
|
UTF-8
| 1,568 | 2.65625 | 3 |
[] |
no_license
|
use crate::{
components::Player,
resources::{MainPlayer, PlayerSpriteAsset, TerrainSpriteAsset, TerrainData},
};
use amethyst::{
core::{math::Vector3, Transform},
prelude::*,
renderer::{camera::Projection, Camera},
SimpleState,
};
pub struct WorldState;
impl SimpleState for WorldState {
fn on_start(&mut self, data: amethyst::StateData<'_, amethyst::GameData<'_, '_>>) {
let world = data.world;
init_camera(world);
init_sprites(world);
init_entities(world);
init_terrain(world);
}
}
fn init_camera(world: &mut World) {
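    // Orthographic camera covering a 30x30 world-unit area, placed at (0, 30, 1).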
let mut transform = Transform::default();
transform.set_translation_xyz(0., 30., 1.);
let mut camera = Camera::standard_3d(30., 30.);
camera.set_projection(Projection::orthographic(0.0, 30., 0.0, 30., 0., 20.));
world.create_entity().with(camera).with(transform).build();
}
fn init_sprites(world: &mut World) {
TerrainSpriteAsset::init(world);
PlayerSpriteAsset::init(world);
}
fn init_entities(world: &mut World) {
let mut transform = Transform::default();
transform.set_translation_xyz(20., 20., 0.1);
transform.set_scale(Vector3::new(1. / 32., 1. / 32., 1. / 32.));
let render = world.fetch::<PlayerSpriteAsset>().create_render(0);
let main_player = world
.create_entity()
.with(transform)
.with(Player::default())
.with(render)
.build();
world.insert(MainPlayer::new(main_player));
}
fn init_terrain(world: &mut World) {
world.insert(TerrainData::new(200, 200));
}
| true |
00b88fb560408485b07c023696c81e806153ae78
|
Rust
|
yytian/bignum
|
/src/types.rs
|
UTF-8
| 8,354 | 3.375 | 3 |
[
"MIT"
] |
permissive
|
use std::cmp;
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)]
pub enum Sign {
Nonnegative = 1,
Negative = -1,
}
use self::Sign::*;
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct Bignum {
pub parts: Vec<u32>, // Least significant digit at leftmost index
pub sign: Sign,
}
#[derive(Debug)]
pub struct ParseBignumError;
// Max value of a single part
// Needs to be less than half the max value of the integer type
// To allow adding without overflow
pub const BASE: u32 = 2000000000;
pub const BASE_STR: &'static str = "2000000000";
fn skip_leading_zeroes(s: &str) -> &str {
let mut chars = s.chars();
let mut first_nonzero_index = 0;
loop {
let c = chars.next();
if let Some('0') = c {
first_nonzero_index += 1;
} else {
break;
}
}
if first_nonzero_index >= s.len() {
"0"
} else {
&s[first_nonzero_index..]
}
}
fn char_to_digit(c: char) -> u32 {
c as u32 - '0' as u32
}
fn digit_to_char(part: u32) -> char {
::std::char::from_u32(part + '0' as u32).unwrap()
}
fn string_add<'a, 'b>(left: &'a mut String, right: &'b str) -> &'a String {
let l = left.chars().collect::<Vec<char>>();
let r = right.chars().collect::<Vec<char>>();
let p = l.len();
let q = r.len();
let max = cmp::max(p, q);
let mut temp_str = String::with_capacity(max + 1);
let mut carry: u32 = 0;
for i in 1..max+1 {
let l_digit = if p >= i {
char_to_digit(l[p-i])
} else {
0
};
let r_digit = if q >= i {
char_to_digit(r[q-i])
} else {
0
};
let result: u64 = (l_digit + r_digit + carry) as u64;
temp_str.push(digit_to_char((result % 10) as u32));
carry = (result / 10) as u32;
}
if carry > 0 {
temp_str.push(digit_to_char(carry));
}
left.clone_from(&temp_str.chars().rev().collect::<String>());
left
}
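// Schoolbook long multiplication on decimal strings: multiply the longer operand by each
// digit of the shorter one, shift by the digit's place value, and accumulate with string_add.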
fn string_mult<'a, 'b>(left: &'a mut String, right: &'b str) -> &'a String {
let l = left.chars().collect::<Vec<char>>();
let r = right.chars().collect::<Vec<char>>();
let p = l.len();
let q = r.len();
let (top, bot) = if p >= q {
(l, r)
} else {
(r, l)
};
let min = cmp::min(p, q);
let max = cmp::max(p, q);
let mut temp_str = String::with_capacity(p + q);
temp_str.push('0');
let mut carry: u32 = 0;
for i in 1..min+1 {
// For each digit of the bottom, multiply by the top
let mut line_str = String::with_capacity(max + 1);
for j in 1..max+1 {
let top_digit = if max >= j {
char_to_digit(top[max-j])
} else {
0
};
let bot_digit = if min >= i {
char_to_digit(bot[min-i])
} else {
0
};
let result: u64 = (top_digit * bot_digit + carry) as u64;
line_str.push(digit_to_char((result % 10) as u32));
carry = (result / 10) as u32;
}
while carry > 0 {
line_str.push(digit_to_char(carry));
carry = carry / 10;
}
line_str = line_str.chars().rev().collect::<String>();
        // Shift this partial product left by (i-1) decimal places before accumulating
for _ in 0..i-1 {
line_str.push('0');
}
string_add(&mut temp_str, &line_str);
}
left.clone_from(&skip_leading_zeroes(&temp_str).to_string());
left
}
pub fn from_string(input_str: &str) -> Result<Bignum, ParseBignumError> {
if input_str.is_empty() {
Err(ParseBignumError)
}
else if input_str.starts_with("-") {
string_to_parts(&input_str[1..]).map(|parts| Bignum {
parts: parts,
sign: Negative,
})
}
else {
string_to_parts(&input_str[..]).map(|parts| Bignum {
parts: parts,
sign: Nonnegative,
})
}
}
fn string_to_parts(input_string: &str) -> Result<Vec<u32>, ParseBignumError> {
let s = skip_leading_zeroes(input_string);
let mut parts = Vec::with_capacity(s.len());
let mut quotient: String = s.to_string();
while quotient != "0" {
// Repeated long division by BASE
let mut next = String::with_capacity(quotient.len());
let mut carry: u64 = 0;
for c in quotient.chars() {
let digit = char_to_digit(c);
carry = carry * 10 + digit as u64;
// TODO: Don't do the char conversion every time
next.push(digit_to_char((carry / BASE as u64) as u32));
carry = carry % BASE as u64;
}
quotient = skip_leading_zeroes(&next).to_string();
parts.push(carry as u32);
}
Ok(parts)
}
pub fn shift_left(a: &mut Bignum, num_places: usize) {
// TODO: Possibly impl as a << trait?
let mut zeroes = vec![0; num_places];
zeroes.append(&mut a.parts);
a.parts = zeroes;
}
impl Bignum {
pub fn normalize(&mut self) {
let mut last_non_zero_index = 0;
let num_parts = self.parts.len();
for i in 0..num_parts {
let index_to_test = num_parts - i - 1;
if self.parts[index_to_test] != 0 {
last_non_zero_index = index_to_test;
break;
}
}
if last_non_zero_index != num_parts - 1 {
self.parts.truncate(last_non_zero_index + 1);
}
}
pub fn to_string(&self) -> String {
let mut prefix: String = match self.sign {
Negative => "-".to_string(),
Nonnegative => "".to_string(),
};
let rest = self.parts.iter().rev();
        // Horner's method: repeatedly multiply the accumulator by BASE and add each part
let mut product: String = String::from("0");
for part in rest {
let mut next: String = String::with_capacity(2);
next.push_str(&part.to_string());
string_mult(&mut product, BASE_STR);
string_add(&mut product, &next);
}
prefix.push_str(&product);
prefix
}
}
#[cfg(test)]
mod tests {
use super::string_add;
use super::string_mult;
use super::from_string;
use super::shift_left;
use super::BASE_STR;
#[test]
fn string_add_test() {
assert_eq!(string_add(&mut "123".to_string(), "123"), "246");
assert_eq!(string_add(&mut "123".to_string(), "0"), "123");
assert_eq!(string_add(&mut "123".to_string(), "10000"), "10123");
assert_eq!(string_add(&mut "123456789".to_string(), "987654321"), "1111111110");
}
#[test]
fn string_mult_test() {
assert_eq!(string_mult(&mut "3".to_string(), "3"), "9");
assert_eq!(string_mult(&mut "0".to_string(), "999"), "0");
assert_eq!(string_mult(&mut "123".to_string(), "241"), "29643");
assert_eq!(string_mult(&mut "349".to_string(), "807"), "281643");
assert_eq!(string_mult(&mut "55555".to_string(), "66666"), "3703629630");
}
#[test]
fn type_conversion_test() {
let examples = vec!(
"0",
"1",
"-1",
"63",
"-69",
"-9877",
"11111111111111111111",
"-952892589210459282926222035",
"12193263132251181129",
);
for string_rep in examples {
let big = from_string(string_rep).unwrap();
assert_eq!(string_rep, big.to_string());
}
}
#[test]
fn equality_test() {
assert!(from_string("123").unwrap() == from_string("123").unwrap());
assert!(from_string("123").unwrap() != from_string("-123").unwrap());
assert!(from_string("123").unwrap() != from_string("124").unwrap());
// TODO: check for leading zeroes
}
#[test]
fn shift_left_test() {
let num: u64 = 123;
let mut b = from_string(&num.to_string()).unwrap();
shift_left(&mut b, 3); // multiply by base 3 times
// Less ugly way to do this?
let mut str = num.to_string();
let mut once = string_mult(&mut str, BASE_STR).to_string();
let mut twice = string_mult(&mut once, BASE_STR).to_string();
let thrice = string_mult(&mut twice, BASE_STR).to_string();
assert_eq!(b.to_string(), thrice.to_owned());
}
}
| true |
5094ddb662408de1e2dfb2723275f333b09cf9fc
|
Rust
|
ArtemAstakhov/yew-ui
|
/src/components/table/table_row/table_row.rs
|
UTF-8
| 2,617 | 2.84375 | 3 |
[
"MIT"
] |
permissive
|
use css_in_rust::Style;
use yew::{
html, Component, ShouldRender, Html, ComponentLink, Properties, Classes, NodeRef,
html::{
ChildrenRenderer,
},
virtual_dom::{
VComp, VChild,
}
};
use crate::components::table::TableSize;
use crate::components::table::table_cell::table_cell::{
TableCell,
Props as TableCellProps,
TableCellVariant,
};
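// The `Variants`/`ChildVariant` wrappers restrict a `TableRow`'s children to `TableCell`
// components while still satisfying yew's `ChildrenRenderer`.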
#[derive(Clone, PartialEq)]
pub enum Variants {
Cell(<TableCell as Component>::Properties),
}
impl From<TableCellProps> for Variants {
fn from(props: TableCellProps) -> Self {
Variants::Cell(props)
}
}
#[derive(PartialEq, Clone)]
pub struct ChildVariant {
props: Variants,
}
pub struct TableRow {
style: Style,
props: Props,
}
#[derive(Properties, Clone, PartialEq, Debug)]
pub struct Props {
#[prop_or_default]
pub class: String,
#[prop_or_default]
pub children: ChildrenRenderer<ChildVariant>,
#[prop_or_default]
pub variant: Option<TableCellVariant>,
#[prop_or(TableSize::Small)]
pub size: TableSize,
}
impl<CHILD> From<VChild<CHILD>> for ChildVariant
where
CHILD: Component,
CHILD::Properties: Into<Variants>,
{
fn from(vchild: VChild<CHILD>) -> Self {
Self {
props: vchild.props.into(),
}
}
}
impl From<ChildVariant> for Html {
fn from(variant: ChildVariant) -> Html {
match variant.props {
Variants::Cell(props) => VComp::new::<TableCell>(props, NodeRef::default(), None).into(),
}
}
}
impl Component for TableRow {
type Message = ();
type Properties = Props;
fn create(props: Self::Properties, _link: ComponentLink<Self>) -> Self {
let style = Style::create(
String::from("table_row"),
include_str!("table_row.scss"),
)
.expect("An error occured while creating the style");
Self {
style,
props,
}
}
fn update(&mut self, _msg: Self::Message) -> ShouldRender {
false
}
fn change(&mut self, props: Self::Properties) -> ShouldRender {
if self.props != props {
self.props = props;
true
} else {
false
}
}
fn view(&self) -> Html {
html! {
<tr
class=Classes::from(self.style.clone().to_string())
>
{
self.props.children.iter()
.filter(|c| matches!(c.props, Variants::Cell(_)))
.map(|mut c| {
let Variants::Cell(ref mut props) = c.props;
props.variant = self.props.variant.clone();
props.size = self.props.size.clone();
c
})
.collect::<Html>()
}
</tr>
}
}
}
| true |
d7c08c83ca608ef267c786cb2c186f2d0770363d
|
Rust
|
yamash723/pixelast
|
/src/response.rs
|
UTF-8
| 1,250 | 2.84375 | 3 |
[
"MIT"
] |
permissive
|
use super::PixelaClientError;
use serde_json;
use failure::Error;
#[derive(Serialize, Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
pub struct ApiRequestResult {
pub message: String,
pub is_success: bool,
}
pub fn build_result(json: &str) -> Result<(), Error> {
let res: ApiRequestResult = serde_json::from_str(&json)?;
if !res.is_success {
return Err(PixelaClientError::RequestNotSuccess(res.message).into());
}
Ok(())
}
#[cfg(test)]
mod response_test {
use super::*;
#[test]
fn build_result_is_success_test() {
let body = r#"{"message":"success message","isSuccess":true}"#;
let res = build_result(&body);
if let Err(e) = res {
panic!("failed build result. {}", e);
};
}
#[test]
fn build_result_is_not_success_test() {
let body = r#"{"message":"failed message","isSuccess":false}"#;
let res = build_result(&body);
if let Ok(_) = res {
panic!("not failed.");
};
}
#[test]
fn build_result_is_not_json_body_test() {
let body = "not json body";
let res = build_result(&body);
if let Ok(_) = res {
panic!("not failed.");
};
}
}
| true |
3113cb3afd5870b5f890589942447d7c5d30046a
|
Rust
|
stry-rs/attrouter
|
/src/parser.rs
|
UTF-8
| 8,713 | 2.78125 | 3 |
[] |
no_license
|
use {
crate::models::{FnParam, FnParamKind, GuardParam, Route, Triplet, UrlParam, UrlParams},
proc_macro2::TokenStream,
syn::{
punctuated::Punctuated, Attribute, FnArg, Generics, Ident, ItemFn, LitStr, Pat, PatIdent,
PatType, Signature, Type, TypePath,
},
};
pub fn parse<'i>(
path: &'i LitStr,
path_value: &'i str,
item: &'i ItemFn,
) -> Result<Route<'i>, TokenStream> {
let asyncness = item.sig.asyncness.is_some();
let guard_params = parse_guards(item)?;
let url_params = parse_url(path, &path_value);
let fn_params = parse_params(&item.sig.inputs)?;
let clean = clean_fn(item)?;
Ok(Route {
asyncness,
url_params,
fn_params,
guard_params,
clean,
})
}
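/// Collects route guards declared on the handler function via `#[header(...)]`
/// and `#[body_size(...)]` attributes.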
fn parse_guards(item: &ItemFn) -> Result<Vec<GuardParam>, TokenStream> {
let mut guards = Vec::new();
for attr in &item.attrs {
let attr_path = &attr.path;
if attr_path.is_ident("header") {
// Attempt to parse the attribute's body
let Triplet { key, value, .. }: Triplet<LitStr, syn::Token![:], LitStr> = match attr
.parse_args()
{
Ok(value) => value,
Err(err) => {
let mut spanned = syn::Error::new_spanned(
&attr.tokens,
r#"[stry-attrouter] failed to parse attribute, example: #[header("Content-Type": "application/json")]"#,
);
spanned.combine(err);
return Err(spanned.to_compile_error());
}
};
guards.push(GuardParam::Header { key, value });
continue;
}
if attr_path.is_ident("body_size") {
// Attempt to parse the attribute's body
let Triplet { key, value, .. }: Triplet<Ident, syn::Token![=], LitStr> = match attr
.parse_args()
{
Ok(value) => value,
Err(err) => {
let mut spanned = syn::Error::new_spanned(
&attr.tokens,
r#"[stry-attrouter] failed to parse attribute, example: #[body_size(max = "1024")]"#,
);
spanned.combine(err);
return Err(spanned.to_compile_error());
}
};
if key == "max" {
guards.push(GuardParam::BodySize { key, value });
} else {
return Err(syn::Error::new_spanned(
&attr.tokens,
format!(r#"[stry-attrouter] failed to parse attribute, invalid type `{}`, example: #[body_size(max = "1024")]"#, key),
).to_compile_error());
}
continue;
}
// TODO: CORS
}
Ok(guards)
}
// Convert the route into chunks
// Empty will be handled as `/`
fn parse_url<'u>(token: &'u LitStr, url: &'u str) -> UrlParams<'u> {
let items = if url.contains('{') {
// Handles routes with arguments
url.match_indices('{')
.zip(url.match_indices('}'))
.fold(
(0, Vec::with_capacity(16)),
|(last, mut parts), ((start, _), (end, _))| {
// Converts any route data between the last and the current argument
parts.extend(
(&url[last..start])
.split('/')
.filter(|p| !p.is_empty())
.map(|value| UrlParam::Chunk { value }),
);
// Grabs the name of the current argument
parts.push(UrlParam::Param {
key: &url[(start + 1)..end],
});
(end + 1, parts)
},
)
.1
} else {
// Handles routes without arguments
url.split('/')
.filter(|p| !p.is_empty())
.map(|value| UrlParam::Chunk { value })
.collect()
};
UrlParams { token, items }
}
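// Illustrative examples of the chunking above (an added sketch, not from the
// original crate): "/story/{id}" becomes
//     [UrlParam::Chunk { value: "story" }, UrlParam::Param { key: "id" }]
// while "/about/team" yields two Chunk items, and "" or "/" yields no items,
// which is how the root route is represented.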
fn parse_params<'p>(
params: &'p Punctuated<FnArg, syn::Token![,]>,
) -> Result<Vec<FnParam<'p>>, TokenStream> {
let mut mapped = Vec::with_capacity(params.len());
for param in params {
match param {
FnArg::Typed(PatType { attrs, pat, ty, .. }) => {
// Extract needed type data
let key = match &**pat {
Pat::Ident(PatIdent { ident, .. }) => ident,
pat => {
return Err(syn::Error::new_spanned(
pat,
"[stry-attrouter] Unsupported function argument pattern type",
)
.to_compile_error());
}
};
let typ = match &**ty {
Type::Path(TypePath { path, .. }) => path,
typ => {
return Err(syn::Error::new_spanned(
typ,
"[stry-attrouter] Unsupported function argument type",
)
.to_compile_error());
}
};
let mut kind = None;
// Check for supported attributes
for attr in attrs {
let attr_path = &attr.path;
if attr_path.is_ident("data") {
kind = Some(FnParamKind::Data);
break;
}
if attr_path.is_ident("form") {
kind = Some(FnParamKind::Form);
break;
}
if attr_path.is_ident("header") {
let header = attr.parse_args().map_err(|err| err.to_compile_error())?;
kind = Some(FnParamKind::Header { header });
break;
}
if attr_path.is_ident("query") {
kind = Some(FnParamKind::Query);
break;
}
if attr_path.is_ident("json") {
kind = Some(FnParamKind::Json);
break;
}
}
mapped.push(FnParam {
token: param,
key,
typ,
kind,
});
}
FnArg::Receiver(_) => {
return Err(syn::Error::new_spanned(
param,
"[stry-attrouter] Routing macro does not allow for self referencing functions",
)
.to_compile_error());
}
}
}
Ok(mapped)
}
pub fn is_attr(attr: &Attribute) -> bool {
for name in &["data", "form", "header", "json", "query"] {
if attr.path.is_ident(name) {
return true;
}
}
false
}
fn clean_fn(item: &ItemFn) -> Result<ItemFn, TokenStream> {
let block = &item.block;
let Signature {
asyncness,
unsafety,
generics,
output,
inputs,
..
} = &item.sig;
// The custom attributes need to be removed before I rebuild the function
let new_inputs: Punctuated<FnArg, syn::Token![,]> = clean_inputs(inputs)?;
let Generics {
params,
where_clause,
..
} = generics;
let cleaned = quote::quote! {
#[inline(always)]
#unsafety #asyncness fn inner #params ( #new_inputs ) #output #where_clause #block
};
let item = syn::parse2::<ItemFn>(cleaned).map_err(|err| err.to_compile_error())?;
Ok(item)
}
// Remove any custom attributes from function inputs
fn clean_inputs(
inputs: &Punctuated<FnArg, syn::Token![,]>,
) -> Result<Punctuated<FnArg, syn::Token![,]>, TokenStream> {
let mut cleaned: Punctuated<FnArg, syn::Token![,]> = inputs.clone();
for mut pair in cleaned.pairs_mut() {
let value = pair.value_mut();
match value {
FnArg::Receiver(_) => {
return Err(syn::Error::new_spanned(
value,
"[stry-attrouter] Routing macro does not allow for self referencing functions",
)
.to_compile_error())
}
FnArg::Typed(PatType { attrs, .. }) => {
*attrs = attrs.clone().into_iter().partition(is_attr).1;
}
}
}
Ok(cleaned)
}
| true |
adb653dcd05b70a496ac46c9e35f7531e8407849
|
Rust
|
h-michael/save_analysis_example
|
/src/hir.rs
|
UTF-8
| 374 | 2.609375 | 3 |
[] |
no_license
|
#[prelude_import]
use std::prelude::v1::*;
#[macro_use]
extern crate std;
fn main() {
<Person>::new("not_bind", 18);
let kiske = <Person>::new("kiske", 18);
}
struct Person {
pub name: String,
pub age: u32,
}
impl Person {
fn new(name: &str, age: u32) -> Person {
Person {
name: name.to_string(),
age,
}
}
}
| true |
1d16634a5567464d66c660f54212fcc5d1d55492
|
Rust
|
jlgerber/pbgui
|
/pbgui/src/messaging/incoming/imain_toolbar.rs
|
UTF-8
| 513 | 2.59375 | 3 |
[] |
no_license
|
use super::*;
/// Responses returning to the main ui thread from the secondary thread
/// for the main toolbar element.
pub enum IMainToolbar {
/// Provides a vector of show names
Shows(Vec<String>),
/// Provides a vector of role names
Roles(Vec<String>),
/// Provides a vector of platform names
Platforms(Vec<String>),
/// Provides a vector of site names
Sites(Vec<String>),
}
impl ToIMsg for IMainToolbar {
fn to_imsg(self) -> IMsg {
IMsg::MainToolbar(self)
}
}
| true |
9700e64d50b5b6698ac9fd2134e984634ef50f83
|
Rust
|
cc14514/rust-exercise
|
/src/sortdemo/insert.rs
|
UTF-8
| 442 | 2.890625 | 3 |
[] |
no_license
|
pub fn sort(arr: Vec<u32>) -> Vec<u32> {
let mut i = 0;
let mut ret = Vec::new();
while i < arr.len() {
let mut j = 0;
let vi = arr.get(i).unwrap();
while j < ret.len() {
if let Some(vj) = ret.get(j) {
if vi < vj {
break;
}
}
j = j + 1;
}
ret.insert(j, *vi);
i = i + 1;
}
return ret;
}
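// A small illustrative test for the insertion sort above (an added sketch, not
// part of the original exercise): each element is inserted before the first
// already-placed element that is larger, producing ascending order.
#[cfg(test)]
mod insert_sort_sketch_tests {
    use super::*;

    #[test]
    fn sorts_ascending() {
        assert_eq!(sort(vec![3, 1, 2]), vec![1, 2, 3]);
        assert_eq!(sort(vec![]), Vec::<u32>::new());
    }
}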
| true |
6f9f1ee73086a114d579086088aa2fdd898fc94f
|
Rust
|
Chopinsky/Rusty_Express
|
/examples/simple.rs
|
UTF-8
| 771 | 2.953125 | 3 |
[
"MIT"
] |
permissive
|
extern crate rusty_express;
use rusty_express::prelude::*;
fn main() {
// define http server now
let mut server = HttpServer::new();
//define router directly
server.get(RequestPath::WildCard(r"/\w*"), simple_response);
server.listen(8080);
}
pub fn simple_response(req: &Box<Request>, resp: &mut Box<Response>) {
/* Test: generate new Sessions
// if let Some(mut session) = Session::new() {
// session.expires_at(SystemTime::now().add(Duration::new(5, 0)));
// session.save();
// println!("New session: {}", session.get_id());
// }
*/
resp.send(&format!(
"Hello world from rusty server from path: {}",
req.uri
));
resp.status(200);
}
| true |
1b9306928b477f19be67270cca9c664393022ece
|
Rust
|
rust-lang/rust-analyzer
|
/crates/base-db/src/input.rs
|
UTF-8
| 28,853 | 2.6875 | 3 |
[
"Apache-2.0",
"MIT"
] |
permissive
|
//! This module specifies the input to rust-analyzer. In some sense, this is
//! **the** most important module, because all other fancy stuff is strictly
//! derived from this input.
//!
//! Note that neither this module, nor any other part of the analyzer's core do
//! actual IO. See `vfs` and `project_model` in the `rust-analyzer` crate for how
//! actual IO is done and lowered to input.
use std::{fmt, mem, ops, panic::RefUnwindSafe, str::FromStr, sync};
use cfg::CfgOptions;
use la_arena::{Arena, Idx};
use rustc_hash::{FxHashMap, FxHashSet};
use syntax::SmolStr;
use triomphe::Arc;
use tt::token_id::Subtree;
use vfs::{file_set::FileSet, AbsPathBuf, AnchoredPath, FileId, VfsPath};
// Map from crate id to the name of the crate and path of the proc-macro. If the value is `None`,
// then the crate for the proc-macro hasn't been built yet as the build data is missing.
pub type ProcMacroPaths = FxHashMap<CrateId, Result<(Option<String>, AbsPathBuf), String>>;
pub type ProcMacros = FxHashMap<CrateId, ProcMacroLoadResult>;
/// Files are grouped into source roots. A source root is a directory on the
/// file systems which is watched for changes. Typically it corresponds to a
/// Rust crate. Source roots *might* be nested: in this case, a file belongs to
/// the nearest enclosing source root. Paths to files are always relative to a
/// source root, and the analyzer does not know the root path of the source root at
/// all. So, a file from one source root can't refer to a file in another source
/// root by path.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct SourceRootId(pub u32);
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct SourceRoot {
/// Sysroot or crates.io library.
///
/// Libraries are considered mostly immutable, this assumption is used to
/// optimize salsa's query structure
pub is_library: bool,
file_set: FileSet,
}
impl SourceRoot {
pub fn new_local(file_set: FileSet) -> SourceRoot {
SourceRoot { is_library: false, file_set }
}
pub fn new_library(file_set: FileSet) -> SourceRoot {
SourceRoot { is_library: true, file_set }
}
pub fn path_for_file(&self, file: &FileId) -> Option<&VfsPath> {
self.file_set.path_for_file(file)
}
pub fn file_for_path(&self, path: &VfsPath) -> Option<&FileId> {
self.file_set.file_for_path(path)
}
pub fn resolve_path(&self, path: AnchoredPath<'_>) -> Option<FileId> {
self.file_set.resolve_path(path)
}
pub fn iter(&self) -> impl Iterator<Item = FileId> + '_ {
self.file_set.iter()
}
}
/// `CrateGraph` is a bit of information which turns a set of text files into a
/// number of Rust crates.
///
/// Each crate is defined by the `FileId` of its root module, the set of enabled
/// `cfg` flags and the set of dependencies.
///
/// Note that, due to cfg's, there might be several crates for a single `FileId`!
///
/// For the purposes of analysis, a crate does not have a name. Instead, names
/// are specified on dependency edges. That is, a crate might be known under
/// different names in different dependent crates.
///
/// Note that `CrateGraph` is build-system agnostic: it's a concept of the Rust
/// language proper, not a concept of the build system. In practice, we get
/// `CrateGraph` by lowering `cargo metadata` output.
///
/// `CrateGraph` is `!Serialize` by design, see
/// <https://github.com/rust-lang/rust-analyzer/blob/master/docs/dev/architecture.md#serialization>
#[derive(Clone, Default)]
pub struct CrateGraph {
arena: Arena<CrateData>,
}
impl fmt::Debug for CrateGraph {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_map()
.entries(self.arena.iter().map(|(id, data)| (u32::from(id.into_raw()), data)))
.finish()
}
}
pub type CrateId = Idx<CrateData>;
#[derive(Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct CrateName(SmolStr);
impl CrateName {
/// Creates a crate name, checking for dashes in the string provided.
/// Dashes are not allowed in the crate names,
/// hence the input string is returned as `Err` for those cases.
pub fn new(name: &str) -> Result<CrateName, &str> {
if name.contains('-') {
Err(name)
} else {
Ok(Self(SmolStr::new(name)))
}
}
/// Creates a crate name, unconditionally replacing the dashes with underscores.
pub fn normalize_dashes(name: &str) -> CrateName {
Self(SmolStr::new(name.replace('-', "_")))
}
pub fn as_smol_str(&self) -> &SmolStr {
&self.0
}
}
impl fmt::Display for CrateName {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
self.0.fmt(f)
}
}
impl ops::Deref for CrateName {
type Target = str;
fn deref(&self) -> &str {
&self.0
}
}
/// Origin of the crates.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum CrateOrigin {
/// Crates that are from the rustc workspace.
Rustc { name: String },
/// Crates that are workspace members.
Local { repo: Option<String>, name: Option<String> },
/// Crates that are non member libraries.
Library { repo: Option<String>, name: String },
/// Crates that are provided by the language, like std, core, proc-macro, ...
Lang(LangCrateOrigin),
}
impl CrateOrigin {
pub fn is_local(&self) -> bool {
matches!(self, CrateOrigin::Local { .. })
}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum LangCrateOrigin {
Alloc,
Core,
ProcMacro,
Std,
Test,
Other,
}
impl From<&str> for LangCrateOrigin {
fn from(s: &str) -> Self {
match s {
"alloc" => LangCrateOrigin::Alloc,
"core" => LangCrateOrigin::Core,
"proc-macro" => LangCrateOrigin::ProcMacro,
"std" => LangCrateOrigin::Std,
"test" => LangCrateOrigin::Test,
_ => LangCrateOrigin::Other,
}
}
}
impl fmt::Display for LangCrateOrigin {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let text = match self {
LangCrateOrigin::Alloc => "alloc",
LangCrateOrigin::Core => "core",
LangCrateOrigin::ProcMacro => "proc_macro",
LangCrateOrigin::Std => "std",
LangCrateOrigin::Test => "test",
LangCrateOrigin::Other => "other",
};
f.write_str(text)
}
}
#[derive(Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct CrateDisplayName {
// The name we use to display various paths (with `_`).
crate_name: CrateName,
// The name as specified in Cargo.toml (with `-`).
canonical_name: String,
}
impl CrateDisplayName {
pub fn canonical_name(&self) -> &str {
&self.canonical_name
}
pub fn crate_name(&self) -> &CrateName {
&self.crate_name
}
}
impl From<CrateName> for CrateDisplayName {
fn from(crate_name: CrateName) -> CrateDisplayName {
let canonical_name = crate_name.to_string();
CrateDisplayName { crate_name, canonical_name }
}
}
impl fmt::Display for CrateDisplayName {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
self.crate_name.fmt(f)
}
}
impl ops::Deref for CrateDisplayName {
type Target = str;
fn deref(&self) -> &str {
&self.crate_name
}
}
impl CrateDisplayName {
pub fn from_canonical_name(canonical_name: String) -> CrateDisplayName {
let crate_name = CrateName::normalize_dashes(&canonical_name);
CrateDisplayName { crate_name, canonical_name }
}
}
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub struct ProcMacroId(pub u32);
#[derive(Copy, Clone, Eq, PartialEq, Debug, Hash)]
pub enum ProcMacroKind {
CustomDerive,
FuncLike,
Attr,
}
pub trait ProcMacroExpander: fmt::Debug + Send + Sync + RefUnwindSafe {
fn expand(
&self,
subtree: &Subtree,
attrs: Option<&Subtree>,
env: &Env,
) -> Result<Subtree, ProcMacroExpansionError>;
}
pub enum ProcMacroExpansionError {
Panic(String),
/// Things like "proc macro server was killed by OOM".
System(String),
}
pub type ProcMacroLoadResult = Result<Vec<ProcMacro>, String>;
pub type TargetLayoutLoadResult = Result<Arc<str>, Arc<str>>;
#[derive(Debug, Clone)]
pub struct ProcMacro {
pub name: SmolStr,
pub kind: ProcMacroKind,
pub expander: sync::Arc<dyn ProcMacroExpander>,
}
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub enum ReleaseChannel {
Stable,
Beta,
Nightly,
}
impl ReleaseChannel {
pub fn as_str(self) -> &'static str {
match self {
ReleaseChannel::Stable => "stable",
ReleaseChannel::Beta => "beta",
ReleaseChannel::Nightly => "nightly",
}
}
pub fn from_str(str: &str) -> Option<Self> {
Some(match str {
"" => ReleaseChannel::Stable,
"nightly" => ReleaseChannel::Nightly,
_ if str.starts_with("beta") => ReleaseChannel::Beta,
_ => return None,
})
}
}
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct CrateData {
pub root_file_id: FileId,
pub edition: Edition,
pub version: Option<String>,
/// A name used in the package's project declaration: for Cargo projects,
/// its `[package].name` can be different for other project types or even
/// absent (a dummy crate for the code snippet, for example).
///
/// For purposes of analysis, crates are anonymous (only names in
/// `Dependency` matter); this name should only be used for UI.
pub display_name: Option<CrateDisplayName>,
pub cfg_options: CfgOptions,
/// The cfg options that could be used by the crate
pub potential_cfg_options: Option<CfgOptions>,
pub env: Env,
pub dependencies: Vec<Dependency>,
pub origin: CrateOrigin,
pub is_proc_macro: bool,
// FIXME: These things should not be per crate! These are more per workspace crate graph level things
pub target_layout: TargetLayoutLoadResult,
pub channel: Option<ReleaseChannel>,
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub enum Edition {
Edition2015,
Edition2018,
Edition2021,
}
impl Edition {
pub const CURRENT: Edition = Edition::Edition2021;
}
#[derive(Default, Debug, Clone, PartialEq, Eq)]
pub struct Env {
entries: FxHashMap<String, String>,
}
impl Env {
pub fn new_for_test_fixture() -> Self {
Env {
entries: FxHashMap::from_iter([(
String::from("__ra_is_test_fixture"),
String::from("__ra_is_test_fixture"),
)]),
}
}
}
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct Dependency {
pub crate_id: CrateId,
pub name: CrateName,
prelude: bool,
}
impl Dependency {
pub fn new(name: CrateName, crate_id: CrateId) -> Self {
Self { name, crate_id, prelude: true }
}
pub fn with_prelude(name: CrateName, crate_id: CrateId, prelude: bool) -> Self {
Self { name, crate_id, prelude }
}
/// Whether this dependency is to be added to the depending crate's extern prelude.
pub fn is_prelude(&self) -> bool {
self.prelude
}
}
impl CrateGraph {
pub fn add_crate_root(
&mut self,
root_file_id: FileId,
edition: Edition,
display_name: Option<CrateDisplayName>,
version: Option<String>,
cfg_options: CfgOptions,
potential_cfg_options: Option<CfgOptions>,
env: Env,
is_proc_macro: bool,
origin: CrateOrigin,
target_layout: Result<Arc<str>, Arc<str>>,
channel: Option<ReleaseChannel>,
) -> CrateId {
let data = CrateData {
root_file_id,
edition,
version,
display_name,
cfg_options,
potential_cfg_options,
env,
dependencies: Vec::new(),
origin,
target_layout,
is_proc_macro,
channel,
};
self.arena.alloc(data)
}
/// Remove the crate from crate graph. If any crates depend on this crate, the dependency would be replaced
/// with the second input.
pub fn remove_and_replace(
&mut self,
id: CrateId,
replace_with: CrateId,
) -> Result<(), CyclicDependenciesError> {
for (x, data) in self.arena.iter() {
if x == id {
continue;
}
for edge in &data.dependencies {
if edge.crate_id == id {
self.check_cycle_after_dependency(edge.crate_id, replace_with)?;
}
}
}
// if everything was ok, start to replace
for (x, data) in self.arena.iter_mut() {
if x == id {
continue;
}
for edge in &mut data.dependencies {
if edge.crate_id == id {
edge.crate_id = replace_with;
}
}
}
Ok(())
}
pub fn add_dep(
&mut self,
from: CrateId,
dep: Dependency,
) -> Result<(), CyclicDependenciesError> {
let _p = profile::span("add_dep");
self.check_cycle_after_dependency(from, dep.crate_id)?;
self.arena[from].add_dep(dep);
Ok(())
}
/// Check if adding a dep from `from` to `to` creates a cycle. To figure
/// that out, look for a path in the *opposite* direction, from `to` to
/// `from`.
fn check_cycle_after_dependency(
&self,
from: CrateId,
to: CrateId,
) -> Result<(), CyclicDependenciesError> {
if let Some(path) = self.find_path(&mut FxHashSet::default(), to, from) {
let path = path.into_iter().map(|it| (it, self[it].display_name.clone())).collect();
let err = CyclicDependenciesError { path };
assert!(err.from().0 == from && err.to().0 == to);
return Err(err);
}
Ok(())
}
pub fn is_empty(&self) -> bool {
self.arena.is_empty()
}
pub fn iter(&self) -> impl Iterator<Item = CrateId> + '_ {
self.arena.iter().map(|(idx, _)| idx)
}
// FIXME: used for `handle_hack_cargo_workspace`, should be removed later
#[doc(hidden)]
pub fn iter_mut(&mut self) -> impl Iterator<Item = (CrateId, &mut CrateData)> + '_ {
self.arena.iter_mut()
}
/// Returns an iterator over all transitive dependencies of the given crate,
/// including the crate itself.
pub fn transitive_deps(&self, of: CrateId) -> impl Iterator<Item = CrateId> {
let mut worklist = vec![of];
let mut deps = FxHashSet::default();
while let Some(krate) = worklist.pop() {
if !deps.insert(krate) {
continue;
}
worklist.extend(self[krate].dependencies.iter().map(|dep| dep.crate_id));
}
deps.into_iter()
}
/// Returns all transitive reverse dependencies of the given crate,
/// including the crate itself.
pub fn transitive_rev_deps(&self, of: CrateId) -> impl Iterator<Item = CrateId> {
let mut worklist = vec![of];
let mut rev_deps = FxHashSet::default();
rev_deps.insert(of);
let mut inverted_graph = FxHashMap::<_, Vec<_>>::default();
self.arena.iter().for_each(|(krate, data)| {
data.dependencies
.iter()
.for_each(|dep| inverted_graph.entry(dep.crate_id).or_default().push(krate))
});
while let Some(krate) = worklist.pop() {
if let Some(krate_rev_deps) = inverted_graph.get(&krate) {
krate_rev_deps
.iter()
.copied()
.filter(|&rev_dep| rev_deps.insert(rev_dep))
.for_each(|rev_dep| worklist.push(rev_dep));
}
}
rev_deps.into_iter()
}
/// Returns all crates in the graph, sorted in topological order (ie. dependencies of a crate
/// come before the crate itself).
pub fn crates_in_topological_order(&self) -> Vec<CrateId> {
let mut res = Vec::new();
let mut visited = FxHashSet::default();
for krate in self.iter() {
go(self, &mut visited, &mut res, krate);
}
return res;
fn go(
graph: &CrateGraph,
visited: &mut FxHashSet<CrateId>,
res: &mut Vec<CrateId>,
source: CrateId,
) {
if !visited.insert(source) {
return;
}
for dep in graph[source].dependencies.iter() {
go(graph, visited, res, dep.crate_id)
}
res.push(source)
}
}
// FIXME: this only finds one crate with the given root; we could have multiple
pub fn crate_id_for_crate_root(&self, file_id: FileId) -> Option<CrateId> {
let (crate_id, _) =
self.arena.iter().find(|(_crate_id, data)| data.root_file_id == file_id)?;
Some(crate_id)
}
pub fn sort_deps(&mut self) {
self.arena
.iter_mut()
.for_each(|(_, data)| data.dependencies.sort_by_key(|dep| dep.crate_id));
}
/// Extends this crate graph by adding a complete disjoint second crate
/// graph and adjust the ids in the [`ProcMacroPaths`] accordingly.
///
/// This will deduplicate the crates of the graph where possible.
/// Note that for deduplication to fully work, `self`'s crate dependencies must be sorted by crate id.
/// If the crate dependencies were sorted, the resulting graph from this `extend` call will also have the crate dependencies sorted.
pub fn extend(&mut self, mut other: CrateGraph, proc_macros: &mut ProcMacroPaths) {
let topo = other.crates_in_topological_order();
let mut id_map: FxHashMap<CrateId, CrateId> = FxHashMap::default();
for topo in topo {
let crate_data = &mut other.arena[topo];
crate_data.dependencies.iter_mut().for_each(|dep| dep.crate_id = id_map[&dep.crate_id]);
crate_data.dependencies.sort_by_key(|dep| dep.crate_id);
let res = self.arena.iter().find_map(
|(id, data)| {
if data == crate_data {
Some(id)
} else {
None
}
},
);
if let Some(res) = res {
id_map.insert(topo, res);
} else {
let id = self.arena.alloc(crate_data.clone());
id_map.insert(topo, id);
}
}
*proc_macros =
mem::take(proc_macros).into_iter().map(|(id, macros)| (id_map[&id], macros)).collect();
}
fn find_path(
&self,
visited: &mut FxHashSet<CrateId>,
from: CrateId,
to: CrateId,
) -> Option<Vec<CrateId>> {
if !visited.insert(from) {
return None;
}
if from == to {
return Some(vec![to]);
}
for dep in &self[from].dependencies {
let crate_id = dep.crate_id;
if let Some(mut path) = self.find_path(visited, crate_id, to) {
path.push(from);
return Some(path);
}
}
None
}
// Work around for https://github.com/rust-lang/rust-analyzer/issues/6038.
// As hacky as it gets.
pub fn patch_cfg_if(&mut self) -> bool {
// we stupidly max by version in an attempt to have all duplicated std's depend on the same cfg_if so that deduplication still works
let cfg_if =
self.hacky_find_crate("cfg_if").max_by_key(|&it| self.arena[it].version.clone());
let std = self.hacky_find_crate("std").next();
match (cfg_if, std) {
(Some(cfg_if), Some(std)) => {
self.arena[cfg_if].dependencies.clear();
self.arena[std]
.dependencies
.push(Dependency::new(CrateName::new("cfg_if").unwrap(), cfg_if));
true
}
_ => false,
}
}
fn hacky_find_crate<'a>(&'a self, display_name: &'a str) -> impl Iterator<Item = CrateId> + 'a {
self.iter().filter(move |it| self[*it].display_name.as_deref() == Some(display_name))
}
}
impl ops::Index<CrateId> for CrateGraph {
type Output = CrateData;
fn index(&self, crate_id: CrateId) -> &CrateData {
&self.arena[crate_id]
}
}
impl CrateData {
fn add_dep(&mut self, dep: Dependency) {
self.dependencies.push(dep)
}
}
impl FromStr for Edition {
type Err = ParseEditionError;
fn from_str(s: &str) -> Result<Self, Self::Err> {
let res = match s {
"2015" => Edition::Edition2015,
"2018" => Edition::Edition2018,
"2021" => Edition::Edition2021,
_ => return Err(ParseEditionError { invalid_input: s.to_string() }),
};
Ok(res)
}
}
impl fmt::Display for Edition {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.write_str(match self {
Edition::Edition2015 => "2015",
Edition::Edition2018 => "2018",
Edition::Edition2021 => "2021",
})
}
}
impl Extend<(String, String)> for Env {
fn extend<T: IntoIterator<Item = (String, String)>>(&mut self, iter: T) {
self.entries.extend(iter);
}
}
impl FromIterator<(String, String)> for Env {
fn from_iter<T: IntoIterator<Item = (String, String)>>(iter: T) -> Self {
Env { entries: FromIterator::from_iter(iter) }
}
}
impl Env {
pub fn set(&mut self, env: &str, value: String) {
self.entries.insert(env.to_owned(), value);
}
pub fn get(&self, env: &str) -> Option<String> {
self.entries.get(env).cloned()
}
pub fn iter(&self) -> impl Iterator<Item = (&str, &str)> {
self.entries.iter().map(|(k, v)| (k.as_str(), v.as_str()))
}
}
#[derive(Debug)]
pub struct ParseEditionError {
invalid_input: String,
}
impl fmt::Display for ParseEditionError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "invalid edition: {:?}", self.invalid_input)
}
}
impl std::error::Error for ParseEditionError {}
#[derive(Debug)]
pub struct CyclicDependenciesError {
path: Vec<(CrateId, Option<CrateDisplayName>)>,
}
impl CyclicDependenciesError {
fn from(&self) -> &(CrateId, Option<CrateDisplayName>) {
self.path.first().unwrap()
}
fn to(&self) -> &(CrateId, Option<CrateDisplayName>) {
self.path.last().unwrap()
}
}
impl fmt::Display for CyclicDependenciesError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let render = |(id, name): &(CrateId, Option<CrateDisplayName>)| match name {
Some(it) => format!("{it}({id:?})"),
None => format!("{id:?}"),
};
let path = self.path.iter().rev().map(render).collect::<Vec<String>>().join(" -> ");
write!(
f,
"cyclic deps: {} -> {}, alternative path: {}",
render(self.from()),
render(self.to()),
path
)
}
}
#[cfg(test)]
mod tests {
use crate::CrateOrigin;
use super::{CrateGraph, CrateName, Dependency, Edition::Edition2018, Env, FileId};
#[test]
fn detect_cyclic_dependency_indirect() {
let mut graph = CrateGraph::default();
let crate1 = graph.add_crate_root(
FileId(1u32),
Edition2018,
None,
None,
Default::default(),
Default::default(),
Env::default(),
false,
CrateOrigin::Local { repo: None, name: None },
Err("".into()),
None,
);
let crate2 = graph.add_crate_root(
FileId(2u32),
Edition2018,
None,
None,
Default::default(),
Default::default(),
Env::default(),
false,
CrateOrigin::Local { repo: None, name: None },
Err("".into()),
None,
);
let crate3 = graph.add_crate_root(
FileId(3u32),
Edition2018,
None,
None,
Default::default(),
Default::default(),
Env::default(),
false,
CrateOrigin::Local { repo: None, name: None },
Err("".into()),
None,
);
assert!(graph
.add_dep(crate1, Dependency::new(CrateName::new("crate2").unwrap(), crate2))
.is_ok());
assert!(graph
.add_dep(crate2, Dependency::new(CrateName::new("crate3").unwrap(), crate3))
.is_ok());
assert!(graph
.add_dep(crate3, Dependency::new(CrateName::new("crate1").unwrap(), crate1))
.is_err());
}
#[test]
fn detect_cyclic_dependency_direct() {
let mut graph = CrateGraph::default();
let crate1 = graph.add_crate_root(
FileId(1u32),
Edition2018,
None,
None,
Default::default(),
Default::default(),
Env::default(),
false,
CrateOrigin::Local { repo: None, name: None },
Err("".into()),
None,
);
let crate2 = graph.add_crate_root(
FileId(2u32),
Edition2018,
None,
None,
Default::default(),
Default::default(),
Env::default(),
false,
CrateOrigin::Local { repo: None, name: None },
Err("".into()),
None,
);
assert!(graph
.add_dep(crate1, Dependency::new(CrateName::new("crate2").unwrap(), crate2))
.is_ok());
assert!(graph
.add_dep(crate2, Dependency::new(CrateName::new("crate2").unwrap(), crate2))
.is_err());
}
#[test]
fn it_works() {
let mut graph = CrateGraph::default();
let crate1 = graph.add_crate_root(
FileId(1u32),
Edition2018,
None,
None,
Default::default(),
Default::default(),
Env::default(),
false,
CrateOrigin::Local { repo: None, name: None },
Err("".into()),
None,
);
let crate2 = graph.add_crate_root(
FileId(2u32),
Edition2018,
None,
None,
Default::default(),
Default::default(),
Env::default(),
false,
CrateOrigin::Local { repo: None, name: None },
Err("".into()),
None,
);
let crate3 = graph.add_crate_root(
FileId(3u32),
Edition2018,
None,
None,
Default::default(),
Default::default(),
Env::default(),
false,
CrateOrigin::Local { repo: None, name: None },
Err("".into()),
None,
);
assert!(graph
.add_dep(crate1, Dependency::new(CrateName::new("crate2").unwrap(), crate2))
.is_ok());
assert!(graph
.add_dep(crate2, Dependency::new(CrateName::new("crate3").unwrap(), crate3))
.is_ok());
}
#[test]
fn dashes_are_normalized() {
let mut graph = CrateGraph::default();
let crate1 = graph.add_crate_root(
FileId(1u32),
Edition2018,
None,
None,
Default::default(),
Default::default(),
Env::default(),
false,
CrateOrigin::Local { repo: None, name: None },
Err("".into()),
None,
);
let crate2 = graph.add_crate_root(
FileId(2u32),
Edition2018,
None,
None,
Default::default(),
Default::default(),
Env::default(),
false,
CrateOrigin::Local { repo: None, name: None },
Err("".into()),
None,
);
assert!(graph
.add_dep(
crate1,
Dependency::new(CrateName::normalize_dashes("crate-name-with-dashes"), crate2)
)
.is_ok());
assert_eq!(
graph[crate1].dependencies,
vec![Dependency::new(CrateName::new("crate_name_with_dashes").unwrap(), crate2)]
);
}
}
| true |
2e4b6a205a8f00665d9183ca84050395bec82e5d
|
Rust
|
Connicpu/AoC2019
|
/src/parse.rs
|
UTF-8
| 1,669 | 3.359375 | 3 |
[] |
no_license
|
use num::PrimInt;
pub fn parse<T: PrimInt>(data: &[u8]) -> ParseIter<T> {
ParseIter {
state: State::new(),
data: data.iter(),
}
}
pub fn parse_i64_vec(data: &str) -> Vec<i64> {
parse(data.as_bytes()).collect()
}
pub struct ParseIter<'a, T: PrimInt> {
state: State<T>,
data: std::slice::Iter<'a, u8>,
}
impl<T: PrimInt> Iterator for ParseIter<'_, T> {
type Item = T;
fn next(&mut self) -> Option<T> {
for &b in self.data.by_ref() {
if let Some(next) = self.state.next(b) {
return Some(next);
}
}
self.state.commit()
}
}
struct State<T: PrimInt> {
value: T,
sign: T,
span: usize,
}
impl<T: PrimInt> State<T> {
fn new() -> State<T> {
State {
value: T::zero(),
sign: T::one(),
span: 0,
}
}
fn next(&mut self, digit: u8) -> Option<T> {
match digit {
b'-' => self.negative(),
b'0'..=b'9' => self.push(digit),
b',' | b'\n' => return self.commit(),
_ => (),
}
None
}
fn negative(&mut self) {
self.sign = !T::zero();
self.span += 1;
}
fn push(&mut self, digit: u8) {
self.value = self.value * T::from(10).unwrap();
self.value = self.value + T::from(digit - b'0').unwrap();
self.span += 1;
}
fn commit(&mut self) -> Option<T> {
if self.span == 0 {
return None;
}
let res = self.value * self.sign;
self.value = T::zero();
self.sign = T::one();
self.span = 0;
Some(res)
}
}
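// Illustrative check of the parser above (an added sketch, not from the original
// repo): digits accumulate into the pending value, '-' flips the sign, and ','
// or '\n' commits it; a trailing number is committed when the input ends.
#[cfg(test)]
mod parse_sketch_tests {
    use super::*;

    #[test]
    fn parses_signed_integers() {
        assert_eq!(parse_i64_vec("1,-2,3"), vec![1, -2, 3]);
        assert_eq!(parse_i64_vec("10\n20\n"), vec![10, 20]);
    }
}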
| true |
5af55fe882dfdf4b6d95c7480a744245e62fce11
|
Rust
|
KFtygy4AquzoI/OpenZKP
|
/algebra/primefield/src/prime_field.rs
|
UTF-8
| 19,357 | 2.921875 | 3 |
[
"Apache-2.0"
] |
permissive
|
// False positive: attribute has a use
#[allow(clippy::useless_attribute)]
// False positive: Importing preludes is allowed
#[allow(clippy::wildcard_imports)]
use std::{fmt, prelude::v1::*};
use crate::{Root, SquareRoot, UInt as FieldUInt};
use std::{
hash::{Hash, Hasher},
marker::PhantomData,
ops::Shr,
};
use zkp_u256::{
AddInline, Binary, DivRem, Inv, Montgomery as _, MontgomeryParameters, MulInline, NegInline,
One, Pow, SquareInline, SubInline, Zero, U256,
};
/// A finite field of prime order.
///
/// The order `Parameters::MODULUS` must be prime. Internally, values are
/// represented in Montgomery form for faster multiplications.
///
/// At a minimum `UInt` should implement [`Clone`], [`PartialEq`],
/// [`PartialOrd`], [`Zero`], [`One`], [`AddInline`]`<&Self>`,
/// [`SubInline`]`<&Self>` and [`Montgomery`].
///
/// For [`Root`] it should also implement [`Binary`] and [`DivRem`]. For
/// [`SquareRoot`] it requires [`Binary`] and [`Shr`]`<usize>`. For rand
/// support it requires [`rand::distributions::uniform::SampleUniform`]. For
/// `proptest` support `Parameters` needs to be `'static + Send` (which it
/// really should anyway).
// Derive fails for Clone, PartialEq, Eq, Hash
pub struct PrimeField<P: Parameters> {
// TODO: un-pub. They are pub so FieldElement can have const-fn constructors.
pub uint: P::UInt,
pub _parameters: PhantomData<P>,
}
/// Required constant parameters for the prime field
// TODO: Fix naming
#[allow(clippy::module_name_repetitions)]
// UInt can not have interior mutability
#[allow(clippy::declare_interior_mutable_const)]
// HACK: Ideally we'd use MontgomeryParameters<UInt: FieldUInt>
// See <https://github.com/rust-lang/rust/issues/52662>
pub trait Parameters: 'static + Send + Sync + Sized {
type UInt: FieldUInt;
/// The modulus to implement in Montgomery form
const MODULUS: Self::UInt;
/// M64 = -MODULUS^(-1) mod 2^64
const M64: u64;
// R1 = 2^256 mod MODULUS
const R1: Self::UInt;
// R2 = 2^512 mod MODULUS
const R2: Self::UInt;
// R3 = 2^768 mod MODULUS
const R3: Self::UInt;
// Generator and quadratic non-residue
const GENERATOR: Self::UInt;
// Multiplicative order: Modulus - 1
const ORDER: Self::UInt;
}
// Derive `MontgomeryParameters` from `Parameters` as `Montgomery<P:
// Parameters>`
struct Montgomery<P: Parameters>(PhantomData<P>);
impl<P: Parameters> MontgomeryParameters for Montgomery<P> {
type UInt = P::UInt;
const M64: u64 = P::M64;
const MODULUS: Self::UInt = P::MODULUS;
const R1: Self::UInt = P::R1;
const R2: Self::UInt = P::R2;
const R3: Self::UInt = P::R3;
}
impl<P: Parameters> PrimeField<P> {
// UInt can not have interior mutability
#[allow(clippy::declare_interior_mutable_const)]
pub const MODULUS: P::UInt = P::MODULUS;
#[inline(always)]
pub fn modulus() -> P::UInt {
P::MODULUS
}
/// The multiplicative order of the field.
///
/// Equal to `modulus() - 1` for prime fields.
#[inline(always)]
pub fn order() -> P::UInt {
P::ORDER
}
#[inline(always)]
pub fn generator() -> Self {
Self::from_montgomery(P::GENERATOR)
}
#[inline(always)]
pub fn as_montgomery(&self) -> &P::UInt {
debug_assert!(self.uint < Self::modulus());
&self.uint
}
/// Construct from `UInt` in Montgomery form.
///
/// This is a trivial function.
// TODO: Make `const fn` after <https://github.com/rust-lang/rust/issues/57563>
#[inline(always)]
pub fn from_montgomery(uint: P::UInt) -> Self {
debug_assert!(uint < Self::modulus());
Self {
uint,
_parameters: PhantomData,
}
}
// TODO: from_radix_str
// #[cfg(feature = "std")]
// pub fn from_hex_str(s: &str) -> Self {
// Self::from(UInt::from_hex_str(s))
// }
/// Convert to `UInt`.
#[inline(always)] // Simple wrapper for `from_montgomery`
pub fn to_uint(&self) -> P::UInt {
debug_assert!(self.uint < Self::modulus());
P::UInt::from_montgomery::<Montgomery<P>>(self.as_montgomery())
}
/// Construct from `UInt`
///
/// It does the montgomery conversion.
pub fn from_uint(uint: &P::UInt) -> Self {
debug_assert!(uint < &Self::modulus());
Self::from_montgomery(uint.to_montgomery::<Montgomery<P>>())
}
/// Reduce and construct from `UInt`
pub fn from_uint_reduce(uint: &P::UInt) -> Self {
let uint = P::UInt::redc_inline::<Montgomery<P>>(uint, &P::UInt::zero());
// UInt should not have interior mutability
#[allow(clippy::borrow_interior_mutable_const)]
let uint = uint.mul_redc_inline::<Montgomery<P>>(&P::R3);
Self::from_montgomery(uint)
}
#[inline(always)]
pub fn double(&self) -> Self {
// TODO: Optimize
self.clone() + self
}
#[inline(always)]
pub fn triple(&self) -> Self {
// TODO: Optimize
self.clone() + self + self
}
}
impl<P: Parameters> Clone for PrimeField<P> {
fn clone(&self) -> Self {
Self::from_montgomery(self.as_montgomery().clone())
}
}
impl<P: Parameters> PartialEq for PrimeField<P> {
fn eq(&self, other: &Self) -> bool {
self.as_montgomery() == other.as_montgomery()
}
}
impl<P: Parameters> Eq for PrimeField<P> {}
/// Implements [`Hash`] when `UInt` does.
impl<U, P> Hash for PrimeField<P>
where
U: FieldUInt + Hash,
P: Parameters<UInt = U>,
{
fn hash<H: Hasher>(&self, state: &mut H) {
self.as_montgomery().hash::<H>(state)
}
}
impl<U, P> fmt::Debug for PrimeField<P>
where
U: FieldUInt + fmt::Debug,
P: Parameters<UInt = U>,
{
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "field_element!(\"{:?}\")", self.to_uint())
}
}
impl<P: Parameters> Zero for PrimeField<P> {
#[inline(always)]
fn zero() -> Self {
Self::from_montgomery(P::UInt::zero())
}
#[inline(always)]
fn is_zero(&self) -> bool {
self.as_montgomery().is_zero()
}
}
impl<P: Parameters> One for PrimeField<P> {
#[inline(always)]
fn one() -> Self {
Self::from_montgomery(P::R1)
}
// UInt should not have interior mutability
#[allow(clippy::borrow_interior_mutable_const)]
#[inline(always)]
fn is_one(&self) -> bool {
self.as_montgomery() == &P::R1
}
}
impl<P: Parameters> AddInline<&Self> for PrimeField<P> {
#[inline(always)]
fn add_inline(&self, rhs: &Self) -> Self {
let result = self.as_montgomery().add_inline(rhs.as_montgomery());
let result = result.reduce_1_inline::<Montgomery<P>>();
Self::from_montgomery(result)
}
}
impl<P: Parameters> SubInline<&Self> for PrimeField<P> {
#[inline(always)]
fn sub_inline(&self, rhs: &Self) -> Self {
let lhs = self.as_montgomery();
let rhs = rhs.as_montgomery();
let borrow = rhs > lhs;
let mut result = lhs.sub_inline(rhs);
if borrow {
result.add_assign_inline(&Self::modulus());
}
Self::from_montgomery(result)
}
}
impl<P: Parameters> NegInline for PrimeField<P> {
#[inline(always)]
fn neg_inline(&self) -> Self {
if self.is_zero() {
Self::zero()
} else {
Self::from_montgomery(Self::modulus().sub_inline(self.as_montgomery()))
}
}
}
impl<P: Parameters> SquareInline for PrimeField<P> {
#[inline(always)]
fn square_inline(&self) -> Self {
Self::from_montgomery(self.as_montgomery().square_redc_inline::<Montgomery<P>>())
}
}
impl<P: Parameters> MulInline<&Self> for PrimeField<P> {
#[inline(always)]
fn mul_inline(&self, rhs: &Self) -> Self {
Self::from_montgomery(
self.as_montgomery()
.mul_redc_inline::<Montgomery<P>>(rhs.as_montgomery()),
)
}
}
impl<P: Parameters> Inv for &PrimeField<P> {
type Output = Option<PrimeField<P>>;
#[inline(always)] // Simple wrapper
fn inv(self) -> Self::Output {
self.as_montgomery()
.inv_redc::<Montgomery<P>>()
.map(PrimeField::<P>::from_montgomery)
}
}
impl<P: Parameters> Pow<usize> for &PrimeField<P> {
type Output = PrimeField<P>;
fn pow(self, exponent: usize) -> Self::Output {
self.pow(&exponent)
}
}
impl<P: Parameters> Pow<isize> for &PrimeField<P> {
type Output = Option<PrimeField<P>>;
fn pow(self, exponent: isize) -> Self::Output {
let negative = exponent < 0;
let abs = exponent.abs() as usize;
if negative {
self.inv().map(|n| n.pow(&abs))
} else {
Some(self.pow(&abs))
}
}
}
impl<P: Parameters, Exponent> Pow<&Exponent> for &PrimeField<P>
where
Exponent: Binary,
{
type Output = PrimeField<P>;
fn pow(self, exponent: &Exponent) -> Self::Output {
if let Some(msb) = exponent.most_significant_bit() {
let mut result = Self::Output::one();
let mut square = self.clone();
for i in 0..=msb {
if exponent.bit(i) {
result *= □
}
if i < msb {
square.square_assign();
}
}
result
} else {
// exponent = 0
Self::Output::one()
}
}
}
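// Worked example of the least-significant-bit-first square-and-multiply loop
// above (illustrative): for exponent 5 = 0b101, `square` takes the values a,
// a^2, a^4 across bits 0..=2; bits 0 and 2 are set, so the result is
// a * a^4 = a^5.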
impl<U, P> Root<usize> for PrimeField<P>
where
U: FieldUInt + Binary + DivRem<u64, Quotient = U, Remainder = u64>,
P: Parameters<UInt = U>,
{
// OPT: replace this with a constant array of roots of unity.
fn root(order: usize) -> Option<Self> {
let order = order as u64;
if let Some((q, rem)) = Self::order().div_rem(order) {
if rem.is_zero() {
Some(Self::generator().pow(&q))
} else {
None
}
} else {
Some(Self::one())
}
}
}
// TODO: Generalize over order type
// Lint has a false positive here
#[allow(single_use_lifetimes)]
impl<U, P> Root<&U> for PrimeField<P>
where
U: FieldUInt + Binary + for<'a> DivRem<&'a U, Quotient = U, Remainder = U>,
P: Parameters<UInt = U>,
{
// OPT: replace this with a constant array of roots of unity.
fn root(order: &P::UInt) -> Option<Self> {
if let Some((q, rem)) = Self::order().div_rem(order) {
if rem.is_zero() {
Some(Self::generator().pow(&q))
} else {
None
}
} else {
Some(Self::one())
}
}
}
impl<U, P> SquareRoot for PrimeField<P>
where
U: FieldUInt + Binary + Shr<usize, Output = U>,
P: Parameters<UInt = U>,
{
fn is_quadratic_residue(&self) -> bool {
self.pow(&(Self::MODULUS >> 1_usize)) != -Self::one()
}
// Tonelli-Shanks square root algorithm for prime fields
// See 'Handbook of Applied Cryptography' algorithm 3.34
// OPT: Use algorithm 3.39 for Proth primes.
fn square_root(&self) -> Option<Self> {
if self.is_zero() {
return Some(Self::zero());
}
if !self.is_quadratic_residue() {
return None;
}
// TODO: Provide as a constant parameter?
// Factor order as `signifcant` * 2 ^ `trailing_zeros`
let trailing_zeros = Self::order().trailing_zeros();
let signifcant = Self::order() >> trailing_zeros;
// The starting value of c in the Tonelli-Shanks algorithm. We use the
// preferred generator as the quadratic non-residue the algorithm requires.
let c_start = Self::generator().pow(&signifcant);
// This algorithm is still correct when the following assertion fails. However,
// more efficient algorithms exist when MODULUS % 4 == 3 or MODULUS % 8 == 5
// (3.36 and 3.37 in HAC).
// debug_assert!(&FieldElement::MODULUS & 7_u64 == 1);
// OPT: Raising a to a fixed power is a good candidate for an addition chain.
let mut root = self.pow(&((signifcant + P::UInt::one()) >> 1));
let mut c = c_start;
let inverse = self.inv().unwrap(); // Zero case is handled above
for i in 1..trailing_zeros {
if (root.square() * &inverse).pow(&(P::UInt::one() << (trailing_zeros - i - 1)))
== -Self::one()
{
root *= &c;
}
// OPT: Create lookup table for squares of c.
c.square_assign();
}
Some(root)
}
}
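// Worked example of the Tonelli-Shanks steps above in the tiny field GF(17)
// (an illustrative sketch; the crate's actual modulus is much larger):
// ORDER = 16 = 1 * 2^4, so `signifcant` = 1 and `trailing_zeros` = 4. With
// generator g = 3, c_start = 3^1 = 3. For a = 2 (a residue: 2^8 = 256 = 1 mod 17),
// root starts as 2^((1 + 1)/2) = 2 and inverse = 9.
//   i = 1: (root^2 * inverse)^(2^2) = (4 * 9)^4 = 2^4 = 16 = -1, so root *= c -> 6; c -> 9.
//   i = 2: (36 * 9)^(2^1) = 1^2 = 1 != -1, no change; c -> 13.
//   i = 3: (36 * 9)^(2^0) = 1 != -1, no change.
// The result is 6, and indeed 6^2 = 36 = 2 (mod 17).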
impl<P: Parameters> Default for PrimeField<P> {
fn default() -> Self {
Self::zero()
}
}
// TODO: Find a way to create generic implementations of these
impl<P: Parameters<UInt = U256>> From<PrimeField<P>> for U256 {
#[inline(always)]
fn from(other: PrimeField<P>) -> Self {
other.to_uint()
}
}
impl<P: Parameters<UInt = U256>> From<&PrimeField<P>> for U256 {
#[inline(always)]
fn from(other: &PrimeField<P>) -> Self {
other.to_uint()
}
}
#[cfg(test)]
mod tests {
use super::*;
use crate::FieldElement;
use itertools::repeat_n;
use num_traits::ToPrimitive;
use proptest::prelude::*;
use zkp_macros_decl::{field_element, u256h};
use zkp_u256::U256;
#[test]
fn test_literal() {
const SMALL: FieldElement = field_element!("0F");
const NUM: FieldElement =
field_element!("0548c135e26faa9c977fb2eda057b54b2e0baa9a77a0be7c80278f4f03462d4c");
assert_eq!(SMALL, FieldElement::from(15));
assert_eq!(
NUM,
u256h!("0548c135e26faa9c977fb2eda057b54b2e0baa9a77a0be7c80278f4f03462d4c").into()
);
}
#[test]
fn minus_zero_equals_zero() {
assert!(FieldElement::zero().is_zero());
assert!(field_element!("00").is_zero());
assert_eq!(FieldElement::zero(), FieldElement::zero());
assert_eq!(-FieldElement::zero(), FieldElement::zero());
}
#[test]
fn test_add() {
let a = field_element!("06eabe184aa9caca2e17f6073bcc10bb9714c0e3866ff00e0d386f4396392852");
let b = field_element!("0313000a764a9a5514efc99070de3f70586794f9bb0add62ac689763aadea7e8");
let c = field_element!("01fdbe22c0f4650e4307bf97acaa502bef7c55dd417acd70b9a106a74117d039");
assert_eq!(a + b, c);
}
#[test]
fn test_sub() {
let a = FieldElement::from_montgomery(u256h!(
"0548c135e26faa9c977fb2eda057b54b2e0baa9a77a0be7c80278f4f03462d4c"
));
let b = FieldElement::from_montgomery(u256h!(
"024385f6bebc1c496e09955db534ef4b1eaff9a78e27d4093cfa8f7c8f886f6b"
));
let c = field_element!("03d7be0dd45f307519282c76caedd14b3ead2be9cb6512ab60cfd7dfeb5a806a");
assert_eq!(a - b, c);
}
#[test]
fn test_mul() {
let a = FieldElement::from_montgomery(u256h!(
"0548c135e26faa9c977fb2eda057b54b2e0baa9a77a0be7c80278f4f03462d4c"
));
let b = FieldElement::from_montgomery(u256h!(
"024385f6bebc1c496e09955db534ef4b1eaff9a78e27d4093cfa8f7c8f886f6b"
));
let c = field_element!("0738900c5dcab24b419674df19d2cfeb9782eca6d1107be18577eb060390365b");
assert_eq!(a * b, c);
}
#[test]
fn test_div() {
let a = FieldElement::from_montgomery(u256h!(
"0548c135e26faa9c977fb2eda057b54b2e0baa9a77a0be7c80278f4f03462d4c"
));
let b = FieldElement::from_montgomery(u256h!(
"024385f6bebc1c496e09955db534ef4b1eaff9a78e27d4093cfa8f7c8f886f6b"
));
let c = field_element!("003a9a346e7103c74dfcddd0eeb4e16ca71d8887c2bed3d4ee718b62015e87b2");
assert_eq!(a / b, c);
}
proptest!(
#[test]
fn from_as_isize(n: isize) {
prop_assert_eq!(FieldElement::from(n).to_isize().unwrap(), n)
}
#[test]
fn from_as_i128(n: i128) {
prop_assert_eq!(FieldElement::from(n).to_i128().unwrap(), n);
}
#[test]
fn add_identity(a: FieldElement) {
prop_assert_eq!(&a + FieldElement::zero(), a);
}
#[test]
fn mul_identity(a: FieldElement) {
prop_assert_eq!(&a * FieldElement::one(), a);
}
#[test]
fn commutative_add(a: FieldElement, b: FieldElement) {
prop_assert_eq!(&a + &b, b + a);
}
#[test]
fn commutative_mul(a: FieldElement, b: FieldElement) {
prop_assert_eq!(&a * &b, b * a);
}
#[test]
fn associative_add(a: FieldElement, b: FieldElement, c: FieldElement) {
prop_assert_eq!(&a + (&b + &c), (a + b) + c);
}
#[test]
fn associative_mul(a: FieldElement, b: FieldElement, c: FieldElement) {
prop_assert_eq!(&a * (&b * &c), (a * b) * c);
}
#[test]
fn inverse_add(a: FieldElement) {
prop_assert!((&a + a.neg()).is_zero());
}
#[test]
fn inverse_mul(a: FieldElement) {
let inverse = a.inv();
match inverse {
None => prop_assert!(a.is_zero()),
Some(ai) => prop_assert!((a * ai).is_one()),
}
}
#[test]
fn distributivity(a: FieldElement, b: FieldElement, c: FieldElement) {
prop_assert_eq!(&a * (&b + &c), (&a * b) + (a * c));
}
#[test]
fn square(a: FieldElement) {
prop_assert_eq!(a.square(), &a * &a);
}
#[test]
fn pow_0(a: FieldElement) {
prop_assert!(a.pow(0_usize).is_one());
}
#[test]
fn pow_1(a: FieldElement) {
prop_assert_eq!(a.pow(1_usize), a);
}
#[test]
fn pow_2(a: FieldElement) {
prop_assert_eq!(a.pow(2_usize), &a * &a);
}
#[test]
fn pow_n(a: FieldElement, n: usize) {
let exponent = n % 512;
prop_assert_eq!(a.pow(exponent), repeat_n(a, exponent).product());
}
#[test]
fn fermats_little_theorem(a: FieldElement) {
prop_assert_eq!(a.pow(&FieldElement::MODULUS), a);
}
#[test]
fn square_root(a: FieldElement) {
let s = a.square();
let r = s.square_root().unwrap();
prop_assert!(r == a || r == -a);
}
);
#[test]
fn zeroth_root_of_unity() {
assert_eq!(FieldElement::root(0).unwrap(), FieldElement::one());
}
#[test]
fn roots_of_unity_squared() {
let powers_of_two = (0..193).map(|n| U256::ONE << n);
let roots_of_unity: Vec<_> = powers_of_two
.map(|n| FieldElement::root(&n).unwrap())
.collect();
for (smaller_root, larger_root) in roots_of_unity[1..].iter().zip(roots_of_unity.as_slice())
{
assert_eq!(smaller_root.square(), *larger_root);
assert!(!smaller_root.is_one());
}
}
#[test]
fn root_of_unity_definition() {
let powers_of_two = (0..193).map(|n| U256::ONE << n);
for n in powers_of_two {
let root_of_unity = FieldElement::root(&n).unwrap();
assert_eq!(root_of_unity.pow(&n), FieldElement::one());
}
}
}
| true |
1d7029de0e2652b239448cc96894047a18898bff
|
Rust
|
irandms/riscv-phone
|
/firmware/src/eeprom.rs
|
UTF-8
| 4,189 | 2.65625 | 3 |
[
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
] |
permissive
|
#![allow(dead_code)]
extern crate embedded_hal as hal;
use hal::blocking::spi;
use hal::digital::OutputPin;
use hal::spi::Mode;
use hal::spi::MODE_0;
#[derive(Debug, Clone, Copy)]
pub enum Error<E> {
// A write is currently happening, and subsequent writes will fail
WriteInProgress,
// The BP bits in the status register are write-protecting regions of memory
WriteIsBlockProtected,
WriteOutOfPage, // TODO: Implement this; writes beyond page boundaries begin back at the 0th offset of that page
StatusReadFail,
Spi(E),
}
pub const MAX_ADDR: u16 = 1 << 15; // 2^15 = 262,144 bits, 32768 bytes
pub const PAGE_SIZE: u32 = 64;
pub const MODE: Mode = MODE_0;
pub mod status_reg {
pub const SRWD_BIT: u8 = 1 << 7;
pub const BP1_BIT: u8 = 1 << 3;
pub const BP0_BIT: u8 = 1 << 2;
pub const WEL_BIT: u8 = 1 << 1;
pub const WIP_BIT: u8 = 1 << 0;
}
pub struct M95xxx<SPI, CS> {
spi: SPI,
cs: CS,
}
#[derive(Clone, Copy)]
enum Instruction {
WriteEnable = 0b0000_0110,
WriteDisable = 0b0000_0100,
ReadStatusReg = 0b0000_0101,
WriteStatusReg = 0b0000_0001,
Read = 0b0000_0011,
Write = 0b0000_0010,
ReadIDPage = 0b1000_0011,
WriteIDPage = 0b1000_0010,
ContinueLastInstr = 0b1111_1111,
}
impl <CS, SPI, E> M95xxx<SPI, CS>
where
SPI: spi::Transfer<u8, Error = E> + spi::Write<u8, Error = E>,
CS: OutputPin,
{
pub fn new(spi: SPI, cs: CS) -> Result<Self, E> {
Ok(M95xxx { spi, cs })
}
pub fn read(&mut self, addr: u16) -> Result<u8, E> {
let mut result = [
Instruction::ContinueLastInstr as u8,
];
self.with_cs_low(|m95| {
m95.spi.transfer(&mut [Instruction::Read as u8])?;
m95.spi.transfer(&mut [(addr >> 8) as u8])?;
m95.spi.transfer(&mut [addr as u8])?;
m95.spi.transfer(&mut result)?;
Ok(result[0])
})
}
pub fn read_n<'b>(&mut self, addr: u16, buffer: &'b mut [u8]) -> Result<&'b [u8], E> {
let mut cmd_buf = [
Instruction::Read as u8,
(addr >> 8) as u8,
addr as u8,
];
self.with_cs_low(move |m95| {
m95.spi.transfer(&mut cmd_buf)?;
let n = buffer.len();
for byte in &mut buffer[..n] {
*byte = m95.spi.transfer(&mut [Instruction::ContinueLastInstr as u8])?[0];
}
Ok(&*buffer)
})
}
pub fn status(&mut self) -> Result<u8, E> {
let mut buffer = [
Instruction::ReadStatusReg as u8,
Instruction::ContinueLastInstr as u8,
];
self.with_cs_low(|m95| {
let buffer = m95.spi.transfer(&mut buffer)?;
Ok(buffer[1])
})
}
pub fn write_in_progress(&mut self) -> Result<bool, Error<E>> {
match self.status() {
Ok(status) => {
Ok(status & status_reg::WIP_BIT != 0)
}
Err(e) => Err(Error::Spi(e)),
}
}
pub fn write(&mut self, addr: u16, data: u8) -> Result<(), Error<E>> {
match self.write_in_progress() {
Ok(wip) => {
if wip {
return Err(Error::WriteInProgress);
}
}
Err(_) => return Err(Error::StatusReadFail),
}
let mut buffer = [
Instruction::Write as u8,
(addr >> 8) as u8,
addr as u8,
data,
];
self.with_cs_low(|m95| {
let _ = m95.spi.transfer(&mut buffer);
});
Ok(())
}
fn with_cs_low<F, T>(&mut self, f: F) -> T
where
F: FnOnce(&mut Self) -> T,
{
self.cs.set_low();
let result = f(self);
self.cs.set_high();
result
}
}
| true |
bf47ddf398fe5440649465d89d883a6eb077fdfb
|
Rust
|
aelred/nes-rust
|
/src/lib.rs
|
UTF-8
| 4,721 | 2.734375 | 3 |
[] |
no_license
|
use std::fmt::{Debug, Formatter};
pub use crate::address::Address;
pub use crate::cartridge::Cartridge;
use crate::cartridge::CHR;
use crate::cartridge::PRG;
pub use crate::cpu::CPU;
pub use crate::cpu::Instruction;
pub use crate::cpu::instructions;
use crate::cpu::NESCPUMemory;
pub use crate::i_nes::INes;
pub use crate::i_nes::INesReadError;
pub use crate::input::Button;
use crate::input::Controller;
pub use crate::memory::ArrayMemory;
pub use crate::memory::Memory;
pub use crate::ppu::Color;
use crate::ppu::NESPPUMemory;
use crate::ppu::PPU;
pub use crate::serialize::SerializeByte;
mod address;
mod cartridge;
mod cpu;
mod i_nes;
mod input;
mod mapper;
mod memory;
mod ppu;
mod serialize;
pub const WIDTH: u16 = 256;
pub const HEIGHT: u16 = 240;
pub trait NESDisplay {
fn draw_pixel(&mut self, color: Color);
}
#[derive(Debug)]
pub struct NoDisplay;
impl NESDisplay for NoDisplay {
fn draw_pixel(&mut self, _: Color) {}
}
pub struct BufferDisplay {
buffer: [u8; WIDTH as usize * HEIGHT as usize * 3],
x: usize,
y: usize,
}
impl BufferDisplay {
pub fn new() -> BufferDisplay {
BufferDisplay {
buffer: [0; WIDTH as usize * HEIGHT as usize * 3],
x: usize::from(WIDTH) - 8,
y: usize::from(HEIGHT),
}
}
pub fn buffer(&self) -> &[u8] {
&self.buffer
}
}
impl Debug for BufferDisplay {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
f.debug_struct("BufferDisplay").finish()
}
}
impl NESDisplay for BufferDisplay {
fn draw_pixel(&mut self, color: Color) {
let offset = (self.y * WIDTH as usize + self.x) * 3;
if offset + 2 < self.buffer.len() {
let (r, g, b) = color.to_rgb();
self.buffer[offset] = b;
self.buffer[offset + 1] = g;
self.buffer[offset + 2] = r;
}
self.x += 1;
if self.x == usize::from(WIDTH) {
self.x = 0;
self.y += 1;
}
if self.y == usize::from(HEIGHT) {
self.y = 0;
}
}
}
type StandardPPU<'a> = PPU<NESPPUMemory<&'a mut CHR>>;
#[derive(Debug)]
pub struct NES<'a, D> {
cpu: CPU<NESCPUMemory<&'a mut PRG, StandardPPU<'a>, Controller>>,
display: D,
}
impl<'a, D: NESDisplay> NES<'a, D> {
pub fn new(cartridge: &'a mut Cartridge, display: D) -> Self {
let ppu_memory = NESPPUMemory::new(&mut cartridge.chr);
let ppu = PPU::with_memory(ppu_memory);
let controller = Controller::default();
let cpu_memory = NESCPUMemory::new(&mut cartridge.prg, ppu, controller);
let cpu = CPU::from_memory(cpu_memory);
NES { cpu, display }
}
pub fn display(&self) -> &D {
&self.display
}
pub fn program_counter(&mut self) -> Address {
self.cpu.program_counter()
}
pub fn set_program_counter(&mut self, address: Address) {
self.cpu.set_program_counter(address);
}
pub fn read_cpu(&mut self, address: Address) -> u8 {
self.cpu.read(address)
}
pub fn controller(&mut self) -> &mut Controller {
self.cpu.memory().input()
}
pub fn tick(&mut self) {
self.tick_cpu();
self.tick_ppu();
}
fn tick_cpu(&mut self) {
self.cpu.run_instruction();
}
fn ppu(&mut self) -> &mut StandardPPU<'a> {
self.cpu.memory().ppu_registers()
}
fn tick_ppu(&mut self) {
// The loop count of 6 is a guess based on:
// - there are 3 PPU ticks per CPU tick
// - a CPU instruction takes a variable number of ticks to run (2-ish)
// TODO: accurately manage this
for _ in 0..6 {
let output = self.ppu().tick();
if output.interrupt {
self.cpu.non_maskable_interrupt();
}
if let Some(color) = output.color {
self.display.draw_pixel(color);
}
}
}
}
#[macro_export]
macro_rules! mem {
($( $data: expr ),*) => {
mem!{0 => { $($data),* }}
};
($( $offset: expr => { $( $data: expr ),* } )*) => {
{
#[allow(unused_variables, unused_mut)]
let mut memory = $crate::ArrayMemory::default();
$(
#[allow(unused_variables, unused_mut)]
let mut addr: Address = Address::from($offset);
$(
let byte = $crate::SerializeByte::to_byte($data);
$crate::Memory::write(&mut memory, addr, byte);
addr += 1u16;
)*
)*
memory
}
};
($offset: expr => $data: expr) => {
mem!{$offset => { $data }}
};
}
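// Illustrative (hypothetical) use of the `mem!` macro above, assuming
// `Address: From<u16>` and `u8: SerializeByte` as the expansion requires:
//     let memory = mem! {
//         0x8000u16 => { 0xA9u8, 0x01u8 } // two bytes written at $8000, $8001
//     };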
| true |
8d89b324152c91a212dbbfdf21e96e70588dfb04
|
Rust
|
IThawk/rust-project
|
/rust-master/src/librustc_mir/transform/dump_mir.rs
|
UTF-8
| 1,581 | 2.53125 | 3 |
[
"MIT",
"LicenseRef-scancode-other-permissive",
"Apache-2.0",
"BSD-3-Clause",
"BSD-2-Clause",
"NCSA"
] |
permissive
|
//! This pass just dumps MIR at a specified point.
use std::borrow::Cow;
use std::fmt;
use std::fs::File;
use std::io;
use rustc::mir::Body;
use rustc::session::config::{OutputFilenames, OutputType};
use rustc::ty::TyCtxt;
use crate::transform::{MirPass, MirSource};
use crate::util as mir_util;
pub struct Marker(pub &'static str);
impl<'tcx> MirPass<'tcx> for Marker {
fn name(&self) -> Cow<'_, str> {
Cow::Borrowed(self.0)
}
fn run_pass(&self, _tcx: TyCtxt<'tcx>, _source: MirSource<'tcx>, _body: &mut Body<'tcx>) {
}
}
pub struct Disambiguator {
is_after: bool
}
impl fmt::Display for Disambiguator {
fn fmt(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
let title = if self.is_after { "after" } else { "before" };
write!(formatter, "{}", title)
}
}
pub fn on_mir_pass<'tcx>(
tcx: TyCtxt<'tcx>,
pass_num: &dyn fmt::Display,
pass_name: &str,
source: MirSource<'tcx>,
body: &Body<'tcx>,
is_after: bool,
) {
if mir_util::dump_enabled(tcx, pass_name, source) {
mir_util::dump_mir(tcx,
Some(pass_num),
pass_name,
&Disambiguator { is_after },
source,
body,
|_, _| Ok(()) );
}
}
pub fn emit_mir(tcx: TyCtxt<'_>, outputs: &OutputFilenames) -> io::Result<()> {
let path = outputs.path(OutputType::Mir);
let mut f = File::create(&path)?;
mir_util::write_mir_pretty(tcx, None, &mut f)?;
Ok(())
}
| true |
ddfc697329eb496c1a8fed5e01e4ae039a720ccd
|
Rust
|
gs-akhan/learning-rust
|
/src/highscore.rs
|
UTF-8
| 986 | 3.03125 | 3 |
[
"MIT"
] |
permissive
|
use std::cmp::min;

#[derive(Debug)]
pub struct HighScores {
all_scores: Vec<u32>,
}
impl HighScores {
pub fn new(scores: &[u32]) -> Self {
Self {
all_scores: scores.to_vec(),
}
}
pub fn scores(&self) -> &[u32] {
&self.all_scores
}
pub fn latest(&self) -> Option<u32> {
if self.all_scores.len() == 0 {
None
} else {
Some(*self.all_scores.last().expect("None"))
}
}
pub fn personal_best(&self) -> Option<u32> {
if self.all_scores.len() == 0 {
return None;
}
let mut top = self.all_scores.clone();
top.sort_by(|a, b| b.cmp(a));
Some(top[0])
}
pub fn personal_top_three(&self) -> Vec<u32> {
if self.all_scores.len() == 0 {
return vec![] as Vec<u32>;
}
let mut top = self.all_scores.clone();
top.sort_by(|a, b| b.cmp(a));
top[0..min(3, top.len())].to_vec()
}
}
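// Small illustrative test of the API above (an added sketch): `latest` is the
// most recently recorded score, `personal_best` the maximum, and
// `personal_top_three` the highest scores in descending order.
#[cfg(test)]
mod high_scores_sketch_tests {
    use super::*;

    #[test]
    fn reports_latest_best_and_top_three() {
        let scores = HighScores::new(&[50, 100, 80]);
        assert_eq!(scores.latest(), Some(80));
        assert_eq!(scores.personal_best(), Some(100));
        assert_eq!(scores.personal_top_three(), vec![100, 80, 50]);
    }
}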
| true |
dc7cd71b6ea96d3413471a4da5f4eaf91ae35935
|
Rust
|
ajunlonglive/compiler-1
|
/src/runtimelib.rs
|
UTF-8
| 484 | 2.796875 | 3 |
[
"MIT"
] |
permissive
|
pub fn runtime_function(name: &str) -> Option<usize> {
Some(match name {
"print" => print as usize,
_ => return None,
})
}
unsafe extern "win64" fn print(buffer: *const u8) {
let mut size = 0;
loop {
if *buffer.add(size) == 0 {
break;
}
size += 1;
}
let slice = std::slice::from_raw_parts(buffer, size);
if let Ok(string) = std::str::from_utf8(slice) {
println!("{}", string);
}
}
| true |
db7899732d3ab9979e5dd3c2e8c971f6d114b79a
|
Rust
|
leo60228/upd8r
|
/src/hs2.rs
|
UTF-8
| 4,102 | 2.84375 | 3 |
[] |
no_license
|
use super::*;
use anyhow::{anyhow, bail, Context, Result};
use chrono::naive::NaiveDate;
use rss::{Channel, Item};
use scraper::{Html, Selector};
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};
use url::Url;
impl IntoUpdate for Item {
fn into_update(&self, media: &Media) -> Result<Update> {
let link = self
.link()
.ok_or_else(|| anyhow!("{} update missing link!", media))?;
let (title, id) = match media {
Media::Homestuck2 => {
let title = self
.description()
.ok_or_else(|| anyhow!("Homestuck^2 update missing title!"))?
.to_string();
let id = self
.title()
.ok_or_else(|| anyhow!("Homestuck^2 update missing page!"))?
.parse()
.context("Couldn't get page number from Homestuck^2 update!")?;
(title, id)
}
_ => return Err(anyhow!("{} not an RSS feed!", media)),
};
Ok(Update {
id,
title,
link: link.to_string(),
media: media.clone(),
show_id: true,
})
}
}
pub struct Hs2Feed(pub Vec<Update>);
fn hash(x: impl Hash) -> u64 {
let mut hasher = DefaultHasher::new();
x.hash(&mut hasher);
hasher.finish()
}
impl Feed for Hs2Feed {
type Item = Update;
fn updates(&self) -> &[Update] {
&self.0
}
fn fetch(media: &Media) -> Result<Self> {
match media {
&Media::Homestuck2 => {
let channel = Channel::from_url("https://homestuck2.com/story/rss")
.context("Failed to fetch RSS feed")?;
let mut items: Vec<_> = channel.items().iter().rev().cloned().collect();
items.dedup_by_key(|item| hash(item.pub_date()));
let updates = items
.into_iter()
.rev()
.map(|x| x.into_update(media))
.collect::<Result<Vec<_>>>()?;
Ok(Self(updates))
}
&Media::Homestuck2Bonus => {
let text = attohttpc::get("https://homestuck2.com/bonus")
.send()?
.text()?;
let doc = Html::parse_document(&text);
let update_selector = Selector::parse(".mar-y-sm > p").unwrap();
let a_selector = Selector::parse("a").unwrap();
let mut updates: Vec<Update> = doc
.select(&update_selector)
.filter_map(|update| {
let date = update.text().next()?.trim_end_matches(" - ");
let a = update.select(&a_selector).next()?;
let title = a.inner_html();
let rel = a.value().attr("href")?;
let base = Url::parse("https://homestuck2.com/bonus").unwrap();
let abs = base.join(rel).ok()?;
let link = abs.as_str().to_string();
let mut date_segments = date.split('/');
let m = date_segments.next()?.parse().ok()?;
let d = date_segments.next()?.parse().ok()?;
let y = date_segments.next()?.parse().ok()?;
let date = NaiveDate::from_ymd(y, m, d);
let datetime = date.and_hms(0, 0, 0);
let id = datetime.timestamp() as _;
Some(Update {
id,
title,
link,
media: media.clone(),
show_id: false,
})
})
.collect();
updates.sort_by(|a, b| b.id.cmp(&a.id));
Ok(Self(updates))
}
_ => bail!("{} isn't Homestuck^2!", media),
}
}
}
| true |
cf77991ed23b989d40029d331db38122b59a8d3a
|
Rust
|
mateusfg7/rust-lang-study
|
/rust-programmin-tutorial/42/parsing-json/src/main.rs
|
UTF-8
| 1,169 | 3.84375 | 4 |
[] |
no_license
|
// FIRST METHOD
// extern crate serde_json;
// use serde_json::Value as JsonValue;
// fn main() {
// let json_srt = r#"
// {
// "name": "Domenic",
// "age": 65,
// "is_male": true
// }
// "#;
// let res = serde_json::from_str(json_srt);
// if res.is_ok() {
// let p: JsonValue = res.unwrap();
// println!("The name is {}", p["name"].as_str().unwrap());
// } else {
// println!("Sorry! Could not parse JSON :(");
// }
// }
// SECOND METHOD
extern crate serde;
extern crate serde_json;
#[macro_use]
extern crate serde_derive;
#[derive(Serialize, Deserialize)]
struct Person {
name: String,
age: u8,
is_male: bool,
}
fn main() {
let json_srt = r#"
{
"name": "Domenic",
"age": 65,
"is_male": true
}
"#;
let res = serde_json::from_str(json_srt);
if res.is_ok() {
let p: Person = res.unwrap();
println!("The name is {}", p.name);
println!("The age is {}", p.age);
println!("Are they male? {}", p.is_male);
} else {
println!("Sorry! Could not parse JSON :(");
}
}
| true |
20840521fea9d5ddabb37601fc5cc327ab036f06
|
Rust
|
ralphtheninja/eyros
|
/src/staging.rs
|
UTF-8
| 3,341 | 2.859375 | 3 |
[] |
no_license
|
use ::{Row,Point,Value};
use failure::{Error,bail,format_err};
use random_access_storage::RandomAccess;
use std::mem::size_of;
use bincode::{serialize,deserialize};
use write_cache::WriteCache;
pub struct StagingIterator<'a,'b,P,V> where P: Point, V: Value {
rows: &'a Vec<Row<P,V>>,
bbox: &'b P::Bounds,
index: usize
}
impl<'a,'b,P,V> StagingIterator<'a,'b,P,V> where P: Point, V: Value {
pub fn new (rows: &'a Vec<Row<P,V>>, bbox: &'b P::Bounds) -> Self {
Self { index: 0, bbox, rows }
}
}
impl<'a,'b,P,V> Iterator for StagingIterator<'a,'b,P,V>
where P: Point, V: Value {
type Item = Result<(P,V),Error>;
fn next (&mut self) -> Option<Self::Item> {
let len = self.rows.len();
while self.index < len {
let i = self.index;
self.index += 1;
match self.rows[i] {
Row::Insert(point,value) => {
if point.overlaps(self.bbox) {
return Some(Ok((point,value)))
}
},
Row::Delete(_point,_value) => {}
}
}
None
}
}
pub struct Staging<S,P,V>
where S: RandomAccess<Error=Error>, P: Point, V: Value {
store: WriteCache<S>,
pub rows: Vec<Row<P,V>>
}
impl<S,P,V> Staging<S,P,V>
where S: RandomAccess<Error=Error>, P: Point, V: Value {
const INSERT: u8 = 0u8;
const DELETE: u8 = 1u8;
pub fn open (mut store: S) -> Result<Self,Error> {
let is_empty = store.is_empty()?;
let mut staging = Self {
store: WriteCache::open(store)?,
rows: vec![]
};
if !is_empty { staging.load()? }
Ok(staging)
}
fn load (&mut self) -> Result<(),Error> {
let len = self.store.len()?;
let buf = self.store.read(0, len)?;
let n = size_of::<u8>() + P::size_of() + size_of::<V>();
let m = len/n;
self.rows.clear();
self.rows.reserve(m);
for i in 0..m {
let offset = i*n;
let (pt_type,point,value): (u8,P,V)
= deserialize(&buf[offset..offset+n])?;
self.rows.push(match pt_type {
0u8 => Row::Insert(point,value),
1u8 => Row::Delete(point,value),
_ => bail!("unexpected point type")
});
}
Ok(())
}
pub fn clear (&mut self) -> Result<(),Error> {
self.store.truncate(0)?;
self.rows.clear();
Ok(())
}
pub fn bytes (&mut self) -> Result<usize,Error> {
let len = self.store.len()?;
Ok(len)
}
pub fn len (&mut self) -> Result<usize,Error> {
let n = size_of::<u8>() + P::size_of() + size_of::<V>();
Ok(self.bytes()?/n)
}
pub fn batch (&mut self, rows: &Vec<Row<P,V>>) -> Result<(),Error> {
let offset = self.store.len()?;
let n = size_of::<u8>() + P::size_of() + size_of::<V>();
let mut buf: Vec<u8> = Vec::with_capacity(n*rows.len());
for row in rows {
let bytes: Vec<u8> = serialize(&match row {
Row::Insert(point,value) => (Self::INSERT,point,value),
Row::Delete(point,value) => (Self::DELETE,point,value)
})?;
ensure_eq!(bytes.len(), n, "unexpected byte length in staging batch");
buf.extend(bytes);
}
self.store.write(offset,&buf)?;
self.rows.extend_from_slice(rows);
Ok(())
}
pub fn commit (&mut self) -> Result<(),Error> {
self.store.commit()
}
pub fn query<'a,'b> (&'a mut self, bbox: &'b P::Bounds)
-> StagingIterator<'a,'b,P,V> {
    StagingIterator::new(&self.rows, bbox)
}
}
| true |
f63e98caf367b066ca5c69ccf5bc49629e8de753
|
Rust
|
SabrinaJewson/statrs.rs
|
/src/statistics/slice_statistics.rs
|
UTF-8
| 16,165 | 3 | 3 |
[
"MIT"
] |
permissive
|
use crate::statistics::*;
use core::ops::{Index, IndexMut};
use rand::prelude::SliceRandom;
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct Data<D>(D);
impl<D: AsRef<[f64]>> Index<usize> for Data<D> {
type Output = f64;
fn index(&self, i: usize) -> &f64 {
&self.0.as_ref()[i]
}
}
impl<D: AsMut<[f64]> + AsRef<[f64]>> IndexMut<usize> for Data<D> {
fn index_mut(&mut self, i: usize) -> &mut f64 {
&mut self.0.as_mut()[i]
}
}
impl<D: AsMut<[f64]> + AsRef<[f64]>> Data<D> {
pub fn new(data: D) -> Self {
Data(data)
}
pub fn swap(&mut self, i: usize, j: usize) {
self.0.as_mut().swap(i, j)
}
pub fn len(&self) -> usize {
self.0.as_ref().len()
}
pub fn is_empty(&self) -> bool {
self.0.as_ref().len() == 0
}
pub fn iter(&self) -> core::slice::Iter<'_, f64> {
self.0.as_ref().iter()
}
// Selection algorithm from Numerical Recipes
// See: https://en.wikipedia.org/wiki/Selection_algorithm
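    // For example, select_inplace(2) on data holding [5.0, 1.0, 4.0, 2.0, 3.0]
    // returns 3.0 (the element at index 2 of the sorted order) and may
    // partially reorder the underlying data in the process.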
fn select_inplace(&mut self, rank: usize) -> f64 {
if rank == 0 {
return self.min();
}
if rank > self.len() - 1 {
return self.max();
}
let mut low = 0;
let mut high = self.len() - 1;
loop {
if high <= low + 1 {
if high == low + 1 && self[high] < self[low] {
self.swap(low, high)
}
return self[rank];
}
let middle = (low + high) / 2;
self.swap(middle, low + 1);
if self[low] > self[high] {
self.swap(low, high);
}
if self[low + 1] > self[high] {
self.swap(low + 1, high);
}
if self[low] > self[low + 1] {
self.swap(low, low + 1);
}
let mut begin = low + 1;
let mut end = high;
let pivot = self[begin];
loop {
loop {
begin += 1;
if self[begin] >= pivot {
break;
}
}
loop {
end -= 1;
if self[end] <= pivot {
break;
}
}
if end < begin {
break;
}
self.swap(begin, end);
}
self[low + 1] = self[end];
self[end] = pivot;
if end >= rank {
high = end - 1;
}
if end <= rank {
low = begin;
}
}
}
}
impl<D: AsRef<[f64]>> ::rand::distributions::Distribution<f64> for Data<D> {
fn sample<R: ::rand::Rng + ?Sized>(&self, rng: &mut R) -> f64 {
*self.0.as_ref().choose(rng).unwrap()
}
}
impl<D: AsMut<[f64]> + AsRef<[f64]>> OrderStatistics<f64> for Data<D> {
fn order_statistic(&mut self, order: usize) -> f64 {
let n = self.len();
match order {
1 => self.min(),
_ if order == n => self.max(),
_ if order < 1 || order > n => f64::NAN,
_ => self.select_inplace(order - 1),
}
}
fn median(&mut self) -> f64 {
let k = self.len() / 2;
if self.len() % 2 != 0 {
self.select_inplace(k)
} else {
(self.select_inplace(k.saturating_sub(1)) + self.select_inplace(k)) / 2.0
}
}
fn quantile(&mut self, tau: f64) -> f64 {
if !(0.0..=1.0).contains(&tau) || self.is_empty() {
return f64::NAN;
}
let h = (self.len() as f64 + 1.0 / 3.0) * tau + 1.0 / 3.0;
let hf = h as i64;
if hf <= 0 || tau == 0.0 {
return self.min();
}
if hf >= self.len() as i64 || ulps_eq!(tau, 1.0) {
return self.max();
}
let a = self.select_inplace((hf as usize).saturating_sub(1));
let b = self.select_inplace(hf as usize);
a + (h - hf as f64) * (b - a)
}
fn percentile(&mut self, p: usize) -> f64 {
self.quantile(p as f64 / 100.0)
}
fn lower_quartile(&mut self) -> f64 {
self.quantile(0.25)
}
fn upper_quartile(&mut self) -> f64 {
self.quantile(0.75)
}
fn interquartile_range(&mut self) -> f64 {
self.upper_quartile() - self.lower_quartile()
}
fn ranks(&mut self, tie_breaker: RankTieBreaker) -> Vec<f64> {
let n = self.len();
let mut ranks: Vec<f64> = vec![0.0; n];
let mut enumerated: Vec<_> = self.iter().enumerate().collect();
enumerated.sort_by(|(_, el_a), (_, el_b)| el_a.partial_cmp(el_b).unwrap());
match tie_breaker {
RankTieBreaker::First => {
for (i, idx) in enumerated.into_iter().map(|(idx, _)| idx).enumerate() {
ranks[idx] = (i + 1) as f64
}
ranks
}
_ => {
let mut prev = 0;
let mut prev_idx = 0;
let mut prev_elt = 0.0;
for (i, (idx, elt)) in enumerated.iter().cloned().enumerate() {
if i == 0 {
prev_idx = idx;
prev_elt = *elt;
}
if (*elt - prev_elt).abs() <= 0.0 {
continue;
}
if i == prev + 1 {
ranks[prev_idx] = i as f64;
} else {
handle_rank_ties(&mut ranks, &enumerated, prev, i, tie_breaker);
}
prev = i;
prev_idx = idx;
prev_elt = *elt;
}
handle_rank_ties(&mut ranks, &enumerated, prev, n, tie_breaker);
ranks
}
}
}
}
impl<D: AsMut<[f64]> + AsRef<[f64]>> Min<f64> for Data<D> {
/// Returns the minimum value in the data
///
/// # Remarks
///
/// Returns `f64::NAN` if data is empty or an entry is `f64::NAN`
///
/// # Examples
///
/// ```
/// use statrs::statistics::Min;
/// use statrs::statistics::Data;
///
/// let x = [];
/// let x = Data::new(x);
/// assert!(x.min().is_nan());
///
/// let y = [0.0, f64::NAN, 3.0, -2.0];
/// let y = Data::new(y);
/// assert!(y.min().is_nan());
///
/// let z = [0.0, 3.0, -2.0];
/// let z = Data::new(z);
/// assert_eq!(z.min(), -2.0);
/// ```
fn min(&self) -> f64 {
Statistics::min(self.iter())
}
}
impl<D: AsMut<[f64]> + AsRef<[f64]>> Max<f64> for Data<D> {
/// Returns the maximum value in the data
///
/// # Remarks
///
/// Returns `f64::NAN` if data is empty or an entry is `f64::NAN`
///
/// # Examples
///
/// ```
/// use statrs::statistics::Max;
/// use statrs::statistics::Data;
///
/// let x = [];
/// let x = Data::new(x);
/// assert!(x.max().is_nan());
///
/// let y = [0.0, f64::NAN, 3.0, -2.0];
/// let y = Data::new(y);
/// assert!(y.max().is_nan());
///
/// let z = [0.0, 3.0, -2.0];
/// let z = Data::new(z);
/// assert_eq!(z.max(), 3.0);
/// ```
fn max(&self) -> f64 {
Statistics::max(self.iter())
}
}
impl<D: AsMut<[f64]> + AsRef<[f64]>> Distribution<f64> for Data<D> {
/// Evaluates the sample mean, an estimate of the population
/// mean.
///
/// # Remarks
///
/// Returns `f64::NAN` if data is empty or an entry is `f64::NAN`
///
/// # Examples
///
/// ```
/// #[macro_use]
/// extern crate statrs;
///
/// use statrs::statistics::Distribution;
/// use statrs::statistics::Data;
///
/// # fn main() {
/// let x = [];
/// let x = Data::new(x);
/// assert!(x.mean().unwrap().is_nan());
///
/// let y = [0.0, f64::NAN, 3.0, -2.0];
/// let y = Data::new(y);
/// assert!(y.mean().unwrap().is_nan());
///
/// let z = [0.0, 3.0, -2.0];
/// let z = Data::new(z);
/// assert_almost_eq!(z.mean().unwrap(), 1.0 / 3.0, 1e-15);
/// # }
/// ```
fn mean(&self) -> Option<f64> {
Some(Statistics::mean(self.iter()))
}
/// Estimates the unbiased population variance from the provided samples
///
/// # Remarks
///
/// On a dataset of size `N`, `N-1` is used as a normalizer (Bessel's
/// correction).
///
/// Returns `f64::NAN` if data has less than two entries or if any entry is
/// `f64::NAN`
///
/// # Examples
///
/// ```
/// use statrs::statistics::Distribution;
/// use statrs::statistics::Data;
///
/// let x = [];
/// let x = Data::new(x);
/// assert!(x.variance().unwrap().is_nan());
///
/// let y = [0.0, f64::NAN, 3.0, -2.0];
/// let y = Data::new(y);
/// assert!(y.variance().unwrap().is_nan());
///
/// let z = [0.0, 3.0, -2.0];
/// let z = Data::new(z);
/// assert_eq!(z.variance().unwrap(), 19.0 / 3.0);
/// ```
fn variance(&self) -> Option<f64> {
Some(Statistics::variance(self.iter()))
}
}
impl<D: AsMut<[f64]> + AsRef<[f64]> + Clone> Median<f64> for Data<D> {
/// Returns the median value from the data
///
/// # Remarks
///
/// Returns `f64::NAN` if data is empty
///
/// # Examples
///
/// ```
/// use statrs::statistics::Median;
/// use statrs::statistics::Data;
///
/// let x = [];
/// let x = Data::new(x);
/// assert!(x.median().is_nan());
///
/// let y = [0.0, 3.0, -2.0];
/// let y = Data::new(y);
/// assert_eq!(y.median(), 0.0);
fn median(&self) -> f64 {
let mut v = self.clone();
OrderStatistics::median(&mut v)
}
}
fn handle_rank_ties(
ranks: &mut [f64],
index: &[(usize, &f64)],
a: usize,
b: usize,
tie_breaker: RankTieBreaker,
) {
let rank = match tie_breaker {
        // equivalent to ((b + a - 1) as f64) / 2.0 + 1.0, but with less risk of integer overflow
RankTieBreaker::Average => b as f64 / 2.0 + a as f64 / 2.0 + 0.5,
RankTieBreaker::Min => (a + 1) as f64,
RankTieBreaker::Max => b as f64,
RankTieBreaker::First => unreachable!(),
};
for i in &index[a..b] {
ranks[i.0] = rank
}
}
#[cfg(test)]
mod tests {
use super::*;
use crate::statistics::*;
#[test]
fn test_order_statistic_short() {
let data = [-1.0, 5.0, 0.0, -3.0, 10.0, -0.5, 4.0, 1.0, 6.0];
let mut data = Data::new(data);
assert!(data.order_statistic(0).is_nan());
assert_eq!(data.order_statistic(1), -3.0);
assert_eq!(data.order_statistic(2), -1.0);
assert_eq!(data.order_statistic(3), -0.5);
assert_eq!(data.order_statistic(7), 5.0);
assert_eq!(data.order_statistic(8), 6.0);
assert_eq!(data.order_statistic(9), 10.0);
assert!(data.order_statistic(10).is_nan());
}
#[test]
fn test_quantile_short() {
let data = [-1.0, 5.0, 0.0, -3.0, 10.0, -0.5, 4.0, 0.2, 1.0, 6.0];
let mut data = Data::new(data);
assert_eq!(data.quantile(0.0), -3.0);
assert_eq!(data.quantile(1.0), 10.0);
assert_almost_eq!(data.quantile(0.5), 3.0 / 5.0, 1e-15);
assert_almost_eq!(data.quantile(0.2), -4.0 / 5.0, 1e-15);
assert_eq!(data.quantile(0.7), 137.0 / 30.0);
assert_eq!(data.quantile(0.01), -3.0);
assert_eq!(data.quantile(0.99), 10.0);
assert_almost_eq!(data.quantile(0.52), 287.0 / 375.0, 1e-15);
assert_almost_eq!(data.quantile(0.325), -37.0 / 240.0, 1e-15);
}
#[test]
fn test_ranks() {
let sorted_distinct = [1.0, 2.0, 4.0, 7.0, 8.0, 9.0, 10.0, 12.0];
let mut sorted_distinct = Data::new(sorted_distinct);
let sorted_ties = [1.0, 2.0, 2.0, 7.0, 9.0, 9.0, 10.0, 12.0];
let mut sorted_ties = Data::new(sorted_ties);
assert_eq!(
sorted_distinct.ranks(RankTieBreaker::Average),
[1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0]
);
assert_eq!(
sorted_ties.ranks(RankTieBreaker::Average),
[1.0, 2.5, 2.5, 4.0, 5.5, 5.5, 7.0, 8.0]
);
assert_eq!(
sorted_distinct.ranks(RankTieBreaker::Min),
[1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0]
);
assert_eq!(
sorted_ties.ranks(RankTieBreaker::Min),
[1.0, 2.0, 2.0, 4.0, 5.0, 5.0, 7.0, 8.0]
);
assert_eq!(
sorted_distinct.ranks(RankTieBreaker::Max),
[1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0]
);
assert_eq!(
sorted_ties.ranks(RankTieBreaker::Max),
[1.0, 3.0, 3.0, 4.0, 6.0, 6.0, 7.0, 8.0]
);
assert_eq!(
sorted_distinct.ranks(RankTieBreaker::First),
[1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0]
);
assert_eq!(
sorted_ties.ranks(RankTieBreaker::First),
[1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0]
);
let distinct = [1.0, 8.0, 12.0, 7.0, 2.0, 9.0, 10.0, 4.0];
let distinct = Data::new(distinct);
let ties = [1.0, 9.0, 12.0, 7.0, 2.0, 9.0, 10.0, 2.0];
let ties = Data::new(ties);
assert_eq!(
distinct.clone().ranks(RankTieBreaker::Average),
[1.0, 5.0, 8.0, 4.0, 2.0, 6.0, 7.0, 3.0]
);
assert_eq!(
ties.clone().ranks(RankTieBreaker::Average),
[1.0, 5.5, 8.0, 4.0, 2.5, 5.5, 7.0, 2.5]
);
assert_eq!(
distinct.clone().ranks(RankTieBreaker::Min),
[1.0, 5.0, 8.0, 4.0, 2.0, 6.0, 7.0, 3.0]
);
assert_eq!(
ties.clone().ranks(RankTieBreaker::Min),
[1.0, 5.0, 8.0, 4.0, 2.0, 5.0, 7.0, 2.0]
);
assert_eq!(
distinct.clone().ranks(RankTieBreaker::Max),
[1.0, 5.0, 8.0, 4.0, 2.0, 6.0, 7.0, 3.0]
);
assert_eq!(
ties.clone().ranks(RankTieBreaker::Max),
[1.0, 6.0, 8.0, 4.0, 3.0, 6.0, 7.0, 3.0]
);
assert_eq!(
distinct.clone().ranks(RankTieBreaker::First),
[1.0, 5.0, 8.0, 4.0, 2.0, 6.0, 7.0, 3.0]
);
assert_eq!(
ties.clone().ranks(RankTieBreaker::First),
[1.0, 5.0, 8.0, 4.0, 2.0, 6.0, 7.0, 3.0]
);
}
#[test]
fn test_median_short() {
let even = [-1.0, 5.0, 0.0, -3.0, 10.0, -0.5, 4.0, 0.2, 1.0, 6.0];
let even = Data::new(even);
assert_eq!(even.median(), 0.6);
let odd = [-1.0, 5.0, 0.0, -3.0, 10.0, -0.5, 4.0, 0.2, 1.0];
let odd = Data::new(odd);
assert_eq!(odd.median(), 0.2);
}
#[test]
fn test_median_long_constant_seq() {
let even = vec![2.0; 100000];
let even = Data::new(even);
assert_eq!(2.0, even.median());
let odd = vec![2.0; 100001];
let odd = Data::new(odd);
assert_eq!(2.0, odd.median());
}
// TODO: test codeplex issue 5667 (Math.NET)
#[test]
fn test_median_robust_on_infinities() {
let data3 = [2.0, f64::NEG_INFINITY, f64::INFINITY];
let data3 = Data::new(data3);
assert_eq!(data3.median(), 2.0);
assert_eq!(data3.median(), 2.0);
let data3 = [f64::NEG_INFINITY, 2.0, f64::INFINITY];
let data3 = Data::new(data3);
assert_eq!(data3.median(), 2.0);
assert_eq!(data3.median(), 2.0);
let data3 = [f64::NEG_INFINITY, f64::INFINITY, 2.0];
let data3 = Data::new(data3);
assert_eq!(data3.median(), 2.0);
assert_eq!(data3.median(), 2.0);
let data4 = [f64::NEG_INFINITY, 2.0, 3.0, f64::INFINITY];
let data4 = Data::new(data4);
assert_eq!(data4.median(), 2.5);
assert_eq!(data4.median(), 2.5);
}
#[test]
fn test_foo() {
let arr = [0.0, 1.0, 2.0, 3.0];
let mut arr = Data::new(arr);
arr.order_statistic(2);
}
}
| true |
2a5a012586b568e26842c5a3cc6fe2ebae1ccaa2
|
Rust
|
imorph/vector
|
/src/config/component.rs
|
UTF-8
| 1,976 | 2.921875 | 3 |
[
"MPL-2.0"
] |
permissive
|
use snafu::Snafu;
use std::marker::PhantomData;
use toml::Value;
use super::GenerateConfig;
#[derive(Debug, Snafu, Clone, PartialEq)]
pub enum ExampleError {
#[snafu(display("unable to create an example for this component"))]
MissingExample,
#[snafu(display("type '{}' does not exist", type_str))]
DoesNotExist { type_str: String },
}
/// Describes a component plugin storing its type name, an example config, and
/// other useful information about the plugin.
pub struct ComponentDescription<T: Sized> {
pub type_str: &'static str,
example_value: fn() -> Option<Value>,
component_type: PhantomData<T>,
}
impl<T> ComponentDescription<T>
where
T: 'static + Sized,
inventory::iter<ComponentDescription<T>>:
std::iter::IntoIterator<Item = &'static ComponentDescription<T>>,
{
/// Creates a new component plugin description.
/// Configuration example is generated by the `GenerateConfig` trait.
pub fn new<B: GenerateConfig>(type_str: &'static str) -> Self {
ComponentDescription {
type_str,
example_value: || Some(B::generate_config()),
component_type: PhantomData,
}
}
/// Returns an example config for a plugin identified by its type.
pub fn example(type_str: &str) -> Result<Value, ExampleError> {
inventory::iter::<ComponentDescription<T>>
.into_iter()
.find(|t| t.type_str == type_str)
.ok_or_else(|| ExampleError::DoesNotExist {
type_str: type_str.to_owned(),
})
.and_then(|t| (t.example_value)().ok_or(ExampleError::MissingExample))
}
/// Returns a sorted Vec of all plugins registered of a type.
pub fn types() -> Vec<&'static str> {
let mut types = Vec::new();
for definition in inventory::iter::<ComponentDescription<T>> {
types.push(definition.type_str);
}
types.sort_unstable();
types
}
}
| true |
d1ba78c0623af3a7a144132ff180e049411dafb1
|
Rust
|
ia7ck/competitive-programming
|
/AtCoder/abc280/src/bin/f/main.rs
|
UTF-8
| 1,543 | 2.765625 | 3 |
[] |
no_license
|
use std::collections::{HashSet, VecDeque};
use proconio::{input, marker::Usize1};
use union_find::UnionFind;
fn main() {
input! {
n: usize,
m: usize,
q: usize,
edges: [(Usize1, Usize1, i64); m],
};
let mut uf = UnionFind::new(n);
for &(a, b, _) in &edges {
uf.unite(a, b);
}
let mut g = vec![vec![]; n];
for &(a, b, c) in &edges {
g[a].push((b, c));
g[b].push((a, -c));
}
let mut cost = vec![None; n];
let mut inf = HashSet::new();
for s in 0..n {
if uf.find(s) != s {
continue;
}
let mut que = VecDeque::new();
cost[s] = Some(0);
que.push_back(s);
while let Some(u) = que.pop_front() {
for &(v, c) in &g[u] {
let new_cost = cost[u].unwrap() + c;
if let Some(cc) = cost[v] {
if cc != new_cost {
inf.insert(s);
}
} else {
cost[v] = Some(new_cost);
que.push_back(v);
}
}
}
}
for _ in 0..q {
input! {
x: Usize1,
y: Usize1,
};
if !uf.same(x, y) {
println!("nan");
continue;
}
let root = uf.find(x);
if inf.contains(&root) {
println!("inf");
continue;
}
let ans = cost[y].unwrap() - cost[x].unwrap();
println!("{}", ans);
}
}
| true |
d8e5ca2c106a5ba56953f8279245e89e88eb10b9
|
Rust
|
prove-rs/z3.rs
|
/z3/src/params.rs
|
UTF-8
| 3,452 | 2.6875 | 3 |
[
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
use std::ffi::{CStr, CString};
use std::fmt;
use z3_sys::*;
use Context;
use Params;
use Symbol;
impl<'ctx> Params<'ctx> {
unsafe fn wrap(ctx: &'ctx Context, z3_params: Z3_params) -> Params<'ctx> {
Z3_params_inc_ref(ctx.z3_ctx, z3_params);
Params { ctx, z3_params }
}
pub fn new(ctx: &'ctx Context) -> Params<'ctx> {
unsafe { Self::wrap(ctx, Z3_mk_params(ctx.z3_ctx)) }
}
pub fn set_symbol<K: Into<Symbol>, V: Into<Symbol>>(&mut self, k: K, v: V) {
unsafe {
Z3_params_set_symbol(
self.ctx.z3_ctx,
self.z3_params,
k.into().as_z3_symbol(self.ctx),
v.into().as_z3_symbol(self.ctx),
)
};
}
pub fn set_bool<K: Into<Symbol>>(&mut self, k: K, v: bool) {
unsafe {
Z3_params_set_bool(
self.ctx.z3_ctx,
self.z3_params,
k.into().as_z3_symbol(self.ctx),
v,
)
};
}
pub fn set_f64<K: Into<Symbol>>(&mut self, k: K, v: f64) {
unsafe {
Z3_params_set_double(
self.ctx.z3_ctx,
self.z3_params,
k.into().as_z3_symbol(self.ctx),
v,
)
};
}
pub fn set_u32<K: Into<Symbol>>(&mut self, k: K, v: u32) {
unsafe {
Z3_params_set_uint(
self.ctx.z3_ctx,
self.z3_params,
k.into().as_z3_symbol(self.ctx),
v,
)
};
}
}
/// Get a global (or module) parameter.
///
/// # See also
///
/// - [`set_global_param()`]
/// - [`reset_all_global_params()`]
pub fn get_global_param(k: &str) -> Option<String> {
let ks = CString::new(k).unwrap();
let mut ptr = std::ptr::null();
if unsafe { Z3_global_param_get(ks.as_ptr(), &mut ptr as Z3_string_ptr) } {
let vs = unsafe { CStr::from_ptr(ptr) };
vs.to_str().ok().map(|vs| vs.to_owned())
} else {
None
}
}
/// Set a global (or module) parameter. This setting is shared by all Z3 contexts.
///
/// # See also
///
/// - [`get_global_param()`]
/// - [`reset_all_global_params()`]
pub fn set_global_param(k: &str, v: &str) {
let ks = CString::new(k).unwrap();
let vs = CString::new(v).unwrap();
unsafe { Z3_global_param_set(ks.as_ptr(), vs.as_ptr()) };
}
/// Restore the value of all global (and module) parameters. This command will not affect already created objects (such as tactics and solvers).
///
/// # See also
///
/// - [`get_global_param()`]
/// - [`set_global_param()`]
pub fn reset_all_global_params() {
unsafe { Z3_global_param_reset_all() };
}
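// Illustrative usage sketch of the three free functions above:
//
// set_global_param("verbose", "10");
// assert_eq!(get_global_param("verbose").as_deref(), Some("10"));
// reset_all_global_params();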
impl<'ctx> fmt::Display for Params<'ctx> {
fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
let p = unsafe { Z3_params_to_string(self.ctx.z3_ctx, self.z3_params) };
if p.is_null() {
return Result::Err(fmt::Error);
}
match unsafe { CStr::from_ptr(p) }.to_str() {
Ok(s) => write!(f, "{}", s),
Err(_) => Result::Err(fmt::Error),
}
}
}
impl<'ctx> fmt::Debug for Params<'ctx> {
fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
<Self as fmt::Display>::fmt(self, f)
}
}
impl<'ctx> Drop for Params<'ctx> {
fn drop(&mut self) {
unsafe { Z3_params_dec_ref(self.ctx.z3_ctx, self.z3_params) };
}
}
| true |
a122c069f238cc6a898517ccf33f6b9d87919a5e
|
Rust
|
davidsteiner/seq-rs
|
/src/error.rs
|
UTF-8
| 678 | 3 | 3 |
[
"MIT"
] |
permissive
|
use crate::parser::Rule;
use pest::error::Error as PestError;
#[derive(Debug)]
pub enum Error {
PestError(PestError<Rule>),
ModelError { message: String },
}
impl From<PestError<Rule>> for Error {
fn from(err: PestError<Rule>) -> Self {
Error::PestError(err)
}
}
impl std::fmt::Display for Error {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
match self {
Error::PestError(err) => write!(f, "{}", err.to_string()),
Error::ModelError { message } => write!(f, "{}", message),
}
}
}
impl Error {
pub fn new(message: String) -> Error {
Error::ModelError { message }
}
}
| true |
3af97e4710874139b17cf9a3f47db1006381489a
|
Rust
|
clap-rs/clap
|
/tests/derive/options.rs
|
UTF-8
| 13,250 | 2.578125 | 3 |
[
"Apache-2.0",
"MIT"
] |
permissive
|
// Copyright 2018 Guillaume Pinot (@TeXitoi) <[email protected]>,
// Kevin Knapp (@kbknapp) <[email protected]>, and
// Ana Hobden (@hoverbear) <[email protected]>
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//
// This work was derived from Structopt (https://github.com/TeXitoi/structopt)
// commit#ea76fa1b1b273e65e3b0b1046643715b49bec51f which is licensed under the
// MIT/Apache 2.0 license.
#![allow(clippy::option_option)]
use crate::utils;
use clap::{Parser, Subcommand};
#[test]
fn required_option() {
#[derive(Parser, PartialEq, Debug)]
#[command(args_override_self = true)]
struct Opt {
#[arg(short, long)]
arg: i32,
}
assert_eq!(
Opt { arg: 42 },
Opt::try_parse_from(["test", "-a42"]).unwrap()
);
assert_eq!(
Opt { arg: 42 },
Opt::try_parse_from(["test", "-a", "42"]).unwrap()
);
assert_eq!(
Opt { arg: 42 },
Opt::try_parse_from(["test", "--arg", "42"]).unwrap()
);
assert_eq!(
Opt { arg: 42 },
Opt::try_parse_from(["test", "--arg", "24", "--arg", "42"]).unwrap()
);
assert!(Opt::try_parse_from(["test"]).is_err());
}
#[test]
fn option_with_default() {
#[derive(Parser, PartialEq, Debug)]
#[command(args_override_self = true)]
struct Opt {
#[arg(short, default_value = "42")]
arg: i32,
}
assert_eq!(
Opt { arg: 24 },
Opt::try_parse_from(["test", "-a24"]).unwrap()
);
assert_eq!(
Opt { arg: 42 },
Opt::try_parse_from(["test", "-a", "24", "-a", "42"]).unwrap()
);
assert_eq!(Opt { arg: 42 }, Opt::try_parse_from(["test"]).unwrap());
}
#[test]
fn option_with_raw_default() {
#[derive(Parser, PartialEq, Debug)]
#[command(args_override_self = true)]
struct Opt {
#[arg(short, default_value = "42")]
arg: i32,
}
assert_eq!(
Opt { arg: 24 },
Opt::try_parse_from(["test", "-a24"]).unwrap()
);
assert_eq!(
Opt { arg: 42 },
Opt::try_parse_from(["test", "-a", "24", "-a", "42"]).unwrap()
);
assert_eq!(Opt { arg: 42 }, Opt::try_parse_from(["test"]).unwrap());
}
#[test]
fn option_from_str() {
#[derive(Clone, Debug, PartialEq)]
struct A;
impl std::str::FromStr for A {
type Err = std::convert::Infallible;
fn from_str(_: &str) -> Result<A, Self::Err> {
Ok(A)
}
}
#[derive(Debug, Parser, PartialEq)]
#[command(args_override_self = true)]
struct Opt {
a: Option<A>,
}
assert_eq!(Opt { a: None }, Opt::try_parse_from(["test"]).unwrap());
assert_eq!(
Opt { a: Some(A) },
Opt::try_parse_from(["test", "foo"]).unwrap()
);
}
#[test]
fn vec_from_str() {
#[derive(Clone, Debug, PartialEq)]
struct A;
impl std::str::FromStr for A {
type Err = std::convert::Infallible;
fn from_str(_: &str) -> Result<A, Self::Err> {
Ok(A)
}
}
#[derive(Debug, Parser, PartialEq)]
#[command(args_override_self = true)]
struct Opt {
a: Vec<A>,
}
assert_eq!(
Opt { a: Vec::new() },
Opt::try_parse_from(["test"]).unwrap()
);
assert_eq!(
Opt { a: vec![A] },
Opt::try_parse_from(["test", "foo"]).unwrap()
);
}
#[test]
fn option_vec_from_str() {
#[derive(Clone, Debug, PartialEq)]
struct A;
impl std::str::FromStr for A {
type Err = std::convert::Infallible;
fn from_str(_: &str) -> Result<A, Self::Err> {
Ok(A)
}
}
#[derive(Debug, Parser, PartialEq)]
#[command(args_override_self = true)]
struct Opt {
#[arg(short)]
a: Option<Vec<A>>,
}
assert_eq!(Opt { a: None }, Opt::try_parse_from(["test"]).unwrap());
assert_eq!(
Opt { a: Some(vec![A]) },
Opt::try_parse_from(["test", "-a", "foo"]).unwrap()
);
}
#[test]
fn option_type_is_optional() {
#[derive(Parser, PartialEq, Debug)]
#[command(args_override_self = true)]
struct Opt {
#[arg(short)]
arg: Option<i32>,
}
assert_eq!(
Opt { arg: Some(42) },
Opt::try_parse_from(["test", "-a42"]).unwrap()
);
assert_eq!(
Opt { arg: Some(42) },
Opt::try_parse_from(["test", "-a", "24", "-a", "42"]).unwrap()
);
assert_eq!(Opt { arg: None }, Opt::try_parse_from(["test"]).unwrap());
}
#[test]
fn required_with_option_type() {
#[derive(Debug, PartialEq, Eq, Parser)]
#[command(subcommand_negates_reqs = true)]
#[command(args_override_self = true)]
struct Opt {
#[arg(required = true)]
req_str: Option<String>,
#[command(subcommand)]
cmd: Option<SubCommands>,
}
#[derive(Debug, PartialEq, Eq, Subcommand)]
enum SubCommands {
ExSub {
#[arg(short, long, action = clap::ArgAction::Count)]
verbose: u8,
},
}
assert_eq!(
Opt {
req_str: Some(("arg").into()),
cmd: None,
},
Opt::try_parse_from(["test", "arg"]).unwrap()
);
assert_eq!(
Opt {
req_str: None,
cmd: Some(SubCommands::ExSub { verbose: 1 }),
},
Opt::try_parse_from(["test", "ex-sub", "-v"]).unwrap()
);
assert!(Opt::try_parse_from(["test"]).is_err());
}
#[test]
fn ignore_qualified_option_type() {
fn parser(s: &str) -> Result<Option<String>, std::convert::Infallible> {
Ok(Some(s.to_string()))
}
#[derive(Parser, PartialEq, Debug)]
#[command(args_override_self = true)]
struct Opt {
#[arg(value_parser = parser)]
arg: ::std::option::Option<String>,
}
assert_eq!(
Opt {
arg: Some("success".into())
},
Opt::try_parse_from(["test", "success"]).unwrap()
);
}
#[test]
fn option_option_type_is_optional_value() {
#[derive(Parser, PartialEq, Debug)]
#[command(args_override_self = true)]
struct Opt {
#[arg(short)]
#[allow(clippy::option_option)]
arg: Option<Option<i32>>,
}
assert_eq!(
Opt {
arg: Some(Some(42))
},
Opt::try_parse_from(["test", "-a42"]).unwrap()
);
assert_eq!(
Opt { arg: Some(None) },
Opt::try_parse_from(["test", "-a"]).unwrap()
);
assert_eq!(
Opt {
arg: Some(Some(42))
},
Opt::try_parse_from(["test", "-a", "24", "-a", "42"]).unwrap()
);
assert_eq!(Opt { arg: None }, Opt::try_parse_from(["test"]).unwrap());
}
#[test]
fn option_option_type_help() {
#[derive(Parser, Debug)]
#[command(args_override_self = true)]
struct Opt {
#[arg(long, value_name = "val")]
arg: Option<Option<i32>>,
}
let help = utils::get_help::<Opt>();
assert!(help.contains("--arg [<val>]"));
assert!(!help.contains("--arg [<val>...]"));
}
#[test]
fn two_option_option_types() {
#[derive(Parser, PartialEq, Debug)]
#[command(args_override_self = true)]
struct Opt {
#[arg(short)]
arg: Option<Option<i32>>,
#[arg(long)]
field: Option<Option<String>>,
}
assert_eq!(
Opt {
arg: Some(Some(42)),
field: Some(Some("f".into()))
},
Opt::try_parse_from(["test", "-a42", "--field", "f"]).unwrap()
);
assert_eq!(
Opt {
arg: Some(Some(42)),
field: Some(None)
},
Opt::try_parse_from(["test", "-a42", "--field"]).unwrap()
);
assert_eq!(
Opt {
arg: Some(None),
field: Some(None)
},
Opt::try_parse_from(["test", "-a", "--field"]).unwrap()
);
assert_eq!(
Opt {
arg: Some(None),
field: Some(Some("f".into()))
},
Opt::try_parse_from(["test", "-a", "--field", "f"]).unwrap()
);
assert_eq!(
Opt {
arg: None,
field: Some(None)
},
Opt::try_parse_from(["test", "--field"]).unwrap()
);
assert_eq!(
Opt {
arg: None,
field: None
},
Opt::try_parse_from(["test"]).unwrap()
);
}
#[test]
fn vec_type_is_multiple_occurrences() {
#[derive(Parser, PartialEq, Debug)]
#[command(args_override_self = true)]
struct Opt {
#[arg(short, long)]
arg: Vec<i32>,
}
assert_eq!(
Opt { arg: vec![24] },
Opt::try_parse_from(["test", "-a24"]).unwrap()
);
assert_eq!(Opt { arg: vec![] }, Opt::try_parse_from(["test"]).unwrap());
assert_eq!(
Opt { arg: vec![24, 42] },
Opt::try_parse_from(["test", "-a", "24", "-a", "42"]).unwrap()
);
}
#[test]
fn vec_type_with_required() {
#[derive(Parser, PartialEq, Debug)]
#[command(args_override_self = true)]
struct Opt {
#[arg(short, long, required = true)]
arg: Vec<i32>,
}
assert_eq!(
Opt { arg: vec![24] },
Opt::try_parse_from(["test", "-a24"]).unwrap()
);
assert!(Opt::try_parse_from(["test"]).is_err());
assert_eq!(
Opt { arg: vec![24, 42] },
Opt::try_parse_from(["test", "-a", "24", "-a", "42"]).unwrap()
);
}
#[test]
fn vec_type_with_multiple_values_only() {
#[derive(Parser, PartialEq, Debug)]
#[command(args_override_self = true)]
struct Opt {
#[arg(short, long, num_args(1..))]
arg: Vec<i32>,
}
assert_eq!(
Opt { arg: vec![24] },
Opt::try_parse_from(["test", "-a24"]).unwrap()
);
assert_eq!(Opt { arg: vec![] }, Opt::try_parse_from(["test"]).unwrap());
assert_eq!(
Opt { arg: vec![24, 42] },
Opt::try_parse_from(["test", "-a", "24", "42"]).unwrap()
);
}
#[test]
fn ignore_qualified_vec_type() {
fn parser(s: &str) -> Result<Vec<String>, std::convert::Infallible> {
Ok(vec![s.to_string()])
}
#[derive(Parser, PartialEq, Debug)]
#[command(args_override_self = true)]
struct Opt {
#[arg(value_parser = parser)]
arg: ::std::vec::Vec<String>,
}
assert_eq!(
Opt {
arg: vec!["success".into()]
},
Opt::try_parse_from(["test", "success"]).unwrap()
);
}
#[test]
fn option_vec_type() {
#[derive(Parser, PartialEq, Debug)]
#[command(args_override_self = true)]
struct Opt {
#[arg(short)]
arg: Option<Vec<i32>>,
}
assert_eq!(
Opt { arg: Some(vec![1]) },
Opt::try_parse_from(["test", "-a", "1"]).unwrap()
);
assert_eq!(
Opt {
arg: Some(vec![1, 2])
},
Opt::try_parse_from(["test", "-a", "1", "-a", "2"]).unwrap()
);
assert_eq!(Opt { arg: None }, Opt::try_parse_from(["test"]).unwrap());
}
#[test]
fn option_vec_type_structopt_behavior() {
#[derive(Parser, PartialEq, Debug)]
#[command(args_override_self = true)]
struct Opt {
#[arg(short, long, num_args(0..))]
arg: Option<Vec<i32>>,
}
assert_eq!(
Opt { arg: Some(vec![1]) },
Opt::try_parse_from(["test", "-a", "1"]).unwrap()
);
assert_eq!(
Opt {
arg: Some(vec![1, 2])
},
Opt::try_parse_from(["test", "-a", "1", "2"]).unwrap()
);
assert_eq!(
Opt { arg: Some(vec![]) },
Opt::try_parse_from(["test", "-a"]).unwrap()
);
assert_eq!(Opt { arg: None }, Opt::try_parse_from(["test"]).unwrap());
}
#[test]
fn two_option_vec_types() {
#[derive(Parser, PartialEq, Debug)]
#[command(args_override_self = true)]
struct Opt {
#[arg(short)]
arg: Option<Vec<i32>>,
#[arg(short)]
b: Option<Vec<i32>>,
}
assert_eq!(
Opt {
arg: Some(vec![1]),
b: None,
},
Opt::try_parse_from(["test", "-a", "1"]).unwrap()
);
assert_eq!(
Opt {
arg: Some(vec![1]),
b: Some(vec![1])
},
Opt::try_parse_from(["test", "-a", "1", "-b", "1"]).unwrap()
);
assert_eq!(
Opt {
arg: Some(vec![1, 2]),
b: Some(vec![1, 2])
},
Opt::try_parse_from(["test", "-a", "1", "-a", "2", "-b", "1", "-b", "2"]).unwrap()
);
assert_eq!(
Opt { arg: None, b: None },
Opt::try_parse_from(["test"]).unwrap()
);
}
#[test]
fn explicit_value_parser() {
#[derive(Parser, PartialEq, Debug)]
#[command(args_override_self = true)]
struct Opt {
#[arg(long, value_parser = clap::value_parser!(i32))]
arg: i32,
}
assert_eq!(
Opt { arg: 42 },
Opt::try_parse_from(["test", "--arg", "42"]).unwrap()
);
}
#[test]
fn implicit_value_parser() {
#[derive(Parser, PartialEq, Debug)]
#[command(args_override_self = true)]
struct Opt {
#[arg(long)]
arg: i32,
}
assert_eq!(
Opt { arg: 42 },
Opt::try_parse_from(["test", "--arg", "42"]).unwrap()
);
}
| true |
77281197d6114fb404094ae719bf1784401af755
|
Rust
|
Champii/rsrpc
|
/src/timer.rs
|
UTF-8
| 430 | 2.703125 | 3 |
[
"MIT"
] |
permissive
|
use super::oneshot::{channel, Receiver};
use std::fmt::Debug;
use std::thread;
use std::time::Duration;
pub struct Timer {}
impl Timer {
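    /// Returns a receiver that yields `err` once `wait_time` has elapsed:
    /// a background thread sleeps for the duration and then sends `err`
    /// over a oneshot channel.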
pub fn new<T: 'static + Send + Sync + Debug>(wait_time: Duration, err: T) -> Receiver<T> {
let (tx, rx) = channel::<T>();
thread::spawn(move || {
thread::sleep(wait_time);
            let _ = tx.send(err);
});
rx
}
}
| true |
ca482724274310404d20f9d500cd0bd018ff8aed
|
Rust
|
bottlerocket-os/bottlerocket
|
/sources/bloodhound/src/results.rs
|
UTF-8
| 5,535 | 3.140625 | 3 |
[
"Apache-2.0",
"MIT"
] |
permissive
|
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use std::{collections::BTreeMap, fmt, usize};
#[derive(Debug, Serialize, Deserialize)]
pub struct ReportMetadata {
#[serde(skip_serializing_if = "Option::is_none")]
pub name: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub version: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub url: Option<String>,
}
#[derive(Debug, Clone, Copy, PartialEq, Serialize, Deserialize, Default)]
pub enum CheckStatus {
/// Successfully verified to be in the expected state.
PASS,
/// Found to not be in the expected state.
FAIL,
/// Unable to verify state, manual verification required.
#[default]
SKIP,
}
impl fmt::Display for CheckStatus {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{:?}", self)
}
}
#[derive(Debug, Serialize, Deserialize)]
pub enum Mode {
Automatic,
Manual,
}
impl fmt::Display for Mode {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{:?}", self)
}
}
/// CheckerMetadata contains the metadata about individual checkers. This data
/// is used by bloodhound to discover details about the available checks and
/// make decisions about including the checks based on input like the compliance
/// level to evaluate.
#[derive(Debug, Serialize, Deserialize)]
pub struct CheckerMetadata {
pub name: String,
pub id: String,
pub level: u8,
pub title: String,
pub mode: Mode,
}
impl fmt::Display for CheckerMetadata {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let output = serde_json::to_string(&self).unwrap_or_default();
write!(f, "{}", output)
}
}
/// CheckerResult contains the results of a performed check.
#[derive(Debug, Serialize, Deserialize, Default)]
pub struct CheckerResult {
pub status: CheckStatus,
pub error: String,
}
impl fmt::Display for CheckerResult {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let output = serde_json::to_string(&self).unwrap_or_default();
write!(f, "{}", output)
}
}
/// The Checker trait defines the interface for a compliance check. Checkers are
/// expected to be able to provide metadata about the check it performs, and be
/// able to execute that check and provide the results of its findings.
///
/// The expectation for bloodhound checkers is:
/// - Check is performed normally, return JSON output from execute and exit 0
/// - Check has failed validation, return JSON output from execute and exit 0
/// - Check could not be performed, return error text to stderr and exit 1
pub trait Checker {
fn metadata(&self) -> CheckerMetadata;
fn execute(&self) -> CheckerResult;
}
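// Illustrative sketch: a minimal automatic checker satisfying the contract
// described above. The name, id, and title values here are hypothetical
// placeholders.
pub struct ExampleAlwaysPassChecker;

impl Checker for ExampleAlwaysPassChecker {
    fn execute(&self) -> CheckerResult {
        CheckerResult {
            status: CheckStatus::PASS,
            error: String::new(),
        }
    }

    fn metadata(&self) -> CheckerMetadata {
        CheckerMetadata {
            name: "example-always-pass".to_string(),
            id: "0.0.0".to_string(),
            level: 1,
            title: "Example check that always passes".to_string(),
            mode: Mode::Automatic,
        }
    }
}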
/// Common checker type for reporting manual check results.
pub struct ManualChecker {
pub name: String,
pub id: String,
pub title: String,
pub level: u8,
}
impl Checker for ManualChecker {
fn execute(&self) -> CheckerResult {
CheckerResult {
error: "Manual check, see benchmark for audit details.".to_string(),
status: CheckStatus::SKIP,
}
}
fn metadata(&self) -> CheckerMetadata {
CheckerMetadata {
title: self.title.to_string(),
id: self.id.to_string(),
level: self.level,
name: self.name.to_string(),
mode: Mode::Manual,
}
}
}
/// Used to help serialize output into simpler JSON structure.
#[derive(Debug, Serialize)]
pub struct IndividualResult {
#[serde(flatten)]
pub metadata: CheckerMetadata,
#[serde(flatten)]
pub result: CheckerResult,
}
/// ReportResults are the overall compliance checking containing the results of
/// all individual checks run.
#[derive(Debug, Serialize)]
pub struct ReportResults {
pub level: u8,
pub total: usize,
pub passed: usize,
pub skipped: usize,
pub failed: usize,
pub status: CheckStatus,
pub timestamp: String,
#[serde(flatten)]
pub metadata: ReportMetadata,
pub results: BTreeMap<String, IndividualResult>,
}
impl ReportResults {
/// Initialize a new `ReportResults` with the default values.
pub fn new(level: u8, metadata: ReportMetadata) -> Self {
let current_time: DateTime<Utc> = Utc::now();
ReportResults {
level,
total: 0,
passed: 0,
skipped: 0,
failed: 0,
status: CheckStatus::SKIP,
timestamp: format!("{:?}", current_time),
metadata,
results: BTreeMap::new(),
}
}
/// Add the results of a checker run to the overall results.
pub fn add_result(&mut self, metadata: CheckerMetadata, result: CheckerResult) {
self.total += 1;
match result.status {
CheckStatus::FAIL => {
self.failed += 1;
self.status = CheckStatus::FAIL;
}
CheckStatus::PASS => {
self.passed += 1;
if self.status == CheckStatus::SKIP {
// We only want to mark as passing if at least one of the
// checks ran and passed
self.status = CheckStatus::PASS;
}
}
CheckStatus::SKIP => {
self.skipped += 1;
}
}
self.results
.insert(metadata.name.clone(), IndividualResult { metadata, result });
}
}
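// Illustrative usage sketch: aggregating one passing check into a report.
// The metadata values are hypothetical placeholders.
#[cfg(test)]
mod report_results_example {
    use super::*;

    #[test]
    fn report_marks_pass_when_a_check_passes() {
        let metadata = ReportMetadata {
            name: Some("example-benchmark".to_string()),
            version: None,
            url: None,
        };
        let mut report = ReportResults::new(1, metadata);
        report.add_result(
            CheckerMetadata {
                name: "example-check".to_string(),
                id: "0.0.0".to_string(),
                level: 1,
                title: "Example check".to_string(),
                mode: Mode::Automatic,
            },
            CheckerResult {
                status: CheckStatus::PASS,
                error: String::new(),
            },
        );
        assert_eq!(report.total, 1);
        assert_eq!(report.passed, 1);
        assert_eq!(report.status, CheckStatus::PASS);
    }
}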
| true |
3c2a06652966dfa14d31f9feab39fe073c39d212
|
Rust
|
phip1611/libbruteforce
|
/src/parameter/target_hash.rs
|
UTF-8
| 5,698 | 3.234375 | 3 |
[
"MIT"
] |
permissive
|
/*
MIT License
Copyright (c) 2022 Philipp Schuster
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
*/
use crate::CrackTarget;
use std::fmt::{Debug, Formatter};
/// Helper type to create instances of [`TargetHashAndHashFunction`].
#[derive(Debug)]
pub enum TargetHashInput<'a> {
/// The provided input is already a valid hash but as (hex) string representation.
HashAsStr(&'a str),
/// The provided input is plain text and needs to be hashed by the constructor.
/// This is useful for tests, examples, and debugging. For real applications you
/// may want to use [`Self::HashAsStr`].
Plaintext(&'a str),
}
/// Abstraction over a hashing algorithm and the target hash that needs to be cracked.
/// `T` is of type [`CrackTarget`]. This generic struct exists so that hashes of type
/// [`CrackTarget`] can be checked independent of the hashing algorithm. This is
/// more efficient than transforming every hash to a string and compare the hash
/// string representations afterwards.
pub struct TargetHashAndHashFunction<T: CrackTarget> {
/// The target hash we want to crack.
target_hash: T,
/// Function that calculates the hash of type `T` of the given input plain text.
hash_fn: fn(input: &str) -> T,
/// Function that transforms a `T` in string representation to a real `T`.
/// For example, this transforms a `sha256` string representation to the runtime
/// type the hashing library uses.
hash_str_repr_to_hash_type_fn: fn(hash_as_string: &str) -> T,
/// Function that transform the hash type to a string representation. Usually, this
/// will return a hex string that represents the hash.
hash_type_to_str_repr_fn: fn(hash: &T) -> String,
}
impl<T: CrackTarget> TargetHashAndHashFunction<T> {
/// Constructor that takes a hashing function and a target hash.
///
/// # Parameters
/// * `target_hash` String representation of the target hash we want to crack.
/// This is usually the hex string representation of a sha256 hash or so.
/// * `hash_fn` Transforms a plain input password/guess of type `str` to the target hash.
/// This is the hashing function.
/// * `hash_str_repr_to_hash_type_fn` Function that can take the argument `target_hash`
/// and transform it to the target hashing type. This
/// usually transforms the hex string that represents the
/// hash to bytes in memory.
/// * `hash_type_to_str_repr_fn` Function that transform the hash type to a string representation.
/// Usually, this will return a hex string that represents the hash.
pub fn new(
target_hash: TargetHashInput,
hash_fn: fn(&str) -> T,
hash_str_repr_to_hash_type_fn: fn(hash_as_string: &str) -> T,
hash_type_to_str_repr_fn: fn(hash: &T) -> String,
) -> Self {
let target_hash = match target_hash {
TargetHashInput::HashAsStr(hash_str) => hash_str_repr_to_hash_type_fn(hash_str),
TargetHashInput::Plaintext(input) => hash_fn(input),
};
Self {
target_hash,
hash_fn,
hash_str_repr_to_hash_type_fn,
hash_type_to_str_repr_fn,
}
}
}
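// Illustrative usage sketch, assuming `String` implements `CrackTarget`
// (the actual bound lives elsewhere in the crate). An identity "hash"
// wired through the constructor above would look like this:
//
// let target = TargetHashAndHashFunction::new(
//     TargetHashInput::Plaintext("secret"),
//     |s| s.to_string(),        // hash_fn
//     |s| s.to_string(),        // hash_str_repr_to_hash_type_fn
//     |h: &String| h.clone(),   // hash_type_to_str_repr_fn
// );
// assert!(target.hash_matches("secret"));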
impl<T: CrackTarget> Debug for TargetHashAndHashFunction<T> {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
f.debug_struct("TargetHashAndHashFunction")
.field("target_hash", &self.target_hash)
.field("hash_fn", &"<func impl>")
.field("target_str_repr_to_hash_type_fn", &"<func impl>")
.field("hash_type_to_str_repr_fn", &"<func impl>")
.finish()
}
}
impl<T: CrackTarget> TargetHashAndHashFunction<T> {
/// Transforms the (hex) string representation into the type
/// the hash implementation uses to represent hashes.
pub fn hash_str_repr_to_hash_type(&self, hash_as_string: &str) -> T {
(self.hash_str_repr_to_hash_type_fn)(hash_as_string)
}
/// Hashes a value.
pub fn hash(&self, input: &str) -> T {
(self.hash_fn)(input)
}
/// Returns the target hash that we want to crack.
pub fn target_hash(&self) -> &T {
&self.target_hash
}
/// Returns a (hex) string representation of the hash.
pub fn hash_type_to_str_repr(&self, hash: &T) -> String {
(self.hash_type_to_str_repr_fn)(hash)
}
/// Hashes the input value and returns if it equals the target hash.
/// If so, the hash got cracked.
pub fn hash_matches(&self, input: &str) -> bool {
(self.hash_fn)(input) == self.target_hash
}
}
| true |
d2011a940e9bd9125bc7a20c24f734d085585e08
|
Rust
|
wormtql/rusty-parser
|
/src/bin/fa.rs
|
UTF-8
| 1,609 | 2.734375 | 3 |
[] |
no_license
|
use grammar::automaton::dfa::DFA;
use grammar::automaton::nfa::NFA;
use clap::{Arg, App};
fn main() {
let matches = App::new("Exam Cheater: Automaton")
.version("0.1.0")
.author("wormtql <[email protected]>")
.arg(Arg::with_name("file")
.short("f")
.long("file")
.takes_value(true)
.required(true)
.help("input DFA/NFA file"))
.arg(Arg::with_name("nfa2dfa")
.long("nfa2dfa")
.takes_value(false)
.help("convert NFA to DFA"))
.arg(Arg::with_name("dfa_minimize")
.long("dfa-minimize")
.takes_value(false)
.help("minimize DFA"))
.get_matches();
if matches.is_present("nfa2dfa") {
let nfa = NFA::from_file(matches.value_of("file").unwrap());
let (dfa, table) = nfa.to_dfa_with_process();
println!("{}", table);
println!("DFA:\n{}", dfa);
}
if matches.is_present("dfa_minimize") {
let dfa = DFA::from_file(matches.value_of("file").unwrap());
let (dfa, t1, t2) = dfa.minimize_with_process();
println!("split:\n{}", t1);
println!("result:\n{}", t2);
println!("DFA:\n{}", dfa);
}
// let dfa = DFA::from_file("automaton_test/test.dfa");
//let nfa = NFA::from_file("automaton_test/test.nfa");
//println!("{}", nfa);
// let (dfa2, t1, t2) = dfa.minimize_with_process();
// println!("{}", t1);
// println!("{}", t2);
// println!("{}", dfa2);
// println!("{}", table);
}
| true |
b274e4cfafb9af528ebb1d34db0586629b9b804a
|
Rust
|
Techno-coder/example_os
|
/kernel/src/task/schedulers/round_robin.rs
|
UTF-8
| 548 | 3.46875 | 3 |
[
"MIT"
] |
permissive
|
use alloc::VecDeque;
use super::Thread;
// The simplest scheduler possible
// The next thread is the thread pushed on earliest
// New threads are pushed to the back of the queue
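// e.g. after schedule_new(A), schedule_new(B), schedule_new(C), successive
// calls to schedule_next yield A, then B, then C.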
pub struct RoundRobin {
threads: VecDeque<Thread>,
}
impl RoundRobin {
pub fn new() -> RoundRobin {
RoundRobin {
threads: VecDeque::new(),
}
}
}
impl super::Scheduler for RoundRobin {
fn schedule_next(&mut self) -> Option<Thread> {
self.threads.pop_front()
}
fn schedule_new(&mut self, new_thread: Thread) {
self.threads.push_back(new_thread);
}
}
| true |
43ee448ba14fc8f34b3962830fb8de68b8f7f093
|
Rust
|
bwestlin/advent-of-code-2016-retro
|
/rust/src/day03.rs
|
UTF-8
| 1,806 | 3.28125 | 3 |
[
"MIT"
] |
permissive
|
extern crate utils;
use std::env;
use std::num::ParseIntError;
use std::io::{self, BufReader};
use std::io::prelude::*;
use std::fs::File;
use utils::*;
type Input = Vec<TSides>;
type TSides = [u32; 3];
fn valid_triangle(s1: u32, s2: u32, s3: u32) -> bool {
(s1 + s2 > s3) && (s1 + s3 > s2) && (s2 + s3 > s1)
}
fn part1(input: &Input) -> usize {
input.iter()
.filter(|t| valid_triangle(t[0], t[1], t[2]))
.count()
}
fn part2(input: &Input) -> usize {
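    // Triangles are specified vertically: each group of three rows forms
    // three triangles, one per column.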
input.chunks_exact(3)
.map(|ts| {
(0..3).filter(|&i| valid_triangle(ts[0][i], ts[1][i], ts[2][i])).count()
})
.sum()
}
fn main() {
measure(|| {
let input = input().expect("Input failed");
println!("Part1: {}", part1(&input));
println!("Part2: {}", part2(&input));
});
}
fn read_input<R: Read>(reader: BufReader<R>) -> io::Result<Input> {
fn parse_sides(s: String) -> Result<TSides, ParseIntError> {
let sides: Vec<_> = s.split(' ')
.map(|s| s.trim())
.filter(|&s| s.len() > 0)
.collect();
let parse = |s: &str| s.parse::<u32>();
Ok([parse(sides[0])?, parse(sides[1])?, parse(sides[2])?])
}
Ok(reader.lines().flatten().map(parse_sides).flatten().collect())
}
fn input() -> io::Result<Input> {
    let f = File::open(env::args().nth(1).expect("No input file given"))?;
read_input(BufReader::new(f))
}
#[cfg(test)]
mod tests {
use super::*;
const INPUT: &'static str =
"5 10 25
6 8 10";
fn as_input(s: &str) -> Input {
read_input(BufReader::new(s.split('\n').map(|s| s.trim()).collect::<Vec<_>>().join("\n").as_bytes())).unwrap()
}
#[test]
fn test_part1() {
assert_eq!(part1(&as_input(INPUT)), 1);
}
}
| true |
74a9d6a2f8da7186aa9d03a1c8a15fd81016f3db
|
Rust
|
pyigyli/JRPG-engine
|
/src/battle/enemy.rs
|
UTF-8
| 8,231 | 2.65625 | 3 |
[] |
no_license
|
use ggez::graphics::{spritebatch, Image, DrawParam, draw, Color};
use ggez::nalgebra::Point2;
use ggez::{Context, GameResult};
use ggez::timer::ticks;
use crate::battle::action::{ActionParameters, DamageType};
use crate::battle::state::BattleState;
use crate::party::{Party, InventoryElement};
use crate::party::character::{Character, Animation as CharacterAnimation};
use crate::party::item::InventoryItem;
use crate::menu::notification::Notification;
pub enum Animation {
StartTurn(u8, ActionParameters), // 60 frames
EndTurn, // 30 frames
Hurt, // 60 frames
Dead // 20 frames
}
impl PartialEq for Animation {
fn eq(&self, other: &Self) -> bool {
match self {
Animation::StartTurn(_, _) => {match other {Animation::StartTurn(_, _) => true, _ => false}},
Animation::EndTurn => {match other {Animation::EndTurn => true, _ => false}},
Animation::Hurt => {match other {Animation::Hurt => true, _ => false}},
Animation::Dead => {match other {Animation::Dead => true, _ => false}}
}
}
}
pub struct Enemy {
spritebatch: spritebatch::SpriteBatch,
pub screen_pos: (f32, f32),
pub selection_pos: (usize, usize),
pub size: f32,
turn_active: bool,
opacity: f32,
pub animation: (Animation, usize, usize),
pub x_offset: f32,
pub name: String,
pub state: BattleState,
pub dead: bool,
pub escapeable: bool,
turn_action: for<'r, 's, 't0, 't1> fn(&'r mut Context, &'s mut Enemy, &'t0 mut Party, &'t1 mut Option<Notification>) -> GameResult<()>
}
impl Enemy {
pub fn new(
ctx: &mut Context,
id: u8,
spritefile: String,
screen_pos: (f32, f32),
selection_pos: (usize, usize),
size: f32,
name: String,
level: u8,
hp: u16,
mp: u16,
attack: u16,
defence: u16,
magic: u16,
resistance: u16,
agility: u8,
experience: u32,
poisoned: i8,
sleeping: i8,
back_row: bool,
common_steal: Option<InventoryItem>,
rare_steal: Option<InventoryItem>,
escapeable: bool,
turn_action: for<'r, 's, 't0, 't1> fn(&'r mut Context, &'s mut Enemy, &'t0 mut Party, &'t1 mut Option<Notification>) -> GameResult<()>,
) -> Enemy {
let image = Image::new(ctx, spritefile).unwrap();
let batch = spritebatch::SpriteBatch::new(image);
Enemy {
spritebatch: batch,
screen_pos,
selection_pos,
size,
turn_active: false,
opacity: 1.,
animation: (Animation::EndTurn, 0, 0),
x_offset: 0.,
name,
state: BattleState::new(id, level, hp, mp, attack, defence, magic, resistance, agility, experience, poisoned, sleeping, back_row, common_steal, rare_steal, None),
dead: false,
escapeable,
turn_action
}
}
pub fn update(
&mut self,
ctx: &mut Context,
party: &mut Party,
active_turns: &mut Vec<u8>,
current_turn: &mut u8,
notification: &mut Option<Notification>,
enemy_start_draw_height: f32
) -> GameResult<()> {
self.state.update(current_turn, active_turns)?;
if *current_turn == self.state.id && !self.turn_active {
self.turn_active = true;
let turn_action = self.turn_action;
turn_action(ctx, self, party, notification)?;
}
if self.animation.1 > 0 {
self.animation.1 -= 1;
match self.animation.0 {
Animation::StartTurn(_, _) => {
let animation_time = ticks(ctx) - self.animation.2;
if animation_time <= 10 || (animation_time > 30 && animation_time < 50) {
self.x_offset -= 2.;
} else {
self.x_offset += 2.;
}
},
Animation::EndTurn => (),
Animation::Hurt => {
let animation_time = ticks(ctx) - self.animation.2;
if animation_time > 10 && animation_time <= 30 {
if animation_time % 10 == 0 {
self.opacity = 1.;
} else if animation_time % 5 == 0 {
self.opacity = 0.;
}
}
},
Animation::Dead => self.opacity -= 0.05
}
if self.animation.1 == 0 {
match &mut self.animation.0 {
Animation::StartTurn(target_number, action_parameters) => {
let mut parameters = action_parameters.clone();
match target_number {
0 => self.act_on_target(ctx, &mut party.inventory, notification, &mut parameters, &mut party.first )?,
1 => self.act_on_target(ctx, &mut party.inventory, notification, &mut parameters, &mut party.second)?,
2 => self.act_on_target(ctx, &mut party.inventory, notification, &mut parameters, &mut party.third )?,
_ => self.act_on_target(ctx, &mut party.inventory, notification, &mut parameters, &mut party.fourth)?
}
},
Animation::EndTurn => {
self.turn_active = false;
*current_turn = 0;
self.state.end_turn(ctx, notification, &self.name, (
700. + self.x_offset + self.screen_pos.0 * 70., enemy_start_draw_height + self.screen_pos.1 * 66.
))?;
},
Animation::Hurt => {
self.opacity = 1.;
if self.state.hp == 0 {
self.animation = (Animation::Dead, 20, ticks(ctx));
}
},
Animation::Dead => {
self.opacity = 0.;
self.dead = true;
}
}
}
}
Ok(())
}
pub fn act_on_target(
&mut self,
ctx: &mut Context,
inventory: &mut Vec<InventoryElement>,
notification: &mut Option<Notification>,
action_parameters: &mut ActionParameters,
character: &mut Character
) -> GameResult<()> {
self.animation = (Animation::EndTurn, 30, ticks(ctx));
match action_parameters.damage_type {
DamageType::Healing => {character.receive_battle_action(ctx, inventory, notification, action_parameters)},
_ => {
character.animation = (CharacterAnimation::Hurt, 60, ticks(ctx));
character.receive_battle_action(ctx, inventory, notification, action_parameters)
}
}
}
pub fn receive_battle_action(
&mut self,
ctx: &mut Context,
inventory: &mut Vec<InventoryElement>,
notification: &mut Option<Notification>,
action_parameters: &mut ActionParameters,
enemy_start_draw_height: f32
) -> GameResult<()> {
match &action_parameters.damage_type {
DamageType::None(action) => self.state.receive_none_type_action(ctx, inventory, action_parameters, *action, notification),
DamageType::Item(used_item) => {
*notification = Some(Notification::new(ctx, used_item.get_name()));
let position = self.state.get_damage_position((700. + self.x_offset + self.screen_pos.0 * 70., enemy_start_draw_height + self.screen_pos.1 * 66.));
for inventory_element in inventory {
match inventory_element {
InventoryElement::Item(item, amount) => {
if used_item.get_name() == item.get_name() && *amount > 0 {
*amount -= 1;
}
}
}
}
used_item.apply_item_effect(ctx, &mut self.state, position)
},
DamageType::Healing => self.state.receive_healing(ctx, action_parameters, (
700. + self.x_offset + self.screen_pos.0 * 70., enemy_start_draw_height + self.screen_pos.1 * 66.
)),
_ => {
self.animation = (Animation::Hurt, 60, ticks(ctx));
self.state.receive_damage(ctx, notification, &self.name, action_parameters, (
700. + self.x_offset + self.screen_pos.0 * 70., enemy_start_draw_height + self.screen_pos.1 * 66.
))
}
}
}
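  // Draws the enemy sprite at its battle position, fading it while hurt or dying.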
pub fn draw(&mut self, ctx: &mut Context, enemy_start_draw_height: f32) -> GameResult<()> {
self.spritebatch.add(
match self.animation.0 {
Animation::Hurt | Animation::Dead => DrawParam::new().color(Color::new(1., 1., 1., self.opacity)),
_ => DrawParam::new()
}
);
let param = DrawParam::new()
.dest(Point2::new(700. + self.x_offset + self.screen_pos.0 * 70., enemy_start_draw_height + self.screen_pos.1 * 66.));
draw(ctx, &self.spritebatch, param)?;
self.spritebatch.clear();
self.state.draw(ctx)
}
}
| true |
13d03f8b5585a26af95bcf2724dfea1d62e928a5
|
Rust
|
dicej/ordmap_performance
|
/src/lib.rs
|
UTF-8
| 3,988 | 3.015625 | 3 |
[] |
no_license
|
#![feature(test)]
extern crate im;
extern crate rand;
extern crate test;
use std::iter;
use im::OrdMap;
use im::nodes::btree::{Insert, Node, OrdValue, Remove};
use rand::{Rng, SeedableRng, StdRng};
use test::Bencher;
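// Bare key-value pair implementing OrdValue, so the benchmarks can drive the
// btree Node API directly instead of going through OrdMap.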
#[derive(Clone)]
struct Raw<K, V>(K, V);
impl<K: Ord + Clone, V: Eq + Clone> OrdValue for Raw<K, V> {
type Key = K;
fn extract_key(&self) -> &K {
&self.0
}
fn ptr_eq(&self, other: &Self) -> bool {
self.1 == other.1 && self.0 == other.0
}
}
#[derive(Clone)]
pub struct RawMap<K, V> {
root: Node<Raw<K, V>>,
}
impl<K: Ord + Clone, V: Eq + Clone> RawMap<K, V> {
fn new() -> Self {
RawMap { root: Node::new() }
}
fn insert(&self, k: K, v: V) -> Self {
match self.root.insert(Raw(k, v)) {
Insert::NoChange => self.clone(),
Insert::JustInc => unreachable!(),
Insert::Update(root) => RawMap { root },
Insert::Split(left, median, right) => RawMap {
root: Node::from_split(left, median, right),
},
}
}
fn remove(&self, k: &K) -> Self {
match self.root.remove(k) {
Remove::NoChange => self.clone(),
Remove::Removed(_) => unreachable!(),
Remove::Update(_, root) => RawMap { root },
}
}
fn insert_mut(&mut self, k: K, v: V) {
match self.root.insert_mut(Raw(k, v)) {
Insert::NoChange | Insert::JustInc => {}
Insert::Update(root) => self.root = root,
Insert::Split(left, median, right) => self.root = Node::from_split(left, median, right),
}
}
fn remove_mut(&mut self, k: &K) {
match self.root.remove_mut(k) {
Remove::NoChange => None,
Remove::Removed(pair) => Some(pair),
Remove::Update(pair, root) => {
self.root = root;
Some(pair)
}
};
}
}
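// 1000 pseudo-random keys (fixed seed) paired with a constant value, so every
// benchmark run operates on identical input.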
fn pairs() -> Vec<(u64, u64)> {
StdRng::from_seed(&[2, 2, 3, 7])
.gen_iter()
.zip(iter::repeat(42))
.take(1000)
.collect()
}
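// Each benchmark inserts every pair and then removes every key again, comparing
// OrdMap against the raw Node wrapper, and persistent vs. in-place (_mut) APIs.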
#[bench]
fn add_and_remove(b: &mut Bencher) {
let pairs = pairs();
b.iter(|| {
test::black_box(
pairs.iter().cloned().fold(
pairs
.iter()
.cloned()
.fold(OrdMap::new(), |map, (k, v)| map.insert(k, v)),
|map, (k, _)| map.remove(&k),
),
);
});
}
#[bench]
fn add_and_remove_mut(b: &mut Bencher) {
let pairs = pairs();
b.iter(|| {
test::black_box(
pairs.iter().cloned().fold(
pairs
.iter()
.cloned()
.fold(OrdMap::new(), |mut map, (k, v)| {
map.insert_mut(k, v);
map
}),
|mut map, (k, _)| {
map.remove_mut(&k);
map
},
),
);
});
}
#[bench]
fn add_and_remove_raw(b: &mut Bencher) {
let pairs = pairs();
b.iter(|| {
test::black_box(
pairs.iter().cloned().fold(
pairs
.iter()
.cloned()
.fold(RawMap::new(), |map, (k, v)| map.insert(k, v)),
|map, (k, _)| map.remove(&k),
),
);
});
}
#[bench]
fn add_and_remove_raw_mut(b: &mut Bencher) {
let pairs = pairs();
b.iter(|| {
test::black_box(
pairs.iter().cloned().fold(
pairs
.iter()
.cloned()
.fold(RawMap::new(), |mut map, (k, v)| {
map.insert_mut(k, v);
map
}),
|mut map, (k, _)| {
map.remove_mut(&k);
map
},
),
);
});
}
| true |
51d974a6ca1ce79666b792f55fd6786eb776e4f5
|
Rust
|
jazlalli/getting-started-with-rust
|
/fib/src/main.rs
|
UTF-8
| 970 | 3.84375 | 4 |
[] |
no_license
|
use std::io;
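// Iteratively builds the Fibonacci sequence up to index n (0-based, starting
// from 1, 1), printing the sequence and returning the n-th value; wrapping_add
// keeps large inputs from panicking on overflow.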
fn fib(n: u32) -> u32 {
// starting values for fib numbers and iteration idx
let mut prev_1: u32 = 1;
let mut prev_2: u32 = 1;
let mut iteration: u32 = 2;
let mut tmp: u32;
let mut next: u32 = prev_1;
let mut result: String = format!("{}, {}, ", prev_1, prev_2);
while iteration <= n {
if iteration < 1 {
break;
}
tmp = prev_1.wrapping_add(prev_2);
prev_1 = prev_2;
prev_2 = tmp;
next = tmp;
result.push_str(&(next.to_string() + ", "));
iteration += 1;
}
println!("{}", result);
next
}
fn main() {
println!("Which fibonacci number do you want (zero-based, i.e. the first one is 0)?");
let mut fib_number = String::new();
io::stdin().read_line(&mut fib_number)
.expect("Failed to read input");
let fib_number: u32 = fib_number.trim().parse().unwrap();
println!("Calculating Fib({})", fib_number.to_string());
println!("Result: {}", fib(fib_number));
}
| true |
9a2215490f6fe9e6a802bb4366e70933f0cf25f9
|
Rust
|
thibautRe/rustrogueliketutorial
|
/chapter-60-caverns3/src/ai/visible_ai_system.rs
|
UTF-8
| 2,833 | 2.5625 | 3 |
[
"MIT"
] |
permissive
|
extern crate specs;
use specs::prelude::*;
use crate::{MyTurn, Faction, Position, Map, raws::Reaction, Viewshed, WantsToFlee,
WantsToApproach, Chasing};
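// Turn-scheduled AI: each entity scans its viewshed, evaluates faction reactions
// to everything it can see, and queues an approach/chase or flee intent.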
pub struct VisibleAI {}
impl<'a> System<'a> for VisibleAI {
#[allow(clippy::type_complexity)]
type SystemData = (
ReadStorage<'a, MyTurn>,
ReadStorage<'a, Faction>,
ReadStorage<'a, Position>,
ReadExpect<'a, Map>,
WriteStorage<'a, WantsToApproach>,
WriteStorage<'a, WantsToFlee>,
Entities<'a>,
ReadExpect<'a, Entity>,
ReadStorage<'a, Viewshed>,
WriteStorage<'a, Chasing>
);
fn run(&mut self, data : Self::SystemData) {
let (turns, factions, positions, map, mut want_approach, mut want_flee, entities, player,
viewsheds, mut chasing) = data;
for (entity, _turn, my_faction, pos, viewshed) in (&entities, &turns, &factions, &positions, &viewsheds).join() {
if entity != *player {
let my_idx = map.xy_idx(pos.x, pos.y);
let mut reactions : Vec<(usize, Reaction, Entity)> = Vec::new();
let mut flee : Vec<usize> = Vec::new();
for visible_tile in viewshed.visible_tiles.iter() {
let idx = map.xy_idx(visible_tile.x, visible_tile.y);
if my_idx != idx {
evaluate(idx, &map, &factions, &my_faction.name, &mut reactions);
}
}
let mut done = false;
for reaction in reactions.iter() {
match reaction.1 {
Reaction::Attack => {
want_approach.insert(entity, WantsToApproach{ idx: reaction.0 as i32 }).expect("Unable to insert");
chasing.insert(entity, Chasing{ target: reaction.2}).expect("Unable to insert");
done = true;
}
Reaction::Flee => {
flee.push(reaction.0);
}
_ => {}
}
}
if !done && !flee.is_empty() {
want_flee.insert(entity, WantsToFlee{ indices : flee }).expect("Unable to insert");
}
}
}
}
}
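// Records how `my_faction` reacts to every entity standing on the given map tile.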
fn evaluate(idx : usize, map : &Map, factions : &ReadStorage<Faction>, my_faction : &str, reactions : &mut Vec<(usize, Reaction, Entity)>) {
for other_entity in map.tile_content[idx].iter() {
if let Some(faction) = factions.get(*other_entity) {
reactions.push((
idx,
crate::raws::faction_reaction(my_faction, &faction.name, &crate::raws::RAWS.lock().unwrap()),
*other_entity
));
}
}
}
| true |
2eb084a142a26ae9be18cc7074d93516583fc5c5
|
Rust
|
aoc2020/day4_rust
|
/src/main.rs
|
UTF-8
| 3,587 | 2.984375 | 3 |
[] |
no_license
|
use std::fs::File;
use std::io::{self, Lines, BufReader, BufRead};
use std::path::Path;
use regex::Regex;
use std::collections::{HashMap, HashSet};
// const regex : Regex = Regex::new(r"(\d+)-(\d+) (.): (.*)").unwrap();
fn main() {
let pws = read_passports();
let pws_with_all_fields: Vec<&Passport> = pws.iter().filter(|p| p.has_required_fields()).collect();
let valid1 = pws_with_all_fields.len();
let pws_with_valids : Vec<&&Passport> = pws_with_all_fields.iter().filter(|x| x.all_fields_valid()).collect();
let valid2 = pws_with_valids.len();
println!("Answer 1 : {}", valid1);
println!("Answer 2 : {}", valid2);
}
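// A passport is a bag of key:value fields parsed from one blank-line-separated block.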
struct Passport {
fields: HashMap<String, String>
}
impl Passport {
fn new(lines: Vec<String>) -> Passport {
let mut fields: HashMap<String, String> = HashMap::new();
for line in lines {
println!("Parsing line: {}", line.clone());
let tokens: Vec<(&str, &str)> = line.as_str().split(' ')
.map(|x| x.trim())
.map(|x| Passport::to_pair(x))
.collect();
for (key, value) in tokens {
fields.insert(String::from(key), String::from(value));
}
}
return Passport { fields };
}
fn to_pair(s: &str) -> (&str, &str) {
let s: Vec<&str> = s.split(':').collect();
(s.get(0).unwrap(), s.get(1).unwrap())
}
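    // Part 1: true when all seven mandatory fields are present (cid stays optional).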
fn has_required_fields(&self) -> bool {
let keys: HashSet<&String> = self.fields.keys().collect();
for key in ["byr", "iyr", "eyr", "hgt", "hcl", "ecl", "pid"].iter() {
if !keys.contains(&key.to_string()) {
println!("Missing: {}", key);
return false;
}
}
return true;
}
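    // Validates one field's value against a regex; assumes the field exists.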
fn field_valid(&self, field: &str, validation: &str) -> bool {
let regex : Regex = Regex::new(validation).unwrap();
let value = self.fields.get(field).unwrap();
        let is_valid = regex.is_match(value);
// println!("CHECK {} {} -> {}",field, validation, is_valid);
is_valid
}
fn all_fields_valid(&self) -> bool {
let byr = self.field_valid("byr",r"^(19[2-9][0-9]|200[0-2])$");
        let iyr = self.field_valid("iyr",r"^20(1[0-9]|20)$");
let eyr = self.field_valid("eyr",r"^20(2[0-9]|30)$");
let hgt = self.field_valid("hgt",r"^(1([5-8][0-9]|9[0-3])cm|(59|6[0-9]|7[0-6])in)$");
let hcl = self.field_valid("hcl",r"^#[0-9a-f]{6}$");
        let ecl = self.field_valid("ecl",r"^(amb|blu|brn|gry|grn|oth|hzl)$");
let pid = self.field_valid("pid", r"^[0-9]{9}$");
        return byr && iyr && eyr && hgt && hcl && ecl && pid;
}
}
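// Reads input.txt, splitting on blank lines and parsing each block into a Passport.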
fn read_passports() -> Vec<Passport> {
let mut passports: Vec<Passport> = vec!();
if let Ok(lines) = read_lines("input.txt") {
let mut pass_lines: Vec<String> = vec!();
for line in lines {
if let Ok(s) = line {
// println!("Processing: {}", s);
if s.is_empty() {
passports.push(Passport::new(pass_lines));
pass_lines = vec!();
} else {
pass_lines.push(s);
}
}
}
passports.push(Passport::new(pass_lines));
}
return passports;
}
fn read_lines<P>(filename: P) -> io::Result<Lines<BufReader<File>>>
where P: AsRef<Path>, {
let file = File::open(filename)?;
Ok(BufReader::new(file).lines())
}
| true |
d2259a93377a49246e0a9695ba9132b2dbc2aaf4
|
Rust
|
thominspace/aoc_2020
|
/day_17/src/main.rs
|
UTF-8
| 26,487 | 3.1875 | 3 |
[] |
no_license
|
use std::time::{Instant};
use std::io::{Error};
use std::fs;
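// Flat, dynamically growable 3D grid; `expansion` records how far the logical
// origin has shifted so negative coordinates stay addressable after each growth step.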
#[derive(Debug)]
struct Grid3 {
flat_grid: Vec<usize>,
grid_width: usize,
total_grid_cells: usize,
expansion: usize
}
impl Grid3 {
// if we ever hit a point where we need to expand the grid, handle that here
// this includes a full copy of the grid in order to reindex it
fn expand_grid(&mut self) {
// save the old grid
let temp_grid: Vec<usize> = self.flat_grid.clone();
// let temp_grid_width = self.grid_width.clone();
        // need to translate all of the indices in the grid into cartesian coords, so we can reinsert them into the new grid
// we also need to do this before we increase the grid size, since it requires old info
let mut temp_reindex: Vec<(isize,isize,isize)> = Vec::with_capacity(temp_grid.len());
for ix in 0..temp_grid.len() {
temp_reindex.push(self.ix_to_cart(ix).unwrap());
}
// expand the grid
self.grid_width += 2;
self.expansion += 1;
self.total_grid_cells = self.grid_width.pow(3);
// reinit the grid
self.flat_grid = vec![0; self.total_grid_cells];
// fill with old data
for (ix, (x, y, z)) in temp_reindex.iter().enumerate() {
// convert the old cartesian into new index
let new_ix = self.cart_to_ix(*x, *y, *z).unwrap();
// insert the data
self.flat_grid[new_ix] = temp_grid[ix];
}
}
// convert a cartesian position to an array index
fn cart_to_ix(&self, x: isize, y: isize, z: isize) -> Option<usize> {
let grid_isize = self.grid_width as isize;
let expansion_isize = self.expansion as isize;
let return_val = (x+expansion_isize) + ((y+expansion_isize)*grid_isize) + ((z+expansion_isize)*grid_isize*grid_isize);
// check boundary conditions
if ((x+expansion_isize) >= 0) && ((x+expansion_isize) < (self.grid_width as isize)) &&
((y+expansion_isize) >= 0) && ((y+expansion_isize) < (self.grid_width as isize)) &&
((z+expansion_isize) >= 0) && ((z+expansion_isize) < (self.grid_width as isize)) {
Some(return_val as usize)
} else {
None
}
}
// converts an array index to a cartesian location
fn ix_to_cart(&self, index: usize) -> Option<(isize, isize, isize)> {
let max_ix = self.grid_width.pow(3);
// let return_val = (x+expansion_isize) + ((y+expansion_isize)*grid_isize) + ((z+expansion_isize)*grid_isize*grid_isize);
// check boundary conditions
if index < max_ix {
let return_x = (index % self.grid_width) as isize - self.expansion as isize;
let return_y = ((index / self.grid_width) % self.grid_width) as isize - self.expansion as isize;
let return_z = (index / (self.grid_width.pow(2))) as isize - self.expansion as isize;
Some((return_x, return_y, return_z))
} else {
None
}
}
// finds (and verifies) all neighbors given an index
fn get_neighbors_ix(&self, index: usize) -> Vec<Option<usize>> {
let mut return_vec: Vec<Option<usize>> = Vec::new();
let this_point_option: Option<(isize, isize, isize)> = self.ix_to_cart(index);
match this_point_option {
Some((this_x, this_y, this_z)) => {
// cycle through neighbors
for delta_z in -1..2 {
for delta_y in -1..2 {
for delta_x in -1..2 {
// get the neighbor at this delta, but skip the 0,0,0 case
if (delta_x == 0) && (delta_y == 0) && (delta_z == 0) {
// do nothing
} else {
// return the location (which is an option)
return_vec.push(self.cart_to_ix(this_x+(delta_x as isize), this_y+(delta_y as isize), this_z+(delta_z as isize)))
}
}
}
}
},
None => println!("PANIK"),
}
return return_vec;
}
// check just the perimeter for 1 values. if there are none, then we know we do not need to extend the grid based on the rules
fn check_perimiter(&self) -> bool {
// get the expansion as an int
let search_min = -(self.expansion as isize);
let search_max = (self.grid_width-self.expansion) as isize;
// search each face. This is going to contain overlap in the search space, but for now that's fine.
// x faces
for &ix in [search_min, search_max-1].iter() {
for iy in search_min..search_max {
for iz in search_min..search_max {
let this_coord_as_ix = self.cart_to_ix(ix, iy, iz);
match this_coord_as_ix {
Some(index) => {
if self.flat_grid[index] == 1 {
return true
}
},
None => println!("PANIK while checking perimiter")
}
}
}
}
// y faces
for &iy in [search_min, search_max-1].iter() {
for ix in search_min..search_max {
for iz in search_min..search_max {
let this_coord_as_ix = self.cart_to_ix(ix, iy, iz);
match this_coord_as_ix {
Some(index) => {
if self.flat_grid[index] == 1 {
return true
}
},
None => println!("PANIK while checking perimiter")
}
}
}
}
// z faces
for &iz in [search_min, search_max-1].iter() {
for iy in search_min..search_max {
for ix in search_min..search_max {
let this_coord_as_ix = self.cart_to_ix(ix, iy, iz);
match this_coord_as_ix {
Some(index) => {
if self.flat_grid[index] == 1 {
return true
}
},
None => println!("PANIK while checking perimiter")
}
}
}
}
// if we checked everything and found nothing, we're done here
return false;
}
}
fn build_grid3(grid_width: usize) -> Grid3 {
// grid initializer
Grid3 {
flat_grid: vec![0; grid_width.pow(3)],
grid_width: grid_width,
total_grid_cells: grid_width.pow(3),
expansion: 0
}
}
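// Part 2 variant of Grid3 with an extra w axis; the layout and expansion logic
// are otherwise identical.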
#[derive(Debug)]
struct Grid4 {
flat_grid: Vec<usize>,
grid_width: usize,
total_grid_cells: usize,
expansion: usize
}
impl Grid4 {
// if we ever hit a point where we need to expand the grid, handle that here
// this includes a full copy of the grid in order to reindex it
fn expand_grid(&mut self) {
// save the old grid
let temp_grid: Vec<usize> = self.flat_grid.clone();
// let temp_grid_width = self.grid_width.clone();
        // need to translate all of the indices in the grid into cartesian coords, so we can reinsert them into the new grid
// we also need to do this before we increase the grid size, since it requires old info
let mut temp_reindex: Vec<(isize,isize,isize,isize)> = Vec::with_capacity(temp_grid.len());
for ix in 0..temp_grid.len() {
temp_reindex.push(self.ix_to_cart(ix).unwrap());
}
// expand the grid
self.grid_width += 2;
self.expansion += 1;
self.total_grid_cells = self.grid_width.pow(4);
// reinit the grid
self.flat_grid = vec![0; self.total_grid_cells];
// fill with old data
for (ix, (x, y, z, w)) in temp_reindex.iter().enumerate() {
// convert the old cartesian into new index
let new_ix = self.cart_to_ix(*x, *y, *z, *w).unwrap();
// insert the data
self.flat_grid[new_ix] = temp_grid[ix];
}
}
// convert a cartesian position to an array index
fn cart_to_ix(&self, x: isize, y: isize, z: isize, w: isize) -> Option<usize> {
let grid_isize = self.grid_width as isize;
let expansion_isize = self.expansion as isize;
let return_val = (x+expansion_isize) + ((y+expansion_isize)*grid_isize) + ((z+expansion_isize)*grid_isize*grid_isize) + ((w+expansion_isize)*grid_isize*grid_isize*grid_isize);
// check boundary conditions
if ((x+expansion_isize) >= 0) && ((x+expansion_isize) < (self.grid_width as isize)) &&
((y+expansion_isize) >= 0) && ((y+expansion_isize) < (self.grid_width as isize)) &&
((z+expansion_isize) >= 0) && ((z+expansion_isize) < (self.grid_width as isize)) &&
((w+expansion_isize) >= 0) && ((w+expansion_isize) < (self.grid_width as isize)) {
Some(return_val as usize)
} else {
None
}
}
// converts an array index to a cartesian location
fn ix_to_cart(&self, index: usize) -> Option<(isize, isize, isize, isize)> {
let max_ix = self.grid_width.pow(4);
// let return_val = (x+expansion_isize) + ((y+expansion_isize)*grid_isize) + ((z+expansion_isize)*grid_isize*grid_isize);
// check boundary conditions
if index < max_ix {
let return_x = (index % self.grid_width) as isize - self.expansion as isize;
let return_y = ((index / self.grid_width) % self.grid_width) as isize - self.expansion as isize;
let return_z = ((index / (self.grid_width.pow(2))) % self.grid_width) as isize - self.expansion as isize;
let return_w = (index / (self.grid_width.pow(3))) as isize - self.expansion as isize;
Some((return_x, return_y, return_z, return_w))
} else {
None
}
}
// finds (and verifies) all neighbors given an index
fn get_neighbors_ix(&self, index: usize) -> Vec<Option<usize>> {
let mut return_vec: Vec<Option<usize>> = Vec::new();
let this_point_option: Option<(isize, isize, isize, isize)> = self.ix_to_cart(index);
match this_point_option {
Some((this_x, this_y, this_z, this_w)) => {
// cycle through neighbors
for delta_z in -1..2 {
for delta_y in -1..2 {
for delta_x in -1..2 {
for delta_w in -1..2 {
                                // get the neighbor at this delta, but skip the 0,0,0,0 case
if (delta_x == 0) && (delta_y == 0) && (delta_z == 0) && (delta_w == 0) {
// do nothing
} else {
// return the location (which is an option)
return_vec.push(self.cart_to_ix(this_x+(delta_x as isize),
this_y+(delta_y as isize),
this_z+(delta_z as isize),
this_w+(delta_w as isize)));
}
}
}
}
}
},
None => println!("PANIK"),
}
return return_vec;
}
// check just the perimeter for 1 values. if there are none, then we know we do not need to extend the grid based on the rules
fn check_perimiter(&self) -> bool {
// get the expansion as an int
let search_min = -(self.expansion as isize);
let search_max = (self.grid_width-self.expansion) as isize;
// search each face. This is going to contain overlap in the search space, but for now that's fine.
// x faces
for &ix in [search_min, search_max-1].iter() {
for iy in search_min..search_max {
for iz in search_min..search_max {
for iw in search_min..search_max {
let this_coord_as_ix = self.cart_to_ix(ix, iy, iz, iw);
match this_coord_as_ix {
Some(index) => {
if self.flat_grid[index] == 1 {
return true
}
},
None => println!("PANIK while checking perimiter")
}
}
}
}
}
// y faces
for &iy in [search_min, search_max-1].iter() {
for ix in search_min..search_max {
for iz in search_min..search_max {
for iw in search_min..search_max {
let this_coord_as_ix = self.cart_to_ix(ix, iy, iz, iw);
match this_coord_as_ix {
Some(index) => {
if self.flat_grid[index] == 1 {
return true
}
},
None => println!("PANIK while checking perimiter")
}
}
}
}
}
// z faces
for &iz in [search_min, search_max-1].iter() {
for iy in search_min..search_max {
for ix in search_min..search_max {
for iw in search_min..search_max {
let this_coord_as_ix = self.cart_to_ix(ix, iy, iz, iw);
match this_coord_as_ix {
Some(index) => {
if self.flat_grid[index] == 1 {
return true
}
},
None => println!("PANIK while checking perimiter")
}
}
}
}
}
// w faces
for &iw in [search_min, search_max-1].iter() {
for iy in search_min..search_max {
for ix in search_min..search_max {
for iz in search_min..search_max {
let this_coord_as_ix = self.cart_to_ix(ix, iy, iz, iw);
match this_coord_as_ix {
Some(index) => {
if self.flat_grid[index] == 1 {
return true
}
},
None => println!("PANIK while checking perimiter")
}
}
}
}
}
// if we checked everything and found nothing, we're done here
return false;
}
}
fn build_grid4(grid_width: usize) -> Grid4 {
// grid initializer
Grid4 {
flat_grid: vec![0; grid_width.pow(4)],
grid_width: grid_width,
total_grid_cells: grid_width.pow(4),
expansion: 0
}
}
fn main() -> Result<(), Error> {
// seeing as pretty much every puzzle is going to be reading in a file and then manipulating the input,
// I might as well just build main out to be a template
// read the input for today's puzzle
let filepath = "input.txt";
// let filepath = "test_input.txt";
let file_data = fs::read_to_string(filepath).expect("failed to read file"); // returns a vector of strings split by line
// part 1
let start = Instant::now();
part_1(&file_data);
let duration = start.elapsed();
println!("Time elapsed in part 1: {:?}", duration);
// part 2
let start = Instant::now();
part_2(&file_data);
let duration = start.elapsed();
println!("Time elapsed in part 2: {:?}", duration);
Ok(())
}
fn part_1(file_data: &String) {
    let splits: Vec<&str> = file_data.split("\n").collect(); // this will split each line separately
// trying to make it a dynamically expanding grid
// grab the first line as the width of the grid
let initial_grid_width = splits[0].len();
let mut my_grid = build_grid3(initial_grid_width);
    // insert the initial data into the grid
for (iy, this_str) in splits.iter().enumerate() {
let this_string = String::from(*this_str);
for (ix, this_char) in this_string.chars().enumerate() {
let mut this_cell: usize = 0;
match this_char {
'.' => this_cell = 0,
'#' => this_cell = 1,
_ => ()
}
let this_ix = my_grid.cart_to_ix(ix as isize, iy as isize, 0 as isize).unwrap();
my_grid.flat_grid[this_ix] = this_cell;
}
}
// time iteration
for _ in 0..6 {
        // based on the rules, if the outermost shell of the space is empty we don't need to expand.
// Otherwise, we might, so go ahead and expand
if my_grid.check_perimiter() {
my_grid.expand_grid();
}
// make a copy of the grid. Yeah, it sucks, but we can't overwrite in place
let mut temp_grid = my_grid.flat_grid.clone();
// now enforce the rules
// If a cube is active and exactly 2 or 3 of its neighbors are also active, the cube remains active. Otherwise, the cube becomes inactive.
// If a cube is inactive but exactly 3 of its neighbors are active, the cube becomes active. Otherwise, the cube remains inactive.
// go through each cell (using the main) and overwrite into the copy
for index in 0..my_grid.total_grid_cells {
// need the neighbors so we can check them
let these_neighbors: Vec<Option<usize>> = my_grid.get_neighbors_ix(index);
// need to track the neighbor conditions (as per da rulez)
let mut neighbor_check = 0;
match my_grid.flat_grid[index] {
0 => { // cube is inactive
for this_neighbor in these_neighbors {
// check the neighbor for a valid cell (some may not exist)
match this_neighbor {
Some(neighbor_ix) => {
                                // we found a valid cell. add the value (because we need some "exactly"s in the rules)
neighbor_check += my_grid.flat_grid[neighbor_ix];
},
None => (),
}
}
// we have now checked every neighbor, so now check da rulez
if neighbor_check == 3 {
// we have met the condition wherein we want to activate the cell
temp_grid[index] = 1;
} else {
// the cell should remain inactive, however we want to assure that state is set in the copy.
temp_grid[index] = 0;
}
},
1 => { // cube is active
for this_neighbor in these_neighbors {
// check the neighbor for a valid cell (some may not exist)
match this_neighbor {
Some(neighbor_ix) => {
// we found a valid cell. add the value (beacuse we need some "exactly"s in the rules)
neighbor_check += my_grid.flat_grid[neighbor_ix];
},
None => (),
}
}
// we have now checked every neighbor, so now check da rulez
if (neighbor_check == 3) || (neighbor_check == 2) {
                        // we have met the condition wherein we want the cell to remain active
temp_grid[index] = 1;
} else {
// the cell should become inactive
temp_grid[index] = 0;
}
},
_ => println!("PANIK in update loop")
}
}
// the temp grid should now be completely updated. copy it back in
my_grid.flat_grid = temp_grid.clone();
}
// now count everything
let mut count = 0;
for index in 0..my_grid.total_grid_cells {
count += my_grid.flat_grid[index];
}
println!("count {:?}", count);
}
fn part_2(file_data: &String) {
// part 2 is very similar to part 1, but with one more dimension!
    // I could make the grid completely general, or I could just do it the fast way and make it 4d now
    let splits: Vec<&str> = file_data.split("\n").collect(); // this will split each line separately
// trying to make it a dynamically expanding grid
// grab the first line as the width of the grid
let initial_grid_width = splits[0].len();
let mut my_grid = build_grid4(initial_grid_width);
    // insert the initial data into the grid
for (iy, this_str) in splits.iter().enumerate() {
let this_string = String::from(*this_str);
for (ix, this_char) in this_string.chars().enumerate() {
let mut this_cell: usize = 0;
match this_char {
'.' => this_cell = 0,
'#' => this_cell = 1,
_ => ()
}
let this_ix = my_grid.cart_to_ix(ix as isize, iy as isize, 0 as isize, 0 as isize).unwrap();
my_grid.flat_grid[this_ix] = this_cell;
}
}
// time iteration
for _ in 0..6 {
        // based on the rules, if the outermost shell of the space is empty we don't need to expand.
// Otherwise, we might, so go ahead and expand
if my_grid.check_perimiter() {
my_grid.expand_grid();
}
// make a copy of the grid. Yeah, it sucks, but we can't overwrite in place
let mut temp_grid = my_grid.flat_grid.clone();
// now enforce the rules
// If a cube is active and exactly 2 or 3 of its neighbors are also active, the cube remains active. Otherwise, the cube becomes inactive.
// If a cube is inactive but exactly 3 of its neighbors are active, the cube becomes active. Otherwise, the cube remains inactive.
// go through each cell (using the main) and overwrite into the copy
for index in 0..my_grid.total_grid_cells {
// need the neighbors so we can check them
let these_neighbors: Vec<Option<usize>> = my_grid.get_neighbors_ix(index);
// need to track the neighbor conditions (as per da rulez)
let mut neighbor_check = 0;
match my_grid.flat_grid[index] {
0 => { // cube is inactive
'neighbor_check: for this_neighbor in these_neighbors {
// check the neighbor for a valid cell (some may not exist)
match this_neighbor {
Some(neighbor_ix) => {
                                // we found a valid cell. add the value (because we need some "exactly"s in the rules)
neighbor_check += my_grid.flat_grid[neighbor_ix];
if neighbor_check > 3 { // we do have a break condition! all rules require 3 or less
break 'neighbor_check
}
},
None => (),
}
}
// we have now checked every neighbor, so now check da rulez
if neighbor_check == 3 {
// we have met the condition wherein we want to activate the cell
temp_grid[index] = 1;
} else {
// the cell should remain inactive, however we want to assure that state is set in the copy.
temp_grid[index] = 0;
}
},
1 => { // cube is active
for this_neighbor in these_neighbors {
// check the neighbor for a valid cell (some may not exist)
match this_neighbor {
Some(neighbor_ix) => {
                                // we found a valid cell. add the value (because we need some "exactly"s in the rules)
neighbor_check += my_grid.flat_grid[neighbor_ix];
},
None => (),
}
}
// we have now checked every neighbor, so now check da rulez
if (neighbor_check == 3) || (neighbor_check == 2) {
                        // we have met the condition wherein we want the cell to remain active
temp_grid[index] = 1;
} else {
// the cell should become inactive
temp_grid[index] = 0;
}
},
_ => println!("PANIK in update loop")
}
}
// the temp grid should now be completely updated. copy it back in
my_grid.flat_grid = temp_grid.clone();
}
// now count everything
let mut count = 0;
for index in 0..my_grid.total_grid_cells {
count += my_grid.flat_grid[index];
}
println!("count {:?}", count);
}
| true |