| blob_id (string, length 40) | language (1 class) | repo_name (string, length 5-140) | path (string, length 5-183) | src_encoding (6 classes) | length_bytes (int64, 12-5.32M) | score (float64, 2.52-4.94) | int_score (int64, 3-5) | detected_licenses (list, 0-47 items) | license_type (2 classes) | text (string, length 12-5.32M) | download_success (bool, 1 class) |
|---|---|---|---|---|---|---|---|---|---|---|---|
99fec133c9146b63c806ea8a15e204c826520a23
|
Rust
|
mplanchard/cuid-rust
|
/crates/cuid1/src/bin.rs
|
UTF-8
| 2,212 | 3.265625 | 3 |
[
"MIT"
] |
permissive
|
#[allow(deprecated)]
use cuid::{cuid, slug};
use std::{
env::{self, Args},
process::exit,
};
/// Generate a new CUID and print it to stdout
pub fn main() {
let args: CuidArgs = env::args().into();
let res = match args.v2 {
true => {
if args.slug {
// construct a v2 cuid with the same length as cuid1 slugs
Ok(cuid2::CuidConstructor::new().with_length(10).create_id())
} else {
Ok(cuid2::create_id())
}
}
false => {
if args.slug {
#[allow(deprecated)]
slug()
} else {
#[allow(deprecated)]
cuid()
}
}
};
match res {
Ok(id) => println!("{}", id),
Err(err) => {
eprintln!("{:?}", err);
exit(1)
}
}
}
const HELP: &str = r#"Usage: cuid [OPTION]...
Generate and print a CUID.
Options:
--v2 generate a v2 CUID/slug (this will eventually be the default)
--slug generate a slug instead of a full CUID
-h, --help display this help and exit
-v, --version display version information and exit"#;
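// Example invocation (illustrative; not part of the original source):
// `cuid --v2 --slug` takes the `args.v2` + `args.slug` path in `main` above
// and prints a 10-character v2 id.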
const VERSION: &str = env!("CARGO_PKG_VERSION");
/// Commandline arguments for the CUID binary
#[derive(Debug)]
struct CuidArgs {
/// Whether to produce a slug instead of a CUID
slug: bool,
v2: bool,
}
impl From<Args> for CuidArgs {
fn from(args: Args) -> Self {
let mut slug = false;
let mut v2 = false;
// The first argument should be the binary name. Skip it.
args.skip(1).for_each(|arg| match arg.as_str() {
"-h" | "--help" => {
println!("{}", HELP);
exit(0);
}
"-v" | "--version" => {
println!("{}", VERSION);
exit(0);
}
"--slug" => slug = true,
"--v2" => v2 = true,
_ => {
println!("error: unrecognized argument {}", arg);
println!();
println!("{}", HELP);
exit(1);
}
});
CuidArgs { slug, v2 }
}
}
| true |
d09f9a6514644f45e270994005ea0e0fca5f41c3
|
Rust
|
wang-q/intspan
|
/src/cmd_spanr/stat.rs
|
UTF-8
| 3,801 | 2.90625 | 3 |
[
"MIT"
] |
permissive
|
use clap::*;
use intspan::*;
use serde_json::Value;
use std::collections::BTreeMap;
// Create clap subcommand arguments
pub fn make_subcommand() -> Command {
Command::new("stat")
.about("Coverage on chromosomes for runlists")
.arg(
Arg::new("chr.sizes")
.required(true)
.index(1)
.help("Sets the input file to use"),
)
.arg(
Arg::new("infile")
.required(true)
.index(2)
.help("Sets the input file to use"),
)
.arg(
Arg::new("all")
.long("all")
.action(ArgAction::SetTrue)
.help("Only write whole genome stats"),
)
.arg(
Arg::new("outfile")
.long("outfile")
.short('o')
.num_args(1)
.default_value("stdout")
.help("Output filename. [stdout] for screen"),
)
}
// command implementation
pub fn execute(args: &ArgMatches) -> anyhow::Result<()> {
//----------------------------
// Loading
//----------------------------
let sizes = read_sizes(args.get_one::<String>("chr.sizes").unwrap());
let json: BTreeMap<String, Value> = read_json(args.get_one::<String>("infile").unwrap());
let is_multi: bool = json.values().next().unwrap().is_object();
let set_of = json2set_m(&json);
let is_all = args.get_flag("all");
//----------------------------
// Operating
//----------------------------
let mut lines: Vec<String> = Vec::new(); // Avoid lifetime problems
let mut header = "key,chr,chrLength,size,coverage".to_string();
if is_multi {
if is_all {
header = header.replace("chr,", "");
}
lines.push(header);
for (name, set) in &set_of {
let key_lines = csv_lines(set, &sizes, is_all, Some(name));
lines.push(key_lines);
}
} else {
header = header.replace("key,", "");
if is_all {
header = header.replace("chr,", "");
}
lines.push(header);
let key_lines = csv_lines(set_of.get("__single").unwrap(), &sizes, is_all, None);
lines.push(key_lines);
}
//----------------------------
// Output
//----------------------------
write_lines(
args.get_one::<String>("outfile").unwrap(),
&lines.iter().map(AsRef::as_ref).collect(),
)?;
Ok(())
}
fn csv_lines(
set: &BTreeMap<String, IntSpan>,
sizes: &BTreeMap<String, i32>,
is_all: bool,
prefix: Option<&str>,
) -> String {
let mut lines = String::new();
let mut all_length: i64 = 0;
let mut all_size: i64 = 0;
for chr in set.keys() {
let length = *sizes.get(chr).unwrap();
let size = set.get(chr).unwrap().cardinality();
let line = format!(
"{},{},{},{:.4}\n",
chr,
length,
size,
size as f32 / length as f32
);
if let Some(s) = prefix {
lines.push_str(format!("{},", s).as_str())
};
lines.push_str(line.as_str());
all_length += length as i64;
all_size += size as i64;
}
let mut all_line = format!(
"{},{},{},{:.4}\n",
"all",
all_length,
all_size,
all_size as f64 / all_length as f64
);
// only keep whole genome
if is_all {
lines = String::new();
all_line = all_line.replace("all,", "");
}
if let Some(s) = prefix {
all_line.insert_str(0, format!("{},", s).as_str())
};
lines.push_str(all_line.as_str());
// Remove last LF, as write_lines will append one
lines.trim_end().to_string()
}
| true |
df3baa6f36ce42cc79437fff28a20847228164a9
|
Rust
|
longlb/exercism
|
/rust/luhn/src/lib.rs
|
UTF-8
| 580 | 3.453125 | 3 |
[] |
no_license
|
/// Check a Luhn checksum.
pub fn is_valid(code: &str) -> bool {
let code = &code.replace(" ", "");
if code.len() < 2 || code.find(|c: char| !c.is_numeric()) != None {
return false;
}
code.chars()
.rev()
.enumerate()
.map(|(i, c)| match i % 2 == 1 {
true => double(c.to_digit(10).unwrap()),
false => c.to_digit(10).unwrap(),
})
.sum::<u32>()
% 10
== 0
}
fn double(n: u32) -> u32 {
2 * n
- match n < 5 {
true => 0,
false => 9,
}
}
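// A minimal usage sketch (added for illustration; not part of the original
// file). The numbers below are standard Luhn test strings.
#[cfg(test)]
mod tests {
    use super::is_valid;

    #[test]
    fn sample_numbers() {
        // "4539 3195 0343 6467" sums to 80, a multiple of 10, so it is valid.
        assert!(is_valid("4539 3195 0343 6467"));
        // "8273 1232 7352 0569" sums to 57, so it is rejected.
        assert!(!is_valid("8273 1232 7352 0569"));
        // Strings shorter than two digits are rejected outright.
        assert!(!is_valid("0"));
    }
}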
| true |
0cb9b959ca860b2eca03157ba756a33d37abcc4f
|
Rust
|
rodolf0/graphstack
|
/src/lib.rs
|
UTF-8
| 7,239 | 3.359375 | 3 |
[] |
no_license
|
use std::collections::HashMap;
pub struct GraphStack<T> {
/// Holds the elements inserted into the GraphStack.
items: Vec<T>,
/// In a stack, each element sits on top of another.
/// In a graph-stack, each item may have multiple ancestors.
    /// The indexes in `ancestors` directly map to items in `items`.
ancestors: HashMap<usize, Vec<usize>>,
}
impl<T> GraphStack<T> {
pub fn new() -> Self {
GraphStack {
items: Vec::new(),
ancestors: HashMap::new(),
}
}
/// Adds an element to the graph-stack and returns an item-id for it.
/// This `id` can later be used to add ancestors for this item.
pub fn push(&mut self, value: T, ancestors: &[usize]) -> usize {
// Check that each ancestor is valid
if ancestors.iter().any(|a| *a >= self.items.len()) {
panic!(
"Invalid ancestors. GS size={}, ancestors={:#?}",
self.items.len(),
ancestors
);
}
self.items.push(value);
let item_id = self.items.len() - 1;
self.ancestors
.insert(item_id, ancestors.iter().cloned().collect());
item_id
}
pub fn add_ancestors(&mut self, id: usize, ancestors: &[usize]) {
if !self.ancestors.contains_key(&id) {
panic!("Invalid ancestor id={}", id);
}
// TODO: detect cycles
self.ancestors.entry(id).or_default().extend(ancestors);
}
/// Build an iterator over the stacks encoded by this GraphStack.
/// A `start_item` is required because there may be multiple top items.
pub fn stacks(&self, start_item: usize) -> Stacks<T> {
Stacks::new(&self, start_item)
}
}
/// A cursor for keeping track when iterating over a GraphStack.
#[derive(Debug)]
struct Cursor {
item: usize,
ancestor: usize,
}
/// An iterator to retrieve stacks encoded in GraphStack.
pub struct Stacks<'a, T> {
/// Need a cursor for each item in the GraphStack to track
/// which of its ancestors is currently being traversed.
cursors: Vec<Cursor>,
/// This is where the current stack to be returned will be unpacked.
unstack: Vec<&'a T>,
/// A reference to the GraphStack that this iterator is traversing.
gs: &'a GraphStack<T>,
}
impl<'a, T> Stacks<'a, T> {
fn new(gs: &'a GraphStack<T>, start_item: usize) -> Self {
Stacks {
cursors: vec![Cursor {
item: start_item,
ancestor: 0,
}],
unstack: vec![&gs.items[start_item]],
gs,
}
}
}
impl<'a, T> Iterator for Stacks<'a, T> {
type Item = Vec<&'a T>;
fn next(&mut self) -> Option<Self::Item> {
if self.cursors.is_empty() {
return None;
}
// Build a snapshot of the stack pointed to by current cursors
while let Some(cursor) = self.cursors.last() {
let ref item_ancestors = self.gs.ancestors[&cursor.item];
// Is cursor at the bottom of the stack, or has more depth?
if item_ancestors.is_empty() {
break;
}
let prev_item_id = item_ancestors[cursor.ancestor];
self.unstack.push(&self.gs.items[prev_item_id]);
self.cursors.push(Cursor {
item: prev_item_id,
ancestor: 0,
});
}
let stack_snapshot = self.unstack.clone();
// Advance iterator: find the cursor to advance depth-first
while let Some(cursor) = self.cursors.last_mut() {
let num_item_ancestors = self.gs.ancestors[&cursor.item].len();
if cursor.ancestor + 1 < num_item_ancestors {
cursor.ancestor += 1;
break;
}
self.cursors.pop();
}
// keep the part of the stack that is common for other ancestors
self.unstack.truncate(self.cursors.len());
Some(stack_snapshot)
}
}
#[cfg(test)]
mod tests {
use super::GraphStack;
use std::collections::HashMap;
#[test]
fn check_iterator() {
// a - b - c - e - f - g - h
// \ d -/------/
let mut gs = GraphStack::new();
let idmap: HashMap<_, _> = ["a", "b", "c", "d", "e", "f", "g", "h"]
.iter()
.cloned()
.map(|value| (value, gs.push(value, &[])))
.collect();
gs.add_ancestors(idmap["b"], &[idmap["a"]]);
gs.add_ancestors(idmap["c"], &[idmap["b"]]);
gs.add_ancestors(idmap["d"], &[idmap["b"]]);
gs.add_ancestors(idmap["e"], &[idmap["c"], idmap["d"]]);
gs.add_ancestors(idmap["f"], &[idmap["e"]]);
gs.add_ancestors(idmap["g"], &[idmap["d"], idmap["f"]]);
gs.add_ancestors(idmap["h"], &[idmap["g"]]);
let mut it = gs.stacks(idmap["h"]);
assert_eq!(it.next().unwrap(), vec![&"h", &"g", &"d", &"b", &"a"]);
assert_eq!(
it.next().unwrap(),
vec![&"h", &"g", &"f", &"e", &"c", &"b", &"a"]
);
assert_eq!(
it.next().unwrap(),
vec![&"h", &"g", &"f", &"e", &"d", &"b", &"a"]
);
assert!(it.next().is_none());
}
#[test]
fn disjoint_stacks() {
// a - b - c
// d - e
let mut gs = GraphStack::new();
let idmap: HashMap<_, _> = ["a", "b", "c", "d", "e"]
.iter()
.cloned()
.map(|value| (value, gs.push(value, &[])))
.collect();
gs.add_ancestors(idmap["b"], &[idmap["a"]]);
gs.add_ancestors(idmap["c"], &[idmap["b"]]);
// disjoint stack
gs.add_ancestors(idmap["e"], &[idmap["d"]]);
let mut it = gs.stacks(idmap["e"]);
assert_eq!(it.next().unwrap(), vec![&"e", &"d"]);
assert!(it.next().is_none());
let mut it = gs.stacks(idmap["c"]);
assert_eq!(it.next().unwrap(), vec![&"c", &"b", &"a"]);
assert!(it.next().is_none());
}
#[test]
fn x_stack() {
// a - b - c
// d / \ e
let mut gs = GraphStack::new();
let idmap: HashMap<_, _> = ["a", "b", "c", "d", "e"]
.iter()
.cloned()
.map(|value| (value, gs.push(value, &[])))
.collect();
gs.add_ancestors(idmap["b"], &[idmap["a"], idmap["d"]]);
gs.add_ancestors(idmap["c"], &[idmap["b"]]);
gs.add_ancestors(idmap["e"], &[idmap["b"]]);
let mut it = gs.stacks(idmap["e"]);
assert_eq!(it.next().unwrap(), vec![&"e", &"b", &"a"]);
assert_eq!(it.next().unwrap(), vec![&"e", &"b", &"d"]);
assert!(it.next().is_none());
let mut it = gs.stacks(idmap["c"]);
assert_eq!(it.next().unwrap(), vec![&"c", &"b", &"a"]);
assert_eq!(it.next().unwrap(), vec![&"c", &"b", &"d"]);
assert!(it.next().is_none());
let mut it = gs.stacks(idmap["b"]);
assert_eq!(it.next().unwrap(), vec![&"b", &"a"]);
assert_eq!(it.next().unwrap(), vec![&"b", &"d"]);
assert!(it.next().is_none());
}
// TODO: test case for adding a cycle
// create a cycle
// gs.add_ancestors(idmap["a"], &[idmap["h"]]);
}
| true |
cad6f72bd14771b7b3294c3455b5680f96db0399
|
Rust
|
japaric/linux-rtfm
|
/rtfm/macros/src/analyze.rs
|
UTF-8
| 1,708 | 2.703125 | 3 |
[
"Apache-2.0",
"MIT"
] |
permissive
|
use core::ops::{self, Range};
use std::collections::{BTreeMap, BTreeSet};
use rtfm_syntax::{
analyze::{self, Priority},
ast::App,
Core, P,
};
/// Signal number
pub type Signal = u8;
pub struct Analysis {
parent: P<analyze::Analysis>,
pub signals: BTreeMap<Core, Signals>,
}
impl ops::Deref for Analysis {
type Target = analyze::Analysis;
fn deref(&self) -> &Self::Target {
&self.parent
}
}
pub struct Signals {
pub map: BTreeMap<Priority, Signal>,
pub start: Signal,
}
impl Signals {
pub fn range(&self) -> Range<Signal> {
let start = self.start;
let end = start + self.map.len() as u8;
start..end
}
}
// Assign a RT signal handler to each priority level
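// Illustrative example (not from the original source): with software-task
// priorities {1, 2, 3} on a single core and `rt` starting at 0, the map built
// below assigns signal 0 to priority 3, signal 1 to priority 2 and signal 2 to
// priority 1, i.e. higher priorities get lower-numbered RT signals.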
pub fn app(parent: P<analyze::Analysis>, app: &App) -> P<Analysis> {
let mut rt = 0;
let mut signals = BTreeMap::new();
for core in 0..app.args.cores {
let priorities = app
.software_tasks
.values()
.filter_map(|task| {
if task.args.core == core {
Some(task.args.priority)
} else {
None
}
})
// NOTE the timer handler may be higher priority than all the other tasks
.chain(parent.timer_queues.get(&core).map(|tq| tq.priority))
.collect::<BTreeSet<_>>();
let map = priorities
.iter()
.rev()
.cloned()
.zip(rt..)
.collect::<BTreeMap<_, _>>();
let len = map.len();
signals.insert(core, Signals { map, start: rt });
rt += len as u8;
}
P::new(Analysis { parent, signals })
}
| true |
9b3c12068a39b951bc9e00cbafc5e91af8a4b76d
|
Rust
|
silvia-odwyer/photon
|
/crate/src/bin/bin.rs
|
UTF-8
| 850 | 2.8125 | 3 |
[
"Apache-2.0"
] |
permissive
|
extern crate photon_rs;
extern crate time;
use photon_rs::channels::alter_red_channel;
use photon_rs::native::{open_image, save_image};
use time::Instant;
fn main() -> Result<(), Box<dyn std::error::Error>> {
// Open the image (a PhotonImage is returned)
let mut img = open_image("examples/input_images/daisies_fuji.jpg")?;
let start = Instant::now();
// Increment the red channel by 40
alter_red_channel(&mut img, 40_i16);
let output_img_path = "output.jpg";
// Write file to filesystem.
save_image(img, output_img_path)?;
let end = Instant::now();
println!(
"Took {} seconds to increment red channel by 40 on image.",
(end - start).as_seconds_f64()
);
println!(
"Saved image: {}. Please check this directory for the image.",
output_img_path
);
Ok(())
}
| true |
2c28a6609ce262bdcb4e3fbbbc7b55dc3110599e
|
Rust
|
brunobertoldi/Scheme-to-WASM-Compiler
|
/src/lexer/token.rs
|
UTF-8
| 2,105 | 3.5 | 4 |
[] |
no_license
|
use std::error::Error;
use std::fmt::{self, Write};
use std::str;
tokens! {
Token, TokenType {
OpenParen: b"",
CloseParen: b"",
Quote: b"quote",
Lambda: b"lambda",
If: b"if",
Ident(String),
Bool(bool),
Int(i64),
Float(f64),
String(String),
}
}
#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
pub struct VoidError;
impl<T: Error> From<T> for VoidError {
fn from(_: T) -> Self {
VoidError
}
}
pub trait TokenFromBytes: Sized {
fn from_bytes(bytes: &[u8]) -> Result<Self, VoidError>;
}
impl TokenFromBytes for String {
fn from_bytes(bytes: &[u8]) -> Result<Self, VoidError> {
Ok(unsafe { String::from_utf8_unchecked(bytes.to_owned()) })
}
}
impl TokenFromBytes for bool {
fn from_bytes(bytes: &[u8]) -> Result<Self, VoidError> {
if bytes == b"t" {
Ok(true)
} else if bytes == b"f" {
Ok(false)
} else {
Err(VoidError)
}
}
}
impl TokenFromBytes for i64 {
fn from_bytes(bytes: &[u8]) -> Result<Self, VoidError> {
let s = unsafe { str::from_utf8_unchecked(bytes) };
Ok(s.parse()?)
}
}
impl TokenFromBytes for f64 {
fn from_bytes(bytes: &[u8]) -> Result<Self, VoidError> {
let s = unsafe { str::from_utf8_unchecked(bytes) };
Ok(s.parse()?)
}
}
impl fmt::Display for Token {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
Token::OpenParen => f.write_char('('),
Token::CloseParen => f.write_char(')'),
Token::Quote => f.write_char('\''),
Token::Lambda => f.write_str("lambda"),
Token::If => f.write_str("if"),
Token::Ident(ref s) => f.write_str(s),
Token::Bool(true) => f.write_str("#t"),
Token::Bool(false) => f.write_str("#f"),
Token::Int(ref n) => write!(f, "{}", n),
Token::Float(ref n) => write!(f, "{}", n),
Token::String(ref s) => write!(f, "{:?}", s),
}
}
}
| true |
89a431d5a254f905085b8544b9fd30ffa4472617
|
Rust
|
grogers0/advent_of_code
|
/2016/day25/src/main.rs
|
UTF-8
| 2,041 | 2.796875 | 3 |
[
"MIT"
] |
permissive
|
use std::collections::{BTreeMap, BTreeSet};
use std::io::{self, Read};
use day12_2016::*;
fn toggle_clock(clock: i64) -> i64 {
if clock == 0 { 1 } else { 0 }
}
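// Added summary comment: runs the program until a (program, registers, pc)
// state repeats, checking that every clock output alternates 0/1; it then
// re-runs the repeating cycle to confirm it actually keeps emitting a
// toggling clock signal.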
fn check_toggling_clock(mut program: Vec<Op>, mut registers: BTreeMap<String, i64>) -> bool {
let mut seen = BTreeSet::new();
let mut pc = 0i64;
let mut last_clock = 1;
loop {
        if pc < 0 || pc >= program.len() as i64 { return false }
if !seen.insert((program.clone(), registers.clone(), pc)) {
break;
}
let mut clock = None;
execute_op(&mut program, &mut pc, &mut clock, &mut registers);
if let Some(clock) = clock {
if toggle_clock(last_clock) != clock {
return false
}
last_clock = clock;
}
}
let saved_state = (program.clone(), registers.clone(), pc);
let mut clock_changes = 0;
let mut cycle_len = 0;
while pc >= 0 && pc < program.len() as i64 {
if cycle_len != 0 && saved_state.0 == program && saved_state.1 == registers && saved_state.2 == pc {
return clock_changes > 0;
}
cycle_len += 1;
let mut clock = None;
execute_op(&mut program, &mut pc, &mut clock, &mut registers);
if let Some(clock) = clock {
if toggle_clock(last_clock) != clock {
return false
}
last_clock = clock;
clock_changes += 1;
}
}
false
}
// TODO - this takes about a minute to run, probably there is a way to rewrite the input assembly
// to be more efficient.
fn part1(input: &str) -> i64 {
let program = parse_ops(input);
for i in 0.. {
let mut registers = BTreeMap::new();
registers.insert("a".to_string(), i);
if check_toggling_clock(program.clone(), registers) {
return i;
}
}
unreachable!()
}
fn main() {
let mut input = String::new();
io::stdin().read_to_string(&mut input).unwrap();
println!("{}", part1(&input));
}
| true |
408b3704809338a4c4414b93c33fcc563547c6c1
|
Rust
|
regiontog/snew
|
/src/tests.rs
|
UTF-8
| 2,973 | 2.671875 | 3 |
[] |
no_license
|
#[cfg(test)]
mod tests {
use crate::{
auth::{ApplicationAuthenticator, Credentials, ScriptAuthenticator},
reddit::{Reddit, Result},
};
use std::env;
#[test]
fn it_works() -> Result<()> {
let script_auth = ScriptAuthenticator::new(Credentials::new(
&env::var("REDDIT_CLIENT_ID").unwrap(),
&env::var("REDDIT_CLIENT_SECRET").unwrap(),
&env::var("REDDIT_USERNAME").unwrap(),
&env::var("REDDIT_PASSWORD").unwrap(),
));
let reddit = Reddit::new(script_auth, "Windows:snew:v0.1.0 (by /u/zower98)").unwrap();
println!("{:?}", reddit.me()?);
Ok(())
}
#[test]
fn anonymous() -> Result<()> {
let application_auth = ApplicationAuthenticator::new(
&env::var("REDDIT_CLIENT_ID").unwrap(),
&env::var("REDDIT_CLIENT_SECRET").unwrap(),
);
let reddit = Reddit::new(application_auth, "Windows:snew:v0.1.0 (by /u/zower98)").unwrap();
for post in reddit.subreddit("rust").hot().take(1) {
let post = post?;
println!("Post: {:?}", post.title);
}
for post in reddit.frontpage().best().take(1) {
let post = post?;
println!("Frontpage post: {}", post.title);
}
Ok(())
}
#[test]
fn comments() -> Result<()> {
let script_auth = ScriptAuthenticator::new(Credentials::new(
&env::var("REDDIT_CLIENT_ID").unwrap(),
&env::var("REDDIT_CLIENT_SECRET").unwrap(),
&env::var("REDDIT_USERNAME").unwrap(),
&env::var("REDDIT_PASSWORD").unwrap(),
));
let reddit = Reddit::new(script_auth, "Windows:snew:v0.1.0 (by /u/zower98)").unwrap();
let hot = reddit.subreddit("globaloffensive").hot();
for post in hot.take(3) {
let post = post?;
println!("Post: {}", post.title);
for comment in post.comments().take(1) {
let comment = comment?;
println!("By: {}, {}", comment.author, comment.body);
}
}
Ok(())
}
#[test]
#[should_panic]
    fn unauthorized_anonymous() {
let application_auth = ApplicationAuthenticator::new(
&env::var("REDDIT_CLIENT_ID").unwrap(),
&env::var("REDDIT_CLIENT_SECRET").unwrap(),
);
let reddit = Reddit::new(application_auth, "Windows:snew:v0.1.0 (by /u/zower98)").unwrap();
reddit.me().unwrap();
}
#[test]
#[should_panic]
fn not_authenticated() {
let script_auth = ScriptAuthenticator::new(Credentials::new(
"fake_client_id",
"fake_client_secret",
"fake_username",
"fake_password",
));
let reddit = Reddit::new(
script_auth,
"<Operating system>:snew:v0.1.0 (by /u/<reddit username>)",
);
reddit.unwrap();
}
}
| true |
26846cd19f0bf8e81e84543bd65c2454a5482b5c
|
Rust
|
y-usuzumi/survive-the-course
|
/survive-the-course-rs/src/problems/neetcode/arrays_and_hashing/Group_Anagrams.rs
|
UTF-8
| 1,115 | 3.453125 | 3 |
[
"BSD-3-Clause"
] |
permissive
|
// https://leetcode.com/problems/group-anagrams/
use std::collections::HashMap;
pub struct Solution;
impl Solution {
pub fn group_anagrams(strs: Vec<String>) -> Vec<Vec<String>> {
let mut m: HashMap<Vec<i32>, Vec<String>> = HashMap::new();
for s in strs {
let mut chars = vec![0; 26];
for ch in s.chars() {
chars[(ch as u32 - 'a' as u32) as usize] += 1;
}
m.entry(chars).or_default().push(s);
}
m.into_values().collect()
}
}
#[cfg(test)]
mod tests {
use test_util::assert_eq_ignore_order;
use super::*;
#[test]
fn test_1() {
let input = vec!["eat", "tea", "tan", "ate", "nat", "bat"]
.iter()
.map(|s| s.to_string())
.collect();
let output: Vec<Vec<String>> =
vec![vec!["bat"], vec!["nat", "tan"], vec!["ate", "eat", "tea"]]
.iter()
.map(|v| v.iter().map(|s| s.to_string()).collect())
.collect();
assert_eq_ignore_order(Solution::group_anagrams(input), output);
}
}
| true |
775f9b91da9d0810bcdd119c1bb67b5cc46d77ed
|
Rust
|
CraneStation/lightbeam
|
/src/disassemble.rs
|
UTF-8
| 1,330 | 2.71875 | 3 |
[
"LLVM-exception",
"Apache-2.0"
] |
permissive
|
use capstone::prelude::*;
use dynasmrt::AssemblyOffset;
use std::error::Error;
use std::fmt::{Display, Write};
pub fn disassemble(
mem: &[u8],
mut ops: &[(AssemblyOffset, impl Display)],
) -> Result<(), Box<dyn Error>> {
let mut cs = Capstone::new()
.x86()
.mode(arch::x86::ArchMode::Mode64)
.build()?;
println!("{} bytes:", mem.len());
let insns = cs.disasm_all(&mem, 0x0)?;
for i in insns.iter() {
let mut line = String::new();
let address = i.address();
loop {
if let Some((offset, op)) = ops.first() {
if offset.0 as u64 <= address {
ops = &ops[1..];
println!("{}", op);
} else {
break;
}
} else {
break;
}
}
write!(&mut line, "{:4x}:\t", i.address())?;
let mut bytes_str = String::new();
for b in i.bytes() {
write!(&mut bytes_str, "{:02x} ", b)?;
}
write!(&mut line, "{:24}\t", bytes_str)?;
if let Some(s) = i.mnemonic() {
write!(&mut line, "{}\t", s)?;
}
if let Some(s) = i.op_str() {
write!(&mut line, "{}", s)?;
}
println!("{}", line);
}
Ok(())
}
| true |
c36606de06705f713100eae02f55be7e43d5d0da
|
Rust
|
tsauvajon/rust-es
|
/src/command.rs
|
UTF-8
| 2,540 | 2.84375 | 3 |
[] |
no_license
|
use crate::aggregate;
use crate::event;
use uuid::Uuid;
pub trait Command<A: aggregate::Aggregate, E: event::DomainEvent<A>> {
fn handle(self, aggregate: &A) -> Result<Vec<E>, aggregate::Error>;
}
pub struct MakePayment {
pub driver_id: Uuid,
pub amount: f64,
}
impl Command<aggregate::Balance, event::BalanceEvent> for MakePayment {
fn handle(
self,
balance: &aggregate::Balance,
) -> Result<Vec<event::BalanceEvent>, aggregate::Error> {
println!(
"make payment of {:?} for driver {:?}",
self.amount, self.driver_id
);
Ok(vec![event::BalanceEvent::DriverMadePayment(
event::DriverMadePayment {
amount: self.amount,
driver_id: self.driver_id,
balance: balance.amount + self.amount,
},
)])
}
}
pub struct MakeClearance {
pub driver_id: Uuid,
pub amount: f64,
}
impl Command<aggregate::Balance, event::BalanceEvent> for MakeClearance {
fn handle(
self,
balance: &aggregate::Balance,
) -> Result<Vec<event::BalanceEvent>, aggregate::Error> {
let new_balance = balance.amount - self.amount;
if new_balance < 0_f64 {
return Err(aggregate::Error::UserError(
"insufficient funds".to_string(),
));
}
println!(
"make clearance of {:?} for driver {:?}",
self.amount, self.driver_id
);
Ok(vec![event::BalanceEvent::ClearanceSentToDriver(
event::ClearanceSentToDriver {
amount: self.amount,
driver_id: self.driver_id,
balance: new_balance,
},
)])
}
}
pub struct FinishRide {
pub driver_id: Uuid,
pub passenger_id: Uuid,
pub fare: f64,
}
impl Command<aggregate::Balance, event::BalanceEvent> for FinishRide {
fn handle(
self,
balance: &aggregate::Balance,
) -> Result<Vec<event::BalanceEvent>, aggregate::Error> {
println!(
"ride with fare {:?} finished for driver {:?} and passenger {:?}",
self.fare, self.driver_id, self.passenger_id
);
Ok(vec![event::BalanceEvent::RideFinished(
event::RideFinished {
fare: self.fare,
driver_id: self.driver_id,
passenger_id: self.passenger_id,
driver_balance: balance.amount + self.fare,
},
)])
}
}
| true |
126f7e6520602e445b3974f2c9d0dc26ce64e6e7
|
Rust
|
kumanote/glog-rs
|
/src/collector_serializer.rs
|
UTF-8
| 6,752 | 3.140625 | 3 |
[
"MIT"
] |
permissive
|
//! Provides CollectorSerializer.
use std::fmt::Arguments;
use slog::{Key, Result, Serializer};
use crate::kv_categorizer::KVCategorizer;
/// This serializer collects all KV pairs into a Vec, converting the values to `String`.
/// It filters out the ones that are of `KVCategory::Ignore`
pub struct CollectorSerializer<'a, C: KVCategorizer>(Vec<(Key, String)>, &'a C);
impl<'a, C: KVCategorizer> CollectorSerializer<'a, C> {
/// Create a collector serializer that will use the given categorizer to collect desired values
pub fn new(categorizer: &'a C) -> Self {
CollectorSerializer(Vec::new(), categorizer)
}
/// Once done collecting KV pairs call this to retrieve collected values
pub fn into_inner(self) -> Vec<(Key, String)> {
self.0
}
}
/// Define a macro to implement serializer emit functions.
macro_rules! impl_emit_body(
($s:expr, $k:expr, $v:expr) => {
if $s.1.ignore($k) {
return Ok(())
}
$s.0.push(($k, format!("{}", $v)));
};
);
/// Define a macro to implement serializer emit functions for standard types.
macro_rules! impl_emit(
($name:ident, $t:ty) => {
/// Emit $t
fn $name(&mut self, key: Key, val: $t) -> Result {
impl_emit_body!(self, key, val);
Ok(())
}
};
);
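// For example (an illustrative expansion, not present in the original source),
// `impl_emit!(emit_u8, u8)` generates roughly:
//
//     fn emit_u8(&mut self, key: Key, val: u8) -> Result {
//         if self.1.ignore(key) {
//             return Ok(())
//         }
//         self.0.push((key, format!("{}", val)));
//         Ok(())
//     }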
impl<'a, C: KVCategorizer> Serializer for CollectorSerializer<'a, C> {
/// Emit ()
fn emit_unit(&mut self, key: Key) -> Result {
impl_emit_body!(self, key, "()");
Ok(())
}
/// Emit None
fn emit_none(&mut self, key: Key) -> Result {
impl_emit_body!(self, key, "None");
Ok(())
}
impl_emit!(emit_usize, usize);
impl_emit!(emit_isize, isize);
impl_emit!(emit_bool, bool);
impl_emit!(emit_char, char);
impl_emit!(emit_u8, u8);
impl_emit!(emit_i8, i8);
impl_emit!(emit_u16, u16);
impl_emit!(emit_i16, i16);
impl_emit!(emit_u32, u32);
impl_emit!(emit_i32, i32);
impl_emit!(emit_f32, f32);
impl_emit!(emit_u64, u64);
impl_emit!(emit_i64, i64);
impl_emit!(emit_f64, f64);
impl_emit!(emit_str, &str);
impl_emit!(emit_arguments, &Arguments<'_>);
}
#[cfg(test)]
mod tests {
use super::*;
use rand::{rngs::StdRng, Rng, SeedableRng};
use slog::{b, record, Record, Level, KV, Result as SlogResult};
use crate::kv_categorizer::InlineCategorizer;
#[derive(Clone)]
struct TestKv {
key: Key,
vusize: usize,
visize: isize,
vbool: bool,
vchar: char,
vu8: u8,
vi8: i8,
vu16: u16,
vi16: i16,
vu32: u32,
vi32: i32,
vf32: f32,
vu64: u64,
vi64: i64,
vf64: f64,
vstr: String,
}
impl TestKv {
fn new<R: Rng>(key: Key, rng: &mut R) -> Self {
Self {
key,
vusize: rng.gen(),
visize: rng.gen(),
vbool: rng.gen(),
vchar: rng.gen(),
vu8: rng.gen(),
vi8: rng.gen(),
vu16: rng.gen(),
vi16: rng.gen(),
vu32: rng.gen(),
vi32: rng.gen(),
vf32: rng.gen(),
vu64: rng.gen(),
vi64: rng.gen(),
vf64: rng.gen(),
vstr: format!("value{}", rng.gen::<i64>()),
}
}
fn to_vec(&self) -> Vec<(Key, String)> {
vec![
(self.key, "None".to_owned()),
(self.key, "()".to_owned()),
(self.key, format!("{}", self.vusize)),
(self.key, format!("{}", self.visize)),
(self.key, format!("{}", self.vbool)),
(self.key, format!("{}", self.vchar)),
(self.key, format!("{}", self.vu8)),
(self.key, format!("{}", self.vi8)),
(self.key, format!("{}", self.vu16)),
(self.key, format!("{}", self.vi16)),
(self.key, format!("{}", self.vu32)),
(self.key, format!("{}", self.vi32)),
(self.key, format!("{}", self.vf32)),
(self.key, format!("{}", self.vu64)),
(self.key, format!("{}", self.vi64)),
(self.key, format!("{}", self.vf64)),
(self.key, self.vstr.clone()),
]
}
}
impl KV for TestKv {
fn serialize(&self, _record: &Record<'_>, serializer: &mut dyn Serializer) -> SlogResult {
serializer.emit_none(self.key).expect("failure emitting none");
serializer.emit_unit(self.key).expect("failure emitting unit");
serializer.emit_usize(self.key, self.vusize).expect("failure emitting usize");
serializer.emit_isize(self.key, self.visize).expect("failure emitting isize");
serializer.emit_bool(self.key, self.vbool).expect("failure emitting bool");
serializer.emit_char(self.key, self.vchar).expect("failure emitting char");
serializer.emit_u8(self.key, self.vu8).expect("failure emitting u8");
serializer.emit_i8(self.key, self.vi8).expect("failure emitting i8");
serializer.emit_u16(self.key, self.vu16).expect("failure emitting u16");
serializer.emit_i16(self.key, self.vi16).expect("failure emitting i16");
serializer.emit_u32(self.key, self.vu32).expect("failure emitting u32");
serializer.emit_i32(self.key, self.vi32).expect("failure emitting i32");
serializer.emit_f32(self.key, self.vf32).expect("failure emitting f32");
serializer.emit_u64(self.key, self.vu64).expect("failure emitting u64");
serializer.emit_i64(self.key, self.vi64).expect("failure emitting i64");
serializer.emit_f64(self.key, self.vf64).expect("failure emitting f64");
serializer.emit_str(self.key, &self.vstr).expect("failure emitting str");
Ok(())
}
}
#[test]
fn test_inline_all() {
let mut rng: StdRng = SeedableRng::from_seed([1; 32]);
let mut serializer = CollectorSerializer::new(&InlineCategorizer);
let input = vec![
TestKv::new("test1".into(), &mut rng),
TestKv::new("test2".into(), &mut rng),
];
for value in input.clone() {
value
.serialize(
&record!(Level::Info, "test", &format_args!(""), b!()),
&mut serializer,
)
.expect("serialize failed!");
}
itertools::assert_equal(
serializer.into_inner(),
input.into_iter().flat_map(|x| x.to_vec()),
);
}
}
| true |
81ea12529b74fe1b01f2ff513dc9ae322f62486d
|
Rust
|
Patrik-Stas/play_with_rust
|
/old_futures/src/concurrency_thread_local_with_arc.rs
|
UTF-8
| 593 | 3.046875 | 3 |
[] |
no_license
|
use std::sync::Arc;
#[derive(Default)]
struct Config {
pub debug_mode: bool,
}
impl Config {
pub fn current() -> Arc<Config> {
CURRENT_CONFIG.with(|c| c.clone())
}
}
thread_local! {
static CURRENT_CONFIG: Arc<Config> = Arc::new(Default::default());
}
pub fn run() {
let mut config = Config::current();
if config.debug_mode {
// do something
}
//error[E0594]: cannot assign to data in a `&` reference https://doc.rust-lang.org/nightly/error-index.html#E0594
// config.debug_mode = false;
//^^^^^^^^^^^^^^^^^^^^^^^^^ cannot assign
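// (Added note: `Arc<Config>` only hands out shared access, so the field cannot
// be assigned through it; mutating shared state would need interior mutability
// such as `Mutex`/`RwLock`, or `Arc::make_mut` on a uniquely-owned handle.)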
}
| true |
cb1b904a7f242f0fd6d61877e71decd3f26ce3ed
|
Rust
|
ritiek/piano-rs
|
/src/game/screen.rs
|
UTF-8
| 4,005 | 2.859375 | 3 |
[
"MIT"
] |
permissive
|
use std::{thread, time};
use crossterm::{
style,
queue,
Colorize,
Goto,
PrintStyledFont,
};
use crossterm_style::Color;
use std::io::{stdout, Write};
/*
█▒
*/
pub mod pianokeys {
use crossterm::{
queue,
Colorize,
Goto,
PrintStyledFont,
Result,
};
use std::io::{stdout, Stdout, Write};
struct Point {
x: u16,
y: u16,
}
pub fn draw() -> Result<()> {
let mut stdout = stdout();
print_whites(&mut stdout)?;
print_blacks(&mut stdout)?;
stdout.flush()?;
Ok(())
}
fn print_whitekey(initial_point: Point, stdout: &mut Stdout) -> Result<()> {
let key_height: u16 = 16;
for column_height in 0..key_height {
queue!(
stdout,
Goto(initial_point.x, initial_point.y + column_height),
PrintStyledFont("|".black().on_white())
)?;
queue!(
stdout,
Goto(initial_point.x + 1, initial_point.y + column_height),
PrintStyledFont("██".white())
)?;
queue!(
stdout,
Goto(initial_point.x + 3, initial_point.y + column_height),
PrintStyledFont("|".black())
)?;
}
Ok(())
}
fn print_whites(stdout: &mut Stdout) -> Result<()> {
for key in 0..58 {
let initial_point = Point { x: key * 3, y: 0 };
print_whitekey(initial_point, stdout)?;
}
Ok(())
}
fn print_blackkey(initial_point: Point, stdout: &mut Stdout) -> Result<()> {
let key_height = 9;
for column_height in 0..key_height {
queue!(
stdout,
Goto(initial_point.x, initial_point.y + column_height),
PrintStyledFont("█".black())
)?;
}
Ok(())
}
fn print_blacks(stdout: &mut Stdout) -> Result<()> {
// First black key is lonely
let mut initial_point = Point { x: 3, y: 0 };
print_blackkey(initial_point, stdout)?;
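        // Added note: the remaining black keys follow the usual piano layout
        // of a group of two followed by a group of three per octave; the
        // offsets below compute each key's column within octave `x`.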
for x in 0..8 {
let g1k1 = x * 21 + 9;
let g1k2 = g1k1 + 3;
initial_point = Point { x: g1k1, y: 0 };
print_blackkey(initial_point, stdout)?;
initial_point = Point { x: g1k2, y: 0 };
print_blackkey(initial_point, stdout)?;
let g2k1 = g1k2 + 6;
let g2k2 = g2k1 + 3;
let g2k3 = g2k2 + 3;
initial_point = Point { x: g2k1, y: 0 };
print_blackkey(initial_point, stdout)?;
initial_point = Point { x: g2k2, y: 0 };
print_blackkey(initial_point, stdout)?;
initial_point = Point { x: g2k3, y: 0 };
print_blackkey(initial_point, stdout)?;
}
Ok(())
}
}
pub fn mark_note(pos: i16, white: bool, color: Color, duration: time::Duration) {
if white {
// This causes a compiler panic!
/* queue!( */
/* stdout(), */
/* Goto(pos as u16, 15), */
/* PrintStyledFont(StyledObject("██").with(color)) */
/* ).unwrap(); */
queue!(
stdout(),
Goto(pos as u16, 15),
PrintStyledFont(style("██").with(color))
).unwrap();
/* println!("{} Red foreground text", Colored::Fg(Color::Red)); */
} else {
queue!(
stdout(),
Goto(pos as u16, 8),
PrintStyledFont(style("█").with(color))
).unwrap();
}
thread::spawn(move || {
thread::sleep(duration);
if white {
queue!(
stdout(),
Goto(pos as u16, 15),
PrintStyledFont("██".white())
).unwrap();
} else {
queue!(
stdout(),
Goto(pos as u16, 8),
PrintStyledFont("█".black())
).unwrap();
}
});
}
| true |
111a77d49aa1de65f61b97821e60853287084c85
|
Rust
|
angela-1/learn-rust
|
/ar/queue-folder/src/main.rs
|
UTF-8
| 2,069 | 3.28125 | 3 |
[
"Apache-2.0"
] |
permissive
|
use std::fs::{DirBuilder};
use std::fs::File;
use std::io::prelude::*;
fn main() {
// create_folder();
// create_folder().unwrap();
match create_folder() {
Ok(_) => println!("成功"),
Err(_) => println!("失败")
};
// println!("Hello, world!");
// let path = "./baz";
// assert!(fs::metadata(path).unwrap().is_dir());
// let path = get_primary_key();
// let fd_name = "./".to_owned() + &path.unwrap().trim(); //+ &path.unwrap();
// println!("{:?}", &path.unwrap().trim());
}
/// Create the folder based on the obtained folder name
fn create_folder () -> Result<(), String> {
get_primary_key()
.map_err(|err| err.to_string())
.and_then(|key| {
println!("请输入任务名称:");
let mut buffer = String::new();
std::io::stdin().read_line(&mut buffer)
.map_err(|err| err.to_string())
.map(|_| {
let folder_path = get_folder_name(key, buffer.trim());
DirBuilder::new().recursive(true).create(&folder_path).unwrap();
let folder_draft_path = folder_path.clone() + "/0-draft";
DirBuilder::new().recursive(true).create(&folder_draft_path).unwrap();
key
})
})
.and_then(|key| {
write_current_key(key)
})
}
/// Assemble the folder name
fn get_folder_name(key: i32, folder_name: &str) -> String {
"./".to_string() + &key.to_string() + "_" + folder_name
}
/// After the folder is created successfully, write the latest sequence number to the `foo.txt` file
fn write_current_key (key: i32) -> Result<(), String> {
File::create("./foo.txt")
.map_err(|err| err.to_string())
.and_then(|mut file| {
file.write_all(key.to_string().as_bytes())
.map_err(|err| err.to_string())
})
}
/// Read the `foo.txt` file to get the current sequence number
fn get_primary_key () -> Result<i32, String> {
File::open("./foo.txt")
.map_err(|err| err.to_string())
.and_then(|mut file| {
let mut contents = String::new();
file.read_to_string(&mut contents)
.map_err(|err| err.to_string())
.map(|_| contents)
})
.and_then(|contents| {
contents.trim().parse::<i32>()
.map_err(|err| err.to_string())
})
.map(|n| n + 1)
}
| true |
e28444db0b4ae4e43a729dc9a06698a73efd8e18
|
Rust
|
vxoli/adventofcode2020
|
/rust-d03/src/main.rs
|
UTF-8
| 5,377 | 3.171875 | 3 |
[] |
no_license
|
use std::fs;
/* # Day 3:
## --- Toboggan Trajectory ---
## --- Part One ---
With the toboggan login problems resolved, you set off toward the airport. While travel by toboggan might be easy, it's certainly not safe: there's very minimal steering and the area is covered in trees. You'll need to see which angles will take you near the fewest trees.
Due to the local geology, trees in this area only grow on exact integer coordinates in a grid. You make a map (your puzzle input) of the open squares (.) and trees (#) you can see. For example:
..##.......
#...#...#..
.#....#..#.
..#.#...#.#
.#...##..#.
..#.##.....
.#.#.#....#
.#........#
#.##...#...
#...##....#
.#..#...#.#
These aren't the only trees, though; due to something you read about once involving arboreal genetics and biome stability, the same pattern repeats to the right many times:
..##.........##.........##.........##.........##.........##....... --->
#...#...#..#...#...#..#...#...#..#...#...#..#...#...#..#...#...#..
.#....#..#..#....#..#..#....#..#..#....#..#..#....#..#..#....#..#.
..#.#...#.#..#.#...#.#..#.#...#.#..#.#...#.#..#.#...#.#..#.#...#.#
.#...##..#..#...##..#..#...##..#..#...##..#..#...##..#..#...##..#.
..#.##.......#.##.......#.##.......#.##.......#.##.......#.##..... --->
.#.#.#....#.#.#.#....#.#.#.#....#.#.#.#....#.#.#.#....#.#.#.#....#
.#........#.#........#.#........#.#........#.#........#.#........#
#.##...#...#.##...#...#.##...#...#.##...#...#.##...#...#.##...#...
#...##....##...##....##...##....##...##....##...##....##...##....#
.#..#...#.#.#..#...#.#.#..#...#.#.#..#...#.#.#..#...#.#.#..#...#.# --->
You start on the open square (.) in the top-left corner and need to reach the bottom (below the bottom-most row on your map).
The toboggan can only follow a few specific slopes (you opted for a cheaper model that prefers rational numbers); start by counting all the trees you would encounter for the slope right 3, down 1:
From your starting position at the top-left, check the position that is right 3 and down 1. Then, check the position that is right 3 and down 1 from there, and so on until you go past the bottom of the map.
The locations you'd check in the above example are marked here with O where there was an open square and X where there was a tree:
..##.........##.........##.........##.........##.........##....... --->
#..O#...#..#...#...#..#...#...#..#...#...#..#...#...#..#...#...#..
.#....X..#..#....#..#..#....#..#..#....#..#..#....#..#..#....#..#.
..#.#...#O#..#.#...#.#..#.#...#.#..#.#...#.#..#.#...#.#..#.#...#.#
.#...##..#..X...##..#..#...##..#..#...##..#..#...##..#..#...##..#.
..#.##.......#.X#.......#.##.......#.##.......#.##.......#.##..... --->
.#.#.#....#.#.#.#.O..#.#.#.#....#.#.#.#....#.#.#.#....#.#.#.#....#
.#........#.#........X.#........#.#........#.#........#.#........#
#.##...#...#.##...#...#.X#...#...#.##...#...#.##...#...#.##...#...
#...##....##...##....##...#X....##...##....##...##....##...##....#
.#..#...#.#.#..#...#.#.#..#...X.#.#..#...#.#.#..#...#.#.#..#...#.# --->
In this example, traversing the map using this slope would cause you to encounter 7 trees.
Starting at the top-left corner of your map and following a slope of right 3 and down 1, how many trees would you encounter?
## --- Part Two ---
Time to check the rest of the slopes - you need to minimize the probability of a sudden arboreal stop, after all.
Determine the number of trees you would encounter if, for each of the following slopes, you start at the top-left corner and traverse the map all the way to the bottom:
- Right 1, down 1.
- Right 3, down 1. (This is the slope you already checked.)
- Right 5, down 1.
- Right 7, down 1.
- Right 1, down 2.
In the above example, these slopes would find 2, 7, 3, 4, and 2 tree(s) respectively; multiplied together, these produce the answer 336.
What do you get if you multiply together the number of trees encountered on each of the listed slopes?
*/
fn main() {
let data = read_input_data("/home/christopher/Documents/GitHub/adventofcode2020/d03-input.txt");
// Part 1
    let mut trees: i32 = toboggan_trajectory(&data, 1, 3);
    println!("Part 1: trees encountered = {}", trees);
    // Part 2
    trees *= toboggan_trajectory(&data, 1, 1);
    // trees = toboggan_trajectory(&data, 3, 1); // already solved this in part 1 - don't need to repeat
    trees *= toboggan_trajectory(&data, 1, 5);
    trees *= toboggan_trajectory(&data, 1, 7);
    trees *= toboggan_trajectory(&data, 2, 1);
    println!("Part 2: product of trees encountered = {}", trees);
}
//Start function definitions
fn read_input_data(filename: &str) -> Vec<String> {
let input = fs::read_to_string(filename)
.unwrap()
.lines()
.map(|line| line.to_string())
.collect();
input
}
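// Added comment: counts the '#' trees hit while stepping `delta_col` columns
// right (wrapping around, since the pattern repeats to the right) and
// `delta_row` rows down from the top-left, until running past the bottom row.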
fn toboggan_trajectory(map_data: &Vec<String>, delta_row: i32, delta_col: i32) -> i32 {
let mut col: i32 = 0;
let mut row: i32 = 0;
let rows: i32 = map_data.len() as i32;
let line_length: i32 = (map_data[0].len() as i32) -1;
let mut trees:i32 = 0;
loop {
let map_row = &map_data[row as usize];
if map_row.chars().nth(col as usize).unwrap() == '#' {trees += 1;}
col += delta_col;
col = col % (line_length + 1);
row += delta_row;
if row >= rows {break;}
}
trees
}
| true |
ef254aeecd365d5ff72b65ff986cd70302369f34
|
Rust
|
AkshayMariyanna/algo-rust-practise
|
/algebra/src/modular_inverse.rs
|
UTF-8
| 582 | 3.234375 | 3 |
[] |
no_license
|
//! [Modular Inverse](https://cp-algorithms.com/algebra/module-inverse.html)
use crate::gcd::gcd_extended1;
/// Modular Inverse Using Extended Euclidean Algorithm
///
/// Finds modular inverse of a w.r.t m
/// ```
/// assert_eq!(algebra::modular_inverse::modular_inverse_extended_gcd(2, 4), None);
/// assert_eq!(algebra::modular_inverse::modular_inverse_extended_gcd(2, 5), Some(3));
/// ```
pub fn modular_inverse_extended_gcd(a: i64, m: i64) -> Option<i64> {
let ((x, _), g) = gcd_extended1(a, m);
match g {
1 => Some((x % m + m) % m),
_ => None
}
}
| true |
f148481b2e57a8dcff49021b91d9ee469d3da184
|
Rust
|
sdbondi/bn-api
|
/stripe/src/stripe_error.rs
|
UTF-8
| 1,592 | 3.015625 | 3 |
[] |
no_license
|
use reqwest;
use serde_json;
use std::error::Error;
use std::sync::Arc;
#[derive(Debug)]
pub struct StripeError {
pub description: String,
pub cause: Option<Arc<dyn Error>>,
}
impl Error for StripeError {
fn description(&self) -> &str {
&self.description
}
}
unsafe impl Send for StripeError {}
unsafe impl Sync for StripeError {}
use std::fmt;
impl fmt::Display for StripeError {
fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
match &self.cause {
Some(c) => write!(f, "{} caused by: {}", self.description, c.description()),
None => write!(f, "{}", self.description),
}
}
}
impl StripeError {
pub fn from_response(response: &mut reqwest::Response) -> StripeError {
StripeError {
description: format!(
"Error calling Stripe: HTTP Code {}: Body:{}",
response.status(),
response
.text()
.unwrap_or("<Error reading response body>".to_string())
),
cause: None,
}
}
}
impl From<reqwest::Error> for StripeError {
fn from(r: reqwest::Error) -> Self {
StripeError {
description: format!("Error calling Stripe: reqwest error {}", r),
cause: Some(Arc::new(r)),
}
}
}
impl From<serde_json::Error> for StripeError {
fn from(r: serde_json::Error) -> Self {
StripeError {
description: format!("Error deserializing response:{}", r),
cause: Some(Arc::new(r)),
}
}
}
| true |
64f831e2c1e8b6dd7b259d53f0e1ac2ea567f0b0
|
Rust
|
cargorust/zoc
|
/core/src/movement.rs
|
UTF-8
| 8,393 | 2.796875 | 3 |
[
"MIT",
"Apache-2.0"
] |
permissive
|
use std::default::{Default};
use std::rc::{Rc};
use types::{Size2};
use db::{Db};
use unit::{Unit};
use map::{Map, Terrain};
use game_state::{State};
use dir::{Dir, dirs};
use position::{ExactPos, SlotId, get_free_exact_pos};
use object::{ObjectClass};
use event::{MoveMode};
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)]
pub struct MovePoints{pub n: i32}
pub fn move_cost_modifier(mode: MoveMode) -> i32 {
match mode {
MoveMode::Fast => 1,
MoveMode::Hunt => 2,
}
}
#[derive(Clone, Debug)]
pub struct Tile {
cost: MovePoints,
parent: Option<Dir>,
slot_id: SlotId,
}
impl Tile {
pub fn parent(&self) -> Option<Dir> { self.parent }
pub fn cost(&self) -> MovePoints { self.cost }
pub fn slot_id(&self) -> SlotId { self.slot_id }
}
impl Default for Tile {
fn default() -> Tile {
Tile {
cost: MovePoints{n: 0},
parent: None,
slot_id: SlotId::WholeTile,
}
}
}
pub fn truncate_path(db: &Db, state: &State, path: &[ExactPos], unit: &Unit) -> Option<Vec<ExactPos>> {
let mut new_path = Vec::new();
let mut cost = MovePoints{n: 0};
new_path.push(path[0]);
let move_points = unit.move_points.unwrap();
for window in path.windows(2) {
let from = window[0];
let to = window[1];
cost.n += tile_cost(db, state, unit, from, to).n;
if cost.n > move_points.n {
break;
}
new_path.push(to);
}
if new_path.len() < 2 {
None
} else {
Some(new_path)
}
}
pub fn path_cost(db: &Db, state: &State, unit: &Unit, path: &[ExactPos])
-> MovePoints
{
let mut cost = MovePoints{n: 0};
for window in path.windows(2) {
let from = window[0];
let to = window[1];
cost.n += tile_cost(db, state, unit, from, to).n;
}
cost
}
// TODO: const (see https://github.com/rust-lang/rust/issues/24111 )
pub fn max_cost() -> MovePoints {
MovePoints{n: i32::max_value()}
}
// TODO: increase cost for attached units
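// Added summary: the cost of entering `pos` is its terrain cost (cheaper along
// roads, higher in trees and water) plus a penalty per building object and per
// unit already occupying the tile; air units always pay a flat cost of 2.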
pub fn tile_cost(db: &Db, state: &State, unit: &Unit, from: ExactPos, pos: ExactPos)
-> MovePoints
{
let map_pos = pos.map_pos;
let objects_at = state.objects_at(map_pos);
let units_at = state.units_at(map_pos);
let mut unit_cost = 0;
let mut object_cost = 0;
let unit_type = db.unit_type(unit.type_id);
if unit_type.is_air {
return MovePoints{n: 2};
}
'unit_loop: for unit in units_at {
for object in objects_at.clone() {
match object.pos.slot_id {
SlotId::Id(_) => if unit.pos == object.pos {
assert!(db.unit_type(unit.type_id).is_infantry);
break 'unit_loop;
},
SlotId::TwoTiles(_) | SlotId::WholeTile => {
break 'unit_loop;
},
SlotId::Air => {},
}
}
unit_cost += 1;
}
let tile = state.map().tile(pos);
let mut terrain_cost = if unit_type.is_infantry {
match *tile {
Terrain::Plain | Terrain::City => 4,
Terrain::Trees => 5,
Terrain::Water => 99,
}
} else {
match *tile {
Terrain::Plain | Terrain::City => 4,
Terrain::Trees => 8,
Terrain::Water => 99,
}
};
for object in objects_at.clone() {
if object.class != ObjectClass::Road {
continue;
}
let mut i = object.pos.map_pos_iter();
let road_from = i.next().unwrap();
let road_to = i.next().unwrap();
assert!(road_from != road_to);
let is_road_pos_ok = road_from == from.map_pos && road_to == pos.map_pos;
let is_road_pos_rev_ok = road_to == from.map_pos && road_from == pos.map_pos;
if (is_road_pos_ok || is_road_pos_rev_ok) && !unit_type.is_big {
// TODO: ultrahardcoded value :(
terrain_cost = if unit_type.is_infantry { 4 } else { 2 };
}
}
for object in objects_at {
let cost = if unit_type.is_infantry {
match object.class {
ObjectClass::Building => 1,
ObjectClass::ReinforcementSector |
ObjectClass::Road |
ObjectClass::Smoke => 0,
}
} else {
match object.class {
ObjectClass::Building => 2,
ObjectClass::ReinforcementSector |
ObjectClass::Road |
ObjectClass::Smoke => 0,
}
};
object_cost += cost;
}
MovePoints{n: terrain_cost + object_cost + unit_cost}
}
#[derive(Clone, Debug)]
pub struct Pathfinder {
queue: Vec<ExactPos>,
map: Map<Tile>,
db: Rc<Db>,
}
impl Pathfinder {
pub fn new(db: Rc<Db>, map_size: Size2) -> Pathfinder {
Pathfinder {
queue: Vec::new(),
map: Map::new(map_size),
db: db,
}
}
pub fn get_map(&self) -> &Map<Tile> {
&self.map
}
fn process_neighbour_pos(
&mut self,
state: &State,
unit: &Unit,
original_pos: ExactPos,
neighbour_pos: ExactPos
) {
let old_cost = self.map.tile(original_pos).cost;
let tile_cost = tile_cost(&self.db, state, unit, original_pos, neighbour_pos);
let tile = self.map.tile_mut(neighbour_pos);
let new_cost = MovePoints{n: old_cost.n + tile_cost.n};
if tile.cost.n > new_cost.n {
tile.cost = new_cost;
tile.parent = Some(Dir::get_dir_from_to(
neighbour_pos.map_pos, original_pos.map_pos));
tile.slot_id = neighbour_pos.slot_id;
self.queue.push(neighbour_pos);
}
}
fn clean_map(&mut self) {
for pos in self.map.get_iter() {
let tile = self.map.tile_mut(pos);
tile.cost = max_cost();
tile.parent = None;
tile.slot_id = SlotId::WholeTile;
}
}
fn try_to_push_neighbours(
&mut self,
state: &State,
unit: &Unit,
pos: ExactPos,
) {
assert!(self.map.is_inboard(pos));
for dir in dirs() {
let neighbour_pos = Dir::get_neighbour_pos(pos.map_pos, dir);
if self.map.is_inboard(neighbour_pos) {
let exact_neighbour_pos = match get_free_exact_pos(
state, self.db.unit_type(unit.type_id), neighbour_pos
) {
Some(pos) => pos,
None => continue,
};
self.process_neighbour_pos(
state, unit, pos, exact_neighbour_pos);
}
}
}
fn push_start_pos_to_queue(&mut self, start_pos: ExactPos) {
let start_tile = self.map.tile_mut(start_pos);
start_tile.cost = MovePoints{n: 0};
start_tile.parent = None;
start_tile.slot_id = start_pos.slot_id;
self.queue.push(start_pos);
}
pub fn fill_map(&mut self, state: &State, unit: &Unit) {
assert!(self.queue.len() == 0);
self.clean_map();
self.push_start_pos_to_queue(unit.pos);
while !self.queue.is_empty() {
let pos = self.queue.remove(0);
self.try_to_push_neighbours(state, unit, pos);
}
}
/*
pub fn is_reachable(&self, pos: ExactPos) -> bool {
self.map.tile(pos).cost.n != max_cost().n
}
*/
pub fn get_path(&self, destination: ExactPos) -> Option<Vec<ExactPos>> {
let mut path = vec![destination];
let mut pos = destination;
if self.map.tile(pos).cost.n == max_cost().n {
return None;
}
while self.map.tile(pos).cost.n != 0 {
assert!(self.map.is_inboard(pos));
let parent_dir = match self.map.tile(pos).parent() {
Some(dir) => dir,
None => return None,
};
let neighbour_map_pos = Dir::get_neighbour_pos(pos.map_pos, parent_dir);
pos = ExactPos {
map_pos: neighbour_map_pos,
slot_id: self.map.tile(neighbour_map_pos).slot_id,
};
path.push(pos);
}
path.reverse();
if path.is_empty() {
None
} else {
Some(path)
}
}
}
| true |
9d60af038628e95f318c36b08d0779c820ec2986
|
Rust
|
TyOverby/spaceships
|
/src/client.rs
|
UTF-8
| 4,062 | 2.609375 | 3 |
[] |
no_license
|
extern crate lux;
extern crate spaceships;
extern crate bincode;
extern crate wire;
use wire::udp;
use lux::game::*;
use lux::prelude::*;
use spaceships::*;
use spaceships::ServerToClientMessage as S2C;
use spaceships::ClientToServerMessage as C2S;
use std::collections::HashMap;
use std::net::{ToSocketAddrs, SocketAddr};
struct SpaceshipGame {
ships: HashMap<u16, Spaceship>,
mine: Option<u16>,
sender: udp::Sender<C2S>,
recvr: udp::Receiver<(SocketAddr, MessageCarrier<'static>)>,
server_addr: SocketAddr
}
impl SpaceshipGame {
fn new<A, B: Clone>(socket: A, server_addr: B) -> SpaceshipGame
where A: ToSocketAddrs, B: ToSocketAddrs {
let (sender, receiver) = udp::bind(socket).unwrap();
sender.send(&C2S::Hello, server_addr.clone()).ok()
.expect("expected sending HELLO to server to work.");
SpaceshipGame {
ships: HashMap::new(),
mine: None,
sender: sender,
recvr: receiver,
server_addr: server_addr.to_socket_addrs().unwrap().next().unwrap()
}
}
fn my_ship_mut(&mut self) -> Option<&mut Spaceship> {
self.mine.and_then(move |id| self.ships.get_mut(&id))
}
fn process_update(&mut self, message: S2C) {
match message {
S2C::AssignSpaceship(id) => {
self.mine = Some(id);
}
S2C::UpdateSpaceship(ship) => {
if Some(ship.id) != self.mine {
self.ships.insert(ship.id, ship);
}
}
S2C::AddSpaceship(ship) => {
self.ships.insert(ship.id, ship);
}
S2C::RemoveSpaceship(id) => {
self.ships.remove(&id);
}
S2C::Goodbye => {
// do nothing for now
}
}
}
fn consume_update(&mut self, carrier: MessageCarrier<'static>) {
let general = carrier.general.take();
let specific = carrier.specific.map(|a| a.take());
let specific = specific.into_iter().flat_map(|a| a.into_iter());
let all = general.into_iter().chain(specific);
for message in all {
self.process_update(message);
}
}
}
impl Game for SpaceshipGame {
fn update(&mut self, _dt: f32, window: &mut Window, _events: &mut EventIterator) {
let from_server = self.recvr.iter().filter_map(|(from, m)| {
if from == self.server_addr { Some(m) } else { None }
}).collect::<Vec<_>>();
for message in from_server {
self.consume_update(message);
}
let sender = self.sender.clone();
let addr = self.server_addr.clone();
if let Some(ship) = self.my_ship_mut() {
let mut dirty = false;
let mouse = window.mouse_pos();
if window.mouse_pos() != ship.position {
ship.position = mouse;
dirty = true;
}
if window.is_key_pressed('a') {
ship.rotation += 0.05;
dirty = true;
}
if window.is_key_pressed('d') {
ship.rotation -= 0.05;
dirty = true;
}
if dirty {
sender.send(&C2S::UpdateSpaceship(*ship), addr).ok()
.expect("Expected sending to work.");
}
}
}
fn render(&mut self, _lag: f32, _window: &mut Window, frame: &mut Frame) {
for (_, ship) in &self.ships {
let (x, y) = ship.position;
frame.rect(x, y, 50.0, 50.0)
.rotate_around((26.0, 25.0), ship.rotation)
.fill_color(ship.color).fill();
}
}
}
fn main(){
let client_port: u16 = std::env::args().nth(1).and_then(|a| a.parse().ok()).expect("Expected client port");
let client_addr = ("localhost", client_port);
let server_addr = ("localhost", 1234u16);
let game = SpaceshipGame::new(client_addr, server_addr);
game.run_until_end();
}
| true |
41a91776fb868a3c16c456616c9716ae7905390c
|
Rust
|
x4e/scribe
|
/src/buffer/position.rs
|
UTF-8
| 3,580 | 3.890625 | 4 |
[
"MIT"
] |
permissive
|
use buffer::Distance;
use std::cmp::{PartialOrd, Ordering};
use std::default::Default;
use std::ops::{Add, AddAssign};
/// A pair of zero-based coordinates representing a location in a buffer.
/// The `offset` field is so named to emphasize that positions point to
/// locations before/after characters, not characters themselves, in an effort
/// to avoid fencepost errors.
#[derive(Copy, Clone, Debug, Default, PartialEq)]
pub struct Position {
pub line: usize,
pub offset: usize,
}
impl PartialOrd for Position {
fn partial_cmp(&self, other: &Position) -> Option<Ordering> {
Some(
if self.line < other.line {
Ordering::Less
} else if self.line > other.line {
Ordering::Greater
} else if self.offset < other.offset {
Ordering::Less
} else if self.offset > other.offset {
Ordering::Greater
} else {
Ordering::Equal
}
)
}
}
impl Add<Distance> for Position {
type Output = Position;
fn add(self, distance: Distance) -> Self::Output {
let offset =
if distance.lines > 0 {
distance.offset
} else {
self.offset + distance.offset
};
Position {
line: self.line + distance.lines,
offset
}
}
}
impl AddAssign<Distance> for Position {
fn add_assign(&mut self, distance: Distance) {
self.line += distance.lines;
self.offset =
if distance.lines > 0 {
distance.offset
} else {
self.offset + distance.offset
};
}
}
impl Position {
/// Creates a new position with a line/offset of 0.
///
/// # Examples
///
/// ```
/// use scribe::buffer::Position;
///
/// let mut position = Position::new();
/// assert_eq!(position, Position{
/// line: 0,
/// offset: 0
    /// });
    /// ```
pub fn new() -> Position {
Default::default()
}
}
#[cfg(test)]
mod tests {
use buffer::{Distance, Position};
#[test]
fn compare_works_when_lines_differ() {
// Important to make the earlier position have a greater
// offset, since that's an easy mistake to make.
let earlier_position = Position{ line: 2, offset: 20 };
let later_position = Position{ line: 3, offset: 10};
assert!(earlier_position < later_position);
}
#[test]
fn compare_works_when_lines_are_equal() {
let earlier_position = Position{ line: 3, offset: 10 };
let later_position = Position{ line: 3, offset: 20};
assert!(earlier_position < later_position);
}
#[test]
fn compare_works_when_lines_and_offsets_are_equal() {
let earlier_position = Position{ line: 3, offset: 10 };
let later_position = Position{ line: 3, offset: 10};
assert!(earlier_position <= later_position);
assert!(earlier_position >= later_position);
// This is technically not necessary since we
// derive the PartialEq trait, which provides
// the implementation for this.
assert!(earlier_position == later_position);
}
#[test]
fn add_assign_works_with_zero_line_distance() {
let mut position = Position{ line: 1, offset: 3 };
let distance = Distance{ lines: 0, offset: 4 };
position += distance;
assert_eq!(position, Position{
line: 1,
offset: 7
});
}
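    // Added illustration (not in the original file): when the distance spans
    // one or more lines, the resulting offset comes from the distance alone,
    // matching the `Add`/`AddAssign` implementations above.
    #[test]
    fn add_works_with_nonzero_line_distance() {
        let position = Position{ line: 1, offset: 3 };
        let distance = Distance{ lines: 2, offset: 5 };
        assert_eq!(position + distance, Position{
            line: 3,
            offset: 5
        });
    }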
}
| true |
6fdfcd0084b9cc7cdd36b935cfb323ad97cd8269
|
Rust
|
inickt/cs4500-wortham
|
/Fish/Common/src/server/referee.rs
|
UTF-8
| 15,414 | 3.5 | 4 |
[] |
no_license
|
//! This file contains all logic and data regarding the Referee component,
//! which runs complete games of Fish. To do this, it starts and runs the
//! game loop, sending the gamestate to all players each turn then retrieving
//! a player's move and validating it until the game is over.
use crate::common::action::PlayerMove;
use crate::common::board::Board;
use crate::common::gamestate::GameState;
use crate::common::gamephase::GamePhase;
use crate::common::game_tree::GameTree;
use crate::common::player::{ PlayerId, PlayerColor };
use crate::server::client::{ Client, ClientWithId };
/// A referee is in charge of starting, running, and managing a game of fish.
/// This entails looping until the game is over and on each turn sending the
/// full gamestate to all players, then getting the action of the current
/// player and validating it. If this action is invalid the player is kicked,
/// otherwise the placement/move is made.
///
/// There is expected to be 1 referee per game of fish.
///
/// The referee will kick clients who do any of the following:
/// 1. Send a well-formed but illegal placement to the referee
/// 2. Send a well-formed but illegal move to the referee
/// 3. Send non-well-formed JSON data to the Referee
/// 4. [Future] Take more than 30 seconds to send their move on their turn
struct Referee {
/// Client input/output stream data, indexed on GameState's PlayerId.
/// This Vec is in turn_order for each player.
clients: Vec<ClientWithId>,
/// The state of current game, separated by the current phase it is in.
phase: GamePhase,
/// The past moves that have been received by each client with the most
/// recent being last. Empty until the MovePenguins phase and cleared when
/// a player is kicked.
move_history: Vec<PlayerMove>,
}
/// The final GameState of a finished game, along with each player and
/// whether they won, lost, or were kicked.
pub struct GameResult {
/// This list is in the same order and of the same length
/// as the Referee's original clients list and turn_order. So, each entry
/// directly corresponds to the game outcome for a particular player.
pub final_statuses: Vec<ClientStatus>,
/// This is the final state of the game, which may be used to delve
/// into statistics detail about each player, such as their score
/// and end positions.
pub final_state: GameState
}
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub enum ClientStatus {
Won,
Lost,
Kicked
}
/// Runs a complete game of Fish, setting up the board and
/// waiting for player input for gameplay to occur, and terminating
/// when a player (or multiple) have won. Check out Planning/player-protocol.md
/// for more information on the Fish game.
///
/// Clients will know the game has started when the referee sends each player
/// the initial game state before the first turn.
///
/// Returns the Win,Loss,Kicked status of each player and the final GameState
pub fn run_game(clients: Vec<Box<dyn Client>>, board: Option<Board>) -> GameResult {
let clients: Vec<_> = clients.into_iter().enumerate()
.map(|(id, player)| ClientWithId::new(id, player)).collect();
run_game_shared(&clients, board)
}
/// Runs a game with a Vec of mutably shared clients so that clients
/// isn't consumed when the game is over.
///
/// Runs a complete game of Fish, setting up the board and
/// waiting for player input for gameplay to occur, and terminating
/// when a player (or multiple) have won. Check out Planning/player-protocol.md
/// for more information on the Fish game.
///
/// Clients will know the game has started when the referee sends each player
/// the initial game state before the first turn.
///
/// Returns the Win,Loss,Kicked status of each player and the final GameState
pub fn run_game_shared(clients: &[ClientWithId], board: Option<Board>) -> GameResult {
let board = board.unwrap_or(Board::with_no_holes(5, 5, 3));
let mut referee = Referee::new(clients.to_vec(), board);
referee.initialize_clients();
while !referee.is_game_over() {
referee.do_player_turn();
}
referee.get_game_result()
}
impl Referee {
fn new(clients: Vec<ClientWithId>, board: Board) -> Referee {
let client_ids = clients.iter().map(|client| client.id).collect();
let state = GameState::with_players(board, client_ids);
let phase = GamePhase::PlacingPenguins(state);
Referee { clients, phase, move_history: vec![] }
}
fn get_client_player_color(&self, client: &ClientWithId) -> PlayerColor {
let state = self.phase.get_state();
state.players.get(&client.id).unwrap().color
}
fn current_client(&self) -> &ClientWithId {
let current_player_id = self.phase.current_turn();
self.clients.iter().find(|client| client.id == current_player_id).unwrap()
}
fn initialize_clients(&mut self) {
let mut clients_to_kick = vec![];
let state = self.phase.get_state();
for client in self.clients.iter() {
let color = self.get_client_player_color(client);
let result = client.borrow_mut().initialize_game(state, color);
if result.is_none() {
clients_to_kick.push(client.id);
}
}
for id in clients_to_kick {
self.kick_player(id);
}
}
/// Returns the winners, losers, and kicked players of the game, along
/// with the final game state of the game.
///
/// Assumes that the game this referee was hosting has been played to
/// completion - otherwise no winners will be returned.
fn get_game_result(self) -> GameResult {
let Referee { clients, phase, .. } = self;
let final_statuses = clients.into_iter().map(|client| {
if client.kicked {
ClientStatus::Kicked
} else if phase.get_state().winning_players.as_ref()
.map_or(false, |winning_players| winning_players.contains(&client.id)) {
ClientStatus::Won
} else {
ClientStatus::Lost
}
}).collect();
GameResult {
final_state: phase.take_state(),
final_statuses,
}
}
/// Waits for input from the current player in the GameState,
/// then acts upon that input
fn do_player_turn(&mut self) {
let success = match &self.phase {
GamePhase::Starting => Some(()),
GamePhase::PlacingPenguins(_) => self.do_player_placement(),
GamePhase::MovingPenguins(_) => self.do_player_move(),
GamePhase::Done(_) => Some(()),
};
if success.is_none() {
self.kick_current_player();
}
self.update_gamephase_if_needed();
}
    /// Retrieve a player's next placement from their input stream, then try to take that placement.
/// If the placement cannot be received from the input stream (e.g. due to a timeout) or the
/// placement is invalid in any way then None will be returned. Otherwise, Some is returned.
///
/// Invariant: If None is returned then the current_turn does not change.
fn do_player_placement(&mut self) -> Option<()> {
let placement = self.current_client().borrow_mut().get_placement(self.phase.get_state())?;
match &mut self.phase {
GamePhase::PlacingPenguins(gamestate) => gamestate.place_avatar_for_current_player(placement),
_ => unreachable!("do_player_placement called outside of the PlacingPenguins phase"),
}
}
/// Retrieve a player's next move from their input stream then try to take that move.
/// If the move is invalid in any way or if the move cannot be parsed from the input
    /// stream (e.g. if the stream times out) then None is returned. Otherwise Some is returned.
///
/// Invariant: If None is returned then the current_turn does not change.
fn do_player_move(&mut self) -> Option<()> {
let move_history = self.get_move_history_for_current_client();
let move_ = self.current_client().borrow_mut().get_move(self.phase.get_state(), &move_history)?;
let current_player_color = self.get_client_player_color(self.current_client());
match &mut self.phase {
GamePhase::MovingPenguins(gametree) => {
let starting_state = gametree.get_state();
let player_move = PlayerMove::new(current_player_color, move_, starting_state)?;
self.phase.try_do_move(move_)?;
self.move_history.push(player_move);
Some(())
},
_ => unreachable!("do_player_move called outside of the MovingPenguins phase"),
}
}
    /// Returns the move history since the last time this player moved. Most recent moves are last.
fn get_move_history_for_current_client(&self) -> Vec<PlayerMove> {
let current_client_color = self.get_client_player_color(self.current_client());
let mut history = self.move_history.iter().rev()
.take_while(|player_move| player_move.mover != current_client_color)
.copied()
.collect::<Vec<PlayerMove>>();
history.reverse();
history
}
/// Kick the given player from the game, removing all their penguins and
/// their position in the turn order. This does not notify the player that
/// they were kicked.
fn kick_player(&mut self, player: PlayerId) {
self.phase.get_state_mut().remove_player(player);
        if let Some(client) = self.clients.iter_mut().find(|client| client.id == player) {
            client.kicked = true;
        }
// Must manually update after kicking a player to update the tree of valid moves in the game
// tree, if needed
self.phase.update_from_gamestate(self.phase.get_state().clone());
// Clear the move history when we kick players so as to not retain moves
// made by players that are no longer in the game
self.move_history.clear();
// The game ends early if all clients are kicked
if self.clients.iter().all(|client| client.kicked) {
self.phase = GamePhase::Done(self.phase.get_state().clone());
}
}
/// Kick the player whose turn it currently is. See kick_player for
/// the details of kicking a player.
fn kick_current_player(&mut self) {
let current_player = self.phase.get_state().current_turn;
self.kick_player(current_player);
}
/// Player placements and moves will update the current
/// GameState/GameTree but we still need to check if we've
/// finished the placement/moves phase and update the current
/// GamePhase as appropriate here.
fn update_gamephase_if_needed(&mut self) {
if let GamePhase::PlacingPenguins(state) = &mut self.phase {
if state.all_penguins_are_placed() {
self.phase = GamePhase::MovingPenguins(GameTree::new(state));
}
}
// Test if MovingPenguins is finished even after testing the above in case we
// start a game after placing penguins where immediately no penguin can move.
if let GamePhase::MovingPenguins(GameTree::End(state)) = &self.phase {
self.phase = GamePhase::Done(state.clone());
}
}
/// Is this referee's game over?
fn is_game_over(&self) -> bool {
self.phase.is_game_over()
}
}
#[cfg(test)]
mod tests {
use super::*;
use super::ClientStatus::*;
use crate::server::strategy::Strategy;
use crate::server::ai_client::AIClient;
use crate::common::action::{ Move, Placement };
use crate::common::tile::TileId;
pub struct CheatingStrategy;
impl Strategy for CheatingStrategy {
fn find_placement(&mut self, _gamestate: &GameState) -> Placement {
Placement::new(TileId(0))
}
fn find_move(&mut self, _game: &mut GameTree) -> Move {
Move::new(TileId(0), TileId(0))
}
}
/// Runs a game where the first player should win if they're looking ahead enough
/// turns. For more info on this specific game, see the explanation in
/// client/strategy.rs, fn test_move_penguin_minmax_lookahead
#[test]
fn run_game_normal() {
// set up players
let players: Vec<Box<dyn Client>> = vec![
Box::new(AIClient::with_zigzag_minmax_strategy()),
Box::new(AIClient::with_zigzag_minmax_strategy()),
];
let board = Board::with_no_holes(3, 5, 1);
let result = run_game(players, Some(board));
assert!(result.final_state.is_game_over());
assert_eq!(result.final_statuses, vec![Won, Lost]);
}
/// Runs a game that should start with no possible player moves, although
/// they can each place all of their penguins.
#[test]
fn run_game_initially_over() {
// set up players
let players: Vec<Box<dyn Client>> = vec![
Box::new(AIClient::with_zigzag_minmax_strategy()),
Box::new(AIClient::with_zigzag_minmax_strategy()),
];
let board = Board::with_no_holes(2, 4, 1);
let result = run_game(players, Some(board));
assert!(result.final_state.is_game_over());
assert_eq!(result.final_statuses, vec![Won, Won]);
}
// Runs a game that should end with both players winning.
#[test]
fn run_game_both_players_win() {
// set up players
let players: Vec<Box<dyn Client>> = vec![
Box::new(AIClient::with_zigzag_minmax_strategy()),
Box::new(AIClient::with_zigzag_minmax_strategy()),
];
let board = Board::with_no_holes(4, 4, 1);
let result = run_game(players, Some(board));
assert!(result.final_state.is_game_over());
assert_eq!(result.final_statuses, vec![Won, Won]);
}
/// Runs a game with one cheating player who should get kicked from the game,
/// and one who plays the normal minmax strategy and should thus win.
#[test]
fn run_game_cheater() {
let players_cheater_second: Vec<Box<dyn Client>> = vec![
Box::new(AIClient::with_zigzag_minmax_strategy()),
Box::new(AIClient::new(Box::new(CheatingStrategy))),
];
let result = run_game(players_cheater_second, None);
assert_eq!(result.final_statuses, vec![Won, Kicked]);
}
#[test]
fn run_game_two_cheaters() {
let players_cheater_first: Vec<Box<dyn Client>> = vec![
Box::new(AIClient::new(Box::new(CheatingStrategy))),
Box::new(AIClient::with_zigzag_minmax_strategy()),
Box::new(AIClient::new(Box::new(CheatingStrategy))),
];
let result = run_game(players_cheater_first, None);
assert_eq!(result.final_statuses, vec![Kicked, Won, Kicked]);
}
#[test]
fn run_game_all_cheating_players() {
let players_cheater_first: Vec<Box<dyn Client>> = vec![
Box::new(AIClient::new(Box::new(CheatingStrategy))),
Box::new(AIClient::new(Box::new(CheatingStrategy))),
Box::new(AIClient::new(Box::new(CheatingStrategy))),
];
let result = run_game(players_cheater_first, None);
assert_eq!(result.final_statuses, vec![Kicked, Kicked, Kicked]);
}
}
| true |
9ceca4cf456e8b0dcc7a95db0f7724e3b9eceb25
|
Rust
|
dfrankland/mk20d7
|
/src/mcm/plamc/mod.rs
|
UTF-8
| 1,778 | 2.75 | 3 |
[
"MIT"
] |
permissive
|
#[doc = r" Value read from the register"]
pub struct R {
bits: u16,
}
impl super::PLAMC {
#[doc = r" Reads the contents of the register"]
#[inline]
pub fn read(&self) -> R {
R {
bits: self.register.get(),
}
}
}
#[doc = "Possible values of the field `AMC`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum AMCR {
#[doc = "A bus master connection to AXBS input port n is absent"]
_0,
#[doc = "A bus master connection to AXBS input port n is present"]
_1,
#[doc = r" Reserved"]
_Reserved(u8),
}
impl AMCR {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bits(&self) -> u8 {
match *self {
AMCR::_0 => 0,
AMCR::_1 => 1,
AMCR::_Reserved(bits) => bits,
}
}
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _from(value: u8) -> AMCR {
match value {
0 => AMCR::_0,
1 => AMCR::_1,
i => AMCR::_Reserved(i),
}
}
#[doc = "Checks if the value of the field is `_0`"]
#[inline]
pub fn is_0(&self) -> bool {
*self == AMCR::_0
}
#[doc = "Checks if the value of the field is `_1`"]
#[inline]
pub fn is_1(&self) -> bool {
*self == AMCR::_1
}
}
impl R {
#[doc = r" Value of the register as raw bits"]
#[inline]
pub fn bits(&self) -> u16 {
self.bits
}
#[doc = "Bits 0:7 - Each bit in the AMC field indicates if there is a corresponding connection to the AXBS master input port."]
#[inline]
pub fn amc(&self) -> AMCR {
AMCR::_from({
const MASK: u8 = 255;
const OFFSET: u8 = 0;
((self.bits >> OFFSET) & MASK as u16) as u8
})
}
}
| true |
915cd2ed9a28344154af1276236d59065ae1cb78
|
Rust
|
evelyneee/pueue-lib
|
/src/platform/windows/directories.rs
|
UTF-8
| 972 | 2.65625 | 3 |
[
"MIT"
] |
permissive
|
use std::path::{Path, PathBuf};
use anyhow::{anyhow, Result};
fn get_home_dir() -> Result<PathBuf> {
dirs::home_dir().ok_or_else(|| anyhow!("Couldn't resolve home dir"))
}
pub fn default_config_directory() -> Result<PathBuf> {
Ok(dirs::data_local_dir()
.ok_or(anyhow!("Couldn't resolve app data directory"))?
.join("pueue"))
}
pub fn get_config_directories() -> Result<Vec<PathBuf>> {
Ok(vec![
// Windows Terminal stores its config file in the "AppData/Local" directory.
default_config_directory()?,
Path::new(".").to_path_buf(),
])
}
pub fn default_pueue_path() -> Result<String> {
// Use local data directory since this data doesn't need to be synced.
let path = dirs::data_local_dir()
.ok_or(anyhow!("Couldn't resolve app data directory"))?
.join("pueue");
Ok(path
.to_str()
.ok_or(anyhow!("Failed to parse log path (Weird characters?)"))?
.to_string())
}
| true |
b427064310e50b71ae41b44c130b7a5c6e264504
|
Rust
|
Andrew-Schwartz/aoc2020
|
/src/day_template.rs
|
UTF-8
| 580 | 2.921875 | 3 |
[] |
no_license
|
pub type Parsed = String;
pub type Output1 = usize;
pub type Output2 = usize;
pub fn parse(input: &str) -> Parsed {
todo!()
}
pub fn part1(_input: &Parsed) -> Output1 {
todo!()
}
pub fn part2(_input: &Parsed) -> Output2 {
todo!()
}
#[allow(dead_code, unused_imports)]
mod test {
use super::*;
const INPUT: &'static str = r#""#;
#[test]
fn test1() {
let input = parse(INPUT);
assert_eq!(part1(&input), todo!())
}
#[test]
fn test2() {
let input = parse(INPUT);
assert_eq!(part2(&input), todo!())
}
}
| true |
0eb2d0d791bb766173580917b5b8cb8c7cdde2b9
|
Rust
|
bfffs/bfffs
|
/bfffs/tests/integration/bfffs/fs/mount.rs
|
UTF-8
| 3,953 | 2.515625 | 3 |
[
"MIT",
"Apache-2.0"
] |
permissive
|
use std::{
fs,
os::unix::fs::FileTypeExt,
path::PathBuf,
process::Command,
time::Duration,
};
use assert_cmd::{cargo::cargo_bin, prelude::*};
use function_name::named;
use nix::mount::{unmount, MntFlags};
use rstest::{fixture, rstest};
use tempfile::{Builder, TempDir};
use super::super::super::*;
struct Harness {
_bfffsd: Bfffsd,
pub tempdir: TempDir,
pub mountpoint: PathBuf,
pub sockpath: PathBuf,
}
impl Drop for Harness {
fn drop(&mut self) {
let _ignore_errors = unmount(&self.mountpoint, MntFlags::empty());
}
}
/// Create a pool for backing store
#[fixture]
fn harness() -> Harness {
let len = 1 << 30; // 1 GB
let tempdir = Builder::new()
.prefix(concat!(module_path!(), "."))
.tempdir()
.unwrap();
let filename = tempdir.path().join("vdev");
let file = fs::File::create(&filename).unwrap();
file.set_len(len).unwrap();
let mountpoint = tempdir.path().join("mnt");
fs::create_dir(&mountpoint).unwrap();
bfffs()
.args(["pool", "create", "-p"])
.arg(format!("mountpoint={}", mountpoint.display()))
.arg("mypool")
.arg(&filename)
.assert()
.success();
let sockpath = tempdir.path().join("bfffsd.sock");
let bfffsd: Bfffsd = Command::new(cargo_bin("bfffsd"))
.arg("--sock")
.arg(sockpath.as_os_str())
.arg("mypool")
.arg(filename.as_os_str())
.spawn()
.unwrap()
.into();
// We must wait for bfffsd to be ready to receive commands
waitfor(Duration::from_secs(5), || {
fs::metadata(&sockpath)
.map(|md| md.file_type().is_socket())
.unwrap_or(false)
})
.expect("Timeout waiting for bfffsd to listen");
Harness {
_bfffsd: bfffsd,
sockpath,
mountpoint,
tempdir,
}
}
// Unmount the file system and remount it in the same location
#[named]
#[rstest]
#[tokio::test]
async fn mount_again(harness: Harness) {
require_fusefs!();
bfffs()
.arg("--sock")
.arg(harness.sockpath.as_os_str())
.args(["fs", "mount", "mypool"])
.assert()
.success();
unmount(&harness.mountpoint, MntFlags::empty()).unwrap();
bfffs()
.arg("--sock")
.arg(harness.sockpath.as_os_str())
.args(["fs", "mount", "mypool"])
.assert()
.success();
}
#[named]
#[rstest]
#[tokio::test]
async fn ok(harness: Harness) {
require_fusefs!();
bfffs()
.arg("--sock")
.arg(harness.sockpath.as_os_str())
.args(["fs", "mount", "mypool"])
.assert()
.success();
}
#[named]
#[rstest]
#[tokio::test]
async fn options(harness: Harness) {
require_fusefs!();
bfffs()
.arg("--sock")
.arg(harness.sockpath.as_os_str())
.args(["fs", "mount", "-o", "atime=off", "mypool"])
.assert()
.success();
// TODO: figure out how to check if atime is active.
}
/// Mount a dataset other than the pool root
#[named]
#[rstest]
#[tokio::test]
async fn subfs(harness: Harness) {
require_fusefs!();
let submp = harness.tempdir.path().join("mnt").join("foo");
fs::create_dir(&submp).unwrap();
bfffs()
.arg("--sock")
.arg(harness.sockpath.as_os_str())
.args(["fs", "create", "mypool/foo"])
.assert()
.success();
bfffs()
.arg("--sock")
.arg(harness.sockpath.as_os_str())
.args(["fs", "mount", "mypool/foo"])
.assert()
.success();
unmount(&submp, MntFlags::empty()).unwrap();
}
// TODO: test with overridden mountpoint, once that is possible.
// TODO: once the mountpoint can be overridden, check that it is not be possible
// to mount the same file system twice, similar to the old ebusy test, from
// before the mountpoint property was introduced.
// TODO: test with alternate mountpoint
| true |
ef8fc9d17d79af006365c6edf4c79946175e8d2d
|
Rust
|
TeXitoi/rust-geo
|
/src/algorithm/intersects.rs
|
UTF-8
| 15,354 | 3.53125 | 4 |
[
"Apache-2.0",
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
use num_traits::Float;
use types::{LineString, Polygon, Bbox, Point};
use algorithm::contains::Contains;
/// Checks if the geometry A intersects the geometry B.
pub trait Intersects<Rhs = Self> {
/// Checks if the geometry A intersects the geometry B.
///
/// ```
/// use geo::{Coordinate, Point, LineString};
/// use geo::algorithm::intersects::Intersects;
///
/// let p = |x, y| Point(Coordinate { x: x, y: y });
/// let linestring = LineString(vec![p(3., 2.), p(7., 6.)]);
///
/// assert!(linestring.intersects(&LineString(vec![p(3., 4.), p(8., 4.)])));
/// assert!(!linestring.intersects(&LineString(vec![p(9., 2.), p(11., 5.)])));
///
/// ```
///
fn intersects(&self, rhs: &Rhs) -> bool;
}
impl<T> Intersects<LineString<T>> for LineString<T>
where T: Float
{
// See: https://github.com/brandonxiang/geojson-python-utils/blob/33b4c00c6cf27921fb296052d0c0341bd6ca1af2/geojson_utils.py
fn intersects(&self, linestring: &LineString<T>) -> bool {
let vect0 = &self.0;
let vect1 = &linestring.0;
if vect0.is_empty() || vect1.is_empty() {
return false;
}
for a in vect0.windows(2) {
for b in vect1.windows(2) {
let u_b = (b[1].y() - b[0].y()) * (a[1].x() - a[0].x()) -
(b[1].x() - b[0].x()) * (a[1].y() - a[0].y());
if u_b == T::zero() {
continue;
}
let ua_t = (b[1].x() - b[0].x()) * (a[0].y() - b[0].y()) -
(b[1].y() - b[0].y()) * (a[0].x() - b[0].x());
let ub_t = (a[1].x() - a[0].x()) * (a[0].y() - b[0].y()) -
(a[1].y() - a[0].y()) * (a[0].x() - b[0].x());
let u_a = ua_t / u_b;
let u_b = ub_t / u_b;
if (T::zero() <= u_a) && (u_a <= T::one()) && (T::zero() <= u_b) && (u_b <= T::one()) {
return true;
}
}
}
false
}
}
impl<T> Intersects<LineString<T>> for Polygon<T>
where T: Float
{
fn intersects(&self, linestring: &LineString<T>) -> bool {
// line intersects inner or outer polygon edge
if self.exterior.intersects(linestring) || self.interiors.iter().any(|inner| inner.intersects(linestring)) {
return true;
} else {
// or if it's contained in the polygon
return linestring.0.iter().any(|point| self.contains(point))
}
}
}
impl<T> Intersects<Bbox<T>> for Bbox<T>
where T: Float
{
fn intersects(&self, bbox: &Bbox<T>) -> bool {
        // Bounding boxes are considered intersecting only when their edges overlap;
        // a box lying entirely inside the other does not count (see bbox_test below).
if bbox.contains(&self) {
return false
} else {
(self.xmin >= bbox.xmin && self.xmin <= bbox.xmax || self.xmax >= bbox.xmin && self.xmax <= bbox.xmax) &&
(self.ymin >= bbox.ymin && self.ymin <= bbox.ymax || self.ymax >= bbox.ymin && self.ymax <= bbox.ymax)
}
}
}
impl<T> Intersects<Polygon<T>> for Bbox<T>
where T: Float
{
fn intersects(&self, polygon: &Polygon<T>) -> bool {
polygon.intersects(self)
}
}
impl<T> Intersects<Bbox<T>> for Polygon<T>
where T: Float
{
fn intersects(&self, bbox: &Bbox<T>) -> bool {
let p = Polygon::new(LineString(vec![Point::new(bbox.xmin, bbox.ymin),
Point::new(bbox.xmin, bbox.ymax),
Point::new(bbox.xmax, bbox.ymax),
Point::new(bbox.xmax, bbox.ymin),
Point::new(bbox.xmin, bbox.ymin)]),
vec![]);
self.intersects(&p)
}
}
impl<T> Intersects<Polygon<T>> for Polygon<T>
where T: Float
{
fn intersects(&self, polygon: &Polygon<T>) -> bool {
// self intersects (or contains) any line in polygon
self.intersects(&polygon.exterior) ||
polygon.interiors.iter().any(|inner_line_string| self.intersects(inner_line_string)) ||
// self is contained inside polygon
polygon.intersects(&self.exterior)
}
}
#[cfg(test)]
mod test {
use types::{Coordinate, Point, LineString, Polygon, Bbox};
use algorithm::intersects::Intersects;
/// Tests: intersection LineString and LineString
#[test]
fn empty_linestring1_test() {
let p = |x, y| Point(Coordinate { x: x, y: y });
let linestring = LineString(vec![p(3., 2.), p(7., 6.)]);
assert!(!LineString(Vec::new()).intersects(&linestring));
}
#[test]
fn empty_linestring2_test() {
let p = |x, y| Point(Coordinate { x: x, y: y });
let linestring = LineString(vec![p(3., 2.), p(7., 6.)]);
assert!(!linestring.intersects(&LineString(Vec::new())));
}
#[test]
fn empty_all_linestring_test() {
assert!(!LineString::<f64>(Vec::new()).intersects(&LineString(Vec::new())));
}
#[test]
fn intersect_linestring_test() {
let p = |x, y| Point(Coordinate { x: x, y: y });
let linestring = LineString(vec![p(3., 2.), p(7., 6.)]);
assert!(linestring.intersects(&LineString(vec![p(3., 4.), p(8., 4.)])));
}
#[test]
fn parallel_linestrings_test() {
let p = |x, y| Point(Coordinate { x: x, y: y });
let linestring = LineString(vec![p(3., 2.), p(7., 6.)]);
assert!(!linestring.intersects(&LineString(vec![p(3., 1.), p(7., 5.)])));
}
/// Tests: intersection LineString and Polygon
#[test]
fn linestring_in_polygon_test() {
let p = |x, y| Point(Coordinate { x: x, y: y });
let linestring = LineString(vec![p(0., 0.), p(5., 0.), p(5., 6.), p(0., 6.), p(0., 0.)]);
let poly = Polygon::new(linestring, Vec::new());
assert!(poly.intersects(&LineString(vec![p(2., 2.), p(3., 3.)])));
}
#[test]
fn linestring_on_boundary_polygon_test() {
let p = |x, y| Point(Coordinate { x: x, y: y });
let poly = Polygon::new(LineString(vec![p(0., 0.), p(5., 0.), p(5., 6.), p(0., 6.), p(0., 0.)]),
Vec::new());
assert!(poly.intersects(&LineString(vec![p(0., 0.), p(5., 0.)])));
assert!(poly.intersects(&LineString(vec![p(5., 0.), p(5., 6.)])));
assert!(poly.intersects(&LineString(vec![p(5., 6.), p(0., 6.)])));
assert!(poly.intersects(&LineString(vec![p(0., 6.), p(0., 0.)])));
}
#[test]
fn intersect_linestring_polygon_test() {
let p = |x, y| Point(Coordinate { x: x, y: y });
let poly = Polygon::new(LineString(vec![p(0., 0.), p(5., 0.), p(5., 6.), p(0., 6.), p(0., 0.)]),
Vec::new());
assert!(poly.intersects(&LineString(vec![p(2., 2.), p(6., 6.)])));
}
#[test]
fn linestring_outside_polygon_test() {
let p = |x, y| Point(Coordinate { x: x, y: y });
let poly = Polygon::new(LineString(vec![p(0., 0.), p(5., 0.), p(5., 6.), p(0., 6.), p(0., 0.)]),
Vec::new());
assert!(!poly.intersects(&LineString(vec![p(7., 2.), p(9., 4.)])));
}
#[test]
fn linestring_in_inner_polygon_test() {
let p = |x, y| Point(Coordinate { x: x, y: y });
let e = LineString(vec![p(0., 0.), p(5., 0.), p(5., 6.), p(0., 6.), p(0., 0.)]);
let v = vec![LineString(vec![p(1., 1.), p(4., 1.), p(4., 4.), p(1., 4.), p(1., 1.)])];
let poly = Polygon::new(e, v);
assert!(!poly.intersects(&LineString(vec![p(2., 2.), p(3., 3.)])));
assert!(poly.intersects(&LineString(vec![p(2., 2.), p(4., 4.)])));
}
#[test]
fn linestring_traverse_polygon_test() {
let p = |x, y| Point(Coordinate { x: x, y: y });
let e = LineString(vec![p(0., 0.), p(5., 0.), p(5., 6.), p(0., 6.), p(0., 0.)]);
let v = vec![LineString(vec![p(1., 1.), p(4., 1.), p(4., 4.), p(1., 4.), p(1., 1.)])];
let poly = Polygon::new(e, v);
assert!(poly.intersects(&LineString(vec![p(2., 0.5), p(2., 5.)])));
}
#[test]
fn linestring_in_inner_with_2_inner_polygon_test() {
// (8,9)
// (2,8) | (14,8)
// ------------------------------------|------------------------------------------
// | | |
// | (4,7) (6,7) | |
// | ------------------ | (11,7) |
// | | | |
// | (4,6) (7,6) | (9,6) | (12,6) |
// | ---------------------- | ----------------|--------- |
// | | | | | | | |
// | | (6,5) | | | | | |
// | | / | | | | | |
// | | / | | | | | |
// | | (5,4) | | | | | |
// | | | | | | | |
// | ---------------------- | ----------------|--------- |
// | (4,3) (7,3) | (9,3) | (12,3) |
// | | (11,2.5) |
// | | |
// ------------------------------------|------------------------------------------
// (2,2) | (14,2)
// (8,1)
//
let p = |x, y| Point(Coordinate { x: x, y: y });
let e = LineString(vec![p(2., 2.), p(14., 2.), p(14., 8.), p(2., 8.), p(2., 2.)]);
let v = vec![LineString(vec![p(4., 3.), p(7., 3.), p(7., 6.), p(4., 6.), p(4., 3.)]),
LineString(vec![p(9., 3.), p(12., 3.), p(12., 6.), p(9., 6.), p(9., 3.)])];
let poly = Polygon::new(e, v);
assert!(!poly.intersects(&LineString(vec![p(5., 4.), p(6., 5.)])));
assert!(poly.intersects(&LineString(vec![p(11., 2.5), p(11., 7.)])));
assert!(poly.intersects(&LineString(vec![p(4., 7.), p(6., 7.)])));
assert!(poly.intersects(&LineString(vec![p(8., 1.), p(8., 9.)])));
}
#[test]
fn polygons_do_not_intersect() {
let p = |x, y| Point(Coordinate { x: x, y: y });
let p1 = Polygon::new(LineString(vec![p(1., 3.), p(3., 3.), p(3., 5.), p(1., 5.), p(1., 3.)]),
Vec::new());
let p2 = Polygon::new(LineString(vec![p(10., 30.), p(30., 30.), p(30., 50.), p(10., 50.), p(10., 30.)]),
Vec::new());
assert!(!p1.intersects(&p2));
assert!(!p2.intersects(&p1));
}
#[test]
fn polygons_overlap() {
let p = |x, y| Point(Coordinate { x: x, y: y });
let p1 = Polygon::new(LineString(vec![p(1., 3.), p(3., 3.), p(3., 5.), p(1., 5.), p(1., 3.)]),
Vec::new());
let p2 = Polygon::new(LineString(vec![p(2., 3.), p(4., 3.), p(4., 7.), p(2., 7.), p(2., 3.)]),
Vec::new());
assert!(p1.intersects(&p2));
assert!(p2.intersects(&p1));
}
#[test]
fn polygon_contained() {
let p = |x, y| Point(Coordinate { x: x, y: y });
let p1 = Polygon::new(LineString(vec![p(1., 3.), p(4., 3.), p(4., 6.), p(1., 6.), p(1., 3.)]),
Vec::new());
let p2 = Polygon::new(LineString(vec![p(2., 4.), p(3., 4.), p(3., 5.), p(2., 5.), p(2., 4.)]),
Vec::new());
assert!(p1.intersects(&p2));
assert!(p2.intersects(&p1));
}
#[test]
    fn polygons_coincident() {
let p = |x, y| Point(Coordinate { x: x, y: y });
let p1 = Polygon::new(LineString(vec![p(1., 3.), p(4., 3.), p(4., 6.), p(1., 6.), p(1., 3.)]),
Vec::new());
let p2 = Polygon::new(LineString(vec![p(1., 3.), p(4., 3.), p(4., 6.), p(1., 6.), p(1., 3.)]),
Vec::new());
assert!(p1.intersects(&p2));
assert!(p2.intersects(&p1));
}
#[test]
fn polygon_intersects_bbox_test() {
// Polygon poly =
//
// (0,8) (12,8)
// ┌──────────────────────┐
// │ (7,7) (11,7) │
// │ ┌──────┐ │
// │ │ │ │
// │ │(hole)│ │
// │ │ │ │
// │ │ │ │
// │ └──────┘ │
// │ (7,4) (11,4) │
// │ │
// │ │
// │ │
// │ │
// │ │
// └──────────────────────┘
// (0,0) (12,0)
let p = |x, y| Point(Coordinate { x: x, y: y });
let poly = Polygon::new(LineString(vec![p(0., 0.), p(12., 0.), p(12., 8.), p(0., 8.), p(0., 0.)]),
vec![LineString(vec![p(7., 4.), p(11., 4.), p(11., 7.), p(7., 7.), p(7., 4.)])]);
let b1 = Bbox { xmin: 11.0, xmax: 13.0, ymin: 1.0, ymax: 2.0 };
let b2 = Bbox { xmin: 2.0, xmax: 8.0, ymin: 2.0, ymax: 5.0 };
let b3 = Bbox { xmin: 8.0, xmax: 10.0, ymin: 5.0, ymax: 6.0 };
let b4 = Bbox { xmin: 1.0, xmax: 3.0, ymin: 1.0, ymax: 3.0 };
// overlaps
assert!(poly.intersects(&b1));
// contained in exterior, overlaps with hole
assert!(poly.intersects(&b2));
// completely contained in the hole
assert!(!poly.intersects(&b3));
// completely contained in the polygon
assert!(poly.intersects(&b4));
// conversely,
assert!(b1.intersects(&poly));
assert!(b2.intersects(&poly));
assert!(!b3.intersects(&poly));
assert!(b4.intersects(&poly));
}
#[test]
fn bbox_test() {
let bbox_xl = Bbox { xmin: -100., xmax: 100., ymin: -200., ymax: 200.};
let bbox_sm = Bbox { xmin: -10., xmax: 10., ymin: -20., ymax: 20.};
let bbox_s2 = Bbox { xmin: 0., xmax: 20., ymin: 0., ymax: 30.};
assert_eq!(false, bbox_xl.intersects(&bbox_sm));
assert_eq!(false, bbox_sm.intersects(&bbox_xl));
assert_eq!(true, bbox_sm.intersects(&bbox_s2));
assert_eq!(true, bbox_s2.intersects(&bbox_sm));
}
}
| true |
37a95062473c80e00e87be02cf7ee5da0967146f
|
Rust
|
frigus02/crypto-playground
|
/src/challenges/repeating_key_xor.rs
|
UTF-8
| 727 | 3.296875 | 3 |
[
"MIT"
] |
permissive
|
use super::super::encoding::hex;
use super::super::encoding::xor;
pub fn run(plain_text: &str, encoding_bytes: &[u8]) -> String {
let bytes = plain_text.as_bytes();
let result = xor::encode(&bytes, encoding_bytes);
hex::encode(&result)
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn run_test() {
        let plain_text = "Burning 'em, if you ain't quick and nimble\nI go crazy when I hear a cymbal";
        let encoding_bytes = vec![73u8, 67u8, 69u8]; // "ICE"
        let encoded = run(plain_text, &encoding_bytes);
assert_eq!("0b3637272a2b2e63622c2e69692a23693a2a3c6324202d623d63343c2a26226324272765272a282b2f20430a652e2c652a3124333a653e2b2027630c692b20283165286326302e27282f", encoded);
}
}
| true |
c33483e7d3c05c6dd7834233140f294c6adb44db
|
Rust
|
COLDTURNIP/raphanus_leetcode
|
/rust/src/p414.rs
|
UTF-8
| 1,794 | 3.765625 | 4 |
[] |
no_license
|
/*
Problem 414. Third Maximum Number
=================================
https://leetcode.com/problems/third-maximum-number/
Given a non-empty array of integers, return the third maximum number in this array. If it does not
exist, return the maximum number. The time complexity must be in O(n).
Example 1:
Input: [3, 2, 1]
Output: 1
Explanation: The third maximum is 1.
Example 2:
Input: [1, 2]
Output: 2
Explanation: The third maximum does not exist, so the maximum (2) is returned instead.
Example 3:
Input: [2, 2, 3, 1]
Output: 1
Explanation: Note that the third maximum here means the third maximum distinct number.
Both numbers with value 2 are both considered as second maximum.
*/
impl Solution {
pub fn third_max(nums: Vec<i32>) -> i32 {
let max_len = 3;
let mut buf = Vec::new();
for n in nums.into_iter() {
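            // `buf` is kept sorted in descending order (note the reversed
            // comparator), so Err(i) is the slot for a value not seen before,
            // while Ok (a duplicate) is deliberately ignored.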
match buf.binary_search_by(|seen| n.cmp(seen)) {
Err(i) if i < max_len => {
buf.insert(i, n);
buf.truncate(max_len);
}
_ => {}
}
}
if buf.len() < max_len {
*buf.first().unwrap()
} else {
*buf.last().unwrap()
}
}
}
pub struct Solution;
#[cfg(test)]
mod tests {
extern crate test;
use super::Solution;
#[test]
fn test_321() {
assert_eq!(Solution::third_max(vec![3, 2, 1]), 1);
}
#[test]
fn test_12() {
assert_eq!(Solution::third_max(vec![1, 2]), 2);
}
#[test]
fn test_2231() {
assert_eq!(Solution::third_max(vec![2, 2, 3, 1]), 1);
}
#[bench]
fn bench(b: &mut test::Bencher) {
b.iter(|| Solution::third_max(vec![2, 7, 11, 15]));
}
}
| true |
7aee3327a9417f521476c4c9facf2c11125f5c84
|
Rust
|
frankegoesdown/LeetCode-in-Go
|
/Algorithms/0820.short-encoding-of-words/short-encoding-of-words.go
|
UTF-8
| 732 | 3.109375 | 3 |
[
"MIT"
] |
permissive
|
package problem0820
import (
"sort"
)
func minimumLengthEncoding(words []string) int {
sort.Slice(words, func(i, j int) bool {
curi := len(words[i])
curj := len(words[j])
for curi > 0 && curj > 0 {
curi--
curj--
if words[i][curi] == words[j][curj] {
continue
}
if words[i][curi] > words[j][curj] {
return true
}
return false
}
		// when one word is a suffix of the other, the shorter one sorts first
return curi == 0
})
words = append(words, "")
res, i := 0, 1
for ; i < len(words); i++ {
if !endWith(words[i], words[i-1]) {
res += len(words[i-1]) + 1
}
}
return res
}
// if s end with post, return true
func endWith(s, post string) bool {
if len(s) < len(post) {
return false
}
return s[len(s)-len(post):] == post
}
| true |
b21926f1b44e726504d609f39b5df175c6817e1e
|
Rust
|
ntzm/gpmf-rs
|
/src/error.rs
|
UTF-8
| 856 | 2.5625 | 3 |
[] |
no_license
|
#[derive(Debug)]
pub enum JpgExtractError {
Io(std::io::Error),
FromBytes(img_parts::Error),
MissingApp6,
InvalidApp6,
}
impl std::error::Error for JpgExtractError {}
impl std::fmt::Display for JpgExtractError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
JpgExtractError::Io(e) => e.fmt(f),
JpgExtractError::FromBytes(e) => e.fmt(f),
JpgExtractError::MissingApp6 => write!(f, "Missing APP6"),
JpgExtractError::InvalidApp6 => write!(f, "Invalid APP6"),
}
}
}
impl From<img_parts::Error> for JpgExtractError {
fn from(e: img_parts::Error) -> Self {
JpgExtractError::FromBytes(e)
}
}
impl From<std::io::Error> for JpgExtractError {
fn from(e: std::io::Error) -> Self {
JpgExtractError::Io(e)
}
}
| true |
86255a48ff23545e0c5775e715bbebe7a4149b13
|
Rust
|
nix-community/nur-combined
|
/repos/colinsane/pkgs/additional/mx-sanebot/src/parsing.rs
|
UTF-8
| 4,688 | 3.203125 | 3 |
[] |
no_license
|
use std::fmt;
/// for internal use.
/// parses only if the parser has no more bytes to yield.
struct Eof;
/// literal byte (character).
pub struct Lit<const BYTE: u8>;
/// parses without consuming any bytes from the parser.
/// used to construct strictly optional constructs.
pub struct Nul;
/// the two-item sequence of A followed by B.
pub struct Then<A, B>(pub A, pub B);
/// if A parses, then A, else parse B.
pub enum Either<A, B> {
A(A),
B(B),
}
/// parse A if possible, but don't error if it isn't present.
pub type Maybe<A> = Either<A, Nul>;
/// exists because Rust doesn't allow recursive type *aliases*.
pub struct OneOrMore<A>(Then<
A,
Maybe<Box<OneOrMore<A>>>
>);
// case-sensitive u8 character.
#[macro_export]
macro_rules! lit {
($BYTE:literal) => {
Lit<{ $BYTE as u8 }>
}
}
// case-insensitive u8 character.
#[macro_export]
macro_rules! ilit {
($BYTE:literal) => {
Either<Lit<{ ($BYTE as u8).to_ascii_lowercase() }>, Lit<{ ($BYTE as u8).to_ascii_uppercase() }>>
}
}
pub type PResult<P, C> = std::result::Result<(C, P), P>;
pub trait Parser: Sized {
fn expect_byte(self, b: Option<u8>) -> PResult<Self, ()>;
fn expect<C: Parse>(self) -> PResult<Self, C>;
// {
// // support backtracking; i.e. don't modify `self` on failed parse
// match C::consume(self.clone()) {
// Ok(res) => res,
// Err(_) => self,
// }
// }
fn parse_all<C: Parse>(self) -> Result<C, ()> {
match self.expect::<Then<C, Eof>>() {
Ok((Then(c, _eof), _p)) => Ok(c),
Err(_p) => Err(()),
}
}
}
impl<'a> Parser for &'a [u8] {
fn expect_byte(self, b: Option<u8>) -> PResult<Self, ()> {
match (b, self.split_first()) {
// expected the correct character
(Some(exp), Some((first, rest))) if *first == exp => Ok( ((), rest) ),
// expected EOF, got EOF
(None, None) => Ok( ((), self)),
_ => Err(self),
}
}
fn expect<C: Parse>(self) -> PResult<Self, C> {
match C::consume(self.clone()) {
Ok(res) => Ok(res),
// rewind the parser should we fail
Err(_p) => Err(self),
}
}
}
pub trait Parse: Sized {
fn consume<P: Parser>(p: P) -> PResult<P, Self>;
}
impl Parse for Eof {
fn consume<P: Parser>(p: P) -> PResult<P, Self> {
let (_, p) = p.expect_byte(None)?;
Ok((Self, p))
}
}
impl Parse for Nul {
fn consume<P: Parser>(p: P) -> PResult<P, Self> {
Ok((Self, p))
}
}
impl fmt::Display for Nul {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "")
}
}
impl<const BYTE: u8> Parse for Lit<BYTE> {
fn consume<P: Parser>(p: P) -> PResult<P, Self> {
let (_, p) = p.expect_byte(Some(BYTE))?;
Ok((Self, p))
}
}
impl<const BYTE: u8> fmt::Display for Lit<BYTE> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}", BYTE as char)
}
}
impl<A: Parse, B: Parse> Parse for Then<A, B> {
fn consume<P: Parser>(p: P) -> PResult<P, Self> {
let (a, p) = p.expect()?;
let (b, p) = p.expect()?;
Ok((Self(a, b), p))
}
}
impl<A: fmt::Display, B: fmt::Display> fmt::Display for Then<A, B> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}{}", self.0, self.1)
}
}
impl<A: Parse, B: Parse> Parse for Either<A, B> {
fn consume<P: Parser>(p: P) -> PResult<P, Self> {
let p = match p.expect() {
Ok((a, p)) => { return Ok((Self::A(a), p)); },
Err(p) => p,
};
let p = match p.expect() {
Ok((b, p)) => { return Ok((Self::B(b), p)); },
Err(p) => p,
};
Err(p)
}
}
impl<A: fmt::Display, B: fmt::Display> fmt::Display for Either<A, B> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
Self::A(a) => write!(f, "{}", a),
Self::B(b) => write!(f, "{}", b),
}
}
}
impl<T: Parse> Parse for Box<T> {
fn consume<P: Parser>(p: P) -> PResult<P, Self> {
match T::consume(p) {
Ok((t, p)) => Ok((Box::new(t), p)),
Err(p) => Err(p),
}
}
}
impl<T: Parse> Parse for OneOrMore<T> {
fn consume<P: Parser>(p: P) -> PResult<P, Self> {
match p.expect() {
Ok((t, p)) => Ok((Self(t), p)),
Err(p) => Err(p),
}
}
}
impl<T: fmt::Display> fmt::Display for OneOrMore<T> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}", self.0)
}
}
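#[cfg(test)]
mod tests {
    use super::*;

    // A small usage sketch added for illustration; the "greeting" grammar here
    // is hypothetical and not part of the bot's real command set. It parses a
    // case-insensitive "hi" followed by an optional '!' and checks that the
    // Display impls reproduce the accepted input.
    #[test]
    fn parses_case_insensitive_greeting() {
        type Greeting = Then<Then<ilit!('h'), ilit!('i')>, Maybe<lit!('!')>>;

        let greeting: Greeting = (&b"Hi!"[..]).parse_all().expect("should parse");
        assert_eq!(greeting.to_string(), "Hi!");

        // The trailing '!' is optional.
        let bare: Greeting = (&b"hi"[..]).parse_all().expect("should parse");
        assert_eq!(bare.to_string(), "hi");
    }
}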
| true |
20d84bb6d04154f1ec465b2dcb10524cf7439db4
|
Rust
|
TyOverby/stormtrooper
|
/src/viewer/watcher.rs
|
UTF-8
| 1,744 | 2.765625 | 3 |
[] |
no_license
|
use notify::{self, Watcher, Event};
use notify::op::WRITE;
use std::fs::File;
use std::sync::mpsc::{channel, Receiver};
use std::sync::{Arc, Mutex};
use std::io::Read;
use std::thread;
use super::super::{script, svg, Drawing};
pub fn run_watcher(last_generated: Arc<Mutex<Option<String>>>, file: &str) -> Receiver<()> {
fn update(file: &str, last_generated: &Arc<Mutex<Option<String>>>) {
let mut file = File::open(&file[..]).unwrap();
let mut buf = String::new();
file.read_to_string(&mut buf).unwrap();
let mut drawing = Drawing::new();
if let Err(_) = script::run_script(&mut drawing, &buf) {
return;
}
let mut out_buf = Vec::new();
svg::write_svg(&drawing, &mut out_buf).unwrap();
let out_str = String::from_utf8(out_buf).unwrap();
let mut guard = last_generated.lock().unwrap();
*guard = Some(out_str);
}
// Create a channel to receive the events.
let (sx, rx) = channel();
// This one is for the web socket server
let (notify_sx, notify_rx) = channel();
let mut watcher = notify::new(sx).unwrap();
watcher.watch(file).unwrap();
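    // Intentionally leak the watcher so it is never dropped and keeps
    // delivering filesystem events for the lifetime of the process.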
::std::mem::forget(watcher);
update(file, &last_generated);
let file = file.to_owned();
thread::spawn(move || {
let notify_sx = notify_sx;
for change in rx.iter() {
match change {
Event { op: Ok(code), .. } if (code & WRITE == WRITE) => {
update(&file, &last_generated);
notify_sx.send(()).unwrap()
}
other => {
println!("something else!: {:?}", other)
}
}
}
});
notify_rx
}
| true |
4dcc6e01a6843f2f8b825c5be2c4c503f1410def
|
Rust
|
sathishvinayk/rust_360
|
/concurrency/channels.rs
|
UTF-8
| 927 | 3.28125 | 3 |
[] |
no_license
|
use std::sync::mpsc::{Sender, SyncSender, Receiver, channel, sync_channel};
fn sync_example() {
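    // A bounded channel with capacity 1: once a single value is buffered,
    // further sends block until the receiver drains it.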
let (tx, rx): (SyncSender<u8>, Receiver<u8>) = sync_channel(1);
let _v1 = tx.send(0);
let send_clone = tx.clone();
std::thread::spawn(move || {
let _v2 = tx.send(2);
});
std::thread::spawn(move || {
let _v3 = send_clone.send(3);
});
println!("Got sync value {}", rx.recv().unwrap());
println!("Got sync value {}", rx.recv().unwrap());
println!("Got sync value {}", rx.recv().unwrap());
}
fn cn_example() {
let (tx, rx) : (Sender<u8>, Receiver<u8>) = channel();
let x = std::thread::spawn(move || {
while let Ok(x) = rx.recv() {
println!("Got the value {}", x);
}
});
for n in 0..10 {
tx.send(n).unwrap();
}
x.join().expect("Error while spawning!");
}
fn main() {
// cn_example();
sync_example();
}
| true |
351fdd27db3b7369700458c7f5b0bdae2efe6933
|
Rust
|
jamesmunns/maxwell
|
/core/src/lib.rs
|
UTF-8
| 3,507 | 3.125 | 3 |
[] |
no_license
|
#![no_std]
#[derive(Debug)]
pub struct Demon {
/// The last sample observed by the demon
pub last_sample: u32,
/// The last sample used by the demon for mixing
pub last_mixing_sample: u32,
/// The number of consecutive samples with the same value
pub run: u32,
/// The current key-in-progress
pub key: u32,
/// The current mixing value
pub mix: u32,
/// The number of operations remaining to obtain a good key
pub ops_remaining: u32,
    /// The number of samples remaining before a timeout will be raised
pub samples_remaining: u32,
}
#[derive(Debug)]
pub enum Error {
NeedMoreSamples,
Timeout,
}
pub type Result<T> = core::result::Result<T, Error>;
impl Default for Demon {
fn default() -> Self {
Demon {
key: 0xACACACAC,
last_sample: 0,
last_mixing_sample: 0,
mix: 0xF0F0F0F0,
run: 0,
ops_remaining: 100,
samples_remaining: 100_000,
}
}
}
impl Demon {
pub fn take_sample(&mut self, sample: u32) -> Result<[u8; 4]> {
// println!("{:X?} - {:X?}", self, sample);
self.samples_remaining = self.samples_remaining.saturating_sub(1);
// If we received the same value as the last sample, increase
// the run streak
if sample == self.last_sample {
self.run = self.run.saturating_add(1);
return Err(Error::NeedMoreSamples);
}
self.last_sample = sample;
if self.run != 0 {
// println!("mix_run");
self.mix = self.mix.saturating_add(1 << (self.run & 31));
self.mix = self.mix.rotate_right(self.run);
}
// Reset the run streak to zero, as we have new data
self.run = 0;
        // Compare the low two bits of this sample with those of the last sample used for mixing
let sample_bits = sample & 0b11;
let candidate_bits = (self.last_mixing_sample & 0b11) ^ sample_bits;
let changed_data = candidate_bits != 0;
let new_mix = self.mix.rotate_right(candidate_bits) ^ sample_bits;
match (changed_data, (new_mix != 0)) {
(true, true) => {
self.key = self.key.wrapping_add(new_mix);
self.key ^= self.mix;
self.key = self.key.rotate_left((candidate_bits << 2) | sample_bits);
// Store the current sample as the last one used for mixing
self.last_mixing_sample = sample;
self.mix = new_mix;
                // One change operation completed; count down the operations still required
self.ops_remaining = self.ops_remaining.saturating_sub(1);
}
(true, false) => {
// println!("rol");
// Data has changed, but the new mix would be zero. Rotate Left a bit
self.key = self.key.rotate_left(7);
}
(false, true) => {
// println!("ror");
// Data hasn't changed, but the new mix would be non-zero. Rotate Right a bit
self.key = self.key.rotate_right(7);
}
(false, false) => {
// println!("invert");
// Everything is terrible. Invert the key
self.key = !self.key;
}
}
if self.ops_remaining == 0 {
Ok(self.key.to_ne_bytes())
} else if self.samples_remaining == 0 {
Err(Error::Timeout)
} else {
Err(Error::NeedMoreSamples)
}
}
}
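
#[cfg(test)]
mod tests {
    use super::*;

    // Illustrative test added for clarity: a stream of constantly changing
    // samples should let the demon complete its 100 mixing operations and
    // return a key well before the sample budget is exhausted. The sample
    // values themselves are arbitrary.
    #[test]
    fn demon_produces_a_key_from_changing_samples() {
        let mut demon = Demon::default();
        let mut key = None;
        for sample in 1..=1_000u32 {
            match demon.take_sample(sample) {
                Ok(bytes) => {
                    key = Some(bytes);
                    break;
                }
                Err(Error::NeedMoreSamples) => continue,
                Err(Error::Timeout) => panic!("unexpected timeout"),
            }
        }
        assert!(key.is_some(), "expected a key within 1_000 samples");
    }
}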
| true |
9a40177b981ea966e2b9e85c4b5ccdb9d4db83a0
|
Rust
|
MIracleyin/blog
|
/server/src/comment/delete.rs
|
UTF-8
| 1,109 | 2.609375 | 3 |
[] |
no_license
|
use std::sync::Arc;
use ntex::{web::types::{State, Path}};
use crate::{
errors::CustomError,
models::user::{Admin, User},
AppState,
};
pub async fn delete_comment(
user: User,
admin: Option<Admin>,
comment_id: Path<(u32,)>,
state: State<Arc<AppState>>,
) -> Result<String, CustomError> {
let db_pool = &state.db_pool;
let comment_id = comment_id.0;
let user_id = user.id;
    let is_admin = admin.is_some();
    let rows_affected = if is_admin {
sqlx::query!("DELETE FROM comments WHERE id = $1", comment_id as i32)
.execute(db_pool)
.await?
} else {
sqlx::query!(
"DELETE FROM comments WHERE id = $1 AND user_id = $2",
comment_id as i32,
user_id
)
.execute(db_pool)
.await?
}
.rows_affected();
if rows_affected == 0 {
        Err(CustomError::NotFound(
            "Failed to delete the comment: the comment ID may be incorrect, or you do not have permission to delete it".into(),
        ))
} else {
        Ok("Comment deleted successfully!".into())
}
}
| true |
5fcd96e9a405fff6db06caf3b294fda1661acab1
|
Rust
|
isgasho/meio
|
/meio-protocol/src/lib.rs
|
UTF-8
| 568 | 2.578125 | 3 |
[] |
no_license
|
use anyhow::Error;
use serde::{de::DeserializeOwned, Serialize};
use std::fmt::Debug;
pub trait ProtocolData: Serialize + DeserializeOwned + Debug + Send + 'static {}
impl<T> ProtocolData for T where T: Serialize + DeserializeOwned + Debug + Send + 'static {}
pub trait Protocol: Send + 'static {
type ToServer: ProtocolData;
type ToClient: ProtocolData;
type Codec: ProtocolCodec;
}
pub trait ProtocolCodec: Send {
fn decode<T: ProtocolData>(data: &[u8]) -> Result<T, Error>;
fn encode<T: ProtocolData>(value: &T) -> Result<Vec<u8>, Error>;
}
| true |
0159cf7fc22572f89c62227a012af89d0bb93d1e
|
Rust
|
sim82/aoc2020
|
/examples/day08.rs
|
UTF-8
| 1,229 | 2.671875 | 3 |
[] |
no_license
|
use aoc2020::asm_grammar;
use aoc2020::vm::{hooks, Hook, Op, Vm};
use std::io::Read;
struct ExecutedTwice;
impl Hook for ExecutedTwice {
fn check(&self, vm: &Vm) -> bool {
if vm.hit_count[vm.counter] > 1 {
println!("hit twice: {}", vm.counter);
return true;
}
false
}
}
fn main() {
let mut code = String::new();
std::io::stdin().lock().read_to_string(&mut code).unwrap();
let program: Vec<Op> = asm_grammar::ProgramParser::new().parse(&code[..]).unwrap();
println!("{:?}", program);
let mut vm = Vm::new(program.clone());
let mut hooks: Vec<Box<dyn Hook>> =
vec![Box::new(ExecutedTwice {}), Box::new(hooks::Tracing {})];
vm.run(Some(&mut hooks));
for i in 0..program.len() {
let mut program = program.clone();
// rewrite program
program[i] = match &program[i] {
Op::Nop(v) => Op::Jmp(*v),
Op::Jmp(v) => Op::Nop(*v),
x => x.clone(),
};
let mut vm = Vm::new(program);
let mut hooks: Vec<Box<dyn Hook>> = vec![Box::new(ExecutedTwice {})];
if vm.run(Some(&mut hooks)) {
println!("success: {} {}", i, vm.acc);
}
}
}
| true |
33b6e7232322e7fad57e8c73014e4ad6f4879a35
|
Rust
|
rcore-os-infohub/ossoc2020-wying8349-daily
|
/practice/tutorial/hellocargo/src/main.rs
|
UTF-8
| 571 | 3.59375 | 4 |
[] |
no_license
|
fn main() {
    let x: [i32; 3] = [3, 3, 3]; // x comes into scope
} // here, x goes out of scope; since an i32 array is Copy, nothing special happens
fn takes_ownership(some_string: String) { // some_string comes into scope
    println!("{}", some_string);
} // here, some_string goes out of scope and `drop` is called, freeing its memory
fn makes_copy(some_integer: i32) { // some_integer comes into scope
    println!("{}", some_integer);
} // here, some_integer goes out of scope; nothing special happens
| true |
80cf8deccc267465bc610fce279b8410ea48cfaa
|
Rust
|
sarvex/crates.io
|
/src/tests/builders/publish.rs
|
UTF-8
| 7,708 | 2.84375 | 3 |
[
"MIT",
"Apache-2.0"
] |
permissive
|
use cargo_registry::views::krate_publish as u;
use std::{collections::BTreeMap, io::Read};
use flate2::{write::GzEncoder, Compression};
use once_cell::sync::Lazy;
use super::DependencyBuilder;
// The bytes of an empty tarball are not an empty vector of bytes because of tarball headers.
// Unless files are added to a PublishBuilder, the `.crate` tarball that gets uploaded
// will be empty, so precompute the empty tarball bytes to use as a default.
static EMPTY_TARBALL_BYTES: Lazy<Vec<u8>> = Lazy::new(generate_empty_tarball);
fn generate_empty_tarball() -> Vec<u8> {
let mut empty_tarball = vec![];
{
let mut ar = tar::Builder::new(GzEncoder::new(&mut empty_tarball, Compression::default()));
assert_ok!(ar.finish());
}
empty_tarball
}
/// A builder for constructing a crate for the purposes of testing publishing. If you only need
/// a crate to exist and don't need to test behavior caused by the publish request, inserting
/// a crate into the database directly by using CrateBuilder will be faster.
pub struct PublishBuilder {
categories: Vec<String>,
deps: Vec<u::EncodableCrateDependency>,
desc: Option<String>,
doc_url: Option<String>,
keywords: Vec<String>,
pub krate_name: String,
license: Option<String>,
license_file: Option<String>,
readme: Option<String>,
tarball: Vec<u8>,
version: semver::Version,
features: BTreeMap<u::EncodableFeatureName, Vec<u::EncodableFeature>>,
}
impl PublishBuilder {
/// Create a request to publish a crate with the given name, version 1.0.0, and no files
/// in its tarball.
pub fn new(krate_name: &str) -> Self {
PublishBuilder {
categories: vec![],
deps: vec![],
desc: Some("description".to_string()),
doc_url: None,
keywords: vec![],
krate_name: krate_name.into(),
license: Some("MIT".to_string()),
license_file: None,
readme: None,
tarball: EMPTY_TARBALL_BYTES.to_vec(),
version: semver::Version::parse("1.0.0").unwrap(),
features: BTreeMap::new(),
}
}
/// Set the version of the crate being published to something other than the default of 1.0.0.
pub fn version(mut self, version: &str) -> Self {
self.version = semver::Version::parse(version).unwrap();
self
}
/// Set the files in the crate's tarball.
pub fn files(self, files: &[(&str, &[u8])]) -> Self {
let mut slices = files.iter().map(|p| p.1).collect::<Vec<_>>();
let mut files = files
.iter()
.zip(&mut slices)
.map(|(&(name, _), data)| {
let len = data.len() as u64;
(name, data as &mut dyn Read, len)
})
.collect::<Vec<_>>();
self.files_with_io(&mut files)
}
/// Set the tarball from a Read trait object
pub fn files_with_io(mut self, files: &mut [(&str, &mut dyn Read, u64)]) -> Self {
let mut tarball = Vec::new();
{
let mut ar = tar::Builder::new(GzEncoder::new(&mut tarball, Compression::default()));
for &mut (name, ref mut data, size) in files {
let mut header = tar::Header::new_gnu();
assert_ok!(header.set_path(name));
header.set_size(size);
header.set_cksum();
assert_ok!(ar.append(&header, data));
}
assert_ok!(ar.finish());
}
self.tarball = tarball;
self
}
/// Set the tarball directly to the given Vec of bytes
pub fn tarball(mut self, tarball: Vec<u8>) -> Self {
self.tarball = tarball;
self
}
/// Add a dependency to this crate. Make sure the dependency already exists in the
/// database or publish will fail.
pub fn dependency(mut self, dep: DependencyBuilder) -> Self {
self.deps.push(dep.build());
self
}
/// Set the description of this crate
pub fn description(mut self, description: &str) -> Self {
self.desc = Some(description.to_string());
self
}
/// Unset the description of this crate. Publish will fail unless description is reset.
pub fn unset_description(mut self) -> Self {
self.desc = None;
self
}
/// Set the readme of this crate
pub fn readme(mut self, readme: &str) -> Self {
self.readme = Some(readme.to_string());
self
}
/// Set the documentation URL of this crate
pub fn documentation(mut self, documentation: &str) -> Self {
self.doc_url = Some(documentation.to_string());
self
}
/// Add a keyword to this crate.
pub fn keyword(mut self, keyword: &str) -> Self {
self.keywords.push(keyword.into());
self
}
/// Add a category to this crate. Make sure the category already exists in the
/// database or it will be ignored.
pub fn category(mut self, slug: &str) -> Self {
self.categories.push(slug.into());
self
}
/// Remove the license from this crate. Publish will fail unless license or license file is set.
pub fn unset_license(mut self) -> Self {
self.license = None;
self
}
/// Set the license file for this crate
pub fn license_file(mut self, license_file: &str) -> Self {
self.license_file = Some(license_file.into());
self
}
// Adds a feature.
pub fn feature(mut self, name: &str, values: &[&str]) -> Self {
let values = values
.iter()
.map(|s| u::EncodableFeature(s.to_string()))
.collect();
self.features
.insert(u::EncodableFeatureName(name.to_string()), values);
self
}
pub fn build(self) -> (String, Vec<u8>) {
let new_crate = u::EncodableCrateUpload {
name: u::EncodableCrateName(self.krate_name.clone()),
vers: u::EncodableCrateVersion(self.version),
features: self.features,
deps: self.deps,
description: self.desc,
homepage: None,
documentation: self.doc_url,
readme: self.readme,
readme_file: None,
keywords: u::EncodableKeywordList(
self.keywords.into_iter().map(u::EncodableKeyword).collect(),
),
categories: u::EncodableCategoryList(
self.categories
.into_iter()
.map(u::EncodableCategory)
.collect(),
),
license: self.license,
license_file: self.license_file,
repository: None,
links: None,
};
(serde_json::to_string(&new_crate).unwrap(), self.tarball)
}
/// Consume this builder to make the Put request body
pub fn body(self) -> Vec<u8> {
let (json, tarball) = self.build();
PublishBuilder::create_publish_body(&json, &tarball)
}
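    /// Frame a publish request body the way the code below constructs it: a
    /// 4-byte little-endian length prefix followed by the JSON metadata, then
    /// a 4-byte little-endian length prefix followed by the `.crate` tarball
    /// bytes.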
pub fn create_publish_body(json: &str, tarball: &[u8]) -> Vec<u8> {
let mut body = Vec::new();
body.extend(
[
json.len() as u8,
(json.len() >> 8) as u8,
(json.len() >> 16) as u8,
(json.len() >> 24) as u8,
]
.iter()
.cloned(),
);
body.extend(json.as_bytes().iter().cloned());
body.extend([
tarball.len() as u8,
(tarball.len() >> 8) as u8,
(tarball.len() >> 16) as u8,
(tarball.len() >> 24) as u8,
]);
body.extend(tarball);
body
}
}
| true |
cc15efcc085d8a6da4dc38c5ae821c0d4bc34991
|
Rust
|
jroweboy/jinja2-c
|
/src/jinja2.rs
|
UTF-8
| 2,397 | 2.9375 | 3 |
[
"MIT"
] |
permissive
|
//! A safe interface to the Jinja 2 Python Library
//! As of writing, I could not get bindgen to build Python bindings for Rust,
//! so I instead built a small C wrapper, and this library is a safe wrapper around the
//! C library that interfaces with jinja2
use jinja2_c;
use std::str::raw::from_c_str;
use std::ptr;
use collections::HashMap;
use libc::{c_void, c_char};
pub struct Environment {
env: *mut c_void,
}
pub struct Template {
tmpl: *mut c_void,
}
impl Environment {
/// Creates a new Environment that wraps the c calls needed to interact with the Jinja2 Environment
///
/// Takes in a `str` for the absolute path to the template directory
/// TODO? Refactor the C Library to allow any Loader and not just FileSystemLoader
pub fn new(template_dir: &str) -> Environment {
let env = unsafe { jinja2_c::init_environment(template_dir.to_c_str().unwrap()) };
Environment {
env: env,
}
}
pub fn get_template(&self, template_name: &str) -> Template {
let tmpl = unsafe { jinja2_c::get_template(self.env, template_name.to_c_str().unwrap()) };
Template {
tmpl: tmpl,
}
}
pub fn list_templates(&self) {
unsafe {
jinja2_c::list_templates(self.env);
}
}
}
impl Drop for Environment {
fn drop(&mut self) {
unsafe {
jinja2_c::free_environment(self.env);
}
}
}
impl Template {
pub fn render(&self, vars: Option<HashMap<~str, ~str>>) -> Option<~str> {
let retval = match vars {
None => unsafe { jinja2_c::render(self.tmpl, 0, ptr::null::<*mut c_char>() as *mut *mut c_char) },
Some(s) => {
// convert the hashmap to char*
let mut vec = Vec::new();
let len = s.len() * 2;
for (ref key, ref val) in s.iter() {
unsafe {
vec.push(key.to_c_str().unwrap());
vec.push(val.to_c_str().unwrap());
}
}
unsafe {
jinja2_c::render(self.tmpl, len as i32, vec.as_slice().as_ptr() as *mut *mut c_char)
}
}
};
if retval.is_null() {
None
} else {
Some(unsafe{ from_c_str(retval as *c_char) })
}
}
}
| true |
45e294155c252741b53d24c91702d0a1496fa396
|
Rust
|
advancedresearch/room
|
/src/helpers.rs
|
UTF-8
| 2,415 | 3.515625 | 4 |
[
"MIT"
] |
permissive
|
use super::*;
impl Object {
/// Returns `true` if self has another object.
pub fn has(&self, obj: Object) -> bool {self.matches(&has(obj))}
/// Returns `true` if self has not another object.
pub fn has_not(&self, obj: Object) -> bool {self.matches(&has_not(obj))}
/// Returns `true` if self is on another object.
pub fn is_on(&self, obj: Object) -> bool {
self.matches(&on(obj).into())
}
/// Returns `true` if self leans toward another object.
pub fn is_leaning_toward(&self, obj: Object) -> bool {
self.matches(&lean_toward(obj).into())
}
/// Returns `true` if self is in another object.
pub fn is_in(&self, obj: Object) -> bool {
self.matches(&in_(obj).into())
}
/// Returns `true` if self is out of another object.
pub fn is_out_of(&self, obj: Object) -> bool {
self.matches(&out_of(obj).into())
}
/// Returns `true` if self was killed by another object.
pub fn was_killed_by(&self, obj: Object) -> bool {self.matches(&killed_by(obj))}
/// Returns `true` if self killed another object.
pub fn killed(&self, obj: Object) -> bool {self.matches(&killed(obj))}
/// Returns `true` if self talked to another object.
pub fn talked_to(&self, obj: Object) -> bool {
self.matches(&Object::DidTo(Verb::Talk, Box::new(obj)))
}
/// Returns `true` if self was talked to by another object.
pub fn was_talked_to_by(&self, obj: Object) -> bool {
self.matches(&Object::WasBy(Verb::Talk, Box::new(obj)))
}
/// Returns `true` if self was moved by another object.
pub fn was_moved_by(&self, obj: Object) -> bool {
self.matches(&Object::WasBy(Verb::Move, Box::new(obj)))
}
/// Returns `true` if self moved another object.
pub fn moved(&self, obj: Object) -> bool {
self.matches(&Object::DidTo(Verb::Move, Box::new(obj)))
}
/// Returns `true` if self is opponent of another object.
pub fn is_opponent_of(&self, obj: Object) -> bool {
self.matches(&opponent_of(obj).into())
}
/// Returns `true` if self locked another object.
pub fn locked(&self, obj: Object) -> bool {
self.matches(&Object::DidTo(Verb::Lock, Box::new(obj)))
}
/// Returns `true` if self closed another object.
pub fn closed(&self, obj: Object) -> bool {
self.matches(&Object::DidTo(Verb::Close, Box::new(obj)))
}
}
| true |
33c24741783b6cc8dffa9a59fd3958d2602a3701
|
Rust
|
ArsileLuci/ImageApiRs
|
/src/main.rs
|
UTF-8
| 13,073 | 2.6875 | 3 |
[] |
no_license
|
use actix_web::{get, post, web, App, HttpServer, Responder,HttpResponse};
use actix_multipart::Multipart;
extern crate futures;
use futures::{StreamExt, TryStreamExt};
extern crate base64;
use base64::decode;
#[macro_use]
extern crate diesel;
extern crate dotenv;
extern crate uuid;
use diesel::prelude::*;
use diesel::r2d2::{self, ConnectionManager};
mod schema;
mod dtos;
use dtos::*;
mod actions;
mod models;
type DbPool = r2d2::Pool<ConnectionManager<SqliteConnection>>;
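// HTTP surface of the service: GET /preview/{id} and GET /image/{id} return a
// stored image (as a preview or in full), while POST /addmultipart, /addbase64
// and /addurl ingest images from multipart form data, a base64 payload or a
// remote URL and respond with the stored image id(s).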
#[get("/preview/{id}")]
async fn get_preview(
pool: web::Data<DbPool>,
info: web::Path<uuid::Uuid>,
) -> impl Responder {
let id = info.into_inner();
let conn = pool.get().expect("couldn't get db connection from pool");
let img = actions::find_image_by_id(id, &conn);
match img {
Ok(opt)=>{
match opt {
Some(_image) => {
return HttpResponse::Ok().body(actions::make_preview(&_image.Content).unwrap());
}
None => {
return HttpResponse::NotFound().finish();
}
}
}
Err(_) =>{
return HttpResponse::InternalServerError().finish();
}
}
}
#[get("/image/{id}")]
async fn get_image(
pool: web::Data<DbPool>,
info: web::Path<uuid::Uuid>,
) -> impl Responder {
let id = info.into_inner();
let conn = pool.get().expect("couldn't get db connection from pool");
let img = actions::find_image_by_id(id, &conn);
match img {
Ok(opt)=>{
match opt {
Some(_image) => {
return HttpResponse::Ok().body(_image.Content);
}
None => {
return HttpResponse::NotFound().finish();
}
}
}
Err(_) =>{
return HttpResponse::InternalServerError().finish();
}
}
}
#[post("/addmultipart")]
async fn add_multipart(
pool: web::Data<DbPool>,
mut model: Multipart,
) -> impl Responder {
let mut items : Vec<Vec<u8>> = Vec::new();
while let Ok(Some(mut field)) = model.try_next().await {
let mut bytes : Vec<u8> = Vec::new();
while let Some(chunk) = field.next().await {
bytes.append(&mut Vec::from(&chunk.unwrap()[0..]));
}
items.push(bytes);
}
let conn = pool.get().expect("couldn't get db connection from pool");
let result = actions::insert_many(items, &conn);
match result {
Ok(values) => {
return HttpResponse::Ok().json(values);
}
Err(_) => {
return HttpResponse::InternalServerError().finish();
}
}
}
#[post("/addbase64")]
async fn add_base64(
pool: web::Data<DbPool>,
model: web::Json<Base64Dto>,
) -> impl Responder {
    match model.Image.split(",").last() { // skip the base64 data-URI header
Some(base64) => {
match decode(base64) {
Ok(bytes) => {
let conn = pool.get().expect("couldn't get db connection from pool");
let result = actions::insert_image(bytes, &conn);
match result {
Ok(id) => {
match id {
Some(value) => {
return HttpResponse::Ok().json(ResponseDto {Id:value});
}
None => {
println!("bad image");
return HttpResponse::BadRequest().body("image provided in not supported format, supported formats png, bmp, tiff, jpeg");
}
}
}
Err(_) => {
return HttpResponse::InternalServerError().finish();
}
}
}
Err(_) => {
println!("bruh");
return HttpResponse::BadRequest().body("base64 incorrectly encoded");
}
}
}
None => {
return HttpResponse::BadRequest().body("base64 encoded string was not presented");
}
}
}
#[post("/addurl")]
async fn add_url(
pool: web::Data<DbPool>,
model: web::Json<UrlDto>,
) -> impl Responder {
let bytes: Vec<u8>;
let request = reqwest::get(&model.Url).await;
match request {
Ok(response) => {
let response_bytes = response.bytes().await;
match response_bytes {
Ok(raw_bytes) => {
bytes = Vec::from(&raw_bytes[0..]);
}
Err(_) => {
return HttpResponse::BadRequest().body("error occured during recieving of image's bytestream");
}
}
}
Err(_) => {
return HttpResponse::BadRequest().body("cannot get image url from the given url");
}
}
let conn = pool.get().expect("couldn't get db connection from pool");
let response_img = actions::insert_image(bytes, &conn);
match response_img {
Ok(id) => {
match id {
Some(value) => {
return HttpResponse::Ok().json(ResponseDto {Id:value});
}
None => {
return HttpResponse::BadRequest().body("image provided in not supported format, supported formats png, bmp, tiff, jpeg");
}
}
}
Err(_) => {
return HttpResponse::InternalServerError().finish();
}
}
}
#[actix_rt::main]
async fn main() -> std::io::Result<()> {
dotenv::dotenv().ok();
let connspec = std::env::var("DATABASE_URL").expect("DATABASE_URL");
let manager = ConnectionManager::<SqliteConnection>::new(connspec);
let pool = r2d2::Pool::builder()
.build(manager)
.expect("Failed to create pool.");
HttpServer::new(move ||
App::new()
.data(pool.clone())
.app_data(web::PayloadConfig::new(32 * 1024 * 1024 * 1024))
.app_data(web::JsonConfig::default().limit(1024 * 1024 * 32 * 1024))
.service(add_base64)
.service(get_preview)
.service(get_image)
.service(add_url)
.service(add_multipart)
)
.bind("127.0.0.1:8080")?
.run()
.await
}
#[cfg(test)]
mod tests {
use super::*;
use actix_web::test;
use std::io::{Read};
#[actix_rt::test]
async fn test_addurl_and_getimage_ok() {
dotenv::dotenv().ok();
let connspec = std::env::var("DATABASE_URL").expect("DATABASE_URL");
let manager = ConnectionManager::<SqliteConnection>::new(connspec);
let pool = r2d2::Pool::builder()
.build(manager)
.expect("Failed to create pool.");
let mut app = test::init_service(App::new()
.data(pool.clone())
.app_data(web::PayloadConfig::new(32 * 1024 * 1024 * 1024))
.app_data(web::JsonConfig::default().limit(1024 * 1024 * 32 * 1024))
.service(get_image)
.service(add_url)
).await;
let dto = UrlDto {Url: "https://media.discordapp.net/attachments/617725195691491349/692406625381646496/yin14x1vbuo41.png".to_string()};
let add_request = test::TestRequest::post()
.uri("/addurl")
.set_json(&dto)
.to_request();
let result: ResponseDto = test::read_response_json(&mut app, add_request).await;
let route = format!("/image/{}",result.Id);
let get_request = test::TestRequest::get()
.uri(&route)
.to_request();
let body = test::read_response(&mut app, get_request)
.await;
let bytes = reqwest::get(&dto.Url)
.await
.expect("download link is broken")
.bytes()
.await
.expect("error during image downloading");
assert_eq!(bytes, body);
}
#[actix_rt::test]
async fn test_addbase64_png_and_preview_ok() {
dotenv::dotenv().ok();
let connspec = std::env::var("DATABASE_URL").expect("DATABASE_URL");
let manager = ConnectionManager::<SqliteConnection>::new(connspec);
let pool = r2d2::Pool::builder()
.build(manager)
.expect("Failed to create pool.");
let mut app = test::init_service(App::new()
.data(pool.clone())
.app_data(web::PayloadConfig::new(32 * 1024 * 1024 * 1024))
.app_data(web::JsonConfig::default().limit(1024 * 1024 * 32 * 1024))
.service(get_preview)
.service(add_base64)
).await;
let mut file = std::fs::OpenOptions::new().read(true).open("testbase64.png").expect("missing testbase64.png");
let mut bytes = Vec::<u8>::new();
file.read_to_end(&mut bytes).expect("somethink gone wrong during reading testbase64.png");
let base64string = base64::encode(bytes);
//println!("{}", base64string);
let dto = Base64Dto { Image: base64string };
let add_request = test::TestRequest::post()
.uri("/addbase64")
.set_json(&dto)
.to_request();
let result: ResponseDto = test::read_response_json(&mut app, add_request).await;
let route = format!("/preview/{}",result.Id);
let get_request = test::TestRequest::get()
.uri(&route)
.to_request();
let body = test::call_service(&mut app, get_request)
.await;
assert!(body.status().is_success());
}
#[actix_rt::test]
async fn test_addbase64_notimage_is_client_error() {
dotenv::dotenv().ok();
let connspec = std::env::var("DATABASE_URL").expect("DATABASE_URL");
let manager = ConnectionManager::<SqliteConnection>::new(connspec);
let pool = r2d2::Pool::builder()
.build(manager)
.expect("Failed to create pool.");
let mut app = test::init_service(App::new()
.data(pool.clone())
.app_data(web::PayloadConfig::new(32 * 1024 * 1024 * 1024))
.app_data(web::JsonConfig::default().limit(1024 * 1024 * 32 * 1024))
.service(add_base64)
).await;
let mut file = std::fs::OpenOptions::new().read(true).open("notimage.txt").expect("missing notimage.txt");
let mut bytes = Vec::<u8>::new();
file.read_to_end(&mut bytes).expect("somethink gone wrong during reading notimage.txt");
let base64string = base64::encode(bytes);
//println!("{}", base64string);
let dto = Base64Dto { Image: base64string };
let add_request = test::TestRequest::post()
.uri("/addbase64")
.set_json(&dto)
.to_request();
let result = test::call_service(&mut app, add_request)
.await;
assert!(result.status().is_client_error());
}
#[actix_rt::test]
async fn test_get_image_with_bad_id_is_client_error() {
dotenv::dotenv().ok();
let connspec = std::env::var("DATABASE_URL").expect("DATABASE_URL");
let manager = ConnectionManager::<SqliteConnection>::new(connspec);
let pool = r2d2::Pool::builder()
.build(manager)
.expect("Failed to create pool.");
let mut app = test::init_service(App::new()
.data(pool.clone())
.app_data(web::PayloadConfig::new(32 * 1024 * 1024 * 1024))
.app_data(web::JsonConfig::default().limit(1024 * 1024 * 32 * 1024))
.service(get_image)
).await;
let get_request = test::TestRequest::get()
.uri("/image/0000-0000-0000-0000")
.to_request();
let result = test::call_service(&mut app, get_request)
.await;
assert!(result.status().is_client_error());
}
#[actix_rt::test]
async fn test_get_preview_with_bad_id_is_client_error() {
dotenv::dotenv().ok();
let connspec = std::env::var("DATABASE_URL").expect("DATABASE_URL");
let manager = ConnectionManager::<SqliteConnection>::new(connspec);
let pool = r2d2::Pool::builder()
.build(manager)
.expect("Failed to create pool.");
let mut app = test::init_service(App::new()
.data(pool.clone())
.app_data(web::PayloadConfig::new(32 * 1024 * 1024 * 1024))
.app_data(web::JsonConfig::default().limit(1024 * 1024 * 32 * 1024))
.service(get_preview)
).await;
let get_request = test::TestRequest::get()
.uri("/preview/0000-0000-0000-0000")
.to_request();
let result = test::call_service(&mut app, get_request)
.await;
assert!(result.status().is_client_error());
}
}
| true |
e333566a794639bcc951a92fd37203e76f7d68d2
|
Rust
|
legokichi/rust-sandbox
|
/lifetime-sandbox/src/main.rs
|
UTF-8
| 1,015 | 3.234375 | 3 |
[] |
no_license
|
trait Schema {
fn name(&self) -> &str;
fn r#type(&self)-> char;
fn level(&self)-> i64;
}
struct Item(String);
impl Schema for Item {
fn name(&self) -> &str { &self.0 }
fn r#type(&self) -> char { unimplemented!() }
fn level(&self) -> i64 { unimplemented!() }
}
trait SchemaDict<'a> {
type Item: Schema;
fn get(&'a self, id: i64)-> Option<&'a Self::Item>;
}
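// `SchemaDict<'a>` threads the lifetime through the trait itself so that `get`
// returns a borrow tied to `'a`; this is what lets `Decoder<'a, S>` below hold
// a `&'a S` and read items out of it without cloning.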
struct Dict(std::collections::HashMap<i64, Item>);
impl<'a> SchemaDict<'a> for Dict {
type Item = Item;
fn get(&'a self, id: i64)-> Option<&'a Self::Item>{
self.0.get(&id)
}
}
struct Decoder<'a, S: SchemaDict<'a>>(&'a S);
impl<'a, S: SchemaDict<'a>> Decoder<'a, S> {
fn decode(&self){
println!("{}", self.0.get(0).unwrap().name());
}
}
fn main() {
let item = Item("foo".to_string());
let mut map = std::collections::HashMap::new();
map.insert(0, item);
let dict = Dict(map);
println!("{}", dict.get(0).unwrap().name());
{
let dict = Decoder(&dict);
}
}
| true |
cd9e5f0323f1c3702e8f02cd48349adb2d243a30
|
Rust
|
Tonyce/hellorust
|
/src/main.rs
|
UTF-8
| 262 | 2.75 | 3 |
[] |
no_license
|
// Test: drive an async function to completion with futures' `block_on`.
use futures::executor::block_on;
async fn hello_world() {
println!("hello, world!");
}
fn main() {
let a = "a";
println!("Hello, world! {}", a);
let future = hello_world(); // Nothing is printed
block_on(future);
}
| true |
b2e45e6733b32128ca3c1a0ab0f2e22ab513f4d4
|
Rust
|
lucas3fonseca/rustaceos
|
/apps/dapp-explorer/src/components/header.rs
|
UTF-8
| 3,438 | 2.890625 | 3 |
[] |
no_license
|
use yew::agent::Bridged;
use yew::worker::Bridge;
use yew::{html, Component, ComponentLink, Html, ShouldRender};
use crate::ship::{ConnectionStatus, Request as ShipRequest, Response as ShipResponse, ShipWorker};
pub struct Header {
link: ComponentLink<Self>,
ship: Box<dyn Bridge<ShipWorker>>,
connection: ConnectionStatus,
block_num: u32,
}
pub enum Msg {
ShipMsg(ShipResponse),
ConnectShip,
}
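// The header bridges to the shared `ShipWorker` agent: worker responses arrive
// as `Msg::ShipMsg` and update the connection status and head block number,
// while `Msg::ConnectShip` asks the worker to open the connection.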
impl Component for Header {
type Message = Msg;
type Properties = ();
fn create(_: Self::Properties, link: ComponentLink<Self>) -> Self {
let ship_callback = link.callback(|message| Msg::ShipMsg(message));
let mut ship = ShipWorker::bridge(ship_callback);
ship.send(ShipRequest::Subscribe);
Header {
ship,
link,
block_num: 0,
connection: ConnectionStatus::Offline,
}
}
fn update(&mut self, msg: Self::Message) -> ShouldRender {
match msg {
Msg::ShipMsg(ship_response) => match ship_response {
ShipResponse::Connected => self.connection = ConnectionStatus::Online,
ShipResponse::Disconnected => self.connection = ConnectionStatus::Offline,
ShipResponse::UpdatedHeadLib(head, _lib) => self.block_num = head,
},
Msg::ConnectShip => self.ship.send(ShipRequest::Connect),
}
true
}
fn change(&mut self, _: Self::Properties) -> ShouldRender {
true
}
fn view(&self) -> Html {
html! {
<nav class="navbar" role="navigation" aria-label="main navigation">
<div class="navbar-brand">
<a class="navbar-item" href="/">
<img src="/eos.gif" width="28" height="28" />
</a>
</div>
<div id="navbarBasicExample" class="navbar-menu is-active">
<div class="navbar-start">
<a class="navbar-item">
{"Accounts"}
</a>
<a class="navbar-item">
{"Contracts"}
</a>
</div>
<div class="navbar-end">
<div class="navbar-item">
{"Block: "}
<strong>{self.block_num}</strong>
</div>
{self.display_chain_status()}
</div>
</div>
</nav>
}
}
}
impl Header {
fn display_chain_status(&self) -> Html {
let (text, color) = match self.connection {
ConnectionStatus::Online => ("Online", "has-text-success"),
ConnectionStatus::Offline => ("Offline", "has-text-danger"),
};
let button = if self.connection == ConnectionStatus::Offline {
let onclick = self.link.callback(|_| Msg::ConnectShip);
html! {
<div class="navbar-item">
<button class="button is-small" onclick=onclick>{"Connect"}</button>
</div>
}
} else {
html! {}
};
html! {
<>
<div class=("navbar-item", color)>
{text}
</div>
{button}
</>
}
}
}
| true |
c076af64c3f8fbd0d853c831f365fd8291493da9
|
Rust
|
zaksky7/Rust-project
|
/src/year2017/day20.rs
|
UTF-8
| 1,286 | 3 | 3 |
[] |
no_license
|
use counter::Counter;
use itertools::Itertools;
use regex::Regex;
use crate::utils::Coord3;
struct Particle {
pos: Coord3<i64>,
vel: Coord3<i64>,
acc: Coord3<i64>,
}
fn parse_particles(input: &str) -> impl Iterator<Item = Particle> + '_ {
let reg = Regex::new(r"-?\d+").unwrap();
input.lines().map(move |line| {
let cs: Vec<Coord3<i64>> = line
.split(", ")
.map(|comp| {
let ds: Vec<i64> = reg
.find_iter(comp)
.map(|x| x.as_str().parse().unwrap())
.collect();
Coord3::new(ds[0], ds[1], ds[2])
})
.collect();
Particle {
pos: cs[0],
vel: cs[1],
acc: cs[2],
}
})
}
pub fn part1(input: &str) -> Option<usize> {
parse_particles(input).position_min_by_key(|p| p.acc.x.abs() + p.acc.y.abs() + p.acc.z.abs())
}
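// Part 2: step every particle forward and remove all particles that share a
// position after the step. The fixed 100 iterations are a heuristic cutoff
// assumed to be enough for every collision in the puzzle input to happen.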
pub fn part2(input: &str) -> usize {
let mut ps = parse_particles(input).collect::<Vec<_>>();
for _ in 0..100 {
for p in ps.iter_mut() {
p.vel += p.acc;
p.pos += p.vel;
}
let tbl = ps.iter().map(|p| p.pos).collect::<Counter<_>>();
ps.retain(|p| tbl[&p.pos] == 1);
}
ps.len()
}
| true |
e4d122b1e64abdd762ae52bb34eb0e34385f4adb
|
Rust
|
Yatekii/sailor
|
/src/lib/drawing/vertex.rs
|
UTF-8
| 2,165 | 3.015625 | 3 |
[] |
no_license
|
use crate::*;
use lyon::tessellation::{self, geometry_builder::VertexConstructor};
#[derive(Copy, Clone, Debug)]
pub enum VertexType {
Polygon = 0,
Line = 1,
}
#[derive(Copy, Clone, Debug)]
#[repr(C, packed)]
pub struct Vertex {
pub position: [i16; 2],
pub normal: [i16; 2],
pub feature_id: u32,
}
// A very simple vertex constructor that only outputs the vertex position
pub struct LayerVertexCtor {
pub tile_id: math::TileId,
pub feature_id: u32,
pub extent: f32,
pub vertex_type: VertexType,
}
impl LayerVertexCtor {
pub fn new(tile_id: &TileId, extent: f32) -> Self {
Self {
tile_id: *tile_id,
feature_id: 0,
extent,
vertex_type: VertexType::Polygon,
}
}
}
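// For fill vertices the vertex type is packed into the upper 16 bits of
// `feature_id` (see the shift below); stroke vertices keep the raw feature id.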
impl VertexConstructor<tessellation::FillVertex, Vertex> for LayerVertexCtor {
fn new_vertex(&mut self, vertex: tessellation::FillVertex) -> Vertex {
assert!(!vertex.position.x.is_nan());
assert!(!vertex.position.y.is_nan());
const LIMIT: f32 = 3.0;
let normal = if vertex.normal.length() > LIMIT {
vertex.normal.normalize() * LIMIT
} else {
vertex.normal
} * self.extent;
let meta: u16 = self.vertex_type as u16;
Vertex {
position: [vertex.position.x as i16, vertex.position.y as i16],
normal: [normal.x.round() as i16, normal.y.round() as i16],
feature_id: ((meta as u32) << 16) | self.feature_id,
}
}
}
impl VertexConstructor<tessellation::StrokeVertex, Vertex> for LayerVertexCtor {
fn new_vertex(&mut self, vertex: tessellation::StrokeVertex) -> Vertex {
assert!(!vertex.position.x.is_nan());
assert!(!vertex.position.y.is_nan());
let normal = if vertex.normal.length() > 8.0 {
vertex.normal.normalize() * 8.0
} else {
vertex.normal
} * self.extent;
Vertex {
position: [vertex.position.x as i16, vertex.position.y as i16],
normal: [normal.x.round() as i16, normal.y.round() as i16],
feature_id: self.feature_id,
}
}
}
| true |
4297968b14de48c23472b1fc2aee332c95d1b8e7
|
Rust
|
rthomas/dms
|
/dns-message/src/question.rs
|
UTF-8
| 8,242 | 2.9375 | 3 |
[] |
no_license
|
use crate::{encode_str, Result};
use std::default::Default;
use std::fmt;
use tracing::{instrument, trace};
#[derive(Debug, PartialEq)]
/// The question section is used to carry the "question" in most queries, i.e.,
/// the parameters that define what is being asked.
pub struct Question {
/// RFC1035 - a domain name represented as a sequence of labels, where each
/// label consists of a length octet followed by that number of octets. The
/// domain name terminates with the zero length octet for the null label of
/// the root. Note that this field may be an odd number of octets; no
/// padding is used.
pub q_name: String,
/// RFC1035 - a two octet code which specifies the type of the query. The
    /// values for this field include all codes valid for a [`Type`] field,
/// together with some more general codes which can match more than one type
/// of RR.
pub q_type: Type,
    /// RFC1035 - a two octet code that specifies the [`Class`] of the query.
pub q_class: Class,
}
impl Question {
#[instrument(skip(buf))]
pub(crate) fn to_bytes(&self, buf: &mut Vec<u8>) -> Result<usize> {
let mut byte_count = encode_str(&self.q_name, buf)?;
byte_count += self.q_type.to_bytes(buf);
byte_count += self.q_class.to_bytes(buf);
trace!("Wrote {} bytes", byte_count);
Ok(byte_count)
}
}
#[derive(Debug, PartialEq)]
/// Types used in [`Question`]s.
pub enum Type {
/// RFC1035 - (1) a host address.
A,
/// RFC1035 - (2) an authoritative name server.
NS,
/// RFC1035 - (3) a mail destination (Obsolete - use MX).
MD,
/// RFC1035 - (4) a mail forwarder (Obsolete - use MX).
MF,
/// RFC1035 - (5) the canonical name for an alias.
CNAME,
/// RFC1035 - (6) marks the start of a zone of authority.
SOA,
/// RFC1035 - (7) a mailbox domain name (EXPERIMENTAL).
MB,
/// RFC1035 - (8) a mail group member (EXPERIMENTAL).
MG,
/// RFC1035 - (9) a mail rename domain name (EXPERIMENTAL).
MR,
/// RFC1035 - (10) a null RR (EXPERIMENTAL).
NULL,
/// RFC1035 - (11) a well known service description.
WKS,
/// RFC1035 - (12) a domain name pointer.
PTR,
/// RFC1035 - (13) host information.
HINFO,
/// RFC1035 - (14) mailbox or mail list information.
MINFO,
/// RFC1035 - (15) mail exchange.
MX,
/// RFC1035 - (16) text strings.
TXT,
/// RFC3596 - The AAAA resource record type is a record specific to the
/// Internet class that stores a single IPv6 address.
AAAA,
/// RFC1035 - (252) A request for a transfer of an entire zone.
AXFR,
/// RFC1035 - (253) A request for mailbox-related records (MB, MG or MR).
MAILB,
/// RFC1035 - (254) A request for mail agent RRs (Obsolete - see MX).
MAILA,
/// RFC1035 - (255) A request for all records.
STAR,
/// An unknown [`Type`] - the value is contained within.
Unknown(u16),
}
impl Type {
#[instrument(skip(buf))]
fn to_bytes(&self, buf: &mut Vec<u8>) -> usize {
let val = match self {
Self::A => 1u16.to_be_bytes(),
Self::NS => 2u16.to_be_bytes(),
Self::MD => 3u16.to_be_bytes(),
Self::MF => 4u16.to_be_bytes(),
Self::CNAME => 5u16.to_be_bytes(),
Self::SOA => 6u16.to_be_bytes(),
Self::MB => 7u16.to_be_bytes(),
Self::MG => 8u16.to_be_bytes(),
Self::MR => 9u16.to_be_bytes(),
Self::NULL => 10u16.to_be_bytes(),
Self::WKS => 11u16.to_be_bytes(),
Self::PTR => 12u16.to_be_bytes(),
Self::HINFO => 13u16.to_be_bytes(),
Self::MINFO => 14u16.to_be_bytes(),
Self::MX => 15u16.to_be_bytes(),
Self::TXT => 16u16.to_be_bytes(),
Self::AAAA => 28u16.to_be_bytes(),
Self::AXFR => 252u16.to_be_bytes(),
Self::MAILB => 253u16.to_be_bytes(),
Self::MAILA => 254u16.to_be_bytes(),
Self::STAR => 255u16.to_be_bytes(),
Self::Unknown(i) => i.to_be_bytes(),
};
buf.push(val[0]);
buf.push(val[1]);
trace!("Wrote 2 bytes");
2
}
}
impl fmt::Display for Type {
fn fmt(&self, f: &mut fmt::Formatter) -> std::result::Result<(), fmt::Error> {
let disp = match self {
Self::A => "A",
Self::NS => "NS",
Self::MD => "MD",
Self::MF => "MF",
Self::CNAME => "CNAME",
Self::SOA => "SOA",
Self::MB => "MB",
Self::MG => "MG",
Self::MR => "MR",
Self::NULL => "NULL",
Self::WKS => "WKS",
Self::PTR => "PTR",
Self::HINFO => "HINFO",
Self::MINFO => "MINFO",
Self::MX => "MX",
Self::TXT => "TXT",
Self::AAAA => "AAAA",
Self::AXFR => "AXFR",
Self::MAILB => "MAILB",
Self::MAILA => "MAILA",
Self::STAR => "*",
Self::Unknown(i) => {
write!(f, "Unknown({})", i)?;
return Ok(());
}
};
write!(f, "{}", disp)
}
}
impl Default for Type {
fn default() -> Self {
Self::A
}
}
impl From<Type> for u16 {
fn from(t: Type) -> u16 {
match t {
Type::A => 1,
Type::NS => 2,
Type::MD => 3,
Type::MF => 4,
Type::CNAME => 5,
Type::SOA => 6,
Type::MB => 7,
Type::MG => 8,
Type::MR => 9,
Type::NULL => 10,
Type::WKS => 11,
Type::PTR => 12,
Type::HINFO => 13,
Type::MINFO => 14,
Type::MX => 15,
Type::TXT => 16,
Type::AAAA => 28,
Type::AXFR => 252,
Type::MAILB => 253,
Type::MAILA => 254,
Type::STAR => 255,
Type::Unknown(i) => i,
}
}
}
impl From<u16> for Type {
#[instrument]
fn from(val: u16) -> Self {
match val {
1 => Type::A,
2 => Type::NS,
3 => Type::MD,
4 => Type::MF,
5 => Type::CNAME,
6 => Type::SOA,
7 => Type::MB,
8 => Type::MG,
9 => Type::MR,
10 => Type::NULL,
11 => Type::WKS,
12 => Type::PTR,
13 => Type::HINFO,
14 => Type::MINFO,
15 => Type::MX,
16 => Type::TXT,
28 => Type::AAAA,
252 => Type::AXFR,
253 => Type::MAILB,
254 => Type::MAILA,
255 => Type::STAR,
_ => Type::Unknown(val),
}
}
}
#[derive(Debug, PartialEq)]
/// The class of the query - you will want [`Class::IN`] (the default) 99.99% of
/// the time.
pub enum Class {
/// RFC1035 - 1 the Internet.
IN,
/// RFC1035 - 2 the CSNET class (Obsolete - used only for examples in some
/// obsolete RFCs)
CS,
/// RFC1035 - 3 the CHAOS class.
CH,
/// RFC1035 - 4 Hesiod [Dyer 87].
HS,
/// RFC1035 - 255 any class.
STAR,
/// An unknown class - contained within.
Unknown(u16),
}
impl Class {
#[instrument(skip(buf))]
pub(crate) fn to_bytes(&self, buf: &mut Vec<u8>) -> usize {
let val = match self {
Class::IN => 1u16.to_be_bytes(),
Class::CS => 2u16.to_be_bytes(),
Class::CH => 3u16.to_be_bytes(),
Class::HS => 4u16.to_be_bytes(),
Class::STAR => 255u16.to_be_bytes(),
Class::Unknown(i) => i.to_be_bytes(),
};
buf.push(val[0]);
buf.push(val[1]);
trace!("Wrote 2 bytes");
2
}
}
impl Default for Class {
fn default() -> Self {
Class::IN
}
}
impl From<u16> for Class {
#[instrument]
fn from(val: u16) -> Self {
match val {
1 => Class::IN,
2 => Class::CS,
3 => Class::CH,
4 => Class::HS,
255 => Class::STAR,
_ => Class::Unknown(val),
}
}
}
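// A minimal construction sketch (the concrete values are illustrative):
//
//     let q = Question {
//         q_name: "example.com".to_string(),
//         q_type: Type::A,
//         q_class: Class::default(), // Class::IN
//     };
//
// Within this crate, `q.to_bytes(&mut buf)` then appends the encoded QNAME,
// QTYPE and QCLASS to `buf`.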
| true |
afeb437670a05b943805247e3462411cd873890b
|
Rust
|
nushell/nushell
|
/crates/nu_plugin_example/src/main.rs
|
UTF-8
| 1,630 | 2.578125 | 3 |
[
"MIT"
] |
permissive
|
use nu_plugin::{serve_plugin, MsgPackSerializer};
use nu_plugin_example::Example;
fn main() {
// When defining your plugin, you can select the Serializer that could be
// used to encode and decode the messages. The available options are
// MsgPackSerializer and JsonSerializer. Both are defined in the serializer
// folder in nu-plugin.
serve_plugin(&mut Example {}, MsgPackSerializer {})
// Note
// When creating plugins in other languages one needs to consider how a plugin
// is added and used in nushell.
// The steps are:
    // - The plugin is registered. In this stage nushell calls the binary file of
    // the plugin, sending information using the encoded PluginCall::PluginSignature object.
// Use this encoded data in your plugin to design the logic that will return
// the encoded signatures.
    // Nushell expects an encoded PluginResponse::PluginSignature with all the
// plugin signatures
// - When calling the plugin, nushell sends to the binary file the encoded
// PluginCall::CallInfo which has all the call information, such as the
// values of the arguments, the name of the signature called and the input
// from the pipeline.
    // Use this data to design your plugin logic and to create the value that
// will be sent to nushell
// Nushell expects an encoded PluginResponse::Value from the plugin
// - If an error needs to be sent back to nushell, one can encode PluginResponse::Error.
// This is a labeled error that nushell can format for pretty printing
}
| true |
0fd68cf28ee5793478c11ce81785f8c02cf2c8c9
|
Rust
|
numitus/rust_ray_tracer
|
/src/tuple.rs
|
UTF-8
| 3,366 | 3.40625 | 3 |
[] |
no_license
|
use std::ops::{Add, Sub};
struct Tuple {
x:f64,
y:f64,
z:f64,
w:f64
}
pub fn almost_equal(a:f64,b:f64)->bool{
return (a-b).abs()<0.0001
}
impl Tuple {
pub fn new_tuple(x: f64, y: f64,z: f64,w:f64) -> Tuple {
Tuple { x: x, y: y,z:z,w:w } // this is fine, as we're in the same module
}
pub fn new_point(x: f64, y: f64,z: f64) -> Tuple {
Tuple { x: x, y: y,z:z,w:1.0 } // this is fine, as we're in the same module
}
pub fn new_vector(x: f64, y: f64,z: f64) -> Tuple {
Tuple { x: x, y: y,z:z,w:0.0 } // this is fine, as we're in the same module
}
pub fn is_point(&self) -> bool {
return almost_equal(self.w,1.0)
}
pub fn is_vector(&self) -> bool {
return almost_equal(self.w,0.0)
}
}
impl PartialEq for Tuple {
fn eq(&self, other: &Self) -> bool {
return almost_equal(self.x,other.x) && almost_equal(self.y,other.y) &&almost_equal(self.z,other.z) &&almost_equal(self.w,other.w)
}
}
impl Add for Tuple {
type Output = Tuple;
fn add(self, other: Tuple) -> Tuple {
Tuple { x: self.x+other.x,
y: self.y+other.y,
z:self.z+other.z,
w:self.w+other.w
}
}
}
impl Sub for Tuple {
type Output = Tuple;
fn sub(self, other: Tuple) -> Tuple {
Tuple { x: self.x-other.x,
y: self.y-other.y,
z:self.z-other.z,
w:self.w-other.w
}
}
}
#[cfg(test)]
mod tests {
use crate::tuple::{Tuple, almost_equal};
#[test]
fn tuple_point_creation() {
let t=Tuple::new_tuple(4.3,-4.2,3.1,1.0);
assert!(almost_equal(t.x,4.3));
assert!(almost_equal(t.y,-4.2));
assert!(almost_equal(t.z,3.1));
assert!(almost_equal(t.w,1.0));
assert!(t.is_point());
assert!(!t.is_vector());
}
#[test]
fn tuple_vector_creation() {
let t=Tuple::new_tuple(4.3,-4.2,3.1,0.0);
assert!(almost_equal(t.x,4.3));
assert!(almost_equal(t.y,-4.2));
assert!(almost_equal(t.z,3.1));
assert!(almost_equal(t.w,0.0));
assert!(!t.is_point());
assert!(t.is_vector());
}
#[test]
fn point_creation() {
let t=Tuple::new_point(4.3,-4.0,3.0);
assert!(t==Tuple::new_tuple(4.3,-4.0,3.0,1.0));
}
#[test]
fn vector_creation() {
let t=Tuple::new_vector(4.3,-4.0,3.0);
assert!(t==Tuple::new_tuple(4.3,-4.0,3.0,0.0));
}
#[test]
fn tuple_add() {
let a=Tuple::new_tuple(3.0,-2.0,5.0,1.0);
let b=Tuple::new_tuple(-2.0,3.0,1.0,0.0);
let c:Tuple=a+b;
assert!(c==Tuple::new_tuple(1.0,1.0,6.0,1.0));
}
#[test]
fn tuple_sub() {
let a=Tuple::new_point(3.0,2.0,1.0);
let b=Tuple::new_point(5.0,6.0,7.0);
let c:Tuple=a-b;
assert!(c==Tuple::new_vector(-2.0,-4.0,-6.0));
}
#[test]
fn test_sub_point_and_vector() {
let a=Tuple::new_point(3.0,2.0,1.0);
let b=Tuple::new_vector(5.0,6.0,7.0);
let c:Tuple=a-b;
assert!(c==Tuple::new_point(-2.0,-4.0,-6.0));
}
#[test]
fn test_vector_and_vector() {
let a=Tuple::new_vector(3.0,2.0,1.0);
let b=Tuple::new_vector(5.0,6.0,7.0);
let c:Tuple=a-b;
assert!(c==Tuple::new_vector(-2.0,-4.0,-6.0));
}
}
| true |
1de9a186171d10b478f5f72afe07e00e64ec5111
|
Rust
|
bddap/crappy-lisp
|
/examples/1.rs
|
UTF-8
| 1,493 | 2.9375 | 3 |
[] |
no_license
|
use serde_json::json as lisp; // I find this funny
fn main() {
// Computing sqrt(x) using bisection
let _sqrt = lisp!([
{
"sqrt": ["fn", ["L", "H", "Q", "depth"], [
"if",
["<=", "depth", 0],
"L",
[
{"halfway": ["/", ["+", "L", "H"], 2]},
"if",
["<", ["exp2", "halfway"], "Q"],
["sqrt", "L", "halfway", ["-", "depth", 1]],
["sqrt", "halfway", "H", ["-", "depth", 1]]
]
]],
"exp2": ["fn", ["a"], ["*", "a", "a"]]
},
["fn", ["Q"], ["sqrt", 0, "Q", 32]]
]);
// Write `fib` (fibonacci) function
let _fib = lisp!([
{
"fib": ["fn", ["n"], [
"if",
["==", "n", 1],
0,
[
"if",
["==", "n", 2],
1,
["+", ["fib", ["-", "n", 1]], ["fib", ["-", "n", 2]]]
]
]],
},
["fn", ["n"], ["fib", "n"]]
]);
// Write a program that infinitely recurses upon itself
let _recurse = lisp!([
{"rec": ["fn", [], ["rec"]]},
["fn", [], ["rec"]]
]);
    // This silly thing is called the Omega program; it does not terminate
// ((λ f . (f f)) (λ f . (f f)))
}
// needed builtins:
// if
// *
// /
// +
// -
// ==
// fn
| true |
3d6b15ec158fdd83c2ddab1747ea7199e6885298
|
Rust
|
xarvic/rust_gui_test
|
/src/state/clone_state.rs
|
UTF-8
| 1,888 | 2.921875 | 3 |
[] |
no_license
|
use crate::state::{Handle, HandleInner, StateInner, StateID, State};
use std::sync::Arc;
use crate::state::key::Key;
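// Keeps a local clone of the shared value together with the commit counter it
// was cloned at: reads refresh the cache only when the shared `StateInner` has
// advanced past that commit, and `with_key` writes the cache back (recording
// the new commit) whenever the `Key` reports a change.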
#[derive(Clone)]
pub struct CloneState<T: Clone> {
cache: T,
commit: u64,
inner: Arc<StateInner<T>>,
}
impl<T: 'static + Clone + Send + Sync> CloneState<T> {
pub fn new(value: T) -> Self {
let state = CloneState {
cache: value.clone(),
commit: 0,
inner: Arc::new(StateInner::new(value)),
};
state
}
pub fn handle(&self) -> Handle {
Handle(self.inner.clone() as Arc<dyn HandleInner + Send + Sync>)
}
}
impl<T: 'static + Clone + Send + Sync> State<T> for CloneState<T> {
fn get_id(&self) -> StateID {
self.inner.id()
}
fn with_value<R>(&self, operation: impl FnOnce(&T) -> R) -> R {
operation(&self.cache)
}
fn with_fetched_value<R>(&mut self, operation: impl FnOnce(&T, Option<&T>) -> R) -> R {
let new_commit = self.inner.commit();
if new_commit > self.commit {
self.commit = new_commit;
let CloneState{inner, cache, commit} = self;
inner.use_value(|value|cache.clone_from(value))
}
operation(&self.cache, None)//TODO: change
}
fn with_key<R>(&mut self, operation: impl FnOnce(Key<T>) -> R) -> R {
let new_commit = self.inner.commit();
if new_commit > self.commit {
self.commit = new_commit;
let CloneState{inner, cache, commit} = self;
inner.use_value(|value|cache.clone_from(value))
}
let mut change = false;
let r = operation(Key::new(&mut self.cache, &mut change));
if change {
let (_, commit) = self.inner.update_value(|value| {
value.clone_from(&self.cache)
});
self.commit = commit;
}
r
}
}
| true |
6cd30c906018165b40a7ea2442bcbb80656312b0
|
Rust
|
Noah2610/timecalc
|
/src/parse/parse_time.rs
|
UTF-8
| 4,046 | 2.984375 | 3 |
[] |
no_license
|
use crate::time::Time;
use crate::time_result::{TimeError, TimeResult};
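// Accepts inputs such as "2 03:15:30.250" (a hand-traced example against the
// regex below): 2 days, 3 hours, 15 minutes, 30 seconds and 250 milliseconds.
// The leading day count and the trailing ".millis" suffix are both optional.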
pub fn parse_time<S: ToString>(input: S) -> TimeResult<Time> {
let re = regex::Regex::new(
r"(?:(?P<days>\d+) )?(?P<num>\d{1,2}):?(?:\.(?P<millis>\d{1,3})$\s*)?",
)
.unwrap();
let input = input.to_string();
let mut nums: Vec<u32> = Vec::new();
let mut days: Option<u32> = None;
let mut millis: Option<u32> = None;
// for cap in caps.iter().skip(1) {
for caps in re.captures_iter(input.as_str()) {
// let caps = re.captures(input.as_str()).ok_or_else(|| {
// TimeError::ParseInputError {
// input: input.to_string(),
// }
// })?;
let num = parse_num(&caps["num"], None)?;
nums.push(num);
if let Some(cap_days) = caps.name("days") {
days = Some(parse_num(cap_days.as_str(), Some("days"))?);
}
if let Some(cap_millis) = caps.name("millis") {
millis =
Some(parse_num(cap_millis.as_str(), Some("milliseconds"))?);
}
}
if nums.is_empty() && days.is_none() && millis.is_none() {
return Err(TimeError::ParseInputError {
input: input.to_string(),
});
}
let mut time = Time::default();
let mut iter = nums.into_iter();
days.map(|days| time.set_days(days));
iter.next().map(|hours| time.set_hours(hours));
iter.next().map(|minutes| time.set_minutes(minutes));
iter.next().map(|seconds| time.set_seconds(seconds));
millis.map(|millis| time.set_milliseconds(millis));
Ok(time)
// iter.next()
// .map(|hours| {
// let time = time.with_hours(hours);
// iter.next()
// .map(|minutes| {
// let time = time.with_minutes(minutes);
// iter.next()
// .map(|seconds| time.with_seconds(seconds))
// .or_else(time)
// })
// .or_else(time)
// })
// .or_else(time);
// if let Some(hours) = iter.next() {
// time.set_hours(hours);
// }
// if let Some(minutes) = iter.next() {
// time.set_minutes(minutes);
// }
// if let Some(seconds) = iter.next() {
// time.set_seconds(seconds);
// }
// match (nums.get(0), nums.get(1), nums.get(2)) {
// (None, None, None) => Err(TimeError::ParseInputError {
// input: input.to_string(),
// }),
// (Some(&hours), None, None) => Ok({
// let mut time = Time::default();
// if let Some(days) = days {
// time.set_days(days);
// }
// if let Some(milliseconds) = millis {
// time.set_milliseconds(milliseconds);
// }
// time.with_hours(hours)
// }),
// (Some(&hours), Some(&minutes), None) => Ok({
// let mut time = Time::default();
// if let Some(days) = days {
// time.set_days(days);
// }
// if let Some(milliseconds) = millis {
// time.set_milliseconds(milliseconds);
// }
// time.with_hours(hours).with_minutes(minutes)
// }),
// (Some(&hours), Some(&minutes), Some(&seconds)) => Ok({
// let mut time = Time::default();
// if let Some(days) = days {
// time.set_days(days);
// }
// if let Some(milliseconds) = millis {
// time.set_milliseconds(milliseconds);
// }
// time.with_hours(hours)
// .with_minutes(minutes)
// .with_seconds(seconds)
// }),
// _ => unreachable!(),
// }
}
fn parse_num(s: &str, name: Option<&str>) -> TimeResult<u32> {
s.parse::<u32>().or_else(|_| {
Err(TimeError::ParseNumError {
text: s.to_string(),
name: name.map(ToString::to_string),
})
})
}
| true |
1eed7816f7802c2362a20096853a65283bb1b9cb
|
Rust
|
debruinf/imgp
|
/src/lib.rs
|
UTF-8
| 4,258 | 3.296875 | 3 |
[] |
no_license
|
extern crate image;
use std::error::Error;
use std::process;
#[derive(Debug)]
pub struct Input {
flags: Vec<Config>,
}
impl Input {
pub fn new(mut args: Vec<String>) -> Result<Input, &'static str> {
args.remove(0);
args.remove(0);
let mut flags: Vec<Config> = Vec::new();
while args.len() > 0 {
let d = args.get(0..2).unwrap().to_vec();
args.remove(0);
args.remove(0);
let c = Config::new(&d).unwrap();
            flags.push(c);
}
Ok(Input {flags})
}
}
#[derive(Debug)]
pub struct Config {
command: String,
instruction: String,
}
impl Config {
pub fn new(args: &Vec<String>) -> Result<Config, &'static str> {
if args.len() < 2 {
return Err("Missing argument(s), please check your input")
}
let command = args[0].clone();
let instruction = args[1].clone();
Ok(Config { command, instruction })
}
}
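// Typical invocation given the current `run` logic below:
// `img photo.jpg -d out.jpg -r cw` copies photo.jpg to out.jpg and then
// rotates the copy clockwise (the first flag must be `-d`, the destination).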
pub fn run(img_path: &String, input: Input) {
let source_img = image::open(img_path).unwrap_or_else(|_err| {
println!("No image at path '{}'", img_path);
process::exit(1);
});
if &input.flags[0].command[..] != "-d" {
println!("Error, no destination");
process::exit(1);
};
let destination: &str = &input.flags[0].instruction;
copy_img(&source_img, destination);
let copied_img = image::open(destination).unwrap_or_else(|_err| {
println!("Something went wrong");
process::exit(1);
});
for c in &input.flags {
match &c.command[..] {
"-r" => {
rotate(&copied_img, &c.instruction, &destination);
println!("Rotated!")
},
"-s" => println!("Resized"),
"-sf" => println!("Resize forced"),
// "-r" => rotate(&img, &c.instruction, &img_path),
// "-s" => size(img, &config.instruction, &img_path),
// "-sf" => size_forced(img, &config.instruction, &img_path),
_ => {
println!("Unknown flag");
}
};
};
println!("Hellow orld!")
}
pub fn check_min_length(args: &Vec<String>) -> Result<(), &'static str> {
if args.len() < 2 {
return Err("Too few arguments")
}
Ok(())
}
pub fn check_if_help(args: &Vec<String>) {
if &args[1][..] == "-h" {
let _ = print_help();
process::exit(0);
}
}
fn print_help() -> Result<(), Box<Error>> {
println!("Description for IMGP, a command line tool for simple operations on images, written in Rust");
println!("For now, only performs one operation at the time");
println!("");
println!("For help: img -h");
println!("");
println!("Syntax: img <img_path> -flag <instruction>");
println!("Options:");
println!("-c\tCopies the image as-is to path given in <instruction>");
println!("-r\tRotates the image according to the <instructions> and saves in the original location:");
println!("\t\t[cw] for clockwise operation and [ccw] for counter clockwise operation");
println!("-s\tSizes the image to dimensions as specified in <instructions> and saves in the orignal location. Preserves the image dimensions ad fits largest size. <instruction> for dimension takes the for of [<width>x<height>]");
println!("-sf\tSizes the image to dimensions as specified in <instructions> and saves in the orignal location. Does NOT preserves the image dimensions. <instruction> for dimension takes the for of [<width>x<height>]");
process::exit(1);
}
fn copy_img(img: &image::DynamicImage, copy_path: &str) -> Result<(), Box<Error>> {
img.save(copy_path)?;
println!("Image copied to {}", copy_path);
Ok(())
}
fn rotate(img: &image::DynamicImage, direction: &str, destination: &str) -> Result<(), Box<Error>> {
let mut new_img = img.clone();
new_img = match &direction[..] {
"cw" => new_img.rotate90(),
"ccw" => new_img.rotate270(),
_ => {
println!("Direction not correctly specified");
new_img
}
};
new_img.save(destination)?;
println!("Image copied to {}", destination);
Ok(())
}
| true |
13f5b589472748038d4d8766791ad968d1b7ec19
|
Rust
|
makutak/sandbox
|
/books/rustbook/ch03/basic/src/my_box.rs
|
UTF-8
| 242 | 2.734375 | 3 |
[] |
no_license
|
pub fn exec() {
let byte_ary = [b'h', b'e', b'l', b'l', b'o'];
print(Box::new(byte_ary));
let byte_ary = [b'w', b'o', b'r', b'l', b'd', b'!'];
print(Box::new(byte_ary));
}
fn print(s: Box<[u8]>) {
println!("{:?}", s);
}
| true |
f599d1295c2be8bc0aeb4fd9cc15913726ad5ea7
|
Rust
|
NattapongSiri/ds_prep_gen.rs
|
/src/main.rs
|
UTF-8
| 1,930 | 3.140625 | 3 |
[
"MIT"
] |
permissive
|
extern crate rand;
extern crate serde;
#[macro_use]
extern crate serde_derive;
extern crate serde_json;
use rand::prelude::*;
use std::fs::{File};
use std::io::{BufWriter, Write};
use std::ops::Add;
#[derive(Clone, Copy, Serialize)]
struct Point (u32, u32, u32);
#[derive(Clone, Copy, Serialize)]
struct FPoint (f64, f64, f64);
impl Add for Point {
type Output = Point;
fn add(self, rhs : Point) -> Self::Output {
Point(self.0 + rhs.0, self.1 + rhs.1, self.2 + rhs.2)
}
}
struct RandomPoint(SmallRng);
impl RandomPoint {
pub fn new() -> Self {
let rng_seed = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15];
RandomPoint(SmallRng::from_seed(rng_seed))
}
pub fn next(&mut self) -> Point {
// return point that is never larger than (7, 7, 7)
Point(self.0.next_u32() & 7, self.0.next_u32() & 7, self.0.next_u32() & 7)
}
}
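// Each output line is a JSON array of `n` points. The seeded random-walk
// generator above is currently unused in favour of a deterministic sine wave
// (see the commented-out `rdm` calls below), which keeps the output reproducible.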
fn main() -> Result<(), Box<std::error::Error>> {
let f = File::create("data.txt")?;
let mut writer = BufWriter::with_capacity(32 * 2usize.pow(20), f); // 32MB buffer
let origin = FPoint(0f64, 0f64, 0f64); // starting point for every point
    let n = 10; // number of points in each record
    let mut points = vec![origin; n];
let len = 10usize.pow(5); // need 100,000 records
// let mut rdm = RandomPoint::new();
    // rough output size: 10^5 records x 10 points x ~60 bytes per serialized point, about 60 MB
(0..len).for_each(|r| {
// each record
(0..n).for_each(|i| {
// each point
// randomly move point
// points[i] = points[i] + rdm.next();
// calculate sin wave
let coor = f64::sin((r + i) as f64 / std::f64::consts::PI);
points[i] = FPoint(coor, coor, coor)
});
        let p_str = serde_json::to_string(&points).unwrap();
        writer.write_all(p_str.as_bytes()).unwrap();
        writer.write_all(b"\n").unwrap();
});
Ok(())
}
| true |
8627e75eb6f10ac2f7fd59411e0c1761a628a908
|
Rust
|
sdleffler/doll
|
/src/syntax.rs
|
UTF-8
| 11,454 | 3.078125 | 3 |
[] |
no_license
|
use std::cmp;
use std::fmt::Debug;
use std::rc::Rc;
use std::result;
use stack::Stack;
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum InfTerm<I> {
Ann(Rc<ChkTerm<I>>, Rc<ChkTerm<I>>),
Star,
Pi(Rc<ChkTerm<I>>, Rc<ChkTerm<I>>),
Bound(usize),
Free(Name<I>),
App(Rc<InfTerm<I>>, Rc<ChkTerm<I>>),
}
pub use self::InfTerm::*;
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum ChkTerm<I> {
Inf(Rc<InfTerm<I>>),
Lam(Rc<ChkTerm<I>>),
}
pub use self::ChkTerm::*;
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum Name<I> {
Global(I),
Local(usize),
Quote(usize),
}
pub use self::Name::*;
#[derive(Debug, Clone, PartialEq)]
pub enum Value<I> {
VLam(Closure<I>),
VStar,
VPi(Rc<Value<I>>, Closure<I>),
VNeutral(Neutral<I>),
}
pub use self::Value::*;
type Env<I> = Stack<Value<I>>;
type Ctx<I> = Stack<(Name<I>, Rc<Value<I>>)>;
type Result<T> = result::Result<T, String>;
#[derive(Debug, Clone, PartialEq)]
pub struct Closure<I> {
env: Rc<Env<I>>,
body: Rc<ChkTerm<I>>,
}
impl<I: Debug + Clone + PartialEq> Closure<I> {
fn eval(&self, arg: Rc<Value<I>>) -> Rc<Value<I>> {
let mut env = self.env.as_ref().clone().cons_shared(arg);
self.body.eval(&mut env)
}
}
#[derive(Debug, Clone, PartialEq)]
pub enum Neutral<I> {
NFree(Name<I>),
NApp(Box<Neutral<I>>, Rc<Value<I>>),
}
pub use self::Neutral::*;
impl<I: Debug + Clone + PartialEq> Value<I> {
fn quote(&self, i: usize) -> Rc<ChkTerm<I>> {
match *self {
VLam(ref closure) => {
Rc::new(Lam(closure.eval(Rc::new(VNeutral(NFree(Quote(i)))))
.quote(i + 1)))
}
VStar => Rc::new(Inf(Rc::new(Star))),
VPi(ref bound, ref closure) => {
Rc::new(Inf(Rc::new(Pi(bound.quote(i),
closure.eval(Rc::new(VNeutral(NFree(Quote(i)))))
.quote(i + 1)))))
}
VNeutral(ref n) => Rc::new(Inf(n.quote(i))),
}
}
}
impl<I: Debug + Clone + PartialEq> Neutral<I> {
fn quote(&self, i: usize) -> Rc<InfTerm<I>> {
match *self {
NFree(ref x) => x.quote(i),
NApp(ref n, ref v) => Rc::new(App(n.quote(i), v.quote(i))),
}
}
}
impl<I: Clone + PartialEq> Name<I> {
fn quote(&self, i: usize) -> Rc<InfTerm<I>> {
match *self {
Quote(ref k) => Rc::new(Bound(i - k - 1)),
ref x => Rc::new(Free((*x).clone())),
}
}
}
impl<I: Debug + Clone + PartialEq> InfTerm<I> {
fn depth(&self, i: usize) -> usize {
match *self {
Ann(ref e, ref t) => cmp::max(e.depth(i), t.depth(i)),
Star => 0,
Pi(ref a, ref b) => cmp::max(a.depth(i), b.depth(i + 1)),
Free(_) => 0,
Bound(j) if j > i => j - i,
Bound(_) => 0,
App(ref x, ref y) => cmp::max(x.depth(i), y.depth(i)),
}
}
fn eval(&self, env: &mut Env<I>) -> Rc<Value<I>> {
println!("eval_inf {:?} |- {:?} ~~> ?", env, self);
match *self {
Ann(ref e, _) => e.eval(env),
Star => Rc::new(VStar),
Pi(ref a, ref b) => {
Rc::new(VPi(a.eval(env),
Closure {
env: Rc::new(env.capture(b.depth(0))),
body: b.clone(),
}))
}
Free(ref x) => Rc::new(VNeutral(NFree(x.clone()))),
Bound(i) => env.share(i),
App(ref x, ref y) => {
match *x.eval(env) {
VLam(ref closure) => closure.eval(y.eval(env)),
VNeutral(ref neutral) => {
Rc::new(VNeutral(NApp(Box::new(neutral.clone()), y.eval(env))))
}
_ => {
panic!("Maltyped normal form: Attempted to apply non-lambda, non-neutral \
expression to a value.")
}
}
}
}
}
fn infer_type(&self, i: usize, ctx: &mut Ctx<I>) -> Result<Rc<Value<I>>> {
println!("infer_type[{}] {:?} |- {:?} : ?", i, ctx, self);
match *self {
Ann(ref e, ref t) => {
let t = {
let mut env = Stack::new();
t.eval(&mut env)
};
e.check_type(i, &t, ctx)?;
Ok(t)
}
Star => Ok(Rc::new(VStar)),
Pi(ref a, ref b) => {
a.check_type(i, &VStar, ctx)?;
let t = {
let mut env = Stack::new();
b.eval(&mut env)
};
ctx.isolate(|mut ctx| {
ctx.push((Local(i), t));
subst_chk(b, 0, &Rc::new(Free(Local(i)))).check_type(i + 1, &VStar, ctx)
})?;
Ok(Rc::new(VStar))
}
Bound(_) => {
panic!("Error: encountered non-free variable in context {:?} evaluating \
expression {:?}",
ctx,
self)
}
Free(ref x) => {
ctx.lookup(x)
.map(Clone::clone)
.ok_or_else(|| format!("Variable is not in the local context!"))
.clone()
}
App(ref a, ref b) => {
let s = a.infer_type(i, ctx)?;
match *s {
VPi(ref bound, ref closure) => {
b.check_type(i, bound, ctx)?;
Ok(closure.eval({
let mut env = Stack::new();
b.eval(&mut env)
}))
}
_ => Err("Illegal application!".to_string()),
}
}
}
}
}
impl<I: Debug + Clone + PartialEq> ChkTerm<I> {
fn depth(&self, i: usize) -> usize {
match *self {
Inf(ref e) => e.depth(i),
Lam(ref e) => e.depth(i + 1),
}
}
fn eval(&self, env: &mut Env<I>) -> Rc<Value<I>> {
println!("eval_chk {:?} |- {:?} ~~> ?", env, self);
match *self {
Inf(ref e) => e.eval(env),
Lam(ref e) => {
Rc::new(VLam(Closure {
env: Rc::new(env.capture(e.depth(0))),
body: e.clone(),
}))
}
}
}
fn check_type(&self, i: usize, against: &Value<I>, ctx: &mut Ctx<I>) -> Result<()> {
println!("check_type[{}] {:?} |- {:?} : {:?}", i, ctx, self, against);
match *self {
Inf(ref e) => {
let inferred = e.infer_type(i, ctx)?;
if *against.quote(0) == *inferred.quote(0) {
Ok(())
} else {
Err(format!("Type mismatch: expected {:?}, but inferred {:?}.",
inferred,
against))
}
}
Lam(ref e) => {
match *against {
VPi(ref bound, ref closure) => {
ctx.isolate(|mut ctx| {
ctx.push((Local(i), bound.clone()));
subst_chk(e, 0, &Rc::new(Free(Local(i))))
.check_type(i + 1,
closure.eval(Rc::new(VNeutral(NFree(Local(i)))))
.as_ref(),
&mut ctx)
})
}
_ => Err("Type mismatch!".to_string()),
}
}
}
}
}
fn subst_inf<I: Debug + Clone + PartialEq>(this: &Rc<InfTerm<I>>,
i: usize,
inj: &Rc<InfTerm<I>>)
-> Rc<InfTerm<I>> {
// println!("Infer-term substitution: {:?} for DeBruijn {} in {:?}.",
// inj,
// i,
// this);
match **this {
Ann(ref e, ref t) => Rc::new(Ann(subst_chk(e, i, inj), subst_chk(t, i, inj))),
Star => Rc::new(Star),
Pi(ref a, ref b) => Rc::new(Pi(subst_chk(a, i, inj), subst_chk(b, i + 1, inj))),
Bound(ref j) => if &i == j { inj.clone() } else { this.clone() },
Free(ref x) => Rc::new(Free((*x).clone())),
App(ref x, ref y) => Rc::new(App(subst_inf(x, i, inj), subst_chk(y, i, inj))),
}
}
fn subst_chk<I: Debug + Clone + PartialEq>(this: &Rc<ChkTerm<I>>,
i: usize,
inj: &Rc<InfTerm<I>>)
-> Rc<ChkTerm<I>> {
// println!("Checked-term substitution: {:?} for DeBruijn {} in {:?}.",
// inj,
// i,
// this);
match **this {
Inf(ref e) => Rc::new(Inf(subst_inf(e, i, inj))),
Lam(ref e) => Rc::new(Lam(subst_chk(e, i + 1, inj))),
}
}
#[cfg(test)]
mod tests {
use super::*;
use std::rc::Rc;
use stack::Stack;
type TestInfTerm = super::InfTerm<&'static str>;
type TestChkTerm = super::ChkTerm<&'static str>;
type TestValue = super::Value<&'static str>;
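    // Builds the annotated identity-style test term: λa. λx. (x : a) paired
    // with a Π type over * written with De Bruijn indices; `eval_id` below
    // applies it to * twice and expects * back.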
fn mk_id() -> Rc<TestInfTerm> {
let id_lam: TestChkTerm =
Lam(Rc::new(Lam(Rc::new(Inf(Rc::new(Ann(Rc::new(Inf(Rc::new(Bound(0)))),
Rc::new(Inf(Rc::new(Bound(1)))))))))));
let id_ty: TestChkTerm =
Inf(Rc::new(Pi(Rc::new(Inf(Rc::new(Star))),
Rc::new(Inf(Rc::new(Pi(Rc::new(Inf(Rc::new(Bound(0)))),
Rc::new(Inf(Rc::new(Bound(0)))))))))));
let id: TestInfTerm = Ann(Rc::new(id_lam), Rc::new(id_ty));
Rc::new(id)
}
#[test]
fn eval_star() {
let mut env = Stack::new();
let term: TestInfTerm = Star;
let val = term.eval(&mut env);
assert_eq!(*val, VStar);
}
#[test]
fn eval_id() {
let mut env = Stack::new();
let term: Rc<TestInfTerm> = Rc::new(App(Rc::new(App(mk_id(),
Rc::new(Inf(Rc::new(Star))))),
Rc::new(Inf(Rc::new(Star)))));
let val = term.eval(&mut env);
assert_eq!(*val, VStar);
}
#[test]
fn typeck_id() {
let mut env = Stack::new();
let mut ctx = Stack::new();
let id: Rc<TestChkTerm> =
Rc::new(Lam(Rc::new(Lam(Rc::new(Inf(Rc::new(Ann(Rc::new(Inf(Rc::new(Bound(0)))),
Rc::new(Inf(Rc::new(Bound(1))))))))))));
let id_ty: Rc<TestValue> =
Inf(Rc::new(Pi(Rc::new(Inf(Rc::new(Star))),
Rc::new(Inf(Rc::new(Pi(Rc::new(Inf(Rc::new(Bound(0)))),
Rc::new(Inf(Rc::new(Bound(1)))))))))))
.eval(&mut env);
id.check_type(0, &id_ty, &mut ctx).unwrap();
}
}
| true |
717eacb9493e5db8d7eb6ab89243d2af743728c4
|
Rust
|
HarrisonMc555/exercism
|
/rust/minesweeper/src/lib.rs
|
UTF-8
| 2,116 | 3.25 | 3 |
[] |
no_license
|
const BASE: u32 = 10;
const MINE: char = '*';
const BLANK: char = ' ';
type Grid<'a> = &'a [Vec<char>];
type Location = (usize, usize);
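// Hand-checked example: annotate(&["*  ", " * ", "   "]) returns
// ["*21", "2*1", "111"]; each blank cell becomes the count of adjacent mines
// and stays blank when that count is zero.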
pub fn annotate(minefield: &[&str]) -> Vec<String> {
let minefield = to_grid_of_chars(minefield);
let row_indices = 0..minefield.len();
row_indices.map(|i| annotate_row(&minefield, i)).collect()
}
fn annotate_row(minefield: Grid, row_i: usize) -> String {
let col_indices = 0..minefield[row_i].len();
col_indices
.map(|j| annotate_cell(minefield, (row_i, j)))
.collect()
}
fn annotate_cell(minefield: Grid, location: Location) -> char {
let (row, col): (usize, usize) = location;
let c = minefield[row][col];
if is_mine(c) {
c
} else if is_blank(c) {
let num_touching_mines = count_touching_mines(minefield, location);
if num_touching_mines == 0 {
BLANK
} else {
get_digit_char(num_touching_mines)
}
} else {
panic!("Invalid minefield char")
}
}
fn count_touching_mines(minefield: Grid, location: Location) -> u8 {
let (row, col) = location;
let min_row = 0;
let first_row = if row == min_row { min_row } else { row - 1 };
let max_row = minefield.len() - 1;
let last_row = if row == max_row { max_row } else { row + 1 };
let min_col = 0;
let first_col = if col == min_col { min_col } else { col - 1 };
let max_col = minefield[row].len() - 1;
let last_col = if col == max_col { max_col } else { col + 1 };
let touching_locations = (first_row..=last_row)
.map(|i| (first_col..=last_col).map(move |j| (i, j)))
.flatten();
let mine_locations =
touching_locations.filter(|&(i, j)| is_mine(minefield[i][j]));
mine_locations.count() as u8
}
fn to_grid_of_chars(grid: &[&str]) -> Vec<Vec<char>> {
grid.iter()
.map(|&row_str| row_str.chars().collect())
.collect()
}
fn is_mine(c: char) -> bool {
c == MINE
}
fn is_blank(c: char) -> bool {
c == BLANK
}
fn get_digit_char(digit: u8) -> char {
std::char::from_digit(digit as u32, BASE).unwrap()
}
| true |
cbb2d04bea11c350873485dc17e63bac037a054f
|
Rust
|
healeycodes/quill
|
/src/parser.rs
|
UTF-8
| 31,031 | 3.1875 | 3 |
[
"MIT"
] |
permissive
|
use crate::error;
use crate::eval;
use crate::lexer;
use crate::log;
use std::fmt;
#[derive(Debug, Clone)]
pub enum Node {
UnaryExprNode {
operator: lexer::Kind,
operand: Box<Node>,
position: lexer::Position,
},
BinaryExprNode {
operator: lexer::Kind,
left_operand: Box<Node>,
right_operand: Box<Node>,
position: lexer::Position,
},
FunctionCallNode {
function: Box<Node>,
arguments: Vec<Node>,
},
MatchClauseNode {
target: Box<Node>,
expression: Box<Node>,
},
MatchExprNode {
condition: Box<Node>,
clauses: Vec<Node>, // MatchClauseNode
position: lexer::Position,
},
ExpressionListNode {
expressions: Vec<Node>,
position: lexer::Position,
},
EmptyIdentifierNode {
position: lexer::Position,
},
IdentifierNode {
val: String,
position: lexer::Position,
},
NumberLiteralNode {
val: f64,
position: lexer::Position,
},
StringLiteralNode {
val: Vec<u8>,
position: lexer::Position,
},
BooleanLiteralNode {
val: bool,
position: lexer::Position,
},
ObjectLiteralNode {
entries: Vec<Node>, // ObjectEntryNode
position: lexer::Position,
},
ObjectEntryNode {
key: Box<Node>,
val: Box<Node>,
position: lexer::Position,
},
ListLiteralNode {
vals: Vec<Node>,
position: lexer::Position,
},
FunctionLiteralNode {
arguments: Vec<Node>,
body: Box<Node>,
position: lexer::Position,
},
}
impl Node {
pub fn pos(&self) -> lexer::Position {
match self {
Node::UnaryExprNode { position, .. } => *position,
Node::MatchClauseNode { target, .. } => target.pos(),
Node::FunctionCallNode { function, .. } => function.pos(),
Node::BinaryExprNode { position, .. } => *position,
Node::MatchExprNode { position, .. } => *position,
Node::ExpressionListNode { position, .. } => *position,
Node::EmptyIdentifierNode { position, .. } => *position,
Node::IdentifierNode { position, .. } => *position,
Node::NumberLiteralNode { position, .. } => *position,
Node::StringLiteralNode { position, .. } => *position,
Node::BooleanLiteralNode { position, .. } => *position,
Node::ObjectLiteralNode { position, .. } => *position,
Node::ObjectEntryNode { position, .. } => *position,
Node::ListLiteralNode { position, .. } => *position,
Node::FunctionLiteralNode { position, .. } => *position,
}
}
}
impl fmt::Display for Node {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
Node::UnaryExprNode {
operator, operand, ..
} => {
write!(f, "Unary {} ({})", operator, operand)
}
Node::MatchClauseNode {
target, expression, ..
} => {
write!(f, "Clause ({}) -> ({})", target, expression)
}
Node::FunctionCallNode {
function,
arguments,
..
} => {
write!(
f,
"Call ({}) on ({})",
function,
arguments
.iter()
.map(|arg| arg.to_string())
.collect::<Vec<String>>()
.join(", "),
)
}
Node::BinaryExprNode {
left_operand,
operator,
right_operand,
..
} => {
write!(
f,
"Binary ({}) {} ({})",
left_operand, operator, right_operand
)
}
Node::MatchExprNode {
condition, clauses, ..
} => write!(
f,
"Match on ({}) to {{{}}}",
condition,
clauses
.iter()
.map(|clause| clause.to_string())
.collect::<Vec<String>>()
.join(", ")
),
Node::ExpressionListNode { expressions, .. } => {
write!(
f,
"Expression List ({})",
expressions
.iter()
.map(|expr| expr.to_string())
.collect::<Vec<String>>()
.join(", ")
)
}
Node::EmptyIdentifierNode { .. } => write!(f, "Empty Identifier"),
Node::IdentifierNode { val, .. } => write!(f, "Identifier '{}'", val),
Node::NumberLiteralNode { val, .. } => write!(f, "Number {}", eval::n_to_s(*val)),
Node::StringLiteralNode { val, .. } => {
write!(f, "String {}", std::str::from_utf8(val).unwrap())
}
Node::BooleanLiteralNode { val, .. } => write!(f, "Boolean {}", val),
Node::ObjectLiteralNode { entries, .. } => write!(
f,
"Object {}",
entries
.iter()
.map(|entry| entry.to_string())
.collect::<Vec<String>>()
.join(", ")
),
Node::ObjectEntryNode { key, val, .. } => {
write!(f, "Object Entry ({}): ({})", key, val)
}
Node::ListLiteralNode { vals, .. } => write!(
f,
"List [{}]",
vals.iter()
.map(|val| val.to_string())
.collect::<Vec<String>>()
.join(", ")
),
Node::FunctionLiteralNode {
body, arguments, ..
} => {
write!(
f,
"Function ({}) => ({})",
arguments
.iter()
.map(|arg| arg.to_string())
.collect::<Vec<String>>()
.join(", "),
body
)
}
}
}
}
fn guard_unexpected_input_end(tokens: &[lexer::Tok], idx: usize) -> Result<(), error::Err> {
if idx >= tokens.len() {
if !tokens.is_empty() {
return Err(error::Err {
reason: error::ERR_SYNTAX,
message: format!("unexpected end of input at {}", tokens[tokens.len() - 1]),
});
}
return Err(error::Err {
reason: error::ERR_SYNTAX,
message: "unexpected end of input".to_string(),
});
}
Ok(())
}
macro_rules! guard_unexpected_input_end {
($tokens:expr,$idx:expr) => {{
let err = guard_unexpected_input_end($tokens, $idx);
if let Err(err) = err {
return (Err(err), 0);
}
}};
}
// GoInk: Parse transforms a list of Tok (tokens) to Node (AST nodes).
// This implementation uses recursive descent parsing.
pub fn parse(tokens: &[lexer::Tok], fatal_error: bool, debug_parser: bool) -> Vec<Node> {
let mut nodes: Vec<Node> = Vec::new();
let mut idx = 0;
while idx < tokens.len() {
if tokens[idx].kind == lexer::Token::Separator {
// GoInk: this sometimes happens when the repl receives comment inputs
idx += 1;
continue;
}
let (expr, incr) = parse_expression(&tokens[idx..]);
idx += incr;
if let Err(ref e) = expr {
match e.reason {
error::ERR_UNKNOWN => log::log_err_f(
error::ERR_ASSERT,
&[format!("err raised that was not of Err type -> {}", e)],
),
_ => {
if fatal_error {
log::log_err(e.reason, &[e.message.clone()])
} else {
log::log_safe_err(e.reason, &[e.message.clone()])
}
}
}
return vec![];
}
let _expr = expr.unwrap();
if debug_parser {
log::log_debug(&[format!("parse -> {}", &_expr)])
}
nodes.push(_expr)
}
nodes
}
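// Example (sketch): assuming the lexer stage produces a `Vec<lexer::Tok>` for a
// source string, the parser is typically driven like this:
//
//     let nodes = parse(&tokens, true, false);
//     for node in &nodes {
//         println!("{}", node);
//     }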
fn get_op_priority(t: &lexer::Tok) -> isize {
// GoInk: higher == greater priority
match t.kind {
lexer::Token::AccessorOp => 100,
lexer::Token::ModulusOp => 80,
lexer::Token::MultiplyOp | lexer::Token::DivideOp => 50,
lexer::Token::AddOp | lexer::Token::SubtractOp => 40,
lexer::Token::GreaterThanOp | lexer::Token::LessThanOp | lexer::Token::EqualOp => 30,
lexer::Token::LogicalAndOp => 20,
lexer::Token::LogicalXorOp => 15,
lexer::Token::LogicalOrOp => 10,
lexer::Token::DefineOp => 0,
_ => -1,
}
}
fn is_binary_op(t: &lexer::Tok) -> bool {
matches!(
t.kind,
lexer::Token::AddOp
| lexer::Token::SubtractOp
| lexer::Token::MultiplyOp
| lexer::Token::DivideOp
| lexer::Token::ModulusOp
| lexer::Token::LogicalAndOp
| lexer::Token::LogicalOrOp
| lexer::Token::LogicalXorOp
| lexer::Token::GreaterThanOp
| lexer::Token::LessThanOp
| lexer::Token::EqualOp
| lexer::Token::DefineOp
| lexer::Token::AccessorOp,
)
}
fn parse_binary_expression(
left_operand: Node,
operator: &lexer::Tok,
tokens: &[lexer::Tok],
previous_priority: isize,
) -> (Result<Node, error::Err>, usize) {
let (right_atom, mut idx) = parse_atom(tokens);
if let Err(right_atom) = right_atom {
return (Err(right_atom), 0);
}
let mut ops: Vec<&lexer::Tok> = Vec::new();
let mut nodes: Vec<Node> = Vec::new();
ops.push(operator);
nodes.push(left_operand);
nodes.push(right_atom.unwrap());
// GoInk: build up a list of binary operations, with tree nodes
// where there are higher-priority binary ops
while tokens.len() > idx && is_binary_op(&tokens[idx]) {
if previous_priority >= get_op_priority(&tokens[idx]) {
// GoInk: Priority is lower than the calling function's last op,
// so return control to the parent binary op
break;
} else if get_op_priority(ops.last().unwrap()) >= get_op_priority(&tokens[idx]) {
// GoInk: Priority is lower than the previous op (but higher than parent),
// so it's ok to be left-heavy in this tree
ops.push(&tokens[idx]);
idx += 1;
guard_unexpected_input_end!(tokens, idx);
let (right_atom, incr) = parse_atom(&tokens[idx..]);
if let Err(right_atom) = right_atom {
return (Err(right_atom), 0);
}
nodes.push(right_atom.unwrap());
idx += incr;
} else {
guard_unexpected_input_end!(tokens, idx + 1);
// GoInk: Priority is higher than previous ops,
// so make it a right-heavy tree
let (subtree, incr) = parse_binary_expression(
nodes.last().unwrap().clone(),
&tokens[idx],
&tokens[idx + 1..],
get_op_priority(ops.last().unwrap()),
);
if let Err(subtree) = subtree {
return (Err(subtree), 0);
}
let _last = nodes.len() - 1;
nodes[_last] = subtree.unwrap();
idx += incr + 1;
}
}
// GoInk: ops, nodes -> left-biased binary expression tree
let mut tree = nodes[0].clone();
nodes.drain(0..1);
while !ops.is_empty() {
tree = Node::BinaryExprNode {
operator: ops[0].kind,
left_operand: Box::new(tree),
right_operand: Box::new(nodes[0].clone()),
position: ops[0].position,
};
ops.drain(0..1);
nodes.drain(0..1);
}
(Ok(tree), idx)
}
fn parse_expression(tokens: &[lexer::Tok]) -> (Result<Node, error::Err>, usize) {
let mut idx = 0;
let consume_dangling_separator = |idx: usize, tokens: &[lexer::Tok]| {
// GoInk: bounds check in case parse_expression called at some point
// consumed end token
if idx < tokens.len() && tokens[idx].kind == lexer::Token::Separator {
idx + 1
} else {
idx
}
};
let (atom, incr) = parse_atom(&tokens[idx..]);
if let Err(atom) = atom {
return (Err(atom), 0);
}
idx += incr;
guard_unexpected_input_end!(tokens, idx);
let next_tok = &tokens[idx];
idx += 1;
match next_tok.kind {
// GoInk: consuming dangling separator
lexer::Token::Separator => (Ok(atom.unwrap()), idx),
// GoInk: these belong to the parent atom that contains this expression,
// so return without consuming token (idx - 1)
lexer::Token::RightParen | lexer::Token::KeyValueSeparator | lexer::Token::CaseArrow => {
(Ok(atom.unwrap()), idx - 1)
}
lexer::Token::AddOp
| lexer::Token::SubtractOp
| lexer::Token::MultiplyOp
| lexer::Token::DivideOp
| lexer::Token::ModulusOp
| lexer::Token::LogicalAndOp
| lexer::Token::LogicalOrOp
| lexer::Token::LogicalXorOp
| lexer::Token::GreaterThanOp
| lexer::Token::LessThanOp
| lexer::Token::EqualOp
| lexer::Token::DefineOp
| lexer::Token::AccessorOp => {
let (bin_expr, incr) =
parse_binary_expression(atom.unwrap(), next_tok, &tokens[idx..], -1);
if let Err(bin_expr) = bin_expr {
return (Err(bin_expr), 0);
}
idx += incr;
// GoInk: Binary expressions are often followed by a match
if idx < tokens.len() && tokens[idx].kind == lexer::Token::MatchColon {
let colon_pos = tokens[idx].position;
idx += 1; // GoInk: MatchColon
let (clauses, incr) = parse_match_body(&tokens[idx..]);
if let Err(clauses) = clauses {
return (Err(clauses), 0);
}
idx += incr;
idx = consume_dangling_separator(idx, tokens);
return (
Ok(Node::MatchExprNode {
condition: Box::new(bin_expr.unwrap()),
clauses: clauses.unwrap(),
position: colon_pos,
}),
idx,
);
}
idx = consume_dangling_separator(idx, tokens);
(Ok(bin_expr.unwrap()), idx)
}
lexer::Token::MatchColon => {
let (clauses, incr) = parse_match_body(&tokens[idx..]);
if let Err(clauses) = clauses {
return (Err(clauses), 0);
}
idx += incr;
idx = consume_dangling_separator(idx, tokens);
(
Ok(Node::MatchExprNode {
condition: Box::new(atom.unwrap()),
clauses: clauses.unwrap(),
position: next_tok.position,
}),
idx,
)
}
_ => {
return (
Err(error::Err {
message: format!("unexpected token {} following an expression", next_tok),
reason: error::ERR_SYNTAX,
}),
0,
)
}
}
}
fn parse_atom(tokens: &[lexer::Tok]) -> (Result<Node, error::Err>, usize) {
guard_unexpected_input_end!(tokens, 0);
let tok = &tokens[0];
let mut idx = 1;
if tok.kind == lexer::Token::NegationOp {
let (atom, idx) = parse_atom(&tokens[idx..]);
if let Err(atom) = atom {
return (Err(atom), 0);
}
return (
Ok(Node::UnaryExprNode {
operator: tok.kind,
operand: Box::new(atom.unwrap()),
position: tok.position,
}),
idx + 1,
);
}
guard_unexpected_input_end!(tokens, idx);
let mut atom: Node;
match tok.kind {
lexer::Token::NumberLiteral => {
return (
Ok(Node::NumberLiteralNode {
val: tok.num,
position: tok.position,
}),
idx,
);
}
lexer::Token::StringLiteral => {
return (
Ok(Node::StringLiteralNode {
val: tok.str.clone().into_bytes(),
position: tok.position,
}),
idx,
);
}
lexer::Token::TrueLiteral => {
return (
Ok(Node::BooleanLiteralNode {
val: true,
position: tok.position,
}),
idx,
);
}
lexer::Token::FalseLiteral => {
return (
Ok(Node::BooleanLiteralNode {
val: false,
position: tok.position,
}),
idx,
);
}
lexer::Token::Identifier => {
if tokens[idx].kind == lexer::Token::FunctionArrow {
let (_atom, _idx) = parse_function_literal(tokens);
match _atom {
Err(_atom) => return (Err(_atom), 0),
_ => {
atom = _atom.unwrap();
idx = _idx
}
}
// GoInk: parse_atom should not consume trailing Separators, but
// parse_function_literal does because it ends with expressions.
// so we backtrack one token.
idx -= 1
} else {
atom = Node::IdentifierNode {
val: tok.str.clone(),
position: tok.position,
}
}
// GoInk: may be called as a function, so flows beyond
// switch block
}
lexer::Token::EmptyIdentifier => {
if tokens[idx].kind == lexer::Token::FunctionArrow {
let parsed_function_literal = parse_function_literal(tokens);
match parsed_function_literal {
(Err(atom), _) => return (Err(atom), 0),
(Ok(_atom), _idx) => {
// parse_atom should not consume trailing Separators, but
// parse_function_literal does because it ends with expressions.
// so we backtrack one token.
return (Ok(_atom), _idx - 1);
}
}
}
return (
Ok(Node::EmptyIdentifierNode {
position: tok.position,
}),
idx,
);
}
// GoInk: may be called as a function, so flows beyond
// switch block
lexer::Token::LeftParen => {
// GoInk: grouped expression or function literal
let mut exprs: Vec<Node> = Vec::new();
while tokens[idx].kind != lexer::Token::RightParen {
let (expr, incr) = parse_expression(&tokens[idx..]);
if let Err(expr) = expr {
return (Err(expr), 0);
}
idx += incr;
exprs.push(expr.unwrap());
guard_unexpected_input_end!(tokens, idx);
}
idx += 1; // GoInk: RightParen
guard_unexpected_input_end!(tokens, idx);
if tokens[idx].kind == lexer::Token::FunctionArrow {
let (_atom, _idx) = parse_function_literal(tokens);
match _atom {
Err(_atom) => return (Err(_atom), 0),
_ => {
atom = _atom.unwrap();
idx = _idx
}
}
// GoInk: parse_atom should not consume trailing Separators, but
// parse_function_literal does because it ends with expressions.
// so we backtrack one token.
idx -= 1;
} else {
atom = Node::ExpressionListNode {
expressions: exprs,
position: tok.position,
}
}
// GoInk: may be called as a function, so flows beyond
// switch block
}
lexer::Token::LeftBrace => {
let mut entries: Vec<Node> = Vec::new();
while tokens[idx].kind != lexer::Token::RightBrace {
let (key_expr, key_incr) = parse_expression(&tokens[idx..]);
if let Err(key_expr) = key_expr {
return (Err(key_expr), 0);
}
idx += key_incr;
guard_unexpected_input_end!(tokens, idx);
if tokens[idx].kind == lexer::Token::KeyValueSeparator {
idx += 1;
} else {
return (
Err(error::Err {
reason: error::ERR_SYNTAX,
message: format!(
"expected {} after composite key, found {}",
lexer::Token::KeyValueSeparator,
tokens[idx]
),
}),
0,
);
}
guard_unexpected_input_end!(tokens, idx);
let (val_expr, val_incr) = parse_expression(&tokens[idx..]);
match val_expr {
Err(val_expr) => return (Err(val_expr), 0),
Ok(val_expr) => {
// GoInk : Separator consumed by parse_expression
idx += val_incr;
let key_expr = key_expr.unwrap();
let position = key_expr.pos();
entries.push(Node::ObjectEntryNode {
key: Box::new(key_expr),
val: Box::new(val_expr),
position,
});
}
}
guard_unexpected_input_end!(tokens, idx);
}
idx += 1; // GoInk: RightBrace
return (
Ok(Node::ObjectLiteralNode {
entries,
position: tok.position,
}),
idx,
);
}
lexer::Token::LeftBracket => {
let mut vals: Vec<Node> = Vec::new();
while tokens[idx].kind != lexer::Token::RightBracket {
let (expr, incr) = parse_expression(&tokens[idx..]);
match expr {
Err(expr) => return (Err(expr), 0),
Ok(expr) => {
idx += incr;
vals.push(expr);
}
}
guard_unexpected_input_end!(tokens, idx);
}
idx += 1; // GoInk: RightBracket
return (
Ok(Node::ListLiteralNode {
vals,
position: tok.position,
}),
idx,
);
}
_ => {
return (
Err(error::Err {
reason: error::ERR_SYNTAX,
message: format!("unexpected start of atom, found {}", tok),
}),
0,
);
}
};
// GoInk: bounds check here because parse_expression may have
// consumed all tokens before this
while idx < tokens.len() && tokens[idx].kind == lexer::Token::LeftParen {
let incr: usize;
match parse_function_call(atom, &tokens[idx..]) {
(Err(_atom), _) => return (Err(_atom), 0),
(Ok(_atom), _incr) => {
atom = _atom;
incr = _incr;
}
}
idx += incr;
guard_unexpected_input_end!(tokens, idx);
}
(Ok(atom), idx)
}
// GoInk: parses everything that follows MatchColon
// does not consume dangling separator -- that's for parse_expression
fn parse_match_body(tokens: &[lexer::Tok]) -> (Result<Vec<Node>, error::Err>, usize) {
let mut idx = 1; // GoInk: LeftBrace
let mut clauses: Vec<Node> = Vec::new();
guard_unexpected_input_end!(tokens, idx);
while tokens[idx].kind != lexer::Token::RightBrace {
let (clause_node, incr) = parse_match_clause(&tokens[idx..]);
if let Err(clause_node) = clause_node {
return (Err(clause_node), 0);
}
idx += incr;
clauses.push(clause_node.unwrap());
guard_unexpected_input_end!(tokens, idx);
}
idx += 1; // GoInk: RightBrace
(Ok(clauses), idx)
}
fn parse_match_clause(tokens: &[lexer::Tok]) -> (Result<Node, error::Err>, usize) {
let (atom, mut idx) = parse_expression(tokens);
if let Err(atom) = atom {
return (Err(atom), 0);
}
guard_unexpected_input_end!(tokens, idx);
if tokens[idx].kind != lexer::Token::CaseArrow {
return (
Err(error::Err {
reason: error::ERR_SYNTAX,
message: format!(
"expected {}, but got {}",
lexer::Token::CaseArrow,
tokens[idx]
),
}),
0,
);
}
idx += 1; // CaseArrow
guard_unexpected_input_end!(tokens, idx);
let (expr, incr) = parse_expression(&tokens[idx..]);
if let Err(expr) = expr {
return (Err(expr), 0);
}
idx += incr;
(
Ok(Node::MatchClauseNode {
target: Box::new(atom.unwrap()),
expression: Box::new(expr.unwrap()),
}),
idx,
)
}
fn parse_function_literal(tokens: &[lexer::Tok]) -> (Result<Node, error::Err>, usize) {
let tok = &tokens[0];
let mut idx = 1;
let mut arguments: Vec<Node> = Vec::new();
guard_unexpected_input_end!(tokens, idx);
match tok.kind {
lexer::Token::LeftParen => {
loop {
let tk = &tokens[idx];
match tk.kind {
lexer::Token::Identifier => {
let id_node = Node::IdentifierNode {
val: tk.str.clone(),
position: tk.position,
};
arguments.push(id_node)
}
lexer::Token::EmptyIdentifier => {
let id_node = Node::EmptyIdentifierNode {
position: tk.position,
};
arguments.push(id_node)
}
_ => break,
}
idx += 1;
guard_unexpected_input_end!(tokens, idx);
if tokens[idx].kind != lexer::Token::Separator {
return (
Err(error::Err {
reason: error::ERR_SYNTAX,
message: format!(
"expected arguments in a list separated by {}, found {}",
lexer::Token::Separator,
tokens[idx]
),
}),
0,
);
}
idx += 1; // GoInk: Separator
}
guard_unexpected_input_end!(tokens, idx);
if tokens[idx].kind != lexer::Token::RightParen {
return (
Err(error::Err {
reason: error::ERR_SYNTAX,
message: format!(
"expected arguments list to terminate with {}, found {}",
lexer::Token::RightParen,
tokens[idx]
),
}),
0,
);
}
idx += 1 // GoInk: RightParen
}
lexer::Token::Identifier => {
let id_node = Node::IdentifierNode {
val: tok.str.clone(),
position: tok.position,
};
arguments.push(id_node)
}
lexer::Token::EmptyIdentifier => {
let id_node = Node::EmptyIdentifierNode {
position: tok.position,
};
arguments.push(id_node)
}
_ => {
return (
Err(error::Err {
reason: error::ERR_SYNTAX,
message: format!("malformed arguments list in function at {}", tok),
}),
0,
)
}
}
guard_unexpected_input_end!(tokens, idx);
if tokens[idx].kind != lexer::Token::FunctionArrow {
return (
Err(error::Err {
reason: error::ERR_SYNTAX,
message: format!(
"expected {} but found {}",
lexer::Token::FunctionArrow,
tokens[idx]
),
}),
0,
);
}
idx += 1; // GoInk: FunctionArrow
let (body, incr) = parse_expression(&tokens[idx..]);
if let Err(body) = body {
return (Err(body), 0);
}
idx += incr;
(
Ok(Node::FunctionLiteralNode {
arguments,
body: Box::new(body.unwrap()),
position: tokens[0].position,
}),
idx,
)
}
fn parse_function_call(function: Node, tokens: &[lexer::Tok]) -> (Result<Node, error::Err>, usize) {
let mut idx = 1;
let mut arguments: Vec<Node> = Vec::new();
guard_unexpected_input_end!(tokens, idx);
while tokens[idx].kind != lexer::Token::RightParen {
let (expr, incr) = parse_expression(&tokens[idx..]);
if let Err(expr) = expr {
return (Err(expr), 0);
}
idx += incr;
arguments.push(expr.unwrap());
guard_unexpected_input_end!(tokens, idx);
}
idx += 1; // GoInk: RightParen
(
Ok(Node::FunctionCallNode {
function: Box::new(function),
arguments,
}),
idx,
)
}
| true |
cf7ffbfd9ccebf95c09726c52049b13695c20825
|
Rust
|
etrexel/advent_of_code_2018
|
/src/day_three.rs
|
UTF-8
| 2,700 | 3.4375 | 3 |
[
"MIT"
] |
permissive
|
use std::error::Error;
use std::fs::File;
use std::path::Path;
use std::io::prelude::*;
const QUILT_SIZE: usize = 1000;
struct Claim {
id: i32,
x_offset: i32,
y_offset: i32,
x_size: i32,
y_size: i32
}
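// Parses a claim of the form "#1 @ 1,3: 4x4" into its id, x/y offset and x/y size.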
fn build_claim(input: &str) -> Claim {
let substrings: Vec<&str> = input.split(' ').collect();
let id_str: &str = substrings[0];
let id: i32 = id_str[1..].parse::<i32>().unwrap();
let offset_str: Vec<&str> = substrings[2].split(',').collect();
let x_offset: i32 = offset_str[0].parse::<i32>().unwrap();
let y_offset: i32 = offset_str[1][..offset_str[1].len()-1].parse::<i32>().unwrap();
let size_str: Vec<&str> = substrings[3].split('x').collect();
let x_size: i32 = size_str[0].parse::<i32>().unwrap();
let y_size: i32 = size_str[1].parse::<i32>().unwrap();
let output: Claim = Claim { id, x_offset, y_offset, x_size, y_size };
output
}
fn get_input(path: &Path) -> Vec<Claim> {
let mut claims: Vec<Claim> = Vec::new();
let display = path.display();
let mut file: File = match File::open(&path) {
Err(why) => panic!("Couldn't open {}: {}", display, why.description()),
Ok(file) => file
};
let mut raw_input: String = String::new();
match file.read_to_string(&mut raw_input) {
Err(why) => panic!("Couldn't read {}: {}", display, why.description()),
Ok(raw_input) => raw_input
};
let split_input: Vec<&str> = raw_input.split('\n').collect();
for claim in split_input {
claims.push(build_claim(claim));
}
claims
}
fn plot_claim(quilt: &mut Vec<Vec<i32>>, claim: &Claim) {
let x_start: usize = claim.x_offset as usize;
let x_end: usize = (claim.x_offset + claim.x_size) as usize;
let y_start: usize = claim.y_offset as usize;
let y_end: usize = (claim.y_offset + claim.y_size) as usize;
for i in x_start..x_end {
for j in y_start..y_end {
quilt[i][j] += 1;
}
}
}
fn count_overlaps(quilt: Vec<Vec<i32>>) -> i32 {
let mut overlaps: i32 = 0;
for row in quilt {
for cell in row {
if cell >= 2 {
overlaps += 1;
}
}
}
overlaps
}
pub fn part_one(path: &Path) {
let input: Vec<Claim> = get_input(path);
let mut quilt: Vec<Vec<i32>> = Vec::with_capacity(QUILT_SIZE);
for _ in 0..QUILT_SIZE {
let mut row: Vec<i32> = Vec::with_capacity(QUILT_SIZE);
for _i_ in 0..QUILT_SIZE {
row.push(0);
}
quilt.push(row);
}
for claim in input {
plot_claim(&mut quilt, &claim);
}
let overlaps = count_overlaps(quilt);
println!("Overlaps: {}", overlaps);
}
| true |
e8abb8baddfc8a0dd135c62734f8504d475fc9e4
|
Rust
|
falconre/falcon
|
/lib/il/function.rs
|
UTF-8
| 4,323 | 3.625 | 4 |
[
"Apache-2.0"
] |
permissive
|
//! A `Function` holds a `ControlFlowGraph`.
//!
//! We can think of a `Function` as providing _location_ to a `ControlFlowGraph`.
use crate::il::*;
use crate::Error;
use serde::{Deserialize, Serialize};
/// A function for Falcon IL. Provides location and context in a `Program` to a
/// `ControlFlowGraph`.
#[derive(Clone, Debug, Deserialize, Hash, Eq, PartialEq, Serialize)]
pub struct Function {
// The address where this function was found
address: u64,
// The `ControlFlowGraph` capturing semantics of the function
control_flow_graph: ControlFlowGraph,
// The name of the function
name: Option<String>,
// Functions which belong to Programs have indices
index: Option<usize>,
}
impl Function {
/// Create a new `Function`
///
/// # Parameters
/// * `address` - The address where we recovered this function.
/// * `control_flow_graph` - A `ControlFlowGraph` capturing the semantics of this function.
pub fn new(address: u64, control_flow_graph: ControlFlowGraph) -> Function {
Function {
address,
control_flow_graph,
name: None,
index: None,
}
}
/// Create a Vec of every RefFunctionLocation for this function.
///
/// Convenient for analyses where we need to check every location in a
/// function
pub fn locations(&self) -> Vec<RefFunctionLocation> {
let mut locations = Vec::new();
for block in self.blocks() {
let instructions = block.instructions();
if instructions.is_empty() {
locations.push(RefFunctionLocation::EmptyBlock(block));
} else {
for instruction in instructions {
locations.push(RefFunctionLocation::Instruction(block, instruction));
}
}
}
for edge in self.edges() {
locations.push(RefFunctionLocation::Edge(edge))
}
locations
}
/// Get the address of this `Function`.
///
/// The address returned will be the address set when this `Function` was created,
/// which should be the virtual address where this `Function` was found.
pub fn address(&self) -> u64 {
self.address
}
/// Return a `Block` from this `Function`'s `ControlFlowGraph` by index.
pub fn block(&self, index: usize) -> Result<&Block, Error> {
self.control_flow_graph.block(index)
}
/// Return a mutable reference to a `Block` in this `Function`
pub fn block_mut(&mut self, index: usize) -> Result<&mut Block, Error> {
self.control_flow_graph.block_mut(index)
}
/// Return a Vec of all `Block` in this `Function`
pub fn blocks(&self) -> Vec<&Block> {
self.control_flow_graph.blocks()
}
/// Return a Vec of mutable references to all `Block` in this `Function`
pub fn blocks_mut(&mut self) -> Vec<&mut Block> {
self.control_flow_graph.blocks_mut()
}
/// Return an `Edge` from this `Function`'s `ControlFlowGraph` by index.
pub fn edge(&self, head: usize, tail: usize) -> Result<&Edge, Error> {
self.control_flow_graph.edge(head, tail)
}
/// Return a vec of all `Edge` in this `Function`
pub fn edges(&self) -> Vec<&Edge> {
self.control_flow_graph.edges()
}
/// Return the `ControlFlowGraph` for this `Function`.
pub fn control_flow_graph(&self) -> &ControlFlowGraph {
&self.control_flow_graph
}
/// Return a mutable reference to the `ControlFlowGraph` for this `Function`.
pub fn control_flow_graph_mut(&mut self) -> &mut ControlFlowGraph {
&mut self.control_flow_graph
}
/// Return the name of this `Function`.
pub fn name(&self) -> String {
match self.name {
Some(ref name) => name.to_string(),
None => format!("unknown@{:08X}", self.address),
}
}
/// Set this `Function`'s name.
pub fn set_name(&mut self, name: Option<String>) {
self.name = name;
}
/// Return the index of this `Function`. A `Function` will have an index if
/// it is added to a `Program`.
pub fn index(&self) -> Option<usize> {
self.index
}
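    /// Set the index of this `Function`. A `Function` is given an index when it
    /// is added to a `Program`.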
pub fn set_index(&mut self, index: Option<usize>) {
self.index = index;
}
}
| true |
d01d3b649de66a940d30cab5522ff670930f39e8
|
Rust
|
SeraphyBR/travelling_salesman
|
/src/algorithms/branch_bound.rs
|
UTF-8
| 3,091 | 2.765625 | 3 |
[
"MIT"
] |
permissive
|
#![allow(dead_code)]
use crate::graph;
use crate::point::Point;
use super::algorithm::{Algorithm, TSPResult, Instant};
type Route = Vec<usize>;
type Graph = graph::Graph<f32>;
pub struct BranchBound {
graph: Graph,
lower_bound: f32,
min_dist: f32,
min_path: Route
}
impl BranchBound {
fn permutation(&mut self, arr: &mut [usize], i: usize) {
let n = arr.len();
if i == n {
let weight = self.weight_of_path(arr);
if weight <= self.min_dist {
self.min_path = Route::from(arr);
self.min_dist = weight;
}
return;
}
for j in i..n {
arr.swap(i, j);
self.permutation(arr, i + 1);
arr.swap(i, j);
}
}
fn weight_of_path(&self, path: &[usize]) -> f32 {
let mut weight = 0.0;
for i in 1..path.len() {
weight += self.graph.get_connection(path[i], path[i - 1]);
if weight > self.lower_bound && weight > self.min_dist {
return f32::MAX;
}
}
weight += self.graph.get_connection(path[path.len() - 1], 0);
weight
}
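    // Computes a lower bound on the optimal tour by summing, for every vertex,
    // the weights of its two cheapest outgoing edges.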
fn calculate_lower_bound(&mut self) {
let input_size = self.graph.get_vertex_count();
let mut sum = 0.0;
if input_size > 1 {
for i in 0..input_size {
let tmp = self.graph.get_connection(i, i);
self.graph.set_connection(i, i, f32::MAX);
let mut min1 = self.graph.get_connection(i, 0);
let mut min2 = self.graph.get_connection(i, 1);
if min2 < min1 {
min1 = self.graph.get_connection(i, 1);
min2 = self.graph.get_connection(i, 0);
}
for j in 2..input_size {
                    let aux = self.graph.get_connection(i, j);
                    if aux < min1 {
                        min2 = min1;
                        min1 = aux;
                    }
                    else if aux < min2 {
                        min2 = aux;
}
}
self.graph.set_connection(i, i, tmp);
sum += min1 + min2;
}
}
self.lower_bound = sum;
}
}
impl Algorithm for BranchBound {
fn with_input(input: Vec<Point<f32>>) -> Self {
let size = input.len();
let mut graph = Graph::new(size);
for p in input {
graph.add_point(p);
}
Self {
graph,
min_dist: f32::MAX,
lower_bound: 0.0,
min_path: Vec::with_capacity(size),
}
}
fn run(&mut self) -> TSPResult {
let input_size = self.graph.size();
let mut graph_path: Route = (0..input_size).collect();
self.calculate_lower_bound();
let now = Instant::now();
self.permutation(graph_path.as_mut_slice(), 1);
TSPResult::with_values("Branch Bound".into(), input_size, self.min_dist, self.min_path.as_slice(), now.elapsed())
}
}
| true |
802addc2dc05dda1929f86c551a0339e5045cfd5
|
Rust
|
CPPUer/mirror-clone
|
/src/rewrite_pipe.rs
|
UTF-8
| 4,525 | 3.078125 | 3 |
[
"Apache-2.0"
] |
permissive
|
//! RewritePipe rewrites content of `ByteStream`.
//!
//! A `RewritePipe` is a wrapper on `ByteStream`, which may be provided by
//! a `ByteStreamPipe` or a `SourceStorage` directly.
//! It rewrites the content of the input by applying user-defined functions,
//! and yields the modified `ByteStream`.
//!
//! The rewriting process relies on `ByteStream` which only supports
//! `LocalFile` currently.
//! So a new file will be created when rewriting and deleted when dropped.
use async_trait::async_trait;
use slog::warn;
use crate::common::{Mission, SnapshotConfig};
use crate::error::{Error, Result};
use crate::stream_pipe::{ByteObject, ByteStream};
use crate::traits::{SnapshotStorage, SourceStorage};
use tokio::io::{AsyncReadExt, AsyncSeekExt, AsyncWriteExt};
pub struct RewritePipe<Source, RewriteItem, F>
where
F: Fn(RewriteItem) -> Result<RewriteItem> + Send + Sync,
{
pub source: Source,
pub buffer_path: String,
pub rewrite_fn: F,
pub max_length: u64,
_phantom: std::marker::PhantomData<RewriteItem>,
}
impl<Source, RewriteItem, F> RewritePipe<Source, RewriteItem, F>
where
F: Fn(RewriteItem) -> Result<RewriteItem> + Send + Sync,
{
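    /// Create a new `RewritePipe`. Objects longer than `max_length` are passed
    /// through without being rewritten.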
pub fn new(source: Source, buffer_path: String, rewrite_fn: F, max_length: u64) -> Self {
Self {
source,
buffer_path,
rewrite_fn,
max_length,
_phantom: Default::default(),
}
}
}
#[async_trait]
impl<Snapshot, Source, RewriteItem, F> SnapshotStorage<Snapshot>
for RewritePipe<Source, RewriteItem, F>
where
Snapshot: Send + 'static,
Source: SnapshotStorage<Snapshot> + Send,
RewriteItem: Send + Sync + 'static,
F: Fn(RewriteItem) -> Result<RewriteItem> + Send + Sync + 'static,
{
async fn snapshot(
&mut self,
mission: Mission,
config: &SnapshotConfig,
) -> Result<Vec<Snapshot>> {
self.source.snapshot(mission, config).await
}
fn info(&self) -> String {
format!("rewrite <{}>", self.source.info())
}
}
// TODO support rewrite functions with `RewriteItem` other than String (eg. Vec<u8>)
#[async_trait]
impl<Snapshot, Source, F> SourceStorage<Snapshot, ByteStream> for RewritePipe<Source, String, F>
where
Snapshot: Send + Sync + 'static,
Source: SourceStorage<Snapshot, ByteStream>,
F: Fn(String) -> Result<String> + Send + Sync + 'static,
{
async fn get_object(&self, snapshot: &Snapshot, mission: &Mission) -> Result<ByteStream> {
let logger = &mission.logger;
let mut byte_stream = self.source.get_object(snapshot, mission).await?;
if byte_stream.length > self.max_length {
Ok(byte_stream)
} else {
match byte_stream.object {
ByteObject::LocalFile {
ref mut file,
path: _,
} => {
if let Some(ref mut file) = file {
let mut buffer = String::new();
if file.read_to_string(&mut buffer).await.is_err() {
warn!(logger, "rewrite_pipe: not a valid UTF-8 file, ignored");
Ok(byte_stream)
} else {
match (self.rewrite_fn)(buffer) {
Err(e) => {
warn!(logger, "rewrite_pipe: {:?}, ignored", e);
Ok(byte_stream)
}
Ok(content) => {
let content = content.into_bytes();
let content_length = content.len() as u64;
file.seek(std::io::SeekFrom::Start(0)).await?;
file.set_len(0).await?;
file.write_all(&content).await?;
file.flush().await?;
file.seek(std::io::SeekFrom::Start(0)).await?;
byte_stream.length = content_length;
Ok(byte_stream)
}
}
}
} else {
Err(Error::ProcessError(String::from(
"missing file when rewriting",
)))
}
}
}
}
}
}
| true |
3ed6e6968ef41941c632277de0f4a40becd7ae8b
|
Rust
|
hantmac/kdash
|
/src/app/svcs.rs
|
UTF-8
| 3,520 | 2.703125 | 3 |
[
"MIT"
] |
permissive
|
use super::{
models::ResourceToYaml,
utils::{self, UNKNOWN},
};
use k8s_openapi::{
api::core::v1::{Service, ServicePort},
chrono::Utc,
};
#[derive(Clone)]
pub struct KubeSvc {
pub namespace: String,
pub name: String,
pub type_: String,
pub cluster_ip: String,
pub external_ip: String,
pub ports: String,
pub age: String,
k8s_obj: Service,
}
impl KubeSvc {
pub fn from_api(service: &Service) -> Self {
let (type_, cluster_ip, external_ip, ports) = match &service.spec {
Some(spec) => {
let type_ = match &spec.type_ {
Some(type_) => type_.clone(),
_ => UNKNOWN.into(),
};
let external_ips = match type_.as_str() {
"ClusterIP" | "NodePort" => spec.external_ips.clone(),
"LoadBalancer" => get_lb_ext_ips(service, spec.external_ips.clone()),
"ExternalName" => Some(vec![spec.external_name.clone().unwrap_or_default()]),
_ => None,
}
.unwrap_or_else(|| {
if type_ == "LoadBalancer" {
vec!["<pending>".into()]
} else {
vec![String::default()]
}
});
(
type_,
spec.cluster_ip.as_ref().unwrap_or(&"None".into()).clone(),
external_ips.join(","),
get_ports(&spec.ports).unwrap_or_default(),
)
}
_ => (
UNKNOWN.into(),
String::default(),
String::default(),
String::default(),
),
};
KubeSvc {
name: service.metadata.name.clone().unwrap_or_default(),
type_,
namespace: service.metadata.namespace.clone().unwrap_or_default(),
cluster_ip,
external_ip,
ports,
age: utils::to_age(service.metadata.creation_timestamp.as_ref(), Utc::now()),
k8s_obj: service.to_owned(),
}
}
}
impl ResourceToYaml<Service> for KubeSvc {
fn get_k8s_obj(&self) -> &Service {
&self.k8s_obj
}
}
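// Renders each port as "[name:]port►nodePort", appending "/PROTOCOL" when the
// protocol is not TCP.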
fn get_ports(s_ports: &Option<Vec<ServicePort>>) -> Option<String> {
s_ports.as_ref().map(|ports| {
ports
.iter()
.map(|s_port| {
let mut port = String::new();
if let Some(name) = s_port.name.clone() {
port = format!("{}:", name);
}
port = format!("{}{}►{}", port, s_port.port, s_port.node_port.unwrap_or(0));
if let Some(protocol) = s_port.protocol.clone() {
if protocol != "TCP" {
port = format!("{}/{}", port, s_port.protocol.clone().unwrap());
}
}
port
})
.collect::<Vec<_>>()
.join(" ")
})
}
fn get_lb_ext_ips(service: &Service, external_ips: Option<Vec<String>>) -> Option<Vec<String>> {
let mut lb_ips = match &service.status {
Some(ss) => match &ss.load_balancer {
Some(lb) => {
let ing = &lb.ingress;
ing
.clone()
.unwrap_or_default()
.iter()
.map(|lb_ing| {
if lb_ing.ip.is_some() {
lb_ing.ip.clone().unwrap_or_default()
} else if lb_ing.hostname.is_some() {
lb_ing.hostname.clone().unwrap_or_default()
} else {
String::default()
}
})
.collect::<Vec<String>>()
}
None => vec![],
},
None => vec![],
};
if external_ips.is_some() && !lb_ips.is_empty() {
lb_ips.extend(external_ips.unwrap_or_default());
Some(lb_ips)
} else if !lb_ips.is_empty() {
Some(lb_ips)
} else {
None
}
}
#[cfg(test)]
mod tests {
// TODO
}
| true |
ec12be87c68f10a9a41d66a427ab0246b8c4d494
|
Rust
|
PrismaPhonic/CodeSignal-Solutions
|
/interview/heaps-stacks-queues/01-kthLargestElement/rs-solution/kth-largest-element/src/lib.rs
|
UTF-8
| 2,505 | 3.453125 | 3 |
[] |
no_license
|
#![feature(test)]
extern crate test;
use std::collections::BinaryHeap;
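/// K-th largest via a max-heap: build the heap in O(n), then pop k times (O(k log n)).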
pub fn kth_largest_element(nums: Vec<i32>, k: i32) -> i32 {
let mut heap = BinaryHeap::from(nums);
for _ in 0..k-1 { heap.pop(); }
heap.pop().unwrap()
}
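/// Sort-based variant: sort ascending in place and index the k-th element from the end.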
pub fn kth_largest_element_sort(nums: &mut Vec<i32>, k: i32) -> i32 {
nums[..].sort_unstable();
nums[nums.len()-k as usize]
}
#[cfg(test)]
mod tests {
use super::*;
use test::Bencher;
#[test]
fn test_1() {
assert_eq!(kth_largest_element(vec![7, 6, 5, 4, 3, 2, 1], 2), 6);
}
#[test]
fn test_1_sort() {
let mut vec = vec![7, 6, 5, 4, 3, 2, 1];
assert_eq!(kth_largest_element_sort(&mut vec, 2), 6);
}
#[bench]
fn bench_1(b: &mut Bencher) {
b.iter(|| kth_largest_element(vec![7, 6, 5, 4, 3, 2, 1], 2))
}
#[bench]
fn bench_1_sort(b: &mut Bencher) {
let mut vec = vec![7, 6, 5, 4, 3, 2, 1];
b.iter(|| kth_largest_element_sort(&mut vec, 2))
}
#[bench]
fn bench_huge(b: &mut Bencher) {
b.iter(|| kth_largest_element(vec![7, 2, 6, 5, 4, 8, 6, 7, 9, 4, 5, 6, 2, 3, 4,5, 7, 17, 29, 99, 65, 44, 33, 22, 1, 3, 4, 5, 6, 66, 77, 88, 99, 200, 500, 22, 11, 10, 9, 8, 3, 5, 6, 2, 8, 9], 10))
}
#[bench]
fn bench_huge_sort(b: &mut Bencher) {
let mut vec = vec![7, 2, 6, 5, 4, 8, 6, 7, 9, 4, 5, 6, 2, 3, 4,5, 7, 17, 29, 99, 65, 44, 33, 22, 1, 3, 4, 5, 6, 66, 77, 88, 99, 200, 500, 22, 11, 10, 9, 8, 3, 5, 6, 2, 8, 9];
b.iter(|| kth_largest_element_sort(&mut vec, 10))
}
#[test]
fn test_2() {
assert_eq!(kth_largest_element(vec![99, 99], 1), 99);
}
#[test]
fn test_3() {
assert_eq!(kth_largest_element(vec![1], 1), 1);
}
#[test]
fn test_4() {
assert_eq!(kth_largest_element(vec![2, 1], 1), 2);
}
#[test]
fn test_5() {
assert_eq!(kth_largest_element(vec![-1, 2, 0], 2), 0);
}
#[test]
fn test_6() {
assert_eq!(kth_largest_element(vec![-1, 2, 0], 3), -1);
}
#[test]
fn test_7() {
assert_eq!(kth_largest_element(vec![3, 1, 2, 4], 2), 3);
}
#[test]
fn test_8() {
assert_eq!(kth_largest_element(vec![3, 2, 1, 5, 6, 4], 2), 5);
}
#[test]
fn test_9() {
assert_eq!(kth_largest_element(vec![5, 2, 4, 1, 3, 6, 0], 2), 5);
}
#[test]
fn test_10() {
assert_eq!(kth_largest_element(vec![3, 3, 3, 3, 3, 3, 3, 3, 3], 8), 3);
}
}
| true |
216fc0e678f90a7e4456da7e55303a4f7055757a
|
Rust
|
caosbad/das-contracts
|
/tools/calculator/src/main.rs
|
UTF-8
| 3,908 | 3.140625 | 3 |
[
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
use clap::Clap;
mod util;
#[derive(Clap, Debug)]
#[clap(version = "1.0", author = "Link Xie. <[email protected]>")]
struct Options {
#[clap(subcommand)]
action: SubCommand,
}
#[derive(Clap, Debug)]
enum SubCommand {
New(NewAction),
Renew(RenewAction),
}
#[derive(Clap, Debug)]
struct NewAction {
#[clap(
long = "capacity",
about = "The capacity of PreAccountCell. Required if --profit is not provided."
)]
capacity: Option<u64>,
#[clap(
long = "account-name-storage",
about = "The length of account, must count its suffix. Required if --profit is not provided."
)]
account_name_storage: Option<u64>,
#[clap(long = "profit", about = "The profit of proposal confirmation.")]
profit: Option<u64>,
#[clap(
long = "price",
required = true,
about = "The register fee of account for one year."
)]
price: u64,
#[clap(
long = "quote",
required = true,
about = "The quote of CKB to USD, AKA USD/CKB."
)]
quote: u64,
#[clap(
long = "discount",
default_value = "0",
about = "The discount of register fee."
)]
discount: u32,
#[clap(long = "current", about = "The current timestamp, can be omitted.")]
current: Option<u64>,
}
#[derive(Clap, Debug)]
struct RenewAction {
#[clap(
long = "profit",
required = true,
about = "The total profit which DAS get."
)]
profit: u64,
#[clap(
long = "price",
required = true,
about = "The renew fee of account for one year."
)]
price: u64,
#[clap(
long = "quote",
required = true,
about = "The quote of CKB to USD, AKA USD/CKB."
)]
quote: u64,
}
fn main() {
// Parse options
let options: Options = Options::parse();
// println!("{:?}", options);
match options.action {
SubCommand::New(options) => {
let profit;
if options.profit.is_none() {
if options.account_name_storage.is_none() {
panic!(
"Params --account-name-storage is required when --profit is not provided."
);
}
if options.capacity.is_none() {
panic!("Params --capacity is required when --profit is not provided.");
}
let storage_capacity =
util::calc_account_storage_capacity(options.account_name_storage.unwrap());
println!(
"storage_capacity({}) = ACCOUNT_CELL_BASIC_CAPACITY({}) + (account_name_storage({}) * 100_000_000)",
storage_capacity,
util::ACCOUNT_CELL_BASIC_CAPACITY,
options.account_name_storage.unwrap()
);
profit = options.capacity.unwrap() - storage_capacity;
println!(
"total_profit({}) = capacity({}) - storage_capacity({})",
profit,
options.capacity.unwrap(),
storage_capacity
);
} else {
profit = options.profit.unwrap();
}
let duration = util::calc_duration_from_paid(
profit,
options.price,
options.quote,
options.discount,
);
if let Some(current) = options.current {
let expired_at = current + duration;
println!(
"expired_at({}) = current({}) - duration({})",
expired_at, current, duration
);
}
}
SubCommand::Renew(options) => {
let duration =
util::calc_duration_from_paid(options.profit, options.price, options.quote, 0);
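            // Print the computed duration so the `renew` subcommand produces output;
            // the exact output format is an assumption, mirroring the `new` subcommand.
            println!("duration: {}", duration);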
}
}
}
| true |
57a2b341b4b0f5c65606fe5c4992d35b87ed43a2
|
Rust
|
jasilven/aoc-2016-rust
|
/src/bin/day4.rs
|
UTF-8
| 2,388 | 3.3125 | 3 |
[] |
no_license
|
use regex::Regex;
use std::collections::HashMap;
use std::fs::File;
use std::io::BufRead;
use std::io::BufReader;
fn parse_input(fname: &str) -> Vec<(String, String, String)> {
let mut result = vec![];
let file = File::open(fname).expect("file open error");
let re = Regex::new(r"^(\D+)(\d+)\[([a-z]{5})\]").expect("invalid regex");
for line in BufReader::new(file).lines() {
let s = line.expect("file read error");
let caps = re.captures(&s).expect("input parse error");
result.push((
caps[1].to_owned().replace("-", ""),
caps[2].to_owned(),
caps[3].to_owned(),
));
}
result
}
fn checksum(s: &str) -> String {
let mut hm: HashMap<char, i32> = HashMap::new();
for ch in s.chars() {
let counter = hm.entry(ch).or_insert(0);
*counter += 1;
}
let mut kvs: Vec<(&char, &i32)> = hm.iter().collect();
kvs.sort_by(|a, b| {
if a.1 == b.1 {
a.0.cmp(b.0)
} else {
a.1.cmp(b.1).reverse()
}
});
kvs.iter()
.take(5)
.fold(String::new(), |acc, x| format!("{}{}", acc, *x.0))
}
fn solve1(input: &Vec<(String, String, String)>) -> i32 {
input
.iter()
.filter(|x| checksum(&x.0) == x.2)
.map(|x| x.1.parse::<i32>().unwrap())
.sum()
}
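// Caesar-shifts each lowercase letter forward by n positions (97 is the ASCII code of 'a').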
fn decrypt(s: &str, n: u32) -> String {
s.chars()
.map(|c| 97 + ((n + (c as u32) - 97) % 26) as u8)
.map(|i| i as char)
.collect::<String>()
}
fn solve2(input: &Vec<(String, String, String)>) -> String {
for item in input {
let id: u32 = item.1.parse().expect("sector id parse error");
if decrypt(&item.0, id) == "northpoleobjectstorage" {
return item.1.to_owned();
}
}
panic!("solution not found");
}
fn main() {
let input = parse_input("resources/day4-input.txt");
println!("Part 1: {}", solve1(&input));
// correct answer: 409147
println!("Part 2: {}", solve2(&input));
// correct answer: 991
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_part1() {
assert_eq!(1514, solve1(&parse_input("resources/day4-test-input.txt")));
}
#[test]
fn test_decrypt() {
assert_eq!(
"veryencryptedname",
decrypt(&String::from("qzmtzixmtkozyivhz"), 343)
)
}
}
| true |
c0e3bc5501d66fcfcd3b7c996a19758f5b5d35e5
|
Rust
|
owen8877/leetcode-rs
|
/src/problem_125.rs
|
UTF-8
| 839 | 3.109375 | 3 |
[] |
no_license
|
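// Recursive two-pointer check: skip non-alphanumeric characters from both ends
// and compare the remaining pair case-insensitively.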
pub fn is_palindrome(s: String) -> bool {
let s: Vec<char> = s.chars().collect();
fn core(s: &[char]) -> bool {
fn is_alphabet(c: char) -> bool {
            (c as u8 >= 'a' as u8 && c as u8 <= 'z' as u8)
                || (c as u8 >= 'A' as u8 && c as u8 <= 'Z' as u8)
                || (c as u8 >= '0' as u8 && c as u8 <= '9' as u8)
}
fn compare(c1: char, c2: char) -> bool {
            (c1 == c2)
                || (c1 as u8 == c2 as u8 + 32u8 && c2 as u8 >= 'A' as u8)
                || (c1 as u8 + 32u8 == c2 as u8 && c1 as u8 >= 'A' as u8)
}
let n = s.len();
        n == 0
            || (0..n)
                .filter(|&i| is_alphabet(s[i]))
                .next()
                .map_or(true, |f_index| {
                    (0..n)
                        .rev()
                        .filter(|&i| is_alphabet(s[i]))
                        .next()
                        .map_or(true, |b_index| {
                            (f_index == b_index)
                                || compare(s[f_index], s[b_index])
                                    && core(&s[f_index + 1..b_index])
                        })
                })
}
core(s.as_slice())
}
| true |
e57f6a6c289289ebf405ff7414808224315815d5
|
Rust
|
Fumon/tachi-phone-srv
|
/src/main.rs
|
UTF-8
| 2,474 | 2.5625 | 3 |
[] |
no_license
|
#![feature(proc_macro_hygiene, decl_macro)]
#[macro_use] extern crate rocket;
#[macro_use] extern crate lazy_static;
extern crate globwalk;
use rocket::http::Status;
use rocket::response::content;
use tera::Tera;
use std::path::Path;
mod util;
use rust_embed::RustEmbed;
#[derive(RustEmbed)]
#[folder = "assets"]
struct Asset;
#[derive(RustEmbed)]
#[folder = "templates"]
struct Templates;
lazy_static! {
pub static ref TEMPLATES: Tera = {
let mut tera = Tera::default();
let t = Templates::iter().map(|filename| {
let p = filename;
let ttext = Templates::get(&p).unwrap().into_owned();
(p.into_owned(), std::str::from_utf8(&ttext).unwrap().to_owned())
}).collect::<Vec<(_,_)>>();
tera.add_raw_templates(t).unwrap();
tera
};
}
/// Root Folder
#[get("/")]
fn index() -> Result<content::Html<String>, Status> {
// Get Manga
let manga_res =
util::get_manga(Path::new("/home/fumon/tmp/manga_s/T"));
let manga = match manga_res {
Ok(manga) => manga,
Err(_) => {
return Err(Status::InternalServerError)
}
};
let mut context = tera::Context::new();
context.insert("manga", &manga);
match TEMPLATES.render("index.html.tera", &context) {
Ok(t) => Ok(content::Html(t)),
Err(_) => Err(Status::InternalServerError)
}
}
/// Chapters
#[get("/m/<title>")]
fn chapters(title: String) -> Result<content::Html<String>, Status> {
let ch_res =
util::get_chapters(Path::new("/home/fumon/tmp/manga_s/T"), &title);
let ch = match ch_res {
Ok(ch) => ch,
Err(_) => {
return Err(Status::InternalServerError)
}
};
let mut context = tera::Context::new();
context.insert("title", &title);
context.insert("chapters", &ch);
match TEMPLATES.render("manga.html.tera", &context) {
Ok(t) => Ok(content::Html(t)),
Err(e) => {
dbg!(e);
Err(Status::InternalServerError)
}
}
}
/// TODO: Handle grabbing images from manga folders
/// Embedded css supplier
#[get("/theme.css")]
fn css() -> content::Css<String> {
// TODO: generalize to any embedded resource
let css = Asset::get("theme.css").unwrap().into_owned();
content::Css(std::str::from_utf8(&css).unwrap().to_owned())
}
fn main() {
rocket::ignite()
.mount("/", routes![index, chapters, css])
.launch();
}
| true |
93cbc59f578e42c953c84b6ea38aa9a15ae7cb4b
|
Rust
|
zhv9/exp
|
/dev_basic/leetcode/src/n66_plus_one.rs
|
UTF-8
| 834 | 3.21875 | 3 |
[] |
no_license
|
/*
* @lc app=leetcode id=66 lang=rust
*
* [66] Plus One
*/
// @lc code=start
impl Solution {
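    // Walk from the least-significant digit: increment the first non-9 digit and
    // zero any trailing 9s; if every digit is 9, prepend a leading 1.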
pub fn plus_one(mut digits: Vec<i32>) -> Vec<i32> {
let mut index = digits.len() - 1;
loop {
if digits[index] != 9 {
digits[index] += 1;
return digits;
} else {
digits[index] = 0;
}
if index == 0 {
digits.insert(0, 1);
return digits;
}
index -= 1;
}
}
}
// @lc code=end
struct Solution;
fn main() {
println!("{:?} should be [1,2,4]", Solution::plus_one(vec![1, 2, 3]));
println!("{:?} should be [1,3,0]", Solution::plus_one(vec![1, 2, 9]));
println!(
"{:?} should be [1,0,0,0]",
Solution::plus_one(vec![9, 9, 9])
);
}
| true |
5de6b0b086a38411c0fb9263fbedc31ae97534ed
|
Rust
|
kkovach/AoC
|
/2022/day1/src/main.rs
|
UTF-8
| 758 | 3.078125 | 3 |
[] |
no_license
|
use std::collections::HashMap;
use std::fs::File;
use std::io::prelude::*;
use std::io::BufReader;
fn main() {
let cals_file = File::open("calories.txt").unwrap();
let reader = BufReader::new(cals_file);
let mut elf_cals :HashMap<i32, i32> = HashMap::new();
let mut index = 1;
for line in reader.lines() {
let l = line.unwrap();
if l.to_string().is_empty() {
index += 1;
} else {
let cals = l.parse::<i32>().unwrap();
            elf_cals.entry(index).and_modify(|total| *total += cals).or_insert(cals);
}
}
let mut elf_cals_sorted: Vec<(&i32, &i32)> = elf_cals.iter().collect();
elf_cals_sorted.sort_by(|a, b| b.1.cmp(a.1));
println!("{:?}", elf_cals_sorted);
}
| true |
53b6d1f9aff9b75b3cb8ad17d942a513f5cd7264
|
Rust
|
nrf-rs/nrf-pacs
|
/pacs/nrf5340-app-pac/src/qspi_ns/erase/len.rs
|
UTF-8
| 4,083 | 2.59375 | 3 |
[
"0BSD"
] |
permissive
|
#[doc = "Register `LEN` reader"]
pub struct R(crate::R<LEN_SPEC>);
impl core::ops::Deref for R {
type Target = crate::R<LEN_SPEC>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl From<crate::R<LEN_SPEC>> for R {
#[inline(always)]
fn from(reader: crate::R<LEN_SPEC>) -> Self {
R(reader)
}
}
#[doc = "Register `LEN` writer"]
pub struct W(crate::W<LEN_SPEC>);
impl core::ops::Deref for W {
type Target = crate::W<LEN_SPEC>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl core::ops::DerefMut for W {
#[inline(always)]
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.0
}
}
impl From<crate::W<LEN_SPEC>> for W {
#[inline(always)]
fn from(writer: crate::W<LEN_SPEC>) -> Self {
W(writer)
}
}
#[doc = "Field `LEN` reader - LEN"]
pub type LEN_R = crate::FieldReader<u8, LEN_A>;
#[doc = "LEN\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
#[repr(u8)]
pub enum LEN_A {
#[doc = "0: Erase 4 kB block (flash command 0x20)"]
_4KB = 0,
#[doc = "1: Erase 64 kB block (flash command 0xD8)"]
_64KB = 1,
#[doc = "2: Erase all (flash command 0xC7)"]
ALL = 2,
}
impl From<LEN_A> for u8 {
#[inline(always)]
fn from(variant: LEN_A) -> Self {
variant as _
}
}
impl LEN_R {
#[doc = "Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> Option<LEN_A> {
match self.bits {
0 => Some(LEN_A::_4KB),
1 => Some(LEN_A::_64KB),
2 => Some(LEN_A::ALL),
_ => None,
}
}
#[doc = "Checks if the value of the field is `_4KB`"]
#[inline(always)]
pub fn is_4kb(&self) -> bool {
*self == LEN_A::_4KB
}
#[doc = "Checks if the value of the field is `_64KB`"]
#[inline(always)]
pub fn is_64kb(&self) -> bool {
*self == LEN_A::_64KB
}
#[doc = "Checks if the value of the field is `ALL`"]
#[inline(always)]
pub fn is_all(&self) -> bool {
*self == LEN_A::ALL
}
}
#[doc = "Field `LEN` writer - LEN"]
pub type LEN_W<'a, const O: u8> = crate::FieldWriter<'a, u32, LEN_SPEC, u8, LEN_A, 2, O>;
impl<'a, const O: u8> LEN_W<'a, O> {
#[doc = "Erase 4 kB block (flash command 0x20)"]
#[inline(always)]
pub fn _4kb(self) -> &'a mut W {
self.variant(LEN_A::_4KB)
}
#[doc = "Erase 64 kB block (flash command 0xD8)"]
#[inline(always)]
pub fn _64kb(self) -> &'a mut W {
self.variant(LEN_A::_64KB)
}
#[doc = "Erase all (flash command 0xC7)"]
#[inline(always)]
pub fn all(self) -> &'a mut W {
self.variant(LEN_A::ALL)
}
}
impl R {
#[doc = "Bits 0:1 - LEN"]
#[inline(always)]
pub fn len(&self) -> LEN_R {
LEN_R::new((self.bits & 3) as u8)
}
}
impl W {
#[doc = "Bits 0:1 - LEN"]
#[inline(always)]
pub fn len(&mut self) -> LEN_W<0> {
LEN_W::new(self)
}
#[doc = "Writes raw bits to the register."]
#[inline(always)]
pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
self.0.bits(bits);
self
}
}
#[doc = "Size of block to be erased.\n\nThis register you can [`read`](crate::generic::Reg::read), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [len](index.html) module"]
pub struct LEN_SPEC;
impl crate::RegisterSpec for LEN_SPEC {
type Ux = u32;
}
#[doc = "`read()` method returns [len::R](R) reader structure"]
impl crate::Readable for LEN_SPEC {
type Reader = R;
}
#[doc = "`write(|w| ..)` method takes [len::W](W) writer structure"]
impl crate::Writable for LEN_SPEC {
type Writer = W;
}
#[doc = "`reset()` method sets LEN to value 0"]
impl crate::Resettable for LEN_SPEC {
#[inline(always)]
fn reset_value() -> Self::Ux {
0
}
}
| true |
65f58357ce73c54bcd687df8e78e9adf35163bc8
|
Rust
|
karrybit/bottomup-ddd
|
/factory/src/lib.rs
|
UTF-8
| 962 | 3.4375 | 3 |
[] |
no_license
|
struct UserID(i32);
struct UserName(String);
struct User {
id: UserID,
name: UserName,
}
trait UserFactory {
fn create(&self, name: UserName) -> User;
}
// Abstracts how instances are created, e.g. InMemory (assigning ids in memory) or a DB.
// If there is no need to abstract the creation method, a Factory is pointless.
struct UserFactoryImpl {
current_id: i32,
}
impl UserFactory for UserFactoryImpl {
fn create(&self, name: UserName) -> User {
User {
id: UserID { 0: self.current_id },
name,
}
}
}
struct UserApplicationService {
user_factory: Box<dyn UserFactory>,
}
impl UserApplicationService {
fn create(&self, name: UserName) -> User {
self.user_factory.create(name)
}
}
#[test]
fn test_factory() {
let service = UserApplicationService {
user_factory: Box::new(UserFactoryImpl { current_id: 0 }),
};
let _ = service.create(UserName {
0: "name".to_string(),
});
}
| true |
43a5fbf9071f61199bf0c8694d5492fdccaf6a52
|
Rust
|
m2700/rust-mpi-tool-support
|
/sys/cnum/src/lib.rs
|
UTF-8
| 5,507 | 2.75 | 3 |
[] |
no_license
|
use std::{
ops::{Deref, DerefMut},
os::raw::{c_double, c_float},
};
mod c_num {
#![allow(non_upper_case_globals)]
#![allow(non_camel_case_types)]
#![allow(non_snake_case)]
include!(env!("C_NUM_BINDINGS"));
}
#[repr(transparent)]
#[derive(Copy, Clone, PartialEq, Debug, Hash, Default)]
pub struct Complex<T>(c_num::__BindgenComplex<T>);
impl<T: Eq> Eq for Complex<T> {}
impl<T> From<c_num::__BindgenComplex<T>> for Complex<T> {
#[inline]
fn from(src: c_num::__BindgenComplex<T>) -> Self {
Self(src)
}
}
impl<T> From<Complex<T>> for c_num::__BindgenComplex<T> {
#[inline]
fn from(src: Complex<T>) -> Self {
src.0
}
}
impl<T> Deref for Complex<T> {
type Target = c_num::__BindgenComplex<T>;
#[inline]
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl<T> DerefMut for Complex<T> {
#[inline]
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.0
}
}
impl<T> Complex<T> {
#[inline]
pub const fn new(real: T, imag: T) -> Self {
Self(c_num::__BindgenComplex { re: real, im: imag })
}
}
impl Complex<c_float> {
#[inline]
pub fn real(self) -> c_float {
unsafe { c_num::crealf(*self) }
}
#[inline]
pub fn imag(self) -> c_float {
unsafe { c_num::cimagf(*self) }
}
#[inline]
pub fn acos(self) -> Self {
unsafe { c_num::cacosf(*self) }.into()
}
#[inline]
pub fn asin(self) -> Self {
unsafe { c_num::casinf(*self) }.into()
}
#[inline]
pub fn atan(self) -> Self {
unsafe { c_num::catanf(*self) }.into()
}
#[inline]
pub fn cos(self) -> Self {
unsafe { c_num::ccosf(*self) }.into()
}
#[inline]
pub fn sin(self) -> Self {
unsafe { c_num::csinf(*self) }.into()
}
#[inline]
pub fn tan(self) -> Self {
unsafe { c_num::ctanf(*self) }.into()
}
#[inline]
pub fn acosh(self) -> Self {
unsafe { c_num::cacoshf(*self) }.into()
}
#[inline]
pub fn asinh(self) -> Self {
unsafe { c_num::casinhf(*self) }.into()
}
#[inline]
pub fn atanh(self) -> Self {
unsafe { c_num::catanhf(*self) }.into()
}
#[inline]
pub fn cosh(self) -> Self {
unsafe { c_num::ccoshf(*self) }.into()
}
#[inline]
pub fn sinh(self) -> Self {
unsafe { c_num::csinhf(*self) }.into()
}
#[inline]
pub fn tanh(self) -> Self {
unsafe { c_num::ctanhf(*self) }.into()
}
#[inline]
pub fn exp(self) -> Self {
unsafe { c_num::cexpf(*self) }.into()
}
#[inline]
pub fn log(self) -> Self {
unsafe { c_num::clogf(*self) }.into()
}
#[inline]
pub fn abs(self) -> c_float {
unsafe { c_num::cabsf(*self) }
}
#[inline]
pub fn pow(self, exp: Self) -> Self {
unsafe { c_num::cpowf(*self, *exp) }.into()
}
#[inline]
pub fn sqrt(self) -> Self {
unsafe { c_num::csqrtf(*self) }.into()
}
#[inline]
pub fn arg(self) -> c_float {
unsafe { c_num::cargf(*self) }
}
#[inline]
pub fn conj(self) -> Self {
unsafe { c_num::conjf(*self) }.into()
}
#[inline]
pub fn proj(self) -> Self {
unsafe { c_num::cprojf(*self) }.into()
}
}
impl Complex<c_double> {
#[inline]
pub fn real(self) -> c_double {
unsafe { c_num::creal(*self) }
}
#[inline]
pub fn imag(self) -> c_double {
unsafe { c_num::cimag(*self) }
}
#[inline]
pub fn acos(self) -> Self {
unsafe { c_num::cacos(*self) }.into()
}
#[inline]
pub fn asin(self) -> Self {
unsafe { c_num::casin(*self) }.into()
}
#[inline]
pub fn atan(self) -> Self {
unsafe { c_num::catan(*self) }.into()
}
#[inline]
pub fn cos(self) -> Self {
unsafe { c_num::ccos(*self) }.into()
}
#[inline]
pub fn sin(self) -> Self {
unsafe { c_num::csin(*self) }.into()
}
#[inline]
pub fn tan(self) -> Self {
unsafe { c_num::ctan(*self) }.into()
}
#[inline]
pub fn acosh(self) -> Self {
unsafe { c_num::cacosh(*self) }.into()
}
#[inline]
pub fn asinh(self) -> Self {
unsafe { c_num::casinh(*self) }.into()
}
#[inline]
pub fn atanh(self) -> Self {
unsafe { c_num::catanh(*self) }.into()
}
#[inline]
pub fn cosh(self) -> Self {
unsafe { c_num::ccosh(*self) }.into()
}
#[inline]
pub fn sinh(self) -> Self {
unsafe { c_num::csinh(*self) }.into()
}
#[inline]
pub fn tanh(self) -> Self {
unsafe { c_num::ctanh(*self) }.into()
}
#[inline]
pub fn exp(self) -> Self {
unsafe { c_num::cexp(*self) }.into()
}
#[inline]
pub fn log(self) -> Self {
unsafe { c_num::clog(*self) }.into()
}
#[inline]
pub fn abs(self) -> c_double {
unsafe { c_num::cabs(*self) }
}
#[inline]
pub fn pow(self, exp: Self) -> Self {
unsafe { c_num::cpow(*self, *exp) }.into()
}
#[inline]
pub fn sqrt(self) -> Self {
unsafe { c_num::csqrt(*self) }.into()
}
#[inline]
pub fn arg(self) -> c_double {
unsafe { c_num::carg(*self) }
}
#[inline]
pub fn conj(self) -> Self {
unsafe { c_num::conj(*self) }.into()
}
#[inline]
pub fn proj(self) -> Self {
unsafe { c_num::cproj(*self) }.into()
}
}
| true |
e3155c4c6de41ab9b646972ebb6a198983ee37c6
|
Rust
|
rust-lang/rust
|
/library/core/src/iter/traits/unchecked_iterator.rs
|
UTF-8
| 1,595 | 3.390625 | 3 |
[
"Apache-2.0",
"LLVM-exception",
"NCSA",
"BSD-2-Clause",
"LicenseRef-scancode-unicode",
"MIT",
"LicenseRef-scancode-other-permissive"
] |
permissive
|
use crate::iter::TrustedLen;
/// [`TrustedLen`] cannot have methods, so this allows augmenting it.
///
/// It currently requires `TrustedLen` because it's unclear whether it's
/// reasonably possible to depend on the `size_hint` of anything else.
pub(crate) trait UncheckedIterator: TrustedLen {
/// Gets the next item from a non-empty iterator.
///
/// Because there's always a value to return, that means it can return
/// the `Item` type directly, without wrapping it in an `Option`.
///
/// # Safety
///
/// This can only be called if `size_hint().0 != 0`, guaranteeing that
/// there's at least one item available.
///
/// Otherwise (aka when `size_hint().1 == Some(0)`), this is UB.
///
/// # Note to Implementers
///
/// This has a default implementation using [`Option::unwrap_unchecked`].
/// That's probably sufficient if your `next` *always* returns `Some`,
/// such as for infinite iterators. In more complicated situations, however,
/// sometimes there can still be `insertvalue`/`assume`/`extractvalue`
/// instructions remaining in the IR from the `Option` handling, at which
/// point you might want to implement this manually instead.
#[unstable(feature = "trusted_len_next_unchecked", issue = "37572")]
#[inline]
unsafe fn next_unchecked(&mut self) -> Self::Item {
let opt = self.next();
// SAFETY: The caller promised that we're not empty, and
// `Self: TrustedLen` so we can actually trust the `size_hint`.
unsafe { opt.unwrap_unchecked() }
}
}
| true |
8382e0352546c019361328b0f358bd8f4d1a264b
|
Rust
|
denismaxim0v/CHIP-8
|
/src/display.rs
|
UTF-8
| 1,394 | 3.140625 | 3 |
[] |
no_license
|
extern crate sdl2;
use crate::consts::{DISPLAY_HEIGHT, DISPLAY_WIDTH};
pub struct Display {
pub memory: [bool; 2048],
}
impl Display {
pub fn new() -> Self {
Display {
memory: [false; 2048],
}
}
pub fn clear(&mut self) {
for x in 0..DISPLAY_WIDTH {
for y in 0..DISPLAY_HEIGHT {
self.set_pixel(x, y, false);
}
}
}
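    /// XOR-draws an 8-pixel-wide sprite at (x, y), wrapping around the screen edges.
    /// Returns true if any previously lit pixel was switched off (the collision flag).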
pub fn draw(&mut self, x: usize, y: usize, sprite: &[u8]) -> bool {
let rows = sprite.len();
let mut collision = false;
for (j, row) in sprite.iter().enumerate().take(rows) {
for i in 0..8 {
let new_value = row >> (7 - i) & 0x01;
if new_value == 1 {
let xi = (x + i) % DISPLAY_WIDTH;
let yj = (y + j) % DISPLAY_HEIGHT;
let old_value = self.get_pixel(xi, yj);
if old_value {
collision = true;
}
self.set_pixel(xi, yj, (new_value == 1) ^ old_value);
}
}
}
collision
}
pub fn set_pixel(&mut self, x: usize, y: usize, state: bool) {
self.memory[x + y * DISPLAY_WIDTH] = state;
}
pub fn get_pixel(&mut self, x: usize, y: usize) -> bool {
self.memory[x + y * DISPLAY_WIDTH]
}
}
| true |
64a7fc7810f02b4cf98a244f81a803f3b38bbc0a
|
Rust
|
qiongtubao/latte_learn_rust
|
/example/iset/src/main.rs
|
UTF-8
| 215 | 2.90625 | 3 |
[] |
no_license
|
use derive_set::New;
#[derive(New, PartialEq, Debug)]
pub struct Bar {
pub x: i32,
pub y: String,
}
fn main() {
let x = Bar::set(42, "hello".to_owned());
assert_eq!(x, Bar{x: 42, y: "hello".to_owned()});
}
| true |
4920016d3445204fe55fa718e2e2d26e279b8da6
|
Rust
|
rust-bakery/nom
|
/src/internal.rs
|
UTF-8
| 23,358 | 3.59375 | 4 |
[
"MIT",
"CC0-1.0"
] |
permissive
|
//! Basic types to build the parsers
use self::Needed::*;
use crate::error::{self, ErrorKind, FromExternalError, ParseError};
use crate::lib::std::fmt;
use core::marker::PhantomData;
use core::num::NonZeroUsize;
/// Holds the result of parsing functions
///
/// It depends on the input type `I`, the output type `O`, and the error type `E`
/// (by default `(I, nom::ErrorKind)`)
///
/// The `Ok` side is a pair containing the remainder of the input (the part of the data that
/// was not parsed) and the produced value. The `Err` side contains an instance of `nom::Err`.
///
/// Outside of the parsing code, you can use the [Finish::finish] method to convert
/// it to a more common result type
pub type IResult<I, O, E = error::Error<I>> = Result<(I, O), Err<E>>;
/// Helper trait to convert a parser's result to a more manageable type
pub trait Finish<I, O, E> {
/// converts the parser's result to a type that is more consumable by error
/// management libraries. It keeps the same `Ok` branch, and merges `Err::Error`
/// and `Err::Failure` into the `Err` side.
///
/// *warning*: if the result is `Err(Err::Incomplete(_))`, this method will panic.
/// - "complete" parsers: It will not be an issue, `Incomplete` is never used
/// - "streaming" parsers: `Incomplete` will be returned if there's not enough data
/// for the parser to decide, and you should gather more data before parsing again.
/// Once the parser returns either `Ok(_)`, `Err(Err::Error(_))` or `Err(Err::Failure(_))`,
/// you can get out of the parsing loop and call `finish()` on the parser's result
fn finish(self) -> Result<(I, O), E>;
}
impl<I, O, E> Finish<I, O, E> for IResult<I, O, E> {
fn finish(self) -> Result<(I, O), E> {
match self {
Ok(res) => Ok(res),
Err(Err::Error(e)) | Err(Err::Failure(e)) => Err(e),
Err(Err::Incomplete(_)) => {
panic!("Cannot call `finish()` on `Err(Err::Incomplete(_))`: this result means that the parser does not have enough data to decide, you should gather more data and try to reapply the parser instead")
}
}
}
}
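// Illustrative sketch (not from the original source): outside of parser code,
// `finish()` flattens `Err::Error` / `Err::Failure` into a plain `Result`.
// `my_parser` below is a hypothetical complete parser returning `IResult<&str, u32>`:
//
//     let value: Result<(&str, u32), error::Error<&str>> = my_parser(input).finish();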
/// Contains information on needed data if a parser returned `Incomplete`
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
#[cfg_attr(nightly, warn(rustdoc::missing_doc_code_examples))]
pub enum Needed {
/// Needs more data, but we do not know how much
Unknown,
/// Contains the required data size in bytes
Size(NonZeroUsize),
}
impl Needed {
/// Creates a `Needed` instance, returning `Needed::Unknown` if the argument is zero
pub fn new(s: usize) -> Self {
match NonZeroUsize::new(s) {
Some(sz) => Needed::Size(sz),
None => Needed::Unknown,
}
}
/// Indicates if we know how many bytes we need
pub fn is_known(&self) -> bool {
*self != Unknown
}
/// Maps a `Needed` to `Needed` by applying a function to a contained `Size` value.
#[inline]
pub fn map<F: Fn(NonZeroUsize) -> usize>(self, f: F) -> Needed {
match self {
Unknown => Unknown,
Size(n) => Needed::new(f(n)),
}
}
}
/// The `Err` enum indicates the parser was not successful
///
/// It has three cases:
///
/// * `Incomplete` indicates that more data is needed to decide. The `Needed` enum
/// can contain how many additional bytes are necessary. If you are sure your parser
/// is working on full data, you can wrap your parser with the `complete` combinator
/// to transform that case into `Error`
/// * `Error` means some parser did not succeed, but another one might (as an example,
/// when testing different branches of an `alt` combinator)
/// * `Failure` indicates an unrecoverable error. For example, when a prefix has been
/// recognised and the next parser has been confirmed, if that parser fails, then the
/// entire process fails; there are no more parsers to try.
///
/// Distinguishing `Failure` from `Error` is only relevant inside the parser's code. For
/// external consumers, both mean that parsing failed.
///
/// See also: [`Finish`].
///
#[derive(Debug, Clone, PartialEq)]
#[cfg_attr(nightly, warn(rustdoc::missing_doc_code_examples))]
pub enum Err<Failure, Error = Failure> {
/// There was not enough data
Incomplete(Needed),
/// The parser had an error (recoverable)
Error(Error),
/// The parser had an unrecoverable error: we got to the right
/// branch and we know other branches won't work, so backtrack
/// as fast as possible
Failure(Failure),
}
impl<E> Err<E> {
/// Tests if the result is Incomplete
pub fn is_incomplete(&self) -> bool {
matches!(self, Err::Incomplete(..))
}
/// Applies the given function to the inner error
pub fn map<E2, F>(self, f: F) -> Err<E2>
where
F: FnOnce(E) -> E2,
{
match self {
Err::Incomplete(n) => Err::Incomplete(n),
Err::Failure(t) => Err::Failure(f(t)),
Err::Error(t) => Err::Error(f(t)),
}
}
/// Automatically converts between errors if the underlying type supports it
pub fn convert<F>(e: Err<F>) -> Self
where
E: From<F>,
{
e.map(crate::lib::std::convert::Into::into)
}
}
impl<T> Err<(T, ErrorKind)> {
/// Maps `Err<(T, ErrorKind)>` to `Err<(U, ErrorKind)>` with the given `F: T -> U`
pub fn map_input<U, F>(self, f: F) -> Err<(U, ErrorKind)>
where
F: FnOnce(T) -> U,
{
match self {
Err::Incomplete(n) => Err::Incomplete(n),
Err::Failure((input, k)) => Err::Failure((f(input), k)),
Err::Error((input, k)) => Err::Error((f(input), k)),
}
}
}
impl<T> Err<error::Error<T>> {
/// Maps `Err<error::Error<T>>` to `Err<error::Error<U>>` with the given `F: T -> U`
pub fn map_input<U, F>(self, f: F) -> Err<error::Error<U>>
where
F: FnOnce(T) -> U,
{
match self {
Err::Incomplete(n) => Err::Incomplete(n),
Err::Failure(error::Error { input, code }) => Err::Failure(error::Error {
input: f(input),
code,
}),
Err::Error(error::Error { input, code }) => Err::Error(error::Error {
input: f(input),
code,
}),
}
}
}
#[cfg(feature = "alloc")]
use crate::lib::std::{borrow::ToOwned, string::String, vec::Vec};
#[cfg(feature = "alloc")]
impl Err<(&[u8], ErrorKind)> {
/// Obtaining ownership
#[cfg_attr(feature = "docsrs", doc(cfg(feature = "alloc")))]
pub fn to_owned(self) -> Err<(Vec<u8>, ErrorKind)> {
self.map_input(ToOwned::to_owned)
}
}
#[cfg(feature = "alloc")]
impl Err<(&str, ErrorKind)> {
/// Obtaining ownership
#[cfg_attr(feature = "docsrs", doc(cfg(feature = "alloc")))]
pub fn to_owned(self) -> Err<(String, ErrorKind)> {
self.map_input(ToOwned::to_owned)
}
}
#[cfg(feature = "alloc")]
impl Err<error::Error<&[u8]>> {
/// Obtaining ownership
#[cfg_attr(feature = "docsrs", doc(cfg(feature = "alloc")))]
pub fn to_owned(self) -> Err<error::Error<Vec<u8>>> {
self.map_input(ToOwned::to_owned)
}
}
#[cfg(feature = "alloc")]
impl Err<error::Error<&str>> {
/// Obtaining ownership
#[cfg_attr(feature = "docsrs", doc(cfg(feature = "alloc")))]
pub fn to_owned(self) -> Err<error::Error<String>> {
self.map_input(ToOwned::to_owned)
}
}
impl<E: Eq> Eq for Err<E> {}
impl<E> fmt::Display for Err<E>
where
E: fmt::Debug,
{
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
Err::Incomplete(Needed::Size(u)) => write!(f, "Parsing requires {} bytes/chars", u),
Err::Incomplete(Needed::Unknown) => write!(f, "Parsing requires more data"),
Err::Failure(c) => write!(f, "Parsing Failure: {:?}", c),
Err::Error(c) => write!(f, "Parsing Error: {:?}", c),
}
}
}
#[cfg(feature = "std")]
use std::error::Error;
#[cfg(feature = "std")]
impl<E> Error for Err<E>
where
E: fmt::Debug,
{
fn source(&self) -> Option<&(dyn Error + 'static)> {
None // no underlying error
}
}
/// Controls whether parser values are actually built (`Emit`) or only checked (`Check`)
pub trait Mode {
/// How a value of type `T` is represented in this mode
type Output<T>;
/// Produces a value in this mode
fn bind<T, F: FnOnce() -> T>(f: F) -> Self::Output<T>;
/// Transforms a value in this mode
fn map<T, U, F: FnOnce(T) -> U>(x: Self::Output<T>, f: F) -> Self::Output<U>;
/// Merges two values in this mode
fn combine<T, U, V, F: FnOnce(T, U) -> V>(
x: Self::Output<T>,
y: Self::Output<U>,
f: F,
) -> Self::Output<V>;
}
/// Emit mode: values in this position (output or error) are actually built
pub struct Emit;
impl Mode for Emit {
type Output<T> = T;
#[inline(always)]
fn bind<T, F: FnOnce() -> T>(f: F) -> Self::Output<T> {
f()
}
#[inline(always)]
fn map<T, U, F: FnOnce(T) -> U>(x: Self::Output<T>, f: F) -> Self::Output<U> {
f(x)
}
#[inline(always)]
fn combine<T, U, V, F: FnOnce(T, U) -> V>(
x: Self::Output<T>,
y: Self::Output<U>,
f: F,
) -> Self::Output<V> {
f(x, y)
}
}
/// Check mode: values in this position are not built, the parser only verifies the input
pub struct Check;
impl Mode for Check {
type Output<T> = ();
#[inline(always)]
fn bind<T, F: FnOnce() -> T>(_: F) -> Self::Output<T> {}
#[inline(always)]
fn map<T, U, F: FnOnce(T) -> U>(_: Self::Output<T>, _: F) -> Self::Output<U> {}
#[inline(always)]
fn combine<T, U, V, F: FnOnce(T, U) -> V>(
_: Self::Output<T>,
_: Self::Output<U>,
_: F,
) -> Self::Output<V> {
}
}
/// Result type returned by [`Parser::process`], shaped by the chosen [`OutputMode`]
pub type PResult<OM, I, O, E> = Result<
(I, <<OM as OutputMode>::Output as Mode>::Output<O>),
Err<E, <<OM as OutputMode>::Error as Mode>::Output<E>>,
>;
/// Bundles the output mode, error mode and streaming behaviour used by `Parser::process`
pub trait OutputMode {
/// Mode applied to the parser's output values
type Output: Mode;
/// Mode applied to the parser's error values
type Error: Mode;
/// How running out of input is handled (streaming or complete)
type Incomplete: IsStreaming;
}
/// Selects how running out of input is reported (streaming vs. complete)
pub trait IsStreaming {
/// Builds the error returned when more input is needed
fn incomplete<E, F: FnOnce() -> E>(needed: Needed, err_f: F) -> Err<E>;
/// Returns `true` for streaming behaviour
fn is_streaming() -> bool;
}
/// Streaming behaviour: missing input is reported as `Err::Incomplete`
pub struct Streaming;
impl IsStreaming for Streaming {
fn incomplete<E, F: FnOnce() -> E>(needed: Needed, _err_f: F) -> Err<E> {
Err::Incomplete(needed)
}
#[inline]
fn is_streaming() -> bool {
true
}
}
/// Complete behaviour: missing input is converted into a regular parse error
pub struct Complete;
impl IsStreaming for Complete {
fn incomplete<E, F: FnOnce() -> E>(_needed: Needed, err_f: F) -> Err<E> {
Err::Error(err_f())
}
#[inline]
fn is_streaming() -> bool {
false
}
}
/// Marker type implementing [`OutputMode`] from an output [`Mode`], an error [`Mode`] and an [`IsStreaming`] choice
pub struct OutputM<M: Mode, EM: Mode, S: IsStreaming> {
m: PhantomData<M>,
em: PhantomData<EM>,
s: PhantomData<S>,
}
impl<M: Mode, EM: Mode, S: IsStreaming> OutputMode for OutputM<M, EM, S> {
type Output = M;
type Error = EM;
type Incomplete = S;
}
/// All nom parsers implement this trait
pub trait Parser<Input> {
/// Type of the produced value
type Output;
/// Error type of this parser
type Error: ParseError<Input>;
/// A parser takes an input type and returns a `Result` containing
/// either the remaining input and the output value, or an error
#[inline]
fn parse(&mut self, input: Input) -> IResult<Input, Self::Output, Self::Error> {
self.process::<OutputM<Emit, Emit, Streaming>>(input)
}
/// A parser takes an input type and returns a `Result` containing
/// either the remaining input and the output value, or an error
#[inline]
fn parse_complete(&mut self, input: Input) -> IResult<Input, Self::Output, Self::Error> {
self.process::<OutputM<Emit, Emit, Complete>>(input)
}
/// A parser takes an input type and returns a `Result` containing
/// either the remaining input and the output value, or an error
fn process<OM: OutputMode>(
&mut self,
input: Input,
) -> PResult<OM, Input, Self::Output, Self::Error>;
/// Maps a function over the result of a parser
fn map<G, O2>(self, g: G) -> Map<Self, G>
where
G: FnMut(Self::Output) -> O2,
Self: core::marker::Sized,
{
Map { f: self, g }
}
/// Applies a function returning a `Result` over the result of a parser.
fn map_res<G, O2, E2>(self, g: G) -> MapRes<Self, G>
where
G: FnMut(Self::Output) -> Result<O2, E2>,
Self::Error: FromExternalError<Input, E2>,
Self: core::marker::Sized,
{
MapRes { f: self, g }
}
/// Applies a function returning an `Option` over the result of a parser.
fn map_opt<G, O2>(self, g: G) -> MapOpt<Self, G>
where
G: FnMut(Self::Output) -> Option<O2>,
Self: core::marker::Sized,
{
MapOpt { f: self, g }
}
/// Creates a second parser from the output of the first one, then applies it over the rest of the input
fn flat_map<G, H>(self, g: G) -> FlatMap<Self, G>
where
G: FnMut(Self::Output) -> H,
H: Parser<Input, Error = Self::Error>,
Self: core::marker::Sized,
{
FlatMap { f: self, g }
}
/// Applies a second parser over the output of the first one
fn and_then<G>(self, g: G) -> AndThen<Self, G>
where
G: Parser<Self::Output, Error = Self::Error>,
Self: core::marker::Sized,
{
AndThen { f: self, g }
}
/// Applies a second parser after the first one, returning their results as a tuple
fn and<G, O2>(self, g: G) -> And<Self, G>
where
G: Parser<Input, Output = O2, Error = Self::Error>,
Self: core::marker::Sized,
{
And { f: self, g }
}
/// Applies a second parser over the input if the first one failed
fn or<G>(self, g: G) -> Or<Self, G>
where
G: Parser<Input, Output = Self::Output, Error = Self::Error>,
Self: core::marker::Sized,
{
Or { f: self, g }
}
/// automatically converts the parser's output and error values to another type, as long as they
/// implement the `From` trait
fn into<O2: From<Self::Output>, E2: From<Self::Error>>(self) -> Into<Self, O2, E2>
where
Self: core::marker::Sized,
{
Into {
f: self,
phantom_out2: core::marker::PhantomData,
phantom_err2: core::marker::PhantomData,
}
}
}
impl<I, O, E: ParseError<I>, F> Parser<I> for F
where
F: FnMut(I) -> IResult<I, O, E>,
{
type Output = O;
type Error = E;
fn process<OM: OutputMode>(&mut self, i: I) -> PResult<OM, I, Self::Output, Self::Error> {
let (i, o) = self(i).map_err(|e| match e {
Err::Incomplete(i) => Err::Incomplete(i),
Err::Error(e) => Err::Error(OM::Error::bind(|| e)),
Err::Failure(e) => Err::Failure(e),
})?;
Ok((i, OM::Output::bind(|| o)))
}
}
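// Illustrative sketch (not from the original source): thanks to the impl above, any
// `fn(I) -> IResult<I, O, E>` is itself a `Parser`, so the trait combinators can be
// called on it directly. `take_two` is a hypothetical parser consuming the first two bytes:
//
//     fn take_two(i: &str) -> IResult<&str, &str> { /* ... */ }
//     let mut counted = take_two.map(|s: &str| s.len());
//     // counted.parse("abcd") would yield Ok(("cd", 2))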
macro_rules! impl_parser_for_tuple {
($($parser:ident $output:ident),+) => (
#[allow(non_snake_case)]
impl<I, $($output),+, E: ParseError<I>, $($parser),+> Parser<I> for ($($parser),+,)
where
$($parser: Parser<I, Output = $output, Error = E>),+
{
type Output = ($($output),+,);
type Error = E;
#[inline(always)]
fn process<OM: OutputMode>(&mut self, i: I) -> PResult<OM, I, Self::Output, Self::Error> {
let ($(ref mut $parser),+,) = *self;
// FIXME: is there a way to avoid producing the output values?
$(let(i, $output) = $parser.process::<OutputM<Emit, OM::Error, OM::Incomplete>>(i)?;)+
// ???
Ok((i, OM::Output::bind(|| ($($output),+,))))
}
}
)
}
macro_rules! impl_parser_for_tuples {
($parser1:ident $output1:ident, $($parser:ident $output:ident),+) => {
impl_parser_for_tuples!(__impl $parser1 $output1; $($parser $output),+);
};
(__impl $($parser:ident $output:ident),+; $parser1:ident $output1:ident $(,$parser2:ident $output2:ident)*) => {
impl_parser_for_tuple!($($parser $output),+);
impl_parser_for_tuples!(__impl $($parser $output),+, $parser1 $output1; $($parser2 $output2),*);
};
(__impl $($parser:ident $output:ident),+;) => {
impl_parser_for_tuple!($($parser $output),+);
}
}
impl_parser_for_tuples!(P1 O1, P2 O2, P3 O3, P4 O4, P5 O5, P6 O6, P7 O7, P8 O8, P9 O9, P10 O10, P11 O11, P12 O12, P13 O13, P14 O14, P15 O15, P16 O16, P17 O17, P18 O18, P19 O19, P20 O20, P21 O21);
/*
#[cfg(feature = "alloc")]
use alloc::boxed::Box;
#[cfg(feature = "alloc")]
impl<I, O, E: ParseError<I>> Parser<I> for Box<dyn Parser<I, Output = O, Error = E>> {
type Output = O;
type Error = E;
fn process<OM: OutputMode>(&mut self, input: I) -> PResult<OM, I, Self::Output, Self::Error> {
(**self).process(input)
}
}
*/
/// Implementation of `Parser::map`
#[cfg_attr(nightly, warn(rustdoc::missing_doc_code_examples))]
pub struct Map<F, G> {
f: F,
g: G,
}
impl<I, O2, E: ParseError<I>, F: Parser<I, Error = E>, G: FnMut(<F as Parser<I>>::Output) -> O2>
Parser<I> for Map<F, G>
{
type Output = O2;
type Error = E;
#[inline(always)]
fn process<OM: OutputMode>(&mut self, i: I) -> PResult<OM, I, Self::Output, Self::Error> {
match self.f.process::<OM>(i) {
Err(e) => Err(e),
Ok((i, o)) => Ok((i, OM::Output::map(o, |o| (self.g)(o)))),
}
}
}
/// Implementation of `Parser::map_res`
pub struct MapRes<F, G> {
f: F,
g: G,
}
impl<I, O2, E2, F, G> Parser<I> for MapRes<F, G>
where
I: Clone,
<F as Parser<I>>::Error: FromExternalError<I, E2>,
F: Parser<I>,
G: FnMut(<F as Parser<I>>::Output) -> Result<O2, E2>,
{
type Output = O2;
type Error = <F as Parser<I>>::Error;
fn process<OM: OutputMode>(&mut self, i: I) -> PResult<OM, I, Self::Output, Self::Error> {
let (input, o1) = self
.f
.process::<OutputM<Emit, OM::Error, OM::Incomplete>>(i.clone())?;
match (self.g)(o1) {
Ok(o2) => Ok((input, OM::Output::bind(|| o2))),
Err(e) => Err(Err::Error(OM::Error::bind(|| {
<F as Parser<I>>::Error::from_external_error(i, ErrorKind::MapRes, e)
}))),
}
}
}
/// Implementation of `Parser::map_opt`
pub struct MapOpt<F, G> {
f: F,
g: G,
}
impl<I, O2, F, G> Parser<I> for MapOpt<F, G>
where
I: Clone,
F: Parser<I>,
G: FnMut(<F as Parser<I>>::Output) -> Option<O2>,
{
type Output = O2;
type Error = <F as Parser<I>>::Error;
fn process<OM: OutputMode>(&mut self, i: I) -> PResult<OM, I, Self::Output, Self::Error> {
let (input, o1) = self
.f
.process::<OutputM<Emit, OM::Error, OM::Incomplete>>(i.clone())?;
match (self.g)(o1) {
Some(o2) => Ok((input, OM::Output::bind(|| o2))),
None => Err(Err::Error(OM::Error::bind(|| {
<F as Parser<I>>::Error::from_error_kind(i, ErrorKind::MapOpt)
}))),
}
}
}
/// Implementation of `Parser::flat_map`
#[cfg_attr(nightly, warn(rustdoc::missing_doc_code_examples))]
pub struct FlatMap<F, G> {
f: F,
g: G,
}
impl<
I,
E: ParseError<I>,
F: Parser<I, Error = E>,
G: FnMut(<F as Parser<I>>::Output) -> H,
H: Parser<I, Error = E>,
> Parser<I> for FlatMap<F, G>
{
type Output = <H as Parser<I>>::Output;
type Error = E;
fn process<OM: OutputMode>(&mut self, i: I) -> PResult<OM, I, Self::Output, Self::Error> {
let (input, o1) = self
.f
.process::<OutputM<Emit, OM::Error, OM::Incomplete>>(i)?;
(self.g)(o1).process::<OM>(input)
}
}
/// Implementation of `Parser::and_then`
#[cfg_attr(nightly, warn(rustdoc::missing_doc_code_examples))]
pub struct AndThen<F, G> {
f: F,
g: G,
}
impl<I, F: Parser<I>, G: Parser<<F as Parser<I>>::Output, Error = <F as Parser<I>>::Error>>
Parser<I> for AndThen<F, G>
{
type Output = <G as Parser<<F as Parser<I>>::Output>>::Output;
type Error = <F as Parser<I>>::Error;
fn process<OM: OutputMode>(&mut self, i: I) -> PResult<OM, I, Self::Output, Self::Error> {
let (input, o1) = self
.f
.process::<OutputM<Emit, OM::Error, OM::Incomplete>>(i)?;
let (_, o2) = self.g.process::<OM>(o1)?;
Ok((input, o2))
}
}
/// Implementation of `Parser::and`
#[cfg_attr(nightly, warn(rustdoc::missing_doc_code_examples))]
pub struct And<F, G> {
f: F,
g: G,
}
impl<I, E: ParseError<I>, F: Parser<I, Error = E>, G: Parser<I, Error = E>> Parser<I>
for And<F, G>
{
type Output = (<F as Parser<I>>::Output, <G as Parser<I>>::Output);
type Error = E;
#[inline(always)]
fn process<OM: OutputMode>(&mut self, i: I) -> PResult<OM, I, Self::Output, Self::Error> {
let (i, o1) = self.f.process::<OM>(i)?;
let (i, o2) = self.g.process::<OM>(i)?;
Ok((i, OM::Output::combine(o1, o2, |o1, o2| (o1, o2))))
}
}
/// Implementation of `Parser::or`
#[cfg_attr(nightly, warn(rustdoc::missing_doc_code_examples))]
pub struct Or<F, G> {
f: F,
g: G,
}
impl<
I: Clone,
O,
E: ParseError<I>,
F: Parser<I, Output = O, Error = E>,
G: Parser<I, Output = O, Error = E>,
> Parser<I> for Or<F, G>
{
type Output = <F as Parser<I>>::Output;
type Error = <F as Parser<I>>::Error;
fn process<OM: OutputMode>(&mut self, i: I) -> PResult<OM, I, Self::Output, Self::Error> {
match self.f.process::<OM>(i.clone()) {
Err(Err::Error(e1)) => match self.g.process::<OM>(i) {
Err(Err::Error(e2)) => Err(Err::Error(OM::Error::combine(e1, e2, |e1, e2| e1.or(e2)))),
res => res,
},
res => res,
}
}
}
/// Implementation of `Parser::into`
#[cfg_attr(nightly, warn(rustdoc::missing_doc_code_examples))]
pub struct Into<F, O2, E2> {
f: F,
phantom_out2: core::marker::PhantomData<O2>,
phantom_err2: core::marker::PhantomData<E2>,
}
impl<
I,
O2: From<<F as Parser<I>>::Output>,
E2: crate::error::ParseError<I> + From<<F as Parser<I>>::Error>,
F: Parser<I>,
> Parser<I> for Into<F, O2, E2>
{
type Output = O2;
type Error = E2;
fn process<OM: OutputMode>(&mut self, i: I) -> PResult<OM, I, Self::Output, Self::Error> {
match self.f.process::<OM>(i) {
Ok((i, o)) => Ok((i, OM::Output::map(o, |o| o.into()))),
Err(Err::Error(e)) => Err(Err::Error(OM::Error::map(e, |e| e.into()))),
Err(Err::Failure(e)) => Err(Err::Failure(e.into())),
Err(Err::Incomplete(e)) => Err(Err::Incomplete(e)),
}
}
}
/// Alternate between two Parser implementations with the same result type.
pub(crate) enum Either<F, G> {
Left(F),
Right(G),
}
impl<
I,
F: Parser<I>,
G: Parser<I, Output = <F as Parser<I>>::Output, Error = <F as Parser<I>>::Error>,
> Parser<I> for Either<F, G>
{
type Output = <F as Parser<I>>::Output;
type Error = <F as Parser<I>>::Error;
#[inline]
fn process<OM: OutputMode>(&mut self, i: I) -> PResult<OM, I, Self::Output, Self::Error> {
match self {
Either::Left(f) => f.process::<OM>(i),
Either::Right(g) => g.process::<OM>(i),
}
}
}
#[cfg(test)]
mod tests {
use super::*;
use crate::error::ErrorKind;
use crate::bytes::streaming::{tag, take};
use crate::number::streaming::be_u16;
use crate::sequence::terminated;
#[doc(hidden)]
#[macro_export]
macro_rules! assert_size (
($t:ty, $sz:expr) => (
assert_eq!($crate::lib::std::mem::size_of::<$t>(), $sz);
);
);
#[test]
#[cfg(target_pointer_width = "64")]
fn size_test() {
assert_size!(IResult<&[u8], &[u8], (&[u8], u32)>, 40);
//FIXME: since rust 1.65, this is now 32 bytes, likely thanks to https://github.com/rust-lang/rust/pull/94075
// deactivating that test for now because it'll have different values depending on the rust version
// assert_size!(IResult<&str, &str, u32>, 40);
assert_size!(Needed, 8);
assert_size!(Err<u32>, 16);
assert_size!(ErrorKind, 1);
}
#[test]
fn err_map_test() {
let e = Err::Error(1);
assert_eq!(e.map(|v| v + 1), Err::Error(2));
}
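    // Sketch test added for illustration; it only exercises `Needed::new`, `Needed::map`
    // and `Err::is_incomplete` as defined earlier in this file.
    #[test]
    fn needed_and_incomplete_test() {
        assert_eq!(Needed::new(0), Needed::Unknown);
        assert!(Needed::new(2).is_known());
        assert_eq!(Needed::new(2).map(|n| n.get() * 2), Needed::new(4));
        let e: Err<u32> = Err::Incomplete(Needed::new(1));
        assert!(e.is_incomplete());
    }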
#[test]
fn native_tuple_test() {
fn tuple_3(i: &[u8]) -> IResult<&[u8], (u16, &[u8])> {
terminated((be_u16, take(3u8)), tag("fg")).parse(i)
}
assert_eq!(
tuple_3(&b"abcdefgh"[..]),
Ok((&b"h"[..], (0x6162u16, &b"cde"[..])))
);
assert_eq!(tuple_3(&b"abcd"[..]), Err(Err::Incomplete(Needed::new(1))));
assert_eq!(tuple_3(&b"abcde"[..]), Err(Err::Incomplete(Needed::new(2))));
assert_eq!(
tuple_3(&b"abcdejk"[..]),
Err(Err::Error(error_position!(&b"jk"[..], ErrorKind::Tag)))
);
}
}
| true |
1a85732712bccea7b17232abf7cbfb100a0a38f0
|
Rust
|
jeizsm/actix-telegram
|
/src/raw/types/shipping_query.rs
|
UTF-8
| 473 | 2.546875 | 3 |
[
"MIT"
] |
permissive
|
use crate::types::*;
/// This object contains information about an incoming shipping query.
#[derive(Debug, Deserialize, Clone, Getters)]
#[get(vis = "pub")]
pub struct ShippingQuery {
/// Unique query identifier
pub(crate) id: String,
/// User who sent the query
pub(crate) from: User,
/// Bot specified invoice payload
pub(crate) invoice_payload: String,
/// User specified shipping address
pub(crate) shipping_address: ShippingAddress,
}
| true |
0edd297aae4e6f0c16a87ee083bd7807b9f66ebe
|
Rust
|
blogscot/a-simple-interpreter
|
/src/ast/evaluator.rs
|
UTF-8
| 2,988 | 3.21875 | 3 |
[] |
no_license
|
use std::collections::HashMap;
use ast::node::*;
use ast::visitor::NodeVisitor;
use lexer::token::Token::*;
use std::str::FromStr;
use utils::number::{Number, Number::Nil, NumberResult};
pub struct Evaluator {
global_scope: HashMap<String, String>,
}
impl Evaluator {
pub fn new() -> Self {
Evaluator {
global_scope: HashMap::new(),
}
}
}
impl NodeVisitor for Evaluator {
fn visit_program(&mut self, node: &ProgramNode) -> NumberResult {
self.visit(&node.block)
}
fn visit_procedure(&mut self, _node: &ProcedureNode) -> NumberResult {
Ok(Nil)
}
fn visit_parameter(&mut self, _node: &ParameterNode) -> NumberResult {
Ok(Nil)
}
fn visit_block(&mut self, node: &BlockNode) -> NumberResult {
for declaration in &node.declarations {
self.visit(&declaration)?;
}
self.visit(&node.compound_statement)
}
fn visit_declaration(&mut self, _node: &DeclarationNode) -> NumberResult {
Ok(Nil)
}
fn visit_type(&mut self, _node: &TypeNode) -> NumberResult {
Ok(Nil)
}
fn visit_integer(&mut self, node: &IntegerNumNode) -> NumberResult {
Ok(Number::from(node.value))
}
fn visit_real(&mut self, node: &RealNumNode) -> NumberResult {
Ok(Number::from(node.value))
}
fn visit_binop(&mut self, node: &BinOpNode) -> NumberResult {
let BinOpNode {
left,
right,
operator,
} = node;
let lhs = self.visit(left);
let rhs = self.visit(right);
match operator {
Plus => Ok(lhs? + rhs?),
Multiply => Ok(lhs? * rhs?),
Minus => Ok(lhs? - rhs?),
IntegerDivision => Ok(lhs? / rhs?),
RealDivision => Ok(lhs? / rhs?),
_ => Err(format!("Unknown operator found: {}", operator)),
}
}
fn visit_unaryop(&mut self, node: &UnaryOpNode) -> NumberResult {
let UnaryOpNode { operator, expr } = node;
match operator {
Plus => self.visit(expr),
Minus => Ok(-self.visit(expr)?),
_ => Err(format!("Unexpected Unary Operator found: {}", operator)),
}
}
fn visit_compound(&mut self, node: &CompoundNode) -> NumberResult {
for child in &node.children {
self.visit(child)?;
}
Ok(Nil)
}
fn visit_assign(&mut self, node: &AssignNode) -> NumberResult {
if node.identifier.is::<VarNode>() {
let var_node: &VarNode = node.identifier.downcast_ref().unwrap();
if let Id(name) = &var_node.identifier {
let value = self.visit(&node.expr);
self
.global_scope
.insert(name.to_string(), value?.to_string());
}
}
Ok(Nil)
}
fn visit_var(&mut self, node: &VarNode) -> NumberResult {
if let VarNode {
identifier: Id(name),
} = node
{
match self.global_scope.get(name.as_str()) {
Some(value) => Ok(Number::from_str(value).unwrap()),
None => Err(format!("Possible use of uninitialised variable: {}.", name)),
}
} else {
Err(format!("Invalid identifier found {}", node.identifier))
}
}
}
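// Illustrative sketch (not part of the original file): the evaluator is driven through
// the `NodeVisitor` dispatch after a program has been parsed into an AST; `program_node`
// and the way it is produced are assumptions here.
//
//     let mut evaluator = Evaluator::new();
//     let result: NumberResult = evaluator.visit(&program_node);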
| true |
7916cd16070d9926dc1170dcda9d644ce51bd75e
|
Rust
|
Nejat/verbosity-rs
|
/src/lib.rs
|
UTF-8
| 1,739 | 2.828125 | 3 |
[] |
no_license
|
#![deny(clippy::all)]
#![deny(clippy::pedantic)]
#![deny(clippy::nursery)]
#![deny(clippy::cargo)]
#![deny(missing_docs)]
// ==============================================================
#![allow(clippy::module_name_repetitions)]
#![allow(clippy::items_after_statements)]
// ==============================================================
#![doc(html_root_url = "https://docs.rs/verbosity/0.1.0")]
//! Intended for use with `cli` commands, this library lets you set a singleton [`Verbosity`]
//! option to indicate different levels of reporting, i.e. `Quite` | `Terse` | `Verbose`
//!
//! ## Example
//!
//! ```no_run
//! # use std::str::FromStr;
//! # use verbosity::Verbosity;
//! # use verbosity::Verbosity::*;
//! let level = Verbosity::from_str(
//! &std::env::args().last().unwrap_or(String::new())
//! ).unwrap_or(Verbosity::Quite);
//!
//! level.set_as_global();
//!
//! match Verbosity::level() {
//! Quite => {}
//! Terse =>
//! println!("terse message"),
//! Verbose =>
//! println!("overly verbose message for some command")
//! }
//! ```
//!
//! ## Related Crate
//!
//! The [`cli-toolbox`] crate uses this library to provide a more ergonomic way of
//! controlling reporting output
//!
//! _i.e._
//! ```no_compile
//! let level = Verbosity::from_str(
//! &std::env::args().last().unwrap_or(String::new())
//! ).unwrap_or(Verbosity::Quite);
//!
//! level.set_as_global();
//!
//! report! {
//! @terse "terse message"
//! @verbose "overly verbose message for some command"
//! }
//! ```
//! [`Verbosity`]: verbosity::Verbosity
//! [`cli-toolbox`]: <https://crates.io/crates/cli-toolbox>
pub use crate::verbosity::Verbosity;
#[cfg(test)]
mod tests;
mod verbosity;
| true |
d508de0542fa7da3f37aacc52719ceb4fc7bbb68
|
Rust
|
ratijas/windows-rust-counters
|
/win-high/src/perf/types.rs
|
UTF-8
| 16,216 | 2.984375 | 3 |
[] |
no_license
|
//! From the sources of `<WinPerf.h>`:
//!
//! ```txt
//! PERF_COUNTER_DEFINITION.CounterType field values
//!
//!
//! Counter ID Field Definition:
//!
//! 3 2 2 2 2 1 1 1
//! 1 8 4 2 0 6 2 0 8 0
//! +--------+--------+----+----+--------+--------+----+----+----------------+
//! |Display |Calculation |Time|Counter | |Ctr |Size| |
//! |Flags |Modifiers |Base|SubType |Reserved|Type|Fld | Reserved |
//! +--------+--------+----+----+--------+--------+----+----+----------------+
//! ```
use std::fmt::{self, Debug};
use std::mem::transmute;
use win_low::winperf::*;
use crate::prelude::v1::*;
/// A safe, high-level wrapper for `PERF_COUNTER_DEFINITION.CounterType` value.
#[derive(Copy, Clone)]
pub struct CounterTypeDefinition(DWORD);
/// Container for bit-masks of `CounterTypeDefinition` components.
#[repr(u32)]
pub enum CounterTypeMask {
Reserved /* */ = 0b_00000000_00000000_11110000_11111111,
Size /* */ = 0b_00000000_00000000_00000011_00000000,
CounterType /* */ = 0b_00000000_00000000_00001100_00000000,
CounterSubType /**/ = 0b_00000000_00001111_00000000_00000000,
TimeBase /* */ = 0b_00000000_00110000_00000000_00000000,
CalcModifier /* */ = 0b_00001111_11000000_00000000_00000000,
DisplayFlags /* */ = 0b_11110000_00000000_00000000_00000000,
}
/// select one of the following to indicate the counter's data size
#[repr(u32)]
#[derive(Copy, Clone, Debug)]
pub enum Size {
/// 32 bit field
Dword = PERF_SIZE_DWORD,
/// 64 bit field
Large = PERF_SIZE_LARGE,
/// for Zero Length fields
Zero = PERF_SIZE_ZERO,
/// length is in CounterLength field
/// of Counter Definition struct
Var = PERF_SIZE_VARIABLE_LEN,
}
/// select one of the following values to indicate the counter field usage
#[repr(u32)]
#[derive(Copy, Clone, Debug)]
pub enum RawType {
/// a number (not a counter)
Number = PERF_TYPE_NUMBER,
/// an increasing numeric value
Counter = PERF_TYPE_COUNTER,
/// a text field
Text = PERF_TYPE_TEXT,
/// displays a zero
Zero = PERF_TYPE_ZERO,
}
/// Safe wrapper for type & subtype.
#[derive(Copy, Clone, Debug)]
pub enum CounterType {
Number(Number),
Counter(Counter),
Text(Text),
Zero,
}
/// If the PERF_TYPE_NUMBER field was selected, then select one of the
/// following to describe the Number
#[repr(u32)]
#[derive(Copy, Clone, Debug)]
pub enum Number {
/// display as HEX value
Hex = PERF_NUMBER_HEX,
/// display as a decimal integer
Decimal = PERF_NUMBER_DECIMAL,
/// display as a decimal/1000
Dec1000 = PERF_NUMBER_DEC_1000,
}
/// If the PERF_TYPE_COUNTER value was selected then select one of the
/// following to indicate the type of counter
#[repr(u32)]
#[derive(Copy, Clone, Debug)]
pub enum Counter {
/// display counter value
Value = PERF_COUNTER_VALUE,
/// divide ctr / delta time
Rate = PERF_COUNTER_RATE,
/// divide ctr / base
Fraction = PERF_COUNTER_FRACTION,
/// base value used in fractions
Base = PERF_COUNTER_BASE,
/// subtract counter from current time
Elapsed = PERF_COUNTER_ELAPSED,
/// Use Queuelen processing func.
Queuelen = PERF_COUNTER_QUEUELEN,
/// Counter begins or ends a histogram
Histogram = PERF_COUNTER_HISTOGRAM,
/// divide ctr / private clock
Precision = PERF_COUNTER_PRECISION,
}
/// If the PERF_TYPE_TEXT value was selected, then select one of the
/// following to indicate the type of TEXT data.
#[repr(u32)]
#[derive(Copy, Clone, Debug)]
pub enum Text {
/// type of text in text field
Unicode = PERF_TEXT_UNICODE,
/// ASCII using the CodePage field
Ascii = PERF_TEXT_ASCII,
}
/// Timer SubTypes
#[repr(u32)]
#[derive(Copy, Clone, Debug)]
pub enum Timer {
/// use system perf. freq for base
TimerTick = PERF_TIMER_TICK,
/// use 100 NS timer time base units
Timer100NS = PERF_TIMER_100NS,
/// use the object timer freq
ObjectTimer = PERF_OBJECT_TIMER,
}
// Any types that have calculations performed can use one or more of
// the following calculation modification flags listed here
bitflags! {
pub struct CalculationModifiers: u32 {
/// compute difference first
const DELTA = PERF_DELTA_COUNTER;
/// compute base diff as well
const DELTA_BASE = PERF_DELTA_BASE;
/// show as 1.00-value (assumes:
const INVERSE = PERF_INVERSE_COUNTER;
/// sum of multiple instances
const MULTI = PERF_MULTI_COUNTER;
}
}
/// Select one of the following values to indicate the display suffix (if any)
#[repr(u32)]
#[derive(Copy, Clone, Debug)]
pub enum DisplayFlags {
/// no suffix
NoSuffix = PERF_DISPLAY_NO_SUFFIX,
/// "/sec"
PerSec = PERF_DISPLAY_PER_SEC,
/// "%"
Percent = PERF_DISPLAY_PERCENT,
/// "secs"
Seconds = PERF_DISPLAY_SECONDS,
/// value is not displayed
NoShow = PERF_DISPLAY_NOSHOW,
}
/// The following are used to determine the level of detail associated
/// with the counter. The user will be setting the level of detail
/// that should be displayed at any given time.
#[repr(u32)]
#[derive(Copy, Clone, Debug)]
pub enum DetailLevel {
/// The uninformed can understand it
Novice = PERF_DETAIL_NOVICE,
/// For the advanced user
Advanced = PERF_DETAIL_ADVANCED,
/// For the expert user
Expert = PERF_DETAIL_EXPERT,
/// For the system designer
Wizard = PERF_DETAIL_WIZARD,
}
impl Default for DetailLevel {
fn default() -> Self {
DetailLevel::Novice
}
}
impl CounterTypeDefinition {
pub fn new(
size: Size,
counter_type: CounterType,
timer: Timer,
calculation_modifiers: CalculationModifiers,
display_flags: DisplayFlags,
) -> Self {
let inner = size.into_raw()
| RawType::from(counter_type).into_raw()
| counter_type.sub_type()
| timer.into_raw()
| calculation_modifiers.into_raw()
| display_flags.into_raw();
CounterTypeDefinition(inner)
}
pub fn from_raw(value: DWORD) -> Option<Self> {
Some(Self::new(
Size::from_raw(value),
CounterType::from_raw(value)?,
Timer::from_raw(value)?,
CalculationModifiers::from_raw(value)?,
DisplayFlags::from_raw(value)?,
))
}
pub unsafe fn from_raw_unchecked(value: DWORD) -> Self {
Self::new(
Size::from_raw(value),
CounterType::from_raw_unchecked(value),
Timer::from_raw_unchecked(value),
CalculationModifiers::from_raw_truncate(value),
DisplayFlags::from_raw_unchecked(value),
)
}
#[inline(always)]
pub const fn into_raw(self) -> DWORD {
self.0
}
#[inline(always)]
pub fn size(&self) -> Size {
Size::from_raw(self.into_raw())
}
#[inline(always)]
pub fn raw_type(&self) -> RawType {
RawType::from_raw(self.into_raw())
}
#[inline(always)]
pub fn sub_type(&self) -> DWORD {
imp::sub_type(self.into_raw())
}
#[inline(always)]
pub fn counter_type(&self) -> CounterType {
CounterType::from_raw(self.into_raw()).expect("Invalid counter type")
}
#[inline(always)]
pub fn time_base(&self) -> Timer {
Timer::from_raw(self.into_raw()).expect("Invalid time base")
}
#[inline(always)]
pub fn calculation_modifiers(&self) -> CalculationModifiers {
CalculationModifiers::from_raw(self.into_raw()).expect("Invalid calculation modifiers")
}
#[inline(always)]
pub fn display_flags(self) -> DisplayFlags {
DisplayFlags::from_raw(self.into_raw()).expect("Invalid display flags")
}
}
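// Illustrative sketch (not from the original source): assembling a typical
// "rate per second" counter type from the building blocks above; this particular
// combination of flags is an arbitrary example, not a required one.
//
//     let counter_type = CounterTypeDefinition::new(
//         Size::Large,
//         CounterType::Counter(Counter::Rate),
//         Timer::TimerTick,
//         CalculationModifiers::empty(),
//         DisplayFlags::PerSec,
//     );
//     let raw: DWORD = counter_type.into_raw();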
// from_raw/from_raw_unchecked/into_raw implementations for CounterTypeDefinition components
mod imp {
use std::convert::TryFrom;
use crate::perf::nom::PerfCounterDefinition;
use super::*;
impl CounterTypeMask {
#[inline(always)]
pub const fn into_raw(self) -> DWORD {
self as _
}
}
impl Size {
pub fn from_raw(value: DWORD) -> Self {
let value = value & CounterTypeMask::Size.into_raw();
// SAFETY: enum variants cover all possible values
unsafe { transmute(value) }
}
#[inline(always)]
pub const fn into_raw(self) -> DWORD {
self as _
}
pub fn size_of(self) -> Option<usize> {
use std::mem::size_of;
match self {
Size::Dword => Some(size_of::<DWORD>()),
Size::Large => Some(size_of::<DWORD>() * 2),
Size::Zero => Some(0),
Size::Var => None,
}
}
}
impl RawType {
pub fn from_raw(value: DWORD) -> Self {
let value = value & CounterTypeMask::CounterType.into_raw();
// SAFETY: enum variants cover all possible values
unsafe { transmute(value) }
}
#[inline(always)]
pub const fn into_raw(self) -> DWORD {
self as _
}
}
impl From<CounterType> for RawType {
/// Convert between `CounterType` and `RawType` counterparts.
fn from(value: CounterType) -> Self {
match value {
CounterType::Counter(..) => RawType::Counter,
CounterType::Number(..) => RawType::Number,
CounterType::Text(..) => RawType::Text,
CounterType::Zero => RawType::Zero,
}
}
}
impl CounterType {
pub fn from_raw(value: DWORD) -> Option<Self> {
Some(match RawType::from_raw(value) {
RawType::Number => CounterType::Number(Number::from_raw(value)?),
RawType::Counter => CounterType::Counter(Counter::from_raw(value)?),
RawType::Text => CounterType::Text(Text::from_raw(value)?),
RawType::Zero => CounterType::Zero,
})
}
pub unsafe fn from_raw_unchecked(value: DWORD) -> Self {
match RawType::from_raw(value) {
RawType::Number => CounterType::Number(Number::from_raw_unchecked(value)),
RawType::Counter => CounterType::Counter(Counter::from_raw_unchecked(value)),
RawType::Text => CounterType::Text(Text::from_raw_unchecked(value)),
RawType::Zero => CounterType::Zero,
}
}
pub fn sub_type(&self) -> DWORD {
match *self {
CounterType::Counter(it) => it.into_raw(),
CounterType::Number(it) => it.into_raw(),
CounterType::Text(it) => it.into_raw(),
// Note: in Microsoft docs and sources nothing is said about subtype values of
// Zero type. By observing certain patterns, it is safe to assume zero subtype.
CounterType::Zero => 0,
}
}
}
#[inline(always)]
pub const fn sub_type(value: DWORD) -> DWORD {
value & CounterTypeMask::CounterSubType.into_raw()
}
impl Number {
pub fn from_raw(value: DWORD) -> Option<Self> {
let value = sub_type(value);
Some(match value {
PERF_NUMBER_HEX => Self::Hex,
PERF_NUMBER_DECIMAL => Self::Decimal,
PERF_NUMBER_DEC_1000 => Self::Dec1000,
_ => return None,
})
}
pub unsafe fn from_raw_unchecked(value: DWORD) -> Self {
// SAFETY: the caller must guarantee that the sub-type bits encode a valid `Number` variant
transmute(sub_type(value))
}
#[inline(always)]
pub const fn into_raw(self) -> DWORD {
self as _
}
}
impl Counter {
pub fn from_raw(value: DWORD) -> Option<Self> {
let value = sub_type(value);
Some(match value {
PERF_COUNTER_VALUE => Self::Value,
PERF_COUNTER_RATE => Self::Rate,
PERF_COUNTER_FRACTION => Self::Fraction,
PERF_COUNTER_BASE => Self::Base,
PERF_COUNTER_ELAPSED => Self::Elapsed,
PERF_COUNTER_QUEUELEN => Self::Queuelen,
PERF_COUNTER_HISTOGRAM => Self::Histogram,
PERF_COUNTER_PRECISION => Self::Precision,
_ => return None,
})
}
pub unsafe fn from_raw_unchecked(value: DWORD) -> Self {
// SAFETY: the caller must guarantee that the sub-type bits encode a valid `Counter` variant
transmute(sub_type(value))
}
#[inline(always)]
pub const fn into_raw(self) -> DWORD {
self as _
}
}
impl Text {
pub fn from_raw(value: DWORD) -> Option<Self> {
let value = sub_type(value);
Some(match value {
PERF_TEXT_UNICODE => Self::Unicode,
PERF_TEXT_ASCII => Self::Ascii,
_ => return None,
})
}
pub unsafe fn from_raw_unchecked(value: DWORD) -> Self {
// SAFETY: the caller must guarantee that the sub-type bits encode a valid `Text` variant
transmute(sub_type(value))
}
#[inline(always)]
pub const fn into_raw(self) -> DWORD {
self as _
}
}
impl Timer {
pub fn from_raw(value: DWORD) -> Option<Self> {
let value = value & CounterTypeMask::TimeBase.into_raw();
Some(match value {
PERF_TIMER_TICK => Self::TimerTick,
PERF_TIMER_100NS => Self::Timer100NS,
PERF_OBJECT_TIMER => Self::ObjectTimer,
_ => return None,
})
}
pub unsafe fn from_raw_unchecked(value: DWORD) -> Self {
let value = value & CounterTypeMask::TimeBase.into_raw();
// SAFETY: the caller must guarantee that the time-base bits encode a valid `Timer` variant
transmute(value)
}
#[inline(always)]
pub const fn into_raw(self) -> DWORD {
self as _
}
}
impl CalculationModifiers {
pub fn from_raw(value: DWORD) -> Option<Self> {
let value = value & CounterTypeMask::CalcModifier.into_raw();
CalculationModifiers::from_bits(value)
}
pub unsafe fn from_raw_truncate(value: DWORD) -> Self {
let value = value & CounterTypeMask::CalcModifier.into_raw();
CalculationModifiers::from_bits_truncate(value)
}
pub fn into_raw(self) -> DWORD {
self.bits
}
}
impl DisplayFlags {
pub fn from_raw(value: DWORD) -> Option<Self> {
let value = value & CounterTypeMask::DisplayFlags.into_raw();
Some(match value {
PERF_DISPLAY_NO_SUFFIX => DisplayFlags::NoSuffix,
PERF_DISPLAY_PER_SEC => DisplayFlags::PerSec,
PERF_DISPLAY_PERCENT => DisplayFlags::Percent,
PERF_DISPLAY_SECONDS => DisplayFlags::Seconds,
PERF_DISPLAY_NOSHOW => DisplayFlags::NoShow,
_ => return None,
})
}
pub unsafe fn from_raw_unchecked(value: DWORD) -> Self {
let value = value & CounterTypeMask::DisplayFlags.into_raw();
transmute(value)
}
#[inline(always)]
pub const fn into_raw(self) -> DWORD {
self as _
}
}
impl<'a> TryFrom<&PerfCounterDefinition<'a>> for CounterTypeDefinition {
type Error = ();
fn try_from(counter: &PerfCounterDefinition<'a>) -> Result<Self, Self::Error> {
Self::from_raw(counter.raw.CounterType).ok_or(())
}
}
impl Debug for CounterTypeDefinition {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "CounterTypeDefinition {{ ")?;
write!(f, "Size = {:?}, ", self.size())?;
write!(f, "Type = {:?}, ", self.counter_type())?;
write!(f, "Timer = {:?}, ", self.time_base())?;
write!(f, "Modifiers = {:?}, ", self.calculation_modifiers())?;
write!(f, "Display = {:?}", self.display_flags())?;
write!(f, " }}")?;
Ok(())
}
}
}
| true |
35c65719419dd5edf52e6158d905520c6edf4fa9
|
Rust
|
them0ntem/exercism-rust
|
/bob/src/lib.rs
|
UTF-8
| 546 | 3.484375 | 3 |
[] |
no_license
|
pub fn reply(message: &str) -> &str {
let message = message.trim();
if message.is_empty() {
return "Fine. Be that way!";
}
let yelling: bool = message.chars().any(|x| x.is_ascii_uppercase())
&& !message.chars().any(|x| x.is_ascii_lowercase());
let question: bool = message.ends_with("?");
match (yelling, question) {
(true, true) => { "Calm down, I know what I'm doing!" }
(false, true) => { "Sure." }
(true, false) => { "Whoa, chill out!" }
_ => { "Whatever." }
}
}
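// Sketch tests added for illustration; they restate the rules encoded in `reply`
// (silence, yelling, questions, yelled questions, everything else).
#[cfg(test)]
mod tests {
    use super::reply;

    #[test]
    fn responses_follow_the_rules() {
        assert_eq!(reply("   "), "Fine. Be that way!");
        assert_eq!(reply("WATCH OUT!"), "Whoa, chill out!");
        assert_eq!(reply("Does this work?"), "Sure.");
        assert_eq!(reply("WHAT?"), "Calm down, I know what I'm doing!");
        assert_eq!(reply("Tom-ay-to, tom-aaaah-to."), "Whatever.");
    }
}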
| true |
370ef7e56329f1ae3bf24fbcc2229dac6947d830
|
Rust
|
anborg/rust_api
|
/src/routes/users.rs
|
UTF-8
| 3,749 | 2.796875 | 3 |
[] |
no_license
|
use crate::errors::ServiceError;
use crate::models::user::SlimUser;
use crate::{db::Pool, utils::is_admin};
use actix_identity::Identity;
use actix_web::{error::BlockingError, web, HttpResponse};
use diesel::{EqAll, QueryDsl, RunQueryDsl};
use serde::Deserialize;
#[derive(Deserialize)]
pub struct UserData {
name: String,
email: String,
password: String,
}
pub async fn post_user(
user_data: web::Json<UserData>,
pool: web::Data<Pool>,
) -> Result<HttpResponse, ServiceError> {
let res = web::block(move || insert_user(user_data.into_inner(), pool)).await;
match res {
//login after user creation
Ok(user) => Ok(HttpResponse::Created().body(serde_json::json!({ "email": user.email }))),
Err(e) => match e {
BlockingError::Error(service_error) => Err(service_error),
BlockingError::Canceled => Err(ServiceError::InternalServerError),
},
}
}
pub async fn get_users(id: Identity, pool: web::Data<Pool>) -> Result<HttpResponse, ServiceError> {
let _ = crate::utils::is_admin(&id)?;
let res = web::block(move || get_all_users(pool)).await;
match res {
Ok(user) => Ok(HttpResponse::Ok().json(&user)),
Err(e) => match e {
BlockingError::Error(service_error) => Err(service_error),
BlockingError::Canceled => Err(ServiceError::InternalServerError),
},
}
}
pub async fn change_account_type(
id: Identity,
user_id: web::Path<String>,
pool: web::Data<Pool>,
) -> Result<HttpResponse, ServiceError> {
let _ = is_admin(&id)?;
let user_id = match user_id.into_inner().parse::<i64>() {
Ok(v) => v,
Err(_) => return Err(ServiceError::BadRequest("invalid user id".to_owned())),
};
let res = web::block(move || change_account(user_id, pool)).await;
match res {
Ok(s) => Ok(HttpResponse::Ok().json(serde_json::json!({ "msg": s }))),
Err(e) => match e {
BlockingError::Error(service_err) => Err(service_err),
BlockingError::Canceled => Err(ServiceError::InternalServerError),
},
}
}
fn change_account(user_id: i64, pool: web::Data<Pool>) -> Result<String, ServiceError> {
use crate::schema::users::dsl::{clearance, users};
let conn = &pool.get().unwrap();
let mut return_string = String::new();
let target = users.find(user_id);
let current_clearance = target.select(clearance).get_result::<bool>(conn)?;
if current_clearance {
return_string.push_str("change account type from admin to normal user");
let _ = diesel::update(target)
.set(clearance.eq_all(false))
.execute(conn)?;
} else {
return_string.push_str("change account type from normal user to admin");
let _ = diesel::update(target)
.set(clearance.eq_all(true))
.execute(conn)?;
}
Ok(return_string)
}
fn get_all_users(pool: web::Data<Pool>) -> Result<Vec<SlimUser>, ServiceError> {
use crate::models::user::User;
use crate::schema::users::dsl::users;
let conn = &pool.get().unwrap();
let all_users = users.load::<User>(conn)?;
Ok(all_users.into_iter().map(|u| u.into()).collect())
}
fn insert_user(user_data: UserData, pool: web::Data<Pool>) -> Result<SlimUser, ServiceError> {
use crate::models::user::{User, UserInsert};
use crate::schema::users::dsl::users;
let password = crate::utils::hash_password(&user_data.password)?;
let new_user = UserInsert::from_details(user_data.name, user_data.email, password);
let conn = &pool.get().unwrap();
let inserted_user = diesel::insert_into(users)
.values(&new_user)
.get_result::<User>(conn)?;
Ok(inserted_user.into())
}
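// Hypothetical wiring sketch (not part of this file): one way these handlers could be
// mounted on an actix-web `App`; the paths and surrounding setup are assumptions and
// would live in the crate's server/main module.
//
//     App::new()
//         .data(pool.clone())
//         .route("/users", web::post().to(post_user))
//         .route("/users", web::get().to(get_users))
//         .route("/users/{id}/account_type", web::put().to(change_account_type))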
| true |
eb39578b419121133203a9218e7c388b037db098
|
Rust
|
flyq/datastruct-algorithm
|
/project_euler/src/p009/src/main.rs
|
UTF-8
| 998 | 2.953125 | 3 |
[
"MIT"
] |
permissive
|
fn main() {
let mut a = 0;
let mut b = 0;
for i in 1..1000 {
for j in i+1..1000 {
if 2000*i + 2000*j - 2*i*j == 1000000 {
println!("{},{}", i, j);
a = i;
b = j;
}
}
}
println!("{}^2 + {}^2 = {}^2 = {}", a, b, ((a*a + b*b) as f64).sqrt(), (a*a + b*b));
println!("{} + {} + {} = {}", a, b, ((a*a + b*b) as f64).sqrt(), a+b+((a*a + b*b) as f64).sqrt() as i32);
println!("a*b*c: {}", a*b*((a*a + b*b) as f64).sqrt() as i32);
}
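/*
Why the loop condition works (added note): with a + b + c = 1000 and a^2 + b^2 = c^2,
substituting c = 1000 - a - b gives
    a^2 + b^2 = (1000 - a - b)^2
              = 1000000 + a^2 + b^2 - 2000a - 2000b + 2ab
which reduces to 2000a + 2000b - 2ab = 1000000, exactly the check used in the loop above.
*/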
/*
console:
cargo run
Compiling p009 v0.1.0 (/home/flyq/workspaces/flyq/projecteuler/src/p009)
Finished dev [unoptimized + debuginfo] target(s) in 0.18s
Running `/home/flyq/workspaces/flyq/projecteuler/src/p009/target/debug/p009`
200,375
200^2 + 375^2 = 425^2 = 180625
200 + 375 + 425 = 1000
a*b*c: 31875000
website:
Congratulations, the answer you gave to problem 9 is correct.
You are the 332094th person to have solved this problem.
*/
| true |
3808448ff8f4e2c12393b199531689ba5443a057
|
Rust
|
ch3rub1/adventofcode2018
|
/day3/src/rectangle.rs
|
UTF-8
| 293 | 3.234375 | 3 |
[] |
no_license
|
use point::Point;
#[derive(PartialEq,Debug)]
pub struct Rectangle {
pub start: Point,
pub width: u32,
pub height: u32
}
impl Rectangle {
pub fn new(start: Point, width: u32, height: u32) -> Rectangle {
Rectangle { start: start, width: width, height: height }
}
}
| true |
3326a6ba6ac0c0f0b5fe7c26970eb4966a2a0da2
|
Rust
|
JasonYuan869/AutoCampusCheckBot
|
/src/main.rs
|
UTF-8
| 2,992 | 2.53125 | 3 |
[] |
no_license
|
use std::env;
use regex::Regex;
use reqwest::{Method, header};
use lazy_static::lazy_static;
use reqwest::header::{HeaderMap, HeaderValue};
use serenity::{
async_trait,
client::bridge::gateway::GatewayIntents,
model::{channel::Message, gateway::Ready},
prelude::*,
};
// Just use a really generic User-Agent header
const USER_AGENT: &str = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/94.0.4606.81 Safari/537.36";
// Hardcode the data in the form
const FORM_DATA: &str = "q1=No&q2=No&q3=No&q4=No&q5=No&q6=No&q7=No&q8=No&what=Submit";
lazy_static! {
static ref HTTP_CLIENT: reqwest::Client = reqwest::Client::builder()
.user_agent(USER_AGENT)
.build()
.expect("Error creating HTTP client");
}
async fn send_form(url: &str) -> Result<reqwest::Response, reqwest::Error> {
let request = HTTP_CLIENT.request(Method::POST, url)
.body(FORM_DATA)
.headers({
let mut map = HeaderMap::new();
map.insert(
header::CONTENT_TYPE,
HeaderValue::from_static("application/x-www-form-urlencoded")
);
map
})
.build()?;
HTTP_CLIENT.execute(request).await
}
struct Handler;
#[async_trait]
impl EventHandler for Handler {
async fn message(&self, ctx: Context, msg: Message) {
if msg.author.bot { return }
lazy_static! {
static ref KEY_REGEX: Regex = Regex::new(r"checkin\.uwaterloo\.ca/campuscheckin/screen.php?").unwrap();
static ref CONFIRM_REGEX: Regex = Regex::new(r"Thank you for").unwrap();
}
let url = &msg.content;
println!("Received message from {}", msg.author.name);
let mut reply = "Error completing form";
if let Some(_) = KEY_REGEX.find(url) {
if let Ok(resp) = send_form(url).await {
if let Some(_) = CONFIRM_REGEX.find(&resp.text().await.unwrap()) {
reply = "Successfully completed form! Check your email.";
println!("Successfully completed form for {}", msg.author.name);
}
}
} else {
reply = "Invalid URL."
}
msg.reply(ctx, reply).await.expect("Error sending message");
}
async fn ready(&self, _ctx: Context, data_about_bot: Ready) {
println!("{} is connected with id {}", data_about_bot.user.name, data_about_bot.user.id);
}
}
#[tokio::main]
async fn main() {
// Configure the client with your Discord bot token in the environment.
let token = env::var("DISCORD_TOKEN").expect("Expected a token in the environment");
// Build our client.
let mut client = Client::builder(token)
.event_handler(Handler)
.intents(GatewayIntents::DIRECT_MESSAGES)
.await
.expect("Error creating Discord client");
if let Err(why) = client.start().await {
println!("Client error: {:?}", why);
}
}
| true |
b01c3473bb9aac856f09bc06a2d3ab89024f49a0
|
Rust
|
scottdj92/rust-book
|
/functions/src/main.rs
|
UTF-8
| 522 | 4.28125 | 4 |
[
"MIT"
] |
permissive
|
fn main() {
println!("Hello, world!");
another_function(5);
flow_control(3);
flow_control(7);
for_loop();
}
fn another_function(x: u32) {
println!("Another function");
println!("the value of x is {}", x);
}
fn flow_control(num: u32) {
if num < 5 {
println!("condition was true");
} else {
println!("condition was false");
}
}
fn for_loop() {
let a = [10, 20, 30, 40, 50];
for element in a.iter() {
println!("The value is: {}", element);
}
}
| true |
be9c4dd619739a52d52b9356f9541ce0312eeb1c
|
Rust
|
chromium/chromium
|
/third_party/rust/unicode_linebreak/v0_1/crate/src/shared.rs
|
UTF-8
| 4,451 | 2.8125 | 3 |
[
"Apache-2.0",
"BSD-3-Clause",
"GPL-1.0-or-later",
"MIT",
"LGPL-2.0-or-later"
] |
permissive
|
/// Unicode line breaking class.
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
#[repr(u8)]
pub enum BreakClass {
// Non-tailorable
/// Cause a line break (after)
Mandatory,
/// Cause a line break (after), except between CR and LF
CarriageReturn,
/// Cause a line break (after)
LineFeed,
/// Prohibit a line break between the character and the preceding character
CombiningMark,
/// Cause a line break (after)
NextLine,
/// Do not occur in well-formed text
Surrogate,
/// Prohibit line breaks before and after
WordJoiner,
/// Provide a break opportunity
ZeroWidthSpace,
/// Prohibit line breaks before and after
NonBreakingGlue,
/// Enable indirect line breaks
Space,
/// Prohibit line breaks within joiner sequences
ZeroWidthJoiner,
// Break opportunities
/// Provide a line break opportunity before and after the character
BeforeAndAfter,
/// Generally provide a line break opportunity after the character
After,
/// Generally provide a line break opportunity before the character
Before,
/// Provide a line break opportunity after the character, except in numeric context
Hyphen,
/// Provide a line break opportunity contingent on additional information
Contingent,
// Characters prohibiting certain breaks
/// Prohibit line breaks before
ClosePunctuation,
/// Prohibit line breaks before
CloseParenthesis,
/// Prohibit line breaks before
Exclamation,
/// Allow only indirect line breaks between pairs
Inseparable,
/// Allow only indirect line breaks before
NonStarter,
/// Prohibit line breaks after
OpenPunctuation,
/// Act like they are both opening and closing
Quotation,
// Numeric context
/// Prevent breaks after any and before numeric
InfixSeparator,
/// Form numeric expressions for line breaking purposes
Numeric,
/// Do not break following a numeric expression
Postfix,
/// Do not break in front of a numeric expression
Prefix,
/// Prevent a break before, and allow a break after
Symbol,
// Other characters
/// Act like AL when the resolved EAW is N; otherwise, act as ID
Ambiguous,
/// Are alphabetic characters or symbols that are used with alphabetic characters
Alphabetic,
/// Treat as NS or ID for strict or normal breaking.
ConditionalJapaneseStarter,
/// Do not break from following Emoji Modifier
EmojiBase,
/// Do not break from preceding Emoji Base
EmojiModifier,
/// Form Korean syllable blocks
HangulLvSyllable,
/// Form Korean syllable blocks
HangulLvtSyllable,
/// Do not break around a following hyphen; otherwise act as Alphabetic
HebrewLetter,
/// Break before or after, except in some numeric context
Ideographic,
/// Form Korean syllable blocks
HangulLJamo,
/// Form Korean syllable blocks
HangulVJamo,
/// Form Korean syllable blocks
HangulTJamo,
/// Keep pairs together. For pairs, break before and after other classes
RegionalIndicator,
/// Provide a line break opportunity contingent on additional, language-specific context analysis
ComplexContext,
/// Have as yet unknown line breaking behavior or unassigned code positions
Unknown,
}
use BreakClass::{
After as BA, Alphabetic as AL, Ambiguous as AI, Before as BB, BeforeAndAfter as B2,
CarriageReturn as CR, CloseParenthesis as CP, ClosePunctuation as CL, CombiningMark as CM,
ComplexContext as SA, ConditionalJapaneseStarter as CJ, Contingent as CB, EmojiBase as EB,
EmojiModifier as EM, Exclamation as EX, HangulLJamo as JL, HangulLvSyllable as H2,
HangulLvtSyllable as H3, HangulTJamo as JT, HangulVJamo as JV, HebrewLetter as HL,
Hyphen as HY, Ideographic as ID, InfixSeparator as IS, Inseparable as IN, LineFeed as LF,
Mandatory as BK, NextLine as NL, NonBreakingGlue as GL, NonStarter as NS, Numeric as NU,
OpenPunctuation as OP, Postfix as PO, Prefix as PR, Quotation as QU, RegionalIndicator as RI,
Space as SP, Surrogate as SG, Symbol as SY, Unknown as XX, WordJoiner as WJ,
ZeroWidthJoiner as ZWJ, ZeroWidthSpace as ZW,
};
const ALLOWED_BREAK_BIT: u8 = 0x80;
const MANDATORY_BREAK_BIT: u8 = 0x40;
const UNIFORM_PAGE: usize = 0x8000;
#[allow(non_upper_case_globals)]
const eot: u8 = 43;
#[allow(non_upper_case_globals)]
const sot: u8 = 44;
| true |
a70ff510a126d759e827cdc89a045bfaab530b65
|
Rust
|
data-niklas/traps
|
/src/lib.rs
|
UTF-8
| 12,565 | 3 | 3 |
[] |
no_license
|
use std::sync::Arc;
#[derive(Debug, Clone)]
pub struct Point {
pub x: i16,
pub y: i16,
}
impl Point {
pub fn new(x: i16, y: i16) -> Point {
Point { x, y }
}
pub fn matched(
p1: &Point,
p2: &Point,
p3: &Point,
p4: &Point,
xscale: f32,
yscale: f32,
tolerance: f32,
) -> bool {
let xdif1 = p2.x - p1.x;
let ydif1 = p2.y - p1.y;
let xdif2 = p4.x - p3.x;
let ydif2 = p4.y - p3.y;
(xdif1 as f32 * xscale - xdif2 as f32).abs() < tolerance
&& (ydif1 as f32 * yscale - ydif2 as f32).abs() < tolerance
}
pub fn bigger(&self, p1: &Point) -> bool {
self.x > p1.x && self.y > p1.y
}
pub fn smaller(&self, p1: &Point) -> bool {
self.x < p1.x && self.y < p1.y
}
pub fn between(&self, p1: &Point, p2: &Point) -> bool {
self.smaller(p2) && self.bigger(p1)
}
pub fn bounds(points: &Vec<Point>) -> (i16, i16) {
let mut smallx = i16::max_value();
let mut smally = i16::max_value();
let mut bigx = i16::min_value();
let mut bigy = i16::min_value();
for point in points {
// the first point may be both the current minimum and maximum, so check all four bounds independently
if point.x > bigx {
bigx = point.x;
}
if point.x < smallx {
smallx = point.x;
}
if point.y > bigy {
bigy = point.y;
}
if point.y < smally {
smally = point.y;
}
}
(bigx - smallx, bigy - smally)
}
}
pub struct GestureRecorder {
pub is_tracking: bool,
fast_trigger: bool,
points: Vec<Point>,
gestures: Vec<Arc<Gesture>>,
tracked_gestures: Vec<TrackedGesture>,
listener: Box<dyn Fn(Arc<Gesture>) -> bool>,
}
//Constraints are checked at the end
//Fast_trigger only works with non-relative gestures
impl GestureRecorder {
pub fn new(listener: Box<dyn Fn(Arc<Gesture>) -> bool>) -> GestureRecorder {
GestureRecorder {
is_tracking: false,
fast_trigger: false,
points: Vec::new(),
gestures: Vec::new(),
tracked_gestures: Vec::new(),
listener,
}
}
pub const DEFAULT_TOLERANCE: f32 = 20.0;
pub fn set_fast_trigger(&mut self, fast_trigger: bool) {
if !self.is_tracking {
self.fast_trigger = fast_trigger;
}
}
pub fn fast_trigger_activated(&self) -> bool {
self.fast_trigger
}
pub fn register_gesture(&mut self, mut gesture: Gesture) {
gesture.calculate_bounds();
self.gestures.push(Arc::new(gesture));
}
pub fn start(&mut self) {
self.points.clear();
self.tracked_gestures.clear();
for gesture in &self.gestures {
self.tracked_gestures
.push(TrackedGesture::new(gesture.clone()));
}
self.is_tracking = true;
}
    pub fn stop(&mut self) {
        if !self.fast_trigger && !self.points.is_empty() {
            let mut found_gesture: Option<Arc<Gesture>> = None;
            let (pwidth, pheight) = Point::bounds(&self.points);
            let pfirst = self.points.first().unwrap().clone();
            // Move the tracked gestures out of `self` so they can be mutated while the
            // recorded points are still read by the matching routine.
            let mut tracked_gestures = std::mem::take(&mut self.tracked_gestures);
            for tracked_gesture in &mut tracked_gestures {
                if tracked_gesture.is_relative() {
                    tracked_gesture.determine_scale(pwidth, pheight);
                }
                if self.check_likeliest_match(tracked_gesture, &pfirst) {
                    // Prefer the gesture with more points; on a tie prefer a non-relative one.
                    let is_better = match &found_gesture {
                        Some(found) => {
                            tracked_gesture.points_count() > found.points_count()
                                || (tracked_gesture.points_count() == found.points_count()
                                    && !tracked_gesture.is_relative())
                        }
                        None => true,
                    };
                    if is_better {
                        found_gesture = Some(tracked_gesture.gesture.clone());
                    }
                }
            }
            self.tracked_gestures = tracked_gestures;
            if let Some(found_gesture_inner) = found_gesture {
                self.notify(found_gesture_inner);
            }
        }
        self.is_tracking = false;
    }
pub fn track(&mut self, plast: Point) {
/*if self.fast_trigger {
let mut i = 0;
while i < self.gestures.len() {
let mut tracked_gesture = self.tracked_gestures.get_mut(i).unwrap();
let gesture = &tracked_gesture.gesture;
let glast = gesture.get_point(tracked_gesture.matched_points);
if self.points.is_empty() {
tracked_gesture.inc_matched();
} else {
let pfirst = self.points.first().unwrap();
let gfirst = gesture.first();
if Point::matched(pfirst, &plast, gfirst, glast, 1.0, 1.0, gesture.tolerance) {
tracked_gesture.inc_matched();
}
}
if tracked_gesture.all_matched() {
if gesture.constraints_matching(&self.points) {
if self.notify(&tracked_gesture.gesture) {
self.stop();
break;
} else {
self.tracked_gestures.remove(i);
}
} else {
self.tracked_gestures.remove(i);
}
} else {
i += 1;
}
}
}
*/
self.points.push(plast);
}
    fn check_likeliest_match(
        &self,
        tracked_gesture: &mut TrackedGesture,
        pfirst: &Point,
    ) -> bool {
        let gfirst = tracked_gesture.first().clone();
        for point in &self.points {
            let glast = tracked_gesture
                .get_point(tracked_gesture.matched_points)
                .clone();
            if Point::matched(
                pfirst,
                point,
                &gfirst,
                &glast,
                tracked_gesture.xscale,
                tracked_gesture.yscale,
                tracked_gesture.tolerance(),
            ) {
                tracked_gesture.inc_matched();
            }
            if tracked_gesture.all_matched() {
                return tracked_gesture.constraints_matching(&self.points);
            }
        }
        false
    }
pub fn notify(&self, gesture: Arc<Gesture>) -> bool {
(*self.listener)(gesture)
}
}
pub struct GestureAttributes<'a> {
pub name: &'a str,
pub action: &'a str,
pub is_relative: bool,
pub tolerance: f32,
}
impl<'a> GestureAttributes<'a> {
pub fn default() -> GestureAttributes<'a> {
GestureAttributes {
name: "",
action: "",
is_relative: false,
tolerance: GestureRecorder::DEFAULT_TOLERANCE,
}
}
}
//Points should start at (0,0)
//Positioning is possible through constraints
#[derive(Debug, Clone)]
pub struct Gesture {
points: Vec<Point>,
pub is_relative: bool,
constraints: Vec<Constraint>,
pub tolerance: f32,
pub name: String,
pub action: String,
width: i16,
height: i16,
}
impl Gesture {
pub fn new(attributes: &GestureAttributes) -> Gesture {
Gesture {
is_relative: attributes.is_relative,
name: attributes.name.to_owned(),
action: attributes.action.to_owned(),
points: Vec::new(),
tolerance: attributes.tolerance,
constraints: Vec::new(),
width: 0,
height: 0,
}
}
pub fn calculate_bounds(&mut self) {
let (width, height) = Point::bounds(&self.points);
self.width = width;
self.height = height;
}
pub fn add_points(&mut self, mut points: Vec<Point>) {
self.points.append(&mut points);
}
pub fn add_point(&mut self, point: Point) {
self.points.push(point);
}
pub fn get_point(&self, index: usize) -> &Point {
self.points.get(index).unwrap()
}
pub fn constraints_matching(&self, points: &Vec<Point>) -> bool {
let first = points.first().unwrap();
let last = points.last().unwrap();
for constraint in &self.constraints {
let p;
let area;
match constraint {
Constraint::StartArea(a) => {
p = first;
area = a;
}
Constraint::StopArea(a) => {
p = last;
area = a;
}
}
match area {
Area::Between(start, end) => {
if !p.between(start, end) {
return false;
}
}
Area::Smaller(max) => {
if !p.smaller(max) {
return false;
}
}
Area::Bigger(min) => {
if !p.bigger(min) {
return false;
}
}
}
}
true
}
pub fn first(&self) -> &Point {
self.points.first().unwrap()
}
pub fn last(&self) -> &Point {
self.points.last().unwrap()
}
pub fn points_count(&self) -> usize {
self.points.len()
}
}
#[derive(Debug, Clone)]
pub struct TrackedGesture {
gesture: Arc<Gesture>,
pub matched_points: usize,
pub xscale: f32,
pub yscale: f32,
}
impl TrackedGesture {
pub fn new(gesture: Arc<Gesture>) -> TrackedGesture {
TrackedGesture {
gesture,
matched_points: 0,
xscale: 1.0,
yscale: 1.0,
}
}
pub fn inc_matched(&mut self) {
self.matched_points += 1;
}
pub fn all_matched(&self) -> bool {
self.matched_points == self.gesture.points.len()
}
pub fn determine_scale(&mut self, pwidth: i16, pheight: i16) {
self.xscale = self.gesture.width as f32 / pwidth as f32;
self.yscale = self.gesture.height as f32 / pheight as f32;
}
pub fn is_relative(&self) -> bool {
self.gesture.is_relative
}
pub fn first(&self) -> &Point {
self.gesture.points.first().unwrap()
}
pub fn last(&self) -> &Point {
self.gesture.points.last().unwrap()
}
pub fn points_count(&self) -> usize {
self.gesture.points.len()
}
pub fn gesture(&self) -> &Gesture {
&self.gesture
}
pub fn get_point(&self, index: usize) -> &Point {
self.gesture.get_point(index)
}
pub fn constraints_matching(&self, points: &Vec<Point>) -> bool {
self.gesture.constraints_matching(points)
}
pub fn tolerance(&self) -> f32 {
self.gesture.tolerance
}
}
#[derive(Debug, Clone)]
pub enum Constraint {
StartArea(Area),
StopArea(Area),
}
#[derive(Debug, Clone)]
pub enum Area {
Smaller(Point),
Bigger(Point),
Between(Point, Point),
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn first() {
let mut recorder = GestureRecorder::new(Box::new(|gesture| {
println!("Gesture was found {:?}", gesture);
true
}));
let mut attributes = GestureAttributes::default();
attributes.name = "Right-swipe";
attributes.action = "some action";
attributes.is_relative = true;
let mut gesture = Gesture::new(&attributes);
gesture.add_points(vec![Point::new(0, 0), Point::new(-100, 0)]);
recorder.register_gesture(gesture);
recorder.start();
recorder.track(Point::new(300, 422));
recorder.track(Point::new(500, 488));
recorder.stop();
}
}
| true | c510a307758c92640c491c1f9b304679507c421e | Rust | frankier/softcomputingforsoftpeople | /src/utils/real.rs | UTF-8 | 3,531 | 2.828125 | 3 | [] | no_license |
extern crate nalgebra as na;
use na::{MatrixMN, wrap, DimName, DefaultAllocator, Scalar, abs, clamp};
use alga::general::Ring;
use num_traits::sign::Signed;
use na::allocator::Allocator;
use rand::distributions::range::SampleRange;
use rand::Rng;
#[derive(Clone)]
pub struct Hypercube<N: Scalar + Ring + Signed + SampleRange + PartialOrd, R: DimName, C: DimName>
where DefaultAllocator: Allocator<N, R, C>
{
min: MatrixMN<N, R, C>,
max: MatrixMN<N, R, C>,
range: MatrixMN<N, R, C>,
}
impl<N: Scalar + Ring + Signed + SampleRange + PartialOrd, R: DimName, C: DimName> Hypercube<N,
R,
C>
where DefaultAllocator: Allocator<N, R, C>
{
pub fn new(min: MatrixMN<N, R, C>, max: MatrixMN<N, R, C>) -> Hypercube<N, R, C> {
assert!(min < max);
let range = &max - &min;
Hypercube {
min,
max,
range: range,
}
}
pub fn sample<G: Rng>(&self, rng: &mut G) -> MatrixMN<N, R, C>
where DefaultAllocator: Allocator<N, R, C>
{
self.min
.zip_map(&self.max, |min_e, max_e| rng.gen_range(min_e, max_e))
}
//pub fn map
//pub fn zip_map
pub fn go_nearest_torus(&self,
from: &MatrixMN<N, R, C>,
to: &MatrixMN<N, R, C>)
-> MatrixMN<N, R, C> {
// That the nearest point will be the nearest in each dimension follows for any normed
// vector space. This follows from the triangle inequality.
let dir =
MatrixMN::<N, R, C>::from_iterator(izip!(from.iter(), to.iter(), self.range.iter())
.map(|(fd, td, rd)| {
// Imagine hypercube boundaries at | and fd could be in either of
// two positions:
// td |fd td fd| td
// First find the other td which would be nearest:
let other_td;
if td > fd {
other_td = *td - *rd;
} else {
other_td = *td + *rd;
}
// Now find which of other td and td are nearest and return vector in direction of
// closest
let towards_td = *td - *fd;
let towards_other_td = other_td - *fd;
if abs(&towards_td) < abs(&towards_other_td) {
towards_td
} else {
towards_other_td
}
}));
//println!("from {:?} to {:?} is nearest at {:?} dir is {:?}", from, to, from + &dir, &dir);
//nearest - from
dir
}
pub fn place_torus(&self, point: &MatrixMN<N, R, C>) -> MatrixMN<N, R, C> {
MatrixMN::<N, R, C>::from_iterator(izip!(point.iter(), self.min.iter(), self.max.iter())
.map(|(pd, mind, maxd)| wrap(*pd, *mind, *maxd)))
}
pub fn clamp(&self, point: &MatrixMN<N, R, C>) -> MatrixMN<N, R, C> {
MatrixMN::<N, R, C>::from_iterator(izip!(point.iter(), self.min.iter(), self.max.iter())
.map(|(pd, mind, maxd)| {
clamp(*pd, *mind, *maxd)
}))
}
}
| true | 402a49566f8979371425d3380e2eeb57cc44e92c | Rust | FigyTuna/chess | /src/main.rs | UTF-8 | 43,237 | 2.890625 | 3 | [] | no_license |
// Imports
use std::collections::HashMap;
use std::collections::VecDeque;
use std::time::Instant;
// Data Structures
#[derive(Hash, Eq, PartialEq, Copy, Clone)]
enum PieceType {
Pawn,
Rook,
Knight,
Bishop,
Queen,
King
}
#[derive(Hash, Eq, PartialEq, Copy, Clone)]
struct Pos(usize, usize);
#[derive(Hash, Eq, PartialEq, Copy, Clone)]
struct Piece {
piece_type: PieceType,
team: bool
}
struct Board {
board: [[Option<Piece>; 8]; 8],
pieces: HashMap<Piece, Vec<Pos>>,
to_move: bool,
running: Option<Pos>,
can_castle: [bool; 4],
positions: HashMap<[u64;5], u32>,
info_eval_count: u64
}
#[derive(Hash, Eq, PartialEq, Copy, Clone)]
enum MoveNotationData {
Move,
KSC,
QSC,
PQ,
PK,
PB,
PR
}
#[derive(Hash, Eq, PartialEq, Copy, Clone)]
struct Move {
piece: Piece,
from: Pos,
to: Pos,
promote: bool,
castle_rook: Option<(Pos, Pos)>,
captured: Option<(Piece, Pos)>,
running: Option<Pos>,
was_running: Option<Pos>,
can_castle: [bool; 4],
prev_can_castle: [bool; 4],
notation_data: MoveNotationData
}
#[derive(Copy, Clone)]
enum Rating {
Evaluation{score: i64},
Checkmate{score: bool, turns: u32}
}
struct Desc {
branches: Box<Vec<Move>>,
rating: Rating
}
// Global Options
const MAX_CPU_MOVES: i32 = 150;
const DEBUG: bool = false;
const ENGINE_INFO: bool = false;
// Constants
const PAWN_VALUE: i64 = 1;
const BISHOP_VALUE: i64 = 3;
const KNIGHT_VALUE: i64 = 3;
const ROOK_VALUE: i64 = 5;
const QUEEN_VALUE: i64 = 9;
const CASTLE_WHITE: usize = 0;
const CASTLE_BLACK: usize = 2;
const KING_SIDE_CASTLE: usize = 0;
const QUEEN_SIDE_CASTLE: usize = 1;
// Initialization
fn initial_board() -> Box<Board> {
let mut board = Box::new(Board{
board: [[None; 8]; 8],
pieces: HashMap::new(),
to_move: true,
running: None,
can_castle: [true; 4],
positions: HashMap::new(),
info_eval_count: 0
});
fn init_piece_full(board: &mut [[Option<Piece>; 8]; 8], pieces: &mut HashMap<Piece, Vec<Pos>>, piece: Piece, pos: Pos) -> () {
board[pos.0][pos.1] = Some(piece);
let e = pieces.entry(piece).or_insert(Vec::new());
e.push(pos);
};
let mut init_piece = |piece, pos| init_piece_full(&mut board.board, &mut board.pieces, piece, pos);
init_piece(Piece{piece_type: PieceType::Pawn, team: true}, Pos(1, 0));
init_piece(Piece{piece_type: PieceType::Pawn, team: true}, Pos(1, 1));
init_piece(Piece{piece_type: PieceType::Pawn, team: true}, Pos(1, 2));
init_piece(Piece{piece_type: PieceType::Pawn, team: true}, Pos(1, 3));
init_piece(Piece{piece_type: PieceType::Pawn, team: true}, Pos(1, 4));
init_piece(Piece{piece_type: PieceType::Pawn, team: true}, Pos(1, 5));
init_piece(Piece{piece_type: PieceType::Pawn, team: true}, Pos(1, 6));
init_piece(Piece{piece_type: PieceType::Pawn, team: true}, Pos(1, 7));
init_piece(Piece{piece_type: PieceType::Rook, team: true}, Pos(0, 0));
init_piece(Piece{piece_type: PieceType::Knight, team: true}, Pos(0, 1));
init_piece(Piece{piece_type: PieceType::Bishop, team: true}, Pos(0, 2));
init_piece(Piece{piece_type: PieceType::Queen, team: true}, Pos(0, 3));
init_piece(Piece{piece_type: PieceType::King, team: true}, Pos(0, 4));
init_piece(Piece{piece_type: PieceType::Bishop, team: true}, Pos(0, 5));
init_piece(Piece{piece_type: PieceType::Knight, team: true}, Pos(0, 6));
init_piece(Piece{piece_type: PieceType::Rook, team: true}, Pos(0, 7));
init_piece(Piece{piece_type: PieceType::Pawn, team: false}, Pos(6, 1));
init_piece(Piece{piece_type: PieceType::Pawn, team: false}, Pos(6, 2));
init_piece(Piece{piece_type: PieceType::Pawn, team: false}, Pos(6, 0));
init_piece(Piece{piece_type: PieceType::Pawn, team: false}, Pos(6, 3));
init_piece(Piece{piece_type: PieceType::Pawn, team: false}, Pos(6, 4));
init_piece(Piece{piece_type: PieceType::Pawn, team: false}, Pos(6, 5));
init_piece(Piece{piece_type: PieceType::Pawn, team: false}, Pos(6, 6));
init_piece(Piece{piece_type: PieceType::Pawn, team: false}, Pos(6, 7));
init_piece(Piece{piece_type: PieceType::Rook, team: false}, Pos(7, 0));
init_piece(Piece{piece_type: PieceType::Knight, team: false}, Pos(7, 1));
init_piece(Piece{piece_type: PieceType::Bishop, team: false}, Pos(7, 2));
init_piece(Piece{piece_type: PieceType::Queen, team: false}, Pos(7, 3));
init_piece(Piece{piece_type: PieceType::King, team: false}, Pos(7, 4));
init_piece(Piece{piece_type: PieceType::Bishop, team: false}, Pos(7, 5));
init_piece(Piece{piece_type: PieceType::Knight, team: false}, Pos(7, 6));
init_piece(Piece{piece_type: PieceType::Rook, team: false}, Pos(7, 7));
board
}
// Board Manipulation
fn remove_item<V>(v: &mut Vec<V>, x: &V) -> () where V: PartialEq {
let mut idx = None;
let mut i = 0;
for y in v.iter() {
if x == y {
idx = Some(i);
break;
}
i += 1;
};
match idx {
Some(i) => { v.remove(i); },
None => ()
};
}
fn perform_move(board: &mut Board, m: &Move) -> () {
match m.captured {
Some((piece, pos)) => {
if DEBUG && piece.piece_type == PieceType::King {
println!("Error: King captured {}", notate_move(m));
}
remove_item(&mut board.pieces.entry(piece).or_default(), &pos);
board.board[pos.0][pos.1] = None
},
None => ()
}
if m.promote {
remove_item(&mut board.pieces.entry(Piece{piece_type: PieceType::Pawn, team: m.piece.team}).or_default(), &m.from);
board.pieces.entry(m.piece).or_default().push(m.to);
}
else {
for pos in board.pieces.entry(m.piece).or_default() {
if *pos == m.from { *pos = m.to }
};
}
board.board[m.to.0][m.to.1] = Some(m.piece);
board.board[m.from.0][m.from.1] = None;
match m.castle_rook {
Some((from, to)) => {
for pos in board.pieces.entry(Piece{piece_type: PieceType::Rook, team: m.piece.team}).or_default() {
if *pos == from { *pos = to }
};
board.board[to.0][to.1] = Some(Piece{piece_type: PieceType::Rook, team: m.piece.team});
board.board[from.0][from.1] = None;
},
None => ()
}
board.running = m.running;
board.can_castle = m.can_castle;
board.to_move = !board.to_move;
}
fn undo_move(board: &mut Board, m: &Move) -> () {
if m.promote {
remove_item(board.pieces.entry(m.piece).or_default(), &m.to);
board.pieces.entry(Piece{piece_type: PieceType::Pawn, team: m.piece.team}).or_default().push(m.from);
board.board[m.from.0][m.from.1] = Some(Piece{piece_type: PieceType::Pawn, team: m.piece.team});
}
else {
for pos in board.pieces.entry(m.piece).or_default() {
if *pos == m.to { *pos = m.from }
};
board.board[m.from.0][m.from.1] = Some(m.piece);
}
board.board[m.to.0][m.to.1] = None;
match m.captured {
Some((piece, pos)) => {
board.board[pos.0][pos.1] = Some(piece);
board.pieces.entry(piece).or_default().push(pos);
},
None => ()
}
match m.castle_rook {
Some((from, to)) => {
for pos in board.pieces.entry(Piece{piece_type: PieceType::Rook, team: m.piece.team}).or_default() {
if *pos == to { *pos = from }
};
board.board[from.0][from.1] = Some(Piece{piece_type: PieceType::Rook, team: m.piece.team});
board.board[to.0][to.1] = None;
},
None => ()
}
board.running = m.was_running;
board.can_castle = m.prev_can_castle;
board.to_move = !board.to_move;
}
fn increment_position(board: &mut Board, h: [u64;5]) -> () {
let n = board.positions.entry(h).or_insert(0);
*n += 1;
}
fn decrement_position(board: &mut Board, h: [u64;5]) -> () {
let n = board.positions.entry(h).or_insert(1);
*n -= 1;
}
// Move Generation Helpers
fn basic_move(board: &Board, piece: Piece, from: &Pos, to: &Pos, captured: Option<Piece>) -> Move {
let capturedp = match captured {
Some(piece) => Some((piece, *to)),
None => None
};
Move{
piece: piece,
from: *from,
to: *to,
promote: false,
castle_rook: None,
captured: capturedp,
running: None,
was_running: board.running,
can_castle: update_can_castle(from, board.can_castle),
prev_can_castle: board.can_castle,
notation_data: MoveNotationData::Move
}
}
fn add_promotion_moves(board: &mut Board, moves: &mut Vec<Move>, from: &Pos, to: &Pos, captured: Option<Piece>) -> () {
fn promote_move(board: &Board, piece: Piece, from: &Pos, to: &Pos, captured: Option<Piece>, mnd: MoveNotationData) -> Move {
let capturedp = match captured {
Some(piece) => Some((piece, *to)),
None => None
};
Move{
piece: piece,
from: *from,
to: *to,
promote: true,
castle_rook: None,
captured: capturedp,
running: None,
was_running: board.running,
can_castle: update_can_castle(from, board.can_castle),
prev_can_castle: board.can_castle,
notation_data: mnd
}
};
add_if_no_check(board, moves, promote_move(board, Piece{piece_type: PieceType::Bishop, team: board.to_move}, from, to, captured, MoveNotationData::PB));
add_if_no_check(board, moves, promote_move(board, Piece{piece_type: PieceType::Knight, team: board.to_move}, from, to, captured, MoveNotationData::PK));
add_if_no_check(board, moves, promote_move(board, Piece{piece_type: PieceType::Rook, team: board.to_move}, from, to, captured, MoveNotationData::PR));
add_if_no_check(board, moves, promote_move(board, Piece{piece_type: PieceType::Queen, team: board.to_move}, from, to, captured, MoveNotationData::PQ));
}
fn update_can_castle(pos: &Pos, c: [bool; 4]) -> [bool; 4] {
match pos {
Pos(0, 0) => [c[0], false, c[2], c[3]],
Pos(0, 7) => [false, c[1], c[2], c[3]],
Pos(7, 0) => [c[0], c[1], c[2], false],
Pos(7, 7) => [c[0], c[1], false, c[3]],
Pos(0, 4) => [false, false, c[2], c[3]],
Pos(7, 4) => [c[0], c[1], false, false],
_ => [c[0], c[1], c[2], c[3]]
}
}
fn add_if_no_check(board: &mut Board, moves: &mut Vec<Move>, m: Move) -> () {
let temp = if DEBUG { Some(board.board.clone()) } else { None };
perform_move(board, &m);
if !is_in_check(&board, !board.to_move) {
moves.push(m);
}
undo_move(board, &m);
if DEBUG && Some(board.board) != temp {
panic!("OH NO {} at add_if_no_check", notate_move(&m));
}
}
// Move Generation
fn gen_moves(board: &mut Board) -> (Box<Vec<Move>>, Box<[[(Vec<PieceType>,Vec<PieceType>);8];8]>) {
let mut moves = Box::new(Vec::new());
let mut ad = Box::new([
[(Vec::new(), Vec::new()), (Vec::new(), Vec::new()), (Vec::new(), Vec::new()), (Vec::new(), Vec::new()), (Vec::new(), Vec::new()), (Vec::new(), Vec::new()), (Vec::new(), Vec::new()), (Vec::new(), Vec::new())],
[(Vec::new(), Vec::new()), (Vec::new(), Vec::new()), (Vec::new(), Vec::new()), (Vec::new(), Vec::new()), (Vec::new(), Vec::new()), (Vec::new(), Vec::new()), (Vec::new(), Vec::new()), (Vec::new(), Vec::new())],
[(Vec::new(), Vec::new()), (Vec::new(), Vec::new()), (Vec::new(), Vec::new()), (Vec::new(), Vec::new()), (Vec::new(), Vec::new()), (Vec::new(), Vec::new()), (Vec::new(), Vec::new()), (Vec::new(), Vec::new())],
[(Vec::new(), Vec::new()), (Vec::new(), Vec::new()), (Vec::new(), Vec::new()), (Vec::new(), Vec::new()), (Vec::new(), Vec::new()), (Vec::new(), Vec::new()), (Vec::new(), Vec::new()), (Vec::new(), Vec::new())],
[(Vec::new(), Vec::new()), (Vec::new(), Vec::new()), (Vec::new(), Vec::new()), (Vec::new(), Vec::new()), (Vec::new(), Vec::new()), (Vec::new(), Vec::new()), (Vec::new(), Vec::new()), (Vec::new(), Vec::new())],
[(Vec::new(), Vec::new()), (Vec::new(), Vec::new()), (Vec::new(), Vec::new()), (Vec::new(), Vec::new()), (Vec::new(), Vec::new()), (Vec::new(), Vec::new()), (Vec::new(), Vec::new()), (Vec::new(), Vec::new())],
[(Vec::new(), Vec::new()), (Vec::new(), Vec::new()), (Vec::new(), Vec::new()), (Vec::new(), Vec::new()), (Vec::new(), Vec::new()), (Vec::new(), Vec::new()), (Vec::new(), Vec::new()), (Vec::new(), Vec::new())],
[(Vec::new(), Vec::new()), (Vec::new(), Vec::new()), (Vec::new(), Vec::new()), (Vec::new(), Vec::new()), (Vec::new(), Vec::new()), (Vec::new(), Vec::new()), (Vec::new(), Vec::new()), (Vec::new(), Vec::new())]
]);
for pos in &board.pieces.get(&Piece{piece_type: PieceType::Rook, team: board.to_move}).expect("Error: pieces access").clone() {
for dir in ([(-1, 0), (1, 0), (0, -1), (0, 1)] as [(i32, i32); 4]).iter() {
gen_line_moves(board, &mut moves, &mut ad, pos, *dir, PieceType::Rook);
}
}
for pos in &board.pieces.get(&Piece{piece_type: PieceType::Bishop, team: board.to_move}).expect("Error: pieces access").clone() {
for dir in ([(-1, -1), (1, -1), (-1, 1), (1, 1)] as [(i32, i32); 4]).iter() {
gen_line_moves(board, &mut moves, &mut ad, pos, *dir, PieceType::Bishop);
}
}
for pos in &board.pieces.get(&Piece{piece_type: PieceType::Queen, team: board.to_move}).expect("Error: pieces access").clone() {
for dir in ([(-1, -1), (1, -1), (-1, 1), (1, 1), (-1, 0), (1, 0), (0, -1), (0, 1)] as [(i32, i32); 8]).iter() {
gen_line_moves(board, &mut moves, &mut ad, pos, *dir, PieceType::Queen);
}
}
for pos in &board.pieces.get(&Piece{piece_type: PieceType::King, team: board.to_move}).expect("Error: pieces access").clone() {
for dir in ([(-1, -1), (1, -1), (-1, 1), (1, 1), (-1, 0), (1, 0), (0, -1), (0, 1)] as [(i32, i32); 8]).iter() {
gen_line_moves(board, &mut moves, &mut ad, pos, *dir, PieceType::King);
}
}
for pos in &board.pieces.get(&Piece{piece_type: PieceType::Knight, team: board.to_move}).expect("Error: pieces access").clone() {
gen_knight_moves(board, &mut moves, &mut ad, pos);
}
for pos in &board.pieces.get(&Piece{piece_type: PieceType::Pawn, team: board.to_move}).expect("Error: pieces access").clone() {
gen_pawn_moves(board, &mut moves, &mut ad, pos);
}
gen_castle_moves(board, &mut moves);
(moves, ad)
}
fn gen_knight_moves(board: &mut Board, moves: &mut Vec<Move>, ad: &mut [[(Vec<PieceType>,Vec<PieceType>);8];8], pos: &Pos) -> () {
for (x, y) in [(-1, -2), (-2, -1), (1, -2), (-2, 1), (-1, 2), (2, -1), (1, 2), (2, 1)].iter() {
if pos.0 as i32 + x >= 0 && pos.0 as i32 + x <= 7 && pos.1 as i32 + y >= 0 && pos.1 as i32 + y <= 7 {
let p0 = (pos.0 as i32 + x) as usize;
let p1 = (pos.1 as i32 + y) as usize;
if board.to_move {
ad[p0][p1].0.push(PieceType::Knight);
}
else {
ad[p0][p1].1.push(PieceType::Knight);
};
match &board.board[p0][p1] {
Some(piece) if board.to_move != piece.team => {
let m = basic_move(board, Piece{piece_type: PieceType::Knight, team: board.to_move}, pos, &Pos(p0, p1), Some(*piece));
add_if_no_check(board, moves, m);
},
None => {
add_if_no_check(board, moves, basic_move(board, Piece{piece_type: PieceType::Knight, team: board.to_move}, pos, &Pos(p0, p1), None));
},
_ => ()
}
}
}
}
fn gen_pawn_moves(board: &mut Board, moves: &mut Vec<Move>, ad: &mut [[(Vec<PieceType>,Vec<PieceType>);8];8], pos: &Pos) -> () {
let dir: i32 = if board.to_move {1} else {-1};
let one_dir = (pos.0 as i32 + dir) as usize;
let one_space = match board.board[one_dir][pos.1] {
None => true,
_ => false
};
if one_space {
if (board.to_move && pos.0 == 6) || (!board.to_move && pos.0 == 1) {
add_promotion_moves(board, moves, pos, &Pos(one_dir, pos.1), None);
}
else {
add_if_no_check(board, moves, basic_move(board, Piece{piece_type: PieceType::Pawn, team: board.to_move}, pos, &Pos(one_dir, pos.1), None));
}
};
if ((board.to_move && pos.0 == 1) || (!board.to_move && pos.0 == 6)) && one_space {
match board.board[(pos.0 as i32 + (dir * 2)) as usize][pos.1] {
None => add_if_no_check(board, moves, Move{
piece: Piece{piece_type: PieceType::Pawn, team: board.to_move},
from: *pos,
to: Pos((pos.0 as i32 + (dir * 2)) as usize, pos.1),
promote: false,
castle_rook: None,
captured: None,
running: Some(Pos((pos.0 as i32 + (dir * 2)) as usize, pos.1)),
was_running: board.running,
can_castle: board.can_castle,
prev_can_castle: board.can_castle,
notation_data: MoveNotationData::Move
}),
_ => ()
}
};
if pos.1 > 0 {
if board.to_move {
ad[one_dir][pos.1 - 1].0.push(PieceType::Pawn);
}
else {
ad[one_dir][pos.1 - 1].1.push(PieceType::Pawn);
};
match board.board[one_dir][pos.1 - 1] {
Some(piece) if board.to_move != piece.team => {
if one_dir == 0 || one_dir == 7 {
add_promotion_moves(board, moves, pos, &Pos(one_dir, pos.1 - 1), Some(piece));
}
else
{
add_if_no_check(board, moves, basic_move(board, Piece{piece_type: PieceType::Pawn, team: board.to_move}, pos, &Pos(one_dir, pos.1 - 1), Some(piece)));
}
},
None => if board.running == Some(Pos(pos.0, pos.1 - 1)) {
match board.board[pos.0][pos.1 - 1] {
Some(piece) if board.to_move != piece.team => add_if_no_check(board, moves, Move{
piece: Piece{piece_type: PieceType::Pawn, team: board.to_move},
from: *pos,
to: Pos(one_dir, pos.1 - 1),
promote: false,
castle_rook: None,
captured: Some((piece, Pos(pos.0, pos.1 - 1))),
running: None,
was_running: board.running,
can_castle: board.can_castle,
prev_can_castle: board.can_castle,
notation_data: MoveNotationData::Move
}),
_ => ()
}
},
_ => ()
}
};
if pos.1 < 7 {
if board.to_move {
ad[one_dir][pos.1 + 1].0.push(PieceType::Pawn);
}
else {
ad[one_dir][pos.1 + 1].1.push(PieceType::Pawn);
};
match board.board[one_dir][pos.1 + 1] {
Some(piece) if board.to_move != piece.team => {
if one_dir == 0 || one_dir == 7 {
add_promotion_moves(board, moves, pos, &Pos(one_dir, pos.1 + 1), Some(piece));
}
else
{
add_if_no_check(board, moves, basic_move(board, Piece{piece_type: PieceType::Pawn, team: board.to_move}, pos, &Pos(one_dir, pos.1 + 1), Some(piece)));
}
}
None => if board.running == Some(Pos(pos.0, pos.1 + 1)) {
match board.board[pos.0][pos.1 + 1] {
Some(piece) if board.to_move != piece.team => add_if_no_check(board, moves, Move{
piece: Piece{piece_type: PieceType::Pawn, team: board.to_move},
from: *pos,
to: Pos(one_dir, pos.1 + 1),
promote: false,
castle_rook: None,
captured: Some((piece, Pos(pos.0, pos.1 + 1))),
running: None,
was_running: board.running,
can_castle: board.can_castle,
prev_can_castle: board.can_castle,
notation_data: MoveNotationData::Move
}),
_ => ()
}
},
_ => ()
}
};
}
fn gen_castle_moves(board: &mut Board, moves: &mut Vec<Move>) -> () {
let r = if board.to_move {0} else {7};
if board.can_castle[if board.to_move {CASTLE_WHITE + KING_SIDE_CASTLE} else {CASTLE_BLACK + KING_SIDE_CASTLE}] {
match (board.board[r][5], board.board[r][6]) {
(None, None) => if !is_in_check(board, board.to_move) { add_if_no_check(board, moves,
Move{
piece: Piece{piece_type: PieceType::King, team: board.to_move},
from: Pos(r, 4),
to: Pos(r, 6),
promote: false,
castle_rook: Some((Pos(r, 7), Pos(r,5))),
captured: None,
running: None,
was_running: board.running,
can_castle: update_can_castle(&Pos(r,4), board.can_castle),
prev_can_castle: board.can_castle,
notation_data: MoveNotationData::KSC
}
)},
_ => ()
};
}
if board.can_castle[if board.to_move {CASTLE_WHITE + QUEEN_SIDE_CASTLE} else {CASTLE_BLACK + QUEEN_SIDE_CASTLE}] {
match (board.board[r][1], board.board[r][2], board.board[r][3]) {
(None, None, None) => if !is_in_check(board, board.to_move) { add_if_no_check(board, moves,
Move{
piece: Piece{piece_type: PieceType::King, team: board.to_move},
from: Pos(r, 4),
to: Pos(r, 2),
promote: false,
castle_rook: Some((Pos(r, 0), Pos(r,3))),
captured: None,
running: None,
was_running: board.running,
can_castle: update_can_castle(&Pos(r,4), board.can_castle),
prev_can_castle: board.can_castle,
notation_data: MoveNotationData::QSC
}
)},
_ => ()
};
}
}
fn gen_line_moves(board: &mut Board, moves: &mut Vec<Move>, ad: &mut [[(Vec<PieceType>,Vec<PieceType>);8];8], pos: &Pos, (r_dir, f_dir): (i32, i32), piece_type: PieceType) -> () {
fn extend_dir((r, f): (i32, i32)) -> (i32, i32) {
let rp = if r > 0 {r + 1} else if r < 0 {r - 1} else {r};
let fp = if f > 0 {f + 1} else if f < 0 {f - 1} else {f};
(rp, fp)
}
let r = pos.0 as i32 + r_dir;
let f = pos.1 as i32 + f_dir;
if r >= 0 && r <= 7 && f >= 0 && f <= 7 {
let ru = r as usize;
let fu = f as usize;
if board.to_move {
ad[ru][fu].0.push(piece_type);
}
else {
ad[ru][fu].1.push(piece_type);
};
match &board.board[ru][fu] {
None => {
add_if_no_check(board, moves, basic_move(board, Piece{piece_type: piece_type, team: board.to_move}, pos, &Pos(ru, fu), None));
if piece_type != PieceType::King {
gen_line_moves(board, moves, ad, pos, extend_dir((r_dir, f_dir)), piece_type);
}
},
Some(piece) if board.to_move != piece.team => {
let m = basic_move(board, Piece{piece_type: piece_type, team: board.to_move}, pos, &Pos(ru, fu), Some(*piece));
add_if_no_check(board, moves, m);
},
_ => ()
}
}
}
// Check Detection
fn is_in_check(board: &Board, team: bool) -> bool {
if DEBUG && board.pieces[&Piece{piece_type: PieceType::King, team: team}].len() == 0 {
println!("Error: is_in_check: Missing king");
}
let pos = board.pieces[&Piece{piece_type: PieceType::King, team: team}][0];
let mut check = false;
for (dx, dy) in [(0, -1), (-1, 0), (0, 1), (1, 0), (-1, -1), (1, -1), (-1, 1), (1, 1)].iter() {
let mut r = pos.0 as i32;
let mut f = pos.1 as i32;
let mut first = true;
loop {
r += dx;
f += dy;
if r < 0 || r > 7 || f < 0 || f > 7 {break}
match board.board[r as usize][f as usize] {
Some(Piece{piece_type:PieceType::Bishop, team: pteam}) if (team != pteam) && (dx + dy).abs() % 2 == 0 => {check = true; break}
Some(Piece{piece_type:PieceType::Rook, team: pteam}) if (team != pteam) && (dx + dy).abs() % 2 == 1 => {check = true; break}
Some(Piece{piece_type:PieceType::Queen, team: pteam}) if team != pteam => {check = true; break}
Some(Piece{piece_type:PieceType::King, team: pteam}) if (team != pteam) && first => {check = true; break}
None => (),
_ => break
}
first = false;
}
}
if !check {
for (x, y) in [(-1, -2), (-2, -1), (1, -2), (-2, 1), (-1, 2), (2, -1), (1, 2), (2, 1)].iter() {
if pos.0 as i32 + x >= 0 && pos.0 as i32 + x <= 7 && pos.1 as i32 + y >= 0 && pos.1 as i32 + y <= 7 {
match board.board[(pos.0 as i32 + x) as usize][(pos.1 as i32 + y) as usize] {
Some(Piece{piece_type: PieceType::Knight, team: pteam}) if team != pteam => {check = true; break},
_ => ()
}
}
}
}
if !check {
        // Enemy pawn attacks depend on the king's team, not on whose turn it is.
        let dir = if team { 1 } else { -1 };
for (dx, dy) in [(dir, -1), (dir, 1)].iter() {
if pos.0 as i32 + dx >= 0 && pos.0 as i32 + dx <= 7 && pos.1 as i32 + dy >= 0 && pos.1 as i32 + dy <= 7 {
match board.board[(pos.0 as i32 + dx) as usize][(pos.1 as i32 + dy) as usize] {
Some(Piece{piece_type: PieceType::Pawn, team: pteam}) if team != pteam => {check = true; break},
_ => ()
}
}
}
}
check
}
// Board Evaluation and Move Choice
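// `cmp_ratings` returns true when `r1` is preferable to `r2` from white's point of view:
// higher evaluations win, a forced mate beats any evaluation, and faster mates are
// preferred for the winning side. Callers XOR the result with `!board.to_move` to flip
// the comparison when black is choosing.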
fn cmp_ratings (r1: &Rating, r2: &Rating) -> bool {
match (r1, r2) {
(Rating::Evaluation{score: s1}, Rating::Evaluation{score: s2}) => s1 > s2,
(Rating::Checkmate{score: b1, turns: t1}, Rating::Checkmate{score: b2, turns: t2}) if b1 == b2 => (t1 > t2) ^ b1,
(Rating::Checkmate{score: b, turns: _}, _) => *b,
(_, Rating::Checkmate{score: b, turns: _}) => !*b
}
}
fn join_ratings (raw_rating: &Rating, sub_rating: &Rating) -> Rating {
match (raw_rating, sub_rating) {
(Rating::Evaluation{score: rs}, Rating::Evaluation{score: ss}) => Rating::Evaluation{score: rs + ss * 10},
(Rating::Checkmate{score: b, turns: t}, _) => Rating::Checkmate{score: *b, turns: *t},
(_, Rating::Checkmate{score: b, turns: t}) => Rating::Checkmate{score: *b, turns: t + 1}
}
}
fn get_value(p: &Piece) -> i64 {
match p {
Piece{piece_type: PieceType::Pawn, team: t} => PAWN_VALUE * if *t {1} else {-1},
Piece{piece_type: PieceType::Bishop, team: t} => BISHOP_VALUE * if *t {1} else {-1},
Piece{piece_type: PieceType::Knight, team: t} => KNIGHT_VALUE * if *t {1} else {-1},
Piece{piece_type: PieceType::Rook, team: t} => ROOK_VALUE * if *t {1} else {-1},
Piece{piece_type: PieceType::Queen, team: t} => QUEEN_VALUE * if *t {1} else {-1},
Piece{piece_type: PieceType::King, team: t} => 999999 * if *t {1} else {-1},
}
}
fn evaluate_board(board: &mut Board) -> (Rating, Box<Vec<Move>>) {
if ENGINE_INFO { board.info_eval_count += 1 };
let (moves, ad) = gen_moves(board);
let rating = if moves.len() < 1 {
if is_in_check(board, board.to_move) {
Rating::Checkmate{score: !board.to_move, turns: 0}
}
else {
Rating::Evaluation{score: 0}
}
}
else {
let mut ret = 0;
for r in ad.iter().zip(board.board.iter()) {
for ((w,b), po) in r.0.iter().zip(r.1.iter()) {
if w.len() > 0 {
ret += 1;
}
if b.len() > 0 {
ret -= 1;
}
ret += w.len() as i64;
ret -= b.len() as i64;
match po {
Some(p) => {
let value = get_value(p) * 100;
ret += value;
if p.team != board.to_move {
let a_check = if board.to_move {w} else {b};
if a_check.len() > 0 {
let mut a = a_check.iter().map(|x| 100 * get_value(&Piece{piece_type:*x, team: board.to_move})).collect::<Vec<_>>();
let mut d = if board.to_move {b} else {w}.iter().map(|x| 100 * get_value(&Piece{piece_type:*x, team: !board.to_move})).collect::<Vec<_>>();
a.sort_unstable();
d.sort_unstable();
if board.to_move {
a.reverse();
}
else {
d.reverse();
}
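                            // `quick_simulate` is a rough static-exchange estimate for the
                            // attacked piece of value `t`: attackers and defenders capture in
                            // order of increasing value, and the net material swing on the
                            // square is returned and added to the evaluation.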
fn quick_simulate(a: &mut Vec<i64>, d: &mut Vec<i64>, t: i64) -> i64 {
match (a.pop(), d.pop()) {
(Some(_), None) => -t,
(Some(av), Some(dv)) if av.abs() <= t.abs() || (a.len() > 0 && dv <= av) => -t + quick_simulate(d, a, av),
_ => 0
}
};
ret += quick_simulate(&mut a, &mut d, value);
}
}
}
_ => ()
}
}
};
Rating::Evaluation{score: ret}
};
(rating, moves)
}
fn simulate_and_rate_move(board: &mut Board, layers: &mut VecDeque<HashMap<[u64;5], Box<Desc>>>, m: &Move, total_depth:u32, depth: u32) -> Rating {
let temp = if DEBUG { Some(board.board.clone()) } else { None };
perform_move(board, m);
let h = get_signature(board);
increment_position(board, h);
let ret = if repitition(board) {
Rating::Evaluation{score: 0}
}
else {
fill_layers(board, h, layers, total_depth, depth - 1)
};
decrement_position(board, h);
undo_move(board, &m);
if DEBUG && Some(board.board) != temp {
panic!("OH NO {}", notate_move(&m));
};
ret
}
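// `fill_layers` is a depth-limited minimax search. Each position's raw evaluation and
// move list are memoised per ply in `layers`, keyed by the board signature, so positions
// reached by transposition are only evaluated once at a given depth.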
fn fill_layers(board: &mut Board, h: [u64;5], layers: &mut VecDeque<HashMap<[u64;5], Box<Desc>>>, total_depth:u32, depth: u32) -> Rating {
let idx = (total_depth - depth - 1) as usize;
if depth > 0 {
let (raw_rating, moves) = match layers[idx].get(&h) {
None => {
let (raw_rating, moves) = evaluate_board(board);
layers[idx].insert(h, Box::new(Desc{
branches: moves.clone(),
rating: raw_rating
}));
(raw_rating, moves)
},
Some(desc) => {
(desc.rating, desc.branches.clone())
}
};
let mut it = moves.iter();
let rating = match it.next() {
Some(first_move) => {
let mut max_rating = simulate_and_rate_move(board, layers, &first_move, total_depth, depth);
for m in it {
let r = simulate_and_rate_move(board, layers, &m, total_depth, depth);
if cmp_ratings(&r, &max_rating) ^ !board.to_move {
max_rating = r;
}
}
join_ratings(&raw_rating, &max_rating)
},
None => {
raw_rating
}
};
rating
}
else {
match layers[idx].get(&h) {
None => {
let (rating, moves) = evaluate_board(board);
layers[idx].insert(h, Box::new(Desc{
branches: moves,
rating: rating
}));
rating
},
Some(desc) => {
desc.rating
}
}
}
}
// Repetition Detection
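// `get_signature` packs the position into five u64 words: every square takes 4 bits
// (piece type plus colour), two ranks share each of s[0]..s[3], and s[4] encodes the
// side to move, the en-passant square and the castling rights. The signature doubles as
// the key for repetition counting and for the search memo used by `fill_layers`.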
fn get_signature(board: &Board) -> [u64;5] {
let mut s = [0; 5];
let mut a = 0;
let mut i;
for r in &board.board {
i = 0;
for p in r {
s[a / 2] += match p {
Some(Piece{piece_type: PieceType::Pawn, team: t}) => 1 + if *t {8} else {0},
Some(Piece{piece_type: PieceType::Rook, team: t}) => 2 + if *t {8} else {0},
Some(Piece{piece_type: PieceType::Bishop, team: t}) => 3 + if *t {8} else {0},
Some(Piece{piece_type: PieceType::Knight, team: t}) => 4 + if *t {8} else {0},
Some(Piece{piece_type: PieceType::King, team: t}) => 5 + if *t {8} else {0},
Some(Piece{piece_type: PieceType::Queen, team: t}) => 6 + if *t {8} else {0},
None => 0
} << (4 * ((a as u32 % 2) + (2 * i)));
i += 1;
}
a += 1;
};
s[4] = if board.to_move {1} else {0}
+ match board.running {
Some(pos) => pos.0 as u64 * 2 + pos.1 as u64 * 16,
None => 0
}
+ if board.can_castle[0] {128} else {0}
+ if board.can_castle[1] {256} else {0}
+ if board.can_castle[2] {512} else {0}
+ if board.can_castle[3] {1024} else {0};
s
}
fn repitition(board: &Board) -> bool {
match board.positions.get(&get_signature(board)) {
Some(n) => *n >= 2,
None => false
}
}
// Move Input Methods
fn cpu_move(board: &mut Board, moves: &Box<Vec<Move>>, layers: &mut VecDeque<HashMap<[u64;5], Box<Desc>>>, depth: u32) -> Move {
let now = if ENGINE_INFO { Some(Instant::now()) } else {None};
while (layers.len() as u32) < depth {
layers.push_back(HashMap::new());
};
let mut max_rating = Rating::Checkmate{score: !board.to_move, turns: 0};
let mut mo = None;
for m in moves.iter() {
let r = simulate_and_rate_move(board, layers, m, depth, depth);
if cmp_ratings(&r, &max_rating) ^ !board.to_move {
max_rating = r;
mo = Some(m)
}
}
match mo {
Some(m) => {
if ENGINE_INFO {
println!("Evaluated {} new positions.", board.info_eval_count);
println!("Positions at each layer:");
for l in layers.iter() {
println!(" {}", l.len());
};
match now {
Some(now) => println!("Took {} seconds to pick a move.", now.elapsed().as_secs_f64()),
None => ()
};
match max_rating {
Rating::Evaluation{score: s} => println!("Evaluated at a {}.", s),
Rating::Checkmate{score: s, turns: t} => println!("Evaluated at a mate in {} in favor of {}.", t, if s {"white"} else {"black"})
};
board.info_eval_count = 0;
};
*m
},
None => panic!("Error: pick_moves: moves list is empty")
}
}
fn player_move(moves: &Vec<Move>, stdin: std::io::Stdin) -> Move {
let mut s;
let ret;
loop {
println!("Player move: ");
s = String::new();
stdin.read_line(&mut s).expect("");
s.pop();
if s == "moves" {
for m in moves {
println!("{}", notate_move(&m))
};
continue;
}
let mo = moves.iter().fold(None, |acc, m| if notate_move(&m) == s {Some(m)} else {acc});
match mo {
None => {
println!("Try again. List all possible moves with \"moves\".");
continue;
},
Some(m) => {
ret = *m;
break;
}
}
}
ret
}
// Output
fn notate_pos(pos: &Pos) -> String {
let a = match pos.0 {
0 => "1",
1 => "2",
2 => "3",
3 => "4",
4 => "5",
5 => "6",
6 => "7",
_ => "8"
};
let b = match pos.1 {
0 => "a",
1 => "b",
2 => "c",
3 => "d",
4 => "e",
5 => "f",
6 => "g",
_ => "h"
};
format!("{}{}", b, a)
}
fn notate_move(m: &Move) -> String {
match m.notation_data {
MoveNotationData::Move => format!("{}-{}", notate_pos(&m.from), notate_pos(&m.to)),
MoveNotationData::KSC => format!("O-O"),
MoveNotationData::QSC => format!("O-O-O"),
MoveNotationData::PQ => format!("{}-{}-{}", notate_pos(&m.from), notate_pos(&m.to), "Q"),
MoveNotationData::PK => format!("{}-{}-{}", notate_pos(&m.from), notate_pos(&m.to), "K"),
MoveNotationData::PB => format!("{}-{}-{}", notate_pos(&m.from), notate_pos(&m.to), "B"),
MoveNotationData::PR => format!("{}-{}-{}", notate_pos(&m.from), notate_pos(&m.to), "R"),
}
}
fn print_board(board: &Board) -> () {
let board_iter = board.board.iter().map(|r| r.iter().map(|po| match po {
None => ' ',
Some(Piece{piece_type: pt, team: true}) => match pt {
PieceType::Pawn => '♙',
PieceType::Rook => '♖',
PieceType::Knight => '♘',
PieceType::Bishop => '♗',
PieceType::Queen => '♕',
PieceType::King => '♔'
},
Some(Piece{piece_type: pt, team: false}) => match pt {
PieceType::Pawn => '♟',
PieceType::Rook => '♜',
PieceType::Knight => '♞',
PieceType::Bishop => '♝',
PieceType::Queen => '♛',
PieceType::King => '♚'
}
}
));
println!(" ┌───┬───┬───┬───┬───┬───┬───┬───┐");
let mut first = true;
let mut tile = false;
let mut or = 8;
for r in board_iter.rev() {
if !first {
println!(" ├───┼───┼───┼───┼───┼───┼───┼───┤");
}
first = false;
print!(" {} ", or);
or -= 1;
for p in r {
if tile {
print!("│░{}░", p);
}
else {
print!("│ {} ", p);
}
tile = !tile;
}
tile = !tile;
println!("│");
}
println!(" └───┴───┴───┴───┴───┴───┴───┴───┘");
println!(" a b c d e f g h");
}
// Interface
fn main() {
use std::io::{stdin, stdout, Write};
let mut s;
println!("Enter white's CPU depth or press enter for player input.");
let _ = stdout().flush();
s = String::new();
stdin().read_line(&mut s).expect("");
s.pop();
let white = match s.parse::<u32>() {
Ok(i) => Some(i),
_ => {
println!("White will use player input.");
None
}
};
println!("Enter black's CPU depth or press enter for player input.");
let _ = stdout().flush();
s = String::new();
stdin().read_line(&mut s).expect("");
s.pop();
let black = match s.parse::<u32>() {
Ok(i) => Some(i),
_ => {
println!("Black will use player input.");
None
}
};
let mut board = initial_board();
let mut white_moves = 0;
let mut black_moves = 0;
let mut history = Vec::new();
let mut layers = VecDeque::new();
let h = get_signature(&board);
increment_position(&mut board, h);
print_board(&board);
loop {
let moves = gen_moves(&mut board).0;
let m;
if moves.len() < 1 {
if is_in_check(&board, true) {
println!("Checkmate, black wins.");
}
else {
println!("Stalemate, draw.");
}
break;
};
match white {
None => {
m = player_move(&moves, stdin());
perform_move(&mut board, &m);
},
Some(d) => {
if white_moves >= MAX_CPU_MOVES {
println!("Max moves exceeded, white resigns. Black wins.");
break;
}
else{
println!("White is thinking...");
m = cpu_move(&mut board, &moves, &mut layers, d);
perform_move(&mut board, &m);
println!("White plays {}.", notate_move(&m));
white_moves += 1;
}
}
};
print_board(&board);
history.push(m);
if repitition(&board) {
println!("Threefold repitition, draw.");
break;
}
let h = get_signature(&board);
increment_position(&mut board, h);
layers.pop_front();
let omoves = gen_moves(&mut board).0;
let m;
if omoves.len() < 1 {
if is_in_check(&board, false) {
println!("Checkmate, white wins.");
}
else {
println!("Stalemate, draw.");
}
break;
}
match black {
None => {
m = player_move(&omoves, stdin());
perform_move(&mut board, &m);
},
Some(d) => {
if black_moves >= MAX_CPU_MOVES {
println!("Max moves exceeded, black resigns. White wins.");
break;
}
else{
println!("Black is thinking...");
m = cpu_move(&mut board, &omoves, &mut layers, d);
perform_move(&mut board, &m);
println!("Black plays {}.", notate_move(&m));
black_moves += 1;
}
}
}
print_board(&board);
history.push(m);
if repitition(&board) {
println!("Threefold repitition, draw.");
break;
}
let h = get_signature(&board);
increment_position(&mut board, h);
layers.pop_front();
}
println!("Move history:");
for m in history {
print!("{}, ", notate_move(&m));
}
println!("Done.");
}
| true | 9850e036ab69438ee50de79cae2dbf0db7027428 | Rust | anderslanglands/openexr-bind | /openexr-rs/examples/merge_overlapping.rs | UTF-8 | 2,172 | 3.359375 | 3 | ["BSD-3-Clause"] | permissive |
fn merge_overlapping(
a1: f32,
c1: f32, // Opacity and color of first sample
a2: f32,
c2: f32, // Opacity and color of second sample
) -> (f32, f32) {
// This function merges two perfectly overlapping volume or point
// samples. Given the color and opacity of two samples, it returns
// the color and opacity of the merged sample.
//
// The code below is written to avoid very large rounding errors when
// the opacity of one or both samples is very small:
//
// * The merged opacity must not be computed as 1 - (1-a1) * (1-a2).
// If a1 and a2 are less than about half a floating-point epsilon,
// the expressions (1-a1) and (1-a2) evaluate to 1.0 exactly, and the
// merged opacity becomes 0.0. The error is amplified later in the
// calculation of the merged color.
//
    //   Changing the calculation of the merged opacity to a1 + a2 - a1*a2
// avoids the excessive rounding error.
//
// * For small x, the logarithm of 1+x is approximately equal to x,
// but log(1+x) returns 0 because 1+x evaluates to 1.0 exactly.
// This can lead to large errors in the calculation of the merged
// color if a1 or a2 is very small.
//
// x.ln_1p() returns the logarithm of 1+x, but without attempting to
// evaluate the expression 1+x when x is very small.
//
let a1 = a1.clamp(0.0, 1.0);
let a2 = a2.clamp(0.0, 1.0);
let am = a1 + a2 - a1 * a2;
if a1 == 1.0 && a2 == 1.0 {
(am, (c1 + c2) / 2.0)
} else if a1 == 1.0 {
(am, c1)
} else if a2 == 1.0 {
(am, c2)
} else {
let u1 = -((-a1).ln_1p());
let v1 = if u1 < a1 * f32::MAX { u1 / a1 } else { 1.0 };
let u2 = -((-a2).ln_1p());
let v2 = if u2 < a2 * f32::MAX { u2 / a2 } else { 1.0 };
let u = u1 + u2;
let w = if u > 1.0 || am < u * f32::MAX {
am / u
} else {
1.0
};
(am, (c1 * v1 + c2 * v2) * w)
}
}
fn main() {
let a1 = 0.5f32;
let c1 = 0.2f32;
let a2 = 0.3f32;
let c2 = 0.4f32;
assert_eq!(merge_overlapping(a1, c1, a2, c2), (0.65, 0.46611378))
}
| true | dafa771f63b1ab41382306900cc393ad7797a62b | Rust | fly2006zhao/bsc | /src/utils.rs | UTF-8 | 455 | 2.578125 | 3 | [] | no_license |
// FIXME change is_XXX to macros
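// One possible macro-based replacement (a sketch only, not wired in yet):
//
// macro_rules! impl_is_uint {
//     ($name:ident, $ty:ty) => {
//         pub fn $name(v: String) -> Result<(), String> {
//             v.parse::<$ty>().map(|_| ()).map_err(|e| e.to_string())
//         }
//     };
// }
// impl_is_uint!(is_u16, u16);
// impl_is_uint!(is_u32, u32);
// impl_is_uint!(is_u64, u64);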
pub fn is_u16(v: String) -> Result<(), String> {
match v.parse::<u16>() {
Ok(_) => Ok(()),
Err(e) => Err(format!("{}", e)),
}
}
pub fn is_u64(v: String) -> Result<(), String> {
match v.parse::<u64>() {
Ok(_) => Ok(()),
Err(e) => Err(format!("{}", e)),
}
}
pub fn is_u32(v: String) -> Result<(), String> {
match v.parse::<u32>() {
Ok(_) => Ok(()),
Err(e) => Err(format!("{}", e)),
}
}
| true | c64670670c03eb6a3cd9cf45c27e24b04ce18f4b | Rust | iCodeIN/vilmos_assembler | /src/parser.rs | UTF-8 | 1,791 | 3.453125 | 3 | ["MIT"] | permissive |
pub fn escaped(to_escape: Option<char>) -> Option<char> {
if to_escape.is_none() {
return None;
}
match to_escape.unwrap() {
'n' => Some('\n'),
'r' => Some('\r'),
't' => Some('\t'),
'"' => Some('"'),
'\\' => Some('\\'),
'0' => Some('\0'),
_ => None
}
}
fn consume_str(buffer: &mut String, mut actual_char: char) -> Option<String> {
let quoted = actual_char == '"';
let mut final_string = String::new();
if quoted {
match buffer.pop() {
None => { return None; }
Some(ch) => { actual_char = ch; }
}
}
loop {
if actual_char == '\\' {// escape \
let to_escape = buffer.pop();
let escaped = escaped(to_escape);
if escaped.is_none() {
return None
}
final_string.push(escaped.unwrap());
} else if (quoted && actual_char != '"') || (!quoted && !actual_char.is_whitespace()) { //push other character
final_string.push(actual_char);
} else {
return Some(final_string);
}
match buffer.pop() {
None => { return Some(final_string); }
Some(ch) => { actual_char = ch; }
}
}
}
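/// Splits a line into whitespace-separated tokens, honouring double-quoted strings and
/// backslash escapes, and ignoring everything after `#`. For example (illustrative only),
/// `parse(r#"PUSH "hello world" # comment"#)` should yield
/// `Some(vec!["PUSH".to_string(), "hello world".to_string()])`.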
pub fn parse(str: &str) -> Option<Vec<String>> {
let mut str = str.trim_start().chars().rev().collect::<String>();
let mut tokens: Vec<String> = Vec::new();
while let Some(actual_char) = str.pop() {
match actual_char {
'#' => break,
_ => match consume_str(&mut str, actual_char) {
None => return None,
Some(str) => tokens.push(str)
}
}
        str = str.trim_end().to_string();
}
return Some(tokens);
}
| true | b67a136a046a3582d5ca6248920917426a7375f0 | Rust | kezenator/adventofcode | /2020/src/y2019/d06/mod.rs | UTF-8 | 1,775 | 2.984375 | 3 | ["MIT"] | permissive |
use crate::support::*;
use std::collections::HashMap;
const EXAMPLE_1: &str = "COM)B\nB)C\nC)D\nD)E\nE)F\nB)G\nG)H\nD)I\nE)J\nJ)K\nK)L";
const EXAMPLE_2: &str = "COM)B\nB)C\nC)D\nD)E\nE)F\nB)G\nG)H\nD)I\nE)J\nJ)K\nK)L\nK)YOU\nI)SAN";
const INPUT: &str = include_str!("input.txt");
fn parse_orbits(input: &str) -> HashMap<String, String>
{
input_to_lines(input)
.iter()
.map(|s| {
scan(s)
.until(")").parse::<String>()
.remaining().parse::<String>()
})
.map(|(a, b)| (b, a))
.collect()
}
fn path_to_com(o: &String, orbits: &HashMap<String, String>) -> Vec<String>
{
let mut result = Vec::new();
let mut cur = o;
while *cur != "COM"
{
cur = orbits.get(cur).unwrap();
result.push(cur.clone());
}
result
}
fn part_1(input: &str) -> usize
{
let orbits = parse_orbits(input);
let objects = orbits
.iter()
.map(|(a, _b)| a.to_owned())
.collect::<Vec<String>>();
objects.iter()
.map(|o| path_to_com(o, &orbits).len())
.sum()
}
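// Walk both objects back to COM, strip the ancestors they share, and the number of
// orbital transfers is the sum of the two remaining path lengths.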
fn part_2(input: &str) -> usize
{
let orbits = parse_orbits(input);
let mut p1 = path_to_com(&"YOU".to_owned(), &orbits);
let mut p2 = path_to_com(&"SAN".to_owned(), &orbits);
while p1[p1.len() - 1] == p2[p2.len() - 1]
{
p1.pop();
p2.pop();
}
p1.len() + p2.len()
}
pub fn puzzles() -> PuzzleDay
{
puzzle_day(6)
.example(|| Answer { calculated: part_1(EXAMPLE_1), expected: 42, })
.part_1(|| Answer { calculated: part_1(INPUT), expected: 150150, })
.example(|| Answer { calculated: part_2(EXAMPLE_2), expected: 4, })
.part_2(|| Answer { calculated: part_2(INPUT), expected: 352, })
}
| true | d46009a389a600eeddb8bc103359de19d36bed35 | Rust | flattiverse/connector-rust | /src/units/unit.rs | UTF-8 | 3,288 | 3.203125 | 3 | ["MIT"] | permissive |
use crate::team::TeamId;
use crate::units::mobility::Mobility;
use crate::units::unit_kind::UnitKind;
use crate::units::unit_kind_simplified::SimpleUnitKind;
use crate::vector::Vector;
use serde_derive::{Deserialize, Serialize};
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct Unit {
/// The name of the unit.
pub name: String,
/// The radius of the unit
pub radius: f64,
/// The absolute position of the unit.
pub position: Vector,
/// The movement vector of the unit.
pub movement: Vector,
/// The direction this unit is facing towards.
// TODO unwrap, this is just a workaround
pub direction: Option<f64>,
/// The [`Team`] this unit belongs to, if any. Referenced through its id.
///
/// [`Team`]: crate::team::Team
pub team: Option<TeamId>,
/// The gravity exercised by this unit.
pub gravity: f64,
#[serde(flatten)]
pub kind: UnitKind,
}
impl Unit {
/// The [`Mobility`] status of this unit.
pub fn mobility(&self) -> Mobility {
match &self.kind {
UnitKind::Sun(unit) => Mobility::from(&unit.orbits[..]),
UnitKind::Planet(unit) => Mobility::from(&unit.orbits[..]),
UnitKind::Moon(unit) => Mobility::from(&unit.orbits[..]),
UnitKind::Meteoroid(unit) => Mobility::from(&unit.orbits[..]),
UnitKind::Comet(unit) => Mobility::from(&unit.orbits[..]),
UnitKind::Buoy(unit) => Mobility::from(&unit.orbits[..]),
UnitKind::MissionTarget(unit) => Mobility::from(&unit.orbits[..]),
UnitKind::PlayerUnit(_) => Mobility::Mobile,
UnitKind::Shot(_) => Mobility::Mobile,
UnitKind::Explosion(unit) => Mobility::from(&unit.orbits[..]),
UnitKind::BlackHole(unit) => Mobility::from(&unit.orbits[..]),
UnitKind::Reduced(r) => r.mobility(),
// TODO
UnitKind::Resource => Mobility::Steady,
}
}
    /// Returns the simplified kind of this unit.
pub fn simple_kind(&self) -> SimpleUnitKind {
self.kind.simplified()
}
/// The energy output of this unit.
pub fn energy_output(&self) -> f64 {
todo!()
}
/// Whether this unit is masking.
pub fn is_masking(&self) -> bool {
todo!()
}
/// Whether this unit is solid
pub fn is_solid(&self) -> bool {
todo!()
}
    /// Whether it is possible to edit this unit via admin commands.
pub fn is_map_editable(&self) -> bool {
match &self.kind {
UnitKind::Sun(_) => true,
UnitKind::Planet(_) => true,
UnitKind::Moon(_) => true,
UnitKind::Meteoroid(_) => true,
UnitKind::Comet(_) => true,
UnitKind::Buoy(_) => true,
UnitKind::MissionTarget(_) => true,
UnitKind::PlayerUnit(_) => false,
UnitKind::Shot(_) => true,
UnitKind::Explosion(_) => true,
UnitKind::BlackHole(_) => true,
UnitKind::Reduced(_) => false,
// TODO
UnitKind::Resource => true,
}
}
    /// Whether this unit is a [`UnitKind::Reduced`] view of the unit.
pub fn is_reduced(&self) -> bool {
matches!(self.kind, UnitKind::Reduced(..))
}
}
| true | f16a01b4a9150b2f35b14c2913df7a8b4231bea1 | Rust | Karuma303/rs_sdf | /src/generator.rs | UTF-8 | 4,020 | 2.96875 | 3 | ["Apache-2.0"] | permissive |
use std::result::Result::Err;
use crate::input::DistanceInput;
use crate::processor::SourceProcessor;
use crate::distance::{DistanceType, DistanceLayer};
use crate::data::transformation::{DistanceTransformation, TransformOutputGenerator, TransformationResult};
use crate::export::image::{ImageFileWriter};
pub struct DistanceGenerator {
input: Option<Box<dyn DistanceInput>>,
output: Option<Box<dyn ImageFileWriter>>,
processor: Option<Box<dyn SourceProcessor>>,
distance_type: DistanceType,
distance_layer: DistanceLayer,
}
impl DistanceGenerator {
pub fn new() -> DistanceGenerator {
DistanceGenerator {
input: None,
output: None,
processor: None,
distance_type: DistanceType::EuclideanDistance,
distance_layer: DistanceLayer::Combined,
}
}
pub fn input(mut self, input: impl DistanceInput + 'static) -> Self {
self.input = Some(Box::new(input));
self
}
pub fn output(mut self, output: impl ImageFileWriter + 'static) -> Self {
self.output = Some(Box::new(output));
self
}
pub fn processor(mut self, processor: impl SourceProcessor + 'static) -> Self {
self.processor = Some(Box::new(processor));
self
}
pub fn export_filter(mut self, export_selection: DistanceLayer) -> Self {
self.distance_layer = export_selection;
self
}
pub fn distance_type(mut self, export_type: DistanceType) -> Self {
self.distance_type = export_type;
self
}
pub fn generate(&self) -> Result<(), String> {
// input path is set?
if let Some(input) = &self.input {
// TODO: add matching here !!!
let source = input.source_field().ok().unwrap(); //unwrap();
if let Some(processor) = &self.processor {
let df = processor.process(&source);
if let Some(output) = &self.output {
// output.export(&df, &self.distance_type, &self.distance_layer);
let mut dt: DistanceTransformation = DistanceTransformation::from(df);
dt.filter(self.distance_layer);
dt.distance_type(self.distance_type);
dt.scale(0.9); // u8 -> 0 = orig, 1 = 2^1 = orig / 2, 2 = 2^2 = orig / 4, etc...
let res : TransformationResult<u8> = dt.transform();
output.write(&res);
} else {
panic!("no export file specified");
}
} else {
panic!("no processor specified");
}
            // we should test and maybe micro-benchmark at least two known approaches here:
// 1) brute force O(n²)
// 2) the old EightPointSeqEuclideanDistTrans O(n)
// Notes from old C# repository:
// detect edges and mark them
// next: make it signed
// next: vector field
// draw distance vectors
// implement brute force / circle method : https://github.com/chriscummings100/signeddistancefields/blob/master/Assets/SignedDistanceFields/SignedDistanceFieldGenerator.cs
} else {
return Err(String::from("no input path specified"));
}
Ok(())
}
}
pub struct Configuration {}
#[cfg(test)]
mod tests {
use crate::generator::DistanceGenerator;
#[test]
fn no_input_path() {
let gen : DistanceGenerator = DistanceGenerator::new();
assert!(gen.generate().is_err(), "non existing input path should generate an error");
}
/*
#[tests]
fn generates_output_file() {
let outputPath = r"export\test_output.png";
let res = DistanceGenerator::new()
.input(r"tests\example_1_rgba_512x512.png")
.export(outputPath).generate();
let f = File::open(outputPath);
assert!(f.is_ok(), "export file was not generated");
}
*/
}
| true |