| blob_id (string, len 40) | language (string, 1 class) | repo_name (string, len 5-140) | path (string, len 5-183) | src_encoding (string, 6 classes) | length_bytes (int64, 12-5.32M) | score (float64, 2.52-4.94) | int_score (int64, 3-5) | detected_licenses (list, len 0-47) | license_type (string, 2 classes) | text (string, len 12-5.32M) | download_success (bool, 1 class) |
|---|---|---|---|---|---|---|---|---|---|---|---|
| bb1d19d1cb9fdfe2779476be551a376bd3acd8f6 | Rust | jrmuizel/glsl | /glsl/src/transpiler/spirv.rs | UTF-8 | 3,292 | 3.09375 | 3 | ["BSD-3-Clause"] | permissive |
//! SPIR-V transpiler.
//!
//! The current implementation uses the [shaderc](https://crates.io/crates/shaderc) crate to
//! transpile GLSL to SPIR-V. This is not ideal but will provide a default and starting
//! implementation.
use shaderc;
use crate::syntax;
use crate::transpiler::glsl as glsl_transpiler;
#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)]
pub enum ShaderKind {
TessControl,
TessEvaluation,
Vertex,
Geometry,
Fragment,
Compute,
}
impl From<ShaderKind> for shaderc::ShaderKind {
fn from(kind: ShaderKind) -> Self {
match kind {
ShaderKind::TessControl => shaderc::ShaderKind::TessControl,
ShaderKind::TessEvaluation => shaderc::ShaderKind::TessEvaluation,
ShaderKind::Vertex => shaderc::ShaderKind::Vertex,
ShaderKind::Geometry => shaderc::ShaderKind::Geometry,
ShaderKind::Fragment => shaderc::ShaderKind::Fragment,
ShaderKind::Compute => shaderc::ShaderKind::Compute,
}
}
}
/// Transpile a GLSL AST into a SPIR-V binary and write it to the given buffer.
///
/// The current implementation is highly inefficient as it relies on internal allocations and
/// [shaderc](https://crates.io/crates/shaderc).
///
/// If any errors happen while transpiling, they're returned as an opaque string.
pub fn transpile_translation_unit_to_binary<F>(
f: &mut F,
tu: &syntax::TranslationUnit,
kind: ShaderKind
) -> Result<(), String> where F: std::io::Write {
// write as GLSL in an intermediate buffer
let mut glsl_buffer = String::new();
glsl_transpiler::show_translation_unit(&mut glsl_buffer, tu);
// pass the GLSL-formatted string to shaderc
let mut compiler = shaderc::Compiler::new().unwrap();
let options = shaderc::CompileOptions::new().unwrap();
let kind = kind.into();
let output =
compiler.compile_into_spirv(&glsl_buffer,
kind,
"glsl input",
"main",
Some(&options)
).map_err(|e| format!("{}", e))?;
let _ = f.write_all(output.as_binary_u8());
Ok(())
}
/// Transpile a GLSL AST into SPIR-V assembly (its textual form) and write it to the given buffer.
///
/// The current implementation is highly inefficient as it relies on internal allocations and
/// [shaderc](https://crates.io/crates/shaderc).
///
/// If any errors happen while transpiling, they're returned as an opaque string.
pub fn transpile_translation_unit<F>(
f: &mut F,
tu: &syntax::TranslationUnit,
kind: ShaderKind
) -> Result<(), String> where F: std::fmt::Write {
// write as GLSL in an intermediate buffer
let mut glsl_buffer = String::new();
glsl_transpiler::show_translation_unit(&mut glsl_buffer, tu);
// pass the GLSL-formatted string to shaderc
let mut compiler = shaderc::Compiler::new().unwrap();
let options = shaderc::CompileOptions::new().unwrap();
let kind = kind.into();
let output =
compiler.compile_into_spirv_assembly(&glsl_buffer,
kind,
"glsl input",
"main",
Some(&options)
).map_err(|e| format!("{}", e))?;
let _ = f.write_str(&output.as_text());
Ok(())
}
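// A minimal usage sketch, assuming a `syntax::TranslationUnit` has already been obtained from
// the glsl crate's parser; `ShaderKind::Fragment` is an arbitrary choice for illustration.
#[allow(dead_code)]
fn example_glsl_to_spirv(tu: &syntax::TranslationUnit) -> Result<Vec<u8>, String> {
    // `Vec<u8>` implements `std::io::Write`, so the SPIR-V bytes land in memory.
    let mut spirv = Vec::new();
    transpile_translation_unit_to_binary(&mut spirv, tu, ShaderKind::Fragment)?;
    Ok(spirv)
}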
| true |
| df96b3a7aa8b2c3ace217c33ae0239ac8eeea042 | Rust | hartmantis/sheepit | /src/changelog/change.rs | UTF-8 | 2,495 | 3.328125 | 3 | [] | no_license |
//
// Author:: Jonathan Hartman (<[email protected]>)
// License:: Apache License, Version 2.0
//
// Copyright (C) 2015, Jonathan Hartman
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
use std::fmt;
/// A struct to represent a single changelog item.
pub struct Change {
body: String,
}
impl Change {
/// Constructs a new `Change`.
///
/// # Examples
///
/// ```
/// use changelog::change::Change;
/// let c = Change::new("The body of a change");
/// ```
pub fn new(body: &str) -> Change {
Change {
body: body.to_string(),
}
}
}
impl fmt::Display for Change {
/// Formats a change into a bullet point for a changelog file with a max
/// line length of 79 characters.
///
/// # Examples
///
/// ```
/// use changelog::change::Change;
/// let c = Change::new("The body of a change");
/// println!("{}", c);
/// ```
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
// Greedily wrap words: the bullet starts with '-', and a new indented line is
// started whenever appending the next word would push the current line past 79
// characters.
let mut lines: Vec<String> = Vec::new();
let mut next_line = String::new();
next_line.push('-');
for word in self.body.split_whitespace() {
if next_line.len() + 1 + word.len() > 79 {
lines.push(next_line);
next_line = String::new();
next_line.push(' ');
}
next_line.push(' ');
next_line.push_str(word);
}
lines.push(next_line);
write!(f, "{}", lines.join("\n"))
}
}
#[test]
fn a_short_line() {
let c = Change::new("A short line");
assert_eq!(c.to_string(), "- A short line");
}
#[test]
fn a_long_line() {
let c = Change::new("This line is long, whatever are we going to do when it gets too long \
to fit on one line?");
assert_eq!(c.to_string(), "- This line is long, whatever are we going to do when it gets \
too long to fit\n on one line?");
}
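// A small illustrative test (a sketch, not from the upstream crate): several `Change` values
// can be joined into one changelog section purely through the `Display` impl above.
#[test]
fn joining_changes_into_a_section() {
    let changes = vec![
        Change::new("Add a shiny new feature"),
        Change::new("Fix a bug in the release task"),
    ];
    let section: Vec<String> = changes.iter().map(|c| c.to_string()).collect();
    assert_eq!(
        section.join("\n"),
        "- Add a shiny new feature\n- Fix a bug in the release task"
    );
}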
| true |
| e63b1f6e4675baabce90394fff448c55ab773adc | Rust | gz/autoperf | /src/util.rs | UTF-8 | 12,546 | 2.578125 | 3 | ["MIT"] | permissive |
#![allow(unused)]
use csv;
use itertools::*;
use log::error as lerror;
use log::*;
use nom::*;
use std::fs;
use std::fs::File;
use std::io;
use std::io::prelude::*;
use std::path::Path;
use std::path::PathBuf;
use std::process::{Command, Output};
use std::str::{from_utf8_unchecked, FromStr};
use x86::cpuid;
pub type Node = u64;
pub type Socket = u64;
pub type Core = u64;
pub type Cpu = u64;
pub type L1 = u64;
pub type L2 = u64;
pub type L3 = u64;
pub type Online = u64;
pub type MHz = u64;
pub fn mkdir(out_dir: &Path) {
if !out_dir.exists() {
fs::create_dir(out_dir).expect("Can't create directory");
}
}
fn to_string(s: &[u8]) -> &str {
unsafe { from_utf8_unchecked(s) }
}
fn to_u64(s: &str) -> u64 {
FromStr::from_str(s).unwrap()
}
fn buf_to_u64(s: &[u8]) -> u64 {
to_u64(to_string(s))
}
named!(parse_numactl_size<&[u8], NodeInfo>,
chain!(
tag!("node") ~
take_while!(is_space) ~
node: take_while!(is_digit) ~
take_while!(is_space) ~
tag!("size:") ~
take_while!(is_space) ~
size: take_while!(is_digit) ~
take_while!(is_space) ~
tag!("MB"),
|| NodeInfo { node: buf_to_u64(node), memory: buf_to_u64(size) * 1000000 }
)
);
fn get_node_info(node: Node, numactl_output: &String) -> Option<NodeInfo> {
let find_prefix = format!("node {} size:", node);
for line in numactl_output.split('\n') {
if line.starts_with(find_prefix.as_str()) {
let res = parse_numactl_size(line.as_bytes());
return Some(res.unwrap().1);
}
}
None
}
#[derive(Debug, Eq, PartialEq, RustcEncodable)]
pub struct CpuInfo {
pub node: NodeInfo,
pub socket: Socket,
pub core: Core,
pub cpu: Cpu,
pub l1: L1,
pub l2: L2,
pub l3: L3,
}
impl CpuInfo {
pub fn cbox(&self, mt: &MachineTopology) -> String {
let cbox = self.core % mt.cores_on_socket(self.socket).len() as u64;
format!("uncore_cbox_{}", cbox)
}
}
#[derive(Debug, Eq, PartialEq, Ord, PartialOrd, Copy, Clone, RustcEncodable)]
pub struct NodeInfo {
pub node: Node,
pub memory: u64,
}
#[derive(Debug)]
pub struct MachineTopology {
data: Vec<CpuInfo>,
}
fn save_file(
cmd: &'static str,
output_path: &Path,
file: &'static str,
out: Output,
) -> io::Result<String> {
if out.status.success() {
// Save to result directory:
let mut out_file: PathBuf = output_path.to_path_buf();
out_file.push(file);
let mut f = File::create(out_file.as_path())?;
let content = String::from_utf8(out.stdout).unwrap_or(String::new());
f.write(content.as_bytes())?;
Ok(content)
} else {
lerror!(
    "{} command exited with unexpected status: {}",
    cmd,
    out.status
);
debug!(
"stderr:\n{}",
String::from_utf8(out.stderr).unwrap_or("Can't parse output".to_string())
);
unreachable!()
}
}
pub fn save_lstopo(output_path: &Path) -> io::Result<String> {
let out = Command::new("lstopo")
.arg("--of console")
.arg("--taskset")
.output()?;
save_file("lstopo", output_path, "lstopo.txt", out)
}
pub fn save_cpuid(output_path: &Path) -> io::Result<String> {
let out = Command::new("cpuid").output()?;
save_file("cpuid", output_path, "cpuid.txt", out)
}
pub fn save_likwid_topology(output_path: &Path) -> io::Result<String> {
let out = Command::new("likwid-topology")
.arg("-g")
.arg("-c")
.output()?;
save_file("likwid-topology", output_path, "likwid_topology.txt", out)
}
pub fn save_numa_topology(output_path: &Path) -> io::Result<String> {
let out = Command::new("numactl").arg("--hardware").output()?;
save_file("numactl", output_path, "numactl.dat", out)
}
pub fn save_cpu_topology(output_path: &Path) -> io::Result<String> {
let out = Command::new("lscpu")
.arg("--parse=NODE,SOCKET,CORE,CPU,CACHE")
.output()?;
save_file("lscpu", output_path, "lscpu.csv", out)
}
impl MachineTopology {
pub fn new() -> MachineTopology {
let lscpu_out = Command::new("lscpu")
.arg("--parse=NODE,SOCKET,CORE,CPU,CACHE")
.output()
.unwrap();
let lscpu_string = String::from_utf8(lscpu_out.stdout).unwrap_or(String::new());
let numactl_out = Command::new("numactl").arg("--hardware").output().unwrap();
let numactl_string = String::from_utf8(numactl_out.stdout).unwrap_or(String::new());
MachineTopology::from_strings(lscpu_string, numactl_string)
}
pub fn from_files(lcpu_path: &Path, numactl_path: &Path) -> MachineTopology {
let mut file = File::open(lcpu_path).expect("lscpu.csv file does not exist?");
let mut lscpu_string = String::new();
let _ = file.read_to_string(&mut lscpu_string).unwrap();
let mut file = File::open(numactl_path).expect("numactl.dat file does not exist?");
let mut numactl_string = String::new();
let _ = file.read_to_string(&mut numactl_string).unwrap();
MachineTopology::from_strings(lscpu_string, numactl_string)
}
pub fn from_strings(lscpu_output: String, numactl_output: String) -> MachineTopology {
let no_comments: Vec<&str> = lscpu_output
.split('\n')
.filter(|s| s.trim().len() > 0 && !s.trim().starts_with("#"))
.collect();
type Row = (Node, Socket, Core, Cpu, String); // Online MHz
let mut rdr = csv::Reader::from_string(no_comments.join("\n")).has_headers(false);
let rows = rdr.decode().collect::<csv::Result<Vec<Row>>>().unwrap();
let mut data: Vec<CpuInfo> = Vec::with_capacity(rows.len());
for row in rows {
let caches: Vec<u64> = row
.4
.split(":")
.map(|s| u64::from_str(s).unwrap())
.collect();
assert_eq!(caches.len(), 4);
let node: NodeInfo =
get_node_info(row.0, &numactl_output).expect("Can't find node in numactl output?");
let tuple: CpuInfo = CpuInfo {
node: node,
socket: row.1,
core: row.2,
cpu: row.3,
l1: caches[0],
l2: caches[2],
l3: caches[3],
};
data.push(tuple);
}
MachineTopology { data: data }
}
pub fn cpus(&self) -> Vec<Cpu> {
let mut cpus: Vec<Cpu> = self.data.iter().map(|t| t.cpu).collect();
cpus.sort();
cpus.dedup();
cpus
}
pub fn cpu(&self, cpu: Cpu) -> Option<&CpuInfo> {
self.data.iter().find(|t| t.cpu == cpu)
}
pub fn cores(&self) -> Vec<Core> {
let mut cores: Vec<Core> = self.data.iter().map(|t| t.core).collect();
cores.sort();
cores.dedup();
cores
}
pub fn sockets(&self) -> Vec<Socket> {
let mut sockets: Vec<Cpu> = self.data.iter().map(|t| t.socket).collect();
sockets.sort();
sockets.dedup();
sockets
}
pub fn nodes(&self) -> Vec<NodeInfo> {
let mut nodes: Vec<NodeInfo> = self.data.iter().map(|t| t.node).collect();
nodes.sort();
nodes.dedup();
nodes
}
pub fn max_memory(&self) -> u64 {
self.nodes().iter().map(|t| t.memory).sum()
}
pub fn l1(&self) -> Vec<L1> {
let mut l1: Vec<L1> = self.data.iter().map(|t| t.l1).collect();
l1.sort();
l1.dedup();
l1
}
pub fn l1_size(&self) -> Option<u64> {
let cpuid = cpuid::CpuId::new();
cpuid.get_cache_parameters().map(|mut cparams| {
let cache = cparams
.find(|c| c.level() == 1 && c.cache_type() == cpuid::CacheType::Data)
.unwrap();
(cache.associativity()
* cache.physical_line_partitions()
* cache.coherency_line_size()
* cache.sets()) as u64
})
}
pub fn l2(&self) -> Vec<L2> {
let mut l2: Vec<L2> = self.data.iter().map(|t| t.l2).collect();
l2.sort();
l2.dedup();
l2
}
pub fn l2_size(&self) -> Option<u64> {
let cpuid = cpuid::CpuId::new();
cpuid.get_cache_parameters().map(|mut cparams| {
let cache = cparams
.find(|c| c.level() == 2 && c.cache_type() == cpuid::CacheType::Unified)
.unwrap();
(cache.associativity()
* cache.physical_line_partitions()
* cache.coherency_line_size()
* cache.sets()) as u64
})
}
pub fn l3(&self) -> Vec<L3> {
let mut l3: Vec<L3> = self.data.iter().map(|t| t.l3).collect();
l3.sort();
l3.dedup();
l3
}
pub fn l3_size(&self) -> Option<u64> {
let cpuid = cpuid::CpuId::new();
cpuid.get_cache_parameters().map(|mut cparams| {
let cache = cparams
.find(|c| c.level() == 3 && c.cache_type() == cpuid::CacheType::Unified)
.unwrap();
(cache.associativity()
* cache.physical_line_partitions()
* cache.coherency_line_size()
* cache.sets()) as u64
})
}
pub fn cpus_on_node(&self, node: NodeInfo) -> Vec<&CpuInfo> {
self.data.iter().filter(|t| t.node == node).collect()
}
pub fn cpus_on_l1(&self, l1: L1) -> Vec<&CpuInfo> {
self.data.iter().filter(|t| t.l1 == l1).collect()
}
pub fn cpus_on_l2(&self, l2: L2) -> Vec<&CpuInfo> {
self.data.iter().filter(|t| t.l2 == l2).collect()
}
pub fn cpus_on_l3(&self, l3: L3) -> Vec<&CpuInfo> {
self.data.iter().filter(|t| t.l3 == l3).collect()
}
pub fn cpus_on_core(&self, core: Core) -> Vec<&CpuInfo> {
self.data.iter().filter(|t| t.core == core).collect()
}
pub fn cpus_on_socket(&self, socket: Socket) -> Vec<&CpuInfo> {
self.data.iter().filter(|t| t.socket == socket).collect()
}
fn cores_on_socket(&self, socket: Socket) -> Vec<Core> {
let mut cores: Vec<Core> = self
.data
.iter()
.filter(|c| c.socket == socket)
.map(|c| c.core)
.collect();
cores.sort();
cores.dedup();
cores
}
fn cores_on_l3(&self, l3: L3) -> Vec<&CpuInfo> {
let mut cpus: Vec<&CpuInfo> = self.data.iter().filter(|t| t.l3 == l3).collect();
cpus.sort_by_key(|c| c.core);
// TODO: implicit assumption that we have two HTs
cpus.into_iter().step(2).collect()
}
pub fn same_socket(&self) -> Vec<Vec<&CpuInfo>> {
self.sockets()
.into_iter()
.map(|s| self.cpus_on_socket(s))
.collect()
}
pub fn same_core(&self) -> Vec<Vec<&CpuInfo>> {
self.cores()
.into_iter()
.map(|c| self.cpus_on_core(c))
.collect()
}
pub fn same_node(&self) -> Vec<Vec<&CpuInfo>> {
self.nodes()
.into_iter()
.map(|c| self.cpus_on_node(c))
.collect()
}
pub fn same_l1(&self) -> Vec<Vec<&CpuInfo>> {
self.l1().into_iter().map(|c| self.cpus_on_l1(c)).collect()
}
pub fn same_l2(&self) -> Vec<Vec<&CpuInfo>> {
self.l2().into_iter().map(|c| self.cpus_on_l2(c)).collect()
}
pub fn same_l3(&self) -> Vec<Vec<&CpuInfo>> {
self.l3().into_iter().map(|c| self.cpus_on_l3(c)).collect()
}
pub fn same_l3_cores(&self) -> Vec<Vec<&CpuInfo>> {
self.l3()
.into_iter()
.map(|l3| self.cores_on_l3(l3))
.collect()
}
pub fn whole_machine(&self) -> Vec<Vec<&CpuInfo>> {
vec![self.data.iter().collect()]
}
pub fn whole_machine_cores(&self) -> Vec<Vec<&CpuInfo>> {
let mut cpus: Vec<&CpuInfo> = self.data.iter().collect();
cpus.sort_by_key(|c| c.core);
// TODO: implicit assumption that we have two HTs
vec![cpus.into_iter().step(2).collect()]
}
}
// TODO: Should ideally be generic:
pub fn socket_uncore_devices() -> Vec<&'static str> {
vec![
"uncore_ha_0",
"uncore_imc_0",
"uncore_imc_1",
"uncore_imc_2",
"uncore_imc_3",
"uncore_pcu",
"uncore_r2pcie",
"uncore_r3qpi_0",
"uncore_r3qpi_1",
"uncore_ubox",
]
}
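// A quick usage sketch (assumes a Linux host with `lscpu` and `numactl` installed, since
// `MachineTopology::new` shells out to both): list the CPUs that share each socket.
#[allow(dead_code)]
fn print_cpus_per_socket() {
    let topo = MachineTopology::new();
    for socket in topo.sockets() {
        let cpus: Vec<Cpu> = topo.cpus_on_socket(socket).iter().map(|c| c.cpu).collect();
        println!("socket {}: cpus {:?}", socket, cpus);
    }
}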
| true |
| 7763aebdd49fce200baf769385b60872b0474a92 | Rust | aguang-xyz/leetcode-rust | /src/solution/s0044_wildcard_matching.rs | UTF-8 | 2,002 | 3.453125 | 3 | [] | no_license |
pub struct Solution {}
impl Solution {
pub fn is_match(s: String, p: String) -> bool {
let s: Vec<char> = s.chars().collect();
let p: Vec<char> = p.chars().collect();
let is_match = |s, p| s == p || p == '?';
let mut f: Vec<Vec<bool>> = vec![vec![false; p.len() + 1]; s.len() + 1];
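// f[i][j] is true iff the string suffix s[i..] matches the pattern suffix p[j..];
// the table is filled bottom-up, starting from the pair of empty suffixes.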
for i in (0..(s.len() + 1)).rev() {
for j in (0..(p.len() + 1)).rev() {
if i == s.len() && j == p.len() {
f[i][j] = true;
} else if i == s.len() {
f[i][j] = p[j] == '*' && f[i][j + 1];
} else if j != p.len() {
if p[j] == '*' {
f[i][j] = f[i][j + 1];
for k in i..s.len() {
f[i][j] = f[i][j] || f[k + 1][j + 1];
}
} else {
f[i][j] = is_match(s[i], p[j]) && f[i + 1][j + 1];
}
}
}
}
f[0][0]
}
}
#[cfg(test)]
mod tests {
use super::Solution;
#[test]
fn test_0044() {
assert_eq!(
Solution::is_match(String::from("aa"), String::from("a")),
false
);
assert_eq!(
Solution::is_match(String::from("aa"), String::from("*")),
true
);
assert_eq!(
Solution::is_match(String::from("cb"), String::from("?a")),
false
);
assert_eq!(
Solution::is_match(String::from("adceb"), String::from("a*b")),
true
);
assert_eq!(
Solution::is_match(String::from("acdcb"), String::from("a*c?b")),
false
);
assert_eq!(
Solution::is_match(String::from(""), String::from("*")),
true
);
assert_eq!(
Solution::is_match(String::from("a"), String::from("a*")),
true
);
}
}
| true |
| 0aa4409dc4ae6c39d1fe4e968c1caab612f6a70b | Rust | yuichi-morisaki/rust-tutorial | /book/ch19_advanced-features/never-type/src/main.rs | UTF-8 | 604 | 3.6875 | 4 | [] | no_license |
fn bar() -> ! {
loop {}
}
fn main() {
let guess = String::from("Hello");
/* error: `match` arms have incompatible types
let guess: u32 = match guess.trim().parse() {
Ok(num) => num,
Err(_) => "hello",
};
*/
loop {
let guess: u32 = match guess.trim().parse() {
Ok(num) => num,
Err(_) => continue, // returns !
};
}
}
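// The block below mirrors `Option::unwrap` from the standard library (as shown in the book) to
// illustrate that `panic!` has the never type `!`, letting both match arms unify to `T`. As an
// inherent impl on a foreign type it is illustrative only and won't compile outside std.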
impl<T> Option<T> {
pub fn unwrap(self) -> T {
match self {
Some(val) => val,
None => panic!("called `Option::unwrap()` on a `None` value"),
}
}
}
| true |
| fd018b49933addc0dde2e56e6c83814c6f982b22 | Rust | franleplant/frd-lisp | /src/lisp_value.rs | UTF-8 | 3,527 | 3.296875 | 3 | [] | no_license |
use log::debug;
use std::cmp::Ordering;
use std::collections::HashMap;
use std::fmt;
use std::rc::Rc;
use crate::ast::Expr;
use crate::env::Env;
use crate::eval::eval_expression;
//TODO rename this to value
#[derive(Clone)]
pub enum LispValue {
Nill,
Id(String),
Int(i64),
Bool(Bool),
Intrinsic(fn(&[Rc<LispValue>]) -> Rc<LispValue>),
Func(Func),
}
impl LispValue {
pub fn unwrap_number(&self) -> &i64 {
match self {
LispValue::Int(ref num) => num,
_ => panic!("unwrap_number called on a non-numeric LispValue"),
}
}
}
impl PartialEq for LispValue {
fn eq(&self, other: &LispValue) -> bool {
use self::LispValue::*;
match (self, other) {
(LispValue::Nill, LispValue::Nill) => true,
(Int(ref n1), Int(ref n2)) => n1 == n2,
(Id(ref id1), Id(ref id2)) => *id1 == *id2,
(Bool(ref bool1), Bool(ref bool2)) => bool1 == bool2,
_ => false,
}
}
}
impl Eq for LispValue {}
impl Ord for LispValue {
fn cmp(&self, other: &LispValue) -> Ordering {
use self::LispValue::*;
match (self, other) {
(Int(ref n1), Int(ref n2)) => n1.cmp(n2),
// TODO is this the right thing to do?
_ => Ordering::Equal,
}
}
}
impl PartialOrd for LispValue {
fn partial_cmp(&self, other: &LispValue) -> Option<Ordering> {
use self::LispValue::*;
match (self, other) {
(Int(ref n1), Int(ref n2)) => Some(n1.cmp(n2)),
// TODO is this the right thing to do?
_ => None,
}
}
}
impl fmt::Debug for LispValue {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
LispValue::Nill => write!(f, "Nill"),
LispValue::Intrinsic(_) => write!(f, "intrinsic"),
LispValue::Func(func) => write!(f, "#func {}", func.get_name()),
LispValue::Int(num) => write!(f, "{}", num),
LispValue::Id(str) => write!(f, "{}", str),
LispValue::Bool(lisp_bool) => match lisp_bool {
Bool::True => write!(f, "true"),
Bool::False => write!(f, "false"),
},
}
}
}
#[derive(Clone, PartialEq, Eq)]
pub enum Bool {
True,
False,
}
#[derive(Clone)]
pub struct Func {
name: String,
arg_names: Vec<String>,
body: Vec<Expr>,
env: Rc<Env>,
}
impl Func {
pub fn new(name: String, arg_names: Vec<String>, body: Vec<Expr>, env: Rc<Env>) -> Func {
Func {
name,
arg_names,
body,
env,
}
}
pub fn call(&self, arg_values: Vec<Rc<LispValue>>) -> Result<Rc<LispValue>, String> {
let local_env: HashMap<String, Rc<LispValue>> =
self.arg_names.clone().into_iter().zip(arg_values).collect();
let env = Rc::new(self.env.new(self.env.clone(), local_env));
// TODO evaluate multiple Expr bodies
let result = eval_expression(&self.body[0], env.clone())?;
debug!("func call {:?}", env);
debug!("func result {:?}", result);
Ok(result)
}
pub fn get_name(&self) -> &String {
&self.name
}
}
impl Ord for Func {
fn cmp(&self, _other: &Func) -> Ordering {
Ordering::Equal
}
}
impl PartialOrd for Func {
fn partial_cmp(&self, _other: &Func) -> Option<Ordering> {
None
}
}
impl PartialEq for Func {
fn eq(&self, _other: &Func) -> bool {
false
}
}
impl Eq for Func {}
| true |
| c7f7d376deea874e38ba29466f00c98c9ce799e0 | Rust | jeremyletang/whl-puller | /src/unesco_xml.rs | UTF-8 | 1,746 | 2.640625 | 3 | ["Apache-2.0", "MIT"] | permissive |
// Copyright 2017 Jeremy Letang.
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use hyper::{self, Client};
use std::error::Error;
use std::fs::File;
use std::io::Read;
const UNESCO_XML: &'static str = "http://whc.unesco.org/en/list/xml/";
pub fn get(file: Option<String>) -> Result<String, String> {
match file {
Some(f) => from_file(f),
None => from_download(),
}
}
pub fn from_file(file: String) -> Result<String, String> {
info!("reading unesco xml: '{}'", file);
match File::open(file) {
Ok(mut f) => {
let mut buf = String::new();
match f.read_to_string(&mut buf) {
Ok(_) => Ok(buf),
Err(e) => Err(format!("unable to read file, {}", e))
}
},
Err(e) => Err(format!("unable to open file: {}", e.description())),
}
}
pub fn from_download() -> Result<String, String> {
info!("downloading unesco xml");
match Client::new().get(UNESCO_XML).send() {
Ok(mut r) => {
if r.status == hyper::Ok {
let mut buf = String::new();
match r.read_to_string(&mut buf) {
Ok(_) => Ok(buf),
Err(e) => Err(format!("unable to read http request payload, try again, {}", e))
}
} else {
Err(format!("unexpected http status, try again"))
}
},
Err(e) => Err(format!("unable to get whc xml, {}", e.description()))
}
}
| true |
| fa7594c8d116f7f8b865fa2e2b5911e963d07407 | Rust | jjsloboda/aoc2019 | /intcode/src/lib.rs | UTF-8 | 1,281 | 3.453125 | 3 | [] | no_license |
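/// Runs an Intcode program in place. Each instruction occupies four cells,
/// `[opcode, src1, src2, dst]`, where the operands are addresses: opcode 1 adds,
/// opcode 2 multiplies, and opcode 99 halts, returning whatever is left at address 0.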
pub fn execute(mem: &mut Vec<i32>) -> i32 {
let mut cursor = 0;
loop {
if mem[cursor] == 99 {
return mem[0];
}
let loc1 = mem[cursor+1] as usize;
let loc2 = mem[cursor+2] as usize;
let loc3 = mem[cursor+3] as usize;
match mem[cursor] {
1 => mem[loc3] = mem[loc1] + mem[loc2],
2 => mem[loc3] = mem[loc1] * mem[loc2],
_ => panic!("something went wrong"),
};
cursor += 4;
}
}
pub fn run_with_inputs(noun: i32, verb: i32, prgm: &Vec<i32>) -> i32 {
let mut mem = prgm.clone();
mem[1] = noun; mem[2] = verb;
execute(&mut mem)
}
#[cfg(test)]
mod tests {
use super::{execute, run_with_inputs};
#[test]
fn first_examples() {
assert_eq!(2, execute(&mut vec![1,0,0,0,99]));
assert_eq!(30, execute(&mut vec![1,1,1,4,99,5,6,0,99]));
assert_eq!(3500, execute(&mut vec![1,9,10,3,2,3,11,0,99,30,40,50]));
}
#[test]
fn try_inputs() {
use std::fs::read_to_string;
let input = read_to_string("input.txt").unwrap();
let mem: Vec<i32> = input.trim().split(',').map(
|x| x.parse::<i32>().unwrap()).collect();
assert_eq!(10566835, run_with_inputs(12, 2, &mem));
}
}
| true |
| f537626a021d03ef487ab6ee082873cc5eb43750 | Rust | vernonrj/advent2020 | /day-2/src/main.rs | UTF-8 | 7,595 | 3.109375 | 3 | [] | no_license |
#[macro_use] extern crate lazy_static;
use std::{error::Error, ops::{RangeInclusive}};
use std::io::{self, Read, BufRead, BufReader};
use std::fs::File;
use std::str::FromStr;
use clap::{App, Arg};
use regex::Regex;
fn main() -> Result<(), Box<dyn Error>> {
let matches = App::new("day-2")
.arg(Arg::with_name("input")
.required(true)
.help("the input to the program"))
.arg(Arg::with_name("policy")
.short("p")
.long("policy")
.takes_value(true)
.possible_values(&["range", "xor"])
.help("policy to use"))
.get_matches();
let variant = match matches.value_of("policy") {
Some("range") | None => PolicyVariant::OccurrenceRange,
Some("xor") => PolicyVariant::XorPosition,
Some(bad) => panic!("invalid value to --policy: {}", bad),
};
let f = File::open(matches.value_of("input").unwrap())?;
let input: Vec<String> = get_input(f)?;
let passwords: Vec<(String, Policy)> = parse_input(input).unwrap();
let valid: Vec<(String, Policy)> = passwords.into_iter().filter(|(pass, policy)| {
policy.is_valid(&pass, variant)
}).collect();
println!("number of valid passwords: {}", valid.len());
Ok(())
}
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct Policy {
pub letter: String,
pub occurrences: RangeInclusive<i64>,
}
#[derive(Debug, Copy, Clone, PartialOrd, Ord, PartialEq, Eq, Hash)]
pub enum PolicyVariant {
/// letters must occur within the range given
OccurrenceRange,
/// a letter must occur in either the start or end position, not both
XorPosition,
}
impl Policy {
pub fn is_valid(&self, password: &str, variant: PolicyVariant) -> bool {
match variant {
PolicyVariant::OccurrenceRange => {
let policy_occs = password.matches(&self.letter).count();
self.occurrences.contains(&(policy_occs as i64))
},
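// The two policy numbers are 1-based character positions; the password is valid
// only if exactly one of those positions holds the policy letter.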
PolicyVariant::XorPosition => {
let chars: Vec<char> = password.chars().collect();
let start = match ((*self.occurrences.start()) as usize).checked_sub(1) {
Some(n) => n,
None => return false,
};
let end = match ((*self.occurrences.end()) as usize).checked_sub(1) {
Some(n) => n,
None => return false,
};
match (chars.get(start), chars.get(end)) {
(Some(c), Some(d)) if c == d => false,
(Some(c), _) if c.to_string() == self.letter => true,
(_, Some(c)) if c.to_string() == self.letter => true,
(_, _) => false,
}
}
}
}
}
impl FromStr for Policy {
type Err = Box<dyn Error>;
fn from_str(s: &str) -> Result<Self, Self::Err> {
lazy_static! {
static ref RE: Regex = Regex::new(r"(?P<low>\d+)-(?P<high>\d+)\s+(?P<chars>\w+):\s+(?P<password>\w+)").unwrap();
}
let m = match RE.captures(s) {
Some(cap) => cap,
None => return Err(format!("invalid input: `{}`", s).into()),
};
let low = m.name("low").and_then(|l| l.as_str().parse::<i64>().ok());
let high = m.name("high").and_then(|h| h.as_str().parse::<i64>().ok());
let chars = m.name("chars").map(|c| c.as_str());
match (low, high, chars) {
(Some(l), Some(h), Some(c)) => Ok(Policy {
letter: c.to_string(),
occurrences: l..=h,
}),
_ => Err(format!("failed to parse into a policy").into())
}
}
}
pub fn get_input(reader: impl Read) -> io::Result<Vec<String>> {
let data: Result<Vec<String>, _> = BufReader::new(reader)
.lines()
.collect();
data
}
pub fn parse_input(input: impl IntoIterator<Item=String>) -> Result<Vec<(String, Policy)>, Box<dyn Error>> {
let mut output = Vec::new();
for line in input {
let password = get_password_from_line(&line)?;
let policy = Policy::from_str(&line)?;
output.push((password.to_string(), policy));
}
Ok(output)
}
pub fn get_password_from_line(line: &str) -> Result<&str, Box<dyn Error>> {
lazy_static! {
static ref RE: Regex = Regex::new(r".*:\s+(?P<password>\w+)").unwrap();
}
let maybe_password = RE.captures(line).and_then(|cap| cap.name("password"));
match maybe_password {
Some(m) => Ok(m.as_str()),
None => Err(format!("failed to get password from line `{}`", line).into()),
}
}
#[test]
fn test_part_1() {
use std::io::Cursor;
let data = include_str!("../input.txt");
let data = get_input(Cursor::new(data)).unwrap();
let passwords: Vec<(String, Policy)> = parse_input(data).unwrap();
let valid: Vec<(String, Policy)> = passwords.into_iter().filter(|(pass, policy)| {
policy.is_valid(&pass, PolicyVariant::OccurrenceRange)
}).collect();
assert_eq!(valid.len(), 638);
}
#[test]
fn test_part_2() {
use std::io::Cursor;
let data = include_str!("../input.txt");
let data = get_input(Cursor::new(data)).unwrap();
let passwords: Vec<(String, Policy)> = parse_input(data).unwrap();
let valid: Vec<(String, Policy)> = passwords.into_iter().filter(|(pass, policy)| {
policy.is_valid(&pass, PolicyVariant::XorPosition)
}).collect();
assert_eq!(valid.len(), 699);
}
#[test]
fn test_get_password_from_line() {
assert_eq!(get_password_from_line("1-3 a: abcde").unwrap(), "abcde");
assert_eq!(get_password_from_line("1-3 b: cdefg").unwrap(), "cdefg");
assert_eq!(get_password_from_line("2-9 c: ccccccccc").unwrap(), "ccccccccc");
}
#[test]
fn test_policy_from_str() {
assert_eq!(Policy::from_str("1-3 a: abcde").unwrap(), Policy { letter: "a".to_string(), occurrences: 1..=3 });
assert_eq!(Policy::from_str("1-3 b: cdefg").unwrap(), Policy { letter: "b".to_string(), occurrences: 1..=3 });
assert_eq!(Policy::from_str("2-9 c: ccccccccc").unwrap(), Policy { letter: "c".to_string(), occurrences: 2..=9 });
}
#[test]
fn test_parse_input() {
let input = vec![
"1-3 a: abcde".to_string(),
"1-3 b: cdefg".to_string(),
"2-9 c: ccccccccc".to_string(),
];
assert_eq!(parse_input(input).unwrap(), vec![
("abcde".to_string(), Policy { letter: "a".to_string(), occurrences: 1..=3 }),
("cdefg".to_string(), Policy { letter: "b".to_string(), occurrences: 1..=3 }),
("ccccccccc".to_string(), Policy { letter: "c".to_string(), occurrences: 2..=9 }),
])
}
#[test]
fn test_is_policy_valid() {
assert!(Policy { letter: "a".to_string(), occurrences: 1..=3 }.is_valid("abcde", PolicyVariant::OccurrenceRange));
assert!(Policy { letter: "a".to_string(), occurrences: 1..=3 }.is_valid("aaabcde", PolicyVariant::OccurrenceRange));
assert!(! Policy { letter: "b".to_string(), occurrences: 1..=3 }.is_valid("cdefg", PolicyVariant::OccurrenceRange));
assert!(Policy { letter: "c".to_string(), occurrences: 2..=9 }.is_valid("ccccccccc", PolicyVariant::OccurrenceRange));
assert!(Policy { letter: "a".to_string(), occurrences: 1..=3 }.is_valid("abcde", PolicyVariant::XorPosition));
assert!(! Policy { letter: "b".to_string(), occurrences: 1..=3 }.is_valid("cdefg", PolicyVariant::XorPosition));
assert!(! Policy { letter: "c".to_string(), occurrences: 2..=9 }.is_valid("ccccccccc", PolicyVariant::XorPosition));
}
| true |
| 53b4a77f93b6f3e064b027639ec3fcd8a4112bcd | Rust | Luni-4/iced_aw | /src/style/spinner.rs | UTF-8 | 1,045 | 3.265625 | 3 | ["MIT"] | permissive |
//! Use a spinner to suggest to the user something is loading.
//!
//! *This API requires the following crate features to be activated: spinner*
use iced_style::Theme;
/// The style of a [`Spinner`](crate::native::spinner::Spinner).
#[derive(Default)]
#[allow(missing_debug_implementations)]
pub enum SpinnerStyle {
/// The default style
#[default]
Default,
/// Custom style
Custom(Box<dyn StyleSheet<Style = Theme>>),
}
/// The appearance of a [`Spinner`](crate::native::spinner::Spinner).
#[derive(Clone, Copy, Debug)]
pub struct Appearance {}
/// A set of rules that dictate the style of a [`Spinner`](crate::native::spinner::Spinner).
pub trait StyleSheet {
/// Style for the trait to use.
type Style: Default;
/// The normal appearance of a [`Spinner`](crate::native::spinner::Spinner).
fn appearance(&self, style: &Self::Style) -> Appearance;
}
impl StyleSheet for Theme {
type Style = SpinnerStyle;
fn appearance(&self, _style: &Self::Style) -> Appearance {
Appearance {}
}
}
| true |
| f13de87af6a6a78fafe00ccaf29dc4e75ee1c306 | Rust | poulenque/glium_fractal | /src/file_to_string.rs | UTF-8 | 335 | 2.703125 | 3 | [] | no_license |
use std::path::Path;
use std::fs::File;
use std::io::Read;
use std::io;
pub fn file_to_string(path: String) -> io::Result<String> {
    let path = Path::new(&path);
    // Propagate open and read errors instead of silently returning an empty string.
    let mut file = File::open(path)?;
    let mut s = String::new();
    file.read_to_string(&mut s)?;
    Ok(s)
}
| true |
| ff94bea57dcb9273b4af6463f397b323fcdbcfd7 | Rust | mvanbem/oot-explorer | /oot-explorer-segment/src/segment.rs | UTF-8 | 1,162 | 2.90625 | 3 | ["MIT"] | permissive |
use std::fmt::{self, Debug, Formatter};
use crate::SegmentError;
#[derive(Clone, Copy, Eq, Hash, Ord, PartialEq, PartialOrd)]
pub struct Segment(pub u8);
impl Segment {
pub const SCENE: Segment = Segment(0x02);
pub const ROOM: Segment = Segment(0x03);
pub const GAMEPLAY_KEEP: Segment = Segment(0x04);
pub const SELECTABLE_KEEP: Segment = Segment(0x05);
pub const OBJECT: Segment = Segment(0x06);
pub fn validate(self) -> Result<Self, SegmentError> {
if self.0 <= 0x0f {
Ok(self)
} else {
Err(SegmentError::BadSegment(self))
}
}
pub fn masked(self) -> Self {
Self(self.0 & 0x0f)
}
}
impl Debug for Segment {
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
match *self {
Segment::SCENE => write!(f, "CurrentScene"),
Segment::ROOM => write!(f, "CurrentRoom"),
Segment::GAMEPLAY_KEEP => write!(f, "GameplayKeep"),
Segment::SELECTABLE_KEEP => write!(f, "SelectableKeep"),
Segment::OBJECT => write!(f, "CurrentObject"),
_ => write!(f, "Unknown(0x{:02x})", self.0),
}
}
}
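// A small illustrative sketch of the API above: `masked` keeps only the low nibble,
// while `validate` rejects anything above 0x0f.
#[allow(dead_code)]
fn example_segment_checks() -> Result<Segment, SegmentError> {
    assert_eq!(Segment(0x1f).masked(), Segment(0x0f));
    assert!(Segment(0x10).validate().is_err());
    Segment::SCENE.validate()
}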
| true |
| 7903d935702cb60a67a2efe0df9225272317a98c | Rust | schets/atomic_utilities | /src/artificial_dep.rs | UTF-8 | 4,260 | 3.25 | 3 | ["MIT"] | permissive |
//! This module provides a function that forces an artificial data dependency
//! between two loads. Basically, the code:
//!
//! ```text
//! val = some_atomic.load(DepOrd);
//! val2_ref = &val2;
//! val2_ref ^= val;
//! val2_ref ^= val; // val2_ref is again equal to &val2, but now data-depends on val
//! loaded_val2 = *val2_ref; // Is ordered-after val as if by consume ordering
//! ```
//! is executed. This can be far faster than fences on arm and
//! power architectures, since the ordering is a result of data dependencies in
//! the pipeline and not full-on fences. This still isn't free, since you must
//! wait for the previous load to finish, but it's better than a fence.
//!
//!
//! # Example:
//! ```
//! use std::sync::atomic::{AtomicUsize, Ordering};
//! use std::sync::{Arc, Barrier};
//! use std::thread;
//! use atomic_utilities::artificial_dep::{DepOrd, dependently};
//! let num_run = 1000000;
//! let atomic_val1 = Arc::new(AtomicUsize::new(0));
//! let atomic_val2 = Arc::new(AtomicUsize::new(0));
//! let start_bar = Arc::new(Barrier::new(2));
//! let atomic_valt1 = atomic_val1.clone();
//! let atomic_valt2 = atomic_val2.clone();
//! let start_bart = start_bar.clone();
//! let to_join = thread::spawn(move || {
//! start_bart.wait();
//! for i in 0..num_run {
//! atomic_valt2.store(i, Ordering::Relaxed);
//! atomic_valt1.store(i, Ordering::Release);
//! }
//! });
//!
//! start_bar.wait();
//! for _ in 0..num_run {
//! let val1_ld = atomic_val1.load(DepOrd);
//! let val2_ld = dependently(val1_ld, &atomic_val2,
//! |dep_ref| dep_ref.load(Ordering::Relaxed));
//! assert!(val2_ld >= val1_ld); // Can fail if val2_ld is ordered_before val1_ld
//! }
//! ```
#[cfg(not(all(any(target_arch = "arm", target_arch = "aarch64"),
use_asm)))]
mod artificial_dep_inner {
use std::sync::atomic::Ordering;
pub const DEPORD: Ordering = Ordering::Acquire;
#[inline(always)]
pub fn false_dep<T>(myref: &T, _v: usize) -> &T {
myref
}
#[inline(always)]
pub fn false_dep_mut<T>(myref: &mut T, _v: usize) -> &mut T {
myref
}
}
#[cfg(all(any(target_arch = "arm", target_arch = "aarch64"),
use_asm))]
mod artificial_dep_inner {
use std::sync::atomic::Ordering;
pub const DEPORD: Ordering = Ordering::Relaxed;
#[inline(always)]
pub fn false_dep<T>(mut myref: &T, val: usize) -> &T {
unsafe {
asm!("eor $0, $0, $1
eor $0, $0, $1"
: "+r" (myref)
: "r" (val));
myref
}
}
#[inline(always)]
pub fn false_dep_mut<T>(mut myref: &mut T, val: usize) -> &mut T {
unsafe {
asm!("eor $0, $0, $1
eor $0, $0, $1"
: "+r" (myref)
: "r" (val));
myref
}
}
}
/* Once this can be tested on a power machine it's good to go
#[cfg(all(any(target_arch = "powerpc", target_arch = "powerpc64"),
use_asm))]
mod artificial_dep_inner {
use std::sync::atomic::Ordering;
pub const DEPORD: Ordering = Ordering::Relaxed;
#[inline(always)]
pub fn false_dep<T>(myref: &T, val: usize) -> &T {
asm!("xor $1 $0 $0
xor $1 $0 $0"
: "+r" (myref)
: "r" (val));
myref
}
}*/
use std::sync::atomic::Ordering;
/// The ordering that must be used for any load which has fake dependent operations
#[allow(non_upper_case_globals)]
pub const DepOrd: Ordering = artificial_dep_inner::DEPORD;
/// Ensures that loads from the value myref are ordered after the load of val.
/// Val can be anything convertable to a usize, or any usize calculated from the
/// base load.
#[inline(always)]
pub fn dependently<T, R, F: FnOnce(&T) -> R>(val: usize, myref: &T, myfn: F) -> R {
myfn(artificial_dep_inner::false_dep(myref, val))
}
/// Ensures that loads from the value myref are ordered after the load of val.
/// Val can be anything convertable to a usize, or any usize calculated from the
/// base load.
#[inline(always)]
pub fn dependently_mut<T, R, F: FnOnce(&mut T) -> R>(val: usize, myref: &mut T, myfn: F) -> R {
myfn(artificial_dep_inner::false_dep_mut(myref, val))
}
| true |
| 33d25fb48dfaa2cef467e49c62029fd51ff8613a | Rust | oxigraph/oxigraph | /lib/sparesults/src/xml.rs | UTF-8 | 26,347 | 2.578125 | 3 | ["MIT", "LicenseRef-scancode-unknown-license-reference", "Apache-2.0"] | permissive |
//! Implementation of [SPARQL Query Results XML Format](https://www.w3.org/TR/rdf-sparql-XMLres/)
use crate::error::{ParseError, SyntaxError};
use oxrdf::vocab::rdf;
use oxrdf::Variable;
use oxrdf::*;
use quick_xml::events::{BytesDecl, BytesEnd, BytesStart, BytesText, Event};
use quick_xml::{Reader, Writer};
use std::borrow::Cow;
use std::collections::BTreeMap;
use std::io::{self, BufRead, Write};
use std::str;
pub fn write_boolean_xml_result<W: Write>(sink: W, value: bool) -> io::Result<W> {
do_write_boolean_xml_result(sink, value).map_err(map_xml_error)
}
fn do_write_boolean_xml_result<W: Write>(sink: W, value: bool) -> Result<W, quick_xml::Error> {
let mut writer = Writer::new(sink);
writer.write_event(Event::Decl(BytesDecl::new("1.0", None, None)))?;
writer
.create_element("sparql")
.with_attribute(("xmlns", "http://www.w3.org/2005/sparql-results#"))
.write_inner_content(|writer| {
writer
.create_element("head")
.write_text_content(BytesText::new(""))?
.create_element("boolean")
.write_text_content(BytesText::new(if value { "true" } else { "false" }))?;
Ok(())
})?;
Ok(writer.into_inner())
}
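// A tiny usage sketch, assuming an in-memory sink: the boolean writer emits a complete
// SPARQL results document of the form `<sparql ...><head></head><boolean>true</boolean></sparql>`.
#[allow(dead_code)]
fn example_boolean_document() -> io::Result<String> {
    let bytes = write_boolean_xml_result(Vec::new(), true)?;
    Ok(String::from_utf8(bytes).expect("the writer only emits UTF-8"))
}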
pub struct XmlSolutionsWriter<W: Write> {
writer: Writer<W>,
}
impl<W: Write> XmlSolutionsWriter<W> {
pub fn start(sink: W, variables: &[Variable]) -> io::Result<Self> {
Self::do_start(sink, variables).map_err(map_xml_error)
}
fn do_start(sink: W, variables: &[Variable]) -> Result<Self, quick_xml::Error> {
let mut writer = Writer::new(sink);
writer.write_event(Event::Decl(BytesDecl::new("1.0", None, None)))?;
let mut sparql_open = BytesStart::new("sparql");
sparql_open.push_attribute(("xmlns", "http://www.w3.org/2005/sparql-results#"));
writer.write_event(Event::Start(sparql_open))?;
writer
.create_element("head")
.write_inner_content(|writer| {
for variable in variables {
writer
.create_element("variable")
.with_attribute(("name", variable.as_str()))
.write_empty()?;
}
Ok(())
})?;
writer.write_event(Event::Start(BytesStart::new("results")))?;
Ok(Self { writer })
}
pub fn write<'a>(
&mut self,
solution: impl IntoIterator<Item = (VariableRef<'a>, TermRef<'a>)>,
) -> io::Result<()> {
self.do_write(solution).map_err(map_xml_error)
}
fn do_write<'a>(
&mut self,
solution: impl IntoIterator<Item = (VariableRef<'a>, TermRef<'a>)>,
) -> Result<(), quick_xml::Error> {
self.writer
.create_element("result")
.write_inner_content(|writer| {
for (variable, value) in solution {
writer
.create_element("binding")
.with_attribute(("name", variable.as_str()))
.write_inner_content(|writer| {
write_xml_term(value, writer)?;
Ok(())
})?;
}
Ok(())
})?;
Ok(())
}
pub fn finish(self) -> io::Result<W> {
let mut inner = self.do_finish().map_err(map_xml_error)?;
inner.flush()?;
Ok(inner)
}
fn do_finish(mut self) -> Result<W, quick_xml::Error> {
self.writer
.write_event(Event::End(BytesEnd::new("results")))?;
self.writer
.write_event(Event::End(BytesEnd::new("sparql")))?;
Ok(self.writer.into_inner())
}
}
fn write_xml_term(
term: TermRef<'_>,
writer: &mut Writer<impl Write>,
) -> Result<(), quick_xml::Error> {
match term {
TermRef::NamedNode(uri) => {
writer
.create_element("uri")
.write_text_content(BytesText::new(uri.as_str()))?;
}
TermRef::BlankNode(bnode) => {
writer
.create_element("bnode")
.write_text_content(BytesText::new(bnode.as_str()))?;
}
TermRef::Literal(literal) => {
let element = writer.create_element("literal");
let element = if let Some(language) = literal.language() {
element.with_attribute(("xml:lang", language))
} else if !literal.is_plain() {
element.with_attribute(("datatype", literal.datatype().as_str()))
} else {
element
};
element.write_text_content(BytesText::new(literal.value()))?;
}
#[cfg(feature = "rdf-star")]
TermRef::Triple(triple) => {
writer
.create_element("triple")
.write_inner_content(|writer| {
writer
.create_element("subject")
.write_inner_content(|writer| {
write_xml_term(triple.subject.as_ref().into(), writer)
})?;
writer
.create_element("predicate")
.write_inner_content(|writer| {
write_xml_term(triple.predicate.as_ref().into(), writer)
})?;
writer
.create_element("object")
.write_inner_content(|writer| {
write_xml_term(triple.object.as_ref(), writer)
})?;
Ok(())
})?;
}
}
Ok(())
}
pub enum XmlQueryResultsReader<R: BufRead> {
Solutions {
variables: Vec<Variable>,
solutions: XmlSolutionsReader<R>,
},
Boolean(bool),
}
impl<R: BufRead> XmlQueryResultsReader<R> {
pub fn read(source: R) -> Result<Self, ParseError> {
enum State {
Start,
Sparql,
Head,
AfterHead,
Boolean,
}
let mut reader = Reader::from_reader(source);
reader.trim_text(true);
reader.expand_empty_elements(true);
let mut buffer = Vec::default();
let mut variables = Vec::default();
let mut state = State::Start;
//Read header
loop {
buffer.clear();
let event = reader.read_event_into(&mut buffer)?;
match event {
Event::Start(event) => match state {
State::Start => {
if event.local_name().as_ref() == b"sparql" {
state = State::Sparql;
} else {
return Err(SyntaxError::msg(format!("Expecting <sparql> tag, found <{}>", decode(&reader, &event.name())?)).into());
}
}
State::Sparql => {
if event.local_name().as_ref() == b"head" {
state = State::Head;
} else {
return Err(SyntaxError::msg(format!("Expecting <head> tag, found <{}>",decode(&reader, &event.name())?)).into());
}
}
State::Head => {
if event.local_name().as_ref() == b"variable" {
let name = event.attributes()
.filter_map(Result::ok)
.find(|attr| attr.key.local_name().as_ref() == b"name")
.ok_or_else(|| SyntaxError::msg("No name attribute found for the <variable> tag"))?
.decode_and_unescape_value(&reader)?;
let variable = Variable::new(name).map_err(|e| SyntaxError::msg(format!("Invalid variable name: {e}")))?;
if variables.contains(&variable) {
return Err(SyntaxError::msg(format!(
"The variable {variable} is declared twice"
))
.into());
}
variables.push(variable);
} else if event.local_name().as_ref() == b"link" {
// no op
} else {
return Err(SyntaxError::msg(format!("Expecting <variable> or <link> tag, found <{}>", decode(&reader, &event.name())?)).into());
}
}
State::AfterHead => {
if event.local_name().as_ref() == b"boolean" {
state = State::Boolean
} else if event.local_name().as_ref() == b"results" {
let mut mapping = BTreeMap::default();
for (i, var) in variables.iter().enumerate() {
mapping.insert(var.clone().into_string(), i);
}
return Ok(Self::Solutions { variables,
solutions: XmlSolutionsReader {
reader,
buffer,
mapping,
stack: Vec::new(),
subject_stack: Vec::new(),
predicate_stack: Vec::new(),
object_stack: Vec::new(),
}});
} else if event.local_name().as_ref() != b"link" && event.local_name().as_ref() != b"results" && event.local_name().as_ref() != b"boolean" {
return Err(SyntaxError::msg(format!("Expecting sparql tag, found <{}>", decode(&reader, &event.name())?)).into());
}
}
State::Boolean => return Err(SyntaxError::msg(format!("Unexpected tag inside of <boolean> tag: <{}>", decode(&reader, &event.name())?)).into())
},
Event::Text(event) => {
let value = event.unescape()?;
return match state {
State::Boolean => {
return if value == "true" {
Ok(Self::Boolean(true))
} else if value == "false" {
Ok(Self::Boolean(false))
} else {
Err(SyntaxError::msg(format!("Unexpected boolean value. Found '{value}'")).into())
};
}
_ => Err(SyntaxError::msg(format!("Unexpected textual value found: '{value}'")).into())
};
},
Event::End(event) => {
if let State::Head = state {
if event.local_name().as_ref() == b"head" {
state = State::AfterHead
}
} else {
return Err(SyntaxError::msg("Unexpected early file end. All results file should have a <head> and a <result> or <boolean> tag").into());
}
},
Event::Eof => return Err(SyntaxError::msg("Unexpected early file end. All results file should have a <head> and a <result> or <boolean> tag").into()),
_ => (),
}
}
}
}
enum State {
Start,
Result,
Binding,
Uri,
BNode,
Literal,
Triple,
Subject,
Predicate,
Object,
End,
}
pub struct XmlSolutionsReader<R: BufRead> {
reader: Reader<R>,
buffer: Vec<u8>,
mapping: BTreeMap<String, usize>,
stack: Vec<State>,
subject_stack: Vec<Term>,
predicate_stack: Vec<Term>,
object_stack: Vec<Term>,
}
impl<R: BufRead> XmlSolutionsReader<R> {
pub fn read_next(&mut self) -> Result<Option<Vec<Option<Term>>>, ParseError> {
let mut state = State::Start;
let mut new_bindings = vec![None; self.mapping.len()];
let mut current_var = None;
let mut term: Option<Term> = None;
let mut lang = None;
let mut datatype = None;
loop {
self.buffer.clear();
let event = self.reader.read_event_into(&mut self.buffer)?;
match event {
Event::Start(event) => match state {
State::Start => {
if event.local_name().as_ref() == b"result" {
state = State::Result;
} else {
return Err(SyntaxError::msg(format!(
"Expecting <result>, found <{}>",
decode(&self.reader, &event.name())?
))
.into());
}
}
State::Result => {
if event.local_name().as_ref() == b"binding" {
match event
.attributes()
.filter_map(Result::ok)
.find(|attr| attr.key.local_name().as_ref() == b"name")
{
Some(attr) => {
current_var = Some(
attr.decode_and_unescape_value(&self.reader)?.to_string(),
)
}
None => {
return Err(SyntaxError::msg(
"No name attribute found for the <binding> tag",
)
.into());
}
}
state = State::Binding;
} else {
return Err(SyntaxError::msg(format!(
"Expecting <binding>, found <{}>",
decode(&self.reader, &event.name())?
))
.into());
}
}
State::Binding | State::Subject | State::Predicate | State::Object => {
if term.is_some() {
return Err(SyntaxError::msg(
"There is already a value for the current binding",
)
.into());
}
self.stack.push(state);
if event.local_name().as_ref() == b"uri" {
state = State::Uri;
} else if event.local_name().as_ref() == b"bnode" {
state = State::BNode;
} else if event.local_name().as_ref() == b"literal" {
for attr in event.attributes().flatten() {
if attr.key.as_ref() == b"xml:lang" {
lang = Some(
attr.decode_and_unescape_value(&self.reader)?.to_string(),
);
} else if attr.key.local_name().as_ref() == b"datatype" {
let iri = attr.decode_and_unescape_value(&self.reader)?;
datatype =
Some(NamedNode::new(iri.to_string()).map_err(|e| {
SyntaxError::msg(format!(
"Invalid datatype IRI '{iri}': {e}"
))
})?);
}
}
state = State::Literal;
} else if event.local_name().as_ref() == b"triple" {
state = State::Triple;
} else {
return Err(SyntaxError::msg(format!(
"Expecting <uri>, <bnode> or <literal> found <{}>",
decode(&self.reader, &event.name())?
))
.into());
}
}
State::Triple => {
if event.local_name().as_ref() == b"subject" {
state = State::Subject
} else if event.local_name().as_ref() == b"predicate" {
state = State::Predicate
} else if event.local_name().as_ref() == b"object" {
state = State::Object
} else {
return Err(SyntaxError::msg(format!(
"Expecting <subject>, <predicate> or <object> found <{}>",
decode(&self.reader, &event.name())?
))
.into());
}
}
_ => (),
},
Event::Text(event) => {
let data = event.unescape()?;
match state {
State::Uri => {
term = Some(
NamedNode::new(data.to_string())
.map_err(|e| {
SyntaxError::msg(format!("Invalid IRI value '{data}': {e}"))
})?
.into(),
)
}
State::BNode => {
term = Some(
BlankNode::new(data.to_string())
.map_err(|e| {
SyntaxError::msg(format!(
"Invalid blank node value '{data}': {e}"
))
})?
.into(),
)
}
State::Literal => {
term = Some(build_literal(data, lang.take(), datatype.take())?.into());
}
_ => {
return Err(SyntaxError::msg(format!(
"Unexpected textual value found: {data}"
))
.into());
}
}
}
Event::End(_) => match state {
State::Start => state = State::End,
State::Result => return Ok(Some(new_bindings)),
State::Binding => {
if let Some(var) = ¤t_var {
if let Some(var) = self.mapping.get(var) {
new_bindings[*var] = term.take()
} else {
return Err(
SyntaxError::msg(format!("The variable '{var}' is used in a binding but not declared in the variables list")).into()
);
}
} else {
return Err(SyntaxError::msg("No name found for <binding> tag").into());
}
state = State::Result;
}
State::Subject => {
if let Some(subject) = term.take() {
self.subject_stack.push(subject)
}
state = State::Triple;
}
State::Predicate => {
if let Some(predicate) = term.take() {
self.predicate_stack.push(predicate)
}
state = State::Triple;
}
State::Object => {
if let Some(object) = term.take() {
self.object_stack.push(object)
}
state = State::Triple;
}
State::Uri => {
state = self
.stack
.pop()
.ok_or_else(|| SyntaxError::msg("Empty stack"))?
}
State::BNode => {
if term.is_none() {
//We default to a random bnode
term = Some(BlankNode::default().into())
}
state = self
.stack
.pop()
.ok_or_else(|| SyntaxError::msg("Empty stack"))?
}
State::Literal => {
if term.is_none() {
//We default to the empty literal
term = Some(build_literal("", lang.take(), datatype.take())?.into())
}
state = self
.stack
.pop()
.ok_or_else(|| SyntaxError::msg("Empty stack"))?;
}
State::Triple => {
#[cfg(feature = "rdf-star")]
if let (Some(subject), Some(predicate), Some(object)) = (
self.subject_stack.pop(),
self.predicate_stack.pop(),
self.object_stack.pop(),
) {
term = Some(
Triple::new(
match subject {
Term::NamedNode(subject) => subject.into(),
Term::BlankNode(subject) => subject.into(),
Term::Triple(subject) => Subject::Triple(subject),
Term::Literal(_) => {
return Err(SyntaxError::msg(
"The <subject> value should not be a <literal>",
)
.into())
}
},
match predicate {
Term::NamedNode(predicate) => predicate,
_ => {
return Err(SyntaxError::msg(
"The <predicate> value should be an <uri>",
)
.into())
}
},
object,
)
.into(),
);
state = self
.stack
.pop()
.ok_or_else(|| SyntaxError::msg("Empty stack"))?;
} else {
return Err(
SyntaxError::msg("A <triple> should contain a <subject>, a <predicate> and an <object>").into()
);
}
#[cfg(not(feature = "rdf-star"))]
{
return Err(SyntaxError::msg(
"The <triple> tag is only supported with RDF-star",
)
.into());
}
}
State::End => (),
},
Event::Eof => return Ok(None),
_ => (),
}
}
}
}
fn build_literal(
value: impl Into<String>,
lang: Option<String>,
datatype: Option<NamedNode>,
) -> Result<Literal, ParseError> {
match lang {
Some(lang) => {
if let Some(datatype) = datatype {
if datatype.as_ref() != rdf::LANG_STRING {
return Err(SyntaxError::msg(format!(
"xml:lang value '{lang}' provided with the datatype {datatype}"
))
.into());
}
}
Literal::new_language_tagged_literal(value, &lang).map_err(|e| {
SyntaxError::msg(format!("Invalid xml:lang value '{lang}': {e}")).into()
})
}
None => Ok(if let Some(datatype) = datatype {
Literal::new_typed_literal(value, datatype)
} else {
Literal::new_simple_literal(value)
}),
}
}
fn decode<'a, T>(
reader: &Reader<T>,
data: &'a impl AsRef<[u8]>,
) -> Result<Cow<'a, str>, ParseError> {
Ok(reader.decoder().decode(data.as_ref())?)
}
fn map_xml_error(error: quick_xml::Error) -> io::Error {
match error {
quick_xml::Error::Io(error) => io::Error::new(error.kind(), error),
quick_xml::Error::UnexpectedEof(_) => io::Error::new(io::ErrorKind::UnexpectedEof, error),
_ => io::Error::new(io::ErrorKind::InvalidData, error),
}
}
| true |
| 9d66732197179cfb0b8bd99d9de19d9f1f841c78 | Rust | foxfriends/root | /src/models/cult.rs | UTF-8 | 1,201 | 2.734375 | 3 | [] | no_license |
#![allow(clippy::new_without_default)]
use super::*;
use sqlx::{postgres::PgConnection, query, query_as};
#[derive(Clone, serde::Serialize, serde::Deserialize)]
#[serde(rename = "cult")]
pub struct Cult {
faction: FactionId,
outcast: Option<Suit>,
hated_outcast: bool,
}
impl Cult {
pub fn new() -> Self {
Self {
faction: FactionId::Cult,
outcast: None,
hated_outcast: false,
}
}
}
#[async_trait]
impl Loadable for Option<Cult> {
async fn load(game: &str, conn: &mut PgConnection) -> sqlx::Result<Self> {
query_as!(Cult, r#"SELECT faction as "faction: _", outcast as "outcast: _", hated_outcast FROM cult WHERE game = $1"#, game).fetch_optional(conn).await
}
}
#[async_trait]
impl Overwritable for Cult {
async fn overwrite(&self, game: &str, conn: &mut PgConnection) -> sqlx::Result<()> {
query!(
r#"INSERT INTO cult (game, outcast, hated_outcast) VALUES ($1, $2, $3) ON CONFLICT (game) DO UPDATE SET outcast = $2, hated_outcast = $3"#,
game,
self.outcast as Option<Suit>,
self.hated_outcast,
).execute(conn).await?;
Ok(())
}
}
| true |
| d0e884ecc9f4e69171f7aa287b1436b8dcead62b | Rust | rbudnar/rust-mazes | /crate/src/grid/standard_grid.rs | UTF-8 | 2,319 | 3.0625 | 3 | ["MIT", "Apache-2.0"] | permissive |
use std::rc::{Rc};
use super::{grid_base::GridBase, Grid, CellFormatter};
use crate::cells::{ICellStrong, cell::{Cell, CellLinkStrong}};
use crate::rng::RngWrapper;
pub static STANDARD_GRID: &str = "standard_grid";
pub struct StandardGrid {
pub grid: GridBase,
}
impl StandardGrid {
pub fn new(rows: usize, columns: usize) -> StandardGrid {
let grid = GridBase::new(rows, columns);
let mut std_grid = StandardGrid {
grid
};
std_grid.prepare_grid();
std_grid.grid.configure_cells();
std_grid
}
}
impl Grid for StandardGrid {
fn new_cell(&self, row: usize, column: usize) -> ICellStrong {
Cell::new(row, column)
}
fn prepare_grid(&mut self) {
for i in 0..self.grid.rows {
let mut row: Vec<Option<CellLinkStrong>> = Vec::new();
for j in 0..self.grid.columns {
row.push(Some(Cell::new(i as usize, j as usize)));
}
self.grid.cells.push(row);
}
}
fn random_cell(&self, rng: &dyn RngWrapper<Shuffle=ICellStrong>) -> Option<ICellStrong> {
let row: usize = rng.gen_range(0, self.grid.rows);
let col: usize = rng.gen_range(0, self.grid.columns);
self.get_cell(row, col)
}
fn each_cell(&self) -> Vec<Option<ICellStrong>> {
self.grid.each_cell().iter()
.map(|c| Some(Rc::clone(&c.as_ref().unwrap()) as ICellStrong)).collect()
}
fn rows(&self) -> usize {
self.grid.columns
}
fn columns(&self) -> usize {
self.grid.rows
}
fn cells(&self) -> &Vec<Vec<Option<ICellStrong>>> {
self.grid.cells()
}
fn get_cell(&self, row: usize, column: usize) -> Option<ICellStrong> {
let cell = self.grid.get_cell(row, column);
Some(Rc::clone(&cell.unwrap()) as ICellStrong)
}
fn to_string(&self, contents: &dyn CellFormatter) -> String {
self.grid.to_string(contents)
}
fn size(&self) -> usize {
self.grid.rows * self.grid.columns
}
fn braid(&self, p: f64, rng: &dyn RngWrapper<Shuffle=ICellStrong>) {
self.grid.braid(p, rng);
}
fn to_web(&self, formatter: &dyn CellFormatter, colorize: bool) {
self.grid.to_web(formatter, colorize);
}
}
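// A minimal construction sketch: `size` is simply rows * columns of the underlying grid.
#[allow(dead_code)]
fn example_standard_grid_size() -> usize {
    StandardGrid::new(4, 6).size() // 24 cells
}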
| true |
| eecd2980b03ca476e8a12a74075bc6081742ffbd | Rust | digitalbitbox/bitbox02-firmware | /src/rust/vendor/chacha20/src/cipher.rs | UTF-8 | 6,725 | 2.78125 | 3 | ["MIT", "LicenseRef-scancode-warranty-disclaimer", "Apache-2.0"] | permissive |
//! ChaCha20 stream cipher implementation.
//!
//! Adapted from the `ctr` crate.
// TODO(tarcieri): figure out how to unify this with the `ctr` crate (see #95)
use crate::{
block::{Block, BUFFER_SIZE},
rounds::{Rounds, R12, R20, R8},
BLOCK_SIZE, MAX_BLOCKS,
};
use core::{
convert::TryInto,
fmt::{self, Debug},
};
use stream_cipher::{
consts::{U12, U32},
LoopError, NewStreamCipher, OverflowError, SeekNum, SyncStreamCipher, SyncStreamCipherSeek,
};
/// ChaCha8 stream cipher (reduced-round variant of ChaCha20 with 8 rounds)
pub type ChaCha8 = Cipher<R8>;
/// ChaCha12 stream cipher (reduced-round variant of ChaCha20 with 12 rounds)
pub type ChaCha12 = Cipher<R12>;
/// ChaCha20 stream cipher (RFC 8439 version with 96-bit nonce)
pub type ChaCha20 = Cipher<R20>;
/// ChaCha20 key type (256-bits/32-bytes)
///
/// Implemented as an alias for [`GenericArray`].
///
/// (NOTE: all variants of [`ChaCha20`] including `XChaCha20` use the same key type)
pub type Key = stream_cipher::Key<ChaCha20>;
/// Nonce type (96-bits/12-bytes)
///
/// Implemented as an alias for [`GenericArray`].
pub type Nonce = stream_cipher::Nonce<ChaCha20>;
/// Internal buffer
type Buffer = [u8; BUFFER_SIZE];
/// How much to increment the counter by for each buffer we generate.
/// Normally this is 1 but the AVX2 backend uses double-wide buffers.
// TODO(tarcieri): support a parallel blocks count like the `ctr` crate
// See: <https://github.com/RustCrypto/stream-ciphers/blob/907e94b/ctr/src/lib.rs#L73>
const COUNTER_INCR: u64 = (BUFFER_SIZE as u64) / (BLOCK_SIZE as u64);
/// ChaCha family stream cipher, generic around a number of rounds.
///
/// Use the [`ChaCha8`], [`ChaCha12`], or [`ChaCha20`] type aliases to select
/// a specific number of rounds.
///
/// Generally [`ChaCha20`] is preferred.
pub struct Cipher<R: Rounds> {
/// ChaCha20 block function initialized with a key and IV
block: Block<R>,
/// Buffer containing previous block function output
buffer: Buffer,
/// Position within buffer, or `None` if the buffer is not in use
buffer_pos: u8,
/// Current counter value relative to the start of the keystream
counter: u64,
/// Offset of the initial counter in the keystream. This is derived from
/// the extra 4 bytes in the 96-byte nonce RFC 8439 version (or is always
/// 0 in the legacy version)
counter_offset: u64,
}
impl<R: Rounds> NewStreamCipher for Cipher<R> {
/// Key size in bytes
type KeySize = U32;
/// Nonce size in bytes
type NonceSize = U12;
fn new(key: &Key, nonce: &Nonce) -> Self {
let block = Block::new(
key.as_slice().try_into().unwrap(),
nonce[4..12].try_into().unwrap(),
);
let counter_offset = (u64::from(nonce[0]) & 0xff) << 32
| (u64::from(nonce[1]) & 0xff) << 40
| (u64::from(nonce[2]) & 0xff) << 48
| (u64::from(nonce[3]) & 0xff) << 56;
Self {
block,
buffer: [0u8; BUFFER_SIZE],
buffer_pos: 0,
counter: 0,
counter_offset,
}
}
}
impl<R: Rounds> SyncStreamCipher for Cipher<R> {
fn try_apply_keystream(&mut self, mut data: &mut [u8]) -> Result<(), LoopError> {
self.check_data_len(data)?;
let pos = self.buffer_pos as usize;
let mut counter = self.counter;
// xor with leftover bytes from the last call if any
if pos != 0 {
if data.len() < BUFFER_SIZE - pos {
let n = pos + data.len();
xor(data, &self.buffer[pos..n]);
self.buffer_pos = n as u8;
return Ok(());
} else {
let (l, r) = data.split_at_mut(BUFFER_SIZE - pos);
data = r;
xor(l, &self.buffer[pos..]);
counter = counter.checked_add(COUNTER_INCR).unwrap();
}
}
let mut chunks = data.chunks_exact_mut(BUFFER_SIZE);
for chunk in &mut chunks {
// TODO(tarcieri): double check this should be checked and not wrapping
let counter_with_offset = self.counter_offset.checked_add(counter).unwrap();
self.block.apply_keystream(counter_with_offset, chunk);
counter = counter.checked_add(COUNTER_INCR).unwrap();
}
let rem = chunks.into_remainder();
self.buffer_pos = rem.len() as u8;
self.counter = counter;
if !rem.is_empty() {
self.generate_block(counter);
xor(rem, &self.buffer[..rem.len()]);
}
Ok(())
}
}
impl<R: Rounds> SyncStreamCipherSeek for Cipher<R> {
fn try_current_pos<T: SeekNum>(&self) -> Result<T, OverflowError> {
// quick and dirty fix, until ctr-like parallel block processing will be added
let (counter, pos) = if self.buffer_pos < BLOCK_SIZE as u8 {
(self.counter, self.buffer_pos)
} else {
(
self.counter.checked_add(1).ok_or(OverflowError)?,
self.buffer_pos - BLOCK_SIZE as u8,
)
};
T::from_block_byte(counter, pos, BLOCK_SIZE as u8)
}
fn try_seek<T: SeekNum>(&mut self, pos: T) -> Result<(), LoopError> {
let res = pos.to_block_byte(BLOCK_SIZE as u8)?;
self.counter = res.0;
self.buffer_pos = res.1;
if self.buffer_pos != 0 {
self.generate_block(self.counter);
}
Ok(())
}
}
impl<R: Rounds> Cipher<R> {
/// Check data length
fn check_data_len(&self, data: &[u8]) -> Result<(), LoopError> {
let leftover_bytes = BUFFER_SIZE - self.buffer_pos as usize;
if data.len() < leftover_bytes {
return Ok(());
}
let blocks = 1 + (data.len() - leftover_bytes) / BLOCK_SIZE;
let res = self.counter.checked_add(blocks as u64).ok_or(LoopError)?;
if res <= MAX_BLOCKS as u64 {
Ok(())
} else {
Err(LoopError)
}
}
/// Generate a block, storing it in the internal buffer
#[inline]
fn generate_block(&mut self, counter: u64) {
// TODO(tarcieri): double check this should be checked and not wrapping
let counter_with_offset = self.counter_offset.checked_add(counter).unwrap();
self.block.generate(counter_with_offset, &mut self.buffer);
}
}
impl<R: Rounds> Debug for Cipher<R> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> {
write!(f, "Cipher {{ .. }}")
}
}
#[inline(always)]
fn xor(buf: &mut [u8], key: &[u8]) {
debug_assert_eq!(buf.len(), key.len());
for (a, b) in buf.iter_mut().zip(key) {
*a ^= *b;
}
}
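// Hedged usage sketch (added for illustration, not from the upstream crate):
// round-trips a short message through `ChaCha20` using the `stream_cipher`
// traits this module already depends on; the key and nonce bytes are
// arbitrary test values.
#[cfg(test)]
mod usage_sketch {
    use super::{ChaCha20, Key, Nonce};
    use stream_cipher::{NewStreamCipher, SyncStreamCipher};
    #[test]
    fn keystream_round_trip() {
        let key = [0x42u8; 32];
        let nonce = [0x24u8; 12];
        let mut data = *b"attack at dawn";
        let mut cipher = ChaCha20::new(Key::from_slice(&key), Nonce::from_slice(&nonce));
        cipher.apply_keystream(&mut data);
        assert_ne!(&data[..], &b"attack at dawn"[..]);
        // Applying the same keystream again restores the plaintext.
        let mut cipher = ChaCha20::new(Key::from_slice(&key), Nonce::from_slice(&nonce));
        cipher.apply_keystream(&mut data);
        assert_eq!(&data[..], &b"attack at dawn"[..]);
    }
}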
| true |
bdeaabc8479cce86cfebc59c74d790b15d599f97
|
Rust
|
FilippoRanza/rmd
|
/src/file_remove_iterator/remove_by_size.rs
|
UTF-8
| 2,934 | 3.078125 | 3 |
[
"MIT"
] |
permissive
|
use std::fs::metadata;
use std::io::{Error, ErrorKind};
use std::path::Path;
use super::file_remove::FileRemove;
use super::parser::spec_string_parser;
const BYTE: u64 = 1;
//standard SI sizes
const KILO_BYTE: u64 = 1000 * BYTE;
const MEGA_BYTE: u64 = 1000 * KILO_BYTE;
const GIGA_BYTE: u64 = 1000 * MEGA_BYTE;
const TERA_BYTE: u64 = 1000 * GIGA_BYTE;
const PETA_BYTE: u64 = 1000 * TERA_BYTE;
//binary sizes
const KIBI_BYTE: u64 = 1024 * BYTE;
const MEBI_BYTE: u64 = 1024 * KIBI_BYTE;
const GIBI_BYTE: u64 = 1024 * MEBI_BYTE;
const TEBI_BYTE: u64 = 1024 * GIBI_BYTE;
const PEBI_BYTE: u64 = 1024 * TEBI_BYTE;
pub struct SizeRemove {
size: u64,
smaller: bool,
}
impl SizeRemove {
pub fn new(size_spec: &str, smaller: bool) -> std::io::Result<Self> {
let tmp = Self::size_converter(size_spec);
match tmp {
Ok(size) => Ok(Self::factory(size, smaller)),
Err(msg) => Err(Self::error_factory(msg)),
}
}
fn size_converter(size_spec: &str) -> Result<u64, String> {
spec_string_parser(size_spec, |s| {
match s {
"b" => Ok(BYTE),
// standard SI
"kb" | "kilo" => Ok(KILO_BYTE),
"mb" | "mega" => Ok(MEGA_BYTE),
"gb" | "giga" => Ok(GIGA_BYTE),
"tb" | "tera" => Ok(TERA_BYTE),
"pb" | "peta" => Ok(PETA_BYTE),
// binary
"kib" | "kibi" => Ok(KIBI_BYTE),
"mib" | "mebi" => Ok(MEBI_BYTE),
"gib" | "gibi" => Ok(GIBI_BYTE),
"tib" | "tebi" => Ok(TEBI_BYTE),
"pib" | "pebi" => Ok(PEBI_BYTE),
_ => Err(format!("unknown size specifier {}", s)),
}
})
}
fn factory(size: u64, smaller: bool) -> Self {
SizeRemove { size, smaller }
}
fn error_factory(msg: String) -> Error {
Error::new(ErrorKind::Other, msg)
}
}
impl FileRemove for SizeRemove {
fn remove(&mut self, path: &Path) -> Result<bool, Error> {
let meta = metadata(path)?;
let size = meta.len();
let output = if self.smaller {
size <= self.size
} else {
size >= self.size
};
Ok(output)
}
}
#[cfg(test)]
mod test {
use super::*;
#[test]
fn test_size_converter() {
// these values has been randomly generated
run_test("1kb 4mib", 4195304);
run_test("4gb 2kibi 13b", 4000002061);
run_test("1kibi 1tera ", 1000000001024);
run_test("6tebi 5gibi 5tb ", 11602438475776);
run_test("5mebi 1gibi 1tb 0mb 2gb ", 1003078984704);
run_test("1kilo 3tebi 5pebi ", 5632798069097448);
run_test("5kib 4mb 6pebi 4kb ", 6755399445064864);
}
fn run_test(spec: &str, size: u64) {
let tmp = SizeRemove::size_converter(spec).unwrap();
assert_eq!(tmp, size);
}
}
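// Hedged usage sketch (added for illustration, not part of the original crate):
// builds a `SizeRemove` filter from a spec string mixing SI and binary units,
// the same shape of input the CLI layer is expected to hand over.
#[cfg(test)]
mod usage_sketch {
    use super::*;
    #[test]
    fn builds_filter_from_mixed_units() {
        assert!(SizeRemove::new("1kb 4mib", true).is_ok());
    }
}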
| true |
578c4d908d9ced059317024afc81931b45be4cb7
|
Rust
|
cramt/aoc_2020
|
/src/days/day10.rs
|
UTF-8
| 3,562 | 3.21875 | 3 |
[] |
no_license
|
use crate::days::Day;
use std::collections::{HashMap, HashSet};
use std::num::Wrapping;
struct Node {
children: Vec<Node>,
data: usize,
}
impl Node {
pub fn new() -> Node {
Node {
children: vec![],
data: 0,
}
}
pub fn expand(&mut self) {
self.children = vec![Node::new(), Node::new()];
}
pub fn is_leaf(&self) -> bool {
self.children.len() == 0
}
fn expand_leaf_and_inc(&mut self) {
if self.is_leaf() {
self.expand();
} else {
let index = 0;
self.children[index].expand_leaf_and_inc();
}
self.data += 1
}
}
struct PairsInclusive<T> {
index: usize,
value: Vec<T>,
}
impl<T> PairsInclusive<T> {
fn new<S: Iterator<Item = T>>(s: S) -> Self {
Self {
value: s.collect(),
index: 1,
}
}
}
impl<T> Iterator for PairsInclusive<T>
where
T: Clone,
{
type Item = (T, T);
fn next(&mut self) -> Option<Self::Item> {
let re = Some((
self.value.get(self.index - 1)?.clone(),
self.value.get(self.index)?.clone(),
));
self.index += 1;
re
}
}
pub struct Day10;
impl Day10 {
fn parse(&self) -> Vec<usize> {
let mut re = vec![0]
.into_iter()
.chain(
self.input()
.split_ascii_whitespace()
.filter(|x| !x.is_empty())
.map(|x| x.parse::<usize>().unwrap()),
)
.collect::<Vec<usize>>();
re.sort();
let last = re.last().unwrap() + 3;
re.push(last);
re
}
fn part2_oneliner(self) -> usize {
let list = self.parse();
let max = list.last().unwrap().clone();
(1..=max)
.filter(|x| list.contains(x))
.fold(
vec![(0, 1)].into_iter().collect::<HashMap<usize, usize>>(),
|mut acc, i| {
acc.insert(
i,
(1..=3)
.into_iter()
.map(|x| acc.get(&(Wrapping(i) - Wrapping(x)).0).unwrap_or(&0))
.fold(0usize, |acc, x| acc + x),
);
acc
},
)
.get(&max)
.unwrap()
.clone()
}
}
impl Day<usize> for Day10 {
fn part1(&self) -> usize {
let list = self.parse();
let diffs = PairsInclusive::new(list.iter())
.map(|(a, b)| b - a)
.collect::<Vec<usize>>();
let ones = diffs.iter().filter(|x| **x == 1).count();
let threes = diffs.iter().filter(|x| **x == 3).count();
ones * threes
}
fn part2(&self) -> usize {
let list = self.parse();
let filter = list.clone().into_iter().collect::<HashSet<usize>>();
let mut acc = HashMap::new();
let max = list.last().unwrap().clone();
acc.insert(0, 1);
for i in 1..=max {
if filter.contains(&i) {
acc.insert(
i,
(1..=3)
.into_iter()
.map(|x| acc.get(&(Wrapping(i) - Wrapping(x)).0).unwrap_or(&0))
.fold(0usize, |acc, x| acc + x),
);
}
}
acc.get(&max).unwrap().clone()
}
fn input(&self) -> &str {
include_str!("../inputs/10")
}
}
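// Hedged sketch (added for illustration, not part of the original solution):
// checks the `PairsInclusive` helper on a small slice, since it is the only
// piece here that does not depend on the puzzle input file.
#[cfg(test)]
mod pairs_inclusive_sketch {
    use super::PairsInclusive;
    #[test]
    fn yields_adjacent_pairs() {
        let pairs: Vec<(i32, i32)> = PairsInclusive::new([1, 2, 3].iter().cloned()).collect();
        assert_eq!(pairs, vec![(1, 2), (2, 3)]);
    }
}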
| true |
bc67d437d6482b21ebf687f5b53ddce911f8a895
|
Rust
|
tokio-rs/loom
|
/src/sync/mpsc.rs
|
UTF-8
| 2,679 | 3.140625 | 3 |
[
"MIT"
] |
permissive
|
//! A stub for `std::sync::mpsc`.
use crate::rt;
/// Mock implementation of `std::sync::mpsc::channel`.
#[track_caller]
pub fn channel<T>() -> (Sender<T>, Receiver<T>) {
let location = location!();
let (sender_channel, receiver_channel) = std::sync::mpsc::channel();
let channel = std::sync::Arc::new(rt::Channel::new(location));
let sender = Sender {
object: std::sync::Arc::clone(&channel),
sender: sender_channel,
};
let receiver = Receiver {
object: std::sync::Arc::clone(&channel),
receiver: receiver_channel,
};
(sender, receiver)
}
#[derive(Debug)]
/// Mock implementation of `std::sync::mpsc::Sender`.
pub struct Sender<T> {
object: std::sync::Arc<rt::Channel>,
sender: std::sync::mpsc::Sender<T>,
}
impl<T> Sender<T> {
/// Attempts to send a value on this channel, returning it back if it could
/// not be sent.
#[track_caller]
pub fn send(&self, msg: T) -> Result<(), std::sync::mpsc::SendError<T>> {
self.object.send(location!());
self.sender.send(msg)
}
}
impl<T> Clone for Sender<T> {
fn clone(&self) -> Sender<T> {
Sender {
object: std::sync::Arc::clone(&self.object),
sender: self.sender.clone(),
}
}
}
#[derive(Debug)]
/// Mock implementation of `std::sync::mpsc::Receiver`.
pub struct Receiver<T> {
object: std::sync::Arc<rt::Channel>,
receiver: std::sync::mpsc::Receiver<T>,
}
impl<T> Receiver<T> {
/// Attempts to wait for a value on this receiver, returning an error if the
/// corresponding channel has hung up.
#[track_caller]
pub fn recv(&self) -> Result<T, std::sync::mpsc::RecvError> {
self.object.recv(location!());
self.receiver.recv()
}
/// Attempts to wait for a value on this receiver, returning an error if the
/// corresponding channel has hung up, or if it waits more than `timeout`.
pub fn recv_timeout(
&self,
_timeout: std::time::Duration,
) -> Result<T, std::sync::mpsc::RecvTimeoutError> {
unimplemented!("std::sync::mpsc::Receiver::recv_timeout is not supported yet in Loom.")
}
/// Attempts to return a pending value on this receiver without blocking.
pub fn try_recv(&self) -> Result<T, std::sync::mpsc::TryRecvError> {
if self.object.is_empty() {
return Err(std::sync::mpsc::TryRecvError::Empty);
} else {
self.recv().map_err(|e| e.into())
}
}
}
impl<T> Drop for Receiver<T> {
fn drop(&mut self) {
// Drain the channel.
while !self.object.is_empty() {
self.recv().unwrap();
}
}
}
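// Hedged sketch (added for illustration, not part of the original module):
// exercises the mock channel inside a loom execution. It assumes the crate
// root exposes the usual `model` entry point shown in loom's documentation.
#[cfg(test)]
mod usage_sketch {
    #[test]
    fn send_then_recv_inside_model() {
        crate::model(|| {
            let (tx, rx) = super::channel();
            tx.send(1).unwrap();
            assert_eq!(rx.recv().unwrap(), 1);
        });
    }
}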
| true |
462dc33984d73f9be4eab8c5f5fd11f7afa41d89
|
Rust
|
enukane/rust-atcoder-apg4b
|
/ex4.rs
|
UTF-8
| 181 | 2.765625 | 3 |
[] |
no_license
|
fn main() {
let seconds = 365 * 24 * 60 * 60;
println!("{}", seconds);
println!("{}", 2 * seconds);
println!("{}", 5 * seconds);
println!("{}", 10 * seconds);
}
| true |
3bbdb1af0efe8d9f1ddfa6bba497c7c14c59912e
|
Rust
|
sinistersnare/ds_store
|
/src/allocator.rs
|
UTF-8
| 20,250 | 3.171875 | 3 |
[
"MIT"
] |
permissive
|
use std::collections::HashMap;
use byteorder::{ByteOrder, BigEndian};
use crate::Error;
// TODO verify `as usize` casts, or place them where they truly belong (where they are created, not when used).
#[derive(Debug)]
pub struct Directory<'a> {
num_internals: u32,
num_nodes: u32,
num_records: u32,
/// A map from file/directory to its information
pub contents: HashMap<String, HashMap<&'a str, RecordValue<'a>>>,
}
// TODO: Better strongly type these. Instead of having so many slices, parse more. Also, PList variant.
#[derive(Debug)]
pub enum RecordValue<'a> {
Background(BackgroundType),
Style(StyleType),
Bool(bool),
Slice(&'a [u8]),
String(String),
I16(i16),
I32(i32),
I64(i64),
U32(u32),
DateTime(chrono::DateTime<chrono::Utc>),
}
#[derive(Debug)]
pub enum BackgroundType {
// Parsed as: FourCharCode "DefB", followed by eight unknown bytes, probably garbage.
Default,
// Parsed as: FourCharCode "ClrB", followed by an RGB value in six bytes, followed by two unknown bytes.
SolidColor(u16, u16, u16),
// Parsed as: FourCharCode "PctB", followed by the the length of the blob stored in the 'pict' record,
// followed by four unknown bytes. The 'pict' record points to the actual background image.
Picture(u32),
}
/// How a directory is viewed in the finder.
/// Icon view, Column/Browser view, List view, and Cover Flow view.
#[derive(Debug)]
pub enum StyleType {
/// represented as "icnv" in the .DS_Store file.
Icon,
/// represented as "clmv" in the .DS_Store file.
ColumnBrowser,
/// represented as "Nlsv" in the .DS_Store file.
List,
/// represented as "Flwv" in the .DS_Store file.
CoverFlow,
}
/// A Block is a u8-slice, with methods for reading from it in _Big-Endian format_.
struct Block<'a>(&'a [u8]);
impl<'a> Block<'a> {
fn new(data: &'a [u8], offset: usize, size: usize) -> Result<Block<'a>, Error<'a>> {
if data.len() < offset+0x4+size {
Err(Error::NotEnoughData)
} else {
Ok(Block(&data[offset+0x4..offset+0x4+size]))
}
}
fn len_check(&self, amt: usize) -> Result<(), Error<'a>> {
if self.0.len() < amt {
Err(Error::NotEnoughData)
} else {
Ok(())
}
}
fn skip(&mut self, amt: usize) -> Result<(), Error<'a>> {
self.len_check(amt)?;
self.0 = &self.0[amt..];
Ok(())
}
fn read_bool(&mut self) -> Result<bool, Error<'a>> {
self.len_check(1)?;
let ret = self.0[0] == 1;
self.0 = &self.0[1..];
Ok(ret)
}
fn read_i16(&mut self) -> Result<i16, Error<'a>> {
self.len_check(2)?;
let ret = Ok(BigEndian::read_i16(self.0));
self.0 = &self.0[2..];
ret
}
fn read_u16(&mut self) -> Result<u16, Error<'a>> {
self.len_check(2)?;
let ret = Ok(BigEndian::read_u16(self.0));
self.0 = &self.0[2..];
ret
}
fn read_i32(&mut self) -> Result<i32, Error<'a>> {
self.len_check(4)?;
let ret = Ok(BigEndian::read_i32(self.0));
self.0 = &self.0[4..];
ret
}
fn read_u32(&mut self) -> Result<u32, Error<'a>> {
self.len_check(4)?;
let ret = Ok(BigEndian::read_u32(self.0));
self.0 = &self.0[4..];
ret
}
fn read_i64(&mut self) -> Result<i64, Error<'a>> {
self.len_check(8)?;
let ret = Ok(BigEndian::read_i64(self.0));
self.0 = &self.0[4..];
ret
}
fn read_exact(&mut self, data: &'static [u8], err_msg: &'static str) -> Result<(), Error<'a>> {
self.len_check(data.len())?;
let unconfirmed = &self.0[..data.len()];
self.0 = &self.0[data.len()..];
if unconfirmed != data {
Err(Error::BadData(err_msg))
} else {
Ok(())
}
}
fn read_buf(&mut self, amt: usize) -> Result<&'a [u8], Error<'a>> {
self.len_check(amt)?;
let (left, right) = self.0.split_at(amt);
self.0 = right;
Ok(left)
}
/// Reads a 4-byte `length` and then reads (`length*2`)-bytes to create a `String`.
fn read_utf16(&mut self) -> Result<String, Error<'a>> {
        // TODO: Small possible optimization opportunity:
        // only has to allocate for the String on big-endian machines,
        // as you can just slice::from_raw_parts the &[u8] -> &[u16] and it'll just work.
// Would need to dupe this function with #[cfg(target_endian=little/big)]
let file_name_length = self.read_u32()?;
let mut u16_buf: Vec<u16> = Vec::with_capacity(file_name_length as usize * 2);
for _ in 0..file_name_length {
u16_buf.push(self.read_u16()?);
}
match String::from_utf16(&u16_buf) {
Err(_) => Err(Error::InvalidString),
Ok(s) => Ok(s),
}
}
// Reads a 4-byte length, then length-bytes of self.
fn read_blob(&mut self) -> Result<&'a [u8], Error<'a>> {
let length = self.read_u32()?;
Ok(self.read_buf(length as usize)?)
}
fn read_record(&mut self, records: &mut HashMap<String, HashMap<&'a str, RecordValue<'a>>>) -> Result<(), Error<'a>> {
let file_name = self.read_utf16()?;
let mut metadata = records.entry(file_name).or_insert(HashMap::new());
self.read_record_info(&mut metadata)?;
Ok(())
}
fn read_date_time(&mut self) -> Result<chrono::DateTime<chrono::Utc>, Error<'a>> {
// number that when added to a Mac-epoch integer, converts it to a Unix-epoch integer.
// TODO: MAKE SURE THIS WORKS?
const CONVERTER: i64 = 2082844800;
let raw = self.read_i64()?;
println!("DateTime raw: {}", raw);
Ok(chrono::DateTime::from_utc(chrono::NaiveDateTime::from_timestamp(raw + CONVERTER, 0), chrono::Utc))
}
// TODO: better strongly type the RecordValues. "bwsp" is actually a plist. Many blobs are meaningful.
fn read_record_info(&mut self, records: &mut HashMap<&'a str, RecordValue<'a>>) -> Result<(), Error<'a>> {
let structure_type: &'a [u8] = self.read_buf(4)?;
let record_value: RecordValue = match structure_type {
b"BKGD" => {
self.read_exact(b"blob", "\"BKGD\" only takes blobs describing the background.")?;
match self.read_buf(4)? {
b"DefB" => {
self.skip(8)?;
Ok(RecordValue::Background(BackgroundType::Default))
},
b"ClrB" => {
let r = self.read_u16()?;
let g = self.read_u16()?;
let b = self.read_u16()?;
self.skip(2)?; // unknown bytes. Seemingly not alpha?
Ok(RecordValue::Background(BackgroundType::SolidColor(r,g,b)))
},
b"PctB" => {
let picture_property_blob_length = self.read_u32()?;
self.skip(4)?;
Ok(RecordValue::U32(picture_property_blob_length))
},
other => Err(Error::UnkonwnStructureType(other))
}
},
// TODO: read_blob function.
b"ICVO" => {
self.read_exact(b"bool", "\"ICVO\" only takes bool")?;
Ok(RecordValue::Bool(self.read_bool()?))
},
b"Iloc" => {
self.read_exact(b"blob", "\"Iloc\" only takes bool")?;
self.read_exact(&[0,0,0,16], "\"Iloc\" only takes a 16-byte blob.")?;
Ok(RecordValue::Slice(self.read_buf(16)?))
},
b"LSVO" => {
self.read_exact(b"bool", "\"LSVO\" only takes bool")?;
Ok(RecordValue::Bool(self.read_bool()?))
},
b"bwsp" => {
self.read_exact(b"blob", "\"bwsp\" only takes blob")?;
Ok(RecordValue::Slice(self.read_blob()?))
},
b"cmmt" => {
self.read_exact(b"ustr", "\"cmmt\" only takes ustr")?;
Ok(RecordValue::String(self.read_utf16()?))
},
b"dilc" => {
self.read_exact(b"blob", "\"dilc\" only takes blob")?;
self.read_exact(&[0,0,0,32], "\"dilc\" only takes a 32-byte blob.")?;
Ok(RecordValue::Slice(self.read_buf(32)?))
},
b"dscl" => {
self.read_exact(b"bool", "\"dscl\" only takes bool")?;
Ok(RecordValue::Bool(self.read_bool()?))
},
b"extn" => {
self.read_exact(b"ustr", "\"extn\" only takes ustr")?;
Ok(RecordValue::String(self.read_utf16()?))
},
b"fwi0" => {
self.read_exact(b"blob", "\"fwi0\" only takes blob")?;
self.read_exact(&[0,0,0,16], "\"fwi0\" only takes 16-byte blob")?;
Ok(RecordValue::Slice(self.read_buf(16)?))
},
b"fwsw" => {
self.read_exact(b"long", "\"fwsw\" only takes long")?;
Ok(RecordValue::I32(self.read_i32()?))
},
b"fwvh" => {
self.read_exact(b"shor", "\"fwvh\" only takes shor")?;
self.skip(2)?; // shor is 4 bytes long, but only 16 bit. skip 2 bytes.
Ok(RecordValue::I16(self.read_i16()?))
},
b"GRP0" => {
self.read_exact(b"ustr", "\"GRP0\" only takes ustr")?;
Ok(RecordValue::String(self.read_utf16()?))
},
b"icgo" => {
self.read_exact(b"blob", "\"icgo\" only takes blob")?;
self.read_exact(&[0,0,0,8], "\"icgo\" only takes 8-byte blob")?;
Ok(RecordValue::Slice(self.read_buf(8)?))
},
b"icsp" => {
self.read_exact(b"blob", "\"icsp\" only takes blob")?;
self.read_exact(&[0,0,0,8], "\"icsp\" only takes 8-byte blob")?;
Ok(RecordValue::Slice(self.read_buf(8)?))
},
b"icvo" => {
self.read_exact(b"blob", "\"icvo\" only takes blob")?;
let blob = self.read_blob()?;
if blob.len() == 18 || blob.len() == 26 {
Ok(RecordValue::Slice(blob))
} else {
Err(Error::BadData("\"icvo\" only takes 18 or 26 byte blob."))
}
},
b"icvp" => {
self.read_exact(b"blob", "\"icvp\" only takes blob")?;
Ok(RecordValue::Slice(self.read_blob()?))
},
b"icvt" => {
self.read_exact(b"shor", "\"icvt\" only takes shor")?;
self.skip(2)?;
Ok(RecordValue::I16(self.read_i16()?))
},
b"info" => {
self.read_exact(b"blob", "\"info\" only takes blob")?;
let blob = self.read_blob()?;
if blob.len() == 40 || blob.len() == 48 {
Ok(RecordValue::Slice(blob))
} else {
Err(Error::BadData("\"info\" only takes 40 or 48 byte blob."))
}
},
b"logS" | b"lg1S" => {
self.read_exact(b"comp", "\"logS\"/\"lg1S\" only takes comp")?;
Ok(RecordValue::I64(self.read_i64()?))
},
b"lssp" => {
self.read_exact(b"blob", "\"lssp\" only takes blob")?;
self.read_exact(&[0,0,0,8], "\"lssp\" only takes 8-byte blob")?;
Ok(RecordValue::Slice(self.read_buf(8)?))
},
b"lsvo" => {
self.read_exact(b"blob", "\"lsvo\" only takes blob")?;
self.read_exact(&[0,0,0,76], "\"lsvo\" only takes 76-byte blob")?;
Ok(RecordValue::Slice(self.read_buf(76)?))
},
b"lsvt" => {
self.read_exact(b"shor", "\"lsvt\" only take shor")?;
self.skip(2)?;
Ok(RecordValue::I16(self.read_i16()?))
},
b"lsvp" => {
self.read_exact(b"blob", "\"lsvp\" only takes blob")?;
Ok(RecordValue::Slice(self.read_blob()?))
},
b"lsvP" => {
self.read_exact(b"blob", "\"lsvP\" only takes blob")?;
Ok(RecordValue::Slice(self.read_blob()?))
},
b"modD" | b"moDD" => {
self.read_exact(b"dutc", "\"modD\"/\"moDD\" only takes dutc")?;
Ok(RecordValue::DateTime(self.read_date_time()?))
},
b"phyS" | b"ph1S" => {
self.read_exact(b"comp", "\"phyS\"/\"ph1S\" only takes comp")?;
Ok(RecordValue::I64(self.read_i64()?))
},
b"pict" => {
                // I really hope that `pict` is a regular blob,
// but the docs are unclear if we need to get the length from the 'BKGD' key.
self.read_exact(b"blob", "\"pict\" only takes blob")?;
// TODO: maybe do a verify, get the BKGD key and check they are the same?
Ok(RecordValue::Slice(self.read_blob()?))
},
b"vSrn" => {
self.read_exact(b"long", "\"vSrn\" only takes long")?;
Ok(RecordValue::I32(self.read_i32()?))
},
b"vstl" => {
self.read_exact(b"type", "\"vstl\" only takes type")?;
// let view_type = self.read_buf(4)?;
match self.read_buf(4)? {
b"icnv" => Ok(RecordValue::Style(StyleType::Icon)),
b"clmv" => Ok(RecordValue::Style(StyleType::ColumnBrowser)),
b"Nlsv" => Ok(RecordValue::Style(StyleType::List)),
b"Flwv" => Ok(RecordValue::Style(StyleType::CoverFlow)),
other => Err(Error::UnkonwnStructureType(other)),
}
},
b"ptbL" => {
self.read_exact(b"ustr", "\"ptbL\" only takes ustr")?;
Ok(RecordValue::String(self.read_utf16()?))
},
b"ptbN" => {
self.read_exact(b"ustr", "\"ptbN\" only takes ustr")?;
Ok(RecordValue::String(self.read_utf16()?))
},
other => Err(Error::UnkonwnStructureType(other)),
}?;
// should be impossible to hit error case,
        // but is it worth getting annoyed by people saying I shouldn't use unsafe code? :p joking of course.
let type_str = match std::str::from_utf8(structure_type) {
Ok(s) => s,
Err(_) => { return Err(Error::InvalidString); }
};
// TODO: maybe check if dupe, and Err if so?
records.insert(type_str, record_value);
Ok(())
}
}
pub struct Allocator<'a> {
/// The whole data to be partitioned into blocks by the allocator.
data: &'a [u8],
/// The offsets to each block(?) (TODO write this.)
pub offsets: Vec<u32>,
/// It is a 'table of contents', but it seems that there is only ever 1 entry, "DSDB".
pub dsdb_location: u32,
    /// locations of data allocated by the buddy-allocator. (TODO: write this.)
pub free_list: Vec<Vec<u32>>,
}
impl<'a> Allocator<'a> {
    /// Create a new allocator, initializing all important data needed for traversal.
pub fn new(data: &'a [u8]) -> Result<Allocator<'a>, Error<'a>> {
if &data[0..4] != &[0,0,0,1] {
// creating a block offsets by 4 bytes, so check the first 4 here.
return Err(Error::BadData("First 4 bytes must be `1`."));
}
let mut prelude_block = Block::new(data, 0, 32)?;
let (info_block_offset, info_block_size) = Allocator::read_prelude(&mut prelude_block)?;
let mut info_block = Block::new(data, info_block_offset as usize, info_block_size as usize)?;
let offsets = Allocator::read_offsets(&mut info_block)?;
let dsdb_location = Allocator::read_dsdb_location(&mut info_block)?;
let free_list = Allocator::read_free_list(&mut info_block)?;
Ok(Allocator {data, offsets, dsdb_location, free_list}) // allocator should be fully allocated here.
}
fn get_block(&self, block_id: u32) -> Result<Block<'a>, Error<'a>> {
if self.offsets.len() < block_id as usize {
return Err(Error::BlockDoesntExist);
}
let address = self.offsets[block_id as usize];
// Go code does some type casting to i32 here, should I?
let offset = address & !0x1f;
let size = 1 << (address & 0x1f);
Block::new(self.data, offset as usize, size)
}
fn read_prelude(info_block: &mut Block<'a>) -> Result<(u32, u32), Error<'a>> {
info_block.read_exact(b"Bud1", "Magic number is wrong.")?;
let offset = info_block.read_u32()?;
let size = info_block.read_u32()?;
let offset_check = info_block.read_u32()?;
if offset != offset_check {
return Err(Error::BadData("Offset check failed"));
}
Ok((offset, size))
}
fn read_offsets(info_block: &mut Block<'a>) -> Result<Vec<u32>, Error<'a>> {
let num_offsets = info_block.read_u32()?;
let mut offsets = Vec::with_capacity(num_offsets as usize);
// Documented as unknown bytes, always observed as 0.
info_block.read_exact(&[0,0,0,0], "Thought these should always be 0???")?;
for _i in 0..num_offsets {
offsets.push(info_block.read_u32()?);
}
// TODO: verify this math...
// Also document this (Offsets section of https://0day.work post.)
let bytes_to_skip = (256 - (num_offsets % 256)) * 4;
info_block.skip(bytes_to_skip as usize)?;
Ok(offsets)
}
fn read_dsdb_location(info_block: &mut Block<'a>) -> Result<u32, Error<'a>> {
// Amount of entries in the TOC.
info_block.read_exact(&[0,0,0,1], "I Thought there should only be 1 TOC entry...")?;
info_block.read_exact(&[4], "Looks like \"DSDB\" is not the only key...")?;
info_block.read_exact(b"DSDB", "I thought only key was \"DSDB\"...")?;
Ok(info_block.read_u32()?) // value!
}
fn read_free_list(info_block: &mut Block<'a>) -> Result<Vec<Vec<u32>>, Error<'a>> {
let mut free_list = Vec::with_capacity(32);
for _ in 0..=31 {
let block_count = info_block.read_u32()?;
let mut list = Vec::with_capacity(block_count as usize);
for _ in 0..block_count {
list.push(info_block.read_u32()?);
}
free_list.push(list);
}
Ok(free_list)
}
pub fn traverse(&self) -> Result<Directory<'a>, Error<'a>> {
let mut root_block = self.get_block(self.dsdb_location)?;
let root_node = root_block.read_u32()?;
let num_internals = root_block.read_u32()?;
let num_records = root_block.read_u32()?;
let num_nodes = root_block.read_u32()?;
root_block.read_exact(&[0,0, 0x10, 0], "Expected 0x1000, found not that.")?;
let mut contents = HashMap::new();
self.traverse_tree(root_node, &mut contents)?;
Ok(Directory {num_internals, num_records, num_nodes, contents})
}
fn traverse_tree(&self, block_id: u32, contents: &mut HashMap<String, HashMap<&'a str, RecordValue<'a>>>) -> Result<(), Error<'a>> {
let mut current_block = self.get_block(block_id)?;
let pair_count = current_block.read_u32()?;
if pair_count == 0 {
// We are at a leaf! Congratulations!
let count = current_block.read_u32()?;
for _ in 0..count {
current_block.read_record(contents)?;
}
} else {
// Internal node of the B-Tree!
for _ in 0..pair_count {
let child = current_block.read_u32()?;
self.traverse_tree(child, contents)?;
current_block.read_record(contents)?;
}
}
Ok(())
}
}
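// Hedged sketch (added for illustration, not part of the original crate):
// drives the private `Block` reader over a hand-built buffer. `Block::new`
// skips a 4-byte prefix, so the buffer starts with four padding bytes.
#[cfg(test)]
mod block_reader_sketch {
    use super::Block;
    #[test]
    fn reads_big_endian_fields_in_order() {
        // 4 prefix bytes, then a big-endian u32 (0x01020304) and a bool byte.
        let data = [0u8, 0, 0, 0, 0x01, 0x02, 0x03, 0x04, 0x01];
        let mut block = match Block::new(&data, 0, 5) {
            Ok(block) => block,
            Err(_) => panic!("block construction failed"),
        };
        assert_eq!(block.read_u32().ok(), Some(0x0102_0304));
        assert_eq!(block.read_bool().ok(), Some(true));
    }
}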
| true |
89c074ed078905880c188bb014750604fb175748
|
Rust
|
cqueirolo/21C1-Stormtroopers
|
/redis_server/src/command/keys_cmd/ttl_cmd.rs
|
UTF-8
| 3,016 | 2.84375 | 3 |
[] |
no_license
|
//! Returns the expire time of a key.
//!
//! Example:
//! ```text
//! > expire key 60
//! OK
//! > ttl key
//! 60
//! ```
use crate::command::cmd_trait::Command;
use crate::command::command_builder::CommandBuilder;
use crate::command::command_parser::ParsedMessage;
use crate::errors::run_error::RunError;
use crate::server::app_info::AppInfo;
use crate::server::logger::{Loggable, Logger};
use crate::server::utils::timestamp_now;
/// Information string to log.
const INFO_EXPIRE_COMMAND: &str = "Run command TTL\n";
/// Name of the command.
const CLIENT_ID: &str = "ExpireCommand";
/// Response string for the command.
const TTL_ZERO_OR_ABSENT: &str = "-2\n";
/// Newline character.
const NEW_LINE: &str = "\n";
/// Code of the command.
const CONST_CMD: &str = "ttl";
/// Min amount of arguments besides of the command.
const MIN_VALID_ARGS: i32 = 1;
/// Max amount of arguments besides of the command.
const MAX_VALID_ARGS: i32 = 1;
/// Main struct for the command.
pub struct TtlCommand {
/// Id of the thread running.
id_job: u32,
/// Logger entity.
logger: Logger<String>,
}
impl TtlCommand {
pub fn new(id_job: u32, logger: Logger<String>, mut command_builder: CommandBuilder) -> Self {
let cmd = Self { id_job, logger };
command_builder.insert(CONST_CMD.to_string(), Box::new(cmd.clone()));
cmd
}
}
impl Loggable for TtlCommand {
fn get_id_client(&self) -> &str {
CLIENT_ID
}
fn get_id_thread(&self) -> u32 {
self.id_job
}
}
impl Clone for TtlCommand {
fn clone(&self) -> TtlCommand {
TtlCommand {
id_job: self.id_job,
logger: self.logger.clone(),
}
}
}
impl Command for TtlCommand {
fn run(
&self,
args: Vec<&str>,
app_info: &AppInfo,
_id_client: usize,
) -> Result<String, RunError> {
let _log_info_res = self
.logger
.info(self, INFO_EXPIRE_COMMAND, app_info.get_verbose());
ParsedMessage::validate_args(args.clone(), MIN_VALID_ARGS, MAX_VALID_ARGS)?;
let key_str = args[0]; // The key for the DB
let db = app_info.get_db_resolver();
match db.type_key(String::from(key_str)) {
Ok(_db_type) => {
let ttl_scheduler = app_info.get_ttl_scheduler();
let now = timestamp_now();
match ttl_scheduler.get_ttl_key(String::from(key_str)) {
Ok(ttl) => match ttl.parse::<u64>().unwrap().overflowing_sub(now) {
(res, false) => {
let mut ret_value = res.to_string();
ret_value.push_str(NEW_LINE);
Ok(ret_value)
}
(_, true) => Ok(String::from(TTL_ZERO_OR_ABSENT)),
},
Err(_) => Ok(String::from(TTL_ZERO_OR_ABSENT)),
}
}
Err(e) => Err(e),
}
}
}
| true |
8a8b89d92935eb00b808fcf95bae061188667fab
|
Rust
|
bgourlie/rs-nes
|
/rs-nes/src/apu/mocks.rs
|
UTF-8
| 570 | 2.75 | 3 |
[
"MIT"
] |
permissive
|
use crate::apu::IApu;
#[derive(Default)]
pub struct ApuMock {
write_addr: u16,
write_value: u8,
control: u8,
}
impl ApuMock {
pub fn write_addr(&self) -> u16 {
self.write_addr
}
pub fn write_value(&self) -> u8 {
self.write_value
}
pub fn set_control(&mut self, val: u8) {
self.control = val;
}
}
impl IApu for ApuMock {
fn write(&mut self, addr: u16, value: u8) {
self.write_addr = addr;
self.write_value = value;
}
fn read_control(&self) -> u8 {
self.control
}
}
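// Hedged sketch (added for illustration, not part of the original crate):
// checks that the mock records the last write and echoes the control value
// through the `IApu` trait.
#[cfg(test)]
mod apu_mock_sketch {
    use super::*;
    #[test]
    fn records_writes_and_control() {
        let mut apu = ApuMock::default();
        apu.write(0x4015, 0x0f);
        apu.set_control(0x40);
        assert_eq!(apu.write_addr(), 0x4015);
        assert_eq!(apu.write_value(), 0x0f);
        assert_eq!(apu.read_control(), 0x40);
    }
}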
| true |
9035c917b273a3ed626a4fc44dc7a36c38b289d1
|
Rust
|
DenSA-Inc/awrevim
|
/src/ex.rs
|
UTF-8
| 2,483 | 3.21875 | 3 |
[] |
no_license
|
use crossterm::event::{KeyEvent, KeyCode};
use ropey::Rope;
use std::fmt::Write;
pub struct ExBar {
buffer: Rope,
cursor_index: usize,
}
pub enum ExResult {
StillEditing,
Aborted,
Finished(String),
}
impl ExBar {
pub fn new() -> Self {
Self {
buffer: Rope::new(),
cursor_index: 0,
}
}
pub fn buffer(&self) -> &Rope {
&self.buffer
}
pub fn cursor_index(&self) -> usize {
self.cursor_index
}
pub fn handle_key(&mut self, key: KeyEvent) -> ExResult {
if !key.modifiers.is_empty() {
return ExResult::StillEditing;
}
match key.code {
KeyCode::Char(chr) => {
self.buffer.insert_char(self.cursor_index, chr);
self.cursor_index += 1;
ExResult::StillEditing
},
KeyCode::Enter => {
let mut result = String::new();
write!(result, "{}", self.buffer).unwrap();
self.clear();
ExResult::Finished(result)
},
KeyCode::Backspace => {
match (self.buffer.len_chars() == 0, self.cursor_index == 0) {
(true, _) => return ExResult::Aborted,
(false, true) => {},
(false, false) => {
self.buffer.remove(self.cursor_index - 1..self.cursor_index);
self.cursor_index -= 1;
},
}
ExResult::StillEditing
},
KeyCode::Delete => {
let len = self.buffer.len_chars();
if self.cursor_index < len {
self.buffer.remove(self.cursor_index..self.cursor_index + 1);
}
ExResult::StillEditing
},
KeyCode::Left => {
if self.cursor_index > 0 {
self.cursor_index -= 1;
}
ExResult::StillEditing
},
KeyCode::Right => {
if self.cursor_index < self.buffer.len_chars() {
self.cursor_index += 1;
}
ExResult::StillEditing
},
KeyCode::Esc => ExResult::Aborted,
_ => ExResult::StillEditing,
}
}
pub fn clear(&mut self) {
self.buffer = Rope::new();
self.cursor_index = 0;
}
}
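// Hedged sketch (added for illustration, not part of the original source):
// feeds the ex bar a few synthetic key events and checks the command string
// it hands back on Enter.
#[cfg(test)]
mod ex_bar_sketch {
    use super::*;
    use crossterm::event::{KeyCode, KeyEvent, KeyModifiers};
    fn key(code: KeyCode) -> KeyEvent {
        KeyEvent::new(code, KeyModifiers::empty())
    }
    #[test]
    fn collects_typed_characters_until_enter() {
        let mut bar = ExBar::new();
        for c in "wq".chars() {
            assert!(matches!(bar.handle_key(key(KeyCode::Char(c))), ExResult::StillEditing));
        }
        match bar.handle_key(key(KeyCode::Enter)) {
            ExResult::Finished(command) => assert_eq!(command, "wq"),
            _ => panic!("expected ExResult::Finished"),
        }
    }
}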
| true |
39908f9f079ce9e51c59cc29d93e20725ef50942
|
Rust
|
hube12/AoC2020
|
/day15/src/main.rs
|
UTF-8
| 1,524 | 3.171875 | 3 |
[] |
no_license
|
#![allow(unreachable_code)]
use std::fs;
use std::time::Instant;
fn solve(bound: usize, puzzle: Vec<u64>) -> usize {
let mut last_index = 1;
let mut last = 1;
let mut v = vec![(u32::MAX, u32::MAX); bound];
for el in puzzle.iter().enumerate() {
v[*el.1 as usize] = ((el.0 + 1) as u32, u32::MAX);
last_index = el.0 + 1;
last = *el.1;
}
for i in last_index + 1..bound + 1 {
if v[last as usize].1 == u32::MAX {
last = 0
} else {
last = i as u64 - v[last as usize].1 as u64 - 1;
}
v[last as usize] = (i as u32, v[last as usize].0);
}
last as usize
}
fn part1(puzzle: Vec<u64>) -> usize {
solve(2020, puzzle)
}
fn part2(puzzle: Vec<u64>) -> usize {
solve(30000000, puzzle)
}
fn main() {
let input = fs::read_to_string("input/input.txt")
.expect("Something went wrong reading the file");
let lines = input.lines();
let mut puzzle: Vec<u64> = vec![];
for line in lines {
let str = line.parse::<String>().expect("Ouf that's not a string !");
let it = str.split(",");
it.for_each(|x| puzzle.push(x.parse::<u64>().unwrap()));
}
println!("Running part1");
let now = Instant::now();
println!("Found {}", part1(puzzle.clone()));
println!("Took {}us", now.elapsed().as_micros());
println!("Running part2");
let now = Instant::now();
println!("Found {}", part2(puzzle.clone()));
println!("Took {}ms", now.elapsed().as_millis());
}
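// Hedged sketch (added for illustration, not part of the original solution):
// checks `solve` against the worked example from the puzzle statement
// (starting numbers 0,3,6 give 436 on turn 2020).
#[cfg(test)]
mod solve_sketch {
    use super::solve;
    #[test]
    fn matches_the_puzzle_example() {
        assert_eq!(solve(2020, vec![0, 3, 6]), 436);
    }
}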
| true |
3da1d5989554c7e6bc19ed0a306b03fb48014927
|
Rust
|
djc/askama
|
/askama/src/helpers.rs
|
UTF-8
| 905 | 3.265625 | 3 |
[
"MIT",
"Apache-2.0"
] |
permissive
|
use std::iter::{Enumerate, Peekable};
pub struct TemplateLoop<I>
where
I: Iterator,
{
iter: Peekable<Enumerate<I>>,
}
impl<I> TemplateLoop<I>
where
I: Iterator,
{
#[inline]
pub fn new(iter: I) -> Self {
TemplateLoop {
iter: iter.enumerate().peekable(),
}
}
}
impl<I> Iterator for TemplateLoop<I>
where
I: Iterator,
{
type Item = (<I as Iterator>::Item, LoopItem);
#[inline]
fn next(&mut self) -> Option<(<I as Iterator>::Item, LoopItem)> {
self.iter.next().map(|(index, item)| {
(
item,
LoopItem {
index,
first: index == 0,
last: self.iter.peek().is_none(),
},
)
})
}
}
#[derive(Copy, Clone)]
pub struct LoopItem {
pub index: usize,
pub first: bool,
pub last: bool,
}
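// Hedged sketch (added for illustration, not part of the crate's own tests):
// shows the loop metadata produced for a three-element iterator.
#[cfg(test)]
mod template_loop_sketch {
    use super::TemplateLoop;
    #[test]
    fn flags_first_and_last_items() {
        let items: Vec<_> = TemplateLoop::new(["a", "b", "c"].iter()).collect();
        assert_eq!(items.len(), 3);
        assert!(items[0].1.first && !items[0].1.last);
        assert_eq!(items[1].1.index, 1);
        assert!(items[2].1.last && !items[2].1.first);
    }
}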
| true |
8f27f167d5fb98d3bb9ec475dc03aa4081a77362
|
Rust
|
sorpaas/bm
|
/le/derive/tests/derive.rs
|
UTF-8
| 2,002 | 2.953125 | 3 |
[
"Apache-2.0"
] |
permissive
|
use sha2::{Digest, Sha256};
use primitive_types::H256;
use bm::InMemoryBackend;
use bm_le::{IntoTree, FromTree, MaxVec, DigestConstruct, tree_root};
use generic_array::GenericArray;
fn chunk(data: &[u8]) -> H256 {
let mut ret = [0; 32];
ret[..data.len()].copy_from_slice(data);
H256::from(ret)
}
fn h(a: &[u8], b: &[u8]) -> H256 {
let mut hash = Sha256::new();
hash.input(a);
hash.input(b);
H256::from_slice(hash.result().as_slice())
}
#[derive(IntoTree)]
struct BasicContainer {
a: u32,
b: u64,
c: u128,
}
#[derive(IntoTree, FromTree, PartialEq, Eq, Debug)]
struct ConfigContainer {
a: u64,
b: u64,
c: u64,
#[bm(compact)]
d: GenericArray<u64, typenum::U4>,
e: u64,
#[bm(compact)]
f: MaxVec<u64, typenum::U5>,
}
#[derive(IntoTree, FromTree, Debug, Eq, PartialEq)]
pub enum EnumTest {
A(u128),
B {
c: u64,
d: u32,
},
E,
}
#[test]
fn test_basic() {
assert_eq!(tree_root::<Sha256, _>(&BasicContainer { a: 1, b: 2, c: 3 }),
h(&h(&chunk(&[0x01])[..], &chunk(&[0x02])[..])[..],
&h(&chunk(&[0x03])[..], &chunk(&[])[..])[..]));
}
#[test]
fn test_config() {
let mut db = InMemoryBackend::<DigestConstruct<Sha256>>::default();
let container = ConfigContainer {
a: 1,
b: 2,
c: 3,
d: GenericArray::from([4, 5, 6, 7]),
e: 8,
f: MaxVec::from(vec![9, 10]),
};
let actual = container.into_tree(&mut db).unwrap();
let decoded = ConfigContainer::from_tree(&actual, &mut db).unwrap();
assert_eq!(container, decoded);
}
#[test]
fn test_enum() {
let mut db = InMemoryBackend::<DigestConstruct<Sha256>>::default();
let e1 = EnumTest::A(123);
let e2 = EnumTest::B { c: 1, d: 2 };
let e3 = EnumTest::E;
let a1 = e1.into_tree(&mut db).unwrap();
let d1 = EnumTest::from_tree(&a1, &mut db).unwrap();
let a2 = e2.into_tree(&mut db).unwrap();
let d2 = EnumTest::from_tree(&a2, &mut db).unwrap();
let a3 = e3.into_tree(&mut db).unwrap();
let d3 = EnumTest::from_tree(&a3, &mut db).unwrap();
assert_eq!(d1, e1);
assert_eq!(d2, e2);
assert_eq!(d3, e3);
}
| true |
ae9a62886c6015d678d67c0c333f4452bf3abb1b
|
Rust
|
lnicola/t-rex
|
/t-rex-core/src/service/tileset.rs
|
UTF-8
| 2,114 | 2.71875 | 3 |
[
"MIT"
] |
permissive
|
//
// Copyright (c) Pirmin Kalberer. All rights reserved.
// Licensed under the MIT License. See LICENSE file in the project root for full license information.
//
use core::grid::Extent;
use core::layer::Layer;
use core::config::Config;
use core::config::TilesetCfg;
/// Collection of layers in one MVT
pub struct Tileset {
pub name: String,
pub attribution: Option<String>,
pub extent: Option<Extent>,
pub layers: Vec<Layer>,
}
pub static WORLD_EXTENT: Extent = Extent {
minx: -180.0,
miny: -90.0,
maxx: 180.0,
maxy: 90.0,
};
impl Tileset {
pub fn minzoom(&self) -> u8 {
0 // TODO: from layers or config?
}
pub fn maxzoom(&self) -> u8 {
22 // TODO: from layers or config (see also MvtService#get_stylejson)
}
pub fn attribution(&self) -> String {
self.attribution.clone().unwrap_or("".to_string())
}
pub fn get_extent(&self) -> &Extent {
self.extent.as_ref().unwrap_or(&WORLD_EXTENT)
}
pub fn get_center(&self) -> (f64, f64) {
let ext = self.get_extent();
(
ext.maxx - (ext.maxx - ext.minx) / 2.0,
ext.maxy - (ext.maxy - ext.miny) / 2.0,
)
}
pub fn get_start_zoom(&self) -> u8 {
2 // TODO: from config
}
}
impl<'a> Config<'a, TilesetCfg> for Tileset {
fn from_config(tileset_cfg: &TilesetCfg) -> Result<Self, String> {
let layers = tileset_cfg
.layers
.iter()
.map(|layer| Layer::from_config(layer).unwrap())
.collect();
Ok(Tileset {
name: tileset_cfg.name.clone(),
attribution: tileset_cfg.attribution.clone(),
extent: tileset_cfg.extent.clone(),
layers: layers,
})
}
fn gen_config() -> String {
let mut config = String::new();
config.push_str(&Layer::gen_config());
config
}
fn gen_runtime_config(&self) -> String {
let mut config = String::new();
for layer in &self.layers {
config.push_str(&layer.gen_runtime_config());
}
config
}
}
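// Hedged sketch (added for illustration, not part of the upstream tests):
// checks the center and attribution helpers against a simple symmetric extent.
#[cfg(test)]
mod tileset_sketch {
    use super::*;
    #[test]
    fn center_of_explicit_extent() {
        let tileset = Tileset {
            name: "test".to_string(),
            attribution: None,
            extent: Some(Extent {
                minx: -10.0,
                miny: -20.0,
                maxx: 10.0,
                maxy: 20.0,
            }),
            layers: Vec::new(),
        };
        assert_eq!(tileset.get_center(), (0.0, 0.0));
        assert_eq!(tileset.attribution(), "");
    }
}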
| true |
c93dd0e6bc577411f2cce5f398ceb595f2e726a3
|
Rust
|
alexander-jackson/advent-of-code-2020
|
/day4/src/main.rs
|
UTF-8
| 2,870 | 3.390625 | 3 |
[] |
no_license
|
use std::collections::HashMap;
use std::str::FromStr;
const EXPECTED_FIELDS: [&str; 7] = ["ecl", "pid", "eyr", "hcl", "byr", "iyr", "hgt"];
fn parse_input() -> Vec<String> {
let contents = std::fs::read_to_string("input.txt").unwrap();
contents
.trim()
.split("\n\n")
.map(|x| x.replace("\n", " "))
.collect::<Vec<_>>()
}
fn get_hashmap(passport: &str) -> HashMap<&str, &str> {
passport
.split(' ')
.map(|x| {
let mut split = x.split(':');
(split.next().unwrap(), split.next().unwrap())
})
.collect()
}
fn is_valid(passport: &str) -> bool {
// Split at the spaces and then the ':'
let map = get_hashmap(passport);
EXPECTED_FIELDS.iter().all(|x| map.contains_key(x))
}
fn validate_number(value: &str, min: usize, max: usize) -> bool {
// Try and parse it
let parsed = match usize::from_str(value) {
Ok(v) => v,
Err(_) => return false,
};
min <= parsed && parsed <= max
}
fn validate_height(value: &str) -> bool {
// Check it ends in cm or in and validate the number remaining
if let Some(cm) = value.strip_suffix("cm") {
return validate_number(cm, 150, 193);
}
if let Some(inches) = value.strip_suffix("in") {
return validate_number(inches, 59, 76);
}
false
}
fn validate_hcl(value: &str) -> bool {
// Ensure the first char is #
let mut chars = value.chars();
if !chars.next().map(|x| x == '#').unwrap_or_default() {
return false;
}
let mut lowercase = chars.map(|c| c.to_ascii_lowercase());
if !lowercase.all(|c| c.is_ascii_hexdigit()) {
return false;
}
value.len() == 7
}
fn validate_ecl(value: &str) -> bool {
match value {
"amb" | "blu" | "brn" | "gry" | "grn" | "hzl" | "oth" => true,
_ => false,
}
}
fn validate_pid(value: &str) -> bool {
value.len() == 9
}
fn validate(key: &str, value: &str) -> bool {
match key {
"byr" => validate_number(value, 1920, 2002),
"iyr" => validate_number(value, 2010, 2020),
"eyr" => validate_number(value, 2020, 2030),
"hgt" => validate_height(value),
"hcl" => validate_hcl(value),
"ecl" => validate_ecl(value),
"pid" => validate_pid(value),
"cid" => return true,
_ => unreachable!(),
}
}
fn increased_validation(passport: &str) -> bool {
let map = get_hashmap(passport);
if !is_valid(passport) {
return false;
}
map.iter().all(|(k, v)| validate(k, v))
}
fn main() {
let passports = parse_input();
let valid = passports.iter().filter(|x| is_valid(x)).count();
println!("Part 1 Solution: {}", valid);
let stricter = passports.iter().filter(|x| increased_validation(x)).count();
println!("Part 2 Solution: {}", stricter);
}
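// Hedged sketch (added for illustration, not part of the original solution):
// spot-checks a few of the field validators with values taken from the
// puzzle description.
#[cfg(test)]
mod validator_sketch {
    use super::*;
    #[test]
    fn spot_checks_field_validators() {
        assert!(validate_number("2002", 1920, 2002));
        assert!(!validate_number("2003", 1920, 2002));
        assert!(validate_height("60in"));
        assert!(validate_height("190cm"));
        assert!(!validate_height("190"));
        assert!(validate_hcl("#123abc"));
        assert!(!validate_hcl("123abc"));
        assert!(validate_ecl("brn"));
        assert!(!validate_ecl("wat"));
    }
}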
| true |
34a6f3cadb3b3f01055b9c23f2d2dc58ea5c32c9
|
Rust
|
songyzh/leetcode-rust
|
/src/solution/s0412_fizz_buzz.rs
|
UTF-8
| 1,436 | 3.453125 | 3 |
[
"Apache-2.0"
] |
permissive
|
/**
* [412] Fizz Buzz
*
* Write a program that outputs the string representation of numbers from 1 to n.
*
* But for multiples of three it should output “Fizz” instead of the number and for the multiples of five output “Buzz”. For numbers which are multiples of both three and five output “FizzBuzz”.
*
* Example:
*
* n = 15,
*
* Return:
* [
* "1",
* "2",
* "Fizz",
* "4",
* "Buzz",
* "Fizz",
* "7",
* "8",
* "Fizz",
* "Buzz",
* "11",
* "Fizz",
* "13",
* "14",
* "FizzBuzz"
* ]
*
*
*/
pub struct Solution {}
// problem: https://leetcode.com/problems/fizz-buzz/
// discuss: https://leetcode.com/problems/fizz-buzz/discuss/?currentPage=1&orderBy=most_votes&query=
// submission codes start here
impl Solution {
pub fn fizz_buzz(n: i32) -> Vec<String> {
let mut ret = vec![];
for i in 1..(n + 1) {
if i % 15 == 0 {
ret.push("FizzBuzz".to_string());
} else if i % 3 == 0 {
ret.push("Fizz".to_string());
} else if i % 5 == 0 {
ret.push("Buzz".to_string());
} else {
ret.push(i.to_string());
}
}
ret
}
}
// submission codes end
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_412() {}
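    // Hedged addition (not in the original file): checks the n = 15 example
    // from the doc comment above.
    #[test]
    fn test_412_doc_example() {
        let expected: Vec<String> = vec![
            "1", "2", "Fizz", "4", "Buzz", "Fizz", "7", "8", "Fizz", "Buzz", "11", "Fizz", "13",
            "14", "FizzBuzz",
        ]
        .into_iter()
        .map(String::from)
        .collect();
        assert_eq!(Solution::fizz_buzz(15), expected);
    }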
}
| true |
d8f14790f0742b2de030f37b5a955b33841524c0
|
Rust
|
ianpreston/rust86
|
/src/modrm.rs
|
UTF-8
| 2,960 | 3.265625 | 3 |
[
"WTFPL"
] |
permissive
|
use cstate::{CpuState, Reg8, Reg16};
use operand::Operand;
pub fn read_modrm(cs: &mut CpuState, byte_registers: bool) -> (u8, Operand, Operand) {
// Read ModR/M byte
let byte = cs.read();
// Extract `mod'
let modbits = byte & 0b11000000;
let modbits = modbits / 64;
// Extract `reg'
let reg = byte & 0b00111000;
let reg = reg / 8;
// Extract `r/m'
let rm = byte & 0b00000111;
// Build effective and register operands
let effective = modrm_effective(cs, modbits, rm, byte_registers);
let register = modrm_register(reg, byte_registers);
(reg, effective, register)
}
fn modrm_register(reg: u8, byte: bool) -> Operand {
if byte {
return match reg {
0b000 => Operand::Reg8(Reg8::AL),
0b001 => Operand::Reg8(Reg8::CL),
0b010 => Operand::Reg8(Reg8::DL),
0b011 => Operand::Reg8(Reg8::BL),
_ => Operand::RawWord(65534), // FIXME
}
} else {
return match reg {
0b000 => Operand::Reg16(Reg16::AX),
0b001 => Operand::Reg16(Reg16::CX),
0b010 => Operand::Reg16(Reg16::DX),
0b011 => Operand::Reg16(Reg16::BX),
0b100 => Operand::Reg16(Reg16::SP),
0b101 => Operand::Reg16(Reg16::BP),
0b110 => Operand::Reg16(Reg16::SI),
0b111 => Operand::Reg16(Reg16::DI),
_ => panic!("Invalid ModR/M byte 1"),
}
}
}
fn modrm_effective(cs: &mut CpuState, modbits: u8, rm: u8, byte_registers: bool) -> Operand {
match modbits {
0b00 => match rm {
0b000 => Operand::MemoryAddress(
cs.getreg16(&Reg16::BX) + cs.getreg16(&Reg16::SI)
),
0b001 => Operand::MemoryAddress(
cs.getreg16(&Reg16::BX) + cs.getreg16(&Reg16::DI)
),
0b010 => Operand::MemoryAddress(
cs.getreg16(&Reg16::BP) + cs.getreg16(&Reg16::SI)
),
0b011 => Operand::MemoryAddress(
cs.getreg16(&Reg16::BP) + cs.getreg16(&Reg16::DI)
),
0b100 => Operand::MemoryAddress(
cs.getreg16(&Reg16::SI)
),
0b101 => Operand::MemoryAddress(
cs.getreg16(&Reg16::DI)
),
0b111 => Operand::MemoryAddress(
cs.getreg16(&Reg16::BX)
),
0b110 => Operand::MemoryAddress(
cs.read16()
),
_ => panic!("Invalid ModR/M byte"),
},
0b11 => modrm_register(rm, byte_registers),
0b10 => match rm {
0b111 => Operand::MemoryAddress(
cs.getreg16(&Reg16::BX) + cs.read16()
),
0b101 => Operand::MemoryAddress(
cs.getreg16(&Reg16::DI) + cs.read16()
),
_ => panic!("Not Implemented"),
},
_ => panic!("Not Implemented"),
}
}
| true |
ea2fb5e97d4206a081e7a32d07005845f5187f5e
|
Rust
|
mbougrin/Rust
|
/ft_time_server/src/main.rs
|
UTF-8
| 1,556 | 3.046875 | 3 |
[] |
no_license
|
use std::io::prelude::*;
use std::net::TcpListener;
use std::net::TcpStream;
use std::env;
use std::thread;
use std::time::Duration;
use std::time::{SystemTime, UNIX_EPOCH};
fn print_time() {
println!("{}", SystemTime::now()
.duration_since(UNIX_EPOCH)
.unwrap().as_millis());
}
fn handle_connection(mut stream: TcpStream) {
loop {
let since_the_epoch = SystemTime::now()
.duration_since(UNIX_EPOCH)
.unwrap().as_millis();
let mut s = since_the_epoch.to_string();
s.push_str("\n");
stream.write(s.as_bytes()).unwrap();
stream.flush().unwrap();
let ms = Duration::from_millis(1000);
thread::sleep(ms);
}
}
fn help(name: &String) {
println!("{name} listening_ipaddr listening_port");
}
fn run_server(server_ip_port: &String) {
let listener = TcpListener::bind(server_ip_port).unwrap();
for stream in listener.incoming() {
let stream = stream.unwrap();
thread::spawn(|| {
handle_connection(stream)
});
}
}
fn main() -> std::io::Result<()> {
let args: Vec<String> = env::args().collect();
match args.len() {
3 => {
print_time();
let mut server_ip_port = String::with_capacity(128);
server_ip_port.push_str(&args[1]);
server_ip_port.push_str(":");
server_ip_port.push_str(&args[2]);
run_server(&server_ip_port);
},
_ => {
help(&args[0]);
}
}
Ok(())
}
| true |
5e827fee0a826beca036785aaf7e46fb9f1d7570
|
Rust
|
hairyhum/ockam
|
/implementations/rust/ockam/ockam_credential/src/lib.rs
|
UTF-8
| 4,162 | 3.03125 | 3 |
[
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
//! Attribute based, privacy preserving, anonymous credentials.
//!
//! This crate provides the ability to issue and verify attribute based,
//! privacy preserving, anonymous credentials.
//!
//! The issuer of a credential signs a collection of statements that attest to
//! attributes of the subject of that credential. The subject (or a holder on
//! their behalf) can then selectively disclose these signed statements to a
//! verifier by presenting a cryptographic proof of knowledge of the issuer's
//! signature without revealing the actual signature or any of the other
//! statements that they didn't wish to disclose to this verifier.
//!
//! Applications can decide if a subject is authorized to take an action based
//! on the attributes of the subject that were proven to be signed by trusted
//! issuers. Since only limited and necessary information is revealed about
//! subjects this improves efficiency, security and privacy of applications.
//!
//! The main Ockam crate re-exports types defined in this crate.
#![no_std]
#![deny(
missing_docs,
trivial_casts,
trivial_numeric_casts,
unsafe_code,
unused_import_braces,
unused_qualifications,
warnings
)]
#[cfg_attr(not(feature = "std"), no_std)]
#[cfg(feature = "alloc")]
extern crate alloc;
#[cfg(feature = "std")]
extern crate std;
#[cfg(all(feature = "no_std", not(feature = "alloc")))]
mod structs {
pub use core::fmt::{self, Debug, Display};
use heapless::{consts::*, String, Vec};
pub type Buffer<T> = Vec<T, U32>;
pub type ByteString = String<U32>;
}
#[cfg(feature = "alloc")]
mod structs {
pub use alloc::fmt::{self, Debug, Display};
use alloc::{string::String, vec::Vec};
pub type Buffer<T> = Vec<T>;
pub type ByteString = String;
}
mod attribute;
mod attribute_type;
mod error;
mod schema;
mod serde;
pub use attribute::Attribute;
pub use attribute_type::AttributeType;
pub use schema::Schema;
#[cfg(test)]
mod tests {
use crate::{Attribute, AttributeType, Schema};
use std::string::String;
use std::vec;
fn create_test_schema() -> Schema {
let attribute = Attribute {
label: String::from("test_attr"),
description: String::from("test attribute"),
attribute_type: AttributeType::Utf8String,
};
let attributes = vec![attribute];
Schema {
id: String::from("test_id"),
label: String::from("test_label"),
description: String::from("test_desc"),
attributes,
}
}
#[test]
fn test_schema_creation() {
let _schema = create_test_schema();
}
#[test]
fn test_schema_serialization() {
let mut schema = create_test_schema();
if let Ok(serialized) = serde_json::to_string(&schema) {
assert!(serialized.contains("test_id"));
assert!(serialized.contains("test_label"));
assert!(serialized.contains("test_desc"));
assert!(serialized.contains("test_attr"));
assert!(serialized.contains("test attribute"));
if let Ok(mut rehydrated) = serde_json::from_str::<Schema>(&serialized) {
assert_eq!(rehydrated.id, schema.id);
assert_eq!(rehydrated.label, schema.label);
assert_eq!(rehydrated.description, schema.description);
assert_eq!(rehydrated.attributes.len(), schema.attributes.len());
if let Some(schema_attr) = schema.attributes.pop() {
if let Some(rehydrated_attr) = rehydrated.attributes.pop() {
assert_eq!(schema_attr.attribute_type, rehydrated_attr.attribute_type);
assert_eq!(schema_attr.label, rehydrated_attr.label);
assert_eq!(schema_attr.description, rehydrated_attr.description);
} else {
panic!("Missing rehydrated attribute")
}
} else {
panic!("Missing Schema attribute")
}
}
} else {
panic!("Couldn't serialize Schema")
}
}
}
| true |
914c70274b3c60ebdb72ffb07aa3ea6c0e08e1d0
|
Rust
|
alex-dukhno/rust-tdd-katas
|
/old-katas-iteration-02/string_compression_kata/src/day_1.rs
|
UTF-8
| 1,281 | 3.65625 | 4 |
[
"MIT"
] |
permissive
|
pub fn compress(src: &str) -> String {
if src.is_empty() {
src.to_owned()
} else {
let mut compressed = String::new();
let mut i = 1;
let len = src.len();
let mut chars = src.chars();
let mut c = chars.next().unwrap();
let mut count = 1;
while i < len {
count = 1;
while let Some(n) = chars.next() {
i += 1;
if c == n {
count += 1;
} else {
compressed.push_str(count.to_string().as_str());
compressed.push(c);
c = n;
break;
}
}
}
compressed.push_str(count.to_string().as_str());
compressed.push(c);
compressed
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn compress_empty_string() {
assert_eq!(compress(""), "");
}
#[test]
fn compress_single_char_string() {
assert_eq!(compress("a"), "1a");
}
#[test]
fn compress_string_of_unique_chars() {
assert_eq!(compress("abc"), "1a1b1c");
}
#[test]
fn compress_string_with_doubled_chars() {
assert_eq!(compress("aabbcc"), "2a2b2c");
}
}
| true |
870d864b360c1406213f673a3ef266d31eed140c
|
Rust
|
VBota1/Rust-Code-Snippets
|
/handleStringCLIarguments/lib.rs
|
UTF-8
| 617 | 2.875 | 3 |
[
"MIT"
] |
permissive
|
fn handle_arguments() -> Result<String, String> {
    let args: Vec<String> = std::env::args().collect();
    match args.get(1) {
        Some(command) => match command.as_str() {
            "expected" => Err(log_stub(format!(
                "Command \"expected\" is not implemented"
            ))),
            other => Err(log_stub(format!(
                "Unknown command \"{}\". {}",
                other,
                recommend_help_stub()
            ))),
        },
        None => Err(log_stub(format!(
            "No command received. {}",
            recommend_help_stub()
        ))),
    }
}
fn log_stub(message: String) -> String {
    //TODO log message
    message
}
fn recommend_help_stub() -> String {
    format!("Call \"help\" for additional information.")
}
| true |
fe4559aa15f6acbe5aa4256e0a76c71e7b126816
|
Rust
|
magiclen/vcard
|
/src/values/property_id_value.rs
|
UTF-8
| 1,634 | 3.015625 | 3 |
[
"MIT"
] |
permissive
|
use std::fmt::{Display, Write};
use validators::{Validated, ValidatedWrapper};
use super::*;
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
pub struct PropertyIDValue {
d1: u8,
d2: Option<u8>,
}
#[derive(Clone, Debug, PartialEq)]
pub enum PropertyIDValueError {
OutOfRange,
}
impl PropertyIDValue {
pub fn from_u8(d1: u8, d2: Option<u8>) -> Result<PropertyIDValue, PropertyIDValueError> {
if d1 > 9 {
return Err(PropertyIDValueError::OutOfRange);
}
if let Some(d2) = d2 {
if d2 > 9 {
return Err(PropertyIDValueError::OutOfRange);
}
}
Ok(PropertyIDValue {
d1,
d2,
})
}
}
impl PropertyIDValue {
pub fn get_d1(&self) -> u8 {
self.d1
}
pub fn get_d2(&self) -> Option<u8> {
self.d2
}
}
impl Value for PropertyIDValue {
fn fmt(&self, f: &mut Formatter) -> Result<(), fmt::Error> {
f.write_fmt(format_args!("{}", self.d1))?;
if let Some(d2) = self.d2 {
f.write_char('.')?;
f.write_fmt(format_args!("{}", d2))?;
}
Ok(())
}
}
impl Display for PropertyIDValue {
fn fmt(&self, f: &mut Formatter) -> Result<(), fmt::Error> {
Value::fmt(self, f)
}
}
impl Validated for PropertyIDValue {}
impl ValidatedWrapper for PropertyIDValue {
type Error = &'static str;
fn from_string(_from_string_input: String) -> Result<Self, Self::Error> {
unimplemented!();
}
fn from_str(_from_str_input: &str) -> Result<Self, Self::Error> {
unimplemented!();
}
}
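// Hedged sketch (added for illustration, not part of the crate's tests):
// checks the textual form produced by the `Display`/`Value` implementation
// for one- and two-component IDs, plus the out-of-range error path.
#[cfg(test)]
mod property_id_sketch {
    use super::*;
    #[test]
    fn formats_one_and_two_component_ids() {
        assert_eq!(PropertyIDValue::from_u8(3, None).unwrap().to_string(), "3");
        assert_eq!(PropertyIDValue::from_u8(1, Some(4)).unwrap().to_string(), "1.4");
        assert!(PropertyIDValue::from_u8(10, None).is_err());
    }
}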
| true |
3c50d7581e7c16055b72e90c33e1531f0c5c175c
|
Rust
|
rusticata/pcap-analyzer
|
/libpcap-analyzer/src/plugins/mod.rs
|
UTF-8
| 3,076 | 2.515625 | 3 |
[
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference",
"MIT"
] |
permissive
|
//! Plugin factory definition and default plugins implementation
use std::collections::HashMap;
use crate::{Plugin, PluginBuilder, PluginBuilderError, PluginRegistry};
use libpcap_tools::Config;
mod basic_stats;
#[cfg(feature = "plugin_community_id")]
mod community_id;
#[cfg(feature = "plugin_examples")]
mod examples;
mod flows;
#[cfg(feature = "plugins_debug")]
mod hexdump;
#[cfg(feature = "plugin_ospf")]
mod ospf;
#[cfg(feature = "plugin_rusticata")]
mod rusticata;
#[cfg(feature = "plugin_tls_stats")]
mod tls_stats;
/// Storage of plugin instances
pub struct Plugins {
pub storage: HashMap<String, Box<dyn Plugin>>,
}
/// Plugin Factory
///
/// A plugin factory stores all registered builders, and is used to
/// create all plugin instances on request.
pub struct PluginsFactory {
list: Vec<Box<dyn PluginBuilder>>,
}
impl PluginsFactory {
/// Create a new empty plugin factory
pub fn new() -> PluginsFactory {
PluginsFactory { list: Vec::new() }
}
/// Add a new plugin builder to the factory
pub fn add_builder(&mut self, b: Box<dyn PluginBuilder>) {
self.list.push(b);
}
/// Instantiate all plugins
pub fn build_plugins(&self, config: &Config) -> Result<PluginRegistry, PluginBuilderError> {
let mut registry = PluginRegistry::new();
for b in &self.list {
b.build(&mut registry, config)?;
};
Ok(registry)
}
/// Instantiate plugins if they match predicate
pub fn build_filter_plugins<P>(&self, predicate: P, config: &Config) -> Result<PluginRegistry, PluginBuilderError>
where
P: Fn(&str) -> bool,
{
let mut registry = PluginRegistry::new();
for b in &self.list {
if predicate(b.name()) {
b.build(&mut registry, config)?;
}
};
Ok(registry)
}
/// Iterate builder names
pub fn iter_builders<Op>(&self, op: Op)
where
Op: Fn(&str),
{
self.list.iter().for_each(|b| {
op(b.name())
});
}
}
impl Default for PluginsFactory {
/// Create a new plugin factory, with all default plugins
fn default() -> Self {
let mut v: Vec<Box<dyn PluginBuilder>> = vec![
Box::new(basic_stats::BasicStatsBuilder),
Box::new(flows::FlowsInfoBuilder),
];
#[cfg(feature = "plugin_community_id")]
v.push(Box::new(community_id::CommunityIDBuilder));
#[cfg(feature = "plugins_debug")]
v.push(Box::new(hexdump::HexDumpBuilder));
#[cfg(feature = "plugin_tls_stats")]
v.push(Box::new(tls_stats::TlsStatsBuilder));
#[cfg(feature = "plugin_rusticata")]
v.push(Box::new(rusticata::RusticataBuilder));
#[cfg(feature = "plugin_examples")]
{
v.push(Box::new(examples::EmptyBuilder));
v.push(Box::new(examples::EmptyWithConfigBuilder));
}
#[cfg(feature = "plugin_ospf")]
v.push(Box::new(ospf::OspfLogBuilder));
PluginsFactory { list: v }
}
}
| true |
93239acbe125cae9c5592e97497a4497438dcafb
|
Rust
|
maxhardt/rust-ci-github-actions-workflow
|
/src/lib.rs
|
UTF-8
| 200 | 3.546875 | 4 |
[
"MIT"
] |
permissive
|
/// Multiplies two integers
pub fn multiply(a: i32, b: i32) -> i32 {
a * b
}
#[cfg(test)]
mod test {
use super::*;
#[test]
fn test() {
assert_eq!(multiply(2, 2), 4);
}
}
| true |
dabaad3484889025ce3300207b3714dbb7090570
|
Rust
|
kimsnj/loxers
|
/src/resolver.rs
|
UTF-8
| 4,893 | 2.671875 | 3 |
[] |
no_license
|
use crate::{
ast::{self, Expr, Stmt},
error::LoxError,
token::Token,
};
use std::collections::HashSet;
type AnalysisRes = Result<(), LoxError>;
type Bindings = Vec<(*const Token, usize)>;
type BindingsRes = Result<Bindings, LoxError>;
#[derive(Default)]
pub(crate) struct Resolver {
env: Environment,
bindings: Bindings,
}
impl Resolver {
pub fn resolve(&mut self, stmts: &[Stmt]) -> BindingsRes {
self.resolve_stmts(stmts)?;
let mut res = Vec::new();
std::mem::swap(&mut res, &mut self.bindings);
Ok(res)
}
fn resolve_stmts(&mut self, stmts: &[Stmt]) -> AnalysisRes {
stmts.iter().map(|s| self.resolve_stmt(s)).collect()
}
fn resolve_stmt(&mut self, stmt: &Stmt) -> AnalysisRes {
match stmt {
Stmt::Expression(e) | Stmt::Print(e) => self.resolve_expr(e)?,
Stmt::Var(v) => {
if let Some(ref init) = v.init {
self.resolve_expr(init)?;
}
self.env.declare(&v.name.lexeme);
}
Stmt::Block(stmts) => {
self.env.enter_scope();
self.resolve_stmts(stmts)?;
self.env.exit_scope();
}
Stmt::If(if_) => {
self.resolve_expr(&if_.condition)?;
self.resolve_stmt(&if_.then_branch)?;
if let Some(ref else_stmt) = &if_.else_branch {
self.resolve_stmt(else_stmt)?;
}
}
Stmt::While(while_) => {
self.resolve_expr(&while_.condition)?;
self.resolve_stmt(&while_.body)?;
}
Stmt::Function(f) => {
self.env.declare(&f.name);
self.env.enter_scope();
f.params.iter().for_each(|p| self.env.declare(&p.lexeme));
self.resolve_stmts(&f.body)?;
self.env.exit_scope();
}
Stmt::Return(r) => self.resolve_expr(&r.value)?,
Stmt::Class(c) => {
self.env.declare(&c.name.lexeme);
self.env.enter_scope();
self.env.declare("this");
for method in &c.methods {
self.resolve_fn(&method)?;
}
self.env.exit_scope();
}
}
Ok(())
}
fn resolve_fn(&mut self, f: &ast::Function) -> AnalysisRes {
self.env.declare(&f.name);
self.env.enter_scope();
f.params.iter().for_each(|p| self.env.declare(&p.lexeme));
self.resolve_stmts(&f.body)?;
self.env.exit_scope();
Ok(())
}
fn resolve_expr(&mut self, expr: &Expr) -> AnalysisRes {
match expr {
Expr::StringLit(_) | Expr::NumberLit(_) | Expr::BoolLit(_) | Expr::Nil => {}
Expr::Grouping(e) => self.resolve_expr(&e)?,
Expr::Unary(u) => self.resolve_expr(&u.as_ref().right)?,
Expr::Binary(b) | Expr::Logical(b) => {
self.resolve_expr(&b.left)?;
self.resolve_expr(&b.right)?;
}
Expr::Variable(v) => self.resolve_var(v)?,
Expr::Assign(a) => {
self.resolve_expr(&a.expr)?;
self.resolve_var(&a.name)?;
}
Expr::Call(c) => {
self.resolve_expr(&c.callee)?;
for arg in &c.args {
self.resolve_expr(arg)?;
}
}
Expr::Get(g) => self.resolve_expr(&g.object)?,
Expr::Set(s) => {
self.resolve_expr(&s.object)?;
self.resolve_expr(&s.value)?
}
Expr::This(t) => self.resolve_var(t)?,
}
Ok(())
}
fn resolve_var(&mut self, t: &Token) -> AnalysisRes {
let idx = self
.env
.resolve(&t.lexeme)
.ok_or_else(|| LoxError::new("undefined variable".into(), t))?;
self.bindings.push((t, idx));
Ok(())
}
}
struct Environment {
scopes: Vec<HashSet<String>>,
}
impl Default for Environment {
fn default() -> Self {
let mut globals = HashSet::new();
globals.insert("clock".into());
Self {
scopes: vec![globals],
}
}
}
impl Environment {
pub fn declare(&mut self, name: &str) {
self.scopes
.iter_mut()
.last()
.expect("no scope found")
.insert(name.into());
}
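    // `resolve` walks the scope stack from the innermost scope outwards and
    // returns the index of the closest scope declaring `name`. For example,
    // with scopes [{"clock"}, {"x"}]: resolve("x") == Some(1),
    // resolve("clock") == Some(0), and resolve("y") == None.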
fn resolve(&self, name: &str) -> Option<usize> {
self.scopes
.iter()
.enumerate()
.rfind(|(_, s)| s.contains(name))
.map(|(i, _)| i)
}
pub fn enter_scope(&mut self) {
self.scopes.push(Default::default());
}
fn exit_scope(&mut self) {
self.scopes.pop();
}
}
| true |
6da62deb4769812342eb9bcfefb15e251d368381
|
Rust
|
flyq/datastruct-algorithm
|
/leetcode/p0682/src/main.rs
|
UTF-8
| 1,311 | 2.921875 | 3 |
[
"MIT"
] |
permissive
|
fn main() {
println!("Hello, world!");
}
pub struct Solution{}
use std::collections::VecDeque;
impl Solution {
pub fn cal_points(ops: Vec<String>) -> i32 {
let mut stack: VecDeque<i32> = VecDeque::new();
let mut res = 0;
for i in 0..ops.len() {
match ops[i].as_str() {
"+" => {
let temp = stack.pop_back().unwrap();
let now = stack.back().unwrap() + temp;
stack.push_back(temp);
stack.push_back(now);
res += now;
},
"D" => {
let now = stack.back().unwrap() * 2;
stack.push_back(now);
res += now;
}
"C" => {
res -= stack.pop_back().unwrap();
}
_ => {
let now = ops[i].parse().unwrap();
stack.push_back(now);
res += now;
}
}
}
res
}
}
/*
执行结果:
通过
显示详情
执行用时 :
0 ms
, 在所有 rust 提交中击败了
100.00%
的用户
内存消耗 :
2 MB
, 在所有 rust 提交中击败了
100.00%
的用户
*/
| true |
7455e335889cca5f510c242eb0d1191c9630ba3b
|
Rust
|
FilippoRanza/simpla
|
/src/semantic_analysis/error_message_generator.rs
|
UTF-8
| 10,594 | 3.015625 | 3 |
[] |
no_license
|
use super::semantic_error;
use extract_line_error::extract_error_code;
use simpla_parser::syntax_tree;
impl<'a> semantic_error::SemanticError<'a> {
pub fn format_error(&self, code: &str) -> String {
let msg = match self {
Self::NameRidefinition(err) => format!("name error: {}", err.format_error(code)),
Self::VoidVariableDeclaration(err) => {
format!("void declaration error: {}", err.format_error(code))
}
Self::MismatchedOperationTypes(err) => {
format!("mismatched operation error: {}", err.format_error(code))
}
Self::IncoherentOperation(err) => {
format!("incoherent operation error: {}", err.format_error(code))
}
Self::CastError(err) => format!("cast error: {}", err.format_error(code)),
Self::NonBooleanCondition(err) => {
format!("condition error: {}", err.format_error(code))
}
Self::MismatchedConditionalExpression(err) => {
format!("conditional expression error: {}", err.format_error(code))
}
Self::UnknownFunction(err) => format!("unknown function error: {}", err),
            Self::UnknownVariable(err) => format!("unknown variable error: {}", err),
Self::MismatchedUnary(err) => format!("negation error: {}", err.format_error(code)),
Self::ArgumentCountError(err) => {
format!("argument count error: {}", err.format_error(code))
}
Self::MismatchedArgumentType(err) => {
format!("argument type error: {}", err.format_error(code))
}
Self::MismatchedAssignment(err) => {
format!("assignment error: {}", err.format_error(code))
}
Self::BreakOutsideLoop => format!("break error: break outside loop"),
Self::ForLoopError(err) => format!("for loop error: {}", err.format_error(code)),
Self::ReturnError(err) => format!("return error: {}", err.format_error(code)),
Self::MissingReturn(err) => err.format_error(code),
};
format!("{}", msg)
}
}
fn format_wrong_code(code: &str, loc: &syntax_tree::Location) -> String {
extract_error_code(code, loc.begin, loc.end)
}
impl semantic_error::NameRidefinition {
fn format_error(&self, code: &str) -> String {
format!(
"{} defined multiple times, originally: {}, redefined: {}",
self.name,
self.original.format_error(code),
self.new.format_error(code)
)
}
}
impl semantic_error::Ridefinition {
fn format_error(&self, code: &str) -> String {
match self {
Self::Function(loc) => format!("function here:\n{}", format_wrong_code(code, loc)),
Self::Variable(loc) => format!("variable here:\n{}", format_wrong_code(code, loc)),
}
}
}
impl<'a> semantic_error::VoidVariableDeclaration<'a> {
fn format_error(&self, code: &str) -> String {
let tmp = self.names.id_list.join(", ");
format!(
"Variables: [{}] defined as type void: only function can have type void:\n{}",
tmp,
format_wrong_code(code, &self.names.loc)
)
}
}
impl<'a> semantic_error::MismatchedTypes<'a> {
fn format_error(&self, code: &str) -> String {
format!(
"left type: {} right type: {}:\n{}",
kind_to_string(&self.left),
kind_to_string(&self.right),
format_wrong_code(code, &self.loc)
)
}
}
impl<'a> semantic_error::IncoherentOperation<'a> {
fn format_error(&self, code: &str) -> String {
format!(
"cannot apply operator {} to type {}\n{}",
operator_to_string(&self.operator),
kind_to_string(&self.var_kind),
format_wrong_code(code, &self.loc)
)
}
}
impl<'a> semantic_error::CastError<'a> {
fn format_error(&self, code: &str) -> String {
let token = format_wrong_code(code, &self.loc);
match &self.error {
semantic_error::CastErrorType::ToInt(k) => {
format!("cannot cast {} into integer:\n{}", kind_to_string(k), token)
}
semantic_error::CastErrorType::ToReal(k) => {
format!("cannot cast {} into real:\n{}", kind_to_string(k), token)
}
}
}
}
impl<'a> semantic_error::NonBooleanCondition<'a> {
fn format_error(&self, code: &str) -> String {
fn fmt_err(
stat: &str,
kind: &syntax_tree::Kind,
code: &str,
loc: &syntax_tree::Location,
) -> String {
format!(
"{} statement requires a boolean expression as condition, found: {}\n{}",
stat,
kind_to_string(kind),
format_wrong_code(code, loc)
)
}
match &self.error {
semantic_error::NonBooleanConditionType::IfStat(k) => fmt_err("if", k, code, self.loc),
semantic_error::NonBooleanConditionType::WhileStat(k) => {
fmt_err("while", k, code, self.loc)
}
semantic_error::NonBooleanConditionType::CondStat(k) => {
fmt_err("conditional", k, code, self.loc)
}
}
}
}
impl<'a> semantic_error::MismatchedUnary<'a> {
fn format_error(&self, code: &str) -> String {
fn fmt_err(unary: &str, kind: &syntax_tree::Kind, err: String) -> String {
format!(
"{} cannot be applied to type: {}\n{}",
unary,
kind_to_string(kind),
err
)
}
let token = format_wrong_code(code, self.loc);
match &self.error {
semantic_error::MismatchedUnaryType::Logic(k) => fmt_err("logic negation", k, token),
semantic_error::MismatchedUnaryType::Numeric(k) => {
fmt_err("arithmetic negation", k, token)
}
}
}
}
impl<'a> semantic_error::MismatchedAssignment<'a> {
fn format_error(&self, code: &str) -> String {
format!(
"expected {}, found {} in variable {} assignment:\n{}",
kind_to_string(&self.correct),
kind_to_string(&self.given),
self.name,
format_wrong_code(code, self.loc)
)
}
}
impl<'a> semantic_error::ForLoopError<'a> {
fn format_error(&self, code: &str) -> String {
let descr = match &self.error {
semantic_error::ForLoopErrorType::CountVariableAssignment(name) => {
format!("count variable {} is modified into loop body", name)
}
semantic_error::ForLoopErrorType::NonIntegerCount(k) => format!(
"count variable is declared as {}, expected integer",
kind_to_string(k)
),
semantic_error::ForLoopErrorType::NonIntegerStart(k) => format!(
"for loop start expression of type {}, expected integer",
kind_to_string(k)
),
semantic_error::ForLoopErrorType::NonIntegerEnd(k) => format!(
"for loop end expression of type {}, expected integer",
kind_to_string(k)
),
};
let token = format_wrong_code(code, self.loc);
format!("{}\n{}", descr, token)
}
}
impl<'a> semantic_error::ReturnError<'a> {
fn format_error(&self, code: &str) -> String {
let token = format_wrong_code(code, self.loc);
match &self.error {
            semantic_error::ReturnErrorType::ReturnOutsideFunction => format!(
                "return statement is not allowed in the main body, only inside a function declaration:\n{}",
token
),
semantic_error::ReturnErrorType::MismatchedReturnType(correct, given) => format!(
"return statement type: {}, but {} was expected:\n{}",
kind_to_string(&correct),
kind_to_string(&given),
token
),
}
}
}
impl<'a> semantic_error::ArgumentCountError<'a> {
fn format_error(&self, code: &str) -> String {
format!(
"function: {} expected {} args, but {} are used in function call:\nFunction '{1}' definition:\n{}\nFunction '{1}' call:\n{}",
self.func_decl.id,
self.func_decl.params.len(),
self.func_call.args.len(),
format_wrong_code(code, &self.func_decl.loc),
format_wrong_code(code, &self.func_decl.loc)
)
}
}
impl<'a> semantic_error::MismatchedArgumentType<'a> {
fn format_error(&self, code: &str) -> String {
format!(
"calling function {} argument expected type: {}, found {} in position {}:\nFunction declaration: {}\nFunction call: {}",
self.func.id,
kind_to_string(&self.correct),
kind_to_string(&self.given),
(self.index + 1),
format_wrong_code(code, &self.func.loc),
format_wrong_code(code, &self.loc)
)
}
}
impl<'a> semantic_error::MissingReturn<'a> {
fn format_error(&self, code: &str) -> String {
format!(
"Missing return statement in non void [actual type: {}] function:\nFound:\n{}\nIn Function:\n{}",
kind_to_string(&self.kind),
format_wrong_code(code, &self.stat_loc),
format_wrong_code(code, &self.func_loc)
)
}
}
fn kind_to_string(k: &syntax_tree::Kind) -> String {
let output = match k {
syntax_tree::Kind::Bool => "bool",
syntax_tree::Kind::Int => "integer",
syntax_tree::Kind::Real => "real",
syntax_tree::Kind::Str => "string",
syntax_tree::Kind::Void => "void",
};
output.to_owned()
}
fn operator_to_string(o: &syntax_tree::Operator) -> String {
let output = match o {
syntax_tree::Operator::Equal => "Equal",
syntax_tree::Operator::NotEqual => "NotEqual",
syntax_tree::Operator::Greater => "Greater",
syntax_tree::Operator::GreaterEqual => "GreaterEqual",
syntax_tree::Operator::Less => "Less",
syntax_tree::Operator::LessEqual => "LessEqual",
syntax_tree::Operator::Add => "Add",
syntax_tree::Operator::Sub => "Sub",
syntax_tree::Operator::Mul => "Mul",
syntax_tree::Operator::Div => "Div",
syntax_tree::Operator::And => "And",
syntax_tree::Operator::Or => "Or",
};
output.to_owned()
}
| true |
7d5df272973e3876bc2d702e77a8e3c7e047e735
|
Rust
|
dhparrick1/feroxide
|
/src/data_sef.rs
|
UTF-8
| 9,092 | 2.796875 | 3 |
[
"MIT"
] |
permissive
|
use ion::Ion;
use types::*;
use std::collections::HashMap;
// Reference: https://en.wikipedia.org/wiki/Standard_enthalpy_of_formation
// In doubt: Reference: Binas 6th edition, table 57
// Missing values from http://www.mrbigler.com/misc/energy-of-formation.html
// and http://www.conradnaleway.net/ThermoData.PDF
/// Get the Standard Enthalpy of Formation (SEF) of a ion
pub fn get_sef(ion: &Ion) -> Option<SEF> {
if let Some(&sef) = SEFMAP.get(&ion) {
Some(sef)
} else {
None
}
}
// This is mainly used for debugging purposes, to make sure no invalid reaction are added
macro_rules! str_to_ion {
($s:expr) => {
safe_unwrap_ion(Ion::from_string($s), $s)
};
}
macro_rules! add_str_ion {
($map:expr, $s:expr, $sef:expr) => {
$map.insert(str_to_ion!($s), SEF::from($sef))
};
}
/// Check if the ion is defined, then unwrap. Otherwise: panic!
fn safe_unwrap_ion(ion: Option<Ion>, s: &str) -> Ion {
if ion == None {
panic!("Ion failed to create: {}", s);
}
ion.unwrap()
}
lazy_static! {
pub static ref SEFMAP: HashMap<Ion, SEF> = {
let mut map = HashMap::new();
// NOTE: Conditions: T = 298K, p = p0
add_str_ion!(map, "AlCl3", -705.63);
add_str_ion!(map, "Al2O3", -1669.8);
// add_str_ion!(map, "Al(OH)3", -1277.0);
// add_str_ion!(map, "Al2(SO4)3", -3440.0);
add_str_ion!(map, "BaCl2", -858.6);
add_str_ion!(map, "BaCO3", -1213.0);
// add_str_ion!(map, "Ba(OH)2", -944.7);
add_str_ion!(map, "BaO", -548.1);
add_str_ion!(map, "BaSO4", -1473.2);
add_str_ion!(map, "Be", 0.0);
// add_str_ion!(map, "Be(OH)2", -902.9999);
add_str_ion!(map, "BeO", -609.4);
add_str_ion!(map, "BCl3", -402.96);
add_str_ion!(map, "Br;-", -121.0); // (aq)
add_str_ion!(map, "Br", 111.884); //(g)
add_str_ion!(map, "Br2", 30.91);
add_str_ion!(map, "BrF3", -255.6);
add_str_ion!(map, "HBr", -36.29);
add_str_ion!(map, "CdO", -258.0);
// add_str_ion!(map, "Cd(OH)2", -561.0);
add_str_ion!(map, "CdS", -162.0);
add_str_ion!(map, "CdSO4", -935.0);
add_str_ion!(map, "Ca", 178.2); // (g)
add_str_ion!(map, "Ca;2+", 1925.9); // (g)
add_str_ion!(map, "CaC2", -59.8);
add_str_ion!(map, "CaCO3", -1206.9);
add_str_ion!(map, "CaCl2", -795.8);
add_str_ion!(map, "CaCl2", -877.3);
// add_str_ion!(map, "Ca3(PO4)2", -4132.0);
add_str_ion!(map, "CaF2", -1219.6);
add_str_ion!(map, "CaH2", -186.2);
//add_str_ion!(map, "Ca(OH)2", -986.09);
//add_str_ion!(map, "Ca(OH)2", -1002.82);
add_str_ion!(map, "CaO", -635.09);
add_str_ion!(map, "CaSO4", -1434.52);
add_str_ion!(map, "CaS", -482.4);
add_str_ion!(map, "CaSiO3", -1630.0);
add_str_ion!(map, "Cs", 76.5); // (g)
add_str_ion!(map, "Cs", 2.09); // (l)
add_str_ion!(map, "Cs;+", 457.964); // (aq)
add_str_ion!(map, "CsCl", -443.04);
add_str_ion!(map, "C", 0.0); // graphite
add_str_ion!(map, "C", 1.9); // diamond
add_str_ion!(map, "C", 716.67); // (g)
add_str_ion!(map, "CO2", -393.509);
add_str_ion!(map, "CS2", 89.41);
add_str_ion!(map, "CS2", 116.7);
add_str_ion!(map, "CO", -110.525);
add_str_ion!(map, "COCl2", -218.8);
add_str_ion!(map, "CO2", -419.26); // (aq)
add_str_ion!(map, "HCO3;-", -689.93);
add_str_ion!(map, "CO3;2-", -675.23);
add_str_ion!(map, "Cl", 121.7);
add_str_ion!(map, "Cl;-", -167.2); // (aq)
add_str_ion!(map, "CuO", -155.2);
add_str_ion!(map, "CuSO4", -769.98);
// add_str_ion!(map, "H", 218.0); // (g)
add_str_ion!(map, "H2O", -241.818);
add_str_ion!(map, "H2O", -285.8);
add_str_ion!(map, "H;+", 0.0); // (aq)
add_str_ion!(map, "OH;-", -230.0); // (aq)
add_str_ion!(map, "H2O2", -187.8);
add_str_ion!(map, "H3PO4", -1288.0);
add_str_ion!(map, "HCN", 130.5);
add_str_ion!(map, "HBr", -36.3);
add_str_ion!(map, "HCl", -92.3);
add_str_ion!(map, "HCl", -167.2);
add_str_ion!(map, "HF", -273.3);
add_str_ion!(map, "HI", 26.5);
add_str_ion!(map, "I2", 62.438); // (g)
add_str_ion!(map, "I2", 23.0); // (aq)
add_str_ion!(map, "I;-", -55.0); // (aq)
add_str_ion!(map, "Fe;2+", -89.1); // From conradnaleway
add_str_ion!(map, "Fe;3+", -48.5); // From conradnaleway
add_str_ion!(map, "Fe3C", 5.4);
add_str_ion!(map, "FeCO3", -750.6);
add_str_ion!(map, "FeCl3", -399.4);
add_str_ion!(map, "FeO", -272.0);
add_str_ion!(map, "Fe3O4", -1118.4);
add_str_ion!(map, "Fe2O3", -824.2);
//add_str_ion!(map, "Fe(OH)3", -823.0); // From mrbigler
//add_str_ion!(map, "Fe(OH)2", -569.0); // From conradnaleway
// NOTE: Temporary until the parentheses are implemented
add_str_ion!(map, "FeO2H2;", -569.0); // From conradnaleway
add_str_ion!(map, "FeSO4", -929.0);
//add_str_ion!(map, "Fe2(SO4)3", -2583.0);
add_str_ion!(map, "FeS", -102.0);
add_str_ion!(map, "FeS2", -178.0);
add_str_ion!(map, "PbO2", -277.0);
add_str_ion!(map, "PbS", -100.0);
add_str_ion!(map, "PbSO4", -920.0);
//add_str_ion!(map, "Pb(NO3)2", -452.0);
add_str_ion!(map, "PbSO4", -920.0);
add_str_ion!(map, "Mg;2", -466.85); // (aq)
add_str_ion!(map, "MgCO3", -1095.797);
add_str_ion!(map, "MgCl2", -641.8);
//add_str_ion!(map, "Mg(OH)2", -924.54); // (s)
//add_str_ion!(map, "Mg(OH)2", -926.8); // (aq)
add_str_ion!(map, "MgO", -601.6);
add_str_ion!(map, "MgSO4", -1278.2);
add_str_ion!(map, "MnO", -384.9);
add_str_ion!(map, "MnO2", -519.7);
add_str_ion!(map, "Mn2O3", -971.0);
add_str_ion!(map, "Mn3O4", -1387.0);
add_str_ion!(map, "MnO4;-", -543.0);
add_str_ion!(map, "HgO", 90.83);
add_str_ion!(map, "HgS", -58.2);
add_str_ion!(map, "NH3", -80.8); // (aq)
// NOTE: The Wikipedia page has 2 different values, but I used the one from the Binas
// NOTE: [Atkins - Physical Chemistry] has -46.11, like the other Wikipedia value
// NOTE: According to https://webbook.nist.gov/cgi/cbook.cgi?ID=C7664417&Mask=1 it has to be -45.94
add_str_ion!(map, "NH3", -45.94); // (g)
add_str_ion!(map, "NH4Cl", -314.55);
add_str_ion!(map, "NH4NO3", -365.6);
add_str_ion!(map, "NO2", 33.2);
add_str_ion!(map, "N2O", 82.05);
add_str_ion!(map, "NO", 90.29);
add_str_ion!(map, "N2O4", 9.16);
add_str_ion!(map, "N2O5", -43.1);
add_str_ion!(map, "N2O5", 11.3);
add_str_ion!(map, "O", 249.0);
add_str_ion!(map, "O3", 143.0);
add_str_ion!(map, "P4", 0.0); // white
add_str_ion!(map, "P", -17.4); // red
add_str_ion!(map, "P", -39.3); // black
add_str_ion!(map, "PCl3", -319.7);
add_str_ion!(map, "PCl3", -278.0);
add_str_ion!(map, "PCl5", -440.0);
add_str_ion!(map, "KBr", -392.2);
add_str_ion!(map, "K2CO3", -1150.0);
add_str_ion!(map, "KClO3", -391.4);
add_str_ion!(map, "KCl", -436.68);
add_str_ion!(map, "KF", -562.6);
add_str_ion!(map, "K2O", -363.0);
add_str_ion!(map, "KClO4", -430.12);
add_str_ion!(map, "Si", 368.2); // (g)
add_str_ion!(map, "SiC", -73.22);
add_str_ion!(map, "SiCl4", -640.1);
add_str_ion!(map, "SiO2", -910.86);
add_str_ion!(map, "AgBr", -99.5);
add_str_ion!(map, "AgCl", -127.01);
add_str_ion!(map, "AgI", -62.4);
add_str_ion!(map, "Ag2O", -31.1);
add_str_ion!(map, "Ag2S", -31.8);
add_str_ion!(map, "Na", 107.5); // (g)
add_str_ion!(map, "NaHCO3", -950.8);
add_str_ion!(map, "Na2CO3", -1130.77);
add_str_ion!(map, "NaCl", -407.27);
add_str_ion!(map, "NaCl", -411.12);
add_str_ion!(map, "NaCl", -385.92);
add_str_ion!(map, "NaCl", -181.42);
add_str_ion!(map, "NaF", -569.0);
add_str_ion!(map, "NaOH", -469.15);
add_str_ion!(map, "NaOH", -425.93);
add_str_ion!(map, "NaNO3", -446.2);
add_str_ion!(map, "NaNO3", -424.8);
add_str_ion!(map, "Na2O", -414.2);
add_str_ion!(map, "H2S", -20.63);
add_str_ion!(map, "SO2", -296.84);
add_str_ion!(map, "SO3", -395.7);
add_str_ion!(map, "H2SO4", -814.0);
add_str_ion!(map, "Ti", 468.0); // (g)
add_str_ion!(map, "TiCl4", -763.2);
add_str_ion!(map, "TiCl4", -804.2);
add_str_ion!(map, "TiO2", -944.7);
add_str_ion!(map, "Zn", 130.7); // (g)
add_str_ion!(map, "ZnCl2", -415.1);
add_str_ion!(map, "ZnO", -348.0);
add_str_ion!(map, "ZnSO4", -980.14);
map
};
}
| true |
0e76940d0ce7e5d9611aadb31a022679cd11459d
|
Rust
|
liamnickell/rustyboi
|
/src/mmu.rs
|
UTF-8
| 5,906 | 2.71875 | 3 |
[
"MIT"
] |
permissive
|
use std::error::Error;
use std::result::Result;
use std::fs::File;
use std::io::prelude::*;
use std::path::Path;
pub struct MMU {
// various MMU components: WRAM, VRAM, etc.
//0000 - 3FFF From cartridge, usually a fixed bank
//4000 - 7FFF From cartridge, switchable bank
//8000 - 9FFF Vram, Only bank 0 in Non-CGB mode Switchable bank 0/1 in CGB mode
//A000 - BFFF 8kb external ram
//C000 - CFFF 4KB Work RAM (WRAM) bank 0
//D000 - DFFF 4KB Work RAM (WRAM) bank 1~N Only bank 1 in Non-CGB mode Switchable bank 1~7 in CGB mode
//E000 - FDFF Mirror of C000~DDFF (ECHO RAM) Typically not used
    //FE00 - FE9F Sprite attribute table (OAM)
//FEA0 - FEFF Not Usable
//FF00 - FF7F I/O Registers
//FF80 - FFFE High RAM (HRAM)
//FFFF - FFFF Interrupts Enable Register (IE)
memory: [u8; 0x10000],
    cart: [u8; 0x4000], // cartridge
}
impl MMU {
pub fn init(rom_file: &str, boot_file: &str) -> MMU {
let mut mmu = MMU {
memory: [0; 0x10000],
cart: [0; 0x4000],
};
mmu.open_rom(rom_file, boot_file);
mmu.cart_init();
mmu
}
// internal information in each cart from 0100-014F
pub fn cart_init(&mut self){
//Scrolling Nintendo graphic
self.cart[0x104] = 0xCE;
self.cart[0x105] = 0xED;
self.cart[0x106] = 0x66;
self.cart[0x107] = 0x66;
self.cart[0x108] = 0xCC;
self.cart[0x109] = 0x0D;
self.cart[0x10A] = 0x00;
self.cart[0x10B] = 0x0B;
self.cart[0x10C] = 0x03;
self.cart[0x10D] = 0x73;
self.cart[0x10E] = 0x00;
self.cart[0x10F] = 0x83;
self.cart[0x110] = 0x00;
self.cart[0x111] = 0x0C;
self.cart[0x112] = 0x00;
self.cart[0x113] = 0x0D;
self.cart[0x114] = 0x00;
self.cart[0x115] = 0x08;
self.cart[0x116] = 0x11;
self.cart[0x117] = 0x1F;
self.cart[0x118] = 0x88;
self.cart[0x119] = 0x89;
self.cart[0x11A] = 0x00;
self.cart[0x11B] = 0x0E;
self.cart[0x11C] = 0xDC;
self.cart[0x11D] = 0xCC;
self.cart[0x11E] = 0x6E;
self.cart[0x11F] = 0xE6;
self.cart[0x120] = 0xDD;
self.cart[0x121] = 0xDD;
self.cart[0x122] = 0xD9;
self.cart[0x123] = 0x99;
self.cart[0x124] = 0xBB;
self.cart[0x125] = 0xBB;
self.cart[0x126] = 0x67;
self.cart[0x127] = 0x63;
self.cart[0x128] = 0x6E;
self.cart[0x129] = 0x0E;
self.cart[0x12A] = 0xEC;
self.cart[0x12B] = 0xCC;
self.cart[0x12C] = 0xDD;
self.cart[0x12D] = 0xDC;
self.cart[0x12E] = 0x99;
self.cart[0x12F] = 0x9F;
self.cart[0x130] = 0xBB;
self.cart[0x131] = 0xB9;
self.cart[0x132] = 0x33;
self.cart[0x133] = 0x3E;
self.cart[0x143] = 0x80; //color GB
self.cart[0x146] = 0; // gb, not super gb
self.cart[0x147] = 0; // using ROM-only cartridge for now
self.cart[0x148] = 0; // using 32kb/two banks for now
self.cart[0x149] = 0; // not using RAM in cartridge
self.cart[0x14A] = 1; // we are not japanese lol
self.cart[0x14D] = 1; // document does not make this clear, not sure
}
pub fn read_byte(&mut self, addr: u16) -> u8 {
self.memory[addr as usize]
}
    pub fn write_byte(&mut self, addr: u16, data: u8) {
        self.memory[addr as usize] = data;
        // echo RAM: E000-FDFF mirrors C000-DDFF (see section 2.5.2)
        if addr >= 0xE000 && addr < 0xFE00 {
            self.memory[(addr - 0x2000) as usize] = data;
        } else if addr >= 0xC000 && addr < 0xDE00 {
            self.memory[(addr + 0x2000) as usize] = data;
        }
    }
pub fn read_word(&mut self, addr: u16) -> u16 {
((self.memory[(addr + 1) as usize] as u16) << 8) | (self.memory[addr as usize] as u16) // little endian
}
    pub fn write_word(&mut self, addr: u16, data: u16) {
        self.memory[addr as usize] = (data & 0x00ff) as u8; // little endian
        self.memory[(addr + 1) as usize] = (data >> 8) as u8;
        // echo RAM: E000-FDFF mirrors C000-DDFF (see section 2.5.2)
        if addr >= 0xE000 && addr < 0xFE00 {
            let mirror = addr - 0x2000;
            self.memory[mirror as usize] = (data & 0x00ff) as u8;
            self.memory[(mirror + 1) as usize] = (data >> 8) as u8;
        } else if addr >= 0xC000 && addr < 0xDE00 {
            let mirror = addr + 0x2000;
            self.memory[mirror as usize] = (data & 0x00ff) as u8;
            self.memory[(mirror + 1) as usize] = (data >> 8) as u8;
        }
    }
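    // Example of the echo-RAM behaviour implemented above: a write to 0xC100
    // is also visible at 0xE100, and a write to 0xE100 lands in 0xC100 as well.
    //
    //     mmu.write_byte(0xC100, 0xAB);
    //     assert_eq!(mmu.read_byte(0xE100), 0xAB);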
pub fn open_rom(&mut self, name: &str, boot: &str){
//let romName = *name;
let path = Path::new(name);
let display = path.display();
let mut file = match File::open(&path) {
Err(why) => panic!("couldn't open {}: {}", display, why.to_string()),
Ok(file) => file,
};
let mut rom_data = Vec::new();
        file.read_to_end(&mut rom_data)
            .expect("could not read ROM file");
//this is probably incorrect idk
//self.cart = romData;
//load romData into memory
for i in 0..rom_data.len(){
self.memory[i] = rom_data[i];
}
        // debug: dump the ROM bytes
        // for byte in &rom_data { print!("{:X}, ", byte); }
}
fn test(&mut self, x: u32) -> u32{
x + 2
}
}
| true |
cb6399c8d73590ef7646bc436f623c65d9aaccea
|
Rust
|
maekawatoshiki/eb
|
/eb_parser/src/expr.rs
|
UTF-8
| 7,131 | 2.71875 | 3 |
[
"MIT"
] |
permissive
|
use super::{function, Context, Error};
use crate::{
ast::expr,
lexer::token::{DelimKind, PunctKind, TokenKind},
};
use anyhow::Result;
pub fn parse(ctx: &mut Context) -> Result<expr::Node> {
parse_binop_eq_ne(ctx)
}
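// Precedence ladder used below, from loosest to tightest binding:
// equality (== !=) < additive (+ -) < multiplicative (* /) < postfix call < primary.
// Each binary level is left-associative, so `1 - 2 + 3` parses as `(1 - 2) + 3`.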
fn parse_binop_eq_ne(ctx: &mut Context) -> Result<expr::Node> {
let mut lhs = parse_binop_add_sub(ctx)?;
loop {
let loc = ctx.cur_loc();
let eq = ctx.skip_punct(PunctKind::Eq);
let neq = ctx.skip_punct(PunctKind::Neq);
if !eq && !neq {
break;
}
let loc = loc?;
let rhs = parse_binop_add_sub(ctx)?;
lhs = expr::Node::new(
expr::Kind::BinOp(
if eq {
expr::BinOpKind::Eq
} else {
expr::BinOpKind::Neq
},
Box::new(lhs.clone()),
Box::new(rhs),
),
loc,
);
}
Ok(lhs)
}
fn parse_binop_add_sub(ctx: &mut Context) -> Result<expr::Node> {
let mut lhs = parse_binop_mul_div(ctx)?;
loop {
let loc = ctx.cur_loc();
let plus = ctx.skip_punct(PunctKind::Plus);
let minus = ctx.skip_punct(PunctKind::Minus);
if !plus && !minus {
break;
}
let loc = loc?;
let rhs = parse_binop_mul_div(ctx)?;
lhs = expr::Node::new(
expr::Kind::BinOp(
if plus {
expr::BinOpKind::Add
} else {
expr::BinOpKind::Sub
},
Box::new(lhs.clone()),
Box::new(rhs),
),
loc,
);
}
Ok(lhs)
}
fn parse_binop_mul_div(ctx: &mut Context) -> Result<expr::Node> {
let mut lhs = parse_postfix(ctx)?;
loop {
let loc = ctx.cur_loc();
let star = ctx.skip_punct(PunctKind::Star);
let slash = ctx.skip_punct(PunctKind::Slash);
if !star && !slash {
break;
}
let loc = loc?;
let rhs = parse_postfix(ctx)?;
lhs = expr::Node::new(
expr::Kind::BinOp(
if star {
expr::BinOpKind::Mul
} else {
expr::BinOpKind::Div
},
Box::new(lhs.clone()),
Box::new(rhs),
),
loc,
);
}
Ok(lhs)
}
fn parse_postfix(ctx: &mut Context) -> Result<expr::Node> {
let base = parse_primary(ctx)?;
let peek = match ctx.peek() {
Some(peek) => peek,
None => return Ok(base),
};
let loc = *peek.loc();
match peek.kind() {
// Call
TokenKind::OpenDelim(DelimKind::Paren) => {
assert!(ctx.next().is_some());
Ok(expr::Node::new(
expr::Kind::Call(Box::new(base), parse_call_args(ctx)?),
loc,
))
}
_ => Ok(base),
}
}
fn parse_call_args(ctx: &mut Context) -> Result<Vec<expr::Node>> {
if ctx.skip_close_delim(DelimKind::Paren) {
return Ok(vec![]);
}
let mut args = vec![];
loop {
let arg = parse(ctx)?;
args.push(arg);
if ctx.skip_punct(PunctKind::Comma) {
continue;
}
ctx.expect_close_delim(DelimKind::Paren)?;
break;
}
Ok(args)
}
fn parse_primary(ctx: &mut Context) -> Result<expr::Node> {
let peek = ctx.peek().ok_or(Error::EOF)?;
let loc = *peek.loc();
match peek.kind() {
TokenKind::Int(int) => {
let int = int.parse().unwrap();
ctx.next().unwrap();
Ok(expr::Node::new(expr::Kind::Int(int), loc))
}
TokenKind::Ident(ident) if ident == &"func" => Ok(expr::Node::new(
expr::Kind::Function(Box::new(function::parse(ctx)?)),
loc,
)),
TokenKind::Ident(ident) if ident == &"if" => Ok(expr::Node::new(parse_if(ctx)?, loc)),
TokenKind::Ident(ident) if ident == &"return" => {
Ok(expr::Node::new(parse_return(ctx)?, loc))
}
TokenKind::Ident(ident) => {
let ident = ident.to_string();
ctx.next().unwrap();
Ok(expr::Node::new(expr::Kind::Ident(ident), loc))
}
_ => Err(Error::ExpectedAny(loc, "integer value or identifier").into()),
}
}
fn parse_if(ctx: &mut Context) -> Result<expr::Kind> {
ctx.expect_keyword("if")?;
let cond = parse(ctx)?;
ctx.expect_punct(PunctKind::Colon)?;
let then_expr = parse_body(ctx)?;
let else_expr;
if ctx.skip_keyword("else") {
ctx.expect_punct(PunctKind::Colon)?;
else_expr = Some(Box::new(parse_body(ctx)?));
} else {
else_expr = None;
}
Ok(expr::Kind::If(
Box::new(cond),
Box::new(then_expr),
else_expr,
))
}
fn parse_return(ctx: &mut Context) -> Result<expr::Kind> {
ctx.expect_keyword("return")?;
Ok(expr::Kind::Return(Box::new(parse(ctx)?)))
}
pub fn parse_body(ctx: &mut Context) -> Result<expr::Node> {
let loc = ctx.cur_loc();
if ctx.skip_punct(PunctKind::DoubleSemicolon) {
return Ok(expr::Node::new(expr::Kind::Exprs(vec![]), loc?));
}
let mut body = vec![];
loop {
body.push(parse(ctx)?);
if ctx.skip_punct(PunctKind::Semicolon) {
continue;
}
if ctx.skip_punct(PunctKind::DoubleSemicolon) {
return Ok(expr::Node::new(expr::Kind::Exprs(body), loc?));
}
}
}
#[cfg(test)]
mod test {
extern crate insta;
use super::*;
use crate::lexer::{source::Source, tokenize};
fn parse_str(s: &str) -> expr::Node {
let source = Source::String(s.to_string());
let mut ctx = Context::new(tokenize(&source));
parse(&mut ctx).expect("fail to parse")
}
#[test]
fn parse1() {
insta::assert_debug_snapshot!(parse_str(r#"x"#));
}
#[test]
fn parse2() {
insta::assert_debug_snapshot!(parse_str(r#"x +x"#));
}
#[test]
fn parse3() {
insta::assert_debug_snapshot!(parse_str(r#"123 + x"#));
}
#[test]
fn parse4() {
insta::assert_debug_snapshot!(parse_str(r#"1 * 2 + 3"#));
}
#[test]
fn parse5() {
insta::assert_debug_snapshot!(parse_str(r#"f()"#));
}
#[test]
fn parse6() {
insta::assert_debug_snapshot!(parse_str(r#"f(1, x)"#));
}
#[test]
fn parse7() {
insta::assert_debug_snapshot!(parse_str(r#"x == x"#));
}
#[test]
fn parse8() {
insta::assert_debug_snapshot!(parse_str(r#"x != x"#));
}
#[test]
fn parse9() {
insta::assert_debug_snapshot!(parse_str(
r#"if x == 1:
123;;"#
));
}
#[test]
fn parse10() {
insta::assert_debug_snapshot!(parse_str(
r#"if x == 1:
x + 1 ;
x ;;
else:
42;;"#
));
}
#[test]
fn parse11() {
insta::assert_debug_snapshot!(parse_str(r#"return 123"#));
}
}
| true |
eb829b22ad76fca6cceba54d9da95e6559859a58
|
Rust
|
rylynchen/TRPL-Code
|
/TRPL_05_01_struct_instance/src/main.rs
|
UTF-8
| 655 | 3.3125 | 3 |
[] |
no_license
|
use std::string::String;
struct User {
email: String,
username: String,
active: bool,
sign_in_count: u64,
}
fn main() {
let mut user = build_user(String::from("[email protected]"), String::from("rylyn"));
println!("email: {}", user.email);
user.email = String::from("[email protected]");
println!("email: {}", user.email);
let user2 = User {
username: String::from("test2"),
..user
};
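    // Struct update syntax: `..user` fills the remaining fields (email, active,
    // sign_in_count) from `user`. The non-Copy `email` String is moved, so `user`
    // as a whole can no longer be used afterwards, though `user.username` still can.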
println!("u2 username: {} email: {}", user2.username, user2.email);
}
fn build_user(email: String, username: String) -> User {
User {
email,
username,
active: true,
sign_in_count: 1,
}
}
| true |
ebc4f37f2453a5426a6698f317ec360fae8515b0
|
Rust
|
blaenk/anymap
|
/src/with_clone.rs
|
UTF-8
| 779 | 3.359375 | 3 |
[
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0",
"MIT"
] |
permissive
|
use std::fmt;
#[doc(hidden)]
pub trait CloneToAny {
/// Clone `self` into a new `Box<Any>` object.
fn clone_to_any(&self) -> Box<Any>;
}
impl<T: Any + Clone> CloneToAny for T {
fn clone_to_any(&self) -> Box<Any> {
Box::new(self.clone())
}
}
#[doc(hidden)]
/// Pretty much just `std::any::Any + Clone`.
pub trait Any: ::std::any::Any + CloneToAny { }
impl<T: ::std::any::Any + Clone> Any for T { }
impl Clone for Box<Any> {
fn clone(&self) -> Box<Any> {
(**self).clone_to_any()
}
}
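// With the blanket impls above, any `T: 'static + Clone` can be boxed as a
// `Box<Any>` and cloned through the trait object, e.g.
//
//     let a: Box<Any> = Box::new(5i32);
//     let b = a.clone(); // dispatches to clone_to_any() on the i32 inside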
impl<'a> fmt::Debug for &'a Any {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.pad("&Any")
}
}
impl<'a> fmt::Debug for Box<Any> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.pad("Box<Any>")
}
}
| true |
f57a5f332292935ed42ca56705c2b34949330f2a
|
Rust
|
ksakiyama/practice_atcoder
|
/arc071c/src/main.rs
|
UTF-8
| 1,414 | 3.109375 | 3 |
[] |
no_license
|
use std::collections::HashMap;
use std::cmp;
fn main() {
const INF : i32 = 10000;
let ascii_lower = String::from("abcdefghijklmnopqrstuvwxyz");
let mut vec = vec![];
let n = read();
for _ in 0..n {
let mut map = HashMap::new();
let s = read_line_str();
for c in s.chars() {
if map.contains_key(&c) {
*map.get_mut(&c).unwrap() += 1;
} else {
map.insert(c, 1);
}
}
vec.push(map);
}
let mut table = HashMap::new();
for c in ascii_lower.chars() {
let mut cnt = INF;
for map in &vec {
if map.contains_key(&c) {
cnt = cmp::min(*map.get(&c).unwrap(), cnt);
} else {
cnt = 0;
}
}
if cnt >= 1 {
table.insert(c, cnt);
}
}
let mut ans = String::from("");
for c in ascii_lower.chars() {
if table.contains_key(&c) {
for _ in 0..*table.get(&c).unwrap() {
ans.push(c);
}
}
}
// println!("{:?}", table);
println!("{}", ans);
}
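// Worked example of what main computes: the longest string whose letters can be
// taken from every input, i.e. each letter used min(count) times across all
// strings, emitted in alphabetical order. For inputs "cbaa", "daacc", "acacac"
// the per-letter minima are a:2 and c:1, so the answer is "aac".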
fn read() -> i32 {
let mut s = String::new();
std::io::stdin().read_line(&mut s).ok();
s.trim().parse().ok().unwrap()
}
fn read_line_str() -> String {
let mut s = String::new();
std::io::stdin().read_line(&mut s).ok();
return s;
}
| true |
b446c892027776b3f66b0bf14384b7e00139e1f9
|
Rust
|
rksm/adventofcode
|
/2018/rust/src/main.rs
|
UTF-8
| 672 | 2.796875 | 3 |
[
"MIT"
] |
permissive
|
#![feature(toowned_clone_into)]
#![allow(dead_code)]
#![allow(unused_variables)]
#![allow(unused_imports)]
mod day1;
mod day2;
mod day3;
mod day4;
mod day5;
mod day9;
mod util;
fn input(day: u32) -> String {
let f = format!("data/day{}.txt", day);
std::fs::read_to_string(f).expect("input not there?")
}
fn main() {
let args: Vec<String> = std::env::args().collect();
match args[1].as_ref() {
"1" => day1::run(input(1)),
"2" => day2::run(input(2)),
"3" => day3::run(input(3)),
"4" => day4::run(input(4)),
"5" => day5::run(input(5)),
"9" => day9::run(input(9)),
_ => println!("No input"),
}
}
| true |
d5f30b9ace515dc59224ca15c941dcdda8944d27
|
Rust
|
t-vlad/transaction_parser
|
/src/main.rs
|
UTF-8
| 13,246 | 3.1875 | 3 |
[] |
no_license
|
use std::{env, io};
use csv;
use serde::{Serialize, Deserialize};
use std::collections::HashMap;
fn help() {
println!("
This program parses one CSV file as input.
Eg: cargo run -- transactions.csv
OR
transactions_parser transactions.csv
The output is text in the CSV format as well
    Hint: you can save the output as a CSV file:
cargo run -- input.csv > output.csv
For more info check the README file
");
}
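// Illustrative input (an assumption based on the `Transaction` struct below,
// not an excerpt from a spec): a CSV with a header row and one transaction per
// line, e.g.
//
//     type,client,tx,amount
//     deposit,1,1,10.0
//     withdrawal,1,2,3.5
//     dispute,1,2,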
fn main() {
let args: Vec<String> = env::args().collect();
let input_file:String;
// keep all the accounts here
let mut accounts: HashMap<u16, Account> = HashMap::new();
// keep relevant transactions here (deposit and withdarawals)
let mut transactions: HashMap<u32, Transaction> = HashMap::new();
// checks correct number of arguments is received
// TODO: check if valid CSV file, and fail gracefully
match args.len() {
1 => {
eprintln!("No arguments passed!");
help();
return;
},
2 => {
match args[1].parse::<String>() {
Ok(inp) => {
input_file = inp;
},
_ => {
eprintln!("Sorry, could not parse the input file!");
return;
}
}
},
_ => {
help();
return;
}
}
// create the CSV reader
let mut reader = match csv::Reader::from_path(input_file) {
Ok(r) => r,
Err(e) => {
eprintln!("Could not create CSV reader for input.
Error: {}", e );
return;
}
};
// go through input transactions
for result in reader.deserialize() {
match result {
Ok(r) => {
let transaction: Transaction = r;
// get or create a new client account
let mut client_account = match accounts.get(&transaction.client) {
Some(c) => c.to_owned(),
None => {
accounts.insert(transaction.client, Account::new(&transaction.client));
match accounts.get(&transaction.client) {
Some(nc) => nc.to_owned(),
None => {
panic!("Could not add a new account!")
}
}
}
};
let transaction_type = transaction.r#type.as_str();
                // depending on operation, do the needful :)
match transaction_type {
"deposit" => {
if client_account.deposit(transaction.get_amount()) {
transactions.insert(transaction.tx, transaction);
}
},
"withdrawal" => {
if client_account.withdraw(transaction.get_amount()) {
transactions.insert(transaction.tx, transaction);
}
},
"dispute" => {
let t = transactions.get(&transaction.tx);
match t {
Some(tr) => {
if !tr.disputed {
if client_account.dispute(tr) {
let mut newt = tr.clone();
newt.disputed = true;
transactions.insert(newt.tx, newt);
}
}
},
None => {
// no transaction can be found to be disputed
// TODO: maybe let the user know
}
}
},
"resolve" => {
let t = transactions.get(&transaction.tx);
match t {
Some(tr) => {
if tr.disputed {
if client_account.resolve(tr) {
let mut newt = tr.clone();
newt.disputed = false;
transactions.insert(newt.tx, newt);
}
}
},
None => {
// no transaction can be found to be resolved
// TODO: maybe let the user know
}
}
},
"chargeback" => {
let t = transactions.get(&transaction.tx);
match t {
Some(tr) => {
if tr.disputed {
if client_account.chargeback(tr) {
let mut newt = tr.clone();
newt.disputed = false;
transactions.insert(newt.tx, newt);
}
}
},
None => {
// no transaction can be found to be chargedback
// TODO: maybe let the user know
}
}
},
_ => {
eprintln!("Unsuported operation: {}, skipping", transaction_type)
}
}
// update the account in the hashmap
accounts.insert(client_account.client, client_account);
},
Err(e) => {
eprintln!("Could not get line!. Error: {}", e)
}
}
}
// create the CSV Writer
let mut writer = csv::Writer::from_writer(io::stdout());
// start serializing output
for (_client_id, acc) in &accounts {
if let Err(err) = writer.serialize(acc){
eprintln!("Cannot write line ! Err: {}", err);
}
}
// flush
// TODO: check if we should maybe just output directly every line (save memory, etc)
if let Err(err) = writer.flush() {
eprintln!("Could not flush the CSV data to output. Err: {}", err);
}
}
// The transaction data representation
#[derive(Debug, Clone, Default, Deserialize)]
struct Transaction {
r#type: String,
client: u16,
tx: u32,
amount: Option<f32>,
#[serde(skip_deserializing)]
disputed: bool
}
impl Transaction {
fn get_amount(&self) -> f32 {
// default the amount to zero
let amount = match self.amount {
Some(a) => a,
None => 0.0
};
amount
}
}
// Account representation
#[derive(Debug, Clone, Serialize)]
struct Account {
client: u16,
available: f32,
held: f32,
total: f32,
locked: bool
}
impl Account {
// Some sane defaults for a new account
fn new(client_id: &u16) -> Account {
Account{
client: client_id.to_owned(),
available: 0.0,
held: 0.0,
total: 0.0,
locked: false
}
}
fn round_amounts(&mut self) {
self.available = (self.available * 10000.0).round() / 10000.0;
self.held = (self.held * 10000.0).round() / 10000.0;
self.total = (self.total * 10000.0).round() / 10000.0;
}
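    // round_amounts keeps four decimal places, e.g. 1.23456 -> 1.2346 and
    // 5.10019 -> 5.1002, which matches the precision used in the tests below.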
fn deposit(&mut self, amount: f32) -> bool{
match self.locked {
false => {
self.available += amount;
self.total += amount;
self.round_amounts();
},
true => {
eprintln!("Account {} locked, cannot deposit {}", self.client, amount);
return false;
}
}
true
}
fn withdraw(&mut self, amount: f32) -> bool{
match self.locked {
false => {
if self.available - amount > 0.0 {
self.available -= amount;
self.total -= amount;
self.round_amounts();
} else {
eprintln!("Could not withdraw {} from {}. Not enough funds!", amount, self.client);
return false;
}
},
true => {
eprintln!("Account {} locked, cannot withdraw {}", self.client, amount);
return false;
}
}
true
}
fn dispute(&mut self, tx: &Transaction) -> bool{
match self.locked {
false => {
let amt = tx.get_amount();
self.held += amt;
self.available -= amt;
self.round_amounts();
},
true => {
eprintln!("Account {} locked, cannot dispute!", self.client);
return false;
}
}
true
}
fn resolve(&mut self, tx: &Transaction) -> bool{
match self.locked {
false => {
let amt = tx.get_amount();
self.held -= amt;
self.available += amt;
self.round_amounts();
},
true => {
eprintln!("Account {} locked, cannot resolve!", self.client);
return false;
}
}
true
}
fn chargeback(&mut self, tx: &Transaction) -> bool {
match self.locked {
false => {
let amt = tx.get_amount();
self.held -= amt;
self.total -= amt;
self.locked = true;
self.round_amounts();
},
true => {
eprintln!("Account {} locked, cannot chargeback!", self.client);
return false;
}
}
true
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_deposit() {
let mut acc = Account::new(&123);
acc.deposit(3.1415);
assert_eq!(acc.available, 3.1415);
assert_eq!(acc.total, 3.1415);
}
#[test]
fn test_withdraw() {
let mut acc = Account::new(&123);
acc.deposit(3.1415);
acc.withdraw(2.1414);
assert_eq!(acc.available, 1.0001);
assert_eq!(acc.total, 1.0001);
// test withdraw more than available
assert_eq!(acc.withdraw(12.1414), false);
}
#[test]
fn test_dispute() {
let mut acc = Account::new(&123);
acc.deposit(10.1234);
let tr = Transaction{
r#type: String::from("withdraw"),
client: 123,
tx: 5001,
amount: Some(5.1002),
disputed: false
};
acc.withdraw(5.1002);
acc.dispute(&tr);
assert_eq!(acc.available, -0.077);
assert_eq!(acc.held, 5.1002);
assert_eq!(acc.total, 5.0232);
}
#[test]
fn test_resolve() {
let mut acc = Account::new(&123);
acc.deposit(10.1234);
let tr = Transaction{
r#type: String::from("withdraw"),
client: 123,
tx: 5001,
amount: Some(5.1002),
disputed: false
};
acc.withdraw(5.1002);
acc.dispute(&tr);
acc.resolve(&tr);
assert_eq!(acc.available, 5.0232);
assert_eq!(acc.held, 0.0);
assert_eq!(acc.total, 5.0232);
}
#[test]
fn test_chargeback() {
let mut acc = Account::new(&123);
acc.deposit(10.1234);
let tr = Transaction{
r#type: String::from("withdraw"),
client: 123,
tx: 5001,
amount: Some(5.1002),
disputed: false
};
acc.withdraw(5.1002);
acc.dispute(&tr);
acc.chargeback(&tr);
assert_eq!(acc.available, -0.077);
assert_eq!(acc.held, 0.0);
assert_eq!(acc.total, -0.077);
assert_eq!(acc.locked, true);
}
#[test]
fn test_locked_operations() {
let mut acc = Account::new(&123);
acc.deposit(10.1234);
let tr = Transaction{
r#type: String::from("withdraw"),
client: 123,
tx: 5001,
amount: Some(5.1002),
disputed: false
};
acc.withdraw(5.1002);
acc.dispute(&tr);
acc.chargeback(&tr);
assert_eq!(acc.deposit(0.1), false);
assert_eq!(acc.withdraw(0.1), false);
assert_eq!(acc.dispute(&tr), false);
assert_eq!(acc.resolve(&tr), false);
assert_eq!(acc.chargeback(&tr), false);
}
}
| true |
c1b1b73f65ec16c27eea43e61b131a8d7f90a6bb
|
Rust
|
1u0/puzzles
|
/advent-of-code-2020/day-13/src/main.rs
|
UTF-8
| 2,348 | 3.140625 | 3 |
[] |
no_license
|
use gcd::Gcd;
use std::io::{self, BufRead};
fn load_puzzle() -> (i32, Vec<Option<i32>>) {
let mut timestamp = 0;
let mut schedule = Vec::new();
for (i, line) in io::stdin().lock().lines().enumerate() {
let line = line.unwrap();
match i {
0 => timestamp = line.parse().unwrap(),
1 => {
schedule = line
.split(',')
.map(|str| match str {
"x" => None,
_ => Some(str.parse().unwrap()),
})
.collect()
}
_ => break,
}
}
(timestamp, schedule)
}
fn solve1(timestamp: i32, schedule: &[Option<i32>]) {
let result = schedule
.iter()
.filter_map(|period| period.as_ref())
.map(|period| ((timestamp + period - 1) / period * period, period))
.min_by_key(|entry| entry.0)
.map(|entry| (entry.0 - timestamp) * entry.1);
println!("Result: {:?}", result);
}
fn solve2(schedule: &[Option<i32>]) {
// Recursive solution:
// Invariant:
// result[i] := (a[i], p[i]), where:
// a[i] is minimum value: (a[i] + sj) % pj == 0
// p[i] is minimum value: p[i] % pj == 0
// for j in 0..i
// Recursion step:
// result[i+1] := (a[i+1], p[i+1]), where:
// a[i+1] := (a[i] + x * p[i]) % pi == 0, min by x
// p[i+1] := p[i] * pi / gcd(p[i], pi)
fn solve(result: (u64, u64), t: (u64, u64)) -> (u64, u64) {
let s = (t.1 - t.0 % t.1) % t.1;
if result.1 == 0 {
(s, t.1)
} else {
// solve(&result, &t)
let d = (result.1 as u64).gcd(t.1 as u64);
let p = result.1 / d * t.1;
let mut a = result.0;
for _i in 0..t.1 {
if a % t.1 == s {
break;
}
a += result.1;
}
(a, p)
}
}
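    // Small worked example of the fold: for a schedule "7,13" we need a t with
    // t % 7 == 0 and (t + 1) % 13 == 0. Starting from (0, 0), the first step
    // yields (0, 7); the second walks 0, 7, 14, ... until 77 % 13 == 12, giving
    // (77, 91), so t = 77.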
let result = schedule
.iter()
.enumerate()
.filter_map(|(i, period)| period.map(|p| (i as u64, p as u64)))
.fold((0u64, 0u64), solve); // TODO: use fold_first (aka reduce) after the api is stabilized.
println!("Result: {}", result.0);
}
fn main() {
let puzzle = load_puzzle();
solve1(puzzle.0, &puzzle.1);
solve2(&puzzle.1);
}
| true |
e0c89eb885a323e1e40924e1d9c22d74abfae726
|
Rust
|
sabinchitrakar/wilders-rs
|
/src/lib.rs
|
UTF-8
| 2,253 | 3.0625 | 3 |
[] |
no_license
|
#![feature(external_doc)]
use ta_common::traits::Indicator;
#[doc(include = "../README.md")]
pub struct Wilders {
period: u32,
prev_avg: f64,
sum: f64,
index: u32,
}
impl Wilders {
pub fn new(period: u32) -> Wilders {
Self {
period,
prev_avg: 0.0,
sum: 0.0,
index: 0,
}
}
}
impl Indicator<f64, Option<f64>> for Wilders {
fn next(&mut self, input: f64) -> Option<f64> {
self.index = self.index + 1;
println!("index {} sum {}", self.index, self.sum);
if self.index < self.period {
self.sum = self.sum + input;
return None;
}
if self.index == (self.period) {
self.sum = self.sum + input;
let res = self.sum / self.period as f64;
println!("index {} sum {}", self.index, self.sum);
self.prev_avg = res;
return Some(res);
}
let period = self.period as f64;
let res = (self.prev_avg * (period - 1.0) + input) / period;
self.prev_avg = res;
Some(res)
}
fn reset(&mut self) {
self.sum = 0.0;
self.index = 0;
self.prev_avg = 0.0;
}
}
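// Smoothing recurrence implemented above: the first output is the plain average
// of the first `period` inputs, after which each output is
// (prev * (period - 1) + input) / period. With period = 5 the seed average of
// 81.59, 81.06, 82.87, 83.00, 83.61 is 82.426, and the next value is
// (82.426 * 4 + 83.15) / 5 = 82.5708, as the tests below check.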
#[cfg(test)]
mod tests {
use crate::Wilders;
use ta_common::traits::Indicator;
#[test]
fn it_works() {
let mut wilders = Wilders::new(5);
assert_eq!(wilders.next(81.59), None);
assert_eq!(wilders.next(81.06), None);
assert_eq!(wilders.next(82.87), None);
assert_eq!(wilders.next(83.00), None);
assert_eq!(wilders.next(83.61), Some(82.426));
assert_eq!(wilders.next(83.15), Some(82.5708));
assert_eq!(wilders.next(82.84), Some(82.62464));
assert_eq!(wilders.next(83.99), Some(82.897712));
assert_eq!(wilders.next(84.55), Some(83.2281696));
assert_eq!(wilders.next(84.36), Some(83.45453568));
assert_eq!(wilders.next(85.53), Some(83.86962854400001));
assert_eq!(wilders.next(86.54), Some(84.40370283520001));
assert_eq!(wilders.next(86.89), Some(84.90096226816));
assert_eq!(wilders.next(87.77), Some(85.474769814528));
assert_eq!(wilders.next(87.29), Some(85.83781585162241));
}
}
| true |
b4f64284842d2f666948a45e53a03ecbad7857a9
|
Rust
|
mcountryman/chip8
|
/src/ui/debug.rs
|
UTF-8
| 3,853 | 2.515625 | 3 |
[] |
no_license
|
//! Debug widgets.
use crate::{insn::Insn, vm::Vm};
use tui::{
backend::Backend,
layout::{Alignment, Rect},
style::{Color, Style},
text::{Span, Spans},
widgets::{Block, Borders, Paragraph},
Frame,
};
pub fn keys<B: Backend>(vm: &Vm, area: Rect, ui: &mut Frame<B>) {
let mut spans = Vec::new();
let mut keys = vm.keys.to_vec();
for _ in 0..4 {
let mut line = Vec::new();
for _ in 0..area.width / 3 {
if keys.is_empty() {
break;
}
match keys.pop() {
None => break,
Some(key) => line.push(Span::from(format!("{key:#x} "))),
};
}
spans.push(Spans::from(line));
}
ui.render_widget(
Paragraph::new(spans).block(
Block::default()
.title(" keys ")
.title_alignment(Alignment::Center)
.borders(Borders::ALL),
),
area,
);
}
pub fn registers<B: Backend>(vm: &Vm, area: Rect, ui: &mut Frame<B>) {
let mut spans = Vec::new();
for i in 0..4 {
let mut line = Vec::new();
for j in i * 4..i * 4 + 4 {
let reg = format!("V{j:x}: ");
let val = format!("{:#04x} ", vm.reg8[j]);
line.push(Span::styled(reg, Style::default().fg(Color::Gray)));
line.push(Span::from(val));
}
let (reg, val) = match i {
0 => ("PC: ", format!("{:#04x}", vm.reg_pc)),
1 => (" I: ", format!("{:#04x}", vm.reg_i)),
2 => ("DT: ", format!("{:#04x}", vm.reg_dt)),
3 => ("ST: ", format!("{:#04x}", vm.reg_st)),
_ => unreachable!(),
};
line.push(Span::styled(reg, Style::default().fg(Color::Gray)));
line.push(Span::from(val));
spans.push(Spans::from(line));
}
ui.render_widget(
Paragraph::new(spans).block(
Block::default()
.title(" registers ")
.title_alignment(Alignment::Center)
.borders(Borders::ALL),
),
area,
);
}
pub fn stack<B: Backend>(vm: &Vm, area: Rect, ui: &mut Frame<B>) {
let mut spans = Vec::new();
for i in 0..16 {
let mut line = Vec::new();
if vm.reg_sp == i as u8 {
line.push(Span::styled("> ", Style::default().fg(Color::Blue)));
} else {
line.push(Span::from(" "));
}
let add = format!("{i:#04x}");
let val = format!(": {:#06x} ", vm.stack[i]);
line.push(Span::styled(add, Style::default().fg(Color::Gray)));
line.push(Span::from(val));
spans.push(Spans::from(line));
}
ui.render_widget(
Paragraph::new(spans).block(
Block::default()
.title(" stack ")
.title_alignment(Alignment::Center)
.borders(Borders::ALL),
),
area,
);
}
pub fn disassembly<B: Backend>(vm: &Vm, area: Rect, ui: &mut Frame<B>) {
let mut spans = Vec::new();
let beg = (vm.reg_pc as usize).saturating_sub(area.height as usize);
let beg = if beg % 2 == 0 { beg } else { beg + 1 };
let end = (vm.reg_pc as usize).saturating_add(area.height as usize);
let end = if end % 2 == 0 { end } else { end + 1 };
let insns = (beg..end).step_by(2).filter_map(|i| {
let (hi, lo) = vm.get_insn_bytes_at(i)?;
let insn = Insn::from_bytes(hi, lo)?;
Some((i, (hi as u16) << 8 | lo as u16, insn))
});
for (offs, byte, insn) in insns {
let mut line = Vec::new();
let offs_color = if offs == vm.reg_pc as usize {
Color::Blue
} else {
Color::Gray
};
let offs = format!("{offs:#06x} ");
let byte = format!("{byte:04x} ");
let insn = format!("{insn}");
line.push(Span::styled(offs, Style::default().fg(offs_color)));
line.push(Span::styled(byte, Style::default().fg(Color::Yellow)));
line.push(Span::from(insn));
spans.push(Spans::from(line));
}
ui.render_widget(
Paragraph::new(spans).block(
Block::default()
.title(" disassembly ")
.title_alignment(Alignment::Center)
.borders(Borders::ALL),
),
area,
);
}
| true |
6b587921e827c0492ff84b5683e444009c816078
|
Rust
|
Pana/rs-data-structures
|
/stack/src/lib.rs
|
UTF-8
| 181 | 3.421875 | 3 |
[] |
no_license
|
pub trait Stack<T> {
fn push(&mut self, val: T) -> bool;
fn pop(&mut self) -> Option<T>;
fn is_empty(&self) -> bool;
fn len(&self) -> usize;
fn print(&self);
}
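// Hedged example (not part of the original crate): a minimal Vec-backed
// implementation showing how the trait is intended to be used.
pub struct VecStack<T> {
    items: Vec<T>,
}
impl<T> VecStack<T> {
    pub fn new() -> Self {
        VecStack { items: Vec::new() }
    }
}
impl<T: std::fmt::Debug> Stack<T> for VecStack<T> {
    fn push(&mut self, val: T) -> bool {
        self.items.push(val);
        true
    }
    fn pop(&mut self) -> Option<T> {
        self.items.pop()
    }
    fn is_empty(&self) -> bool {
        self.items.is_empty()
    }
    fn len(&self) -> usize {
        self.items.len()
    }
    fn print(&self) {
        println!("{:?}", self.items);
    }
}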
| true |
397bf878424f43372380b6fabd220728a0b8e334
|
Rust
|
miquels/webdav-server-rs
|
/pam/src/pamserver.rs
|
UTF-8
| 5,957 | 2.71875 | 3 |
[
"Apache-2.0"
] |
permissive
|
// Server part - the code here is fork()ed off and lives in its own
// process. We communicate with it through a unix stream socket.
//
// This is all old-fashioned blocking and thread-based code.
//
use std::io::{self, Read, Write};
use std::os::unix::io::AsRawFd;
use std::os::unix::net::UnixStream as StdUnixStream;
use std::sync::{Arc, Mutex};
use bincode::{deserialize, serialize};
use libc;
use crate::pam::{pam_auth, pam_lower_rlimits, PamError};
use crate::pamclient::PamRequest;
// Response back from the server process.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub(crate) struct PamResponse {
pub id: u64,
pub result: Result<(), PamError>,
}
// server side.
pub(crate) struct PamServer {
rx_socket: StdUnixStream,
tx_socket: Arc<Mutex<StdUnixStream>>,
}
impl PamServer {
// fork and start the server, return the stream socket for communication.
pub(crate) fn start(num_threads: Option<usize>) -> Result<StdUnixStream, io::Error> {
// Create a unix socketpair for communication.
let (sock1, sock2) = StdUnixStream::pair()?;
let sock3 = sock2.try_clone()?;
let handle = std::thread::spawn(move || {
// fork server.
let pid = unsafe { libc::fork() };
if pid < 0 {
return Err(io::Error::last_os_error());
}
if pid == 0 {
// first, close all filedescriptors (well, all..)
for fdno in 3..8192 {
if fdno != sock2.as_raw_fd() && fdno != sock3.as_raw_fd() {
unsafe {
libc::close(fdno);
}
}
}
let mut server = PamServer {
rx_socket: sock2,
tx_socket: Arc::new(Mutex::new(sock3)),
};
pam_lower_rlimits();
trace!("PamServer: child: starting server");
server.serve(num_threads.unwrap_or(8));
drop(server);
std::process::exit(0);
}
Ok(())
});
handle.join().unwrap()?;
trace!("PamServer: parent: started server");
Ok(sock1)
}
// serve requests.
fn serve(&mut self, num_threads: usize) {
// create a threadpool, then serve connections via the threadpool.
let pool = threadpool::ThreadPool::new(num_threads);
// process incoming connections.
loop {
// read length.
let mut buf = [0u8; 2];
let res = self.rx_socket.read_exact(&mut buf);
if let Err(e) = res {
if e.kind() == std::io::ErrorKind::UnexpectedEof {
// parent probably exited - not an error.
trace!("PamServer::serve: EOF reached on input");
break;
}
panic!("PamServer::serve: read socket: {}", e);
}
let sz = ((buf[0] as usize) << 8) + (buf[1] as usize);
if sz == 0 {
// size 0 packet indicates client wants to shut us down.
trace!("PamServer::serve: EOF packet on input");
break;
}
// read request data.
let mut data = Vec::with_capacity(sz);
data.resize(sz, 0u8);
let res = self.rx_socket.read_exact(&mut data);
if let Err(e) = res {
panic!("PamServer::serve: read socket: {}", e);
}
let req: PamRequest = match deserialize(&data[..]) {
Ok(req) => req,
Err(_) => panic!("PamServer::serve: error deserializing request"),
};
trace!(
"PamServer::serve: read request {:?} active threads: {} queued {}",
req,
pool.active_count(),
pool.queued_count()
);
// run request on pool.
let sock = self.tx_socket.clone();
pool.execute(move || {
if let Err(e) = pam_process(req, sock) {
panic!("PamServer::pam_process: error: {}", e);
}
});
let mut i = 0;
while pool.queued_count() > 2 * pool.max_count() {
if i == 399 {
debug!(
"PamServer::serve: pool busy! active {}, max {}, queued: {}",
pool.active_count(),
pool.max_count(),
pool.queued_count()
);
}
i += 1;
i = i % 400;
std::thread::sleep(std::time::Duration::from_millis(5));
}
}
pool.join();
trace!("PamServer::serve: exit.");
std::process::exit(0);
}
}
// Process one request. This is run on the threadpool.
fn pam_process(req: PamRequest, sock: Arc<Mutex<StdUnixStream>>) -> Result<(), io::Error> {
trace!("PamServer::pam_process: starting with request {:?}", req);
// authenticate.
let remip = req.remip.as_ref().map(|s| s.as_str()).unwrap_or("");
let res = PamResponse {
id: req.id,
result: pam_auth(&req.service, &req.user, &req.pass, remip),
};
// and send back result.
trace!("PamServer::pam_process: returning response {:?}", res);
let mut response: Vec<u8> = serialize(&res)
.map_err(|e| io::Error::new(io::ErrorKind::Other, format!("error serializing response: {}", e)))?;
let l1 = ((response.len() >> 8) & 0xff) as u8;
let l2 = (response.len() & 0xff) as u8;
response.insert(0, l1);
response.insert(1, l2);
match sock.lock().unwrap().write_all(&response) {
Err(e) => {
debug!("PamServer::pam_process: writing to response socket: {}", e);
Err(e)
},
Ok(..) => Ok(()),
}
}
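// Wire format used on the socket, as implemented above: every message is a
// 2-byte big-endian length prefix followed by a bincode-serialized
// PamRequest/PamResponse; a zero-length frame from the client tells the
// server to shut down.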
| true |
9d58a448b0365d0164d50dc1d27c63c8a13b216b
|
Rust
|
traviscole/heliocron
|
/src/parsers.rs
|
UTF-8
| 6,373 | 3.21875 | 3 |
[
"Apache-2.0",
"MIT"
] |
permissive
|
use std::result;
use chrono::{DateTime, Duration, FixedOffset, Local, NaiveTime, TimeZone};
use super::{
enums::Event,
errors::{ConfigErrorKind, HeliocronError},
};
type Result<T> = result::Result<T, HeliocronError>;
pub fn parse_date(
date: &str,
date_fmt: &str,
time_zone: Option<&str>,
) -> Result<DateTime<FixedOffset>> {
// default date format
let time_fmt = "%H:%M:%S";
let datetime_fmt = format!("{}T{}", date_fmt, time_fmt);
// customisable date
// e.g. 2020-02-24
let time = "12:00:00";
let datetime = format!("{}T{}", date, time);
// default time zone is the local time zone at the given date
let time_zone = match time_zone {
Some(tz) => tz.to_string(),
None => Local
.datetime_from_str(&datetime, &datetime_fmt)?
.offset()
.to_string(),
};
let datetimetz = format!("{}{}", datetime, time_zone);
let datetimetz_fmt = format!("{}%:z", datetime_fmt);
let datetime = DateTime::parse_from_str(&datetimetz, &datetimetz_fmt)?;
Ok(datetime)
}
pub fn parse_event(event: &str, custom_altitude: Option<f64>) -> Result<Event> {
Ok(Event::new(event, custom_altitude)?)
}
pub fn parse_offset(offset: &str) -> Result<Duration> {
    // offset should be either %H:%M:%S or %H:%M, optionally prefixed with "-" if negative
let (positive, offset): (bool, &str) = match offset.chars().next() {
Some('-') => (false, &offset[1..]),
_ => (true, offset),
};
let offset = match offset {
offset if NaiveTime::parse_from_str(offset, "%H:%M:%S").is_ok() => {
Ok(NaiveTime::parse_from_str(offset, "%H:%M:%S")?)
}
offset if NaiveTime::parse_from_str(offset, "%H:%M").is_ok() => {
Ok(NaiveTime::parse_from_str(offset, "%H:%M")?)
}
_ => Err(HeliocronError::Config(ConfigErrorKind::ParseOffset)),
}?;
let offset = offset.signed_duration_since(NaiveTime::from_hms(0, 0, 0));
if positive {
Ok(offset)
} else {
Ok(-offset)
}
}
pub fn parse_altitude(altitude: String) -> Result<f64> {
let altitude: f64 = match altitude.parse() {
Ok(altitude) => Ok(altitude),
Err(_) => Err(HeliocronError::Config(ConfigErrorKind::ParseAltitude)),
}?;
    if (altitude >= -90.0) && (altitude <= 90.0) {
Ok(altitude)
} else {
Err(HeliocronError::Config(ConfigErrorKind::ParseAltitude))
}
}
#[cfg(test)]
mod tests {
use super::*;
use chrono::Timelike;
#[test]
fn test_parse_altitude() {
let valid_altitudes = &["90.0", "8", "0", "-1.2", "-90.0"];
for a in valid_altitudes.iter() {
assert!(parse_altitude((*a).to_owned()).is_ok())
}
let invalid_altitudes = &["-90.1", "90.1", "not_an_altitude"];
for a in invalid_altitudes.iter() {
assert!(parse_altitude((*a).to_owned()).is_err())
}
}
#[test]
fn test_parse_offset() {
let valid_offsets = &[
("12:00:00", Duration::hours(12)),
("12:00", Duration::hours(12)),
("-12:00:00", -Duration::hours(12)),
("23:59:59", Duration::seconds(86399)),
("23:59", Duration::seconds(86340)),
("00:59", Duration::minutes(59)),
("00:00", Duration::minutes(0)),
("0:00", Duration::minutes(0)),
("0:0", Duration::minutes(0)),
];
for (input, expected) in valid_offsets.iter() {
let offset = parse_offset(*input).unwrap();
assert_eq!(*expected, offset);
}
let invalid_offsets = &["24:00:00"];
for input in invalid_offsets.iter() {
let offset = parse_offset(*input);
assert!(offset.is_err());
}
}
#[test]
fn test_parse_date() {
let expected = DateTime::parse_from_rfc3339("2020-03-25T12:00:00+00:00").unwrap();
// standard usage, just passing in a date
let result = parse_date("2020-03-25", "%Y-%m-%d", None).unwrap();
assert_eq!(expected, result);
// but if you want to use a snazzy format, that is ok, too
let result = parse_date("25 March 2020", "%d %B %Y", None).unwrap();
assert_eq!(expected, result);
// and so is providing a custom timezone
let expected = expected
.with_timezone(&FixedOffset::east(3600))
.with_hour(12)
.unwrap();
let result = parse_date("25 Mar 2020", "%d %b %Y", Some("+01:00")).unwrap();
assert_eq!(expected, result);
}
#[test]
fn test_parse_date_wrong_format_fails() {
let result = parse_date("2020-03-25", "%Y-%m-%Y", None);
assert!(result.is_err());
}
#[test]
fn test_parse_date_wrong_tz_fails() {
let result = parse_date("2020-03-25", "%Y-%m-%d", Some("00:00"));
assert!(result.is_err());
}
#[test]
fn test_parse_event() {
let params = [
(
Event::Sunrise {
degrees_below_horizon: 0.833,
time_of_day: crate::enums::TimeOfDay::AM,
},
"sunrise",
),
(
Event::Sunrise {
degrees_below_horizon: 0.833,
time_of_day: crate::enums::TimeOfDay::AM,
},
"sunRISE",
),
(
Event::Sunrise {
degrees_below_horizon: 0.833,
time_of_day: crate::enums::TimeOfDay::AM,
},
" sunrisE",
),
(
Event::Sunset {
degrees_below_horizon: 0.833,
time_of_day: crate::enums::TimeOfDay::PM,
},
"sunset",
),
(
Event::Sunset {
degrees_below_horizon: 0.833,
time_of_day: crate::enums::TimeOfDay::PM,
},
"sunSET ",
),
];
for (expected, arg) in params.iter() {
assert_eq!(*expected, parse_event(*arg, None).unwrap());
}
}
#[test]
fn test_parse_event_fails() {
let event = parse_event("sun rise", None);
assert!(event.is_err());
}
}
| true |
425a33681586972fc40a39e920c178f21c801d5e
|
Rust
|
LightAndLight/spiddy
|
/errors/src/lib.rs
|
UTF-8
| 4,895 | 3.125 | 3 |
[] |
no_license
|
use span::{Offset, SourceFiles, Span};
use std::io;
use std::io::Write;
pub enum Highlight {
Point(Offset),
Span(Span),
}
impl Highlight {
#[inline]
pub fn start(&self) -> Offset {
match self {
Highlight::Point(start) => *start,
Highlight::Span(span) => span.start,
}
}
#[inline]
pub fn len(&self) -> Offset {
match self {
Highlight::Point(_) => Offset(1),
Highlight::Span(span) => span.length,
}
}
#[inline]
pub fn end(&self) -> Offset {
self.start().add(self.len().to_u32())
}
}
pub struct Error {
pub highlight: Highlight,
pub message: String,
}
fn highlight<'src>(line: &'src str, line_offset: Offset, region: Highlight) -> String {
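    // Builds a marker line of spaces and '^' characters that lines up under the
    // highlighted region; positions advance by each character's UTF-8 byte length
    // so they match the byte offsets used by `Highlight`.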
let mut string = String::new();
let mut pos: usize = 0;
match region {
Highlight::Point(offset) => {
let offset = offset.to_usize() - line_offset.to_usize();
for c in line.chars() {
if pos == offset {
string.push('^');
break;
} else {
string.push(' ');
}
pos += c.len_utf8();
}
}
Highlight::Span(span) => {
let mut in_range = false;
for c in line.chars() {
let line_offset = line_offset.to_usize();
let start_offset = span.start.to_usize() - line_offset;
let end_offset = span.end().to_usize() - line_offset;
if in_range {
if pos == end_offset {
break;
} else {
string.push('^')
}
} else {
if pos == start_offset {
in_range = true;
string.push('^')
} else {
string.push(' ')
}
}
pos += c.len_utf8();
}
}
}
string
}
pub fn __build_report(src_files: &SourceFiles, error: Error) -> [String; 5] {
let error_start = error.highlight.start();
let src_file = src_files.get_by_offset(error_start);
let line = src_file.get_line(error_start);
let highlight = highlight(line.content, line.offset, error.highlight);
let line_number_string = line.number.to_string();
let mut line_number_padding = String::new();
for _ in line_number_string.chars() {
line_number_padding.push(' ');
}
let line_number_padding = line_number_padding;
let mut line0 = src_file.name.clone();
line0 += "\n";
let mut line1 = line_number_padding.clone();
line1 += " |\n";
let mut line2 = line_number_string;
line2 += " | ";
line2 += line.content;
line2 += "\n";
let mut line3 = line_number_padding.clone();
line3 += " | ";
line3 += &highlight;
line3 += "\n";
let mut line4 = String::from(error.message);
line4 += "\n";
[line0, line1, line2, line3, line4]
}
impl Error {
pub fn report(self, src_files: &SourceFiles) {
let [line0, line1, line2, line3, line4] = __build_report(src_files, self);
let _ = io::stdout().write(line0.as_bytes()).unwrap();
let _ = io::stdout().write(line1.as_bytes()).unwrap();
let _ = io::stdout().write(line2.as_bytes()).unwrap();
let _ = io::stdout().write(line3.as_bytes()).unwrap();
let _ = io::stdout().write(line4.as_bytes()).unwrap();
}
}
#[test]
fn test_build_report1() {
let mut src_files = SourceFiles::new();
src_files.new_source_file(
String::from("test"),
String::from("this is a line\nthis is another line"),
);
assert_eq!(
__build_report(
&src_files,
Error {
highlight: Highlight::Point(Offset(8)),
message: String::from("Message")
}
),
[
"test\n",
" |\n",
"1 | this is a line\n",
" | ^\n",
"Message\n"
]
)
}
#[test]
fn test_build_report2() {
let mut src_files = SourceFiles::new();
let mut prefix = String::from("1\n2\n3\n4\n5\n6\n7\n8\n9\n10\nthis is ");
let suffix = "another line";
let aim = prefix.len();
prefix += suffix;
let content = prefix;
src_files.new_source_file(String::from("test"), content);
assert_eq!(
__build_report(
&src_files,
Error {
highlight: Highlight::Point(Offset(aim as u32)),
message: String::from("Message")
}
),
[
"test\n",
" |\n",
"11 | this is another line\n",
" | ^\n",
"Message\n"
]
)
}
| true |
fcd22716a97e9ac7fbd4facb820c655a61f82d4e
|
Rust
|
thibautRe/rustrogueliketutorial
|
/chapter-27-cellular-automata/src/map_builders/simple_map.rs
|
UTF-8
| 3,374 | 2.71875 | 3 |
[
"MIT"
] |
permissive
|
use super::{MapBuilder, Map, Rect, apply_room_to_map,
apply_horizontal_tunnel, apply_vertical_tunnel, TileType,
Position, spawner, SHOW_MAPGEN_VISUALIZER};
use rltk::RandomNumberGenerator;
use specs::prelude::*;
pub struct SimpleMapBuilder {
map : Map,
starting_position : Position,
depth: i32,
rooms: Vec<Rect>,
history: Vec<Map>
}
impl MapBuilder for SimpleMapBuilder {
fn get_map(&self) -> Map {
self.map.clone()
}
fn get_starting_position(&self) -> Position {
self.starting_position.clone()
}
fn get_snapshot_history(&self) -> Vec<Map> {
self.history.clone()
}
fn build_map(&mut self) {
self.rooms_and_corridors();
}
fn spawn_entities(&mut self, ecs : &mut World) {
for room in self.rooms.iter().skip(1) {
spawner::spawn_room(ecs, room, self.depth);
}
}
fn take_snapshot(&mut self) {
if SHOW_MAPGEN_VISUALIZER {
let mut snapshot = self.map.clone();
for v in snapshot.revealed_tiles.iter_mut() {
*v = true;
}
self.history.push(snapshot);
}
}
}
impl SimpleMapBuilder {
pub fn new(new_depth : i32) -> SimpleMapBuilder {
SimpleMapBuilder{
map : Map::new(new_depth),
starting_position : Position{ x: 0, y : 0 },
depth : new_depth,
rooms: Vec::new(),
history: Vec::new()
}
}
fn rooms_and_corridors(&mut self) {
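        // Classic room-and-corridor generation: place up to MAX_ROOMS random
        // rectangles, reject any that overlap an existing room, and connect each
        // accepted room to the previous one with an L-shaped tunnel.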
const MAX_ROOMS : i32 = 30;
const MIN_SIZE : i32 = 6;
const MAX_SIZE : i32 = 10;
let mut rng = RandomNumberGenerator::new();
for i in 0..MAX_ROOMS {
let w = rng.range(MIN_SIZE, MAX_SIZE);
let h = rng.range(MIN_SIZE, MAX_SIZE);
let x = rng.roll_dice(1, self.map.width - w - 1) - 1;
let y = rng.roll_dice(1, self.map.height - h - 1) - 1;
let new_room = Rect::new(x, y, w, h);
let mut ok = true;
for other_room in self.rooms.iter() {
if new_room.intersect(other_room) { ok = false }
}
if ok {
apply_room_to_map(&mut self.map, &new_room);
self.take_snapshot();
if !self.rooms.is_empty() {
let (new_x, new_y) = new_room.center();
let (prev_x, prev_y) = self.rooms[self.rooms.len()-1].center();
if rng.range(0,2) == 1 {
apply_horizontal_tunnel(&mut self.map, prev_x, new_x, prev_y);
apply_vertical_tunnel(&mut self.map, prev_y, new_y, new_x);
} else {
apply_vertical_tunnel(&mut self.map, prev_y, new_y, prev_x);
apply_horizontal_tunnel(&mut self.map, prev_x, new_x, new_y);
}
}
self.rooms.push(new_room);
self.take_snapshot();
}
}
let stairs_position = self.rooms[self.rooms.len()-1].center();
let stairs_idx = self.map.xy_idx(stairs_position.0, stairs_position.1);
self.map.tiles[stairs_idx] = TileType::DownStairs;
let start_pos = self.rooms[0].center();
self.starting_position = Position{ x: start_pos.0, y: start_pos.1 };
}
}
| true |
ffcaad5e0969d629b46590a88b3dfe770f7fbda4
|
Rust
|
jbschlosser/resin
|
/src/error.rs
|
UTF-8
| 595 | 3.078125 | 3 |
[] |
no_license
|
use std::fmt;
#[derive(PartialEq, Eq)]
pub struct RuntimeError {
pub msg: String
}
impl fmt::Debug for RuntimeError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "Runtime error: {}", &self.msg)
}
}
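// Early-returns an `Err(RuntimeError)` built from format!-style arguments,
// e.g. `runtime_error!("unexpected value: {}", x)`.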
#[macro_export]
macro_rules! runtime_error{
($($arg:tt)*) => (
return Err(RuntimeError{msg: format!($($arg)*)})
)
}
#[macro_export]
macro_rules! try_or_runtime_error {
($inp:expr, $($arg:tt)*) => (
match $inp {
Ok(v) => v,
Err(_) => return Err(RuntimeError{msg: format!($($arg)*)})
}
)
}
| true |
09f69874641ef386eb7c6c40796bfd6d827ece2e
|
Rust
|
George-Payne/rust_h_w
|
/averages/src/main.rs
|
UTF-8
| 1,973 | 3.59375 | 4 |
[
"MIT"
] |
permissive
|
use std::collections::HashMap;
use std::env;
enum Average {
Unknown,
Mean,
Median,
Mode,
}
fn main() {
let mut numbers: Vec<f32> = Vec::new();
let mut kind: Average = Average::Unknown;
for argument in env::args() {
match argument.parse::<f32>() {
Ok(i) => numbers.push(i),
Err(_) => {}
}
if argument.starts_with("--") {
match &argument[..] {
"--mean" => kind = Average::Mean,
"--median" => kind = Average::Median,
"--mode" => kind = Average::Mode,
_ => println!("Unknown argument {}", argument),
}
}
}
match kind {
Average::Unknown => handle_unknown(),
Average::Mean => handle_mean(numbers),
Average::Median => handle_median(numbers),
Average::Mode => handle_mode(numbers),
}
}
fn handle_unknown() {
println!("Please specify --mean --median or --mode")
}
fn handle_mean(numbers: Vec<f32>) {
println!("You want the mean of {:?}", numbers);
let len = numbers.len() as f32;
let total = numbers.iter().fold(0.0, |acc, v| acc + v);
let mean = total / len;
println!("The mean is {}", mean);
}
fn handle_median(mut numbers: Vec<f32>) {
println!("You want the median of {:?}", numbers);
numbers.sort_unstable_by(|a, b| a.partial_cmp(&b).unwrap());
let mid = numbers.len() / 2;
let median = numbers.get(mid).unwrap();
println!("The median is {}", median);
}
fn handle_mode(numbers: Vec<f32>) {
println!("You want the mode of {:?}", numbers);
let occurances = numbers.iter().fold(HashMap::new(), |mut acc, v| {
        *acc.entry(v.to_string()).or_insert(0) += 1;
acc
});
let mut occurances: Vec<(_, _)> = occurances.iter().collect();
occurances.sort_by(|a, b| b.1.cmp(a.1));
println!("The mode is {}", occurances[0].0)
}
| true |
59273a0f462f377c517c21305b088d18dbf0e76d
|
Rust
|
m000/adventofcode
|
/2022/src/day24b.rs
|
UTF-8
| 13,083 | 3.21875 | 3 |
[
"Unlicense"
] |
permissive
|
#![feature(drain_filter)]
use clap::Parser;
use env_logger::Env;
use log::{debug, info};
use std::cell::Cell;
use std::fmt;
use std::fs::File;
use std::io::{BufRead, BufReader};
/// Advent of Code 2022, Day 24
#[derive(Parser, Debug)]
#[command(author, version, about, long_about = None)]
struct Args {
/// Input file to read
input: String,
/// Part of the puzzle to solve
#[arg(short, long, value_parser = clap::value_parser!(u32).range(1..=2))]
part: u32,
}
type MapGrid = Vec<Vec<MapPos>>;
type Position = (usize, usize);
struct MapPos {
blizzards: Vec<char>,
wall: bool,
}
struct Map {
grid: MapGrid,
start: Position,
exit: Position,
player: Cell<Position>, // only used for rendering
}
impl MapPos {
/// Convert a MapPosition to a char, for display purposes.
pub fn to_char(&self) -> char {
let nblizzards = self.blizzards.len();
match (self, nblizzards) {
(MapPos { wall: true, .. }, _) => '#',
(MapPos { wall: false, .. }, 0) => '.',
(MapPos { wall: false, .. }, 1) => self.blizzards[0],
(MapPos { wall: false, .. }, 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9) => {
char::from_digit(nblizzards as u32, 10).unwrap()
}
_ => 'X',
}
}
}
impl Map {
/// Create a new empty Map with the specified dimensions.
pub fn empty((nrows, ncols): (usize, usize)) -> Map {
let start = (1, 0);
let exit = (ncols - 2, nrows - 1);
Map {
grid: (0..nrows)
.map(|nrow| {
(0..ncols)
.map(|ncol| MapPos {
blizzards: Vec::new(),
wall: match (ncol, nrow) {
(ncol, nrow) if (ncol, nrow) == start => false,
(ncol, nrow) if (ncol, nrow) == exit => false,
(ncol, _) if (ncol == 0 || ncol == ncols - 1) => true,
(_, nrow) if (nrow == 0 || nrow == nrows - 1) => true,
_ => false,
},
})
.collect::<Vec<MapPos>>()
})
.collect::<MapGrid>(),
start: start,
exit: exit,
player: Cell::new(start),
}
}
/// Create a new empty map with the same dimensions and position as the reference map.
pub fn empty_from(map: &Map) -> Map {
let mut new_map = Map::empty((map.grid.len(), map.grid[0].len()));
new_map.start = map.start;
new_map.exit = map.exit;
new_map
}
/// Read a map from a file.
pub fn from_file(filename: &str) -> Map {
let grid = BufReader::new(File::open(filename).unwrap_or_else(|err| {
panic!("Error opening {filename}: {err:?}");
}))
.lines()
.map(|line| {
line.unwrap()
.chars()
.map(|c| match c.clone() {
'#' => MapPos {
blizzards: Vec::new(),
wall: true,
},
'.' => MapPos {
blizzards: Vec::new(),
wall: false,
},
'>' | '<' | '^' | 'v' => MapPos {
blizzards: Vec::from([c]),
wall: false,
},
_ => panic!("Unknown character encountered while reading map: {c:?}"),
})
.collect::<Vec<MapPos>>()
})
.collect::<MapGrid>();
let start = (1, 0);
let exit = (grid[0].len() - 2, grid.len() - 1);
Map {
grid: grid,
start: start,
exit: exit,
player: Cell::new(start),
}
}
/// Calculates the next blizzard position on the map.
pub fn next_blizzard_pos(&self, colnum: usize, rownum: usize, b: char) -> (usize, usize) {
let (mut colnum_next, mut rownum_next) = (colnum, rownum);
match b {
'>' => {
colnum_next += 1;
if self.grid[rownum_next][colnum_next].wall {
colnum_next = 1;
}
}
'<' => {
colnum_next -= 1;
if self.grid[rownum_next][colnum_next].wall {
colnum_next = self.grid[0].len() - 2;
}
}
'^' => {
rownum_next -= 1;
if self.grid[rownum_next][colnum_next].wall {
rownum_next = self.grid.len() - 2;
}
}
'v' => {
rownum_next += 1;
if self.grid[rownum_next][colnum_next].wall {
rownum_next = 1;
}
}
_ => panic!("Unknown blizzard type encountered in ({colnum}, {rownum}): {b:?}"),
}
(colnum_next, rownum_next)
}
/// Returns the map with the positions of the blizzards on the next minute.
pub fn next_minute(&self) -> Map {
let mut new_map = Map::empty_from(self);
// Populate empty map with blizzards.
self.grid.iter().enumerate().for_each(|(rownum, row)| {
row.iter()
.enumerate()
.filter(|(_colnum, pos)| pos.wall == false && pos.blizzards.len() > 0)
.for_each(|(colnum, pos)| {
pos.blizzards.iter().for_each(|b| {
let (colnum_next, rownum_next) = self.next_blizzard_pos(colnum, rownum, *b);
new_map.grid[rownum_next][colnum_next].blizzards.push(*b);
})
})
});
new_map
}
/// Returns the available positions to move on the map.
pub fn available_moves(&self, start: &Position) -> Vec<Position> {
self.grid
.iter()
.enumerate()
.filter(|(rownum, _row)| {
let rowdist = (*rownum as i32 - start.1 as i32).abs();
                rowdist <= 1 // keep adjacent and current rows
})
.map(|(rownum, row)| {
let rowdist = (rownum as i32 - start.1 as i32).abs();
row.iter()
.enumerate()
.filter(|(colnum, pos)| {
let coldist = (*colnum as i32 - start.0 as i32).abs();
coldist <= 1 // keep adjacent and current columns
&& coldist + rowdist <= 1 // exclude diagonal neighbors
&& !pos.wall // exclude walls
&& pos.blizzards.len() == 0 // exclude positions with blizzards
})
.map(|(colnum, _pos)| (colnum, rownum))
.collect::<Vec<Position>>()
})
.flatten()
.collect::<Vec<Position>>()
}
}
impl fmt::Display for Map {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(
f,
"\n{}",
self.grid
.iter()
.enumerate()
.map(|(rownum, row)| format!(
"{}\n",
row.iter()
.enumerate()
.map(|(colnum, pos)| {
let c = pos.to_char();
match (colnum, rownum) {
(colnum, rownum) if self.player.get() == (colnum, rownum) && c == '.' => '■', // current position
(colnum, rownum) if self.player.get() == (colnum, rownum) && c != '.' => 'E', // indicate error
(colnum, rownum) if self.exit == (colnum, rownum) && c == '.' => '✕', // exit
_ => c,
}
})
.collect::<String>()
))
.collect::<String>()
.trim_end()
)
}
}
fn main() {
env_logger::Builder::from_env(Env::default().default_filter_or("info"))
.format_timestamp(None)
.init();
let args = Args::parse();
let nways: usize = match args.part {
1 => 1, // start-exit
2 => 3, // start-exit-start-exit
part @ _ => panic!("Don't know how to run part {part}."),
};
// Read initial map.
let mut minute = 0;
let map = Map::from_file(&args.input);
debug!("\nMinute: {minute}\nMap:{map}");
// The way blizzards propagate, the maps will be periodic.
// The period will be the lowest common multiple of the map width and map height.
let nrows = map.grid.len();
let ncols = map.grid[0].len();
let period = (1..=(ncols * nrows))
.skip_while(|n| n % nrows != 0 || n % ncols != 0)
.next()
.unwrap();
// Compute all possible maps.
let mut maps = Vec::from([map]);
(1..period).for_each(|n| {
let map_prev = &maps[n - 1];
let map = map_prev.next_minute();
maps.push(map);
});
info!("Precomputed {} maps.", maps.len());
// Fully tracking all the possible paths until we reach the exit explodes.
// For this we only keep track of the possible positions at each minute.
//
// Unlike the implementation in day24.rs, here we don't truncate positions
// as soon as we reach the end of a crossing (way) in part 2.
// This is to cover the case where two different paths for the first
// crossing have times T_1 < S_1, but if we continue with the other
// crossings, it will be S_1 + S_2 + S_3 < T_1 + T_2 + T_3.
//
// In the end, it turned out that this didn't matter. But it is not clear
// if this is always the case, or it just happened to be that way for the
// inputs we tried.
let mut all_possible_positions = Vec::<Vec<Position>>::new();
let mut targets = Vec::<Position>::new();
for n in 0..nways {
all_possible_positions.push(Vec::<Position>::new());
targets.push(if n % 2 == 0 { maps[0].exit } else { maps[0].start });
}
all_possible_positions[0].push(maps[0].start);
let mut done = false;
while !done {
minute += 1;
let map = &maps[minute % period];
let npositions: usize = all_possible_positions.iter().map(|w| w.len()).sum();
info!("\nMinute: {minute}\nNumber of possible positions: {npositions}");
// Update positions for all ways.
all_possible_positions = all_possible_positions
.iter()
.enumerate()
.map(|(way, possible_positions)| {
let mut possible_positions = possible_positions
.iter()
.map(|position| map.available_moves(position))
.flatten()
.collect::<Vec<_>>();
                // Keeping track of all possible positions still isn't enough.
// We need to deduplicate the possible positions to keep things snappy.
// Duplication arises because it is possible to reach the same position
// through different paths in a set amount of time.
possible_positions.sort();
possible_positions.dedup();
// After deduplication, sort possible positions by Manhattan order to
// the exit. This makes the loop termination condition trivial.
let target = targets[way];
possible_positions.sort_by_key(|pos| {
let dx = (target.0 as i32 - pos.0 as i32).abs();
let dy = (target.1 as i32 - pos.1 as i32).abs();
dx + dy
});
if possible_positions.len() > 0 {
let closest = &possible_positions[0];
info!(
"Way: {way}, Positions: {}, Target: {target:?}, Closest position: {closest:?}",
possible_positions.len()
);
}
possible_positions
})
.collect::<Vec<_>>();
// Move positions that reached target to the next way.
let mut removed = Vec::<Position>::new();
for (way, possible_positions) in all_possible_positions.iter_mut().enumerate() {
// Move removed positions from previous way to the current one.
if removed.len() > 0 {
possible_positions.append(&mut removed);
}
// Repopulate removed.
let target = &targets[way];
removed.extend(possible_positions.drain_filter(|pos| pos == target));
// Done, but finish loop first.
if way == nways - 1 && removed.len() > 0 {
done = true;
continue;
}
}
}
info!("Exited after {minute} minutes.");
}
| true |
70abb8bdf867bbf2556ae253c1e012fae4be6994
|
Rust
|
pyaillet/dive-rs
|
/src/oci/image/manifest.rs
|
UTF-8
| 2,355 | 2.578125 | 3 |
[] |
no_license
|
use std::collections::HashMap as Map;
use serde::{Deserialize, Serialize};
use crate::oci::Hash;
pub const MANIFEST_MIME: &str = "application/vnd.docker.distribution.manifest.v2+json";
pub const CONFIG_MIME: &str = "application/vnd.docker.container.image.v1+json";
pub const LAYER_MIME: &str = "application/vnd.docker.image.rootfs.diff.tar.gzip";
#[derive(Serialize, Deserialize, Debug, PartialEq)]
#[serde(rename_all = "camelCase")]
pub enum OS {
Linux,
Windows,
}
#[derive(Serialize, Deserialize, Debug, PartialEq)]
#[serde(rename_all = "camelCase")]
pub enum Architecture {
Amd64,
Aarch64,
}
#[derive(Serialize, Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
pub struct Layer {
pub blob_sum: String,
}
#[derive(Serialize, Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
pub struct Media {
pub media_type: String,
pub size: u64,
pub digest: Hash,
}
#[derive(Serialize, Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
pub struct Manifest {
pub schema_version: u8,
pub config: Media,
pub layers: Vec<Media>,
pub annotations: Option<Map<String, String>>,
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_from_str_ok() {
let c = r#"
{
"schemaVersion": 2,
"config": {
"mediaType": "application/vnd.oci.image.config.v1+json",
"size": 7023,
"digest": "sha256:b5b2b2c507a0944348e0303114d8d93aaaa081732b86451d9bce1f432a537bc7"
},
"layers": [
{
"mediaType": "application/vnd.oci.image.layer.v1.tar+gzip",
"size": 32654,
"digest": "sha256:9834876dcfb05cb167a5c24953eba58c4ac89b1adf57f28f2f9d09af107ee8f0"
},
{
"mediaType": "application/vnd.oci.image.layer.v1.tar+gzip",
"size": 16724,
"digest": "sha256:3c3a4604a545cdc127456d94e421cd355bca5b528f4a9c1905b15da2eb4a4c6b"
}
],
"annotations": {
"annot1": "value1",
"annot2": "value2"
}
}"#;
let m: Result<Manifest, serde_json::Error> = serde_json::from_str(c);
assert!(m.is_ok(), "Manifest parsing failed: `{}`", m.err().unwrap());
}
}
| true |
8e56078b47f887088e471837c04eec9b5a38dc88
|
Rust
|
vonglasow/rust-of-life
|
/src/main.rs
|
UTF-8
| 824 | 2.953125 | 3 |
[] |
no_license
|
extern crate ansi_term;
extern crate rand;
mod cell;
mod position;
mod printer;
#[cfg(not(test))]
fn main() {
use cell::Cell;
use position::Position;
use printer::CliPrinter;
use rand::Rng;
let mut rng = rand::thread_rng();
println!("Game of Life");
for y in 0..10 {
for x in 0..10 {
let position = Position::create_2d(x, y);
let cell = Cell {
position: position,
alive: true,
};
let new_cell = cell.compute_state(rng.gen_range(1, 4));
let printer = CliPrinter;
printer.print_cell(cell);
printer.print_cell(new_cell);
}
println!("");
}
println!("");
}
#[cfg(test)]
mod tests {
#[test]
fn it_works() {
assert!(1 == 1);
}
}
| true |
e3b46f3bf827f2784ff3e19f304d8ad2731fd853
|
Rust
|
SallySoul/tacit
|
/implicit-mesh/src/function.rs
|
UTF-8
| 601 | 2.8125 | 3 |
[] |
no_license
|
use crate::interval::Interval;
use std::collections::HashMap;
pub trait Function: Sized {
fn evaluate(&self, x: f32, y: f32, z: f32) -> f32;
fn evaluate_interval(&self, bindings: &HashMap<char, Interval>) -> Vec<Interval>;
}
#[derive(Copy, Clone)]
pub struct ConstFunction {
pub c: f32,
}
impl Function for ConstFunction {
fn evaluate(&self, _x: f32, _y: f32, _z: f32) -> f32 {
self.c
}
fn evaluate_interval(&self, _bindings: &HashMap<char, Interval>) -> Vec<Interval> {
vec![Interval {
min: self.c,
max: self.c,
}]
}
}
| true |
d8190192997207347b0b24ea955970e52a76c103
|
Rust
|
k-bx/ormx
|
/example-postgres/src/main.rs
|
UTF-8
| 3,154 | 3 | 3 |
[
"MIT"
] |
permissive
|
// #![feature(trace_macros)]
use chrono::{NaiveDateTime, Utc};
use ormx::{Insert, Table, Delete};
use sqlx::PgPool;
// trace_macros!(true);
// To run this example-postgres, first run `/scripts/postgres.sh` to start postgres in a docker container and
// write the database URL to `.env`. Then, source `.env` (`. .env`) and run `cargo run`
mod query2;
#[tokio::main]
async fn main() -> anyhow::Result<()> {
dotenv::dotenv().ok();
simple_logger::SimpleLogger::new()
.with_level(log::LevelFilter::Info)
.init()?;
let db = PgPool::connect(&dotenv::var("DATABASE_URL")?).await?;
log::info!("insert a new row into the database");
let mut new = InsertUser {
user_id: 1,
first_name: "Moritz".to_owned(),
last_name: "Bischof".to_owned(),
email: "[email protected]".to_owned(),
disabled: None,
role: Role::User,
}
.insert(&mut *db.acquire().await?)
.await?;
log::info!("update a single field");
new.set_last_login(&db, Some(Utc::now().naive_utc()))
.await?;
log::info!("update all fields at once");
new.email = "asdf".to_owned();
new.update(&db).await?;
log::info!("apply a patch to the user");
new.patch(
&db,
UpdateUser {
first_name: "NewFirstName".to_owned(),
last_name: "NewLastName".to_owned(),
disabled: Some("Reason".to_owned()),
role: Role::Admin,
},
)
.await?;
log::info!("reload the user, in case it has been modified");
new.reload(&db).await?;
log::info!("use the improved query macro for searching users");
let search_result = query2::query_users(&db, Some("NewFirstName"), None).await?;
println!("{:?}", search_result);
log::info!("delete the user from the database");
new.delete(&db).await?;
Ok(())
}
#[derive(Debug, ormx::Table)]
#[ormx(table = "users", id = user_id, insertable, deletable)]
struct User {
// map this field to the column "id"
#[ormx(column = "id")]
#[ormx(get_one = get_by_user_id)]
user_id: i32,
first_name: String,
last_name: String,
// generate `User::by_email(&str) -> Result<Option<Self>>`
#[ormx(get_optional(&str))]
email: String,
#[ormx(custom_type)]
role: Role,
disabled: Option<String>,
// don't include this field into `InsertUser` since it has a default value
// generate `User::set_last_login(Option<NaiveDateTime>) -> Result<()>`
#[ormx(default, set)]
last_login: Option<NaiveDateTime>,
}
// Patches can be used to update multiple fields at once (in diesel, they're called "ChangeSets").
#[derive(ormx::Patch)]
#[ormx(table_name = "users", table = crate::User, id = "id")]
struct UpdateUser {
first_name: String,
last_name: String,
disabled: Option<String>,
#[ormx(custom_type)]
role: Role,
}
#[derive(Debug, Copy, Clone, sqlx::Type)]
#[sqlx(type_name = "user_role")]
#[sqlx(rename_all = "lowercase")]
enum Role {
User,
Admin,
}
#[derive(Debug, ormx::Table)]
#[ormx(table = "test", id = id, insertable)]
struct Test {
id: i32,
#[ormx(by_ref)]
rows: Vec<String>,
}
| true |
83bbdbd73cb3f4f5873c67d78e586c25a7c4d5e2
|
Rust
|
Gobi03/Rust_mylibrary
|
/Marathon/Direction.rs
|
UTF-8
| 404 | 3.265625 | 3 |
[] |
no_license
|
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
pub enum Direction {
Left,
Right,
Up,
Down,
}
impl Direction {
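    // `Coord` is assumed to be defined elsewhere in this library: a 2D grid
    // coordinate constructed from an `(x, y)` tuple, with y growing downward
    // (hence Up => (0, -1)).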
pub fn to_delta(&self) -> Coord {
match *self {
Self::Left => Coord::new((-1, 0)),
Self::Right => Coord::new((1, 0)),
Self::Up => Coord::new((0, -1)),
Self::Down => Coord::new((0, 1)),
}
}
}
| true |
34cd669796db548f34a8d3c855a1025f5164dfe2
|
Rust
|
hilbert-space/hdf5
|
/src/lib.rs
|
UTF-8
| 4,482 | 3.390625 | 3 |
[
"MIT",
"Apache-2.0"
] |
permissive
|
//! Interface to [HDF5][1].
//!
//! ## Example
//!
//! ```
//! extern crate hdf5;
//! # extern crate temporary;
//!
//! use hdf5::File;
//! # use temporary::Directory;
//!
//! # fn main() {
//! let path = "data.h5";
//! # let directory = Directory::new("hdf5").unwrap();
//! # let path = directory.join(path);
//! let file = File::new(path).unwrap();
//!
//! file.write("foo", 42).unwrap();
//! file.write("bar", &vec![42.0, 69.0]).unwrap();
//! # }
//! ```
//!
//! Structural data can be written using [`rustc-serialize`][2] as follows:
//!
//! ```
//! extern crate hdf5;
//! extern crate rustc_serialize;
//! # extern crate temporary;
//!
//! use hdf5::File;
//! # use temporary::Directory;
//!
//! #[derive(RustcEncodable)]
//! struct Foo {
//! bar: Vec<f64>,
//! baz: Baz,
//! }
//!
//! #[derive(RustcEncodable)]
//! struct Baz {
//! qux: f64,
//! }
//!
//! # fn main() {
//! let foo = Foo {
//! bar: vec![42.0],
//! baz: Baz {
//! qux: 69.0,
//! },
//! };
//!
//! let path = "data.h5";
//! # let directory = Directory::new("hdf5").unwrap();
//! # let path = directory.join(path);
//! let file = File::new(path).unwrap();
//!
//! file.encode("foo", &foo).unwrap();
//! # }
//! ```
//!
//! [1]: http://www.hdfgroup.org/HDF5
//! [2]: https://crates.io/crates/rustc-serialize
extern crate hdf5_sys as ffi;
extern crate libc;
#[cfg(feature = "serialize")]
extern crate rustc_serialize;
use std::{error, fmt};
/// An error.
#[derive(Clone, Debug)]
pub struct Error(String);
#[doc(hidden)]
pub type ID = ffi::hid_t;
#[doc(hidden)]
pub trait Identity {
fn id(&self) -> ID;
}
#[doc(hidden)]
pub trait Location: Identity {
}
/// A result.
pub type Result<T> = std::result::Result<T, Error>;
macro_rules! identity(
($name:ident) => (
impl ::Identity for $name {
#[inline]
fn id(&self) -> ::ID {
self.id
}
}
);
);
macro_rules! location(
($name:ident) => (
impl ::Location for $name {
}
);
);
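// Evaluates a native HDF5 call and raises an `Error` if the returned status
// code is negative, optionally with a custom error message.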
macro_rules! ok(
($call:expr) => ({
let result = unsafe { $call };
if result < 0 {
raise!("failed to call a native function (error code {})", result);
}
result
});
($call:expr, $($arg:tt)+) => ({
let result = unsafe { $call };
if result < 0 {
raise!($($arg)+);
}
result
});
);
macro_rules! path_to_cstr(
($path:expr) => ({
let path = $path;
match path.to_str() {
Some(path) => match ::std::ffi::CString::new(path) {
Ok(string) => string,
_ => raise!("failed to process a path {:?}", path),
},
_ => raise!("failed to process a path {:?}", path),
}
});
);
macro_rules! product(
($vector:expr) => (
$vector.iter().fold(1, |result, &next| result * next)
);
);
macro_rules! raise(
($($arg:tt)*) => (return Err(::Error(format!($($arg)*))));
);
macro_rules! str_to_cstr(
($string:expr) => ({
let string = $string;
match ::std::ffi::CString::new(string) {
Ok(string) => string,
_ => raise!("failed to process a string {:?}", string),
}
});
);
macro_rules! whatever(
($call:expr) => ({
let _ = unsafe { $call };
});
);
impl fmt::Display for Error {
#[inline]
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
self.0.fmt(formatter)
}
}
impl error::Error for Error {
#[inline]
fn description(&self) -> &str {
&self.0
}
}
impl<'l, T: Identity> Identity for &'l T {
#[inline]
fn id(&self) -> ID {
(*self).id()
}
}
impl<'l, T: Location> Location for &'l T {
}
/// Return the version number of HDF5.
pub fn version() -> Result<(usize, usize, usize)> {
let (mut major, mut minor, mut patch) = (0, 0, 0);
ok!(ffi::H5get_libversion(&mut major as *mut _ as *mut _, &mut minor as *mut _ as *mut _,
&mut patch as *mut _ as *mut _));
Ok((major, minor, patch))
}
mod data;
mod dataset;
mod dataspace;
mod datatype;
mod file;
mod link;
mod writer;
#[cfg(feature = "serialize")]
mod decoder;
#[cfg(feature = "serialize")]
mod encoder;
pub use data::{Data, IntoData, Slice};
pub use datatype::Datatype;
pub use file::File;
pub use writer::Writer;
#[cfg(feature = "serialize")]
pub use decoder::Decoder;
#[cfg(feature = "serialize")]
pub use encoder::Encoder;
| true |
d758151563766a64f276897886da6d1129b4dfc5
|
Rust
|
zhangjunfang/rust_read_code
|
/web_basic_01/src/user.rs
|
UTF-8
| 490 | 2.859375 | 3 |
[
"Apache-2.0"
] |
permissive
|
use actix_web::{Json, Result};
#[derive(Deserialize, Serialize, Debug)]
pub struct Info {
username: String,
email: String,
password: String,
confirm_password: String,
}
pub fn info(info: Json<Info>) -> Result<Json<Info>> {
println!("=========={:?}=========", info);
Ok(Json(Info {
username: info.username.clone(),
email: info.email.clone(),
password: info.password.clone(),
confirm_password: info.confirm_password.clone(),
}))
}
| true |
4580ff9d5f63980497e614eeb2d1e742c82072f2
|
Rust
|
beschaef/rtos
|
/src/features/shell/mod.rs
|
UTF-8
| 19,462 | 3.15625 | 3 |
[] |
no_license
|
//! This module provides a basic shell / terminal to enable user interaction with the system.
//! It supports a minimal set of commands and can easily be extended with new commands.
//! Currently supported commands are:
//!
//! 1. "help" -> Displays a man page which shows a list of supported commands.
//! 2. "tetris" -> Starts a self developed version of the ancient tetris
//! 3. "clock" -> Adds a temporary clock which counts up to a specific amount of seconds
//! 4. "reboot" -> Reboots the system
//! 5. "shutdown" -> Shuts down the system
//! 6. "strg + c" -> Terminates the current running task which was issued from the shell
//!
//! A new Shell can be initialized with a custom number of lines, which is determined by passing
//! the initial cursor position (altogether 25 rows are available on the screen).
//! ATTENTION: Screen output that does not come from the shell is currently not confined to the
//! area outside the shell, so it can overlap the shell area (depending on the initial cursor
//! position).
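//!
//! A minimal usage sketch (illustrative only; the row/column values below are assumptions,
//! not taken from the original code):
//!
//! ```ignore
//! // Reserve the bottom rows of the 25-row screen for the shell; input starts in
//! // the column right after the "bob@rtos > " prompt.
//! let mut shell = Shell::new((21, 11));
//! shell.init_shell();
//! // Feed key presses (as strings) into the shell, e.g. from the keyboard handler.
//! shell.parse_input("h".to_string());
//! shell.parse_input("ENTER".to_string());
//! ```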
use alloc::string::String;
use alloc::{string::ToString, Vec};
use features::{reboot, shutdown};
use tasks::{tetris, uptime_temp, NEW_TASKS, PIECE, TASK_STARTED};
#[allow(unused_imports)]
use trace::*;
use vga_buffer::*;
use x86_64::VirtualAddress;
pub struct Shell {
/// Specifies the initial cursor position (row, col).
default_cursor_position: (u8, u8),
/// Specifies the current cursor position (row, col). Values change during runtime.
current_cursor_position: (u8, u8),
/// Stores the user input from the shell.
input: String,
/// Stores the commands issued from the shell.
input_history: Vec<String>,
/// If set to true, the currently running task terminates itself and is not scheduled anymore.
pub terminate_running_task: bool,
/// If set to true, the shell checks whether ctrl + c was pressed.
parse_ctrl_command: bool,
/// Contains the running task (started by the shell) as string.
running_task: String,
/// Defines the screen area to which the content of tasks started by the shell is displayed.
active_screen: (u8, u8, u8, u8),
/// Text, which is displayed when the user issues an unsupported command.
unkown_command_help: String,
}
impl Shell {
/// Creates a new shell and sets some default default values.
/// # Arguments
/// * `current_cursor_position` - ((u8, u8)) Specifies the initial cursor position (row, col)
/// and therewith defines the number of lines occupied by the shell.
#[allow(dead_code)]
pub fn new(current_cursor_position: (u8, u8)) -> Self {
Shell {
default_cursor_position: current_cursor_position,
current_cursor_position: current_cursor_position,
input: "".to_string(),
input_history: Vec::new(),
terminate_running_task: false,
parse_ctrl_command: false,
active_screen: (30, 80, 0, 20),
running_task: "".to_string(),
unkown_command_help:
"Unknown command. To see a list of possible commands, type `help`!".to_string(),
}
}
/// Initializes the shell -> Prints the line which seperates the active screen from the shell
/// area and prints the prompt to the according position.
pub fn init_shell(&mut self) {
self.print_separating_line();
let cursor_position_height = self.current_cursor_position.0;
self.print_prompt(cursor_position_height, 0);
}
/// Clears the active screen area (sets it to black fore- and background-color). Normally used
/// when a task issued by the shell is terminated.
pub fn clear_active_screen(&mut self) {
for col in self.active_screen.0..self.active_screen.1 {
for row in self.active_screen.2..self.active_screen.3 {
write_at_background(" ", row, col, Color::Black, Color::Black);
}
}
}
/// Prints the line which seperates the active screen from the shell area.
pub fn print_separating_line(&mut self) {
write_at_background(
"--------------------------------------------------------------------------------",
self.current_cursor_position.0 - 1,
0,
Color::White,
Color::Black,
);
}
/// Prints the prompt to a desired position.
/// # Arguments
/// * `cursor_position_height` - (u8) Specifies the desired vertical position
/// * `cursor_position_width` - (u8) Specifies the desired horizontal position
pub fn print_prompt(&mut self, cursor_position_height: u8, cursor_position_width: u8) {
write_at_background(
"bob@rtos > ",
cursor_position_height,
cursor_position_width,
Color::LightGray,
Color::Black,
);
}
/// Lets the cursor appear on the screen.
/// Used together with cursor_off() to let the cursor blink.
pub fn cursor_on(&mut self) {
write_at_background(
"_",
self.current_cursor_position.0,
self.current_cursor_position.1,
Color::White,
Color::Black,
);
}
/// Lets the cursor disappear from the screen.
/// Used together with cursor_on() to let the cursor blink.
pub fn cursor_off(&mut self) {
write_at_background(
" ",
self.current_cursor_position.0,
self.current_cursor_position.1,
Color::Black,
Color::Black,
);
}
/// Receives preprocessed user input from `parse_input()`.
/// If currently no other task is running, the input is pushed to the string which
/// contains all of the user input of the current line. Otherwise user input is disabled, which
/// means that no user input is stored. After saving the input it is printed on the screen.
/// Finally the cursor is shifted to the next input position.
/// # Arguments
    /// * `input` - (String) Symbol corresponding to the pressed key on the keyboard, cast as a
    /// string
pub fn store_and_print_input(&mut self, input: String) {
unsafe {
if TASK_STARTED != true {
self.input.push(input.chars().next().unwrap());
write_at_background(
&input,
self.current_cursor_position.0,
self.current_cursor_position.1,
Color::White,
Color::Black,
);
if input.len() as u8 > 1 {
self.current_cursor_position.1 += input.len() as u8 + 1;
} else if input.len() as u8 == 1 {
self.current_cursor_position.1 += 1;
}
}
}
}
/// Entry point of user input to the shell module.
/// Every time the user presses a key on the keyboard, a corresponding string is passed to this
/// function, which then handles possible cases:
///
/// 1. *ENTER* -> The blinking cursor is removed from the shell (to signalize that
/// user input is disabled) and parse_command() is called.
/// 2. *BACKSPACE* -> If no task (started by the shell) is running and if the cursor is in
/// a valid position, the last input char is removed from the input string and the cursor is
/// shifted to the left.
/// 3. *CTRL* -> An internal flag is set to signalize that a ctrl-key was pressed.
/// 4. *ARROW* -> If tetris is running, a string corresponding to the pressed arrow key is
/// passed to the control parser of tetris.
/// 5. *DEFAULT* -> If none of the previous cases applies, the input is not interpreted as
/// command but as *normal* input and is passed to `store_and_print_input()`.
/// # Arguments
    /// * `input` - (String) Symbol corresponding to the pressed key on the keyboard, cast as a
    /// string
pub fn parse_input(&mut self, input: String) {
if input == "ENTER" {
// delete blinking cursor in current line
write_at_background(
" ",
self.current_cursor_position.0,
self.current_cursor_position.1,
Color::Black,
Color::Black,
);
self.parse_command();
} else if input == "BACKSPACE" {
unsafe {
if TASK_STARTED != true {
if self.current_cursor_position.1 > self.default_cursor_position.1 {
self.input.pop();
write_at_background(
" ",
self.current_cursor_position.0,
self.current_cursor_position.1,
Color::Black,
Color::Black,
);
self.current_cursor_position.1 -= 1;
}
}
}
} else if input == "CTRL_LEFT" || input == "CTRL_RIGHT" {
self.parse_ctrl_command = true;
} else if self.parse_ctrl_command == true {
self.parse_ctrl_command(input);
} else if input == "ARROW_UP" || input == "ARROW_DOWN" || input == "ARROW_LEFT"
|| input == "ARROW_RIGHT"
{
unsafe {
if TASK_STARTED && self.running_task == "tetris" {
PIECE.lock().parse_control(input);
}
}
} else {
if self.current_cursor_position.1 < BUFFER_WIDTH as u8 - 1 {
self.store_and_print_input(input);
}
}
}
/// Called by `parse_input()`.
/// If neccessary (e.g. in case of tetris, clock), the function pointer of the task which should
/// be started is pushed to the vector NEW_TASKS, which stores new tasksto be started by the shell or other tasks.
/// The main task starts new task from this vector.
/// In case of *reboot* or *shutdown* the corresponding function in the *features* crate is called.
/// If an unsupported command is issued, an appropriate warning is displayed.
fn parse_command(&mut self) {
let x = self.input.to_string();
self.input_history.push(x.clone());
if x == "tetris" {
{
NEW_TASKS.lock().insert(0, VirtualAddress(tetris as usize));
}
unsafe {
TASK_STARTED = true;
}
self.running_task = "tetris".to_string();
} else if x == "htop" {
;
} else if x == "help" {
self.show_shell_manual();
unsafe {
TASK_STARTED = true;
}
self.running_task = "help".to_string();
} else if x == "clock" {
{
NEW_TASKS
.lock()
.insert(0, VirtualAddress(uptime_temp as usize));
}
if self.current_cursor_position.0 as usize >= BUFFER_HEIGHT - 1 {
self.print_shift_history();
} else {
self.current_cursor_position.0 += 1;
}
self.current_cursor_position.1 = self.default_cursor_position.1;
let cursor_position_height = self.current_cursor_position.0;
self.print_prompt(cursor_position_height, 0);
} else if x == "" {
;
} else if x == "reboot" {
reboot();
} else if x == "shutdown" {
shutdown();
} else {
if self.current_cursor_position.0 as usize >= BUFFER_HEIGHT - 1 {
clear_row(BUFFER_HEIGHT - 1);
write_at_background(
&self.unkown_command_help,
BUFFER_HEIGHT as u8 - 1,
0,
Color::Red,
Color::Black,
);
{
let help = &self.unkown_command_help;
self.input_history.push(help.to_string());
}
self.print_shift_history();
self.current_cursor_position.1 = self.default_cursor_position.1;
} else {
self.current_cursor_position.0 += 1;
if self.current_cursor_position.0 as usize >= BUFFER_HEIGHT - 1 {
clear_row(BUFFER_HEIGHT - 1);
write_at_background(
&self.unkown_command_help,
BUFFER_HEIGHT as u8 - 1,
0,
Color::Red,
Color::Black,
);
{
let help = &self.unkown_command_help;
self.input_history.push(help.to_string());
}
self.print_shift_history();
self.current_cursor_position.1 = self.default_cursor_position.1;
} else {
clear_row(self.current_cursor_position.0 as usize);
write_at_background(
&self.unkown_command_help,
self.current_cursor_position.0,
0,
Color::Red,
Color::Black,
);
{
let help = &self.unkown_command_help;
self.input_history.push(help.to_string());
}
self.current_cursor_position.0 += 1;
self.current_cursor_position.1 = self.default_cursor_position.1;
let cursor_position_height = self.current_cursor_position.0;
self.print_prompt(cursor_position_height, 0);
}
}
}
self.input.clear();
}
/// Prints a description of currently implemented shell commands.
/// The text is written quick and dirty to the active screen area.
/// A more professional solution would be to write a method which parses a desired textfile
/// (possibly containing control commands for formatting) and renders the text to a specified
    /// area on the screen. Due to lack of time this was not implemented.
fn show_shell_manual(&mut self) {
write_at_background(
"###### RTOS-SHELL - MANUAL ######",
0,
35,
Color::White,
Color::Black,
);
write_at_background(
"1. help > Shows a full list of possible",
2,
35,
Color::White,
Color::Black,
);
write_at_background(
" shell commands",
3,
35,
Color::White,
Color::Black,
);
write_at_background(
"2. tetris > Starts a funky tetris game",
5,
35,
Color::White,
Color::Black,
);
write_at_background(
"3. clock > Adds a temporary clock to the",
7,
35,
Color::White,
Color::Black,
);
write_at_background(
" left of the screen",
8,
35,
Color::White,
Color::Black,
);
write_at_background(
"4. reboot > Reboots the system",
10,
35,
Color::White,
Color::Black,
);
write_at_background(
"5. shutdown > Powers off the system",
12,
35,
Color::White,
Color::Black,
);
write_at_background(
"6. ctrl-c > Cancels the last command issued",
14,
35,
Color::White,
Color::Black,
);
write_at_background(
" from the shell and activates",
15,
35,
Color::White,
Color::Black,
);
write_at_background(
" new input",
16,
35,
Color::White,
Color::Black,
);
}
/// Called by `parse_input()` after *ctrl* and another key was pressed.
/// If the other key was *c* and a task started by the shell is running, this method sets the
/// `terminate_running_task` flag to inform the scheduler that the task can be terminated.
/// Then the input string is cleared and the shell is resetted to some default values.
/// # Arguments
    /// * `input` - (String) Symbol corresponding to the pressed key on the keyboard, cast as a string
fn parse_ctrl_command(&mut self, input: String) {
if input == "c" {
if unsafe { TASK_STARTED } {
if self.running_task == "help" {
self.reset_shell();
} else {
self.terminate_running_task = true;
}
self.input.clear();
self.running_task = "".to_string();
}
}
self.parse_ctrl_command = false;
}
/// Sets the shell variables to some default values.
/// For example the `terminate_running_task` flag is cleared, the active screen area is cleared.
/// If the last line is reached, the function `print_shift_history()` is called to shift the previous inputs
/// up.
pub fn reset_shell(&mut self) {
self.terminate_running_task = false;
self.clear_active_screen();
if self.current_cursor_position.0 as usize >= BUFFER_HEIGHT - 1 {
self.print_shift_history();
} else {
self.current_cursor_position.0 += 1;
}
self.current_cursor_position.1 = self.default_cursor_position.1;
let cursor_position_height = self.current_cursor_position.0;
self.print_prompt(cursor_position_height, 0);
unsafe {
TASK_STARTED = false;
}
}
/// Called by several shell functions to shift the previous inputs up if the last line of the shell
/// was reached.
fn print_shift_history(&mut self) {
let mut cnt: usize = 1;
for _row in self.default_cursor_position.0 as usize..BUFFER_HEIGHT - 1 {
clear_row(BUFFER_HEIGHT - 1 - cnt);
let mut history_entry =
&self.input_history[(self.input_history.len() - 1) - (cnt - 1)].to_string();
if history_entry == &self.unkown_command_help {
write_at_background(
history_entry,
BUFFER_HEIGHT as u8 - 1 - cnt as u8,
0,
Color::Red,
Color::Black,
);
} else {
self.print_prompt(BUFFER_HEIGHT as u8 - 1 - cnt as u8, 0);
write_at_background(
history_entry,
BUFFER_HEIGHT as u8 - 1 - cnt as u8,
self.default_cursor_position.1,
Color::White,
Color::Black,
);
}
cnt += 1;
}
// clear last line and print a prompt
clear_row(BUFFER_HEIGHT - 1);
self.print_prompt(BUFFER_HEIGHT as u8 - 1, 0);
}
}
| true |
0fb5f60412972f29351df33911e6c7e20e8fbc96
|
Rust
|
andrelmlins/numero-por-extenso
|
/src/conversor/mod.rs
|
UTF-8
| 3,152 | 3.625 | 4 |
[
"MIT"
] |
permissive
|
mod constantes;
use crate::tipo::TipoExtenso;
use constantes::*;
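// Builds the written-out ("por extenso") form of a number: the integer and decimal
// parts are spelled out separately and then combined according to the requested
// output type (plain decimal, percentage, or monetary).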
pub fn gerar_extenso(inteiro: String, decimal: String, tipo: TipoExtenso) -> String {
let inteiro_extenso = extenso(inteiro);
let decimal_extenso = extenso(decimal);
match tipo {
TipoExtenso::DECIMAL => mascara_decimal(inteiro_extenso, decimal_extenso),
TipoExtenso::PORCENTAGEM => mascara_porcetagem(inteiro_extenso, decimal_extenso),
TipoExtenso::MONETARIO => mascara_monetario(inteiro_extenso, decimal_extenso),
}
}
fn mascara_decimal(inteiro: String, decimal: String) -> String {
if decimal != "" {
format!("{} ponto {}", inteiro, decimal)
} else {
inteiro
}
}
fn mascara_porcetagem(inteiro: String, decimal: String) -> String {
if decimal != "" {
format!("{} ponto {} por cento", inteiro, decimal)
} else {
format!("{} por cento", inteiro)
}
}
fn mascara_monetario(inteiro: String, decimal: String) -> String {
if decimal != "" {
format!("{} reais e {} centavos", inteiro, decimal)
} else {
format!("{} reais", inteiro)
}
}
fn extenso(valor: String) -> String {
let mut extenso = String::new();
let quantidade_centena = ((valor.len() as f32) / 3.0).round() as i32;
for contador_centena in 0..quantidade_centena {
let tripla: String = valor
.chars()
.skip((contador_centena * 3) as usize)
.take(3)
.collect();
let tripla_numero: i32 = tripla.parse().unwrap();
let unidades_centena: Vec<&str> = tripla.split("").collect();
let mut centena = CENTENAS[unidades_centena[1].parse::<usize>().unwrap()];
let dezena = DEZENAS[unidades_centena[2].parse::<usize>().unwrap()];
let dezena_composta = DEZENAS_COMPOSTAS[unidades_centena[3].parse::<usize>().unwrap()];
let mut unidade = UNIDADES[unidades_centena[3].parse::<usize>().unwrap()];
let casa = CASAS[(quantidade_centena - contador_centena - 1) as usize];
let casa_plural = CASAS_PLURAL[(quantidade_centena - contador_centena - 1) as usize];
if tripla_numero == 1 {
unidade = "";
}
if tripla_numero >= 100 && tripla_numero < 200 {
centena = "cento";
}
if centena != "" {
extenso.push_str(centena);
if unidade != "" || dezena != "" {
extenso.push_str(" e ");
}
}
if dezena == "dez" {
extenso.push_str(dezena_composta);
} else {
if dezena != "" {
extenso.push_str(dezena);
if unidade != "" {
extenso.push_str(" e ");
}
}
if unidade != "" {
extenso.push_str(unidade);
}
}
if casa != "" && casa_plural != "" {
extenso.push_str(" ");
if tripla_numero > 1 {
extenso.push_str(casa_plural);
} else {
extenso.push_str(casa);
}
extenso.push_str(", ");
}
}
extenso
}
| true |
3acd22f8c007ee07ac0aee4d8f8e46042da27635
|
Rust
|
juancastillo0/rick-morty-back
|
/src/graphql/mod.rs
|
UTF-8
| 8,287 | 2.609375 | 3 |
[] |
no_license
|
use crate::db::{self, establish_connection};
use crate::schema::{character, episode, location};
use diesel::{
dsl::sql,
pg::Pg,
prelude::*,
sql_types::{Bool, Float},
};
use juniper::FieldResult;
use std::sync::RwLock;
pub mod character_model;
use character_model::*;
pub mod episode_model;
use episode_model::*;
pub mod location_model;
use location_model::*;
// ######### CONTEXT ###############
pub struct Ctx {
character: RwLock<i32>,
location: RwLock<i32>,
episode: RwLock<i32>,
}
impl juniper::Context for Ctx {}
impl Ctx {
pub fn new(counts: db::DbCounts) -> Ctx {
Ctx {
character: RwLock::from(counts.character),
location: RwLock::from(counts.location),
episode: RwLock::from(counts.episode),
}
}
}
// ######### QUERIES ###############
#[derive(juniper::GraphQLInputObject)]
struct CharacterFilter {
search_text: Option<String>,
status: Option<Vec<String>>,
species: Option<Vec<String>>,
gender: Option<Vec<String>>,
origin_id: Option<Vec<i32>>,
location_id: Option<Vec<i32>>,
}
type FilterCharacterExpr<'a> =
Box<dyn BoxableExpression<character::table, Pg, SqlType = Bool> + 'a>;
fn filter_characters<'a>(filter: &'a CharacterFilter) -> FilterCharacterExpr<'a> {
let mut filters: Vec<FilterCharacterExpr> = vec![];
let always_true = Box::new(character::id.eq(character::id));
if let Some(status) = &filter.status {
filters.push(Box::new(character::status.eq_any(status)));
}
if let Some(species) = &filter.species {
filters.push(Box::new(character::species.eq_any(species)));
}
if let Some(gender) = &filter.gender {
filters.push(Box::new(character::gender.eq_any(gender)));
}
if let Some(origin_id) = &filter.origin_id {
filters.push(Box::new(character::origin_id.eq_any(origin_id)));
}
if let Some(location_id) = &filter.location_id {
filters.push(Box::new(character::location_id.eq_any(location_id)));
}
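    // AND all supplied filters together; the tautological `id = id` predicate keeps
    // the boxed expression well-formed when no filters were provided.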
filters
.into_iter()
.fold(always_true, |query, curr| Box::new(query.and(curr)))
}
pub struct Query;
#[juniper::object(
Context = Ctx,
)]
impl Query {
fn characters(mut page: i32, context: &Ctx) -> FieldResult<ListResult<Character>> {
if page == -1 {
page = 1;
}
Ok(load_many(character::table, page, &context.character)?)
}
fn characters_filtered(
limit: i32,
offset: i32,
filter: CharacterFilter,
) -> FieldResult<Vec<Character>> {
let query = character::table.filter(filter_characters(&filter));
match &filter.search_text {
Some(search_text) => {
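                // Full-text search: keep rows whose precomputed `search_text` tsvector
                // matches the query, ordered by descending ts_rank relevance.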
let raw_filter = format!("\"search_text\" @@ plainto_tsquery('{}')", search_text);
let raw_order = format!(
"ts_rank(\"search_text\", plainto_tsquery('{}')) desc",
search_text
);
let query = query.filter(sql(&raw_filter)).order(sql::<
Box<dyn diesel::expression::Expression<SqlType = Float>>,
>(&raw_order));
let query = diesel::QueryDsl::limit(query, limit as i64);
Ok(diesel::QueryDsl::offset(query, offset as i64).load(&establish_connection())?)
}
None => {
let query = diesel::QueryDsl::limit(query, limit as i64);
Ok(diesel::QueryDsl::offset(query, offset as i64).load(&establish_connection())?)
}
}
}
fn character(id: i32) -> FieldResult<Character> {
let db_conn = establish_connection();
Ok(character::table.find(id).first(&db_conn)?)
}
fn episodes(page: i32, context: &Ctx) -> FieldResult<ListResult<Episode>> {
Ok(load_many(episode::table, page, &context.episode)?)
}
fn episode(id: i32) -> FieldResult<Episode> {
let db_conn = establish_connection();
Ok(episode::table.find(id).first(&db_conn)?)
}
fn locations(page: i32, context: &Ctx) -> FieldResult<ListResult<Location>> {
Ok(load_many(location::table, page, &context.location)?)
}
fn location(id: i32) -> FieldResult<Location> {
let db_conn = establish_connection();
Ok(location::table.find(id).first(&db_conn)?)
}
}
use diesel::dsl::{Limit, Offset};
use diesel::query_dsl::{
methods::{LimitDsl, OffsetDsl},
LoadQuery, RunQueryDsl,
};
#[derive(juniper::GraphQLObject)]
struct InfoListResult {
next_page: Option<i32>,
num_pages: i32,
item_count: i32,
}
struct ListResult<Model> {
info: InfoListResult,
results: Vec<Model>,
}
#[juniper::object(name = "CharacterListResult", Context = Ctx,)]
impl ListResult<Character> {
fn info(&self) -> &InfoListResult {
&self.info
}
fn results(&self) -> &Vec<Character> {
&self.results
}
}
#[juniper::object(name = "LocationListResult", Context = Ctx,)]
impl ListResult<Location> {
fn info(&self) -> &InfoListResult {
&self.info
}
fn results(&self) -> &Vec<Location> {
&self.results
}
}
#[juniper::object(name = "EpisodeListResult", Context = Ctx,)]
impl ListResult<Episode> {
fn info(&self) -> &InfoListResult {
&self.info
}
fn results(&self) -> &Vec<Episode> {
&self.results
}
}
const ITEMS_PER_PAGE: i32 = 30;
fn load_many<Model, Table>(
table: Table,
page_input: i32,
count: &RwLock<i32>,
) -> FieldResult<ListResult<Model>>
where
Table: OffsetDsl + LoadQuery<diesel::pg::PgConnection, Model>,
Offset<Table>: LimitDsl,
Limit<Offset<Table>>: LoadQuery<diesel::pg::PgConnection, Model>,
{
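    // Pages are 1-based; `page == -1` is a special case that loads every row
    // without pagination. `count` caches the table's row count for the page math.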
let page = std::cmp::max(page_input, 1);
let item_count = *count.read().unwrap();
let offset = ITEMS_PER_PAGE * (page - 1);
let num_pages = f64::ceil(item_count as f64 / (ITEMS_PER_PAGE as f64)) as i32;
let info = InfoListResult {
next_page: if item_count > offset + ITEMS_PER_PAGE {
Some(page + 1)
} else {
None
},
num_pages,
item_count: item_count,
};
let results = if page_input == -1 {
let db_conn = establish_connection();
table.load(&db_conn)?
} else if item_count > offset {
let db_conn = establish_connection();
table
.offset(offset as i64)
.limit(ITEMS_PER_PAGE as i64)
.load::<Model>(&db_conn)?
} else {
vec![]
};
Ok(ListResult { info, results })
}
// ########## MUTATIONS ###############
pub struct Mutation;
#[juniper::object(
Context = Ctx,
)]
impl Mutation {
fn reset_db(context: &Ctx) -> FieldResult<bool> {
let db_conn = establish_connection();
db::reset_db(&db_conn)?;
db_conn.transaction::<(), diesel::result::Error, _>(|| {
let counts = db::get_all_counts(&db_conn)?;
*context.character.write().unwrap() = counts.character;
*context.episode.write().unwrap() = counts.episode;
*context.location.write().unwrap() = counts.location;
Ok(())
})?;
Ok(true)
}
fn character_mutation() -> CaracterMutation {
CaracterMutation
}
fn episode_mutation() -> EpisodeMutation {
EpisodeMutation
}
fn location_mutation() -> LocationMutation {
LocationMutation
}
// fn create_character(
// creator: CharacterCreator,
// relations: CharacterRelations,
// context: &Ctx,
// ) -> FieldResult<Character> {
// CaracterMutation::create_character(creator, relations, context)
// }
// fn update_character(mut updater: CharacterUpdater) -> FieldResult<Character> {
// CaracterMutation::update_character(updater)
// }
// fn delete_character(id: i32, context: &Ctx) -> FieldResult<bool> {
// CaracterMutation::delete_character(id, context)
// }
}
pub type GraphqlSchema = juniper::RootNode<'static, Query, Mutation>;
pub fn create_schema() -> GraphqlSchema {
GraphqlSchema::new(Query, Mutation)
}
// impl Ctx {
// pub fn new(characters: Vec<Character>) -> Ctx {
// Ctx(Database {
// characters: RwLock::new(characters),
// })
// }
// }
// struct Database {
// characters: RwLock<Vec<Character>>,
// }
// impl Database {
// fn get_characters(&self) -> Vec<Character> {
// let g = self.characters.read().unwrap();
// (*g).to_vec()
// }
// fn add_character(
// &self,
// new_character: NewCharacter,
// ) -> Result<Character, sync::PoisonError<sync::RwLockWriteGuard<'_, Vec<Character>>>> {
// let characters = &mut *(self.characters.write()?);
// let id = characters.len() + 1;
// let character = Character::from_new_character(id, new_character);
// characters.push(character.clone());
// Ok(character)
// }
// }
| true |
fec6f1f4d1c21fc70a123e321a8831193cdf6c16
|
Rust
|
mirek186/intakectf-2021-public
|
/Reversing/baby_rusty_key/src/main.rs
|
UTF-8
| 7,871 | 2.53125 | 3 |
[] |
no_license
|
#[macro_use]
extern crate litcrypt;
//trash rust code only obfuscates it further :p
extern crate cursive;
extern crate chrono;
use cursive::align::HAlign;
use cursive::event::{Event};
use cursive::traits::Identifiable;
use cursive::views::{Dialog, EditView, SelectView, TextView};
use cursive::Cursive;
use chrono::prelude::{Utc,Timelike};
// creates unsafe code but it's about the only way for a global var without prostituting a parameter
static mut ALREADY_LICENSED: bool = false;
/* -------- ctrl-c hook stuff ------------ */
extern crate anyhow;
extern crate crossbeam_channel;
use std::time::Duration;
extern crate ctrlc;
use anyhow::Result;
use crossbeam_channel::{bounded, select, tick, Receiver};
/* ------ end of ctrl-c hook stuff ------- */
#[no_mangle]
fn verify_license(license: &str) -> bool{
if !license.contains("-"){
return false;
}
let mut current_hour = Utc::now().hour().to_string();
current_hour=format!("{:0>2}",current_hour);
let key_string = "KEY";
    let key_and_hour = [key_string, &current_hour].join("");
let key_chunks: Vec<&str> =license.split("-").collect();
if key_chunks.len()!=5{
return false;
}else{
for i in 1..4{
if key_chunks[i].len()!=5{
return false;
}
}
}
let uppercase_license = license.to_uppercase();
if license!=uppercase_license{
return false;
}
if license.contains("B") ||
license.contains("G") ||
&key_chunks[4][3..5]!="33" ||
((key_chunks[1].chars().nth(0).unwrap() as u8) < 65) || ((key_chunks[1].chars().nth(0).unwrap() as u8) > 72) ||
!license.contains("AA") ||
!license.contains("Z01") ||
&key_chunks[0]!=&key_and_hour{
return false;
}
//where 23 is the hour in utc
//e.g: KEY23-AAZ01-CCCCC-CCCCC-CCC33
return true;
}
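// Added walkthrough of the example key above (derived from the checks in
// verify_license, not from any external source): "KEY23-AAZ01-CCCCC-CCCCC-CCC33"
// passes when the UTC hour is 23 because chunk 0 equals "KEY" + the zero-padded
// hour, chunks 1..=3 are five characters long, everything is uppercase with no
// 'B' or 'G', chunk 1 starts with a letter in 'A'..='H' and supplies both "AA"
// and "Z01", and chunk 4 ends in "33".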
fn exit_program() {
//lib borks terminal so can get in the bin - here's the scuffed part-fix - cope
std::process::Command::new("reset").status().unwrap();
print!("\x1B[2J\x1B[1;1H");
std::process::exit(0);
}
fn license_evaluate_popup(session: &mut Cursive, license: &str) {
session.add_global_callback(Event::CtrlChar('c'), |s| {
s.set_screen(0);
});
if license.is_empty() {
// Show error for empty key
session.add_layer(Dialog::info("Please enter something!"));
}else if !verify_license(license){
session.add_layer(Dialog::info("License invalid"))
}
else {
//program now licensed
unsafe {
ALREADY_LICENSED = true;
}
session.add_layer(
Dialog::around(TextView::new("Success! Program Licensed")).button("Back", |session| {
//since it's a dialog, we don't have to switch back to menu, can just pop dialog off
session.pop_layer();
session.quit();
}),
);
}
}
use_litcrypt!();
fn menu() {
let mut gcli = cursive::default();
let mut options_view = SelectView::new().h_align(HAlign::Center);
//change menu item depending on whether program licensed or not
if unsafe { !ALREADY_LICENSED } {
options_view.add_item("1. License", 1);
} else {
options_view.add_item("Already Licensed", 1);
}
options_view.add_item("2. View Flag", 2);
options_view.add_item("3. View Hint", 3);
options_view.add_item("Exit", 99999);
//when option submitted
options_view.set_on_submit(move |session, op_code: &u32| {
if *op_code == 99999 {
// session.quit();
exit_program();
} else if *op_code == 1 {
// enter_something();
if unsafe { !ALREADY_LICENSED } {
//show program licenser screen
session.set_screen(1)
} else {
//show program already licensed confirmation
session.set_screen(2);
}
} else if *op_code==2{
if unsafe{!ALREADY_LICENSED}{
session.set_screen(3);
}else{
session.set_screen(4);
}
} else if *op_code==3{
session.set_screen(5);
}
});
//add menu to gcli
gcli.screen_mut().add_layer(
Dialog::around(options_view)
.title("Menu")
.padding_lrtb(1, 1, 1, 0),
);
//add new screen, id=1 - licenser
gcli.add_active_screen();
let licenser = Dialog::new()
.title("Enter license")
.padding_lrtb(1, 1, 1, 0)
.content(
EditView::new()
.on_submit(license_evaluate_popup)
.with_name("license"),
)
.button("Back", |s|{
s.set_screen(0);
})
.button("Submit", |session| {
let license = session
.call_on_name("license", |view: &mut EditView| {
// We can return content from the closure
view.get_content()
})
.unwrap();
// Run the next step
license_evaluate_popup(session, &license);
});
gcli.screen_mut().add_layer(licenser);
//add new screen, id = 2 (already licensed)
gcli.add_active_screen();
gcli.screen_mut()
.add_layer(
Dialog::around(TextView::new("Already Licensed")).button("Ok", |s| {
//go back to menu
s.set_screen(0)
}),
);
//add new screen id=3 (not licensed)
gcli.add_active_screen();
gcli.screen_mut()
.add_layer(
Dialog::around(TextView::new("You must license the program to use this feature"))
.button("Ok", |s|{
//go back to menu
s.set_screen(0)
})
);
gcli.add_active_screen();
gcli.screen_mut()
.add_layer(
Dialog::around(TextView::new(lc!("WMG{3z_w4Y_t0_l1cense_the_pr0gr4m}")).h_align(HAlign::Center)));
//add new screen id=5 (show hint)
gcli.add_active_screen();
gcli.screen_mut()
.add_layer(
Dialog::around(
TextView::new("Look at the verify_license function. What happens if you get a valid key?")
)
.button("Ok", |s|{
//go back to menu
s.set_screen(0);
})
);
//set current screen to be menu
gcli.set_screen(0);
//run window
gcli.run();
}
// ctrl-c hook magic https://rust-cli.github.io/book/in-depth/signals.html
fn ctrl_channel() -> Result<Receiver<()>, ctrlc::Error> {
let (sender, receiver) = bounded(100);
ctrlc::set_handler(move || {
let _ = sender.send(());
})?;
Ok(receiver)
}
fn main() -> Result<()> {
/************************
* Screens *
* 0 - menu *
* 1 - License window *
* 2 - already licensed *
* 3 - not licensed *
* 4 - show flag *
* 5 - show hint *
************************/
//so the program appears to work properly if using strings or whatever
let _something: &str="The flag is: ";
// hook/prevent ctrl-c being used (told you there'd be disgusting code - but I bet you thought it'd be mine... well this is a lib example)
// since lib end terminal session, reloads are required, the lib has fps anyway
let ctrl_c_events = ctrl_channel()?;
//tick reload check duration
let ticks = tick(Duration::from_millis(50));
loop {
select! {
//reload menu
recv(ticks) -> _ => {
menu();
// menu();
}
recv(ctrl_c_events) -> _ => {
//do nothing on ctrl c
{}
}
}
}
// Ok(())
}
| true |
6d62939dd9d637e7430171076dce33db577aeda7
|
Rust
|
davechallis/rust-raytracer
|
/src/config.rs
|
UTF-8
| 1,995 | 3.140625 | 3 |
[
"Apache-2.0"
] |
permissive
|
use std::path::Path;
use clap::{Arg, App};
pub struct Config {
width: u32,
height: u32,
samples: u32,
output: String,
inline: bool,
}
impl<'a> Config {
pub fn width(&self) -> u32 {
self.width
}
pub fn height(&self) -> u32 {
self.height
}
pub fn samples(&self) -> u32 {
self.samples
}
pub fn output(&'a self) -> &'a Path {
Path::new(&self.output)
}
pub fn inline(&self) -> bool {
self.inline
}
pub fn from_cli_args() -> Self {
let matches = App::new("raytracer")
.arg(Arg::with_name("width")
.long("width")
.value_name("W")
.help("Set width of generated image")
.takes_value(true))
.arg(Arg::with_name("height")
.long("height")
.value_name("H")
.help("Set height of generated image")
.takes_value(true))
.arg(Arg::with_name("samples")
.long("samples")
.value_name("N")
.help("Set number of samples per pixel")
.takes_value(true))
.arg(Arg::with_name("output")
.long("output")
.value_name("OUTPUT")
.help("Set output path of generated image")
.takes_value(true))
.arg(Arg::with_name("inline")
.long("inline")
.help("Output image inline (for use with iTerm2)"))
.get_matches();
let width = matches.value_of("width").unwrap_or("200").parse().unwrap();
let height = matches.value_of("height").unwrap_or("100").parse().unwrap();
let samples = matches.value_of("samples").unwrap_or("10").parse().unwrap();
let output = matches.value_of("output").unwrap_or("./raytracer.png").to_owned();
let inline = matches.occurrences_of("inline") > 0;
Self { width, height, samples, output, inline }
}
}
| true |
f649c9b5fb2268d9db83c1b426d2b44dc6708fd1
|
Rust
|
joaompsantos/exercism
|
/rust/raindrops/src/lib.rs
|
UTF-8
| 312 | 2.703125 | 3 |
[] |
no_license
|
#[allow(unused_parens)]
pub fn raindrops(n: u32) -> String {
let mut result : String = String::new();
if ( n%3 == 0) {result+="Pling"}
if ( n%5 == 0) {result+="Plang"}
if ( n%7 == 0) {result+="Plong"}
if result.is_empty() {
result = format!("{}", n);
}
result
}
| true |
23ccb8686ad7a8a190b2599289c618a54c38ecae
|
Rust
|
ebottabi/datafuse
|
/common/datavalues/src/series/comparison.rs
|
UTF-8
| 4,163 | 2.734375 | 3 |
[
"Apache-2.0"
] |
permissive
|
// Copyright 2020-2021 The Datafuse Authors.
//
// SPDX-License-Identifier: Apache-2.0.
//! Comparison operations on Series.
use common_exception::Result;
use super::Series;
use crate::arrays::ArrayCompare;
use crate::numerical_coercion;
use crate::DFBooleanArray;
use crate::DataType;
macro_rules! impl_compare {
($self:expr, $rhs:expr, $method:ident) => {{
match $self.data_type() {
DataType::Boolean => $self.bool().unwrap().$method($rhs.bool().unwrap()),
DataType::Utf8 => $self.utf8().unwrap().$method($rhs.utf8().unwrap()),
DataType::UInt8 => $self.u8().unwrap().$method($rhs.u8().unwrap()),
DataType::UInt16 => $self.u16().unwrap().$method($rhs.u16().unwrap()),
DataType::UInt32 => $self.u32().unwrap().$method($rhs.u32().unwrap()),
DataType::UInt64 => $self.u64().unwrap().$method($rhs.u64().unwrap()),
DataType::Int8 => $self.i8().unwrap().$method($rhs.i8().unwrap()),
DataType::Int16 => $self.i16().unwrap().$method($rhs.i16().unwrap()),
DataType::Int32 => $self.i32().unwrap().$method($rhs.i32().unwrap()),
DataType::Int64 => $self.i64().unwrap().$method($rhs.i64().unwrap()),
DataType::Float32 => $self.f32().unwrap().$method($rhs.f32().unwrap()),
DataType::Float64 => $self.f64().unwrap().$method($rhs.f64().unwrap()),
DataType::Date32 => $self.date32().unwrap().$method($rhs.date32().unwrap()),
DataType::Date64 => $self.date64().unwrap().$method($rhs.date64().unwrap()),
_ => unimplemented!(),
}
}};
}
fn coerce_cmp_lhs_rhs(lhs: &Series, rhs: &Series) -> Result<(Series, Series)> {
if lhs.data_type() == rhs.data_type()
&& (lhs.data_type() == DataType::Utf8 || lhs.data_type() == DataType::Boolean)
{
return Ok((lhs.clone(), rhs.clone()));
}
let dtype = numerical_coercion(&lhs.data_type(), &rhs.data_type())?;
let mut left = lhs.clone();
if lhs.data_type() != dtype {
left = lhs.cast_with_type(&dtype)?;
}
let mut right = rhs.clone();
if rhs.data_type() != dtype {
right = rhs.cast_with_type(&dtype)?;
}
Ok((left, right))
}
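// Added note (an illustration of the intent, assuming numerical_coercion follows
// the usual widening rules; it is defined elsewhere in this crate): comparing an
// Int32 series against a Float64 series casts both sides to Float64 first, so the
// typed arms of `impl_compare!` above always see matching array types, while Utf8
// and Boolean comparisons skip coercion entirely via the early return above.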
impl ArrayCompare<&Series> for Series {
/// Create a boolean mask by checking for equality.
fn eq(&self, rhs: &Series) -> Result<DFBooleanArray> {
let (lhs, rhs) = coerce_cmp_lhs_rhs(self, rhs)?;
impl_compare!(lhs.as_ref(), rhs.as_ref(), eq)
}
/// Create a boolean mask by checking for inequality.
fn neq(&self, rhs: &Series) -> Result<DFBooleanArray> {
let (lhs, rhs) = coerce_cmp_lhs_rhs(self, rhs)?;
impl_compare!(lhs.as_ref(), rhs.as_ref(), neq)
}
/// Create a boolean mask by checking if lhs > rhs.
fn gt(&self, rhs: &Series) -> Result<DFBooleanArray> {
let (lhs, rhs) = coerce_cmp_lhs_rhs(self, rhs)?;
impl_compare!(lhs.as_ref(), rhs.as_ref(), gt)
}
/// Create a boolean mask by checking if lhs >= rhs.
fn gt_eq(&self, rhs: &Series) -> Result<DFBooleanArray> {
let (lhs, rhs) = coerce_cmp_lhs_rhs(self, rhs)?;
impl_compare!(lhs.as_ref(), rhs.as_ref(), gt_eq)
}
/// Create a boolean mask by checking if lhs < rhs.
fn lt(&self, rhs: &Series) -> Result<DFBooleanArray> {
let (lhs, rhs) = coerce_cmp_lhs_rhs(self, rhs)?;
impl_compare!(lhs.as_ref(), rhs.as_ref(), lt)
}
/// Create a boolean mask by checking if lhs <= rhs.
fn lt_eq(&self, rhs: &Series) -> Result<DFBooleanArray> {
let (lhs, rhs) = coerce_cmp_lhs_rhs(self, rhs)?;
impl_compare!(lhs.as_ref(), rhs.as_ref(), lt_eq)
}
    /// Create a boolean mask by checking if lhs matches the rhs LIKE pattern.
fn like(&self, rhs: &Series) -> Result<DFBooleanArray> {
let (lhs, rhs) = coerce_cmp_lhs_rhs(self, rhs)?;
impl_compare!(lhs.as_ref(), rhs.as_ref(), like)
}
    /// Create a boolean mask by checking if lhs does not match the rhs LIKE pattern.
fn nlike(&self, rhs: &Series) -> Result<DFBooleanArray> {
let (lhs, rhs) = coerce_cmp_lhs_rhs(self, rhs)?;
impl_compare!(lhs.as_ref(), rhs.as_ref(), nlike)
}
}
| true |
99a0be3e4a0a22d3d2fb722d5d2670f121b4bcf8
|
Rust
|
gnoliyil/fuchsia
|
/third_party/rust_crates/vendor/nom-5.0.0/src/bits/macros.rs
|
UTF-8
| 8,119 | 3.578125 | 4 |
[
"BSD-2-Clause",
"MIT"
] |
permissive
|
//! Bit level parsers and combinators
//!
//! Bit parsing is handled by tweaking the input in most macros.
//! In byte level parsing, the input is generally a `&[u8]` passed from combinator
//! to combinator as the slices are manipulated.
//!
//! Bit parsers take a `(&[u8], usize)` as input. The first part of the tuple is a byte slice,
//! the second part is a bit offset in the first byte of the slice.
//!
//! By passing a pair like this, we can leverage most of the existing combinators, and avoid
//! transforming the whole slice to a vector of booleans. This should make it easy
//! to see a byte slice as a bit stream, and parse code points of arbitrary bit length.
//!
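// Added clarification (not part of the upstream nom documentation): the `usize`
// in the `(&[u8], usize)` pair is the bit offset already consumed in the first
// byte of the slice. Taking 3 bits from `((&[0b10_10_10_10, ..], 0))` yields the
// value 0b101 and leaves the input as `(same slice, 3)`; the slice itself only
// advances once a full byte has been consumed, as the `take_bits` tests below
// demonstrate.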
/// Transforms its byte slice input into a bit stream for the underlying parser. This allows the
/// given bit stream parser to work on a byte slice input.
///
/// Signature:
/// `bits!( parser ) => ( &[u8], (&[u8], usize) -> IResult<(&[u8], usize), T> ) -> IResult<&[u8], T>`
///
/// ```
/// # #[macro_use] extern crate nom;
/// # use nom::{Err, Needed};
/// # fn main() {
/// named!( take_4_bits<u8>, bits!( take_bits!( 4u8 ) ) );
///
/// let input = vec![0xAB, 0xCD, 0xEF, 0x12];
/// let sl = &input[..];
///
/// assert_eq!(take_4_bits( sl ), Ok( (&sl[1..], 0xA) ));
/// assert_eq!(take_4_bits( &b""[..] ), Err(Err::Incomplete(Needed::Size(1))));
/// # }
#[macro_export(local_inner_macros)]
macro_rules! bits (
($i:expr, $submac:ident!( $($args:tt)* )) => ({
$crate::bits::bitsc($i, move |i| { $submac!(i, $($args)*) })
});
($i:expr, $f:expr) => (
bits!($i, call!($f))
);
);
/// Counterpart to bits, bytes! transforms its bit stream input into a byte slice for the underlying
/// parser, allowing byte-slice parsers to work on bit streams.
///
/// Signature:
/// `bytes!( parser ) => ( (&[u8], usize), &[u8] -> IResult<&[u8], T> ) -> IResult<(&[u8], usize), T>`,
///
/// A partial byte remaining in the input will be ignored and the given parser will start parsing
/// at the next full byte.
///
/// ```
/// # #[macro_use] extern crate nom;
/// # use nom::combinator::rest;
/// # use nom::error::ErrorKind;
/// # fn main() {
///
/// named!( parse<(u8, u8, &[u8])>, bits!( tuple!(
/// take_bits!(4u8),
/// take_bits!(8u8),
/// bytes!(rest::<_, (_, ErrorKind)>)
/// )));
///
/// let input = &[0xde, 0xad, 0xbe, 0xaf];
///
/// assert_eq!(parse( input ), Ok(( &[][..], (0xd, 0xea, &[0xbe, 0xaf][..]) )));
/// # }
#[macro_export(local_inner_macros)]
macro_rules! bytes (
($i:expr, $submac:ident!( $($args:tt)* )) => ({
$crate::bits::bytesc($i, move |i| { $submac!(i, $($args)*) })
});
($i:expr, $f:expr) => (
bytes!($i, call!($f))
);
);
/// Consumes the specified number of bits and returns them as the specified type.
///
/// Signature:
/// `take_bits!(type, count) => ( (&[T], usize), U, usize) -> IResult<(&[T], usize), U>`
///
/// ```
/// # #[macro_use] extern crate nom;
/// # fn main() {
/// named!(bits_pair<(&[u8], usize), (u8, u8)>, pair!( take_bits!(4u8), take_bits!(4u8) ) );
/// named!( take_pair<(u8, u8)>, bits!( bits_pair ) );
///
/// let input = vec![0xAB, 0xCD, 0xEF];
/// let sl = &input[..];
///
/// assert_eq!(take_pair( sl ), Ok((&sl[1..], (0xA, 0xB))) );
/// assert_eq!(take_pair( &sl[1..] ), Ok((&sl[2..], (0xC, 0xD))) );
/// # }
/// ```
#[macro_export(local_inner_macros)]
macro_rules! take_bits (
($i:expr, $count:expr) => (
{
let res: $crate::IResult<_, _> = $crate::bits::streaming::take($count)($i);
res
}
);
);
/// Matches the given bit pattern.
///
/// Signature:
/// `tag_bits!(type, count, pattern) => ( (&[T], usize), U, usize, U) -> IResult<(&[T], usize), U>`
///
/// The caller must specify the number of bits to consume. The matched value is included in the
/// result on success.
///
/// ```
/// # #[macro_use] extern crate nom;
/// # fn main() {
/// named!( take_a<u8>, bits!( tag_bits!(4usize, 0xA) ) );
///
/// let input = vec![0xAB, 0xCD, 0xEF];
/// let sl = &input[..];
///
/// assert_eq!(take_a( sl ), Ok((&sl[1..], 0xA)) );
/// # }
/// ```
#[macro_export(local_inner_macros)]
macro_rules! tag_bits (
($i:expr, $count:expr, $p: expr) => (
{
let res: $crate::IResult<_, _> = $crate::bits::streaming::tag($p, $count)($i);
res
}
)
);
#[cfg(test)]
mod tests {
use crate::lib::std::ops::{AddAssign, Shl, Shr};
use crate::internal::{Err, Needed, IResult};
use crate::error::ErrorKind;
#[test]
fn take_bits() {
let input = [0b10_10_10_10, 0b11_11_00_00, 0b00_11_00_11];
let sl = &input[..];
assert_eq!(take_bits!((sl, 0), 0u8), Ok(((sl, 0), 0)));
assert_eq!(take_bits!((sl, 0), 8u8), Ok(((&sl[1..], 0), 170)));
assert_eq!(take_bits!((sl, 0), 3u8), Ok(((&sl[0..], 3), 5)));
assert_eq!(take_bits!((sl, 0), 6u8), Ok(((&sl[0..], 6), 42)));
assert_eq!(take_bits!((sl, 1), 1u8), Ok(((&sl[0..], 2), 0)));
assert_eq!(take_bits!((sl, 1), 2u8), Ok(((&sl[0..], 3), 1)));
assert_eq!(take_bits!((sl, 1), 3u8), Ok(((&sl[0..], 4), 2)));
assert_eq!(take_bits!((sl, 6), 3u8), Ok(((&sl[1..], 1), 5)));
assert_eq!(take_bits!((sl, 0), 10u8), Ok(((&sl[1..], 2), 683)));
assert_eq!(take_bits!((sl, 0), 8u8), Ok(((&sl[1..], 0), 170)));
assert_eq!(take_bits!((sl, 6), 10u8), Ok(((&sl[2..], 0), 752)));
assert_eq!(take_bits!((sl, 6), 11u8), Ok(((&sl[2..], 1), 1504)));
assert_eq!(take_bits!((sl, 0), 20u8), Ok(((&sl[2..], 4), 700_163)));
assert_eq!(take_bits!((sl, 4), 20u8), Ok(((&sl[3..], 0), 716_851)));
let r: IResult<_,u32> = take_bits!((sl, 4), 22u8);
assert_eq!(
r,
Err(Err::Incomplete(Needed::Size(22)))
);
}
#[test]
fn tag_bits() {
let input = [0b10_10_10_10, 0b11_11_00_00, 0b00_11_00_11];
let sl = &input[..];
assert_eq!(tag_bits!((sl, 0), 3u8, 0b101), Ok(((&sl[0..], 3), 5)));
assert_eq!(tag_bits!((sl, 0), 4u8, 0b1010), Ok(((&sl[0..], 4), 10)));
}
named!(ch<(&[u8],usize),(u8,u8)>,
do_parse!(
tag_bits!(3u8, 0b101) >>
x: take_bits!(4u8) >>
y: take_bits!(5u8) >>
(x,y)
)
);
#[test]
fn chain_bits() {
let input = [0b10_10_10_10, 0b11_11_00_00, 0b00_11_00_11];
let sl = &input[..];
assert_eq!(ch((&input[..], 0)), Ok(((&sl[1..], 4), (5, 15))));
assert_eq!(ch((&input[..], 4)), Ok(((&sl[2..], 0), (7, 16))));
assert_eq!(ch((&input[..1], 0)), Err(Err::Incomplete(Needed::Size(5))));
}
named!(ch_bytes<(u8, u8)>, bits!(ch));
#[test]
fn bits_to_bytes() {
let input = [0b10_10_10_10, 0b11_11_00_00, 0b00_11_00_11];
assert_eq!(ch_bytes(&input[..]), Ok((&input[2..], (5, 15))));
assert_eq!(ch_bytes(&input[..1]), Err(Err::Incomplete(Needed::Size(1))));
assert_eq!(
ch_bytes(&input[1..]),
Err(Err::Error(error_position!(&input[1..], ErrorKind::TagBits)))
);
}
named!(bits_bytes_bs, bits!(bytes!(crate::combinator::rest::<_, (&[u8], ErrorKind)>)));
#[test]
fn bits_bytes() {
let input = [0b10_10_10_10];
assert_eq!(bits_bytes_bs(&input[..]), Ok((&[][..], &[0b10_10_10_10][..])));
}
#[derive(PartialEq, Debug)]
struct FakeUint(u32);
impl AddAssign for FakeUint {
fn add_assign(&mut self, other: FakeUint) {
*self = FakeUint(self.0 + other.0);
}
}
impl Shr<usize> for FakeUint {
type Output = FakeUint;
fn shr(self, shift: usize) -> FakeUint {
FakeUint(self.0 >> shift)
}
}
impl Shl<usize> for FakeUint {
type Output = FakeUint;
fn shl(self, shift: usize) -> FakeUint {
FakeUint(self.0 << shift)
}
}
impl From<u8> for FakeUint {
fn from(i: u8) -> FakeUint {
FakeUint(u32::from(i))
}
}
#[test]
    fn non_primitive_type() {
let input = [0b10_10_10_10, 0b11_11_00_00, 0b00_11_00_11];
let sl = &input[..];
assert_eq!(
take_bits!((sl, 0), 20u8),
Ok(((&sl[2..], 4), FakeUint(700_163)))
);
assert_eq!(
take_bits!((sl, 4), 20u8),
Ok(((&sl[3..], 0), FakeUint(716_851)))
);
let r3: IResult<_, FakeUint> = take_bits!((sl, 4), 22u8);
assert_eq!(
r3,
Err(Err::Incomplete(Needed::Size(22)))
);
}
}
| true |
ffcfdf72ca1993fb3e3c1e346c77ca7085090d7c
|
Rust
|
airpIane/offset-dumper
|
/src/util.rs
|
UTF-8
| 1,465 | 2.8125 | 3 |
[] |
no_license
|
use regex::bytes::Regex;
use iced_x86::Instruction;
use iced_x86::IntelFormatter;
use iced_x86::Formatter;
use std::time::Instant;
/// Enables the user to generate a byte regex out of the normal signature format.
pub fn generate_regex(raw: &str) -> Option<Regex> {
let mut res = raw
.to_string()
.split_whitespace()
.map(|x| match &x {
&"?" => ".".to_string(),
&"??" => ".".to_string(),
x => format!("\\x{}", x),
})
.collect::<Vec<_>>()
.join("");
res.insert_str(0, "(?s-u)");
Regex::new(&res).ok()
}
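// Illustrative usage (added sketch; the byte values are hypothetical, not taken
// from this project): a signature like "48 8b ?? 05" becomes the regex source
// "(?s-u)\x48\x8b.\x05", so each "?"/"??" token matches any single byte while
// explicit bytes must match exactly.
//
// let re = generate_regex("48 8b ?? 05").unwrap();
// assert!(re.is_match(b"\x48\x8b\xff\x05"));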
pub fn find_pattern(data: &[u8], pattern: &str) -> Vec<u64> {
let pattern = pattern.to_lowercase();
let r = generate_regex(&pattern).expect("Could not make pattern from signature string");
r.find_iter(data).map(|n| n.start() as u64).collect()
}
pub fn find_pattern_n(data: &[u8], pattern: &str, index: i32) -> Option<u64> {
if index < 0 {
find_pattern(data, pattern).into_iter().nth_back((index.abs() - 1) as usize)
} else {
let pattern = pattern.to_lowercase();
let r = generate_regex(&pattern).expect("Could not make pattern from signature string");
        r.find_iter(data).map(|n| n.start() as u64).nth(index as usize)
}
}
pub fn format_instruction(i: &Instruction) -> String {
let mut buf = String::new();
let mut formatter = IntelFormatter::new();
formatter.format(&i, &mut buf);
buf
}
| true |
b8591f6aff785f6af3dfc29348a8cc43b19e6906
|
Rust
|
DamonPalovaara/nerdee_engine
|
/src/terrain.rs
|
UTF-8
| 4,720 | 3.15625 | 3 |
[] |
no_license
|
use std::sync::{Arc, Mutex};
use std::thread;
use crate::engine_core::*;
use noise::{NoiseFn, Fbm};
// For file output
use std::fs::File;
use std::io::prelude::*;
const CHUNK_BLOCKS: usize = 512;
const CHUNK_RADIUS: isize = (CHUNK_BLOCKS / 2) as isize;
const BLOCK_SIZE: f64 = 0.5;
const CHUNK_SIZE: f64 = CHUNK_BLOCKS as f64 * BLOCK_SIZE;
// Temporary until I decide on a math library
type Point = (f32, f32, f32);
struct Coordinate {
x: isize,
y: isize
}
impl From<Point> for Coordinate {
fn from(point: Point) -> Self {
let x = (point.0 / CHUNK_SIZE as f32).round() as isize;
let y = (point.1 / CHUNK_SIZE as f32).round() as isize;
Coordinate { x, y }
}
}
/// Holds a chunk of the terrain
struct Chunk {
coordinate: Coordinate,
mesh: Vec<Point>,
}
impl Chunk {
/// Returns a chunk with the given id
fn new(x: isize, y: isize) -> Chunk {
let coordinate = Coordinate { x, y };
let mesh = Vec::new();
Chunk { coordinate, mesh }
}
fn generate(&mut self, noise: Arc<Fbm>) {
println!("Generating chunk: ({}, {})", self.coordinate.x, self.coordinate.y);
for z in (-1 * CHUNK_RADIUS)..CHUNK_RADIUS {
for x in (-1 * CHUNK_RADIUS)..CHUNK_RADIUS {
let x_loc = x as f64 * BLOCK_SIZE;
let z_loc = z as f64 * BLOCK_SIZE;
let y_loc = noise.get([x_loc, z_loc]) as f64;
self.mesh.push( (x_loc as f32, y_loc as f32, z_loc as f32) );
}
}
}
/// Will load a chunk if it's saved otherwise will generate it
fn start_up(&mut self, noise: Arc<Fbm>) {
self.generate(noise);
}
fn save(&self) -> std::io::Result<()> {
println!("Saving chunk: ({}, {})", self.coordinate.x, self.coordinate.y);
let file_name = format!("./data/x{}y{}.obj", self.coordinate.x, self.coordinate.y);
let mut file = File::create(file_name)?;
        for point in &self.mesh {
            let line = format!("v {} {} {}\n", point.0, point.1, point.2);
            // Propagate write errors instead of silently dropping the Result.
            file.write_all(line.as_bytes())?;
        }
Ok(())
}
fn load(&mut self) {
}
/// Updates the chunk
fn update(&mut self) {
}
/// Renders the chunk to screen
fn draw(&self) {
}
}
/// A continuously streaming terrain generator
pub struct Terrain {
chunks: Vec<Arc<Mutex<Chunk>>>,
noise: Arc<Fbm>,
}
impl Terrain {
/// Returns a new Terrain object that can be added to the engine
pub fn new(point: Point, radius: isize) -> Terrain {
let mut chunks = Vec::new();
let origin: Coordinate = point.into();
let noise = Arc::new(Fbm::new());
for y in (-1 * radius)..=radius {
for x in (-1 * radius)..=radius {
let chunk = Arc::new(Mutex::new(
Chunk::new(x + origin.x, y + origin.y)
));
chunks.push(chunk);
}
}
Terrain { chunks, noise }
}
}
impl EngineObject for Terrain {
/// Loads or generates each chunk within render distance
fn start_up(&mut self, core: &Core) {
for chunk in &self.chunks {
let chunk = chunk.clone();
let noise = self.noise.clone();
core.execute(move || {
let mut chunk = chunk.lock().unwrap();
chunk.start_up(noise);
})
}
}
/// Saves each chunk to a save folder
fn save(&self, core: &Core) {
for chunk in &self.chunks {
let chunk = chunk.clone();
core.execute(move || {
let chunk = chunk.lock().unwrap();
chunk.save().unwrap();
});
}
}
/// Loads each chunk from a save folder
fn load(&mut self, core: &Core) {
for chunk in &self.chunks {
let chunk = chunk.clone();
core.execute(move || {
let mut chunk = chunk.lock().unwrap();
chunk.load();
});
}
}
/// Updates each Chunk using a ThreadPool
fn update(&mut self, core: &Core) {
for chunk in &self.chunks {
let chunk = chunk.clone();
core.execute(move || {
let mut chunk = chunk.lock().unwrap();
chunk.update();
});
}
}
/// Renders each Chunk using a ThreadPool
fn draw(&self, core: &Core) {
for chunk in &self.chunks {
let chunk = chunk.clone();
core.execute(move || {
let chunk = chunk.lock().unwrap();
chunk.draw();
});
}
}
}
| true |
a40630593381c32f6a76a6014b3c3174a9c4fc77
|
Rust
|
DForshner/RustExperiments
|
/bevy_event_loss/src/main.rs
|
UTF-8
| 3,376 | 3.15625 | 3 |
[
"Unlicense"
] |
permissive
|
use bevy::prelude::*;
use bevy_diagnostic::{Diagnostic, DiagnosticId, Diagnostics};
// Purposely creating the pitfall where you can miss events because the
// receiver doesn't handle the emitted events within 2 frames.
//
// From Bevy Cookbook:
// Events don't persist. They are stored until the end of the next frame, after which they are lost.
// If your systems do not handle events every frame, you could miss some.
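// Added sketch (assumptions: a Bevy 0.6-era `Events<T>` API; exact module paths
// may differ by version, and this snippet is not part of the original example):
// `Events<T>` is double-buffered and `update()` drops the older buffer, which is
// why a reader that polls less often than every other update can miss events.
//
// use bevy::ecs::event::Events;
// let mut events = Events::<MyEvent>::default();
// events.send(MyEvent { message: "hello".into() });
// events.update(); // still readable by a reader created before the send
// events.update(); // buffers rotated again: the event is gone for everyone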
pub const EMITTED_COUNT: DiagnosticId =
DiagnosticId::from_u128(21302464753369276838568507794995836440);
pub const RECEIVED_COUNT: DiagnosticId =
DiagnosticId::from_u128(21302838753369276838568507794995836880);
const TIMESTEP_SLOW: f64 = 5.0 / 60.0;
const TIMESTEP_FAST: f64 = 1.0 / 60.0;
fn main() {
use bevy::core::FixedTimestep;
use bevy_diagnostic::{DiagnosticsPlugin, LogDiagnosticsPlugin};
App::new()
.add_plugins(DefaultPlugins)
.add_plugin(DiagnosticsPlugin::default())
.add_plugin(LogDiagnosticsPlugin::default())
.add_event::<MyEvent>()
.init_resource::<EventTriggerState>()
.init_resource::<DiagnosticState>()
.add_startup_system(startup_system)
.add_system_set(
SystemSet::new()
                // Run the event emitter every 1/60 sec (TIMESTEP_FAST)
.with_run_criteria(FixedTimestep::step(TIMESTEP_FAST))
.with_system(event_trigger_system),
)
.add_system_set(
SystemSet::new()
                // Run the event listener only every 5/60 sec (TIMESTEP_SLOW), slow enough to miss events
.with_run_criteria(FixedTimestep::step(TIMESTEP_SLOW))
.with_system(event_listener_system),
)
.run();
}
fn startup_system(mut diagnostics: ResMut<Diagnostics>) {
diagnostics.add(Diagnostic::new(EMITTED_COUNT, "emitted", 3));
diagnostics.add(Diagnostic::new(RECEIVED_COUNT, "received", 3));
}
struct MyEvent {
pub message: String,
}
struct EventTriggerState {
event_timer: Timer,
}
impl Default for EventTriggerState {
fn default() -> Self {
EventTriggerState {
event_timer: Timer::from_seconds(1.0, true),
}
}
}
#[derive(Default)]
struct DiagnosticState {
events_emitted: usize,
events_received: usize,
}
fn event_trigger_system(
mut diagnostics: ResMut<Diagnostics>,
time: Res<Time>,
mut state: ResMut<EventTriggerState>,
mut diagnostic_state: ResMut<DiagnosticState>,
mut my_events: EventWriter<MyEvent>,
) {
//info!("event_trigger_system");
if state.event_timer.tick(time.delta()).finished() {
diagnostic_state.events_emitted += 1;
diagnostics.add_measurement(EMITTED_COUNT, diagnostic_state.events_emitted as f64);
my_events.send(MyEvent {
message: format!(
"The event! total emitted: [{}]",
diagnostic_state.events_emitted,
),
});
}
}
// prints events as they come in
fn event_listener_system(
mut diagnostics: ResMut<Diagnostics>,
mut diagnostic_state: ResMut<DiagnosticState>,
mut events: EventReader<MyEvent>,
) {
    diagnostics.add_measurement(RECEIVED_COUNT, diagnostic_state.events_received as f64);
for my_event in events.iter() {
diagnostic_state.events_received += 1;
info!(
"{} total received: [{}]",
my_event.message, diagnostic_state.events_received
);
}
}
| true |
7129e3f403fa0a22beb1c75372d03ed4e6327b44
|
Rust
|
pnkfelix/rust
|
/src/test/ui/traits/suggest-fully-qualified-path-with-appropriate-params.rs
|
UTF-8
| 507 | 3.234375 | 3 |
[
"Apache-2.0",
"MIT",
"BSD-3-Clause",
"BSD-2-Clause",
"LicenseRef-scancode-other-permissive",
"NCSA"
] |
permissive
|
struct Thing;
trait Method<T> {
fn method(&self) -> T;
fn mut_method(&mut self) -> T;
}
impl Method<i32> for Thing {
fn method(&self) -> i32 { 0 }
fn mut_method(&mut self) -> i32 { 0 }
}
impl Method<u32> for Thing {
fn method(&self) -> u32 { 0 }
fn mut_method(&mut self) -> u32 { 0 }
}
fn main() {
let thing = Thing;
thing.method();
//~^ ERROR type annotations needed
//~| ERROR type annotations needed
thing.mut_method(); //~ ERROR type annotations needed
}
| true |
7cecda0b9603649949d112a8df00559d214485bf
|
Rust
|
jFransham/quake-level-loader
|
/src/macros.rs
|
UTF-8
| 4,650 | 2.671875 | 3 |
[
"Unlicense"
] |
permissive
|
macro_rules! to_3_arr {
($itr:expr) => {{
let mut i = $itr.into_iter();
[
i.next().unwrap(),
i.next().unwrap(),
i.next().unwrap(),
]
}};
}
macro_rules! to_8_arr {
($itr:expr) => {{
let mut i = $itr.into_iter();
[
i.next().unwrap(), i.next().unwrap(),
i.next().unwrap(), i.next().unwrap(),
i.next().unwrap(), i.next().unwrap(),
i.next().unwrap(), i.next().unwrap(),
]
}};
}
macro_rules! to_9_arr {
($itr:expr) => {{
let mut i = $itr.into_iter();
[
i.next().unwrap(), i.next().unwrap(), i.next().unwrap(),
i.next().unwrap(), i.next().unwrap(), i.next().unwrap(),
i.next().unwrap(), i.next().unwrap(), i.next().unwrap(),
]
}};
}
macro_rules! itry {
($res:expr) => {
match $res {
Done(rest, result) => {
(rest, result)
},
Error(e) => {
return Error(e)
},
Incomplete(needed) => {
return Incomplete(needed)
}
}
}
}
macro_rules! get_from_header {
($bytes:expr, $field:expr, $fun:expr, $t:ty) => {{
use std::mem::size_of;
get_from_header!($bytes, $field, $fun, $t, size_of::<$t>())
}};
($bytes:expr, $field:expr, $fun:expr, $t:ty, $size:expr) => {{
let start = $field.offset as usize;
let slice = &$bytes[start..];
itry!(
parse_vec::<$t>(
slice,
$fun,
$field.size as usize / $size
)
)
}}
}
macro_rules! maybe_from_header {
($bytes:expr, $field:expr, $fun:ident, $t:ty) => {{
let start = $field.offset as usize;
let end = ($field.offset + $field.size) as usize;
let slice = &$bytes[start..end];
itry!(
opt!(slice, complete!($fun))
)
}}
}
macro_rules! consume_from_header {
($bytes:expr, $field:expr, $fun:expr, $t:ty) => {{
let start = $field.offset as usize;
let end = start + $field.size as usize;
let slice = &$bytes[start..end];
itry!(
consume_to_vec::<$t>(
slice,
$fun
)
)
}}
}
macro_rules! take_s {
($count:expr) => {{
|i: &[u8]| take_s!(i, $count)
}};
($i:expr, $count:expr) => {{
use nom::ErrorKind;
match take!($i, $count) {
Done(rest, arr) =>
if let Ok(s) = String::from_utf8(
arr.into_iter()
.map(|&a| a)
.take_while(|&c| c != 0)
.collect::<Vec<_>>()
) {
Done(
rest,
s
)
} else {
Error(Err::Code(ErrorKind::Custom(0)))
},
Error(e) => {
Error(e)
},
Incomplete(needed) => {
Incomplete(needed.into())
}
}
}};
}
macro_rules! take_s_until {
($c:expr) => {{
|i: &[u8]| take_s!(i, $c)
}};
($i:expr, $c:expr) => {{
use nom::ErrorKind;
match take_until!($i, $c) {
Done(rest, arr) =>
if let Ok(s) = String::from_utf8(
arr.into_iter()
.map(|&a| a)
.take_while(|&c| c != 0)
.collect::<Vec<_>>()
) {
Done(
rest,
s
)
} else {
Error(Err::Code(ErrorKind::Custom(0)))
},
Error(e) => {
Error(e)
},
Incomplete(needed) => {
Incomplete(needed.into())
}
}
}};
}
macro_rules! take_exact {
($count:expr) => {{
|i: &[u8]| take_exact!(i, $count)
}};
($slice:expr, $count:expr) => {{
let taken = take!($slice, $count);
match taken {
Done(rest, result) => {
let mut array = [0u8; $count];
for (&x, p) in result.iter().zip(array.iter_mut()) {
*p = x;
}
Done(rest, array)
},
Error(e) => {
Error(e)
},
Incomplete(needed) => {
Incomplete(needed.into())
}
}
}}
}
| true |
cfcd78119b3ec7c6371a6bd319b1f79d21e9a661
|
Rust
|
earthengine/namable_closures
|
/src/stable_fn.rs
|
UTF-8
| 2,541 | 2.9375 | 3 |
[] |
no_license
|
/// Replicates the Fn traits for stable build
pub trait StableFnOnce<Input> {
type Output;
fn stable_call_once(self,args:Input) -> Self::Output;
}
/// Replicates the Fn traits for stable build
pub trait StableFnMut<Input>: StableFnOnce<Input> {
fn stable_call_mut(&mut self,args:Input) -> Self::Output;
}
/// Replicates the Fn traits for stable build
pub trait StableFn<Input>:StableFnMut<Input> {
fn stable_call(&self,args:Input) -> Self::Output;
}
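// Added sketch (hypothetical type, not part of this crate): a hand-written
// "closure" on stable Rust implements these traits directly, for example:
//
// struct AddN(i32);
// impl StableFnOnce<(i32,)> for AddN {
//     type Output = i32;
//     fn stable_call_once(self, (x,): (i32,)) -> i32 { x + self.0 }
// }
//
// AddN(2).stable_call_once((40,)) == 42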
pub fn as_cloning_stable_fn<Input,Output>(f: impl StableFnOnce<Input,Output=Output> + Clone)
-> impl StableFn<Input,Output=Output>
{
struct Wrapper<T>(T);
impl<Input,Output,T> StableFnOnce<Input> for Wrapper<T>
where
T: StableFnOnce<Input,Output=Output>
{
type Output = Output;
fn stable_call_once(self, args:Input) -> Output {
let Wrapper(t) = self;
t.stable_call_once(args)
}
}
impl<Input,Output,T> StableFnMut<Input> for Wrapper<T>
where
T: StableFnOnce<Input,Output=Output> + Clone
{
fn stable_call_mut(&mut self, args:Input) -> Output {
let Wrapper(t) = self;
t.clone().stable_call_once(args)
}
}
impl<Input,Output,T> StableFn<Input> for Wrapper<T>
where
T: StableFnOnce<Input,Output=Output> + Clone
{
fn stable_call(&self, args:Input) -> Output {
let Wrapper(t) = self;
t.clone().stable_call_once(args)
}
}
Wrapper(f)
}
#[cfg(feature="nightly")]
pub fn as_cloning_fn<Input,Output>(f: impl FnOnce<Input,Output=Output> + Clone)
-> impl Fn<Input,Output=Output>
{
struct Wrapper<T>(T);
impl<Input,Output,T> FnOnce<Input> for Wrapper<T>
where
T: FnOnce<Input,Output=Output>
{
type Output = Output;
extern "rust-call" fn call_once(self, args:Input) -> Output {
let Wrapper(t) = self;
t.call_once(args)
}
}
impl<Input,Output,T> FnMut<Input> for Wrapper<T>
where
T: FnOnce<Input,Output=Output> + Clone
{
extern "rust-call" fn call_mut(&mut self, args:Input) -> Output {
let Wrapper(t) = self;
t.clone().call_once(args)
}
}
impl<Input,Output,T> StableFn<Input> for Wrapper<T>
where
T: StableFnOnce<Input,Output=Output> + Clone
{
extern "rust-call" fn call(&self, args:Input) -> Output {
let Wrapper(t) = self;
t.clone().call_once(args)
}
}
Wrapper(f)
}
| true |
5011eb85aed92ce1566e84cf58ed0c86b58f362d
|
Rust
|
robertgoss/aoc-2020
|
/src/passwords.rs
|
UTF-8
| 2,663 | 3.515625 | 4 |
[] |
no_license
|
#[derive(Copy, Clone)]
pub enum RuleSet {
Sled,
Toboggan
}
struct Verification {
letter : char,
max : usize,
min : usize
}
fn parse_range(string : &str) -> Option<(usize, usize)> {
let parts = string.split_once('-').map(
|(min, max)| (min.parse::<usize>(), max.parse::<usize>())
);
match parts {
Some((Ok(min), Ok(max))) => Some((min, max)),
_ => None
}
}
impl Verification {
pub fn from_string(string : &str) -> Option<Verification> {
let parts = string.split_once(' ').map(
|(range, ch)| (parse_range(range), ch.chars().next())
);
match parts {
Some((Some((min, max)), Some(ch))) => Some(
Verification {
letter : ch,
max : max,
min : min
}
),
_ => None
}
}
pub fn verify(self : &Self, string : &str, rules : RuleSet) -> bool {
match rules {
RuleSet::Sled => self.verify_sled(string),
RuleSet::Toboggan => self.verify_toboggan(string)
}
}
pub fn verify_sled(self : &Self, string : &str) -> bool {
let char_count = string.chars().filter(
|&ch| ch == self.letter
).count();
char_count >= self.min && char_count <= self.max
}
pub fn verify_toboggan(self : &Self, string : &str) -> bool {
// Seeif the elements on the bounds are as req handle out of bounds
let min_has = string.chars().nth(self.min - 1).map(
|ch| ch == self.letter
).unwrap_or(false);
let max_has = string.chars().nth(self.max - 1).map(
|ch| ch == self.letter
).unwrap_or(false);
        // Exactly one of the two positions (min xor max) may contain the letter
min_has ^ max_has
}
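    // Added worked example (uses the well-known "1-3 a" / "abcde" rule from the
    // puzzle statement, not data from this repository): min = 1, max = 3,
    // letter = 'a'; position 1 holds 'a' and position 3 holds 'c', so exactly
    // one bound matches and the password is valid under the Toboggan rules.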
}
pub struct Database {
passwords : Vec<(Verification, String)>
}
impl Database {
pub fn new() -> Database {
Database { passwords : Vec::new() }
}
pub fn add_line(self : &mut Self, line : &str) {
let parts = line.split_once(':').map(
|(validation, password)| (
Verification::from_string(validation),
password.strip_prefix(" ")
)
);
// Add if everything valid
match parts {
Some((Some(validation), Some(password))) => self.passwords.push((validation, password.to_string())),
_ => ()
}
}
pub fn count_valid(self : &Self, rules : RuleSet) -> usize {
self.passwords.iter().filter(
|(verification, password)| verification.verify(password, rules)
).count()
}
}
| true |
5c5c725adea5a6ab50515cdac7ebb4171a596f74
|
Rust
|
artran/ray-tracer
|
/src/shape.rs
|
UTF-8
| 3,527 | 3.328125 | 3 |
[] |
no_license
|
use std::fmt::Debug;
use crate::color::Color;
use crate::light::PointLight;
use crate::material::Material;
use crate::matrix::Matrix;
use crate::ray::Ray;
use crate::vector4::Vector4;
pub trait Shape {
fn material(&self) -> &Material;
fn transformation(&self) -> Matrix<4>;
fn inv_transform(&self) -> &Matrix<4>;
fn intersect(&self, ray: &Ray) -> Vec<f32> {
let transformed_ray = ray.transform(&self.inv_transform());
self.local_intersect(&transformed_ray)
}
fn local_intersect(&self, ray: &Ray) -> Vec<f32>;
fn normal_at(&self, world_point: &Vector4) -> Vector4 {
let object_point = *self.inv_transform() * *world_point;
let local_normal = self.local_normal_at(object_point);
let mut world_normal = self.inv_transform().transpose() * local_normal;
world_normal.w = 0.0;
(world_normal).normalize()
}
fn local_normal_at(&self, world_point: Vector4) -> Vector4;
// TODO: Put a default implementation here
fn lighting(
&self,
light: &PointLight,
point: Vector4,
eye_vector: Vector4,
normal_vector: Vector4,
in_shadow: bool,
) -> Color;
}
impl PartialEq for dyn Shape {
fn eq(&self, other: &Self) -> bool {
self.material() == other.material() && self.transformation() == other.transformation()
}
}
impl Debug for dyn Shape {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_struct("Shape")
.field("material", &self.material())
.field("transform", &self.transformation())
.finish()
}
}
/* -------------------------------------------------------------------------------------------------
Tests
------------------------------------------------------------------------------------------------- */
#[cfg(test)]
mod tests {
use rstest::*;
use spectral::prelude::*;
use super::*;
use crate::material::MaterialBuilder;
use crate::sphere::SphereBuilder;
use crate::transform::Transform;
#[fixture]
fn test_shape() -> impl Shape {
SphereBuilder::new().build()
}
#[rstest]
fn the_default_transformation(test_shape: impl Shape) {
assert_that!(test_shape.transformation()).is_equal_to(&Matrix::identity());
}
#[rstest]
fn assigning_a_transformation() {
let test_shape = SphereBuilder::new()
.with_transform(Matrix::translation(2.0, 3.0, 4.0))
.build();
assert_that!(test_shape.transformation()).is_equal_to(&Matrix::translation(2.0, 3.0, 4.0));
}
#[rstest]
fn the_default_material(test_shape: impl Shape) {
assert_that!(test_shape.material()).is_equal_to(&MaterialBuilder::new().build());
}
#[rstest]
fn assigning_a_material() {
let m = MaterialBuilder::new().with_ambient(1.0).build();
let test_shape = SphereBuilder::new().with_material(m.clone()).build();
assert_that!(test_shape.material()).is_equal_to(&m);
}
#[rstest]
fn intersecting_a_scaled_shape_with_a_ray() {
let r = Ray::new(
Vector4::point(0.0, 0.0, -5.0),
Vector4::vector(0.0, 0.0, 1.0),
);
let s = SphereBuilder::new()
.with_transform(Matrix::scaling(2.0, 2.0, 2.0))
.build();
let xs = s.intersect(&r);
assert_that!(xs.len()).is_equal_to(2);
assert_that!(xs[0]).is_equal_to(3.0);
assert_that!(xs[1]).is_equal_to(7.0);
}
}
| true |
b736cfe656d06136f96a72649523ad352d47b4c4
|
Rust
|
servo/green-rs
|
/librustuv/tests/async.rs
|
UTF-8
| 807 | 2.625 | 3 |
[
"Apache-2.0",
"MIT"
] |
permissive
|
use green::{Callback, RemoteCallback};
use rustuv::Async;
// Make sure that we can fire watchers in remote threads and that they
// actually trigger what they say they will.
test!(fn smoke_test() {
struct MyCallback(Option<Sender<int>>);
impl Callback for MyCallback {
fn call(&mut self) {
// this can get called more than once, but we only want to send
// once
let MyCallback(ref mut s) = *self;
match s.take() {
Some(s) => s.send(1),
None => {}
}
}
}
let (tx, rx) = channel();
let cb = box MyCallback(Some(tx));
let watcher = Async::new(cb).unwrap();
spawn(proc() {
let mut watcher = watcher;
watcher.fire();
});
assert_eq!(rx.recv(), 1);
})
| true |
182245288a6190645e47017267a6d4050f486d88
|
Rust
|
DoumanAsh/clipboard-script
|
/src/lib.rs
|
UTF-8
| 808 | 3.03125 | 3 |
[
"Apache-2.0"
] |
permissive
|
#[inline(always)]
///Returns whether text contains JP.
pub fn is_jp<T: AsRef<str>>(text: T) -> bool {
let text = text.as_ref();
text.chars().any(|elem_char| match elem_char { '\u{3000}'..='\u{303f}'| //punctuation
'\u{3040}'..='\u{309f}'| //hiragana
'\u{30a0}'..='\u{30ff}'| //katakana
'\u{ff00}'..='\u{ffef}'| //roman characters
'\u{4e00}'..='\u{9faf}'| //common kanji
'\u{3400}'..='\u{4dbf}' //rare kanji
=> true,
_ => false,
})
}
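// Illustrative usage (added; not part of the original crate):
//
// assert!(is_jp("flagはこちら"));      // hiragana falls in U+3040..=U+309F
// assert!(!is_jp("plain ASCII only")); // no characters in the JP ranges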
| true |
c2e5ff0548e986eef92d4b0d3f8ce87806e73860
|
Rust
|
defuz/sublimate
|
/src/core/command.rs
|
UTF-8
| 1,285 | 3.203125 | 3 |
[
"MIT",
"Apache-2.0"
] |
permissive
|
use std::hash::{Hash, Hasher};
use core::settings::{Settings, SettingsObject, ParseSettings};
use self::ParseCommandError::*;
#[derive(Debug, PartialEq, Clone)]
pub struct Command {
pub name: String,
pub args: SettingsObject
}
impl Eq for Command {}
impl Hash for Command {
fn hash<H: Hasher>(&self, state: &mut H) {
self.name.hash(state);
}
}
#[derive(Debug)]
pub enum ParseCommandError {
CommandIsNotObject,
CommandNameIsNotString,
CommandArgsIsNotObject,
}
impl ParseSettings for Command {
type Error = ParseCommandError;
fn parse_settings(settings: Settings) -> Result<Command, Self::Error> {
let mut obj = match settings {
Settings::Object(obj) => obj,
_ => return Err(CommandIsNotObject),
};
let name = match obj.remove("command") {
Some(Settings::String(name)) => name,
_ => return Err(CommandNameIsNotString),
};
let args = match obj.remove("args") {
Some(Settings::Object(args)) => args,
None => SettingsObject::default(),
_ => return Err(CommandArgsIsNotObject),
};
// TODO: check obj is empty
Ok(Command {
name: name,
args: args
})
}
}
| true |
1accf62634478041b3e5dc797b45b922de4692dd
|
Rust
|
jstarry/solana
|
/sdk/program/src/sysvar/slot_hashes.rs
|
UTF-8
| 1,248 | 2.625 | 3 |
[
"Apache-2.0"
] |
permissive
|
//! named accounts for synthesized data accounts for bank state, etc.
//!
//! this account carries the Bank's most recent bank hashes for some N parents
//!
pub use crate::slot_hashes::SlotHashes;
use crate::{account_info::AccountInfo, program_error::ProgramError, sysvar::Sysvar};
crate::declare_sysvar_id!("SysvarS1otHashes111111111111111111111111111", SlotHashes);
impl Sysvar for SlotHashes {
// override
fn size_of() -> usize {
        // hard-coded so that we don't have to construct an empty SlotHashes just to measure it
20_488 // golden, update if MAX_ENTRIES changes
}
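    // Added breakdown of the constant above (assuming MAX_ENTRIES == 512):
    // bincode writes the Vec length as a u64 (8 bytes) followed by 512 entries
    // of (Slot: u64 = 8 bytes, Hash = 32 bytes), i.e. 8 + 512 * 40 = 20_488.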
fn from_account_info(_account_info: &AccountInfo) -> Result<Self, ProgramError> {
// This sysvar is too large to bincode::deserialize in-program
Err(ProgramError::UnsupportedSysvar)
}
}
#[cfg(test)]
mod tests {
use {
super::*,
crate::{clock::Slot, hash::Hash, slot_hashes::MAX_ENTRIES},
};
#[test]
fn test_size_of() {
assert_eq!(
SlotHashes::size_of(),
bincode::serialized_size(
&(0..MAX_ENTRIES)
.map(|slot| (slot as Slot, Hash::default()))
.collect::<SlotHashes>()
)
.unwrap() as usize
);
}
}
| true |
83da822e878f54deae303ea39ee11358b06033b2
|
Rust
|
EFanZh/LeetCode
|
/src/problem_1138_alphabet_board_path/mod.rs
|
UTF-8
| 1,001 | 3.234375 | 3 |
[] |
no_license
|
pub mod iterative;
pub trait Solution {
fn alphabet_board_path(target: String) -> String;
}
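// Added note on the expected lengths in the test below (derived, not from the
// original test): each letter sits at row (c - 'a') / 5, column (c - 'a') % 5,
// and a minimal path spends one move per unit of Manhattan distance plus one
// '!' per letter. For "leet": start→l = 3, l→e = 5, e→e = 0, e→t = 3 moves,
// plus 4 presses, giving 15; "code" works out to 10 + 4 = 14.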
#[cfg(test)]
mod tests {
use super::Solution;
pub fn run<S: Solution>() {
let test_cases = [("leet", 15), ("code", 14)];
let board = ["abcde", "fghij", "klmno", "pqrst", "uvwxy", "z"];
let mut buffer = String::new();
for (target, expected) in test_cases {
let result = S::alphabet_board_path(target.to_string());
assert_eq!(result.len(), expected);
let mut row = 0;
let mut column = 0;
for c in result.bytes() {
match c {
b'U' => row -= 1,
b'D' => row += 1,
b'L' => column -= 1,
b'R' => column += 1,
_ => buffer.push(char::from(board[row].as_bytes()[column])),
}
}
assert_eq!(buffer, target);
buffer.clear();
}
}
}
| true |
0e9e35afe6973850e6c795b22c4ba17c2a726841
|
Rust
|
syousif94/clubby
|
/api/src/users/routes.rs
|
UTF-8
| 2,886 | 2.75 | 3 |
[] |
no_license
|
use crate::cities::City;
use crate::users::*;
use serde_json::json;
use warp::{Filter, Rejection, Reply};
async fn sign_up(req: SignUpRequest) -> Result<impl Reply, Rejection> {
req.validate().await?;
let token = encode_sign_up_request(&req)?;
send_login_code(&req.phone).await?;
let res = SignUpResponse { token };
Ok(warp::reply::json(&res))
}
async fn complete_sign_up(req: ConfirmSignUpRequest) -> Result<impl Reply, Rejection> {
let signup_claims = decode_sign_up_request(&req)?;
let city = City::from_place_id(&signup_claims.city).await?;
let user_data = UserData {
name: signup_claims.name,
phone: signup_claims.phone,
photo: signup_claims.photo,
city_id: city.id,
};
let user = user_data.create().await?;
let token = user.token()?;
let res = LoginResponse {
user: user_data,
city,
token,
};
Ok(warp::reply::json(&res))
}
async fn login(req: LoginCodeRequest) -> Result<impl Reply, Rejection> {
req.validate().await?;
send_login_code(&req.phone).await?;
Ok(warp::reply::json(&json!({})))
}
async fn complete_login(req: ConfirmCodeRequest) -> Result<impl Reply, Rejection> {
validate_login_code(&req.phone, &req.code).await?;
let user = User::from_phone(&req.phone).await?;
let token = user.token()?;
let city = City::from_id(&user.city_id).await?;
let res = LoginResponse {
user: user.into(),
city,
token: token.clone(),
};
let reply = warp::reply::json(&res);
let cookie_header = "set-cookie";
let cookie_value = format!(
"token={}; Max-Age=31556952; Secure; HttpOnly; Path=/",
&token
);
let reply_with_header = warp::reply::with_header(reply, cookie_header, cookie_value);
Ok(reply_with_header)
}
pub fn handler() -> warp::filters::BoxedFilter<(impl Reply,)> {
let cities = warp::path("users");
let sign_up_route = warp::path("signup")
.and(warp::post())
.and(warp::path::end())
.and(warp::body::json())
.and_then(sign_up);
let confirm_sign_up_route = warp::path("signup")
.and(warp::path("complete"))
.and(warp::post())
.and(warp::path::end())
.and(warp::body::json())
.and_then(complete_sign_up);
let login_route = warp::path("login")
.and(warp::post())
.and(warp::path::end())
.and(warp::body::json())
.and_then(login);
let confirm_login_route = warp::path("login")
.and(warp::path("complete"))
.and(warp::post())
.and(warp::path::end())
.and(warp::body::json())
.and_then(complete_login);
cities
.and(
sign_up_route
.or(confirm_sign_up_route)
.or(login_route)
.or(confirm_login_route),
)
.boxed()
}
| true |
6587f2c73a65bd07ebbe9b5f726637feece7b407
|
Rust
|
j4qfrost/fork-the-game
|
/src/lib.rs
|
UTF-8
| 3,484 | 2.59375 | 3 |
[
"MIT"
] |
permissive
|
use log::error;
use neovide_plugin::*;
use skulpin::winit::event::{
ElementState, ModifiersState, MouseButton, MouseScrollDelta, WindowEvent,
};
use skulpin::winit::event_loop::{ControlFlow, EventLoopProxy};
use skulpin::winit::window::Window;
use skulpin::{winit::dpi::LogicalSize, Renderer as SkulpinRenderer, WinitWindow};
mod renderer;
use renderer::*;
pub mod game;
use game::*;
// pub mod deno;
// pub mod python;
mod utils;
#[derive(Default)]
pub struct Fork {
window: Option<Window>,
saved_handle: Option<Box<dyn WindowHandle>>,
game: Game,
renderer: Renderer,
modifiers: ModifiersState,
}
impl Fork {
pub fn save_handle(&mut self, handle: Box<dyn WindowHandle>) {
self.saved_handle = Some(handle);
}
}
impl NeovideEventProcessor for Fork {
fn process_event(
&mut self,
e: WindowEvent,
_proxy: &EventLoopProxy<NeovideEvent>,
) -> Option<ControlFlow> {
match e {
WindowEvent::CloseRequested => {
return Some(ControlFlow::Exit);
}
// WindowEvent::DroppedFile(path) => {}
WindowEvent::KeyboardInput { input, .. } => {
self.game.send(input.virtual_keycode, input.state);
}
WindowEvent::ModifiersChanged(m) => {
self.modifiers.set(m, true);
}
// WindowEvent::CursorMoved { position, .. } => {}
WindowEvent::MouseWheel {
delta: MouseScrollDelta::LineDelta(_x, _y),
..
} => {}
WindowEvent::MouseInput {
button: MouseButton::Left,
state,
..
} => {
if state == ElementState::Pressed {
} else {
unimplemented!();
}
}
// WindowEvent::Focused(focus) => {}
// WindowEvent::Resized(size) => {
// let scale_factor = self.window.as_ref().unwrap().scale_factor();
// self.renderer.logical_size = size.to_logical(scale_factor);
// }
_ => {}
}
None
}
}
impl WindowHandle for Fork {
fn init() -> Self
where
Self: Sized,
{
Fork::default()
}
fn window(&mut self) -> Window {
self.window.take().unwrap()
}
fn set_window(&mut self, window: Window) {
self.window = Some(window);
}
fn logical_size(&self) -> LogicalSize<u32> {
self.renderer.logical_size
}
fn update(&mut self) -> bool {
self.game
.schedule
.execute(&mut self.game.world, &mut self.game.resources);
true
}
fn should_draw(&self) -> bool {
true
}
fn draw(&mut self, skulpin_renderer: &mut SkulpinRenderer) -> bool {
if self.should_draw() {
let renderer = &mut self.renderer;
let game = &self.game;
let window = WinitWindow::new(&self.window.as_ref().unwrap());
let error = skulpin_renderer
.draw(&window, |canvas, coordinate_system_helper| {
for _ in 0..3 {
renderer.draw(canvas, &coordinate_system_helper, game);
}
})
.is_err();
if error {
error!("Render failed. Closing");
return false;
}
}
true
}
}
| true |
d8bf3db7e2120f893207fb5637d96c37da2160f7
|
Rust
|
gz/rust-fallible-iterator
|
/src/test.rs
|
UTF-8
| 4,551 | 3.375 | 3 |
[
"Apache-2.0",
"MIT"
] |
permissive
|
use core::iter;
use super::{convert, FallibleIterator, Vec};
#[test]
fn all() {
assert!(convert([0, 1, 2, 3].iter().map(Ok::<&u32, ()>)).all(|&i| i < 4).unwrap());
assert!(!convert([0, 1, 2, 4].iter().map(Ok::<&u32, ()>)).all(|&i| i < 4).unwrap());
}
#[test]
fn and_then() {
let it = convert(vec![0, 1, 2, 3, 4].into_iter().map(Ok::<u32, ()>)).and_then(|n| Ok(n * 2));
assert_eq!(it.collect::<Vec<_>>().unwrap(), [0, 2, 4, 6, 8]);
let mut it = convert(vec![0, 1, 2, 3, 4].into_iter().map(Ok::<u32, ()>))
.and_then(|n| {
if n == 2 {
Err(())
} else {
Ok(n * 2)
}
});
assert_eq!(it.next().unwrap().unwrap(), 0);
assert_eq!(it.next().unwrap().unwrap(), 2);
assert_eq!(it.next(), Err(()));
}
#[test]
fn any() {
assert!(convert([0, 1, 2, 3].iter().map(Ok::<&u32, ()>)).any(|&i| i == 3).unwrap());
assert!(!convert([0, 1, 2, 4].iter().map(Ok::<&u32, ()>)).any(|&i| i == 3).unwrap());
}
#[test]
fn chain() {
let a = convert(vec![0, 1, 2, 3].into_iter().map(Ok::<u32, ()>));
let b = convert(vec![4, 5, 6, 7].into_iter().map(Ok::<u32, ()>));
let it = a.chain(b);
assert_eq!(it.collect::<Vec<_>>().unwrap(), [0, 1, 2, 3, 4, 5, 6, 7]);
let a = convert(vec![0, 1, 2, 3].into_iter().map(Ok::<u32, ()>));
let b = convert(vec![4, 5, 6, 7].into_iter().map(Ok::<u32, ()>));
let it = a.chain(b).rev();
assert_eq!(it.collect::<Vec<_>>().unwrap(), [7, 6, 5, 4, 3, 2, 1, 0]);
}
#[test]
fn count() {
assert_eq!(convert([0, 1, 2, 3].iter().map(Ok::<&u32, ()>)).count().unwrap(), 4);
let it = Some(Ok(1)).into_iter().chain(iter::repeat(Err(())));
assert!(convert(it).count().is_err());
}
#[test]
fn enumerate() {
let it = convert(vec![5, 6, 7, 8].into_iter().map(Ok::<u32, ()>)).enumerate();
assert_eq!(it.collect::<Vec<_>>().unwrap(), [(0, 5), (1, 6), (2, 7), (3, 8)]);
}
#[test]
fn filter() {
let it = convert(vec![0, 1, 2, 3].into_iter().map(Ok::<u32, ()>)).filter(|&x| x % 2 == 0);
assert_eq!(it.collect::<Vec<_>>().unwrap(), [0, 2]);
}
#[test]
fn filter_map() {
let it = convert(vec![0, 1, 2, 3].into_iter().map(Ok::<u32, ()>))
.filter_map(|x| {
if x % 2 == 0 {
Some(x + 1)
} else {
None
}
});
assert_eq!(it.collect::<Vec<_>>().unwrap(), [1, 3]);
}
#[test]
fn find() {
let mut it = convert(vec![0, 1, 2, 3].into_iter().map(Ok::<u32, ()>));
assert_eq!(it.find(|x| x % 2 == 1).unwrap(), Some(1));
assert_eq!(it.next().unwrap(), Some(2));
}
#[test]
fn fold() {
let it = convert(vec![0, 1, 2, 3].into_iter().map(Ok::<u32, ()>));
assert_eq!(it.fold(0, |a, b| a + b).unwrap(), 6);
}
#[test]
fn last() {
let it = convert(vec![0, 1, 2, 3].into_iter().map(Ok::<u32, ()>));
assert_eq!(it.last().unwrap(), Some(3));
}
#[test]
fn max() {
let it = convert(vec![0, 3, 1, -10].into_iter().map(Ok::<i32, ()>));
assert_eq!(it.max().unwrap(), Some(3));
}
#[test]
fn max_by_key() {
let it = convert(vec![0, 3, 1, -10].into_iter().map(Ok::<i32, ()>));
assert_eq!(it.max_by_key(|&i| -i).unwrap(), Some(-10));
}
#[test]
fn min() {
let it = convert(vec![0, 3, -10, 1].into_iter().map(Ok::<i32, ()>));
assert_eq!(it.min().unwrap(), Some(-10));
}
#[test]
fn min_by_key() {
let it = convert(vec![0, 3, 1, -10].into_iter().map(Ok::<i32, ()>));
assert_eq!(it.min_by_key(|&i| -i).unwrap(), Some(3));
}
#[test]
fn nth() {
let mut it = convert(vec![0, 1, 2, 3].into_iter().map(Ok::<i32, ()>));
assert_eq!(it.nth(1).unwrap(), Some(1));
assert_eq!(it.nth(0).unwrap(), Some(2));
assert_eq!(it.nth(2).unwrap(), None);
}
#[test]
fn peekable() {
let mut it = convert(vec![0, 1].into_iter().map(Ok::<i32, ()>)).peekable();
assert_eq!(it.peek().unwrap(), Some(&0));
assert_eq!(it.peek().unwrap(), Some(&0));
assert_eq!(it.next().unwrap(), Some(0));
assert_eq!(it.next().unwrap(), Some(1));
assert_eq!(it.peek().unwrap(), None);
assert_eq!(it.next().unwrap(), None);
}
#[test]
fn position() {
let mut it = convert(vec![1, 2, 3, 4].into_iter().map(Ok::<i32, ()>));
assert_eq!(it.position(|n| n == 2).unwrap(), Some(1));
assert_eq!(it.position(|n| n == 3).unwrap(), Some(0));
assert_eq!(it.position(|n| n == 5).unwrap(), None);
}
#[test]
fn take() {
let it = convert(vec![0, 1, 2, 3].into_iter().map(Ok::<i32, ()>)).take(2);
assert_eq!(it.collect::<Vec<_>>().unwrap(), [0, 1]);
}
| true |
d955d0ecadba558b5281e9a316e90f648373db83
|
Rust
|
facebook/hhvm
|
/hphp/hack/src/hh_codegen/common/context.rs
|
UTF-8
| 5,168 | 2.59375 | 3 |
[
"PHP-3.01",
"Zend-2.0",
"MIT"
] |
permissive
|
// Copyright (c) Facebook, Inc. and its affiliates.
//
// This source code is licensed under the MIT license found in the
// LICENSE file in the "hack" directory of this source tree.
use std::collections::VecDeque;
use std::path::Path;
use anyhow::anyhow;
use anyhow::Result;
use hash::IndexMap;
use hash::IndexSet;
use synstructure::Structure;
use crate::common::syn_helpers;
/// A simplified version of `crate::gen_visitor::context::Context`. Contains all
/// of the ASTs for `syn::Item` definitions provided in the constructor, except
/// those whose types are not reachable from the given root type.
pub struct Context {
defs: IndexMap<String, syn::DeriveInput>,
mods: IndexSet<String>,
}
impl Context {
/// Construct a `Context` containing the ASTs of all type definitions
/// reachable from the `root` type. Each type must have a unique name (even
/// if the types are declared in different modules).
pub fn new(files: &[(&Path, Vec<syn::Item>)], root: &str) -> Result<Self> {
Self::with_extern_files(files, &[], root)
}
/// Construct a `Context` containing the ASTs of all type definitions
/// reachable from the `root` type. Each type must have a unique name (even
/// if the types are declared in different modules).
///
/// `extern_files` is used to provide the definitions of types which are
/// declared in `extern_files` and re-exported in `files` (e.g., when using
/// `oxidized_by_ref` for `files`, use `oxidized` for `extern_files`, since
/// `oxidized_by_ref` re-exports types defined in `oxidized`).
pub fn with_extern_files(
files: &[(&Path, Vec<syn::Item>)],
extern_files: &[(&Path, Vec<syn::Item>)],
root: &str,
) -> Result<Self> {
let mut defs = IndexMap::default();
let mut mods = IndexSet::default();
for (filename, items) in files {
eprintln!("Processing {:?}", filename);
for item in items.iter() {
if let Ok(name) = syn_helpers::get_ty_def_name(item) {
if defs.contains_key(&name) {
return Err(anyhow!("Type {} already exists, file {:?}", name, filename));
}
defs.insert(name, item);
}
}
// assuming file name is the module name
mods.insert(
filename
.file_stem()
.and_then(|stem| stem.to_str())
.unwrap()
.into(),
);
}
// The "extern" files provide the definitions of types which were
// imported from the oxidized crate to the oxidized_by_ref crate via
// an extern_types.txt file.
for (filename, items) in extern_files {
eprintln!("Processing extern file {:?}", filename);
for item in items.iter() {
if let Ok(name) = syn_helpers::get_ty_def_name(item) {
// Don't overwrite a definition if one is already there--we
// only need to fill in the ones which are missing (because
// they were re-exported from oxidized).
defs.entry(name).or_insert(item);
}
}
}
let reachable = Self::get_all_tys(&defs, root)?;
let defs = defs
.into_iter()
.filter(|(ty_name, _)| reachable.contains(ty_name.as_str()))
.filter_map(|(ty_name, item)| {
use syn::Item::*;
match item {
Struct(item_struct) => Some((ty_name, item_struct.clone().into())),
Enum(item_enum) => Some((ty_name, item_enum.clone().into())),
_ => None,
}
})
.collect();
Ok(Self { defs, mods })
}
/// Return all the names of modules provided in the `files` argument to
/// `Self::new`. Assumes that each file has the same name as the module it
/// declares (i.e., no mod.rs files).
pub fn modules(&self) -> impl Iterator<Item = &str> {
self.mods.iter().map(|s| s.as_ref())
}
pub fn types(&self) -> impl Iterator<Item = &syn::DeriveInput> {
self.defs.values()
}
pub fn type_structures(&self) -> impl Iterator<Item = Structure<'_>> {
self.types().map(Structure::new)
}
fn get_all_tys(defs: &IndexMap<String, &syn::Item>, root: &str) -> Result<IndexSet<String>> {
let defined_types = defs.keys().map(|s| s.as_str()).collect();
let mut visited = IndexSet::<String>::default();
let mut q = VecDeque::new();
q.push_back(root.into());
while let Some(ty) = q.pop_front() {
let item = defs
.get(&ty)
.ok_or_else(|| anyhow!("Type {} not found", ty))?;
visited.insert(ty);
let deps = syn_helpers::get_dep_tys(&defined_types, item)?;
for d in deps.into_iter() {
if !visited.contains(&d) {
q.push_back(d);
}
}
}
Ok(visited)
}
}
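// Hedged usage sketch (illustrative only, not from the hhvm repo): parse a
// source file with `syn` and build a `Context` rooted at one type. The file
// path and the "Program" root name below are made up for this sketch.
//
// let src = std::fs::read_to_string("oxidized/ast_defs.rs")?;
// let ast = syn::parse_file(&src)?;
// let ctx = Context::new(&[(Path::new("oxidized/ast_defs.rs"), ast.items)], "Program")?;
// for s in ctx.type_structures() {
//     // drive code generation from each reachable type definition
// }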
| true |
78c08e01c6dd7b6c81c5ac62738415fa69b9b7a4
|
Rust
|
DeMille/encrusted
|
/src/rust/instruction.rs
|
UTF-8
| 13,558 | 2.84375 | 3 |
[
"MIT"
] |
permissive
|
use std::fmt;
use std::hash;
enum_from_primitive! {
#[allow(non_camel_case_types)]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum Opcode {
// Two-operand opcodes (2OP)
OP2_1 = 1, OP2_2 = 2, OP2_3 = 3, OP2_4 = 4, OP2_5 = 5, OP2_6 = 6,
OP2_7 = 7, OP2_8 = 8, OP2_9 = 9, OP2_10 = 10, OP2_11 = 11, OP2_12 = 12,
OP2_13 = 13, OP2_14 = 14, OP2_15 = 15, OP2_16 = 16, OP2_17 = 17, OP2_18 = 18,
OP2_19 = 19, OP2_20 = 20, OP2_21 = 21, OP2_22 = 22, OP2_23 = 23, OP2_24 = 24,
OP2_25 = 25, OP2_26 = 26, OP2_27 = 27, OP2_28 = 28,
// One-operand opcodes (1OP)
OP1_128 = 128, OP1_129 = 129, OP1_130 = 130, OP1_131 = 131, OP1_132 = 132,
OP1_133 = 133, OP1_134 = 134, OP1_135 = 135, OP1_136 = 136, OP1_137 = 137,
OP1_138 = 138, OP1_139 = 139, OP1_140 = 140, OP1_141 = 141, OP1_142 = 142,
OP1_143 = 143,
// Zero-operand opcodes (0OP)
OP0_176 = 176, OP0_177 = 177, OP0_178 = 178, OP0_179 = 179, OP0_180 = 180,
OP0_181 = 181, OP0_182 = 182, OP0_183 = 183, OP0_184 = 184, OP0_185 = 185,
OP0_186 = 186, OP0_187 = 187, OP0_188 = 188, OP0_189 = 189, OP0_191 = 191,
// Variable-operand opcodes (VAR)
VAR_224 = 224, VAR_225 = 225, VAR_226 = 226, VAR_227 = 227, VAR_228 = 228,
VAR_229 = 229, VAR_230 = 230, VAR_231 = 231, VAR_232 = 232, VAR_233 = 233,
VAR_234 = 234, VAR_235 = 235, VAR_236 = 236, VAR_237 = 237, VAR_238 = 238,
VAR_239 = 239, VAR_240 = 240, VAR_241 = 241, VAR_242 = 242, VAR_243 = 243,
VAR_244 = 244, VAR_245 = 245, VAR_246 = 246, VAR_247 = 247, VAR_248 = 248,
VAR_249 = 249, VAR_250 = 250, VAR_251 = 251, VAR_252 = 252, VAR_253 = 253,
VAR_254 = 254, VAR_255 = 255,
// Extended opcodes (EXT)
EXT_1000 = 1000, EXT_1001 = 1001, EXT_1002 = 1002, EXT_1003 = 1003,
EXT_1004 = 1004, EXT_1005 = 1005, EXT_1006 = 1006, EXT_1007 = 1007,
EXT_1008 = 1008, EXT_1009 = 1009, EXT_1010 = 1010, EXT_1011 = 1011,
EXT_1012 = 1012, EXT_1013 = 1013, EXT_1016 = 1016, EXT_1017 = 1017,
EXT_1018 = 1018, EXT_1019 = 1019, EXT_1020 = 1020, EXT_1021 = 1021,
EXT_1022 = 1022, EXT_1023 = 1023, EXT_1024 = 1024, EXT_1025 = 1025,
EXT_1026 = 1026, EXT_1027 = 1027, EXT_1028 = 1028, EXT_1029 = 1029,
}
}
#[derive(Debug, PartialEq)]
pub enum OperandType {
Small,
Large,
Variable,
Omitted,
}
impl OperandType {
pub fn from(bytes: &[u8]) -> Vec<OperandType> {
bytes
.iter()
.fold(Vec::new(), |mut acc, n| {
acc.push((n & 0b1100_0000) >> 6);
acc.push((n & 0b0011_0000) >> 4);
acc.push((n & 0b0000_1100) >> 2);
acc.push(n & 0b0000_0011);
acc
})
.into_iter()
.map(|b| match b {
0b00 => OperandType::Large,
0b01 => OperandType::Small,
0b10 => OperandType::Variable,
0b11 => OperandType::Omitted,
_ => unreachable!("Can't get operand type of: {:08b}", b),
})
.take_while(|t| *t != OperandType::Omitted)
.collect()
}
}
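// Added illustrative example (not in the original file): a single operand-type
// byte packs four 2-bit fields, read from the high bits down; a 0b11 field
// (Omitted) terminates the list via the take_while above.
#[cfg(test)]
mod operand_type_examples {
    use super::OperandType;

    #[test]
    fn decodes_until_omitted() {
        // 0b01_00_10_11 => Small, Large, Variable; the trailing 0b11 stops the list.
        let types = OperandType::from(&[0b0100_1011]);
        assert_eq!(
            types,
            vec![
                OperandType::Small,
                OperandType::Large,
                OperandType::Variable
            ]
        );
    }
}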
#[derive(Debug)]
pub enum Operand {
Small(u8),
Large(u16),
Variable(u8),
}
impl fmt::Display for Operand {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
Operand::Small(x) => write!(f, "#{:02x}", x),
Operand::Large(x) => write!(f, "{:04x}", x),
Operand::Variable(x) => match x {
0 => write!(f, "sp"),
1...15 => write!(f, "local{}", x - 1),
_ => write!(f, "g{}", x - 16),
},
}
}
}
#[derive(Debug)]
pub struct Branch {
pub condition: u16,
pub address: Option<usize>,
pub returns: Option<u16>,
}
#[derive(Debug)]
pub struct Instruction {
pub addr: usize,
pub opcode: Opcode,
pub name: String,
pub operands: Vec<Operand>,
pub store: Option<u8>,
pub branch: Option<Branch>,
pub text: Option<String>,
pub next: usize,
}
impl Instruction {
pub fn does_store(opcode: Opcode, version: u8) -> bool {
use self::Opcode::*;
match opcode {
// does a store in any version
OP2_8 | OP2_9 | OP2_15 | OP2_16 | OP2_17 | OP2_18 | OP2_19 | OP2_20 | OP2_21
| OP2_22 | OP2_23 | OP2_24 | OP2_25 | OP1_129 | OP1_130 | OP1_131 | OP1_132
| OP1_136 | OP1_142 | VAR_224 | VAR_231 | VAR_236 | VAR_246 | VAR_247 | VAR_248
| EXT_1000 | EXT_1001 | EXT_1002 | EXT_1003 | EXT_1004 | EXT_1009 | EXT_1010
| EXT_1019 | EXT_1029 => true,
// only stores in certain versions
OP1_143 => version < 5,
OP0_181 => version == 4, // missing * in spec?
OP0_182 => version == 4, // missing * in spec?
OP0_185 => version >= 5,
VAR_228 => version >= 5,
VAR_233 => version == 6,
_ => false,
}
}
pub fn does_branch(opcode: Opcode, version: u8) -> bool {
use self::Opcode::*;
match opcode {
// does a branch in any version
OP2_1 | OP2_2 | OP2_3 | OP2_4 | OP2_5 | OP2_6 | OP2_7 | OP2_10 | OP1_128 | OP1_129
| OP1_130 | OP0_189 | OP0_191 | VAR_247 | VAR_255 | EXT_1006 | EXT_1024 | EXT_1027 => {
true
}
// only branches in certain versions
OP0_181 => version < 4,
OP0_182 => version < 4,
_ => false,
}
}
pub fn does_text(opcode: Opcode) -> bool {
use self::Opcode::*;
match opcode {
OP0_178 | OP0_179 => true,
_ => false,
}
}
pub fn name(opcode: Opcode, version: u8) -> String {
use self::Opcode::*;
match opcode {
OP2_1 => "je",
OP2_2 => "jl",
OP2_3 => "jg",
OP2_4 => "dec_chk",
OP2_5 => "inc_chk",
OP2_6 => "jin",
OP2_7 => "test",
OP2_8 => "or",
OP2_9 => "and",
OP2_10 => "test_attr",
OP2_11 => "set_attr",
OP2_12 => "clear_attr",
OP2_13 => "store",
OP2_14 => "insert_obj",
OP2_15 => "loadw",
OP2_16 => "loadb",
OP2_17 => "get_prop",
OP2_18 => "get_prop_addr",
OP2_19 => "get_next_prop",
OP2_20 => "add",
OP2_21 => "sub",
OP2_22 => "mul",
OP2_23 => "div",
OP2_24 => "mod",
OP2_25 => "call_2s",
OP2_26 => "call_2n",
OP2_27 => "set_colour",
OP2_28 => "throw",
OP1_128 => "jz",
OP1_129 => "get_sibling",
OP1_130 => "get_child",
OP1_131 => "get_parent",
OP1_132 => "get_prop_len",
OP1_133 => "inc",
OP1_134 => "dec",
OP1_135 => "print_addr",
OP1_136 => "call_1s",
OP1_137 => "remove_obj",
OP1_138 => "print_obj",
OP1_139 => "ret",
OP1_140 => "jump",
OP1_141 => "print_paddr",
OP1_142 => "load",
// actually 2 different operations:
OP1_143 => if version < 4 {
"not"
} else {
"call_1n"
},
OP0_176 => "rtrue",
OP0_177 => "rfalse",
OP0_178 => "print",
OP0_179 => "print_ret",
OP0_180 => "nop",
OP0_181 => "save",
OP0_182 => "restore",
OP0_183 => "restart",
OP0_184 => "ret_popped",
// actually 2 different operations:
OP0_185 => if version < 4 {
"pop"
} else {
"catch"
},
OP0_186 => "quit",
OP0_187 => "new_line",
OP0_188 => "show_status",
OP0_189 => "verify",
OP0_191 => "piracy",
// "call" is the same as "call_vs" (name changed to remove ambiguity)
VAR_224 => if version < 4 {
"call"
} else {
"call_vs"
},
VAR_225 => "storew",
VAR_226 => "storeb",
VAR_227 => "put_prop",
// "sread", "aread", plain "read" are really all the same thing:
VAR_228 => if version < 4 {
"sread"
} else {
"aread"
},
VAR_229 => "print_char",
VAR_230 => "print_num",
VAR_231 => "random",
VAR_232 => "push",
VAR_233 => "pull",
VAR_234 => "split_window",
VAR_235 => "set_window",
VAR_236 => "call_vs2",
VAR_237 => "erase_window",
VAR_238 => "erase_line",
VAR_239 => "set_cursor",
VAR_240 => "get_cursor",
VAR_241 => "set_text_style",
VAR_242 => "buffer_mode",
VAR_243 => "output_stream",
VAR_244 => "input_stream",
VAR_245 => "sound_effect",
VAR_246 => "read_char",
VAR_247 => "scan_table",
VAR_248 => "not",
VAR_249 => "call_vn",
VAR_250 => "call_vn2",
VAR_251 => "tokenise",
VAR_252 => "encode_text",
VAR_253 => "copy_table",
VAR_254 => "print_table",
VAR_255 => "check_arg_count",
EXT_1000 => "save",
EXT_1001 => "restore",
EXT_1002 => "log_shift",
EXT_1003 => "art_shift",
EXT_1004 => "set_font",
EXT_1005 => "draw_picture",
EXT_1006 => "picture_data",
EXT_1007 => "erase_picture",
EXT_1008 => "set_margins",
EXT_1009 => "save_undo",
EXT_1010 => "restore_undo",
EXT_1011 => "print_unicode",
EXT_1012 => "check_unicode",
EXT_1013 => "set_true_colour",
EXT_1016 => "move_window",
EXT_1017 => "window_size",
EXT_1018 => "window_style",
EXT_1019 => "get_wind_prop",
EXT_1020 => "scroll_window",
EXT_1021 => "pop_stack",
EXT_1022 => "read_mouse",
EXT_1023 => "mouse_window",
EXT_1024 => "push_stack",
EXT_1025 => "put_wind_prop",
EXT_1026 => "print_form",
EXT_1027 => "make_menu",
EXT_1028 => "picture_table",
EXT_1029 => "buffer_screen",
}.to_string()
}
}
impl Instruction {
pub fn advances(&self) -> bool {
use self::Opcode::*;
// Some instructions never advance to the next instruction:
// throw, ret, jump, rtrue, rfalse, print_ret, restart, and ret_popped
match self.opcode {
OP2_28 | OP1_139 | OP1_140 | OP0_176 | OP0_177 | OP0_179 | OP0_183 | OP0_184
| OP0_186 => false,
_ => true,
}
}
pub fn does_call(&self, version: u8) -> bool {
use self::Opcode::*;
match self.opcode {
OP2_25 | OP2_26 | OP1_136 | VAR_224 | VAR_236 | VAR_249 | VAR_250 => true,
OP1_143 => version >= 4,
_ => false,
}
}
pub fn should_advance(&self, version: u8) -> bool {
!self.does_call(version) && self.opcode != Opcode::OP0_181 && self.opcode != Opcode::OP0_182
}
}
impl hash::Hash for Instruction {
fn hash<H>(&self, state: &mut H)
where
H: hash::Hasher,
{
state.write_usize(self.addr);
state.finish();
}
}
impl PartialEq for Instruction {
fn eq(&self, other: &Instruction) -> bool {
self.addr == other.addr
}
}
impl Eq for Instruction {}
impl fmt::Display for Instruction {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{:5x}: {:16}", self.addr, self.name)?;
for op in &self.operands {
write!(f, " {}", op)?;
}
if let Some(x) = self.store {
match x {
0 => write!(f, " -> sp"),
1...15 => write!(f, " -> local{}", x - 1),
_ => write!(f, " -> g{}", x - 16),
}?;
};
if let Some(Branch {
address,
returns,
condition,
}) = self.branch
{
match (address, returns, condition) {
(Some(addr), _, 1) => write!(f, " ?{:04x}", addr),
(Some(addr), _, 0) => write!(f, " ?~{:04x}", addr),
(None, Some(1), 1) => write!(f, " ?rtrue"),
(None, Some(1), 0) => write!(f, " ?~rtrue"),
(None, Some(0), 1) => write!(f, " ?rfalse"),
(None, Some(0), 0) => write!(f, " ?~rfalse"),
_ => write!(f, ""),
}?;
};
if let Some(ref text) = self.text {
write!(f, " {}", text)?;
};
write!(f, "")
}
}
| true |
dab41b7a538cd3d85da398fc043fe30655865b82
|
Rust
|
lulf/dove
|
/src/decoding.rs
|
UTF-8
| 9,639 | 2.703125 | 3 |
[
"Apache-2.0"
] |
permissive
|
/*
* Copyright 2020, Ulf Lilleengen
* License: Apache License 2.0 (see the file LICENSE or http://apache.org/licenses/LICENSE-2.0.html).
*/
//! The decoding module contains AMQP 1.0 type decoders and rust native type decoders.
use byteorder::NetworkEndian;
use byteorder::ReadBytesExt;
use std::io::Read;
use std::vec::Vec;
use crate::error::*;
use crate::frame_codec::*;
use crate::types::*;
/**
* Decode an AMQP value from an byte reader. Reads the type constructor
* first and passes this to the rest of the decoding function.
*/
pub fn decode_value(reader: &mut dyn Read) -> Result<Value> {
let raw_code: u8 = reader.read_u8()?;
decode_value_with_ctor(raw_code, reader)
}
/**
* Decode an AMQP value from a byte reader based on the type constructor passed
*/
fn decode_value_with_ctor(raw_code: u8, reader: &mut dyn Read) -> Result<Value> {
let code = decode_type(raw_code)?;
match code {
TypeCode::Described => {
let descriptor = decode_value(reader)?;
let value = decode_value(reader)?;
Ok(Value::Described(Box::new(descriptor), Box::new(value)))
}
TypeCode::Null => Ok(Value::Null),
TypeCode::Boolean => {
let val = reader.read_u8()?;
Ok(Value::Bool(val == 1))
}
TypeCode::BooleanTrue => Ok(Value::Bool(true)),
TypeCode::BooleanFalse => Ok(Value::Bool(false)),
TypeCode::Ubyte => {
let val = reader.read_u8()?;
Ok(Value::Ubyte(val))
}
TypeCode::Ushort => {
let val = reader.read_u16::<NetworkEndian>()?;
Ok(Value::Ushort(val))
}
TypeCode::Uint => {
let val = reader.read_u32::<NetworkEndian>()?;
Ok(Value::Uint(val))
}
TypeCode::Uintsmall => {
let val = reader.read_u8()? as u32;
Ok(Value::Uint(val))
}
TypeCode::Uint0 => Ok(Value::Uint(0)),
TypeCode::Ulong => {
let val = reader.read_u64::<NetworkEndian>()?;
Ok(Value::Ulong(val))
}
TypeCode::Ulongsmall => {
let val = reader.read_u8()? as u64;
Ok(Value::Ulong(val))
}
TypeCode::Ulong0 => Ok(Value::Ulong(0)),
TypeCode::Byte => {
let val = reader.read_i8()?;
Ok(Value::Byte(val))
}
TypeCode::Short => {
let val = reader.read_i16::<NetworkEndian>()?;
Ok(Value::Short(val))
}
TypeCode::Int => {
let val = reader.read_i32::<NetworkEndian>()?;
Ok(Value::Int(val))
}
TypeCode::Intsmall => {
let val = reader.read_i8()? as i32;
Ok(Value::Int(val))
}
TypeCode::Long => {
let val = reader.read_i64::<NetworkEndian>()?;
Ok(Value::Long(val))
}
TypeCode::Longsmall => {
let val = reader.read_i8()? as i64;
Ok(Value::Long(val))
}
TypeCode::Str8 => {
let len = reader.read_u8()? as usize;
let mut buffer = vec![0u8; len];
reader.read_exact(&mut buffer)?;
let s = String::from_utf8(buffer)?;
Ok(Value::String(s))
}
TypeCode::Str32 => {
let len = reader.read_u32::<NetworkEndian>()? as usize;
let mut buffer = vec![0u8; len];
reader.read_exact(&mut buffer)?;
let s = String::from_utf8(buffer)?;
Ok(Value::String(s))
}
TypeCode::Sym8 => {
let len = reader.read_u8()? as usize;
let mut buffer = vec![0u8; len];
reader.read_exact(&mut buffer)?;
Ok(Value::Symbol(buffer))
}
TypeCode::Sym32 => {
let len = reader.read_u32::<NetworkEndian>()? as usize;
let mut buffer = vec![0u8; len];
reader.read_exact(&mut buffer)?;
Ok(Value::Symbol(buffer))
}
TypeCode::Bin8 => {
let len = reader.read_u8()? as usize;
let mut buffer = vec![0u8; len];
reader.read_exact(&mut buffer)?;
Ok(Value::Binary(buffer))
}
TypeCode::Bin32 => {
let len = reader.read_u32::<NetworkEndian>()? as usize;
let mut buffer = vec![0u8; len];
reader.read_exact(&mut buffer)?;
Ok(Value::Binary(buffer))
}
TypeCode::List0 => Ok(Value::List(Vec::new())),
TypeCode::List8 => {
let _sz = reader.read_u8()? as usize;
let count = reader.read_u8()? as usize;
let mut data: Vec<Value> = Vec::new();
for _num in 0..count {
let result = decode_value(reader)?;
data.push(result);
}
Ok(Value::List(data))
}
TypeCode::List32 => {
let _sz = reader.read_u32::<NetworkEndian>()? as usize;
let count = reader.read_u32::<NetworkEndian>()? as usize;
let mut data: Vec<Value> = Vec::new();
for _num in 0..count {
let result = decode_value(reader)?;
data.push(result);
}
Ok(Value::List(data))
}
TypeCode::Array8 => {
let _sz = reader.read_u8()? as usize;
let count = reader.read_u8()? as usize;
let ctype = reader.read_u8()?;
let mut data: Vec<Value> = Vec::new();
for _num in 0..count {
let result = decode_value_with_ctor(ctype, reader)?;
data.push(result);
}
Ok(Value::Array(data))
}
TypeCode::Array32 => {
let _sz = reader.read_u32::<NetworkEndian>()? as usize;
let count = reader.read_u32::<NetworkEndian>()? as usize;
let ctype = reader.read_u8()?;
let mut data: Vec<Value> = Vec::new();
for _num in 0..count {
let result = decode_value_with_ctor(ctype, reader)?;
data.push(result);
}
Ok(Value::Array(data))
}
TypeCode::Map8 => {
let _sz = reader.read_u8()? as usize;
let count = reader.read_u8()? as usize / 2;
let mut data: Vec<(Value, Value)> = Vec::new();
for _num in 0..count {
let key = decode_value(reader)?;
let value = decode_value(reader)?;
data.push((key, value));
}
Ok(Value::Map(data))
}
TypeCode::Map32 => {
let _sz = reader.read_u32::<NetworkEndian>()? as usize;
let count = reader.read_u32::<NetworkEndian>()? as usize / 2;
let mut data: Vec<(Value, Value)> = Vec::new();
for _num in 0..count {
let key = decode_value(reader)?;
let value = decode_value(reader)?;
data.push((key, value));
}
Ok(Value::Map(data))
}
TypeCode::Char => {
let val = reader.read_u32::<NetworkEndian>()?;
Ok(Value::Char(char::from_u32(val).ok_or_else(|| {
AmqpError::decode_error(Some("Invalid unicode character received"))
})?))
}
TypeCode::Timestamp => {
let val = reader.read_u64::<NetworkEndian>()?;
Ok(Value::Timestamp(val))
}
}
}
/// Converts a byte value to a type constructor.
/// http://docs.oasis-open.org/amqp/core/v1.0/csprd01/amqp-core-types-v1.0-csprd01.html#doc-idp280416
fn decode_type(code: u8) -> Result<TypeCode> {
match code {
0x00 => Ok(TypeCode::Described),
0x40 => Ok(TypeCode::Null),
0x56 => Ok(TypeCode::Boolean),
0x41 => Ok(TypeCode::BooleanTrue),
0x42 => Ok(TypeCode::BooleanFalse),
0x50 => Ok(TypeCode::Ubyte),
0x60 => Ok(TypeCode::Ushort),
0x70 => Ok(TypeCode::Uint),
0x52 => Ok(TypeCode::Uintsmall),
0x43 => Ok(TypeCode::Uint0),
0x80 => Ok(TypeCode::Ulong),
0x53 => Ok(TypeCode::Ulongsmall),
0x44 => Ok(TypeCode::Ulong0),
0x51 => Ok(TypeCode::Byte),
0x61 => Ok(TypeCode::Short),
0x71 => Ok(TypeCode::Int),
0x54 => Ok(TypeCode::Intsmall),
0x81 => Ok(TypeCode::Long),
0x55 => Ok(TypeCode::Longsmall),
// 0x72 => Ok(TypeCode::Float),
// 0x82 => Ok(TypeCode::Double),
// decimal32
// decimal64
// decimal128
0x73 => Ok(TypeCode::Char),
0x83 => Ok(TypeCode::Timestamp),
// uuid
0xA0 => Ok(TypeCode::Bin8),
0xA1 => Ok(TypeCode::Str8),
0xA3 => Ok(TypeCode::Sym8),
0xB0 => Ok(TypeCode::Bin32),
0xB1 => Ok(TypeCode::Str32),
0xB3 => Ok(TypeCode::Sym32),
0x45 => Ok(TypeCode::List0),
0xC0 => Ok(TypeCode::List8),
0xD0 => Ok(TypeCode::List32),
0xC1 => Ok(TypeCode::Map8),
0xD1 => Ok(TypeCode::Map32),
0xE0 => Ok(TypeCode::Array8),
0xF0 => Ok(TypeCode::Array32),
_ => Err(AmqpError::AmqpDecodeError(Some(format!(
"Unknown type code: 0x{:X}",
code
)))),
}
}
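// Added illustrative example (not part of the original crate): a str8 value is
// encoded as the 0xA1 constructor, a one-byte length, then the UTF-8 bytes, so
// decode_value should yield Value::String.
#[cfg(test)]
mod decode_examples {
    use super::*;

    #[test]
    fn decodes_str8() {
        let bytes: Vec<u8> = vec![0xA1, 0x02, b'h', b'i'];
        let value = decode_value(&mut &bytes[..]).expect("valid str8 encoding");
        match value {
            Value::String(s) => assert_eq!(s, "hi"),
            _ => panic!("expected Value::String"),
        }
    }
}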
impl ErrorCondition {
pub fn decode(mut decoder: FrameDecoder) -> Result<ErrorCondition> {
let mut condition = ErrorCondition {
condition: String::new(),
description: String::new(),
};
decoder.decode_required(&mut condition.condition)?;
decoder.decode_optional(&mut condition.description)?;
Ok(condition)
}
}
| true |
8b087598d1c024f52c4cd8cd9b5bbec43650e8ab
|
Rust
|
caojen/heystack
|
/src/master/route.rs
|
UTF-8
| 2,751 | 2.796875 | 3 |
[] |
no_license
|
use actix_web::{ web, get, post, put, delete, Responder, HttpResponse, Error };
use super::AppState;
use futures::StreamExt;
#[put("/sync")]
pub async fn sync_index_file(data: web::Data<AppState>) -> impl Responder {
let index_file = data.index_file.lock().unwrap();
match index_file.store_into_file() {
Err(e) => {
println!("{:?}", e);
HttpResponse::InternalServerError()
.body("Something went wrong")
},
_ => {
HttpResponse::Ok()
.body("done")
}
}
}
#[get("/file/{key}")]
pub async fn get_file(data: web::Data<AppState>, web::Path(key): web::Path<u32>) -> impl Responder {
let mut index_file = data.index_file.lock().unwrap();
match index_file.get_data(key) {
Err(_) => {
HttpResponse::InternalServerError()
.body("Something went wrong")
},
Ok(r) => match r {
None => {
HttpResponse::NotFound()
.body("Resource Not Found")
},
Some(t) => {
HttpResponse::Ok()
.body(t)
}
}
}
}
#[post("/file")]
pub async fn upload_file(mut body: web::Payload, data: web::Data<AppState>) -> Result<HttpResponse, Error> {
let mut bytes = web::BytesMut::new();
while let Some(item) = body.next().await {
let item = item?;
bytes.extend_from_slice(&item);
}
let d = &bytes[..];
let mut index_file = data.index_file.lock().unwrap();
match index_file.add_item(&d.to_vec()) {
Err(_) => {
Ok(HttpResponse::InternalServerError()
.body("Something went wrong"))
},
Ok(ifi) => {
Ok(HttpResponse::Ok()
.json(ifi))
}
}
}
#[delete("/file/{key}")]
pub async fn delete_file(data: web::Data<AppState>, web::Path(key): web::Path<u32>) -> impl Responder {
let mut index_file = data.index_file.lock().unwrap();
match index_file.delete_item(key) {
Err(_) => HttpResponse::InternalServerError()
.body("Something went wrong"),
_ => HttpResponse::Ok()
.body("File has deleted")
}
}
#[put("/file/{key}")]
pub async fn update_file(data: web::Data<AppState>, web::Path(key): web::Path<u32>, mut body: web::Payload) -> Result<HttpResponse, Error> {
let mut index_file = data.index_file.lock().unwrap();
let mut bytes = web::BytesMut::new();
while let Some(item) = body.next().await {
let item = item?;
bytes.extend_from_slice(&item);
}
let d = &bytes[..];
// update = delete + add
match index_file.delete_item(key) {
Err(_) => Ok(HttpResponse::InternalServerError()
.body("Cannot delete original file")),
_ => match index_file.add_item(&d.to_vec()) {
Err(_) => Ok(HttpResponse::InternalServerError()
.body("Something went wrong")),
Ok(ifi) => Ok(HttpResponse::Ok().json(ifi))
}
}
}
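// Hedged sketch (not in the original file): the handlers above would typically
// be registered on an actix-web App roughly like this; `build_app_state()` is a
// made-up helper name for wherever AppState is constructed.
//
// let state = web::Data::new(build_app_state());
// let app = App::new()
//     .app_data(state.clone())
//     .service(sync_index_file)
//     .service(get_file)
//     .service(upload_file)
//     .service(delete_file)
//     .service(update_file);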
| true |
f047849cbbfad9eb95607e482720128757193b4d
|
Rust
|
algon-320/regex-vm
|
/src/regex/matcher.rs
|
UTF-8
| 3,262 | 2.875 | 3 |
[] |
no_license
|
use super::compiler::{Char, Ins, Instruction, Position};
type PC = usize;
type SP = usize;
type Context = (PC, SP, usize);
// substring match
pub fn search(ins: &Ins, text: String) -> Option<Vec<(usize, usize)>> {
let text = text.chars().collect::<Vec<_>>();
for p in 0..text.len() {
let ret = search_impl(ins, p, text.as_slice());
if ret != None {
return ret;
}
}
None
}
fn search_impl(ins: &Ins, from: usize, text: &[char]) -> Option<Vec<(usize, usize)>> {
    // Backtrack points saved at each Branch: (pc, sp, open-group depth at save time).
    let mut thread_stack: Vec<Context> = Vec::new();
let mut ret: Vec<(usize, usize)> = Vec::new();
let mut group_paren_l = Vec::new();
let mut pc: PC = 0;
let mut sp: SP = from;
use Instruction::*;
loop {
let inst = ins.get(pc)?;
match inst {
MatchChar(c) => {
let ok = match c {
Char::Literal(l) => text.len() > sp && l == text.get(sp).unwrap(),
Char::CharClass(f, cs) => {
fn check(flip: bool, cs: &Vec<char>, c: char) -> bool {
for k in cs.iter() {
if k == &c {
return !flip;
}
}
flip
}
text.len() > sp && check(*f, cs, *text.get(sp).unwrap())
}
Char::Any => text.len() > sp,
};
if ok {
pc += 1;
sp += 1;
} else {
let context = thread_stack.pop()?;
pc = context.0;
sp = context.1;
group_paren_l.truncate(context.2);
}
}
MatchPos(p) => match p {
Position::Front => {
if sp == 0 {
pc += 1;
} else {
let context = thread_stack.pop()?;
pc = context.0;
sp = context.1;
group_paren_l.truncate(context.2);
}
}
Position::Back => {
if sp == text.len() {
pc += 1;
} else {
let context = thread_stack.pop()?;
pc = context.0;
sp = context.1;
group_paren_l.truncate(context.2);
}
}
},
Branch(x, y) => {
thread_stack.push((*y as usize, sp, group_paren_l.len()));
pc = *x as usize;
}
Jump(x) => {
pc = *x as usize;
}
GroupParenL => {
group_paren_l.push(sp);
pc += 1;
}
GroupParenR => {
let left = group_paren_l.pop()?;
ret.push((left, sp));
pc += 1;
}
Finish => {
ret.insert(0, (from, sp));
return Some(ret);
}
}
}
}
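// Hedged sketch (illustrative only): assuming the compiler module exposes
// something like `compile(pattern) -> Ins`, a substring search returns the
// overall match span first, followed by one span per capture group.
//
// let ins = compiler::compile("a(b)c");
// // "xabcy": the overall match covers indices 1..4, the group covers 2..3.
// assert_eq!(search(&ins, "xabcy".to_string()), Some(vec![(1, 4), (2, 3)]));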
| true |
67d5965db467b8d1777d0e384bcd6d70e4de6d5b
|
Rust
|
wolvespaw/grepbot
|
/src/grep.rs
|
UTF-8
| 1,599 | 2.875 | 3 |
[
"MIT"
] |
permissive
|
use std::cmp::{Eq, PartialEq};
use std::hash::{Hash, Hasher};
use serenity::model::id::UserId;
use regex::Regex;
use serde::{Deserialize, Deserializer, Serialize, Serializer};
use serde::de::Error;
/// Wrapper around a `Regex, UserId` tuple that implements `PartialEq`, `Eq` and `Hash` manually,
/// using the `Regex::as_str()` function as the `Regex` object itself cannot be hashed.
pub struct Grep(pub Regex, pub UserId);
impl PartialEq<Grep> for Grep {
fn eq(&self, other: &Self) -> bool {
let Grep(ref regex, id) = *self;
let Grep(ref other_regex, other_id) = *other;
regex.as_str() == other_regex.as_str() && id == other_id
}
}
impl Eq for Grep {}
impl Hash for Grep {
fn hash<H: Hasher>(&self, state: &mut H) {
let Grep(ref regex, id) = *self;
regex.as_str().hash(state);
id.hash(state);
}
}
impl Serialize for Grep {
fn serialize<S>(&self, serializer: S) -> Result<<S as Serializer>::Ok, <S as Serializer>::Error>
where
S: Serializer,
{
let Grep(ref regex, UserId(id)) = *self;
Serialize::serialize(&(regex.as_str(), id), serializer)
}
}
impl<'de> Deserialize<'de> for Grep {
fn deserialize<D>(deserializer: D) -> Result<Self, <D as Deserializer<'de>>::Error>
where
D: Deserializer<'de>,
{
let (regex, id): (String, u64) = Deserialize::deserialize(deserializer)?;
        let regex = Regex::new(&regex)
.map_err(|e| <D as Deserializer>::Error::custom(format!("{}", e)))?;
let id = UserId(id);
Ok(Grep(regex, id))
}
}
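// Added illustrative example (not in the original file): two greps compare
// equal when the regex pattern text and the user id match, which is what lets
// `Grep` live in hash-based collections despite `Regex` not implementing Hash.
#[cfg(test)]
mod grep_examples {
    use super::*;

    #[test]
    fn equality_uses_pattern_text_and_id() {
        let a = Grep(Regex::new(r"\bfoo\b").unwrap(), UserId(42));
        let b = Grep(Regex::new(r"\bfoo\b").unwrap(), UserId(42));
        assert!(a == b);
    }
}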
| true |
342085c65b61f67d56d09b9ae6b9113813676d61
|
Rust
|
critiqjo/cmark-hamlet
|
/src/adapter.rs
|
UTF-8
| 7,930 | 3.078125 | 3 |
[] |
no_license
|
use std::borrow::Cow;
use cmark::Tag as CmTag;
use cmark::Event as CmEvent;
use hamlet::Token as HmToken;
/// _The_ adapter! An iterator that generates `hamlet::Token`s.
pub struct Adapter<'a, I> {
cm_iter: I,
cm_looka: Option<CmEvent<'a>>, // for lookahead
hm_queue: Vec<HmToken<'a>>,
group_text: bool,
table_head: bool,
}
impl<'a, I> Adapter<'a, I>
where I: Iterator<Item = CmEvent<'a>>
{
/// Create a `Token` iterator from an `Event` iterable. See [Text
/// handling](index.html#text-handling) for more details.
pub fn new<T>(iterable: T, group_text: bool) -> Adapter<'a, I>
where T: IntoIterator<IntoIter = I, Item = CmEvent<'a>> + 'a
{
Adapter {
cm_iter: iterable.into_iter(),
cm_looka: None,
hm_queue: Vec::with_capacity(2),
group_text: group_text,
table_head: false,
}
}
fn cm_start_tag(&mut self, tag: CmTag<'a>) -> HmToken<'a> {
match tag {
CmTag::BlockQuote |
CmTag::Code |
CmTag::Emphasis |
CmTag::Header(_) |
CmTag::Item |
CmTag::List(None) |
CmTag::List(Some(1)) |
CmTag::Paragraph |
CmTag::Strong |
CmTag::Table(_) |
CmTag::TableCell |
CmTag::TableRow => HmToken::start_tag(self.tag_map(tag), attrs!()),
CmTag::CodeBlock(lang) => {
self.hm_queue.push(HmToken::start_tag("code", attrs!()));
if lang.is_empty() {
HmToken::start_tag("pre", attrs!())
} else {
HmToken::start_tag("pre", attrs!(dataLang = lang))
}
}
CmTag::FootnoteDefinition(_) => unimplemented!(),
CmTag::Image(src, title) => {
let mut alt = String::from("");
while let Some(cm_ev) = self.cm_iter.next() {
match cm_ev {
CmEvent::Text(text) => alt.push_str(text.as_ref()),
CmEvent::End(CmTag::Image(_, _)) => break,
CmEvent::Start(CmTag::Image(_, _)) => unreachable!(),
_ => (), // ignore other events
}
}
let mut attrs = attrs!(src = src);
if !alt.is_empty() {
attrs.set("alt", alt);
}
if !title.is_empty() {
attrs.set("title", title);
}
HmToken::start_tag("img", attrs).closed()
}
CmTag::Link(href, title) => {
let mut attrs = attrs!(href = href);
if !title.is_empty() {
attrs.set("title", title);
}
HmToken::start_tag("a", attrs)
}
CmTag::List(Some(start)) => {
HmToken::start_tag("ol", attrs!(start = format!("{}", start)))
}
CmTag::Rule => {
let _ = self.cm_iter.next(); // skip End(Rule)
HmToken::start_tag("hr", attrs!()).closed()
}
CmTag::TableHead => {
self.table_head = true;
HmToken::start_tag("tr", attrs!())
}
}
}
fn cm_end_tag(&mut self, tag: CmTag<'a>) -> HmToken<'a> {
match tag {
CmTag::Rule => unreachable!(),
CmTag::BlockQuote |
CmTag::Code |
CmTag::Emphasis |
CmTag::Header(_) |
CmTag::Item |
CmTag::List(_) |
CmTag::Paragraph |
CmTag::Strong |
CmTag::Table(_) |
CmTag::TableCell |
CmTag::TableRow => HmToken::end_tag(self.tag_map(tag)),
CmTag::CodeBlock(_) => {
self.hm_queue.push(HmToken::end_tag("pre"));
HmToken::end_tag("code")
}
CmTag::FootnoteDefinition(_) => unimplemented!(),
CmTag::Image(_, _) => unreachable!(),
CmTag::Link(_, _) => HmToken::end_tag("a"),
CmTag::TableHead => {
self.table_head = false;
HmToken::end_tag("tr")
}
}
}
fn cm_text(&mut self, mut s: Cow<'a, str>) -> HmToken<'a> {
if self.group_text {
while let Some(cm_ev) = self.cm_iter.next() {
match cm_ev {
CmEvent::Text(text) => s.to_mut().push_str(text.as_ref()),
CmEvent::SoftBreak => s.to_mut().push_str("\n"),
_ => {
self.cm_looka = Some(cm_ev);
break;
}
}
}
}
HmToken::Text(s)
}
fn tag_map(&self, tag: CmTag<'a>) -> Cow<'a, str> {
match tag {
CmTag::BlockQuote => "blockquote".into(),
CmTag::Code => "code".into(),
CmTag::Emphasis => "em".into(),
CmTag::Header(level) => format!("h{}", level).into(),
CmTag::Item => "li".into(),
CmTag::List(None) => "ul".into(),
CmTag::List(Some(_)) => "ol".into(),
CmTag::Paragraph => "p".into(),
CmTag::Rule => "hr".into(),
CmTag::Strong => "strong".into(),
CmTag::Table(_) => "table".into(),
CmTag::TableCell => {
if self.table_head {
"th".into()
} else {
"td".into()
}
}
CmTag::TableRow => "tr".into(),
_ => unreachable!(),
}
}
}
impl<'a, I> Iterator for Adapter<'a, I>
where I: Iterator<Item = CmEvent<'a>>
{
type Item = HmToken<'a>;
fn next(&mut self) -> Option<Self::Item> {
if !self.hm_queue.is_empty() {
Some(self.hm_queue.remove(0))
} else {
let cm_ev = if let Some(cm_ev) = self.cm_looka.take() {
cm_ev
} else if let Some(cm_ev) = self.cm_iter.next() {
cm_ev
} else {
return None;
};
let hm_ev = match cm_ev {
CmEvent::Start(tag) => self.cm_start_tag(tag),
CmEvent::End(tag) => self.cm_end_tag(tag),
CmEvent::Text(text) => self.cm_text(text),
CmEvent::Html(html) | CmEvent::InlineHtml(html) => HmToken::RawText(html),
CmEvent::SoftBreak => self.cm_text("\n".into()),
CmEvent::HardBreak => HmToken::start_tag("br", attrs!()).closed(),
CmEvent::FootnoteReference(_) => unimplemented!(),
};
Some(hm_ev)
}
}
}
#[cfg(test)]
mod tests {
use hamlet::Token as HmToken;
use cmark::Event as CmEvent;
use cmark::Parser;
use Adapter;
use std::borrow::Cow;
fn html_skel_map<'a, I>(ada: Adapter<'a, I>) -> Vec<Cow<'a, str>>
where I: Iterator<Item = CmEvent<'a>>
{
ada.map(|hm_ev| {
match hm_ev {
HmToken::StartTag{name, ..} | HmToken::EndTag{name} => name,
HmToken::Text(text) => text,
_ => panic!("Bad token {:?}", hm_ev),
}
})
.collect()
}
#[test]
fn text_grouping() {
let md = "Multi\nLine\nText";
let ada = Adapter::new(Parser::new(md), true);
let res_vec = html_skel_map(ada);
assert_eq!(&["p", md, "p"], &*res_vec);
}
#[test]
fn code_grouping() {
let code = "Multi\n\nline[2][3]\n\ncode\n\n";
let md = String::from("```\n") + code + "```";
let ada = Adapter::new(Parser::new(&*md), true);
let res_vec = html_skel_map(ada);
assert_eq!(&["pre", "code", code, "code", "pre"], &*res_vec);
}
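    // Added illustrative test (not in the original suite): the adapter consumes
    // the image's inner text as the alt attribute and emits a single
    // self-closing <img>, so only the p / img / p names remain.
    #[test]
    fn image_alt_collapses() {
        let md = "";
        let ada = Adapter::new(Parser::new(md), true);
        let res_vec = html_skel_map(ada);
        assert_eq!(&["p", "img", "p"], &*res_vec);
    }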
}
| true |
3530d285c0acac44b76f4a2decf27be0bd2bf797
|
Rust
|
fbielejec/rust-opencv
|
/src/mat_mask_operations.rs
|
UTF-8
| 3,054 | 2.8125 | 3 |
[] |
no_license
|
use opencv::{
core::{self, Point, Scalar, Vec3b},
imgproc,
prelude::*,
imgcodecs
};
use crate::{utils};
#[allow(dead_code)]
fn saturate_cast (value: &i32) -> u8 {
    // Clamp the i32 intermediate into 0..=255, mirroring OpenCV's saturate_cast<uchar>.
    (*value).max(u8::min_value() as i32).min(u8::max_value() as i32) as u8
}
fn sharpen (image: &Mat, result: &mut Mat) -> opencv::Result<()> {
let n_channels: i32 = image.channels ()? ;
let n_rows: i32 = image.rows () ;
let n_cols: i32 = image.cols () ;
// for i in 1..n_rows - 1 {
// for j in 1..n_cols - 1 {
// let top = image.at_2d::<Vec3b>(i + 1, j)?;
// let bottom = image.at_2d::<Vec3b>(i - 1, j)?;
// let current = image.at_2d::<Vec3b>(i, j)?;
// let left = image.at_2d::<Vec3b>(i, j - 1)?;
// let right = image.at_2d::<Vec3b>(i, j + 1)?;
// let pixel = result.at_2d_mut::<Vec3b>(i, j)?;
// for k in 0..n_channels as usize {
// let value : i32 = 5 * current [k] as i32 - top [k] as i32
// - bottom [k] as i32 - left [k] as i32 - right [k] as i32;
// pixel [k] = saturate_cast (&value);
// }
// }
// }
    // Flat-index variant of the commented-out per-pixel loop above: the
    // neighbours above/below an element are one full row (n_cols elements)
    // away, left/right are the adjacent elements. Work in i32 per channel and
    // saturate back into u8 so the arithmetic cannot wrap.
    let n: i32 = n_rows * n_cols;
    for j in n_cols..(n - n_cols) {
        let top = image.at::<Vec3b>(j - n_cols)?;
        let bottom = image.at::<Vec3b>(j + n_cols)?;
        let current = image.at::<Vec3b>(j)?;
        let left = image.at::<Vec3b>(j - 1)?;
        let right = image.at::<Vec3b>(j + 1)?;
        let pixel = result.at_mut::<Vec3b>(j)?;
        for k in 0..n_channels as usize {
            let value: i32 = 5 * current[k] as i32
                - top[k] as i32
                - bottom[k] as i32
                - left[k] as i32
                - right[k] as i32;
            pixel[k] = saturate_cast(&value);
        }
    }
Ok(())
}
/*
* Demonstrates how to apply a (sharpen) mask to an image
* https://docs.opencv.org/master/d7/d37/tutorial_mat_mask_operations.html
*/
pub fn run () -> opencv::Result<()> {
let image : Mat = imgcodecs::imread("lena.jpg", imgcodecs::IMREAD_COLOR)?;
let mut image_sharpened : Mat = Mat::new_rows_cols_with_default(image.rows (), image.cols (), image.typ ()?, Scalar::default())?;
sharpen (&image, &mut image_sharpened).ok();
utils::display_img(&image_sharpened, None).expect ("Error displaying image");
// filter2D
// https://docs.rs/opencv/0.33.0/opencv/imgproc/fn.filter_2d.html
let kernel = Mat::from_slice_2d(&[
&[ 0, -1, 0],
&[ -1, 5, -1],
&[ 0, -1, 0i32]
])?;
let mut image_sharpened : Mat = Mat::new_rows_cols_with_default(image.rows (), image.cols (), image.typ ()?, Scalar::default())?;
imgproc::filter_2d( &image, &mut image_sharpened, image.depth()?, &kernel, Point::new(-1,-1), 0.0, core::BORDER_DEFAULT).ok();
utils::display_img(&image_sharpened, None).ok();
Ok(())
}
| true |