blob_id
stringlengths 40
40
| language
stringclasses 1
value | repo_name
stringlengths 5
140
| path
stringlengths 5
183
| src_encoding
stringclasses 6
values | length_bytes
int64 12
5.32M
| score
float64 2.52
4.94
| int_score
int64 3
5
| detected_licenses
listlengths 0
47
| license_type
stringclasses 2
values | text
stringlengths 12
5.32M
| download_success
bool 1
class |
---|---|---|---|---|---|---|---|---|---|---|---|
3f5cf742f0830e9e493d4e9776c3ce296f1d2cb8
|
Rust
|
maxymkuz/mini_google
|
/rust_pagerank/src/main.rs
|
UTF-8
| 3,919 | 3.078125 | 3 |
[
"MIT"
] |
permissive
|
use postgres::{Client, NoTls, Error};
// performs a single page rank iteration
// Performs a single PageRank iteration, writing the updated ranks into
// `rank_new` (the previous ranks in `rank` are left untouched).
//
// `adjacency_matrix[v]` lists the nodes that link *into* `v`;
// `out_nodes_num[u]` is the out-degree of node `u`; `d` is the dampening
// factor of the PageRank formula.
fn pagerank_iteration<'a>(
    rank: &'a Vec<f64>,
    rank_new: &'a mut Vec<f64>,
    adjacency_matrix: &'a Vec<Vec<u32>>,
    out_nodes_num: &'a Vec<u32>,
    d: &f64,
) {
    for node_idx in 0..rank.len() {
        // Sum of contributions from every node linking into this one, each
        // divided by that node's out-degree (iterator sum instead of a
        // manual accumulator; also drops the `0 as f64` and redundant casts).
        let sum: f64 = adjacency_matrix[node_idx]
            .iter()
            .map(|&in_node| rank[in_node as usize] / out_nodes_num[in_node as usize] as f64)
            .sum();
        rank_new[node_idx] = (1.0 - d) + d * sum;
    }
}
/// L1 (Manhattan) distance between two rank vectors; used as the
/// convergence measure between successive PageRank iterations.
fn get_manhattan_distance(rank: &Vec<f64>, rank_new: &Vec<f64>) -> f64 {
    (0..rank.len())
        .map(|i| (rank[i] - rank_new[i]).abs())
        .sum()
}
/// Loads the link graph from PostgreSQL, runs a fixed number of PageRank
/// iterations, prints convergence data, and writes the final rank of every
/// website back into the `pagerank` table.
fn main() -> Result<(), Error> {
    // PageRank dampening factor `d`.
    let dampening_factor: f64 = 0.8;
    let num_iterations: u32 = 10;
    // initializing connection to database
    let mut client = Client::connect("postgresql://postgres:postgres@localhost/pagerank_db", NoTls)?;
    // counting the total number of websites indexed
    let mut x: i64 = client.query("SELECT count(*) FROM pagerank", &[])?[0].get(0);
    // +1 — presumably because website ids are 1-based so index 0 is unused;
    // TODO(review): confirm against the `pagerank` table schema.
    x += 1;
    let total_websites: usize = x as usize;
    println!("Total websites: {}", total_websites);
    // initializing the adjacency matrix
    let mut adjacency_matrix: Vec<Vec<u32>> = vec![vec![]; total_websites];
    // saving all edges in memory as iterator, and iterating over it
    for row in client.query("SELECT * FROM connections", &[])? {
        let out_website_id: i32 = row.get(0);
        let in_website_id: i32 = row.get(1);
        // Filtering out self links, and adding them to adjacency matrix
        if in_website_id != out_website_id {
            // adjacency_matrix[v] lists the nodes that link *into* v.
            adjacency_matrix[in_website_id as usize].push(out_website_id as u32);
        }
    }
    // we dont need mutable thingy anymore
    let adjacency_matrix = adjacency_matrix;
    // initialisation value for all ranks
    let init_rank: f64 = 1.0 / total_websites as f64;
    // Initialising two vectors with default values(init_ran and 0.0)
    let mut rank: Vec<f64> = vec![init_rank; total_websites];
    let mut rank_new: Vec<f64> = vec![0.0; total_websites];
    // initialising the number of out nodes based on adjacency matrix
    let mut out_nodes_num: Vec<u32> = vec![0; total_websites];
    for i in 0..total_websites {
        for website in 0..adjacency_matrix[i].len() {
            out_nodes_num[adjacency_matrix[i][website] as usize] += 1;
        }
    }
    println!("Adj mrtx {:?}", adjacency_matrix);
    println!("Out nodes {:?}", out_nodes_num);
    // adding manhattan distance vector to measure convergence:
    let mut manhattan_distances: Vec<f64> = vec![0.0; num_iterations as usize];
    for iteration in 0..num_iterations as usize{
        {
            pagerank_iteration(&rank, &mut rank_new, &adjacency_matrix, &out_nodes_num, &dampening_factor);
        }
        // Calculating distance that represents the convergence rate:
        manhattan_distances[iteration] = get_manhattan_distance(&rank, &rank_new);
        // now we can just make rank to hold new rank without copying
        std::mem::swap(&mut rank, &mut rank_new);
        println!("After iteration {}:", iteration);
        println!("{:?}", rank);
    }
    println!("\nFinal rankings: {:?}", rank);
    println!("\nManhattan distances: {:?}", manhattan_distances);
    // updating the ranks in the database:
    for (index, i_rank) in rank.iter().enumerate() {
        client.execute(
            "UPDATE pagerank SET rank = $1 WHERE website_id = $2",
            &[i_rank, &(index as i32)],
        )?;
        println!("{}", index)
    }
    Ok(())
}
| true |
3e9d3bc47deba888c3a7c77c6443d8e783f993e9
|
Rust
|
mvertescher/psoc6-pac
|
/src/usbfs0/usbhost/host_ctl0.rs
|
UTF-8
| 4,056 | 2.515625 | 3 |
[
"BSD-3-Clause",
"0BSD",
"Apache-2.0"
] |
permissive
|
// NOTE(review): svd2rust-generated register accessors for HOST_CTL0. Kept
// byte-identical apart from comments, since this file tracks generator output.
#[doc = "Reader of register HOST_CTL0"]
pub type R = crate::R<u32, super::HOST_CTL0>;
#[doc = "Writer for register HOST_CTL0"]
pub type W = crate::W<u32, super::HOST_CTL0>;
#[doc = "Register HOST_CTL0 `reset()`'s with value 0"]
impl crate::ResetValue for super::HOST_CTL0 {
    type Type = u32;
    #[inline(always)]
    fn reset_value() -> Self::Type {
        // Hardware reset value of HOST_CTL0 is all zeroes.
        0
    }
}
#[doc = "Reader of field `HOST`"]
pub type HOST_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `HOST`"]
pub struct HOST_W<'a> {
    w: &'a mut W,
}
impl<'a> HOST_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // HOST occupies bit 0: clear it, then OR in the new value.
        self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01);
        self.w
    }
}
#[doc = "Reader of field `ENABLE`"]
pub type ENABLE_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `ENABLE`"]
pub struct ENABLE_W<'a> {
    w: &'a mut W,
}
impl<'a> ENABLE_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // ENABLE occupies bit 31: clear it, then OR in the new value.
        self.w.bits = (self.w.bits & !(0x01 << 31)) | (((value as u32) & 0x01) << 31);
        self.w
    }
}
impl R {
    #[doc = "Bit 0 - This bit selects an operating mode of this IP. '0' : USB Device '1' : USB Host Notes: - The operation mode does not transition to the required one immediately after it was changed using this bit. Read this bit to check that the operation mode has changed. - This bit is initialized if ENABLE bit of the Host Control 0 Register (HOST_CTL0) changes from '1' to '0'.. - Before changing from the USB Host to the USB Device, check that the following conditions are satisfied and also set the RST bit of the Host Control 1 Register (HOST_CTL1). to '1'. * The SOFBUSY bit of the Host Status Register (HOST_STATUS) is set to '0'. * The TKNEN bits of the Host Token Endpoint Register (HOST_TOKEN) is set to '000'. * The SUSP bit of the Host Status Register (HOST_STATUS) is set to '0'."]
    #[inline(always)]
    pub fn host(&self) -> HOST_R {
        // Read bit 0 of the register value.
        HOST_R::new((self.bits & 0x01) != 0)
    }
    #[doc = "Bit 31 - This bit enables the operation of this IP. '0' : Disable USB Host '1' : Enable USB Host Note: - This bit doesn' affect the USB Device."]
    #[inline(always)]
    pub fn enable(&self) -> ENABLE_R {
        // Read bit 31 of the register value.
        ENABLE_R::new(((self.bits >> 31) & 0x01) != 0)
    }
}
impl W {
    #[doc = "Bit 0 - This bit selects an operating mode of this IP. '0' : USB Device '1' : USB Host Notes: - The operation mode does not transition to the required one immediately after it was changed using this bit. Read this bit to check that the operation mode has changed. - This bit is initialized if ENABLE bit of the Host Control 0 Register (HOST_CTL0) changes from '1' to '0'.. - Before changing from the USB Host to the USB Device, check that the following conditions are satisfied and also set the RST bit of the Host Control 1 Register (HOST_CTL1). to '1'. * The SOFBUSY bit of the Host Status Register (HOST_STATUS) is set to '0'. * The TKNEN bits of the Host Token Endpoint Register (HOST_TOKEN) is set to '000'. * The SUSP bit of the Host Status Register (HOST_STATUS) is set to '0'."]
    #[inline(always)]
    pub fn host(&mut self) -> HOST_W {
        HOST_W { w: self }
    }
    #[doc = "Bit 31 - This bit enables the operation of this IP. '0' : Disable USB Host '1' : Enable USB Host Note: - This bit doesn' affect the USB Device."]
    #[inline(always)]
    pub fn enable(&mut self) -> ENABLE_W {
        ENABLE_W { w: self }
    }
}
| true |
80aece9b85ff7f7ea2d53760c0e993d530b06c50
|
Rust
|
oconnor663/blake2_simd
|
/tests/fuzz_many.rs
|
UTF-8
| 8,176 | 2.84375 | 3 |
[
"MIT"
] |
permissive
|
use arrayvec::ArrayVec;
use rand::Rng;
use rand::RngCore;
use rand::SeedableRng;
// Do more tests in release mode, but try to keep execution time under 1 sec.
#[cfg(debug_assertions)]
const NUM_TESTS: usize = 1_000;
#[cfg(not(debug_assertions))]
const NUM_TESTS: usize = 100_000;
// Inputs up to three BLAKE2b blocks long, so block-boundary buffering is hit.
const BLAKE2B_MAX_LEN: usize = 3 * blake2b_simd::BLOCKBYTES;
// Batches up to twice the SIMD degree, so multi-pass batching is exercised.
const BLAKE2B_MAX_N: usize = 2 * blake2b_simd::many::MAX_DEGREE;
/// Build a BLAKE2b `Params` with randomized `hash_length`, optional `key`,
/// and `last_node` flag, drawn from `rng`.
fn random_params_blake2b(rng: &mut rand_chacha::ChaChaRng) -> blake2b_simd::Params {
    let mut params = blake2b_simd::Params::new();
    // hash_length, key, and last_node are all things that need to be passed
    // from the Params through to the State or whatever. Randomize those.
    // Everything else just winds up in the state words and doesn't really need
    // to be exercised here.
    params.hash_length(rng.gen_range(1, blake2b_simd::OUTBYTES + 1));
    if rng.gen() {
        let len: usize = rng.gen_range(1, blake2b_simd::KEYBYTES + 1);
        let key_buf = &[1; blake2b_simd::KEYBYTES];
        params.key(&key_buf[..len]);
    }
    params.last_node(rng.gen());
    params
}
/// Drive `f` with `NUM_TESTS` randomized `(params, inputs)` cases.
///
/// Uses a fixed ChaCha seed so failures are reproducible across runs.
fn with_random_inputs_blake2b(mut f: impl FnMut(&[blake2b_simd::Params], &[&[u8]])) {
    let mut rng = rand_chacha::ChaChaRng::seed_from_u64(0);
    // Generate randomized input buffers to reuse in each test case.
    let mut input_bufs = [[0; BLAKE2B_MAX_LEN]; BLAKE2B_MAX_N];
    for input in input_bufs.iter_mut() {
        rng.fill_bytes(input);
    }
    for _ in 0..NUM_TESTS {
        // Select a random number of random length input slices from the
        // buffers.
        let num_inputs: usize = rng.gen_range(0, BLAKE2B_MAX_N + 1);
        let mut inputs = ArrayVec::<&[u8], BLAKE2B_MAX_N>::new();
        for i in 0..num_inputs {
            let input_length = rng.gen_range(0, BLAKE2B_MAX_LEN + 1);
            inputs.push(&input_bufs[i][..input_length]);
        }
        // For each input slice, create a random Params object.
        let mut params = ArrayVec::<blake2b_simd::Params, BLAKE2B_MAX_N>::new();
        for _ in 0..num_inputs {
            params.push(random_params_blake2b(&mut rng));
        }
        // Fixed mojibake: `&params` had been corrupted to `¶ms`.
        f(&params, &inputs);
    }
}
/// Batch `hash_many` must agree with hashing each input individually.
#[test]
fn fuzz_blake2b_hash_many() {
    with_random_inputs_blake2b(|params, inputs| {
        // Compute the hash of each input independently.
        let mut expected = ArrayVec::<blake2b_simd::Hash, BLAKE2B_MAX_N>::new();
        for (param, input) in params.iter().zip(inputs.iter()) {
            expected.push(param.hash(input));
        }
        // Now compute the same hashes in a batch, and check that this gives
        // the same result.
        let mut jobs: ArrayVec<blake2b_simd::many::HashManyJob, BLAKE2B_MAX_N> = inputs
            .iter()
            .zip(params.iter())
            .map(|(input, param)| blake2b_simd::many::HashManyJob::new(param, input))
            .collect();
        blake2b_simd::many::hash_many(&mut jobs);
        for i in 0..jobs.len() {
            assert_eq!(&expected[i], &jobs[i].to_hash(), "job {} mismatch", i);
        }
    });
}
/// Batch `update_many` must agree with updating each state individually;
/// feeding every input twice exercises internal buffering.
#[test]
fn fuzz_blake2b_update_many() {
    with_random_inputs_blake2b(|params, inputs| {
        // Compute the hash of each input independently. Feed each into the
        // state twice, to exercise buffering.
        let mut expected = ArrayVec::<blake2b_simd::Hash, BLAKE2B_MAX_N>::new();
        for (param, input) in params.iter().zip(inputs.iter()) {
            let mut state = param.to_state();
            state.update(input);
            state.update(input);
            expected.push(state.finalize());
        }
        // Now compute the same hashes in a batch, and check that this gives
        // the same result.
        let mut states = ArrayVec::<blake2b_simd::State, BLAKE2B_MAX_N>::new();
        for param in params {
            states.push(param.to_state());
        }
        blake2b_simd::many::update_many(states.iter_mut().zip(inputs.iter()));
        blake2b_simd::many::update_many(states.iter_mut().zip(inputs.iter()));
        for i in 0..states.len() {
            // Each input was fed twice, so the byte count must be doubled.
            assert_eq!(2 * inputs[i].len() as u128, states[i].count());
            assert_eq!(&expected[i], &states[i].finalize(), "state {} mismatch", i);
        }
    });
}
// Same sizing rationale as the BLAKE2b constants above, for BLAKE2s.
const BLAKE2S_MAX_LEN: usize = 3 * blake2s_simd::BLOCKBYTES;
const BLAKE2S_MAX_N: usize = 2 * blake2s_simd::many::MAX_DEGREE;
/// Build a BLAKE2s `Params` with randomized `hash_length`, optional `key`,
/// and `last_node` flag, drawn from `rng`.
fn random_params_blake2s(rng: &mut rand_chacha::ChaChaRng) -> blake2s_simd::Params {
    let mut params = blake2s_simd::Params::new();
    // hash_length, key, and last_node are all things that need to be passed
    // from the Params through to the State or whatever. Randomize those.
    // Everything else just winds up in the state words and doesn't really need
    // to be exercised here.
    params.hash_length(rng.gen_range(1, blake2s_simd::OUTBYTES + 1));
    if rng.gen() {
        let len: usize = rng.gen_range(1, blake2s_simd::KEYBYTES + 1);
        let key_buf = &[1; blake2s_simd::KEYBYTES];
        params.key(&key_buf[..len]);
    }
    params.last_node(rng.gen());
    params
}
/// Drive `f` with `NUM_TESTS` randomized `(params, inputs)` cases (BLAKE2s).
///
/// Uses a fixed ChaCha seed so failures are reproducible across runs.
fn with_random_inputs_blake2s(mut f: impl FnMut(&[blake2s_simd::Params], &[&[u8]])) {
    let mut rng = rand_chacha::ChaChaRng::seed_from_u64(0);
    // Generate randomized input buffers to reuse in each test case.
    let mut input_bufs = [[0; BLAKE2S_MAX_LEN]; BLAKE2S_MAX_N];
    for input in input_bufs.iter_mut() {
        rng.fill_bytes(input);
    }
    for _ in 0..NUM_TESTS {
        // Select a random number of random length input slices from the
        // buffers.
        let num_inputs: usize = rng.gen_range(0, BLAKE2S_MAX_N + 1);
        let mut inputs = ArrayVec::<&[u8], BLAKE2S_MAX_N>::new();
        for i in 0..num_inputs {
            let input_length = rng.gen_range(0, BLAKE2S_MAX_LEN + 1);
            inputs.push(&input_bufs[i][..input_length]);
        }
        // For each input slice, create a random Params object.
        let mut params = ArrayVec::<blake2s_simd::Params, BLAKE2S_MAX_N>::new();
        for _ in 0..num_inputs {
            params.push(random_params_blake2s(&mut rng));
        }
        // Fixed mojibake: `&params` had been corrupted to `¶ms`.
        f(&params, &inputs);
    }
}
/// Batch `hash_many` must agree with hashing each input individually (BLAKE2s).
#[test]
fn fuzz_blake2s_hash_many() {
    with_random_inputs_blake2s(|params, inputs| {
        // Compute the hash of each input independently.
        let mut expected = ArrayVec::<blake2s_simd::Hash, BLAKE2S_MAX_N>::new();
        for (param, input) in params.iter().zip(inputs.iter()) {
            expected.push(param.hash(input));
        }
        // Now compute the same hashes in a batch, and check that this gives
        // the same result.
        let mut jobs: ArrayVec<blake2s_simd::many::HashManyJob, BLAKE2S_MAX_N> = inputs
            .iter()
            .zip(params.iter())
            .map(|(input, param)| blake2s_simd::many::HashManyJob::new(param, input))
            .collect();
        blake2s_simd::many::hash_many(&mut jobs);
        for i in 0..jobs.len() {
            assert_eq!(&expected[i], &jobs[i].to_hash(), "job {} mismatch", i);
        }
    });
}
/// Batch `update_many` must agree with updating each state individually;
/// feeding every input twice exercises internal buffering (BLAKE2s).
#[test]
fn fuzz_blake2s_update_many() {
    with_random_inputs_blake2s(|params, inputs| {
        // Compute the hash of each input independently. Feed each into the
        // state twice, to exercise buffering.
        let mut expected = ArrayVec::<blake2s_simd::Hash, BLAKE2S_MAX_N>::new();
        for (param, input) in params.iter().zip(inputs.iter()) {
            let mut state = param.to_state();
            state.update(input);
            state.update(input);
            expected.push(state.finalize());
        }
        // Now compute the same hashes in a batch, and check that this gives
        // the same result.
        let mut states = ArrayVec::<blake2s_simd::State, BLAKE2S_MAX_N>::new();
        for param in params {
            states.push(param.to_state());
        }
        blake2s_simd::many::update_many(states.iter_mut().zip(inputs.iter()));
        blake2s_simd::many::update_many(states.iter_mut().zip(inputs.iter()));
        for i in 0..states.len() {
            // Each input was fed twice, so the byte count must be doubled.
            assert_eq!(2 * inputs[i].len() as u64, states[i].count());
            assert_eq!(&expected[i], &states[i].finalize(), "state {} mismatch", i);
        }
    });
}
| true |
941ff317fe76169e0debc040dd29d60b30858f8b
|
Rust
|
tsturzl/dawn
|
/crates/runsv/src/main.rs
|
UTF-8
| 1,774 | 2.75 | 3 |
[] |
no_license
|
// Fixed typo: `extern crates` -> `extern crate` (would not compile).
extern crate unix_named_pipe;
use std::fs;
use std::io::prelude::*;
use std::process::{Child, Command};
use std::time::{SystemTime, UNIX_EPOCH};
// State of a supervised service process.
enum Status {
    // Running; payload is presumably the start time in epoch seconds —
    // TODO(review): confirm intended meaning.
    Up(usize),
    // Exited; payload looks like an exit code — TODO(review): confirm.
    Down(i32),
    // Paused; payload meaning unverified from this file.
    Paused(usize),
}
// A supervised child process together with its runsv-style bookkeeping files.
struct Process {
    // Service directory (contains the `run` script and `supervise/`).
    dir: String,
    // Path of the `supervise/` subdirectory.
    supervise_path: String,
    // Read end of the `supervise/control` FIFO.
    control_pipe: fs::File,
    // OS pid of the spawned child.
    pid: u32,
    status: Status,
    // Handle to the spawned child process.
    proc: Child,
    // When the process was started.
    uptime: SystemTime
}
impl Process {
pub fn new(dir: &str) -> Proccess {
let supervise_path = dir.to_string().push_str("/supervise");
fs::read_dir(supervise_path).unwrap();
let lock_path = supervise_path.to_string().push_str("/lock");
write_file(lock_path, b"");
let control_path = supervise_path.to_string().push_str("/control");
unix_named_pipe::create(control_path, Some(0o660))
.expect("Control pipe to be created");
let control_pipe = unix_named_pipe::open_read(control_path)
.expect("Control pipe to exist");
let uptime = SystemTime::now();
let run_path = dir.to_string().push_str("/run");
let proc = Command::new(run_path)
.spawn()
.expect("Process to run");
let pid_path = supervise_path.to_string().push_str("/pid");
let pid = proc.id();
write_file(pid_path, pid.to_string().as_bytes());
let status_path = supervise_path.to_string().push_str("/status");
let status_str = "up:".to_string().push_str(uptime.to_string());
write_file(status_path, status_str.as_bytes());
let ok_path = supervise_path.to_string().push_str("/ok");
write_file(ok_path, b"");
}
}
// Create (or truncate) `path` and write `data` to it, panicking on failure.
//
// NOTE(review): the original used `File::open` (read-only, so writes would
// fail) and the `?` operator inside a function returning `()` (a compile
// error); it also ignored the `write_all` result.
fn write_file(path: &str, data: &[u8]) {
    let mut file = fs::File::create(path).expect("failed to create file");
    file.write_all(data).expect("failed to write file");
}
// Entry point. The supervision loop is not implemented yet.
fn main() {
}
| true |
b1545185eed77823c7932236d10ccd84b78e5fb0
|
Rust
|
karjonas/advent-of-code
|
/2020/day12/src/lib.rs
|
UTF-8
| 2,455 | 3.328125 | 3 |
[] |
no_license
|
extern crate common;
#[macro_use]
extern crate scan_fmt;
/// A navigation instruction: an action letter and its integer argument,
/// e.g. "F10" -> ('F', 10).
type Instruction = (char, i64);
/// Parse the puzzle input: one instruction per line, a single action letter
/// followed by a decimal value.
///
/// NOTE(review): rewritten with plain std parsing instead of the external
/// `scan_fmt!` macro — same result for well-formed input, one less dependency.
fn parse_input(input: &String) -> Vec<Instruction> {
    input
        .lines()
        .map(|line| {
            let mut chars = line.chars();
            let action = chars.next().expect("empty instruction line");
            // `chars.as_str()` is the remainder of the line after the letter.
            let value = chars
                .as_str()
                .parse::<i64>()
                .expect("invalid instruction value");
            (action, value)
        })
        .collect()
}
fn part_one(instructions: &Vec<Instruction>) -> usize {
const NUM_DIRS: usize = 4;
let mut facing = 1; // 0,1,2,3 N, E, S, W
let mut y = 0;
let mut x = 0;
for (action, value) in instructions.clone() {
match action {
'N' => y += value,
'E' => x += value,
'S' => y -= value,
'W' => x -= value,
'L' => facing = (facing + NUM_DIRS - value as usize / 90) % NUM_DIRS,
'R' => facing = (facing + NUM_DIRS + value as usize / 90) % NUM_DIRS,
'F' => match facing {
0 => y += value,
1 => x += value,
2 => y -= value,
3 => x -= value,
_ => panic!("invalid facing"),
},
_ => panic!("Invalid instruction"),
}
}
return (y.abs() + x.abs()) as usize;
}
fn part_two(instructions: &Vec<Instruction>) -> usize {
let mut pos = (0, 0);
let mut way_pos = (10, 1);
for (action, value) in instructions.clone() {
match action {
'N' => way_pos.1 += value,
'E' => way_pos.0 += value,
'S' => way_pos.1 -= value,
'W' => way_pos.0 -= value,
'L' => {
for _i in 0..value / 90 {
way_pos = (-way_pos.1, way_pos.0);
}
}
'R' => {
for _i in 0..value / 90 {
way_pos = (way_pos.1, -way_pos.0);
}
}
'F' => pos = (pos.0 + way_pos.0 * value, pos.1 + way_pos.1 * value),
_ => panic!("Invalid instruction"),
}
}
return (pos.0.abs() + pos.1.abs()) as usize;
}
/// Entry point for the AoC runner: read the day-12 input file and print both
/// part answers.
pub fn solve() {
    let input = common::read_file("2020/day12/input");
    let grid = parse_input(&input);
    println!("Part one: {}", part_one(&grid));
    println!("Part two: {}", part_two(&grid));
}
#[cfg(test)]
mod tests {
    use super::*;
    // Sample program from the puzzle statement; expected answers are 25 / 286.
    #[test]
    fn test_samples() {
        let input = ["F10", "N3", "F7", "R90", "F11"].join("\n");
        assert_eq!(part_one(&parse_input(&input)), 25);
        assert_eq!(part_two(&parse_input(&input)), 286);
    }
}
| true |
116be4a78eeecf6ec96c4d7a3075180149f5a9fd
|
Rust
|
Vlad2001MFS/raytracer
|
/src/material.rs
|
UTF-8
| 3,545 | 3.25 | 3 |
[] |
no_license
|
use crate::{
Vec3, Ray, HitInfo,
};
/// Result of scattering a ray off a material: the color attenuation to apply
/// and the new ray to continue tracing.
#[derive(Clone)]
pub struct ScatterInfo {
    attenuation: Vec3,
    scattered_ray: Ray,
}
impl ScatterInfo {
    /// Color attenuation applied to light carried along the scattered ray.
    pub fn attenuation(&self) -> &Vec3 {
        &self.attenuation
    }
    /// The ray to continue tracing after this bounce.
    pub fn scattered_ray(&self) -> &Ray {
        &self.scattered_ray
    }
}
/// All supported material kinds; dispatches `scatter` to the concrete type.
#[derive(Clone)]
pub enum Material {
    Lambertian(LambertianMtl),
    Metal(MetalMtl),
    Dielectric(DielectricMtl),
}
impl Material {
    /// Scatter `ray_in` at the hit point; `None` means the ray was absorbed.
    pub fn scatter(&self, ray_in: &Ray, hit_info: &HitInfo) -> Option<ScatterInfo> {
        match self {
            Material::Lambertian(mtl) => mtl.scatter(ray_in, hit_info),
            Material::Metal(mtl) => mtl.scatter(ray_in, hit_info),
            Material::Dielectric(mtl) => mtl.scatter(ray_in, hit_info),
        }
    }
}
/// Diffuse (matte) material with a constant albedo.
#[derive(Clone)]
pub struct LambertianMtl {
    albedo: Vec3,
}
impl LambertianMtl {
    pub fn new(albedo: Vec3) -> LambertianMtl {
        LambertianMtl {
            albedo
        }
    }
    /// Diffuse scatter: the new direction is the surface normal plus a random
    /// unit vector. Always scatters (never absorbs).
    pub fn scatter(&self, _ray_in: &Ray, hit_info: &HitInfo) -> Option<ScatterInfo> {
        let scatter_dir = hit_info.normal().clone() + Vec3::random_unit_vector();
        Some(ScatterInfo {
            attenuation: self.albedo,
            scattered_ray: Ray::new(hit_info.point().clone(), scatter_dir),
        })
    }
}
/// Reflective material; `fuzziness` in [0, 1] blurs the reflection.
#[derive(Clone)]
pub struct MetalMtl {
    albedo: Vec3,
    fuzziness: f64,
}
impl MetalMtl {
    pub fn new(albedo: Vec3, fuzziness: f64) -> MetalMtl {
        MetalMtl {
            albedo,
            // Clamp fuzziness into [0, 1].
            fuzziness: fuzziness.min(1.0).max(0.0),
        }
    }
    /// Mirror-reflect the incoming ray, jittered by `fuzziness`. Returns
    /// `None` when the fuzzed ray would point into the surface (absorbed).
    pub fn scatter(&self, ray_in: &Ray, hit_info: &HitInfo) -> Option<ScatterInfo> {
        let reflected = ray_in.direction.reflected(hit_info.normal().clone());
        let scattered_ray = Ray::new(hit_info.point().clone(), reflected + Vec3::random_unit_vector()*self.fuzziness);
        if scattered_ray.direction.dot(hit_info.normal().clone()) > 0.0 {
            Some(ScatterInfo {
                attenuation: self.albedo,
                scattered_ray,
            })
        }
        else {
            None
        }
    }
}
/// Schlick's approximation of the Fresnel reflectance at a dielectric
/// boundary, given the incidence cosine and the refraction-index ratio.
fn calc_shlick(cosine: f64, ref_idx: f64) -> f64 {
    let base = (1.0 - ref_idx) / (1.0 + ref_idx);
    let r0 = base * base;
    let one_minus_cos = 1.0 - cosine;
    r0 + (1.0 - r0) * one_minus_cos.powi(5)
}
/// Transparent material (glass/water) with a refraction index.
#[derive(Clone)]
pub struct DielectricMtl {
    ref_idx: f64
}
impl DielectricMtl {
    pub fn new(ref_idx: f64) -> DielectricMtl {
        DielectricMtl {
            ref_idx
        }
    }
    /// Refract or reflect the incoming ray. Reflects on total internal
    /// reflection, and probabilistically per Schlick's approximation;
    /// otherwise refracts. Attenuation is always white (no absorption).
    pub fn scatter(&self, ray_in: &Ray, hit_info: &HitInfo) -> Option<ScatterInfo> {
        Some(ScatterInfo {
            attenuation: Vec3(1.0, 1.0, 1.0),
            scattered_ray: {
                // Index ratio depends on whether we hit the outside face.
                let etai_over_etat = if hit_info.is_front_face() { 1.0 / self.ref_idx } else { self.ref_idx };
                let unit_dir = ray_in.direction.normalized();
                let cos_theta = unit_dir.dot(-hit_info.normal().clone()).min(1.0);
                let sin_theta = (1.0 - cos_theta*cos_theta).sqrt();
                // Snell's law has no solution here: total internal reflection.
                if etai_over_etat*sin_theta > 1.0 {
                    Ray::new(hit_info.point().clone(), unit_dir.reflected(hit_info.normal().clone()))
                }
                // Reflect with probability given by Schlick's approximation.
                else if rand::random::<f64>() < calc_shlick(cos_theta, etai_over_etat) {
                    Ray::new(hit_info.point().clone(), unit_dir.reflected(hit_info.normal().clone()))
                }
                else {
                    Ray::new(hit_info.point().clone(), unit_dir.refracted(hit_info.normal().clone(), etai_over_etat))
                }
            }
        })
    }
}
| true |
df7fa496bff97677e447108a6f9bbe01a5eb0e17
|
Rust
|
pawe/ty
|
/ty-cli/src/main.rs
|
UTF-8
| 2,390 | 2.734375 | 3 |
[] |
no_license
|
extern crate clap;
use clap::{App, Arg};
use load_dotenv::try_load_dotenv;
use ty_lib::ThankYouMessage;
try_load_dotenv!();
/// CLI entry point: parse arguments, validate the thank-you message, and
/// either send it or print the validation errors.
fn main() {
    // Make sure reqwest/openssl can locate CA certificates on this system.
    openssl_probe::init_ssl_cert_env_vars();
    let matches = App::new("ty - thank you")
        .version("0.2.1")
        .author("Paul Weißenbach <[email protected]>")
        .about("Say thank you to the tools (and hopefully it's authors) you use by simply typing ty in your terminal.")
        .arg(Arg::with_name("TOOL")
            .help("Name of the tool you want to thank. If left blank, it takes the last command in the history.")
            .required(true)
            .index(1))
        .arg(Arg::with_name("message")
            .short("m")
            .long("message")
            .takes_value(true)
            .multiple(false)
            .help("Add an optional message to your thank you."))
        .get_matches();
    // TOOL is a required positional argument, so unwrap() cannot fail here.
    let program = matches.value_of("TOOL").unwrap().to_string();
    // Idiom fix: Option::map instead of a manual Some/None match.
    let note = matches.value_of("message").map(|msg| msg.to_string());
    let message = ThankYouMessage { program, note };
    use validator::Validate;
    match message.validate() {
        Ok(()) => send_ty_note(message),
        Err(e) => {
            // Print every field-level validation message we received.
            for validation_error_kind in e.errors().values() {
                use validator::ValidationErrorsKind::Field;
                match validation_error_kind {
                    Field(val_errors) => {
                        for val_error in val_errors {
                            println!("{}", val_error.message.as_ref().expect("There was an error, but we have no error message for it. Stupid, right!"))
                        }
                    }
                    _ => unimplemented!("Sorry, something unexpected happened!"),
                }
            }
        }
    }
}
fn send_ty_note(message: ThankYouMessage) {
let endpoint = std::env!(
"TY_API_ENDPOINT",
"needs TY_API_ENDPOINT in enviroment to compile"
)
.to_string();
let response = reqwest::blocking::Client::new()
.post(&(endpoint + "/note"))
.timeout(core::time::Duration::new(7, 0)) // no one has time to wait
.json(&message)
.send();
if response.is_err() || response.unwrap().status() != reqwest::StatusCode::CREATED {
println!("Faild to collect your thank you note. Please try again later.")
}
}
| true |
c9213c20e854cc4d0b1c32e154b0bda7a1e9ea4e
|
Rust
|
guoyucode/rbatis
|
/rbatis-core/src/pool/connection.rs
|
UTF-8
| 4,697 | 2.71875 | 3 |
[
"Apache-2.0"
] |
permissive
|
use futures_core::future::BoxFuture;
use std::borrow::{Borrow, BorrowMut};
use std::ops::{Deref, DerefMut};
use std::sync::Arc;
use std::time::Instant;
use super::inner::{DecrementSizeGuard, SharedPool};
use crate::connection::{Connect, Connection};
/// A connection checked out from [`Pool`][crate::pool::Pool].
///
/// Will be returned to the pool on-drop.
pub struct PoolConnection<C>
where
    C: Connect,
{
    // `None` only transiently, while the connection is being released/closed.
    live: Option<Live<C>>,
    pub(crate) pool: Arc<SharedPool<C>>,
}
// A raw connection plus its creation instant (presumably used for
// max-lifetime checks elsewhere in the pool — TODO confirm).
pub(super) struct Live<C> {
    raw: C,
    pub(super) created: Instant,
}
// A live connection that has been sitting idle in the pool since `since`.
pub(super) struct Idle<C> {
    live: Live<C>,
    pub(super) since: Instant,
}
/// RAII wrapper for connections being handled by functions that may drop them
pub(super) struct Floating<'p, C> {
    inner: C,
    // Decrements the pool's size counter on drop unless cancelled.
    guard: DecrementSizeGuard<'p>,
}
// Panic message for deref after release — indicates a library bug.
const DEREF_ERR: &str = "(bug) connection already released to pool";
// Borrow/Deref plumbing: while checked out, a PoolConnection transparently
// acts as the raw connection `C`.
impl<C> Borrow<C> for PoolConnection<C>
where
    C: Connect,
{
    fn borrow(&self) -> &C {
        &*self
    }
}
impl<C> BorrowMut<C> for PoolConnection<C>
where
    C: Connect,
{
    fn borrow_mut(&mut self) -> &mut C {
        &mut *self
    }
}
impl<C> Deref for PoolConnection<C>
where
    C: Connect,
{
    type Target = C;
    fn deref(&self) -> &Self::Target {
        // Panics with DEREF_ERR if the connection was already released.
        &self.live.as_ref().expect(DEREF_ERR).raw
    }
}
impl<C> DerefMut for PoolConnection<C>
where
    C: Connect,
{
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.live.as_mut().expect(DEREF_ERR).raw
    }
}
impl<C> Connection for PoolConnection<C>
where
    C: Connect,
{
    // Close the underlying connection instead of returning it to the pool.
    fn close(mut self) -> BoxFuture<'static, crate::Result<()>> {
        Box::pin(async move {
            // Take `live` so the Drop impl won't also try to release it.
            let live = self.live.take().expect("PoolConnection double-dropped");
            live.float(&self.pool).into_idle().close().await
        })
    }
    #[inline]
    fn ping(&mut self) -> BoxFuture<crate::Result<()>> {
        Box::pin(self.deref_mut().ping())
    }
}
/// Returns the connection to the [`Pool`][crate::pool::Pool] it was checked-out from.
impl<C> Drop for PoolConnection<C>
where
    C: Connect,
{
    fn drop(&mut self) {
        // `live` is None if `close()` already consumed the connection.
        if let Some(live) = self.live.take() {
            self.pool.release(live.float(&self.pool));
        }
    }
}
impl<C> Live<C> {
    // Wrap in `Floating`, acquiring a size guard that will decrement the
    // pool's connection count if the connection is dropped along the way.
    pub fn float(self, pool: &SharedPool<C>) -> Floating<Self> {
        Floating {
            inner: self,
            guard: DecrementSizeGuard::new(pool),
        }
    }
    // Mark the connection as idle starting now.
    pub fn into_idle(self) -> Idle<C> {
        Idle {
            live: self,
            since: Instant::now(),
        }
    }
}
// An Idle connection derefs to its inner Live value.
impl<C> Deref for Idle<C> {
    type Target = Live<C>;
    fn deref(&self) -> &Self::Target {
        &self.live
    }
}
impl<C> DerefMut for Idle<C> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.live
    }
}
impl<'s, C> Floating<'s, C> {
    // Take the connection out of pool accounting: cancelling the guard means
    // the pool's size counter is NOT decremented.
    pub fn into_leakable(self) -> C {
        self.guard.cancel();
        self.inner
    }
}
impl<'s, C> Floating<'s, Live<C>> {
    // A freshly established connection; `guard` carries the size slot that
    // was reserved for it.
    pub fn new_live(conn: C, guard: DecrementSizeGuard<'s>) -> Self {
        Self {
            inner: Live {
                raw: conn,
                created: Instant::now(),
            },
            guard,
        }
    }
    // Hand the connection to a caller as a `PoolConnection`. The guard is
    // cancelled because a checked-out connection still counts toward the
    // pool's size.
    pub fn attach(self, pool: &Arc<SharedPool<C>>) -> PoolConnection<C>
    where
        C: Connect,
    {
        let Floating { inner, guard } = self;
        debug_assert!(
            guard.same_pool(pool),
            "BUG: attaching connection to different pool"
        );
        guard.cancel();
        PoolConnection {
            live: Some(inner),
            pool: Arc::clone(pool),
        }
    }
    pub fn into_idle(self) -> Floating<'s, Idle<C>> {
        Floating {
            inner: self.inner.into_idle(),
            guard: self.guard,
        }
    }
}
impl<'s, C> Floating<'s, Idle<C>> {
    pub fn from_idle(idle: Idle<C>, pool: &'s SharedPool<C>) -> Self {
        Self {
            inner: idle,
            guard: DecrementSizeGuard::new(pool),
        }
    }
    pub async fn ping(&mut self) -> crate::Result<()>
    where
        C: Connection,
    {
        self.live.raw.ping().await
    }
    pub fn into_live(self) -> Floating<'s, Live<C>> {
        Floating {
            inner: self.inner.live,
            guard: self.guard,
        }
    }
    // Close the raw connection. The size guard drops un-cancelled here,
    // decrementing the pool's connection count.
    pub async fn close(self) -> crate::Result<()>
    where
        C: Connection,
    {
        // `guard` is dropped as intended
        self.inner.live.raw.close().await
    }
}
impl<C> Deref for Floating<'_, C> {
    type Target = C;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<C> DerefMut for Floating<'_, C> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
| true |
a77b3798d97273167fff3c59ab11db57f76cacd7
|
Rust
|
rcore-os/rcore-fs
|
/rcore-fs-mountfs/src/lib.rs
|
UTF-8
| 9,658 | 2.71875 | 3 |
[
"MIT"
] |
permissive
|
#![cfg_attr(not(any(test, feature = "std")), no_std)]
extern crate alloc;
#[macro_use]
extern crate log;
use alloc::{
boxed::Box,
collections::BTreeMap,
string::String,
sync::{Arc, Weak},
};
use core::{any::Any, future::Future, pin::Pin};
use rcore_fs::vfs::*;
use spin::RwLock;
#[cfg(test)]
mod tests;
/// The filesystem on which all the other filesystems are mounted
pub struct MountFS {
    /// The inner file system
    inner: Arc<dyn FileSystem>,
    /// All mounted children file systems, keyed by the INode id of the
    /// mountpoint directory within *this* filesystem
    mountpoints: RwLock<BTreeMap<INodeId, Arc<MountFS>>>,
    /// The mount point of this file system (`None` for the root filesystem)
    self_mountpoint: Option<Arc<MNode>>,
    /// Weak reference to self
    self_ref: Weak<MountFS>,
}
/// INode identifier within a filesystem (taken from `Metadata::inode`)
type INodeId = usize;
/// INode for `MountFS`
pub struct MNode {
    /// The inner INode
    inode: Arc<dyn INode>,
    /// Associated `MountFS`
    vfs: Arc<MountFS>,
    /// Weak reference to self
    self_ref: Weak<MNode>,
}
impl MountFS {
    /// Create a `MountFS` wrapper for file system `fs`
    pub fn new(fs: Arc<dyn FileSystem>) -> Arc<Self> {
        MountFS {
            inner: fs,
            mountpoints: RwLock::new(BTreeMap::new()),
            self_mountpoint: None,
            self_ref: Weak::default(),
        }
        .wrap()
    }
    /// Wrap pure `MountFS` with `Arc<..>`.
    /// Used in constructors.
    fn wrap(self) -> Arc<Self> {
        // Create an Arc, make a Weak from it, then put it into the struct.
        // It's a little tricky.
        let fs = Arc::new(self);
        let weak = Arc::downgrade(&fs);
        let ptr = Arc::into_raw(fs) as *mut Self;
        // SAFETY: `ptr` was just produced by `Arc::into_raw` and the Arc has
        // not been shared yet, so writing `self_ref` before re-wrapping with
        // `Arc::from_raw` cannot race with any other reader.
        unsafe {
            (*ptr).self_ref = weak;
            Arc::from_raw(ptr)
        }
    }
    /// Strong type version of `root_inode`
    pub fn mountpoint_root_inode(&self) -> Arc<MNode> {
        MNode {
            inode: self.inner.root_inode(),
            vfs: self.self_ref.upgrade().unwrap(),
            self_ref: Weak::default(),
        }
        .wrap()
    }
}
impl MNode {
/// Wrap pure `INode` with `Arc<..>`.
/// Used in constructors.
fn wrap(self) -> Arc<Self> {
// Create an Arc, make a Weak from it, then put it into the struct.
// It's a little tricky.
let inode = Arc::new(self);
let weak = Arc::downgrade(&inode);
let ptr = Arc::into_raw(inode) as *mut Self;
unsafe {
(*ptr).self_ref = weak;
Arc::from_raw(ptr)
}
}
/// Mount file system `fs` at this INode
pub fn mount(&self, fs: Arc<dyn FileSystem>) -> Result<Arc<MountFS>> {
let metadata = self.inode.metadata()?;
if metadata.type_ != FileType::Dir {
return Err(FsError::NotDir);
}
let new_fs = MountFS {
inner: fs,
mountpoints: RwLock::new(BTreeMap::new()),
self_mountpoint: Some(self.self_ref.upgrade().unwrap()),
self_ref: Weak::default(),
}
.wrap();
self.vfs
.mountpoints
.write()
.insert(metadata.inode, new_fs.clone());
Ok(new_fs)
}
/// Get the root INode of the mounted fs at here.
/// Return self if no mounted fs.
fn overlaid_inode(&self) -> Arc<MNode> {
let inode_id = self.metadata().unwrap().inode;
if let Some(sub_vfs) = self.vfs.mountpoints.read().get(&inode_id) {
sub_vfs.mountpoint_root_inode()
} else {
self.self_ref.upgrade().unwrap()
}
}
/// Is the root INode of its FS?
fn is_mountpoint_root(&self) -> bool {
self.inode.fs().root_inode().metadata().unwrap().inode
== self.inode.metadata().unwrap().inode
}
/// Strong type version of `create()`
pub fn create(&self, name: &str, type_: FileType, mode: u32) -> Result<Arc<Self>> {
Ok(MNode {
inode: self.inode.create(name, type_, mode)?,
vfs: self.vfs.clone(),
self_ref: Weak::default(),
}
.wrap())
}
/// Strong type version of `find()`.
///
/// `root`: treat this node as the lookup root, so `..` cannot escape it.
/// Lookups may cross mounted-filesystem borders in both directions.
pub fn find(&self, root: bool, name: &str) -> Result<Arc<Self>> {
    match name {
        // Empty name and "." both resolve to this node itself.
        "" | "." => Ok(self.self_ref.upgrade().unwrap()),
        ".." => {
            // Going Up
            // We need to check these things:
            // 1. Is going forward allowed, considering the current root?
            // 2. Is going forward trespassing the filesystem border,
            //    thus requires falling back to parent of original_mountpoint?
            // TODO: check going up.
            if root {
                // Clamp at the lookup root: "/.." is "/".
                Ok(self.self_ref.upgrade().unwrap())
            } else if self.is_mountpoint_root() {
                // Here is mountpoint.
                match &self.vfs.self_mountpoint {
                    // Cross the border: ask the node we are mounted on.
                    Some(inode) => inode.find(root, ".."),
                    // root fs
                    None => Ok(self.self_ref.upgrade().unwrap()),
                }
            } else {
                // Not trespassing filesystem border. Parent and myself in the same filesystem.
                Ok(MNode {
                    inode: self.inode.find(name)?, // Going up is handled by the filesystem. A better API?
                    vfs: self.vfs.clone(),
                    self_ref: Weak::default(),
                }
                .wrap())
            }
        }
        _ => {
            // Going down may trespass the filesystem border.
            // An INode replacement is required here.
            // Resolve on the overlaid (possibly mounted) inode, then overlay
            // again in case the child itself is a mountpoint.
            Ok(MNode {
                inode: self.overlaid_inode().inode.find(name)?,
                vfs: self.vfs.clone(),
                self_ref: Weak::default(),
            }
            .wrap()
            .overlaid_inode())
        }
    }
}
/// If `child` is a child of `self`, return its name.
///
/// Scans directory entries by index; the unbounded `for index in 0..` loop
/// terminates when `get_entry` returns `Err` past the last entry (the `?`
/// propagates it), or earlier with `Ok(name)` on a match.
pub fn find_name_by_child(&self, child: &Arc<MNode>) -> Result<String> {
    for index in 0.. {
        let name = self.inode.get_entry(index)?;
        match name.as_ref() {
            // Skip self/parent pseudo-entries.
            "." | ".." => {}
            _ => {
                let queryback = self.find(false, &name)?.overlaid_inode();
                // TODO: mountpoint check!
                debug!("checking name {}", name);
                // Match on same VFS + same inode id (inode ids are only
                // unique within one filesystem).
                if Arc::ptr_eq(&queryback.vfs, &child.vfs)
                    && queryback.inode.metadata()?.inode == child.inode.metadata()?.inode
                {
                    return Ok(name);
                }
            }
        }
    }
    Err(FsError::EntryNotFound)
}
}
impl FileSystem for MountFS {
    /// Flush this filesystem, then every filesystem mounted beneath it,
    /// stopping at the first error.
    fn sync(&self) -> Result<()> {
        self.inner.sync()?;
        self.mountpoints
            .read()
            .values()
            .try_for_each(|sub_fs| sub_fs.sync())
    }

    /// Root inode of the *whole* VFS tree: delegate upward while this fs is
    /// mounted somewhere, otherwise this fs's own root.
    fn root_inode(&self) -> Arc<dyn INode> {
        if let Some(mount_inode) = &self.self_mountpoint {
            mount_inode.vfs.root_inode()
        } else {
            self.mountpoint_root_inode()
        }
    }

    fn info(&self) -> FsInfo {
        self.inner.info()
    }
}
// unwrap `MNode` and forward methods to inner except `find()`
impl INode for MNode {
    fn read_at(&self, offset: usize, buf: &mut [u8]) -> Result<usize> {
        self.inode.read_at(offset, buf)
    }
    fn write_at(&self, offset: usize, buf: &[u8]) -> Result<usize> {
        self.inode.write_at(offset, buf)
    }
    fn poll(&self) -> Result<PollStatus> {
        self.inode.poll()
    }
    /// Poll the events, return a bitmap of events, async version.
    fn async_poll<'a>(
        &'a self,
    ) -> Pin<Box<dyn Future<Output = Result<PollStatus>> + Send + Sync + 'a>> {
        self.inode.async_poll()
    }
    fn metadata(&self) -> Result<Metadata> {
        self.inode.metadata()
    }
    fn set_metadata(&self, metadata: &Metadata) -> Result<()> {
        self.inode.set_metadata(metadata)
    }
    fn sync_all(&self) -> Result<()> {
        self.inode.sync_all()
    }
    fn sync_data(&self) -> Result<()> {
        self.inode.sync_data()
    }
    fn resize(&self, len: usize) -> Result<()> {
        self.inode.resize(len)
    }
    fn create(&self, name: &str, type_: FileType, mode: u32) -> Result<Arc<dyn INode>> {
        // Delegates to the strong-typed inherent `create`, then upcasts.
        Ok(self.create(name, type_, mode)?)
    }
    fn link(&self, name: &str, other: &Arc<dyn INode>) -> Result<()> {
        self.inode.link(name, other)
    }
    fn unlink(&self, name: &str) -> Result<()> {
        let inode_id = self.inode.find(name)?.metadata()?.inode;
        // target INode is being mounted
        if self.vfs.mountpoints.read().contains_key(&inode_id) {
            return Err(FsError::Busy);
        }
        self.inode.unlink(name)
    }
    fn move_(&self, old_name: &str, target: &Arc<dyn INode>, new_name: &str) -> Result<()> {
        self.inode.move_(old_name, target, new_name)
    }
    fn find(&self, name: &str) -> Result<Arc<dyn INode>> {
        // Non-root lookup; mountpoint crossing handled by the inherent `find`.
        Ok(self.find(false, name)?)
    }
    fn get_entry(&self, id: usize) -> Result<String> {
        self.inode.get_entry(id)
    }
    fn get_entry_with_metadata(&self, id: usize) -> Result<(Metadata, String)> {
        self.inode.get_entry_with_metadata(id)
    }
    fn io_control(&self, cmd: u32, data: usize) -> Result<usize> {
        self.inode.io_control(cmd, data)
    }
    fn mmap(&self, area: MMapArea) -> Result<()> {
        self.inode.mmap(area)
    }
    fn fs(&self) -> Arc<dyn FileSystem> {
        self.vfs.clone()
    }
    fn as_any_ref(&self) -> &dyn Any {
        self.inode.as_any_ref()
    }
}
| true |
286963967d8c5ad9fba6b032392130ef6bd3cf3e
|
Rust
|
fiirecore-deprecated/firecore-world-gui
|
/src/gui/text_window.rs
|
UTF-8
| 1,972 | 2.703125 | 3 |
[] |
no_license
|
use firecore_game::util::{Entity, Reset, Completable, text::Message};
use firecore_game::macroquad::prelude::{Vec2, Texture2D};
use firecore_game::data::player::PlayerSave;
use firecore_game::gui::text::DynamicText;
use firecore_game::text::process_messages;
use firecore_game::graphics::{byte_texture, draw};
/// Dialogue/message box drawn at the bottom of the world screen.
pub struct TextWindow {
    // Whether the window is currently shown and updating.
    alive: bool,
    // Top-left corner of the window background, in screen coordinates.
    pos: Vec2,
    // Frame texture for the message box.
    background: Texture2D,
    // Scrolling text widget rendered inside the frame.
    text: DynamicText,
}
impl TextWindow {
    /// Rewind the text display to the beginning.
    pub fn reset_text(&mut self) {
        self.text.reset();
    }

    /// Replace the message queue shown by this window.
    pub fn set_text(&mut self, messages: Vec<Message>) {
        self.text.messages = Some(messages);
    }

    /// Substitute player-specific data into any pending messages.
    pub fn on_start(&mut self, player_save: &PlayerSave) {
        match self.text.messages.as_mut() {
            Some(messages) => process_messages(player_save, messages),
            None => {}
        }
    }

    /// Advance the text animation while the window is shown.
    pub fn update(&mut self, delta: f32) {
        if !self.alive {
            return;
        }
        self.text.update(delta);
    }

    /// Draw the background frame and the current text, if visible.
    pub fn render(&self) {
        if !self.alive {
            return;
        }
        draw(self.background, self.pos.x, self.pos.y);
        self.text.render();
    }

    /// Forward input handling to the text widget.
    pub fn input(&mut self) {
        self.text.input();
    }
}
impl Default for TextWindow {
    fn default() -> Self {
        // Fixed position of the message box in game-screen coordinates.
        let pos = Vec2::new(6.0, 116.0);
        Self {
            // Hidden until spawned.
            alive: false,
            pos,
            // Frame texture is baked into the binary at compile time.
            background: byte_texture(include_bytes!("../../assets/gui/message.png")),
            // Text is drawn at an (11, 5) offset relative to the frame origin.
            text: DynamicText::new(Vec2::new(11.0, 5.0), pos),
        }
    }
}
impl Entity for TextWindow {
    // Show the window: reset its text state and spawn the inner widget.
    fn spawn(&mut self) {
        self.alive = true;
        self.reset();
        self.text.spawn();
    }
    // Hide the window and despawn the inner widget.
    fn despawn(&mut self) {
        self.alive = false;
        self.text.despawn();
    }
    fn is_alive(&self) -> bool {
        self.alive
    }
}
impl Reset for TextWindow {
    // Resetting the window just rewinds the text widget.
    fn reset(&mut self) {
        self.text.reset();
    }
}
impl Completable for TextWindow {
    // The window is finished when all queued text has been shown.
    fn is_finished(&self) -> bool {
        self.text.is_finished()
    }
}
| true |
89833382118e274c856bf5551494194809c33f4d
|
Rust
|
xhroot/google-code-jam
|
/2013/codejam2013_1a_bsearch.rs
|
UTF-8
| 1,295 | 3.0625 | 3 |
[] |
no_license
|
// using Mozilla Rust
// rust run prog.rs < prog.in > prog.out
use core::io::ReaderUtil;
use core::from_str::FromStr::from_str;
// Google Code Jam 2013 1A "Bullseye": find the max number of rings paintable
// with `t` units of paint, inner radius `r`, via doubling + binary search.
// NOTE: written in pre-1.0 Mozilla Rust (0.x) — `int`, `~[]`, `for ... |c|`
// closures and `core::io` no longer exist; kept byte-for-byte for history.
fn main() {
    // Lambda: convert string to integer/float; unsafe.
    let atoi = |num_str: &str| from_str::<int>(num_str).get();
    let reader = io::stdin();
    // Get number of cases.
    let T = atoi(reader.read_line());
    // Loop through cases.
    for int::range(0, T) |c| {
        // Read entire line into int array.
        let line1 = reader.read_line();
        let mut rt = ~[];
        for str::each_word(line1) |word| { rt.push(atoi(word)); }
        let r = rt[0];
        let t = rt[1];
        let mut high = 1;
        let mut low = 0;
        // Double the ring size till we surpass total paint.
        while t > computeTotalPaint(r, high) {
            low = high;
            high *= 2;
        }
        let mut rings = 0;
        // Answer now bounded between low/high. Binary search.
        while low <= high {
            let mut mid = (high + low)/2;
            match (t, computeTotalPaint(r, mid)) {
                (x, y) if x > y => { low = mid+1; }
                (x, y) if x < y => { high = mid-1; }
                (_, _) => { rings = mid; break; }
            }
            // NOTE(review): overwritten every iteration; relies on `high`
            // converging to the largest feasible ring count when no exact
            // match exists — presumably intended, confirm against judge data.
            rings = high;
        }
        io::println(fmt!("Case #%i: %i", c+1, rings));
    }
}
// Paint needed for `rings` concentric rings whose innermost radius is
// `radius`: closed form of the arithmetic series of ring areas.
// NOTE(review): uses the dead `int` type (pre-1.0 Rust); formula assumed to
// match the Bullseye problem statement — confirm before reuse.
fn computeTotalPaint(radius: int, rings: int) -> int {
    rings * (2*radius - 1 + 2*rings)
}
| true |
a38bc67f9395589ed6d91debfc93af8c06c57c88
|
Rust
|
rajin-s/specs
|
/src/compiler/_old/flatten/definition_names.rs
|
UTF-8
| 8,263 | 2.71875 | 3 |
[] |
no_license
|
use crate::compiler::binding_state::BindingState;
use crate::compiler::internal::*;
use crate::compiler::utilities::TempNameGenerator;
use std::collections::HashMap;
/* Pass: flatten::definition_names
- Makes all definition names unique (such that they can appear as top-level definitions in C)
- ex. nested fn Foo { fn Bar } => fn Foo { fn Foo/Bar }
- Updates variable references to renamed definitions (respecting shadowing / scope binding rules)
- Leaves definition nodes in-place for flatten::definitions to extract
*/
// Compiler pass instance
// Compiler pass instance
pub struct Pass
{
    // Generates unique names ("anonymous_scope_N") for unnamed scopes.
    anonymous_names: TempNameGenerator,
}
impl Pass
{
pub fn new() -> Self
{
Self {
anonymous_names: TempNameGenerator::new("anonymous_scope"),
}
}
}
// Where the current traversal state sits in the AST; decides whether a
// sequence introduces a new anonymous scope name.
#[derive(PartialEq, Eq, Clone, Copy)]
pub enum StateContext
{
    None,         // root of the program
    Sequence,     // ordinary block/sequence
    Function,     // directly inside a function definition
    FunctionBody, // the sequence forming a function's body
    Class,        // inside a class definition
}
// Per-scope traversal data: the context kind plus the fully-qualified scope
// name accumulated so far (e.g. "Foo/Bar").
#[derive(Clone)]
pub struct StateData
{
    context: StateContext,
    scope_name: String,
}
impl Default for StateData
{
fn default() -> Self
{
Self {
context: StateContext::None,
scope_name: String::new(),
}
}
}
impl StateData
{
pub fn new(context: StateContext, scope_name: String) -> Self
{
Self {
context,
scope_name,
}
}
}
// Binding state mapping original names to their flattened (scoped) names.
type State = BindingState<String, StateData>;
/* -------------------------------------------------------------------------- */
/* Pass */
/* -------------------------------------------------------------------------- */
impl CompilerPass<State> for Pass
{
    // Modify a node given some state
    // - All child nodes have already been transformed at this point
    fn transform(
        &mut self,
        node: &mut Node,
        state: Indirect<State>,
        _messages: &mut PassMessageContext,
    )
    {
        match node
        {
            Node::Variable(variable) =>
            {
                // Rename variables referencing definitions
                if let Some(new_name) = state.borrow().get(variable.get_name())
                {
                    // An empty replacement marks a local binding shadowing the
                    // definition; leave the variable untouched in that case.
                    if !new_name.is_empty()
                    {
                        *variable.get_name_mut() = new_name;
                    }
                }
            }
            Node::Binding(binding) =>
            {
                // Potentially shadow definition names in the current scope
                // (empty string = "shadowed, do not rename").
                state
                    .borrow_mut()
                    .add_binding(binding.get_name().clone(), String::new());
            }
            Node::Function(function) =>
            {
                // Rename the function
                if let Some(new_name) = state.borrow().get_definition(function.get_name())
                {
                    *function.get_name_mut() = new_name;
                }
            }
            Node::Class(class) =>
            {
                // TODO: Handle classes
                // Rename the class
                if let Some(new_name) = state.borrow().get_definition(class.get_name())
                {
                    *class.get_name_mut() = new_name;
                }
            }
            _ =>
            {}
        }
    }
    // Get the state to use for a node's children based on the current state
    // - Child nodes have not yet been visited
    fn get_child_states(
        &mut self,
        node: &Node,
        parent: Indirect<State>,
        _messages: &mut PassMessageContext,
    ) -> Vec<Indirect<State>>
    {
        use definition::{Argument, Class, Function};
        let new_state = match node
        {
            Node::Function(function) =>
            {
                // Get the new name of the function scope
                let new_scope_name = {
                    let parent = parent.borrow();
                    let parent_data = parent.get_data();
                    let parent_scope_name = &parent_data.scope_name;
                    get_scoped_name(parent_scope_name, function.get_name())
                };
                // Argument bindings can shadow external definitions
                let argument_bindings =
                    State::get_bindings_from_function(function, |_arg: &Argument| String::new());
                // Create the new state and keep track of the current scope name
                let new_state = State::extend_with_bindings(
                    parent,
                    false,
                    argument_bindings,
                    StateData::new(StateContext::Function, new_scope_name),
                );
                Indirect::new(new_state)
            }
            Node::Sequence(sequence) =>
            {
                let (new_context, new_scope_name) = {
                    let parent = parent.borrow();
                    let parent_data = parent.get_data();
                    let parent_context = parent_data.context;
                    let parent_scope_name = &parent_data.scope_name;
                    match parent_context
                    {
                        StateContext::Function =>
                        {
                            // Don't add a new anonymous scope name if the sequence is the body of a function
                            (StateContext::FunctionBody, parent_scope_name.clone())
                        }
                        StateContext::None =>
                        {
                            // Don't add a new anonymous scope name if the sequence is the root node
                            (StateContext::Sequence, parent_scope_name.clone())
                        }
                        _ =>
                        {
                            // Create a new anonymous scope name
                            let anonymous_name = self.anonymous_names.next();
                            (StateContext::Sequence, get_scoped_name(&parent_scope_name, &anonymous_name))
                        }
                    }
                };
                // Get definition names inside the sequence
                let get_scoped_function_name =
                    |function: &Function| get_scoped_name(&new_scope_name, function.get_name());
                let get_scoped_class_name =
                    |class: &Class| get_scoped_name(&new_scope_name, class.get_name());
                let new_definitions = State::get_definitions_from_nodes(
                    sequence.get_nodes(),
                    get_scoped_function_name,
                    get_scoped_class_name,
                );
                // Create the new state and keep track of the current scope name
                let new_state = State::extend_with_definitions(
                    parent,
                    true,
                    new_definitions,
                    StateData::new(new_context, new_scope_name),
                );
                Indirect::new(new_state)
            }
            Node::Class(class) =>
            {
                // TODO: Handle classes
                // Get the new name of the function scope
                let new_scope_name = {
                    let parent = parent.borrow();
                    let parent_data = parent.get_data();
                    let parent_scope_name = &parent_data.scope_name;
                    get_scoped_name(parent_scope_name, class.get_name())
                };
                // Create the new state and keep track of the current scope name
                let new_state = State::extend_with_bindings(
                    parent,
                    false,
                    HashMap::new(),
                    StateData::new(StateContext::Class, new_scope_name),
                );
                Indirect::new(new_state)
            }
            // Other node kinds inherit the parent's state unchanged.
            _ => parent.clone(),
        };
        vec![new_state]
    }
    // Get the name of the pass (for debugging)
    fn get_name(&self) -> String
    {
        "FlattenDefinitionNames".to_owned()
    }
}
/// Join `name` onto `scope_name` with a `/` separator, e.g. "Foo" + "Bar"
/// => "Foo/Bar". An empty scope yields `name` unchanged.
///
/// Takes `&str` instead of `&String` (idiomatic; every existing call site
/// coerces automatically via deref), removing a needless double indirection.
fn get_scoped_name(scope_name: &str, name: &str) -> String
{
    if scope_name.is_empty()
    {
        name.to_owned()
    }
    else
    {
        format!("{}/{}", scope_name, name)
    }
}
| true |
66e3b69d9d470e2f688a8a2a1749b85470261882
|
Rust
|
torkleyy/nitric
|
/crates/nitric-component/src/impls/usize_allocator.rs
|
UTF-8
| 7,315 | 3.484375 | 3 |
[
"MIT",
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
use crate::{
bit_set::BitSet,
error::{InvalidIdError, OomError},
impls::FlatBitSet,
};
/// A simple, non-atomic allocator that tries to return a free `usize`, bumps
/// the counter otherwise.
/// A simple, non-atomic allocator that tries to return a free `usize`, bumps
/// the counter otherwise.
#[derive(Debug, Default)]
pub struct UsizeAllocator {
    /// Valid IDs
    alive: FlatBitSet,
    // High-water mark: next never-used ID. Only grows; freed IDs are
    // recycled via `killed` instead of decrementing this.
    counter: usize,
    // Free list of merged-deleted IDs, reused by `create`.
    killed: Vec<usize>,
    /// IDs flagged for deletion
    flagged: FlatBitSet,
}
impl UsizeAllocator {
    /// Creates a fresh allocator.
    pub fn new() -> Self {
        Self::default()
    }

    /// Post-increment the counter: returns the previous value, or `None`
    /// once the counter would overflow `usize`.
    #[inline]
    fn checked_inc(&mut self) -> Option<usize> {
        let next = self.counter.checked_add(1)?;
        Some(core::mem::replace(&mut self.counter, next))
    }
}
impl UsizeAllocator {
    /// Mirrors `Allocator::is_valid`
    #[inline]
    pub fn is_valid(&self, id: usize) -> bool {
        self.alive.contains(id)
    }
    /// Mirrors `Allocator::num_valid`
    pub fn num_valid(&self) -> usize {
        self.alive.count()
    }
    /// Mirrors `Allocator::num_valid_hint`
    ///
    /// Lower bound 0 (all IDs may be deleted); upper bound is the
    /// high-water mark of ever-created IDs.
    #[inline]
    pub fn num_valid_hint(&self) -> (usize, Option<usize>) {
        (0, Some(self.counter))
    }
    /// Mirrors `Create::create`
    ///
    /// Reuses a previously killed ID when available, otherwise bumps the
    /// counter; `OomError` once the counter space is exhausted.
    #[inline]
    pub fn create(&mut self) -> Result<usize, OomError> {
        let id = self
            .killed
            .pop()
            .or_else(|| self.checked_inc())
            .ok_or(OomError)?;
        self.alive.add(id);
        Ok(id)
    }
    /// Mirrors `Delete::is_delete`
    #[inline]
    pub fn is_flagged(&self, id: usize) -> bool {
        self.flagged.contains(id)
    }
    /// Mirrors `Delete::delete`
    ///
    /// Caller must guarantee `id` is valid; flags it for deletion. The ID
    /// stays valid until `merge_deleted` runs.
    #[inline]
    pub fn delete_valid(&mut self, id: usize) {
        debug_assert!(self.alive.contains(id));
        self.flagged.add(id);
    }
    /// Mirrors `Delete::try_delete`
    #[inline]
    pub fn try_delete(&mut self, id: usize) -> Result<(), InvalidIdError<usize>> {
        // `if` instead of `match` on a bool (clippy::match_bool); behavior
        // is unchanged.
        if self.is_valid(id) {
            self.delete_valid(id);
            Ok(())
        } else {
            Err(InvalidIdError(id))
        }
    }
    /// Mirrors `MergeDeleted::merge_deleted`
    ///
    /// Moves all flagged IDs onto the free list and returns the slice of
    /// IDs deleted by this call.
    pub fn merge_deleted(&mut self) -> &[usize] {
        let start = self.killed.len();
        while let Some(id) = self.flagged.pop_front() {
            self.alive.remove(id);
            self.killed.push(id);
        }
        &self.killed[start..]
    }
}
// Unit tests exercising the full create / flag / merge lifecycle, including
// the two-phase delete semantics (IDs stay valid until `merge_deleted`).
#[cfg(test)]
mod tests {
    use super::*;
    #[test]
    fn new() {
        let empty = UsizeAllocator::new();
        for i in 0..100 {
            assert_eq!(empty.is_valid(i), false);
        }
    }
    #[test]
    fn checked_inc() {
        let mut empty = UsizeAllocator::new();
        assert_eq!(empty.counter, 0);
        assert_eq!(empty.checked_inc(), Some(0));
        assert_eq!(empty.checked_inc(), Some(1));
        assert_eq!(empty.checked_inc(), Some(2));
        // Saturate the counter to check overflow returns None.
        empty.counter = usize::max_value();
        assert_eq!(empty.checked_inc(), None);
    }
    #[test]
    fn is_valid() {
        let mut alloc = UsizeAllocator::new();
        for i in 0..3 {
            alloc.create().unwrap();
            assert_eq!(alloc.is_valid(i), true);
        }
        assert_eq!(alloc.is_valid(3), false);
        // Flagged IDs remain valid until merged.
        for i in 0..3 {
            assert_eq!(alloc.is_valid(i), true);
            alloc.delete_valid(i);
            assert_eq!(alloc.is_valid(i), true);
        }
        alloc.merge_deleted();
        for i in 0..3 {
            assert_eq!(alloc.is_valid(i), false);
        }
        for _ in 0..3 {
            alloc.create().unwrap();
        }
        for i in 0..3 {
            assert_eq!(alloc.is_valid(i), true);
        }
        assert_eq!(alloc.is_valid(3), false);
        for i in 0..3 {
            assert_eq!(alloc.is_valid(i), true);
            alloc.delete_valid(i);
            assert_eq!(alloc.is_valid(i), true);
        }
    }
    #[test]
    fn num_valid() {
        let mut alloc = UsizeAllocator::new();
        for i in 0..3 {
            assert_eq!(alloc.num_valid(), i);
            alloc.create().unwrap();
            assert_eq!(alloc.num_valid(), i + 1);
        }
        assert_eq!(alloc.num_valid(), 3);
        // Flagging alone does not change the valid count.
        for i in 0..3 {
            alloc.delete_valid(i);
            assert_eq!(alloc.num_valid(), 3);
        }
        alloc.merge_deleted();
        assert_eq!(alloc.num_valid(), 0);
        let a = alloc.create().unwrap();
        assert_eq!(alloc.num_valid(), 1);
        let b = alloc.create().unwrap();
        alloc.delete_valid(b);
        assert_eq!(alloc.num_valid(), 2);
        alloc.merge_deleted();
        assert_eq!(alloc.num_valid(), 1);
        alloc.delete_valid(a);
        alloc.merge_deleted();
        for i in 0..3 {
            assert_eq!(alloc.num_valid(), i);
            alloc.create().unwrap();
            assert_eq!(alloc.num_valid(), i + 1);
        }
        assert_eq!(alloc.num_valid(), 3);
        for i in 0..3 {
            alloc.delete_valid(i);
            assert_eq!(alloc.num_valid(), 3);
        }
        alloc.merge_deleted();
        assert_eq!(alloc.num_valid(), 0);
    }
    #[test]
    fn is_flagged() {
        let mut alloc = UsizeAllocator::new();
        for i in 0..100 {
            assert_eq!(alloc.is_flagged(i), false);
        }
        for i in 0..3 {
            alloc.create().unwrap();
            assert_eq!(alloc.is_flagged(i), false);
        }
        for i in 0..3 {
            assert_eq!(alloc.is_flagged(i), false);
            alloc.delete_valid(i);
            assert_eq!(alloc.is_flagged(i), true);
        }
        // Merging clears the flags.
        alloc.merge_deleted();
        for i in 0..3 {
            assert_eq!(alloc.is_flagged(i), false);
        }
        for _ in 0..3 {
            alloc.create().unwrap();
        }
        for i in 0..3 {
            assert_eq!(alloc.is_flagged(i), false);
        }
        assert_eq!(alloc.is_flagged(3), false);
        for i in 0..3 {
            assert_eq!(alloc.is_flagged(i), false);
            alloc.delete_valid(i);
            assert_eq!(alloc.is_flagged(i), true);
        }
    }
    #[test]
    fn num_valid_hint() {
        let mut alloc = UsizeAllocator::new();
        assert_eq!(alloc.num_valid_hint(), (0, Some(0)));
        alloc.create().unwrap();
        assert_eq!(alloc.num_valid_hint(), (0, Some(1)));
        alloc.create().unwrap();
        alloc.create().unwrap();
        alloc.create().unwrap();
        assert_eq!(alloc.num_valid_hint(), (0, Some(4)));
        // The upper bound is a high-water mark; deletion never lowers it.
        alloc.delete_valid(2);
        assert_eq!(alloc.num_valid_hint(), (0, Some(4)));
        alloc.merge_deleted();
        assert_eq!(alloc.num_valid_hint(), (0, Some(4)));
    }
    #[test]
    fn try_delete() {
        let mut alloc = UsizeAllocator::new();
        for i in 0..100 {
            assert_eq!(alloc.try_delete(i), Err(InvalidIdError(i)));
        }
        alloc.create().unwrap();
        alloc.create().unwrap();
        alloc.create().unwrap();
        // Double-flagging is allowed while the ID is still valid.
        for i in 0..3 {
            assert_eq!(alloc.try_delete(i), Ok(()));
            assert_eq!(alloc.try_delete(i), Ok(()));
        }
        alloc.merge_deleted();
        for i in 0..3 {
            assert_eq!(alloc.try_delete(i), Err(InvalidIdError(i)));
        }
    }
}
| true |
5236d94f7d064c7b5119b8a75d0a1fe679258498
|
Rust
|
dsherret/swc
|
/ecmascript/minifier/src/compress/optimize/ops.rs
|
UTF-8
| 13,451 | 2.9375 | 3 |
[
"MIT",
"Apache-2.0"
] |
permissive
|
use super::Optimizer;
use crate::{
compress::util::negate,
mode::Mode,
util::{make_bool, ValueExt},
};
use swc_atoms::js_word;
use swc_common::{util::take::Take, EqIgnoreSpan};
use swc_ecma_ast::*;
use swc_ecma_utils::{ident::IdentLike, ExprExt, Type, Value};
use Value::Known;
impl<M> Optimizer<'_, M>
where
    M: Mode,
{
    ///
    /// - `'12' === `foo` => '12' == 'foo'`
    pub(super) fn optimize_bin_operator(&mut self, e: &mut BinExpr) {
        if !self.options.comparisons {
            return;
        }
        match e.op {
            op!("===") | op!("==") | op!("!==") | op!("!=") => {
                // `x === x` where typeof x is known to be object/function:
                // the comparison is a constant true/false.
                if e.left.is_ident() && e.left.eq_ignore_span(&e.right) {
                    let id: Ident = e.left.clone().ident().unwrap();
                    if let Some(t) = self.typeofs.get(&id.to_id()) {
                        match *t {
                            js_word!("object") | js_word!("function") => {
                                e.left = Box::new(make_bool(
                                    e.span,
                                    e.op == op!("===") || e.op == op!("=="),
                                ));
                                e.right.take();
                                return;
                            }
                            _ => {}
                        }
                    }
                }
            }
            _ => {}
        }
        if e.op == op!("===") || e.op == op!("!==") {
            // Same operand on both sides: strict comparison degrades to loose.
            if (e.left.is_ident() || e.left.is_member()) && e.left.eq_ignore_span(&e.right) {
                self.changed = true;
                tracing::debug!("Reducing comparison of same variable ({})", e.op);
                e.op = if e.op == op!("===") {
                    op!("==")
                } else {
                    op!("!=")
                };
                return;
            }
        }
        let lt = e.left.get_type();
        let rt = e.right.get_type();
        if e.op == op!("===") {
            if let Known(lt) = lt {
                if let Known(rt) = rt {
                    if lt == rt {
                        e.op = op!("==");
                        self.changed = true;
                        tracing::debug!(
                            "Reduced `===` to `==` because types of operands are identical"
                        )
                    }
                }
            }
        }
    }
    ///
    /// - `1 == 1` => `true`
    pub(super) fn optimize_lit_cmp(&mut self, n: &mut BinExpr) -> Option<Expr> {
        match n.op {
            op!("==") | op!("!=") => {
                // Abort if types differ, or one of them is unknown.
                if n.left.get_type().opt()? != n.right.get_type().opt()? {
                    return None;
                }
                let l = n.left.as_pure_bool().opt()?;
                let r = n.right.as_pure_bool().opt()?;
                let value = if n.op == op!("==") { l == r } else { l != r };
                tracing::debug!("Optimizing: literal comparison => bool");
                self.changed = true;
                return Some(Expr::Lit(Lit::Bool(Bool {
                    span: n.span,
                    value,
                })));
            }
            _ => {}
        }
        None
    }
    ///
    /// - `!!(a in b)` => `a in b`
    /// - `!!(function() {})()` => `!(function() {})()`
    pub(super) fn optimize_bangbang(&mut self, e: &mut Expr) {
        match e {
            Expr::Unary(UnaryExpr {
                op: op!("!"), arg, ..
            }) => match &mut **arg {
                Expr::Unary(UnaryExpr {
                    op: op!("!"), arg, ..
                }) => match &**arg {
                    // Double negation is a no-op only when the inner
                    // expression is already boolean-typed.
                    Expr::Unary(UnaryExpr { op: op!("!"), .. })
                    | Expr::Bin(BinExpr { op: op!("in"), .. })
                    | Expr::Bin(BinExpr {
                        op: op!("instanceof"),
                        ..
                    })
                    | Expr::Bin(BinExpr { op: op!("=="), .. })
                    | Expr::Bin(BinExpr { op: op!("!="), .. })
                    | Expr::Bin(BinExpr { op: op!("==="), .. })
                    | Expr::Bin(BinExpr { op: op!("!=="), .. })
                    | Expr::Bin(BinExpr { op: op!("<="), .. })
                    | Expr::Bin(BinExpr { op: op!("<"), .. })
                    | Expr::Bin(BinExpr { op: op!(">="), .. })
                    | Expr::Bin(BinExpr { op: op!(">"), .. }) => {
                        if let Known(Type::Bool) = arg.get_type() {
                            self.changed = true;
                            tracing::debug!("Optimizing: `!!expr` => `expr`");
                            *e = *arg.take();
                        }
                        return;
                    }
                    _ => {}
                },
                _ => {}
            },
            _ => {}
        }
    }
    /// TODO: Optimize based on the type.
    pub(super) fn negate_twice(&mut self, e: &mut Expr) {
        self.negate(e);
        self.negate(e);
    }
    pub(super) fn negate(&mut self, e: &mut Expr) {
        self.changed = true;
        negate(e, self.ctx.in_bool_ctx)
    }
    /// This method does
    ///
    /// - `x *= 3` => `x = 3 * x`
    /// - `x = 3 | x` `x |= 3`
    /// - `x = 3 & x` => `x &= 3;`
    /// - `x ^= 3` => `x = 3 ^ x`
    pub(super) fn compress_bin_assignment_to_right(&mut self, e: &mut AssignExpr) {
        // TODO: Handle pure properties.
        let lhs = match &e.left {
            PatOrExpr::Expr(e) => match &**e {
                Expr::Ident(i) => i,
                _ => return,
            },
            PatOrExpr::Pat(p) => match &**p {
                Pat::Ident(i) => &i.id,
                _ => return,
            },
        };
        let (op, left) = match &mut *e.right {
            Expr::Bin(BinExpr {
                left, op, right, ..
            }) => match &**right {
                Expr::Ident(r) if lhs.sym == r.sym && lhs.span.ctxt == r.span.ctxt => {
                    // We need this check because a function call like below can change value of
                    // operand.
                    //
                    // x = g() * x;
                    match &**left {
                        Expr::This(..) | Expr::Ident(..) | Expr::Lit(..) => {}
                        _ => return,
                    }
                    (op, left)
                }
                _ => return,
            },
            _ => return,
        };
        // Only commutative operators are safe to fold this way.
        let op = match op {
            BinaryOp::Mul => {
                op!("*=")
            }
            BinaryOp::BitOr => {
                op!("|=")
            }
            BinaryOp::BitXor => {
                op!("^=")
            }
            BinaryOp::BitAnd => {
                op!("&=")
            }
            _ => return,
        };
        tracing::debug!("Compressing: `e = 3 & e` => `e &= 3`");
        self.changed = true;
        e.op = op;
        e.right = left.take();
    }
    /// Remove meaningless literals in a binary expressions.
    ///
    /// # Parameters
    ///
    /// - `in_bool_ctx`: True for expressions casted to bool.
    ///
    /// # Examples
    ///
    /// - `x() && true` => `!!x()`
    pub(super) fn compress_logical_exprs_as_bang_bang(&mut self, e: &mut Expr, in_bool_ctx: bool) {
        if !self.options.conditionals && !self.options.reduce_vars {
            return;
        }
        let bin = match e {
            Expr::Bin(bin) => bin,
            _ => return,
        };
        match bin.op {
            op!("&&") | op!("||") => {
                // Operands of && / || are themselves boolean contexts.
                self.compress_logical_exprs_as_bang_bang(&mut bin.left, true);
                self.compress_logical_exprs_as_bang_bang(&mut bin.right, true);
            }
            _ => {}
        }
        let lt = bin.left.get_type();
        if !in_bool_ctx {
            match lt {
                // Don't change type
                Known(Type::Bool) => {}
                _ => return,
            }
        }
        let rt = bin.right.get_type();
        match rt {
            Known(Type::Bool) => {}
            _ => return,
        }
        match bin.op {
            op!("&&") => {
                let rb = bin.right.as_pure_bool();
                let rb = match rb {
                    Value::Known(v) => v,
                    _ => return,
                };
                if rb {
                    self.changed = true;
                    tracing::debug!("Optimizing: e && true => !!e");
                    self.negate_twice(&mut bin.left);
                    *e = *bin.left.take();
                } else {
                    self.changed = true;
                    tracing::debug!("Optimizing: e && false => e");
                    // NOTE(review): replaces `e && false` with `e` without
                    // negating; presumably sound only under the type checks
                    // above — confirm against upstream swc behavior.
                    *e = *bin.left.take();
                }
            }
            op!("||") => {
                let rb = bin.right.as_pure_bool();
                let rb = match rb {
                    Value::Known(v) => v,
                    _ => return,
                };
                if !rb {
                    self.changed = true;
                    tracing::debug!("Optimizing: e || false => !!e");
                    self.negate_twice(&mut bin.left);
                    *e = *bin.left.take();
                }
            }
            _ => {}
        }
    }
    ///
    /// - `!(x == y)` => `x != y`
    /// - `!(x === y)` => `x !== y`
    pub(super) fn compress_negated_bin_eq(&self, e: &mut Expr) {
        let unary = match e {
            Expr::Unary(e @ UnaryExpr { op: op!("!"), .. }) => e,
            _ => return,
        };
        match &mut *unary.arg {
            Expr::Bin(BinExpr {
                op: op @ op!("=="),
                left,
                right,
                ..
            })
            | Expr::Bin(BinExpr {
                op: op @ op!("==="),
                left,
                right,
                ..
            }) => {
                *e = Expr::Bin(BinExpr {
                    span: unary.span,
                    op: if *op == op!("==") {
                        op!("!=")
                    } else {
                        op!("!==")
                    },
                    left: left.take(),
                    right: right.take(),
                })
            }
            _ => {}
        }
    }
    pub(super) fn optimize_nullish_coalescing(&mut self, e: &mut Expr) {
        let (l, r) = match e {
            Expr::Bin(BinExpr {
                op: op!("??"),
                left,
                right,
                ..
            }) => (&mut **left, &mut **right),
            _ => return,
        };
        match l {
            // `null ?? r` is always `r`.
            Expr::Lit(Lit::Null(..)) => {
                tracing::debug!("Removing null from lhs of ??");
                self.changed = true;
                *e = r.take();
                return;
            }
            // A non-null literal lhs short-circuits: rhs is dead.
            Expr::Lit(Lit::Num(..))
            | Expr::Lit(Lit::Str(..))
            | Expr::Lit(Lit::BigInt(..))
            | Expr::Lit(Lit::Bool(..))
            | Expr::Lit(Lit::Regex(..)) => {
                tracing::debug!("Removing rhs of ?? as lhs cannot be null nor undefined");
                self.changed = true;
                *e = l.take();
                return;
            }
            _ => {}
        }
    }
    /// `typeof b !== 'undefined'` => `b != void 0`
    pub(super) fn compress_typeofs(&mut self, e: &mut Expr) {
        if !self.options.typeofs {
            return;
        }
        match e {
            Expr::Unary(UnaryExpr {
                span,
                op: op!("typeof"),
                arg,
                ..
            }) => match &**arg {
                Expr::Ident(arg) => {
                    if let Some(value) = self.typeofs.get(&arg.to_id()).cloned() {
                        tracing::debug!(
                            "Converting typeof of variable to literal as we know the value"
                        );
                        self.changed = true;
                        *e = Expr::Lit(Lit::Str(Str {
                            span: *span,
                            value,
                            has_escape: false,
                            kind: Default::default(),
                        }));
                        return;
                    }
                }
                Expr::Arrow(..) | Expr::Fn(..) => {
                    tracing::debug!("Converting typeof to 'function' as we know the value");
                    self.changed = true;
                    *e = Expr::Lit(Lit::Str(Str {
                        span: *span,
                        value: js_word!("function"),
                        has_escape: false,
                        kind: Default::default(),
                    }));
                    return;
                }
                Expr::Array(..) | Expr::Object(..) => {
                    tracing::debug!("Converting typeof to 'object' as we know the value");
                    self.changed = true;
                    *e = Expr::Lit(Lit::Str(Str {
                        span: *span,
                        value: js_word!("object"),
                        has_escape: false,
                        kind: Default::default(),
                    }));
                    return;
                }
                _ => {}
            },
            _ => {}
        }
    }
}
| true |
cb47bba96fe7672d5f013f79d6447d0cad5a660c
|
Rust
|
Tmw/base64-rs
|
/src/alphabet.rs
|
UTF-8
| 1,420 | 3.640625 | 4 |
[
"MIT"
] |
permissive
|
/// A Base64 alphabet: maps 6-bit values to characters and back.
pub trait Alphabet {
    fn get_char_for_index(&self, index: u8) -> Option<char>;
    fn get_index_for_char(&self, character: char) -> Option<u8>;
    fn get_padding_char(&self) -> char;
}

/// The classic RFC 4648 alphabet: A-Z, a-z, 0-9, '+', '/'.
pub struct Classic;

// Signed offsets between ASCII codes and Base64 indices per character class.
const UPPERCASEOFFSET: i8 = 65;
const LOWERCASEOFFSET: i8 = 71;
const DIGITOFFSET: i8 = -4;

impl Alphabet for Classic {
    fn get_char_for_index(&self, index: u8) -> Option<char> {
        // Work in i8 so the digit offset (-4) applies uniformly; indices
        // >= 64 fall through every arm and yield None.
        let index = index as i8;
        let offset = match index {
            0..=25 => UPPERCASEOFFSET,  // A-Z
            26..=51 => LOWERCASEOFFSET, // a-z
            52..=61 => DIGITOFFSET,     // 0-9
            62 => return Some('+'),
            63 => return Some('/'),
            _ => return None,
        };
        Some((index + offset) as u8 as char)
    }

    fn get_index_for_char(&self, character: char) -> Option<u8> {
        // Inverse mapping: subtract the per-class offset from the ASCII code.
        let code = character as i8;
        let offset = match code {
            65..=90 => UPPERCASEOFFSET,  // A-Z
            97..=122 => LOWERCASEOFFSET, // a-z
            48..=57 => DIGITOFFSET,      // 0-9
            43 => return Some(62),       // +
            47 => return Some(63),       // /
            _ => return None,
        };
        Some((code - offset) as u8)
    }

    fn get_padding_char(&self) -> char {
        '='
    }
}
| true |
c5e78686674d61be7bcb74ae8d79636604392ba9
|
Rust
|
Sintendo/rust-sms-emu
|
/cpu.rs
|
UTF-8
| 7,917 | 3.15625 | 3 |
[] |
no_license
|
// Z80-style register file: two 16-bit pointer registers plus eight 8-bit
// registers that pair up as AF, BC, DE, HL.
#[derive(Default)]
struct Registers {
    pc: u16, // program counter
    sp: u16, // stack pointer
    a: u8,   // accumulator
    f: u8,   // flags
    b: u8,
    c: u8,
    d: u8,
    e: u8,
    h: u8,
    l: u8,
}
// concat_idents! doesn't work
// Generates a getter combining two 8-bit registers into a 16-bit value
// ($hi is the high byte). concat_idents! doesn't work, so the method name
// is passed explicitly.
macro_rules! get_u16 {
    ($hi: ident, $lo: ident, $name: ident) => (
        fn $name(&self) -> u16 {
            (self.$hi as u16) << 8 | (self.$lo as u16)
        }
    );
    // ($hi: ident, $lo: ident) => (
    //     get_u16!(concat_idents!(get_, $hi, $lo), $hi , $lo );
    // );
}
// Generates a setter splitting a 16-bit value into two 8-bit registers.
macro_rules! set_u16 {
    ($hi: ident, $lo: ident, $name: ident) => (
        fn $name(&mut self, val: u16) {
            self.$hi = (val >> 8) as u8;
            self.$lo = val as u8;
        }
    );
}
// Convenience: generate a paired 16-bit getter and setter in one shot.
macro_rules! get_set_u16 {
    ($hi: ident, $lo: ident, $get: ident, $set: ident) => (
        get_u16!($hi, $lo, $get);
        set_u16!($hi, $lo, $set);
    );
}
// Generates a plain getter returning a register field by value.
macro_rules! get {
    ($t: ty, $name: ident, $reg: ident) => (
        fn $name(&self) -> $t {
            self.$reg
        }
    );
}
// Generates a plain setter assigning a register field.
macro_rules! set {
    ($t: ty, $name: ident, $reg: ident) => (
        fn $name(&mut self, val: $t) {
            self.$reg = val
        }
    );
}
// Convenience: generate a paired getter and setter for one field.
macro_rules! get_set {
    ($t: ty, $field: ident, $get: ident, $set: ident) => (
        get!($t, $get, $field);
        set!($t, $set, $field);
    );
}
// Accessors for each 8/16-bit register and the BC/DE/HL pairs, all
// generated by the macros above.
impl Registers {
    get_set!(u8, a, get_a, set_a);
    get_set!(u8, b, get_b, set_b);
    get_set!(u8, c, get_c, set_c);
    get_set!(u8, d, get_d, set_d);
    get_set!(u8, e, get_e, set_e);
    get_set!(u8, h, get_h, set_h);
    get_set!(u8, l, get_l, set_l);
    get_set!(u16, pc, get_pc, set_pc);
    get_set!(u16, sp, get_sp, set_sp);
    get_set_u16!(b, c, get_bc, set_bc);
    get_set_u16!(d, e, get_de, set_de);
    get_set_u16!(h, l, get_hl, set_hl);
    // get_u16!(get_bc, b, c);
    // set_u16!(set_bc, b, c);
    // get_u16!(get_hl, h, l);
    // set_u16!(set_hl, h, l);
}
// CPU core: currently just the register file; memory access is stubbed.
#[derive(Default)]
struct CPU {
    regs: Registers,
}
// const ops: [fn (mut CPU);] = [];
// Generates an `LD rr, nn` handler: fetch a 16-bit immediate into a
// register pair.
macro_rules! ld_r16_i16 {
    ($name: ident, $set: ident) => (
        fn $name(&mut self) {
            let val: u16 = self.fetch_u16();
            self.regs.$set(val);
        }
    );
}
// Generates an `LD r, r'` handler: copy one 8-bit register into another.
macro_rules! ld_r8_r8 {
    ($name: ident, $set: ident, $get: ident) => (
        fn $name(&mut self) {
            let val: u8 = self.regs.$get();
            self.regs.$set(val);
        }
    );
}
// Generates an `INC rr` handler: increment a 16-bit register pair.
// Uses wrapping arithmetic so that 0xFFFF + 1 wraps to 0x0000 like real
// hardware, instead of panicking on overflow in debug builds.
macro_rules! inc_r16 {
    ($name: ident, $set: ident, $get: ident) => (
        fn $name(&mut self) {
            let val: u16 = self.regs.$get().wrapping_add(1);
            self.regs.$set(val);
        }
    );
}
// Generates a `DEC rr` handler: decrement a 16-bit register pair.
// Uses wrapping arithmetic so that 0x0000 - 1 wraps to 0xFFFF like real
// hardware, instead of panicking on underflow in debug builds.
macro_rules! dec_r16 {
    ($name: ident, $set: ident, $get: ident) => (
        fn $name(&mut self) {
            let val: u16 = self.regs.$get().wrapping_sub(1);
            self.regs.$set(val);
        }
    );
}
/// Instruction fetch/decode/execute core.
///
/// Nearly every opcode handler below is generated by the `ld_r16_i16!`,
/// `ld_r8_r8!`, `inc_r16!` and `dec_r16!` macros declared earlier in this
/// file; each expands to a method that moves data through the `self.regs`
/// accessors named in its arguments.
impl CPU {
    // Fetch the next instruction byte.
    // NOTE(review): stub — always returns 0; the memory bus appears not to
    // be wired up yet. TODO confirm intended behavior.
    fn fetch_u8(&mut self) -> u8 {
        0
    }
    // Fetch the next 16-bit immediate operand.
    // NOTE(review): stub — always returns 0 (see `fetch_u8`).
    fn fetch_u16(&mut self) -> u16 {
        0
    }
    // Fetch one opcode and dispatch to its handler. Opcodes without an
    // arm fall through to `unimplemented`, which only logs.
    fn execute_instruction(&mut self) {
        let op = self.fetch_u8();
        match op {
            0x00 => self.nop(),
            // 16-bit immediate loads, increments and decrements.
            0x01 => self.ld_bc_i16(),
            0x03 => self.inc_bc(),
            0x0B => self.dec_bc(),
            0x11 => self.ld_de_i16(),
            0x13 => self.inc_de(),
            0x1B => self.dec_de(),
            0x21 => self.ld_hl_i16(),
            0x23 => self.inc_hl(),
            0x2B => self.dec_hl(),
            0x31 => self.ld_sp_i16(),
            0x33 => self.inc_sp(),
            0x3B => self.dec_sp(),
            // 8-bit register-to-register moves (LD dst, src).
            0x40 => self.ld_b_b(),
            0x41 => self.ld_b_c(),
            0x42 => self.ld_b_d(),
            0x43 => self.ld_b_e(),
            0x44 => self.ld_b_h(),
            0x45 => self.ld_b_l(),
            0x47 => self.ld_b_a(),
            0x48 => self.ld_c_b(),
            0x49 => self.ld_c_c(),
            0x4a => self.ld_c_d(),
            0x4b => self.ld_c_e(),
            0x4c => self.ld_c_h(),
            0x4d => self.ld_c_l(),
            0x4f => self.ld_c_a(),
            0x50 => self.ld_d_b(),
            0x51 => self.ld_d_c(),
            0x52 => self.ld_d_d(),
            0x53 => self.ld_d_e(),
            0x54 => self.ld_d_h(),
            0x55 => self.ld_d_l(),
            0x57 => self.ld_d_a(),
            0x58 => self.ld_e_b(),
            0x59 => self.ld_e_c(),
            0x5a => self.ld_e_d(),
            0x5b => self.ld_e_e(),
            0x5c => self.ld_e_h(),
            0x5d => self.ld_e_l(),
            0x5f => self.ld_e_a(),
            0x60 => self.ld_h_b(),
            0x61 => self.ld_h_c(),
            0x62 => self.ld_h_d(),
            0x63 => self.ld_h_e(),
            0x64 => self.ld_h_h(),
            0x65 => self.ld_h_l(),
            0x67 => self.ld_h_a(),
            0x68 => self.ld_l_b(),
            0x69 => self.ld_l_c(),
            0x6a => self.ld_l_d(),
            0x6b => self.ld_l_e(),
            0x6c => self.ld_l_h(),
            0x6d => self.ld_l_l(),
            0x6f => self.ld_l_a(),
            0x78 => self.ld_a_b(),
            0x79 => self.ld_a_c(),
            0x7a => self.ld_a_d(),
            0x7b => self.ld_a_e(),
            0x7c => self.ld_a_h(),
            0x7d => self.ld_a_l(),
            0x7f => self.ld_a_a(),
            _ => self.unimplemented(op),
        }
    }
    // Diagnostic fallback for opcodes that have no handler yet.
    fn unimplemented(&mut self, op: u8) {
        println!("unimplemented opcode: 0x{:X}", op);
    }
    // 0x00: no operation.
    fn nop(&mut self) {}
    // LD rr, imm16 — load a 16-bit immediate into a register pair.
    ld_r16_i16!(ld_bc_i16, set_bc);
    ld_r16_i16!(ld_de_i16, set_de);
    ld_r16_i16!(ld_hl_i16, set_hl);
    ld_r16_i16!(ld_sp_i16, set_sp);
    // LD dst, src — copy one 8-bit register into another.
    // Grouped by source register (b, c, d, ...) rather than opcode order.
    ld_r8_r8!(ld_b_b, set_b, get_b);
    ld_r8_r8!(ld_d_b, set_d, get_b);
    ld_r8_r8!(ld_h_b, set_h, get_b);
    ld_r8_r8!(ld_b_c, set_b, get_c);
    ld_r8_r8!(ld_d_c, set_d, get_c);
    ld_r8_r8!(ld_h_c, set_h, get_c);
    ld_r8_r8!(ld_b_d, set_b, get_d);
    ld_r8_r8!(ld_d_d, set_d, get_d);
    ld_r8_r8!(ld_h_d, set_h, get_d);
    ld_r8_r8!(ld_b_e, set_b, get_e);
    ld_r8_r8!(ld_d_e, set_d, get_e);
    ld_r8_r8!(ld_h_e, set_h, get_e);
    ld_r8_r8!(ld_b_h, set_b, get_h);
    ld_r8_r8!(ld_d_h, set_d, get_h);
    ld_r8_r8!(ld_h_h, set_h, get_h);
    ld_r8_r8!(ld_b_l, set_b, get_l);
    ld_r8_r8!(ld_d_l, set_d, get_l);
    ld_r8_r8!(ld_h_l, set_h, get_l);
    ld_r8_r8!(ld_b_a, set_b, get_a);
    ld_r8_r8!(ld_d_a, set_d, get_a);
    ld_r8_r8!(ld_h_a, set_h, get_a);
    ld_r8_r8!(ld_c_b, set_c, get_b);
    ld_r8_r8!(ld_e_b, set_e, get_b);
    ld_r8_r8!(ld_l_b, set_l, get_b);
    ld_r8_r8!(ld_c_c, set_c, get_c);
    ld_r8_r8!(ld_e_c, set_e, get_c);
    ld_r8_r8!(ld_l_c, set_l, get_c);
    ld_r8_r8!(ld_c_d, set_c, get_d);
    ld_r8_r8!(ld_e_d, set_e, get_d);
    ld_r8_r8!(ld_l_d, set_l, get_d);
    ld_r8_r8!(ld_c_e, set_c, get_e);
    ld_r8_r8!(ld_e_e, set_e, get_e);
    ld_r8_r8!(ld_l_e, set_l, get_e);
    ld_r8_r8!(ld_c_h, set_c, get_h);
    ld_r8_r8!(ld_e_h, set_e, get_h);
    ld_r8_r8!(ld_l_h, set_l, get_h);
    ld_r8_r8!(ld_c_l, set_c, get_l);
    ld_r8_r8!(ld_e_l, set_e, get_l);
    ld_r8_r8!(ld_l_l, set_l, get_l);
    ld_r8_r8!(ld_c_a, set_c, get_a);
    ld_r8_r8!(ld_e_a, set_e, get_a);
    ld_r8_r8!(ld_l_a, set_l, get_a);
    ld_r8_r8!(ld_a_b, set_a, get_b);
    ld_r8_r8!(ld_a_c, set_a, get_c);
    ld_r8_r8!(ld_a_d, set_a, get_d);
    ld_r8_r8!(ld_a_e, set_a, get_e);
    ld_r8_r8!(ld_a_h, set_a, get_h);
    ld_r8_r8!(ld_a_l, set_a, get_l);
    ld_r8_r8!(ld_a_a, set_a, get_a);
    // INC rr / DEC rr — 16-bit register pair increment and decrement.
    inc_r16!(inc_bc, set_bc, get_bc);
    inc_r16!(inc_de, set_de, get_de);
    inc_r16!(inc_hl, set_hl, get_hl);
    inc_r16!(inc_sp, set_sp, get_sp);
    dec_r16!(dec_bc, set_bc, get_bc);
    dec_r16!(dec_de, set_de, get_de);
    dec_r16!(dec_hl, set_hl, get_hl);
    dec_r16!(dec_sp, set_sp, get_sp);
}
// impl Default for CPU {
// fn default() -> CPU {
// CPU { regs: Default::default(), ops: [nop]}
// }
// }
/// Smoke test: write 0xAABB into the HL register pair, then print the
/// high (H) and low (L) halves separately.
fn main() {
    let mut processor: CPU = Default::default();
    processor.regs.set_hl(0xAABB);
    let high = processor.regs.get_h();
    let low = processor.regs.get_l();
    println!("{:X} {:X}", high, low);
}
| true |
3e4fd2f7598c53156b28c3453488589d7014cdb0
|
Rust
|
lawvs/Algorithm-Training
|
/leetcode/080.remove-duplicates-from-sorted-array-ii.rs
|
UTF-8
| 527 | 2.640625 | 3 |
[] |
no_license
|
impl Solution {
    /// LeetCode 80: removes duplicates in-place from a sorted vector so
    /// that each value appears at most twice, and returns the new logical
    /// length. Elements beyond the returned length are unspecified.
    ///
    /// Fix over the original: an empty input previously panicked on the
    /// unconditional `nums[0]` access; it now returns 0.
    pub fn remove_duplicates(nums: &mut Vec<i32>) -> i32 {
        if nums.is_empty() {
            return 0;
        }
        // Next index to write a kept element; the first element always stays.
        let mut write = 1;
        // Value currently being deduplicated, and whether its second copy
        // has already been kept.
        let mut current = nums[0];
        let mut kept_twice = false;
        for read in 1..nums.len() {
            if nums[read] != current {
                // New value: keep it and reset the duplicate tracker.
                nums[write] = nums[read];
                current = nums[write];
                kept_twice = false;
                write += 1;
            } else if !kept_twice {
                // Second occurrence of `current`: keep it, but no more.
                nums[write] = nums[read];
                kept_twice = true;
                write += 1;
            }
            // Third or later occurrence: skip.
        }
        write as i32
    }
}
| true |
e983b38dd7ee3287504994ddc23c2e1b21507dc2
|
Rust
|
takumioba/conc_ytakano
|
/chap5/5.1/ch5_1_iter/src/main.rs
|
UTF-8
| 784 | 3.140625 | 3 |
[
"MIT"
] |
permissive
|
use std::io::{BufRead, BufReader, BufWriter, Write};
use std::net::TcpListener;
/// Minimal one-line echo server: accepts TCP connections on localhost
/// port 10000 and writes back the first line each client sends.
fn main() {
    // Listen on TCP port 10000 (localhost only).
    let listener = TcpListener::bind("127.0.0.1:10000").unwrap();
    // Serve connections one at a time until `accept` fails.
    while let Ok((socket, _)) = listener.accept() {
        // Split the stream into buffered read and write halves.
        let read_half = socket.try_clone().unwrap();
        let mut reader = BufReader::new(read_half);
        let mut writer = BufWriter::new(socket);
        // Read a single line and echo it back to the client.
        let mut line = String::new();
        reader.read_line(&mut line).unwrap();
        writer.write(line.as_bytes()).unwrap();
        writer.flush().unwrap();
    }
}
| true |
0ea30c9798af0d237d87e892320f057b13c854c5
|
Rust
|
KyleMayes/clang-rs
|
/tests/tests.rs
|
UTF-8
| 43,905 | 2.59375 | 3 |
[
"Apache-2.0"
] |
permissive
|
extern crate clang;
extern crate libc;
use std::env;
use std::fs;
use std::mem;
use std::io::{Write};
use std::path::{Path, PathBuf};
use std::sync::atomic::{AtomicUsize, Ordering};
use clang::*;
use clang::completion::*;
use clang::source::*;
use libc::{c_int};
//================================================
// Macros
//================================================
/// Asserts that `$location` equals a `Location` built from the given
/// file, line, column, and offset components.
macro_rules! assert_location_eq {
    ($location:expr, $file:expr, $line:expr, $column:expr, $offset:expr) => ({
        assert_eq!(
            $location,
            Location { file: $file, line: $line, column: $column, offset: $offset },
        );
    })
}
/// Builds a `SourceRange` in `$file` spanning (start line, start column)
/// through (end line, end column).
macro_rules! range {
    ($file:expr, $sl:expr, $sc:expr, $el:expr, $ec:expr) => ({
        let begin = $file.get_location($sl, $sc);
        let finish = $file.get_location($el, $ec);
        ::clang::source::SourceRange::new(begin, finish)
    })
}
//================================================
// Functions
//================================================
/// Parses `contents` as C++ and hands the translation unit's root entity
/// to `f`.
fn with_entity<'c, F: FnOnce(Entity)>(clang: &'c Clang, contents: &str, f: F) {
    with_translation_unit(clang, "test.cpp", contents, &[], |_, _, tu| {
        let root = tu.get_entity();
        f(root)
    });
}
/// Parses `contents` as C++ and hands `f` the on-disk path together with
/// the corresponding `File` object from the translation unit.
fn with_file<'c, F: FnOnce(&Path, File)>(clang: &'c Clang, contents: &str, f: F) {
    with_translation_unit(clang, "test.cpp", contents, &[], |_, path, tu| {
        let file = tu.get_file(path).unwrap();
        f(path, file)
    });
}
// Monotonic suffix so successive (or concurrent) tests get distinct
// directory names.
static COUNTER: AtomicUsize = AtomicUsize::new(0);
/// Creates a fresh temporary directory, runs `f` with its path, then
/// removes the directory and everything inside it.
fn with_temporary_directory<F: FnOnce(&Path)>(f: F) {
    let exe = env::current_exe().unwrap().file_name().unwrap().to_string_lossy().into_owned();
    // Probe candidate names until one does not already exist.
    let dir = loop {
        let suffix = COUNTER.fetch_add(1, Ordering::SeqCst);
        let candidate = env::temp_dir().join(format!("{}{}", exe, suffix));
        if !candidate.exists() {
            break candidate;
        }
    };
    fs::create_dir(&dir).unwrap();
    f(&dir);
    fs::remove_dir_all(&dir).unwrap();
}
/// Writes a single named file into a fresh temporary directory and calls
/// `f` with the directory path and the file path.
fn with_temporary_file<F: FnOnce(&Path, &Path)>(name: &str, contents: &str, f: F) {
    let entries = [(name, contents)];
    with_temporary_files(&entries, |dir, paths| f(dir, &paths[0]));
}
/// Writes each `(name, contents)` pair into a fresh temporary directory
/// and calls `f` with the directory path and the created file paths
/// (in input order).
fn with_temporary_files<F: FnOnce(&Path, Vec<PathBuf>)>(files: &[(&str, &str)], f: F) {
    with_temporary_directory(|dir| {
        let mut paths = Vec::with_capacity(files.len());
        for &(name, contents) in files {
            let path = dir.join(name);
            fs::File::create(&path).unwrap().write_all(contents.as_bytes()).unwrap();
            paths.push(path);
        }
        f(dir, paths);
    });
}
fn with_translation_unit<'c, F>(
clang: &'c Clang, name: &str, contents: &str, arguments: &[&str], f: F
) where F: FnOnce(&Path, &Path, TranslationUnit) {
with_temporary_file(name, contents, |d, file| {
let index = Index::new(clang, false, false);
f(d, &file, index.parser(file).arguments(arguments).parse().unwrap());
});
}
/// Parses `contents` as C++ and hands `f` the types of every top-level
/// child entity that has one (children without a type are skipped).
fn with_types<'c, F: FnOnce(Vec<Type>)>(clang: &'c Clang, contents: &str, f: F) {
    with_translation_unit(clang, "test.cpp", contents, &[], |_, _, tu| {
        let mut types = Vec::new();
        for child in tu.get_entity().get_children() {
            if let Some(type_) = child.get_type() {
                types.push(type_);
            }
        }
        f(types)
    });
}
//================================================
// Tests
//================================================
#[path="completion.rs"]
mod completion_test;
#[path="diagnostic.rs"]
mod diagnostic_test;
#[path="documentation.rs"]
mod documentation_test;
#[path="source.rs"]
mod source_test;
#[path="token.rs"]
mod token_test;
#[path="sonar.rs"]
mod sonar_test;
#[test]
fn test() {
let clang = Clang::new().unwrap();
println!("libclang: {}", get_version());
completion_test::test(&clang);
diagnostic_test::test(&clang);
documentation_test::test(&clang);
source_test::test(&clang);
token_test::test(&clang);
sonar_test::test(&clang);
// SourceError _______________________________
assert_eq!(format!("{}", SourceError::Unknown), "an unknown error occurred");
// Entity ____________________________________
let source = "
struct B { };
";
with_entity(&clang, source, |e| {
#[cfg(feature="clang_6_0")]
fn test_is_abstract_record(children: &[Entity]) {
assert_eq!(children.len(), 1);
assert!(!children[0].is_abstract_record());
}
#[cfg(not(feature="clang_6_0"))]
fn test_is_abstract_record(_: &[Entity]) { }
test_is_abstract_record(&e.get_children()[..]);
});
let source = "
thread_local int foo;
int bar;
";
with_entity(&clang, source, |e| {
#[cfg(feature="clang_6_0")]
fn test_get_tls_kind(children: &[Entity]) {
assert_eq!(children.len(), 2);
assert_eq!(children[0].get_tls_kind(), Some(TlsKind::Dynamic));
assert_eq!(children[1].get_tls_kind(), None);
}
#[cfg(not(feature="clang_6_0"))]
fn test_get_tls_kind(_: &[Entity]) { }
test_get_tls_kind(&e.get_children()[..]);
});
with_translation_unit(&clang, "test.cpp", "int a = 322;", &[], |_, f, tu| {
#[cfg(feature="clang_5_0")]
fn test_target(tu: &TranslationUnit) {
let target = tu.get_target();
assert!(!target.triple.is_empty());
assert_eq!(target.pointer_width, mem::size_of::<usize>() * 8);
}
#[cfg(not(feature="clang_5_0"))]
fn test_target(_: &TranslationUnit) { }
let file = tu.get_file(f).unwrap();
let entity = tu.get_entity();
assert_eq!(entity.get_completion_string(), None);
assert_eq!(entity.get_display_name(), Some(f.to_str().unwrap().into()));
assert_eq!(entity.get_kind(), EntityKind::TranslationUnit);
assert_eq!(entity.get_location(), None);
assert_eq!(entity.get_name(), Some(f.to_str().unwrap().into()));
assert_eq!(entity.get_name_ranges(), &[]);
assert_eq!(entity.get_platform_availability(), None);
assert_eq!(entity.get_translation_unit().get_file(f), tu.get_file(f));
assert_eq!(entity.get_usr(), None);
let children = entity.get_children();
assert_eq!(children.len(), 1);
assert_eq!(children[0].get_display_name(), Some("a".into()));
assert_eq!(children[0].get_kind(), EntityKind::VarDecl);
assert_eq!(children[0].get_location(), Some(file.get_location(1, 5)));
assert_eq!(children[0].get_name(), Some("a".into()));
assert_eq!(children[0].get_name_ranges(), &[range!(file, 1, 5, 1, 6)]);
assert_eq!(children[0].get_range(), Some(range!(file, 1, 1, 1, 12)));
assert_eq!(children[0].get_translation_unit().get_file(f), tu.get_file(f));
assert_eq!(children[0].get_platform_availability(), Some(vec![]));
assert_eq!(children[0].get_usr(), Some(Usr("c:@a".into())));
let string = children[0].get_completion_string().unwrap();
assert_eq!(string.get_chunks(), &[
CompletionChunk::ResultType("int".into()),
CompletionChunk::TypedText("a".into()),
]);
test_target(&tu);
});
let source = r#"
void f() {
unsigned int a = 2 + 2;
double b = 0.25 * 2.0;
const char* c = "Hello, world!";
}
"#;
with_entity(&clang, source, |e| {
#[cfg(feature="clang_4_0")]
fn test_evaluate<'tu>(expressions: &[Entity<'tu>]) {
assert_eq!(expressions[0].evaluate(), Some(EvaluationResult::UnsignedInteger(4)));
assert_eq!(expressions[1].evaluate(), Some(EvaluationResult::Float(0.5)));
match expressions[2].evaluate() {
Some(EvaluationResult::String(string)) => {
assert_eq!(string.to_str(), Ok("Hello, world!"));
},
_ => unreachable!(),
}
}
#[cfg(all(feature="clang_3_9", not(feature="clang_4_0")))]
fn test_evaluate<'tu>(expressions: &[Entity<'tu>]) {
assert_eq!(expressions[0].evaluate(), Some(EvaluationResult::SignedInteger(4)));
assert_eq!(expressions[1].evaluate(), Some(EvaluationResult::Float(0.5)));
match expressions[2].evaluate() {
Some(EvaluationResult::String(string)) => {
assert_eq!(string.to_str(), Ok("Hello, world!"));
},
_ => unreachable!(),
}
}
#[cfg(not(feature="clang_3_9"))]
fn test_evaluate<'tu>(_: &[Entity<'tu>]) { }
let children = e.get_children()[0].get_children()[0].get_children();
let expressions = children.into_iter().map(|e| {
e.get_children()[0].get_children()[0]
}).collect::<Vec<_>>();
assert_eq!(expressions.len(), 3);
test_evaluate(&expressions);
});
let source = "
class B { };
class A : public B {
private:
void a() { };
protected:
void b() { };
public:
void c() { };
};
";
with_entity(&clang, source, |e| {
assert_eq!(e.get_accessibility(), None);
let children = e.get_children()[1].get_children();
assert_eq!(children.len(), 7);
assert_eq!(children[0].get_accessibility(), Some(Accessibility::Public));
assert_eq!(children[1].get_accessibility(), Some(Accessibility::Private));
assert_eq!(children[2].get_accessibility(), Some(Accessibility::Private));
assert_eq!(children[3].get_accessibility(), Some(Accessibility::Protected));
assert_eq!(children[4].get_accessibility(), Some(Accessibility::Protected));
assert_eq!(children[5].get_accessibility(), Some(Accessibility::Public));
assert_eq!(children[6].get_accessibility(), Some(Accessibility::Public));
});
let source = "
struct A;
struct A;
struct A { int a; };
";
with_entity(&clang, source, |e| {
let children = e.get_children();
assert_eq!(children.len(), 3);
assert_eq!(children[0].get_canonical_entity(), children[0]);
assert_eq!(children[0].get_definition(), Some(children[2]));
assert!(!children[0].is_definition());
assert_eq!(children[1].get_canonical_entity(), children[0]);
assert_eq!(children[1].get_definition(), Some(children[2]));
assert!(!children[1].is_definition());
assert_eq!(children[2].get_canonical_entity(), children[0]);
assert_eq!(children[2].get_definition(), Some(children[2]));
assert!(children[2].is_definition());
});
let source = "
struct A { struct { int b; }; int i : 322; };
";
with_entity(&clang, source, |e| {
#[cfg(feature="clang_3_7")]
fn test_is_anonymous<'tu>(children: &[Entity<'tu>]) {
assert!(!children[0].is_anonymous());
let children = children[0].get_children();
assert!(children[0].is_anonymous());
assert!(!children[1].is_anonymous());
}
#[cfg(not(feature="clang_3_7"))]
fn test_is_anonymous<'tu>(_: &[Entity<'tu>]) { }
let children = e.get_children();
assert_eq!(children.len(), 1);
test_is_anonymous(&children);
let children = children[0].get_children();
assert_eq!(children.len(), 2);
assert_eq!(children[0].get_bit_field_width(), None);
assert_eq!(children[0].get_name(), None);
assert_eq!(children[0].get_display_name(), None);
assert!(!children[0].is_bit_field());
if !cfg!(target_os="windows") {
assert_eq!(children[1].get_bit_field_width(), Some(322));
assert_eq!(children[1].get_name(), Some("i".into()));
assert_eq!(children[1].get_display_name(), Some("i".into()));
assert!(children[1].is_bit_field());
}
});
let source = "
int a;
/// \\brief A global integer.
int b;
";
with_translation_unit(&clang, "test.cpp", source, &[], |_, f, tu| {
let file = tu.get_file(f).unwrap();
let children = tu.get_entity().get_children();
assert_eq!(children.len(), 2);
assert_eq!(file.get_location(2, 13).get_entity(), Some(children[0]));
assert_eq!(file.get_location(3, 13).get_entity(), None);
assert_eq!(file.get_location(4, 13).get_entity(), Some(children[1]));
assert_eq!(children[0].get_comment(), None);
assert_eq!(children[0].get_comment_brief(), None);
assert_eq!(children[0].get_comment_range(), None);
assert_eq!(children[1].get_comment(), Some("/// \\brief A global integer.".into()));
assert_eq!(children[1].get_comment_brief(), Some("A global integer.".into()));
assert_eq!(children[1].get_comment_range(), Some(range!(file, 3, 9, 3, 39)));
});
let source = "
unsigned int integer = 322;
enum A { B = 322, C = 644 };
";
with_entity(&clang, source, |e| {
assert_eq!(e.get_language(), None);
let children = e.get_children();
assert_eq!(children.len(), 2);
assert_eq!(children[0].get_enum_constant_value(), None);
assert_eq!(children[0].get_enum_underlying_type(), None);
if !cfg!(target_os="windows") {
assert_eq!(children[1].get_enum_constant_value(), None);
assert_eq!(children[1].get_enum_underlying_type(), Some(children[0].get_type().unwrap()));
}
let children = children[1].get_children();
assert_eq!(children.len(), 2);
assert_eq!(children[0].get_enum_constant_value(), Some((322, 322)));
assert_eq!(children[1].get_enum_constant_value(), Some((644, 644)));
});
let source = "
void a();
void b() throw();
void c() throw(int);
";
with_entity(&clang, source, |e| {
#[cfg(feature="clang_5_0")]
fn test_get_exception_specification(children: &[Entity]) {
assert_eq!(children[0].get_exception_specification(), None);
assert_eq!(children[1].get_exception_specification(), Some(ExceptionSpecification::DynamicNone));
assert_eq!(children[2].get_exception_specification(), Some(ExceptionSpecification::Dynamic));
}
#[cfg(not(feature="clang_5_0"))]
fn test_get_exception_specification(_: &[Entity]) { }
let children = e.get_children();
assert_eq!(children.len(), 3);
test_get_exception_specification(&children[..]);
});
let files = &[
("test.hpp", ""),
("test.cpp", "#include \"test.hpp\""),
];
with_temporary_files(files, |_, fs| {
let index = Index::new(&clang, false, false);
let tu = index.parser(&fs[1]).detailed_preprocessing_record(true).parse().unwrap();
let last = tu.get_entity().get_children().iter().last().unwrap().clone();
assert_eq!(last.get_kind(), EntityKind::InclusionDirective);
assert_eq!(last.get_file(), tu.get_file(&fs[0]));
assert_eq!(tu.get_file(&fs[1]).unwrap().get_includes(), &[last]);
});
let source = "
void a() { }
class B { void b() { } };
";
with_entity(&clang, source, |e| {
assert_eq!(e.get_language(), None);
let children = e.get_children();
assert_eq!(children.len(), 2);
assert_eq!(children[0].get_language(), Some(Language::C));
assert_eq!(children[1].get_language(), Some(Language::Cpp));
});
let source = "
struct A { void a(); };
void A::a() { }
";
with_entity(&clang, source, |e| {
assert_eq!(e.get_lexical_parent(), None);
assert_eq!(e.get_semantic_parent(), None);
let children = e.get_children();
assert_eq!(children.len(), 2);
assert_eq!(children[0].get_lexical_parent(), Some(e));
assert_eq!(children[0].get_semantic_parent(), Some(e));
assert_eq!(children[1].get_lexical_parent(), Some(e));
assert_eq!(children[1].get_semantic_parent(), Some(children[0]));
});
let source = "
void a() { }
static void b() { }
";
with_entity(&clang, source, |e| {
#[cfg(feature="clang_3_6")]
fn test_get_storage_class<'tu>(entity: Entity<'tu>) {
assert_eq!(entity.get_storage_class(), None);
let children = entity.get_children();
assert_eq!(children[0].get_storage_class(), Some(StorageClass::None));
assert_eq!(children[1].get_storage_class(), Some(StorageClass::Static));
}
#[cfg(not(feature="clang_3_6"))]
fn test_get_storage_class<'tu>(_: Entity<'tu>) { }
assert_eq!(e.get_linkage(), None);
let children = e.get_children();
assert_eq!(children.len(), 2);
assert_eq!(children[0].get_linkage(), Some(Linkage::External));
assert_eq!(children[1].get_linkage(), Some(Linkage::Internal));
test_get_storage_class(e);
});
let source = "
void a(int i) { }
void a(float f) { }
template <typename T> void b(T t) { a(t); }
";
with_entity(&clang, source, |e| {
if !cfg!(target_os="windows") {
let children = e.get_children();
assert_eq!(children.len(), 3);
let children = children[2].get_children();
assert_eq!(children.len(), 3);
let children = children[2].get_children();
assert_eq!(children.len(), 1);
let children = children[0].get_children();
assert_eq!(children.len(), 2);
let children = children[0].get_children();
assert_eq!(children.len(), 1);
let declarations = vec![e.get_children()[1], e.get_children()[0]];
assert_eq!(children[0].get_overloaded_declarations(), Some(declarations));
}
});
let source = "
struct A { virtual void a() { } };
struct B : public A { virtual void a() { } };
";
with_entity(&clang, source, |e| {
let children = e.get_children();
assert_eq!(children.len(), 2);
assert_eq!(children[0].get_children()[0].get_overridden_methods(), None);
assert_eq!(children[1].get_children()[1].get_overridden_methods(), Some(vec![
children[0].get_children()[0]
]));
});
let source = "
int integer = 322;
template <typename T, int I> void function() { }
template <> void function<int, 322>() { }
";
with_entity(&clang, source, |e| {
#[cfg(feature="clang_3_6")]
fn test_get_template_arguments<'tu>(children: &[Entity<'tu>]) {
assert_eq!(children[0].get_template_arguments(), None);
assert_eq!(children[1].get_template_arguments(), None);
assert_eq!(children[2].get_template_arguments(), Some(vec![
TemplateArgument::Type(children[0].get_type().unwrap()),
TemplateArgument::Integral(322, 322),
]));
}
#[cfg(not(feature="clang_3_6"))]
fn test_get_template_arguments<'tu>(_: &[Entity<'tu>]) { }
let children = e.get_children();
assert_eq!(children.len(), 3);
assert_eq!(children[0].get_template(), None);
assert_eq!(children[0].get_template_kind(), None);
assert_eq!(children[1].get_template(), None);
assert_eq!(children[1].get_template_kind(), Some(EntityKind::FunctionDecl));
assert_eq!(children[2].get_template(), Some(children[1]));
assert_eq!(children[2].get_template_kind(), None);
test_get_template_arguments(&children);
});
let source = "
int integer = 322;
typedef int Integer;
";
with_entity(&clang, source, |e| {
let children = e.get_children();
assert_eq!(children.len(), 2);
assert_eq!(children[0].get_typedef_underlying_type(), None);
assert_eq!(children[1].get_typedef_underlying_type(), Some(children[0].get_type().unwrap()));
});
let source = r#"
class A { };
class __attribute__((visibility("hidden"))) B { };
"#;
with_entity(&clang, source, |e| {
#[cfg(feature="clang_3_8")]
fn test_get_visibility<'tu>(children: &[Entity<'tu>]) {
assert_eq!(children[0].get_visibility(), Some(Visibility::Default));
assert_eq!(children[1].get_visibility(), Some(Visibility::Hidden));
}
#[cfg(not(feature="clang_3_8"))]
fn test_get_visibility<'tu>(_: &[Entity<'tu>]) { }
let children = e.get_children();
assert_eq!(children.len(), 2);
test_get_visibility(&children);
});
let source = "
void a();
[[noreturn]] void b();
";
with_translation_unit(&clang, "test.cpp", source, &["--std=c++0x"], |_, _, tu| {
#[cfg(feature="clang_3_9")]
fn test_attributes<'tu>(children: &[Entity<'tu>]) {
assert!(!children[0].has_attributes());
assert!(children[1].has_attributes());
}
#[cfg(not(feature="clang_3_9"))]
fn test_attributes<'tu>(_: &[Entity<'tu>]) { }
let children = tu.get_entity().get_children();
assert_eq!(children.len(), 2);
test_attributes(&children);
});
let source = "
class Class {
void a() const { }
virtual void b() = 0;
static void c() { }
virtual void d() { }
};
";
with_entity(&clang, source, |e| {
let children = e.get_children()[0].get_children();
assert_eq!(children.len(), 4);
macro_rules! method {
($entity:expr, $c:expr, $pv:expr, $s:expr, $v:expr) => ({
assert_eq!($entity.is_const_method(), $c);
assert_eq!($entity.is_pure_virtual_method(), $pv);
assert_eq!($entity.is_static_method(), $s);
assert_eq!($entity.is_virtual_method(), $v);
});
}
method!(children[0], true, false, false, false);
method!(children[1], false, true, false, true);
method!(children[2], false, false, true, false);
method!(children[3], false, false, false, true);
});
let source = "
enum A { A_A, A_B, A_C };
enum class B { B_A, B_B, B_C };
";
with_entity(&clang, source, |e| {
#[cfg(feature="clang_5_0")]
fn test_is_scoped(children: &[Entity]) {
assert!(!children[0].is_scoped());
assert!(children[1].is_scoped());
}
#[cfg(not(feature="clang_5_0"))]
fn test_is_scoped(_: &[Entity]) { }
let children = e.get_children();
assert_eq!(children.len(), 2);
test_is_scoped(&children[..]);
});
let source = "
class Class {
Class(int) { }
explicit Class(const Class&) = default;
Class() { }
explicit Class(Class&&) = default;
};
";
with_entity(&clang, source, |e| {
#[cfg(feature="clang_3_9")]
fn test_constructors<'tu>(children: &[Entity<'tu>]) {
macro_rules! constructor {
($entity:expr, $conv:expr, $cpy:expr, $def:expr, $defed:expr, $mov:expr) => ({
assert_eq!($entity.is_converting_constructor(), $conv);
assert_eq!($entity.is_copy_constructor(), $cpy);
assert_eq!($entity.is_default_constructor(), $def);
assert_eq!($entity.is_defaulted(), $defed);
assert_eq!($entity.is_move_constructor(), $mov);
});
}
constructor!(children[0], true, false, false, false, false);
constructor!(children[1], false, true, false, true, false);
constructor!(children[2], false, false, true, false, false);
constructor!(children[3], false, false, false, true, true);
}
#[cfg(not(feature="clang_3_9"))]
fn test_constructors<'tu>(_: &[Entity<'tu>]) { }
let children = e.get_children()[0].get_children();
assert_eq!(children.len(), 4);
test_constructors(&children);
});
let source = "
struct A {
void a() { }
virtual void b() { }
};
void function() {
A a;
a.a();
a.b();
}
";
with_entity(&clang, source, |e| {
let children = e.get_children()[1].get_children()[0].get_children();
assert_eq!(children.len(), 3);
assert!(!children[1].is_dynamic_call());
assert!(children[2].is_dynamic_call());
});
let source = r#"
void a();
void b()
__attribute__((external_source_symbol(
language="Swift",
defined_in="module",
generated_declaration)));
"#;
with_entity(&clang, source, |e| {
#[cfg(feature="clang_5_0")]
fn test_get_external_symbol(children: &[Entity]) {
assert_eq!(children[0].get_external_symbol(), None);
let symbol = children[1].get_external_symbol();
assert!(symbol.is_some());
let symbol = symbol.unwrap();
assert_eq!(symbol.language, "Swift");
assert_eq!(symbol.defined, "module");
assert!(symbol.generated);
}
#[cfg(not(feature="clang_5_0"))]
fn test_get_external_symbol(_: &[Entity]) { }
let children = e.get_children();
assert_eq!(children.len(), 2);
test_get_external_symbol(&children[..]);
});
let source = "
class A {
int a;
mutable int b;
};
";
with_entity(&clang, source, |e| {
#[cfg(feature="clang_3_8")]
fn test_is_mutable<'tu>(children: &[Entity<'tu>]) {
assert!(!children[0].is_mutable());
assert!(children[1].is_mutable());
}
#[cfg(not(feature="clang_3_8"))]
fn test_is_mutable<'tu>(_: &[Entity<'tu>]) { }
let children = e.get_children()[0].get_children();
assert_eq!(children.len(), 2);
test_is_mutable(&children);
});
let source = "
void a() { }
void b(...) { }
";
with_entity(&clang, source, |e| {
let children = e.get_children();
assert_eq!(children.len(), 2);
assert!(!children[0].is_variadic());
assert!(children[1].is_variadic());
});
let source = "
struct A { };
struct B : A { };
struct C : virtual A { };
";
with_entity(&clang, source, |e| {
let children = e.get_children();
assert_eq!(children.len(), 3);
assert!(!children[1].get_children()[0].is_virtual_base());
assert!(children[2].get_children()[0].is_virtual_base());
});
let source = "
@interface A
- (int)foo;
@end
";
with_translation_unit(&clang, "test.mm", source, &[], |_, _, tu| {
let entities = tu.get_entity().get_children();
assert_eq!(entities.len(), 2);
// The Protocol class reference is automatically added by clang at the start of any Objective-C file
assert_eq!(entities[0].get_kind(), EntityKind::ObjCClassRef);
assert_eq!(entities[0].get_name(), Some("Protocol".into()));
assert_eq!(entities[1].get_kind(), EntityKind::ObjCInterfaceDecl);
assert_eq!(entities[1].get_name(), Some("A".into()));
let children = entities[1].get_children();
assert_eq!(children.len(), 1);
assert_eq!(children[0].get_kind(), EntityKind::ObjCInstanceMethodDecl,);
assert_eq!(children[0].get_name(), Some("foo".into()));
match children[0].get_result_type() {
Some(result_type) => {
assert_eq!(result_type.get_kind(), TypeKind::Int);
assert_eq!(result_type.get_display_name(), "int");
}
_ => unreachable!(),
}
#[cfg(all(feature="clang_6_0", target_os="macos"))]
fn test_get_mangled_objc_names(entity: &Entity) {
let names = vec!["_OBJC_CLASS_$_A".into(), "_OBJC_METACLASS_$_A".into()];
assert_eq!(entity.get_mangled_objc_names(), Some(names));
}
#[cfg(all(feature="clang_6_0", not(target_os="macos")))]
fn test_get_mangled_objc_names(entity: &Entity) {
let names = vec!["_OBJC_CLASS_A".into(), "_OBJC_METACLASS_A".into()];
assert_eq!(entity.get_mangled_objc_names(), Some(names));
}
#[cfg(not(feature="clang_6_0"))]
fn test_get_mangled_objc_names(_: &Entity) { }
test_get_mangled_objc_names(&entities[1]);
});
let source = "
struct x {
char y;
char z;
}
";
with_entity(&clang, source, |e| {
let children = e.get_children();
#[cfg(feature="clang_3_7")]
fn test_get_offset_of_field(fields: &[Entity]) {
assert_eq!(fields[0].get_offset_of_field(), Ok(0));
assert_eq!(fields[1].get_offset_of_field(), Ok(8));
}
#[cfg(not(feature="clang_3_7"))]
fn test_get_offset_of_field(_: &[Entity]) {}
test_get_offset_of_field(&children[0].get_children());
});
let source = "
const int x = 0;
";
with_entity(&clang, source, |e| {
let children = e.get_children();
#[cfg(feature="clang_7_0")]
fn test_is_invalid_declaration(entity: Entity) {
assert_eq!(entity.is_invalid_declaration(), false);
}
#[cfg(not(feature="clang_7_0"))]
fn test_is_invalid_declaration(_: Entity) {}
test_is_invalid_declaration(children[0]);
});
let source = "
int main() {
return 0;
}
";
with_entity(&clang, source, |e| {
let children = e.get_children();
#[cfg(feature="clang_7_0")]
fn test_pretty_printer(entity: Entity) {
let s = entity.get_pretty_printer()
.set_indentation_amount(1)
.set_flag(PrintingPolicyFlag::IncludeNewlines, true)
.set_flag(PrintingPolicyFlag::IncludeTagDefinition, true)
.set_flag(PrintingPolicyFlag::PolishForDeclaration, false)
.set_flag(PrintingPolicyFlag::PrintAnonymousTagLocations, false)
.set_flag(PrintingPolicyFlag::PrintConstantArraySizeAsWritten, true)
.set_flag(PrintingPolicyFlag::PrintConstantsAsWritten, true)
.set_flag(PrintingPolicyFlag::PrintFullyQualifiedName, true)
.set_flag(PrintingPolicyFlag::SuppressImplicitBase, true)
.set_flag(PrintingPolicyFlag::SuppressInitializers, false)
.set_flag(PrintingPolicyFlag::SuppressLifetimeQualifiers, false)
.set_flag(PrintingPolicyFlag::SuppressScope, false)
.set_flag(PrintingPolicyFlag::SuppressSpecifiers, false)
.set_flag(PrintingPolicyFlag::SuppressStrongLifetime, false)
.set_flag(PrintingPolicyFlag::SuppressTagKeyword, true)
.set_flag(PrintingPolicyFlag::SuppressTemplateArgsInCXXConstructors, false)
.set_flag(PrintingPolicyFlag::SuppressUnwrittenScope, false)
.set_flag(PrintingPolicyFlag::UseAlignof, true)
.set_flag(PrintingPolicyFlag::UseBool, true)
.set_flag(PrintingPolicyFlag::UseHalf, false)
.set_flag(PrintingPolicyFlag::UseMsWchar, false)
.set_flag(PrintingPolicyFlag::UseMsvcFormatting, false)
.set_flag(PrintingPolicyFlag::UseRestrict, true)
.set_flag(PrintingPolicyFlag::UseTerseOutput, false)
.set_flag(PrintingPolicyFlag::UseUnderscoreAlignof, false)
.set_flag(PrintingPolicyFlag::UseVoidForZeroParams, true)
.print();
assert_eq!(s, "int main() {\n return 0;\n}\n");
}
#[cfg(not(feature="clang_7_0"))]
fn test_pretty_printer(_: Entity) {}
test_pretty_printer(children[0]);
});
let source = "
@interface Foo
- @property NSString *x;
@end
";
with_translation_unit(&clang, "test.mm", source, &[], |_, _, tu| {
let children = tu.get_entity().get_children();
#[cfg(feature="clang_8_0")]
fn test_get_objc_getter_setter_name(properties: &[Entity]) {
assert_eq!(properties[0].get_objc_getter_name().as_ref().map(|s| s.as_ref()), Some("x"));
assert_eq!(properties[0].get_objc_setter_name().as_ref().map(|s| s.as_ref()), Some("setX:"));
}
#[cfg(not(feature="clang_8_0"))]
fn test_get_objc_getter_setter_name(_: &[Entity]) {}
test_get_objc_getter_setter_name(&children[1].get_children());
});
// Index _____________________________________
let mut index = Index::new(&clang, false, false);
let mut options = ThreadOptions::default();
assert_eq!(index.get_thread_options(), options);
options.editing = true;
index.set_thread_options(options);
assert_eq!(index.get_thread_options(), options);
// TranslationUnit ___________________________
with_translation_unit(&clang, "test.cpp", "int a = 322;", &[], |d, _, tu| {
let file = d.join("test.cpp.gch");
tu.save(&file).unwrap();
let index = Index::new(&clang, false, false);
let _ = TranslationUnit::from_ast(&index, &file).unwrap();
});
with_temporary_file("test.cpp", "int a = 322;", |_, f| {
let index = Index::new(&clang, false, false);
let _ = index.parser(f).unsaved(&[Unsaved::new(f, "int a = 644;")]).parse().unwrap();
});
with_translation_unit(&clang, "test.cpp", "int a = 322;", &[], |d, _, tu| {
assert_eq!(tu.get_file(d.join("test.c")), None);
});
with_translation_unit(&clang, "test.cpp", "int a = 322;", &[], |_, _, tu| {
let usage = tu.get_memory_usage();
assert_eq!(usage.get(&MemoryUsage::Selectors), Some(&0));
});
with_translation_unit(&clang, "test.cpp", "int a = 322;", &[], |_, f, tu| {
let _ = tu.reparse(&[Unsaved::new(f, "int a = 644;")]).unwrap();
});
// Type ______________________________________
with_entity(&clang, "int a = 322;", |e| {
assert_eq!(e.get_type(), None);
let type_ = e.get_children()[0].get_type().unwrap();
assert_eq!(type_.get_display_name(), "int");
assert_eq!(type_.get_kind(), TypeKind::Int);
});
let source = "
int integer = 322;
int function(int argument) { return argument; }
";
with_types(&clang, source, |ts| {
assert_eq!(ts[0].get_argument_types(), None);
assert_eq!(ts[0].get_result_type(), None);
assert_eq!(ts[1].get_argument_types(), Some(vec![ts[0]]));
assert_eq!(ts[1].get_result_type(), Some(ts[0]));
});
let source = "
template <typename T> struct A { T a; int b; };
typedef A<int> B;
struct C { int a; int b; };
";
with_types(&clang, source, |ts| {
assert_eq!(ts[0].get_alignof(), Err(AlignofError::Incomplete));
assert_eq!(ts[0].get_offsetof("b"), Err(OffsetofError::Parent));
assert_eq!(ts[0].get_sizeof(), Err(SizeofError::Incomplete));
let size = mem::size_of::<c_int>();
assert_eq!(ts[1].get_alignof(), Ok(size));
assert_eq!(ts[1].get_offsetof("b"), Ok(size * 8));
assert_eq!(ts[1].get_sizeof(), Ok(size * 2));
});
let source = "
int integer = 322;
void a() { }
";
with_types(&clang, source, |ts| {
assert_eq!(ts[0].get_calling_convention(), None);
assert_eq!(ts[1].get_calling_convention(), Some(CallingConvention::Cdecl));
});
let source = "
int integer;
typedef int Integer;
";
with_types(&clang, source, |ts| {
assert_eq!(ts[0].get_canonical_type(), ts[0]);
assert_eq!(ts[1].get_canonical_type(), ts[0]);
});
let source = "
struct Struct { int member; };
int Struct::*pointer = &Struct::member;
";
with_types(&clang, source, |ts| {
assert_eq!(ts[0].get_class_type(), None);
assert_eq!(ts[1].get_class_type(), Some(ts[0]));
});
let source = "
typedef int Integer;
Integer integer;
";
with_entity(&clang, source, |e| {
let types = e.get_children().iter().map(|e| e.get_type().unwrap()).collect::<Vec<_>>();
assert_eq!(types[0].get_declaration(), Some(e.get_children()[0]));
assert_eq!(types[1].get_declaration(), Some(e.get_children()[0]));
});
let source = "
class A { };
int A;
class A a;
";
with_types(&clang, source, |ts| {
#[cfg(feature="clang_3_9")]
fn test_get_elaborated_type<'tu>(types: &[Type<'tu>]) {
assert_eq!(types[2].get_elaborated_type(), Some(types[0]));
}
#[cfg(not(feature="clang_3_9"))]
fn test_get_elaborated_type<'tu>(_: &[Type<'tu>]) { }
test_get_elaborated_type(&ts);
});
let source = "
int integer = 322;
int array[3] = { 3, 2, 2 };
";
with_types(&clang, source, |ts| {
assert_eq!(ts[0].get_element_type(), None);
assert_eq!(ts[0].get_size(), None);
assert_eq!(ts[1].get_element_type(), Some(ts[0]));
assert_eq!(ts[1].get_size(), Some(3));
});
let source = "
void a();
void b() throw();
void c() throw(int);
";
with_types(&clang, source, |ts| {
#[cfg(feature="clang_5_0")]
fn test_get_exception_specification(ts: &[Type]) {
assert_eq!(ts[0].get_exception_specification(), None);
assert_eq!(ts[1].get_exception_specification(), Some(ExceptionSpecification::DynamicNone));
assert_eq!(ts[2].get_exception_specification(), Some(ExceptionSpecification::Dynamic));
}
#[cfg(not(feature="clang_5_0"))]
fn test_get_exception_specification(_: &[Type]) { }
test_get_exception_specification(&ts[..]);
});
let source = "
struct A { int a, b, c; };
";
with_entity(&clang, source, |e| {
#[cfg(feature="clang_3_7")]
fn test_get_fields<'tu>(entity: Entity<'tu>) {
assert_eq!(entity.get_type().unwrap().get_fields(), Some(entity.get_children()));
}
#[cfg(not(feature="clang_3_7"))]
fn test_get_fields<'tu>(_: Entity<'tu>) { }
test_get_fields(e.get_children()[0]);
});
let source = "
int integer = 322;
int* pointer = &integer;
";
with_types(&clang, source, |ts| {
assert_eq!(ts[0].get_pointee_type(), None);
assert_eq!(ts[1].get_pointee_type(), Some(ts[0]));
});
let source = "
class Class {
void a();
void b() &;
void c() &&;
};
";
with_types(&clang, source, |ts| {
let types = ts[0].get_declaration().unwrap().get_children().into_iter().map(|c| {
c.get_type().unwrap()
}).collect::<Vec<_>>();
assert_eq!(types[0].get_ref_qualifier(), None);
assert_eq!(types[1].get_ref_qualifier(), Some(RefQualifier::LValue));
assert_eq!(types[2].get_ref_qualifier(), Some(RefQualifier::RValue));
});
let source = "
template <typename T, int I> class Class { int member; };
int integer = 322;
template <> class Class<int, 322> { int member; };
";
with_types(&clang, source, |ts| {
assert_eq!(ts[0].get_template_argument_types(), None);
assert_eq!(ts[1].get_template_argument_types(), Some(vec![Some(ts[0]), None]));
});
let source = "
int a;
typedef int Integer;
Integer b;
";
with_types(&clang, source, |ts| {
#[cfg(feature="clang_5_0")]
fn test_get_typedef_name(ts: &[Type]) {
assert_eq!(ts[0].get_typedef_name(), None);
assert_eq!(ts[1].get_typedef_name(), Some("Integer".into()));
assert_eq!(ts[2].get_typedef_name(), Some("Integer".into()));
}
#[cfg(not(feature="clang_5_0"))]
fn test_get_typedef_name(_: &[Type]) { }
test_get_typedef_name(&ts[..]);
});
let source = "
int a = 322;
const int b = 322;
int* __restrict__ c = &a;
volatile int d = 322;
";
with_types(&clang, source, |ts| {
macro_rules! qualifiers {
($type_:expr, $c:expr, $r:expr, $v:expr) => ({
assert_eq!($type_.is_const_qualified(), $c);
assert_eq!($type_.is_restrict_qualified(), $r);
assert_eq!($type_.is_volatile_qualified(), $v);
});
}
qualifiers!(ts[0], false, false, false);
qualifiers!(ts[1], true, false, false);
qualifiers!(ts[2], false, true, false);
qualifiers!(ts[3], false, false, true);
});
let source = "
struct A { };
struct B { ~B() { } };
";
with_types(&clang, source, |ts| {
assert!(ts[0].is_pod());
assert!(!ts[1].is_pod());
});
let source = "
void a() { }
void b(...) { }
";
with_types(&clang, source, |ts| {
assert!(!ts[0].is_variadic());
assert!(ts[1].is_variadic());
});
let source = "
@class C<T>;
@protocol P
@end
C<C*><P> *x;
C* y;
";
with_translation_unit(&clang, "test.mm", source, &[], |_, _, tu| {
let children = tu.get_entity().get_children();
#[cfg(feature="clang_8_0")]
fn test_objc_object_type(e: &[Entity]) {
let ty = e[3].get_type().unwrap().get_pointee_type().unwrap();
assert_eq!(ty.get_objc_object_base_type(), Some(e[1].get_type().unwrap()));
let protocols = ty.get_objc_protocol_declarations();
assert_eq!(protocols.len(), 1);
assert_eq!(protocols[0], e[2]);
let args = ty.get_objc_type_arguments();
assert_eq!(args.len(), 1);
assert_eq!(args[0], e[4].get_type().unwrap());
}
#[cfg(not(feature="clang_8_0"))]
fn test_objc_object_type(_: &[Entity]) {}
test_objc_object_type(&children);
});
let source = "
void f(void) __attribute__((availability(ios,unavailable))) __attribute__((availability(macos,introduced=10.1.1,deprecated=10.2,obsoleted=11)));
";
with_entity(&clang, source, |e| {
let platform_availability = e.get_children().first().unwrap().get_platform_availability().unwrap();
assert_eq!(platform_availability, vec![
PlatformAvailability {
platform: "ios".to_string(),
unavailable: true,
introduced: None,
deprecated: None,
obsoleted: None,
message: None,
},
PlatformAvailability {
platform: "macos".to_string(),
unavailable: false,
introduced: Some(Version { x: 10, y: Some(1), z: Some(1) }),
deprecated: Some(Version { x: 10, y: Some(2), z: None }),
obsoleted: Some(Version { x: 11, y: None, z: None }),
message: None,
},
])
});
// Usr _______________________________________
let class = Usr::from_objc_class("A");
assert_eq!(class, Usr("c:objc(cs)A".into()));
assert_eq!(Usr::from_objc_category("A", "B"), Usr("c:objc(cy)A@B".into()));
assert_eq!(Usr::from_objc_ivar(&class, "B"), Usr("c:objc(cs)A@B".into()));
assert_eq!(Usr::from_objc_method(&class, "B", true), Usr("c:objc(cs)A(im)B".into()));
assert_eq!(Usr::from_objc_method(&class, "B", false), Usr("c:objc(cs)A(cm)B".into()));
assert_eq!(Usr::from_objc_property(&class, "B"), Usr("c:objc(cs)A(py)B".into()));
assert_eq!(Usr::from_objc_protocol("A"), Usr("c:objc(pl)A".into()));
}
| true |
5f0740e71e20f0a81a51577e442d2dd975ec5ecd
|
Rust
|
katopz/yew-react-example
|
/src/react_stdweb.rs
|
UTF-8
| 2,456 | 3.09375 | 3 |
[
"Apache-2.0",
"MIT"
] |
permissive
|
use std::convert::TryInto;
use stdweb::js;
use stdweb::web::Node;
use yew::{Callback, Component, ComponentLink, Html, Properties, ShouldRender};
/// Yew component that renders (and owns) a React-based counter widget.
pub struct ReactCounter {
    // DOM element React renders into; handed back to Yew as a VRef.
    node: Node,
    props: ReactCounterProps,
    // Callback invoked from the React side when the chip is clicked.
    react_counter_cb: Callback<()>,
    // Count of clicks on the React widget, kept on the Rust side.
    react_counter: usize,
}
/// Properties supplied by the parent component.
#[derive(Properties, Clone)]
pub struct ReactCounterProps {
    // Current value of the native (non-React) counter, for display.
    pub native_counter: usize,
    // Notifies the parent whenever the React counter changes.
    pub on_react_counter_change: Callback<usize>,
}
/// Messages handled by `ReactCounter::update`.
pub enum Msg {
    // The React chip was clicked.
    Increment,
}
impl Component for ReactCounter {
    type Message = Msg;
    type Properties = ReactCounterProps;
    /// Builds the component and pre-creates the DOM node React will render into.
    fn create(props: Self::Properties, mut link: ComponentLink<Self>) -> Self {
        ReactCounter {
            // Creating an element that we can render the React component into later
            node: stdweb::web::document()
                .create_element("div")
                .unwrap()
                .try_into()
                .unwrap(),
            props,
            // Creating a wrapper for the counter callback
            react_counter_cb: Self::link_react_counter_cb(&mut link),
            react_counter: 0,
        }
    }
    /// Handles clicks forwarded from the React widget; always re-renders.
    fn update(&mut self, msg: Self::Message) -> ShouldRender {
        match msg {
            Msg::Increment => {
                // Increment internal counter
                self.react_counter += 1;
                // Invoke callback with new count
                self.props.on_react_counter_change.emit(self.react_counter);
            }
        }
        true
    }
    /// Accepts new props from the parent unconditionally.
    fn change(&mut self, props: Self::Properties) -> ShouldRender {
        self.props = props;
        true
    }
    /// Renders a MaterialUI chip via React into `self.node`, then hands the
    /// node to Yew as a virtual-DOM reference.
    fn view(&self) -> Html {
        // Wrap callback in a closure that we can use in the js! macro
        let orig_callback = self.react_counter_cb.clone();
        let callback = move || orig_callback.emit(());
        let label = format!(
            "Native count: {} - React count: {}",
            self.props.native_counter, self.react_counter
        );
        js! {
            let element = React.createElement(MaterialUI.Chip,
                {
                    label: @{label},
                    onClick: () => @{callback}(),
                }
            );
            ReactDOM.render(element, @{self.node.clone()});
        }
        yew::virtual_dom::VNode::VRef(self.node.clone())
    }
}
impl ReactCounter {
    /// Creates the Yew callback that converts a React click into `Msg::Increment`.
    fn link_react_counter_cb(link: &mut ComponentLink<Self>) -> Callback<()> {
        link.callback(|_| Msg::Increment)
    }
}
| true |
df25b991fe44129b6093ddd9bac8845c861eb9c5
|
Rust
|
ozmorph/testract
|
/src/archive.rs
|
UTF-8
| 2,940 | 3 | 3 |
[
"MIT"
] |
permissive
|
use std::collections::HashMap;
use std::ffi::OsStr;
use std::hash::BuildHasherDefault;
use std::path::{Path, PathBuf};
use twox_hash::XxHash;
use crate::reader::{TESFile, TESReader};
use crate::{dump_to_file, Result};
/// Map from archived file path to its record, using the fast non-cryptographic XxHash hasher.
pub type FileMap<F> = HashMap<PathBuf, F, BuildHasherDefault<XxHash>>;
/// Describes which file extensions a caller is interested in.
#[derive(PartialEq)]
pub enum ExtensionSet<'a> {
    /// Matches no extension at all
    None,
    /// Matches exactly the extensions named in the list
    List(Vec<&'a str>),
    /// Matches every possible extension
    All,
}
impl<'a> ExtensionSet<'a> {
    /// Returns `true` when `file_extension` is covered by this set.
    pub fn is_match(&self, file_extension: &str) -> bool {
        match self {
            ExtensionSet::None => false,
            ExtensionSet::All => true,
            ExtensionSet::List(allowed) => allowed.iter().any(|ext| *ext == file_extension),
        }
    }
}
/// A generic archive: `H` is the archive-level header type and `F` the
/// per-file record type (each archive format supplies its own).
pub struct Archive<H, F> {
    /// Path on disk to this file
    pub path: PathBuf,
    /// Header containing metadata for the entire archive
    pub header: H,
    /// HashMap mapping file paths to files
    pub file_hashmap: FileMap<F>,
}
impl<H, F: Extract> Archive<H, F> {
    /// Returns the paths of all archived files whose extension matches
    /// `extension_set`.
    ///
    /// `ExtensionSet::All` returns every file, including files without an
    /// extension; `ExtensionSet::List` only returns files whose extension is
    /// in the list.
    fn get_by_extension(&self, extension_set: &ExtensionSet) -> Vec<&Path> {
        let mut file_names = Vec::new();
        if *extension_set == ExtensionSet::None {
            return file_names;
        }
        for file_name in self.file_hashmap.keys() {
            if *extension_set != ExtensionSet::All {
                // Fix: previously a file *without* an extension fell through
                // this check and was treated as a match for any List filter;
                // now only files whose extension is actually in the set pass.
                match file_name.extension().and_then(OsStr::to_str) {
                    Some(extension) if extension_set.is_match(extension) => {}
                    _ => continue,
                }
            }
            println!("{:#?}", file_name);
            file_names.push(file_name);
        }
        file_names
    }
    /// Extracts every file matching `extension_set` into `output_dir`.
    ///
    /// Does nothing when `output_dir` is the empty path or nothing matched.
    pub fn extract_by_extension(&self, extension_set: &ExtensionSet, output_dir: &Path) -> Result<()> {
        let file_names = self.get_by_extension(extension_set);
        if output_dir != Path::new("") && !file_names.is_empty() {
            let mut reader = TESReader::from_file(&self.path)?;
            for file_name in file_names {
                let file_data = self.extract_by_name(&mut reader, file_name)?;
                dump_to_file(&output_dir, &file_name, &file_data)?
            }
        }
        Ok(())
    }
    /// Given a file path, extracts the file content from the BSA.
    ///
    /// Fails when `file_path` is not present in the archive's file table.
    pub fn extract_by_name(&self, reader: &mut TESFile, file_path: &Path) -> Result<Vec<u8>> {
        let file_record = self
            .file_hashmap
            .get(file_path)
            .ok_or_else(|| format_err!("File {:#?} not found", file_path))?;
        file_record.extract(reader)
    }
}
/// Implemented by per-format file records that know how to pull their payload
/// bytes out of an open archive reader.
pub trait Extract {
    // Reads and returns this record's raw file content from `reader`.
    fn extract(&self, reader: &mut TESFile) -> Result<Vec<u8>>;
}
| true |
ef7516af5ad1481d545ced50319bb9bdad62a93b
|
Rust
|
nuta/archives
|
/noa-lsp/src/editor/ui/selector_view.rs
|
UTF-8
| 8,970 | 2.640625 | 3 |
[
"Apache-2.0",
"MIT"
] |
permissive
|
use std::cmp::min;
use noa_buffer::{cursor::Position, display_width::DisplayWidth};
use noa_compositor::{
canvas::{CanvasViewMut, Color, Style},
line_edit::LineEdit,
surface::{HandledEvent, Layout, RectSize, Surface},
terminal::{KeyCode, KeyEvent, KeyModifiers},
Compositor,
};
use crate::{config::theme_for, editor::Editor};
use super::helpers::truncate_to_width;
// Maximum number of terminal rows the selector may occupy.
const HEIGHT_MAX: usize = 16;
// Invoked once when the user confirms an item with Enter.
pub type SelectedCallback = dyn FnOnce(&mut Editor, &mut Compositor<Editor>) + Send;
// Invoked whenever the text in the selector's input line changes.
pub type ChangedCallback = dyn FnMut(&mut Editor, &str) + Send;
/// What a selector row displays.
pub enum SelectorContent {
    /// A plain labelled entry.
    Normal {
        label: String,
        sub_label: Option<String>,
    },
    /// A project-wide search hit: a matched range within a line of a file.
    SearchMatch {
        path: String,
        pos: Position,
        line_text: String,
        // The three ranges below index into `line_text` (byte offsets).
        before: std::ops::RangeTo<usize>,
        matched: std::ops::Range<usize>,
        after: std::ops::RangeFrom<usize>,
    },
}
/// One selectable row plus the action to run when it is chosen.
pub struct SelectorItem {
    pub content: SelectorContent,
    pub selected: Box<SelectedCallback>,
}
/// Bottom-of-screen fuzzy-selector UI (item list + optional input line).
pub struct SelectorView {
    // Tag identifying which feature currently owns the selector.
    opened_by: &'static str,
    active: bool,
    // (row, column) where the terminal cursor should be placed.
    cursor_pos: (usize, usize),
    items: Vec<SelectorItem>,
    selected_index: usize,
    // Index of the first visible item (vertical scroll offset).
    scroll: usize,
    // Rows available for items (excludes the input line), set during render.
    items_height: usize,
    // Present only when the selector was opened with input enabled.
    input: Option<LineEdit>,
    changed_callback: Option<Box<ChangedCallback>>,
}
impl SelectorView {
    /// Creates an inactive, empty selector.
    pub fn new() -> SelectorView {
        SelectorView {
            opened_by: "",
            active: false,
            cursor_pos: (0, 0),
            items: Vec::new(),
            selected_index: 0,
            scroll: 0,
            items_height: 0,
            input: None,
            changed_callback: None,
        }
    }
    /// Returns the tag passed to the most recent `open` call.
    pub fn opened_by(&self) -> &'static str {
        self.opened_by
    }
    /// Activates the selector and resets its selection/scroll state.
    ///
    /// When `enable_input` is true an input line is shown and
    /// `changed_callback` is invoked on every edit of that line.
    pub fn open(
        &mut self,
        opened_by: &'static str,
        // Renamed from the misspelled `eanble_input`; Rust has no named
        // arguments, so this is fully backward compatible for callers.
        enable_input: bool,
        changed_callback: Option<Box<ChangedCallback>>,
    ) {
        self.opened_by = opened_by;
        self.active = true;
        self.selected_index = 0;
        self.scroll = 0;
        self.items = Vec::new();
        self.changed_callback = changed_callback;
        if enable_input {
            self.input = Some(LineEdit::new());
        } else {
            self.input = None;
        }
    }
    /// Deactivates the selector (it stops rendering and consuming input).
    pub fn close(&mut self) {
        self.active = false;
    }
    /// Replaces the item list, clamping the selection to the new length.
    pub fn set_items(&mut self, items: Vec<SelectorItem>) {
        self.items = items;
        self.selected_index = min(self.selected_index, self.items.len().saturating_sub(1));
        self.adjust_scroll();
    }
    /// Moves `scroll` the minimal amount needed to keep the selected item
    /// inside the visible window of `items_height` rows.
    pub fn adjust_scroll(&mut self) {
        while self.scroll + self.items_height <= self.selected_index {
            self.scroll += 1;
        }
        while self.scroll > self.selected_index {
            self.scroll = self.scroll.saturating_sub(1);
        }
    }
}
impl Surface for SelectorView {
    type Context = Editor;
    fn name(&self) -> &str {
        "selector"
    }
    fn as_any_mut(&mut self) -> &mut dyn std::any::Any {
        self
    }
    fn is_active(&self, _editor: &mut Editor) -> bool {
        self.active
    }
    /// Computes the selector's on-screen rectangle: anchored to the bottom of
    /// the screen, tall enough for the items plus the optional input line,
    /// capped at `HEIGHT_MAX` and the screen height.
    fn layout(&mut self, _editor: &mut Editor, screen_size: RectSize) -> (Layout, RectSize) {
        let height = min(
            self.items.len() + if self.input.is_some() { 1 } else { 0 },
            min(HEIGHT_MAX, screen_size.height),
        );
        // Cursor sits on the input line (last row) when input is enabled.
        self.cursor_pos = match self.input.as_ref() {
            Some(input) => (height.saturating_sub(1), 1 + input.cursor_position()),
            None => (0, 0),
        };
        (
            Layout::Fixed {
                y: screen_size.height.saturating_sub(height + 1),
                x: 0,
            },
            RectSize {
                height,
                width: screen_size.width,
            },
        )
    }
    fn cursor_position(&self, _editor: &mut Editor) -> Option<(usize, usize)> {
        if self.active {
            Some(self.cursor_pos)
        } else {
            None
        }
    }
    /// Draws the visible window of items (honoring `scroll`), highlights the
    /// selected row and any search-match range, then draws the input line.
    fn render(&mut self, _editor: &mut Editor, canvas: &mut CanvasViewMut<'_>) {
        canvas.clear();
        // Reserve the bottom row for the input line if present.
        self.items_height =
            canvas
                .height()
                .saturating_sub(if self.input.is_some() { 1 } else { 0 });
        for (i, item) in self
            .items
            .iter()
            .skip(self.scroll)
            .take(self.items_height)
            .enumerate()
        {
            match &item.content {
                SelectorContent::Normal {
                    label,
                    sub_label: _,
                } => {
                    canvas.write_str(i, 1, truncate_to_width(label, canvas.width() - 2));
                }
                SelectorContent::SearchMatch {
                    path,
                    pos,
                    line_text,
                    before,
                    after,
                    matched,
                } => {
                    let before_text = &line_text[..before.end];
                    let matched_text = &line_text[matched.start..matched.end];
                    let after_text = &line_text[after.start..];
                    let s = format!(
                        "{before_text}{matched_text}{after_text} ({path}:{lineno})",
                        lineno = pos.y + 1
                    );
                    canvas.write_str(i, 1, truncate_to_width(&s, canvas.width() - 2));
                    // Color the matched substring red.
                    let x = before_text.display_width();
                    canvas.apply_style(
                        i,
                        x,
                        min(canvas.width(), x + matched_text.display_width()),
                        Style {
                            fg: Color::Red,
                            ..Default::default()
                        },
                    );
                }
            }
            if self.scroll + i == self.selected_index {
                canvas.apply_style(i, 0, canvas.width(), theme_for("selector.selected"));
            }
        }
        if let Some(input) = self.input.as_mut() {
            input.relocate_scroll(canvas.width());
            canvas.write_str(
                self.items_height,
                1,
                truncate_to_width(&input.text(), canvas.width() - 2),
            );
            canvas.apply_style(
                self.items_height,
                1,
                canvas.width() - 2,
                theme_for("selector.input"),
            );
        }
    }
    /// Enter confirms, Up/Down move the selection, Ctrl-Q closes; anything
    /// else is forwarded to the input line (firing the change callback when
    /// the text actually changed).
    fn handle_key_event(
        &mut self,
        editor: &mut Editor,
        compositor: &mut Compositor<Self::Context>,
        key: KeyEvent,
    ) -> HandledEvent {
        const NONE: KeyModifiers = KeyModifiers::NONE;
        const CTRL: KeyModifiers = KeyModifiers::CONTROL;
        // const ALT: KeyModifiers = KeyModifiers::ALT;
        // const SHIFT: KeyModifiers = KeyModifiers::SHIFT;
        match (key.code, key.modifiers) {
            (KeyCode::Enter, NONE) => {
                if self.selected_index >= self.items.len() {
                    warn!("out of bounds selected_index");
                    return HandledEvent::Consumed;
                }
                // The callback is FnOnce, so the item is moved out of the list.
                let item = self.items.remove(self.selected_index);
                (item.selected)(editor, compositor);
                self.close();
            }
            (KeyCode::Down, NONE) => {
                self.selected_index =
                    min(self.selected_index + 1, self.items.len().saturating_sub(1));
                self.adjust_scroll();
            }
            (KeyCode::Up, NONE) => {
                self.selected_index = self.selected_index.saturating_sub(1);
                self.adjust_scroll();
            }
            (KeyCode::Char('q'), CTRL) => {
                self.close();
            }
            _ => {
                if let Some(input) = self.input.as_mut() {
                    let prev_text = input.text();
                    input.consume_key_event(key);
                    let text = input.text();
                    if prev_text != text {
                        if let Some(callback) = self.changed_callback.as_mut() {
                            callback(editor, &text);
                        }
                    }
                }
            }
        }
        HandledEvent::Consumed
    }
    /// Pasted text goes straight into the input line, newlines flattened to
    /// spaces (the input is single-line).
    fn handle_key_batch_event(
        &mut self,
        _editor: &mut Editor,
        _compositor: &mut Compositor<Editor>,
        text: &str,
    ) -> HandledEvent {
        if let Some(input) = self.input.as_mut() {
            input.insert(&text.replace('\n', " "));
        }
        HandledEvent::Consumed
    }
    // Mouse events are swallowed so clicks don't leak to surfaces below.
    fn handle_mouse_event(
        &mut self,
        _ctx: &mut Self::Context,
        _compositor: &mut Compositor<Self::Context>,
        _kind: noa_compositor::terminal::MouseEventKind,
        _modifiers: noa_compositor::terminal::KeyModifiers,
        _surface_y: usize,
        _surface_x: usize,
    ) -> HandledEvent {
        HandledEvent::Consumed
    }
}
| true |
c89ccc768987a56fa2fccb4ae39a3ccc55292113
|
Rust
|
IThawk/rust-project
|
/rust-master/src/test/ui/nll/issue-43058.rs
|
UTF-8
| 565 | 2.71875 | 3 |
[
"MIT",
"LicenseRef-scancode-other-permissive",
"Apache-2.0",
"BSD-3-Clause",
"BSD-2-Clause",
"NCSA"
] |
permissive
|
// build-pass (FIXME(62277): could be check-pass?)
use std::borrow::Cow;
// Test data types: `S` borrows a string, `T` borrows a slice of `S`,
// exercising nested `Cow`s sharing one lifetime.
#[derive(Clone, Debug)]
struct S<'a> {
    name: Cow<'a, str>
}
#[derive(Clone, Debug)]
struct T<'a> {
    s: Cow<'a, [S<'a>]>
}
fn main() {
    // NOTE(review): this file is a rustc NLL regression test (issue #43058);
    // the exact shape of the borrows below is what exercises the compiler,
    // so only comments are added here.
    let s1 = [S { name: Cow::Borrowed("Test1") }, S { name: Cow::Borrowed("Test2") }];
    let b1 = T { s: Cow::Borrowed(&s1) };
    let s2 = [S { name: Cow::Borrowed("Test3") }, S { name: Cow::Borrowed("Test4") }];
    let b2 = T { s: Cow::Borrowed(&s2) };
    // Both borrowed values are moved into the Vec and must remain valid here.
    let mut v = Vec::new();
    v.push(b1);
    v.push(b2);
    println!("{:?}", v);
}
| true |
d9fbbac1538cbab3d65d4db53414f08ac1cc16d3
|
Rust
|
ergoplatform/sigma-rust
|
/ergotree-ir/src/chain/json/sigma_protocol.rs
|
UTF-8
| 4,949 | 2.5625 | 3 |
[
"CC0-1.0"
] |
permissive
|
use std::convert::TryFrom;
use std::convert::TryInto;
use bounded_vec::BoundedVecOutOfBounds;
use ergo_chain_types::EcPoint;
use serde::Deserialize;
use serde::Serialize;
use crate::sigma_protocol::sigma_boolean::cand::Cand;
use crate::sigma_protocol::sigma_boolean::cor::Cor;
use crate::sigma_protocol::sigma_boolean::cthreshold::Cthreshold;
use crate::sigma_protocol::sigma_boolean::ProveDhTuple;
use crate::sigma_protocol::sigma_boolean::ProveDlog;
use crate::sigma_protocol::sigma_boolean::SigmaBoolean;
use crate::sigma_protocol::sigma_boolean::SigmaConjecture;
use crate::sigma_protocol::sigma_boolean::SigmaProofOfKnowledgeTree;
/// JSON representation of a sigma proposition tree. The serde tag `op` holds
/// the ErgoTree opcode (as a decimal string) identifying the node kind.
#[derive(Serialize, Deserialize, PartialEq, Debug, Clone)]
#[serde(tag = "op")]
#[allow(clippy::large_enum_variant)]
pub enum SigmaBooleanJson {
    #[serde(rename = "205")] // OpCode::PROVE_DLOG
    ProveDlog { h: EcPoint },
    #[serde(rename = "206")] // OpCode::PROVE_DIFFIE_HELLMAN_TUPLE
    ProveDhTuple {
        g: EcPoint,
        h: EcPoint,
        u: EcPoint,
        v: EcPoint,
    },
    #[serde(rename = "300")] // OpCode::TRIVIAL_PROP_FALSE
    TrivialPropFalse { condition: bool },
    #[serde(rename = "301")] // OpCode::TRIVIAL_PROP_TRUE
    TrivialPropTrue { condition: bool },
    #[serde(rename = "150")] // OpCode::AND
    Cand { args: Vec<SigmaBooleanJson> },
    #[serde(rename = "151")] // OpCode::OR
    Cor { args: Vec<SigmaBooleanJson> },
    #[serde(rename = "152")] // OpCode::ATLEAST
    Cthreshold { k: u8, args: Vec<SigmaBooleanJson> },
}
impl From<SigmaBoolean> for SigmaBooleanJson {
    /// Converts the in-memory sigma tree into its JSON form, recursing into
    /// conjecture children via `Into`.
    fn from(sb: SigmaBoolean) -> Self {
        match sb {
            SigmaBoolean::ProofOfKnowledge(SigmaProofOfKnowledgeTree::ProveDlog(pd)) => {
                SigmaBooleanJson::ProveDlog { h: *pd.h }
            }
            SigmaBoolean::ProofOfKnowledge(SigmaProofOfKnowledgeTree::ProveDhTuple(pdh)) => {
                SigmaBooleanJson::ProveDhTuple {
                    g: *pdh.g,
                    h: *pdh.h,
                    u: *pdh.u,
                    v: *pdh.v,
                }
            }
            // The `if tp` guard routes `true` here; the next arm catches `false`.
            SigmaBoolean::TrivialProp(tp) if tp => {
                SigmaBooleanJson::TrivialPropTrue { condition: tp }
            }
            SigmaBoolean::TrivialProp(tp) => SigmaBooleanJson::TrivialPropFalse { condition: tp },
            SigmaBoolean::SigmaConjecture(SigmaConjecture::Cand(cand)) => SigmaBooleanJson::Cand {
                args: cand
                    .items
                    .as_vec()
                    .clone()
                    .into_iter()
                    .map(Into::into)
                    .collect(),
            },
            SigmaBoolean::SigmaConjecture(SigmaConjecture::Cor(cor)) => SigmaBooleanJson::Cor {
                args: cor
                    .items
                    .as_vec()
                    .clone()
                    .into_iter()
                    .map(Into::into)
                    .collect(),
            },
            SigmaBoolean::SigmaConjecture(SigmaConjecture::Cthreshold(ct)) => {
                SigmaBooleanJson::Cthreshold {
                    k: ct.k,
                    args: ct
                        .children
                        .as_vec()
                        .clone()
                        .into_iter()
                        .map(Into::into)
                        .collect(),
                }
            }
        }
    }
}
impl TryFrom<SigmaBooleanJson> for SigmaBoolean {
    type Error = BoundedVecOutOfBounds;
    /// Rebuilds the in-memory sigma tree from JSON. The only failure mode is
    /// a conjecture whose argument count violates the `BoundedVec` bounds.
    ///
    /// NOTE(review): the `condition` field is taken at face value, so a
    /// `TrivialPropTrue` carrying `condition: false` yields
    /// `TrivialProp(false)` — presumably well-formed input never does this.
    fn try_from(sbj: SigmaBooleanJson) -> Result<Self, Self::Error> {
        Ok(match sbj {
            SigmaBooleanJson::ProveDlog { h } => ProveDlog { h: h.into() }.into(),
            SigmaBooleanJson::ProveDhTuple { g, h, u, v } => ProveDhTuple::new(g, h, u, v).into(),
            SigmaBooleanJson::TrivialPropTrue { condition } => SigmaBoolean::TrivialProp(condition),
            SigmaBooleanJson::TrivialPropFalse { condition } => {
                SigmaBoolean::TrivialProp(condition)
            }
            SigmaBooleanJson::Cand { args } => Cand {
                items: args
                    .into_iter()
                    .map(TryInto::try_into)
                    .collect::<Result<Vec<SigmaBoolean>, _>>()?
                    .try_into()?,
            }
            .into(),
            SigmaBooleanJson::Cor { args } => Cor {
                items: args
                    .into_iter()
                    .map(TryInto::try_into)
                    .collect::<Result<Vec<SigmaBoolean>, _>>()?
                    .try_into()?,
            }
            .into(),
            SigmaBooleanJson::Cthreshold { k, args } => Cthreshold {
                k,
                children: args
                    .into_iter()
                    .map(TryInto::try_into)
                    .collect::<Result<Vec<SigmaBoolean>, _>>()?
                    .try_into()?,
            }
            .into(),
        })
    }
}
| true |
b6b680930540e3d687f26497b00481adc7ffeb32
|
Rust
|
PhilipDaniels/uswindpowerstats
|
/rustworkspace/repository/src/models.rs
|
UTF-8
| 7,048 | 3.078125 | 3 |
[
"MIT"
] |
permissive
|
use serde::{Deserialize, Serialize};
use std::convert::TryFrom;
use tiberius::{numeric::Decimal, time::chrono::NaiveDate, Row};
/// Source of the aerial imagery used to verify a turbine.
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
pub struct ImageSource {
    pub id: u8,
    pub name: String,
}
impl TryFrom<&Row> for ImageSource {
    type Error = crate::error::Error;
    /// Extracts `(id, name)` by column position.
    // NOTE(review): the unwraps assume columns 0 and 1 are NOT NULL in the
    // schema — verify against the database definition.
    fn try_from(row: &Row) -> Result<Self, Self::Error> {
        let id = row.try_get::<u8, _>(0)?.unwrap();
        let name = row.try_get::<&str, _>(1)?.unwrap().to_string();
        Ok(ImageSource { id, name })
    }
}
/// Kind of US administrative division, stored as a one-letter code in the DB.
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
pub enum StateType {
    State,
    Territory,
    FederalCapital,
}
/// A US state/territory row.
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
pub struct State {
    pub id: String,
    pub name: String,
    pub capital: Option<String>,
    pub population: Option<i32>,
    pub area_square_km: Option<i32>,
    pub state_type: StateType,
}
impl TryFrom<&Row> for State {
    type Error = crate::error::Error;
    /// Builds a `State` from a row laid out as
    /// `(id, name, capital, population, area_square_km, state_type)`.
    ///
    /// `state_type` is a one-letter code (`S`/`T`/`F`); anything else,
    /// including NULL, is rejected with `UnknownStateType`.
    fn try_from(row: &Row) -> Result<Self, Self::Error> {
        let id = row.try_get::<&str, _>(0)?.unwrap().to_string();
        let name = row.try_get::<&str, _>(1)?.unwrap().to_string();
        let capital = row.try_get::<&str, _>(2)?.map(|s| s.to_string());
        let population = row.try_get::<i32, _>(3)?;
        let area_square_km = row.try_get::<i32, _>(4)?;
        let state_type = match row.try_get::<&str, _>(5)? {
            Some("S") => StateType::State,
            Some("T") => StateType::Territory,
            Some("F") => StateType::FederalCapital,
            // `other` replaces the redundant `x @ _` binding (clippy:
            // redundant_pattern); behavior is unchanged.
            other => return Err(Self::Error::UnknownStateType(format!("{:?}", other))),
        };
        Ok(State {
            id,
            name,
            capital,
            population,
            area_square_km,
            state_type,
        })
    }
}
/// A county within a state.
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
pub struct County {
    pub id: i32,
    pub state_id: String,
    pub name: String,
}
impl TryFrom<&Row> for County {
    type Error = crate::error::Error;
    /// Extracts `(id, state_id, name)` by column position.
    fn try_from(row: &Row) -> Result<Self, Self::Error> {
        let id = row.try_get::<i32, _>(0)?.unwrap();
        let state_id = row.try_get::<&str, _>(1)?.unwrap().to_string();
        let name = row.try_get::<&str, _>(2)?.unwrap().to_string();
        Ok(County { id, state_id, name })
    }
}
/// Confidence in a turbine's recorded data, stored as 1..=3 in the DB.
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
pub enum ConfidenceLevel {
    Low = 1,
    Medium = 2,
    High = 3,
}
impl TryFrom<Option<u8>> for ConfidenceLevel {
    type Error = crate::error::Error;
    /// Maps the raw nullable DB value (1/2/3) onto `ConfidenceLevel`.
    ///
    /// `None` and any out-of-range value are rejected with
    /// `UnknownConfidenceLevel`, carrying the offending value for diagnostics.
    fn try_from(value: Option<u8>) -> Result<Self, Self::Error> {
        match value {
            Some(1) => Ok(ConfidenceLevel::Low),
            Some(2) => Ok(ConfidenceLevel::Medium),
            Some(3) => Ok(ConfidenceLevel::High),
            // `other` replaces the redundant `x @ _` binding (clippy:
            // redundant_pattern); behavior is unchanged.
            other => Err(Self::Error::UnknownConfidenceLevel(format!("{:?}", other))),
        }
    }
}
/// A turbine manufacturer.
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
pub struct Manufacturer {
    pub id: i32,
    pub name: String,
}
impl TryFrom<&Row> for Manufacturer {
    type Error = crate::error::Error;
    /// Extracts `(id, name)` by column position.
    fn try_from(row: &Row) -> Result<Self, Self::Error> {
        let id = row.try_get::<i32, _>(0)?.unwrap();
        let name = row.try_get::<&str, _>(1)?.unwrap().to_string();
        Ok(Manufacturer { id, name })
    }
}
/// A wind-power project (farm); turbine count and capacity may be unknown.
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
pub struct Project {
    pub id: i32,
    pub name: String,
    pub num_turbines: Option<i16>,
    pub capacity_mw: Option<Decimal>,
}
impl TryFrom<&Row> for Project {
    type Error = crate::error::Error;
    /// Extracts `(id, name, num_turbines, capacity_mw)` by column position.
    fn try_from(row: &Row) -> Result<Self, Self::Error> {
        let id = row.try_get::<i32, _>(0)?.unwrap();
        let name = row.try_get::<&str, _>(1)?.unwrap().to_string();
        let num_turbines = row.try_get::<i16, _>(2)?;
        let capacity_mw = row.try_get::<Decimal, _>(3)?;
        Ok(Project {
            id,
            name,
            num_turbines,
            capacity_mw,
        })
    }
}
/// A turbine model; all physical dimensions are optional in the source data.
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
pub struct Model {
    pub id: i32,
    pub manufacturer_id: i32,
    pub name: String,
    pub capacity_kw: Option<i32>,
    pub hub_height: Option<Decimal>,
    pub rotor_diameter: Option<Decimal>,
    pub rotor_swept_area: Option<Decimal>,
    pub total_height_to_tip: Option<Decimal>,
}
impl TryFrom<&Row> for Model {
    type Error = crate::error::Error;
    /// Extracts the model columns by position (0..=7), in declaration order.
    fn try_from(row: &Row) -> Result<Self, Self::Error> {
        let id = row.try_get::<i32, _>(0)?.unwrap();
        let manufacturer_id = row.try_get::<i32, _>(1)?.unwrap();
        let name = row.try_get::<&str, _>(2)?.unwrap().to_string();
        let capacity_kw = row.try_get::<i32, _>(3)?;
        let hub_height = row.try_get::<Decimal, _>(4)?;
        let rotor_diameter = row.try_get::<Decimal, _>(5)?;
        let rotor_swept_area = row.try_get::<Decimal, _>(6)?;
        let total_height_to_tip = row.try_get::<Decimal, _>(7)?;
        Ok(Model {
            id,
            manufacturer_id,
            name,
            capacity_kw,
            hub_height,
            rotor_diameter,
            rotor_swept_area,
            total_height_to_tip,
        })
    }
}
/// A single installed turbine with its location and data-confidence metadata.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct Turbine {
    pub id: i32,
    pub county_id: i32,
    pub project_id: i32,
    pub model_id: i32,
    pub image_source_id: u8,
    pub retrofit: bool,
    pub retrofit_year: Option<i16>,
    pub attributes_confidence_level: ConfidenceLevel,
    pub location_confidence_level: ConfidenceLevel,
    pub image_date: Option<NaiveDate>,
    pub latitude: Decimal,
    pub longitude: Decimal,
}
impl TryFrom<&Row> for Turbine {
    type Error = crate::error::Error;
    /// Extracts the turbine columns by position (0..=11); the two confidence
    /// columns are validated via `ConfidenceLevel::try_from`.
    fn try_from(row: &Row) -> Result<Self, Self::Error> {
        let id = row.try_get::<i32, _>(0)?.unwrap();
        let county_id = row.try_get::<i32, _>(1)?.unwrap();
        let project_id = row.try_get::<i32, _>(2)?.unwrap();
        let model_id = row.try_get::<i32, _>(3)?.unwrap();
        let image_source_id = row.try_get::<u8, _>(4)?.unwrap();
        let retrofit = row.try_get::<bool, _>(5)?.unwrap();
        let retrofit_year = row.try_get::<i16, _>(6)?;
        let attributes_confidence_level = ConfidenceLevel::try_from(row.try_get::<u8, _>(7)?)?;
        let location_confidence_level = ConfidenceLevel::try_from(row.try_get::<u8, _>(8)?)?;
        let image_date = row.try_get::<NaiveDate, _>(9)?;
        let latitude = row.try_get::<Decimal, _>(10)?.unwrap();
        let longitude = row.try_get::<Decimal, _>(11)?.unwrap();
        Ok(Turbine {
            id,
            county_id,
            project_id,
            model_id,
            image_source_id,
            retrofit,
            retrofit_year,
            attributes_confidence_level,
            location_confidence_level,
            image_date,
            latitude,
            longitude,
        })
    }
}
| true |
4d543c1b027ec8ae322ade7d9ea0a88a36e5a0a2
|
Rust
|
fatshen/rust-book
|
/operators_and_overloading/src/main.rs
|
UTF-8
| 1,047 | 3.8125 | 4 |
[] |
no_license
|
fn main() {
    use std::ops::Add;

    /// A 2-D point used to demonstrate overloading `+`.
    #[derive(Debug)]
    struct Point {
        x: i32,
        y: i32,
    }

    // `Point + Point` adds component-wise.
    impl Add for Point {
        type Output = Point;

        fn add(self, rhs: Point) -> Point {
            Point {
                x: self.x + rhs.x,
                y: self.y + rhs.y,
            }
        }
    }

    let first = Point { x: 1, y: 2 };
    let second = Point { x: 3, y: 4 };
    let sum = first + second;
    println!("x: {}, y: {}", sum.x, sum.y);

    // `+` can also be overloaded for a different right-hand-side type (and a
    // different output type); this toy impl ignores its argument.
    impl Add<i32> for Point {
        type Output = f64;

        fn add(self, _rhs: i32) -> f64 {
            0.7
        }
    }
    println!("add<i32>: {}", sum + 333);

    use std::ops::Mul;

    // A generic trait: anything with an area measured in units of `T`.
    trait HasArea<T> {
        fn area(&self) -> T;
    }

    struct Squre<T> {
        x: T,
        y: T,
        side: T,
    }

    // `area` only needs multiplication and cheap copies of `T`.
    impl<T> HasArea<T> for Squre<T>
    where
        T: Mul<Output = T> + Copy,
    {
        fn area(&self) -> T {
            self.side * self.side
        }
    }

    let square = Squre {
        x: 0.0f64,
        y: 0.0f64,
        side: 12.0f64,
    };
    println!("area: {}", square.area());
}
| true |
a324a9dc3afd41e2d4df8a79bdd6cae073911556
|
Rust
|
OhBonsai/RedisLtree
|
/tree/src/iter.rs
|
UTF-8
| 2,182 | 2.765625 | 3 |
[] |
no_license
|
use super::{Node, Link};
use super::rust::*;
/// Borrowing iterator over a raw doubly/singly linked run of `Node<T>`s,
/// delimited by `head`..=`tail` with a known length.
pub struct Iter<'a, T:'a> {
    head : *const Link,
    tail : *const Link,
    len  : usize,
    // Ties the iterator's lifetime to the borrowed nodes.
    mark : PhantomData<&'a Node<T>>,
}
impl<'a, T:'a> Iterator for Iter<'a, T> {
    type Item = &'a Node<T>;
    #[inline] fn next( &mut self ) -> Option<&'a Node<T>> {
        if self.head.is_null() {
            None
        } else { unsafe {
            // SAFETY (review note): assumes `head`/`tail` point into a live
            // node chain for 'a, upheld by `new`'s crate-internal callers, and
            // that each `Link` is the first field of its `Node<T>` so the
            // pointer cast is valid — TODO confirm against `Node`'s layout.
            let node = self.head;
            self.head = if self.head == self.tail {
                null()
            } else {
                (*node).next
            };
            self.len -= 1;
            Some( &*( node as *mut Node<T> ))
        }}
    }
    // Exact: `len` is maintained on every `next`.
    #[inline] fn size_hint( &self ) -> ( usize, Option<usize> ) { ( self.len, Some( self.len ))}
}
impl<'a,T> ExactSizeIterator for Iter<'a, T> {}
impl<'a,T> FusedIterator for Iter<'a, T> {}
impl<'a, T:'a> Iter<'a, T> {
    // Crate-internal constructor; callers are responsible for the pointer
    // validity invariants documented on `next`.
    #[inline] pub(crate) fn new( head: *const Link, tail: *const Link, len: usize ) -> Self {
        Iter{ head, tail, len, mark: PhantomData }
    }
}
impl<'a, T> Clone for Iter<'a, T> {
    // Manual impl: a derived Clone would require `T: Clone`, which the
    // iterator (holding only raw pointers) does not need.
    fn clone(&self) -> Self {
        Iter { ..*self }
    }
}
/// Mutable counterpart of `Iter`, yielding pinned `&mut Node<T>`s.
pub struct IterMut<'a, T:'a> {
    head : *mut Link,
    tail : *mut Link,
    len  : usize,
    // Ties the iterator's lifetime to the mutably borrowed, pinned nodes.
    mark : PhantomData<Pin<&'a mut Node<T>>>,
}
impl<'a, T:'a> Iterator for IterMut<'a, T> {
    type Item = Pin<&'a mut Node<T>>;
    #[inline] fn next( &mut self ) -> Option<Pin<&'a mut Node<T>>> {
        if self.head.is_null() {
            None
        } else { unsafe {
            // SAFETY (review note): same pointer-validity assumptions as
            // `Iter::next`, plus exclusive access for 'a so handing out
            // `&mut` aliases nothing; `new_unchecked` also assumes the nodes
            // never move while pinned — TODO confirm with the owning list.
            let node = self.head;
            self.head = if self.head == self.tail {
                null_mut()
            } else {
                (*node).next
            };
            self.len -= 1;
            Some( Pin::new_unchecked( &mut *( node as *mut Node<T> )))
        }}
    }
    // Exact: `len` is maintained on every `next`.
    #[inline] fn size_hint( &self ) -> ( usize, Option<usize> ) { ( self.len, Some( self.len ))}
}
impl<'a,T> ExactSizeIterator for IterMut<'a, T> {}
impl<'a, T> FusedIterator for IterMut<'a, T> {}
impl<'a, T:'a> IterMut<'a, T> {
    // Crate-internal constructor; callers uphold the invariants above.
    #[inline] pub(crate) fn new( head: *mut Link, tail: *mut Link, len: usize ) -> Self {
        IterMut{ head, tail, len, mark: PhantomData }
    }
}
| true |
dc6d08367c276acd481154086542bc65da1271cd
|
Rust
|
Tyg13/tylang
|
/crates/cst/src/syntax/node.rs
|
UTF-8
| 4,915 | 3.109375 | 3 |
[] |
no_license
|
use std::sync::Arc;
use crate::syntax::NodeOrToken;
use crate::{green, SyntaxKind};
/// A red-tree syntax node: a cheap, shareable handle around `NodeData`.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct Node(pub(crate) Arc<NodeData>);
impl std::fmt::Display for Node {
    // Renders the node's subtree starting at indentation level 0.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "{}", self.to_string_indented(0))
    }
}
impl std::ops::Deref for Node {
    type Target = NodeData;
    // Lets callers use `NodeData` fields/methods directly on a `Node`.
    #[inline]
    fn deref(&self) -> &Self::Target {
        self.0.deref()
    }
}
#[derive(Clone, PartialEq, Eq, Hash)]
pub struct NodeData {
pub offset: usize,
pub parent: Option<Node>,
pub green: Arc<green::Node>,
pub index: usize,
}
impl std::fmt::Debug for NodeData {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let mut f = f.debug_struct("Data");
if let Some(ref parent) = self.parent {
f.field("parent", &format!("{:?}", parent.green.kind));
}
f.field("offset", &self.offset);
f.field("green", &self.green);
f.field("index", &self.index);
f.finish()
}
}
impl Node {
    /// Returns a new handle to the same node. Equivalent to the derived
    /// `Clone`; kept as an associated function for callers using this form.
    #[inline]
    pub fn clone(other: &Node) -> Self {
        Self(Arc::clone(&other.0))
    }
    /// Wraps a green tree root in a positioned node at offset 0 with no parent.
    #[inline]
    pub fn root(green: Arc<green::Node>) -> Self {
        Self(Arc::new(NodeData {
            offset: 0,
            parent: None,
            green,
            index: 0,
        }))
    }
    /// Syntax kind of the underlying green node.
    #[inline]
    pub fn kind(&self) -> SyntaxKind {
        self.green.kind
    }
    /// Source text covered by this node (delegates to the green tree).
    #[inline]
    pub fn text(&self) -> String {
        self.green.text()
    }
    /// Parent node, or `None` for the root.
    #[inline]
    pub fn parent(&self) -> Option<Node> {
        self.parent.clone()
    }
    /// Walks from this node up to the root, starting with `self` itself.
    #[inline]
    pub fn ancestors(&self) -> impl Iterator<Item = Node> + '_ {
        std::iter::successors(Some(self.clone()), Self::parent)
    }
    /// All children of this node's parent except this node. Empty for the root.
    #[inline]
    pub fn siblings(&self) -> impl Iterator<Item = NodeOrToken> + '_ {
        self.parent.as_ref().into_iter().flat_map(|parent| {
            parent
                .children_with_tokens()
                .filter(|child| child.index() != self.index)
        })
    }
    /// Previous sibling (node or token), or `None` if this is the first child
    /// or the root.
    ///
    /// Uses `checked_sub` so that index 0 yields `None` instead of wrapping
    /// back to this node itself (the old `saturating_sub(1)` returned child 0
    /// — i.e. `self` — when called on the first child).
    #[inline]
    pub fn prev(&self) -> Option<NodeOrToken> {
        let parent = self.parent.as_ref()?;
        let index = self.index.checked_sub(1)?;
        parent.child_by_index(index)
    }
    /// Next sibling (node or token), or `None` if this is the last child
    /// or the root.
    #[inline]
    pub fn next(&self) -> Option<NodeOrToken> {
        self.parent.as_ref().and_then(|parent| {
            parent.child_by_index(self.index.saturating_add(1))
        })
    }
    /// Materializes the `index`-th green child as a positioned `NodeOrToken`,
    /// translating its parent-relative offset into an absolute one.
    #[inline]
    fn construct_child(
        &self,
        index: usize,
        green: &green::Child,
    ) -> NodeOrToken {
        match green {
            green::Child::Node {
                relative_offset,
                node,
            } => NodeOrToken::node(
                self.offset + relative_offset,
                &self,
                node,
                index,
            ),
            green::Child::Token {
                relative_offset,
                token,
            } => NodeOrToken::token(
                self.offset + relative_offset,
                &self,
                token,
                index,
            ),
        }
    }
    /// Child at `index`, or `None` when out of bounds.
    #[inline]
    fn child_by_index(&self, index: usize) -> Option<NodeOrToken> {
        self.green
            .children
            .get(index)
            .map(|child| self.construct_child(index, child))
    }
    /// Number of direct children (nodes and tokens).
    #[inline]
    pub fn num_children(&self) -> usize {
        self.green.children.len()
    }
    /// All direct children, tokens included, in source order.
    #[inline]
    pub fn children_with_tokens(
        &self,
    ) -> impl Iterator<Item = NodeOrToken> + '_ {
        self.green
            .children
            .iter()
            .enumerate()
            .map(|(idx, child)| self.construct_child(idx, child))
    }
    /// Direct child *nodes* only (tokens filtered out).
    #[inline]
    pub fn children(&self) -> impl Iterator<Item = Node> + '_ {
        self.children_with_tokens()
            .filter_map(|child| child.into_node())
    }
    /// `i`-th child node. Panics if there are fewer than `i + 1` child nodes.
    #[inline]
    pub fn child(&self, i: usize) -> Node {
        self.children().nth(i).unwrap()
    }
    /// Absolute source range covered by this node.
    ///
    /// The end is taken from the last child only, instead of computing the
    /// (recursive) range of *every* child and discarding all but the last as
    /// the previous implementation did. A childless node covers `offset..offset`.
    #[inline]
    pub fn range(&self) -> std::ops::Range<usize> {
        let start = self.offset;
        let end = self
            .children_with_tokens()
            .last()
            .map_or(start, |child| child.range().end);
        start..end
    }
    /// Pretty-prints the subtree rooted here, with `indent` leading spaces and
    /// two extra spaces per nesting level.
    pub fn to_string_indented(&self, indent: usize) -> String {
        // Compute the range once; the old code called `range()` twice.
        let range = self.range();
        let (start, end) = (range.start, range.end);
        format!(
            "{indent}{kind:?} @ {start}..{end}:{children}",
            indent = str::repeat(" ", indent),
            kind = self.kind(),
            children = self
                .children_with_tokens()
                .map(|child| {
                    format!("\n{}", child.to_string_indented(indent + 2))
                })
                .collect::<String>()
        )
    }
    /// Wraps this node in the `NodeOrToken` sum type.
    #[inline]
    pub fn as_node_or_token(&self) -> NodeOrToken {
        NodeOrToken::Node(self.clone())
    }
}
| true |
4968d642589a9af610fd179681538e99675962b8
|
Rust
|
s-panferov/later
|
/src/schedule.rs
|
UTF-8
| 2,704 | 3.3125 | 3 |
[] |
no_license
|
use crate::interval::{Interval, Timeline};
/// A composite schedule built from several timelines.
pub struct Schedule {
    items: Vec<Box<dyn Timeline>>,
    bounds: Option<Interval>, // NOTE(review): stored but not read anywhere in this file — confirm intended use
}
impl Schedule {
    pub fn new(
        mut items: Vec<Box<dyn Timeline>>,
        bounds: Option<Interval>,
    ) -> Self {
        // Sort by duration hint, descending (note the b-vs-a order) —
        // presumably so the coarsest timeline drives the outer iteration
        // in `ScheduleIterator::init`; TODO confirm against `Timeline` docs.
        items.sort_by(|a, b| b.duration_hint().cmp(&a.duration_hint()));
        Schedule { items, bounds }
    }
    /// Get an iterator to resolve intervals
    pub fn iter_within(&self, interval: Interval) -> ScheduleIterator {
        ScheduleIterator::new(self, interval)
    }
}
/// Lazily resolves concrete intervals from a `Schedule` by nesting one
/// iterator per schedule item, each constrained by the interval produced by
/// the level above it.
pub struct ScheduleIterator<'a> {
    interval: Interval,  // the outermost search window
    schedule: &'a Schedule,
    initialized: bool,   // the first `next()` call builds `state`
    state: Vec<Box<dyn Iterator<Item = Interval>>>, // one live iterator per schedule item
}
impl<'a> ScheduleIterator<'a> {
    fn new(schedule: &'a Schedule, interval: Interval) -> Self {
        ScheduleIterator {
            schedule,
            interval,
            initialized: false,
            state: Vec::new(),
        }
    }
}
impl<'a> ScheduleIterator<'a> {
    /// Builds (or rebuilds) the iterator chain from item `i` downwards.
    ///
    /// Each item's iterator is created within the interval yielded by the
    /// previous level, and its first interval becomes the constraint for the
    /// next level. Returns the innermost interval, or `None` if any level
    /// produces nothing (the chain is then left partially pushed; callers
    /// backtrack via `next`).
    fn init(&mut self, mut i: usize, interval: Interval) -> Option<Interval> {
        let mut int = interval.clone();
        let len = self.schedule.items.len();
        while i < len {
            let mut iter =
                self.schedule.items.get(i).unwrap().iter_within(int.clone());
            let next = iter.next();
            match next {
                Some(next) => {
                    // Narrow the window for the next (finer) level.
                    int = next;
                    self.state.push(iter);
                }
                None => return None,
            }
            i += 1;
        }
        Some(int)
    }
}
impl<'a> Iterator for ScheduleIterator<'a> {
    type Item = Interval;
    // Yields intervals from the innermost level; when a level is exhausted it
    // backtracks: pops the level, advances the next-outer iterator, and
    // re-initializes the inner levels within the new outer interval.
    fn next(&mut self) -> Option<Interval> {
        if !self.initialized {
            // Lazy first-call setup of the whole iterator chain.
            let len = self.schedule.items.len();
            if len == 0 {
                return None;
            }
            let int = self.init(0, self.interval.clone());
            match int {
                Some(int) => {
                    self.initialized = true;
                    return Some(int);
                }
                None => return None,
            }
        }
        // Fast path: the innermost iterator still has intervals.
        let curr_iter = self.state.last_mut().unwrap();
        let next = curr_iter.next();
        match next {
            Some(next) => return Some(next),
            None => loop {
                // Backtrack one level at a time until some outer iterator
                // advances, then rebuild the levels below it.
                self.state.pop();
                let len = self.state.len();
                if len == 0 {
                    return None;
                }
                let curr_iter = self.state.last_mut().unwrap();
                let next = curr_iter.next();
                match next {
                    // `len` is the index of the first level that was popped,
                    // so `init(len, ...)` rebuilds everything below.
                    Some(next) => return self.init(len, next),
                    None => continue,
                }
            },
        }
    }
}
// #[cfg(test)]
// mod tests {
// use super::*;
// use chrono::Duration;
// use insta::assert_debug_snapshot_matches;
// #[test]
// fn every() {
// let schedule = Schedule::from_parts(vec![
// SchedulePart::Every(Box::new(Duration::minutes(10))),
// SchedulePart::Every(Box::new(Duration::hours(1).nth(2))),
// ]);
// let what: Vec<Interval> = schedule
// .iter_within(Interval::from(
// "2019-01-01T00:00:00Z".parse().unwrap(),
// ))
// .take(10)
// .collect();
// assert_debug_snapshot_matches!("every", what);
// }
// }
| true |
3a5dd68927740603a10d6c4e03fd6e2d4d76105b
|
Rust
|
gpaulu/AdventOfCode2020
|
/day04/src/main.rs
|
UTF-8
| 1,911 | 3.046875 | 3 |
[] |
no_license
|
use std::{fs::read_to_string, str::FromStr};
mod passport;
/// Entry point: reads the puzzle input from `input.txt` in the working
/// directory and prints the part 1 and part 2 counts.
fn main() {
    // Panics with a message if the input file is missing — acceptable for a
    // one-shot puzzle binary.
    let input = read_to_string("input.txt").expect("error reading input file");
    println!("Part 1:");
    println!("{} valid \"passports\" ;)", count_valid_passports(&input));
    println!("Part 2:");
    println!(
        "{} super valid \"passports\" ;)",
        count_valid_passports_part2(&input)
    );
}
/// Counts the blank-line-separated chunks of `passports` that parse as a
/// `passport::Passport` (part 1: all required fields present).
fn count_valid_passports(passports: &str) -> usize {
    passports
        .split("\n\n")
        .filter(|chunk| passport::Passport::from_str(chunk).is_ok())
        .count()
}
/// Counts the chunks that also satisfy the stricter per-field validation
/// rules of `passport::ValidatedPassport` (part 2).
fn count_valid_passports_part2(passports: &str) -> usize {
    passports
        .split("\n\n")
        .filter(|chunk| passport::ValidatedPassport::from_str(chunk).is_ok())
        .count()
}
#[cfg(test)]
mod tests {
    use super::*;
    // Part 1 example from the puzzle statement: 4 "passports", 2 valid.
    #[test]
    fn example() {
        let input = "ecl:gry pid:860033327 eyr:2020 hcl:#fffffd
byr:1937 iyr:2017 cid:147 hgt:183cm
iyr:2013 ecl:amb cid:350 eyr:2023 pid:028048884
hcl:#cfa07d byr:1929
hcl:#ae17e1 iyr:2013
eyr:2024
ecl:brn pid:760753108 byr:1931
hgt:179cm
hcl:#cfa07d eyr:2025 pid:166559648
iyr:2011 ecl:brn hgt:59in";
        assert_eq!(count_valid_passports(input), 2);
    }
    // Part 2 example: the first four entries are invalid, the last four valid.
    #[test]
    fn example_part2() {
        let input = "eyr:1972 cid:100
hcl:#18171d ecl:amb hgt:170 pid:186cm iyr:2018 byr:1926
iyr:2019
hcl:#602927 eyr:1967 hgt:170cm
ecl:grn pid:012533040 byr:1946
hcl:dab227 iyr:2012
ecl:brn hgt:182cm pid:021572410 eyr:2020 byr:1992 cid:277
hgt:59cm ecl:zzz
eyr:2038 hcl:74454a iyr:2023
pid:3556412378 byr:2007
pid:087499704 hgt:74in ecl:grn iyr:2012 eyr:2030 byr:1980
hcl:#623a2f
eyr:2029 ecl:blu cid:129 byr:1989
iyr:2014 pid:896056539 hcl:#a97842 hgt:165cm
hcl:#888785
hgt:164cm byr:2001 iyr:2015 cid:88
pid:545766238 ecl:hzl
eyr:2022
iyr:2010 hgt:158cm hcl:#b6652a ecl:blu byr:1944 eyr:2021 pid:093154719";
        assert_eq!(count_valid_passports_part2(input), 4);
    }
}
| true |
525367d11113dcd2639f8ca49abd8216e5cd0fdb
|
Rust
|
tuzz/ray-tracer
|
/src/geometry/vector2/test.rs
|
UTF-8
| 7,367 | 3.59375 | 4 |
[] |
no_license
|
use assert_approx_eq::assert_approx_eq;
use super::*;
use crate::geometry::point2::Point2;
// Alias so every test module can refer to the type under test uniformly.
type Subject<T> = Vector2<T>;
mod new {
    use super::*;
    #[test]
    fn it_builds_a_two_dimensional_vector() {
        let subject = Subject::new(1, 2);
        assert_eq!(subject.x(), 1);
        assert_eq!(subject.y(), 2);
    }
}
mod components {
    use super::*;
    #[test]
    fn it_can_iterate_over_components_of_the_struct() {
        let subject = Subject::new(1, 2);
        let iter = subject.components.iter();
        assert_eq!(iter.as_slice(), &[1, 2]);
    }
}
mod aliases {
    use super::*;
    // These only assert that the aliases exist and accept the expected
    // element types; no behavior is checked.
    #[test]
    fn it_has_a_type_alias_for_a_vector_of_signed_integers() {
        Vector2i::new(-1 as i32, 2);
    }
    #[test]
    fn it_has_a_type_alias_for_a_vector_of_double_precision_floats() {
        Vector2f::new(0.1 as f64, 0.2);
    }
}
mod default {
    use super::*;
    #[test]
    fn it_sets_x_and_y_to_zero() {
        let subject = Subject::<u32>::default();
        assert_eq!(subject.x(), 0);
        assert_eq!(subject.y(), 0);
    }
}
mod clone {
    use super::*;
    #[test]
    fn it_clones_the_vector() {
        let subject = Subject::new(1, 2).clone();
        assert_eq!(subject.x(), 1);
        assert_eq!(subject.y(), 2);
    }
}
mod conversions {
    use super::*;
    #[test]
    fn it_can_build_a_vector2_from_an_iterator() {
        // 1..3 yields exactly two items, matching the two components.
        let subject: Subject<_> = (1..3).into();
        assert_eq!(subject.x(), 1);
        assert_eq!(subject.y(), 2);
    }
    #[test]
    fn it_can_build_a_vector2_from_a_point2() {
        let point = &Point2::new(1, 2);
        let subject: Subject<_> = point.into();
        assert_eq!(subject.x(), 1);
        assert_eq!(subject.y(), 2);
    }
}
// Arithmetic operators: each op is tested in both its by-reference form and
// its in-place (`op=`) form.
mod addition {
    use super::*;
    #[test]
    fn it_adds_the_components() {
        let a = Subject::new(1, 2);
        let b = Subject::new(3, 4);
        let subject = &a + &b;
        assert_eq!(subject.x(), 4);
        assert_eq!(subject.y(), 6);
    }
    #[test]
    fn it_can_mutate_the_vector() {
        let a = Subject::new(1, 2);
        let b = Subject::new(3, 4);
        let mut subject = a;
        subject += &b;
        assert_eq!(subject.x(), 4);
        assert_eq!(subject.y(), 6);
    }
}
mod subtraction {
    use super::*;
    #[test]
    fn it_subtracts_the_components() {
        let a = Subject::new(5, 5);
        let b = Subject::new(1, 2);
        let subject = &a - &b;
        assert_eq!(subject.x(), 4);
        assert_eq!(subject.y(), 3);
    }
    #[test]
    fn it_can_mutate_the_vector() {
        let a = Subject::new(5, 5);
        let b = Subject::new(1, 2);
        let mut subject = a;
        subject -= &b;
        assert_eq!(subject.x(), 4);
        assert_eq!(subject.y(), 3);
    }
}
mod multiplication {
    use super::*;
    #[test]
    fn it_multiplies_by_a_scalar() {
        let subject = &Subject::new(1, 2) * 3;
        assert_eq!(subject.x(), 3);
        assert_eq!(subject.y(), 6);
    }
    #[test]
    fn it_can_mutate_the_vector() {
        let mut subject = Subject::new(1, 2);
        subject *= 3;
        assert_eq!(subject.x(), 3);
        assert_eq!(subject.y(), 6);
    }
}
mod division {
    use super::*;
    // Division of an integer vector yields floats, hence the approximate
    // comparisons here.
    #[test]
    fn it_divides_by_a_divisor() {
        let subject = &Subject::new(1, 2) / 10;
        assert_approx_eq!(subject.x(), 0.1);
        assert_approx_eq!(subject.y(), 0.2);
    }
    #[test]
    fn it_can_mutate_the_vector() {
        let mut subject = Subject::new(1.0, 2.0);
        subject /= 10;
        assert_approx_eq!(subject.x(), 0.1);
        assert_approx_eq!(subject.y(), 0.2);
    }
}
mod negation {
    use super::*;
    #[test]
    fn it_returns_a_vector_pointing_in_the_opposite_direction() {
        let subject = -&Subject::new(1, 2);
        assert_eq!(subject.x(), -1);
        assert_eq!(subject.y(), -2);
    }
}
// Component-wise and metric operations: abs, dot products, length, normalize.
mod abs {
    use super::*;
    #[test]
    fn it_can_take_the_absolute_value_of_vector2f_components() {
        let subject = Subject::new(-1.0, -2.0).abs();
        assert_eq!(subject.x(), 1.0);
        assert_eq!(subject.y(), 2.0);
    }
    #[test]
    fn it_can_take_the_absolute_value_of_vector2i_components() {
        let subject = Subject::new(-1, -2).abs();
        assert_eq!(subject.x(), 1);
        assert_eq!(subject.y(), 2);
    }
}
mod dot {
    use super::*;
    #[test]
    fn it_returns_the_dot_product_of_two_vectors() {
        // 1*3 + 2*4 = 11
        let a = Subject::new(1, 2);
        let b = Subject::new(3, 4);
        assert_eq!(a.dot(&b), 11);
    }
}
mod abs_dot {
    use super::*;
    #[test]
    fn it_returns_the_absolute_value_of_the_dot_product() {
        let a = Subject::new(1, 2);
        let b = Subject::new(-3, -4);
        assert_eq!(a.abs_dot(&b), 11);
    }
}
mod length_squared {
    use super::*;
    #[test]
    fn it_returns_the_square_of_the_length_of_the_vector() {
        let subject = Subject::new(1, 2);
        assert_eq!(subject.length_squared(), 5.0);
    }
}
mod length {
    use super::*;
    #[test]
    fn it_returns_the_length_of_the_vector() {
        let subject = Subject::new(1, 2);
        assert_eq!(subject.length(), f64::sqrt(5.0));
    }
}
mod normalize {
    use super::*;
    #[test]
    fn it_divides_each_component_by_the_vectors_length() {
        let subject = Subject::new(1, 2).normalize();
        let divisor = f64::sqrt(5.0);
        assert_eq!(subject.x(), 1.0 / divisor);
        assert_eq!(subject.y(), 2.0 / divisor);
    }
}
// Min/max queries (by value and by dimension index), component-wise min/max,
// and component permutation.
mod min_component {
    use super::*;
    #[test]
    fn it_returns_the_value_of_the_smallest_component() {
        let subject = Subject::new(1, 2);
        assert_eq!(subject.min_component(), 1);
    }
}
mod max_component {
    use super::*;
    #[test]
    fn it_returns_the_value_of_the_largest_component() {
        let subject = Subject::new(1, 2);
        assert_eq!(subject.max_component(), 2);
    }
}
mod min_dimension {
    use super::*;
    #[test]
    fn it_returns_the_index_of_the_dimension_with_smallest_component() {
        // Dimension 0 is x, dimension 1 is y.
        let subject = Subject::new(1, 2);
        assert_eq!(subject.min_dimension(), 0);
    }
}
mod max_dimension {
    use super::*;
    #[test]
    fn it_returns_the_index_of_the_dimension_with_largest_component() {
        let subject = Subject::new(1, 2);
        assert_eq!(subject.max_dimension(), 1);
    }
}
mod min {
    use super::*;
    #[test]
    fn it_returns_a_vector_of_the_component_wise_minimums() {
        let a = Subject::new(1, 9);
        let b = Subject::new(9, 2);
        let subject = a.min(&b);
        assert_eq!(subject.x(), 1);
        assert_eq!(subject.y(), 2);
    }
}
mod max {
    use super::*;
    #[test]
    fn it_returns_a_vector_of_the_component_wise_maximums() {
        let a = Subject::new(1, 0);
        let b = Subject::new(0, 2);
        let subject = a.max(&b);
        assert_eq!(subject.x(), 1);
        assert_eq!(subject.y(), 2);
    }
}
mod permute {
    use super::*;
    #[test]
    fn it_permutes_the_component_values_according_to_the_indexes() {
        let subject = Subject::new(5, 6);
        // permute(i, j) selects component i for x and component j for y.
        let permute_01 = subject.permute(0, 1);
        let permute_10 = subject.permute(1, 0);
        assert_eq!(permute_01.x(), 5);
        assert_eq!(permute_01.y(), 6);
        assert_eq!(permute_10.x(), 6);
        assert_eq!(permute_10.y(), 5);
    }
}
| true |
64a0eb3dd43a5c8afccece7d73c46d42fcc72404
|
Rust
|
clucompany/macro_tt_utils
|
/src/data/if_args/block.rs
|
UTF-8
| 1,285 | 2.828125 | 3 |
[
"MIT",
"Apache-2.0"
] |
permissive
|
use core::fmt::Debug;
use crate::data::if_args::ReverseEqBlock;
/// An argument pair that can be tested for (possibly inverted) equality.
///
/// Implementors are constructible from a `(ReverseEqBlock, A0, A1)` tuple;
/// the flag selects between `==` and `!=` semantics in `is_equality`.
pub trait ArgBlock where Self: From<(ReverseEqBlock, <Self as ArgBlock>::A0, <Self as ArgBlock>::A1)> {
	/// Left-hand argument type; must be comparable with `A1`.
	type A0: PartialEq<<Self as ArgBlock>::A1>;
	/// Right-hand argument type.
	type A1;
	/// Borrows the left-hand argument.
	fn as_arg0(&self) -> &Self::A0;
	/// Borrows the right-hand argument.
	fn as_arg1(&self) -> &Self::A1;
	/// `arg0 == arg1`, inverted when the reverse flag is set.
	fn is_equality(&self) -> bool;
}
/// Default `ArgBlock` implementation: stores both comparands plus the
/// reverse flag.
#[derive(Debug)]
pub struct UniversalArgBlock<A, T> where A: PartialEq<T> {
	arg0: A,
	arg1: T,
	is_reverse: ReverseEqBlock, // when set, `is_equality` tests `!=` instead of `==`
}
// Tuple-to-block conversion: forwards to `new`, preserving the
// (reverse-flag, lhs, rhs) field order required by the `ArgBlock` bound.
impl<A, T> From<(ReverseEqBlock, A, T)> for UniversalArgBlock<A, T> where A: PartialEq<T> {
	#[inline(always)]
	fn from(args: (ReverseEqBlock, A, T)) -> Self {
		let (reverse, lhs, rhs) = args;
		Self::new(reverse, lhs, rhs)
	}
}
impl<A0, A1> ArgBlock for UniversalArgBlock<A0, A1> where A0: PartialEq<A1> {
	type A0 = A0;
	type A1 = A1;
	#[inline(always)]
	fn as_arg0(&self) -> &Self::A0 {
		&self.arg0
	}
	#[inline(always)]
	fn as_arg1(&self) -> &Self::A1 {
		&self.arg1
	}
	#[inline]
	fn is_equality(&self) -> bool {
		// NOTE(review): `is_reverse` is compared against a `bool` literal, so
		// `ReverseEqBlock` must implement `PartialEq<bool>` — presumably a
		// bool-like flag type; confirm at its definition before simplifying.
		if self.is_reverse == true {
			// Reverse mode: "equality" means the arguments differ.
			self.arg0 != self.arg1
		}else {
			self.arg0 == self.arg1
		}
	}
}
impl<A, T> UniversalArgBlock<A, T> where A: PartialEq<T> {
	/// Builds a block from the reverse flag and the two comparands.
	///
	/// Uses field-init shorthand (clippy: `redundant_field_names`); behavior
	/// is identical to the previous `field: field` form.
	#[inline]
	pub const fn new(is_reverse: ReverseEqBlock, arg0: A, arg1: T) -> Self {
		Self { arg0, arg1, is_reverse }
	}
}
| true |
831f454abf60761f5015a5ea0f454d4393d749c4
|
Rust
|
wilbeibi/newrustacean.com
|
/src/bonus/_6.rs
|
UTF-8
| 3,687 | 2.71875 | 3 |
[
"LicenseRef-scancode-unknown-license-reference",
"MIT",
"CC-BY-4.0"
] |
permissive
|
//! It doesn't have to be sexy
//!
//! - **Date:** August 20, 2016
//! - **Subject:** Building (and celebrating) all the little, not-so-glorious
//! pieces of the Rust ecosystem.
//! - [**Audio**][mp3]
//!
//! [mp3]: https://www.podtrac.com/pts/redirect.mp3/cdn.newrustacean.com/file/newrustacean/bonus/6.mp3
//!
//! <audio style="width: 100%" title="It doesn't have to be sexy" controls preload=metadata src="https://www.podtrac.com/pts/redirect.mp3/cdn.newrustacean.com/file/newrustacean/bonus/6.mp3" />
//!
//!
//! Notes
//! -----
//!
//! We love the Rust compiler team. But there's more to the Rust community, and
//! more required for Rust to be as great as it can be, than just the language
//! itself. We need to celebrate other libraries, and even the small ones, just
//! as much (and maybe more) than changes to the language. We need to dig in and
//! work on building the *whole* ecosystem. (The good news is, we are!)
//!
//!
//! Links
//! -----
//!
//! - [`futures-rs`]
//! + ["Zero-cost futures in Rust"][futures-blog]
//! - [Tokio]
//! + ["Announcing Tokio"][tokio-medium] (Carl Lerche on Medium)
//! - [ShaderCat]: Graphics Programming with Unity and Rust
//! - ["What's new with 'The Rust Programming Language'?"][trpl]
//! - [Friends of Rust]
//! - [`ring`]: Safe, fast, small crypto using Rust
//! - [`alexa-rs`]: Rust library for building Alexa skills
//! - [`gilrs`]: Game Input Library for Rust
//!
//! [`futures-rs`]: https://github.com/alexcrichton/futures-rs
//! [futures-blog]: http://aturon.github.io/blog/2016/08/11/futures/
//! [Tokio]: https://github.com/tokio-rs/tokio
//! [tokio-medium]: https://medium.com/@carllerche/announcing-tokio-df6bb4ddb34#.c9pqf9wyx
//! [Friends of Rust]: https://www.rust-lang.org/en-US/friends.html
//! [trpl]: http://words.steveklabnik.com/whats-new-with-the-rust-programming-language
//! [`ring`]: https://github.com/briansmith/ring
//! [`alexa-rs`]: https://github.com/neil-lobracco/alexa-rs
//! [`gilrs`]: https://gitlab.com/Arvamer/gilrs
//! [ShaderCat]: http://www.shadercat.com
//!
//!
//! Sponsors
//! --------
//!
//! - Aleksey Pirogov
//! - Cameron Mochrie
//! - Cass Costello
//! - [Chris Palmer]
//! - [Daniel Collin]
//! - [Derek Morr]
//! - Doug Reeves
//! - Eric Fulmer
//! - Hamza Sheikh
//! - [Jakub "Limeth" Hlusička]
//! - [Jared Smith]
//! - Keith Gray
//! - Lachlan Collins
//! - Leif Arne Storset
//! - Luca Schmid
//! - Micael Bergeron
//! - [Pascal Hertleif]
//! - Ralph Giles ("rillian")
//! - Ralph "FriarTech" Loizzo
//! - Raph Levien
//! - reddraggone9
//! - Ryan Ollos
//! - Steven Murawski
//! - Vesa Kaihlavirta
//! - [William Roe]
//!
//! [Chris Palmer]: http://red-oxide.org/
//! [Daniel Collin]: https://twitter.com/daniel_collin
//! [Derek Morr]: https://twitter.com/derekmorr
//! [Jakub "Limeth" Hlusička]: https://github.com/Limeth
//! [Jared Smith]: http://twitter.com/jaredthecoder
//! [Pascal Hertleif]: https://pascalhertleif.de/
//! [William Roe]: http://willroe.me
//!
//! (Thanks to the couple people donating who opted out of the reward tier, as
//! well. You know who you are!)
//!
//! ### Become a sponsor
//!
//! - <a href="https://www.patreon.com/newrustacean" rel="payment">Patreon</a>
//! - [Venmo](https://venmo.com/chriskrycho)
//! - [Dwolla](https://www.dwolla.com/hub/chriskrycho)
//! - [Cash.me](https://cash.me/$chriskrycho)
//! - [Flattr](https://flattr.com/profile/chriskrycho)
//! - [PayPal.me](https://paypal.me/chriskrycho)
//!
//!
//! Contact
//! -------
//!
//! - New Rustacean:
//! + Twitter: [@newrustacean](https://www.twitter.com/newrustacean)
//! + Email: [[email protected]](mailto:[email protected])
//! - Chris Krycho
//! + GitHub: [chriskrycho](https://github.com/chriskrycho)
//! + Twitter: [@chriskrycho](https://www.twitter.com/chriskrycho)
| true |
3a821bed1c32982ed33dcf81cc0868576126b498
|
Rust
|
isgasho/fluent-rs
|
/fluent-resmgr/tests/localization_test.rs
|
UTF-8
| 1,123 | 2.59375 | 3 |
[
"Apache-2.0",
"MIT"
] |
permissive
|
use fluent_fallback::SyncLocalization;
use fluent_resmgr::resource_manager::ResourceManager;
use unic_langid::langid;
// Integration tests for the resource-manager-backed localization. Both tests
// read fixture FTL files from ./tests/resources/{locale}/{res_id}.
#[test]
fn localization_format_value() {
    let res_mgr = ResourceManager::new("./tests/resources/{locale}/{res_id}".into());
    let loc = SyncLocalization::with_generator(vec!["test.ftl".into()], res_mgr);
    let value = loc.format_value_sync("hello-world", None);
    assert_eq!(value, "Hello World");
    let value2 = loc.format_value_sync("new-message", None);
    assert_eq!(value2, "Nowa Wiadomość");
    // Unknown ids fall back to echoing the id itself.
    let value3 = loc.format_value_sync("missing-message", None);
    assert_eq!(value3, "missing-message");
}
// Exercises the lower-level bundle API directly (no fallback layer).
#[test]
fn resmgr_get_bundle() {
    let res_mgr = ResourceManager::new("./tests/resources/{locale}/{res_id}".into());
    let bundle = res_mgr.get_bundle(vec![langid!("en-US")], vec!["test.ftl".into()]);
    let mut errors = vec![];
    let msg = bundle.get_message("hello-world").expect("Message exists");
    let pattern = msg.value.expect("Message has a value");
    let value = bundle.format_pattern(&pattern, None, &mut errors);
    assert_eq!(value, "Hello World");
}
| true |
ebb6fba48752f896ba6eea143a5c6faaec91358b
|
Rust
|
Ralith/lahar
|
/src/staging_ring.rs
|
UTF-8
| 6,637 | 2.75 | 3 |
[
"Zlib",
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
use std::{mem, ptr::NonNull};
use crate::{Graveyard, RingState};
use ash::{vk, Device};
/// A self-growing circular allocator that frees memory
/// A self-growing circular allocator that frees memory
/// once the frame that consumed it has been recycled (see `begin_frame`).
pub struct StagingRing {
    state: RingState,   // head/tail/capacity bookkeeping for the ring
    memory_type: u32,   // memory type index chosen at creation, reused when growing
    /// VkPhysicalDeviceLimits::optimalBufferCopyOffsetAlignment
    align: usize,
    buffer: BackingMem, // current (largest) backing buffer
    old: Vec<BackingMem>, // pre-resize buffers, kept alive until the next frame
    frames: Box<[usize]>, // ring head recorded at the start of each in-flight frame
    current_frame: usize, // index into `frames`
}
impl StagingRing {
    /// Creates a ring with `capacity` bytes of host-visible, host-coherent
    /// staging memory, tracking `frames` frames in flight.
    ///
    /// # Safety
    /// Standard ash/Vulkan raw-handle contract: `device` must be valid and
    /// all returned handles must be destroyed via `destroy` before the device.
    pub unsafe fn new(
        device: &Device,
        props: &vk::PhysicalDeviceMemoryProperties,
        limits: &vk::PhysicalDeviceLimits,
        frames: usize,
        capacity: usize,
    ) -> Self {
        let (buffer, memory_type) =
            BackingMem::new_from_props(device, props, capacity as vk::DeviceSize);
        Self {
            state: RingState::new(capacity),
            memory_type,
            align: limits.optimal_buffer_copy_offset_alignment as usize,
            buffer,
            old: Vec::new(),
            frames: (0..frames).map(|_| 0).collect(),
            current_frame: 0,
        }
    }
    /// Destroys the current buffer and any not-yet-graveyarded old buffers.
    ///
    /// # Safety
    /// The GPU must be done with all allocations from this ring.
    pub unsafe fn destroy(&mut self, device: &Device) {
        for buffer in Some(&self.buffer).into_iter().chain(self.old.iter()) {
            device.destroy_buffer(buffer.buffer, None);
            device.free_memory(buffer.memory, None);
        }
    }
    /// Allocates space for `value` and copies its bytes in, returning the
    /// resulting allocation.
    ///
    /// # Safety
    /// Performs a raw byte copy of `value`, so `T` must be safe to duplicate
    /// bit-for-bit (plain-old-data; no pointers the GPU could misread).
    pub unsafe fn push<T: ?Sized>(&mut self, device: &Device, value: &T) -> Alloc {
        let alloc = self.alloc(device, mem::size_of_val(value), 1);
        // The allocation was just made, so it lives in the *current* buffer;
        // writing through `self.buffer.ptr` directly is therefore correct here.
        self.buffer
            .ptr
            .as_ptr()
            .add(alloc.offset as usize)
            .cast::<u8>()
            .copy_from_nonoverlapping(value as *const _ as *const u8, mem::size_of_val(value))
        alloc
    }
    /// Reserves `n` bytes aligned to at least `align` (and to the device's
    /// optimal copy offset alignment), growing the ring if it is full.
    pub unsafe fn alloc(&mut self, device: &Device, n: usize, align: usize) -> Alloc {
        let align = self.align.max(align);
        let offset = match self.state.alloc(n, align) {
            Some(x) => x,
            None => {
                self.grow(device, n);
                self.state
                    .alloc(n, align)
                    .expect("insufficient space after growing")
            }
        };
        Alloc {
            buffer: self.buffer.buffer,
            offset: offset as vk::DeviceSize,
        }
    }
    /// Replaces the backing buffer with one at least twice as large (or large
    /// enough for `min_increment`); the old buffer is retired to `old` so
    /// outstanding allocations stay valid until the next frame.
    unsafe fn grow(&mut self, device: &Device, min_increment: usize) {
        let new_size = min_increment.max(self.state.capacity * 2);
        let old = mem::replace(
            &mut self.buffer,
            BackingMem::new_from_ty(device, self.memory_type, new_size as vk::DeviceSize),
        );
        self.old.push(old);
        self.state = RingState::new(new_size);
    }
    /// Get the storage for an allocation
    /// (searches the current buffer first, then retired ones).
    ///
    /// NOTE(review): returns `*mut u8` from `&self` — callers are trusted not
    /// to create aliasing mutable access.
    pub unsafe fn get_mut(&self, alloc: Alloc) -> *mut u8 {
        for buffer in Some(&self.buffer).into_iter().chain(self.old.iter()) {
            if alloc.buffer == buffer.buffer {
                return buffer.ptr.as_ptr().add(alloc.offset as usize);
            }
        }
        panic!("buffer does not exist in this arena");
    }
    /// Overwrites an existing allocation with the raw bytes of `data`.
    ///
    /// # Safety
    /// Same raw-copy caveats as `push`; `data` must fit the allocation.
    pub unsafe fn write<T: ?Sized>(&mut self, alloc: Alloc, data: &T) {
        self.get_mut(alloc)
            .cast::<u8>()
            .copy_from_nonoverlapping(data as *const _ as *const u8, mem::size_of_val(data));
    }
    /// Advances to the next frame slot, reclaiming everything the oldest
    /// frame allocated and burying pre-resize buffers in the graveyard.
    pub fn begin_frame(&mut self, graveyard: &mut Graveyard) {
        // When the previous frame is recycled, free everything that's been allocated so far.
        self.frames[self.current_frame] = self.state.head;
        // Free everything that was allocated for the oldest frame, which we're now recycling
        self.current_frame = (self.current_frame + 1) % self.frames.len();
        self.state.tail = self.frames[self.current_frame]
        // Move pre-resize buffers from previous frame into graveyard
        for buffer in self.old.drain(..) {
            graveyard.inter(buffer.buffer);
            graveyard.inter(buffer.memory);
        }
    }
}
/// One mapped staging buffer plus its dedicated device memory.
struct BackingMem {
    memory: vk::DeviceMemory,
    buffer: vk::Buffer,
    ptr: NonNull<u8>, // persistently mapped base pointer of `memory`
}
impl BackingMem {
    /// Creates a TRANSFER_SRC buffer, picking a HOST_VISIBLE | HOST_COHERENT
    /// memory type from `props`; returns the buffer and the chosen type index
    /// so later growth (`new_from_ty`) can skip the search.
    unsafe fn new_from_props(
        device: &Device,
        props: &vk::PhysicalDeviceMemoryProperties,
        size: vk::DeviceSize,
    ) -> (Self, u32) {
        let buffer = device
            .create_buffer(
                &vk::BufferCreateInfo::builder()
                    .size(size)
                    .usage(vk::BufferUsageFlags::TRANSFER_SRC)
                    .sharing_mode(vk::SharingMode::EXCLUSIVE),
                None,
            )
            .unwrap();
        let reqs = device.get_buffer_memory_requirements(buffer);
        let memory_ty = crate::find_memory_type(
            props,
            reqs.memory_type_bits,
            vk::MemoryPropertyFlags::HOST_VISIBLE | vk::MemoryPropertyFlags::HOST_COHERENT,
        )
        .expect("no matching memory type");
        (
            Self::new_from_buffer(device, memory_ty, size, buffer, &reqs),
            memory_ty,
        )
    }
    /// Same as `new_from_props` but with a pre-selected memory type index.
    unsafe fn new_from_ty(device: &Device, memory_ty: u32, size: vk::DeviceSize) -> Self {
        let buffer = device
            .create_buffer(
                &vk::BufferCreateInfo::builder()
                    .size(size)
                    .usage(vk::BufferUsageFlags::TRANSFER_SRC)
                    .sharing_mode(vk::SharingMode::EXCLUSIVE),
                None,
            )
            .unwrap();
        let reqs = device.get_buffer_memory_requirements(buffer);
        Self::new_from_buffer(device, memory_ty, size, buffer, &reqs)
    }
    /// Allocates dedicated memory for `buffer`, binds it, and maps it for the
    /// lifetime of the struct (unmapped implicitly when memory is freed).
    unsafe fn new_from_buffer(
        device: &Device,
        memory_ty: u32,
        size: vk::DeviceSize,
        buffer: vk::Buffer,
        reqs: &vk::MemoryRequirements,
    ) -> Self {
        let memory = device
            .allocate_memory(
                &vk::MemoryAllocateInfo::builder()
                    .allocation_size(reqs.size)
                    .memory_type_index(memory_ty)
                    .push_next(&mut vk::MemoryDedicatedAllocateInfo::builder().buffer(buffer)),
                None,
            )
            .unwrap();
        device.bind_buffer_memory(buffer, memory, 0).unwrap();
        let ptr = NonNull::new_unchecked(
            device
                .map_memory(
                    memory,
                    0,
                    size as vk::DeviceSize,
                    vk::MemoryMapFlags::default(),
                )
                .unwrap(),
        )
        .cast();
        Self {
            memory,
            buffer,
            ptr,
        }
    }
}
/// Handle to a range inside one of the ring's buffers.
#[derive(Debug, Copy, Clone)]
pub struct Alloc {
    pub buffer: vk::Buffer,
    pub offset: vk::DeviceSize,
}
| true |
54d1a6b9badcd1cac80cf69a7669eca61cb99a8b
|
Rust
|
amethyst/legion
|
/src/internals/storage/slicevec.rs
|
UTF-8
| 2,990 | 3.984375 | 4 |
[
"MIT"
] |
permissive
|
//! A vector of slices.
use std::iter::{FusedIterator, IntoIterator};
/// A vector of slices.
///
/// Each slice is stored inline so as to be efficiently iterated through linearly.
/// A vector of slices.
///
/// Each slice is stored inline: all elements live in one contiguous `data`
/// vector, with the per-slice length in `counts` and each slice's starting
/// offset in `indices`, so slices can be iterated through linearly.
#[derive(Debug)]
pub struct SliceVec<T> {
    data: Vec<T>,
    counts: Vec<usize>,
    indices: Vec<usize>,
}
impl<T> Default for SliceVec<T> {
    fn default() -> Self {
        Self {
            data: Vec::new(),
            counts: Vec::new(),
            indices: Vec::new(),
        }
    }
}
impl<T> SliceVec<T> {
    /// Pushes a new slice onto the end of the vector.
    pub fn push<I: IntoIterator<Item = T>>(&mut self, items: I) {
        let start = self.data.len();
        self.indices.push(start);
        // `extend` can reserve up front from the iterator's size hint,
        // unlike the previous element-by-element push-and-count loop;
        // the element count falls out of the length delta.
        self.data.extend(items);
        self.counts.push(self.data.len() - start);
    }
    /// Gets an iterator over slices starting from the given index.
    ///
    /// A `start` equal to the number of slices yields an empty iterator.
    pub fn iter_from(&self, start: usize) -> SliceVecIter<T> {
        let index = *self.indices.get(start).unwrap_or(&self.data.len());
        SliceVecIter {
            data: &self.data[index..],
            counts: &self.counts[start..],
        }
    }
}
/// An iterator over slices in a `SliceVec`.
#[derive(Clone)]
pub struct SliceVecIter<'a, T> {
    pub(crate) data: &'a [T],
    pub(crate) counts: &'a [usize],
}
impl<'a, T> Iterator for SliceVecIter<'a, T> {
    type Item = &'a [T];
    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        // Pop the next count, then split that many elements off the front.
        let (count, remaining_counts) = self.counts.split_first()?;
        let (slice, remaining_data) = self.data.split_at(*count);
        self.counts = remaining_counts;
        self.data = remaining_data;
        Some(slice)
    }
    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        // One remaining count per remaining slice — exact.
        (self.counts.len(), Some(self.counts.len()))
    }
    #[inline]
    fn count(self) -> usize {
        self.len()
    }
}
impl<'a, T> ExactSizeIterator for SliceVecIter<'a, T> {}
impl<'a, T> FusedIterator for SliceVecIter<'a, T> {}
#[cfg(test)]
mod test {
    use super::*;
    // Construction alone must not allocate incorrectly or panic.
    #[test]
    fn create() {
        let _ = SliceVec::<usize>::default();
    }
    // Pushing two slices records one count per slice.
    #[test]
    fn push() {
        let mut vec = SliceVec::default();
        let slices = [[1, 2, 3], [4, 5, 6]];
        for slice in &slices {
            vec.push(slice.iter().copied());
        }
        assert_eq!(vec.counts.len(), 2);
    }
    // Round-trip: iteration yields the same slices, in order, element-exact.
    #[test]
    fn iter() {
        let mut vec = SliceVec::default();
        let slices = [[1, 2, 3], [4, 5, 6]];
        for slice in &slices {
            vec.push(slice.iter().copied());
        }
        assert_eq!(vec.counts.len(), 2);
        for (i, slice) in vec.iter_from(0).enumerate() {
            let expected = &slices[i];
            assert_eq!(slice.len(), expected.len());
            for (j, x) in slice.iter().enumerate() {
                assert_eq!(x, &expected[j]);
            }
        }
    }
}
| true |
a5cc6c5f7dbe47a2084c3b2fc9af630d9d1f545b
|
Rust
|
ajunlonglive/CoCreate-openebs
|
/src/mbus-api/src/v0.rs
|
UTF-8
| 28,654 | 2.703125 | 3 |
[
"Apache-2.0"
] |
permissive
|
#![allow(clippy::field_reassign_with_default)]
use super::*;
use percent_encoding::percent_decode_str;
use serde::{Deserialize, Serialize};
use serde_json::value::Value;
use std::{cmp::Ordering, fmt::Debug};
use strum_macros::{EnumString, ToString};
// Version tag for this module's channels/messages on the message bus.
pub(super) const VERSION: &str = "v0";
/// Versioned Channels
#[derive(Clone, Debug, EnumString, ToString)]
#[strum(serialize_all = "camelCase")]
pub enum ChannelVs {
    /// Default
    Default,
    /// Registration of mayastor instances with the control plane
    Registry,
    /// Node Service which exposes the registered mayastor instances
    Node,
    /// Pool Service which manages mayastor pools and replicas
    Pool,
    /// Volume Service which manages mayastor volumes
    Volume,
    /// Nexus Service which manages mayastor nexuses
    Nexus,
    /// Keep it In Sync Service
    Kiiss,
    /// Json gRPC Service
    JsonGrpc,
    /// Core Service combines Node, Pool and Volume services
    Core,
}
impl Default for ChannelVs {
    fn default() -> Self {
        ChannelVs::Default
    }
}
// Wraps a versioned channel into the version-agnostic `Channel` enum.
impl From<ChannelVs> for Channel {
    fn from(channel: ChannelVs) -> Self {
        Channel::v0(channel)
    }
}
/// Versioned Message Id's
// Section headers below are plain `//` comments so they don't merge into the
// rustdoc of the variant that happens to follow them.
#[derive(Debug, PartialEq, Clone, ToString, EnumString)]
#[strum(serialize_all = "camelCase")]
pub enum MessageIdVs {
    /// Default
    Default,
    /// Liveness Probe
    Liveness,
    /// Update Config
    ConfigUpdate,
    /// Request current Config
    ConfigGetCurrent,
    /// Register mayastor
    Register,
    /// Deregister mayastor
    Deregister,
    // Node Service
    /// Get all node information
    GetNodes,
    // Pool Service
    /// Get pools with filter
    GetPools,
    /// Create Pool,
    CreatePool,
    /// Destroy Pool,
    DestroyPool,
    /// Get replicas with filter
    GetReplicas,
    /// Create Replica,
    CreateReplica,
    /// Destroy Replica,
    DestroyReplica,
    /// Share Replica,
    ShareReplica,
    /// Unshare Replica,
    UnshareReplica,
    // Volume Service
    /// Get nexuses with filter
    GetNexuses,
    /// Create nexus
    CreateNexus,
    /// Destroy Nexus
    DestroyNexus,
    /// Share Nexus
    ShareNexus,
    /// Unshare Nexus
    UnshareNexus,
    /// Remove a child from its parent nexus
    RemoveNexusChild,
    /// Add a child to a nexus
    AddNexusChild,
    /// Get all volumes
    GetVolumes,
    /// Create Volume,
    CreateVolume,
    /// Delete Volume
    DestroyVolume,
    /// Add nexus to volume
    AddVolumeNexus,
    /// Remove nexus from volume
    RemoveVolumeNexus,
    /// Generic JSON gRPC message
    JsonGrpc,
    /// Get block devices
    GetBlockDevices,
}
// Only V0 should export this macro
// This allows the example code to use the v0 default
// Otherwise they have to impl whatever version they require
#[macro_export]
/// Use version 0 of the Message and Channel
macro_rules! impl_channel_id {
    // `$I`: a `MessageIdVs` variant; `$C`: a `ChannelVs` variant.
    // Expands to the `id`/`channel` methods of the message trait.
    ($I:ident, $C:ident) => {
        fn id(&self) -> MessageId {
            MessageId::v0(v0::MessageIdVs::$I)
        }
        fn channel(&self) -> Channel {
            Channel::v0(v0::ChannelVs::$C)
        }
    };
}
/// Liveness Probe
#[derive(Serialize, Deserialize, Debug, Default, Clone)]
pub struct Liveness {}
bus_impl_message_all!(Liveness, Liveness, (), Default);
/// Mayastor configurations
/// Currently, we have the global mayastor config and the child states config
#[derive(Serialize, Deserialize, Debug, Clone, Eq, PartialEq, Hash)]
pub enum Config {
    /// Mayastor global config
    MayastorConfig,
    /// Mayastor child states config
    ChildStatesConfig,
}
impl Default for Config {
    fn default() -> Self {
        Config::MayastorConfig
    }
}
// Config Messages
/// Update mayastor configuration
#[derive(Serialize, Deserialize, Debug, Default, Clone)]
pub struct ConfigUpdate {
    /// type of config being updated
    pub kind: Config,
    /// actual config data
    pub data: Vec<u8>,
}
bus_impl_message_all!(ConfigUpdate, ConfigUpdate, (), Kiiss);
/// Request message configuration used by mayastor to request configuration
/// from a control plane service
#[derive(Serialize, Deserialize, Debug, Default, Clone)]
pub struct ConfigGetCurrent {
    /// type of config requested
    pub kind: Config,
}
/// Reply message configuration returned by a control plane service to mayastor
#[derive(Serialize, Deserialize, Debug, Default, Clone)]
pub struct ReplyConfig {
    /// config data
    pub config: Vec<u8>,
}
bus_impl_message_all!(
    ConfigGetCurrent,
    ConfigGetCurrent,
    ReplyConfig,
    Kiiss,
    GetConfig
);
// Registration messages
/// Register message payload
#[derive(Serialize, Deserialize, Default, Debug, Clone)]
#[serde(rename_all = "camelCase")]
pub struct Register {
    /// id of the mayastor instance
    pub id: NodeId,
    /// grpc_endpoint of the mayastor instance
    pub grpc_endpoint: String,
}
bus_impl_message_all!(Register, Register, (), Registry);
/// Deregister message payload
#[derive(Serialize, Deserialize, Default, Debug, Clone)]
pub struct Deregister {
    /// id of the mayastor instance
    pub id: NodeId,
}
bus_impl_message_all!(Deregister, Deregister, (), Registry);
/// Node Service
///
/// Get all the nodes
#[derive(Serialize, Deserialize, Default, Debug, Clone)]
pub struct GetNodes {}
/// State of the Node
#[derive(
Serialize, Deserialize, Debug, Clone, EnumString, ToString, Eq, PartialEq,
)]
pub enum NodeState {
/// Node has unexpectedly disappeared
Unknown,
/// Node is deemed online if it has not missed the
/// registration keep alive deadline
Online,
/// Node is deemed offline if has missed the
/// registration keep alive deadline
Offline,
}
impl Default for NodeState {
fn default() -> Self {
Self::Unknown
}
}
/// Node information
#[derive(Serialize, Deserialize, Default, Debug, Clone, Eq, PartialEq)]
#[serde(rename_all = "camelCase")]
pub struct Node {
/// id of the mayastor instance
pub id: NodeId,
/// grpc_endpoint of the mayastor instance
pub grpc_endpoint: String,
/// deemed state of the node
pub state: NodeState,
}
bus_impl_vector_request!(Nodes, Node);
bus_impl_message_all!(GetNodes, GetNodes, Nodes, Node);
/// Filter Objects based on one of the following criteria
/// # Example:
/// // Get all nexuses from the node `node_id`
/// let nexuses =
/// MessageBus::get_nexuses(Filter::Node(node_id)).await.unwrap();
#[derive(Serialize, Deserialize, Debug, Clone, strum_macros::ToString)] // likely this ToString does not do the right thing...
pub enum Filter {
/// All objects
None,
/// Filter by Node id
Node(NodeId),
/// Pool filters
///
/// Filter by Pool id
Pool(PoolId),
/// Filter by Node and Pool id
NodePool(NodeId, PoolId),
/// Filter by Node and Replica id
NodeReplica(NodeId, ReplicaId),
/// Filter by Node, Pool and Replica id
NodePoolReplica(NodeId, PoolId, ReplicaId),
/// Filter by Pool and Replica id
PoolReplica(PoolId, ReplicaId),
/// Filter by Replica id
Replica(ReplicaId),
/// Volume filters
///
/// Filter by Node and Nexus
NodeNexus(NodeId, NexusId),
/// Filter by Nexus
Nexus(NexusId),
/// Filter by Node and Volume
NodeVolume(NodeId, VolumeId),
/// Filter by Volume
Volume(VolumeId),
}
impl Default for Filter {
fn default() -> Self {
Self::None
}
}
/// Shared plumbing for string-backed identifier newtypes: declares the
/// newtype, `Display`, `as_str`, and the standard `From` conversions.
///
/// NOTE(review): `From<&str>` forwards to `$Name::from(id)`, which resolves to
/// the *inherent* `from` method that the wrapper macros below define (inherent
/// methods shadow the trait method here). Invoking this macro without also
/// defining an inherent `from` would make `From<&str>` recurse infinitely.
macro_rules! bus_impl_string_id_inner {
    ($Name:ident, $Doc:literal) => {
        #[doc = $Doc]
        #[derive(Serialize, Deserialize, Debug, Clone, Eq, PartialEq, Hash)]
        pub struct $Name(String);
        impl std::fmt::Display for $Name {
            fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                write!(f, "{}", self.0)
            }
        }
        impl $Name {
            /// Borrow the identifier as a plain `&str`.
            pub fn as_str<'a>(&'a self) -> &'a str {
                self.0.as_str()
            }
        }
        impl From<&str> for $Name {
            fn from(id: &str) -> Self {
                $Name::from(id)
            }
        }
        impl From<String> for $Name {
            fn from(id: String) -> Self {
                $Name::from(id.as_str())
            }
        }
        impl From<&$Name> for $Name {
            fn from(id: &$Name) -> $Name {
                id.clone()
            }
        }
        impl From<$Name> for String {
            fn from(id: $Name) -> String {
                id.to_string()
            }
        }
    };
}
/// Define a string identifier newtype whose `Default` is the nil UUID string
/// and which can mint fresh random identifiers (UUID v4) via `new()`.
macro_rules! bus_impl_string_id {
    ($Name:ident, $Doc:literal) => {
        bus_impl_string_id_inner!($Name, $Doc);
        impl Default for $Name {
            /// Generates new blank identifier
            fn default() -> Self {
                $Name(uuid::Uuid::default().to_string())
            }
        }
        impl $Name {
            /// Build Self from a string trait id
            // (also the target of the `From<&str>`/`From<String>` impls
            // generated by `bus_impl_string_id_inner!`)
            pub fn from<T: Into<String>>(id: T) -> Self {
                $Name(id.into())
            }
            /// Generates new random identifier
            pub fn new() -> Self {
                $Name(uuid::Uuid::new_v4().to_string())
            }
        }
    };
}
/// Like `bus_impl_string_id!`, but percent-decodes the input string on
/// construction (used for identifiers that may arrive URL-encoded, e.g.
/// nexus child URIs). `Default` is the empty string rather than a UUID.
macro_rules! bus_impl_string_id_percent_decoding {
    ($Name:ident, $Doc:literal) => {
        bus_impl_string_id_inner!($Name, $Doc);
        impl Default for $Name {
            fn default() -> Self {
                $Name("".to_string())
            }
        }
        impl $Name {
            /// Build Self from a string trait id
            pub fn from<T: Into<String>>(id: T) -> Self {
                let src: String = id.into();
                // Fall back to the raw, undecoded string if the percent-decoded
                // bytes are not valid UTF-8.
                let decoded_src = percent_decode_str(src.clone().as_str())
                    .decode_utf8()
                    .unwrap_or(src.into())
                    .to_string();
                $Name(decoded_src)
            }
        }
    };
}
bus_impl_string_id!(NodeId, "ID of a mayastor node");
bus_impl_string_id!(PoolId, "ID of a mayastor pool");
bus_impl_string_id!(ReplicaId, "UUID of a mayastor pool replica");
bus_impl_string_id!(NexusId, "UUID of a mayastor nexus");
bus_impl_string_id_percent_decoding!(ChildUri, "URI of a mayastor nexus child");
bus_impl_string_id!(VolumeId, "UUID of a mayastor volume");
bus_impl_string_id!(JsonGrpcMethod, "JSON gRPC method");
bus_impl_string_id!(
JsonGrpcParams,
"Parameters to be passed to a JSON gRPC method"
);
/// Pool Service
/// Get all the pools from specific node or None for all nodes
#[derive(Serialize, Deserialize, Default, Debug, Clone)]
pub struct GetPools {
/// Filter request
pub filter: Filter,
}
/// State of the Pool
#[derive(
Serialize, Deserialize, Debug, Clone, EnumString, ToString, Eq, PartialEq,
)]
pub enum PoolState {
/// unknown state
Unknown = 0,
/// the pool is in normal working order
Online = 1,
/// the pool has experienced a failure but can still function
Degraded = 2,
/// the pool is completely inaccessible
Faulted = 3,
}
impl Default for PoolState {
fn default() -> Self {
Self::Unknown
}
}
impl From<i32> for PoolState {
fn from(src: i32) -> Self {
match src {
1 => Self::Online,
2 => Self::Degraded,
3 => Self::Faulted,
_ => Self::Unknown,
}
}
}
/// Pool information
#[derive(Serialize, Deserialize, Default, Debug, Clone, Eq, PartialEq)]
#[serde(rename_all = "camelCase")]
pub struct Pool {
/// id of the mayastor instance
pub node: NodeId,
/// id of the pool
pub id: PoolId,
/// absolute disk paths claimed by the pool
pub disks: Vec<String>,
/// current state of the pool
pub state: PoolState,
/// size of the pool in bytes
pub capacity: u64,
/// used bytes from the pool
pub used: u64,
}
// online > degraded > unknown/faulted
//
// `Unknown` and `Faulted` are mutually incomparable, and neither compares
// with `Online`/`Degraded` as greater. However, every state must compare
// equal to itself: `PoolState` derives `PartialEq`/`Eq`, so
// `Unknown == Unknown` is `true`, and the `PartialOrd` contract requires
// `partial_cmp` to return `Some(Ordering::Equal)` in that case. The previous
// implementation returned `None` for `Unknown` vs `Unknown`, which broke that
// contract (and could confuse consumers such as sorting or `max_by`).
impl PartialOrd for PoolState {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        match (self, other) {
            // The two "bottom" states: equal to themselves, incomparable to
            // each other.
            (PoolState::Unknown, PoolState::Unknown) => Some(Ordering::Equal),
            (PoolState::Unknown, PoolState::Faulted) => None,
            (PoolState::Faulted, PoolState::Unknown) => None,
            (PoolState::Faulted, PoolState::Faulted) => Some(Ordering::Equal),
            // `Online` outranks everything else.
            (PoolState::Online, PoolState::Online) => Some(Ordering::Equal),
            (PoolState::Online, _) => Some(Ordering::Greater),
            (_, PoolState::Online) => Some(Ordering::Less),
            // `Degraded` outranks the bottom states.
            (PoolState::Degraded, PoolState::Degraded) => Some(Ordering::Equal),
            (PoolState::Degraded, _) => Some(Ordering::Greater),
            (_, PoolState::Degraded) => Some(Ordering::Less),
        }
    }
}
/// Create Pool Request
#[derive(Serialize, Deserialize, Default, Debug, Clone, Eq, PartialEq)]
#[serde(rename_all = "camelCase")]
pub struct CreatePool {
/// id of the mayastor instance
pub node: NodeId,
/// id of the pool
pub id: PoolId,
/// disk device paths or URIs to be claimed by the pool
pub disks: Vec<String>,
}
bus_impl_message_all!(CreatePool, CreatePool, Pool, Pool);
/// Destroy Pool Request
#[derive(Serialize, Deserialize, Default, Debug, Clone)]
#[serde(rename_all = "camelCase")]
pub struct DestroyPool {
/// id of the mayastor instance
pub node: NodeId,
/// id of the pool
pub id: PoolId,
}
bus_impl_message_all!(DestroyPool, DestroyPool, (), Pool);
bus_impl_vector_request!(Pools, Pool);
bus_impl_message_all!(GetPools, GetPools, Pools, Pool);
/// Get all the replicas from specific node and pool
/// or None for all nodes or all pools
#[derive(Serialize, Deserialize, Default, Debug, Clone)]
pub struct GetReplicas {
/// Filter request
pub filter: Filter,
}
/// Replica information
#[derive(Serialize, Deserialize, Default, Debug, Clone, Eq, PartialEq)]
#[serde(rename_all = "camelCase")]
pub struct Replica {
/// id of the mayastor instance
pub node: NodeId,
/// uuid of the replica
pub uuid: ReplicaId,
/// id of the pool
pub pool: PoolId,
/// thin provisioning
pub thin: bool,
/// size of the replica in bytes
pub size: u64,
/// protocol used for exposing the replica
pub share: Protocol,
/// uri usable by nexus to access it
pub uri: String,
}
bus_impl_vector_request!(Replicas, Replica);
bus_impl_message_all!(GetReplicas, GetReplicas, Replicas, Pool);
/// Create Replica Request
#[derive(Serialize, Deserialize, Default, Debug, Clone)]
#[serde(rename_all = "camelCase")]
pub struct CreateReplica {
/// id of the mayastor instance
pub node: NodeId,
/// uuid of the replica
pub uuid: ReplicaId,
/// id of the pool
pub pool: PoolId,
/// size of the replica in bytes
pub size: u64,
/// thin provisioning
pub thin: bool,
/// protocol to expose the replica over
pub share: Protocol,
}
bus_impl_message_all!(CreateReplica, CreateReplica, Replica, Pool);
/// Destroy Replica Request
#[derive(Serialize, Deserialize, Default, Debug, Clone)]
#[serde(rename_all = "camelCase")]
pub struct DestroyReplica {
/// id of the mayastor instance
pub node: NodeId,
/// id of the pool
pub pool: PoolId,
/// uuid of the replica
pub uuid: ReplicaId,
}
bus_impl_message_all!(DestroyReplica, DestroyReplica, (), Pool);
/// Share Replica Request
#[derive(Serialize, Deserialize, Default, Debug, Clone)]
#[serde(rename_all = "camelCase")]
pub struct ShareReplica {
/// id of the mayastor instance
pub node: NodeId,
/// id of the pool
pub pool: PoolId,
/// uuid of the replica
pub uuid: ReplicaId,
/// protocol used for exposing the replica
pub protocol: Protocol,
}
bus_impl_message_all!(ShareReplica, ShareReplica, String, Pool);
/// Unshare Replica Request
#[derive(Serialize, Deserialize, Default, Debug, Clone)]
#[serde(rename_all = "camelCase")]
pub struct UnshareReplica {
/// id of the mayastor instance
pub node: NodeId,
/// id of the pool
pub pool: PoolId,
/// uuid of the replica
pub uuid: ReplicaId,
}
bus_impl_message_all!(UnshareReplica, UnshareReplica, (), Pool);
/// Indicates what protocol the bdev is shared as
#[derive(
Serialize, Deserialize, Debug, Clone, EnumString, ToString, Eq, PartialEq,
)]
#[strum(serialize_all = "camelCase")]
#[serde(rename_all = "camelCase")]
pub enum Protocol {
/// not shared by any of the variants
Off = 0,
/// shared as NVMe-oF TCP
Nvmf = 1,
/// shared as iSCSI
Iscsi = 2,
/// shared as NBD
Nbd = 3,
}
impl Default for Protocol {
fn default() -> Self {
Self::Off
}
}
impl From<i32> for Protocol {
    /// Map the raw integer representation back to a `Protocol`, falling back
    /// to `Off` for out-of-range values.
    fn from(src: i32) -> Self {
        match src {
            0 => Self::Off,
            1 => Self::Nvmf,
            2 => Self::Iscsi,
            // Discriminant 3 is `Nbd` (see the enum definition); the previous
            // catch-all silently collapsed it to `Off`, unlike the sibling
            // `From<i32>` impls for the state enums which map every variant.
            3 => Self::Nbd,
            _ => Self::Off,
        }
    }
}
/// State of the Replica
#[derive(
Serialize, Deserialize, Debug, Clone, EnumString, ToString, Eq, PartialEq,
)]
#[strum(serialize_all = "camelCase")]
#[serde(rename_all = "camelCase")]
pub enum ReplicaState {
/// unknown state
Unknown = 0,
/// the replica is in normal working order
Online = 1,
/// the replica has experienced a failure but can still function
Degraded = 2,
/// the replica is completely inaccessible
Faulted = 3,
}
impl Default for ReplicaState {
fn default() -> Self {
Self::Unknown
}
}
impl From<i32> for ReplicaState {
fn from(src: i32) -> Self {
match src {
1 => Self::Online,
2 => Self::Degraded,
3 => Self::Faulted,
_ => Self::Unknown,
}
}
}
/// Volume Nexuses
///
/// Get all the nexuses with a filter selection
#[derive(Serialize, Deserialize, Default, Debug, Clone)]
pub struct GetNexuses {
/// Filter request
pub filter: Filter,
}
/// Nexus information
#[derive(Serialize, Deserialize, Default, Debug, Clone, Eq, PartialEq)]
#[serde(rename_all = "camelCase")]
pub struct Nexus {
/// id of the mayastor instance
pub node: NodeId,
/// uuid of the nexus
pub uuid: NexusId,
/// size of the volume in bytes
pub size: u64,
/// current state of the nexus
pub state: NexusState,
/// array of children
pub children: Vec<Child>,
/// URI of the device for the volume (missing if not published).
/// Missing property and empty string are treated the same.
pub device_uri: String,
/// total number of rebuild tasks
pub rebuilds: u32,
}
/// Child information
#[derive(Serialize, Deserialize, Default, Debug, Clone, Eq, PartialEq)]
#[serde(rename_all = "camelCase")]
pub struct Child {
/// uri of the child device
pub uri: ChildUri,
/// state of the child
pub state: ChildState,
/// current rebuild progress (%)
pub rebuild_progress: Option<i32>,
}
/// Child State information
#[derive(Serialize, Deserialize, Debug, Clone, Eq, PartialEq)]
pub enum ChildState {
/// Default Unknown state
Unknown = 0,
/// healthy and contains the latest bits
Online = 1,
/// rebuild is in progress (or other recoverable error)
Degraded = 2,
/// unrecoverable error (control plane must act)
Faulted = 3,
}
impl Default for ChildState {
fn default() -> Self {
Self::Unknown
}
}
impl From<i32> for ChildState {
fn from(src: i32) -> Self {
match src {
1 => Self::Online,
2 => Self::Degraded,
3 => Self::Faulted,
_ => Self::Unknown,
}
}
}
/// Nexus State information
#[derive(
Serialize, Deserialize, Debug, Clone, EnumString, ToString, Eq, PartialEq,
)]
pub enum NexusState {
/// Default Unknown state
Unknown = 0,
/// healthy and working
Online = 1,
/// not healthy but is able to serve IO (i.e. rebuild is in progress)
Degraded = 2,
/// broken and unable to serve IO
Faulted = 3,
}
impl Default for NexusState {
fn default() -> Self {
Self::Unknown
}
}
impl From<i32> for NexusState {
fn from(src: i32) -> Self {
match src {
1 => Self::Online,
2 => Self::Degraded,
3 => Self::Faulted,
_ => Self::Unknown,
}
}
}
bus_impl_vector_request!(Nexuses, Nexus);
bus_impl_message_all!(GetNexuses, GetNexuses, Nexuses, Nexus);
/// Create Nexus Request
#[derive(Serialize, Deserialize, Default, Debug, Clone, Eq, PartialEq)]
#[serde(rename_all = "camelCase")]
pub struct CreateNexus {
/// id of the mayastor instance
pub node: NodeId,
/// the nexus uuid will be set to this
pub uuid: NexusId,
/// size of the device in bytes
pub size: u64,
/// replica can be iscsi and nvmf remote targets or a local spdk bdev
/// (i.e. bdev:///name-of-the-bdev).
///
/// uris to the targets we connect to
pub children: Vec<ChildUri>,
}
bus_impl_message_all!(CreateNexus, CreateNexus, Nexus, Nexus);
/// Destroy Nexus Request
#[derive(Serialize, Deserialize, Default, Debug, Clone, Eq, PartialEq)]
#[serde(rename_all = "camelCase")]
pub struct DestroyNexus {
/// id of the mayastor instance
pub node: NodeId,
/// uuid of the nexus
pub uuid: NexusId,
}
bus_impl_message_all!(DestroyNexus, DestroyNexus, (), Nexus);
/// Share Nexus Request
#[derive(Serialize, Deserialize, Default, Debug, Clone, Eq, PartialEq)]
#[serde(rename_all = "camelCase")]
pub struct ShareNexus {
/// id of the mayastor instance
pub node: NodeId,
/// uuid of the nexus
pub uuid: NexusId,
/// encryption key
pub key: Option<String>,
/// share protocol
pub protocol: Protocol,
}
bus_impl_message_all!(ShareNexus, ShareNexus, String, Nexus);
/// Unshare Nexus Request
#[derive(Serialize, Deserialize, Default, Debug, Clone, Eq, PartialEq)]
#[serde(rename_all = "camelCase")]
pub struct UnshareNexus {
/// id of the mayastor instance
pub node: NodeId,
/// uuid of the nexus
pub uuid: NexusId,
}
bus_impl_message_all!(UnshareNexus, UnshareNexus, (), Nexus);
/// Remove Child from Nexus Request
#[derive(Serialize, Deserialize, Default, Debug, Clone, Eq, PartialEq)]
#[serde(rename_all = "camelCase")]
pub struct RemoveNexusChild {
/// id of the mayastor instance
pub node: NodeId,
/// uuid of the nexus
pub nexus: NexusId,
/// URI of the child device to be removed
pub uri: ChildUri,
}
bus_impl_message_all!(RemoveNexusChild, RemoveNexusChild, (), Nexus);
/// Add child to Nexus Request
#[derive(Serialize, Deserialize, Default, Debug, Clone, Eq, PartialEq)]
#[serde(rename_all = "camelCase")]
pub struct AddNexusChild {
/// id of the mayastor instance
pub node: NodeId,
/// uuid of the nexus
pub nexus: NexusId,
/// URI of the child device to be added
pub uri: ChildUri,
/// auto start rebuilding
pub auto_rebuild: bool,
}
bus_impl_message_all!(AddNexusChild, AddNexusChild, Child, Nexus);
/// Volumes
///
/// Volume information
#[derive(Serialize, Deserialize, Default, Debug, Clone, Eq, PartialEq)]
#[serde(rename_all = "camelCase")]
pub struct Volume {
/// name of the volume
pub uuid: VolumeId,
/// size of the volume in bytes
pub size: u64,
/// current state of the volume
pub state: VolumeState,
/// array of children nexuses
pub children: Vec<Nexus>,
}
/// Volume State information
/// Currently it's the same as the nexus
pub type VolumeState = NexusState;
/// Get volumes
#[derive(Serialize, Deserialize, Default, Debug, Clone)]
#[serde(rename_all = "camelCase")]
pub struct GetVolumes {
/// filter volumes
pub filter: Filter,
}
bus_impl_vector_request!(Volumes, Volume);
bus_impl_message_all!(GetVolumes, GetVolumes, Volumes, Volume);
/// Create volume
#[derive(Serialize, Deserialize, Default, Debug, Clone)]
#[serde(rename_all = "camelCase")]
pub struct CreateVolume {
/// uuid of the volume
pub uuid: VolumeId,
/// size of the volume in bytes
pub size: u64,
/// number of children nexuses (ANA)
pub nexuses: u64,
/// number of replicas per nexus
pub replicas: u64,
/// only these nodes can be used for the replicas
#[serde(default)]
pub allowed_nodes: Vec<NodeId>,
/// preferred nodes for the replicas
#[serde(default)]
pub preferred_nodes: Vec<NodeId>,
/// preferred nodes for the nexuses
#[serde(default)]
pub preferred_nexus_nodes: Vec<NodeId>,
}
bus_impl_message_all!(CreateVolume, CreateVolume, Volume, Volume);
/// Delete volume
#[derive(Serialize, Deserialize, Default, Debug, Clone)]
#[serde(rename_all = "camelCase")]
pub struct DestroyVolume {
/// uuid of the volume
pub uuid: VolumeId,
}
bus_impl_message_all!(DestroyVolume, DestroyVolume, (), Volume);
/// Add ANA Nexus to volume
#[derive(Serialize, Deserialize, Default, Debug, Clone)]
#[serde(rename_all = "camelCase")]
pub struct AddVolumeNexus {
/// uuid of the volume
pub uuid: VolumeId,
/// preferred node id for the nexus
pub preferred_node: Option<NodeId>,
}
bus_impl_message_all!(AddVolumeNexus, AddVolumeNexus, Nexus, Volume);
/// Add ANA Nexus to volume
#[derive(Serialize, Deserialize, Default, Debug, Clone)]
#[serde(rename_all = "camelCase")]
pub struct RemoveVolumeNexus {
/// uuid of the volume
pub uuid: VolumeId,
/// id of the node where the nexus lives
pub node: Option<NodeId>,
}
bus_impl_message_all!(RemoveVolumeNexus, RemoveVolumeNexus, (), Volume);
/// Generic JSON gRPC request
#[derive(Serialize, Deserialize, Default, Debug, Clone)]
#[serde(rename_all = "camelCase")]
pub struct JsonGrpcRequest {
/// id of the mayastor instance
pub node: NodeId,
/// JSON gRPC method to call
pub method: JsonGrpcMethod,
/// parameters to be passed to the above method
pub params: JsonGrpcParams,
}
bus_impl_message_all!(JsonGrpcRequest, JsonGrpc, Value, JsonGrpc);
/// Partition information
#[derive(Serialize, Deserialize, Default, Debug, Clone, Eq, PartialEq)]
pub struct Partition {
/// devname of parent device to which this partition belongs
pub parent: String,
/// partition number
pub number: u32,
/// partition name
pub name: String,
/// partition scheme: gpt, dos, ...
pub scheme: String,
/// partition type identifier
pub typeid: String,
/// UUID identifying partition
pub uuid: String,
}
/// Filesystem information
#[derive(Serialize, Deserialize, Default, Debug, Clone, Eq, PartialEq)]
pub struct Filesystem {
/// filesystem type: ext3, ntfs, ...
pub fstype: String,
/// volume label
pub label: String,
/// UUID identifying the volume (filesystem)
pub uuid: String,
/// path where filesystem is currently mounted
pub mountpoint: String,
}
/// Block device information
#[derive(Serialize, Deserialize, Default, Debug, Clone, Eq, PartialEq)]
#[serde(rename_all = "camelCase")]
pub struct BlockDevice {
/// entry in /dev associated with device
pub devname: String,
/// currently "disk" or "partition"
pub devtype: String,
/// major device number
pub devmajor: u32,
/// minor device number
pub devminor: u32,
/// device model - useful for identifying mayastor devices
pub model: String,
/// official device path
pub devpath: String,
/// list of udev generated symlinks by which device may be identified
pub devlinks: Vec<String>,
/// size of device in (512 byte) blocks
pub size: u64,
/// partition information in case where device represents a partition
pub partition: Partition,
/// filesystem information in case where a filesystem is present
pub filesystem: Filesystem,
/// identifies if device is available for use (ie. is not "currently" in
/// use)
pub available: bool,
}
/// Get block devices
#[derive(Serialize, Deserialize, Default, Debug, Clone)]
#[serde(rename_all = "camelCase")]
pub struct GetBlockDevices {
/// id of the mayastor instance
pub node: NodeId,
/// specifies whether to get all devices or only usable devices
pub all: bool,
}
bus_impl_vector_request!(BlockDevices, BlockDevice);
bus_impl_message_all!(GetBlockDevices, GetBlockDevices, BlockDevices, Node);
| true |
348c539daa15b643b763ef2462638a1e98bc7d58
|
Rust
|
Ruin0x11/calx
|
/calx-alg/tests/lib.rs
|
UTF-8
| 3,391 | 2.890625 | 3 |
[
"MIT",
"Apache-2.0"
] |
permissive
|
extern crate serde_json;
extern crate calx_alg;
extern crate rand;
use std::collections::HashMap;
use rand::{Rng, SeedableRng, XorShiftRng};
use calx_alg::WeightedChoice;
#[test]
fn test_serialize_rng() {
use calx_alg::EncodeRng;
let mut rng: EncodeRng<XorShiftRng> = SeedableRng::from_seed([1, 2, 3, 4]);
let saved = serde_json::to_string(&rng).expect("Serialization failed");
let mut rng2: EncodeRng<XorShiftRng> = serde_json::from_str(&saved)
.expect("Deserialization failed");
assert!(rng.next_u32() == rng2.next_u32());
}
#[test]
fn test_noise() {
use calx_alg::noise;
for i in 0i32..100 {
assert!(noise(i) >= -1.0 && noise(i) <= 1.0);
}
}
/// Assert that `split_line` breaks `line` into exactly `parts` when given
/// `space` columns (every character is counted as width 1.0).
fn splits_into(space: usize, line: &str, parts: &[&str]) {
    use calx_alg::split_line;
    // Compare element-wise. The previous code routed the assertions through
    // `.all(..)` whose boolean result was silently discarded — `all` was
    // being used purely for its side effects, which a plain loop expresses
    // directly.
    for (actual, &expected) in split_line(line, |_| 1.0, space as f32).zip(parts) {
        assert_eq!(expected, actual);
    }
    // `zip` stops at the shorter side, so verify the lengths match as well.
    assert_eq!(parts.len(), split_line(line, |_| 1.0, space as f32).count());
}
#[test]
fn test_split_line() {
splits_into(10, "", &[""]);
splits_into(0, "", &[""]);
splits_into(0, "abc", &["a", "b", "c"]);
splits_into(10, "abc", &["abc"]);
splits_into(6, "the cat", &["the", "cat"]);
splits_into(7, "the cat", &["the cat"]);
splits_into(10, "the cat", &["the cat"]);
splits_into(5, "the cat", &["the", "cat"]);
splits_into(5, "the \t cat", &["the", "cat"]);
splits_into(4, "deadbeef", &["dead", "beef"]);
splits_into(5, "the \t cat", &["the", "cat"]);
}
#[test]
fn test_weighted_choice() {
let mut histogram: HashMap<u32, f32> = HashMap::new();
let mut rng: XorShiftRng = SeedableRng::from_seed([1, 2, 3, 4]);
let items = vec![1u32, 2, 3, 4];
let n = 1000;
for _ in 0..n {
let choice = *items.iter().weighted_choice(&mut rng, |&&x| x as f32).unwrap();
*histogram.entry(choice).or_insert(0.0) += 1.0;
}
let measurement = vec![histogram.get(&1).unwrap() / n as f32,
histogram.get(&2).unwrap() / n as f32,
histogram.get(&3).unwrap() / n as f32,
histogram.get(&4).unwrap() / n as f32];
// The weights match the values because 1+2+3+4 = 10.
let ideal = vec![0.1, 0.2, 0.3, 0.4];
let err = measurement.iter().zip(ideal).map(|(x, y)| (x - y) * (x - y)).sum::<f32>() /
measurement.len() as f32;
println!("Mean square error from expected: {}", err);
assert!(err < 0.0001);
}
#[test]
fn test_random_permutation() {
use calx_alg::RandomPermutation;
let perm: Vec<usize> = RandomPermutation::new(&mut rand::thread_rng(), 100).collect();
let mut sorted = perm.clone();
sorted.sort();
assert_eq!(sorted, (0..100).collect::<Vec<usize>>());
// XXX: It is technically possible to get the unpermutating permutation and fail here.
// Not very likely though.
assert_ne!(perm, sorted);
sorted.reverse();
assert_ne!(perm, sorted);
}
#[test]
fn test_bit_spread() {
use calx_alg::{compact_bits_by_2, spread_bits_by_2};
let mut rng = rand::thread_rng();
for _ in 0..1000 {
let x = rng.gen::<u16>() as u32;
assert!(x == 0 || spread_bits_by_2(x) != x);
assert_eq!(compact_bits_by_2(spread_bits_by_2(x)), x);
}
}
| true |
9bf6090aa101eed2b97a331f2d2f3cbfc7c86ee3
|
Rust
|
zhangaz1/spaceout
|
/src/timer.rs
|
UTF-8
| 1,451 | 3.1875 | 3 |
[] |
no_license
|
use ::{current_time};
// Frame timer for a fixed target frame rate (all values in milliseconds).
pub struct Timer{
    fps:u64,            // target frames per second
    frame_time:u64,     // milliseconds per frame (1000 / fps)
    start_time:u64,     // absolute timestamp captured by `start()`
    //last_time:u64,
    next_time:u64,      // deadline compared against `current_time`
    current_time:u64,   // milliseconds elapsed since `start_time`
    //time_elapsed:u64,
}
impl Timer{
    /// Create a timer targeting `fps` frames per second.
    ///
    /// NOTE(review): `fps` must be non-zero, otherwise `1000 / fps` panics
    /// with a division by zero.
    pub fn new(fps:u64)->Timer{
        Timer{
            fps:fps,
            frame_time: 1000 / fps,
            start_time: 0,
            next_time: 0,
            current_time: 0,
        }
    }
    /// Target frame rate this timer was created with.
    pub fn fps(&self)->u64{
        self.fps
    }
    /// Start (or restart) the timer.
    pub fn start(&mut self){
        // Remember the absolute start timestamp; all later measurements are
        // taken relative to it.
        self.start_time = unsafe { current_time() as u64 };
        // `next_time` is compared against the *elapsed* time, so the first
        // deadline must be 0 (first frame ready immediately). The previous
        // code stored the absolute `start_time` here, which postponed the
        // first frame by roughly the whole timestamp value; the commented-out
        // original (`next_time = last_time` where `last_time = now - start`)
        // shows the relative deadline was the intent.
        self.next_time = 0;
    }
    /// Returns `true` once enough time has passed for the next frame, and
    /// schedules the following deadline.
    pub fn ready_for_next_frame(&mut self)->bool{
        // Milliseconds elapsed since `start()`.
        self.current_time = unsafe { current_time() as u64 } - self.start_time;
        if self.current_time > self.next_time {
            // Schedule the next deadline one frame after *now* (deadlines
            // drift forward if a frame runs long; cadence is best-effort).
            self.next_time = self.current_time + self.frame_time;
            true
        }else{
            false
        }
    }
}
| true |
3bb334f2b973888d2afa77781adbdc3a977d5d60
|
Rust
|
Inityx/more-iterators
|
/src/ulam.rs
|
UTF-8
| 2,337 | 3.546875 | 4 |
[] |
no_license
|
/// A coordinate on the Ulam spiral lattice.
pub type UlamCoord = (isize, isize);

/// Iterator that walks the Ulam spiral outward from the origin, yielding
/// `(0, 0)`, `(1, 0)`, `(1, 1)`, `(0, 1)`, ... forever.
#[derive(Default)]
pub struct Ulam {
    ring: usize,     // which square ring we are on (0 = origin)
    step: usize,     // how far along the current ring we have walked
    diameter: usize, // side length of the current ring (2 * ring + 1)
    pos: UlamCoord,  // coordinate to emit next
}

impl Ulam {
    /// Start a fresh spiral at the origin.
    pub fn new() -> Self {
        Self::default()
    }
}

impl Iterator for Ulam {
    type Item = UlamCoord;
    fn next(&mut self) -> Option<UlamCoord> {
        let emitted = self.pos;
        let r = self.ring as isize;
        if self.pos == (r, -r) {
            // Bottom-right corner reached: hop outward onto the next ring.
            self.pos.0 += 1;
            self.step = 0;
            self.ring += 1;
            self.diameter = self.ring * 2 + 1;
        } else {
            // Walk counter-clockwise: up, left, down, then right, switching
            // direction at each corner of the ring.
            let side = self.diameter;
            if self.step < side - 2 {
                self.pos.1 += 1;
            } else if self.step < 2 * side - 3 {
                self.pos.0 -= 1;
            } else if self.step < 3 * side - 4 {
                self.pos.1 -= 1;
            } else {
                self.pos.0 += 1;
            }
            self.step += 1;
        }
        Some(emitted)
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    // First 25 spiral coordinates (rings 0..=2), used as ground truth.
    const REFERENCE: [(isize, isize); 25] = [
        ( 0,  0),
        ( 1,  0),
        ( 1,  1),
        ( 0,  1),
        (-1,  1),
        (-1,  0),
        (-1, -1),
        ( 0, -1),
        ( 1, -1),
        ( 2, -1),
        ( 2,  0),
        ( 2,  1),
        ( 2,  2),
        ( 1,  2),
        ( 0,  2),
        (-1,  2),
        (-2,  2),
        (-2,  1),
        (-2,  0),
        (-2, -1),
        (-2, -2),
        (-1, -2),
        ( 0, -2),
        ( 1, -2),
        ( 2, -2),
    ];
    #[test]
    fn twenty_five() {
        // Compare the iterator element-by-element against the reference table.
        let mut refr = REFERENCE.iter().cloned();
        let mut ulam = Ulam::new().take(25);
        for _ in 0..REFERENCE.len() {
            assert_eq!(refr.next(), ulam.next());
        }
        // `take(25)` must be exhausted once the reference table runs out.
        assert_eq!(None, ulam.next());
    }
    #[test]
    fn infinite() {
        // The iterator never terminates on its own; `take` bounds it.
        let count = 10000;
        assert_eq!(count, Ulam::new().take(count).count());
    }
}
| true |
7afbe40d8aeb3784dd33694d9e390dc4ffd54cc1
|
Rust
|
none-toka/rust-exercise
|
/xiao3sh/src/eval/builtins/mod.rs
|
UTF-8
| 1,248 | 3.078125 | 3 |
[] |
no_license
|
use std;
use std::collections::HashMap;
use std::env;
use std::io::{self, Error, ErrorKind};
use std::path::Path;
/// `exit [code]` builtin: terminate the shell with the given status code.
///
/// Accepts at most one argument; a missing or unparsable argument exits
/// with status 0.
fn builtin_exit(args: &[String]) -> io::Result<()> {
    if args.len() > 1 {
        return Err(Error::new(ErrorKind::InvalidInput, "too many arguments"));
    }
    let code = match args.first() {
        Some(raw) => raw.parse::<i32>().unwrap_or(0),
        None => 0,
    };
    std::process::exit(code)
}
/// `cd [dir]` builtin: change the shell's working directory.
///
/// With no argument this is currently a no-op; with more than one argument
/// it reports an error. A failed directory change is reported on stderr but
/// still returns `Ok`, so the shell keeps running.
fn builtin_cd(args: &[String]) -> io::Result<()> {
    if args.len() > 1 {
        return Err(Error::new(ErrorKind::InvalidInput, "too many arguments"));
    }
    let target = match args.first() {
        // TODO implement to move home directory after supporting environment variables
        None => return Ok(()),
        Some(dir) => Path::new(dir),
    };
    if let Err(err) = env::set_current_dir(target) {
        // FIXME output more kind message
        eprintln!("failed to change directory: {} ({:?})", target.display(), err);
    }
    Ok(())
}
/// Build the name → handler table for all shell builtins.
pub fn create_builtins() -> HashMap<String, fn(&[String]) -> io::Result<()>> {
    let table: [(&str, fn(&[String]) -> io::Result<()>); 2] =
        [("exit", builtin_exit), ("cd", builtin_cd)];
    table.iter().map(|&(name, f)| (name.to_string(), f)).collect()
}
| true |
265ec12f7699a78f33c9349ccc9b18adebaad1fa
|
Rust
|
skippi/leetcode
|
/src/daily_temperatures.rs
|
UTF-8
| 3,420 | 3.734375 | 4 |
[
"MIT"
] |
permissive
|
/// For every day, compute how many days you must wait until a strictly
/// warmer temperature arrives; 0 if no warmer day ever comes.
///
/// The naive approach scans forward from each day — O(n^2). We can do O(n)
/// by walking the input from the *last* day to the first while keeping a
/// stack of day indices whose "next warmer day" answer is still pending.
///
/// Invariant: temperatures at the stacked indices are strictly increasing
/// from top of stack to bottom. When we process a new day, any stacked day
/// whose temperature is less than or equal to today's can never be the
/// "next warmer day" for any earlier day (today is closer and at least as
/// warm), so those entries are popped for good. Whatever remains on top —
/// if anything — is exactly the nearest warmer day to the right.
///
/// Each index is pushed and popped at most once, so the whole pass is O(n)
/// despite the inner `while`.
///
/// Worked example for [69, 68, 70] (right to left):
///
///   70 -> []       | no warmer day -> 0
///      -> [70]
///   68 -> [70]     | top is 70 -> distance 1
///      -> [70, 68]
///   69 -> [70, 68] | pop 68 (<= 69); top is 70 -> distance 2
///      -> [70, 69]
#[allow(dead_code)]
fn daily_temperatures(t: Vec<i32>) -> Vec<i32> {
    let mut answer = vec![0; t.len()];
    // Indices of days still waiting for a warmer temperature; see invariant
    // in the doc comment above.
    let mut warmer_days: Vec<usize> = Vec::new();
    for day in (0..t.len()).rev() {
        // Discard stacked days that are no warmer than today.
        while warmer_days.last().map_or(false, |&d| t[d] <= t[day]) {
            warmer_days.pop();
        }
        // The surviving top of the stack (if any) is the nearest warmer day.
        if let Some(&d) = warmer_days.last() {
            answer[day] = (d - day) as i32;
        }
        warmer_days.push(day);
    }
    answer
}
/// Reference O(n^2) implementation: for each day, scan forward until a
/// strictly warmer temperature is found (0 if none exists).
#[allow(dead_code)]
fn daily_temperatures_naive(t: Vec<i32>) -> Vec<i32> {
    (0..t.len())
        .map(|i| {
            // Scanning from `i + 1` is equivalent to scanning from `i`,
            // since `t[i] > t[i]` can never hold.
            match ((i + 1)..t.len()).find(|&j| t[j] > t[i]) {
                Some(j) => (j - i) as i32,
                None => 0,
            }
        })
        .collect()
}
#[cfg(test)]
mod tests {
    use super::*;
    #[test]
    fn test_daily_temperatures() {
        run_tests(daily_temperatures);
    }
    #[test]
    fn test_daily_temperatures_naive() {
        run_tests(daily_temperatures_naive);
    }
    // Shared harness: both implementations must produce the same answers on
    // the same fixture.
    fn run_tests<F: Fn(Vec<i32>) -> Vec<i32>>(fun: F) {
        assert_eq!(
            fun(vec![73, 74, 75, 71, 69, 72, 76, 73]),
            vec![1, 1, 4, 2, 1, 1, 0, 0]
        );
    }
}
| true |
7ce1657ec6f4b38b90dc1ad4ac378c40d15c7a55
|
Rust
|
CNife/leetcode
|
/rust/finished/src/surrounded_regions.rs
|
UTF-8
| 2,276 | 3.28125 | 3 |
[] |
no_license
|
/// Recurse into `$board[$x][$y]` when the (optional) bounds predicate holds
/// and the cell is still an unvisited 'O'. The `&&` short-circuit guarantees
/// the index expressions are not evaluated when the predicate is false.
macro_rules! flood_if_open {
    ($board: expr, $predicate: expr, $x: expr, $y: expr) => {
        if $predicate && $board[$x][$y] == 'O' {
            mark_not_surrounded($board, $x, $y);
        }
    };
    ($board: expr, $x: expr, $y: expr) => {
        flood_if_open!($board, true, $x, $y)
    };
}
/// LeetCode 130 "Surrounded Regions": capture every 'O' region that does not
/// touch the border. Border-connected regions are flood-filled to a temporary
/// 'o' marker first; every remaining 'O' is then flipped to 'X'.
pub fn solve(board: &mut Vec<Vec<char>>) {
    let m = board.len();
    if m == 0 {
        return;
    }
    let n = board[0].len();
    // Guard zero-width rows: `n - 1` below would underflow on usize.
    if n == 0 {
        return;
    }
    // Flood-fill inward from every border cell.
    for x in 0..m {
        flood_if_open!(board, x, 0);
        flood_if_open!(board, x, n - 1);
    }
    for y in 1..n - 1 {
        flood_if_open!(board, 0, y);
        flood_if_open!(board, m - 1, y);
    }
    // 'o' survived (border-connected); any 'O' left is surrounded.
    for row in board.iter_mut() {
        for cell in row.iter_mut() {
            *cell = match *cell {
                'o' => 'O',
                'X' | 'O' => 'X',
                _ => unreachable!(),
            };
        }
    }
}
/// Depth-first flood fill that marks one border-connected 'O' region with the
/// temporary marker 'o'.
fn mark_not_surrounded(board: &mut Vec<Vec<char>>, x: usize, y: usize) {
    board[x][y] = 'o';
    flood_if_open!(board, x > 0, x - 1, y);
    flood_if_open!(board, x < board.len() - 1, x + 1, y);
    flood_if_open!(board, y > 0, x, y - 1);
    flood_if_open!(board, y < board[0].len() - 1, x, y + 1);
}
#[test]
fn test() {
    // (input board, expected board after capture); the final empty pair
    // exercises the zero-row early return.
    let cases = vec![
        (
            vec![
                vec!['X', 'X', 'X', 'X'],
                vec!['X', 'O', 'O', 'X'],
                vec!['X', 'X', 'O', 'X'],
                vec!['X', 'O', 'X', 'X'],
            ],
            vec![
                vec!['X', 'X', 'X', 'X'],
                vec!['X', 'X', 'X', 'X'],
                vec!['X', 'X', 'X', 'X'],
                vec!['X', 'O', 'X', 'X'],
            ],
        ),
        (
            vec![
                vec!['O', 'X', 'X', 'O', 'X'],
                vec!['X', 'O', 'O', 'X', 'O'],
                vec!['X', 'O', 'X', 'O', 'X'],
                vec!['O', 'X', 'O', 'O', 'O'],
                vec!['X', 'X', 'O', 'X', 'O'],
            ],
            vec![
                vec!['O', 'X', 'X', 'O', 'X'],
                vec!['X', 'X', 'X', 'X', 'O'],
                vec!['X', 'X', 'X', 'O', 'X'],
                vec!['O', 'X', 'O', 'O', 'O'],
                vec!['X', 'X', 'O', 'X', 'O'],
            ],
        ),
        (vec![], vec![]),
    ];
    for (mut board, expected) in cases {
        solve(&mut board);
        assert_eq!(board, expected);
    }
}
| true |
4cdb79be290fc9f0f57ecfa33274dd72bb8bb7c6
|
Rust
|
just-cli/just-fetch
|
/src/fetch.rs
|
UTF-8
| 1,459 | 2.71875 | 3 |
[] |
no_license
|
use just_core::kernel::AvailableVersions;
use just_core::manifest::Manifest;
use just_core::result::BoxedResult;
/// Fetches the available versions of a single package manifest into the
/// shared `AvailableVersions` registry.
pub struct Fetch<'a> {
    pub manifest: &'a Manifest,
    pub versions: &'a mut AvailableVersions,
}
impl<'a> Fetch<'a> {
    /// Creates a fetcher over the given manifest and version registry.
    pub fn new(manifest: &'a Manifest, versions: &'a mut AvailableVersions) -> Self {
        Self { manifest, versions }
    }
    /// Resolves every version declared by the manifest and records each one
    /// in the registry.
    ///
    /// NOTE(review): panics when the manifest has no `versions` section even
    /// though the return type is fallible — consider returning an `Err`.
    pub fn fetch_all_versions(&mut self) -> BoxedResult<()> {
        use log::debug;
        let name = self.manifest.package.name.as_str();
        debug!("Fetching all versions for package {}...", name);
        match self.manifest.versions {
            Some(ref versions) => {
                for version in just_versions::find_all_versions(versions) {
                    self.versions.add_version(&self.manifest.package, &version);
                }
                Ok(())
            }
            None => panic!(
                "Could not fetch versions for package {} because there is no way provided to fetch the versions",
                name
            ),
        }
    }
    /// Returns `true` when no version of the package is known yet, i.e. a
    /// fetch is needed. Only compiled with the `lib` feature.
    #[cfg(feature = "lib")]
    pub fn needs_fetch(&self) -> bool {
        use log::debug;
        debug!(
            "Try deciding if package with alias {} needs an update",
            self.manifest.package.name.as_str()
        );
        self.versions
            .get_latest_versions_of(&self.manifest.package)
            .is_none()
    }
}
| true |
dfa6f93ad519d9c10e86f366e35cdd3dda986a2b
|
Rust
|
Jerald/yoloxide
|
/src/types/ast/statement.rs
|
UTF-8
| 3,883 | 3.5 | 4 |
[
"MIT"
] |
permissive
|
use std::fmt;
use crate::types::ast::{
expression::Expression,
operators::Operator,
value::Value,
};
#[derive(Debug, PartialEq, Clone)]
pub enum Statement
{
    /// A line comment; the string is everything after the `//`.
    Comment(String),
    /// `if <cond> then <body> [else <else_body>] end`.
    If(Box<Expression>, Vec<Statement>, Option<Vec<Statement>>),
    /// `goto <expr>`.
    Goto(Box<Expression>),
    /// `<value> <assign-op> <expr>`, e.g. `x += 1`.
    Assignment(Value, Operator, Box<Expression>),
    /// A bare expression used as a statement.
    Expression(Box<Expression>)
}
impl fmt::Display for Statement
{
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result
{
let write_value: String = match self
{
Statement::Comment(string) => format!("//{}", string),
Statement::If(cond, ref body, Some(ref else_body)) => format!("if {} then {} else {} end", cond, body.iter().fold(String::from(""), |a, e| a + e.to_string().as_str()), else_body.iter().fold(String::from(""), |a, e| a + e.to_string().as_str())),
Statement::If(cond, body, None) => format!("if {} then {} end", cond, body.iter().fold(String::from(""), |a, e| a + e.to_string().as_str())),
Statement::Goto(expr) => format!("goto {}", expr.as_ref()),
Statement::Assignment(ident, op, value) => format!("{} {} {}", ident, op, value),
Statement::Expression(expr) => format!("{}", expr.as_ref()),
};
write!(f, "{}", write_value)
}
}
// impl TryFrom<CylonStat> for Statement
// {
// type Error = String;
// fn try_from(stat: CylonStat) -> Result<Self, Self::Error>
// {
// match stat
// {
// CylonStat::Goto { expression } => {
// let expr = Box::new(expression.try_into()?);
// Ok(Statement::Goto(expr))
// },
// CylonStat::If { condition, body, else_body } => {
// let cond = Box::new(condition.try_into()?);
// let mut ast_body = vec![];
// for stat in body
// {
// ast_body.push(stat.try_into()?)
// }
// let ast_else_body = if else_body.is_empty()
// {
// None
// }
// else
// {
// let mut ast_else_body = vec![];
// for stat in else_body
// {
// ast_else_body.push(stat.try_into()?)
// }
// Some(ast_else_body)
// };
// Ok(Statement::If(cond, ast_body, ast_else_body))
// },
// CylonStat::Assignment { identifier, operator, value } => {
// let ident = if identifier.starts_with(':')
// {
// Value::DataField(identifier)
// }
// else
// {
// Value::LocalVar(identifier)
// };
// let op = match operator.as_str()
// {
// "=" => Operator::Assign,
// "+=" => Operator::AddAssign,
// "-=" => Operator::SubAssign,
// "*=" => Operator::MulAssign,
// "/=" => Operator::DivAssign,
// "%=" => Operator::ModAssign,
// bad_op => return Err(format!("[Statement::TryFrom<CylonStat>] Unable to convert to assignment op from string! Found{}", bad_op))
// };
// let value = Box::new(value.try_into()?);
// Ok(Statement::Assignment(ident, op, value))
// },
// CylonStat::Expression { expression } => {
// let expr = Box::new(expression.try_into()?);
// Ok(Statement::Expression(expr))
// }
// }
// }
// }
| true |
24b463b9ec02ee8c68ac52b4def7d37a296c80e0
|
Rust
|
ilyadruzh/zen
|
/dev/backend/exonum_services/services/database/src/transactions.rs
|
UTF-8
| 3,155 | 2.671875 | 3 |
[] |
no_license
|
use exonum::crypto::PublicKey;
use schema::EmployeeId;
use service::SERVICE_ID;
// Exonum `transactions!` invocation declaring this service's transaction set;
// the macro generates the wire representation for each struct below.
transactions! {
    pub EmployeesTransactions {
        const SERVICE_ID = SERVICE_ID;
        /// Transaction type for creating a new account.
        ///
        /// See [the `Transaction` trait implementation](#impl-Transaction) for details how
        /// `TxCreateAccount` transactions are processed.
        struct TxCreateAccount {
            /// Public key of the editor. Must be a superuser.
            pkey_by_who: &PublicKey,
            /// Public key of the account's owner.
            pub_key: &PublicKey,
            /// UTF-8 string with the owner's name.
            first_name: &str,
            /// UTF-8 string with the owner's surname.
            last_name: &str,
            /// 64-bit ID document number of an author.
            id_number: EmployeeId,
        }
        /// Transaction type for editing an account in database.
        ///
        /// See [the `Transaction` trait implementation](#impl-Transaction) for details how
        /// `TxEditAccount` transactions are processed.
        struct TxEditAccount {
            /// Public key of the editor.
            pkey_by_who: &PublicKey,
            /// Public key of the account owner.
            pkey_account: &PublicKey,
            /// UTF-8 string with the owner's name.
            first_name: &str,
            /// UTF-8 string with the owner's surname.
            last_name: &str,
            /// 64-bit ID document number of an author.
            id_number: EmployeeId,
            /// Auxiliary number to guarantee [non-idempotence][idempotence] of transactions.
            ///
            /// [idempotence]: https://en.wikipedia.org/wiki/Idempotence
            seed: u64,
        }
        /// Transaction type editing an account's custom data in database.
        ///
        /// See [the `Transaction` trait implementation](#impl-Transaction) for details how
        /// `TxSetCustomData` transactions are processed.
        struct TxSetCustomData {
            /// Public key of the editor.
            pkey_by_who: &PublicKey,
            /// Public key of the account owner.
            pkey_account: &PublicKey,
            /// Optional set of custom data bytes.
            custom_data: &str,
            /// Auxiliary number to guarantee [non-idempotence][idempotence] of transactions.
            ///
            /// [idempotence]: https://en.wikipedia.org/wiki/Idempotence
            seed: u64,
        }
        /// Transaction type for account's deletion.
        ///
        /// See [the `Transaction` trait implementation](#impl-Transaction) for details how
        /// `TxDeleteAccount` transactions are processed.
        struct TxDeleteAccount {
            /// Public key of the editor.
            pkey_by_who: &PublicKey,
            /// Public key of the account owner.
            pkey_account: &PublicKey,
            /// Auxiliary number to guarantee [non-idempotence][idempotence] of transactions.
            ///
            /// [idempotence]: https://en.wikipedia.org/wiki/Idempotence
            seed: u64,
        }
    }
}
| true |
d8be6371667a6c65cfaf301693071232482d43f3
|
Rust
|
Xiphoseer/prosemirror-rs
|
/src/markdown/to_markdown.rs
|
UTF-8
| 10,225 | 2.8125 | 3 |
[] |
no_license
|
use super::{MarkdownMark, MarkdownNode, MD};
use crate::model::{AttrNode, Block, Fragment, Leaf, Node};
use displaydoc::Display;
use pulldown_cmark::{CodeBlockKind, CowStr, Event, InlineStr, LinkType, Tag};
use pulldown_cmark_to_cmark::cmark;
use thiserror::Error;
/// Possible error when generating markdown
#[derive(Debug, Clone, PartialEq, Eq, Display, Error)]
pub struct ToMarkdownError {
    /// The inner error
    inner: std::fmt::Error,
}
// Lets `?` convert `fmt::Write` failures from the cmark writer.
impl From<std::fmt::Error> for ToMarkdownError {
    fn from(e: std::fmt::Error) -> ToMarkdownError {
        Self { inner: e }
    }
}
/// Turn a markdown document into a string
pub fn to_markdown(doc: &MarkdownNode) -> Result<String, ToMarkdownError> {
    // node_size plus slack is a preallocation heuristic for the output.
    let mut buf = String::with_capacity(doc.node_size() + 128);
    // Stream traversal events through pulldown-cmark-to-cmark's writer.
    let events = MarkdownSerializer::new(doc);
    cmark(events, &mut buf, None)?;
    Ok(buf)
}
/// Walks a `MarkdownNode` tree and yields pulldown-cmark `Event`s.
struct MarkdownSerializer<'a> {
    // Explicit DFS stack of (node, index of the next child to visit).
    inner: Vec<(&'a MarkdownNode, usize)>,
    // Inline marks currently open (strong/em/link).
    marks: Vec<&'a MarkdownMark>,
    // Events queued to be emitted before the traversal continues.
    stack: Vec<Event<'a>>,
}
impl<'a> MarkdownSerializer<'a> {
    /// Starts a traversal rooted at `doc`.
    fn new(doc: &'a MarkdownNode) -> Self {
        Self {
            inner: vec![(doc, 0)],
            marks: vec![],
            stack: vec![],
        }
    }
}
/// Maps an inline mark to the pulldown-cmark tag that opens/closes it.
/// `Code` is emitted as `Event::Code` elsewhere, so reaching it here is a
/// logic error.
fn mark_tag(mark: &MarkdownMark) -> Tag {
    match mark {
        MarkdownMark::Strong => Tag::Strong,
        MarkdownMark::Em => Tag::Emphasis,
        MarkdownMark::Code => unimplemented!("Should not be pushed on the mark stack"),
        MarkdownMark::Link { attrs } => Tag::Link(
            LinkType::Inline,
            CowStr::Borrowed(attrs.href.as_str()),
            CowStr::Borrowed(attrs.title.as_str()),
        ),
    }
}
impl<'a> MarkdownSerializer<'a> {
    /// Schedules the child at `index` (if any) for traversal, re-queueing the
    /// parent so its next child is visited afterwards. Returns `true` when
    /// `index` is past the last child.
    fn process_content(
        &mut self,
        index: usize,
        content: &'a Fragment<MD>,
        node: &'a MarkdownNode,
    ) -> bool {
        if let Some(child) = content.maybe_child(index) {
            self.inner.push((node, index + 1));
            self.inner.push((child, 0));
            false
        } else {
            true
        }
    }
    /// Emits the start/end events for a block-level node, closing any inline
    /// mark still open at the block boundary first. `map` builds the
    /// pulldown-cmark tag from the node's attributes.
    fn process_attr_node<A, F>(
        &mut self,
        index: usize,
        content: &'a Fragment<MD>,
        attrs: &'a A,
        node: &'a MarkdownNode,
        map: F,
    ) -> Option<Event<'a>>
    where
        F: FnOnce(&'a A) -> Tag<'a>,
    {
        if index == 0 {
            if let Some(mark) = self.marks.pop() {
                self.inner.push((node, 0));
                return Some(Event::End(mark_tag(mark)));
            }
        }
        let last = self.process_content(index, content, node);
        if index == 0 {
            if last {
                // close the tag next
                self.inner.push((node, index + 1));
            }
            Some(Event::Start(map(attrs)))
        } else if last {
            if let Some(mark) = self.marks.pop() {
                self.inner.push((node, index));
                return Some(Event::End(mark_tag(mark)));
            }
            let tag = map(attrs);
            if matches!(&tag, Tag::CodeBlock(..)) {
                // Code blocks need a trailing newline before the closing
                // fence; queue the End event and emit the newline first.
                self.stack.push(Event::End(tag));
                Some(Event::Text(CowStr::Inlined(InlineStr::from('\n'))))
            } else {
                Some(Event::End(tag))
            }
        } else {
            self.next()
        }
    }
}
impl<'a> Iterator for MarkdownSerializer<'a> {
    type Item = Event<'a>;
    /// Yields the next markdown event: queued events first, then the node on
    /// top of the DFS stack.
    fn next(&mut self) -> Option<Self::Item> {
        if let Some(ev) = self.stack.pop() {
            return Some(ev);
        }
        if let Some((node, index)) = self.inner.pop() {
            match node {
                MarkdownNode::Doc(Block { content }) => {
                    self.process_content(index, content, node);
                    self.next()
                }
                MarkdownNode::Heading(AttrNode { attrs, content }) => {
                    self.process_attr_node(index, content, attrs, node, |attrs| {
                        Tag::Heading(attrs.level.into())
                    })
                }
                MarkdownNode::CodeBlock(AttrNode { attrs, content }) => {
                    self.process_attr_node(index, content, attrs, node, |attrs| {
                        Tag::CodeBlock(CodeBlockKind::Fenced(CowStr::Borrowed(&attrs.params)))
                    })
                }
                MarkdownNode::Text(text_node) => {
                    // Close any open mark the current text run no longer carries.
                    if let Some(last) = self.marks.last().map(|n| *n) {
                        if !text_node.marks.contains(last) {
                            self.inner.push((node, index));
                            self.marks.pop();
                            return Some(Event::End(mark_tag(last)));
                        }
                    }
                    // Open every mark the text carries that is not open yet;
                    // code is special-cased as an `Event::Code` below.
                    let mut is_code = false;
                    for mark in &text_node.marks {
                        if *mark == MarkdownMark::Code {
                            is_code = true;
                        } else if !self.marks.contains(&mark) {
                            self.inner.push((node, index));
                            self.marks.push(mark);
                            return Some(Event::Start(mark_tag(mark)));
                        }
                    }
                    if is_code {
                        Some(Event::Code(CowStr::Borrowed(text_node.text.as_str())))
                    } else {
                        Some(Event::Text(CowStr::Borrowed(text_node.text.as_str())))
                    }
                }
                MarkdownNode::Blockquote(Block { content }) => {
                    self.process_attr_node(index, content, &(), node, |()| Tag::BlockQuote)
                }
                MarkdownNode::Paragraph(Block { content }) => {
                    self.process_attr_node(index, content, &(), node, |()| Tag::Paragraph)
                }
                MarkdownNode::BulletList(AttrNode { attrs, content }) => {
                    self.process_attr_node(index, content, attrs, node, |_| Tag::List(None))
                }
                MarkdownNode::OrderedList(AttrNode { attrs, content }) => {
                    self.process_attr_node(index, content, attrs, node, |_| {
                        Tag::List(Some(attrs.order as u64))
                    })
                }
                MarkdownNode::ListItem(Block { content }) => {
                    self.process_attr_node(index, content, &(), node, |()| Tag::Item)
                }
                MarkdownNode::HorizontalRule => Some(Event::Rule),
                MarkdownNode::HardBreak => {
                    // todo: inline marks
                    Some(Event::HardBreak)
                }
                MarkdownNode::Image(Leaf { attrs }) => {
                    // NOTE(review): the title slot reuses `attrs.src`; this
                    // looks like it may have been meant to be an alt/title
                    // attribute — verify against the node schema.
                    self.process_attr_node(index, Fragment::EMPTY_REF, &(), node, |()| {
                        Tag::Image(
                            LinkType::Inline,
                            CowStr::Borrowed(attrs.src.as_str()),
                            CowStr::Borrowed(attrs.src.as_str()),
                        )
                    })
                }
            }
        } else {
            None
        }
    }
}
#[cfg(test)]
mod tests {
    use super::to_markdown;
    use crate::markdown::{
        helper::{blockquote, code_block, doc, h1, h2, node, p, strong},
        MarkdownNode,
    };
    // Fixture document covering headings, a fenced code block, and a
    // blockquote with mixed plain/strong text.
    fn initial_doc() -> MarkdownNode {
        doc(vec![
            h1((
                "Padington",
            )),
            code_block("", (
                "fn foo(a: u32) -> u32 {\n    2 * a\n}",
            )),
            h2((
                "Lorem Ipsum",
            )),
            blockquote((
                p(vec![
                    node("Lorem ipsum dolor sit amet, consetetur sadipscing elitr, sed diam nonumy eirmod tempor invidunt ut labore et dolore magna aliquyam erat, sed diam voluptua. "),
                    strong("At vero eos et accusam et justo duo dolores et ea rebum. Stet clita kasd gubergren, no sea takimata sanctus est Lorem ipsum dolor sit amet."),
                    node(" Lorem ipsum dolor sit amet, consetetur sadipscing elitr, sed diam nonumy eirmod tempor invidunt ut labore et dolore magna aliquyam erat, sed diam voluptua. At vero eos et accusam et justo duo dolores et ea rebum. Stet clita kasd gubergren, no sea takimata sanctus est Lorem ipsum dolor sit amet. Lorem ipsum dolor sit amet, consetetur sadipscing elitr, sed diam nonumy eirmod tempor invidunt ut labore et dolore magna aliquyam erat, sed diam voluptua. At vero eos et accusam et justo duo dolores et ea rebum. Stet clita kasd gubergren, no sea takimata sanctus est Lorem ipsum dolor sit amet."),
                ]),
            ))
        ])
    }
    // Round-trip: serialize the fixture and compare to the exact expected
    // cmark output.
    #[test]
    fn test() {
        let node = initial_doc();
        let res = "\
        # Padington\n\
        \n\
        ````\n\
        fn foo(a: u32) -> u32 {\n    2 * a\n}\n\
        ````\n\
        \n\
        ## Lorem Ipsum\n\
        \n > \n > Lorem ipsum dolor sit amet, consetetur sadipscing elitr, sed diam nonumy eirmod tempor invidunt ut labore et dolore magna aliquyam erat, sed diam voluptua. **At vero eos et accusam et justo duo dolores et ea rebum. Stet clita kasd gubergren, no sea takimata sanctus est Lorem ipsum dolor sit amet.** Lorem ipsum dolor sit amet, consetetur sadipscing elitr, sed diam nonumy eirmod tempor invidunt ut labore et dolore magna aliquyam erat, sed diam voluptua. At vero eos et accusam et justo duo dolores et ea rebum. Stet clita kasd gubergren, no sea takimata sanctus est Lorem ipsum dolor sit amet. Lorem ipsum dolor sit amet, consetetur sadipscing elitr, sed diam nonumy eirmod tempor invidunt ut labore et dolore magna aliquyam erat, sed diam voluptua. At vero eos et accusam et justo duo dolores et ea rebum. Stet clita kasd gubergren, no sea takimata sanctus est Lorem ipsum dolor sit amet.\
        ".to_string();
        let save = to_markdown(&node);
        let line = "\n---------------------------------------\n";
        assert_eq!(
            save.as_ref(),
            Ok(&res),
            "\ngenerated:{}{}{}\n\nexpected:{}{}{}\n",
            line,
            save.as_ref().unwrap(),
            line,
            line,
            &res,
            line,
        );
    }
}
| true |
77bad7770ad3a137c2ce69688bdeba01f82305e6
|
Rust
|
jvperrin/rusty-tetris
|
/src/main.rs
|
UTF-8
| 1,374 | 3.078125 | 3 |
[
"MIT"
] |
permissive
|
extern crate rand;
extern crate ncurses;
use ncurses::*;
/// Outcome of a game tick; `FAILURE` ends the main loop.
#[derive(PartialEq)]
enum Status {
    SUCCESS,
    FAILURE,
}
/// Player movement request read from the keyboard.
enum Direction {
    LEFT,
    RIGHT,
    DOWN,
    NONE,
}
/// A tetromino, stored as the set of board cells it occupies.
struct Piece {
    points: Vec<Point>,
}
/// A single board cell coordinate.
#[derive(Clone, Copy, PartialEq)]
struct Point {
    x: u32,
    y: u32,
}
/// Whole game state: board bounds plus placed and queued pieces.
struct Board {
    xmax: u32,
    ymax: u32,
    placed_pieces: Vec<Piece>,
    upcoming_pieces: Vec<Piece>,
}
/// Polls ncurses for one key press (non-blocking thanks to the `timeout`
/// set in `main`) and translates it into a movement request.
fn get_next_move() -> Direction {
    let ch = getch();
    match ch {
        KEY_RIGHT => Direction::RIGHT,
        KEY_LEFT => Direction::LEFT,
        // KEY_DOWN => Direction::DOWN,
        _ => Direction::NONE,
    }
}
fn main() {
    // ncurses setup: unbuffered input, no echo, hidden cursor, and a 100 ms
    // input timeout that doubles as the game tick.
    initscr();
    cbreak();
    noecho();
    keypad(stdscr(), true);
    curs_set(CURSOR_VISIBILITY::CURSOR_INVISIBLE);
    timeout(100);
    let mut xmax: i32 = 0;
    let mut ymax: i32 = 0;
    // Get screen size to get board limits
    getmaxyx(stdscr(), &mut ymax, &mut xmax);
    let mut board = Board {
        xmax: xmax as u32,
        ymax: ymax as u32,
        placed_pieces: vec![],
        upcoming_pieces: vec![],
    };
    let mut status = Status::SUCCESS;
    let mut dir = Direction::NONE;
    // NOTE(review): `status` is never reassigned, so this loop can only end
    // via an external signal until the TODOs below are implemented.
    while status == Status::SUCCESS {
        clear();
        // TODO: Display the board
        refresh();
        dir = get_next_move();
        // TODO: Move the pieces down the board
    }
    endwin();
}
| true |
9335208dc646cdc613b0f496336bd04cd34fca50
|
Rust
|
ssoRL/bspts
|
/frontend/src/data/task_list.rs
|
UTF-8
| 2,120 | 3.25 | 3 |
[] |
no_license
|
use crate::components::TaskItem;
use std::collections::VecDeque;
use data::task::Task;
use yew::prelude::*;
// use yew::services::ConsoleService;
use crate::data::store::Store;
// Tasks are boxed so moving them between collections stays cheap.
type BoxT = Box<Task>;
/// A list of tasks with some methods to control it and return html
#[derive(Default, Clone)]
pub struct TaskList {
    // `None` = not yet loaded; `Some` = loaded (possibly empty).
    tasks_o: Option<VecDeque<BoxT>>
}
impl TaskList {
/// Instantiate a new Tasks from a vec of tasks
pub fn from_vec(tasks_vec: Vec<Task>) -> Self {
let boxed: Vec<BoxT> = tasks_vec.iter().map(|task| -> BoxT {Box::new(task.clone())}).collect();
Self {tasks_o: Some(boxed.into())}
}
/// Add a task to the front of the list of tasks
pub fn push(self: &mut Self, task: BoxT) {
match &mut self.tasks_o {
Some(tasks) => tasks.push_front(task),
None => self.tasks_o = Some(vec![task].into()),
}
}
    /// Add a new list of tasks to the front of the current list of tasks
    pub fn push_vec(self: &mut Self, task_list: &Vec<Task>) {
        // Each task is cloned because the caller keeps ownership of its list.
        for task in task_list {
            self.push(Box::new(task.clone()));
        }
    }
    /// Removes the task with the specified id.
    /// Returns the removed task on success
    pub fn remove(self: &mut Self, task_id: i32) -> Option<BoxT> {
        // `?` short-circuits to `None` when the list is unset or id is absent.
        let tasks = self.tasks_o.as_mut()?;
        let task_index = tasks.iter().position(|t| t.id == task_id)?;
        tasks.remove(task_index)
    }
    /// True while the list has never been populated (still loading).
    pub fn is_unset(self: &Self) -> bool {
        self.tasks_o.is_none()
    }
    /// True when the list is unset or populated with zero tasks.
    pub fn is_empty(self: &Self) -> bool {
        match &self.tasks_o {
            Some(tasks) => tasks.len() == 0,
            None => true,
        }
    }
    /// Converts these tasks to html
    pub fn to_html<>(self: &Self, store: Store) -> Html
    {
        match &self.tasks_o {
            Some(tasks) => tasks.iter().map(|task| {
                // One <TaskItem> per task; each gets its own store handle.
                html!{
                    <TaskItem
                        task={task}
                        store={store.clone()}
                    />
                }
            }).collect(),
            None => html!{<></>},
        }
    }
}
| true |
a44f8e3c26310b8571419d59528035a74fefda5a
|
Rust
|
aead/libgc
|
/src/circuit/binary/mod.rs
|
UTF-8
| 13,668 | 2.90625 | 3 |
[] |
no_license
|
mod error;
use std::path::PathBuf;
use std::collections::HashMap;
use self::error::ExecError;
use super::super::parser;
use super::super::parser::MetaInfo;
use super::super::parser::types::*;
use super::super::parser::types::ID::*;
use super::super::parser::types::GateType::*;
/// Lookup key for a pending value: target node plus the pin it feeds.
type Entry = (ID, Pin);
/// One (possibly nested) boolean circuit plus its evaluation state.
pub struct Circuit {
    info: MetaInfo,
    // Values supplied for the circuit's Input nodes.
    input: HashMap<ID, u8>,
    // Values produced at the circuit's Output nodes.
    output: HashMap<ID, u8>,
    // Values parked on a (gate, pin) until the gate can fire.
    lookup: HashMap<Entry, u8>,
    // Named sub-circuits, parsed from the paths listed in `info`.
    sub: HashMap<String, Circuit>,
}
impl Circuit {
    /// Builds a circuit (and, recursively, its sub-circuits) from parsed
    /// metadata, then pre-seeds constant `1` values from the optional ONE
    /// node into outputs, gate pins, and sub-circuit inputs.
    pub fn new(info: MetaInfo) -> Result<Circuit, ExecError> {
        let mut c = Circuit {
            info: info,
            input: HashMap::new(),
            output: HashMap::new(),
            lookup: HashMap::new(),
            sub: HashMap::new(),
        };
        if !c.info.sub_circuits.is_empty() {
            for (key, val) in &(c.info.sub_circuits) {
                let path = PathBuf::from(val);
                let inf = try!(parser::parse_meta_info(path.as_path()));
                c.sub.insert(key.clone(), try!(Circuit::new(inf)));
            }
        }
        match c.info.one {
            Some(ref node) => {
                for edge in node.edges() {
                    match edge.id() {
                        Output(id) => {
                            c.output.insert(Output(id), 1);
                        }
                        Gate(id) => {
                            let pin = try!(Circuit::expect_some(edge.pin(),
                                                                format!("ONE: pin is required \
                                                                         for edge: {}",
                                                                        edge)));
                            c.lookup.insert((Gate(id), pin), 1);
                        }
                        Input(id) => {
                            let key = try!(Circuit::expect_some(edge.circuit(),
                                                                format!("invalid edge: \
                                                                         expected sub circuit \
                                                                         - {}",
                                                                        edge)));
                            let sub: &mut Circuit =
                                try!(Circuit::expect_some(c.sub.get_mut(&key),
                                                          format!("unknown sub circuit: {}", key)));
                            sub.set_input(id, 1);
                            if sub.is_executable() {
                                try!(sub.execute());
                            }
                        }
                        _ => {
                            return Err(ExecError::from(format!("invalid edge: {}", edge)));
                        }
                    }
                }
            }
            None => (),
        };
        Ok(c)
    }
    /// True once every declared input has been supplied a value.
    pub fn is_executable(&self) -> bool {
        self.info.inputs == self.input.len() as u64
    }
pub fn set_input(&mut self, id: u64, val: u8) {
if val == 0 {
self.input.insert(ID::Input(id), 0);
} else {
self.input.insert(ID::Input(id), 1);
}
}
    /// Returns the computed value at output `id`, or an error if the circuit
    /// has not produced that output.
    pub fn get_output(&self, id: u64) -> Result<u8, ExecError> {
        match self.output.get(&Output(id)) {
            Some(val) => Ok(*val),
            None => Err(ExecError::from(format!("no output for id {}", id))),
        }
    }
pub fn collect_output(&self) -> Vec<u8> {
let mut output = Vec::with_capacity(self.output.len());
let mut i = 1;
while i < self.output.len() + 1 {
let v = self.output.get(&Output(i as u64)).unwrap();
output.push(*v);
i += 1;
}
output
}
    /// Streams the circuit's node list from disk and evaluates each node in
    /// file order. All inputs must be set first.
    pub fn execute(&mut self) -> Result<(), ExecError> {
        if !self.is_executable() {
            return Err(ExecError::from("circuit is not executable"));
        }
        let path = PathBuf::from(self.info.path.as_path());
        for node in try!(parser::open_circuit(path.as_path())) {
            let mut node: Node = try!(node);
            try!(match node.id() {
                ID::Input(_) => self.process_input(&mut node),
                ID::Output(_) => self.process_output(&mut node),
                ID::Gate(_) => self.process_gate(&mut node),
                _ => Err(ExecError::from(format!("invalid node id: {}", node.id()))),
            });
        }
        Ok(())
    }
fn check<T>(expr: bool, msg: T) -> Result<(), ExecError>
where ExecError: From<T>
{
match expr {
true => Ok(()),
false => Err(ExecError::from(msg)),
}
}
fn expect_some<O, T>(expr: Option<O>, msg: T) -> Result<O, ExecError>
where ExecError: From<T>
{
match expr {
Some(val) => Ok(val),
None => Err(ExecError::from(msg)),
}
}
    /// Forwards the stored value of an Input node along all of its edges:
    /// into sub-circuit inputs, circuit outputs, or gate pins.
    fn process_input(&mut self, node: &mut Node) -> Result<(), ExecError> {
        let val = *try!(Circuit::expect_some(self.input.get(&node.id()),
                                             format!("missing input value {}", node.id())));
        for edge in node.edges() {
            match edge.circuit() {
                Some(key) => {
                    try!(Circuit::check(edge.id().is_input(),
                                        format!("invalid edge: expected input id - {}", edge)));
                    let sub: &mut Circuit =
                        try!(Circuit::expect_some(self.sub.get_mut(&key),
                                                  format!("unknown sub circuit: {}", key)));
                    sub.set_input(edge.id().into(), val);
                    if sub.is_executable() {
                        try!(sub.execute());
                    }
                }
                None => {
                    match edge.id() {
                        Output(id) => {
                            self.output.insert(Output(id), val);
                        }
                        Gate(id) => {
                            let pin = try!(Circuit::expect_some(edge.pin(),
                                                                format!("pin is required for \
                                                                         edge: {}",
                                                                        edge)));
                            self.lookup.insert((Gate(id), pin), val);
                        }
                        _ => {
                            return Err(ExecError::from(format!("invalid edge: {}", edge)));
                        }
                    }
                }
            };
        }
        Ok(())
    }
    /// Propagates an Output node's value. When the node references a
    /// sub-circuit, the value is pulled from that sub-circuit's outputs;
    /// otherwise the already-recorded local output is forwarded.
    fn process_output(&mut self, node: &mut Node) -> Result<(), ExecError> {
        match node.circuit() {
            Some(key) => {
                let val = try!(try!(Circuit::expect_some(self.sub.get(&key),
                                                         format!("unknown sub circuit: {}", key)))
                                   .get_output(node.id().into()));
                for edge in node.edges() {
                    match edge.id() {
                        Output(id) => {
                            self.output.insert(Output(id), val);
                        }
                        Gate(id) => {
                            let pin = try!(Circuit::expect_some(edge.pin(),
                                                                format!("pin is required for \
                                                                         edge: {}",
                                                                        edge)));
                            self.lookup.insert((Gate(id), pin), val);
                        }
                        Input(id) => {
                            let key = try!(Circuit::expect_some(edge.circuit(),
                                                                format!("invalid edge: \
                                                                         expected sub circuit \
                                                                         - {}",
                                                                        edge)));
                            let sub: &mut Circuit =
                                try!(Circuit::expect_some(self.sub.get_mut(&key),
                                                          format!("unknown sub circuit: {}", key)));
                            sub.set_input(id, val);
                            if sub.is_executable() {
                                try!(sub.execute());
                            }
                        }
                        _ => {
                            return Err(ExecError::from(format!("invalid edge: {}", edge)));
                        }
                    }
                }
            }
            None => {
                let val = try!(self.get_output(node.id().into()));
                for edge in node.edges() {
                    match edge.id() {
                        Input(id) => {
                            let key = try!(Circuit::expect_some(edge.circuit(),
                                                                format!("invalid edge: \
                                                                         expected sub circuit \
                                                                         - {}",
                                                                        edge)));
                            let sub: &mut Circuit =
                                try!(Circuit::expect_some(self.sub.get_mut(&key),
                                                          format!("unknown sub circuit: {}", key)));
                            sub.set_input(id, val);
                            if sub.is_executable() {
                                try!(sub.execute());
                            }
                        }
                        _ => {
                            return Err(ExecError::from(format!("invalid edge: {}", edge)));
                        }
                    };
                }
            }
        }
        Ok(())
    }
    /// Fires a gate: consumes the value(s) parked on its pin(s), applies the
    /// gate's boolean operation, and forwards the result along every edge.
    fn process_gate(&mut self, node: &mut Node) -> Result<(), ExecError> {
        try!(Circuit::check(node.circuit().is_none(),
                            format!("node with id: {} cannot reference sub circuit", node.id())));
        let gate_type = try!(Circuit::expect_some(node.gate_type(),
                                                  format!("node with id: {} must have a gate \
                                                           type",
                                                          node.id())));
        let val = match gate_type.operands() {
            1 => {
                let v0 = try!(Circuit::expect_some(self.lookup.remove(&(node.id(), Pin::Left)),
                                                   format!("cannot find value for node: {}",
                                                           node.id())));
                !v0 & 0x01 // NOT
            }
            2 => {
                let v0 = try!(Circuit::expect_some(self.lookup.remove(&(node.id(), Pin::Left)),
                                                   format!("cannot find value for left pin of \
                                                            node: {}",
                                                           node.id())));
                let v1 = try!(Circuit::expect_some(self.lookup.remove(&(node.id(), Pin::Right)),
                                                   format!("cannot find value for right pin of \
                                                            node: {}",
                                                           node.id())));
                match gate_type {
                    AND => v0 & v1,
                    XOR => v0 ^ v1,
                    OR => v0 | v1,
                    _ => {
                        panic!("impossible situation");
                    }
                }
            }
            _ => {
                panic!("impossible situation");
            }
        };
        for edge in node.edges() {
            match edge.id() {
                Output(id) => {
                    self.output.insert(Output(id), val);
                }
                Gate(id) => {
                    let pin = try!(Circuit::expect_some(edge.pin(),
                                                        format!("pin is required for edge: {}",
                                                                edge)));
                    self.lookup.insert((Gate(id), pin), val);
                }
                Input(id) => {
                    let key = try!(Circuit::expect_some(edge.circuit(),
                                                        format!("invalid edge: expected sub \
                                                                 circuit - {}",
                                                                edge)));
                    let sub: &mut Circuit =
                        try!(Circuit::expect_some(self.sub.get_mut(&key),
                                                  format!("unknown sub circuit: {}", key)));
                    sub.set_input(id, val);
                    if sub.is_executable() {
                        try!(sub.execute());
                    }
                }
                _ => {
                    return Err(ExecError::from(format!("invalid edge: {}", edge)));
                }
            }
        }
        Ok(())
    }
}
| true |
0fdc3aff137c3f89c299453d13732044c1f67b58
|
Rust
|
gnoliyil/fuchsia
|
/src/lib/fake-clock/named-timer/src/lib.rs
|
UTF-8
| 6,274 | 2.765625 | 3 |
[
"BSD-2-Clause"
] |
permissive
|
// Copyright 2021 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
use fuchsia_async::{self as fasync, TimeoutExt};
use fuchsia_zircon as zx;
use fuchsia_zircon::sys::zx_time_t;
use std::future::Future;
use std::os::raw::c_char;
/// A version of the fidl `DeadlineId` containing unowned data.
#[derive(Clone, Copy)]
pub struct DeadlineId<'a> {
    /// Identifier of the component that owns the deadline.
    component_id: &'a str,
    /// Component-local code naming this particular deadline.
    code: &'a str,
}
// Implementing `From` (rather than `Into` directly) is the idiomatic
// direction; the standard blanket impl still provides `Into`, so existing
// `.into()` call sites keep working.
impl<'a> From<DeadlineId<'a>> for fidl_fuchsia_testing_deadline::DeadlineId {
    fn from(id: DeadlineId<'a>) -> Self {
        Self {
            component_id: id.component_id.to_string(),
            code: id.code.to_string(),
        }
    }
}
impl<'a> DeadlineId<'a> {
    /// Create a new deadline identifier.
    /// `const` so ids can be declared as statics next to their call sites.
    pub const fn new(component_id: &'a str, code: &'a str) -> Self {
        Self { component_id, code }
    }
}
extern "C" {
    // FFI hook into the fake-clock library. Strings are passed as pointer +
    // length (not NUL-terminated). Based on the fallback below, a `false`
    // return means `out` holds no usable deadline — presumably when the
    // fake-clock library is not linked in; TODO confirm the exact contract.
    fn create_named_deadline(
        component: *const c_char,
        component_len: usize,
        code: *const c_char,
        code_len: usize,
        duration: zx_time_t,
        out: *mut zx_time_t,
    ) -> bool;
}
/// Computes a deadline `duration` from now, registering it with the
/// fake-clock service when available; falls back to plain `now + duration`
/// otherwise.
fn create_named_deadline_rust(deadline: &DeadlineId<'_>, duration: zx::Duration) -> fasync::Time {
    let mut time: zx_time_t = 0;
    // SAFETY: the pointers and lengths are derived from live `&str` fields of
    // `deadline`, and `time` outlives the call. Assumes the callee writes
    // `out` only when returning true — TODO confirm against the FFI contract.
    let time_valid = unsafe {
        create_named_deadline(
            deadline.component_id.as_ptr() as *const c_char,
            deadline.component_id.len(),
            deadline.code.as_ptr() as *const c_char,
            deadline.code.len(),
            duration.into_nanos(),
            &mut time,
        )
    };
    match time_valid {
        true => zx::Time::from_nanos(time).into(),
        false => fasync::Time::now() + duration,
    }
}
/// A timer with an associated name.
/// This timer is intended to be used in conjunction with the fake-clock library. Under normal
/// execution, the timer behaves the same as a regular [`fuchsia_async::Timer`]. When run in an
/// integration test with the fake-clock library linked in, the creation of the timer and
/// the expiration of the timer are reported to the fake-clock service. The integration test may
/// register interest in these events to stop time when they occur.
pub struct NamedTimer;
impl NamedTimer {
    /// Create a new `NamedTimer` that will expire `duration` in the future.
    /// In an integration test, the `SET` event is reported immediately when this method is called,
    /// and `EXPIRED` is reported after `duration` elapses. Note `EXPIRED` is still reported even
    /// if the timer is dropped before `duration` elapses.
    pub fn new(id: &DeadlineId<'_>, duration: zx::Duration) -> fasync::Timer {
        // The deadline is registered eagerly, before the timer is polled.
        let deadline = create_named_deadline_rust(id, duration);
        fasync::Timer::new(deadline)
    }
}
/// An extension trait that allows setting a timeout with an associated name.
/// The timeout is intended to be used in conjunction with the fake-clock library. Under normal
/// execution, this behaves identically to [`fuchsia_async::TimeoutExt`].
/// When run in an integration test with the fake-clock library linked in, the creation of the
/// timer and the expiration of the timer are reported to the fake-clock service. The integration
/// test may register interest in these events to stop time when they occur.
pub trait NamedTimeoutExt: Future + Sized {
    /// Wraps the future in a timeout, calling `on_timeout` when the timeout occurs.
    /// In an integration test, the `SET` event is reported immediately when this method is called,
    /// and `EXPIRED` is reported after `duration` elapses. Note `EXPIRED` is still reported even
    /// if `on_timeout` is not run.
    fn on_timeout_named<OT>(
        self,
        id: &DeadlineId<'_>,
        duration: zx::Duration,
        on_timeout: OT,
    ) -> fasync::OnTimeout<Self, OT>
    where
        OT: FnOnce() -> Self::Output,
    {
        // Deadline registration happens here, not at first poll.
        let deadline = create_named_deadline_rust(id, duration);
        self.on_timeout(deadline, on_timeout)
    }
}
// Blanket impl: every sized future gains `on_timeout_named`.
impl<F: Future + Sized> NamedTimeoutExt for F {}
#[cfg(test)]
mod test {
    use super::*;
    use core::task::Poll;
    // When the fake-clock library is not linked in, these timers should behave identical to
    // fasync::Timer. These tests verify that the fake time utilities provided by
    // fasync::TestExecutor continue to work when fake-clock is NOT linked in. Behavior with
    // fake-clock linked in is verified by integration tests in fake-clock/examples.
    const ONE_HOUR: zx::Duration = zx::Duration::from_hours(1);
    const DEADLINE_ID: DeadlineId<'static> = DeadlineId::new("component", "code");
    #[test]
    fn test_timer() {
        let mut executor =
            fasync::TestExecutor::new_with_fake_time().expect("creating executor failed");
        let start_time = executor.now();
        let mut timer = NamedTimer::new(&DEADLINE_ID, ONE_HOUR);
        // Pending before the deadline, ready once fake time reaches it.
        assert!(executor.run_until_stalled(&mut timer).is_pending());
        executor.set_fake_time(start_time + ONE_HOUR);
        assert_eq!(executor.wake_next_timer(), Some(start_time + ONE_HOUR));
        assert!(executor.run_until_stalled(&mut timer).is_ready());
    }
    #[test]
    fn test_timeout_not_invoked() {
        let mut executor =
            fasync::TestExecutor::new_with_fake_time().expect("creating executor failed");
        // An already-ready future wins over the timeout.
        let mut ready_future =
            futures::future::ready("ready").on_timeout_named(&DEADLINE_ID, ONE_HOUR, || "timeout");
        assert_eq!(executor.run_until_stalled(&mut ready_future), Poll::Ready("ready"));
    }
    #[test]
    fn test_timeout_invoked() {
        let mut executor =
            fasync::TestExecutor::new_with_fake_time().expect("creating executor failed");
        let start_time = executor.now();
        // A never-ready future falls back to the timeout closure.
        let mut stalled_future =
            futures::future::pending().on_timeout_named(&DEADLINE_ID, ONE_HOUR, || "timeout");
        assert!(executor.run_until_stalled(&mut stalled_future).is_pending());
        executor.set_fake_time(start_time + ONE_HOUR);
        assert_eq!(executor.wake_next_timer(), Some(start_time + ONE_HOUR));
        assert_eq!(executor.run_until_stalled(&mut stalled_future), Poll::Ready("timeout"));
    }
}
| true |
3eaac348e8647ef63a8141aba9df89d98d5af3ed
|
Rust
|
qezz/riker
|
/tests/system.rs
|
UTF-8
| 3,056 | 2.78125 | 3 |
[
"MIT"
] |
permissive
|
#![feature(
async_await,
await_macro,
futures_api,
arbitrary_self_types
)]
use riker::actors::*;
use riker_default::DefaultModel;
use futures::executor::block_on;
/// Test actor that recursively spawns one child per level (up to level 10 in
/// `pre_start`), giving `system.shutdown()` a non-trivial actor tree to tear down.
struct ShutdownTestActor {
    // Depth of this actor in the spawned chain.
    level: u32,
}
impl ShutdownTestActor {
    /// Boxed constructor, shaped so it can be passed to `Props::new_args`.
    fn new(level: u32) -> BoxActor<TestMsg> {
        Box::new(ShutdownTestActor { level })
    }
}
impl Actor for ShutdownTestActor {
    type Msg = TestMsg;
    // On start, spawn the next level until a depth of 10 is reached.
    fn pre_start(&mut self, ctx: &Context<Self::Msg>) {
        if self.level < 10 {
            let props = Props::new_args(Box::new(ShutdownTestActor::new), self.level + 1);
            ctx.actor_of(props, format!("test-actor-{}", self.level + 1).as_str()).unwrap();
        }
    }
    // Messages are ignored; this actor only exists to exercise lifecycle handling.
    fn receive(&mut self, _: &Context<Self::Msg>, _: Self::Msg, _: Option<ActorRef<Self::Msg>>) {}
}
/// Minimal message type used by the test actors; carries no payload.
#[derive(Clone, Debug)]
struct TestMsg(());
/// Wrap a `TestMsg` as a user-level actor message.
///
/// Implemented as `From` rather than `Into`: the standard library's blanket
/// `impl<T, U: From<T>> Into<U> for T` derives the `Into` impl automatically,
/// so all existing `.into()` call sites keep working.
impl From<TestMsg> for ActorMsg<TestMsg> {
    fn from(msg: TestMsg) -> ActorMsg<TestMsg> {
        ActorMsg::User(msg)
    }
}
#[test]
#[allow(dead_code)]
fn system_shutdown() {
let model: DefaultModel<TestMsg> = DefaultModel::new();
let system = ActorSystem::new(&model).unwrap();
let props = Props::new_args(Box::new(ShutdownTestActor::new), 1);
let _ = system.actor_of(props, "test-actor-1").unwrap();
block_on(system.shutdown());
}
#[test]
fn system_guardian_mailboxes() {
let model: DefaultModel<TestMsg> = DefaultModel::new();
let system = ActorSystem::new(&model).unwrap();
let user_root = system.user_root();
user_root.tell(TestMsg(()), None);
user_root.tell(TestMsg(()), None);
// We're testing a system actor so wait for 1s after
// sending the system message to give time to panic if it fails
std::thread::sleep(std::time::Duration::from_millis(1000));
}
#[test]
fn system_execute_futures() {
let model: DefaultModel<TestMsg> = DefaultModel::new();
let system = ActorSystem::new(&model).unwrap();
for i in 0..100 {
let handle = system.execute(async move {
format!("some_val_{}", i)
});
assert_eq!(block_on(handle), format!("some_val_{}", i));
}
}
#[test]
fn system_panic_futures() {
let model: DefaultModel<TestMsg> = DefaultModel::new();
let system = ActorSystem::new(&model).unwrap();
for _ in 0..100 {
let _ = system.execute(async move {
panic!("// TEST PANIC // TEST PANIC // TEST PANIC //");
});
}
for i in 0..100 {
let handle = system.execute(async move {
format!("some_val_{}", i)
});
assert_eq!(block_on(handle), format!("some_val_{}", i));
}
}
#[test]
fn system_load_app_config() {
let model: DefaultModel<TestMsg> = DefaultModel::new();
let system = ActorSystem::new(&model).unwrap();
assert_eq!(system.config().get_int("app.some_setting").unwrap() as i64, 1);
}
| true |
f97503fbc4b5f2721b30901092acd873cf708d84
|
Rust
|
ralvescosta/rust_principles
|
/intro_rust/vectors_hashmap_castring_iflet_while_result/src/main.rs
|
UTF-8
| 2,245 | 3.734375 | 4 |
[] |
no_license
|
use core::panic;
use std::{collections::HashMap, fs::File};
/// Runs each language-feature demo in order. Note `result_enum` panics when
/// `blablabla.txt` does not exist, so it is deliberately called last.
fn main() {
    vectors_primitive();
    vector_enum();
    hash_map_primitive();
    if_let();
    while_let();
    casting();
    result_enum();
}
/// Demonstrates `Vec` construction, iteration, length and capacity.
fn vectors_primitive() {
    println!("VECTORS PRIMITIVE");
    let fixed = vec![1, 2, 3, 4]; // vector is a resizable array
    println!("{:?}", fixed);
    // vector struct has the data, the length and the capacity
    let mut values = Vec::<u32>::new();
    for n in [5u32, 1, 10, 87].iter() {
        values.push(*n);
    }
    for item in &values {
        println!("{}", item);
    }
    println!("{:?} {} {}", &values, values.len(), values.capacity());
    println!("");
}
/// Demonstrates a heterogeneous `Vec` using an enum with payload variants.
fn vector_enum() {
    println!("VECTOR ENUM");
    #[derive(Debug)]
    enum Example {
        Float(f64),
        Int(i32),
        Text(String),
    }
    let mixed: Vec<Example> = vec![
        Example::Int(12),
        Example::Float(10.1),
        Example::Text("String".to_owned()),
    ];
    println!("{:?}", mixed);
    println!("");
}
/// Demonstrates `HashMap` insertion, iteration, and keyed lookup.
fn hash_map_primitive() {
    println!("HASH MAP PRIMITIVE");
    let mut scores = HashMap::new();
    scores.insert("random".to_string(), 12);
    scores.insert("strings".to_string(), 22);
    for (key, value) in &scores {
        println!("{} : {}", key, value);
    }
    // Keyed lookup returns Option<&V>; print either the hit or a miss marker.
    if let Some(&n) = scores.get("random") {
        println!("Match: {}", n);
    } else {
        println!("no match");
    }
    println!("");
}
/// Demonstrates conditional destructuring of an `Option`.
fn if_let() {
    println!("IF LET");
    let maybe = Some('c');
    match maybe {
        Some(ch) => println!("{}", ch),
        None => {}
    }
    println!("");
}
/// Demonstrates looping while an `Option` stays `Some`: counts 0, 2, ..., 18,
/// then prints "Quit" once the value exceeds 19.
fn while_let() {
    println!("WHILE LET");
    let mut state = Some(0);
    loop {
        match state {
            Some(i) if i > 19 => {
                println!("Quit");
                state = None;
            }
            Some(i) => {
                println!("{}", i);
                state = Some(i + 2);
            }
            None => break,
        }
    }
    println!("");
}
/// Demonstrates numeric `as` casts: f32 -> u8 (truncates) -> char.
fn casting() {
    println!("CASTING");
    let value = 24.1414_f32; // cast to float32
    let truncated = value as u8; // fractional part discarded -> 24
    println!("{}", truncated as char);
    println!("");
}
/// Demonstrates matching on `Result`: opens a file that is expected to be
/// missing and panics with the error.
///
/// Fix: the matched file handle was bound to an unused `f` (shadowing the
/// first binding and triggering an unused-variable warning); it is now `_file`.
fn result_enum() {
    println!("RESULT ENUM");
    let _file = match File::open("blablabla.txt") {
        Ok(file) => file,
        Err(err) => panic!("There was a problem opening the file: {:?}", err),
    };
    println!("");
}
| true |
ef10d4797b5eea7a79da62b482e93a8969483483
|
Rust
|
Aceeri/raytracer
|
/src/color.rs
|
UTF-8
| 162 | 3.109375 | 3 |
[] |
no_license
|
/// A pixel color. Currently only 8-bit-per-channel RGB is supported.
#[derive(Debug, Copy, Clone)]
pub enum Color {
    /// Red, green, blue — one byte per channel.
    Bit8(u8, u8, u8),
}

impl Default for Color {
    /// Black (all channels zero) in 8-bit RGB.
    fn default() -> Self {
        Self::Bit8(0, 0, 0)
    }
}
| true |
f28fa5a391c6e0a150982960b9de87c56e6587e3
|
Rust
|
bcmyers/num-format
|
/num-format/tests/test_non_zero.rs
|
UTF-8
| 9,984 | 2.9375 | 3 |
[
"MIT",
"Apache-2.0"
] |
permissive
|
mod common;
use core::num::{NonZeroU128, NonZeroU16, NonZeroU32, NonZeroU64, NonZeroU8, NonZeroUsize};
use num_format::{Buffer, CustomFormat};
#[cfg(feature = "std")]
use num_format::{ToFormattedString, WriteFormatted};
use crate::common::POLICIES;
// Each test below checks that Buffer::write_formatted, ToFormattedString and
// WriteFormatted all agree on 1 and the type's MAX across the five shared
// POLICIES (comma, multi-byte separator, uneven grouping, and two no-grouping
// formats). The driver loop is identical in every test; only the NonZero type
// and expected strings differ.
#[test]
fn test_non_zero_u8() {
    let test_cases: &[(&str, NonZeroU8, &CustomFormat)] = &[
        ("1", NonZeroU8::new(1).unwrap(), &POLICIES[0]),
        ("1", NonZeroU8::new(1).unwrap(), &POLICIES[1]),
        ("1", NonZeroU8::new(1).unwrap(), &POLICIES[2]),
        ("1", NonZeroU8::new(1).unwrap(), &POLICIES[3]),
        ("1", NonZeroU8::new(1).unwrap(), &POLICIES[4]),
        ("255", NonZeroU8::new(std::u8::MAX).unwrap(), &POLICIES[0]),
        ("255", NonZeroU8::new(std::u8::MAX).unwrap(), &POLICIES[1]),
        ("255", NonZeroU8::new(std::u8::MAX).unwrap(), &POLICIES[2]),
        ("255", NonZeroU8::new(std::u8::MAX).unwrap(), &POLICIES[3]),
        ("255", NonZeroU8::new(std::u8::MAX).unwrap(), &POLICIES[4]),
    ];
    for (expected, input, format) in test_cases {
        // Buffer
        let mut buf = Buffer::default();
        buf.write_formatted(input, *format);
        assert_eq!(*expected, buf.as_str());
        #[cfg(feature = "std")]
        {
            // ToFormattedString
            assert_eq!(expected.to_string(), input.to_formatted_string(*format));
            // WriteFormatted
            let mut s = String::new();
            s.write_formatted(input, *format).unwrap();
            assert_eq!(expected.to_string(), s);
        }
    }
}
#[test]
fn test_non_zero_u16() {
    let test_cases: &[(&str, NonZeroU16, &CustomFormat)] = &[
        ("1", NonZeroU16::new(1).unwrap(), &POLICIES[0]),
        ("1", NonZeroU16::new(1).unwrap(), &POLICIES[1]),
        ("1", NonZeroU16::new(1).unwrap(), &POLICIES[2]),
        ("1", NonZeroU16::new(1).unwrap(), &POLICIES[3]),
        ("1", NonZeroU16::new(1).unwrap(), &POLICIES[4]),
        (
            "65,535",
            NonZeroU16::new(std::u16::MAX).unwrap(),
            &POLICIES[0],
        ),
        (
            "65𠜱535",
            NonZeroU16::new(std::u16::MAX).unwrap(),
            &POLICIES[1],
        ),
        (
            "65𠜱535",
            NonZeroU16::new(std::u16::MAX).unwrap(),
            &POLICIES[2],
        ),
        (
            "65535",
            NonZeroU16::new(std::u16::MAX).unwrap(),
            &POLICIES[3],
        ),
        (
            "65535",
            NonZeroU16::new(std::u16::MAX).unwrap(),
            &POLICIES[4],
        ),
    ];
    for (expected, input, format) in test_cases {
        // Buffer
        let mut buf = Buffer::default();
        buf.write_formatted(input, *format);
        assert_eq!(*expected, buf.as_str());
        #[cfg(feature = "std")]
        {
            // ToFormattedString
            assert_eq!(expected.to_string(), input.to_formatted_string(*format));
            // WriteFormatted
            let mut s = String::new();
            s.write_formatted(input, *format).unwrap();
            assert_eq!(expected.to_string(), s);
        }
    }
}
#[test]
fn test_non_zero_u32() {
    let test_cases: &[(&str, NonZeroU32, &CustomFormat)] = &[
        ("1", NonZeroU32::new(1).unwrap(), &POLICIES[0]),
        ("1", NonZeroU32::new(1).unwrap(), &POLICIES[1]),
        ("1", NonZeroU32::new(1).unwrap(), &POLICIES[2]),
        ("1", NonZeroU32::new(1).unwrap(), &POLICIES[3]),
        ("1", NonZeroU32::new(1).unwrap(), &POLICIES[4]),
        (
            "4,294,967,295",
            NonZeroU32::new(std::u32::MAX).unwrap(),
            &POLICIES[0],
        ),
        (
            "4𠜱294𠜱967𠜱295",
            NonZeroU32::new(std::u32::MAX).unwrap(),
            &POLICIES[1],
        ),
        (
            "4𠜱29𠜱49𠜱67𠜱295",
            NonZeroU32::new(std::u32::MAX).unwrap(),
            &POLICIES[2],
        ),
        (
            "4294967295",
            NonZeroU32::new(std::u32::MAX).unwrap(),
            &POLICIES[3],
        ),
        (
            "4294967295",
            NonZeroU32::new(std::u32::MAX).unwrap(),
            &POLICIES[4],
        ),
    ];
    for (expected, input, format) in test_cases {
        // Buffer
        let mut buf = Buffer::default();
        buf.write_formatted(input, *format);
        assert_eq!(*expected, buf.as_str());
        #[cfg(feature = "std")]
        {
            // ToFormattedString
            assert_eq!(expected.to_string(), input.to_formatted_string(*format));
            // WriteFormatted
            let mut s = String::new();
            s.write_formatted(input, *format).unwrap();
            assert_eq!(expected.to_string(), s);
        }
    }
}
#[test]
fn test_non_zero_usize() {
    let test_cases: &[(&str, NonZeroUsize, &CustomFormat)] = &[
        ("1", NonZeroUsize::new(1).unwrap(), &POLICIES[0]),
        ("1", NonZeroUsize::new(1).unwrap(), &POLICIES[1]),
        ("1", NonZeroUsize::new(1).unwrap(), &POLICIES[2]),
        ("1", NonZeroUsize::new(1).unwrap(), &POLICIES[3]),
        ("1", NonZeroUsize::new(1).unwrap(), &POLICIES[4]),
        (
            "18,446,744,073,709,551,615",
            NonZeroUsize::new(std::usize::MAX).unwrap(),
            &POLICIES[0],
        ),
        (
            "18𠜱446𠜱744𠜱073𠜱709𠜱551𠜱615",
            NonZeroUsize::new(std::usize::MAX).unwrap(),
            &POLICIES[1],
        ),
        (
            "1𠜱84𠜱46𠜱74𠜱40𠜱73𠜱70𠜱95𠜱51𠜱615",
            NonZeroUsize::new(std::usize::MAX).unwrap(),
            &POLICIES[2],
        ),
        (
            "18446744073709551615",
            NonZeroUsize::new(std::usize::MAX).unwrap(),
            &POLICIES[3],
        ),
        (
            "18446744073709551615",
            NonZeroUsize::new(std::usize::MAX).unwrap(),
            &POLICIES[4],
        ),
    ];
    for (expected, input, format) in test_cases {
        // Buffer
        let mut buf = Buffer::default();
        buf.write_formatted(input, *format);
        assert_eq!(*expected, buf.as_str());
        #[cfg(feature = "std")]
        {
            // ToFormattedString
            assert_eq!(expected.to_string(), input.to_formatted_string(*format));
            // WriteFormatted
            let mut s = String::new();
            s.write_formatted(input, *format).unwrap();
            assert_eq!(expected.to_string(), s);
        }
    }
}
#[test]
fn test_non_zero_u64() {
    let test_cases: &[(&str, NonZeroU64, &CustomFormat)] = &[
        ("1", NonZeroU64::new(1).unwrap(), &POLICIES[0]),
        ("1", NonZeroU64::new(1).unwrap(), &POLICIES[1]),
        ("1", NonZeroU64::new(1).unwrap(), &POLICIES[2]),
        ("1", NonZeroU64::new(1).unwrap(), &POLICIES[3]),
        ("1", NonZeroU64::new(1).unwrap(), &POLICIES[4]),
        (
            "18,446,744,073,709,551,615",
            NonZeroU64::new(std::u64::MAX).unwrap(),
            &POLICIES[0],
        ),
        (
            "18𠜱446𠜱744𠜱073𠜱709𠜱551𠜱615",
            NonZeroU64::new(std::u64::MAX).unwrap(),
            &POLICIES[1],
        ),
        (
            "1𠜱84𠜱46𠜱74𠜱40𠜱73𠜱70𠜱95𠜱51𠜱615",
            NonZeroU64::new(std::u64::MAX).unwrap(),
            &POLICIES[2],
        ),
        (
            "18446744073709551615",
            NonZeroU64::new(std::u64::MAX).unwrap(),
            &POLICIES[3],
        ),
        (
            "18446744073709551615",
            NonZeroU64::new(std::u64::MAX).unwrap(),
            &POLICIES[4],
        ),
    ];
    for (expected, input, format) in test_cases {
        // Buffer
        let mut buf = Buffer::default();
        buf.write_formatted(input, *format);
        assert_eq!(*expected, buf.as_str());
        #[cfg(feature = "std")]
        {
            // ToFormattedString
            assert_eq!(expected.to_string(), input.to_formatted_string(*format));
            // WriteFormatted
            let mut s = String::new();
            s.write_formatted(input, *format).unwrap();
            assert_eq!(expected.to_string(), s);
        }
    }
}
#[test]
fn test_non_zero_u128() {
    let test_cases: &[(&str, NonZeroU128, &CustomFormat)] = &[
        ("1", NonZeroU128::new(1).unwrap(), &POLICIES[0]),
        ("1", NonZeroU128::new(1).unwrap(), &POLICIES[1]),
        ("1", NonZeroU128::new(1).unwrap(), &POLICIES[2]),
        ("1", NonZeroU128::new(1).unwrap(), &POLICIES[3]),
        ("1", NonZeroU128::new(1).unwrap(), &POLICIES[4]),
        (
            "340,282,366,920,938,463,463,374,607,431,768,211,455",
            NonZeroU128::new(std::u128::MAX).unwrap(),
            &POLICIES[0],
        ),
        (
            "340𠜱282𠜱366𠜱920𠜱938𠜱463𠜱463𠜱374𠜱607𠜱431𠜱768𠜱211𠜱455",
            NonZeroU128::new(std::u128::MAX).unwrap(),
            &POLICIES[1],
        ),
        (
            "34𠜱02𠜱82𠜱36𠜱69𠜱20𠜱93𠜱84𠜱63𠜱46𠜱33𠜱74𠜱60𠜱74𠜱31𠜱76𠜱82𠜱11𠜱455",
            NonZeroU128::new(std::u128::MAX).unwrap(),
            &POLICIES[2],
        ),
        (
            "340282366920938463463374607431768211455",
            NonZeroU128::new(std::u128::MAX).unwrap(),
            &POLICIES[3],
        ),
        (
            "340282366920938463463374607431768211455",
            NonZeroU128::new(std::u128::MAX).unwrap(),
            &POLICIES[4],
        ),
    ];
    for (expected, input, format) in test_cases {
        // Buffer
        let mut buf = Buffer::default();
        buf.write_formatted(input, *format);
        assert_eq!(*expected, buf.as_str());
        #[cfg(feature = "std")]
        {
            // ToFormattedString
            assert_eq!(expected.to_string(), input.to_formatted_string(*format));
            // WriteFormatted
            let mut s = String::new();
            s.write_formatted(input, *format).unwrap();
            assert_eq!(expected.to_string(), s);
        }
    }
}
| true |
a2c1d06b439d1ca6cb0201ef8e0df6b542fc25bf
|
Rust
|
sonicXconst1/kuna_agnostic
|
/src/trader.rs
|
UTF-8
| 3,408 | 2.953125 | 3 |
[] |
no_license
|
use agnostic::trade::{Trade, TradeResult};
use agnostic::order::OrderWithId;
/// Kuna-backed implementation of the `agnostic::market::Trader` interface.
pub struct Trader<TConnector> {
    // Authenticated Kuna API client, cloned into the async tasks spawned by
    // `create_order`/`delete_order`.
    private_client: std::sync::Arc<kuna_sdk::client::KunaClient<TConnector>>,
}
impl<TConnector> Trader<TConnector> {
    /// Wraps an authenticated Kuna client.
    pub fn new(
        private_client: std::sync::Arc<kuna_sdk::client::KunaClient<TConnector>>,
    ) -> Trader<TConnector> {
        Trader {
            private_client,
        }
    }
}
impl<TConnector> agnostic::market::Trader for Trader<TConnector>
where
    TConnector: hyper::client::connect::Connect + Send + Sync + Clone + 'static,
{
    // Both methods hand a clone of the shared client to a free async fn and
    // box the resulting future, as required by the `agnostic` trait signature.
    fn create_order(
        &self,
        order: agnostic::order::Order
    ) -> agnostic::market::Future<Result<Trade, String>> {
        let future = create_order(self.private_client.clone(), order);
        Box::pin(future)
    }
    fn delete_order(&self, id: &str) -> agnostic::market::Future<Result<(), String>> {
        let future = delete_order(self.private_client.clone(), id.to_owned());
        Box::pin(future)
    }
}
/// Submits an order to Kuna and maps the exchange response back onto the
/// agnostic `Trade` type.
///
/// Returns `Trade::Market` (with the exchange-reported fill price when
/// available) for market orders and `Trade::Limit` for resting limit orders.
/// Errors are passed through as strings from the Kuna client.
async fn create_order<TConnector>(
    private_client: std::sync::Arc<kuna_sdk::client::KunaClient<TConnector>>,
    order: agnostic::order::Order,
) -> Result<Trade, String>
where
    TConnector: hyper::client::connect::Connect + Send + Sync + Clone + 'static
{
    use crate::convert;
    use agnostic::trading_pair::TradingPairConverter;
    // Translate the agnostic trading pair into Kuna's symbol vocabulary.
    let converter = convert::CoinConverter::default();
    let trading_pair = order.trading_pair.clone();
    let kuna_symbol = converter.to_pair(order.trading_pair.clone());
    use agnostic::trading_pair::Target;
    let target = match order.trading_pair.target {
        Target::Limit => kuna_sdk::base::Target::Limit,
        Target::Market => kuna_sdk::base::Target::Market,
    };
    use agnostic::trading_pair::Side;
    // Kuna encodes the side in the sign of the amount: buy > 0, sell < 0.
    let create_order = kuna_sdk::models::CreateOrder {
        symbol: kuna_symbol.to_string(),
        amount: match order.trading_pair.side {
            Side::Buy => order.amount,
            Side::Sell => -order.amount,
        },
        price: order.price,
        order_type: target.to_string(),
    };
    let price = order.price;
    let amount = order.amount;
    match private_client.create_order(create_order).await {
        Ok(result) => Ok(match order.trading_pair.target {
            Target::Market => Trade::Market(TradeResult {
                id: result.id.to_string(),
                trading_pair,
                // Prefer the exchange-reported fill price; fall back to the
                // requested price when Kuna omits it.
                // (Was `map_or(price, |price| price)` — equivalent but
                // obscured by shadowing; `unwrap_or` states the intent.)
                price: result.price.unwrap_or(price),
                amount,
            }),
            Target::Limit => Trade::Limit(OrderWithId {
                id: result.id.to_string(),
                trading_pair,
                price,
                amount,
            }),
        }),
        Err(error) => Err(error),
    }
}
/// Cancels the Kuna order with the given id.
///
/// Kuna identifies orders numerically, so the string id is parsed to `i32`
/// first; a non-numeric id yields a descriptive error without hitting the API.
async fn delete_order<TConnector>(
    private_client: std::sync::Arc<kuna_sdk::client::KunaClient<TConnector>>,
    id: String,
) -> Result<(), String>
where
    TConnector: hyper::client::connect::Connect + Send + Sync + Clone + 'static
{
    // Idiomatic parse + `?` replaces the explicit `FromStr` import and match.
    let order_id: i32 = id
        .parse()
        .map_err(|error| format!("Failed to convert id to i32: {}. {}", id, error))?;
    let cancel_order = kuna_sdk::models::CancelOrderRequest { order_id };
    private_client.delete_order(cancel_order).await.map(|_| ())
}
| true |
46f373cdb81e25daf2db4bdf1ca431afa7304b56
|
Rust
|
Spu7Nix/gd_mesh_render_tools
|
/src/levelstring.rs
|
UTF-8
| 5,622 | 2.6875 | 3 |
[] |
no_license
|
use super::triangle::Vector;
use std::collections::HashMap;
/// A single Geometry Dash level object: a bag of numeric-keyed properties
/// (key 1 = object id, keys 2/3 = x/y position, ...).
pub struct GDObject {
    props: HashMap<u8, String>,
}
impl GDObject {
    /// Creates an object with only its object-id property (key `1`) set.
    pub fn new(id: &str) -> Self {
        let mut props = HashMap::<u8, String>::new();
        props.insert(1, id.to_string());
        GDObject { props }
    }
    /// Sets (or overwrites) a raw property by its numeric key.
    pub fn set_prop(&mut self, key: u8, value: &str) {
        // Was `(*self).props.insert(...)` — the explicit deref is redundant.
        self.props.insert(key, value.to_string());
    }
    /// Sets the position properties (keys 2/3); one world unit = 30 editor units.
    pub fn set_pos(&mut self, pos: Vector) {
        self.set_prop(2, &(pos.x * 30.0).to_string());
        self.set_prop(3, &(pos.y * 30.0).to_string());
    }
    /// Serializes the object as `key,value,key,value,...;`.
    ///
    /// NOTE(review): `HashMap` iteration order is unspecified, so property
    /// order varies between runs; output is valid but not byte-stable. Switch
    /// the field to `BTreeMap` if deterministic output is ever required.
    fn get_obj_string(&self) -> String {
        let mut obj_str = String::new();
        for (key, val) in self.props.iter() {
            obj_str += &format!("{},{},", key, val);
        }
        obj_str += ";";
        obj_str
    }
}
/// Concatenates all serialized objects after the leading `;` separator.
pub fn create_level_string(objects: Vec<GDObject>) -> String {
    objects.iter().fold(String::from(";"), |mut level, obj| {
        level.push_str(&obj.get_obj_string());
        level
    })
}
use quick_xml::Writer;
use std::fs;
use std::io::Cursor;
use std::path::PathBuf;
use base64;
use libflate::{gzip, zlib};
//use std::io::Read;
/// XORs every byte with `key`. The operation is symmetric: applying it twice
/// with the same key restores the original data.
fn xor(data: Vec<u8>, key: u8) -> Vec<u8> {
    data.into_iter().map(|byte| byte ^ key).collect()
}
use quick_xml::events::{BytesText, Event};
use quick_xml::Reader;
use std::io::BufReader;
/// Returns the path to Geometry Dash's local-levels savefile under
/// `%localappdata%`.
///
/// # Errors
/// Returns the environment error as a string when the `localappdata`
/// variable is not set (e.g. on non-Windows hosts).
pub fn get_local_levels_path() -> Result<PathBuf, String> {
    // `map_err` + `?` replaces the explicit match-and-return on env failure.
    let appdata = std::env::var("localappdata").map_err(|e| e.to_string())?;
    Ok(PathBuf::from(appdata).join("GeometryDash/CCLocalLevels.dat"))
}
/// Injects level string `ls` into the CCLocalLevels.dat savefile.
///
/// Steps: (1) decrypt the savefile (XOR 11, URL-safe base64, gzip);
/// (2) stream its plist XML, and when the text node following a `k4` key is
/// reached, replace it with the gzip+base64 encoding of `ls`; (3) re-encrypt
/// (zlib body re-wrapped with a gzip header/CRC trailer, base64, XOR 11) and
/// write back. Panics on any I/O, decode, or XML error.
pub fn encrypt_level_string(ls: String) {
    let path = get_local_levels_path().unwrap();
    let file_content = fs::read_to_string(path.clone()).unwrap();
    //decrypting the savefile
    let xor_encrypted = xor(file_content.as_bytes().to_vec(), 11);
    // GD uses URL-safe base64 ('-'/'_'); normalize to the standard alphabet
    // and strip padding NULs before decoding.
    let replaced = String::from_utf8(xor_encrypted)
        .unwrap()
        .replace("-", "+")
        .replace("_", "/")
        .replace("\0", "");
    let b64 = base64::decode(replaced.as_str()).unwrap();
    let decoder = gzip::Decoder::new(&b64[..]).unwrap();
    //encrypt the ls
    //encrypting level string
    /*def encrypt(dls):
        fin = gzip.compress(dls)
        fin = base64.b64encode(fin)
        fin = fin.decode("utf-8").replace('+', '-').replace('/', '_')
        fin = 'H4sIAAAAAAAAC' + fin[13:]
        return(fin)*/
    //setting level string
    let mut reader = Reader::from_reader(BufReader::new(decoder));
    reader.trim_text(true);
    let mut writer = Writer::new(Cursor::new(Vec::new()));
    let mut buf = Vec::new();
    // k4 holds the level string, k2 the level name; the flags below mark that
    // the NEXT text event is the value for that key.
    let mut k4_detected = false;
    let mut done = false;
    let mut k2_detected = false;
    //println!("{}", old_ls);
    loop {
        match reader.read_event(&mut buf) {
            // unescape and decode the text event using the reader encoding
            Ok(Event::Text(e)) => {
                let text = e.unescape_and_decode(&reader).unwrap();
                if k4_detected {
                    // Replace the old level string with the encrypted new one.
                    let encrypted_ls: String = {
                        let mut ls_encoder = gzip::Encoder::new(Vec::new()).unwrap();
                        ls_encoder.write_all(&ls.as_bytes()).unwrap();
                        let b64_encrypted =
                            base64::encode(&ls_encoder.finish().into_result().unwrap());
                        let fin = b64_encrypted.replace("+", "-").replace("/", "_");
                        // GD expects this fixed 13-char gzip-header prefix.
                        "H4sIAAAAAAAAC".to_string() + &fin[13..]
                    };
                    assert!(writer
                        .write_event(Event::Text(BytesText::from_plain_str(&encrypted_ls)))
                        .is_ok());
                    done = true;
                    k4_detected = false;
                } else {
                    assert!(writer.write_event(Event::Text(e)).is_ok())
                }
                if k2_detected {
                    println!("Writing to level: {}", text);
                    k2_detected = false;
                }
                if !done && text == "k4" {
                    k4_detected = true
                }
                if !done && text == "k2" {
                    k2_detected = true
                }
            }
            Ok(Event::Eof) => break, // exits the loop when reaching end of file
            Err(e) => panic!("Error at position {}: {:?}", reader.buffer_position(), e),
            Ok(e) => assert!(writer.write_event(e).is_ok()),
        }
        // if we don't keep a borrow elsewhere, we can clear the buffer to keep memory usage low
        buf.clear();
    }
    let bytes = writer.into_inner().into_inner();
    //encrypt level save
    use std::io::Write;
    // Compress with zlib, then splice the raw deflate body into a gzip frame:
    // fixed 10-byte gzip header + body + CRC32 + uncompressed size (LE).
    let mut encoder = zlib::Encoder::new(Vec::new()).unwrap();
    encoder.write_all(&bytes).unwrap();
    let compressed = encoder.finish().into_result().unwrap();
    use crc32fast::Hasher;
    let mut hasher = Hasher::new();
    hasher.update(&bytes);
    let checksum = hasher.finalize();
    let data_size = bytes.len() as u32;
    let mut with_signature = b"\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\x0b".to_vec();
    // Skip the 2-byte zlib header and 4-byte Adler-32 trailer.
    with_signature.extend(&compressed[2..compressed.len() - 4]);
    with_signature.extend(checksum.to_le_bytes().to_vec());
    with_signature.extend(data_size.to_le_bytes().to_vec());
    let encoded = base64::encode(&with_signature)
        .replace("+", "-")
        .replace("/", "_")
        .as_bytes()
        .to_vec();
    let fin = xor(encoded, 11);
    assert!(fs::write(path, fin).is_ok());
}
| true |
0ca4e5bac6b2be5a4cd01b485e372c06c32b3187
|
Rust
|
Pratyush/fancy-garbling
|
/src/wire.rs
|
UTF-8
| 17,599 | 3.265625 | 3 |
[
"MIT"
] |
permissive
|
//! Low-level operations on wire-labels, the basic building block of garbled circuits.
use crate::fancy::HasModulus;
use crate::util;
use rand::{CryptoRng, Rng, RngCore};
use scuttlebutt::{Block, AES_HASH};
/// The core wire-label type.
#[derive(Debug, Clone, PartialEq)]
pub enum Wire {
/// Representation of a `mod-2` wire.
Mod2 {
/// A 128-bit value.
val: Block,
},
/// Representation of a `mod-3` wire.
///
/// We represent a `mod-3` wire by 64 `mod-3` elements. These elements are
/// stored as follows: the least-significant bits of each element are stored
/// in `lsb` and the most-significant bits of each element are stored in
/// `msb`. This representation allows for efficient addition and
/// multiplication as described here by the paper "Hardware Implementation
/// of Finite Fields of Characteristic Three." D. Page, N.P. Smart. CHES
/// 2002. Link:
/// <https://link.springer.com/content/pdf/10.1007/3-540-36400-5_38.pdf>.
Mod3 {
/// The least-significant bits of each `mod-3` element.
lsb: u64,
/// The most-significant bits of each `mod-3` element.
msb: u64,
},
/// Representation of a `mod-q` wire.
///
/// We represent a `mod-q` wire for `q > 3` by the modulus `q` alongside a
/// list of `mod-q` digits.
ModN {
/// The modulus of this wire-label.
q: u16,
/// A list of `mod-q` digits.
ds: Vec<u16>,
},
}
impl std::default::Default for Wire {
fn default() -> Self {
Wire::Mod2 {
val: Block::default(),
}
}
}
impl HasModulus for Wire {
    // The modulus is implicit in the variant for 2 and 3, explicit for ModN.
    #[inline]
    fn modulus(&self) -> u16 {
        match self {
            Wire::Mod2 { .. } => 2,
            Wire::Mod3 { .. } => 3,
            Wire::ModN { q, .. } => *q,
        }
    }
}
impl Wire {
    /// Get the digits of the wire.
    ///
    /// Bugfix: the `Mod3` arm previously combined the two bit-planes with `&`,
    /// i.e. `(lsb_bit & 1) & ((msb_bit & 1) << 1)`. Bit 0 and bit 1 never
    /// overlap, so every digit came out 0. The planes must be OR-ed together
    /// to form the 2-bit digit `lsb_bit | (msb_bit << 1)`, matching the
    /// encoding used by `rand` and `hashback`.
    #[inline]
    pub fn digits(&self) -> Vec<u16> {
        match self {
            Wire::Mod2 { val } => (0..128)
                .map(|i| ((u128::from(*val) >> i) as u16) & 1)
                .collect(),
            Wire::Mod3 { lsb, msb } => (0..64)
                .map(|i| (((lsb >> i) as u16) & 1) | ((((msb >> i) as u16) & 1) << 1))
                .collect(),
            Wire::ModN { ds, .. } => ds.clone(),
        }
    }
    fn _from_block_lookup(inp: Block, q: u16) -> Self {
        debug_assert!(q < 256);
        debug_assert!(base_conversion::lookup_defined_for_mod(q));
        let bytes: [u8; 16] = inp.into();
        // The digits in position 15 will be the longest, so we can use stateful
        // (fast) base `q` addition.
        let mut ds = base_conversion::lookup_digits_mod_at_position(bytes[15], q, 15).to_vec();
        for i in 0..15 {
            let cs = base_conversion::lookup_digits_mod_at_position(bytes[i], q, i);
            util::base_q_add_eq(&mut ds, &cs, q);
        }
        // Drop the digits we won't be able to pack back in again, especially if
        // they get multiplied.
        ds.truncate(util::digits_per_u128(q));
        Wire::ModN { q, ds }
    }
    /// Unpack the wire represented by a `Block` with modulus `q`. Assumes that
    /// the block was constructed through the `Wire` API.
    #[inline]
    pub fn from_block(inp: Block, q: u16) -> Self {
        if q == 2 {
            Wire::Mod2 { val: inp }
        } else if q == 3 {
            // Split the 128-bit block into the two 64-bit mod-3 bit-planes.
            let inp = u128::from(inp);
            let lsb = inp as u64;
            let msb = (inp >> 64) as u64;
            debug_assert_eq!(lsb & msb, 0);
            Wire::Mod3 { lsb, msb }
        } else if q < 256 && base_conversion::lookup_defined_for_mod(q) {
            Self::_from_block_lookup(inp, q)
        } else {
            Wire::ModN {
                q,
                ds: util::as_base_q_u128(u128::from(inp), q),
            }
        }
    }
    /// Pack the wire into a `Block`.
    #[inline]
    pub fn as_block(&self) -> Block {
        match self {
            Wire::Mod2 { val } => *val,
            Wire::Mod3 { lsb, msb } => Block::from(((*msb as u128) << 64) | (*lsb as u128)),
            Wire::ModN { q, ref ds } => Block::from(util::from_base_q(ds, *q)),
        }
    }
    /// The zero wire with modulus `q`.
    #[inline]
    pub fn zero(q: u16) -> Self {
        match q {
            1 => panic!("[Wire::zero] mod 1 not allowed!"),
            2 => Wire::Mod2 {
                val: Default::default(),
            },
            3 => Wire::Mod3 {
                lsb: Default::default(),
                msb: Default::default(),
            },
            _ => Wire::ModN {
                q,
                ds: vec![0; util::digits_per_u128(q)],
            },
        }
    }
    /// Get a random wire label mod `q`, with the first digit set to `1`.
    #[inline]
    pub fn rand_delta<R: CryptoRng + RngCore>(rng: &mut R, q: u16) -> Self {
        let mut w = Self::rand(rng, q);
        match w {
            Wire::Mod2 { ref mut val } => *val = val.set_lsb(),
            Wire::Mod3 {
                ref mut lsb,
                ref mut msb,
            } => {
                // We want the color digit to be `1`, which requires setting the
                // appropriate `lsb` element to `1` and the appropriate `msb`
                // element to `0`.
                *lsb |= 1;
                *msb &= 0xFFFF_FFFF_FFFF_FFFE;
            }
            Wire::ModN { ref mut ds, .. } => ds[0] = 1,
        }
        w
    }
    /// Get the color digit of the wire.
    #[inline]
    pub fn color(&self) -> u16 {
        match self {
            Wire::Mod2 { val } => val.lsb() as u16,
            Wire::Mod3 { lsb, msb } => (((msb & 1) as u16) << 1) | ((lsb & 1) as u16),
            Wire::ModN { ref ds, .. } => ds[0],
        }
    }
    /// Add two wires digit-wise, returning a new wire.
    #[inline]
    pub fn plus(&self, other: &Self) -> Self {
        self.clone().plus_mov(other)
    }
    /// Add another wire digit-wise into this one. Assumes that both wires have
    /// the same modulus.
    #[inline]
    pub fn plus_eq<'a>(&'a mut self, other: &Wire) -> &'a mut Wire {
        match (&mut *self, other) {
            (Wire::Mod2 { val: ref mut x }, Wire::Mod2 { val: ref y }) => {
                *x ^= *y;
            }
            (
                Wire::Mod3 {
                    lsb: ref mut a1,
                    msb: ref mut a2,
                },
                Wire::Mod3 { lsb: b1, msb: b2 },
            ) => {
                // As explained in the cited paper above, the following
                // operations do element-wise addition.
                let t = (*a1 | b2) ^ (*a2 | b1);
                let c1 = (*a2 | b2) ^ t;
                let c2 = (*a1 | b1) ^ t;
                *a1 = c1;
                *a2 = c2;
            }
            (
                Wire::ModN {
                    q: ref xmod,
                    ds: ref mut xs,
                },
                Wire::ModN {
                    q: ref ymod,
                    ds: ref ys,
                },
            ) => {
                debug_assert_eq!(xmod, ymod);
                debug_assert_eq!(xs.len(), ys.len());
                // Branch-free-ish modular add: subtract q and undo on borrow.
                xs.iter_mut().zip(ys.iter()).for_each(|(x, &y)| {
                    let (zp, overflow) = (*x + y).overflowing_sub(*xmod);
                    *x = if overflow { *x + y } else { zp }
                });
            }
            _ => panic!("[Wire::plus_eq] unequal moduli!"),
        }
        self
    }
    /// Add another wire into this one, consuming it for chained computations.
    #[inline]
    pub fn plus_mov(mut self, other: &Wire) -> Wire {
        self.plus_eq(other);
        self
    }
    /// Multiply each digit by a constant `c mod q`, returning a new wire.
    #[inline]
    pub fn cmul(&self, c: u16) -> Self {
        self.clone().cmul_mov(c)
    }
    /// Multiply each digit by a constant `c mod q`.
    #[inline]
    pub fn cmul_eq(&mut self, c: u16) -> &mut Wire {
        match self {
            Wire::Mod2 { val } => {
                if c & 1 == 0 {
                    *val = Block::default();
                }
            }
            Wire::Mod3 { lsb, msb } => match c {
                0 => {
                    *lsb = 0;
                    *msb = 0;
                }
                1 => {}
                2 => {
                    // Multiplication by two is the same as negation in `mod-3`,
                    // which just involves swapping `lsb` and `msb`.
                    std::mem::swap(lsb, msb);
                }
                c => {
                    self.cmul_eq(c % 3);
                }
            },
            Wire::ModN { q, ds } => {
                // Widen to u32 so `d * c` cannot overflow before the reduction.
                ds.iter_mut()
                    .for_each(|d| *d = (*d as u32 * c as u32 % *q as u32) as u16);
            }
        }
        self
    }
    /// Multiply each digit by a constant `c mod q`, consuming it for chained computations.
    #[inline]
    pub fn cmul_mov(mut self, c: u16) -> Wire {
        self.cmul_eq(c);
        self
    }
    /// Negate all the digits `mod q`, returning a new wire.
    #[inline]
    pub fn negate(&self) -> Self {
        self.clone().negate_mov()
    }
    /// Negate all the digits mod q.
    #[inline]
    pub fn negate_eq(&mut self) -> &mut Wire {
        match self {
            Wire::Mod2 { val } => *val = val.flip(),
            Wire::Mod3 { lsb, msb } => {
                // Negation just involves swapping `lsb` and `msb`.
                std::mem::swap(lsb, msb);
            }
            Wire::ModN { q, ds } => {
                ds.iter_mut().for_each(|d| {
                    if *d > 0 {
                        *d = *q - *d;
                    } else {
                        *d = 0;
                    }
                });
            }
        }
        self
    }
    /// Negate all the digits `mod q`, consuming it for chained computations.
    #[inline]
    pub fn negate_mov(mut self) -> Wire {
        self.negate_eq();
        self
    }
    /// Subtract two wires, returning the result.
    #[inline]
    pub fn minus(&self, other: &Wire) -> Wire {
        self.clone().minus_mov(other)
    }
    /// Subtract a wire from this one.
    #[inline]
    pub fn minus_eq<'a>(&'a mut self, other: &Wire) -> &'a mut Wire {
        match *self {
            // In mod-2, subtraction and addition are both XOR.
            Wire::Mod2 { .. } => self.plus_eq(&other),
            _ => self.plus_eq(&other.negate()),
        }
    }
    /// Subtract a wire from this one, consuming it for chained computations.
    #[inline]
    pub fn minus_mov(mut self, other: &Wire) -> Wire {
        self.minus_eq(other);
        self
    }
    /// Get a random wire `mod q`.
    #[inline]
    pub fn rand<R: CryptoRng + RngCore>(rng: &mut R, q: u16) -> Wire {
        if q == 2 {
            Wire::Mod2 { val: rng.gen() }
        } else if q == 3 {
            // Generate 64 mod-three values and then embed them into `lsb` and
            // `msb`.
            let mut lsb = 0u64;
            let mut msb = 0u64;
            for (i, v) in (0..64).map(|_| rng.gen::<u8>() % 3).enumerate() {
                lsb |= ((v & 1) as u64) << i;
                msb |= (((v >> 1) & 1) as u64) << i;
            }
            debug_assert_eq!(lsb & msb, 0);
            Wire::Mod3 { lsb, msb }
        } else {
            let ds = (0..util::digits_per_u128(q))
                .map(|_| rng.gen::<u16>() % q)
                .collect();
            Wire::ModN { q, ds }
        }
    }
    /// Compute the hash of this wire.
    ///
    /// Uses fixed-key AES.
    #[inline]
    pub fn hash(&self, tweak: Block) -> Block {
        AES_HASH.tccr_hash(tweak, self.as_block())
    }
    /// Compute the hash of this wire, converting the result back to a wire.
    ///
    /// Uses fixed-key AES.
    #[inline]
    pub fn hashback(&self, tweak: Block, q: u16) -> Wire {
        if q == 3 {
            let block = self.hash(tweak);
            // We now have to convert `block` into a valid `Mod3` encoding. We
            // do this by using the `base_conversion` lookup capabilities to
            // build a `ModN` encoding, and then map this `ModN` encoding to a
            // `Mod3` encoding.
            let mut lsb = 0u64;
            let mut msb = 0u64;
            match Self::_from_block_lookup(block, q) {
                Wire::ModN { ds, .. } => {
                    for (i, v) in ds.iter().enumerate() {
                        lsb |= ((v & 1) as u64) << i;
                        msb |= (((v >> 1) & 1u16) as u64) << i;
                    }
                    Wire::Mod3 { lsb, msb }
                }
                _ => panic!("[Wire::hashback] should never get here!"),
            }
        } else {
            Self::from_block(self.hash(tweak), q)
        }
    }
}
////////////////////////////////////////////////////////////////////////////////
// tests
#[cfg(test)]
mod tests {
    use super::*;
    use crate::util::RngExt;
    use itertools::Itertools;
    use rand::thread_rng;
    // Block <-> Wire round-trips must be lossless for usable blocks.
    #[test]
    fn packing() {
        let ref mut rng = thread_rng();
        for _ in 0..100 {
            let q = 2 + (rng.gen_u16() % 111);
            let w = rng.gen_usable_block(q);
            let x = Wire::from_block(w, q);
            let y = x.as_block();
            assert_eq!(w, y);
            let z = Wire::from_block(y, q);
            assert_eq!(x, z);
        }
    }
    // The table-lookup base conversion must agree with the direct conversion.
    #[test]
    fn base_conversion_lookup_method() {
        let ref mut rng = thread_rng();
        for _ in 0..1000 {
            let q = 5 + (rng.gen_u16() % 110);
            let x = rng.gen_u128();
            let w = Wire::from_block(Block::from(x), q);
            let should_be = util::as_base_q_u128(x, q);
            assert_eq!(w.digits(), should_be, "x={} q={}", x, q);
        }
    }
    // NOTE(review): the non-zero assertions below are probabilistic — a hash
    // output with an all-zero half would fail them, with negligible odds.
    #[test]
    fn hash() {
        let mut rng = thread_rng();
        for _ in 0..100 {
            let q = 2 + (rng.gen_u16() % 110);
            let x = Wire::rand(&mut rng, q);
            let y = x.hashback(Block::from(1u128), q);
            assert!(x != y);
            match y {
                Wire::Mod2 { val } => assert!(u128::from(val) > 0),
                Wire::Mod3 { lsb, msb } => assert!(lsb > 0 && msb > 0),
                Wire::ModN { ds, .. } => assert!(!ds.iter().all(|&y| y == 0)),
            }
        }
    }
    // Negation must be a non-trivial involution: x != -x and -(-x) == x.
    // NOTE(review): `x != xneg` would fail for an all-zero random wire
    // (probability negligible).
    #[test]
    fn negation() {
        let ref mut rng = thread_rng();
        for _ in 0..1000 {
            let q = rng.gen_modulus();
            let x = Wire::rand(rng, q);
            let xneg = x.negate();
            assert!(x != xneg);
            let y = xneg.negate();
            assert_eq!(x, y);
        }
    }
    // The zero wire's digits are all zero.
    #[test]
    fn zero() {
        let mut rng = thread_rng();
        for _ in 0..1000 {
            let q = 3 + (rng.gen_u16() % 110);
            let z = Wire::zero(q);
            let ds = z.digits();
            assert_eq!(ds, vec![0; ds.len()], "q={}", q);
        }
    }
    // x - x == 0 for every modulus.
    #[test]
    fn subzero() {
        let mut rng = thread_rng();
        for _ in 0..1000 {
            let q = rng.gen_modulus();
            let x = Wire::rand(&mut rng, q);
            let z = Wire::zero(q);
            assert_eq!(x.minus(&x), z);
        }
    }
    // x + 0 == x for every modulus.
    #[test]
    fn pluszero() {
        let mut rng = thread_rng();
        for _ in 0..1000 {
            let q = rng.gen_modulus();
            let x = Wire::rand(&mut rng, q);
            assert_eq!(x.plus(&Wire::zero(q)), x);
        }
    }
    // Ring identities for plus/cmul/negate, plus the *_eq variants.
    #[test]
    fn arithmetic() {
        let mut rng = thread_rng();
        for _ in 0..1024 {
            let q = rng.gen_modulus();
            let x = Wire::rand(&mut rng, q);
            let y = Wire::rand(&mut rng, q);
            assert_eq!(x.cmul(0), Wire::zero(q));
            assert_eq!(x.cmul(q), Wire::zero(q));
            assert_eq!(x.plus(&x), x.cmul(2));
            assert_eq!(x.plus(&x).plus(&x), x.cmul(3));
            assert_eq!(x.negate().negate(), x);
            if q == 2 {
                assert_eq!(x.plus(&y), x.minus(&y));
            } else {
                assert_eq!(x.plus(&x.negate()), Wire::zero(q), "q={}", q);
                assert_eq!(x.minus(&y), x.plus(&y.negate()));
            }
            let mut w = x.clone();
            let z = w.plus(&y);
            w.plus_eq(&y);
            assert_eq!(w, z);
            w = x.clone();
            w.cmul_eq(2);
            assert_eq!(x.plus(&x), w);
            w = x.clone();
            w.negate_eq();
            assert_eq!(x.negate(), w);
        }
    }
    // Digit count must match the packing density for the modulus.
    #[test]
    fn ndigits_correct() {
        let mut rng = thread_rng();
        for _ in 0..1024 {
            let q = rng.gen_modulus();
            let x = Wire::rand(&mut rng, q);
            assert_eq!(x.digits().len(), util::digits_per_u128(q));
        }
    }
    // Hashing must be thread-safe: parallel results equal sequential ones.
    #[test]
    fn parallel_hash() {
        let n = 1000;
        let mut rng = thread_rng();
        let q = rng.gen_modulus();
        let ws = (0..n).map(|_| Wire::rand(&mut rng, q)).collect_vec();
        let hashes = crossbeam::scope(|scope| {
            let hs = ws
                .iter()
                .map(|w| scope.spawn(move |_| w.hash(Block::default())))
                .collect_vec();
            hs.into_iter().map(|h| h.join().unwrap()).collect_vec()
        })
        .unwrap();
        let should_be = ws.iter().map(|w| w.hash(Block::default())).collect_vec();
        assert_eq!(hashes, should_be);
    }
}
| true |
0b382cc74577687ea2d651c98f24f2aab1e96664
|
Rust
|
Wizdave97/rust-handbook
|
/smart-pointers/src/test.rs
|
UTF-8
| 967 | 3.046875 | 3 |
[] |
no_license
|
#[cfg(test)]
mod tests {
    use std::rc::Rc;
    use std::cell::RefCell;
    use smart_pointers::limit_tracker;
    // Test double that records every message passed through the `Messenger`
    // trait. `Rc<RefCell<..>>` gives the assertion shared, mutable access to
    // the log even though `send` only takes `&self`.
    struct MockMessenger<'a> {
        messages: Rc<RefCell<Vec<&'a str>>>
    }
    impl<'a> MockMessenger<'a> {
        // Starts with an empty message log.
        fn new() -> MockMessenger<'a> {
            MockMessenger {
                messages: Rc::new(RefCell::new(vec![]))
            }
        }
    }
    impl<'a> limit_tracker::Messenger<'a> for MockMessenger<'a> {
        // Records the (trimmed) message instead of delivering it anywhere.
        fn send(&self, msg: &'a str) {
            self.messages.borrow_mut().push(msg.trim());
        }
    }
    // Reaching 75% of quota must trigger exactly the 75% warning message.
    #[test]
    fn it_should_send_message_when_user_has_reached_75_of_quota() {
        let messenger = MockMessenger::new();
        let mut quota_tracker = limit_tracker::LimitTracker::new(100, 50, &messenger);
        quota_tracker.set_value(75);
        assert_eq!(messenger.messages.borrow()[0], "You have used up 75% of your available quota");
    }
}
| true |
26d45127b3bfd4267722cb9b596be6ac7187775c
|
Rust
|
pgoreutn/rust-server-example
|
/src/web/src/token.rs
|
UTF-8
| 1,125 | 2.671875 | 3 |
[] |
no_license
|
use chrono::serde::ts_seconds::{deserialize as from_ts, serialize as to_ts};
use chrono::{DateTime, Duration, Utc};
use jsonwebtoken::{decode, encode, DecodingKey, EncodingKey, Header, Validation};
use serde::{Deserialize, Serialize};
/// JWT claims payload: the authenticated user plus issued-at / expiry
/// timestamps, (de)serialized as Unix seconds.
#[derive(Serialize, Deserialize, Debug)]
pub struct TokenClaims {
    /// Id of the user this token authenticates.
    pub user_id: i64,
    #[serde(serialize_with = "to_ts", deserialize_with = "from_ts")]
    iat: DateTime<Utc>,
    #[serde(serialize_with = "to_ts", deserialize_with = "from_ts")]
    exp: DateTime<Utc>,
}
/// Creates a signed JWT carrying `user_id`, issued now and expiring in
/// 30 days, signed with `secret` using the default (HS256) header.
pub fn encode_token(user_id: i64, secret: &str) -> Result<String, jsonwebtoken::errors::Error> {
    let claims = TokenClaims {
        user_id,
        iat: Utc::now(),
        exp: Utc::now() + Duration::days(30),
    };
    let key = EncodingKey::from_secret(secret.as_ref());
    encode(&Header::default(), &claims, &key)
}
/// Verifies `token` against `secret` (default validation rules, including
/// expiry) and returns the embedded claims.
pub fn decode_token(token: &str, secret: &str) -> Result<TokenClaims, jsonwebtoken::errors::Error> {
    decode::<TokenClaims>(
        token,
        &DecodingKey::from_secret(secret.as_ref()),
        &Validation::default(),
    )
    .map(|data| data.claims)
}
| true |
6e1c755d13b0ab8483a653cb3212b1560ed9a1a4
|
Rust
|
kevinwylder/vanity_wg_key
|
/src/score.rs
|
UTF-8
| 3,394 | 2.90625 | 3 |
[] |
no_license
|
use std::{
fs::File,
io::{self, BufRead},
path::Path,
str,
};
use super::KeyBuffer;
/// Scores candidate keys by finding dictionary words inside their
/// base64-encoded public keys.
pub struct Scorer {
    // Root of the word trie loaded from the dictionary file.
    root: ScoreDict,
    // A key is only reported when its total score exceeds this threshold.
    min_score: u32,
}
/// Trie node over a 37-symbol alphabet (26 case-folded letters, 10 digits,
/// and one bucket for everything else — see `alphabet_37`).
struct ScoreDict {
    // Children indexed by `alphabet_37` of the next character.
    index: [Option<Box<Self>>; 37],
    // Score of the word ending at this node (0 when no word ends here).
    val: u32,
}
/// One dictionary word found inside an encoded key.
pub struct Match {
    pub start: usize,
    pub len: usize,
    pub score: u32,
}
/// All matches found in a single key, with their summed score.
pub struct MultiMatch {
    pub matches: Vec<Match>,
    pub total_score: u32,
    pub key: KeyBuffer,
}
impl Default for ScoreDict {
    /// An empty node: no children, no word ending here.
    fn default() -> ScoreDict {
        // `[None; 37]` would require `Option<Box<ScoreDict>>: Copy`;
        // a `const` item is accepted as an array-repeat initializer instead.
        const DEFAULT: Option<Box<ScoreDict>> = None;
        ScoreDict{
            index: [DEFAULT; 37],
            val: 0,
        }
    }
}
/// Maps a character into the trie's 37-symbol alphabet:
/// letters (case-insensitive) -> 0..=25, digits -> 26..=35,
/// everything else -> 36.
fn alphabet_37(c: char) -> usize {
    match c {
        'a'..='z' => (c as u8 - b'a') as usize,
        'A'..='Z' => (c as u8 - b'A') as usize,
        '0'..='9' => 26 + (c as u8 - b'0') as usize,
        _ => 36,
    }
}
impl ScoreDict {
    /// Inserts the remaining characters of `term` into the trie, storing
    /// `val` on the node where the word ends (overwriting any previous val).
    fn add(&mut self, mut term: str::Chars, val: u32) {
        match term.next() {
            Some(c) => {
                let pos = alphabet_37(c);
                match self.index[pos] {
                    Some(ref mut child) => {
                        child.add(term, val);
                    }
                    None => {
                        // Allocate the missing child, then recurse into it.
                        self.index[pos] = Some(Box::new(ScoreDict::default()));
                        self.index[pos].as_mut().unwrap().add(term, val);
                    }
                }
            },
            None => {
                self.val = val;
            }
        }
    }
    /// Walks the trie along `term`; when the walk falls off the trie,
    /// returns `(score, chars_walked)` of the node it stopped at (the score
    /// is non-zero only if a dictionary word ends exactly there).
    ///
    /// NOTE(review): if `term` itself is exhausted first (a word ending flush
    /// with the end of the input), this returns `(0, 0)` and that word is
    /// never scored — confirm whether that is intended.
    fn search(&self, i: usize, mut term: str::Chars) -> (u32, usize) {
        match term.next() {
            Some(c) => {
                let pos = alphabet_37(c);
                match self.index[pos] {
                    Some(ref child) => {
                        return child.search(i+1, term);
                    },
                    None => {
                        return (self.val, i);
                    }
                }
            },
            None => {
                return (0, 0);
            }
        }
    }
}
impl Scorer {
    /// Loads a dictionary (one word per line) from `path`. Each word scores
    /// the square of its length; `min_score` is the reporting threshold
    /// used later by `score`. Unreadable lines are silently skipped.
    pub fn from_file(path: &Path, min_score: u32) -> io::Result<Self> {
        let file = File::open(path)?;
        let mut root = ScoreDict::default();
        for line in io::BufReader::new(file).lines() {
            if let Ok(s) = line {
                root.add(s.as_str().chars(), (s.len() * s.len()) as u32);
            }
        }
        Ok(Scorer{
            root,
            min_score,
        })
    }
    /// Scans every suffix of the base64-encoded public key for dictionary
    /// words, summing their scores. Returns `Some` only when the total
    /// strictly exceeds `min_score`.
    pub fn score(&self, key: &KeyBuffer) -> Option<MultiMatch> {
        let term = base64::encode(key.pubkey());
        let mut matches = vec![];
        let mut total_score = 0;
        let mut start = 0;
        // `iter` marks the current suffix; a clone of it is handed to the
        // trie so the outer cursor is not consumed by the search.
        let mut iter = term.chars();
        loop {
            let substr = iter.clone();
            let (score, len) = self.root.search(0, substr);
            if score > 0 {
                matches.push(Match{ start, len, score });
                total_score += score;
            }
            start += 1;
            if let None = iter.next() {
                break;
            }
        }
        if total_score > self.min_score {
            return Some(MultiMatch{
                matches,
                total_score,
                key: key.clone(),
            });
        }
        return None;
    }
}
| true |
82e9c1ce3ec8539831f449e7e3eb8fc560271372
|
Rust
|
tomcumming/pga2d
|
/src/pseudoscalar.rs
|
UTF-8
| 751 | 3.03125 | 3 |
[
"MIT"
] |
permissive
|
use std::ops::Mul;
use crate::Scalar;
/// Grade-3 blade, also known as I
pub struct PseudoScalar {
    /// Coefficient of the single e0∧e1∧e2 basis trivector.
    pub e012: Scalar,
}
// Scaling a pseudoscalar by a scalar on the right.
impl Mul<Scalar> for PseudoScalar {
    type Output = PseudoScalar;
    fn mul(self, s: Scalar) -> PseudoScalar {
        PseudoScalar::from(self.e012 * s)
    }
}
// Mirrored impl so scalar * pseudoscalar also works; delegates to the above.
impl Mul<PseudoScalar> for Scalar {
    type Output = PseudoScalar;
    fn mul(self, ps: PseudoScalar) -> PseudoScalar {
        ps * self
    }
}
impl From<Scalar> for PseudoScalar {
fn from(s: Scalar) -> PseudoScalar {
PseudoScalar { e012: s }
}
}
impl From<PseudoScalar> for Scalar {
fn from(ps: PseudoScalar) -> Scalar {
ps.e012
}
}
impl PseudoScalar {
pub fn ideal_norm(self) -> Scalar {
self.e012
}
}
| true |
511b424841253b87bfc0ae7e322d2761a20a7b36
|
Rust
|
ScarboroughCoral/Notes
|
/LeetCode/680. 验证回文字符串 Ⅱ.rs
|
UTF-8
| 432 | 3.140625 | 3 |
[] |
no_license
|
impl Solution {
    /// LeetCode 680: returns true if `s` can be made a palindrome by
    /// deleting at most one character.
    pub fn valid_palindrome(s: String) -> bool {
        let s = s.as_bytes();
        // Length 0 or 1 is trivially a palindrome; this also guards the
        // `s.len() - 1` below against an underflow panic on empty input.
        if s.len() < 2 {
            return true;
        }
        let (mut l, mut r) = (0, s.len() - 1);
        // Checks whether s[l..=r] is a palindrome with no deletion allowed.
        let is_palindrome = |mut l, mut r| {
            while l < r && s[l] == s[r] {
                l += 1;
                r -= 1;
            }
            l >= r
        };
        // Walk inward to the first mismatching pair (if any).
        while l < r && s[l] == s[r] {
            l += 1;
            r -= 1;
        }
        // On mismatch, try deleting either the left or the right character.
        is_palindrome(l + 1, r) || is_palindrome(l, r - 1)
    }
}
| true |
29c044b3745f0e88c36aaebd9196393bfd64eb67
|
Rust
|
JamesMenetrey/rust-web-app
|
/src/domain/products/model/store.rs
|
UTF-8
| 4,046 | 3.3125 | 3 |
[] |
no_license
|
/*! Persistent storage for products. */
use auto_impl::auto_impl;
use std::{
collections::{
hash_map::Entry,
HashMap,
},
sync::RwLock,
vec::IntoIter,
};
use crate::domain::{
error::{
err_msg,
Error,
},
products::{
Product,
ProductData,
ProductId,
},
};
/* A place to persist and fetch product entities. */
#[auto_impl(&, Arc)]
pub(in crate::domain) trait ProductStore {
    /// Fetches a product by id; `Ok(None)` when no such product exists.
    fn get_product(&self, id: ProductId) -> Result<Option<Product>, Error>;
    /// Persists a product, subject to the store's concurrency rules.
    fn set_product(&self, product: Product) -> Result<(), Error>;
}
/**
An additional store for fetching multiple product records at a time.
This trait is an implementation detail that lets us fetch more than one product.
It will probably need to be refactored or just removed when we add a proper database.
The fact that it's internal to `domain::products` though means the scope of breakage is a bit smaller.
Commands and queries that depend on `ProductStoreFilter` won't need to break their public API.
*/
#[auto_impl(&, Arc)]
pub(in crate::domain) trait ProductStoreFilter {
    /// Returns an iterator over the data of every product matching `predicate`.
    fn filter<F>(&self, predicate: F) -> Result<Iter, Error>
    where
        F: Fn(&ProductData) -> bool;
}
/// Iterator type returned by `ProductStoreFilter::filter`.
pub(in crate::domain) type Iter = IntoIter<ProductData>;
/** A test in-memory product store. */
pub(in crate::domain) type InMemoryStore = RwLock<HashMap<ProductId, ProductData>>;
impl ProductStore for InMemoryStore {
    /// Looks up a product by id under the read lock; the stored data is
    /// cloned into a fresh entity so the lock is released before returning.
    fn get_product(&self, id: ProductId) -> Result<Option<Product>, Error> {
        let products = self.read().map_err(|_| err_msg("not good!"))?;
        Ok(products
            .get(&id)
            .map(|data| Product::from_data(data.clone())))
    }
    /// Stores a product with optimistic concurrency: the incoming version
    /// must match the stored one, and the stored version is bumped on every
    /// successful write (including the first insert).
    fn set_product(&self, product: Product) -> Result<(), Error> {
        let mut data = product.into_data();
        let id = data.id;
        let mut products = self.write().map_err(|_| err_msg("not good!"))?;
        match products.entry(id) {
            Entry::Vacant(entry) => {
                data.version.next();
                entry.insert(data);
            }
            Entry::Occupied(mut entry) => {
                let entry = entry.get_mut();
                if entry.version != data.version {
                    // `Err(...)?` obscured the early return; make it explicit.
                    return Err(err_msg("optimistic concurrency fail"));
                }
                data.version.next();
                *entry = data;
            }
        }
        Ok(())
    }
}
impl ProductStoreFilter for InMemoryStore {
    /// Snapshot-filters under the read lock; matching records are cloned
    /// out so the lock is released before the iterator is consumed.
    fn filter<F>(&self, predicate: F) -> Result<Iter, Error>
    where
        F: Fn(&ProductData) -> bool,
    {
        let products: Vec<_> = self
            .read()
            .map_err(|_| err_msg("not good!"))?
            .values()
            .filter(|p| predicate(*p))
            .cloned()
            .collect();
        Ok(products.into_iter())
    }
}
/// Creates an empty in-memory store (used by tests in this module).
pub(in crate::domain::products) fn in_memory_store() -> InMemoryStore {
    RwLock::new(HashMap::new())
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::domain::products::{
        model::test_data,
        *,
    };
    // Round-trip: a stored product can be fetched back by its id.
    #[test]
    fn test_in_memory_store() {
        let store = in_memory_store();
        let id = ProductId::new();
        // Create a product in the store
        let product = test_data::ProductBuilder::new().id(id).build();
        store.set_product(product).unwrap();
        // Get the product from the store
        let found = store.get_product(id).unwrap().unwrap();
        assert_eq!(id, found.data.id);
    }
    // A second insert with the same (stale) version must be rejected.
    #[test]
    fn add_product_twice_fails_concurrency_check() {
        let store = in_memory_store();
        let id = ProductId::new();
        // Create a product in the store
        store
            .set_product(test_data::ProductBuilder::new().id(id).build())
            .unwrap();
        // Attempting to create a second time fails optimistic concurrency check
        assert!(store
            .set_product(test_data::ProductBuilder::new().id(id).build())
            .is_err());
    }
}
| true |
6a1b75b005a096a9a104d5af84a6ce4f0a799063
|
Rust
|
AdamVincent90/Locker
|
/src/main.rs
|
UTF-8
| 2,097 | 3.90625 | 4 |
[] |
no_license
|
/// Entry point: earlier match-statement experiments are kept commented out;
/// only the combination-lock demo runs.
fn main() {
    // println!("Hello, Matches!");
    // let country_code = 100;
    // let country = match country_code {
    //     826 => "GB",
    //     46 => "SWE",
    //     0..=45 => "UNKNOWN",
    //     _ => "INVALID",
    // };
    // println!("country code {} is {}", country_code, country);
    // match_statement();
    combo_lock();
}
// fn match_statement() {
// let mut i = 0;
// while i <= 100 {
// let num = match i {
// 0 => "Number is the default value of i",
// 1..=99 => "The number is currently between 1 and 99",
// _ => "This number is too high so the match statement will end..",
// };
// println!("{}", num);
// i += 1;
// }
// }
// 3 stages of a lock = locked, unlocked, or invalid
// NOTE(review): Rust convention is UpperCamelCase variants
// (`Locked`, `Unlocked`, `Invalid`); renaming would also touch `combo_lock`.
enum Lock {
    LOCKED,
    UNLOCKED,
    INVALID,
}
/// Interactive "combination lock": reads lines from stdin and compares them
/// against a fixed pass-phrase, allowing at most five attempts before the
/// lock becomes invalid.
fn combo_lock() {
    let code = String::from("Hello!");
    let mut state = Lock::LOCKED;
    let mut attempt = 1;
    loop {
        match state {
            Lock::LOCKED => {
                println!("Attempt {}", attempt);
                let mut input = String::new();
                match std::io::stdin().read_line(&mut input) {
                    Ok(_) => {
                        // Trim the trailing newline before comparing.
                        if input.trim().eq(&code) {
                            state = Lock::UNLOCKED;
                            continue;
                        }
                        println!("Incorrect Code");
                        attempt += 1;
                        if attempt > 5 {
                            state = Lock::INVALID;
                        }
                    }
                    // Any read failure invalidates the lock.
                    Err(_) => state = Lock::INVALID,
                }
            }
            Lock::UNLOCKED => {
                println!("You are now logged in!");
                println!("Admin stuff can be set here!");
                break;
            }
            Lock::INVALID => {
                println!("Attempts exceeded.. exiting");
                break;
            }
        }
    }
}
| true |
5c1ba5f1819567f9c7f26f1d363c97a1ce352f7b
|
Rust
|
okeri/rstatus
|
/src/utility.rs
|
UTF-8
| 1,761 | 2.8125 | 3 |
[
"MIT"
] |
permissive
|
use std::{
fs, io,
mem::{zeroed, MaybeUninit},
ptr, str,
};
pub const SIGRTMIN: i32 = 34;
/// Reads exactly `max` bytes from `filename` and returns them as a UTF-8
/// string.
///
/// Fails with an `io::Error` if the file is shorter than `max` bytes, or —
/// unlike the previous version, which panicked — if the bytes read are not
/// valid UTF-8 (reported as `ErrorKind::InvalidData`).
pub fn read_filen(filename: &str, max: usize) -> Result<String, io::Error> {
    use std::io::Read;
    let mut file = fs::File::open(filename)?;
    let mut buf = vec![0u8; max];
    file.read_exact(&mut buf)?;
    String::from_utf8(buf).map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))
}
/// Greatest common divisor via the Euclidean algorithm.
///
/// The previous version computed `i1 % i2` before checking anything and so
/// panicked (division by zero) whenever `i2 == 0`; this form handles that
/// case, returning `i1` (and `gcd(0, 0) == 0`).
pub fn gcd(i1: u32, i2: u32) -> u32 {
    let (mut x, mut y) = (i1, i2);
    while y != 0 {
        let r = x % y;
        x = y;
        y = r;
    }
    x
}
/// Blocks the given real-time signals (given as offsets from `SIGRTMIN`)
/// on the calling thread via `pthread_sigmask`.
pub fn mask(signals: Vec<i32>) {
    unsafe {
        let mut sigset = MaybeUninit::uninit();
        // `sigemptyset` initializes the set; only after it succeeds is
        // `assume_init` sound. Failures are silently ignored.
        if libc::sigemptyset(sigset.as_mut_ptr()) != -1 {
            let mut sigset = sigset.assume_init();
            for signal in signals.iter() {
                libc::sigaddset(&mut sigset, SIGRTMIN + signal);
            }
            libc::pthread_sigmask(libc::SIG_BLOCK, &sigset, ptr::null_mut());
        }
    }
}
/// Installs `action` as the handler for real-time signal `SIGRTMIN + signal`,
/// with all signals masked while the handler runs (`sigfillset`).
///
/// NOTE(review): `SIGRTMIN` is hard-coded to 34 at the top of this file
/// (typical glibc value) — confirm it matches the target libc.
pub fn signal(signal: i32, action: fn(i32)) {
    unsafe {
        let mut sigset = MaybeUninit::uninit();
        // `sigfillset` initializes the set before `assume_init` is called.
        if libc::sigfillset(sigset.as_mut_ptr()) != -1 {
            let mut sigaction: libc::sigaction = zeroed();
            sigaction.sa_mask = sigset.assume_init();
            sigaction.sa_sigaction = action as usize;
            libc::sigaction(signal + SIGRTMIN, &sigaction, ptr::null_mut());
        }
    }
}
/// Parses a hex color string, with or without a leading `#`, into a `u32`.
/// Falls back to `default` when the input is empty or not valid hex.
pub fn read_color(input: &str, default: u32) -> u32 {
    match input.strip_prefix('#') {
        Some(hex) => i64::from_str_radix(hex, 16).unwrap_or(default as i64) as u32,
        None if input.is_empty() => default,
        None => i64::from_str_radix(input, 16).unwrap_or(default as i64) as u32,
    }
}
| true |
572252755c8683cef85cf3e484f6e9d287c4b678
|
Rust
|
mikialex/artgl
|
/packages/wasm-scene/src/math/vec2.rs
|
UTF-8
| 11,298 | 3.484375 | 3 |
[] |
no_license
|
use std::fmt;
use std::fmt::Debug;
use std::ops::{Add, Sub, Mul, Div, Neg};
use std::ops::{AddAssign, SubAssign, MulAssign, DivAssign};
use super::vec::{Vec, Math, Lerp, Slerp};
use super::vec3::Vec3;
use super::vec4::Vec4;
use super::consts::{Zero, One, UnitX, UnitY};
/// A generic two-component vector with C-compatible memory layout.
#[repr(C)]
#[derive(Debug, Copy, Clone, Default, Hash, Eq, PartialEq)]
pub struct Vec2<T>
{
    pub x: T,
    pub y: T,
}

// Component-wise unary negation.
impl<T> Neg for Vec2<T> where T: Neg<Output = T> {
    type Output = Self;
    fn neg(self) -> Self {
        Self { x: -self.x, y: -self.y }
    }
}

// Component-wise addition.
impl<T> Add for Vec2<T> where T: Add<Output = T> {
    type Output = Self;
    fn add(self, rhs: Self) -> Self {
        Self { x: self.x + rhs.x, y: self.y + rhs.y }
    }
}

// Component-wise subtraction.
impl<T> Sub for Vec2<T> where T: Sub<Output = T> {
    type Output = Self;
    fn sub(self, rhs: Self) -> Self {
        Self { x: self.x - rhs.x, y: self.y - rhs.y }
    }
}

// Uniform scaling by a scalar.
impl<T> Mul<T> for Vec2<T> where T: Mul<Output = T> + Copy {
    type Output = Self;
    fn mul(self, s: T) -> Self {
        Self { x: self.x * s, y: self.y * s }
    }
}

// Component-wise (Hadamard) multiplication.
impl<T> Mul for Vec2<T> where T: Mul<Output = T> {
    type Output = Self;
    fn mul(self, rhs: Self) -> Self {
        Self { x: self.x * rhs.x, y: self.y * rhs.y }
    }
}

// Uniform division by a scalar.
impl<T> Div<T> for Vec2<T> where T: Div<Output = T> + Copy {
    type Output = Self;
    fn div(self, s: T) -> Self {
        Self { x: self.x / s, y: self.y / s }
    }
}

// Component-wise division.
impl<T> Div for Vec2<T> where T: Div<Output = T> {
    type Output = Self;
    fn div(self, rhs: Self) -> Self {
        Self { x: self.x / rhs.x, y: self.y / rhs.y }
    }
}

// In-place component-wise addition.
impl<T> AddAssign for Vec2<T> where T: AddAssign<T> {
    fn add_assign(&mut self, rhs: Self) {
        self.x += rhs.x;
        self.y += rhs.y;
    }
}

// In-place component-wise subtraction.
impl<T> SubAssign for Vec2<T> where T: SubAssign<T> {
    fn sub_assign(&mut self, rhs: Self) {
        self.x -= rhs.x;
        self.y -= rhs.y;
    }
}

// In-place component-wise multiplication.
impl<T> MulAssign for Vec2<T> where T: MulAssign<T> {
    fn mul_assign(&mut self, rhs: Self) {
        self.x *= rhs.x;
        self.y *= rhs.y;
    }
}

// In-place uniform scaling.
impl<T> MulAssign<T> for Vec2<T> where T: MulAssign<T> + Copy {
    fn mul_assign(&mut self, s: T) {
        self.x *= s;
        self.y *= s;
    }
}

// In-place uniform scaling by a borrowed scalar.
impl<'a, T> MulAssign<&'a T> for Vec2<T> where T: MulAssign<T> + Copy {
    fn mul_assign(&mut self, s: &'a T) {
        self.x *= *s;
        self.y *= *s;
    }
}

// In-place component-wise division.
impl<T> DivAssign for Vec2<T> where T: DivAssign<T> {
    fn div_assign(&mut self, rhs: Self) {
        self.x /= rhs.x;
        self.y /= rhs.y;
    }
}

// In-place uniform division.
impl<T> DivAssign<T> for Vec2<T> where T: DivAssign<T> + Copy {
    fn div_assign(&mut self, s: T) {
        self.x /= s;
        self.y /= s;
    }
}

// In-place uniform division by a borrowed scalar.
impl<'a, T> DivAssign<&'a T> for Vec2<T> where T: DivAssign<T> + Copy {
    fn div_assign(&mut self, s: &'a T) {
        self.x /= *s;
        self.y /= *s;
    }
}
impl<T> Vec2<T> where T: Copy
{
    /// Creates a new Vec2 from multiple components
    #[inline(always)]
    pub fn new(x: T, y: T) -> Self { Self { x, y } }
    /// Number of components of a `Vec2` (always 2). Note this is an
    /// associated function, not the length of a particular value.
    #[inline(always)]
    pub fn len() -> usize
    {
        return 2;
    }
    /// Copies the components out as an `(x, y)` tuple.
    #[inline(always)]
    pub fn to_tuple(&self) -> (T, T)
    {
        (self.x, self.y)
    }
    // GLSL-style 2-component swizzle accessors.
    #[inline(always)]
    pub fn xx(&self) -> Self { Self::new(self.x, self.x) }
    #[inline(always)]
    pub fn xy(&self) -> Self { Self::new(self.x, self.y) }
    #[inline(always)]
    pub fn yx(&self) -> Self { Self::new(self.y, self.x) }
    #[inline(always)]
    pub fn yy(&self) -> Self { Self::new(self.y, self.y) }
    // GLSL-style 3-component swizzles, expanding into a Vec3.
    #[inline(always)]
    pub fn xxx(&self) -> Vec3<T> { Vec3::new(self.x, self.x, self.x) }
    #[inline(always)]
    pub fn xxy(&self) -> Vec3<T> { Vec3::new(self.x, self.x, self.y) }
    #[inline(always)]
    pub fn xyx(&self) -> Vec3<T> { Vec3::new(self.x, self.y, self.x) }
    #[inline(always)]
    pub fn xyy(&self) -> Vec3<T> { Vec3::new(self.x, self.y, self.y) }
    #[inline(always)]
    pub fn yxx(&self) -> Vec3<T> { Vec3::new(self.y, self.x, self.x) }
    #[inline(always)]
    pub fn yxy(&self) -> Vec3<T> { Vec3::new(self.y, self.x, self.y) }
    #[inline(always)]
    pub fn yyx(&self) -> Vec3<T> { Vec3::new(self.y, self.y, self.x) }
    #[inline(always)]
    pub fn yyy(&self) -> Vec3<T> { Vec3::new(self.y, self.y, self.y) }
    // GLSL-style 4-component swizzles, expanding into a Vec4.
    #[inline(always)]
    pub fn xxxx(&self) -> Vec4<T> { Vec4::new(self.x, self.x, self.x, self.x) }
    #[inline(always)]
    pub fn xxyx(&self) -> Vec4<T> { Vec4::new(self.x, self.x, self.y, self.x) }
    #[inline(always)]
    pub fn xyxx(&self) -> Vec4<T> { Vec4::new(self.x, self.y, self.x, self.x) }
    #[inline(always)]
    pub fn xyyx(&self) -> Vec4<T> { Vec4::new(self.x, self.y, self.y, self.x) }
    #[inline(always)]
    pub fn xxxy(&self) -> Vec4<T> { Vec4::new(self.x, self.x, self.x, self.y) }
    #[inline(always)]
    pub fn xxyy(&self) -> Vec4<T> { Vec4::new(self.x, self.x, self.y, self.y) }
    #[inline(always)]
    pub fn xyxy(&self) -> Vec4<T> { Vec4::new(self.x, self.y, self.x, self.y) }
    #[inline(always)]
    pub fn xyyy(&self) -> Vec4<T> { Vec4::new(self.x, self.y, self.y, self.y) }
    #[inline(always)]
    pub fn yxxx(&self) -> Vec4<T> { Vec4::new(self.y, self.x, self.x, self.x) }
    #[inline(always)]
    pub fn yxyx(&self) -> Vec4<T> { Vec4::new(self.y, self.x, self.y, self.x) }
    #[inline(always)]
    pub fn yyxx(&self) -> Vec4<T> { Vec4::new(self.y, self.y, self.x, self.x) }
    #[inline(always)]
    pub fn yyyx(&self) -> Vec4<T> { Vec4::new(self.y, self.y, self.y, self.x) }
    #[inline(always)]
    pub fn yxxy(&self) -> Vec4<T> { Vec4::new(self.y, self.x, self.x, self.y) }
    #[inline(always)]
    pub fn yxyy(&self) -> Vec4<T> { Vec4::new(self.y, self.x, self.y, self.y) }
    #[inline(always)]
    pub fn yyxy(&self) -> Vec4<T> { Vec4::new(self.y, self.y, self.x, self.y) }
    #[inline(always)]
    pub fn yyyy(&self) -> Vec4<T> { Vec4::new(self.y, self.y, self.y, self.y) }
}
impl<T> Vec2<T> where T:Vec + Math
{
    /// Dot (inner) product of `self` and `b`.
    #[inline]
    pub fn dot(&self, b: Self) -> T
    {
        return self.x * b.x + self.y * b.y;
    }
    /// NOTE(review): this 2-D "cross" returns the vector `(-k, k)` where
    /// `k = x*b.y - y*b.x` is the usual scalar 2-D cross product, rather
    /// than the scalar itself — confirm this convention is intended.
    #[inline]
    pub fn cross(&self, b: Self) -> Self
    {
        Self
        {
            x:self.y * b.x - self.x * b.y,
            y:self.x * b.y - self.y * b.x
        }
    }
    /// Squared Euclidean length (avoids the square root).
    #[inline]
    pub fn length2(&self) -> T
    {
        return self.dot(*self);
    }
    /// Euclidean length.
    #[inline]
    pub fn length(&self) -> T
    {
        return self.length2().sqrt();
    }
    /// Euclidean distance between `self` and `b`.
    #[inline]
    pub fn distance(&self, b: Self) -> T
    {
        return (*self - b).length();
    }
    /// Unit-length copy of `self`; returned unchanged when the length is
    /// zero, avoiding a division by zero.
    #[inline]
    pub fn normalize(&self) -> Self
    {
        let mag_sq = self.length2();
        if mag_sq.gt(T::zero())
        {
            let inv_sqrt = T::one() / mag_sq.sqrt();
            return *self * inv_sqrt;
        }
        return *self;
    }
}
impl<T> Math for Vec2<T> where T: Copy + Math
{
    // Every method below lifts the scalar `Math` operation component-wise.
    #[inline]
    fn abs(self) -> Self {
        Self { x: self.x.abs(), y: self.y.abs() }
    }
    #[inline]
    fn recip(self) -> Self {
        Self { x: self.x.recip(), y: self.y.recip() }
    }
    #[inline]
    fn sqrt(self) -> Self {
        Self { x: self.x.sqrt(), y: self.y.sqrt() }
    }
    #[inline]
    fn rsqrt(self) -> Self {
        Self { x: self.x.rsqrt(), y: self.y.rsqrt() }
    }
    #[inline]
    fn sin(self) -> Self {
        Self { x: self.x.sin(), y: self.y.sin() }
    }
    #[inline]
    fn cos(self) -> Self {
        Self { x: self.x.cos(), y: self.y.cos() }
    }
    #[inline]
    fn tan(self) -> Self {
        Self { x: self.x.tan(), y: self.y.tan() }
    }
    // Returns the sines of both components, then the cosines, mirroring the
    // scalar `sincos` pairing.
    #[inline]
    fn sincos(self) -> (Self, Self) {
        let (sx, cx) = self.x.sincos();
        let (sy, cy) = self.y.sincos();
        (Self { x: sx, y: sy }, Self { x: cx, y: cy })
    }
    #[inline]
    fn acos(self) -> Self {
        Self { x: self.x.acos(), y: self.y.acos() }
    }
    #[inline]
    fn asin(self) -> Self {
        Self { x: self.x.asin(), y: self.y.asin() }
    }
    #[inline]
    fn atan(self) -> Self {
        Self { x: self.x.atan(), y: self.y.atan() }
    }
    #[inline]
    fn exp(self) -> Self {
        Self { x: self.x.exp(), y: self.y.exp() }
    }
    #[inline]
    fn exp2(self) -> Self {
        Self { x: self.x.exp2(), y: self.y.exp2() }
    }
    #[inline]
    fn log(self, rhs: Self) -> Self {
        Self { x: self.x.log(rhs.x), y: self.y.log(rhs.y) }
    }
    #[inline]
    fn log2(self) -> Self {
        Self { x: self.x.log2(), y: self.y.log2() }
    }
    #[inline]
    fn log10(self) -> Self {
        Self { x: self.x.log10(), y: self.y.log10() }
    }
    #[inline]
    fn to_radians(self) -> Self {
        Self { x: self.x.to_radians(), y: self.y.to_radians() }
    }
    #[inline]
    fn to_degrees(self) -> Self {
        Self { x: self.x.to_degrees(), y: self.y.to_degrees() }
    }
    #[inline]
    fn min(self, rhs: Self) -> Self {
        Self { x: self.x.min(rhs.x), y: self.y.min(rhs.y) }
    }
    #[inline]
    fn max(self, rhs: Self) -> Self {
        Self { x: self.x.max(rhs.x), y: self.y.max(rhs.y) }
    }
    #[inline]
    fn saturate(self) -> Self {
        Self { x: self.x.saturate(), y: self.y.saturate() }
    }
    #[inline]
    fn snorm2unorm(self) -> Self {
        Self { x: self.x.snorm2unorm(), y: self.y.snorm2unorm() }
    }
    #[inline]
    fn unorm2snorm(self) -> Self {
        Self { x: self.x.unorm2snorm(), y: self.y.unorm2snorm() }
    }
    #[inline]
    fn clamp(self, minval: Self, maxval: Self) -> Self {
        Self { x: self.x.clamp(minval.x, maxval.x), y: self.y.clamp(minval.y, maxval.y) }
    }
}
impl<T> Lerp<T> for Vec2<T> where T: Copy + One + Mul<Output=T> + Add<Output=T> + Sub<Output=T>
{
    /// Linear interpolation: `self` at t = 0, `b` at t = 1.
    #[inline(always)]
    fn lerp(self, b: Self, t: T) -> Self
    {
        return self*(T::one() - t) + b*t;
    }
}
impl<T> Slerp<T> for Vec2<T> where T: Vec + Math
{
    /// NOTE(review): this is a normalized linear blend with a hemisphere
    /// sign flip (`factor` is negated when the dot product is non-positive),
    /// not a constant-angular-velocity slerp — confirm that is intended.
    fn slerp(self, other: Self, factor: T) -> Self
    {
        let dot = self.dot(other);
        let s = T::one() - factor;
        let t = if dot.gt(T::zero()) { factor } else { -factor };
        let q = self * s + other * t;
        q.normalize()
    }
}
impl<T> Zero for Vec2<T> where T:Zero
{
    /// Additive identity: both components zero.
    #[inline(always)]
    fn zero() -> Self {
        Self { x: T::zero(), y: T::zero() }
    }
}
impl<T> One for Vec2<T> where T:One
{
    /// Multiplicative identity: both components one.
    #[inline(always)]
    fn one() -> Self {
        Self { x: T::one(), y: T::one() }
    }
}
impl<T> UnitX for Vec2<T> where T:One + Zero
{
    /// Unit basis vector along x: (1, 0).
    #[inline(always)]
    fn unit_x() -> Self {
        Self { x: T::one(), y: T::zero() }
    }
}
impl<T> UnitY for Vec2<T> where T:One + Zero
{
    /// Unit basis vector along y: (0, 1).
    #[inline(always)]
    fn unit_y() -> Self {
        Self { x: T::zero(), y: T::one() }
    }
}
impl<T> fmt::Display for Vec2<T> where T:Debug
{
    /// Formats as `(x, y)` using the components' `Debug` representation.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result
    {
        write!(f, "({:?}, {:?})", self.x, self.y)
    }
}
impl<T> fmt::Binary for Vec2<T> where T:Vec + Math
{
    /// NOTE(review): `Binary` is repurposed here to print the vector's
    /// Euclidean *length* (honoring the precision flag, default 3), not a
    /// base-2 rendering — confirm this overload is intentional.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result
    {
        let len = self.length();
        let decimals = f.precision().unwrap_or(3);
        let string = format!("{:.*?}", decimals, len);
        f.pad_integral(true, "", &string)
    }
}
// Conversion from a 2-element array `[x, y]`.
impl<T> From<[T;2]> for Vec2<T> where T:Copy
{
    fn from(v:[T;2]) -> Self
    {
        Self
        {
            x:v[0],
            y:v[1]
        }
    }
}
// Conversion from a tuple `(x, y)`.
impl<T> From<(T,T)> for Vec2<T> where T:Copy
{
    fn from(v:(T,T)) -> Self
    {
        Self
        {
            x:v.0,
            y:v.1,
        }
    }
}
// Identity borrow impls so generic code bounded on AsRef/AsMut works.
impl<T> AsRef<Vec2<T>> for Vec2<T>
{
    fn as_ref(&self) -> &Vec2<T>
    {
        self
    }
}
impl<T> AsMut<Vec2<T>> for Vec2<T>
{
    fn as_mut(&mut self) -> &mut Vec2<T>
    {
        self
    }
}
| true |
1b2428d69eaa999ee3b0cc7ccba6dbb8faacc266
|
Rust
|
tsutton/aoc2020
|
/src/day6.rs
|
UTF-8
| 947 | 3.078125 | 3 |
[
"MIT"
] |
permissive
|
use std::collections::HashSet;
use std::convert::TryInto;
/// Per-group data: one `HashSet` of answered questions per person.
type Group = Vec<HashSet<char>>;
/// Parses the puzzle input: groups are separated by blank lines, and each
/// line within a group is one person's set of "yes" answers.
#[aoc_generator(day6)]
pub fn gen(input: &str) -> Vec<Group> {
    input
        .split("\n\n")
        .map(|group| group.lines().map(|line| line.chars().collect()).collect())
        .collect()
}
/// Part 1: for each group, count the questions answered "yes" by ANYONE
/// (set union), and sum the counts over all groups.
#[aoc(day6, part1)]
pub fn part1(groups: &[Group]) -> i64 {
    let mut ans = 0;
    for group in groups {
        // Extend one accumulator set in place; the original rebuilt the
        // union set from scratch for every person.
        let mut acc = HashSet::new();
        for person in group {
            acc.extend(person.iter().copied());
        }
        ans += acc.len();
    }
    ans.try_into().unwrap()
}
/// Part 2: for each group, count the questions answered "yes" by EVERYONE
/// (set intersection), and sum the counts over all groups.
#[aoc(day6, part2)]
pub fn part2(groups: &[Group]) -> i64 {
    let mut ans = 0;
    for group in groups {
        // Guard against an empty group; the original indexed `group[0]`
        // unconditionally and would panic.
        if group.is_empty() {
            continue;
        }
        let mut acc = group[0].clone();
        for person in &group[1..] {
            // Intersect in place; avoids allocating a fresh set per person.
            acc.retain(|c| person.contains(c));
        }
        ans += acc.len();
    }
    ans.try_into().unwrap()
}
| true |
fac193edd6fd95ec09e72595592426d42f7d2666
|
Rust
|
TzuRen/SafeSearchSSE
|
/libgt/src/treeplex/util.rs
|
UTF-8
| 657 | 3.265625 | 3 |
[] |
no_license
|
/// A `Sequence` object is just a 2-tuple of (infoset_index, action_index).
/// Note that `Sequence` does not contain `PlayerInfo` or `Action`, it merely
/// contains *indices* to them.
/// (TODO(chunkail): either refactor or change to reference counted scheme?
/// Maybe rename this to `SequenceIndex` instead.)
pub type Sequence = (usize, usize);
/// Specifies a sequence (in terms of `PlayerInfo`, `Action` *index* pairs) or whether this is
/// an empty sequence. This representation is cheap and copy-able at
/// constant time.
#[derive(Debug, PartialOrd, Ord, PartialEq, Eq, Clone, Copy)]
pub enum SequenceOrEmpty {
    Empty,
    Sequence(Sequence),
}
| true |
12d8399386e5df08e317a84341c97e72d2e74fac
|
Rust
|
udoprog/ptscan
|
/crates/ptscan/src/alignment.rs
|
UTF-8
| 641 | 3.328125 | 3 |
[
"MIT",
"Apache-2.0",
"CC-BY-SA-3.0"
] |
permissive
|
/// Strategy for stepping through candidate offsets during a scan.
pub trait Alignment: 'static + Copy + Send + Sync {
    /// Align the given offset (rounding down as needed).
    fn align(self, offset: &mut usize);
    /// Get the step for the current alignment.
    fn step(self) -> usize;
}

/// Fixed byte alignment: offsets are rounded down to a multiple of the
/// wrapped value, and the scan advances by that value.
#[derive(Clone, Copy)]
pub struct Aligned(pub usize);

impl Alignment for Aligned {
    fn align(self, offset: &mut usize) {
        // Subtracting the remainder rounds down to the nearest multiple
        // (a remainder of zero leaves the offset untouched).
        *offset -= *offset % self.0;
    }

    fn step(self) -> usize {
        self.0
    }
}

/// No alignment: every byte offset is a candidate.
#[derive(Clone, Copy)]
pub struct Unaligned;

impl Alignment for Unaligned {
    fn align(self, _: &mut usize) {}

    fn step(self) -> usize {
        1
    }
}
| true |
3a98ca5d8a3defb316d8d8c957d23c9560d47065
|
Rust
|
jellybobbin/tiny
|
/termbox/src/lib.rs
|
UTF-8
| 3,740 | 2.84375 | 3 |
[
"MIT"
] |
permissive
|
// Color constants mirroring termbox's TB_* palette values.
pub const TB_DEFAULT: u16 = 0x00;
pub const TB_BLACK: u16 = 0x01;
pub const TB_RED: u16 = 0x02;
pub const TB_GREEN: u16 = 0x03;
pub const TB_YELLOW: u16 = 0x04;
pub const TB_BLUE: u16 = 0x05;
pub const TB_MAGENTA: u16 = 0x06;
pub const TB_CYAN: u16 = 0x07;
pub const TB_WHITE: u16 = 0x08;
// Attribute bits that can be OR-ed onto a color.
pub const TB_BOLD: u16 = 0x0100;
pub const TB_UNDERLINE: u16 = 0x0200;
pub const TB_REVERSE: u16 = 0x0400;
// Error codes returned by tb_init.
const TB_EUNSUPPORTED_TERMINAL: libc::c_int = -1;
const TB_EFAILED_TO_OPEN_TTY: libc::c_int = -2;
// Sentinel coordinate that hides the cursor when passed to tb_set_cursor.
const TB_HIDE_CURSOR: libc::c_int = -1;
// Raw bindings to the termbox C library.
extern "C" {
    pub fn tb_init() -> libc::c_int;
    pub fn tb_resize();
    pub fn tb_shutdown();
    pub fn tb_width() -> libc::c_int;
    pub fn tb_height() -> libc::c_int;
    pub fn tb_clear() -> libc::c_int;
    pub fn tb_set_clear_attributes(fg: u16, bg: u16);
    pub fn tb_present();
    pub fn tb_set_cursor(cx: libc::c_int, cy: libc::c_int);
    pub fn tb_change_cell(x: libc::c_int, y: libc::c_int, ch: u32, cw: u8, fg: u16, bg: u16);
}
/// RAII wrapper around the global termbox terminal state; `tb_shutdown`
/// runs when this is dropped.
pub struct Termbox {}
/// Reasons `tb_init` can fail, mapped from its C error codes.
#[derive(Debug)]
pub enum InitError {
    UnsupportedTerminal,
    FailedToOpenTty,
}
impl Termbox {
    /// Initializes termbox via `tb_init`, translating its error codes into
    /// `InitError`; the returned value owns the global termbox state.
    pub fn init() -> Result<Termbox, InitError> {
        let ret = unsafe { tb_init() };
        if ret == TB_EUNSUPPORTED_TERMINAL {
            Err(InitError::UnsupportedTerminal)
        } else if ret == TB_EFAILED_TO_OPEN_TTY {
            Err(InitError::FailedToOpenTty)
        } else {
            Ok(Termbox {})
        }
    }
    /// Forwards to `tb_resize`.
    pub fn resize(&mut self) {
        unsafe {
            tb_resize();
        }
    }
    /// Terminal width in cells, as reported by `tb_width`.
    pub fn width(&self) -> i32 {
        unsafe { tb_width() as i32 }
    }
    /// Terminal height in cells, as reported by `tb_height`.
    pub fn height(&self) -> i32 {
        unsafe { tb_height() as i32 }
    }
    /// Clears the buffer via `tb_clear` (its return code is ignored).
    pub fn clear(&mut self) {
        unsafe {
            tb_clear();
        }
    }
    /// Sets the attributes used by subsequent clears.
    pub fn set_clear_attributes(&mut self, fg: u16, bg: u16) {
        unsafe { tb_set_clear_attributes(fg, bg) }
    }
    /// Flushes the back buffer to the terminal via `tb_present`.
    pub fn present(&mut self) {
        unsafe { tb_present() }
    }
    /// Hides the cursor by passing termbox's sentinel coordinate.
    pub fn hide_cursor(&mut self) {
        unsafe {
            tb_set_cursor(TB_HIDE_CURSOR, TB_HIDE_CURSOR);
        }
    }
    /// Places the cursor at cell (cx, cy).
    pub fn set_cursor(&mut self, cx: i32, cy: i32) {
        unsafe { tb_set_cursor(cx as libc::c_int, cy as libc::c_int) }
    }
    /// Writes `ch` with the given attributes at cell (x, y). The display
    /// width is computed with `unicode_width` (falling back to 1) so wide
    /// characters occupy the right number of cells.
    pub fn change_cell(&mut self, x: i32, y: i32, ch: char, fg: u16, bg: u16) {
        let cw = unicode_width::UnicodeWidthChar::width(ch).unwrap_or(1) as u8;
        unsafe { tb_change_cell(x as libc::c_int, y as libc::c_int, char_to_utf8(ch), cw, fg, bg) }
    }
}
impl Drop for Termbox {
    /// Restores the terminal by shutting termbox down when the wrapper
    /// goes out of scope.
    fn drop(&mut self) {
        unsafe {
            tb_shutdown();
        }
    }
}
// https://github.com/rust-lang/rust/blob/03bed655142dd5e42ba4539de53b3663d8a123e0/src/libcore/char.rs#L424
const TAG_CONT: u8 = 0b1000_0000;
const TAG_TWO_B: u8 = 0b1100_0000;
const TAG_THREE_B: u8 = 0b1110_0000;
const TAG_FOUR_B: u8 = 0b1111_0000;
const MAX_ONE_B: u32 = 0x80;
const MAX_TWO_B: u32 = 0x800;
const MAX_THREE_B: u32 = 0x10000;
/// Encodes `c` as UTF-8 and packs the 1-4 encoded bytes into a `u32`,
/// most significant byte first — the representation `tb_change_cell`
/// expects for its `ch` argument.
fn char_to_utf8(c: char) -> u32 {
    let code = c as u32;
    if code < MAX_ONE_B {
        // ASCII: a single byte, passed through unchanged.
        code
    } else if code < MAX_TWO_B {
        let b0 = (code >> 6 & 0x1F) as u8 | TAG_TWO_B;
        let b1 = (code & 0x3F) as u8 | TAG_CONT;
        (u32::from(b0) << 8) | u32::from(b1)
    } else if code < MAX_THREE_B {
        let b0 = (code >> 12 & 0x0F) as u8 | TAG_THREE_B;
        let b1 = (code >> 6 & 0x3F) as u8 | TAG_CONT;
        let b2 = (code & 0x3F) as u8 | TAG_CONT;
        (u32::from(b0) << 16) | (u32::from(b1) << 8) | u32::from(b2)
    } else {
        let b0 = (code >> 18 & 0x07) as u8 | TAG_FOUR_B;
        let b1 = (code >> 12 & 0x3F) as u8 | TAG_CONT;
        let b2 = (code >> 6 & 0x3F) as u8 | TAG_CONT;
        let b3 = (code & 0x3F) as u8 | TAG_CONT;
        (u32::from(b0) << 24) | (u32::from(b1) << 16) | (u32::from(b2) << 8) | u32::from(b3)
    }
}
| true |
9a42cc1b8ba0ae30ce74408b3caf037c046ce131
|
Rust
|
tramulns/rust-by-example
|
/flow_control/while_let/src/main.rs
|
UTF-8
| 957 | 3.78125 | 4 |
[] |
no_license
|
fn main() {
    // Create an `optional` binding of type `Option<i32>`.
    let mut optional = Some(0);
    // Read as: "while `let` destructures `optional` into `Some(i)`,
    // run the block (`{}`); otherwise `break`."
    while let Some(i) = optional {
        if i > 9 {
            println!("Больше 9, уходим отсюда!");
            optional = None;
        } else {
            println!("`i` равен `{:?}`. Попробуем ещё раз.", i);
            optional = Some(i + 1);
        }
        // ^ Less rightward drift, and no need to handle the failed-match
        // case explicitly.
    }
    // ^ `if let` can take additional `else`/`else if` blocks;
    // `while let` has no such form.
}
096e4c39730a58bf3f02072a007ee0262925c7b4
|
Rust
|
himlpplm/rust-tdlib
|
/src/types/get_deep_link_info.rs
|
UTF-8
| 1,992 | 2.9375 | 3 |
[
"MIT"
] |
permissive
|
use crate::errors::*;
use crate::types::*;
use uuid::Uuid;
/// Returns information about a tg:// deep link. Use "tg://need_update_for_some_feature" or "tg:some_unsupported_feature" for testing. Returns a 404 error for unknown links. Can be called before authorization
#[derive(Debug, Clone, Default, Serialize, Deserialize)]
pub struct GetDeepLinkInfo {
    // Correlation id echoed back by tdlib as "@extra".
    #[doc(hidden)]
    #[serde(rename(serialize = "@extra", deserialize = "@extra"))]
    extra: Option<String>,
    // Client the request/response belongs to ("@client_id").
    #[serde(rename(serialize = "@client_id", deserialize = "@client_id"))]
    client_id: Option<i32>,
    /// The link
    link: String,
    // tdlib "@type" discriminant; set to "getDeepLinkInfo" by the builder.
    #[serde(rename(serialize = "@type"))]
    td_type: String,
}
impl RObject for GetDeepLinkInfo {
    #[doc(hidden)]
    fn extra(&self) -> Option<&str> {
        self.extra.as_deref()
    }
    #[doc(hidden)]
    fn client_id(&self) -> Option<i32> {
        self.client_id
    }
}
// Marker impl: this type is a tdlib function (request) object.
impl RFunction for GetDeepLinkInfo {}
impl GetDeepLinkInfo {
pub fn from_json<S: AsRef<str>>(json: S) -> RTDResult<Self> {
Ok(serde_json::from_str(json.as_ref())?)
}
pub fn builder() -> RTDGetDeepLinkInfoBuilder {
let mut inner = GetDeepLinkInfo::default();
inner.extra = Some(Uuid::new_v4().to_string());
inner.td_type = "getDeepLinkInfo".to_string();
RTDGetDeepLinkInfoBuilder { inner }
}
pub fn link(&self) -> &String {
&self.link
}
}
#[doc(hidden)]
// Builder wrapper; mutated through its setters and cloned out by `build()`.
pub struct RTDGetDeepLinkInfoBuilder {
    inner: GetDeepLinkInfo,
}
impl RTDGetDeepLinkInfoBuilder {
    /// Finalizes the builder, returning the assembled request object.
    pub fn build(&self) -> GetDeepLinkInfo {
        self.inner.clone()
    }
    /// Sets the deep link to be resolved; returns `&mut Self` for chaining.
    pub fn link<T: AsRef<str>>(&mut self, link: T) -> &mut Self {
        self.inner.link = link.as_ref().to_owned();
        self
    }
}
// Both the finished request and its builder can be borrowed as the
// request type, so APIs can accept either interchangeably.
impl AsRef<GetDeepLinkInfo> for GetDeepLinkInfo {
    fn as_ref(&self) -> &GetDeepLinkInfo {
        self
    }
}
impl AsRef<GetDeepLinkInfo> for RTDGetDeepLinkInfoBuilder {
    fn as_ref(&self) -> &GetDeepLinkInfo {
        &self.inner
    }
}
| true |
4ba41e75f7a93bfc238de6927c7c5c510dd766ad
|
Rust
|
Azure/azure-sdk-for-rust
|
/services/mgmt/powerbiembedded/src/package_2016_01/models.rs
|
UTF-8
| 15,009 | 2.75 | 3 |
[
"LicenseRef-scancode-generic-cla",
"MIT",
"LGPL-2.1-or-later"
] |
permissive
|
#![allow(non_camel_case_types)]
#![allow(unused_imports)]
use serde::de::{value, Deserializer, IntoDeserializer};
use serde::{Deserialize, Serialize, Serializer};
use std::str::FromStr;
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct AzureSku {
    #[doc = "SKU name"]
    pub name: azure_sku::Name,
    #[doc = "SKU tier"]
    pub tier: azure_sku::Tier,
}
impl AzureSku {
    /// Both fields are mandatory, so — unlike the other models in this
    /// file — there is no `Default` and `new` takes them explicitly.
    pub fn new(name: azure_sku::Name, tier: azure_sku::Tier) -> Self {
        Self { name, tier }
    }
}
pub mod azure_sku {
    use super::*;
    // Pattern used throughout this file: `#[serde(remote = ...)]` derives
    // (de)serializers on a shadow type, and the manual impls below wrap
    // them so unrecognized service strings become `UnknownValue` instead
    // of a deserialization error.
    #[doc = "SKU name"]
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    #[serde(remote = "Name")]
    pub enum Name {
        S1,
        #[serde(skip_deserializing)]
        UnknownValue(String),
    }
    impl FromStr for Name {
        type Err = value::Error;
        fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
            Self::deserialize(s.into_deserializer())
        }
    }
    impl<'de> Deserialize<'de> for Name {
        fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>
        where
            D: Deserializer<'de>,
        {
            let s = String::deserialize(deserializer)?;
            // Unknown strings fall back to `UnknownValue` rather than Err.
            let deserialized = Self::from_str(&s).unwrap_or(Self::UnknownValue(s));
            Ok(deserialized)
        }
    }
    impl Serialize for Name {
        fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
        where
            S: Serializer,
        {
            match self {
                Self::S1 => serializer.serialize_unit_variant("Name", 0u32, "S1"),
                Self::UnknownValue(s) => serializer.serialize_str(s.as_str()),
            }
        }
    }
    #[doc = "SKU tier"]
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    #[serde(remote = "Tier")]
    pub enum Tier {
        Standard,
        #[serde(skip_deserializing)]
        UnknownValue(String),
    }
    impl FromStr for Tier {
        type Err = value::Error;
        fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
            Self::deserialize(s.into_deserializer())
        }
    }
    impl<'de> Deserialize<'de> for Tier {
        fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>
        where
            D: Deserializer<'de>,
        {
            let s = String::deserialize(deserializer)?;
            let deserialized = Self::from_str(&s).unwrap_or(Self::UnknownValue(s));
            Ok(deserialized)
        }
    }
    impl Serialize for Tier {
        fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
        where
            S: Serializer,
        {
            match self {
                Self::Standard => serializer.serialize_unit_variant("Tier", 0u32, "Standard"),
                Self::UnknownValue(s) => serializer.serialize_str(s.as_str()),
            }
        }
    }
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct CheckNameRequest {
    #[doc = "Workspace collection name"]
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[doc = "Resource type"]
    // `type` is a Rust keyword, so the field is `type_` and renamed on the wire.
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<String>,
}
impl CheckNameRequest {
    /// Creates a request with every field unset.
    pub fn new() -> Self {
        Self::default()
    }
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct CheckNameResponse {
    #[doc = "Specifies a Boolean value that indicates whether the specified Power BI Workspace Collection name is available to use."]
    #[serde(rename = "nameAvailable", default, skip_serializing_if = "Option::is_none")]
    pub name_available: Option<bool>,
    #[doc = "Reason why the workspace collection name cannot be used."]
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub reason: Option<check_name_response::Reason>,
    #[doc = "Message indicating an unavailable name due to a conflict, or a description of the naming rules that are violated."]
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub message: Option<String>,
}
impl CheckNameResponse {
    /// Creates a response with every field unset.
    pub fn new() -> Self {
        Self::default()
    }
}
pub mod check_name_response {
    use super::*;
    #[doc = "Reason why the workspace collection name cannot be used."]
    // Same remote-type pattern as `azure_sku`: unknown strings from the
    // service are preserved in `UnknownValue` instead of failing.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    #[serde(remote = "Reason")]
    pub enum Reason {
        Unavailable,
        Invalid,
        #[serde(skip_deserializing)]
        UnknownValue(String),
    }
    impl FromStr for Reason {
        type Err = value::Error;
        fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
            Self::deserialize(s.into_deserializer())
        }
    }
    impl<'de> Deserialize<'de> for Reason {
        fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>
        where
            D: Deserializer<'de>,
        {
            let s = String::deserialize(deserializer)?;
            let deserialized = Self::from_str(&s).unwrap_or(Self::UnknownValue(s));
            Ok(deserialized)
        }
    }
    impl Serialize for Reason {
        fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
        where
            S: Serializer,
        {
            match self {
                Self::Unavailable => serializer.serialize_unit_variant("Reason", 0u32, "Unavailable"),
                Self::Invalid => serializer.serialize_unit_variant("Reason", 1u32, "Invalid"),
                Self::UnknownValue(s) => serializer.serialize_str(s.as_str()),
            }
        }
    }
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct CreateWorkspaceCollectionRequest {
    #[doc = "Azure location"]
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub location: Option<String>,
    // Free-form tag bag; kept as raw JSON rather than a typed map.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tags: Option<serde_json::Value>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub sku: Option<AzureSku>,
}
impl CreateWorkspaceCollectionRequest {
    /// Creates a request with every field unset.
    pub fn new() -> Self {
        Self::default()
    }
}
// Human-readable metadata describing a management operation (for portals
// / RBAC UX), not the `std::fmt::Display` trait.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct Display {
    #[doc = "The localized friendly form of the resource provider name. This form is also expected to include the publisher/company responsible. Use Title Casing. Begin with \"Microsoft\" for 1st party services."]
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub provider: Option<String>,
    #[doc = "The localized friendly form of the resource type related to this action/operation. This form should match the public documentation for the resource provider. Use Title Casing. For examples, refer to the \"name\" section."]
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub resource: Option<String>,
    #[doc = "The localized friendly name for the operation as shown to the user. This name should be concise (to fit in drop downs), but clear (self-documenting). Use Title Casing and include the entity/resource to which it applies."]
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub operation: Option<String>,
    #[doc = "The localized friendly description for the operation as shown to the user. This description should be thorough, yet concise. It will be used in tool-tips and detailed views."]
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub description: Option<String>,
    #[doc = "The intended executor of the operation; governs the display of the operation in the RBAC UX and the audit logs UX. Default value is 'user,system'"]
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub origin: Option<String>,
}
impl Display {
    /// Creates an instance with every field unset.
    pub fn new() -> Self {
        Self::default()
    }
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct Error {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub code: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub message: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub target: Option<String>,
    // A JSON `null` details array deserializes as an empty Vec.
    #[serde(
        default,
        deserialize_with = "azure_core::util::deserialize_null_as_default",
        skip_serializing_if = "Vec::is_empty"
    )]
    pub details: Vec<ErrorDetail>,
}
// Paging hook: error bodies never carry a continuation token.
impl azure_core::Continuable for Error {
    type Continuation = String;
    fn continuation(&self) -> Option<Self::Continuation> {
        None
    }
}
impl Error {
    /// Creates an error with every field unset.
    pub fn new() -> Self {
        Self::default()
    }
}
// One entry of `Error::details`.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct ErrorDetail {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub code: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub message: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub target: Option<String>,
}
impl ErrorDetail {
    /// Creates a detail entry with every field unset.
    pub fn new() -> Self {
        Self::default()
    }
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct MigrateWorkspaceCollectionRequest {
    #[doc = "Name of the resource group the Power BI workspace collections will be migrated to."]
    #[serde(rename = "targetResourceGroup", default, skip_serializing_if = "Option::is_none")]
    pub target_resource_group: Option<String>,
    // A JSON `null` resources array deserializes as an empty Vec.
    #[serde(
        default,
        deserialize_with = "azure_core::util::deserialize_null_as_default",
        skip_serializing_if = "Vec::is_empty"
    )]
    pub resources: Vec<String>,
}
impl MigrateWorkspaceCollectionRequest {
    /// Creates a request with every field unset.
    pub fn new() -> Self {
        Self::default()
    }
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct Operation {
    #[doc = "The name of the operation being performed on this particular object. This name should match the action name that appears in RBAC / the event service."]
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    // Friendly display metadata for this operation (see `Display` above).
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub display: Option<Display>,
}
impl Operation {
    /// Creates an operation with every field unset.
    pub fn new() -> Self {
        Self::default()
    }
}
// Wrapper matching the service's `{ "value": [...] }` list envelope.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct OperationList {
    #[serde(
        default,
        deserialize_with = "azure_core::util::deserialize_null_as_default",
        skip_serializing_if = "Vec::is_empty"
    )]
    pub value: Vec<Operation>,
}
impl OperationList {
    /// Creates an empty operation list.
    pub fn new() -> Self {
        Self::default()
    }
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct UpdateWorkspaceCollectionRequest {
    // Free-form tag bag; kept as raw JSON rather than a typed map.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tags: Option<serde_json::Value>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub sku: Option<AzureSku>,
}
impl UpdateWorkspaceCollectionRequest {
    /// Creates a request with every field unset.
    pub fn new() -> Self {
        Self::default()
    }
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct Workspace {
    #[doc = "Workspace id"]
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    #[doc = "Workspace name"]
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[doc = "Resource type"]
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<String>,
    #[doc = "Property bag"]
    // Untyped in the API spec, so kept as raw JSON.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<serde_json::Value>,
}
impl Workspace {
    /// Creates a workspace with every field unset.
    pub fn new() -> Self {
        Self::default()
    }
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct WorkspaceCollection {
    #[doc = "Resource id"]
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    #[doc = "Workspace collection name"]
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[doc = "Resource type"]
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<String>,
    #[doc = "Azure location"]
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub location: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tags: Option<serde_json::Value>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub sku: Option<AzureSku>,
    #[doc = "Properties"]
    // Untyped in the API spec, so kept as raw JSON.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<serde_json::Value>,
}
impl WorkspaceCollection {
    /// Creates a collection with every field unset.
    pub fn new() -> Self {
        Self::default()
    }
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct WorkspaceCollectionAccessKey {
    #[doc = "Key name"]
    #[serde(rename = "keyName", default, skip_serializing_if = "Option::is_none")]
    pub key_name: Option<workspace_collection_access_key::KeyName>,
}
impl WorkspaceCollectionAccessKey {
    /// Creates an instance with the key name unset.
    pub fn new() -> Self {
        Self::default()
    }
}
pub mod workspace_collection_access_key {
    use super::*;
    #[doc = "Key name"]
    // Closed enum: unlike `Name`/`Tier`/`Reason` above there is no
    // `UnknownValue` fallback, so an unexpected string fails to deserialize.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum KeyName {
        #[serde(rename = "key1")]
        Key1,
        #[serde(rename = "key2")]
        Key2,
    }
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct WorkspaceCollectionAccessKeys {
    #[doc = "Access key 1"]
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub key1: Option<String>,
    #[doc = "Access key 2"]
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub key2: Option<String>,
}
impl WorkspaceCollectionAccessKeys {
    /// Creates an instance with both keys unset.
    pub fn new() -> Self {
        Self::default()
    }
}
// `{ "value": [...] }` list envelope for workspace collections.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct WorkspaceCollectionList {
    #[serde(
        default,
        deserialize_with = "azure_core::util::deserialize_null_as_default",
        skip_serializing_if = "Vec::is_empty"
    )]
    pub value: Vec<WorkspaceCollection>,
}
// Paging hook: this envelope exposes no nextLink field, so there is
// never a continuation.
impl azure_core::Continuable for WorkspaceCollectionList {
    type Continuation = String;
    fn continuation(&self) -> Option<Self::Continuation> {
        None
    }
}
impl WorkspaceCollectionList {
    /// Creates an empty list.
    pub fn new() -> Self {
        Self::default()
    }
}
// `{ "value": [...] }` list envelope for workspaces.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct WorkspaceList {
    #[serde(
        default,
        deserialize_with = "azure_core::util::deserialize_null_as_default",
        skip_serializing_if = "Vec::is_empty"
    )]
    pub value: Vec<Workspace>,
}
// Paging hook: this envelope exposes no nextLink field, so there is
// never a continuation.
impl azure_core::Continuable for WorkspaceList {
    type Continuation = String;
    fn continuation(&self) -> Option<Self::Continuation> {
        None
    }
}
impl WorkspaceList {
    /// Creates an empty list.
    pub fn new() -> Self {
        Self::default()
    }
}
| true |
62b07bdddf54e2d846acb42b8e323a115181486e
|
Rust
|
curlywurlycraig/rust-learning
|
/traits/src/main.rs
|
UTF-8
| 934 | 3.953125 | 4 |
[] |
no_license
|
// Important learning regarding &self, &mut self, and self in method signatures.
// let &item = something;
//
// destructures item and thus item is not a reference.
//
// but fn(&self) { ... }
//
// doesn't destructure, it is syntax sugar to specify that self is a &Self
// You can also do:
// fn(self: &Self)
// or:
// fn(self: &mut Self)
// or:
// fn(self: Self).
//
// The latter takes ownership of the thing whose method is called.
/// Base trait with a provided (empty) default method body.
trait Thing {
    fn thing(&self) {}
}
/// Trait with a supertrait: every `Test` implementor must also be `Thing`.
trait Test: Thing {
    /// Default method; may call supertrait methods through `&self`.
    fn test(&self) -> u8 {
        self.thing();
        1
    }
    /// Required method taking the receiver mutably.
    fn other_test(&mut self);
}
impl Test for Example {
    // Overrides the default `test`, skipping the `thing()` call.
    fn test(&self) -> u8 {
        1
    }
    // `&mut self` lets the method mutate the struct's field.
    fn other_test(&mut self) {
        self.item = 10;
    }
}
// `Clone` is derived but not exercised in this example.
#[derive(Clone)]
struct Example {
    item: u8
}
// Uses `Thing`'s provided no-op body for `thing`.
impl Thing for Example {}
/// Demonstrates calling a `&mut self` trait method: `other_test`
/// overwrites `item` before it is printed.
fn main() {
    // Single-line struct literal instead of the multi-line form.
    let mut example = Example { item: 5 };
    example.other_test();
    println!("holy cow! {}", example.item);
}
| true |
4949525b594694fc5f9de4fd28c0a8bad91efee3
|
Rust
|
scotow/advent-of-code
|
/2015/src/bin/day_07.rs
|
UTF-8
| 3,005 | 2.875 | 3 |
[] |
no_license
|
// Shared AoC scaffolding macro — presumably generates `main` and the
// prelude imports (itertools helpers, HashMap, FromStr); verify in the
// `advent_of_code_2015` crate.
advent_of_code_2015::main!();
// One circuit instruction: (lhs, optional rhs, gate function, destination wire).
type Action = (Value, Option<Value>, fn(u16, Option<u16>) -> u16, String);
/// Parses one instruction line (e.g. "x AND y -> z") into an `Action`.
fn parse_action(line: &str) -> Action {
    // Split the expression from the destination wire.
    let (left, right) = line.split(" -> ").collect_tuple().unwrap();
    let left = left.split(' ').collect_vec();
    let func: fn(u16, Option<u16>) -> u16;
    let lhs: Value;
    let rhs: Option<Value>;
    // The token count of the left side determines the instruction form.
    match left.len() {
        // "<v> -> w": plain assignment.
        1 => {
            func = assign;
            lhs = left[0].parse().unwrap();
            rhs = None;
        }
        // "NOT <v> -> w": unary complement.
        2 => {
            func = not;
            lhs = left[1].parse().unwrap();
            rhs = None;
        }
        // "<a> <OP> <b> -> w": binary gate.
        3 => {
            func = match left[1] {
                "AND" => and,
                "OR" => or,
                "LSHIFT" => lshift,
                "RSHIFT" => rshift,
                _ => unreachable!(),
            };
            lhs = left[0].parse().unwrap();
            rhs = Some(left[2].parse().unwrap());
        }
        _ => unreachable!(),
    }
    (lhs, rhs, func, right.to_string())
}
/// `x -> w`: passes the signal through unchanged (unary, rhs ignored).
fn assign(value: u16, _unused: Option<u16>) -> u16 {
    value
}
/// `NOT x -> w`: bitwise complement (unary, rhs ignored).
fn not(value: u16, _unused: Option<u16>) -> u16 {
    !value
}
/// `x AND y -> w`: bitwise AND; panics if the rhs operand is absent.
fn and(a: u16, b: Option<u16>) -> u16 {
    a & b.unwrap()
}
/// `x OR y -> w`: bitwise OR; panics if the rhs operand is absent.
fn or(a: u16, b: Option<u16>) -> u16 {
    a | b.unwrap()
}
/// `x LSHIFT n -> w`: left shift; panics if the shift amount is absent.
fn lshift(a: u16, shift: Option<u16>) -> u16 {
    a << shift.unwrap()
}
/// `x RSHIFT n -> w`: right shift; panics if the shift amount is absent.
fn rshift(a: u16, shift: Option<u16>) -> u16 {
    a >> shift.unwrap()
}
/// Either a literal signal value or the name of a wire.
#[derive(Debug, Clone)]
enum Value {
    Direct(u16),
    Variable(String),
}
impl FromStr for Value {
    type Err = ();
    /// Anything that parses as a `u16` is a literal; everything else is
    /// treated as a wire name. Never fails.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        Ok(s.parse::<u16>()
            .map(Value::Direct)
            .unwrap_or_else(|_| Value::Variable(s.to_string())))
    }
}
/// Parses the puzzle input into one `Action` per line.
fn generator(input: &str) -> Vec<Action> {
    input.lines().map(parse_action).collect()
}
/// Part 1: run the circuit and report the signal on wire "a".
fn part_1(input: Vec<Action>) -> u16 {
    solve(&input)
}
/// Part 2: take part 1's answer, override wire "b" with it, and re-run
/// the circuit.
fn part_2(input: Vec<Action>) -> u16 {
    let a = solve(&input);
    // Drop the instruction that originally drove "b"...
    let mut actions = input
        .iter()
        .filter(|act| act.3 != "b")
        .cloned()
        .collect_vec();
    // ...and pin "b" to the part-1 result instead.
    actions.insert(0, (Value::Direct(a), None, assign, "b".to_string()));
    solve(&actions)
}
/// Repeatedly sweeps the instruction list, evaluating every action whose
/// operands are already resolved, until wire "a" has a value.
/// NOTE(review): assumes the circuit eventually drives "a"; otherwise
/// this loops forever.
fn solve(input: &[Action]) -> u16 {
    let mut values = HashMap::new();
    while !values.contains_key("a") {
        input.iter().for_each(|(lhs, rhs, action, dest)| {
            // Skip this action until its lhs operand is known.
            let lhs = match resolve(&values, lhs) {
                Some(v) => v,
                None => return,
            };
            // Same for the optional rhs operand.
            let rhs = match rhs {
                None => None,
                Some(rhs) => match resolve(&values, rhs) {
                    None => return,
                    Some(v) => Some(v),
                },
            };
            values.insert(dest.clone(), action(lhs, rhs));
        });
    }
    values["a"]
}
/// Resolves a `Value` to a concrete signal: literals resolve directly;
/// wire names resolve only once present in `values`.
fn resolve(values: &HashMap<String, u16>, v: &Value) -> Option<u16> {
    match *v {
        Value::Direct(n) => Some(n),
        Value::Variable(ref name) => values.get(name).copied(),
    }
}
| true |
c8025b0a7ba3abb6dc0f4a4f3efd866a7ec8abf1
|
Rust
|
acmcarther/cargo-raze-examples
|
/bazel/complicated_cargo_library/cargo/vendor/arrayvec-0.3.25/src/array.rs
|
UTF-8
| 3,014 | 3.03125 | 3 |
[
"Apache-2.0",
"MIT"
] |
permissive
|
/// Trait for fixed size arrays.
// `unsafe` trait: implementors must guarantee that `as_ptr`/`as_mut_ptr`
// point at `capacity()` contiguous `Item`s — `ArrayExt::as_slice` builds
// a slice from exactly that contract.
pub unsafe trait Array {
    /// The array's element type
    type Item;
    #[doc(hidden)]
    /// The smallest index type that indexes the array.
    type Index: Index;
    #[doc(hidden)]
    fn as_ptr(&self) -> *const Self::Item;
    #[doc(hidden)]
    fn as_mut_ptr(&mut self) -> *mut Self::Item;
    #[doc(hidden)]
    fn capacity() -> usize;
}
/// Small unsigned integer type used to index an array; the
/// `fix_array_impl_recursive!` invocations below pick the smallest one
/// that can address each supported array length.
pub trait Index : PartialEq + Copy {
    /// Widens this index to `usize`.
    fn to_usize(self) -> usize;
    /// Narrows a `usize` into this index type.
    // Parameter is now named: anonymous trait-method parameters
    // (`fn from(usize)`) are 2015-edition-only syntax and are rejected
    // from edition 2018 onward. Naming it is valid in every edition.
    fn from(ix: usize) -> Self;
}
use std::slice::{from_raw_parts};
/// Extension trait viewing any `Array` as a full-capacity slice.
pub trait ArrayExt : Array {
    #[inline(always)]
    fn as_slice(&self) -> &[Self::Item] {
        // SAFETY: the unsafe `Array` contract requires `as_ptr()` to point
        // at `Self::capacity()` contiguous, initialized `Item`s, which is
        // exactly what `from_raw_parts` needs.
        unsafe {
            from_raw_parts(self.as_ptr(), Self::capacity())
        }
    }
}
// Blanket impl: every `Array` gets `as_slice` for free.
impl<A> ArrayExt for A where A: Array { }
// Optional integration: treat `generic_array::GenericArray` as an `Array`
// when the `use_generic_array` feature is enabled. Uses `usize` as the
// index type regardless of length.
#[cfg(feature = "use_generic_array")]
unsafe impl<T, U> Array for ::generic_array::GenericArray<T, U>
    where U: ::generic_array::ArrayLength<T>
{
    type Item = T;
    type Index = usize;
    fn as_ptr(&self) -> *const Self::Item {
        // Deref to the underlying slice, then take its pointer.
        (**self).as_ptr()
    }
    fn as_mut_ptr(&mut self) -> *mut Self::Item {
        (**self).as_mut_ptr()
    }
    fn capacity() -> usize {
        U::to_usize()
    }
}
// Index implementations for each supported width. Narrowing in `from` is
// a plain `as` cast: callers are expected to pass values that fit (the
// macro invocations below pair each width with suitable array lengths).
impl Index for u8 {
    #[inline(always)]
    fn to_usize(self) -> usize { self as usize }
    #[inline(always)]
    fn from(ix: usize) -> Self { ix as u8 }
}
impl Index for u16 {
    #[inline(always)]
    fn to_usize(self) -> usize { self as usize }
    #[inline(always)]
    fn from(ix: usize) -> Self { ix as u16 }
}
impl Index for u32 {
    #[inline(always)]
    fn to_usize(self) -> usize { self as usize }
    #[inline(always)]
    fn from(ix: usize) -> Self { ix as u32 }
}
impl Index for usize {
    #[inline(always)]
    fn to_usize(self) -> usize { self }
    #[inline(always)]
    fn from(ix: usize) -> Self { ix }
}
// Implements `Array` for one fixed length `[T; $len]`, using `$index_type`
// as the smallest index type able to address it.
macro_rules! fix_array_impl {
    ($index_type:ty, $len:expr ) => (
        unsafe impl<T> Array for [T; $len] {
            type Item = T;
            type Index = $index_type;
            #[inline(always)]
            fn as_ptr(&self) -> *const T { self as *const _ as *const _ }
            #[inline(always)]
            fn as_mut_ptr(&mut self) -> *mut T { self as *mut _ as *mut _}
            #[inline(always)]
            fn capacity() -> usize { $len }
        }
    )
}
// Expands `fix_array_impl!` once per listed length, recursing through the
// comma-separated list until it is empty.
macro_rules! fix_array_impl_recursive {
    ($index_type:ty, ) => ();
    ($index_type:ty, $len:expr, $($more:expr,)*) => (
        fix_array_impl!($index_type, $len);
        fix_array_impl_recursive!($index_type, $($more,)*);
    );
}
// Supported array lengths, grouped by the smallest index type that can
// address them: u8 up to 224, u16 up to 32768, u32 for 65536.
fix_array_impl_recursive!(u8, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15,
                          16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31,
                          32, 40, 48, 56, 64, 72, 96, 128, 160, 192, 224,);
fix_array_impl_recursive!(u16, 256, 384, 512, 768, 1024, 2048, 4096, 8192, 16384, 32768,);
// This array size doesn't exist on 16-bit
#[cfg(any(target_pointer_width="32", target_pointer_width="64"))]
fix_array_impl_recursive!(u32, 1 << 16,);
| true |
d776f8241c3b66081eb57e7c4d4606a36f77c2c6
|
Rust
|
richwandell/rust-redis
|
/src/command/command_del.rs
|
UTF-8
| 596 | 2.5625 | 3 |
[] |
no_license
|
use std::sync::{Arc, Mutex};
use std::collections::HashMap;
use crate::server::Storage;
use crate::command::command_response::{CommandResponse, CommandError};
/// Deletes each key named in `commands` from the shared store and reports
/// how many entries were actually removed.
pub(crate) fn command_del(
    commands: Vec<Storage>,
    data_map_mutex: &Arc<Mutex<HashMap<String, Storage>>>
) -> Result<CommandResponse, CommandError> {
    // Panics if the mutex is poisoned (i.e. a holder panicked mid-update).
    let data_map = &mut*data_map_mutex.lock().unwrap();
    let mut removed = 0;
    for key in commands {
        // `storage_string!` presumably extracts the string payload from the
        // Storage value — defined elsewhere in this crate; verify there.
        let key = storage_string!(key);
        // `remove` returns the old value only if the key existed, so this
        // counts real deletions rather than attempts.
        if let Some(_) = data_map.remove(&key) {
            removed += 1;
        }
    }
    Ok(CommandResponse::Del {removed})
}
| true |
75f38b0e65caee5aacd88741b721377f04a1bb3b
|
Rust
|
tankofzion/narc-rs
|
/src/syntax/core/subst/prim.rs
|
UTF-8
| 6,232 | 3 | 3 |
[
"Apache-2.0"
] |
permissive
|
use std::rc::Rc;
use either::Either;
use voile_util::uid::DBI;
use crate::syntax::core::subst::{DeBruijn, RedEx};
/// Substitution type.
/// [Agda](https://hackage.haskell.org/package/Agda-2.6.0.1/docs/src/Agda.Syntax.Internal.html#Substitution%27).
// Tails are behind `Rc`, so composing/extending substitutions shares
// structure instead of deep-copying it.
#[derive(Clone)]
pub enum PrimSubst<T> {
    /// The identity substitution.
    /// $$
    /// \Gamma \vdash \text{IdS} : \Gamma
    /// $$
    IdS,
    /// The "add one more" substitution, or "substitution extension".
    /// $$
    /// \cfrac{\Gamma \vdash u : A \rho \quad \Gamma \vdash \rho : \Delta}
    /// {\Gamma \vdash \text{Cons}(u, \rho) : \Delta, A}
    /// $$
    Cons(T, Rc<Self>),
    /// Strengthening substitution.
    /// Apply this to a term which does not contain variable 0
    /// to lower all de Bruijn indices by one.
    /// $$
    /// \cfrac{\Gamma \vdash \rho : \Delta}
    /// {\Gamma \vdash \text{Succ} \rho : \Delta, A}
    /// $$
    Succ(Rc<Self>),
    /// Weakening substitution, lifts to an extended context.
    /// $$
    /// \cfrac{\Gamma \vdash \rho : \Delta}
    /// {\Gamma, \Psi \vdash \text{Weak}_\Psi \rho : \Delta}
    /// $$
    Weak(DBI, Rc<Self>),
    /// Lifting substitution. Use this to go under a binder.
    /// $\text{Lift}\_1 \rho := \text{Cons}(\texttt{Term::form\\\_dbi(0)},
    /// \text{Weak}\_1 \rho)$. $$
    /// \cfrac{\Gamma \vdash \rho : \Delta}
    /// {\Gamma, \Psi \rho \vdash \text{Lift}_\Psi \rho : \Delta, \Psi}
    /// $$
    Lift(DBI, Rc<Self>),
}
impl<T> Default for PrimSubst<T> {
fn default() -> Self {
PrimSubst::IdS
}
}
impl<Term: DeBruijn + RedEx<Term, Term> + Clone> PrimSubst<Term> {
    /// Total lookup: resolves `dbi` through the substitution, cloning when
    /// `lookup_impl` only returned a borrow into the structure.
    pub fn lookup(&self, dbi: DBI) -> Term {
        self.lookup_impl(dbi).map_left(Clone::clone).into_inner()
    }
    /// Raise all free de Bruijn indices of `term` by `k`.
    /// [Agda](https://hackage.haskell.org/package/Agda-2.6.0.1/docs/src/Agda.TypeChecking.Substitute.Class.html#raise).
    pub fn raise_term(k: DBI, term: Term) -> Term {
        Self::raise_from(DBI(0), k, term)
    }
    /// Like `raise_term`, but leaves the first `n` indices untouched.
    /// [Agda](https://hackage.haskell.org/package/Agda-2.6.0.1/docs/src/Agda.TypeChecking.Substitute.Class.html#raiseFrom).
    pub fn raise_from(n: DBI, k: DBI, term: Term) -> Term {
        term.reduce_dbi(Self::raise(k).lift_by(n))
    }
    /// Composes two substitutions (`self` after `sgm`).
    /// [Agda](https://hackage.haskell.org/package/Agda-2.6.0.1/docs/src/Agda.TypeChecking.Substitute.Class.html#composeS).
    pub fn compose(self: Rc<Self>, sgm: Rc<Self>) -> Rc<Self> {
        use PrimSubst::*;
        match (&*self, &*sgm) {
            // Identity on either side is a no-op.
            (_, IdS) => self,
            (IdS, _) => sgm,
            // self, EmptyS(err) => EmptyS(err)
            (_, Weak(n, sgm)) => self.drop_by(*n).compose(sgm.clone()),
            (_, Cons(u, sgm)) => Rc::new(Cons(
                u.clone().reduce_dbi(self.clone()),
                self.compose(sgm.clone()),
            )),
            (_, Succ(sgm)) => Rc::new(Succ(self.compose(sgm.clone()))),
            // `lift_by` never builds a zero lift, so this arm cannot occur.
            (_, Lift(DBI(0), _sgm)) => unreachable!(),
            (Cons(u, rho), Lift(n, sgm)) => Rc::new(Cons(
                u.clone(),
                rho.clone().compose(sgm.clone().lift_by(*n - 1)),
            )),
            (_, Lift(n, sgm)) => Rc::new(Cons(
                self.lookup(DBI(0)),
                self.compose(sgm.clone().lift_by(*n - 1).weaken(DBI(1))),
            )),
        }
    }
    /// If lookup failed, return the DBI.
    /// [Agda](https://hackage.haskell.org/package/Agda-2.6.0.1/docs/src/Agda.TypeChecking.Substitute.Class.html#lookupS).
    pub fn lookup_impl(&self, dbi: DBI) -> Either<&Term, Term> {
        use Either::*;
        use PrimSubst::*;
        match self {
            // Identity maps an index to the variable with that index.
            IdS => Right(DeBruijn::from_dbi(dbi)),
            Cons(o, rest) => match dbi.nat() {
                // Index 0 hits the head of the extension.
                None => Left(o),
                Some(dbi) => rest.lookup_impl(dbi),
            },
            Succ(rest) => rest.lookup_impl(dbi.pred()),
            Weak(i, rest) => match &**rest {
                IdS => Right(Term::from_dbi(dbi + *i)),
                rho => Right(rho.lookup(*i).reduce_dbi(Self::raise(*i))),
            },
            // Under a lift, the first `n` variables are untouched.
            Lift(n, _) if dbi < *n => Right(DeBruijn::from_dbi(dbi)),
            Lift(n, rest) => Right(Self::raise_term(*n, rest.lookup(dbi - *n))),
        }
    }
}
impl<T> PrimSubst<T> {
    /// Substitution that raises every free de Bruijn index by `by`
    /// (weakening of the identity).
    /// [Agda](https://hackage.haskell.org/package/Agda-2.6.0.1/docs/src/Agda.TypeChecking.Substitute.Class.html#raiseS).
    pub fn raise(by: DBI) -> Rc<Self> {
        Self::weaken(Default::default(), by)
    }
    /// Drop the first `drop_by` entries of the substitution.
    /// (The previous doc said "Lift a substitution under k binders" — a
    /// copy-paste of `lift_by`'s doc; this is the Agda `dropS`.)
    /// [Agda](https://hackage.haskell.org/package/Agda-2.6.0.1/docs/src/Agda.TypeChecking.Substitute.Class.html#dropS).
    pub fn drop_by(self: Rc<Self>, drop_by: DBI) -> Rc<Self> {
        use PrimSubst::*;
        match (drop_by, &*self) {
            (DBI(0), _) => self,
            (n, IdS) => Self::raise(n),
            (n, Weak(m, rho)) => rho.clone().drop_by(n - 1).weaken(*m),
            (n, Cons(_, rho)) | (n, Succ(rho)) => rho.clone().drop_by(n - 1),
            // n, EmptyS(err) => absurd(err)
            // Pass the format string straight to the macro: the old
            // `unreachable!(&format!(..))` handed it a `&String`, which
            // trips the `non_fmt_panics` lint and is an error in the 2021
            // edition. The panic message is unchanged.
            (n, Lift(DBI(0), _rho)) => unreachable!("n = {:?}", n),
            (n, Lift(m, rho)) => rho.clone().lift_by(*m - 1).drop_by(n - 1).weaken(DBI(1)),
        }
    }
    /// Lift a substitution under k binders.
    /// [Agda](https://hackage.haskell.org/package/Agda-2.6.0.1/docs/src/Agda.TypeChecking.Substitute.Class.html#liftS).
    pub fn lift_by(self: Rc<Self>, lift_by: DBI) -> Rc<Self> {
        use PrimSubst::*;
        match (lift_by, &*self) {
            (DBI(0), _) => self,
            (_, IdS) => Default::default(),
            // Merge nested lifts instead of stacking them.
            (k, Lift(n, rho)) => Rc::new(Lift(*n + k, rho.clone())),
            (k, _) => Rc::new(Lift(k, self)),
        }
    }
    /// Weakening: compose with `weaken_by` context extensions.
    /// [Agda](https://hackage.haskell.org/package/Agda-2.6.0.1/docs/src/Agda.TypeChecking.Substitute.Class.html#wkS).
    pub fn weaken(self: Rc<Self>, weaken_by: DBI) -> Rc<Self> {
        use PrimSubst::*;
        match (weaken_by, &*self) {
            (DBI(0), _) => self,
            // Merge nested weakenings.
            (n, Weak(m, rho)) => Rc::new(Weak(n + *m, rho.clone())),
            // n, EmptyS(err) => EmptyS(err)
            (n, _) => Rc::new(Weak(n, self)),
        }
    }
    /// Singleton substitution: `Cons(t, IdS)`.
    pub fn one(t: T) -> Rc<Self> {
        Rc::new(PrimSubst::Cons(t, Default::default()))
    }
}
| true |
1c0cd3374ce9398e41690f9ad1da8212aa66d8cc
|
Rust
|
KeAiMianYang/work_repo_training
|
/Rust/book/08_02_strings/src/main.rs
|
UTF-8
| 1,482 | 3.828125 | 4 |
[] |
no_license
|
/// Walkthrough of Rust string handling: UTF-8 byte layout, `char`
/// iteration, and grapheme clusters (via `unicode_segmentation`).
fn main() {
    // strings are UTF-8 encoded
    // they are an array of bytes, and a character can be made of more than one byte
    let s1: String = "data".to_string();
    let s2: String = String::from("你好");
    let mut s3 = format!("{} {}", s1, s2);
    s3.push_str(" aaa");
    s3.push('l');
    //========== indexing into string ==========//
    // s3[0]
    // doesn't work, needs to precise if user wants:
    // bytes (actual content stored)
    // scalar values (`char`)
    // grapheme clusters (human characters)
    // NOTE: this `s` is illustrative only and immediately shadowed below.
    let s = &s3[0..4]; // clearly tells it it's a string slice. the program will panic! if the slice is not valid unicode characters
    let s: String = String::from("你好");
    println!("你好 is of length {}", s.len()); // each char is 3 bytes
    print!("你好 in bytes: ");
    // `bytes()` yields the raw u8 values of the UTF-8 encoding.
    for c in "你好".bytes()
    {
        print!("{}, ", c);
    }
    print!("\n");
    print!("你好 in chars: ");
    // `chars()` yields Unicode scalar values, which may span several bytes.
    for c in "你好".chars()
    {
        print!("{}, ", c);
    }
    print!("\n");
    print!("नमस्ते in chars: ");
    /*
    न
    म
    स
    ्
    त
    े
    */
    // Note: combining marks come out as separate scalar values here.
    for c in "नमस्ते".chars()
    {
        print!("{}, ", c);
    }
    print!("\n");
    //=== handle grapheme clusters ===//
    use unicode_segmentation::UnicodeSegmentation;
    // `true` requests extended grapheme clusters (what humans see as characters).
    let g = UnicodeSegmentation::graphemes("नमस्ते", true).collect::<Vec<&str>>();
    println!("{:?}", g);
    for c in &g
    {
        print!("{}, ", c);
    }
}
| true |
ab2d4255a55c6497f3ce86919ec31c27aedb36ac
|
Rust
|
drmorr0/avr-hal
|
/chips/atmega8u2-hal/src/pwm.rs
|
UTF-8
| 3,313 | 3.21875 | 3 |
[
"MIT",
"Apache-2.0"
] |
permissive
|
//! Support for PWM pins
//!
//! The 2 timers of ATmega8U2 can be used for PWM on certain pins.
//! The PWM methods are from `embedded_hal::PwmPin`.
//!
//! # Example
//! ```
//! let mut portb = dp.PORTB.split();
//! let mut timer1 = Timer1Pwm::new(dp.TC1, pwm::Prescaler::Prescale64);
//!
//! let mut pb7 = portb.pb7.into_output(&mut portb.ddr).into_pwm(&mut timer1);
//!
//! pb7.set_duty(128);
//! pb7.enable();
//! ```
//!
//! Here is an overview of pins and which timer they work with:
//!
//! | Pin | Conversion Method | Alternate Conversion Method |
//! | --- | --- | --- |
//! | `PB7` | `.into_pwm(&mut timer0)` | `.into_pwm(&mut timer1)` |
use crate::port::portb;
pub use avr_hal_generic::pwm::*;
avr_hal_generic::impl_pwm! {
    /// Use `TC0` for PWM
    ///
    /// # Example
    /// ```
    /// let mut portb = dp.PORTB.split();
    /// let mut timer0 = Timer0Pwm::new(dp.TC0, pwm::Prescaler::Prescale64);
    ///
    /// let mut pb7 = portb.pb7.into_output(&mut portb.ddr).into_pwm(&mut timer0);
    ///
    /// pb7.set_duty(128);
    /// pb7.enable();
    /// ```
    pub struct Timer0Pwm {
        timer: crate::pac::TC0,
        init: |tim, prescaler| {
            // Fast-PWM waveform generation mode; clock source per prescaler.
            tim.tccr0a.modify(|_, w| w.wgm0().pwm_fast());
            tim.tccr0b.modify(|_, w| match prescaler {
                Prescaler::Direct => w.cs0().direct(),
                Prescaler::Prescale8 => w.cs0().prescale_8(),
                Prescaler::Prescale64 => w.cs0().prescale_64(),
                Prescaler::Prescale256 => w.cs0().prescale_256(),
                Prescaler::Prescale1024 => w.cs0().prescale_1024(),
            });
        },
        pins: {
            portb::PB7: {
                // Duty cycle is written to output-compare register OCR0A.
                ocr: ocr0a,
                into_pwm: |tim| if enable {
                    // match_clear: output cleared on compare match (non-inverting PWM).
                    tim.tccr0a.modify(|_, w| w.com0a().match_clear());
                } else {
                    tim.tccr0a.modify(|_, w| w.com0a().disconnected());
                },
            },
        },
    }
}
avr_hal_generic::impl_pwm! {
    /// Use `TC1` for PWM
    ///
    /// # Example
    /// ```
    /// let mut portb = dp.PORTB.split();
    /// let mut timer1 = Timer1Pwm::new(dp.TC1, pwm::Prescaler::Prescale64);
    ///
    /// let mut pb7 = portb.pb7.into_output(&mut portb.ddr).into_pwm1(&mut timer1);
    ///
    /// pb7.set_duty(128);
    /// pb7.enable();
    /// ```
    pub struct Timer1Pwm {
        timer: crate::pac::TC1,
        init: |tim, prescaler| {
            // WGM1 bits are written raw (0b01) rather than via a named
            // variant as on TC0 — presumably the PAC lacks a named setter
            // for this mode; verify against the atmega8u2 PAC.
            tim.tccr1a.modify(|_, w| w.wgm1().bits(0b01));
            tim.tccr1b.modify(|_, w| {
                w.wgm1().bits(0b01);
                match prescaler {
                    Prescaler::Direct => w.cs1().direct(),
                    Prescaler::Prescale8 => w.cs1().prescale_8(),
                    Prescaler::Prescale64 => w.cs1().prescale_64(),
                    Prescaler::Prescale256 => w.cs1().prescale_256(),
                    Prescaler::Prescale1024 => w.cs1().prescale_1024(),
                }
            });
        },
        pins: {
            portb::PB7: {
                // Duty cycle is written to output-compare register OCR1C.
                ocr: ocr1c,
                // Named `into_pwm1` so it does not collide with TC0's
                // `into_pwm` on the same pin.
                into_pwm1: |tim| if enable {
                    tim.tccr1a.modify(|_, w| w.com1c().match_clear());
                } else {
                    tim.tccr1a.modify(|_, w| w.com1c().disconnected());
                },
            },
        },
    }
}
| true |
12084c4015b148fd4e312d8c87d05e946584d1d4
|
Rust
|
ehuss/raytracer
|
/src/hitable_list.rs
|
UTF-8
| 1,955 | 3.078125 | 3 |
[] |
no_license
|
use hitable::*;
use ray::Ray;
use aabb::*;
use util::*;
use vec3::*;
#[derive(Debug)]
pub struct HitableList<'a> {
list: Vec<Box<Hitable + 'a>>,
}
impl<'a> HitableList<'a> {
    /// Creates an empty list.
    pub fn new() -> HitableList<'a> {
        HitableList { list: Vec::new() }
    }
    /// Boxes `h` and appends it to the list.
    pub fn add_hitable<T: Hitable + 'a>(&mut self, h: T) {
        self.list.push(Box::new(h));
    }
}
impl<'a> Hitable for HitableList<'a> {
    /// Returns the closest hit among all members within `(t_min, t_max)`,
    /// or `None` when the ray misses everything.
    fn hit(&self, rng: &mut Rng, r: &Ray<f64>, t_min: f64, t_max: f64) -> Option<HitRecord> {
        let mut closest: Option<HitRecord> = None;
        for h in &self.list {
            // Shrink the search window to the nearest hit found so far.
            let t_hi = closest.as_ref().map_or(t_max, |hr| hr.t);
            if let Some(hr) = h.hit(rng, r, t_min, t_hi) {
                closest = Some(hr);
            }
        }
        closest
    }
    /// The union of all member boxes; `None` if the list is empty or any
    /// member is unbounded.
    fn bounding_box(&self, t0: f64, t1: f64) -> Option<AABB> {
        let (first, rest) = self.list.split_first()?;
        let mut bb = first.bounding_box(t0, t1)?;
        for h in rest {
            // Any unbounded member makes the whole list unbounded.
            bb = surrounding_box(&bb, &h.bounding_box(t0, t1)?);
        }
        Some(bb)
    }
    /// Uniform mixture PDF: the average of the members' PDF values.
    fn pdf_value(&self, rng: &mut Rng, o: &Vec3<f64>, v: &Vec3<f64>) -> f64 {
        let weight = 1. / self.list.len() as f64;
        self.list
            .iter()
            .map(|h| weight * h.pdf_value(rng, o, v))
            .sum()
    }
    /// Samples a direction from a uniformly chosen member.
    fn random(&self, rng: &mut Rng, o: &Vec3<f64>) -> Vec3<f64> {
        let index = (rng.rand64() * self.list.len() as f64) as usize;
        self.list[index].random(rng, o)
    }
}
| true |
40a6756d6b142d31f53b2ab6053483ef84503124
|
Rust
|
Frezc/leetcode-solutions
|
/src/n0048_rotate_image.rs
|
UTF-8
| 3,140 | 3.6875 | 4 |
[] |
no_license
|
/**
* [48] Rotate Image
*
* You are given an n x n 2D matrix representing an image.
*
* Rotate the image by 90 degrees (clockwise).
*
* Note:
*
* You have to rotate the image <a href="https://en.wikipedia.org/wiki/In-place_algorithm" target="_blank">in-place</a>, which means you have to modify the input 2D matrix directly. DO NOT allocate another 2D matrix and do the rotation.
*
* Example 1:
*
*
* Given input matrix =
* [
* [1,2,3],
* [4,5,6],
* [7,8,9]
* ],
*
* rotate the input matrix in-place such that it becomes:
* [
* [7,4,1],
* [8,5,2],
* [9,6,3]
* ]
*
*
* Example 2:
*
*
* Given input matrix =
* [
* [ 5, 1, 9,11],
* [ 2, 4, 8,10],
* [13, 3, 6, 7],
* [15,14,12,16]
* ],
*
* rotate the input matrix in-place such that it becomes:
* [
* [15,13, 2, 5],
* [14, 3, 4, 1],
* [12, 6, 8, 9],
* [16, 7,10,11]
* ]
*
*
*/
/// In-place clockwise rotation via two passes:
/// 1. transpose the matrix across its main diagonal,
/// 2. reverse every row.
/// # Example
/// [
///  [1,2,3],
///  [4,5,6],
///  [7,8,9]
/// ],
/// transpose ->
/// [
///  [1,4,7],
///  [2,5,8],
///  [3,6,9]
/// ],
/// reverse rows ->
/// [
///  [7,4,1],
///  [8,5,2],
///  [9,6,3]
/// ]
pub struct Solution {}

// submission codes start here

impl Solution {
    /// Rotates the n x n `matrix` 90 degrees clockwise without allocating
    /// a second matrix. An empty matrix is a no-op.
    pub fn rotate(matrix: &mut Vec<Vec<i32>>) {
        let n = matrix.len();
        // Transpose: swap each element above the diagonal with its mirror.
        for row in 0..n {
            for col in row + 1..n {
                let tmp = matrix[row][col];
                matrix[row][col] = matrix[col][row];
                matrix[col][row] = tmp;
            }
        }
        // Reversing each transposed row completes the clockwise rotation.
        for row in matrix.iter_mut() {
            row.reverse();
        }
    }
}
// submission codes end
#[cfg(test)]
mod tests {
    use super::*;

    // Covers a 3x3 and a 4x4 rotation plus the empty and 1x1 edge cases.
    #[test]
    fn test_48() {
        let mut matrix1 = vec![
            vec![1, 2, 3],
            vec![4, 5, 6],
            vec![7, 8, 9],
        ];
        Solution::rotate(&mut matrix1);
        assert_eq!(matrix1, vec![
            vec![7, 4, 1],
            vec![8, 5, 2],
            vec![9, 6, 3],
        ]);
        let mut matrix2 = vec![
            vec![5, 1, 9, 11],
            vec![2, 4, 8, 10],
            vec![13, 3, 6, 7],
            vec![15, 14, 12, 16],
        ];
        Solution::rotate(&mut matrix2);
        assert_eq!(matrix2, vec![
            vec![15, 13, 2, 5],
            vec![14, 3, 4, 1],
            vec![12, 6, 8, 9],
            vec![16, 7, 10, 11]
        ]);
        // Degenerate inputs must not panic.
        let mut matrix3 = vec![vec![0; 0]; 0];
        Solution::rotate(&mut matrix3);
        assert_eq!(matrix3, vec![vec![0; 0]; 0]);
        let mut matrix4 = vec![
            vec![13],
        ];
        Solution::rotate(&mut matrix4);
        assert_eq!(matrix4, vec![
            vec![13],
        ]);
    }
}
| true |
51a88e326d18da5cc2207f503f0d812e74f544b5
|
Rust
|
kuwana-kb/ddd-in-rust
|
/chapter08_sample_application/src/domain/value_object/name.rs
|
UTF-8
| 859 | 3.15625 | 3 |
[] |
no_license
|
use std::str::FromStr;
use anyhow::Result;
use common::MyError;
use derive_more::Display;
use serde::{de, Serialize};
/// A validated user name: 3..=20 characters, enforced by [`Name::new`].
#[derive(Clone, Debug, PartialEq, Eq, Display, Serialize)]
pub struct Name(String);
impl Name {
    /// Validates `s` and wraps it; the name must be 3 to 20 characters long
    /// (counted as Unicode scalar values, not bytes).
    pub fn new(s: &str) -> Result<Self> {
        let length = s.chars().count();
        if !(3..=20).contains(&length) {
            bail!(MyError::type_error("ユーザ名は3文字以上、20文字以下です"))
        }
        Ok(Name(s.to_string()))
    }
}
impl FromStr for Name {
type Err = anyhow::Error;
fn from_str(s: &str) -> Result<Self> {
Self::new(s)
}
}
// Hand-written Deserialize so that names arriving from serialized input go
// through the same validation as `Name::new` instead of being accepted raw.
impl<'de> de::Deserialize<'de> for Name {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: de::Deserializer<'de>,
    {
        // Deserialize as a plain string first, then validate.
        let s = String::deserialize(deserializer)?;
        Self::new(&s).map_err(de::Error::custom)
    }
}
| true |
cbb64e8405e1527a15213357a4046eca635936d2
|
Rust
|
Pfarrer/rust-jvm
|
/_deprecated/src/vm/native/java_lang_class_loader_native_library.rs
|
UTF-8
| 1,183 | 2.546875 | 3 |
[] |
no_license
|
use vm::Vm;
use vm::primitive::Primitive;
use vm::utils;
/// Dispatches a native method call to its Rust implementation by method name,
/// panicking when no implementation exists for the requested method.
pub fn invoke(vm: &mut Vm, class_path: &String, method_name: &String, method_signature: &String) {
    match method_name.as_ref() {
        "load" => load(vm, class_path, method_name, method_signature), // (Ljava/lang/String;)V
        _ => panic!("Native implementation of method {}.{}{} missing.", class_path, method_name, method_signature),
    }
}
/// (Ljava/lang/String;)V
fn load(vm: &mut Vm, class_path: &String, method_name: &String, method_signature: &String) {
trace!("Execute native {}.{}{}", class_path, method_name, method_signature);
let value = {
let frame = vm.frame_stack.last_mut().unwrap();
let rc_string = frame.stack_pop_objectref();
let string = rc_string.borrow();
utils::get_java_string_value(&*string)
};
assert_eq!("/libzip.so", value);
let frame = vm.frame_stack.last_mut().unwrap();
let rc_nativelib = frame.stack_pop_objectref();
let mut nativelib = rc_nativelib.borrow_mut();
nativelib.fields.insert("handle".to_owned(), Primitive::Long(1337));
warn!("Loading /libzip.so not implemented... will fake a handle value");
}
| true |
750fa831e8a9ee39954198e68ddc088290af0d67
|
Rust
|
warnp/coding_dojo_belote
|
/src/card.rs
|
UTF-8
| 1,017 | 3.953125 | 4 |
[] |
no_license
|
/// A playing card, identified by a figure (rank) and a color (suit).
pub struct Card {
    figure: String,
    color: String,
}

impl Card {
    /// Builds a card from owned figure/color strings.
    pub fn new(figure: String, color: String) -> Card {
        Card { figure, color }
    }

    /// Builds a card from string slices, copying them into owned strings.
    pub fn new_with_pointers(figure: &str, color: &str) -> Card {
        Card::new(figure.to_string(), color.to_string())
    }

    /// Returns a copy of the card's figure.
    pub fn get_figure(&self) -> String {
        self.figure.clone()
    }

    /// Returns a copy of the card's color.
    pub fn get_color(&self) -> String {
        self.color.clone()
    }
}
#[cfg(test)]
mod tests{
    use super::*;

    // Getters must echo back what the owned-string constructor received.
    #[test]
    fn retrieve_card_info(){
        let card = Card::new("jack".to_string(),"heart".to_string());
        assert_eq!(card.get_figure(), "jack".to_string());
        assert_eq!(card.get_color(), "heart".to_string());
    }

    // Same contract for the &str convenience constructor.
    #[test]
    fn retrieve_card_info_with_pointer(){
        let card = Card::new_with_pointers("jack","heart");
        assert_eq!(card.get_figure(), "jack".to_string());
        assert_eq!(card.get_color(), "heart".to_string());
    }
}
| true |
a088f762db0b184ad9dc2a3d11a992721ae5b62f
|
Rust
|
manokara/bencode-rs
|
/src/parser.rs
|
UTF-8
| 32,408 | 3.234375 | 3 |
[] |
no_license
|
use super::Value;
use std::{
cell::RefCell,
collections::BTreeMap,
convert::{TryFrom, TryInto},
fmt,
io::{Error as IoError, Read, Result as IOResult, Seek, SeekFrom},
rc::Rc,
};
/// Maximum number of characters accepted while scanning an integer.
const MAX_INT_BUF: usize = 32;
/// Maximum container nesting depth, enforced only for unsized streams.
const MAX_DEPTH: usize = 32;
/// Number of bytes read from the stream per buffer refill.
const CHUNK_SIZE: u64 = 8096;
/// Single-byte structural tokens of the bencode grammar
/// ('d', 'i', 'l', 'e', ':'); see the `Into`/`TryFrom` impls below.
enum Token {
    Dict,
    Int,
    List,
    End,
    Colon,
}
/// States of the parser's pushdown automaton in `real_load`.
/// `*Val*` states read a value of the named kind, `*Flush` states commit a
/// finished value into its parent container, and `RootVal*` states finish
/// the parse with the named root kind.
#[derive(Debug, PartialEq)]
enum State {
    Root,
    Int,
    Str,
    DictKey,
    DictVal,
    // Continuation of a string that spans more than one buffered chunk.
    StrRem,
    DictFlush,
    DictValStr,
    DictValInt,
    DictValDict,
    DictValList,
    ListVal,
    ListValStr,
    ListValInt,
    ListValDict,
    ListValList,
    ListFlush,
    RootValInt,
    RootValStr,
    RootValDict,
    RootValList,
}
/// A value being assembled by the parser: either a shared handle to a
/// container still under construction, or an owned finished value.
enum LocalValue {
    // Dict under construction; the same Rc also lives on `dict_stack`.
    DictRef(Rc<RefCell<BTreeMap<String, Value>>>),
    // List under construction; the same Rc also lives on `list_stack`.
    ListRef(Rc<RefCell<Vec<Value>>>),
    // A completed primitive or container value.
    Owned(Value),
}
/// An error from the [`load`] function.
///
/// [`load`]: fn.load.html
#[derive(Debug)]
pub enum ParserError {
    /// `(inner_error)`
    ///
    /// An IO error has occured.
    Io(IoError),
    /// Stream was empty.
    Empty,
    /// `(position, reason)`
    ///
    /// There was a syntax error in the stream. The position is a 0-based byte
    /// offset (rendered 1-based by the `Display` impl).
    Syntax(usize, String),
    /// Reached end of stream while trying to parse something.
    ///
    /// This can be caused by a missing 'e' token, or the stream simply cuts in the middle of the
    /// structure.
    Eof,
    /// The root value was not the one expected.
    ///
    /// This is only returned by the [`load_dict`] and [`load_list`] functions. Note that when using
    /// these functions, this variant will used if there's anything other than "start structure"
    /// token.
    ///
    /// [`load_dict`]: fn.load_dict.html
    /// [`load_list`]: fn.load_list.html
    UnexpectedRoot,
    /// Too many nested structures
    ///
    /// This can only happen if the stream has no size. See [`Stream`].
    ///
    /// [`Stream`]: enum.Stream.html
    RecursionLimit,
}
/// Wrapper for the input stream used in [`load`].
///
/// Sized streams are slices and any type that implements [`Read`] and [`Seek`]. In the latter, if
/// there are IO errors of any kind when seeking, the stream will be considered unsized.
///
/// Ideally, types that only implement [`Read`] would be converted automatically, but that would
/// require [specialization] which is very unstable. You must use Stream explicitly with the
/// [`new`] method.
///
/// [`load`]: fn.load.html
/// [`Read`]: https://doc.rust-lang.org/std/io/trait.Read.html
/// [`Seek`]: https://doc.rust-lang.org/std/io/trait.Seek.html
/// [specialization]: https://github.com/rust-lang/rfcs/blob/master/text/1210-impl-specialization.md
/// [`new`]: #method.new
pub enum Stream<'a> {
    /// Reader with a known total size in bytes.
    Sized(Box<&'a mut dyn Read>, usize),
    /// In-memory byte slice together with its length.
    Slice(&'a [u8], usize),
    /// Reader of unknown size; parsing depth is capped for these streams.
    Unsized(Box<&'a mut dyn Read>),
}
/// Parse a bencode data structure from a stream.
///
/// If you expect the stream to contain a certain type, see the [`load_dict`], [`load_list`] and
/// [`load_prim`] functions.
///
/// The parser will try to convert bytestrings to UTF-8 and return a [`Value::Str`] variant if the
/// conversion is succesful, otherwise the value will be [`Value::Bytes`].
///
/// # Stream
///
/// Depending on the input stream, the parser will behave differently. If the stream is sized (see
/// [`Stream`]) the parser will try to run until EOF, otherwise it will run as long as it does not
/// exceed the recursion limit (32 containers deep).
///
/// # Errors
///
/// There are many ways this function can fail. It will fail if the stream is empty, if there are
/// syntax errors or an integer string is way too big. See [`ParserError`].
///
/// [`load_dict`]: fn.load_dict.html
/// [`load_list`]: fn.load_list.html
/// [`load_prim`]: fn.load_prim.html
/// [`Value::Str`]: enum.Value.html#variant.Str
/// [`Value::Bytes`]: enum.Value.html#variant.Bytes
/// [`Stream`]: enum.Stream.html
/// [`ParserError`]: enum.ParserError.html
pub fn load<'a, S>(stream: S) -> Result<Value, ParserError>
where
    S: Into<Stream<'a>>,
{
    // Root state: no token has been consumed yet, so no look-ahead byte.
    real_load(stream, State::Root, None)
}
/// Parse a bencode stream whose root value must be a dict.
///
/// Reads a single byte and only continues parsing when that byte is the 'd'
/// token, so nothing is allocated for streams that cannot satisfy the caller.
///
/// # Errors
///
/// Fails with `ParserError::UnexpectedRoot` when the first byte is not 'd'.
/// Any other parsing error may be returned once parsing proceeds.
pub fn load_dict<'a, S>(stream: S) -> Result<Value, ParserError>
where
    S: Into<Stream<'a>>,
{
    let mut stream = stream.into();
    let mut first = [0u8; 1];
    stream.read_exact(&mut first)?;
    if let Ok(Token::Dict) = first[0].try_into() {
        // The 'd' is already consumed, so enter the parser at the key state.
        real_load(stream, State::DictKey, None)
    } else {
        Err(ParserError::UnexpectedRoot)
    }
}
/// Parse a bencode stream whose root value must be a list.
///
/// Reads a single byte and only continues parsing when that byte is the 'l'
/// token, so nothing is allocated for streams that cannot satisfy the caller.
///
/// # Errors
///
/// Fails with `ParserError::UnexpectedRoot` when the first byte is not 'l'.
/// Any other parsing error may be returned once parsing proceeds.
pub fn load_list<'a, S>(stream: S) -> Result<Value, ParserError>
where
    S: Into<Stream<'a>>,
{
    let mut stream = stream.into();
    let mut first = [0u8; 1];
    stream.read_exact(&mut first)?;
    if let Ok(Token::List) = first[0].try_into() {
        // The 'l' is already consumed, so enter the parser at the item state.
        real_load(stream, State::ListVal, None)
    } else {
        Err(ParserError::UnexpectedRoot)
    }
}
/// Parse a bencode stream expecting a primitive as the root value.
///
/// If your application requires the root value to a be a primitive and you want to avoid
/// unnecessary allocations, this function will check if the first token in the stream is not one of
/// the container tokens ('d' or 'l') and then actually parse the stream.
///
/// # Errors
///
/// If the first character in the stream is a 'd' or 'l', we fail with
/// `ParserError::UnexpectedRoot`. Other parsing errors may be returned following the check.
pub fn load_prim<'a, S>(stream: S) -> Result<Value, ParserError>
where
    S: Into<Stream<'a>>,
{
    let mut buf = [0u8];
    let mut stream = stream.into();
    stream.read_exact(&mut buf)?;
    match buf[0].try_into() {
        Ok(Token::List) => Err(ParserError::UnexpectedRoot),
        Ok(Token::Dict) => Err(ParserError::UnexpectedRoot),
        Ok(Token::End) | Ok(Token::Colon) => Err(ParserError::Syntax(
            0,
            format!("Unexpected '{}' token", buf[0] as char),
        )),
        Ok(Token::Int) => real_load(stream, State::Int, None),
        // A non-token byte must be the first digit of a string length; pass
        // it along so the integer scanner can consume it.
        Err(_) => real_load(stream, State::Str, Some(buf[0])),
    }
}
/// Core state-machine parser behind `load`, `load_dict`, `load_list` and `load_prim`.
///
/// `initial_state` selects where the machine starts: `State::Root` for a full
/// parse, or a container/primitive state when the caller already consumed the
/// first token. `first_char` carries a byte the caller read ahead (used by
/// `load_prim` for string values); it is fed to the integer scanner before the
/// stream is consulted again.
fn real_load<'a, S>(
    stream: S,
    initial_state: State,
    mut first_char: Option<u8>,
) -> Result<Value, ParserError>
where
    S: Into<Stream<'a>>,
{
    const NOTHING: &[u8] = b"";
    let stream = &mut stream.into();
    let file_size = stream.size();
    if let Some(&size) = file_size.as_ref() {
        if size == 0 {
            return Err(ParserError::Empty);
        }
    }
    let mut file_index = 0u64;
    let mut buf_index = 0usize;
    let mut state = initial_state;
    let mut next_state = Vec::new();
    let mut buf = Vec::<u8>::with_capacity(CHUNK_SIZE as usize);
    let mut buf_chars;
    let mut buf_str = Vec::new();
    let mut buf_str_remainder = 0u64;
    let mut buf_int = String::new();
    let mut key_stack = Vec::new();
    let mut val_stack = Vec::new();
    let mut item_stack = Vec::new();
    let mut dict_stack = Vec::new();
    let mut list_stack = Vec::new();
    let mut depth = 0;
    let root;
    stream.take(CHUNK_SIZE).read_to_end(&mut buf)?;
    buf_chars = buf.iter().peekable();
    // Initial state
    match state {
        State::Root => {
            let c = **buf_chars.peek().unwrap();
            match c.try_into() {
                // Dict value
                Ok(Token::Dict) => {
                    buf_chars.next();
                    buf_index += 1;
                    depth += 1;
                    dict_stack.push(Rc::new(RefCell::new(BTreeMap::new())));
                    key_stack.push(None);
                    val_stack.push(None);
                    state = State::DictKey;
                    next_state.push(State::RootValDict);
                }
                // List value
                Ok(Token::List) => {
                    buf_chars.next();
                    buf_index += 1;
                    depth += 1;
                    list_stack.push(Rc::new(RefCell::new(Vec::new())));
                    item_stack.push(None);
                    state = State::ListVal;
                    next_state.push(State::RootValList);
                }
                // Int value
                Ok(Token::Int) => {
                    state = State::Int;
                    buf_chars.next();
                    buf_index += 1;
                    next_state.push(State::RootValInt);
                }
                // Str value
                Err(_) => {
                    state = State::Str;
                    next_state.push(State::RootValStr);
                }
                // End, Colon
                Ok(a) => {
                    return Err(ParserError::Syntax(
                        0,
                        format!("Unexpected '{}' token", Into::<u8>::into(a) as char),
                    ))
                }
            }
        }
        // load_dict
        State::DictKey => {
            depth += 1;
            dict_stack.push(Rc::new(RefCell::new(BTreeMap::new())));
            key_stack.push(None);
            val_stack.push(None);
            next_state.push(State::RootValDict);
        }
        // load_list
        State::ListVal => {
            depth += 1;
            list_stack.push(Rc::new(RefCell::new(Vec::new())));
            item_stack.push(None);
            next_state.push(State::RootValList);
        }
        // load_prim
        State::Int => {
            next_state.push(State::RootValInt);
        }
        // load_prim
        State::Str => {
            next_state.push(State::RootValStr);
        }
        _ => unreachable!(),
    }
    loop {
        let real_index = file_index + buf_index as u64;
        // Refill the chunk buffer once the cursor has consumed everything
        // read so far.
        if real_index >= (file_index + buf.len() as u64) {
            buf.clear();
            stream.take(CHUNK_SIZE).read_to_end(&mut buf)?;
            buf_chars = buf.iter().peekable();
            file_index += buf_index as u64;
            buf_index = 0;
        }
        match state {
            State::Root => unreachable!(),
            // Root states
            State::RootValInt | State::RootValStr | State::RootValDict | State::RootValList => {
                if state == State::RootValInt {
                    buf_index += 1;
                }
                break;
            }
            // Read dict key or end the dict if it's empty
            State::DictKey => {
                let c = **buf_chars.peek().unwrap();
                if c == Token::End.into() {
                    buf_chars.next();
                    buf_index += 1;
                    state = next_state.pop().unwrap();
                } else {
                    if buf_str.len() == 0 {
                        state = State::Str;
                        next_state.push(State::DictKey);
                    } else {
                        let key = String::from_utf8(buf_str.clone()).map_err(|_| {
                            ParserError::Syntax(
                                real_index as usize,
                                "Dict key must be a utf8 string".into(),
                            )
                        })?;
                        *key_stack.last_mut().unwrap() = Some(key);
                        buf_str.clear();
                        state = State::DictVal;
                    }
                }
            }
            // Read dict value
            State::DictVal => {
                let c = **buf_chars.peek().ok_or(ParserError::Eof)?;
                match c.try_into() {
                    // Dict value
                    Ok(Token::Dict) => {
                        if depth < MAX_DEPTH || file_size.is_some() {
                            let map = Rc::new(RefCell::new(BTreeMap::new()));
                            depth += 1;
                            buf_chars.next();
                            buf_index += 1;
                            *val_stack.last_mut().unwrap() =
                                Some(LocalValue::DictRef(Rc::clone(&map)));
                            dict_stack.push(map);
                            key_stack.push(None);
                            val_stack.push(None);
                            state = State::DictKey;
                            next_state.push(State::DictValDict);
                        } else if file_size.is_none() && depth == MAX_DEPTH {
                            return Err(ParserError::RecursionLimit);
                        }
                    }
                    // List value
                    Ok(Token::List) => {
                        if depth < MAX_DEPTH || file_size.is_some() {
                            let vec = Rc::new(RefCell::new(Vec::new()));
                            depth += 1;
                            buf_chars.next();
                            buf_index += 1;
                            *val_stack.last_mut().unwrap() =
                                Some(LocalValue::ListRef(Rc::clone(&vec)));
                            list_stack.push(vec);
                            item_stack.push(None);
                            state = State::ListVal;
                            next_state.push(State::DictValList);
                        } else if file_size.is_none() && depth == MAX_DEPTH {
                            return Err(ParserError::RecursionLimit);
                        }
                    }
                    // Int value
                    Ok(Token::Int) => {
                        buf_chars.next();
                        buf_index += 1;
                        state = State::Int;
                        next_state.push(State::DictValInt);
                    }
                    // String value
                    Err(_) => {
                        state = State::Str;
                        next_state.push(State::DictValStr);
                    }
                    // Colon, End
                    _ => {
                        return Err(ParserError::Syntax(
                            real_index as usize,
                            format!("Unexpected '{}' token", c),
                        ))
                    }
                }
            }
            // Process current dict value as str
            State::DictValStr => {
                *val_stack.last_mut().unwrap() =
                    Some(LocalValue::Owned(str_or_bytes(buf_str.clone())));
                buf_str.clear();
                state = State::DictFlush;
            }
            // Process current dict value as int
            State::DictValInt => {
                // Unwrap here because Int state already checks for EOF
                let c = *buf_chars.next().unwrap();
                if c != Token::End.into() {
                    return Err(ParserError::Syntax(
                        real_index as usize,
                        "Expected 'e' token".into(),
                    ));
                }
                let val = buf_int.parse::<i64>().map_err(|_| {
                    ParserError::Syntax(real_index as usize, "Invalid integer".into())
                })?;
                *val_stack.last_mut().unwrap() = Some(LocalValue::Owned(Value::Int(val)));
                buf_int.clear();
                buf_index += 1;
                state = State::DictFlush;
            }
            // Process current dict value as dict
            State::DictValDict => {
                let dict = dict_stack.pop().unwrap();
                *val_stack.last_mut().unwrap() = Some(LocalValue::DictRef(dict));
                key_stack.pop().unwrap();
                val_stack.pop().unwrap();
                depth -= 1;
                state = State::DictFlush;
            }
            // Process current dict value as list
            State::DictValList => {
                let list = list_stack.pop().unwrap();
                *val_stack.last_mut().unwrap() = Some(LocalValue::ListRef(list));
                item_stack.pop().unwrap();
                depth -= 1;
                state = State::DictFlush;
            }
            // Insert current (key, value) pair into current dict
            State::DictFlush => {
                let key = key_stack.last().unwrap().clone().unwrap();
                let val = val_stack.last().unwrap().as_ref().unwrap().to_owned();
                dict_stack.last_mut().unwrap().borrow_mut().insert(key, val);
                let c = **buf_chars.peek().ok_or(ParserError::Eof)?;
                if c == Token::End.into() {
                    buf_chars.next();
                    buf_index += 1;
                    state = next_state.pop().unwrap();
                } else {
                    state = State::DictKey;
                }
            }
            // List value
            State::ListVal => {
                let c = **buf_chars.peek().ok_or(ParserError::Eof)?;
                match c.try_into() {
                    // End of list
                    Ok(Token::End) => {
                        buf_chars.next();
                        buf_index += 1;
                        state = next_state.pop().unwrap();
                    }
                    // Dict value
                    Ok(Token::Dict) => {
                        if depth < MAX_DEPTH || file_size.is_some() {
                            let d = Rc::new(RefCell::new(BTreeMap::new()));
                            depth += 1;
                            *item_stack.last_mut().unwrap() =
                                Some(LocalValue::DictRef(Rc::clone(&d)));
                            buf_chars.next();
                            dict_stack.push(d);
                            key_stack.push(None);
                            val_stack.push(None);
                            buf_index += 1;
                            state = State::DictKey;
                            next_state.push(State::ListValDict);
                        } else if file_size.is_none() && depth == MAX_DEPTH {
                            return Err(ParserError::RecursionLimit);
                        }
                    }
                    // List value
                    Ok(Token::List) => {
                        if depth < MAX_DEPTH || file_size.is_some() {
                            let l = Rc::new(RefCell::new(Vec::new()));
                            depth += 1;
                            *item_stack.last_mut().unwrap() =
                                Some(LocalValue::ListRef(Rc::clone(&l)));
                            buf_chars.next();
                            list_stack.push(l);
                            item_stack.push(None);
                            buf_index += 1;
                            next_state.push(State::ListValList);
                        } else if file_size.is_none() && depth == MAX_DEPTH {
                            return Err(ParserError::RecursionLimit);
                        }
                    }
                    // Int value
                    Ok(Token::Int) => {
                        buf_chars.next();
                        buf_index += 1;
                        state = State::Int;
                        next_state.push(State::ListValInt);
                    }
                    // String value
                    Err(_) => {
                        state = State::Str;
                        next_state.push(State::ListValStr);
                    }
                    // Colon
                    _ => {
                        return Err(ParserError::Syntax(
                            real_index as usize,
                            "Unexpected ':' token".into(),
                        ))
                    }
                }
            }
            // Process current list value as str
            State::ListValStr => {
                *item_stack.last_mut().unwrap() =
                    Some(LocalValue::Owned(str_or_bytes(buf_str.clone())));
                buf_str.clear();
                state = State::ListFlush;
            }
            // Process current list value as int
            State::ListValInt => {
                // Unwrap here because Int state already checks for EOF
                let c = *buf_chars.next().unwrap();
                if c != Token::End.into() {
                    return Err(ParserError::Syntax(
                        real_index as usize,
                        "Expected 'e' token".into(),
                    ));
                }
                let val = buf_int.parse::<i64>().map_err(|_| {
                    ParserError::Syntax(real_index as usize, "Invalid integer".into())
                })?;
                *item_stack.last_mut().unwrap() = Some(LocalValue::Owned(Value::Int(val)));
                buf_int.clear();
                buf_index += 1;
                state = State::ListFlush;
            }
            // Process current list value as dict
            State::ListValDict => {
                let dict = dict_stack.pop().unwrap().borrow().clone();
                *item_stack.last_mut().unwrap() = Some(LocalValue::Owned(Value::Dict(dict)));
                key_stack.pop();
                val_stack.pop();
                state = State::ListFlush;
            }
            // Process current list value as list
            State::ListValList => {
                let list = list_stack.pop().unwrap().borrow().clone();
                *item_stack.last_mut().unwrap() = Some(LocalValue::Owned(Value::List(list)));
                item_stack.pop();
                state = State::ListFlush;
            }
            // Add current list value to the current list
            State::ListFlush => {
                let val = item_stack.last().unwrap().as_ref().unwrap().to_owned();
                list_stack.last_mut().unwrap().borrow_mut().push(val);
                let c = **buf_chars.peek().unwrap();
                if c == Token::End.into() {
                    buf_chars.next();
                    buf_index += 1;
                    state = next_state.pop().unwrap();
                } else {
                    state = State::ListVal;
                }
            }
            // Process string
            State::Str => {
                if buf_int.len() == 0 {
                    buf_str.clear();
                    buf_str_remainder = 0;
                    state = State::Int;
                    next_state.push(State::Str);
                } else {
                    let c = *buf_chars.next().ok_or(ParserError::Eof)?;
                    if c != Token::Colon.into() {
                        return Err(ParserError::Syntax(
                            real_index as usize,
                            "Expected ':'".into(),
                        ));
                    }
                    let buf_str_size = buf_int.parse::<u64>().map_err(|_| {
                        ParserError::Syntax(real_index as usize, "Invalid integer".into())
                    })?;
                    buf_int.clear();
                    buf_index += 1;
                    // String is bigger than buffer
                    if buf_index + buf_str_size as usize > buf.len() {
                        let chunk_size = buf.len() - buf_index;
                        buf_str_remainder = buf_str_size - chunk_size as u64;
                        buf_str.extend(buf_chars.by_ref());
                        buf_index += chunk_size;
                        state = State::StrRem;
                    } else {
                        buf_str.extend(buf_chars.by_ref().take(buf_str_size as usize));
                        buf_index += buf_str_size as usize;
                        state = next_state.pop().unwrap();
                    }
                }
            }
            // Process string remainder
            State::StrRem => {
                if buf_str_remainder > 0 && buf_index + buf_str_remainder as usize > buf.len() {
                    let chunk_size = buf.len() - buf_index;
                    buf_str_remainder -= chunk_size as u64;
                    buf_str.extend(buf_chars.by_ref());
                    buf_index += chunk_size;
                } else {
                    buf_str.extend(buf_chars.by_ref().take(buf_str_remainder as usize));
                    buf_index += buf_str_remainder as usize;
                    buf_str_remainder = 0;
                    state = next_state.pop().unwrap();
                }
            }
            // Int
            State::Int => {
                const CHARS: &[char] = &['0', '1', '2', '3', '4', '5', '6', '7', '8', '9', '-'];
                let (advance, c) = if let Some(b) = first_char.take() {
                    (false, b as char)
                } else {
                    (true, **buf_chars.peek().ok_or(ParserError::Eof)? as char)
                };
                if CHARS.contains(&c) {
                    // Only allow minus at the beginning
                    if c == '-' && buf_int.len() > 0 {
                        return Err(ParserError::Syntax(
                            real_index as usize,
                            "Unexpected '-'".into(),
                        ));
                    }
                    buf_int.push(c);
                    // Only advance iterator if c didn't come from first_char
                    if advance {
                        buf_chars.next();
                        buf_index += 1;
                    }
                } else {
                    if buf_int.len() == 0 {
                        return Err(ParserError::Syntax(
                            real_index as usize,
                            "Empty integer".into(),
                        ));
                    }
                    if buf_int.len() > MAX_INT_BUF {
                        return Err(ParserError::Syntax(
                            real_index as usize,
                            "Integer string too big".into(),
                        ));
                    }
                    state = next_state.pop().unwrap();
                }
            }
        }
        // A sized stream ends exactly at its reported size.
        if let Some(&size) = file_size.as_ref() {
            if file_index + buf_index as u64 == size as u64 {
                break;
            }
        }
    }
    let final_index = file_index as usize + buf_index;
    if next_state.len() > 0 {
        return Err(ParserError::Eof);
    }
    match state {
        State::RootValInt => {
            // Unwrap here because Int state already checks for EOF
            let c = *buf_chars.next().unwrap();
            if c != Token::End.into() {
                return Err(ParserError::Syntax(
                    final_index - 1,
                    "Expected 'e' token".into(),
                ));
            }
            let val = buf_int.parse::<i64>().map_err(|_| {
                ParserError::Syntax(file_index as usize + buf_index, "Invalid integer".into())
            })?;
            root = Some(Value::Int(val));
        }
        State::RootValStr => root = Some(str_or_bytes(buf_str)),
        State::RootValDict => {
            let dict = dict_stack.pop().unwrap().borrow().clone();
            root = Some(Value::Dict(dict));
        }
        State::RootValList => {
            let list = list_stack.pop().unwrap().borrow().clone();
            root = Some(Value::List(list));
        }
        _ => unreachable!(),
    }
    // Drop the consumed bytes; anything left in the buffer or the stream
    // after the root value is trailing data.
    buf.splice(0..buf_index, NOTHING.iter().cloned());
    if buf.len() > 0 {
        return Err(ParserError::Syntax(
            file_index as usize + buf_index,
            "Trailing data".into(),
        ));
    }
    if stream.bytes().next().is_some() {
        return Err(ParserError::Syntax(
            file_index as usize + buf_index,
            "Trailing data".into(),
        ));
    }
    Ok(root.unwrap())
}
/// Try to convert a raw buffer to utf8 and return the appropriate Value:
/// `Value::Str` on success, `Value::Bytes` (original bytes) on failure.
fn str_or_bytes(vec: Vec<u8>) -> Value {
    String::from_utf8(vec)
        .map(Value::Str)
        .unwrap_or_else(|e| Value::Bytes(e.into_bytes()))
}
impl LocalValue {
    /// Materializes an owned `Value`, cloning out of any shared
    /// still-under-construction container handle.
    fn to_owned(&self) -> Value {
        match self {
            Self::DictRef(r) => Value::Dict(r.borrow().clone()),
            Self::ListRef(r) => Value::List(r.borrow().clone()),
            Self::Owned(v) => v.clone(),
        }
    }
}
impl<'a> Stream<'a> {
    /// Creates a new stream from a [`Read`]able type.
    ///
    /// The result is always `Unsized`; types that also implement `Seek` can
    /// get a sized stream through the `From` impl instead.
    ///
    /// [`Read`]: https://doc.rust-lang.org/std/io/trait.Read.html
    pub fn new<T>(t: &'a mut T) -> Self
    where
        T: Read,
    {
        Self::Unsized(Box::new(t))
    }

    /// Returns the total size of this stream in bytes, when known.
    pub fn size(&self) -> Option<usize> {
        match self {
            Self::Sized(_, size) | Self::Slice(_, size) => Some(*size),
            Self::Unsized(_) => None,
        }
    }
}
impl<'a, T> From<&'a mut T> for Stream<'a>
where
    T: Read + Seek,
{
    /// Builds a stream from a seekable reader, measuring its size by seeking
    /// to the end and then rewinding to the start.
    ///
    /// The stream is only treated as `Sized` when BOTH seeks succeed. The
    /// previous `result1.or(result2)` logic had two defects: if the end-seek
    /// failed but the rewind returned `Ok(0)`, the stream was wrongly tagged
    /// `Sized(0)` (making the parser report `Empty` for a non-empty stream);
    /// and if the end-seek succeeded but the rewind failed, the reader was
    /// left positioned at EOF while still tagged `Sized`.
    fn from(t: &'a mut T) -> Self {
        match (t.seek(SeekFrom::End(0)), t.seek(SeekFrom::Start(0))) {
            (Ok(size), Ok(_)) => Self::Sized(Box::new(t), size as usize),
            // Fall back to the unsized (depth-limited) mode on any failure.
            _ => Self::Unsized(Box::new(t)),
        }
    }
}
impl<'a, T> From<&'a T> for Stream<'a>
where
    T: AsRef<[u8]>,
{
    /// Wraps any byte-slice-like value as a sized in-memory stream.
    fn from(t: &'a T) -> Self {
        let bytes = t.as_ref();
        Self::Slice(bytes, bytes.len())
    }
}
impl<'a> From<&'a [u8]> for Stream<'a> {
    /// Wraps a raw byte slice as a sized in-memory stream.
    fn from(s: &'a [u8]) -> Self {
        Self::Slice(s, s.len())
    }
}
// Lets `?` convert IO failures into parser errors transparently.
impl From<IoError> for ParserError {
    fn from(e: IoError) -> Self {
        Self::Io(e)
    }
}
impl Into<u8> for Token {
fn into(self) -> u8 {
match self {
Self::Dict => 'd' as u8,
Self::Int => 'i' as u8,
Self::List => 'l' as u8,
Self::Colon => ':' as u8,
Self::End => 'e' as u8,
}
}
}
impl TryFrom<u8> for Token {
    type Error = ();

    /// Maps a structural byte to its token; any other byte yields `Err(())`
    /// (such bytes are the start of a string-length integer).
    fn try_from(c: u8) -> Result<Token, Self::Error> {
        match c {
            b'd' => Ok(Token::Dict),
            b'i' => Ok(Token::Int),
            b'l' => Ok(Token::List),
            b':' => Ok(Token::Colon),
            b'e' => Ok(Token::End),
            _ => Err(()),
        }
    }
}
impl fmt::Display for ParserError {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self {
            ParserError::Io(e) => write!(f, "IO Error: {}", e),
            ParserError::Empty => write!(f, "Empty file"),
            // Stored positions are 0-based; render them 1-based for humans.
            ParserError::Syntax(n, s) => write!(f, "Syntax error at {}: {}", n + 1, s),
            ParserError::Eof => write!(f, "Unexpected end of file reached"),
            ParserError::UnexpectedRoot => write!(f, "Unexpected root value"),
            ParserError::RecursionLimit => write!(f, "Too many nested structures"),
        }
    }
}
// Manual Debug: the wrapped readers/slices are elided, only the variant
// name and the known size are shown.
impl<'a> fmt::Debug for Stream<'a> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self {
            Stream::Sized(_, size) => write!(f, "Sized({})", size),
            Stream::Slice(_, size) => write!(f, "Slice({})", size),
            Stream::Unsized(_) => write!(f, "Unsized"),
        }
    }
}
// Reading simply delegates to whichever reader/slice the variant wraps.
impl<'a> Read for Stream<'a> {
    fn read(&mut self, buf: &mut [u8]) -> IOResult<usize> {
        match self {
            Self::Sized(stream, _) => stream.read(buf),
            Self::Slice(slice, _) => slice.read(buf),
            Self::Unsized(stream) => stream.read(buf),
        }
    }
}
| true |
b84125f8ffcb36252696bb7d0c5d38e88fecfd6f
|
Rust
|
thevirtuoso1973/rust-buildkit
|
/buildkit-frontend/src/error.rs
|
UTF-8
| 5,413 | 3.203125 | 3 |
[
"LicenseRef-scancode-other-permissive",
"LicenseRef-scancode-unknown-license-reference",
"MIT",
"Apache-2.0"
] |
permissive
|
/// gRPC status codes used to classify RPC results; numeric values mirror the
/// Go gRPC `codes` package exactly.
///
/// https://godoc.org/google.golang.org/grpc/codes#Code
pub enum ErrorCode {
    /// OK is returned on success.
    OK = 0,
    /// Canceled indicates the operation was canceled (typically by the caller).
    Canceled = 1,
    /// Unknown error. An example of where this error may be returned is
    /// if a Status value received from another address space belongs to
    /// an error-space that is not known in this address space. Also
    /// errors raised by APIs that do not return enough error information
    /// may be converted to this error.
    Unknown = 2,
    /// InvalidArgument indicates client specified an invalid argument.
    /// Note that this differs from FailedPrecondition. It indicates arguments
    /// that are problematic regardless of the state of the system
    /// (e.g., a malformed file name).
    InvalidArgument = 3,
    /// DeadlineExceeded means operation expired before completion.
    /// For operations that change the state of the system, this error may be
    /// returned even if the operation has completed successfully. For
    /// example, a successful response from a server could have been delayed
    /// long enough for the deadline to expire.
    DeadlineExceeded = 4,
    /// NotFound means some requested entity (e.g., file or directory) was
    /// not found.
    NotFound = 5,
    /// AlreadyExists means an attempt to create an entity failed because one
    /// already exists.
    AlreadyExists = 6,
    /// PermissionDenied indicates the caller does not have permission to
    /// execute the specified operation. It must not be used for rejections
    /// caused by exhausting some resource (use ResourceExhausted
    /// instead for those errors). It must not be
    /// used if the caller cannot be identified (use Unauthenticated
    /// instead for those errors).
    PermissionDenied = 7,
    /// ResourceExhausted indicates some resource has been exhausted, perhaps
    /// a per-user quota, or perhaps the entire file system is out of space.
    ResourceExhausted = 8,
    /// FailedPrecondition indicates operation was rejected because the
    /// system is not in a state required for the operation's execution.
    /// For example, directory to be deleted may be non-empty, an rmdir
    /// operation is applied to a non-directory, etc.
    ///
    /// A litmus test that may help a service implementor in deciding
    /// between FailedPrecondition, Aborted, and Unavailable:
    ///  (a) Use Unavailable if the client can retry just the failing call.
    ///  (b) Use Aborted if the client should retry at a higher-level
    ///      (e.g., restarting a read-modify-write sequence).
    ///  (c) Use FailedPrecondition if the client should not retry until
    ///      the system state has been explicitly fixed. E.g., if an "rmdir"
    ///      fails because the directory is non-empty, FailedPrecondition
    ///      should be returned since the client should not retry unless
    ///      they have first fixed up the directory by deleting files from it.
    ///  (d) Use FailedPrecondition if the client performs conditional
    ///      REST Get/Update/Delete on a resource and the resource on the
    ///      server does not match the condition. E.g., conflicting
    ///      read-modify-write on the same resource.
    FailedPrecondition = 9,
    /// Aborted indicates the operation was aborted, typically due to a
    /// concurrency issue like sequencer check failures, transaction aborts,
    /// etc.
    ///
    /// See litmus test above for deciding between FailedPrecondition,
    /// Aborted, and Unavailable.
    Aborted = 10,
    /// OutOfRange means operation was attempted past the valid range.
    /// E.g., seeking or reading past end of file.
    ///
    /// Unlike InvalidArgument, this error indicates a problem that may
    /// be fixed if the system state changes. For example, a 32-bit file
    /// system will generate InvalidArgument if asked to read at an
    /// offset that is not in the range [0,2^32-1], but it will generate
    /// OutOfRange if asked to read from an offset past the current
    /// file size.
    ///
    /// There is a fair bit of overlap between FailedPrecondition and
    /// OutOfRange. We recommend using OutOfRange (the more specific
    /// error) when it applies so that callers who are iterating through
    /// a space can easily look for an OutOfRange error to detect when
    /// they are done.
    OutOfRange = 11,
    /// Unimplemented indicates operation is not implemented or not
    /// supported/enabled in this service.
    Unimplemented = 12,
    /// Internal errors. Means some invariants expected by underlying
    /// system has been broken. If you see one of these errors,
    /// something is very broken.
    Internal = 13,
    /// Unavailable indicates the service is currently unavailable.
    /// This is a most likely a transient condition and may be corrected
    /// by retrying with a backoff. Note that it is not always safe to retry
    /// non-idempotent operations.
    ///
    /// See litmus test above for deciding between FailedPrecondition,
    /// Aborted, and Unavailable.
    Unavailable = 14,
    /// DataLoss indicates unrecoverable data loss or corruption.
    DataLoss = 15,
    /// Unauthenticated indicates the request does not have valid
    /// authentication credentials for the operation.
    Unauthenticated = 16,
}
| true |
af794fb554c2ed71f8c9eb30c66f1e3522936f71
|
Rust
|
artronics/websocket2socketcan
|
/src/can.rs
|
UTF-8
| 1,431 | 2.890625 | 3 |
[] |
no_license
|
use serde_derive::{Deserialize, Serialize};
use serde_json::Result as SerdeResult;
use socketcan::{CANFrame, CANSocket, ConstructionError};
/// Serializable mirror of a classic CAN frame (max 8 data bytes),
/// used to move frames over JSON/WebSocket.
#[derive(Serialize, Deserialize)]
pub struct CanFrame {
    /// CAN identifier (standard or extended, as provided by socketcan).
    id: u32,
    /// Payload buffer; only the first `data_length` bytes are meaningful.
    data: [u8; 8],
    /// Number of valid bytes in `data`.
    data_length: usize,
    /// Remote transmission request flag.
    is_remote: bool,
    /// Error-frame flag.
    is_error: bool,
}
impl CanFrame {
    /// Builds a frame from its raw parts.
    pub fn new(id: u32, data: [u8; 8], data_length: usize, is_remote: bool, is_error: bool) -> Self {
        CanFrame {
            id,
            data,
            data_length,
            is_remote,
            is_error,
        }
    }

    /// Converts a socketcan frame into the serializable representation.
    ///
    /// The payload is copied into a fixed 8-byte buffer; unused trailing
    /// bytes stay zero. Panics (exactly as the old byte-by-byte loop did)
    /// if the payload somehow exceeds 8 bytes.
    pub fn from_linux_frame(f: CANFrame) -> Self {
        let payload = f.data();
        let mut data = [0u8; 8];
        // Bulk copy instead of a manual index loop.
        data[..payload.len()].copy_from_slice(payload);
        CanFrame {
            id: f.id(),
            data,
            data_length: payload.len(),
            is_error: f.is_error(),
            is_remote: f.is_rtr(),
        }
    }

    /// Converts back into a socketcan frame.
    ///
    /// NOTE(review): the full 8-byte buffer is passed to `CANFrame::new`,
    /// so a round-trip loses the original `data_length` (frames come back
    /// as 8-byte frames) — confirm whether that is intended.
    pub fn to_linux_frame(&self) -> CANFrame {
        CANFrame::new(self.id, &self.data, self.is_remote, self.is_error).unwrap()
    }
}
/// Thin wrapper around a non-blocking socketcan socket.
pub struct Can {
    // Underlying Linux SocketCAN handle.
    socket: CANSocket
}
impl Can {
    /// Opens the CAN interface (e.g. "can0" / "vcan0") in non-blocking mode.
    ///
    /// Panics if the interface cannot be opened or switched to
    /// non-blocking mode (consistent with the existing `unwrap` style).
    pub fn new(can_if: &str) -> Self {
        let socket = CANSocket::open(can_if).unwrap();
        // Previously the Result was silently dropped; a socket that stays
        // blocking would change the program's behavior, so fail loudly.
        socket
            .set_nonblocking(true)
            .expect("failed to set CAN socket to non-blocking mode");
        Can { socket }
    }

    /// Sends a single frame on the bus.
    ///
    /// Errors are reported on stderr instead of being silently discarded;
    /// the signature is kept `()` so existing callers are unaffected.
    pub fn write(&self, frame: CanFrame) {
        let linux_frame = frame.to_linux_frame();
        if let Err(e) = self.socket.write_frame(&linux_frame) {
            eprintln!("failed to write CAN frame: {}", e);
        }
    }
}
| true |
0596b30bdb0acdcb0d1e4bd364dddafa66cdfd74
|
Rust
|
nindwen/Hepoklaani
|
/src/main.rs
|
UTF-8
| 2,199 | 2.75 | 3 |
[] |
no_license
|
mod replacements;
mod transforms;
use std::net::{TcpListener, TcpStream};
use std::io::{Read, BufRead, Write, BufReader, Error};
use std::{thread, str};
// Public-facing domain, switched between debug and release builds.
// NOTE(review): DOMAIN is not referenced anywhere in this file's visible
// code (handle_client hard-codes "bioklaani.fi:80") — verify whether it
// is dead or meant to be used by the transforms/replacements modules.
#[cfg(debug_assertions)]
static DOMAIN: &'static str = "localhost:8086";
#[cfg(not(debug_assertions))]
static DOMAIN: &'static str = "bioklaani.horse";
// Handling single connection
/// Proxies one HTTP request: reads and mutates the client's headers,
/// forwards the full request to the upstream ("bioklaani.fi:80"),
/// then writes the (transformed) upstream response back to the client.
fn handle_client(stream: TcpStream) -> Result<(), Error> {
    let mut client_connection = BufReader::new(stream);
    let mut request = String::new();
    let mut request_body = String::new();
    // Read request headers
    // Each header line is passed through transforms::header_mutate; the
    // header section ends at the first empty line (HTTP blank separator).
    for line in client_connection.by_ref().lines() {
        let current = transforms::header_mutate(line?);
        request += &current;
        request += "\r\n";
        if current == "" {
            break;
        }
    }
    let content_length = transforms::parse_content_length(&request);
    // Read request body
    // Reads exactly Content-Length bytes so we don't block waiting for EOF.
    client_connection.by_ref()
        .take(content_length as u64)
        .read_to_string(&mut request_body)?;
    request += &request_body;
    // Connect to remote
    let mut remote_connection = TcpStream::connect("bioklaani.fi:80")?;
    // Relay the request
    remote_connection.write_all(request.as_bytes())?;
    remote_connection.flush()?;
    // Half-close the write side so the upstream sees end-of-request and
    // read_to_end below can terminate.
    remote_connection.shutdown(std::net::Shutdown::Write)?;
    // Send remote's response to client
    let mut response = vec![0; 0];
    remote_connection.read_to_end(&mut response)?;
    let bytes = transforms::form_response(response);
    client_connection.into_inner().write_all(&bytes)?;
    Ok(())
}
/// Accepts connections on 127.0.0.1:8086 and serves each one on its own
/// worker thread; per-connection failures are logged, never fatal.
fn main() {
    let listener = TcpListener::bind("127.0.0.1:8086").unwrap();
    // Launch new thread for every connection
    for incoming in listener.incoming() {
        match incoming {
            Ok(conn) => {
                thread::spawn(move || {
                    if let Err(e) = handle_client(conn) {
                        println!("Thread returned with error: {}", e);
                    }
                });
            }
            Err(e) => println!("Error on incoming connection: {}", e),
        }
    }
}
| true |
b7f7c849136dc19592356bc7640d12f7e39d320a
|
Rust
|
EFanZh/LeetCode
|
/src/problem_1324_print_words_vertically/mod.rs
|
UTF-8
| 560 | 3.15625 | 3 |
[] |
no_license
|
pub mod iterative;
/// Contract for LeetCode 1324 ("Print Words Vertically") solutions:
/// given a sentence, return its words read column-wise, with trailing
/// spaces trimmed (see the fixtures in `tests::run`).
pub trait Solution {
    fn print_vertically(s: String) -> Vec<String>;
}
#[cfg(test)]
mod tests {
    use super::Solution;
    // Shared fixture runner: each concrete solution module calls this
    // with its own type to check the same inputs/expected outputs.
    pub fn run<S: Solution>() {
        let test_cases = [
            ("HOW ARE YOU", &["HAY", "ORO", "WEU"] as &[_]),
            ("TO BE OR NOT TO BE", &["TBONTB", "OEROOE", " T"]),
            ("CONTEST IS COMING", &["CIC", "OSO", "N M", "T I", "E N", "S G", "T"]),
        ];
        for (s, expected) in test_cases {
            assert_eq!(S::print_vertically(s.to_string()), expected);
        }
    }
}
| true |
4ca4817ae0cf0ea2e56d381f9825d241a70a1339
|
Rust
|
sampwing/grep-rs
|
/src/main.rs
|
UTF-8
| 3,730 | 2.953125 | 3 |
[] |
no_license
|
#![feature(phase)]
#[phase(plugin, link)] extern crate log;
#[phase(plugin)] extern crate regex_macros;
extern crate getopts;
extern crate regex;
use std::io::File;
use std::io::BufferedReader;
use std::io::fs;
use std::io::fs::PathExtensions;
use std::os;
use std::clone;
use getopts::{optflag, getopts, OptGroup};
use regex::Regex;
/// One matching line of a searched file: its 1-based position and text.
/// NOTE(review): pre-1.0 Rust (`uint`, old std::io) — kept byte-identical.
struct LineContent{
    line: uint,
    content: String,
}
/// Streams `path` line-by-line and prints every line matching `re`,
/// grep-style: `path:content`, or `path:line:content` when
/// `line_numbers` is set (line numbers are 1-based).
fn search_path(path: &Path, re: &Regex, line_numbers: bool) {
    let mut file = BufferedReader::new(File::open(path));
    let line_matches: Vec<LineContent> = file
        // get the Lines from the file
        .lines()
        // get only the ok Results
        .filter(|x| x.is_ok())
        // unwrap the Optionals for consumption
        .map(|x| x.unwrap())
        // enumerate the results
        .enumerate()
        // map to a line match object
        .map(|(idx, line)| LineContent{line: idx + 1, content: line.to_string()})
        // filter out lines which do not match the regex
        .filter(|line_content| re.is_match(line_content.content.as_slice()))
        // collect into the linematch vector
        .collect();
    // `content` keeps its trailing newline, so print! (not println!) is used.
    if line_numbers {
        for line_match in line_matches.iter() {
            print!("{}:{}:{}", path.display(), line_match.line, line_match.content)
        }
    } else {
        for line_match in line_matches.iter() {
            print!("{}:{}", path.display(), line_match.content)
        }
    }
}
/// Prints the CLI usage banner; the opts slice is accepted for signature
/// symmetry but not consulted (flags are listed by hand).
fn print_usage(program: &str, _opts: &[OptGroup]) {
    println!("Usage: {} [options]", program);
    println!("-n --line-number\n\tPrint Line Numbers");
    println!("-h --help\n\tUsage");
}
/// CLI entry point: parses flags (-n line numbers, -h help, -r recursive),
/// expects two free args (pattern, path), then greps the file — or, with
/// -r, every regular file under the directory.
/// NOTE(review): pre-1.0 Rust (os::args, fs::walk_dir) — kept byte-identical.
fn main() {
    let args: Vec<String> = os::args();
    let program = args[0].clone();
    let opts = &[
        optflag("n", "line-number", "display line number"),
        optflag("h", "help", "print this help menu"),
        optflag("r", "recursive", "recursively walk paths")
    ];
    let matches = match getopts(args.tail(), opts) {
        Ok(m) => m,
        Err(err) => panic!(err.to_string())
    };
    // determine if help requested
    if matches.opt_present("h") {
        print_usage(program.as_slice(), opts);
        return;
    }
    // get the pattern and path
    // Exactly two positional args are required; anything else shows usage.
    let (input_pattern, input_path) = if matches.free.len() == 2 {
        (matches.free[0].clone(), matches.free[1].clone())
    } else {
        print_usage(program.as_slice(), opts);
        return;
    };
    let re = match Regex::new(input_pattern.as_slice()) {
        Ok(re) => re,
        Err(err) => {
            debug!("{}", err);
            error!("Invalid search pattern specified.");
            return
        }
    };
    let path = Path::new(input_path);
    if !path.exists() {
        error!("Invalid Path Specified.");
        return;
    }
    let line_numbers = matches.opt_present("n");
    if matches.opt_present("r") {
        if !path.is_file(){
            // Walk the tree, keeping only regular files.
            let paths = match fs::walk_dir(&path) {
                Ok(paths) => paths.filter(|path| path.is_file()).collect::<Vec<Path>>(),
                Err(err) => {
                    debug!("{}", err);
                    error!("Unable to walk paths recursively.");
                    return
                }
            };
            for path in paths.iter() {
                //spawn(move || {
                //    search_path(&path.clone(), &re.clone(), line_numbers);
                //});
                search_path(path, &re, line_numbers);
            }
        } else {
            // -r on a plain file degrades to a single-file search.
            search_path(&path, &re, line_numbers);
        }
    } else {
        if !path.is_file() {
            error!("Path is not a file.");
            return;
        }
        search_path(&path, &re, line_numbers);
    }
}
| true |
d07efccf5d67d626dd65f18b70e4aebdb34e7c44
|
Rust
|
kabeone/rust_encrypted_filesystem
|
/fuse/src/get_fs.rs
|
UTF-8
| 3,766 | 2.859375 | 3 |
[] |
no_license
|
use std::env;
use std::path;
use std::fs;
use std::process::exit;
use fuse::FileAttr;
use std::fs::{ReadDir, read_dir, DirEntry};
use time::Timespec;
use std::os::macos::fs::MetadataExt;
use std::ffi::OsString;
use std::path::{Path, PathBuf};
// Global inode counter. 1 is reserved for the mountpoint root, so the first
// allocated entry receives 2. NOTE(review): `static mut` is only sound while
// the filesystem scan stays single-threaded — verify before adding threads.
pub static mut A :u64 = 1;
// Placeholder timestamp (epoch) used for every time attribute.
pub const CREATE_TIME: Timespec = Timespec { sec: 0, nsec: 0 };
/// Cached FUSE attributes for one filesystem entry plus its tree linkage.
pub struct FileInfo {
    /// Attributes reported to the kernel (inode, size, kind, perms, ...).
    pub attribute : FileAttr,
    /// Inode of the containing directory (0 only for the root entry).
    pub parent_inode : u64,
    /// Entry name within its parent directory.
    pub name : OsString,
    /// Full path on the backing filesystem.
    pub path : OsString
}
/// Appends a `FileInfo` for `entry` to `vec`, allocating the next inode
/// from the global counter `A` and classifying the entry as symlink,
/// directory, or regular file.
fn add_info_entry(vec :&mut Vec<FileInfo>, entry :&DirEntry, parent_inode :u64) {
    let f_type :fuse::FileType;
    let inode;
    // SAFETY-by-convention: mutating the `static mut` counter is only safe
    // because the scan is single-threaded.
    unsafe {
        A += 1;
        inode = A;
    }
    // NOTE: each metadata() call re-stats the entry; symlink is checked
    // first so links are not misreported as their targets.
    if entry.metadata().unwrap().file_type().is_symlink() {
        f_type = fuse::FileType::Symlink;
    } else if entry.metadata().unwrap().file_type().is_dir() {
        f_type = fuse::FileType::Directory;
    } else {
        f_type = fuse::FileType::RegularFile;
    }
    let infos :FileInfo = FileInfo {
        attribute: FileAttr {
            ino: inode,
            size: entry.metadata().unwrap().st_size(),
            blocks: 1,
            // All timestamps are the epoch placeholder, not real mtimes.
            atime: CREATE_TIME,
            mtime: CREATE_TIME,
            ctime: CREATE_TIME,
            crtime: CREATE_TIME,
            kind: f_type,
            perm: 0o755,
            nlink: 0,
            // NOTE(review): uid/gid are hard-coded (501/20, typical macOS
            // first user) rather than taken from the entry's metadata.
            uid: 501,
            gid: 20,
            rdev: 0,
            flags: 0,
        },
        parent_inode,
        name: entry.file_name(),
        path: entry.path().as_os_str().to_owned()
    };
    vec.push(infos);
}
/// Pushes the mountpoint root directory as the first entry (inode 1,
/// parent 0). Exits with code 84 if the path has no final component
/// (e.g. "/"), since the root entry needs a name.
fn set_root_dir(vec :&mut Vec<FileInfo>, path :& Path) {
    let infos :FileInfo = FileInfo {
        attribute: FileAttr {
            ino: 1,
            size: path.metadata().unwrap().st_size(),
            blocks: 1,
            atime: CREATE_TIME,
            mtime: CREATE_TIME,
            ctime: CREATE_TIME,
            crtime: CREATE_TIME,
            kind: fuse::FileType::Directory,
            // Root is world-writable (0777), unlike child entries (0755).
            perm: 0o777,
            nlink: 0,
            uid: 501,
            gid: 20,
            rdev: 0,
            flags: 0,
        },
        parent_inode: 0,
        name: match path.file_name() {
            Some(n) => n.to_owned(),
            None => {
                println!("Error cant mount filesystem: no name found for mountpoint root directory\n\t\tAre you trying to mount \"/\"?\
                \n\t\tElse please specify full path");
                exit(84);
            },
        },
        path : path.as_os_str().to_owned()
    };
    vec.push(infos);
}
/// Depth-first walk: records every entry of `dir_entries` into the global
/// FILE_ENTRIES table and recurses into subdirectories, passing the
/// just-allocated inode (global counter `A`) as the child's parent.
/// Exits with code 84 on unreadable directories.
fn get_fs_loop(dir_entries :ReadDir, parent_ino :u64) {
    unsafe {
        for entry in dir_entries {
            let entry = entry.unwrap();
            add_info_entry(&mut crate::fs::FILE_ENTRIES, &entry, parent_ino);
            if entry.metadata().unwrap().is_dir() {
                let dir = match read_dir(entry.path()) {
                    Ok(t) => t,
                    Err(e) => {
                        println!("Error: {}", e);
                        exit(84);
                    }
                };
                // `A` currently holds the inode add_info_entry just assigned
                // to this directory, so children are linked to it.
                get_fs_loop(dir, A);
            }
        }
    }
}
/// Builds the in-memory filesystem snapshot from argv[1] (source dir) and
/// records argv[2] (mountpoint); both are canonicalized into the crate's
/// global state. Panics if either argument is missing.
pub fn get_fs() {
    let path = env::args_os().nth(1).unwrap();
    let path2 = env::args_os().nth(2).unwrap();
    unsafe {
        set_root_dir(&mut crate::fs::FILE_ENTRIES, path::Path::new(path.to_str().unwrap()));
        let dir_entry = match fs::read_dir(path::Path::new(&path.to_str().unwrap())) {
            Ok(t) => t,
            Err(e) => {
                println!("Error: {}", e);
                exit(84);
            }
        };
        // Root was inserted with inode 1; scan its children under it.
        get_fs_loop(dir_entry, 1);
        crate::fs::PATH_SRC = Some(PathBuf::from(path).canonicalize().unwrap());
        crate::fs::PATH_MOUNTPOINT = Some(PathBuf::from(path2).canonicalize().unwrap());
    }
}
| true |
7d6ad9969c2d664a926046d938f3db43a4f7af55
|
Rust
|
cmsd2/wagon-api
|
/authorizers/src/token.rs
|
UTF-8
| 3,388 | 2.765625 | 3 |
[
"Apache-2.0"
] |
permissive
|
use lazy_static::lazy_static;
use maplit::hashmap;
use std::env;
use aws_lambda_events::event::apigw;
use crate::result::AuthResult;
use crate::error::AuthError;
use rusoto_core::Region;
use rusoto_dynamodb::{AttributeValue, DynamoDb, DynamoDbClient, QueryInput};
lazy_static! {
    // Shared DynamoDB client; region comes from the default provider chain.
    static ref DYNAMODB_CLIENT: DynamoDbClient = DynamoDbClient::new(Region::default());
    // Required configuration: first access panics if the env var is unset.
    static ref TOKENS_TABLE: String = env::var("TOKENS_TABLE").unwrap();
    static ref TOKENS_TABLE_TOKENS_INDEX: String = env::var("TOKENS_TABLE_TOKENS_INDEX").unwrap();
}
/// Classification of an incoming `Authorization` header value.
#[derive(Debug, Clone, PartialEq)]
pub enum AuthorizationHeader {
    /// Request carried no authorization token at all.
    NotPresent,
    /// Header present but empty.
    Empty,
    /// `Bearer <token>` — holds the token without the prefix.
    BearerToken(String),
    /// Any other non-empty value is treated as an API key.
    ApiKey(String),
}
impl AuthorizationHeader {
    /// Extracts and classifies the auth token from an API Gateway
    /// authorizer request; a missing token maps to `NotPresent`.
    pub fn from_request(req: &apigw::ApiGatewayCustomAuthorizerRequest) -> Self {
        req.authorization_token
            .as_deref()
            .map(Self::from_value)
            .unwrap_or(AuthorizationHeader::NotPresent)
    }

    /// Classifies a raw header value: empty string, `Bearer <token>`,
    /// or anything else as an API key.
    pub fn from_value(value: &str) -> Self {
        if value.is_empty() {
            return AuthorizationHeader::Empty;
        }
        // strip_prefix replaces the manual starts_with + slice dance.
        match value.strip_prefix("Bearer ") {
            Some(token) => AuthorizationHeader::BearerToken(token.to_owned()),
            None => AuthorizationHeader::ApiKey(value.to_owned()),
        }
    }
}
pub async fn lookup_token(token: &str) -> AuthResult<Option<String>> {
let results = DYNAMODB_CLIENT.query(QueryInput {
key_condition_expression: Some("#T = :token".to_string()),
expression_attribute_values: Some(hashmap! {
":token".to_string() => AttributeValue { s: Some(token.to_owned()), ..Default::default() }
}),
expression_attribute_names: Some(hashmap! {
"#T".to_string() => "token".to_string()
}),
table_name: TOKENS_TABLE.clone(),
index_name: Some(TOKENS_TABLE_TOKENS_INDEX.clone()),
..Default::default()
}).await
.map_err(|err| {
log::info!("error querying tokens index: {:?}", err);
AuthError::DatabaseError(format!("error querying database for api key"))
})?;
log::debug!("{:?}", results);
Ok(results
.items
.and_then(|items| items.into_iter().next())
.and_then(|attrs| attrs.get("user_id").map(|v| v.to_owned()))
.and_then(|attr_value| attr_value.s))
}
#[cfg(test)]
mod test {
    use super::*;
    // "Bearer " prefix is stripped before storing the token.
    #[test]
    fn test_auth_bearer_token_from_value() {
        let value = "Bearer foo";
        let expected = AuthorizationHeader::BearerToken("foo".to_owned());
        assert_eq!(AuthorizationHeader::from_value(value), expected);
    }
    // Non-bearer values are kept verbatim as API keys.
    #[test]
    fn test_auth_api_key_from_value() {
        let value = "api key";
        let expected = AuthorizationHeader::ApiKey("api key".to_owned());
        assert_eq!(AuthorizationHeader::from_value(value), expected);
    }
    // End-to-end: token extracted from the API Gateway request struct.
    #[test]
    fn test_auth_bearer_token_from_req() {
        let req = apigw::ApiGatewayCustomAuthorizerRequest {
            authorization_token: Some("Bearer foo".to_string()),
            method_arn: Some("arn".to_string()),
            type_: Some("TOKEN".to_string()),
        };
        let expected = AuthorizationHeader::BearerToken("foo".to_owned());
        assert_eq!(AuthorizationHeader::from_request(&req), expected);
    }
}
| true |
2ae543396d51b2a9d471bfb5c52c9bf4987ffec8
|
Rust
|
fasterthanlime/simdeez
|
/src/overloads/shr_assign.rs
|
UTF-8
| 2,290 | 2.625 | 3 |
[
"MIT"
] |
permissive
|
use super::*;
// Scalar (one-lane) fallbacks: `>>=` is the plain Rust shift-right on the
// wrapped integer (arithmetic, i.e. sign-extending, for signed types).
impl ShrAssign<i32> for I16x1 {
    #[inline(always)]
    fn shr_assign(&mut self, rhs: i32) {
        *self = I16x1(self.0 >> rhs);
    }
}
impl ShrAssign<i32> for I32x1 {
    #[inline(always)]
    fn shr_assign(&mut self, rhs: i32) {
        *self = I32x1(self.0 >> rhs);
    }
}
impl ShrAssign<i32> for I64x1 {
    #[inline(always)]
    fn shr_assign(&mut self, rhs: i32) {
        *self = I64x1(self.0 >> rhs);
    }
}
// Float lanes shift the raw IEEE-754 bit pattern (to_bits/from_bits), not
// the numeric value — mirrors how the SIMD versions treat registers as bits.
impl ShrAssign<i32> for F32x1 {
    #[inline(always)]
    fn shr_assign(&mut self, rhs: i32) {
        let bits = self.0.to_bits() >> rhs;
        *self = F32x1(f32::from_bits(bits));
    }
}
impl ShrAssign<i32> for F64x1 {
    #[inline(always)]
    fn shr_assign(&mut self, rhs: i32) {
        let bits = self.0.to_bits() >> rhs;
        *self = F64x1(f64::from_bits(bits));
    }
}
// SIMD lanes: the srai intrinsics perform an arithmetic (sign-extending)
// per-lane shift and require the shift count as a compile-time immediate,
// so constify_imm8 expands the runtime `rhs` into a dispatch over the
// possible immediate values.
impl ShrAssign<i32> for I16x8 {
    #[inline(always)]
    fn shr_assign(&mut self, rhs: i32) {
        macro_rules! call {
            ($rhs:expr) => {
                *self = unsafe { I16x8(_mm_srai_epi16(self.0, $rhs)) }
            };
        }
        constify_imm8!(rhs, call)
    }
}
impl ShrAssign<i32> for I16x16 {
    #[inline(always)]
    fn shr_assign(&mut self, rhs: i32) {
        macro_rules! call {
            ($rhs:expr) => {
                *self = unsafe { I16x16(_mm256_srai_epi16(self.0, $rhs)) }
            };
        }
        constify_imm8!(rhs, call)
    }
}
impl ShrAssign<i32> for I32x4 {
    #[inline(always)]
    fn shr_assign(&mut self, rhs: i32) {
        macro_rules! call {
            ($rhs:expr) => {
                *self = unsafe { I32x4(_mm_srai_epi32(self.0, $rhs)) }
            };
        }
        constify_imm8!(rhs, call)
    }
}
// SSE4.1 variant shares the SSE2 intrinsic; only the wrapper type differs.
impl ShrAssign<i32> for I32x4_41 {
    #[inline(always)]
    fn shr_assign(&mut self, rhs: i32) {
        macro_rules! call {
            ($rhs:expr) => {
                *self = unsafe { I32x4_41(_mm_srai_epi32(self.0, $rhs)) }
            };
        }
        constify_imm8!(rhs, call)
    }
}
impl ShrAssign<i32> for I32x8 {
    #[inline(always)]
    fn shr_assign(&mut self, rhs: i32) {
        macro_rules! call {
            ($rhs:expr) => {
                *self = unsafe { I32x8(_mm256_srai_epi32(self.0, $rhs)) }
            };
        }
        constify_imm8!(rhs, call)
    }
}
| true |
4333392ce7e768d35d79502f32c08c2e84bf8a25
|
Rust
|
manishsingh10895/ets-cli
|
/src/helper.rs
|
UTF-8
| 3,406 | 2.609375 | 3 |
[] |
no_license
|
use inflector::Inflector;
/// Generates an express route class stub for `name`.
/// Returns `(file_contents, file_name)`.
pub fn gen_route_content(name: &str) -> (String, String) {
    // "user_profile" -> "UserProfileRoute"
    let class_name = format!("{}Route", name.to_class_case());
    let file_name = format!("{}.route.ts", name);
    let content = format!(
        r#"
import {{Express, Router}} from "express";
export default class {t_name} {{
constructor(app: Express) {{
let _router = Router();
app.use('/v1/{name}', _router);
}}
}}
"#,
        t_name = class_name,
        name = name
    );
    (content, file_name)
}
/// Generates a singleton service class stub for `name`.
/// Returns `(file_contents, file_name)`.
pub fn gen_service_content(name: &str) -> (String, String) {
    // "user_profile" -> "UserProfileService"
    let class_name = format!("{}Service", name.to_class_case());
    let file_name = format!("{}.service.ts", name);
    let content = format!(
        r#"
import logger from "../helpers/logger";
import {{ AppError }} from "../infra/app-error";
class {t_name} {{
}}
export default new {t_name}();
"#,
        t_name = class_name,
    );
    (content, file_name)
}
/// Generates a controller class stub for `name`.
/// Returns `(file_contents, file_name)`.
pub fn gen_controller_content(name: &str) -> (String, String) {
    // "user_profile" -> "UserProfileController"
    let class_name = format!("{}Controller", name.to_class_case());
    let file_name = format!("{}.controller.ts", name);
    let content = format!(
        r#"
import {{ Request, Response }} from 'express';
import {{ validationResult }} from 'express-validator';
import logger from '../helpers/logger';
import NResponse from '../services/response.service';
import {{ Errors }} from '../infra/messages';
import {{ AppError }} from '../infra/app-error';
export default class {t_name} {{
}}
"#,
        t_name = class_name,
    );
    (content, file_name)
}
/// Generates a mongoose schema/model plus TS interfaces for `name`.
/// Returns `(file_contents, file_name)`.
pub fn gen_model(name: &str) -> (String, String) {
    // "user_profile" -> "UserProfile". The old code wrapped this in a
    // redundant format!("{}", …) plus several needless clones.
    let cap_name = name.to_class_case();
    let t_name = format!("I{}", cap_name);
    let t_doc_name = format!("I{}Document", cap_name);
    let t_schema_name = format!("{}Schema", cap_name);
    // NOTE(review): the model is registered under the *schema* name
    // ('{t_schema_name}'), not `cap_name` — verify that is intentional.
    let content = format!(
        r#"
import {{ Schema, Document, model, Model, Mongoose, Types }} from "mongoose";
export interface {t_doc_name} extends Document, {t_name} {{
}}
export interface {t_name} {{
_id: any,
createdAt: Date,
updatedAt: Date,
}}
export const {t_schema_name}: Schema = new Schema({{
createdAt: {{ type: Date, default: new Date() }},
updatedAt: {{ type: Date, default: new Date() }}
}})
export const {cap_name}: Model<{t_doc_name}> = model<{t_doc_name}>('{t_schema_name}', {t_schema_name});
"#,
        t_name = t_name,
        t_doc_name = t_doc_name,
        cap_name = cap_name,
        t_schema_name = t_schema_name
    );
    (content, format!("{}.model.ts", name))
}
/// Generates an express-validator request-schema stub for `name`.
/// Returns `(file_contents, file_name)`.
pub fn gen_request_schema(name: &str) -> (String, String) {
    // "user_profile" -> "UserProfileRequestSchema"
    let schema_name = format!("{}RequestSchema", name.to_class_case());
    let file_name = format!("{}.request-schema.ts", name);
    let content = format!(
        r#"
import {{ checkSchema, ValidationSchema }} from 'express-validator';
export const {t_name}: ValidationSchema = {{
}}
"#,
        t_name = schema_name,
    );
    (content, file_name)
}
/// Generates a middleware class stub (static methods only) for `name`.
/// Returns `(file_contents, file_name)`.
pub fn gen_middleware_content(name: &str) -> (String, String) {
    // "user_profile" -> "UserProfileMiddleware"
    let class_name = format!("{}Middleware", name.to_class_case());
    let file_name = format!("{}.middleware.ts", name);
    let content = format!(
        r#"
/**
* Use only static methods here
*
*/
import logger from "../helpers/logger";
import {{ Request, Response }} from 'express';
import NResponse from "../services/response.service";
export default class {t_name} {{
}}
"#,
        t_name = class_name,
    );
    (content, file_name)
}
| true |
06b681fc4e2a910f446dce92b363af15aa915213
|
Rust
|
Type-3/rAdminContacts
|
/src/factories/email.rs
|
UTF-8
| 1,064 | 2.765625 | 3 |
[] |
no_license
|
use diesel::RunQueryDsl;
use radmin::diesel::PgConnection;
use radmin::serde::{Deserialize, Serialize};
use crate::models::Email;
use crate::schema::email_addresses;
/// Test-data factory for rows of the `email_addresses` table; Insertable
/// lets diesel write the struct directly.
#[derive(Debug, PartialEq, Clone, Serialize, Deserialize, Insertable)]
#[table_name = "email_addresses"]
pub struct EmailFactory {
    /// Local part of the address (before the '@').
    pub account: String,
    /// Domain part of the address (after the '@').
    pub domain: String,
}
impl EmailFactory {
    /// Builds a factory from account (local) and domain parts.
    pub fn new<S: Into<String>>(account: S, domain: S) -> EmailFactory {
        EmailFactory {
            account: account.into(),
            domain: domain.into(),
        }
    }

    /// Inserts the factory's values and returns the created row.
    ///
    /// Panics on insert failure (acceptable for a test-data factory).
    pub fn insert(self, conn: &PgConnection) -> Email {
        radmin::diesel::insert_into(email_addresses::table)
            .values(&self)
            .get_result(conn)
            // Fixed copy-paste: the message used to say "Category Factory".
            .expect("Failed to insert new Email Factory")
    }

    /// Builder-style override of the account part.
    pub fn account<S: Into<String>>(mut self, account: S) -> EmailFactory {
        self.account = account.into();
        self
    }

    /// Builder-style override of the domain part.
    pub fn domain<S: Into<String>>(mut self, domain: S) -> EmailFactory {
        self.domain = domain.into();
        self
    }
}
| true |
81f8e723847c91dbd631b62f42c98461d1cc46ef
|
Rust
|
defghij/exercism_solutions
|
/rust/leap/src/lib.rs
|
UTF-8
| 564 | 3.359375 | 3 |
[] |
no_license
|
/// Reports whether `year` is a leap year in the Gregorian calendar.
///
/// A year is a leap year when it is divisible by 4, except century years
/// (divisible by 100), which are leap years only when also divisible by
/// 400. For example, 1996 and 2000 are leap years; 1997 and 1900 are not.
pub fn is_leap_year(year: u64) -> bool {
    // Equivalent to the old nested condition, with the redundant
    // `return true/false` branches collapsed into one expression.
    year % 4 == 0 && (year % 100 != 0 || year % 400 == 0)
}
| true |
a583e6a7e2c8062dbfd74448427523b1536caa25
|
Rust
|
juniuszhou/forest
|
/vm/src/exit_code.rs
|
UTF-8
| 2,948 | 2.8125 | 3 |
[
"MIT",
"Apache-2.0"
] |
permissive
|
// Copyright 2020 ChainSafe Systems
// SPDX-License-Identifier: Apache-2.0, MIT
/// ExitCode defines the exit code from the VM execution
/// (see `SysCode` / `UserCode` for the specific error payloads).
#[derive(PartialEq, Eq, Debug, Clone)]
pub enum ExitCode {
    /// Code for successful VM execution
    Success,
    /// VM execution failed with system error
    SystemErrorCode(SysCode),
    /// VM execution failed with a user code
    UserDefinedError(UserCode),
}
/// Defines the system error codes defined by the protocol
/// (carried by `ExitCode::SystemErrorCode`).
#[derive(PartialEq, Eq, Debug, Clone)]
pub enum SysCode {
    /// ActorNotFound represents a failure to find an actor.
    ActorNotFound,
    /// ActorCodeNotFound represents a failure to find the code for a
    /// particular actor in the VM registry.
    ActorCodeNotFound,
    /// InvalidMethod represents a failure to find a method in
    /// an actor
    InvalidMethod,
    /// InvalidArguments indicates that a method was called with the incorrect
    /// number of arguments, or that its arguments did not satisfy its
    /// preconditions
    InvalidArguments,
    /// InsufficientFunds represents a failure to apply a message, as
    /// it did not carry sufficient funds for its application.
    InsufficientFunds,
    /// InvalidCallSeqNum represents a message invocation out of sequence.
    /// This happens when message.CallSeqNum is not exactly actor.CallSeqNum + 1
    InvalidCallSeqNum,
    /// OutOfGas is returned when the execution of an actor method
    /// (including its subcalls) uses more gas than initially allocated.
    OutOfGas,
    /// RuntimeAPIError is returned when an actor method invocation makes a call
    /// to the runtime that does not satisfy its preconditions.
    RuntimeAPIError,
    /// RuntimeAssertFailure is returned when an actor method invocation calls
    /// rt.Assert with a false condition.
    RuntimeAssertFailure,
    /// MethodSubcallError is returned when an actor method's Send call has
    /// returned with a failure error code (and the Send call did not specify
    /// to ignore errors).
    MethodSubcallError,
}
/// defines user specific error codes from VM execution
/// (carried by `ExitCode::UserDefinedError`; variant meanings mirror their
/// names — see the protocol spec for exact semantics).
#[derive(PartialEq, Eq, Debug, Clone)]
pub enum UserCode {
    InsufficientFunds,
    InvalidArguments,
    InconsistentState,
    InvalidSectorPacking,
    SealVerificationFailed,
    DeadlineExceeded,
    InsufficientPledgeCollateral,
}
impl ExitCode {
    /// Returns true if the exit code was a success.
    pub fn is_success(&self) -> bool {
        matches!(self, ExitCode::Success)
    }

    /// Returns true if the VM exited with any error code
    /// (always the exact complement of `is_success`).
    pub fn is_error(&self) -> bool {
        !self.is_success()
    }

    /// Returns true if the execution's state changes may be persisted;
    /// only successful executions update state, so this delegates to
    /// `is_success` instead of duplicating the match.
    pub fn allows_state_update(&self) -> bool {
        self.is_success()
    }
}
| true |
ad73489fbcd75ae0a90fe70871409f2b28d3b5bf
|
Rust
|
aldis-ameriks/kliversala-bot
|
/src/telegram/error.rs
|
UTF-8
| 1,046 | 2.828125 | 3 |
[
"MIT"
] |
permissive
|
use std::error::Error;
use std::fmt;
use reqwest::Error as ReqwestError;
use serde::export::Formatter;
use serde_json::error::Error as SerdeError;
/// Errors surfaced by the Telegram client.
#[derive(Debug)]
pub enum TelegramError {
    /// Transport-level failure from reqwest.
    NetworkError(ReqwestError),
    /// Response body could not be deserialized.
    MalformedResponse(SerdeError),
    /// The API responded but reported failure; payload is the description.
    Unsuccessful(String),
}
// `From` conversions so `?` can lift underlying errors into TelegramError.
impl From<ReqwestError> for TelegramError {
    fn from(error: ReqwestError) -> Self {
        TelegramError::NetworkError(error)
    }
}
impl From<SerdeError> for TelegramError {
    fn from(error: SerdeError) -> Self {
        TelegramError::MalformedResponse(error)
    }
}
impl From<String> for TelegramError {
    fn from(error: String) -> Self {
        TelegramError::Unsuccessful(error)
    }
}
// Marker impl: Debug (derived) + Display (below) satisfy std::error::Error.
impl Error for TelegramError {}
impl fmt::Display for TelegramError {
    /// Renders the variant by delegating to the wrapped value's own
    /// `Display` implementation.
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        match self {
            TelegramError::NetworkError(inner) => write!(f, "{}", inner),
            TelegramError::MalformedResponse(inner) => write!(f, "{}", inner),
            TelegramError::Unsuccessful(message) => write!(f, "{}", message),
        }
    }
}
| true |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.