| Column | Type | Range / Classes |
|---|---|---|
| blob_id | string | length 40 |
| language | string | 1 class |
| repo_name | string | length 5–140 |
| path | string | length 5–183 |
| src_encoding | string | 6 classes |
| length_bytes | int64 | 12–5.32M |
| score | float64 | 2.52–4.94 |
| int_score | int64 | 3–5 |
| detected_licenses | list | length 0–47 |
| license_type | string | 2 classes |
| text | string | length 12–5.32M |
| download_success | bool | 1 class |
blob_id: b4a8e65bed45d75dd4f384ed9a7bf50782be4a19 | language: Rust | repo_name: ashleysmithgpu/rust_vulkan_api_generator | path: /vkraw/tests/device.rs | src_encoding: UTF-8 | length_bytes: 6,871 | score: 3 | int_score: 3 | detected_licenses: [] | license_type: no_license

#[cfg(test)]
mod tests {
use std::ptr;
use std::vec;
use std;
/*
enum VulkanParameter {
ApplicationInfo { next: Option<VulkanParameter>, application_name: str, application_version: u32, engine_name: str, engine_version: u32, api_version: u32 },
InstanceCreateInfo { next: Option<VulkanParameter>, flags: u32, application_info: VulkanParameter, enabled_layers: [str], enabled_extensions: [str] }
}
fn create_instance(create_info: VulkanParameter) -> Result<vkraw::VkInstance, vkraw::VkResult> {
unsafe {
}
}
*/
struct InstanceCreateInfo {
pub flags: u32,
pub application_info: ApplicationInfo,
pub enabled_layers: vec::Vec<String>,
pub enabled_extensions: vec::Vec<String>,
}
struct ApplicationInfo {
pub application_name: String,
pub application_version: u32,
pub engine_name: String,
pub engine_version: u32,
pub api_version: u32,
}
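// Thin convenience wrapper around vkCreateInstance for these tests. Note: String::as_ptr()
// does not yield a NUL-terminated C string, and casting Vec<String>::as_ptr() to
// *const *const u8 relies on String's internal layout; a more robust wrapper would build
// std::ffi::CString values and a Vec of their raw pointers before making the call.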
fn create_instance(create_info: InstanceCreateInfo) -> Result<vkraw::VkInstance, vkraw::VkResult> {
let mut instance: vkraw::VkInstance = 0;
let app_name: *const u8 = create_info.application_info.application_name.as_ptr();
let engine_name: *const u8 = create_info.application_info.engine_name.as_ptr();
let application_info = vkraw::VkApplicationInfo {
sType: vkraw::VkStructureType::VK_STRUCTURE_TYPE_APPLICATION_INFO,
pNext: ptr::null(),
pApplicationName: app_name,
applicationVersion: create_info.application_info.application_version,
pEngineName: engine_name,
engineVersion: create_info.application_info.engine_version,
apiVersion: create_info.application_info.api_version,
};
let create_info = vkraw::VkInstanceCreateInfo {
sType: vkraw::VkStructureType::VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO,
pNext: ptr::null(),
flags: create_info.flags,
pApplicationInfo: &application_info,
enabledLayerCount: create_info.enabled_layers.len() as u32,
ppEnabledLayerNames: create_info.enabled_layers.as_ptr() as *const*const u8,
enabledExtensionCount: create_info.enabled_extensions.len() as u32,
ppEnabledExtensionNames: create_info.enabled_extensions.as_ptr() as *const*const u8,
};
let res: vkraw::VkResult;
unsafe {
res = vkraw::vkCreateInstance(&create_info, ptr::null(), &mut instance);
};
match res {
vkraw::VkResult::VK_SUCCESS => Ok(instance),
_ => Err(res)
}
}
fn destroy_instance(instance: vkraw::VkInstance) {
unsafe {
vkraw::vkDestroyInstance(instance, ptr::null());
}
}
#[test]
fn test_device_bad_extensions() {
// Bad extensions
let ici = InstanceCreateInfo {
flags: 0,
application_info: ApplicationInfo {
application_name: "test".to_string(),
application_version: 1,
engine_name: "test_engine".to_string(),
engine_version: 0,
api_version: 0,
},
enabled_layers: vec::Vec::new(),
enabled_extensions: vec!["non_existant_extension".to_string()],
};
let res = create_instance(ici);
assert!(res.is_err());
match res {
Err(e) => assert!(e == vkraw::VkResult::VK_ERROR_EXTENSION_NOT_PRESENT),
Ok(_) => {}
}
std::thread::sleep(std::time::Duration::from_millis(1000));
}
#[test]
fn test_device_bad_layers() {
// Bad layers
let ici = InstanceCreateInfo {
flags: 0,
application_info: ApplicationInfo {
application_name: "test".to_string(),
application_version: 1,
engine_name: "test_engine".to_string(),
engine_version: 0,
api_version: 0,
},
enabled_layers: vec!["non_existant_layer".to_string()],
enabled_extensions: vec::Vec::new()
};
let res = create_instance(ici);
assert!(res.is_err());
match res {
Err(e) => assert!(e == vkraw::VkResult::VK_ERROR_LAYER_NOT_PRESENT),
Ok(_) => {}
}
std::thread::sleep(std::time::Duration::from_millis(1000));
}
// TODO: no way to use e.g. #[should_fail] here, since we segfault rather than panic
/*
#[test]
#[should_fail]
fn test_device_bad_layers_segfault() {
let res: vkraw::VkResult;
let mut instance: vkraw::VkInstance = 0;
let application_info = vkraw::VkApplicationInfo {
sType: vkraw::VkStructureType::VK_STRUCTURE_TYPE_APPLICATION_INFO,
pNext: ptr::null(),
pApplicationName: "app name".as_ptr(),
applicationVersion: vkraw::VK_MAKE_VERSION(1,0,0),
pEngineName: "engine name".as_ptr(),
engineVersion: vkraw::VK_MAKE_VERSION(1,0,0),
apiVersion: vkraw::VK_MAKE_VERSION(1,0,0),
};
let create_info = vkraw::VkInstanceCreateInfo {
sType: vkraw::VkStructureType::VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO,
pNext: ptr::null(),
flags: 0,
pApplicationInfo: &application_info,
enabledLayerCount: 100,
ppEnabledLayerNames: ptr::null(),
enabledExtensionCount: 0,
ppEnabledExtensionNames: ptr::null(),
};
unsafe {
res = vkraw::vkCreateInstance(&create_info, ptr::null(), &mut instance);
};
}*/
#[test]
fn test_device_bad_version() {
// Bad api version
let ici = InstanceCreateInfo {
flags: 0,
application_info: ApplicationInfo {
application_name: "test".to_string(),
application_version: 1,
engine_name: "test_engine".to_string(),
engine_version: 0,
api_version: vkraw::VK_MAKE_VERSION(0, 0, 0),
},
enabled_layers: vec::Vec::new(),
enabled_extensions: vec::Vec::new()
};
let res = create_instance(ici);
assert!(res.is_err());
match res {
Err(e) => assert!(e == vkraw::VkResult::VK_ERROR_INCOMPATIBLE_DRIVER),
Ok(_) => {}
}
std::thread::sleep(std::time::Duration::from_millis(1000));
}
#[test]
fn test_ok_create_destroy() {
// Ok create destroy
let ici = InstanceCreateInfo {
flags: 0,
application_info: ApplicationInfo {
application_name: "test".to_string(),
application_version: 1,
engine_name: "test_engine".to_string(),
engine_version: 0,
api_version: 0,
},
enabled_layers: vec::Vec::new(),
enabled_extensions: vec::Vec::new()
};
let res = create_instance(ici);
assert!(res.is_ok());
destroy_instance(res.unwrap());
std::thread::sleep(std::time::Duration::from_millis(1000));
}
#[test]
fn test_device_two_instances() {
let ici = InstanceCreateInfo {
flags: 0,
application_info: ApplicationInfo {
application_name: "test".to_string(),
application_version: 1,
engine_name: "test_engine".to_string(),
engine_version: 0,
api_version: 0,
},
enabled_layers: vec::Vec::new(),
enabled_extensions: vec::Vec::new()
};
let res = create_instance(ici);
assert!(res.is_ok());
let ici2 = InstanceCreateInfo {
flags: 0,
application_info: ApplicationInfo {
application_name: "test".to_string(),
application_version: 1,
engine_name: "test_engine".to_string(),
engine_version: 0,
api_version: 0,
},
enabled_layers: vec::Vec::new(),
enabled_extensions: vec::Vec::new()
};
let res2 = create_instance(ici2);
assert!(res2.is_ok());
destroy_instance(res2.unwrap());
destroy_instance(res.unwrap());
std::thread::sleep(std::time::Duration::from_millis(1000));
}
}
download_success: true

blob_id: beaefd3bbf0b10c92ada4d1b0f3bd5eecdc001a9 | language: Rust | repo_name: vmx/rust-ipld | path: /dag-cbor/tests/lib.rs | src_encoding: UTF-8 | length_bytes: 1,025 | score: 2.828125 | int_score: 3 | detected_licenses: ["MIT", "Apache-2.0"] | license_type: permissive

use std::collections::BTreeMap;
use ipld_core::Ipld;
use ipld_dag_cbor;
#[test]
fn encode_struct() {
// Create a contact object that looks like:
// Contact { name: "Hello World!", details: CID }
let mut map = BTreeMap::new();
map.insert("name".to_string(), Ipld::String("Hello World!".to_string()));
map.insert("details".to_string(), Ipld::Link(vec![7, 8, 9]));
let contact = Ipld::Map(map);
let contact_encoded = ipld_dag_cbor::encode(&contact).unwrap();
println!("encoded: {:02x?}", contact_encoded);
let expected_encoded = vec![
0xa2, 0x67, 0x64, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0xd8, 0x2a, 0x43, 0x07, 0x08, 0x09,
0x64, 0x6e, 0x61, 0x6d, 0x65, 0x6c, 0x48, 0x65, 0x6c, 0x6c, 0x6f, 0x20, 0x57, 0x6f, 0x72,
0x6c, 0x64, 0x21,
];
println!("expected: {:02x?}", expected_encoded);
assert_eq!(contact_encoded, expected_encoded);
let contact_decoded: Ipld = ipld_dag_cbor::decode(&contact_encoded).unwrap();
assert_eq!(contact_decoded, contact);
}
download_success: true

blob_id: 6b8de65a412ad43e0463c43375ba5770aa7337d8 | language: Rust | repo_name: jakmeier/paddlers-browser-game | path: /paddlers-game-master/src/setup/map_generation/village_creation.rs | src_encoding: UTF-8 | length_bytes: 5,092 | score: 2.921875 | int_score: 3 | detected_licenses: ["Apache-2.0", "MIT", "LicenseRef-scancode-unknown-license-reference"] | license_type: permissive

//! For generating villages on the map
use crate::db::DB;
use crate::setup::map_generation::Lcg;
use paddlers_shared_lib::game_mechanics::map::*;
use paddlers_shared_lib::prelude::*;
impl DB {
pub fn add_village(&self, pid: PlayerKey) -> Result<Village, &'static str> {
// Find unsaturated stream
let streams = self.streams_to_add_village();
for s in &streams {
// Pick a position on it
match self.insert_village_on_stream(s, Some(pid)) {
Err(_) => {}
Ok(v) => return Ok(v),
}
}
Err("World full: No space for another village")
}
pub fn generate_anarchists(&self, n: usize, seed: u64) -> Result<(), &'static str> {
let mut lcg = Lcg::new(seed);
for i in 1..n + 1 {
self.add_anarchists_village(StreamKey(i as i64), &mut lcg)?;
}
Ok(())
}
fn add_anarchists_village(
&self,
stream_id: StreamKey,
lcg: &mut Lcg,
) -> Result<Village, &'static str> {
let s = self.stream(stream_id);
let village = self.insert_village_on_stream(&s, None)?;
self.generate_anarchist_town_content(village.key(), lcg)?;
Ok(village)
}
fn insert_village_on_stream(
&self,
s: &Stream,
player: Option<PlayerKey>,
) -> Result<Village, &'static str> {
let vp = village_positions(&s.control_points);
for (x, y) in vp {
if self.map_position_empty(x, y) {
let v = NewVillage {
stream_id: s.id,
x,
y,
player_id: player.as_ref().map(PlayerKey::num),
faith: None, // Start with default value
};
return Ok(self.insert_villages(&[v])[0]);
}
}
Err("Stream full: No space for another village")
}
fn streams_to_add_village(&self) -> Vec<Stream> {
// Good enough for now
self.streams(0.0, MAP_MAX_X as f32)
}
fn map_position_empty(&self, x: f32, y: f32) -> bool {
self.village_at(x, y).is_none()
}
#[cfg(debug_assertions)]
#[allow(dead_code)]
fn test_add_all_villages(&self) {
let streams = self.streams(-1.0, 21.0);
for s in streams {
let vp = village_positions(&s.control_points);
for (x, y) in vp {
let v = NewVillage {
stream_id: s.id,
x,
y,
player_id: None,
faith: None, // Start with default value
};
self.insert_villages(&[v]);
}
}
}
}
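/// Samples each quadratic Bézier segment of a stream (given as a flat list of x,y
/// control points) and collects the map tiles that lie close to the curve, skipping
/// the central river row; the returned coordinates are candidate village positions.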
fn village_positions(stream_points: &[f32]) -> Vec<(f32, f32)> {
let mut v: std::collections::HashSet<(i32, i32)> = std::collections::HashSet::new();
let points: Vec<(f32, f32)> = stream_points
.chunks_exact(2)
.map(|t| (t[0], t[1]))
.collect();
let mut r = P(points[0].0, points[0].1);
for slice in points.windows(2) {
match slice {
&[p, q] => {
let p = P(p.0, p.1);
let q = P(q.0, q.1);
/* p, q are Bézier control points;
 * their center defines the fixed point on the curve
 * for the previous pair of control points (o, p)
 */
let o = P((p.0 + q.0) / 2.0, (p.1 + q.1) / 2.0);
/* formula (quadratic Bézier from p at t=0 to q at t=1, with middle control point r):
 * f(t) = (1-t)*[(1-t)*p + t*r] + t*[(1-t)*r + t*q]
 *      = (1-t)^2*p + 2t(1-t)*r + t^2*q,  for 0 <= t <= 1
 */
let n = 4;
for t in 0..n {
let t = 1.0 / n as f32 * t as f32;
let f = (p * (1.0 - t) + r * t) * (1.0 - t) + (r * (1.0 - t) + q * t) * t;
let draw_anker = ((f.0 - 0.5).round(), (f.1 - 0.5).round());
let on_map = draw_anker.1 < MAP_H as f32 && draw_anker.1 >= 0.0;
let on_river = draw_anker.1 == (MAP_H - 1) as f32 / 2.0;
let distance2 =
(draw_anker.0 + 0.5 - f.0).powi(2) + (draw_anker.1 + 0.5 - f.1).powi(2);
// defines radius of circle around center
let distance_close_enough = distance2 < 0.15;
if !on_river && distance_close_enough && on_map {
// Village indices are stored 1-based (human-readable), hence the +1 offsets
v.insert((draw_anker.0 as i32 + 1, draw_anker.1 as i32 + 1));
}
}
r = o;
}
_ => panic!(),
}
}
v.drain().map(|(a, b)| (a as f32, b as f32)).collect()
}
use core::ops::*;
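/// Minimal 2-D point used for the Bézier evaluation above; only scalar
/// multiplication and addition are needed.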
#[derive(Copy, Clone, Debug)]
struct P(f32, f32);
impl Mul<f32> for P {
type Output = P;
fn mul(self, rhs: f32) -> P {
P(self.0 * rhs, self.1 * rhs)
}
}
impl Add for P {
type Output = P;
fn add(self, rhs: P) -> P {
P(self.0 + rhs.0, self.1 + rhs.1)
}
}
download_success: true

blob_id: 6891f2913e2a6c8eaf888d6de93e2649e591654a | language: Rust | repo_name: zachross015/mixal | path: /src/word.rs | src_encoding: UTF-8 | length_bytes: 2,091 | score: 3.28125 | int_score: 3 | detected_licenses: [] | license_type: no_license

use std::fmt;
use crate::instruction_functions::adjusted_field_specification;
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub struct Word {
pub positive: bool,
pub bytes: [u8; 5],
}
impl Word {
pub fn new(positive: bool, b: [u8; 5]) -> Word {
Word {
positive: positive,
bytes: b,
}
}
pub fn default() -> Word {
Word::new(true, [0; 5])
}
pub fn from_value(value: i64) -> Word {
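// Split the value into five big-endian 8-bit bytes. Note (added comment): for
// negative inputs this captures the two's-complement bit pattern of the low 40 bits
// rather than the magnitude, since % and >> keep the sign in Rust.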
let positive = value >= 0;
let mut bytes : [u8; 5] = [0; 5];
let mut value_mut = value.clone();
for i in 0..5 {
bytes[4 - i] = (value_mut % 256) as u8;
value_mut = value_mut >> 8;
}
Word::new(positive, bytes)
}
pub fn address(&self) -> usize {
self.field_value((1, 2)) as usize
}
pub fn index(&self) -> u8 {
self.bytes[2]
}
// Field specification byte: a field range (L:R) is encoded as 8*L + R
pub fn field(&self) -> u8 {
self.bytes[3]
}
pub fn opcode(&self) -> u8 {
self.bytes[4]
}
pub fn negate(&self) -> Word {
let mut new_word = self.clone();
new_word.positive = !new_word.positive;
new_word
}
pub fn field_value(&self, field_specification: (usize, usize)) -> i64 {
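// Concatenate bytes l..=r big-endian into an i64; the sign is applied only when the
// field specification includes byte 0 (the sign position).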
let (zero_included, only_zero, (l, r)) = adjusted_field_specification(field_specification);
if only_zero { return 0; }
let mut result = self.bytes[l] as i64;
for i in (l + 1)..=(r) {
result = result << 8;
result = result + (self.bytes[i] as i64);
}
result * (if zero_included && !self.positive { -1 } else { 1 })
}
}
impl fmt::Display for Word {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{:>2} {:>4} {:>4} {:>4} {:>4} {:>4}",
if self.positive { '+' } else { '-' },
self.bytes[0],
self.bytes[1],
self.bytes[2],
self.bytes[3],
self.bytes[4]
)
}
}
download_success: true

blob_id: c4b74e450464974c68c2103b594ed943e8b16ca7 | language: Rust | repo_name: irbis-labs/rsmorphy | path: /src/container/hyphen.rs | src_encoding: UTF-8 | length_bytes: 404 | score: 3.015625 | int_score: 3 | detected_licenses: ["Apache-2.0", "MIT", "LicenseRef-scancode-unknown-license-reference"] | license_type: permissive

#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord)]
pub struct HyphenSeparatedParticle {
pub particle: String,
}
impl HyphenSeparatedParticle {
pub fn new<P>(particle: P) -> Self
where
P: Into<String>,
{
let particle = particle.into();
HyphenSeparatedParticle { particle }
}
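/// Human-readable Russian name of this token type ("частица" means "particle").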
pub fn title_rus(&self) -> &'static str {
"частица"
}
}
download_success: true

blob_id: ad472ebdc443a19156d2cc74d691a6f0bf84a325 | language: Rust | repo_name: adamgreig/stm32ral | path: /src/stm32f3/stm32f3x4/rcc.rs | src_encoding: UTF-8 | length_bytes: 63,340 | score: 2.640625 | int_score: 3 | detected_licenses: ["MIT", "LicenseRef-scancode-unknown-license-reference", "Apache-2.0"] | license_type: permissive

#![allow(non_snake_case, non_upper_case_globals)]
#![allow(non_camel_case_types)]
//! Reset and clock control
use crate::RWRegister;
#[cfg(not(feature = "nosync"))]
use core::marker::PhantomData;
/// Clock control register
pub mod CR {
/// Internal High Speed clock enable
pub mod HSION {
/// Offset (0 bits)
pub const offset: u32 = 0;
/// Mask (1 bit: 1 << 0)
pub const mask: u32 = 1 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
/// Read-write values
pub mod RW {
/// 0b0: Clock Off
pub const Off: u32 = 0b0;
/// 0b1: Clock On
pub const On: u32 = 0b1;
}
}
/// Internal High Speed clock ready flag
pub mod HSIRDY {
/// Offset (1 bits)
pub const offset: u32 = 1;
/// Mask (1 bit: 1 << 1)
pub const mask: u32 = 1 << offset;
/// Read-only values
pub mod R {
/// 0b0: Clock not ready
pub const NotReady: u32 = 0b0;
/// 0b1: Clock ready
pub const Ready: u32 = 0b1;
}
/// Write-only values (empty)
pub mod W {}
/// Read-write values (empty)
pub mod RW {}
}
/// Internal High Speed clock trimming
pub mod HSITRIM {
/// Offset (3 bits)
pub const offset: u32 = 3;
/// Mask (5 bits: 0b11111 << 3)
pub const mask: u32 = 0b11111 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
/// Read-write values (empty)
pub mod RW {}
}
/// Internal High Speed clock Calibration
pub mod HSICAL {
/// Offset (8 bits)
pub const offset: u32 = 8;
/// Mask (8 bits: 0xff << 8)
pub const mask: u32 = 0xff << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
/// Read-write values (empty)
pub mod RW {}
}
/// External High Speed clock enable
pub mod HSEON {
/// Offset (16 bits)
pub const offset: u32 = 16;
/// Mask (1 bit: 1 << 16)
pub const mask: u32 = 1 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
pub use super::HSION::RW;
}
/// External High Speed clock ready flag
pub mod HSERDY {
/// Offset (17 bits)
pub const offset: u32 = 17;
/// Mask (1 bit: 1 << 17)
pub const mask: u32 = 1 << offset;
pub use super::HSIRDY::R;
/// Write-only values (empty)
pub mod W {}
/// Read-write values (empty)
pub mod RW {}
}
/// External High Speed clock Bypass
pub mod HSEBYP {
/// Offset (18 bits)
pub const offset: u32 = 18;
/// Mask (1 bit: 1 << 18)
pub const mask: u32 = 1 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
/// Read-write values
pub mod RW {
/// 0b0: HSE crystal oscillator not bypassed
pub const NotBypassed: u32 = 0b0;
/// 0b1: HSE crystal oscillator bypassed with external clock
pub const Bypassed: u32 = 0b1;
}
}
/// Clock Security System enable
pub mod CSSON {
/// Offset (19 bits)
pub const offset: u32 = 19;
/// Mask (1 bit: 1 << 19)
pub const mask: u32 = 1 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
/// Read-write values
pub mod RW {
/// 0b0: Clock security system disabled (clock detector OFF)
pub const Off: u32 = 0b0;
/// 0b1: Clock security system enable (clock detector ON if the HSE is ready, OFF if not)
pub const On: u32 = 0b1;
}
}
/// PLL enable
pub mod PLLON {
/// Offset (24 bits)
pub const offset: u32 = 24;
/// Mask (1 bit: 1 << 24)
pub const mask: u32 = 1 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
pub use super::HSION::RW;
}
/// PLL clock ready flag
pub mod PLLRDY {
/// Offset (25 bits)
pub const offset: u32 = 25;
/// Mask (1 bit: 1 << 25)
pub const mask: u32 = 1 << offset;
pub use super::HSIRDY::R;
/// Write-only values (empty)
pub mod W {}
/// Read-write values (empty)
pub mod RW {}
}
}
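// Usage sketch (hypothetical, not part of the generated file): every field module
// exposes `offset`, `mask` and named values, so a plain read-modify-write that turns
// the HSE oscillator on could look like
//     let new_cr = (cr_value & !CR::HSEON::mask) | (CR::HSEON::RW::On << CR::HSEON::offset);
// where `cr_value` is the current CR register value read through the RCC instance.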
/// Clock configuration register (RCC_CFGR)
pub mod CFGR {
/// System clock Switch
pub mod SW {
/// Offset (0 bits)
pub const offset: u32 = 0;
/// Mask (2 bits: 0b11 << 0)
pub const mask: u32 = 0b11 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
/// Read-write values
pub mod RW {
/// 0b00: HSI selected as system clock
pub const HSI: u32 = 0b00;
/// 0b01: HSE selected as system clock
pub const HSE: u32 = 0b01;
/// 0b10: PLL selected as system clock
pub const PLL: u32 = 0b10;
}
}
/// System Clock Switch Status
pub mod SWS {
/// Offset (2 bits)
pub const offset: u32 = 2;
/// Mask (2 bits: 0b11 << 2)
pub const mask: u32 = 0b11 << offset;
/// Read-only values
pub mod R {
/// 0b00: HSI oscillator used as system clock
pub const HSI: u32 = 0b00;
/// 0b01: HSE oscillator used as system clock
pub const HSE: u32 = 0b01;
/// 0b10: PLL used as system clock
pub const PLL: u32 = 0b10;
}
/// Write-only values (empty)
pub mod W {}
/// Read-write values (empty)
pub mod RW {}
}
/// AHB prescaler
pub mod HPRE {
/// Offset (4 bits)
pub const offset: u32 = 4;
/// Mask (4 bits: 0b1111 << 4)
pub const mask: u32 = 0b1111 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
/// Read-write values
pub mod RW {
/// 0b0000: SYSCLK not divided
pub const Div1: u32 = 0b0000;
/// 0b1000: SYSCLK divided by 2
pub const Div2: u32 = 0b1000;
/// 0b1001: SYSCLK divided by 4
pub const Div4: u32 = 0b1001;
/// 0b1010: SYSCLK divided by 8
pub const Div8: u32 = 0b1010;
/// 0b1011: SYSCLK divided by 16
pub const Div16: u32 = 0b1011;
/// 0b1100: SYSCLK divided by 64
pub const Div64: u32 = 0b1100;
/// 0b1101: SYSCLK divided by 128
pub const Div128: u32 = 0b1101;
/// 0b1110: SYSCLK divided by 256
pub const Div256: u32 = 0b1110;
/// 0b1111: SYSCLK divided by 512
pub const Div512: u32 = 0b1111;
}
}
/// APB Low speed prescaler (APB1)
pub mod PPRE1 {
/// Offset (8 bits)
pub const offset: u32 = 8;
/// Mask (3 bits: 0b111 << 8)
pub const mask: u32 = 0b111 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
/// Read-write values
pub mod RW {
/// 0b000: HCLK not divided
pub const Div1: u32 = 0b000;
/// 0b100: HCLK divided by 2
pub const Div2: u32 = 0b100;
/// 0b101: HCLK divided by 4
pub const Div4: u32 = 0b101;
/// 0b110: HCLK divided by 8
pub const Div8: u32 = 0b110;
/// 0b111: HCLK divided by 16
pub const Div16: u32 = 0b111;
}
}
/// APB high speed prescaler (APB2)
pub mod PPRE2 {
/// Offset (11 bits)
pub const offset: u32 = 11;
/// Mask (3 bits: 0b111 << 11)
pub const mask: u32 = 0b111 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
pub use super::PPRE1::RW;
}
/// PLL entry clock source
pub mod PLLSRC {
/// Offset (16 bits)
pub const offset: u32 = 16;
/// Mask (1 bit: 1 << 16)
pub const mask: u32 = 1 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
/// Read-write values
pub mod RW {
/// 0b0: HSI divided by 2 selected as PLL input clock
pub const HSI_Div2: u32 = 0b0;
/// 0b1: HSE divided by PREDIV selected as PLL input clock
pub const HSE_Div_PREDIV: u32 = 0b1;
}
}
/// HSE divider for PLL entry
pub mod PLLXTPRE {
/// Offset (17 bits)
pub const offset: u32 = 17;
/// Mask (1 bit: 1 << 17)
pub const mask: u32 = 1 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
/// Read-write values
pub mod RW {
/// 0b0: HSE clock not divided
pub const Div1: u32 = 0b0;
/// 0b1: HSE clock divided by 2
pub const Div2: u32 = 0b1;
}
}
/// PLL Multiplication Factor
pub mod PLLMUL {
/// Offset (18 bits)
pub const offset: u32 = 18;
/// Mask (4 bits: 0b1111 << 18)
pub const mask: u32 = 0b1111 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
/// Read-write values
pub mod RW {
/// 0b0000: PLL input clock x2
pub const Mul2: u32 = 0b0000;
/// 0b0001: PLL input clock x3
pub const Mul3: u32 = 0b0001;
/// 0b0010: PLL input clock x4
pub const Mul4: u32 = 0b0010;
/// 0b0011: PLL input clock x5
pub const Mul5: u32 = 0b0011;
/// 0b0100: PLL input clock x6
pub const Mul6: u32 = 0b0100;
/// 0b0101: PLL input clock x7
pub const Mul7: u32 = 0b0101;
/// 0b0110: PLL input clock x8
pub const Mul8: u32 = 0b0110;
/// 0b0111: PLL input clock x9
pub const Mul9: u32 = 0b0111;
/// 0b1000: PLL input clock x10
pub const Mul10: u32 = 0b1000;
/// 0b1001: PLL input clock x11
pub const Mul11: u32 = 0b1001;
/// 0b1010: PLL input clock x12
pub const Mul12: u32 = 0b1010;
/// 0b1011: PLL input clock x13
pub const Mul13: u32 = 0b1011;
/// 0b1100: PLL input clock x14
pub const Mul14: u32 = 0b1100;
/// 0b1101: PLL input clock x15
pub const Mul15: u32 = 0b1101;
/// 0b1110: PLL input clock x16
pub const Mul16: u32 = 0b1110;
/// 0b1111: PLL input clock x16
pub const Mul16x: u32 = 0b1111;
}
}
/// Microcontroller clock output
pub mod MCO {
/// Offset (24 bits)
pub const offset: u32 = 24;
/// Mask (3 bits: 0b111 << 24)
pub const mask: u32 = 0b111 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
/// Read-write values
pub mod RW {
/// 0b000: MCO output disabled, no clock on MCO
pub const NoMCO: u32 = 0b000;
/// 0b010: Internal low speed (LSI) oscillator clock selected
pub const LSI: u32 = 0b010;
/// 0b011: External low speed (LSE) oscillator clock selected
pub const LSE: u32 = 0b011;
/// 0b100: System clock selected
pub const SYSCLK: u32 = 0b100;
/// 0b101: Internal RC 8 MHz (HSI) oscillator clock selected
pub const HSI: u32 = 0b101;
/// 0b110: External 4-32 MHz (HSE) oscillator clock selected
pub const HSE: u32 = 0b110;
/// 0b111: PLL clock selected (divided by 1 or 2, depending on PLLNODIV)
pub const PLL: u32 = 0b111;
}
}
/// Microcontroller Clock Output Prescaler
pub mod MCOPRE {
/// Offset (28 bits)
pub const offset: u32 = 28;
/// Mask (3 bits: 0b111 << 28)
pub const mask: u32 = 0b111 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
/// Read-write values
pub mod RW {
/// 0b000: MCO is divided by 1
pub const Div1: u32 = 0b000;
/// 0b001: MCO is divided by 2
pub const Div2: u32 = 0b001;
/// 0b010: MCO is divided by 4
pub const Div4: u32 = 0b010;
/// 0b011: MCO is divided by 8
pub const Div8: u32 = 0b011;
/// 0b100: MCO is divided by 16
pub const Div16: u32 = 0b100;
/// 0b101: MCO is divided by 32
pub const Div32: u32 = 0b101;
/// 0b110: MCO is divided by 64
pub const Div64: u32 = 0b110;
/// 0b111: MCO is divided by 128
pub const Div128: u32 = 0b111;
}
}
/// Do not divide PLL to MCO
pub mod PLLNODIV {
/// Offset (31 bits)
pub const offset: u32 = 31;
/// Mask (1 bit: 1 << 31)
pub const mask: u32 = 1 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
/// Read-write values
pub mod RW {
/// 0b0: PLL is divided by 2 for MCO
pub const Div2: u32 = 0b0;
/// 0b1: PLL is not divided for MCO
pub const Div1: u32 = 0b1;
}
}
}
/// Clock interrupt register (RCC_CIR)
pub mod CIR {
/// LSI Ready Interrupt flag
pub mod LSIRDYF {
/// Offset (0 bits)
pub const offset: u32 = 0;
/// Mask (1 bit: 1 << 0)
pub const mask: u32 = 1 << offset;
/// Read-only values
pub mod R {
/// 0b0: No clock ready interrupt
pub const NotInterrupted: u32 = 0b0;
/// 0b1: Clock ready interrupt
pub const Interrupted: u32 = 0b1;
}
/// Write-only values (empty)
pub mod W {}
/// Read-write values (empty)
pub mod RW {}
}
/// LSE Ready Interrupt flag
pub mod LSERDYF {
/// Offset (1 bits)
pub const offset: u32 = 1;
/// Mask (1 bit: 1 << 1)
pub const mask: u32 = 1 << offset;
pub use super::LSIRDYF::R;
/// Write-only values (empty)
pub mod W {}
/// Read-write values (empty)
pub mod RW {}
}
/// HSI Ready Interrupt flag
pub mod HSIRDYF {
/// Offset (2 bits)
pub const offset: u32 = 2;
/// Mask (1 bit: 1 << 2)
pub const mask: u32 = 1 << offset;
pub use super::LSIRDYF::R;
/// Write-only values (empty)
pub mod W {}
/// Read-write values (empty)
pub mod RW {}
}
/// HSE Ready Interrupt flag
pub mod HSERDYF {
/// Offset (3 bits)
pub const offset: u32 = 3;
/// Mask (1 bit: 1 << 3)
pub const mask: u32 = 1 << offset;
pub use super::LSIRDYF::R;
/// Write-only values (empty)
pub mod W {}
/// Read-write values (empty)
pub mod RW {}
}
/// PLL Ready Interrupt flag
pub mod PLLRDYF {
/// Offset (4 bits)
pub const offset: u32 = 4;
/// Mask (1 bit: 1 << 4)
pub const mask: u32 = 1 << offset;
pub use super::LSIRDYF::R;
/// Write-only values (empty)
pub mod W {}
/// Read-write values (empty)
pub mod RW {}
}
/// Clock Security System Interrupt flag
pub mod CSSF {
/// Offset (7 bits)
pub const offset: u32 = 7;
/// Mask (1 bit: 1 << 7)
pub const mask: u32 = 1 << offset;
/// Read-only values
pub mod R {
/// 0b0: No clock security interrupt caused by HSE clock failure
pub const NotInterrupted: u32 = 0b0;
/// 0b1: Clock security interrupt caused by HSE clock failure
pub const Interrupted: u32 = 0b1;
}
/// Write-only values (empty)
pub mod W {}
/// Read-write values (empty)
pub mod RW {}
}
/// LSI Ready Interrupt Enable
pub mod LSIRDYIE {
/// Offset (8 bits)
pub const offset: u32 = 8;
/// Mask (1 bit: 1 << 8)
pub const mask: u32 = 1 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
/// Read-write values
pub mod RW {
/// 0b0: Interrupt disabled
pub const Disabled: u32 = 0b0;
/// 0b1: Interrupt enabled
pub const Enabled: u32 = 0b1;
}
}
/// LSE Ready Interrupt Enable
pub mod LSERDYIE {
/// Offset (9 bits)
pub const offset: u32 = 9;
/// Mask (1 bit: 1 << 9)
pub const mask: u32 = 1 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
pub use super::LSIRDYIE::RW;
}
/// HSI Ready Interrupt Enable
pub mod HSIRDYIE {
/// Offset (10 bits)
pub const offset: u32 = 10;
/// Mask (1 bit: 1 << 10)
pub const mask: u32 = 1 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
pub use super::LSIRDYIE::RW;
}
/// HSE Ready Interrupt Enable
pub mod HSERDYIE {
/// Offset (11 bits)
pub const offset: u32 = 11;
/// Mask (1 bit: 1 << 11)
pub const mask: u32 = 1 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
pub use super::LSIRDYIE::RW;
}
/// PLL Ready Interrupt Enable
pub mod PLLRDYIE {
/// Offset (12 bits)
pub const offset: u32 = 12;
/// Mask (1 bit: 1 << 12)
pub const mask: u32 = 1 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
pub use super::LSIRDYIE::RW;
}
/// LSI Ready Interrupt Clear
pub mod LSIRDYC {
/// Offset (16 bits)
pub const offset: u32 = 16;
/// Mask (1 bit: 1 << 16)
pub const mask: u32 = 1 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values
pub mod W {
/// 0b1: Clear interrupt flag
pub const Clear: u32 = 0b1;
}
/// Read-write values (empty)
pub mod RW {}
}
/// LSE Ready Interrupt Clear
pub mod LSERDYC {
/// Offset (17 bits)
pub const offset: u32 = 17;
/// Mask (1 bit: 1 << 17)
pub const mask: u32 = 1 << offset;
/// Read-only values (empty)
pub mod R {}
pub use super::LSIRDYC::W;
/// Read-write values (empty)
pub mod RW {}
}
/// HSI Ready Interrupt Clear
pub mod HSIRDYC {
/// Offset (18 bits)
pub const offset: u32 = 18;
/// Mask (1 bit: 1 << 18)
pub const mask: u32 = 1 << offset;
/// Read-only values (empty)
pub mod R {}
pub use super::LSIRDYC::W;
/// Read-write values (empty)
pub mod RW {}
}
/// HSE Ready Interrupt Clear
pub mod HSERDYC {
/// Offset (19 bits)
pub const offset: u32 = 19;
/// Mask (1 bit: 1 << 19)
pub const mask: u32 = 1 << offset;
/// Read-only values (empty)
pub mod R {}
pub use super::LSIRDYC::W;
/// Read-write values (empty)
pub mod RW {}
}
/// PLL Ready Interrupt Clear
pub mod PLLRDYC {
/// Offset (20 bits)
pub const offset: u32 = 20;
/// Mask (1 bit: 1 << 20)
pub const mask: u32 = 1 << offset;
/// Read-only values (empty)
pub mod R {}
pub use super::LSIRDYC::W;
/// Read-write values (empty)
pub mod RW {}
}
/// Clock security system interrupt clear
pub mod CSSC {
/// Offset (23 bits)
pub const offset: u32 = 23;
/// Mask (1 bit: 1 << 23)
pub const mask: u32 = 1 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values
pub mod W {
/// 0b1: Clear CSSF flag
pub const Clear: u32 = 0b1;
}
/// Read-write values (empty)
pub mod RW {}
}
}
/// APB2 peripheral reset register (RCC_APB2RSTR)
pub mod APB2RSTR {
/// SYSCFG and COMP reset
pub mod SYSCFGRST {
/// Offset (0 bits)
pub const offset: u32 = 0;
/// Mask (1 bit: 1 << 0)
pub const mask: u32 = 1 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
/// Read-write values
pub mod RW {
/// 0b1: Reset the selected module
pub const Reset: u32 = 0b1;
}
}
/// TIM1 timer reset
pub mod TIM1RST {
/// Offset (11 bits)
pub const offset: u32 = 11;
/// Mask (1 bit: 1 << 11)
pub const mask: u32 = 1 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
pub use super::SYSCFGRST::RW;
}
/// SPI 1 reset
pub mod SPI1RST {
/// Offset (12 bits)
pub const offset: u32 = 12;
/// Mask (1 bit: 1 << 12)
pub const mask: u32 = 1 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
pub use super::SYSCFGRST::RW;
}
/// USART1 reset
pub mod USART1RST {
/// Offset (14 bits)
pub const offset: u32 = 14;
/// Mask (1 bit: 1 << 14)
pub const mask: u32 = 1 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
pub use super::SYSCFGRST::RW;
}
/// TIM15 timer reset
pub mod TIM15RST {
/// Offset (16 bits)
pub const offset: u32 = 16;
/// Mask (1 bit: 1 << 16)
pub const mask: u32 = 1 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
pub use super::SYSCFGRST::RW;
}
/// TIM16 timer reset
pub mod TIM16RST {
/// Offset (17 bits)
pub const offset: u32 = 17;
/// Mask (1 bit: 1 << 17)
pub const mask: u32 = 1 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
pub use super::SYSCFGRST::RW;
}
/// TIM17 timer reset
pub mod TIM17RST {
/// Offset (18 bits)
pub const offset: u32 = 18;
/// Mask (1 bit: 1 << 18)
pub const mask: u32 = 1 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
pub use super::SYSCFGRST::RW;
}
/// High Resolution Timer1 reset
pub mod HRTIM1RST {
/// Offset (29 bits)
pub const offset: u32 = 29;
/// Mask (1 bit: 1 << 29)
pub const mask: u32 = 1 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
pub use super::SYSCFGRST::RW;
}
}
/// APB1 peripheral reset register (RCC_APB1RSTR)
pub mod APB1RSTR {
/// Timer 2 reset
pub mod TIM2RST {
/// Offset (0 bits)
pub const offset: u32 = 0;
/// Mask (1 bit: 1 << 0)
pub const mask: u32 = 1 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
/// Read-write values
pub mod RW {
/// 0b1: Reset the selected module
pub const Reset: u32 = 0b1;
}
}
/// Timer 3 reset
pub mod TIM3RST {
/// Offset (1 bits)
pub const offset: u32 = 1;
/// Mask (1 bit: 1 << 1)
pub const mask: u32 = 1 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
pub use super::TIM2RST::RW;
}
/// Timer 6 reset
pub mod TIM6RST {
/// Offset (4 bits)
pub const offset: u32 = 4;
/// Mask (1 bit: 1 << 4)
pub const mask: u32 = 1 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
pub use super::TIM2RST::RW;
}
/// Timer 7 reset
pub mod TIM7RST {
/// Offset (5 bits)
pub const offset: u32 = 5;
/// Mask (1 bit: 1 << 5)
pub const mask: u32 = 1 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
pub use super::TIM2RST::RW;
}
/// Window watchdog reset
pub mod WWDGRST {
/// Offset (11 bits)
pub const offset: u32 = 11;
/// Mask (1 bit: 1 << 11)
pub const mask: u32 = 1 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
pub use super::TIM2RST::RW;
}
/// USART 2 reset
pub mod USART2RST {
/// Offset (17 bits)
pub const offset: u32 = 17;
/// Mask (1 bit: 1 << 17)
pub const mask: u32 = 1 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
pub use super::TIM2RST::RW;
}
/// USART3 reset
pub mod USART3RST {
/// Offset (18 bits)
pub const offset: u32 = 18;
/// Mask (1 bit: 1 << 18)
pub const mask: u32 = 1 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
pub use super::TIM2RST::RW;
}
/// I2C1 reset
pub mod I2C1RST {
/// Offset (21 bits)
pub const offset: u32 = 21;
/// Mask (1 bit: 1 << 21)
pub const mask: u32 = 1 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
pub use super::TIM2RST::RW;
}
/// CAN reset
pub mod CANRST {
/// Offset (25 bits)
pub const offset: u32 = 25;
/// Mask (1 bit: 1 << 25)
pub const mask: u32 = 1 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
pub use super::TIM2RST::RW;
}
/// Power interface reset
pub mod PWRRST {
/// Offset (28 bits)
pub const offset: u32 = 28;
/// Mask (1 bit: 1 << 28)
pub const mask: u32 = 1 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
pub use super::TIM2RST::RW;
}
/// DAC interface reset
pub mod DAC1RST {
/// Offset (29 bits)
pub const offset: u32 = 29;
/// Mask (1 bit: 1 << 29)
pub const mask: u32 = 1 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
pub use super::TIM2RST::RW;
}
/// DAC2 interface reset
pub mod DAC2RST {
/// Offset (26 bits)
pub const offset: u32 = 26;
/// Mask (1 bit: 1 << 26)
pub const mask: u32 = 1 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
pub use super::TIM2RST::RW;
}
}
/// AHB Peripheral Clock enable register (RCC_AHBENR)
pub mod AHBENR {
/// DMA1 clock enable
pub mod DMA1EN {
/// Offset (0 bits)
pub const offset: u32 = 0;
/// Mask (1 bit: 1 << 0)
pub const mask: u32 = 1 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
/// Read-write values
pub mod RW {
/// 0b0: The selected clock is disabled
pub const Disabled: u32 = 0b0;
/// 0b1: The selected clock is enabled
pub const Enabled: u32 = 0b1;
}
}
/// SRAM interface clock enable
pub mod SRAMEN {
/// Offset (2 bits)
pub const offset: u32 = 2;
/// Mask (1 bit: 1 << 2)
pub const mask: u32 = 1 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
pub use super::DMA1EN::RW;
}
/// FLITF clock enable
pub mod FLITFEN {
/// Offset (4 bits)
pub const offset: u32 = 4;
/// Mask (1 bit: 1 << 4)
pub const mask: u32 = 1 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
pub use super::DMA1EN::RW;
}
/// CRC clock enable
pub mod CRCEN {
/// Offset (6 bits)
pub const offset: u32 = 6;
/// Mask (1 bit: 1 << 6)
pub const mask: u32 = 1 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
pub use super::DMA1EN::RW;
}
/// I/O port A clock enable
pub mod IOPAEN {
/// Offset (17 bits)
pub const offset: u32 = 17;
/// Mask (1 bit: 1 << 17)
pub const mask: u32 = 1 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
pub use super::DMA1EN::RW;
}
/// I/O port B clock enable
pub mod IOPBEN {
/// Offset (18 bits)
pub const offset: u32 = 18;
/// Mask (1 bit: 1 << 18)
pub const mask: u32 = 1 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
pub use super::DMA1EN::RW;
}
/// I/O port C clock enable
pub mod IOPCEN {
/// Offset (19 bits)
pub const offset: u32 = 19;
/// Mask (1 bit: 1 << 19)
pub const mask: u32 = 1 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
pub use super::DMA1EN::RW;
}
/// I/O port D clock enable
pub mod IOPDEN {
/// Offset (20 bits)
pub const offset: u32 = 20;
/// Mask (1 bit: 1 << 20)
pub const mask: u32 = 1 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
pub use super::DMA1EN::RW;
}
/// I/O port F clock enable
pub mod IOPFEN {
/// Offset (22 bits)
pub const offset: u32 = 22;
/// Mask (1 bit: 1 << 22)
pub const mask: u32 = 1 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
pub use super::DMA1EN::RW;
}
/// Touch sensing controller clock enable
pub mod TSCEN {
/// Offset (24 bits)
pub const offset: u32 = 24;
/// Mask (1 bit: 1 << 24)
pub const mask: u32 = 1 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
pub use super::DMA1EN::RW;
}
/// ADC1 and ADC2 clock enable
pub mod ADC12EN {
/// Offset (28 bits)
pub const offset: u32 = 28;
/// Mask (1 bit: 1 << 28)
pub const mask: u32 = 1 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
pub use super::DMA1EN::RW;
}
/// ADC3 and ADC4 clock enable
pub mod ADC34EN {
/// Offset (29 bits)
pub const offset: u32 = 29;
/// Mask (1 bit: 1 << 29)
pub const mask: u32 = 1 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
pub use super::DMA1EN::RW;
}
}
/// APB2 peripheral clock enable register (RCC_APB2ENR)
pub mod APB2ENR {
/// SYSCFG clock enable
pub mod SYSCFGEN {
/// Offset (0 bits)
pub const offset: u32 = 0;
/// Mask (1 bit: 1 << 0)
pub const mask: u32 = 1 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
/// Read-write values
pub mod RW {
/// 0b0: The selected clock is disabled
pub const Disabled: u32 = 0b0;
/// 0b1: The selected clock is enabled
pub const Enabled: u32 = 0b1;
}
}
/// TIM1 Timer clock enable
pub mod TIM1EN {
/// Offset (11 bits)
pub const offset: u32 = 11;
/// Mask (1 bit: 1 << 11)
pub const mask: u32 = 1 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
pub use super::SYSCFGEN::RW;
}
/// SPI 1 clock enable
pub mod SPI1EN {
/// Offset (12 bits)
pub const offset: u32 = 12;
/// Mask (1 bit: 1 << 12)
pub const mask: u32 = 1 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
pub use super::SYSCFGEN::RW;
}
/// USART1 clock enable
pub mod USART1EN {
/// Offset (14 bits)
pub const offset: u32 = 14;
/// Mask (1 bit: 1 << 14)
pub const mask: u32 = 1 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
pub use super::SYSCFGEN::RW;
}
/// TIM15 timer clock enable
pub mod TIM15EN {
/// Offset (16 bits)
pub const offset: u32 = 16;
/// Mask (1 bit: 1 << 16)
pub const mask: u32 = 1 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
pub use super::SYSCFGEN::RW;
}
/// TIM16 timer clock enable
pub mod TIM16EN {
/// Offset (17 bits)
pub const offset: u32 = 17;
/// Mask (1 bit: 1 << 17)
pub const mask: u32 = 1 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
pub use super::SYSCFGEN::RW;
}
/// TIM17 timer clock enable
pub mod TIM17EN {
/// Offset (18 bits)
pub const offset: u32 = 18;
/// Mask (1 bit: 1 << 18)
pub const mask: u32 = 1 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
pub use super::SYSCFGEN::RW;
}
/// High Resolution Timer 1 clock enable
pub mod HRTIM1EN {
/// Offset (29 bits)
pub const offset: u32 = 29;
/// Mask (1 bit: 1 << 29)
pub const mask: u32 = 1 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
pub use super::SYSCFGEN::RW;
}
}
/// APB1 peripheral clock enable register (RCC_APB1ENR)
pub mod APB1ENR {
/// Timer 2 clock enable
pub mod TIM2EN {
/// Offset (0 bits)
pub const offset: u32 = 0;
/// Mask (1 bit: 1 << 0)
pub const mask: u32 = 1 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
/// Read-write values
pub mod RW {
/// 0b0: The selected clock is disabled
pub const Disabled: u32 = 0b0;
/// 0b1: The selected clock is enabled
pub const Enabled: u32 = 0b1;
}
}
/// Timer 3 clock enable
pub mod TIM3EN {
/// Offset (1 bits)
pub const offset: u32 = 1;
/// Mask (1 bit: 1 << 1)
pub const mask: u32 = 1 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
pub use super::TIM2EN::RW;
}
/// Timer 6 clock enable
pub mod TIM6EN {
/// Offset (4 bits)
pub const offset: u32 = 4;
/// Mask (1 bit: 1 << 4)
pub const mask: u32 = 1 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
pub use super::TIM2EN::RW;
}
/// Timer 7 clock enable
pub mod TIM7EN {
/// Offset (5 bits)
pub const offset: u32 = 5;
/// Mask (1 bit: 1 << 5)
pub const mask: u32 = 1 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
pub use super::TIM2EN::RW;
}
/// Window watchdog clock enable
pub mod WWDGEN {
/// Offset (11 bits)
pub const offset: u32 = 11;
/// Mask (1 bit: 1 << 11)
pub const mask: u32 = 1 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
pub use super::TIM2EN::RW;
}
/// USART 2 clock enable
pub mod USART2EN {
/// Offset (17 bits)
pub const offset: u32 = 17;
/// Mask (1 bit: 1 << 17)
pub const mask: u32 = 1 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
pub use super::TIM2EN::RW;
}
/// I2C 1 clock enable
pub mod I2C1EN {
/// Offset (21 bits)
pub const offset: u32 = 21;
/// Mask (1 bit: 1 << 21)
pub const mask: u32 = 1 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
pub use super::TIM2EN::RW;
}
/// CAN clock enable
pub mod CANEN {
/// Offset (25 bits)
pub const offset: u32 = 25;
/// Mask (1 bit: 1 << 25)
pub const mask: u32 = 1 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
pub use super::TIM2EN::RW;
}
/// Power interface clock enable
pub mod PWREN {
/// Offset (28 bits)
pub const offset: u32 = 28;
/// Mask (1 bit: 1 << 28)
pub const mask: u32 = 1 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
pub use super::TIM2EN::RW;
}
/// DAC interface clock enable
pub mod DAC1EN {
/// Offset (29 bits)
pub const offset: u32 = 29;
/// Mask (1 bit: 1 << 29)
pub const mask: u32 = 1 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
pub use super::TIM2EN::RW;
}
/// USART3 clock enable
pub mod USART3EN {
/// Offset (18 bits)
pub const offset: u32 = 18;
/// Mask (1 bit: 1 << 18)
pub const mask: u32 = 1 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
pub use super::TIM2EN::RW;
}
/// DAC2 interface clock enable
pub mod DAC2EN {
/// Offset (26 bits)
pub const offset: u32 = 26;
/// Mask (1 bit: 1 << 26)
pub const mask: u32 = 1 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
pub use super::TIM2EN::RW;
}
}
/// Backup domain control register (RCC_BDCR)
pub mod BDCR {
/// External Low Speed oscillator enable
pub mod LSEON {
/// Offset (0 bits)
pub const offset: u32 = 0;
/// Mask (1 bit: 1 << 0)
pub const mask: u32 = 1 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
/// Read-write values
pub mod RW {
/// 0b0: LSE oscillator Off
pub const Off: u32 = 0b0;
/// 0b1: LSE oscillator On
pub const On: u32 = 0b1;
}
}
/// External Low Speed oscillator ready
pub mod LSERDY {
/// Offset (1 bits)
pub const offset: u32 = 1;
/// Mask (1 bit: 1 << 1)
pub const mask: u32 = 1 << offset;
/// Read-only values
pub mod R {
/// 0b0: LSE oscillator not ready
pub const NotReady: u32 = 0b0;
/// 0b1: LSE oscillator ready
pub const Ready: u32 = 0b1;
}
/// Write-only values (empty)
pub mod W {}
/// Read-write values (empty)
pub mod RW {}
}
/// External Low Speed oscillator bypass
pub mod LSEBYP {
/// Offset (2 bits)
pub const offset: u32 = 2;
/// Mask (1 bit: 1 << 2)
pub const mask: u32 = 1 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
/// Read-write values
pub mod RW {
/// 0b0: LSE crystal oscillator not bypassed
pub const NotBypassed: u32 = 0b0;
/// 0b1: LSE crystal oscillator bypassed with external clock
pub const Bypassed: u32 = 0b1;
}
}
/// LSE oscillator drive capability
pub mod LSEDRV {
/// Offset (3 bits)
pub const offset: u32 = 3;
/// Mask (2 bits: 0b11 << 3)
pub const mask: u32 = 0b11 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
/// Read-write values
pub mod RW {
/// 0b00: Low drive capacity
pub const Low: u32 = 0b00;
/// 0b01: Medium-high drive capacity
pub const MediumHigh: u32 = 0b01;
/// 0b10: Medium-low drive capacity
pub const MediumLow: u32 = 0b10;
/// 0b11: High drive capacity
pub const High: u32 = 0b11;
}
}
/// RTC clock source selection
pub mod RTCSEL {
/// Offset (8 bits)
pub const offset: u32 = 8;
/// Mask (2 bits: 0b11 << 8)
pub const mask: u32 = 0b11 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
/// Read-write values
pub mod RW {
/// 0b00: No clock
pub const NoClock: u32 = 0b00;
/// 0b01: LSE oscillator clock used as RTC clock
pub const LSE: u32 = 0b01;
/// 0b10: LSI oscillator clock used as RTC clock
pub const LSI: u32 = 0b10;
/// 0b11: HSE oscillator clock divided by a prescaler used as RTC clock
pub const HSE: u32 = 0b11;
}
}
/// RTC clock enable
pub mod RTCEN {
/// Offset (15 bits)
pub const offset: u32 = 15;
/// Mask (1 bit: 1 << 15)
pub const mask: u32 = 1 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
/// Read-write values
pub mod RW {
/// 0b0: RTC clock disabled
pub const Disabled: u32 = 0b0;
/// 0b1: RTC clock enabled
pub const Enabled: u32 = 0b1;
}
}
/// Backup domain software reset
pub mod BDRST {
/// Offset (16 bits)
pub const offset: u32 = 16;
/// Mask (1 bit: 1 << 16)
pub const mask: u32 = 1 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
/// Read-write values
pub mod RW {
/// 0b0: Reset not activated
pub const Disabled: u32 = 0b0;
/// 0b1: Reset the entire RTC domain
pub const Enabled: u32 = 0b1;
}
}
}
/// Control/status register (RCC_CSR)
pub mod CSR {
/// Internal low speed oscillator enable
pub mod LSION {
/// Offset (0 bits)
pub const offset: u32 = 0;
/// Mask (1 bit: 1 << 0)
pub const mask: u32 = 1 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
/// Read-write values
pub mod RW {
/// 0b0: LSI oscillator Off
pub const Off: u32 = 0b0;
/// 0b1: LSI oscillator On
pub const On: u32 = 0b1;
}
}
/// Internal low speed oscillator ready
pub mod LSIRDY {
/// Offset (1 bits)
pub const offset: u32 = 1;
/// Mask (1 bit: 1 << 1)
pub const mask: u32 = 1 << offset;
/// Read-only values
pub mod R {
/// 0b0: LSI oscillator not ready
pub const NotReady: u32 = 0b0;
/// 0b1: LSI oscillator ready
pub const Ready: u32 = 0b1;
}
/// Write-only values (empty)
pub mod W {}
/// Read-write values (empty)
pub mod RW {}
}
/// Remove reset flag
pub mod RMVF {
/// Offset (24 bits)
pub const offset: u32 = 24;
/// Mask (1 bit: 1 << 24)
pub const mask: u32 = 1 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values
pub mod W {
/// 0b1: Clears the reset flag
pub const Clear: u32 = 0b1;
}
/// Read-write values (empty)
pub mod RW {}
}
/// Option byte loader reset flag
pub mod OBLRSTF {
/// Offset (25 bits)
pub const offset: u32 = 25;
/// Mask (1 bit: 1 << 25)
pub const mask: u32 = 1 << offset;
/// Read-only values
pub mod R {
/// 0b0: No reset has occurred
pub const NoReset: u32 = 0b0;
/// 0b1: A reset has occurred
pub const Reset: u32 = 0b1;
}
/// Write-only values (empty)
pub mod W {}
/// Read-write values (empty)
pub mod RW {}
}
/// PIN reset flag
pub mod PINRSTF {
/// Offset (26 bits)
pub const offset: u32 = 26;
/// Mask (1 bit: 1 << 26)
pub const mask: u32 = 1 << offset;
pub use super::OBLRSTF::R;
/// Write-only values (empty)
pub mod W {}
/// Read-write values (empty)
pub mod RW {}
}
/// POR/PDR reset flag
pub mod PORRSTF {
/// Offset (27 bits)
pub const offset: u32 = 27;
/// Mask (1 bit: 1 << 27)
pub const mask: u32 = 1 << offset;
pub use super::OBLRSTF::R;
/// Write-only values (empty)
pub mod W {}
/// Read-write values (empty)
pub mod RW {}
}
/// Software reset flag
pub mod SFTRSTF {
/// Offset (28 bits)
pub const offset: u32 = 28;
/// Mask (1 bit: 1 << 28)
pub const mask: u32 = 1 << offset;
pub use super::OBLRSTF::R;
/// Write-only values (empty)
pub mod W {}
/// Read-write values (empty)
pub mod RW {}
}
/// Independent watchdog reset flag
pub mod IWDGRSTF {
/// Offset (29 bits)
pub const offset: u32 = 29;
/// Mask (1 bit: 1 << 29)
pub const mask: u32 = 1 << offset;
pub use super::OBLRSTF::R;
/// Write-only values (empty)
pub mod W {}
/// Read-write values (empty)
pub mod RW {}
}
/// Window watchdog reset flag
pub mod WWDGRSTF {
/// Offset (30 bits)
pub const offset: u32 = 30;
/// Mask (1 bit: 1 << 30)
pub const mask: u32 = 1 << offset;
pub use super::OBLRSTF::R;
/// Write-only values (empty)
pub mod W {}
/// Read-write values (empty)
pub mod RW {}
}
/// Low-power reset flag
pub mod LPWRRSTF {
/// Offset (31 bits)
pub const offset: u32 = 31;
/// Mask (1 bit: 1 << 31)
pub const mask: u32 = 1 << offset;
pub use super::OBLRSTF::R;
/// Write-only values (empty)
pub mod W {}
/// Read-write values (empty)
pub mod RW {}
}
/// Reset flag of the 1.8 V domain
pub mod V18PWRRSTF {
/// Offset (23 bits)
pub const offset: u32 = 23;
/// Mask (1 bit: 1 << 23)
pub const mask: u32 = 1 << offset;
pub use super::OBLRSTF::R;
/// Write-only values (empty)
pub mod W {}
/// Read-write values (empty)
pub mod RW {}
}
}
/// AHB peripheral reset register
pub mod AHBRSTR {
/// I/O port A reset
pub mod IOPARST {
/// Offset (17 bits)
pub const offset: u32 = 17;
/// Mask (1 bit: 1 << 17)
pub const mask: u32 = 1 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
/// Read-write values
pub mod RW {
/// 0b1: Reset the selected module
pub const Reset: u32 = 0b1;
}
}
/// I/O port B reset
pub mod IOPBRST {
/// Offset (18 bits)
pub const offset: u32 = 18;
/// Mask (1 bit: 1 << 18)
pub const mask: u32 = 1 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
pub use super::IOPARST::RW;
}
/// I/O port C reset
pub mod IOPCRST {
/// Offset (19 bits)
pub const offset: u32 = 19;
/// Mask (1 bit: 1 << 19)
pub const mask: u32 = 1 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
pub use super::IOPARST::RW;
}
/// I/O port D reset
pub mod IOPDRST {
/// Offset (20 bits)
pub const offset: u32 = 20;
/// Mask (1 bit: 1 << 20)
pub const mask: u32 = 1 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
pub use super::IOPARST::RW;
}
/// I/O port F reset
pub mod IOPFRST {
/// Offset (22 bits)
pub const offset: u32 = 22;
/// Mask (1 bit: 1 << 22)
pub const mask: u32 = 1 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
pub use super::IOPARST::RW;
}
/// Touch sensing controller reset
pub mod TSCRST {
/// Offset (24 bits)
pub const offset: u32 = 24;
/// Mask (1 bit: 1 << 24)
pub const mask: u32 = 1 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
pub use super::IOPARST::RW;
}
/// ADC1 and ADC2 reset
pub mod ADC12RST {
/// Offset (28 bits)
pub const offset: u32 = 28;
/// Mask (1 bit: 1 << 28)
pub const mask: u32 = 1 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
pub use super::IOPARST::RW;
}
}
/// Clock configuration register 2
pub mod CFGR2 {
/// PREDIV division factor
pub mod PREDIV {
/// Offset (0 bits)
pub const offset: u32 = 0;
/// Mask (4 bits: 0b1111 << 0)
pub const mask: u32 = 0b1111 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
/// Read-write values
pub mod RW {
/// 0b0000: PREDIV input clock not divided
pub const Div1: u32 = 0b0000;
/// 0b0001: PREDIV input clock divided by 2
pub const Div2: u32 = 0b0001;
/// 0b0010: PREDIV input clock divided by 3
pub const Div3: u32 = 0b0010;
/// 0b0011: PREDIV input clock divided by 4
pub const Div4: u32 = 0b0011;
/// 0b0100: PREDIV input clock divided by 5
pub const Div5: u32 = 0b0100;
/// 0b0101: PREDIV input clock divided by 6
pub const Div6: u32 = 0b0101;
/// 0b0110: PREDIV input clock divided by 7
pub const Div7: u32 = 0b0110;
/// 0b0111: PREDIV input clock divided by 8
pub const Div8: u32 = 0b0111;
/// 0b1000: PREDIV input clock divided by 9
pub const Div9: u32 = 0b1000;
/// 0b1001: PREDIV input clock divided by 10
pub const Div10: u32 = 0b1001;
/// 0b1010: PREDIV input clock divided by 11
pub const Div11: u32 = 0b1010;
/// 0b1011: PREDIV input clock divided by 12
pub const Div12: u32 = 0b1011;
/// 0b1100: PREDIV input clock divided by 13
pub const Div13: u32 = 0b1100;
/// 0b1101: PREDIV input clock divided by 14
pub const Div14: u32 = 0b1101;
/// 0b1110: PREDIV input clock divided by 15
pub const Div15: u32 = 0b1110;
/// 0b1111: PREDIV input clock divided by 16
pub const Div16: u32 = 0b1111;
}
}
/// ADC1 and ADC2 prescaler
pub mod ADC12PRES {
/// Offset (4 bits)
pub const offset: u32 = 4;
/// Mask (5 bits: 0b11111 << 4)
pub const mask: u32 = 0b11111 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
/// Read-write values
pub mod RW {
/// 0b00000: No clock
pub const NoClock: u32 = 0b00000;
/// 0b10000: PLL clock not divided
pub const Div1: u32 = 0b10000;
/// 0b10001: PLL clock divided by 2
pub const Div2: u32 = 0b10001;
/// 0b10010: PLL clock divided by 4
pub const Div4: u32 = 0b10010;
/// 0b10011: PLL clock divided by 6
pub const Div6: u32 = 0b10011;
/// 0b10100: PLL clock divided by 8
pub const Div8: u32 = 0b10100;
/// 0b10101: PLL clock divided by 10
pub const Div10: u32 = 0b10101;
/// 0b10110: PLL clock divided by 12
pub const Div12: u32 = 0b10110;
/// 0b10111: PLL clock divided by 16
pub const Div16: u32 = 0b10111;
/// 0b11000: PLL clock divided by 32
pub const Div32: u32 = 0b11000;
/// 0b11001: PLL clock divided by 64
pub const Div64: u32 = 0b11001;
/// 0b11010: PLL clock divided by 128
pub const Div128: u32 = 0b11010;
/// 0b11011: PLL clock divided by 256
pub const Div256: u32 = 0b11011;
}
}
}
/// Clock configuration register 3
pub mod CFGR3 {
/// USART1 clock source selection
pub mod USART1SW {
/// Offset (0 bits)
pub const offset: u32 = 0;
/// Mask (2 bits: 0b11 << 0)
pub const mask: u32 = 0b11 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
/// Read-write values
pub mod RW {
/// 0b00: PCLK selected as USART clock source
pub const PCLK: u32 = 0b00;
/// 0b01: SYSCLK selected as USART clock source
pub const SYSCLK: u32 = 0b01;
/// 0b10: LSE selected as USART clock source
pub const LSE: u32 = 0b10;
/// 0b11: HSI selected as USART clock source
pub const HSI: u32 = 0b11;
}
}
/// I2C1 clock source selection
pub mod I2C1SW {
/// Offset (4 bits)
pub const offset: u32 = 4;
/// Mask (1 bit: 1 << 4)
pub const mask: u32 = 1 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
/// Read-write values
pub mod RW {
/// 0b0: HSI clock selected as I2C clock source
pub const HSI: u32 = 0b0;
/// 0b1: SYSCLK clock selected as I2C clock source
pub const SYSCLK: u32 = 0b1;
}
}
/// Timer1 clock source selection
pub mod TIM1SW {
/// Offset (8 bits)
pub const offset: u32 = 8;
/// Mask (1 bit: 1 << 8)
pub const mask: u32 = 1 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
/// Read-write values
pub mod RW {
/// 0b0: PCLK2 clock (doubled frequency when prescaled)
pub const PCLK2: u32 = 0b0;
/// 0b1: PLL vco output (running up to 144 MHz)
pub const PLL: u32 = 0b1;
}
}
}
#[repr(C)]
pub struct RegisterBlock {
/// Clock control register
pub CR: RWRegister<u32>,
/// Clock configuration register (RCC_CFGR)
pub CFGR: RWRegister<u32>,
/// Clock interrupt register (RCC_CIR)
pub CIR: RWRegister<u32>,
/// APB2 peripheral reset register (RCC_APB2RSTR)
pub APB2RSTR: RWRegister<u32>,
/// APB1 peripheral reset register (RCC_APB1RSTR)
pub APB1RSTR: RWRegister<u32>,
/// AHB Peripheral Clock enable register (RCC_AHBENR)
pub AHBENR: RWRegister<u32>,
/// APB2 peripheral clock enable register (RCC_APB2ENR)
pub APB2ENR: RWRegister<u32>,
/// APB1 peripheral clock enable register (RCC_APB1ENR)
pub APB1ENR: RWRegister<u32>,
/// Backup domain control register (RCC_BDCR)
pub BDCR: RWRegister<u32>,
/// Control/status register (RCC_CSR)
pub CSR: RWRegister<u32>,
/// AHB peripheral reset register
pub AHBRSTR: RWRegister<u32>,
/// Clock configuration register 2
pub CFGR2: RWRegister<u32>,
/// Clock configuration register 3
pub CFGR3: RWRegister<u32>,
}
pub struct ResetValues {
pub CR: u32,
pub CFGR: u32,
pub CIR: u32,
pub APB2RSTR: u32,
pub APB1RSTR: u32,
pub AHBENR: u32,
pub APB2ENR: u32,
pub APB1ENR: u32,
pub BDCR: u32,
pub CSR: u32,
pub AHBRSTR: u32,
pub CFGR2: u32,
pub CFGR3: u32,
}
#[cfg(not(feature = "nosync"))]
pub struct Instance {
pub(crate) addr: u32,
pub(crate) _marker: PhantomData<*const RegisterBlock>,
}
#[cfg(not(feature = "nosync"))]
impl ::core::ops::Deref for Instance {
type Target = RegisterBlock;
#[inline(always)]
fn deref(&self) -> &RegisterBlock {
unsafe { &*(self.addr as *const _) }
}
}
#[cfg(feature = "rtic")]
unsafe impl Send for Instance {}
/// Access functions for the RCC peripheral instance
pub mod RCC {
use super::ResetValues;
#[cfg(not(feature = "nosync"))]
use super::Instance;
#[cfg(not(feature = "nosync"))]
const INSTANCE: Instance = Instance {
addr: 0x40021000,
_marker: ::core::marker::PhantomData,
};
/// Reset values for each field in RCC
pub const reset: ResetValues = ResetValues {
CR: 0x00000083,
CFGR: 0x00000000,
CIR: 0x00000000,
APB2RSTR: 0x00000000,
APB1RSTR: 0x00000000,
AHBENR: 0x00000014,
APB2ENR: 0x00000000,
APB1ENR: 0x00000000,
BDCR: 0x00000000,
CSR: 0x0C000000,
AHBRSTR: 0x00000000,
CFGR2: 0x00000000,
CFGR3: 0x00000000,
};
#[cfg(not(feature = "nosync"))]
#[allow(renamed_and_removed_lints)]
#[allow(private_no_mangle_statics)]
#[no_mangle]
static mut RCC_TAKEN: bool = false;
/// Safe access to RCC
///
/// This function returns `Some(Instance)` if this instance is not
/// currently taken, and `None` if it is. This ensures that if you
/// do get `Some(Instance)`, you are ensured unique access to
/// the peripheral and there cannot be data races (unless other
/// code uses `unsafe`, of course). You can then pass the
/// `Instance` around to other functions as required. When you're
/// done with it, you can call `release(instance)` to return it.
///
/// `Instance` itself dereferences to a `RegisterBlock`, which
/// provides access to the peripheral's registers.
#[cfg(not(feature = "nosync"))]
#[inline]
pub fn take() -> Option<Instance> {
external_cortex_m::interrupt::free(|_| unsafe {
if RCC_TAKEN {
None
} else {
RCC_TAKEN = true;
Some(INSTANCE)
}
})
}
/// Release exclusive access to RCC
///
/// This function allows you to return an `Instance` so that it
/// is available to `take()` again. This function will panic if
/// you return a different `Instance` or if this instance is not
/// already taken.
#[cfg(not(feature = "nosync"))]
#[inline]
pub fn release(inst: Instance) {
external_cortex_m::interrupt::free(|_| unsafe {
if RCC_TAKEN && inst.addr == INSTANCE.addr {
RCC_TAKEN = false;
} else {
panic!("Released a peripheral which was not taken");
}
});
}
/// Unsafely steal RCC
///
/// This function is similar to take() but forcibly takes the
/// Instance, marking it as taken regardless of its previous
/// state.
#[cfg(not(feature = "nosync"))]
#[inline]
pub unsafe fn steal() -> Instance {
RCC_TAKEN = true;
INSTANCE
}
}
/// Raw pointer to RCC
///
/// Dereferencing this is unsafe because you are not ensured unique
/// access to the peripheral, so you may encounter data races with
/// other users of this peripheral. It is up to you to ensure you
/// will not cause data races.
///
/// This constant is provided for ease of use in unsafe code: you can
/// simply call for example `write_reg!(gpio, GPIOA, ODR, 1);`.
pub const RCC: *const RegisterBlock = 0x40021000 as *const _;
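// Editor's note: the following test module is an illustrative sketch only, not part of the
// generated register description. It exercises nothing beyond integer math on the
// `offset`/`mask`/`RW` constants defined above; at runtime the resulting value would be
// written through `RCC::take()`/`release()` or the raw `RCC` pointer documented above.
#[cfg(test)]
mod field_constant_sketch {
    #[test]
    fn compose_cfgr3_value() {
        // Start from the documented reset value of CFGR3 (0x0000_0000)...
        let mut cfgr3: u32 = super::RCC::reset.CFGR3;
        // ...select SYSCLK as the I2C1 kernel clock (I2C1SW, bit 4)...
        cfgr3 = (cfgr3 & !super::CFGR3::I2C1SW::mask)
            | (super::CFGR3::I2C1SW::RW::SYSCLK << super::CFGR3::I2C1SW::offset);
        // ...and HSI as the USART1 clock source (USART1SW, bits 1:0).
        cfgr3 = (cfgr3 & !super::CFGR3::USART1SW::mask)
            | (super::CFGR3::USART1SW::RW::HSI << super::CFGR3::USART1SW::offset);
        assert_eq!(cfgr3, 0x13);
    }
}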
| true | 0729666d5e6cfff61079832cf4046e7a895218c3 | Rust | TheDan64/limonite | /src/syntax/items.rs | UTF-8 | 1,037 | 2.796875 | 3 | ["Apache-2.0"] | permissive |
use crate::span::Spanned;
use crate::syntax::{Block, Type};
pub type Item<'s> = Spanned<ItemKind<'s>>;
// TODO: Better place for this
#[derive(Clone, Debug, PartialEq)]
pub struct FnSig<'s> {
params: Vec<(Spanned<&'s str>, Type<'s>)>,
ret: Option<Type<'s>>,
}
impl<'s> FnSig<'s> {
pub fn new(params: Vec<(Spanned<&'s str>, Type<'s>)>, ret: Option<Type<'s>>) -> Self {
FnSig { params, ret }
}
pub fn return_type(&self) -> Option<&Type<'s>> {
self.ret.as_ref()
}
pub fn params(&self) -> &[(Spanned<&'s str>, Type<'s>)] {
self.params.as_ref()
}
}
#[derive(Clone, Debug, PartialEq)]
pub enum ItemKind<'s> {
// Define a function with a name, signature, and body
FnDef(Spanned<&'s str>, Spanned<FnSig<'s>>, Block<'s>),
// Define a struct with a name and fields
StructDef(Spanned<&'s str>, Vec<(Spanned<&'s str>, Type<'s>)>),
// Import a function or type into scope. Use keyword and double colon (rust-style) path
Use(Spanned<()>, Vec<Spanned<&'s str>>),
}
| true | 2e87120659efedc04eb25b27effdb8483f438133 | Rust | M3kH/dgc | /dgc-cli/src/main.rs | UTF-8 | 3,810 | 2.84375 | 3 | [] | no_license |
use clap::{App, Arg};
use dgc_lib::dgc;
use serde_json::Value;
use std::io::{self, Read, Write};
fn main() {
let matches = App::new("dgc")
.version("1.0")
.author("Mauro Mandracchia <[email protected]>")
.about("Encode and Decode for Digital Green Certificate")
.subcommand(
App::new("sign")
.about("encode the message")
.arg(
Arg::new("private_key")
.short('p')
.long("privateKey")
.value_name("PRIVATEKEY_FILE")
.about("Sets certificate from file")
.takes_value(true)
.unset_setting(clap::ArgSettings::UseValueDelimiter)
.required(true),
)
.arg(
Arg::new("certificate")
.short('c')
.long("certificate")
.value_name("CERTIFICATE_FILE")
.about("Sets certificate from file")
.takes_value(true)
.unset_setting(clap::ArgSettings::UseValueDelimiter)
.required(true),
),
)
.subcommand(
App::new("verify").about("decode the message").arg(
Arg::new("certificate")
.short('c')
.long("certificate")
.value_name("CERTIFICATE_FILE")
.about("Sets certificate from file")
.takes_value(true)
.unset_setting(clap::ArgSettings::UseValueDelimiter)
.required(true),
),
)
.get_matches();
let mut message = Vec::new();
let stdin = io::stdin();
let mut handle = stdin.lock();
handle.read_to_end(&mut message).unwrap();
if let Some(ref matches) = matches.subcommand_matches("sign") {
if matches.is_present("certificate")
&& matches.is_present("private_key")
&& message.len() > 0
{
let certificate = matches.value_of("certificate").unwrap();
let private_key = matches.value_of("private_key").unwrap();
let encoded_message = sign_with_file_certificate(
certificate,
private_key,
&String::from_utf8(message).unwrap(),
);
let mut out = std::io::stdout();
out.write_all(&("HC1:".to_owned() + &encoded_message.to_owned()).as_bytes())
.unwrap();
out.flush().unwrap();
} else {
println!("Required parameters");
}
return;
}
if let Some(ref matches) = matches.subcommand_matches("verify") {
if matches.is_present("certificate") {
// "$ myapp test -l" was run
let certificate = matches.value_of("certificate").unwrap();
let decode_message =
verify_with_file_certificate(certificate, &message.to_owned()[4..]);
println!("{}", decode_message);
} else {
println!("Required parameters");
}
}
}
fn sign_with_file_certificate(certificate: &str, private_key: &str, message: &str) -> String {
let public = std::fs::read_to_string(certificate)
.unwrap()
.as_bytes()
.to_vec();
let private = std::fs::read_to_string(private_key)
.unwrap()
.as_bytes()
.to_vec();
dgc::sign(public, private, message)
}
fn verify_with_file_certificate(certificate: &str, message: &[u8]) -> Value {
dgc::read(
&std::fs::read_to_string(certificate)
.unwrap()
.as_bytes()
.to_vec(),
message,
)
}
| true | 0a65860505d2af6a2019bea46c839b53c27a6e48 | Rust | innoave/genevo | /src/population/mod.rs | UTF-8 | 12,687 | 3.640625 | 4 | ["Apache-2.0", "MIT"] | permissive |
//! The `population` module defines the `Population` struct and the
//! `PopulationBuilder` for building random populations.
//!
//! To use the `PopulationBuilder` for building `Population`s of a custom
//! `genetic::Genotype` an implementation of the `GenomeBuilder` must be
//! provided. A `GenomeBuilder` can build new individuals of a custom
//! `genetic::Genotype`.
//!
//! Default implementations of `GenomeBuilder` are provided for the binary
//! encoded types `fixedbitset::FixedBitSet` and `Vec<bool>` and for the
//! value encoded type `Vec<T>`.
//!
//! ## Examples
//!
//! In the first example we build a population of binary encoded genomes. Each
//! genome has a length of 12 bits and the population comprises 200 individuals.
//!
//! ```rust
//! use genevo::prelude::*;
//! use genevo::population::BinaryEncodedGenomeBuilder;
//! #[cfg(feature = "fixedbitset")]
//! use fixedbitset::FixedBitSet;
//!
//! fn main() {
//! #[cfg(feature = "fixedbitset")]
//! let population: Population<FixedBitSet> = build_population()
//! .with_genome_builder(BinaryEncodedGenomeBuilder::new(12))
//! .of_size(200)
//! .uniform_at_random();
//! #[cfg(not(feature = "fixedbitset"))]
//! let population: Population<Vec<bool>> = build_population()
//! .with_genome_builder(BinaryEncodedGenomeBuilder::new(12))
//! .of_size(200)
//! .uniform_at_random();
//!
//! println!("{:?}", population);
//! assert_eq!(200, population.size());
//! }
//! ```
//!
//! The next example builds a population of value encoded genomes. Each genome
//! is represented by a `Vec` of 4 `i64` values in the range of -200 to +200.
//! The generated population consists of 200 individuals.
//!
//! ```rust
//! use genevo::prelude::*;
//! use genevo::population::ValueEncodedGenomeBuilder;
//!
//! fn main() {
//! let population: Population<Vec<i64>> = build_population()
//! .with_genome_builder(ValueEncodedGenomeBuilder::new(4, -200, 201))
//! .of_size(200)
//! .uniform_at_random();
//!
//! println!("{:?}", population);
//! assert_eq!(200, population.size());
//! }
//! ```
//!
//! In the following example we demonstrate how to generate a population
//! containing individuals of the custom type `Pos`. Each genome consists of 8
//! `Pos` values. The generated population comprises 200 individuals.
//!
//! ```rust
//! use genevo::prelude::*;
//!
//! #[derive(Clone,Debug,PartialEq)]
//! struct Pos {
//! x: usize,
//! y: usize,
//! }
//!
//! struct PositionsBuilder;
//!
//! impl GenomeBuilder<Vec<Pos>> for PositionsBuilder {
//!
//! fn build_genome<R>(&self, _: usize, rng: &mut R) -> Vec<Pos>
//! where R: Rng + Sized
//! {
//! (0..8).map(|row|
//! Pos {
//! x: row,
//! y: rng.gen_range(0..8)
//! }
//! ).collect()
//! }
//! }
//!
//! fn main() {
//! let population: Population<Vec<Pos>> = build_population()
//! .with_genome_builder(PositionsBuilder)
//! .of_size(200)
//! .uniform_at_random();
//!
//! println!("{:?}", population);
//! assert_eq!(200, population.size());
//! }
//! ```
use crate::{
genetic::Genotype,
random::{get_rng, random_seed, Prng, Rng, Seed},
};
use rand::distributions::uniform::SampleUniform;
#[cfg(not(target_arch = "wasm32"))]
use rayon;
use std::{fmt::Debug, marker::PhantomData};
/// The `Population` defines a set of possible solutions to the optimization
/// or search problem.
#[derive(Clone, Debug, PartialEq)]
pub struct Population<G>
where
G: Genotype,
{
/// The individuals or members of the population.
individuals: Vec<G>,
}
impl<G> Population<G>
where
G: Genotype,
{
/// Creates a new `Population` with the given individuals as members.
pub fn with_individuals(individuals: Vec<G>) -> Population<G> {
Population { individuals }
}
/// Returns a slice of all individuals of this `Population`.
pub fn individuals(&self) -> &[G] {
&self.individuals
}
/// Returns the number of individuals in this `Population`.
pub fn size(&self) -> usize {
self.individuals.len()
}
}
/// The `PopulationBuilder` creates a new `Population` with a number of newly
/// created individuals or just individual `genetic::Genotype`s.
///
/// Typically the `PopulationBuilder` is used to create the initial population
/// with randomly created individuals.
///
/// To use this `PopulationBuilder` for a custom `genetic::Genotype` the trait
/// `GenomeBuilder` must be implemented for the custom `genetic::Genotype`.
#[allow(missing_copy_implementations)]
#[derive(Clone, Debug, PartialEq)]
pub struct PopulationBuilder;
#[cfg(not(target_arch = "wasm32"))]
impl PopulationBuilder {
fn build_population<B, G>(genome_builder: &B, size: usize, mut rng: Prng) -> Population<G>
where
B: GenomeBuilder<G>,
G: Genotype,
{
if size < 50 {
Population {
individuals: (0..size)
.map(|index| genome_builder.build_genome(index, &mut rng))
.collect(),
}
} else {
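// Derive two independent RNG streams with `jump()`, split the requested size in half, and
// build both halves in parallel with rayon before concatenating the results.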
rng.jump();
let rng1 = rng.clone();
rng.jump();
let rng2 = rng.clone();
let left_size = size / 2;
let right_size = size - left_size;
let (left_population, right_population) = rayon::join(
|| Self::build_population(genome_builder, left_size, rng1),
|| Self::build_population(genome_builder, right_size, rng2),
);
let mut right_individuals = right_population.individuals;
let mut individuals = left_population.individuals;
individuals.append(&mut right_individuals);
Population { individuals }
}
}
}
#[cfg(target_arch = "wasm32")]
impl PopulationBuilder {
fn build_population<B, G>(genome_builder: &B, size: usize, mut rng: Prng) -> Population<G>
where
B: GenomeBuilder<G>,
G: Genotype,
{
Population {
individuals: (0..size)
.map(|index| genome_builder.build_genome(index, &mut rng))
.collect(),
}
}
}
/// A `GenomeBuilder` defines how to build individuals of a population for
/// custom `genetic::Genotype`s.
///
/// Typically the individuals are generated randomly.
pub trait GenomeBuilder<G>: Sync
where
G: Genotype,
{
/// Builds a new genome of type `genetic::Genotype` for the given
/// `index` using the given random number generator `rng`.
fn build_genome<R>(&self, index: usize, rng: &mut R) -> G
where
R: Rng + Sized;
}
#[allow(missing_copy_implementations)]
#[derive(Clone, Debug, PartialEq)]
pub struct EmptyPopulationBuilder {
// Phantom data to prevent direct instantiation by lib users.
_empty: PhantomData<bool>,
}
impl EmptyPopulationBuilder {
pub fn with_genome_builder<B, G>(
self,
genome_builder: B,
) -> PopulationWithGenomeBuilderBuilder<B, G>
where
B: GenomeBuilder<G>,
G: Genotype,
{
PopulationWithGenomeBuilderBuilder {
_g: PhantomData,
genome_builder,
}
}
}
#[derive(Clone, Debug, PartialEq)]
pub struct PopulationWithGenomeBuilderBuilder<B, G>
where
B: GenomeBuilder<G>,
G: Genotype,
{
_g: PhantomData<G>,
genome_builder: B,
}
impl<B, G> PopulationWithGenomeBuilderBuilder<B, G>
where
B: GenomeBuilder<G>,
G: Genotype,
{
pub fn of_size(
self,
population_size: usize,
) -> PopulationWithGenomeBuilderAndSizeBuilder<B, G> {
PopulationWithGenomeBuilderAndSizeBuilder {
_g: self._g,
genome_builder: self.genome_builder,
population_size,
}
}
}
#[derive(Clone, Debug, PartialEq)]
pub struct PopulationWithGenomeBuilderAndSizeBuilder<B, G>
where
B: GenomeBuilder<G>,
G: Genotype,
{
_g: PhantomData<G>,
genome_builder: B,
population_size: usize,
}
impl<B, G> PopulationWithGenomeBuilderAndSizeBuilder<B, G>
where
B: GenomeBuilder<G>,
G: Genotype,
{
pub fn uniform_at_random(self) -> Population<G> {
PopulationBuilder::build_population(
&self.genome_builder,
self.population_size,
get_rng(random_seed()),
)
}
pub fn using_seed(self, seed: Seed) -> Population<G> {
PopulationBuilder::build_population(
&self.genome_builder,
self.population_size,
get_rng(seed),
)
}
}
pub fn build_population() -> EmptyPopulationBuilder {
EmptyPopulationBuilder {
_empty: PhantomData,
}
}
/// A `GenomeBuilder` that builds binary encoded `genetic::Genotype`s.
///
/// The default implementation can build `fixedbitset::FixedBitSet` genomes
/// and `Vec<bool>` genomes.
#[allow(missing_copy_implementations)]
#[derive(Clone, Debug, PartialEq)]
pub struct BinaryEncodedGenomeBuilder {
genome_length: usize,
}
impl BinaryEncodedGenomeBuilder {
/// Returns a new instance of the `BinaryEncodedGenomeBuilder` that builds
/// binary encoded genomes of length specified by the given `genome_length`.
pub fn new(genome_length: usize) -> Self {
BinaryEncodedGenomeBuilder { genome_length }
}
}
impl GenomeBuilder<Vec<bool>> for BinaryEncodedGenomeBuilder {
fn build_genome<R>(&self, _index: usize, rng: &mut R) -> Vec<bool>
where
R: Rng + Sized,
{
(0..self.genome_length).map(|_| rng.gen()).collect()
}
}
/// A `GenomeBuilder` that builds value encoded `genetic::Genotype`s.
///
/// The default implementation can build `Vec<T>` genomes. The values of
/// `T` are generated randomly in the range between a min value and a max
/// value.
#[derive(Clone, Debug, PartialEq)]
pub struct ValueEncodedGenomeBuilder<V> {
genome_length: usize,
min_value: V,
max_value: V,
}
impl<V> ValueEncodedGenomeBuilder<V> {
/// Returns a new instance of the `ValueEncodedGenomeBuilder` that builds
/// value encoded genomes of length specified by the given `genome_length`.
///
/// The values of the generated genomes are in the range between the given
/// `min_value` (inclusive) and `max_value` (exclusive).
pub fn new(genome_length: usize, min_value: V, max_value: V) -> Self {
ValueEncodedGenomeBuilder {
genome_length,
min_value,
max_value,
}
}
}
impl<V> GenomeBuilder<Vec<V>> for ValueEncodedGenomeBuilder<V>
where
V: Clone + Debug + PartialEq + PartialOrd + SampleUniform + Send + Sync,
{
fn build_genome<R>(&self, _: usize, rng: &mut R) -> Vec<V>
where
R: Rng + Sized,
{
(0..self.genome_length)
.map(|_| rng.gen_range(self.min_value.clone()..self.max_value.clone()))
.collect()
}
}
#[cfg(feature = "fixedbitset")]
mod fixedbitset_genome_builder {
use super::{BinaryEncodedGenomeBuilder, GenomeBuilder};
use fixedbitset::FixedBitSet;
use rand::Rng;
impl GenomeBuilder<FixedBitSet> for BinaryEncodedGenomeBuilder {
fn build_genome<R>(&self, _index: usize, rng: &mut R) -> FixedBitSet
where
R: Rng + Sized,
{
let mut genome = FixedBitSet::with_capacity(self.genome_length);
for bit in 0..self.genome_length {
genome.set(bit, rng.gen());
}
genome
}
}
}
#[cfg(feature = "smallvec")]
mod smallvec_genome_builder {
use super::{BinaryEncodedGenomeBuilder, GenomeBuilder, ValueEncodedGenomeBuilder};
use rand::{distributions::uniform::SampleUniform, Rng};
use smallvec::{Array, SmallVec};
use std::fmt::Debug;
impl<A> GenomeBuilder<SmallVec<A>> for BinaryEncodedGenomeBuilder
where
A: Array<Item = bool> + Sync,
{
fn build_genome<R>(&self, _index: usize, rng: &mut R) -> SmallVec<A>
where
R: Rng + Sized,
{
(0..self.genome_length).map(|_| rng.gen()).collect()
}
}
impl<A, V> GenomeBuilder<SmallVec<A>> for ValueEncodedGenomeBuilder<V>
where
A: Array<Item = V> + Sync,
V: Clone + Debug + PartialEq + PartialOrd + SampleUniform + Send + Sync,
{
fn build_genome<R>(&self, _: usize, rng: &mut R) -> SmallVec<A>
where
R: Rng + Sized,
{
(0..self.genome_length)
.map(|_| rng.gen_range(self.min_value.clone()..self.max_value.clone()))
.collect()
}
}
}
#[cfg(test)]
mod tests;
| true | 893492d2e7c7fa91ed4967264d1aeb4eea982162 | Rust | lyfzero/OS-learning | /rust_learning/variables/src/main.rs | UTF-8 | 1,267 | 3.734375 | 4 | [] | no_license |
fn main() {
// mutable variable
let mut x = 5;
println!("The value of x is: {}", x);
x = 6;
println!("The value of x is: {}", x);
// constants
const MAX_POINTS: u32 = 100_000;
println!("The value of MAX_POINTS is: {}", MAX_POINTS);
// shadowing
let x = 5;
let x = x + 1;
let x = x * 2;
println!("The value of x is: {}", x);
// shadowing vs mut
let spaces = " ";
let spaces = spaces.len();
println!("The length of spaces is: {}", spaces);
// let spaces = " ";
// spaces = spaces.len(); // error
// float
let x = 2.0;
let y: f32 = 3.0;
// calculation
let sum = 5 + 19;
let diff = 95.5 - 4.3;
let prod = 4 * 30;
let quot = 56.7 / 23.2;
let remainder = 42%6;
// bool
let t = true;
let f: bool = false; // explicit type annotation
// tuple
let tup: (i32, f64, u8) = (500, 6.4, 1);
let (x, y, z) = tup; // destructuring
println!("The value of y is: {}", y);
let a = tup.0; // index access
let b = tup.1;
let c = tup.2;
println!("The value of a, b, c are: {}, {}, {}", a, b, c);
// array
let a = [1, 2, 3, 4, 5];
let a: [i32; 5] = [1, 2, 3, 4, 5];
let a = [3; 5];
let first = a[0];
let second = a[1];
}
| true | e737aa8fe679f172b3db8f7ccc5898200daa4d6e | Rust | jackthecodemonkey/simple-bank-console-app | /src/models/commands.rs | UTF-8 | 967 | 3.078125 | 3 | [] | no_license |
use super::super::traits::ValidateCommands::ValidateCommands;
#[derive(Debug)]
pub struct commands {
pub arguments: Vec<String>,
}
impl commands {
pub fn new(commands: Vec<String>) -> Self {
commands {
arguments: commands,
}
}
}
#[derive(Debug)]
pub struct ValidCommands {
pub valid_commands: Vec<String>,
}
impl ValidateCommands for commands {
fn validate(&self, valid_commands: &ValidCommands) -> Result<(), String> {
let mut invalid_commands: String = String::from("");
for argument in self.arguments.iter() {
if !valid_commands.valid_commands.contains(argument) {
invalid_commands.push_str("invalid command entered: ");
invalid_commands.push_str(&argument.as_str());
invalid_commands.push_str("\n");
}
}
if invalid_commands != "" {
return Err(invalid_commands);
}
Ok(())
}
}
| true | dc5a58b6d4496d814a32a3fa2a6e9569c5486bef | Rust | enso-org/enso | /app/gui/controller/engine-protocol/src/binary/client.rs | UTF-8 | 13,583 | 2.8125 | 3 | ["AGPL-3.0-only", "Apache-2.0", "AGPL-3.0-or-later"] | permissive |
//! Module defines the LS binary protocol client `API` and its two implementations: `Client` and
//! `MockClient`.
use crate::prelude::*;
use crate::binary::message::ErrorPayload;
use crate::binary::message::FromServerPayloadOwned;
use crate::binary::message::MessageFromServerOwned;
use crate::binary::message::MessageToServerRef;
use crate::binary::message::ToServerPayload;
use crate::binary::message::VisualizationContext;
use crate::common::error::UnexpectedMessage;
use crate::handler::Disposition;
use crate::handler::Handler;
use crate::language_server::types::Path;
use crate::types::Sha3_224;
use json_rpc::Transport;
use json_rpc::TransportEvent;
use mockall::automock;
// ==============
// === Errors ===
// ==============
#[allow(missing_docs)]
#[derive(Debug, Fail, Clone, Copy)]
#[fail(display = "Received a text message when expecting only the binary ones.")]
pub struct UnexpectedTextMessage;
/// Errors that can cause a remote call to fail.
pub type RpcError = json_rpc::error::RpcError<ErrorPayload>;
// ====================
// === Notification ===
// ====================
/// The notifications that binary protocol client may receive.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum Notification {
/// A new data has been sent for a visualization.
VisualizationUpdate {
/// Identifies the specific visualization.
context: VisualizationContext,
/// Data to be passed to the visualization.
data: Vec<u8>,
},
}
/// Events emitted by the LS binary protocol client.
pub type Event = crate::common::event::Event<Notification>;
// ===========
// === API ===
// ===========
/// The Engine Services Language Server Binary Protocol Client API.
#[automock]
pub trait API {
/// Initializes the protocol. Must be called exactly once before making any other calls.
fn init(&self, client_id: Uuid) -> StaticBoxFuture<FallibleResult>;
/// Writes binary data to the file.
fn write_file(&self, path: &Path, contents: &[u8]) -> StaticBoxFuture<FallibleResult>;
/// Retrieves the file contents as a binary data.
fn read_file(&self, path: &Path) -> StaticBoxFuture<FallibleResult<Vec<u8>>>;
/// Writes a set of bytes to the specified file at the specified offset.
fn write_bytes(
&self,
path: &Path,
byte_offset: u64,
overwrite: bool,
bytes: &[u8],
) -> StaticBoxFuture<FallibleResult<Sha3_224>>;
/// Asynchronous event stream with notification and errors.
///
/// On a repeated call, previous stream is closed.
fn event_stream(&self) -> StaticBoxStream<Event>;
}
// ==============
// === Client ===
// ==============
/// The client for Engine Services Language Server Binary Protocol.
#[derive(Clone, Derivative)]
#[derivative(Debug)]
pub struct Client {
handler: Handler<Uuid, FromServerPayloadOwned, Notification>,
}
impl Client {
/// Helper function that fails if the received message represents a remote error.
fn expect_success(result: FromServerPayloadOwned) -> FallibleResult {
if let FromServerPayloadOwned::Success {} = result {
Ok(())
} else {
Err(RpcError::MismatchedResponseType.into())
}
}
/// Function that does early processing of the peer's message and decides how it shall be
/// handled. Returns a function so that it may be passed to the `Handler`.
fn processor(
) -> impl FnMut(TransportEvent) -> Disposition<Uuid, FromServerPayloadOwned, Notification> + 'static
{
move |event: TransportEvent| {
let binary_data = match event {
TransportEvent::BinaryMessage(data) => data,
_ => return Disposition::error(UnexpectedTextMessage),
};
let message = match MessageFromServerOwned::deserialize(&binary_data) {
Ok(message) => message,
Err(e) => return Disposition::error(e),
};
debug!("Deserialized incoming binary message: {message:?}");
let correlation_id = message.correlation_id;
match message.0.payload {
FromServerPayloadOwned::VisualizationUpdate { context, data } =>
Disposition::notify(Notification::VisualizationUpdate { data, context }),
payload => {
if let Some(id) = correlation_id {
Disposition::HandleReply { id, reply: payload }
} else {
// Not a known notification and yet not a response to our request.
Disposition::error(UnexpectedMessage)
}
}
}
}
}
/// Creates a new client from the given transport to the Language Server Data Endpoint.
///
/// Before client is functional:
/// * `runner` must be scheduled for execution;
/// * `init` must be called or it needs to be wrapped into `Connection`.
pub fn new(transport: impl Transport + 'static) -> Client {
let processor = Self::processor();
Client { handler: Handler::new(transport, processor) }
}
/// Starts a new request, described by the given payload.
/// Function `f` serves to retrieve the request's result from the more general `Reply` type.
pub fn make_request<F, R>(
&self,
payload: ToServerPayload,
f: F,
) -> StaticBoxFuture<FallibleResult<R>>
where
F: FnOnce(FromServerPayloadOwned) -> FallibleResult<R>,
R: 'static,
F: 'static,
{
let message = MessageToServerRef::new(payload);
let id = message.message_id;
let completer = move |reply| {
info!("Completing request {id} with a reply: {reply:?}");
if let FromServerPayloadOwned::Error { code, message, data } = reply {
let code = code as i64;
let error = json_rpc::messages::Error { code, message, data };
Err(RpcError::RemoteError(error).into())
} else {
f(reply)
}
};
let fut = self.handler.make_request(&message, completer);
Box::pin(fut)
}
/// A `runner`. Its execution must be scheduled for `Client` to be able to complete requests and
/// emit events.
pub fn runner(&self) -> impl Future<Output = ()> {
self.handler.runner()
}
}
impl API for Client {
fn init(&self, client_id: Uuid) -> StaticBoxFuture<FallibleResult> {
info!("Initializing binary connection as client with id {client_id}.");
let payload = ToServerPayload::InitSession { client_id };
self.make_request(payload, Self::expect_success)
}
fn write_file(&self, path: &Path, contents: &[u8]) -> StaticBoxFuture<FallibleResult> {
info!("Writing file {} with {} bytes.", path, contents.len());
let payload = ToServerPayload::WriteFile { path, contents };
self.make_request(payload, Self::expect_success)
}
fn read_file(&self, path: &Path) -> StaticBoxFuture<FallibleResult<Vec<u8>>> {
info!("Reading file {path}.");
let payload = ToServerPayload::ReadFile { path };
self.make_request(payload, move |result| {
if let FromServerPayloadOwned::FileContentsReply { contents } = result {
Ok(contents)
} else {
Err(RpcError::MismatchedResponseType.into())
}
})
}
fn write_bytes(
&self,
path: &Path,
byte_offset: u64,
overwrite: bool,
bytes: &[u8],
) -> StaticBoxFuture<FallibleResult<Sha3_224>> {
info!("Writing {} bytes to {path} at offset {byte_offset}", bytes.len());
let payload = ToServerPayload::WriteBytes { path, byte_offset, overwrite, bytes };
self.make_request(payload, move |result| {
if let FromServerPayloadOwned::WriteBytesReply { checksum } = result {
Ok(checksum.into())
} else {
Err(RpcError::MismatchedResponseType.into())
}
})
}
fn event_stream(&self) -> StaticBoxStream<Event> {
self.handler.event_stream().boxed_local()
}
}
// =============
// === Tests ===
// =============
#[cfg(test)]
mod tests {
use super::*;
use crate::binary::message::MessageFromServer;
use crate::binary::message::MessageToServerOwned;
use crate::binary::message::ToServerPayloadOwned;
use futures::task::LocalSpawnExt;
use json_rpc::test_util::transport::mock::MockTransport;
// ===============
// === Fixture ===
// ===============
struct ClientFixture {
transport: MockTransport,
client: Client,
executor: futures::executor::LocalPool,
}
impl ClientFixture {
fn new() -> ClientFixture {
let transport = MockTransport::new();
let client = Client::new(transport.clone());
let executor = futures::executor::LocalPool::new();
executor.spawner().spawn_local(client.runner()).unwrap();
ClientFixture { transport, client, executor }
}
}
// ========================
// === Testing Requests ===
// ========================
fn test_request<R>(
make_request: impl Fn(&Client) -> StaticBoxFuture<FallibleResult<R>>,
expected_result: R,
expected_request: ToServerPayloadOwned,
mock_reply: FromServerPayloadOwned,
) where
R: Debug + PartialEq + Sized,
{
let mut fixture = ClientFixture::new();
let mut fut = make_request(&fixture.client);
let generated_message = fixture.transport.expect_binary_message();
let generated_message = MessageToServerOwned::deserialize(&generated_message).unwrap();
assert_eq!(generated_message.payload, expected_request);
fut.expect_pending();
let mut mock_reply = MessageFromServer::new(mock_reply);
mock_reply.correlation_id = Some(generated_message.message_id);
mock_reply.with_serialized(|data| fixture.transport.mock_peer_binary_message(data));
fixture.executor.run_until_stalled();
assert_eq!(fut.expect_ok(), expected_result);
// Repeat request but now answer with error.
let mut fut = make_request(&fixture.client);
let generated_message = fixture.transport.expect_binary_message();
let generated_message = MessageToServerOwned::deserialize(&generated_message).unwrap();
let mock_error_code = 444;
let mock_error_message = "This is error".to_string();
let mut mock_reply = MessageFromServer::new(FromServerPayloadOwned::Error {
code: mock_error_code,
message: mock_error_message,
data: None,
});
mock_reply.correlation_id = Some(generated_message.message_id);
mock_reply.with_serialized(|data| fixture.transport.mock_peer_binary_message(data));
fixture.executor.run_until_stalled();
fut.expect_err();
}
#[test]
fn test_init() {
let client_id = Uuid::new_v4();
test_request(
|client| client.init(client_id),
(),
ToServerPayloadOwned::InitSession { client_id },
FromServerPayloadOwned::Success {},
);
}
#[test]
fn test_write_file() {
let root_id = Uuid::new_v4();
let path = Path::new(root_id, &["Main.enso"]);
let data = Vec::from("hello".as_bytes());
test_request(
|client| client.write_file(&path, &data),
(),
ToServerPayloadOwned::WriteFile { contents: data.clone(), path: path.clone() },
FromServerPayloadOwned::Success {},
);
}
#[test]
fn test_read_file() {
let root_id = Uuid::new_v4();
let path = Path::new(root_id, &["Main.enso"]);
let data = Vec::from("hello".as_bytes());
test_request(
|client| client.read_file(&path),
data.clone(),
ToServerPayloadOwned::ReadFile { path: path.clone() },
FromServerPayloadOwned::FileContentsReply { contents: data },
);
}
// =============================
// === Testing Notifications ===
// =============================
#[test]
fn test_visualization_update() {
let mut fixture = ClientFixture::new();
let mut event_fut = fixture.client.event_stream().into_future().boxed_local();
fixture.executor.run_until_stalled();
event_fut.expect_pending();
let context = VisualizationContext {
visualization_id: Uuid::new_v4(),
expression_id: Uuid::new_v4(),
context_id: Uuid::new_v4(),
};
let data = Vec::from("Hello".as_bytes());
let message = MessageFromServer::new(FromServerPayloadOwned::VisualizationUpdate {
data: data.clone(),
context,
});
message.with_serialized(|data| fixture.transport.mock_peer_binary_message(data));
fixture.executor.run_until_stalled();
let expected_notification = Notification::VisualizationUpdate { context, data };
let (event, tail) = event_fut.expect_ready();
match event.expect("Expected some notification.") {
Event::Notification(notification) => assert_eq!(notification, expected_notification),
event => panic!("Expected notification event, got: {event:?}"),
}
tail.boxed_local().expect_pending();
}
}
| true | 736a986347d82def44bcea5c2d01612734737d30 | Rust | pchickey/cap-std | /cap-primitives/src/posish/darwin/fs/file_path.rs | UTF-8 | 841 | 2.703125 | 3 | ["MIT", "LLVM-exception", "Apache-2.0"] | permissive |
//! `get_path` translation code for macOS derived from Rust's
//! library/std/src/sys/unix/fs.rs at revision
//! 108e90ca78f052c0c1c49c42a22c85620be19712.
use posish::fs::getpath;
use std::{fs, os::unix::ffi::OsStringExt, path::PathBuf};
pub(crate) fn file_path(file: &fs::File) -> Option<PathBuf> {
// The use of PATH_MAX is generally not encouraged, but it
// is inevitable in this case because macOS defines `fcntl` with
// `F_GETPATH` in terms of `MAXPATHLEN`, and there are no
// alternatives. If a better method is invented, it should be used
// instead.
let mut buf = vec![0; libc::PATH_MAX as usize];
getpath(file, &mut buf).ok()?;
let l = buf.iter().position(|&c| c == 0).unwrap();
buf.truncate(l as usize);
buf.shrink_to_fit();
Some(PathBuf::from(std::ffi::OsString::from_vec(buf)))
}
| true | 3551578bb7ea364d6f8330194f50e6b9f3a27975 | Rust | jlb6740/wasmtime | /crates/wiggle/generate/src/codegen_settings.rs | UTF-8 | 3,366 | 2.59375 | 3 | ["LLVM-exception", "Apache-2.0"] | permissive |
use crate::config::{AsyncConf, ErrorConf};
use anyhow::{anyhow, Error};
use proc_macro2::TokenStream;
use quote::quote;
use std::collections::HashMap;
use std::rc::Rc;
use witx::{Document, Id, InterfaceFunc, Module, NamedType, TypeRef};
pub use crate::config::Asyncness;
pub struct CodegenSettings {
pub errors: ErrorTransform,
pub async_: AsyncConf,
pub wasmtime: bool,
}
impl CodegenSettings {
pub fn new(
error_conf: &ErrorConf,
async_: &AsyncConf,
doc: &Document,
wasmtime: bool,
) -> Result<Self, Error> {
let errors = ErrorTransform::new(error_conf, doc)?;
Ok(Self {
errors,
async_: async_.clone(),
wasmtime,
})
}
pub fn get_async(&self, module: &Module, func: &InterfaceFunc) -> Asyncness {
self.async_.get(module.name.as_str(), func.name.as_str())
}
}
pub struct ErrorTransform {
m: Vec<UserErrorType>,
}
impl ErrorTransform {
pub fn empty() -> Self {
Self { m: Vec::new() }
}
pub fn new(conf: &ErrorConf, doc: &Document) -> Result<Self, Error> {
let mut richtype_identifiers = HashMap::new();
let m = conf.iter().map(|(ident, field)|
if let Some(abi_type) = doc.typename(&Id::new(ident.to_string())) {
if let Some(ident) = field.rich_error.get_ident() {
if let Some(prior_def) = richtype_identifiers.insert(ident.clone(), field.err_loc.clone())
{
return Err(anyhow!(
"duplicate rich type identifier of {:?} not allowed. prior definition at {:?}",
ident, prior_def
));
}
Ok(UserErrorType {
abi_type,
rich_type: field.rich_error.clone(),
method_fragment: ident.to_string()
})
} else {
return Err(anyhow!(
"rich error type must be identifier for now - TODO add ability to provide a corresponding identifier: {:?}",
field.err_loc
))
}
}
else { Err(anyhow!("No witx typename \"{}\" found", ident.to_string())) }
).collect::<Result<Vec<_>, Error>>()?;
Ok(Self { m })
}
pub fn iter(&self) -> impl Iterator<Item = &UserErrorType> {
self.m.iter()
}
pub fn for_abi_error(&self, tref: &TypeRef) -> Option<&UserErrorType> {
match tref {
TypeRef::Name(nt) => self.for_name(nt),
TypeRef::Value { .. } => None,
}
}
pub fn for_name(&self, nt: &NamedType) -> Option<&UserErrorType> {
self.m.iter().find(|u| u.abi_type.name == nt.name)
}
}
pub struct UserErrorType {
abi_type: Rc<NamedType>,
rich_type: syn::Path,
method_fragment: String,
}
impl UserErrorType {
pub fn abi_type(&self) -> TypeRef {
TypeRef::Name(self.abi_type.clone())
}
pub fn typename(&self) -> TokenStream {
let t = &self.rich_type;
quote!(#t)
}
pub fn method_fragment(&self) -> &str {
&self.method_fragment
}
}
| true | 962b62e18c6f74c5408807a479043b8470ef055a | Rust | Celeo/bless_you_bot | /src/main.rs | UTF-8 | 5,270 | 2.78125 | 3 | ["MIT", "Apache-2.0", "LicenseRef-scancode-unknown-license-reference"] | permissive |
use log::{debug, error, info};
use once_cell::sync::Lazy;
use regex::Regex;
use serenity::{
model::{channel::Message, gateway::Ready},
prelude::*,
};
use std::{env, path::Path, process};
const WORDS_FILE_RAW: &str = include_str!(concat!(env!("OUT_DIR"), "/words_alpha.txt"));
const MINIMUM_MESSAGE_LENGTH: usize = 8;
static WORDS: Lazy<Vec<&str>> = Lazy::new(|| {
debug!("Initializing words vec");
WORDS_FILE_RAW.split_whitespace().collect()
});
static MONITORED_USER_IDS: Lazy<Vec<u64>> = Lazy::new(|| {
env::var("MONITORED_USER_IDS")
.expect("Could not load env var")
.split(',')
.filter_map(|s| s.parse::<u64>().ok())
.collect()
});
static OTHER_IGNORE_PATTERNS: Lazy<Vec<Regex>> = Lazy::new(|| {
vec![
Regex::new(r"^[hue]{5,}$").unwrap(),
Regex::new(r"^[bha]{5,}$").unwrap(),
Regex::new(r"^[lo]{5,}$").unwrap(),
Regex::new(r"^https?://").unwrap(),
Regex::new(r"^re{5,}").unwrap(),
Regex::new(r"^<:\w+:\d+>$").unwrap(),
]
});
fn strip_formatting(content: &str) -> String {
content
.replace('*', "")
.replace('_', "")
.replace('~', "")
.replace('`', "")
}
fn strip_punctuation(content: &str) -> String {
content.replace('?', "").replace('!', "").replace('"', "")
}
fn is_incoherent(content: &str) -> bool {
let content = {
let mut content = content.to_lowercase();
content = strip_formatting(&content);
content = strip_punctuation(&content);
content
};
if content.contains(' ') {
debug!("Message contains a space");
return false;
}
if content.len() < MINIMUM_MESSAGE_LENGTH {
debug!("Message is under {} chars long", MINIMUM_MESSAGE_LENGTH);
return false;
}
// It is actually faster to use `.contains` rather than looping through
// the vec's (pre-sorted) items and checking the first letter in the word
// to see if the search has already gone past the first letter in the checked
// word, i.e. if the loop is checking 'l' but `content` starts with an 'i'.
if WORDS.contains(&content.as_str()) {
debug!("Message found in word bank");
return false;
}
for pattern in OTHER_IGNORE_PATTERNS.iter() {
if pattern.is_match(&content) {
debug!("Matches ignore pattern");
return false;
}
}
true
}
struct Handler;
#[serenity::async_trait]
impl EventHandler for Handler {
async fn ready(&self, _: Context, _: Ready) {
info!("Bot connected");
}
async fn message(&self, context: Context, message: Message) {
if !MONITORED_USER_IDS.contains(message.author.id.as_u64()) {
return;
}
debug!("Message: {}", message.content);
if !is_incoherent(&message.content.to_lowercase()) {
return;
}
if let Err(e) = message.react(&context, '🤧').await {
error!("Error adding reaction: {}", e);
}
}
}
#[tokio::main]
async fn main() {
if Path::new(".env").exists() {
dotenv::dotenv().expect("Could not load from .env file");
}
pretty_env_logger::init();
let token = match env::var("DISCORD_TOKEN") {
Ok(t) => t,
Err(_) => {
error!("Environment variable 'DISCORD_TOKEN' is not set");
process::exit(1);
}
};
debug!("Token loaded from environment variable");
let mut client = Client::builder(&token)
.event_handler(Handler)
.await
.expect("Could not create client");
debug!("Bot set up");
if let Err(e) = client.start().await {
error!("Error starting client: {}", e);
}
}
#[cfg(test)]
mod tests {
use super::{is_incoherent, strip_formatting, strip_punctuation};
#[test]
fn test_is_incoherent_space() {
assert!(!is_incoherent("a a"));
}
#[test]
fn test_is_incoherent_length() {
assert!(!is_incoherent("a"));
}
#[test]
fn test_is_incoherent_real_word() {
assert!(!is_incoherent("DicTiONAry"));
assert!(!is_incoherent("dictionary"));
}
#[test]
fn test_is_incoherent_patterns() {
assert!(!is_incoherent("bahahaha"));
assert!(!is_incoherent("lolololloo"));
assert!(!is_incoherent("hueuhueuhuhe"));
assert!(!is_incoherent("http://example.com"));
assert!(!is_incoherent("https://google.com"));
assert!(!is_incoherent("reeeeeeeeee"));
assert!(!is_incoherent("<:Screampackman2:754148436906999888>"));
}
#[test]
fn test_strip_formatting() {
assert_eq!("word", strip_formatting("word"));
assert_eq!("word", strip_formatting("*word*"));
assert_eq!("word", strip_formatting("**word**"));
assert_eq!("word", strip_formatting("_word_"));
assert_eq!("word", strip_formatting("~~word~~"));
assert_eq!("word", strip_formatting("`word`"));
assert_eq!("word", strip_formatting("***word***"));
}
#[test]
fn test_strip_punctuation() {
assert_eq!("word", strip_punctuation("\"word\""));
assert_eq!("word", strip_punctuation("word!!"));
assert_eq!("word", strip_punctuation("word???"));
}
}
| true | a5a0784bb84e70d3b7cc8ef1300b3f184210a305 | Rust | scotow/minecrust | /src/fsm.rs | UTF-8 | 3,238 | 3.015625 | 3 | [] | no_license |
use crate::packets::{Handshake, LoginRequest, Packet, Ping, StatusRequest};
use crate::types::{Receive, ServerDescription, TAsyncRead, TAsyncWrite};
use anyhow::Result;
use futures::prelude::*;
pub struct Fsm<'a> {
server_description: &'a ServerDescription,
state: State,
reader: &'a mut dyn TAsyncRead,
writer: &'a mut dyn TAsyncWrite,
}
impl<'a> Fsm<'a> {
pub fn from_rw(
server_description: &'a ServerDescription,
reader: &'a mut dyn TAsyncRead,
writer: &'a mut dyn TAsyncWrite,
) -> Self {
Self {
server_description,
state: State::new(),
reader,
writer,
}
}
pub async fn next_state(&mut self) -> Result<State> {
let state = self
.state
.clone()
.next(&self.server_description, &mut self.reader, &mut self.writer)
.await?;
Ok(state)
}
pub async fn play(mut self) -> Result<Option<LoginRequest>> {
loop {
self.state = match self.next_state().await? {
State::Finished(login) => return Ok(Some(login)),
State::StatusFinished => return Ok(None),
state @ State::Status => {
// ignore what happens after a ping has been asked
let _ = state
.next(&self.server_description, &mut self.reader, &mut self.writer)
.await;
return Ok(None);
}
state => state,
}
}
}
}
#[derive(Debug, Clone)]
pub enum State {
Handshake,
Status,
StatusFinished,
Play,
Finished(LoginRequest),
}
impl Default for State {
fn default() -> Self {
State::Handshake
}
}
impl State {
pub fn new() -> Self {
Default::default()
}
pub async fn next(
self,
server_description: &ServerDescription,
mut reader: &mut dyn TAsyncRead,
mut writer: &mut dyn TAsyncWrite,
) -> Result<Self> {
match self {
State::Handshake => {
let handshake: Handshake = reader.receive().await?;
Ok(match *handshake.next_state {
1 => State::Status,
2 => State::Play,
_ => unreachable!(),
})
}
State::Status => {
let status_request: StatusRequest = reader.receive().await?;
status_request
.answer(&mut writer, &server_description)
.await?;
writer.flush().await?;
let ping: Ping = reader.receive().await?;
ping.send_packet(&mut writer).await?;
writer.flush().await?;
Ok(State::StatusFinished)
}
State::Play => {
let login_start: LoginRequest = reader.receive().await?;
login_start.answer(&mut writer).await?;
writer.flush().await?;
Ok(State::Finished(login_start))
}
State::StatusFinished => Ok(self),
State::Finished(_) => Ok(self),
}
}
}
| true | 936e045a921eb1892570e35aca73571f445cc789 | Rust | skgbanga/AOC | /2017/1/pro.rs | UTF-8 | 1,280 | 3.125 | 3 | [] | no_license |
use std::{
convert::TryInto,
fs::File,
io::{BufRead, BufReader},
};
fn part1(line: &str) -> i32 {
// python code:
//
// s = sum(int(a) for a, b in zip(line, line[1:] + line[0]) if a == b)
line.chars()
.zip(line.chars().cycle().skip(1))
.fold(0, |acc, (a, b)| {
if a == b {
acc + a.to_digit(10).expect("Could not convert char to digit")
} else {
acc
}
})
.try_into()
.expect("Failed to convert to i32")
}
fn part2(line: &str) -> i32 {
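// Pair each digit with the one halfway around the circular sequence; summing the matches
// over the first half and doubling covers both halves, since the matching is symmetric.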
let sum: i32 = line
.chars()
.zip(line.chars().skip(line.len() / 2))
.fold(0, |acc, (a, b)| {
if a == b {
acc + a.to_digit(10).expect("Could not convert char to digit")
} else {
acc
}
})
.try_into()
.expect("Failed to convert to i32");
sum * 2
}
fn main() {
let filename = "input";
let file = File::open(filename).unwrap();
let vec = BufReader::new(file)
.lines()
.map(|line| line.expect("Could not read line from file"))
.collect::<Vec<String>>();
let line = &vec[0];
println!("{}", part1(line));
println!("{}", part2(line));
}
| true | 3c7abbeafa93b97e06f4497763e36d702b6d4544 | Rust | garrettkoontz/aoc-2019-rust | /src/aoc/day1.rs | UTF-8 | 888 | 3.125 | 3 | [] | no_license |
use crate::utils;
const FILE_NAME: &str = "day1.txt";
pub fn part1(path: &str) -> i32 {
let file_path = &format!("./{}/{}", path, FILE_NAME);
let inputs = utils::read_file(file_path);
inputs
.into_iter()
.map(|x| fuel_required(x.parse::<i32>().unwrap()))
.fold(0, |acc, x| acc + x)
}
pub fn part2(path: &str) -> i32 {
let file_path = &format!("{}/{}", path, FILE_NAME);
let inputs = utils::read_file(file_path);
inputs
.into_iter()
.map(|x| fuel_required_with_fuel(x.parse::<i32>().unwrap()))
.fold(0, |acc, x| acc + x)
}
fn fuel_required(i: i32) -> i32 {
(i / 3) - 2
}
fn fuel_required_with_fuel(i: i32) -> i32 {
let mut sum = 0;
let mut mass = i;
while mass > 0 {
let new_m = fuel_required(mass);
sum += if new_m > 0 { new_m } else { 0 };
mass = new_m;
}
sum
}
| true | 19eaabb82d65367c7da160a0a482e51b51ab0436 | Rust | elertan/amino_api | /src/api/v1/community/s/user_profile/fetch_list/fetch_recent.rs | UTF-8 | 3,002 | 3.015625 | 3 | [] | no_license |
use crate::api::v1::api_instance::ApiInstance;
use crate::api::v1::models::api_response::ApiResponse;
use crate::api::v1::community::community::Community;
use chrono::{Utc, DateTime};
use crate::api::v1::models::user::User;
#[derive(Debug, Clone)]
pub struct FetchRecentParams {
pub start: Option<u32>,
pub size: u32,
}
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct FetchRecentResult {
#[serde(rename="userProfileCount")]
pub user_profile_count: u32,
#[serde(rename="userProfileList")]
pub user_profile_list: Vec<User>,
}
pub async fn fetch_recent(
api: &ApiInstance,
community: &Community,
params: &FetchRecentParams
) -> Result<ApiResponse<FetchRecentResult>, failure::Error> {
let mut partial_url =
format!(
"{}/s/user-profile?size={}&type=recent",
community.get_url_identifier(),
params.size
);
if params.start.is_some() {
let start = params.start.unwrap();
// let stop_time: DateTime<Utc> = Utc::now();
// let stop_time_string = format!("{:?}", stop_time);// stop_time.to_string();
// let stop_time_string_encoded = urlencoding::encode(stop_time_string.as_str());
// let part = format!("&stoptime={}&pagingType=o&start={}", stop_time_string_encoded, start);
let part = format!("&pagingType=o&start={}", start);
partial_url += part.as_str();
}
let url = api.base_url.create_full_url(&partial_url);
let client = &api.client;
let response = client.get(&url)
.send()
.await?;
let response_text = response.text().await?;
let result = serde_json::from_str(&response_text)?;
Ok(result)
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn fetch_recent_should_work() {
let mut rt = tokio::runtime::current_thread::Runtime::new().expect("new rt");
crate::helpers::testing::load_test_env();
let api: ApiInstance = rt.block_on(crate::helpers::testing::get_authorized_v1_api_instance());
let community = Community::from_id(3);
let result: Result<ApiResponse<FetchRecentResult>, failure::Error> = rt.block_on(
fetch_recent(&api, &community, &FetchRecentParams {
size: 25,
start: None
})
);
assert!(result.is_ok());
}
#[test]
fn fetch_recent_with_some_should_work() {
let mut rt = tokio::runtime::current_thread::Runtime::new().expect("new rt");
crate::helpers::testing::load_test_env();
let api: ApiInstance = rt.block_on(crate::helpers::testing::get_authorized_v1_api_instance());
let community = Community::from_id(3);
let result: Result<ApiResponse<FetchRecentResult>, failure::Error> = rt.block_on(
fetch_recent(&api, &community, &FetchRecentParams {
size: 25,
start: Some(25)
})
);
dbg!(&result);
assert!(result.is_ok());
}
}
| true | ab4d5a650a5423408025e457f20d4039729264d5 | Rust | foundpatterns/scl | /parser/tests/invalid.rs | UTF-8 | 1,556 | 2.875 | 3 | [] | no_license |
extern crate scl;
use scl::{parse_file, Error};
fn assert_error_msg(filename: &str, needle: &str) {
let res = parse_file(&format!("./tests/invalid/{}.scl", filename));
assert!(res.is_err());
let err = res.unwrap_err();
match err {
Error::InvalidSyntax(msg) => {
println!("{}", msg);
assert!(msg.contains(needle));
}
}
}
// Invalid syntax errors
#[test]
fn test_eof() {
assert_error_msg(
"eof",
"expected include or string / int / float / byte size / date / bool / array / dict / environment variable"
);
}
#[test]
fn test_invalid_int() {
assert_error_msg(
"invalid_int",
"expected a byte size unit (kB / MB / GB / TB / PB)",
);
}
#[test]
fn test_env_var_default() {
assert_error_msg(
"env_var_default",
"expected a boolean (true / false), a string, a multiline string, an integer, a float, or a date"
);
}
#[test]
fn test_invalid_key() {
assert_error_msg("invalid_key", "expected include or a key");
}
#[test]
fn test_invalid_document() {
assert_error_msg(
"invalid_doc",
"expected a key value, an include or a comment",
);
}
#[test]
fn test_invalid_date() {
assert_error_msg(
"invalid_date",
"expected a byte size unit (kB / MB / GB / TB / PB)",
);
}
#[test]
fn test_invalid_array_comment() {
assert_error_msg(
"invalid_array_comment",
"expected string / int / float / byte size / date / bool / array / dict / environment variable"
);
}
| true | 61f10607f6d2fc71ff92d31b41883914c922022b | Rust | ICGNYN/RustLearning | /勉強/ownership/3.rs | UTF-8 | 295 | 3.125 | 3 | [] | no_license |
fn main(){
let a: [i32; 5] = [0,1,2,3,4];
let b: [i32; 5] = a;
println!("{:?}",a);
println!("{:?}",b);
let mut c = Vec::new();
c.push(1);
//let d: Vec<i16> = vec![0,0,0];
let d = vec![0; 10];
println!("{:?}",d );
let e = 5.0/3.0;
println!("{}",e);
}
| true | c99ae123a3d4fec734c2702d4b31e49b4cd7eb2b | Rust | koaji/Rust-learning | /rust-doc/first_loop/src/main.rs | UTF-8 | 325 | 3.484375 | 3 | [] | no_license |
fn main() {
/* Infinite loop
loop {
println!("Loop !!")
}*/
let mut x = 5;
let mut done = false;
while !done {
x += x - 3;
println!("{}", x);
if x % 5 == 0 {
done = true;
}
}
for x in 0..10 {
println!("{}", x);
}
}
| true | f504a51fce6543be22e55c275a32d7d2af4dcdaa | Rust | UnicodingUnicorn/buckets | /src/main.rs | UTF-8 | 778 | 2.640625 | 3 | [] | no_license |
use actix_web::{ get, App, HttpResponse, HttpServer, Responder };
use actix_web::web::Data;
use handlebars::Handlebars;
use std::collections::BTreeMap;
use std::sync::Arc;
#[actix_web::main]
async fn main() -> std::io::Result<()> {
let mut handlebars = Handlebars::new();
handlebars.register_template_file("main", "./templates/main.hbs").unwrap();
let handlebars = Arc::new(handlebars);
HttpServer::new(move || {
App::new().data(handlebars.clone())
.service(index)
})
.bind("0.0.0.0:8080")?
.run()
.await
}
#[get("/")]
async fn index(handlebars:Data<Arc<Handlebars<'_>>>) -> impl Responder {
let data:BTreeMap<String, String> = BTreeMap::new();
HttpResponse::Ok().body(handlebars.render("main", &data).unwrap())
}
| true | 8f1313e45515a95fc132681eed3fa0e8123aacc9 | Rust | KodrAus/elastic-responses | /src/get.rs | UTF-8 | 1,524 | 2.625 | 3 | ["Apache-2.0", "MIT"] | permissive |
use serde::de::DeserializeOwned;
use serde_json::Value;
use parsing::{IsOk, HttpResponseHead, ResponseBody, Unbuffered, MaybeOkResponse};
use error::*;
/// Response for a [get document request](https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-get.html).
#[derive(Deserialize, Debug)]
pub struct GetResponseOf<T> {
#[serde(rename = "_index")]
pub index: String,
#[serde(rename = "_type")]
pub ty: String,
#[serde(rename = "_id")]
pub id: String,
#[serde(rename = "_version")]
pub version: Option<u32>,
pub found: bool,
#[serde(rename = "_source")]
pub source: Option<T>,
#[serde(rename="_routing")]
pub routing: Option<String>,
}
pub type GetResponse = GetResponseOf<Value>;
impl<T: DeserializeOwned> IsOk for GetResponseOf<T> {
fn is_ok<B: ResponseBody>(head: HttpResponseHead, body: Unbuffered<B>) -> Result<MaybeOkResponse<B>, ParseResponseError> {
match head.status() {
200...299 => Ok(MaybeOkResponse::ok(body)),
404 => {
// If we get a 404, it could be an IndexNotFound error or ok
// Check if the response contains a root 'error' node
let (maybe_err, body) = body.body()?;
let is_ok = maybe_err.as_object()
.and_then(|maybe_err| maybe_err.get("error"))
.is_none();
Ok(MaybeOkResponse::new(is_ok, body))
}
_ => Ok(MaybeOkResponse::err(body)),
}
}
}
| true | 5cb8fbbf1feca84c6cbebe1ea63648db06d7f1ca | Rust | Mic92/cntr | /src/files.rs | UTF-8 | 1,202 | 3.140625 | 3 | ["MIT"] | permissive |
use nix::fcntl::OFlag;
use std::fs::create_dir_all;
use std::fs::File;
use std::io;
use std::os::unix::prelude::*;
use std::path::Path;
#[derive(PartialOrd, Eq, PartialEq)]
pub enum FdState {
None,
Readable,
ReadWritable,
}
pub fn fd_path(fd: &Fd) -> String {
format!("/proc/self/fd/{}", fd.raw())
}
pub fn mkdir_p<P: AsRef<Path>>(path: &P) -> io::Result<()> {
if let Err(e) = create_dir_all(path) {
if e.kind() != io::ErrorKind::AlreadyExists {
return Err(e);
}
}
Ok(())
}
impl From<OFlag> for FdState {
fn from(flags: OFlag) -> FdState {
if flags & OFlag::O_RDWR == OFlag::O_RDWR {
FdState::ReadWritable
} else if flags & OFlag::O_RDONLY == OFlag::O_RDONLY {
FdState::Readable
} else {
FdState::None
}
}
}
pub struct Fd {
pub file: File,
pub state: FdState,
}
impl Fd {
pub fn new(fd: RawFd, state: FdState) -> Fd {
Fd {
file: unsafe { File::from_raw_fd(fd) },
state,
}
}
pub fn raw(&self) -> RawFd {
self.file.as_raw_fd()
}
pub fn path(&self) -> String {
fd_path(self)
}
}
| true | ad03a8c5af83ce39f8f42d2b070a00802518f614 | Rust | bouzuya/rust-atcoder | /cargo-atcoder/contests/arc119/src/bin/c.rs | UTF-8 | 658 | 2.703125 | 3 | [] | no_license |
use std::collections::HashMap;
use proconio::input;
fn main() {
input! {
n: usize,
a: [i64; n],
};
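// Negate every odd-indexed element, take prefix sums (with a leading 0), and count pairs of
// equal prefix sums: each such pair corresponds to a subarray whose alternating sum is zero.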
let b = a
.into_iter()
.enumerate()
.map(|(i, a_i)| a_i * if i % 2 == 0 { 1 } else { -1 })
.collect::<Vec<i64>>();
let c = std::iter::once(0)
.chain(b.iter().scan(0, |acc, &i| {
*acc += i;
Some(*acc)
}))
.collect::<Vec<i64>>();
let mut map = HashMap::new();
for c_i in c {
*map.entry(c_i).or_insert(0) += 1;
}
let mut ans = 0_i64;
for (_, v) in map {
ans += v * (v - 1) / 2;
}
println!("{}", ans);
}
| true | c57101f3d1185a6f03a445aa79d7c409e273f738 | Rust | pola-rs/polars | /crates/polars-row/src/lib.rs | UTF-8 | 13,846 | 3.5 | 4 | ["MIT"] | permissive |
//! Row format as defined in `arrow-rs`.
//! This currently partially implements that format only for needed types.
//! For completeness' sake, the format as defined by `arrow-rs` is as follows:
//! Converts [`ArrayRef`] columns into a [row-oriented](self) format.
//!
//! ## Overview
//!
//! The row format is a variable length byte sequence created by
//! concatenating the encoded form of each column. The encoding for
//! each column depends on its datatype (and sort options).
//!
//! The encoding is carefully designed in such a way that escaping is
//! unnecessary: it is never ambiguous as to whether a byte is part of
//! a sentinel (e.g. null) or a value.
//!
//! ## Unsigned Integer Encoding
//!
//! A null integer is encoded as a `0_u8` sentinel, followed by as many zero bytes as the
//! integer's byte width.
//!
//! A valid integer is encoded as `1_u8`, followed by the big-endian representation of the
//! integer.
//!
//! ```text
//! ┌──┬──┬──┬──┐ ┌──┬──┬──┬──┬──┐
//! 3 │03│00│00│00│ │01│00│00│00│03│
//! └──┴──┴──┴──┘ └──┴──┴──┴──┴──┘
//! ┌──┬──┬──┬──┐ ┌──┬──┬──┬──┬──┐
//! 258 │02│01│00│00│ │01│00│00│01│02│
//! └──┴──┴──┴──┘ └──┴──┴──┴──┴──┘
//! ┌──┬──┬──┬──┐ ┌──┬──┬──┬──┬──┐
//! 23423 │7F│5B│00│00│ │01│00│00│5B│7F│
//! └──┴──┴──┴──┘ └──┴──┴──┴──┴──┘
//! ┌──┬──┬──┬──┐ ┌──┬──┬──┬──┬──┐
//! NULL │??│??│??│??│ │00│00│00│00│00│
//! └──┴──┴──┴──┘ └──┴──┴──┴──┴──┘
//!
//! 32-bit (4 bytes) Row Format
//! Value Little Endian
//! ```
//!
//! ## Signed Integer Encoding
//!
//! Signed integers have their most significant sign bit flipped, and are then encoded in the
//! same manner as an unsigned integer.
//!
//! ```text
//! ┌──┬──┬──┬──┐ ┌──┬──┬──┬──┐ ┌──┬──┬──┬──┬──┐
//! 5 │05│00│00│00│ │05│00│00│80│ │01│80│00│00│05│
//! └──┴──┴──┴──┘ └──┴──┴──┴──┘ └──┴──┴──┴──┴──┘
//! ┌──┬──┬──┬──┐ ┌──┬──┬──┬──┐ ┌──┬──┬──┬──┬──┐
//! -5 │FB│FF│FF│FF│ │FB│FF│FF│7F│ │01│7F│FF│FF│FB│
//! └──┴──┴──┴──┘ └──┴──┴──┴──┘ └──┴──┴──┴──┴──┘
//!
//! Value 32-bit (4 bytes) High bit flipped Row Format
//! Little Endian
//! ```
//!
//! ## Float Encoding
//!
//! Floats are converted from IEEE 754 representation to a signed integer representation
//! by flipping all bar the sign bit if they are negative.
//!
//! They are then encoded in the same manner as a signed integer.
//!
//! ## Fixed Length Bytes Encoding
//!
//! Fixed length bytes are encoded in the same fashion as primitive types above.
//!
//! For a fixed length array of length `n`:
//!
//! A null is encoded as `0_u8` null sentinel followed by `n` `0_u8` bytes
//!
//! A valid value is encoded as `1_u8` followed by the value bytes
//!
//! ## Variable Length Bytes (including Strings) Encoding
//!
//! A null is encoded as a `0_u8`.
//!
//! An empty byte array is encoded as `1_u8`.
//!
//! A non-null, non-empty byte array is encoded as `2_u8` followed by the byte array
//! encoded using a block based scheme described below.
//!
//! The byte array is broken up into 32-byte blocks, each block is written in turn
//! to the output, followed by `0xFF_u8`. The final block is padded to 32-bytes
//! with `0_u8` and written to the output, followed by the un-padded length in bytes
//! of this final block as a `u8`.
//!
//! Note the following example encodings use a block size of 4 bytes,
//! as opposed to 32 bytes for brevity:
//!
//! ```text
//! ┌───┬───┬───┬───┬───┬───┐
//! "MEEP" │02 │'M'│'E'│'E'│'P'│04 │
//! └───┴───┴───┴───┴───┴───┘
//!
//! ┌───┐
//! "" │01 |
//! └───┘
//!
//! NULL ┌───┐
//! │00 │
//! └───┘
//!
//! "Defenestration" ┌───┬───┬───┬───┬───┬───┐
//! │02 │'D'│'e'│'f'│'e'│FF │
//! └───┼───┼───┼───┼───┼───┤
//! │'n'│'e'│'s'│'t'│FF │
//! ├───┼───┼───┼───┼───┤
//! │'r'│'a'│'t'│'r'│FF │
//! ├───┼───┼───┼───┼───┤
//! │'a'│'t'│'i'│'o'│FF │
//! ├───┼───┼───┼───┼───┤
//! │'n'│00 │00 │00 │01 │
//! └───┴───┴───┴───┴───┘
//! ```
//!
//! This approach is loosely inspired by [COBS] encoding, and chosen over more traditional
//! [byte stuffing] as it is more amenable to vectorisation, in particular AVX-256.
//!
//! ## Dictionary Encoding
//!
//! [`RowsEncoded`] needs to support converting dictionary encoded arrays with unsorted, and
//! potentially distinct dictionaries. One simple mechanism to avoid this would be to reverse
//! the dictionary encoding, and encode the array values directly, however, this would lose
//! the benefits of dictionary encoding to reduce memory and CPU consumption.
//!
//! As such the [`RowsEncoded`] creates an order-preserving mapping
//! for each dictionary encoded column, which allows new dictionary
//! values to be added whilst preserving the sort order.
//!
//! A null dictionary value is encoded as `0_u8`.
//!
//! A non-null dictionary value is encoded as `1_u8` followed by a null-terminated byte array
//! key determined by the order-preserving dictionary encoding
//!
//! ```text
//! ┌──────────┐ ┌─────┐
//! │ "Bar" │ ───────────────▶│ 01 │
//! └──────────┘ └─────┘
//! ┌──────────┐ ┌─────┬─────┐
//! │"Fabulous"│ ───────────────▶│ 01 │ 02 │
//! └──────────┘ └─────┴─────┘
//! ┌──────────┐ ┌─────┐
//! │ "Soup" │ ───────────────▶│ 05 │
//! └──────────┘ └─────┘
//! ┌──────────┐ ┌─────┐
//! │ "ZZ" │ ───────────────▶│ 07 │
//! └──────────┘ └─────┘
//!
//! Example Order Preserving Mapping
//! ```
//! Using the map above, the corresponding row format will be
//!
//! ```text
//! ┌─────┬─────┬─────┬─────┐
//! "Fabulous" │ 01 │ 03 │ 05 │ 00 │
//! └─────┴─────┴─────┴─────┘
//!
//! ┌─────┬─────┬─────┐
//! "ZZ" │ 01 │ 07 │ 00 │
//! └─────┴─────┴─────┘
//!
//! ┌─────┐
//! NULL │ 00 │
//! └─────┘
//!
//! Input Row Format
//! ```
//!
//! ## Struct Encoding
//!
//! A null is encoded as a `0_u8`.
//!
//! A valid value is encoded as `1_u8` followed by the row encoding of each child.
//!
//! This encoding effectively flattens the schema in a depth-first fashion.
//!
//! For example
//!
//! ```text
//! ┌───────┬────────────────────────┬───────┐
//! │ Int32 │ Struct[Int32, Float32] │ Int32 │
//! └───────┴────────────────────────┴───────┘
//! ```
//!
//! Is encoded as
//!
//! ```text
//! ┌───────┬───────────────┬───────┬─────────┬───────┐
//! │ Int32 │ Null Sentinel │ Int32 │ Float32 │ Int32 │
//! └───────┴───────────────┴───────┴─────────┴───────┘
//! ```
//!
//! ## List Encoding
//!
//! Lists are encoded by first encoding all child elements to the row format.
//!
//! A "canonical byte array" is then constructed by concatenating the row
//! encodings of all their elements into a single binary array, followed
//! by the lengths of each encoded row, and the number of elements, encoded
//! as big endian `u32`.
//!
//! This canonical byte array is then encoded using the variable length byte
//! encoding described above.
//!
//! _The lengths are not strictly necessary but greatly simplify decode, they
//! may be removed in a future iteration_.
//!
//! For example given:
//!
//! ```text
//! [1_u8, 2_u8, 3_u8]
//! [1_u8, null]
//! []
//! null
//! ```
//!
//! The elements would be converted to:
//!
//! ```text
//! ┌──┬──┐ ┌──┬──┐ ┌──┬──┐ ┌──┬──┐ ┌──┬──┐
//! 1 │01│01│ 2 │01│02│ 3 │01│03│ 1 │01│01│ null │00│00│
//! └──┴──┘ └──┴──┘ └──┴──┘ └──┴──┘ └──┴──┘
//!```
//!
//! Which would be grouped into the following canonical byte arrays:
//!
//! ```text
//! ┌──┬──┬──┬──┬──┬──┬──┬──┬──┬──┬──┬──┬──┬──┬──┬──┬──┬──┬──┬──┬──┬──┐
//! [1_u8, 2_u8, 3_u8] │01│01│01│02│01│03│00│00│00│02│00│00│00│02│00│00│00│02│00│00│00│03│
//! └──┴──┴──┴──┴──┴──┴──┴──┴──┴──┴──┴──┴──┴──┴──┴──┴──┴──┴──┴──┴──┴──┘
//! └──── rows ────┘ └───────── row lengths ─────────┘ └─ count ─┘
//!
//! ┌──┬──┬──┬──┬──┬──┬──┬──┬──┬──┬──┬──┬──┬──┬──┬──┐
//! [1_u8, null] │01│01│00│00│00│00│00│02│00│00│00│02│00│00│00│02│
//! └──┴──┴──┴──┴──┴──┴──┴──┴──┴──┴──┴──┴──┴──┴──┴──┘
//!```
//!
//! With `[]` represented by an empty byte array, and `null` a null byte array.
//!
//! These byte arrays will then be encoded using the variable length byte encoding
//! described above.
//!
//! # Ordering
//!
//! ## Float Ordering
//!
//! Floats are totally ordered in accordance to the `totalOrder` predicate as defined
//! in the IEEE 754 (2008 revision) floating point standard.
//!
//! The ordering established by this does not always agree with the
//! [`PartialOrd`] and [`PartialEq`] implementations of `f32`. For example,
//! they consider negative and positive zero equal, while this does not
//!
//! ## Null Ordering
//!
//! The encoding described above will order nulls first, this can be inverted by representing
//! nulls as `0xFF_u8` instead of `0_u8`
//!
//! ## Reverse Column Ordering
//!
//! The order of a given column can be reversed by negating the encoded bytes of non-null values
//!
//! [COBS]: https://en.wikipedia.org/wiki/Consistent_Overhead_Byte_Stuffing
//! [byte stuffing]: https://en.wikipedia.org/wiki/High-Level_Data_Link_Control#Asynchronous_framing
extern crate core;
pub mod decode;
pub mod encode;
pub(crate) mod fixed;
mod row;
mod utils;
pub(crate) mod variable;
use arrow::array::*;
pub type ArrayRef = Box<dyn Array>;
pub use encode::{
convert_columns, convert_columns_amortized, convert_columns_amortized_no_order,
convert_columns_no_order,
};
pub use row::{RowsEncoded, SortField};
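// Illustrative sketch added for clarity; this is not part of the crate's API. It mirrors
// the nullable unsigned-integer encoding described in the module docs above (a validity
// sentinel byte followed by the big-endian value bytes) for a `u32`, and checks it against
// the values shown in the doc table.
#[cfg(test)]
mod unsigned_encoding_doc_example {
    fn encode_opt_u32(v: Option<u32>) -> [u8; 5] {
        let mut out = [0u8; 5];
        if let Some(v) = v {
            out[0] = 1; // validity sentinel
            out[1..].copy_from_slice(&v.to_be_bytes()); // big-endian value bytes
        }
        out
    }

    #[test]
    fn matches_module_docs() {
        assert_eq!(encode_opt_u32(Some(3)), [0x01, 0x00, 0x00, 0x00, 0x03]);
        assert_eq!(encode_opt_u32(Some(258)), [0x01, 0x00, 0x00, 0x01, 0x02]);
        assert_eq!(encode_opt_u32(Some(23423)), [0x01, 0x00, 0x00, 0x5B, 0x7F]);
        assert_eq!(encode_opt_u32(None), [0x00; 5]);
    }
}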
| true | a9c4d5cede12d317dbe68ebceea8cd2e7cd5eef4 | Rust | mcneja/disguiser | /src/coord.rs | UTF-8 | 1,370 | 3.265625 | 3 | [] | no_license |
#[derive(Clone, Copy, Eq, Ord, PartialEq, PartialOrd, Hash)]
pub struct Coord(pub i32, pub i32);
impl Coord {
pub fn dot(self, rhs: Self) -> i32 {
self.0 * rhs.0 + self.1 * rhs.1
}
pub fn length_squared(self) -> i32 {
self.0 * self.0 + self.1 * self.1
}
pub fn mul_components(self, rhs: Self) -> Self {
Self(self.0 * rhs.0, self.1 * rhs.1)
}
}
impl std::ops::Add for Coord {
type Output = Self;
fn add(self, rhs: Self) -> Self {
Self(self.0 + rhs.0, self.1 + rhs.1)
}
}
impl std::ops::AddAssign for Coord {
fn add_assign(&mut self, rhs: Self) {
self.0 += rhs.0;
self.1 += rhs.1;
}
}
impl std::ops::Sub for Coord {
type Output = Self;
fn sub(self, rhs: Self) -> Self {
Self(self.0 - rhs.0, self.1 - rhs.1)
}
}
impl std::ops::SubAssign for Coord {
fn sub_assign(&mut self, rhs: Self) {
self.0 -= rhs.0;
self.1 -= rhs.1;
}
}
impl std::ops::Neg for Coord {
type Output = Self;
fn neg(self) -> Self {
Self(-self.0, -self.1)
}
}
impl std::ops::Mul<i32> for Coord {
type Output = Self;
fn mul(self, rhs: i32) -> Self {
Self(self.0 * rhs, self.1 * rhs)
}
}
impl std::ops::MulAssign<i32> for Coord {
fn mul_assign(&mut self, rhs: i32) {
self.0 *= rhs;
self.1 *= rhs;
}
}
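// Illustrative test sketch added for clarity (not part of the original file); it exercises
// the operators defined above.
#[cfg(test)]
mod coord_op_tests {
    use super::Coord;

    #[test]
    fn basic_arithmetic() {
        let a = Coord(1, 2);
        let b = Coord(3, -4);
        assert!(a + b == Coord(4, -2));
        assert!(b - a == Coord(2, -6));
        assert!(-a == Coord(-1, -2));
        assert!(a * 3 == Coord(3, 6));
        assert!(a.dot(b) == -5);
        assert!(b.length_squared() == 25);
        assert!(a.mul_components(b) == Coord(3, -8));
    }
}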
| true | 674ac05fcf7bbcde267a2e4fe67b0304a145ab76 | Rust | tokio-rs/tokio | /tokio/src/sync/oneshot.rs | UTF-8 | 42,481 | 3.625 | 4 | ["MIT"] | permissive |
#![cfg_attr(not(feature = "sync"), allow(dead_code, unreachable_pub))]
//! A one-shot channel is used for sending a single message between
//! asynchronous tasks. The [`channel`] function is used to create a
//! [`Sender`] and [`Receiver`] handle pair that form the channel.
//!
//! The `Sender` handle is used by the producer to send the value.
//! The `Receiver` handle is used by the consumer to receive the value.
//!
//! Each handle can be used on separate tasks.
//!
//! Since the `send` method is not async, it can be used anywhere. This includes
//! sending between two runtimes, and using it from non-async code.
//!
//! If the [`Receiver`] is closed before receiving a message which has already
//! been sent, the message will remain in the channel until the receiver is
//! dropped, at which point the message will be dropped immediately.
//!
//! # Examples
//!
//! ```
//! use tokio::sync::oneshot;
//!
//! #[tokio::main]
//! async fn main() {
//! let (tx, rx) = oneshot::channel();
//!
//! tokio::spawn(async move {
//! if let Err(_) = tx.send(3) {
//! println!("the receiver dropped");
//! }
//! });
//!
//! match rx.await {
//! Ok(v) => println!("got = {:?}", v),
//! Err(_) => println!("the sender dropped"),
//! }
//! }
//! ```
//!
//! If the sender is dropped without sending, the receiver will fail with
//! [`error::RecvError`]:
//!
//! ```
//! use tokio::sync::oneshot;
//!
//! #[tokio::main]
//! async fn main() {
//! let (tx, rx) = oneshot::channel::<u32>();
//!
//! tokio::spawn(async move {
//! drop(tx);
//! });
//!
//! match rx.await {
//! Ok(_) => panic!("This doesn't happen"),
//! Err(_) => println!("the sender dropped"),
//! }
//! }
//! ```
//!
//! To use a oneshot channel in a `tokio::select!` loop, add `&mut` in front of
//! the channel.
//!
//! ```
//! use tokio::sync::oneshot;
//! use tokio::time::{interval, sleep, Duration};
//!
//! #[tokio::main]
//! # async fn _doc() {}
//! # #[tokio::main(flavor = "current_thread", start_paused = true)]
//! async fn main() {
//! let (send, mut recv) = oneshot::channel();
//! let mut interval = interval(Duration::from_millis(100));
//!
//! # let handle =
//! tokio::spawn(async move {
//! sleep(Duration::from_secs(1)).await;
//! send.send("shut down").unwrap();
//! });
//!
//! loop {
//! tokio::select! {
//! _ = interval.tick() => println!("Another 100ms"),
//! msg = &mut recv => {
//! println!("Got message: {}", msg.unwrap());
//! break;
//! }
//! }
//! }
//! # handle.await.unwrap();
//! }
//! ```
//!
//! To use a `Sender` from a destructor, put it in an [`Option`] and call
//! [`Option::take`].
//!
//! ```
//! use tokio::sync::oneshot;
//!
//! struct SendOnDrop {
//! sender: Option<oneshot::Sender<&'static str>>,
//! }
//! impl Drop for SendOnDrop {
//! fn drop(&mut self) {
//! if let Some(sender) = self.sender.take() {
//! // Using `let _ =` to ignore send errors.
//! let _ = sender.send("I got dropped!");
//! }
//! }
//! }
//!
//! #[tokio::main]
//! # async fn _doc() {}
//! # #[tokio::main(flavor = "current_thread")]
//! async fn main() {
//! let (send, recv) = oneshot::channel();
//!
//! let send_on_drop = SendOnDrop { sender: Some(send) };
//! drop(send_on_drop);
//!
//! assert_eq!(recv.await, Ok("I got dropped!"));
//! }
//! ```
use crate::loom::cell::UnsafeCell;
use crate::loom::sync::atomic::AtomicUsize;
use crate::loom::sync::Arc;
#[cfg(all(tokio_unstable, feature = "tracing"))]
use crate::util::trace;
use std::fmt;
use std::future::Future;
use std::mem::MaybeUninit;
use std::pin::Pin;
use std::sync::atomic::Ordering::{self, AcqRel, Acquire};
use std::task::Poll::{Pending, Ready};
use std::task::{Context, Poll, Waker};
/// Sends a value to the associated [`Receiver`].
///
/// A pair of both a [`Sender`] and a [`Receiver`] are created by the
/// [`channel`](fn@channel) function.
///
/// # Examples
///
/// ```
/// use tokio::sync::oneshot;
///
/// #[tokio::main]
/// async fn main() {
/// let (tx, rx) = oneshot::channel();
///
/// tokio::spawn(async move {
/// if let Err(_) = tx.send(3) {
/// println!("the receiver dropped");
/// }
/// });
///
/// match rx.await {
/// Ok(v) => println!("got = {:?}", v),
/// Err(_) => println!("the sender dropped"),
/// }
/// }
/// ```
///
/// If the sender is dropped without sending, the receiver will fail with
/// [`error::RecvError`]:
///
/// ```
/// use tokio::sync::oneshot;
///
/// #[tokio::main]
/// async fn main() {
/// let (tx, rx) = oneshot::channel::<u32>();
///
/// tokio::spawn(async move {
/// drop(tx);
/// });
///
/// match rx.await {
/// Ok(_) => panic!("This doesn't happen"),
/// Err(_) => println!("the sender dropped"),
/// }
/// }
/// ```
///
/// To use a `Sender` from a destructor, put it in an [`Option`] and call
/// [`Option::take`].
///
/// ```
/// use tokio::sync::oneshot;
///
/// struct SendOnDrop {
/// sender: Option<oneshot::Sender<&'static str>>,
/// }
/// impl Drop for SendOnDrop {
/// fn drop(&mut self) {
/// if let Some(sender) = self.sender.take() {
/// // Using `let _ =` to ignore send errors.
/// let _ = sender.send("I got dropped!");
/// }
/// }
/// }
///
/// #[tokio::main]
/// # async fn _doc() {}
/// # #[tokio::main(flavor = "current_thread")]
/// async fn main() {
/// let (send, recv) = oneshot::channel();
///
/// let send_on_drop = SendOnDrop { sender: Some(send) };
/// drop(send_on_drop);
///
/// assert_eq!(recv.await, Ok("I got dropped!"));
/// }
/// ```
///
/// [`Option`]: std::option::Option
/// [`Option::take`]: std::option::Option::take
#[derive(Debug)]
pub struct Sender<T> {
inner: Option<Arc<Inner<T>>>,
#[cfg(all(tokio_unstable, feature = "tracing"))]
resource_span: tracing::Span,
}
/// Receives a value from the associated [`Sender`].
///
/// A pair of both a [`Sender`] and a [`Receiver`] are created by the
/// [`channel`](fn@channel) function.
///
/// This channel has no `recv` method because the receiver itself implements the
/// [`Future`] trait. To receive a `Result<T, `[`error::RecvError`]`>`, `.await` the `Receiver` object directly.
///
/// The `poll` method on the `Future` trait is allowed to spuriously return
/// `Poll::Pending` even if the message has been sent. If such a spurious
/// failure happens, then the caller will be woken when the spurious failure has
/// been resolved so that the caller can attempt to receive the message again.
/// Note that receiving such a wakeup does not guarantee that the next call will
/// succeed — it could fail with another spurious failure. (A spurious failure
/// does not mean that the message is lost. It is just delayed.)
///
/// [`Future`]: trait@std::future::Future
///
/// # Examples
///
/// ```
/// use tokio::sync::oneshot;
///
/// #[tokio::main]
/// async fn main() {
/// let (tx, rx) = oneshot::channel();
///
/// tokio::spawn(async move {
/// if let Err(_) = tx.send(3) {
/// println!("the receiver dropped");
/// }
/// });
///
/// match rx.await {
/// Ok(v) => println!("got = {:?}", v),
/// Err(_) => println!("the sender dropped"),
/// }
/// }
/// ```
///
/// If the sender is dropped without sending, the receiver will fail with
/// [`error::RecvError`]:
///
/// ```
/// use tokio::sync::oneshot;
///
/// #[tokio::main]
/// async fn main() {
/// let (tx, rx) = oneshot::channel::<u32>();
///
/// tokio::spawn(async move {
/// drop(tx);
/// });
///
/// match rx.await {
/// Ok(_) => panic!("This doesn't happen"),
/// Err(_) => println!("the sender dropped"),
/// }
/// }
/// ```
///
/// To use a `Receiver` in a `tokio::select!` loop, add `&mut` in front of the
/// channel.
///
/// ```
/// use tokio::sync::oneshot;
/// use tokio::time::{interval, sleep, Duration};
///
/// #[tokio::main]
/// # async fn _doc() {}
/// # #[tokio::main(flavor = "current_thread", start_paused = true)]
/// async fn main() {
/// let (send, mut recv) = oneshot::channel();
/// let mut interval = interval(Duration::from_millis(100));
///
/// # let handle =
/// tokio::spawn(async move {
/// sleep(Duration::from_secs(1)).await;
/// send.send("shut down").unwrap();
/// });
///
/// loop {
/// tokio::select! {
/// _ = interval.tick() => println!("Another 100ms"),
/// msg = &mut recv => {
/// println!("Got message: {}", msg.unwrap());
/// break;
/// }
/// }
/// }
/// # handle.await.unwrap();
/// }
/// ```
#[derive(Debug)]
pub struct Receiver<T> {
inner: Option<Arc<Inner<T>>>,
#[cfg(all(tokio_unstable, feature = "tracing"))]
resource_span: tracing::Span,
#[cfg(all(tokio_unstable, feature = "tracing"))]
async_op_span: tracing::Span,
#[cfg(all(tokio_unstable, feature = "tracing"))]
async_op_poll_span: tracing::Span,
}
pub mod error {
//! Oneshot error types.
use std::fmt;
/// Error returned by the `Future` implementation for `Receiver`.
///
/// This error is returned by the receiver when the sender is dropped without sending.
#[derive(Debug, Eq, PartialEq, Clone)]
pub struct RecvError(pub(super) ());
/// Error returned by the `try_recv` function on `Receiver`.
#[derive(Debug, Eq, PartialEq, Clone)]
pub enum TryRecvError {
/// The send half of the channel has not yet sent a value.
Empty,
/// The send half of the channel was dropped without sending a value.
Closed,
}
// ===== impl RecvError =====
impl fmt::Display for RecvError {
fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(fmt, "channel closed")
}
}
impl std::error::Error for RecvError {}
// ===== impl TryRecvError =====
impl fmt::Display for TryRecvError {
fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
TryRecvError::Empty => write!(fmt, "channel empty"),
TryRecvError::Closed => write!(fmt, "channel closed"),
}
}
}
impl std::error::Error for TryRecvError {}
}
use self::error::*;
struct Inner<T> {
/// Manages the state of the inner cell.
state: AtomicUsize,
/// The value. This is set by `Sender` and read by `Receiver`. The state of
/// the cell is tracked by `state`.
value: UnsafeCell<Option<T>>,
/// The task to notify when the receiver drops without consuming the value.
///
/// ## Safety
///
/// The `TX_TASK_SET` bit in the `state` field is set if this field is
/// initialized. If that bit is unset, this field may be uninitialized.
tx_task: Task,
/// The task to notify when the value is sent.
///
/// ## Safety
///
/// The `RX_TASK_SET` bit in the `state` field is set if this field is
/// initialized. If that bit is unset, this field may be uninitialized.
rx_task: Task,
}
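/// Storage for a possibly-uninitialized `Waker`. Whether the slot holds a valid waker is
/// tracked by the corresponding `RX_TASK_SET` / `TX_TASK_SET` bit in `Inner::state`.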
struct Task(UnsafeCell<MaybeUninit<Waker>>);
impl Task {
unsafe fn will_wake(&self, cx: &mut Context<'_>) -> bool {
self.with_task(|w| w.will_wake(cx.waker()))
}
unsafe fn with_task<F, R>(&self, f: F) -> R
where
F: FnOnce(&Waker) -> R,
{
self.0.with(|ptr| {
let waker: *const Waker = (*ptr).as_ptr();
f(&*waker)
})
}
unsafe fn drop_task(&self) {
self.0.with_mut(|ptr| {
let ptr: *mut Waker = (*ptr).as_mut_ptr();
ptr.drop_in_place();
});
}
unsafe fn set_task(&self, cx: &mut Context<'_>) {
self.0.with_mut(|ptr| {
let ptr: *mut Waker = (*ptr).as_mut_ptr();
ptr.write(cx.waker().clone());
});
}
}
#[derive(Clone, Copy)]
struct State(usize);
/// Creates a new one-shot channel for sending single values across asynchronous
/// tasks.
///
/// The function returns separate "send" and "receive" handles. The `Sender`
/// handle is used by the producer to send the value. The `Receiver` handle is
/// used by the consumer to receive the value.
///
/// Each handle can be used on separate tasks.
///
/// # Examples
///
/// ```
/// use tokio::sync::oneshot;
///
/// #[tokio::main]
/// async fn main() {
/// let (tx, rx) = oneshot::channel();
///
/// tokio::spawn(async move {
/// if let Err(_) = tx.send(3) {
/// println!("the receiver dropped");
/// }
/// });
///
/// match rx.await {
/// Ok(v) => println!("got = {:?}", v),
/// Err(_) => println!("the sender dropped"),
/// }
/// }
/// ```
#[track_caller]
pub fn channel<T>() -> (Sender<T>, Receiver<T>) {
#[cfg(all(tokio_unstable, feature = "tracing"))]
let resource_span = {
let location = std::panic::Location::caller();
let resource_span = tracing::trace_span!(
"runtime.resource",
concrete_type = "Sender|Receiver",
kind = "Sync",
loc.file = location.file(),
loc.line = location.line(),
loc.col = location.column(),
);
resource_span.in_scope(|| {
tracing::trace!(
target: "runtime::resource::state_update",
tx_dropped = false,
tx_dropped.op = "override",
)
});
resource_span.in_scope(|| {
tracing::trace!(
target: "runtime::resource::state_update",
rx_dropped = false,
rx_dropped.op = "override",
)
});
resource_span.in_scope(|| {
tracing::trace!(
target: "runtime::resource::state_update",
value_sent = false,
value_sent.op = "override",
)
});
resource_span.in_scope(|| {
tracing::trace!(
target: "runtime::resource::state_update",
value_received = false,
value_received.op = "override",
)
});
resource_span
};
let inner = Arc::new(Inner {
state: AtomicUsize::new(State::new().as_usize()),
value: UnsafeCell::new(None),
tx_task: Task(UnsafeCell::new(MaybeUninit::uninit())),
rx_task: Task(UnsafeCell::new(MaybeUninit::uninit())),
});
let tx = Sender {
inner: Some(inner.clone()),
#[cfg(all(tokio_unstable, feature = "tracing"))]
resource_span: resource_span.clone(),
};
#[cfg(all(tokio_unstable, feature = "tracing"))]
let async_op_span = resource_span
.in_scope(|| tracing::trace_span!("runtime.resource.async_op", source = "Receiver::await"));
#[cfg(all(tokio_unstable, feature = "tracing"))]
let async_op_poll_span =
async_op_span.in_scope(|| tracing::trace_span!("runtime.resource.async_op.poll"));
let rx = Receiver {
inner: Some(inner),
#[cfg(all(tokio_unstable, feature = "tracing"))]
resource_span,
#[cfg(all(tokio_unstable, feature = "tracing"))]
async_op_span,
#[cfg(all(tokio_unstable, feature = "tracing"))]
async_op_poll_span,
};
(tx, rx)
}
impl<T> Sender<T> {
/// Attempts to send a value on this channel, returning it back if it could
/// not be sent.
///
/// This method consumes `self` as only one value may ever be sent on a oneshot
/// channel. It is not marked async because sending a message to an oneshot
/// channel never requires any form of waiting. Because of this, the `send`
/// method can be used in both synchronous and asynchronous code without
/// problems.
///
/// A successful send occurs when it is determined that the other end of the
/// channel has not hung up already. An unsuccessful send would be one where
/// the corresponding receiver has already been deallocated. Note that a
/// return value of `Err` means that the data will never be received, but
/// a return value of `Ok` does *not* mean that the data will be received.
/// It is possible for the corresponding receiver to hang up immediately
/// after this function returns `Ok`.
///
/// # Examples
///
/// Send a value to another task
///
/// ```
/// use tokio::sync::oneshot;
///
/// #[tokio::main]
/// async fn main() {
/// let (tx, rx) = oneshot::channel();
///
/// tokio::spawn(async move {
/// if let Err(_) = tx.send(3) {
/// println!("the receiver dropped");
/// }
/// });
///
/// match rx.await {
/// Ok(v) => println!("got = {:?}", v),
/// Err(_) => println!("the sender dropped"),
/// }
/// }
/// ```
pub fn send(mut self, t: T) -> Result<(), T> {
let inner = self.inner.take().unwrap();
inner.value.with_mut(|ptr| unsafe {
// SAFETY: The receiver will not access the `UnsafeCell` unless the
// channel has been marked as "complete" (the `VALUE_SENT` state bit
// is set).
// That bit is only set by the sender later on in this method, and
// calling this method consumes `self`. Therefore, if it was possible to
// call this method, we know that the `VALUE_SENT` bit is unset, and
// the receiver is not currently accessing the `UnsafeCell`.
*ptr = Some(t);
});
if !inner.complete() {
unsafe {
// SAFETY: The receiver will not access the `UnsafeCell` unless
// the channel has been marked as "complete". Calling
// `complete()` will return true if this bit is set, and false
// if it is not set. Thus, if `complete()` returned false, it is
// safe for us to access the value, because we know that the
// receiver will not.
return Err(inner.consume_value().unwrap());
}
}
#[cfg(all(tokio_unstable, feature = "tracing"))]
self.resource_span.in_scope(|| {
tracing::trace!(
target: "runtime::resource::state_update",
value_sent = true,
value_sent.op = "override",
)
});
Ok(())
}
/// Waits for the associated [`Receiver`] handle to close.
///
/// A [`Receiver`] is closed by either calling [`close`] explicitly or the
/// [`Receiver`] value is dropped.
///
/// This function is useful when paired with `select!` to abort a
/// computation when the receiver is no longer interested in the result.
///
/// # Return
///
/// Returns a `Future` which must be awaited on.
///
/// [`Receiver`]: Receiver
/// [`close`]: Receiver::close
///
/// # Examples
///
/// Basic usage
///
/// ```
/// use tokio::sync::oneshot;
///
/// #[tokio::main]
/// async fn main() {
/// let (mut tx, rx) = oneshot::channel::<()>();
///
/// tokio::spawn(async move {
/// drop(rx);
/// });
///
/// tx.closed().await;
/// println!("the receiver dropped");
/// }
/// ```
///
/// Paired with select
///
/// ```
/// use tokio::sync::oneshot;
/// use tokio::time::{self, Duration};
///
/// async fn compute() -> String {
/// // Complex computation returning a `String`
/// # "hello".to_string()
/// }
///
/// #[tokio::main]
/// async fn main() {
/// let (mut tx, rx) = oneshot::channel();
///
/// tokio::spawn(async move {
/// tokio::select! {
/// _ = tx.closed() => {
/// // The receiver dropped, no need to do any further work
/// }
/// value = compute() => {
/// // The send can fail if the channel was closed at the exact same
/// // time as when compute() finished, so just ignore the failure.
/// let _ = tx.send(value);
/// }
/// }
/// });
///
/// // Wait for up to 10 seconds
/// let _ = time::timeout(Duration::from_secs(10), rx).await;
/// }
/// ```
pub async fn closed(&mut self) {
use crate::future::poll_fn;
#[cfg(all(tokio_unstable, feature = "tracing"))]
let resource_span = self.resource_span.clone();
#[cfg(all(tokio_unstable, feature = "tracing"))]
let closed = trace::async_op(
|| poll_fn(|cx| self.poll_closed(cx)),
resource_span,
"Sender::closed",
"poll_closed",
false,
);
#[cfg(not(all(tokio_unstable, feature = "tracing")))]
let closed = poll_fn(|cx| self.poll_closed(cx));
closed.await
}
/// Returns `true` if the associated [`Receiver`] handle has been dropped.
///
/// A [`Receiver`] is closed by either calling [`close`] explicitly or the
/// [`Receiver`] value is dropped.
///
/// If `true` is returned, a call to `send` will always result in an error.
///
/// [`Receiver`]: Receiver
/// [`close`]: Receiver::close
///
/// # Examples
///
/// ```
/// use tokio::sync::oneshot;
///
/// #[tokio::main]
/// async fn main() {
/// let (tx, rx) = oneshot::channel();
///
/// assert!(!tx.is_closed());
///
/// drop(rx);
///
/// assert!(tx.is_closed());
/// assert!(tx.send("never received").is_err());
/// }
/// ```
pub fn is_closed(&self) -> bool {
let inner = self.inner.as_ref().unwrap();
let state = State::load(&inner.state, Acquire);
state.is_closed()
}
/// Checks whether the oneshot channel has been closed, and if not, schedules the
/// `Waker` in the provided `Context` to receive a notification when the channel is
/// closed.
///
/// A [`Receiver`] is closed by either calling [`close`] explicitly, or when the
/// [`Receiver`] value is dropped.
///
/// Note that on multiple calls to poll, only the `Waker` from the `Context` passed
/// to the most recent call will be scheduled to receive a wakeup.
///
/// [`Receiver`]: struct@crate::sync::oneshot::Receiver
/// [`close`]: fn@crate::sync::oneshot::Receiver::close
///
/// # Return value
///
/// This function returns:
///
/// * `Poll::Pending` if the channel is still open.
/// * `Poll::Ready(())` if the channel is closed.
///
/// # Examples
///
/// ```
/// use tokio::sync::oneshot;
///
/// use futures::future::poll_fn;
///
/// #[tokio::main]
/// async fn main() {
/// let (mut tx, mut rx) = oneshot::channel::<()>();
///
/// tokio::spawn(async move {
/// rx.close();
/// });
///
/// poll_fn(|cx| tx.poll_closed(cx)).await;
///
/// println!("the receiver dropped");
/// }
/// ```
pub fn poll_closed(&mut self, cx: &mut Context<'_>) -> Poll<()> {
ready!(crate::trace::trace_leaf(cx));
// Keep track of task budget
let coop = ready!(crate::runtime::coop::poll_proceed(cx));
let inner = self.inner.as_ref().unwrap();
let mut state = State::load(&inner.state, Acquire);
if state.is_closed() {
coop.made_progress();
return Ready(());
}
if state.is_tx_task_set() {
let will_notify = unsafe { inner.tx_task.will_wake(cx) };
if !will_notify {
state = State::unset_tx_task(&inner.state);
if state.is_closed() {
// Set the flag again so that the waker is released in drop
State::set_tx_task(&inner.state);
coop.made_progress();
return Ready(());
} else {
unsafe { inner.tx_task.drop_task() };
}
}
}
if !state.is_tx_task_set() {
// Attempt to set the task
unsafe {
inner.tx_task.set_task(cx);
}
// Update the state
state = State::set_tx_task(&inner.state);
if state.is_closed() {
coop.made_progress();
return Ready(());
}
}
Pending
}
}
impl<T> Drop for Sender<T> {
fn drop(&mut self) {
if let Some(inner) = self.inner.as_ref() {
inner.complete();
#[cfg(all(tokio_unstable, feature = "tracing"))]
self.resource_span.in_scope(|| {
tracing::trace!(
target: "runtime::resource::state_update",
tx_dropped = true,
tx_dropped.op = "override",
)
});
}
}
}
impl<T> Receiver<T> {
/// Prevents the associated [`Sender`] handle from sending a value.
///
/// Any `send` operation which happens after calling `close` is guaranteed
/// to fail. After calling `close`, [`try_recv`] should be called to
/// receive a value if one was sent **before** the call to `close`
/// completed.
///
/// This function is useful to perform a graceful shutdown and ensure that a
/// value will not be sent into the channel and never received.
///
    /// `close` is a no-op if a message has already been received or the channel
/// is already closed.
///
/// [`Sender`]: Sender
/// [`try_recv`]: Receiver::try_recv
///
/// # Examples
///
/// Prevent a value from being sent
///
/// ```
/// use tokio::sync::oneshot;
/// use tokio::sync::oneshot::error::TryRecvError;
///
/// #[tokio::main]
/// async fn main() {
/// let (tx, mut rx) = oneshot::channel();
///
/// assert!(!tx.is_closed());
///
/// rx.close();
///
/// assert!(tx.is_closed());
/// assert!(tx.send("never received").is_err());
///
/// match rx.try_recv() {
/// Err(TryRecvError::Closed) => {}
/// _ => unreachable!(),
/// }
/// }
/// ```
///
/// Receive a value sent **before** calling `close`
///
/// ```
/// use tokio::sync::oneshot;
///
/// #[tokio::main]
/// async fn main() {
/// let (tx, mut rx) = oneshot::channel();
///
/// assert!(tx.send("will receive").is_ok());
///
/// rx.close();
///
/// let msg = rx.try_recv().unwrap();
/// assert_eq!(msg, "will receive");
/// }
/// ```
pub fn close(&mut self) {
if let Some(inner) = self.inner.as_ref() {
inner.close();
#[cfg(all(tokio_unstable, feature = "tracing"))]
self.resource_span.in_scope(|| {
tracing::trace!(
target: "runtime::resource::state_update",
rx_dropped = true,
rx_dropped.op = "override",
)
});
}
}
/// Attempts to receive a value.
///
/// If a pending value exists in the channel, it is returned. If no value
/// has been sent, the current task **will not** be registered for
/// future notification.
///
/// This function is useful to call from outside the context of an
/// asynchronous task.
///
/// Note that unlike the `poll` method, the `try_recv` method cannot fail
/// spuriously. Any send or close event that happens before this call to
/// `try_recv` will be correctly returned to the caller.
///
/// # Return
///
/// - `Ok(T)` if a value is pending in the channel.
/// - `Err(TryRecvError::Empty)` if no value has been sent yet.
/// - `Err(TryRecvError::Closed)` if the sender has dropped without sending
/// a value, or if the message has already been received.
///
/// # Examples
///
/// `try_recv` before a value is sent, then after.
///
/// ```
/// use tokio::sync::oneshot;
/// use tokio::sync::oneshot::error::TryRecvError;
///
/// #[tokio::main]
/// async fn main() {
/// let (tx, mut rx) = oneshot::channel();
///
/// match rx.try_recv() {
/// // The channel is currently empty
/// Err(TryRecvError::Empty) => {}
/// _ => unreachable!(),
/// }
///
/// // Send a value
/// tx.send("hello").unwrap();
///
/// match rx.try_recv() {
/// Ok(value) => assert_eq!(value, "hello"),
/// _ => unreachable!(),
/// }
/// }
/// ```
///
/// `try_recv` when the sender dropped before sending a value
///
/// ```
/// use tokio::sync::oneshot;
/// use tokio::sync::oneshot::error::TryRecvError;
///
/// #[tokio::main]
/// async fn main() {
/// let (tx, mut rx) = oneshot::channel::<()>();
///
/// drop(tx);
///
/// match rx.try_recv() {
/// // The channel will never receive a value.
/// Err(TryRecvError::Closed) => {}
/// _ => unreachable!(),
/// }
/// }
/// ```
pub fn try_recv(&mut self) -> Result<T, TryRecvError> {
let result = if let Some(inner) = self.inner.as_ref() {
let state = State::load(&inner.state, Acquire);
if state.is_complete() {
// SAFETY: If `state.is_complete()` returns true, then the
// `VALUE_SENT` bit has been set and the sender side of the
// channel will no longer attempt to access the inner
// `UnsafeCell`. Therefore, it is now safe for us to access the
// cell.
match unsafe { inner.consume_value() } {
Some(value) => {
#[cfg(all(tokio_unstable, feature = "tracing"))]
self.resource_span.in_scope(|| {
tracing::trace!(
target: "runtime::resource::state_update",
value_received = true,
value_received.op = "override",
)
});
Ok(value)
}
None => Err(TryRecvError::Closed),
}
} else if state.is_closed() {
Err(TryRecvError::Closed)
} else {
// Not ready, this does not clear `inner`
return Err(TryRecvError::Empty);
}
} else {
Err(TryRecvError::Closed)
};
self.inner = None;
result
}
/// Blocking receive to call outside of asynchronous contexts.
///
/// # Panics
///
/// This function panics if called within an asynchronous execution
/// context.
///
/// # Examples
///
/// ```
/// use std::thread;
/// use tokio::sync::oneshot;
///
/// #[tokio::main]
/// async fn main() {
/// let (tx, rx) = oneshot::channel::<u8>();
///
/// let sync_code = thread::spawn(move || {
/// assert_eq!(Ok(10), rx.blocking_recv());
/// });
///
/// let _ = tx.send(10);
/// sync_code.join().unwrap();
/// }
/// ```
#[track_caller]
#[cfg(feature = "sync")]
#[cfg_attr(docsrs, doc(alias = "recv_blocking"))]
pub fn blocking_recv(self) -> Result<T, RecvError> {
crate::future::block_on(self)
}
}
impl<T> Drop for Receiver<T> {
fn drop(&mut self) {
if let Some(inner) = self.inner.as_ref() {
inner.close();
#[cfg(all(tokio_unstable, feature = "tracing"))]
self.resource_span.in_scope(|| {
tracing::trace!(
target: "runtime::resource::state_update",
rx_dropped = true,
rx_dropped.op = "override",
)
});
}
}
}
impl<T> Future for Receiver<T> {
type Output = Result<T, RecvError>;
fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
// If `inner` is `None`, then `poll()` has already completed.
#[cfg(all(tokio_unstable, feature = "tracing"))]
let _res_span = self.resource_span.clone().entered();
#[cfg(all(tokio_unstable, feature = "tracing"))]
let _ao_span = self.async_op_span.clone().entered();
#[cfg(all(tokio_unstable, feature = "tracing"))]
let _ao_poll_span = self.async_op_poll_span.clone().entered();
let ret = if let Some(inner) = self.as_ref().get_ref().inner.as_ref() {
#[cfg(all(tokio_unstable, feature = "tracing"))]
let res = ready!(trace_poll_op!("poll_recv", inner.poll_recv(cx)))?;
#[cfg(any(not(tokio_unstable), not(feature = "tracing")))]
let res = ready!(inner.poll_recv(cx))?;
res
} else {
panic!("called after complete");
};
self.inner = None;
Ready(Ok(ret))
}
}
impl<T> Inner<T> {
fn complete(&self) -> bool {
let prev = State::set_complete(&self.state);
if prev.is_closed() {
return false;
}
if prev.is_rx_task_set() {
// TODO: Consume waker?
unsafe {
self.rx_task.with_task(Waker::wake_by_ref);
}
}
true
}
fn poll_recv(&self, cx: &mut Context<'_>) -> Poll<Result<T, RecvError>> {
ready!(crate::trace::trace_leaf(cx));
// Keep track of task budget
let coop = ready!(crate::runtime::coop::poll_proceed(cx));
// Load the state
let mut state = State::load(&self.state, Acquire);
if state.is_complete() {
coop.made_progress();
match unsafe { self.consume_value() } {
Some(value) => Ready(Ok(value)),
None => Ready(Err(RecvError(()))),
}
} else if state.is_closed() {
coop.made_progress();
Ready(Err(RecvError(())))
} else {
if state.is_rx_task_set() {
let will_notify = unsafe { self.rx_task.will_wake(cx) };
// Check if the task is still the same
if !will_notify {
// Unset the task
state = State::unset_rx_task(&self.state);
if state.is_complete() {
// Set the flag again so that the waker is released in drop
State::set_rx_task(&self.state);
coop.made_progress();
// SAFETY: If `state.is_complete()` returns true, then the
// `VALUE_SENT` bit has been set and the sender side of the
// channel will no longer attempt to access the inner
// `UnsafeCell`. Therefore, it is now safe for us to access the
// cell.
return match unsafe { self.consume_value() } {
Some(value) => Ready(Ok(value)),
None => Ready(Err(RecvError(()))),
};
} else {
unsafe { self.rx_task.drop_task() };
}
}
}
if !state.is_rx_task_set() {
// Attempt to set the task
unsafe {
self.rx_task.set_task(cx);
}
// Update the state
state = State::set_rx_task(&self.state);
if state.is_complete() {
coop.made_progress();
match unsafe { self.consume_value() } {
Some(value) => Ready(Ok(value)),
None => Ready(Err(RecvError(()))),
}
} else {
Pending
}
} else {
Pending
}
}
}
/// Called by `Receiver` to indicate that the value will never be received.
fn close(&self) {
let prev = State::set_closed(&self.state);
if prev.is_tx_task_set() && !prev.is_complete() {
unsafe {
self.tx_task.with_task(Waker::wake_by_ref);
}
}
}
/// Consumes the value. This function does not check `state`.
///
/// # Safety
///
/// Calling this method concurrently on multiple threads will result in a
/// data race. The `VALUE_SENT` state bit is used to ensure that only the
/// sender *or* the receiver will call this method at a given point in time.
/// If `VALUE_SENT` is not set, then only the sender may call this method;
/// if it is set, then only the receiver may call this method.
unsafe fn consume_value(&self) -> Option<T> {
self.value.with_mut(|ptr| (*ptr).take())
}
}
unsafe impl<T: Send> Send for Inner<T> {}
unsafe impl<T: Send> Sync for Inner<T> {}
fn mut_load(this: &mut AtomicUsize) -> usize {
this.with_mut(|v| *v)
}
impl<T> Drop for Inner<T> {
fn drop(&mut self) {
let state = State(mut_load(&mut self.state));
if state.is_rx_task_set() {
unsafe {
self.rx_task.drop_task();
}
}
if state.is_tx_task_set() {
unsafe {
self.tx_task.drop_task();
}
}
}
}
impl<T: fmt::Debug> fmt::Debug for Inner<T> {
fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
use std::sync::atomic::Ordering::Relaxed;
fmt.debug_struct("Inner")
.field("state", &State::load(&self.state, Relaxed))
.finish()
}
}
/// Indicates that a waker for the receiving task has been set.
///
/// # Safety
///
/// If this bit is not set, the `rx_task` field may be uninitialized.
const RX_TASK_SET: usize = 0b00001;
/// Indicates that a value has been stored in the channel's inner `UnsafeCell`.
///
/// # Safety
///
/// This bit controls which side of the channel is permitted to access the
/// `UnsafeCell`. If it is set, the `UnsafeCell` may ONLY be accessed by the
/// receiver. If this bit is NOT set, the `UnsafeCell` may ONLY be accessed by
/// the sender.
const VALUE_SENT: usize = 0b00010;
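/// Indicates that the receiving half of the channel has been closed (explicitly or by
/// dropping the `Receiver`), so a value will never be received.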
const CLOSED: usize = 0b00100;
/// Indicates that a waker for the sending task has been set.
///
/// # Safety
///
/// If this bit is not set, the `tx_task` field may be uninitialized.
const TX_TASK_SET: usize = 0b01000;
impl State {
fn new() -> State {
State(0)
}
fn is_complete(self) -> bool {
self.0 & VALUE_SENT == VALUE_SENT
}
fn set_complete(cell: &AtomicUsize) -> State {
// This method is a compare-and-swap loop rather than a fetch-or like
// other `set_$WHATEVER` methods on `State`. This is because we must
// check if the state has been closed before setting the `VALUE_SENT`
// bit.
//
// We don't want to set both the `VALUE_SENT` bit if the `CLOSED`
// bit is already set, because `VALUE_SENT` will tell the receiver that
// it's okay to access the inner `UnsafeCell`. Immediately after calling
// `set_complete`, if the channel was closed, the sender will _also_
// access the `UnsafeCell` to take the value back out, so if a
// `poll_recv` or `try_recv` call is occurring concurrently, both
// threads may try to access the `UnsafeCell` if we were to set the
// `VALUE_SENT` bit on a closed channel.
let mut state = cell.load(Ordering::Relaxed);
loop {
if State(state).is_closed() {
break;
}
// TODO: This could be `Release`, followed by an `Acquire` fence *if*
// the `RX_TASK_SET` flag is set. However, `loom` does not support
// fences yet.
match cell.compare_exchange_weak(
state,
state | VALUE_SENT,
Ordering::AcqRel,
Ordering::Acquire,
) {
Ok(_) => break,
Err(actual) => state = actual,
}
}
State(state)
}
fn is_rx_task_set(self) -> bool {
self.0 & RX_TASK_SET == RX_TASK_SET
}
fn set_rx_task(cell: &AtomicUsize) -> State {
let val = cell.fetch_or(RX_TASK_SET, AcqRel);
State(val | RX_TASK_SET)
}
fn unset_rx_task(cell: &AtomicUsize) -> State {
let val = cell.fetch_and(!RX_TASK_SET, AcqRel);
State(val & !RX_TASK_SET)
}
fn is_closed(self) -> bool {
self.0 & CLOSED == CLOSED
}
fn set_closed(cell: &AtomicUsize) -> State {
// Acquire because we want all later writes (attempting to poll) to be
// ordered after this.
let val = cell.fetch_or(CLOSED, Acquire);
State(val)
}
fn set_tx_task(cell: &AtomicUsize) -> State {
let val = cell.fetch_or(TX_TASK_SET, AcqRel);
State(val | TX_TASK_SET)
}
fn unset_tx_task(cell: &AtomicUsize) -> State {
let val = cell.fetch_and(!TX_TASK_SET, AcqRel);
State(val & !TX_TASK_SET)
}
fn is_tx_task_set(self) -> bool {
self.0 & TX_TASK_SET == TX_TASK_SET
}
fn as_usize(self) -> usize {
self.0
}
fn load(cell: &AtomicUsize, order: Ordering) -> State {
let val = cell.load(order);
State(val)
}
}
impl fmt::Debug for State {
fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt.debug_struct("State")
.field("is_complete", &self.is_complete())
.field("is_closed", &self.is_closed())
.field("is_rx_task_set", &self.is_rx_task_set())
.field("is_tx_task_set", &self.is_tx_task_set())
.finish()
}
}
| true | bb72c8dbdda3255677fecff2e8d246cefafb5f26 | Rust | shonenada/roundrobin-rs | /src/rr.rs | UTF-8 | 1,561 | 3.4375 | 3 | ["MIT"] | permissive |
use std::sync::{Arc, RwLock};
#[derive(Debug)]
pub struct Server {
url: String,
}
impl Server {
pub fn new(url: String) -> Server {
return Server { url };
}
}
#[derive(Debug)]
pub struct RoundRobinBalancer {
pub servers: Vec<Server>,
cur_idx: Arc<RwLock<usize>>,
}
impl RoundRobinBalancer {
pub fn new() -> RoundRobinBalancer {
return RoundRobinBalancer {
servers: vec![],
cur_idx: Arc::new(RwLock::new(0)),
};
}
pub fn insert_server(&mut self, server: Server) {
self.servers.push(server);
}
pub fn insert_url(&mut self, url: String) {
let server = Server::new(url);
self.insert_server(server);
}
    /// Returns the next server in round-robin order, advancing the shared index.
    /// Returns `None` if no servers have been registered.
    pub fn next(&self) -> Option<&Server> {
        if self.servers.is_empty() {
            return None;
        }
        let mut ci = self.cur_idx.write().unwrap();
        let s = self.servers.get(*ci);
        *ci = (*ci + 1) % self.servers.len();
        s
    }
}
#[cfg(test)]
mod tests {
use super::{RoundRobinBalancer, Server};
#[test]
fn test_simple_next() {
let url01 = "http://localhost:8081".to_string();
let url02 = "http://localhost:8082".to_string();
let server01 = Server::new(url01.clone());
let mut rr = RoundRobinBalancer::new();
rr.insert_server(server01);
rr.insert_url(url02.clone());
let r1 = rr.next().unwrap();
assert!(r1.url == url01.clone());
let r2 = rr.next().unwrap();
assert!(r2.url == url02.clone());
let r3 = rr.next().unwrap();
assert!(r3.url == url01.clone());
}
}
| true | 5323ef65d0c180b81b31239a8e16ff5ee36f5e67 | Rust | leoschwarz/musicbrainz_rust | /src/entities/event.rs | UTF-8 | 3,600 | 2.984375 | 3 | ["Apache-2.0"] | permissive |
use xpath_reader::{FromXml, FromXmlOptional, Error, Reader};
use crate::entities::{Mbid, ResourceOld};
use crate::entities::date::PartialDate;
enum_mb_xml_optional! {
pub enum EventType {
var Concert = "Concert",
var Festival = "Festival",
var LaunchEvent = "Launch event",
var ConventionExpo = "Convention/Expo",
var MasterclassClinic = "Masterclass/Clinic",
}
}
/// An organized event people can attend, these are generally live performances.
///
/// Additional information can be found in the [MusicBrainz
/// docs](https://musicbrainz.org/doc/Event)
#[derive(Clone, Debug, Eq, PartialEq)]
pub struct Event {
/// MBID of the entity in the MusicBrainz database.
pub mbid: Mbid,
/// The official name of the event or a descriptive name if the event
/// doesn't have an official name.
pub name: String,
    /// Alternative event names.
pub aliases: Vec<String>,
/// Describes what type of event this is exactly.
pub event_type: Option<EventType>,
/// List of songs played at the event.
///
/// This is provided in an extensive text format, for which parsing is not
/// yet implemented.
pub setlist: Option<String>,
/// Begin date of the event.
pub begin_date: PartialDate,
/// End date of the event.
pub end_date: Option<PartialDate>,
/// Additional disambiguation if there are multiple `Event`s with the same
/// name.
pub disambiguation: Option<String>,
/// Any additional free form annotation for this `Event`.
pub annotation: Option<String>,
}
impl ResourceOld for Event {
const NAME: &'static str = "event";
const INCL: &'static str = "aliases+annotation";
}
impl FromXml for Event {
fn from_xml<'d>(reader: &'d Reader<'d>) -> Result<Self, Error> {
Ok(Event {
mbid: reader.read(".//mb:event/@id")?,
name: reader.read(".//mb:event/mb:name")?,
aliases: reader.read(".//mb:event/mb:alias-list/mb:alias/text()")?,
event_type: reader.read(".//mb:event/@type")?,
setlist: reader.read(".//mb:event/mb:setlist")?,
begin_date: reader.read(".//mb:event/mb:life-span/mb:begin")?,
end_date: reader.read(".//mb:event/mb:life-span/mb:end")?,
disambiguation: reader.read(".//mb:event/mb:disambiguation")?,
annotation: reader.read(".//mb:event/mb:annotation/mb:text/text()")?,
})
}
}
#[cfg(test)]
mod tests {
use super::*;
use std::str::FromStr;
#[test]
fn read_1() {
let mbid = Mbid::from_str("6e2ab7d5-f340-4c41-99a3-c901733402b4").unwrap();
let event: Event = crate::util::test_utils::fetch_entity_old(&mbid).unwrap();
assert_eq!(event.mbid, mbid);
assert_eq!(event.name, "25. Wave-Gotik-Treffen".to_string());
assert_eq!(event.aliases, vec!["WGT 2016".to_string()]);
assert_eq!(event.event_type, Some(EventType::Festival));
assert_eq!(event.setlist, None);
assert_eq!(event.begin_date, "2016-05-13".parse().unwrap());
assert_eq!(event.end_date.unwrap(), "2016-05-16".parse().unwrap());
assert_eq!(event.disambiguation, None);
assert_eq!(event.annotation.unwrap().len(), 2233);
}
#[test]
fn read_2() {
let mbid = Mbid::from_str("9754f4dd-6fad-49b7-8f30-940c9af6b776").unwrap();
let event: Event = crate::util::test_utils::fetch_entity_old(&mbid).unwrap();
assert_eq!(event.event_type, Some(EventType::Concert));
assert_eq!(event.setlist.unwrap().len(), 225);
}
}
| true | 394980e2d7036784f813bd1c0488060debd27b8b | Rust | kengonakajima/snippets | /rust/hello/main.rs | UTF-8 | 261 | 2.796875 | 3 | [] | no_license |
use std::fs::File;
use std::io::prelude::*;
fn main() -> std::io::Result<()> {
let mut file = File::create("hoge_rs.txt")?;
for i in 0..1000000 {
        let s = format!("hoge:{}\n", i);
        // write_all ensures the whole buffer is written; a bare write() may write only part of it.
        file.write_all(&s.into_bytes())?;
// file.flush()?;
}
Ok(())
}
| true | aea263ba641672b02483ddcac5424d50267f96cc | Rust | znewman01/bellman-bignat | /src/util/bit.rs | UTF-8 | 6,473 | 2.953125 | 3 | ["Apache-2.0", "MIT"] | permissive |
// (mostly from franklin-crypto)
use sapling_crypto::bellman::pairing::ff::Field;
use sapling_crypto::bellman::pairing::Engine;
use sapling_crypto::bellman::{ConstraintSystem, LinearCombination, SynthesisError};
use sapling_crypto::circuit::boolean::Boolean;
use std::fmt::{self, Display, Formatter};
use OptionExt;
#[derive(Clone)]
/// A representation of a bit
pub struct Bit<E: Engine> {
/// The linear combination which constrain the value of the bit
pub bit: LinearCombination<E>,
/// The value of the bit (filled at witness-time)
pub value: Option<bool>,
}
#[derive(Clone)]
/// A representation of a bit-vector
pub struct Bitvector<E: Engine> {
/// The linear combination which constrain the values of the bits
pub bits: Vec<LinearCombination<E>>,
/// The value of the bits (filled at witness-time)
pub values: Option<Vec<bool>>,
}
impl<E: Engine> Bitvector<E> {
/// Reverse the order of the bits
pub fn reversed(mut self) -> Self {
self.values.as_mut().map(|v| v.reverse());
self.bits.reverse();
self
}
/// Keep only the first `n` bits.
pub fn truncate(mut self, n: usize) -> Self {
self.values.as_mut().map(|v| v.truncate(n));
self.bits.truncate(n);
self
}
pub fn get(&self, i: usize) -> Option<Bit<E>> {
self.bits.get(i).map(|lc| Bit {
bit: lc.clone(),
value: self.values.as_ref().map(|vs| vs[i].clone()),
})
}
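    /// Removes the first `i` bits of the vector (a shift by `i` positions toward the front).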
pub fn shr(mut self, i: usize) -> Self {
self.values.as_mut().map(|v| {
v.drain(0..i);
});
self.bits.drain(0..i);
self
}
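    /// Inserts `i` zero-valued bits at the front of the vector.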
pub fn shl(mut self, i: usize) -> Self {
self.values.as_mut().map(|v| {
v.splice(0..0, std::iter::repeat(false).take(i));
});
self.bits
.splice(0..0, std::iter::repeat(LinearCombination::zero()).take(i));
self
}
pub fn split_off(&mut self, n_bits: usize) -> Bitvector<E> {
let bits = self.bits.split_off(n_bits);
let values = self.values.as_mut().map(|vs| vs.split_off(n_bits));
Bitvector { bits, values }
}
pub fn pop(&mut self) -> Option<Bit<E>> {
if self.bits.len() > 0 {
Some(Bit::new(
self.bits.pop().unwrap(),
self.values.as_mut().map(|vs| vs.pop().unwrap()),
))
} else {
None
}
}
pub fn push(&mut self, mut b: Bit<E>) {
self.values
.as_mut()
.map(|vs| b.value.take().map(|v| vs.push(v)));
self.bits.push(b.bit);
}
pub fn insert(&mut self, i: usize, mut b: Bit<E>) {
self.values
.as_mut()
.map(|vs| b.value.take().map(|v| vs.insert(i, v)));
self.bits.insert(i, b.bit);
}
pub fn append(&mut self, mut other: Self) {
let ovs = other.values.take();
self.bits.extend(other.bits.into_iter());
self.values
.as_mut()
.map(|vs| ovs.map(|ovs| vs.extend(ovs.into_iter())));
}
pub fn into_bits(mut self) -> Vec<Bit<E>> {
let vs = self.values.take();
self.bits
.into_iter()
.enumerate()
.map(|(i, b)| Bit {
bit: b,
value: vs.as_ref().map(|vs| vs[i]),
})
.collect()
}
pub fn from_bits(bs: Vec<Bit<E>>) -> Self {
let mut bits = Vec::new();
let mut values = Some(Vec::new());
for mut b in bs {
let v = b.value.take();
bits.push(b.bit);
values = values.take().and_then(|mut vs| {
v.map(|v| {
vs.push(v);
vs
})
});
}
Self { bits, values }
}
}
impl<E: Engine> Bit<E> {
/// Allocate a variable in the constraint system which can only be a
/// boolean value.
pub fn alloc<CS>(mut cs: CS, value: Option<bool>) -> Result<Self, SynthesisError>
where
CS: ConstraintSystem<E>,
{
let var = cs.alloc(
|| "boolean",
|| {
if *value.grab()? {
Ok(E::Fr::one())
} else {
Ok(E::Fr::zero())
}
},
)?;
// Constrain: (1 - a) * a = 0
// This constrains a to be either 0 or 1.
cs.enforce(
|| "boolean constraint",
|lc| lc + CS::one() - var,
|lc| lc + var,
|lc| lc,
);
Ok(Self {
bit: LinearCombination::zero() + var,
value,
})
}
pub fn constrain_value<CS>(&self, mut cs: CS, value: bool)
where
CS: ConstraintSystem<E>,
{
cs.enforce(
|| format!("is {}", value),
|lc| lc,
|lc| lc,
|lc| {
if value {
lc + &self.bit - CS::one()
} else {
lc + &self.bit
}
},
);
}
pub fn new(bit: LinearCombination<E>, value: Option<bool>) -> Self {
Self { bit, value }
}
pub fn from_sapling<CS: ConstraintSystem<E>>(b: Boolean) -> Self {
Self::new(b.lc(CS::one(), E::Fr::one()), b.get_value())
}
pub fn not<CS: ConstraintSystem<E>>(&self) -> Self {
Self::new(
LinearCombination::zero() + CS::one() - &self.bit,
self.value.clone().map(|b| !b),
)
}
pub fn new_false<CS: ConstraintSystem<E>>() -> Self {
Self::new(LinearCombination::zero(), Some(false))
}
pub fn new_true<CS: ConstraintSystem<E>>() -> Self {
Self::new(LinearCombination::zero() + CS::one(), Some(true))
}
pub fn new_value<CS: ConstraintSystem<E>>(v: bool) -> Self {
if v {
Self::new_true::<CS>()
} else {
Self::new_false::<CS>()
}
}
}
impl<E: Engine> Display for Bitvector<E> {
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
match self.values.as_ref() {
Some(vs) => write!(
f,
"Bitvector({})",
vs.into_iter()
.map(|b| if *b { "1" } else { "0" })
.collect::<Vec<_>>()
.concat()
),
None => write!(f, "Bitvector(len {})", self.bits.len()),
}
}
}
| true | 539ed7aee4003c3f4a886ac1cea98ce4a2485164 | Rust | maximbaz/advents-of-code | /src/year2020/day18.rs | UTF-8 | 6,455 | 3.484375 | 3 | ["ISC"] | permissive |
use super::super::*;
use Expression::*;
use Operator::*;
pub struct Task;
impl Solution for Task {
type Input = Vec<Vec<char>>;
type Output = i64;
fn parse_input(&self, input: String) -> Self::Input {
input
.trim()
.lines()
.map(|line| line.chars().filter(|c| !c.is_whitespace()).collect())
.collect()
}
fn part1(&self, input: Self::Input) -> Self::Output {
solve(input, false)
}
fn part2(&self, input: Self::Input) -> Self::Output {
solve(input, true)
}
}
fn solve(homework: Vec<Vec<char>>, is_advanced: bool) -> i64 {
homework
.iter()
.filter_map(|chars| Parser { chars, is_advanced }.run())
.map(|expr| expr.eval())
.sum()
}
#[derive(Copy, Clone, PartialEq)]
enum Operator {
Sum,
Product,
}
enum Expression {
Value(i64),
Parenthesis(Box<Expression>),
Binary(Operator, Box<Expression>, Box<Expression>),
}
impl Expression {
fn eval(&self) -> i64 {
match self {
Value(n) => *n,
Parenthesis(a) => a.eval(),
Binary(Product, a, b) => a.eval() * b.eval(),
Binary(Sum, a, b) => a.eval() + b.eval(),
}
}
}
struct Parser<'a> {
chars: &'a [char],
is_advanced: bool,
}
impl<'a> Parser<'a> {
fn run(&self) -> Option<Expression> {
self.parse_expression(0).map(|(_, expr)| expr)
}
fn parse_expression(&self, pos: usize) -> Option<(usize, Expression)> {
self.parse_start(pos)
.and_then(|(pos, left)| match self.parse_operator(pos) {
Some((pos, op)) => self
.parse_expression(pos)
.and_then(|(pos, right)| Some((pos, self.combine(op, left, right)))),
None => Some((pos, left)),
})
}
fn parse_start(&self, pos: usize) -> Option<(usize, Expression)> {
match self.parse_open_parenthesis(pos) {
Some(pos) => self
.parse_expression(pos)
.and_then(|(pos, expr)| self.parse_close_parenthesis(expr, pos)),
None => self.parse_value(pos),
}
}
fn parse_open_parenthesis(&self, pos: usize) -> Option<usize> {
self.chars.get(pos).filter(|&&v| v == '(').map(|_| pos + 1)
}
fn parse_close_parenthesis(&self, expr: Expression, pos: usize) -> Option<(usize, Expression)> {
self.chars
.get(pos)
.filter(|&&v| v == ')')
.map(|_| (pos + 1, Parenthesis(Box::new(expr))))
}
fn parse_value(&self, pos: usize) -> Option<(usize, Expression)> {
let string = self
.chars
.iter()
.skip(pos)
.take_while(|&&c| c.is_digit(10))
.collect::<String>();
string.parse().ok().map(|v| (pos + string.len(), Value(v)))
}
fn parse_operator(&self, pos: usize) -> Option<(usize, Operator)> {
self.chars
.get(pos)
.filter(|&&v| v == '+' || v == '*')
.map(|&op| (pos + 1, if op == '+' { Sum } else { Product }))
}
fn combine(&self, op: Operator, left: Expression, right: Expression) -> Expression {
match right {
Binary(op2, left2, right2) => {
if self.has_lower_precedence(op, op2) {
Binary(op, Box::new(left), Box::new(Binary(op2, left2, right2)))
} else {
Binary(op2, Box::new(self.combine(op, left, *left2)), right2)
}
}
_ => Binary(op, Box::new(left), Box::new(right)),
}
}
fn has_lower_precedence(&self, op1: Operator, op2: Operator) -> bool {
self.is_advanced && op1 == Product && op2 == Sum
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_part1() {
assert_eq!(
71,
Task.part1(Task.parse_input("1 + 2 * 3 + 4 * 5 + 6".to_string()))
);
assert_eq!(
51,
Task.part1(Task.parse_input("1 + (2 * 3) + (4 * (5 + 6))".to_string()))
);
assert_eq!(
26,
Task.part1(Task.parse_input("2 * 3 + (4 * 5)".to_string()))
);
assert_eq!(
437,
Task.part1(Task.parse_input("5 + (8 * 3 + 9 + 3 * 4 * 3)".to_string()))
);
assert_eq!(
12240,
Task.part1(Task.parse_input("5 * 9 * (7 * 3 * 3 + 9 * 3 + (8 + 6 * 4))".to_string()))
);
assert_eq!(
13632,
Task.part1(
Task.parse_input("((2 + 4 * 9) * (6 + 9 * 8 + 6) + 6) + 2 + 4 * 2".to_string())
)
);
assert_eq!(
71 + 51 + 26 + 437 + 12240 + 13632,
Task.part1(
Task.parse_input(
"
1 + 2 * 3 + 4 * 5 + 6
1 + (2 * 3) + (4 * (5 + 6))
2 * 3 + (4 * 5)
5 + (8 * 3 + 9 + 3 * 4 * 3)
5 * 9 * (7 * 3 * 3 + 9 * 3 + (8 + 6 * 4))
((2 + 4 * 9) * (6 + 9 * 8 + 6) + 6) + 2 + 4 * 2
"
.to_string()
)
)
);
}
#[test]
fn test_part2() {
assert_eq!(
231,
Task.part2(Task.parse_input("1 + 2 * 3 + 4 * 5 + 6".to_string()))
);
assert_eq!(
51,
Task.part2(Task.parse_input("1 + (2 * 3) + (4 * (5 + 6))".to_string()))
);
assert_eq!(
46,
Task.part2(Task.parse_input("2 * 3 + (4 * 5)".to_string()))
);
assert_eq!(
1445,
Task.part2(Task.parse_input("5 + (8 * 3 + 9 + 3 * 4 * 3)".to_string()))
);
assert_eq!(
669060,
Task.part2(Task.parse_input("5 * 9 * (7 * 3 * 3 + 9 * 3 + (8 + 6 * 4))".to_string()))
);
assert_eq!(
23340,
Task.part2(
Task.parse_input("((2 + 4 * 9) * (6 + 9 * 8 + 6) + 6) + 2 + 4 * 2".to_string())
)
);
assert_eq!(
231 + 51 + 46 + 1445 + 669060 + 23340,
Task.part2(
Task.parse_input(
"
1 + 2 * 3 + 4 * 5 + 6
1 + (2 * 3) + (4 * (5 + 6))
2 * 3 + (4 * 5)
5 + (8 * 3 + 9 + 3 * 4 * 3)
5 * 9 * (7 * 3 * 3 + 9 * 3 + (8 + 6 * 4))
((2 + 4 * 9) * (6 + 9 * 8 + 6) + 6) + 2 + 4 * 2
"
.to_string()
)
)
);
}
}
| true |
02a533a78b511c7c2e1c0a9d434327b4817257d4
|
Rust
|
CBenoit/advent-of-code
|
/2020/src/bin/2-1.rs
|
UTF-8
| 1,334 | 3.421875 | 3 |
[] |
no_license
|
use std::io::{self, BufRead};
fn main() -> Result<(), Box<dyn std::error::Error>> {
let stdin = io::stdin();
let stdin = stdin.lock();
let nb_valids = stdin.lines()
.filter_map(Result::ok)
.filter_map(Line::new)
.filter(Line::is_valid)
.count();
println!("{}", nb_valids);
Ok(())
}
struct Policy {
letter: char,
min: usize,
max: usize,
}
impl Policy {
fn from_str(s: &str) -> Option<Self> {
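        // Expects the policy part of a line such as "1-3 a: abcde", i.e. "<min>-<max> <letter>".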
let dash = s.find('-')?;
let space = s.find(' ')?;
let min = s[..dash].parse().ok()?;
let max = s[dash+1..space].parse().ok()?;
        let letter = s[space+1..].chars().next()?;
Some(Self {
letter,
min,
max,
})
}
}
struct Line {
policy: Policy,
password: String,
}
impl Line {
fn new(line: String) -> Option<Self> {
let sep = line.find(":")?;
let policy = Policy::from_str(&line[..sep])?;
let password = line[sep+1..].to_owned();
Some(Self {
policy,
password,
})
}
fn is_valid(&self) -> bool {
let count = self.password
.chars()
.filter(|c| *c == self.policy.letter)
.count();
count >= self.policy.min && count <= self.policy.max
}
}
| true |
86e35c4e8d14ff037f591e5c52422e6ece684d4d
|
Rust
|
liuzl/rust_misc
|
/6_struct.rs
|
UTF-8
| 952 | 3.53125 | 4 |
[] |
no_license
|
#[derive(Debug)]
struct Person<'a> {
name: &'a str,
age: u8,
}
struct Nil;
struct Pair(i32, f32);
struct Point {
x: f32,
y: f32,
}
#[allow(dead_code)]
struct Rectangle {
top_left: Point,
bottom_right: Point,
}
fn main() {
let name = "RUC";
let age = 81;
let ruc = Person {name, age};
println!("{:?}", ruc);
let point: Point = Point {x:10.3, y:0.4};
println!("point coordinates: ({}, {})", point.x, point.y);
let bottom_right = Point {x:5.2, ..point};
println!("second point: ({}, {})", bottom_right.x, bottom_right.y);
let Point {x: abc, y: xyz} = point;
let _rec = Rectangle {
top_left: Point {x: abc, y: xyz},
bottom_right: bottom_right,
};
let _nil = Nil;
let pair = Pair(1, 0.1);
println!("pair contains {:?} and {:?}", pair.0, pair.1);
let Pair(integer, decimal) = pair;
println!("pair contains {:?} and {:?}", integer, decimal);
}
| true |
c113a71a6f1927d60615ce472831a41dd4b080ff
|
Rust
|
neovide/neovide
|
/src/editor/window.rs
|
UTF-8
| 11,246 | 2.703125 | 3 |
[
"MIT"
] |
permissive
|
use std::{collections::HashMap, sync::Arc};
use log::warn;
use unicode_segmentation::UnicodeSegmentation;
use crate::{
bridge::GridLineCell,
editor::{grid::CharacterGrid, style::Style, AnchorInfo, DrawCommand, DrawCommandBatcher},
renderer::{LineFragment, WindowDrawCommand},
};
pub enum WindowType {
Editor,
Message,
}
pub struct Window {
grid_id: u64,
grid: CharacterGrid,
pub window_type: WindowType,
pub anchor_info: Option<AnchorInfo>,
grid_position: (f64, f64),
draw_command_batcher: Arc<DrawCommandBatcher>,
}
impl Window {
pub fn new(
grid_id: u64,
window_type: WindowType,
anchor_info: Option<AnchorInfo>,
grid_position: (f64, f64),
grid_size: (u64, u64),
draw_command_batcher: Arc<DrawCommandBatcher>,
) -> Window {
let window = Window {
grid_id,
grid: CharacterGrid::new(grid_size),
window_type,
anchor_info,
grid_position,
draw_command_batcher,
};
window.send_updated_position();
window
}
fn send_command(&self, command: WindowDrawCommand) {
self.draw_command_batcher
.queue(DrawCommand::Window {
grid_id: self.grid_id,
command,
})
.ok();
}
fn send_updated_position(&self) {
self.send_command(WindowDrawCommand::Position {
grid_position: self.grid_position,
grid_size: (self.grid.width, self.grid.height),
floating_order: self.anchor_info.clone().map(|anchor| anchor.sort_order),
});
}
pub fn get_cursor_grid_cell(
&self,
window_left: u64,
window_top: u64,
) -> (String, Option<Arc<Style>>, bool) {
let grid_cell = match self.grid.get_cell(window_left, window_top) {
Some((character, style)) => (character.clone(), style.clone()),
_ => (' '.to_string(), None),
};
let double_width = match self.grid.get_cell(window_left + 1, window_top) {
Some((character, _)) => character.is_empty(),
_ => false,
};
(grid_cell.0, grid_cell.1, double_width)
}
pub fn get_width(&self) -> u64 {
self.grid.width
}
pub fn get_height(&self) -> u64 {
self.grid.height
}
pub fn get_grid_position(&self) -> (f64, f64) {
self.grid_position
}
pub fn position(
&mut self,
anchor_info: Option<AnchorInfo>,
grid_size: (u64, u64),
grid_position: (f64, f64),
) {
self.grid.resize(grid_size);
self.anchor_info = anchor_info;
self.grid_position = grid_position;
self.send_updated_position();
self.redraw();
}
pub fn resize(&mut self, new_size: (u64, u64)) {
self.grid.resize(new_size);
self.send_updated_position();
self.redraw();
}
fn modify_grid(
&mut self,
row_index: u64,
column_pos: &mut u64,
cell: GridLineCell,
defined_styles: &HashMap<u64, Arc<Style>>,
previous_style: &mut Option<Arc<Style>>,
) {
// Get the defined style from the style list.
let style = match cell.highlight_id {
Some(0) => None,
Some(style_id) => defined_styles.get(&style_id).cloned(),
None => previous_style.clone(),
};
// Compute text.
let mut text = cell.text;
if let Some(times) = cell.repeat {
text = text.repeat(times as usize);
}
// Insert the contents of the cell into the grid.
if text.is_empty() {
if let Some(cell) = self.grid.get_cell_mut(*column_pos, row_index) {
*cell = (text, style.clone());
}
*column_pos += 1;
} else {
for character in text.graphemes(true) {
if let Some(cell) = self.grid.get_cell_mut(*column_pos, row_index) {
*cell = (character.to_string(), style.clone());
}
*column_pos += 1;
}
}
*previous_style = style;
}
// Build a line fragment for the given row starting from current_start up until the next style
// change or double width character.
fn build_line_fragment(&self, row_index: u64, start: u64) -> (u64, LineFragment) {
let row = self.grid.row(row_index).unwrap();
let (_, style) = &row[start as usize];
let mut text = String::new();
let mut width = 0;
for possible_end_index in start..self.grid.width {
let (character, possible_end_style) = &row[possible_end_index as usize];
// Style doesn't match. Draw what we've got.
if style != possible_end_style {
break;
}
width += 1;
// The previous character is double width, so send this as its own draw command.
if character.is_empty() {
break;
}
// Add the grid cell to the cells to render.
text.push_str(character);
}
let line_fragment = LineFragment {
text,
window_left: start,
window_top: row_index,
width,
style: style.clone(),
};
(start + width, line_fragment)
}
// Redraw line by calling build_line_fragment starting at 0
// until current_start is greater than the grid width and sending the resulting
// fragments as a batch.
fn redraw_line(&self, row: u64) {
let mut current_start = 0;
let mut line_fragments = Vec::new();
while current_start < self.grid.width {
let (next_start, line_fragment) = self.build_line_fragment(row, current_start);
current_start = next_start;
line_fragments.push(line_fragment);
}
self.send_command(WindowDrawCommand::DrawLine(line_fragments));
}
pub fn draw_grid_line(
&mut self,
row: u64,
column_start: u64,
cells: Vec<GridLineCell>,
defined_styles: &HashMap<u64, Arc<Style>>,
) {
let mut previous_style = None;
if row < self.grid.height {
let mut column_pos = column_start;
for cell in cells {
self.modify_grid(
row,
&mut column_pos,
cell,
defined_styles,
&mut previous_style,
);
}
// Due to the limitations of the current rendering strategy, some underlines get
// clipped by the line below. To mitigate that, we redraw the adjacent lines whenever
// an individual line is redrawn. Unfortunately, some clipping still happens.
// TODO: figure out how to solve this
if row < self.grid.height - 1 {
self.redraw_line(row + 1);
}
self.redraw_line(row);
if row > 0 {
self.redraw_line(row - 1);
}
} else {
warn!("Draw command out of bounds");
}
}
pub fn scroll_region(
&mut self,
top: u64,
bottom: u64,
left: u64,
right: u64,
rows: i64,
cols: i64,
) {
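        // Copy rows in an order that never overwrites a source row before it is read:
        // top-to-bottom when rows > 0, bottom-to-top otherwise. The two named locals
        // exist only so that both concrete range types live long enough to be borrowed
        // as a `&mut dyn Iterator`.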
let mut top_to_bottom;
let mut bottom_to_top;
let y_iter: &mut dyn Iterator<Item = i64> = if rows > 0 {
top_to_bottom = (top as i64 + rows)..bottom as i64;
&mut top_to_bottom
} else {
bottom_to_top = (top as i64..(bottom as i64 + rows)).rev();
&mut bottom_to_top
};
self.send_command(WindowDrawCommand::Scroll {
top,
bottom,
left,
right,
rows,
cols,
});
// Scrolls must not only translate the rendered texture, but also must move the grid data
// accordingly so that future renders work correctly.
for y in y_iter {
let dest_y = y - rows;
let mut cols_left;
let mut cols_right;
if dest_y >= 0 && dest_y < self.grid.height as i64 {
let x_iter: &mut dyn Iterator<Item = i64> = if cols > 0 {
cols_left = (left as i64 + cols)..right as i64;
&mut cols_left
} else {
cols_right = (left as i64..(right as i64 + cols)).rev();
&mut cols_right
};
for x in x_iter {
let dest_x = x - cols;
let cell_data = self.grid.get_cell(x as u64, y as u64).cloned();
if let Some(cell_data) = cell_data {
if let Some(dest_cell) =
self.grid.get_cell_mut(dest_x as u64, dest_y as u64)
{
*dest_cell = cell_data;
}
}
}
}
}
}
pub fn clear(&mut self) {
self.grid.clear();
self.send_command(WindowDrawCommand::Clear);
}
pub fn redraw(&self) {
self.send_command(WindowDrawCommand::Clear);
// Draw the lines from the bottom up so that underlines don't get overwritten by the line
// below.
for row in (0..self.grid.height).rev() {
self.redraw_line(row);
}
}
pub fn hide(&self) {
self.send_command(WindowDrawCommand::Hide);
}
pub fn show(&self) {
self.send_command(WindowDrawCommand::Show);
}
pub fn close(&self) {
self.send_command(WindowDrawCommand::Close);
}
pub fn update_viewport(&self, scroll_delta: f64) {
self.send_command(WindowDrawCommand::Viewport { scroll_delta });
}
}
#[cfg(test)]
mod tests {
use std::collections::HashMap;
use super::*;
use crate::event_aggregator::EVENT_AGGREGATOR;
#[test]
fn window_separator_modifies_grid_and_sends_draw_command() {
let mut draw_command_receiver = EVENT_AGGREGATOR.register_event::<Vec<DrawCommand>>();
let draw_command_batcher = Arc::new(DrawCommandBatcher::new());
let mut window = Window::new(
1,
WindowType::Editor,
None,
(0.0, 0.0),
(114, 64),
draw_command_batcher.clone(),
);
draw_command_batcher.send_batch();
draw_command_receiver
.try_recv()
.expect("Could not receive commands");
window.draw_grid_line(
1,
70,
vec![GridLineCell {
text: "|".to_owned(),
highlight_id: None,
repeat: None,
}],
&HashMap::new(),
);
assert_eq!(window.grid.get_cell(70, 1), Some(&("|".to_owned(), None)));
draw_command_batcher.send_batch();
let sent_commands = draw_command_receiver
.try_recv()
.expect("Could not receive commands");
assert!(!sent_commands.is_empty());
}
}
| true |
204856e91414983ba04f41eee04df151580104a4
|
Rust
|
nenad1002/advent-of-code-2020-rust
|
/Day11/src/main.rs
|
UTF-8
| 4,210 | 3.171875 | 3 |
[] |
no_license
|
use std::fs::File;
use std::io::{self, BufRead};
use std::path::Path;
fn main() {
let mut board = read_input();
println!("{:?}", part_2_solution(&mut board));
}
fn part_1_solution(board: &mut Vec<String>) -> i32 {
let neigh = [
[1, 0],
[0, 1],
[-1, 0],
[0, -1],
[-1, -1],
[1, -1],
[-1, 1],
[1, 1],
];
let mut board_copy = vec![];
let mut res = 0;
let mut change = false;
for i in 0..board.len() {
let mut row = "".to_string();
for j in 0..board[0].len() {
let mut count_occupied = 0;
for k in 0..neigh.len() {
//println!("{:?}", board[i].as_bytes()[j] as char);
let new_i = i as isize + neigh[k][0];
let new_j = j as isize + neigh[k][1];
if new_i < 0
|| new_j < 0
|| new_i >= board.len() as isize
|| new_j >= board[0].len() as isize
{
continue;
}
let new_i = new_i as usize;
let new_j = new_j as usize;
if board[new_i].as_bytes()[new_j] as char == '#' {
count_occupied += 1;
}
}
let c = board[i].as_bytes()[j] as char;
if c == '#' && count_occupied >= 4 {
row.push('L');
res += 1;
change = true;
} else if c == 'L' && count_occupied == 0 {
row.push('#');
change = true;
} else {
if c == '#' {
res += 1;
}
row.push(c);
}
}
board_copy.push(row.clone());
row.clear();
}
if change {
return part_1_solution(&mut board_copy);
}
res
}
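// Part 2: instead of the eight adjacent cells, each seat looks along the eight
// directions until the first seat (occupied or empty) becomes visible, and an
// occupied seat empties only when five or more occupied seats are visible.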
fn part_2_solution(board: &mut Vec<String>) -> i32 {
let neigh = [
[1, 0],
[0, 1],
[-1, 0],
[0, -1],
[-1, -1],
[1, -1],
[-1, 1],
[1, 1],
];
let mut board_copy = vec![];
let mut res = 0;
let mut change = false;
for i in 0..board.len() {
let mut row = "".to_string();
for j in 0..board[0].len() {
let mut count_occupied = 0;
for k in 0..neigh.len() {
//println!("{:?}", board[i].as_bytes()[j] as char);
let mut new_i = i as isize + neigh[k][0];
let mut new_j = j as isize + neigh[k][1];
while new_i >= 0
&& new_j >= 0
&& new_i < board.len() as isize
&& new_j < board[0].len() as isize
{
let c = board[new_i as usize].as_bytes()[new_j as usize] as char;
if c == '#' || c == 'L' {
if c == '#' {
count_occupied += 1;
}
break;
}
new_i += neigh[k][0];
new_j += neigh[k][1];
}
}
let c = board[i].as_bytes()[j] as char;
if c == '#' && count_occupied >= 5 {
row.push('L');
res += 1;
change = true;
} else if c == 'L' && count_occupied == 0 {
row.push('#');
change = true;
} else {
if c == '#' {
res += 1;
}
row.push(c);
}
}
board_copy.push(row.clone());
row.clear();
}
if change {
return part_2_solution(&mut board_copy);
}
res
}
fn read_input() -> Vec<String> {
let mut res = vec![];
if let Ok(lines) = read_lines("./input.txt") {
for line in lines {
if let Ok(l) = line {
res.push(l);
}
}
}
res
}
fn read_lines<P>(filename: P) -> io::Result<io::Lines<io::BufReader<File>>>
where
P: AsRef<Path>,
{
let file = File::open(filename)?;
Ok(io::BufReader::new(file).lines())
}
| true |
d2bace3f44993dcb3ebf4c6354f8bcbbbcb6f149
|
Rust
|
pismute/exercism
|
/rust/forth/src/lib.rs
|
UTF-8
| 7,280 | 3.078125 | 3 |
[] |
no_license
|
#![feature(if_let_guard)]
use std::collections::HashMap;
pub type Value = i32;
pub type Result = std::result::Result<(), Error>;
type Stack = Vec<Value>;
type Words = HashMap<String, Vec<Term>>;
type TermResult = std::result::Result<(), (Term, Error)>;
#[derive(Clone, Debug)]
enum Term {
Var(Box<Word>),
Val(Value),
Op(String),
InvalidWord,
}
#[derive(Clone, Debug)]
struct Word {
name: String,
terms: Vec<Term>,
}
mod parser {
use super::{Term, Word};
use nom::{
bytes::complete::*, character::complete::*, multi::*, sequence::*, AsChar, IResult, Parser,
};
fn value(i: &str) -> IResult<&str, Term> {
i32.map(Term::Val).parse(i)
}
fn op_name(i: &str) -> IResult<&str, &str> {
take_while1(|x: char| x.is_alpha() || x == '+' || x == '*' || x == '/' || x == '-').parse(i)
}
fn op(i: &str) -> IResult<&str, Term> {
tag("+")
.or(tag("-"))
.or(tag("*"))
.or(tag("/"))
.or(op_name)
.map(|x: &str| Term::Op(x.to_lowercase()))
.parse(i)
}
fn word(i: &str) -> IResult<&str, Term> {
let open = pair(tag(":"), space1);
let name_terms = separated_pair(op_name, space1, separated_list1(space1, value.or(op)));
let close = pair(space1, tag(";"));
delimited(open, name_terms, close)
.map(|(name, terms)| {
let word = Word {
name: name.to_lowercase(),
terms,
};
Term::Var(Box::new(word))
})
.parse(i)
}
fn undefined(i: &str) -> IResult<&str, Term> {
Ok((i, Term::InvalidWord))
}
pub(super) fn term(i: &str) -> IResult<&str, Term> {
value.or(op).or(word).or(undefined).parse(i)
}
// complete combinators are used, because streaming combinators return 'Incomplete(Size(1))'.
// streaming combinators need a complete maker need in input.
// a input "1 + 2 + 3" would return 'Incomplete(Size(1)) in streaming combinators.
// a input "1 + 2 + 3;" can be complete with 'many_until' combinator in steaming combinators.
pub(super) fn terms(i: &str) -> IResult<&str, Vec<Term>> {
separated_list0(space1, term).parse(i)
}
// note: 'iterator' combinator.
// It is a great combinator to parse lazily, but it is not enough.
//
// 1. this combinator is applicable only input has repetition of one pattern.
// ex) '1_2_3_', iterator(input, pair(number, char('_'))), This is repetition of 'number_'.
//
// 2. To build a complete lazy parser, it seems to need many iterators like 'separated_iter', 'iter_until'... and so on.
}
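// A minimal sketch (not part of the original exercise solution) of how the private
// parser module above might be exercised; it assumes the nom-style combinators behave
// as imported and only checks the number of parsed terms, since `Term` does not
// implement `PartialEq`.
#[cfg(test)]
mod parser_sketch {
    use super::parser;

    #[test]
    fn parses_values_and_operators() {
        let (rest, terms) = parser::terms("1 2 dup +").expect("should parse");
        assert_eq!(rest, "");
        assert_eq!(terms.len(), 4);
    }
}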
pub struct Forth {
words: Words,
stack: Stack,
}
#[derive(Debug, PartialEq)]
pub enum Error {
DivisionByZero,
StackUnderflow,
UnknownWord,
InvalidWord,
}
impl Forth {
pub fn new() -> Self {
Forth {
words: HashMap::new(),
stack: vec![],
}
}
pub fn stack(&self) -> &[Value] {
&self.stack
}
fn binary<F: FnOnce(Value, Value, &mut Stack) -> Result>(stack: &mut Stack, f: F) -> Result {
if stack.len() < 2 {
// to keep values in stack on error;
Err(Error::StackUnderflow)
} else {
stack
.pop()
.zip(stack.pop())
.ok_or_else(|| Error::StackUnderflow) // won't be error
.and_then(|(x, y)| f(y, x, stack))
}
}
fn unary<F: FnOnce(Value, &mut Stack) -> Result>(stack: &mut Stack, f: F) -> Result {
if stack.len() < 1 {
// to keep values in stack on error;
Err(Error::StackUnderflow)
} else {
stack
.pop()
.ok_or_else(|| Error::StackUnderflow)
.and_then(|x| f(x, stack))
}
}
fn eval_term(words: &mut Words, stack: &mut Stack, term: Term) -> TermResult {
fn with_term(t: Term) -> impl FnOnce(Error) -> (Term, Error) {
|err| (t, err)
}
match term {
Term::Var(word) => {
let mut new_stack: Stack = vec![];
let mut terms_iter = word.terms.into_iter();
match Self::eval_iter(words, &mut new_stack, &mut terms_iter) {
Ok(_) => {
// fully evaluated
let terms: Vec<Term> = new_stack.into_iter().map(Term::Val).collect();
words.insert(word.name, terms);
Ok(())
}
Err((last, Error::StackUnderflow)) => {
// partially evaluated
let terms: Vec<Term> = new_stack
.into_iter()
.map(Term::Val)
.chain(std::iter::once(last))
.chain(terms_iter)
.collect();
words.insert(word.name, terms);
Ok(())
}
Err(x) => Err(x),
}
}
Term::Val(x) => Ok(stack.push(x)),
Term::Op(ref op) => match op.as_str() {
_ if let Some(ys) = words.get(op) => {
let mut iter = ys.clone().into_iter();
Self::eval_iter(words, stack, &mut iter)
},
"/" => Self::binary(stack, |x, y, stack| {
if y != 0 {
Ok(stack.push(x / y))
} else {
Err(Error::DivisionByZero)
}
}).map_err(with_term(term.clone())),
"+" => Self::binary(stack, |x, y, stack| Ok(stack.push(x + y))).map_err(with_term(term)),
"*" => Self::binary(stack, |x, y, stack| Ok(stack.push(x * y))).map_err(with_term(term)),
"-" => Self::binary(stack, |x, y, stack| Ok(stack.push(x - y))).map_err(with_term(term)),
"dup" => Self::unary(stack, |x, stack| Ok(stack.extend_from_slice(&[x, x]))).map_err(with_term(term)),
"drop" => Self::unary(stack, |_, _| Ok(())).map_err(with_term(term)),
"swap" => Self::binary(stack, |x, y, stack| Ok(stack.extend_from_slice(&[y, x]))).map_err(with_term(term)),
"over" => Self::binary(stack, |x, y, stack| Ok(stack.extend_from_slice(&[x, y, x]))).map_err(with_term(term)),
_ => Err((term, Error::UnknownWord)),
},
Term::InvalidWord => Err((term, Error::InvalidWord)),
}
}
// don't consume iterator if it failed to eagerly evaluate word.
fn eval_iter(
words: &mut Words,
stack: &mut Stack,
terms: &mut impl Iterator<Item = Term>,
) -> TermResult {
terms.try_for_each(|x| Self::eval_term(words, stack, x))
}
pub fn eval(&mut self, input: &str) -> Result {
let (_, terms) = parser::terms(input).unwrap();
let iter = terms.into_iter();
Self::eval_iter(&mut self.words, &mut self.stack, &mut iter.into_iter()).map_err(|(_, x)| x)
}
}
| true |
d33cb87b6580dd859782a425293da290a80c4b5f
|
Rust
|
uplol/chooch
|
/src/main.rs
|
UTF-8
| 5,516 | 2.609375 | 3 |
[] |
no_license
|
use std::{net::IpAddr, path::PathBuf};
use chooch::Choocher;
use futures::StreamExt;
use hyper::Uri;
use indicatif::{ProgressBar, ProgressStyle};
use rand::Rng;
use structopt::StructOpt;
use tokio::io::AsyncWriteExt;
fn parse_bytes(src: &str) -> Result<usize, &'static str> {
bytefmt::parse(src).map(|n| n as usize)
}
#[derive(Debug, StructOpt)]
#[structopt(
name = "chooch",
about = "Downloads files over HTTP using multiple streams"
)]
struct Opt {
#[structopt(name = "url", help = "The URL you wish to download")]
url: Uri,
#[structopt(name = "output", help = "The output destination for this download")]
output: PathBuf,
#[structopt(long = "chunk-size", short, default_value = "32MB", parse(try_from_str = parse_bytes))]
chunk_size: usize,
#[structopt(long = "workers", short, default_value = "6")]
worker_count: usize,
#[structopt(
long = "force-overwrite",
short = "f",
help = "Overwrites existing output file if it already exists"
)]
force_overwrite: bool,
#[structopt(
long = "skip-prealloc",
short = "s",
help = "Skips the pre-allocation of the target file"
)]
skip_prealloc: bool,
#[structopt(
long = "bind-ip",
short = "ip",
help = "Sets the IP address used to make outgoing connections."
)]
bind_ip: Option<IpAddr>,
#[structopt(
long = "no-interactive",
short = "n",
help = "Turns off the progress bar in favor of logging messages to stdout."
)]
no_interactive: bool,
}
#[tokio::main]
async fn main() -> anyhow::Result<()> {
let opt = Opt::from_args();
let choocher = Choocher::new(opt.url, opt.chunk_size, opt.worker_count, opt.bind_ip);
let (content_length, mut chunks) = choocher.chunks().await?;
let real_path = opt.output;
let tmp_path = create_temp_path(&real_path, opt.force_overwrite)?;
println!("final path: {}", &real_path.to_str().unwrap());
println!("temp path: {}", &tmp_path.to_string_lossy());
let mut output_file = tokio::fs::OpenOptions::new()
.write(true)
.create_new(true)
.open(&tmp_path)
.await?;
let skip_prealloc = opt.skip_prealloc;
let no_interactive = opt.no_interactive;
let tmp_path_filename = tmp_path
.clone()
.file_name()
.unwrap()
.to_string_lossy()
.to_string();
let task_res = tokio::spawn(async move {
if !skip_prealloc {
println!(
"preallocating file ({})",
bytefmt::format(content_length as _)
);
output_file.set_len(content_length as _).await?;
}
let mut bytes_written = 0;
let bar = setup_progress_bar(content_length as u64);
{
while let Some(chunk) = chunks.next().await {
output_file.write_all(&chunk).await?;
bar.inc(chunk.len() as _);
bytes_written += chunk.len();
if no_interactive {
println!(
"{} downloaded {}% ({})",
tmp_path_filename,
f64::trunc((bytes_written as f64 / content_length as f64) * 100.0),
bytefmt::format(bytes_written as _)
);
}
}
output_file.flush().await?;
}
bar.finish();
Ok(bytes_written)
});
let exit_signal = tokio::signal::ctrl_c();
let res = async move {
loop {
tokio::select! {
_ = exit_signal => {
return Err(anyhow::anyhow!("user-terminated via signal"));
}
                res = task_res => {
                    // Unwrap the JoinError layer first, then the spawned task's own Result.
                    return Ok(res??);
                }
}
}
};
match res.await {
Ok(bytes_written) => {
println!("done! renaming to final destination...");
tokio::fs::rename(&tmp_path, &real_path).await?;
println!(
"{} bytes written to {}",
bytes_written,
real_path.to_string_lossy()
);
Ok(())
}
Err(e) => {
println!("something went wrong. removing temp file...");
tokio::fs::remove_file(&tmp_path).await.unwrap();
Err(e)
}
}
}
fn setup_progress_bar(length: u64) -> ProgressBar {
let bar = ProgressBar::new_spinner();
bar.set_length(length);
bar.set_style(ProgressStyle::default_spinner().template("[{elapsed_precise}] {bar:40.cyan/blue} {bytes:>7}/{total_bytes:7} ({bytes_per_sec}, eta: {eta_precise})"));
bar
}
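// Builds a hidden temporary path such as ".<name>.choochdl~XXXXXX" in the same
// directory as the destination, so the final rename is a cheap same-filesystem move.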
fn create_temp_path(real_path: &PathBuf, overwrite: bool) -> anyhow::Result<PathBuf> {
if real_path.exists() {
if overwrite {
            println!(
                "warning: {} already exists, overwriting...",
&real_path.to_string_lossy()
)
} else {
            return Err(anyhow::anyhow!(
                "cannot overwrite destination file, use --force-overwrite (-f) to overwrite"
));
}
}
let mut tmp_path = real_path.clone();
tmp_path.set_file_name(format!(
".{}.choochdl~{}",
real_path.file_name().unwrap().to_str().unwrap(),
rand::thread_rng()
.sample_iter(&rand::distributions::Alphanumeric)
.take(6)
.map(char::from)
.collect::<String>()
));
Ok(tmp_path)
}
| true |
25dbafc4e8099764ee0e0d8d0e273f7b47404ba6
|
Rust
|
caklimas/rust-nes
|
/src/mappers/mapper_results.rs
|
UTF-8
| 1,701 | 3 | 3 |
[] |
no_license
|
pub struct MapperReadResult {
pub data: u8,
pub mapped_address: u32,
pub read_from_cart_ram: bool,
pub read_from_mapper_ram: bool
}
impl MapperReadResult {
pub fn from_cart_ram(mapped_address: u32) -> Self {
MapperReadResult {
data: 0,
mapped_address,
read_from_cart_ram: true,
read_from_mapper_ram: false
}
}
pub fn from_mapper_ram(data: u8) -> Self {
MapperReadResult {
data,
mapped_address: 0,
read_from_cart_ram: false,
read_from_mapper_ram: true
}
}
pub fn none() -> Self {
MapperReadResult {
data: 0,
mapped_address: 0,
read_from_cart_ram: false,
read_from_mapper_ram: false
}
}
}
pub struct MapperWriteResult {
pub handled: bool,
pub mapped_address: u32,
pub write_to_cart_ram: bool
}
impl MapperWriteResult {
pub fn handled() -> Self {
MapperWriteResult {
handled: true,
mapped_address: 0,
write_to_cart_ram: false
}
}
pub fn write_to_cart_ram(mapped_address: u32) -> Self {
MapperWriteResult {
handled: true,
mapped_address,
write_to_cart_ram: true
}
}
pub fn with_mapped_address(mapped_address: u32) -> Self {
MapperWriteResult {
handled: true,
mapped_address,
write_to_cart_ram: false
}
}
pub fn none() -> Self {
MapperWriteResult {
handled: false,
mapped_address: 0,
write_to_cart_ram: false
}
}
}
| true |
7f8722b9bc4dda94d282cf94dd38afc28aad683a
|
Rust
|
OneFourth/advent2019
|
/day24/src/main.rs
|
UTF-8
| 7,426 | 3.171875 | 3 |
[] |
no_license
|
use std::collections::HashMap;
use std::collections::HashSet;
fn state_to_str(state: &HashMap<(isize, isize), bool>) -> String {
let mut s = "".to_string();
for y in 0..5 {
for x in 0..5 {
match state.get(&(x, y)).unwrap() {
true => s += "#",
false => s += ".",
};
}
s += "\n";
}
s
}
fn bio_rating(state: HashMap<(isize, isize), bool>) -> u32 {
let mut total = 0;
for ((x, y), b) in state {
if b {
total += 2_u32.pow(x as u32 + y as u32 * 5);
}
}
total
}
fn part1(state: HashMap<(isize, isize), bool>) -> u32 {
let mut states = HashSet::new();
states.insert(state_to_str(&state));
let mut old_state = state;
loop {
let mut new_state = HashMap::new();
for y in 0..5 {
for x in 0..5 {
let adj: Vec<_> = [(x - 1, y), (x + 1, y), (x, y - 1), (x, y + 1)]
.iter()
.filter_map(|p| old_state.get(&p))
.collect();
let bugs = adj.iter().filter(|&&a| *a).count();
let curr = *old_state.get(&(x, y)).unwrap();
if bugs != 1 && curr {
new_state.insert((x, y), false);
} else if (bugs == 1 || bugs == 2) && !curr {
new_state.insert((x, y), true);
} else {
new_state.insert((x, y), curr);
}
}
}
let s = state_to_str(&new_state);
old_state = new_state;
if !states.insert(s) {
break;
};
}
bio_rating(old_state)
}
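// Part 2 (recursive grids): only live bugs are stored, as (x, y, depth) tuples.
// Each generation counts neighbours across levels (depth - 1 is the enclosing grid,
// depth + 1 the nested one) and then applies the usual infestation rules.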
fn part2(state: HashMap<(isize, isize), bool>) -> usize {
let mut current_state = state
.iter()
.filter_map(|(&(x, y), &b)| {
if !(x == 2 && y == 2) && b {
Some((x, y, 0))
} else {
None
}
})
.collect::<HashSet<_>>();
for _ in 0..200 {
let mut counts = HashMap::new();
for &(x, y, d) in ¤t_state {
match (x, y) {
(0, 0) => {
*counts.entry((x + 1, y, d)).or_insert(0) += 1;
*counts.entry((x, y + 1, d)).or_insert(0) += 1;
*counts.entry((2, 1, d - 1)).or_insert(0) += 1;
*counts.entry((1, 2, d - 1)).or_insert(0) += 1;
}
(4, 0) => {
*counts.entry((x - 1, y, d)).or_insert(0) += 1;
*counts.entry((x, y + 1, d)).or_insert(0) += 1;
*counts.entry((3, 2, d - 1)).or_insert(0) += 1;
*counts.entry((2, 1, d - 1)).or_insert(0) += 1;
}
(0, 4) => {
*counts.entry((x + 1, y, d)).or_insert(0) += 1;
*counts.entry((x, y - 1, d)).or_insert(0) += 1;
*counts.entry((1, 2, d - 1)).or_insert(0) += 1;
*counts.entry((2, 3, d - 1)).or_insert(0) += 1;
}
(4, 4) => {
*counts.entry((x - 1, y, d)).or_insert(0) += 1;
*counts.entry((x, y - 1, d)).or_insert(0) += 1;
*counts.entry((3, 2, d - 1)).or_insert(0) += 1;
*counts.entry((2, 3, d - 1)).or_insert(0) += 1;
}
(0, _) => {
*counts.entry((1, 2, d - 1)).or_insert(0) += 1;
*counts.entry((x + 1, y, d)).or_insert(0) += 1;
*counts.entry((x, y - 1, d)).or_insert(0) += 1;
*counts.entry((x, y + 1, d)).or_insert(0) += 1;
}
(4, _) => {
*counts.entry((x - 1, y, d)).or_insert(0) += 1;
*counts.entry((3, 2, d - 1)).or_insert(0) += 1;
*counts.entry((x, y - 1, d)).or_insert(0) += 1;
*counts.entry((x, y + 1, d)).or_insert(0) += 1;
}
(_, 0) => {
*counts.entry((x - 1, y, d)).or_insert(0) += 1;
*counts.entry((x + 1, y, d)).or_insert(0) += 1;
*counts.entry((2, 1, d - 1)).or_insert(0) += 1;
*counts.entry((x, y + 1, d)).or_insert(0) += 1;
}
(_, 4) => {
*counts.entry((x - 1, y, d)).or_insert(0) += 1;
*counts.entry((x + 1, y, d)).or_insert(0) += 1;
*counts.entry((x, y - 1, d)).or_insert(0) += 1;
*counts.entry((2, 3, d - 1)).or_insert(0) += 1;
}
(1, 2) => {
*counts.entry((x - 1, y, d)).or_insert(0) += 1;
*counts.entry((x, y - 1, d)).or_insert(0) += 1;
*counts.entry((x, y + 1, d)).or_insert(0) += 1;
for y_ in 0..5 {
*counts.entry((0, y_, d + 1)).or_insert(0) += 1;
}
}
(3, 2) => {
*counts.entry((x + 1, y, d)).or_insert(0) += 1;
*counts.entry((x, y - 1, d)).or_insert(0) += 1;
*counts.entry((x, y + 1, d)).or_insert(0) += 1;
for y_ in 0..5 {
*counts.entry((4, y_, d + 1)).or_insert(0) += 1;
}
}
(2, 1) => {
*counts.entry((x - 1, y, d)).or_insert(0) += 1;
*counts.entry((x + 1, y, d)).or_insert(0) += 1;
*counts.entry((x, y - 1, d)).or_insert(0) += 1;
for x_ in 0..5 {
*counts.entry((x_, 0, d + 1)).or_insert(0) += 1;
}
}
(2, 3) => {
*counts.entry((x - 1, y, d)).or_insert(0) += 1;
*counts.entry((x + 1, y, d)).or_insert(0) += 1;
*counts.entry((x, y + 1, d)).or_insert(0) += 1;
for x_ in 0..5 {
*counts.entry((x_, 4, d + 1)).or_insert(0) += 1;
}
}
(x, y) => {
*counts.entry((x - 1, y, d)).or_insert(0) += 1;
*counts.entry((x + 1, y, d)).or_insert(0) += 1;
*counts.entry((x, y - 1, d)).or_insert(0) += 1;
*counts.entry((x, y + 1, d)).or_insert(0) += 1;
}
}
}
current_state = counts
.iter()
.filter_map(|(&p, &c)| {
if current_state.contains(&p) {
if c == 1 {
return Some(p);
}
} else if c == 1 || c == 2 {
return Some(p);
}
None
})
.collect();
}
current_state.len()
}
fn main() {
let input = include_str!("../input");
let state: HashMap<_, _> = input
.lines()
.enumerate()
.flat_map(|(y, l)| {
l.trim()
.chars()
.enumerate()
.map(move |(x, c)| ((x as isize, y as isize), c == '#'))
})
.collect();
println!("Part 1: {}", part1(state.clone()));
println!("Part 2: {}", part2(state));
}
| true |
6f503c4727fa5717be755886d778508ee2706b95
|
Rust
|
KodrAus/rust-web-app
|
/src/domain/customers/queries/get_customer_with_orders.rs
|
UTF-8
| 1,751 | 2.984375 | 3 |
[] |
no_license
|
/*! Contains the `GetCustomerWithOrdersQuery` type. */
use crate::domain::{
customers::*,
infra::*,
orders::*,
Error,
};
/** Input for a `GetCustomerWithOrdersQuery`. */
#[derive(Deserialize)]
pub struct GetCustomerWithOrders {
pub id: CustomerId,
}
/** An order with a order summary for each of its line items. */
#[derive(Serialize)]
pub struct CustomerWithOrders {
pub id: CustomerId,
pub orders: Vec<CustomerOrder>,
}
/** An individual order. */
#[derive(Serialize)]
pub struct CustomerOrder {
pub id: OrderId,
}
impl QueryArgs for GetCustomerWithOrders {
type Output = Result<Option<CustomerWithOrders>, Error>;
}
async fn execute(
query: GetCustomerWithOrders,
store: impl CustomerStore,
orders_query: impl Query<GetOrderSummariesForCustomer>,
) -> Result<Option<CustomerWithOrders>, Error> {
let customer = match store.get_customer(query.id)? {
Some(customer) => customer.into_data(),
None => return Ok(None),
};
let orders = orders_query
.execute(GetOrderSummariesForCustomer { id: query.id })
.await?;
Ok(Some(CustomerWithOrders {
id: customer.id,
orders: orders
.into_iter()
.map(|order| CustomerOrder { id: order.id })
.collect(),
}))
}
impl Resolver {
/** Get a customer along with all of their orders. */
pub fn get_customer_with_orders_query(&self) -> impl Query<GetCustomerWithOrders> {
self.query(|resolver, query: GetCustomerWithOrders| async move {
let store = resolver.customer_store();
let orders_query = resolver.get_order_summaries_for_customer_query();
execute(query, store, orders_query).await
})
}
}
| true |
1d972918351f8e5b95308f5d1882ab6e2949b725
|
Rust
|
thesues/nand2tetris
|
/projects/10/jackanalyzer-rs/src/main.rs
|
UTF-8
| 31,653 | 3.125 | 3 |
[] |
no_license
|
extern crate regex;
use std::env;
use std::vec::Vec;
use std::process;
use std::fs;
use std::fs::File;
use std::io::prelude::*;
use std::io::{BufReader};
use std::fs::OpenOptions;
use std::fmt;
#[derive(Debug, Clone)]
enum TokenType{
KEYWORD(String),
SYMBOL(String),
INTEGER(u16),
STRING(String),
IDENTIFIER(String),
}
impl PartialEq for TokenType {
fn eq(&self, other: &TokenType) -> bool {
match (self, other) {
(&TokenType::KEYWORD(ref a), &TokenType::KEYWORD(ref b)) => {
a == b
},
(&TokenType::SYMBOL(ref a), &TokenType::SYMBOL(ref b)) => {
a == b
},
(&TokenType::IDENTIFIER(ref _a), &TokenType::IDENTIFIER(ref _b)) => {
true
},
(&TokenType::INTEGER(ref _a), &TokenType::INTEGER(ref _b)) => {
true
},
(&TokenType::STRING(ref _a), &TokenType::STRING(ref _b)) => {
true
},
_ => false
}
}
}
fn escape_word(s :&str) -> String {
match s {
"&" => String::from("&"),
">" => String::from(">"),
"<" => String::from("<"),
"\"" => String::from("""),
_ => s.to_string()
}
}
impl fmt::Display for TokenType {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let display = match self{
&TokenType::IDENTIFIER(ref s) => format!("<identifier> {} </identifier>", escape_word(s)),
&TokenType::KEYWORD(ref s) => format!("<keyword> {} </keyword>", escape_word(s)),
&TokenType::INTEGER(ref i) => format!("<integerConstant> {} </integerConstant>", i),
&TokenType::STRING(ref s) => format!("<stringConstant> {} </stringConstant>", escape_word(s)),
&TokenType::SYMBOL(ref s) => format!("<symbol> {} </symbol>", escape_word(s))
};
write!(f, "{}", display)
}
}
struct Token {
token_type: TokenType,
}
fn base_name(filename: &str) -> &str {
match filename.rfind(".") {
Some(pos) => &filename[0..pos],
None => "tempfile"
}
}
//for each file
struct JackTokenizer {
file: File,
filename: String,
vector: Vec<Token>
}
impl JackTokenizer {
//helper functions
fn is_defined_symbol(c :char) -> bool {
match c {
'{'| '}' | '(' | ')' | '[' | ']' | '.' | ',' | ';' | '+' | '-' | '*' | '/' | '&' | '|' | '<' | '>' | '=' | '~' => true,
_ => false
}
}
fn is_keyword(s :&str) -> bool {
match s {
"class"|"constructor"|"function"|"method"|
"field"|"static"|"var"|"int"|"char"|"boolean"|
"void"|"true"|"false"|"null"| "this"|"let"|"do"|
"if"|"else"|"while"|"return" => true,
_ => {false}
}
}
    fn add_to_list(vec: &mut Vec<Token>, _filename: &str, t: TokenType) {
//print to xml
vec.push(
Token{
token_type: t
});
}
pub fn new(filename: &str) -> JackTokenizer{
let file = File::open(filename).unwrap();
return JackTokenizer{file:file, filename: filename.to_string(), vector:vec![]}
}
pub fn process(&mut self) {
let mut in_comment = false;
let mut vec :Vec<Token> = vec![];
for line in BufReader::new(&mut self.file)
.lines()
.filter_map(|x| x.ok())
.map(|x| x.trim().to_string())
.filter(|x| !x.is_empty())
{
//skip multiline comment
if line.starts_with("/*") || line.starts_with("/**") {
in_comment = true;
}
if line.ends_with("*/") {
in_comment = false;
continue;
}
if in_comment {
continue;
}
//skip line comment
let comment_offset = line.find("//").unwrap_or(line.len());
let (first, _last) = line.split_at(comment_offset);
if first.is_empty() {
continue;
}
            // a regex alone can't cleanly split these tokens, so scan the line character by character
//word could be ident or keyword
let mut word_pos: usize = 0;
let mut number_pos: usize = 0;
let mut string_pos: usize = 0;
enum parse_stat {
STRING,
SYMBOL,
NUMBER,
SPACE,
WORD,
NONE
}
let mut previous_stat = parse_stat::NONE;
for (pos, c) in first.chars().enumerate() {
//SYMBOL
if JackTokenizer::is_defined_symbol(c) {
match previous_stat {
parse_stat::WORD => {
let word = &first[word_pos..pos as usize];
if JackTokenizer::is_keyword(word) {
JackTokenizer::add_to_list(&mut vec, &self.filename, TokenType::KEYWORD(word.to_string()));
} else {
JackTokenizer::add_to_list(&mut vec, &self.filename, TokenType::IDENTIFIER(word.to_string()));
}
},
parse_stat::NUMBER => {
let number :u16 = first[number_pos..pos].parse().unwrap();
JackTokenizer::add_to_list(&mut vec, &self.filename, TokenType::INTEGER(number));
},
parse_stat::STRING => {
continue;
}
_ => {}
}
JackTokenizer::add_to_list(&mut vec, &self.filename, TokenType::SYMBOL(c.to_string()));
previous_stat = parse_stat::SYMBOL;
}
//STRING
if c == '"' {
match previous_stat {
parse_stat::STRING => {
let s = first[(string_pos+1) as usize ..pos as usize].to_string();
JackTokenizer::add_to_list(&mut vec, &self.filename, TokenType::STRING(s));
previous_stat = parse_stat::NONE;
},
_ => {
string_pos = pos;
previous_stat = parse_stat::STRING;
continue;
}
}
}
                //keywords contain no digits and identifiers cannot start with one,
                //so a digit here must begin an integer constant
if c.is_numeric() {
match previous_stat {
parse_stat::STRING => {
continue;
},
parse_stat::WORD => {
continue;
},
parse_stat::NUMBER => {
continue;
}
_ => {
previous_stat = parse_stat::NUMBER;
number_pos = pos;
}
}
}
/* a-z, A-Z, _*/
if c.is_alphabetic() || c == '_' {
match previous_stat {
parse_stat::STRING => {
continue;
},
parse_stat::NONE | parse_stat::SPACE | parse_stat::SYMBOL => {
word_pos = pos;
previous_stat = parse_stat::WORD;
},
parse_stat::NUMBER => {
                            panic!("tokenizer error in line: {}", line);
},
parse_stat::WORD => {
continue;
}
}
}
if c.is_whitespace() {
match previous_stat {
parse_stat::WORD => {
let word = &first[word_pos..pos as usize];
if JackTokenizer::is_keyword(word) {
JackTokenizer::add_to_list(&mut vec, &self.filename, TokenType::KEYWORD(word.to_string()));
} else {
JackTokenizer::add_to_list(&mut vec, &self.filename, TokenType::IDENTIFIER(word.to_string()));
}
previous_stat = parse_stat::SPACE;
},
parse_stat::NONE | parse_stat::SPACE | parse_stat::SYMBOL => {
previous_stat = parse_stat::SPACE;
continue;
},
parse_stat::NUMBER=>{
let number :u16 = first[number_pos..pos].parse().unwrap();
JackTokenizer::add_to_list(&mut vec, &self.filename, TokenType::INTEGER(number));
previous_stat = parse_stat::SPACE;
},
parse_stat::STRING=>{
//space is in the constant string
continue;
},
}
}
}
}
self.vector = vec;
}
pub fn output(&self) {
let target_file_name = format!("{}T.{}", base_name(&self.filename), "xml");
let mut target_file = OpenOptions::new()
.create(true)
.write(true)
.truncate(true)
.open(target_file_name).unwrap();
writeln!(target_file, "<tokens>");
for t in &self.vector {
writeln!(target_file, "{}", t.token_type);
}
writeln!(target_file, "</tokens>");
}
pub fn has_more_tokens(&self) -> bool{
self.vector.len() > 0
}
pub fn advance(&mut self) -> Option<Token> {
if self.vector.len() > 0 {
Some(self.vector.remove(0))
} else {
None
}
}
pub fn back_to_vector(&mut self, t: Token) {
self.vector.insert(0, t);
}
pub fn peek(&self, i :usize) -> Option<&Token> {
if i < self.vector.len() {
return Some(&self.vector[i])
} else {
None
}
}
}
//vec![TokenType::SYMBOL("{".to_string())
macro_rules! jack{
( $($t: ident : $e: expr),* ) => {{
let mut temp_vec = Vec::new();
$(
match $e {
_ => temp_vec.push(TokenType::$t($e.to_string()))
}
)*
temp_vec
}}
}
struct JackAnalyzer {
target_file_name :String,
target_file: File,
tokenizer: JackTokenizer
}
impl JackAnalyzer {
pub fn new(jackfilename: &str, tokenizer :JackTokenizer ) -> JackAnalyzer {
let target_file_name = format!("{}.{}", base_name(jackfilename), "xml");
let target_file = OpenOptions::new()
.create(true)
.write(true)
.truncate(true)
.open(&target_file_name).unwrap();
return JackAnalyzer{target_file_name: target_file_name, target_file: target_file, tokenizer:tokenizer};
}
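    // Consume the next token if it matches one of the expected token types; otherwise
    // push it back so the caller can try another production (one-token backtracking).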
fn eat(&mut self, token_type_vec: Vec<TokenType>) -> Option<TokenType> {
let source_token = self.tokenizer.advance().unwrap();
for i in &token_type_vec {
if source_token.token_type == *i {
return Some(source_token.token_type);
}
}
//if the pop failed, I will push the token back
/*
println!("I GOT a {},", source_token.token_type);
for i in &token_type_vec {
println!("expected {}", i);
}
*/
self.tokenizer.back_to_vector(source_token);
return None;
}
fn eat_force(&mut self) -> Option<TokenType> {
match self.tokenizer.advance() {
Some(t) => Some(t.token_type),
None => None
}
}
fn peek(&self, i: usize) -> Option<&TokenType> {
match self.tokenizer.peek(i) {
Some(t) => {
Some(&t.token_type)
},
None => None
}
}
fn compile_class(&mut self) {
writeln!(self.target_file,"<class>");
//need macro to simplifiy the code
let valid_token = self.eat(jack!(KEYWORD:"class")).unwrap();
writeln!(self.target_file, "{}", valid_token);
let valid_token = self.eat(jack!(IDENTIFIER:"")).unwrap();
writeln!(self.target_file, "{}", valid_token);
//let valid_token = self.eat(vec![TokenType::SYMBOL("{".to_string())]).unwrap();
let valid_token = self.eat(jack!(SYMBOL:"{")).unwrap();
writeln!(self.target_file, "{}", valid_token);
//0 or * class var declares
while self.compile_class_var_dec(){};
//0 or * class subroutine
while self.compile_subroutine(){};
let valid_token = self.eat(jack!(SYMBOL:"}")).unwrap();
writeln!(self.target_file,"{}", valid_token);
writeln!(self.target_file,"</class>");
}
// classVarDec*
// if return false, means no parser,
// if return true, means could have more to parse
fn compile_class_var_dec(&mut self) -> bool{
//static | field
let valid_token = match self.eat(jack!(KEYWORD:"static", KEYWORD:"field")) {
Some(valid_token) => valid_token,
None => {return false}
};
writeln!(self.target_file,"<classVarDec>");
writeln!(self.target_file,"{}", valid_token);
//type
let valid_token = self.eat(jack!(KEYWORD:"int", KEYWORD:"char", KEYWORD:"boolean", IDENTIFIER:"")).unwrap();
writeln!(self.target_file,"{}", valid_token);
//varname
let valid_token = self.eat(jack!(IDENTIFIER:"")).unwrap();
writeln!(self.target_file,"{}", valid_token);
//(, varname)*
loop {
let valid_token = match self.eat(jack!(SYMBOL:",")) {
Some(valid_token) => valid_token,
None => {break;}
};
writeln!(self.target_file,"{}", valid_token);
let valid_token = self.eat(jack!(IDENTIFIER:"")).unwrap();
writeln!(self.target_file,"{}", valid_token);
}
let valid_token = self.eat(jack!(SYMBOL:";")).unwrap();
writeln!(self.target_file,"{}", valid_token);
writeln!(self.target_file,"</classVarDec>");
true
}
fn compile_subroutine(&mut self) -> bool{
//constructor, function, method
let valid_token = match self.eat(jack!(KEYWORD:"constructor", KEYWORD:"function", KEYWORD:"method")) {
Some(valid_token) => valid_token,
None => {return false}
};
writeln!(self.target_file, "<subroutineDec>");
writeln!(self.target_file, "{}", valid_token);
//void , type => int, boolean, char, ident
let valid_token = self.eat(jack!(KEYWORD:"void", KEYWORD:"int", KEYWORD:"boolean", KEYWORD:"char", IDENTIFIER:"")).unwrap();
writeln!(self.target_file, "{}", valid_token);
//subroutine name
let valid_token = self.eat(jack!(IDENTIFIER:"")).unwrap();
writeln!(self.target_file, "{}", valid_token);
//symbol '('
let valid_token = self.eat(jack!(SYMBOL:"(")).unwrap();
writeln!(self.target_file, "{}", valid_token);
//one parameter list or none
self.compile_parameter_list();
//symbol ')'
let valid_token = self.eat(jack!(SYMBOL:")")).unwrap();
writeln!(self.target_file, "{}", valid_token);
self.compile_subroutine_body();
writeln!(self.target_file, "</subroutineDec>");
true
}
fn compile_subroutine_body(&mut self) {
writeln!(self.target_file, "{}", "<subroutineBody>");
//symbol '{'
let valid_token = self.eat(jack!(SYMBOL:"{")).unwrap();
writeln!(self.target_file, "{}", valid_token);
while self.compile_var_dec() {}
self.compile_statements();
//symbol '}'
let valid_token = self.eat(jack!(SYMBOL:"}")).unwrap();
writeln!(self.target_file, "{}", valid_token);
writeln!(self.target_file, "{}", "</subroutineBody>");
}
fn compile_parameter_list(&mut self) {
//type
writeln!(self.target_file, "<parameterList>");
let valid_token = match self.eat(jack!(KEYWORD:"void", KEYWORD:"int",
KEYWORD:"boolean", KEYWORD:"char", IDENTIFIER:"")) {
Some(valid_token) => valid_token,
None => {
writeln!(self.target_file, "</parameterList>");
return;
}
};
writeln!(self.target_file, "{}", valid_token);
//varname
let valid_token = self.eat(jack!(IDENTIFIER:"")).unwrap();
writeln!(self.target_file, "{}", valid_token);
//(, type varname)
loop {
//,
let valid_token = match self.eat(jack!(SYMBOL:",")) {
Some(valid_token) => valid_token,
None => {break;}
};
writeln!(self.target_file, "{}", valid_token);
//type
let valid_token = self.eat(jack!(KEYWORD:"void", KEYWORD:"int",
KEYWORD:"boolean", KEYWORD:"char", IDENTIFIER:"")).unwrap();
writeln!(self.target_file, "{}", valid_token);
let valid_token = self.eat(jack!(IDENTIFIER:"")).unwrap();
writeln!(self.target_file, "{}", valid_token);
}
writeln!(self.target_file, "</parameterList>");
}
fn compile_var_dec(&mut self) -> bool{
//var
let valid_token = match self.eat(jack!(KEYWORD:"var")) {
Some(valid_token) => valid_token,
None => {return false;}
};
writeln!(self.target_file, "{}", "<varDec>");
writeln!(self.target_file, "{}", valid_token);
//type
let valid_token = self.eat(jack!(KEYWORD:"void", KEYWORD:"int",
KEYWORD:"boolean", KEYWORD:"char", IDENTIFIER:"")).unwrap();
writeln!(self.target_file, "{}", valid_token);
//varName
let valid_token = self.eat(jack!(IDENTIFIER:"")).unwrap();
writeln!(self.target_file, "{}", valid_token);
loop {
//,
let valid_token = match self.eat(jack!(SYMBOL:",")) {
Some(valid_token) => valid_token,
None => {break;}
};
writeln!(self.target_file, "{}", valid_token);
//varname
let valid_token = self.eat(jack!(IDENTIFIER:"")).unwrap();
writeln!(self.target_file, "{}", valid_token);
}
//;
let valid_token = self.eat(jack!(SYMBOL:";")).unwrap();
writeln!(self.target_file, "{}", valid_token);
writeln!(self.target_file, "{}", "</varDec>");
true
}
fn compile_statements(&mut self ) {
writeln!(self.target_file, "{}","<statements>");
loop {
let x :u8;
{
x = match self.peek(0){
Some(&TokenType::KEYWORD(ref s)) if s == "if" => 1,
Some(&TokenType::KEYWORD(ref s)) if s == "while" => 2,
Some(&TokenType::KEYWORD(ref s)) if s == "do" => 3,
Some(&TokenType::KEYWORD(ref s)) if s == "let" => 4,
Some(&TokenType::KEYWORD(ref s)) if s == "return" => 5,
_ => 6
}
}//dispose immuable self
match x {
1 => self.compile_if_statement(),
2 => self.compile_while_statement(),
3 => self.compile_do_statement(),
4 => self.compile_let_statement(),
5 => self.compile_return_statement(),
_ => {
break;
}
}
}
writeln!(self.target_file, "{}","</statements>");
}
fn compile_if_statement(&mut self) {
writeln!(self.target_file, "{}", "<ifStatement>");
let valid_token = self.eat(jack!(KEYWORD:"if")).unwrap();
writeln!(self.target_file, "{}", valid_token);
let valid_token = self.eat(jack!(SYMBOL:"(")).unwrap();
writeln!(self.target_file, "{}", valid_token);
self.compile_expression();
let valid_token = self.eat(jack!(SYMBOL:")")).unwrap();
writeln!(self.target_file, "{}", valid_token);
let valid_token = self.eat(jack!(SYMBOL:"{")).unwrap();
writeln!(self.target_file, "{}", valid_token);
self.compile_statements();
let valid_token = self.eat(jack!(SYMBOL:"}")).unwrap();
writeln!(self.target_file, "{}", valid_token);
//does it have else?
match self.eat(jack!(KEYWORD:"else")) {
None => {
writeln!(self.target_file, "{}", "</ifStatement>");
return
},
Some(valid_token) => {
writeln!(self.target_file, "{}", valid_token);
}
}
let valid_token = self.eat(jack!(SYMBOL:"{")).unwrap();
writeln!(self.target_file, "{}", valid_token);
self.compile_statements();
let valid_token = self.eat(jack!(SYMBOL:"}")).unwrap();
writeln!(self.target_file, "{}", valid_token);
writeln!(self.target_file, "{}", "</ifStatement>");
}
fn compile_while_statement(&mut self) {
writeln!(self.target_file, "{}", "<whileStatement>");
let valid_token = self.eat(jack!(KEYWORD:"while")).unwrap();
writeln!(self.target_file, "{}", valid_token);
let valid_token = self.eat(jack!(SYMBOL:"(")).unwrap();
writeln!(self.target_file, "{}", valid_token);
self.compile_expression();
let valid_token = self.eat(jack!(SYMBOL:")")).unwrap();
writeln!(self.target_file, "{}", valid_token);
let valid_token = self.eat(jack!(SYMBOL:"{")).unwrap();
writeln!(self.target_file, "{}", valid_token);
self.compile_statements();
let valid_token = self.eat(jack!(SYMBOL:"}")).unwrap();
writeln!(self.target_file, "{}", valid_token);
writeln!(self.target_file, "{}", "</whileStatement>");
}
fn compile_do_statement(&mut self) {
writeln!(self.target_file, "{}", "<doStatement>");
//do
let valid_token = self.eat(jack!(KEYWORD:"do")).unwrap();
writeln!(self.target_file, "{}", valid_token);
self.compile_subroutine_call();
//;
let valid_token = self.eat(jack!(SYMBOL:";")).unwrap();
writeln!(self.target_file, "{}", valid_token);
writeln!(self.target_file, "{}", "</doStatement>");
}
fn compile_let_statement(&mut self) {
writeln!(self.target_file, "{}", "<letStatement>");
//let
let valid_token = self.eat(jack!(KEYWORD:"let")).unwrap();
writeln!(self.target_file, "{}", valid_token);
//ident
let valid_token = self.eat(jack!(IDENTIFIER:"")).unwrap();
writeln!(self.target_file, "{}", valid_token);
match self.eat(jack!(SYMBOL:"[")) {
Some(valid_token) => {
//[
writeln!(self.target_file, "{}", valid_token);
self.compile_expression();
//]
let valid_token = self.eat(jack!(SYMBOL:"]")).unwrap();
writeln!(self.target_file, "{}", valid_token);
},
None=>{}
}
//=
let valid_token = self.eat(jack!(SYMBOL:"=")).unwrap();
writeln!(self.target_file, "{}", valid_token);
self.compile_expression();
//;
let valid_token = self.eat(jack!(SYMBOL:";")).unwrap();
writeln!(self.target_file, "{}", valid_token);
writeln!(self.target_file, "{}", "</letStatement>");
}
fn compile_return_statement(&mut self) {
//return
writeln!(self.target_file, "{}", "<returnStatement>");
let valid_token = self.eat(jack!(KEYWORD:"return")).unwrap();
writeln!(self.target_file, "{}", valid_token);
let has_expression = match self.peek(0).unwrap() {
&TokenType::SYMBOL(ref s) if s == ";" => false,
_ => true
};
if has_expression {
self.compile_expression();
}
let valid_token = self.eat(jack!(SYMBOL:";")).unwrap();
writeln!(self.target_file, "{}", valid_token);
writeln!(self.target_file, "{}", "</returnStatement>");
}
fn compile_expression(&mut self) {
//START FROM HERE;
writeln!(self.target_file, "{}","<expression>");
self.compile_term();
loop {
match self.eat(jack!(SYMBOL:"+", SYMBOL:"-", SYMBOL:"*", SYMBOL:"/",
SYMBOL:"&", SYMBOL:"|", SYMBOL:">", SYMBOL:"<", SYMBOL:"=")) {
Some(ref s) => {writeln!(self.target_file, "{}", s);},
None => break
}
self.compile_term();
}
writeln!(self.target_file, "{}","</expression>");
}
//https://stackoverflow.com/questions/42075409/drop-a-immutable-borrow-to-make-a-mutable-borrow
fn compile_term(&mut self) {
//the eat function can not fit the needs
writeln!(self.target_file, "{}", "<term>");
let peek_token = self.peek(0).unwrap().clone();
match peek_token {
TokenType::IDENTIFIER(ref _s) => {
//VARNAME | VARNAME [expression] | subroutine call
let peek_token1 = self.peek(1).unwrap().clone();
match peek_token1 {
TokenType::SYMBOL(ref s) if s == "[" => {
writeln!(self.target_file, "{}", peek_token);
self.eat_force();
writeln!(self.target_file, "{}", peek_token1);
self.eat_force();
self.compile_expression();
let valid_token = self.eat(jack!(SYMBOL:"]")).unwrap();
writeln!(self.target_file, "{}", valid_token);
},
TokenType::SYMBOL(ref s) if (s == "(" || s == ".") => {
self.compile_subroutine_call();
}
_ => {
writeln!(self.target_file, "{}", peek_token);
self.eat_force();
}
}
},
TokenType::SYMBOL(ref s) if (s == "-" || s == "~") => {
writeln!(self.target_file, "{}", peek_token);
self.eat_force();
self.compile_term();
},
            TokenType::INTEGER(ref _s) => {
writeln!(self.target_file, "{}", peek_token);
self.eat_force();
}
TokenType::SYMBOL(ref s) if s == "(" => {
writeln!(self.target_file, "{}", peek_token);
self.eat_force();
self.compile_expression();
let valid_token = self.eat(jack!(SYMBOL:")")).unwrap();
writeln!(self.target_file, "{}", valid_token);
},
TokenType::KEYWORD(ref s ) if (s == "true" || s == "false" || s == "null" || s == "this") => {
writeln!(self.target_file, "{}", peek_token);
self.eat_force();
},
TokenType::STRING(ref _s) => {
writeln!(self.target_file, "{}", peek_token);
self.eat_force();
}
_ => {
panic!("failed to parse");
}
}
writeln!(self.target_file, "{}", "</term>");
}
fn compile_expression_list(&mut self) {
writeln!(self.target_file, "{}", "<expressionList>");
let is_expression_list_empty = match self.peek(0).unwrap() {
&TokenType::SYMBOL(ref s) if s == ")" => true,
_ => false
};
if is_expression_list_empty {
writeln!(self.target_file, "{}", "</expressionList>");
return;
}
self.compile_expression();
//,
loop {
let valid_token = match self.eat(jack!(SYMBOL:",")) {
Some(s) => s,
None => {break}
};
writeln!(self.target_file, "{}", valid_token);
//expression
self.compile_expression();
}
writeln!(self.target_file, "{}", "</expressionList>");
}
fn compile_subroutine_call(&mut self) {
let valid_token = self.eat(jack!(IDENTIFIER:"")).unwrap();
writeln!(self.target_file, "{}", valid_token);
//next could be . or (
let valid_token = self.eat(jack!(SYMBOL:".", SYMBOL:"(")).unwrap();
match valid_token {
TokenType::SYMBOL(ref s) if s == "(" => {
writeln!(self.target_file, "{}", valid_token);
self.compile_expression_list();
let valid_token = self.eat(jack!(SYMBOL:")")).unwrap();
writeln!(self.target_file, "{}", valid_token);
},
TokenType::SYMBOL(ref s) if s == "." => {
writeln!(self.target_file, "{}", valid_token);
let valid_token = self.eat(jack!(IDENTIFIER:"")).unwrap();
writeln!(self.target_file, "{}", valid_token);
let valid_token = self.eat(jack!(SYMBOL:"(")).unwrap();
writeln!(self.target_file, "{}", valid_token);
self.compile_expression_list();
let valid_token = self.eat(jack!(SYMBOL:")")).unwrap();
writeln!(self.target_file, "{}", valid_token);
},
_ => {panic!("never will be herer")}
}
}
}
fn main() {
let args: Vec<String> = env::args().collect();
if args.len() != 2 {
        println!("usage: ./jackanalyzer yourfile.jack");
process::exit(-1);
}
let filename : &str = &args[1];
//is args[1] a directory;
    let mut is_directory: bool = false;
let meta = fs::metadata(filename).unwrap_or_else(|err|{
panic!("Problem parsing arguments: {}", err);
});
if meta.is_dir() {
        is_directory = true;
}
let mut jack_files :Vec<String> = Vec::new();
    if is_directory {
let path_iter = fs::read_dir(filename).unwrap();
for p in path_iter {
let p = p.unwrap().path();
let filename = p.to_str().unwrap();
if filename.ends_with(".jack") {
jack_files.push(filename.to_string());
}
}
} else {
jack_files.push(filename.to_string());
}
for filename in &jack_files {
let mut jt = JackTokenizer::new(filename);
jt.process();
jt.output();
let mut je = JackAnalyzer::new(filename, jt);
je.compile_class();
}
}
| true |
356ffe68bd0e7931d9112e965cc1e8cd7558667f
|
Rust
|
OwenDF/LearningRust
|
/HelloWorld.rs
|
UTF-8
| 256 | 3.171875 | 3 |
[] |
no_license
|
fn main()
{
print!("Hello, World, ");
println!("I'm a Rustacean!");
println!("My Name is {name}",
name = "Owen");
println!("Pi is roughly {pi_exact:.precision$}",
pi_exact = 3.141592,
precision = 3)
}
| true |
edb0c502c01151eab9f6e294fa5bae709a01b07f
|
Rust
|
viquezclaudio/core-rs-albatross
|
/primitives/trie/src/key_nibbles.rs
|
UTF-8
| 14,037 | 3.421875 | 3 |
[
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
use std::borrow::Cow;
use std::cmp;
use std::fmt;
use std::ops;
use std::str;
use std::usize;
use log::error;
use beserial::{Deserialize, ReadBytesExt, Serialize, SerializingError, WriteBytesExt};
use nimiq_database::AsDatabaseBytes;
use nimiq_keys::Address;
/// A compact representation of a node's key. It stores the key in big endian. Each byte
/// stores up to 2 nibbles. Internally, we assume that a key is represented in hexadecimal form.
#[derive(PartialEq, Eq, PartialOrd, Ord, Clone, Debug)]
pub struct KeyNibbles {
bytes: [u8; KeyNibbles::MAX_BYTES],
bytes_length: u8,
length: u8,
}
impl KeyNibbles {
const MAX_BYTES: usize = 62;
/// Create the root (empty) key.
pub fn root() -> KeyNibbles {
KeyNibbles {
bytes: [0; KeyNibbles::MAX_BYTES],
bytes_length: 0,
length: 0,
}
}
/// Returns the length of the key in nibbles.
pub fn len(&self) -> usize {
self.length as usize
}
pub fn is_empty(&self) -> bool {
self.length == 0
}
/// Returns the nibble at the given index as an usize. The usize represents a hexadecimal
/// character.
pub fn get(&self, index: usize) -> Option<usize> {
if index >= self.len() {
error!(
"Index {} exceeds the length of KeyNibbles {}, which has length {}.",
index,
self,
self.len()
);
return None;
}
let byte = index / 2;
let nibble = index % 2;
Some(((self.bytes[byte] >> ((1 - nibble) * 4)) & 0xf) as usize)
}
/// Checks if the current key is a prefix of the given key. If the keys are equal it also
/// returns true.
pub fn is_prefix_of(&self, other: &KeyNibbles) -> bool {
if self.length > other.length {
return false;
}
// Get the last byte index and check whether the key ends in the middle of a byte
// (i.e. has an odd number of nibbles).
let end_byte = self.len() / 2;
let ends_in_half_byte = self.length % 2 == 1;
// If the key ends in the middle of a byte, compare that half byte.
if ends_in_half_byte {
let own_nibble = (self.bytes[end_byte] >> 4) & 0xf;
let other_nibble = (other.bytes[end_byte] >> 4) & 0xf;
if own_nibble != other_nibble {
return false;
}
}
// Compare the remaining bytes.
self.bytes[..end_byte] == other.bytes[..end_byte]
}
/// Returns the common prefix between the current key and a given key.
#[must_use]
pub fn common_prefix(&self, other: &KeyNibbles) -> Self {
// Get the smaller length (in nibbles) of the two keys.
let min_len = cmp::min(self.len(), other.len());
// Calculate the minimum length (in bytes) rounded up.
let byte_len = min_len / 2 + (min_len % 2);
// Find the nibble index where the two keys first differ.
let mut first_difference_nibble = min_len;
for j in 0..byte_len {
if self.bytes[j] != other.bytes[j] {
if self.get(j * 2) != other.get(j * 2) {
first_difference_nibble = j * 2
} else {
first_difference_nibble = j * 2 + 1
};
break;
}
}
// Get the slice corresponding to the common prefix.
self.slice(0, first_difference_nibble)
}
/// Returns a slice of the current key. Starting at the given start (inclusive) nibble index and
/// ending at the given end (exclusive) nibble index.
#[must_use]
pub fn slice(&self, start: usize, end: usize) -> Self {
// Do some basic sanity checks.
if start >= self.len() || end <= start {
error!(
"Slice parameters don't make sense! Key length {}, start index {}, end index {}.",
self.len(),
start,
end
);
return KeyNibbles::root();
}
// Calculate the end nibble index (it can't exceed the key length).
let end = cmp::min(end, self.len());
// Get the start and end in bytes (rounded down).
let byte_start = start / 2;
let byte_end = end / 2;
// Get the nibbles for the slice.
let mut new_bytes = [0; KeyNibbles::MAX_BYTES];
let mut new_bytes_length = 0;
// If the slice starts at the beginning of a byte, then it's an easy case.
if start % 2 == 0 {
new_bytes_length = byte_end - byte_start;
new_bytes[0..new_bytes_length].copy_from_slice(&self.bytes[byte_start..byte_end]);
}
// Otherwise we need to shift everything by one nibble.
else {
let mut current_byte = (self.bytes[byte_start] & 0xf) << 4; // Right nibble.
for (count, byte) in self.bytes[(byte_start + 1)..byte_end].iter().enumerate() {
let tmp_byte = byte;
let left_nibble = (tmp_byte >> 4) & 0xf;
new_bytes[count] = current_byte | left_nibble;
new_bytes_length += 1;
current_byte = (tmp_byte & 0xf) << 4;
}
new_bytes[new_bytes_length] = current_byte;
new_bytes_length += 1;
};
// If we have an odd number of nibbles we add the last nibble now.
if end % 2 == 1 {
let last_nibble = self.bytes[byte_end] & 0xf0;
if start % 2 == 0 {
new_bytes[new_bytes_length] = last_nibble;
new_bytes_length += 1;
} else {
new_bytes[new_bytes_length - 1] |= last_nibble >> 4;
}
}
// Return the slice as a new key.
KeyNibbles {
bytes: new_bytes,
bytes_length: new_bytes_length as u8,
length: (end - start) as u8,
}
}
/// Returns the suffix of the current key starting at the given nibble index.
#[must_use]
pub fn suffix(&self, start: u8) -> Self {
self.slice(start as usize, self.len())
}
}
impl From<&Address> for KeyNibbles {
fn from(address: &Address) -> Self {
KeyNibbles::from(address.as_bytes())
}
}
impl From<&[u8]> for KeyNibbles {
fn from(v: &[u8]) -> Self {
if v.len() > KeyNibbles::MAX_BYTES {
error!(
"Array of len {} exceeds the max length of KeyNibbles {}",
v.len(),
KeyNibbles::MAX_BYTES,
);
return KeyNibbles::root();
}
let mut new_bytes = [0; KeyNibbles::MAX_BYTES];
new_bytes[..v.len()].copy_from_slice(v);
KeyNibbles {
bytes: new_bytes,
bytes_length: v.len() as u8,
length: (v.len() * 2) as u8,
}
}
}
impl str::FromStr for KeyNibbles {
type Err = hex::FromHexError;
fn from_str(s: &str) -> Result<Self, Self::Err> {
let mut bytes: [u8; KeyNibbles::MAX_BYTES] = [0; KeyNibbles::MAX_BYTES];
if s.len() % 2 == 0 {
hex::decode_to_slice(s, &mut bytes[..(s.len() / 2)])?;
Ok(KeyNibbles {
bytes,
bytes_length: (s.len() / 2) as u8,
length: s.len() as u8,
})
} else {
let last_complete_nibble_idx = s.len() - 2;
let last_complete_byte_idx = (last_complete_nibble_idx) / 2;
hex::decode_to_slice(
&s[..last_complete_nibble_idx + 1],
&mut bytes[..last_complete_byte_idx + 1],
)?;
let last_nibble = s.chars().last().unwrap();
let last_nibble =
last_nibble
.to_digit(16)
.ok_or(hex::FromHexError::InvalidHexCharacter {
c: last_nibble,
index: s.len() - 1,
})?;
bytes[last_complete_byte_idx + 1] = (last_nibble as u8) << 4;
Ok(KeyNibbles {
bytes,
bytes_length: (last_complete_byte_idx + 2) as u8,
length: s.len() as u8,
})
}
}
}
impl fmt::Display for KeyNibbles {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let mut hex_representation = hex::encode(&self.bytes[..self.bytes_length as usize]);
// If prefix ends in the middle of a byte, remove last char.
if self.length % 2 == 1 {
hex_representation.pop();
}
f.write_str(&hex_representation)
}
}
impl ops::Add<&KeyNibbles> for &KeyNibbles {
type Output = KeyNibbles;
fn add(self, other: &KeyNibbles) -> KeyNibbles {
let mut bytes = self.bytes;
let mut bytes_length;
if self.len() % 2 == 0 {
// Easy case: the lhs ends with a full byte.
bytes_length = self.bytes_length + other.bytes_length;
bytes[self.bytes_length as usize..bytes_length as usize]
.copy_from_slice(&other.bytes[..other.bytes_length as usize]);
} else {
// Complex case: the lhs ends in the middle of a byte.
let mut next_byte = bytes[(self.bytes_length - 1) as usize];
bytes_length = self.bytes_length - 1;
for (count, byte) in other.bytes[..other.bytes_length as usize]
.iter()
.enumerate()
{
let left_nibble = byte >> 4;
bytes[(self.bytes_length - 1) as usize + count] = next_byte | left_nibble;
bytes_length += 1;
next_byte = (byte & 0xf) << 4;
}
if other.length % 2 == 0 {
// Push next_byte
bytes[bytes_length as usize] = next_byte;
bytes_length += 1;
}
}
KeyNibbles {
bytes,
bytes_length,
length: self.length + other.length,
}
}
}
impl AsDatabaseBytes for KeyNibbles {
fn as_database_bytes(&self) -> Cow<[u8]> {
// TODO: Improve KeyNibbles, so that no serialization is needed.
let v = Serialize::serialize_to_vec(&self);
Cow::Owned(v)
}
}
impl Serialize for KeyNibbles {
fn serialize<W: WriteBytesExt>(&self, writer: &mut W) -> Result<usize, SerializingError> {
let mut size = 2;
writer.write_u8(self.length)?;
writer.write_u8(self.bytes_length as u8)?;
size += writer.write(&self.bytes[..self.bytes_length as usize])?;
Ok(size)
}
fn serialized_size(&self) -> usize {
2 + self.bytes_length as usize
}
}
impl Deserialize for KeyNibbles {
fn deserialize<R: ReadBytesExt>(reader: &mut R) -> Result<Self, SerializingError> {
let length = reader.read_u8()?;
let bytes_length = reader.read_u8()?;
let mut bytes = [0; KeyNibbles::MAX_BYTES];
reader.read_exact(&mut bytes[..bytes_length as usize])?;
Ok(KeyNibbles {
bytes,
length,
bytes_length,
})
}
}
#[cfg(test)]
mod tests {
use super::*;
use nimiq_test_log::test;
#[test]
fn to_from_str_works() {
let key: KeyNibbles = "cfb98637bcae43c13323eaa1731ced2b716962fd".parse().unwrap();
assert_eq!(key.to_string(), "cfb98637bcae43c13323eaa1731ced2b716962fd");
}
#[test]
fn sum_works() {
let key1: KeyNibbles = "cfb".parse().unwrap();
let key2: KeyNibbles = "986".parse().unwrap();
assert_eq!((&key1 + &key2).to_string(), "cfb986");
}
#[test]
fn nibbles_get_works() {
let key: KeyNibbles = "cfb98637bcae43c13323eaa1731ced2b716962fd".parse().unwrap();
assert_eq!(key.get(0), Some(12));
assert_eq!(key.get(1), Some(15));
assert_eq!(key.get(2), Some(11));
assert_eq!(key.get(3), Some(9));
assert_eq!(key.get(41), None);
assert_eq!(key.get(42), None);
}
#[test]
fn nibbles_slice_works() {
let key: KeyNibbles = "cfb98637bcae43c13323eaa1731ced2b716962fd".parse().unwrap();
assert_eq!(key.slice(0, 1).to_string(), "c");
assert_eq!(key.slice(0, 2).to_string(), "cf");
assert_eq!(key.slice(0, 3).to_string(), "cfb");
assert_eq!(key.slice(1, 3).to_string(), "fb");
assert_eq!(
key.slice(0, 41).to_string(),
"cfb98637bcae43c13323eaa1731ced2b716962fd"
);
assert_eq!(
key.slice(1, 40).to_string(),
"fb98637bcae43c13323eaa1731ced2b716962fd"
);
assert_eq!(key.slice(2, 1).to_string(), "");
assert_eq!(key.slice(42, 43).to_string(), "");
}
#[test]
fn nibbles_suffix_works() {
let key: KeyNibbles = "cfb98637bcae43c13323eaa1731ced2b716962fd".parse().unwrap();
assert_eq!(
key.suffix(0).to_string(),
"cfb98637bcae43c13323eaa1731ced2b716962fd"
);
assert_eq!(
key.suffix(1).to_string(),
"fb98637bcae43c13323eaa1731ced2b716962fd"
);
assert_eq!(
key.suffix(2).to_string(),
"b98637bcae43c13323eaa1731ced2b716962fd"
);
assert_eq!(key.suffix(40).to_string(), "");
assert_eq!(key.suffix(42).to_string(), "");
}
#[test]
fn nibbles_is_prefix_of_works() {
let key1: KeyNibbles = "cfb98637bcae43c13323eaa1731ced2b716962fd".parse().unwrap();
let key2: KeyNibbles = "cfb".parse().unwrap();
assert!(key2.is_prefix_of(&key1));
assert!(!key1.is_prefix_of(&key2));
}
#[test]
fn nibbles_common_prefix_works() {
let key1: KeyNibbles = "1000000000000000000000000000000000000000".parse().unwrap();
let key2: KeyNibbles = "1200000000000000000000000000000000000000".parse().unwrap();
assert_eq!(key1.common_prefix(&key2), key2.common_prefix(&key1));
assert_eq!(key1.common_prefix(&key2).to_string(), "1");
let key3: KeyNibbles = "2dc".parse().unwrap();
let key4: KeyNibbles = "2da3183636aae21c2710b5bd4486903f8541fb80".parse().unwrap();
assert_eq!(key3.common_prefix(&key4), "2d".parse().unwrap());
let key5: KeyNibbles = "2da".parse().unwrap();
let key6: KeyNibbles = "2da3183636aae21c2710b5bd4486903f8541fb80".parse().unwrap();
assert_eq!(key5.common_prefix(&key6), "2da".parse().unwrap());
}
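    // Added illustration of the byte/nibble packing described in the type-level docs:
    // converting from raw bytes doubles the nibble count, and `get` reads the high
    // nibble of each byte first.
    #[test]
    fn from_bytes_packs_two_nibbles_per_byte() {
        let key = KeyNibbles::from(&[0xcfu8, 0xb9][..]);
        assert_eq!(key.len(), 4);
        assert_eq!(key.to_string(), "cfb9");
        assert_eq!(key.get(0), Some(0xc));
        assert_eq!(key.get(1), Some(0xf));
        assert_eq!(key.get(3), Some(0x9));
    }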
}
| true |
589ae1a23c172ee5616c02f5cfdcfc8a6f23444f
|
Rust
|
clap-rs/clap
|
/clap_bench/benches/04_new_help.rs
|
UTF-8
| 6,089 | 2.984375 | 3 |
[
"Apache-2.0",
"MIT"
] |
permissive
|
use clap::Command;
use clap::{arg, Arg, ArgAction};
use criterion::{criterion_group, criterion_main, Criterion};
fn build_help(cmd: &mut Command) -> String {
let help = cmd.render_help();
help.to_string()
}
fn app_example1() -> Command {
Command::new("MyApp")
.version("1.0")
.author("Kevin K. <[email protected]>")
.about("Does awesome things")
.arg(
arg!(
-c --config <FILE> "Sets a custom config file"
)
.required(false),
)
.arg(arg!(<output> "Sets an optional output file"))
.arg(arg!(d: -d ... "Turn debugging information on"))
.subcommand(
Command::new("test")
.about("does testing things")
.arg(arg!(-l --list "lists test values")),
)
}
fn app_example2() -> Command {
Command::new("MyApp")
.version("1.0")
.author("Kevin K. <[email protected]>")
.about("Does awesome things")
}
fn app_example3() -> Command {
Command::new("MyApp")
.arg(
Arg::new("debug")
.help("turn on debugging information")
.short('d')
.action(ArgAction::SetTrue),
)
.args([
Arg::new("config")
.help("sets the config file to use")
.action(ArgAction::Set)
.short('c')
.long("config"),
Arg::new("input")
.help("the input file to use")
.required(true),
])
.arg(arg!(--license "display the license file"))
.arg(arg!([output] "Supply an output file to use"))
.arg(
arg!(
-i --int <IFACE> "Set an interface to use"
)
.required(false),
)
}
fn app_example4() -> Command {
Command::new("MyApp")
.about("Parses an input file to do awesome things")
.version("1.0")
.author("Kevin K. <[email protected]>")
.arg(
Arg::new("debug")
.help("turn on debugging information")
.short('d')
.action(ArgAction::SetTrue)
.long("debug"),
)
.arg(
Arg::new("config")
.help("sets the config file to use")
.short('c')
.long("config"),
)
.arg(
Arg::new("input")
.help("the input file to use")
.index(1)
.required(true),
)
}
fn app_example5() -> Command {
Command::new("MyApp").arg(
Arg::new("awesome")
.help("turns up the awesome")
.short('a')
.long("awesome")
.action(ArgAction::Count),
)
}
fn app_example6() -> Command {
Command::new("MyApp")
.arg(
Arg::new("input")
.help("the input file to use")
.index(1)
.requires("config")
.required(true),
)
.arg(Arg::new("config").help("the config file to use").index(2))
}
fn app_example7() -> Command {
Command::new("MyApp")
.arg(Arg::new("config"))
.arg(Arg::new("output"))
.arg(
Arg::new("input")
.help("the input file to use")
.num_args(1..)
.action(ArgAction::Append)
.required(true)
.short('i')
.long("input")
.requires("config")
.conflicts_with("output"),
)
}
fn app_example8() -> Command {
Command::new("MyApp")
.arg(Arg::new("config"))
.arg(Arg::new("output"))
.arg(
Arg::new("input")
.help("the input file to use")
.num_args(1..)
.action(ArgAction::Append)
.required(true)
.short('i')
.long("input")
.requires("config")
.conflicts_with("output"),
)
}
fn app_example10() -> Command {
Command::new("myapp").about("does awesome things").arg(
Arg::new("CONFIG")
.help("The config file to use (default is \"config.json\")")
.short('c')
.action(ArgAction::Set),
)
}
pub fn example1(c: &mut Criterion) {
let mut cmd = app_example1();
c.bench_function("example1", |b| b.iter(|| build_help(&mut cmd)));
}
pub fn example2(c: &mut Criterion) {
let mut cmd = app_example2();
c.bench_function("example2", |b| b.iter(|| build_help(&mut cmd)));
}
pub fn example3(c: &mut Criterion) {
let mut cmd = app_example3();
c.bench_function("example3", |b| b.iter(|| build_help(&mut cmd)));
}
pub fn example4(c: &mut Criterion) {
let mut cmd = app_example4();
c.bench_function("example4", |b| b.iter(|| build_help(&mut cmd)));
}
pub fn example5(c: &mut Criterion) {
let mut cmd = app_example5();
c.bench_function("example5", |b| b.iter(|| build_help(&mut cmd)));
}
pub fn example6(c: &mut Criterion) {
let mut cmd = app_example6();
c.bench_function("example6", |b| b.iter(|| build_help(&mut cmd)));
}
pub fn example7(c: &mut Criterion) {
let mut cmd = app_example7();
c.bench_function("example7", |b| b.iter(|| build_help(&mut cmd)));
}
pub fn example8(c: &mut Criterion) {
let mut cmd = app_example8();
c.bench_function("example8", |b| b.iter(|| build_help(&mut cmd)));
}
pub fn example10(c: &mut Criterion) {
let mut cmd = app_example10();
c.bench_function("example10", |b| b.iter(|| build_help(&mut cmd)));
}
pub fn example4_template(c: &mut Criterion) {
let mut cmd = app_example4().help_template("{name} {version}\n{author}\n{about}\n\nUSAGE:\n {usage}\n\nOPTIONS:\n{options}\n\nARGS:\n{args}\n");
c.bench_function("example4_template", |b| b.iter(|| build_help(&mut cmd)));
}
criterion_group!(
benches,
example1,
example2,
example3,
example4,
example5,
example6,
example7,
example8,
example10,
example4_template
);
criterion_main!(benches);
| true |
a236e0f3b3e8e6016f2b3ad9bde28b4e2abffd7b
|
Rust
|
wladwm/surge-ping
|
/src/error.rs
|
UTF-8
| 1,058 | 2.75 | 3 |
[
"MIT"
] |
permissive
|
#![allow(dead_code)]
use std::io;
use thiserror::Error;
pub type Result<T> = std::result::Result<T, SurgeError>;
/// An error resulting from a ping option-setting or send/receive operation.
///
#[derive(Error, Debug)]
pub enum SurgeError {
#[error("buffer size was too small")]
IncorrectBufferSize,
#[error("malformed packet: {0}")]
MalformedPacket(#[from] MalformedPacketError),
#[error("io error")]
IOError(#[from] io::Error),
#[error("Request timeout for icmp_seq {seq}")]
Timeout { seq: u16 },
#[error("Echo Request packet.")]
EchoRequestPacket,
#[error("Network error.")]
NetworkError,
}
#[derive(Error, Debug)]
pub enum MalformedPacketError {
#[error("expected an Ipv4Packet")]
NotIpv4Packet,
#[error("expected an Ipv6Packet")]
NotIpv6Packet,
#[error("expected an Icmpv4Packet payload")]
NotIcmpv4Packet,
#[error("expected an Icmpv6Packet")]
NotIcmpv6Packet,
#[error("payload too short, got {got}, want {want}")]
PayloadTooShort { got: usize, want: usize },
}
| true |
a94958284da5941cf7a398e8c9e6115f15e97106
|
Rust
|
seanlees/rust-yt-dl
|
/examples/process_test.rs
|
UTF-8
| 2,218 | 2.5625 | 3 |
[] |
no_license
|
extern crate core;
extern crate local_encoding;
extern crate regex;
use std::process::{Command, Stdio, Child};
use std::error::Error as EType;
use std::io::{BufReader, Read, BufRead};
use std::thread;
use core::time;
use local_encoding::{Encoding, Encoder};
use regex::Regex;
fn main() {
let url = " https://www.youtube.com/watch?v=-SsllwKLYJc";
let mut cmd = Command::new("E:/workspace/rust-yt-dl/resources/youtube-dl.exe");
//cmd.current_dir("E:/workspace/rust-yt-dl/resources/");
cmd.args(&["--proxy", "127.0.0.1:1080"]);
cmd.arg("--get-filename");
let child = cmd
.arg(url)
.stdin(Stdio::null())
.stdout(Stdio::piped())
.stderr(Stdio::piped())
.spawn();
let mut process = child.unwrap();
let mut stdout_buffer = BufReader::new(process.stdout.take().unwrap());
let mut stderr_buffer = BufReader::new(process.stderr.take().unwrap());
//let mut stdout: String = String::new();
//stdout_buffer.read_to_string(&mut stdout);
let mut stderr: String = String::new();
stderr_buffer.read_to_string(&mut stderr).expect("failed to read stderr");
let mut buf = Vec::<u8>::new();
stdout_buffer.read_to_end(&mut buf).expect("read_until failed");
process.wait().expect("failed to wait on youtube-dl");
println!("get_file_name: {:?}", Encoding::OEM.to_string(buf.as_slice()).unwrap().trim_end());
println!("error: {:?}", stderr);
/*let capture = REGEX_NAME.captures(&stdout.trim());
if stderr.is_empty() && capture.is_some() {
let caps = capture.unwrap();
println!("get_file_name: {:?}", stdout);
} else {
println!("error: {:?}", stderr);
}*/
//-------------------
let output = Command::new("E:/workspace/rust-yt-dl/resources/youtube-dl.exe")
.args(&["--proxy", "127.0.0.1:1080"])
.arg("--get-filename")
.arg(url)
.output().unwrap_or_else(|e| {
panic!("failed to execute process: {}", e)
});
if output.status.success() {
let s = String::from_utf8_lossy(&output.stdout);
print!("rustc succeeded and stdout was:\n{}", s);
} else {
let s = String::from_utf8_lossy(&output.stderr);
print!("rustc failed and stderr was:\n{}", s);
}
}
| true |
3a90c11c4ac09758a53f8562198f1037736856e8
|
Rust
|
starblue/advent_of_code
|
/a2020/src/bin/a202021.rs
|
UTF-8
| 5,270 | 2.84375 | 3 |
[] |
no_license
|
use core::slice::Iter;
use std::collections::BTreeMap;
use std::collections::HashMap;
use std::collections::HashSet;
use std::fmt;
use std::io;
use std::io::Read;
use nom::bytes::complete::tag;
use nom::character::complete::alpha1;
use nom::character::complete::line_ending;
use nom::combinator::map;
use nom::combinator::recognize;
use nom::multi::many1;
use nom::multi::separated_list1;
use nom::IResult;
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
struct Ingredient(String);
impl fmt::Display for Ingredient {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.0)
}
}
#[derive(Clone, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
struct Allergen(String);
impl fmt::Display for Allergen {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.0)
}
}
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
struct IngredientList(Vec<Ingredient>);
impl IngredientList {
fn iter(&self) -> Iter<Ingredient> {
self.0[..].iter()
}
}
impl fmt::Display for IngredientList {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let mut sep = "";
for i in &self.0 {
write!(f, "{}{}", sep, i)?;
sep = " ";
}
Ok(())
}
}
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
struct AllergenList(Vec<Allergen>);
impl AllergenList {
fn iter(&self) -> Iter<Allergen> {
self.0[..].iter()
}
}
impl fmt::Display for AllergenList {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let mut sep = "";
for i in &self.0 {
write!(f, "{}{}", sep, i)?;
sep = ", ";
}
Ok(())
}
}
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
struct Food {
ingredients: IngredientList,
allergens: AllergenList,
}
impl fmt::Display for Food {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{} (contains {})", self.ingredients, self.allergens)
}
}
fn name(i: &str) -> IResult<&str, String> {
map(recognize(alpha1), String::from)(i)
}
fn ingredient(i: &str) -> IResult<&str, Ingredient> {
let (i, n) = name(i)?;
Ok((i, Ingredient(n)))
}
fn ingredients(i: &str) -> IResult<&str, IngredientList> {
let (i, is) = separated_list1(tag(" "), ingredient)(i)?;
Ok((i, IngredientList(is)))
}
fn allergen(i: &str) -> IResult<&str, Allergen> {
let (i, n) = name(i)?;
Ok((i, Allergen(n)))
}
fn allergens(i: &str) -> IResult<&str, AllergenList> {
let (i, is) = separated_list1(tag(", "), allergen)(i)?;
Ok((i, AllergenList(is)))
}
fn food(i: &str) -> IResult<&str, Food> {
let (i, ingredients) = ingredients(i)?;
let (i, _) = tag(" (contains ")(i)?;
let (i, allergens) = allergens(i)?;
let (i, _) = tag(")")(i)?;
let (i, _) = line_ending(i)?;
Ok((
i,
Food {
ingredients,
allergens,
},
))
}
fn input(i: &str) -> IResult<&str, Vec<Food>> {
many1(food)(i)
}
fn main() {
let mut input_data = String::new();
io::stdin()
.read_to_string(&mut input_data)
.expect("I/O error");
// make nom happy
input_data.push('\n');
// parse input
let result = input(&input_data);
//println!("{:?}", result);
let foods = result.unwrap().1;
let mut can_occur_in: HashMap<Allergen, HashSet<Ingredient>> = HashMap::new();
for f in &foods {
for a in f.allergens.iter() {
let ingredient_set = f
.ingredients
.iter()
.cloned()
.collect::<HashSet<Ingredient>>();
if let Some(ingredients) = can_occur_in.get_mut(a) {
*ingredients = ingredients
.intersection(&ingredient_set)
.cloned()
.collect::<HashSet<Ingredient>>();
} else {
can_occur_in.insert(a.clone(), ingredient_set);
}
}
}
let mut allergen_ingredients = HashSet::new();
for is in can_occur_in.values() {
for i in is.iter() {
allergen_ingredients.insert(i.clone());
}
}
let mut count = 0;
for f in &foods {
for i in f.ingredients.iter() {
if !allergen_ingredients.contains(i) {
count += 1;
}
}
}
let result_a = count;
let mut allergen_map: BTreeMap<Allergen, Ingredient> = BTreeMap::new();
loop {
let mut find = None;
for a in can_occur_in.keys() {
if can_occur_in[a].len() == 1 {
// found unique ingredient for allergen
let i = can_occur_in[a].iter().cloned().next().unwrap();
find = Some((a, i));
}
}
if let Some((a, i)) = find {
allergen_map.insert(a.clone(), i.clone());
for is in can_occur_in.values_mut() {
is.remove(&i);
}
} else {
break;
}
}
let mut s = String::new();
let mut sep = "";
for i in allergen_map.values() {
s.push_str(&format!("{}{}", sep, i));
sep = ",";
}
let result_b = s;
println!("a: {}", result_a);
println!("b: {}", result_b);
}
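// Added sketch, not part of the original solution: a quick check of the nom parsers
// above on the example line from the puzzle statement, to make the grammar concrete.
#[cfg(test)]
mod tests {
    use super::*;
    #[test]
    fn parses_a_food_line() {
        let (rest, f) = food("mxmxvkd kfcds sqjhc nhms (contains dairy, fish)\n").unwrap();
        assert_eq!(rest, "");
        assert_eq!(f.ingredients.0.len(), 4);
        assert_eq!(f.allergens.0.len(), 2);
        assert_eq!(f.to_string(), "mxmxvkd kfcds sqjhc nhms (contains dairy, fish)");
    }
}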
| true |
42b8f2b6f17ced0ea9a19b70eabceeededa506e0
|
Rust
|
mjbryant/advent
|
/2018/day_3/part_a/main.rs
|
UTF-8
| 3,203 | 3.671875 | 4 |
[] |
no_license
|
use std::env;
use std::fs::File;
use std::io::{BufRead, BufReader};
#[derive(Debug)]
struct Rectangle {
id: u32,
x: usize,
y: usize,
width: usize,
height: usize,
}
impl Rectangle {
pub fn from_input(input: &str) -> Self {
let mut words = input.split_whitespace();
let rect_id = words.next().unwrap().trim_start_matches('#').parse::<u32>().unwrap();
let x_y = words.nth(1).unwrap();
let x_y_split: Vec<&str> = x_y.split(',').collect();
let x = x_y_split[0].parse::<usize>().unwrap();
let y = x_y_split[1].trim_end_matches(':').parse::<usize>().unwrap();
let w_h = words.next().unwrap();
let w_h_split: Vec<&str> = w_h.split('x').collect();
let w = w_h_split[0].parse::<usize>().unwrap();
let h = w_h_split[1].parse::<usize>().unwrap();
Self {
id: rect_id,
x,
y,
width: w,
height: h,
}
}
}
fn make_empty_grid(rects: &[Rectangle]) -> Vec<Vec<u32>> {
let mut width = 0;
let mut height = 0;
for rect in rects {
if rect.x + rect.width > width {
width = rect.x + rect.width;
}
if rect.y + rect.height > height {
height = rect.y + rect.height;
}
}
vec![vec![0; width]; height]
}
fn populate_grid(grid: &mut[Vec<u32>], rects: &[Rectangle]) {
for rect in rects {
let mut i = rect.y;
while i < (rect.y + rect.height) {
let mut j = rect.x;
while j < (rect.x + rect.width) {
grid[i][j] += 1;
j += 1;
}
i += 1;
}
}
}
fn count_overlapping(grid: &[Vec<u32>]) -> u32 {
let mut total = 0;
for row in grid {
for cell in row {
if cell > &1 {
total += 1;
}
}
}
total
}
fn print_grid(grid: &[Vec<u32>]) {
for row in grid {
println!("{:?}", row);
}
}
/**
* Goal: given a list of rectangles in the format `X,Y: WxH`, find the total
* number of square inches that overlap between at least two rectangles.
* The easiest way I can think of to do this (and probably the least clever),
* is to just keep a big grid of integers representing the entire space. For
* every rect you then increment each of the cells in its area.
*/
fn main() {
let args: Vec<String> = env::args().collect();
let filename = match args.len() {
1 => "input.txt",
_ => &args[1],
};
// Read in the file and create the Rectangle structs
let reader = BufReader::new(File::open(filename).expect("Failed to read file"));
let rects: Vec<Rectangle> = reader.lines().map(|line_result| {
let line = line_result.unwrap();
Rectangle::from_input(&line)
}).collect();
// Create the encompassing grid and populate it from the rectangles
let mut grid: Vec<Vec<u32>> = make_empty_grid(rects.as_slice());
populate_grid(&mut grid, rects.as_slice());
// Count the total number of overlapping squares
let overlapping = count_overlapping(&grid);
println!("There are {:?} overlapping squares", overlapping);
}
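// Added illustration (not in the original solution): `Rectangle::from_input` applied to
// a claim line in the `#ID @ X,Y: WxH` format that `main` assumes for its input file.
#[cfg(test)]
mod tests {
    use super::*;
    #[test]
    fn parses_a_claim_line() {
        let rect = Rectangle::from_input("#1 @ 1,3: 4x4");
        assert_eq!(rect.id, 1);
        assert_eq!(rect.x, 1);
        assert_eq!(rect.y, 3);
        assert_eq!(rect.width, 4);
        assert_eq!(rect.height, 4);
    }
}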
| true |
be123d5ecaa0518165f0791518c671020ecabf77
|
Rust
|
epwalsh/check-links
|
/src/main.rs
|
UTF-8
| 5,592 | 2.640625 | 3 |
[] |
no_license
|
#[macro_use]
extern crate lazy_static;
use std::sync::Arc;
use std::time::Duration;
use exitfailure::ExitFailure;
use ignore::WalkBuilder;
use structopt::StructOpt;
use tokio::sync::mpsc::channel;
mod doc_file;
mod link;
mod log;
use doc_file::DocFile;
use link::LinkStatus;
use log::Logger;
#[derive(Debug, StructOpt)]
#[structopt(
name = "check-links",
about = "Check the links in your crate's documentation.",
raw(setting = "structopt::clap::AppSettings::ColoredHelp")
)]
struct Opt {
/// Verbose mode (-v, -vv, -vvv, etc)
#[structopt(short = "v", long = "verbose", parse(from_occurrences))]
verbose: usize,
/// Don't log in color
#[structopt(long = "no-color")]
no_color: bool,
/// Set the maximum directory depth to recurse
#[structopt(short = "d", long = "depth")]
depth: Option<usize>,
/// Specify the timeout for HTTP requests
#[structopt(short = "t", long = "timeout", default_value = "3")]
timeout: u64,
}
fn maybe_pluralize(n: u32) -> &'static str {
match n {
1 => "",
_ => "s",
}
}
#[tokio::main(threaded_scheduler)]
async fn main() -> Result<(), ExitFailure> {
let opt = Opt::from_args();
let mut logger = Logger::default(opt.verbose, !opt.no_color);
logger.debug(&format!("{:?}", opt)[..])?;
// Initialize a MPSC channel. Each link to check will get its own copy
// of the transmitter `tx`. When the link is verified we'll send the results through
// the channel to the receiver `rx`. Then we gather all the results and log them
// to the terminal.
let (tx, mut rx) = channel(100);
// We'll search all Rust and Markdown files.
let doc_files = vec![
// Rust files.
DocFile::new(
vec!["*.rs"],
r"^\s*(///|//!).*\[[^\[\]]+\]\(([^\(\)]+)\)",
2,
),
// Markdown files.
DocFile::new(vec!["*.md"], r"\[[^\[\]]+\]\(([^\(\)]+)\)", 1),
];
// Build file iterator.
// We iterator through all non-hidden Rust and Markdown files not included in a .gitignore.
let file_iter = WalkBuilder::new("./")
.max_depth(opt.depth)
.build()
.filter_map(Result::ok)
.filter(|x| match x.file_type() {
Some(file_type) => file_type.is_file(),
None => false,
})
.map(|x| x.into_path());
// Keep track of the total number of links so we know how many the receiver `rx`
// should be expecting.
let mut n_links = 0u32;
// Configure HTTP client.
let http_client = Arc::new(
isahc::HttpClient::builder()
.timeout(Duration::from_secs(opt.timeout))
.connect_timeout(Duration::from_secs(opt.timeout))
.build()?,
);
// Now iter through all files in our `file_iter` and check if they match one of
// the doc files.
for path in file_iter {
for doc_file in &doc_files {
if doc_file.is_match(&path) {
logger.debug(&format!("Searching {}", path.display())[..])?;
// Search for links in the file. For each link found, we spawn a task
// that will verify the link and report the results to the channel.
doc_file.iter_links(&path, |mut link| {
n_links += 1;
let mut tx = tx.clone();
let http_client = http_client.clone();
tokio::spawn(async move {
link.verify(http_client).await;
if tx.send(link).await.is_err() {
std::process::exit(1);
};
});
})?;
break;
} else {
logger.debug(&format!("Ignoring {}", path.display())[..])?;
}
}
}
// Drop transmitter so that the receiver knows it is finished.
drop(tx);
// Now loop through all the links we found and log the results to the terminal.
let mut n_errors = 0u32;
let mut n_warnings = 0u32;
while let Some(link) = rx.recv().await {
match link.status.as_ref().unwrap() {
LinkStatus::Reachable => {
logger.info(&format!("✓ {}", link)[..])?;
}
LinkStatus::Questionable(reason) => {
n_warnings += 1;
logger.warn(&format!("✗ {}\n ► {}", link, reason)[..])?;
}
LinkStatus::Unreachable(reason) => {
n_errors += 1;
match reason {
Some(s) => logger.error(&format!("✗ {}\n ► {}", link, s)[..])?,
None => logger.error(&format!("✗ {}", link)[..])?,
};
}
};
}
if n_links == 0 {
logger.info("No links found")?;
} else if n_errors > 0 {
// Exit with an error code if any bad links were found.
logger.error(&format!(
"{} error{}, {} warning{} out of {} link{} found",
n_errors,
maybe_pluralize(n_errors),
n_warnings,
maybe_pluralize(n_warnings),
n_links,
maybe_pluralize(n_links)
))?;
std::process::exit(1);
} else {
logger.info(&format!(
"{} error{}, {} warning{} out of {} link{} found",
n_errors,
maybe_pluralize(n_errors),
n_warnings,
maybe_pluralize(n_warnings),
n_links,
maybe_pluralize(n_links)
))?;
}
Ok(())
}
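// A minimal, self-contained sketch of the fan-in pattern used in `main` above: every
// spawned task gets its own clone of the transmitter, and the receiver drains results
// until all senders have been dropped. The function and values are illustrative only
// and are not called anywhere in the crate.
#[allow(dead_code)]
async fn mpsc_fan_in_sketch() {
    let (tx, mut rx) = channel::<u32>(100);
    for i in 0..3u32 {
        let mut tx = tx.clone();
        tokio::spawn(async move {
            // Pretend `i * 2` is the verification result for link `i`.
            let _ = tx.send(i * 2).await;
        });
    }
    // Drop the original transmitter so `recv` returns `None` once every task finishes.
    drop(tx);
    while let Some(result) = rx.recv().await {
        println!("got {}", result);
    }
}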
| true |
1c31c8503ea381962ddf9af511ca4ef0f95324f7
|
Rust
|
noscripter/differential-dataflow
|
/src/stream.rs
|
UTF-8
| 2,707 | 2.765625 | 3 |
[
"MIT"
] |
permissive
|
use std::hash::Hash;
use timely::Data;
use timely::progress::Timestamp;
use timely::dataflow::scopes::Child;
use timely::dataflow::{Scope, Stream};
use timely::dataflow::operators::*;
use ::Delta;
/// A mutable collection of values of type `D`
#[derive(Clone)]
pub struct Collection<G: Scope, D: Data> {
pub inner: Stream<G, (D, Delta)>
}
impl<G: Scope, D: Data> Collection<G, D> {
pub fn new(inner: Stream<G, (D, Delta)>) -> Collection<G, D> {
Collection {
inner
}
}
pub fn map<D2: Data, L: Fn(D) -> D2 + 'static>(&self, logic: L) -> Collection<G, D2> {
Collection {
inner: self.inner.map(move |(data, delta)| (logic(data), delta))
}
}
pub fn map_in_place<L: Fn(&mut D) + 'static>(&self, logic: L) -> Collection<G, D> {
Collection {
inner: self.inner.map_in_place(move |&mut (ref mut data, _)| logic(data))
}
}
pub fn negate(&self) -> Collection<G, D> {
Collection {
inner: self.inner.map_in_place(|x| x.1 *= -1)
}
}
pub fn filter<L: Fn(&D) -> bool + 'static>(&self, logic: L) -> Collection<G, D> {
Collection {
inner: self.inner.filter(move |&(ref data, _)| logic(data))
}
}
pub fn concat(&self, other: &Collection<G, D>) -> Collection<G, D> {
Collection {
inner: self.inner.concat(&other.inner)
}
}
pub fn enter<T: Timestamp>(&self, child: &Child<G, T>) -> Collection<Child<G, T>, D> {
Collection {
inner: self.inner.enter(child)
}
}
pub fn enter_at<T: Timestamp, F: Fn(&(D, Delta)) -> T + 'static>(&self, child: &Child<G, T>, initial: F) -> Collection<Child<G, T>, D> where G::Timestamp: Hash, T: Hash {
Collection {
inner: self.inner.enter_at(child, initial)
}
}
pub fn inspect<F: FnMut(&(D, Delta))+'static>(&self, func: F) -> Collection<G, D> {
Collection {
inner: self.inner.inspect(func)
}
}
pub fn inspect_batch<F: FnMut(&G::Timestamp, &[(D, Delta)])+'static>(&self, func: F) -> Collection<G, D> {
Collection {
inner: self.inner.inspect_batch(func)
}
}
pub fn probe(&self) -> (probe::Handle<G::Timestamp>, Collection<G, D>) {
let (handle, stream) = self.inner.probe();
(handle, Collection {
inner: stream
})
}
pub fn scope(&self) -> G {
self.inner.scope()
}
}
impl<G: Scope, T: Timestamp, D: Data> Collection<Child<G, T>, D> {
pub fn leave(&self) -> Collection<G, D> {
Collection {
inner: self.inner.leave()
}
}
}
| true |
1e1dc8c79b362f593d1d89604a3231987340c1a4
|
Rust
|
u-03c9/sctp
|
/src/chunk/chunk_header.rs
|
UTF-8
| 3,837 | 3.203125 | 3 |
[
"MIT"
] |
permissive
|
use super::{chunk_type::*, *};
use bytes::{Buf, BufMut, Bytes, BytesMut};
use std::fmt;
///ChunkHeader represents an SCTP Chunk header, defined in https://tools.ietf.org/html/rfc4960#section-3.2
///The figure below illustrates the field format for the chunks to be
///transmitted in the SCTP packet. Each chunk is formatted with a Chunk
///Type field, a chunk-specific Flag field, a Chunk Length field, and a
///Value field.
///
/// 0 1 2 3
/// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
///+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
///| Chunk Type | Chunk Flags | Chunk Length |
///+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
///| |
///| Chunk Value |
///| |
///+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
#[derive(Debug, Clone)]
pub(crate) struct ChunkHeader {
pub(crate) typ: ChunkType,
pub(crate) flags: u8,
pub(crate) value_length: u16,
}
pub(crate) const CHUNK_HEADER_SIZE: usize = 4;
/// makes ChunkHeader printable
impl fmt::Display for ChunkHeader {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}", self.typ)
}
}
impl Chunk for ChunkHeader {
fn header(&self) -> ChunkHeader {
self.clone()
}
fn unmarshal(raw: &Bytes) -> Result<Self, Error> {
if raw.len() < CHUNK_HEADER_SIZE {
return Err(Error::ErrChunkHeaderTooSmall);
}
let reader = &mut raw.clone();
let typ = ChunkType(reader.get_u8());
let flags = reader.get_u8();
let length = reader.get_u16();
if length < CHUNK_HEADER_SIZE as u16 {
return Err(Error::ErrChunkHeaderInvalidLength);
}
// Length includes Chunk header
let value_length = length as isize - CHUNK_HEADER_SIZE as isize;
let length_after_value = raw.len() as isize - length as isize;
if length_after_value < 0 {
return Err(Error::ErrChunkHeaderNotEnoughSpace);
} else if length_after_value < 4 {
// https://tools.ietf.org/html/rfc4960#section-3.2
// The Chunk Length field does not count any chunk PADDING.
// Chunks (including Type, Length, and Value fields) are padded out
// by the sender with all zero bytes to be a multiple of 4 bytes
// long. This PADDING MUST NOT be more than 3 bytes in total. The
// Chunk Length value does not include terminating PADDING of the
// chunk. However, it does include PADDING of any variable-length
// parameter except the last parameter in the chunk. The receiver
// MUST ignore the PADDING.
for i in (1..=length_after_value).rev() {
let padding_offset = CHUNK_HEADER_SIZE + (value_length + i - 1) as usize;
if raw[padding_offset] != 0 {
return Err(Error::ErrChunkHeaderPaddingNonZero);
}
}
}
Ok(ChunkHeader {
typ,
flags,
value_length: length - CHUNK_HEADER_SIZE as u16,
})
}
fn marshal_to(&self, writer: &mut BytesMut) -> Result<usize, Error> {
writer.put_u8(self.typ.0);
writer.put_u8(self.flags);
writer.put_u16(self.value_length + CHUNK_HEADER_SIZE as u16);
Ok(writer.len())
}
fn check(&self) -> Result<(), Error> {
Ok(())
}
fn value_length(&self) -> usize {
self.value_length as usize
}
fn as_any(&self) -> &(dyn Any + Send + Sync) {
self
}
}
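// Illustrative round-trip check, added as a sketch rather than part of the original
// file: it exercises the header layout documented above. The chunk type value `1` and
// the 4-byte value bytes are arbitrary example choices.
#[cfg(test)]
mod chunk_header_example {
    use super::*;
    #[test]
    fn marshal_then_unmarshal_recovers_the_header() {
        let header = ChunkHeader {
            typ: ChunkType(1),
            flags: 0,
            value_length: 4,
        };
        let mut buf = BytesMut::new();
        header.marshal_to(&mut buf).unwrap();
        // Append the 4-byte chunk value so the declared length matches the buffer.
        buf.put_slice(&[0xde, 0xad, 0xbe, 0xef]);
        let raw = buf.freeze();
        let parsed = ChunkHeader::unmarshal(&raw).unwrap();
        assert_eq!(parsed.typ.0, 1);
        assert_eq!(parsed.flags, 0);
        assert_eq!(parsed.value_length(), 4);
    }
}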
| true |
13d52852b3b5072711d4d89e4951b700aae1140b
|
Rust
|
pchampin/sophia_rs
|
/resource/src/resource/_error.rs
|
UTF-8
| 3,594 | 2.953125 | 3 |
[
"LicenseRef-scancode-cecill-b-en",
"CECILL-B"
] |
permissive
|
use sophia_api::term::TermKind;
use sophia_api::{prelude::*, term::SimpleTerm};
use std::error::Error;
use std::fmt;
/// An error raised when creating a [`Resource`](crate::Resource)
#[derive(Debug)]
pub enum ResourceError<E: Error> {
/// The IRI is not absolute (and can therefore not be dereferenced)
IriNotAbsolute(IriRef<Box<str>>),
/// The resource could not be loaded
LoaderError(crate::loader::LoaderError),
/// The underlying graph raised an error
GraphError {
/// The identifier of the resource
id: SimpleTerm<'static>,
/// The error that was raised
error: E,
},
/// A value was expected but not found for the given predicate on the given resource
NoValueFor {
/// The identifier of the resource
id: SimpleTerm<'static>,
/// The predicate
predicate: SimpleTerm<'static>,
},
/// Multiple values were found for the given predicate where only one was expected
UnexpectedMultipleValueFor {
/// The identifier of the resource
id: SimpleTerm<'static>,
/// The predicate
predicate: SimpleTerm<'static>,
},
// Variants below are not used in this crate,
// but are common cases of errors in [`TypedResource`] implementations.
/// The node was expected to have given type
MissingType {
/// The identifier of the resource
id: SimpleTerm<'static>,
/// The expected type
typ: SimpleTerm<'static>,
},
/// The term was expected to have a given kind
UnexpectedKind {
/// The identifier of the resource
id: SimpleTerm<'static>,
/// The predicate
predicate: SimpleTerm<'static>,
/// The term kind that was found
found_kind: TermKind,
},
/// The literal was expected to have a given datatype
UnexpectedDatatype {
/// The identifier of the resource
id: SimpleTerm<'static>,
/// The predicate
predicate: SimpleTerm<'static>,
/// The datatype that was found
found_datatype: TermKind,
},
}
impl<E: Error> ResourceError<E> {
/// The identifier of the resource raising the error.
///
/// NB: for errors raised during creation ([`ResourceError::IriNotAbsolute`], [`ResourceError::LoaderError`]),
/// the identifier of the to-be-created resource is returned
/// (*not* the resource from which it was discovered).
pub fn resource_id(&self) -> SimpleTerm {
match self {
ResourceError::IriNotAbsolute(iriref) => iriref.as_simple(),
ResourceError::LoaderError(err) => err.iri().into_term(),
ResourceError::GraphError { id, .. } => id.as_simple(),
ResourceError::NoValueFor { id, .. } => id.as_simple(),
ResourceError::UnexpectedMultipleValueFor { id, .. } => id.as_simple(),
ResourceError::MissingType { id, .. } => id.as_simple(),
ResourceError::UnexpectedKind { id, .. } => id.as_simple(),
ResourceError::UnexpectedDatatype { id, .. } => id.as_simple(),
}
}
}
impl<E: Error> fmt::Display for ResourceError<E> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{:?}", self)
}
}
impl<E: Error> From<crate::loader::LoaderError> for ResourceError<E> {
fn from(value: crate::loader::LoaderError) -> Self {
Self::LoaderError(value)
}
}
impl<E: Error> Error for ResourceError<E> {}
/// A result whose error is a [`ResourceError`]
pub type ResourceResult<T, G> = Result<T, ResourceError<<G as Graph>::Error>>;
| true |
c9598bae5519aee949b62748c84f6dd9ce30ffc9
|
Rust
|
RustWorks/files-store
|
/users/src/domain/signup.rs
|
UTF-8
| 727 | 2.953125 | 3 |
[] |
no_license
|
use bcrypt::BcryptError;
use serde::Deserialize;
use std::convert::TryFrom;
use validator::Validate;
use crate::domain::User;
use crate::password::secure_user;
#[derive(Debug, Clone, Validate, Deserialize)]
pub struct Signup {
#[validate(length(min = 3, message = "validation.login.short"))]
pub login: String,
#[validate(email(message = "validation.email"))]
pub email: String,
#[validate(length(min = 6, message = "validation.password.short"))]
pub password: String,
}
impl TryFrom<Signup> for User {
type Error = BcryptError;
fn try_from(signup: Signup) -> Result<Self, Self::Error> {
let user = secure_user(signup.login, signup.email, signup.password)?;
Ok(user)
}
}
| true |
ef70ef9e934ff852796aefef88bb67258bc506a0
|
Rust
|
wayeast/demo-cookie-weirdness
|
/client/src/lib.rs
|
UTF-8
| 9,263 | 3.046875 | 3 |
[] |
no_license
|
use seed::{prelude::*, *};
// Paths
const LOGIN: &str = "login";
// ------ ------
// Init
// ------ ------
fn init(url: Url, orders: &mut impl Orders<Msg>) -> Model {
orders
.subscribe(Msg::UrlChanged)
.send_msg(Msg::CheckAuth);
let user = User::Loading;
Model {
base_url: url.to_base_url(),
page: Page::init(url, &user),
user,
}
}
// ------ ------
// Model
// ------ ------
struct Model {
base_url: Url,
page: Page,
user: User,
}
enum User {
Anonymous,
Loading,
Loaded(String),
}
// The idea is to follow the same pattern as the seed
// auth example. Only, instead of getting a LoggedUser
// directly from the login response, it must be fetched
// from a call to the /auth/check endpoint. The server
// doesn't care who seed thinks the user is; it will
// authenticate requests _only_ using the actix-identity::Identity
// cookie sent from the browser http-only cache.
enum Page {
Login { username: String, password: String },
Dashboard,
NotFound,
}
impl Page {
fn init(mut url: Url, user: &User) -> Self {
match user {
User::Anonymous => {
Self::Login {
username: String::new(),
password: String::new(),
}
}
User::Loading | User::Loaded(_) => {
match url.next_path_part() {
None => Self::Dashboard,
Some(LOGIN) => Self::Login {
username: String::new(),
password: String::new(),
},
Some(_) => Self::NotFound,
}
}
}
}
}
// ------ ------
// Urls
// ------ ------
struct_urls!();
impl<'a> Urls<'a> {
pub fn home(self) -> Url {
self.base_url()
}
pub fn login(self) -> Url {
self.base_url().add_path_part(LOGIN)
}
}
// ------ ------
// Update
// ------ ------
enum Msg {
// basic switching between a /login page and
// a / home page
UrlChanged(subs::UrlChanged),
// /auth/login messages
UpdateLoginUser(String),
UpdateLoginPass(String),
Login,
LoginResponse(fetch::Result<String>),
Logout,
LogoutResponse(fetch::Result<()>),
// /auth/check messages
CheckAuth,
AuthStatus(fetch::Result<String>),
}
fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg>) {
match msg {
Msg::UrlChanged(subs::UrlChanged(url)) => {
model.page = Page::init(url, &model.user);
}
Msg::CheckAuth => {
// check with the server whether or not we have
// a secure session cookie.
// Do this before the seed model knows that it's
// logged in, such as on startup, or with any url
// change before a successful login is submitted
log!("checking auth");
orders.perform_cmd(async {
Msg::AuthStatus(
async {
Request::new("/auth/check")
.timeout(5_000)
.fetch()
.await?
.check_status()?
.text()
.await
}
.await,
)
});
}
Msg::AuthStatus(Ok(user)) => {
log!("auth status ok:", user);
if user.is_empty() {
model.user = User::Anonymous;
request_url(Urls::new(&model.base_url).login(), orders);
} else {
model.user = User::Loaded(user);
}
}
Msg::AuthStatus(Err(e)) => {
#[cfg(debug_assertions)]
log!("Error checking auth:", e);
model.user = User::Anonymous;
request_url(Urls::new(&model.base_url).login(), orders);
}
Msg::UpdateLoginUser(user) => {
if let Page::Login { username, ..} = &mut model.page {
*username = user;
}
}
Msg::UpdateLoginPass(pass) => {
if let Page::Login { password, ..} = &mut model.page {
*password = pass;
}
}
Msg::Login => {
let (username, password) = match &model.page {
Page::Login { username, password, ..} => (username.clone(), password.clone()),
_ => return
};
// Here is where the problem appears to be. I want the
// app to wait until it has a LoginResponse from the server
// before doing _anything_. Yet things happen immediately
// after login is clicked -- the page refreshes, and auth
// status is checked again _before_ any response from the server
// has arrived! I don't understand why! Any help explaining
// this would be appreciated...
orders.perform_cmd(async move {
Msg::LoginResponse(
async {
Request::new("/auth/login")
.header(Header::authorization(format!(
"Basic {}",
base64::encode(format!("{}:{}", username, password))
)))
.timeout(5_000)
.fetch()
.await?
.check_status()?
.text()
.await
}
.await,
)
});
}
Msg::LoginResponse(Ok(user)) => {
// If there is an Ok response from out login request, great!
// We should have a session cookie in the browser and we
// can then go to our home page and have the app check our
// auth status successfully. If not, then we need to try to
// log in again.
log!("auth status ok:", user);
if user.is_empty() {
model.user = User::Anonymous;
request_url(Urls::new(&model.base_url).login(), orders);
} else {
model.user = User::Loaded(user);
request_url(Urls::new(&model.base_url).home(), orders);
}
}
Msg::LoginResponse(Err(e)) => {
#[cfg(debug_assertions)]
log!("Error checking auth:", e);
model.user = User::Anonymous;
request_url(Urls::new(&model.base_url).login(), orders);
}
Msg::Logout => {
orders.perform_cmd(async move {
Msg::LogoutResponse(
async {
Request::new("/auth/logout")
.fetch()
.await?
.check_status()?;
Ok(())
}
.await,
)
});
}
Msg::LogoutResponse(Ok(())) => {
log!("User has been logged out.");
model.user = User::Anonymous;
request_url(Urls::new(&model.base_url).login(), orders);
}
Msg::LogoutResponse(Err(e)) => {
error!("Log out failed:", e);
}
}
}
fn request_url(url: Url, orders: &mut impl Orders<Msg>) {
orders.notify(subs::UrlRequested::new(url));
}
// ------ ------
// View
// ------ ------
fn view(model: &Model) -> Node<Msg> {
match &model.page {
Page::Login {
username, password, ..
} => div![
h1!["Login"],
form![
ev(Ev::Submit, move |event| {
event.prevent_default();
Msg::Login
}),
label!["Enter your email:"],
input![
attrs! {
At::Value => username;
At::Placeholder => "[email protected]";
},
input_ev(Ev::Input, Msg::UpdateLoginUser),
],
label!["Enter your password:"],
input![
attrs! {
At::Value => password;
At::Type => "password";
At::Placeholder => "password";
},
input_ev(Ev::Input, Msg::UpdateLoginPass),
],
button!["Log In"],
],
],
Page::Dashboard => {
if let User::Loaded(user) = &model.user {
div![
h1![format!("{}'s Dashboard", user)],
button![
"Log Out",
ev(Ev::Click, |_| Msg::Logout),
],
]
} else {
div!["unexpected model state"]
}
}
Page::NotFound => div!["Page not found"],
}
}
// ------ ------
// Start
// ------ ------
#[wasm_bindgen(start)]
pub fn start() {
App::start("app", init, update, view);
}
| true |
146852397f48a55207a9d6cd16450863f2fdf0b3
|
Rust
|
JeeZeh/advent-of-code
|
/2022/src/day18.rs
|
UTF-8
| 3,658 | 3.296875 | 3 |
[] |
no_license
|
use std::{
collections::{HashMap, HashSet, VecDeque},
ops::Add,
};
const DIRECTIONS: [Pos; 6] = [
Pos(-1, 0, 0), // Left
Pos(1, 0, 0), // Right
Pos(0, 1, 0), // Up
Pos(0, -1, 0), // Down
Pos(0, 0, 1), // Forward
Pos(0, 0, -1), // Back
];
pub fn solve(input: String) -> (usize, usize) {
let mut map: HashMap<Pos, bool> = HashMap::new();
input.lines().map(Pos::from).for_each(|p| {
map.insert(p, true);
});
(count_exposed_sides(&map), walk_exterior(&map))
}
/// This is essentially a flood-fill of the exterior
/// which counts every time we see a face of a cube of the droplet
fn walk_exterior(map: &HashMap<Pos, bool>) -> usize {
// Keep track of exposed cube faces encountered.
// Since we're using BFS it guarantees that we will only see a
// given face once, so we don't need to track it on a per-cube level.
let mut exterior_faces = 0;
// Start somewhere outside the droplet
let (min_x, min_y, min_z, max_x, max_y, max_z) = get_bounds(map);
let start = Pos(min_x - 1, min_y - 1, min_z - 1);
// Set up our BFS
let mut seen: HashSet<Pos> = HashSet::new();
seen.insert(start.clone());
let mut queue: VecDeque<Pos> = VecDeque::new();
queue.push_back(start);
while let Some(current) = queue.pop_front() {
// Look in every direction for either:
// - a cube, in which case we've found an exposed face
// - an empty space, which might be or lead to another exposed face
for delta in DIRECTIONS {
let check = &current + &delta;
// We're bordering a cube, and guaranteed that this is the only
// time we'll see it from this side, so increment the counter
if map.contains_key(&check) {
exterior_faces += 1;
} else if !seen.contains(&check)
// Don't allow exploration past the boundaries
// since we're working with a hashmap
&& min_x - 1 <= check.0
&& min_y - 1 <= check.1
&& min_z - 1 <= check.2
&& check.0 <= max_x + 1
&& check.1 <= max_y + 1
&& check.2 <= max_z + 1
{
queue.push_back(check.clone());
seen.insert(check);
}
}
}
exterior_faces
}
/// Just look around each cube to find empty spaces
fn count_exposed_sides(map: &HashMap<Pos, bool>) -> usize {
let mut sides = 0;
for cube in map.keys() {
for delta in DIRECTIONS {
if !map.contains_key(&(cube + &delta)) {
sides += 1;
}
}
}
sides
}
fn get_bounds(map: &HashMap<Pos, bool>) -> (i32, i32, i32, i32, i32, i32) {
let min_x = map.keys().map(|p| p.0).min().unwrap();
let min_y = map.keys().map(|p| p.1).min().unwrap();
let min_z = map.keys().map(|p| p.2).min().unwrap();
let max_x = map.keys().map(|p| p.0).max().unwrap();
let max_y = map.keys().map(|p| p.1).max().unwrap();
let max_z = map.keys().map(|p| p.2).max().unwrap();
(min_x, min_y, min_z, max_x, max_y, max_z)
}
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
struct Pos(i32, i32, i32);
impl Add for &Pos {
type Output = Pos;
fn add(self, rhs: Self) -> Self::Output {
Pos(self.0 + rhs.0, self.1 + rhs.1, self.2 + rhs.2)
}
}
impl From<&str> for Pos {
fn from(value: &str) -> Self {
let mut parts = value.split(',').map(|p| p.parse::<i32>().unwrap());
Pos(
parts.next().unwrap(),
parts.next().unwrap(),
parts.next().unwrap(),
)
}
}
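// Quick sanity check added for illustration: the two-cube example from the puzzle
// statement (1,1,1 and 2,1,1) exposes 10 sides, all of which lie on the exterior.
#[cfg(test)]
mod tests {
    use super::*;
    #[test]
    fn two_adjacent_cubes() {
        let input = "1,1,1\n2,1,1".to_string();
        assert_eq!(solve(input), (10, 10));
    }
}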
| true |
a55e78e2d98a0d71cffd36362916786acbf991b9
|
Rust
|
NathanHowell/google-cloud-storage-rs
|
/src/serde.rs
|
UTF-8
| 3,338 | 2.90625 | 3 |
[
"MIT"
] |
permissive
|
#[inline]
pub(crate) fn is_default<T: Default + PartialEq>(value: &T) -> bool {
value == &Default::default()
}
pub(crate) mod into_string {
use serde::{Deserialize, Serialize, Serializer};
pub fn serialize<S, T>(value: &T, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
T: std::fmt::Display,
{
format!("{}", value).serialize(serializer)
}
pub fn deserialize<'de, D, T>(deserializer: D) -> std::result::Result<T, D::Error>
where
D: ::serde::Deserializer<'de>,
T: std::str::FromStr,
T::Err: std::fmt::Display,
{
String::deserialize(deserializer)?
.parse::<T>()
.map_err(::serde::de::Error::custom)
}
}
pub(crate) mod optional_crc32c {
use serde::{Deserialize, Serialize, Serializer};
use std::convert::TryInto;
pub fn serialize<S>(value: &Option<u32>, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
value
.map(|v| base64::encode(v.to_be_bytes()))
.serialize(serializer)
}
pub fn deserialize<'de, D>(deserializer: D) -> std::result::Result<Option<u32>, D::Error>
where
D: ::serde::Deserializer<'de>,
{
match Option::<String>::deserialize(deserializer)? {
Some(v) => Ok(Some(u32::from_be_bytes(
base64::decode(v)
.map_err(::serde::de::Error::custom)?
.as_slice()
.try_into()
.map_err(::serde::de::Error::custom)?,
))),
None => Ok(None),
}
}
}
pub(crate) mod optional_timestamp {
use chrono::{DateTime, Utc};
use prost_types::Timestamp;
use serde::{Deserialize, Deserializer, Serialize, Serializer};
use std::convert::TryInto;
pub fn serialize<S>(value: &Option<Timestamp>, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
value
.as_ref()
.map(|value| {
DateTime::<Utc>::from_utc(
chrono::NaiveDateTime::from_timestamp(
value.seconds,
value.nanos.try_into().unwrap(),
),
Utc,
)
})
.serialize(serializer)
}
pub fn deserialize<'de, D>(deserializer: D) -> Result<Option<Timestamp>, D::Error>
where
D: Deserializer<'de>,
{
let value = Option::<DateTime<Utc>>::deserialize(deserializer)?;
Ok(value.map(|value| Timestamp {
seconds: value.timestamp(),
nanos: value.timestamp_subsec_nanos().try_into().unwrap(),
}))
}
}
pub(crate) mod optional_field_mask {
use prost_types::FieldMask;
use serde::{Deserializer, Serializer};
pub fn serialize<S>(_value: &Option<FieldMask>, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
// FieldMask is serialized as a query parameter, it is not in the body
serializer.serialize_none()
}
pub fn deserialize<'de, D>(_deserializer: D) -> Result<Option<FieldMask>, D::Error>
where
D: Deserializer<'de>,
{
// FieldMask is serialized as a query parameter, it is not in the body
Ok(None)
}
}
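// Illustrative round-trip for the crc32c helper above, added as a sketch. It assumes
// `serde_json` and serde's derive feature are available to tests, which may not hold
// for this crate; the expected string is simply base64 of the big-endian bytes [1, 2, 3, 4].
#[cfg(test)]
mod tests {
    use serde::{Deserialize, Serialize};
    #[derive(Serialize, Deserialize)]
    struct Wrapper {
        #[serde(with = "super::optional_crc32c")]
        crc32c: Option<u32>,
    }
    #[test]
    fn crc32c_round_trips_as_base64() {
        let json = serde_json::to_string(&Wrapper {
            crc32c: Some(0x0102_0304),
        })
        .unwrap();
        assert_eq!(json, r#"{"crc32c":"AQIDBA=="}"#);
        let back: Wrapper = serde_json::from_str(&json).unwrap();
        assert_eq!(back.crc32c, Some(0x0102_0304));
    }
}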
| true |
912208494a09fa81fc076326242bc19c50c08531
|
Rust
|
malkoG/RustPython
|
/vm/src/obj/objfilter.rs
|
UTF-8
| 1,998 | 2.546875 | 3 |
[
"MIT"
] |
permissive
|
use super::objbool;
use super::objiter;
use super::objtype::PyClassRef;
use crate::pyobject::{IdProtocol, PyClassImpl, PyContext, PyObjectRef, PyRef, PyResult, PyValue};
use crate::vm::VirtualMachine;
pub type PyFilterRef = PyRef<PyFilter>;
/// filter(function or None, iterable) --> filter object
///
/// Return an iterator yielding those items of iterable for which function(item)
/// is true. If function is None, return the items that are true.
#[pyclass]
#[derive(Debug)]
pub struct PyFilter {
predicate: PyObjectRef,
iterator: PyObjectRef,
}
impl PyValue for PyFilter {
fn class(vm: &VirtualMachine) -> PyClassRef {
vm.ctx.filter_type()
}
}
#[pyimpl]
impl PyFilter {
#[pyslot(new)]
fn tp_new(
cls: PyClassRef,
function: PyObjectRef,
iterable: PyObjectRef,
vm: &VirtualMachine,
) -> PyResult<PyFilterRef> {
let iterator = objiter::get_iter(vm, &iterable)?;
PyFilter {
predicate: function.clone(),
iterator,
}
.into_ref_with_type(vm, cls)
}
#[pymethod(name = "__next__")]
fn next(&self, vm: &VirtualMachine) -> PyResult {
let predicate = &self.predicate;
let iterator = &self.iterator;
loop {
let next_obj = objiter::call_next(vm, iterator)?;
let predicate_value = if predicate.is(&vm.get_none()) {
next_obj.clone()
} else {
// the predicate itself can raise StopIteration which does stop the filter
// iteration
vm.invoke(&predicate, vec![next_obj.clone()])?
};
if objbool::boolval(vm, predicate_value)? {
return Ok(next_obj);
}
}
}
#[pymethod(name = "__iter__")]
fn iter(zelf: PyRef<Self>, _vm: &VirtualMachine) -> PyRef<Self> {
zelf
}
}
pub fn init(context: &PyContext) {
PyFilter::extend_class(context, &context.types.filter_type);
}
| true |
d9edf05d18088099fec7339086a4692acbad38b4
|
Rust
|
emberian/pct
|
/src/cfg/test.rs
|
UTF-8
| 4,564 | 2.765625 | 3 |
[] |
no_license
|
#![cfg(test)]
use cfg::{ll1, Symbol, Cfg, EPSILON, END_OF_INPUT, Token, Rule};
use cfg::util::{compute_first_of, Follow, compute_follow};
use cfg::bnf::{from_str, to_string};
#[test]
fn first_is_correct() {
let mut cfg = Cfg::new();
let s = cfg.add_nonterminal();
let a = cfg.add_nonterminal();
let b = cfg.add_nonterminal();
let c = cfg.add_nonterminal();
let d = cfg.add_terminal();
let e = cfg.add_terminal();
let f = cfg.add_terminal();
// Construct the grammar:
//
// S -> A B C
// A -> EPSILON | d
// B -> e
// C -> EPSILON | f
let _rs = cfg.add_rule(s, &[a, b, c]);
let _ra1 = cfg.add_rule(a, &[EPSILON]);
let _ra2 = cfg.add_rule(a, &[d]);
let _rb = cfg.add_rule(b, &[e]);
let _rc1 = cfg.add_rule(c, &[EPSILON]);
let _rc2 = cfg.add_rule(c, &[f]);
assert_eq!(compute_first_of(&mut cfg, &[s]), vec![d.into(), e.into()].into_iter().collect());
//assert_eq!(compute_first_of(&mut cfg, &[a, b]), vec![].into_iter().collect());
//assert_eq!(compute_first_of(&mut cfg, &[b]), vec![].into_iter().collect());
//assert_eq!(compute_first_of(&mut cfg, &[c]), vec![].into_iter().collect());
assert_eq!(compute_first_of(&mut cfg, &[a, c]), vec![EPSILON, d.into(), f.into()].into_iter().collect());
assert!(compute_first_of::<_, Symbol>(&mut cfg, &[]).is_empty());
}
#[test]
fn from_str_is_correct() {
let input = "S A B C
A
A d
B e
C
C f";
let c = from_str(input);
assert_eq!("S -> ABC
A -> ε
A -> d
B -> e
C -> ε
C -> f", to_string(&c));
}
#[test]
fn follow_is_correct() {
let mut cfg = Cfg::new();
let s = cfg.add_nonterminal();
let a = cfg.add_nonterminal();
let b = cfg.add_nonterminal();
let c = cfg.add_nonterminal();
let d = cfg.add_terminal();
let e = cfg.add_terminal();
let f = cfg.add_terminal();
// Construct the grammar:
//
// S -> A B C
// A -> EPSILON | d
// B -> e
// C -> EPSILON | f
let _rs = cfg.add_rule(s, &[a, b, c]);
let _ra1 = cfg.add_rule(a, &[EPSILON]);
let _ra2 = cfg.add_rule(a, &[d]);
let _rb = cfg.add_rule(b, &[e]);
let _rc1 = cfg.add_rule(c, &[EPSILON]);
let _rc2 = cfg.add_rule(c, &[f]);
let mut cfg = cfg.freeze();
compute_follow(&mut cfg);
let follow = cfg.extra().get::<Follow>().unwrap();
let expected = vec![vec![END_OF_INPUT].into_iter().collect(), vec![e.into()].into_iter().collect(), vec![END_OF_INPUT, f.into()].into_iter().collect(), vec![END_OF_INPUT].into_iter().collect()];
assert_eq!(follow, &expected);
}
#[test]
fn can_make_ll1_table() {
let mut cfg = Cfg::new();
let s = cfg.add_nonterminal();
let a = cfg.add_nonterminal();
let b = cfg.add_nonterminal();
let c = cfg.add_nonterminal();
let d = cfg.add_terminal();
let e = cfg.add_terminal();
let f = cfg.add_terminal();
// Construct the grammar:
//
// S -> A B C
// A -> EPSILON | d
// B -> e
// C -> EPSILON | f
let _rs = cfg.add_rule(s, &[a, b, c]);
let _ra1 = cfg.add_rule(a, &[EPSILON]);
let _ra2 = cfg.add_rule(a, &[d]);
let _rb = cfg.add_rule(b, &[e]);
let _rc1 = cfg.add_rule(c, &[EPSILON]);
let _rc2 = cfg.add_rule(c, &[f]);
let mut cfg = cfg.freeze();
let tab = ll1::generate_table(&mut cfg);
let expected_table = vec![
vec![None, None, Some(Rule(0)), Some(Rule(0)), None],
vec![None, Some(Rule(1)), Some(Rule(2)), Some(Rule(1)), None],
vec![None, None, None, Some(Rule(3)), None],
vec![Some(Rule(4)), Some(Rule(4)), None, None, Some(Rule(5))]
];
assert_eq!(tab.table, expected_table);
}
#[test]
fn can_parse_ll1_string() {
let mut cfg = Cfg::new();
let s = cfg.add_nonterminal();
let a = cfg.add_nonterminal();
let b = cfg.add_nonterminal();
let c = cfg.add_nonterminal();
let d = cfg.add_terminal();
let e = cfg.add_terminal();
let f = cfg.add_terminal();
// Construct the grammar:
//
// S -> A B C
// A -> EPSILON | d
// B -> e
// C -> EPSILON | f
let _rs = cfg.add_rule(s, &[a, b, c]);
let _ra1 = cfg.add_rule(a, &[EPSILON]);
let _ra2 = cfg.add_rule(a, &[d]);
let _rb = cfg.add_rule(b, &[e]);
let _rc1 = cfg.add_rule(c, &[EPSILON]);
let _rc2 = cfg.add_rule(c, &[f]);
let mut cfg = cfg.freeze();
let tab = ll1::generate_table(&mut cfg);
println!("{:?}", tab);
let derivation = ll1::parse(&tab, vec![&e as &Token]);
assert_eq!(derivation, vec![Rule(0), Rule(1), Rule(3), Rule(4)]);
}
| true |
9cc03bb23d60dc3bdbb85993a1776c4483fe7bc8
|
Rust
|
Smithay/smithay
|
/src/backend/x11/buffer.rs
|
UTF-8
| 7,753 | 2.703125 | 3 |
[
"MIT"
] |
permissive
|
//! Utilities for importing buffers into X11.
//!
//! Buffers imported into X11 are represented as X pixmaps which are then presented to the window.
//!
//! At the moment only [`Dmabuf`] backed pixmaps are supported.
//!
//! ## Dmabuf pixmaps
//!
//! A [`Dmabuf`] backed pixmap is created using the [`DRI3`](x11rb::protocol::dri3) extension of
//! the X server. One of two code paths is used here. For more modern DRI3 (>= 1.2) implementations
//! multi-plane Dmabufs, may be used to create a pixmap. Otherwise the fallback code path
//! (available in >= 1.0) is used to create the pixmap. Although the Dmabuf may only have one plane.
//!
//! If you do need to modify any of the logic pertaining to the Dmabuf presentation, do ensure you
//! read the `dri3proto.txt` file (link in the non-public comments of the x11 mod.rs).
//!
//! ## Presentation to the window
//!
//! Presentation to the window is handled through the [`Present`](x11rb::protocol::present)
//! extension of the X server. Because we use direct rendering to present to the window, using
//! V-Sync from OpenGL or the equivalents in other rendering APIs will not work. This is where
//! the utility of the present extension is useful. When using the `present_pixmap` function,
//! the X server will notify when the frame has been presented to the window. The notification
//! of presentation usually occurs on a V-blank.
//!
//! If you do need to modify any of the logic pertaining to using the present extension, do
//! ensure you read the `presentproto.txt` file (link in the non-public comments of the
//! x11 mod.rs).
use std::{os::unix::io::AsRawFd, sync::atomic::Ordering};
use super::{PresentError, Window, X11Error};
use drm_fourcc::DrmFourcc;
use nix::fcntl;
use x11rb::{
connection::Connection,
protocol::{
dri3::ConnectionExt as _,
present::{self, ConnectionExt},
xproto::PixmapWrapper,
},
utils::RawFdContainer,
};
use crate::backend::allocator::{dmabuf::Dmabuf, Buffer};
// Shm can be easily supported in the future using, xcb_shm_create_pixmap.
pub trait PixmapWrapperExt<'c, C>
where
C: Connection,
{
/// Creates a new Pixmap using the supplied Dmabuf.
///
/// The returned Pixmap is freed when dropped.
fn with_dmabuf(
connection: &'c C,
window: &Window,
dmabuf: &Dmabuf,
) -> Result<PixmapWrapper<'c, C>, X11Error>;
/// Presents the pixmap to the window.
///
/// The wrapper is consumed when this function is called. The return value will contain the
/// id of the pixmap.
///
/// The pixmap will be automatically dropped when it bubbles up in the X11 event loop after the
/// X server has finished presentation with the buffer behind the pixmap.
fn present(self, connection: &C, window: &Window) -> Result<u32, X11Error>;
}
impl<'c, C> PixmapWrapperExt<'c, C> for PixmapWrapper<'c, C>
where
C: Connection,
{
#[profiling::function]
fn with_dmabuf(
connection: &'c C,
window: &Window,
dmabuf: &Dmabuf,
) -> Result<PixmapWrapper<'c, C>, X11Error> {
if dmabuf.format().code != window.format() {
return Err(PresentError::IncorrectFormat(window.format()).into());
}
let mut fds = Vec::new();
// XCB closes the file descriptor after sending, so duplicate the file descriptors.
for handle in dmabuf.handles() {
let fd = fcntl::fcntl(
handle.as_raw_fd(),
fcntl::FcntlArg::F_DUPFD_CLOEXEC(3), // Set to 3 so the fd cannot become stdin, stdout or stderr
)
.map_err(|e| PresentError::DupFailed(e.to_string()))?;
fds.push(RawFdContainer::new(fd))
}
// We need dri3 >= 1.2 in order to use the enhanced dri3_pixmap_from_buffers function.
let xid = if window.0.extensions.dri3 >= Some((1, 2)) {
if dmabuf.num_planes() > 4 {
return Err(PresentError::TooManyPlanes.into());
}
let xid = connection.generate_id()?;
let mut strides = dmabuf.strides();
let mut offsets = dmabuf.offsets();
connection.dri3_pixmap_from_buffers(
xid,
window.id(),
dmabuf.width() as u16,
dmabuf.height() as u16,
strides.next().unwrap(), // there must be at least one plane and stride.
offsets.next().unwrap(),
// The other planes are optional, so unwrap_or to `NONE` if those planes are not available.
strides.next().unwrap_or(x11rb::NONE),
offsets.next().unwrap_or(x11rb::NONE),
strides.next().unwrap_or(x11rb::NONE),
offsets.next().unwrap_or(x11rb::NONE),
strides.next().unwrap_or(x11rb::NONE),
offsets.next().unwrap_or(x11rb::NONE),
window.depth(),
// In the future this could be made nicer.
match window.format() {
DrmFourcc::Argb8888 => 32,
DrmFourcc::Xrgb8888 => 24,
_ => unreachable!(),
},
dmabuf.format().modifier.into(),
fds,
)?;
xid
} else {
// Old codepath can only create a pixmap using one plane from a dmabuf.
if dmabuf.num_planes() != 1 {
return Err(PresentError::TooManyPlanes.into());
}
let xid = connection.generate_id()?;
let mut strides = dmabuf.strides();
let stride = strides.next().unwrap();
connection.dri3_pixmap_from_buffer(
xid,
window.id(),
dmabuf.height() * stride,
dmabuf.width() as u16,
dmabuf.height() as u16,
stride as u16,
window.depth(),
// In the future this could be made nicer.
match window.format() {
DrmFourcc::Argb8888 => 32,
DrmFourcc::Xrgb8888 => 24,
_ => unreachable!(),
},
fds.remove(0),
)?;
xid
};
Ok(PixmapWrapper::for_pixmap(connection, xid))
}
#[profiling::function]
fn present(self, connection: &C, window: &Window) -> Result<u32, X11Error> {
let next_serial = window.0.next_serial.fetch_add(1, Ordering::SeqCst);
        // We want to present as soon as possible, so target one msc past the last one seen;
        // the X server will then present when next convenient (usually the next vblank).
let msc = window.0.last_msc.load(Ordering::SeqCst) + 1;
// options parameter does not take the enum but a u32.
const OPTIONS: present::Option = present::Option::NONE;
connection.present_pixmap(
window.id(),
self.pixmap(),
next_serial,
x11rb::NONE, // Update the entire window
x11rb::NONE, // Update the entire window
0, // No offsets
0,
x11rb::NONE, // Let the X server pick the most suitable crtc
x11rb::NONE, // Do not wait to present
x11rb::NONE, // We will wait for the X server to tell us when it is done with the pixmap.
OPTIONS.into(), // No special presentation options.
msc,
0,
0,
&[], // We don't need to notify any other windows.
)?;
// Pixmaps are reference counted on the X server. Because of reference counting we may
// drop the wrapper and the X server will free the pixmap when presentation has completed.
Ok(self.pixmap())
}
}
| true |
c661bba1bd14e832d8a7fdff8e0b8cfa63470f70
|
Rust
|
themasch/aoc-2020
|
/src/day10.rs
|
UTF-8
| 5,467 | 3.28125 | 3 |
[] |
no_license
|
use crate::*;
use std::convert::TryInto;
use std::io::BufRead;
pub struct Input(Vec<usize>);
impl<R: BufRead> ReadInput<R> for Input {
fn read(b: R) -> Result<Input, ()> {
Ok(Input(
b.lines()
.flatten()
.filter_map(|line| line.parse::<usize>().ok())
.collect::<Vec<_>>(),
))
}
}
pub struct FirstStep;
impl Solution for FirstStep {
type Input = Input;
type Output = usize;
fn solve(i: Input) -> Result<usize, ()> {
let mut num = i.0;
num.sort_unstable();
let min = num[0];
let fold_start = {
let mut x = [0, 0, 0];
x[min - 1] = 1;
x
};
let deltas = num.windows(2).fold(fold_start, |mut acc, nums| {
let [na, nb]: [usize; 2] = dbg!(nums.try_into().unwrap());
debug_assert!(nb - na >= 1 && nb - na <= 3);
acc[nb - na - 1] += 1;
dbg!(acc)
});
Ok(deltas[0] * (deltas[2] + 1))
}
}
pub struct SecondStep;
impl Solution for SecondStep {
type Input = Input;
type Output = usize;
/// when determining the number of possible mutations of this chain, we only need to look
/// at the entries with a difference of less than 3, because the only modification we can
/// do is remove items from the chain (since adapters cannot take input higher than their
    /// rating) and we can only remove an entry iff its neighbors are compatible with each other.
    ///
    /// It turns out that there are no followers with a delta of 2, neither in my personal
    /// payload nor in the samples. This allows us to just check for a delta of 1.
    ///
    /// We can think of the whole chain as a set of smaller chains, which are connected to each
    /// other. Each delta of three between two consecutive entries is a connection point where a
    /// new "sub-chain" starts. Each of these smaller sub-chains is made up of entries exactly
    /// 1 jolt apart from each other.
    /// We then have to determine the number of possible permutations of each of these smaller
    /// chains, multiply them all up and we get the number of possible permutations for the
/// complete chain.
///
/// The length of a chain is here defined as the number of CONNECTIONS between entries.
    /// A chain of length 1 consists of two entries (either two adapters, or one adapter and
/// the outlet).
///
    /// The number of permutations for lengths 1, 2 and 3 is trivial to calculate:
    /// 1. we've got exactly 2 entries, we need to keep the start and the end, so there's exactly 1
    /// variation
    /// 2. we've got 3 entries, we may either cut the middle one or not, and thus get two permutations
    /// 3. we've got two "floating adapters" we can cut, which leads to four permutations (none, a, b,
    /// or a & b).
///
    /// At this point we may want to get ahead of ourselves and think "HEY! I know this series!".
    /// When interpreting the number of "floating adapters" (not the start or end of a chain)
    /// as bits, the number of possible values these bits could represent is identical to the
    /// number of permutations, which is no surprise, since all we do is take an adapter away or
    /// not, which is representable as 1 or 0.
    /// But sadly the correct answer for the length of 4 is not 8, but 7.
    /// That's because we cannot remove three consecutive adapters, because then the difference
    /// between the previous and next one would be >3 which breaks the rules of santa-physics.
    ///
    /// Thus we would need to account for permutations containing three consecutive zeros for
    /// all chain lengths >= 4 and only count those that follow the rules.
    /// Luckily, again, the input only contains chains up to a length of 4, so a tiny LUT saves
    /// the day.
    ///
    /// Oh wow, that was a fun one! (See the small brute-force check below for the length-4 case.)
fn solve(i: Input) -> Result<usize, ()> {
let mut num = i.0;
num.sort_unstable();
let max = num[num.len() - 1];
let x: Vec<usize> = vec![0usize]
.into_iter()
.chain(num)
.chain(vec![max + 3])
.collect();
let magic_number = get_chain_lengths(&x)
.iter()
.map(|len| match len {
0 | 1 => 1,
2 => 2,
3 => 4,
4 => 7,
_ => panic!("{}", len),
})
.product();
Ok(magic_number)
}
}
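// Illustrative check (added sketch, not part of the original solution): enumerates the
// length-4 case by brute force to show why the lookup table above maps 4 to 7 rather
// than 8 -- dropping all three floating adapters would leave a gap of 4 jolts.
#[cfg(test)]
mod lut_sketch {
    #[test]
    fn a_chain_of_length_four_has_seven_legal_permutations() {
        let chain = [0usize, 1, 2, 3, 4];
        let mut legal = 0;
        // Each mask bit decides whether one of the floating adapters 1, 2, 3 is kept.
        for mask in 0usize..8 {
            let kept: Vec<usize> = std::iter::once(chain[0])
                .chain((1..4).filter(|&i| mask & (1 << (i - 1)) != 0).map(|i| chain[i]))
                .chain(std::iter::once(chain[4]))
                .collect();
            // A permutation is legal iff no two consecutive kept values differ by more than 3.
            if kept.windows(2).all(|w| w[1] - w[0] <= 3) {
                legal += 1;
            }
        }
        assert_eq!(legal, 7);
    }
}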
fn get_chain_lengths(numbers: &[usize]) -> Vec<usize> {
let (_, chains) = numbers
.windows(2)
.fold((0, Vec::new()), |(len, mut hist), x| {
if x[1] - x[0] == 1 {
(len + 1, hist)
} else {
if len > 0 {
hist.push(len);
}
(0, hist)
}
});
chains
}
#[cfg(test)]
mod test {
use super::*;
use std::io::BufReader;
static INPUT: &str = r#"28
33
18
42
31
14
46
20
48
47
24
23
49
45
19
38
39
11
1
32
25
35
8
17
7
9
4
2
34
10
3"#;
#[test]
fn test_step_1() {
let read = Input::read(BufReader::new(INPUT.as_bytes())).unwrap();
assert_eq!(Ok(220), FirstStep::solve(read));
}
#[test]
fn test_step_2() {
let read = Input::read(BufReader::new(INPUT.as_bytes())).unwrap();
assert_eq!(Ok(19208), SecondStep::solve(read));
}
}
| true |
df30a94842bd27c93e77442a5be4c0502ad14b43
|
Rust
|
matijaskala/election
|
/src/lib.rs
|
UTF-8
| 3,048 | 3.125 | 3 |
[] |
no_license
|
pub struct Tensor {
candidates: u32,
data: Vec<u64>,
votes: u64,
}
pub fn new_tensor(c: u32) -> Tensor {
Tensor{candidates: c, data: vec![0; (5*c*c) as usize], votes: 0}
}
impl Tensor {
pub fn add_ballot(&mut self, ballot: &[u8]) {
let c = self.candidates as usize;
assert!(5*c*c == self.data.len());
assert!(c == ballot.len());
for i in 0..c {
for j in 0..c {
for k in 0..5 {
if ballot[i] > k && ballot[j] > k {
self.data[i*c+j+k as usize*c*c] += 1;
}
}
}
}
self.votes += 1;
}
fn get_score(&self, idx: usize, already_elected: &[u32], cutoff: usize) -> f64 {
let c = self.candidates as usize;
assert!(5*c*c == self.data.len());
if self.data[idx*c+idx+cutoff*c*c] == 0 { return 0.0 }
let mut d = 1.0;
for i in already_elected {
let i = *i as usize;
if self.data[i*c+i+cutoff*c*c] == 0 { return 0.0 }
d += self.data[idx*c+i+cutoff*c*c] as f64/self.data[i*c+i+cutoff*c*c] as f64;
}
self.data[idx*c+idx+cutoff*c*c] as f64/d
}
fn get_next_winner(&self, already_elected: &[u32], seats: u32) -> u32 {
let c = self.candidates as usize;
assert!(5*c*c == self.data.len());
let mut idx = vec![];
let q = self.votes as f64/seats as f64;
for cutoff in (0..5).rev() {
let mut val = -1.0;
for i in 0..c {
let mut skip = false;
for j in already_elected {
if i == *j as usize { skip = true }
}
if !skip && (cutoff == 0 || self.get_score(i, already_elected, cutoff - 1) >= q) {
let cur = self.get_score(i, already_elected, cutoff);
if val == cur { idx.push(i) }
else if val < cur {
val = cur;
idx = vec![i];
}
}
}
if idx.len() != 0 {
for i in cutoff+1..5 {
if idx.len() == 1 { break }
let mut idx2 = vec![];
let mut val = -1.0;
for j in idx {
let cur = self.get_score(j, already_elected, i);
if val == cur { idx2.push(j) }
else if val < cur {
val = cur;
idx2 = vec![j];
}
}
assert!(idx2.len() != 0);
idx = idx2;
}
break;
}
}
idx[0] as u32
}
pub fn get_winners(&self, seats: u32) -> Vec<u32> {
assert!(seats <= self.candidates);
let mut w = vec![];
for _ in 0..seats {
w.push(self.get_next_winner(&w, seats));
}
w
}
}
| true |
2d3ac9c42d2978f9415bc799ad6b863c8b5a5014
|
Rust
|
takatori/hyperloglog
|
/src/lib.rs
|
UTF-8
| 9,191 | 3.046875 | 3 |
[] |
no_license
|
extern crate rand;
use rand::Rng;
use std::fmt;
use std::error::Error;
use std::hash::{Hash, Hasher};
use std::collections::BTreeMap;
/// SipHasher was deprecated in Rust 1.13.0, but its replacement, SipHasher24, is still
/// unstable, so it cannot be used with stable Rust releases.
#[allow(deprecated)]
use std::hash::SipHasher;
/// Estimation algorithm. Used for debug output.
#[derive(Debug)]
pub enum Estimator {
HyperLogLog,
    LinerCounting // Used for small-range estimation
}
/// A `HyperLogLog` object.
pub struct HyperLogLog {
    // Number of binary bits used to address the registers.
    // Must be between 4 and 16; larger values reduce the estimation error but use more memory.
    b: u8,
    // Mask used to extract the rightmost b bits of a usize hash value.
    b_mask: usize,
    // Number of registers (2 to the power of b). Examples: b = 4 → 16, b = 16 → 65536.
    m: usize,
    alpha: f64,
    // Registers: a byte array of m bytes.
    registers: Vec<u8>,
    // Keys used to initialize SipHasher.
hasher_key0: u64,
hasher_key1: u64,
}
/// Returns a debug string for the `HyperLogLog`.
impl fmt::Debug for HyperLogLog {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let (est, est_method) = estimate_cardinality(self);
write!(f,
r#"HyperLogLog
estimated cardinality: {}
estimation method: {:?}
-----------------------------------------------------
b: {} bits (typical error rate: {}%)
m: {} registers
alpha: {}
hasher: ({}, {})"#,
est,
est_method,
self.b,
self.typical_error_rate() * 100.0,
self.m,
self.alpha,
self.hasher_key0,
self.hasher_key1)
}
}
impl HyperLogLog {
    /// Creates a `HyperLogLog` object. The number of bits given by b is used to address
    /// the registers. b must be between 4 and 16, inclusive; if it is out of range,
    /// `Err` is returned.
pub fn new(b: u8) -> Result<Self, Box<Error>> {
if b < 4 || b > 16 {
return Err(From::from(format!("b must be between 4 and 16. b = {}", b)))
}
        // The struct field `m` is 2 to the power of b, implemented with a shift.
let m = 1 << b;
let alpha = get_alpha(b)?;
        // Random number generator used to initialize hasher_key0 and hasher_key1.
let mut rng = rand::OsRng::new().map_err(|e| format!("Failed to create an OS RNG: {}", e))?;
Ok(HyperLogLog {
alpha: alpha,
b: b,
b_mask: m - 1,
m: m,
registers: vec![0; m],
hasher_key0: rng.gen(),
hasher_key1: rng.gen(),
})
}
    /// Adds an element. The element must implement the `std::hash::Hash` trait.
pub fn insert<H: Hash>(&mut self, value: &H) {
let x = self.hash(value);
let j = x as usize & self.b_mask;
let w = x >> self.b;
let p1 = position_of_leftmost_one_bit(w, 64 - self.b);
let p2 = &mut self.registers[j];
if *p2 < p1 {
*p2 = p1;
}
}
    /// Returns the estimated cardinality.
pub fn cardinality(&self) -> f64 {
estimate_cardinality(self).0
}
    /// Returns the typical error rate expected for b.
pub fn typical_error_rate(&self) -> f64 {
1.04 / (self.m as f64).sqrt()
}
    /// Computes a 64-bit hash value for the given value.
    #[allow(deprecated)] // because SipHasher has been deprecated since Rust 1.13.0
fn hash<H: Hash>(&self, value: &H) -> u64 {
let mut hasher = SipHasher::new_with_keys(self.hasher_key0, self.hasher_key1);
value.hash(&mut hasher);
hasher.finish()
}
    /// Returns a histogram showing the distribution of the values stored in the registers.
pub fn histgram_of_register_value_distribution(&self) -> String {
let mut histgram = Vec::new();
let mut map = BTreeMap::new();
for x in &self.registers {
let count = map.entry(*x).or_insert(0);
*count += 1;
}
if let (Some(last_reg_value), Some(max_count)) = (map.keys().last(), map.values().max()) {
            // Maximum width of the graph = 40 characters.
let width = 40.0;
let rate = width / (*max_count as f64);
            for i in 0..(last_reg_value + 1) {
let mut line = format!("{:3}: ", i);
if let Some(count) = map.get(&i) {
                    // Draw a horizontal bar of asterisks (*).
                    let h_bar = std::iter::repeat("*")
                        .take((*count as f64 * rate).ceil() as usize)
.collect::<String>();
line.push_str(&h_bar);
line.push_str(&format!(" {}", count));
} else {
line.push_str("0");
};
histgram.push(line);
}
}
histgram.join("\n")
}
}
/// Returns the alpha constant corresponding to the bit count b.
fn get_alpha(b: u8) -> Result<f64, Box<Error>> {
if b < 4 || b > 16 {
Err(From::from(format!("b must be between 4 and 16. b = {}", b)))
} else {
Ok(match b {
4 => 0.673, // α16
5 => 0.697, // α32
6 => 0.709, // α64
_ => 0.7213 / (1.0 + 1.079 / (1 << b) as f64),
})
}
}
/// Returns the position of the first 1 bit, counted from the left end of the hash value
/// (a 64-bit unsigned binary number). Examples: 10000... -> 1, 00010... -> 4
fn position_of_leftmost_one_bit(s: u64, max_width: u8) -> u8 {
count_leading_zeros(s, max_width) + 1
}
/// Returns the number of consecutive 0 bits at the left end of the hash value
/// (a 64-bit unsigned binary number). Examples: 10000... -> 0, 00010... -> 3
fn count_leading_zeros(mut s: u64, max_width: u8) -> u8 {
let mut lz = max_width;
while s != 0 {
lz -= 1;
s >>= 1;
}
lz
}
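// Illustrative check (added sketch): verifies the examples from the doc comments above
// for a 60-bit hash width (64 bits minus b = 4 addressing bits).
#[cfg(test)]
mod bit_position_sketch {
    use super::{count_leading_zeros, position_of_leftmost_one_bit};

    #[test]
    fn doc_examples_hold() {
        // 1000... at the top of the 60-bit field -> position 1, zero leading zeros.
        assert_eq!(position_of_leftmost_one_bit(1 << 59, 60), 1);
        assert_eq!(count_leading_zeros(1 << 59, 60), 0);
        // 0001... -> position 4, three leading zeros.
        assert_eq!(position_of_leftmost_one_bit(1 << 56, 60), 4);
        assert_eq!(count_leading_zeros(1 << 56, 60), 3);
    }
}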
/// Estimates the cardinality and returns the estimate together with the algorithm used.
/// In the small range the `Linear Counting` algorithm is used, and beyond that range the
/// `HyperLogLog` algorithm is used, as in the paper.
/// However, the large-range correction described in the paper is not applied, because this
/// implementation uses 64-bit hash values instead of 32-bit ones, so hash collisions are
/// expected to be extremely rare.
fn estimate_cardinality(hll: &HyperLogLog) -> (f64, Estimator) {
let m_64 = hll.m as f64;
    // First compute the estimate with the `HyperLogLog` algorithm.
let est = raw_hyperloglog_estimate(hll.alpha, m_64, &hll.registers);
if est < (5.0 / 2.0 * m_64) {
        // Small-range estimation: if even one register is still zero,
        // re-estimate with the `Linear Counting` algorithm.
match count_zero_registers(&hll.registers) {
0 => (est, Estimator::HyperLogLog),
            v => (linear_counting_estimate(m_64, v as f64), Estimator::LinerCounting),
}
} else {
(est, Estimator::HyperLogLog)
}
}
/// Returns the number of registers whose value is 0.
fn count_zero_registers(registers: &[u8]) -> usize {
registers.iter().filter(|&x| *x == 0).count()
}
/// Computes the raw (uncorrected) estimate with the `HyperLogLog` algorithm.
fn raw_hyperloglog_estimate(alpha: f64, m: f64, registers: &[u8]) -> f64 {
let sum = registers.iter().map(|&x| 2.0f64.powi(-(x as i32))).sum::<f64>();
alpha * m * m / sum
}
/// Computes the estimate with the `Linear Counting` algorithm.
fn linear_counting_estimate(m: f64, number_of_zero_registers: f64) -> f64 {
m * (m / number_of_zero_registers).ln()
}
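// Worked example (added sketch): with m = 16 registers of which 4 are still zero, the
// Linear Counting estimate is m * ln(m / V) = 16 * ln(16 / 4) = 16 * ln 4 ≈ 22.18.
#[cfg(test)]
mod linear_counting_sketch {
    use super::linear_counting_estimate;

    #[test]
    fn matches_a_hand_computation() {
        let est = linear_counting_estimate(16.0, 4.0);
        assert!((est - 16.0 * 4.0_f64.ln()).abs() < 1e-9);
    }
}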
// Test cases
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn create_hll() {
use std::f64;
assert!(HyperLogLog::new(3).is_err());
assert!(HyperLogLog::new(17).is_err());
let hll = HyperLogLog::new(4);
assert!(hll.is_ok());
let hll = hll.unwrap();
assert_eq!(hll.b, 4);
assert_eq!(hll.m, 2_f64.powi(4) as usize);
assert_eq!(hll.alpha, 0.673);
assert_eq!(hll.registers.len(), 2_f64.powi(4) as usize);
assert!(HyperLogLog::new(16).is_ok());
}
#[test]
fn small_range() {
let mut hll = HyperLogLog::new(12).unwrap();
let items = ["test1", "test2", "test3", "test2", "test2", "test2"];
println!("\n=== Loading {} items.\n", items.len());
for item in &items {
hll.insert(item);
}
}
}
| true |
435f5c6e8a6bd7be2dfa301c90c499e87542f466
|
Rust
|
millerjs/harNes
|
/src/cartridge.rs
|
UTF-8
| 4,318 | 2.75 | 3 |
[] |
no_license
|
use types::*;
use std::io::Read;
use std::io::Error as IOError;
use std::path::Path;
use std::fs::File;
use std::fmt;
quick_error! {
#[derive(Debug)]
pub enum CartridgeError {
LoadError(err: String) { from() }
IOError(err: IOError) { from() }
}
}
pub type Flags = Byte;
#[repr(C, packed)]
#[derive(Debug, Default)]
pub struct Header {
/// Constant $4E $45 $53 $1A ("NES" followed by MS-DOS end-of-file)
constant: [Byte; 4],
/// Size of PRG ROM in 16 KB units
program_memory_size: Byte,
/// Size of CHR ROM in 8 KB units (Value 0 means the board uses CHR RAM)
character_memory_size: Byte,
/// 76543210
/// ||||||||
/// |||||||+- Mirroring: 0: horizontal (vertical arrangement) (CIRAM A10 = PPU A11)
/// ||||||| 1: vertical (horizontal arrangement) (CIRAM A10 = PPU A10)
/// ||||||+-- 1: Cartridge contains battery-backed PRG RAM ($6000-7FFF) or other persistent memory
/// |||||+--- 1: 512-byte trainer at $7000-$71FF (stored before PRG data)
/// ||||+---- 1: Ignore mirroring control or above mirroring bit; instead provide four-screen VRAM
/// ++++----- Lower nybble of mapper number
flag_6: Byte,
/// 76543210
/// ||||||||
/// |||||||+- VS Unisystem
/// ||||||+-- PlayChoice-10 (8KB of Hint Screen data stored after CHR data)
/// ||||++--- If equal to 2, flags 8-15 are in NES 2.0 format
/// ++++----- Upper nybble of mapper number
flag_7: Byte,
/// Size of PRG RAM in 8 KB units (Value 0 infers 8 KB for compatibility)
program_ram_size: Byte,
/// 76543210
/// ||||||||
/// |||||||+- TV system (0: NTSC; 1: PAL)
/// +++++++-- Reserved, set to zero
flag_9: Byte,
/// 76543210
/// || ||
/// || ++- TV system (0: NTSC; 2: PAL; 1/3: dual compatible)
/// |+----- PRG RAM ($6000-$7FFF) (0: present; 1: not present)
/// +------ 0: Board has no bus conflicts; 1: Board has bus conflicts
flag_10: Byte,
zero_filled: [Byte; 5]
}
#[derive(Default)]
pub struct Cartridge {
header: Header,
pub program_memory: Vec<Byte>,
pub character_memory: Vec<Byte>,
}
impl Header {
pub fn load<R: Read>(source: &mut R) -> Result<Header, CartridgeError> {
use std::{mem, slice};
let mut header: Header = unsafe { mem::zeroed() };
unsafe {
let buffer = &mut header as *mut _ as *mut u8;
let header_slice = slice::from_raw_parts_mut(buffer, mem::size_of::<Header>());
source.read_exact(header_slice)?;
}
if header.constant != [0x4E, 0x45, 0x53, 0x1A] {
Err(CartridgeError::LoadError(String::from("Invalid header constant")))
} else {
Ok(header)
}
}
pub fn trainer(&self) -> bool {
self.flag_6 & 0b00000100 != 0
}
}
impl Cartridge {
pub fn load<R: Read>(source: &mut R) -> Result<Cartridge, CartridgeError> {
let header = Header::load(source)?;
let mut trainer = vec![0; 512];
let mut program_memory = vec![0; header.program_memory_size as usize * 16384];
let mut character_memory = vec![0; header.character_memory_size as usize * 8192];
if header.trainer() {
source.read_exact(&mut trainer)?;
}
source.read_exact(&mut program_memory)?;
source.read_exact(&mut character_memory)?;
let cartridge = Cartridge {
header: header,
program_memory,
character_memory
};
info!("Loaded iNES cartridge {}", cartridge);
Ok(cartridge)
}
pub fn load_file<P: AsRef<Path>>(path: &P) -> Result<Cartridge, CartridgeError> {
let mut f = File::open(path.as_ref())?;
Cartridge::load(&mut f)
}
    pub fn mapper_code(&self) -> Byte {
        // The lower nybble of the mapper number is the high nybble of flag 6; the upper
        // nybble is the high nybble of flag 7 (see the Header field comments above).
        (self.header.flag_6 >> 4) | (self.header.flag_7 & 0b11110000)
    }
}
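// Hedged sketch (not part of the original repo, and it assumes `Byte` is a plain `u8`
// alias): checks that mapper_code() rebuilds the mapper number from the two nybbles
// documented in the flag_6 / flag_7 comments above.
#[cfg(test)]
mod mapper_code_sketch {
    use super::*;

    #[test]
    fn mapper_number_is_rebuilt_from_both_flag_nybbles() {
        // Mapper 0x42: lower nybble 0x2 in the high bits of flag 6,
        // upper nybble 0x4 in the high bits of flag 7.
        let cartridge = Cartridge {
            header: Header { flag_6: 0x21, flag_7: 0x40, ..Default::default() },
            ..Default::default()
        };
        assert_eq!(cartridge.mapper_code(), 0x42);
    }
}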
impl fmt::Display for Cartridge {
fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
write!(f, "<Rom(PRom: {}K, CRom: {}k, trainer: {}, f6: {:#b}, f7: {:#b})>",
               self.program_memory.len() / 1024,
               self.character_memory.len() / 1024,
self.header.trainer(),
self.header.flag_6,
self.header.flag_7)
}
}
| true |
11f1056cb339e8860c6090dbe45e9418ac41280b
|
Rust
|
polymath-is/loadstone
|
/tools/webserver/src/bin/webserver.rs
|
UTF-8
| 4,841 | 2.875 | 3 |
[] |
no_license
|
use std::{
path::PathBuf,
};
use server::device::{new_system_port, write_to_device, read_from_device};
use warp::{
Filter,
http::StatusCode,
reply::Response,
};
enum MetricsError {
BadPath,
BadDevice,
WriteError,
ReadError,
BadMetrics
}
impl std::fmt::Display for MetricsError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
use MetricsError::*;
match self {
BadPath => write!(f, "internal"),
BadDevice => write!(f, "device"),
WriteError => write!(f, "io"),
ReadError => write!(f, "io"),
BadMetrics => write!(f, "metrics"),
}
}
}
fn get_device_path() -> Option<String> {
// The device path should be the first argument passed to the server.
std::env::args().nth(1)
}
fn try_parse_metrics(string: &str) -> Option<(String, String)> {
const NO_METRICS_MESSAGE : &str =
"Loadstone did not relay any boot metrics, or the boot metrics were corrupted";
if string.contains(NO_METRICS_MESSAGE) {
return Some((
"unknown".to_owned(),
"unknown".to_owned(),
));
}
const VALID_REGEX_SOURCE : &str =
r#"\[Boot Metrics\][\r\n]+\* (.*)[\r\n]+\* Boot process took (.*) milliseconds\."#;
let regex = regex::Regex::new(VALID_REGEX_SOURCE).unwrap();
let captures = regex.captures(string)?;
let path = captures.get(1)?.as_str().trim();
let time = captures.get(2)?.as_str();
Some((
path.to_owned(),
time.to_owned() + "ms",
))
}
fn handle_metrics_api_request() -> Result<(String, String), MetricsError> {
let device_path = get_device_path().ok_or(MetricsError::BadPath)?;
let mut device = new_system_port(&device_path).ok_or(MetricsError::BadDevice)?;
const METRICS_COMMAND : &[u8] = b"metrics\n";
write_to_device(&mut device, METRICS_COMMAND).map_err(|_| MetricsError::WriteError)?;
let raw_data = read_from_device(&mut device).map_err(|_| MetricsError::ReadError)?;
if raw_data.is_empty() { return Err(MetricsError::ReadError); }
let message = String::from_utf8_lossy(&raw_data);
try_parse_metrics(&message).ok_or(MetricsError::BadMetrics)
}
fn respond_to_api_request(file_name: String) -> Response {
match file_name.as_str() {
"server-version" => {
Response::new(std::env!("CARGO_PKG_VERSION").into())
},
"metrics" => {
let body = match handle_metrics_api_request() {
Ok((path, time)) =>
format!(r#"{{ "error": "none", "path": "{}", "time": "{}" }}"#, path, time),
Err(error) =>
format!(r#"{{ "error": "{}", "path": "", "time": "" }}"#, error),
};
Response::new(body.into())
},
_ => {
let mut response = Response::new("404 Not found".into());
let status = response.status_mut();
*status = StatusCode::NOT_FOUND;
response
},
}
}
async fn handle_websocket(socket: warp::ws::WebSocket) {
let device = get_device_path()
.and_then(|path| new_system_port(&path));
let device = match device {
Some(d) => d,
None => {
eprintln!("Failed to open device for websocket.");
return;
}
};
use server::websocket_session::WebSocketSession;
let r = WebSocketSession::run_new(socket, device).await;
println!("{:?}", r);
}
#[tokio::main]
async fn main() {
if let Some(p) = get_device_path() {
println!("Using '{}' as a path to the device.", p);
} else {
eprintln!("No device specified. Please provide the path to the device as an argument.");
return;
}
let html_directory : PathBuf = PathBuf::from("public_html/");
let get_request = warp::get();
let index = get_request
.and(warp::path::end())
.and(warp::fs::file(html_directory.join("index.html")));
let files = get_request
.and(warp::fs::dir(html_directory))
.with(warp::compression::gzip());
let api_request = get_request
.and(warp::path!("api" / String))
.map(respond_to_api_request);
let upload_websocket = warp::ws()
.and(warp::path!("upload"))
.map(|w: warp::ws::Ws| {
w.on_upgrade(handle_websocket)
});
let not_found = get_request
.map(|| {
let mut response = Response::new("404 Not found".into());
let status = response.status_mut();
*status = StatusCode::NOT_FOUND;
response
});
let routes = index
.or(api_request)
.or(files)
.or(upload_websocket)
.or(not_found);
warp::serve(routes)
.run(([127, 0, 0, 1], 8000))
.await;
}
| true |
b632d8b048cb912a8e2fa4b8ed31dc1344fc046c
|
Rust
|
mnts26/aws-sdk-rust
|
/sdk/outposts/src/model.rs
|
UTF-8
| 33,755 | 2.609375 | 3 |
[
"Apache-2.0"
] |
permissive
|
// Code generated by software.amazon.smithy.rust.codegen.smithy-rs. DO NOT EDIT.
/// <p>Information about a site.</p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct Site {
/// <p>The ID of the site.</p>
pub site_id: std::option::Option<std::string::String>,
/// <p>The ID of the AWS account.</p>
pub account_id: std::option::Option<std::string::String>,
/// <p>The name of the site.</p>
pub name: std::option::Option<std::string::String>,
/// <p>The description of the site.</p>
pub description: std::option::Option<std::string::String>,
/// <p>The site tags.</p>
pub tags:
std::option::Option<std::collections::HashMap<std::string::String, std::string::String>>,
/// <p>The Amazon Resource Name (ARN) of the site.</p>
pub site_arn: std::option::Option<std::string::String>,
}
impl std::fmt::Debug for Site {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let mut formatter = f.debug_struct("Site");
formatter.field("site_id", &self.site_id);
formatter.field("account_id", &self.account_id);
formatter.field("name", &self.name);
formatter.field("description", &self.description);
formatter.field("tags", &self.tags);
formatter.field("site_arn", &self.site_arn);
formatter.finish()
}
}
/// See [`Site`](crate::model::Site)
pub mod site {
/// A builder for [`Site`](crate::model::Site)
#[non_exhaustive]
#[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
pub struct Builder {
pub(crate) site_id: std::option::Option<std::string::String>,
pub(crate) account_id: std::option::Option<std::string::String>,
pub(crate) name: std::option::Option<std::string::String>,
pub(crate) description: std::option::Option<std::string::String>,
pub(crate) tags: std::option::Option<
std::collections::HashMap<std::string::String, std::string::String>,
>,
pub(crate) site_arn: std::option::Option<std::string::String>,
}
impl Builder {
/// <p>The ID of the site.</p>
pub fn site_id(mut self, input: impl Into<std::string::String>) -> Self {
self.site_id = Some(input.into());
self
}
pub fn set_site_id(mut self, input: std::option::Option<std::string::String>) -> Self {
self.site_id = input;
self
}
/// <p>The ID of the AWS account.</p>
pub fn account_id(mut self, input: impl Into<std::string::String>) -> Self {
self.account_id = Some(input.into());
self
}
pub fn set_account_id(mut self, input: std::option::Option<std::string::String>) -> Self {
self.account_id = input;
self
}
/// <p>The name of the site.</p>
pub fn name(mut self, input: impl Into<std::string::String>) -> Self {
self.name = Some(input.into());
self
}
pub fn set_name(mut self, input: std::option::Option<std::string::String>) -> Self {
self.name = input;
self
}
/// <p>The description of the site.</p>
pub fn description(mut self, input: impl Into<std::string::String>) -> Self {
self.description = Some(input.into());
self
}
pub fn set_description(mut self, input: std::option::Option<std::string::String>) -> Self {
self.description = input;
self
}
pub fn tags(
mut self,
k: impl Into<std::string::String>,
v: impl Into<std::string::String>,
) -> Self {
let mut hash_map = self.tags.unwrap_or_default();
hash_map.insert(k.into(), v.into());
self.tags = Some(hash_map);
self
}
pub fn set_tags(
mut self,
input: std::option::Option<
std::collections::HashMap<std::string::String, std::string::String>,
>,
) -> Self {
self.tags = input;
self
}
/// <p>The Amazon Resource Name (ARN) of the site.</p>
pub fn site_arn(mut self, input: impl Into<std::string::String>) -> Self {
self.site_arn = Some(input.into());
self
}
pub fn set_site_arn(mut self, input: std::option::Option<std::string::String>) -> Self {
self.site_arn = input;
self
}
/// Consumes the builder and constructs a [`Site`](crate::model::Site)
pub fn build(self) -> crate::model::Site {
crate::model::Site {
site_id: self.site_id,
account_id: self.account_id,
name: self.name,
description: self.description,
tags: self.tags,
site_arn: self.site_arn,
}
}
}
}
impl Site {
/// Creates a new builder-style object to manufacture [`Site`](crate::model::Site)
pub fn builder() -> crate::model::site::Builder {
crate::model::site::Builder::default()
}
}
/// <p>Information about an Outpost.</p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct Outpost {
/// <p>
/// The ID of the Outpost.
/// </p>
pub outpost_id: std::option::Option<std::string::String>,
/// <p>The AWS account ID of the Outpost owner.</p>
pub owner_id: std::option::Option<std::string::String>,
/// <p>The Amazon Resource Name (ARN) of the Outpost.</p>
pub outpost_arn: std::option::Option<std::string::String>,
/// <p>The ID of the site.</p>
pub site_id: std::option::Option<std::string::String>,
/// <p>The name of the Outpost.</p>
pub name: std::option::Option<std::string::String>,
/// <p>The description of the Outpost.</p>
pub description: std::option::Option<std::string::String>,
/// <p>The life cycle status.</p>
pub life_cycle_status: std::option::Option<std::string::String>,
/// <p>The Availability Zone.</p>
pub availability_zone: std::option::Option<std::string::String>,
/// <p>The ID of the Availability Zone.</p>
pub availability_zone_id: std::option::Option<std::string::String>,
/// <p>The Outpost tags.</p>
pub tags:
std::option::Option<std::collections::HashMap<std::string::String, std::string::String>>,
/// <p>The Amazon Resource Name (ARN) of the site.</p>
pub site_arn: std::option::Option<std::string::String>,
}
impl std::fmt::Debug for Outpost {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let mut formatter = f.debug_struct("Outpost");
formatter.field("outpost_id", &self.outpost_id);
formatter.field("owner_id", &self.owner_id);
formatter.field("outpost_arn", &self.outpost_arn);
formatter.field("site_id", &self.site_id);
formatter.field("name", &self.name);
formatter.field("description", &self.description);
formatter.field("life_cycle_status", &self.life_cycle_status);
formatter.field("availability_zone", &self.availability_zone);
formatter.field("availability_zone_id", &self.availability_zone_id);
formatter.field("tags", &self.tags);
formatter.field("site_arn", &self.site_arn);
formatter.finish()
}
}
/// See [`Outpost`](crate::model::Outpost)
pub mod outpost {
/// A builder for [`Outpost`](crate::model::Outpost)
#[non_exhaustive]
#[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
pub struct Builder {
pub(crate) outpost_id: std::option::Option<std::string::String>,
pub(crate) owner_id: std::option::Option<std::string::String>,
pub(crate) outpost_arn: std::option::Option<std::string::String>,
pub(crate) site_id: std::option::Option<std::string::String>,
pub(crate) name: std::option::Option<std::string::String>,
pub(crate) description: std::option::Option<std::string::String>,
pub(crate) life_cycle_status: std::option::Option<std::string::String>,
pub(crate) availability_zone: std::option::Option<std::string::String>,
pub(crate) availability_zone_id: std::option::Option<std::string::String>,
pub(crate) tags: std::option::Option<
std::collections::HashMap<std::string::String, std::string::String>,
>,
pub(crate) site_arn: std::option::Option<std::string::String>,
}
impl Builder {
/// <p>
/// The ID of the Outpost.
/// </p>
pub fn outpost_id(mut self, input: impl Into<std::string::String>) -> Self {
self.outpost_id = Some(input.into());
self
}
pub fn set_outpost_id(mut self, input: std::option::Option<std::string::String>) -> Self {
self.outpost_id = input;
self
}
/// <p>The AWS account ID of the Outpost owner.</p>
pub fn owner_id(mut self, input: impl Into<std::string::String>) -> Self {
self.owner_id = Some(input.into());
self
}
pub fn set_owner_id(mut self, input: std::option::Option<std::string::String>) -> Self {
self.owner_id = input;
self
}
/// <p>The Amazon Resource Name (ARN) of the Outpost.</p>
pub fn outpost_arn(mut self, input: impl Into<std::string::String>) -> Self {
self.outpost_arn = Some(input.into());
self
}
pub fn set_outpost_arn(mut self, input: std::option::Option<std::string::String>) -> Self {
self.outpost_arn = input;
self
}
/// <p>The ID of the site.</p>
pub fn site_id(mut self, input: impl Into<std::string::String>) -> Self {
self.site_id = Some(input.into());
self
}
pub fn set_site_id(mut self, input: std::option::Option<std::string::String>) -> Self {
self.site_id = input;
self
}
/// <p>The name of the Outpost.</p>
pub fn name(mut self, input: impl Into<std::string::String>) -> Self {
self.name = Some(input.into());
self
}
pub fn set_name(mut self, input: std::option::Option<std::string::String>) -> Self {
self.name = input;
self
}
/// <p>The description of the Outpost.</p>
pub fn description(mut self, input: impl Into<std::string::String>) -> Self {
self.description = Some(input.into());
self
}
pub fn set_description(mut self, input: std::option::Option<std::string::String>) -> Self {
self.description = input;
self
}
/// <p>The life cycle status.</p>
pub fn life_cycle_status(mut self, input: impl Into<std::string::String>) -> Self {
self.life_cycle_status = Some(input.into());
self
}
pub fn set_life_cycle_status(
mut self,
input: std::option::Option<std::string::String>,
) -> Self {
self.life_cycle_status = input;
self
}
/// <p>The Availability Zone.</p>
pub fn availability_zone(mut self, input: impl Into<std::string::String>) -> Self {
self.availability_zone = Some(input.into());
self
}
pub fn set_availability_zone(
mut self,
input: std::option::Option<std::string::String>,
) -> Self {
self.availability_zone = input;
self
}
/// <p>The ID of the Availability Zone.</p>
pub fn availability_zone_id(mut self, input: impl Into<std::string::String>) -> Self {
self.availability_zone_id = Some(input.into());
self
}
pub fn set_availability_zone_id(
mut self,
input: std::option::Option<std::string::String>,
) -> Self {
self.availability_zone_id = input;
self
}
pub fn tags(
mut self,
k: impl Into<std::string::String>,
v: impl Into<std::string::String>,
) -> Self {
let mut hash_map = self.tags.unwrap_or_default();
hash_map.insert(k.into(), v.into());
self.tags = Some(hash_map);
self
}
pub fn set_tags(
mut self,
input: std::option::Option<
std::collections::HashMap<std::string::String, std::string::String>,
>,
) -> Self {
self.tags = input;
self
}
/// <p>The Amazon Resource Name (ARN) of the site.</p>
pub fn site_arn(mut self, input: impl Into<std::string::String>) -> Self {
self.site_arn = Some(input.into());
self
}
pub fn set_site_arn(mut self, input: std::option::Option<std::string::String>) -> Self {
self.site_arn = input;
self
}
/// Consumes the builder and constructs a [`Outpost`](crate::model::Outpost)
pub fn build(self) -> crate::model::Outpost {
crate::model::Outpost {
outpost_id: self.outpost_id,
owner_id: self.owner_id,
outpost_arn: self.outpost_arn,
site_id: self.site_id,
name: self.name,
description: self.description,
life_cycle_status: self.life_cycle_status,
availability_zone: self.availability_zone,
availability_zone_id: self.availability_zone_id,
tags: self.tags,
site_arn: self.site_arn,
}
}
}
}
impl Outpost {
/// Creates a new builder-style object to manufacture [`Outpost`](crate::model::Outpost)
pub fn builder() -> crate::model::outpost::Builder {
crate::model::outpost::Builder::default()
}
}
/// <p>Information about an instance type.</p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct InstanceTypeItem {
/// <p>The instance type.</p>
pub instance_type: std::option::Option<std::string::String>,
}
impl std::fmt::Debug for InstanceTypeItem {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let mut formatter = f.debug_struct("InstanceTypeItem");
formatter.field("instance_type", &self.instance_type);
formatter.finish()
}
}
/// See [`InstanceTypeItem`](crate::model::InstanceTypeItem)
pub mod instance_type_item {
/// A builder for [`InstanceTypeItem`](crate::model::InstanceTypeItem)
#[non_exhaustive]
#[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
pub struct Builder {
pub(crate) instance_type: std::option::Option<std::string::String>,
}
impl Builder {
/// <p>The instance type.</p>
pub fn instance_type(mut self, input: impl Into<std::string::String>) -> Self {
self.instance_type = Some(input.into());
self
}
pub fn set_instance_type(
mut self,
input: std::option::Option<std::string::String>,
) -> Self {
self.instance_type = input;
self
}
/// Consumes the builder and constructs a [`InstanceTypeItem`](crate::model::InstanceTypeItem)
pub fn build(self) -> crate::model::InstanceTypeItem {
crate::model::InstanceTypeItem {
instance_type: self.instance_type,
}
}
}
}
impl InstanceTypeItem {
/// Creates a new builder-style object to manufacture [`InstanceTypeItem`](crate::model::InstanceTypeItem)
pub fn builder() -> crate::model::instance_type_item::Builder {
crate::model::instance_type_item::Builder::default()
}
}
#[non_exhaustive]
#[derive(
std::clone::Clone,
std::cmp::Eq,
std::cmp::Ord,
std::cmp::PartialEq,
std::cmp::PartialOrd,
std::fmt::Debug,
std::hash::Hash,
)]
pub enum ResourceType {
Outpost,
/// Unknown contains new variants that have been added since this code was generated.
Unknown(String),
}
impl std::convert::From<&str> for ResourceType {
fn from(s: &str) -> Self {
match s {
"OUTPOST" => ResourceType::Outpost,
other => ResourceType::Unknown(other.to_owned()),
}
}
}
impl std::str::FromStr for ResourceType {
type Err = std::convert::Infallible;
fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
Ok(ResourceType::from(s))
}
}
impl ResourceType {
pub fn as_str(&self) -> &str {
match self {
ResourceType::Outpost => "OUTPOST",
ResourceType::Unknown(s) => s.as_ref(),
}
}
pub fn values() -> &'static [&'static str] {
&["OUTPOST"]
}
}
impl AsRef<str> for ResourceType {
fn as_ref(&self) -> &str {
self.as_str()
}
}
/// <p>Information about an order.</p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct Order {
/// <p>
/// The ID of the Outpost.
/// </p>
pub outpost_id: std::option::Option<std::string::String>,
/// <p>The ID of the order.</p>
pub order_id: std::option::Option<std::string::String>,
/// <p>The status of the order</p>
pub status: std::option::Option<crate::model::OrderStatus>,
/// <p>The line items for the order</p>
pub line_items: std::option::Option<std::vec::Vec<crate::model::LineItem>>,
/// <p>The payment option for the order.</p>
pub payment_option: std::option::Option<crate::model::PaymentOption>,
/// <p>The submission date for the order.</p>
pub order_submission_date: std::option::Option<smithy_types::Instant>,
/// <p>The fulfillment date of the order.</p>
pub order_fulfilled_date: std::option::Option<smithy_types::Instant>,
}
impl std::fmt::Debug for Order {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let mut formatter = f.debug_struct("Order");
formatter.field("outpost_id", &self.outpost_id);
formatter.field("order_id", &self.order_id);
formatter.field("status", &self.status);
formatter.field("line_items", &self.line_items);
formatter.field("payment_option", &self.payment_option);
formatter.field("order_submission_date", &self.order_submission_date);
formatter.field("order_fulfilled_date", &self.order_fulfilled_date);
formatter.finish()
}
}
/// See [`Order`](crate::model::Order)
pub mod order {
/// A builder for [`Order`](crate::model::Order)
#[non_exhaustive]
#[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
pub struct Builder {
pub(crate) outpost_id: std::option::Option<std::string::String>,
pub(crate) order_id: std::option::Option<std::string::String>,
pub(crate) status: std::option::Option<crate::model::OrderStatus>,
pub(crate) line_items: std::option::Option<std::vec::Vec<crate::model::LineItem>>,
pub(crate) payment_option: std::option::Option<crate::model::PaymentOption>,
pub(crate) order_submission_date: std::option::Option<smithy_types::Instant>,
pub(crate) order_fulfilled_date: std::option::Option<smithy_types::Instant>,
}
impl Builder {
/// <p>
/// The ID of the Outpost.
/// </p>
pub fn outpost_id(mut self, input: impl Into<std::string::String>) -> Self {
self.outpost_id = Some(input.into());
self
}
pub fn set_outpost_id(mut self, input: std::option::Option<std::string::String>) -> Self {
self.outpost_id = input;
self
}
/// <p>The ID of the order.</p>
pub fn order_id(mut self, input: impl Into<std::string::String>) -> Self {
self.order_id = Some(input.into());
self
}
pub fn set_order_id(mut self, input: std::option::Option<std::string::String>) -> Self {
self.order_id = input;
self
}
/// <p>The status of the order</p>
pub fn status(mut self, input: crate::model::OrderStatus) -> Self {
self.status = Some(input);
self
}
pub fn set_status(mut self, input: std::option::Option<crate::model::OrderStatus>) -> Self {
self.status = input;
self
}
pub fn line_items(mut self, input: impl Into<crate::model::LineItem>) -> Self {
let mut v = self.line_items.unwrap_or_default();
v.push(input.into());
self.line_items = Some(v);
self
}
pub fn set_line_items(
mut self,
input: std::option::Option<std::vec::Vec<crate::model::LineItem>>,
) -> Self {
self.line_items = input;
self
}
/// <p>The payment option for the order.</p>
pub fn payment_option(mut self, input: crate::model::PaymentOption) -> Self {
self.payment_option = Some(input);
self
}
pub fn set_payment_option(
mut self,
input: std::option::Option<crate::model::PaymentOption>,
) -> Self {
self.payment_option = input;
self
}
/// <p>The submission date for the order.</p>
pub fn order_submission_date(mut self, input: smithy_types::Instant) -> Self {
self.order_submission_date = Some(input);
self
}
pub fn set_order_submission_date(
mut self,
input: std::option::Option<smithy_types::Instant>,
) -> Self {
self.order_submission_date = input;
self
}
/// <p>The fulfillment date of the order.</p>
pub fn order_fulfilled_date(mut self, input: smithy_types::Instant) -> Self {
self.order_fulfilled_date = Some(input);
self
}
pub fn set_order_fulfilled_date(
mut self,
input: std::option::Option<smithy_types::Instant>,
) -> Self {
self.order_fulfilled_date = input;
self
}
/// Consumes the builder and constructs a [`Order`](crate::model::Order)
pub fn build(self) -> crate::model::Order {
crate::model::Order {
outpost_id: self.outpost_id,
order_id: self.order_id,
status: self.status,
line_items: self.line_items,
payment_option: self.payment_option,
order_submission_date: self.order_submission_date,
order_fulfilled_date: self.order_fulfilled_date,
}
}
}
}
impl Order {
/// Creates a new builder-style object to manufacture [`Order`](crate::model::Order)
pub fn builder() -> crate::model::order::Builder {
crate::model::order::Builder::default()
}
}
#[non_exhaustive]
#[derive(
std::clone::Clone,
std::cmp::Eq,
std::cmp::Ord,
std::cmp::PartialEq,
std::cmp::PartialOrd,
std::fmt::Debug,
std::hash::Hash,
)]
pub enum PaymentOption {
AllUpfront,
NoUpfront,
PartialUpfront,
/// Unknown contains new variants that have been added since this code was generated.
Unknown(String),
}
impl std::convert::From<&str> for PaymentOption {
fn from(s: &str) -> Self {
match s {
"ALL_UPFRONT" => PaymentOption::AllUpfront,
"NO_UPFRONT" => PaymentOption::NoUpfront,
"PARTIAL_UPFRONT" => PaymentOption::PartialUpfront,
other => PaymentOption::Unknown(other.to_owned()),
}
}
}
impl std::str::FromStr for PaymentOption {
type Err = std::convert::Infallible;
fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
Ok(PaymentOption::from(s))
}
}
impl PaymentOption {
pub fn as_str(&self) -> &str {
match self {
PaymentOption::AllUpfront => "ALL_UPFRONT",
PaymentOption::NoUpfront => "NO_UPFRONT",
PaymentOption::PartialUpfront => "PARTIAL_UPFRONT",
PaymentOption::Unknown(s) => s.as_ref(),
}
}
pub fn values() -> &'static [&'static str] {
&["ALL_UPFRONT", "NO_UPFRONT", "PARTIAL_UPFRONT"]
}
}
impl AsRef<str> for PaymentOption {
fn as_ref(&self) -> &str {
self.as_str()
}
}
/// <p>Information about a line item.</p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct LineItem {
/// <p>
/// The ID of the catalog item.
/// </p>
pub catalog_item_id: std::option::Option<std::string::String>,
/// <p>The ID of the line item.</p>
pub line_item_id: std::option::Option<std::string::String>,
/// <p>The quantity of the line item.</p>
pub quantity: i32,
/// <p>The status of the line item.</p>
pub status: std::option::Option<std::string::String>,
}
impl std::fmt::Debug for LineItem {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let mut formatter = f.debug_struct("LineItem");
formatter.field("catalog_item_id", &self.catalog_item_id);
formatter.field("line_item_id", &self.line_item_id);
formatter.field("quantity", &self.quantity);
formatter.field("status", &self.status);
formatter.finish()
}
}
/// See [`LineItem`](crate::model::LineItem)
pub mod line_item {
/// A builder for [`LineItem`](crate::model::LineItem)
#[non_exhaustive]
#[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
pub struct Builder {
pub(crate) catalog_item_id: std::option::Option<std::string::String>,
pub(crate) line_item_id: std::option::Option<std::string::String>,
pub(crate) quantity: std::option::Option<i32>,
pub(crate) status: std::option::Option<std::string::String>,
}
impl Builder {
/// <p>
/// The ID of the catalog item.
/// </p>
pub fn catalog_item_id(mut self, input: impl Into<std::string::String>) -> Self {
self.catalog_item_id = Some(input.into());
self
}
pub fn set_catalog_item_id(
mut self,
input: std::option::Option<std::string::String>,
) -> Self {
self.catalog_item_id = input;
self
}
/// <p>The ID of the line item.</p>
pub fn line_item_id(mut self, input: impl Into<std::string::String>) -> Self {
self.line_item_id = Some(input.into());
self
}
pub fn set_line_item_id(mut self, input: std::option::Option<std::string::String>) -> Self {
self.line_item_id = input;
self
}
/// <p>The quantity of the line item.</p>
pub fn quantity(mut self, input: i32) -> Self {
self.quantity = Some(input);
self
}
pub fn set_quantity(mut self, input: std::option::Option<i32>) -> Self {
self.quantity = input;
self
}
/// <p>The status of the line item.</p>
pub fn status(mut self, input: impl Into<std::string::String>) -> Self {
self.status = Some(input.into());
self
}
pub fn set_status(mut self, input: std::option::Option<std::string::String>) -> Self {
self.status = input;
self
}
/// Consumes the builder and constructs a [`LineItem`](crate::model::LineItem)
pub fn build(self) -> crate::model::LineItem {
crate::model::LineItem {
catalog_item_id: self.catalog_item_id,
line_item_id: self.line_item_id,
quantity: self.quantity.unwrap_or_default(),
status: self.status,
}
}
}
}
impl LineItem {
/// Creates a new builder-style object to manufacture [`LineItem`](crate::model::LineItem)
pub fn builder() -> crate::model::line_item::Builder {
crate::model::line_item::Builder::default()
}
}
#[non_exhaustive]
#[derive(
std::clone::Clone,
std::cmp::Eq,
std::cmp::Ord,
std::cmp::PartialEq,
std::cmp::PartialOrd,
std::fmt::Debug,
std::hash::Hash,
)]
pub enum OrderStatus {
Cancelled,
Fulfilled,
Installing,
Pending,
Processing,
Received,
/// Unknown contains new variants that have been added since this code was generated.
Unknown(String),
}
impl std::convert::From<&str> for OrderStatus {
fn from(s: &str) -> Self {
match s {
"CANCELLED" => OrderStatus::Cancelled,
"FULFILLED" => OrderStatus::Fulfilled,
"INSTALLING" => OrderStatus::Installing,
"PENDING" => OrderStatus::Pending,
"PROCESSING" => OrderStatus::Processing,
"RECEIVED" => OrderStatus::Received,
other => OrderStatus::Unknown(other.to_owned()),
}
}
}
impl std::str::FromStr for OrderStatus {
type Err = std::convert::Infallible;
fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
Ok(OrderStatus::from(s))
}
}
impl OrderStatus {
pub fn as_str(&self) -> &str {
match self {
OrderStatus::Cancelled => "CANCELLED",
OrderStatus::Fulfilled => "FULFILLED",
OrderStatus::Installing => "INSTALLING",
OrderStatus::Pending => "PENDING",
OrderStatus::Processing => "PROCESSING",
OrderStatus::Received => "RECEIVED",
OrderStatus::Unknown(s) => s.as_ref(),
}
}
pub fn values() -> &'static [&'static str] {
&[
"CANCELLED",
"FULFILLED",
"INSTALLING",
"PENDING",
"PROCESSING",
"RECEIVED",
]
}
}
impl AsRef<str> for OrderStatus {
fn as_ref(&self) -> &str {
self.as_str()
}
}
#[non_exhaustive]
#[derive(
std::clone::Clone,
std::cmp::Eq,
std::cmp::Ord,
std::cmp::PartialEq,
std::cmp::PartialOrd,
std::fmt::Debug,
std::hash::Hash,
)]
pub enum PaymentTerm {
ThreeYears,
/// Unknown contains new variants that have been added since this code was generated.
Unknown(String),
}
impl std::convert::From<&str> for PaymentTerm {
fn from(s: &str) -> Self {
match s {
"THREE_YEARS" => PaymentTerm::ThreeYears,
other => PaymentTerm::Unknown(other.to_owned()),
}
}
}
impl std::str::FromStr for PaymentTerm {
type Err = std::convert::Infallible;
fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
Ok(PaymentTerm::from(s))
}
}
impl PaymentTerm {
pub fn as_str(&self) -> &str {
match self {
PaymentTerm::ThreeYears => "THREE_YEARS",
PaymentTerm::Unknown(s) => s.as_ref(),
}
}
pub fn values() -> &'static [&'static str] {
&["THREE_YEARS"]
}
}
impl AsRef<str> for PaymentTerm {
fn as_ref(&self) -> &str {
self.as_str()
}
}
/// <p>Information about a line item request.</p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct LineItemRequest {
/// <p>The ID of the catalog item.</p>
pub catalog_item_id: std::option::Option<std::string::String>,
/// <p>The quantity of a line item request.</p>
pub quantity: i32,
}
impl std::fmt::Debug for LineItemRequest {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let mut formatter = f.debug_struct("LineItemRequest");
formatter.field("catalog_item_id", &self.catalog_item_id);
formatter.field("quantity", &self.quantity);
formatter.finish()
}
}
/// See [`LineItemRequest`](crate::model::LineItemRequest)
pub mod line_item_request {
/// A builder for [`LineItemRequest`](crate::model::LineItemRequest)
#[non_exhaustive]
#[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
pub struct Builder {
pub(crate) catalog_item_id: std::option::Option<std::string::String>,
pub(crate) quantity: std::option::Option<i32>,
}
impl Builder {
/// <p>The ID of the catalog item.</p>
pub fn catalog_item_id(mut self, input: impl Into<std::string::String>) -> Self {
self.catalog_item_id = Some(input.into());
self
}
pub fn set_catalog_item_id(
mut self,
input: std::option::Option<std::string::String>,
) -> Self {
self.catalog_item_id = input;
self
}
/// <p>The quantity of a line item request.</p>
pub fn quantity(mut self, input: i32) -> Self {
self.quantity = Some(input);
self
}
pub fn set_quantity(mut self, input: std::option::Option<i32>) -> Self {
self.quantity = input;
self
}
/// Consumes the builder and constructs a [`LineItemRequest`](crate::model::LineItemRequest)
pub fn build(self) -> crate::model::LineItemRequest {
crate::model::LineItemRequest {
catalog_item_id: self.catalog_item_id,
quantity: self.quantity.unwrap_or_default(),
}
}
}
}
impl LineItemRequest {
/// Creates a new builder-style object to manufacture [`LineItemRequest`](crate::model::LineItemRequest)
pub fn builder() -> crate::model::line_item_request::Builder {
crate::model::line_item_request::Builder::default()
}
}
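// Illustrative builder usage (a sketch, not part of the generated code; assumes this
// module is compiled as `crate::model` and uses a hypothetical catalog item ID):
//
//     let line_item = crate::model::LineItemRequest::builder()
//         .catalog_item_id("item-123")
//         .quantity(2)
//         .build();
//     assert_eq!(line_item.quantity, 2);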
| true | 498958f59ce17e9acd7b75a0903eae265e38f0f1 | Rust | MindFlavor/azure-sdk-for-rust | /sdk/cosmos/tests/permission_token_usage.rs | UTF-8 | 5,014 | 2.6875 | 3 | ["MIT", "LicenseRef-scancode-generic-cla", "LGPL-2.1-or-later"] | permissive |
#![cfg(all(test, feature = "test_e2e"))]
use azure_core::Context;
use azure_cosmos::prelude::*;
use collection::*;
use serde::{Deserialize, Serialize};
use std::borrow::Cow;
mod setup;
#[derive(Clone, Serialize, Deserialize, Debug)]
struct MySampleStruct<'a> {
id: Cow<'a, str>,
age: u32,
phones: Vec<Cow<'a, str>>,
}
impl<'a> azure_cosmos::CosmosEntity<'a> for MySampleStruct<'a> {
type Entity = &'a str;
fn partition_key(&'a self) -> Self::Entity {
self.id.as_ref()
}
}
#[tokio::test]
async fn permission_token_usage() {
const DATABASE_NAME: &str = "cosmos-test-db-permusage";
const COLLECTION_NAME: &str = "cosmos-test-db-permusage";
const USER_NAME: &str = "[email protected]";
const PERMISSION: &str = "sdktest";
let mut client = setup::initialize().unwrap();
// create a temp database
let _create_database_response = client
.create_database(
azure_core::Context::new(),
DATABASE_NAME,
CreateDatabaseOptions::new(),
)
.await
.unwrap();
let database_client = client.clone().into_database_client(DATABASE_NAME);
// create a new collection
let indexing_policy = IndexingPolicy {
automatic: true,
indexing_mode: IndexingMode::Consistent,
included_paths: vec![],
excluded_paths: vec![],
};
let create_collection_options = CreateCollectionOptions::new("/id")
.offer(Offer::Throughput(400))
.indexing_policy(indexing_policy);
let create_collection_response = database_client
.create_collection(Context::new(), COLLECTION_NAME, create_collection_options)
.await
.unwrap();
let user_client = database_client.clone().into_user_client(USER_NAME);
user_client
.create_user(Context::new(), CreateUserOptions::new())
.await
.unwrap();
// create the RO permission
let permission_client = user_client.into_permission_client(PERMISSION);
let permission_mode = create_collection_response.collection.read_permission();
let create_permission_response = permission_client
.create_permission(
Context::new(),
CreatePermissionOptions::new().expiry_seconds(18000u64), // 5 hours, max!
&permission_mode,
)
.await
.unwrap();
// change the AuthorizationToken using the token
// of the permission.
let new_authorization_token: AuthorizationToken = create_permission_response
.permission
.permission_token
.into();
client.auth_token(new_authorization_token);
let new_database_client = client.clone().into_database_client(DATABASE_NAME);
// let's list the collection content.
// This must succeed.
new_database_client
.clone()
.into_collection_client(COLLECTION_NAME)
.list_documents()
.execute::<serde_json::Value>()
.await
.unwrap();
let new_collection_client = new_database_client.into_collection_client(COLLECTION_NAME);
// Now we try to insert a document with the "read-only"
// authorization_token just created. It must fail.
let document = MySampleStruct {
id: Cow::Borrowed("Gianluigi Bombatomica"),
age: 43,
phones: vec![Cow::Borrowed("+39 1234567"), Cow::Borrowed("+39 2345678")],
};
new_collection_client
.create_document(
Context::new(),
&document,
CreateDocumentOptions::new().is_upsert(true),
)
.await
.unwrap_err();
permission_client
.delete_permission(Context::new(), DeletePermissionOptions::new())
.await
.unwrap();
// All includes read and write.
let permission_mode = create_collection_response.collection.all_permission();
let create_permission_response = permission_client
.create_permission(
Context::new(),
CreatePermissionOptions::new().expiry_seconds(18000u64), // 5 hours, max!
&permission_mode,
)
.await
.unwrap();
let new_authorization_token: AuthorizationToken = create_permission_response
.permission
.permission_token
.into();
client.auth_token(new_authorization_token);
let new_database_client = client.into_database_client(DATABASE_NAME);
let new_collection_client = new_database_client.into_collection_client(COLLECTION_NAME);
// now we have an "All" authorization_token
// so the create_document should succeed!
let create_document_response = new_collection_client
.create_document(
Context::new(),
&document,
CreateDocumentOptions::new().is_upsert(true),
)
.await
.unwrap();
println!(
"create_document_response == {:#?}",
create_document_response
);
// cleanup
database_client
.delete_database(Context::new(), DeleteDatabaseOptions::new())
.await
.unwrap();
}
| true | b31f6630c16d428dc214202dc3b457abbbbb8160 | Rust | ellipticoin/moonshined | /ellipticoin_contracts/src/contract.rs | UTF-8 | 893 | 2.65625 | 3 | [] | no_license |
use crate::helpers::pad_left;
use ellipticoin_types::{
db::{Backend, Db},
Address, ADDRESS_LENGTH,
};
use serde::{de::DeserializeOwned, Serialize};
use std::convert::TryInto;
pub trait Contract {
const NAME: Name;
fn get<K: Into<Vec<u8>>, V: DeserializeOwned + Default, B: Backend>(
db: &mut Db<B>,
key: K,
) -> V {
db.get(Self::NAME as u16, key)
}
fn insert<K: Into<Vec<u8>>, V: Serialize, B: Backend>(db: &mut Db<B>, key: K, value: V) {
db.insert(Self::NAME as u16, key, value)
}
fn address() -> Address {
Address(
pad_left((Self::NAME as u16).to_be_bytes().to_vec(), ADDRESS_LENGTH)[..ADDRESS_LENGTH]
.try_into()
.unwrap(),
)
}
}
#[repr(u16)]
pub enum Name {
AMM,
Bridge,
Ellipticoin,
Governance,
OrderBook,
System,
Token,
}
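// Illustrative note (added, not from the original source): `Contract::address()` derives
// a deterministic address from the `#[repr(u16)]` discriminant. For example, `Name::Bridge`
// has discriminant 1, so `(Name::Bridge as u16).to_be_bytes()` is `[0, 1]`, which `pad_left`
// extends with leading zero bytes to `ADDRESS_LENGTH` before it is wrapped in `Address`.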
| true | 4cb4d2ebe3a630ad0222f1a1845a8dbd99e00651 | Rust | nrbray/infrabase | /src/wireguard.rs | UTF-8 | 2,153 | 3.421875 | 3 | [] | no_license |
use std::io::Write;
use std::process::{Command, Stdio};
use anyhow::{ensure, Result};
fn run(cmd: &str, args: &[&str], input: Option<&[u8]>) -> Result<Vec<u8>> {
let mut child = Command::new(cmd)
.args(args)
.stdin(Stdio::piped())
.stdout(Stdio::piped())
.spawn()?;
if let Some(input) = input {
let stdin = child.stdin.as_mut();
ensure!(stdin.is_some(), "Could not get stdin for child process");
stdin.unwrap().write_all(input)?;
}
let output = child.wait_with_output()?;
ensure!(output.status.success(), "{:?} finished with non-zero exit status {}", cmd, output.status);
Ok(output.stdout)
}
pub(crate) struct Keypair {
pub privkey: Vec<u8>,
pub pubkey: Vec<u8>,
}
fn chomp_newline(vec: &mut Vec<u8>) {
if let Some(b'\n') = vec.last() {
vec.pop();
}
}
pub(crate) fn generate_keypair() -> Result<Keypair> {
let mut privkey = run("wg", &["genkey"], None)?.to_vec();
let mut pubkey = run("wg", &["pubkey"], Some(&privkey))?.to_vec();
chomp_newline(&mut privkey);
chomp_newline(&mut pubkey);
Ok(Keypair { privkey, pubkey })
}
#[cfg(test)]
mod tests {
use super::chomp_newline;
use super::generate_keypair;
/// Does not chomp anything if there is no trailing newline
#[test]
fn test_chomp_newline_no_change() {
for string in &[b"hello\nworld".to_vec(), b" ".to_vec(), b"".to_vec()] {
let mut vec = string.clone();
chomp_newline(&mut vec);
assert_eq!(vec, *string);
}
}
/// Chomps just one trailing newline
#[test]
fn test_chomp_newline() {
let mut vec = b"hello\n".to_vec();
chomp_newline(&mut vec);
assert_eq!(vec, b"hello".to_vec());
let mut vec = b"\n\n".to_vec();
chomp_newline(&mut vec);
assert_eq!(vec, b"\n".to_vec());
}
/// Keypair has privkey and pubkey of correct length
#[test]
fn test_generate_keypair() {
let keypair = generate_keypair().unwrap();
assert_eq!(keypair.privkey.len(), 44);
assert_eq!(keypair.pubkey.len(), 44);
}
}
| true | 065caf5d02c22c4352a67eca3682e3187ef51272 | Rust | dwuid/advent-of-code-2015 | /aoc_22/src/types/mod.rs | UTF-8 | 2,522 | 2.921875 | 3 | ["MIT"] | permissive |
#[macro_export]
macro_rules! decrease {
($i: expr, $e: expr) => {
if $i < $e { $i = 0; } else { $i -= $e; }
}
}
mod spells;
pub use self::spells::available_spells;
#[derive(Clone, Debug)]
pub struct Character {
pub hitpoints: u16,
pub damage: u16,
pub armor: u16,
pub mana: u16,
pub mana_spent: u32
}
#[derive(Clone, Debug)]
pub struct Contestants {
pub player: Character,
pub opponent: Character,
}
type Application = fn(&mut Contestants);
pub struct RecurringEffect {
apply: Application
}
impl Clone for RecurringEffect {
fn clone(&self) -> RecurringEffect {
RecurringEffect { apply: self.apply }
}
}
pub struct AlteratingEffect {
active: bool,
apply: Application,
undo: Application
}
impl Clone for AlteratingEffect {
fn clone(&self) -> AlteratingEffect {
AlteratingEffect {
active: self.active,
apply: self.apply,
undo: self.undo
}
}
}
#[derive(Clone)]
pub enum ConcreteEffect {
Recurring(RecurringEffect),
Alterating(AlteratingEffect)
}
#[derive(Clone)]
pub struct Effect<'a> {
    pub spell: &'a (dyn Spell + 'a),
pub rounds: u16,
pub effect: ConcreteEffect
}
impl<'a> Effect<'a> {
    fn new(spell: &'a dyn Spell, rounds: u16, effect: ConcreteEffect)
-> Effect<'a> {
Effect { spell: spell, rounds: rounds, effect: effect }
}
pub fn render(&mut self, contestants: &mut Contestants) -> bool {
use self::ConcreteEffect::*;
if self.rounds == 0 {
return false;
}
self.rounds -= 1;
match self.effect {
Recurring(ref effect) => {
(effect.apply)(contestants);
},
Alterating(ref mut effect) => {
if !effect.active {
effect.active = true;
(effect.apply)(contestants);
}
if self.rounds == 0 {
effect.active = false;
(effect.undo)(contestants);
}
},
}
self.rounds != 0
}
}
pub trait Spell {
fn cost(&self) -> u16;
fn id(&self) -> u16 { self.cost() }
fn available(&self, contestants: &Contestants) -> bool {
contestants.player.mana >= self.cost()
}
    fn cast(&self, contestants: &mut Contestants) -> Option<Effect>;
}
trait SpellEffect : Spell {
    fn apply(_contestants: &mut Contestants) { }
    fn undo(_contestants: &mut Contestants) { }
}
| true | 7653aee0e44c923838aa0142523917c05ae567ed | Rust | Mokosha/pbrt_rust | /src/primitive/mod.rs | UTF-8 | 6,777 | 2.578125 | 3 | [] | no_license |
mod aggregates;
mod geometric;
mod transformed;
use area_light::AreaLight;
use bbox::BBox;
use bbox::HasBounds;
use bsdf::BSDF;
use bsdf::bssrdf::BSSRDF;
use diff_geom::DifferentialGeometry;
use intersection::Intersectable;
use intersection::Intersection;
use material::Material;
use ray::Ray;
use shape::Shape;
use transform::animated::AnimatedTransform;
use transform::transform::Transform;
use primitive::geometric::GeometricPrimitive;
use primitive::transformed::TransformedPrimitive;
use primitive::aggregates::Aggregate;
use std::sync::atomic::AtomicUsize;
use std::sync::Arc;
#[derive(Clone, Debug)]
pub struct PrimitiveBase {
pub prim_id: usize
}
static NEXT_PRIM_ID: AtomicUsize = ::std::sync::atomic::AtomicUsize::new(0);
impl PrimitiveBase {
pub fn new() -> PrimitiveBase { PrimitiveBase {
prim_id: NEXT_PRIM_ID.fetch_add(1, ::std::sync::atomic::Ordering::Relaxed) } }
}
impl ::std::cmp::PartialEq for PrimitiveBase {
fn eq(&self, _: &PrimitiveBase) -> bool { true }
}
pub trait Refinable<T = Self> {
fn is_refined(&self) -> bool;
fn refine(self) -> Vec<T>;
}
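// `fully_refine` below is a simple worklist loop: it keeps calling `refine` on
// intermediate results until every produced primitive reports `is_refined()`.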
pub trait FullyRefinable : Refinable<Self>+Sized {
fn fully_refine(self) -> Vec<Self> {
let mut todo = self.refine();
let mut done = Vec::new();
while let Some(x) = todo.pop() {
if x.is_refined() {
done.push(x);
} else {
let mut rx = x.refine();
todo.append(&mut rx);
}
}
done
}
}
#[derive(Clone, Debug)] // , PartialEq)]
enum Prim {
Geometric(GeometricPrimitive),
Transformed(TransformedPrimitive),
Aggregate(Aggregate)
}
#[derive(Clone, Debug)] // , PartialEq)]
pub struct Primitive {
base: PrimitiveBase,
prim: Arc<Prim>
}
impl Primitive {
pub fn simple(s: Shape) -> Primitive {
Primitive {
base: PrimitiveBase::new(),
prim: Arc::new(Prim::Geometric(
GeometricPrimitive::new(s, Arc::new(Material::broken()))))
}
}
pub fn geometric(s: Shape, mtl: Arc<Material>) -> Primitive {
Primitive {
base: PrimitiveBase::new(),
prim: Arc::new(Prim::Geometric(GeometricPrimitive::new(s, mtl)))
}
}
pub fn geometric_area_light(s: Shape, mtl: Arc<Material>, al: Arc<AreaLight>) -> Primitive {
Primitive {
base: PrimitiveBase::new(),
prim: Arc::new(Prim::Geometric(GeometricPrimitive::new_lit(s, mtl, al)))
}
}
pub fn transformed(p: Arc<Primitive>, xf: AnimatedTransform) -> Primitive {
Primitive {
base: PrimitiveBase::new(),
prim: Arc::new(Prim::Transformed(TransformedPrimitive::new(p, xf)))
}
}
pub fn grid(p: Vec<Primitive>, refine_immediately: bool) -> Primitive {
Primitive {
base: PrimitiveBase::new(),
prim: Arc::new(Prim::Aggregate(Aggregate::grid(p, refine_immediately)))
}
}
pub fn bvh(p: Vec<Primitive>, max_prims: usize, sm: &'static str) -> Primitive {
Primitive {
base: PrimitiveBase::new(),
prim: Arc::new(Prim::Aggregate(Aggregate::bvh(p, max_prims, sm))),
}
}
pub fn can_intersect(&self) -> bool {
match self.prim.as_ref() {
&Prim::Geometric(ref p) => p.can_intersect(),
&Prim::Transformed(ref p) => p.primitive().can_intersect(),
            &Prim::Aggregate(_) => true, // all aggregates are intersectable
}
}
pub fn get_id(&self) -> usize { self.base.prim_id }
pub fn area_light(&self) -> Option<Arc<AreaLight>> {
match self.prim.as_ref() {
&Prim::Geometric(ref p) => p.area_light(),
_ => panic!("Only geometric primitives may have area lights")
}
}
pub fn get_bsdf(&self, dg: DifferentialGeometry,
o2w: &Transform) -> Option<BSDF> {
match self.prim.as_ref() {
&Prim::Geometric(ref p) => p.get_bsdf(dg, o2w),
_ => panic!("Only geometric primitives may have bsdfs")
}
}
pub fn get_bssrdf(&self, dg: DifferentialGeometry,
o2w: &Transform) -> Option<BSSRDF> {
match self.prim.as_ref() {
&Prim::Geometric(ref p) => p.get_bssrdf(dg, o2w),
_ => panic!("Only geometric primitives may have bssrdfs")
}
}
}
impl HasBounds for Primitive {
fn world_bound(&self) -> BBox {
match self.prim.as_ref() {
&Prim::Geometric(ref prim) => prim.world_bound(),
&Prim::Transformed(ref p) => p.world_bound(),
&Prim::Aggregate(ref a) => a.world_bound()
}
}
}
impl Intersectable for Primitive {
fn intersect(&self, ray : &Ray) -> Option<Intersection> {
match self.prim.as_ref() {
&Prim::Geometric(ref prim) => {
prim.intersect(ray).and_then(|mut isect| {
isect.primitive = Some(Arc::new(self.clone()));
Some(isect)
})
},
&Prim::Transformed(ref prim) => prim.intersect(ray),
&Prim::Aggregate(ref a) => a.intersect(ray)
}.and_then(|mut isect| {
isect.primitive_id = self.base.prim_id;
Some(isect)
})
}
fn intersect_p(&self, ray : &Ray) -> bool {
match self.prim.as_ref() {
&Prim::Geometric(ref prim) => prim.intersect_p(ray),
&Prim::Transformed(ref prim) => prim.intersect_p(ray),
&Prim::Aggregate(ref a) => a.intersect_p(ray)
}
}
}
impl Refinable for Primitive {
fn refine(self) -> Vec<Primitive> {
if self.is_refined() {
return vec![self];
}
let prim = match Arc::try_unwrap(self.prim) {
Ok(p) => p,
Err(pr_ref) => pr_ref.as_ref().clone()
};
let prims = match prim {
Prim::Geometric(p) =>
p.refine().iter().cloned().map(Prim::Geometric).collect(),
Prim::Transformed(_) =>
panic!("Transformed primitive should already be refined!"),
Prim::Aggregate(a) => vec![Prim::Aggregate(a)]
};
prims.into_iter().map(|p| {
Primitive {
base: PrimitiveBase::new(),
prim: Arc::new(p)
}
}).collect()
}
fn is_refined(&self) -> bool {
match self.prim.as_ref() {
&Prim::Geometric(ref p) => p.is_refined(),
&Prim::Transformed(ref p) => {
assert!(p.primitive().is_refined());
true
}
&Prim::Aggregate(_) => true
}
}
}
impl FullyRefinable for Primitive { }
| true | d21714a38789854450379f9f617c4ba5d600266f | Rust | robohouse-delft/abbrws-rs | /abbrws/src/parse/signal.rs | UTF-8 | 5,704 | 3.1875 | 3 | [] | no_license |
use serde::Deserialize;
#[derive(Copy, Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Deserialize)]
pub enum SignalKind {
#[serde(rename = "DI")]
DigitalInput,
#[serde(rename = "DO")]
DigitalOutput,
#[serde(rename = "AI")]
AnalogInput,
#[serde(rename = "AO")]
AnalogOutput,
#[serde(rename = "GI")]
GroupInput,
#[serde(rename = "GO")]
GroupOutput,
}
#[derive(Copy, Clone, Debug, PartialEq)]
pub enum SignalValue {
Binary(bool),
Analog(f64),
Group(u64),
}
impl std::fmt::Display for SignalKind {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
match self {
SignalKind::DigitalInput => f.pad("digital input"),
SignalKind::DigitalOutput => f.pad("digital output"),
SignalKind::AnalogInput => f.pad("analog input"),
SignalKind::AnalogOutput => f.pad("analog output"),
SignalKind::GroupInput => f.pad("group input"),
SignalKind::GroupOutput => f.pad("group output"),
}
}
}
impl std::fmt::Display for SignalValue {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
match self {
SignalValue::Binary(x) => write!(f, "{}", if *x { 1 } else { 0 }),
SignalValue::Analog(x) => write!(f, "{}", x),
SignalValue::Group(x) => write!(f, "{}", x),
}
}
}
#[derive(Debug)]
pub struct SignalValueFromStrError;
impl std::error::Error for SignalValueFromStrError {}
impl std::fmt::Display for SignalValueFromStrError {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
write!(f, "invalid signal value")
}
}
impl std::str::FromStr for SignalValue {
type Err = SignalValueFromStrError;
fn from_str(input: &str) -> Result<Self, Self::Err> {
if input == "1" {
Ok(SignalValue::Binary(true))
} else if input == "0" {
Ok(SignalValue::Binary(false))
} else if let Ok(value) = input.parse::<bool>() {
Ok(SignalValue::Binary(value))
} else if let Ok(value) = input.parse::<u64>() {
Ok(SignalValue::Group(value))
} else if let Ok(value) = input.parse::<f64>() {
Ok(SignalValue::Analog(value))
} else {
Err(SignalValueFromStrError)
}
}
}
#[derive(Copy, Clone, Debug, PartialEq, Deserialize)]
struct RawSingleSignal<'a> {
#[serde(rename = "_title")]
pub title: &'a str,
#[serde(rename = "type")]
pub kind: SignalKind,
pub category: &'a str,
pub lvalue: &'a str,
}
#[derive(Copy, Clone, Debug, PartialEq, Deserialize)]
struct RawListSignal<'a> {
#[serde(rename = "_title")]
pub title: &'a str,
#[serde(rename = "type")]
pub kind: SignalKind,
pub category: &'a str,
pub lvalue: f64,
}
#[derive(Clone, Debug, PartialEq)]
pub struct Signal {
pub title: String,
pub kind: SignalKind,
pub category: String,
pub lvalue: SignalValue,
}
impl Signal {
fn from_single_raw(raw: RawSingleSignal) -> serde_json::Result<Self> {
use serde::de::Unexpected;
use serde::de::Error;
// Parse value depending on signal type.
let value = match raw.kind {
SignalKind::AnalogInput | SignalKind::AnalogOutput => {
raw.lvalue.parse::<f64>()
.map(SignalValue::Analog)
.map_err(|_| Error::invalid_type(Unexpected::Str(raw.lvalue), &"floating-point value"))
},
SignalKind::DigitalInput | SignalKind::DigitalOutput => {
match raw.lvalue {
"1" => Ok(SignalValue::Binary(true)),
"0" => Ok(SignalValue::Binary(false)),
_ => Err(Error::invalid_type(Unexpected::Str(raw.lvalue), &"1 or 0"))
}
},
SignalKind::GroupInput | SignalKind::GroupOutput => {
raw.lvalue.parse::<u64>()
.map(SignalValue::Group)
.map_err(|_| Error::invalid_type(Unexpected::Str(raw.lvalue), &"integer"))
},
}?;
Ok(Signal {
title: raw.title.into(),
kind: raw.kind,
category: raw.category.into(),
lvalue: value,
})
}
fn from_list_raw(raw: RawListSignal) -> serde_json::Result<Self> {
use serde::de::Unexpected;
use serde::de::Error;
// Parse value depending on signal type.
let value = match raw.kind {
SignalKind::AnalogInput | SignalKind::AnalogOutput => Ok(SignalValue::Analog(raw.lvalue)),
SignalKind::DigitalInput | SignalKind::DigitalOutput => {
if raw.lvalue == 1.0 {
Ok(SignalValue::Binary(true))
} else if raw.lvalue == 0.0 {
Ok(SignalValue::Binary(false))
} else {
Err(Error::invalid_type(Unexpected::Float(raw.lvalue), &"1 or 0"))
}
},
SignalKind::GroupInput | SignalKind::GroupOutput => {
// TODO: Is this safe? What's the maximum number of signals in a group,
// and do they fit lossless in a f64?
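                // Note: an f64 represents integers exactly only up to 2^53, so
                // group values beyond that would lose precision in this cast.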
Ok(SignalValue::Group(raw.lvalue as u64))
},
}?;
Ok(Signal {
title: raw.title.into(),
kind: raw.kind,
category: raw.category.into(),
lvalue: value,
})
}
}
pub fn parse_one(data: &[u8]) -> serde_json::Result<Signal> {
super::parse_one::<RawSingleSignal>(data)
.and_then(Signal::from_single_raw)
}
pub fn parse_list(data: &[u8]) -> serde_json::Result<Vec<Signal>> {
super::parse_vec::<RawListSignal>(data)?
.into_iter()
.map(Signal::from_list_raw)
.collect()
}
#[cfg(test)]
mod test {
use super::*;
use assert2::assert;
use assert2::check;
#[test]
fn test_parse_signals() {
let parsed = parse_list(include_bytes!("../../../samples/signals.json"));
assert!(let Ok(_) = &parsed);
}
#[test]
fn test_parse_bad_signal() {
assert!(let Err(_) = parse_one(include_bytes!("../../../samples/bad_signal.json")));
}
#[test]
fn test_parse_signal() {
let parsed = parse_one(include_bytes!("../../../samples/good_signal.json"));
assert!(let Ok(_) = &parsed);
let parsed = parsed.unwrap();
check!(parsed.title == "Local/PANEL/SS2");
check!(parsed.category == "safety");
check!(parsed.kind == SignalKind::DigitalInput);
check!(parsed.lvalue == SignalValue::Binary(true));
}
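	/// Added illustrative test (not in the original suite): checks the parsing
	/// precedence of `SignalValue::from_str`: "1"/"0" become Binary, other
	/// integers become Group, and floating-point text becomes Analog.
	#[test]
	fn test_signal_value_parse_precedence() {
		check!("1".parse::<SignalValue>().unwrap() == SignalValue::Binary(true));
		check!("7".parse::<SignalValue>().unwrap() == SignalValue::Group(7));
		check!("1.5".parse::<SignalValue>().unwrap() == SignalValue::Analog(1.5));
		check!("oops".parse::<SignalValue>().is_err());
	}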
}
| true | 3e4c874282f9d8b27a182c74a1ea5f224908ff29 | Rust | swiboe/swiboe | /src/rpc.rs | UTF-8 | 2,580 | 2.5625 | 3 | ["Apache-2.0"] | permissive |
// Copyright (c) The Swiboe development team. All rights reserved.
// Licensed under the Apache License, Version 2.0. See LICENSE.txt
// in the project root for license information.
use serde;
use serde_json;
use std::error::Error as StdError;
use serde::{Serialize, Deserialize};
// NOCOM(#sirver): add documentation (using this lint that forbids not having documentation).
//
#[derive(Serialize, Deserialize, Debug, Clone)]
pub enum ResponseKind {
Last(Result),
Partial(serde_json::Value),
}
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct Response {
pub context: String,
pub kind: ResponseKind,
}
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct StreamingResult {
pub context: String,
pub value: serde_json::Value,
}
#[derive(Debug, PartialEq, Clone, Serialize, Deserialize)]
pub enum ErrorKind {
UnknownRpc,
Io,
InvalidArgs,
}
#[derive(Serialize, Deserialize, Debug, PartialEq, Clone)]
pub struct Error {
pub kind: ErrorKind,
pub details: Option<serde_json::Value>,
}
impl From<serde_json::error::Error> for Error {
fn from(error: serde_json::error::Error) -> Self {
Error {
kind: ErrorKind::InvalidArgs,
details: Some(serde_json::to_value(&error.description()).unwrap()),
}
}
}
#[derive(Serialize, Deserialize, Debug, PartialEq, Clone)]
pub enum Result {
    // NOCOM(#sirver): mention success as a convenient way of creating this.
Ok(serde_json::Value),
Err(Error),
NotHandled,
}
impl Result {
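    /// Convenience constructor: serializes `value` with serde_json and wraps it
    /// in `Result::Ok`.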
pub fn success<T: serde::Serialize>(value: T) -> Result {
Result::Ok(serde_json::to_value(&value).unwrap())
}
pub fn unwrap_err(self) -> Error {
use self::Result::*;
match self {
Ok(_) | NotHandled => panic!("Called unwrap_rpc_error on a non_error."),
Err(e) => e,
}
}
pub fn unwrap(self) -> serde_json::Value {
use self::Result::*;
match self {
Err(_) | NotHandled => panic!("Called unwrap on a non Ok() value."),
Ok(val) => val,
}
}
pub fn is_ok(&self) -> bool {
if let &Result::Ok(_) = self {
true
} else {
false
}
}
}
// NOCOM(#sirver): check in this file what needs to be derived. seems too much.
#[derive(Serialize, Deserialize, Debug, PartialEq, Clone)]
pub struct Call {
pub function: String,
pub context: String,
pub args: serde_json::Value,
}
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct Cancel {
pub context: String,
}
| true | 1f46375b6c20a0502f0ffea59884b7880af77b3c | Rust | samuelcolvin/edgerender-rust | /src/rust/env.rs | UTF-8 | 3,172 | 2.625 | 3 | ["MIT"] | permissive |
use crate::config::Config;
use crate::router::RouteMatch;
use js_sys::{Error, SyntaxError};
use serde::Deserialize;
use serde_json::{to_string_pretty, Value as SerdeValue};
use std::collections::HashMap;
use tera::{Context, Result as TeraResult, Tera};
use wasm_bindgen::prelude::*;
#[derive(Deserialize)]
pub struct Template {
name: String,
content: String,
}
#[wasm_bindgen]
pub struct Env {
tera: Tera,
}
#[wasm_bindgen]
impl Env {
pub fn render(
&self,
config: &Config,
route_match: &JsValue,
upstream_json: Option<String>,
response_status: u32,
upstream: &JsValue,
) -> Result<String, JsValue> {
let route_match_: RouteMatch = match route_match.into_serde() {
Err(e) => return err!("route_match not a valid RouteMatch object: {:?}", e),
Ok(v) => v,
};
let mut context = Context::new();
config.add_context(&mut context);
route_match_.add_context(&mut context);
if let Some(s) = upstream_json {
let context_value: SerdeValue = match serde_json::from_str(&s) {
Err(e) => return err!("Error parsing context JSON: {:?}", e),
Ok(v) => v,
};
context.insert("data", &context_value);
} else {
context.insert("data", &SerdeValue::Null);
}
context.insert("response_status", &response_status);
let upstream_value: SerdeValue = match upstream.into_serde() {
Err(e) => return err!("Error parsing upstream data: {:?}", e),
Ok(v) => v,
};
context.insert("upstream", &upstream_value);
let template_name = match route_match_.template {
Some(v) => v,
None => config.get_default_template(),
};
match self.tera.render(&template_name, &context) {
Err(e) => err!("Error rendering template {}: {:?}", &template_name, e),
Ok(v) => Ok(v),
}
}
}
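// `to_json` below is registered as the Tera filter `json` (see `create_env`), so a
// template can use it as, for example, `{{ data | json(pretty=true) }}`
// (illustrative usage, not taken from the original source).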
fn to_json(obj: &SerdeValue, args: &HashMap<String, SerdeValue>) -> TeraResult<SerdeValue> {
let pretty: bool = match args.get("pretty") {
Some(v) => match v.as_bool() {
Some(v_) => v_,
_ => return Err("'pretty' argument must be a boolean".into()),
},
_ => false,
};
let s = match pretty {
true => to_string_pretty(obj)?,
false => obj.to_string(),
};
Ok(SerdeValue::from(s))
}
#[wasm_bindgen]
pub fn create_env(templates: &JsValue) -> Result<Env, JsValue> {
console_error_panic_hook::set_once();
let templates_vec: Vec<Template> = match templates.into_serde() {
Err(e) => return err!("Error decoding templates: {}", e),
Ok(v) => v,
};
let mut tera = Tera::default();
tera.register_filter("json", to_json);
tera.autoescape_on(vec![".html", ".html", ".xml", ".jinja", ".jinja2"]);
for t in templates_vec {
match tera.add_raw_template(&t.name, &t.content) {
Err(e) => return Err(SyntaxError::new(&format!("Invalid template {}: {:?}", t.name, e)).into()),
Ok(v) => v,
};
}
Ok(Env { tera })
}
| true | b201a55bdeb13daf4b28f0ef73586933898e1988 | Rust | muzudho/pyon-pyon-game | /src/main.rs | UTF-8 | 1,232 | 3 | 3 | ["MIT"] | permissive |
mod board;
mod command_line;
mod protocol;
mod view;
use crate::board::Board;
use crate::command_line::CommandLine;
use std;
fn main() {
println!(
"ぴょんぴょんゲーム
コマンド:
`do b5c3` - b5 の駒を c3 へ移動。
`pos` - 局面表示。"
);
let xfen = "xfen 1o1o1/o1o1o/5/x1x1x/1x1x1 x";
if let Some(mut board) = Board::from_xfen(xfen) {
        // Exit the loop with [Ctrl]+[C]
loop {
let mut line: String = String::new();
            // First of all, wait for command-line input ☆(^~^)
            match std::io::stdin().read_line(&mut line) {
                Ok(_n) => {}
                Err(e) => panic!("(Err.28) Failed to read line. / {}", e),
            };
            // Command line ☆(^~^)  `p` stands for "parser" ☆(^~^)
let mut p = CommandLine::new(&line);
if p.starts_with("pos") {
board.pos();
} else if p.starts_with("do") {
p.go_next_to("do ");
println!("Debug | rest=|{}|", p.rest());
board.do_(p.rest());
} else {
println!("Debug | Command not found. {:?}", p);
}
}
} else {
        panic!("(Err.31) xfen fail. / {}", xfen)
}
}
| true | 53812f8a617a3d8a21538e27f53c743dc22561ae | Rust | jazznerd206/Rustoleum | /documentation_exercises/math/arithmetic_functions.rs | UTF-8 | 96 | 2.734375 | 3 | ["Unlicense"] | permissive |
fn main() {
print!("{}", addition(3,3));
}
fn addition(x: i32, y: i32) -> i32 {
x + y
}
| true | 8991868577fbdf8e64db57eac9e862505698de7f | Rust | Connicpu/ecs-game | /src/world/item.rs | UTF-8 | 437 | 3.40625 | 3 | [] | no_license |
use self::Item::*;
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum Item {
Empty,
Coins(u32),
ExtraLife,
}
impl Item {
pub fn parse(value: char) -> Result<Item, char> {
Ok(match value {
'-' => Empty,
'c' => Coins(1),
'C' => Coins(3),
'$' => Coins(10),
'+' => ExtraLife,
_ => return Err(value),
})
}
}
| true | 7c9d472d8e337c465f882e74ab8716259fa5bf37 | Rust | fossabot/rundeck | /cli/src/job.rs | UTF-8 | 1,585 | 2.65625 | 3 | [] | no_license |
use api::job::JobService;
use api::Job;
use api::job::RunBody;
use prettytable::format;
use prettytable::row::Row;
use prettytable::cell::Cell;
use std::collections::HashMap;
pub fn list_jobs(
service: &JobService,
project: &str,
quiet: bool,
completion: bool,
filters: Vec<&str>,
) {
let jobs: Vec<Job> = service.list(project, filters);
if quiet {
for j in jobs {
if completion {
println!(
"{}/{}({})",
j.group.unwrap_or_else(|| "".into()),
j.name,
j.id
);
} else {
println!("{}", j.id);
}
}
} else {
let mut table = table!(["ID", "GROUP/NAME", "DESCRIPTION"]);
table.set_format(*format::consts::FORMAT_NO_BORDER_LINE_SEPARATOR);
for j in jobs {
table.add_row(Row::new(vec![
Cell::new(&j.id),
Cell::new(&j.name_with_group()),
Cell::new(&j.description),
]));
}
table.printstd();
}
}
pub fn run(service: &JobService, job_id: &str, node: &str, options: Vec<&str>) {
// Options to RunBody
let mut body = RunBody {
arg_string: None,
filter: Some(node.into()),
options: HashMap::new(),
};
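    // Each option is expected as "name=value" (e.g. `env=prod`, an illustrative pair);
    // only the piece between the first and second `=` is used as the value.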
for i in options {
let split = i.split('=').collect::<Vec<_>>();
let name = split[0];
let opt = split[1];
body.options.insert(name.into(), opt.into());
}
service.run(job_id, &body);
}
| true | 0b5940c67dff3d557df9171d721f28bcc993ffb3 | Rust | stm32-rs/stm32-rs-nightlies | /stm32l4/src/stm32l4p5/flash/sr.rs | UTF-8 | 29,563 | 2.6875 | 3 | [] | no_license |
#[doc = "Register `SR` reader"]
pub type R = crate::R<SR_SPEC>;
#[doc = "Register `SR` writer"]
pub type W = crate::W<SR_SPEC>;
#[doc = "Field `EOP` reader - End of operation"]
pub type EOP_R = crate::BitReader<EOPR_A>;
#[doc = "End of operation\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum EOPR_A {
#[doc = "0: No error"]
NoError = 0,
#[doc = "1: Set by hardware when one or more Flash memory operation (programming / erase) has been completed successfully"]
Error = 1,
}
impl From<EOPR_A> for bool {
#[inline(always)]
fn from(variant: EOPR_A) -> Self {
variant as u8 != 0
}
}
impl EOP_R {
#[doc = "Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> EOPR_A {
match self.bits {
false => EOPR_A::NoError,
true => EOPR_A::Error,
}
}
#[doc = "No error"]
#[inline(always)]
pub fn is_no_error(&self) -> bool {
*self == EOPR_A::NoError
}
#[doc = "Set by hardware when one or more Flash memory operation (programming / erase) has been completed successfully"]
#[inline(always)]
pub fn is_error(&self) -> bool {
*self == EOPR_A::Error
}
}
#[doc = "End of operation\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum EOPW_AW {
#[doc = "1: Cleared by writing 1"]
Clear = 1,
}
impl From<EOPW_AW> for bool {
#[inline(always)]
fn from(variant: EOPW_AW) -> Self {
variant as u8 != 0
}
}
#[doc = "Field `EOP` writer - End of operation"]
pub type EOP_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, EOPW_AW>;
impl<'a, REG, const O: u8> EOP_W<'a, REG, O>
where
REG: crate::Writable + crate::RegisterSpec,
{
#[doc = "Cleared by writing 1"]
#[inline(always)]
pub fn clear(self) -> &'a mut crate::W<REG> {
self.variant(EOPW_AW::Clear)
}
}
#[doc = "Field `OPERR` reader - Operation error"]
pub type OPERR_R = crate::BitReader<OPERRR_A>;
#[doc = "Operation error\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum OPERRR_A {
#[doc = "0: No error"]
NoError = 0,
#[doc = "1: Set by hardware when a Flash memory operation (program / erase) completes unsuccessfully"]
Error = 1,
}
impl From<OPERRR_A> for bool {
#[inline(always)]
fn from(variant: OPERRR_A) -> Self {
variant as u8 != 0
}
}
impl OPERR_R {
#[doc = "Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> OPERRR_A {
match self.bits {
false => OPERRR_A::NoError,
true => OPERRR_A::Error,
}
}
#[doc = "No error"]
#[inline(always)]
pub fn is_no_error(&self) -> bool {
*self == OPERRR_A::NoError
}
#[doc = "Set by hardware when a Flash memory operation (program / erase) completes unsuccessfully"]
#[inline(always)]
pub fn is_error(&self) -> bool {
*self == OPERRR_A::Error
}
}
#[doc = "Operation error\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum OPERRW_AW {
#[doc = "1: Cleared by writing 1"]
Clear = 1,
}
impl From<OPERRW_AW> for bool {
#[inline(always)]
fn from(variant: OPERRW_AW) -> Self {
variant as u8 != 0
}
}
#[doc = "Field `OPERR` writer - Operation error"]
pub type OPERR_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, OPERRW_AW>;
impl<'a, REG, const O: u8> OPERR_W<'a, REG, O>
where
REG: crate::Writable + crate::RegisterSpec,
{
#[doc = "Cleared by writing 1"]
#[inline(always)]
pub fn clear(self) -> &'a mut crate::W<REG> {
self.variant(OPERRW_AW::Clear)
}
}
#[doc = "Field `PROGERR` reader - Programming error"]
pub type PROGERR_R = crate::BitReader<PROGERRR_A>;
#[doc = "Programming error\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum PROGERRR_A {
#[doc = "0: No error"]
NoError = 0,
#[doc = "1: Set by hardware when a double-word address to be programmed contains a value different from '0xFFFF FFFF' before programming, except if the data to write is '0x0000 0000'"]
Error = 1,
}
impl From<PROGERRR_A> for bool {
#[inline(always)]
fn from(variant: PROGERRR_A) -> Self {
variant as u8 != 0
}
}
impl PROGERR_R {
#[doc = "Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> PROGERRR_A {
match self.bits {
false => PROGERRR_A::NoError,
true => PROGERRR_A::Error,
}
}
#[doc = "No error"]
#[inline(always)]
pub fn is_no_error(&self) -> bool {
*self == PROGERRR_A::NoError
}
#[doc = "Set by hardware when a double-word address to be programmed contains a value different from '0xFFFF FFFF' before programming, except if the data to write is '0x0000 0000'"]
#[inline(always)]
pub fn is_error(&self) -> bool {
*self == PROGERRR_A::Error
}
}
#[doc = "Programming error\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum PROGERRW_AW {
#[doc = "1: Cleared by writing 1"]
Clear = 1,
}
impl From<PROGERRW_AW> for bool {
#[inline(always)]
fn from(variant: PROGERRW_AW) -> Self {
variant as u8 != 0
}
}
#[doc = "Field `PROGERR` writer - Programming error"]
pub type PROGERR_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, PROGERRW_AW>;
impl<'a, REG, const O: u8> PROGERR_W<'a, REG, O>
where
REG: crate::Writable + crate::RegisterSpec,
{
#[doc = "Cleared by writing 1"]
#[inline(always)]
pub fn clear(self) -> &'a mut crate::W<REG> {
self.variant(PROGERRW_AW::Clear)
}
}
#[doc = "Field `WRPERR` reader - Write protected error"]
pub type WRPERR_R = crate::BitReader<WRPERRR_A>;
#[doc = "Write protected error\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum WRPERRR_A {
#[doc = "0: No error"]
NoError = 0,
#[doc = "1: Set by hardware when an address to be erased/programmed belongs to a writeprotected part (by WRP, PCROP or RDP level 1) of the Flash memory"]
Error = 1,
}
impl From<WRPERRR_A> for bool {
#[inline(always)]
fn from(variant: WRPERRR_A) -> Self {
variant as u8 != 0
}
}
impl WRPERR_R {
#[doc = "Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> WRPERRR_A {
match self.bits {
false => WRPERRR_A::NoError,
true => WRPERRR_A::Error,
}
}
#[doc = "No error"]
#[inline(always)]
pub fn is_no_error(&self) -> bool {
*self == WRPERRR_A::NoError
}
#[doc = "Set by hardware when an address to be erased/programmed belongs to a writeprotected part (by WRP, PCROP or RDP level 1) of the Flash memory"]
#[inline(always)]
pub fn is_error(&self) -> bool {
*self == WRPERRR_A::Error
}
}
#[doc = "Write protected error\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum WRPERRW_AW {
#[doc = "1: Cleared by writing 1"]
Clear = 1,
}
impl From<WRPERRW_AW> for bool {
#[inline(always)]
fn from(variant: WRPERRW_AW) -> Self {
variant as u8 != 0
}
}
#[doc = "Field `WRPERR` writer - Write protected error"]
pub type WRPERR_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, WRPERRW_AW>;
impl<'a, REG, const O: u8> WRPERR_W<'a, REG, O>
where
REG: crate::Writable + crate::RegisterSpec,
{
#[doc = "Cleared by writing 1"]
#[inline(always)]
pub fn clear(self) -> &'a mut crate::W<REG> {
self.variant(WRPERRW_AW::Clear)
}
}
#[doc = "Field `PGAERR` reader - Programming alignment error"]
pub type PGAERR_R = crate::BitReader<PGAERRR_A>;
#[doc = "Programming alignment error\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum PGAERRR_A {
#[doc = "0: No error"]
NoError = 0,
#[doc = "1: Set by hardware when the data to program cannot be contained in the same 64-bit Flash memory row in case of standard programming, or if there is a change of page during fast programming"]
Error = 1,
}
impl From<PGAERRR_A> for bool {
#[inline(always)]
fn from(variant: PGAERRR_A) -> Self {
variant as u8 != 0
}
}
impl PGAERR_R {
#[doc = "Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> PGAERRR_A {
match self.bits {
false => PGAERRR_A::NoError,
true => PGAERRR_A::Error,
}
}
#[doc = "No error"]
#[inline(always)]
pub fn is_no_error(&self) -> bool {
*self == PGAERRR_A::NoError
}
#[doc = "Set by hardware when the data to program cannot be contained in the same 64-bit Flash memory row in case of standard programming, or if there is a change of page during fast programming"]
#[inline(always)]
pub fn is_error(&self) -> bool {
*self == PGAERRR_A::Error
}
}
#[doc = "Programming alignment error\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum PGAERRW_AW {
#[doc = "1: Cleared by writing 1"]
Clear = 1,
}
impl From<PGAERRW_AW> for bool {
#[inline(always)]
fn from(variant: PGAERRW_AW) -> Self {
variant as u8 != 0
}
}
#[doc = "Field `PGAERR` writer - Programming alignment error"]
pub type PGAERR_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, PGAERRW_AW>;
impl<'a, REG, const O: u8> PGAERR_W<'a, REG, O>
where
REG: crate::Writable + crate::RegisterSpec,
{
#[doc = "Cleared by writing 1"]
#[inline(always)]
pub fn clear(self) -> &'a mut crate::W<REG> {
self.variant(PGAERRW_AW::Clear)
}
}
#[doc = "Field `SIZERR` reader - Size error"]
pub type SIZERR_R = crate::BitReader<SIZERRR_A>;
#[doc = "Size error\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum SIZERRR_A {
#[doc = "0: No error"]
NoError = 0,
#[doc = "1: Set by hardware when the size of the access is a byte or half-word during a program or a fast program sequence. Only double word programming is allowed (consequently: word access)"]
Error = 1,
}
impl From<SIZERRR_A> for bool {
#[inline(always)]
fn from(variant: SIZERRR_A) -> Self {
variant as u8 != 0
}
}
impl SIZERR_R {
#[doc = "Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> SIZERRR_A {
match self.bits {
false => SIZERRR_A::NoError,
true => SIZERRR_A::Error,
}
}
#[doc = "No error"]
#[inline(always)]
pub fn is_no_error(&self) -> bool {
*self == SIZERRR_A::NoError
}
#[doc = "Set by hardware when the size of the access is a byte or half-word during a program or a fast program sequence. Only double word programming is allowed (consequently: word access)"]
#[inline(always)]
pub fn is_error(&self) -> bool {
*self == SIZERRR_A::Error
}
}
#[doc = "Size error\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum SIZERRW_AW {
#[doc = "1: Cleared by writing 1"]
Clear = 1,
}
impl From<SIZERRW_AW> for bool {
#[inline(always)]
fn from(variant: SIZERRW_AW) -> Self {
variant as u8 != 0
}
}
#[doc = "Field `SIZERR` writer - Size error"]
pub type SIZERR_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, SIZERRW_AW>;
impl<'a, REG, const O: u8> SIZERR_W<'a, REG, O>
where
REG: crate::Writable + crate::RegisterSpec,
{
#[doc = "Cleared by writing 1"]
#[inline(always)]
pub fn clear(self) -> &'a mut crate::W<REG> {
self.variant(SIZERRW_AW::Clear)
}
}
#[doc = "Field `PGSERR` reader - Programming sequence error"]
pub type PGSERR_R = crate::BitReader<PGSERRR_A>;
#[doc = "Programming sequence error\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum PGSERRR_A {
#[doc = "0: No error"]
NoError = 0,
#[doc = "1: Set by hardware when a write access to the Flash memory is performed by the code while PG or FSTPG have not been set previously. Set also by hardware when PROGERR, SIZERR, PGAERR, WRPERR, MISSERR or FASTERR is set due to a previous programming error. Set also when trying to perform bank erase when DBANK=0 (or DB1M = 0)"]
Error = 1,
}
impl From<PGSERRR_A> for bool {
#[inline(always)]
fn from(variant: PGSERRR_A) -> Self {
variant as u8 != 0
}
}
impl PGSERR_R {
#[doc = "Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> PGSERRR_A {
match self.bits {
false => PGSERRR_A::NoError,
true => PGSERRR_A::Error,
}
}
#[doc = "No error"]
#[inline(always)]
pub fn is_no_error(&self) -> bool {
*self == PGSERRR_A::NoError
}
#[doc = "Set by hardware when a write access to the Flash memory is performed by the code while PG or FSTPG have not been set previously. Set also by hardware when PROGERR, SIZERR, PGAERR, WRPERR, MISSERR or FASTERR is set due to a previous programming error. Set also when trying to perform bank erase when DBANK=0 (or DB1M = 0)"]
#[inline(always)]
pub fn is_error(&self) -> bool {
*self == PGSERRR_A::Error
}
}
#[doc = "Programming sequence error\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum PGSERRW_AW {
#[doc = "1: Cleared by writing 1"]
Clear = 1,
}
impl From<PGSERRW_AW> for bool {
#[inline(always)]
fn from(variant: PGSERRW_AW) -> Self {
variant as u8 != 0
}
}
#[doc = "Field `PGSERR` writer - Programming sequence error"]
pub type PGSERR_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, PGSERRW_AW>;
impl<'a, REG, const O: u8> PGSERR_W<'a, REG, O>
where
REG: crate::Writable + crate::RegisterSpec,
{
#[doc = "Cleared by writing 1"]
#[inline(always)]
pub fn clear(self) -> &'a mut crate::W<REG> {
self.variant(PGSERRW_AW::Clear)
}
}
#[doc = "Field `MISERR` reader - Fast programming data miss error"]
pub type MISERR_R = crate::BitReader<MISERRR_A>;
#[doc = "Fast programming data miss error\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum MISERRR_A {
#[doc = "0: No error"]
NoError = 0,
#[doc = "1: In fast programming mode, 32 double words must be sent to Flash successively, and the new data must be sent to the Flash logic control before the current data is fully programmed. MISSERR is set by hardware when the new data is not present in time"]
Error = 1,
}
impl From<MISERRR_A> for bool {
#[inline(always)]
fn from(variant: MISERRR_A) -> Self {
variant as u8 != 0
}
}
impl MISERR_R {
#[doc = "Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> MISERRR_A {
match self.bits {
false => MISERRR_A::NoError,
true => MISERRR_A::Error,
}
}
#[doc = "No error"]
#[inline(always)]
pub fn is_no_error(&self) -> bool {
*self == MISERRR_A::NoError
}
#[doc = "In fast programming mode, 32 double words must be sent to Flash successively, and the new data must be sent to the Flash logic control before the current data is fully programmed. MISSERR is set by hardware when the new data is not present in time"]
#[inline(always)]
pub fn is_error(&self) -> bool {
*self == MISERRR_A::Error
}
}
#[doc = "Fast programming data miss error\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum MISERRW_AW {
#[doc = "1: Cleared by writing 1"]
Clear = 1,
}
impl From<MISERRW_AW> for bool {
#[inline(always)]
fn from(variant: MISERRW_AW) -> Self {
variant as u8 != 0
}
}
#[doc = "Field `MISERR` writer - Fast programming data miss error"]
pub type MISERR_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, MISERRW_AW>;
impl<'a, REG, const O: u8> MISERR_W<'a, REG, O>
where
REG: crate::Writable + crate::RegisterSpec,
{
#[doc = "Cleared by writing 1"]
#[inline(always)]
pub fn clear(self) -> &'a mut crate::W<REG> {
self.variant(MISERRW_AW::Clear)
}
}
#[doc = "Field `FASTERR` reader - Fast programming error"]
pub type FASTERR_R = crate::BitReader<FASTERRR_A>;
#[doc = "Fast programming error\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum FASTERRR_A {
#[doc = "0: No error"]
NoError = 0,
#[doc = "1: Set by hardware when a fast programming sequence (activated by FSTPG) is interrupted due to an error (alignment, size, write protection or data miss). The corresponding status bit (PGAERR, SIZERR, WRPERR or MISSERR) is set at the same time"]
Error = 1,
}
impl From<FASTERRR_A> for bool {
#[inline(always)]
fn from(variant: FASTERRR_A) -> Self {
variant as u8 != 0
}
}
impl FASTERR_R {
#[doc = "Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> FASTERRR_A {
match self.bits {
false => FASTERRR_A::NoError,
true => FASTERRR_A::Error,
}
}
#[doc = "No error"]
#[inline(always)]
pub fn is_no_error(&self) -> bool {
*self == FASTERRR_A::NoError
}
#[doc = "Set by hardware when a fast programming sequence (activated by FSTPG) is interrupted due to an error (alignment, size, write protection or data miss). The corresponding status bit (PGAERR, SIZERR, WRPERR or MISSERR) is set at the same time"]
#[inline(always)]
pub fn is_error(&self) -> bool {
*self == FASTERRR_A::Error
}
}
#[doc = "Fast programming error\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum FASTERRW_AW {
#[doc = "1: Cleared by writing 1"]
Clear = 1,
}
impl From<FASTERRW_AW> for bool {
#[inline(always)]
fn from(variant: FASTERRW_AW) -> Self {
variant as u8 != 0
}
}
#[doc = "Field `FASTERR` writer - Fast programming error"]
pub type FASTERR_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, FASTERRW_AW>;
impl<'a, REG, const O: u8> FASTERR_W<'a, REG, O>
where
REG: crate::Writable + crate::RegisterSpec,
{
#[doc = "Cleared by writing 1"]
#[inline(always)]
pub fn clear(self) -> &'a mut crate::W<REG> {
self.variant(FASTERRW_AW::Clear)
}
}
#[doc = "Field `RDERR` reader - PCROP read error"]
pub type RDERR_R = crate::BitReader<RDERRR_A>;
#[doc = "PCROP read error\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum RDERRR_A {
#[doc = "0: No error"]
NoError = 0,
#[doc = "1: Set by hardware when an address to be read through the D-bus belongs to a read protected area of the Flash (PCROP protection)"]
Error = 1,
}
impl From<RDERRR_A> for bool {
#[inline(always)]
fn from(variant: RDERRR_A) -> Self {
variant as u8 != 0
}
}
impl RDERR_R {
#[doc = "Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> RDERRR_A {
match self.bits {
false => RDERRR_A::NoError,
true => RDERRR_A::Error,
}
}
#[doc = "No error"]
#[inline(always)]
pub fn is_no_error(&self) -> bool {
*self == RDERRR_A::NoError
}
#[doc = "Set by hardware when an address to be read through the D-bus belongs to a read protected area of the Flash (PCROP protection)"]
#[inline(always)]
pub fn is_error(&self) -> bool {
*self == RDERRR_A::Error
}
}
#[doc = "PCROP read error\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum RDERRW_AW {
#[doc = "1: Cleared by writing 1"]
Clear = 1,
}
impl From<RDERRW_AW> for bool {
#[inline(always)]
fn from(variant: RDERRW_AW) -> Self {
variant as u8 != 0
}
}
#[doc = "Field `RDERR` writer - PCROP read error"]
pub type RDERR_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, RDERRW_AW>;
impl<'a, REG, const O: u8> RDERR_W<'a, REG, O>
where
REG: crate::Writable + crate::RegisterSpec,
{
#[doc = "Cleared by writing 1"]
#[inline(always)]
pub fn clear(self) -> &'a mut crate::W<REG> {
self.variant(RDERRW_AW::Clear)
}
}
#[doc = "Field `OPTVERR` reader - Option validity error"]
pub type OPTVERR_R = crate::BitReader<OPTVERRR_A>;
#[doc = "Option validity error\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum OPTVERRR_A {
#[doc = "0: No error"]
NoError = 0,
#[doc = "1: Set by hardware when the options read may not be the one configured by the user. If option haven’t been properly loaded, OPTVERR is set again after each system reset"]
Error = 1,
}
impl From<OPTVERRR_A> for bool {
#[inline(always)]
fn from(variant: OPTVERRR_A) -> Self {
variant as u8 != 0
}
}
impl OPTVERR_R {
#[doc = "Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> OPTVERRR_A {
match self.bits {
false => OPTVERRR_A::NoError,
true => OPTVERRR_A::Error,
}
}
#[doc = "No error"]
#[inline(always)]
pub fn is_no_error(&self) -> bool {
*self == OPTVERRR_A::NoError
}
#[doc = "Set by hardware when the options read may not be the one configured by the user. If option haven’t been properly loaded, OPTVERR is set again after each system reset"]
#[inline(always)]
pub fn is_error(&self) -> bool {
*self == OPTVERRR_A::Error
}
}
#[doc = "Option validity error\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum OPTVERRW_AW {
#[doc = "1: Cleared by writing 1"]
Clear = 1,
}
impl From<OPTVERRW_AW> for bool {
#[inline(always)]
fn from(variant: OPTVERRW_AW) -> Self {
variant as u8 != 0
}
}
#[doc = "Field `OPTVERR` writer - Option validity error"]
pub type OPTVERR_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, OPTVERRW_AW>;
impl<'a, REG, const O: u8> OPTVERR_W<'a, REG, O>
where
REG: crate::Writable + crate::RegisterSpec,
{
#[doc = "Cleared by writing 1"]
#[inline(always)]
pub fn clear(self) -> &'a mut crate::W<REG> {
self.variant(OPTVERRW_AW::Clear)
}
}
#[doc = "Field `BSY` reader - Busy"]
pub type BSY_R = crate::BitReader<BSY_A>;
#[doc = "Busy\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum BSY_A {
#[doc = "0: Not busy"]
NotBusy = 0,
#[doc = "1: Busy"]
Busy = 1,
}
impl From<BSY_A> for bool {
#[inline(always)]
fn from(variant: BSY_A) -> Self {
variant as u8 != 0
}
}
impl BSY_R {
#[doc = "Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> BSY_A {
match self.bits {
false => BSY_A::NotBusy,
true => BSY_A::Busy,
}
}
#[doc = "Not busy"]
#[inline(always)]
pub fn is_not_busy(&self) -> bool {
*self == BSY_A::NotBusy
}
#[doc = "Busy"]
#[inline(always)]
pub fn is_busy(&self) -> bool {
*self == BSY_A::Busy
}
}
#[doc = "Field `PEMPTY` reader - "]
pub type PEMPTY_R = crate::BitReader<PEMPTY_A>;
#[doc = "\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum PEMPTY_A {
#[doc = "0: The bit value is toggling"]
Toggling = 0,
#[doc = "1: No effect"]
NoEffect = 1,
}
impl From<PEMPTY_A> for bool {
#[inline(always)]
fn from(variant: PEMPTY_A) -> Self {
variant as u8 != 0
}
}
impl PEMPTY_R {
#[doc = "Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> PEMPTY_A {
match self.bits {
false => PEMPTY_A::Toggling,
true => PEMPTY_A::NoEffect,
}
}
#[doc = "The bit value is toggling"]
#[inline(always)]
pub fn is_toggling(&self) -> bool {
*self == PEMPTY_A::Toggling
}
#[doc = "No effect"]
#[inline(always)]
pub fn is_no_effect(&self) -> bool {
*self == PEMPTY_A::NoEffect
}
}
#[doc = "Field `PEMPTY` writer - "]
pub type PEMPTY_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, PEMPTY_A>;
impl<'a, REG, const O: u8> PEMPTY_W<'a, REG, O>
where
REG: crate::Writable + crate::RegisterSpec,
{
#[doc = "The bit value is toggling"]
#[inline(always)]
pub fn toggling(self) -> &'a mut crate::W<REG> {
self.variant(PEMPTY_A::Toggling)
}
#[doc = "No effect"]
#[inline(always)]
pub fn no_effect(self) -> &'a mut crate::W<REG> {
self.variant(PEMPTY_A::NoEffect)
}
}
impl R {
#[doc = "Bit 0 - End of operation"]
#[inline(always)]
pub fn eop(&self) -> EOP_R {
EOP_R::new((self.bits & 1) != 0)
}
#[doc = "Bit 1 - Operation error"]
#[inline(always)]
pub fn operr(&self) -> OPERR_R {
OPERR_R::new(((self.bits >> 1) & 1) != 0)
}
#[doc = "Bit 3 - Programming error"]
#[inline(always)]
pub fn progerr(&self) -> PROGERR_R {
PROGERR_R::new(((self.bits >> 3) & 1) != 0)
}
#[doc = "Bit 4 - Write protected error"]
#[inline(always)]
pub fn wrperr(&self) -> WRPERR_R {
WRPERR_R::new(((self.bits >> 4) & 1) != 0)
}
#[doc = "Bit 5 - Programming alignment error"]
#[inline(always)]
pub fn pgaerr(&self) -> PGAERR_R {
PGAERR_R::new(((self.bits >> 5) & 1) != 0)
}
#[doc = "Bit 6 - Size error"]
#[inline(always)]
pub fn sizerr(&self) -> SIZERR_R {
SIZERR_R::new(((self.bits >> 6) & 1) != 0)
}
#[doc = "Bit 7 - Programming sequence error"]
#[inline(always)]
pub fn pgserr(&self) -> PGSERR_R {
PGSERR_R::new(((self.bits >> 7) & 1) != 0)
}
#[doc = "Bit 8 - Fast programming data miss error"]
#[inline(always)]
pub fn miserr(&self) -> MISERR_R {
MISERR_R::new(((self.bits >> 8) & 1) != 0)
}
#[doc = "Bit 9 - Fast programming error"]
#[inline(always)]
pub fn fasterr(&self) -> FASTERR_R {
FASTERR_R::new(((self.bits >> 9) & 1) != 0)
}
#[doc = "Bit 14 - PCROP read error"]
#[inline(always)]
pub fn rderr(&self) -> RDERR_R {
RDERR_R::new(((self.bits >> 14) & 1) != 0)
}
#[doc = "Bit 15 - Option validity error"]
#[inline(always)]
pub fn optverr(&self) -> OPTVERR_R {
OPTVERR_R::new(((self.bits >> 15) & 1) != 0)
}
#[doc = "Bit 16 - Busy"]
#[inline(always)]
pub fn bsy(&self) -> BSY_R {
BSY_R::new(((self.bits >> 16) & 1) != 0)
}
#[doc = "Bit 17"]
#[inline(always)]
pub fn pempty(&self) -> PEMPTY_R {
PEMPTY_R::new(((self.bits >> 17) & 1) != 0)
}
}
impl W {
#[doc = "Bit 0 - End of operation"]
#[inline(always)]
#[must_use]
pub fn eop(&mut self) -> EOP_W<SR_SPEC, 0> {
EOP_W::new(self)
}
#[doc = "Bit 1 - Operation error"]
#[inline(always)]
#[must_use]
pub fn operr(&mut self) -> OPERR_W<SR_SPEC, 1> {
OPERR_W::new(self)
}
#[doc = "Bit 3 - Programming error"]
#[inline(always)]
#[must_use]
pub fn progerr(&mut self) -> PROGERR_W<SR_SPEC, 3> {
PROGERR_W::new(self)
}
#[doc = "Bit 4 - Write protected error"]
#[inline(always)]
#[must_use]
pub fn wrperr(&mut self) -> WRPERR_W<SR_SPEC, 4> {
WRPERR_W::new(self)
}
#[doc = "Bit 5 - Programming alignment error"]
#[inline(always)]
#[must_use]
pub fn pgaerr(&mut self) -> PGAERR_W<SR_SPEC, 5> {
PGAERR_W::new(self)
}
#[doc = "Bit 6 - Size error"]
#[inline(always)]
#[must_use]
pub fn sizerr(&mut self) -> SIZERR_W<SR_SPEC, 6> {
SIZERR_W::new(self)
}
#[doc = "Bit 7 - Programming sequence error"]
#[inline(always)]
#[must_use]
pub fn pgserr(&mut self) -> PGSERR_W<SR_SPEC, 7> {
PGSERR_W::new(self)
}
#[doc = "Bit 8 - Fast programming data miss error"]
#[inline(always)]
#[must_use]
pub fn miserr(&mut self) -> MISERR_W<SR_SPEC, 8> {
MISERR_W::new(self)
}
#[doc = "Bit 9 - Fast programming error"]
#[inline(always)]
#[must_use]
pub fn fasterr(&mut self) -> FASTERR_W<SR_SPEC, 9> {
FASTERR_W::new(self)
}
#[doc = "Bit 14 - PCROP read error"]
#[inline(always)]
#[must_use]
pub fn rderr(&mut self) -> RDERR_W<SR_SPEC, 14> {
RDERR_W::new(self)
}
#[doc = "Bit 15 - Option validity error"]
#[inline(always)]
#[must_use]
pub fn optverr(&mut self) -> OPTVERR_W<SR_SPEC, 15> {
OPTVERR_W::new(self)
}
#[doc = "Bit 17"]
#[inline(always)]
#[must_use]
pub fn pempty(&mut self) -> PEMPTY_W<SR_SPEC, 17> {
PEMPTY_W::new(self)
}
#[doc = "Writes raw bits to the register."]
#[inline(always)]
pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
self.bits = bits;
self
}
}
#[doc = "Status register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`sr::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`sr::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct SR_SPEC;
impl crate::RegisterSpec for SR_SPEC {
type Ux = u32;
}
#[doc = "`read()` method returns [`sr::R`](R) reader structure"]
impl crate::Readable for SR_SPEC {}
#[doc = "`write(|w| ..)` method takes [`sr::W`](W) writer structure"]
impl crate::Writable for SR_SPEC {
const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets SR to value 0"]
impl crate::Resettable for SR_SPEC {
const RESET_VALUE: Self::Ux = 0;
}
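// Illustrative usage (a sketch; the exact peripheral handle depends on the PAC
// version, here assumed to be a `flash: FLASH` instance):
//
//     // wait until the flash controller is idle
//     while flash.sr.read().bsy().is_busy() {}
//     // error flags are cleared by writing 1 to them
//     flash.sr.write(|w| w.pgserr().clear().wrperr().clear());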
| true | e27ea2d2387996c3c1f888dbe769423642be4829 | Rust | liqcomb/os | /src/arch/x86_64/timer.rs | UTF-8 | 2,951 | 2.609375 | 3 | [] | no_license |
use arch::idt::IDT;
use arch::io;
use arch::pic::PIC;
use spin::Mutex;
type TimerCallback = fn(u64);
const MAX_CALLBACKS: usize = 30;
const TIMER_C0_DATA: u16 = 0x40;
const TIMER_C1_DATA: u16 = 0x41;
const TIMER_C2_DATA: u16 = 0x42;
const TIMER_MODE_CTRL: u16 = 0x43;
const TIMER_WRAP: u64 = 0x100000000;
static mut SCHEDULER: Option<TimerCallback> = None;
static TIMER: Mutex<Timer> = Mutex::new(Timer {
handlers: [None; MAX_CALLBACKS],
tick: 0,
});
/// Represents an instance of timer handler
pub struct Timer {
handlers: [Option<TimerCallback>; MAX_CALLBACKS],
tick: u64,
}
impl Timer {
/// Get an instance of the timer
pub fn get() -> spin::MutexGuard<'static, Timer> {
TIMER.lock()
}
/// Register a timer function
pub fn register_timer(&mut self, func: fn(u64)) -> Result<usize, ::common::error::Error> {
for i in 0..MAX_CALLBACKS {
if self.handlers[i].is_some() {
continue;
}
self.handlers[i] = Some(func);
return Ok(i);
}
Err(err!(EFULL))
}
/// Unregister a timer function
pub fn unregister_timer(&mut self, idx: usize) -> Result<(), ::common::error::Error> {
if self.handlers[idx].is_none() {
return Err(err!(ENOENT));
}
self.handlers[idx] = None;
Ok(())
}
/// Register a scheduler function
pub fn register_scheduler(&self, func: fn(u64)) -> Result<(), ::common::error::Error> {
unsafe {
if SCHEDULER.is_some() {
return Err(err!(EAGAIN));
}
SCHEDULER = Some(func);
}
Ok(())
}
}
fn handler(_vector: u64, _error_code: u64) {
// Send EOI to Master PIC
unsafe {
PIC::eoi(false);
}
    // Try to lock the TIMER global.
// If we cannot get a locked instance of TIMER,
// we can't just wait here since we're in the
// middle of an ISR.
let mut tick: u64 = 0;
match TIMER.try_lock() {
None => {
// Nothing we can do now, TIMER is occupied
}
Some(mut timer) => {
// wrap-adds the tick
if timer.tick >= TIMER_WRAP {
timer.tick = 0;
}
timer.tick += 1;
tick = timer.tick;
for i in 0..MAX_CALLBACKS {
            // Call each registered callback; the callback must return (it must not block here).
if let Some(ref func) = timer.handlers[i] {
func(timer.tick);
}
}
}
};
    // Now run the scheduler.
    // It's totally fine if this does not return.
unsafe {
if let Some(ref handler) = SCHEDULER {
handler(tick);
}
}
}
pub fn init() {
    unsafe {
        // Mode/command 0x36: PIT channel 0, lobyte/hibyte access, mode 3 (square wave).
        io::outb(TIMER_MODE_CTRL, 0x36);
        // A divisor of 0 is treated as 65536, the slowest rate (~18.2 Hz).
        io::outb(TIMER_C0_DATA, 0);
        io::outb(TIMER_C0_DATA, 0);
    }
assert!(IDT::get().register_isr(32, handler));
}
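// Usage sketch (hypothetical, not part of the original kernel): wiring up a tick
// callback and a scheduler once `init()` has run. Only APIs defined above are used.
#[allow(dead_code)]
fn example_setup() {
    fn tick_logger(_tick: u64) {
        // e.g. bump an uptime counter on every tick
    }
    fn schedule(_tick: u64) {
        // pick the next task to run
    }
    init();
    let _slot = Timer::get().register_timer(tick_logger).ok();
    let _ = Timer::get().register_scheduler(schedule);
}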
| true |
136f0498300c20f4fda49862b654dc8f73263b3e
|
Rust
|
Floeckchengrafik/LangJam0001-Comstruct
|
/tontuna/tontuna/src/lexer.rs
|
UTF-8
| 3,585 | 3.328125 | 3 |
[] |
no_license
|
use logos::Logos;
#[derive(Eq, PartialEq, PartialOrd, Ord, Debug, Copy, Clone, Logos)]
pub(crate) enum TokenKind {
#[token("fn")]
Fn,
#[token("let")]
Let,
#[token("while")]
While,
#[token("if")]
If,
#[token("else")]
Else,
#[token("for")]
For,
#[token("in")]
In,
#[token("return")]
Return,
#[token("struct")]
Struct,
#[token("true")]
True,
#[token("false")]
False,
#[regex("\"([^\\n\"\\\\]|\\\\.)*\"")]
Str,
#[token("nil")]
Nil,
#[token("self")]
SelfKw,
#[token(".")]
Dot,
#[token(":")]
Colon,
#[token("=")]
Equals,
#[token("&&")]
And,
#[token("||")]
Or,
#[token("+")]
Plus,
#[token("-")]
Minus,
#[token("*")]
Star,
#[token("/")]
Slash,
#[token("<")]
Less,
#[token("<=")]
LessEq,
#[token(">")]
Greater,
#[token(">=")]
GreaterEq,
#[token("==")]
EqEq,
#[token("!=")]
NotEq,
#[token("(")]
LeftParen,
#[token(")")]
RightParen,
#[token("{")]
LeftCurly,
#[token("}")]
RightCurly,
#[token(",")]
Comma,
#[token(";")]
Semicolon,
#[regex("[a-zA-Z_][a-zA-Z0-9_]*")]
Name,
#[regex("[0-9][a-zA-Z0-9_]*")]
Number,
#[token("#")]
CommentMarker,
CodeMarker,
#[regex(" +")]
Space,
#[regex(r"\r?\n")]
Newline,
#[error]
Error,
CommentText,
}
impl TokenKind {
pub(crate) fn to_str(self) -> &'static str {
match self {
TokenKind::Fn => "`fn`",
TokenKind::Let => "`let`",
TokenKind::While => "`while`",
TokenKind::If => "`if`",
TokenKind::Else => "`else`",
TokenKind::For => "`for`",
TokenKind::In => "`in`",
TokenKind::Return => "`return`",
TokenKind::Struct => "`struct`",
TokenKind::True => "`true`",
TokenKind::False => "`false`",
TokenKind::Str => "string literal",
TokenKind::Nil => "`nil`",
TokenKind::SelfKw => "`self`",
TokenKind::Dot => "`.`",
TokenKind::Colon => "`:`",
TokenKind::Equals => "`=`",
TokenKind::And => "`&&`",
TokenKind::Or => "`||`",
TokenKind::Plus => "`+`",
TokenKind::Minus => "`-`",
TokenKind::Star => "`*`",
TokenKind::Slash => "`/`",
TokenKind::Less => "`<`",
TokenKind::LessEq => "`<=`",
TokenKind::Greater => "`>`",
TokenKind::GreaterEq => "`>=`",
TokenKind::EqEq => "`==`",
TokenKind::NotEq => "`!=`",
TokenKind::LeftParen => "`(`",
TokenKind::RightParen => "`)`",
TokenKind::LeftCurly => "`{`",
TokenKind::RightCurly => "`}`",
TokenKind::Comma => "`,`",
TokenKind::Semicolon => "`;`",
TokenKind::Name => "identifier",
TokenKind::Number => "number",
TokenKind::CommentMarker => "`#`",
TokenKind::CodeMarker => "`>`",
TokenKind::Space => "whitespace",
TokenKind::Newline => "whitespace",
TokenKind::Error => "bad token",
TokenKind::CommentText => "comment text",
}
}
}
pub(crate) fn next_token(source: &str) -> Option<(TokenKind, usize)> {
let mut lexer = TokenKind::lexer(source);
let token = lexer.next()?;
let len = source.len() - lexer.remainder().len();
Some((token, len))
}
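// Usage sketch (hypothetical, not part of the original lexer): driving `next_token`
// over a whole input the way a parser front-end might, using only items defined above.
#[cfg(test)]
mod next_token_usage {
    use super::{next_token, TokenKind};

    #[test]
    fn lexes_a_let_statement() {
        let mut src = "let x = 42;";
        let mut kinds = Vec::new();
        while let Some((kind, len)) = next_token(src) {
            kinds.push(kind);
            src = &src[len..];
        }
        assert_eq!(
            kinds,
            vec![
                TokenKind::Let,
                TokenKind::Space,
                TokenKind::Name,
                TokenKind::Space,
                TokenKind::Equals,
                TokenKind::Space,
                TokenKind::Number,
                TokenKind::Semicolon,
            ]
        );
    }
}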
| true |
887ef2823a5b7ed2d4a7f24750a45f39442799a2
|
Rust
|
YoloDev/skorm
|
/crates/store/src/store/sink.rs
|
UTF-8
| 4,005 | 2.59375 | 3 |
[] |
no_license
|
use super::{
expand, insert_prefix, prefix::ExpandError, Lit, Store, StoredBlankNode, StoredLiteral,
StoredObject, StoredPredicate, StoredSubject, StoredTriple, StoredUri, TripleDatabase, Uri,
};
use evitable::*;
use skorm_parse::{
AsRdfObject, AsRdfParserSink, AsRdfPredicate, AsRdfPrefix, AsRdfStatement, AsRdfSubject,
AsRdfTriple, RdfParserSink, Statement,
};
use std::collections::HashMap;
use std::sync::atomic::Ordering;
#[evitable]
pub enum StoreInsertContext {
#[evitable(description("Prefix not found."), from = ExpandError)]
PrefixNotFound,
}
pub struct StoreSink<'a> {
store: &'a mut Store,
blanks: HashMap<skorm_parse::BlankNode, StoredBlankNode>,
}
impl<'a> RdfParserSink for StoreSink<'a> {
type Error = StoreInsertError;
fn insert(&mut self, statement: &impl AsRdfStatement) -> StoreInsertResult<()> {
fn insert_named_node(
db: &mut StoreSink,
node: skorm_parse::NamedNode,
) -> StoreInsertResult<Uri> {
let stored = match node {
skorm_parse::NamedNode::Iri(i) => StoredUri(i),
skorm_parse::NamedNode::Prefixed(p, v) => {
let i = expand(&mut db.store.inner, p, &v)?;
StoredUri(i.into())
}
};
Ok(db.store.inner.uri(stored))
}
fn insert_blank_node(db: &mut StoreSink, node: skorm_parse::BlankNode) -> StoredBlankNode {
let blanks = &mut db.blanks;
let store = &mut db.store;
blanks
.entry(node)
.or_insert_with(|| {
StoredBlankNode::from(store.inner.next_blank.fetch_add(1, Ordering::Relaxed))
})
.clone()
}
fn insert_literal(db: &mut StoreSink, node: skorm_parse::Literal) -> StoreInsertResult<Lit> {
let stored = match node {
skorm_parse::Literal::Simple(v) => StoredLiteral::Simple(v),
skorm_parse::Literal::LangTagged(v, l) => StoredLiteral::LangTagged(v, l),
skorm_parse::Literal::Typed(v, t) => StoredLiteral::Typed(v, insert_named_node(db, t)?),
};
Ok(db.store.inner.lit(stored))
}
fn insert_subject(
db: &mut StoreSink,
subj: skorm_parse::Subject,
) -> StoreInsertResult<StoredSubject> {
Ok(match subj {
skorm_parse::Subject::Named(n) => StoredSubject::Named(insert_named_node(db, n)?),
skorm_parse::Subject::Blank(n) => StoredSubject::Blank(insert_blank_node(db, n)),
})
}
fn insert_predicate(
db: &mut StoreSink,
pred: skorm_parse::Predicate,
) -> StoreInsertResult<StoredPredicate> {
insert_named_node(db, pred.0).map(Into::into)
}
fn insert_object(
db: &mut StoreSink,
obj: skorm_parse::Object,
) -> StoreInsertResult<StoredObject> {
Ok(match obj {
skorm_parse::Object::Named(n) => StoredObject::Named(insert_named_node(db, n)?),
skorm_parse::Object::Blank(n) => StoredObject::Blank(insert_blank_node(db, n)),
skorm_parse::Object::Literal(l) => StoredObject::Literal(insert_literal(db, l)?),
})
}
match statement.as_statement() {
Statement::Prefix(p) => {
let p = p.as_prefix();
insert_prefix(&mut self.store.inner, p.prefix.into(), p.uri.into());
Ok(())
}
Statement::Triple(t) => {
let subject = insert_subject(self, t.subject().as_subject())?;
let predicate = insert_predicate(self, t.predicate().as_predicate())?;
let object = insert_object(self, t.object().as_object())?;
let id = self.store.inner.triple(StoredTriple {
subject,
predicate,
object,
});
let mut triples = self.store.inner.triples();
        if triples.insert(id).is_none() {
// value did not already exist.
self.store.inner.set_triples(triples);
}
Ok(())
}
}
}
}
impl<'a> AsRdfParserSink<'a> for Store {
type Sink = StoreSink<'a>;
fn as_sink(&'a mut self) -> Self::Sink {
StoreSink {
store: self,
blanks: HashMap::new(),
}
}
}
| true |
2c694097a21c26c731d570ce1bcae90e4d1f61e8
|
Rust
|
txowner/dingtalk-rs
|
/src/lib.rs
|
UTF-8
| 18,154 | 2.625 | 3 |
[
"MIT"
] |
permissive
|
use hmac::{Hmac, Mac, NewMac};
use serde_json::Value;
use sha2::Sha256;
use std::{
env, fs,
io::{Error, ErrorKind},
path::PathBuf,
time::SystemTime,
};
mod msg;
use msg::*;
pub use msg::{
DingTalkMessage, DingTalkMessageActionCardBtn, DingTalkMessageActionCardBtnOrientation,
DingTalkMessageActionCardHideAvatar, DingTalkMessageFeedCardLink, DingTalkMessageType,
DingTalkType,
};
type HmacSha256 = Hmac<Sha256>;
pub type XResult<T> = Result<T, Box<dyn std::error::Error>>;
const CONTENT_TYPE: &str = "Content-Type";
const APPLICATION_JSON_UTF8: &str = "application/json; charset=utf-8";
const DEFAULT_DINGTALK_ROBOT_URL: &str = "https://oapi.dingtalk.com/robot/send";
const DEFAULT_WECHAT_WORK_ROBOT_URL: &str = "https://qyapi.weixin.qq.com/cgi-bin/webhook/send";
/// `DingTalk` is a simple SDK for DingTalk webhook robot
///
/// Document https://ding-doc.dingtalk.com/doc#/serverapi2/qf2nxq
///
/// Sample code:
/// ```ignore
/// let dt = DingTalk::new("<token>", "");
/// dt.send_text("Hello world!")?;
/// ```
///
/// At all sample:
/// ```ignore
/// dt.send_message(&DingTalkMessage::new_text("Hello World!").at_all())?;
/// ```
#[derive(Default)]
pub struct DingTalk {
pub dingtalk_type: DingTalkType,
pub default_webhook_url: String,
pub access_token: String,
pub sec_token: String,
pub direct_url: String,
}
impl DingTalkMessage {
/// New text DingTalk message
pub fn new_text(text_content: &str) -> Self {
Self::new(DingTalkMessageType::Text).text(text_content)
}
/// New markdown DingTalk message
pub fn new_markdown(markdown_title: &str, markdown_content: &str) -> Self {
Self::new(DingTalkMessageType::Markdown).markdown(markdown_title, markdown_content)
}
/// New link DingTalk message
pub fn new_link(
link_title: &str,
link_text: &str,
link_pic_url: &str,
link_message_url: &str,
) -> Self {
Self::new(DingTalkMessageType::Link).link(
link_title,
link_text,
link_pic_url,
link_message_url,
)
}
/// New action card DingTalk message
pub fn new_action_card(title: &str, text: &str) -> Self {
let mut s = Self::new(DingTalkMessageType::ActionCard);
s.action_card_title = title.into();
s.action_card_text = text.into();
s
}
/// New feed card DingTalk message
pub fn new_feed_card() -> Self {
Self::new(DingTalkMessageType::FeedCard)
}
/// New DingTalk message
pub fn new(message_type: DingTalkMessageType) -> Self {
DingTalkMessage {
message_type,
..Default::default()
}
}
/// Set text
pub fn text(mut self, text_content: &str) -> Self {
self.text_content = text_content.into();
self
}
/// Set markdown
pub fn markdown(mut self, markdown_title: &str, markdown_content: &str) -> Self {
self.markdown_title = markdown_title.into();
self.markdown_content = markdown_content.into();
self
}
/// Set link
pub fn link(
mut self,
link_title: &str,
link_text: &str,
link_pic_url: &str,
link_message_url: &str,
) -> Self {
self.link_title = link_title.into();
self.link_text = link_text.into();
self.link_pic_url = link_pic_url.into();
self.link_message_url = link_message_url.into();
self
}
    /// Set action card show avatar (default show)
pub fn action_card_show_avatar(mut self) -> Self {
self.action_card_hide_avatar = DingTalkMessageActionCardHideAvatar::Show;
self
}
    /// Set action card hide avatar
pub fn action_card_hide_avatar(mut self) -> Self {
self.action_card_hide_avatar = DingTalkMessageActionCardHideAvatar::Hide;
self
}
/// Set action card btn vertical(default vertical)
pub fn action_card_btn_vertical(mut self) -> Self {
self.action_card_btn_orientation = DingTalkMessageActionCardBtnOrientation::Vertical;
self
}
/// Set action card btn landscape
pub fn action_card_btn_landscape(mut self) -> Self {
self.action_card_btn_orientation = DingTalkMessageActionCardBtnOrientation::Landscape;
self
}
/// Set action card single btn
pub fn set_action_card_signle_btn(mut self, btn: DingTalkMessageActionCardBtn) -> Self {
self.action_card_single_btn = Some(btn);
self
}
/// Add action card btn
pub fn add_action_card_btn(mut self, btn: DingTalkMessageActionCardBtn) -> Self {
self.action_card_btns.push(btn);
self
}
/// Add feed card link
pub fn add_feed_card_link(mut self, link: DingTalkMessageFeedCardLink) -> Self {
self.feed_card_links.push(link);
self
}
/// Add feed card link detail
pub fn add_feed_card_link_detail(self, title: &str, message_url: &str, pic_url: &str) -> Self {
self.add_feed_card_link(DingTalkMessageFeedCardLink {
title: title.into(),
message_url: message_url.into(),
pic_url: pic_url.into(),
})
}
/// At all
pub fn at_all(mut self) -> Self {
self.at_all = true;
self
}
/// At mobiles
pub fn at_mobiles(mut self, mobiles: &[String]) -> Self {
for m in mobiles {
self.at_mobiles.push(m.clone());
}
self
}
}
impl DingTalk {
/// Create `DingTalk` from token:
/// wechatwork:access_token
/// dingtalk:access_token?sec_token
pub fn from_token(token: &str) -> XResult<Self> {
if token.starts_with("dingtalk:") {
let token_and_or_sec = &token["dingtalk:".len()..];
let mut token_and_or_sec_vec = token_and_or_sec.split('?');
let access_token = match token_and_or_sec_vec.next() {
Some(t) => t,
None => token_and_or_sec,
};
let sec_token = match token_and_or_sec_vec.next() {
Some(t) => t,
None => "",
};
Ok(Self::new(access_token, sec_token))
} else if token.starts_with("wechatwork:") {
Ok(Self::new_wechat(&token["wechatwork:".len()..]))
} else if token.starts_with("wecom:") {
Ok(Self::new_wechat(&token["wecom:".len()..]))
} else {
Err(Box::new(Error::new(
ErrorKind::Other,
format!("Tokne format erorr: {}", token),
)))
}
}
/// Create `DingTalk` from file
///
/// Format see `DingTalk::from_json(json: &str)`
pub fn from_file(f: &str) -> XResult<Self> {
let f_path_buf = if f.starts_with("~/") {
let home = PathBuf::from(env::var("HOME")?);
home.join(f.chars().skip(2).collect::<String>())
} else {
PathBuf::from(f)
};
let f_content = fs::read_to_string(f_path_buf)?;
Self::from_json(&f_content)
}
/// Create `DingTalk` from JSON string
///
/// Format:
/// ```json
/// {
/// "default_webhook_url": "", // option
/// "access_token": "<access token>",
/// "sec_token": "<sec token>" // option
/// }
/// ```
pub fn from_json(json: &str) -> XResult<Self> {
let json_value: Value = serde_json::from_str(json)?;
if !json_value.is_object() {
return Err(Box::new(Error::new(
ErrorKind::Other,
format!("JSON format erorr: {}", json),
)));
}
let type_str = json_value["type"]
.as_str()
.unwrap_or_default()
.to_lowercase();
let dingtalk_type = match type_str.as_str() {
"wechat" | "wechatwork" | "wecom" => DingTalkType::WeChatWork,
_ => DingTalkType::DingTalk,
};
let default_webhook_url = json_value["default_webhook_url"]
.as_str()
.unwrap_or_else(|| match dingtalk_type {
DingTalkType::DingTalk => DEFAULT_DINGTALK_ROBOT_URL,
DingTalkType::WeChatWork => DEFAULT_WECHAT_WORK_ROBOT_URL,
})
.to_owned();
let access_token = json_value["access_token"]
.as_str()
.unwrap_or_default()
.to_owned();
let sec_token = json_value["sec_token"]
.as_str()
.unwrap_or_default()
.to_owned();
let direct_url = json_value["direct_url"]
.as_str()
.unwrap_or_default()
.to_owned();
Ok(DingTalk {
dingtalk_type,
default_webhook_url,
access_token,
sec_token,
direct_url,
})
}
/// Create `DingTalk` from url, for outgoing robot
pub fn from_url(direct_url: &str) -> Self {
DingTalk {
direct_url: direct_url.into(),
..Default::default()
}
}
/// Create `DingTalk`
/// `access_token` is access token, `sec_token` can be empty `""`
pub fn new(access_token: &str, sec_token: &str) -> Self {
DingTalk {
default_webhook_url: DEFAULT_DINGTALK_ROBOT_URL.into(),
access_token: access_token.into(),
sec_token: sec_token.into(),
..Default::default()
}
}
/// Create `DingTalk` for WeChat Work
pub fn new_wechat(key: &str) -> Self {
DingTalk {
default_webhook_url: DEFAULT_WECHAT_WORK_ROBOT_URL.into(),
dingtalk_type: DingTalkType::WeChatWork,
access_token: key.into(),
..Default::default()
}
}
/// Set default webhook url
pub fn set_default_webhook_url(&mut self, default_webhook_url: &str) {
self.default_webhook_url = default_webhook_url.into();
}
/// Send DingTalk message
///
/// 1. Create DingTalk JSON message
/// 2. POST JSON message to DingTalk server
pub async fn send_message(&self, dingtalk_message: DingTalkMessage) -> XResult<()> {
let mut message_json = match dingtalk_message.message_type {
DingTalkMessageType::Text => serde_json::to_value(InnerTextMessage {
msgtype: DingTalkMessageType::Text,
text: InnerTextMessageText {
content: dingtalk_message.text_content,
},
}),
DingTalkMessageType::Link => serde_json::to_value(InnerLinkMessage {
msgtype: DingTalkMessageType::Link,
link: InnerLinkMessageLink {
title: dingtalk_message.link_title,
text: dingtalk_message.link_text,
pic_url: dingtalk_message.link_pic_url,
message_url: dingtalk_message.link_message_url,
},
}),
DingTalkMessageType::Markdown => serde_json::to_value(InnerMarkdownMessage {
msgtype: DingTalkMessageType::Markdown,
markdown: InnerMarkdownMessageMarkdown {
title: dingtalk_message.markdown_title,
text: dingtalk_message.markdown_content,
},
}),
DingTalkMessageType::ActionCard => serde_json::to_value(InnerActionCardMessage {
msgtype: DingTalkMessageType::ActionCard,
action_card: InnerActionCardMessageActionCard {
title: dingtalk_message.action_card_title,
text: dingtalk_message.action_card_text,
hide_avatar: dingtalk_message.action_card_hide_avatar,
btn_orientation: dingtalk_message.action_card_btn_orientation,
},
}),
DingTalkMessageType::FeedCard => serde_json::to_value(InnerFeedCardMessage {
msgtype: DingTalkMessageType::FeedCard,
feed_card: InnerFeedCardMessageFeedCard {
links: {
let mut links: Vec<InnerFeedCardMessageFeedCardLink> = vec![];
for feed_card_link in &dingtalk_message.feed_card_links {
links.push(InnerFeedCardMessageFeedCardLink {
title: feed_card_link.title.clone(),
message_url: feed_card_link.message_url.clone(),
pic_url: feed_card_link.pic_url.clone(),
});
}
links
},
},
}),
}?;
if DingTalkMessageType::ActionCard == dingtalk_message.message_type {
            if let Some(single_btn) = dingtalk_message.action_card_single_btn.as_ref() {
                message_json["actionCard"]["singleTitle"] = single_btn.title.as_str().into();
                message_json["actionCard"]["singleURL"] = single_btn.action_url.as_str().into();
            } else {
let mut btns: Vec<InnerActionCardMessageBtn> = vec![];
for action_card_btn in &dingtalk_message.action_card_btns {
btns.push(InnerActionCardMessageBtn {
title: action_card_btn.title.clone(),
action_url: action_card_btn.action_url.clone(),
});
}
message_json["actionCard"]["btns"] = serde_json::to_value(btns)?;
}
}
if dingtalk_message.at_all || !dingtalk_message.at_mobiles.is_empty() {
if let Some(m) = message_json.as_object_mut() {
let mut at_mobiles: Vec<Value> = vec![];
for m in &dingtalk_message.at_mobiles {
at_mobiles.push(Value::String(m.clone()));
}
let mut at_map = serde_json::Map::new();
at_map.insert("atMobiles".into(), Value::Array(at_mobiles));
at_map.insert("isAtAll".into(), Value::Bool(dingtalk_message.at_all));
m.insert("at".into(), Value::Object(at_map));
}
}
self.send(&serde_json::to_string(&message_json)?).await
}
/// Send text message
pub async fn send_text(&self, text_message: &str) -> XResult<()> {
self.send_message(DingTalkMessage::new_text(text_message))
.await
}
/// Send markdown message
pub async fn send_markdown(&self, title: &str, text: &str) -> XResult<()> {
self.send_message(DingTalkMessage::new_markdown(title, text))
.await
}
/// Send link message
pub async fn send_link(
&self,
link_title: &str,
link_text: &str,
link_pic_url: &str,
link_message_url: &str,
) -> XResult<()> {
self.send_message(DingTalkMessage::new_link(
link_title,
link_text,
link_pic_url,
link_message_url,
))
.await
}
/// Direct send JSON message
pub async fn send(&self, json_message: &str) -> XResult<()> {
let client = reqwest::Client::new();
let response = match client
.post(&self.generate_signed_url()?)
.header(CONTENT_TYPE, APPLICATION_JSON_UTF8)
.body(json_message.as_bytes().to_vec())
.send()
.await
{
Ok(r) => r,
Err(e) => {
return Err(Box::new(Error::new(
ErrorKind::Other,
format!("Unknown error: {}", e),
)) as Box<dyn std::error::Error>);
}
};
match response.status().as_u16() {
200_u16 => Ok(()),
_ => Err(Box::new(Error::new(
ErrorKind::Other,
format!("Unknown status: {}", response.status().as_u16()),
)) as Box<dyn std::error::Error>),
}
}
/// Generate signed dingtalk webhook URL
pub fn generate_signed_url(&self) -> XResult<String> {
if !self.direct_url.is_empty() {
return Ok(self.direct_url.clone());
}
let mut signed_url = String::with_capacity(1024);
signed_url.push_str(&self.default_webhook_url);
if self.default_webhook_url.ends_with('?') {
// Just Ok
} else if self.default_webhook_url.contains('?') {
if !self.default_webhook_url.ends_with('&') {
signed_url.push('&');
}
} else {
signed_url.push('?');
}
match self.dingtalk_type {
DingTalkType::DingTalk => signed_url.push_str("access_token="),
DingTalkType::WeChatWork => signed_url.push_str("key="),
}
signed_url.push_str(&urlencoding::encode(&self.access_token));
if !self.sec_token.is_empty() {
let timestamp = &format!(
"{}",
SystemTime::now()
.duration_since(SystemTime::UNIX_EPOCH)
.unwrap()
.as_millis()
);
let timestamp_and_secret = &format!("{}\n{}", timestamp, self.sec_token);
let hmac_sha256 = base64::encode(
&calc_hmac_sha256(self.sec_token.as_bytes(), timestamp_and_secret.as_bytes())?[..],
);
signed_url.push_str("×tamp=");
signed_url.push_str(timestamp);
signed_url.push_str("&sign=");
signed_url.push_str(&urlencoding::encode(&hmac_sha256));
}
Ok(signed_url)
}
}
/// calc hmac_sha256 digest
fn calc_hmac_sha256(key: &[u8], message: &[u8]) -> XResult<Vec<u8>> {
let mut mac = match HmacSha256::new_from_slice(key) {
Ok(m) => m,
Err(e) => {
return Err(Box::new(Error::new(
ErrorKind::Other,
format!("Hmac error: {}", e),
)));
}
};
mac.update(message);
Ok(mac.finalize().into_bytes().to_vec())
}
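// Usage sketch (hypothetical): sending messages from an async caller. The token string
// is a placeholder, and this function is not invoked anywhere in the crate.
#[allow(dead_code)]
async fn usage_sketch() -> XResult<()> {
    let dt = DingTalk::from_token("dingtalk:ACCESS_TOKEN?SEC_TOKEN")?;
    dt.send_markdown("Deploy", "**build** finished").await?;
    dt.send_text("Hello world!").await
}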
| true |
66a2a4b70fbc74431b86010803a540e2d383c1ce
|
Rust
|
DeltaManiac/Exercism
|
/dot-dsl/src/lib.rs
|
UTF-8
| 2,745 | 3.0625 | 3 |
[] |
no_license
|
use std::collections::HashMap;
#[derive(PartialEq, Eq, Default, Clone, Debug)]
pub struct Edge {
start: String,
end: String,
attrs: HashMap<String, String>,
}
impl Edge {
pub fn new(s: &str, e: &str) -> Self {
Edge {
start: s.to_string(),
end: e.to_string(),
attrs: HashMap::new(),
}
}
pub fn with_attrs(self, attrs: &[(&str, &str)]) -> Edge {
Edge {
attrs: attrs
.iter()
.map(|(name, value)| (String::from(*name), String::from(*value)))
.collect(),
..self
}
}
pub fn get_attr(&self, name: &str) -> Option<&str> {
self.attrs.get(name).map(|v| v.as_str())
}
}
#[derive(PartialEq, Eq, Default, Clone, Debug)]
pub struct Node {
name: String,
attrs: HashMap<String, String>,
}
impl Node {
pub fn new(name: &str) -> Node {
Node {
name: name.to_string(),
attrs: HashMap::new(),
}
}
pub fn with_attrs(self, attrs: &[(&str, &str)]) -> Node {
Node {
attrs: attrs
.iter()
.map(|(name, value)| (String::from(*name), String::from(*value)))
.collect(),
..self
}
}
pub fn get_attr(&self, name: &str) -> Option<&str> {
self.attrs.get(name).map(|v| v.as_str())
}
}
#[derive(Debug, Default)]
pub struct Graph {
pub nodes: Vec<Node>,
pub edges: Vec<Edge>,
pub attrs: HashMap<String, String>,
}
impl Graph {
pub fn new() -> Self {
Graph {
nodes: Vec::new(),
edges: Vec::new(),
attrs: HashMap::new(),
}
}
pub fn with_nodes(self, nodes: &[Node]) -> Self {
Graph {
nodes: Vec::from(nodes),
..self
}
}
pub fn with_edges(self, edges: &[Edge]) -> Self {
Graph {
edges: Vec::from(edges),
..self
}
}
pub fn with_attrs(self, attrs: &[(&str, &str)]) -> Graph {
Graph {
attrs: attrs
.iter()
.map(|(name, value)| (String::from(*name), String::from(*value)))
.collect(),
..self
}
}
pub fn get_node(&self, name: &str) -> Option<&Node> {
self.nodes.iter().find(|n| name == n.name)
}
pub fn get_attr(&self, name: &str) -> Option<&str> {
self.attrs.get(name).map(|v| v.as_str())
}
}
pub mod graph {
pub use super::Graph;
pub mod graph_items {
pub mod edge {
pub use super::super::super::Edge;
}
pub mod node {
pub use super::super::super::Node;
}
}
}
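// Usage sketch (hypothetical): building a small graph with attributed nodes and edges,
// mirroring how the Exercism test-suite exercises this API.
#[cfg(test)]
mod usage_sketch {
    use super::{Edge, Graph, Node};

    #[test]
    fn two_node_graph() {
        let graph = Graph::new()
            .with_nodes(&[
                Node::new("a"),
                Node::new("b").with_attrs(&[("color", "red")]),
            ])
            .with_edges(&[Edge::new("a", "b").with_attrs(&[("weight", "3")])])
            .with_attrs(&[("label", "example")]);
        assert_eq!(
            graph.get_node("b").and_then(|n| n.get_attr("color")),
            Some("red")
        );
        assert_eq!(graph.get_attr("label"), Some("example"));
        assert_eq!(graph.edges[0].get_attr("weight"), Some("3"));
    }
}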
| true |
4b209fef0994b40b86ab47b2c9b7f5be93fd8373
|
Rust
|
pcolusso/link-shortener
|
/src/main.rs
|
UTF-8
| 2,210 | 3 | 3 |
[] |
no_license
|
#![feature(proc_macro_hygiene, decl_macro)]
#[macro_use] extern crate rocket;
#[macro_use] extern crate rocket_contrib;
use std::sync::Mutex;
use std::collections::HashMap;
use rocket::State;
use rocket::response::Redirect;
use rocket_contrib::serve::StaticFiles;
use rocket_contrib::json::{Json, JsonValue};
use serde::{Serialize, Deserialize};
use rand::{thread_rng, Rng};
use rand::distributions::Alphanumeric;
use url::Url;
type DB = Mutex<HashMap<String, String>>;
#[get("/<id>")]
fn redirect(id: String, state: State<DB>) -> Redirect {
let db = state.lock().expect("Unable to lock");
match db.get(&id) {
Some(url) => Redirect::to(url.clone()),
None => Redirect::to("/")
}
}
#[derive(Debug, Serialize, Deserialize)]
pub struct Submission {
slug: Option<String>,
url: String,
}
fn generate_slug() -> String {
let result: String = thread_rng()
.sample_iter(&Alphanumeric)
.take(5)
.collect();
result
}
#[post("/", format = "application/json", data = "<submission>")]
fn shorten(submission: Json<Submission>, state: State<DB>) -> JsonValue {
// Don't know why I can't destructure this, should be able to
// let Submission { slug: s, url: u } = submission;
let mut db = state.lock().expect("Unable to lock");
let slug = match &submission.slug {
Some(s) => { match &s[..] {
"" => generate_slug(),
_ => s.clone(),
}},
None => generate_slug(),
};
let url = match Url::parse(&submission.url) {
Ok(u) => u.into_string(),
Err(e) => return json!({"status": "error", "reason": format!("Couldn't shorten, url is funky. ({})", e)}),
};
if db.contains_key(&slug) {
json!({"status": "error", "reason": "Couldn't shorten, url already points somewhere."})
} else {
db.insert(slug.clone(), url);
json!({"status": "success", "slug": slug})
}
}
fn main() {
let db = Mutex::new(HashMap::<String, String>::new());
rocket::ignite()
.mount("/", StaticFiles::from(concat!(env!("CARGO_MANIFEST_DIR"), "/frontend/build")))
.mount("/", routes![redirect, shorten])
.manage(db)
.launch();
}
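// Example interaction (hypothetical values), matching the routes mounted above:
//   POST /      with JSON {"slug": "docs", "url": "https://example.com"}
//               -> {"status": "success", "slug": "docs"}
//   GET  /docs  -> redirect to https://example.com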
| true |
e195425c17e729983ff0ea9be5e809ed9ecffbe6
|
Rust
|
Kampfkarren/advent-of-code-2018-rust
|
/src/day8/mod.rs
|
UTF-8
| 2,113 | 3.40625 | 3 |
[] |
no_license
|
type Number = u64;
#[derive(Debug, Eq, PartialEq)]
struct Node {
children: Vec<Box<Node>>,
metadata: Vec<Number>,
}
impl Node {
fn descendants(&self) -> Vec<&Node> {
let mut descendants = vec![self];
for child in &self.children {
descendants.append(&mut child.descendants());
}
descendants
}
fn value(&self) -> Number {
if self.children.is_empty() {
self.metadata.iter().sum()
} else {
            self.metadata.iter().map(|index| {
                // A metadata entry of 0 references no child; checked_sub avoids u64 underflow.
                match index.checked_sub(1).and_then(|i| self.children.get(i as usize)) {
                    Some(reference) => reference.value(),
                    None => 0,
                }
            }).sum()
}
}
}
fn read_node<I: Iterator<Item=Number>>(numbers: &mut I) -> Option<Node> {
let children_count = numbers.next()?;
let metadata_entries = numbers.next().unwrap();
let node = Node {
children: std::iter::repeat_with(|| Box::new(read_node(numbers).unwrap())).take(children_count as usize).collect(),
metadata: std::iter::repeat_with(|| numbers.next().unwrap()).take(metadata_entries as usize).collect(),
};
Some(node)
}
fn parse(input: &str) -> Node {
read_node(&mut input.split(' ').map(|x| x.parse::<Number>().unwrap())).unwrap()
}
fn solution_part1(input: &str) -> Number {
parse(input).descendants().iter().map(|node| &node.metadata).flatten().sum()
}
fn solution_part2(input: &str) -> Number {
parse(input).value()
}
pub fn solve(input: &str) {
println!("Part 1: {}", solution_part1(input));
println!("Part 2: {}", solution_part2(input));
}
#[cfg(test)]
const TEST_DATA: &'static str = "2 3 0 3 10 11 12 1 1 0 1 99 2 1 1 2";
#[test]
fn test_parse() {
assert_eq!(parse(TEST_DATA), Node {
children: vec![Box::new(Node {
children: vec![],
metadata: vec![10, 11, 12],
}), Box::new(Node {
children: vec![Box::new(Node {
children: vec![],
metadata: vec![99]
})],
metadata: vec![2],
})],
metadata: vec![1, 1, 2],
});
}
#[test]
fn test_solution_part1() {
assert_eq!(solution_part1(TEST_DATA), 138);
}
#[test]
fn test_solution_part2() {
assert_eq!(solution_part2(TEST_DATA), 66);
}
| true |
31abae91288b73ace1a3ce5b3d392c68274a32fb
|
Rust
|
chazzam/aoc2019
|
/src/intcode.rs
|
UTF-8
| 10,137 | 2.9375 | 3 |
[
"Apache-2.0"
] |
permissive
|
use std::convert::TryInto;
use std::io::{self, Read};
use std::collections::HashMap;
pub fn int_input(in_str: &str) -> Vec<i64> {
//println!("in: {}", &in_str.trim());
let results = in_str
.trim()
.split(',')
.filter_map(|x| if x.trim() != "" { x.trim().parse::<i64>().ok() } else { None })
.collect();
//println!("got input");
results
}
pub fn intcode(code: Vec<i64>, pc: usize) -> (Vec<i64>, Vec<i64>) {
let inputs = Vec::<i64>::new();
intcodes(code, pc, inputs)
}
pub fn intcodes(code: Vec<i64>, pc: usize, inputs: Vec<i64>) -> (Vec<i64>, Vec<i64>) {
intcodesw(code, pc, inputs, &mut io::stdout())
}
pub fn intcodesq(
code: Vec<i64>,
pc: usize,
inputs: Vec<i64>)
-> (Vec<i64>, Vec<i64>)
{
intcodesw(code, pc, inputs, &mut io::sink())
}
pub fn intcodesw(
code: Vec<i64>,
pc: usize,
inputs: Vec<i64>,
stdout: &mut io::Write)
-> (Vec<i64>, Vec<i64>)
{
let mut reversed_inputs = inputs.clone();
reversed_inputs.reverse();
let mut signals = Vec::<i64>::new();
let mut base = 0;
let memory: HashMap<i64, i64> = HashMap::new();
intcodes_internal(code, pc, &mut reversed_inputs, false, stdout,
&mut base, &mut signals, memory)
}
pub fn intcodesf(
code: Vec<i64>,
pc: usize,
inputs: Vec<i64>,
mut signals: &mut Vec<i64>)
-> (Vec<i64>, Vec<i64>)
{
let mut reversed_inputs = inputs.clone();
reversed_inputs.reverse();
let mut base = 0;
let memory: HashMap<i64, i64> = HashMap::new();
intcodes_internal(code, pc, &mut reversed_inputs, true, &mut io::sink(),
&mut base, &mut signals, memory)
//intcodes_internal(code, pc, &mut reversed_inputs, true, &mut io::stdout(), &mut signals)
}
fn update_dest(input: i64, dest: i64, mut code: Vec<i64>, mut memory: HashMap<i64, i64>) -> (Vec<i64>, HashMap<i64, i64>) {
if dest < (code.len() as i64) {
//println!("update: address: {} code.len:{} memory.len:{} = {} in code", dest, code.len(), memory.len(), input);
code[dest as usize] = input;
} else {
//println!("update: address: {} code.len:{} memory.len:{} = {} in memory", dest, code.len(), memory.len(), input);
memory.insert(dest, input);
}
(code, memory)
}
fn lookup_memory(pc: i64, code: &Vec<i64>, memory: &HashMap<i64, i64>) -> i64 {
if (pc as usize) < code.len() {
//println!("lookup: address: {} code.len:{} memory.len:{} in code", pc, code.len(), memory.len());
return *code.get(pc as usize).unwrap() as i64;
} else {
//println!("lookup: address: {} code.len:{} memory.len:{} in memory", pc, code.len(), memory.len());
let mut new_val: i64 = 0;
if memory.get(&pc).is_some() {
new_val = *memory.get(&pc).unwrap() as i64;
}
return new_val;
}
}
fn intcodes_internal(
code: Vec<i64>,
pc: usize,
inputs: &mut Vec<i64>,
feedback: bool,
stdout: &mut io::Write,
mut rel_base: &mut i64,
mut signals: &mut Vec<i64>,
memory: HashMap<i64, i64>)
-> (Vec<i64>, Vec<i64>)
{
    // Intcode
    // in set, place 0: opcode (parameter modes are encoded in its higher digits)
    // opcode 99: halt command processing
    // opcode 1 (add): vec[set[3]] = vec[set[1]] + vec[set[2]]
    // opcode 2 (mul): vec[set[3]] = vec[set[1]] * vec[set[2]]
    // opcode 3 (input), 4 (output), 5 (jump-if-true), 6 (jump-if-false),
    // opcode 7 (less-than), 8 (equals), 9 (adjust relative base)
let outputs = code.clone();
//let code_outputs = Vec::<i64>::new();
let op:i64 = code[pc];
//println!("DEBUG:Intcode:: op:{}", op);
if op == 99 {
writeln!(stdout, "Intcode:: EXIT (SUCCESS)").ok();
return (outputs, signals.to_vec());
}
if op < 1 || op > 22298 {
writeln!(stdout, "Intcode:: Received EXIT (FAILURE)").ok();
println!("EXIT badop: op: {} lengths:: code:{} memory:{} signals:{}", op, code.len(), memory.len(), signals.len());
signals.push(i64::min_value());
return (outputs, signals.to_vec());
}
let parameterized = |x: i64, op: i64| {
// 0 = position, 1 = immediate, 2 = relative base
op == x
|| op == (x + 100) /* a is immediate, b is position */
|| op == (x + 200) /* a is relative, b is position */
|| op == (x + 1000) /* a is position, b is immediate */
|| op == (x + 2000) /* a is position, b is relative */
|| op == (x + 1100) /* a is immediate, b is immediate */
|| op == (x + 1200) /* a is relative, b is immediate */
|| op == (x + 2100) /* a is immediate, b is relative */
|| op == (x + 2200) /* a is relative, b is relative */
|| op == (x + 20000) /* dest is relative */
|| op == (x + 20100) /* dest is relative, a immediate */
|| op == (x + 21100) /* dest is relative, a immediate, b immediate */
|| op == (x + 21200) /* dest is relative, a relative, b immediate */
|| op == (x + 22100) /* dest is relative, a immediate, b relative */
|| op == (x + 22200) /* dest is relative, a relative, b relative */
};
let modes = |x: i64| -> (u8, u8, u8) {
// 0 = position, 1 = immediate, 2 = relative base
// 100s -> update a
// 1000s -> update b
let mut a: u8 = 0;
let mut b: u8 = 0;
let mut d: u8 = 0;
if x < 100 /* position for all */ {
return (a, b, d);
}
a = ((x / 100) % 10).try_into().unwrap();
if x >= 1000 {
b = ((x / 1000) % 10).try_into().unwrap();
}
if x >= 10000 {
d = ((x / 10000) % 10).try_into().unwrap();
}
(a, b, d)
};
let mode = modes(op);
let op_add: bool = parameterized(1, op);
let op_mul: bool = parameterized(2, op);
let op_input: bool = op == 3 || op == 203;
let op_output: bool = op == 4 || op == 104 || op == 204;
let op_jump_true: bool = parameterized(5, op); // no dest
let op_jump_false: bool = parameterized(6, op); // no dest
let op_less_than: bool = parameterized(7, op);
let op_equals: bool = parameterized(8, op);
let op_rel: bool = op == 9 || op == 109 || op == 209;
// get the a parameter, handling mode
let mut a: i64 = code[pc + 1];
let mut dest: i64 = a;
if mode.0 == 0 {
//a = *code.get(a as usize).unwrap() as i64;
a = lookup_memory(a, &code, &memory);
} else if mode.0 == 2 {
//a = *code.get((a + *rel_base) as usize).unwrap() as i64;
dest = a + *rel_base;
a = lookup_memory(dest, &code, &memory);
}
//println!("DEBUG:: Looked up: a [{}] = {}", code[pc + 1], a);
if op_input {
//println!("DEBUG:: input <{}:{} ({})> +{} mode({}, {}, {})", op, dest, code[pc + 1], rel_base, mode.0, mode.1, mode.2);
// input
let input: i64;
if inputs.len() > 0 {
input = inputs.pop().unwrap();
writeln!(stdout, "Intcode:: Input: {}", input).ok();
}
else if feedback && signals.len() > 0 {
input = signals.remove(0);
writeln!(stdout, "Intcode:: Input: {}", input).ok();
}
else {
writeln!(stdout, "Intcode:: Input: ").ok();
let mut buffer = String::new();
io::stdin().read_to_string(&mut buffer).ok();
input = buffer.trim().parse().unwrap();
}
let (outputs, memory) = update_dest(input, dest, outputs, memory);
return intcodes_internal(outputs, pc + 2, inputs, feedback, stdout, &mut rel_base, &mut signals, memory)
}
else if op_output {
//println!("DEBUG:: output <{}:{}> +{} mode({}, {}, {})", op, a, rel_base, mode.0, mode.1, mode.2);
// output
writeln!(stdout, "Intcode:: Print: {}", a).ok();
signals.push(a);
let (outputs, _) = intcodes_internal(outputs, pc + 2, inputs, feedback, stdout, &mut rel_base, &mut signals, memory);
return (outputs, signals.to_vec());
}
else if op_rel {
//println!("DEBUG:: +rel <{}:{} ({})> +{} mode({}, {}, {})", op, a, code[pc + 1], rel_base, mode.0, mode.1, mode.2);
*rel_base += a;
return intcodes_internal(outputs, pc + 2, inputs, feedback, stdout, &mut rel_base, &mut signals, memory);
}
// get the b parameter, handling mode
let mut b: i64 = code[pc + 2];
if mode.1 == 0 {
//b = *code.get(b as usize).unwrap() as i64;
b = lookup_memory(b, &code, &memory);
} else if mode.1 == 2 {
//b = *code.get((b + *rel_base) as usize).unwrap() as i64;
b = lookup_memory(b + *rel_base, &code, &memory);
}
//println!("DEBUG:: Looked up: b [{}] = {}", code[pc + 2], b);
if op_jump_true || op_jump_false {
//println!("DEBUG:: jump <{}:{},{}> +{} mode({}, {}, {})", op, a, b, rel_base, mode.0, mode.1, mode.2);
// jump-if-true | jump-if-false
// 0n->paramAB, 10n->paramB, 100n->paramA, 110n->immediate
let mut new_pc: usize = pc + 3;
if (a != 0 && op_jump_true) || (a == 0 && op_jump_false) {
new_pc = b as usize;
}
return intcodes_internal(outputs, new_pc, inputs, feedback, stdout, &mut rel_base, &mut signals, memory);
}
    // Get the third parameter (the destination), handling its mode
let mut dest: i64 = code[pc + 3];
if mode.2 == 2 {
//dest = lookup_memory(dest + *rel_base, &code, &memory);
dest = dest + *rel_base;
}
//println!("DEBUG:: Looked up: dest [{}] = {}", code[pc + 3], dest);
//println!("DEBUG:: a[{}]={} b[{}]={} dest[{}]={}", code[pc + 1], a, code[pc + 2], b, code[pc + 3], dest);
if op_less_than || op_equals {
//println!("DEBUG:: <|=cmp <{}:{} [{}],{} [{}],{} [{}]> +{} mode({}, {}, {})", op, a, code[pc + 1], b, code[pc + 2], dest, code[pc + 3], rel_base, mode.0, mode.1, mode.2);
let mut result = 0;
if (op_less_than && a < b) || (op_equals && a == b) {
result = 1;
}
let (outputs, memory) = update_dest(result, dest, outputs, memory);
return intcodes_internal(outputs, pc + 4, inputs, feedback, stdout, &mut rel_base, &mut signals, memory);
}
else if op_add || op_mul {
//println!("DEBUG:: add|mul<{}:{},{},{}> +{} mode({}, {}, {})", op, a, b, dest, rel_base, mode.0, mode.1, mode.2);
// add and multiply
let mut result = a + b;
if op_mul {
result = a * b;
}
let (outputs, memory) = update_dest(result, dest, outputs, memory);
return intcodes_internal(outputs, pc + 4, inputs, feedback, stdout, &mut rel_base, &mut signals, memory);
}
writeln!(stdout, "Intcode:: Received EXIT (FAILURE)").ok();
println!("EXIT fallout: op: {} lengths:: code:{} memory:{} signals:{}", op, code.len(), memory.len(), signals.len());
signals.push(i64::min_value());
(outputs, signals.to_vec())
}
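// Usage sketch (hypothetical, not from the original repository): running a tiny
// program through the quiet runner defined above.
#[cfg(test)]
mod usage_sketch {
    use super::{int_input, intcodesq};

    #[test]
    fn add_then_halt() {
        // "1,0,0,0,99": add the values at positions 0 and 0, store the sum at 0, halt.
        let program = int_input("1,0,0,0,99");
        let (memory, signals) = intcodesq(program, 0, Vec::new());
        assert_eq!(memory, vec![2, 0, 0, 0, 99]);
        assert!(signals.is_empty());
    }
}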
| true |
fe6cfcfede13d7d8db3973ab5256a9aa894194c6
|
Rust
|
keroro520/jsonrpc-client-rs
|
/pubsub/src/lib.rs
|
UTF-8
| 13,156 | 2.6875 | 3 |
[
"Apache-2.0",
"MIT"
] |
permissive
|
//! This crate adds support for subscriptions as defined in [here].
//!
//! [here]: https://github.com/ethereum/go-ethereum/wiki/RPC-PUB-SUB
extern crate futures;
extern crate jsonrpc_client_core;
extern crate jsonrpc_client_utils;
#[macro_use]
extern crate serde;
extern crate serde_json;
extern crate tokio;
#[macro_use]
extern crate log;
#[macro_use]
extern crate error_chain;
use futures::{future, future::Either, sync::mpsc, Async, Future, Poll, Sink, Stream};
use jsonrpc_client_core::server::{
types::Params, Handler, HandlerSettingError, Server, ServerHandle,
};
use jsonrpc_client_core::{
ClientHandle, DuplexTransport, Error as CoreError, ErrorKind as CoreErrorKind,
};
use serde_json::Value;
use std::collections::BTreeMap;
use std::fmt;
use std::marker::PhantomData;
use tokio::prelude::future::Executor;
use jsonrpc_client_utils::select_weak::{SelectWithWeak, SelectWithWeakExt};
error_chain! {
links {
Core(CoreError, CoreErrorKind);
}
foreign_links {
HandlerError(HandlerSettingError);
SpawnError(tokio::executor::SpawnError);
}
}
#[derive(Debug, Deserialize)]
struct SubscriptionMessage {
subscription: SubscriptionId,
result: Value,
}
/// A stream of messages from a subscription.
#[derive(Debug)]
pub struct Subscription<T: serde::de::DeserializeOwned> {
rx: mpsc::Receiver<Value>,
id: Option<SubscriptionId>,
handler_chan: mpsc::UnboundedSender<SubscriberMsg>,
_marker: PhantomData<T>,
}
impl<T: serde::de::DeserializeOwned> Stream for Subscription<T> {
type Item = T;
type Error = CoreError;
fn poll(&mut self) -> Poll<Option<T>, CoreError> {
match self.rx.poll().map_err(|_: ()| CoreErrorKind::Shutdown)? {
Async::Ready(Some(v)) => Ok(Async::Ready(Some(
serde_json::from_value(v).map_err(|_| CoreErrorKind::DeserializeError)?,
))),
Async::Ready(None) => Ok(Async::Ready(None)),
Async::NotReady => Ok(Async::NotReady),
}
}
}
impl<T: serde::de::DeserializeOwned> Drop for Subscription<T> {
fn drop(&mut self) {
if let Some(id) = self.id.take() {
let _ = self
.handler_chan
.unbounded_send(SubscriberMsg::RemoveSubscriber(id));
}
}
}
/// A subscriber creates new subscriptions.
#[derive(Debug)]
pub struct Subscriber<E: Executor<Box<Future<Item = (), Error = ()> + Send>>> {
client_handle: ClientHandle,
handlers: ServerHandle,
notification_handlers: BTreeMap<String, mpsc::UnboundedSender<SubscriberMsg>>,
executor: E,
}
impl<E: Executor<Box<Future<Item = (), Error = ()> + Send>> + Send + Sized> Subscriber<E> {
/// Constructs a new subscriber with the provided executor.
pub fn new(executor: E, client_handle: ClientHandle, handlers: ServerHandle) -> Self {
let notification_handlers = BTreeMap::new();
Self {
client_handle,
handlers,
notification_handlers,
executor,
}
}
/// Creates a new subscription with the given method names and parameters. Parameters
/// `sub_method` and `unsub_method` are only taken into account if this is the first time a
/// subscription for `notification` has been created in the lifetime of this `Subscriber`.
pub fn subscribe<T, P>(
&mut self,
sub_method: String,
unsub_method: String,
notification_method: String,
buffer_size: usize,
sub_parameters: P,
) -> impl Future<Item = Subscription<T>, Error = Error>
where
T: serde::de::DeserializeOwned + 'static,
P: serde::Serialize + 'static,
{
// Get a channel to an existing notification handler or spawn a new one.
let chan = self
.notification_handlers
.get(¬ification_method)
            // Reuse the existing handler only if its receiver is still alive.
            .filter(|c| !c.is_closed())
.map(|chan| Ok(chan.clone()))
.unwrap_or_else(|| {
self.spawn_notification_handler(notification_method.clone(), unsub_method)
});
let (sub_tx, sub_rx) = mpsc::channel(buffer_size);
match chan {
Ok(chan) => Either::A(
self.client_handle
.call_method(sub_method, &sub_parameters)
.map_err(|e| e.into())
.and_then(move |id: SubscriptionId| {
if let Err(_) =
chan.unbounded_send(SubscriberMsg::NewSubscriber(id.clone(), sub_tx))
{
debug!(
"Notificaton handler for {} - {} already closed",
notification_method, id
);
};
Ok(Subscription {
rx: sub_rx,
id: Some(id),
handler_chan: chan.clone(),
_marker: PhantomData::<T>,
})
}),
),
Err(e) => Either::B(future::err(e)),
}
}
fn spawn_notification_handler(
&mut self,
notification_method: String,
unsub_method: String,
) -> Result<mpsc::UnboundedSender<SubscriberMsg>> {
let (msg_tx, msg_rx) = mpsc::channel(0);
self.handlers
.add(
notification_method.clone(),
Handler::Notification(Box::new(move |notification| {
let fut = match params_to_subscription_message(notification.params) {
Some(msg) => Either::A(
msg_tx
.clone()
.send(msg)
.map(|_| ())
.map_err(|_| CoreErrorKind::Shutdown.into()),
),
None => {
error!(
"Received notification with invalid parameters for subscription - {}",
notification.method
);
Either::B(futures::future::ok(()))
}
};
Box::new(fut)
})),
)
.wait()?;
let (control_tx, control_rx) = mpsc::unbounded();
let notification_handler = NotificationHandler::new(
notification_method.clone(),
self.handlers.clone(),
self.client_handle.clone(),
unsub_method,
msg_rx,
control_rx,
);
if let Err(e) = self
.executor
.execute(Box::new(notification_handler.map_err(|_| ())))
{
error!("Failed to spawn notification handler - {:?}", e);
};
self.notification_handlers
.insert(notification_method, control_tx.clone());
Ok(control_tx)
}
}
fn params_to_subscription_message(params: Option<Params>) -> Option<SubscriberMsg> {
params
.and_then(|p| p.parse().ok())
.map(SubscriberMsg::NewMessage)
}
#[derive(Ord, PartialOrd, Eq, PartialEq, Clone, Debug, Deserialize)]
#[serde(untagged)]
enum SubscriptionId {
Num(u64),
String(String),
}
impl fmt::Display for SubscriptionId {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
SubscriptionId::Num(n) => write!(f, "{}", n),
SubscriptionId::String(s) => write!(f, "{}", s),
}
}
}
#[derive(Debug)]
enum SubscriberMsg {
NewMessage(SubscriptionMessage),
NewSubscriber(SubscriptionId, mpsc::Sender<Value>),
RemoveSubscriber(SubscriptionId),
}
// A single notification can receive messages for different subscribers for the same notification.
struct NotificationHandler {
notification_method: String,
subscribers: BTreeMap<SubscriptionId, mpsc::Sender<Value>>,
messages: SelectWithWeak<mpsc::Receiver<SubscriberMsg>, mpsc::UnboundedReceiver<SubscriberMsg>>,
unsub_method: String,
client_handle: ClientHandle,
current_future: Option<Box<dyn Future<Item = (), Error = ()> + Send>>,
server_handlers: ServerHandle,
should_shut_down: bool,
}
impl Drop for NotificationHandler {
fn drop(&mut self) {
let _ = self
.server_handlers
.remove(self.notification_method.clone());
}
}
impl NotificationHandler {
fn new(
notification_method: String,
server_handlers: ServerHandle,
client_handle: ClientHandle,
unsub_method: String,
subscription_messages: mpsc::Receiver<SubscriberMsg>,
control_messages: mpsc::UnboundedReceiver<SubscriberMsg>,
) -> Self {
let messages = subscription_messages.select_with_weak(control_messages);
Self {
notification_method,
messages,
server_handlers,
unsub_method,
subscribers: BTreeMap::new(),
client_handle,
current_future: None,
should_shut_down: false,
}
}
fn handle_new_subscription(&mut self, id: SubscriptionId, chan: mpsc::Sender<Value>) {
self.subscribers.insert(id, chan);
}
fn handle_removal(&mut self, id: SubscriptionId) {
        if self.subscribers.remove(&id).is_none() {
            debug!("Removing non-existent subscriber - {}", &id);
        }
let fut = self
.client_handle
.call_method(self.unsub_method.clone(), &[0u8; 0])
.map(|_r: bool| ())
.map_err(|e| trace!("Failed to unsubscribe - {}", e));
        self.should_shut_down = self.subscribers.is_empty();
self.current_future = Some(Box::new(fut));
}
fn handle_new_message(&mut self, id: SubscriptionId, message: Value) {
match self.subscribers.get(&id) {
Some(chan) => {
let fut = chan
.clone()
.send(message)
.map_err(move |_| trace!("Subscriber already gone: {}", id))
.map(|_| ());
self.current_future = Some(Box::new(fut));
}
None => trace!("Received message for non existant subscription - {}", id),
}
}
fn ready_for_next_connection(&mut self) -> bool {
match self.current_future.take() {
None => true,
Some(mut fut) => match fut.poll() {
Ok(Async::NotReady) => {
self.current_future = Some(fut);
false
}
_ => true,
},
}
}
}
impl Future for NotificationHandler {
type Item = ();
type Error = ();
fn poll(&mut self) -> Poll<(), ()> {
while self.ready_for_next_connection() {
match self.messages.poll()? {
Async::NotReady => {
break;
}
Async::Ready(None) => {
return Ok(Async::Ready(()));
}
Async::Ready(Some(SubscriberMsg::NewMessage(msg))) => {
self.handle_new_message(msg.subscription, msg.result);
}
Async::Ready(Some(SubscriberMsg::NewSubscriber(id, chan))) => {
self.handle_new_subscription(id, chan);
}
Async::Ready(Some(SubscriberMsg::RemoveSubscriber(id))) => {
self.handle_removal(id);
}
}
}
if self.should_shut_down {
trace!(
"shutting down notification handler for notification '{}'",
self.notification_method
);
Ok(Async::Ready(()))
} else {
Ok(Async::NotReady)
}
}
}
/// A trait for constructing the usual client handles with coupled `Subscriber` structs.
pub trait SubscriberTransport: DuplexTransport {
/// Constructs a new client, client handle and a subscriber.
fn subscriber_client<E: Executor<Box<Future<Item = (), Error = ()> + Send>> + Send + Sized>(
self,
executor: E,
) -> (
jsonrpc_client_core::Client<Self, Server>,
ClientHandle,
Subscriber<E>,
);
}
/// Subscriber transport trait allows one to create a client future, a subscriber and a client
/// handle from a valid JSON-RPC transport.
impl<T: DuplexTransport> SubscriberTransport for T {
/// Constructs a new client, client handle and a subscriber.
fn subscriber_client<E: Executor<Box<Future<Item = (), Error = ()> + Send>> + Send>(
self,
executor: E,
) -> (
jsonrpc_client_core::Client<Self, Server>,
ClientHandle,
Subscriber<E>,
) {
let (server, server_handle) = Server::new();
let (client, client_handle) = self.with_server(server);
let subscriber = Subscriber::new(executor, client_handle.clone(), server_handle);
(client, client_handle, subscriber)
}
}
| true |
f9854918afc5325b26835f91643a342fc0084458
|
Rust
|
diffeo/kodama
|
/src/chain.rs
|
UTF-8
| 8,291 | 2.609375 | 3 |
[
"MIT"
] |
permissive
|
use std::mem;
use crate::condensed::CondensedMatrix;
use crate::dendrogram::Dendrogram;
use crate::float::Float;
use crate::method;
use crate::{LinkageState, MethodChain};
/// Perform hierarchical clustering using the "nearest neighbor chain"
/// algorithm as described in Müllner's paper.
///
/// In general, one should prefer to use
/// [`linkage`](fn.linkage.html),
/// since it tries to pick the fastest algorithm depending on the method
/// supplied.
pub fn nnchain<T: Float>(
dis: &mut [T],
observations: usize,
method: MethodChain,
) -> Dendrogram<T> {
let mut state = LinkageState::new();
let mut steps = Dendrogram::new(observations);
nnchain_with(&mut state, dis, observations, method, &mut steps);
steps
}
/// Like [`nnchain`](fn.nnchain.html), but amortizes allocation.
///
/// See [`linkage_with`](fn.linkage_with.html) for details.
#[inline(never)]
pub fn nnchain_with<T: Float>(
state: &mut LinkageState<T>,
dis: &mut [T],
observations: usize,
method: MethodChain,
steps: &mut Dendrogram<T>,
) {
method.square(dis);
let mut dis = CondensedMatrix::new(dis, observations);
steps.reset(dis.observations());
if dis.observations() == 0 {
return;
}
state.reset(dis.observations());
let (mut a, mut b, mut min);
state.chain.clear();
for _ in 0..dis.observations() - 1 {
if state.chain.len() < 4 {
a = state
.active
.iter()
.next()
.expect("at least one active observation");
state.chain.clear();
state.chain.push(a);
b = state.active.iter().nth(1).unwrap();
min = dis[[a, b]];
for i in state.active.range(b..).skip(1) {
if dis[[a, i]] < min {
min = dis[[a, i]];
b = i;
}
}
} else {
// All of these unwraps are guaranteed to succeed because
// state.chain has at least 4 elements.
state.chain.pop().unwrap();
state.chain.pop().unwrap();
b = state.chain.pop().unwrap();
a = state.chain[state.chain.len() - 1];
if a < b {
min = dis[[a, b]];
} else {
min = dis[[b, a]];
}
}
loop {
state.chain.push(b);
for x in state.active.range(..b) {
if dis[[x, b]] < min {
min = dis[[x, b]];
a = x;
}
}
for x in state.active.range(b..).skip(1) {
if dis[[b, x]] < min {
min = dis[[b, x]];
a = x;
}
}
b = a;
a = state.chain[state.chain.len() - 1];
if b == state.chain[state.chain.len() - 2] {
break;
}
}
if a > b {
mem::swap(&mut a, &mut b);
}
match method {
MethodChain::Single => single(state, &mut dis, a, b),
MethodChain::Complete => complete(state, &mut dis, a, b),
MethodChain::Average => average(state, &mut dis, a, b),
MethodChain::Weighted => weighted(state, &mut dis, a, b),
MethodChain::Ward => ward(state, &mut dis, a, b),
}
state.merge(steps, a, b, min);
}
state.set.relabel(steps, method.into_method());
method.sqrt(steps);
}
#[inline]
fn single<T: Float>(
state: &mut LinkageState<T>,
dis: &mut CondensedMatrix<'_, T>,
a: usize,
b: usize,
) {
for x in state.active.range(..a) {
method::single(dis[[x, a]], &mut dis[[x, b]]);
}
for x in state.active.range(a..b).skip(1) {
method::single(dis[[a, x]], &mut dis[[x, b]]);
}
for x in state.active.range(b..).skip(1) {
method::single(dis[[a, x]], &mut dis[[b, x]]);
}
}
#[inline]
fn complete<T: Float>(
state: &mut LinkageState<T>,
dis: &mut CondensedMatrix<'_, T>,
a: usize,
b: usize,
) {
for x in state.active.range(..a) {
method::complete(dis[[x, a]], &mut dis[[x, b]]);
}
for x in state.active.range(a..b).skip(1) {
method::complete(dis[[a, x]], &mut dis[[x, b]]);
}
for x in state.active.range(b..).skip(1) {
method::complete(dis[[a, x]], &mut dis[[b, x]]);
}
}
#[inline]
fn average<T: Float>(
state: &mut LinkageState<T>,
dis: &mut CondensedMatrix<'_, T>,
a: usize,
b: usize,
) {
let (size_a, size_b) = (state.sizes[a], state.sizes[b]);
for x in state.active.range(..a) {
method::average(dis[[x, a]], &mut dis[[x, b]], size_a, size_b);
}
for x in state.active.range(a..b).skip(1) {
method::average(dis[[a, x]], &mut dis[[x, b]], size_a, size_b);
}
for x in state.active.range(b..).skip(1) {
method::average(dis[[a, x]], &mut dis[[b, x]], size_a, size_b);
}
}
#[inline]
fn weighted<T: Float>(
state: &mut LinkageState<T>,
dis: &mut CondensedMatrix<'_, T>,
a: usize,
b: usize,
) {
for x in state.active.range(..a) {
method::weighted(dis[[x, a]], &mut dis[[x, b]]);
}
for x in state.active.range(a..b).skip(1) {
method::weighted(dis[[a, x]], &mut dis[[x, b]]);
}
for x in state.active.range(b..).skip(1) {
method::weighted(dis[[a, x]], &mut dis[[b, x]]);
}
}
#[inline]
fn ward<T: Float>(
state: &mut LinkageState<T>,
dis: &mut CondensedMatrix<'_, T>,
a: usize,
b: usize,
) {
let dist = dis[[a, b]];
let (size_a, size_b) = (state.sizes[a], state.sizes[b]);
for x in state.active.range(..a) {
method::ward(
dis[[x, a]],
&mut dis[[x, b]],
dist,
size_a,
size_b,
state.sizes[x],
);
}
for x in state.active.range(a..b).skip(1) {
method::ward(
dis[[a, x]],
&mut dis[[x, b]],
dist,
size_a,
size_b,
state.sizes[x],
);
}
for x in state.active.range(b..).skip(1) {
method::ward(
dis[[a, x]],
&mut dis[[b, x]],
dist,
size_a,
size_b,
state.sizes[x],
);
}
}
#[cfg(test)]
mod tests {
use super::nnchain;
use crate::test::DistinctMatrix;
use crate::{primitive, Method, MethodChain};
quickcheck::quickcheck! {
fn prop_nnchain_single_primitive(mat: DistinctMatrix) -> bool {
let dend_prim = primitive(
&mut mat.matrix(), mat.len(), Method::Single);
let dend_nnchain = nnchain(
&mut mat.matrix(), mat.len(), MethodChain::Single);
dend_prim == dend_nnchain
}
fn prop_nnchain_complete_primitive(mat: DistinctMatrix) -> bool {
let dend_prim = primitive(
&mut mat.matrix(), mat.len(), Method::Complete);
let dend_nnchain = nnchain(
&mut mat.matrix(), mat.len(), MethodChain::Complete);
dend_prim == dend_nnchain
}
fn prop_nnchain_average_primitive(mat: DistinctMatrix) -> bool {
let dend_prim = primitive(
&mut mat.matrix(), mat.len(), Method::Average);
let dend_nnchain = nnchain(
&mut mat.matrix(), mat.len(), MethodChain::Average);
dend_prim.eq_with_epsilon(&dend_nnchain, 0.0000000001)
}
fn prop_nnchain_weighted_primitive(mat: DistinctMatrix) -> bool {
let dend_prim = primitive(
&mut mat.matrix(), mat.len(), Method::Weighted);
let dend_nnchain = nnchain(
&mut mat.matrix(), mat.len(), MethodChain::Weighted);
dend_prim.eq_with_epsilon(&dend_nnchain, 0.0000000001)
}
fn prop_nnchain_ward_primitive(mat: DistinctMatrix) -> bool {
let dend_prim = primitive(
&mut mat.matrix(), mat.len(), Method::Ward);
let dend_nnchain = nnchain(
&mut mat.matrix(), mat.len(), MethodChain::Ward);
dend_prim.eq_with_epsilon(&dend_nnchain, 0.0000000001)
}
}
}
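// Usage sketch (hypothetical, assuming `Dendrogram::steps()` from the crate's public
// API): clustering three observations from their condensed pairwise distances.
#[cfg(test)]
mod usage_sketch {
    use super::nnchain;
    use crate::MethodChain;

    #[test]
    fn three_observations_single_linkage() {
        // Condensed distances: d(0,1) = 1.0, d(0,2) = 4.0, d(1,2) = 2.0.
        let mut condensed = vec![1.0f64, 4.0, 2.0];
        let dendrogram = nnchain(&mut condensed, 3, MethodChain::Single);
        // Three observations always produce exactly two merge steps.
        assert_eq!(dendrogram.steps().len(), 2);
    }
}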
| true |
d2681290350916269d5f8fe49e97a014ebe88f3a
|
Rust
|
theemathas/binary_turk
|
/game/src/castle.rs
|
UTF-8
| 3,003 | 3.015625 | 3 |
[
"MIT"
] |
permissive
|
use color::{Color, White, Black};
use square::{Square, File, Rank};
pub use self::Side::{Kingside, Queenside};
#[derive(PartialEq, Eq, Copy, Clone, Debug)]
pub enum Side {
Kingside,
Queenside,
}
impl Side {
pub fn require_empty_squares(self, c: Color) -> Vec<Square> {
match (c, self) {
(White, Kingside) => vec![Square::new(File(5), Rank(0)),
Square::new(File(6), Rank(0))],
(White, Queenside) => vec![Square::new(File(3), Rank(0)),
Square::new(File(2), Rank(0)),
Square::new(File(1), Rank(0))],
(Black, Kingside) => vec![Square::new(File(5), Rank(7)),
Square::new(File(6), Rank(7))],
(Black, Queenside) => vec![Square::new(File(3), Rank(7)),
Square::new(File(2), Rank(7)),
Square::new(File(1), Rank(7))],
}
}
pub fn require_no_attack(self, c: Color) -> Vec<Square> {
match (c, self) {
(White, Kingside) => vec![Square::new(File(4), Rank(0)),
Square::new(File(5), Rank(0)),
Square::new(File(6), Rank(0))],
(White, Queenside) => vec![Square::new(File(4), Rank(0)),
Square::new(File(3), Rank(0)),
Square::new(File(2), Rank(0))],
(Black, Kingside) => vec![Square::new(File(4), Rank(7)),
Square::new(File(5), Rank(7)),
Square::new(File(6), Rank(7))],
(Black, Queenside) => vec![Square::new(File(4), Rank(7)),
Square::new(File(3), Rank(7)),
Square::new(File(2), Rank(7))],
}
}
}
#[derive(PartialEq, Eq, Copy, Clone, Debug)]
pub struct CastlingData {
w_kingside: bool,
w_queenside: bool,
b_kingside: bool,
b_queenside: bool,
}
impl CastlingData {
pub fn new() -> CastlingData {
CastlingData {
w_kingside: false,
w_queenside: false,
b_kingside: false,
b_queenside: false,
}
}
pub fn get(&self, side: Side, c: Color) -> bool {
match (c, side) {
(White, Kingside) => self.w_kingside,
(White, Queenside) => self.w_queenside,
(Black, Kingside) => self.b_kingside,
(Black, Queenside) => self.b_queenside,
}
}
pub fn set(&mut self, side: Side, c: Color, val: bool) {
match (c, side) {
(White, Kingside) => self.w_kingside = val,
(White, Queenside) => self.w_queenside = val,
(Black, Kingside) => self.b_kingside = val,
(Black, Queenside) => self.b_queenside = val,
}
}
}
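// Usage sketch (hypothetical): tracking the gain and loss of White's kingside
// castling right with the accessors defined above.
#[cfg(test)]
mod usage_sketch {
    use super::{CastlingData, Kingside};
    use color::White;

    #[test]
    fn set_and_get() {
        let mut rights = CastlingData::new();
        rights.set(Kingside, White, true);
        assert!(rights.get(Kingside, White));
        rights.set(Kingside, White, false);
        assert!(!rights.get(Kingside, White));
    }
}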
| true |
ed65b363589d899c4f04596f6b03bc494010f34e
|
Rust
|
LucioFranco/clique
|
/tests/cluster.rs
|
UTF-8
| 1,542 | 2.8125 | 3 |
[
"MIT"
] |
permissive
|
use clique::{Cluster, Endpoint, Message, Transport2};
use std::collections::HashMap;
use tokio::sync::mpsc;
type Sender = mpsc::Sender<(Endpoint, Message)>;
type Receiver = mpsc::Receiver<(Endpoint, Message)>;
#[derive(Debug)]
struct SimulatedCluster {
nodes: HashMap<Endpoint, Sender>,
messages: Receiver,
}
impl SimulatedCluster {
pub fn new(size: usize) -> Self {
let (tx, rx) = mpsc::channel(1024);
let mut nodes = HashMap::new();
for i in 0..size {
let endpoint = format!("node:{}", i);
let (tx1, rx1) = mpsc::channel(1024);
let network = Network {
send: tx.clone(),
recv: rx1,
};
nodes.insert(endpoint.clone(), tx1);
if i == 0 {
tokio::spawn(async move { Cluster::start(network, endpoint).await });
}
}
Self {
nodes,
messages: rx,
}
}
}
#[derive(Debug)]
struct Network {
send: Sender,
recv: Receiver,
}
#[async_trait::async_trait]
impl Transport2 for Network {
type Error = std::io::Error;
async fn send_to(&mut self, dst: Endpoint, msg: Message) -> Result<(), Self::Error> {
self.send.send((dst, msg)).await.unwrap();
Ok(())
}
async fn recv(&mut self) -> Result<(Endpoint, Message), Self::Error> {
Ok(self.recv.recv().await.expect("message queue finished"))
}
}
#[tokio::test]
async fn single_node() {
SimulatedCluster::new(1);
// loop {}
}
| true |
722e9d7fa274ffb850deb88e9f22cc01766412bd
|
Rust
|
PinkDiamond1/drand-verify
|
/src/verify_js.rs
|
UTF-8
| 2,004 | 2.734375 | 3 |
[
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
] |
permissive
|
use wasm_bindgen::prelude::*;
use super::points::{g1_from_variable, InvalidPoint};
use super::verify::{verify, VerificationError};
struct VerifyWebError(pub String);
impl From<hex::FromHexError> for VerifyWebError {
fn from(source: hex::FromHexError) -> Self {
Self(source.to_string())
}
}
impl From<InvalidPoint> for VerifyWebError {
fn from(source: InvalidPoint) -> Self {
Self(source.to_string())
}
}
impl From<VerificationError> for VerifyWebError {
fn from(source: VerificationError) -> Self {
Self(source.to_string())
}
}
impl From<VerifyWebError> for JsValue {
fn from(source: VerifyWebError) -> JsValue {
JsValue::from_str(&source.0)
}
}
/// This is the entry point from JavaScript.
///
/// The argument types are chosen such that the JS binding is simple
/// (u32 can be expressed as number, u64 cannot; strings are easier than binary data).
///
/// The result type is translated to an exception in case of an error
/// and to a boolean value in case of success.
#[wasm_bindgen]
pub fn verify_beacon(
pk_hex: &str,
round: u32,
previous_signature_hex: &str,
signature_hex: &str,
) -> Result<bool, JsValue> {
Ok(verify_beacon_impl(
pk_hex,
round,
previous_signature_hex,
signature_hex,
)?)
}
/// Like verify_beacon but with the structured error type needed to translate between lower level errors and JsValue.
/// If you can show me how to translate from hex::FromHexError to JsValue without this intermediate function,
/// I'd be happy to learn how.
fn verify_beacon_impl(
pk_hex: &str,
round: u32,
previous_signature_hex: &str,
signature_hex: &str,
) -> Result<bool, VerifyWebError> {
let pk = g1_from_variable(&hex::decode(pk_hex)?)?;
let previous_signature = hex::decode(previous_signature_hex)?;
let signature = hex::decode(signature_hex)?;
let result = verify(&pk, round.into(), &previous_signature, &signature)?;
Ok(result)
}
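// Sketch, not part of the original file: verify_beacon_impl avoids wasm types in its
// signature, so (assuming the crate also builds for a native target) the error path can
// be exercised directly. Malformed hex surfaces as a VerifyWebError converted from
// hex::FromHexError rather than a panic.
#[cfg(test)]
mod verify_beacon_impl_sketch {
    use super::verify_beacon_impl;

    #[test]
    fn malformed_hex_is_an_error() {
        assert!(verify_beacon_impl("not-hex", 1, "", "").is_err());
    }
}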
| true |
d1848d006d302193610b45d8ec2a5c6e1e8a9657
|
Rust
|
domain-independent-dp/didp-rs
|
/didp-yaml/src/dypdl_parser/table_registry_parser.rs
|
UTF-8
| 56,222 | 2.78125 | 3 |
[
"Apache-2.0",
"MIT"
] |
permissive
|
use crate::util;
use dypdl::prelude::*;
use dypdl::{StateMetadata, TableRegistry};
use lazy_static::lazy_static;
use rustc_hash::FxHashMap;
use std::error::Error;
use std::fmt;
use std::str;
use yaml_rust::Yaml;
enum TableReturnType {
Integer(Integer),
Continuous(Continuous),
Set(Set),
Vector(usize, Vector),
Element(Element),
Bool(bool),
}
/// Returns tables of constants loaded from YAML
///
/// # Errors
///
/// If the format is invalid.
pub fn load_table_registry_from_yaml(
tables: &Yaml,
table_values: &Yaml,
metadata: &StateMetadata,
) -> Result<TableRegistry, Box<dyn Error>> {
lazy_static! {
static ref ARGS_KEY: yaml_rust::Yaml = yaml_rust::Yaml::from_str("args");
}
let tables = util::get_array(tables)?;
let mut table_names = Vec::with_capacity(tables.len());
let mut name_to_signature = FxHashMap::default();
let mut reserved_names = metadata.get_name_set();
for value in tables {
let map = util::get_map(value)?;
let name = util::get_string_by_key(map, "name")?;
if let Some(name) = reserved_names.get(&name) {
return Err(util::YamlContentErr::new(format!(
"table name `{}` is already used",
name
))
.into());
}
reserved_names.insert(name.clone());
let args = match map.get(&ARGS_KEY) {
Some(value) => util::get_string_array(value)?,
None => Vec::new(),
};
let mut arg_types = Vec::with_capacity(args.len());
for object in &args {
if let Some(value) = metadata.name_to_object_type.get(object) {
arg_types.push(*value);
} else {
return Err(
util::YamlContentErr::new(format!("no such object `{}`", object)).into(),
);
}
}
let return_type = util::get_string_by_key(map, "type")?;
match &return_type[..] {
"integer" => {
if let Ok(value) = util::get_numeric_by_key(map, "default") {
name_to_signature
.insert(name.clone(), (arg_types, TableReturnType::Integer(value)));
} else {
name_to_signature
.insert(name.clone(), (arg_types, TableReturnType::Integer(0)));
}
}
"continuous" => {
if let Ok(value) = util::get_numeric_by_key(map, "default") {
name_to_signature.insert(
name.clone(),
(arg_types, TableReturnType::Continuous(value)),
);
} else {
name_to_signature
.insert(name.clone(), (arg_types, TableReturnType::Continuous(0.0)));
}
}
"set" => {
let object_name = util::get_string_by_key(map, "object")?;
let object = match metadata.name_to_object_type.get(&object_name) {
Some(object) => *object,
None => {
return Err(util::YamlContentErr::new(format!(
"no such object `{}`",
object_name
))
.into())
}
};
let n = metadata.object_numbers[object];
let mut default = Set::with_capacity(n);
if let Ok(array) = util::get_usize_array_by_key(map, "default") {
for v in array {
if v >= n {
return Err(util::YamlContentErr::new(format!(
"element `{}` is too large for object `{}`",
v, object_name
))
.into());
}
default.insert(v);
}
}
name_to_signature.insert(name.clone(), (arg_types, TableReturnType::Set(default)));
}
"vector" => {
let object_name = util::get_string_by_key(map, "object")?;
let object = match metadata.name_to_object_type.get(&object_name) {
Some(object) => *object,
None => {
return Err(util::YamlContentErr::new(format!(
"no such object `{}`",
object_name
))
.into())
}
};
let n = metadata.object_numbers[object];
let default = match util::get_usize_array_by_key(map, "default") {
Ok(array) => {
for v in &array {
if *v >= n {
return Err(util::YamlContentErr::new(format!(
"element `{}` is too large for object `{}`",
*v, object_name
))
.into());
}
}
array
}
_ => Vec::new(),
};
name_to_signature.insert(
name.clone(),
(arg_types, TableReturnType::Vector(n, default)),
);
}
"element" => {
if let Ok(value) = util::get_usize_by_key(map, "default") {
name_to_signature
.insert(name.clone(), (arg_types, TableReturnType::Element(value)));
} else {
name_to_signature
.insert(name.clone(), (arg_types, TableReturnType::Element(0)));
}
}
"bool" => {
if let Ok(value) = util::get_bool_by_key(map, "default") {
name_to_signature
.insert(name.clone(), (arg_types, TableReturnType::Bool(value)));
} else {
name_to_signature
.insert(name.clone(), (arg_types, TableReturnType::Bool(false)));
}
}
_ => {
return Err(util::YamlContentErr::new(format!(
"no such table type `{}`",
return_type
))
.into())
}
}
table_names.push(name);
}
let mut registry = TableRegistry::default();
let table_values = util::get_map(table_values)?;
for name in table_names {
let (arg_types, return_type) = name_to_signature.get(&name).unwrap();
let value = util::get_yaml_by_key(table_values, &name)?;
if arg_types.is_empty() {
match return_type {
TableReturnType::Integer(_) => {
registry
.integer_tables
.name_to_constant
.insert(name, util::get_numeric(value)?);
}
TableReturnType::Continuous(_) => {
registry
.continuous_tables
.name_to_constant
.insert(name, util::get_numeric(value)?);
}
TableReturnType::Set(default) => {
let value = load_set_from_yaml(value, default.len())?;
registry.set_tables.name_to_constant.insert(name, value);
}
TableReturnType::Vector(capacity, _) => {
let value = load_vector_from_yaml(value, *capacity)?;
registry.vector_tables.name_to_constant.insert(name, value);
}
TableReturnType::Element(_) => {
registry
.element_tables
.name_to_constant
.insert(name, util::get_usize(value)?);
}
TableReturnType::Bool(_) => {
registry
.bool_tables
.name_to_constant
.insert(name, util::get_bool(value)?);
}
}
} else if arg_types.len() == 1 {
let size = metadata.object_numbers[arg_types[0]];
match return_type {
TableReturnType::Integer(default) => {
let f = load_numeric_table_1d_from_yaml(value, size, *default)?;
registry.add_table_1d(name, f)?;
}
TableReturnType::Continuous(default) => {
let f = load_numeric_table_1d_from_yaml(value, size, *default)?;
registry.add_table_1d(name, f)?;
}
TableReturnType::Set(default) => {
let f = load_set_table_1d_from_yaml(value, size, default)?;
registry.add_table_1d(name, f)?;
}
TableReturnType::Vector(capacity, default) => {
let f = load_vector_table_1d_from_yaml(value, size, default, *capacity)?;
registry.add_table_1d(name, f)?;
}
TableReturnType::Element(default) => {
let f = load_numeric_table_1d_from_yaml(value, size, *default)?;
registry.add_table_1d(name, f)?;
}
TableReturnType::Bool(default) => {
let f = load_bool_table_1d_from_yaml(value, size, *default)?;
registry.add_table_1d(name, f)?;
}
}
} else if arg_types.len() == 2 {
let size_x = metadata.object_numbers[arg_types[0]];
let size_y = metadata.object_numbers[arg_types[1]];
match return_type {
TableReturnType::Integer(default) => {
let f = load_numeric_table_2d_from_yaml(value, size_x, size_y, *default)?;
registry.add_table_2d(name, f)?;
}
TableReturnType::Continuous(default) => {
let f = load_numeric_table_2d_from_yaml(value, size_x, size_y, *default)?;
registry.add_table_2d(name, f)?;
}
TableReturnType::Set(default) => {
let f = load_set_table_2d_from_yaml(value, size_x, size_y, default)?;
registry.add_table_2d(name, f)?;
}
TableReturnType::Vector(capacity, default) => {
let f =
load_vector_table_2d_from_yaml(value, size_x, size_y, default, *capacity)?;
registry.add_table_2d(name, f)?;
}
TableReturnType::Element(default) => {
let f = load_numeric_table_2d_from_yaml(value, size_x, size_y, *default)?;
registry.add_table_2d(name, f)?;
}
TableReturnType::Bool(default) => {
let f = load_bool_table_2d_from_yaml(value, size_x, size_y, *default)?;
registry.add_table_2d(name, f)?;
}
}
} else if arg_types.len() == 3 {
let size_x = metadata.object_numbers[arg_types[0]];
let size_y = metadata.object_numbers[arg_types[1]];
let size_z = metadata.object_numbers[arg_types[2]];
match return_type {
TableReturnType::Integer(default) => {
let f =
load_numeric_table_3d_from_yaml(value, size_x, size_y, size_z, *default)?;
registry.add_table_3d(name, f)?;
}
TableReturnType::Continuous(default) => {
let f =
load_numeric_table_3d_from_yaml(value, size_x, size_y, size_z, *default)?;
registry.add_table_3d(name, f)?;
}
TableReturnType::Set(default) => {
let f = load_set_table_3d_from_yaml(value, size_x, size_y, size_z, default)?;
registry.add_table_3d(name, f)?;
}
TableReturnType::Vector(capacity, default) => {
let f = load_vector_table_3d_from_yaml(
value, size_x, size_y, size_z, default, *capacity,
)?;
registry.add_table_3d(name, f)?;
}
TableReturnType::Element(default) => {
let f =
load_numeric_table_3d_from_yaml(value, size_x, size_y, size_z, *default)?;
registry.add_table_3d(name, f)?;
}
TableReturnType::Bool(default) => {
let f = load_bool_table_3d_from_yaml(value, size_x, size_y, size_z, *default)?;
registry.add_table_3d(name, f)?;
}
}
} else {
let size: Vec<usize> = arg_types
.iter()
.map(|i| metadata.object_numbers[*i])
.collect();
match return_type {
TableReturnType::Integer(default) => {
let (f, default) = load_numeric_table_from_yaml(value, size, *default)?;
registry.add_table(name, f, default)?;
}
TableReturnType::Continuous(default) => {
let (f, default) = load_numeric_table_from_yaml(value, size, *default)?;
registry.add_table(name, f, default)?;
}
TableReturnType::Set(default) => {
let (f, default) = load_set_table_from_yaml(value, size, default.clone())?;
registry.add_table(name, f, default)?;
}
TableReturnType::Vector(capacity, default) => {
let (f, default) =
load_vector_table_from_yaml(value, size, default.clone(), *capacity)?;
registry.add_table(name, f, default)?;
}
TableReturnType::Element(default) => {
let (f, default) = load_numeric_table_from_yaml(value, size, *default)?;
registry.add_table(name, f, default)?;
}
TableReturnType::Bool(default) => {
let (f, default) = load_bool_table_from_yaml(value, size, *default)?;
registry.add_table(name, f, default)?;
}
}
}
}
Ok(registry)
}
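// Usage sketch (assumption, mirroring the tests at the bottom of this file): table
// signatures and table values come from two separate YAML documents, e.g.
//
// let tables = yaml_rust::YamlLoader::load_from_str("- {name: f0, type: integer}")?;
// let values = yaml_rust::YamlLoader::load_from_str("f0: 42")?;
// let registry = load_table_registry_from_yaml(&tables[0], &values[0], &metadata)?;
// assert_eq!(registry.integer_tables.name_to_constant["f0"], 42);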
fn load_numeric_table_1d_from_yaml<T: str::FromStr + num_traits::FromPrimitive + Copy>(
value: &Yaml,
size: usize,
default: T,
) -> Result<Vec<T>, util::YamlContentErr>
where
<T as str::FromStr>::Err: fmt::Debug,
{
let mut body: Vec<T> = (0..size).map(|_| default).collect();
let map = util::get_map(value)?;
for (args, value) in map {
let args = util::get_usize(args)?;
let value = util::get_numeric(value)?;
if args >= size {
return Err(util::YamlContentErr::new(format!(
"`{}` is greater than the number of the objects for table",
args,
)));
}
body[args] = value;
}
Ok(body)
}
fn load_numeric_table_2d_from_yaml<T: str::FromStr + num_traits::FromPrimitive + Copy>(
value: &Yaml,
size_x: usize,
size_y: usize,
default: T,
) -> Result<Vec<Vec<T>>, util::YamlContentErr>
where
<T as str::FromStr>::Err: fmt::Debug,
{
let mut body: Vec<Vec<T>> = (0..size_x)
.map(|_| (0..size_y).map(|_| default).collect())
.collect();
let map = util::get_map(value)?;
for (args, value) in map {
let args = util::get_usize_array(args)?;
let x = args[0];
let y = args[1];
let value = util::get_numeric(value)?;
if x >= size_x || y >= size_y {
return Err(util::YamlContentErr::new(format!(
"`({}, {})` is greater than the numbers of objects for table",
x, y,
)));
}
body[x][y] = value;
}
Ok(body)
}
fn load_numeric_table_3d_from_yaml<T: str::FromStr + num_traits::FromPrimitive + Copy>(
value: &Yaml,
size_x: usize,
size_y: usize,
size_z: usize,
default: T,
) -> Result<Vec<Vec<Vec<T>>>, util::YamlContentErr>
where
<T as str::FromStr>::Err: fmt::Debug,
{
let mut body: Vec<Vec<Vec<T>>> = (0..size_x)
.map(|_| {
(0..size_y)
.map(|_| (0..size_z).map(|_| default).collect())
.collect()
})
.collect();
let map = util::get_map(value)?;
for (args, value) in map {
let args = util::get_usize_array(args)?;
let x = args[0];
let y = args[1];
let z = args[2];
let value = util::get_numeric(value)?;
if x >= size_x || y >= size_y || z >= size_z {
return Err(util::YamlContentErr::new(format!(
"`({}, {}, {})` is greater than the numbers of objects for table",
x, y, z,
)));
}
body[x][y][z] = value;
}
Ok(body)
}
fn load_numeric_table_from_yaml<T: str::FromStr + num_traits::FromPrimitive>(
value: &Yaml,
size: Vec<usize>,
default: T,
) -> Result<(FxHashMap<Vec<Element>, T>, T), util::YamlContentErr>
where
<T as str::FromStr>::Err: fmt::Debug,
{
let map = util::get_map(value)?;
let mut body = FxHashMap::default();
for (args, value) in map {
let args = util::get_usize_array(args)?;
if args.len() != size.len() {
return Err(util::YamlContentErr::new(format!(
"expected `{}` arguments for table, but passed `{}`",
size.len(),
args.len()
)));
}
let value = util::get_numeric(value)?;
if args.iter().zip(size.iter()).any(|(a, b)| a >= b) {
return Err(util::YamlContentErr::new(format!(
"`{:?}` is greater than the numbers of objects for table",
args,
)));
}
body.insert(args, value);
}
Ok((body, default))
}
fn load_bool_table_1d_from_yaml(
value: &Yaml,
size: usize,
default: bool,
) -> Result<Vec<bool>, util::YamlContentErr> {
let mut body: Vec<bool> = (0..size).map(|_| default).collect();
let map = util::get_map(value)?;
for (args, value) in map {
let args = util::get_usize(args)?;
let value = util::get_bool(value)?;
if args >= size {
return Err(util::YamlContentErr::new(format!(
"`{}` is greater than the number of the objects for table",
args,
)));
}
body[args] = value;
}
Ok(body)
}
fn load_bool_table_2d_from_yaml(
value: &Yaml,
size_x: usize,
size_y: usize,
default: bool,
) -> Result<Vec<Vec<bool>>, util::YamlContentErr> {
let mut body: Vec<Vec<bool>> = (0..size_x)
.map(|_| (0..size_y).map(|_| default).collect())
.collect();
let map = util::get_map(value)?;
for (args, value) in map {
let args = util::get_usize_array(args)?;
let x = args[0];
let y = args[1];
let value = util::get_bool(value)?;
if x >= size_x || y >= size_y {
return Err(util::YamlContentErr::new(format!(
"`({}, {})` is greater than the numbers of objects for table",
x, y,
)));
}
body[x][y] = value;
}
Ok(body)
}
fn load_bool_table_3d_from_yaml(
value: &Yaml,
size_x: usize,
size_y: usize,
size_z: usize,
default: bool,
) -> Result<Vec<Vec<Vec<bool>>>, util::YamlContentErr> {
let mut body: Vec<Vec<Vec<bool>>> = (0..size_x)
.map(|_| {
(0..size_y)
.map(|_| (0..size_z).map(|_| default).collect())
.collect()
})
.collect();
let map = util::get_map(value)?;
for (args, value) in map {
let args = util::get_usize_array(args)?;
let x = args[0];
let y = args[1];
let z = args[2];
let value = util::get_bool(value)?;
if x >= size_x || y >= size_y || z >= size_z {
return Err(util::YamlContentErr::new(format!(
"`({}, {}, {})` is greater than the numbers of objects for table",
x, y, z,
)));
}
body[x][y][z] = value;
}
Ok(body)
}
fn load_bool_table_from_yaml(
value: &Yaml,
size: Vec<usize>,
default: bool,
) -> Result<(FxHashMap<Vec<Element>, bool>, bool), util::YamlContentErr> {
let map = util::get_map(value)?;
let mut body = FxHashMap::default();
for (args, value) in map {
let args = util::get_usize_array(args)?;
if args.len() != size.len() {
return Err(util::YamlContentErr::new(format!(
"expected `{}` arguments for table, but passed `{}`",
size.len(),
args.len()
)));
}
let value = util::get_bool(value)?;
if args.iter().zip(size.iter()).any(|(a, b)| a >= b) {
return Err(util::YamlContentErr::new(format!(
"`{:?}` is greater than the numbers of objects for table",
args,
)));
}
body.insert(args, value);
}
Ok((body, default))
}
fn load_set_from_yaml(value: &Yaml, capacity: usize) -> Result<Set, util::YamlContentErr> {
let array = util::get_usize_array(value)?;
let mut set = Set::with_capacity(capacity);
for v in array {
if v >= capacity {
return Err(util::YamlContentErr::new(format!(
"element `{}` in a set table is too large for the object",
v,
)));
}
set.insert(v);
}
Ok(set)
}
fn load_set_table_1d_from_yaml(
value: &Yaml,
size: usize,
default: &Set,
) -> Result<Vec<Set>, util::YamlContentErr> {
let mut body: Vec<Set> = (0..size).map(|_| default.clone()).collect();
let map = util::get_map(value)?;
for (args, value) in map {
let args = util::get_usize(args)?;
let value = load_set_from_yaml(value, default.len())?;
if args >= size {
return Err(util::YamlContentErr::new(format!(
"`{}` is greater than the number of the objects for table",
args,
)));
}
body[args] = value;
}
Ok(body)
}
fn load_set_table_2d_from_yaml(
value: &Yaml,
size_x: usize,
size_y: usize,
default: &Set,
) -> Result<Vec<Vec<Set>>, util::YamlContentErr> {
let mut body: Vec<Vec<Set>> = (0..size_x)
.map(|_| (0..size_y).map(|_| default.clone()).collect())
.collect();
let map = util::get_map(value)?;
for (args, value) in map {
let args = util::get_usize_array(args)?;
let x = args[0];
let y = args[1];
let value = load_set_from_yaml(value, default.len())?;
if x >= size_x || y >= size_y {
return Err(util::YamlContentErr::new(format!(
"`({}, {})` is greater than the numbers of objects for table",
x, y,
)));
}
body[x][y] = value;
}
Ok(body)
}
fn load_set_table_3d_from_yaml(
value: &Yaml,
size_x: usize,
size_y: usize,
size_z: usize,
default: &Set,
) -> Result<Vec<Vec<Vec<Set>>>, util::YamlContentErr> {
let mut body: Vec<Vec<Vec<Set>>> = (0..size_x)
.map(|_| {
(0..size_y)
.map(|_| (0..size_z).map(|_| default.clone()).collect())
.collect()
})
.collect();
let map = util::get_map(value)?;
for (args, value) in map {
let args = util::get_usize_array(args)?;
let x = args[0];
let y = args[1];
let z = args[2];
let value = load_set_from_yaml(value, default.len())?;
if x >= size_x || y >= size_y || z >= size_z {
return Err(util::YamlContentErr::new(format!(
"`({}, {}, {})` is greater than the numbers of objects for table",
x, y, z,
)));
}
body[x][y][z] = value;
}
Ok(body)
}
fn load_set_table_from_yaml(
value: &Yaml,
size: Vec<usize>,
default: Set,
) -> Result<(FxHashMap<Vec<Element>, Set>, Set), util::YamlContentErr> {
let map = util::get_map(value)?;
let mut body = FxHashMap::default();
for (args, value) in map {
let args = util::get_usize_array(args)?;
if args.len() != size.len() {
return Err(util::YamlContentErr::new(format!(
"expected `{}` arguments for table, but passed `{}`",
size.len(),
args.len()
)));
}
let value = load_set_from_yaml(value, default.len())?;
if args.iter().zip(size.iter()).any(|(a, b)| a >= b) {
return Err(util::YamlContentErr::new(format!(
"`{:?}` is greater than the numbers of objects for table",
args,
)));
}
body.insert(args, value);
}
Ok((body, default))
}
fn load_vector_from_yaml(value: &Yaml, capacity: usize) -> Result<Vector, util::YamlContentErr> {
let value = util::get_usize_array(value)?;
for v in &value {
if *v >= capacity {
return Err(util::YamlContentErr::new(format!(
"element `{}` in a vector table is too large for the object",
*v,
)));
}
}
Ok(value)
}
fn load_vector_table_1d_from_yaml(
value: &Yaml,
size: usize,
default: &[Element],
capacity: usize,
) -> Result<Vec<Vector>, util::YamlContentErr> {
let mut body: Vec<Vector> = (0..size).map(|_| default.to_vec()).collect();
let map = util::get_map(value)?;
for (args, value) in map {
let args = util::get_usize(args)?;
let value = load_vector_from_yaml(value, capacity)?;
if args >= size {
return Err(util::YamlContentErr::new(format!(
"`{}` is greater than the number of the objects for table",
args,
)));
}
body[args] = value;
}
Ok(body)
}
fn load_vector_table_2d_from_yaml(
value: &Yaml,
size_x: usize,
size_y: usize,
default: &[Element],
capacity: usize,
) -> Result<Vec<Vec<Vector>>, util::YamlContentErr> {
let mut body: Vec<Vec<Vector>> = (0..size_x)
.map(|_| (0..size_y).map(|_| default.to_vec()).collect())
.collect();
let map = util::get_map(value)?;
for (args, value) in map {
let args = util::get_usize_array(args)?;
let x = args[0];
let y = args[1];
let value = load_vector_from_yaml(value, capacity)?;
if x >= size_x || y >= size_y {
return Err(util::YamlContentErr::new(format!(
"`({}, {})` is greater than the numbers of objects for table",
x, y,
)));
}
body[x][y] = value;
}
Ok(body)
}
fn load_vector_table_3d_from_yaml(
value: &Yaml,
size_x: usize,
size_y: usize,
size_z: usize,
default: &[Element],
capacity: usize,
) -> Result<Vec<Vec<Vec<Vector>>>, util::YamlContentErr> {
let mut body: Vec<Vec<Vec<Vector>>> = (0..size_x)
.map(|_| {
(0..size_y)
.map(|_| (0..size_z).map(|_| default.to_vec()).collect())
.collect()
})
.collect();
let map = util::get_map(value)?;
for (args, value) in map {
let args = util::get_usize_array(args)?;
let x = args[0];
let y = args[1];
let z = args[2];
let value = load_vector_from_yaml(value, capacity)?;
if x >= size_x || y >= size_y || z >= size_z {
return Err(util::YamlContentErr::new(format!(
"`({}, {}, {})` is greater than the numbers of objects for table",
x, y, z,
)));
}
body[x][y][z] = value;
}
Ok(body)
}
fn load_vector_table_from_yaml(
value: &Yaml,
size: Vec<usize>,
default: Vector,
capacity: usize,
) -> Result<(FxHashMap<Vec<Element>, Vector>, Vector), util::YamlContentErr> {
let map = util::get_map(value)?;
let mut body = FxHashMap::default();
for (args, value) in map {
let args = util::get_usize_array(args)?;
if args.len() != size.len() {
return Err(util::YamlContentErr::new(format!(
"expected `{}` arguments for table, but passed `{}`",
size.len(),
args.len()
)));
}
let value = load_vector_from_yaml(value, capacity)?;
if args.iter().zip(size.iter()).any(|(a, b)| a >= b) {
return Err(util::YamlContentErr::new(format!(
"`{:?}` is greater than the numbers of objects for table",
args,
)));
}
body.insert(args, value);
}
Ok((body, default))
}
#[cfg(test)]
mod tests {
use super::*;
use approx::assert_relative_eq;
use dypdl::{Table1DHandle, Table2DHandle, Table3DHandle, TableHandle};
#[test]
fn load_from_yaml_ok() {
let mut metadata = StateMetadata::default();
let ob = metadata.add_object_type(String::from("object"), 3);
assert!(ob.is_ok());
let ob = ob.unwrap();
let result = metadata.add_element_variable(String::from("e0"), ob);
assert!(result.is_ok());
let mut expected = TableRegistry::default();
expected
.integer_tables
.name_to_constant
.insert(String::from("i0"), 0);
let result = expected.add_table_1d(String::from("i1"), vec![10, 20, 30]);
assert!(result.is_ok());
let result = expected.add_table_2d(
String::from("i2"),
vec![vec![10, 20, 30], vec![10, 10, 10], vec![10, 10, 10]],
);
assert!(result.is_ok());
let result = expected.add_table_3d(
String::from("i3"),
vec![
vec![vec![10, 20, 30], vec![0, 0, 0], vec![0, 0, 0]],
vec![vec![0, 0, 0], vec![0, 0, 0], vec![0, 0, 0]],
vec![vec![0, 0, 0], vec![0, 0, 0], vec![0, 0, 0]],
],
);
assert!(result.is_ok());
let mut map = FxHashMap::default();
let key = vec![0, 1, 0, 0];
map.insert(key, 100);
let key = vec![0, 1, 0, 1];
map.insert(key, 200);
let key = vec![0, 1, 2, 0];
map.insert(key, 300);
let key = vec![0, 1, 2, 1];
map.insert(key, 400);
let result = expected.add_table(String::from("i4"), map, 0);
assert!(result.is_ok());
expected
.continuous_tables
.name_to_constant
.insert(String::from("c0"), 0.0);
let result = expected.add_table_1d(String::from("c1"), vec![10.0, 20.0, 30.0]);
assert!(result.is_ok());
let result = expected.add_table_2d(
String::from("c2"),
vec![
vec![10.0, 20.0, 30.0],
vec![10.0, 10.0, 10.0],
vec![10.0, 10.0, 10.0],
],
);
assert!(result.is_ok());
let result = expected.add_table_3d(
String::from("c3"),
vec![
vec![
vec![10.0, 20.0, 30.0],
vec![0.0, 0.0, 0.0],
vec![0.0, 0.0, 0.0],
],
vec![
vec![0.0, 0.0, 0.0],
vec![0.0, 0.0, 0.0],
vec![0.0, 0.0, 0.0],
],
vec![
vec![0.0, 0.0, 0.0],
vec![0.0, 0.0, 0.0],
vec![0.0, 0.0, 0.0],
],
],
);
assert!(result.is_ok());
let mut map = FxHashMap::default();
let key = vec![0, 1, 0, 0];
map.insert(key, 100.0);
let key = vec![0, 1, 0, 1];
map.insert(key, 200.0);
let key = vec![0, 1, 2, 0];
map.insert(key, 300.0);
let key = vec![0, 1, 2, 1];
map.insert(key, 400.0);
let result = expected.add_table(String::from("c4"), map, 0.0);
assert!(result.is_ok());
expected
.bool_tables
.name_to_constant
.insert(String::from("b0"), true);
let result = expected.add_table_1d(String::from("b1"), vec![true, false, false]);
assert!(result.is_ok());
let result = expected.add_table_2d(
String::from("b2"),
vec![
vec![true, false, false],
vec![false, false, false],
vec![false, false, false],
],
);
assert!(result.is_ok());
let result = expected.add_table_3d(
String::from("b3"),
vec![
vec![
vec![true, false, false],
vec![false, false, false],
vec![false, false, false],
],
vec![
vec![true, false, false],
vec![false, false, false],
vec![false, false, false],
],
vec![
vec![true, false, false],
vec![false, false, false],
vec![false, false, false],
],
],
);
assert!(result.is_ok());
let mut map = FxHashMap::default();
let key = vec![0, 1, 0, 0];
map.insert(key, true);
let key = vec![0, 1, 0, 1];
map.insert(key, false);
let key = vec![0, 1, 2, 0];
map.insert(key, false);
let key = vec![0, 1, 2, 1];
map.insert(key, false);
let result = expected.add_table(String::from("b4"), map, false);
assert!(result.is_ok());
let mut set = Set::with_capacity(3);
set.insert(0);
set.insert(2);
let default = Set::with_capacity(3);
expected
.set_tables
.name_to_constant
.insert(String::from("s0"), set.clone());
let result = expected.add_table_1d(
String::from("s1"),
vec![set.clone(), default.clone(), default.clone()],
);
assert!(result.is_ok());
let result = expected.add_table_2d(
String::from("s2"),
vec![
vec![set.clone(), default.clone(), default.clone()],
vec![default.clone(), default.clone(), default.clone()],
vec![default.clone(), default.clone(), default.clone()],
],
);
assert!(result.is_ok());
let result = expected.add_table_3d(
String::from("s3"),
vec![
vec![
vec![set.clone(), default.clone(), default.clone()],
vec![default.clone(), default.clone(), default.clone()],
vec![default.clone(), default.clone(), default.clone()],
],
vec![
vec![set.clone(), default.clone(), default.clone()],
vec![default.clone(), default.clone(), default.clone()],
vec![default.clone(), default.clone(), default.clone()],
],
vec![
vec![set.clone(), default.clone(), default.clone()],
vec![default.clone(), default.clone(), default.clone()],
],
],
);
assert!(result.is_ok());
let mut map = FxHashMap::default();
let key = vec![0, 1, 0, 0];
map.insert(key, set);
let key = vec![0, 1, 0, 1];
map.insert(key, default.clone());
let key = vec![0, 1, 2, 0];
map.insert(key, default.clone());
let key = vec![0, 1, 2, 1];
map.insert(key, default.clone());
let result = expected.add_table(String::from("s4"), map, default);
assert!(result.is_ok());
let vector = vec![0, 2];
let default = Vec::new();
expected
.vector_tables
.name_to_constant
.insert(String::from("v0"), vector.clone());
let result = expected.add_table_1d(
String::from("v1"),
vec![vector.clone(), default.clone(), default.clone()],
);
assert!(result.is_ok());
let result = expected.add_table_2d(
String::from("v2"),
vec![
vec![vector.clone(), default.clone(), default.clone()],
vec![default.clone(), default.clone(), default.clone()],
vec![default.clone(), default.clone(), default.clone()],
],
);
assert!(result.is_ok());
let result = expected.add_table_3d(
String::from("v3"),
vec![
vec![
vec![vector.clone(), default.clone(), default.clone()],
vec![default.clone(), default.clone(), default.clone()],
vec![default.clone(), default.clone(), default.clone()],
],
vec![
vec![vector.clone(), default.clone(), default.clone()],
vec![default.clone(), default.clone(), default.clone()],
vec![default.clone(), default.clone(), default.clone()],
],
vec![
vec![vector.clone(), default.clone(), default.clone()],
vec![default.clone(), default.clone(), default.clone()],
],
],
);
assert!(result.is_ok());
let mut map = FxHashMap::default();
let key = vec![0, 1, 0, 0];
map.insert(key, vector);
let key = vec![0, 1, 0, 1];
map.insert(key, default.clone());
let key = vec![0, 1, 2, 0];
map.insert(key, default.clone());
let key = vec![0, 1, 2, 1];
map.insert(key, default.clone());
let result = expected.add_table(String::from("v4"), map, default);
assert!(result.is_ok());
expected
.element_tables
.name_to_constant
.insert(String::from("t0"), 1);
let result: Result<Table1DHandle<Element>, _> =
expected.add_table_1d(String::from("t1"), vec![1, 0, 0]);
assert!(result.is_ok());
let result: Result<Table2DHandle<Element>, _> = expected.add_table_2d(
String::from("t2"),
vec![vec![1, 0, 0], vec![0, 0, 0], vec![0, 0, 0]],
);
assert!(result.is_ok());
let result: Result<Table3DHandle<Element>, _> = expected.add_table_3d(
String::from("t3"),
vec![
vec![vec![1, 0, 0], vec![0, 0, 0], vec![0, 0, 0]],
vec![vec![1, 0, 0], vec![0, 0, 0], vec![0, 0, 0]],
vec![vec![1, 0, 0], vec![0, 0, 0], vec![0, 0, 0]],
],
);
assert!(result.is_ok());
let mut map = FxHashMap::default();
let key = vec![0, 1, 0, 0];
map.insert(key, 1);
let key = vec![0, 1, 0, 1];
map.insert(key, 0);
let key = vec![0, 1, 2, 0];
map.insert(key, 0);
let key = vec![0, 1, 2, 1];
map.insert(key, 0);
let result: Result<TableHandle<Element>, _> =
expected.add_table(String::from("t4"), map, 0);
assert!(result.is_ok());
let tables = r"
- name: i0
type: integer
- name: i1
type: integer
args:
- object
- name: i2
type: integer
args:
- object
- object
default: 10
- name: i3
type: integer
args: [object, object, object]
- name: i4
type: integer
args: [object, object, object, object]
- name: c0
type: continuous
args: []
- name: c1
type: continuous
args:
- object
- name: c2
type: continuous
args:
- object
- object
default: 10
- name: c3
type: continuous
args: [object, object, object]
- name: c4
type: continuous
args: [object, object, object, object]
- name: b0
type: bool
- name: b1
type: bool
args: [object]
- name: b2
type: bool
args: [object, object]
- name: b3
type: bool
args:
- object
- object
- object
default: false
- name: b4
type: bool
args:
- object
- object
- object
- object
- name: s0
type: set
object: object
- name: s1
type: set
object: object
args: [object]
default: []
- name: s2
type: set
object: object
args: [object, object]
- name: s3
type: set
object: object
args: [object, object, object]
- name: s4
type: set
object: object
args: [object, object, object, object]
- name: v0
type: vector
object: object
- name: v1
type: vector
object: object
args: [object]
default: []
- name: v2
type: vector
object: object
args: [object, object]
- name: v3
type: vector
object: object
args: [object, object, object]
- name: v4
type: vector
object: object
args: [object, object, object, object]
- name: t0
type: element
- name: t1
type: element
args:
- object
- name: t2
type: element
args:
- object
- object
default: 10
- name: t3
type: element
args: [object, object, object]
- name: t4
type: element
args: [object, object, object, object]
";
let table_values = r"
i0: 0
i1:
0: 10
1: 20
2: 30
i2: { [0, 0]: 10, [0, 1]: 20, [0, 2]: 30 }
i3: { [0, 0, 0]: 10, [0, 0, 1]: 20, [0, 0, 2]: 30 }
i4: { [0, 1, 0, 0]: 100, [0, 1, 0, 1]: 200, [0, 1, 2, 0]: 300, [0, 1, 2, 1]: 400 }
c0: 0
c1:
0: 10
1: 20
2: 30
c2: { [0, 0]: 10, [0, 1]: 20, [0, 2]: 30 }
c3: { [0, 0, 0]: 10, [0, 0, 1]: 20, [0, 0, 2]: 30 }
c4: { [0, 1, 0, 0]: 100, [0, 1, 0, 1]: 200, [0, 1, 2, 0]: 300, [0, 1, 2, 1]: 400 }
b0: true
b1: { 0: true, 1: false, 2: false }
b2: { [0, 0]: true }
b3: { [0, 0, 0]: true, [1, 0, 0]: true, [2, 0, 0]: true }
b4: { [0, 1, 0, 0]: true, [0, 1, 0, 1]: false, [0, 1, 2, 0]: false, [0, 1, 2, 1]: false }
s0: [0, 2]
s1: { 0: [0, 2] }
s2: { [0, 0]: [0, 2] }
s3: { [0, 0, 0]: [0, 2], [1, 0, 0]: [0, 2], [2, 0, 0]: [0, 2] }
s4: { [0, 1, 0, 0]: [0, 2]}
v0: [0, 2]
v1: { 0: [0, 2] }
v2: { [0, 0]: [0, 2] }
v3: { [0, 0, 0]: [0, 2], [1, 0, 0]: [0, 2], [2, 0, 0]: [0, 2] }
v4: { [0, 1, 0, 0]: [0, 2]}
t0: 0
t1: { 0: 1 }
t2: { [0, 0]: 1 }
t3: { [0, 0, 0]: 1, [1, 0, 0]: 1, [2, 0, 0]: 1 }
t4: { [0, 1, 0, 0]: 1 }
";
let tables = yaml_rust::YamlLoader::load_from_str(tables);
assert!(tables.is_ok());
let tables = tables.unwrap();
assert_eq!(tables.len(), 1);
let tables = &tables[0];
let table_values = yaml_rust::YamlLoader::load_from_str(table_values);
assert!(table_values.is_ok());
let table_values = table_values.unwrap();
assert_eq!(table_values.len(), 1);
let table_values = &table_values[0];
let registry = load_table_registry_from_yaml(tables, table_values, &metadata);
assert!(registry.is_ok());
let registry = registry.unwrap();
assert_eq!(registry.integer_tables, expected.integer_tables);
assert_relative_eq!(registry.continuous_tables, expected.continuous_tables);
assert_eq!(registry.bool_tables, expected.bool_tables);
}
#[test]
fn load_from_yaml_err() {
let mut metadata = StateMetadata::default();
let ob = metadata.add_object_type(String::from("object"), 3);
assert!(ob.is_ok());
let ob = ob.unwrap();
let v = metadata.add_element_variable(String::from("e0"), ob);
assert!(v.is_ok());
let tables = r"
- name: f0
type: integer
- name: f0
type: integer
args: [object]
";
let tables = yaml_rust::YamlLoader::load_from_str(tables);
assert!(tables.is_ok());
let tables = tables.unwrap();
assert_eq!(tables.len(), 1);
let tables = &tables[0];
let table_values = r"
f0: 0
f0:
0: 10
1: 20
2: 30
";
let table_values = yaml_rust::YamlLoader::load_from_str(table_values);
assert!(table_values.is_ok());
let table_values = table_values.unwrap();
assert_eq!(table_values.len(), 1);
let table_values = &table_values[0];
let registry = load_table_registry_from_yaml(tables, table_values, &metadata);
assert!(registry.is_err());
let tables = r"
- name: object
type: integer
";
let tables = yaml_rust::YamlLoader::load_from_str(tables);
assert!(tables.is_ok());
let tables = tables.unwrap();
assert_eq!(tables.len(), 1);
let tables = &tables[0];
let table_values = r"
object: 0
";
let table_values = yaml_rust::YamlLoader::load_from_str(table_values);
assert!(table_values.is_ok());
let table_values = table_values.unwrap();
assert_eq!(table_values.len(), 1);
let table_values = &table_values[0];
let registry = load_table_registry_from_yaml(tables, table_values, &metadata);
assert!(registry.is_err());
let tables = r"
- name: e0
type: integer
";
let tables = yaml_rust::YamlLoader::load_from_str(tables);
assert!(tables.is_ok());
let tables = tables.unwrap();
assert_eq!(tables.len(), 1);
let tables = &tables[0];
let table_values = r"
e0: 0
";
let table_values = yaml_rust::YamlLoader::load_from_str(table_values);
assert!(table_values.is_ok());
let table_values = table_values.unwrap();
assert_eq!(table_values.len(), 1);
let table_values = &table_values[0];
let registry = load_table_registry_from_yaml(tables, table_values, &metadata);
assert!(registry.is_err());
let tables = r"
- name: f1
type: integer
args: [null]
";
let tables = yaml_rust::YamlLoader::load_from_str(tables);
assert!(tables.is_ok());
let tables = tables.unwrap();
assert_eq!(tables.len(), 1);
let tables = &tables[0];
let table_values = r"
f1:
0: 10
1: 20
2: 30
";
let table_values = yaml_rust::YamlLoader::load_from_str(table_values);
assert!(table_values.is_ok());
let table_values = table_values.unwrap();
assert_eq!(table_values.len(), 1);
let table_values = &table_values[0];
let registry = load_table_registry_from_yaml(tables, table_values, &metadata);
assert!(registry.is_err());
let tables = r"
- name: f1
type: null
args: [object]
";
let tables = yaml_rust::YamlLoader::load_from_str(tables);
assert!(tables.is_ok());
let tables = tables.unwrap();
assert_eq!(tables.len(), 1);
let tables = &tables[0];
let registry = load_table_registry_from_yaml(tables, table_values, &metadata);
assert!(registry.is_err());
let tables = r"
- name: f1
type: integer
";
let tables = yaml_rust::YamlLoader::load_from_str(tables);
assert!(tables.is_ok());
let tables = tables.unwrap();
assert_eq!(tables.len(), 1);
let tables = &tables[0];
let registry = load_table_registry_from_yaml(tables, table_values, &metadata);
assert!(registry.is_err());
let tables = r"
- name: f1
args: [object]
";
let tables = yaml_rust::YamlLoader::load_from_str(tables);
assert!(tables.is_ok());
let tables = tables.unwrap();
assert_eq!(tables.len(), 1);
let tables = &tables[0];
let registry = load_table_registry_from_yaml(tables, table_values, &metadata);
assert!(registry.is_err());
let tables = r"
- type: integer
args: [object]
";
let tables = yaml_rust::YamlLoader::load_from_str(tables);
assert!(tables.is_ok());
let tables = tables.unwrap();
assert_eq!(tables.len(), 1);
let tables = &tables[0];
let registry = load_table_registry_from_yaml(tables, table_values, &metadata);
assert!(registry.is_err());
let tables = r"
- name: f1
type: integer
args: [object]
";
let tables = yaml_rust::YamlLoader::load_from_str(tables);
assert!(tables.is_ok());
let tables = tables.unwrap();
assert_eq!(tables.len(), 1);
let tables = &tables[0];
let table_values = r"
f1:
0: 10
1: 20
2: 30
3: 40
";
let table_values = yaml_rust::YamlLoader::load_from_str(table_values);
assert!(table_values.is_ok());
let table_values = table_values.unwrap();
assert_eq!(table_values.len(), 1);
let table_values = &table_values[0];
let registry = load_table_registry_from_yaml(tables, table_values, &metadata);
assert!(registry.is_err());
let table_values = r"
f2:
0: 10
1: 20
2: 30
";
let table_values = yaml_rust::YamlLoader::load_from_str(table_values);
assert!(table_values.is_ok());
let table_values = table_values.unwrap();
assert_eq!(table_values.len(), 1);
let table_values = &table_values[0];
let registry = load_table_registry_from_yaml(tables, table_values, &metadata);
assert!(registry.is_err());
let table_values = r"
f1:
0: 10
1: 2.1
2: 30
";
let table_values = yaml_rust::YamlLoader::load_from_str(table_values);
assert!(table_values.is_ok());
let table_values = table_values.unwrap();
assert_eq!(table_values.len(), 1);
let table_values = &table_values[0];
let registry = load_table_registry_from_yaml(tables, table_values, &metadata);
assert!(registry.is_err());
let table_values = r"
f1:
[0, 0]: 10
[0, 1]: 20
[0, 2]: 30
";
let table_values = yaml_rust::YamlLoader::load_from_str(table_values);
assert!(table_values.is_ok());
let table_values = table_values.unwrap();
assert_eq!(table_values.len(), 1);
let table_values = &table_values[0];
let registry = load_table_registry_from_yaml(tables, table_values, &metadata);
assert!(registry.is_err());
let tables = r"
- name: c0
type: continuous
args: [object]
";
let tables = yaml_rust::YamlLoader::load_from_str(tables);
assert!(tables.is_ok());
let tables = tables.unwrap();
assert_eq!(tables.len(), 1);
let tables = &tables[0];
let table_values = r"
c0:
0: true
1: 1.2
2: 1.5
";
let table_values = yaml_rust::YamlLoader::load_from_str(table_values);
assert!(table_values.is_ok());
let table_values = table_values.unwrap();
assert_eq!(table_values.len(), 1);
let table_values = &table_values[0];
let registry = load_table_registry_from_yaml(tables, table_values, &metadata);
assert!(registry.is_err());
let tables = r"
- name: b1
type: bool
args: [object, object]
";
let tables = yaml_rust::YamlLoader::load_from_str(tables);
assert!(tables.is_ok());
let tables = tables.unwrap();
assert_eq!(tables.len(), 1);
let tables = &tables[0];
let table_values = r"
b1:
0: true
1: false
2: false
";
let table_values = yaml_rust::YamlLoader::load_from_str(table_values);
assert!(table_values.is_ok());
let table_values = table_values.unwrap();
assert_eq!(table_values.len(), 1);
let table_values = &table_values[0];
let registry = load_table_registry_from_yaml(tables, table_values, &metadata);
assert!(registry.is_err());
let table_values = r"
b1:
[0, 0]: true
[0, 1]: 0
[0, 2]: false
";
let table_values = yaml_rust::YamlLoader::load_from_str(table_values);
assert!(table_values.is_ok());
let table_values = table_values.unwrap();
assert_eq!(table_values.len(), 1);
let table_values = &table_values[0];
let registry = load_table_registry_from_yaml(tables, table_values, &metadata);
assert!(registry.is_err());
let tables = r"
- name: b1
type: bool
";
let tables = yaml_rust::YamlLoader::load_from_str(tables);
assert!(tables.is_ok());
let tables = tables.unwrap();
assert_eq!(tables.len(), 1);
let tables = &tables[0];
let table_values = r"
b1:
[0, 0]: true
[0, 1]: 0
[0, 2]: false
";
let table_values = yaml_rust::YamlLoader::load_from_str(table_values);
assert!(table_values.is_ok());
let table_values = table_values.unwrap();
assert_eq!(table_values.len(), 1);
let table_values = &table_values[0];
let registry = load_table_registry_from_yaml(tables, table_values, &metadata);
assert!(registry.is_err());
let tables = r"
- name: f0
type: set
object: object
";
let tables = yaml_rust::YamlLoader::load_from_str(tables);
assert!(tables.is_ok());
let tables = tables.unwrap();
assert_eq!(tables.len(), 1);
let tables = &tables[0];
let table_values = r"
f0: [0, 10]
";
let table_values = yaml_rust::YamlLoader::load_from_str(table_values);
assert!(table_values.is_ok());
let table_values = table_values.unwrap();
assert_eq!(table_values.len(), 1);
let table_values = &table_values[0];
let registry = load_table_registry_from_yaml(tables, table_values, &metadata);
assert!(registry.is_err());
let tables = r"
- name: f0
type: set
object: null
";
let tables = yaml_rust::YamlLoader::load_from_str(tables);
assert!(tables.is_ok());
let tables = tables.unwrap();
assert_eq!(tables.len(), 1);
let tables = &tables[0];
let table_values = r"
f0: [0, 1]
";
let table_values = yaml_rust::YamlLoader::load_from_str(table_values);
assert!(table_values.is_ok());
let table_values = table_values.unwrap();
assert_eq!(table_values.len(), 1);
let table_values = &table_values[0];
let registry = load_table_registry_from_yaml(tables, table_values, &metadata);
assert!(registry.is_err());
let tables = r"
- name: f0
type: vector
object: null
";
let tables = yaml_rust::YamlLoader::load_from_str(tables);
assert!(tables.is_ok());
let tables = tables.unwrap();
assert_eq!(tables.len(), 1);
let tables = &tables[0];
let table_values = r"
f0: [0, 1]
";
let table_values = yaml_rust::YamlLoader::load_from_str(table_values);
assert!(table_values.is_ok());
let table_values = table_values.unwrap();
assert_eq!(table_values.len(), 1);
let table_values = &table_values[0];
let registry = load_table_registry_from_yaml(tables, table_values, &metadata);
assert!(registry.is_err());
let tables = r"
- name: f0
type: vector
object: object
";
let tables = yaml_rust::YamlLoader::load_from_str(tables);
assert!(tables.is_ok());
let tables = tables.unwrap();
assert_eq!(tables.len(), 1);
let tables = &tables[0];
let table_values = r"
f0: [0, 10]
";
let table_values = yaml_rust::YamlLoader::load_from_str(table_values);
assert!(table_values.is_ok());
let table_values = table_values.unwrap();
assert_eq!(table_values.len(), 1);
let table_values = &table_values[0];
let registry = load_table_registry_from_yaml(tables, table_values, &metadata);
assert!(registry.is_err());
}
}
| true |
def9cd72b49caca63097fb04c30bb0d33e756515
|
Rust
|
ibraheemdev/rustbot
|
/src/moderation.rs
|
UTF-8
| 3,130 | 3.0625 | 3 |
[
"MIT"
] |
permissive
|
use crate::{serenity, Context, Error, PrefixContext};
/// Deletes the bot's messages for cleanup
///
/// ?cleanup [limit]
///
/// Deletes the bot's messages for cleanup.
/// You can specify how many messages to look for. Only messages from the last 24 hours can be deleted.
#[poise::command(on_error = "crate::acknowledge_fail", slash_command)]
pub async fn cleanup(
ctx: Context<'_>,
#[description = "Number of messages to delete"] num_messages: Option<usize>,
) -> Result<(), Error> {
let num_messages = num_messages.unwrap_or(5);
let messages_to_delete = ctx
.channel_id()
.messages(ctx.discord(), |m| m.limit(100))
.await?
.into_iter()
.filter(|msg| {
if msg.author.id != ctx.data().bot_user_id {
return false;
}
if (ctx.created_at() - msg.timestamp).num_hours() >= 24 {
return false;
}
true
})
.take(num_messages);
ctx.channel_id()
.delete_messages(ctx.discord(), messages_to_delete)
.await?;
crate::acknowledge_success(ctx, "rustOk", '👌').await
}
/// Bans another person
///
/// ?ban <member> [reason]
///
/// Bans another person
#[poise::command(on_error = "crate::acknowledge_fail", aliases("banne"), slash_command)]
pub async fn ban(
ctx: Context<'_>,
#[description = "Banned user"] banned_user: serenity::Member,
#[description = "Ban reason"]
#[rest]
reason: Option<String>,
) -> Result<(), Error> {
poise::say_reply(
ctx,
format!(
"Banned user {}#{:0>4}{} {}",
banned_user.user.name,
banned_user.user.discriminator,
match reason {
Some(reason) => format!(" {}", reason.trim()),
None => String::new(),
},
crate::custom_emoji_code(ctx, "ferrisBanne", '🔨').await
),
)
.await?;
Ok(())
}
async fn rustify_inner(ctx: Context<'_>, users: &[serenity::Member]) -> Result<(), Error> {
for user in users {
ctx.discord()
.http
.add_member_role(
user.guild_id.0,
user.user.id.0,
ctx.data().rustacean_role.0,
ctx.author()
.map(|author| format!("You have been rusted by {}! owo", author.name))
.as_deref(),
)
.await?;
}
crate::acknowledge_success(ctx, "rustOk", '👌').await
}
/// Adds the Rustacean role to members
#[poise::command(on_error = "crate::acknowledge_prefix_fail", rename = "rustify")]
pub async fn prefix_rustify(
ctx: PrefixContext<'_>,
users: Vec<serenity::Member>,
) -> Result<(), Error> {
rustify_inner(Context::Prefix(ctx), &users).await
}
/// Adds the Rustacean role to a member
#[poise::command(
on_error = "crate::acknowledge_fail",
slash_command,
rename = "rustify"
)]
pub async fn slash_rustify(
ctx: Context<'_>,
#[description = "User to rustify"] user: serenity::Member,
) -> Result<(), Error> {
rustify_inner(ctx, &[user]).await
}
| true |
878ff52a40323e464e800e1caa9eab2f0d195ee0
|
Rust
|
nbliznashki/radix_new
|
/radix_column/src/asbytes.rs
|
UTF-8
| 880 | 3.3125 | 3 |
[] |
no_license
|
use std::mem::MaybeUninit;
pub trait AsBytes {
fn bytelen(&self) -> usize;
// SAFETY: The type T must not contain any references,
// so that it can simply be copied as raw bytes.
unsafe fn copy(&self, data: &mut [MaybeUninit<u8>]);
fn as_bytes(&self) -> &[u8];
fn from_bytes(data: &[u8]) -> Self;
}
impl AsBytes for String {
fn bytelen(&self) -> usize {
self.as_bytes().len()
}
unsafe fn copy(&self, data: &mut [MaybeUninit<u8>]) {
let len = data.len();
assert_eq!(len, self.bytelen());
std::intrinsics::copy_nonoverlapping(
self.as_bytes().as_ptr(),
data.as_mut_ptr() as *mut u8,
len,
);
}
fn as_bytes(&self) -> &[u8] {
self.as_bytes()
}
fn from_bytes(data: &[u8]) -> Self {
String::from_utf8(data.to_vec()).unwrap()
}
}
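// Sketch, not part of the original file: round-tripping a String through the safe
// half of the trait. `copy` is unsafe and needs a correctly sized uninitialized
// buffer, so it is deliberately not exercised here.
#[cfg(test)]
mod asbytes_sketch {
    use super::AsBytes;

    #[test]
    fn string_round_trip() {
        let s = String::from("radix");
        assert_eq!(s.bytelen(), 5);
        let restored = String::from_bytes(AsBytes::as_bytes(&s));
        assert_eq!(restored, s);
    }
}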
| true |
f9707c8ed9bb8bac21daa4644ba4ae9921c34a9d
|
Rust
|
hegza/oil-rs
|
/src/tracker/test.rs
|
UTF-8
| 5,159 | 2.9375 | 3 |
[
"MIT"
] |
permissive
|
use super::*;
use crate::datamodel::*;
use crate::view::tracker_cli::TrackerCli;
use chrono::{DateTime, Datelike, NaiveTime};
use lazy_static::lazy_static;
lazy_static! {
static ref TEST_EVENT: EventData = EventData::new(
Interval::FromLastCompletion(TimeDelta::Hm(0, 1)),
"Test EventData".to_string(),
);
}
#[test]
fn event_lifecycle() {
let mut tracker = Tracker::empty();
let handle = tracker.add_event(TEST_EVENT.clone());
// Verify that the event is accessible with its handle
let event = tracker.event_mut(handle).unwrap();
// Verify that the event is in dormant state
match event.1.status {
StatusKind::Dormant { .. } => {}
_ => unreachable!(),
}
// TODO: Verify that the event is set to trigger after the time delta
// Remove the event
tracker.remove_event(handle);
// Verify that the event is removed
assert!(tracker.event_mut(handle).is_none());
}
#[test]
fn multiset_done() {
let mut cli = TrackerCli::new(Tracker::empty());
let ev = TEST_EVENT.clone();
let events = {
let tracker = &mut cli.tracker;
// Add three events
let evs = (0..3)
.into_iter()
.map(|_| tracker.add_event(ev.clone()))
.collect::<Vec<Uid>>();
// Set events as triggered
evs.into_iter().for_each(|uid| {
tracker.event_mut(uid).unwrap().trigger_now();
});
tracker.events()
};
// Check that all events are triggered
assert!(events.iter().all(|(_, ev)| ev.is_triggered()));
// Pick two of the events to be set as done
let mut ev_it = events.iter().enumerate();
let set_ev_ids = ev_it
.by_ref()
.take(2)
.map(|(idx, (uid, _))| (idx, *uid))
.collect::<Vec<(usize, Uid)>>();
let unset_ev_ids = ev_it
.take(1)
.map(|(idx, (uid, _))| (idx, *uid))
.collect::<Vec<(usize, Uid)>>();
// Command to set two of the events as done, ie. "0 1"
let cmd = format!("{} {}", set_ev_ids[0].0, set_ev_ids[1].0);
// Set two events as done
cli.call(&cmd);
let tracker = &mut cli.tracker;
// Verify that the two events are done, and the last one is still triggered
assert!(
tracker.event(set_ev_ids[0].1).unwrap().is_done(),
"first event was not done after setting it done"
);
assert!(tracker.event(set_ev_ids[1].1).unwrap().is_done());
assert!(tracker.event(unset_ev_ids[0].1).unwrap().is_triggered());
}
#[test]
fn trigger() {
let mut cli = TrackerCli::new(Tracker::empty());
let ev = TEST_EVENT.clone();
let events = {
let tracker = &mut cli.tracker;
// Add two events
tracker.add_event(ev.clone());
tracker.add_event(ev);
tracker.events()
};
// Check that all events are not triggered
assert!(events.iter().all(|(_, ev)| !ev.is_triggered()));
// Pick an event to be set as done
let mut ev_it = events.into_iter().enumerate();
let (trig_ev_id, trig_uid) = {
let ev = ev_it.next().unwrap();
(ev.0, (ev.1).0)
};
let untrig_uid = (ev_it.next().unwrap().1).0;
// Trigger an event
// Command to trigger an event, ie. "trigger 1"
let cmd = format!("trigger {}", trig_ev_id);
cli.call(&cmd);
let tracker = &mut cli.tracker;
// Verify that one event is triggered, one is not
assert!(
tracker.event(trig_uid).unwrap().is_triggered(),
"first event was not triggered after setting it "
);
assert!(!tracker.event(untrig_uid).unwrap().is_triggered());
}
#[test]
fn complete() {
let mut tracker = Tracker::empty();
let handle = tracker.add_event(TEST_EVENT.clone());
// Verify that the event is accessible with its handle
let event = tracker.event_mut(handle).unwrap();
// Verify that the event is in dormant state
match event.1.status {
StatusKind::Dormant { .. } => {}
_ => unreachable!(),
}
// Trigger the event
event.trigger_now();
// Verify it's triggered
assert!(tracker.event_mut(handle).unwrap().is_triggered());
let event = tracker.event_mut(handle).unwrap();
// Complete the event handle
event.complete_now();
// Verify it's completed
assert!(tracker.event_mut(handle).unwrap().is_completed());
}
#[test]
fn month_end_triggers_next_day() {
let mut tracker = Tracker::empty();
let event = EventData::new(
Interval::Periodic(TimePeriod::Daily(
NaiveTime::parse_from_str("15:00", "%H:%M").unwrap(),
)),
"Daily event".to_string(),
);
let handle = tracker.add_event_with_status(
event,
Status::from_time(Time(
DateTime::parse_from_rfc3339("2020-01-31T14:00:00-02:00")
.unwrap()
.into(),
)),
);
// Verify event triggers next on Feb. 1st
let trigger_date = tracker
.event(handle)
.unwrap()
.next_trigger_time()
.unwrap()
.date()
.naive_local();
assert!(trigger_date.month() == 2 && trigger_date.day() == 1);
}
| true |
36abc37460d6779f86e05b55fdb98c52ccffd00a
|
Rust
|
ThinkChaos/rtsp-rs
|
/rtsp-common/src/version.rs
|
UTF-8
| 3,962 | 3.265625 | 3 |
[
"MIT"
] |
permissive
|
use std::convert::{Infallible, TryFrom};
use std::error::Error;
use std::fmt::{Display, Formatter, Result as FormatterResult};
#[derive(Clone, Copy, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
#[non_exhaustive]
pub enum Version {
Rtsp1_0,
Rtsp2_0,
}
impl Version {
pub fn as_encoded(&self) -> &'static [u8] {
self.as_str().as_bytes()
}
pub fn as_str(&self) -> &'static str {
use self::Version::*;
match self {
Rtsp1_0 => "RTSP/1.0",
Rtsp2_0 => "RTSP/2.0",
}
}
pub fn try_decode(value: &[u8]) -> Result<Self, DecodeError> {
use self::Version::*;
if value.len() != 8
|| value
.iter()
.take(5)
.map(u8::to_ascii_uppercase)
.ne(b"RTSP/".iter().cloned())
|| value[6] != b'.'
{
return Err(DecodeError::Invalid);
}
let major = value[5].checked_sub(b'0').ok_or(DecodeError::Invalid)?;
let minor = value[7].checked_sub(b'0').ok_or(DecodeError::Invalid)?;
if major == 1 && minor == 0 {
Ok(Rtsp1_0)
} else if major == 2 && minor == 0 {
Ok(Rtsp2_0)
} else if major > 9 || minor > 9 {
Err(DecodeError::Invalid)
} else {
Err(DecodeError::Unknown(major, minor))
}
}
}
impl Default for Version {
fn default() -> Self {
Version::Rtsp2_0
}
}
impl From<Version> for &'static [u8] {
fn from(value: Version) -> Self {
value.as_encoded()
}
}
impl From<Version> for &'static str {
fn from(value: Version) -> Self {
value.as_str()
}
}
impl<'version> TryFrom<&'version [u8]> for Version {
type Error = DecodeError;
fn try_from(value: &'version [u8]) -> Result<Self, Self::Error> {
Self::try_decode(value)
}
}
#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)]
#[non_exhaustive]
pub enum DecodeError {
Invalid,
Unknown(u8, u8),
}
impl Display for DecodeError {
fn fmt(&self, formatter: &mut Formatter) -> FormatterResult {
use self::DecodeError::*;
match self {
Invalid => formatter.write_str("invalid version"),
Unknown(major, minor) => write!(formatter, "unknown version '{}.{}'", major, minor),
}
}
}
impl Error for DecodeError {}
impl From<Infallible> for DecodeError {
fn from(_: Infallible) -> Self {
unreachable!()
}
}
#[cfg(test)]
pub mod tests {
use super::{DecodeError, Version};
#[test]
fn test_as_encoded() {
assert_eq!(Version::Rtsp1_0.as_encoded(), b"RTSP/1.0");
assert_eq!(Version::Rtsp2_0.as_encoded(), b"RTSP/2.0");
assert_eq!(
Version::try_decode(b"rtsp/1.0").unwrap().as_encoded(),
b"RTSP/1.0"
);
assert_eq!(
Version::try_decode(b"rtsp/2.0").unwrap().as_encoded(),
b"RTSP/2.0"
);
}
#[test]
fn test_try_decode() {
assert_eq!(Version::try_decode(b"RTSP/1.0"), Ok(Version::Rtsp1_0));
assert_eq!(Version::try_decode(b"rtsp/1.0"), Ok(Version::Rtsp1_0));
assert_eq!(Version::try_decode(b"RtSp/1.0"), Ok(Version::Rtsp1_0));
assert_eq!(Version::try_decode(b"RTSP/2.0"), Ok(Version::Rtsp2_0));
assert_eq!(Version::try_decode(b"RtSp/2.0"), Ok(Version::Rtsp2_0));
assert_eq!(Version::try_decode(b"rtsp/2.0"), Ok(Version::Rtsp2_0));
assert_eq!(Version::try_decode(b""), Err(DecodeError::Invalid));
assert_eq!(Version::try_decode(b"RTSP/a.b"), Err(DecodeError::Invalid));
assert_eq!(Version::try_decode(b"RTSP/2"), Err(DecodeError::Invalid));
assert_eq!(
Version::try_decode(b"RTSP/9.9"),
Err(DecodeError::Unknown(9, 9))
);
assert_eq!(
Version::try_decode(b"rtsp/0.0"),
Err(DecodeError::Unknown(0, 0))
);
}
}
| true |
f5e04ef27e4982f1128b4fbb1bae4db5d776a67e
|
Rust
|
roadrunner-craft/core
|
/src/world/world.rs
|
UTF-8
| 4,110 | 2.84375 | 3 |
[
"MIT"
] |
permissive
|
use crate::chunk::{Chunk, ChunkGrid, ChunkGridCoordinate, ChunkGroup};
use crate::utils::ThreadPool;
use crate::world::generation::generate_chunk;
use crate::world::generation::WorldSeed;
use crate::world::WorldCoordinate;
use math::random::Seed;
use std::collections::HashSet;
use std::sync::mpsc::{channel, Receiver, Sender};
#[cfg(debug_assertions)]
pub const LOAD_DISTANCE: u8 = 4;
#[cfg(not(debug_assertions))]
pub const LOAD_DISTANCE: u8 = 12;
type ChunkLoadingChannel = (Sender<Chunk>, Receiver<Chunk>);
pub struct World {
pub chunks: ChunkGrid,
world_seed: WorldSeed,
// threading
chunk_loading_chan: ChunkLoadingChannel,
threadpool: ThreadPool,
loading_chunks: HashSet<ChunkGridCoordinate>,
}
impl World {
pub fn new() -> Self {
World {
chunks: ChunkGrid::default(),
world_seed: WorldSeed::new(),
chunk_loading_chan: channel(),
loading_chunks: HashSet::new(),
threadpool: ThreadPool::new(1),
}
}
pub fn from_seed(seed: Seed) -> Self {
World {
chunks: ChunkGrid::default(),
world_seed: WorldSeed(seed),
chunk_loading_chan: channel(),
loading_chunks: HashSet::new(),
threadpool: ThreadPool::new(1),
}
}
pub fn seed(&self) -> WorldSeed {
self.world_seed
}
pub fn load_chunk(&mut self, coords: ChunkGridCoordinate) {
if !self.loading_chunks.contains(&coords) && !self.chunks.contains_key(&coords) {
let seed = self.world_seed;
// start a generating thread for the chunk
let (sender, _) = &self.chunk_loading_chan;
let tx = sender.clone();
self.threadpool
.run(move || tx.send(generate_chunk(coords, seed)).unwrap());
self.loading_chunks.insert(coords);
}
}
// TODO: add an update method to remove this garbage code
pub fn load_around(&mut self, positions: Vec<WorldCoordinate>) {
// get back chunks from generating thread
let mut received_chunks = 0;
let (_, receiver) = &self.chunk_loading_chan;
while let Ok(chunk) = receiver.try_recv() {
self.loading_chunks.remove(&chunk.coords);
self.chunks.insert(chunk.coords, chunk);
received_chunks += 1;
}
// (un?)load chunks as the players move
let mut chunks_to_load = HashSet::new();
let mut chunks_to_keep = HashSet::new();
for position in positions {
let target_chunk = ChunkGridCoordinate::from_world_coordinate(position);
let mut counter: u16 = 0;
for i in 0..=(LOAD_DISTANCE + 1) as i16 {
for x in -i..=i {
for z in -i..=i {
let coords = ChunkGridCoordinate::new(
target_chunk.x + x as i64,
target_chunk.z + z as i64,
);
if !self.chunks.contains_key(&coords) {
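// Queue at most 2 * (received_chunks + 1) new chunk requests per player this
// update, throttling how much generation work is requested at once.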
if counter < (received_chunks + 1) * 2 {
chunks_to_load.insert(coords);
counter += 1;
}
} else {
chunks_to_keep.insert(coords);
}
}
}
}
self.chunks
.retain(|coords, _| chunks_to_keep.contains(coords));
}
for coord in chunks_to_load {
self.load_chunk(coord);
}
}
pub fn get_chunk_group(&self, coords: ChunkGridCoordinate) -> Option<ChunkGroup> {
let current = self.chunks.get(&coords)?.clone();
let north = self.chunks.get(&coords.north())?.clone();
let south = self.chunks.get(&coords.south())?.clone();
let east = self.chunks.get(&coords.east())?.clone();
let west = self.chunks.get(&coords.west())?.clone();
Some(ChunkGroup::new(current, north, south, east, west))
}
}
| true |