blob_id
stringlengths 40
40
| language
stringclasses 1
value | repo_name
stringlengths 5
140
| path
stringlengths 5
183
| src_encoding
stringclasses 6
values | length_bytes
int64 12
5.32M
| score
float64 2.52
4.94
| int_score
int64 3
5
| detected_licenses
listlengths 0
47
| license_type
stringclasses 2
values | text
stringlengths 12
5.32M
| download_success
bool 1
class |
---|---|---|---|---|---|---|---|---|---|---|---|
586b8f302d645a13f463a976f580515cc2c825f6
|
Rust
|
harpiechoise/ArithmeticParser
|
/src/parsemath/token.rs
|
UTF-8
| 2,854 | 4 | 4 |
[] |
no_license
|
use std::cmp::PartialEq;
/// The Token enum holds the token type for a specific symbol or number
/// produced by the tokenizer.
#[derive(Debug, PartialEq, Clone)]
pub enum Token {
    /// `+`
    ADD,
    /// `-`
    SUBTRACT,
    /// `*`
    MULTIPLY,
    /// `/`
    DIVIDE,
    /// `^` (exponentiation)
    CARET,
    /// `(`
    LEFTPAREN,
    /// `)`
    RIGHTPAREN,
    NUM(f64), // If the value is numeric we store the number in an Enum Variant
    /// End of input marker.
    EOF,
}
/// The OperPrec enum holds the operator precedence and allows comparison with
/// ordering operators like "<" or ">" (via the derived `PartialOrd`, which
/// orders variants by declaration position). The precedence values are:
/// - DEFAULTZERO: 0
/// - ADDSUB: 1 (Addition, Subtraction)
/// - MULDIV: 2 (Multiplication, Division)
/// - POWER: 3 (Pow operation)
/// - NEGATIVE: 4 (-5 or -(Token::NUM))
#[derive(Debug, PartialEq, PartialOrd)]
pub enum OperPrec {
    DEFAULTZERO,
    ADDSUB,
    MULDIV,
    POWER,
    NEGATIVE
}
impl Token {
/// This method allow to get the precedence from a certain operation depending of the enum variant
/// # Retuns
/// `OperPrec` - An `OperPrec` enum variant
/// # Example
/// ```
/// use parsemath::token::Token;
/// let token = Token::ADD;
/// let oper_prec = token.get_oper_prec()
/// // This will be OperPrec::ADDSUB
/// ```
pub fn get_oper_prec(&self) -> OperPrec {
use self::OperPrec::*;
use self::Token::*;
match *self {
ADD | SUBTRACT => ADDSUB,
MULTIPLY | DIVIDE => MULDIV,
CARET => POWER,
_ => DEFAULTZERO,
}
}
}
#[cfg(test)]
mod test {
    use super::*;

    // Precedence lookups for each operator token.
    #[test]
    fn test_oper_prec_addition() {
        assert_eq!(Token::ADD.get_oper_prec(), OperPrec::ADDSUB);
    }

    #[test]
    fn test_oper_prec_subtraction() {
        assert_eq!(Token::SUBTRACT.get_oper_prec(), OperPrec::ADDSUB);
    }

    #[test]
    fn test_oper_prec_division() {
        assert_eq!(Token::DIVIDE.get_oper_prec(), OperPrec::MULDIV);
    }

    #[test]
    fn test_oper_prec_multiplication() {
        assert_eq!(Token::MULTIPLY.get_oper_prec(), OperPrec::MULDIV);
    }

    #[test]
    fn test_oper_prec_caret() {
        assert_eq!(Token::CARET.get_oper_prec(), OperPrec::POWER);
    }

    #[test]
    fn test_oper_prec_default_zero() {
        assert_eq!(Token::NUM(25.0).get_oper_prec(), OperPrec::DEFAULTZERO)
    }

    // Ordering between precedence classes (relies on derived PartialOrd).
    #[test]
    fn test_oper_prec_addition_subtraction_mult() {
        assert!(OperPrec::MULDIV > OperPrec::ADDSUB);
    }

    #[test]
    fn test_oper_prec_addition_mult_power() {
        assert!(OperPrec::POWER > OperPrec::MULDIV);
    }
}
| true |
c6462c935fe9af651cfc2147788c53b66670a251
|
Rust
|
Txuritan/rewryte
|
/rewryte-generator/src/mysql.rs
|
UTF-8
| 1,260 | 2.59375 | 3 |
[
"MIT",
"Apache-2.0"
] |
permissive
|
use {
crate::Error,
rewryte_parser::models::{Column, ColumnDefault, Enum, ForeignKey, Item, Schema, Table, Types},
std::io,
};
/// Writes every item of `schema` to `writer`, emitting a blank line after
/// each one. Stops and propagates the first error encountered.
pub fn write_schema(schema: &Schema, writer: &mut impl io::Write) -> Result<(), Error> {
    for entry in schema.items.iter() {
        write_item(entry, writer)?;
        writeln!(writer)?;
    }

    Ok(())
}
/// Dispatches a single schema [`Item`] to the matching writer function.
pub fn write_item(item: &Item, writer: &mut impl io::Write) -> Result<(), Error> {
    match item {
        Item::Enum(decl) => write_enum(decl, writer),
        Item::Table(decl) => write_table(decl, writer),
    }
}
/// Emits an enum declaration — not yet implemented for the MySQL backend.
pub fn write_enum(_decl: &Enum, _writer: &mut impl io::Write) -> Result<(), Error> {
    todo!()
}
/// Emits a table declaration — not yet implemented for the MySQL backend.
pub fn write_table(_decl: &Table, _writer: &mut impl io::Write) -> Result<(), Error> {
    todo!()
}
/// Emits a single column definition — not yet implemented.
pub fn write_column(_column: &Column, _writer: &mut impl io::Write) -> Result<(), Error> {
    todo!()
}
/// Emits the MySQL spelling of a column type — not yet implemented.
pub fn write_types(_types: &Types, _writer: &mut impl io::Write) -> Result<(), Error> {
    todo!()
}
/// Emits a column DEFAULT clause — not yet implemented.
pub fn write_column_default(
    _column_default: &ColumnDefault,
    _writer: &mut impl io::Write,
) -> Result<(), Error> {
    todo!()
}
/// Emits a FOREIGN KEY constraint — not yet implemented.
pub fn write_foreign_key(
    _foreign_key: &ForeignKey,
    _writer: &mut impl io::Write,
) -> Result<(), Error> {
    todo!()
}
| true |
fc5aded2c8d78c6fc37726b87877b5735e4216f9
|
Rust
|
JoshCheek/nushell
|
/crates/nu-serde/src/test.rs
|
UTF-8
| 1,127 | 2.96875 | 3 |
[
"MIT"
] |
permissive
|
use crate::{to_success_return_values, to_value};
use insta::assert_debug_snapshot;
use nu_source::Tag;
use serde::Serialize;
use std::collections::BTreeMap;
// NOTE(review): these are insta snapshot tests — the expected debug output
// lives in snapshot files outside this source file; each test only pins the
// current serialization behavior against that recorded snapshot.
#[test]
fn it_works_with_single_integers() {
    assert_debug_snapshot!(to_value(&4i32, Tag::default()));
}
#[test]
fn it_works_with_lists_of_values() {
    assert_debug_snapshot!(to_value(&vec![4i32, 10, 8843234, 100], Tag::default()));
}
#[test]
fn it_works_with_complex_structs() {
    // Nested struct exercising ints, both float widths, and a map of vectors.
    #[derive(Serialize, Debug)]
    struct Complex {
        index: i64,
        x: f32,
        y: f64,
        map: BTreeMap<String, Vec<u8>>,
    }
    // BTreeMap keeps key order deterministic, which matters for snapshots.
    let mut map = BTreeMap::new();
    map.insert("coconuts".into(), vec![4]);
    map.insert("tilapia".into(), vec![16, 3, 24]);
    map.insert("mahi mahi".into(), vec![]);
    assert_debug_snapshot!(to_value(
        &Complex {
            index: -40,
            x: 32.8,
            y: 38.2,
            map
        },
        Tag::default()
    ));
}
#[test]
fn it_serializes_return_value_list() {
    assert_debug_snapshot!(to_success_return_values(
        vec![4i32, 10, 8843234, 100],
        Tag::default()
    ));
}
| true |
0912017f0bc53890fa657e11bcf480b2645149d9
|
Rust
|
zhanpon/atcoder
|
/rust/src/bin/abc167_a.rs
|
UTF-8
| 185 | 3.015625 | 3 |
[] |
no_license
|
use proconio::input;
/// Reads two strings and prints "Yes" when `t` starts with `s`, else "No".
fn main() {
    input! {
        s: String,
        t: String,
    }
    // Inline the prefix check directly into the output.
    println!("{}", if t.starts_with(&s) { "Yes" } else { "No" });
}
| true |
38a8f00f39e25739636d5ca79fa395623046bf3e
|
Rust
|
yongkyuns/noon_egui
|
/src/data/mod.rs
|
UTF-8
| 12,525 | 3.578125 | 4 |
[] |
no_license
|
use std::{
cell::{Ref, RefCell},
ops::RangeInclusive,
rc::Rc,
};
/// Timestamp type used throughout this module (seconds, single precision).
pub type Time = f32;
use crate::Pose;
/// Aggregate of [`ColumnData`] that can only be added and not deleted.
/// Length of all [`ColumnData`] are equal, making this effectively a 2D table.
pub type DataSet<T = f32> = Vec<ColumnData<T>>;
/// Easily accessible wrapper around [`TimeTable`] to be shared by multiple owners
/// (Rc for shared ownership, RefCell for interior mutability).
pub struct DataStore(pub(crate) Rc<RefCell<TimeTable<Pose>>>);
impl Default for DataStore {
    /// Builds a store pre-filled with 10,000 synthetic [`Pose`] samples
    /// (sin/cos trajectories at 10 ms spacing) for demo purposes.
    fn default() -> Self {
        // Temporary, create dummy data for showing
        use crate::math::{cos, sin};
        use std::f32::consts::PI;
        let n = 10000;
        let dt = 0.01;
        // Time axis: 0.00, 0.01, ... (n-1)*0.01 seconds.
        let t: Vec<f32> = (0..n).map(|n| n as f32 * dt).collect();
        let pose: Vec<Pose> = t
            .iter()
            .map(|&t| Pose::new(30.0 * sin(t), 20.0 * cos(t), 2.0 * PI * sin(t)))
            .collect();
        Self(Rc::new(RefCell::new(TimeTable::new(t, pose))))
    }
}
impl DataStore {
    /// Creates a store populated with the demo data from [`Default`].
    pub fn new() -> Self {
        Default::default()
    }

    /// Immutably borrows the underlying [`TimeTable`].
    ///
    /// # Panics
    /// Panics if the table is currently mutably borrowed (standard `RefCell`
    /// borrow rules).
    pub fn borrow(&self) -> Ref<TimeTable<Pose>> {
        self.0.borrow()
    }
}

/// Cloning a `DataStore` is cheap: it bumps the `Rc` refcount so both handles
/// share the same underlying [`TimeTable`].
///
/// Implemented as the [`Clone`] trait rather than the previous inherent
/// `clone` method (clippy: `should_implement_trait`); `store.clone()` calls
/// keep working unchanged, and generic code requiring `Clone` now works too.
impl Clone for DataStore {
    fn clone(&self) -> Self {
        DataStore(Rc::clone(&self.0))
    }
}
/// Single column of data
#[derive(Debug, Default, Clone)]
pub struct ColumnData<T> {
    // Stored elements, in insertion order.
    data: Vec<T>,
    // Optional human-readable label; never read within this file —
    // TODO confirm whether callers elsewhere use it.
    name: Option<String>,
}
impl<T> ColumnData<T> {
    /// Builds an unnamed column from anything convertible to a `Vec<T>`.
    pub fn from_vec(data: impl Into<Vec<T>>) -> Self {
        Self {
            data: data.into(),
            name: None,
        }
    }

    /// Number of elements in the column.
    pub fn len(&self) -> usize {
        self.data.len()
    }

    /// True when the column holds no elements (pairs with `len`;
    /// clippy: `len_without_is_empty`).
    pub fn is_empty(&self) -> bool {
        self.data.is_empty()
    }

    /// Appends one element to the end of the column.
    pub fn add(&mut self, element: T) {
        self.data.push(element);
    }

    /// Checked element access; `None` when `index` is out of bounds.
    pub fn get(&self, index: usize) -> Option<&T> {
        self.data.get(index)
    }

    /// Slice of the elements covered by `range`.
    ///
    /// # Panics
    /// Panics if the range end is past the last element (plain slice indexing).
    pub fn get_between(&self, range: RangeInclusive<usize>) -> &[T] {
        &self.data[range]
    }
}
/// Basic time vector for finding indices to look up within [`TimeSeries`] and [`TimeTable`]
#[derive(Debug, Default)]
struct Timeline {
    /// Cache stores previously found index to avoid unnecessary iteration when finding time index.
    /// NOTE(review): nothing currently writes this cache — the updates inside
    /// `get_index`/`get_index_under` are commented out (the `&self` receiver
    /// forbids them), so it is always `None` in practice.
    cache: Option<(Time, usize)>,
    /// Actual vector
    vec: Vec<Time>,
}
/// Conversion from a bare time vector; implemented as `From` (the idiomatic
/// direction — the standard blanket impl provides `Into<Timeline>` for
/// `Vec<Time>` automatically, so existing `.into()` call sites keep working).
impl From<Vec<Time>> for Timeline {
    fn from(vec: Vec<Time>) -> Self {
        Timeline {
            vec,
            ..Default::default()
        }
    }
}
impl Timeline {
    /// Tolerance used to decide whether two query times are "the same".
    const EPSILON: f32 = 0.0005;

    /// Builds a timeline from anything convertible to a `Vec<Time>`.
    pub fn new(time_vec: impl Into<Vec<Time>>) -> Self {
        Self {
            vec: time_vec.into(),
            ..Default::default()
        }
    }

    /// Adds a time element to the end.
    pub fn add(&mut self, time: Time) {
        self.vec.push(time);
    }

    /// Checks if time input has changed from the last cached index search.
    /// If the input is within [`Self::EPSILON`] of the cached query, the
    /// cached index can be reused without scanning again.
    fn time_changed(&self, time: Time) -> bool {
        self.cache
            .map_or(true, |(prev, _)| (time - prev).abs() > Self::EPSILON)
    }

    /// Find the index that corresponds to the given time in seconds.
    ///
    /// Returns index of first time that is greater or equal to the specified time.
    fn get_index(&self, time: Time) -> Option<usize> {
        if self.time_changed(time) {
            // NOTE(review): the cache is never refreshed here (a `&self`
            // receiver can't write it), so every distinct query pays a
            // linear scan.
            self.vec.iter().position(|&t| t >= time)
        } else {
            // unwrap here is ok, since time_changed always ensures cache is not None
            Some(self.cache.unwrap().1)
        }
    }

    /// Similar to [`get_index`], but only returns a time index whose value is
    /// not past the input time. Useful when the returned index must never
    /// exceed the given time, as in [`get_range`].
    ///
    /// [`get_index`]: Self::get_index
    /// [`get_range`]: Self::get_range
    fn get_index_under(&self, time: Time) -> Option<usize> {
        if self.time_changed(time) {
            self.vec
                .iter()
                .position(|&t| t > time)
                // BUG FIX: `(idx - 1).max(0)` underflowed (panicking in debug
                // builds, wrapping in release) whenever the very first entry
                // was already > `time`, because `idx` is a `usize` and
                // `.max(0)` is a no-op. `saturating_sub` clamps at 0, which
                // is what the original `.max(0)` intended.
                .map(|idx| idx.saturating_sub(1))
        } else {
            // unwrap here is ok, since time_changed always ensures cache is not None
            Some(self.cache.unwrap().1)
        }
    }

    /// Returns range indices that is within the time range specified
    pub fn get_range(&self, start: Time, end: Time) -> Option<RangeInclusive<usize>> {
        if start < end {
            if let Some(start) = self.get_index(start) {
                if let Some(end) = self.get_index_under(end) {
                    return Some(start..=end);
                }
            }
        }
        None
    }

    /// Owned copy of the raw time values inside `[start, end]`.
    pub fn get_range_raw(&self, start: Time, end: Time) -> Option<Vec<Time>> {
        self.get_range(start, end)
            .map(|range| self.vec[range].to_vec())
    }

    /// Length of the time vector
    pub fn len(&self) -> usize {
        self.vec.len()
    }
}
/// Multi-column table of samples sharing a single [`Timeline`].
#[derive(Debug, Default)]
pub struct TimeTable<T> {
    // Shared time axis for every column.
    time: Timeline,
    // One ColumnData per column; lengths presumably match `time` —
    // only enforced by the single-column constructors. TODO confirm.
    data: DataSet<T>,
}
/// A single-column [`TimeSeries`] converts into a one-column [`TimeTable`].
/// Implemented as `From` (the idiomatic direction); the standard blanket impl
/// still provides `TimeSeries::into::<TimeTable<_>>()` for existing callers.
impl<T> From<TimeSeries<T>> for TimeTable<T> {
    fn from(series: TimeSeries<T>) -> Self {
        TimeTable {
            time: series.time,
            data: vec![series.data],
        }
    }
}
impl<T: Clone> TimeTable<T> {
    /// Builds a one-column table from the given series; explicit-name
    /// counterpart of the `Into`/`From` conversion.
    #[allow(dead_code)]
    pub fn from_timeseries(timeseries: TimeSeries<T>) -> Self {
        timeseries.into()
    }
}
impl<T: Clone> TimeTable<T> {
    /// Builds a single-column table from parallel `time`/`data` vectors.
    ///
    /// # Panics
    /// Panics (via [`TimeSeries::new`]) when the two lengths differ.
    pub fn new(time: Vec<Time>, data: Vec<T>) -> Self {
        TimeSeries::new(time, data).into()
    }

    /// Clone of an entire column, or `None` for an out-of-range index.
    #[allow(dead_code)]
    pub fn get_column(&self, column: usize) -> Option<ColumnData<T>> {
        // `.cloned()` replaces `.map(|val| val.clone())` (same behavior).
        self.data.get(column).cloned()
    }

    /// Element of `column` at the first timestamp >= `time`.
    pub fn get_at_time(&self, column: usize, time: Time) -> Option<T> {
        let idx = self.time.get_index(time)?;
        // The original called `.clone()` on an `Option<&T>` (a no-op copy of
        // the reference) before `.to_owned()`; `.cloned()` clones the element
        // itself, which is what was intended.
        self.data
            .get(column)
            .and_then(|col| col.get(idx))
            .cloned()
    }

    /// Owned copy of the raw time stamps inside `[start, end]`.
    pub fn get_time_range(&self, start: Time, end: Time) -> Option<Vec<Time>> {
        self.time.get_range_raw(start, end)
    }

    /// Returns an owned copy of the data of `column` within the time range specified.
    pub fn get_range(&self, column: usize, start: Time, end: Time) -> Option<Vec<T>> {
        let range = self.time.get_range(start, end)?;
        self.data
            .get(column)
            .map(|col| col.get_between(range).to_owned())
    }
}
/// One column of data paired with its own [`Timeline`].
#[derive(Debug, Default)]
pub struct TimeSeries<T> {
    // Time axis; kept the same length as `data` by the constructors.
    time: Timeline,
    // The samples themselves.
    data: ColumnData<T>,
    // Minimum spacing between accepted samples; 0.0 means "only require
    // strictly increasing timestamps" (see `with_sample_time` / `add`).
    sample_time: Time,
}
impl<T: Clone> TimeSeries<T> {
    /// Create a new [`TimeSeries`] from given array of `time` and `data`.
    ///
    /// # Panics
    /// Panics when `time` and `data` have different lengths.
    pub fn new(time: impl Into<Vec<Time>>, data: impl Into<Vec<T>>) -> Self {
        let time = Timeline::new(time.into());
        let data = ColumnData::from_vec(data);
        if time.len() != data.len() {
            panic!("Size of time and data are different!");
        }
        Self {
            time,
            data,
            sample_time: 0.0,
        }
    }

    /// Create an empty [`TimeSeries`].
    ///
    /// (Built by hand rather than via `Default::default()` so it stays
    /// available for element types that do not implement `Default`.)
    pub fn empty() -> Self {
        Self {
            time: Timeline::default(),
            data: ColumnData {
                data: Vec::new(),
                name: None,
            },
            sample_time: 0.0,
        }
    }

    /// Sets the `sample_time` of the [`TimeSeries`]. If `add` is called with a timestamp
    /// smaller than the sum of the last timestamp and `sample_time`, `add` will
    /// not push the data into the [`TimeSeries`]. When `sample_time` is zero (the default),
    /// `add` only discards data points whose timestamp does not exceed the last one,
    /// i.e. it only guarantees monotonically increasing timestamps.
    pub fn with_sample_time(mut self, sample_time: Time) -> Self {
        self.sample_time = sample_time;
        self
    }

    /// Appends `(time, element)` if the timestamp passes the acceptance rule
    /// described on [`Self::with_sample_time`]; otherwise drops it silently.
    pub fn add(&mut self, time: Time, element: T) {
        // One acceptance decision instead of the previous nested if/else
        // pyramid (identical logic); dead commented-out variant removed.
        let accept = match self.time.vec.last() {
            None => true,
            Some(&last) => {
                if self.sample_time > 0.0 {
                    last + self.sample_time <= time
                } else {
                    last < time
                }
            }
        };
        if accept {
            self.time.add(time);
            self.data.add(element);
        }
    }

    /// Get data element for a given time
    pub fn get_at_time(&self, time: Time) -> Option<T> {
        self.time
            .get_index(time)
            .and_then(|idx| self.data.get(idx))
            .map(|val| val.to_owned())
    }

    /// Returns slice of data that is within the time range specified
    #[allow(dead_code)]
    pub fn get_range(&self, start: Time, end: Time) -> Option<&[T]> {
        self.time
            .get_range(start, end)
            .map(|range| self.data.get_between(range))
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::math::{cos, sin};
    use crate::Pose;
    use std::f32::consts::PI;
    // 5-sample Pose series at 10 ms spacing (t = 0.00 .. 0.04).
    fn dummy_pose() -> TimeSeries<Pose> {
        let n = 5;
        let dt = 0.01;
        let t: Vec<f32> = (0..n).map(|n| n as f32 * dt).collect();
        let pose: Vec<Pose> = t
            .iter()
            .map(|&t| Pose::new(30.0 * sin(t), 20.0 * cos(t), 2.0 * PI * sin(t)))
            .collect();
        TimeSeries::new(t, pose)
    }
    // 5-sample f32 series at 1 s spacing with data = 3 * t.
    fn dummy_f32() -> TimeSeries<f32> {
        let n = 5;
        let dt = 1.0;
        let t: Vec<f32> = (0..n).map(|n| n as f32 * dt).collect();
        let data: Vec<f32> = t.iter().map(|&t| t * 3.0).collect();
        TimeSeries::new(t, data)
    }
    #[test]
    fn add_timeseries() {
        // Smoke test: just exercises `add` on an empty series.
        let mut ts = TimeSeries::<f32>::empty();
        ts.add(0.5, 5.0);
        ts.add(0.8, 15.0);
        dbg!(&ts);
    }
    #[test]
    fn check_index() {
        // dbg!(&ts);
        let ts = dummy_pose();
        assert_eq!(2, ts.time.get_index(0.02).unwrap()); // finding exactly matching time
        assert_eq!(2, ts.time.get_index(0.02).unwrap()); // running again should give same result
        assert_eq!(2, ts.time.get_index(0.015).unwrap()); // finding next closest time stamp
    }
    #[test]
    fn check_range() {
        let ts = dummy_f32();
        assert_eq!(1, ts.time.get_index(1.0).unwrap());
        assert_eq!(3, ts.time.get_index(2.1).unwrap());
        assert_eq!(3, ts.time.get_index(2.9).unwrap());
        assert_eq!(3, ts.time.get_index(3.0).unwrap());
        // End of the range uses get_index_under, so 2.9 stops at t = 2.0.
        assert_eq!(&[3.0, 6.0], ts.get_range(1.0, 2.9).unwrap());
        assert_eq!(&[3.0, 6.0, 9.0], ts.get_range(1.0, 3.0).unwrap());
    }
    #[test]
    fn series_to_table() {
        let ts = dummy_f32();
        let _table: TimeTable<f32> = ts.into();
    }
    #[test]
    fn check_sample_time() {
        // Default (sample_time == 0): only strictly increasing timestamps kept.
        let mut ts = TimeSeries::<f32>::empty();
        ts.add(0.0, 1.0);
        ts.add(0.0, 2.0); // This shouldn't be added
        ts.add(0.5, 3.0);
        ts.add(0.5, 4.0); // This shouldn't be added
        assert_eq!(0, ts.time.get_index(0.0).unwrap());
        assert_eq!(1, ts.time.get_index(0.5).unwrap());
        // sample_time == 0.1: samples closer than 0.1 s to the last are dropped.
        let mut ts = TimeSeries::<f32>::empty().with_sample_time(0.1);
        ts.add(0.0, 1.0);
        ts.add(0.05, 2.0); // This shouldn't be added
        ts.add(0.1, 3.0);
        assert_eq!(0, ts.time.get_index(0.0).unwrap());
        assert_eq!(1, ts.time.get_index(0.1).unwrap());
    }
}
// TimeTable data format options:
// 1. Generational-arena -> Arena<TimeSeries>
// pros: easy to push and manage TimeSeries
// cons: Dependency, TimeSeries cannot contain different data types
// 2. Vec<Box<dyn TimeSeries>>
// pros: No dependency
// cons: Use of trait object
// 3. ndarray?
| true |
902305ed81985c7ae147e62197d0fbf05ce371b5
|
Rust
|
datadevopscloud/fluvio
|
/src/spu-schema/src/server/stream_fetch.rs
|
UTF-8
| 9,716 | 2.734375 | 3 |
[
"Apache-2.0"
] |
permissive
|
//!
//! # Continuous Fetch
//!
//! Stream records to client
//!
use std::fmt::Debug;
use std::marker::PhantomData;
use std::io::{self, Read};
use std::borrow::Cow;
use dataplane::core::{Encoder, Decoder};
use dataplane::api::Request;
use dataplane::fetch::FetchablePartitionResponse;
use dataplane::record::RecordSet;
use dataplane::Isolation;
use flate2::{
Compression,
bufread::{GzEncoder, GzDecoder},
};
/// Response/request specialised to the default in-memory [`RecordSet`].
pub type DefaultStreamFetchResponse = StreamFetchResponse<RecordSet>;
pub type DefaultStreamFetchRequest = StreamFetchRequest<RecordSet>;
use super::SpuServerApiKey;
// version for WASM_MODULE
pub const WASM_MODULE_API: i16 = 11;
pub const WASM_MODULE_V2_API: i16 = 12;
// version for aggregator smartstream
pub const AGGREGATOR_API: i16 = 13;
// version for gzipped WASM payloads (current default, see DEFAULT_API_VERSION)
pub const GZIP_WASM_API: i16 = 14;
/// Fetch records continuously
/// Output will be sent back as a stream
#[derive(Decoder, Encoder, Default, Debug)]
pub struct StreamFetchRequest<R>
where
    R: Encoder + Decoder + Default + Debug,
{
    pub topic: String,
    pub partition: i32,
    pub fetch_offset: i64,
    pub max_bytes: i32,
    pub isolation: Isolation,
    /// Raw WASM module bytes (only encoded for protocol version >= 11).
    #[fluvio(min_version = 11)]
    pub wasm_module: Vec<u8>,
    /// Structured SmartStream payload (only encoded for version >= 12).
    #[fluvio(min_version = 12)]
    pub wasm_payload: Option<SmartStreamPayload>,
    /// Marker tying the record type `R` to this request; carries no data.
    pub data: PhantomData<R>,
}
impl<R> Request for StreamFetchRequest<R>
where
    R: Debug + Decoder + Encoder,
{
    const API_KEY: u16 = SpuServerApiKey::StreamFetch as u16;
    // Most recent protocol version this request speaks (gzip WASM payloads).
    const DEFAULT_API_VERSION: i16 = GZIP_WASM_API;
    type Response = StreamFetchResponse<R>;
}
/// The request payload when using a Consumer SmartStream.
///
/// This includes the WASM content as well as the type of SmartStream being used.
/// It also carries any data that is required for specific types of SmartStreams.
#[derive(Debug, Default, Clone, Encoder, Decoder)]
pub struct SmartStreamPayload {
    pub wasm: SmartStreamWasm,
    pub kind: SmartStreamKind,
}
/// Indicates the type of SmartStream as well as any special data required
#[derive(Debug, Clone, Encoder, Decoder)]
pub enum SmartStreamKind {
    Filter,
    Map,
    /// Aggregation carries the initial accumulator bytes.
    Aggregate { accumulator: Vec<u8> },
}
impl Default for SmartStreamKind {
    // Filter is the wire default (discriminant 0, see the codec tests below).
    fn default() -> Self {
        Self::Filter
    }
}
/// Different possible representations of WASM modules.
///
/// In a fetch request, a WASM module may be given directly in the request
/// as raw bytes.
///
// TODO ... or, it may be named and selected from the WASM store.
#[derive(Debug, Clone, Encoder, Decoder)]
pub enum SmartStreamWasm {
    Raw(Vec<u8>),
    /// compressed WASM module payload using Gzip
    #[fluvio(min_version = 14)]
    Gzip(Vec<u8>),
    // TODO implement named WASM modules once we have a WASM store
    // Url(String),
}
/// Gzip-compresses `raw`, returning the compressed bytes.
fn zip(raw: &[u8]) -> io::Result<Vec<u8>> {
    let mut out = Vec::with_capacity(raw.len());
    GzEncoder::new(raw, Compression::default()).read_to_end(&mut out)?;
    Ok(out)
}
/// Inflates a gzip-compressed buffer back into the original bytes.
fn unzip(compressed: &[u8]) -> io::Result<Vec<u8>> {
    let mut out = Vec::with_capacity(compressed.len());
    GzDecoder::new(compressed).read_to_end(&mut out)?;
    Ok(out)
}
impl SmartStreamWasm {
    /// Converts this value to its gzip-compressed representation in place.
    /// A `Raw` payload is compressed; an already-`Gzip` payload is untouched.
    /// (The previous doc claimed this "returns bytes" — it returns `Ok(())`.)
    pub fn to_gzip(&mut self) -> io::Result<()> {
        if let Self::Raw(raw) = self {
            *self = Self::Gzip(zip(raw.as_ref())?);
        }
        Ok(())
    }
    /// Converts this value to its raw (uncompressed) representation in place.
    /// A `Gzip` payload is inflated; an already-`Raw` payload is untouched.
    pub fn to_raw(&mut self) -> io::Result<()> {
        if let Self::Gzip(gzipped) = self {
            *self = Self::Raw(unzip(gzipped)?);
        }
        Ok(())
    }
    /// get the raw bytes of the WASM module:
    /// borrows for `Raw`, inflates into an owned buffer for `Gzip`.
    pub fn get_raw(&self) -> io::Result<Cow<[u8]>> {
        Ok(match self {
            Self::Raw(raw) => Cow::Borrowed(raw),
            Self::Gzip(gzipped) => Cow::Owned(unzip(gzipped.as_ref())?),
        })
    }
}
impl Default for SmartStreamWasm {
    // Empty raw module; matches the Raw discriminant used by old peers.
    fn default() -> Self {
        Self::Raw(Vec::new())
    }
}
/// One streamed batch of records for a single topic partition.
#[derive(Encoder, Decoder, Default, Debug)]
pub struct StreamFetchResponse<R>
where
    R: Encoder + Decoder + Default + Debug,
{
    pub topic: String,
    /// Identifies which client stream this response belongs to.
    pub stream_id: u32,
    pub partition: FetchablePartitionResponse<R>,
}
#[cfg(feature = "file")]
pub use file::*;
/// Zero-copy ("file") encoding support, compiled only with the `file` feature.
#[cfg(feature = "file")]
mod file {
    use std::io::Error as IoError;
    use log::trace;
    use bytes::BytesMut;
    use dataplane::core::Version;
    use dataplane::store::StoreValue;
    use dataplane::record::FileRecordSet;
    use dataplane::store::FileWrite;
    pub type FileStreamFetchRequest = StreamFetchRequest<FileRecordSet>;
    use super::*;
    impl FileWrite for StreamFetchResponse<FileRecordSet> {
        /// Encodes header fields into `src` and delegates the record payload
        /// to the partition's own `file_encode` (which appends StoreValues).
        fn file_encode(
            &self,
            src: &mut BytesMut,
            data: &mut Vec<StoreValue>,
            version: Version,
        ) -> Result<(), IoError> {
            trace!("file encoding FlvContinuousFetchResponse");
            trace!("topic {}", self.topic);
            // Field order must mirror the derived Encoder for the in-memory type.
            self.topic.encode(src, version)?;
            self.stream_id.encode(src, version)?;
            self.partition.file_encode(src, data, version)?;
            Ok(())
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    #[test]
    fn test_encode_smartstreamkind() {
        let mut dest = Vec::new();
        let value: SmartStreamKind = SmartStreamKind::Filter;
        value.encode(&mut dest, 0).expect("should encode");
        // Filter encodes as a single discriminant byte 0x00.
        assert_eq!(dest.len(), 1);
        assert_eq!(dest[0], 0x00);
    }
    #[test]
    fn test_decode_smartstreamkind() {
        // Discriminant 0x01 decodes to the Map variant.
        let bytes = vec![0x01];
        let mut value: SmartStreamKind = Default::default();
        value
            .decode(&mut std::io::Cursor::new(bytes), 0)
            .expect("should decode");
        assert!(matches!(value, SmartStreamKind::Map));
    }
    #[test]
    fn test_encode_smartstreamwasm() {
        let mut dest = Vec::new();
        let value: SmartStreamWasm = SmartStreamWasm::Raw(vec![0xde, 0xad, 0xbe, 0xef]);
        value.encode(&mut dest, 0).expect("should encode");
        println!("{:02x?}", &dest);
        // Layout: 1-byte Raw discriminant, 4-byte big-endian length, payload.
        assert_eq!(dest.len(), 9);
        assert_eq!(dest[0], 0x00);
        assert_eq!(dest[1], 0x00);
        assert_eq!(dest[2], 0x00);
        assert_eq!(dest[3], 0x00);
        assert_eq!(dest[4], 0x04);
        assert_eq!(dest[5], 0xde);
        assert_eq!(dest[6], 0xad);
        assert_eq!(dest[7], 0xbe);
        assert_eq!(dest[8], 0xef);
    }
    #[test]
    fn test_decode_smartstreamwasm() {
        // Round-trip of the layout asserted in test_encode_smartstreamwasm.
        let bytes = vec![0x00, 0x00, 0x00, 0x00, 0x04, 0xde, 0xad, 0xbe, 0xef];
        let mut value: SmartStreamWasm = Default::default();
        value
            .decode(&mut std::io::Cursor::new(bytes), 0)
            .expect("should decode");
        let inner = match value {
            SmartStreamWasm::Raw(inner) => inner,
            #[allow(unreachable_patterns)]
            _ => panic!("should decode to SmartStreamWasm::Raw"),
        };
        assert_eq!(inner.len(), 4);
        assert_eq!(inner[0], 0xde);
        assert_eq!(inner[1], 0xad);
        assert_eq!(inner[2], 0xbe);
        assert_eq!(inner[3], 0xef);
    }
    #[test]
    fn test_encode_stream_fetch_request() {
        let mut dest = Vec::new();
        let value = DefaultStreamFetchRequest {
            topic: "one".to_string(),
            partition: 3,
            wasm_payload: Some(SmartStreamPayload {
                kind: SmartStreamKind::Filter,
                wasm: SmartStreamWasm::Raw(vec![0xde, 0xad, 0xbe, 0xef]),
            }),
            ..Default::default()
        };
        // Encoded at version 12 (wasm_payload present, pre-gzip).
        value.encode(&mut dest, 12).expect("should encode");
        let expected = vec![
            0x00, 0x03, 0x6f, 0x6e, 0x65, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x00, 0x00,
            0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00,
            0x00, 0x00, 0x00, 0x04, 0xde, 0xad, 0xbe, 0xef, 0x00,
        ];
        assert_eq!(dest, expected);
    }
    #[test]
    fn test_decode_stream_fetch_request() {
        // Same byte layout as the encode test, driven in reverse.
        let bytes = vec![
            0x00, 0x03, 0x6f, 0x6e, 0x65, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x00, 0x00,
            0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00,
            0x00, 0x00, 0x00, 0x04, 0xde, 0xad, 0xbe, 0xef, 0x00,
        ];
        let mut value = DefaultStreamFetchRequest::default();
        value.decode(&mut std::io::Cursor::new(bytes), 12).unwrap();
        assert_eq!(value.topic, "one");
        assert_eq!(value.partition, 3);
        let smartstream = match value.wasm_payload {
            Some(wasm) => wasm,
            _ => panic!("should have smartstreeam payload"),
        };
        let wasm = match smartstream.wasm {
            SmartStreamWasm::Raw(wasm) => wasm,
            #[allow(unreachable_patterns)]
            _ => panic!("should be SmartStreamWasm::Raw"),
        };
        assert_eq!(wasm, vec![0xde, 0xad, 0xbe, 0xef]);
        assert!(matches!(smartstream.kind, SmartStreamKind::Filter));
    }
    #[test]
    fn test_zip_unzip_works() {
        // A highly compressible buffer must shrink, and round-trip losslessly.
        const ORIG_LEN: usize = 1024;
        let orig = SmartStreamWasm::Raw(vec![0x01; ORIG_LEN]);
        let mut compressed = orig.clone();
        compressed.to_gzip().unwrap();
        assert!(matches!(&compressed, &SmartStreamWasm::Gzip(ref x) if x.len() < ORIG_LEN));
        let mut uncompressed = compressed.clone();
        uncompressed.to_raw().unwrap();
        assert!(
            matches!((&uncompressed, &orig), (&SmartStreamWasm::Raw(ref x), &SmartStreamWasm::Raw(ref y)) if x == y )
        );
        assert_eq!(orig.get_raw().unwrap(), compressed.get_raw().unwrap());
    }
}
| true |
34485704282c6663b80a30e2786909090cbf15ec
|
Rust
|
AngeloLupo/advent_of_code_2020
|
/src/day8/mod.rs
|
UTF-8
| 2,849 | 3.640625 | 4 |
[] |
no_license
|
use std::fs;
/// Runs the boot-code program in `src/day8/input`, optionally flipping the
/// instruction at `instruction_index` (nop <-> jmp) before executing it.
///
/// Returns `(terminated, accumulator)`: `terminated` is `true` when execution
/// ran past the last instruction, `false` when some instruction was about to
/// execute a second time (infinite loop detected).
pub fn one(instruction_index: usize) -> (bool, i32) {
    use std::collections::HashSet;
    let input = fs::read_to_string("src/day8/input").expect("Unable to read file");
    let instructions: Vec<&str> = input.split("\n").collect();
    let mut index: usize = 0;
    let mut accumulator: i32 = 0;
    // HashSet instead of Vec: `Vec::contains` inside the loop was O(n^2).
    let mut visited_indexes: HashSet<usize> = HashSet::new();
    loop {
        if index > instructions.len() - 1 {
            return (true, accumulator);
        }
        // `insert` returns false when the index was already present,
        // i.e. we are about to loop forever.
        if !visited_indexes.insert(index) {
            return (false, accumulator);
        }
        let split_instruction: Vec<&str> = instructions[index].split(" ").collect();
        let mut instruction = split_instruction[0];
        // Operand is formatted like "+12" / "-3": sign char then digits.
        let operator: char = split_instruction[1].as_bytes()[0] as char;
        let operand_str = &split_instruction[1][1..];
        let operand: i32 = operand_str.parse().unwrap();
        if index == instruction_index {
            // BUG FIX: the original used two consecutive `if`s, so a "nop"
            // was flipped to "jmp" and then immediately flipped back to
            // "nop" by the second check — the nop->jmp swap never happened.
            // `else if` performs exactly one swap.
            if instruction == "nop" {
                println!("changing instruction {}", index);
                instruction = "jmp";
            } else if instruction == "jmp" {
                println!("changing instruction {}", index);
                instruction = "nop";
            }
        }
        match instruction {
            "nop" => {
                index += 1
            },
            "acc" => {
                match operator {
                    '+' => accumulator += operand,
                    '-' => accumulator -= operand,
                    _ => println!("[ACC] Weird operator: {} at index {}", operator, index)
                }
                index += 1;
            }
            "jmp" => {
                match operator {
                    // NOTE(review): a backward jump past instruction 0 would
                    // underflow `index` — assumed impossible for valid AoC
                    // input; confirm if reused elsewhere.
                    '+' => index += operand as usize,
                    '-' => index -= operand as usize,
                    _ => println!("[JMP] Weird operator: {} at index {}", operator, index)
                }
            },
            _ => println!("Weird instruction: {}", operator)
        }
    }
}
/// Part two: tries flipping each nop/jmp instruction in turn and returns the
/// accumulator of the first variant whose program terminates (0 if none do).
pub fn two() -> i32 {
    let input = fs::read_to_string("src/day8/input").expect("Unable to read file");
    let instructions: Vec<&str> = input.split("\n").collect();
    // Collect the indices of every nop/jmp instruction — the only candidates
    // worth flipping.
    let candidates: Vec<usize> = instructions
        .iter()
        .enumerate()
        .filter(|(_, line)| {
            let op = line.split(" ").next().unwrap_or("");
            op == "nop" || op == "jmp"
        })
        .map(|(i, _)| i)
        .collect();
    for index_to_change in candidates {
        println!("trying index {}", index_to_change);
        let (terminated, accumulator) = one(index_to_change);
        if terminated {
            return accumulator;
        }
    }
    0
}
| true |
d3ff4168fe568edc17a0d0349a079eadfdba5d3f
|
Rust
|
callym/vsop2013-rs
|
/src/planet_data.rs
|
UTF-8
| 1,014 | 2.8125 | 3 |
[] |
no_license
|
use crate::{util::*, Error, Header};
use nom::character::complete::space0;
use serde::{Deserialize, Serialize};
/// One block of ephemeris coefficients covering the interval [start, stop).
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct Table {
    // Interval bounds, compared against the `jd` argument of the range checks.
    pub start: f64,
    pub stop: f64,
    // Decoded coefficient values (value * 10^exponent, see `parse`).
    pub data: Vec<f64>,
}
impl Table {
    /// Parses one table record: a start/stop pair followed by
    /// `header.table_size` (value, exponent) number pairs, stored as
    /// `value * 10^exponent`. Returns the leftover input alongside the table.
    pub fn parse<'a>(header: &Header, input: &'a str) -> Error<'a, Self> {
        let (input, start) = parse_f64(input)?;
        let (input, _) = space0(input)?;
        let (input, stop) = parse_f64(input)?;
        let mut data = vec![];
        // The remaining input is threaded through each iteration by hand,
        // since each nom-style parser returns the unconsumed tail.
        let mut input_iter = input;
        for _ in 0..header.table_size {
            let input = input_iter;
            let (input, val) = parse_f64(input)?;
            let (input, exp) = parse_f64(input)?;
            data.push(val * f64::powf(10.0, exp));
            input_iter = input;
        }
        let input = input_iter;
        Ok((input, Self { start, stop, data }))
    }
    /// True when `jd` falls inside the half-open interval [start, stop).
    pub fn is_in_range(&self, jd: f64) -> bool {
        jd >= self.start && jd < self.stop
    }
    /// NOTE(review): uses `||`, so whenever start <= stop this is true for
    /// every `jd`; presumably intended for wrap-around/partial intervals —
    /// confirm against the callers.
    pub fn is_in_range_partial(&self, jd: f64) -> bool {
        jd >= self.start || jd < self.stop
    }
}
| true |
f98f6a4b522da357b300f335dac4935f374b5c8d
|
Rust
|
Baekalfen/BachelorProject
|
/rusqlite/src/named_params.rs
|
UTF-8
| 8,100 | 3.09375 | 3 |
[
"MIT"
] |
permissive
|
use libc::c_int;
use super::ffi;
use {Result, Error, Connection, Statement, Rows, Row, str_to_cstring};
use types::ToSql;
// NOTE(review): `try!` is the pre-`?` error-propagation macro; it is used
// consistently throughout this crate, so it is kept here.
impl Connection {
    /// Convenience method to prepare and execute a single SQL statement with named parameter(s).
    ///
    /// On success, returns the number of rows that were changed or inserted or deleted (via
    /// `sqlite3_changes`).
    ///
    /// ## Example
    ///
    /// ```rust,no_run
    /// # use rusqlite::{Connection, Result};
    /// fn insert(conn: &Connection) -> Result<i32> {
    ///     conn.execute_named("INSERT INTO test (name) VALUES (:name)", &[(":name", &"one")])
    /// }
    /// ```
    ///
    /// # Failure
    ///
    /// Will return `Err` if `sql` cannot be converted to a C-compatible string or if the
    /// underlying SQLite call fails.
    pub fn execute_named(&self, sql: &str, params: &[(&str, &ToSql)]) -> Result<c_int> {
        // Prepare, bind, execute; the temporary Statement is finalized when
        // the closure returns.
        self.prepare(sql).and_then(|mut stmt| stmt.execute_named(params))
    }
    /// Convenience method to execute a query with named parameter(s) that is expected to return
    /// a single row.
    ///
    /// If the query returns more than one row, all rows except the first are ignored.
    ///
    /// # Failure
    ///
    /// Will return `Err` if `sql` cannot be converted to a C-compatible string or if the
    /// underlying SQLite call fails.
    pub fn query_row_named<T, F>(&self, sql: &str, params: &[(&str, &ToSql)], f: F) -> Result<T>
        where F: FnOnce(Row) -> T
    {
        let mut stmt = try!(self.prepare(sql));
        let mut rows = try!(stmt.query_named(params));
        // get_expected_row presumably yields Err when no row is produced —
        // TODO confirm against the Rows implementation.
        rows.get_expected_row().map(f)
    }
}
impl<'conn> Statement<'conn> {
    /// Return the index of an SQL parameter given its name.
    ///
    /// # Failure
    ///
    /// Will return Err if `name` is invalid. Will return Ok(None) if the name
    /// is valid but not a bound parameter of this statement.
    pub fn parameter_index(&self, name: &str) -> Result<Option<i32>> {
        let c_name = try!(str_to_cstring(name));
        let c_index = unsafe { ffi::sqlite3_bind_parameter_index(self.stmt, c_name.as_ptr()) };
        Ok(match c_index {
            0 => None, // A zero is returned if no matching parameter is found.
            n => Some(n),
        })
    }
    /// Execute the prepared statement with named parameter(s). If any parameters
    /// that were in the prepared statement are not included in `params`, they
    /// will continue to use the most-recently bound value from a previous call
    /// to `execute_named`, or `NULL` if they have never been bound.
    ///
    /// On success, returns the number of rows that were changed or inserted or deleted (via
    /// `sqlite3_changes`).
    ///
    /// ## Example
    ///
    /// ```rust,no_run
    /// # use rusqlite::{Connection, Result};
    /// fn insert(conn: &Connection) -> Result<i32> {
    ///     let mut stmt = try!(conn.prepare("INSERT INTO test (name) VALUES (:name)"));
    ///     stmt.execute_named(&[(":name", &"one")])
    /// }
    /// ```
    ///
    /// # Failure
    ///
    /// Will return `Err` if binding parameters fails, the executed statement returns rows (in
    /// which case `query` should be used instead), or the underling SQLite call fails.
    pub fn execute_named(&mut self, params: &[(&str, &ToSql)]) -> Result<c_int> {
        try!(self.bind_parameters_named(params));
        unsafe { self.execute_() }
    }
    /// Execute the prepared statement with named parameter(s), returning an iterator over the
    /// resulting rows. If any parameters that were in the prepared statement are not included in
    /// `params`, they will continue to use the most-recently bound value from a previous call to
    /// `query_named`, or `NULL` if they have never been bound.
    ///
    /// ## Example
    ///
    /// ```rust,no_run
    /// # use rusqlite::{Connection, Result, Rows};
    /// fn query(conn: &Connection) -> Result<()> {
    ///     let mut stmt = try!(conn.prepare("SELECT * FROM test where name = :name"));
    ///     let mut rows = try!(stmt.query_named(&[(":name", &"one")]));
    ///     for row in rows {
    ///         // ...
    ///     }
    ///     Ok(())
    /// }
    /// ```
    ///
    /// # Failure
    ///
    /// Will return `Err` if binding parameters fails.
    pub fn query_named<'a>(&'a mut self, params: &[(&str, &ToSql)]) -> Result<Rows<'a>> {
        // Reset first so a previously stepped statement starts over, then
        // mark that the next use must reset again.
        self.reset_if_needed();
        try!(self.bind_parameters_named(params));
        self.needs_reset = true;
        Ok(Rows::new(self))
    }
    // Binds each (name, value) pair by looking up its index; errors on any
    // name that is not a parameter of this statement.
    fn bind_parameters_named(&mut self, params: &[(&str, &ToSql)]) -> Result<()> {
        for &(name, value) in params {
            if let Some(i) = try!(self.parameter_index(name)) {
                try!(self.conn.decode_result(unsafe { value.bind_parameter(self.stmt, i) }));
            } else {
                return Err(Error::InvalidParameterName(name.into()));
            }
        }
        Ok(())
    }
}
#[cfg(test)]
mod test {
    use Connection;
    // Connection-level convenience wrapper: named binds via `execute_named`
    // and `query_row_named` round-trip through an in-memory database.
    #[test]
    fn test_execute_named() {
        let db = Connection::open_in_memory().unwrap();
        db.execute_batch("CREATE TABLE foo(x INTEGER)").unwrap();
        assert_eq!(db.execute_named("INSERT INTO foo(x) VALUES (:x)", &[(":x", &1i32)]).unwrap(),
                   1);
        assert_eq!(db.execute_named("INSERT INTO foo(x) VALUES (:x)", &[(":x", &2i32)]).unwrap(),
                   1);
        assert_eq!(3i32,
                   db.query_row_named("SELECT SUM(x) FROM foo WHERE x > :x",
                                      &[(":x", &0i32)],
                                      |r| r.get(0))
                     .unwrap());
    }
    // Statement-level named execute on a prepared statement.
    #[test]
    fn test_stmt_execute_named() {
        let db = Connection::open_in_memory().unwrap();
        let sql = "CREATE TABLE test (id INTEGER PRIMARY KEY NOT NULL, name TEXT NOT NULL, flag \
                   INTEGER)";
        db.execute_batch(sql).unwrap();
        let mut stmt = db.prepare("INSERT INTO test (name) VALUES (:name)").unwrap();
        stmt.execute_named(&[(":name", &"one")]).unwrap();
        assert_eq!(1i32,
                   db.query_row_named("SELECT COUNT(*) FROM test WHERE name = :name",
                                      &[(":name", &"one")],
                                      |r| r.get(0))
                     .unwrap());
    }
    // Smoke test: query_named succeeds on a prepared SELECT.
    #[test]
    fn test_query_named() {
        let db = Connection::open_in_memory().unwrap();
        let sql = "CREATE TABLE test (id INTEGER PRIMARY KEY NOT NULL, name TEXT NOT NULL, flag \
                   INTEGER)";
        db.execute_batch(sql).unwrap();
        let mut stmt = db.prepare("SELECT * FROM test where name = :name").unwrap();
        stmt.query_named(&[(":name", &"one")]).unwrap();
    }
    // A named parameter never bound defaults to NULL.
    #[test]
    fn test_unbound_parameters_are_null() {
        let db = Connection::open_in_memory().unwrap();
        let sql = "CREATE TABLE test (x TEXT, y TEXT)";
        db.execute_batch(sql).unwrap();
        let mut stmt = db.prepare("INSERT INTO test (x, y) VALUES (:x, :y)").unwrap();
        stmt.execute_named(&[(":x", &"one")]).unwrap();
        let result: Option<String> = db.query_row("SELECT y FROM test WHERE x = 'one'",
                                                  &[],
                                                  |row| row.get(0))
                                       .unwrap();
        assert!(result.is_none());
    }
    // A parameter bound in an earlier call is reused when omitted later.
    #[test]
    fn test_unbound_parameters_are_reused() {
        let db = Connection::open_in_memory().unwrap();
        let sql = "CREATE TABLE test (x TEXT, y TEXT)";
        db.execute_batch(sql).unwrap();
        let mut stmt = db.prepare("INSERT INTO test (x, y) VALUES (:x, :y)").unwrap();
        stmt.execute_named(&[(":x", &"one")]).unwrap();
        stmt.execute_named(&[(":y", &"two")]).unwrap();
        let result: String = db.query_row("SELECT x FROM test WHERE y = 'two'",
                                          &[],
                                          |row| row.get(0))
                               .unwrap();
        assert_eq!(result, "one");
    }
}
| true |
68fec9ba11d74275ca8c9ed36747fed27f5e3cc5
|
Rust
|
placrosse/cortex-m
|
/src/peripheral/nvic.rs
|
UTF-8
| 3,165 | 3.453125 | 3 |
[
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference",
"MIT"
] |
permissive
|
//! Nested Vector Interrupt Controller
use volatile_register::{RO, RW};
use interrupt::Nr;
/// Register block
///
/// Memory-mapped layout of the NVIC register file. Each register group is
/// an array of 8 x 32-bit registers (one bit per interrupt, 256 max); the
/// `reservedN` arrays are padding that keeps the following group at its
/// architectural offset. NOTE(review): sizes appear consistent with the
/// ARMv7-M NVIC map — confirm against the TRM before changing any field.
#[repr(C)]
pub struct RegisterBlock {
    /// Interrupt Set-Enable
    pub iser: [RW<u32>; 8],
    reserved0: [u32; 24],
    /// Interrupt Clear-Enable
    pub icer: [RW<u32>; 8],
    reserved1: [u32; 24],
    /// Interrupt Set-Pending
    pub ispr: [RW<u32>; 8],
    reserved2: [u32; 24],
    /// Interrupt Clear-Pending
    pub icpr: [RW<u32>; 8],
    reserved3: [u32; 24],
    /// Interrupt Active Bit (read-only)
    pub iabr: [RO<u32>; 8],
    reserved4: [u32; 56],
    /// Interrupt Priority (one byte per interrupt)
    pub ipr: [RW<u8>; 240],
}
impl RegisterBlock {
    /// Clears `interrupt`'s pending state
    pub fn clear_pending<I>(&self, interrupt: I)
    where
        I: Nr,
    {
        let nr = interrupt.nr();
        // Each 32-bit register covers 32 interrupts; writing a 1 bit
        // clears that interrupt's pending flag.
        let reg = usize::from(nr / 32);
        let mask = 1 << (nr % 32);
        unsafe { self.icpr[reg].write(mask) }
    }
    /// Disables `interrupt`
    pub fn disable<I>(&self, interrupt: I)
    where
        I: Nr,
    {
        let nr = interrupt.nr();
        let reg = usize::from(nr / 32);
        let mask = 1 << (nr % 32);
        unsafe { self.icer[reg].write(mask) }
    }
    /// Enables `interrupt`
    pub fn enable<I>(&self, interrupt: I)
    where
        I: Nr,
    {
        let nr = interrupt.nr();
        let reg = usize::from(nr / 32);
        let mask = 1 << (nr % 32);
        unsafe { self.iser[reg].write(mask) }
    }
    /// Gets the "priority" of `interrupt`
    ///
    /// NOTE NVIC encodes priority in the highest bits of a byte so values
    /// like `1` and `2` have the same priority. Also for NVIC priorities, a
    /// lower value (e.g. `16`) has higher priority than a larger value
    /// (e.g. `32`).
    pub fn get_priority<I>(&self, interrupt: I) -> u8
    where
        I: Nr,
    {
        let index = usize::from(interrupt.nr());
        self.ipr[index].read()
    }
    /// Is `interrupt` active or pre-empted and stacked
    pub fn is_active<I>(&self, interrupt: I) -> bool
    where
        I: Nr,
    {
        let nr = interrupt.nr();
        let reg = usize::from(nr / 32);
        let mask = 1 << (nr % 32);
        (self.iabr[reg].read() & mask) != 0
    }
    /// Checks if `interrupt` is enabled
    pub fn is_enabled<I>(&self, interrupt: I) -> bool
    where
        I: Nr,
    {
        let nr = interrupt.nr();
        let reg = usize::from(nr / 32);
        let mask = 1 << (nr % 32);
        (self.iser[reg].read() & mask) != 0
    }
    /// Checks if `interrupt` is pending
    pub fn is_pending<I>(&self, interrupt: I) -> bool
    where
        I: Nr,
    {
        let nr = interrupt.nr();
        let reg = usize::from(nr / 32);
        let mask = 1 << (nr % 32);
        (self.ispr[reg].read() & mask) != 0
    }
    /// Forces `interrupt` into pending state
    pub fn set_pending<I>(&self, interrupt: I)
    where
        I: Nr,
    {
        let nr = interrupt.nr();
        let reg = usize::from(nr / 32);
        let mask = 1 << (nr % 32);
        unsafe { self.ispr[reg].write(mask) }
    }
    /// Sets the "priority" of `interrupt` to `prio`
    ///
    /// NOTE See `get_priority` method for an explanation of how NVIC
    /// priorities work.
    pub unsafe fn set_priority<I>(&self, interrupt: I, prio: u8)
    where
        I: Nr,
    {
        let index = usize::from(interrupt.nr());
        self.ipr[index].write(prio)
    }
}
| true |
9a201df698d5a5f383823b76ad2ba0a5b3768c21
|
Rust
|
dtolnay/star-history
|
/src/main.rs
|
UTF-8
| 16,636 | 2.59375 | 3 |
[
"MIT",
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
] |
permissive
|
//! [![github]](https://github.com/dtolnay/star-history) [![crates-io]](https://crates.io/crates/star-history) [![docs-rs]](https://docs.rs/star-history)
//!
//! [github]: https://img.shields.io/badge/github-8da0cb?style=for-the-badge&labelColor=555555&logo=github
//! [crates-io]: https://img.shields.io/badge/crates.io-fc8d62?style=for-the-badge&labelColor=555555&logo=rust
//! [docs-rs]: https://img.shields.io/badge/docs.rs-66c2a5?style=for-the-badge&labelColor=555555&logo=docs.rs
#![allow(
clippy::cast_lossless,
clippy::default_trait_access,
clippy::let_underscore_untyped,
// Clippy bug: https://github.com/rust-lang/rust-clippy/issues/7422
clippy::nonstandard_macro_braces,
clippy::similar_names,
clippy::single_match_else,
clippy::too_many_lines,
clippy::toplevel_ref_arg,
clippy::uninlined_format_args,
)]
mod log;
use crate::log::Log;
use chrono::{DateTime, Duration, Utc};
use reqwest::blocking::Client;
use reqwest::header::{AUTHORIZATION, USER_AGENT};
use serde::de::{self, Deserialize, Deserializer, IgnoredAny, MapAccess, SeqAccess, Visitor};
use serde_derive::{Deserialize, Serialize};
use std::cmp::{self, Ordering};
use std::collections::{BTreeMap as Map, BTreeSet as Set, VecDeque};
use std::env;
use std::fmt::{self, Display};
use std::fs;
use std::io;
use std::marker::PhantomData;
use std::mem;
use std::process;
use thiserror::Error;
static VERSION: &str = concat!("star-history ", env!("CARGO_PKG_VERSION"));
static HELP: &str = concat!(
"star-history ",
env!("CARGO_PKG_VERSION"),
"
David Tolnay <[email protected]>
Produce a graph showing number of GitHub stars of a user or repo over time.
USAGE:
gh auth login
star-history [USER ...] [USER/REPO ...]
EXAMPLES:
star-history dtolnay
star-history dtolnay/syn dtolnay/quote
star-history serde-rs/serde
",
);
static MISSING_TOKEN: &str = "\
Error: GitHub auth token is not set up.
(Expected config file: {{path}})
Run `gh auth login` to store a GitHub login token. The `gh` CLI
can be installed from <https://cli.github.com>.
If you prefer not to use the `gh` CLI, you can instead provide
a token to star-history through the GITHUB_TOKEN environment
variable. Head to <https://github.com/settings/tokens> and click
\"Generate new token (classic)\". The default public access
permission is sufficient -- you can leave all the checkboxes
empty. Save the generated token somewhere like ~/.githubtoken
and use `export GITHUB_TOKEN=$(cat ~/.githubtoken)`.
";
/// All failure modes surfaced to the user; `thiserror` derives `Display`
/// from the `#[error]` attributes and `From` impls from `#[from]`.
#[derive(Error, Debug)]
enum Error {
    // An error string reported inside the GraphQL response body.
    #[error("Error from GitHub api: {0}")]
    GitHub(String),
    #[error("failed to decode response body")]
    DecodeResponse(#[source] serde_json::Error),
    #[error("no such user: {0}")]
    NoSuchUser(String),
    #[error("no such repository: {0}/{1}")]
    NoSuchRepo(String, String),
    #[error(transparent)]
    GhToken(#[from] gh_token::Error),
    #[error(transparent)]
    Reqwest(#[from] reqwest::Error),
    #[error(transparent)]
    Io(#[from] io::Error),
}
type Result<T, E = Error> = std::result::Result<T, E>;
/// One line in the output graph: either all of a user's repositories
/// combined (`Owner`) or a single `owner/repo` pair (`Repo`).
#[derive(Eq, Clone)]
enum Series {
    Owner(String),
    Repo(String, String),
}
impl Display for Series {
    /// Renders as `owner` or `owner/repo`, matching the CLI argument form.
    fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        match self {
            Series::Owner(owner) => write!(formatter, "{}", owner),
            Series::Repo(owner, repo) => write!(formatter, "{}/{}", owner, repo),
        }
    }
}
impl Ord for Series {
fn cmp(&self, other: &Self) -> Ordering {
match (self, other) {
(Series::Owner(lowner), Series::Owner(rowner)) => {
lowner.to_lowercase().cmp(&rowner.to_lowercase())
}
(Series::Repo(lowner, lrepo), Series::Repo(rowner, rrepo)) => {
(lowner.to_lowercase(), lrepo.to_lowercase())
.cmp(&(rowner.to_lowercase(), rrepo.to_lowercase()))
}
(Series::Owner(_), Series::Repo(..)) => Ordering::Less,
(Series::Repo(..), Series::Owner(_)) => Ordering::Greater,
}
}
}
impl PartialOrd for Series {
    // Delegates to the total order defined by `Ord`.
    fn partial_cmp(&self, other: &Series) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}
impl PartialEq for Series {
    // Equality is derived from the case-insensitive ordering so that
    // `PartialEq` stays consistent with `Ord` (required by `Eq`).
    fn eq(&self, other: &Series) -> bool {
        self.cmp(other) == Ordering::Equal
    }
}
/// GraphQL pagination cursor; `None` means "start from the beginning" and
/// serializes transparently as the inner value.
#[derive(Serialize, Deserialize, Debug)]
#[serde(transparent)]
struct Cursor(Option<String>);
impl Display for Cursor {
    /// Renders the cursor as a GraphQL literal: a double-quoted string,
    /// or the bare keyword `null` when absent.
    fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        match &self.0 {
            Some(cursor) => write!(formatter, "\"{}\"", cursor),
            None => write!(formatter, "null"),
        }
    }
}
// A unit of pending GraphQL work: fetch the next page of `series`.
struct Work {
    series: Series,
    cursor: Cursor,
}
// Body of a GraphQL POST request.
#[derive(Serialize)]
struct Request {
    query: String,
}
// Top-level GraphQL response envelope.
#[derive(Deserialize, Debug)]
struct Response {
    // Present on transport-level failures (e.g. bad credentials).
    message: Option<String>,
    #[serde(default, deserialize_with = "deserialize_data")]
    data: VecDeque<Data>,
    #[serde(default)]
    errors: Vec<Message>,
}
#[derive(Deserialize, Debug)]
struct Message {
    message: String,
}
// One aliased entry of the response `data` object; `None` inner values
// mean GitHub resolved the alias to null (user/repo not found).
#[derive(Debug)]
enum Data {
    Owner(Option<Owner>),
    Repo(Option<Repo>),
}
#[derive(Deserialize, Debug)]
struct Owner {
    login: String,
    repositories: Repositories,
}
// A page of a user's repositories.
#[derive(Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
struct Repositories {
    page_info: PageInfo,
    nodes: Vec<Repo>,
}
#[derive(Deserialize, Debug)]
struct Repo {
    name: String,
    owner: Account,
    // Absent when the repo came from an owner listing; a follow-up repo
    // query fills it in.
    stargazers: Option<Stargazers>,
}
#[derive(Deserialize, Ord, PartialOrd, Eq, PartialEq, Clone, Default, Debug)]
struct Account {
    login: String,
}
// A page of stargazer events for one repository.
#[derive(Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
struct Stargazers {
    page_info: PageInfo,
    #[serde(deserialize_with = "non_nulls")]
    edges: Vec<Star>,
}
// One star event: who starred and when; ordered by time first so a
// BTreeSet of Stars is chronological.
#[derive(Deserialize, Ord, PartialOrd, Eq, PartialEq, Clone, Debug)]
struct Star {
    #[serde(rename = "starredAt")]
    time: DateTime<Utc>,
    node: Account,
}
// GraphQL pagination metadata.
#[derive(Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
struct PageInfo {
    has_next_page: bool,
    end_cursor: Cursor,
}
/// Deserializes the GraphQL `data` object into an ordered queue of `Data`
/// entries, tolerating a `null`/absent `data` field.
fn deserialize_data<'de, D>(deserializer: D) -> Result<VecDeque<Data>, D::Error>
where
    D: Deserializer<'de>,
{
    struct ResponseVisitor;
    impl<'de> Visitor<'de> for ResponseVisitor {
        type Value = VecDeque<Data>;
        fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
            formatter.write_str("Map<String, Data>")
        }
        // Keys are the aliases generated by query_owner/query_repo
        // ("owner0", "repo3", ...); the prefix selects the payload type.
        fn visit_map<A>(self, mut map: A) -> Result<Self::Value, A::Error>
        where
            A: MapAccess<'de>,
        {
            let mut data = VecDeque::new();
            while let Some(key) = map.next_key::<String>()? {
                if key.starts_with("owner") {
                    let owner = map.next_value::<Option<Owner>>()?;
                    data.push_back(Data::Owner(owner));
                } else if key.starts_with("repo") {
                    let repo = map.next_value::<Option<Repo>>()?;
                    data.push_back(Data::Repo(repo));
                } else {
                    // Unknown alias: consume and ignore the value.
                    map.next_value::<IgnoredAny>()?;
                }
            }
            Ok(data)
        }
        // `data: null` in the response deserializes to an empty queue.
        fn visit_unit<E>(self) -> Result<Self::Value, E>
        where
            E: de::Error,
        {
            Ok(VecDeque::new())
        }
    }
    deserializer.deserialize_any(ResponseVisitor)
}
/// Deserializes a JSON array of nullable elements, silently dropping the
/// `null` entries (GitHub returns null edges for stargazers it can no
/// longer resolve).
fn non_nulls<'de, D, T>(deserializer: D) -> Result<Vec<T>, D::Error>
where
    D: Deserializer<'de>,
    T: Deserialize<'de>,
{
    // PhantomData carries the element type without storing a T.
    struct NonNullsVisitor<T>(PhantomData<fn() -> T>);
    impl<'de, T> Visitor<'de> for NonNullsVisitor<T>
    where
        T: Deserialize<'de>,
    {
        type Value = Vec<T>;
        fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
            formatter.write_str("array")
        }
        fn visit_seq<A>(self, mut seq: A) -> Result<Self::Value, A::Error>
        where
            A: SeqAccess<'de>,
        {
            let mut vec = Vec::new();
            while let Some(next) = seq.next_element::<Option<T>>()? {
                // `extend` on an Option pushes Some(x) and skips None.
                vec.extend(next);
            }
            Ok(vec)
        }
    }
    let visitor = NonNullsVisitor(PhantomData);
    deserializer.deserialize_seq(visitor)
}
/// Entry point: runs `try_main`, logging any error and exiting nonzero.
fn main() {
    let mut log = Log::new();
    match try_main(&mut log) {
        Ok(()) => {}
        Err(err) => {
            log.error(err);
            process::exit(1);
        }
    }
}
/// Fetches star history for every user/repo named on the command line and
/// opens an HTML graph of the results.
///
/// Flow: parse args -> obtain a GitHub token -> repeatedly POST batched
/// GraphQL queries until all pagination work is drained -> aggregate star
/// events per series -> emit a JS `data` array into the bundled index.html
/// and open it in the browser.
fn try_main(log: &mut Log) -> Result<()> {
    // Parse CLI arguments into Series values ("--help"/"--version" exit early).
    let mut args = Vec::new();
    for arg in env::args().skip(1) {
        if arg == "--help" {
            print!("{}", HELP);
            process::exit(0);
        } else if arg == "--version" {
            println!("{}", VERSION);
            process::exit(0);
        }
        let mut parts = arg.splitn(2, '/');
        let owner = parts.next().unwrap();
        match parts.next() {
            Some(repo) => {
                let owner = owner.to_owned();
                let repo = repo.to_owned();
                args.push(Series::Repo(owner, repo));
            }
            None => {
                // Accept "@user" as well as "user".
                let owner = owner.strip_prefix('@').unwrap_or(owner).to_owned();
                args.push(Series::Owner(owner));
            }
        }
    }
    // Resolve a GitHub token from the `gh` CLI config; print setup help if absent.
    let github_token = match gh_token::get() {
        Ok(token) => token,
        Err(gh_token::Error::NotConfigured(path)) => {
            let path_lossy = path.to_string_lossy();
            let message = MISSING_TOKEN.replace("{{path}}", &path_lossy);
            eprint!("{}", message);
            process::exit(1);
        }
        Err(error) => return Err(Error::GhToken(error)),
    };
    let authorization = format!("bearer {}", github_token.trim());
    if args.is_empty() {
        eprint!("{}", HELP);
        process::exit(1);
    }
    // Seed one unit of pagination work and an empty star set per series.
    let mut work = Vec::new();
    let mut stars = Map::new();
    for series in &args {
        stars.insert(series.clone(), Set::new());
        work.push(Work {
            series: series.clone(),
            cursor: Cursor(None),
        });
    }
    let client = Client::new();
    while !work.is_empty() {
        // Take up to 50 work items and combine them into one aliased query.
        let batch_size = cmp::min(work.len(), 50);
        let defer = work.split_off(batch_size);
        let batch = mem::replace(&mut work, defer);
        let mut query = String::new();
        query += "{\n";
        for (i, work) in batch.iter().enumerate() {
            let cursor = &work.cursor;
            query += &match &work.series {
                Series::Owner(owner) => query_owner(i, owner, cursor),
                Series::Repo(owner, repo) => query_repo(i, owner, repo, cursor),
            };
        }
        query += "}\n";
        let json = client
            .post("https://api.github.com/graphql")
            .header(USER_AGENT, "dtolnay/star-history")
            .header(AUTHORIZATION, &authorization)
            .json(&Request { query })
            .send()?
            .text()?;
        let response: Response = serde_json::from_str(&json).map_err(Error::DecodeResponse)?;
        if let Some(message) = response.message {
            return Err(Error::GitHub(message));
        }
        // Per-alias GraphQL errors are logged but not fatal.
        for err in response.errors {
            log.error(Error::GitHub(err.message));
        }
        // Results come back in the same order the batch was written, so
        // zip them against the batch to recover which series each belongs to.
        let mut data = response.data;
        let mut queue = batch.into_iter();
        while let Some(node) = data.pop_front() {
            let id = queue.next();
            match node {
                Data::Owner(None) | Data::Repo(None) => match id.unwrap().series {
                    Series::Owner(owner) => return Err(Error::NoSuchUser(owner)),
                    Series::Repo(owner, repo) => return Err(Error::NoSuchRepo(owner, repo)),
                },
                Data::Owner(Some(node)) => {
                    let owner = node.login;
                    // Each repo page becomes synthetic Repo data (stargazers
                    // will be fetched by a follow-up work item).
                    for repo in node.repositories.nodes {
                        data.push_back(Data::Repo(Some(repo)));
                    }
                    if node.repositories.page_info.has_next_page {
                        work.push(Work {
                            series: Series::Owner(owner),
                            cursor: node.repositories.page_info.end_cursor,
                        });
                    }
                }
                Data::Repo(Some(node)) => {
                    let owner = node.owner.login;
                    let repo = node.name;
                    if let Some(stargazers) = node.stargazers {
                        // Credit each star to both the owner series and the
                        // repo series (sets dedupe by (time, login)).
                        let series = Series::Owner(owner.clone());
                        let owner_stars = stars.entry(series).or_default();
                        for star in &stargazers.edges {
                            owner_stars.insert(star.clone());
                        }
                        let series = Series::Repo(owner.clone(), repo.clone());
                        let repo_stars = stars.entry(series).or_default();
                        for star in &stargazers.edges {
                            repo_stars.insert(star.clone());
                        }
                        if stargazers.page_info.has_next_page {
                            work.push(Work {
                                series: Series::Repo(owner, repo),
                                cursor: stargazers.page_info.end_cursor,
                            });
                        }
                    } else {
                        // Repo came from an owner listing without stargazer
                        // data; queue a dedicated stargazer query.
                        work.push(Work {
                            series: Series::Repo(owner, repo),
                            cursor: Cursor(None),
                        });
                    }
                }
            }
        }
        log.tick();
    }
    // Add sentinel points so each curve starts at zero and extends to "now".
    let now = Utc::now();
    for set in stars.values_mut() {
        if let Some(first) = set.iter().next() {
            let first_time = first.time;
            set.insert(Star {
                time: first_time - Duration::seconds(1),
                node: Default::default(),
            });
        }
        match set.iter().next_back() {
            Some(last) if last.time >= now => {}
            _ => {
                set.insert(Star {
                    time: now,
                    node: Default::default(),
                });
            }
        }
    }
    // Serialize the series into the JS `data` array consumed by index.html.
    let mut data = String::new();
    data += "var data = [\n";
    for arg in &args {
        data += "      {\"name\":\"";
        data += &arg.to_string();
        data += "\", \"values\":[\n";
        let stars = &stars[arg];
        for (i, star) in stars.iter().enumerate() {
            data += "        {\"time\":";
            data += &star.time.timestamp().to_string();
            data += ", \"stars\":";
            // The trailing "now" sentinel repeats the previous count.
            data += &(i.saturating_sub((star.time == now) as usize)).to_string();
            data += "},\n";
        }
        data += "      ]},\n";
    }
    data += "    ];";
    let html = include_str!("index.html").replace("var data = [];", &data);
    let dir = env::temp_dir().join("star-history");
    fs::create_dir_all(&dir)?;
    let path = dir.join(format!("{}.html", now.timestamp_millis()));
    fs::write(&path, html)?;
    // Fall back to printing the path when no browser opener is available.
    if opener::open(&path).is_err() {
        writeln!(log, "graph written to {}", path.display());
    }
    Ok(())
}
/// Builds the aliased GraphQL fragment fetching one page of `login`'s
/// public, non-fork repositories. `$cursor` interpolates via `Cursor`'s
/// `Display` impl as either `"..."` or the literal `null`.
fn query_owner(i: usize, login: &str, cursor: &Cursor) -> String {
    r#"
        owner$i: repositoryOwner(login: "$login") {
            login
            repositories(after: $cursor, first: 100, isFork: false, privacy: PUBLIC, ownerAffiliations: [OWNER]) {
                pageInfo {
                    hasNextPage
                    endCursor
                }
                nodes {
                    name
                    owner {
                        login
                    }
                }
            }
        }
    "#
    .replace("$i", &i.to_string())
    .replace("$login", login)
    .replace("$cursor", &cursor.to_string())
}
/// Builds the aliased GraphQL fragment fetching one page of stargazers
/// (with timestamps) for `owner/repo`. See `query_owner` for how the
/// placeholders are substituted.
fn query_repo(i: usize, owner: &str, repo: &str, cursor: &Cursor) -> String {
    r#"
        repo$i: repository(owner: "$owner", name: "$repo") {
            name
            owner {
                login
            }
            stargazers(after: $cursor, first: 100) {
                pageInfo {
                    hasNextPage
                    endCursor
                }
                edges {
                    node {
                        login
                    }
                    starredAt
                }
            }
        }
    "#
    .replace("$i", &i.to_string())
    .replace("$owner", owner)
    .replace("$repo", repo)
    .replace("$cursor", &cursor.to_string())
}
| true |
739feb43801f61c210f69d96ca9b97a99c5d9563
|
Rust
|
binh-vu/semantic-modeling
|
/mira/src/assembling/other_models/bayes_model.rs
|
UTF-8
| 2,626 | 2.640625 | 3 |
[
"MIT"
] |
permissive
|
use utils::dict_get;
use rayon::prelude::*;
use assembling::features::statistic::Statistic;
use assembling::models::annotator::Annotator;
use std::collections::HashMap;
use algorithm::data_structure::graph::*;
/// Naive-Bayes-style scorer for candidate semantic models.
pub struct BayesModel<'a> {
    // Co-occurrence statistics collected from the training semantic models.
    stat: &'a Statistic,
    // Maps a semantic-model id to its index into `stype_scores`.
    sm_index: &'a HashMap<String, usize>,
    // Per source model: attribute label -> (class_uri, predicate) -> score.
    stype_scores: Vec<HashMap<String, HashMap<(String, String), f32>>>,
    // Smoothing prior used when a statistic is missing.
    default_prior: f32
}
impl<'a> BayesModel<'a> {
    /// Builds a model from an annotator, caching each source model's
    /// per-attribute semantic-type scores keyed by (class_uri, predicate).
    pub fn new(annotator: &'a Annotator<'a>) -> BayesModel<'a> {
        let stype_scores = annotator.sms.iter()
            .map(|sm| {
                let mut scores: HashMap<String, HashMap<(String, String), f32>> = Default::default();
                for attr in &sm.attrs {
                    let mut score: HashMap<(String, String), f32> = Default::default();
                    for stype in &attr.semantic_types {
                        score.insert((stype.class_uri.clone(), stype.predicate.clone()), stype.score);
                    }
                    scores.insert(attr.label.clone(), score);
                }
                scores
            })
            .collect();
        BayesModel {
            stat: &annotator.statistic,
            sm_index: &annotator.sm_index,
            stype_scores,
            default_prior: 0.1
        }
    }
    /// Scores each candidate graph for semantic model `sm_id` in parallel,
    /// returning `(graph, probability)` pairs. The probability is the
    /// exponentiated sum of the root-node terms and their subtree terms.
    pub fn predict_sm_probs(&self, sm_id: &str, graphs: Vec<Graph>) -> Vec<(Graph, f64)> {
        let sm_idx = self.sm_index[sm_id];
        graphs.into_par_iter()
            .map(|g| {
                let mut log_prob = 0.0;
                for r in g.iter_nodes() {
                    if r.n_incoming_edges == 0 {
                        // Root-node term. NOTE(review): this term is added
                        // without `.ln()` unlike the edge terms below —
                        // presumably `p_n` already returns a log value;
                        // confirm against `Statistic::p_n`.
                        log_prob += self.stat.p_n(&r.label, self.default_prior);
                        // BUG FIX: the recursive subtree log-probability was
                        // previously computed but its (pure) return value was
                        // discarded, so subtrees never affected the score.
                        log_prob += self.log_prob_tree_given_node(sm_idx, &r, &g);
                    }
                }
                (g, log_prob.exp() as f64)
            })
            .collect::<Vec<_>>()
    }
    /// Recursively accumulates the log-probability of the subtree rooted at
    /// `node`: semantic-type scores for data leaves, link/object statistics
    /// for internal nodes.
    fn log_prob_tree_given_node(&self, sm_idx: usize, node: &Node, graph: &Graph) -> f32 {
        let mut log_prob = 0.0;
        for e in node.iter_outgoing_edges(graph) {
            let target = e.get_target_node(graph);
            if target.is_data_node() {
                log_prob += dict_get(&self.stype_scores[sm_idx][&target.label], &node.label, &e.label).unwrap();
            } else {
                log_prob += (self.stat.p_l_given_s(&node.label, &e.label, self.default_prior) + self.stat.p_o_given_sl(&node.label, &e.label, &target.label, self.default_prior)).ln();
                log_prob += self.log_prob_tree_given_node(sm_idx, target, graph);
            }
        }
        log_prob
    }
}
| true |
0b5ac5cd94ec76e25f14da9818ca856f07ab70a3
|
Rust
|
elrnv/buffer
|
/dyn-derive/src/lib.rs
|
UTF-8
| 25,353 | 2.828125 | 3 |
[
"MIT",
"Apache-2.0"
] |
permissive
|
use proc_macro2::{Span, TokenStream};
use quote::{quote, TokenStreamExt};
use std::collections::HashMap;
use syn::parse::{Parse, ParseStream};
use syn::punctuated::Punctuated;
use syn::*;
// Maps a generic parameter name (e.g. `T`) to its trait bounds.
type GenericsMap = HashMap<Ident, Punctuated<TypeParamBound, Token![+]>>;
/// Settings parsed from the `#[dyn_trait(...)]` attribute arguments.
#[derive(Debug)]
struct Config {
    // Crate that provides the `VTable`/`Has*` support traits.
    dyn_crate_name: String,
    // Suffix appended to the trait name to form the vtable struct name.
    suffix: String,
    // NOTE(review): parsed from the attribute but not read anywhere in
    // the visible code — confirm whether it is used elsewhere.
    build_vtable_only: bool,
}
impl Default for Config {
    fn default() -> Self {
        Config {
            dyn_crate_name: String::from("dyn"),
            suffix: String::from("VTable"),
            build_vtable_only: false,
        }
    }
}
/// One attribute argument: either a bare flag (`name`) or a key/value
/// pair (`name = "literal"`).
#[derive(Debug)]
struct DynAttrib {
    ident: Ident,
    eq: Option<Token![=]>,
    value: Option<Lit>,
}
impl Parse for DynAttrib {
    fn parse(input: ParseStream) -> Result<Self> {
        let ident = input.parse()?;
        // `Option` parses succeed with None when the token is absent, so a
        // bare flag parses cleanly.
        let eq = input.parse()?;
        let value = input.parse()?;
        Ok(DynAttrib { ident, eq, value })
    }
}
impl Parse for Config {
    /// Parses a comma-separated attribute list, overriding defaults for
    /// the attributes it recognizes and silently ignoring the rest.
    fn parse(input: ParseStream) -> Result<Self> {
        let mut config = Config::default();
        let attribs: Punctuated<DynAttrib, Token![,]> =
            Punctuated::parse_separated_nonempty(input)?;
        for attrib in attribs.iter() {
            let name = attrib.ident.to_string();
            match (name.as_str(), &attrib.value) {
                ("build_vtable_only", None) => config.build_vtable_only = true,
                ("dyn_crate_name", Some(Lit::Str(ref lit))) => {
                    config.dyn_crate_name = lit.value().clone()
                }
                ("suffix", Some(Lit::Str(ref lit))) => config.suffix = lit.value().clone(),
                _ => {}
            }
        }
        Ok(config)
    }
}
/// Attribute macro entry point: re-emits the annotated trait unchanged and
/// appends the generated vtable struct plus its support impls.
#[proc_macro_attribute]
pub fn dyn_trait(
    attr: proc_macro::TokenStream,
    item: proc_macro::TokenStream,
) -> proc_macro::TokenStream {
    let config: Config = syn::parse(attr).expect("Failed to parse attributes");
    let item_trait: ItemTrait =
        syn::parse(item).expect("the dyn_trait attribute applies only to trait definitions");
    let dyn_items = construct_dyn_items(&item_trait, &config);
    let tokens = quote! {
        #item_trait
        #dyn_items
    };
    tokens.into()
}
/// Generates the vtable struct, `Has*` trait impls, and `VTable<T>` impl
/// for a trait annotated with `#[dyn_trait]`.
///
/// Only recognized supertraits (Clone, PartialEq, Eq, Hash, Debug) get a
/// slot in the vtable; each slot is a tuple of type-erased function
/// pointers operating on raw byte slices.
fn construct_dyn_items(item_trait: &ItemTrait, config: &Config) -> TokenStream {
    assert!(
        item_trait.generics.params.is_empty(),
        "trait generics are not supported by dyn_trait"
    );
    assert!(
        item_trait.generics.where_clause.is_none(),
        "traits with where clauses are not supported by dyn_trait"
    );
    // Byte Helpers
    // These are emitted inside build_vtable() so the generated code can
    // reinterpret raw byte slices as concrete sized types.
    let from_bytes_fn: ItemFn = parse_quote! {
        #[inline]
        unsafe fn from_bytes<S: 'static>(bytes: &[u8]) -> &S {
            assert_eq!(bytes.len(), std::mem::size_of::<S>());
            &*(bytes.as_ptr() as *const S)
        }
    };
    let from_bytes_mut_fn: ItemFn = parse_quote! {
        #[inline]
        unsafe fn from_bytes_mut<S: 'static>(bytes: &mut [u8]) -> &mut S {
            assert_eq!(bytes.len(), std::mem::size_of::<S>());
            &mut *(bytes.as_mut_ptr() as *mut S)
        }
    };
    let as_bytes_fn: ItemFn = parse_quote! {
        #[inline]
        unsafe fn as_bytes<S: 'static>(s: &S) -> &[u8] {
            // This is safe since any memory can be represented by bytes and we are looking at
            // sized types only.
            unsafe { std::slice::from_raw_parts(s as *const S as *const u8, std::mem::size_of::<S>()) }
        }
    };
    let box_into_box_bytes_fn: ItemFn = parse_quote! {
        #[inline]
        fn box_into_box_bytes<S: 'static>(b: Box<S>) -> Box<[u8]> {
            let byte_ptr = Box::into_raw(b) as *mut u8;
            // This is safe since any memory can be represented by bytes and we are looking at
            // sized types only.
            unsafe { Box::from_raw(std::slice::from_raw_parts_mut(byte_ptr, std::mem::size_of::<S>())) }
        }
    };
    // Implement known trait functions.
    // Each entry pairs the type-erased function-pointer type (stored in the
    // vtable) with the generic shim whose instantiation fills the slot.
    let clone_fn: (TypeBareFn, ItemFn) = (
        parse_quote! { unsafe fn (&[u8]) -> Box<[u8]> },
        parse_quote! {
            #[inline]
            unsafe fn clone_fn<S: Clone + 'static>(src: &[u8]) -> Box<[u8]> {
                let typed_src: &S = from_bytes(src);
                box_into_box_bytes(Box::new(typed_src.clone()))
            }
        }
    );
    let clone_from_fn: (TypeBareFn, ItemFn) = (
        parse_quote! { unsafe fn (&mut [u8], &[u8]) },
        parse_quote! {
            #[inline]
            unsafe fn clone_from_fn<S: Clone + 'static>(dst: &mut [u8], src: &[u8]) {
                let typed_src: &S = from_bytes(src);
                let typed_dst: &mut S = from_bytes_mut(dst);
                typed_dst.clone_from(typed_src);
            }
        }
    );
    let clone_into_raw_fn: (TypeBareFn, ItemFn) = (
        parse_quote! { unsafe fn (&[u8], &mut [u8]) },
        parse_quote! {
            #[inline]
            unsafe fn clone_into_raw_fn<S: Clone + 'static>(src: &[u8], dst: &mut [u8]) {
                let typed_src: &S = from_bytes(src);
                let cloned = S::clone(typed_src);
                let cloned_bytes = as_bytes(&cloned);
                dst.copy_from_slice(cloned_bytes);
                // The clone's bytes now live in `dst`; suppress its drop.
                let _ = std::mem::ManuallyDrop::new(cloned);
            }
        }
    );
    let eq_fn: (TypeBareFn, ItemFn) = (
        parse_quote! { unsafe fn (&[u8], &[u8]) -> bool },
        parse_quote! {
            #[inline]
            unsafe fn eq_fn<S: PartialEq + 'static>(a: &[u8], b: &[u8]) -> bool {
                let (a, b): (&S, &S) = (from_bytes(a), from_bytes(b));
                a.eq(b)
            }
        }
    );
    let hash_fn: (TypeBareFn, ItemFn) = (
        parse_quote! { unsafe fn (&[u8], &mut dyn std::hash::Hasher) },
        parse_quote! {
            #[inline]
            unsafe fn hash_fn<S: std::hash::Hash + 'static>(bytes: &[u8], mut state: &mut dyn std::hash::Hasher) {
                let typed_data: &S = from_bytes(bytes);
                typed_data.hash(&mut state)
            }
        }
    );
    let fmt_fn: (TypeBareFn, ItemFn) = (
        parse_quote! { unsafe fn (&[u8], &mut std::fmt::Formatter) -> Result<(), std::fmt::Error> },
        parse_quote! {
            #[inline]
            unsafe fn fmt_fn<S: std::fmt::Debug + 'static>(bytes: &[u8], f: &mut std::fmt::Formatter) -> Result<(), std::fmt::Error> {
                let typed_data: &S = from_bytes(bytes);
                typed_data.fmt(f)
            }
        }
    );
    // Registry of supertraits this macro knows how to type-erase.
    let mut known_traits: HashMap<Path, Vec<(TypeBareFn, ItemFn)>> = HashMap::new();
    known_traits.insert(parse_quote! { Clone }, vec![clone_fn, clone_from_fn, clone_into_raw_fn]);
    known_traits.insert(parse_quote! { PartialEq }, vec![eq_fn]);
    known_traits.insert(parse_quote! { Eq }, vec![]);
    known_traits.insert(parse_quote! { std::hash::Hash }, vec![hash_fn]);
    known_traits.insert(parse_quote! { std::fmt::Debug }, vec![fmt_fn]);
    let trait_name = item_trait.ident.clone();
    let vtable_name = Ident::new(&format!("{}{}", &trait_name, config.suffix), Span::call_site());
    let vis = item_trait.vis.clone();
    // Construct the vtable of traits and their associated functions used by this trait item.
    let vtable: Vec<_> = item_trait.supertraits.iter().filter_map(|bound| {
        match bound {
            TypeParamBound::Trait(bound) => {
                if bound.lifetimes.is_some()
                    || bound.modifier != TraitBoundModifier::None
                {
                    // We are looking for recognizable traits only
                    None
                } else {
                    let seg = bound.path.segments.first().unwrap();
                    if !seg.arguments.is_empty() {
                        None
                    } else {
                        known_traits.get_key_value(&bound.path).map(|(path, table)|
                            (path.clone(), table.clone())
                        )
                    }
                }
            }
            _ => None,
        }
    }).collect();
    // One tuple field per recognized supertrait, holding its fn pointers.
    let vtable_fields: Punctuated<Field, Token![,]> = vtable.iter().map(|(_, table)| {
        let fns: Punctuated<Type, Token![,]> = table.iter().map(|(ty, _)| Type::BareFn(ty.clone())).collect();
        Field {
            attrs: Vec::new(),
            vis: Visibility::Inherited,
            ident: None,
            colon_token: None,
            ty: parse_quote! { (#fns) },
        }
    }).collect();
    let crate_name = Ident::new(&config.dyn_crate_name, Span::call_site());
    // Implement the support crate's `Has<Trait>` accessor traits, exposing
    // each stored fn pointer by name.
    let mut has_impls = TokenStream::new();
    for (table_idx_usize, (path, table)) in vtable.iter().enumerate() {
        let table_idx = syn::Index::from(table_idx_usize);
        let mut methods = TokenStream::new();
        if table.len() == 1 {
            // Single-fn slot: the tuple field IS the fn pointer.
            let (fn_type, fn_def) = table.first().unwrap();
            let fn_name = &fn_def.sig.ident;
            methods.append_all(quote!{
                #[inline]
                fn #fn_name ( &self ) -> &#fn_type { &self.#table_idx }
            });
        } else {
            for (fn_idx_usize, (fn_type, fn_def)) in table.iter().enumerate() {
                let fn_idx = syn::Index::from(fn_idx_usize);
                let fn_name = &fn_def.sig.ident;
                methods.append_all(quote!{
                    #[inline]
                    fn #fn_name ( &self ) -> &#fn_type { &(self.#table_idx).#fn_idx }
                });
            }
        }
        let supertrait_name = path.segments.last().unwrap().ident.clone();
        let has_trait = Ident::new(&format!("Has{}", supertrait_name), Span::call_site());
        has_impls.append_all(quote! {
            impl #crate_name :: #has_trait for #vtable_name {
                #methods
            }
        })
    }
    // Expression filling each vtable tuple with shims instantiated at T.
    let vtable_constructor = vtable.iter().map(|(_, fntable)| {
        let fields = fntable.iter().map(|(_, fn_def)| {
            let fn_name = fn_def.sig.ident.clone();
            let expr: Expr = parse_quote! { #fn_name::<T> };
            expr
        }).collect::<Punctuated<Expr, Token![,]>>();
        let tuple: Expr = parse_quote! { (#fields) };
        tuple
    }).collect::<Punctuated<Expr, Token![,]>>();
    let fns_defs = vtable.iter().flat_map(|(_, fntable)| {
        fntable.iter().map(|(_, fn_def)| {
            parse_quote! { #fn_def }
        })
    }).collect::<Vec<Stmt>>();
    let mut build_vtable_block = TokenStream::new();
    for fn_def in fns_defs.iter() {
        build_vtable_block.append_all(quote! { #fn_def });
    }
    let res = quote! {
        #[derive(Copy, Clone)]
        #vis struct #vtable_name (#vtable_fields);
        #has_impls
        impl<T: #trait_name + 'static> #crate_name::VTable<T> for #vtable_name {
            #[inline]
            fn build_vtable() -> #vtable_name {
                #from_bytes_fn
                #from_bytes_mut_fn
                #as_bytes_fn
                #box_into_box_bytes_fn
                #build_vtable_block
                #vtable_name(#vtable_constructor)
            }
        }
    };
    res
}
/// Attribute macro for a trait method: rewrites its signature into the
/// type-erased `*_bytes` form via `dyn_fn_sig` and re-emits the method.
#[proc_macro_attribute]
pub fn dyn_trait_method(
    _attr: proc_macro::TokenStream,
    item: proc_macro::TokenStream,
) -> proc_macro::TokenStream {
    let mut trait_method: TraitItemMethod = syn::parse(item)
        .expect("the dyn_trait_function attribute applies only to trait function definitions only");
    trait_method.sig = dyn_fn_sig(trait_method.sig);
    let tokens = quote! { #trait_method };
    tokens.into()
}
/// Convert a function signature by replacing self types with bytes.
fn dyn_fn_sig(sig: Signature) -> Signature {
assert!(
sig.constness.is_none(),
"const functions not supported by dyn_trait"
);
assert!(
sig.asyncness.is_none(),
"async functions not supported by dyn_trait"
);
assert!(
sig.abi.is_none(),
"extern functions not supported by dyn_trait"
);
assert!(
sig.variadic.is_none(),
"variadic functions not supported by dyn_trait"
);
let dyn_name = format!("{}_bytes", sig.ident);
let dyn_ident = Ident::new(&dyn_name, sig.ident.span().clone());
let mut generics = GenericsMap::new();
// Convert generics into `dyn Trait` if possible.
for gen in sig.generics.params.iter() {
match gen {
GenericParam::Type(ty) => {
assert!(
ty.attrs.is_empty(),
"type parameter attributes are not supported by dyn_trait"
);
assert!(
ty.colon_token.is_some(),
"unbound type parameters are not supported by dyn_trait"
);
assert!(
ty.eq_token.is_none() && ty.default.is_none(),
"default type parameters are not supported by dyn_trait"
);
generics.insert(ty.ident.clone(), ty.bounds.clone());
}
GenericParam::Lifetime(_) => {
panic!("lifetime parameters in trait functions are not supported by dyn_trait");
}
GenericParam::Const(_) => {
panic!("const parameters in trait functions are not supported by dyn_trait");
}
}
}
if let Some(where_clause) = sig.generics.where_clause {
for pred in where_clause.predicates.iter() {
match pred {
WherePredicate::Type(ty) => {
assert!(
ty.lifetimes.is_none(),
"lifetimes in for bindings are not supported by dyn_trait"
);
if let Type::Path(ty_path) = ty.bounded_ty.clone() {
assert!(
ty_path.qself.is_none(),
"complex trait bounds are not supported by dyn_trait"
);
assert!(
ty_path.path.leading_colon.is_none(),
"complex trait bounds are not supported by dyn_trait"
);
assert!(
ty_path.path.segments.len() != 1,
"complex trait bounds are not supported by dyn_trait"
);
let seg = ty_path.path.segments.first().unwrap();
assert!(
!seg.arguments.is_empty(),
"complex trait bounds are not supported by dyn_trait"
);
generics.insert(seg.ident.clone(), ty.bounds.clone());
}
}
WherePredicate::Lifetime(_) => {
panic!("lifetime parameters in trait functions are not supported by dyn_trait");
}
_ => {}
}
}
}
// Convert inputs.
let dyn_inputs: Punctuated<FnArg, Token![,]> = sig
.inputs
.iter()
.map(|fn_arg| {
FnArg::Typed(match fn_arg {
FnArg::Receiver(Receiver {
attrs,
reference,
mutability,
..
}) => {
let ty: Type = if let Some((_, lifetime)) = reference {
syn::parse(quote! { & #lifetime #mutability [u8] }.into()).unwrap()
} else {
syn::parse(quote! { #mutability Box<[u8]> }.into()).unwrap()
};
PatType {
attrs: attrs.to_vec(),
pat: syn::parse(quote! { _self_ }.into()).unwrap(),
colon_token: Token),
ty: Box::new(ty),
}
}
FnArg::Typed(pat_ty) => PatType {
ty: Box::new(type_to_bytes(process_generics(
*pat_ty.ty.clone(),
&generics,
))),
..pat_ty.clone()
},
})
})
.collect();
// Convert return type.
let dyn_output: Type = match sig.output {
ReturnType::Type(_, ty) => type_to_bytes(process_generics(*ty, &generics)),
ReturnType::Default => syn::parse(quote! { () }.into()).unwrap(),
};
Signature {
unsafety: Some(Token)),
ident: dyn_ident,
generics: Generics {
lt_token: None,
params: Punctuated::new(),
gt_token: None,
where_clause: None,
},
inputs: dyn_inputs,
output: ReturnType::Type(Token), Box::new(dyn_output)),
..sig
}
}
// Translate any generics occurring in a type according to the accumulated
// generics map, converting generic type parameters into trait objects.
fn process_generics(ty: Type, generics: &GenericsMap) -> Type {
    match ty {
        // A bare path in owned position may itself name a generic parameter.
        Type::Path(path) => process_generic_type_path(path, generics, true),
        // Recurse into parenthesized types.
        Type::Paren(inner) => Type::Paren(TypeParen {
            elem: Box::new(process_generics(*inner.elem, generics)),
            ..inner
        }),
        // Behind a reference/pointer the pointee may become an unsized trait object.
        Type::Reference(re) => Type::Reference(TypeReference {
            elem: Box::new(generic_ref_to_trait_object(*re.elem, generics)),
            ..re
        }),
        Type::Ptr(pointer) => Type::Ptr(TypePtr {
            elem: Box::new(generic_ref_to_trait_object(*pointer.elem, generics)),
            ..pointer
        }),
        // Everything else passes through unchanged, after verifying it does not
        // mention a generic parameter in an unsupported position.
        other => {
            check_for_unsupported_generics(&other, generics);
            other
        }
    }
}
// If the path is a bare, argument-less, single-segment name registered in the
// generics map, replace it with a trait object (`Box<dyn ...>` when `owned`,
// bare `dyn ...` otherwise); any other path passes through unchanged.
fn process_generic_type_path(ty: TypePath, generics: &GenericsMap, owned: bool) -> Type {
    let is_plain_single_segment =
        ty.path.leading_colon.is_none() && ty.path.segments.len() == 1;
    if is_plain_single_segment {
        let seg = ty.path.segments.first().unwrap();
        // Generic type parameters never carry path arguments.
        if seg.arguments.is_empty() {
            if let Some(bounds) = generics.get(&seg.ident) {
                return if owned {
                    syn::parse(quote! { Box<dyn #bounds> }.into()).unwrap()
                } else {
                    syn::parse(quote! { dyn #bounds }.into()).unwrap()
                };
            }
        }
    }
    Type::Path(ty)
}
// Convert the referent of a reference/pointer: a path naming a generic
// parameter becomes an unsized `dyn Trait`; everything else is left alone.
fn generic_ref_to_trait_object(ty: Type, generics: &GenericsMap) -> Type {
    match ty {
        Type::Path(path) => process_generic_type_path(path, generics, /* owned = */ false),
        non_path => non_path,
    }
}
// Check if there are instances of generic parameters in unsupported places,
// panicking with a descriptive message when one is found.
fn check_for_unsupported_generics(ty: &Type, generics: &GenericsMap) {
    match ty {
        Type::Array(arr) => check_for_unsupported_generics(&arr.elem, generics),
        Type::BareFn(barefn) => {
            // Check every parameter and the return type of a bare fn type.
            for input in barefn.inputs.iter() {
                check_for_unsupported_generics(&input.ty, generics);
            }
            if let ReturnType::Type(_, output_ty) = &barefn.output {
                check_for_unsupported_generics(&*output_ty, generics);
            }
        }
        Type::Group(group) => check_for_unsupported_generics(&group.elem, generics),
        Type::Paren(paren) => check_for_unsupported_generics(&paren.elem, generics),
        Type::Path(path) => {
            assert!(
                path.qself.is_none(),
                "qualified paths not supported by dyn_trait"
            );
            if path.path.leading_colon.is_none() && path.path.segments.len() == 1 {
                let seg = path.path.segments.first().unwrap();
                // Reject only a bare, argument-less path that names one of the
                // collected generic parameters; ordinary types (`u8`, `String`,
                // ...) are fine here.
                //
                // Bug fix: the previous assertion was inverted — it asserted the
                // segment *was* an argument-less path equal to "Self", which
                // panicked for every ordinary single-segment type.
                assert!(
                    !seg.arguments.is_empty() || generics.get(&seg.ident).is_none(),
                    "using a generic parameter in this context is not supported by dyn_trait"
                );
            }
        }
        Type::Ptr(ptr) => check_for_unsupported_generics(&ptr.elem, generics),
        Type::Reference(reference) => check_for_unsupported_generics(&reference.elem, generics),
        Type::Slice(slice) => check_for_unsupported_generics(&slice.elem, generics),
        Type::Tuple(tuple) => {
            for elem in tuple.elems.iter() {
                check_for_unsupported_generics(elem, generics);
            }
        }
        _ => {}
    }
}
fn type_to_bytes(ty: Type) -> Type {
// It is quite difficult to convert occurances of Self in a function signature to the
// corresponding byte representation because of composability of types. Each type containing
// self must know how to convert its contents to bytes, which is completely out of the scope
// here.
//
// However some builtin types (like arrays, tuples and slices) and std library types can be
// handled. This probably one of the reasons why trait objects don't support traits with
// functions that take in `Self` as a parameter. We will try to relax this constraint as much
// as we can in this function.
match ty {
//Type::Array(arr) => Type::Array(TypeArray {
// elem: Box::new(type_to_bytes(*arr.elem)),
// ..arr
//}),
//Type::Group(group) => Type::Group(TypeGroup {
// elem: Box::new(type_to_bytes(*group.elem),
// ..group
//}),
Type::ImplTrait(impl_trait) => Type::TraitObject(TypeTraitObject {
// Convert `impl Trait` to `dyn Trait`.
dyn_token: Some(Token)),
bounds: impl_trait.bounds,
}),
Type::Paren(paren) => Type::Paren(TypeParen {
elem: Box::new(type_to_bytes(*paren.elem)),
..paren
}),
Type::Path(path) => {
self_type_path_into(path, syn::parse(quote! { Box<[u8]> }.into()).unwrap())
}
Type::Ptr(ptr) => Type::Ptr(TypePtr {
elem: Box::new(self_to_byte_slice(*ptr.elem)),
..ptr
}),
Type::Reference(reference) => Type::Reference(TypeReference {
elem: Box::new(self_to_byte_slice(*reference.elem)),
..reference
}),
//Type::Slice(slice) => Type::Slice(TypeSlice {
// elem: Box::new(type_to_bytes(*slice.elem)),
// ..slice
//}),
//Type::Tuple(tuple) => Type::Tuple(TypeTuple {
// elems: elems.into_iter().map(|elem| type_to_bytes(elem)),
// ..tuple
//}),
pass_through => {
check_for_unsupported_self(&pass_through);
pass_through
}
}
}
// Convert a bare `Self` type path into the given replacement type; any other
// path is returned unchanged.
fn self_type_path_into(path: TypePath, into_ty: Type) -> Type {
    assert!(
        path.qself.is_none(),
        "qualified paths not supported by dyn_trait"
    );
    // `Self` is always a single, argument-less, non-absolute segment.
    let is_self = path.path.leading_colon.is_none()
        && path.path.segments.len() == 1
        && {
            let seg = path.path.segments.first().unwrap();
            seg.arguments.is_empty() && seg.ident == "Self"
        };
    if is_self {
        into_ty
    } else {
        Type::Path(path)
    }
}
// Convert a `Self` referent into the unsized byte slice `[u8]`; any other
// referent passes through unchanged.
fn self_to_byte_slice(ty: Type) -> Type {
    match ty {
        Type::Path(path) => self_type_path_into(path, syn::parse(quote! { [u8] }.into()).unwrap()),
        non_path => non_path,
    }
}
// Check if there are instances of Self in the given type, and panic if there are.
fn check_for_unsupported_self(ty: &Type) {
    match ty {
        Type::Array(arr) => check_for_unsupported_self(&arr.elem),
        Type::BareFn(barefn) => {
            // Check every parameter and the return type of a bare fn type.
            for input in barefn.inputs.iter() {
                check_for_unsupported_self(&input.ty);
            }
            if let ReturnType::Type(_, output_ty) = &barefn.output {
                check_for_unsupported_self(&*output_ty);
            }
        }
        Type::Group(group) => check_for_unsupported_self(&group.elem),
        Type::Paren(paren) => check_for_unsupported_self(&paren.elem),
        Type::Path(path) => {
            assert!(
                path.qself.is_none(),
                "qualified paths not supported by dyn_trait"
            );
            if path.path.leading_colon.is_none() && path.path.segments.len() == 1 {
                let seg = path.path.segments.first().unwrap();
                // Panic only when the segment actually IS a bare `Self`.
                //
                // Bug fix: the previous assertion was inverted — it asserted the
                // segment *was* `Self`, so it panicked for every ordinary
                // single-segment type and accepted `Self` itself.
                assert!(
                    !(seg.arguments.is_empty() && seg.ident == "Self"),
                    "using Self in this context is not supported by dyn_trait"
                );
            }
        }
        Type::Ptr(ptr) => check_for_unsupported_self(&ptr.elem),
        Type::Reference(reference) => check_for_unsupported_self(&reference.elem),
        Type::Slice(slice) => check_for_unsupported_self(&slice.elem),
        Type::Tuple(tuple) => {
            for elem in tuple.elems.iter() {
                check_for_unsupported_self(elem);
            }
        }
        _ => {}
    }
}
| true |
553910c0002abeb8eccc76182e06f6676a3a5292
|
Rust
|
dodok8/rust-programming-language
|
/ch4/slices/src/main.rs
|
UTF-8
| 827 | 3.84375 | 4 |
[] |
no_license
|
fn main() {
    let mut s = String::from("hello world");
    // `word` holds an immutable borrow of `s` (the returned slice), so no
    // mutable borrow of `s` may be created while `word` is still used.
    let word = fisrt_word(&s);
    // `clear` needs a mutable borrow, so the compiler rejects this call —
    // this example intentionally demonstrates the borrow-checker error.
    s.clear();
    println!("{}", word);
}
/// Returns the first space-delimited word of `s` (the text before the first
/// ASCII space), or the whole string if it contains no space.
///
/// The parameter was generalized from `&String` to `&str`; existing callers
/// passing `&String` still work through deref coercion. The function name
/// keeps the original typo (`fisrt`) so callers keep compiling.
fn fisrt_word(s: &str) -> &str {
    // `find` scans for the first ASCII space byte; the index it returns is a
    // valid char boundary, so slicing is safe.
    match s.find(' ') {
        Some(idx) => &s[..idx],
        None => s,
    }
}
/// Returns the first space-delimited word of `s`, or the whole string when no
/// space is present. Takes a string slice, so both `&String` and `&str`
/// callers work without conversion.
fn first_word_slices(s: &str) -> &str {
    // Walk the raw bytes; an ASCII space is a single byte, so byte indices
    // are valid slice boundaries here.
    for (i, byte) in s.as_bytes().iter().enumerate() {
        if *byte == b' ' {
            return &s[..i];
        }
    }
    &s[..]
}
| true |
9c6deb71883a1e7cb851e2b36e0e40316870b725
|
Rust
|
Hugal31/yara-rust
|
/src/internals/object.rs
|
UTF-8
| 5,610 | 3.15625 | 3 |
[
"MIT",
"Apache-2.0"
] |
permissive
|
use std::collections::HashMap;
use std::ffi::CStr;
use std::fmt::Debug;
/// A value from a module.
///
/// Wraps a borrowed `yara_sys::YR_OBJECT`; every accessor returns data tied
/// to that `'a` borrow.
pub struct YrObject<'a>(&'a yara_sys::YR_OBJECT);
impl Debug for YrObject<'_> {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Deliberately opaque: print the type name only (quoted, matching the
        // original `Debug::fmt` of the &str), not the raw C struct contents.
        write!(f, "{:?}", "YrObject")
    }
}
impl<'a> From<&'a yara_sys::YR_OBJECT> for YrObject<'a> {
fn from(value: &'a yara_sys::YR_OBJECT) -> Self {
Self(value)
}
}
impl YrObject<'_> {
    /// Get the identifier of the object.
    ///
    /// This is not always set, depending on the object.
    /// For example, objects in [`YrObjectValue::Structure`] have an identifier, but those in
    /// [`YrObjectValue::Array`] do not.
    pub fn identifier(&self) -> Option<&[u8]> {
        let ptr = self.0.identifier;
        if ptr.is_null() {
            None
        } else {
            // Safety:
            // - ptr is not null, and is guaranteed by libyara to be nul-terminated
            // - returned slice is valid for as long as self, guaranteeing the ptr to stay valid.
            let cstr = unsafe { CStr::from_ptr(ptr) };
            Some(cstr.to_bytes())
        }
    }
    /// Get the value of the object.
    ///
    /// Dispatches on the raw `type_` tag and reinterprets the C union/struct
    /// accordingly; anything missing or unrecognized maps to
    /// [`YrObjectValue::Undefined`].
    pub fn value(&self) -> YrObjectValue {
        unsafe {
            match self.0.type_ as u32 {
                yara_sys::OBJECT_TYPE_INTEGER => {
                    let v = self.0.value.i;
                    // YR_UNDEFINED is a sentinel bit pattern, not a real value.
                    if v == yara_sys::YR_UNDEFINED {
                        YrObjectValue::Undefined
                    } else {
                        YrObjectValue::Integer(v)
                    }
                }
                yara_sys::OBJECT_TYPE_FLOAT => {
                    // The undefined sentinel is compared through the union's
                    // integer view even for floats.
                    if self.0.value.i == yara_sys::YR_UNDEFINED {
                        YrObjectValue::Undefined
                    } else {
                        YrObjectValue::Float(self.0.value.d)
                    }
                }
                yara_sys::OBJECT_TYPE_STRING => {
                    let p = self.0.value.ss;
                    if p.is_null() {
                        YrObjectValue::Undefined
                    } else {
                        // Length-prefixed string: borrow exactly `length` bytes
                        // (the data is not assumed to be NUL-terminated here).
                        YrObjectValue::String(std::slice::from_raw_parts(
                            (*p).c_string.as_ptr().cast(),
                            (*p).length as usize,
                        ))
                    }
                }
                yara_sys::OBJECT_TYPE_STRUCTURE => {
                    // Reinterpret the base object as the structure variant and
                    // walk its linked list of members, skipping null entries.
                    let this: &yara_sys::YR_OBJECT_STRUCTURE = std::mem::transmute(self.0);
                    let mut members = Vec::new();
                    let mut member = this.members;
                    while !member.is_null() {
                        let obj = (*member).object;
                        if !obj.is_null() {
                            members.push(YrObject::from(&*obj));
                        }
                        member = (*member).next;
                    }
                    YrObjectValue::Structure(members)
                }
                yara_sys::OBJECT_TYPE_ARRAY => {
                    let this: &yara_sys::YR_OBJECT_ARRAY = std::mem::transmute(self.0)
;
                    if this.items.is_null() {
                        return YrObjectValue::Array(Vec::new());
                    }
                    let objects = std::slice::from_raw_parts(
                        (*this.items).objects.as_ptr(),
                        (*this.items).length as usize,
                    );
                    // Null slots are preserved as `None` so indices stay aligned
                    // with the underlying array.
                    YrObjectValue::Array(
                        objects
                            .iter()
                            .map(|v| {
                                if v.is_null() {
                                    None
                                } else {
                                    Some(YrObject::from(&**v))
                                }
                            })
                            .collect(),
                    )
                }
                yara_sys::OBJECT_TYPE_DICTIONARY => {
                    let this: &yara_sys::YR_OBJECT_DICTIONARY = std::mem::transmute(self.0);
                    if this.items.is_null() {
                        return YrObjectValue::Dictionary(HashMap::new());
                    }
                    // Only `used` entries are initialized; iterate just those.
                    let objects = std::slice::from_raw_parts(
                        (*this.items).objects.as_ptr(),
                        (*this.items).used as usize,
                    );
                    YrObjectValue::Dictionary(
                        objects
                            .iter()
                            .filter_map(|v| {
                                // Entries with a missing key or value are dropped.
                                if v.key.is_null() || v.obj.is_null() {
                                    return None;
                                }
                                let key = std::slice::from_raw_parts(
                                    (*v.key).c_string.as_ptr().cast(),
                                    (*v.key).length as usize,
                                );
                                Some((key, YrObject::from(&*v.obj)))
                            })
                            .collect(),
                    )
                }
                yara_sys::OBJECT_TYPE_FUNCTION => YrObjectValue::Function,
                _ => YrObjectValue::Undefined,
            }
        }
    }
}
/// A value stored in a [`YrObject`].
#[derive(Debug)]
pub enum YrObjectValue<'a> {
    /// 64-bit integer value.
    Integer(i64),
    /// Floating-point value.
    Float(f64),
    /// Raw string bytes borrowed from the underlying object (not guaranteed
    /// to be valid UTF-8).
    String(&'a [u8]),
    /// Array elements; `None` marks null slots in the underlying array so
    /// indices remain aligned.
    Array(Vec<Option<YrObject<'a>>>),
    /// Dictionary keyed by raw string bytes.
    Dictionary(HashMap<&'a [u8], YrObject<'a>>),
    /// Members of a structure object.
    Structure(Vec<YrObject<'a>>),
    /// A function object (its signature is not exposed here).
    Function,
    /// Missing or undefined value (libyara's `YR_UNDEFINED`, null data, or an
    /// unrecognized type tag).
    Undefined,
}
| true |
be7cb0983ebeb7ca2aa412fba016b9736bc88b00
|
Rust
|
Salpadding/tiny-wasm
|
/src/types/offset.rs
|
UTF-8
| 608 | 2.734375 | 3 |
[] |
no_license
|
// Field layout of the packed offset word: lower 31 bits hold the stack base,
// bits 32..63 hold the label base.
const STACK_BASE_MASK: u64 = 0x7fffffff;
const STACK_BASE_SHIFTS: u32 = 0;
const LABEL_BASE_MASK: u64 = 0x7fffffff00000000;
const LABEL_BASE_SHIFTS: u32 = 32;
/// Two 31-bit base indices packed into a single `u64`.
#[derive(Clone, Copy, Default)]
pub(crate) struct Offset(u64);
impl Offset {
    /// Pack a label base (upper word) and a stack base (lower word).
    /// Note: inputs are assumed to fit in 31 bits; they are not masked here.
    pub(crate) fn new(label_base: u32, stack_base: u32) -> Self {
        let packed =
            ((label_base as u64) << LABEL_BASE_SHIFTS) | ((stack_base as u64) << STACK_BASE_SHIFTS);
        Offset(packed)
    }
    /// Label-stack base stored in the upper field.
    pub(crate) fn label_base(&self) -> u32 {
        Self::extract(self.0, LABEL_BASE_MASK, LABEL_BASE_SHIFTS)
    }
    /// Operand-stack base stored in the lower field.
    pub(crate) fn stack_base(&self) -> u32 {
        Self::extract(self.0, STACK_BASE_MASK, STACK_BASE_SHIFTS)
    }
    // Mask out one field and shift it down to its natural position.
    fn extract(raw: u64, mask: u64, shifts: u32) -> u32 {
        ((raw & mask) >> shifts) as u32
    }
}
| true |
b65aea73a58c219644076325ede30ef8057ff251
|
Rust
|
MSDimos/leetcode-rust2018
|
/Q39-combination-sum/src/lib.rs
|
UTF-8
| 1,204 | 3.578125 | 4 |
[
"MIT"
] |
permissive
|
pub struct Solution;
impl Solution {
    /// LeetCode 39: return every unique combination of `candidates` (each
    /// value reusable any number of times) whose sum equals `target`.
    /// Combinations are emitted in non-decreasing element order.
    pub fn combination_sum(mut candidates: Vec<i32>, target: i32) -> Vec<Vec<i32>> {
        let mut result = vec![];
        // Sorting lets the search prune as soon as a partial sum overshoots,
        // and keeps each combination non-decreasing (no duplicate orderings).
        candidates.sort();
        Solution::back(&candidates, 0, target, &mut vec![], &mut result);
        result
    }
    /// Depth-first backtracking helper: extends the current `stack` of chosen
    /// values (running total `sum`) and records complete combinations in
    /// `result`. Takes a slice now (backward compatible with `&Vec<i32>`
    /// callers via deref coercion).
    pub fn back(
        candidates: &[i32],
        sum: i32,
        target: i32,
        stack: &mut Vec<i32>,
        result: &mut Vec<Vec<i32>>,
    ) {
        for &num in candidates {
            let s = sum + num;
            if s > target {
                // Candidates are sorted, so every later value overshoots too.
                break;
            }
            // Only extend with values >= the last chosen one, so each
            // combination is generated exactly once (in sorted order).
            if stack.last().map_or(true, |&last| num >= last) {
                stack.push(num);
                if s == target {
                    result.push(stack.clone());
                } else {
                    // s < target here, so keep searching deeper.
                    Solution::back(candidates, s, target, stack, result);
                }
                stack.pop();
            }
        }
    }
}
#[cfg(test)]
mod tests {
    use crate::Solution;
    /// Smoke test against the LeetCode example: candidates [2,3,6,7], target 7.
    #[test]
    fn it_works() {
        let r = Solution::combination_sum(vec![2, 3, 6, 7], 7);
        assert_eq!(r, vec![vec![2, 2, 3], vec![7]]);
    }
}
| true |
827d8678bf21a2f76f748a95a5b746a84b090849
|
Rust
|
eatrero/rust_ray_tracer
|
/src/shape/sphere.rs
|
UTF-8
| 7,291 | 3.1875 | 3 |
[] |
no_license
|
use crate::colors::Color;
use crate::intersections::{Intersection, Intersections};
use crate::material::Material;
use crate::matrix::Matrix;
use crate::ray::Ray;
use crate::shape::{Shape, ShapeType};
use crate::transform::Transform;
use crate::vectors::{dot, point, vector, Tuple};
use rand::Rng;
use std::f64;
#[derive(Clone)]
/// A sphere shape. The intersection math in `Sphere::intersects` works on the
/// unit sphere at the origin; world placement is expressed via `transform`.
pub struct Sphere {
    pub origin: Tuple,     // center point of the sphere
    pub radius: f64,       // radius (note: `intersects` assumes a unit sphere)
    pub handle: u32,       // random identifier assigned in `Sphere::new`
    pub transform: Matrix, // object-to-world transform, identity by default
    pub material: Material, // surface material, `Material::new()` by default
}
impl Sphere {
    /// Construct a sphere with a random handle, identity transform and the
    /// default material.
    pub fn new(origin: Tuple, radius: f64) -> Sphere {
        let mut rng = rand::thread_rng();
        Sphere {
            origin,
            radius,
            handle: rng.gen::<u32>(),
            transform: Matrix::identity(4),
            material: Material::new(),
        }
    }
    /// Intersect `ray` with the unit sphere at the origin, returning the two
    /// hit parameters in ascending order (empty when the ray misses).
    pub fn intersects(object: &Shape, ray: Ray) -> Intersections {
        // Quadratic coefficients of |ray.origin + t * ray.direction|^2 = 1.
        let center_to_origin = ray.origin.sub(point(0., 0., 0.));
        let a = dot(ray.direction, ray.direction);
        let b = 2. * dot(ray.direction, center_to_origin);
        let c = dot(center_to_origin, center_to_origin) - 1.;
        let discriminant = b * b - 4. * a * c;
        if discriminant < 0. {
            // No real roots: the ray misses the sphere entirely.
            return Intersections {
                intersections: vec![],
            };
        }
        let root = discriminant.sqrt();
        let mut t_near = (-b - root) / 2. / a;
        let mut t_far = (-b + root) / 2. / a;
        // Always report the smaller parameter first.
        if t_far < t_near {
            std::mem::swap(&mut t_near, &mut t_far);
        }
        Intersections {
            intersections: vec![
                Intersection::new(t_near, object.clone()),
                Intersection::new(t_far, object.clone()),
            ],
        }
    }
    /// Replace the shape's transformation matrix.
    pub fn set_transform(object: &mut Shape, transform: Matrix) {
        object.transform = transform;
    }
    /// Surface normal of the unit sphere at `object_point` (object space):
    /// the normalized vector from the center to the point.
    pub fn normal_at(object: &Shape, object_point: Tuple) -> Tuple {
        object_point.sub(point(0., 0., 0.)).norm()
    }
}
// --- Ray/sphere intersection and transform tests ---
#[test]
fn it_computes_intersects_1() {
    // Ray through the center: hits at t = 4 and t = 6.
    let origin = point(0., 0., -5.);
    let direction = vector(0., 0., 1.);
    let r = Ray::new(origin, direction);
    let s = Shape::new(ShapeType::Sphere);
    let intersects = s.intersects(r);
    assert_eq!(intersects.intersections.len(), 2);
    assert_eq!(intersects.intersections[0].t, 4.);
    assert_eq!(intersects.intersections[1].t, 6.);
}
#[test]
fn it_computes_intersects_2() {
    // Tangent ray: both intersection parameters coincide (t = 5).
    let origin = point(0., 1., -5.);
    let direction = vector(0., 0., 1.);
    let r = Ray::new(origin, direction);
    let s = Shape::new(ShapeType::Sphere);
    let intersects = s.intersects(r);
    assert_eq!(intersects.intersections.len(), 2);
    assert_eq!(intersects.intersections[0].t, 5.);
    assert_eq!(intersects.intersections[1].t, 5.);
}
#[test]
fn it_computes_intersects_3() {
    // Ray passing above the sphere: no hits.
    let origin = point(0., 2., -5.);
    let direction = vector(0., 0., 1.);
    let r = Ray::new(origin, direction);
    let s = Shape::new(ShapeType::Sphere);
    let intersects = s.intersects(r);
    assert_eq!(intersects.intersections.len(), 0);
}
#[test]
fn it_computes_intersects_4() {
    // Ray starting inside the sphere: one hit behind (t = -1), one ahead (t = 1).
    let origin = point(0., 0., 0.);
    let direction = vector(0., 0., 1.);
    let r = Ray::new(origin, direction);
    let s = Shape::new(ShapeType::Sphere);
    let intersects = s.intersects(r);
    assert_eq!(intersects.intersections.len(), 2);
    assert_eq!(intersects.intersections[0].t, -1.);
    assert_eq!(intersects.intersections[1].t, 1.);
}
#[test]
fn it_computes_intersects_5() {
    // Sphere entirely behind the ray: both parameters are negative.
    let origin = point(0., 0., 5.);
    let direction = vector(0., 0., 1.);
    let r = Ray::new(origin, direction);
    let s = Shape::new(ShapeType::Sphere);
    let intersects = s.intersects(r);
    assert_eq!(intersects.intersections.len(), 2);
    assert_eq!(intersects.intersections[0].t, -6.);
    assert_eq!(intersects.intersections[1].t, -4.);
}
#[test]
fn default_transformation_is_identity() {
    let s = Shape::new(ShapeType::Sphere);
    let transform = s.transform;
    let i = Matrix::identity(4);
    assert_eq!(Matrix::equals(&transform, &i), true);
}
#[test]
fn can_set_transformation() {
    let mut s = Shape::new(ShapeType::Sphere);
    let transform = Transform::new().translate(2., 3., 4.).transform;
    s.set_transform(transform);
    let exp = Transform::new().translate(2., 3., 4.).transform;
    assert_eq!(Matrix::equals(&s.transform, &exp), true);
}
#[test]
fn intersecting_scaled_sphere_with_a_ray() {
    // Doubling the sphere's scale moves the hits to t = 3 and t = 7.
    let r = Ray::new(point(0., 0., -5.), vector(0., 0., 1.));
    let mut s = Shape::new(ShapeType::Sphere);
    let transform = Transform::new().scale(2., 2., 2.).transform;
    s.set_transform(transform);
    let intersects = s.intersects(r);
    assert_eq!(intersects.intersections.len(), 2);
    assert_eq!(intersects.intersections[0].t, 3.);
    assert_eq!(intersects.intersections[1].t, 7.);
}
#[test]
fn intersecting_translated_sphere_with_a_ray() {
    // Translating the sphere out of the ray's path yields no hits.
    let r = Ray::new(point(0., 0., -5.), vector(0., 0., 1.));
    let mut s = Shape::new(ShapeType::Sphere);
    let transform = Transform::new().translate(5., 0., 0.).transform;
    s.set_transform(transform);
    let intersects = s.intersects(r);
    assert_eq!(intersects.intersections.len(), 0);
}
// --- Surface-normal and material tests ---
#[test]
fn compute_normal_at_a_point_on_x_axis() {
    let s = Shape::new(ShapeType::Sphere);
    let n = s.normal_at(point(1., 0., 0.));
    assert_eq!(n.equals(vector(1., 0., 0.)), true);
}
#[test]
fn compute_normal_at_a_point_on_y_axis() {
    let s = Shape::new(ShapeType::Sphere);
    let n = s.normal_at(point(0., 1., 0.));
    assert_eq!(n.equals(vector(0., 1., 0.)), true);
}
#[test]
fn compute_normal_at_a_point_on_z_axis() {
    let s = Shape::new(ShapeType::Sphere);
    let n = s.normal_at(point(0., 0., 1.));
    assert_eq!(n.equals(vector(0., 0., 1.)), true);
}
#[test]
fn compute_normal_at_a_non_axial_point() {
    // Point at (√3/3, √3/3, √3/3) lies on the unit sphere; the normal equals
    // the point's position vector.
    let s = Shape::new(ShapeType::Sphere);
    let root3 = 3.0f64.sqrt() / 3.;
    let n = s.normal_at(point(root3, root3, root3));
    assert_eq!(n.equals(vector(root3, root3, root3)), true);
}
#[test]
fn the_normal_is_a_normalized_vector() {
    // Normalizing the normal again must be a no-op (it is already unit length).
    let s = Shape::new(ShapeType::Sphere);
    let root3 = 3.0f64.sqrt() / 3.;
    let n = s.normal_at(point(root3, root3, root3));
    let exp = n.norm();
    assert_eq!(n.equals(exp), true);
}
#[test]
fn compute_normal_on_a_translated_sphere() {
    let mut s = Shape::new(ShapeType::Sphere);
    let transform = Transform::new().translate(0., 1., 0.).transform;
    s.set_transform(transform);
    let n = s.normal_at(point(0., 1.70711, -0.70711));
    assert_eq!(n.approx_equals(vector(0., 0.70711, -0.70711)), true);
}
#[test]
fn compute_normal_on_a_transformed_sphere() {
    let mut s = Shape::new(ShapeType::Sphere);
    let transform = Transform::new()
        .scale(1., 0.5, 1.)
        .rotate_z(f64::consts::PI / 5.)
        .transform;
    s.set_transform(transform);
    let n = s.normal_at(point(0., 2.0f64.sqrt() / 2., -2.0f64.sqrt() / 2.));
    assert_eq!(n.approx_equals(vector(0., 0.97014, -0.24254)), true);
}
#[test]
fn a_sphere_has_a_default_material() {
    // NOTE(review): the `mut` binding is unnecessary here (s is never mutated).
    let mut s = Sphere::new(point(0., 0., 0.), 1.);
    let m = s.material;
    assert_eq!(Color::equals(m.color, Color::new(1., 1., 1.)), true);
}
#[test]
fn a_sphere_can_change_its_material() {
    let mut s = Sphere::new(point(0., 0., 0.), 1.);
    s.material.ambient = 1.0;
    s.material.color = Color::new(1., 0.2, 1.0);
    assert_eq!(s.material.ambient, 1.);
    assert_eq!(
        Color::equals(s.material.color, Color::new(1., 0.2, 1.0)),
        true
    );
}
| true |
b8f7e861d184657de7e945633c7e99c9745dcc4c
|
Rust
|
o8vm/krabs
|
/src/bios/stage_3rd/src/init/vid.rs
|
UTF-8
| 718 | 2.546875 | 3 |
[
"Apache-2.0",
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
use plankton::mem::MemoryRegion;
/// Query BIOS video information and record it into the boot-parameter
/// "zero page" at fixed offsets read by later boot stages.
///
/// NOTE(review): uses the long-deprecated `llvm_asm!` macro, so this compiles
/// only on older nightly toolchains — confirm the pinned toolchain before
/// changing it. The offsets written below appear to follow the Linux
/// `screen_info` boot-parameter layout — TODO confirm against the boot
/// protocol this loader targets.
pub fn set_screen_info() {
    // First 4 KiB of memory, treated as the zero page.
    let zero_page = MemoryRegion::new(0x000, 4096);
    let mode: u8;
    let page: u8;
    unsafe {
        // INT 0x10, AH=0x0F: returns the current video mode in AL and the
        // active display page in BH.
        llvm_asm!("int $$0x10"
            : "={al}"(mode), "={bh}"(page)
            : "{ax}"(0x0f00), "{ebx}"(0)
        );
    }
    zero_page.write_u8(0x004, page);        // active video page
    zero_page.write_u8(0x006, mode & 0x7f); // video mode with bit 7 cleared
    zero_page.write_u8(0x007, 80);          // text columns (hard-coded 80)
    zero_page.write_u8(0x00E, 25);          // text rows (hard-coded 25)
    zero_page.write_u8(0x00F, 1);           // flag byte; presumably "is VGA" — verify
    let font: u16;
    unsafe {
        // Zero GS, then read the 16-bit word at linear 0x485 (BIOS data area;
        // presumably the character font height).
        llvm_asm!("movw %ax, %gs
            movw %gs:(0x485), %ax"
            : "={ax}"(font)
            : "{ax}"(0)
        );
    }
    zero_page.write_u16(0x010, font);
    zero_page.write_u16(0x1FA, 0xFFFF); // video-mode field sentinel
}
| true |
082968980d2e782e3898d7dabe8d3c7f7bd5df5c
|
Rust
|
mattjbray/rust-raytracing
|
/src/scene.rs
|
UTF-8
| 798 | 2.953125 | 3 |
[] |
no_license
|
use super::ray::{Hit, Hittable, Ray};
/// A collection of hittable objects that can itself be intersected as one
/// aggregate (see the `Hittable` impl below).
pub struct Scene<'a> {
    // Borrowed trait objects: the scene does not own its objects, so they must
    // outlive it ('a).
    objects: Vec<&'a (dyn Hittable)>,
}
impl<'a> Scene<'a> {
pub fn new() -> Self {
Self {
objects: Vec::new(),
}
}
pub fn add(&mut self, o: &'a (dyn Hittable)) {
self.objects.push(o)
}
}
impl<'a> Hittable for Scene<'a> {
    /// Return the closest hit among all objects within `(t_min, t_max)`, or
    /// `None` if nothing is hit.
    fn hit(&self, ray: &Ray, t_min: f64, t_max: f64) -> Option<Hit> {
        let mut closest_hit = None;
        let mut limit = t_max;
        for object in &self.objects {
            // Shrink the search window to the nearest hit found so far, so
            // farther objects are culled early.
            if let Some(hit) = object.hit(ray, t_min, limit) {
                limit = hit.t();
                closest_hit = Some(hit);
            }
        }
        closest_hit
    }
}
| true |
59b12cd569fad51606d66de2dea41d6e0dcc077c
|
Rust
|
s-petersson/RIP-8
|
/src/opcodes.rs
|
UTF-8
| 8,790 | 3.015625 | 3 |
[] |
no_license
|
#[derive(Debug, Clone, PartialEq)]
/// The CHIP-8 instruction set. Each variant name encodes the opcode pattern:
/// N = literal payload nibble, X/Y = register indices, PL = payload.
pub enum Instruction {
    SysAddressJump_0x0NNN, // Jump to address NNN
    ClearDisplay_0x00E0, // Clear the display
    RetFromSubroutine_0x00EE, // Return from Subroutine
    JumpLocation_0x1NNN, // Jump to address: Set PC to 0xNNN
    CallSubroutine_0x2NNN, // Call Subroutine: Set PC to 0xNNN, set sp += 1, set pc = NNN
    SkipInstrIfVxEqPL_0x3XNN, // Skip Instruction if v[x] == 0xNN
    SkipInstrIfVxNotEqPL_0x4XNN, // Skip Instruction if v[x] != 0xNN
    SkipInstrIfVxVy_0x5XY0, // Skip Instruction if v[x] == v[y]
    SetVxToPL_0x6XNN, // Set v[x] to 0xNN
    IncrementVxByPL_0x7XNN, // Increment v[x] by 0xNN
    SetVxToVy_0x8XY0, // Set v[x] to v[y](xx)
    SetVxToVxORVy_0x8XY1, // Set v[x] to v[x] | v[y]
    SetVxToVxANDVy_0x8XY2, // Set v[x] to v[x] & v[y]
    SetVxToVxXORVy_0x8XY3, // Set v[x] to v[x] ^ v[y]
    IncrementVxByVyAndCarry_0x8XY4, // Increment v[x](xx) by v[y](yy) and set v[F] = 1 if overflow
    DecrementVxByVyNoBorrow_0x8XY5, // Decrement v[x](xx) by v[y](yy) and set v[F] = 1 if v[x] > v[y]
    ShiftAndRotateVxRight_0x8XY6, // Shift and rotate v[x] right
    DecrementVyByVxNoBorrow_0x8XY7, // Decrement v[y](yy) by v[x](xx) and set v[F] = 1 if v[y] > v[x]
    ShiftAndRotateVxLeft_0x8XYE, // Shift and rotate v[x] left
    SkipInstrIfVxNotVy_0x9XY0, // Skip instruction if v[x](xx) != v[y](yy)
    SetIndexRegToPL_0xANNN, // Set index to 0xNNN
    JumpToV0PlusPL_0xBNNN, // Jump to v[0] + 0xNNN: Set PC to 0xXXX
    SetVxRandByteANDPL_0xCXNN, // Set v[x] to randbyte(0xNNN) & 0xNN
    DisplaySpriteSetVfColl_0xDXYN, // Display N-byte sprite and set v[F] = 1 if collision
    SkipInstrIfVxPressed_0xEX9E, // Skip instruction if v[x](keycode) pressed
    SkipInstrIfVxNotPressed_0xEXA1, // Skip instruction if v[x](keycode) not pressed
    SetVxToDelayTimerVal_0xFX07, // Set v[x] to value of delay timer (xxx)
    WaitForKeyStoreInVx_0xFX0A, // Wait for key and store it's value in v[x]
    SetDelayTimerToVx_0xFX15, // Set delay timer to v[x](xx)
    SetSoundTimerToVx_0xFX18, // Set sound timer to v[x](xx)
    IncrementIndexRegByVx_0xFX1E, // Set index = index(xx) + v[x](xx)
    SetIndexRegToVxSprite_0xFX29, // Set index equal to the v[x]th sprite (v[x] * 5)
    StoreBCDOfVxIn3Bytes_0xFX33, // Store BCD of v[x](xxx) in mem[i], mem[i+1], mem[i+2]
    StoreRegsUptoVx_0xFX55, // Store v[0] through v[x] in mem[i] through mem[i + x]
    ReadRegsUptoVx_0xFX65, // Store mem[i] through mem[i+x] in v[0] through v[x]
}
/// A decoded CHIP-8 opcode: the raw 16-bit word plus its parsed instruction.
pub struct Opcode {
    pub value: u16,         // raw big-endian opcode word
    pub instr: Instruction, // decoded instruction variant
}
impl Opcode {
pub fn from_bytes(b1: u8, b2: u8) -> Opcode {
let value: u16 = (b1 as u16) << 8 | b2 as u16;
Opcode {
value: value,
instr: parse_opcode(value).unwrap(),
}
}
pub fn from_code(code: u16) -> Opcode {
Opcode {
value: code,
instr: parse_opcode(code).unwrap(),
}
}
pub fn x(&self) -> usize {
(self.value >> 8 & 0xF) as usize
}
pub fn y(&self) -> usize {
(self.value >> 4 & 0xF) as usize
}
pub fn z(&self) -> usize {
(self.value & 0xF) as usize
}
pub fn yz(&self) -> u16 {
self.value & 0xFF
}
pub fn xyz(&self) -> u16 {
self.value & 0xFFF
}
}
/// Decode a raw 16-bit CHIP-8 word into an [`Instruction`].
///
/// Dispatches on the top nibble first, then on the low byte/nibble for the
/// opcode families (0x0, 0x8, 0xE, 0xF) that share a prefix. Returns `Err`
/// with a formatted message for unrecognized encodings.
pub fn parse_opcode(code: u16) -> Result<Instruction, String> {
    match code & 0xF000 {
        // 0x0 family: display/flow control, with 0x0NNN as the catch-all.
        0x0000 =>
            match code & 0x00FF {
                0xE0 => Ok(Instruction::ClearDisplay_0x00E0),
                0xEE => Ok(Instruction::RetFromSubroutine_0x00EE),
                _ => Ok(Instruction::SysAddressJump_0x0NNN),
            },
        0x1000 => Ok(Instruction::JumpLocation_0x1NNN),
        0x2000 => Ok(Instruction::CallSubroutine_0x2NNN),
        0x3000 => Ok(Instruction::SkipInstrIfVxEqPL_0x3XNN),
        0x4000 => Ok(Instruction::SkipInstrIfVxNotEqPL_0x4XNN),
        0x5000 => Ok(Instruction::SkipInstrIfVxVy_0x5XY0),
        0x6000 => Ok(Instruction::SetVxToPL_0x6XNN),
        0x7000 => Ok(Instruction::IncrementVxByPL_0x7XNN),
        // 0x8 family: register/register ALU ops, selected by the low nibble.
        0x8000 =>
            match code & 0x000F {
                0x0 => Ok(Instruction::SetVxToVy_0x8XY0),
                0x1 => Ok(Instruction::SetVxToVxORVy_0x8XY1),
                0x2 => Ok(Instruction::SetVxToVxANDVy_0x8XY2),
                0x3 => Ok(Instruction::SetVxToVxXORVy_0x8XY3),
                0x4 => Ok(Instruction::IncrementVxByVyAndCarry_0x8XY4),
                0x5 => Ok(Instruction::DecrementVxByVyNoBorrow_0x8XY5),
                0x6 => Ok(Instruction::ShiftAndRotateVxRight_0x8XY6),
                0x7 => Ok(Instruction::DecrementVyByVxNoBorrow_0x8XY7),
                0xE => Ok(Instruction::ShiftAndRotateVxLeft_0x8XYE),
                _ => Err(format!("Could not parse opcode: {:04x}", code))
            },
        0x9000 => Ok(Instruction::SkipInstrIfVxNotVy_0x9XY0),
        0xA000 => Ok(Instruction::SetIndexRegToPL_0xANNN),
        0xB000 => Ok(Instruction::JumpToV0PlusPL_0xBNNN),
        0xC000 => Ok(Instruction::SetVxRandByteANDPL_0xCXNN),
        0xD000 => Ok(Instruction::DisplaySpriteSetVfColl_0xDXYN),
        // 0xE family: key-press skips, selected by the low byte.
        0xE000 =>
            match code & 0x00FF {
                0x9E => Ok(Instruction::SkipInstrIfVxPressed_0xEX9E),
                0xA1 => Ok(Instruction::SkipInstrIfVxNotPressed_0xEXA1),
                _ => Err(format!("Could not parse opcode: {:04x}", code))
            },
        // 0xF family: timers, memory and BCD helpers, selected by the low byte.
        0xF000 =>
            match code & 0x00FF {
                0x07 => Ok(Instruction::SetVxToDelayTimerVal_0xFX07),
                0x0A => Ok(Instruction::WaitForKeyStoreInVx_0xFX0A),
                0x15 => Ok(Instruction::SetDelayTimerToVx_0xFX15),
                0x18 => Ok(Instruction::SetSoundTimerToVx_0xFX18),
                0x1E => Ok(Instruction::IncrementIndexRegByVx_0xFX1E),
                0x29 => Ok(Instruction::SetIndexRegToVxSprite_0xFX29),
                0x33 => Ok(Instruction::StoreBCDOfVxIn3Bytes_0xFX33),
                0x55 => Ok(Instruction::StoreRegsUptoVx_0xFX55),
                0x65 => Ok(Instruction::ReadRegsUptoVx_0xFX65),
                _ => Err(format!("Could not parse opcode: {:04x}", code))
            },
        _ => Err(format!("Could not parse opcode: {:04x}", code))
    }
}
#[test]
pub fn test_parse_opcode() {
    use std::collections::HashMap;
    // Bug fix: every expected variant previously used names that do not exist
    // on `Instruction` (e.g. `SysAddressJump_0x0000` instead of
    // `SysAddressJump_0x0NNN`), so this test could not compile. The unrelated
    // `cpu::CPU` setup and the unused `result` binding were removed as well.
    let code_results: HashMap<u16, Instruction> = [
        (0x00EE, Instruction::RetFromSubroutine_0x00EE),
        (0x00E0, Instruction::ClearDisplay_0x00E0),
        (0x0000, Instruction::SysAddressJump_0x0NNN),
        (0x1000, Instruction::JumpLocation_0x1NNN),
        (0x2000, Instruction::CallSubroutine_0x2NNN),
        (0x3000, Instruction::SkipInstrIfVxEqPL_0x3XNN),
        (0x4000, Instruction::SkipInstrIfVxNotEqPL_0x4XNN),
        (0x5000, Instruction::SkipInstrIfVxVy_0x5XY0),
        (0x6000, Instruction::SetVxToPL_0x6XNN),
        (0x7000, Instruction::IncrementVxByPL_0x7XNN),
        (0x8FF0, Instruction::SetVxToVy_0x8XY0),
        (0x8FF1, Instruction::SetVxToVxORVy_0x8XY1),
        (0x8FF2, Instruction::SetVxToVxANDVy_0x8XY2),
        (0x8FF3, Instruction::SetVxToVxXORVy_0x8XY3),
        (0x8FF4, Instruction::IncrementVxByVyAndCarry_0x8XY4),
        (0x8FF5, Instruction::DecrementVxByVyNoBorrow_0x8XY5),
        (0x8FF6, Instruction::ShiftAndRotateVxRight_0x8XY6),
        (0x8FF7, Instruction::DecrementVyByVxNoBorrow_0x8XY7),
        (0x8FFE, Instruction::ShiftAndRotateVxLeft_0x8XYE),
        (0x9000, Instruction::SkipInstrIfVxNotVy_0x9XY0),
        (0xA000, Instruction::SetIndexRegToPL_0xANNN),
        (0xB000, Instruction::JumpToV0PlusPL_0xBNNN),
        (0xC000, Instruction::SetVxRandByteANDPL_0xCXNN),
        (0xD000, Instruction::DisplaySpriteSetVfColl_0xDXYN),
        (0xEF9E, Instruction::SkipInstrIfVxPressed_0xEX9E),
        (0xEFA1, Instruction::SkipInstrIfVxNotPressed_0xEXA1),
        (0xFF07, Instruction::SetVxToDelayTimerVal_0xFX07),
        (0xFF0A, Instruction::WaitForKeyStoreInVx_0xFX0A),
        (0xFF15, Instruction::SetDelayTimerToVx_0xFX15),
        (0xFF18, Instruction::SetSoundTimerToVx_0xFX18),
        (0xFF1E, Instruction::IncrementIndexRegByVx_0xFX1E),
        (0xFF29, Instruction::SetIndexRegToVxSprite_0xFX29),
        (0xFF33, Instruction::StoreBCDOfVxIn3Bytes_0xFX33),
        (0xFF55, Instruction::StoreRegsUptoVx_0xFX55),
        (0xFF65, Instruction::ReadRegsUptoVx_0xFX65),
    ].iter().cloned().collect();
    for (code, res) in &code_results {
        assert_eq!(*res, parse_opcode(*code).unwrap());
    }
}
| true |
86860beecad950c0b588a352bfa61a21c6b9857e
|
Rust
|
oberblastmeister/bulk-rename
|
/src/filesystem.rs
|
UTF-8
| 4,235 | 3.390625 | 3 |
[] |
no_license
|
use std::fs;
use std::path::{Path, PathBuf};
use anyhow::{bail, Context, Result};
use rayon::prelude::*;
use crate::errors::{anyhow_multiple, print_error};
/// Gets a string representation of the paths in the specified directory,
/// sorted, with hidden entries removed unless `allow_hidden` is set.
pub fn get_string_paths(dir: impl AsRef<Path>, allow_hidden: bool) -> Result<Vec<String>> {
    Ok(convert_paths_to_string_iter(get_sorted_paths(dir)?, allow_hidden))
}
/// Get the paths in the specified directory as an unstably-sorted vector.
/// Per-entry read errors are printed to stderr and skipped rather than
/// aborting the whole listing.
fn get_sorted_paths(dir: impl AsRef<Path>) -> Result<Vec<PathBuf>> {
    // Materialize the directory iterator first so the entries can be
    // processed in parallel.
    let read_results: Vec<_> = fs::read_dir(dir)?.collect();
    let mut paths: Vec<PathBuf> = read_results
        .into_par_iter()
        .filter_map(|entry| match entry {
            Ok(e) => Some(e.path()),
            Err(e) => {
                print_error(format!("failed to read an entry, {:?}", e));
                None
            }
        })
        .collect();
    paths.sort_unstable();
    Ok(paths)
}
/// Converts PathBufs into Strings. This function applies some additional niceties like removing
/// the ./ in front of the path and adding / at the end of a directory. This function also ignores
/// errors (non-UTF-8 paths) and will print them to stderr.
fn convert_paths_to_string_iter(paths: Vec<PathBuf>, allow_hidden: bool) -> Vec<String> {
    let path_str_iter = paths
        .into_par_iter()
        .map(|p| {
            // `is_dir` must be checked before the path is consumed below.
            let is_dir = p.is_dir();
            // Fails (returns the OsString back) when the path is not valid UTF-8.
            let res = p.into_os_string().into_string();
            // format string
            res.map(|mut s| {
                remove_front(&mut s);
                if is_dir {
                    add_dir_slash(&mut s)
                }
                s
            })
        })
        // print any errors that have happened
        .inspect(|res| {
            if let Some(e) = res.as_ref().err() {
                print_error(format!(
                    "Could not convert OsString to a uft-8 String. The OsString was {:?}",
                    e
                ))
            }
        })
        // then discard the errors
        .filter_map(Result::ok);
    // collect into vec with hidden paths or not
    if !allow_hidden {
        filter_hidden(path_str_iter).collect()
    } else {
        path_str_iter.collect()
    }
}
/// Drop entries whose name starts with '.' (hidden on unix-like systems).
fn filter_hidden(
    iter: impl ParallelIterator<Item = String>,
) -> impl ParallelIterator<Item = String> {
    iter.filter(|name| !name.starts_with('.'))
}
/// Strips a leading "./" (as produced by reading the "." directory) from `s`
/// in place.
fn remove_front(s: &mut String) {
    if s.starts_with("./") {
        // Drain the two ASCII prefix bytes in place instead of rebuilding the
        // string from a char iterator, which allocated a new String.
        s.drain(..2);
    }
}
/// Append a trailing '/' to mark `s` as a directory.
fn add_dir_slash(s: &mut String) {
    *s += "/";
}
/// Renames from slices instead of single items like `std::fs::rename`. This function uses rayon to
/// rename in parallel. On failure it returns a single error aggregating every
/// rename that failed (via `anyhow_multiple`).
pub fn bulk_rename(from: &[String], to: &[&str]) -> Result<()> {
    // Attempt every rename in parallel, collecting failures instead of
    // stopping at the first one.
    let errors: Vec<anyhow::Error> = from
        .par_iter()
        .zip(to.par_iter())
        .map(|(f, t)| {
            // Skip no-op renames so unchanged entries never touch the filesystem.
            if f != t {
                fs::rename(f, t).context(format!("Failed to rename {} to {}", f, t))
            } else {
                Ok(())
            }
        })
        .filter_map(Result::err) // only keep error values and unwrap_err them
        .collect();
    if !errors.is_empty() {
        bail!(anyhow_multiple(errors))
    } else {
        Ok(())
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use std::env;
    // Expected directory contents without the `./` prefix.
    fn testing_strs() -> Vec<&'static str> {
        vec!["file1", "file2", "file3", "file4", "file5", "hello_dude"]
    }
    // Expected directory contents as `read_dir(".")` reports them (with `./`).
    fn testing_strs_dot() -> Vec<&'static str> {
        vec![
            "./file1",
            "./file2",
            "./file3",
            "./file4",
            "./file5",
            "./hello_dude",
        ]
    }
    fn testing_pathbufs_from_strs(paths: Vec<&str>) -> Vec<PathBuf> {
        paths.iter().map(|p| PathBuf::from(p)).collect()
    }
    #[test]
    fn get_sorted_paths_test() -> Result<()> {
        // NOTE(review): this mutates the process-wide working directory, which
        // can race with other tests that also depend on the cwd — confirm.
        env::set_current_dir("tests/get_sorted_paths_test")?;
        let paths = get_sorted_paths(".")?;
        assert_eq!(paths, testing_pathbufs_from_strs(testing_strs_dot()));
        Ok(())
    }
}
| true |
de098db697e6a150b1846fc7d46b16784fc6796b
|
Rust
|
Hirevo/async-broadcast
|
/src/lib.rs
|
UTF-8
| 12,702 | 2.9375 | 3 |
[
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference",
"MIT"
] |
permissive
|
//! Async broadcast channels
//!
//! # Examples
//!
//! ```
//! // tbi
//! ```
#![forbid(unsafe_code, future_incompatible, rust_2018_idioms)]
#![deny(missing_debug_implementations, nonstandard_style)]
#![warn(missing_docs, missing_doc_code_examples, unreachable_pub)]
use std::collections::VecDeque;
use std::error;
use std::fmt;
use std::future::Future;
use std::pin::Pin;
use std::sync::{Arc, Mutex};
use std::task::{Context, Poll};
use event_listener::{Event, EventListener};
use futures_core::stream::Stream;
/// Create a new broadcast channel with room for `cap` buffered messages.
///
/// Returns the initial sender/receiver pair; both handles can be cloned.
pub fn broadcast<T>(cap: usize) -> (Sender<T>, Receiver<T>) {
    let inner = Arc::new(Mutex::new(Inner {
        queue: VecDeque::with_capacity(cap),
        receiver_count: 1,
        send_count: 0,
        send_ops: Event::new(),
        recv_ops: Event::new(),
    }));
    let s = Sender {
        inner: inner.clone(),
        capacity: cap,
    };
    let r = Receiver {
        // Fixed redundant `inner: inner` field initializer (clippy:
        // redundant_field_names).
        inner,
        capacity: cap,
        recv_count: 0,
    };
    (s, r)
}
// State shared by every `Sender`/`Receiver` handle of one channel.
#[derive(Debug)]
struct Inner<T> {
    // Buffered messages, each paired with the number of receivers that still
    // have to observe it.
    queue: VecDeque<(T, usize)>,
    // Number of live `Receiver` handles.
    receiver_count: usize,
    // Total number of messages ever sent on this channel.
    send_count: usize,
    /// Send operations waiting while the channel is full.
    send_ops: Event,
    /// Receive operations waiting while the channel is empty and not closed.
    recv_ops: Event,
}
/// The sending side of a channel.
#[derive(Debug, Clone)]
pub struct Sender<T> {
    inner: Arc<Mutex<Inner<T>>>,
    capacity: usize,
}
impl<T> Sender<T> {
    /// Returns the channel capacity this sender was created with.
    pub fn capacity(&self) -> usize {
        self.capacity
    }
}
impl<T: Clone> Sender<T> {
    /// Broadcasts a message, returning a future that waits while the channel
    /// is full.
    pub fn broadcast(&self, msg: T) -> Send<'_, T> {
        Send {
            sender: self,
            listener: None,
            msg: Some(msg),
        }
    }
    /// Attempts to broadcast a message without waiting, failing with
    /// `TrySendError::Full` when the queue is at capacity.
    pub fn try_broadcast(&self, msg: T) -> Result<(), TrySendError<T>> {
        let mut inner = self.inner.lock().unwrap();
        if inner.queue.len() == self.capacity {
            return Err(TrySendError::Full(msg));
        }
        // Tag the message with how many receivers currently have to see it.
        let receiver_count = inner.receiver_count;
        inner.queue.push_back((msg, receiver_count));
        inner.send_count += 1;
        // Notify all awaiting receive operations.
        inner.recv_ops.notify(usize::MAX);
        Ok(())
    }
}
/// The receiving side of a channel.
#[derive(Debug)]
pub struct Receiver<T> {
    inner: Arc<Mutex<Inner<T>>>,
    capacity: usize,
    // Number of messages this particular receiver has consumed so far.
    recv_count: usize,
}
impl<T: Clone> Receiver<T> {
    /// Receives a message, returning a future that waits while the channel is
    /// empty.
    pub fn recv(&mut self) -> Recv<'_, T> {
        Recv {
            receiver: self,
            listener: None,
        }
    }
    /// Attempts to receive a message without waiting, failing with
    /// `TryRecvError::Empty` when this receiver has seen every message.
    pub fn try_recv(&mut self) -> Result<T, TryRecvError> {
        let mut inner = self.inner.lock().unwrap();
        // Messages sent on the channel that this receiver has not seen yet.
        let msg_count = inner.send_count - self.recv_count;
        if msg_count == 0 {
            return Err(TryRecvError::Empty);
        }
        // The oldest message this receiver hasn't seen sits `msg_count`
        // entries from the back of the queue.
        // (Removed leftover `dbg!` calls that spammed stderr in release use.)
        let len = inner.queue.len();
        let msg = inner.queue[len - msg_count].0.clone();
        // One fewer receiver still needs this message.
        inner.queue[len - msg_count].1 -= 1;
        if inner.queue[len - msg_count].1 == 0 {
            // Every receiver has observed it; drop it from the queue.
            inner.queue.pop_front();
            // Notify 1 awaiting senders that there is now room. If there is still room in the
            // queue, the notified operation will notify another awaiting sender.
            inner.send_ops.notify(1);
        }
        self.recv_count += 1;
        Ok(msg)
    }
}
impl<T> Drop for Receiver<T> {
    fn drop(&mut self) {
        let mut inner = self.inner.lock().unwrap();
        // Decrement the pending-receiver count on every message this receiver
        // had not yet consumed, then drop the messages no receiver still
        // needs. (Removed leftover `dbg!` calls that spammed stderr.)
        let msg_count = inner.send_count - self.recv_count;
        let len = inner.queue.len();
        for i in len - msg_count..len {
            inner.queue[i].1 -= 1;
        }
        while let Some((_, 0)) = inner.queue.front() {
            inner.queue.pop_front();
        }
        inner.receiver_count -= 1;
    }
}
impl<T> Clone for Receiver<T> {
    fn clone(&self) -> Self {
        let mut inner = self.inner.lock().unwrap();
        inner.receiver_count += 1;
        Receiver {
            inner: self.inner.clone(),
            capacity: self.capacity,
            // Start at the current send count: a cloned receiver only sees
            // messages broadcast after the clone, not the buffered backlog.
            recv_count: inner.send_count,
        }
    }
}
impl<T: Clone> Stream for Receiver<T> {
    type Item = T;
    fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Item>> {
        // Delegate to the `Recv` future; a receive error ends the stream.
        let mut recv = self.recv();
        let pin = Pin::new(&mut recv);
        match pin.poll(cx) {
            Poll::Ready(Ok(msg)) => Poll::Ready(Some(msg)),
            Poll::Ready(Err(_)) => Poll::Ready(None),
            Poll::Pending => Poll::Pending,
        }
    }
}
/// An error returned from [`Sender::send()`].
///
/// Received because the channel is closed.
#[derive(PartialEq, Eq, Clone, Copy)]
pub struct SendError<T>(pub T);
impl<T> SendError<T> {
    /// Unwraps the message that couldn't be sent.
    pub fn into_inner(self) -> T {
        self.0
    }
}
impl<T> error::Error for SendError<T> {}
// Manual `Debug` impl so `T` itself does not need to implement `Debug`.
impl<T> fmt::Debug for SendError<T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "SendError(..)")
    }
}
impl<T> fmt::Display for SendError<T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "sending into a closed channel")
    }
}
/// An error returned from [`Sender::try_send()`].
#[derive(PartialEq, Eq, Clone, Copy)]
pub enum TrySendError<T> {
    /// The channel is full but not closed.
    Full(T),
    /// The channel is closed.
    Closed(T),
}
impl<T> TrySendError<T> {
    /// Unwraps the message that couldn't be sent.
    pub fn into_inner(self) -> T {
        match self {
            TrySendError::Full(t) => t,
            TrySendError::Closed(t) => t,
        }
    }
    /// Returns `true` if the channel is full but not closed.
    pub fn is_full(&self) -> bool {
        // `matches!` replaces the exhaustive true/false match (clippy idiom).
        matches!(self, TrySendError::Full(_))
    }
    /// Returns `true` if the channel is closed.
    pub fn is_closed(&self) -> bool {
        matches!(self, TrySendError::Closed(_))
    }
}
impl<T> error::Error for TrySendError<T> {}
// Manual `Debug` impl so `T` itself does not need to implement `Debug`.
impl<T> fmt::Debug for TrySendError<T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match *self {
            TrySendError::Full(..) => write!(f, "Full(..)"),
            TrySendError::Closed(..) => write!(f, "Closed(..)"),
        }
    }
}
impl<T> fmt::Display for TrySendError<T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match *self {
            TrySendError::Full(..) => write!(f, "sending into a full channel"),
            TrySendError::Closed(..) => write!(f, "sending into a closed channel"),
        }
    }
}
/// An error returned from [`Receiver::recv()`].
///
/// Received because the channel is empty and closed.
#[derive(PartialEq, Eq, Clone, Copy, Debug)]
pub struct RecvError;
impl error::Error for RecvError {}
impl fmt::Display for RecvError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "receiving from an empty and closed channel")
    }
}
/// An error returned from [`Receiver::try_recv()`].
#[derive(PartialEq, Eq, Clone, Copy, Debug)]
pub enum TryRecvError {
    /// The channel is empty but not closed.
    Empty,
    /// The channel is empty and closed.
    Closed,
}
impl TryRecvError {
    /// Returns `true` if the channel is empty but not closed.
    pub fn is_empty(&self) -> bool {
        // `matches!` replaces the exhaustive true/false match (clippy idiom).
        matches!(self, TryRecvError::Empty)
    }
    /// Returns `true` if the channel is empty and closed.
    pub fn is_closed(&self) -> bool {
        matches!(self, TryRecvError::Closed)
    }
}
impl error::Error for TryRecvError {}
// Human-readable descriptions surfaced through `Display` (and thus `Error`).
impl fmt::Display for TryRecvError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match *self {
            TryRecvError::Empty => write!(f, "receiving from an empty channel"),
            TryRecvError::Closed => write!(f, "receiving from an empty and closed channel"),
        }
    }
}
/// A future returned by [`Sender::send()`].
#[derive(Debug)]
#[must_use = "futures do nothing unless .awaited"]
pub struct Send<'a, T> {
    sender: &'a Sender<T>,
    listener: Option<EventListener>,
    // The message to deliver; restored after every failed attempt, so it is
    // always `Some` at the top of the poll loop.
    msg: Option<T>,
}
impl<'a, T> Unpin for Send<'a, T> {}
impl<'a, T: Clone> Future for Send<'a, T> {
    type Output = Result<(), SendError<T>>;
    fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
        let mut this = Pin::new(self);
        loop {
            // `msg` was re-stored on the previous Full branch, so `take()`
            // cannot observe `None` here.
            let msg = this.msg.take().unwrap();
            // Attempt to send a message.
            match this.sender.try_broadcast(msg) {
                Ok(()) => {
                    let inner = this.sender.inner.lock().unwrap();
                    if inner.queue.len() < this.sender.capacity() {
                        // Not full still, so notify the next awaiting sender.
                        inner.send_ops.notify(1);
                    }
                    return Poll::Ready(Ok(()));
                }
                Err(TrySendError::Closed(msg)) => return Poll::Ready(Err(SendError(msg))),
                Err(TrySendError::Full(m)) => this.msg = Some(m),
            }
            // Sending failed - now start listening for notifications or wait for one.
            match &mut this.listener {
                None => {
                    // Start listening and then try sending again.
                    let inner = this.sender.inner.lock().unwrap();
                    this.listener = Some(inner.send_ops.listen());
                }
                Some(l) => {
                    // Wait for a notification.
                    match Pin::new(l).poll(cx) {
                        Poll::Ready(_) => {
                            this.listener = None;
                            continue;
                        }
                        Poll::Pending => return Poll::Pending,
                    }
                }
            }
        }
    }
}
/// A future returned by [`Receiver::recv()`].
#[derive(Debug)]
#[must_use = "futures do nothing unless .awaited"]
pub struct Recv<'a, T> {
    receiver: &'a mut Receiver<T>,
    listener: Option<EventListener>,
}
impl<'a, T> Unpin for Recv<'a, T> {}
impl<'a, T: Clone> Future for Recv<'a, T> {
    type Output = Result<T, RecvError>;
    fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
        let mut this = Pin::new(self);
        loop {
            // Attempt to receive a message.
            match this.receiver.try_recv() {
                Ok(msg) => return Poll::Ready(Ok(msg)),
                Err(TryRecvError::Closed) => return Poll::Ready(Err(RecvError)),
                Err(TryRecvError::Empty) => {}
            }
            // Receiving failed - now start listening for notifications or wait for one.
            match &mut this.listener {
                None => {
                    // Start listening and then try receiving again.
                    this.listener = {
                        let inner = this.receiver.inner.lock().unwrap();
                        Some(inner.recv_ops.listen())
                    };
                }
                Some(l) => {
                    // Wait for a notification.
                    match Pin::new(l).poll(cx) {
                        Poll::Ready(_) => {
                            this.listener = None;
                            continue;
                        }
                        Poll::Pending => return Poll::Pending,
                    }
                }
            }
        }
    }
}
// Synchronous API: try_broadcast/try_recv, including a receiver cloned
// mid-stream that only sees later messages.
#[test]
fn sync() {
    let (s, mut r1) = broadcast(10);
    let mut r2 = r1.clone();
    s.try_broadcast(7).unwrap();
    assert_eq!(r1.try_recv().unwrap(), 7);
    assert_eq!(r2.try_recv().unwrap(), 7);
    let mut r3 = r1.clone();
    s.try_broadcast(8).unwrap();
    assert_eq!(r1.try_recv().unwrap(), 8);
    assert_eq!(r2.try_recv().unwrap(), 8);
    assert_eq!(r3.try_recv().unwrap(), 8);
}
// Async API: broadcast/recv futures plus the `Stream` impl.
#[test]
fn r#async() {
    use futures_util::stream::StreamExt;
    pollster::block_on(async {
        let (s, mut r1) = broadcast(10);
        let mut r2 = r1.clone();
        s.broadcast(7).await.unwrap();
        assert_eq!(r1.recv().await.unwrap(), 7);
        assert_eq!(r2.recv().await.unwrap(), 7);
        // Now let's try the Stream impl.
        let mut r3 = r1.clone();
        s.broadcast(8).await.unwrap();
        assert_eq!(r1.next().await.unwrap(), 8);
        assert_eq!(r2.next().await.unwrap(), 8);
        assert_eq!(r3.next().await.unwrap(), 8);
    });
}
| true |
84ea0b960465aa320bea9e47cde8ed467e71dde8
|
Rust
|
chipsenkbeil/over-there
|
/src/cli/opts/client/exec.rs
|
UTF-8
| 1,770 | 2.671875 | 3 |
[
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference",
"MIT"
] |
permissive
|
use crate::cli::opts::parsers;
use clap::Clap;
use std::time::Duration;
/// Executes a process on the server
#[derive(Clap, Debug)]
pub struct ExecCommand {
    /// The command to execute
    #[clap(parse(try_from_str))]
    pub command: String,
    /// The arguments for the command
    #[clap(parse(try_from_str))]
    pub args: Vec<String>,
    /// Whether or not to send stdin from this process to the remote process
    #[clap(long)]
    pub no_stdin: bool,
    /// Whether or not to detach the client from the remote process, thereby
    /// not terminating the process if the client disconnects
    #[clap(short, long)]
    pub detached: bool,
    /// If provided, changes the current working directory for the new process
    #[clap(long)]
    pub current_dir: Option<String>,
    /// The time (in milliseconds) to wait after a process exits (or is killed)
    /// to receive lingering stdout/stderr before closing the remote connection
    /// (defaults to 100 ms)
    #[clap(
        long,
        parse(try_from_str = parsers::parse_duration_millis),
        default_value = "100"
    )]
    pub post_exit_duration: Duration,
}
/// Reattaches to a running program on the server
#[derive(Clap, Debug)]
pub struct ReattachExecCommand {
    /// The id of the remote process to connect to
    #[clap(parse(try_from_str))]
    pub id: u32,
    /// Whether or not to send stdin from this process to the remote process
    #[clap(long)]
    pub no_stdin: bool,
    /// The time (in milliseconds) to wait after a process exits (or is killed)
    /// to receive lingering stdout/stderr before closing the remote connection
    /// (defaults to 100 ms)
    #[clap(
        long,
        parse(try_from_str = parsers::parse_duration_millis),
        default_value = "100"
    )]
    pub post_exit_duration: Duration,
}
| true |
314e2f60b15da7b601415d49a6c6a309865acfc5
|
Rust
|
quote27/paperboids-rs
|
/src/boids.rs
|
UTF-8
| 2,053 | 2.859375 | 3 |
[] |
no_license
|
extern crate cgmath;
extern crate rand;
use aabb::AABB;
use cgmath::*;
use rand::prelude::*;
/// One boid: position, velocity, and the acceleration accumulated for the
/// current simulation step.
pub struct Boid {
    pub pos: Vector3<f32>,
    pub vel: Vector3<f32>,
    pub acc: Vector3<f32>,
    // Per-boid speed floor; `update` scales velocity up to at least this.
    pub min_speed: f32,
}
impl Boid {
pub fn random_new(bbox: &AABB) -> Boid {
let mut rand = rand::thread_rng();
let x = bbox.l.x + rand.gen::<f32>() * bbox.xlen();
let y = bbox.l.y + rand.gen::<f32>() * bbox.ylen();
let z = bbox.l.z + rand.gen::<f32>() * bbox.zlen();
let vx = 10.0 - rand.gen::<f32>() * 20.0;
let vy = 10.0 - rand.gen::<f32>() * 20.0;
let vz = 10.0 - rand.gen::<f32>() * 20.0;
Boid {
pos: Vector3::new(x, y, z),
vel: Vector3::new(vx, vy, vz),
acc: Vector3::zero(),
min_speed: 4.0 * 0.5,
}
}
pub fn update(&mut self, dt: f32, world_scale: f32) {
// TODO: figure out where to put these speed constants
let max_speed = 25.0 * world_scale;
//let min_speed = 4.0 * world_scale;
let min_speed = self.min_speed;
self.vel = self.vel + self.acc * dt;
let curr_speed = self.vel.magnitude();
if curr_speed > max_speed {
self.vel = self.vel * (max_speed / curr_speed);
} else if curr_speed < min_speed {
self.vel = self.vel * (min_speed / curr_speed);
}
self.pos = self.pos + self.vel * dt;
}
pub fn model(&self) -> Matrix4<f32> {
// TODO: figure out 'up' vector to get bank rotation animation
// note: replicating cgmath's Matrix3::look_at but without the added transpose
// this logic rotates the boid correctly, not sure if the transpose is a bug or not
let dir = self.vel;
let up = Vector3::unit_y();
let dir = dir.normalize();
let side = up.cross(dir).normalize();
let up = dir.cross(side).normalize();
let m3 = Matrix3::from_cols(side, up, dir);
Matrix4::from_translation(self.pos) * Matrix4::from(m3)
}
}
| true |
408027b1a0a4f048162faaa6ce2dbeee5a24656d
|
Rust
|
dnseitz/CortexM0Rust
|
/port/cortex-m0/src/peripheral/rcc/clock_control.rs
|
UTF-8
| 6,749 | 2.875 | 3 |
[
"MIT"
] |
permissive
|
// peripheral/rcc/clock_control.rs
// AltOSRust
//
// Created by Daniel Seitz on 11/30/16
//! This module handles the clock control register of the CRR
use super::super::Register;
pub mod clock_rate {
    // Cached system clock rate in Hz; written only by
    // `update_system_clock_rate`.
    // NOTE(review): `static mut` access here is only sound if these functions
    // are never called concurrently (e.g. from an interrupt) — confirm.
    static mut CLOCK_RATE: u32 = 0;
    /// Returns the cached system clock rate in Hz.
    pub fn get_system_clock_rate() -> u32 {
        unsafe {
            CLOCK_RATE
        }
    }
    /// Recomputes the system clock rate from the current RCC configuration
    /// and reprograms the SysTick reload value for a 1 ms tick.
    pub fn update_system_clock_rate() {
        const HSI_VALUE: u32 = 8_000_000;
        const HSE_VALUE: u32 = 8_000_000;
        const HSI48_VALUE: u32 = 48_000_000;
        use super::Clock;
        use super::super::super::systick;
        let rcc = super::super::rcc();
        let rate = match rcc.get_system_clock_source() {
            Clock::HSI => HSI_VALUE,
            Clock::HSE => HSE_VALUE,
            Clock::HSI48 => HSI48_VALUE,
            Clock::PLL => {
                // PLL output = (source / prediv) * multiplier, except the
                // plain-HSI path which goes through a fixed divide-by-2 here.
                let multiplier = rcc.get_pll_multiplier() as u32;
                let source = rcc.get_pll_source();
                let prediv_factor = rcc.get_pll_prediv_factor() as u32;
                match source {
                    Clock::HSE => (HSE_VALUE/prediv_factor) * multiplier,
                    Clock::HSI48 => (HSI48_VALUE/prediv_factor) * multiplier,
                    Clock::HSI => (HSI_VALUE/2) * multiplier,
                    _ => panic!("CRR::update_system_core_clock - invalid clock driving the PLL!"),
                }
            },
            _ => panic!("CRR::update_system_core_clock - invalid clock for the system clock!"),
        };
        unsafe { CLOCK_RATE = rate; }
        let systick = systick::systick();
        // Interrupt every millisecond
        systick.set_reload_value(rate / 1000);
    }
}
/// The clock sources managed through the RCC clock-control registers.
pub enum Clock {
    HSI,
    HSI48,
    HSI14,
    HSE,
    PLL,
}
/// Clock Control Register
#[derive(Copy, Clone)]
pub struct ClockControl {
    cr: CR,
    cr2: CR2,
}
impl ClockControl {
    /// Creates the CR/CR2 register pair from the peripheral base address.
    pub fn new(base_addr: u32) -> Self {
        ClockControl {
            cr: CR::new(base_addr),
            cr2: CR2::new(base_addr),
        }
    }
    /// Enable a clock
    pub fn enable_clock(&self, clock: Clock) {
        // Each clock is controlled by exactly one of CR/CR2; dispatch there.
        match clock {
            Clock::HSI | Clock::HSE | Clock::PLL => self.cr.set_clock(true, clock),
            Clock::HSI48 | Clock::HSI14 => self.cr2.set_clock(true, clock),
        };
    }
    /// Disable a clock, if a clock is unable to be disabled the return value will be false.
    pub fn disable_clock(&self, clock: Clock) -> bool {
        match clock {
            Clock::HSI | Clock::HSE | Clock::PLL => self.cr.set_clock(false, clock),
            Clock::HSI48 | Clock::HSI14 => self.cr2.set_clock(false, clock),
        }
    }
    /// Return true if the specified clock is enabled, false otherwise
    pub fn clock_is_on(&self, clock: Clock) -> bool {
        match clock {
            Clock::HSI | Clock::HSE | Clock::PLL => self.cr.clock_is_on(clock),
            Clock::HSI48 | Clock::HSI14 => self.cr2.clock_is_on(clock),
        }
    }
    /// Return true if the specified clock is ready for use, false otherwise
    pub fn clock_is_ready(&self, clock: Clock) -> bool {
        match clock {
            Clock::HSI | Clock::HSE | Clock::PLL => self.cr.clock_is_ready(clock),
            Clock::HSI48 | Clock::HSI14 => self.cr2.clock_is_ready(clock),
        }
    }
}
/// The CR register only controls the PLL, HSE, and HSI clocks, if another clock is passed in as an
/// argument to any of the methods that take a clock argument the kernel will panic.
#[derive(Copy, Clone)]
pub struct CR {
    base_addr: u32,
}
impl Register for CR {
    fn new(base_addr: u32) -> Self {
        CR { base_addr: base_addr }
    }
    fn base_addr(&self) -> u32 {
        self.base_addr
    }
    // Byte offset of the CR register from the peripheral base address.
    fn mem_offset(&self) -> u32 {
        0x0
    }
}
impl CR {
    /// Set a clock on if `enable` is true, off otherwise. If `enable` is true, the return value is
    /// always true. If `enable` is false, the return value will be true if the clock was
    /// successfully disabled.
    ///
    /// # Panics
    ///
    /// Panics if `clock` is not one of `PLL`, `HSE`, or `HSI`.
    fn set_clock(&self, enable: bool, clock: Clock) -> bool {
        let mask = match clock {
            Clock::PLL => 1 << 24,
            Clock::HSE => 1 << 16,
            Clock::HSI => 1 << 0,
            // Fixed: message previously named the wrong method (enable_clock).
            _ => panic!("CR::set_clock - argument clock is not controlled by this register!"),
        };
        unsafe {
            // `mut` bindings removed throughout: the register handle itself is
            // never reassigned, only the memory it points at is written.
            let reg = self.addr();
            if enable {
                *reg |= mask;
                true
            }
            else {
                *reg &= !mask;
                // Read the bit back to report whether the disable took effect.
                (*reg & mask) == 0
            }
        }
    }
    /// Return true if the specified clock is enabled.
    fn clock_is_on(&self, clock: Clock) -> bool {
        let mask = match clock {
            Clock::PLL => 1 << 24,
            Clock::HSE => 1 << 16,
            Clock::HSI => 1 << 0,
            // Fixed "thsi" typo in the panic message.
            _ => panic!("CR::clock_is_on - argument clock is not controlled by this register!"),
        };
        unsafe {
            let reg = self.addr();
            (*reg & mask) != 0
        }
    }
    /// Return true if the specified clock is ready for use.
    fn clock_is_ready(&self, clock: Clock) -> bool {
        let mask = match clock {
            Clock::PLL => 1 << 25,
            Clock::HSE => 1 << 17,
            Clock::HSI => 1 << 1,
            _ => panic!("CR::clock_is_ready - argument clock is not controlled by this register!"),
        };
        unsafe {
            let reg = self.addr();
            (*reg & mask) != 0
        }
    }
}
/// The CR2 register only controls the HSI48 and HSI14 clocks, if another clock is passed in as an
/// argument to any of the methods that take a clock argument the kernel will panic.
#[derive(Copy, Clone)]
pub struct CR2 {
    base_addr: u32,
}
impl Register for CR2 {
    fn new(base_addr: u32) -> Self {
        CR2 { base_addr: base_addr }
    }
    fn base_addr(&self) -> u32 {
        self.base_addr
    }
    // Byte offset of the CR2 register from the peripheral base address.
    fn mem_offset(&self) -> u32 {
        0x34
    }
}
impl CR2 {
    /// Set a clock on if `enable` is true, off otherwise. If `enable` is true, the return value is
    /// always true. If `enable` is false, the return value will be true if the clock was
    /// successfully disabled.
    ///
    /// # Panics
    ///
    /// Panics if `clock` is not `HSI48` or `HSI14`.
    fn set_clock(&self, enable: bool, clock: Clock) -> bool {
        let mask = match clock {
            Clock::HSI48 => 1 << 16,
            Clock::HSI14 => 1 << 0,
            _ => panic!("CR2::set_clock - argument clock is not controlled by this register!"),
        };
        unsafe {
            // `mut` binding removed: the register handle is never reassigned,
            // only the memory it points at is written.
            let reg = self.addr();
            if enable {
                *reg |= mask;
                true
            }
            else {
                *reg &= !mask;
                // Read the bit back to report whether the disable took effect.
                (*reg & mask) == 0
            }
        }
    }
    /// Return true if the specified clock is enabled.
    fn clock_is_on(&self, clock: Clock) -> bool {
        let mask = match clock {
            Clock::HSI48 => 1 << 16,
            Clock::HSI14 => 1 << 0,
            _ => panic!("CR2::clock_is_on - argument clock is not controlled by this register!"),
        };
        unsafe {
            let reg = self.addr();
            (*reg & mask) != 0
        }
    }
    /// Return true if the specified clock is ready for use.
    fn clock_is_ready(&self, clock: Clock) -> bool {
        let mask = match clock {
            Clock::HSI48 => 1 << 17,
            Clock::HSI14 => 1 << 1,
            _ => panic!("CR2::clock_is_ready - argument clock is not controlled by this register!"),
        };
        unsafe {
            let reg = self.addr();
            (*reg & mask) != 0
        }
    }
}
| true |
80f50032ec6876ea0690ee8034534ea8380d7150
|
Rust
|
BurntSushi/rust-analyzer
|
/crates/rust-analyzer/src/diagnostics.rs
|
UTF-8
| 2,780 | 2.9375 | 3 |
[
"Apache-2.0",
"MIT"
] |
permissive
|
//! Book keeping for keeping diagnostics easily in sync with the client.
use std::{collections::HashMap, sync::Arc};
use lsp_types::{CodeActionOrCommand, Diagnostic, Range};
use ra_ide::FileId;
// Shared map from file to the quick-fixes attached to its check diagnostics.
pub type CheckFixes = Arc<HashMap<FileId, Vec<Fix>>>;
/// All diagnostics currently known for the workspace, split by origin.
#[derive(Debug, Default, Clone)]
pub struct DiagnosticCollection {
    // Diagnostics produced natively (as opposed to the external check run).
    pub native: HashMap<FileId, Vec<Diagnostic>>,
    // Diagnostics produced by the external check run.
    pub check: HashMap<FileId, Vec<Diagnostic>>,
    // Quick-fixes for the check diagnostics; `Arc` so snapshots are cheap.
    pub check_fixes: CheckFixes,
}
/// A quick-fix attached to a check diagnostic, applicable within `range`.
#[derive(Debug, Clone)]
pub struct Fix {
    pub range: Range,
    pub action: CodeActionOrCommand,
}
/// A unit of work for keeping the collection in sync.
#[derive(Debug)]
pub enum DiagnosticTask {
    // Drop all check diagnostics (e.g. when a new check run starts).
    ClearCheck,
    // Add one check diagnostic together with its quick-fixes.
    AddCheck(FileId, Diagnostic, Vec<CodeActionOrCommand>),
    // Replace all native diagnostics for one file.
    SetNative(FileId, Vec<Diagnostic>),
}
impl DiagnosticCollection {
    /// Drops all check diagnostics and fixes; returns the affected files so
    /// the client can be told to refresh them.
    pub fn clear_check(&mut self) -> Vec<FileId> {
        Arc::make_mut(&mut self.check_fixes).clear();
        self.check.drain().map(|(key, _value)| key).collect()
    }
    /// Adds a single check diagnostic (with its fixes) for `file_id`,
    /// skipping exact duplicates.
    pub fn add_check_diagnostic(
        &mut self,
        file_id: FileId,
        diagnostic: Diagnostic,
        fixes: Vec<CodeActionOrCommand>,
    ) {
        let diagnostics = self.check.entry(file_id).or_default();
        // Dedupe via `any` instead of a manual scan loop; check runs can
        // report the same diagnostic more than once.
        if diagnostics.iter().any(|d| are_diagnostics_equal(d, &diagnostic)) {
            return;
        }
        let check_fixes = Arc::make_mut(&mut self.check_fixes);
        check_fixes
            .entry(file_id)
            .or_default()
            .extend(fixes.into_iter().map(|action| Fix { range: diagnostic.range, action }));
        diagnostics.push(diagnostic);
    }
    /// Replaces all native diagnostics for `file_id`.
    pub fn set_native_diagnostics(&mut self, file_id: FileId, diagnostics: Vec<Diagnostic>) {
        self.native.insert(file_id, diagnostics);
    }
    /// Iterates over every diagnostic for a file: native first, then check.
    pub fn diagnostics_for(&self, file_id: FileId) -> impl Iterator<Item = &Diagnostic> {
        let native = self.native.get(&file_id).into_iter().flatten();
        let check = self.check.get(&file_id).into_iter().flatten();
        native.chain(check)
    }
    /// Applies a task and returns the files whose diagnostics changed.
    pub fn handle_task(&mut self, task: DiagnosticTask) -> Vec<FileId> {
        match task {
            DiagnosticTask::ClearCheck => self.clear_check(),
            DiagnosticTask::AddCheck(file_id, diagnostic, fixes) => {
                self.add_check_diagnostic(file_id, diagnostic, fixes);
                vec![file_id]
            }
            DiagnosticTask::SetNative(file_id, diagnostics) => {
                self.set_native_diagnostics(file_id, diagnostics);
                vec![file_id]
            }
        }
    }
}
/// Two diagnostics count as equal when source, severity, range, and message
/// all match; any other fields are deliberately ignored.
fn are_diagnostics_equal(left: &Diagnostic, right: &Diagnostic) -> bool {
    left.source == right.source
        && left.severity == right.severity
        && left.range == right.range
        && left.message == right.message
}
| true |
2be532e1a70ed45b9a1acf9c8eb5f04ea5237288
|
Rust
|
gwy15/leetcode
|
/src/62.不同路径.rs
|
UTF-8
| 789 | 2.84375 | 3 |
[] |
no_license
|
/*
* @lc app=leetcode.cn id=62 lang=rust
*
* [62] 不同路径
*/
struct Solution;
// @lc code=start
impl Solution {
    /// Number of monotone lattice paths from the top-left to the bottom-right
    /// corner of an `m`-column by `n`-row grid, moving only right or down.
    ///
    /// Rolling 1-D DP: `row[j]` is the path count for the current grid row.
    /// Each pass adds the cell above (`row[j]`, the previous pass's value)
    /// and the cell to the left (`row[j - 1]`). This replaces the original
    /// version, which allocated two fresh Vecs on every row iteration.
    pub fn unique_paths(m: i32, n: i32) -> i32 {
        let (m, n) = (m as usize, n as usize);
        // First row: exactly one path to every cell (keep moving right).
        let mut row = vec![1; m];
        for _ in 1..n {
            for j in 1..m {
                row[j] += row[j - 1];
            }
        }
        row[m - 1]
    }
}
// @lc code=end
#[test]
fn test_solution() {
    // Shorthand: test!(columns, rows, expected path count)
    macro_rules! test {
        ($m:expr, $n:expr, $ans:expr) => {
            assert_eq!(Solution::unique_paths($m, $n), $ans)
        };
    }
    test!(3, 2, 3);
    test!(7, 3, 28);
    test!(1, 1, 1);
}
| true |
3ed49141a9eddad26894e4e4142f750127a066b5
|
Rust
|
tokio-rs/axum
|
/axum/src/extract/host.rs
|
UTF-8
| 5,404 | 3.109375 | 3 |
[
"MIT"
] |
permissive
|
use super::{
rejection::{FailedToResolveHost, HostRejection},
FromRequestParts,
};
use async_trait::async_trait;
use http::{
header::{HeaderMap, FORWARDED},
request::Parts,
};
// De-facto standard header used by proxies to convey the original host.
const X_FORWARDED_HOST_HEADER_KEY: &str = "X-Forwarded-Host";
/// Extractor that resolves the hostname of the request.
///
/// Hostname is resolved through the following, in order:
/// - `Forwarded` header
/// - `X-Forwarded-Host` header
/// - `Host` header
/// - request target / URI
///
/// Note that user agents can set `X-Forwarded-Host` and `Host` headers to arbitrary values so make
/// sure to validate them to avoid security issues.
#[derive(Debug, Clone)]
pub struct Host(pub String);
#[async_trait]
impl<S> FromRequestParts<S> for Host
where
    S: Send + Sync,
{
    type Rejection = HostRejection;
    async fn from_request_parts(parts: &mut Parts, _state: &S) -> Result<Self, Self::Rejection> {
        // 1. The standard `Forwarded` header takes highest precedence.
        if let Some(host) = parse_forwarded(&parts.headers) {
            return Ok(Host(host.to_owned()));
        }
        // 2. The legacy `X-Forwarded-Host` header.
        if let Some(host) = parts
            .headers
            .get(X_FORWARDED_HOST_HEADER_KEY)
            .and_then(|host| host.to_str().ok())
        {
            return Ok(Host(host.to_owned()));
        }
        // 3. The plain `Host` header.
        if let Some(host) = parts
            .headers
            .get(http::header::HOST)
            .and_then(|host| host.to_str().ok())
        {
            return Ok(Host(host.to_owned()));
        }
        // 4. Fall back to the authority component of the request URI.
        if let Some(host) = parts.uri.host() {
            return Ok(Host(host.to_owned()));
        }
        Err(HostRejection::FailedToResolveHost(FailedToResolveHost))
    }
}
/// Extracts the `host` directive from the first element of the request's
/// `Forwarded` header (RFC 7239), if present.
#[allow(warnings)]
fn parse_forwarded(headers: &HeaderMap) -> Option<&str> {
    // if there are multiple `Forwarded` `HeaderMap::get` will return the first one
    let forwarded_values = headers.get(FORWARDED)?.to_str().ok()?;
    // get the first set of values (`.next()` instead of `.nth(0)`, per
    // clippy::iter_nth_zero)
    let first_value = forwarded_values.split(',').next()?;
    // find the value of the `host` field
    first_value.split(';').find_map(|pair| {
        let (key, value) = pair.split_once('=')?;
        key.trim()
            .eq_ignore_ascii_case("host")
            .then(|| value.trim().trim_matches('"'))
    })
}
#[cfg(test)]
mod tests {
use super::*;
use crate::{routing::get, test_helpers::TestClient, Router};
use http::header::HeaderName;
fn test_client() -> TestClient {
async fn host_as_body(Host(host): Host) -> String {
host
}
TestClient::new(Router::new().route("/", get(host_as_body)))
}
#[crate::test]
async fn host_header() {
let original_host = "some-domain:123";
let host = test_client()
.get("/")
.header(http::header::HOST, original_host)
.send()
.await
.text()
.await;
assert_eq!(host, original_host);
}
#[crate::test]
async fn x_forwarded_host_header() {
let original_host = "some-domain:456";
let host = test_client()
.get("/")
.header(X_FORWARDED_HOST_HEADER_KEY, original_host)
.send()
.await
.text()
.await;
assert_eq!(host, original_host);
}
#[crate::test]
async fn x_forwarded_host_precedence_over_host_header() {
let x_forwarded_host_header = "some-domain:456";
let host_header = "some-domain:123";
let host = test_client()
.get("/")
.header(X_FORWARDED_HOST_HEADER_KEY, x_forwarded_host_header)
.header(http::header::HOST, host_header)
.send()
.await
.text()
.await;
assert_eq!(host, x_forwarded_host_header);
}
#[crate::test]
async fn uri_host() {
let host = test_client().get("/").send().await.text().await;
assert!(host.contains("127.0.0.1"));
}
#[test]
fn forwarded_parsing() {
// the basic case
let headers = header_map(&[(FORWARDED, "host=192.0.2.60;proto=http;by=203.0.113.43")]);
let value = parse_forwarded(&headers).unwrap();
assert_eq!(value, "192.0.2.60");
// is case insensitive
let headers = header_map(&[(FORWARDED, "host=192.0.2.60;proto=http;by=203.0.113.43")]);
let value = parse_forwarded(&headers).unwrap();
assert_eq!(value, "192.0.2.60");
// ipv6
let headers = header_map(&[(FORWARDED, "host=\"[2001:db8:cafe::17]:4711\"")]);
let value = parse_forwarded(&headers).unwrap();
assert_eq!(value, "[2001:db8:cafe::17]:4711");
// multiple values in one header
let headers = header_map(&[(FORWARDED, "host=192.0.2.60, host=127.0.0.1")]);
let value = parse_forwarded(&headers).unwrap();
assert_eq!(value, "192.0.2.60");
// multiple header values
let headers = header_map(&[
(FORWARDED, "host=192.0.2.60"),
(FORWARDED, "host=127.0.0.1"),
]);
let value = parse_forwarded(&headers).unwrap();
assert_eq!(value, "192.0.2.60");
}
fn header_map(values: &[(HeaderName, &str)]) -> HeaderMap {
let mut headers = HeaderMap::new();
for (key, value) in values {
headers.append(key, value.parse().unwrap());
}
headers
}
}
| true |
7a8c3810baca685c33e4fff89c970a7d50886674
|
Rust
|
agam/Cavegen
|
/src/main.rs
|
UTF-8
| 5,644 | 3.0625 | 3 |
[] |
no_license
|
// A "translation" from https://bitbucket.org/agambrahma/caves/
use array2d::Array2D;
use rand::Rng;
// State of one grid cell.
#[derive(Debug, Clone, Copy)]
enum Cell {
    Space,
    Wall
}
// Thresholds for one cellular-automaton smoothing pass.
#[derive(Debug, Clone, Copy)]
struct Params {
    // A cell becomes a wall when its 1-step neighborhood wall count reaches
    // this value...
    r1_cutoff: u8,
    // ...or its 2-step count drops to this value; `None` disables that rule
    // (treated as -1, which a count can never reach).
    r2_cutoff: Option<i8>
}
const NUM_ROWS: usize = 10;
const NUM_COLS: usize = 20;
// Percent chance that seeding places a wall in an interior cell.
const WALL_PROB_PCT: u8 = 40;
fn main() {
    println!("Hello, world!");
    // Display: render the grid to stdout, '.' for space and '#' for wall.
    fn show_grid(grid: & Array2D<Cell>) {
        for row_iter in grid.rows_iter() {
            for cell in row_iter {
                let cell_str = match cell {
                    Cell::Space => ".",
                    Cell::Wall => "#",
                };
                print!("{}", cell_str);
            }
            println!();
        }
    };
    // TODO: figure out why the "closure form" of this was harder to work with.
    // Seed: walls on the border, a guaranteed open corridor across the middle
    // row, and random walls (WALL_PROB_PCT%) everywhere else.
    fn seed_caves(rng: &mut rand::rngs::ThreadRng) -> Array2D<Cell> {
        let mut grid: Array2D<Cell> = Array2D::filled_with(Cell::Space, NUM_ROWS, NUM_COLS);
        fn is_edge(row: usize, col: usize) -> bool {
            (row == 0) ||
            (col == 0) ||
            (row == NUM_ROWS - 1) ||
            (col == NUM_COLS - 1)
        }
        fn is_middle_row(row: usize) -> bool {
            row == (NUM_ROWS / 2)
        }
        fn should_place_wall(generator: &mut rand::rngs::ThreadRng) -> bool {
            generator.gen_range(0..100) < WALL_PROB_PCT
        }
        for row in 0..NUM_ROWS {
            for col in 0..NUM_COLS {
                let new_val =
                    if is_edge(row, col) {
                        Cell::Wall
                    } else if is_middle_row(row) {
                        Cell::Space
                    } else if should_place_wall(rng) {
                        Cell::Wall
                    } else {
                        Cell::Space
                    };
                let result = grid.set(row, col, new_val);
                assert!(result.is_ok());
            }
        }
        grid
    };
// Helpers.
fn abs(n1: usize, n2: usize) -> usize {
if n1 > n2 {
n1 - n2
} else {
n2 - n1
}
}
fn get_neighbor_count(grid: & Array2D<Cell>, row: usize, col: usize, delta: usize) -> i8 {
let mut count = 0;
let startx = if col > delta { col } else { 0 };
let stopx = std::cmp::min(col + delta, NUM_COLS - 1);
let starty = if row > delta { row } else { 0 };
let stopy = std::cmp::min(row + delta, NUM_ROWS - 1);
for i in starty..=stopy {
for j in startx..=stopx {
// Skip corners when delta > 1.
let should_skip = delta > 1 && abs(i, row) == delta && abs(j, col) == delta;
if !should_skip {
count = match grid.get(i, j).unwrap() {
Cell::Wall => count + 1,
Cell::Space => count,
}
}
}
}
count
}
fn apply_cell_rules(r1: i8, r2: i8, params: &Params) -> Cell {
if r1 >= params.r1_cutoff as i8 {
return Cell::Wall
}
if r2 <= params.r2_cutoff.unwrap_or(-1) {
return Cell::Wall
}
Cell::Space
}
// Produce the next cellular-automata generation from `old_grid`:
// a solid outer boundary plus every interior cell recomputed from its
// two neighbour counts via `apply_cell_rules`.
fn iterate_caves(old_grid: &Array2D<Cell>, params: &Params) -> Array2D<Cell> {
    let mut new_grid: Array2D<Cell> = Array2D::filled_with(Cell::Space, NUM_ROWS, NUM_COLS);
    // The outermost rows and columns are always wall.
    for row in [0, NUM_ROWS - 1].iter() {
        for col in 0..NUM_COLS {
            assert!(new_grid.set(*row, col, Cell::Wall).is_ok());
        }
    }
    for col in [0, NUM_COLS - 1].iter() {
        for row in 0..NUM_ROWS {
            assert!(new_grid.set(row, *col, Cell::Wall).is_ok());
        }
    }
    // Transform each interior cell.
    for row in 1..(NUM_ROWS - 1) {
        for col in 1..(NUM_COLS - 1) {
            let r1 = get_neighbor_count(old_grid, row, col, 1);
            let r2 = get_neighbor_count(old_grid, row, col, 2);
            let cell = apply_cell_rules(r1, r2, params);
            assert!(new_grid.set(row, col, cell).is_ok());
        }
    }
    new_grid
}
// Seed a random grid, then run several smoothing generations over it,
// printing the grid after each one.
fn run() {
    let mut rng = rand::thread_rng();
    // TODO: figure out why the mutable access-version of this was harder to work with.
    let mut grid = seed_caves(&mut rng);
    show_grid(&grid);
    // Two rounds of coalescing, larger then smaller islands.
    // NOTE(review): round_1 and round_2 currently hold identical parameters,
    // which contradicts the comment above — confirm the intended cutoffs.
    let round_1 = Params {
        r1_cutoff: 5,
        r2_cutoff: Some(6),
    };
    let round_2 = Params {
        r1_cutoff: 5,
        r2_cutoff: Some(6),
    };
    // One round of pruning out the isolated walls (r2 rule disabled).
    let round_3 = Params {
        r1_cutoff: 5,
        r2_cutoff: None,
    };
    // TODO: populate params vector properly.
    let params_list = vec![
        round_1, round_1, round_1, round_1,
        round_2, round_2, round_2,
        round_3,
    ];
    for (i, params) in params_list.iter().enumerate() {
        println!("\nIteration #{}\n--------------------\n\n", i);
        // BUG FIX: the argument here was garbled into an HTML entity
        // (`&para;ms`); `params` is already a `&Params`, so pass it directly.
        let new_grid = iterate_caves(&grid, params);
        show_grid(&new_grid);
        grid = new_grid;
    }
}
run()
}
| true |
8602906fc3069885fbc916058616bbd0f4e84aea
|
Rust
|
jaredwolff/serde_arrays
|
/src/lib.rs
|
UTF-8
| 8,091 | 3.28125 | 3 |
[
"Apache-2.0",
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
// Copyright 2021 Travis Veazey
//
// Licensed under the Apache License, Version 2.0, <LICENSE-APACHE or
// https://apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT or
// https://opensource.org/licenses/MIT>, at your option. This file may not be
// copied, modified, or distributed except according to those terms.
//! Serialize and deserialize const generic or arbitrarily-large arrays with [Serde].
//!
//! Out of the box, Serde supports [a lot of types](https://serde.rs/data-model.html#types), but
//! unfortunately lacks support for arrays that use const generics. This library provides a module
//! that, in combination with Serde's [`with`](https://serde.rs/field-attrs.html#with) attribute,
//! adds that support.
//!
//! # Example usage
//!
//! ```
//! use serde::{Serialize, Deserialize};
//! use serde_json;
//!
//! #[derive(Serialize, Deserialize, Debug, PartialEq, Eq)]
//! struct GenericArray<const N: usize> {
//! #[serde(with = "serde_arrays")]
//! arr: [u32; N],
//! }
//!
//! let data = GenericArray{ arr: [1; 16] };
//! let json = serde_json::to_string(&data)?;
//! let de_data = serde_json::from_str(&json)?;
//!
//! assert_eq!(data, de_data);
//! # Ok::<(), serde_json::Error>(())
//! ```
//!
//! As an added bonus, this also adds support for arbitrarily large arrays beyond the 32 elements
//! that Serde supports:
//!
//! ```
//! # use serde::{Serialize, Deserialize};
//! # use serde_json;
//! #[derive(Serialize, Deserialize, Debug, PartialEq, Eq)]
//! struct LargeArray {
//! #[serde(with = "serde_arrays")]
//! arr: [u32; 64],
//! }
//! # let data = LargeArray{ arr: [1; 64] };
//! # let json = serde_json::to_string(&data)?;
//! # let de_data = serde_json::from_str(&json)?;
//! # assert_eq!(data, de_data);
//! # Ok::<(), serde_json::Error>(())
//! ```
//!
//! Tuple structs are supported just as easily:
//!
//! ```
//! # use serde::{Serialize, Deserialize};
//! # use serde_json;
//! #[derive(Serialize, Deserialize, Debug, PartialEq, Eq)]
//! struct TupleStruct<const N: usize>(
//! #[serde(with = "serde_arrays")]
//! [u32; N],
//! );
//! # let data = TupleStruct([1; 64]);
//! # let json = serde_json::to_string(&data)?;
//! # let de_data = serde_json::from_str(&json)?;
//! # assert_eq!(data, de_data);
//! # Ok::<(), serde_json::Error>(())
//! ```
//!
//! Even nested arrays are supported:
//!
//! ```
//! # #[cfg(feature = "std")]
//! # {
//! # use std::io;
//! # use serde::{Serialize, Deserialize};
//! # use serde_json;
//! #[derive(Serialize, Debug, PartialEq, Eq)]
//! struct NestedArray {
//! #[serde(with = "serde_arrays")]
//! arr: [[u32; 64]; 64],
//! #[serde(with = "serde_arrays")]
//! vec: Vec<[u32; 96]>,
//! }
//! # fn main() -> io::Result<()> {
//! # let data = NestedArray{ arr: [[1; 64]; 64], vec: vec![[2; 96]; 37], };
//! # let json = serde_json::to_string(&data)?;
//! # //let de_data = serde_json::from_str(&json)?;
//! # //assert_eq!(data, de_data);
//! # Ok(())
//! # }
//! # }
//! ```
//!
//! # MSRV
//!
//! This library relies on the const generics feature introduced in Rust 1.51.0.
//!
//! # Relevant links
//!
//! * The [Serde issue](https://github.com/serde-rs/serde/issues/1937) for const generics support
//! * [serde-big-array](https://crates.io/crates/serde-big-array) is a similar crate for large
//! arrays and const generic arrays
//! * [serde_with](https://crates.io/crates/serde_with/) is a much more flexible and powerful
//! crate, but with arguably more complex ergonomics
//!
//! [Serde]: https://serde.rs/
#![cfg_attr(not(feature = "std"), no_std)]
use core::{fmt, marker::PhantomData, mem::MaybeUninit};
use serde::{
de::{self, Deserialize, Deserializer, SeqAccess, Visitor},
ser::{Serialize, Serializer},
};
#[doc(hidden)]
pub mod serializable;
mod wrapper;
pub use serializable::Serializable;
/// Serialize const generic or arbitrarily-large arrays
///
/// Types must implement the [`Serializable`] trait; while this requirement sharply limits how
/// composable the final result is, the simple ergonomics make up for it.
///
/// For greater flexibility see [`serde_with`][serde_with].
///
/// [serde_with]: https://crates.io/crates/serde_with/
pub fn serialize<A, S, T, const N: usize>(data: &A, ser: S) -> Result<S::Ok, S::Error>
where
A: Serializable<T, N>,
S: Serializer,
T: Serialize,
{
data.serialize(ser)
}
/// A Serde Deserializer `Visitor` for `[T; N]` arrays.
///
/// Constructed by [`deserialize`] and driven through `deserialize_tuple`,
/// since fixed-size arrays are represented as tuples in Serde's data model.
struct ArrayVisitor<T, const N: usize> {
    // Literally nothing (a "phantom"), but stops Rust complaining about the "unused" T parameter
    _marker: PhantomData<T>,
}
impl<'de, T, const N: usize> Visitor<'de> for ArrayVisitor<T, N>
where
    T: Deserialize<'de>,
{
    type Value = [T; N];
    /// Format a message stating we expect an array of size `N`
    fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        write!(formatter, "an array of size {}", N)
    }
    /// Process a sequence into an array.
    ///
    /// Fills a `[MaybeUninit<T>; N]` element by element, then transmutes to
    /// `[T; N]` once all `N` slots are initialized. On error, any elements
    /// already deserialized are dropped before the error is propagated.
    fn visit_seq<A>(self, mut seq: A) -> Result<Self::Value, A::Error>
    where
        A: SeqAccess<'de>,
    {
        // Safety: `assume_init` is sound because the type we are claiming to have
        // initialized here is a bunch of `MaybeUninit`s, which do not require
        // initialization.
        let mut arr: [MaybeUninit<T>; N] = unsafe { MaybeUninit::uninit().assume_init() };
        // Iterate over the array and fill the elemenets with the ones obtained from
        // `seq`.
        let mut place_iter = arr.iter_mut();
        // Counts slots that were successfully filled; only incremented when
        // neither side of the zip below is exhausted or errored.
        let mut cnt_filled = 0;
        let err = loop {
            match (seq.next_element(), place_iter.next()) {
                (Ok(Some(val)), Some(place)) => *place = MaybeUninit::new(val),
                // no error, we're done
                (Ok(None), None) => break None,
                // error from serde, propagate it
                (Err(e), _) => break Some(e),
                // lengths do not match, report invalid_length
                (Ok(None), Some(_)) | (Ok(Some(_)), None) => {
                    break Some(de::Error::invalid_length(cnt_filled, &self))
                }
            }
            cnt_filled += 1;
        };
        if let Some(err) = err {
            // Drop the already-initialized prefix so we don't leak; skipped
            // entirely when T has no drop glue.
            if core::mem::needs_drop::<T>() {
                for elem in core::array::IntoIter::new(arr).take(cnt_filled) {
                    // Safety: `assume_init()` is sound because we did initialize CNT_FILLED
                    // elements. We call it to drop the deserialized values.
                    unsafe {
                        elem.assume_init();
                    }
                }
            }
            return Err(err);
        }
        // Safety: everything is initialized and we are ready to transmute to the
        // initialized array type.
        // `transmute_copy` is used because a plain `transmute` between
        // `[MaybeUninit<T>; N]` and `[T; N]` is rejected for const-generic N;
        // see https://github.com/rust-lang/rust/issues/62875#issuecomment-513834029
        let ret = unsafe { core::mem::transmute_copy(&arr) };
        // Forget the source so its (already moved-out) contents aren't dropped.
        core::mem::forget(arr);
        Ok(ret)
    }
}
/// Deserialize const generic or arbitrarily-large arrays.
///
/// For any array up to length `usize::MAX`, this function will allow Serde to
/// properly deserialize it, provided the element type `T` is itself
/// deserializable.
///
/// This implementation is adapted from the
/// [Serde documentation](https://serde.rs/deserialize-map.html).
pub fn deserialize<'de, D, T, const N: usize>(deserializer: D) -> Result<[T; N], D::Error>
where
    D: Deserializer<'de>,
    T: Deserialize<'de>,
{
    // Fixed-size arrays are tuples in Serde's data model, so drive the
    // visitor through `deserialize_tuple`.
    let visitor = ArrayVisitor {
        _marker: PhantomData,
    };
    deserializer.deserialize_tuple(N, visitor)
}
/// Hacky way to include README in doc-tests, but works until #[doc(include...)] is stabilized
/// https://github.com/rust-lang/cargo/issues/383#issuecomment-720873790
///
/// The macro attaches the README's contents as a doc comment on a dummy
/// `extern` block, so `cargo test` runs the README's code fences as doctests.
#[cfg(doctest)]
mod test_readme {
    macro_rules! external_doc_test {
        ($x:expr) => {
            #[doc = $x]
            extern "C" {}
        };
    }
    external_doc_test!(include_str!("../README.md"));
}
| true |
8b111b34d76a3b9e906495bc260140b161fb7016
|
Rust
|
Pe6oProgramista/rusty-brain
|
/src/utils.rs
|
UTF-8
| 1,791 | 2.53125 | 3 |
[
"MIT"
] |
permissive
|
use ndarray::prelude::*;
use gnuplot::*;
use gnuplot::Axis as gpAxis;
/// Largest element of a 1-D view.
///
/// NOTE(review): panics on an empty view, and NaN values are never selected
/// (NaN comparisons are always false) unless the first element is NaN —
/// identical to the original loop's behaviour.
pub fn max_arr1(array: &ArrayView1<f64>) -> f64 {
    array.iter().fold(array[0], |best, &v| if v > best { v } else { best })
}
/// Largest element of a 2-D view.
///
/// NOTE(review): panics on an empty view, and NaN values are never selected
/// (NaN comparisons are always false) unless the first element is NaN —
/// identical to the original loop's behaviour.
pub fn max_arr2(array: &ArrayView2<f64>) -> f64 {
    array.iter().fold(array[[0, 0]], |best, &v| if v > best { v } else { best })
}
/// Render a scatter-plot matrix (one panel per ordered pair of features) to
/// "exampleData.png" via gnuplot.
pub fn plot_data(data: &Array2<f64>) {
    let (_samples, features) = data.dim();
    // PERF FIX: transpose once up front. The original cloned and transposed
    // the whole matrix twice per panel, i.e. O(features^2) full copies.
    let transposed = data.clone().reversed_axes();
    let mut fg = Figure::new();
    for row in 0..features {
        for col in 0..features {
            fg.axes2d()
                // Lay the panels out on a features x features grid.
                .set_pos_grid(features as u32, features as u32, (row * features + col) as u32)
                .set_x_ticks(None, &[], &[])
                .set_y_ticks(None, &[], &[])
                .points(
                    transposed.row(row),
                    transposed.row(col),
                    &[PointSymbol('O'), PointSize(0.5)],
                );
        }
    }
    fg.set_terminal("pngcairo", "exampleData.png");
    fg.show().close();
}
pub fn plot_error(errors: &Vec<f64>) {
let mut fg = Figure::new();
let x: Vec<usize> = (0..errors.len()).collect();
fg.axes2d()
.set_title("A plot", &[])
.set_legend(Graph(0.5), Graph(0.9), &[], &[])
.set_x_label("iterations", &[])
.set_y_label("Error", &[])
.lines(&x, errors, &[Caption("Error"), Color("red")]);
fg.set_terminal("pngcairo", "exampleError.png");
fg.show().close();
}
| true |
c9700c52e1f32d44086b20f81c0e3cdee388c23d
|
Rust
|
EtomicBomb/picture_scout_rust
|
/src/parse/target_mesh.rs
|
UTF-8
| 7,731 | 2.84375 | 3 |
[] |
no_license
|
use std::cmp::{max, min, Reverse};
use crate::parse::boolean_matrix::BooleanMatrix;
use crate::parse::image::{Image, Color};
use crate::parse::target::Target;
use ordered_float::OrderedFloat;
/// The set of targets detected in one image.
#[derive(Debug)]
pub struct TargetMesh {
    pub targets: Vec<Target>, // coordinates stored are between 0 and 1 as height percentage
}
impl TargetMesh {
    /// Paint every target's bounding box (in its own colour) plus a small
    /// yellow square at its centre of mass onto `image`.
    pub fn add_to_image(&self, image: &mut Image) {
        let target_center_square_size = image.height / 200;
        for target in self.targets.iter() {
            let color = target.get_color();
            // Stored coordinates are fractions; scale back up to pixels.
            let top = (target.top * image.height as f64) as usize;
            let bottom = (target.bottom * image.height as f64) as usize;
            let left = (target.left * image.base as f64) as usize;
            let right = (target.right * image.base as f64) as usize;
            let mean_x = (target.mean_x * image.base as f64) as usize;
            let mean_y = (target.mean_y * image.height as f64) as usize;
            for y in top..=bottom {
                for x in left..=right {
                    image.set_color(x, y, color);
                }
            }
            // BUG FIX: saturating_sub prevents usize underflow (debug panic /
            // release wrap-around) when a centre lies near the top/left edge.
            let y_start = mean_y.saturating_sub(target_center_square_size);
            let x_start = mean_x.saturating_sub(target_center_square_size);
            for y in y_start..=mean_y + target_center_square_size {
                for x in x_start..=mean_x + target_center_square_size {
                    if x < image.base && y < image.height {
                        image.set_color(x, y, Color::yellow());
                    }
                }
            }
        }
    }

    /// Centre positions of all bar targets.
    pub fn get_bar_centers(&self) -> Vec<(f64, f64)> {
        self.targets.iter()
            .filter(|t| t.is_bar())
            .map(|t| t.center_position())
            .collect()
    }

    /// Centres of the four alignment targets, ordered: top-left, top-right,
    /// bottom-right, bottom-left.
    ///
    /// # Panics
    /// Panics if fewer than four aligner targets were detected.
    pub fn get_aligner_centers(&self) -> Vec<(f64, f64)> {
        let mut aligners: Vec<Target> = self.targets.iter()
            .filter(|t| t.is_aligner())
            .cloned()
            .collect();
        // Keep only the four aligners that cover the most image area.
        aligners.sort_by_key(|t| Reverse(OrderedFloat(t.fraction_of_image_filled)));
        aligners.truncate(4);
        assert_eq!(aligners.len(), 4, "Fewer than four aligners were found");
        let mut centers: Vec<(f64, f64)> = aligners.into_iter()
            .map(|t| (t.mean_x as f64, t.mean_y as f64))
            .collect();
        // Classify each corner by extremising x+y (main diagonal) and x-y
        // (anti-diagonal).
        let mut sorted_centers = Vec::with_capacity(4);
        sorted_centers.push(remove_max_by(&mut centers, |&(x0, y0), &(x1, y1)| x0 + y0 < x1 + y1)); // top left
        sorted_centers.push(remove_max_by(&mut centers, |&(x0, y0), &(x1, y1)| x0 - y0 > x1 - y1)); // top right
        sorted_centers.push(remove_max_by(&mut centers, |&(x0, y0), &(x1, y1)| x0 + y0 > x1 + y1)); // bottom right
        sorted_centers.push(remove_max_by(&mut centers, |&(x0, y0), &(x1, y1)| x0 - y0 < x1 - y1)); // bottom left
        sorted_centers
    }

    /// Find every connected component of set pixels and summarise each one as
    /// a [`Target`].
    pub fn from_matrix(target_candidates: &BooleanMatrix) -> TargetMesh {
        let (base, height) = target_candidates.base_height();
        // BUG FIX: the visited matrix was created as (height, height), which
        // is wrong for non-square images; mirror the candidate matrix's
        // dimensions instead.
        // NOTE(review): confirm `all_false`'s argument order matches
        // `base_height`'s (base, height).
        let mut has_seen = BooleanMatrix::all_false(base, height);
        let mut targets = Vec::new(); // we add coordinates of the targets here
        for y in 0..height {
            for x in 0..base {
                // Skip non-target pixels and pixels already consumed by an
                // earlier flood fill.
                if !target_candidates.is_set(x, y) || has_seen.is_set(x, y) { continue }
                if let Some(target) = flood_fill(x, y, target_candidates, &mut has_seen) {
                    targets.push(target);
                }
            }
        }
        TargetMesh { targets }
    }
}
/// Flood-fill the connected component of set pixels containing (x, y),
/// marking every visited pixel in `has_seen`, and summarise it as a Target:
/// bounding box, pixel count and (integer) centre of mass.
///
/// CLEANUP: removed the large blocks of commented-out fraction-normalising
/// code (one of which contained a `bottom - left` typo); `Target::new` now
/// receives raw pixel coordinates plus the image dimensions.
fn flood_fill(x: usize, y: usize, target_candidates: &BooleanMatrix, has_seen: &mut BooleanMatrix) -> Option<Target> {
    let (image_base, image_height) = target_candidates.base_height();
    assert!(!has_seen.is_set(x, y));
    assert!(target_candidates.is_set(x, y));
    let mut top = y;
    let mut bottom = y;
    let mut left = x;
    let mut right = x;
    let mut x_sum = 0;
    let mut y_sum = 0;
    let mut pixels_filled = 0;
    // Iterative DFS; an explicit stack avoids deep recursion on large blobs.
    let mut stack = vec![(x, y)];
    while let Some((x, y)) = stack.pop() {
        // A pixel can be pushed more than once before its first visit.
        if has_seen.is_set(x, y) { continue }
        has_seen.set(x, y);
        pixels_filled += 1;
        x_sum += x;
        y_sum += y;
        left = min(left, x);
        top = min(top, y);
        right = max(right, x);
        bottom = max(bottom, y);
        // Enqueue any set 4-neighbours.
        for (new_x, new_y) in neighbors(x, y, image_base, image_height) {
            if target_candidates.is_set(new_x, new_y) {
                stack.push((new_x, new_y));
            }
        }
    }
    // Integer centre of mass; pixels_filled >= 1 because of the asserts above.
    let mean_x = x_sum / pixels_filled;
    let mean_y = y_sum / pixels_filled;
    Target::new(left, right, top, bottom, pixels_filled, mean_x, mean_y, image_base, image_height)
}
// The 4-connected neighbours of (x, y), clipped to a width x height grid and
// yielded in the order: left, up, right, down.
fn neighbors(x: usize, y: usize, width: usize, height: usize) -> impl Iterator<Item=(usize, usize)> {
    let mut cells = Vec::with_capacity(4);
    if x > 0 { cells.push((x - 1, y)); }
    if y > 0 { cells.push((x, y - 1)); }
    if x < width - 1 { cells.push((x + 1, y)); }
    if y < height - 1 { cells.push((x, y + 1)); }
    cells.into_iter()
}
// Remove and return the element judged greatest by `greater_than`.
// Ties keep the earliest element (the comparison is strict).
// Panics if `vec` is empty.
fn remove_max_by<T>(vec: &mut Vec<T>, greater_than: impl Fn(&T, &T) -> bool) -> T {
    let mut best_index = 0;
    for i in 1..vec.len() {
        if greater_than(&vec[i], &vec[best_index]) {
            best_index = i;
        }
    }
    vec.remove(best_index)
}
| true |
95993b2010a1d6cc2996b8ac81cedc31f51b5410
|
Rust
|
JoshGendein/rust-git
|
/src/commit.rs
|
UTF-8
| 3,477 | 3.078125 | 3 |
[] |
no_license
|
use super::file::FileService;
use super::types::{ Tree, Commit };
use super::error::Error;
use super::index::Index;
use std::collections::{ BTreeMap, HashMap, HashSet };
pub fn commit() -> Result<(), Error> {
let file_service = FileService::new()?;
let index = Index::new(&file_service.root_dir)?;
let file_service = FileService::new()?;
let mut paths_ordered_by_depth: BTreeMap<usize, HashSet<String>> = BTreeMap::new();
let mut dir_mapped_to_children: HashMap<String, HashSet<String>> = HashMap::new();
// Iterate over every file in the staging area. (i.e. index)
for (path, _ ) in index.hashtree.iter() {
// split path into its levels
let mut levels = path.split("\\").collect::<Vec<&str>>();
'inner: while levels.len() > 0 {
let current_path = levels.join("\\");
levels.pop();
let parent = levels.join("\\");
// Get the list of our parents children. Otherwise create one.
let dir_mapping = dir_mapped_to_children.entry(parent).or_default();
// Check if we are already inserted into our parents list
// (i.e.) Another file already created path back to root from our parent
if dir_mapping.contains(¤t_path) {
break 'inner;
}
// Insert ourselves into our parents list of children.
// Insert into into list at our current depth.
let same_depth = paths_ordered_by_depth.entry(levels.len() + 1).or_default();
same_depth.insert(current_path.clone());
dir_mapping.insert(current_path);
}
}
let mut trees: HashMap<String, Tree> = HashMap::new();
// Iterate over paths in order of depth decreasing.
for (_, paths) in paths_ordered_by_depth.iter().rev() {
for path in paths {
if dir_mapped_to_children.contains_key(path) {
let tree_file = create_tree_file(&index, &dir_mapped_to_children[path], &mut trees);
trees.insert(path.to_string(), Tree::new(tree_file));
}
}
}
// Special Case for Root
let roots_children = &dir_mapped_to_children[""];
let root_tree_file = create_tree_file(&index, &roots_children, &mut trees);
trees.insert(String::from("\\"), Tree::new(root_tree_file));
for (_, tree) in &trees {
let data = tree.data.clone();
file_service.write_object(&tree.hash, &data.into_bytes())?;
}
let parent_ref = file_service.get_head_ref();
let commit = Commit::new(parent_ref, trees["\\"].hash.clone(),
"William Shakespeare".to_string(),
"Example Commit Message".to_string());
file_service.write_head_ref(&commit.hash)?;
file_service.write_object(&commit.hash, &commit.data.into_bytes())?;
return Ok(())
}
/// Serialise a directory's children into tree-file lines: `blob` entries for
/// staged files, `tree` entries for subdirectories already present in `trees`.
///
/// NOTE(review): `HashSet` iteration order is unspecified, so the line order
/// (and hence the resulting tree hash) is nondeterministic — confirm callers
/// are OK with that.
pub fn create_tree_file(index: &Index,
                        children: &HashSet<String>,
                        trees: &mut HashMap<String, Tree>) -> String {
    let mut tree_file = String::new();
    for path in children {
        // A path is a file if it is in the index, otherwise a directory if a
        // tree has been built for it.
        if let Some(file_hash) = index.hashtree.get(path) {
            tree_file.push_str(&format!("blob {} {}\n", file_hash, path));
        } else if let Some(tree) = trees.get(path) {
            tree_file.push_str(&format!("tree {} {}\n", tree.hash, path));
        }
    }
    tree_file
}
| true |
b821e60ff37cb995e980b6b1aa7b251f7636c339
|
Rust
|
embed-rs/embedded-rs
|
/src/components/gpio/stm32f7/output_data.rs
|
UTF-8
| 443 | 3.109375 | 3 |
[] |
no_license
|
//! port output data register (GPIOx_ODR)
use super::Pin;
use bit_field::BitField;
/// In-memory view of the GPIO port output data register (GPIOx_ODR).
///
/// Bit N of the wrapped word corresponds to pin N of the port.
#[derive(Clone, Copy)]
pub struct OutputDataRegister(BitField<u32>);
#[allow(dead_code)]
impl OutputDataRegister {
    /// Read the output bit for `pin` (true = set).
    pub fn get(&self, pin: Pin) -> bool {
        self.0.get_bit(pin as u8)
    }
    /// Write the output bit for `pin`.
    pub fn set(&mut self, pin: Pin, value: bool) {
        self.0.set_bit(pin as u8, value);
    }
}
| true |
55af9b47f5ae09e5a5fc785c89f8985812cf8421
|
Rust
|
vab9/advent-of-code-16-rust
|
/2/src/main.rs
|
UTF-8
| 1,487 | 3.75 | 4 |
[] |
no_license
|
use std::io::prelude::*;
use std::io::BufReader;
use std::fs::File;
// Walk the keypad instructions from the input file; after each line of moves
// the current button is printed as one digit of the code.
fn main() {
    let mut button = Button { n: 5 };
    let file = File::open("../input2.txt").unwrap();
    for line in BufReader::new(file).lines() {
        for step in line.unwrap().chars() {
            let direction = match step {
                'U' => Direction::Up,
                'D' => Direction::Down,
                'L' => Direction::Left,
                'R' => Direction::Right,
                _ => unreachable!(),
            };
            button.make_move(direction);
        }
        print!("{}", button.n);
    }
}
/// One keypad move parsed from an input character (U/D/L/R).
#[derive(Debug)]
pub enum Direction {
    Up,
    Down,
    Left,
    Right,
}
/// Current position on the 3x3 keypad, stored as the button number 1..=9.
pub struct Button {
    n: u8,
}
impl Button {
    /// Move to the adjacent key in direction `d` on the keypad
    ///
    /// ```text
    /// 1 2 3
    /// 4 5 6
    /// 7 8 9
    /// ```
    ///
    /// Moves that would leave the pad are ignored.
    pub fn make_move(&mut self, d: Direction) {
        self.n = match d {
            // Anything below the top row can move up a full row (-3).
            Direction::Up if self.n > 3 => self.n - 3,
            // Anything above the bottom row can move down a full row (+3).
            Direction::Down if self.n < 7 => self.n + 3,
            // Left is blocked only in the left column (1, 4, 7 => n % 3 == 1).
            Direction::Left if self.n % 3 != 1 => self.n - 1,
            // Right is blocked only in the right column (3, 6, 9 => n % 3 == 0).
            Direction::Right if self.n % 3 != 0 => self.n + 1,
            // Off-pad moves keep the current button.
            _ => self.n,
        };
    }
}
| true |
d2602d317003a2c828a1d0a16005087bce4c3501
|
Rust
|
jly36963/notes
|
/rs/web/gotham-example/src/main.rs
|
UTF-8
| 5,234 | 2.703125 | 3 |
[] |
no_license
|
#[macro_use]
extern crate gotham_derive;
use gotham::handler::HandlerResult;
use gotham::helpers::http::response::{create_empty_response, create_permanent_redirect, create_response};
use gotham::hyper::{body, Body, Response, StatusCode};
use gotham::router::builder::*;
use gotham::router::Router;
use gotham::state::{FromState, State};
use mime;
use serde::{Deserialize, Serialize};
// ---
// Main
// ---
fn main() {
    // Bind the router on localhost:3000 and serve until the process exits.
    let addr = "127.0.0.1:3000";
    println!("Serving on {}", addr);
    gotham::start(addr, || Ok(get_router())).unwrap();
}
// ---
// Router
// ---
/// Build the application's route table.
fn get_router() -> Router {
    build_simple_router(|route| {
        route.get("/").to(get_root);
        route.scope("/api", |route| {
            route.get("/").to(get_api);
            route.get("/health").to(get_api_health);
            route.get("/health-check").to(get_api_health_check);
            // Query string is extracted into `StoreSearchQuery`.
            route
                .get("/store/search")
                .with_query_string_extractor::<StoreSearchQuery>()
                .to(get_api_store_search);
            route.scope("/user", |route| {
                // Path parameter `:id` is extracted into `UserIdPathParams`.
                route.get("/:id").with_path_extractor::<UserIdPathParams>().to(get_api_user_id);
                // Async handler, registered via `to_async`.
                route.post("/").to_async(post_api_user);
            })
        })
    })
}
// ---
// Handlers
// ---
/// GET /
/// Return "Hello!"
pub fn get_root(state: State) -> (State, &'static str) {
    // Gotham turns a returned `&'static str` into a plain-text response.
    (state, "Hello!")
}
/// GET /api
/// Respond with a small static JSON payload: {"message": "Hello!"}.
pub fn get_api(state: State) -> (State, Response<Body>) {
    #[derive(Serialize, Deserialize)]
    struct Data {
        message: String,
    }
    let payload = Data {
        message: String::from("Hello!"),
    };
    let body = serde_json::to_string(&payload).unwrap();
    let res = create_response(&state, StatusCode::OK, mime::APPLICATION_JSON, body);
    (state, res)
}
/// GET /api/health
/// Return an empty 200 response; liveness is conveyed by the status code alone.
pub fn get_api_health(state: State) -> (State, Response<Body>) {
    let res = create_empty_response(&state, StatusCode::OK);
    (state, res)
}
/// GET /api/health-check
/// Permanently redirect to /api/health.
pub fn get_api_health_check(state: State) -> (State, Response<Body>) {
    let res = create_permanent_redirect(&state, "/api/health");
    (state, res)
}
/// GET /api/store/search
/// Echo the "q" query parameter back as JSON: {"q": "..."}.
pub fn get_api_store_search(mut state: State) -> (State, Response<Body>) {
    #[derive(Serialize, Deserialize)]
    struct Data {
        q: String,
    }
    let query = StoreSearchQuery::take_from(&mut state);
    let payload = Data { q: query.q };
    let body = serde_json::to_string(&payload).unwrap();
    let res = create_response(&state, StatusCode::OK, mime::APPLICATION_JSON, body);
    (state, res)
}
/// GET /api/user/:id
/// Return a mock user echoing the requested id as JSON; 500 if serialization
/// fails.
pub fn get_api_user_id(state: State) -> (State, Response<Body>) {
    let path_params = UserIdPathParams::borrow_from(&state);
    let id = &path_params.id;
    let user = User {
        id: String::from(id),
        first_name: String::from("Kakashi"),
        last_name: String::from("Hatake"),
    };
    // IDIOM: use `match` as an expression instead of pre-declaring an
    // uninitialised binding and assigning in each arm.
    let res = match serde_json::to_string(&user) {
        Ok(body) => create_response(&state, StatusCode::OK, mime::APPLICATION_JSON, body),
        Err(_) => create_empty_response(&state, StatusCode::INTERNAL_SERVER_ERROR),
    };
    (state, res)
}
/// POST /api/user
/// Pretend to create a new user from the JSON request body.
/// Each fallible step short-circuits with an empty 500 response on failure.
/// Async handler example in repo: examples/handlers/simple_async_handlers_await.
pub async fn post_api_user(mut state: State) -> HandlerResult {
    // Read the raw request body.
    // IDIOM: `match` as an expression replaces the pre-declared binding +
    // assignment-in-arm pattern throughout this handler.
    let bytes = match body::to_bytes(Body::take_from(&mut state)).await {
        Ok(valid_body) => valid_body,
        Err(_) => {
            let res = create_empty_response(&state, StatusCode::INTERNAL_SERVER_ERROR);
            return Ok((state, res));
        }
    };
    // BUG FIX: the original `String::from_utf8(...).unwrap()` panicked on a
    // non-UTF-8 body; treat that as a server error instead.
    let body = match String::from_utf8(bytes.to_vec()) {
        Ok(s) => s,
        Err(_) => {
            let res = create_empty_response(&state, StatusCode::INTERNAL_SERVER_ERROR);
            return Ok((state, res));
        }
    };
    // Parse the body into the expected input shape.
    let user_new: UserNew = match serde_json::from_str(&body) {
        Ok(u) => u,
        Err(_) => {
            let res = create_empty_response(&state, StatusCode::INTERNAL_SERVER_ERROR);
            return Ok((state, res));
        }
    };
    // Create a pretend user with a fixed id from the parsed input.
    let user = User {
        id: String::from("b78984aa-f014-45d2-b884-49450f29758a"),
        first_name: user_new.first_name,
        last_name: user_new.last_name,
    };
    // Serialize the pretend user.
    let data = match serde_json::to_string(&user) {
        Ok(s) => s,
        Err(_) => {
            let res = create_empty_response(&state, StatusCode::INTERNAL_SERVER_ERROR);
            return Ok((state, res));
        }
    };
    let res = create_response(&state, StatusCode::OK, mime::APPLICATION_JSON, data);
    Ok((state, res))
}
// ---
// Structs
// ---
/// A user record as returned by the API.
#[derive(Serialize, Deserialize)]
struct User {
    id: String,
    first_name: String,
    last_name: String,
}
/// Input payload for creating a user; the id is assigned server-side.
#[derive(Serialize, Deserialize)]
struct UserNew {
    first_name: String,
    last_name: String,
}
/// Query-string extractor for GET /api/store/search (`?q=...`).
#[derive(Deserialize, StateData, StaticResponseExtender)]
struct StoreSearchQuery {
    q: String,
}
/// Path-parameter extractor for GET /api/user/:id.
#[derive(Deserialize, StateData, StaticResponseExtender)]
struct UserIdPathParams {
    id: String,
}
| true |
3bbd6d3bbe9f41a928e49d82ed88dfa62e37cab1
|
Rust
|
jonhoo/left-right
|
/src/write.rs
|
UTF-8
| 24,994 | 3.0625 | 3 |
[
"Apache-2.0",
"MIT"
] |
permissive
|
use crate::read::ReadHandle;
use crate::sync::{fence, Arc, AtomicUsize, MutexGuard, Ordering};
use crate::Absorb;
use std::collections::VecDeque;
use std::marker::PhantomData;
use std::ops::{Deref, DerefMut};
use std::ptr::NonNull;
#[cfg(test)]
use std::sync::atomic::AtomicBool;
use std::{fmt, thread};
/// A writer handle to a left-right guarded data structure.
///
/// All operations on the underlying data should be enqueued as operations of type `O` using
/// [`append`](Self::append). The effect of this operations are only exposed to readers once
/// [`publish`](Self::publish) is called.
///
/// # Reading through a `WriteHandle`
///
/// `WriteHandle` allows access to a [`ReadHandle`] through `Deref<Target = ReadHandle>`. Note that
/// since the reads go through a [`ReadHandle`], those reads are subject to the same visibility
/// restrictions as reads that do not go through the `WriteHandle`: they only see the effects of
/// operations prior to the last call to [`publish`](Self::publish).
pub struct WriteHandle<T, O>
where
    T: Absorb<O>,
{
    /// Per-reader epoch counters, shared with the read handles.
    epochs: crate::Epochs,
    /// The copy the writer mutates; swapped with the readers' copy on publish.
    w_handle: NonNull<T>,
    /// Operations appended since they were last absorbed into both copies.
    oplog: VecDeque<O>,
    /// Index into `oplog` up to which operations have been applied once.
    swap_index: usize,
    /// The reader-facing handle; also what `Deref` exposes.
    r_handle: ReadHandle<T>,
    /// Epoch values observed at the last swap, used to wait for readers.
    last_epochs: Vec<usize>,
    #[cfg(test)]
    refreshes: usize,
    #[cfg(test)]
    is_waiting: Arc<AtomicBool>,
    /// Write directly to the write handle map, since no publish has happened.
    first: bool,
    /// A publish has happened, but the two copies have not been synchronized yet.
    second: bool,
    /// If we call `Self::take` the drop needs to be different.
    taken: bool,
}
// safety: if a `WriteHandle` is sent across a thread boundary, we need to be able to take
// ownership of both Ts and Os across that thread boundary. since `WriteHandle` holds a
// `ReadHandle`, we also need to respect its Send requirements.
unsafe impl<T, O> Send for WriteHandle<T, O>
where
    T: Absorb<O>,
    T: Send,
    O: Send,
    ReadHandle<T>: Send,
{
}
impl<T, O> fmt::Debug for WriteHandle<T, O>
where
    T: Absorb<O> + fmt::Debug,
    O: fmt::Debug,
{
    // Note: intentionally omits `last_epochs`, `taken`, and the test-only
    // fields from the debug output.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("WriteHandle")
            .field("epochs", &self.epochs)
            .field("w_handle", &self.w_handle)
            .field("oplog", &self.oplog)
            .field("swap_index", &self.swap_index)
            .field("r_handle", &self.r_handle)
            .field("first", &self.first)
            .field("second", &self.second)
            .finish()
    }
}
/// A **smart pointer** to an owned backing data structure. This makes sure that the
/// data is dropped correctly (using [`Absorb::drop_second`]).
///
/// Additionally it allows for unsafely getting the inner data out using [`into_box()`](Taken::into_box).
pub struct Taken<T: Absorb<O>, O> {
    // Always `Some` until `into_box` moves the value out (and drops `self`).
    inner: Option<Box<T>>,
    _marker: PhantomData<O>,
}
impl<T: Absorb<O> + std::fmt::Debug, O> std::fmt::Debug for Taken<T, O> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("Taken")
            .field(
                "inner",
                self.inner
                    .as_ref()
                    .expect("inner is only taken in `into_box` which drops self"),
            )
            .finish()
    }
}
impl<T: Absorb<O>, O> Deref for Taken<T, O> {
    type Target = T;
    fn deref(&self) -> &Self::Target {
        self.inner
            .as_ref()
            .expect("inner is only taken in `into_box` which drops self")
    }
}
impl<T: Absorb<O>, O> DerefMut for Taken<T, O> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        self.inner
            .as_mut()
            .expect("inner is only taken in `into_box` which drops self")
    }
}
impl<T: Absorb<O>, O> Taken<T, O> {
    /// This is unsafe because you must call [`Absorb::drop_second`] in
    /// case just dropping `T` is not safe and sufficient.
    ///
    /// If you used the default implementation of [`Absorb::drop_second`] (which just calls [`drop`](Drop::drop))
    /// you don't need to call [`Absorb::drop_second`].
    pub unsafe fn into_box(mut self) -> Box<T> {
        self.inner
            .take()
            .expect("inner is only taken here then self is dropped")
    }
}
impl<T: Absorb<O>, O> Drop for Taken<T, O> {
    fn drop(&mut self) {
        // If `into_box` already extracted the value, `inner` is `None` and
        // there is nothing left to drop here.
        if let Some(inner) = self.inner.take() {
            T::drop_second(inner);
        }
    }
}
impl<T, O> WriteHandle<T, O>
where
    T: Absorb<O>,
{
    /// Takes out the inner backing data structure if it hasn't been taken yet. Otherwise returns `None`.
    ///
    /// Makes sure that all the pending operations are applied and waits till all the read handles
    /// have departed. Then it uses [`Absorb::drop_first`] to drop one of the copies of the data and
    /// returns the other copy as a [`Taken`] smart pointer.
    fn take_inner(&mut self) -> Option<Taken<T, O>> {
        use std::ptr;
        // Can only take inner once.
        if self.taken {
            return None;
        }
        // Disallow taking again.
        self.taken = true;
        // first, ensure both copies are up to date
        // (otherwise safely dropping the possibly duplicated w_handle data is a pain)
        if self.first || !self.oplog.is_empty() {
            self.publish();
        }
        // one publish may leave ops in the oplog that have only been absorbed into one
        // copy (those at index >= swap_index); publishing a second time drains them into
        // the other copy so the two copies end up identical.
        if !self.oplog.is_empty() {
            self.publish();
        }
        assert!(self.oplog.is_empty());
        // next, grab the read handle and set it to NULL
        let r_handle = self.r_handle.inner.swap(ptr::null_mut(), Ordering::Release);
        // now, wait for all readers to depart
        let epochs = Arc::clone(&self.epochs);
        let mut epochs = epochs.lock().unwrap();
        self.wait(&mut epochs);
        // ensure that the subsequent epoch reads aren't re-ordered to before the swap
        fence(Ordering::SeqCst);
        // all readers have now observed the NULL, so we own both handles.
        // all operations have been applied to both w_handle and r_handle.
        // give the underlying data structure an opportunity to handle the one copy differently:
        //
        // safety: w_handle was initially crated from a `Box`, and is no longer aliased.
        Absorb::drop_first(unsafe { Box::from_raw(self.w_handle.as_ptr()) });
        // next we take the r_handle and return it as a boxed value.
        //
        // this is safe, since we know that no readers are using this pointer
        // anymore (due to the .wait() following swapping the pointer with NULL).
        //
        // safety: r_handle was initially crated from a `Box`, and is no longer aliased.
        let boxed_r_handle = unsafe { Box::from_raw(r_handle) };
        Some(Taken {
            inner: Some(boxed_r_handle),
            _marker: PhantomData,
        })
    }
}
impl<T, O> Drop for WriteHandle<T, O>
where
    T: Absorb<O>,
{
    /// Tear down both copies of the data unless `take()` already did so
    /// (`take_inner` returns `None` in that case).
    fn drop(&mut self) {
        match self.take_inner() {
            Some(inner) => drop(inner),
            None => {}
        }
    }
}
impl<T, O> WriteHandle<T, O>
where
    T: Absorb<O>,
{
    /// Construct a write handle that owns the (boxed) write copy of the data.
    ///
    /// `r_handle` is the paired read handle whose atomic pointer readers go
    /// through, and `epochs` tracks reader progress for [`wait`](Self::wait).
    pub(crate) fn new(w_handle: T, epochs: crate::Epochs, r_handle: ReadHandle<T>) -> Self {
        Self {
            epochs,
            // safety: Box<T> is not null and covariant.
            w_handle: unsafe { NonNull::new_unchecked(Box::into_raw(Box::new(w_handle))) },
            oplog: VecDeque::new(),
            swap_index: 0,
            r_handle,
            last_epochs: Vec::new(),
            #[cfg(test)]
            is_waiting: Arc::new(AtomicBool::new(false)),
            #[cfg(test)]
            refreshes: 0,
            first: true,
            second: true,
            taken: false,
        }
    }
    /// Spin until every reader that might still be inside the old copy (its
    /// epoch was odd and has not changed since we last recorded it at the
    /// swap) has made progress past it.
    fn wait(&mut self, epochs: &mut MutexGuard<'_, slab::Slab<Arc<AtomicUsize>>>) {
        let mut iter = 0;
        let mut starti = 0;
        #[cfg(test)]
        {
            self.is_waiting.store(true, Ordering::Relaxed);
        }
        // we're over-estimating here, but slab doesn't expose its max index
        self.last_epochs.resize(epochs.capacity(), 0);
        'retry: loop {
            // read all and see if all have changed (which is likely)
            for (ii, (ri, epoch)) in epochs.iter().enumerate().skip(starti) {
                // if the reader's epoch was even last we read it (which was _after_ the swap),
                // then they either do not have the pointer, or must have read the pointer strictly
                // after the swap. in either case, they cannot be using the old pointer value (what
                // is now w_handle).
                //
                // note that this holds even with wrap-around since std::u{N}::MAX == 2 ^ N - 1,
                // which is odd, and std::u{N}::MAX + 1 == 0 is even.
                //
                // note also that `ri` _may_ have been re-used since we last read into last_epochs.
                // this is okay though, as a change still implies that the new reader must have
                // arrived _after_ we did the atomic swap, and thus must also have seen the new
                // pointer.
                if self.last_epochs[ri] % 2 == 0 {
                    continue;
                }
                let now = epoch.load(Ordering::Acquire);
                if now != self.last_epochs[ri] {
                    // reader must have seen the last swap, since they have done at least one
                    // operation since we last looked at their epoch, which _must_ mean that they
                    // are no longer using the old pointer value.
                } else {
                    // reader may not have seen swap
                    // continue from this reader's epoch
                    starti = ii;
                    if !cfg!(loom) {
                        // how eagerly should we retry?
                        if iter != 20 {
                            iter += 1;
                        } else {
                            thread::yield_now();
                        }
                    }
                    #[cfg(loom)]
                    loom::thread::yield_now();
                    continue 'retry;
                }
            }
            break;
        }
        #[cfg(test)]
        {
            self.is_waiting.store(false, Ordering::Relaxed);
        }
    }
    /// Publish all operations appended to the log, making them visible to reads.
    ///
    /// This method needs to wait for all readers to move to the "other" copy of the data so that
    /// it can replay the operational log onto the stale copy the readers used to use. This can
    /// take some time, especially if readers are executing slow operations, or if there are many
    /// of them.
    pub fn publish(&mut self) -> &mut Self {
        // we need to wait until all epochs have changed since the swaps *or* until a "finished"
        // flag has been observed to be on for two subsequent iterations (there still may be some
        // readers present since we did the previous refresh)
        //
        // NOTE: it is safe for us to hold the lock for the entire duration of the swap. we will
        // only block on pre-existing readers, and they are never waiting to push onto epochs
        // unless they have finished reading.
        let epochs = Arc::clone(&self.epochs);
        let mut epochs = epochs.lock().unwrap();
        self.wait(&mut epochs);
        if !self.first {
            // all the readers have left!
            // safety: we haven't freed the Box, and no readers are accessing the w_handle
            let w_handle = unsafe { self.w_handle.as_mut() };
            // safety: we will not swap while we hold this reference
            let r_handle = unsafe {
                self.r_handle
                    .inner
                    .load(Ordering::Acquire)
                    .as_ref()
                    .unwrap()
            };
            if self.second {
                Absorb::sync_with(w_handle, r_handle);
                self.second = false
            }
            // the w_handle copy has not seen any of the writes in the oplog
            // the r_handle copy has not seen any of the writes following swap_index
            if self.swap_index != 0 {
                // we can drain out the operations that only the w_handle copy needs
                //
                // NOTE: the if above is because drain(0..0) would remove 0
                for op in self.oplog.drain(0..self.swap_index) {
                    T::absorb_second(w_handle, op, r_handle);
                }
            }
            // we cannot give owned operations to absorb_first
            // since they'll also be needed by the r_handle copy
            for op in self.oplog.iter_mut() {
                T::absorb_first(w_handle, op, r_handle);
            }
            // the w_handle copy is about to become the r_handle, and can ignore the oplog
            self.swap_index = self.oplog.len();
            // w_handle (the old r_handle) is now fully up to date!
        } else {
            self.first = false
        }
        // at this point, we have exclusive access to w_handle, and it is up-to-date with all
        // writes. the stale r_handle is accessed by readers through an Arc clone of atomic pointer
        // inside the ReadHandle. oplog contains all the changes that are in w_handle, but not in
        // r_handle.
        //
        // it's now time for us to swap the copies so that readers see up-to-date results from
        // w_handle.
        // swap in our w_handle, and get r_handle in return
        let r_handle = self
            .r_handle
            .inner
            .swap(self.w_handle.as_ptr(), Ordering::Release);
        // NOTE: at this point, there are likely still readers using r_handle.
        // safety: r_handle was also created from a Box, so it is not null and is covariant.
        self.w_handle = unsafe { NonNull::new_unchecked(r_handle) };
        // ensure that the subsequent epoch reads aren't re-ordered to before the swap
        fence(Ordering::SeqCst);
        // record each reader's epoch as of this swap so that wait() can later tell
        // who has moved on.
        for (ri, epoch) in epochs.iter() {
            self.last_epochs[ri] = epoch.load(Ordering::Acquire);
        }
        #[cfg(test)]
        {
            self.refreshes += 1;
        }
        self
    }
    /// Publish as necessary to ensure that all operations are visible to readers.
    ///
    /// `WriteHandle::publish` will *always* wait for old readers to depart and swap the maps.
    /// This method will only do so if there are pending operations.
    pub fn flush(&mut self) {
        if self.has_pending_operations() {
            self.publish();
        }
    }
    /// Returns true if there are operations in the operational log that have not yet been exposed
    /// to readers.
    pub fn has_pending_operations(&self) -> bool {
        // NOTE: we don't use self.oplog.is_empty() here because it's not really that important if
        // there are operations that have not yet been applied to the _write_ handle.
        self.swap_index < self.oplog.len()
    }
    /// Append the given operation to the operational log.
    ///
    /// Its effects will not be exposed to readers until you call [`publish`](Self::publish).
    pub fn append(&mut self, op: O) -> &mut Self {
        self.extend(std::iter::once(op));
        self
    }
    /// Returns a raw pointer to the write copy of the data (the one readers are _not_ accessing).
    ///
    /// Note that it is only safe to mutate through this pointer if you _know_ that there are no
    /// readers still present in this copy. This is not normally something you know; even after
    /// calling `publish`, readers may still be in the write copy for some time. In general, the
    /// only time you know this is okay is before the first call to `publish` (since no readers
    /// ever entered the write copy).
    // TODO: Make this return `Option<&mut T>`,
    // and only `Some` if there are indeed to readers in the write copy.
    pub fn raw_write_handle(&mut self) -> NonNull<T> {
        self.w_handle
    }
    /// Returns the backing data structure.
    ///
    /// Makes sure that all the pending operations are applied and waits till all the read handles
    /// have departed. Then it uses [`Absorb::drop_first`] to drop one of the copies of the data and
    /// returns the other copy as a [`Taken`] smart pointer.
    pub fn take(mut self) -> Taken<T, O> {
        // It is always safe to `expect` here because `take_inner` is private
        // and it is only called here and in the drop impl. Since we have an owned
        // `self` we know the drop has not yet been called. And every first call of
        // `take_inner` returns `Some`
        self.take_inner()
            .expect("inner is only taken here then self is dropped")
    }
}
// allow using write handle for reads
use std::ops::Deref;
impl<T, O> Deref for WriteHandle<T, O>
where
    T: Absorb<O>,
{
    type Target = ReadHandle<T>;
    /// Expose the paired [`ReadHandle`] so the writer can also perform reads.
    fn deref(&self) -> &Self::Target {
        &self.r_handle
    }
}
impl<T, O> Extend<O> for WriteHandle<T, O>
where
    T: Absorb<O>,
{
    /// Add multiple operations to the operational log.
    ///
    /// Their effects will not be exposed to readers until you call [`publish`](Self::publish)
    fn extend<I>(&mut self, ops: I)
    where
        I: IntoIterator<Item = O>,
    {
        if self.first {
            // Safety: we know there are no outstanding w_handle readers, since we haven't
            // refreshed ever before, so we can modify it directly!
            let mut w_inner = self.raw_write_handle();
            let w_inner = unsafe { w_inner.as_mut() };
            let r_handle = self.enter().expect("map has not yet been destroyed");
            // Because we are operating directly on the map, and nothing is aliased, we do want
            // to perform drops, so we invoke absorb_second.
            for op in ops {
                Absorb::absorb_second(w_inner, op, &*r_handle);
            }
        } else {
            // After the first publish, readers may be inside either copy, so the
            // operations have to go through the oplog instead.
            self.oplog.extend(ops);
        }
    }
}
/// `WriteHandle` can be sent across thread boundaries:
///
/// ```
/// use left_right::WriteHandle;
///
/// struct Data;
/// impl left_right::Absorb<()> for Data {
/// fn absorb_first(&mut self, _: &mut (), _: &Self) {}
/// fn sync_with(&mut self, _: &Self) {}
/// }
///
/// fn is_send<T: Send>() {
/// // dummy function just used for its parameterized type bound
/// }
///
/// is_send::<WriteHandle<Data, ()>>()
/// ```
///
/// As long as the inner types allow that of course.
/// Namely, the data type has to be `Send`:
///
/// ```compile_fail
/// use left_right::WriteHandle;
/// use std::rc::Rc;
///
/// struct Data(Rc<()>);
/// impl left_right::Absorb<()> for Data {
/// fn absorb_first(&mut self, _: &mut (), _: &Self) {}
/// }
///
/// fn is_send<T: Send>() {
/// // dummy function just used for its parameterized type bound
/// }
///
/// is_send::<WriteHandle<Data, ()>>()
/// ```
///
/// .. the operation type has to be `Send`:
///
/// ```compile_fail
/// use left_right::WriteHandle;
/// use std::rc::Rc;
///
/// struct Data;
/// impl left_right::Absorb<Rc<()>> for Data {
/// fn absorb_first(&mut self, _: &mut Rc<()>, _: &Self) {}
/// }
///
/// fn is_send<T: Send>() {
/// // dummy function just used for its parameterized type bound
/// }
///
/// is_send::<WriteHandle<Data, Rc<()>>>()
/// ```
///
/// .. and the data type has to be `Sync` so it's still okay to read through `ReadHandle`s:
///
/// ```compile_fail
/// use left_right::WriteHandle;
/// use std::cell::Cell;
///
/// struct Data(Cell<()>);
/// impl left_right::Absorb<()> for Data {
/// fn absorb_first(&mut self, _: &mut (), _: &Self) {}
/// }
///
/// fn is_send<T: Send>() {
/// // dummy function just used for its parameterized type bound
/// }
///
/// is_send::<WriteHandle<Data, ()>>()
/// ```
// Marker type whose only purpose is to host the doctests above: the passing
// `is_send` doctest and the three `compile_fail` doctests together pin down
// exactly when `WriteHandle` is `Send`. Verified by `cargo test`'s doctests.
#[allow(dead_code)]
struct CheckWriteHandleSend;
#[cfg(test)]
mod tests {
    use crate::sync::{AtomicUsize, Mutex, Ordering};
    use crate::Absorb;
    use slab::Slab;
    include!("./utilities.rs");
    // Before the first publish, appended ops are applied directly and the
    // oplog stays empty; after the first publish they are queued instead.
    #[test]
    fn append_test() {
        let (mut w, _r) = crate::new::<i32, _>();
        assert_eq!(w.first, true);
        w.append(CounterAddOp(1));
        assert_eq!(w.oplog.len(), 0);
        assert_eq!(w.first, true);
        w.publish();
        assert_eq!(w.first, false);
        w.append(CounterAddOp(2));
        w.append(CounterAddOp(3));
        assert_eq!(w.oplog.len(), 2);
    }
    // take() must account for every appended op — published or still pending —
    // no matter how many publishes happened beforehand.
    #[test]
    fn take_test() {
        // publish twice then take with no pending operations
        let (mut w, _r) = crate::new_from_empty::<i32, _>(2);
        w.append(CounterAddOp(1));
        w.publish();
        w.append(CounterAddOp(1));
        w.publish();
        assert_eq!(*w.take(), 4);
        // publish twice then pending operation published by take
        let (mut w, _r) = crate::new_from_empty::<i32, _>(2);
        w.append(CounterAddOp(1));
        w.publish();
        w.append(CounterAddOp(1));
        w.publish();
        w.append(CounterAddOp(2));
        assert_eq!(*w.take(), 6);
        // normal publish then pending operations published by take
        let (mut w, _r) = crate::new_from_empty::<i32, _>(2);
        w.append(CounterAddOp(1));
        w.publish();
        w.append(CounterAddOp(1));
        assert_eq!(*w.take(), 4);
        // pending operations published by take
        let (mut w, _r) = crate::new_from_empty::<i32, _>(2);
        w.append(CounterAddOp(1));
        assert_eq!(*w.take(), 3);
        // empty op queue
        let (mut w, _r) = crate::new_from_empty::<i32, _>(2);
        w.append(CounterAddOp(1));
        w.publish();
        assert_eq!(*w.take(), 3);
        // no operations
        let (w, _r) = crate::new_from_empty::<i32, _>(2);
        assert_eq!(*w.take(), 2);
    }
    // Exercises wait(): it returns immediately with no registered epochs, and
    // blocks on a reader whose epoch is odd/unchanged until that epoch moves.
    #[test]
    fn wait_test() {
        use std::sync::{Arc, Barrier};
        use std::thread;
        let (mut w, _r) = crate::new::<i32, _>();
        // Case 1: If epoch is set to default.
        let test_epochs: crate::Epochs = Default::default();
        let mut test_epochs = test_epochs.lock().unwrap();
        // since there is no epoch to waiting for, wait function will return immediately.
        w.wait(&mut test_epochs);
        // Case 2: If one of the reader is still reading(epoch is odd and count is same as in last_epoch)
        // and wait has been called.
        let held_epoch = Arc::new(AtomicUsize::new(1));
        w.last_epochs = vec![2, 2, 1];
        let mut epochs_slab = Slab::new();
        epochs_slab.insert(Arc::new(AtomicUsize::new(2)));
        epochs_slab.insert(Arc::new(AtomicUsize::new(2)));
        epochs_slab.insert(Arc::clone(&held_epoch));
        let barrier = Arc::new(Barrier::new(2));
        let is_waiting = Arc::clone(&w.is_waiting);
        // check writers waiting state before calling wait.
        let is_waiting_v = is_waiting.load(Ordering::Relaxed);
        assert_eq!(false, is_waiting_v);
        let barrier2 = Arc::clone(&barrier);
        let test_epochs = Arc::new(Mutex::new(epochs_slab));
        let wait_handle = thread::spawn(move || {
            barrier2.wait();
            let mut test_epochs = test_epochs.lock().unwrap();
            w.wait(&mut test_epochs);
        });
        barrier.wait();
        // make sure that writer wait() will call first, only then allow to updates the held epoch.
        while !is_waiting.load(Ordering::Relaxed) {
            thread::yield_now();
        }
        held_epoch.fetch_add(1, Ordering::SeqCst);
        // join to make sure that wait must return after the progress/increment
        // of held_epoch.
        let _ = wait_handle.join();
    }
    // With a reader pinned, a publish would block; verify nothing is pending,
    // so a flush() here would be a no-op and therefore would not block.
    #[test]
    fn flush_noblock() {
        let (mut w, r) = crate::new::<i32, _>();
        w.append(CounterAddOp(42));
        w.publish();
        assert_eq!(*r.enter().unwrap(), 42);
        // pin the epoch
        let _count = r.enter();
        // refresh would hang here
        assert_eq!(w.oplog.iter().skip(w.swap_index).count(), 0);
        assert!(!w.has_pending_operations());
    }
    // Each publish() bumps the refresh counter, and has_pending_operations()
    // only reports ops that have not yet been exposed to readers.
    #[test]
    fn flush_no_refresh() {
        let (mut w, _) = crate::new::<i32, _>();
        // Until we refresh, writes are written directly instead of going to the
        // oplog (because there can't be any readers on the w_handle table).
        assert!(!w.has_pending_operations());
        w.publish();
        assert!(!w.has_pending_operations());
        assert_eq!(w.refreshes, 1);
        w.append(CounterAddOp(42));
        assert!(w.has_pending_operations());
        w.publish();
        assert!(!w.has_pending_operations());
        assert_eq!(w.refreshes, 2);
        w.append(CounterAddOp(42));
        assert!(w.has_pending_operations());
        w.publish();
        assert!(!w.has_pending_operations());
        assert_eq!(w.refreshes, 3);
        // Sanity check that a refresh would have been visible
        assert!(!w.has_pending_operations());
        w.publish();
        assert_eq!(w.refreshes, 4);
    }
}
| true |
7a90666a9e8ddd1209649ccc0d787c16321f26bd
|
Rust
|
oxalica/statx-sys
|
/src/lib.rs
|
UTF-8
| 5,946 | 2.65625 | 3 |
[
"MIT"
] |
permissive
|
//! # Bindings to `statx` syscall.
//!
//! Note that `statx()` was added to Linux in kernel 4.11 .
//!
//! # See also
//! <http://man7.org/linux/man-pages/man2/statx.2.html>
#![no_std]
#![cfg(target_os = "linux")]
#![deny(warnings)]
use libc::syscall;
use libc::{c_char, c_int, c_uint};
/// Timestamp structure for the timestamps in struct statx.
///
/// tv_sec holds the number of seconds before (negative) or after (positive)
/// 00:00:00 1st January 1970 UTC.
///
/// tv_nsec holds a number of nanoseconds (0..999,999,999) after the tv_sec time.
///
/// __reserved is held in case we need a yet finer resolution.
#[repr(C)]
#[derive(Clone, Copy, Debug)]
pub struct statx_timestamp {
    /// Seconds before (negative) or after (positive) the Unix epoch.
    pub tv_sec: i64,
    /// Nanoseconds (0..999,999,999) after `tv_sec`.
    pub tv_nsec: u32,
    // Reserved in case a finer resolution is ever needed.
    __reserved: i32,
}
/// Structures for the extended file attribute retrieval system call
/// (statx()).
///
/// The caller passes a mask of what they're specifically interested in as a
/// parameter to statx(). What statx() actually got will be indicated in
/// st_mask upon return.
///
/// For each bit in the mask argument:
///
/// - if the datum is not supported:
///
///   - the bit will be cleared, and
///
///   - the datum will be set to an appropriate fabricated value if one is
///     available (eg. CIFS can take a default uid and gid), otherwise
///
///   - the field will be cleared;
///
/// - otherwise, if explicitly requested:
///
///   - the datum will be synchronised to the server if AT_STATX_FORCE_SYNC is
///     set or if the datum is considered out of date, and
///
///   - the field will be filled in and the bit will be set;
///
/// - otherwise, if not requested, but available in approximate form without any
///   effort, it will be filled in anyway, and the bit will be set upon return
///   (it might not be up to date, however, and no attempt will be made to
///   synchronise the internal state first);
///
/// - otherwise the field and the bit will be cleared before returning.
///
/// Items in STATX_BASIC_STATS may be marked unavailable on return, but they
/// will have values installed for compatibility purposes so that stat() and
/// co. can be emulated in userspace.
#[repr(C)]
#[derive(Clone, Copy, Debug)]
pub struct statx {
    // 0x00
    /// What results were written \[uncond]
    pub stx_mask: u32,
    /// Preferred general I/O size \[uncond]
    pub stx_blksize: u32,
    /// Flags conveying information about the file \[uncond]
    pub stx_attributes: u64,
    // 0x10
    /// Number of hard links
    pub stx_nlink: u32,
    /// User ID of owner
    pub stx_uid: u32,
    /// Group ID of owner
    pub stx_gid: u32,
    /// File mode
    pub stx_mode: u16,
    // Padding that keeps the following fields at their kernel-defined offsets.
    __spare0: [u16; 1],
    // 0x20
    /// Inode number
    pub stx_ino: u64,
    /// File size
    pub stx_size: u64,
    /// Number of 512-byte blocks allocated
    pub stx_blocks: u64,
    /// Mask to show what's supported in stx_attributes
    pub stx_attributes_mask: u64,
    // 0x40
    /// Last access time
    pub stx_atime: statx_timestamp,
    /// File creation time
    pub stx_btime: statx_timestamp,
    /// Last attribute change time
    pub stx_ctime: statx_timestamp,
    /// Last data modification time
    pub stx_mtime: statx_timestamp,
    /* 0x80 */
    /// Device ID of special file \[if bdev/cdev]
    pub stx_rdev_major: u32,
    /// Minor half of the special-file device ID.
    pub stx_rdev_minor: u32,
    /// ID of device containing file \[uncond]
    pub stx_dev_major: u32,
    /// Minor half of the containing-device ID.
    pub stx_dev_minor: u32,
    // 0x90
    /// Spare space for future expansion
    __spare2: [u64; 14],
    // 0x100
}
mod syscall;
pub use syscall::SYS_statx;
// Flags
// `dirfd` / `flags` values for path resolution; see statx(2) for semantics.
pub const AT_FDCWD: c_int = -100;
pub const AT_SYMLINK_NOFOLLOW: c_int = 0x100;
pub const AT_REMOVEDIR: c_int = 0x200;
pub const AT_SYMLINK_FOLLOW: c_int = 0x400;
pub const AT_NO_AUTOMOUNT: c_int = 0x800;
pub const AT_EMPTY_PATH: c_int = 0x1000;
// AT_STATX_* select how eagerly attributes are synchronised (see statx(2)).
pub const AT_STATX_SYNC_AS_STAT: c_int = 0x0000;
pub const AT_STATX_FORCE_SYNC: c_int = 0x2000;
pub const AT_STATX_SYNC_TYPE: c_int = 0x6000;
pub const AT_STATX_DONT_SYNC: c_int = 0x4000;
// STATX_* bits for the `mask` argument and the `stx_mask` result field.
pub const STATX_TYPE: c_uint = 0x0000_0001;
pub const STATX_MODE: c_uint = 0x0000_0002;
pub const STATX_NLINK: c_uint = 0x0000_0004;
pub const STATX_UID: c_uint = 0x0000_0008;
pub const STATX_GID: c_uint = 0x0000_0010;
pub const STATX_ATIME: c_uint = 0x0000_0020;
pub const STATX_MTIME: c_uint = 0x0000_0040;
pub const STATX_CTIME: c_uint = 0x0000_0080;
pub const STATX_INO: c_uint = 0x0000_0100;
pub const STATX_SIZE: c_uint = 0x0000_0200;
pub const STATX_BLOCKS: c_uint = 0x0000_0400;
pub const STATX_BASIC_STATS: c_uint = 0x0000_07ff;
pub const STATX_BTIME: c_uint = 0x0000_0800;
pub const STATX_ALL: c_uint = 0x0000_0fff;
pub const STATX__RESERVED: c_uint = 0x8000_0000;
// File attributes.
// NOTE(review): these are bits of `stx_attributes`/`stx_attributes_mask`,
// which are `u64` fields, yet the constants are typed `c_int` (while the
// STATX_* mask bits above are `c_uint`) — callers must cast; confirm whether
// `u64` constants were intended.
pub const STATX_ATTR_COMPRESSED: c_int = 0x0000_0004;
pub const STATX_ATTR_IMMUTABLE: c_int = 0x0000_0010;
pub const STATX_ATTR_APPEND: c_int = 0x0000_0020;
pub const STATX_ATTR_NODUMP: c_int = 0x0000_0040;
pub const STATX_ATTR_ENCRYPTED: c_int = 0x0000_0800;
pub const STATX_ATTR_AUTOMOUNT: c_int = 0x0000_1000;
/// statx - get file status (extended)
///
/// See also:
/// <http://man7.org/linux/man-pages/man2/statx.2.html>
///
/// # Safety
///
/// `pathname` must point to a valid NUL-terminated C string, and `statxbuf`
/// must point to writable memory large enough to hold a `struct statx`; the
/// kernel writes the result through that pointer. `dirfd`, `flags`, and
/// `mask` must be values accepted by statx(2).
pub unsafe fn statx(
    dirfd: c_int,
    pathname: *const c_char,
    flags: c_int,
    mask: c_uint,
    statxbuf: *mut statx,
) -> c_int {
    syscall(SYS_statx, dirfd, pathname, flags, mask, statxbuf) as c_int
}
#[cfg(test)]
mod tests {
    use super::*;
    // Verify that the Rust struct sizes and field offsets match the offsets
    // annotated in the `// 0x..` comments on `struct statx` (the kernel ABI).
    #[test]
    fn check_struct_layout() {
        use core::mem::size_of;
        use memoffset::offset_of;
        assert_eq!(size_of::<statx>(), 0x100);
        assert_eq!(size_of::<statx_timestamp>(), 16);
        assert_eq!(offset_of!(statx, stx_mask), 0);
        assert_eq!(offset_of!(statx, stx_nlink), 0x10);
        assert_eq!(offset_of!(statx, stx_ino), 0x20);
        assert_eq!(offset_of!(statx, stx_atime), 0x40);
        assert_eq!(offset_of!(statx, stx_rdev_major), 0x80);
        assert_eq!(offset_of!(statx, __spare2), 0x90);
    }
}
| true |
5c1afe4f00b1e47b081d9e216560acacef0e9064
|
Rust
|
LaplaceKorea/sml-compiler
|
/crates/sml-frontend/src/parser/mod.rs
|
UTF-8
| 7,845 | 3.078125 | 3 |
[
"MIT"
] |
permissive
|
use super::ast::*;
use super::lexer::Lexer;
use super::tokens::*;
use sml_util::diagnostics::Diagnostic;
use sml_util::interner::*;
use sml_util::span::{Span, Spanned};
use sml_util::Const;
use std::iter::Peekable;
mod decls;
mod exprs;
mod pats;
pub mod precedence;
mod types;
/// Recursive-descent parser state: a peekable token stream plus the token
/// under the cursor, the previous token's span, and accumulated diagnostics.
pub struct Parser<'s, 'sym> {
    tokens: Peekable<Lexer<'s, 'sym>>,
    /// Token currently under the cursor.
    current: Spanned<Token>,
    /// Span of the most recently consumed token (used when synthesizing
    /// recovery spans in `expect_try_recover`).
    prev: Span,
    /// Non-fatal diagnostics collected while parsing.
    pub diags: Vec<Diagnostic>,
}
/// The kind of failure a parse routine encountered; rendered into a
/// human-readable message by `Error::to_diagnostic`.
#[derive(Clone, Debug, PartialEq, PartialOrd)]
pub enum ErrorKind {
    /// A specific token was required but something else was found.
    ExpectedToken(Token),
    ExpectedIdentifier,
    ExpectedType,
    ExpectedPat,
    ExpectedExpr,
    ExpectedDecl,
    /// An internal parser invariant was violated.
    Internal,
    /// Input ended unexpectedly.
    EOF,
}
/// A parse error: where it occurred, the offending token, and what was
/// expected instead.
#[derive(Clone, Debug, PartialEq, PartialOrd)]
pub struct Error {
    pub span: Span,
    pub token: Token,
    pub kind: ErrorKind,
}
impl Error {
pub fn to_diagnostic(self) -> Diagnostic {
use ErrorKind::*;
let message = match self.kind {
ExpectedToken(kind) => format!(
"expected a token of kind {:?}, but encountered {:?}",
kind, self.token
),
ExpectedIdentifier => format!("expected identifier, but encountered {:?}", self.token),
ExpectedType => format!("expected type, but encountered {:?}", self.token),
ExpectedPat => format!("expected pattern, but encountered {:?}", self.token),
ExpectedExpr => format!("expected expression, but encountered {:?}", self.token),
ExpectedDecl => format!("expected declaration, but encountered {:?}", self.token),
Internal => format!("internal parser error! last token was {:?}", self.token),
EOF => "EOF?".to_string(),
};
Diagnostic::error(self.span, message)
}
}
/// Build an error `Diagnostic` at span `$sp` with a `format!`-style message
/// and one or more arguments.
#[macro_export]
macro_rules! diag {
    ($sp:expr, $msg:expr, $($t:expr),+) => { Diagnostic::error($sp, format!($msg, $($t),+)) };
}
impl<'s, 'sym> Parser<'s, 'sym> {
    /// Construct a parser over `input`, immediately pulling the first token
    /// into `current`.
    pub fn new(input: &'s str, interner: &'sym mut Interner) -> Parser<'s, 'sym> {
        let mut p = Parser {
            tokens: Lexer::new(input.chars(), interner).peekable(),
            current: Spanned::new(Token::EOF, Span::zero()),
            prev: Span::zero(),
            diags: Vec::new(),
        };
        p.bump();
        p
    }
    /// Generate a parsing error. These are not necessarily fatal
    fn error<T>(&self, k: ErrorKind) -> Result<T, Error> {
        Err(Error {
            span: self.current.span,
            token: self.current.data,
            kind: k,
        })
    }
    /// The token currently under the cursor.
    fn current(&self) -> Token {
        self.current.data
    }
    /// Bump the current token, returning it, and pull a new token
    /// from the lexer
    fn bump(&mut self) -> Token {
        match self.tokens.next() {
            Some(t) => {
                self.prev = self.current.span;
                std::mem::replace(&mut self.current, t).data()
            }
            // Lexer exhausted: leave an EOF token under the cursor.
            None => std::mem::replace(&mut self.current.data, Token::EOF),
        }
    }
    /// Ignore a token matching `kind`, returning whether one was consumed
    fn bump_if(&mut self, kind: Token) -> bool {
        if self.current.data == kind {
            self.bump();
            true
        } else {
            false
        }
    }
    /// Require the current token to be `kind` and consume it; otherwise emit
    /// a diagnostic and return an `ExpectedToken` error.
    fn expect(&mut self, kind: Token) -> Result<(), Error> {
        if self.current() == kind {
            self.bump();
            Ok(())
        } else {
            self.diags.push(diag!(
                self.current.span,
                "expected token {:?}, but found {:?}",
                kind,
                self.current()
            ));
            self.error(ErrorKind::ExpectedToken(kind))
        }
    }
    /// Bump the current token if it equals `kind`, otherwise emit a diagnostic
    /// and continue.
    ///
    /// This is for use in terminals after we have already bumped at least 1
    /// token off of the lexer
    fn expect_try_recover(&mut self, kind: Token) {
        if self.current() == kind {
            self.bump();
        } else {
            self.diags.push(diag!(
                Span::new(self.prev.end, self.current.span.start),
                "Inserting token {:?}",
                kind
            ));
        }
    }
    /// Consume the current token and return its symbol if it is an identifier
    /// (`Token::Id` or `Token::IdS` — presumably alphanumeric vs. symbolic;
    /// confirm against the lexer).
    fn expect_id(&mut self) -> Result<Symbol, Error> {
        match self.current() {
            Token::Id(s) | Token::IdS(s) => {
                self.bump();
                Ok(s)
            }
            _ => self.error(ErrorKind::ExpectedIdentifier),
        }
    }
    /// Whether the current token is an identifier of either flavor.
    fn is_id(&self) -> bool {
        match self.current() {
            Token::Id(_) | Token::IdS(_) => true,
            _ => false,
        }
    }
    /// Like `expect_id`, but accepts only `Token::Id` (not `Token::IdS`).
    fn expect_id_alpha(&mut self) -> Result<Symbol, Error> {
        match self.current() {
            Token::Id(s) => {
                self.bump();
                Ok(s)
            }
            _ => self.error(ErrorKind::ExpectedIdentifier),
        }
    }
    /// Run `f` and wrap its result in a `Spanned` covering every token it
    /// consumed (from the span at entry through the current token's span).
    fn spanned<T, F: Fn(&mut Parser) -> Result<T, Error>>(
        &mut self,
        f: F,
    ) -> Result<Spanned<T>, Error> {
        let sp = self.current.span;
        f(self).map(|inner| Spanned::new(inner, sp + self.current.span))
    }
    /// Call `func` once, returning the `Result<T,E>` of the function.
    /// A failure of `func` may have side effects, including emitting
    /// diagnostics containing `message`
    ///
    /// Generally, this is just used to give better error messages
    fn once<T, E, F>(&mut self, func: F, message: &str) -> Result<T, E>
    where
        F: Fn(&mut Parser) -> Result<T, E>,
    {
        match func(self) {
            Ok(t) => Ok(t),
            Err(e) => {
                self.diags.push(diag!(self.current.span, "{}", message));
                Err(e)
            }
        }
    }
    /// Collect the result of `func` into a `Vec<T>` as long as `func` returns
    /// an `Ok(T)`. A call to `func` must succeed on the first try, or an error
    /// is immediately returned. Subsequent calls to `func` may fail, in which
    /// case the error is discarded, and the results are returned. If `delimit`
    /// is supplied, the parser will discard matching tokens between each call
    /// to `func`
    fn plus<T, E, F>(&mut self, func: F, delimit: Option<Token>) -> Result<Vec<T>, E>
    where
        F: Fn(&mut Parser) -> Result<T, E>,
    {
        let mut v = vec![func(self)?];
        if let Some(t) = delimit {
            if !self.bump_if(t) {
                return Ok(v);
            }
        }
        while let Ok(x) = func(self) {
            v.push(x);
            if let Some(t) = delimit {
                if !self.bump_if(t) {
                    break;
                }
            }
        }
        Ok(v)
    }
    /// Collect the result of `func` into a `Vec<T>` as long as `func` returns
    /// an `Ok(T)`. If an error is encountered, it is discarded and the results
    /// are immediately returned. If `delimit` is supplied, the parser will
    /// discard matching tokens between each call to `func`
    fn star<T, E, F>(&mut self, func: F, delimit: Option<Token>) -> Vec<T>
    where
        F: Fn(&mut Parser) -> Result<T, E>,
    {
        let mut v = Vec::new();
        while let Ok(x) = func(self) {
            v.push(x);
            if let Some(t) = delimit {
                if !self.bump_if(t) {
                    break;
                }
            }
        }
        v
    }
    /// Identical semantics to `Parser::plus`, except `delimit` must be supplied
    fn delimited<T, F>(&mut self, func: F, delimit: Token) -> Result<Vec<T>, Error>
    where
        F: Fn(&mut Parser) -> Result<T, Error>,
    {
        let mut v = vec![func(self)?];
        if !self.bump_if(delimit) {
            return Ok(v);
        }
        while let Ok(x) = func(self) {
            v.push(x);
            if !self.bump_if(delimit) {
                break;
            }
        }
        Ok(v)
    }
}
| true |
e6798e7676e3f73c78f90ee82b76552503a3626f
|
Rust
|
GaloisInc/mir-verifier
|
/lib/liballoc/tests/btree/map.rs
|
UTF-8
| 35,914 | 2.9375 | 3 |
[] |
permissive
|
use std::collections::btree_map::Entry::{Occupied, Vacant};
use std::collections::BTreeMap;
use std::convert::TryFrom;
use std::fmt::Debug;
use std::iter::FromIterator;
use std::ops::Bound::{self, Excluded, Included, Unbounded};
use std::ops::RangeBounds;
use std::panic::catch_unwind;
use std::rc::Rc;
use std::sync::atomic::{AtomicU32, Ordering};
use super::DeterministicRng;
#[test]
fn test_basic_large() {
    // Build a large map, then verify lookups, overwrites, and removals at scale.
    let mut m = BTreeMap::new();
    #[cfg(not(miri))] // Miri is too slow
    let size = 10000;
    #[cfg(miri)]
    let size = 144; // to obtain height 3 tree (having edges to both kinds of nodes)
    assert_eq!(m.len(), 0);
    // Fresh inserts: each returns None and grows the map by one.
    for k in 0..size {
        assert_eq!(m.insert(k, 10 * k), None);
        assert_eq!(m.len(), k + 1);
    }
    assert_eq!(m.first_key_value(), Some((&0, &0)));
    assert_eq!(m.last_key_value(), Some((&(size - 1), &(10 * (size - 1)))));
    assert_eq!(m.first_entry().unwrap().key(), &0);
    assert_eq!(m.last_entry().unwrap().key(), &(size - 1));
    // Every key is present; keys past the end are not.
    for k in 0..size {
        assert_eq!(m.get(&k).unwrap(), &(k * 10));
    }
    for k in size..size * 2 {
        assert_eq!(m.get(&k), None);
    }
    // Overwrites: each returns the previous value and leaves len unchanged.
    for k in 0..size {
        assert_eq!(m.insert(k, 100 * k), Some(10 * k));
        assert_eq!(m.len(), size);
    }
    for k in 0..size {
        assert_eq!(m.get(&k).unwrap(), &(k * 100));
    }
    // Remove the even keys, then verify only odd keys remain.
    for k in 0..size / 2 {
        assert_eq!(m.remove(&(k * 2)), Some(k * 200));
        assert_eq!(m.len(), size - k - 1);
    }
    for k in 0..size / 2 {
        assert_eq!(m.get(&(2 * k)), None);
        assert_eq!(m.get(&(2 * k + 1)).unwrap(), &(k * 200 + 100));
    }
    // Removing evens again is a no-op; removing odds empties the map.
    for k in 0..size / 2 {
        assert_eq!(m.remove(&(2 * k)), None);
        assert_eq!(m.remove(&(2 * k + 1)), Some(k * 200 + 100));
        assert_eq!(m.len(), size / 2 - k - 1);
    }
}
#[test]
fn test_basic_small() {
    // Walk the map through 0 -> 1 -> 2 -> 1 -> 0 entries, checking every
    // query method at each step.
    let mut m = BTreeMap::new();
    // Empty, root is absent (None):
    assert_eq!(m.remove(&1), None);
    assert_eq!(m.len(), 0);
    assert_eq!(m.get(&1), None);
    assert_eq!(m.get_mut(&1), None);
    assert_eq!(m.first_key_value(), None);
    assert_eq!(m.last_key_value(), None);
    assert_eq!(m.keys().count(), 0);
    assert_eq!(m.values().count(), 0);
    assert_eq!(m.range(..).next(), None);
    assert_eq!(m.range(..1).next(), None);
    assert_eq!(m.range(1..).next(), None);
    assert_eq!(m.range(1..=1).next(), None);
    assert_eq!(m.range(1..2).next(), None);
    assert_eq!(m.insert(1, 1), None);
    // 1 key-value pair:
    assert_eq!(m.len(), 1);
    assert_eq!(m.get(&1), Some(&1));
    assert_eq!(m.get_mut(&1), Some(&mut 1));
    assert_eq!(m.first_key_value(), Some((&1, &1)));
    assert_eq!(m.last_key_value(), Some((&1, &1)));
    assert_eq!(m.keys().collect::<Vec<_>>(), vec![&1]);
    assert_eq!(m.values().collect::<Vec<_>>(), vec![&1]);
    assert_eq!(m.insert(1, 2), Some(1));
    assert_eq!(m.len(), 1);
    assert_eq!(m.get(&1), Some(&2));
    assert_eq!(m.get_mut(&1), Some(&mut 2));
    assert_eq!(m.first_key_value(), Some((&1, &2)));
    assert_eq!(m.last_key_value(), Some((&1, &2)));
    assert_eq!(m.keys().collect::<Vec<_>>(), vec![&1]);
    assert_eq!(m.values().collect::<Vec<_>>(), vec![&2]);
    assert_eq!(m.insert(2, 4), None);
    // 2 key-value pairs:
    assert_eq!(m.len(), 2);
    assert_eq!(m.get(&2), Some(&4));
    assert_eq!(m.get_mut(&2), Some(&mut 4));
    assert_eq!(m.first_key_value(), Some((&1, &2)));
    assert_eq!(m.last_key_value(), Some((&2, &4)));
    assert_eq!(m.keys().collect::<Vec<_>>(), vec![&1, &2]);
    assert_eq!(m.values().collect::<Vec<_>>(), vec![&2, &4]);
    assert_eq!(m.remove(&1), Some(2));
    // 1 key-value pair:
    assert_eq!(m.len(), 1);
    assert_eq!(m.get(&1), None);
    assert_eq!(m.get_mut(&1), None);
    assert_eq!(m.get(&2), Some(&4));
    assert_eq!(m.get_mut(&2), Some(&mut 4));
    assert_eq!(m.first_key_value(), Some((&2, &4)));
    assert_eq!(m.last_key_value(), Some((&2, &4)));
    assert_eq!(m.keys().collect::<Vec<_>>(), vec![&2]);
    assert_eq!(m.values().collect::<Vec<_>>(), vec![&4]);
    assert_eq!(m.remove(&2), Some(4));
    // Empty but root is owned (Some(...)):
    assert_eq!(m.len(), 0);
    assert_eq!(m.get(&1), None);
    assert_eq!(m.get_mut(&1), None);
    assert_eq!(m.first_key_value(), None);
    assert_eq!(m.last_key_value(), None);
    assert_eq!(m.keys().count(), 0);
    assert_eq!(m.values().count(), 0);
    assert_eq!(m.range(..).next(), None);
    assert_eq!(m.range(..1).next(), None);
    assert_eq!(m.range(1..).next(), None);
    assert_eq!(m.range(1..=1).next(), None);
    assert_eq!(m.range(1..2).next(), None);
    assert_eq!(m.remove(&1), None);
}
#[test]
fn test_iter() {
    #[cfg(not(miri))] // Miri is too slow
    let size = 10000;
    #[cfg(miri)]
    let size = 200;
    let mut m: BTreeMap<_, _> = (0..size).map(|i| (i, i)).collect();
    // Walk an iterator that should yield (0,0), (1,1), ... in order,
    // checking size_hint exactness at every step.
    fn check<I>(size: usize, mut it: I)
    where
        I: Iterator<Item = (usize, usize)>,
    {
        for n in 0..size {
            assert_eq!(it.size_hint(), (size - n, Some(size - n)));
            assert_eq!(it.next().unwrap(), (n, n));
        }
        assert_eq!(it.size_hint(), (0, Some(0)));
        assert_eq!(it.next(), None);
    }
    check(size, m.iter().map(|(&k, &v)| (k, v)));
    check(size, m.iter_mut().map(|(&k, &mut v)| (k, v)));
    check(size, m.into_iter());
}
#[test]
fn test_iter_rev() {
    #[cfg(not(miri))] // Miri is too slow
    let size = 10000;
    #[cfg(miri)]
    let size = 200;
    let mut m: BTreeMap<_, _> = (0..size).map(|i| (i, i)).collect();
    // Walk a reversed iterator that should yield (size-1, size-1) downwards,
    // checking size_hint exactness at every step.
    fn check<I>(size: usize, mut it: I)
    where
        I: Iterator<Item = (usize, usize)>,
    {
        for n in 0..size {
            assert_eq!(it.size_hint(), (size - n, Some(size - n)));
            assert_eq!(it.next().unwrap(), (size - n - 1, size - n - 1));
        }
        assert_eq!(it.size_hint(), (0, Some(0)));
        assert_eq!(it.next(), None);
    }
    check(size, m.iter().rev().map(|(&k, &v)| (k, v)));
    check(size, m.iter_mut().rev().map(|(&k, &mut v)| (k, v)));
    check(size, m.into_iter().rev());
}
/// Specifically tests iter_mut's ability to mutate the value of pairs in-line
fn do_test_iter_mut_mutation<T>(size: usize)
where
T: Copy + Debug + Ord + TryFrom<usize>,
<T as std::convert::TryFrom<usize>>::Error: std::fmt::Debug,
{
let zero = T::try_from(0).unwrap();
let mut map: BTreeMap<T, T> = (0..size).map(|i| (T::try_from(i).unwrap(), zero)).collect();
// Forward and backward iteration sees enough pairs (also tested elsewhere)
assert_eq!(map.iter_mut().count(), size);
assert_eq!(map.iter_mut().rev().count(), size);
// Iterate forwards, trying to mutate to unique values
for (i, (k, v)) in map.iter_mut().enumerate() {
assert_eq!(*k, T::try_from(i).unwrap());
assert_eq!(*v, zero);
*v = T::try_from(i + 1).unwrap();
}
// Iterate backwards, checking that mutations succeeded and trying to mutate again
for (i, (k, v)) in map.iter_mut().rev().enumerate() {
assert_eq!(*k, T::try_from(size - i - 1).unwrap());
assert_eq!(*v, T::try_from(size - i).unwrap());
*v = T::try_from(2 * size - i).unwrap();
}
// Check that backward mutations succeeded
for (i, (k, v)) in map.iter_mut().enumerate() {
assert_eq!(*k, T::try_from(i).unwrap());
assert_eq!(*v, T::try_from(size + i + 1).unwrap());
}
}
/// Key/value type with exaggerated alignment (32 bytes), used by
/// `test_iter_mut_mutation` to exercise node layout with padded entries.
#[derive(Clone, Copy, Debug, Eq, PartialEq, PartialOrd, Ord)]
#[repr(align(32))]
struct Align32(usize);
impl TryFrom<usize> for Align32 {
    type Error = ();
    // Infallible in practice; `TryFrom` is implemented only so the generic
    // test driver can treat this type like the fallible integer conversions.
    fn try_from(value: usize) -> Result<Self, ()> {
        Ok(Self(value))
    }
}
#[test]
fn test_iter_mut_mutation() {
// Check many alignments because various fields precede array in NodeHeader.
// Check with size 0 which should not iterate at all.
// Check with size 1 for a tree with one kind of node (root = leaf).
// Check with size 12 for a tree with two kinds of nodes (root and leaves).
// Check with size 144 for a tree with all kinds of nodes (root, internals and leaves).
do_test_iter_mut_mutation::<u8>(0);
do_test_iter_mut_mutation::<u8>(1);
do_test_iter_mut_mutation::<u8>(12);
do_test_iter_mut_mutation::<u8>(127); // not enough unique values to test 144
do_test_iter_mut_mutation::<u16>(1);
do_test_iter_mut_mutation::<u16>(12);
do_test_iter_mut_mutation::<u16>(144);
do_test_iter_mut_mutation::<u32>(1);
do_test_iter_mut_mutation::<u32>(12);
do_test_iter_mut_mutation::<u32>(144);
do_test_iter_mut_mutation::<u64>(1);
do_test_iter_mut_mutation::<u64>(12);
do_test_iter_mut_mutation::<u64>(144);
do_test_iter_mut_mutation::<u128>(1);
do_test_iter_mut_mutation::<u128>(12);
do_test_iter_mut_mutation::<u128>(144);
do_test_iter_mut_mutation::<Align32>(1);
do_test_iter_mut_mutation::<Align32>(12);
do_test_iter_mut_mutation::<Align32>(144);
}
#[test]
fn test_values_mut() {
    // `values_mut` must yield mutable references in ascending key order, and
    // mutations through them must be visible to later reads.
    let mut map = BTreeMap::new();
    map.insert(1, String::from("hello"));
    map.insert(2, String::from("goodbye"));
    map.values_mut().for_each(|v| v.push('!'));
    let collected: Vec<String> = map.values().cloned().collect();
    assert_eq!(collected, [String::from("hello!"), String::from("goodbye!")]);
}
#[test]
fn test_iter_mixed() {
#[cfg(not(miri))] // Miri is too slow
let size = 10000;
#[cfg(miri)]
let size = 200;
let mut map: BTreeMap<_, _> = (0..size).map(|i| (i, i)).collect();
fn test<T>(size: usize, mut iter: T)
where
T: Iterator<Item = (usize, usize)> + DoubleEndedIterator,
{
for i in 0..size / 4 {
assert_eq!(iter.size_hint(), (size - i * 2, Some(size - i * 2)));
assert_eq!(iter.next().unwrap(), (i, i));
assert_eq!(iter.next_back().unwrap(), (size - i - 1, size - i - 1));
}
for i in size / 4..size * 3 / 4 {
assert_eq!(iter.size_hint(), (size * 3 / 4 - i, Some(size * 3 / 4 - i)));
assert_eq!(iter.next().unwrap(), (i, i));
}
assert_eq!(iter.size_hint(), (0, Some(0)));
assert_eq!(iter.next(), None);
}
test(size, map.iter().map(|(&k, &v)| (k, v)));
test(size, map.iter_mut().map(|(&k, &mut v)| (k, v)));
test(size, map.into_iter());
}
/// Collects the keys produced by `map.range(range)`, asserting along the way
/// that every visited entry still satisfies the test invariant `key == value`.
fn range_keys(map: &BTreeMap<i32, i32>, range: impl RangeBounds<i32>) -> Vec<i32> {
    let mut keys = Vec::new();
    for (&k, &v) in map.range(range) {
        assert_eq!(k, v);
        keys.push(k);
    }
    keys
}
#[test]
fn test_range_small() {
let size = 4;
let map: BTreeMap<_, _> = (1..=size).map(|i| (i, i)).collect();
let all: Vec<_> = (1..=size).collect();
let (first, last) = (vec![all[0]], vec![all[size as usize - 1]]);
assert_eq!(range_keys(&map, (Excluded(0), Excluded(size + 1))), all);
assert_eq!(range_keys(&map, (Excluded(0), Included(size + 1))), all);
assert_eq!(range_keys(&map, (Excluded(0), Included(size))), all);
assert_eq!(range_keys(&map, (Excluded(0), Unbounded)), all);
assert_eq!(range_keys(&map, (Included(0), Excluded(size + 1))), all);
assert_eq!(range_keys(&map, (Included(0), Included(size + 1))), all);
assert_eq!(range_keys(&map, (Included(0), Included(size))), all);
assert_eq!(range_keys(&map, (Included(0), Unbounded)), all);
assert_eq!(range_keys(&map, (Included(1), Excluded(size + 1))), all);
assert_eq!(range_keys(&map, (Included(1), Included(size + 1))), all);
assert_eq!(range_keys(&map, (Included(1), Included(size))), all);
assert_eq!(range_keys(&map, (Included(1), Unbounded)), all);
assert_eq!(range_keys(&map, (Unbounded, Excluded(size + 1))), all);
assert_eq!(range_keys(&map, (Unbounded, Included(size + 1))), all);
assert_eq!(range_keys(&map, (Unbounded, Included(size))), all);
assert_eq!(range_keys(&map, ..), all);
assert_eq!(range_keys(&map, (Excluded(0), Excluded(1))), vec![]);
assert_eq!(range_keys(&map, (Excluded(0), Included(0))), vec![]);
assert_eq!(range_keys(&map, (Included(0), Included(0))), vec![]);
assert_eq!(range_keys(&map, (Included(0), Excluded(1))), vec![]);
assert_eq!(range_keys(&map, (Unbounded, Excluded(1))), vec![]);
assert_eq!(range_keys(&map, (Unbounded, Included(0))), vec![]);
assert_eq!(range_keys(&map, (Excluded(0), Excluded(2))), first);
assert_eq!(range_keys(&map, (Excluded(0), Included(1))), first);
assert_eq!(range_keys(&map, (Included(0), Excluded(2))), first);
assert_eq!(range_keys(&map, (Included(0), Included(1))), first);
assert_eq!(range_keys(&map, (Included(1), Excluded(2))), first);
assert_eq!(range_keys(&map, (Included(1), Included(1))), first);
assert_eq!(range_keys(&map, (Unbounded, Excluded(2))), first);
assert_eq!(range_keys(&map, (Unbounded, Included(1))), first);
assert_eq!(range_keys(&map, (Excluded(size - 1), Excluded(size + 1))), last);
assert_eq!(range_keys(&map, (Excluded(size - 1), Included(size + 1))), last);
assert_eq!(range_keys(&map, (Excluded(size - 1), Included(size))), last);
assert_eq!(range_keys(&map, (Excluded(size - 1), Unbounded)), last);
assert_eq!(range_keys(&map, (Included(size), Excluded(size + 1))), last);
assert_eq!(range_keys(&map, (Included(size), Included(size + 1))), last);
assert_eq!(range_keys(&map, (Included(size), Included(size))), last);
assert_eq!(range_keys(&map, (Included(size), Unbounded)), last);
assert_eq!(range_keys(&map, (Excluded(size), Excluded(size + 1))), vec![]);
assert_eq!(range_keys(&map, (Excluded(size), Included(size))), vec![]);
assert_eq!(range_keys(&map, (Excluded(size), Unbounded)), vec![]);
assert_eq!(range_keys(&map, (Included(size + 1), Excluded(size + 1))), vec![]);
assert_eq!(range_keys(&map, (Included(size + 1), Included(size + 1))), vec![]);
assert_eq!(range_keys(&map, (Included(size + 1), Unbounded)), vec![]);
assert_eq!(range_keys(&map, ..3), vec![1, 2]);
assert_eq!(range_keys(&map, 3..), vec![3, 4]);
assert_eq!(range_keys(&map, 2..=3), vec![2, 3]);
}
#[test]
fn test_range_height_2() {
// Assuming that node.CAPACITY is 11, having 12 pairs implies a height 2 tree
// with 2 leaves. Depending on details we don't want or need to rely upon,
// the single key at the root will be 6 or 7.
let map: BTreeMap<_, _> = (1..=12).map(|i| (i, i)).collect();
for &root in &[6, 7] {
assert_eq!(range_keys(&map, (Excluded(root), Excluded(root + 1))), vec![]);
assert_eq!(range_keys(&map, (Excluded(root), Included(root + 1))), vec![root + 1]);
assert_eq!(range_keys(&map, (Included(root), Excluded(root + 1))), vec![root]);
assert_eq!(range_keys(&map, (Included(root), Included(root + 1))), vec![root, root + 1]);
assert_eq!(range_keys(&map, (Excluded(root - 1), Excluded(root))), vec![]);
assert_eq!(range_keys(&map, (Included(root - 1), Excluded(root))), vec![root - 1]);
assert_eq!(range_keys(&map, (Excluded(root - 1), Included(root))), vec![root]);
assert_eq!(range_keys(&map, (Included(root - 1), Included(root))), vec![root - 1, root]);
}
}
#[test]
fn test_range_large() {
let size = 200;
let map: BTreeMap<_, _> = (1..=size).map(|i| (i, i)).collect();
let all: Vec<_> = (1..=size).collect();
let (first, last) = (vec![all[0]], vec![all[size as usize - 1]]);
assert_eq!(range_keys(&map, (Excluded(0), Excluded(size + 1))), all);
assert_eq!(range_keys(&map, (Excluded(0), Included(size + 1))), all);
assert_eq!(range_keys(&map, (Excluded(0), Included(size))), all);
assert_eq!(range_keys(&map, (Excluded(0), Unbounded)), all);
assert_eq!(range_keys(&map, (Included(0), Excluded(size + 1))), all);
assert_eq!(range_keys(&map, (Included(0), Included(size + 1))), all);
assert_eq!(range_keys(&map, (Included(0), Included(size))), all);
assert_eq!(range_keys(&map, (Included(0), Unbounded)), all);
assert_eq!(range_keys(&map, (Included(1), Excluded(size + 1))), all);
assert_eq!(range_keys(&map, (Included(1), Included(size + 1))), all);
assert_eq!(range_keys(&map, (Included(1), Included(size))), all);
assert_eq!(range_keys(&map, (Included(1), Unbounded)), all);
assert_eq!(range_keys(&map, (Unbounded, Excluded(size + 1))), all);
assert_eq!(range_keys(&map, (Unbounded, Included(size + 1))), all);
assert_eq!(range_keys(&map, (Unbounded, Included(size))), all);
assert_eq!(range_keys(&map, ..), all);
assert_eq!(range_keys(&map, (Excluded(0), Excluded(1))), vec![]);
assert_eq!(range_keys(&map, (Excluded(0), Included(0))), vec![]);
assert_eq!(range_keys(&map, (Included(0), Included(0))), vec![]);
assert_eq!(range_keys(&map, (Included(0), Excluded(1))), vec![]);
assert_eq!(range_keys(&map, (Unbounded, Excluded(1))), vec![]);
assert_eq!(range_keys(&map, (Unbounded, Included(0))), vec![]);
assert_eq!(range_keys(&map, (Excluded(0), Excluded(2))), first);
assert_eq!(range_keys(&map, (Excluded(0), Included(1))), first);
assert_eq!(range_keys(&map, (Included(0), Excluded(2))), first);
assert_eq!(range_keys(&map, (Included(0), Included(1))), first);
assert_eq!(range_keys(&map, (Included(1), Excluded(2))), first);
assert_eq!(range_keys(&map, (Included(1), Included(1))), first);
assert_eq!(range_keys(&map, (Unbounded, Excluded(2))), first);
assert_eq!(range_keys(&map, (Unbounded, Included(1))), first);
assert_eq!(range_keys(&map, (Excluded(size - 1), Excluded(size + 1))), last);
assert_eq!(range_keys(&map, (Excluded(size - 1), Included(size + 1))), last);
assert_eq!(range_keys(&map, (Excluded(size - 1), Included(size))), last);
assert_eq!(range_keys(&map, (Excluded(size - 1), Unbounded)), last);
assert_eq!(range_keys(&map, (Included(size), Excluded(size + 1))), last);
assert_eq!(range_keys(&map, (Included(size), Included(size + 1))), last);
assert_eq!(range_keys(&map, (Included(size), Included(size))), last);
assert_eq!(range_keys(&map, (Included(size), Unbounded)), last);
assert_eq!(range_keys(&map, (Excluded(size), Excluded(size + 1))), vec![]);
assert_eq!(range_keys(&map, (Excluded(size), Included(size))), vec![]);
assert_eq!(range_keys(&map, (Excluded(size), Unbounded)), vec![]);
assert_eq!(range_keys(&map, (Included(size + 1), Excluded(size + 1))), vec![]);
assert_eq!(range_keys(&map, (Included(size + 1), Included(size + 1))), vec![]);
assert_eq!(range_keys(&map, (Included(size + 1), Unbounded)), vec![]);
fn check<'a, L, R>(lhs: L, rhs: R)
where
L: IntoIterator<Item = (&'a i32, &'a i32)>,
R: IntoIterator<Item = (&'a i32, &'a i32)>,
{
let lhs: Vec<_> = lhs.into_iter().collect();
let rhs: Vec<_> = rhs.into_iter().collect();
assert_eq!(lhs, rhs);
}
check(map.range(..=100), map.range(..101));
check(map.range(5..=8), vec![(&5, &5), (&6, &6), (&7, &7), (&8, &8)]);
check(map.range(-1..=2), vec![(&1, &1), (&2, &2)]);
}
#[test]
fn test_range_inclusive_max_value() {
    // An inclusive range ending at the maximum key must not overflow when the
    // iterator computes the exclusive upper bound.
    // `usize::MAX` is the associated constant; the `std::usize::MAX` module
    // path is the deprecated legacy form.
    let max = usize::MAX;
    let map: BTreeMap<_, _> = vec![(max, 0)].into_iter().collect();
    assert_eq!(map.range(max..=max).collect::<Vec<_>>(), &[(&max, &0)]);
}
#[test]
fn test_range_equal_empty_cases() {
    // Half-open ranges whose bounds coincide are valid (unlike the doubly
    // excluded case below) but must yield no entries.
    let map: BTreeMap<_, _> = (0..5).map(|i| (i, i)).collect();
    assert!(map.range((Included(2), Excluded(2))).next().is_none());
    assert!(map.range((Excluded(2), Included(2))).next().is_none());
}
#[test]
#[should_panic]
fn test_range_equal_excluded() {
let map: BTreeMap<_, _> = (0..5).map(|i| (i, i)).collect();
map.range((Excluded(2), Excluded(2)));
}
#[test]
#[should_panic]
fn test_range_backwards_1() {
let map: BTreeMap<_, _> = (0..5).map(|i| (i, i)).collect();
map.range((Included(3), Included(2)));
}
#[test]
#[should_panic]
fn test_range_backwards_2() {
let map: BTreeMap<_, _> = (0..5).map(|i| (i, i)).collect();
map.range((Included(3), Excluded(2)));
}
#[test]
#[should_panic]
fn test_range_backwards_3() {
let map: BTreeMap<_, _> = (0..5).map(|i| (i, i)).collect();
map.range((Excluded(3), Included(2)));
}
#[test]
#[should_panic]
fn test_range_backwards_4() {
let map: BTreeMap<_, _> = (0..5).map(|i| (i, i)).collect();
map.range((Excluded(3), Excluded(2)));
}
#[test]
fn test_range_1000() {
#[cfg(not(miri))] // Miri is too slow
let size = 1000;
#[cfg(miri)]
let size = 144; // to obtain height 3 tree (having edges to both kinds of nodes)
let map: BTreeMap<_, _> = (0..size).map(|i| (i, i)).collect();
fn test(map: &BTreeMap<u32, u32>, size: u32, min: Bound<&u32>, max: Bound<&u32>) {
let mut kvs = map.range((min, max)).map(|(&k, &v)| (k, v));
let mut pairs = (0..size).map(|i| (i, i));
for (kv, pair) in kvs.by_ref().zip(pairs.by_ref()) {
assert_eq!(kv, pair);
}
assert_eq!(kvs.next(), None);
assert_eq!(pairs.next(), None);
}
test(&map, size, Included(&0), Excluded(&size));
test(&map, size, Unbounded, Excluded(&size));
test(&map, size, Included(&0), Included(&(size - 1)));
test(&map, size, Unbounded, Included(&(size - 1)));
test(&map, size, Included(&0), Unbounded);
test(&map, size, Unbounded, Unbounded);
}
#[test]
fn test_range_borrowed_key() {
let mut map = BTreeMap::new();
map.insert("aardvark".to_string(), 1);
map.insert("baboon".to_string(), 2);
map.insert("coyote".to_string(), 3);
map.insert("dingo".to_string(), 4);
// NOTE: would like to use simply "b".."d" here...
let mut iter = map.range::<str, _>((Included("b"), Excluded("d")));
assert_eq!(iter.next(), Some((&"baboon".to_string(), &2)));
assert_eq!(iter.next(), Some((&"coyote".to_string(), &3)));
assert_eq!(iter.next(), None);
}
#[test]
fn test_range() {
let size = 200;
#[cfg(not(miri))] // Miri is too slow
let step = 1;
#[cfg(miri)]
let step = 66;
let map: BTreeMap<_, _> = (0..size).map(|i| (i, i)).collect();
for i in (0..size).step_by(step) {
for j in (i..size).step_by(step) {
let mut kvs = map.range((Included(&i), Included(&j))).map(|(&k, &v)| (k, v));
let mut pairs = (i..=j).map(|i| (i, i));
for (kv, pair) in kvs.by_ref().zip(pairs.by_ref()) {
assert_eq!(kv, pair);
}
assert_eq!(kvs.next(), None);
assert_eq!(pairs.next(), None);
}
}
}
#[test]
fn test_range_mut() {
let size = 200;
#[cfg(not(miri))] // Miri is too slow
let step = 1;
#[cfg(miri)]
let step = 66;
let mut map: BTreeMap<_, _> = (0..size).map(|i| (i, i)).collect();
for i in (0..size).step_by(step) {
for j in (i..size).step_by(step) {
let mut kvs = map.range_mut((Included(&i), Included(&j))).map(|(&k, &mut v)| (k, v));
let mut pairs = (i..=j).map(|i| (i, i));
for (kv, pair) in kvs.by_ref().zip(pairs.by_ref()) {
assert_eq!(kv, pair);
}
assert_eq!(kvs.next(), None);
assert_eq!(pairs.next(), None);
}
}
}
#[test]
fn test_borrow() {
// make sure these compile -- using the Borrow trait
{
let mut map = BTreeMap::new();
map.insert("0".to_string(), 1);
assert_eq!(map["0"], 1);
}
{
let mut map = BTreeMap::new();
map.insert(Box::new(0), 1);
assert_eq!(map[&0], 1);
}
{
let mut map = BTreeMap::new();
map.insert(Box::new([0, 1]) as Box<[i32]>, 1);
assert_eq!(map[&[0, 1][..]], 1);
}
{
let mut map = BTreeMap::new();
map.insert(Rc::new(0), 1);
assert_eq!(map[&0], 1);
}
}
#[test]
fn test_entry() {
let xs = [(1, 10), (2, 20), (3, 30), (4, 40), (5, 50), (6, 60)];
let mut map: BTreeMap<_, _> = xs.iter().cloned().collect();
// Existing key (insert)
match map.entry(1) {
Vacant(_) => unreachable!(),
Occupied(mut view) => {
assert_eq!(view.get(), &10);
assert_eq!(view.insert(100), 10);
}
}
assert_eq!(map.get(&1).unwrap(), &100);
assert_eq!(map.len(), 6);
// Existing key (update)
match map.entry(2) {
Vacant(_) => unreachable!(),
Occupied(mut view) => {
let v = view.get_mut();
*v *= 10;
}
}
assert_eq!(map.get(&2).unwrap(), &200);
assert_eq!(map.len(), 6);
// Existing key (take)
match map.entry(3) {
Vacant(_) => unreachable!(),
Occupied(view) => {
assert_eq!(view.remove(), 30);
}
}
assert_eq!(map.get(&3), None);
assert_eq!(map.len(), 5);
// Inexistent key (insert)
match map.entry(10) {
Occupied(_) => unreachable!(),
Vacant(view) => {
assert_eq!(*view.insert(1000), 1000);
}
}
assert_eq!(map.get(&10).unwrap(), &1000);
assert_eq!(map.len(), 6);
}
#[test]
fn test_extend_ref() {
    // Extending from `&BTreeMap` copies the pairs without consuming the
    // source map.
    let mut target = BTreeMap::new();
    target.insert(1, "one");
    let mut source = BTreeMap::new();
    source.insert(2, "two");
    source.insert(3, "three");
    target.extend(&source);
    assert_eq!(target.len(), 3);
    assert_eq!(target[&1], "one");
    assert_eq!(target[&2], "two");
    assert_eq!(target[&3], "three");
}
#[test]
fn test_zst() {
let mut m = BTreeMap::new();
assert_eq!(m.len(), 0);
assert_eq!(m.insert((), ()), None);
assert_eq!(m.len(), 1);
assert_eq!(m.insert((), ()), Some(()));
assert_eq!(m.len(), 1);
assert_eq!(m.iter().count(), 1);
m.clear();
assert_eq!(m.len(), 0);
for _ in 0..100 {
m.insert((), ());
}
assert_eq!(m.len(), 1);
assert_eq!(m.iter().count(), 1);
}
// This test's only purpose is to ensure that zero-sized keys with nonsensical orderings
// do not cause segfaults when used with zero-sized values. All other map behavior is
// undefined.
#[test]
fn test_bad_zst() {
use std::cmp::Ordering;
struct Bad;
impl PartialEq for Bad {
fn eq(&self, _: &Self) -> bool {
false
}
}
impl Eq for Bad {}
impl PartialOrd for Bad {
fn partial_cmp(&self, _: &Self) -> Option<Ordering> {
Some(Ordering::Less)
}
}
impl Ord for Bad {
fn cmp(&self, _: &Self) -> Ordering {
Ordering::Less
}
}
let mut m = BTreeMap::new();
for _ in 0..100 {
m.insert(Bad, Bad);
}
}
#[test]
fn test_clone() {
let mut map = BTreeMap::new();
let size = 12; // to obtain height 2 tree (having edges to leaf nodes)
assert_eq!(map.len(), 0);
for i in 0..size {
assert_eq!(map.insert(i, 10 * i), None);
assert_eq!(map.len(), i + 1);
assert_eq!(map, map.clone());
}
for i in 0..size {
assert_eq!(map.insert(i, 100 * i), Some(10 * i));
assert_eq!(map.len(), size);
assert_eq!(map, map.clone());
}
for i in 0..size / 2 {
assert_eq!(map.remove(&(i * 2)), Some(i * 200));
assert_eq!(map.len(), size - i - 1);
assert_eq!(map, map.clone());
}
for i in 0..size / 2 {
assert_eq!(map.remove(&(2 * i)), None);
assert_eq!(map.remove(&(2 * i + 1)), Some(i * 200 + 100));
assert_eq!(map.len(), size / 2 - i - 1);
assert_eq!(map, map.clone());
}
// Full 2-level and minimal 3-level tree (sizes 143, 144 -- the only ones we clone for).
for i in 1..=144 {
assert_eq!(map.insert(i, i), None);
assert_eq!(map.len(), i);
if i >= 143 {
assert_eq!(map, map.clone());
}
}
}
#[test]
fn test_clone_from() {
let mut map1 = BTreeMap::new();
let max_size = 12; // to obtain height 2 tree (having edges to leaf nodes)
// Range to max_size inclusive, because i is the size of map1 being tested.
for i in 0..=max_size {
let mut map2 = BTreeMap::new();
for j in 0..i {
let mut map1_copy = map2.clone();
map1_copy.clone_from(&map1); // small cloned from large
assert_eq!(map1_copy, map1);
let mut map2_copy = map1.clone();
map2_copy.clone_from(&map2); // large cloned from small
assert_eq!(map2_copy, map2);
map2.insert(100 * j + 1, 2 * j + 1);
}
map2.clone_from(&map1); // same length
assert_eq!(map2, map1);
map1.insert(i, 10 * i);
}
}
#[test]
#[allow(dead_code)]
fn test_variance() {
use std::collections::btree_map::{IntoIter, Iter, Keys, Range, Values};
fn map_key<'new>(v: BTreeMap<&'static str, ()>) -> BTreeMap<&'new str, ()> {
v
}
fn map_val<'new>(v: BTreeMap<(), &'static str>) -> BTreeMap<(), &'new str> {
v
}
fn iter_key<'a, 'new>(v: Iter<'a, &'static str, ()>) -> Iter<'a, &'new str, ()> {
v
}
fn iter_val<'a, 'new>(v: Iter<'a, (), &'static str>) -> Iter<'a, (), &'new str> {
v
}
fn into_iter_key<'new>(v: IntoIter<&'static str, ()>) -> IntoIter<&'new str, ()> {
v
}
fn into_iter_val<'new>(v: IntoIter<(), &'static str>) -> IntoIter<(), &'new str> {
v
}
fn range_key<'a, 'new>(v: Range<'a, &'static str, ()>) -> Range<'a, &'new str, ()> {
v
}
fn range_val<'a, 'new>(v: Range<'a, (), &'static str>) -> Range<'a, (), &'new str> {
v
}
fn keys<'a, 'new>(v: Keys<'a, &'static str, ()>) -> Keys<'a, &'new str, ()> {
v
}
fn vals<'a, 'new>(v: Values<'a, (), &'static str>) -> Values<'a, (), &'new str> {
v
}
}
#[test]
fn test_occupied_entry_key() {
    // `OccupiedEntry::key` must expose the key that was looked up, and the
    // entry lookup itself must not disturb the map.
    let mut map = BTreeMap::new();
    let key = "hello there";
    let value = "value goes here";
    assert!(map.is_empty());
    map.insert(key, value);
    assert_eq!(map.len(), 1);
    assert_eq!(map[key], value);
    match map.entry(key) {
        Vacant(_) => panic!(),
        Occupied(entry) => assert_eq!(key, *entry.key()),
    }
    assert_eq!(map.len(), 1);
    assert_eq!(map[key], value);
}
#[test]
fn test_vacant_entry_key() {
    // `VacantEntry::key` must expose the key before insertion, and inserting
    // through the entry must populate the map.
    let mut map = BTreeMap::new();
    let key = "hello there";
    let value = "value goes here";
    assert!(map.is_empty());
    match map.entry(key) {
        Occupied(_) => panic!(),
        Vacant(entry) => {
            assert_eq!(key, *entry.key());
            entry.insert(value);
        }
    }
    assert_eq!(map.len(), 1);
    assert_eq!(map[key], value);
}
#[test]
fn test_first_last_entry() {
    // `first_entry`/`last_entry` track the minimum and maximum keys, and
    // `remove_entry` through them pops from the corresponding end.
    let mut map = BTreeMap::new();
    assert!(map.first_entry().is_none());
    assert!(map.last_entry().is_none());
    map.insert(1, 42);
    assert_eq!(map.first_entry().unwrap().key(), &1);
    assert_eq!(map.last_entry().unwrap().key(), &1);
    map.insert(2, 24);
    assert_eq!(map.first_entry().unwrap().key(), &1);
    assert_eq!(map.last_entry().unwrap().key(), &2);
    map.insert(0, 6);
    assert_eq!(map.first_entry().unwrap().key(), &0);
    assert_eq!(map.last_entry().unwrap().key(), &2);
    let popped_min = map.first_entry().unwrap().remove_entry();
    assert_eq!(popped_min, (0, 6));
    let popped_max = map.last_entry().unwrap().remove_entry();
    assert_eq!(popped_max, (2, 24));
    assert_eq!(map.first_entry().unwrap().key(), &1);
    assert_eq!(map.last_entry().unwrap().key(), &1);
}
macro_rules! create_append_test {
($name:ident, $len:expr) => {
#[test]
fn $name() {
let mut a = BTreeMap::new();
for i in 0..8 {
a.insert(i, i);
}
let mut b = BTreeMap::new();
for i in 5..$len {
b.insert(i, 2 * i);
}
a.append(&mut b);
assert_eq!(a.len(), $len);
assert_eq!(b.len(), 0);
for i in 0..$len {
if i < 5 {
assert_eq!(a[&i], i);
} else {
assert_eq!(a[&i], 2 * i);
}
}
assert_eq!(a.remove(&($len - 1)), Some(2 * ($len - 1)));
assert_eq!(a.insert($len - 1, 20), None);
}
};
}
// These are mostly for testing the algorithm that "fixes" the right edge after insertion.
// Single node.
create_append_test!(test_append_9, 9);
// Two leafs that don't need fixing.
create_append_test!(test_append_17, 17);
// Two leafs where the second one ends up underfull and needs stealing at the end.
create_append_test!(test_append_14, 14);
// Two leafs where the second one ends up empty because the insertion finished at the root.
create_append_test!(test_append_12, 12);
// Three levels; insertion finished at the root.
create_append_test!(test_append_144, 144);
// Three levels; insertion finished at leaf while there is an empty node on the second level.
create_append_test!(test_append_145, 145);
// Tests for several randomly chosen sizes.
create_append_test!(test_append_170, 170);
create_append_test!(test_append_181, 181);
#[cfg(not(miri))] // Miri is too slow
create_append_test!(test_append_239, 239);
#[cfg(not(miri))] // Miri is too slow
create_append_test!(test_append_1700, 1700);
/// Produces `len` pseudo-random key/value pairs from the crate's
/// `DeterministicRng`, so the split_off tests below are reproducible.
fn rand_data(len: usize) -> Vec<(u32, u32)> {
    let mut rng = DeterministicRng::new();
    Vec::from_iter((0..len).map(|_| (rng.next(), rng.next())))
}
#[test]
fn test_split_off_empty_right() {
    // Splitting at a key above every element must leave the original map
    // intact and yield an empty right half.
    let mut data = rand_data(173);
    let mut map = BTreeMap::from_iter(data.clone());
    let right = map.split_off(&(data.iter().max().unwrap().0 + 1));
    data.sort();
    assert!(map.into_iter().eq(data));
    // `None` is an empty iterator over (u32, u32) items here.
    assert!(right.into_iter().eq(None));
}
#[test]
fn test_split_off_empty_left() {
    // Splitting at the minimum key must move everything into the right half,
    // leaving the original map empty.
    let mut data = rand_data(314);
    let mut map = BTreeMap::from_iter(data.clone());
    let right = map.split_off(&data.iter().min().unwrap().0);
    data.sort();
    // `None` is an empty iterator over (u32, u32) items here.
    assert!(map.into_iter().eq(None));
    assert!(right.into_iter().eq(data));
}
#[test]
fn test_split_off_large_random_sorted() {
#[cfg(not(miri))] // Miri is too slow
let mut data = rand_data(1529);
#[cfg(miri)]
let mut data = rand_data(529);
// special case with maximum height.
data.sort();
let mut map = BTreeMap::from_iter(data.clone());
let key = data[data.len() / 2].0;
let right = map.split_off(&key);
assert!(map.into_iter().eq(data.clone().into_iter().filter(|x| x.0 < key)));
assert!(right.into_iter().eq(data.into_iter().filter(|x| x.0 >= key)));
}
#[test]
fn test_into_iter_drop_leak_1() {
static DROPS: AtomicU32 = AtomicU32::new(0);
struct D;
impl Drop for D {
fn drop(&mut self) {
if DROPS.fetch_add(1, Ordering::SeqCst) == 3 {
panic!("panic in `drop`");
}
}
}
let mut map = BTreeMap::new();
map.insert("a", D);
map.insert("b", D);
map.insert("c", D);
map.insert("d", D);
map.insert("e", D);
catch_unwind(move || drop(map.into_iter())).ok();
assert_eq!(DROPS.load(Ordering::SeqCst), 5);
}
#[test]
fn test_into_iter_drop_leak_2() {
let size = 12; // to obtain tree with 2 levels (having edges to leaf nodes)
static DROPS: AtomicU32 = AtomicU32::new(0);
static PANIC_POINT: AtomicU32 = AtomicU32::new(0);
struct D;
impl Drop for D {
fn drop(&mut self) {
if DROPS.fetch_add(1, Ordering::SeqCst) == PANIC_POINT.load(Ordering::SeqCst) {
panic!("panic in `drop`");
}
}
}
for panic_point in vec![0, 1, size - 2, size - 1] {
DROPS.store(0, Ordering::SeqCst);
PANIC_POINT.store(panic_point, Ordering::SeqCst);
let map: BTreeMap<_, _> = (0..size).map(|i| (i, D)).collect();
catch_unwind(move || drop(map.into_iter())).ok();
assert_eq!(DROPS.load(Ordering::SeqCst), size);
}
}
| true |
73ae65c42a240644d8170dec3b515cfe0cb1bf2d
|
Rust
|
mislav-markovic/advent-of-code
|
/aoc-19/src/input_reader.rs
|
UTF-8
| 1,079 | 3.328125 | 3 |
[] |
no_license
|
/// Reads the file at `path` and splits its contents on `separator`,
/// returning each piece as an owned `String`.
///
/// NOTE(review): the name is misspelled ("sparated"); it is kept as-is
/// because callers depend on it — consider a rename with a deprecated alias.
///
/// # Errors
/// Returns `ReaderError` if the file cannot be read (the underlying
/// `io::Error` is discarded).
pub fn read_sparated_values_from_input(
    path: &str,
    separator: &str,
) -> Result<Vec<String>, ReaderError> {
    use std::fs;
    // `?` propagates the read failure; mapping each piece with `String::from`
    // directly replaces the closure form.
    let text = fs::read_to_string(path).map_err(|_| ReaderError {})?;
    Ok(text.split(separator).map(String::from).collect())
}
/// Line-oriented parser: implementors define how a single line maps to `R`.
pub trait Parser {
    /// The parsed value type.
    type R;
    /// Parses one input line into `Self::R`.
    fn parse_line(line: &str) -> Result<Self::R, ParseError>;
    /// Parses every line, preserving input order. Each line yields its own
    /// `Result`, so one bad line does not abort the rest.
    fn parse_all(input: &[&str]) -> Vec<Result<Self::R, ParseError>> {
        // Iterator form: `collect` preallocates from the slice's size hint,
        // replacing the manual `with_capacity` + push loop (which also used
        // `into_iter()` on a slice reference, iterating `&&str`).
        input.iter().map(|line| Self::parse_line(line)).collect()
    }
}
/// Error returned when the input file cannot be read.
#[derive(Debug, Clone)]
pub struct ReaderError {}
/// Error describing a failed parse of a single input line, carrying the
/// reason and the offending line.
#[derive(Debug, Clone)]
pub struct ParseError {
    message: String,
    line: String,
}
impl ParseError {
    /// Builds an error from already-owned strings.
    pub fn new(message: String, line: String) -> Self {
        Self { message, line }
    }
    /// Builds an error by copying borrowed strings.
    pub fn new_copy(message: &str, line: &str) -> Self {
        Self::new(message.to_owned(), line.to_owned())
    }
}
| true |
3e5e3a48a75c71f270f08b459bfb0340a6ee7670
|
Rust
|
troyunverdruss/advent-of-code-2020
|
/src/day10.rs
|
UTF-8
| 3,738 | 3.171875 | 3 |
[] |
no_license
|
use crate::util::inputs::day_input;
use itertools::Itertools;
use std::collections::HashMap;
/// Entry point for day 10: reads the puzzle input, parses each line as an
/// adapter rating, sorts ascending (both parts require sorted order), and
/// prints the answers for parts 1 and 2.
pub fn run() {
    let adapters = day_input(10)
        .iter()
        .map(|s| s.parse::<i32>().unwrap())
        .sorted()
        .collect::<Vec<i32>>();
    let part1 = part_1(&adapters);
    println!("Part 1: {}", part1);
    let part2 = part_2(&adapters);
    println!("Part 2: {}", part2);
}
/// Counts the joltage gaps of size 1 and 3 in the sorted adapter chain
/// (starting from the 0-jolt outlet) and returns `ones * threes`.
///
/// The 3-gap count starts at 1 because the device's built-in adapter is
/// always 3 jolts above the highest adapter.
///
/// # Panics
/// Panics if consecutive adapters differ by anything other than 1–3 jolts
/// (invalid puzzle input).
fn part_1(adapters: &[i32]) -> i32 {
    let mut results = HashMap::new();
    results.insert(1, 0);
    results.insert(2, 0);
    // 3 starts at 1 because we always have a 3 at the end to get to our device
    results.insert(3, 1);
    let mut last_val = 0;
    for a in adapters {
        let change = *a - last_val;
        match change {
            // A single arm covers all legal gap sizes; `change` is the key.
            1 | 2 | 3 => results.entry(change).and_modify(|f| *f += 1),
            // `panic!` takes the format string directly: the previous
            // `panic!(error)` with a prebuilt String is a hard error in the
            // 2021 edition (non_fmt_panics).
            _ => panic!("{} adapter greater than 3 from {}", a, last_val),
        };
        last_val = *a;
    }
    let ones = results.get(&1).unwrap();
    let threes = results.get(&3).unwrap();
    ones * threes
}
// TODO Tribonacci? Dynamic programming? Counting sort?
// https://www.reddit.com/r/adventofcode/comments/ka8z8x/2020_day_10_solutions/gfal951/?utm_source=share&utm_medium=ios_app&utm_name=iossmf&context=3
/// Counts the distinct valid adapter arrangements (part 2) with a memoized
/// depth-first search over the sorted adapter list.
///
/// Takes a slice instead of `&Vec<i32>`; existing `&Vec` call sites still
/// coerce, so callers are unaffected.
fn part_2(adapters: &[i32]) -> i64 {
    let mut memo = HashMap::new();
    solver(&0, adapters, &mut memo).expect("no valid adapter arrangement for input")
}
/// Returns the number of ways to complete the chain from `current` using the
/// sorted `remaining` adapters, or `None` if no adapter is reachable.
/// Results are memoized keyed by the remaining suffix.
fn solver(
    current: &i32,
    remaining: &[i32],
    memo: &mut HashMap<String, Option<i64>>,
) -> Option<i64> {
    // The remaining suffix uniquely identifies a subproblem; render it as a
    // stdlib join (the itertools `join` is no longer needed here).
    let key = remaining
        .iter()
        .map(|v| v.to_string())
        .collect::<Vec<_>>()
        .join("-");
    if let Some(&cached) = memo.get(&key) {
        // Return cached results directly. The previous code only
        // short-circuited on `Some`, so memoized `None` outcomes were
        // recomputed on every hit.
        return cached;
    }
    let result = if remaining.is_empty() {
        // Chain complete: exactly one arrangement.
        Some(1)
    } else {
        // Adapters reachable from `current`: at most 3 jolts higher.
        let next_steps: Vec<i32> = remaining
            .iter()
            .copied()
            .filter(|v| *v - current <= 3)
            .collect();
        assert!(next_steps.len() <= 3);
        if next_steps.is_empty() {
            None
        } else {
            // Sum the arrangement counts over each reachable next adapter.
            let mut sum = 0;
            for (index, value) in next_steps.iter().enumerate() {
                if let Some(count) = solver(value, &remaining[index + 1..], memo) {
                    sum += count;
                }
            }
            if sum == 0 { None } else { Some(sum) }
        }
    };
    memo.insert(key, result);
    result
}
#[cfg(test)]
mod tests {
    use crate::day10::{part_1, part_2};
    // Examples from the AoC 2020 day 10 problem statement. Inputs are sorted
    // first because part_1/part_2 require ascending order.
    #[test]
    fn example_1_1() {
        let mut adapters = vec![16, 10, 15, 5, 1, 11, 7, 19, 6, 12, 4];
        adapters.sort();
        assert_eq!(35, part_1(&adapters))
    }
    #[test]
    fn example_1_2() {
        let mut adapters = vec![
            28, 33, 18, 42, 31, 14, 46, 20, 48, 47, 24, 23, 49, 45, 19, 38, 39, 11, 1, 32, 25, 35,
            8, 17, 7, 9, 4, 2, 34, 10, 3,
        ];
        adapters.sort();
        assert_eq!(220, part_1(&adapters))
    }
    #[test]
    fn example_2_1() {
        let mut adapters = vec![16, 10, 15, 5, 1, 11, 7, 19, 6, 12, 4];
        adapters.sort();
        assert_eq!(8, part_2(&adapters))
    }
    #[test]
    fn example_2_2() {
        let mut adapters = vec![
            28, 33, 18, 42, 31, 14, 46, 20, 48, 47, 24, 23, 49, 45, 19, 38, 39, 11, 1, 32, 25, 35,
            8, 17, 7, 9, 4, 2, 34, 10, 3,
        ];
        adapters.sort();
        assert_eq!(19208, part_2(&adapters))
    }
}
| true |
229c356d05f5b1439e59d57a0a33aa8281213234
|
Rust
|
redox-os/termion
|
/src/screen.rs
|
UTF-8
| 2,707 | 3.75 | 4 |
[
"MIT"
] |
permissive
|
//! Managing switching between main and alternate screen buffers.
//!
//! Note that this implementation uses xterm's new escape sequences for screen switching and thus
//! only works for xterm compatible terminals (which should be most terminals nowadays).
//!
//! # Example
//!
//! ```rust
//! use termion::screen::IntoAlternateScreen;
//! use std::io::{Write, stdout};
//!
//! fn main() {
//! {
//! let mut screen = stdout().into_alternate_screen().unwrap();
//! write!(screen, "Writing to alternate screen!").unwrap();
//! screen.flush().unwrap();
//! }
//! println!("Writing to main screen.");
//! }
//! ```
use std::io::{self, Write};
use std::ops;
use std::fmt;
/// Switch to the main screen buffer of the terminal.
pub struct ToMainScreen;
impl fmt::Display for ToMainScreen {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // xterm private mode 1049, reset: leave the alternate screen buffer.
        write!(f, csi!("?1049l"))
    }
}
/// Switch to the alternate screen buffer of the terminal.
pub struct ToAlternateScreen;
impl fmt::Display for ToAlternateScreen {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // xterm private mode 1049, set: switch to the alternate screen buffer.
        write!(f, csi!("?1049h"))
    }
}
/// A terminal restorer, which wraps a type implementing Write, and causes all writes to be written
/// to an alternate screen.
///
/// This is achieved by switching the terminal to the alternate screen on creation and
/// automatically switching it back to the original screen on drop.
///
/// Construct via [`IntoAlternateScreen::into_alternate_screen`]. All `Write`
/// calls are forwarded unchanged to the wrapped writer.
pub struct AlternateScreen<W: Write> {
    /// The output target.
    output: W,
}
/// Extension trait for writers, providing the `into_alternate_screen` function.
pub trait IntoAlternateScreen: Write + Sized {
    /// Switch the terminal controlled by this writer to use the alternate screen.
    /// The terminal is restored to the main screen when the returned
    /// [`AlternateScreen`] guard is dropped.
    fn into_alternate_screen(mut self) -> io::Result<AlternateScreen<Self>> {
        // `write!` is sugar for `write_fmt`; emit the switch sequence before
        // handing ownership of the writer to the guard.
        self.write_fmt(format_args!("{}", ToAlternateScreen))?;
        Ok(AlternateScreen { output: self })
    }
}
impl<W: Write> IntoAlternateScreen for W {}
impl<W: Write> Drop for AlternateScreen<W> {
    /// Best-effort restore of the main screen when the guard goes away.
    fn drop(&mut self) {
        // Write straight to the wrapped writer (our own `Write` impl would
        // forward here anyway). Errors cannot be reported from `drop`, so
        // they are deliberately discarded.
        if write!(self.output, "{}", ToMainScreen).is_err() {
            // Ignored: panicking in drop would be worse than leaving the
            // terminal on the alternate screen.
        }
    }
}
// Smart-pointer-style access: the guard transparently exposes the wrapped
// writer, so inherent methods on `W` remain callable through the guard.
impl<W: Write> ops::Deref for AlternateScreen<W> {
    type Target = W;
    fn deref(&self) -> &W {
        &self.output
    }
}
impl<W: Write> ops::DerefMut for AlternateScreen<W> {
    fn deref_mut(&mut self) -> &mut W {
        &mut self.output
    }
}
// Forward all I/O to the wrapped writer; the alternate-screen switching only
// happens at construction and drop, never per write.
impl<W: Write> Write for AlternateScreen<W> {
    fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
        self.output.write(buf)
    }
    fn flush(&mut self) -> io::Result<()> {
        self.output.flush()
    }
}
| true |
93483bc6c5882d376f1c5cf0ace8b7b6c5cd48de
|
Rust
|
vegaluisjose/toml-example
|
/src/main.rs
|
UTF-8
| 446 | 2.6875 | 3 |
[] |
no_license
|
// extern crate serde;
// #[macro_use]
// extern crate serde_derive;
// extern crate toml;
use serde::{Deserialize, Serialize};
// Demonstrates serde's adjacently-tagged enum representation: the variant
// name is serialized under the "type" key and its payload under "args".
#[derive(Debug, Serialize, Deserialize, PartialEq)]
#[serde(tag = "type", content = "args")]
enum Actions {
    // Tuple variant: "args" holds a single integer.
    Wait(usize),
    // Struct variant: "args" holds a table with `x` and `y`.
    Move { x: usize, y: usize },
}
/// Print the TOML form of one variant of each shape to show the tagged layout.
fn main() {
    let wait = Actions::Wait(5);
    let step = Actions::Move { x: 1, y: 1 };
    println!("{}", toml::to_string(&wait).unwrap());
    println!("{}", toml::to_string(&step).unwrap());
}
| true |
09760ebfb008d73be3644062f7420a77a6dfbe0a
|
Rust
|
FauxFaux/tempfile-fast-rs
|
/tests/sponge.rs
|
UTF-8
| 960 | 2.90625 | 3 |
[
"MIT"
] |
permissive
|
use std::fs;
use std::io;
use std::io::Read;
use std::io::Write;
// End-to-end check of Sponge's atomic-replace semantics: writes are invisible
// until `commit`, and the temporary file is cleaned up afterwards.
#[test]
fn sponge() -> Result<(), io::Error> {
    // Work inside a throwaway directory so the test leaves no artifacts.
    let dir = tempfile::TempDir::new()?;
    let mut test_path = dir.path().to_path_buf();
    {
        test_path.push("hello");
        fs::create_dir_all(&test_path)?;
        test_path.push("world.txt");
        fs::File::create(&test_path)?.write_all(b"content before")?;
    }
    let mut sponge = tempfile_fast::Sponge::new_for(&test_path)?;
    sponge.write_all(b"new stuff")?;
    // Neither writing nor flushing may touch the target file yet...
    assert_eq!("content before", read(fs::File::open(&test_path)?));
    sponge.flush()?;
    assert_eq!("content before", read(fs::File::open(&test_path)?));
    // ...only an explicit commit replaces its contents.
    sponge.commit()?;
    assert_eq!("new stuff", read(fs::File::open(&test_path)?));
    // Exactly one directory entry remains: the sponge's temp file is gone.
    assert_eq!(1, fs::read_dir(test_path.parent().unwrap())?.count());
    Ok(())
}
/// Drain a reader to completion and return its contents as a `String`.
/// Panics on I/O failure or invalid UTF-8 (fine for test helpers).
fn read<R: Read>(mut thing: R) -> String {
    let mut bytes = Vec::new();
    thing.read_to_end(&mut bytes).unwrap();
    String::from_utf8(bytes).unwrap()
}
| true |
665e5116f0d644ca5eeb1cb9463a4cf26ae7efff
|
Rust
|
pacheco/sevent
|
/src/chan.rs
|
UTF-8
| 1,650 | 2.875 | 3 |
[] |
no_license
|
use std::io;
use std::cell::RefCell;
use std::sync::mpsc::TryRecvError;
use mio::Ready;
use mio::Poll;
use mio_more::channel;
use ::TokenKind;
// we need the trait because we want to be able to store channels of any T:
// `Chan` erases the message type so heterogeneous channels can live in one
// event-loop registry.
pub trait Chan {
    /// Registry identifier used to route readiness events back to this channel.
    fn id(&self) -> usize;
    /// Invoked by the event loop when the underlying receiver signals readiness.
    fn ready(&self, ready: Ready);
    /// Remove the underlying receiver from the given `mio` poll instance.
    fn deregister(&self, poll: &Poll) -> Result<(), io::Error>;
}
/// Callback interface a `ChanCtx` drives as channel events occur.
pub trait ChanHandler<T> {
    /// A message `msg` arrived on channel `id`.
    fn on_recv(&mut self, id: usize, msg: T);
    /// The sending side of channel `id` disconnected; no further messages follow.
    fn on_close(&mut self, id: usize);
}
// Blanket impl so a single closure can serve as a handler: `Some(msg)`
// represents a received message, `None` signals that the channel closed.
impl<F, T> ChanHandler<T> for F
    where F: FnMut(usize, Option<T>)
{
    fn on_recv(&mut self, id: usize, msg: T) {
        self(id, Some(msg))
    }
    fn on_close(&mut self, id: usize) {
        self(id, None)
    }
}
impl<T> Chan for ChanCtx<T> {
    fn id(&self) -> usize {
        self.id
    }
    /// Drain the receiver completely. The readiness flags are ignored:
    /// `try_recv` is simply looped until the channel reports empty or
    /// disconnected.
    fn ready(&self, _ready: Ready) {
        loop {
            // Take the receiver borrow in its own statement so the RefCell
            // borrow is released before the handler callback runs.
            let recv = self.inner.borrow_mut().try_recv();
            match recv {
                Ok(msg) => {
                    self.handler.borrow_mut().on_recv(self.id, msg);
                }
                Err(TryRecvError::Empty) => {
                    break;
                }
                Err(TryRecvError::Disconnected) => {
                    // Sender is gone: notify the handler, then remove this
                    // channel from the event-loop registry.
                    self.handler.borrow_mut().on_close(self.id);
                    super::del(self.id, TokenKind::Chan).unwrap();
                    break;
                }
            }
        }
    }
    fn deregister(&self, poll: &Poll) -> Result<(), io::Error> {
        poll.deregister(&*self.inner.borrow())
    }
}
/// Per-channel state stored in the event-loop registry.
pub struct ChanCtx<T> {
    /// Registry id for this channel (see `Chan::id`).
    pub id: usize,
    /// The mio channel receiver; `RefCell` allows draining through `&self`.
    pub inner: RefCell<channel::Receiver<T>>,
    /// User callback invoked on receive/close events.
    pub handler: RefCell<Box<ChanHandler<T>>>,
}
| true |
08cfb3f129191cb9f1a545aac722d72509fa60d0
|
Rust
|
pd0wm/exercises
|
/advent_of_code_2022/src/bin/13.rs
|
UTF-8
| 4,098 | 3.296875 | 3 |
[] |
no_license
|
#![feature(test)]
use std::cmp::Ordering;
use bisection::bisect_left;
extern crate test;
/// A day-13 packet value: either a bare integer or a (possibly nested) list.
#[derive(Debug, Eq, PartialEq, Clone)]
enum Packet {
    // A single numeric value, e.g. `42`.
    Int(u64),
    // A bracketed list of packets, e.g. `[1,[2,3]]`.
    List(Vec<Packet>),
}
/// Parse the textual packet form, e.g. `[1,[2,3],4]` or `42`.
///
/// Panics on malformed input (unbalanced brackets, non-numeric atoms) —
/// acceptable for trusted puzzle input.
impl From<&str> for Packet {
    fn from(packet: &str) -> Self {
        if packet.starts_with('[') {
            let mut packets = Vec::new();
            // "[]" has length 2, so only longer strings contain items.
            if packet.len() > 2 {
                let mut depth = 0;
                let mut prev_i = 0;
                // Find items boundaries, making sure to ignore nested commas:
                // only commas at bracket depth 1 separate top-level items.
                // (char indices coincide with byte indices here — assumes
                // ASCII-only input; TODO confirm.)
                for (i, c) in packet.chars().enumerate() {
                    match c {
                        '[' => depth += 1,
                        ']' => depth -= 1,
                        ',' => {
                            if depth == 1 {
                                // Slice starts one past the previous '[' or ','.
                                packets.push(packet[prev_i + 1..i].into());
                                prev_i = i;
                            }
                        }
                        _ => {}
                    }
                }
                // Final item: up to (not including) the closing ']'.
                packets.push(packet[prev_i + 1..packet.len() - 1].into());
            }
            Packet::List(packets)
        } else {
            Packet::Int(packet.parse().unwrap())
        }
    }
}
// Canonical boilerplate: a total order exists (see `Ord` below), so partial
// comparison always succeeds.
impl PartialOrd for Packet {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}
impl Ord for Packet {
    /// Total order per the day-13 rules:
    /// * two integers compare numerically;
    /// * two lists compare element-wise with shorter-is-less on a tie — which
    ///   is exactly `Vec`'s lexicographic `Ord`, so we delegate to it instead
    ///   of re-implementing it with manual indexing;
    /// * an integer compared against a list is treated as a one-element list.
    fn cmp(&self, other: &Self) -> Ordering {
        use std::slice::from_ref;
        match (self, other) {
            (Packet::Int(a), Packet::Int(b)) => a.cmp(b),
            // Lexicographic element-wise comparison, identical to the old
            // hand-written loop (Less/Greater on first differing element,
            // then shorter-list-is-less).
            (Packet::List(a), Packet::List(b)) => a.cmp(b),
            // Mixed cases: compare the list against a one-element slice that
            // borrows the integer packet. Equivalent to wrapping the integer
            // in `Packet::List(vec![..])` but without cloning it.
            (Packet::List(a), Packet::Int(_)) => a.as_slice().cmp(from_ref(other)),
            (Packet::Int(_), Packet::List(b)) => from_ref(self).cmp(b.as_slice()),
        }
    }
}
type Input = Vec<(Packet, Packet)>;
/// Sum of the 1-based indices of pairs that are already correctly ordered.
fn part1(input: &Input) -> u64 {
    input
        .iter()
        .enumerate()
        .filter(|(_, (left, right))| left < right)
        .map(|(i, _)| i as u64 + 1)
        .sum()
}
fn part2(input: &Input) -> u64 {
let mut all = Vec::new();
for (a, b) in input.iter() {
all.push(a);
all.push(b);
}
all.sort();
let a = bisect_left(&all, &&"[[2]]".into()) as u64;
let b = bisect_left(&all, &&"[[6]]".into()) as u64;
(a + 1) * (b + 2)
}
/// Parse the input as pairs of packet lines separated by blank lines.
fn parse_input(input: &str) -> Input {
    let mut lines = input.lines();
    let mut pairs = Vec::new();
    loop {
        match (lines.next(), lines.next()) {
            (Some(first), Some(second)) => pairs.push((first.into(), second.into())),
            // Fewer than two lines left: done.
            _ => break,
        }
        // Consume the blank separator line (if any) between pairs.
        lines.next();
    }
    pairs
}
fn main() {
    // Input is baked into the binary at compile time.
    let pairs = parse_input(include_str!("../../inputs/13.txt").trim());
    println!("{}", part1(&pairs));
    println!("{}", part2(&pairs));
}
#[cfg(test)]
mod day13_tests {
    use super::*;
    use test::{black_box, Bencher};

    /// Both parts against the worked example from the puzzle text.
    #[test]
    fn example() {
        let pairs = parse_input(include_str!("../../inputs/13_sample.txt").trim());
        assert_eq!(13, part1(&pairs));
        assert_eq!(140, part2(&pairs));
    }

    #[bench]
    fn bench_parsing(b: &mut Bencher) {
        b.iter(|| parse_input(include_str!("../../inputs/13.txt").trim()));
    }

    #[bench]
    fn bench_part1(b: &mut Bencher) {
        let pairs = black_box(parse_input(include_str!("../../inputs/13.txt").trim()));
        b.iter(|| part1(&pairs));
    }

    #[bench]
    fn bench_part2(b: &mut Bencher) {
        let pairs = black_box(parse_input(include_str!("../../inputs/13.txt").trim()));
        b.iter(|| part2(&pairs));
    }
}
| true |
306fac27dbd8151d1f6fdea2605ce4c519e5a42a
|
Rust
|
Setheum-Labs/scale-info
|
/src/form.rs
|
UTF-8
| 2,869 | 2.796875 | 3 |
[
"Apache-2.0"
] |
permissive
|
// Copyright 2019-2020 Parity Technologies (UK) Ltd.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Provides some form definitions.
//!
//! The forms provided here are used to generically communicate the
//! compaction mode of a type identifier, type definition, or structures
//! that are using these.
//!
//! The default form is the `MetaForm`.
//! It uses `MetaType` for communicating type identifiers and thus acts as
//! a bridge from runtime to compile time type information.
//!
//! The compact form is `CompactForm` and represents a compact form
//! that no longer has any connections to the interning registry and thus
//! can no longer be used in order to retrieve information from the
//! original registry easily. Its sole purpose is for compact serialization.
//!
//! Other forms, such as a compact form that is still bound to the registry
//! (also via lifetime tracking), are possible but currently not needed.
use crate::tm_std::*;
use crate::{interner::UntrackedSymbol, meta_type::MetaType};
use serde::Serialize;
/// Trait to control the internal structures of type definitions.
///
/// This allows for type-level separation between free forms that can be
/// instantiated out of the flux and compact forms that require some sort of
/// interning data structures.
pub trait Form {
    /// The type identifier type.
    type TypeId: PartialEq + Eq + PartialOrd + Ord + Clone + core::fmt::Debug;
    /// The string type.
    ///
    /// Unlike `TypeId`, this additionally requires `Serialize` — presumably
    /// because strings are emitted directly during serialization; confirm
    /// against the registry code.
    type String: Serialize + PartialEq + Eq + PartialOrd + Ord + Clone + core::fmt::Debug;
}
/// A meta meta-type.
///
/// Allows to be converted into other forms such as compact form
/// through the registry and `IntoCompact`.
///
/// The enum has no variants: it is never instantiated and serves purely as a
/// type-level marker selecting the associated types below.
#[derive(PartialEq, Eq, PartialOrd, Ord, Clone, Copy, Serialize, Debug)]
pub enum MetaForm {}
impl Form for MetaForm {
    type TypeId = MetaType;
    /// Compile-time string slices; no interning required in the meta form.
    type String = &'static str;
}
/// Compact form that has its lifetime untracked in association to its interner.
///
/// # Note
///
/// This resolves some lifetime issues with self-referential structs (such as
/// the registry itself) but can no longer be used to resolve to the original
/// underlying data.
///
/// `type String` is owned in order to enable decoding
///
/// Like `MetaForm`, this uninhabited enum is only a type-level marker.
#[derive(PartialEq, Eq, PartialOrd, Ord, Clone, Copy, Serialize, Debug)]
pub enum CompactForm {}
impl Form for CompactForm {
    type TypeId = UntrackedSymbol<TypeId>;
    type String = String;
}
| true |
5e3209a7eafd58b94a04d59d7ea05e5f6fea2825
|
Rust
|
NyxTo/Advent-of-Code
|
/2019/Code/day6_orbit.rs
|
UTF-8
| 1,313 | 3.21875 | 3 |
[] |
no_license
|
use std::fs::File;
use std::io::{BufRead, BufReader};
use std::collections::HashMap;
/// Walk the orbit tree rooted at `ctr`, whose depth below COM is `dist_com`.
///
/// Returns `(total, you, san)` where `total` is the sum of orbit counts in
/// this subtree, and `you`/`san` are distances from this node to the bodies
/// that YOU and SAN orbit (`-1` while not yet found). Once both are found in
/// the same subtree their distances are frozen, so `you + san` at the root is
/// the number of orbital transfers required.
fn dfs(directs: &HashMap<String, Vec<String>>, ctr: &str, dist_com: i32) -> (i32, i32, i32) {
    let mut total = dist_com;
    let mut you = -1;
    let mut san = -1;
    if let Some(children) = directs.get(ctr) {
        for child in children {
            let (sub_total, sub_you, sub_san) = dfs(directs, child, dist_com + 1);
            total += sub_total;
            // A direct child named YOU/SAN is at distance 0 from this node.
            match child.as_str() {
                "YOU" => you = 0,
                "SAN" => san = 0,
                _ => {}
            }
            if sub_you >= 0 && sub_san >= 0 {
                // Both already located below this child: freeze the distances.
                you = sub_you;
                san = sub_san;
            } else if sub_you >= 0 {
                you = sub_you + 1;
            } else if sub_san >= 0 {
                san = sub_san + 1;
            }
        }
    }
    (total, you, san)
}
fn main() {
    // Orbit map: center body -> bodies directly orbiting it.
    let mut directs: HashMap<String, Vec<String>> = HashMap::new();
    for line in BufReader::new(File::open("in6.txt").unwrap()).lines().map(|line| line.unwrap()) {
        // Each line is "CENTER)ORBITER".
        let sep = line.find(')').unwrap();
        let (ctr, arnd) = (line[0..sep].to_string(), line[sep + 1 ..].to_string());
        // Entry API: one hash lookup instead of the get_mut-then-insert pair.
        directs.entry(ctr).or_default().push(arnd);
    }
    let (tot_com, dist_you, dist_san) = dfs(&directs, "COM", 0);
    println!("Part A: {}", tot_com); // 160040
    println!("Part B: {}", dist_you + dist_san); // 373
}
| true |
433ff5f1bec0d6d102bd8d5d46df6578a9ec956c
|
Rust
|
srijs/rust-cfn
|
/src/aws/ses.rs
|
UTF-8
| 99,222 | 3.125 | 3 |
[
"MIT"
] |
permissive
|
//! Types for the `SES` service.
/// The [`AWS::SES::ConfigurationSet`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ses-configurationset.html) resource type.
// NOTE(review): this boilerplate appears machine-generated from the
// CloudFormation resource specification — prefer regenerating over hand-edits.
#[derive(Debug, Default)]
pub struct ConfigurationSet {
    properties: ConfigurationSetProperties
}
/// Properties for the `ConfigurationSet` resource.
#[derive(Debug, Default)]
pub struct ConfigurationSetProperties {
    /// Property [`Name`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ses-configurationset.html#cfn-ses-configurationset-name).
    ///
    /// Update type: _Immutable_.
    /// AWS CloudFormation replaces the resource when you change this property.
    pub name: Option<::Value<String>>,
}
impl ::serde::Serialize for ConfigurationSetProperties {
    fn serialize<S: ::serde::Serializer>(&self, s: S) -> Result<S::Ok, S::Error> {
        let mut map = ::serde::Serializer::serialize_map(s, None)?;
        // Optional properties are omitted from the map entirely when `None`.
        if let Some(ref name) = self.name {
            ::serde::ser::SerializeMap::serialize_entry(&mut map, "Name", name)?;
        }
        ::serde::ser::SerializeMap::end(map)
    }
}
impl<'de> ::serde::Deserialize<'de> for ConfigurationSetProperties {
    fn deserialize<D: ::serde::Deserializer<'de>>(d: D) -> Result<ConfigurationSetProperties, D::Error> {
        struct Visitor;
        impl<'de> ::serde::de::Visitor<'de> for Visitor {
            type Value = ConfigurationSetProperties;
            fn expecting(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
                write!(f, "a struct of type ConfigurationSetProperties")
            }
            fn visit_map<A: ::serde::de::MapAccess<'de>>(self, mut map: A) -> Result<Self::Value, A::Error> {
                let mut name: Option<::Value<String>> = None;
                while let Some(__cfn_key) = ::serde::de::MapAccess::next_key::<String>(&mut map)? {
                    match __cfn_key.as_ref() {
                        "Name" => {
                            name = ::serde::de::MapAccess::next_value(&mut map)?;
                        }
                        // Unrecognized keys are ignored; note the value is never
                        // consumed — serde's MapAccess contract may require a
                        // next_value::<IgnoredAny>() call here (TODO confirm).
                        _ => {}
                    }
                }
                Ok(ConfigurationSetProperties {
                    name: name,
                })
            }
        }
        d.deserialize_map(Visitor)
    }
}
impl ::Resource for ConfigurationSet {
    type Properties = ConfigurationSetProperties;
    const TYPE: &'static str = "AWS::SES::ConfigurationSet";
    fn properties(&self) -> &ConfigurationSetProperties {
        &self.properties
    }
    fn properties_mut(&mut self) -> &mut ConfigurationSetProperties {
        &mut self.properties
    }
}
impl ::private::Sealed for ConfigurationSet {}
impl From<ConfigurationSetProperties> for ConfigurationSet {
    fn from(properties: ConfigurationSetProperties) -> ConfigurationSet {
        ConfigurationSet { properties }
    }
}
/// The [`AWS::SES::ConfigurationSetEventDestination`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ses-configurationseteventdestination.html) resource type.
// NOTE(review): machine-generated boilerplate — prefer regenerating over
// hand-editing.
#[derive(Debug, Default)]
pub struct ConfigurationSetEventDestination {
    properties: ConfigurationSetEventDestinationProperties
}
/// Properties for the `ConfigurationSetEventDestination` resource.
#[derive(Debug, Default)]
pub struct ConfigurationSetEventDestinationProperties {
    /// Property [`ConfigurationSetName`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ses-configurationseteventdestination.html#cfn-ses-configurationseteventdestination-configurationsetname).
    ///
    /// Update type: _Immutable_.
    /// AWS CloudFormation replaces the resource when you change this property.
    pub configuration_set_name: ::Value<String>,
    /// Property [`EventDestination`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ses-configurationseteventdestination.html#cfn-ses-configurationseteventdestination-eventdestination).
    ///
    /// Update type: _Mutable_.
    /// AWS CloudFormation doesn't replace the resource when you change this property.
    pub event_destination: ::Value<self::configuration_set_event_destination::EventDestination>,
}
impl ::serde::Serialize for ConfigurationSetEventDestinationProperties {
    fn serialize<S: ::serde::Serializer>(&self, s: S) -> Result<S::Ok, S::Error> {
        let mut map = ::serde::Serializer::serialize_map(s, None)?;
        // Both properties are required, so they are always serialized.
        ::serde::ser::SerializeMap::serialize_entry(&mut map, "ConfigurationSetName", &self.configuration_set_name)?;
        ::serde::ser::SerializeMap::serialize_entry(&mut map, "EventDestination", &self.event_destination)?;
        ::serde::ser::SerializeMap::end(map)
    }
}
impl<'de> ::serde::Deserialize<'de> for ConfigurationSetEventDestinationProperties {
    fn deserialize<D: ::serde::Deserializer<'de>>(d: D) -> Result<ConfigurationSetEventDestinationProperties, D::Error> {
        struct Visitor;
        impl<'de> ::serde::de::Visitor<'de> for Visitor {
            type Value = ConfigurationSetEventDestinationProperties;
            fn expecting(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
                write!(f, "a struct of type ConfigurationSetEventDestinationProperties")
            }
            fn visit_map<A: ::serde::de::MapAccess<'de>>(self, mut map: A) -> Result<Self::Value, A::Error> {
                let mut configuration_set_name: Option<::Value<String>> = None;
                let mut event_destination: Option<::Value<self::configuration_set_event_destination::EventDestination>> = None;
                while let Some(__cfn_key) = ::serde::de::MapAccess::next_key::<String>(&mut map)? {
                    match __cfn_key.as_ref() {
                        "ConfigurationSetName" => {
                            configuration_set_name = ::serde::de::MapAccess::next_value(&mut map)?;
                        }
                        "EventDestination" => {
                            event_destination = ::serde::de::MapAccess::next_value(&mut map)?;
                        }
                        // Unrecognized keys are ignored without consuming their
                        // values — may violate serde's MapAccess contract
                        // (TODO confirm; see note on ConfigurationSet).
                        _ => {}
                    }
                }
                // Required properties yield a missing_field error when absent.
                Ok(ConfigurationSetEventDestinationProperties {
                    configuration_set_name: configuration_set_name.ok_or(::serde::de::Error::missing_field("ConfigurationSetName"))?,
                    event_destination: event_destination.ok_or(::serde::de::Error::missing_field("EventDestination"))?,
                })
            }
        }
        d.deserialize_map(Visitor)
    }
}
impl ::Resource for ConfigurationSetEventDestination {
    type Properties = ConfigurationSetEventDestinationProperties;
    const TYPE: &'static str = "AWS::SES::ConfigurationSetEventDestination";
    fn properties(&self) -> &ConfigurationSetEventDestinationProperties {
        &self.properties
    }
    fn properties_mut(&mut self) -> &mut ConfigurationSetEventDestinationProperties {
        &mut self.properties
    }
}
impl ::private::Sealed for ConfigurationSetEventDestination {}
impl From<ConfigurationSetEventDestinationProperties> for ConfigurationSetEventDestination {
    fn from(properties: ConfigurationSetEventDestinationProperties) -> ConfigurationSetEventDestination {
        ConfigurationSetEventDestination { properties }
    }
}
/// The [`AWS::SES::ContactList`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ses-contactlist.html) resource type.
// NOTE(review): machine-generated boilerplate — prefer regenerating over
// hand-editing.
#[derive(Debug, Default)]
pub struct ContactList {
    properties: ContactListProperties
}
/// Properties for the `ContactList` resource.
#[derive(Debug, Default)]
pub struct ContactListProperties {
    /// Property [`ContactListName`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ses-contactlist.html#cfn-ses-contactlist-contactlistname).
    ///
    /// Update type: _Immutable_.
    /// AWS CloudFormation replaces the resource when you change this property.
    pub contact_list_name: Option<::Value<String>>,
    /// Property [`Description`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ses-contactlist.html#cfn-ses-contactlist-description).
    ///
    /// Update type: _Mutable_.
    /// AWS CloudFormation doesn't replace the resource when you change this property.
    pub description: Option<::Value<String>>,
    /// Property [`Tags`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ses-contactlist.html#cfn-ses-contactlist-tags).
    ///
    /// Update type: _Mutable_.
    /// AWS CloudFormation doesn't replace the resource when you change this property.
    pub tags: Option<::ValueList<::Tag>>,
    /// Property [`Topics`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ses-contactlist.html#cfn-ses-contactlist-topics).
    ///
    /// Update type: _Mutable_.
    /// AWS CloudFormation doesn't replace the resource when you change this property.
    pub topics: Option<::ValueList<self::contact_list::Topic>>,
}
impl ::serde::Serialize for ContactListProperties {
    fn serialize<S: ::serde::Serializer>(&self, s: S) -> Result<S::Ok, S::Error> {
        let mut map = ::serde::Serializer::serialize_map(s, None)?;
        // All four properties are optional; `None` values are omitted.
        if let Some(ref contact_list_name) = self.contact_list_name {
            ::serde::ser::SerializeMap::serialize_entry(&mut map, "ContactListName", contact_list_name)?;
        }
        if let Some(ref description) = self.description {
            ::serde::ser::SerializeMap::serialize_entry(&mut map, "Description", description)?;
        }
        if let Some(ref tags) = self.tags {
            ::serde::ser::SerializeMap::serialize_entry(&mut map, "Tags", tags)?;
        }
        if let Some(ref topics) = self.topics {
            ::serde::ser::SerializeMap::serialize_entry(&mut map, "Topics", topics)?;
        }
        ::serde::ser::SerializeMap::end(map)
    }
}
impl<'de> ::serde::Deserialize<'de> for ContactListProperties {
    fn deserialize<D: ::serde::Deserializer<'de>>(d: D) -> Result<ContactListProperties, D::Error> {
        struct Visitor;
        impl<'de> ::serde::de::Visitor<'de> for Visitor {
            type Value = ContactListProperties;
            fn expecting(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
                write!(f, "a struct of type ContactListProperties")
            }
            fn visit_map<A: ::serde::de::MapAccess<'de>>(self, mut map: A) -> Result<Self::Value, A::Error> {
                let mut contact_list_name: Option<::Value<String>> = None;
                let mut description: Option<::Value<String>> = None;
                let mut tags: Option<::ValueList<::Tag>> = None;
                let mut topics: Option<::ValueList<self::contact_list::Topic>> = None;
                while let Some(__cfn_key) = ::serde::de::MapAccess::next_key::<String>(&mut map)? {
                    match __cfn_key.as_ref() {
                        "ContactListName" => {
                            contact_list_name = ::serde::de::MapAccess::next_value(&mut map)?;
                        }
                        "Description" => {
                            description = ::serde::de::MapAccess::next_value(&mut map)?;
                        }
                        "Tags" => {
                            tags = ::serde::de::MapAccess::next_value(&mut map)?;
                        }
                        "Topics" => {
                            topics = ::serde::de::MapAccess::next_value(&mut map)?;
                        }
                        // Unrecognized keys are ignored without consuming their
                        // values — may violate serde's MapAccess contract
                        // (TODO confirm; see note on ConfigurationSet).
                        _ => {}
                    }
                }
                Ok(ContactListProperties {
                    contact_list_name: contact_list_name,
                    description: description,
                    tags: tags,
                    topics: topics,
                })
            }
        }
        d.deserialize_map(Visitor)
    }
}
impl ::Resource for ContactList {
    type Properties = ContactListProperties;
    const TYPE: &'static str = "AWS::SES::ContactList";
    fn properties(&self) -> &ContactListProperties {
        &self.properties
    }
    fn properties_mut(&mut self) -> &mut ContactListProperties {
        &mut self.properties
    }
}
impl ::private::Sealed for ContactList {}
impl From<ContactListProperties> for ContactList {
    fn from(properties: ContactListProperties) -> ContactList {
        ContactList { properties }
    }
}
/// The [`AWS::SES::ReceiptFilter`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ses-receiptfilter.html) resource type.
// NOTE(review): machine-generated boilerplate — prefer regenerating over
// hand-editing.
#[derive(Debug, Default)]
pub struct ReceiptFilter {
    properties: ReceiptFilterProperties
}
/// Properties for the `ReceiptFilter` resource.
#[derive(Debug, Default)]
pub struct ReceiptFilterProperties {
    /// Property [`Filter`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ses-receiptfilter.html#cfn-ses-receiptfilter-filter).
    ///
    /// Update type: _Immutable_.
    /// AWS CloudFormation replaces the resource when you change this property.
    pub filter: ::Value<self::receipt_filter::Filter>,
}
impl ::serde::Serialize for ReceiptFilterProperties {
    fn serialize<S: ::serde::Serializer>(&self, s: S) -> Result<S::Ok, S::Error> {
        let mut map = ::serde::Serializer::serialize_map(s, None)?;
        // `Filter` is required, so it is always serialized.
        ::serde::ser::SerializeMap::serialize_entry(&mut map, "Filter", &self.filter)?;
        ::serde::ser::SerializeMap::end(map)
    }
}
impl<'de> ::serde::Deserialize<'de> for ReceiptFilterProperties {
    fn deserialize<D: ::serde::Deserializer<'de>>(d: D) -> Result<ReceiptFilterProperties, D::Error> {
        struct Visitor;
        impl<'de> ::serde::de::Visitor<'de> for Visitor {
            type Value = ReceiptFilterProperties;
            fn expecting(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
                write!(f, "a struct of type ReceiptFilterProperties")
            }
            fn visit_map<A: ::serde::de::MapAccess<'de>>(self, mut map: A) -> Result<Self::Value, A::Error> {
                let mut filter: Option<::Value<self::receipt_filter::Filter>> = None;
                while let Some(__cfn_key) = ::serde::de::MapAccess::next_key::<String>(&mut map)? {
                    match __cfn_key.as_ref() {
                        "Filter" => {
                            filter = ::serde::de::MapAccess::next_value(&mut map)?;
                        }
                        // Unrecognized keys are ignored without consuming their
                        // values — may violate serde's MapAccess contract
                        // (TODO confirm; see note on ConfigurationSet).
                        _ => {}
                    }
                }
                // Required property: error out if it never appeared.
                Ok(ReceiptFilterProperties {
                    filter: filter.ok_or(::serde::de::Error::missing_field("Filter"))?,
                })
            }
        }
        d.deserialize_map(Visitor)
    }
}
impl ::Resource for ReceiptFilter {
    type Properties = ReceiptFilterProperties;
    const TYPE: &'static str = "AWS::SES::ReceiptFilter";
    fn properties(&self) -> &ReceiptFilterProperties {
        &self.properties
    }
    fn properties_mut(&mut self) -> &mut ReceiptFilterProperties {
        &mut self.properties
    }
}
impl ::private::Sealed for ReceiptFilter {}
impl From<ReceiptFilterProperties> for ReceiptFilter {
    fn from(properties: ReceiptFilterProperties) -> ReceiptFilter {
        ReceiptFilter { properties }
    }
}
/// The [`AWS::SES::ReceiptRule`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ses-receiptrule.html) resource type.
// NOTE(review): machine-generated boilerplate — prefer regenerating over
// hand-editing.
#[derive(Debug, Default)]
pub struct ReceiptRule {
    properties: ReceiptRuleProperties
}
/// Properties for the `ReceiptRule` resource.
#[derive(Debug, Default)]
pub struct ReceiptRuleProperties {
    /// Property [`After`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ses-receiptrule.html#cfn-ses-receiptrule-after).
    ///
    /// Update type: _Mutable_.
    /// AWS CloudFormation doesn't replace the resource when you change this property.
    pub after: Option<::Value<String>>,
    /// Property [`Rule`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ses-receiptrule.html#cfn-ses-receiptrule-rule).
    ///
    /// Update type: _Mutable_.
    /// AWS CloudFormation doesn't replace the resource when you change this property.
    pub rule: ::Value<self::receipt_rule::Rule>,
    /// Property [`RuleSetName`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ses-receiptrule.html#cfn-ses-receiptrule-rulesetname).
    ///
    /// Update type: _Immutable_.
    /// AWS CloudFormation replaces the resource when you change this property.
    pub rule_set_name: ::Value<String>,
}
impl ::serde::Serialize for ReceiptRuleProperties {
    fn serialize<S: ::serde::Serializer>(&self, s: S) -> Result<S::Ok, S::Error> {
        let mut map = ::serde::Serializer::serialize_map(s, None)?;
        // `After` is optional and skipped when `None`; the other two
        // properties are required and always serialized.
        if let Some(ref after) = self.after {
            ::serde::ser::SerializeMap::serialize_entry(&mut map, "After", after)?;
        }
        ::serde::ser::SerializeMap::serialize_entry(&mut map, "Rule", &self.rule)?;
        ::serde::ser::SerializeMap::serialize_entry(&mut map, "RuleSetName", &self.rule_set_name)?;
        ::serde::ser::SerializeMap::end(map)
    }
}
impl<'de> ::serde::Deserialize<'de> for ReceiptRuleProperties {
    fn deserialize<D: ::serde::Deserializer<'de>>(d: D) -> Result<ReceiptRuleProperties, D::Error> {
        struct Visitor;
        impl<'de> ::serde::de::Visitor<'de> for Visitor {
            type Value = ReceiptRuleProperties;
            fn expecting(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
                write!(f, "a struct of type ReceiptRuleProperties")
            }
            fn visit_map<A: ::serde::de::MapAccess<'de>>(self, mut map: A) -> Result<Self::Value, A::Error> {
                let mut after: Option<::Value<String>> = None;
                let mut rule: Option<::Value<self::receipt_rule::Rule>> = None;
                let mut rule_set_name: Option<::Value<String>> = None;
                while let Some(__cfn_key) = ::serde::de::MapAccess::next_key::<String>(&mut map)? {
                    match __cfn_key.as_ref() {
                        "After" => {
                            after = ::serde::de::MapAccess::next_value(&mut map)?;
                        }
                        "Rule" => {
                            rule = ::serde::de::MapAccess::next_value(&mut map)?;
                        }
                        "RuleSetName" => {
                            rule_set_name = ::serde::de::MapAccess::next_value(&mut map)?;
                        }
                        // Unrecognized keys are ignored without consuming their
                        // values — may violate serde's MapAccess contract
                        // (TODO confirm; see note on ConfigurationSet).
                        _ => {}
                    }
                }
                // Required properties yield a missing_field error when absent.
                Ok(ReceiptRuleProperties {
                    after: after,
                    rule: rule.ok_or(::serde::de::Error::missing_field("Rule"))?,
                    rule_set_name: rule_set_name.ok_or(::serde::de::Error::missing_field("RuleSetName"))?,
                })
            }
        }
        d.deserialize_map(Visitor)
    }
}
impl ::Resource for ReceiptRule {
    type Properties = ReceiptRuleProperties;
    const TYPE: &'static str = "AWS::SES::ReceiptRule";
    fn properties(&self) -> &ReceiptRuleProperties {
        &self.properties
    }
    fn properties_mut(&mut self) -> &mut ReceiptRuleProperties {
        &mut self.properties
    }
}
impl ::private::Sealed for ReceiptRule {}
impl From<ReceiptRuleProperties> for ReceiptRule {
    fn from(properties: ReceiptRuleProperties) -> ReceiptRule {
        ReceiptRule { properties }
    }
}
/// The [`AWS::SES::ReceiptRuleSet`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ses-receiptruleset.html) resource type.
// NOTE(review): machine-generated boilerplate — prefer regenerating over
// hand-editing.
#[derive(Debug, Default)]
pub struct ReceiptRuleSet {
    properties: ReceiptRuleSetProperties
}
/// Properties for the `ReceiptRuleSet` resource.
#[derive(Debug, Default)]
pub struct ReceiptRuleSetProperties {
    /// Property [`RuleSetName`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ses-receiptruleset.html#cfn-ses-receiptruleset-rulesetname).
    ///
    /// Update type: _Immutable_.
    /// AWS CloudFormation replaces the resource when you change this property.
    pub rule_set_name: Option<::Value<String>>,
}
impl ::serde::Serialize for ReceiptRuleSetProperties {
    fn serialize<S: ::serde::Serializer>(&self, s: S) -> Result<S::Ok, S::Error> {
        let mut map = ::serde::Serializer::serialize_map(s, None)?;
        // Optional property: omitted entirely when `None`.
        if let Some(ref rule_set_name) = self.rule_set_name {
            ::serde::ser::SerializeMap::serialize_entry(&mut map, "RuleSetName", rule_set_name)?;
        }
        ::serde::ser::SerializeMap::end(map)
    }
}
impl<'de> ::serde::Deserialize<'de> for ReceiptRuleSetProperties {
    fn deserialize<D: ::serde::Deserializer<'de>>(d: D) -> Result<ReceiptRuleSetProperties, D::Error> {
        struct Visitor;
        impl<'de> ::serde::de::Visitor<'de> for Visitor {
            type Value = ReceiptRuleSetProperties;
            fn expecting(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
                write!(f, "a struct of type ReceiptRuleSetProperties")
            }
            fn visit_map<A: ::serde::de::MapAccess<'de>>(self, mut map: A) -> Result<Self::Value, A::Error> {
                let mut rule_set_name: Option<::Value<String>> = None;
                while let Some(__cfn_key) = ::serde::de::MapAccess::next_key::<String>(&mut map)? {
                    match __cfn_key.as_ref() {
                        "RuleSetName" => {
                            rule_set_name = ::serde::de::MapAccess::next_value(&mut map)?;
                        }
                        // Unrecognized keys are ignored without consuming their
                        // values — may violate serde's MapAccess contract
                        // (TODO confirm; see note on ConfigurationSet).
                        _ => {}
                    }
                }
                Ok(ReceiptRuleSetProperties {
                    rule_set_name: rule_set_name,
                })
            }
        }
        d.deserialize_map(Visitor)
    }
}
impl ::Resource for ReceiptRuleSet {
    type Properties = ReceiptRuleSetProperties;
    const TYPE: &'static str = "AWS::SES::ReceiptRuleSet";
    fn properties(&self) -> &ReceiptRuleSetProperties {
        &self.properties
    }
    fn properties_mut(&mut self) -> &mut ReceiptRuleSetProperties {
        &mut self.properties
    }
}
impl ::private::Sealed for ReceiptRuleSet {}
impl From<ReceiptRuleSetProperties> for ReceiptRuleSet {
    fn from(properties: ReceiptRuleSetProperties) -> ReceiptRuleSet {
        ReceiptRuleSet { properties }
    }
}
/// The [`AWS::SES::Template`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ses-template.html) resource type.
#[derive(Debug, Default)]
pub struct Template {
properties: TemplateProperties
}
/// Properties for the `Template` resource.
#[derive(Debug, Default)]
pub struct TemplateProperties {
/// Property [`Template`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ses-template.html#cfn-ses-template-template).
///
/// Update type: _Mutable_.
/// AWS CloudFormation doesn't replace the resource when you change this property.
pub template: Option<::Value<self::template::Template>>,
}
impl ::serde::Serialize for TemplateProperties {
fn serialize<S: ::serde::Serializer>(&self, s: S) -> Result<S::Ok, S::Error> {
let mut map = ::serde::Serializer::serialize_map(s, None)?;
if let Some(ref template) = self.template {
::serde::ser::SerializeMap::serialize_entry(&mut map, "Template", template)?;
}
::serde::ser::SerializeMap::end(map)
}
}
impl<'de> ::serde::Deserialize<'de> for TemplateProperties {
    /// Deserializes `TemplateProperties` from a CloudFormation JSON map.
    fn deserialize<D: ::serde::Deserializer<'de>>(d: D) -> Result<TemplateProperties, D::Error> {
        /// Map visitor that collects the recognized keys.
        struct Visitor;
        impl<'de> ::serde::de::Visitor<'de> for Visitor {
            type Value = TemplateProperties;
            fn expecting(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
                write!(f, "a struct of type TemplateProperties")
            }
            fn visit_map<A: ::serde::de::MapAccess<'de>>(self, mut map: A) -> Result<Self::Value, A::Error> {
                let mut template: Option<::Value<self::template::Template>> = None;
                while let Some(__cfn_key) = map.next_key::<String>()? {
                    match __cfn_key.as_ref() {
                        "Template" => template = map.next_value()?,
                        // Consume and discard values of unrecognized keys;
                        // leaving a value unread after `next_key` violates the
                        // MapAccess contract and desynchronizes the deserializer.
                        _ => { let _: ::serde::de::IgnoredAny = map.next_value()?; }
                    }
                }
                Ok(TemplateProperties { template })
            }
        }
        d.deserialize_map(Visitor)
    }
}
impl ::Resource for Template {
    type Properties = TemplateProperties;
    // CloudFormation resource type name for this resource.
    const TYPE: &'static str = "AWS::SES::Template";
    /// Shared access to the resource's properties.
    fn properties(&self) -> &TemplateProperties {
        &self.properties
    }
    /// Mutable access to the resource's properties.
    fn properties_mut(&mut self) -> &mut TemplateProperties {
        &mut self.properties
    }
}
impl ::private::Sealed for Template {}

impl From<TemplateProperties> for Template {
    /// Wraps a properties value in its resource type.
    fn from(properties: TemplateProperties) -> Template {
        Self { properties }
    }
}
pub mod configuration_set_event_destination {
//! Property types for the `ConfigurationSetEventDestination` resource.
    /// The [`AWS::SES::ConfigurationSetEventDestination.CloudWatchDestination`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ses-configurationseteventdestination-cloudwatchdestination.html) property type.
    ///
    /// All fields are optional; `None` values are omitted from the serialized form.
    #[derive(Debug, Default)]
    pub struct CloudWatchDestination {
        /// Property [`DimensionConfigurations`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ses-configurationseteventdestination-cloudwatchdestination.html#cfn-ses-configurationseteventdestination-cloudwatchdestination-dimensionconfigurations).
        ///
        /// Update type: _Mutable_.
        /// AWS CloudFormation doesn't replace the resource when you change this property.
        pub dimension_configurations: Option<::ValueList<DimensionConfiguration>>,
    }
impl ::codec::SerializeValue for CloudWatchDestination {
fn serialize<S: ::serde::Serializer>(&self, s: S) -> Result<S::Ok, S::Error> {
let mut map = ::serde::Serializer::serialize_map(s, None)?;
if let Some(ref dimension_configurations) = self.dimension_configurations {
::serde::ser::SerializeMap::serialize_entry(&mut map, "DimensionConfigurations", dimension_configurations)?;
}
::serde::ser::SerializeMap::end(map)
}
}
impl ::codec::DeserializeValue for CloudWatchDestination {
fn deserialize<'de, D: ::serde::Deserializer<'de>>(d: D) -> Result<CloudWatchDestination, D::Error> {
struct Visitor;
impl<'de> ::serde::de::Visitor<'de> for Visitor {
type Value = CloudWatchDestination;
fn expecting(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
write!(f, "a struct of type CloudWatchDestination")
}
fn visit_map<A: ::serde::de::MapAccess<'de>>(self, mut map: A) -> Result<Self::Value, A::Error> {
let mut dimension_configurations: Option<::ValueList<DimensionConfiguration>> = None;
while let Some(__cfn_key) = ::serde::de::MapAccess::next_key::<String>(&mut map)? {
match __cfn_key.as_ref() {
"DimensionConfigurations" => {
dimension_configurations = ::serde::de::MapAccess::next_value(&mut map)?;
}
_ => {}
}
}
Ok(CloudWatchDestination {
dimension_configurations: dimension_configurations,
})
}
}
d.deserialize_map(Visitor)
}
}
    /// The [`AWS::SES::ConfigurationSetEventDestination.DimensionConfiguration`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ses-configurationseteventdestination-dimensionconfiguration.html) property type.
    ///
    /// All three properties are required; deserialization fails if any is missing.
    #[derive(Debug, Default)]
    pub struct DimensionConfiguration {
        /// Property [`DefaultDimensionValue`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ses-configurationseteventdestination-dimensionconfiguration.html#cfn-ses-configurationseteventdestination-dimensionconfiguration-defaultdimensionvalue).
        ///
        /// Update type: _Mutable_.
        /// AWS CloudFormation doesn't replace the resource when you change this property.
        pub default_dimension_value: ::Value<String>,
        /// Property [`DimensionName`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ses-configurationseteventdestination-dimensionconfiguration.html#cfn-ses-configurationseteventdestination-dimensionconfiguration-dimensionname).
        ///
        /// Update type: _Mutable_.
        /// AWS CloudFormation doesn't replace the resource when you change this property.
        pub dimension_name: ::Value<String>,
        /// Property [`DimensionValueSource`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ses-configurationseteventdestination-dimensionconfiguration.html#cfn-ses-configurationseteventdestination-dimensionconfiguration-dimensionvaluesource).
        ///
        /// Update type: _Mutable_.
        /// AWS CloudFormation doesn't replace the resource when you change this property.
        pub dimension_value_source: ::Value<String>,
    }
impl ::codec::SerializeValue for DimensionConfiguration {
fn serialize<S: ::serde::Serializer>(&self, s: S) -> Result<S::Ok, S::Error> {
let mut map = ::serde::Serializer::serialize_map(s, None)?;
::serde::ser::SerializeMap::serialize_entry(&mut map, "DefaultDimensionValue", &self.default_dimension_value)?;
::serde::ser::SerializeMap::serialize_entry(&mut map, "DimensionName", &self.dimension_name)?;
::serde::ser::SerializeMap::serialize_entry(&mut map, "DimensionValueSource", &self.dimension_value_source)?;
::serde::ser::SerializeMap::end(map)
}
}
impl ::codec::DeserializeValue for DimensionConfiguration {
fn deserialize<'de, D: ::serde::Deserializer<'de>>(d: D) -> Result<DimensionConfiguration, D::Error> {
struct Visitor;
impl<'de> ::serde::de::Visitor<'de> for Visitor {
type Value = DimensionConfiguration;
fn expecting(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
write!(f, "a struct of type DimensionConfiguration")
}
fn visit_map<A: ::serde::de::MapAccess<'de>>(self, mut map: A) -> Result<Self::Value, A::Error> {
let mut default_dimension_value: Option<::Value<String>> = None;
let mut dimension_name: Option<::Value<String>> = None;
let mut dimension_value_source: Option<::Value<String>> = None;
while let Some(__cfn_key) = ::serde::de::MapAccess::next_key::<String>(&mut map)? {
match __cfn_key.as_ref() {
"DefaultDimensionValue" => {
default_dimension_value = ::serde::de::MapAccess::next_value(&mut map)?;
}
"DimensionName" => {
dimension_name = ::serde::de::MapAccess::next_value(&mut map)?;
}
"DimensionValueSource" => {
dimension_value_source = ::serde::de::MapAccess::next_value(&mut map)?;
}
_ => {}
}
}
Ok(DimensionConfiguration {
default_dimension_value: default_dimension_value.ok_or(::serde::de::Error::missing_field("DefaultDimensionValue"))?,
dimension_name: dimension_name.ok_or(::serde::de::Error::missing_field("DimensionName"))?,
dimension_value_source: dimension_value_source.ok_or(::serde::de::Error::missing_field("DimensionValueSource"))?,
})
}
}
d.deserialize_map(Visitor)
}
}
    /// The [`AWS::SES::ConfigurationSetEventDestination.EventDestination`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ses-configurationseteventdestination-eventdestination.html) property type.
    ///
    /// `MatchingEventTypes` is required; all other properties are optional.
    #[derive(Debug, Default)]
    pub struct EventDestination {
        /// Property [`CloudWatchDestination`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ses-configurationseteventdestination-eventdestination.html#cfn-ses-configurationseteventdestination-eventdestination-cloudwatchdestination).
        ///
        /// Update type: _Mutable_.
        /// AWS CloudFormation doesn't replace the resource when you change this property.
        pub cloud_watch_destination: Option<::Value<CloudWatchDestination>>,
        /// Property [`Enabled`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ses-configurationseteventdestination-eventdestination.html#cfn-ses-configurationseteventdestination-eventdestination-enabled).
        ///
        /// Update type: _Mutable_.
        /// AWS CloudFormation doesn't replace the resource when you change this property.
        pub enabled: Option<::Value<bool>>,
        /// Property [`KinesisFirehoseDestination`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ses-configurationseteventdestination-eventdestination.html#cfn-ses-configurationseteventdestination-eventdestination-kinesisfirehosedestination).
        ///
        /// Update type: _Mutable_.
        /// AWS CloudFormation doesn't replace the resource when you change this property.
        pub kinesis_firehose_destination: Option<::Value<KinesisFirehoseDestination>>,
        /// Property [`MatchingEventTypes`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ses-configurationseteventdestination-eventdestination.html#cfn-ses-configurationseteventdestination-eventdestination-matchingeventtypes).
        ///
        /// Update type: _Mutable_.
        /// AWS CloudFormation doesn't replace the resource when you change this property.
        pub matching_event_types: ::ValueList<String>,
        /// Property [`Name`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ses-configurationseteventdestination-eventdestination.html#cfn-ses-configurationseteventdestination-eventdestination-name).
        ///
        /// Update type: _Mutable_.
        /// AWS CloudFormation doesn't replace the resource when you change this property.
        pub name: Option<::Value<String>>,
    }
impl ::codec::SerializeValue for EventDestination {
fn serialize<S: ::serde::Serializer>(&self, s: S) -> Result<S::Ok, S::Error> {
let mut map = ::serde::Serializer::serialize_map(s, None)?;
if let Some(ref cloud_watch_destination) = self.cloud_watch_destination {
::serde::ser::SerializeMap::serialize_entry(&mut map, "CloudWatchDestination", cloud_watch_destination)?;
}
if let Some(ref enabled) = self.enabled {
::serde::ser::SerializeMap::serialize_entry(&mut map, "Enabled", enabled)?;
}
if let Some(ref kinesis_firehose_destination) = self.kinesis_firehose_destination {
::serde::ser::SerializeMap::serialize_entry(&mut map, "KinesisFirehoseDestination", kinesis_firehose_destination)?;
}
::serde::ser::SerializeMap::serialize_entry(&mut map, "MatchingEventTypes", &self.matching_event_types)?;
if let Some(ref name) = self.name {
::serde::ser::SerializeMap::serialize_entry(&mut map, "Name", name)?;
}
::serde::ser::SerializeMap::end(map)
}
}
impl ::codec::DeserializeValue for EventDestination {
fn deserialize<'de, D: ::serde::Deserializer<'de>>(d: D) -> Result<EventDestination, D::Error> {
struct Visitor;
impl<'de> ::serde::de::Visitor<'de> for Visitor {
type Value = EventDestination;
fn expecting(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
write!(f, "a struct of type EventDestination")
}
fn visit_map<A: ::serde::de::MapAccess<'de>>(self, mut map: A) -> Result<Self::Value, A::Error> {
let mut cloud_watch_destination: Option<::Value<CloudWatchDestination>> = None;
let mut enabled: Option<::Value<bool>> = None;
let mut kinesis_firehose_destination: Option<::Value<KinesisFirehoseDestination>> = None;
let mut matching_event_types: Option<::ValueList<String>> = None;
let mut name: Option<::Value<String>> = None;
while let Some(__cfn_key) = ::serde::de::MapAccess::next_key::<String>(&mut map)? {
match __cfn_key.as_ref() {
"CloudWatchDestination" => {
cloud_watch_destination = ::serde::de::MapAccess::next_value(&mut map)?;
}
"Enabled" => {
enabled = ::serde::de::MapAccess::next_value(&mut map)?;
}
"KinesisFirehoseDestination" => {
kinesis_firehose_destination = ::serde::de::MapAccess::next_value(&mut map)?;
}
"MatchingEventTypes" => {
matching_event_types = ::serde::de::MapAccess::next_value(&mut map)?;
}
"Name" => {
name = ::serde::de::MapAccess::next_value(&mut map)?;
}
_ => {}
}
}
Ok(EventDestination {
cloud_watch_destination: cloud_watch_destination,
enabled: enabled,
kinesis_firehose_destination: kinesis_firehose_destination,
matching_event_types: matching_event_types.ok_or(::serde::de::Error::missing_field("MatchingEventTypes"))?,
name: name,
})
}
}
d.deserialize_map(Visitor)
}
}
    /// The [`AWS::SES::ConfigurationSetEventDestination.KinesisFirehoseDestination`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ses-configurationseteventdestination-kinesisfirehosedestination.html) property type.
    ///
    /// Both properties are required; deserialization fails if either is missing.
    #[derive(Debug, Default)]
    pub struct KinesisFirehoseDestination {
        /// Property [`DeliveryStreamARN`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ses-configurationseteventdestination-kinesisfirehosedestination.html#cfn-ses-configurationseteventdestination-kinesisfirehosedestination-deliverystreamarn).
        ///
        /// Update type: _Mutable_.
        /// AWS CloudFormation doesn't replace the resource when you change this property.
        pub delivery_stream_arn: ::Value<String>,
        /// Property [`IAMRoleARN`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ses-configurationseteventdestination-kinesisfirehosedestination.html#cfn-ses-configurationseteventdestination-kinesisfirehosedestination-iamrolearn).
        ///
        /// Update type: _Mutable_.
        /// AWS CloudFormation doesn't replace the resource when you change this property.
        pub iam_role_arn: ::Value<String>,
    }
impl ::codec::SerializeValue for KinesisFirehoseDestination {
fn serialize<S: ::serde::Serializer>(&self, s: S) -> Result<S::Ok, S::Error> {
let mut map = ::serde::Serializer::serialize_map(s, None)?;
::serde::ser::SerializeMap::serialize_entry(&mut map, "DeliveryStreamARN", &self.delivery_stream_arn)?;
::serde::ser::SerializeMap::serialize_entry(&mut map, "IAMRoleARN", &self.iam_role_arn)?;
::serde::ser::SerializeMap::end(map)
}
}
impl ::codec::DeserializeValue for KinesisFirehoseDestination {
fn deserialize<'de, D: ::serde::Deserializer<'de>>(d: D) -> Result<KinesisFirehoseDestination, D::Error> {
struct Visitor;
impl<'de> ::serde::de::Visitor<'de> for Visitor {
type Value = KinesisFirehoseDestination;
fn expecting(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
write!(f, "a struct of type KinesisFirehoseDestination")
}
fn visit_map<A: ::serde::de::MapAccess<'de>>(self, mut map: A) -> Result<Self::Value, A::Error> {
let mut delivery_stream_arn: Option<::Value<String>> = None;
let mut iam_role_arn: Option<::Value<String>> = None;
while let Some(__cfn_key) = ::serde::de::MapAccess::next_key::<String>(&mut map)? {
match __cfn_key.as_ref() {
"DeliveryStreamARN" => {
delivery_stream_arn = ::serde::de::MapAccess::next_value(&mut map)?;
}
"IAMRoleARN" => {
iam_role_arn = ::serde::de::MapAccess::next_value(&mut map)?;
}
_ => {}
}
}
Ok(KinesisFirehoseDestination {
delivery_stream_arn: delivery_stream_arn.ok_or(::serde::de::Error::missing_field("DeliveryStreamARN"))?,
iam_role_arn: iam_role_arn.ok_or(::serde::de::Error::missing_field("IAMRoleARN"))?,
})
}
}
d.deserialize_map(Visitor)
}
}
}
pub mod contact_list {
//! Property types for the `ContactList` resource.
    /// The [`AWS::SES::ContactList.Topic`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ses-contactlist-topic.html) property type.
    ///
    /// `Description` is optional; the other three properties are required.
    #[derive(Debug, Default)]
    pub struct Topic {
        /// Property [`DefaultSubscriptionStatus`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ses-contactlist-topic.html#cfn-ses-contactlist-topic-defaultsubscriptionstatus).
        ///
        /// Update type: _Mutable_.
        /// AWS CloudFormation doesn't replace the resource when you change this property.
        pub default_subscription_status: ::Value<String>,
        /// Property [`Description`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ses-contactlist-topic.html#cfn-ses-contactlist-topic-description).
        ///
        /// Update type: _Mutable_.
        /// AWS CloudFormation doesn't replace the resource when you change this property.
        pub description: Option<::Value<String>>,
        /// Property [`DisplayName`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ses-contactlist-topic.html#cfn-ses-contactlist-topic-displayname).
        ///
        /// Update type: _Mutable_.
        /// AWS CloudFormation doesn't replace the resource when you change this property.
        pub display_name: ::Value<String>,
        /// Property [`TopicName`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ses-contactlist-topic.html#cfn-ses-contactlist-topic-topicname).
        ///
        /// Update type: _Mutable_.
        /// AWS CloudFormation doesn't replace the resource when you change this property.
        pub topic_name: ::Value<String>,
    }
impl ::codec::SerializeValue for Topic {
fn serialize<S: ::serde::Serializer>(&self, s: S) -> Result<S::Ok, S::Error> {
let mut map = ::serde::Serializer::serialize_map(s, None)?;
::serde::ser::SerializeMap::serialize_entry(&mut map, "DefaultSubscriptionStatus", &self.default_subscription_status)?;
if let Some(ref description) = self.description {
::serde::ser::SerializeMap::serialize_entry(&mut map, "Description", description)?;
}
::serde::ser::SerializeMap::serialize_entry(&mut map, "DisplayName", &self.display_name)?;
::serde::ser::SerializeMap::serialize_entry(&mut map, "TopicName", &self.topic_name)?;
::serde::ser::SerializeMap::end(map)
}
}
impl ::codec::DeserializeValue for Topic {
fn deserialize<'de, D: ::serde::Deserializer<'de>>(d: D) -> Result<Topic, D::Error> {
struct Visitor;
impl<'de> ::serde::de::Visitor<'de> for Visitor {
type Value = Topic;
fn expecting(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
write!(f, "a struct of type Topic")
}
fn visit_map<A: ::serde::de::MapAccess<'de>>(self, mut map: A) -> Result<Self::Value, A::Error> {
let mut default_subscription_status: Option<::Value<String>> = None;
let mut description: Option<::Value<String>> = None;
let mut display_name: Option<::Value<String>> = None;
let mut topic_name: Option<::Value<String>> = None;
while let Some(__cfn_key) = ::serde::de::MapAccess::next_key::<String>(&mut map)? {
match __cfn_key.as_ref() {
"DefaultSubscriptionStatus" => {
default_subscription_status = ::serde::de::MapAccess::next_value(&mut map)?;
}
"Description" => {
description = ::serde::de::MapAccess::next_value(&mut map)?;
}
"DisplayName" => {
display_name = ::serde::de::MapAccess::next_value(&mut map)?;
}
"TopicName" => {
topic_name = ::serde::de::MapAccess::next_value(&mut map)?;
}
_ => {}
}
}
Ok(Topic {
default_subscription_status: default_subscription_status.ok_or(::serde::de::Error::missing_field("DefaultSubscriptionStatus"))?,
description: description,
display_name: display_name.ok_or(::serde::de::Error::missing_field("DisplayName"))?,
topic_name: topic_name.ok_or(::serde::de::Error::missing_field("TopicName"))?,
})
}
}
d.deserialize_map(Visitor)
}
}
}
pub mod receipt_filter {
//! Property types for the `ReceiptFilter` resource.
    /// The [`AWS::SES::ReceiptFilter.Filter`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ses-receiptfilter-filter.html) property type.
    ///
    /// `IpFilter` is required; `Name` is optional.
    #[derive(Debug, Default)]
    pub struct Filter {
        /// Property [`IpFilter`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ses-receiptfilter-filter.html#cfn-ses-receiptfilter-filter-ipfilter).
        ///
        /// Update type: _Mutable_.
        /// AWS CloudFormation doesn't replace the resource when you change this property.
        pub ip_filter: ::Value<IpFilter>,
        /// Property [`Name`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ses-receiptfilter-filter.html#cfn-ses-receiptfilter-filter-name).
        ///
        /// Update type: _Mutable_.
        /// AWS CloudFormation doesn't replace the resource when you change this property.
        pub name: Option<::Value<String>>,
    }
impl ::codec::SerializeValue for Filter {
fn serialize<S: ::serde::Serializer>(&self, s: S) -> Result<S::Ok, S::Error> {
let mut map = ::serde::Serializer::serialize_map(s, None)?;
::serde::ser::SerializeMap::serialize_entry(&mut map, "IpFilter", &self.ip_filter)?;
if let Some(ref name) = self.name {
::serde::ser::SerializeMap::serialize_entry(&mut map, "Name", name)?;
}
::serde::ser::SerializeMap::end(map)
}
}
impl ::codec::DeserializeValue for Filter {
fn deserialize<'de, D: ::serde::Deserializer<'de>>(d: D) -> Result<Filter, D::Error> {
struct Visitor;
impl<'de> ::serde::de::Visitor<'de> for Visitor {
type Value = Filter;
fn expecting(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
write!(f, "a struct of type Filter")
}
fn visit_map<A: ::serde::de::MapAccess<'de>>(self, mut map: A) -> Result<Self::Value, A::Error> {
let mut ip_filter: Option<::Value<IpFilter>> = None;
let mut name: Option<::Value<String>> = None;
while let Some(__cfn_key) = ::serde::de::MapAccess::next_key::<String>(&mut map)? {
match __cfn_key.as_ref() {
"IpFilter" => {
ip_filter = ::serde::de::MapAccess::next_value(&mut map)?;
}
"Name" => {
name = ::serde::de::MapAccess::next_value(&mut map)?;
}
_ => {}
}
}
Ok(Filter {
ip_filter: ip_filter.ok_or(::serde::de::Error::missing_field("IpFilter"))?,
name: name,
})
}
}
d.deserialize_map(Visitor)
}
}
    /// The [`AWS::SES::ReceiptFilter.IpFilter`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ses-receiptfilter-ipfilter.html) property type.
    ///
    /// Both properties are required; deserialization fails if either is missing.
    #[derive(Debug, Default)]
    pub struct IpFilter {
        /// Property [`Cidr`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ses-receiptfilter-ipfilter.html#cfn-ses-receiptfilter-ipfilter-cidr).
        ///
        /// Update type: _Mutable_.
        /// AWS CloudFormation doesn't replace the resource when you change this property.
        pub cidr: ::Value<String>,
        /// Property [`Policy`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ses-receiptfilter-ipfilter.html#cfn-ses-receiptfilter-ipfilter-policy).
        ///
        /// Update type: _Mutable_.
        /// AWS CloudFormation doesn't replace the resource when you change this property.
        pub policy: ::Value<String>,
    }
impl ::codec::SerializeValue for IpFilter {
fn serialize<S: ::serde::Serializer>(&self, s: S) -> Result<S::Ok, S::Error> {
let mut map = ::serde::Serializer::serialize_map(s, None)?;
::serde::ser::SerializeMap::serialize_entry(&mut map, "Cidr", &self.cidr)?;
::serde::ser::SerializeMap::serialize_entry(&mut map, "Policy", &self.policy)?;
::serde::ser::SerializeMap::end(map)
}
}
impl ::codec::DeserializeValue for IpFilter {
fn deserialize<'de, D: ::serde::Deserializer<'de>>(d: D) -> Result<IpFilter, D::Error> {
struct Visitor;
impl<'de> ::serde::de::Visitor<'de> for Visitor {
type Value = IpFilter;
fn expecting(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
write!(f, "a struct of type IpFilter")
}
fn visit_map<A: ::serde::de::MapAccess<'de>>(self, mut map: A) -> Result<Self::Value, A::Error> {
let mut cidr: Option<::Value<String>> = None;
let mut policy: Option<::Value<String>> = None;
while let Some(__cfn_key) = ::serde::de::MapAccess::next_key::<String>(&mut map)? {
match __cfn_key.as_ref() {
"Cidr" => {
cidr = ::serde::de::MapAccess::next_value(&mut map)?;
}
"Policy" => {
policy = ::serde::de::MapAccess::next_value(&mut map)?;
}
_ => {}
}
}
Ok(IpFilter {
cidr: cidr.ok_or(::serde::de::Error::missing_field("Cidr"))?,
policy: policy.ok_or(::serde::de::Error::missing_field("Policy"))?,
})
}
}
d.deserialize_map(Visitor)
}
}
}
pub mod receipt_rule {
//! Property types for the `ReceiptRule` resource.
    /// The [`AWS::SES::ReceiptRule.Action`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ses-receiptrule-action.html) property type.
    ///
    /// All fields are optional; `None` values are omitted from the serialized form.
    #[derive(Debug, Default)]
    pub struct Action {
        /// Property [`AddHeaderAction`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ses-receiptrule-action.html#cfn-ses-receiptrule-action-addheaderaction).
        ///
        /// Update type: _Mutable_.
        /// AWS CloudFormation doesn't replace the resource when you change this property.
        pub add_header_action: Option<::Value<AddHeaderAction>>,
        /// Property [`BounceAction`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ses-receiptrule-action.html#cfn-ses-receiptrule-action-bounceaction).
        ///
        /// Update type: _Mutable_.
        /// AWS CloudFormation doesn't replace the resource when you change this property.
        pub bounce_action: Option<::Value<BounceAction>>,
        /// Property [`LambdaAction`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ses-receiptrule-action.html#cfn-ses-receiptrule-action-lambdaaction).
        ///
        /// Update type: _Mutable_.
        /// AWS CloudFormation doesn't replace the resource when you change this property.
        pub lambda_action: Option<::Value<LambdaAction>>,
        /// Property [`S3Action`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ses-receiptrule-action.html#cfn-ses-receiptrule-action-s3action).
        ///
        /// Update type: _Mutable_.
        /// AWS CloudFormation doesn't replace the resource when you change this property.
        pub s3_action: Option<::Value<S3Action>>,
        /// Property [`SNSAction`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ses-receiptrule-action.html#cfn-ses-receiptrule-action-snsaction).
        ///
        /// Update type: _Mutable_.
        /// AWS CloudFormation doesn't replace the resource when you change this property.
        pub sns_action: Option<::Value<SNSAction>>,
        /// Property [`StopAction`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ses-receiptrule-action.html#cfn-ses-receiptrule-action-stopaction).
        ///
        /// Update type: _Mutable_.
        /// AWS CloudFormation doesn't replace the resource when you change this property.
        pub stop_action: Option<::Value<StopAction>>,
        /// Property [`WorkmailAction`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ses-receiptrule-action.html#cfn-ses-receiptrule-action-workmailaction).
        ///
        /// Update type: _Mutable_.
        /// AWS CloudFormation doesn't replace the resource when you change this property.
        pub workmail_action: Option<::Value<WorkmailAction>>,
    }
impl ::codec::SerializeValue for Action {
fn serialize<S: ::serde::Serializer>(&self, s: S) -> Result<S::Ok, S::Error> {
let mut map = ::serde::Serializer::serialize_map(s, None)?;
if let Some(ref add_header_action) = self.add_header_action {
::serde::ser::SerializeMap::serialize_entry(&mut map, "AddHeaderAction", add_header_action)?;
}
if let Some(ref bounce_action) = self.bounce_action {
::serde::ser::SerializeMap::serialize_entry(&mut map, "BounceAction", bounce_action)?;
}
if let Some(ref lambda_action) = self.lambda_action {
::serde::ser::SerializeMap::serialize_entry(&mut map, "LambdaAction", lambda_action)?;
}
if let Some(ref s3_action) = self.s3_action {
::serde::ser::SerializeMap::serialize_entry(&mut map, "S3Action", s3_action)?;
}
if let Some(ref sns_action) = self.sns_action {
::serde::ser::SerializeMap::serialize_entry(&mut map, "SNSAction", sns_action)?;
}
if let Some(ref stop_action) = self.stop_action {
::serde::ser::SerializeMap::serialize_entry(&mut map, "StopAction", stop_action)?;
}
if let Some(ref workmail_action) = self.workmail_action {
::serde::ser::SerializeMap::serialize_entry(&mut map, "WorkmailAction", workmail_action)?;
}
::serde::ser::SerializeMap::end(map)
}
}
impl ::codec::DeserializeValue for Action {
fn deserialize<'de, D: ::serde::Deserializer<'de>>(d: D) -> Result<Action, D::Error> {
struct Visitor;
impl<'de> ::serde::de::Visitor<'de> for Visitor {
type Value = Action;
fn expecting(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
write!(f, "a struct of type Action")
}
fn visit_map<A: ::serde::de::MapAccess<'de>>(self, mut map: A) -> Result<Self::Value, A::Error> {
let mut add_header_action: Option<::Value<AddHeaderAction>> = None;
let mut bounce_action: Option<::Value<BounceAction>> = None;
let mut lambda_action: Option<::Value<LambdaAction>> = None;
let mut s3_action: Option<::Value<S3Action>> = None;
let mut sns_action: Option<::Value<SNSAction>> = None;
let mut stop_action: Option<::Value<StopAction>> = None;
let mut workmail_action: Option<::Value<WorkmailAction>> = None;
while let Some(__cfn_key) = ::serde::de::MapAccess::next_key::<String>(&mut map)? {
match __cfn_key.as_ref() {
"AddHeaderAction" => {
add_header_action = ::serde::de::MapAccess::next_value(&mut map)?;
}
"BounceAction" => {
bounce_action = ::serde::de::MapAccess::next_value(&mut map)?;
}
"LambdaAction" => {
lambda_action = ::serde::de::MapAccess::next_value(&mut map)?;
}
"S3Action" => {
s3_action = ::serde::de::MapAccess::next_value(&mut map)?;
}
"SNSAction" => {
sns_action = ::serde::de::MapAccess::next_value(&mut map)?;
}
"StopAction" => {
stop_action = ::serde::de::MapAccess::next_value(&mut map)?;
}
"WorkmailAction" => {
workmail_action = ::serde::de::MapAccess::next_value(&mut map)?;
}
_ => {}
}
}
Ok(Action {
add_header_action: add_header_action,
bounce_action: bounce_action,
lambda_action: lambda_action,
s3_action: s3_action,
sns_action: sns_action,
stop_action: stop_action,
workmail_action: workmail_action,
})
}
}
d.deserialize_map(Visitor)
}
}
    /// The [`AWS::SES::ReceiptRule.AddHeaderAction`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ses-receiptrule-addheaderaction.html) property type.
    ///
    /// Both properties are required; deserialization fails if either is missing.
    #[derive(Debug, Default)]
    pub struct AddHeaderAction {
        /// Property [`HeaderName`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ses-receiptrule-addheaderaction.html#cfn-ses-receiptrule-addheaderaction-headername).
        ///
        /// Update type: _Mutable_.
        /// AWS CloudFormation doesn't replace the resource when you change this property.
        pub header_name: ::Value<String>,
        /// Property [`HeaderValue`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ses-receiptrule-addheaderaction.html#cfn-ses-receiptrule-addheaderaction-headervalue).
        ///
        /// Update type: _Mutable_.
        /// AWS CloudFormation doesn't replace the resource when you change this property.
        pub header_value: ::Value<String>,
    }
impl ::codec::SerializeValue for AddHeaderAction {
fn serialize<S: ::serde::Serializer>(&self, s: S) -> Result<S::Ok, S::Error> {
let mut map = ::serde::Serializer::serialize_map(s, None)?;
::serde::ser::SerializeMap::serialize_entry(&mut map, "HeaderName", &self.header_name)?;
::serde::ser::SerializeMap::serialize_entry(&mut map, "HeaderValue", &self.header_value)?;
::serde::ser::SerializeMap::end(map)
}
}
impl ::codec::DeserializeValue for AddHeaderAction {
fn deserialize<'de, D: ::serde::Deserializer<'de>>(d: D) -> Result<AddHeaderAction, D::Error> {
struct Visitor;
impl<'de> ::serde::de::Visitor<'de> for Visitor {
type Value = AddHeaderAction;
fn expecting(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
write!(f, "a struct of type AddHeaderAction")
}
fn visit_map<A: ::serde::de::MapAccess<'de>>(self, mut map: A) -> Result<Self::Value, A::Error> {
let mut header_name: Option<::Value<String>> = None;
let mut header_value: Option<::Value<String>> = None;
while let Some(__cfn_key) = ::serde::de::MapAccess::next_key::<String>(&mut map)? {
match __cfn_key.as_ref() {
"HeaderName" => {
header_name = ::serde::de::MapAccess::next_value(&mut map)?;
}
"HeaderValue" => {
header_value = ::serde::de::MapAccess::next_value(&mut map)?;
}
_ => {}
}
}
Ok(AddHeaderAction {
header_name: header_name.ok_or(::serde::de::Error::missing_field("HeaderName"))?,
header_value: header_value.ok_or(::serde::de::Error::missing_field("HeaderValue"))?,
})
}
}
d.deserialize_map(Visitor)
}
}
    /// The [`AWS::SES::ReceiptRule.BounceAction`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ses-receiptrule-bounceaction.html) property type.
    ///
    /// `StatusCode` and `TopicArn` are optional; the other properties are required.
    #[derive(Debug, Default)]
    pub struct BounceAction {
        /// Property [`Message`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ses-receiptrule-bounceaction.html#cfn-ses-receiptrule-bounceaction-message).
        ///
        /// Update type: _Mutable_.
        /// AWS CloudFormation doesn't replace the resource when you change this property.
        pub message: ::Value<String>,
        /// Property [`Sender`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ses-receiptrule-bounceaction.html#cfn-ses-receiptrule-bounceaction-sender).
        ///
        /// Update type: _Mutable_.
        /// AWS CloudFormation doesn't replace the resource when you change this property.
        pub sender: ::Value<String>,
        /// Property [`SmtpReplyCode`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ses-receiptrule-bounceaction.html#cfn-ses-receiptrule-bounceaction-smtpreplycode).
        ///
        /// Update type: _Mutable_.
        /// AWS CloudFormation doesn't replace the resource when you change this property.
        pub smtp_reply_code: ::Value<String>,
        /// Property [`StatusCode`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ses-receiptrule-bounceaction.html#cfn-ses-receiptrule-bounceaction-statuscode).
        ///
        /// Update type: _Mutable_.
        /// AWS CloudFormation doesn't replace the resource when you change this property.
        pub status_code: Option<::Value<String>>,
        /// Property [`TopicArn`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ses-receiptrule-bounceaction.html#cfn-ses-receiptrule-bounceaction-topicarn).
        ///
        /// Update type: _Mutable_.
        /// AWS CloudFormation doesn't replace the resource when you change this property.
        pub topic_arn: Option<::Value<String>>,
    }
impl ::codec::SerializeValue for BounceAction {
fn serialize<S: ::serde::Serializer>(&self, s: S) -> Result<S::Ok, S::Error> {
let mut map = ::serde::Serializer::serialize_map(s, None)?;
::serde::ser::SerializeMap::serialize_entry(&mut map, "Message", &self.message)?;
::serde::ser::SerializeMap::serialize_entry(&mut map, "Sender", &self.sender)?;
::serde::ser::SerializeMap::serialize_entry(&mut map, "SmtpReplyCode", &self.smtp_reply_code)?;
if let Some(ref status_code) = self.status_code {
::serde::ser::SerializeMap::serialize_entry(&mut map, "StatusCode", status_code)?;
}
if let Some(ref topic_arn) = self.topic_arn {
::serde::ser::SerializeMap::serialize_entry(&mut map, "TopicArn", topic_arn)?;
}
::serde::ser::SerializeMap::end(map)
}
}
impl ::codec::DeserializeValue for BounceAction {
fn deserialize<'de, D: ::serde::Deserializer<'de>>(d: D) -> Result<BounceAction, D::Error> {
struct Visitor;
impl<'de> ::serde::de::Visitor<'de> for Visitor {
type Value = BounceAction;
fn expecting(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
write!(f, "a struct of type BounceAction")
}
fn visit_map<A: ::serde::de::MapAccess<'de>>(self, mut map: A) -> Result<Self::Value, A::Error> {
let mut message: Option<::Value<String>> = None;
let mut sender: Option<::Value<String>> = None;
let mut smtp_reply_code: Option<::Value<String>> = None;
let mut status_code: Option<::Value<String>> = None;
let mut topic_arn: Option<::Value<String>> = None;
while let Some(__cfn_key) = ::serde::de::MapAccess::next_key::<String>(&mut map)? {
match __cfn_key.as_ref() {
"Message" => {
message = ::serde::de::MapAccess::next_value(&mut map)?;
}
"Sender" => {
sender = ::serde::de::MapAccess::next_value(&mut map)?;
}
"SmtpReplyCode" => {
smtp_reply_code = ::serde::de::MapAccess::next_value(&mut map)?;
}
"StatusCode" => {
status_code = ::serde::de::MapAccess::next_value(&mut map)?;
}
"TopicArn" => {
topic_arn = ::serde::de::MapAccess::next_value(&mut map)?;
}
_ => {}
}
}
Ok(BounceAction {
message: message.ok_or(::serde::de::Error::missing_field("Message"))?,
sender: sender.ok_or(::serde::de::Error::missing_field("Sender"))?,
smtp_reply_code: smtp_reply_code.ok_or(::serde::de::Error::missing_field("SmtpReplyCode"))?,
status_code: status_code,
topic_arn: topic_arn,
})
}
}
d.deserialize_map(Visitor)
}
}
/// The [`AWS::SES::ReceiptRule.LambdaAction`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ses-receiptrule-lambdaaction.html) property type.
#[derive(Debug, Default)]
pub struct LambdaAction {
/// Property [`FunctionArn`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ses-receiptrule-lambdaaction.html#cfn-ses-receiptrule-lambdaaction-functionarn).
///
/// Update type: _Mutable_.
/// AWS CloudFormation doesn't replace the resource when you change this property.
pub function_arn: ::Value<String>,
/// Property [`InvocationType`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ses-receiptrule-lambdaaction.html#cfn-ses-receiptrule-lambdaaction-invocationtype).
///
/// Update type: _Mutable_.
/// AWS CloudFormation doesn't replace the resource when you change this property.
pub invocation_type: Option<::Value<String>>,
/// Property [`TopicArn`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ses-receiptrule-lambdaaction.html#cfn-ses-receiptrule-lambdaaction-topicarn).
///
/// Update type: _Mutable_.
/// AWS CloudFormation doesn't replace the resource when you change this property.
pub topic_arn: Option<::Value<String>>,
}
impl ::codec::SerializeValue for LambdaAction {
fn serialize<S: ::serde::Serializer>(&self, s: S) -> Result<S::Ok, S::Error> {
let mut map = ::serde::Serializer::serialize_map(s, None)?;
::serde::ser::SerializeMap::serialize_entry(&mut map, "FunctionArn", &self.function_arn)?;
if let Some(ref invocation_type) = self.invocation_type {
::serde::ser::SerializeMap::serialize_entry(&mut map, "InvocationType", invocation_type)?;
}
if let Some(ref topic_arn) = self.topic_arn {
::serde::ser::SerializeMap::serialize_entry(&mut map, "TopicArn", topic_arn)?;
}
::serde::ser::SerializeMap::end(map)
}
}
impl ::codec::DeserializeValue for LambdaAction {
fn deserialize<'de, D: ::serde::Deserializer<'de>>(d: D) -> Result<LambdaAction, D::Error> {
struct Visitor;
impl<'de> ::serde::de::Visitor<'de> for Visitor {
type Value = LambdaAction;
fn expecting(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
write!(f, "a struct of type LambdaAction")
}
fn visit_map<A: ::serde::de::MapAccess<'de>>(self, mut map: A) -> Result<Self::Value, A::Error> {
let mut function_arn: Option<::Value<String>> = None;
let mut invocation_type: Option<::Value<String>> = None;
let mut topic_arn: Option<::Value<String>> = None;
while let Some(__cfn_key) = ::serde::de::MapAccess::next_key::<String>(&mut map)? {
match __cfn_key.as_ref() {
"FunctionArn" => {
function_arn = ::serde::de::MapAccess::next_value(&mut map)?;
}
"InvocationType" => {
invocation_type = ::serde::de::MapAccess::next_value(&mut map)?;
}
"TopicArn" => {
topic_arn = ::serde::de::MapAccess::next_value(&mut map)?;
}
_ => {}
}
}
Ok(LambdaAction {
function_arn: function_arn.ok_or(::serde::de::Error::missing_field("FunctionArn"))?,
invocation_type: invocation_type,
topic_arn: topic_arn,
})
}
}
d.deserialize_map(Visitor)
}
}
/// The [`AWS::SES::ReceiptRule.Rule`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ses-receiptrule-rule.html) property type.
#[derive(Debug, Default)]
pub struct Rule {
/// Property [`Actions`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ses-receiptrule-rule.html#cfn-ses-receiptrule-rule-actions).
///
/// Update type: _Mutable_.
/// AWS CloudFormation doesn't replace the resource when you change this property.
pub actions: Option<::ValueList<Action>>,
/// Property [`Enabled`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ses-receiptrule-rule.html#cfn-ses-receiptrule-rule-enabled).
///
/// Update type: _Mutable_.
/// AWS CloudFormation doesn't replace the resource when you change this property.
pub enabled: Option<::Value<bool>>,
/// Property [`Name`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ses-receiptrule-rule.html#cfn-ses-receiptrule-rule-name).
///
/// Update type: _Immutable_.
/// AWS CloudFormation replaces the resource when you change this property.
pub name: Option<::Value<String>>,
/// Property [`Recipients`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ses-receiptrule-rule.html#cfn-ses-receiptrule-rule-recipients).
///
/// Update type: _Mutable_.
/// AWS CloudFormation doesn't replace the resource when you change this property.
pub recipients: Option<::ValueList<String>>,
/// Property [`ScanEnabled`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ses-receiptrule-rule.html#cfn-ses-receiptrule-rule-scanenabled).
///
/// Update type: _Mutable_.
/// AWS CloudFormation doesn't replace the resource when you change this property.
pub scan_enabled: Option<::Value<bool>>,
/// Property [`TlsPolicy`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ses-receiptrule-rule.html#cfn-ses-receiptrule-rule-tlspolicy).
///
/// Update type: _Mutable_.
/// AWS CloudFormation doesn't replace the resource when you change this property.
pub tls_policy: Option<::Value<String>>,
}
impl ::codec::SerializeValue for Rule {
fn serialize<S: ::serde::Serializer>(&self, s: S) -> Result<S::Ok, S::Error> {
let mut map = ::serde::Serializer::serialize_map(s, None)?;
if let Some(ref actions) = self.actions {
::serde::ser::SerializeMap::serialize_entry(&mut map, "Actions", actions)?;
}
if let Some(ref enabled) = self.enabled {
::serde::ser::SerializeMap::serialize_entry(&mut map, "Enabled", enabled)?;
}
if let Some(ref name) = self.name {
::serde::ser::SerializeMap::serialize_entry(&mut map, "Name", name)?;
}
if let Some(ref recipients) = self.recipients {
::serde::ser::SerializeMap::serialize_entry(&mut map, "Recipients", recipients)?;
}
if let Some(ref scan_enabled) = self.scan_enabled {
::serde::ser::SerializeMap::serialize_entry(&mut map, "ScanEnabled", scan_enabled)?;
}
if let Some(ref tls_policy) = self.tls_policy {
::serde::ser::SerializeMap::serialize_entry(&mut map, "TlsPolicy", tls_policy)?;
}
::serde::ser::SerializeMap::end(map)
}
}
impl ::codec::DeserializeValue for Rule {
fn deserialize<'de, D: ::serde::Deserializer<'de>>(d: D) -> Result<Rule, D::Error> {
struct Visitor;
impl<'de> ::serde::de::Visitor<'de> for Visitor {
type Value = Rule;
fn expecting(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
write!(f, "a struct of type Rule")
}
fn visit_map<A: ::serde::de::MapAccess<'de>>(self, mut map: A) -> Result<Self::Value, A::Error> {
let mut actions: Option<::ValueList<Action>> = None;
let mut enabled: Option<::Value<bool>> = None;
let mut name: Option<::Value<String>> = None;
let mut recipients: Option<::ValueList<String>> = None;
let mut scan_enabled: Option<::Value<bool>> = None;
let mut tls_policy: Option<::Value<String>> = None;
while let Some(__cfn_key) = ::serde::de::MapAccess::next_key::<String>(&mut map)? {
match __cfn_key.as_ref() {
"Actions" => {
actions = ::serde::de::MapAccess::next_value(&mut map)?;
}
"Enabled" => {
enabled = ::serde::de::MapAccess::next_value(&mut map)?;
}
"Name" => {
name = ::serde::de::MapAccess::next_value(&mut map)?;
}
"Recipients" => {
recipients = ::serde::de::MapAccess::next_value(&mut map)?;
}
"ScanEnabled" => {
scan_enabled = ::serde::de::MapAccess::next_value(&mut map)?;
}
"TlsPolicy" => {
tls_policy = ::serde::de::MapAccess::next_value(&mut map)?;
}
_ => {}
}
}
Ok(Rule {
actions: actions,
enabled: enabled,
name: name,
recipients: recipients,
scan_enabled: scan_enabled,
tls_policy: tls_policy,
})
}
}
d.deserialize_map(Visitor)
}
}
/// The [`AWS::SES::ReceiptRule.S3Action`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ses-receiptrule-s3action.html) property type.
#[derive(Debug, Default)]
pub struct S3Action {
/// Property [`BucketName`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ses-receiptrule-s3action.html#cfn-ses-receiptrule-s3action-bucketname).
///
/// Update type: _Mutable_.
/// AWS CloudFormation doesn't replace the resource when you change this property.
pub bucket_name: ::Value<String>,
/// Property [`KmsKeyArn`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ses-receiptrule-s3action.html#cfn-ses-receiptrule-s3action-kmskeyarn).
///
/// Update type: _Mutable_.
/// AWS CloudFormation doesn't replace the resource when you change this property.
pub kms_key_arn: Option<::Value<String>>,
/// Property [`ObjectKeyPrefix`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ses-receiptrule-s3action.html#cfn-ses-receiptrule-s3action-objectkeyprefix).
///
/// Update type: _Mutable_.
/// AWS CloudFormation doesn't replace the resource when you change this property.
pub object_key_prefix: Option<::Value<String>>,
/// Property [`TopicArn`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ses-receiptrule-s3action.html#cfn-ses-receiptrule-s3action-topicarn).
///
/// Update type: _Mutable_.
/// AWS CloudFormation doesn't replace the resource when you change this property.
pub topic_arn: Option<::Value<String>>,
}
impl ::codec::SerializeValue for S3Action {
fn serialize<S: ::serde::Serializer>(&self, s: S) -> Result<S::Ok, S::Error> {
let mut map = ::serde::Serializer::serialize_map(s, None)?;
::serde::ser::SerializeMap::serialize_entry(&mut map, "BucketName", &self.bucket_name)?;
if let Some(ref kms_key_arn) = self.kms_key_arn {
::serde::ser::SerializeMap::serialize_entry(&mut map, "KmsKeyArn", kms_key_arn)?;
}
if let Some(ref object_key_prefix) = self.object_key_prefix {
::serde::ser::SerializeMap::serialize_entry(&mut map, "ObjectKeyPrefix", object_key_prefix)?;
}
if let Some(ref topic_arn) = self.topic_arn {
::serde::ser::SerializeMap::serialize_entry(&mut map, "TopicArn", topic_arn)?;
}
::serde::ser::SerializeMap::end(map)
}
}
impl ::codec::DeserializeValue for S3Action {
fn deserialize<'de, D: ::serde::Deserializer<'de>>(d: D) -> Result<S3Action, D::Error> {
struct Visitor;
impl<'de> ::serde::de::Visitor<'de> for Visitor {
type Value = S3Action;
fn expecting(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
write!(f, "a struct of type S3Action")
}
fn visit_map<A: ::serde::de::MapAccess<'de>>(self, mut map: A) -> Result<Self::Value, A::Error> {
let mut bucket_name: Option<::Value<String>> = None;
let mut kms_key_arn: Option<::Value<String>> = None;
let mut object_key_prefix: Option<::Value<String>> = None;
let mut topic_arn: Option<::Value<String>> = None;
while let Some(__cfn_key) = ::serde::de::MapAccess::next_key::<String>(&mut map)? {
match __cfn_key.as_ref() {
"BucketName" => {
bucket_name = ::serde::de::MapAccess::next_value(&mut map)?;
}
"KmsKeyArn" => {
kms_key_arn = ::serde::de::MapAccess::next_value(&mut map)?;
}
"ObjectKeyPrefix" => {
object_key_prefix = ::serde::de::MapAccess::next_value(&mut map)?;
}
"TopicArn" => {
topic_arn = ::serde::de::MapAccess::next_value(&mut map)?;
}
_ => {}
}
}
Ok(S3Action {
bucket_name: bucket_name.ok_or(::serde::de::Error::missing_field("BucketName"))?,
kms_key_arn: kms_key_arn,
object_key_prefix: object_key_prefix,
topic_arn: topic_arn,
})
}
}
d.deserialize_map(Visitor)
}
}
/// The [`AWS::SES::ReceiptRule.SNSAction`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ses-receiptrule-snsaction.html) property type.
#[derive(Debug, Default)]
pub struct SNSAction {
    /// Property [`Encoding`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ses-receiptrule-snsaction.html#cfn-ses-receiptrule-snsaction-encoding).
    ///
    /// Update type: _Mutable_.
    /// AWS CloudFormation doesn't replace the resource when you change this property.
    pub encoding: Option<::Value<String>>,
    /// Property [`TopicArn`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ses-receiptrule-snsaction.html#cfn-ses-receiptrule-snsaction-topicarn).
    ///
    /// Update type: _Mutable_.
    /// AWS CloudFormation doesn't replace the resource when you change this property.
    pub topic_arn: Option<::Value<String>>,
}
impl ::codec::SerializeValue for SNSAction {
    fn serialize<S: ::serde::Serializer>(&self, s: S) -> Result<S::Ok, S::Error> {
        let mut map = ::serde::Serializer::serialize_map(s, None)?;
        // Both properties are optional; unset ones are omitted from the map.
        if let Some(ref encoding) = self.encoding {
            ::serde::ser::SerializeMap::serialize_entry(&mut map, "Encoding", encoding)?;
        }
        if let Some(ref topic_arn) = self.topic_arn {
            ::serde::ser::SerializeMap::serialize_entry(&mut map, "TopicArn", topic_arn)?;
        }
        ::serde::ser::SerializeMap::end(map)
    }
}
impl ::codec::DeserializeValue for SNSAction {
    fn deserialize<'de, D: ::serde::Deserializer<'de>>(d: D) -> Result<SNSAction, D::Error> {
        struct Visitor;
        impl<'de> ::serde::de::Visitor<'de> for Visitor {
            type Value = SNSAction;
            fn expecting(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
                write!(f, "a struct of type SNSAction")
            }
            fn visit_map<A: ::serde::de::MapAccess<'de>>(self, mut map: A) -> Result<Self::Value, A::Error> {
                let mut encoding: Option<::Value<String>> = None;
                let mut topic_arn: Option<::Value<String>> = None;
                // Unknown keys are skipped; known keys keep the last value seen.
                while let Some(__cfn_key) = ::serde::de::MapAccess::next_key::<String>(&mut map)? {
                    match __cfn_key.as_ref() {
                        "Encoding" => encoding = ::serde::de::MapAccess::next_value(&mut map)?,
                        "TopicArn" => topic_arn = ::serde::de::MapAccess::next_value(&mut map)?,
                        _ => {}
                    }
                }
                // Field-init shorthand (clippy: redundant_field_names).
                Ok(SNSAction {
                    encoding,
                    topic_arn,
                })
            }
        }
        d.deserialize_map(Visitor)
    }
}
/// The [`AWS::SES::ReceiptRule.StopAction`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ses-receiptrule-stopaction.html) property type.
#[derive(Debug, Default)]
pub struct StopAction {
/// Property [`Scope`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ses-receiptrule-stopaction.html#cfn-ses-receiptrule-stopaction-scope).
///
/// Update type: _Mutable_.
/// AWS CloudFormation doesn't replace the resource when you change this property.
pub scope: ::Value<String>,
/// Property [`TopicArn`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ses-receiptrule-stopaction.html#cfn-ses-receiptrule-stopaction-topicarn).
///
/// Update type: _Mutable_.
/// AWS CloudFormation doesn't replace the resource when you change this property.
pub topic_arn: Option<::Value<String>>,
}
impl ::codec::SerializeValue for StopAction {
fn serialize<S: ::serde::Serializer>(&self, s: S) -> Result<S::Ok, S::Error> {
let mut map = ::serde::Serializer::serialize_map(s, None)?;
::serde::ser::SerializeMap::serialize_entry(&mut map, "Scope", &self.scope)?;
if let Some(ref topic_arn) = self.topic_arn {
::serde::ser::SerializeMap::serialize_entry(&mut map, "TopicArn", topic_arn)?;
}
::serde::ser::SerializeMap::end(map)
}
}
impl ::codec::DeserializeValue for StopAction {
fn deserialize<'de, D: ::serde::Deserializer<'de>>(d: D) -> Result<StopAction, D::Error> {
struct Visitor;
impl<'de> ::serde::de::Visitor<'de> for Visitor {
type Value = StopAction;
fn expecting(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
write!(f, "a struct of type StopAction")
}
fn visit_map<A: ::serde::de::MapAccess<'de>>(self, mut map: A) -> Result<Self::Value, A::Error> {
let mut scope: Option<::Value<String>> = None;
let mut topic_arn: Option<::Value<String>> = None;
while let Some(__cfn_key) = ::serde::de::MapAccess::next_key::<String>(&mut map)? {
match __cfn_key.as_ref() {
"Scope" => {
scope = ::serde::de::MapAccess::next_value(&mut map)?;
}
"TopicArn" => {
topic_arn = ::serde::de::MapAccess::next_value(&mut map)?;
}
_ => {}
}
}
Ok(StopAction {
scope: scope.ok_or(::serde::de::Error::missing_field("Scope"))?,
topic_arn: topic_arn,
})
}
}
d.deserialize_map(Visitor)
}
}
/// The [`AWS::SES::ReceiptRule.WorkmailAction`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ses-receiptrule-workmailaction.html) property type.
#[derive(Debug, Default)]
pub struct WorkmailAction {
/// Property [`OrganizationArn`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ses-receiptrule-workmailaction.html#cfn-ses-receiptrule-workmailaction-organizationarn).
///
/// Update type: _Mutable_.
/// AWS CloudFormation doesn't replace the resource when you change this property.
pub organization_arn: ::Value<String>,
/// Property [`TopicArn`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ses-receiptrule-workmailaction.html#cfn-ses-receiptrule-workmailaction-topicarn).
///
/// Update type: _Mutable_.
/// AWS CloudFormation doesn't replace the resource when you change this property.
pub topic_arn: Option<::Value<String>>,
}
impl ::codec::SerializeValue for WorkmailAction {
fn serialize<S: ::serde::Serializer>(&self, s: S) -> Result<S::Ok, S::Error> {
let mut map = ::serde::Serializer::serialize_map(s, None)?;
::serde::ser::SerializeMap::serialize_entry(&mut map, "OrganizationArn", &self.organization_arn)?;
if let Some(ref topic_arn) = self.topic_arn {
::serde::ser::SerializeMap::serialize_entry(&mut map, "TopicArn", topic_arn)?;
}
::serde::ser::SerializeMap::end(map)
}
}
impl ::codec::DeserializeValue for WorkmailAction {
fn deserialize<'de, D: ::serde::Deserializer<'de>>(d: D) -> Result<WorkmailAction, D::Error> {
struct Visitor;
impl<'de> ::serde::de::Visitor<'de> for Visitor {
type Value = WorkmailAction;
fn expecting(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
write!(f, "a struct of type WorkmailAction")
}
fn visit_map<A: ::serde::de::MapAccess<'de>>(self, mut map: A) -> Result<Self::Value, A::Error> {
let mut organization_arn: Option<::Value<String>> = None;
let mut topic_arn: Option<::Value<String>> = None;
while let Some(__cfn_key) = ::serde::de::MapAccess::next_key::<String>(&mut map)? {
match __cfn_key.as_ref() {
"OrganizationArn" => {
organization_arn = ::serde::de::MapAccess::next_value(&mut map)?;
}
"TopicArn" => {
topic_arn = ::serde::de::MapAccess::next_value(&mut map)?;
}
_ => {}
}
}
Ok(WorkmailAction {
organization_arn: organization_arn.ok_or(::serde::de::Error::missing_field("OrganizationArn"))?,
topic_arn: topic_arn,
})
}
}
d.deserialize_map(Visitor)
}
}
}
pub mod template {
    //! Property types for the `Template` resource.

    /// The [`AWS::SES::Template.Template`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ses-template-template.html) property type.
    #[derive(Debug, Default)]
    pub struct Template {
        /// Property [`HtmlPart`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ses-template-template.html#cfn-ses-template-template-htmlpart).
        ///
        /// Update type: _Mutable_.
        /// AWS CloudFormation doesn't replace the resource when you change this property.
        pub html_part: Option<::Value<String>>,
        /// Property [`SubjectPart`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ses-template-template.html#cfn-ses-template-template-subjectpart).
        ///
        /// Update type: _Mutable_.
        /// AWS CloudFormation doesn't replace the resource when you change this property.
        pub subject_part: Option<::Value<String>>,
        /// Property [`TemplateName`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ses-template-template.html#cfn-ses-template-template-templatename).
        ///
        /// Update type: _Immutable_.
        /// AWS CloudFormation replaces the resource when you change this property.
        pub template_name: Option<::Value<String>>,
        /// Property [`TextPart`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ses-template-template.html#cfn-ses-template-template-textpart).
        ///
        /// Update type: _Mutable_.
        /// AWS CloudFormation doesn't replace the resource when you change this property.
        pub text_part: Option<::Value<String>>,
    }
    impl ::codec::SerializeValue for Template {
        fn serialize<S: ::serde::Serializer>(&self, s: S) -> Result<S::Ok, S::Error> {
            let mut map = ::serde::Serializer::serialize_map(s, None)?;
            // Every property is optional; unset ones are omitted from the map.
            if let Some(ref html_part) = self.html_part {
                ::serde::ser::SerializeMap::serialize_entry(&mut map, "HtmlPart", html_part)?;
            }
            if let Some(ref subject_part) = self.subject_part {
                ::serde::ser::SerializeMap::serialize_entry(&mut map, "SubjectPart", subject_part)?;
            }
            if let Some(ref template_name) = self.template_name {
                ::serde::ser::SerializeMap::serialize_entry(&mut map, "TemplateName", template_name)?;
            }
            if let Some(ref text_part) = self.text_part {
                ::serde::ser::SerializeMap::serialize_entry(&mut map, "TextPart", text_part)?;
            }
            ::serde::ser::SerializeMap::end(map)
        }
    }
    impl ::codec::DeserializeValue for Template {
        fn deserialize<'de, D: ::serde::Deserializer<'de>>(d: D) -> Result<Template, D::Error> {
            struct Visitor;
            impl<'de> ::serde::de::Visitor<'de> for Visitor {
                type Value = Template;
                fn expecting(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
                    write!(f, "a struct of type Template")
                }
                fn visit_map<A: ::serde::de::MapAccess<'de>>(self, mut map: A) -> Result<Self::Value, A::Error> {
                    let mut html_part: Option<::Value<String>> = None;
                    let mut subject_part: Option<::Value<String>> = None;
                    let mut template_name: Option<::Value<String>> = None;
                    let mut text_part: Option<::Value<String>> = None;
                    // Unknown keys are skipped; known keys keep the last value seen.
                    while let Some(__cfn_key) = ::serde::de::MapAccess::next_key::<String>(&mut map)? {
                        match __cfn_key.as_ref() {
                            "HtmlPart" => html_part = ::serde::de::MapAccess::next_value(&mut map)?,
                            "SubjectPart" => subject_part = ::serde::de::MapAccess::next_value(&mut map)?,
                            "TemplateName" => template_name = ::serde::de::MapAccess::next_value(&mut map)?,
                            "TextPart" => text_part = ::serde::de::MapAccess::next_value(&mut map)?,
                            _ => {}
                        }
                    }
                    // Field-init shorthand (clippy: redundant_field_names).
                    Ok(Template {
                        html_part,
                        subject_part,
                        template_name,
                        text_part,
                    })
                }
            }
            d.deserialize_map(Visitor)
        }
    }
}
| true |
68052910a79955b3f377fcaf0d91272b0469cfb4
|
Rust
|
SkyMomentum/tonegen-rs
|
/src/wavfile/util.rs
|
UTF-8
| 2,639 | 3.625 | 4 |
[] |
no_license
|
/// Transmute and copy to u8 array.
///
/// # Unsafe
/// Forces $targ to be represented as an array of u8. $num_bytes is assumed to be the number of
/// bytes $targ occupies. $num_bytes must also be the same size as the $outbuf used. First param
/// is just for readability at use site.
///
#[macro_export]
macro_rules! transmute_to_u8_from {
    ($t:ty, $num_bytes:expr, $targ:expr, $outbuf: expr) => ({
        use std::mem::transmute;
        use std::ptr::copy_nonoverlapping;
        // SAFETY: sound only if `$targ` is exactly `$num_bytes` bytes wide and
        // `$outbuf` holds at least `$num_bytes` bytes. Neither condition is
        // checked here — callers must guarantee both, otherwise this is UB.
        unsafe {
            let buf = transmute::<_, [u8; $num_bytes]>($targ);
            copy_nonoverlapping( (&buf).as_ptr(), $outbuf.as_mut_ptr(), $num_bytes );
        }
    });
}
/// Setup buffer for transmutation then call into macro.
///
/// The macro sets up the boilerplate to call the transmute_to_u8_from! macro. $fnc is a fn that
/// exists to call the transmute macro with correct parameters for converting a specific type.
///
/// ```ignore
/// fn u16_to_u8(target: u16, output: &mut [u8]) {
///     transmute_to_u8_from!(u16, 2, target, output);
/// }
/// ```
/// Naming the function as x_to_u8 increases readability at use site. Having the target byte size
/// as last param doesn't.
/// ```ignore
/// do_transmute!(u16_to_u8, input, &mut buf, &mut buf_offset, 2);
/// ```
#[macro_export]
macro_rules! do_transmute {
    ($fnc:ident, $input:expr, $to_buf:expr, $offset:expr, $size:expr) => ({
        #[allow(unused_imports)]
        use util::{u16_to_u8, u32_to_u8, f32_to_u8, append_bytes};
        // Stack scratch buffer; $size must match the byte width of $input's type.
        let mut transmutebuf: [u8; $size] = [0; $size];
        $fnc($input, &mut transmutebuf);
        // append_bytes returns the number of bytes it reports written, which is
        // used here to advance the caller's running offset.
        *$offset = *$offset + append_bytes( &transmutebuf, $to_buf, *$offset);
    });
}
/// Helper function to convert u16 to a [u8; 2].
///
/// Writes the native-endian byte representation of `target` into the first
/// two bytes of `output`. Panics if `output` is shorter than two bytes — the
/// previous transmute/raw-copy version was undefined behavior in that case.
pub fn u16_to_u8(target: u16, output: &mut [u8]) {
    output[..2].copy_from_slice(&target.to_ne_bytes());
}
/// Helper function to convert u32 to a [u8; 4].
///
/// Writes the native-endian byte representation of `target` into the first
/// four bytes of `output`. Panics if `output` is shorter than four bytes — the
/// previous transmute/raw-copy version was undefined behavior in that case.
pub fn u32_to_u8(target: u32, output: &mut [u8]) {
    output[..4].copy_from_slice(&target.to_ne_bytes());
}
/// Helper function to convert f32 to a [u8; 4].
///
/// Writes the native-endian byte representation of `target` into the first
/// four bytes of `output`. Panics if `output` is shorter than four bytes — the
/// previous transmute/raw-copy version was undefined behavior in that case.
pub fn f32_to_u8(target: f32, output: &mut [u8]) {
    output[..4].copy_from_slice(&target.to_ne_bytes());
}
/// Function to bytewise copy from &[u8] to an &mut [u8], inserting at offset.
///
/// Returns the number of bytes actually written: `bytes.len()` when the copy
/// fits, `0` when it would run past the end of `buf`. (The previous version
/// returned `bytes.len()` even when nothing was copied, silently corrupting
/// the caller's offset bookkeeping, and `bytes.len() + offset` could overflow.)
pub fn append_bytes(bytes: &[u8], buf: &mut [u8], offset: usize) -> usize {
    match offset.checked_add(bytes.len()) {
        Some(end) if end <= buf.len() => {
            // memcpy into the target window.
            buf[offset..end].copy_from_slice(bytes);
            bytes.len()
        }
        // Out of bounds (or arithmetic overflow): nothing written.
        _ => 0,
    }
}
/// Utility fn that resets every byte of the given slice to zero.
pub fn zero_u8_array(targ: &mut [u8]) {
    for byte in targ.iter_mut() {
        *byte = 0;
    }
}
| true |
0c7ab5eddcb2e5bf948478632273bd3dfdbb8c91
|
Rust
|
LeJane/rust
|
/v1/src/postgresql_db_contexts/quests_attribute_assets.rs
|
UTF-8
| 3,320 | 2.53125 | 3 |
[] |
no_license
|
use crate::models::quests_attribute_assets::QuestsAttributeAsset;
use crate::schema::quests_attribute_assets;
use diesel::prelude::*;
use crate::{utils::binary_read_helper::*, BinaryDecode, BinaryEncode};
use anyhow::Result;
use std::io::Cursor;
use crate::{
FrontDisplayMetaVersion, FrontDisplayMetaVersionRelation, MetadataInstance, MetadataTypeEnum,
TableIdEnum,
};
impl QuestsAttributeAsset {
    /// Loads every attribute-asset row whose `quests_id` column matches the
    /// given quest id.
    pub fn get_quest_attribute_asset_list_by_quests_id(
        conn: &PgConnection,
        quests_id: i64,
    ) -> QueryResult<Vec<Self>> {
        quests_attribute_assets::table
            .filter(quests_attribute_assets::quests_id.eq(quests_id))
            .load(conn)
    }
    /// Loads all rows of the `quests_attribute_assets` table.
    pub fn get_quest_attribute_asset_list(conn: &PgConnection) -> QueryResult<Vec<Self>> {
        quests_attribute_assets::table
            .load(conn)
    }
    /// Fetches the single row whose `id` column matches; `first` returns an
    /// error result when no such row exists.
    pub fn get_quest_attribute_asset_list_by_id(conn: &PgConnection, id: i64) -> QueryResult<Self> {
        quests_attribute_assets::table
            .filter(quests_attribute_assets::id.eq(id))
            .first(conn)
    }
}
impl MetadataInstance for QuestsAttributeAsset {
    /// Table identifier for this metadata type, taken from `TableIdEnum`.
    fn get_table_id() -> Result<i32> {
        Ok(TableIdEnum::QuestsAttributeAsset.to_i32())
    }
    /// Loads the row with primary key `id` and wraps it in the metadata enum.
    fn get_single_instance(conn: &PgConnection, id: i64) -> Result<MetadataTypeEnum> {
        Ok(MetadataTypeEnum::QuestsAttributeAsset(
            QuestsAttributeAsset::get_quest_attribute_asset_list_by_id(conn, id)?,
        ))
    }
    /// Loads every row and returns them as display relations with
    /// `action_type` 0 under `update_type` 2.
    fn get_instance_list(conn: &PgConnection) -> Result<FrontDisplayMetaVersion> {
        let table_id = Self::get_table_id()?;
        let mut data_list = Vec::new();
        for row in QuestsAttributeAsset::get_quest_attribute_asset_list(conn)? {
            data_list.push(FrontDisplayMetaVersionRelation {
                action_type: 0,
                table_id,
                data: MetadataTypeEnum::QuestsAttributeAsset(row),
            });
        }
        Ok(FrontDisplayMetaVersion {
            update_type: 2,
            data_list,
        })
    }
}
impl BinaryEncode for QuestsAttributeAsset {
    /// Serializes the record into its flat binary wire format.
    ///
    /// The write order defines the format and must mirror
    /// `BinaryDecode::decode` exactly:
    /// id, quests_id, attribute_id, amounts, modify_time, created_time.
    fn encode(&self) -> Result<Vec<u8>> {
        let mut encoded = Vec::new();
        binary_write_i64(&mut encoded, self.id)?;
        binary_write_i64(&mut encoded, self.quests_id)?;
        binary_write_i32(&mut encoded, self.attribute_id)?;
        binary_write_i32(&mut encoded, self.amounts)?;
        binary_write_time(&mut encoded, self.modify_time)?;
        binary_write_time(&mut encoded, self.created_time)?;
        //set item length
        // NOTE(review): the final `encoded.encode()` delegates to the
        // `BinaryEncode` impl for `Vec<u8>` — presumably it prefixes the
        // payload with its length; confirm against that impl.
        encoded.encode()
    }
}
impl<'a> BinaryDecode<'a> for QuestsAttributeAsset {
    /// Reads one record from the cursor.
    ///
    /// Field read order must mirror `BinaryEncode::encode` exactly:
    /// id, quests_id, attribute_id, amounts, modify_time, created_time.
    fn decode(cursor: &mut Cursor<&'a [u8]>, bytes: &'a [u8]) -> Result<QuestsAttributeAsset> {
        let id = binary_read_i64(cursor)?;
        let quests_id = binary_read_i64(cursor)?;
        let attribute_id = binary_read_i32(cursor)?;
        let amounts = binary_read_i32(cursor)?;
        // Time fields need the backing byte slice in addition to the cursor.
        let modify_time = binary_read_time(cursor, bytes)?;
        let created_time = binary_read_time(cursor, bytes)?;
        let data = QuestsAttributeAsset {
            id,
            quests_id,
            attribute_id,
            amounts,
            modify_time,
            created_time,
        };
        Ok(data)
    }
}
| true |
c2ff2bcf005e4e64e81645ae4d4c1643b75e0911
|
Rust
|
saethlin/bloomset
|
/src/lib.rs
|
UTF-8
| 6,439 | 3.296875 | 3 |
[
"MIT"
] |
permissive
|
//! A set-like data structure that is the same size as a `HashSet<T>` but has faster best-case
//! membership checks, because `x86_64` only supports 48 bits of address space. So we can embed a
//! bloom filter in the 32 free bits between its capacity and length.
#![warn(clippy::pedantic, clippy::nursery, clippy::restriction)]
#![deny(clippy::missing_inline_in_public_items)]
use std::hash::{Hash, Hasher};
use std::mem::ManuallyDrop;
use std::ptr::NonNull;
use std::slice;
pub struct BloomSet<T> {
    ptr: NonNull<T>,
    // Low byte: element count (see `len()`); upper 56 bits: one half of the
    // embedded bloom filter (see `bloom_contains`).
    length: usize,
    // Low byte: allocation capacity (see `capacity()`); upper 56 bits: the
    // other half of the bloom filter.
    capacity: usize,
}
/// One-byte XOR-fold hasher used to pick bloom-filter bits.
#[derive(Default)]
pub struct BloomHasher {
    state: u8,
}

impl Hasher for BloomHasher {
    #[inline]
    fn write(&mut self, bytes: &[u8]) {
        // Fold every input byte into the single-byte state via XOR.
        self.state = bytes.iter().fold(self.state, |acc, b| acc ^ b);
    }
    #[inline]
    fn finish(&self) -> u64 {
        // Widen the byte losslessly to the trait's u64 result type.
        self.state.into()
    }
}
impl<T> Default for BloomSet<T> {
    // Empty set with no heap allocation: a dangling (well-aligned, non-null)
    // pointer with zero count/capacity and all bloom bits cleared.
    #[inline]
    fn default() -> Self {
        Self {
            ptr: NonNull::dangling(),
            length: 0,
            capacity: 0,
        }
    }
}
impl<T> BloomSet<T> {
    /// Creates an empty set without allocating.
    #[inline]
    #[must_use]
    pub fn new() -> Self {
        Self::default()
    }

    /// Creates an empty set with room for `cap` elements.
    ///
    /// # Panics
    /// Panics if `cap` exceeds 255: the count and capacity each occupy only
    /// the low byte of their word (the upper 56 bits hold bloom-filter
    /// state), so a larger value would corrupt the filter bits.
    #[inline]
    #[must_use]
    pub fn with_capacity(cap: usize) -> Self {
        // Bug fix: previously unchecked, so a cap > 255 silently spilled into
        // the bloom bits; `insert_resizing` already enforces the same limit.
        assert!(cap <= 255, "A BloomSet's capacity cannot exceed 255");
        let mut vec = ManuallyDrop::new(Vec::with_capacity(cap));
        let ptr = unsafe { NonNull::new_unchecked(vec.as_mut_ptr()) };
        Self {
            ptr,
            length: 0,
            capacity: cap,
        }
    }

    /// Returns `true` if the set contains no elements.
    #[inline]
    #[must_use]
    pub const fn is_empty(&self) -> bool {
        // Bug fix: this previously returned `self.len() != 0`, the exact
        // inverse of emptiness.
        self.len() == 0
    }

    /// Number of elements currently stored (low byte of `length`).
    #[inline]
    #[must_use]
    pub const fn len(&self) -> usize {
        self.length & 0x0000_0000_0000_00FF
    }

    /// Allocated capacity in elements (low byte of `capacity`).
    #[inline]
    #[must_use]
    pub const fn capacity(&self) -> usize {
        self.capacity & 0x0000_0000_0000_00FF
    }

    /// Raw pointer to the element storage (dangling while unallocated).
    #[inline]
    #[must_use]
    pub const fn as_mut_ptr(&self) -> *mut T {
        self.ptr.as_ptr()
    }

    /// Borrows the stored elements as a slice.
    #[inline]
    #[must_use]
    pub fn as_slice(&self) -> &[T] {
        // SAFETY: `ptr` and `len()` always describe the initialized prefix of
        // the allocation (or a dangling pointer with len 0, which is valid
        // for a zero-length slice).
        unsafe { slice::from_raw_parts(self.as_mut_ptr(), self.len()) }
    }

    /// Grow-and-push slow path, used when `len() == capacity()`.
    ///
    /// NOTE(review): only the capacity byte is updated here; the element
    /// count byte of `length` appears to be maintained by the caller
    /// (`insert`) — confirm at the call site.
    ///
    /// NOTE(review): after `push` the Vec's capacity may double past 255
    /// (e.g. 128 -> 256); the pre-push check below does not catch that, and
    /// the low-byte mask would then truncate the stored capacity. Consider
    /// growing with `reserve_exact` capped at 255.
    #[inline(never)]
    fn insert_resizing(&mut self, item: T) {
        let mut vec = unsafe {
            // Use ManuallyDrop to ensure that the Vec is never dropped; we
            // only borrow its grow-and-push machinery, ownership stays here.
            ManuallyDrop::new(Vec::from_raw_parts(
                self.as_mut_ptr(),
                self.len(),
                self.capacity(),
            ))
        };
        if vec.capacity() > u8::MAX as usize {
            panic!("A BloomSet's capacity cannot exceed 255");
        }
        vec.push(item);
        unsafe { self.ptr = NonNull::new_unchecked(vec.as_mut_ptr()) };
        // Store the (possibly changed) capacity byte while preserving the
        // bloom-filter bits held in the upper 56 bits.
        self.capacity =
            (vec.capacity() & 0x0000_0000_0000_00FF) | (self.capacity & 0xFFFF_FFFF_FFFF_FF00);
    }

    /// Drops every element and clears the bloom filter; the allocation and
    /// its capacity are kept for reuse.
    pub fn clear(&mut self) {
        let mut vec = unsafe {
            // Use ManuallyDrop to ensure that the Vec is never dropped
            ManuallyDrop::new(Vec::from_raw_parts(
                self.as_mut_ptr(),
                self.len(),
                self.capacity(),
            ))
        };
        // Drop all the elements
        vec.clear();
        // Zero the bloom-filter half stored in `capacity` (low byte kept).
        self.capacity &= 0x0000_0000_0000_00FF;
        // Zero both the element count and the bloom half stored in `length`.
        self.length = 0;
    }

    /// Tests bit `bloom_bit` of the 112-bit filter: bits 56..112 live in the
    /// upper 56 bits of `length`, bits 0..56 in the upper bits of `capacity`.
    #[inline]
    #[must_use]
    const fn bloom_contains(&self, bloom_bit: u64) -> bool {
        if bloom_bit >= 56 {
            let bloom = 1 << (8 + bloom_bit - 56);
            (self.length & 0xFFFF_FFFF_FFFF_FF00 & bloom) != 0
        } else {
            let bloom = 1 << (8 + bloom_bit);
            (self.capacity & 0xFFFF_FFFF_FFFF_FF00 & bloom) != 0
        }
    }
}
impl<T: Hash + PartialEq> BloomSet<T> {
    /// Inserts `item` if it is not already present.
    ///
    /// The bloom filter is checked first; only on a possible hit do we fall
    /// back to a linear scan of the elements.
    #[inline]
    pub fn insert(&mut self, item: T) {
        let mut hasher = BloomHasher { state: 0 };
        item.hash(&mut hasher);
        let hash = hasher.finish();
        // Fold the 0..=255 hash into the 112-bit filter range. For a byte
        // input these subtractions are equivalent to `hash % 112` (used by
        // `contains`).
        let mut bloom_bit = hash;
        if bloom_bit >= 224 {
            bloom_bit -= 224;
        } else if bloom_bit >= 112 {
            bloom_bit -= 112;
        }
        // Probe-and-set: if the bit was clear, the item is definitely new and
        // we set the bit; if it was set, we must scan to rule out a collision.
        let maybe_in_set = if bloom_bit >= 56 {
            let bloom = 1 << (8 + bloom_bit - 56);
            if (self.length & 0xFFFF_FFFF_FFFF_FF00 & bloom) != 0 {
                true
            } else {
                self.length |= bloom;
                false
            }
        } else {
            let bloom = 1 << (8 + bloom_bit);
            if (self.capacity & 0xFFFF_FFFF_FFFF_FF00 & bloom) != 0 {
                true
            } else {
                self.capacity |= bloom;
                false
            }
        };
        let in_set = if maybe_in_set {
            self.as_slice().iter().any(|it| *it == item)
        } else {
            false
        };
        if !in_set {
            if self.len() == self.capacity() {
                self.insert_resizing(item);
            } else {
                unsafe {
                    use std::convert::TryInto;
                    // BUG FIX: this was `*ptr.offset(..) = item`, which runs
                    // `T`'s destructor on the uninitialized memory behind the
                    // slot. `ptr::write` stores without reading or dropping
                    // the old value.
                    std::ptr::write(
                        self.ptr.as_ptr().offset(self.len().try_into().unwrap()),
                        item,
                    );
                }
            }
            self.length += 1;
        }
    }

    /// Membership test: bloom-filter probe first, linear scan only on a
    /// possible hit.
    #[inline]
    pub fn contains<B: std::borrow::Borrow<T>>(&self, item: B) -> bool {
        let item = item.borrow();
        let mut hasher = BloomHasher { state: 0 };
        item.hash(&mut hasher);
        let hash = hasher.finish();
        // Equivalent to the subtraction folding in `insert` for byte hashes.
        let bloom_bit = hash % 112;
        let maybe_in_set = self.bloom_contains(bloom_bit);
        if maybe_in_set {
            self.as_slice().iter().any(|it| it == item)
        } else {
            false
        }
    }
}
impl<T> Drop for BloomSet<T> {
    #[inline]
    fn drop(&mut self) {
        // Reconstitute the backing Vec from the masked length/capacity and let
        // it drop immediately: this drops every element and frees the buffer.
        unsafe { Vec::from_raw_parts(self.as_mut_ptr(), self.len(), self.capacity()) };
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    // The whole point of the layout trick: no extra words beyond a Vec.
    #[test]
    fn size_is_right() {
        use core::mem::size_of;
        assert_eq!(size_of::<BloomSet<u8>>(), size_of::<Vec<u8>>());
    }
    // Inserting duplicates must not grow the set.
    #[test]
    fn insert() {
        let mut set = BloomSet::default();
        assert_eq!(set.len(), 0);
        set.insert(2u8);
        assert_eq!(set.len(), 1);
        set.insert(4u8);
        assert_eq!(set.len(), 2);
        assert_eq!(set.as_slice()[0], 2);
        assert_eq!(set.as_slice()[1], 4);
        assert_eq!(set.as_slice().len(), 2);
        set.insert(2);
        assert_eq!(set.len(), 2);
        set.insert(31);
        assert_eq!(set.len(), 3);
        set.insert(31);
        assert_eq!(set.len(), 3);
    }
}
| true |
7d27b03b95ab8abca739a288f1149569dd3aadfd
|
Rust
|
spacejam/demikernel
|
/src/rust/catnip/src/protocols/arp/options.rs
|
UTF-8
| 954 | 2.703125 | 3 |
[
"Apache-2.0",
"MIT"
] |
permissive
|
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT license.
use std::time::Duration;
#[derive(Clone, Debug)]
pub struct ArpOptions {
    pub cache_ttl: Duration,
    pub request_timeout: Duration,
    pub retry_count: usize,
}

impl Default for ArpOptions {
    /// Conservative defaults: 15 s cache lifetime, 20 s per request, 5 retries.
    fn default() -> Self {
        Self {
            cache_ttl: Duration::from_secs(15),
            request_timeout: Duration::from_secs(20),
            retry_count: 5,
        }
    }
}

/// Consuming builder-style setters; every duration must be positive and the
/// retry count non-zero (enforced by assertion).
impl ArpOptions {
    pub fn cache_ttl(self, value: Duration) -> Self {
        assert!(value > Duration::new(0, 0));
        Self { cache_ttl: value, ..self }
    }
    pub fn request_timeout(self, value: Duration) -> Self {
        assert!(value > Duration::new(0, 0));
        Self { request_timeout: value, ..self }
    }
    pub fn retry_count(self, value: usize) -> Self {
        assert!(value > 0);
        Self { retry_count: value, ..self }
    }
}
| true |
81ca19a099be553f6464861497c3c7df586f0b01
|
Rust
|
ajunlonglive/may_http
|
/examples/echo.rs
|
UTF-8
| 650 | 2.59375 | 3 |
[
"MIT",
"Apache-2.0"
] |
permissive
|
use std::io::{Read, Write};
use may_http::server::*;
// test with: curl -v POST -d "asdfasdfasf" "http://127.0.0.1:8080/"
// test with: curl -v POST --header "Transfer-Encoding: chunked" -d "hello chunk" "http://127.0.0.1:8080/"
/// Request handler: reads the entire request body and echoes it back,
/// prefixed with "got data: ".
fn hello(mut req: Request, rsp: &mut Response) {
    let mut body = String::new();
    req.read_to_string(&mut body).unwrap();
    let reply = format!("got data: {}", body);
    rsp.write_all(reply.as_bytes()).unwrap();
}
fn main() {
    // One worker coroutine scheduler thread with a 64 KiB stack per coroutine.
    may::config().set_workers(1).set_stack_size(0x10000);
    env_logger::init();
    // Serve `hello` on port 8080 and block until the server stops.
    let server = HttpServer::new(hello).start("127.0.0.1:8080").unwrap();
    server.wait();
    // Grace period before process exit once the server has finished.
    std::thread::sleep(std::time::Duration::from_secs(10));
}
| true |
4543005b321660cab3d3853f256e48cfaf49cb75
|
Rust
|
regexident/RustUI
|
/src/view_components/widgets/scrollbar.rs
|
UTF-8
| 6,111 | 2.96875 | 3 |
[
"MIT"
] |
permissive
|
extern crate sdl2;
use sdl2::pixels::Color;
use sdl2::rect::Rect;
use sdl2::event::Event;
use crate::view_components::{ViewComponent, IntoViewComponent};
use crate::backend::system::window::Window;
use crate::colors;
use crate::Orientation;
use super::{Widget, WidgetState};
use super::Text;
/// A draggable slider widget mapping a pixel position on a rail to an integer
/// value in `[from, to]`.
pub struct ScrollBar<T> {
    id: &'static str,
    from: i32,                 // minimum value
    to: i32,                   // maximum value
    value: i32,                // current value
    slider: Rect,              // the draggable handle (also the hit-test rect)
    rail: Rect,                // the track the handle slides along
    value_text: Option<Text<T>>,
    // Invoked with the new value whenever the slider moves.
    on_value_changed: Option<Box<dyn Fn(&mut T, i32)>>,
    slider_passive_color: Color,
    slider_hover_color: Color,
    slider_active_color: Color,
    rail_passive_color: Color,
    rail_hover_color: Color,
    orientation: Orientation,
}
// TODO: Finish implementing orientations
impl<T> ScrollBar<T> {
    /// Creates a horizontal scroll bar spanning `[from, to]` starting at
    /// `current`, with default geometry (12x16 handle on a 120x4 rail) and
    /// default colors.
    pub fn new(id: &'static str, from: i32, to: i32, current: i32) -> Self {
        // Field-init shorthand instead of the redundant `id: id` style.
        ScrollBar {
            id,
            from,
            to,
            value: current,
            slider: Rect::new(0, 0, 12, 16),
            rail: Rect::new(0, 0, 120, 4),
            value_text: None,
            on_value_changed: None,
            slider_passive_color: colors::MANILLA,
            slider_hover_color: Color::RGB(200, 200, 200),
            slider_active_color: colors::DARK_GRAY,
            rail_passive_color: colors::LIGHT_GRAY,
            rail_hover_color: colors::WHITE,
            orientation: Orientation::Horizontal,
        }
    }

    pub fn with_orientation(mut self, orientation: Orientation) -> Self {
        self.orientation = orientation;
        self
    }

    /// Sets the rail length (stored as the rail rect's width).
    pub fn with_length(mut self, length: u32) -> Self {
        self.rail.set_width(length);
        self
    }

    /// Sets the rail thickness (stored as the rail rect's height).
    pub fn with_thickness(mut self, thickness: u32) -> Self {
        self.rail.set_height(thickness);
        self
    }

    /// Registers a callback invoked with the new value on every change.
    pub fn with_on_value_changed<F: 'static + Fn(&mut T, i32)>
        (mut self, callback: F) -> Self {
        self.on_value_changed = Some(Box::new(callback));
        self
    }

    /// Maps slider location relative to the rail to slider value,
    /// clamped to `[from, to]`.
    fn pixel_to_value(&self, pixel: i32) -> i32 {
        // The two orientations differ only in which axis/extent they use;
        // the clamping afterwards is shared (previously duplicated).
        let value = match self.orientation {
            Orientation::Horizontal => {
                ((pixel - self.rail.x) * (self.to - self.from)) / self.rail.width() as i32
            }
            Orientation::Vertical => {
                ((pixel - self.rail.y) * (self.to - self.from)) / self.rail.height() as i32
            }
        };
        if value < self.from {
            self.from
        } else if value > self.to {
            self.to
        } else {
            value
        }
    }

    /// Maps slider value to slider location relative to the rail.
    /// NOTE(review): uses the horizontal axis only — vertical support is
    /// still unfinished (see the TODO on this type).
    fn value_to_pixel(&self, value: i32) -> i32 {
        (value * self.rail.width() as i32) / (self.to - self.from) + self.rail.x
    }
}
impl<T> Widget<T> for ScrollBar<T> {
    // NOTE: returns the *slider* rect, not the rail's. Mouse hit-testing wants
    // the handle, but this also affects layout (see the hack in `translate`).
    fn rect(&self) -> Rect {
        self.slider
    }
    fn id(&self) -> &'static str {
        self.id
    }
    fn text_component(&mut self) -> Option<&mut Text<T>> {
        self.value_text.as_mut()
    }
    fn assign_text_dimensions(&mut self, dims: (u32, u32)) {
        if let Some(text) = &mut self.value_text {
            text.assign_text_dimensions(dims);
        }
    }
    // Positions the rail at (x, y) and vertically centers the slider on it.
    fn place(&mut self, x: i32, y: i32) {
        self.rail.set_x(x);
        self.rail.set_y(y);
        // self.slider.set_x(self.value_to_pixel(self.value) - self.slider.width() as i32 / 2);
        self.slider.set_y(y - (self.slider.height() - self.rail.height()) as i32 / 2);
    }
    // NOTE(review): fires on *any* mouse motion — presumably the framework
    // only routes events here while this widget is active; confirm upstream.
    fn update(&mut self, state: &mut T, event: &Event) {
        match event {
            Event::MouseMotion {x, y, ..} => {
                // Recompute the value from the cursor, snap the handle to it,
                // then notify the callback (if any).
                self.value = self.pixel_to_value(*x);
                self.slider.set_x(self.value_to_pixel(self.value) - self.slider.width() as i32 / 2);
                if let Some(on_value_changed) = &self.on_value_changed {
                    (on_value_changed)(state, self.value);
                }
            }
            _ => {}
        }
    }
    // Draws the rail, then the handle on top; handle color reflects state.
    fn render(&self, window: &mut Window<T>, widget_state: WidgetState)
    where T: super::GenerateView<T> {
        // Draw rail
        match widget_state {
            // WidgetState::Active |
            // WidgetState::Hovering => window.canvas.set_draw_color(self.rail_hover_color),
            _ => window.canvas.set_draw_color(self.rail_passive_color),
        }
        window.canvas.fill_rect(self.rail).unwrap();
        // Draw slider
        match widget_state {
            WidgetState::Active => window.canvas.set_draw_color(self.slider_active_color),
            WidgetState::Hovering => window.canvas.set_draw_color(self.slider_hover_color),
            _ => window.canvas.set_draw_color(self.slider_passive_color),
        }
        window.canvas.fill_rect(self.slider).unwrap();
    }
    fn translate(&mut self, dx: i32, dy: i32) {
        self.rail.set_x(self.rail.x() + dx);
        self.rail.set_y(self.rail.y() + dy);
        // FIXME: This is an alignment hack. This should actually be done in .place,
        // but because view.align uses widget.rect(), moving this to .place
        // will only adjust the rail, not the slider.
        // This is because .rect() returns the slider's rect (for mouse handling), not the
        // rail's rect
        self.slider.set_x(self.value_to_pixel(self.value) - self.slider.width() as i32 / 2);
        self.slider.set_y(self.slider.y() + dy);
    }
    // Layout footprint: the rail's length by the handle's thickness.
    fn draw_width(&self) -> u32 {
        self.rail.width()
    }
    fn draw_height(&self) -> u32 {
        self.slider.height()
    }
}
impl<T> IntoViewComponent<T> for ScrollBar<T> where T: 'static {
    /// Boxes this scroll bar into the generic widget variant of `ViewComponent`.
    fn as_component(self) -> ViewComponent<T> {
        ViewComponent::Widget(Box::new(self))
    }
}
| true |
62f8ecd3926b265cfe5abb2de0b9b73e42b4f4c2
|
Rust
|
patahene/rrt
|
/src/hit.rs
|
UTF-8
| 1,161 | 3.109375 | 3 |
[] |
no_license
|
use crate::material::MaterialKind;
use crate::ray::Ray;
use crate::vec3::Vec3;
/// The result of a ray-object intersection.
#[derive(Debug)]
pub struct HitRecord {
    pub t: f32,                  // ray parameter at the hit point
    pub p: Vec3,                 // world-space hit point
    pub normal: Vec3,            // surface normal at the hit point
    pub material: MaterialKind,  // material of the surface that was hit
}
impl HitRecord {
    /// A zeroed record carrying only the material; `t`, `p`, and `normal`
    /// are expected to be filled in by the intersection routine.
    pub fn new(mt: MaterialKind) -> HitRecord {
        HitRecord {
            material: mt,
            t: 0.0,
            p: Vec3::zero(),
            normal: Vec3::zero(),
        }
    }
}
/// Anything a ray can intersect; returns the hit record when `r` strikes the
/// object with ray parameter in `(t_min, t_max)`, `None` otherwise.
pub trait Hittable {
    fn hit(&self, r: &Ray, t_min: f32, t_max: f32) -> Option<HitRecord>;
}
/// An owning collection of hittable objects, tested as a group.
pub struct HittableList {
    // `Send + Sync` so scenes can be shared across render threads.
    pub list: Vec<Box<dyn Hittable + Send + Sync>>,
}
impl HittableList {
    /// An empty scene.
    pub fn new() -> HittableList {
        HittableList { list: Vec::new() }
    }

    /// Returns the hit closest to the ray origin within `(t_min, t_max)`,
    /// or `None` if nothing is struck.
    pub fn hit(&self, r: &Ray, t_min: f32, t_max: f32) -> Option<HitRecord> {
        let mut closest_so_far = t_max;
        let mut nearest: Option<HitRecord> = None;
        for object in &self.list {
            // Shrink the search window to the nearest hit found so far.
            if let Some(record) = object.hit(r, t_min, closest_so_far) {
                closest_so_far = record.t;
                nearest = Some(record);
            }
        }
        nearest
    }
}
| true |
6b5496cabefb3a6cfbd5b709c64982b0c41245ef
|
Rust
|
untoldwind/t-rust-less
|
/daemon/build.rs
|
UTF-8
| 638 | 2.546875 | 3 |
[
"MIT"
] |
permissive
|
use std::env;
use std::fs;
use std::process;
use clap::Shell;
#[path = "src/cli.rs"]
mod cli;
// Build script: generates shell-completion files for the daemon's CLI into
// Cargo's OUT_DIR for Bash, Fish, Zsh, and PowerShell.
fn main() {
    let outdir = match env::var_os("OUT_DIR") {
        Some(outdir) => outdir,
        None => {
            // OUT_DIR is always set by Cargo; bail out if run outside it.
            eprintln!("OUT_DIR environment variable not defined.");
            process::exit(1);
        }
    };
    fs::create_dir_all(&outdir).unwrap();
    let mut app = cli::app();
    app.gen_completions("t-rust-less-daemon", Shell::Bash, &outdir);
    app.gen_completions("t-rust-less-daemon", Shell::Fish, &outdir);
    app.gen_completions("t-rust-less-daemon", Shell::Zsh, &outdir);
    app.gen_completions("t-rust-less-daemon", Shell::PowerShell, &outdir);
}
| true |
2d7d8232e39dde32ecd319b3e103e0094e6763a3
|
Rust
|
yoshitsugu/fdl
|
/src/note.rs
|
UTF-8
| 1,346 | 2.796875 | 3 |
[] |
no_license
|
use crate::circle::Circle;
/// The three Fun/Done/Learn flags of a retrospective note.
#[derive(Clone)]
pub struct Fdl {
    pub fun: bool,
    pub done: bool,
    pub learn: bool,
}
/// A draggable sticky note on the FDL board.
#[derive(Clone)]
pub struct Note {
    pub description: String,
    pub fdl: Fdl,            // which FDL circles the note's center falls in
    pub x: isize,            // current top-left position
    pub y: isize,
    pub width: isize,
    pub height: isize,
    pub original_x: isize,   // position when the current drag started
    pub original_y: isize,
    pub client_x: isize,     // pointer position when the current drag started
    pub client_y: isize,
    pub dragging: bool,      // true while a drag is in progress
}
impl Note {
    /// Creates a 150x100 note at `(x, y)` with all FDL flags cleared and no
    /// drag in progress.
    #[must_use]
    pub const fn new(description: String, x: isize, y: isize) -> Self {
        Self {
            description,
            fdl: Fdl {
                fun: false,
                done: false,
                learn: false,
            },
            x,
            y,
            width: 150,
            height: 100,
            original_x: 0,
            original_y: 0,
            client_x: 0,
            client_y: 0,
            dragging: false,
        }
    }

    /// Recomputes the FDL flags: each flag is set when the note's center lies
    /// inside the corresponding (fun, done, learn) circle.
    pub fn set_fdl(&mut self, fdl: (Circle, Circle, Circle)) {
        let (fun, done, learn) = fdl;
        // Hoist the center computation instead of recomputing it six times.
        let center_x = self.x + self.width / 2;
        let center_y = self.y + self.height / 2;
        self.fdl = Fdl {
            fun: fun.include(center_x, center_y),
            done: done.include(center_x, center_y),
            learn: learn.include(center_x, center_y),
        }
    }
}
| true |
791e43c6e2a98f87df158151c8d82d7eb78e9c14
|
Rust
|
sanbox-irl/bit-bots
|
/src/serialization/serialized_entity.rs
|
UTF-8
| 10,321 | 2.609375 | 3 |
[] |
no_license
|
use super::{
physics_components::*, prefab_system, ComponentBounds, ComponentDatabase, ConversantNPC, DrawRectangle,
Entity, Follow, GraphNode, GridObject, Marker, Name, NonInspectableEntities, Player, PrefabMarker,
ResourcesDatabase, SceneSwitcher, SerializableComponent, SingletonDatabase, SoundSource, Sprite,
TextSource, Transform, Velocity,
};
use serde_yaml::Value as YamlValue;
use uuid::Uuid;
/// A serialized component paired with its enabled/disabled flag.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Default)]
#[serde(default)]
pub struct SerializedComponent<T> {
    pub inner: T,      // the component's serialized data
    pub active: bool,  // whether the component is enabled on the entity
}
// `None` means the entity does not carry this component at all.
pub type SerializedComponentWrapper<T> = Option<SerializedComponent<T>>;
/// `Option`-like helpers for lazily materializing a serialized component.
/// NOTE(review): "Extenstions" is a typo for "Extensions", but the name is
/// public API — renaming would break downstream users.
pub trait SerializedComponentExtenstions<T: Default> {
    // Returns the existing component, or creates one via `f` and returns it.
    fn unwrap_or_else_create<F: FnOnce() -> SerializedComponent<T>>(
        &mut self,
        f: F,
    ) -> &mut SerializedComponent<T>;
    // Returns the existing component, or creates an active default one.
    fn unwrap_or_create_default(&mut self) -> &mut SerializedComponent<T>;
}
impl<T: Default> SerializedComponentExtenstions<T> for SerializedComponentWrapper<T> {
    /// Returns the existing component, or installs the one produced by `f`
    /// and returns a reference to it.
    fn unwrap_or_else_create<F: FnOnce() -> SerializedComponent<T>>(
        &mut self,
        f: F,
    ) -> &mut SerializedComponent<T> {
        if let Some(inner) = self {
            inner
        } else {
            *self = Some(f());
            self.as_mut().unwrap()
        }
    }

    /// Returns the existing component, or installs an active `T::default()`.
    fn unwrap_or_create_default(&mut self) -> &mut SerializedComponent<T> {
        // Delegate to `unwrap_or_else_create` instead of duplicating its
        // if-let/insert logic; behavior is identical.
        self.unwrap_or_else_create(|| SerializedComponent {
            inner: T::default(),
            active: true,
        })
    }
}
// This should mirror ComponentDatabse
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Default)]
#[serde(default)]
pub struct SerializedEntity {
pub name: SerializedComponentWrapper<Name>,
pub player: SerializedComponentWrapper<Player>,
pub transform: SerializedComponentWrapper<Transform>,
pub grid_object: SerializedComponentWrapper<GridObject>,
pub scene_switcher: SerializedComponentWrapper<SceneSwitcher>,
pub graph_node: SerializedComponentWrapper<GraphNode>,
pub velocity: SerializedComponentWrapper<Velocity>,
pub sprite: SerializedComponentWrapper<Sprite>,
pub sound_source: SerializedComponentWrapper<SoundSource>,
pub draw_rectangle: SerializedComponentWrapper<DrawRectangle>,
pub bounding_box: SerializedComponentWrapper<BoundingBox>,
pub text_source: SerializedComponentWrapper<TextSource>,
// pub tilemap: SerializedComponentWrapper<TilemapSerialized>,
pub follow: SerializedComponentWrapper<Follow>,
pub conversant_npc: SerializedComponentWrapper<ConversantNPC>,
pub prefab_marker: SerializedComponentWrapper<PrefabMarker>,
pub id: Uuid,
pub marker: Option<Marker>,
}
impl SerializedEntity {
    /// Serializes a live entity. For prefab inheritors, components identical
    /// to the prefab's are deduplicated away (set to `None`) so only the
    /// entity's overrides are kept.
    pub fn new(
        entity_id: &Entity,
        serialization_id: Uuid,
        component_database: &ComponentDatabase,
        singleton_database: &SingletonDatabase,
        resources: &ResourcesDatabase,
    ) -> Option<Self> {
        let mut prefab = None;
        let mut serialized_entity = SerializedEntity::with_prefab_components(
            entity_id,
            serialization_id,
            component_database,
            singleton_database,
            resources,
            Some(&mut prefab),
        )?;
        // Drop every component slot that matches the prefab verbatim.
        if let Some(prefab) = prefab {
            serialized_entity
                .foreach_component_dedup(|component, active| component.is_serialized(&prefab, *active));
        }
        Some(serialized_entity)
    }
    /// This creates a serialized entity, but it will not dedup the prefab components out. If the entity
    /// is a prefab inheritor, then this will not be the same as what is written to disk, but **will be**
    /// the same as what is currently *live* (as far as SE == live entities goes, that is)
    pub fn with_prefab_components(
        entity_id: &Entity,
        serialization_id: Uuid,
        component_database: &ComponentDatabase,
        singleton_database: &SingletonDatabase,
        resources: &ResourcesDatabase,
        give_prefab: Option<&mut Option<SerializedEntity>>,
    ) -> Option<SerializedEntity> {
        // If it's a prefab, add in all the PREFAB components
        let mut serialized_entity = Default::default();
        let loaded_prefab = prefab_system::get_serialized_parent_prefab_from_inheritor(
            component_database.prefab_markers.get(entity_id),
            resources,
            &mut serialized_entity,
        );
        if loaded_prefab {
            // If the caller want the original prefab too, they can have it.
            if let Some(give_prefab) = give_prefab {
                *give_prefab = Some(serialized_entity.clone());
            }
        }
        // Save over the ID at this stage (we probably had the prefab ID in there)
        serialized_entity.id = serialization_id;
        // Now add in all the normal components:
        component_database.foreach_component_list(
            NonInspectableEntities::NAME | NonInspectableEntities::GRAPH_NODE,
            |component_list| {
                component_list.load_component_into_serialized_entity(
                    entity_id,
                    &mut serialized_entity,
                    &component_database.serialization_markers,
                );
            },
        );
        serialized_entity.marker = singleton_database.save_singleton_markers(entity_id);
        Some(serialized_entity)
    }
    /// An empty serialized entity with a freshly generated id.
    pub fn new_blank() -> Self {
        SerializedEntity {
            id: Uuid::new_v4(),
            ..Default::default()
        }
    }
    /// An empty serialized entity with the given id.
    pub fn with_uuid(uuid: Uuid) -> Self {
        SerializedEntity {
            id: uuid,
            ..Default::default()
        }
    }
    /// Visits every component slot that is `Some`, filtered by `entity_bitmask`
    /// for the name/graph-node/prefab categories; `f_util` (if given) then
    /// receives the id and singleton marker.
    pub fn foreach_component(
        &mut self,
        entity_bitmask: NonInspectableEntities,
        mut f: impl FnMut(&dyn ComponentBounds, &bool),
        f_util: Option<impl FnMut(&mut Uuid, &mut Option<Marker>)>,
    ) {
        // Full destructuring: the compiler forces this list to stay in sync
        // with the struct when fields are added.
        let SerializedEntity {
            name,
            player,
            transform,
            grid_object,
            scene_switcher,
            graph_node,
            velocity,
            sprite,
            sound_source,
            draw_rectangle,
            bounding_box,
            text_source,
            // tilemap,
            follow,
            conversant_npc,
            prefab_marker,
            id,
            marker,
        } = self;
        // Calls `f` on each listed slot that is populated.
        macro_rules! do_action {
            ( $( $x:ident ),* ) => {
                $(
                    if let Some(serialized_component) = $x {
                        f(&serialized_component.inner, &serialized_component.active);
                    }
                )*
            };
        }
        if entity_bitmask.contains(NonInspectableEntities::NAME) {
            do_action!(name);
        }
        do_action!(
            player,
            transform,
            grid_object,
            scene_switcher,
            velocity,
            sprite,
            sound_source,
            draw_rectangle,
            bounding_box,
            text_source,
            // tilemap,
            follow,
            conversant_npc
        );
        if entity_bitmask.contains(NonInspectableEntities::GRAPH_NODE) {
            do_action!(graph_node);
        }
        if entity_bitmask.contains(NonInspectableEntities::PREFAB) {
            do_action!(prefab_marker);
        }
        if let Some(mut f_util) = f_util {
            f_util(id, marker);
        }
    }
    /// Visits every populated component slot; where `f` returns `true`
    /// (i.e. the prefab already serializes it identically), the slot is
    /// cleared so only overrides remain. Id, marker, and the prefab marker
    /// itself are never deduplicated.
    pub fn foreach_component_dedup(&mut self, mut f: impl FnMut(&dyn ComponentBounds, &bool) -> bool) {
        let SerializedEntity {
            name,
            player,
            transform,
            grid_object,
            scene_switcher,
            graph_node,
            velocity,
            sprite,
            sound_source,
            draw_rectangle,
            bounding_box,
            text_source,
            // tilemap,
            follow,
            conversant_npc,
            prefab_marker: _,
            id: _,
            marker: _,
        } = self;
        macro_rules! dedup_serialization {
            ( $( $x:ident ),* ) => {
                $(
                    if let Some(serialized_component) = $x {
                        if f(&serialized_component.inner, &serialized_component.active) {
                            *$x = None;
                        }
                    }
                )*
            };
        }
        dedup_serialization!(
            name,
            player,
            transform,
            grid_object,
            scene_switcher,
            graph_node,
            velocity,
            sprite,
            sound_source,
            draw_rectangle,
            bounding_box,
            text_source,
            // tilemap,
            follow,
            conversant_npc
        );
    }
    /// Debug helper: pretty-prints the whole serialized entity to stdout.
    pub fn log_to_console(&self) {
        println!("---");
        println!("Serialized Entity: {:#?}", self);
        println!("---");
    }
    /// Serializes the whole entity to YAML and extracts the sub-value for
    /// component type `T` (by its serialization name), or a default value.
    pub fn get_serialized_yaml_component<T: SerializableComponent + ComponentBounds>(
        serialized_entity: &SerializedEntity,
    ) -> YamlValue {
        if let YamlValue::Mapping(mut serialized_entity_value) =
            serde_yaml::to_value(serialized_entity.clone()).unwrap()
        {
            serialized_entity_value
                .remove(&T::SERIALIZATION_NAME)
                .unwrap_or_default()
        } else {
            YamlValue::default()
        }
    }
    /// Typed variant of `get_serialized_yaml_component`: returns `None` when
    /// the component is absent or fails to deserialize.
    pub fn get_serialized_component<T: SerializableComponent + ComponentBounds>(
        serialized_entity: &SerializedEntity,
    ) -> Option<SerializedComponent<T>> {
        let my_output = SerializedEntity::get_serialized_yaml_component::<T>(serialized_entity);
        if my_output.is_mapping() {
            serde_yaml::from_value(my_output).unwrap_or_default()
        } else {
            None
        }
    }
    /// Converts a live component into the YAML form of its serialized wrapper.
    pub fn create_yaml_component<T: SerializableComponent + ComponentBounds>(
        cmp: &super::Component<T>,
    ) -> YamlValue {
        serde_yaml::to_value(SerializedComponent {
            inner: cmp.clone_inner(),
            active: cmp.is_active,
        })
        .unwrap_or_default()
    }
}
| true |
9a85ba518d91466e650f8c19bbadea3e0d627682
|
Rust
|
mcaveniathor/cathode
|
/src/mode.rs
|
UTF-8
| 10,698 | 2.984375 | 3 |
[] |
no_license
|
use std::{io,process,str,thread,time};
use std::io::Error;
use std::result::Result;
use regex::Regex;
use serde::{Serialize,Deserialize};
use crate::{fileio,util};
/// A display mode as parsed from / fed to `xrandr`: resolution, refresh rate,
/// the output it applies to, and the mode's name.
#[derive(Debug)]
pub struct InputMode {
    width:String,
    height:String,
    rate:String,
    name:String,
    display:String,
}
impl InputMode {
    /// Builds an `InputMode` by taking owned copies of the borrowed fields.
    pub fn new(width:&str,height:&str,rate:&str,display:&str,name:&str) -> InputMode {
        InputMode {
            width: width.to_owned(),
            height: height.to_owned(),
            rate: rate.to_owned(),
            display: display.to_owned(),
            name: name.to_owned(),
        }
    }
}
/// A modeline in CVT (Coordinated Video Timings) form, i.e. the eleven fields
/// produced by the `cvt` utility and consumed by `xrandr --newmode`.
#[derive(Clone,Debug,Serialize,Deserialize)]
pub struct CvtMode {
    name: String,          // mode name, e.g. "1920x1080_60"
    clock: String,         // pixel clock in MHz
    h_disp: String,        // horizontal timings
    h_sync_start: String,
    h_sync_end: String,
    h_total: String,
    v_disp: String,        // vertical timings
    v_sync_start: String,
    v_sync_end: String,
    v_total: String,
    flags: String,         // sync polarity flags, e.g. "-hsync +vsync"
}
impl CvtMode {
    /// The mode's name (the only field callers outside this module need).
    pub fn get_name(&self) -> &str {
        &self.name
    }
    /*
    pub fn new_empty() -> CvtMode {
        CvtMode {
            name: String::new(),
            clock: String::new(),
            h_disp: String::new(),
            h_sync_start: String::new(),
            h_sync_end: String::new(),
            h_total: String::new(),
            v_disp: String::new(),
            v_sync_start: String::new(),
            v_sync_end: String::new(),
            v_total: String::new(),
            flags: String::new(),
        }
    }
    */
}
// Some(d) would be a vec of the displays for which to delete the mode; if d is None, the mode will be removed from all connected displays
// xrandr doesn't seem to think the program has access to user-created modes for deletion;
// could run as root but would rather not.
// TODO: address deletion permission issue
/*
fn delete_mode_xrandr(n: &str, d: Option<Vec<String>>, verbose: bool) -> Result<(),Error> {
for display in d.unwrap() {
delete_mode(&n,&display);
}
let currents_handle = thread::spawn(move || get_current_modes(verbose));
let defaults_handle = thread::spawn(move || get_default_modes(verbose));
let currents = currents_handle.join().unwrap()?;
let defaults = defaults_handle.join().unwrap()?;
let displays = match d {
Some(disps) => disps,
None => {
let mut tmp: Vec<String> = Vec::with_capacity(currents.len());
for mode in ¤ts {
tmp.push(mode.display.clone());
}
tmp
}
};
println!("{:?}",¤ts);
// these loops are because xrandr doesn't let you update modes or delete them while in use
for disp in displays {
for default in &defaults {
if default.display == disp {
if verbose {
println!("Switching to default mode to allow updating of the current mode");
}
switch_mode(&default.name, &disp, verbose)?; // switch the display to its default mode to enable deletion of in-use mode
}
}
if verbose {
println!("Removing mode {} from display {}",&n,&disp);
}
let mut cmd = process::Command::new("xrandr");
cmd.arg("--delmode").arg(disp.clone()).arg(n.clone());
println!("{:?}",cmd.output().unwrap());
}
Ok(())
}
*/
/// Creates a new xrandr mode. Missing parameters default to the first current
/// mode's values; optionally tests the mode for a timeout (falling back to the
/// current mode afterwards) and/or saves it to disk via `fileio`.
pub fn add_mode(w: Option<&str>, h: Option<&str>, r: Option<&str>, d: Option<&str>, n: Option<&str>, t: Option<&str>, f: Option<&str>, test: bool, save: bool, verbose: bool) -> Result<(),Error> {
    let current_modes = get_current_modes(verbose)?;
    // Use first current display mode for parameters not supplied
    // and as the fallback if test option is used
    let width = w.unwrap_or(&current_modes[0].width).to_string();
    let height = h.unwrap_or(&current_modes[0].height).to_string();
    let rate = r.unwrap_or(&current_modes[0].rate).to_string();
    let display = d.unwrap_or(&current_modes[0].display).to_string();
    let tmp = format!("{}x{}_{}",width,height,rate);
    // default test timeout is 10 seconds.
    // Default mode name is "<width>x<height>_<rate>" unless one was supplied.
    let name = match n {
        Some(nm) => String::from(nm),
        None => {
            tmp
        }
    };
    let i_mode = InputMode {
        width,
        height,
        rate,
        display: String::from(&display),
        name: name.clone()
    };
    let mut d_vec: Vec<String> = Vec::with_capacity(1);
    d_vec.push(display.clone());
    // compute CVT timings and delete xrandr mode concurrently; wait for deletion before adding to xrandr
    //let del_handle = thread::spawn(move || delete_mode_xrandr(&name, Some(d_vec), verbose));
    // Compute timings for the new mode and the fallback mode in parallel;
    // each spawns a `cvt` subprocess, so the overlap is worthwhile.
    let cvt_handle = thread::spawn(move || gen_cvt_mode(&i_mode, verbose));
    let fallback_cvt_handle = thread::spawn(move || gen_cvt_mode(&current_modes[0], verbose));
    //let _ = del_handle.join().unwrap();
    let cvt = cvt_handle.join().unwrap();
    let fallback_cvt = fallback_cvt_handle.join().unwrap();
    // Register the mode with xrandr before optionally testing/saving it.
    new_mode(&cvt, &display, verbose)?;
    if test {
        test_mode(&cvt, &fallback_cvt, &display, t, verbose)?;
    }
    if save {
        fileio::save_mode(&cvt,f,verbose)?
    }
    Ok(())
}
/// Applies a previously saved mode `n` to display `d`. With `test`, the mode
/// is shown for the timeout first and the user is asked whether to keep it;
/// with `persist`, the applied mode is additionally saved as persistent.
pub fn apply_mode(n: &str, d: &str, t: Option<&str>, test: bool, persist: bool, verbose: bool) -> Result<(), io::Error> {
    println!("Applying mode {} to display {}.",n,d);
    let mode = fileio::get_mode(n, None, verbose).unwrap();
    if test {
        let default_modes = get_default_modes(verbose)?;
        let default_mode = gen_cvt_mode(&default_modes[0],verbose);
        test_mode(&mode, &default_mode, d, t, verbose)?;
        println!("Keep the mode you just tested? y/n");
        // Keep reading stdin until the accumulated input contains a 'y' or
        // 'n'; any 'n' aborts without applying the mode.
        // NOTE(review): matches on substrings, so e.g. "no" is treated as a
        // reject because it contains 'n'.
        let mut input = String::new();
        while !(input.contains("y") || input.contains("n")) {
            let _ = io::stdin().read_line(&mut input);
            if input.contains("n") {
                return Ok(());
            }
        }
    }
    switch_mode(n, d, verbose)?;
    if persist {
        fileio::save_mode_persistent(&mode, verbose)?;
    }
    Ok(())
}
/// Switches `display` to `mode` for a timeout (default 10 s), then reverts to
/// `default_mode`. An invalid or non-positive timeout string falls back to 10.
fn test_mode(mode: &CvtMode, default_mode: &CvtMode, display: &str, t: Option<&str>, verbose: bool) -> Result<(), io::Error> {
    let name = &mode.get_name();
    let default_name = &default_mode.get_name();
    // Parse the timeout leniently: bad input warns and uses the default
    // rather than failing the whole test.
    let timeout: u64 = match t {
        Some(time) => {
            let tmp = match time.parse() {
                Ok(kk) => kk,
                Err(_) => {
                    eprintln!("Error: timeout must be an integer greater than zero. Using default timeout of 10 seconds.");
                    10 // just default to 10 secs if invalid timeout provided rather than returning an error
                }
            };
            if tmp > 0 {
                tmp
            } else {
                10 // default to 10 secs if none given
            }
        }
        None => 10
    };
    let delay = time::Duration::from_secs(timeout);
    if verbose {
        println!("Testing mode {} on display {} for {} secs.", name, display, timeout);
        thread::sleep(time::Duration::from_secs(1));
    }
    if verbose {
        let _ = thread::spawn(move || util::print_countdown(timeout)); // this should maybe print regardless of verbose option, idk
    }
    // Start the timer before switching so the test duration excludes the
    // (variable) time xrandr itself takes.
    let handle = thread::spawn(move || thread::sleep(delay));
    switch_mode(name, display, verbose)?;
    handle.join().expect("Timer thread had an error.");
    if verbose {
        println!("Reverting to mode {} on display {}.", default_name, display);
    }
    switch_mode(default_name, display, verbose)?;
    Ok(())
}
/// Runs the `cvt` utility for the given resolution/rate and parses its
/// modeline output into a `CvtMode`.
///
/// # Panics
/// Panics if `cvt` is not installed, or if its output format changes (the
/// parse indexes fixed fields of the modeline).
fn gen_cvt_mode(input: &InputMode, verbose: bool) -> CvtMode {
    if verbose {
        println!("Generating coordinated video timings for mode {}",input.name);
    }
    let mut cmd = process::Command::new("cvt");
    cmd.arg(&input.width).arg(&input.height).arg(&input.rate);
    let output = cmd.output().unwrap();
    let out = str::from_utf8(&output.stdout).unwrap();
    // cvt prints: Modeline "<name>" <clock> <h...> <v...> <flags>
    // Splitting on '"' puts the timing fields in the third chunk.
    let lines: Vec<_> = out.split('"').collect();
    let mut t: Vec<_> = lines[2][2..lines[2].len()-1].split(" ").collect();
    // Drop padding produced by cvt's column alignment. `retain` does this
    // in one O(n) pass; the previous index-and-remove loop was O(n^2).
    t.retain(|field| !field.is_empty() && *field != "\t");
    // Expects exactly 11 fields: clock, 4 horizontal, 4 vertical, 2 flags.
    let tmp = CvtMode {
        name: input.name.to_owned(),
        clock: String::from(t[0]),
        h_disp: String::from(t[1]),
        h_sync_start: String::from(t[2]),
        h_sync_end: String::from(t[3]),
        h_total: String::from(t[4]),
        v_disp: String::from(t[5]),
        v_sync_start: String::from(t[6]),
        v_sync_end: String::from(t[7]),
        v_total: String::from(t[8]),
        flags: format!("{} {}",t[9],t[10]),
    };
    if verbose {
        println!("{:?}",tmp);
    }
    tmp
}
// Retrieves modes which are currently in use
// Retrieves modes which are currently in use
// (the mode marked with '*' in `xrandr` output for each connected display).
fn get_current_modes(verbose: bool) -> Result<Vec<InputMode>, Error> {
    if verbose {
        println!("Retrieving current display configuration.");
    }
    let re = Regex::new(r"(\S+)\s+connected.*\n[[a-zA-Z0-9\.]*\n]*\s*([0-9]+)x([0-9]+)\s*([0-9]+\.[0-9]+)\*").unwrap();
    util::get_modes_helper(&re, verbose)
}
// Retrieves the default modes for each display
// (the mode marked with '+' in `xrandr` output, optionally also '*').
fn get_default_modes(verbose: bool) -> Result<Vec<InputMode>, Error> {
    if verbose {
        println!("Retrieving current display configuration.");
    }
    let re = Regex::new(r"(\S+)\s+connected.*\n[[a-zA-Z0-9\.]*\n]*\s*([0-9]+)x([0-9]+)\s*([0-9]+\.[0-9]+)[\*]?\+").unwrap();
    util::get_modes_helper(&re, verbose)
}
/// Switches `display` to the named mode via `xrandr --output ... --mode ...`.
fn switch_mode(name: &str, display: &str, verbose: bool) -> Result<(), io::Error> {
    if verbose {
        println!("Applying mode {} to display {}", name, &display);
    }
    process::Command::new("xrandr")
        .arg("--output")
        .arg(display)
        .arg("--mode")
        .arg(name)
        .output()?;
    if verbose {
        println!("Successfully applied mode {} to display {}", name, &display);
    }
    Ok(())
}
// Adds the newly created mode to xrandr
fn new_mode(mode: &CvtMode, display: &str, verbose: bool) -> Result<(), io::Error> {
let mut cmd = process::Command::new("xrandr");
cmd.arg("--newmode")
.arg(&mode.name)
.arg(&mode.clock)
.arg(&mode.h_disp)
.arg(&mode.h_sync_start)
.arg(&mode.h_sync_end)
.arg(&mode.h_total)
.arg(&mode.v_disp)
.arg(&mode.v_sync_start)
.arg(&mode.v_sync_end)
.arg(&mode.v_total)
.arg(&mode.flags);
if verbose {
println!("Creating xrandr mode {}",&mode.name);
}
cmd.output()?;
if verbose {
println!("Adding mode {} for display {}.",&mode.name,display);
}
cmd = process::Command::new("xrandr");
cmd.arg("--addmode").arg(display).arg(&mode.name);
cmd.output()?;
Ok(())
}
| true |
cdae0c00a44b8ce67c06cea86a5293e3b6a64518
|
Rust
|
PaulJuliusMartinez/rust
|
/src/test/ui/cleanup-rvalue-scopes-cf.rs
|
UTF-8
| 994 | 2.640625 | 3 |
[
"MIT",
"LicenseRef-scancode-other-permissive",
"Apache-2.0",
"BSD-3-Clause",
"BSD-2-Clause",
"NCSA"
] |
permissive
|
// ignore-compare-mode-nll
// Test that the borrow checker prevents pointers to temporaries
// with statement lifetimes from escaping.
use std::ops::Drop;
// NOTE(review): this is a rustc UI (compile-fail) test. The `//~ ERROR`
// annotations are the expected compiler diagnostics — the code below MUST
// keep failing to compile exactly as annotated, so no line may be "fixed".
static mut FLAGS: u64 = 0;
struct Box<T> { f: T }
struct AddFlags { bits: u64 }
// Free function constructor sharing the type's name (intentional for the test).
fn AddFlags(bits: u64) -> AddFlags {
    AddFlags { bits: bits }
}
// Identity through a reference: forces the temporary's lifetime to matter.
fn arg(x: &AddFlags) -> &AddFlags {
    x
}
impl AddFlags {
    fn get(&self) -> &AddFlags {
        self
    }
}
pub fn main() {
    let _x = arg(&AddFlags(1)); //~ ERROR value does not live long enough
    let _x = AddFlags(1).get(); //~ ERROR value does not live long enough
    let _x = &*arg(&AddFlags(1)); //~ ERROR value does not live long enough
    let ref _x = *arg(&AddFlags(1)); //~ ERROR value does not live long enough
    let &ref _x = arg(&AddFlags(1)); //~ ERROR value does not live long enough
    let _x = AddFlags(1).get(); //~ ERROR value does not live long enough
    let Box { f: _x } = Box { f: AddFlags(1).get() }; //~ ERROR value does not live long enough
}
| true |
6e7a18c68109b18baaa2918981fd10420f3b44ce
|
Rust
|
chin0/llrl
|
/llrl0/src/tests/error_expectation.rs
|
UTF-8
| 8,924 | 2.671875 | 3 |
[] |
permissive
|
use crate::prelude::*;
use crate::sexp::matcher as m;
use std::borrow::Cow;
use std::collections::HashSet;
/// One expected compiler/module error in a test fixture; each variant mirrors
/// a `ModuleError` case and carries just enough data to match against it
/// (see the `matches` impl below).
#[derive(Debug)]
pub(super) enum ErrorExpectation {
    Syntax,
    MacroExpansion(String),
    MultipleDeclarations(String),
    ConflictingExports(String),
    DuplicatedIdentifier(String),
    // (kind, name) of the unresolved reference.
    Unresolved(String, String),
    CannotDeclareSealedClassInstanceInAnotherModule,
    CannotUseMacroDefinedInTheSameModule,
    ClassMethodTypeSchemeUnspecified,
    CannotGeneralize,
    UnifyKind(UnificationError),
    UnsupportedKind,
    // Names of the classes/instances involved, matched as a set.
    CyclicClasses(Vec<String>),
    OverlappingInstances(Vec<String>),
    NoMatchingInstances,
    RecursionLimitExceeded,
    CannotUseReturnInThisContext,
    UnifyType(UnificationError),
    CannotResolveAmbiguity,
    ArityMismatch,
    MethodTypeSchemeMismatch,
    UselessPattern(String),
    NonExhaustivePattern(Vec<String>),
}
impl ErrorExpectation {
    /// Return `true` when `error` is the `ModuleError` this expectation
    /// describes (comparing any payloads the expectation carries).
    pub(super) fn matches(&self, error: &ModuleError) -> bool {
        // Strip contextual wrapping first so each arm matches the bare variant.
        let error = error.clone().drop_context_info();
        match self {
            Self::Syntax => matches!(error, ModuleError::Syntax(_)),
            Self::MacroExpansion(a) => {
                matches!(error, ModuleError::MacroExpansionFailed(_, ref b) if a == b)
            }
            Self::MultipleDeclarations(a) => {
                matches!(error, ModuleError::MultipleDeclarations(ref b, _, _) if a == b)
            }
            Self::ConflictingExports(a) => {
                matches!(error, ModuleError::ConflictingExports(ref b, _, _) if a == b)
            }
            Self::DuplicatedIdentifier(a) => {
                matches!(error, ModuleError::DuplicatedIdentifier(ref b, _, _) if a == b)
            }
            Self::Unresolved(kind, name) => {
                matches!(error, ModuleError::Unresolved(_, ref k, ref n) if kind == k && name == n)
            }
            Self::CannotDeclareSealedClassInstanceInAnotherModule => {
                matches!(
                    error,
                    ModuleError::CannotDeclareSealedClassInstanceInAnotherModule(_)
                )
            }
            Self::CannotUseMacroDefinedInTheSameModule => {
                matches!(error, ModuleError::CannotUseMacroDefinedInTheSameModule(_))
            }
            Self::ClassMethodTypeSchemeUnspecified => {
                matches!(error, ModuleError::ClassMethodTypeSchemeUnspecified(_))
            }
            Self::CannotGeneralize => {
                matches!(error, ModuleError::CannotGeneralize(_))
            }
            Self::UnifyKind(a) => {
                matches!(error, ModuleError::CannotUnifyKind(_, _, ref b) if a == b)
            }
            Self::UnsupportedKind => {
                matches!(error, ModuleError::UnsupportedKind(_))
            }
            // Class/instance name sets are compared order-insensitively via HashSet.
            Self::CyclicClasses(names) => {
                matches!(
                    error,
                    ModuleError::CyclicClasses(ref classes)
                    if names.iter().map(|s| s.to_string()).collect::<HashSet<_>>()
                        == classes.iter().map(|c| c.name.to_string()).collect()
                )
            }
            Self::OverlappingInstances(names) => {
                matches!(
                    error,
                    ModuleError::OverlappingInstances(ref instances)
                    if names.iter().map(|s| s.to_string()).collect::<HashSet<_>>()
                        == instances.iter().map(|i| i.name.to_string()).collect()
                )
            }
            Self::NoMatchingInstances => {
                matches!(error, ModuleError::NoMatchingInstances(_, _))
            }
            Self::RecursionLimitExceeded => {
                matches!(error, ModuleError::RecursionLimitExceeded(_))
            }
            Self::CannotUseReturnInThisContext => {
                matches!(error, ModuleError::CannotUseReturnInThisContext)
            }
            Self::UnifyType(a) => {
                matches!(error, ModuleError::CannotUnifyType(_, _, ref b) if a == b)
            }
            Self::CannotResolveAmbiguity => {
                matches!(error, ModuleError::CannotResolveAmbiguity(_, _))
            }
            Self::ArityMismatch => {
                matches!(error, ModuleError::ArityMismatch(_, _))
            }
            Self::MethodTypeSchemeMismatch => {
                matches!(error, ModuleError::MethodTypeSchemeMismatch(_, _))
            }
            Self::UselessPattern(pat) => {
                matches!(error, ModuleError::UselessPattern(ref p) if p == pat)
            }
            Self::NonExhaustivePattern(pats) => {
                matches!(error, ModuleError::NonExhaustivePattern(ref ps) if ps == pats)
            }
        }
    }
}
impl<'a> m::Match<'a> for ErrorExpectation {
    type Result = Self;
    /// Parse an `ErrorExpectation` from its S-expression notation, e.g.
    /// `syntax`, `(macro-expansion "msg")`, `(unresolved value foo)`.
    ///
    /// NOTE(review): the bare-symbol branches for "unify-kind"/"unify-type"
    /// precede their tuple forms; this is only safe if a list sexp never
    /// matches `m::Symbol` alone — confirm against the matcher semantics.
    fn match_sexp(s: &'a Sexp) -> m::Result<Self> {
        if let Ok("syntax") = s.matches::<m::Symbol>() {
            Ok(Self::Syntax)
        } else if let Ok(("macro-expansion", error)) = s.matches::<(m::Symbol, m::String)>() {
            Ok(Self::MacroExpansion(error.to_string()))
        } else if let Ok("unify-kind") = s.matches::<m::Symbol>() {
            Ok(Self::UnifyKind(UnificationError::Mismatch))
        } else if let Ok(("unify-kind", "occurs-check-failed")) =
            s.matches::<(m::Symbol, m::Symbol)>()
        {
            Ok(Self::UnifyKind(UnificationError::OccursCheckFailed))
        } else if let Ok("unsupported-kind") = s.matches::<m::Symbol>() {
            Ok(Self::UnsupportedKind)
        } else if let Ok(("multiple-declarations", name)) = s.matches::<(m::Symbol, m::Symbol)>() {
            Ok(Self::MultipleDeclarations(name.to_string()))
        } else if let Ok(("conflicting-exports", name)) = s.matches::<(m::Symbol, m::Symbol)>() {
            Ok(Self::ConflictingExports(name.to_string()))
        } else if let Ok(("duplicated-identifier", name)) = s.matches::<(m::Symbol, m::Symbol)>() {
            Ok(Self::DuplicatedIdentifier(name.to_string()))
        } else if let Ok("cannot-use-macro-defined-in-the-same-module") = s.matches::<m::Symbol>() {
            Ok(Self::CannotUseMacroDefinedInTheSameModule)
        } else if let Ok("class-method-type-scheme-unspecified") = s.matches::<m::Symbol>() {
            Ok(Self::ClassMethodTypeSchemeUnspecified)
        } else if let Ok("cannot-generalize") = s.matches::<m::Symbol>() {
            Ok(Self::CannotGeneralize)
        } else if let Ok(("unresolved", def_kind, name)) =
            s.matches::<(m::Symbol, m::Symbol, m::Symbol)>()
        {
            Ok(Self::Unresolved(def_kind.to_string(), name.to_string()))
        } else if let Ok("cannot-declare-sealed-class-instance-in-another-module") =
            s.matches::<m::Symbol>()
        {
            Ok(Self::CannotDeclareSealedClassInstanceInAnotherModule)
        } else if let Ok(("cyclic-classes", names)) = s.matches::<(m::Symbol, m::Rest<m::Symbol>)>()
        {
            Ok(Self::CyclicClasses(
                names.into_iter().map(|s| s.to_string()).collect(),
            ))
        } else if let Ok(("overlapping-instances", names)) =
            s.matches::<(m::Symbol, m::Rest<m::Symbol>)>()
        {
            Ok(Self::OverlappingInstances(
                names.into_iter().map(|s| s.to_string()).collect(),
            ))
        } else if let Ok("no-matching-instances") = s.matches::<m::Symbol>() {
            Ok(Self::NoMatchingInstances)
        } else if let Ok("recursion-limit-exceeded") = s.matches::<m::Symbol>() {
            Ok(Self::RecursionLimitExceeded)
        } else if let Ok("cannot-use-return-in-this-context") = s.matches::<m::Symbol>() {
            Ok(Self::CannotUseReturnInThisContext)
        } else if let Ok("unify-type") = s.matches::<m::Symbol>() {
            Ok(Self::UnifyType(UnificationError::Mismatch))
        } else if let Ok(("unify-type", "occurs-check-failed")) =
            s.matches::<(m::Symbol, m::Symbol)>()
        {
            Ok(Self::UnifyType(UnificationError::OccursCheckFailed))
        } else if let Ok("cannot-resolve-ambiguity") = s.matches::<m::Symbol>() {
            Ok(Self::CannotResolveAmbiguity)
        } else if let Ok("arity-mismatch") = s.matches::<m::Symbol>() {
            Ok(Self::ArityMismatch)
        } else if let Ok("method-type-scheme-mismatch") = s.matches::<m::Symbol>() {
            Ok(Self::MethodTypeSchemeMismatch)
        } else if let Ok(("useless", s)) = s.matches::<(m::Symbol, m::Any)>() {
            Ok(Self::UselessPattern(s.to_string()))
        } else if let Ok(("non-exhaustive", ss)) = s.matches::<(m::Symbol, m::Rest<m::Any>)>() {
            Ok(Self::NonExhaustivePattern(
                ss.into_iter().map(|s| s.to_string()).collect(),
            ))
        } else {
            // No known notation matched: report what was expected.
            Err(Self::error(s))
        }
    }
    /// Human-readable description of the grammar this matcher accepts.
    fn expect() -> Cow<'static, str> {
        "<error-expectation>".into()
    }
}
| true |
36a4e9b9b57a5f9fe918c4df7ff03f8954ce3560
|
Rust
|
davethecanuck/rust-poly-logger
|
/src/log_formatter.rs
|
UTF-8
| 3,394 | 3.078125 | 3 |
[] |
no_license
|
use strfmt::strfmt;
use std::collections::HashMap;
/// Renders `log::Record`s into strings, either with a fast built-in default
/// layout or a caller-supplied `strfmt` template.
pub struct LogFormatter {
    // strftime format string (chrono syntax) used for `{timestamp}`
    timestamp_format: &'static str,
    // e.g. [{timestamp}] {level} [{path}] - {msg}
    msg_format: &'static str,
    // Flag to indicate we need to do more expensive
    // formatting with strfmt
    use_strfmt: bool,
}
// NOTE: Using default error type
type MsgResult = Result<String, strfmt::FmtError>;
impl Clone for LogFormatter {
    /// Field-wise copy. All fields are `Copy` (`&'static str` references and
    /// a `bool`), so the previous `.clone()` calls on the string references
    /// were no-ops and have been dropped.
    fn clone(&self) -> LogFormatter {
        LogFormatter {
            timestamp_format: self.timestamp_format,
            msg_format: self.msg_format,
            use_strfmt: self.use_strfmt,
        }
    }
}
impl LogFormatter {
    /// Create a formatter with the default layout and an RFC 3339 (`%+`)
    /// timestamp format.
    pub fn new() -> Self {
        LogFormatter {
            timestamp_format: "%+",
            msg_format: "",
            use_strfmt: false,
        }
    }
    // Set format options
    /// Set the strftime timestamp format; an empty string disables timestamps.
    pub fn timestamp_format(&mut self, format: &'static str) -> &mut Self {
        self.timestamp_format = format;
        self
    }
    /// Install a custom `strfmt` message template. Switches `msg` to the
    /// slower template path; keys available: timestamp, level, file, line, args.
    pub fn msg_format(&mut self, format: &'static str) -> &mut Self {
        // Using custom format
        self.use_strfmt = true;
        self.msg_format = format;
        self
    }
    // Format value accessors
    /// Render a record: fast default layout unless a custom template is set.
    pub fn msg(&self, record: &log::Record) -> MsgResult {
        // NOTE - Use strfmt only if custom message
        // as it's more expensive.
        // Future option: We could add various
        // canned defaults for performance reasons
        match self.use_strfmt {
            false => {
                Ok(self.default_msg(record))
            },
            true => {
                self.custom_msg(record)
            },
        }
    }
    /// Fast path: fixed `[timestamp] LEVEL [file:line] args` layout.
    pub fn default_msg(&self, record: &log::Record) -> String {
        format!(
            "[{timestamp}] {level} [{file}:{line}] {args}",
            timestamp=self.timestamp(),
            level=record.metadata().level(),
            file=self.file(record),
            line=self.line(record),
            args=record.args())
    }
    /// Slow path: substitute record fields into the user template via `strfmt`.
    fn custom_msg(&self, record: &log::Record) -> MsgResult {
        let mut vars = HashMap::new();
        vars.insert("timestamp".to_string(), self.timestamp());
        vars.insert("level".to_string(),
            record.metadata().level().to_string());
        vars.insert("file".to_string(), self.file(record));
        vars.insert("line".to_string(), self.line(record).to_string());
        vars.insert("args".to_string(), record.args().to_string());
        strfmt(self.msg_format, &vars)
    }
    /// Format the current local time, or return "" when timestamps are disabled.
    fn timestamp(&self) -> String {
        match &self.timestamp_format {
            &"" => {
                "".to_string()
            },
            f => {
                // Note that we might want to separate the
                // timestamping of a message with the formatting of the
                // timestamp, especially if we move to a producer/consumer
                // queue
                let now = chrono::Local::now();
                now.format(&f).to_string()
            }
        }
    }
    /// Line number of the record, or 0 when unavailable.
    fn line(&self, record: &log::Record) -> u32 {
        match record.line() {
            Some(l) => l,
            None => 0,
        }
    }
    /// Source file of the record, or "<no_file>" when unavailable.
    fn file(&self, record: &log::Record) -> String {
        match record.file() {
            Some(f) => f.to_string(),
            None => "<no_file>".to_string(),
        }
    }
}
}
| true |
18314c5ba17bac357e8f6d523151287608e6c301
|
Rust
|
JadenGeller/Advent2016
|
/src/dec4.rs
|
UTF-8
| 3,845 | 3.328125 | 3 |
[
"MIT"
] |
permissive
|
use std::fs::File;
use std::io::{BufRead, BufReader};
use std::str::FromStr;
use std::error::Error;
use std::fmt;
use std::process::exit;
use std::collections::HashMap;
use std::cmp::Ordering;
/// A decrypted room: its plaintext `name` and sector `id`.
#[allow(dead_code)]
struct Room {
    name: String,
    id: i32,
}
/// Failure modes when parsing a room identifier string.
#[derive(Debug)]
enum ParseRoomError {
    // The identifier did not have the `name-parts-<id>[<checksum>]` shape.
    InvalidFormat,
    // Well-formed identifier whose checksum disagrees with the letter counts.
    IncorrectChecksum,
}
impl fmt::Display for ParseRoomError {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
fmt.write_str(self.description())
}
}
impl Error for ParseRoomError {
    // NOTE(review): `Error::description` is deprecated since Rust 1.42 in
    // favour of the `Display` impl; kept because `Display` above calls it.
    fn description(&self) -> &str {
        match self {
            &ParseRoomError::InvalidFormat => "invalid room identifier format",
            &ParseRoomError::IncorrectChecksum => "room identifier checksum doesn't match",
        }
    }
}
impl FromStr for Room {
    type Err = ParseRoomError;
    /// Parse a room identifier of the form `name-parts-<id>[<checksum>]`.
    ///
    /// The checksum must be the five most frequent letters of the encrypted
    /// name, ties broken alphabetically. On success the name is decrypted
    /// with a Caesar shift of `id` and hyphens become spaces.
    ///
    /// # Errors
    /// * `InvalidFormat` — missing brackets, non-numeric id, non-lowercase
    ///   letters, or fewer than five distinct letters in the name.
    /// * `IncorrectChecksum` — well-formed identifier, wrong checksum.
    fn from_str(x: &str) -> Result<Room, ParseRoomError> {
        let (head, checksum) = x.split_at(x.find('[').ok_or(ParseRoomError::InvalidFormat)?);
        let end = checksum.len();
        if checksum.chars().nth(0) != Some('[') || checksum.chars().nth(end - 1) != Some(']') {
            Err(ParseRoomError::InvalidFormat)?
        };
        let checksum = checksum[1..end - 1].to_string();
        let mut components = head.split('-');
        // The sector id is the last '-'-separated component before the bracket.
        let id = components.next_back().ok_or(ParseRoomError::InvalidFormat)?
            .parse::<i32>().map_err(|_| { ParseRoomError::InvalidFormat })?;
        let mut name: Vec<String> = vec![];
        let mut freqs: HashMap<char, u32> = HashMap::new();
        for comp in components {
            for letter in comp.chars().by_ref() {
                if !letter.is_alphabetic() || !letter.is_lowercase() { Err(ParseRoomError::InvalidFormat)? };
                let new_value = freqs.get(&letter).map(|c| *c).unwrap_or(0) + 1;
                freqs.insert(letter, new_value);
            };
            name.push(comp.to_string());
        };
        let mut freqs = freqs.iter().collect::<Vec<_>>();
        // Sort by descending count, breaking ties alphabetically.
        freqs.sort_by(|&(l1, c1), &(l2, c2)| {
            let count_ordering = c1.cmp(c2).reverse();
            match count_ordering {
                Ordering::Equal => l1.cmp(l2),
                _ => count_ordering,
            }
        });
        // Guard: identifiers with fewer than five distinct letters would
        // otherwise panic on the `freqs[0..5]` slice below.
        if freqs.len() < 5 {
            Err(ParseRoomError::InvalidFormat)?
        };
        let expected_checksum = freqs[0..5].iter().map(|x| *x.0).collect::<String>();
        if expected_checksum != checksum { Err(ParseRoomError::IncorrectChecksum)? };
        // Decrypt each name component with a Caesar shift of `id`.
        let name = name.iter()
            .map(|comp| comp.chars().map(|letter| rotate(letter, id)).collect::<String>())
            .collect::<Vec<_>>()
            .join(" ");
        Ok(Room {
            name: name,
            id: id,
        })
    }
}
/// Rotate a lowercase ASCII letter forward by `n` positions, wrapping z -> a.
fn rotate(c: char, n: i32) -> char {
    assert!(c.is_alphabetic() && c.is_lowercase());
    // Work in 0..26 offset space, shift, wrap, then map back to a char.
    const ALPHABET_LEN: i32 = 26;
    let offset = (c as u8 - b'a') as i32;
    let rotated = (offset + n) % ALPHABET_LEN;
    (b'a' + rotated as u8) as char
}
/// Read `input.txt`, sum the sector ids of rooms with valid checksums, and
/// report the room whose decrypted name is "northpole object storage".
fn main() {
    let file = File::open("input.txt").unwrap();
    let file = BufReader::new(file);
    // Sanity parse of the example identifier; the result is deliberately discarded.
    let _ = Room::from_str("aaaaa-bbb-z-y-x-123[abxyz]");
    let rooms = file.lines()
        .map(|s| Room::from_str(&s.unwrap()));
    let mut sum = 0;
    for room in rooms {
        match room {
            Ok(room) => {
                // Only rooms with valid checksums contribute to the sum.
                sum += room.id;
                if room.name == "northpole object storage" {
                    println!("The North Pole objects are stored in room #{}.", room.id);
                }
            }
            // A checksum mismatch marks a decoy room: skip it silently.
            Err(ParseRoomError::IncorrectChecksum) => (),
            Err(error) => {
                println!("{}", error);
                exit(-1)
            },
        }
    };
    println!("The sum of valid ids is {}.", sum)
}
| true |
e40c186458cee81c1d3604db206ef39565bb3219
|
Rust
|
pointedGroup/openmls
|
/src/schedule/mod.rs
|
UTF-8
| 16,545 | 2.75 | 3 |
[
"MIT"
] |
permissive
|
//! This module represents the key schedule as introduced in Section 8 of the
//! MLS specification. The key schedule evolves in epochs, where in each epoch
//! new key material is injected.
//!
//! The flow of the key schedule is as follows (from Section 8 of the MLS
//! specification):
//!
//! ```text
//! init_secret_[n-1]
//! |
//! V
//! commit_secret -> KDF.Extract = joiner_secret
//! |
//! V
//! Derive-Secret(., "member")
//! |
//! V
//! psk_secret (or 0) -> KDF.Extract = member_secret
//! |
//! +--> Derive-Secret(., "welcome")
//! | = welcome_secret
//! |
//! V
//! ExpandWithLabel(., "epoch", GroupContext_[n], KDF.Nh)
//! |
//! V
//! epoch_secret
//! |
//! +--> Derive-Secret(., <label>)
//! | = <secret>
//! |
//! V
//! Derive-Secret(., "init")
//! |
//! V
//! init_secret_[n]
//! ```
//!
//! Each of the secrets in the key schedule (with exception of the
//! welcome_secret) is represented by its own struct to ensure that the keys are
//! not confused with one-another and/or that the schedule is not derived
//! out-of-order.
use crate::ciphersuite::*;
use crate::codec::*;
use crate::group::*;
use crate::messages::{proposals::AddProposal, GroupSecrets, PathSecret};
use crate::tree::index::LeafIndex;
use crate::tree::index::NodeIndex;
use crate::tree::private_tree::CommitSecret;
use crate::tree::secret_tree::SecretTree;
use crate::tree::treemath;
use crate::tree::RatchetTree;
use serde::{Deserialize, Serialize};
pub(crate) mod psk;
/// The `InitSecret` is used to connect the next epoch to the current one. It's
/// necessary to be able clone this to create a provisional group state, which
/// includes the `InitSecret`.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[cfg_attr(test, derive(PartialEq))]
pub struct InitSecret {
    secret: Secret,
}
impl InitSecret {
    /// Derive an `InitSecret` from an `EpochSecret`.
    fn from_epoch_secret(ciphersuite: &Ciphersuite, epoch_secret: &EpochSecret) -> Self {
        InitSecret {
            // Uses the fixed derivation label "init" (Section 8 of the MLS spec).
            secret: epoch_secret.secret.derive_secret(ciphersuite, "init"),
        }
    }
    /// Sample a fresh, random `InitSecret` for the creation of a new group.
    pub(crate) fn random(length: usize) -> Self {
        InitSecret {
            secret: Secret::random(length),
        }
    }
}
/// It's necessary to clone this to be able generate `GroupSecret` object with
/// multiple different `PathSecret` objects.
#[derive(Debug, Clone)]
pub struct JoinerSecret {
    secret: Secret,
}
impl JoinerSecret {
    /// Derive a `JoinerSecret` from an optional `CommitSecret` and an
    /// `EpochSecrets` object, which contains the necessary `InitSecret`. The
    /// `CommitSecret` needs to be present if the current commit is not an
    /// Add-only commit. TODO: For now, this takes a reference to a
    /// `CommitSecret` as input. This should change with #224.
    pub(crate) fn from_commit_and_epoch_secret(
        ciphersuite: &Ciphersuite,
        commit_secret_option: Option<&CommitSecret>,
        init_secret: InitSecret,
    ) -> Self {
        // An absent commit secret (Add-only commit) extracts with `None` input.
        let commit_secret_value = commit_secret_option.map(|commit_secret| commit_secret.secret());
        JoinerSecret {
            secret: ciphersuite.hkdf_extract(commit_secret_value, &init_secret.secret),
        }
    }
    /// Derive the inital `JoinerSecret` when creating a new group from a
    /// `CommitSecret`. The `InitSecret` is randomly generated. TODO:
    /// For now, this takes a reference to a `CommitSecret` as input. This
    /// should change with #224.
    fn from_commit_secret(ciphersuite: &Ciphersuite, commit_secret: &CommitSecret) -> Self {
        let initial_init_secret = InitSecret::random(ciphersuite.hash_length());
        JoinerSecret {
            secret: ciphersuite
                .hkdf_extract(Some(commit_secret.secret()), &initial_init_secret.secret),
        }
    }
    /// Create the `GroupSecrets` for a number of `invited_members` based on a
    /// provisional `RatchetTree`. If `path_secret_option` is `Some`, we need to
    /// include a `path_secret` into the `GroupSecrets`.
    ///
    /// Returns one `(hpke_init_key, encoded_group_secrets, key_package_hash)`
    /// triple per invited member.
    pub(crate) fn group_secrets(
        &self,
        invited_members: Vec<(NodeIndex, AddProposal)>,
        provisional_tree: &RatchetTree,
        mut path_secrets_option: Option<Vec<Secret>>,
    ) -> Vec<(HPKEPublicKey, Vec<u8>, Vec<u8>)> {
        // Get a Vector containing the node indices of the direct path to the
        // root from our own leaf.
        let dirpath = treemath::direct_path_root(
            provisional_tree.own_node_index(),
            provisional_tree.leaf_count(),
        )
        .expect("create_commit_internal: TreeMath error when computing direct path.");
        let mut plaintext_secrets = vec![];
        for (index, add_proposal) in invited_members {
            let key_package = add_proposal.key_package;
            let key_package_hash = key_package.hash();
            let path_secret = match path_secrets_option {
                Some(ref mut path_secrets) => {
                    // Compute the index of the common ancestor lowest in the
                    // tree of our own leaf and the given index.
                    let common_ancestor_index =
                        treemath::common_ancestor_index(index, provisional_tree.own_node_index());
                    // Get the position of the node index that represents the
                    // common ancestor in the direct path. We can unwrap here,
                    // because the direct path must contain the shared ancestor.
                    let position = dirpath
                        .iter()
                        .position(|&x| x == common_ancestor_index)
                        .unwrap();
                    // We have to clone the element of the vector here to
                    // preserve its order.
                    let path_secret = path_secrets[position].clone();
                    Some(PathSecret { path_secret })
                }
                None => None,
            };
            // Create the groupsecrets object for the respective member.
            let group_secrets = GroupSecrets::new(self.clone(), path_secret);
            let group_secrets_bytes = group_secrets.encode_detached().unwrap();
            plaintext_secrets.push((
                key_package.hpke_init_key().clone(),
                group_secrets_bytes,
                key_package_hash,
            ));
        }
        plaintext_secrets
    }
}
impl Codec for JoinerSecret {
    /// Encode the inner secret to the wire format.
    fn encode(&self, buffer: &mut Vec<u8>) -> Result<(), CodecError> {
        self.secret.encode(buffer)
    }
    /// Decode a `JoinerSecret` from its wire representation.
    fn decode(cursor: &mut Cursor) -> Result<Self, CodecError> {
        let secret = Secret::decode(cursor)?;
        Ok(JoinerSecret { secret })
    }
}
/// An intermediate secret in the key schedule. It can be used to derive the
/// `EpochSecret` and the secrets required to decrypt the `Welcome` message.
pub(crate) struct MemberSecret {
    secret: Secret,
}
impl MemberSecret {
    /// Derive a `MemberSecret` from a `CommitSecret` and a `JoinerSecret`
    /// object. This doesn't consume the `JoinerSecret` object, because we need
    /// it later in the `create_commit` function to create `GroupSecret`
    /// objects. TODO: The PSK should get its own dedicated type in the process
    /// of tackling issue #141.
    pub(crate) fn from_joiner_secret_and_psk(
        ciphersuite: &Ciphersuite,
        joiner_secret: JoinerSecret,
        psk: Option<Secret>,
    ) -> Self {
        // joiner_secret -> Derive-Secret(., "member"), then extract with the PSK.
        let intermediate_secret = joiner_secret.secret.derive_secret(ciphersuite, "member");
        MemberSecret {
            secret: ciphersuite.hkdf_extract(psk.as_ref(), &intermediate_secret),
        }
    }
    /// Derive an initial `MemberSecret` when creating a new group. This
    /// function should not be used when computing secrets in an existing group.
    /// TODO: The PSK should get its own dedicated type in the process of
    /// tackling issue #141.
    pub(crate) fn from_commit_secret_and_psk(
        ciphersuite: &Ciphersuite,
        commit_secret: &CommitSecret,
        psk: Option<Secret>,
    ) -> Self {
        let joiner_secret = JoinerSecret::from_commit_secret(ciphersuite, commit_secret);
        let intermediate_secret = joiner_secret.secret.derive_secret(ciphersuite, "member");
        MemberSecret {
            secret: ciphersuite.hkdf_extract(psk.as_ref(), &intermediate_secret),
        }
    }
    /// Derive an `AeadKey` and an `AeadNonce` from the `WelcomeSecret`,
    /// consuming it in the process.
    pub(crate) fn derive_welcome_key_nonce(
        &self,
        ciphersuite: &Ciphersuite,
    ) -> (AeadKey, AeadNonce) {
        let welcome_secret = WelcomeSecret::from_member_secret(&self, ciphersuite);
        welcome_secret.derive_welcome_key_nonce(ciphersuite)
    }
}
/// Secret protecting the `Welcome` message sent to newly added members.
pub(crate) struct WelcomeSecret {
    secret: Secret,
}
impl WelcomeSecret {
    /// Derive a `WelcomeSecret` from to decrypt a `Welcome` message.
    pub(crate) fn from_member_secret(
        member_secret: &MemberSecret,
        ciphersuite: &Ciphersuite,
    ) -> Self {
        // HKDF-Expand with the fixed label "mls 1.0 welcome" to the hash length.
        let secret = ciphersuite
            .hkdf_expand(
                &member_secret.secret,
                b"mls 1.0 welcome",
                ciphersuite.hash_length(),
            )
            .unwrap();
        WelcomeSecret { secret }
    }
    /// Get the `Secret` of the `WelcomeSecret`.
    pub(crate) fn secret(&self) -> &Secret {
        &self.secret
    }
    /// Derive an `AeadKey` and an `AeadNonce` from the `WelcomeSecret`,
    /// consuming it in the process.
    fn derive_welcome_key_nonce(self, ciphersuite: &Ciphersuite) -> (AeadKey, AeadNonce) {
        let welcome_nonce = AeadNonce::from_welcome_secret(ciphersuite, &self);
        let welcome_key = AeadKey::from_welcome_secret(ciphersuite, &self);
        (welcome_key, welcome_nonce)
    }
}
/// An intermediate secret in the key schedule, the `EpochSecret` is used to
/// create an `EpochSecrets` object and is finally consumed when creating that
/// epoch's `InitSecret`.
pub(crate) struct EpochSecret {
    secret: Secret,
}
impl EpochSecret {
    /// Derive an `EpochSecret` from a `MemberSecret`, consuming it in the
    /// process.
    fn from_member_secret(
        ciphersuite: &Ciphersuite,
        group_context: &GroupContext,
        member_secret: MemberSecret,
    ) -> Self {
        EpochSecret {
            // ExpandWithLabel(., "epoch", GroupContext_[n], KDF.Nh) per the spec.
            secret: member_secret.secret.kdf_expand_label(
                ciphersuite,
                "epoch",
                &group_context.serialize(),
                ciphersuite.hash_length(),
            ),
        }
    }
    //
}
/// The `EncryptionSecret` is used to create a `SecretTree`.
pub struct EncryptionSecret {
    secret: Secret,
}
impl EncryptionSecret {
    /// Derive an encryption secret from a reference to an `EpochSecret`.
    fn from_epoch_secret(ciphersuite: &Ciphersuite, epoch_secret: &EpochSecret) -> Self {
        EncryptionSecret {
            secret: epoch_secret.secret.derive_secret(ciphersuite, "encryption"),
        }
    }
    /// Create a `SecretTree` from the `encryption_secret` contained in the
    /// `EpochSecrets`. The `encryption_secret` is replaced with `None` in the
    /// process, allowing us to achieve FS.
    pub fn create_secret_tree(self, treesize: LeafIndex) -> SecretTree {
        SecretTree::new(self, treesize)
    }
    /// Consume the wrapper and hand out the raw `Secret`.
    pub(crate) fn consume_secret(self) -> Secret {
        self.secret
    }
    /// Create a random `EncryptionSecret`. For testing purposes only.
    #[cfg(test)]
    #[doc(hidden)]
    pub fn from_random(length: usize) -> Self {
        EncryptionSecret {
            secret: Secret::random(length),
        }
    }
    #[cfg(all(test, feature = "test-vectors"))]
    #[doc(hidden)]
    pub fn to_vec(&self) -> Vec<u8> {
        self.secret.to_vec()
    }
}
// Test-only conversion from raw bytes (used by known-answer test vectors).
#[cfg(test)]
#[doc(hidden)]
impl From<&[u8]> for EncryptionSecret {
    fn from(bytes: &[u8]) -> Self {
        Self {
            secret: Secret::from(bytes),
        }
    }
}
/// A secret that we can derive secrets from, that are used outside of OpenMLS.
#[derive(Debug, Serialize, Deserialize)]
#[cfg_attr(test, derive(PartialEq))]
pub(crate) struct ExporterSecret {
    secret: Secret,
}
impl ExporterSecret {
    /// Derive an `ExporterSecret` from an `EpochSecret`.
    pub(crate) fn from_epoch_secret(ciphersuite: &Ciphersuite, epoch_secret: &EpochSecret) -> Self {
        // Fixed derivation label "exporter".
        let secret = epoch_secret.secret.derive_secret(ciphersuite, "exporter");
        ExporterSecret { secret }
    }
    /// Get the `Secret` of the `ExporterSecret`.
    pub(crate) fn secret(&self) -> &Secret {
        &self.secret
    }
}
/// A key that can be used to derive an `AeadKey` and an `AeadNonce`.
#[derive(Debug, Serialize, Deserialize)]
#[cfg_attr(test, derive(PartialEq))]
pub(crate) struct SenderDataSecret {
    secret: Secret,
}
impl SenderDataSecret {
    /// Derive a `SenderDataSecret` from an `EpochSecret`.
    pub(crate) fn from_epoch_secret(ciphersuite: &Ciphersuite, epoch_secret: &EpochSecret) -> Self {
        // Fixed derivation label "sender data" (note the space).
        let secret = epoch_secret
            .secret
            .derive_secret(ciphersuite, "sender data");
        SenderDataSecret { secret }
    }
    /// Get the `Secret` of the `SenderDataSecret`.
    pub(crate) fn secret(&self) -> &Secret {
        &self.secret
    }
    #[cfg(all(test, feature = "test-vectors"))]
    #[doc(hidden)]
    pub fn from_random(length: usize) -> Self {
        Self {
            secret: Secret::random(length),
        }
    }
    #[cfg(all(test, feature = "test-vectors"))]
    #[doc(hidden)]
    pub fn to_vec(&self) -> Vec<u8> {
        self.secret.to_vec()
    }
}
// Test-only conversion from raw bytes (used by known-answer test vectors).
#[cfg(test)]
#[doc(hidden)]
impl From<&[u8]> for SenderDataSecret {
    fn from(bytes: &[u8]) -> Self {
        Self {
            secret: Secret::from(bytes),
        }
    }
}
/// The `EpochSecrets` contain keys (or secrets), which are accessible outside
/// of the `KeySchedule` and which don't get consumed immediately upon first
/// use.
#[derive(Debug, Serialize, Deserialize)]
#[cfg_attr(test, derive(PartialEq))]
pub(crate) struct EpochSecrets {
    sender_data_secret: SenderDataSecret,
    pub(crate) exporter_secret: ExporterSecret,
    confirmation_key: Secret,
}
impl EpochSecrets {
    /// Get the sender_data secret.
    pub(crate) fn sender_data_secret(&self) -> &SenderDataSecret {
        &self.sender_data_secret
    }
    /// Get the confirmation key.
    pub(crate) fn confirmation_key(&self) -> &Secret {
        &self.confirmation_key
    }
    /// Derive `EpochSecrets`, as well as an `EncryptionSecret` and an
    /// `InitSecret` from a `MemberSecret` and a given `GroupContext`. This
    /// method is only used when initially creating a new `MlsGroup` state.
    pub(crate) fn derive_epoch_secrets(
        ciphersuite: &Ciphersuite,
        member_secret: MemberSecret,
        group_context: &GroupContext,
    ) -> (Self, InitSecret, EncryptionSecret) {
        // All epoch-level secrets fan out from the single epoch_secret.
        let epoch_secret =
            EpochSecret::from_member_secret(ciphersuite, group_context, member_secret);
        let sender_data_secret = SenderDataSecret::from_epoch_secret(ciphersuite, &epoch_secret);
        let encryption_secret = EncryptionSecret::from_epoch_secret(ciphersuite, &epoch_secret);
        let exporter_secret = ExporterSecret::from_epoch_secret(ciphersuite, &epoch_secret);
        let confirmation_key = epoch_secret.secret.derive_secret(ciphersuite, "confirm");
        let init_secret = InitSecret::from_epoch_secret(ciphersuite, &epoch_secret);
        let epoch_secrets = EpochSecrets {
            sender_data_secret,
            exporter_secret,
            confirmation_key,
        };
        (epoch_secrets, init_secret, encryption_secret)
    }
    #[cfg(all(test, feature = "test-vectors"))]
    #[doc(hidden)]
    pub(crate) fn sender_data_secret_mut(&mut self) -> &mut SenderDataSecret {
        &mut self.sender_data_secret
    }
}
| true |
6107ec3a46aedeef15682dd10eaabe85ca155393
|
Rust
|
oknozor/m1s2_compil
|
/src/token/to_token.rs
|
UTF-8
| 11,188 | 2.84375 | 3 |
[] |
no_license
|
use std::collections::HashMap;
use crate::ast::expression::Expression::*;
use crate::ast::expression::*;
use crate::ast::statement::Statement;
use crate::ast::statement::Statement::*;
use crate::token::token::*;
use crate::token::token::Operator::*;
use crate::token::token::Token::*;
use crate::visitor::Visitor;
use crate::token::Precedence;
use std::path::Component::Prefix;
/// Working state for the shunting-yard conversion: `out_queue` collects the
/// postfix output, `op_queue` is the pending-operator stack.
#[derive(Debug)]
pub struct Stack {
    pub out_queue: Vec<Token>,
    pub op_queue: Vec<Operator>,
}
impl Stack {
    /// Create an empty stack (no output tokens, no pending operators).
    pub fn new() -> Self {
        Stack {
            out_queue: vec![],
            op_queue: vec![],
        }
    }
}
/// Flattens an AST node into a stream of lexical tokens.
/// NOTE(review): implementations appear to emit tokens in reverse source
/// order so that `postfix` can consume them via `Vec::pop` — confirm.
pub trait ToToken {
    fn to_token(&self) -> Vec<Token>;
}
impl ToToken for Box<Expression> {
    /// Dispatch tokenization to the concrete expression variant.
    fn to_token(&self) -> Vec<Token> {
        match self {
            box Expression::BinaryExpression(bin_exp) => bin_exp.to_token(),
            box Expression::UnaryExpression(unary_exp) => unary_exp.to_token(),
            box Expression::NumericLiteral(numeric) => numeric.to_token(),
            box Expression::StringLiteral(string) => string.to_token(),
            box Expression::Identifier(identifier) => identifier.to_token(),
            box Expression::UpdateExpression(update) => update.to_token(),
            // Single CallExpression arm: the original listed this pattern
            // twice, leaving the second occurrence unreachable.
            box Expression::CallExpression(call) => call.to_token(),
            box Expression::AssignmentExpression(assign) => assign.to_token(),
            box Expression::LogicalExpression(log) => log.to_token(),
            box Expression::MemberExpression(member) => member.to_token(),
            box Expression::ObjectExpression(object) => object.to_token(),
        }
    }
}
impl ToToken for BinaryExp {
    /// Tokenize a binary expression. Operands are emitted right-first and
    /// parentheses are swapped because the stream is consumed back-to-front
    /// (see `postfix`, which drains via `Vec::pop`).
    fn to_token(&self) -> Vec<Token> {
        let mut token_stream = vec![];
        // `extra` only signals a parenthesized expression; the previous
        // `if let Some(extra)` bound an unused variable.
        if self.extra.is_some() {
            token_stream.push(OperatorToken(RightParenthesis));
        };
        let op: BinaryOperator = BinaryOperator::from(self.operator.as_str());
        match &self.right {
            box NumericLiteral(num) => token_stream.push(Token::LiteralToken(Literal::NumericLiteral(num.value))),
            box StringLiteral(string) => token_stream.push(Token::LiteralToken(Literal::StringLiteral(string.value.clone()))),
            box BinaryExpression(bin) => token_stream.append(&mut bin.to_token()),
            _ => unimplemented!()
        };
        token_stream.push(OperatorToken(Operator::BinOp(op)));
        match &self.left {
            box NumericLiteral(num) => token_stream.push(Token::LiteralToken(Literal::NumericLiteral(num.value))),
            box StringLiteral(string) => token_stream.push(Token::LiteralToken(Literal::StringLiteral(string.value.clone()))),
            box BinaryExpression(bin) => token_stream.append(&mut bin.to_token()),
            _ => unimplemented!()
        };
        if self.extra.is_some() {
            token_stream.push(OperatorToken(LeftParenthesis));
        };
        token_stream
    }
}
impl ToToken for UnaryExp {
    /// Tokenize a unary expression's argument.
    fn to_token(&self) -> Vec<Token> {
        let mut token_stream = vec![];
        // NOTE(review): `op` is parsed but never pushed onto the stream, so
        // the unary operator is silently dropped — confirm whether intended.
        let op = UnaryOperator::from(self.operator.as_str());
        token_stream.extend_from_slice(self.argument.to_token().as_slice());
        token_stream
    }
}
impl ToToken for StringLit {
    /// Convert a string literal into a single-token stream.
    fn to_token(&self) -> Vec<Token> {
        vec![Token::LiteralToken(Literal::StringLiteral(self.value.clone()))]
    }
}
impl ToToken for NumericLit {
    /// Convert a numeric literal into a single-token stream.
    fn to_token(&self) -> Vec<Token> {
        vec![Token::LiteralToken(Literal::NumericLiteral(self.value))]
    }
}
impl ToToken for Id {
    /// Convert an identifier into a single-token stream.
    /// (`IdendifierToken` spelling matches the token enum's variant name.)
    fn to_token(&self) -> Vec<Token> {
        vec![Token::IdendifierToken(self.name.clone())]
    }
}
impl ToToken for UpdateExp {
    /// Emit the argument's tokens followed by the update operator.
    fn to_token(&self) -> Vec<Token> {
        let mut tokens = self.argument.to_token();
        let op = UpdateOperator::from(self.operator.as_str());
        tokens.push(Token::OperatorToken(UpdateOp(op)));
        tokens
    }
}
impl ToToken for AssignmentExp {
    /// Tokenize both sides of an assignment.
    fn to_token(&self) -> Vec<Token> {
        let mut token_stream = vec![];
        token_stream.extend_from_slice(self.left.to_token().as_slice());
        // NOTE(review): the assignment operator is parsed but never emitted,
        // so the output contains only the operands — confirm this is intended.
        let op = AssignmentOperator::from(self.operator.as_str());
        token_stream.extend_from_slice(self.right.to_token().as_slice());
        token_stream
    }
}
impl ToToken for LogicalExp {
    /// Tokenize both sides of a logical expression.
    fn to_token(&self) -> Vec<Token> {
        let mut token_stream = vec![];
        token_stream.extend_from_slice(self.left.to_token().as_slice());
        // NOTE(review): parses the operator with `AssignmentOperator::from`
        // (likely copy-paste from AssignmentExp) and never emits it — confirm.
        let op = AssignmentOperator::from(self.operator.as_str());
        token_stream.extend_from_slice(self.right.to_token().as_slice());
        token_stream
    }
}
impl ToToken for MemberExp {
    /// Tokenize only the property part of the member expression.
    fn to_token(&self) -> Vec<Token> {
        self.property.to_token()
    }
}
impl ToToken for CallExp {
    /// Fold a call expression into a single `FunctionToken` carrying the
    /// callee name (empty when the callee is not a plain identifier) and the
    /// flattened argument tokens.
    fn to_token(&self) -> Vec<Token> {
        let callee = if let box Expression::Identifier(id) = &self.callee {
            id.name.clone()
        } else {
            String::new()
        };
        let mut args = Vec::new();
        for arg in &self.arguments {
            args.extend_from_slice(arg.to_token().as_slice());
        }
        vec![FunctionToken(Call { args, callee })]
    }
}
impl ToToken for ObjectExp {
    /// Tokenize each property as value-tokens followed by key-tokens.
    fn to_token(&self) -> Vec<Token> {
        let mut tokens = Vec::new();
        for prop in &self.properties {
            tokens.extend_from_slice(prop.value.to_token().as_slice());
            tokens.extend_from_slice(prop.key.to_token().as_slice());
        }
        tokens
    }
}
/// Convert an infix token stream into postfix (RPN) order using the
/// shunting-yard algorithm. The input vector is drained back-to-front via
/// `pop`, so callers supply the tokens in reverse source order.
pub fn postfix(tokens: &mut Vec<Token>) -> Vec<Token> {
    let mut postfix_expression = vec![];
    let mut stack = Stack::new();
    while let Some(token_in) = tokens.pop() {
        match &token_in {
            // Operands go straight to the output.
            LiteralToken(_) => postfix_expression.push(token_in.clone()),
            FunctionToken(_) => unimplemented!(),
            OperatorToken(LeftParenthesis) => stack.op_queue.push(LeftParenthesis),
            OperatorToken(RightParenthesis) => {
                // Pop operators to the output until the matching '(' surfaces,
                // then discard the '(' itself.
                while stack.op_queue.last().expect("missing left parenthesis") != &LeftParenthesis {
                    postfix_expression.push(Token::from(stack.op_queue.pop().unwrap()));
                };
                if let Some(LeftParenthesis) = stack.op_queue.last() {
                    stack.op_queue.pop();
                };
            }
            OperatorToken(op_in) => {
                // Flush stacked operators of higher precedence (stopping at
                // any '(') before pushing the incoming operator.
                while stack.op_queue.last().is_some() &&
                    Operator::get_precedence(stack.op_queue.last().unwrap(), op_in) &&
                    !is_left_parenthesis(stack.op_queue.last().unwrap()) {
                    let op = OperatorToken(stack.op_queue.pop().unwrap());
                    postfix_expression.push(op);
                };
                stack.op_queue.push(*op_in);
            }
            IdendifierToken(_) => unimplemented!(),
            Undefined => unimplemented!(),
        };
    }
    // Flush any operators remaining on the stack.
    while !stack.op_queue.is_empty() {
        let leftover = stack.op_queue.pop();
        postfix_expression.push(Token::from(leftover.unwrap()));
    };
    postfix_expression
}
/// Returns `true` when the operator is a left parenthesis.
fn is_left_parenthesis(operator: &Operator) -> bool {
    // `matches!` replaces the redundant `if let … { true } else { false }`.
    matches!(operator, LeftParenthesis)
}
/// Returns `true` when the operator on the stack has greater precedence
/// than the operator arriving from the input queue.
///
/// # Panics
/// Panics if either token is not an `OperatorToken`.
fn operator_on_stack_as_greater_precedence(op_from_stack: &Token, op_from_queue: &Token) -> bool {
    let op_from_stack = match op_from_stack {
        OperatorToken(operator) => operator,
        _ => panic!("not an op")
    };
    let op_in = match op_from_queue {
        OperatorToken(operator) => operator,
        _ => panic!("not an op")
    };
    // Return the comparison directly instead of the redundant
    // `if b { true } else { false }`.
    Operator::get_precedence(op_from_stack, op_in)
}
/// Returns `true` when the stack operator does NOT have greater
/// precedence than the incoming operator and is left-associative.
///
/// # Panics
/// Panics if either token is not an `OperatorToken`.
fn operator_on_stack_as_equal_precedence_and_is_left_associative(op_from_stack: &Token, op_from_queue: &Token) -> bool {
    let op_from_stack = match op_from_stack {
        OperatorToken(operator) => operator,
        _ => panic!("not an op")
    };
    let op_in = match op_from_queue {
        OperatorToken(operator) => operator,
        _ => panic!("not an op")
    };
    // Return the boolean expression directly instead of the redundant
    // `if b { true } else { false }`.
    !Operator::get_precedence(op_from_stack, op_in) && op_from_stack.is_left_associative()
}
/// Returns `true` when the token on top of the stack is a function token.
fn top_operator_is_function(top_operator: &Token) -> bool {
    // `matches!` replaces the redundant `if let … { true } else { false }`.
    matches!(top_operator, FunctionToken(_))
}
/// Returns `true` when the token on top of the stack is a left
/// parenthesis operator.
fn top_operator_is_left_parenthesis(top_operator: &Token) -> bool {
    // `matches!` replaces the redundant `if let … { true } else { false }`.
    matches!(top_operator, OperatorToken(LeftParenthesis))
}
#[cfg(test)]
mod tests {
    use crate::token::token::Token::LiteralToken;
    use crate::token::token::Literal::NumericLiteral;
    use crate::token::token::Token::OperatorToken;
    use crate::token::token::Operator;
    use crate::token::token::Operator::*;
    use crate::token::token::UnaryOperator::Plus;
    use crate::token::to_token::postfix;
    use crate::token::token::Token;
    use crate::token::token::BinaryOperator::*;

    #[test]
    fn should_postfix_expression() {
        // 1 + 1
        let mut token_in: Vec<Token> = vec![
            LiteralToken(NumericLiteral(1.0)),
            OperatorToken(Operator::BinOp(Add)),
            LiteralToken(NumericLiteral(1.0)),
        ];
        let expected = vec![
            LiteralToken(NumericLiteral(1.0)),
            LiteralToken(NumericLiteral(1.0)),
            OperatorToken(Operator::BinOp(Add)),
        ];
        assert_eq!(postfix(&mut token_in), expected);
    }

    #[test]
    fn should_postfix_parenthesized_expression() {
        // 2 * ( 1 + 1 ), supplied in reverse order because `postfix`
        // consumes its input from the back.
        let mut token_in: Vec<Token> = vec![
            OperatorToken(RightParenthesis),
            LiteralToken(NumericLiteral(1.0)),
            OperatorToken(Operator::BinOp(Add)),
            LiteralToken(NumericLiteral(1.0)),
            OperatorToken(LeftParenthesis),
            OperatorToken(Operator::BinOp(Mul)),
            LiteralToken(NumericLiteral(2.0)),
        ];
        let expected = vec![
            LiteralToken(NumericLiteral(2.0)),
            LiteralToken(NumericLiteral(1.0)),
            LiteralToken(NumericLiteral(1.0)),
            OperatorToken(Operator::BinOp(Add)),
            OperatorToken(Operator::BinOp(Mul)),
        ];
        assert_eq!(postfix(&mut token_in), expected);
    }
}
| true |
6cc7859ef5f2c2c303c3d7878a12de15c2929a2a
|
Rust
|
NLnetLabs/rpki-rs
|
/src/repository/resources/set.rs
|
UTF-8
| 13,590 | 2.859375 | 3 |
[
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
use std::convert::TryFrom;
use std::fmt;
use std::str::FromStr;
#[cfg(feature = "serde")]
use serde::{Deserialize, Serialize};
use crate::repository::resources::{AsResources, IpResources};
use crate::repository::Cert;
use crate::repository::{
resources::{AsBlock, AsBlocks, AsBlocksBuilder, Ipv4Blocks, Ipv6Blocks},
roa::RoaIpAddress,
};
use crate::resources::asn::Asn;
//------------ ResourceSet ---------------------------------------------------
/// A set of ASN, IPv4 and IPv6 resources.
#[derive(Clone, Debug, Eq, PartialEq)]
#[cfg_attr(feature = "serde", derive(Deserialize, Serialize))]
pub struct ResourceSet {
asn: AsBlocks,
#[cfg_attr(feature = "serde", serde(alias = "v4"))]
ipv4: Ipv4Blocks,
#[cfg_attr(feature = "serde", serde(alias = "v6"))]
ipv6: Ipv6Blocks,
}
impl ResourceSet {
    /// Creates a new set from explicit blocks for each resource family.
    pub fn new(asn: AsBlocks, ipv4: Ipv4Blocks, ipv6: Ipv6Blocks) -> Self {
        ResourceSet { asn, ipv4, ipv6 }
    }
    /// Parses a set from one string per resource family.
    ///
    /// # Errors
    /// Returns a [`FromStrError`] identifying the family that failed to
    /// parse.
    pub fn from_strs(asn: &str, ipv4: &str, ipv6: &str) -> Result<Self, FromStrError> {
        let asn = AsBlocks::from_str(asn).map_err(FromStrError::asn)?;
        let ipv4 = Ipv4Blocks::from_str(ipv4).map_err(FromStrError::ipv4)?;
        let ipv6 = Ipv6Blocks::from_str(ipv6).map_err(FromStrError::ipv6)?;
        Ok(ResourceSet { asn, ipv4, ipv6 })
    }
    /// Returns the empty set: no resources in any family.
    pub fn empty() -> ResourceSet {
        Self::default()
    }
    /// Returns the set covering all ASN, IPv4 and IPv6 resources.
    pub fn all() -> ResourceSet {
        ResourceSet {
            asn: AsBlocks::all(),
            ipv4: Ipv4Blocks::all(),
            ipv6: Ipv6Blocks::all(),
        }
    }
    /// Returns whether the set contains no resources at all.
    pub fn is_empty(&self) -> bool {
        self.asn.is_empty() && self.ipv4.is_empty() && self.ipv6.is_empty()
    }
    /// Replaces the ASN resources.
    pub fn set_asn(&mut self, asn: AsBlocks) {
        self.asn = asn;
    }
    /// Replaces the IPv4 resources.
    pub fn set_ipv4(&mut self, ipv4: Ipv4Blocks) {
        self.ipv4 = ipv4;
    }
    /// Replaces the IPv6 resources.
    pub fn set_ipv6(&mut self, ipv6: Ipv6Blocks) {
        self.ipv6 = ipv6;
    }
    /// Returns the ASN resources.
    pub fn asn(&self) -> &AsBlocks {
        &self.asn
    }
    /// Converts the ASN resources into an [`AsResources`] value.
    pub fn to_as_resources(&self) -> AsResources {
        AsResources::blocks(self.asn.clone())
    }
    /// Returns the IPv4 resources.
    pub fn ipv4(&self) -> &Ipv4Blocks {
        &self.ipv4
    }
    /// Converts the IPv4 resources into an [`IpResources`] value.
    pub fn to_ip_resources_v4(&self) -> IpResources {
        self.ipv4.to_ip_resources()
    }
    /// Returns the IPv6 resources.
    pub fn ipv6(&self) -> &Ipv6Blocks {
        &self.ipv6
    }
    /// Converts the IPv6 resources into an [`IpResources`] value.
    pub fn to_ip_resources_v6(&self) -> IpResources {
        self.ipv6.to_ip_resources()
    }
    /// Returns None if there are no ASNs in this ResourceSet.
    pub fn asn_opt(&self) -> Option<&AsBlocks> {
        if self.asn.is_empty() {
            None
        } else {
            Some(&self.asn)
        }
    }
    /// Returns None if there is no IPv4 in this ResourceSet.
    pub fn ipv4_opt(&self) -> Option<&Ipv4Blocks> {
        if self.ipv4.is_empty() {
            None
        } else {
            Some(&self.ipv4)
        }
    }
    /// Returns None if there is no IPv6 in this ResourceSet.
    pub fn ipv6_opt(&self) -> Option<&Ipv6Blocks> {
        if self.ipv6.is_empty() {
            None
        } else {
            Some(&self.ipv6)
        }
    }
    /// Check if the other set is contained by this set. If this set
    /// contains inherited resources, then any explicit corresponding
    /// resources in the other set will be considered to fall outside of
    /// this set.
    pub fn contains(&self, other: &ResourceSet) -> bool {
        self.asn.contains(other.asn())
            && self.ipv4.contains(&other.ipv4)
            && self.ipv6.contains(&other.ipv6)
    }
    /// Check if the resource set contains the given Asn
    pub fn contains_asn(&self, asn: Asn) -> bool {
        // Wrap the single ASN into a one-element block set for the check.
        let mut blocks = AsBlocksBuilder::new();
        blocks.push(AsBlock::Id(asn));
        let blocks = blocks.finalize();
        self.asn.contains(&blocks)
    }
    /// Check if the resource set contains the given ROA address
    /// (in either address family).
    pub fn contains_roa_address(&self, roa_address: &RoaIpAddress) -> bool {
        self.ipv4.contains_roa(roa_address) || self.ipv6.contains_roa(roa_address)
    }
    /// Returns the union of this ResourceSet and the other. I.e. a new
    /// ResourceSet containing all resources found in one or both.
    pub fn union(&self, other: &ResourceSet) -> Self {
        let asn = self.asn.union(&other.asn);
        let ipv4 = self.ipv4.union(&other.ipv4).into();
        let ipv6 = self.ipv6.union(&other.ipv6).into();
        ResourceSet { asn, ipv4, ipv6 }
    }
    /// Returns the intersection of this ResourceSet and the other. I.e. a new
    /// ResourceSet containing all resources found in both sets.
    pub fn intersection(&self, other: &ResourceSet) -> Self {
        let asn = self.asn.intersection(&other.asn);
        let ipv4 = self.ipv4.intersection(&other.ipv4).into();
        let ipv6 = self.ipv6.intersection(&other.ipv6).into();
        ResourceSet { asn, ipv4, ipv6 }
    }
    /// Returns the difference from another ResourceSet towards `self`:
    /// `added` holds resources present in `self` but not in `other`,
    /// while `removed` holds resources present in `other` but not in
    /// `self`.
    pub fn difference(&self, other: &ResourceSet) -> ResourceDiff {
        let added = ResourceSet {
            asn: self.asn.difference(&other.asn),
            ipv4: self.ipv4.difference(&other.ipv4).into(),
            ipv6: self.ipv6.difference(&other.ipv6).into(),
        };
        let removed = ResourceSet {
            asn: other.asn.difference(&self.asn),
            ipv4: other.ipv4.difference(&self.ipv4).into(),
            ipv6: other.ipv6.difference(&self.ipv6).into(),
        };
        ResourceDiff { added, removed }
    }
}
impl Default for ResourceSet {
    /// The default set is the empty set: no ASNs and no IP space of
    /// either family.
    fn default() -> Self {
        Self::new(AsBlocks::empty(), Ipv4Blocks::empty(), Ipv6Blocks::empty())
    }
}
//--- Display
impl fmt::Display for ResourceSet {
    /// Renders the set as `asn: '…', ipv4: '…', ipv6: '…'`.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "asn: '{}', ", self.asn)?;
        write!(f, "ipv4: '{}', ", self.ipv4)?;
        write!(f, "ipv6: '{}'", self.ipv6)
    }
}
impl TryFrom<&Cert> for ResourceSet {
    type Error = InheritError;

    /// Builds the explicit resource set of a certificate.
    ///
    /// # Errors
    /// Returns [`InheritError`] if any resource family on the certificate
    /// uses the "inherit" choice instead of explicit blocks (i.e.
    /// `to_blocks` fails).
    fn try_from(cert: &Cert) -> Result<Self, Self::Error> {
        // Map each `to_blocks` failure to `InheritError` instead of
        // matching and re-wrapping by hand.
        let asn = cert.as_resources().to_blocks().map_err(|_| InheritError)?;
        let ipv4 = cert.v4_resources().to_blocks().map_err(|_| InheritError)?.into();
        let ipv6 = cert.v6_resources().to_blocks().map_err(|_| InheritError)?.into();
        Ok(ResourceSet { asn, ipv4, ipv6 })
    }
}
//------------ ResourceDiff --------------------------------------------------
/// The change between two [`ResourceSet`]s, as produced by
/// [`ResourceSet::difference`].
#[derive(Clone, Debug, Eq, PartialEq)]
#[cfg_attr(feature = "serde", derive(Deserialize, Serialize))]
pub struct ResourceDiff {
    // Resources present in the receiver of `difference` but not in the
    // other set.
    added: ResourceSet,
    // Resources present in the other set but not in the receiver.
    removed: ResourceSet,
}
impl ResourceDiff {
    /// Returns `true` if the diff contains no changes at all.
    pub fn is_empty(&self) -> bool {
        self.added.is_empty() && self.removed.is_empty()
    }
}
impl fmt::Display for ResourceDiff {
    /// Renders the diff as `Added: …` and/or `Removed: …` sections,
    /// listing only the families that actually changed; an empty diff
    /// renders as `<no changes in resources>`.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        if self.is_empty() {
            write!(f, "<no changes in resources>")?;
        }
        if !self.added.is_empty() {
            write!(f, "Added:")?;
            if !self.added.asn.is_empty() {
                write!(f, " asn: {}", self.added.asn)?;
            }
            if !self.added.ipv4.is_empty() {
                write!(f, " ipv4: {}", self.added.ipv4())?;
            }
            if !self.added.ipv6.is_empty() {
                write!(f, " ipv6: {}", self.added.ipv6())?;
            }
            // Separator between the "Added" and "Removed" sections.
            if !self.removed.is_empty() {
                write!(f, " ")?;
            }
        }
        if !self.removed.is_empty() {
            write!(f, "Removed:")?;
            if !self.removed.asn.is_empty() {
                write!(f, " asn: {}", self.removed.asn)?;
            }
            if !self.removed.ipv4.is_empty() {
                write!(f, " ipv4: {}", self.removed.ipv4())?;
            }
            if !self.removed.ipv6.is_empty() {
                write!(f, " ipv6: {}", self.removed.ipv6())?;
            }
        }
        Ok(())
    }
}
//------------ InheritError --------------------------------------------------
/// Error: a certificate uses the "inherit" choice for its resources, so
/// an explicit [`ResourceSet`] cannot be derived from it.
#[derive(Clone, Debug)]
pub struct InheritError;
impl fmt::Display for InheritError {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "cannot determine resources for certificate using inherit")
    }
}
impl std::error::Error for InheritError {}
//------------ FromStrError --------------------------------------------------
/// An error happened while parsing a [`ResourceSet`] from strings.
///
/// Each variant names the family that failed and carries the stringified
/// underlying parse error.
#[derive(Clone, Debug, Eq, PartialEq)]
pub enum FromStrError {
    Asn(String),
    Ipv4(String),
    Ipv6(String)
}
impl FromStrError {
    // Internal constructors that convert the family-specific parse
    // errors into the matching variant, keeping only the rendered
    // message.
    fn asn(e: super::asres::FromStrError) -> Self {
        Self::Asn(e.to_string())
    }
    fn ipv4(e: super::ipres::FromStrError) -> Self {
        Self::Ipv4(e.to_string())
    }
    fn ipv6(e: super::ipres::FromStrError) -> Self {
        Self::Ipv6(e.to_string())
    }
}
impl fmt::Display for FromStrError {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // Pick the family label and the underlying message, then render
        // them with a single format string.
        let (family, err) = match self {
            FromStrError::Asn(e) => ("ASN", e),
            FromStrError::Ipv4(e) => ("IPv4", e),
            FromStrError::Ipv6(e) => ("IPv6", e),
        };
        write!(f, "cannot parse {} resources: {}", family, err)
    }
}
impl std::error::Error for FromStrError {}
//------------ Tests ---------------------------------------------------------
#[cfg(test)]
mod tests {
    use super::*;
    // Intersecting with the full set must return the smaller set
    // unchanged.
    #[test]
    fn test_resource_set_intersection() {
        let child_resources = ResourceSet::from_strs("AS65000", "10.0.0.0/8", "fd00::/8").unwrap();
        let parent_resources = ResourceSet::all();
        let intersection = parent_resources.intersection(&child_resources);
        assert_eq!(intersection, child_resources);
    }
    // `difference` reports additions (in set1, not set2) and removals
    // (in set2, not set1) per family.
    #[test]
    fn resource_set_difference() {
        let set1_asns = "AS65000-AS65003, AS65005";
        let set2_asns = "AS65000, AS65003, AS65005";
        let asn_added = "AS65001-AS65002";
        let set1_ipv4s = "10.0.0.0-10.4.5.6, 192.168.0.0";
        let set2_ipv4s = "10.0.0.0/8, 192.168.0.0";
        let ipv4_removed = "10.4.5.7-10.255.255.255";
        let set1_ipv6s = "::1, 2001:db8::/32";
        let set2_ipv6s = "::1, 2001:db8::/56";
        let ipv6_added = "2001:db8:0:100::-2001:db8:ffff:ffff:ffff:ffff:ffff:ffff";
        let set1 = ResourceSet::from_strs(set1_asns, set1_ipv4s, set1_ipv6s).unwrap();
        let set2 = ResourceSet::from_strs(set2_asns, set2_ipv4s, set2_ipv6s).unwrap();
        let diff = set1.difference(&set2);
        let expected_diff = ResourceDiff {
            added: ResourceSet::from_strs(asn_added, "", ipv6_added).unwrap(),
            removed: ResourceSet::from_strs("", ipv4_removed, "").unwrap(),
        };
        assert!(!diff.is_empty());
        assert_eq!(expected_diff, diff);
    }
    // A change in any single family must make the whole set compare
    // unequal.
    #[test]
    fn resource_set_eq() {
        let asns = "AS65000-AS65003, AS65005";
        let ipv4s = "10.0.0.0/8, 192.168.0.0";
        let ipv6s = "::1, 2001:db8::/32";
        let resource_set = ResourceSet::from_strs(asns, ipv4s, ipv6s).unwrap();
        let asns_2 = "AS65000-AS65003";
        let ipv4s_2 = "192.168.0.0";
        let ipv6s_2 = "2001:db8::/32";
        let resource_set_asn_differs = ResourceSet::from_strs(asns_2, ipv4s, ipv6s).unwrap();
        let resource_set_v4_differs = ResourceSet::from_strs(asns, ipv4s_2, ipv6s).unwrap();
        let resource_set_v6_differs = ResourceSet::from_strs(asns, ipv4s, ipv6s_2).unwrap();
        let resource_set_2 = ResourceSet::from_strs(asns_2, ipv4s_2, ipv6s_2).unwrap();
        assert_ne!(resource_set, resource_set_asn_differs);
        assert_ne!(resource_set, resource_set_v4_differs);
        assert_ne!(resource_set, resource_set_v6_differs);
        assert_ne!(resource_set, resource_set_2);
        let default_set = ResourceSet::default();
        let certified = ResourceSet::from_strs(
            "",
            "10.0.0.0/16, 192.168.0.0/16",
            "2001:db8::/32, 2000:db8::/32",
        )
        .unwrap();
        assert_ne!(default_set, certified);
        assert_ne!(resource_set, certified);
    }
    #[test]
    fn resource_set_equivalent() {
        // Data may be unordered on input, or not use ranges etc. But
        // if the resources are the same then we should get equal
        // sets in the end.
        let asns_1 = "AS65000-AS65003, AS65005";
        let ipv4_1 = "10.0.0.0/8, 192.168.0.0";
        let ipv6_1 = "::1, 2001:db8::/32";
        let asns_2 = "AS65005, AS65001-AS65003, AS65000";
        let ipv4_2 = "192.168.0.0, 10.0.0.0/8, ";
        let ipv6_2 = "2001:db8::/32, ::1";
        let set_1 = ResourceSet::from_strs(asns_1, ipv4_1, ipv6_1).unwrap();
        let set_2 = ResourceSet::from_strs(asns_2, ipv4_2, ipv6_2).unwrap();
        assert_eq!(set_1, set_2);
    }
    // Round-trip through serde JSON must preserve equality.
    #[cfg(feature = "serde")]
    #[test]
    fn serialize_deserialize_resource_set() {
        let asns = "AS65000-AS65003, AS65005";
        let ipv4s = "10.0.0.0/8, 192.168.0.0";
        let ipv6s = "::1, 2001:db8::/32";
        let set = ResourceSet::from_strs(asns, ipv4s, ipv6s).unwrap();
        let json = serde_json::to_string(&set).unwrap();
        let deser_set = serde_json::from_str(&json).unwrap();
        assert_eq!(set, deser_set);
    }
}
| true |
4c71f4993937418a6dd34a90e9ad72b1a67f6858
|
Rust
|
Drevoed/vector
|
/lib/tracing-metrics/examples/yak_shave.rs
|
UTF-8
| 2,138 | 2.53125 | 3 |
[
"Apache-2.0"
] |
permissive
|
#[macro_use]
extern crate tracing;
extern crate hotmic;
extern crate serde_json;
extern crate tracing_env_logger;
extern crate tracing_fmt;
extern crate tracing_metrics;
use hotmic::Receiver;
use std::thread;
/// Shaves the given yak inside a `shave` tracing span, returning `true`
/// on success. Yak number 3 can never be located and so is never shaved.
fn shave(yak: usize) -> bool {
    trace_span!("shave", yak = yak).in_scope(|| {
        debug!(
            message = "hello! I'm gonna shave a yak.",
            excitement = "yay!"
        );
        // Guard clause: yak 3 is the designated failure case.
        if yak == 3 {
            warn!(target: "yak_events", "could not locate yak!");
            return false;
        }
        trace!(target: "yak_events", "yak shaved successfully");
        true
    })
}
fn main() {
    // Build the metrics receiver: the sink accepts samples, the
    // controller lets us pull snapshots at the end.
    let mut receiver = Receiver::builder().build();
    let sink = receiver.get_sink();
    let controller = receiver.get_controller();
    // Run the receiver's processing loop on its own thread.
    thread::spawn(move || {
        receiver.run();
    });
    let subscriber = tracing_fmt::FmtSubscriber::builder().finish();
    tracing_env_logger::try_init().expect("init log adapter");
    // Layer the metrics subscriber on top of the fmt subscriber so span
    // activity is forwarded to the metrics sink.
    let subscriber = tracing_metrics::MetricsSubscriber::new(subscriber, sink);
    tracing::subscriber::with_default(subscriber, || {
        let number_of_yaks = 3;
        let mut number_shaved = 0;
        debug!("preparing to shave {} yaks", number_of_yaks);
        trace_span!("shaving_yaks", yaks_to_shave = number_of_yaks).in_scope(|| {
            info!("shaving yaks");
            for yak in 1..=number_of_yaks {
                let shaved = shave(yak);
                trace!(target: "yak_events", yak = yak, shaved = shaved);
                if !shaved {
                    error!(message = "failed to shave yak!", yak = yak);
                } else {
                    number_shaved += 1;
                }
                trace!(target: "yak_events", yaks_shaved = number_shaved);
            }
        });
        debug!(
            message = "yak shaving completed.",
            all_yaks_shaved = number_shaved == number_of_yaks,
        );
    });
    // Pull a final metrics snapshot; printing it is currently disabled.
    let _snapshot = controller.get_snapshot().unwrap();
    // let raw_snap = serde_json::to_string_pretty(&snapshot).unwrap();
    // println!("Metrics snapshot: {}", raw_snap);
}
| true |
2976119b7794cccc4aaadff716ad5987774a94b1
|
Rust
|
kellda/peripherals
|
/src/attributes/periph.rs
|
UTF-8
| 2,040 | 2.671875 | 3 |
[
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference",
"MIT"
] |
permissive
|
//! An example of where attributes get expanded in [`periph!`] macros
//!
//! Take a look at the source of this module to see the macro invocation used.
// No prefix: the attribute is placed on the named item itself (struct,
// newtype, enum) and, for register fields, on the associated constant.
crate::periph! {
    #[doc = "Nothing: only on peripheral struct"]
    Peripheral;
    #[doc = "Nothing: on `Register` and field of peripheral"]
    rw Register @ 0x00: u8 = 0x00 {
        #[doc = "Nothing: on `Newtype` and associated constant"]
        Newtype: 3 = struct Newtype(bool);
        #[doc = "Nothing: on `Enum` and associated constant"]
        Enum: 4..5 = enum Enum {}
    }
}
// `type:` prefix: the attribute is expanded only onto the generated type
// definitions.
crate::periph! {
    #[type: doc = "`type`: only on peripheral struct"]
    PeripheralType;
    #[type: doc = "`type`: only on register structs"]
    rw RegisterType @ 0x00: u8 = 0x00 {
        #[type: doc = "`type`: only on `NewtypeType`"]
        NewtypeType: 3 = struct NewtypeType(bool);
        #[type: doc = "`type`: only on `EnumType`"]
        EnumType: 4..5 = enum EnumType {}
    }
}
// `field:` prefix: the attribute lands on fields / associated constants
// (nothing for the peripheral itself, which has no field here).
crate::periph! {
    #[field: doc = "`field`: on nothing"]
    PeripheralField;
    #[field: doc = "`field`: on peripheral field"]
    rw RegisterField @ 0x00: u8 = 0x00 {
        #[field: doc = "`field`: on associated constant"]
        NewtypeField: 3 = struct NewtypeField(bool);
        #[field: doc = "`field`: on associated constant"]
        EnumField: 4..5 = enum EnumField {}
    }
}
// `impl:` prefix: the attribute is expanded only onto the generated
// `impl` blocks.
crate::periph! {
    #[impl: doc = "`impl`: only on impls"]
    PeripheralImpl;
    #[impl: doc = "`impl`: only on impls"]
    rw RegisterImpl @ 0x00: u8 = 0x00 {
        #[impl: doc = "`impl`: only on impls"]
        NewtypeImpl: 3 = struct NewtypeImpl(bool);
        #[impl: doc = "`impl`: only on impls"]
        EnumImpl: 4..5 = enum EnumImpl {}
    }
}
// `all:` prefix: the attribute is expanded onto every generated item.
crate::periph! {
    #[all: doc = "`all`: on everything"]
    PeripheralAll;
    #[all: doc = "`all`: on everything"]
    rw RegisterAll @ 0x00: u8 = 0x00 {
        #[all: doc = "`all`: on everything"]
        NewtypeAll: 3 = struct NewtypeAll(bool);
        #[all: doc = "`all`: on everything"]
        EnumAll: 4..5 = enum EnumAll {}
    }
}
| true |
554c568cd356d0a587518247885f27b29498e778
|
Rust
|
Weltraumschaf/minivm
|
/src/frontend/lexer/number_lexer.rs
|
UTF-8
| 7,625 | 3.5 | 4 |
[] |
no_license
|
use frontend::token::*;
use frontend::lexer::SubLexer;
use frontend::character_stream::CharacterStream;
use frontend::character_helper::CharacterHelper;
#[cfg(test)]
use frontend::Position;
/// Recognizes a number literal token.
pub struct NumberLexer {}

impl NumberLexer {
    /// Creates a new number sub-lexer.
    pub fn new() -> NumberLexer {
        NumberLexer {}
    }

    /// Consumes consecutive unsigned integer digits from the input and
    /// returns them as a string.
    ///
    /// Stops at end of input (signalled by the `END_OF_FILE` sentinel)
    /// or at the first non-digit character, leaving that character
    /// unconsumed.
    fn unsigned_integer_digits(&self, input: &mut CharacterStream) -> String {
        let mut digits = String::new();
        while input.current() != END_OF_FILE && CharacterHelper::is_numeric(input.current()) {
            digits.push(input.current());
            input.next(); // consume the digit
        }
        digits
    }
}
/// Distinguishes the two kinds of number literals the lexer can emit:
/// whole integers and reals (fraction and/or exponent present).
enum Type {
    INTEGER,
    REAL,
}
impl SubLexer for NumberLexer {
    /// Scans a number literal starting at the current input position.
    ///
    /// Recognized grammar: `digits [ '.' digits ] [ ('E'|'e') ['+'|'-'] digits ]`.
    /// A literal containing a fraction or an exponent yields a
    /// `TokenType::Real`, otherwise a `TokenType::Integer`.
    ///
    /// # Panics
    /// Panics when a mandatory digit group is empty or the collected
    /// literal cannot be parsed into an `i64`/`f64`.
    fn scan(&self, input: &mut CharacterStream) -> Token {
        let position = input.position();
        debug!("Start number lexing at position {}.", position);
        let mut literal = String::new();
        // Digits before the decimal point (mandatory).
        let whole_digits = self.unsigned_integer_digits(input);
        if whole_digits.is_empty() {
            panic!("At least one digit necessary!");
        }
        literal.push_str(&whole_digits);
        // Assume INTEGER until a fraction or exponent part is seen.
        let mut number_type = Type::INTEGER;
        // Optional fraction part after a decimal point.
        if '.' == input.current() {
            number_type = Type::REAL;
            literal.push(input.current());
            input.next(); // Consume decimal point.
            let fraction_digits = self.unsigned_integer_digits(input);
            if fraction_digits.is_empty() {
                panic!("At least one fraction digit necessary!");
            }
            literal.push_str(&fraction_digits);
        }
        // Optional exponent part.
        if input.current() == 'E' || input.current() == 'e' {
            number_type = Type::REAL;
            literal.push(input.current());
            input.next(); // Consume 'E' or 'e'.
            // Optional exponent sign.
            if input.current() == '+' || input.current() == '-' {
                literal.push(input.current());
                input.next(); // Consume '+' or '-'.
            }
            let exponent_digits = self.unsigned_integer_digits(input);
            if exponent_digits.is_empty() {
                panic!("At least one exponent digit necessary!");
            }
            literal.push_str(&exponent_digits);
        }
        // `panic!(format!(...))` is the deprecated non_fmt_panics
        // pattern (a hard error in edition 2021); pass the format string
        // directly to `panic!` instead.
        let token_type = match number_type {
            Type::INTEGER => match literal.parse::<i64>() {
                Ok(value) => TokenType::Integer(value),
                Err(error) => panic!("ERROR: {}: \"{}\"", error, literal),
            },
            Type::REAL => match literal.parse::<f64>() {
                Ok(value) => TokenType::Real(value),
                Err(error) => panic!("ERROR: {}: \"{}\"", error, literal),
            },
        };
        Token::new(position, token_type, literal)
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use hamcrest::prelude::*;
    // Digit collection on an empty stream yields an empty string.
    #[test]
    fn unsigned_integer_digits_empty() {
        let mut src = CharacterStream::new(String::from(""));
        let sut = NumberLexer::new();
        let token = sut.unsigned_integer_digits(&mut src);
        assert_that!(token, is(equal_to(String::from(""))));
    }
    #[test]
    fn unsigned_integer_digits_only_numbers() {
        let mut src = CharacterStream::new(String::from("1234"));
        let sut = NumberLexer::new();
        let token = sut.unsigned_integer_digits(&mut src);
        assert_that!(token, is(equal_to(String::from("1234"))));
    }
    // Collection stops at the first non-digit (whitespace here).
    #[test]
    fn unsigned_integer_digits_numbers_with_trailing_whitespace() {
        let mut src = CharacterStream::new(String::from("1234 "));
        let sut = NumberLexer::new();
        let token = sut.unsigned_integer_digits(&mut src);
        assert_that!(token, is(equal_to(String::from("1234"))));
    }
    // Collection stops at the decimal point; it is not consumed.
    #[test]
    fn unsigned_integer_digits_numbers_with_trailing_dot() {
        let mut src = CharacterStream::new(String::from("12345678.9"));
        let sut = NumberLexer::new();
        let token = sut.unsigned_integer_digits(&mut src);
        assert_that!(token, is(equal_to(String::from("12345678"))));
    }
    #[test]
    fn integer() {
        let mut src = CharacterStream::new(String::from("42"));
        let sut = NumberLexer::new();
        let token = sut.scan(&mut src);
        assert_that!(token,
            is(equal_to(
                Token::new(Position::new(1, 1), TokenType::Integer(42), String::from("42")))
            )
        );
    }
    #[test]
    fn real() {
        let mut src = CharacterStream::new(String::from("3.14"));
        let sut = NumberLexer::new();
        let token = sut.scan(&mut src);
        assert_that!(token,
            is(equal_to(
                Token::new(Position::new(1, 1), TokenType::Real(3.14), String::from("3.14")))
            )
        );
    }
    // Exponent handling: lower/upper case 'e' with '-', implicit '+',
    // and explicit '+' signs.
    #[test]
    fn real_with_negative_exponent_lc() {
        let mut src = CharacterStream::new(String::from("7.0e-2"));
        let sut = NumberLexer::new();
        let token = sut.scan(&mut src);
        assert_that!(token,
            is(equal_to(
                Token::new(Position::new(1, 1), TokenType::Real(0.07), String::from("7.0e-2")))
            )
        );
    }
    #[test]
    fn real_with_negative_exponent_uc() {
        let mut src = CharacterStream::new(String::from("7.0E-2"));
        let sut = NumberLexer::new();
        let token = sut.scan(&mut src);
        assert_that!(token,
            is(equal_to(
                Token::new(Position::new(1, 1), TokenType::Real(0.07), String::from("7.0E-2")))
            )
        );
    }
    #[test]
    fn real_with_positive_exponent_lc() {
        let mut src = CharacterStream::new(String::from("7.0e2"));
        let sut = NumberLexer::new();
        let token = sut.scan(&mut src);
        assert_that!(token,
            is(equal_to(
                Token::new(Position::new(1, 1), TokenType::Real(700.), String::from("7.0e2")))
            )
        );
    }
    #[test]
    fn real_with_positive_exponent_uc() {
        let mut src = CharacterStream::new(String::from("7.0E+2"));
        let sut = NumberLexer::new();
        let token = sut.scan(&mut src);
        assert_that!(token,
            is(equal_to(
                Token::new(Position::new(1, 1), TokenType::Real(700.), String::from("7.0E+2")))
            )
        );
    }
}
| true |
31d21515948921dc6b01983b3d73971e9ef06131
|
Rust
|
doytsujin/dns-message-parser
|
/src/encode/encoder.rs
|
UTF-8
| 695 | 2.53125 | 3 |
[
"BSD-3-Clause"
] |
permissive
|
use crate::{EncodeError, EncodeResult};
use bytes::BytesMut;
use std::collections::HashMap;
use std::convert::TryInto;
/// Accumulates the bytes of a message being encoded together with an
/// index of already written domain names.
pub(crate) struct Encoder {
    // The encoded output produced so far.
    pub bytes: BytesMut,
    // Maps a domain name to a `(u16, usize)` pair. NOTE(review): the
    // first element matches the `u16` offsets produced by `get_offset`;
    // the meaning of the `usize` is not visible in this file — confirm
    // at the use sites (presumably used for DNS name compression).
    pub domain_name_index: HashMap<String, (u16, usize)>,
}
impl Encoder {
    /// Returns the current write position as a `u16` offset.
    ///
    /// # Errors
    /// Returns `EncodeError::Length` when the buffer has grown beyond
    /// what a `u16` offset can address.
    #[inline]
    pub(super) fn get_offset(&self) -> EncodeResult<u16> {
        let bytes_len = self.bytes.len();
        // `try_into` + `map_err` replaces the manual `if let Ok` /
        // `else Err` two-branch form.
        bytes_len
            .try_into()
            .map_err(|_| EncodeError::Length(bytes_len))
    }
}
impl Default for Encoder {
    /// Starts with an empty output buffer and no indexed names.
    fn default() -> Self {
        Self {
            bytes: BytesMut::default(),
            domain_name_index: HashMap::default(),
        }
    }
}
| true |
5c9a365ba00c1cffaac67d086deb8250b01fc897
|
Rust
|
tynes/rsd
|
/primitives/src/block.rs
|
UTF-8
| 1,270 | 2.984375 | 3 |
[] |
no_license
|
use crate::BlockHeader;
use crate::Transaction;
use extended_primitives::Buffer;
use handshake_protocol::encoding::{Decodable, DecodingError, Encodable};
/// A Handshake block, which is a collection of transactions with an attached
/// proof of work.
#[derive(PartialEq, Clone, Debug)]
pub struct Block {
    /// The block header
    pub header: BlockHeader,
    /// List of transactions contained in the block
    pub txdata: Vec<Transaction>,
}
impl Encodable for Block {
    /// Returns the serialized size of the block in bytes.
    ///
    /// NOTE(review): currently a hard-coded stub (32) because the
    /// transaction size calculation is not implemented yet — see TODO.
    /// Callers must not rely on this value for real blocks.
    fn size(&self) -> u32 {
        //TODO relies on tx's get size which is not done.
        32
    }
    /// Serializes the block: header first, then a varint transaction
    /// count, then each transaction in order.
    fn encode(&self) -> Buffer {
        let mut buffer = Buffer::new();
        buffer.extend(self.header.encode());
        buffer.write_varint(self.txdata.len());
        for tx in self.txdata.iter() {
            buffer.extend(tx.encode());
        }
        buffer
    }
}
impl Decodable for Block {
    type Error = DecodingError;

    /// Deserializes a block: header, varint transaction count, then that
    /// many transactions. Decoding stops at the first malformed element.
    fn decode(buffer: &mut Buffer) -> Result<Self, Self::Error> {
        let header = BlockHeader::decode(buffer)?;
        let count = buffer.read_varint()?;
        // Collect into `Result<Vec<_>, _>` so the first decode error
        // short-circuits, exactly like the original push loop.
        let txdata = (0..count.as_u64())
            .map(|_| Transaction::decode(buffer))
            .collect::<Result<Vec<_>, _>>()?;
        Ok(Block { header, txdata })
    }
}
| true |
11226a7d073c3a815e81a5758ac97e99a1701972
|
Rust
|
juxd/aoc2020
|
/src/day12/mod.rs
|
UTF-8
| 3,688 | 3.4375 | 3 |
[] |
no_license
|
use regex::Regex;
/// A single navigation instruction: an absolute translation (N/S/E/W),
/// a turn (L/R, in degrees), or a forward move (F).
#[derive(Debug, PartialEq, Eq)]
enum Move {
    N(i32),
    S(i32),
    E(i32),
    W(i32),
    L(i32),
    R(i32),
    F(i32),
}
/// The ship state shared by both puzzle parts.
#[derive(Debug, PartialEq, Eq)]
struct Ship {
    // Heading in degrees; per `sim_moves`, 0 = north, 90 = east,
    // 180 = south, 270 = west. Only used in part 1.
    dir: i32,
    // Ship position; +x is east, +y is north.
    x: i32,
    y: i32,
    // Waypoint offset relative to the ship. Only used in part 2.
    waypoint_x: i32,
    waypoint_y: i32,
}
impl Move {
fn of_dir_and_value(dir: &str, val: i32) -> Move {
match dir {
"N" => Move::N(val),
"S" => Move::S(val),
"E" => Move::E(val),
"W" => Move::W(val),
"L" => Move::L(val),
"R" => Move::R(val),
"F" => Move::F(val),
_ => panic!("unknown dir"),
}
}
}
/// Parses the puzzle input into moves by matching `<letter><number>`
/// pairs anywhere in the text.
fn parse_input(input: &str) -> Vec<Move> {
    let pattern = Regex::new(r"([NSEWLRF])(\d*)").unwrap();
    pattern
        .captures_iter(input)
        .map(|cap| {
            let value = cap[2].parse::<i32>().unwrap();
            Move::of_dir_and_value(&cap[1], value)
        })
        .collect()
}
/// Part 1 rules: N/S/E/W translate the ship, L/R rotate its heading in
/// 90-degree steps, and F moves along the current heading.
///
/// # Panics
/// Panics if the heading ever leaves the set {0, 90, 180, 270}.
fn sim_moves(ship: &mut Ship, moves: &[Move]) {
    for mov in moves {
        match mov {
            Move::N(amt) => ship.y += amt,
            Move::S(amt) => ship.y -= amt,
            Move::E(amt) => ship.x += amt,
            Move::W(amt) => ship.x -= amt,
            // The `+ 360` keeps the value non-negative before `% 360`
            // (Rust's `%` preserves the sign of the dividend).
            Move::L(amt) => ship.dir = (ship.dir - amt + 360) % 360,
            Move::R(amt) => ship.dir = (ship.dir + amt + 360) % 360,
            // Forward: translate along the current heading.
            Move::F(amt) => match ship.dir {
                0 => ship.y += amt,
                90 => ship.x += amt,
                180 => ship.y -= amt,
                270 => ship.x -= amt,
                other => panic!("lol, no non-90 value {} found", other),
            },
        }
    }
}
/// Part 2 rules: N/S/E/W translate the waypoint (not the ship), L/R
/// rotate the waypoint around the ship in 90-degree steps, and F moves
/// the ship `amt` times toward the waypoint.
///
/// Note: L/R amounts that are not multiples of 90 leave a partial turn
/// unapplied (the loop subtracts 90 per step until `turns <= 0`).
fn sim_moves_p2(ship: &mut Ship, moves: &[Move]) {
    for mov in moves {
        match mov {
            Move::N(amt) => ship.waypoint_y += amt,
            Move::S(amt) => ship.waypoint_y -= amt,
            Move::E(amt) => ship.waypoint_x += amt,
            Move::W(amt) => ship.waypoint_x -= amt,
            Move::L(amt) => {
                // Counter-clockwise quarter turn per step:
                // (x, y) -> (-y, x).
                let mut turns = *amt;
                while turns > 0 {
                    let tmp = ship.waypoint_x;
                    ship.waypoint_x = -ship.waypoint_y;
                    ship.waypoint_y = tmp;
                    turns -= 90;
                }
            }
            Move::R(amt) => {
                // Clockwise quarter turn per step: (x, y) -> (y, -x).
                let mut turns = *amt;
                while turns > 0 {
                    let tmp = ship.waypoint_x;
                    ship.waypoint_x = ship.waypoint_y;
                    ship.waypoint_y = -tmp;
                    turns -= 90;
                }
            }
            Move::F(amt) => {
                // Move `amt` waypoint-vectors at once.
                ship.x += ship.waypoint_x * amt;
                ship.y += ship.waypoint_y * amt;
            }
        }
    }
}
/// Part 1: simulate the direct-move rules and return the Manhattan
/// distance of the ship from the origin.
pub fn solve_p1(input: &str) -> i32 {
    // Initial state: facing east, at the origin, waypoint at (10, 1).
    let mut ship = Ship {
        dir: 90,
        x: 0,
        y: 0,
        waypoint_x: 10,
        waypoint_y: 1,
    };
    sim_moves(&mut ship, &parse_input(input));
    ship.x.abs() + ship.y.abs()
}
/// Part 2: simulate the waypoint rules and return the Manhattan distance
/// of the ship from the origin.
pub fn solve_p2(input: &str) -> i32 {
    // Initial state: facing east, at the origin, waypoint at (10, 1).
    let mut ship = Ship {
        dir: 90,
        x: 0,
        y: 0,
        waypoint_x: 10,
        waypoint_y: 1,
    };
    sim_moves_p2(&mut ship, &parse_input(input));
    ship.x.abs() + ship.y.abs()
}
#[cfg(test)]
mod tests {
    use super::*;
    // Sample from the puzzle statement; the leading newline is harmless
    // because the regex skips non-matching characters.
    static SAMPLE_INPUT: &str = "
F10
N3
F7
R90
F11";
    #[test]
    fn check_input_parse() {
        use Move::*;
        assert_eq!(
            vec![F(10), N(3), F(7), R(90), F(11)],
            parse_input(SAMPLE_INPUT)
        );
    }
    // Expected results are the published answers for the sample.
    #[test]
    fn test_p1() {
        assert_eq!(25, solve_p1(SAMPLE_INPUT));
    }
    #[test]
    fn test_p2() {
        assert_eq!(286, solve_p2(SAMPLE_INPUT));
    }
}
| true |
0597dd547dff779aeb713367525ac7da7ec2685e
|
Rust
|
Its-its/pixel-circuits
|
/frontend/src/canvas/pixels/map.rs
|
UTF-8
| 3,983 | 2.703125 | 3 |
[] |
no_license
|
use circuit_sim_common::{CellPos, Dimensions, NodeObjectSide};
use wasm_bindgen::JsValue;
use web_sys::CanvasRenderingContext2d;
use crate::Result;
use crate::editor::{Node, ViewOptions};
use super::{PIXEL_NODE, PIXEL_OBJECT, PixelColor, PixelType};
// Used for rendering the object.
#[derive(Debug)]
pub struct PixelMap {
    // Row-major grid of pixels; `None` marks an empty cell. The grid is
    // one cell wider and taller on every side than the object itself —
    // a border into which nodes are rendered (see the constructors).
    map: Vec<Option<PixelType>>,
    // Width of the grid in cells, INCLUDING the 2-cell border.
    width: usize
}
impl PixelMap {
    /// map will NOT include Nodes. Only object.
    ///
    /// Builds a map from an existing object pixel grid: wraps it with a
    /// one-cell empty border on all four sides, then renders `nodes`
    /// into the border. `object_map` is expected to be row-major with
    /// `dimensions.width()` cells per row.
    pub fn generate_with_object_map(dimensions: Dimensions, object_map: Vec<PixelType>, nodes: &[Node]) -> Self {
        let width = dimensions.width();
        let mut map = Vec::new();
        // Top border row (width + 2 empty cells).
        map.resize(width + 2, None);
        object_map.into_iter()
            .enumerate()
            .for_each(|(i, p)| {
                let x = i % width;
                // Left border cell at the start of each row.
                if x == 0 {
                    map.push(None);
                }
                map.push(Some(p));
                // Right border cell at the end of each row.
                if x == width - 1 {
                    map.push(None);
                }
            });
        // Bottom border row.
        (0..width + 2).for_each(|_| map.push(None));
        let mut this = Self {
            map,
            width: width + 2
        };
        this.render_nodes(nodes);
        this
    }
    /// Creates an all-empty map sized for the object plus its border.
    pub fn new_empty(dimensions: Dimensions) -> Self {
        let width = dimensions.width() + 2;
        let height = dimensions.height() + 2;
        let mut map = Vec::new();
        map.resize(width * height, None);
        Self {
            map,
            width
        }
    }
    /// Creates a map with a filled object body and the given nodes.
    pub fn new_with_nodes(dimensions: Dimensions, nodes: &[Node]) -> Self {
        Self::new_empty(dimensions).generate(nodes)
    }
    /// Fills the object body and renders the nodes, consuming and
    /// returning `self`.
    pub fn generate(mut self, nodes: &[Node]) -> Self {
        self.render_object();
        self.render_nodes(nodes);
        self
    }
    /// Fills every interior cell (everything except the 1-cell border)
    /// with `PixelType::ObjectColor`.
    fn render_object(&mut self) {
        // Skip first row. Ignore last row.
        (self.width .. self.map.len() - self.width)
            .for_each(|i| {
                let x = i % self.width;
                if x != 0 && x != self.width - 1 {
                    self.map[i] = Some(PixelType::ObjectColor);
                }
            });
    }
    /// Places each node's pixel into the border at the linear index
    /// derived from its side + offset.
    ///
    /// NOTE(review): the index arithmetic is non-obvious. As written,
    /// Top(v) lands at column 1 + v of the first row and Bottom(v)
    /// counts backwards from the last cell; Right(v) resolves to the
    /// last cell of row v + 1 and Left(v) to the first cell of a row
    /// counted up from the bottom — verify these against the intended
    /// `NodeObjectSide` semantics.
    fn render_nodes(&mut self, nodes: &[Node]) {
        nodes.iter().for_each(|n| {
            let i = match n.side {
                NodeObjectSide::Top(v) => 1 + v,
                NodeObjectSide::Bottom(v) => self.map.len() - v - 2,
                NodeObjectSide::Right(v) => self.width * 2 + self.width * v - 1,
                NodeObjectSide::Left(v) => self.map.len() - (self.width * 2) - (self.width * v),
            };
            self.map[i] = Some(PixelType::Node { direction: n.direction, side: n.side.side() });
        });
    }
    /// Returns the absolute cell positions of all occupied pixels when
    /// the map is placed at `pos`. The border offset is undone by the
    /// `- 1`; positions that would underflow below zero are dropped via
    /// `checked_sub`.
    pub fn pixel_positions(&self, pos: CellPos) -> Vec<CellPos> {
        self.map.iter()
            .enumerate()
            .filter_map(|(i, t)| {
                if t.is_some() {
                    let x_offset = i % self.width;
                    let y_offset = i / self.width;
                    Some((
                        (pos.0 + x_offset).checked_sub(1)?,
                        (pos.1 + y_offset).checked_sub(1)?
                    ))
                } else {
                    None
                }
            })
            .collect()
    }
    /// Like [`Self::pixel_positions`], but consumes the map and pairs
    /// each position with its pixel type.
    pub fn pixel_positions_with_types(self, pos: CellPos) -> Vec<(CellPos, PixelType)> {
        let width = self.width;
        self.map.into_iter()
            .enumerate()
            .filter_map(|(i, t)| {
                if let Some(type_of) = t {
                    let x_offset = i % width;
                    let y_offset = i / width;
                    Some((
                        ((pos.0 + x_offset).checked_sub(1)?, (pos.1 + y_offset).checked_sub(1)?),
                        type_of
                    ))
                } else {
                    None
                }
            })
            .collect()
    }
    /// Draws every occupied pixel onto the canvas at `pos`, scaled by
    /// the view's pixel size. Wire pixels take their color from the
    /// palette by index; nodes and the object body use the fixed
    /// `PIXEL_NODE` / `PIXEL_OBJECT` colors.
    ///
    /// `_is_selected` is currently unused (underscore-prefixed on
    /// purpose).
    pub fn render(&self, pos: CellPos, _is_selected: bool, view: &ViewOptions, palette: &[(PixelColor, PixelColor)], ctx: &CanvasRenderingContext2d) -> Result<()> {
        let pixel_size = view.pixel_size as f64;
        let rendering_pixels = self.map.iter()
            .enumerate()
            .filter_map(|(i, t)| t.as_ref().map(|t| (i, t)));
        for (i, type_of) in rendering_pixels {
            let x_offset = i % self.width;
            let y_offset = i / self.width;
            // Skip pixels whose absolute position would underflow.
            if let (Some(pos_x), Some(pos_y)) = ((pos.0 + x_offset).checked_sub(1), (pos.1 + y_offset).checked_sub(1)) {
                let pixel = match type_of {
                    PixelType::Custom(p) => *p,
                    PixelType::Node { .. } => PIXEL_NODE,
                    PixelType::ObjectColor => PIXEL_OBJECT,
                    PixelType::Wire { index, .. } => palette[*index].0
                };
                ctx.set_fill_style(&JsValue::from_str(&pixel.get_string_color()));
                ctx.fill_rect(
                    pos_x as f64 * pixel_size,
                    pos_y as f64 * pixel_size,
                    pixel_size,
                    pixel_size
                );
            }
        }
        Ok(())
    }
}
| true |
bb63b3decce87276831becc283cf37d91b1fc9a7
|
Rust
|
wotsushi/competitive-programming
|
/arc/061/d.rs
|
UTF-8
| 1,762 | 2.734375 | 3 |
[
"MIT"
] |
permissive
|
#![allow(non_snake_case)]
#![allow(unused_variables)]
#![allow(dead_code)]
// Counts, for each j in 0..=9, how many 3x3 windows of an H x W grid contain
// exactly j of the N given marked cells (AtCoder ARC 061 D).
fn main() {
    // H, W: grid dimensions; N: number of marked cells.
    let (H, W, N): (i64, i64, usize) = {
        let mut line: String = String::new();
        std::io::stdin().read_line(&mut line).unwrap();
        let mut iter = line.split_whitespace();
        (
            iter.next().unwrap().parse().unwrap(),
            iter.next().unwrap().parse().unwrap(),
            iter.next().unwrap().parse().unwrap(),
        )
    };
    // (a[i], b[i]): 1-based row/column of the i-th marked cell.
    let (a, b): (Vec<i64>, Vec<i64>) = {
        let (mut a, mut b) = (vec![], vec![]);
        for _ in 0..N {
            let mut line: String = String::new();
            std::io::stdin().read_line(&mut line).unwrap();
            let mut iter = line.split_whitespace();
            a.push(iter.next().unwrap().parse().unwrap());
            b.push(iter.next().unwrap().parse().unwrap());
        }
        (a, b)
    };
    // Set of marked cells for O(1) membership tests.
    let p = (0..N)
        .map(|i| (a[i], b[i]))
        .collect::<std::collections::HashSet<_>>();
    // dp[j] accumulates (window, marked-cell) incidences for windows holding
    // j marks; each such window is counted once per mark it contains.
    let mut dp = vec![0; 10];
    for k in 0..N {
        let (y, x) = (a[k], b[k]);
        // All top-left corners (u, l) of 3x3 windows covering cell (y, x),
        // clamped to the grid.
        for u in std::cmp::max(1, y - 2)..(std::cmp::min(H - 2, y) + 1) {
            for l in std::cmp::max(1, x - 2)..(std::cmp::min(W - 2, x) + 1) {
                dp[(u..(u + 3))
                    .map(|i| (l..(l + 3)).filter(|&j| p.contains(&(i, j))).count())
                    .sum::<usize>()] += 1;
            }
        }
    }
    // A window with i marks was counted i times above; divide to deduplicate.
    for i in 1..10 {
        dp[i] /= i as i64;
    }
    let ans = (0..10)
        .map(|i| {
            if i == 0 {
                // Zero-mark windows = all windows minus windows with >=1 mark.
                ((H - 2) * (W - 2) - dp.iter().sum::<i64>()).to_string()
            } else {
                dp[i].to_string()
            }
        })
        .collect::<Vec<_>>()
        .join("\n");
    println!("{}", ans);
}
| true |
9ee5f150a8804e67407dfeeabe533e086bb9fa5a
|
Rust
|
mkeeter/advent-of-code
|
/2016/02/src/main.rs
|
UTF-8
| 1,464 | 3.25 | 3 |
[] |
no_license
|
use std::collections::HashMap;
use std::io::Read;
// Advent of Code 2016 day 2: walk a keypad following U/D/L/R moves read from
// stdin, printing the key reached at the end of each input line.
fn main() {
    let mut input = String::new();
    std::io::stdin().read_to_string(&mut input).unwrap();
    // Runs the walk on a keypad given as ASCII art (keys separated by one
    // space, so x is halved when building coordinates).
    let run = |keypad: &str| {
        let mut keys: HashMap<(i32, i32), char> = HashMap::new();
        let mut pos = (0, 0);
        for (y, line) in keypad.lines().enumerate() {
            for (x, c) in line.chars().enumerate() {
                if c != ' ' {
                    let p = (x as i32 / 2, y as i32);
                    keys.insert(p, c);
                    // Both keypads start on the '5' key.
                    if c == '5' {
                        pos = p;
                    }
                }
            }
        }
        for line in input.lines() {
            for b in line.chars() {
                let next = match b {
                    'U' => (pos.0, pos.1 - 1),
                    'D' => (pos.0, pos.1 + 1),
                    'R' => (pos.0 + 1, pos.1),
                    'L' => (pos.0 - 1, pos.1),
                    _ => panic!("Invalid character"),
                };
                // Moves off the keypad are ignored.
                if keys.get(&next).is_some() {
                    pos = next;
                }
            }
            print!("{}", keys.get(&pos).unwrap())
        }
    };
    print!("Part 1: ");
    run(concat!("1 2 3\n", "4 5 6\n", "7 8 9"));
    println!();
    print!("Part 2: ");
    run(concat!(
        "  1  \n",
        " 2 3 4 \n",
        "5 6 7 8 9\n",
        " A B C \n",
        "  D  "
    ));
    println!();
}
| true |
abadf2cfcfb52ba86cfbc5e2f9ca709982941bef
|
Rust
|
cloudflare/lol-html
|
/src/transform_stream/dispatcher.rs
|
UTF-8
| 12,371 | 2.515625 | 3 |
[
"BSD-3-Clause",
"CC-BY-SA-4.0"
] |
permissive
|
use super::*;
use crate::base::{Bytes, Range, SharedEncoding};
use crate::html::{LocalName, Namespace};
use crate::parser::{
Lexeme, LexemeSink, NonTagContentLexeme, ParserDirective, ParserOutputSink, TagHintSink,
TagLexeme, TagTokenOutline,
};
use crate::rewritable_units::{
DocumentEnd, Serialize, ToToken, Token, TokenCaptureFlags, TokenCapturer, TokenCapturerEvent,
};
use crate::rewriter::RewritingError;
use std::rc::Rc;
use TagTokenOutline::*;
/// Extra start-tag details (raw input, parsed attributes, self-closing flag)
/// that a transform controller can request lazily before deciding on flags.
pub struct AuxStartTagInfo<'i> {
    pub input: &'i Bytes<'i>,
    pub attr_buffer: SharedAttributeBuffer,
    pub self_closing: bool,
}
/// Deferred callback: invoked with the requested auxiliary start-tag info and
/// returns the capture flags to apply for that tag.
type AuxStartTagInfoRequest<C> =
    Box<dyn FnOnce(&mut C, AuxStartTagInfo<'_>) -> Result<TokenCaptureFlags, RewritingError>>;
/// Why start-tag handling could not immediately produce capture flags.
pub enum DispatcherError<C> {
    /// The controller needs attributes / self-closing info before deciding.
    InfoRequest(AuxStartTagInfoRequest<C>),
    /// Rewriting failed outright.
    RewritingError(RewritingError),
}
/// Result of handling a start tag: capture flags, or a deferred request/error.
pub type StartTagHandlingResult<C> = Result<TokenCaptureFlags, DispatcherError<C>>;
/// Hooks through which the dispatcher drives token capture and rewriting.
pub trait TransformController: Sized {
    /// Capture flags to apply before any tag has been seen.
    fn initial_capture_flags(&self) -> TokenCaptureFlags;
    /// Decides capture flags for a start tag (may defer via an info request).
    fn handle_start_tag(&mut self, name: LocalName, ns: Namespace) -> StartTagHandlingResult<Self>;
    /// Decides capture flags after an end tag.
    fn handle_end_tag(&mut self, name: LocalName) -> TokenCaptureFlags;
    /// Called for every captured token; may mutate it in place.
    fn handle_token(&mut self, token: &mut Token) -> Result<(), RewritingError>;
    /// Called once when the end of the document is reached.
    fn handle_end(&mut self, document_end: &mut DocumentEnd) -> Result<(), RewritingError>;
    /// Whether content should currently be forwarded to the output sink.
    fn should_emit_content(&self) -> bool;
}
/// Defines an interface for the [`HtmlRewriter`]'s output.
///
/// Implemented for [`Fn`] and [`FnMut`].
///
/// [`HtmlRewriter`]: struct.HtmlRewriter.html
/// [`Fn`]: https://doc.rust-lang.org/std/ops/trait.Fn.html
/// [`FnMut`]: https://doc.rust-lang.org/std/ops/trait.FnMut.html
pub trait OutputSink {
    /// Handles rewriter's output chunk.
    ///
    /// # Note
    /// The last chunk of the output has zero length.
    fn handle_chunk(&mut self, chunk: &[u8]);
}
// Blanket impl: any `FnMut(&[u8])` closure can serve as an output sink.
impl<F: FnMut(&[u8])> OutputSink for F {
    fn handle_chunk(&mut self, chunk: &[u8]) {
        self(chunk);
    }
}
/// Glue between the parser and a [`TransformController`]: routes lexemes and
/// tag hints to the controller, captures tokens, and forwards (possibly
/// rewritten) output to the sink.
pub struct Dispatcher<C, O>
where
    C: TransformController,
    O: OutputSink,
{
    transform_controller: C,
    output_sink: O,
    // Offset into the current input chunk of content not yet emitted.
    remaining_content_start: usize,
    token_capturer: TokenCapturer,
    // True when capture flags were already set from a tag hint, so the full
    // lexeme does not need to adjust them again.
    got_flags_from_hint: bool,
    // Deferred aux-info request from a previous start-tag hint, if any.
    pending_element_aux_info_req: Option<AuxStartTagInfoRequest<C>>,
    // False while removed element content is being skipped.
    emission_enabled: bool,
    encoding: SharedEncoding,
}
impl<C, O> Dispatcher<C, O>
where
    C: TransformController,
    O: OutputSink,
{
    /// Creates a dispatcher seeded with the controller's initial capture flags.
    pub fn new(transform_controller: C, output_sink: O, encoding: SharedEncoding) -> Self {
        let initial_capture_flags = transform_controller.initial_capture_flags();
        Dispatcher {
            transform_controller,
            output_sink,
            remaining_content_start: 0,
            token_capturer: TokenCapturer::new(
                initial_capture_flags,
                SharedEncoding::clone(&encoding),
            ),
            got_flags_from_hint: false,
            pending_element_aux_info_req: None,
            emission_enabled: true,
            encoding,
        }
    }
    /// Emits the consumed-but-not-yet-output tail of `input` (unless emission
    /// is disabled) and resets bookkeeping for the next chunk.
    pub fn flush_remaining_input(&mut self, input: &[u8], consumed_byte_count: usize) {
        let output = &input[self.remaining_content_start..consumed_byte_count];
        if self.emission_enabled && !output.is_empty() {
            self.output_sink.handle_chunk(output);
        }
        self.remaining_content_start = 0;
    }
    /// Finalizes the stream: flushes leftovers, notifies the controller of the
    /// document end, then emits the zero-length terminating chunk.
    pub fn finish(&mut self, input: &[u8]) -> Result<(), RewritingError> {
        self.flush_remaining_input(input, input.len());
        let mut document_end = DocumentEnd::new(&mut self.output_sink, self.encoding.get());
        self.transform_controller.handle_end(&mut document_end)?;
        // NOTE: output the finalizing chunk.
        self.output_sink.handle_chunk(&[]);
        Ok(())
    }
    /// Feeds a lexeme to the token capturer; when the capturer consumes it,
    /// first emits any raw content that preceded it, then hands produced
    /// tokens to the controller and serializes them to the sink.
    fn try_produce_token_from_lexeme<'i, T>(
        &mut self,
        lexeme: &Lexeme<'i, T>,
    ) -> Result<(), RewritingError>
    where
        Lexeme<'i, T>: ToToken,
    {
        let transform_controller = &mut self.transform_controller;
        let output_sink = &mut self.output_sink;
        let emission_enabled = self.emission_enabled;
        let lexeme_range = lexeme.raw_range();
        let remaining_content_start = self.remaining_content_start;
        let mut lexeme_consumed = false;
        self.token_capturer.feed(lexeme, |event| {
            match event {
                TokenCapturerEvent::LexemeConsumed => {
                    // Emit the raw content between the last emitted position
                    // and the start of this lexeme.
                    let chunk = lexeme.input().slice(Range {
                        start: remaining_content_start,
                        end: lexeme_range.start,
                    });
                    lexeme_consumed = true;
                    if emission_enabled && chunk.len() > 0 {
                        output_sink.handle_chunk(&chunk);
                    }
                }
                TokenCapturerEvent::TokenProduced(mut token) => {
                    trace!(@output token);
                    transform_controller.handle_token(&mut token)?;
                    if emission_enabled {
                        token.to_bytes(&mut |c| output_sink.handle_chunk(c));
                    }
                }
            }
            Ok(())
        })?;
        if lexeme_consumed {
            self.remaining_content_start = lexeme_range.end;
        }
        Ok(())
    }
    /// Full lexing is only needed while something is being captured;
    /// otherwise the parser can run in the cheaper tag-scan mode.
    #[inline]
    fn get_next_parser_directive(&self) -> ParserDirective {
        if self.token_capturer.has_captures() {
            ParserDirective::Lex
        } else {
            ParserDirective::WherePossibleScanForTagsOnly
        }
    }
    /// Derives capture flags from a full tag lexeme, either by completing a
    /// deferred aux-info request or by asking the controller directly.
    fn adjust_capture_flags_for_tag_lexeme(
        &mut self,
        lexeme: &TagLexeme,
    ) -> Result<(), RewritingError> {
        let input = lexeme.input();
        macro_rules! get_flags_from_aux_info_res {
            ($handler:expr, $attributes:expr, $self_closing:expr) => {
                $handler(
                    &mut self.transform_controller,
                    AuxStartTagInfo {
                        input,
                        attr_buffer: Rc::clone($attributes),
                        self_closing: $self_closing,
                    },
                )
            };
        }
        let capture_flags = match self.pending_element_aux_info_req.take() {
            // NOTE: tag hint was produced for the tag, but
            // attributes and self closing flag were requested.
            Some(aux_info_req) => match *lexeme.token_outline() {
                StartTag {
                    ref attributes,
                    self_closing,
                    ..
                } => get_flags_from_aux_info_res!(aux_info_req, attributes, self_closing),
                _ => unreachable!("Tag should be a start tag at this point"),
            },
            // NOTE: tag hint hasn't been produced for the tag, because
            // parser is not in the tag scan mode.
            None => match *lexeme.token_outline() {
                StartTag {
                    name,
                    name_hash,
                    ns,
                    ref attributes,
                    self_closing,
                } => {
                    let name = LocalName::new(input, name, name_hash);
                    match self.transform_controller.handle_start_tag(name, ns) {
                        Ok(flags) => Ok(flags),
                        Err(DispatcherError::InfoRequest(aux_info_req)) => {
                            get_flags_from_aux_info_res!(aux_info_req, attributes, self_closing)
                        }
                        Err(DispatcherError::RewritingError(e)) => Err(e),
                    }
                }
                EndTag { name, name_hash } => {
                    let name = LocalName::new(input, name, name_hash);
                    Ok(self.transform_controller.handle_end_tag(name))
                }
            },
        };
        match capture_flags {
            Ok(flags) => {
                self.token_capturer.set_capture_flags(flags);
                Ok(())
            }
            Err(e) => Err(e),
        }
    }
    /// Applies flags obtained from a tag hint and records that fact so the
    /// subsequent lexeme does not re-derive them.
    #[inline]
    fn apply_capture_flags_from_hint_and_get_next_parser_directive(
        &mut self,
        flags: TokenCaptureFlags,
    ) -> ParserDirective {
        self.token_capturer.set_capture_flags(flags);
        self.got_flags_from_hint = true;
        self.get_next_parser_directive()
    }
    /// Flushes buffered text tokens through the controller and (if enabled)
    /// the output sink.
    #[inline]
    fn flush_pending_captured_text(&mut self) -> Result<(), RewritingError> {
        let transform_controller = &mut self.transform_controller;
        let output_sink = &mut self.output_sink;
        let emission_enabled = self.emission_enabled;
        self.token_capturer.flush_pending_text(&mut |event| {
            if let TokenCapturerEvent::TokenProduced(mut token) = event {
                trace!(@output token);
                transform_controller.handle_token(&mut token)?;
                if emission_enabled {
                    token.to_bytes(&mut |c| output_sink.handle_chunk(c));
                }
            }
            Ok(())
        })?;
        Ok(())
    }
    /// True when content removal is active but the controller now wants
    /// content emitted again (i.e. the removed element is ending).
    #[inline]
    fn should_stop_removing_element_content(&self) -> bool {
        !self.emission_enabled && self.transform_controller.should_emit_content()
    }
}
impl<C, O> LexemeSink for Dispatcher<C, O>
where
    C: TransformController,
    O: OutputSink,
{
    /// Handles a fully lexed tag: flushes pending text, (re)derives capture
    /// flags if needed, produces the token, and picks the next parser mode.
    fn handle_tag(&mut self, lexeme: &TagLexeme) -> Result<ParserDirective, RewritingError> {
        // NOTE: flush pending text before reporting tag to the transform controller.
        // Otherwise, transform controller can enable or disable text handlers too early.
        // In case of start tag, newly matched element text handlers
        // will receive leftovers from the previous match. And, in case of end tag,
        // handlers will be disabled before the receive the finalizing chunk.
        self.flush_pending_captured_text()?;
        if self.got_flags_from_hint {
            self.got_flags_from_hint = false;
        } else {
            self.adjust_capture_flags_for_tag_lexeme(lexeme)?;
        }
        // An end tag terminating a removed element re-enables emission
        // starting exactly at this tag.
        if let TagTokenOutline::EndTag { .. } = lexeme.token_outline() {
            if self.should_stop_removing_element_content() {
                self.emission_enabled = true;
                self.remaining_content_start = lexeme.raw_range().start;
            }
        }
        self.try_produce_token_from_lexeme(lexeme)?;
        self.emission_enabled = self.transform_controller.should_emit_content();
        Ok(self.get_next_parser_directive())
    }
    #[inline]
    fn handle_non_tag_content(
        &mut self,
        lexeme: &NonTagContentLexeme,
    ) -> Result<(), RewritingError> {
        self.try_produce_token_from_lexeme(lexeme)
    }
}
impl<C, O> TagHintSink for Dispatcher<C, O>
where
    C: TransformController,
    O: OutputSink,
{
    /// Handles a start-tag hint (tag-scan mode): applies flags immediately,
    /// or defers the decision and forces full lexing when the controller
    /// requests attribute/self-closing info.
    fn handle_start_tag_hint(
        &mut self,
        name: LocalName,
        ns: Namespace,
    ) -> Result<ParserDirective, RewritingError> {
        match self.transform_controller.handle_start_tag(name, ns) {
            Ok(flags) => {
                Ok(self.apply_capture_flags_from_hint_and_get_next_parser_directive(flags))
            }
            Err(DispatcherError::InfoRequest(aux_info_req)) => {
                self.got_flags_from_hint = false;
                self.pending_element_aux_info_req = Some(aux_info_req);
                Ok(ParserDirective::Lex)
            }
            Err(DispatcherError::RewritingError(e)) => Err(e),
        }
    }
    /// Handles an end-tag hint: flushes pending text and applies the
    /// controller's flags for the tag.
    fn handle_end_tag_hint(&mut self, name: LocalName) -> Result<ParserDirective, RewritingError> {
        self.flush_pending_captured_text()?;
        let mut flags = self.transform_controller.handle_end_tag(name);
        // NOTE: if emission was disabled (i.e. we've been removing element content)
        // we need to request the end tag lexeme, to ensure that we have it.
        // Otherwise, if we have unfinished end tag in the end of input we'll emit
        // it where we shouldn't.
        if self.should_stop_removing_element_content() {
            flags |= TokenCaptureFlags::NEXT_END_TAG;
        }
        Ok(self.apply_capture_flags_from_hint_and_get_next_parser_directive(flags))
    }
}
// Marker impl: a `Dispatcher` is a complete parser output sink (the trait's
// behavior comes entirely from `LexemeSink` + `TagHintSink`).
impl<C, O> ParserOutputSink for Dispatcher<C, O>
where
    C: TransformController,
    O: OutputSink,
{
}
| true |
e889becba0c817d88f245f1aa56e70f1adfcb1eb
|
Rust
|
SilvanCodes/novel-set-neat
|
/src/utility/gym.rs
|
UTF-8
| 1,924 | 2.875 | 3 |
[] |
no_license
|
use gym::State;
use ndarray::{stack, Array1, Array2, ArrayView1, ArrayView2, ArrayViewMut1, Axis};
use serde::{Deserialize, Serialize};
#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct StandardScaler {
    // Per-feature means of the fitting samples.
    means: Array1<f64>,
    // Per-feature standard deviations (epsilon-stabilized, never zero).
    standard_deviations: Array1<f64>,
}
impl StandardScaler {
    /// Fits a scaler to a gym environment by sampling 1000 random steps.
    pub fn for_environment(environment: &str) -> Self {
        Self::new(Self::generate_standard_scaler_samples(environment, 1000).view())
    }
    /// Fits the scaler to a (samples x features) matrix.
    pub fn new(samples: ArrayView2<f64>) -> Self {
        // Add epsilon before the sqrt so constant features never yield a
        // zero standard deviation (which would divide by zero when scaling).
        let standard_deviations = samples
            .var_axis(Axis(0), 0.0)
            .mapv_into(|x| (x + f64::EPSILON).sqrt());
        let means = samples.mean_axis(Axis(0)).unwrap();
        StandardScaler {
            means,
            standard_deviations,
        }
    }
    /// Standardizes `sample` in place: (x - mean) / std, element-wise.
    pub fn scale_inplace(&self, mut sample: ArrayViewMut1<f64>) {
        sample -= &self.means;
        sample /= &self.standard_deviations;
    }
    /// Returns a standardized copy of `sample`.
    pub fn scale(&self, sample: ArrayView1<f64>) -> Array1<f64> {
        (&sample - &self.means) / &self.standard_deviations
    }
    /// Collects `num_samples` observations from random actions in the given
    /// environment, stacked into a (num_samples + 1) x features matrix.
    fn generate_standard_scaler_samples(environment: &str, num_samples: usize) -> Array2<f64> {
        let gym = gym::GymClient::default();
        let env = gym.make(environment);
        // collect samples for standard scaler
        let samples = env
            .reset()
            .unwrap()
            .get_box()
            .expect("expected gym environment with box type observations");
        let mut samples = samples.insert_axis(Axis(0));
        println!("sampling for scaler");
        for _ in 0..num_samples {
            let State { observation, .. } = env.step(&env.action_space().sample()).unwrap();
            // NOTE(review): re-stacking grows the matrix each iteration
            // (O(n^2) copies); acceptable for 1000 samples.
            samples = stack![
                Axis(0),
                samples,
                observation.get_box().unwrap().insert_axis(Axis(0))
            ];
        }
        println!("done sampling");
        samples
    }
}
| true |
939a072f0a0cc07326e837584456a45e96f0fa59
|
Rust
|
gmalmquist/arose
|
/src/utils.rs
|
UTF-8
| 1,690 | 3.171875 | 3 |
[
"MIT",
"Apache-2.0"
] |
permissive
|
use chrono;
use js_sys::Math::{exp, sqrt, pow, log2};
use wasm_bindgen::__rt::core::f64::consts::PI;
/// Installs `console_error_panic_hook` so Rust panics are reported to the
/// browser console instead of being swallowed. Safe to call more than once.
pub fn set_panic_hook() {
    // When the `console_error_panic_hook` feature is enabled, we can call the
    // `set_panic_hook` function at least once during initialization, and then
    // we will get better error messages if our code ever panics.
    //
    // For more details see
    // https://github.com/rustwasm/console_error_panic_hook#readme
    //#[cfg(feature = "console_error_panic_hook")]
    console_error_panic_hook::set_once();
}
/// Current UTC wall-clock time as milliseconds since the Unix epoch.
pub fn current_time_millis() -> f64 {
    chrono::Utc::now().timestamp_millis() as f64
}
/// Linear interpolation between `a` and `b`; `s = 0` gives `a`, `s = 1` gives `b`.
pub fn lerpf(a: f64, b: f64, s: f64) -> f64 {
    let weight_a = 1. - s;
    weight_a * a + s * b
}
/// Quadratic Bezier evaluation via two nested linear interpolations.
pub fn bezierf2(a: f64, b: f64, c: f64, s: f64) -> f64 {
    let ab = lerpf(a, b, s);
    let bc = lerpf(b, c, s);
    lerpf(ab, bc, s)
}
/// Value of the 2-D Gaussian kernel with standard deviation `sigma` at (x, y).
pub fn gaussian_blur(sigma: f64, x: f64, y: f64) -> f64 {
    // https://en.wikipedia.org/wiki/Gaussian_blur
    1. / (2. * PI * sigma * sigma) * exp(-(x * x + y * y) / (2. * sigma * sigma))
}
/// Exponential smooth minimum of `a` and `b`; larger `k` hugs `min(a, b)` tighter.
pub fn exp_smin(a: f64, b: f64, k: f64) -> f64 {
    let sum = pow(2., -k * a) + pow(2., -k * b);
    -log2(sum) / k
}
//
// #[cfg(test)]
// mod tests {
// use crate::utils::longest_common_prefix;
//
// #[test]
// fn common_prefix() {
// assert_eq!("", longest_common_prefix(&vec![]));
// assert_eq!("apple", longest_common_prefix(&vec!["apple"]));
// assert_eq!("apple", longest_common_prefix(&vec!["apple", "apple pie"]));
// assert_eq!("apple", longest_common_prefix(&vec!["apple"]));
// assert_eq!("ap", longest_common_prefix(&vec!["apple", "apple pie", "apricot"]));
// }
// }
| true |
2cc21316c087bbfaea80bff6d19afe2ce16e5f96
|
Rust
|
torkeldanielsson/aoc2020
|
/day_13_a/src/main.rs
|
UTF-8
| 416 | 2.890625 | 3 |
[
"MIT"
] |
permissive
|
// AoC 2020 day 13 part 1: find the first bus departing at or after
// `start_time` and print (wait minutes) * (bus id).
fn main() {
    let start_time = 1001171;
    let buses = vec![17, 41, 37, 367, 19, 23, 29, 613, 13];
    let mut t = start_time;
    loop {
        // A bus with id b departs whenever t is a multiple of b.
        if let Some(bus) = buses.iter().find(|&&b| t % b == 0) {
            println!("{}", (t - start_time) * bus);
            break;
        }
        t += 1;
    }
}
| true |
0f8038f8348dca453b06c729231b506a6d295815
|
Rust
|
dahliaOS/fuchsia-pi4
|
/src/connectivity/lib/at-commands/codegen/src/definition.rs
|
UTF-8
| 2,927 | 2.703125 | 3 |
[
"BSD-2-Clause"
] |
permissive
|
// Copyright 2020 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
use super::codegen::common::to_initial_capital;
/// A top-level item parsed from the AT-command definition language.
#[derive(Debug, Clone, PartialEq)]
pub enum Definition {
    /// An AT command (execute / read / test form).
    Command(Command),
    /// A response line an AT device may send back.
    Response {
        name: String,
        type_name: Option<String>,
        is_extension: bool,
        arguments: DelimitedArguments,
    },
    /// A named enumeration of integer variants usable as an argument type.
    Enum {
        name: String,
        variants: Vec<Variant>,
    },
}
/// The three standard AT command forms: execute (`AT+X=...`), read (`AT+X?`)
/// and test (`AT+X=?`).
#[derive(Debug, Clone, PartialEq)]
pub enum Command {
    Execute {
        name: String,
        type_name: Option<String>,
        is_extension: bool,
        arguments: DelimitedArguments,
    },
    Read {
        name: String,
        type_name: Option<String>,
        is_extension: bool,
    },
    Test {
        name: String,
        type_name: Option<String>,
        is_extension: bool,
    },
}
impl Command {
    /// Returns the Rust type name generated for this command: the explicit
    /// `type_name` if given, otherwise the capitalized command name with a
    /// "Read"/"Test" suffix for those forms.
    pub fn type_name(&self) -> String {
        let (name, type_name, suffix) = match self {
            Command::Execute { name, type_name, .. } => (name, type_name, ""),
            Command::Read { name, type_name, .. } => (name, type_name, "Read"),
            Command::Test { name, type_name, .. } => (name, type_name, "Test"),
        };
        type_name
            .clone()
            .unwrap_or_else(|| format!("{}{}", to_initial_capital(name.as_str()), suffix))
    }
}
/// Arguments together with the optional delimiter that separates them from
/// the command/response name (e.g. ":" or "=").
#[derive(Debug, Clone, PartialEq)]
pub struct DelimitedArguments {
    pub delimiter: Option<String>,
    pub arguments: Arguments,
}
/// Argument structure: either parenthesis-grouped lists or one flat list.
#[derive(Debug, Clone, PartialEq)]
pub enum Arguments {
    ParenthesisDelimitedArgumentLists(Vec<Vec<Argument>>),
    ArgumentList(Vec<Argument>),
}
impl Arguments {
    /// Returns true when no arguments are present at all: an empty flat list,
    /// or a parenthesized form whose lists are all empty (or absent).
    pub fn is_empty(&self) -> bool {
        match self {
            Self::ArgumentList(vec) => vec.is_empty(),
            Self::ParenthesisDelimitedArgumentLists(vec) => {
                // `iter()` instead of `into_iter()`: with only `&self` the old
                // call iterated by reference anyway (clippy: into_iter_on_ref).
                vec.is_empty() || vec.iter().all(|el| el.is_empty())
            }
        }
    }
    /// Flattens into a single owned list, discarding parenthesis grouping.
    pub fn flatten(&self) -> Vec<Argument> {
        match self {
            Self::ParenthesisDelimitedArgumentLists(arg_vec_vec) => arg_vec_vec.concat(),
            Self::ArgumentList(arg_vec) => arg_vec.clone(),
        }
    }
}
/// A single named, typed argument.
#[derive(Debug, Clone, PartialEq)]
pub struct Argument {
    pub name: String,
    pub typ: Type,
}
/// Argument type: a container over primitives or a bare primitive.
#[derive(Debug, Clone, PartialEq)]
pub enum Type {
    List(PrimitiveType),
    Option(PrimitiveType),
    Map { key: PrimitiveType, value: PrimitiveType },
    PrimitiveType(PrimitiveType),
}
/// Leaf argument types.
#[derive(Debug, Clone, PartialEq)]
pub enum PrimitiveType {
    String,
    Integer,
    // Special case 1 and 0 representing true and false
    BoolAsInt,
    // A user-defined type (e.g. one declared with `Definition::Enum`).
    NamedType(String),
}
/// One `name = value` variant of a definition-language enum.
#[derive(Debug, Clone, PartialEq)]
pub struct Variant {
    pub name: String,
    pub value: i64,
}
| true |
a268d30698e8ae056e03e6885f78ef35e506753a
|
Rust
|
DTFN/occlum
|
/demos/rust/rust_app/src/main.rs
|
UTF-8
| 660 | 3.046875 | 3 |
[
"BSD-3-Clause"
] |
permissive
|
extern crate libc;
use std::{time, thread};
use std::thread::sleep;
use std::time::Duration;
extern "C" {
fn increment_by_one(input: *mut libc::c_int);
}
fn main() {
let mut input = 5;
let old = input;
unsafe { increment_by_one(&mut input) };
println!("{} + 1 = {}", old, input);
let mut number = 1;
while number != 10 {
number += 1;
let handle = thread::spawn(|| {
for i in 1..10 {
thread::sleep(Duration::from_millis(500));
}
});
for i in 1..5 {
thread::sleep(Duration::from_millis(500));
}
handle.join().unwrap();
}
}
| true |
4e942ce687c614ca9b56cc6b7791f9d8e62cde4f
|
Rust
|
JuanFdS/advent-of-code-2020
|
/dia-2/src/main.rs
|
UTF-8
| 4,021 | 3.25 | 3 |
[] |
no_license
|
use std::str::FromStr;
use std::num::ParseIntError;
// Entry point is a placeholder; the puzzle logic lives in `resolver_para`
// and is exercised by the tests below.
fn main() {
    println!("Hello, world!");
}
/// A password is valid when the policed letter appears between `minimo` and
/// `maximo` times (inclusive).
fn contrasenia_valida_segun_politica(minimo: i32, maximo: i32, letra: String, contrasenia: String) -> bool {
    let apariciones = contrasenia.matches(&letra).count() as i32;
    (minimo..=maximo).contains(&apariciones)
}
// NOTE(review): dead stub that ignores `input` and returns a hardcoded
// tuple; the real parsing is `ContraseniaYPolitica::from_str` below.
fn parsear_politica_y_contrasenia(input: String) -> (i32, i32, &'static str, String) {
    return (1, 3, "a", "abcde".to_string());
}
/// Parses the puzzle input and counts how many passwords satisfy their policy.
/// Panics (like the original) if any line fails to parse.
fn resolver_para(input: &str) -> i32 {
    let parsed: Vec<ContraseniaYPolitica> = parsear_input(input).unwrap();
    parsed
        .iter()
        .filter(|cp| {
            contrasenia_valida_segun_politica(
                cp.minimo,
                cp.maximo,
                cp.letra.clone(),
                cp.contrasenia.clone(),
            )
        })
        .count() as i32
}
/// One input line: the policy (min/max occurrences of `letra`) plus the password.
#[derive(Debug, PartialEq)]
struct ContraseniaYPolitica {
    minimo: i32,
    maximo: i32,
    letra: String,
    contrasenia: String
}
/// Parses one `ContraseniaYPolitica` per line; fails on the first bad number.
fn parsear_input(input: &str) -> Result<Vec<ContraseniaYPolitica>, ParseIntError> {
    input.split('\n').map(|linea| linea.parse()).collect()
}
impl FromStr for ContraseniaYPolitica {
    type Err = ParseIntError;
    /// Parses a line shaped like `"1-3 a: abcde"` into min/max counts, the
    /// policed letter and the password.
    ///
    /// Like the original, malformed lines with too few fields panic via
    /// indexing/`unwrap`; only numeric-parse failures surface as `Err`.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        let splited_input: Vec<&str> = s.split(' ').collect();
        // "min-max" range in the first field.
        let rango: Vec<&str> = splited_input[0].split('-').collect();
        let minimo = rango[0].parse::<i32>()?;
        let maximo = rango[1].parse::<i32>()?;
        // `next()` is the idiomatic (and clippy-preferred) form of `nth(0)`.
        let letra = splited_input[1].chars().next().unwrap().to_string();
        let contrasenia = splited_input[2].to_string();
        Ok(ContraseniaYPolitica { minimo, maximo, letra, contrasenia })
    }
}
#[cfg(test)]
mod tests {
    // Note this useful idiom: importing names from outer (for mod tests) scope.
    use super::*;
    // A single occurrence within [1, 1] is valid.
    #[test]
    fn foo() {
        assert!(contrasenia_valida_segun_politica(1, 1, "a".to_string(), "a".to_string()));
    }
    // Zero occurrences fails a minimum of 1.
    #[test]
    fn foo2() {
        assert!(!contrasenia_valida_segun_politica(1, 1, "a".to_string(), "b".to_string()));
    }
    #[test]
    fn contrasenia_no_es_valida_si_la_letra_pedida_aparece_menos_veces_que_el_minimo_de_la_politica() {
        assert!(!contrasenia_valida_segun_politica(2, 3, "a".to_string(), "a".to_string()));
    }
    #[test]
    fn contrasenia_no_es_valida_si_la_letra_pedida_aparece_mas_veces_que_el_maximo_de_la_politica() {
        assert!(!contrasenia_valida_segun_politica(1, 1, "a".to_string(), "aa".to_string()));
    }
    // The three sample lines from the puzzle statement.
    #[test]
    fn bleh() {
        assert!(contrasenia_valida_segun_politica(1, 3, "a".to_string(), "abcde".to_string()));
        assert!(!contrasenia_valida_segun_politica(1, 3, "b".to_string(), "cdefg".to_string()));
        assert!(contrasenia_valida_segun_politica(2, 9, "c".to_string(), "ccccccccc".to_string()));
    }
    // Single-line parsing.
    #[test]
    fn bleh2() {
        assert_eq!("1-3 a: abcde".parse::<ContraseniaYPolitica>(),
                   Ok(ContraseniaYPolitica { minimo: 1, maximo: 3, letra: "a".to_string(), contrasenia: "abcde".to_string() }))
    }
    #[test]
    fn bleh3() {
        assert_eq!("1-3 b: cdefg".parse::<ContraseniaYPolitica>(),
                   Ok(ContraseniaYPolitica { minimo: 1, maximo: 3, letra: "b".to_string(), contrasenia: "cdefg".to_string() }))
    }
    // Multi-line parsing.
    #[test]
    fn bleh4() {
        let archivo = "1-3 a: abcde\n1-3 b: cdefg";
        assert_eq!(parsear_input(archivo),
                   Ok(vec![ContraseniaYPolitica { minimo: 1, maximo: 3, letra: "a".to_string(), contrasenia: "abcde".to_string() },
                           ContraseniaYPolitica { minimo: 1, maximo: 3, letra: "b".to_string(), contrasenia: "cdefg".to_string() }]))
    }
    // End-to-end: two of the three sample passwords are valid.
    #[test]
    fn bleh5() {
        let archivo = "1-3 a: abcde\n1-3 b: cdefg\n2-9 c: ccccccccc";
        assert_eq!(resolver_para(archivo), 2)
    }
}
| true |
7c5f4ae8ecaf13b631b02ec0c824d0b2a7225eb9
|
Rust
|
jamesmarva/The-Rust-Programming-Language
|
/code/ch10/c_10_15/src/main.rs
|
UTF-8
| 311 | 3.28125 | 3 |
[] |
no_license
|
// Demo for the generic `larget` (sic) function from TRPL chapter 10.
fn main() {
    // NOTE(review): `v` is built but never used (only `v1` is printed).
    let v = vec![1, 2, 4, 5, 6];
    let v1 = vec!['a', 'z', 'b', 'c', 'r', 't'];
    println!("largest: {}", larget(&v1));
}
/// Returns the largest element of the slice.
/// Panics on an empty slice (same as the original, via `l[0]`).
fn larget<T>(l: &[T]) -> T
where T: PartialOrd + Copy
{
    l.iter()
        .skip(1)
        .fold(l[0], |best, &candidate| if candidate > best { candidate } else { best })
}
| true |
326dab541b957389b0669ba27daa2d92f5eadbe8
|
Rust
|
algesten/lolb
|
/src/limit.rs
|
UTF-8
| 2,411 | 2.828125 | 3 |
[] |
no_license
|
use crate::{AsyncRead, AsyncWrite, LolbError};
use std::io;
use std::pin::Pin;
use std::task::{Context, Poll};
/// Helper to make an AsyncRead that reads up to a limit of bytes.
pub(crate) struct LimitRead<S>
where
    S: AsyncRead,
{
    source: S,
    // Bytes read so far, counted against `limit`.
    read: usize,
    // Maximum total bytes this reader may yield.
    limit: usize,
}
impl<S: AsyncRead> LimitRead<S> {
    /// Wraps `source` so that at most `limit` bytes can be read in total.
    pub fn new(source: S, limit: usize) -> Self {
        Self { source, read: 0, limit }
    }
}
impl<S: AsyncRead + Unpin> AsyncRead for LimitRead<S> {
    /// Delegates to the inner reader, never offering more buffer space than
    /// the remaining byte budget, and tracks how much has been read.
    fn poll_read(
        self: Pin<&mut Self>,
        cx: &mut Context<'_>,
        buf: &mut [u8],
    ) -> Poll<io::Result<usize>> {
        let this = self.get_mut();
        let max = (this.limit - this.read).min(buf.len());
        let poll = Pin::new(&mut this.source).poll_read(cx, &mut buf[0..max]);
        // BUG FIX: `read` was never updated, so every call saw the full
        // `limit` budget again and the cumulative cap was not enforced.
        if let Poll::Ready(Ok(n)) = &poll {
            this.read += n;
        }
        poll
    }
}
/// Helper to make an AsyncWrite that checks we only write a fixed number of bytes.
pub(crate) struct LimitWrite<S>
where
    S: AsyncWrite,
{
    source: S,
    // Bytes successfully written so far, counted against `limit`.
    written: usize,
    // Maximum total bytes that may be written before erroring.
    limit: usize,
}
impl<S: AsyncWrite> LimitWrite<S> {
    /// Wraps `source` so that writing more than `limit` bytes total is an error.
    pub fn new(source: S, limit: usize) -> Self {
        Self { source, written: 0, limit }
    }
}
impl<S: AsyncWrite + Unpin> AsyncWrite for LimitWrite<S> {
    /// Fails with `InvalidData` before writing anything if this write would
    /// push the running total past `limit`; otherwise delegates and tracks
    /// the bytes actually accepted by the inner writer.
    fn poll_write(
        self: Pin<&mut Self>,
        cx: &mut Context<'_>,
        buf: &[u8],
    ) -> Poll<Result<usize, io::Error>> {
        let total = self.written + buf.len();
        if total > self.limit {
            return Poll::Ready(Err(io::Error::new(
                io::ErrorKind::InvalidData,
                LolbError::Owned(format!(
                    "More bytes than LimitWrite allows: {} > {}",
                    total, self.limit
                )),
            )));
        }
        let self_mut = self.get_mut();
        match Pin::new(&mut self_mut.source).poll_write(cx, buf) {
            Poll::Ready(r) => {
                let wr = r?;
                // Count only what the inner writer actually accepted.
                self_mut.written += wr;
                Poll::Ready(Ok(wr))
            }
            Poll::Pending => Poll::Pending,
        }
    }
    // Flush/shutdown are straight pass-throughs to the inner writer.
    fn poll_flush(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Result<(), io::Error>> {
        Pin::new(&mut self.get_mut().source).poll_flush(cx)
    }
    fn poll_shutdown(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Result<(), io::Error>> {
        Pin::new(&mut self.get_mut().source).poll_shutdown(cx)
    }
}
| true |
1de9f9118537a8daf52f712a085364b97256c714
|
Rust
|
SnakeSolid/rust-postgres-status
|
/src/handler/dropdb.rs
|
UTF-8
| 1,159 | 2.734375 | 3 |
[
"MIT"
] |
permissive
|
use super::util::handle_request;
use super::HandlerError;
use crate::config::ConfigRef;
use crate::postgres::PostgreSQL;
use iron::middleware::Handler;
use iron::IronResult;
use iron::Request as IronRequest;
use iron::Response as IronResponse;
/// Iron HTTP handler that drops a PostgreSQL database named in the request.
#[derive(Debug)]
pub struct DropDbHandler {
    config: ConfigRef,
}
impl DropDbHandler {
    /// Creates the handler with a shared reference to the app configuration.
    pub fn new(config: ConfigRef) -> DropDbHandler {
        DropDbHandler { config }
    }
}
impl Handler for DropDbHandler {
    /// Deserializes the request body into [`Request`] and drops the named
    /// database using connection settings from the server config.
    fn handle(&self, request: &mut IronRequest) -> IronResult<IronResponse> {
        handle_request(request, move |request: Request| {
            let name = request.name;
            let server_config = self.config.server();
            // A fresh client per request; credentials come from configuration.
            let postgres = PostgreSQL::new(
                server_config.host(),
                server_config.port(),
                server_config.role(),
                server_config.password(),
            );
            postgres.drop_database(&name).map_err(|err| {
                HandlerError::new(&format!("Failed to drop database `{}` - {}", name, err))
            })
        })
    }
}
/// JSON request body: the name of the database to drop.
#[derive(Debug, Deserialize)]
struct Request {
    name: String,
}
| true |
c6b35d3eb54f33334c2503375b685e9249ffe201
|
Rust
|
fyang93/rust-leetcode
|
/src/p0036_valid_sudoku.rs
|
UTF-8
| 1,681 | 3.53125 | 4 |
[] |
no_license
|
use std::collections::HashMap;
/// Checks that no digit repeats within any row, column, or 3x3 sub-box of a
/// 9x9 board. `'.'` cells are empty and impose no constraint. Does not check
/// solvability — only the placement rules.
pub fn is_valid_sudoku(board: Vec<Vec<char>>) -> bool {
    // Fixed-size "seen" bitsets per row/column/box replace the original
    // per-unit HashMap counters: same semantics, no hashing or allocation.
    // Index 0..=9 covers every possible decimal digit.
    let mut rows = [[false; 10]; 9];
    let mut cols = [[false; 10]; 9];
    let mut boxes = [[false; 10]; 9];
    for i in 0..9 {
        for j in 0..9 {
            if let Some(num) = board[i][j].to_digit(10) {
                let d = num as usize;
                // Row-major index of the 3x3 sub-box containing (i, j).
                let box_id = i / 3 * 3 + j / 3;
                if rows[i][d] || cols[j][d] || boxes[box_id][d] {
                    return false;
                }
                rows[i][d] = true;
                cols[j][d] = true;
                boxes[box_id][d] = true;
            }
        }
    }
    true
}
#[cfg(test)]
mod tests {
    use super::*;
    // The classic valid example board from the LeetCode problem statement.
    #[test]
    fn it_works() {
        let board = vec![
            vec!['5', '3', '.', '.', '7', '.', '.', '.', '.'],
            vec!['6', '.', '.', '1', '9', '5', '.', '.', '.'],
            vec!['.', '9', '8', '.', '.', '.', '.', '6', '.'],
            vec!['8', '.', '.', '.', '6', '.', '.', '.', '3'],
            vec!['4', '.', '.', '8', '.', '3', '.', '.', '1'],
            vec!['7', '.', '.', '.', '2', '.', '.', '.', '6'],
            vec!['.', '6', '.', '.', '.', '.', '2', '8', '.'],
            vec!['.', '.', '.', '4', '1', '9', '.', '.', '5'],
            vec!['.', '.', '.', '.', '8', '.', '.', '7', '9'],
        ];
        assert_eq!(is_valid_sudoku(board), true);
    }
}
| true |
297bfb9be010519437a06afc27f4c2a857013603
|
Rust
|
1aim/state_machine_future
|
/tests/generics.rs
|
UTF-8
| 2,711 | 2.875 | 3 |
[
"MIT",
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
//! Test that we handle generics properly.
//!
//! Here's the deal: we don't figure out which generics are used in which
//! variants, so it is up to you to add phantom datas as needed.
#![feature(futures_api, pin, arbitrary_self_types)]
extern crate futures;
#[macro_use]
extern crate state_machine_future;
use futures::{Future, Poll};
use state_machine_future::RentToOwn;
use std::fmt::Debug;
use std::marker::PhantomData;
use std::io;
/// Trait with both a lifetime and a type parameter, used to stress the
/// derive macro's handling of complex bounds.
pub trait ComplexTrait<'a, T> {
    fn data(&self) -> &'a T;
}
/// Trait with an associated type, used to stress associated-type bounds.
pub trait AssociatedTypesTrait {
    type Type;
}
/// Start-state payload tying together several lifetimes and type parameters;
/// the `PhantomData` fields keep otherwise-unused parameters alive.
pub struct StartType<'a, 'c, 'd: 'a, T, C>
where
    T: ComplexTrait<'c, C>,
    C: 'c,
    'c: 'd,
{
    _data: T,
    _phan: PhantomData<&'a C>,
    _phan2: PhantomData<&'c C>,
    _phan3: PhantomData<&'d C>,
}
#[derive(StateMachineFuture)]
pub enum Fsm<'a, 'c, 'd: 'a, T: 'static, E, C, D>
where
    T: ComplexTrait<'c, C>,
    E: Debug,
    C: 'c,
    'c: 'd,
    D: AssociatedTypesTrait<Type = E>,
{
    /// The start state.
    #[state_machine_future(start)]
    #[state_machine_future(transitions(Complex, AssociatedType, Ready, Error))]
    Start(StartType<'a, 'c, 'd, T, C>, D),
    /// Intermediate state carrying borrowed data with distinct lifetimes.
    #[state_machine_future(transitions(Ready, Error))]
    Complex(&'a i32, &'d u32),
    /// Intermediate state carrying a type with an associated-type bound.
    #[state_machine_future(transitions(Ready, Error))]
    AssociatedType(D),
    /// Some generic ready state.
    #[state_machine_future(ready)]
    Ready(i32),
    /// Some generic error state.
    #[state_machine_future(error)]
    Error(E),
}
// Poll implementations are intentionally `unimplemented!()`: this test only
// checks that the derive macro produces code that type-checks with these
// generics, not that the machine runs.
impl<'a, 'c, 'd: 'a, T, E, C, D> PollFsm<'a, 'c, 'd, T, E, C, D> for Fsm<'a, 'c, 'd, T, E, C, D>
where
    T: ComplexTrait<'c, C> + 'static,
    E: Debug,
    C: 'c,
    'c: 'd,
    D: AssociatedTypesTrait<Type = E>,
{
    fn poll_start<'b>(
        _: &'b mut RentToOwn<'b, Start<'a, 'c, 'd, T, E, C, D>>,
    ) -> Poll<Result<AfterStart<'a, 'd, E, D>, E>> {
        unimplemented!()
    }
    fn poll_complex<'b>(_: &'b mut RentToOwn<'b, Complex<'a, 'd>>) -> Poll<Result<AfterComplex<E>, E>> {
        unimplemented!()
    }
    fn poll_associated_type<'b>(
        _: &'b mut RentToOwn<'b, AssociatedType<E, D>>,
    ) -> Poll<Result<AfterAssociatedType<E>, E>> {
        unimplemented!()
    }
}
// Minimal trait impls so concrete types exist for the smoke test below.
impl<'a> ComplexTrait<'a, i32> for i32 {
    fn data(&self) -> &'a i32 {
        unimplemented!()
    }
}
impl AssociatedTypesTrait for String {
    type Type = io::Error;
}
// Smoke test: `Fsm::start` must construct a future with the expected
// output type when all the generic parameters are filled in.
#[test]
fn check_generic_start() {
    let test = String::from("test");
    let _: Box<Future<Output = Result<i32, io::Error>>> = Box::new(Fsm::start(
        StartType {
            _data: 0,
            _phan: Default::default(),
            _phan2: Default::default(),
            _phan3: Default::default(),
        },
        test,
    ));
}
| true |
e45aae36fc34867492d26e1358b0c05a2cc89f0d
|
Rust
|
enso-org/enso
|
/app/gui/language/ast/impl/src/crumbs.rs
|
UTF-8
| 36,807 | 2.984375 | 3 |
[
"AGPL-3.0-only",
"Apache-2.0",
"AGPL-3.0-or-later"
] |
permissive
|
//! Crumbs for AST. Crumb identifies children node location in AST node. The access should be
//! possible in a constant time.
use crate::prelude::*;
use enso_text::index::*;
use crate::enumerate_non_empty_lines;
use crate::known;
use crate::HasTokens;
use crate::Shape;
use crate::ShiftedVec1;
use crate::SpanSeed;
use crate::TokenConsumer;
use enso_text as text;
// ==============
// === Errors ===
// ==============
/// Indexed element access that reports failures as `IndexOutOfBounds` errors
/// instead of panicking. The `name` argument describes the element kind and is
/// embedded in the error message for diagnostics.
trait IndexedAccess {
    /// Type of elements stored in the collection.
    type Item;
    /// Shared reference to the element at `index`, or an out-of-bounds error.
    fn get_or_err(&self, index: usize, name: impl Str) -> Result<&Self::Item, IndexOutOfBounds>;
    /// Mutable reference to the element at `index`, or an out-of-bounds error.
    fn get_mut_or_err(
        &mut self,
        index: usize,
        name: impl Str,
    ) -> Result<&mut Self::Item, IndexOutOfBounds>;
}
/// `Vec` indexing with descriptive out-of-bounds errors.
impl<T> IndexedAccess for Vec<T> {
    type Item = T;
    fn get_or_err(&self, index: usize, name: impl Str) -> Result<&Self::Item, IndexOutOfBounds> {
        match self.get(index) {
            Some(item) => Ok(item),
            None => Err(IndexOutOfBounds(name.into())),
        }
    }
    fn get_mut_or_err(
        &mut self,
        index: usize,
        name: impl Str,
    ) -> Result<&mut Self::Item, IndexOutOfBounds> {
        match self.get_mut(index) {
            Some(item) => Ok(item),
            None => Err(IndexOutOfBounds(name.into())),
        }
    }
}
/// `ShiftedVec1` indexing: index `0` selects the head element, indices `1..`
/// select from the tail (unwrapping each element's `Shifted` container).
impl<T> IndexedAccess for ShiftedVec1<T> {
    type Item = T;
    fn get_or_err(&self, index: usize, name: impl Str) -> Result<&Self::Item, IndexOutOfBounds> {
        match index.checked_sub(1) {
            None => Ok(&self.head),
            Some(tail_index) => match self.tail.get(tail_index) {
                Some(shifted) => Ok(&shifted.wrapped),
                None => Err(IndexOutOfBounds(name.into())),
            },
        }
    }
    fn get_mut_or_err(
        &mut self,
        index: usize,
        name: impl Str,
    ) -> Result<&mut Self::Item, IndexOutOfBounds> {
        match index.checked_sub(1) {
            None => Ok(&mut self.head),
            Some(tail_index) => match self.tail.get_mut(tail_index) {
                Some(shifted) => Ok(&mut shifted.wrapped),
                None => Err(IndexOutOfBounds(name.into())),
            },
        }
    }
}
/// Error: a crumb refers to an element (named by the stored string) that is
/// absent from the node.
#[allow(missing_docs)]
#[derive(Debug, Fail, Clone)]
#[fail(display = "The crumb refers to a {} which is not present.", _0)]
pub struct NotPresent(String);
/// Error: a crumb's index fell outside the referred collection. The stored
/// string names the kind of indexed element (e.g. "line").
#[allow(missing_docs)]
#[derive(Debug, Fail, Clone)]
#[fail(display = "The crumb refers to {} by index that is out of bounds.", _0)]
pub struct IndexOutOfBounds(String);
/// Error: the crumb points at a line that exists but holds no AST (an empty
/// line). Carries the textual representation of the context AST for messages.
#[allow(missing_docs)]
#[derive(Debug, Fail, Clone)]
#[fail(
    display = "The line designated by crumb {:?} does not contain any AST. Context AST was {}.",
    crumb, repr
)]
pub struct LineDoesNotContainAst {
    // Text representation of the AST in which the empty line was found.
    repr:  String,
    // The crumb that designated the empty line.
    crumb: Crumb,
}
impl LineDoesNotContainAst {
    /// Builds the error from the context AST (captured as its text
    /// representation) and the crumb that designated the empty line.
    pub fn new(repr: impl HasRepr, crumb: impl Into<Crumb>) -> LineDoesNotContainAst {
        LineDoesNotContainAst { repr: repr.repr(), crumb: crumb.into() }
    }
}
/// Error: a crumb variant was applied to a `Shape` variant it does not match
/// (e.g. a `Module` crumb used on an `Infix` shape).
#[derive(Debug, Fail, Clone, Copy)]
#[fail(display = "The crumb '{}' is not applicable to '{}' shape.", crumb, shape)]
struct MismatchedCrumbType {
    // Static variant name of the shape being accessed.
    shape: &'static str,
    // Static variant name of the crumb that was applied.
    crumb: &'static str,
}
/// Error: a `TreeCrumb` pointed at a span-info entry that is not a
/// `SpanSeed::Child`, so there is no child AST at that position.
#[derive(Debug, Fail, Clone, Copy)]
#[fail(display = "The crumb refers to a non-Child tree variant.")]
struct NonChildTreeCrumb;
// =============
// === Crumb ===
// =============
// === Ast ===
/// Trait automatically implemented for all IntoIterators of crumbs.
///
/// It provides way to easily convert vector of specific crumbs (e.g.
/// `[InfixCrumb::LeftOperand, ..]`) without calling `into` on each element.
pub trait IntoCrumbs: IntoIterator<Item: Into<Crumb>> + Sized {
    /// Convert to the actual Crumbs structure.
    fn into_crumbs(self) -> Crumbs {
        iter_crumbs(self).collect()
    }
}
// Blanket impl: anything iterable over crumb-convertible items is `IntoCrumbs`.
impl<T: IntoIterator<Item: Into<Crumb>> + Sized> IntoCrumbs for T {}
/// Converts `IntoCrumbs` value into a `Crumb`-yielding iterator.
pub fn iter_crumbs(crumbs: impl IntoCrumbs) -> impl Iterator<Item = Crumb> {
    crumbs.into_iter().map(Into::into)
}
/// Sequence of `Crumb`s describing traversal path through AST, ordered from
/// the outermost node inwards.
pub type Crumbs = Vec<Crumb>;
/// Helper macro. Behaves like `vec!` but converts each element into `Crumb`.
#[macro_export]
macro_rules! crumbs {
    // Empty invocation: an explicitly typed empty crumb vector.
    ( ) => {
        Vec::<$crate::crumbs::Crumb>::new()
    };
    // One or more elements, each converted with `Crumb::from`.
    ( $( $x:expr ),* ) => {
        vec![$($crate::crumbs::Crumb::from($x)),*]
    };
}
/// Crumb identifies location of child AST in an AST node. Allows for a single step AST traversal.
/// The enum variants are paired with Shape variants. For example, `ModuleCrumb` allows obtaining
/// (or setting) `Ast` stored within a `Module` shape.
///
/// As `Ast` can store any `Shape`, this `Crumb` may store any `Shape`-specific crumb type.
///
/// The location format should allow constant time traversal step, so e.g. indices should be used
/// rather than names or similar.
///
/// Crumbs are potentially invalidated by any AST change.
// === Prefix ===
/// Crumb for `Prefix` shapes — selects either the function or the argument.
#[allow(missing_docs)]
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub enum PrefixCrumb {
    Func,
    Arg,
}
// === Infix ===
/// Crumb for `Infix` shapes — selects the left operand, the operator, or the
/// right operand.
#[allow(missing_docs)]
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub enum InfixCrumb {
    LeftOperand,
    Operator,
    RightOperand,
}
// === SectionLeft ===
/// Crumb for `SectionLeft` shapes — selects the argument or the operator.
#[allow(missing_docs)]
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub enum SectionLeftCrumb {
    Arg,
    Opr,
}
// === SectionRight ===
/// Crumb for `SectionRight` shapes — selects the operator or the argument.
#[allow(missing_docs)]
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub enum SectionRightCrumb {
    Opr,
    Arg,
}
// === SectionSides ===
/// Crumb for `SectionSides` shapes. The operator is the only child, hence a
/// unit struct with no payload.
#[allow(missing_docs)]
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct SectionSidesCrumb;
// === Module ===
/// Crumb for `Module` shapes — designates a line by its index in the module's
/// full line sequence (empty lines included in the numbering).
#[allow(missing_docs)]
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct ModuleCrumb {
    pub line_index: usize,
}
// === Block ===
/// Crumb for `Block` shapes — either the obligatory first line or one of the
/// tail lines (indexed within the tail sequence, empty lines included).
#[allow(missing_docs)]
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub enum BlockCrumb {
    /// The first non-empty line in block.
    HeadLine,
    /// Index in the sequence of "rest of" lines (not counting the HeadLine).
    TailLine { tail_index: usize },
}
// === Conversion Traits ===
// Generates, for a single (Crumb variant, crumb type) pair:
//  * `From<$crumb_id>` and `From<&$crumb_id>` conversions into `Crumb`,
//  * an `IntoIterator` impl yielding the crumb exactly once, so a single
//    specific crumb can be used wherever a crumb sequence is expected.
macro_rules! from_crumb {
    ($id:ident,$crumb_id:ident) => {
        impl From<$crumb_id> for Crumb {
            fn from(crumb: $crumb_id) -> Self {
                Crumb::$id(crumb)
            }
        }
        impl From<&$crumb_id> for Crumb {
            fn from(crumb: &$crumb_id) -> Self {
                Crumb::$id(crumb.clone())
            }
        }
        impl IntoIterator for $crumb_id {
            type Item = Crumb;
            type IntoIter = std::iter::Once<Self::Item>;
            fn into_iter(self) -> Self::IntoIter {
                std::iter::once(Crumb::from(self))
            }
        }
    };
}
// Given (Crumb variant, crumb type, checker name) triples, generates:
//  * `from_crumb!` conversions for each crumb type,
//  * `Crumbable for Shape<Ast>` dispatching each crumb to the matching shape
//    variant (mismatched pairs yield `MismatchedCrumbType`),
//  * the `Crumb` enum itself with one variant per crumb type,
//  * per-variant `is_*` checkers and `variant_name`.
macro_rules! impl_crumbs {
    ($(($id:ident,$crumb_id:ident,$matcher:ident)),*) => {
        $(from_crumb!{$id,$crumb_id})*
        impl Crumbable for Shape<Ast> {
            type Crumb = Crumb;
            fn get(&self, crumb:&Self::Crumb) -> FallibleResult<&Ast> {
                match (self,crumb) {
                    $((Shape::$id(shape),Crumb::$id(crumb)) => shape.get(crumb),)*
                    (shape, crumb) => Err(MismatchedCrumbType { shape: shape.variant_name(), crumb: crumb.variant_name() }.into())
                }
            }
            fn set(&self, crumb:&Self::Crumb, new_ast:Ast) -> FallibleResult<Self> {
                match (self,crumb) {
                    $((Shape::$id(shape),Crumb::$id(crumb)) => Ok(shape.set(crumb,new_ast)?.into()),)*
                    (shape, crumb) => Err(MismatchedCrumbType { shape: shape.variant_name(), crumb: crumb.variant_name() }.into())
                }
            }
            fn iter_subcrumbs<'a>(&'a self) -> Box<dyn Iterator<Item = Self::Crumb> + 'a> {
                match self {
                    $(Shape::$id(shape) => Box::new(shape.iter_subcrumbs().map(Crumb::$id)),)*
                    // Shapes without a crumb type have no children to iterate.
                    _ => Box::new(std::iter::empty())
                }
            }
        }
        /// Crumb identifies location of child AST in an AST node. Allows for a single step AST traversal.
        #[derive(Clone,Copy,Debug,PartialEq,Eq,PartialOrd,Ord,Hash)]
        #[allow(missing_docs)]
        pub enum Crumb {
            $($id($crumb_id),)*
        }
        impl Crumb {
            $(
                /// Constructor checker.
                pub fn $matcher(&self) -> bool {
                    match self {
                        Self::$id{..} => true,
                        _ => false,
                    }
                }
            )*
            /// Get the Crumb variant name as a static string. Does not contain the module path.
            pub fn variant_name(&self) -> &'static str {
                match self {
                    $(Self::$id{..} => stringify!($id),)*
                }
            }
        }
    }
}
/// A single `Crumb` can be used wherever a crumb sequence is expected — it
/// iterates over itself exactly once.
impl IntoIterator for Crumb {
    type Item = Crumb;
    type IntoIter = std::iter::Once<Self::Item>;
    fn into_iter(self) -> Self::IntoIter {
        std::iter::once(self)
    }
}
// Instantiates the `Crumb` enum, its conversions, `is_*` checkers and the
// `Crumbable for Shape<Ast>` dispatch for every supported shape.
impl_crumbs! {
    // Translated types
    ( Prefix       , PrefixCrumb       , is_prefix        ),
    ( Infix        , InfixCrumb        , is_infix         ),
    ( SectionLeft  , SectionLeftCrumb  , is_section_left  ),
    ( SectionRight , SectionRightCrumb , is_section_right ),
    ( SectionSides , SectionSidesCrumb , is_section_sides ),
    ( Module       , ModuleCrumb       , is_module        ),
    ( Block        , BlockCrumb        , is_block         ),
    // Tree
    ( Tree         , TreeCrumb         , is_tree          )
}
// =================
// === Crumbable ===
// =================
/// Interface for items that allow getting/setting stored Ast located by arbitrary `Crumb`.
///
/// `set` is non-destructive: it takes `&self` and returns an updated copy.
pub trait Crumbable {
    /// Specific `Crumb` type used by `Self` to locate child Asts.
    type Crumb: Into<Crumb> + IntoIterator<Item = Crumb>;
    /// Retrieves `Ast` under the crumb.
    fn get(&self, crumb: &Self::Crumb) -> FallibleResult<&Ast>;
    /// Sets `Ast` under the crumb, returns updated entity.
    fn set(&self, crumb: &Self::Crumb, new_ast: Ast) -> FallibleResult<Self>
    where Self: Sized;
    /// Iterates all valid crumbs available for `self`.
    fn iter_subcrumbs<'a>(&'a self) -> Box<dyn Iterator<Item = Self::Crumb> + 'a>;
    /// Iterates pairs (crumb,child_ast) for `self`.
    fn enumerate<'a>(&'a self) -> Box<dyn Iterator<Item = (Self::Crumb, &'a Ast)> + 'a> {
        let indices = self.iter_subcrumbs();
        let iter = indices.map(move |crumb| {
            // NOTE Safe if this module is correct - children crumbs are always accessible.
            let child = self.get(&crumb).unwrap();
            (crumb, child)
        });
        Box::new(iter)
    }
    /// Returns child Ast subtree while keeping knowledge of its location.
    fn get_located(&self, crumb: Self::Crumb) -> FallibleResult<Located<&Ast>> {
        let child = self.get(&crumb)?;
        Ok(Located::new(crumb, child))
    }
    /// Enumerates all AST being a direct children of the given AST node.
    fn children<'a>(&'a self) -> Box<dyn Iterator<Item = ChildAst<'a>> + 'a> {
        let iter = self.enumerate().map(|(crumb, ast)| ChildAst::new(crumb, ast));
        Box::new(iter)
    }
}
/// `Prefix` children: the applied function and its argument.
impl Crumbable for crate::Prefix<Ast> {
    type Crumb = PrefixCrumb;
    fn get(&self, crumb: &Self::Crumb) -> FallibleResult<&Ast> {
        match crumb {
            PrefixCrumb::Func => Ok(&self.func),
            PrefixCrumb::Arg => Ok(&self.arg),
        }
    }
    fn set(&self, crumb: &Self::Crumb, new_ast: Ast) -> FallibleResult<Self> {
        let mut updated = self.clone();
        match crumb {
            PrefixCrumb::Func => updated.func = new_ast,
            PrefixCrumb::Arg => updated.arg = new_ast,
        }
        Ok(updated)
    }
    fn iter_subcrumbs(&self) -> Box<dyn Iterator<Item = Self::Crumb>> {
        // `const` gives the array a 'static lifetime for the boxed iterator.
        const SUBCRUMBS: [PrefixCrumb; 2] = [PrefixCrumb::Func, PrefixCrumb::Arg];
        Box::new(SUBCRUMBS.iter().copied())
    }
}
/// `Infix` children: left operand, operator and right operand.
impl Crumbable for crate::Infix<Ast> {
    type Crumb = InfixCrumb;
    fn get(&self, crumb: &Self::Crumb) -> FallibleResult<&Ast> {
        match crumb {
            InfixCrumb::LeftOperand => Ok(&self.larg),
            InfixCrumb::Operator => Ok(&self.opr),
            InfixCrumb::RightOperand => Ok(&self.rarg),
        }
    }
    fn set(&self, crumb: &Self::Crumb, new_ast: Ast) -> FallibleResult<Self> {
        let mut updated = self.clone();
        match crumb {
            InfixCrumb::LeftOperand => updated.larg = new_ast,
            InfixCrumb::Operator => updated.opr = new_ast,
            InfixCrumb::RightOperand => updated.rarg = new_ast,
        }
        Ok(updated)
    }
    fn iter_subcrumbs(&self) -> Box<dyn Iterator<Item = Self::Crumb>> {
        // Yielded left-to-right, matching source order.
        const SUBCRUMBS: [InfixCrumb; 3] =
            [InfixCrumb::LeftOperand, InfixCrumb::Operator, InfixCrumb::RightOperand];
        Box::new(SUBCRUMBS.iter().copied())
    }
}
/// `SectionLeft` children: the argument followed by the operator.
impl Crumbable for crate::SectionLeft<Ast> {
    type Crumb = SectionLeftCrumb;
    fn get(&self, crumb: &Self::Crumb) -> FallibleResult<&Ast> {
        match crumb {
            SectionLeftCrumb::Arg => Ok(&self.arg),
            SectionLeftCrumb::Opr => Ok(&self.opr),
        }
    }
    fn set(&self, crumb: &Self::Crumb, new_ast: Ast) -> FallibleResult<Self> {
        let mut updated = self.clone();
        match crumb {
            SectionLeftCrumb::Arg => updated.arg = new_ast,
            SectionLeftCrumb::Opr => updated.opr = new_ast,
        }
        Ok(updated)
    }
    fn iter_subcrumbs(&self) -> Box<dyn Iterator<Item = Self::Crumb>> {
        const SUBCRUMBS: [SectionLeftCrumb; 2] = [SectionLeftCrumb::Arg, SectionLeftCrumb::Opr];
        Box::new(SUBCRUMBS.iter().copied())
    }
}
/// `SectionRight` children: the operator followed by the argument.
impl Crumbable for crate::SectionRight<Ast> {
    type Crumb = SectionRightCrumb;
    fn get(&self, crumb: &Self::Crumb) -> FallibleResult<&Ast> {
        match crumb {
            SectionRightCrumb::Arg => Ok(&self.arg),
            SectionRightCrumb::Opr => Ok(&self.opr),
        }
    }
    fn set(&self, crumb: &Self::Crumb, new_ast: Ast) -> FallibleResult<Self> {
        let mut updated = self.clone();
        match crumb {
            SectionRightCrumb::Arg => updated.arg = new_ast,
            SectionRightCrumb::Opr => updated.opr = new_ast,
        }
        Ok(updated)
    }
    fn iter_subcrumbs(&self) -> Box<dyn Iterator<Item = Self::Crumb>> {
        // Operator precedes the argument in a right section.
        const SUBCRUMBS: [SectionRightCrumb; 2] = [SectionRightCrumb::Opr, SectionRightCrumb::Arg];
        Box::new(SUBCRUMBS.iter().copied())
    }
}
/// `SectionSides` has a single child — the operator — so the crumb payload is
/// ignored.
impl Crumbable for crate::SectionSides<Ast> {
    type Crumb = SectionSidesCrumb;
    fn get(&self, _crumb: &Self::Crumb) -> FallibleResult<&Ast> {
        Ok(&self.opr)
    }
    fn set(&self, _crumb: &Self::Crumb, new_ast: Ast) -> FallibleResult<Self> {
        let mut updated = self.clone();
        updated.opr = new_ast;
        Ok(updated)
    }
    fn iter_subcrumbs(&self) -> Box<dyn Iterator<Item = Self::Crumb>> {
        Box::new(std::iter::once(SectionSidesCrumb))
    }
}
/// `Module` children are the ASTs of its non-empty lines, addressed by line
/// index within the full line sequence.
impl Crumbable for crate::Module<Ast> {
    type Crumb = ModuleCrumb;
    fn get(&self, crumb: &Self::Crumb) -> FallibleResult<&Ast> {
        let line = self.lines.get_or_err(crumb.line_index, "line")?;
        match line.elem.as_ref() {
            Some(ast) => Ok(ast),
            // The line exists but is empty.
            None => Err(LineDoesNotContainAst::new(self, crumb).into()),
        }
    }
    fn set(&self, crumb: &Self::Crumb, new_ast: Ast) -> FallibleResult<Self> {
        let mut updated = self.clone();
        // Setting on an empty line fills it rather than failing.
        updated.lines.get_mut_or_err(crumb.line_index, "line")?.elem.replace(new_ast);
        Ok(updated)
    }
    fn iter_subcrumbs<'a>(&'a self) -> Box<dyn Iterator<Item = Self::Crumb> + 'a> {
        let crumbs =
            non_empty_line_indices(self.lines.iter()).map(|line_index| ModuleCrumb { line_index });
        Box::new(crumbs)
    }
}
/// `Block` children: the obligatory head line plus the ASTs of non-empty tail
/// lines.
impl Crumbable for crate::Block<Ast> {
    type Crumb = BlockCrumb;
    fn get(&self, crumb: &Self::Crumb) -> FallibleResult<&Ast> {
        match crumb {
            BlockCrumb::HeadLine => Ok(&self.first_line.elem),
            BlockCrumb::TailLine { tail_index } => {
                let line = self.lines.get_or_err(*tail_index, "line")?;
                match line.elem.as_ref() {
                    Some(ast) => Ok(ast),
                    None => Err(LineDoesNotContainAst::new(self, crumb).into()),
                }
            }
        }
    }
    fn set(&self, crumb: &Self::Crumb, new_ast: Ast) -> FallibleResult<Self> {
        let mut updated = self.clone();
        match crumb {
            BlockCrumb::HeadLine => updated.first_line.elem = new_ast,
            BlockCrumb::TailLine { tail_index } => {
                // Setting on an empty tail line fills it rather than failing.
                updated.lines.get_mut_or_err(*tail_index, "line")?.elem.replace(new_ast);
            }
        }
        Ok(updated)
    }
    fn iter_subcrumbs<'a>(&'a self) -> Box<dyn Iterator<Item = Self::Crumb> + 'a> {
        let head = std::iter::once(BlockCrumb::HeadLine);
        let tail = non_empty_line_indices(self.lines.iter())
            .map(|tail_index| BlockCrumb::TailLine { tail_index });
        Box::new(head.chain(tail))
    }
}
/// Just delegates the implementation to shape.
impl Crumbable for Ast {
    type Crumb = Crumb;
    fn get(&self, crumb: &Self::Crumb) -> FallibleResult<&Ast> {
        self.shape().get(crumb)
    }
    fn set(&self, crumb: &Self::Crumb, new_ast: Ast) -> FallibleResult<Self> {
        // Rebuild the node around the updated shape; other node data is kept.
        let new_shape = self.shape().set(crumb, new_ast)?;
        Ok(self.with_shape(new_shape))
    }
    fn iter_subcrumbs<'a>(&'a self) -> Box<dyn Iterator<Item = Self::Crumb> + 'a> {
        self.shape().iter_subcrumbs()
    }
}
/// Just delegates to Ast.
impl<T, E> Crumbable for known::KnownAst<T>
where
    for<'t> &'t Shape<Ast>: TryInto<&'t T, Error = E>,
    E: failure::Fail,
{
    type Crumb = Crumb;
    fn get(&self, crumb: &Self::Crumb) -> FallibleResult<&Ast> {
        self.ast().get(crumb)
    }
    fn set(&self, crumb: &Self::Crumb, new_ast: Ast) -> FallibleResult<Self> {
        let new_ast = self.ast().set(crumb, new_ast)?;
        // Re-validate: the updated AST must still match the known shape `T`.
        let ret = known::KnownAst::try_new(new_ast)?;
        Ok(ret)
    }
    fn iter_subcrumbs<'a>(&'a self) -> Box<dyn Iterator<Item = Self::Crumb> + 'a> {
        self.ast().iter_subcrumbs()
    }
}
// ===================
// === Tree crumbs ===
// ===================
/// `Tree` children are the `SpanSeed::Child` entries of its span info,
/// addressed by position in the span sequence.
impl Crumbable for crate::Tree<Ast> {
    type Crumb = TreeCrumb;
    fn get(&self, crumb: &Self::Crumb) -> FallibleResult<&Ast> {
        let seed = self
            .span_info
            .get(crumb.index)
            .ok_or_else(|| IndexOutOfBounds("Tree child".into()))?;
        if let SpanSeed::Child(crate::SpanSeedChild { node }) = seed {
            Ok(node)
        } else {
            Err(NonChildTreeCrumb.into())
        }
    }
    fn set(&self, crumb: &Self::Crumb, new_ast: Ast) -> FallibleResult<Self> {
        let mut updated = self.clone();
        let slot = updated
            .span_info
            .get_mut(crumb.index)
            .ok_or_else(|| IndexOutOfBounds("Tree child".into()))?;
        // Overwrites whatever seed was there with a child node.
        *slot = SpanSeed::Child(crate::SpanSeedChild { node: new_ast });
        Ok(updated)
    }
    fn iter_subcrumbs<'a>(&'a self) -> Box<dyn Iterator<Item = Self::Crumb> + 'a> {
        let crumbs = self.span_info.iter().enumerate().filter_map(|(index, seed)| {
            if matches!(seed, SpanSeed::Child(_)) { Some(TreeCrumb { index }) } else { None }
        });
        Box::new(crumbs)
    }
}
/// Crumb for `Tree` shapes — index into the tree's span-info sequence. Only
/// indices holding `SpanSeed::Child` entries designate actual child ASTs.
#[allow(missing_docs)]
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct TreeCrumb {
    pub index: usize,
}
// ===========================
// === Recursive Traversal ===
// ===========================
/// Interface for recursive AST traversal using `Crumb` sequence.
///
/// Intended for `Ast` and `Ast`-like types, like `KnownAst`.
pub trait TraversableAst: Sized {
    /// Returns rewritten AST where child AST under location designated by `crumbs` is updated.
    ///
    /// Works recursively.
    fn set_traversing(&self, crumbs: &[Crumb], new_ast: Ast) -> FallibleResult<Self>;
    /// Recursively traverses AST to retrieve AST node located by given crumbs sequence.
    fn get_traversing(&self, crumbs: &[Crumb]) -> FallibleResult<&Ast>;
    /// Get the `Ast` node corresponding to `Self`.
    fn my_ast(&self) -> FallibleResult<&Ast> {
        // Empty crumb sequence designates the node itself.
        self.get_traversing(&[])
    }
    /// Calculate the span of the descendent AST node described by given crumbs.
    fn range_of_descendant_at(&self, crumbs: &[Crumb]) -> FallibleResult<text::Range<Byte>> {
        let mut position = 0.byte();
        let mut ast = self.my_ast()?;
        // Walk down the crumb path, accumulating each child's byte offset
        // within its parent.
        for crumb in crumbs {
            let child = ast.get(crumb)?;
            let child_offset = ast.child_offset(child)?;
            position += child_offset;
            ast = child;
        }
        Ok(text::Range::new(position, position + ast.repr_len()))
    }
}
/// Recursive traversal for plain `Ast` nodes.
impl TraversableAst for Ast {
    fn set_traversing(&self, crumbs: &[Crumb], new_ast: Ast) -> FallibleResult<Self> {
        match crumbs.split_first() {
            // Empty path: replace this very node.
            None => Ok(new_ast),
            Some((first_crumb, tail_crumbs)) => {
                let child = self.get(first_crumb)?;
                let updated_child = child.set_traversing(tail_crumbs, new_ast)?;
                self.set(first_crumb, updated_child)
            }
        }
    }
    fn get_traversing(&self, crumbs: &[Crumb]) -> FallibleResult<&Ast> {
        match crumbs {
            [] => Ok(self),
            [first_crumb, tail_crumbs @ ..] => self.get(first_crumb)?.get_traversing(tail_crumbs),
        }
    }
}
/// Delegates traversal to the wrapped `Ast`; after an update the result is
/// re-validated against the known shape `T` via `try_new`.
impl<T, E> TraversableAst for known::KnownAst<T>
where
    for<'t> &'t Shape<Ast>: TryInto<&'t T, Error = E>,
    E: failure::Fail,
{
    fn set_traversing(&self, crumbs: &[Crumb], new_ast: Ast) -> FallibleResult<Self> {
        let updated_ast = self.ast().set_traversing(crumbs, new_ast)?;
        Ok(Self::try_new(updated_ast)?)
    }
    fn get_traversing(&self, crumbs: &[Crumb]) -> FallibleResult<&Ast> {
        self.ast().get_traversing(crumbs)
    }
}
// ===============
// === Utility ===
// ===============
/// Iterates over indices of non-empty lines in a line sequence. Indices refer
/// to positions in the input sequence, so empty lines still advance the count.
pub fn non_empty_line_indices<'a, T: 'a>(
    iter: impl Iterator<Item = &'a crate::BlockLine<Option<T>>> + 'a,
) -> impl Iterator<Item = usize> + 'a {
    enumerate_non_empty_lines(iter).map(|(index, _ast)| index)
}
// ===============
// === Located ===
// ===============
/// Item which location is identified by `Crumbs`.
///
/// Dereferences to the wrapped item for convenient access.
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Deref)]
pub struct Located<T> {
    /// Crumbs from containing parent.
    pub crumbs: Crumbs,
    /// The sub-item representation.
    #[deref]
    pub item:   T,
}
impl<T> Located<T> {
/// Creates a new located item.
pub fn new(crumbs: impl IntoCrumbs, item: T) -> Located<T> {
let crumbs = crumbs.into_crumbs();
Located { crumbs, item }
}
/// Creates a new item in a root location (empty crumbs list).
pub fn new_root(item: T) -> Located<T> {
let crumbs = default();
Located { crumbs, item }
}
/// Uses given function to map over the item.
pub fn map<U>(self, f: impl FnOnce(T) -> U) -> Located<U> {
Located::new(self.crumbs, f(self.item))
}
/// Descends into a child described from `item` by given function.
pub fn entered<U>(&self, f: impl FnOnce(&T) -> Located<U>) -> Located<U> {
let child = f(&self.item);
self.descendant(child.crumbs, child.item)
}
/// Takes crumbs relative to self and item that will be wrapped.
pub fn descendant<U>(&self, crumbs: impl IntoCrumbs, child: U) -> Located<U> {
let crumbs_so_far = self.crumbs.iter().cloned();
let crumbs = crumbs_so_far.chain(crumbs.into_crumbs());
Located::new(crumbs, child)
}
/// Maps into child, concatenating this crumbs and child crumbs.
pub fn into_descendant<U>(self, child: Located<U>) -> Located<U> {
let Located { crumbs, item } = child;
let mut ret = self.map(|_| item);
ret.crumbs.extend(crumbs);
ret
}
}
/// Extra operation available when the located value is optional.
impl<T> Located<Option<T>> {
    /// Propagates Option from the stored value onto self.
    pub fn into_opt(self) -> Option<Located<T>> {
        match self.item {
            Some(item) => Some(Located { crumbs: self.crumbs, item }),
            None => None,
        }
    }
}
/// The tokens of a located item are the tokens of the item itself — the crumb
/// path does not contribute to the textual representation.
impl<T: HasTokens> HasTokens for Located<T> {
    fn feed_to(&self, consumer: &mut impl TokenConsumer) {
        self.item.feed_to(consumer)
    }
}
/// Reference to AST stored under some known crumbs path within its parent.
pub type ChildAst<'a> = Located<&'a Ast>;
// =============
// === Tests ===
// =============
#[cfg(test)]
mod tests {
    use super::*;
    use crate::*;
    /// Gets item under given crumb and checks if its representation is as expected.
    fn expect_repr<C: Crumbable>(item: &C, crumb: &C::Crumb, expected_repr: impl Str) {
        assert_eq!(item.get(crumb).unwrap().repr(), expected_repr.as_ref());
    }
    #[test]
    fn module_crumb() {
        let lines = [Some(Ast::var("foo")), None, Some(Ast::var("bar"))];
        let module = crate::Module::from_lines(&lines);
        // === Getting ===
        // Empty lines (index 1) and out-of-range indices (3) are errors.
        expect_repr(&module, &ModuleCrumb { line_index: 0 }, "foo");
        assert!(module.get(&ModuleCrumb { line_index: 1 }).is_err());
        expect_repr(&module, &ModuleCrumb { line_index: 2 }, "bar");
        assert!(module.get(&ModuleCrumb { line_index: 3 }).is_err());
        // === Setting ===
        // Setting works on both non-empty and empty lines, but not past the end.
        let module2 = module.set(&ModuleCrumb { line_index: 0 }, Ast::var("foo2")).unwrap();
        assert_eq!(module2.repr(), "foo2\n\nbar");
        let module3 = module.set(&ModuleCrumb { line_index: 1 }, Ast::var("foo2")).unwrap();
        assert_eq!(module3.repr(), "foo\nfoo2\nbar");
        let module4 = module.set(&ModuleCrumb { line_index: 3 }, Ast::var("foo2"));
        assert!(module4.is_err());
    }
    #[test]
    fn block_crumb() {
        let first_line = Ast::var("first_line");
        let tail_lines = [Some(Ast::var("tail0")), None, Some(Ast::var("tail2"))];
        let block = crate::Block::from_lines(&first_line, &tail_lines);
        // Head line is always present; empty tail lines are errors on `get`.
        expect_repr(&block, &BlockCrumb::HeadLine, "first_line");
        expect_repr(&block, &BlockCrumb::TailLine { tail_index: 0 }, "tail0");
        assert!(block.get(&BlockCrumb::TailLine { tail_index: 1 }).is_err());
        expect_repr(&block, &BlockCrumb::TailLine { tail_index: 2 }, "tail2");
        assert!(block.get(&BlockCrumb::TailLine { tail_index: 3 }).is_err());
        let block2 = block.set(&BlockCrumb::HeadLine, Ast::var("first_line2")).unwrap();
        assert_eq!(block2.repr(), "\nfirst_line2\ntail0\n\ntail2");
        // Setting an empty tail line fills it.
        let block3 = block.set(&BlockCrumb::TailLine { tail_index: 1 }, Ast::var("tail1")).unwrap();
        assert_eq!(block3.repr(), "\nfirst_line\ntail0\ntail1\ntail2");
        let block4 =
            block.set(&BlockCrumb::TailLine { tail_index: 2 }, Ast::var("tail22")).unwrap();
        assert_eq!(block4.repr(), "\nfirst_line\ntail0\n\ntail22");
    }
    // Helpers converting a shape-specific crumb through the `Crumb` enum.
    fn get<T, F: FnOnce(T) -> Crumb>(f: F, ast: &Ast, crumb: T) -> FallibleResult<&Ast> {
        let crumb = f(crumb);
        ast.get(&crumb)
    }
    fn set<T, F: FnOnce(T) -> Crumb>(
        f: F,
        ast: &Ast,
        crumb: T,
        internal_ast: Ast,
    ) -> FallibleResult<Ast> {
        let crumb = f(crumb);
        ast.set(&crumb, internal_ast)
    }
    // === Infix ===
    #[test]
    fn infix_crumb() -> FallibleResult {
        let infix = Ast::infix_var("foo", "+", "bar");
        let to_crumb_enum = Crumb::Infix;
        let baz = Ast::var("baz");
        let times = Ast::opr("*");
        assert_eq!(infix.repr(), "foo + bar");
        assert_eq!(get(to_crumb_enum, &infix, InfixCrumb::LeftOperand)?.repr(), "foo");
        assert_eq!(get(to_crumb_enum, &infix, InfixCrumb::Operator)?.repr(), "+");
        assert_eq!(get(to_crumb_enum, &infix, InfixCrumb::RightOperand)?.repr(), "bar");
        assert_eq!(
            set(to_crumb_enum, &infix, InfixCrumb::LeftOperand, baz.clone())?.repr(),
            "baz + bar"
        );
        assert_eq!(set(to_crumb_enum, &infix, InfixCrumb::Operator, times)?.repr(), "foo * bar");
        assert_eq!(set(to_crumb_enum, &infix, InfixCrumb::RightOperand, baz)?.repr(), "foo + baz");
        Ok(())
    }
    #[test]
    fn iterate_infix() {
        // Subcrumbs are yielded in source order: left, operator, right.
        let sum = crate::Infix::from_vars("foo", "+", "bar");
        let (larg, opr, rarg) = sum.iter_subcrumbs().expect_tuple();
        assert_eq!(larg, InfixCrumb::LeftOperand);
        assert_eq!(opr, InfixCrumb::Operator);
        assert_eq!(rarg, InfixCrumb::RightOperand);
    }
    #[test]
    fn nested_infix() -> FallibleResult {
        use InfixCrumb::*;
        let sum = Ast::infix_var("foo", "+", "bar");
        let infix = Ast::infix(Ast::var("main"), "=", sum);
        assert_eq!(infix.repr(), "main = foo + bar");
        let set = |crumbs: &[InfixCrumb], ast| {
            let crumbs = crumbs.iter().map(|c| Crumb::Infix(*c)).collect_vec();
            infix.set_traversing(&crumbs, ast)
        };
        let get = |crumbs: &[InfixCrumb]| {
            let crumbs = crumbs.iter().map(|c| Crumb::Infix(*c)).collect_vec();
            infix.get_traversing(&crumbs)
        };
        assert_eq!(set(&[RightOperand, LeftOperand], Ast::var("baz"))?.repr(), "main = baz + bar");
        assert_eq!(set(&[LeftOperand], Ast::var("baz"))?.repr(), "baz = foo + bar");
        assert_eq!(get(&[Operator])?.repr(), "=");
        assert_eq!(get(&[RightOperand])?.repr(), "foo + bar");
        assert_eq!(get(&[RightOperand, LeftOperand])?.repr(), "foo");
        assert_eq!(get(&[RightOperand, RightOperand])?.repr(), "bar");
        Ok(())
    }
    // === Prefix ===
    #[test]
    fn prefix_crumb() -> FallibleResult {
        let prefix = Ast::prefix(Ast::var("func"), Ast::var("arg"));
        let get = |prefix_crumb| {
            let crumb = Crumb::Prefix(prefix_crumb);
            prefix.get(&crumb)
        };
        let set = |prefix_crumb, ast| {
            let crumb = Crumb::Prefix(prefix_crumb);
            prefix.set(&crumb, ast)
        };
        let foo = Ast::var("foo");
        let x = Ast::var("x");
        assert_eq!(prefix.repr(), "func arg");
        assert_eq!(get(PrefixCrumb::Func)?.repr(), "func");
        assert_eq!(get(PrefixCrumb::Arg)?.repr(), "arg");
        assert_eq!(set(PrefixCrumb::Func, foo)?.repr(), "foo arg");
        assert_eq!(set(PrefixCrumb::Arg, x)?.repr(), "func x");
        Ok(())
    }
    #[test]
    fn iterate_prefix() -> FallibleResult {
        let prefix = Ast::prefix(Ast::var("func"), Ast::var("arg"));
        let (func, arg) = prefix.iter_subcrumbs().expect_tuple();
        assert_eq!(func, Crumb::Prefix(PrefixCrumb::Func));
        assert_eq!(arg, Crumb::Prefix(PrefixCrumb::Arg));
        Ok(())
    }
    // === SectionLeft ===
    #[test]
    fn section_left_crumb() -> FallibleResult {
        let app = Ast::section_left(Ast::var("foo"), "bar");
        let get = |app_crumb| {
            let crumb = Crumb::SectionLeft(app_crumb);
            app.get(&crumb)
        };
        let set = |app_crumb, ast| {
            let crumb = Crumb::SectionLeft(app_crumb);
            app.set(&crumb, ast)
        };
        let arg = Ast::var("arg");
        let opr = Ast::var("opr");
        assert_eq!(app.repr(), "foo bar");
        assert_eq!(get(SectionLeftCrumb::Arg)?.repr(), "foo");
        assert_eq!(get(SectionLeftCrumb::Opr)?.repr(), "bar");
        assert_eq!(set(SectionLeftCrumb::Arg, arg)?.repr(), "arg bar");
        assert_eq!(set(SectionLeftCrumb::Opr, opr)?.repr(), "foo opr");
        Ok(())
    }
    #[test]
    fn iterate_section_left() -> FallibleResult {
        let app = Ast::section_left(Ast::var("foo"), "bar");
        let (arg, opr) = app.iter_subcrumbs().expect_tuple();
        assert_eq!(arg, Crumb::SectionLeft(SectionLeftCrumb::Arg));
        assert_eq!(opr, Crumb::SectionLeft(SectionLeftCrumb::Opr));
        Ok(())
    }
    // === SectionRight ===
    #[test]
    fn section_right_crumb() -> FallibleResult {
        let app = Ast::section_right("foo", Ast::var("bar"));
        let get = |app_crumb| {
            let crumb = Crumb::SectionRight(app_crumb);
            app.get(&crumb)
        };
        let set = |app_crumb, ast| {
            let crumb = Crumb::SectionRight(app_crumb);
            app.set(&crumb, ast)
        };
        let arg = Ast::var("arg");
        let opr = Ast::var("opr");
        assert_eq!(app.repr(), "foo bar");
        assert_eq!(get(SectionRightCrumb::Opr)?.repr(), "foo");
        assert_eq!(get(SectionRightCrumb::Arg)?.repr(), "bar");
        assert_eq!(set(SectionRightCrumb::Opr, opr)?.repr(), "opr bar");
        assert_eq!(set(SectionRightCrumb::Arg, arg)?.repr(), "foo arg");
        Ok(())
    }
    #[test]
    fn iterate_section_right() -> FallibleResult {
        // For a right section the operator comes before the argument.
        let app = Ast::section_right("foo", Ast::var("bar"));
        let (opr, arg) = app.iter_subcrumbs().expect_tuple();
        assert_eq!(arg, Crumb::SectionRight(SectionRightCrumb::Arg));
        assert_eq!(opr, Crumb::SectionRight(SectionRightCrumb::Opr));
        Ok(())
    }
    // === SectionSides ===
    #[test]
    fn section_sides_crumb() -> FallibleResult {
        let app = Ast::section_sides("foo");
        let get = |app_crumb| {
            let crumb = Crumb::SectionSides(app_crumb);
            app.get(&crumb)
        };
        let set = |app_crumb, ast| {
            let crumb = Crumb::SectionSides(app_crumb);
            app.set(&crumb, ast)
        };
        let opr = Ast::var("opr");
        assert_eq!(app.repr(), "foo");
        assert_eq!(get(SectionSidesCrumb)?.repr(), "foo");
        assert_eq!(set(SectionSidesCrumb, opr)?.repr(), "opr");
        Ok(())
    }
    #[test]
    fn iterate_section_sides() -> FallibleResult {
        let app = Ast::section_sides("foo");
        let mut iter = app.iter_subcrumbs();
        assert_eq!(iter.next(), Some(Crumb::SectionSides(SectionSidesCrumb)));
        assert_eq!(iter.next(), None);
        Ok(())
    }
    // === Module ===
    #[test]
    fn iterate_module() {
        // Only non-empty lines yield crumbs, but indices count all lines.
        let var = crate::Ast::var("foo");
        let lines = [Some(var.clone_ref()), None, Some(var.clone_ref())];
        let module = crate::Module::from_lines(&lines);
        assert_eq!(module.repr(), "foo\n\nfoo");
        let (line0, line2) = module.iter_subcrumbs().expect_tuple();
        assert_eq!(line0.line_index, 0);
        assert_eq!(line2.line_index, 2);
    }
    // === Block ===
    #[test]
    fn iterate_block() {
        let first_line = crate::Ast::var("foo");
        let lines = [Some(crate::Ast::var("bar")), None, Some(crate::Ast::var("baz"))];
        let block = crate::Block::from_lines(&first_line, &lines);
        let (line0, line1, line3) = block.iter_subcrumbs().expect_tuple();
        assert_eq!(line0, BlockCrumb::HeadLine);
        assert_eq!(line1, BlockCrumb::TailLine { tail_index: 0 });
        assert_eq!(line3, BlockCrumb::TailLine { tail_index: 2 });
    }
    #[test]
    fn mismatched_crumb() {
        let sum = Ast::infix_var("foo", "+", "bar");
        let crumb = Crumb::Module(ModuleCrumb { line_index: 0 });
        let first_line = sum.get(&crumb);
        first_line.expect_err("Using module crumb on infix should fail");
    }
    #[test]
    fn located() {
        let item = Located::new_root("zero");
        assert_eq!(item.item, "zero");
        assert!(item.crumbs.is_empty());
        let child_item = Located::new(InfixCrumb::Operator, "two");
        let item = item.into_descendant(child_item);
        assert_eq!(item.item, "two");
        let (crumb0,) = item.crumbs.iter().expect_tuple();
        assert_eq!(crumb0, &Crumb::Infix(InfixCrumb::Operator));
        // Mapping preserves the crumb path.
        let item2 = item.clone().map(|item| item.len());
        assert_eq!(item2.item, 3);
        assert_eq!(item.crumbs, item2.crumbs);
    }
    // === TraversableAst ===
    #[test]
    fn traversable_ast() {
        let ast = Ast::prefix(Ast::prefix(Ast::var("add"), Ast::number(2)), Ast::number(4));
        let expected_code = "add 2 4";
        assert_eq!(ast.repr(), expected_code);
        assert_eq!(ast.my_ast().unwrap(), &ast);
        let crumbs_to_two = [PrefixCrumb::Func, PrefixCrumb::Arg].into_crumbs();
        let two = ast.get_traversing(&crumbs_to_two).unwrap();
        assert_eq!(two.repr(), "2");
        // The computed byte range indexes back into the source text.
        let two_span = ast.range_of_descendant_at(&crumbs_to_two).unwrap();
        assert_eq!(two_span, 4.byte()..5.byte());
        assert_eq!(&expected_code[two_span], "2");
    }
}
| true |
591aed47a99d2d1a6d096179fd9d8457048db9ef
|
Rust
|
koute/yew
|
/src/services/storage.rs
|
UTF-8
| 2,137 | 3.46875 | 3 |
[
"MIT",
"Apache-2.0"
] |
permissive
|
//! This module contains the implementation of a service to
//! use local and session storage of a browser.
use stdweb::Value;
use format::{Storable, Restorable};
/// Represents errors of a storage.
#[derive(Debug, Fail)]
enum StorageError {
    /// The stored value could not be retrieved as a string.
    #[fail(display = "restore error")]
    CantRestore,
}
/// An area to keep the data in: session storage is cleared when the page
/// session ends, local storage persists.
pub enum Area {
    /// Use `localStorage` of a browser.
    Local,
    /// Use `sessionStorage` of a browser.
    Session,
}
/// A storage service attached to a context.
pub struct StorageService {
    // Which browser storage area this service reads and writes.
    scope: Area,
}
impl StorageService {
    /// Creates a new storage service instance with specified storage scope.
    pub fn new(scope: Area) -> Self {
        StorageService { scope }
    }
    /// Stores value to the storage.
    ///
    /// The value is first converted to `Storable`; nothing is written when the
    /// conversion yields no payload.
    pub fn store<T>(&mut self, key: &str, value: T)
    where
        T: Into<Storable>
    {
        // `Storable` is `Option`-like: only `Some(data)` is persisted.
        if let Some(data) = value.into() {
            match self.scope {
                Area::Local => { js! { @(no_return)
                    localStorage.setItem(@{key}, @{data});
                } },
                Area::Session => { js! { @(no_return)
                    sessionStorage.setItem(@{key}, @{data});
                } },
            }
        }
    }
    /// Restores value from the storage.
    ///
    /// A missing key (non-string JS value such as `null`) is mapped to a
    /// `StorageError::CantRestore` error before the `Restorable` conversion.
    pub fn restore<T>(&mut self, key: &str) -> T
    where
        T : From<Restorable>
    {
        let value: Value = {
            match self.scope {
                Area::Local => js! { return localStorage.getItem(@{key}); },
                Area::Session => js! { return sessionStorage.getItem(@{key}); },
            }
        };
        // `into_string` yields `None` for non-string JS values.
        let data = value.into_string().ok_or_else(|| StorageError::CantRestore.into());
        T::from(data)
    }
    /// Removes value from the storage.
    pub fn remove(&mut self, key: &str) {
        {
            match self.scope {
                Area::Local => js! { @(no_return)
                    localStorage.removeItem(@{key});
                },
                Area::Session => js! { @(no_return)
                    sessionStorage.removeItem(@{key});
                },
            }
        };
    }
}
| true |
f824a055d9afb1efcf1d8f36a54e9f965df7b9e2
|
Rust
|
jDomantas/ccg
|
/crates/engine/src/renderer.rs
|
UTF-8
| 4,568 | 2.671875 | 3 |
[] |
no_license
|
use ggez::Context;
use ggez::graphics::{DrawParam, Image, Rect, Color};
use ggez::nalgebra::Point2;
use crate::{Ctx, CtxData, Result};
/// Owns the loaded images and the named texture handle table used for drawing.
pub struct Renderer {
    // Sprite sheet holding all icons in an 8x8 grid of 16x16 cells
    // (cell geometry per FrameRenderer::draw_icon below).
    icons: Image,
    // Standalone texture images, addressed by `Texture::index`.
    textures: Vec<Image>,
    // Named handles into `textures` (buttons, card back, ...).
    indices: Textures,
}
impl Renderer {
    /// Creates a renderer from a loaded icon sheet, texture list and handle table.
    pub fn new(icons: Image, textures: Vec<Image>, indices: Textures) -> Self {
        Renderer { icons, textures, indices }
    }
    /// Begins rendering one frame; the returned handle mutably borrows both
    /// the renderer and the context pieces for the frame's duration.
    pub fn frame<'a>(&'a mut self, ctx: &'a mut Ctx<'_>) -> FrameRenderer<'a> {
        FrameRenderer {
            renderer: self,
            ctx: ctx.ggez,
            ctx_data: ctx.data,
        }
    }
}
/// Index of one 16x16 cell in the 8x8 icon sprite sheet
/// (row = index / 8, column = index % 8).
#[derive(Debug, Copy, Clone)]
pub struct Icon {
    // Row-major cell index into the icon sheet.
    index: u32,
}
impl Icon {
    // Named cells of the icon sheet, in sheet order.
    pub const CIRCLE: Icon = Icon { index: 1 };
    pub const DOT: Icon = Icon { index: 2 };
    pub const SQUARE: Icon = Icon { index: 3 };
    pub const SWORD: Icon = Icon { index: 4 };
    pub const HEART: Icon = Icon { index: 5 };
    pub const SHIELD: Icon = Icon { index: 6 };
    pub const FIGHTER: Icon = Icon { index: 7 };
    pub const BEHOLDER: Icon = Icon { index: 8 };
    pub const CARD: Icon = Icon { index: 9 };
    pub const PLAY: Icon = Icon { index: 10 };
    pub const CARD_BACK: Icon = Icon { index: 11 };
    pub const COIN: Icon = Icon { index: 12 };
    pub const CROSS: Icon = Icon { index: 13 };
    pub const BANG: Icon = Icon { index: 14 };
    pub const RED_CIRCLE: Icon = Icon { index: 15 };
    pub const BLUE_BEHOLDER: Icon = Icon { index: 16 };
    pub const GREEN_HEART: Icon = Icon { index: 17 };
    pub const BROKEN: Icon = Icon { index: 18 };
    pub const DECK: Icon = Icon { index: 19 };
    pub const TRAP_DECK: Icon = Icon { index: 20 };
    pub const BLACK: Icon = Icon { index: 21 };
    pub const DISARM: Icon = Icon { index: 22 };
    pub const RED_SWORD: Icon = Icon { index: 23 };
    pub const BOW: Icon = Icon { index: 24 };
    pub const FIGHTER_2: Icon = Icon { index: 25 };
    pub const CHICKEN: Icon = Icon { index: 26 };
    /// Creates an icon handle for an arbitrary sheet cell index.
    pub const fn new(index: u32) -> Icon {
        Icon { index }
    }
}
/// Named texture handles resolved at load time.
pub struct Textures {
    pub button: Texture,
    pub button_hover: Texture,
    pub button_selected: Texture,
    pub card_back: Texture,
}
/// Index of one standalone image in `Renderer::textures`.
#[derive(Debug, Copy, Clone)]
pub struct Texture {
    index: u32,
}
impl Texture {
    /// Creates a texture handle for the image stored at `index`.
    pub const fn new(index: u32) -> Texture {
        Texture { index }
    }
}
/// Per-frame drawing interface; mutably borrows the renderer and the
/// context pieces for the lifetime of one frame.
pub struct FrameRenderer<'a> {
    renderer: &'a mut Renderer,
    ctx: &'a mut Context,
    ctx_data: &'a mut CtxData,
}
impl<'a> FrameRenderer<'a> {
    /// Named texture handles loaded into the renderer.
    pub fn textures(&self) -> &Textures {
        &self.renderer.indices
    }
    /// Raw ggez context, for callers needing direct access.
    pub fn ggez(&mut self) -> &mut Context {
        self.ctx
    }
    /// Rebuilds a `Ctx` wrapper over the borrowed context pieces.
    pub fn ctx(&mut self) -> Ctx<'_> {
        Ctx {
            ggez: self.ctx,
            data: self.ctx_data,
        }
    }
    /// Draws `texture` stretched to `width` x `height` with its top-left
    /// corner at (`x`, `y`).
    pub fn draw(&mut self, texture: Texture, x: f32, y: f32, width: f32, height: f32) -> Result {
        let texture = &self.renderer.textures[texture.index as usize];
        ggez::graphics::draw(
            self.ctx,
            texture,
            DrawParam::new()
                .dest(Point2::new(x, y))
                // Scale from the texture's native pixel size to the requested size.
                .scale([
                    width / f32::from(texture.width()),
                    height / f32::from(texture.height()),
                ])
                .src(Rect { x: 0.0, y: 0.0, w: 1.0, h: 1.0 }),
        )
    }
    /// Draws one cell of the 8x8 icon sheet stretched to `width` x `height`.
    /// Each source cell is 16x16 px, i.e. 1/8 (0.125) of the sheet per axis.
    pub fn draw_icon(&mut self, icon: Icon, x: f32, y: f32, width: f32, height: f32) -> Result {
        let row = (icon.index / 8) as f32;
        let col = (icon.index % 8) as f32;
        let draw = DrawParam::new()
            .dest(Point2::new(x, y))
            .scale([width / 16.0, height / 16.0])
            .src(Rect {
                x: col / 8.0,
                y: row / 8.0,
                w: 0.125,
                h: 0.125,
            });
        ggez::graphics::draw(self.ctx, &self.renderer.icons, draw)
    }
    /// Covers the whole screen with a tint; `opacity` is expected in [0, 1]
    /// and values outside that range are clamped.
    pub fn draw_fade(&mut self, opacity: f32) -> Result {
        // Map [0, 1] to [0, 255]. `clamp` replaces the original manual
        // if-chains; a NaN input still ends up as 0 after the cast, and
        // out-of-range values saturate exactly as before.
        let opacity = (opacity * 256.0).clamp(0.0, 255.0) as u8;
        let draw = DrawParam::new()
            .dest(Point2::new(0.0, 0.0))
            .scale([crate::SCREEN_WIDTH / 16.0, crate::SCREEN_HEIGHT / 16.0])
            // Source cell (col 5, row 2) of the icon sheet is used as the
            // fade tint source. NOTE(review): assumed to be a solid cell —
            // confirm against the sheet asset.
            .src(Rect {
                x: 5.0 / 8.0,
                y: 2.0 / 8.0,
                w: 0.125,
                h: 0.125,
            })
            .color(Color::from_rgba(opacity, opacity, opacity, opacity));
        ggez::graphics::draw(self.ctx, &self.renderer.icons, draw)
    }
}
| true |
32d67f4836f6508c12683d98a1865541a32b1495
|
Rust
|
qti3e/ross
|
/ross_core/src/db/batch.rs
|
UTF-8
| 1,483 | 2.828125 | 3 |
[] |
no_license
|
use super::DB;
use super::{bincode::serialize, keys::DbWriteKey};
use crate::error::{Error, Result};
/// An atomic batch of write operations. This is a type safe wrapper around
/// rocksdb::WriteBatch.
pub struct Batch<'a> {
    // Database handle; supplies the column-family lookup for each key type.
    db: &'a DB,
    // Accumulated operations, applied atomically by `write`.
    batch: rocksdb::WriteBatch,
}
impl<'a> Batch<'a> {
    /// Starts an empty batch bound to `db`.
    pub fn new(db: &'a DB) -> Self {
        Batch {
            db,
            batch: rocksdb::WriteBatch::default(),
        }
    }
    /// Queues an insert/overwrite of `key` -> `value` in the key's column family.
    #[inline(always)]
    pub fn put<K: DbWriteKey>(&mut self, key: K, value: &K::Value) {
        let cf = K::cf(&self.db.cf);
        self.batch
            .put_cf(cf, serialize(key.key()), serialize(value));
    }
    /// Queues a deletion of `key`.
    #[inline(always)]
    pub fn delete<K: DbWriteKey>(&mut self, key: K) {
        let cf = K::cf(&self.db.cf);
        self.batch.delete_cf(cf, serialize(key.key()));
    }
    /// Queues deletion of all keys in the range `from`..`to`
    /// (RocksDB treats the end key as exclusive — confirm for this binding).
    #[inline(always)]
    pub fn delete_range<K: DbWriteKey>(&mut self, from: K, to: K) {
        let cf = K::cf(&self.db.cf);
        self.batch
            .delete_range_cf(cf, serialize(from.key()), serialize(to.key()));
    }
    /// Queues appending `value` to the Vec stored under `key`, using the
    /// database's configured merge operator.
    #[inline(always)]
    pub fn push<K: DbWriteKey<Value = Vec<I>>, I: serde::Serialize>(&mut self, key: K, value: &I) {
        let cf = K::cf(&self.db.cf);
        self.batch
            .merge_cf(cf, serialize(key.key()), serialize(value));
    }
    /// Perform the atomic batch write. Consumes the batch; all queued
    /// operations are applied together or not at all.
    #[inline(always)]
    pub fn write(self) -> Result<()> {
        self.db.db.write(self.batch).map_err(Error::DBError)
    }
}
| true |
c7f16274b4bb2c4980f521b93a97da2721f51d6b
|
Rust
|
gengjiawen/leetcode
|
/Array/_0888_fair_candy_swap.rs
|
UTF-8
| 2,307 | 3.734375 | 4 |
[] |
no_license
|
// https://leetcode.com/problems/fair-candy-swap
//
// Alice and Bob have a different total number of candies. You are given two integer arrays `aliceSizes` and `bobSizes` where `aliceSizes[i]` is the number of candies of the `i<sup>th</sup>` box of candy that Alice has and `bobSizes[j]` is the number of candies of the `j<sup>th</sup>` box of candy that Bob has.
//
// Since they are friends, they would like to exchange one candy box each so that after the exchange, they both have the same total amount of candy. The total amount of candy a person has is the sum of the number of candies in each box they have.
//
// Return a_n integer array_ `answer` _where_ `answer[0]` _is the number of candies in the box that Alice must exchange, and_ `answer[1]` _is the number of candies in the box that Bob must exchange_. If there are multiple answers, you may **return any** one of them. It is guaranteed that at least one answer exists.
//
// **Example 1:**
//
// ```
// **Input:** aliceSizes = [1,1], bobSizes = [2,2]
// **Output:** [1,2]
// ```
//
// **Example 2:**
//
// ```
// **Input:** aliceSizes = [1,2], bobSizes = [2,3]
// **Output:** [1,2]
// ```
//
// **Example 3:**
//
// ```
// **Input:** aliceSizes = [2], bobSizes = [1,3]
// **Output:** [2,3]
// ```
//
// **Constraints:**
//
// * `1 <= aliceSizes.length, bobSizes.length <= 10<sup>4</sup>`
// * `1 <= aliceSizes[i], bobSizes[j] <= 10<sup>5</sup>`
// * Alice and Bob have a different total number of candies.
// * There will be at least one valid answer for the given input.
/// Returns a pair `[a, b]` such that swapping Alice's box `a` for Bob's box
/// `b` equalizes their candy totals.
///
/// After swapping, totals are equal iff
/// `alice_sum - a + b == bob_sum - b + a`, i.e. `b == a - (alice_sum - bob_sum) / 2`.
/// Using that identity with a hash set of Bob's sizes makes the search
/// O(n + m) instead of the naive O(n * m) pairwise scan, and the result for
/// the given examples is unchanged (Alice's boxes are scanned in order).
///
/// The problem guarantees at least one valid answer; `[0, 0]` is kept as the
/// unreachable fallback to preserve the original signature and behavior.
pub fn fair_candy_swap(alice_sizes: Vec<i32>, bob_sizes: Vec<i32>) -> Vec<i32> {
    use std::collections::HashSet;

    let alice_sum: i32 = alice_sizes.iter().sum();
    let bob_sum: i32 = bob_sizes.iter().sum();
    // Half the difference of totals; exact because a valid swap exists.
    let delta = (alice_sum - bob_sum) / 2;
    let bob_set: HashSet<i32> = bob_sizes.iter().copied().collect();
    for &alice_size in &alice_sizes {
        let bob_size = alice_size - delta;
        if bob_set.contains(&bob_size) {
            return vec![alice_size, bob_size];
        }
    }
    vec![0, 0]
}
/// Checks the three LeetCode examples against the solver.
#[test]
pub fn t1() {
    let cases = vec![
        (vec![1, 1], vec![2, 2], vec![1, 2]),
        (vec![1, 2], vec![2, 3], vec![1, 2]),
        (vec![2], vec![1, 3], vec![2, 3]),
    ];
    for (alice, bob, expected) in cases {
        assert_eq!(fair_candy_swap(alice, bob), expected);
    }
}
| true |
ab99475dc781d845512672ad73a82bc0fbdf4a71
|
Rust
|
ufwt/SMB_Fuzzer
|
/src/fuzzing_lib/src/smb2/helper_functions/negotiate_context.rs
|
UTF-8
| 26,116 | 3.015625 | 3 |
[
"MIT"
] |
permissive
|
//! This module describes the negotiate contexts.
//! The SMB2_NEGOTIATE_CONTEXT structure is used by the SMB2 NEGOTIATE Request
//! and the SMB2 NEGOTIATE Response to encode additional properties.
//! The server MUST support receiving negotiate contexts in any order.
use rand::{
distributions::{Distribution, Standard},
Rng,
};
use crate::{
format::convert_byte_array_to_int, fuzzer::create_random_byte_array_of_predefined_length,
};
/// Implemented by every negotiate-context payload that can report the length
/// of its serialized `Data` field.
pub trait DataSize {
    /// Returns the payload length in bytes as a 2-byte little-endian value.
    fn get_data_length(&self) -> Vec<u8>;
}
/// ContextType (2 bytes): Specifies the type of context in the Data field.
#[derive(Debug, PartialEq, Eq, Clone)]
pub enum ContextType {
    /// SMB2_PREAUTH_INTEGRITY_CAPABILITIES (ID 0x0001).
    PreauthIntegrityCapabilities(PreauthIntegrityCapabilities),
    /// SMB2_ENCRYPTION_CAPABILITIES (ID 0x0002).
    EncryptionCapabilities(EncryptionCapabilities),
    /// SMB2_COMPRESSION_CAPABILITIES (ID 0x0003).
    CompressionCapabilities(CompressionCapabilities),
    /// SMB2_NETNAME_NEGOTIATE_CONTEXT_ID (ID 0x0005).
    NetnameNegotiateContextId(NetnameNegotiateContextId),
    /// SMB2_TRANSPORT_CAPABILITIES (ID 0x0006).
    TransportCapabilities(TransportCapabilities),
    /// SMB2_RDMA_TRANSFORM_CAPABILITIES (ID 0x0007).
    RdmaTransformCapabilities(RdmaTransformCapabilities),
}
impl ContextType {
    /// Unpacks the byte code for the context type: the 2-byte little-endian
    /// context-type ID used on the wire.
    pub fn unpack_byte_code(&self) -> Vec<u8> {
        match self {
            ContextType::PreauthIntegrityCapabilities(_) => b"\x01\x00".to_vec(),
            ContextType::EncryptionCapabilities(_) => b"\x02\x00".to_vec(),
            ContextType::CompressionCapabilities(_) => b"\x03\x00".to_vec(),
            ContextType::NetnameNegotiateContextId(_) => b"\x05\x00".to_vec(),
            ContextType::TransportCapabilities(_) => b"\x06\x00".to_vec(),
            ContextType::RdmaTransformCapabilities(_) => b"\x07\x00".to_vec(),
        }
    }
    /// Maps the byte code of an incoming response to the corresponding context type.
    ///
    /// Only the first byte is inspected — the low byte of the little-endian
    /// ID, which is unique for every known context type (high byte is 0).
    /// Panics on an empty or unknown byte code.
    pub fn map_byte_code_to_context_type(byte_code: Vec<u8>) -> ContextType {
        if let Some(code) = byte_code.get(0) {
            match code {
                1 => ContextType::PreauthIntegrityCapabilities(
                    PreauthIntegrityCapabilities::default(),
                ),
                2 => ContextType::EncryptionCapabilities(EncryptionCapabilities::default()),
                3 => ContextType::CompressionCapabilities(CompressionCapabilities::default()),
                5 => ContextType::NetnameNegotiateContextId(NetnameNegotiateContextId::default()),
                6 => ContextType::TransportCapabilities(TransportCapabilities::default()),
                7 => ContextType::RdmaTransformCapabilities(RdmaTransformCapabilities::default()),
                _ => panic!("Invalid context type in parsed response."),
            }
        } else {
            panic!("Empty context type in parsed response.")
        }
    }
    /// Calls get data length for the corresponding capability.
    ///
    /// Note: for `EncryptionCapabilities` this resolves to the inherent
    /// `get_data_length`, which mirrors its `DataSize` impl.
    pub fn get_capability_data_length(&self) -> Vec<u8> {
        match self {
            ContextType::PreauthIntegrityCapabilities(preauth) => preauth.get_data_length(),
            ContextType::EncryptionCapabilities(encrypt) => encrypt.get_data_length(),
            ContextType::CompressionCapabilities(compress) => compress.get_data_length(),
            ContextType::NetnameNegotiateContextId(netname) => netname.get_data_length(),
            ContextType::TransportCapabilities(transport) => transport.get_data_length(),
            ContextType::RdmaTransformCapabilities(rdma) => rdma.get_data_length(),
        }
    }
}
impl Distribution<ContextType> for Standard {
    /// Samples a random negotiate context with semi-valid fuzzed contents.
    ///
    /// The range is 0..=5 so that all six variants are reachable; the
    /// previous upper bound of 4 made the RDMA catch-all arm dead code and
    /// that context type was never generated.
    fn sample<R: Rng + ?Sized>(&self, rng: &mut R) -> ContextType {
        match rng.gen_range(0..=5) {
            0 => ContextType::PreauthIntegrityCapabilities(
                PreauthIntegrityCapabilities::fuzz_with_predefined_length(),
            ),
            1 => ContextType::EncryptionCapabilities(
                EncryptionCapabilities::fuzz_with_predefined_length(),
            ),
            2 => ContextType::CompressionCapabilities(
                CompressionCapabilities::fuzz_with_predefined_length(),
            ),
            3 => ContextType::NetnameNegotiateContextId(NetnameNegotiateContextId::fuzz()),
            4 => ContextType::TransportCapabilities(
                TransportCapabilities::fuzz_with_predefined_length(),
            ),
            _ => ContextType::RdmaTransformCapabilities(
                RdmaTransformCapabilities::fuzz_with_predefined_length(),
            ),
        }
    }
}
impl std::fmt::Display for ContextType {
    /// Formatting simply delegates to the wrapped capability payload.
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        match self {
            ContextType::PreauthIntegrityCapabilities(inner) => {
                std::fmt::Display::fmt(inner, f)
            }
            ContextType::EncryptionCapabilities(inner) => std::fmt::Display::fmt(inner, f),
            ContextType::CompressionCapabilities(inner) => std::fmt::Display::fmt(inner, f),
            ContextType::NetnameNegotiateContextId(inner) => std::fmt::Display::fmt(inner, f),
            ContextType::TransportCapabilities(inner) => std::fmt::Display::fmt(inner, f),
            ContextType::RdmaTransformCapabilities(inner) => std::fmt::Display::fmt(inner, f),
        }
    }
}
/// The SMB2_NEGOTIATE_CONTEXT structure is used by the SMB2 NEGOTIATE Request
/// and the SMB2 NEGOTIATE Response to encode additional properties.
///
/// All fixed-width fields are kept as raw little-endian byte vectors so the
/// fuzzer can place arbitrary (including invalid) values in them.
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct NegotiateContext {
    /// ContextType (2 bytes): Specifies the type of context in the Data field.
    pub context_type: Vec<u8>,
    /// DataLength (2 bytes): The length, in bytes, of the Data field.
    pub data_length: Vec<u8>,
    /// Reserved (4 bytes): This field MUST NOT be used and MUST be reserved.
    /// This value MUST be set to 0 by the client, and MUST be ignored by the server.
    pub reserved: Vec<u8>,
    /// Data (variable): A variable-length field that contains
    /// the negotiate context specified by the ContextType field.
    pub data: Option<ContextType>,
}
impl NegotiateContext {
    /// Creates a new instance of the Negotiate Context.
    ///
    /// Only `reserved` is pre-filled (4 zero bytes, as the spec requires);
    /// type, length and payload are populated later by the caller.
    pub fn default() -> Self {
        NegotiateContext {
            context_type: Vec::new(),
            data_length: Vec::new(),
            reserved: vec![0; 4],
            data: None,
        }
    }
}
impl std::fmt::Display for NegotiateContext {
    /// Multi-line, tab-indented debug rendering of all fields.
    ///
    /// NOTE(review): `self.data.as_ref().unwrap()` panics when `data` is
    /// `None` (e.g. on a freshly `default()`-constructed context) — callers
    /// must only format fully populated contexts.
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        write!(
            f,
            "Negotiate Context: \n\t\tcontext type: {:?}\n\t\tdata length: {:?}\
            \n\t\treserved: {:?}\n\t\tdata: {}",
            self.context_type,
            self.data_length,
            self.reserved,
            self.data.as_ref().unwrap()
        )
    }
}
/// Display adapter over a borrowed list of negotiate contexts.
pub struct NegVec<'a>(pub &'a Vec<NegotiateContext>);
impl<'a> std::fmt::Display for NegVec<'a> {
    /// Writes every contained context on its own line (each followed by '\n').
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        for context in self.0.iter() {
            writeln!(f, "{}", context)?;
        }
        Ok(())
    }
}
/// Preauthentication integrity hash algorithms; SHA-512 is the only one defined.
#[derive(Debug, PartialEq, Eq, Clone)]
pub enum HashAlgorithms {
    Sha512,
}
impl HashAlgorithms {
    /// Unpacks the byte code for hash algorithms (2-byte little-endian ID).
    pub fn unpack_byte_code(&self) -> Vec<u8> {
        match self {
            HashAlgorithms::Sha512 => b"\x01\x00".to_vec(),
        }
    }
}
impl Distribution<HashAlgorithms> for Standard {
    /// Always yields `Sha512` — it is the only variant, so the RNG is unused.
    fn sample<R: Rng + ?Sized>(&self, _rng: &mut R) -> HashAlgorithms {
        HashAlgorithms::Sha512
    }
}
/// The SMB2_PREAUTH_INTEGRITY_CAPABILITIES context is specified in an SMB2 NEGOTIATE
/// request by the client to indicate which preauthentication integrity hash algorithms
/// the client supports and to optionally supply a preauthentication integrity hash salt value.
///
/// Fields are raw little-endian byte vectors so the fuzzer can inject
/// arbitrary values.
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct PreauthIntegrityCapabilities {
    /// HashAlgorithmCount (2 bytes): The number of hash algorithms in
    /// the HashAlgorithms array. This value MUST be greater than zero.
    pub hash_algorithm_count: Vec<u8>,
    /// SaltLength (2 bytes): The size, in bytes, of the Salt field.
    pub salt_length: Vec<u8>,
    /// HashAlgorithms (variable): An array of HashAlgorithmCount
    /// 16-bit integer IDs specifying the supported preauthentication integrity hash functions.
    /// There is currently only SHA-512 available.
    pub hash_algorithms: Vec<Vec<u8>>,
    /// Salt (variable): A buffer containing the salt value of the hash.
    pub salt: Vec<u8>,
}
impl PreauthIntegrityCapabilities {
    /// Creates a new PreauthIntegrityCapabilities instance advertising a
    /// single SHA-512 algorithm and an empty salt.
    pub fn default() -> Self {
        PreauthIntegrityCapabilities {
            hash_algorithm_count: b"\x01\x00".to_vec(),
            salt_length: Vec::new(),
            hash_algorithms: vec![b"\x01\x00".to_vec()], // SHA-512
            salt: Vec::new(),
        }
    }
    /// Fuzzes the preauthintegrity capabilities with a predefined length:
    /// 0..100 random algorithm entries (all SHA-512, the only variant) and a
    /// random salt of 0..32 bytes; both counts are encoded little-endian.
    pub fn fuzz_with_predefined_length() -> Self {
        let mut random_hashes: Vec<HashAlgorithms> = Vec::new();
        for _ in 0..rand::thread_rng().gen_range(0..100) {
            random_hashes.push(rand::random());
        }
        let salt_length = rand::thread_rng().gen_range(0..32) as u16;
        PreauthIntegrityCapabilities {
            hash_algorithm_count: (random_hashes.len() as u16).to_le_bytes().to_vec(),
            salt_length: salt_length.to_le_bytes().to_vec(),
            hash_algorithms: random_hashes
                .into_iter()
                .map(|hash| hash.unpack_byte_code())
                .collect(),
            salt: create_random_byte_array_of_predefined_length(salt_length as u32),
        }
    }
}
impl DataSize for PreauthIntegrityCapabilities {
    /// Gets the data length of the preauthintegrity capabilities.
    fn get_data_length(&self) -> Vec<u8> {
        // NOTE(review): the third term adds the *number* of hash algorithms,
        // but each algorithm ID occupies 2 bytes on the wire — confirm
        // whether the apparent undercount is intentional for fuzzing.
        let length = self.hash_algorithm_count.len()
            + self.salt_length.len()
            + convert_byte_array_to_int(self.hash_algorithm_count.clone(), false) as usize
            + self.salt.len();
        (length as u16).to_le_bytes().to_vec()
    }
}
impl std::fmt::Display for PreauthIntegrityCapabilities {
    /// Tab-indented, multi-line debug rendering of all fields.
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        write!(
            f,
            "\n\t\t\tPreauth Integrity Capabilities: \n\t\t\t\thash algorithm count: {:?}\
            \n\t\t\t\tsalt length: {:?}\n\t\t\t\thash algorithms: {:?}\n\t\t\t\tsalt: {:?}",
            self.hash_algorithm_count, self.salt_length, self.hash_algorithms, self.salt
        )
    }
}
/// An array of CipherCount 16-bit integer IDs specifying the supported encryption algorithms.
/// These IDs MUST be in an order such that the most preferred cipher MUST be at the beginning
/// of the array and least preferred cipher at the end of the array. The following IDs are defined.
#[derive(Debug, PartialEq, Eq, Clone)]
pub enum Ciphers {
    Aes128Ccm,
    Aes128Gcm,
    Aes256Ccm,
    Aes256Gcm,
}
impl Ciphers {
    /// Unpacks the byte code of ciphers (2-byte little-endian cipher ID).
    pub fn unpack_byte_code(&self) -> Vec<u8> {
        match self {
            Ciphers::Aes128Ccm => b"\x01\x00".to_vec(),
            Ciphers::Aes128Gcm => b"\x02\x00".to_vec(),
            Ciphers::Aes256Ccm => b"\x03\x00".to_vec(),
            Ciphers::Aes256Gcm => b"\x04\x00".to_vec(),
        }
    }
}
impl Distribution<Ciphers> for Standard {
    /// Samples one of the four ciphers uniformly.
    fn sample<R: Rng + ?Sized>(&self, rng: &mut R) -> Ciphers {
        match rng.gen_range(0..=3) {
            0 => Ciphers::Aes128Ccm,
            1 => Ciphers::Aes128Gcm,
            2 => Ciphers::Aes256Ccm,
            _ => Ciphers::Aes256Gcm,
        }
    }
}
/// The SMB2_ENCRYPTION_CAPABILITIES context is specified in an SMB2 NEGOTIATE
/// request by the client to indicate which encryption algorithms the client supports.
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct EncryptionCapabilities {
    /// CipherCount (2 bytes): The number of ciphers in the Ciphers array.
    /// This value MUST be greater than zero.
    pub cipher_count: Vec<u8>,
    /// Ciphers (variable): An array of CipherCount 16-bit integer IDs
    /// specifying the supported encryption algorithms.
    /// These IDs MUST be in an order such that the most preferred cipher
    /// MUST be at the beginning of the array and least preferred cipher
    /// at the end of the array.
    pub ciphers: Vec<Vec<u8>>,
}
impl EncryptionCapabilities {
/// Creates a new EncryptionCapabilities instance.
pub fn default() -> Self {
EncryptionCapabilities {
cipher_count: Vec::new(),
ciphers: Vec::new(),
}
}
/// Fuzzes the encryption capabilities with the predefined length.
pub fn fuzz_with_predefined_length() -> Self {
let mut random_ciphers: Vec<Ciphers> = Vec::new();
for _ in 0..rand::thread_rng().gen_range(0..100) {
random_ciphers.push(rand::random());
}
EncryptionCapabilities {
cipher_count: (random_ciphers.len() as u16).to_le_bytes().to_vec(),
ciphers: random_ciphers
.into_iter()
.map(|cipher| cipher.unpack_byte_code())
.collect(),
}
}
/// Gets the data length of the encrytpion capabilities.
pub fn get_data_length(&self) -> Vec<u8> {
let length = self.cipher_count.len()
+ convert_byte_array_to_int(self.cipher_count.clone(), false) as usize;
(length as u16).to_le_bytes().to_vec()
}
}
impl DataSize for EncryptionCapabilities {
    /// Gets the data length of the encrytpion capabilities.
    fn get_data_length(&self) -> Vec<u8> {
        // NOTE(review): adds the cipher *count*, but each cipher ID occupies
        // 2 bytes on the wire — confirm whether the apparent undercount is
        // intentional for fuzzing.
        let length = self.cipher_count.len()
            + convert_byte_array_to_int(self.cipher_count.clone(), false) as usize;
        (length as u16).to_le_bytes().to_vec()
    }
}
impl std::fmt::Display for EncryptionCapabilities {
    /// Tab-indented, multi-line debug rendering of all fields.
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        write!(
            f,
            "\n\t\t\tEncryption Capabilities: \n\t\t\t\tcipher count: {:?}\n\t\t\t\tciphers: {:?}",
            self.cipher_count, self.ciphers
        )
    }
}
/// *Compression Capabilities Flag None*:
/// - Chained compression is not supported.
///
/// *Compression Capabilities Flag Chained*:
/// - Chained compression is supported on this connection.
#[derive(Debug, PartialEq, Eq, Clone)]
pub enum Flags {
    CompressionCapabilitiesFlagNone,
    CompressionCapabilitiesFlagChained,
}
impl Flags {
    /// Unpacks the byte code for compression flags (4-byte little-endian value).
    pub fn unpack_byte_code(&self) -> Vec<u8> {
        match self {
            Flags::CompressionCapabilitiesFlagNone => b"\x00\x00\x00\x00".to_vec(),
            Flags::CompressionCapabilitiesFlagChained => b"\x01\x00\x00\x00".to_vec(),
        }
    }
}
impl Distribution<Flags> for Standard {
    /// Picks one of the two compression flags with equal probability.
    fn fmt_unused() {}
    fn sample<R: Rng + ?Sized>(&self, rng: &mut R) -> Flags {
        if rng.gen_range(0..=1) == 0 {
            Flags::CompressionCapabilitiesFlagNone
        } else {
            Flags::CompressionCapabilitiesFlagChained
        }
    }
}
/// *None*:
/// - No compression.
///
/// *LZNT1*:
/// - LZNT1 compression algorithm.
///
/// *LZ77*:
/// - LZ77 compression algorithm.
///
/// *LZ77 + Huffman*:
/// - LZ77+Huffman compression algorithm.
///
/// *Pattern_V1*:
/// - Pattern scanning algorithm.
#[derive(Debug, PartialEq, Eq, Clone)]
pub enum CompressionAlgorithms {
    None,
    Lznt1,
    Lz77,
    Lz77Huffman,
    PatternV1,
}
impl CompressionAlgorithms {
    /// Unpacks the byte code for compression algorithms
    /// (2-byte little-endian algorithm ID).
    pub fn unpack_byte_code(&self) -> Vec<u8> {
        match self {
            CompressionAlgorithms::None => b"\x00\x00".to_vec(),
            CompressionAlgorithms::Lznt1 => b"\x01\x00".to_vec(),
            CompressionAlgorithms::Lz77 => b"\x02\x00".to_vec(),
            CompressionAlgorithms::Lz77Huffman => b"\x03\x00".to_vec(),
            CompressionAlgorithms::PatternV1 => b"\x04\x00".to_vec(),
        }
    }
}
impl Distribution<CompressionAlgorithms> for Standard {
    /// Samples one of the five compression algorithms uniformly.
    fn sample<R: Rng + ?Sized>(&self, rng: &mut R) -> CompressionAlgorithms {
        match rng.gen_range(0..=4) {
            0 => CompressionAlgorithms::None,
            1 => CompressionAlgorithms::Lznt1,
            2 => CompressionAlgorithms::Lz77,
            3 => CompressionAlgorithms::Lz77Huffman,
            _ => CompressionAlgorithms::PatternV1,
        }
    }
}
/// The SMB2_COMPRESSION_CAPABILITIES context is specified in an SMB2 NEGOTIATE request
/// by the client to indicate which compression algorithms the client supports.
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct CompressionCapabilities {
    /// CompressionAlgorithmCount (2 bytes): The number of elements in CompressionAlgorithms array.
    pub compression_algorithm_count: Vec<u8>,
    /// Padding (2 bytes): The sender MUST set this to 0, and the receiver MUST ignore it on receipt.
    pub padding: Vec<u8>,
    /// Flags (4 bytes)
    pub flags: Vec<u8>,
    /// CompressionAlgorithms (variable): An array of 16-bit integer IDs specifying
    /// the supported compression algorithms. These IDs MUST be in order of preference
    /// from most to least. The following IDs are defined.
    pub compression_algorithms: Vec<Vec<u8>>,
}
impl CompressionCapabilities {
    /// Creates a new compression capabilities instance with only the
    /// mandatory zero padding filled in.
    pub fn default() -> Self {
        CompressionCapabilities {
            compression_algorithm_count: Vec::new(),
            padding: b"\x00\x00".to_vec(),
            flags: Vec::new(),
            compression_algorithms: Vec::new(),
        }
    }
    /// Fuzzes the compression capabilities with the predefined length:
    /// 0..100 random algorithm IDs, a random chained/none flag, and the
    /// count encoded little-endian.
    pub fn fuzz_with_predefined_length() -> Self {
        let mut random_algorithms: Vec<CompressionAlgorithms> = Vec::new();
        for _ in 0..rand::thread_rng().gen_range(0..100) {
            random_algorithms.push(rand::random());
        }
        CompressionCapabilities {
            compression_algorithm_count: (random_algorithms.len() as u16).to_le_bytes().to_vec(),
            padding: b"\x00\x00".to_vec(),
            flags: rand::random::<Flags>().unpack_byte_code(),
            compression_algorithms: random_algorithms
                .into_iter()
                .map(|algo| algo.unpack_byte_code())
                .collect(),
        }
    }
}
impl DataSize for CompressionCapabilities {
    /// Gets the data size of the compression capabilities.
    fn get_data_length(&self) -> Vec<u8> {
        // NOTE(review): adds the algorithm *count*, but each algorithm ID
        // occupies 2 bytes on the wire — confirm whether the apparent
        // undercount is intentional for fuzzing.
        let length = self.compression_algorithm_count.len()
            + self.padding.len()
            + self.flags.len()
            + convert_byte_array_to_int(self.compression_algorithm_count.clone(), false) as usize;
        (length as u16).to_le_bytes().to_vec()
    }
}
impl std::fmt::Display for CompressionCapabilities {
    /// Tab-indented, multi-line debug rendering of all fields.
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        write!(
            f,
            "\n\t\t\tCompression Capabilities: \n\t\t\t\talgorithm count: {:?}\
            \n\t\t\t\tpadding: {:?}\n\t\t\t\tflags: {:?}\n\t\t\t\talgorithms: {:?}",
            self.compression_algorithm_count, self.padding, self.flags, self.compression_algorithms
        )
    }
}
/// The SMB2_NETNAME_NEGOTIATE_CONTEXT_ID context is specified in an SMB2 NEGOTIATE request
/// to indicate the server name the client connects to.
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct NetnameNegotiateContextId {
    /// NetName (variable): A Unicode string containing the server name and specified
    /// by the client application. e.g. 'tom'
    /// (stored here as raw bytes so the fuzzer can inject arbitrary data).
    pub net_name: Vec<u8>,
}
impl NetnameNegotiateContextId {
    /// Creates a new NetnameNegotiateContextId instance with an empty name.
    pub fn default() -> Self {
        NetnameNegotiateContextId {
            net_name: Vec::new(),
        }
    }
    /// Fuzzes the netname with random bytes and a random length up to 100 bytes.
    pub fn fuzz() -> Self {
        NetnameNegotiateContextId {
            net_name: create_random_byte_array_of_predefined_length(
                rand::thread_rng().gen_range(0..100),
            ),
        }
    }
}
impl DataSize for NetnameNegotiateContextId {
    /// Reports the netname's byte length as a 2-byte little-endian value.
    fn get_data_length(&self) -> Vec<u8> {
        let length = self.net_name.len() as u16;
        length.to_le_bytes().to_vec()
    }
}
impl std::fmt::Display for NetnameNegotiateContextId {
    /// Tab-indented debug rendering of the raw netname bytes.
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        write!(
            f,
            "\n\t\t\tNetname Context ID: \n\t\t\t\tnetname: {:?}",
            self.net_name
        )
    }
}
/// The SMB2_TRANSPORT_CAPABILITIES context is specified in an SMB2 NEGOTIATE request
/// to indicate transport capabilities over which the connection is made.
/// The server MUST ignore the context on receipt.
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct TransportCapabilities {
    /// Reserved (4 bytes): This field SHOULD be set to zero and is ignored on receipt.
    pub reserved: Vec<u8>,
}
impl TransportCapabilities {
    /// Creates a new TransportCapabilities instance with the spec-mandated
    /// zeroed reserved field.
    pub fn default() -> Self {
        TransportCapabilities {
            reserved: b"\x00\x00\x00\x00".to_vec(),
        }
    }
    /// Fuzzes the transport capabilities keeping the valid 4-byte length
    /// but with random contents.
    pub fn fuzz_with_predefined_length() -> Self {
        TransportCapabilities {
            reserved: create_random_byte_array_of_predefined_length(4),
        }
    }
    /// Fuzzes the transport capabilities with random length (0..100) and bytes.
    pub fn fuzz_with_random_length() -> Self {
        TransportCapabilities {
            reserved: create_random_byte_array_of_predefined_length(
                rand::thread_rng().gen_range(0..100),
            ),
        }
    }
}
impl DataSize for TransportCapabilities {
    /// Reports the reserved field's byte length as a 2-byte little-endian value.
    fn get_data_length(&self) -> Vec<u8> {
        let length = self.reserved.len() as u16;
        length.to_le_bytes().to_vec()
    }
}
impl std::fmt::Display for TransportCapabilities {
    /// Tab-indented debug rendering of the reserved bytes.
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        write!(
            f,
            "\n\t\t\tTransport Capabilities: \n\t\t\t\treserved: {:?}",
            self.reserved
        )
    }
}
/// *RDMA Transform None*:
/// - No transform
///
/// *RDMA Transform Encryption*:
/// - Encryption of data sent over RMDA.
#[derive(Debug, PartialEq, Eq, Clone)]
pub enum RdmaTransformIds {
    RdmaTransformNone,
    RdmaTransformEncryption,
}
impl RdmaTransformIds {
    /// Unpacks the byte code of RDMA transform ids (2-byte little-endian ID).
    pub fn unpack_byte_code(&self) -> Vec<u8> {
        match self {
            RdmaTransformIds::RdmaTransformNone => b"\x00\x00".to_vec(),
            RdmaTransformIds::RdmaTransformEncryption => b"\x01\x00".to_vec(),
        }
    }
}
impl Distribution<RdmaTransformIds> for Standard {
    /// Picks one of the two RDMA transform IDs with equal probability.
    fn sample<R: Rng + ?Sized>(&self, rng: &mut R) -> RdmaTransformIds {
        if rng.gen_range(0..=1) == 0 {
            RdmaTransformIds::RdmaTransformNone
        } else {
            RdmaTransformIds::RdmaTransformEncryption
        }
    }
}
/// The RDMA_TRANSFORM_CAPABILITIES context is specified in an
/// SMB2 NEGOTIATE request by the client to indicate the transforms
/// supported when data is sent over RDMA.
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct RdmaTransformCapabilities {
    /// TransformCount (2 bytes): The number of elements in RDMATransformIds array.
    /// This value MUST be greater than 0.
    pub transform_count: Vec<u8>,
    /// Reserved1 (2 bytes): This field MUST NOT be used and MUST be reserved.
    /// The sender MUST set this to 0, and the receiver MUST ignore it on receipt.
    pub reserved1: Vec<u8>,
    /// Reserved2 (4 bytes): This field MUST NOT be used and MUST be reserved.
    /// The sender MUST set this to 0, and the receiver MUST ignore it on receipt.
    pub reserved2: Vec<u8>,
    /// RDMATransformIds (variable): An array of 16-bit integer IDs specifying
    /// the supported RDMA transforms. The following IDs are defined.
    pub rdma_transform_ids: Vec<Vec<u8>>,
}
impl RdmaTransformCapabilities {
    /// Creates a new RDMATransformCapabilities instance with zeroed
    /// reserved fields and no transform IDs.
    pub fn default() -> Self {
        RdmaTransformCapabilities {
            transform_count: Vec::new(),
            reserved1: b"\x00\x00".to_vec(),
            reserved2: b"\x00\x00\x00\x00".to_vec(),
            rdma_transform_ids: Vec::new(),
        }
    }
    /// Fuzzes the rdma transform capabilities with the predefined length
    /// and semi-valid values: 0..100 random transform IDs, count encoded
    /// little-endian, reserved fields kept spec-valid (zero).
    pub fn fuzz_with_predefined_length() -> Self {
        let mut random_ids: Vec<RdmaTransformIds> = Vec::new();
        for _ in 0..rand::thread_rng().gen_range(0..100) {
            random_ids.push(rand::random());
        }
        RdmaTransformCapabilities {
            transform_count: (random_ids.len() as u16).to_le_bytes().to_vec(),
            reserved1: b"\x00\x00".to_vec(),
            reserved2: b"\x00\x00\x00\x00".to_vec(),
            rdma_transform_ids: random_ids
                .into_iter()
                .map(|id| id.unpack_byte_code())
                .collect(),
        }
    }
}
impl DataSize for RdmaTransformCapabilities {
    /// Gets the data length of the rdma transform capabilities.
    fn get_data_length(&self) -> Vec<u8> {
        // NOTE(review): adds the transform *count*, but each transform ID
        // occupies 2 bytes on the wire — confirm whether the apparent
        // undercount is intentional for fuzzing.
        let length = self.transform_count.len()
            + self.reserved1.len()
            + self.reserved2.len()
            + convert_byte_array_to_int(self.transform_count.clone(), false) as usize;
        (length as u16).to_le_bytes().to_vec()
    }
}
impl std::fmt::Display for RdmaTransformCapabilities {
    /// Tab-indented, multi-line debug rendering of all fields.
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        write!(
            f,
            "\n\t\t\tRDMA Transform Capabilities: \n\t\t\t\ttransform count: {:?}\
            \n\t\t\t\treserved1: {:?}\n\t\t\t\treserved2: {:?}\n\t\t\t\tids: {:?}",
            self.transform_count, self.reserved1, self.reserved2, self.rdma_transform_ids
        )
    }
}
| true |
773f2346f39cc6106a93e63137729968e3321cfe
|
Rust
|
mankenavenkatesh/ibc-rs
|
/relayer/src/worker/handle.rs
|
UTF-8
| 2,754 | 2.6875 | 3 |
[
"Apache-2.0",
"LicenseRef-scancode-warranty-disclaimer"
] |
permissive
|
use core::fmt;
use std::thread::{self, JoinHandle};
use crossbeam_channel::Sender;
use tracing::trace;
use ibc::{
core::{ics02_client::events::NewBlock, ics24_host::identifier::ChainId},
events::IbcEvent,
Height,
};
use crate::{event::monitor::EventBatch, object::Object};
use super::error::WorkerError;
use super::{WorkerCmd, WorkerId};
/// Handle to a [`Worker`], for sending [`WorkerCmd`]s to it.
pub struct WorkerHandle {
    // Unique identifier assigned to the worker when it was spawned.
    id: WorkerId,
    // The object this worker is responsible for.
    object: Object,
    // Command channel into the worker's event loop.
    tx: Sender<WorkerCmd>,
    // Join handle for the worker's background thread.
    thread_handle: JoinHandle<()>,
}
impl fmt::Debug for WorkerHandle {
    // Manual impl: only id and object are shown; the channel and thread
    // handle are omitted (hence finish_non_exhaustive).
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("WorkerHandle")
            .field("id", &self.id)
            .field("object", &self.object)
            .finish_non_exhaustive()
    }
}
impl WorkerHandle {
    /// Wraps an already-spawned worker thread and its command channel.
    pub fn new(
        id: WorkerId,
        object: Object,
        tx: Sender<WorkerCmd>,
        thread_handle: JoinHandle<()>,
    ) -> Self {
        Self {
            id,
            object,
            tx,
            thread_handle,
        }
    }
    /// Send a batch of events to the worker.
    pub fn send_events(
        &self,
        height: Height,
        events: Vec<IbcEvent>,
        chain_id: ChainId,
    ) -> Result<(), WorkerError> {
        let batch = EventBatch {
            chain_id,
            height,
            events,
        };
        // Fails only if the worker's receiving end has been dropped.
        self.tx
            .send(WorkerCmd::IbcEvents { batch })
            .map_err(WorkerError::send)
    }
    /// Send a batch of [`NewBlock`] event to the worker.
    pub fn send_new_block(&self, height: Height, new_block: NewBlock) -> Result<(), WorkerError> {
        self.tx
            .send(WorkerCmd::NewBlock { height, new_block })
            .map_err(WorkerError::send)
    }
    /// Instruct the worker to clear pending packets.
    pub fn clear_pending_packets(&self) -> Result<(), WorkerError> {
        self.tx
            .send(WorkerCmd::ClearPendingPackets)
            .map_err(WorkerError::send)
    }
    /// Shutdown the worker. This only requests shutdown; use `join` to wait
    /// for the worker loop to actually terminate.
    pub fn shutdown(&self) -> Result<(), WorkerError> {
        self.tx.send(WorkerCmd::Shutdown).map_err(WorkerError::send)
    }
    /// Wait for the worker thread to finish. Consumes the handle.
    pub fn join(self) -> thread::Result<()> {
        trace!(worker = %self.object.short_name(), "worker::handle: waiting for worker loop to end")
        let res = self.thread_handle.join();
        trace!(worker = %self.object.short_name(), "worker::handle: waiting for worker loop to end: done");
        res
    }
    /// Get the worker's id.
    pub fn id(&self) -> WorkerId {
        self.id
    }
    /// Get a reference to the worker's object.
    pub fn object(&self) -> &Object {
        &self.object
    }
}
| true |
d4863e8802fe4b963cfa2a6ab46052a723718653
|
Rust
|
delaneyj/minigame-rust
|
/src/collider.rs
|
UTF-8
| 7,001 | 3.140625 | 3 |
[
"MIT"
] |
permissive
|
extern crate cgmath;
use rectangle::Rectangle;
use self::cgmath::{InnerSpace, MetricSpace, Vector2};
use utils::{Clamp};
/// A circular collision shape defined by a center point and a radius.
pub struct CircleCollider {
    // Radius of the circle, in world units.
    radius: f32,
    // Center of the circle in absolute (world) coordinates.
    origin: Vector2<f32>,
}
impl CircleCollider {
    /// Build a circle collider of the given `radius` centered at `origin`.
    pub fn new(radius: f32, origin: Vector2<f32>) -> Self {
        // `Vector2<f32>` is `Copy`, so the origin can be stored directly.
        CircleCollider { radius, origin }
    }

    /// Center of the circle in absolute coordinates.
    pub fn get_absolute_position(&self) -> Vector2<f32> {
        self.origin
    }

    /// Radius of the circle.
    pub fn get_radius(&self) -> f32 {
        self.radius
    }

    /// X coordinate of the circle's leftmost point.
    pub fn get_absolute_left(&self) -> f32 {
        self.origin.x - self.radius
    }

    /// X coordinate of the circle's rightmost point.
    pub fn get_absolute_right(&self) -> f32 {
        self.origin.x + self.radius
    }

    /// Y coordinate of the circle's topmost point.
    pub fn get_absolute_top(&self) -> f32 {
        self.origin.y - self.radius
    }

    /// Y coordinate of the circle's bottommost point.
    pub fn get_absolute_bottom(&self) -> f32 {
        self.origin.y + self.radius
    }
}
/// An axis-aligned rectangular collision shape.
pub struct BoxCollider {
    // Top-left corner of the box in absolute (world) coordinates.
    origin: Vector2<f32>,
    // Horizontal extent of the box.
    width: f32,
    // Vertical extent of the box.
    height: f32,
}
impl BoxCollider {
    /// Build an axis-aligned box collider from its top-left corner and size.
    pub fn new(origin: Vector2<f32>, width: f32, height: f32) -> Self {
        BoxCollider { origin, width, height }
    }

    /// Top-left corner of the box in absolute coordinates.
    pub fn get_absolute_position(&self) -> Vector2<f32> {
        self.origin
    }

    /// X coordinate of the left edge.
    pub fn get_absolute_left(&self) -> f32 {
        self.origin.x
    }

    /// X coordinate of the right edge.
    pub fn get_absolute_right(&self) -> f32 {
        self.origin.x + self.width
    }

    /// Y coordinate of the top edge.
    pub fn get_absolute_top(&self) -> f32 {
        self.origin.y
    }

    /// Y coordinate of the bottom edge.
    pub fn get_absolute_bottom(&self) -> f32 {
        self.origin.y + self.height
    }

    /// View the box as a [`Rectangle`]; width and height are truncated to
    /// `i32`, matching the integer size fields of `Rectangle::new`.
    pub fn as_rect(&self) -> Rectangle {
        Rectangle::new(self.origin.x, self.origin.y, self.width as i32, self.height as i32)
    }
}
bitflags!{
    // Cohen–Sutherland outcode flags (legacy bitflags 0.x syntax).
    // LEFT/RIGHT occupy bits 3 and 2, TOP/BOTTOM bits 0 and 1; each corner
    // value is the bitwise OR of its two edges, e.g. TOP_LEFT = TOP | LEFT
    // = 1 | 8 = 9. CENTER (0) means "inside on both axes".
    flags PointSectors: u8 {
        const CENTER = 0,
        const TOP = 1,
        const BOTTOM = 2,
        const TOP_LEFT = 9,
        const TOP_RIGHT = 5,
        const LEFT = 8,
        const RIGHT = 4,
        const BOTTOM_LEFT = 10,
        const BOTTOM_RIGHT = 6
    }
}
/// Stateless namespace for the associated collision-test helper functions below.
pub struct Collider {
}
impl Collider {
    /// Return the point on segment `line_a`..`line_b` closest to `closest_to`.
    ///
    /// Projects the point onto the infinite line through the segment, then
    /// clamps the projection parameter `t` to `[0, 1]` so the result stays
    /// on the segment itself. (Fixes the misspelled `colsest_to` parameter.)
    pub fn closest_point_on_line(line_a: Vector2<f32>, line_b: Vector2<f32>, closest_to: Vector2<f32>) -> Vector2<f32> {
        let v = line_b - line_a;
        let w = closest_to - line_a;
        let mut t: f32 = w.dot(v) / v.dot(v);
        t = t.clamp(0.0, 1.0);
        line_a + v * t
    }

    /*
    Bitflags and helpers for using the Cohen–Sutherland algorithm
    http://en.wikipedia.org/wiki/Cohen%E2%80%93Sutherland_algorithm
    Sector bitflags:
        1001 1000 1010
        0001 0000 0010
        0101 0100 0110
    */

    /// Classify `point` into one of the nine Cohen–Sutherland sectors
    /// relative to `rect` (CENTER when inside on both axes).
    pub fn get_sector_rect_point(rect: Rectangle, point: Vector2<f32>) -> PointSectors {
        let mut sector = CENTER;
        if point.x < rect.get_left() {
            sector = sector | LEFT;
        } else if point.x >= rect.get_right() {
            sector = sector | RIGHT;
        }
        if point.y < rect.get_top() {
            sector = sector | TOP;
        } else if point.y >= rect.get_bottom() {
            sector = sector | BOTTOM;
        }
        sector
    }

    /// Same classification as [`Collider::get_sector_rect_point`], but for a
    /// rectangle given as split x/y/width/height components.
    pub fn get_sector_split_rect_point(r_x: f32, r_y: f32, r_w: f32, r_h: f32, point: Vector2<f32>) -> PointSectors {
        let mut sector = CENTER;
        if point.x < r_x {
            sector = sector | LEFT;
        } else if point.x >= r_x + r_w {
            sector = sector | RIGHT;
        }
        if point.y < r_y {
            sector = sector | TOP;
        } else if point.y >= r_y + r_h {
            sector = sector | BOTTOM;
        }
        sector
    }

    /// Test whether `point` lies strictly inside `circle`.
    /// Uses squared distance (`distance2`) to avoid a square root.
    pub fn collide_circle_to_point(circle: &CircleCollider, point: Vector2<f32>) -> bool {
        let c_pos = circle.get_absolute_position();
        c_pos.distance2(point) < circle.get_radius() * circle.get_radius()
    }

    /// Test whether `circle` intersects the segment `line_from`..`line_to`.
    pub fn collide_circle_to_line(circle: &CircleCollider, line_from: Vector2<f32>, line_to: Vector2<f32>) -> bool {
        let closest = Collider::closest_point_on_line(line_from, line_to, circle.get_absolute_position());
        closest.distance2(circle.get_absolute_position()) < circle.get_radius() * circle.get_radius()
    }

    /// Test whether `circle` overlaps the rectangle `rect`.
    pub fn collide_circle_to_rect(circle: &CircleCollider, rect: Rectangle) -> bool {
        // Check if the circle contains the rectangle's center-point
        if Collider::collide_circle_to_point(circle, Vector2::new(rect.x + rect.w as f32 / 2.0, rect.y + rect.h as f32 / 2.0)) {
            return true;
        }
        // Check the circle against the relevant edges only, selected by the
        // sector the circle's center falls into.
        let sector = Collider::get_sector_rect_point(rect, circle.get_absolute_position());
        if sector.contains(TOP) {
            let edge_from = Vector2::new(rect.x, rect.y);
            let edge_to = Vector2::new(rect.x + rect.w as f32, rect.y);
            if Collider::collide_circle_to_line(circle, edge_from, edge_to) {
                return true;
            }
        }
        if sector.contains(BOTTOM) {
            let edge_from = Vector2::new(rect.x, rect.y + rect.h as f32);
            let edge_to = Vector2::new(rect.x + rect.w as f32, rect.y + rect.h as f32);
            if Collider::collide_circle_to_line(circle, edge_from, edge_to) {
                return true;
            }
        }
        if sector.contains(LEFT) {
            let edge_from = Vector2::new(rect.x, rect.y);
            let edge_to = Vector2::new(rect.x, rect.y + rect.h as f32);
            if Collider::collide_circle_to_line(circle, edge_from, edge_to) {
                return true;
            }
        }
        if sector.contains(RIGHT) {
            let edge_from = Vector2::new(rect.x + rect.w as f32, rect.y);
            let edge_to = Vector2::new(rect.x + rect.w as f32, rect.y + rect.h as f32);
            if Collider::collide_circle_to_line(circle, edge_from, edge_to) {
                return true;
            }
        }
        false
    }

    /// Test whether two circles overlap: squared center distance must be
    /// less than the squared sum of the radii.
    pub fn collide_circle_to_circle(circle1: &CircleCollider, circle2: &CircleCollider) -> bool {
        let c1_pos = circle1.get_absolute_position();
        let c2_pos = circle2.get_absolute_position();
        c1_pos.distance2(c2_pos) < (circle1.get_radius() + circle2.get_radius()) * (circle1.get_radius() + circle2.get_radius())
    }

    /// Test whether a circle overlaps a box collider, via its `Rectangle` view.
    pub fn collide_circle_to_box(circle: &CircleCollider, box_collider: &BoxCollider) -> bool {
        Collider::collide_circle_to_rect(circle, box_collider.as_rect())
    }

    /// Axis-aligned box vs. box overlap test (separating-axis on x and y).
    pub fn collide_box_to_box(box_collider1: &BoxCollider, box_collider2: &BoxCollider) -> bool {
        box_collider1.get_absolute_left() < box_collider2.get_absolute_right() &&
            box_collider1.get_absolute_right() > box_collider2.get_absolute_left() &&
            box_collider1.get_absolute_bottom() > box_collider2.get_absolute_top() &&
            box_collider1.get_absolute_top() < box_collider2.get_absolute_bottom()
    }
}
| true |
b489e0a9428ad7b6a20dbc8bcc4edcf82a80104e
|
Rust
|
fossabot/fastobo
|
/fastobo/src/ast/mod.rs
|
UTF-8
| 8,192 | 2.8125 | 3 |
[
"MIT"
] |
permissive
|
//! Owned syntax tree for the [OBO format version 1.4].
//!
//! [`OboDoc`] is the struct acting as the root of the syntax tree. It can be
//! created from a borrowed string slice with either [`FromStr::from_str`] or
//! [`FromSlice::from_slice`], from a file with [`OboDoc::from_file`], or from
//! a buffered reader with [`OboDoc::from_stream`].
//!
//! [`FromStr::from_str`]: https://doc.rust-lang.org/std/str/trait.FromStr.html#tymethod.from_str
//! [`FromSlice::from_slice`]: ../parser/trait.FromSlice.html#method.from_slice
//! [`OboDoc`]: ./struct.OboDoc.html
//! [`OboDoc::from_file`]: ./struct.OboDoc.html#method.from_file
//! [`OboDoc::from_stream`]: ./struct.OboDoc.html#method.from_stream
//! [OBO format version 1.4]: http://owlcollab.github.io/oboformat/doc/GO.format.obo-1_4.html.
mod date;
mod header;
mod id;
mod instance;
mod line;
mod pv;
mod qualifier;
mod strings;
mod synonym;
mod term;
mod typedef;
mod xref;
pub use self::date::*;
pub use self::header::*;
pub use self::id::*;
pub use self::instance::*;
pub use self::line::*;
pub use self::pv::*;
pub use self::qualifier::*;
pub use self::strings::*;
pub use self::synonym::*;
pub use self::term::*;
pub use self::typedef::*;
pub use self::xref::*;
use std::fmt::Display;
use std::fmt::Formatter;
use std::fmt::Result as FmtResult;
use std::fmt::Write;
use std::fs::File;
use std::io::BufRead;
use std::io::BufReader;
use std::path::Path;
use std::str::FromStr;
use pest::iterators::Pair;
use pest::Parser;
use crate::error::Error;
use crate::error::Result;
use crate::parser::FromPair;
use crate::parser::OboParser;
use crate::parser::Rule;
/// A complete OBO document in format version 1.4.
#[derive(Clone, Default, Debug, Hash, Eq, PartialEq)]
pub struct OboDoc {
    /// The header frame holding document-level clauses.
    pub header: HeaderFrame,
    /// The term/typedef/instance frames of the document, in file order.
    pub entities: Vec<EntityFrame>,
}
impl OboDoc {
    /// Create a new OBO document with the provided frame.
    pub fn new(header: HeaderFrame) -> Self {
        Self {
            header,
            entities: Vec::new(),
        }
    }

    /// Create a new OBO document with the provided entity frames.
    pub fn with_entities<E>(header: HeaderFrame, entities: E) -> Self
    where
        E: IntoIterator<Item = EntityFrame>,
    {
        Self {
            header,
            entities: entities.into_iter().collect(),
        }
    }

    /// Consume a buffered stream containing an OBO document into an AST.
    ///
    /// The stream is parsed incrementally — one header clause, then one
    /// entity frame at a time — so the whole document never has to be held
    /// in memory as a single string. `line_offset` / `offset` track the
    /// absolute position (lines / bytes consumed) so that parse errors can
    /// be reported at their location within the overall stream.
    pub fn from_stream<B>(stream: &mut B) -> Result<Self>
    where
        B: BufRead,
    {
        let mut line = String::new();
        let mut l: &str;
        // Absolute byte offset and line number of the line being parsed.
        let mut offset = 0;
        let mut line_offset = 0;
        // collect the header frame
        let mut frame_clauses = Vec::new();
        loop {
            // Read the next line
            line.clear();
            stream.read_line(&mut line)?;
            l = line.trim();
            // Parse header as long as we didn't reach EOL or first frame.
            if !l.starts_with('[') && !l.is_empty() {
                unsafe {
                    // SAFETY: the pair handed to `from_pair_unchecked` was
                    // just produced by parsing the matching `HeaderClause`
                    // rule, which is the contract the unchecked call relies on.
                    let mut pairs = OboParser::parse(Rule::HeaderClause, &line)
                        .map_err(|e| Error::from(e).with_offsets(line_offset, offset))?;
                    let clause = HeaderClause::from_pair_unchecked(pairs.next().unwrap())?;
                    frame_clauses.push(clause);
                }
            }
            // Update offsets
            line_offset += 1;
            offset += line.len();
            // Bail out if we reached EOL or first frame.
            if l.starts_with('[') || line.is_empty() {
                break
            }
        }
        // create the OBO document
        let mut obodoc = Self::new(HeaderFrame::new(frame_clauses));
        // read all entity frames; `frame_lines` accumulates the lines of the
        // frame currently being read, and the local offsets measure its size
        // so the absolute offsets can be advanced once the frame is parsed.
        let mut frame_lines = String::new();
        let mut local_line_offset = 0;
        let mut local_offset = 0;
        while !line.is_empty() {
            // Read the next line.
            frame_lines.push_str(&line);
            line.clear();
            stream.read_line(&mut line)?;
            // Parse the accumulated frame once the next frame header (or EOF)
            // is reached.
            if line.trim_start().starts_with('[') || line.is_empty() {
                unsafe {
                    // SAFETY: same contract as above — the pair comes from a
                    // successful parse of the `EntitySingle` rule.
                    let mut pairs = OboParser::parse(Rule::EntitySingle, &frame_lines)
                        .map_err(|e| Error::from(e).with_offsets(line_offset, offset))?;
                    let entity = EntityFrame::from_pair_unchecked(pairs.next().unwrap())?;
                    obodoc.entities.push(entity);
                    frame_lines.clear()
                }
                // Update offsets
                line_offset += local_line_offset;
                offset += local_offset;
                // Reset local offsets
                local_line_offset = 0;
                local_offset = 0;
            }
            // Update local offsets
            local_line_offset += 1;
            local_offset += line.len();
        }
        Ok(obodoc)
    }

    /// Read an OBO file located somewhere in the filesystem.
    ///
    /// Any error is annotated with the file's path via `with_path`.
    pub fn from_file<P>(path: P) -> Result<Self>
    where
        P: AsRef<Path>,
    {
        let pathref: &Path = path.as_ref();
        File::open(pathref)
            .map_err(Error::from)
            .and_then(|f| Self::from_stream(&mut BufReader::new(f)))
            .map_err(|e| e.with_path(&pathref.to_string_lossy()))
    }

    /// Get a reference to the header of the OBO document.
    pub fn header(&self) -> &HeaderFrame {
        &self.header
    }

    /// Get a mutable reference to the header of the OBO document.
    pub fn header_mut(&mut self) -> &mut HeaderFrame {
        &mut self.header
    }

    /// Set the header of the OBO document.
    pub fn set_header(&mut self, header: HeaderFrame) {
        self.header = header
    }
}
/// Serialize the document: header, one blank separator line, then the entity
/// frames separated by single blank lines (no trailing newline after the last).
impl Display for OboDoc {
    fn fmt(&self, f: &mut Formatter) -> FmtResult {
        // Use `?` on each step instead of `a.and(b)`: `Result::and` evaluates
        // its argument eagerly, so the old code wrote the separator newline
        // even when formatting the header had already failed.
        self.header.fmt(f)?;
        f.write_char('\n')?;
        let mut entities = self.entities.iter().peekable();
        while let Some(entity) = entities.next() {
            entity.fmt(f)?;
            // Blank line between frames, but not after the final one.
            if entities.peek().is_some() {
                f.write_char('\n')?;
            }
        }
        Ok(())
    }
}
/// Build an [`OboDoc`] from a pest pair matching the `OboDoc` rule.
impl<'i> FromPair<'i> for OboDoc {
    const RULE: Rule = Rule::OboDoc;
    unsafe fn from_pair_unchecked(pair: Pair<'i, Rule>) -> Result<Self> {
        let mut inner = pair.into_inner();
        let mut entities = Vec::new();
        // The grammar guarantees the first inner pair is the header frame.
        let header = HeaderFrame::from_pair_unchecked(inner.next().unwrap())?;
        // The remaining pairs are entity frames, terminated by an EOI token.
        let mut pair = inner.next().unwrap();
        while pair.as_rule() != Rule::EOI {
            entities.push(EntityFrame::from_pair_unchecked(pair)?);
            pair = inner.next().unwrap();
        }
        Ok(OboDoc { header, entities })
    }
}
impl_fromstr!(OboDoc);
/// An entity frame, either for a term, an instance, or a typedef.
#[derive(Clone, Debug, Hash, Eq, PartialEq)]
pub enum EntityFrame {
    /// A `[Term]` frame.
    Term(TermFrame),
    /// A `[Typedef]` frame.
    Typedef(TypedefFrame),
    /// An `[Instance]` frame.
    Instance(InstanceFrame),
}
impl Display for EntityFrame {
fn fmt(&self, f: &mut Formatter) -> FmtResult {
use self::EntityFrame::*;
match self {
Term(t) => t.fmt(f),
Typedef(t) => t.fmt(f) ,
Instance(i) => i.fmt(f),
}
}
}
impl From<TermFrame> for EntityFrame {
fn from(frame: TermFrame) -> Self {
EntityFrame::Term(frame)
}
}
impl From<TypedefFrame> for EntityFrame {
fn from(frame: TypedefFrame) -> Self {
EntityFrame::Typedef(frame)
}
}
impl From<InstanceFrame> for EntityFrame {
fn from(frame: InstanceFrame) -> Self {
EntityFrame::Instance(frame)
}
}
/// Build an [`EntityFrame`] from a pest pair matching the `EntityFrame` rule.
impl<'i> FromPair<'i> for EntityFrame {
    const RULE: Rule = Rule::EntityFrame;
    unsafe fn from_pair_unchecked(pair: Pair<'i, Rule>) -> Result<Self> {
        // An `EntityFrame` pair wraps exactly one concrete frame rule.
        let inner = pair.into_inner().next().unwrap();
        match inner.as_rule() {
            Rule::TermFrame => TermFrame::from_pair_unchecked(inner).map(From::from),
            Rule::TypedefFrame => TypedefFrame::from_pair_unchecked(inner).map(From::from),
            // NOTE(review): instance-frame parsing is not implemented, so any
            // `[Instance]` frame in the input panics here rather than
            // returning an error — confirm this is intentional.
            Rule::InstanceFrame => unimplemented!(),
            _ => unreachable!(),
        }
    }
}
impl_fromstr!(EntityFrame);
| true |
76e279bd0bc6f78806e0c085f14f3bf6897747d8
|
Rust
|
slayfer-dev/Challenges-sol
|
/Programming/Codeabbey/012/CrazyCapitan.rs
|
UTF-8
| 1,327 | 2.921875 | 3 |
[] |
no_license
|
//Author CrazyCapitan
/// Codeabbey "crazy captain" task: read N pairs of day/hour/minute/second
/// timestamps from stdin and print each elapsed time as "(d h m s) ".
pub fn main() {
    let reader = std::io::stdin();
    let mut count = String::new();
    // Fixed typo in the error message ("Cloud" -> "Could").
    reader
        .read_line(&mut count)
        .expect("Could not read your input");
    for _ in 0..count.trim().parse::<i32>().unwrap() {
        let mut line = String::new();
        reader
            .read_line(&mut line)
            .expect("Could not read your input");
        // Parse the eight whitespace-separated integers: start d/h/m/s,
        // then end d/h/m/s.
        let mut time_arr: [i32; 8] = [0; 8];
        for (i, field) in line.split_whitespace().enumerate() {
            time_arr[i] = field.parse::<i32>().unwrap();
        }
        // Print "(d h m s) " — a space after each component except the last.
        print!("(");
        for (i, part) in time_diff(time_arr).iter().enumerate() {
            if i < 3 {
                print!("{} ", part);
            } else {
                print!("{}", part);
            }
        }
        print!(") ");
    }
    println!("");
}
/// Compute the elapsed time between two timestamps, each given as
/// day/hour/minute/second quadruples packed into one 8-element array
/// (`a[0..4]` = start, `a[4..8]` = end). Returns `[days, hours, mins, secs]`.
fn time_diff(a: [i32; 8]) -> [i32; 4] {
    const DAY: i32 = 24 * 60 * 60;
    const HOUR: i32 = 60 * 60;
    // Convert a d/h/m/s slice into a total number of seconds.
    let to_seconds = |t: &[i32]| t[0] * DAY + t[1] * HOUR + t[2] * 60 + t[3];
    let mut remaining = to_seconds(&a[4..8]) - to_seconds(&a[0..4]);
    // Peel off each unit in descending order, keeping the remainder.
    let mut diff = [0; 4];
    for (slot, unit) in diff.iter_mut().zip([DAY, HOUR, 60, 1]) {
        *slot = remaining / unit;
        remaining %= unit;
    }
    diff
}
| true |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.