blob_id
stringlengths 40
40
| language
stringclasses 1
value | repo_name
stringlengths 5
140
| path
stringlengths 5
183
| src_encoding
stringclasses 6
values | length_bytes
int64 12
5.32M
| score
float64 2.52
4.94
| int_score
int64 3
5
| detected_licenses
listlengths 0
47
| license_type
stringclasses 2
values | text
stringlengths 12
5.32M
| download_success
bool 1
class |
---|---|---|---|---|---|---|---|---|---|---|---|
7dbafc413541a75f50d0b34be3c17ebb3da75d22
|
Rust
|
Charles-Schleich/aoc-2020
|
/src/libday1.rs
|
UTF-8
| 980 | 3.234375 | 3 |
[] |
no_license
|
use std::fs::File ;
use std::io::prelude::*;
use std::io::BufReader;
use std::{thread, time};
use std::time::{Duration, Instant};
/// Reads newline-separated unsigned integers from `filename`.
///
/// Parsing stops at the first line that does not parse as a `u32`
/// (the empty string produced at end-of-file also fails to parse),
/// matching the original stop-at-first-error behaviour.
///
/// # Panics
/// Panics if the file cannot be opened.
fn read_input(filename: &str) -> Vec<u32> {
    let file = File::open(filename).unwrap();
    let reader = BufReader::new(file);
    let mut values = Vec::new();
    for line in reader.lines() {
        // Stop on an I/O error, mirroring the original loop's bail-out.
        let line = match line {
            Ok(l) => l,
            Err(_) => break,
        };
        // Stop at the first non-numeric line instead of skipping it.
        match line.trim().parse::<u32>() {
            Ok(v) => values.push(v),
            Err(_) => break,
        }
    }
    values
}
/// AoC 2020 day 1: find the two distinct entries summing to 2020 and
/// report their product.
///
/// Fixes relative to the original:
/// - the inner loop now starts at `i + 1`, so a single entry of 1010
///   can no longer be (incorrectly) paired with itself;
/// - a labeled break exits both loops once a pair is found (the plain
///   `break` only left the inner loop);
/// - the result is printed — previously `first`/`second` were computed
///   and silently discarded.
pub fn day1() {
    let input = read_input("./input/day1.txt");
    let mut first = 0;
    let mut second = 0;
    'search: for i in 0..input.len() {
        for j in (i + 1)..input.len() {
            if input[i] + input[j] == 2020 {
                first = input[i];
                second = input[j];
                break 'search;
            }
        }
    }
    // u64 product avoids any overflow worry for u32 operands.
    println!(
        "day1: {} + {} = 2020, product = {}",
        first,
        second,
        first as u64 * second as u64
    );
}
| true |
73f063e9f8c673a75780c4f574e7a8efc6f4a357
|
Rust
|
jkaessens/TheVault
|
/src/models.rs
|
UTF-8
| 1,548 | 2.5625 | 3 |
[] |
no_license
|
use crate::schema::*;
use serde::Serialize;
use chrono::NaiveDate;
/// A sequencing run row, mapped to the diesel `run` table.
/// Queryable and insertable as-is (the run `name` appears to act as the key
/// referenced by `Sample::run` — TODO confirm against the schema).
#[derive(Queryable,QueryableByName,Insertable,Debug,Serialize)]
#[table_name="run"]
pub struct Run {
pub name: String,
pub date: NaiveDate,
pub assay: String,
pub chemistry: String,
pub description: Option<String>,
pub investigator: String,
pub path: String,
}
/// A sample row read from the diesel `sample` table.
/// Query-only (no `Insertable`): the database-assigned `id` is present here,
/// and `NewSample` below is the insertable counterpart without it.
#[derive(Queryable,QueryableByName,Debug,Serialize, PartialEq, Eq, Hash, PartialOrd, Ord, Clone, Default)]
#[table_name = "sample"]
pub struct Sample {
pub run: String,
pub name: String,
pub dna_nr: Option<String>,
pub project: Option<String>,
pub lims_id: Option<i64>,
pub primer_set: Option<String>,
pub id: i32,
pub cells: Option<i32>,
}
/// Insertable form of `Sample`: identical columns minus the
/// database-assigned `id`.
#[derive(Insertable,Debug,Serialize,Clone,Default)]
#[table_name="sample"]
pub struct NewSample {
pub run: String,
pub name: String,
pub dna_nr: Option<String>,
pub project: Option<String>,
pub lims_id: Option<i64>,
pub primer_set: Option<String>,
pub cells: Option<i32>,
}
/// A FASTQ file row, mapped to the diesel `fastq` table;
/// `sample_id` links it to `Sample::id`.
#[derive(Queryable, QueryableByName, Insertable,Debug,Serialize)]
#[table_name="fastq"]
pub struct Fastq {
pub filename: String,
pub sample_id: i32
}
impl NewSample {
pub fn from_sample(s: &Sample) -> NewSample {
NewSample {
run: s.run.clone(),
name: s.name.clone(),
dna_nr: s.dna_nr.clone(),
project: s.project.clone(),
lims_id: s.lims_id,
primer_set: s.primer_set.clone(),
cells: s.cells
}
}
}
| true |
16125f6020606b8d011df004cb22273c75efc1a5
|
Rust
|
jaysonsantos/lenient-semver
|
/src/lib.rs
|
UTF-8
| 22,105 | 3.203125 | 3 |
[
"MIT",
"Apache-2.0"
] |
permissive
|
#![warn(missing_docs)]
/*!
Lenient parser for Semantic Version numbers.
## Motivation
This crate aims to provide an alternative parser for [semver `Version`s](https://crates.io/crates/semver).
Instead of adhering to the semver specification, this parser is more lenient in what it allows.
The differences include:
- Minor and Patch are optional and default to 0 (e.g. "1" parses as "1.0.0")
- Pre-release identifier may be separated by `.` as well (e.g. "1.2.3.rc1" parses as "1.2.3-rc1")
- Some pre-release identifiers are parsed as build identifier (e.g. "1.2.3.Final" parses as "1.2.3+Final")
- Additional numeric identifiers are parsed as build identifier (e.g "1.2.3.4.5" parses as "1.2.3+4.5")
- A leading `v` or `V` is allowed (e.g. "v1.2.3" parses as "1.2.3")
- Numbers that overflow an u64 are treated as strings (e.g. "1.2.3-9876543210987654321098765432109876543210" parses without error)
This diagram shows lenient parsing grammar

## Examples
```rust
# use semver_v100 as semver;
use semver::Version;
let version = lenient_semver::parse("1.2.3");
assert_eq!(version, Ok(Version::new(1, 2, 3)));
// examples of a version that would not be accepted by semver_parser
assert_eq!(
lenient_semver::parse("1.2.M1").unwrap(),
Version::parse("1.2.0-M1").unwrap()
);
assert!(Version::parse("1.2.M1").is_err());
assert_eq!(
lenient_semver::parse("1").unwrap(),
Version::parse("1.0.0").unwrap()
);
assert!(Version::parse("1").is_err());
assert_eq!(
lenient_semver::parse("1.2.3.Final").unwrap(),
Version::parse("1.2.3+Final").unwrap()
);
assert!(Version::parse("1.2.3.Final").is_err());
assert_eq!(
lenient_semver::parse("1.2.3.4.5").unwrap(),
Version::parse("1.2.3+4.5").unwrap()
);
assert!(Version::parse("1.2.3.4.5").is_err());
assert_eq!(
lenient_semver::parse("v1.2.3").unwrap(),
Version::parse("1.2.3").unwrap()
);
assert!(Version::parse("v1.2.3").is_err());
assert_eq!(
lenient_semver::parse("1.2.9876543210987654321098765432109876543210").unwrap(),
Version::parse("1.2.0-9876543210987654321098765432109876543210").unwrap()
);
assert!(Version::parse("1.2.9876543210987654321098765432109876543210").is_err());
```
## Parsing into custom versions
The parser is not fixed on returning a `semver::Version`, it instead parses into a `lenient_semver::VersionBuilder`.
The default features for this crate contain a `VersionBuilder` implementation for `semver::Version`, but any implementation can be used with `parse_into`.
### Examples
```rust
# // This example is replicated in the tests module
# // Please try to keep them in sync
use lenient_semver::VersionBuilder;
/// Simpler version struct that lives only on the stack
#[derive(Debug, Default)]
struct MyVersion {
numbers: [u64; 3],
is_pre_release: bool,
}
/// The VersionBuilder trait is generic over the lifetime of the input string.
/// We don't store references to those strings, so we don't care about the specific lifetime.
impl VersionBuilder<'_> for MyVersion {
/// We will modify the target struct directly
type Out = Self;
/// Construct a new builder instance.
/// One can only expect `set_major` to be called before `build`, all other methods are optional.
fn new() -> Self {
Self::default()
}
/// Construct the final result. In this case, we can just return ourselves.
fn build(self) -> Self::Out {
self
}
/// Called when the major component was found.
fn set_major(&mut self, major: u64) {
self.numbers[0] = major;
}
/// Called when the minor component was found.
fn set_minor(&mut self, minor: u64) {
self.numbers[1] = minor;
}
/// Called when the patch component was found.
fn set_patch(&mut self, patch: u64) {
self.numbers[2] = patch;
}
/// Called when any pre-relase metadata identifier was found.
/// This identifier can just numeric, no attempts at parsing it into a number have been made.
/// For this implementation, we don't care about the value, just it's presence.
fn add_pre_release(&mut self, _pre_release: &str) {
self.is_pre_release = true
}
}
let input = "1.3.3.7-alpha21+build.42";
let my_version = lenient_semver::parse_into::<MyVersion>(input).unwrap();
assert_eq!([1, 3, 3], my_version.numbers);
assert!(my_version.is_pre_release);
```
The VersionBuilder has empty default implementation for the various methods, making it easy to use it for use-cases beyond just parsing.
The following example implements a function that checks if a given string represents any form of pre-release version.
```rust
# // This example is replicated in the tests module
# // Please try to keep them in sync
use lenient_semver::VersionBuilder;
/// newtype around bool, so we can implement the VersionBuilder trait for it
#[derive(Debug, Default)]
struct IsPreRelease(bool);
impl VersionBuilder<'_> for IsPreRelease {
/// Here we parse into a different value than Self
type Out = bool;
fn new() -> Self {
Self::default()
}
/// Return the wrapped bool
fn build(self) -> Self::Out {
self.0
}
/// We only care about this method and can ignore all the other ones
fn add_pre_release(&mut self, _pre_release: &str) {
self.0 = true;
}
}
/// This method also return false for invalid version strings,
/// which is technically true, as those are not pre-release versions.
/// Usually you would want to have a better error handling.
fn is_pre_release(v: &str) -> bool {
lenient_semver::parse_into::<IsPreRelease>(v).unwrap_or_default()
}
assert!(is_pre_release("1.2.3-pre") == true);
assert!(is_pre_release("1.2.3") == false);
assert!(is_pre_release("1.2.3+build") == false);
```
## Features
`lenient_semver` comes with a number of features:
| feature name | default enabled | transitive dependencies | purpose
| -------------: | --------------- | ----------------------- | --------
| semver | **yes** | `semver = "1"` | Provides `VersionBuilder` implementation for `semver = "1"`.
| semver011 | no | `semver = "0.11"` | Provides `VersionBuilder` implementation for `semver = "0.11"`.
| semver010 | no | `semver = "0.10"` | Provides `VersionBuilder` implementation for `semver = "0.10"`.
| parse_partial | no | | Provides `parse_partial` method for partially parsing a version from the beginning of a string.
| version_lite | no | `lenient_version = "*"` | A custom Version as alternative to `semver::Version` that complements some lenient features, such as additional numbers beyond patch.
| version_semver | no | `lenient_version = "*"` | Add conversions From `lenient_version` Into `semver::Version`.
| version_serde | no | `serde = "1"` | Serde Deserializer and Serializer implementation for `lenient_version`.
### Examples
#### `semver`
```toml
lenient_semver = { version = "*", features = [ "semver" ] }
```
```rust
# #[cfg(not(feature = "semver"))]
# compile_error!("Please run doc tests with --all-features");
# use semver_v100 as semver;
use semver::Version;
// This features is enabled by default and is usable through `parse` directly.
let version = lenient_semver::parse("v1.2.3.Final").unwrap();
assert_eq!(version, Version::parse("1.2.3+Final").unwrap());
// It can also be used with `parse_into`.
let version = lenient_semver::parse_into::<Version>("v1.2.3.Final").unwrap();
assert_eq!(version, Version::parse("1.2.3+Final").unwrap());
```
#### `semver011`
```toml
lenient_semver = { version = "*", features = [ "semver011" ] }
```
```rust
# #[cfg(not(feature = "semver011"))]
# compile_error!("Please run doc tests with --all-features");
# use semver_v011 as semver;
// Rename is just for demonstration and not required
use semver::Version as Version011;
// The default parse is fixed to the latest semver::Version,
// so we need to use `parse_into`.
let version = lenient_semver::parse_into::<Version011>("v1.2.3.Final").unwrap();
assert_eq!(version, Version011::parse("1.2.3+Final").unwrap());
```
#### `semver010`
```toml
lenient_semver = { version = "*", features = [ "semver010" ] }
```
```rust
# #[cfg(not(feature = "semver010"))]
# compile_error!("Please run doc tests with --all-features");
# use semver_v010 as semver;
// Rename is just for demonstration and not required
use semver::Version as Version010;
// The default parse is fixed to the latest semver::Version,
// so we need to use `parse_into`.
let version = lenient_semver::parse_into::<Version010>("v1.2.3.Final").unwrap();
assert_eq!(version, Version010::parse("1.2.3+Final").unwrap());
```
#### `version_lite`
```toml
lenient_semver = { version = "*", features = [ "version_lite" ] }
```
With this feature, lenient_semver now comes with its own version.
That particular implementation supports numbers beyond patch directly.
Note that lenient_semver still parses those additional number without complaining,
but they are added as build attribute to semver Versions.
```rust
# #[cfg(not(feature = "version_lite"))]
# compile_error!("Please run doc tests with --all-features");
use lenient_semver::Version;
let version = lenient_semver::parse_into::<Version>("1.3.3.7").unwrap();
assert_eq!(version, Version::parse("1.3.3.7").unwrap()); // Version::parse delegates to this parser
```
The native support allows such version to be compared properly, which does not work with semver.
```rust
# #[cfg(not(all(feature = "version_lite", feature = "semver011")))]
# compile_error!("Please run doc tests with --all-features");
# use semver_v011 as semver;
use lenient_semver::Version;
let version_a = Version::parse("1.3.3.7").unwrap();
let version_b = Version::parse("1.3.3.8").unwrap();
assert!(version_a < version_b);
// with semver pre 1.0, that fails:
let version_a = lenient_semver::parse_into::<semver::Version>("1.3.3.7").unwrap();
let version_b = lenient_semver::parse_into::<semver::Version>("1.3.3.8").unwrap();
assert_eq!(version_a < version_b, false);
assert_eq!(version_a, version_b);
```
Furthermore, `Version` does not own the data for the metadata identifiers.
The metadata can be disassociated, so the version can reference a different owner.
```rust
# // This example is replicated in the tests module
# // Please try to keep them in sync
# #[cfg(not(feature = "version_lite"))]
# compile_error!("Please run doc tests with --all-features");
use lenient_semver::{Version, VersionBuilder};
let input = "1.3.3.7-beta.21+build.42";
// make an owned copy, so we don't cheat by using the 'static lifetime.
let input = String::from(input);
// This version references slices from the `input` String
let version = lenient_semver::parse_into::<Version>(input.as_ref()).unwrap();
// Which prevents us from dropping the input
// drop(input);
// We can disassociate the metadata, which allows the new version to reference something else
let (mut version, pre, build) = version.disassociate_metadata();
// We still get the referenced input slices, so we create owned copies
let pre: Option<String> = pre.map(ToOwned::to_owned);
let build: Option<String> = build.map(ToOwned::to_owned);
// now we can safely drop the input
drop(input);
// We can also re-add the cloned identifiers.
// The version would now be bound to the lifetime of this method.
// Just for fun, we swap pre-release and build
if let Some(pre) = pre.as_deref() {
version.add_build(pre);
}
if let Some(build) = build.as_deref() {
version.add_pre_release(build);
}
assert_eq!("1.3.3.7-build.42+beta.21".to_string(), version.to_string());
```
#### `version_semver`
```toml
lenient_semver = { version = "*", features = [ "version_semver" ] }
```
If you need to store an owned copy of the version information, you should copy into `semver::Version` or your custom version type instead.
If you only ever intend to store the version information, it might make more sense to parse directly into `semver::Version` instead.
```rust
# #[cfg(all(not(feature = "version_lite"), not(feature = "version_semver")))]
# compile_error!("Please run doc tests with --all-features");
# use semver_v100 as semver;
use semver::Version;
let input = String::from("v1.3.3.7-beta-21+build-42");
let version = lenient_semver::Version::parse(&input).unwrap();
let version = Version::from(version);
assert_eq!("1.3.3-beta-21+7.build-42", &version.to_string());
```
#### `version_serde`
```toml
lenient_semver = { version = "*", features = [ "version_serde" ] }
```
This feature also enabled `version_lite` and brings serde support for the own Version type.
Since `lenient_semver::Version` does not take ownership of the metadata identifiers,
the lifetime of the deserialization result is bound to the input.
```rust
# // This example is replicated in the tests module
# // Please try to keep them in sync
# #[cfg(any(not(feature = "version_lite"), not(feature = "version_serde")))]
# compile_error!("Please run doc tests with --all-features");
use lenient_semver::{Version, VersionBuilder};
use serde::Deserialize;
#[derive(Debug, Deserialize)]
struct DependencySpec<'input> {
/// Refer to name as owned value
name: String,
/// Borrows from the input string
#[serde(borrow)]
version: Version<'input>,
}
let input = "
{
\"name\": \"lenient_semver\",
\"version\": \"1.3.3.7+build.42\"
}";
// make an owned copy, so we don't cheat by using the 'static lifetime.
let input = String::from(input);
// use serde as one would normally do
let dep: DependencySpec = serde_json::from_str(input.as_ref()).unwrap();
// cannot move out of `input` because it is borrowed
// drop(input);
let mut expected = Version::new(1, 3, 3);
expected.add_additional(7);
expected.add_build("build.42");
assert_eq!(dep.version, expected);
// now we can drop the input
drop(input);
```
#### `parse_partial`
```toml
lenient_semver = { version = "*", features = [ "parse_partial" ] }
```
This feature enables the `partial` feature of the parser.
The partial parser will not try to consume all input.
Instead it parses the version as far as possible and will return the unconsumed input alongside the parsed version.
```rust
# // This example is replicated in the tests module
# // Please try to keep them in sync
# #[cfg(not(feature = "parse_partial"))]
# compile_error!("Please run doc tests with --all-features");
use lenient_semver::{Version, VersionBuilder, parser};
let input = "1.2.3 42+build 1.3.3.7 // end";
// parse first version
let (version, remainder) = parser::parse_partial::<Version>(input).unwrap();
let expected = Version::new(1, 2, 3);
assert_eq!(version, expected);
// trailing whitespace is considered part of a version and consumed as well
assert_eq!("42+build 1.3.3.7 // end", remainder);
// parse second version
let (version, remainder) = parser::parse_partial::<Version>(remainder).unwrap();
let mut expected = Version::new(42, 0, 0);
expected.add_build("build");
assert_eq!(version, expected);
assert_eq!("1.3.3.7 // end", remainder);
// parse last version
let (version, remainder) = parser::parse_partial::<Version>(remainder).unwrap();
let mut expected = Version::new(1, 3, 3);
expected.add_additional(7);
assert_eq!(version, expected);
assert_eq!("// end", remainder);
// parse partial still expects to parse something.
// It will fail with `UnexpectedInput` or `MissingMajorNumber` if the input does not match at least a major version.
// let's try to parse the remaining input
let error = parser::parse_partial::<Version>(remainder).unwrap_err();
assert_eq!(error.error_kind(), parser::ErrorKind::UnexpectedInput);
assert_eq!(error.error_line(), "Unexpected `/`");
// or an empty string
let error = parser::parse_partial::<Version>(" ").unwrap_err();
assert_eq!(error.error_kind(), parser::ErrorKind::MissingMajorNumber);
assert_eq!(
error.error_line(),
"Could not parse the major identifier: No input"
);
// The rules of when a certain number will be parsed are even more relaxed
let (version, remainder) = parser::parse_partial::<Version>("1foobar").unwrap();
let expected = Version::new(1, 0, 0);
assert_eq!(version, expected);
assert_eq!(remainder, "foobar");
// Furthermore, the characters `*` and `?` are allowed to appear everywhere where other alphabetic character are allowed.
// This relaxes the rule that only a-z, A-Z, and 0-9 are allowed.
// Those characters have no special meaning and will be parsed as pre-release or build segment.
let (version, remainder) = parser::parse_partial::<Version>("1.2.*+final?").unwrap();
let mut expected = Version::new(1, 2, 0);
expected.add_pre_release("*");
expected.add_build("final?");
assert_eq!(version, expected);
assert_eq!(remainder, "");
```
*/
pub use lenient_semver_parser::{self as parser, VersionBuilder};
#[cfg(feature = "version_lite")]
pub use lenient_version::{Version, Version as VersionLite};
/// Parse a string slice into a Version.
///
/// This parser does not require semver-specification conformant input and is more lenient in what it allows.
/// The differences include:
///
/// - Minor and Patch are optional and default to 0 (e.g. "1" parses as "1.0.0")
/// - Pre-release identifier may be separated by `.` as well (e.g. "1.2.3.rc1" parses as "1.2.3-rc1")
/// - Some pre-release identifiers are parsed as build identifier (e.g. "1.2.3.Final" parses as "1.2.3+Final")
/// - Additional numeric identifiers are parsed as build identifier (e.g "1.2.3.4.5" parses as "1.2.3+4.5")
/// - A leading `v` or `V` is allowed (e.g. "v1.2.3" parses as "1.2.3")
/// - Numbers that overflow an u64 are treated as strings (e.g. "1.2.3-9876543210987654321098765432109876543210" parses without error)
///
/// This diagram shows lenient parsing grammar
///
/// 
///
/// ## Examples
///
/// ```rust
/// # use semver_v100 as semver;
/// use semver::Version;
///
/// let version = lenient_semver::parse("1.2.3");
/// assert_eq!(version, Ok(Version::new(1, 2, 3)));
///
/// // examples of a version that would not be accepted by semver_parser
/// assert_eq!(
/// lenient_semver::parse("1.2.M1").unwrap(),
/// Version::parse("1.2.0-M1").unwrap()
/// );
/// assert!(Version::parse("1.2.M1").is_err());
///
/// assert_eq!(
/// lenient_semver::parse("1").unwrap(),
/// Version::parse("1.0.0").unwrap()
/// );
/// assert!(Version::parse("1").is_err());
///
/// assert_eq!(
/// lenient_semver::parse("1.2.3.Final").unwrap(),
/// Version::parse("1.2.3+Final").unwrap()
/// );
/// assert!(Version::parse("1.2.3.Final").is_err());
///
/// assert_eq!(
/// lenient_semver::parse("1.2.3.4.5").unwrap(),
/// Version::parse("1.2.3+4.5").unwrap()
/// );
/// assert!(Version::parse("1.2.3.4.5").is_err());
///
/// assert_eq!(
/// lenient_semver::parse("v1.2.3").unwrap(),
/// Version::parse("1.2.3").unwrap()
/// );
/// assert!(Version::parse("v1.2.3").is_err());
///
/// assert_eq!(
/// lenient_semver::parse("1.2.9876543210987654321098765432109876543210").unwrap(),
/// Version::parse("1.2.0-9876543210987654321098765432109876543210").unwrap()
/// );
/// assert!(Version::parse("1.2.9876543210987654321098765432109876543210").is_err());
/// ```
///
/// This method is fixed to return a [`semver_v100::Version`].
/// A more flexible variant is [`parse_into`].
#[cfg(feature = "semver")]
pub fn parse(input: &str) -> Result<semver_v100::Version, parser::Error> {
// Delegates to the lenient parser, fixed to the `semver = "1"` Version type.
parser::parse::<semver_v100::Version>(input)
}
/// Parse a string slice into a Version.
///
/// This parser does not require semver-specification conformant input and is more lenient in what it allows.
/// The differences include:
///
/// - Minor and Patch are optional and default to 0 (e.g. "1" parses as "1.0.0")
/// - Pre-release identifier may be separated by `.` as well (e.g. "1.2.3.rc1" parses as "1.2.3-rc1")
/// - Some pre-release identifiers are parsed as build identifier (e.g. "1.2.3.Final" parses as "1.2.3+Final")
/// - Additional numeric identifiers are parsed as build identifier (e.g "1.2.3.4.5" parses as "1.2.3+4.5")
/// - A leading `v` or `V` is allowed (e.g. "v1.2.3" parses as "1.2.3")
/// - Numbers that overflow an u64 are treated as strings (e.g. "1.2.3-9876543210987654321098765432109876543210" parses without error)
///
/// This diagram shows lenient parsing grammar
///
/// 
///
/// This method can parse anything that implements [`VersionBuilder`].
///
/// ## Examples
///
/// ```rust
/// use lenient_semver::Version;
///
/// let version = lenient_semver::parse_into::<Version>("1.2.3");
/// assert_eq!(version, Ok(Version::new(1, 2, 3)));
///
/// // examples of a version that would not be accepted by semver_parser
/// assert_eq!(
/// lenient_semver::parse_into::<Version>("1.2.M1").unwrap(),
/// Version::parse("1.2.0-M1").unwrap()
/// );
///
/// assert_eq!(
/// lenient_semver::parse_into::<Version>("1").unwrap(),
/// Version::parse("1.0.0").unwrap()
/// );
///
/// assert_eq!(
/// lenient_semver::parse_into::<Version>("1.2.3.Final").unwrap(),
/// Version::parse("1.2.3+Final").unwrap()
/// );
///
/// assert_eq!(
/// lenient_semver::parse_into::<Version>("1.2.3.4.5").unwrap(),
/// Version::parse("1.2.3.4.5").unwrap()
/// );
///
/// assert_eq!(
/// lenient_semver::parse_into::<Version>("v1.2.3").unwrap(),
/// Version::parse("1.2.3").unwrap()
/// );
///
/// assert_eq!(
/// lenient_semver::parse_into::<Version>("1.2.9876543210987654321098765432109876543210").unwrap(),
/// Version::parse("1.2.0-9876543210987654321098765432109876543210").unwrap()
/// );
/// ```
pub fn parse_into<'input, V>(input: &'input str) -> Result<V::Out, parser::Error<'input>>
where
V: VersionBuilder<'input>,
{
// Generic front-end: the error borrows from `input`, hence the explicit lifetime.
parser::parse::<V>(input)
}
#[cfg(test)]
mod tests;
| true |
88fccc2134859631d7b987cca29c3e688c8b91e6
|
Rust
|
mottodora/contest
|
/yukicoder/7.rs
|
UTF-8
| 885 | 3.328125 | 3 |
[] |
no_license
|
/// Read a single line from stdin, including the trailing newline if one
/// was present. I/O errors are silently ignored, leaving the buffer empty.
fn getline() -> String {
    let mut buf = String::new();
    std::io::stdin().read_line(&mut buf).ok();
    buf
}
/// Returns all primes in `[2, n]` in ascending order (empty for `n < 2`).
///
/// Replaces the original's per-prime `Vec::retain` passes (a full O(n)
/// scan for every prime found, which is accidentally quadratic) with the
/// classic boolean Sieve of Eratosthenes: marking starts at `p * p` and
/// the outer loop stops once `p * p > n`, giving O(n log log n).
fn eratosthenes(n: i32) -> Vec<i32> {
    if n < 2 {
        return Vec::new();
    }
    let limit = n as usize;
    let mut is_prime = vec![true; limit + 1];
    is_prime[0] = false;
    is_prime[1] = false;
    let mut p = 2usize;
    while p * p <= limit {
        if is_prime[p] {
            // Smaller multiples of p were already struck by smaller primes.
            let mut m = p * p;
            while m <= limit {
                is_prime[m] = false;
                m += p;
            }
        }
        p += 1;
    }
    (2..=limit).filter(|&i| is_prime[i]).map(|i| i as i32).collect()
}
// Yukicoder #7 style prime game, solved with game-theory DP:
// dp[i] == true means the position with value i is a win for the player
// to move. From i you may subtract any prime p with p <= i - 2; a
// position wins iff some move leads to a losing position.
fn main() {
let n: i32 = getline().trim().parse().unwrap();
let primes = eratosthenes(n);
// dp sized for this problem's maximum n (presumably n <= 10_000 — TODO confirm).
// Positions below 4 never get a move here and stay `false` (losses).
let mut dp = [false; 10001];
let l = primes.len() as i32;
for i in 4..(n+1) {
let mut j = 0;
// Try each prime move primes[j] while it keeps the opponent at >= 2.
// NOTE(review): primes[0] is read before the j < l bound check; this is
// safe only because the loop requires i >= 4, which guarantees at least
// one prime (2) exists.
while primes[j as usize] <= i-2{
// "win if ANY successor is a loss": OR-in the negation of the successor.
dp[i as usize] |= !dp[(i - primes[j as usize]) as usize];
j += 1;
if j >= l {
break;
}
}
}
if dp[n as usize] {
println!("Win");
}
else {
println!("Lose");
}
}
| true |
b63f3d5b850c62709566212eaf5f6c617604c518
|
Rust
|
rinde/aws-lambda-rust-runtime
|
/lambda-runtime-errors/src/lib.rs
|
UTF-8
| 6,707 | 3.484375 | 3 |
[
"Apache-2.0"
] |
permissive
|
//! The Lambda runtime errors crate defines the `LambdaErrorExt` trait
//! that can be used by libriaries to return errors compatible with the
//! AWS Lambda Rust runtime.
//!
//! This crate also exports the `lambda_runtime_errors_derive` crate to
//! derive the `LambdaErrorExt` trait.
//!
//! ```rust,no-run
//! use lambda_runtime_errors::*;
//!
//! // the generated error_type() method returns "crate::LambdaError"
//! #[derive(LambdaErrorExt)]
//! struct LambdaError;
//! ```
mod error_ext_impl;
pub use crate::error_ext_impl::*;
pub use lambda_runtime_errors_derive::*;
use failure::{format_err, Compat, Error, Fail};
use std::fmt;
/// The `LambdaErrorExt` trait defines the `error_type()` method used
/// by the AWS Lambda runtime client to generate `ErrorResponse`
/// objects. The value returned by the `error_type()` method is used to
/// populate the `errorType` field in the Lambda response. This crate
/// includes an implementation of this trait for most errors in the
/// standard library. By default, errors return their type name.
pub trait LambdaErrorExt {
/// The value for this field should be an alphanumeric unique identifier
/// of the error type. For example `MyCustomError`.
///
/// # Return
/// An alphanumeric identifier for the error
fn error_type(&self) -> &str;
}
impl LambdaErrorExt for Error {
fn error_type(&self) -> &str {
// Report the name of the innermost (root) failure; fall back to a
// generic label when the cause chain carries no name.
self.find_root_cause().name().unwrap_or_else(|| "FailureError")
}
}
// We implement this trait here so that we can use the Compat type
// in the lambda-runtime crate - heaps of fun between failure and std::error
impl LambdaErrorExt for Compat<Error> {
fn error_type(&self) -> &str {
// Compat wraps the failure opaquely, so a fixed label is all we report.
"CompatFailureError"
}
}
/// `Result` type extension for AWS that makes it easy to generate a `HandlerError`
/// object or a `Compat<Error>` from the failure crate using an existing result.
/// This trait should be imported from the `lambda_runtime_core` or `lambda_runtime`
/// crates.
pub trait LambdaResultExt<OK, ERR> {
/// Takes the incoming `Result` and maps it to a Result that returns an `HandlerError` object.
/// The `HandlerError` type already includes implementations of the `From` trait for most
/// standard library errors. This method is intended to be used when the `From` trait is not
/// implemented.
///
/// # Example
///
/// ```rust,no_run
/// use lambda_runtime_core::{Context, LambdaResultExt, HandlerError, lambda};
/// use std::error::Error as StdError;
///
/// fn main() -> Result<(), Box<dyn StdError>> {
/// lambda!(my_handler);
/// Ok(())
/// }
///
/// fn my_handler(_event: Vec<u8>, _ctx: Context) -> Result<Vec<u8>, HandlerError> {
/// let age = "hello"; // this will throw an error when we try to parse it into an int
/// age.parse::<u8>().handler_error()?;
///
/// Ok(vec!())
/// }
/// ```
fn handler_error(self) -> Result<OK, HandlerError>;
/// Takes the incoming result and converts it into an `Error` type from the `failure` crate
/// wrapped in a `Compat` object to make it implement the `Error` trait from the standard
/// library. This method makes it easy to write handler functions that return `Compat<Error>`
/// directly.
///
/// # Example
///
/// ```rust,no_run
/// use lambda_runtime_core::{Context, LambdaResultExt, lambda};
/// use failure::{Error, Compat};
/// use std::error::Error as StdError;
///
/// fn main() -> Result<(), Box<dyn StdError>> {
/// lambda!(my_handler);
/// Ok(())
/// }
///
/// fn my_handler(_event: Vec<u8>, _ctx: Context) -> Result<Vec<u8>, Compat<Error>> {
/// let age = "hello"; // this will throw an error when we try to parse it into an int
/// age.parse::<u8>().failure_compat()?;
/// Ok(vec!())
/// }
/// ```
fn failure_compat(self) -> Result<OK, Compat<Error>>;
}
// Blanket impl: any Result whose error is a named failure gains both adapters.
impl<OK, ERR> LambdaResultExt<OK, ERR> for Result<OK, ERR>
where
ERR: Fail + LambdaErrorExt,
{
fn handler_error(self) -> Result<OK, HandlerError> {
// Wraps the error, preserving its error_type() (see HandlerError::new).
self.map_err(HandlerError::new)
}
fn failure_compat(self) -> Result<OK, Compat<Error>> {
// Compat makes the failure Error implement std::error::Error.
self.map_err(|err| Error::from(err).compat())
}
}
/// The `HandlerError` struct can be used to abstract any `Err` of the handler method `Result`.
/// The `HandlerError` object can be generated `From` any object that supports `Display`,
/// `Send`, `Sync`, and `Debug`. This allows handler functions to return any error using
/// the `?` syntax. For example `let _age_num: u8 = e.age.parse()?;` will return the
/// `<F as FromStr>::Err` from the handler function.
//pub type HandlerError = failure::Error;
#[derive(Debug)]
pub struct HandlerError {
// Cached error_type() of the wrapped error, reported to Lambda as `errorType`.
err_type: String,
// The original failure, retained for Display and cause-chain access.
inner: failure::Error,
}
impl HandlerError {
/// Wraps a named failure, capturing its `error_type()` before the
/// conversion to `failure::Error` erases the concrete type.
pub fn new<T: failure::Fail + LambdaErrorExt + Send + Sync>(e: T) -> Self {
let err_type = e.error_type().to_owned();
HandlerError {
err_type,
inner: failure::Error::from(e),
}
}
}
// Marker impl: Display + Debug above satisfy the std Error trait.
impl std::error::Error for HandlerError {}
impl fmt::Display for HandlerError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
// "ErrorType: root cause" — the innermost failure in the chain.
write!(f, "{}: {}", self.err_type, self.inner.find_root_cause())
}
}
impl LambdaErrorExt for HandlerError {
fn error_type(&self) -> &str {
// Return the type name captured at construction time.
&self.err_type
}
}
// Ad-hoc string errors carry no type information, hence "UnknownError".
impl From<&str> for HandlerError {
fn from(s: &str) -> Self {
HandlerError {
err_type: "UnknownError".to_owned(),
inner: format_err!("{}", s),
}
}
}
// Uses the LambdaErrorExt impl for failure::Error above, i.e. the name
// of the root cause (or "FailureError" when unnamed).
impl From<failure::Error> for HandlerError {
fn from(e: failure::Error) -> Self {
let error_type = e.error_type();
HandlerError {
err_type: error_type.to_owned(),
inner: e,
}
}
}
// serde_json errors get a fixed, stable "JsonError" type label.
impl From<serde_json::error::Error> for HandlerError {
fn from(e: serde_json::error::Error) -> Self {
HandlerError {
err_type: "JsonError".to_owned(),
inner: failure::Error::from(e),
}
}
}
#[cfg(test)]
pub(crate) mod tests {
use super::*;
use failure::Fail;
// Derived Fail with no explicit name: error_type falls back to the type path.
#[derive(Fail, Debug)]
#[fail(display = "Custom Error")]
struct CustomError;
#[test]
fn std_error_type() {
// A std-library error's type name should round-trip through HandlerError.
let parsed_int = "hello".parse::<u8>();
let err = HandlerError::from(parsed_int.err().unwrap());
assert_eq!(err.error_type(), "std::num::ParseIntError");
}
#[test]
fn error_type_from_failure() {
// Going through failure::Error should preserve the derived type path.
let err = HandlerError::from(failure::Error::from(CustomError {}));
assert_eq!(err.error_type(), "lambda_runtime_errors::tests::CustomError");
}
}
| true |
c226541ad8e62e455bbdf372f4c2749281d9baa9
|
Rust
|
SchrodingerZhu/toylibc
|
/src/time/sys.rs
|
UTF-8
| 3,768 | 2.8125 | 3 |
[] |
no_license
|
use core::convert::TryInto;
use core::time::Duration;
use syscalls::syscall2;
use crate::{clockid_t, timespec};
use crate::constants::*;
// Wrapper around the raw C-style `timespec` (tv_sec seconds + tv_nsec
// nanoseconds), mirroring std's internal Unix Timespec.
// NOTE(review): the derived Ord compares via the inner timespec's field
// order; this is only a correct time ordering if tv_nsec is always
// normalized to [0, NSEC_PER_SEC) — appears to hold for values built in
// this file, but confirm for any externally constructed timespec.
#[derive(Copy, Clone, Ord, PartialOrd, Eq, PartialEq, Hash, Debug)]
struct Timespec {
t: timespec,
}
impl Timespec {
/// The zero instant (0 s, 0 ns); usable in const context.
const fn zero() -> Timespec {
Timespec {
t: timespec { tv_sec: 0, tv_nsec: 0 },
}
}
/// Difference `self - other` as a Duration.
/// Returns `Ok(d)` when `self >= other`, otherwise `Err(other - self)`
/// so the caller still learns the magnitude.
fn sub_timespec(&self, other: &Timespec) -> Result<Duration, Duration> {
if self >= other {
// When nanoseconds would underflow, borrow one second and add
// NSEC_PER_SEC to the nanosecond side.
Ok(if self.t.tv_nsec >= other.t.tv_nsec {
Duration::new((self.t.tv_sec - other.t.tv_sec) as u64,
(self.t.tv_nsec - other.t.tv_nsec) as u32)
} else {
Duration::new((self.t.tv_sec - 1 - other.t.tv_sec) as u64,
self.t.tv_nsec as u32 + (NSEC_PER_SEC as u32) -
other.t.tv_nsec as u32)
})
} else {
// Negative difference: compute the swapped subtraction and flip
// the Result variant.
match other.sub_timespec(self) {
Ok(d) => Err(d),
Err(d) => Ok(d),
}
}
}
/// `self + other`, or `None` on overflow of the seconds field.
fn checked_add_duration(&self, other: &Duration) -> Option<Timespec> {
let mut secs = other
.as_secs()
.try_into() // <- target type would be `libc::time_t`
.ok()
.and_then(|secs| self.t.tv_sec.checked_add(secs))?;
// Nano calculations can't overflow because nanos are <1B which fit
// in a u32.
let mut nsec = other.subsec_nanos() + self.t.tv_nsec as u32;
if nsec >= NSEC_PER_SEC as u32 {
// Carry the overflowed nanoseconds into the seconds field.
nsec -= NSEC_PER_SEC as u32;
secs = secs.checked_add(1)?;
}
Some(Timespec {
t: timespec {
tv_sec: secs,
tv_nsec: nsec as _,
},
})
}
/// `self - other`, or `None` on underflow of the seconds field.
fn checked_sub_duration(&self, other: &Duration) -> Option<Timespec> {
let mut secs = other
.as_secs()
.try_into() // <- target type would be `libc::time_t`
.ok()
.and_then(|secs| self.t.tv_sec.checked_sub(secs))?;
// Similar to above, nanos can't overflow.
let mut nsec = self.t.tv_nsec as i32 - other.subsec_nanos() as i32;
if nsec < 0 {
// Borrow one second to keep nanoseconds non-negative.
nsec += NSEC_PER_SEC as i32;
secs = secs.checked_sub(1)?;
}
Some(Timespec {
t: timespec {
tv_sec: secs,
tv_nsec: nsec as _,
},
})
}
}
/// Monotonic-clock timestamp; `Instant::now` below reads `CLOCK_MONOTONIC`.
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct Instant {
    t: Timespec,
}
impl Instant {
    /// Captures the current reading of the monotonic clock.
    pub fn now() -> Instant {
        let t = now(CLOCK_MONOTONIC);
        Instant { t }
    }
    /// The zero instant (both fields 0); usable in const contexts.
    pub const fn zero() -> Instant {
        Instant { t: Timespec::zero() }
    }
    /// CLOCK_MONOTONIC does not go backwards on Linux-like systems.
    pub fn actually_monotonic() -> bool { true } // for LINUX-LIKE SYSTEM
    /// `self - other`, or `None` when `other` is later than `self`.
    pub fn checked_sub_instant(&self, other: &Instant) -> Option<Duration> {
        match self.t.sub_timespec(&other.t) {
            Ok(d) => Some(d),
            Err(_) => None,
        }
    }
    /// `self + other`, or `None` on overflow.
    pub fn checked_add_duration(&self, other: &Duration) -> Option<Instant> {
        self.t.checked_add_duration(other).map(|t| Instant { t })
    }
    /// `self - other`, or `None` on underflow.
    pub fn checked_sub_duration(&self, other: &Duration) -> Option<Instant> {
        self.t.checked_sub_duration(other).map(|t| Instant { t })
    }
}
/// Reads the clock identified by `clock` via the raw `clock_gettime` syscall.
///
/// Panics if the syscall returns an error (the result is `unwrap`ped).
fn now(clock: clockid_t) -> Timespec {
    use syscalls::SYS_clock_gettime;
    let mut t = Timespec {
        t: timespec {
            tv_sec: 0,
            tv_nsec: 0,
        }
    };
    // TODO: wrap this
    // SAFETY: the kernel writes a `timespec` through the second argument;
    // `&mut t.t` is a valid, correctly-sized destination for this call.
    unsafe {
        syscall2(SYS_clock_gettime, clock as _, &mut t.t as *mut _ as _).unwrap();
    }
    t
}
// Smoke test: prints the current CLOCK_MONOTONIC reading; it passes as long
// as the syscall succeeds (no assertion is made on the value itself).
#[cfg(test)]
mod test {
    use crate::constants::CLOCK_MONOTONIC;
    #[test]
    fn now() {
        println!("{:?}", super::now(CLOCK_MONOTONIC))
    }
}
| true |
0793de86af6a541d7792ca3a8c431615250ec4f5
|
Rust
|
nbardiuk/rlox
|
/src/bin/interpreter/parser.rs
|
UTF-8
| 39,386 | 3.453125 | 3 |
[] |
no_license
|
use crate::ast::Expr;
use crate::ast::Stmt;
use crate::lox::Lox;
use crate::token::Literal;
use crate::token::Token;
use crate::token::TokenType;
/// Maximum number of call arguments / function parameters the parser accepts.
const MAX_ARGS: usize = 255;
/// Marker error returned when parsing fails; the actual diagnostic has
/// already been reported through `Lox` by the time this value is produced.
#[derive(Debug)]
struct ParserError {}
/// Recursive-descent parser producing `Stmt`/`Expr` trees from a token stream.
pub struct Parser<'a> {
    tokens: Vec<Token>, // full token stream, terminated by an EOF token
    current: usize,     // index of the next token to consume
    lox: &'a mut Lox,   // error sink for parse diagnostics
}
impl<'a> Parser<'a> {
    /// Creates a parser over `tokens`; diagnostics are reported through `lox`.
    pub fn new(lox: &'a mut Lox, tokens: Vec<Token>) -> Self {
        Self {
            tokens,
            current: 0,
            lox,
        }
    }
    /// Parses the whole stream into statements. Declarations that failed to
    /// parse are dropped (their errors were already reported via `Lox`).
    pub fn parse(&mut self) -> Vec<Stmt> {
        let mut statements = vec![];
        while !self.is_at_end() {
            if let Some(statement) = self.declaration() {
                statements.push(statement);
            }
        }
        statements
    }
    /// declaration → classDecl | funDecl | varDecl | statement ;
    /// On failure, skips ahead to the next statement boundary and yields None.
    fn declaration(&mut self) -> Option<Stmt> {
        use TokenType::*;
        let statement = if self.matches(&[Class]) {
            self.class_declaration()
        } else if self.matches(&[Fun]) {
            self.function("function")
        } else if self.matches(&[Var]) {
            self.var_declaration()
        } else {
            self.statement()
        };
        if statement.is_err() {
            self.syncronize();
        }
        statement.ok()
    }
    /// Parses a function or method; `kind` is used only in error messages.
    /// Rejects parameter lists longer than MAX_ARGS.
    fn function(&mut self, kind: &str) -> Result<Stmt, ParserError> {
        use TokenType::*;
        let name = self.consume(Identifier, &format!("Expect {} name.", kind))?;
        self.consume(LeftParen, &format!("Expect '(' after {} name.", kind))?;
        let mut parameters = vec![];
        if !self.check(RightParen) {
            loop {
                if parameters.len() >= MAX_ARGS {
                    return self.error(
                        self.peek(),
                        &format!("Cannot have more than {} parameters.", MAX_ARGS),
                    );
                }
                parameters.push(*self.consume(Identifier, "Expect parameter name.")?);
                if !self.matches(&[Comma]) {
                    break;
                }
            }
        }
        self.consume(RightParen, "Expect ')' after parameters.")?;
        self.consume(LeftBrace, &format!("Expect '{{' before {} body.", kind))?;
        let body = self.block()?;
        Ok(Stmt::Function(name, parameters, body))
    }
    /// classDecl → "class" IDENT ( "<" IDENT )? "{" method* "}" ;
    fn class_declaration(&mut self) -> Result<Stmt, ParserError> {
        use TokenType::*;
        let name = self.consume(Identifier, "Expect class name.")?;
        let superclass = if self.matches(&[Less]) {
            Some(self.consume(Identifier, "Expect superclass name.")?)
        } else {
            None
        };
        self.consume(LeftBrace, "Expect '{' before class body.")?;
        let mut methods = vec![];
        while !self.check(RightBrace) && !self.is_at_end() {
            methods.push(self.function("method")?);
        }
        self.consume(RightBrace, "Expect '}' after class body.")?;
        Ok(Stmt::Class(name, superclass, methods))
    }
    /// varDecl → "var" IDENT ( "=" expression )? ";" ;
    fn var_declaration(&mut self) -> Result<Stmt, ParserError> {
        use TokenType::*;
        let name = self.consume(Identifier, "Expect variable name.")?;
        let initializer = if self.matches(&[Equal]) {
            Some(self.expression()?)
        } else {
            None
        };
        self.consume(Semicolon, "Expect ';' after variable declaration")?;
        Ok(Stmt::Var(name, initializer))
    }
    /// statement → forStmt | ifStmt | printStmt | returnStmt | whileStmt
    ///           | block | exprStmt ;
    fn statement(&mut self) -> Result<Stmt, ParserError> {
        use TokenType::*;
        if self.matches(&[For]) {
            self.for_statement()
        } else if self.matches(&[If]) {
            self.if_statement()
        } else if self.matches(&[Print]) {
            self.print_statement()
        } else if self.matches(&[Return]) {
            self.return_statement()
        } else if self.matches(&[While]) {
            self.while_statement()
        } else if self.matches(&[LeftBrace]) {
            Ok(Stmt::Block(self.block()?))
        } else {
            self.expression_statement()
        }
    }
    /// ifStmt → "if" "(" expression ")" statement ( "else" statement )? ;
    /// The optional else binds to the nearest if (dangling-else rule).
    fn if_statement(&mut self) -> Result<Stmt, ParserError> {
        use TokenType::*;
        self.consume(LeftParen, "Expect '(' after if.")?;
        let condition = self.expression()?;
        self.consume(RightParen, "Expect ')' after condition.")?;
        let then = Box::new(self.statement()?);
        let r#else = if self.matches(&[Else]) {
            Some(Box::new(self.statement()?))
        } else {
            None
        };
        Ok(Stmt::If(condition, then, r#else))
    }
    /// forStmt, desugared into `while`: initializer and increment are wrapped
    /// in blocks around the loop, and a missing condition becomes `true`.
    fn for_statement(&mut self) -> Result<Stmt, ParserError> {
        use TokenType::*;
        self.consume(LeftParen, "Expect '(' after for.")?;
        let initializer = if self.matches(&[Semicolon]) {
            None
        } else if self.matches(&[Var]) {
            Some(self.var_declaration()?)
        } else {
            Some(self.expression_statement()?)
        };
        let condition = if !self.check(Semicolon) {
            self.expression()?
        } else {
            Box::new(Expr::Literal(Box::new(Literal::Bool(true))))
        };
        self.consume(Semicolon, "Expect ';' after loop condition.")?;
        let increment = if !self.check(RightParen) {
            Some(self.expression()?)
        } else {
            None
        };
        self.consume(RightParen, "Expect ')' after for clauses.")?;
        let mut body = self.statement()?;
        if let Some(inc) = increment {
            // Run the increment after each iteration of the body.
            body = Stmt::Block(vec![body, Stmt::Expression(inc)]);
        }
        body = Stmt::While(condition, Box::new(body));
        if let Some(init) = initializer {
            // Scope the initializer around the whole loop.
            body = Stmt::Block(vec![init, body]);
        }
        Ok(body)
    }
    /// whileStmt → "while" "(" expression ")" statement ;
    fn while_statement(&mut self) -> Result<Stmt, ParserError> {
        use TokenType::*;
        self.consume(LeftParen, "Expect '(' after while.")?;
        let condition = self.expression()?;
        self.consume(RightParen, "Expect ')' after condition.")?;
        let body = Box::new(self.statement()?);
        Ok(Stmt::While(condition, body))
    }
    /// block → "{" declaration* "}" ; failed declarations are dropped.
    fn block(&mut self) -> Result<Vec<Stmt>, ParserError> {
        use TokenType::*;
        let mut statements = vec![];
        while !self.check(RightBrace) && !self.is_at_end() {
            if let Some(statement) = self.declaration() {
                statements.push(statement);
            }
        }
        self.consume(RightBrace, "Expect '}' after block")?;
        Ok(statements)
    }
    /// printStmt → "print" expression ";" ;
    fn print_statement(&mut self) -> Result<Stmt, ParserError> {
        use TokenType::*;
        let value = self.expression()?;
        self.consume(Semicolon, "Expect ';' after value.")?;
        Ok(Stmt::Print(value))
    }
    /// returnStmt → "return" expression? ";" ; keeps the keyword token for
    /// error reporting later.
    fn return_statement(&mut self) -> Result<Stmt, ParserError> {
        use TokenType::*;
        let keyword = self.previous();
        let value = if !self.check(Semicolon) {
            Some(self.expression()?)
        } else {
            None
        };
        self.consume(Semicolon, "Expect ';' after return value.")?;
        Ok(Stmt::Return(keyword, value))
    }
    /// exprStmt → expression ";" ;
    fn expression_statement(&mut self) -> Result<Stmt, ParserError> {
        use TokenType::*;
        let value = self.expression()?;
        self.consume(Semicolon, "Expect ';' after value.")?;
        Ok(Stmt::Expression(value))
    }
    /// expression → assignment ;
    fn expression(&mut self) -> Result<Box<Expr>, ParserError> {
        self.assignment()
    }
    /// assignment → ( call "." )? IDENT "=" assignment | logic_or ;
    /// Parses the l-value as an expression first, then validates it is a
    /// variable or property access when an '=' follows (right-associative).
    fn assignment(&mut self) -> Result<Box<Expr>, ParserError> {
        use TokenType::*;
        let expr = self.or()?;
        if self.matches(&[Equal]) {
            if let Expr::Variable(name) = *expr {
                Ok(Box::new(Expr::Asign(name, self.assignment()?)))
            } else if let Expr::Get(object, name) = *expr {
                Ok(Box::new(Expr::Set(object, name, self.assignment()?)))
            } else {
                self.error(self.previous(), "Invalid assignment target.")
            }
        } else {
            Ok(expr)
        }
    }
    /// logic_or → logic_and ( "or" logic_and )* ; left-associative.
    fn or(&mut self) -> Result<Box<Expr>, ParserError> {
        use TokenType::*;
        let mut expr = self.and()?;
        while self.matches(&[Or]) {
            let operator = self.previous();
            let right = self.and()?;
            expr = Box::new(Expr::Logical(expr, operator, right));
        }
        Ok(expr)
    }
    /// logic_and → equality ( "and" equality )* ; left-associative.
    fn and(&mut self) -> Result<Box<Expr>, ParserError> {
        use TokenType::*;
        let mut expr = self.equality()?;
        while self.matches(&[And]) {
            let operator = self.previous();
            let right = self.equality()?;
            expr = Box::new(Expr::Logical(expr, operator, right));
        }
        Ok(expr)
    }
    /// equality → comparison ( ( "!=" | "==" ) comparison )* ;
    fn equality(&mut self) -> Result<Box<Expr>, ParserError> {
        use TokenType::*;
        let mut expr = self.comparison()?;
        while self.matches(&[BangEqual, EqualEqual]) {
            let operator = self.previous();
            let right = self.comparison()?;
            expr = Box::new(Expr::Binary(expr, operator, right));
        }
        Ok(expr)
    }
    /// comparison → addition ( ( ">" | ">=" | "<" | "<=" ) addition )* ;
    fn comparison(&mut self) -> Result<Box<Expr>, ParserError> {
        use TokenType::*;
        let mut expr = self.addition()?;
        while self.matches(&[Greater, GreaterEqual, Less, LessEqual]) {
            let operator = self.previous();
            let right = self.addition()?;
            expr = Box::new(Expr::Binary(expr, operator, right));
        }
        Ok(expr)
    }
    /// addition → multiplication ( ( "-" | "+" ) multiplication )* ;
    fn addition(&mut self) -> Result<Box<Expr>, ParserError> {
        use TokenType::*;
        let mut expr = self.multiplication()?;
        while self.matches(&[Minus, Plus]) {
            let operator = self.previous();
            let right = self.multiplication()?;
            expr = Box::new(Expr::Binary(expr, operator, right));
        }
        Ok(expr)
    }
    /// multiplication → unary ( ( "/" | "*" ) unary )* ;
    fn multiplication(&mut self) -> Result<Box<Expr>, ParserError> {
        use TokenType::*;
        let mut expr = self.unary()?;
        while self.matches(&[Slash, Star]) {
            let operator = self.previous();
            let right = self.unary()?;
            expr = Box::new(Expr::Binary(expr, operator, right));
        }
        Ok(expr)
    }
    /// unary → ( "!" | "-" ) unary | call ;
    fn unary(&mut self) -> Result<Box<Expr>, ParserError> {
        use TokenType::*;
        if self.matches(&[Bang, Minus]) {
            let operator = self.previous();
            let right = self.unary()?;
            return Ok(Box::new(Expr::Unary(operator, right)));
        }
        self.call()
    }
    /// call → primary ( "(" arguments? ")" | "." IDENT )* ;
    /// Loops so chained calls and property accesses nest left-to-right.
    fn call(&mut self) -> Result<Box<Expr>, ParserError> {
        use TokenType::*;
        let mut expr = self.primary()?;
        loop {
            if self.matches(&[LeftParen]) {
                expr = self.finish_call(expr)?;
            } else if self.matches(&[Dot]) {
                let name = self.consume(Identifier, "Expect property name after '.'.")?;
                expr = Box::new(Expr::Get(expr, name));
            } else {
                break;
            }
        }
        Ok(expr)
    }
    /// Parses the argument list after '(' has been consumed; rejects more
    /// than MAX_ARGS arguments and keeps the ')' token for error reporting.
    fn finish_call(&mut self, callee: Box<Expr>) -> Result<Box<Expr>, ParserError> {
        use TokenType::*;
        let mut args = vec![];
        if !self.check(RightParen) {
            loop {
                if args.len() >= MAX_ARGS {
                    return self.error(
                        self.peek(),
                        &format!("Cannot have more than {} arguments.", MAX_ARGS),
                    );
                }
                args.push(*self.expression()?);
                if !self.matches(&[Comma]) {
                    break;
                }
            }
        }
        let paren = self.consume(RightParen, "Expect ')' after arguments.")?;
        Ok(Box::new(Expr::Call(callee, paren, args)))
    }
    /// primary → literal | "(" expression ")" | "super" "." IDENT | "this"
    ///         | IDENTIFIER ;
    fn primary(&mut self) -> Result<Box<Expr>, ParserError> {
        use TokenType::*;
        if self.matches(&[False, True, Nil, Number, String]) {
            return Ok(Box::new(Expr::Literal(Box::new(self.previous().literal))));
        }
        if self.matches(&[LeftParen]) {
            let expr = self.expression()?;
            self.consume(RightParen, "Expect ')' after expression.")?;
            return Ok(Box::new(Expr::Grouping(expr)));
        }
        if self.matches(&[Super]) {
            let keyword = self.previous();
            self.consume(Dot, "Expect '.' after 'super'.")?;
            let method = self.consume(Identifier, "Expect superclass method name.")?;
            return Ok(Box::new(Expr::Super(keyword, method)));
        }
        if self.matches(&[This]) {
            return Ok(Box::new(Expr::This(self.previous())));
        }
        if self.matches(&[Identifier]) {
            return Ok(Box::new(Expr::Variable(self.previous())));
        }
        self.error(self.peek(), "Expect expression.")
    }
    /// Error recovery: discards tokens until a likely statement boundary —
    /// just after a ';' or right before a statement keyword.
    fn syncronize(&mut self) {
        use TokenType::*;
        self.advance();
        while !self.is_at_end() {
            if self.previous().typ == Semicolon {
                return;
            };
            match self.peek().typ {
                Class | Fun | Var | For | If | While | Print | Return => return,
                _ => {}
            }
            self.advance();
        }
    }
    /// Consumes the next token if it has type `typ`; otherwise reports
    /// `message` at the current token and fails.
    fn consume(&mut self, typ: TokenType, message: &str) -> Result<Box<Token>, ParserError> {
        if self.check(typ) {
            return Ok(self.advance());
        }
        self.error(self.peek(), message)
    }
    /// Reports `message` at `token` through `Lox` and returns a ParserError.
    fn error<T>(&mut self, token: Box<Token>, message: &str) -> Result<T, ParserError> {
        self.lox.error_token(&token, message);
        Err(ParserError {})
    }
    /// The most recently consumed token (clone).
    fn previous(&self) -> Box<Token> {
        Box::new(self.tokens[self.current - 1].clone()) // TODO use get instead of unchecked indexing
    }
    /// Consumes the next token if it matches any of `types`.
    fn matches(&mut self, types: &[TokenType]) -> bool {
        for typ in types {
            if self.check(*typ) {
                self.advance();
                return true;
            }
        }
        false
    }
    /// True when the next token has type `typ` (without consuming it).
    fn check(&self, typ: TokenType) -> bool {
        !self.is_at_end() && self.peek().typ == typ
    }
    /// Consumes and returns the next token (stops advancing at EOF).
    fn advance(&mut self) -> Box<Token> {
        if !self.is_at_end() {
            self.current += 1
        }
        self.previous()
    }
    /// True when the next token is the EOF sentinel.
    fn is_at_end(&self) -> bool {
        self.peek().typ == TokenType::EOF
    }
    /// The next token to be consumed (clone, not consumed).
    fn peek(&self) -> Box<Token> {
        Box::new(self.tokens[self.current].clone()) // TODO use get instead of unchecked indexing
    }
}
// Parser test-suite: each case runs real source text through the scanner and
// parser, then compares the reported diagnostics followed by the rendered
// s-expression form of every successfully parsed statement.
#[cfg(test)]
mod spec {
    use super::*;
    use std::cell::RefCell;
    use std::rc::Rc;
    // Test harness: scans and parses `source`, returning error lines written
    // to the Lox sink first, then the string form of each parsed statement.
    fn parse<'a>(source: &'a str) -> Vec<String> {
        use crate::scanner::Scanner;
        let out = Rc::new(RefCell::new(vec![]));
        let mut lox = Lox::new_t(out.clone());
        let mut scanner = Scanner::new(source);
        let tokens = scanner.scan_tokens(&mut lox);
        let mut parser = Parser::new(&mut lox, tokens);
        let mut tree = parser
            .parse()
            .iter()
            .map(|p| p.to_string())
            .collect::<Vec<_>>();
        let v = out.borrow().to_vec();
        let mut output = std::string::String::from_utf8(v)
            .unwrap()
            .lines()
            .map(|s| s.to_string())
            .collect::<Vec<_>>();
        output.append(&mut tree);
        output
    }
    #[test]
    fn not_expression() {
        assert_eq!(
            parse("anything\nnot valid"),
            vec!["[line 2] Error at 'not': Expect ';' after value."]
        );
    }
    #[test]
    fn primary() {
        assert_eq!(parse("1321.31;"), vec!["(expr 1321.31)"]);
        assert_eq!(parse("\"asdf 123\";"), vec!["(expr \"asdf 123\")"]);
        assert_eq!(parse("true;"), vec!["(expr true)"]);
        assert_eq!(parse("false;"), vec!["(expr false)"]);
        assert_eq!(parse("nil;"), vec!["(expr nil)"]);
        assert_eq!(parse("variable_name;"), vec!["(expr variable_name)"]);
    }
    #[test]
    fn uniary_bang() {
        assert_eq!(parse("!true;"), vec!["(expr (! true))"]);
        assert_eq!(parse("!!false;"), vec!["(expr (! (! false)))"]);
    }
    #[test]
    fn unary_minus() {
        assert_eq!(parse("-1;"), vec!["(expr (- 1))"]);
        assert_eq!(parse("--1;"), vec!["(expr (- (- 1)))"]);
    }
    #[test]
    fn multiplication_slash() {
        assert_eq!(parse("2/-3;"), vec!["(expr (/ 2 (- 3)))"]);
        assert_eq!(parse("-4/2;"), vec!["(expr (/ (- 4) 2))"]);
        assert_eq!(parse("1/2/3;"), vec!["(expr (/ (/ 1 2) 3))"]);
        assert_eq!(
            parse("1/"),
            vec!["[line 1] Error at end: Expect expression."]
        );
        assert_eq!(
            parse("/1"),
            vec!["[line 1] Error at '/': Expect expression."]
        );
    }
    #[test]
    fn multiplication_star() {
        assert_eq!(parse("2*-3;"), vec!["(expr (* 2 (- 3)))"]);
        assert_eq!(parse("-4*2;"), vec!["(expr (* (- 4) 2))"]);
        assert_eq!(parse("1*2*3;"), vec!["(expr (* (* 1 2) 3))"]);
        assert_eq!(
            parse("1*"),
            vec!["[line 1] Error at end: Expect expression."]
        );
        assert_eq!(
            parse("*1"),
            vec!["[line 1] Error at '*': Expect expression."]
        );
    }
    #[test]
    fn multiplication() {
        assert_eq!(
            parse("1*2/3*4/5;"),
            vec!["(expr (/ (* (/ (* 1 2) 3) 4) 5))"]
        );
    }
    #[test]
    fn addition_plus() {
        assert_eq!(parse("1+-2;"), vec!["(expr (+ 1 (- 2)))"]);
        assert_eq!(parse("-1+2;"), vec!["(expr (+ (- 1) 2))"]);
        assert_eq!(parse("1+2+3;"), vec!["(expr (+ (+ 1 2) 3))"]);
        assert_eq!(parse("1*2 + 3*4;"), vec!["(expr (+ (* 1 2) (* 3 4)))"]);
        assert_eq!(parse("1 + 2/3 + 4;"), vec!["(expr (+ (+ 1 (/ 2 3)) 4))"]);
        assert_eq!(
            parse("1+"),
            vec!["[line 1] Error at end: Expect expression."]
        );
        assert_eq!(
            parse("+1"),
            vec!["[line 1] Error at '+': Expect expression."]
        );
    }
    #[test]
    fn addition_minus() {
        assert_eq!(parse("1--2;"), vec!["(expr (- 1 (- 2)))"]);
        assert_eq!(parse("-1-2;"), vec!["(expr (- (- 1) 2))"]);
        assert_eq!(parse("1-2-3;"), vec!["(expr (- (- 1 2) 3))"]);
        assert_eq!(parse("1*2 - 3*4;"), vec!["(expr (- (* 1 2) (* 3 4)))"]);
        assert_eq!(parse("1 - 2/3 - 4;"), vec!["(expr (- (- 1 (/ 2 3)) 4))"]);
        assert_eq!(
            parse("1-"),
            vec!["[line 1] Error at end: Expect expression."]
        );
    }
    #[test]
    fn comparison_greater() {
        assert_eq!(parse("1>2;"), vec!["(expr (> 1 2))"]);
        assert_eq!(parse("1>2>3;"), vec!["(expr (> (> 1 2) 3))"]);
        assert_eq!(parse("1+2>3*4;"), vec!["(expr (> (+ 1 2) (* 3 4)))"]);
        assert_eq!(
            parse("1>"),
            vec!["[line 1] Error at end: Expect expression."]
        );
        assert_eq!(
            parse(">1"),
            vec!["[line 1] Error at '>': Expect expression."]
        );
    }
    #[test]
    fn comparison_greater_eq() {
        assert_eq!(parse("1>=2;"), vec!["(expr (>= 1 2))"]);
        assert_eq!(parse("1>=2>=3;"), vec!["(expr (>= (>= 1 2) 3))"]);
        assert_eq!(parse("1+2>=3*4;"), vec!["(expr (>= (+ 1 2) (* 3 4)))"]);
        assert_eq!(
            parse("1>="),
            vec!["[line 1] Error at end: Expect expression."]
        );
        assert_eq!(
            parse(">=1"),
            vec!["[line 1] Error at '>=': Expect expression."]
        );
    }
    #[test]
    fn comparison_less() {
        assert_eq!(parse("1<2;"), vec!["(expr (< 1 2))"]);
        assert_eq!(parse("1<2<3;"), vec!["(expr (< (< 1 2) 3))"]);
        assert_eq!(parse("1+2<3*4;"), vec!["(expr (< (+ 1 2) (* 3 4)))"]);
        assert_eq!(
            parse("1<"),
            vec!["[line 1] Error at end: Expect expression."]
        );
        assert_eq!(
            parse("<1"),
            vec!["[line 1] Error at '<': Expect expression."]
        );
    }
    #[test]
    fn comparison_less_eq() {
        assert_eq!(parse("1<=2;"), vec!["(expr (<= 1 2))"]);
        assert_eq!(parse("1<=2<=3;"), vec!["(expr (<= (<= 1 2) 3))"]);
        assert_eq!(parse("1+2<=3*4;"), vec!["(expr (<= (+ 1 2) (* 3 4)))"]);
        assert_eq!(
            parse("1<="),
            vec!["[line 1] Error at end: Expect expression."]
        );
        assert_eq!(
            parse("<=1"),
            vec!["[line 1] Error at '<=': Expect expression."]
        );
    }
    #[test]
    fn equality_eq() {
        assert_eq!(parse("1==2;"), vec!["(expr (== 1 2))"]);
        assert_eq!(parse("1==2==3;"), vec!["(expr (== (== 1 2) 3))"]);
        assert_eq!(
            parse("1+2==3<=4==5*6;"),
            vec!["(expr (== (== (+ 1 2) (<= 3 4)) (* 5 6)))"]
        );
        assert_eq!(
            parse("1=="),
            vec!["[line 1] Error at end: Expect expression."]
        );
        assert_eq!(
            parse("==1"),
            vec!["[line 1] Error at '==': Expect expression."]
        );
    }
    #[test]
    fn equality_not_eq() {
        assert_eq!(parse("1!=2;"), vec!["(expr (!= 1 2))"]);
        assert_eq!(parse("1!=2!=3;"), vec!["(expr (!= (!= 1 2) 3))"]);
        assert_eq!(
            parse("1+2!=3<=4!=5*6;"),
            vec!["(expr (!= (!= (+ 1 2) (<= 3 4)) (* 5 6)))"]
        );
        assert_eq!(
            parse("1!="),
            vec!["[line 1] Error at end: Expect expression."]
        );
        assert_eq!(
            parse("!=1"),
            vec!["[line 1] Error at '!=': Expect expression."]
        );
    }
    #[test]
    fn grouping() {
        assert_eq!(parse("(1);"), vec!["(expr (group 1))"]);
        assert_eq!(parse("1 + (2 + 3);"), vec!["(expr (+ 1 (group (+ 2 3))))"]);
        assert_eq!(parse("1 / (2 - 3);"), vec!["(expr (/ 1 (group (- 2 3))))"]);
        assert_eq!(parse("-(1 - 2);"), vec!["(expr (- (group (- 1 2))))"]);
        assert_eq!(parse("!(1 >= 2);"), vec!["(expr (! (group (>= 1 2))))"]);
        assert_eq!(
            parse("("),
            vec!["[line 1] Error at end: Expect expression."]
        );
        assert_eq!(
            parse(")"),
            vec!["[line 1] Error at ')': Expect expression."]
        );
        assert_eq!(
            parse("1)"),
            vec!["[line 1] Error at ')': Expect ';' after value."]
        );
        assert_eq!(
            parse("(1"),
            vec!["[line 1] Error at end: Expect ')' after expression."]
        );
    }
    #[test]
    fn expression() {
        assert_eq!(parse("1!=2;"), vec!["(expr (!= 1 2))"]);
        assert_eq!(
            parse("1+1;\n2-3;"),
            vec!["(expr (+ 1 1))", "(expr (- 2 3))"]
        );
        assert_eq!(
            parse("1"),
            vec!["[line 1] Error at end: Expect ';' after value."]
        );
        assert_eq!(
            parse("1;2"),
            vec!["[line 1] Error at end: Expect ';' after value.", "(expr 1)"]
        );
    }
    #[test]
    fn print() {
        assert_eq!(parse("print 1!=2;"), vec!["(print (!= 1 2))"]);
        assert_eq!(
            parse("print 1;print 2-3;"),
            vec!["(print 1)", "(print (- 2 3))"]
        );
        assert_eq!(
            parse("print"),
            vec!["[line 1] Error at end: Expect expression."]
        );
        assert_eq!(
            parse("print 1;2"),
            vec![
                "[line 1] Error at end: Expect ';' after value.",
                "(print 1)"
            ]
        );
    }
    #[test]
    fn declaration_var() {
        assert_eq!(
            parse("var a = 1; var b = 1 > 2; var n;"),
            vec!["(def a 1)", "(def b (> 1 2))", "(def n)"]
        );
        assert_eq!(
            parse(
                "var snake_case = true;
                 var lisp-case = false;
                 var camelCase = true;
                 var 0name = false;
                 var _0name = true;
                "
            ),
            vec![
                "[line 2] Error at '-': Expect ';' after variable declaration",
                "[line 4] Error at '0': Expect variable name.",
                "(def snake_case true)",
                "(def camelCase true)",
                "(def _0name true)"
            ]
        );
    }
    #[test]
    fn assignmen_var() {
        assert_eq!(
            parse("a=1; b=1>2; a=a;"),
            vec![
                "(expr (set! a 1))",
                "(expr (set! b (> 1 2)))",
                "(expr (set! a a))"
            ]
        );
        assert_eq!(parse("a = b = c;"), vec!["(expr (set! a (set! b c)))"]);
        assert_eq!(
            parse(
                "a + (b = 1);
                 (a = 1) + b;"
            ),
            vec![
                "(expr (+ a (group (set! b 1))))",
                "(expr (+ (group (set! a 1)) b))"
            ]
        );
        assert_eq!(
            parse(
                "1 = a;
                 \"a\" = 1;
                 a = 1 = b;
                "
            ),
            vec![
                "[line 1] Error at '=': Invalid assignment target.",
                "[line 2] Error at '=': Invalid assignment target.",
                "[line 3] Error at '=': Invalid assignment target."
            ]
        );
    }
    #[test]
    fn blocks() {
        assert_eq!(parse("{}{}{{}}"), vec!["(do )", "(do )", "(do (do ))"]);
        assert_eq!(
            parse("{var a=1;print a;}{a=2;}"),
            vec!["(do (def a 1) (print a))", "(do (expr (set! a 2)))"]
        );
        assert_eq!(
            parse("a = {b = 1};"),
            vec!["[line 1] Error at '{': Expect expression."]
        );
        assert_eq!(
            parse("{\n{"),
            vec![
                "[line 2] Error at end: Expect '}' after block",
                "[line 2] Error at end: Expect '}' after block"
            ]
        );
    }
    #[test]
    fn ifs() {
        assert_eq!(
            parse("if (condition) print when_true; else print when_false;"),
            vec!["(if condition (print when_true) (print when_false))"]
        );
        assert_eq!(
            parse("if (a) if (b) 1; else 2;"),
            vec!["(if a (if b (expr 1) (expr 2)))"]
        );
        assert_eq!(
            parse("if (a) { if (b) when_true; } else when_false;"),
            vec!["(if a (do (if b (expr when_true))) (expr when_false))"]
        );
        assert_eq!(
            parse("if a 1; else 2;"),
            vec![
                "[line 1] Error at 'a': Expect '(' after if.",
                "[line 1] Error at 'else': Expect expression."
            ]
        );
        assert_eq!(
            parse("if (a 1; else 2;"),
            vec![
                "[line 1] Error at '1': Expect ')' after condition.",
                "[line 1] Error at 'else': Expect expression."
            ]
        );
        assert_eq!(
            parse("if (a); 1; else 2;"),
            vec![
                "[line 1] Error at ';': Expect expression.",
                "[line 1] Error at 'else': Expect expression.",
                "(expr 1)"
            ]
        );
    }
    #[test]
    fn logical() {
        assert_eq!(parse("1 or 2 or 3;"), vec!["(expr (or (or 1 2) 3))"]);
        assert_eq!(parse("1 and 2 and 3;"), vec!["(expr (and (and 1 2) 3))"]);
        assert_eq!(
            parse("1 or 2 and 3 or 4;"),
            vec!["(expr (or (or 1 (and 2 3)) 4))"]
        );
        assert_eq!(
            parse("1 and 2 or 3 and 4;"),
            vec!["(expr (or (and 1 2) (and 3 4)))"]
        );
        assert_eq!(
            parse("1 > 2 or 3 < 4;"),
            vec!["(expr (or (> 1 2) (< 3 4)))"]
        );
        assert_eq!(
            parse("1 > 2 and 3 < 4;"),
            vec!["(expr (and (> 1 2) (< 3 4)))"]
        );
    }
    #[test]
    fn whiles() {
        assert_eq!(parse("while (true) {}"), vec!["(while true (do ))"]);
        assert_eq!(
            parse("while (a or b) print c;"),
            vec!["(while (or a b) (print c))"]
        );
        assert_eq!(
            parse("while true {}"),
            vec!["[line 1] Error at 'true': Expect '(' after while."]
        );
        assert_eq!(
            parse("while (true {}"),
            vec!["[line 1] Error at '{': Expect ')' after condition."]
        );
        assert_eq!(
            parse("while (print 1;) {}"),
            vec![
                "[line 1] Error at 'print': Expect expression.",
                "[line 1] Error at ')': Expect expression."
            ]
        );
    }
    #[test]
    fn fors() {
        // `for` has no dedicated AST node: it desugars into while + blocks.
        assert_eq!(parse("for (;;) {}"), vec!["(while true (do ))"]);
        assert_eq!(
            parse("for (var i=1;;) {}"),
            vec![
                "(do (def i 1) \
                 (while true (do )))"
            ]
        );
        assert_eq!(parse("for (;a<b;) {}"), vec!["(while (< a b) (do ))"]);
        assert_eq!(
            parse("for (;;c=c+1) {}"),
            vec![
                "(while true \
                 (do (do ) \
                 (expr (set! c (+ c 1)))))"
            ]
        );
        assert_eq!(
            parse("for (var i=0;i<10;i=i+1) print i;"),
            vec![
                "(do (def i 0) \
                 (while (< i 10) \
                 (do (print i) \
                 (expr (set! i (+ i 1))))))"
            ]
        );
        assert_eq!(
            parse("for () {}"),
            vec!["[line 1] Error at ')': Expect expression."]
        );
        assert_eq!(
            parse("for (1) {}"),
            vec!["[line 1] Error at ')': Expect ';' after value."]
        );
        assert_eq!(
            parse("for (1;) {}"),
            vec!["[line 1] Error at ')': Expect expression."]
        );
        assert_eq!(
            parse("for (1;2) {}"),
            vec!["[line 1] Error at ')': Expect ';' after loop condition."]
        );
        assert_eq!(
            parse("for (1;2;3;) {}"),
            vec![
                "[line 1] Error at ';': Expect ')' after for clauses.",
                "[line 1] Error at ')': Expect expression."
            ]
        );
    }
    #[test]
    fn function_call() {
        assert_eq!(parse("a();"), vec!["(expr (a ))"]);
        assert_eq!(parse("a(1,2,3,4);"), vec!["(expr (a 1 2 3 4))"]);
        assert_eq!(parse("print a(1,2);"), vec!["(print (a 1 2))"]);
        assert_eq!(parse("a(1)(2);"), vec!["(expr ((a 1) 2))"]);
        assert_eq!(parse("\"a\"(1);"), vec!["(expr (\"a\" 1))"]);
        assert_eq!(parse("1(a);"), vec!["(expr (1 a))"]);
        assert_eq!(parse("i=1(a);"), vec!["(expr (set! i (1 a)))"]);
        assert_eq!(parse("(i=1)(a);"), vec!["(expr ((group (set! i 1)) a))"]);
        assert_eq!(
            parse("a(1,);"),
            vec!["[line 1] Error at ')': Expect expression."]
        );
        assert_eq!(
            parse("a(,1);"),
            vec!["[line 1] Error at ',': Expect expression."]
        );
        // Exactly MAX_ARGS (255) arguments parses successfully.
        assert_eq!(parse("a(
1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
1,1,1,1,1);"), vec!["(expr (a \
1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 \
1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 \
1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 \
1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 \
1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 \
1 1 1 1 1))"]);
        // One more than MAX_ARGS is rejected at the 256th argument.
        assert_eq!(parse("a(
1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
1,1,1,1,1,1
);"), vec!["[line 7] Error at '1': Cannot have more than 255 arguments."]);
    }
    #[test]
    fn function_declaration() {
        assert_eq!(parse("fun a(){}"), vec!["(defn a () )"]);
        assert_eq!(parse("fun a(a,b,c){}"), vec!["(defn a (a b c) )"]);
        assert_eq!(
            parse("fun a(){a; b;}"),
            vec!["(defn a () (expr a) (expr b))"]
        );
        assert_eq!(
            parse("fun a(1){}"),
            vec!["[line 1] Error at '1': Expect parameter name."]
        );
        assert_eq!(
            parse("fun a(a b){}"),
            vec!["[line 1] Error at 'b': Expect ')' after parameters."]
        );
        assert_eq!(
            parse("fun (){}"),
            vec!["[line 1] Error at '(': Expect function name."]
        );
        assert_eq!(
            parse("fun a{}"),
            vec!["[line 1] Error at '{': Expect '(' after function name."]
        );
        assert_eq!(
            parse("fun a();"),
            vec!["[line 1] Error at ';': Expect '{' before function body."]
        );
        // Exactly MAX_ARGS (255) parameters parses successfully.
        assert_eq!(parse("fun a(
b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,
b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,
b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,
b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,
b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,
b,b,b,b,b){}"), vec!["(defn a (\
b b b b b b b b b b b b b b b b b b b b b b b b b b b b b b b b b b b b b b b b b b b b b b b b b b \
b b b b b b b b b b b b b b b b b b b b b b b b b b b b b b b b b b b b b b b b b b b b b b b b b b \
b b b b b b b b b b b b b b b b b b b b b b b b b b b b b b b b b b b b b b b b b b b b b b b b b b \
b b b b b b b b b b b b b b b b b b b b b b b b b b b b b b b b b b b b b b b b b b b b b b b b b b \
b b b b b b b b b b b b b b b b b b b b b b b b b b b b b b b b b b b b b b b b b b b b b b b b b b \
b b b b b) )"]);
        // One more than MAX_ARGS is rejected at the 256th parameter.
        assert_eq!(parse("fun a(
b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,
b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,
b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,
b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,
b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,b,
b,b,b,b,b,b){}"), vec!["[line 7] Error at 'b': Cannot have more than 255 parameters."]);
    }
    #[test]
    fn return_statement() {
        assert_eq!(parse("return;"), vec!["(return)"]);
        assert_eq!(parse("return 1+1;"), vec!["(return (+ 1 1))"]);
        assert_eq!(
            parse("return"),
            vec!["[line 1] Error at end: Expect expression."]
        );
        assert_eq!(
            parse("return 1+1"),
            vec!["[line 1] Error at end: Expect ';' after return value."]
        );
    }
    #[test]
    fn class() {
        assert_eq!(parse("class a {}"), vec!["(class a )"]);
        assert_eq!(parse("class a < b {}"), vec!["(class a b )"]);
        assert_eq!(
            parse("class a < b {c(){}}"),
            vec!["(class a b (defn c () ))"]
        );
        assert_eq!(
            parse("class a {f(){}g(){}}"),
            vec!["(class a (defn f () ) (defn g () ))"]
        );
        assert_eq!(
            parse(
                "class Breakfast {
                     cook() {
                         print \"Eggs a-fryin'!\";
                     }
                     serve(who) {
                         print \"Enjoy your breakfast, \" + who + \".\";
                     }
                 }"
            ),
            vec![
                "(class Breakfast \
                 (defn cook () \
                 (print \"Eggs a-fryin'!\")) \
                 (defn serve (who) \
                 (print (+ (+ \"Enjoy your breakfast, \" who) \".\"))))"
            ]
        );
        assert_eq!(
            parse("class a < {}"),
            vec!["[line 1] Error at '{': Expect superclass name."]
        );
        assert_eq!(
            parse("class a < b < c {}"),
            vec!["[line 1] Error at '<': Expect '{' before class body."]
        );
        assert_eq!(
            parse("class a {var b = 1;}"),
            vec![
                "[line 1] Error at 'var': Expect method name.",
                "[line 1] Error at '}': Expect expression."
            ]
        );
        assert_eq!(
            parse("class {}"),
            vec!["[line 1] Error at '{': Expect class name."]
        );
        assert_eq!(
            parse("class class {}"),
            vec!["[line 1] Error at 'class': Expect class name."]
        );
        assert_eq!(
            parse("class a"),
            vec!["[line 1] Error at end: Expect '{' before class body."]
        );
    }
    #[test]
    fn property() {
        assert_eq!(
            parse("egg.scramble(3).with(cheddar);"),
            vec!["(expr ((get ((get egg :scramble) 3) :with) cheddar))"]
        );
        assert_eq!(
            parse("breakfast.omlette.filling.meat = ham;"),
            vec!["(expr (set (get (get breakfast :omlette) :filling) :meat ham))"]
        );
        assert_eq!(
            parse("a.;"),
            vec!["[line 1] Error at ';': Expect property name after '.'."]
        );
    }
    #[test]
    fn this() {
        assert_eq!(
            parse("class A {m(){return this;}}"),
            vec!["(class A (defn m () (return this)))"]
        );
        assert_eq!(parse("this.a = 1;"), vec!["(expr (set this :a 1))"]);
        assert_eq!(
            parse("this = 1;"),
            vec!["[line 1] Error at '=': Invalid assignment target."]
        );
        assert_eq!(
            parse("var this = 1;"),
            vec!["[line 1] Error at 'this': Expect variable name."]
        );
    }
    #[test]
    fn super_() {
        assert_eq!(parse("super.b(c);"), vec!["(expr ((get super :b) c))"]);
        assert_eq!(
            parse("print super;"),
            vec!["[line 1] Error at ';': Expect '.' after 'super'."]
        );
        assert_eq!(
            parse("super.2;"),
            vec!["[line 1] Error at '2': Expect superclass method name.",]
        );
    }
}
| true |
744125f3ad641d37ce9f1d69a9a1e47d98db4f53
|
Rust
|
ogham/exa
|
/src/fs/feature/git.rs
|
UTF-8
| 12,689 | 3.34375 | 3 |
[
"MIT"
] |
permissive
|
//! Getting the Git status of files and directories.
use std::ffi::OsStr;
#[cfg(target_family = "unix")]
use std::os::unix::ffi::OsStrExt;
use std::path::{Path, PathBuf};
use std::sync::Mutex;
use log::*;
use crate::fs::fields as f;
/// A **Git cache** is assembled based on the user’s input arguments.
///
/// This uses vectors to avoid the overhead of hashing: it’s not worth it when the
/// expected number of Git repositories per exa invocation is 0 or 1...
pub struct GitCache {
    /// A list of discovered Git repositories and their paths.
    repos: Vec<GitRepo>,
    /// Paths that we’ve confirmed do not have Git repositories underneath them.
    misses: Vec<PathBuf>,
}

impl GitCache {
    /// Whether any discovered repository covers the given path.
    pub fn has_anything_for(&self, index: &Path) -> bool {
        self.repos.iter().any(|e| e.has_path(index))
    }

    /// Returns the Git status for `index` from the first repository that
    /// covers it, or the default (not-modified) status when none does.
    /// `prefix_lookup` selects a directory-style, aggregate lookup.
    pub fn get(&self, index: &Path, prefix_lookup: bool) -> f::Git {
        self.repos.iter()
            .find(|e| e.has_path(index))
            .map(|repo| repo.search(index, prefix_lookup))
            .unwrap_or_default()
    }
}
use std::iter::FromIterator;
impl FromIterator<PathBuf> for GitCache {
    /// Builds the cache by attempting repository discovery on every input
    /// path, de-duplicating repositories that share a working directory, and
    /// remembering paths that turned out not to be inside a repository.
    fn from_iter<I>(iter: I) -> Self
    where I: IntoIterator<Item=PathBuf>
    {
        let iter = iter.into_iter();
        // size_hint only gives a lower bound; worst case we re-allocate.
        let mut git = Self {
            repos: Vec::with_capacity(iter.size_hint().0),
            misses: Vec::new(),
        };
        for path in iter {
            if git.misses.contains(&path) {
                debug!("Skipping {:?} because it already came back Gitless", path);
            }
            else if git.repos.iter().any(|e| e.has_path(&path)) {
                debug!("Skipping {:?} because we already queried it", path);
            }
            else {
                match GitRepo::discover(path) {
                    Ok(r) => {
                        // Two input paths can resolve to the same repository;
                        // record the extra path rather than holding the repo
                        // (and querying it) twice.
                        if let Some(r2) = git.repos.iter_mut().find(|e| e.has_workdir(&r.workdir)) {
                            debug!("Adding to existing repo (workdir matches with {:?})", r2.workdir);
                            r2.extra_paths.push(r.original_path);
                            continue;
                        }
                        debug!("Discovered new Git repo");
                        git.repos.push(r);
                    }
                    Err(miss) => {
                        git.misses.push(miss)
                    }
                }
            }
        }
        git
    }
}
/// A **Git repository** is one we’ve discovered somewhere on the filesystem.
pub struct GitRepo {
    /// The queryable contents of the repository: either a `git2` repo, or the
    /// cached results from when we queried it last time.
    /// Wrapped in a `Mutex` so the lazy query in `search` can mutate the
    /// cached state behind a shared reference.
    contents: Mutex<GitContents>,
    /// The working directory of this repository.
    /// This is used to check whether two repositories are the same.
    workdir: PathBuf,
    /// The path that was originally checked to discover this repository.
    /// This is as important as the extra_paths (it gets checked first), but
    /// is separate to avoid having to deal with a non-empty Vec.
    original_path: PathBuf,
    /// Any other paths that were checked only to result in this same
    /// repository.
    extra_paths: Vec<PathBuf>,
}

/// A repository’s queried state.
enum GitContents {
    /// All the interesting Git stuff goes through this.
    Before {
        repo: git2::Repository,
    },
    /// Temporary value used in `repo_to_statuses` so we can move the
    /// repository out of the `Before` variant.
    Processing,
    /// The data we’ve extracted from the repository, but only after we’ve
    /// actually done so.
    After {
        statuses: Git,
    },
}
impl GitRepo {
    /// Searches through this repository for a path (to a file or directory,
    /// depending on the prefix-lookup flag) and returns its Git status.
    ///
    /// Actually querying the `git2` repository for the mapping of paths to
    /// Git statuses is only done once, and gets cached so we don’t need to
    /// re-query the entire repository the times after that.
    ///
    /// The temporary `Processing` enum variant is used after the `git2`
    /// repository is moved out, but before the results have been moved in!
    /// See <https://stackoverflow.com/q/45985827/3484614>
    fn search(&self, index: &Path, prefix_lookup: bool) -> f::Git {
        use std::mem::replace;
        let mut contents = self.contents.lock().unwrap();
        // Fast path: the statuses were already computed and cached.
        if let GitContents::After { ref statuses } = *contents {
            debug!("Git repo {:?} has been found in cache", &self.workdir);
            return statuses.status(index, prefix_lookup);
        }
        debug!("Querying Git repo {:?} for the first time", &self.workdir);
        // Move the repository out (leaving `Processing` in its place) so it
        // can be consumed by the query, then cache the computed statuses.
        let repo = replace(&mut *contents, GitContents::Processing).inner_repo();
        let statuses = repo_to_statuses(&repo, &self.workdir);
        let result = statuses.status(index, prefix_lookup);
        let _processing = replace(&mut *contents, GitContents::After { statuses });
        result
    }
    /// Whether this repository has the given working directory.
    fn has_workdir(&self, path: &Path) -> bool {
        self.workdir == path
    }
    /// Whether this repository cares about the given path at all.
    fn has_path(&self, path: &Path) -> bool {
        path.starts_with(&self.original_path) || self.extra_paths.iter().any(|e| path.starts_with(e))
    }
    /// Searches for a Git repository at any point above the given path.
    /// Returns the original buffer if none is found (so the caller can keep
    /// it in the `misses` list without cloning).
    fn discover(path: PathBuf) -> Result<Self, PathBuf> {
        info!("Searching for Git repository above {:?}", path);
        let repo = match git2::Repository::discover(&path) {
            Ok(r) => r,
            Err(e) => {
                error!("Error discovering Git repositories: {:?}", e);
                return Err(path);
            }
        };
        // A bare repository has no working directory; we can’t do anything
        // useful with one, so treat it as a miss.
        if let Some(workdir) = repo.workdir() {
            let workdir = workdir.to_path_buf();
            let contents = Mutex::new(GitContents::Before { repo });
            Ok(Self { contents, workdir, original_path: path, extra_paths: Vec::new() })
        }
        else {
            warn!("Repository has no workdir?");
            Err(path)
        }
    }
}
impl GitContents {
    /// Consumes this state and returns the inner repository.
    ///
    /// Only meaningful on the `Before` variant: the caller swaps the enum to
    /// `Processing` before calling this, so reaching any other variant is a
    /// logic error.
    fn inner_repo(self) -> git2::Repository {
        match self {
            Self::Before { repo } => repo,
            _ => unreachable!("Tried to extract a non-Repository"),
        }
    }
}
/// Iterates through a repository’s statuses, consuming it and returning the
/// mapping of files to their Git status.
/// We will have already used the working directory at this point, so it gets
/// passed in rather than deriving it from the `Repository` again.
fn repo_to_statuses(repo: &git2::Repository, workdir: &Path) -> Git {
    let mut statuses = Vec::new();
    info!("Getting Git statuses for repo with workdir {:?}", workdir);
    // `statuses(None)` uses libgit2’s default status options.
    match repo.statuses(None) {
        Ok(es) => {
            for e in es.iter() {
                // On Unix, entry paths are raw bytes and may not be valid
                // UTF-8, so build the Path straight from the bytes.
                #[cfg(target_family = "unix")]
                let path = workdir.join(Path::new(OsStr::from_bytes(e.path_bytes())));
                // TODO: handle non Unix systems better:
                // https://github.com/ogham/exa/issues/698
                #[cfg(not(target_family = "unix"))]
                let path = workdir.join(Path::new(e.path().unwrap()));
                let elem = (path, e.status());
                statuses.push(elem);
            }
        }
        Err(e) => {
            // On failure we fall through with an empty mapping, so every
            // file just reports the default status.
            error!("Error looking up Git statuses: {:?}", e);
        }
    }
    Git { statuses }
}
// The `repo.statuses` call above takes a long time. exa debug output:
//
// 20.311276 INFO:exa::fs::feature::git: Getting Git statuses for repo with workdir "/vagrant/"
// 20.799610 DEBUG:exa::output::table: Getting Git status for file "./Cargo.toml"
//
// Even inserting another logging line immediately afterwards doesn’t make it
// look any faster.
/// Container of Git statuses for all the files in this folder’s Git repository.
struct Git {
    statuses: Vec<(PathBuf, git2::Status)>,
}

impl Git {
    /// Get either the file or directory status for the given path.
    /// “Prefix lookup” means that it should report an aggregate status of all
    /// paths starting with the given prefix (in other words, a directory).
    fn status(&self, index: &Path, prefix_lookup: bool) -> f::Git {
        if prefix_lookup { self.dir_status(index) }
                    else { self.file_status(index) }
    }

    /// Get the user-facing status of a file.
    /// We check the statuses directly applying to a file, and for the ignored
    /// status we check if any of its parents directories is ignored by git.
    fn file_status(&self, file: &Path) -> f::Git {
        self.summarize(file, |candidate, path| candidate == path)
    }

    /// Get the combined, user-facing status of a directory.
    /// Statuses are aggregating (for example, a directory is considered
    /// modified if any file under it has the status modified), except for
    /// ignored status which applies to files under (for example, a directory
    /// is considered ignored if one of its parent directories is ignored).
    fn dir_status(&self, dir: &Path) -> f::Git {
        self.summarize(dir, |candidate, path| candidate.starts_with(path))
    }

    /// Shared aggregation logic for the two lookups above (previously
    /// duplicated in both): fold together the statuses of every recorded
    /// entry whose path `matches` the (absolutised) query path. Entries
    /// marked IGNORED are special-cased in both lookups — an ignored entry
    /// applies to everything underneath it.
    fn summarize<F>(&self, target: &Path, matches: F) -> f::Git
    where F: Fn(&Path, &Path) -> bool
    {
        let path = reorient(target);
        let s = self.statuses.iter()
            .filter(|p| if p.1 == git2::Status::IGNORED {
                path.starts_with(&p.0)
            } else {
                matches(&p.0, &path)
            })
            .fold(git2::Status::empty(), |a, b| a | b.1);
        let staged = index_status(s);
        let unstaged = working_tree_status(s);
        f::Git { staged, unstaged }
    }
}
/// Converts a path to an absolute path based on the current directory.
/// Paths need to be absolute for them to be compared properly, otherwise
/// you’d ask a repo about “./README.md” but it only knows about
/// “/vagrant/README.md”, prefixed by the workdir.
#[cfg(unix)]
fn reorient(path: &Path) -> PathBuf {
    use std::env::current_dir;
    // TODO: I’m not 100% on this func tbh
    // Anchor the path to the current directory (falling back to "." when
    // the cwd is unavailable), then resolve links/dots where possible.
    let absolute = current_dir()
        .map(|dir| dir.join(path))
        .unwrap_or_else(|_| Path::new(".").join(path));
    absolute.canonicalize().unwrap_or(absolute)
}
/// Converts a path to an absolute path on Windows.
#[cfg(windows)]
fn reorient(path: &Path) -> PathBuf {
    let unc_path = path.canonicalize().unwrap();
    // On Windows UNC path is returned. We need to strip the prefix for it to work.
    // `trim_left_matches` has been deprecated since Rust 1.33;
    // `trim_start_matches` is its direct replacement.
    let normal_path = unc_path.as_os_str().to_str().unwrap().trim_start_matches("\\\\?\\");
    PathBuf::from(normal_path)
}
/// The character to display if the file has been modified, but not staged.
fn working_tree_status(status: git2::Status) -> f::GitStatus {
match status {
s if s.contains(git2::Status::WT_NEW) => f::GitStatus::New,
s if s.contains(git2::Status::WT_MODIFIED) => f::GitStatus::Modified,
s if s.contains(git2::Status::WT_DELETED) => f::GitStatus::Deleted,
s if s.contains(git2::Status::WT_RENAMED) => f::GitStatus::Renamed,
s if s.contains(git2::Status::WT_TYPECHANGE) => f::GitStatus::TypeChange,
s if s.contains(git2::Status::IGNORED) => f::GitStatus::Ignored,
s if s.contains(git2::Status::CONFLICTED) => f::GitStatus::Conflicted,
_ => f::GitStatus::NotModified,
}
}
/// The character to display if the file has been modified and the change
/// has been staged.
fn index_status(status: git2::Status) -> f::GitStatus {
match status {
s if s.contains(git2::Status::INDEX_NEW) => f::GitStatus::New,
s if s.contains(git2::Status::INDEX_MODIFIED) => f::GitStatus::Modified,
s if s.contains(git2::Status::INDEX_DELETED) => f::GitStatus::Deleted,
s if s.contains(git2::Status::INDEX_RENAMED) => f::GitStatus::Renamed,
s if s.contains(git2::Status::INDEX_TYPECHANGE) => f::GitStatus::TypeChange,
_ => f::GitStatus::NotModified,
}
}
| true |
b7bcdccda56cad69c5acb40647e5ccf80277f1fe
|
Rust
|
mvarble/nutrition
|
/src/fdc/mod.rs
|
UTF-8
| 2,441 | 3.03125 | 3 |
[
"Apache-2.0"
] |
permissive
|
//! This module allows us to perform HTTP requests to the
//! [FoodData Central](https://fdc.nal.usda.gov/index.html) API though the [`FDCService`] struct.
pub mod api;
pub use api::*;
use anyhow::Result;
use reqwest::Client;
/// `FDCService` implements the http requests to the FDC API through an Actix client.
#[derive(Clone, Debug)]
pub struct FDCService {
    // API key sent with every request as the `api_key` query parameter.
    pub fdc_key: String,
}

impl FDCService {
    /// generate a new FDCService
    pub fn new<S: Into<String>>(fdc_key: S) -> FDCService {
        FDCService {
            fdc_key: fdc_key.into(),
        }
    }

    /// Make a request to "v1/foods/search" and collect the first 10 results to a vector.
    pub async fn v1_foods_search<S: Into<String>>(
        &self,
        client: &Client,
        query: S,
    ) -> Result<Vec<AbridgedFoodItem>> {
        // make the request (pageSize caps the response at 10 items)
        let body = serde_json::json!({ "query": query.into(), "pageSize": 10 });
        let mut res = client
            .post(format!(
                "https://api.nal.usda.gov/fdc/v1/foods/search?api_key={}",
                self.fdc_key
            ))
            .json(&body)
            .send()
            .await?
            .json::<serde_json::Value>()
            .await?;
        // extract "foods" json array and deserialize
        // (`take` moves the array out, avoiding a clone of the response body)
        Ok(serde_json::from_value(res["foods"].take())?)
    }

    /// Make a request to "v1/foods"
    pub async fn v1_foods(&self, client: &Client, fdc_ids: &[i32]) -> Result<Vec<FDCMeta>> {
        // make the request
        let body = serde_json::json!({ "fdcIds": fdc_ids, "format": "full" });
        let mut res = client
            .post(format!(
                "https://api.nal.usda.gov/fdc/v1/foods?api_key={}",
                self.fdc_key
            ))
            .json(&body)
            .send()
            .await?
            .json::<serde_json::Value>()
            .await?;
        // map the values associated to the `dataType` key so that they can match the enum variants
        // (every data type other than "Branded" is collapsed into "Other")
        res.as_array_mut().map(|foods| {
            foods
                .iter_mut()
                .for_each(|food| match food["dataType"].as_str() {
                    Some("Branded") => {}
                    _ => {
                        food["dataType"] = serde_json::Value::String("Other".into());
                    }
                })
        });
        // deserialize
        Ok(serde_json::from_value(res)?)
    }
}
#[cfg(test)]
mod test;
| true |
b03cec8883d2db62f41461e83d5f54c752b846b4
|
Rust
|
timvermeulen/advent-of-code
|
/src/solutions/year2016/day08.rs
|
UTF-8
| 2,709 | 3.171875 | 3 |
[] |
no_license
|
use super::*;
/// One pixel of the screen.
#[derive(Copy, Clone)]
enum State {
    On,
    Off,
}

impl Debug for State {
    /// Render a pixel the way the puzzle draws it: `#` for lit, `.` for dark.
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        let glyph = match self {
            Self::On => "#",
            Self::Off => ".",
        };
        f.write_str(glyph)
    }
}
/// One parsed instruction from the puzzle input.
#[derive(Debug)]
enum Operation {
    // Turn on every pixel in the top-left `width`×`height` rectangle.
    Rect { width: i32, height: i32 },
    // Rotate row `y` right by `by` pixels, wrapping around.
    RotateRow { y: i32, by: usize },
    // Rotate column `x` down by `by` pixels, wrapping around.
    RotateColumn { x: i32, by: usize },
}

/// Parses e.g. `rect 3x2`.
fn rect<'a>() -> impl Parser<&'a str, Output = Operation> {
    chain((string("rect "), parser::i32(), token('x'), parser::i32()))
        .map(|(_, width, _, height)| Operation::Rect { width, height })
}

/// Parses e.g. `rotate row y=0 by 4`.
fn rotate_row<'a>() -> impl Parser<&'a str, Output = Operation> {
    chain((
        string("rotate row y="),
        parser::i32(),
        string(" by "),
        parser::usize(),
    ))
    .map(|(_, y, _, by)| Operation::RotateRow { y, by })
}

/// Parses e.g. `rotate column x=1 by 1`.
fn rotate_column<'a>() -> impl Parser<&'a str, Output = Operation> {
    chain((
        string("rotate column x="),
        parser::i32(),
        string(" by "),
        parser::usize(),
    ))
    .map(|(_, x, _, by)| Operation::RotateColumn { x, by })
}

/// Parses the whole newline-separated instruction list. `attempt` lets
/// `choice` backtrack after a partially-matched alternative (the two
/// "rotate ..." forms share a prefix).
fn parser<'a>() -> impl Parser<&'a str, Output = Vec<Operation>> {
    choice((rect().attempt(), rotate_row().attempt(), rotate_column())).collect_sep_by(newline())
}
/// Simulates the 50×6 screen and prints part 1 (the number of lit pixels)
/// followed by part 2 (the rendered grid, which spells out letters).
pub fn solve(input: &str) {
    const WIDTH: i32 = 50;
    const HEIGHT: i32 = 6;
    let mut grid = Grid::new(State::Off);
    for op in parser().parse_to_end(input).unwrap() {
        match op {
            Operation::Rect { width, height } => {
                for x in 0..width {
                    for y in 0..height {
                        grid[Pos { x, y }] = State::On;
                    }
                }
            }
            Operation::RotateRow { y, by } => {
                // Copy the row out, rotate it, and write it back.
                let mut vec: Vec<_> = (0..WIDTH).map(|x| grid[Pos { x, y }]).collect();
                vec.rotate_right(by);
                (0..WIDTH)
                    .zip(vec)
                    .for_each(|(x, state)| grid[Pos { x, y }] = state);
            }
            Operation::RotateColumn { x, by } => {
                // Same dance as above, but for a column.
                let mut vec: Vec<_> = (0..HEIGHT).map(|y| grid[Pos { x, y }]).collect();
                vec.rotate_right(by);
                (0..HEIGHT)
                    .zip(vec)
                    .for_each(|(y, state)| grid[Pos { x, y }] = state);
            }
        }
    }
    // Part 1: count the lit pixels.
    println!(
        "{}",
        grid.iter()
            .filter(|&(_, &state)| matches!(state, State::On))
            .count()
    );
    // Part 2: the grid's Debug impl renders '#'/'.' pixels.
    println!("{:?}", grid);
}

// Smoke test: fetch the real puzzle input and run the solver end to end.
#[async_std::test]
async fn test() -> Result<(), InputError> {
    let input = get_input(2016, 8).await?;
    solve(&input);
    Ok(())
}
| true |
eccc7305b390303bcb6efa72464628798f262161
|
Rust
|
bauhaus93/optimizer
|
/src/article.rs
|
UTF-8
| 472 | 3.25 | 3 |
[] |
no_license
|
/// Total price of every article in the deck, used as the fitness score
/// by the optimizer.
pub fn calculate_article_fitness(deck: &[Article]) -> u32 {
    // Sum via an iterator chain instead of mutating an accumulator
    // through a `for_each` closure.
    deck.iter().map(Article::get_total_price).sum()
}

/// An order line: `count` units of article `id_article` at unit `price`.
#[derive(Clone)]
pub struct Article {
    id_article: u32,
    count: u32,
    price: u32
}

impl Article {
    /// Price of the whole line: `count * price`.
    pub fn get_total_price(&self) -> u32 {
        self.count * self.price
    }
}

impl PartialEq for Article {
    /// Articles are identified solely by id; count and price are ignored.
    fn eq(&self, other: &Self) -> bool {
        self.id_article == other.id_article
    }
}
| true |
ab1a4c89be82d450ef77a3420ef1bb2f3308db68
|
Rust
|
transparencies/eventuals
|
/src/eventual/ptr.rs
|
UTF-8
| 3,065 | 3.34375 | 3 |
[] |
no_license
|
use by_address::ByAddress;
use std::{
borrow::Borrow, cmp::Ordering, convert::AsRef, error::Error, fmt, hash::Hash, ops::Deref,
sync::Arc,
};
/// This type is a thin wrapper around T to enable cheap clone and comparisons.
/// Internally it is an Arc that is compared by address instead of by the
/// implementation of the pointed to value.
///
/// Additionally, Ptr implements Error where T: Error. This makes working with
/// TryEventuals easier since many error types do not impl Value, but when
/// wrapped in Ptr do.
///
/// One thing to be aware of is that because values are compared by address
/// subscribers may be triggered unnecessarily in some contexts. If this is
/// undesirable use Arc instead.
///
/// This type is not specifically Eventual related, but is a useful pattern.
#[repr(transparent)]
#[derive(Debug, Default)]
pub struct Ptr<T> {
    // `ByAddress` makes equality/ordering/hashing use the Arc's allocation
    // address rather than T's own implementations.
    inner: ByAddress<Arc<T>>,
}

impl<T> Ptr<T> {
    /// Wraps `wrapped` in a new reference-counted, address-compared pointer.
    #[inline]
    pub fn new(wrapped: T) -> Self {
        Self {
            inner: ByAddress(Arc::new(wrapped)),
        }
    }
}
// The impls below all delegate to the inner `ByAddress<Arc<T>>`:
// Deref/Borrow/AsRef expose the pointed-to value, while Hash/Ord/Eq
// compare by allocation address (see `ByAddress`).
impl<T> Deref for Ptr<T> {
    type Target = T;
    #[inline]
    fn deref(&self) -> &Self::Target {
        self.inner.deref()
    }
}

impl<T> Borrow<T> for Ptr<T> {
    #[inline]
    fn borrow(&self) -> &T {
        self.inner.borrow()
    }
}

impl<T> AsRef<T> for Ptr<T> {
    #[inline]
    fn as_ref(&self) -> &T {
        self.inner.as_ref()
    }
}

impl<T> Hash for Ptr<T> {
    #[inline]
    fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
        self.inner.hash(state)
    }
}

impl<T> Ord for Ptr<T> {
    #[inline]
    fn cmp(&self, other: &Self) -> Ordering {
        self.inner.cmp(&other.inner)
    }
}

impl<T> PartialOrd for Ptr<T> {
    #[inline]
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        self.inner.partial_cmp(&other.inner)
    }
}

impl<T> PartialEq for Ptr<T> {
    #[inline]
    fn eq(&self, other: &Self) -> bool {
        self.inner.eq(&other.inner)
    }
}

// Cloning is cheap: it only bumps the Arc's reference count.
impl<T> Clone for Ptr<T> {
    #[inline]
    fn clone(&self) -> Self {
        Self {
            inner: self.inner.clone(),
        }
    }
}

impl<T> Eq for Ptr<T> {}
// Display forwards to the pointed-to value, so user-facing formatting is
// unaffected by the pointer wrapper.
impl<T> fmt::Display for Ptr<T>
where
    T: fmt::Display,
{
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        self.inner.fmt(f)
    }
}

impl<T> Error for Ptr<T>
where
    T: Error,
{
    // TODO: Consider. Ptr is supposed to be a thin wrapper around error so it
    // can be "transient" and treated as an error. But this API offers the
    // option of making it be more like a wrapped error that acknowledges it
    // originated from the original error. Semantically that seems like a
    // different thing (Ptr<Error> is not a new error caused by a previous one!)
    // but at the same time that might make the backtrace accessible whereas
    // here it cannot be because backtrace is a nightly API.
    fn source(&self) -> Option<&(dyn Error + 'static)> {
        self.inner.source()
    }
}

// Allows ergonomic conversion, e.g. `let p: Ptr<_> = value.into();`.
impl<T> From<T> for Ptr<T> {
    #[inline]
    fn from(t: T) -> Self {
        Self::new(t)
    }
}
| true |
59e8b8eb830610aaa45870fadb3788ed2615a9dc
|
Rust
|
SourangshuGhosh/v3
|
/languages/rust/exercises/concept/options/.meta/example.rs
|
UTF-8
| 917 | 3.453125 | 3 |
[
"MIT"
] |
permissive
|
/// A game character with hit points, an optional mana pool, and a level.
pub struct Player {
    pub health: u32,
    pub mana: Option<u32>,
    pub level: u32,
}

impl Player {
    /// If the player is dead (0 health), returns a revived copy with full
    /// health — and a full mana pool once the player has reached level 10.
    /// Returns `None` when the player is still alive.
    pub fn revive(&self) -> Option<Player> {
        match self.health {
            // Player is dead!
            0 => {
                let mana = if self.level >= 10 { Some(100) } else { None };
                Some(Player { health: 100, mana, level: self.level })
            },
            // Player is alive!
            _ => None,
        }
    }

    /// Spends `mana_cost` mana and returns the damage dealt (twice the
    /// cost). A player without a mana pool, or with insufficient mana,
    /// casts nothing and deals 0 damage.
    pub fn cast_spell(&mut self, mana_cost: u32) -> u32 {
        match self.mana {
            Some(m) if m >= mana_cost => {
                // Bug fix: the original wrote to a non-existent field
                // `self.m`; deduct the cost from the mana pool itself.
                self.mana = Some(m - mana_cost);
                mana_cost * 2
            }
            _ => 0,
        }
    }
}
| true |
606882c2e254177faae5d838febacc5bfbae5c18
|
Rust
|
BartMassey/advent-of-code-2020
|
/day17/soln.rs
|
UTF-8
| 2,889 | 3.046875 | 3 |
[
"MIT"
] |
permissive
|
// This program is licensed under the "MIT License".
// Please see the file LICENSE in this distribution
// for license terms.
//! Advent of Code Day 17.
//! Bart Massey 2020
use aoc::*;
use std::collections::HashSet;
/// The set of active cells, keyed by D-dimensional coordinates.
type Board<const D: usize> = HashSet<[isize; D]>;

/// Reads the 2D starting pattern into a D-dimensional board; the extra
/// coordinates (beyond row/column) stay at zero.
fn read_initial<const D: usize>() -> Board<D> {
    let mut initial = HashSet::new();
    for (row, l) in input_lines().enumerate() {
        for (col, c) in l.chars().enumerate() {
            let mut template = [0; D];
            match c {
                '#' => {
                    template[0] = row as isize;
                    template[1] = col as isize;
                    initial.insert(template);
                }
                '.' => (),
                c => panic!("unexpected char {} in input", c),
            }
        }
    }
    initial
}

/// Returns a vector of offsets for the problem
/// neighborhood. Constructed so that the last offset is
/// all-zeros. Could be an iterator, but constructing just
/// once might be faster.
fn offsets<const D: usize>(
    i: usize,
    mut template: [isize; D],
) -> Vec<[isize; D]> {
    if i == D {
        return vec![template];
    }
    let mut acc = Vec::with_capacity(usize::pow(3, (D - i) as u32));
    // Iterating 0 last in every dimension guarantees the all-zero offset
    // ends up as the final element, so the caller can simply pop it.
    for &j in &[-1, 1, 0] {
        template[i] = j;
        let xo = offsets(i + 1, template);
        acc.extend(xo);
    }
    acc
}
/// Runs `count` generations of the D-dimensional Conway rules:
/// an active cell survives with 2 or 3 active neighbors; an inactive
/// cell (only those adjacent to an active one can change) activates
/// with exactly 3.
fn iter_life<const D: usize>(mut state: Board<D>, count: usize) -> Board<D> {
    let mut off = offsets(0, [0; D]);
    // Remove all-zeros "neighbor".
    let _ = off.pop();
    for _ in 0..count {
        let mut next = HashSet::new();
        let mut empties = HashSet::new();
        // First pass: decide survival of active cells, while collecting
        // every inactive neighbor as a candidate for birth.
        for p in &state {
            let mut neighbors = 0;
            for dp in &off {
                let mut xp = *p;
                for i in 0..D {
                    xp[i] += dp[i];
                }
                if state.contains(&xp) {
                    neighbors += 1;
                } else {
                    empties.insert(xp);
                }
            }
            if neighbors == 2 || neighbors == 3 {
                next.insert(*p);
            }
        }
        // Second pass: births among the collected empty candidates.
        for p in &empties {
            let mut neighbors = 0;
            for dp in &off {
                let mut xp = *p;
                for i in 0..D {
                    xp[i] += dp[i];
                }
                if state.contains(&xp) {
                    neighbors += 1;
                }
            }
            if neighbors == 3 {
                next.insert(*p);
            }
        }
        state = next;
    }
    state
}

/// Runs the required six generations and returns the active-cell count.
fn solve<const D: usize>() -> usize {
    let initially: Board<D> = read_initial();
    let finally = iter_life(initially, 6);
    finally.len()
}

/// Part 1 uses a 3D board, part 2 a 4D board.
fn main() {
    let n = match get_part() {
        Part1 => solve::<3>(),
        Part2 => solve::<4>(),
    };
    println!("{}", n);
}
| true |
297e4710f33517aa4f9c4533edf4d94a26e5b3eb
|
Rust
|
gymore-io/stadium
|
/tests/tests.rs
|
UTF-8
| 6,366 | 3.390625 | 3 |
[
"MIT"
] |
permissive
|
use std::cell::Cell;
use std::mem::ManuallyDrop;
use std::rc::Rc;
/// A simple structure that adds one to its inner counter when it gets dropped.
pub struct DropCounter(ManuallyDrop<Rc<Cell<usize>>>);

impl Drop for DropCounter {
    fn drop(&mut self) {
        self.0.set(self.0.get() + 1);
        // SAFETY: `drop` runs at most once and the field is never used
        // again below, so manually dropping the inner Rc here is sound.
        unsafe { ManuallyDrop::drop(&mut self.0) };
        // must not use the arc after this point
    }
}
// --- Construction: builders of various sizes, plus zero-sized types. ---

#[test]
fn build_with_no_objects() {
    stadium::builder().build();
}

#[test]
fn build_with_one_object() {
    let mut b = stadium::builder();
    let handle = b.insert(996633u32);
    let s = b.build();
    assert_eq!(s.get(handle), &996633u32);
}

#[test]
fn build_with_300_objects() {
    // Interleave three element types; every value must survive the build.
    let mut b = stadium::builder();
    let mut handles_u16 = Vec::with_capacity(100);
    let mut handles_u32 = Vec::with_capacity(100);
    let mut handles_u64 = Vec::with_capacity(100);
    for i in 0usize..300 {
        match i % 3 {
            0 => handles_u16.push(b.insert(i as u16)),
            1 => handles_u32.push(b.insert(i as u32)),
            2 => handles_u64.push(b.insert(i as u64)),
            _ => unreachable!(),
        }
    }
    let s = b.build();
    for (i, h) in handles_u16.into_iter().enumerate() {
        assert_eq!(*s.get(h), i as u16 * 3);
    }
    for (i, h) in handles_u32.into_iter().enumerate() {
        assert_eq!(*s.get(h), i as u32 * 3 + 1);
    }
    for (i, h) in handles_u64.into_iter().enumerate() {
        assert_eq!(*s.get(h), i as u64 * 3 + 2);
    }
}

#[test]
fn zero_sized_type_insert() {
    let mut b = stadium::builder();
    b.insert(());
}

#[test]
fn zero_sized_type_insert_raw() {
    let meta = stadium::ObjectMeta::of::<()>();
    let mut b = stadium::builder();
    b.insert_raw(meta);
}

#[test]
fn handle_is_associated_with_stadium() {
    // A handle must only be recognised by the stadium that created it.
    let mut b_1 = stadium::builder();
    let mut b_2 = stadium::builder();
    let h_1 = b_1.insert(0u8);
    let h_2 = b_2.insert(0u8);
    let s_1 = b_1.build();
    let s_2 = b_2.build();
    assert_eq!(s_1.is_associated_with(h_1), true);
    assert_eq!(s_1.is_associated_with(h_2), false);
    assert_eq!(s_2.is_associated_with(h_1), false);
    assert_eq!(s_2.is_associated_with(h_2), true);
}
// --- Access: replace / get_mut, and panics on foreign handles. ---

#[test]
fn replace_value_in_stadium() {
    // `replace` returns the previous value.
    let mut b = stadium::builder();
    let h = b.insert("Hello");
    let mut s = b.build();
    assert_eq!(s.replace(h, "World"), "Hello");
    assert_eq!(*s.get(h), "World");
}

#[test]
fn get_mut_values_from_stadium() {
    let mut b = stadium::builder();
    let h = b.insert(vec![0, 1, 2]);
    let mut s = b.build();
    assert_eq!(s.get(h), &[0, 1, 2][..]);
    s.get_mut(h).push(3);
    assert_eq!(s.get(h), &[0, 1, 2, 3][..]);
}

// Each accessor must reject a handle created by a different builder.
#[test]
#[should_panic(expected = "The given handle was not created for this stadium")]
fn get_panics_on_invalid_handle() {
    let mut b_1 = stadium::builder();
    let mut b_2 = stadium::builder();
    b_1.insert(0u8);
    let h_2 = b_2.insert(0u8);
    let s = b_1.build();
    s.get(h_2);
}

#[test]
#[should_panic(expected = "The given handle was not created for this stadium")]
fn get_mut_panics_on_invalid_handle() {
    let mut b_1 = stadium::builder();
    let mut b_2 = stadium::builder();
    b_1.insert(0u8);
    let h_2 = b_2.insert(0u8);
    let mut s = b_1.build();
    s.get_mut(h_2);
}

#[test]
#[should_panic(expected = "The given handle was not created for this stadium")]
fn replace_panics_on_invalid_handle() {
    let mut b_1 = stadium::builder();
    let mut b_2 = stadium::builder();
    b_1.insert(0u8);
    let h_2 = b_2.insert(0u8);
    let mut s = b_1.build();
    s.replace(h_2, 1);
}
// --- Swap semantics, and drop-correctness of builder and stadium. ---

#[test]
fn swap_with_same_handle_does_nothing() {
    let mut b = stadium::builder();
    let h = b.insert(0);
    let mut s = b.build();
    assert_eq!(s[h], 0);
    s.swap(h, h);
    assert_eq!(s[h], 0);
}

#[test]
#[should_panic(expected = "`b` is not associated with this `Stadium`")]
fn swap_with_invalid_handle_panics() {
    let mut b_1 = stadium::builder();
    let mut b_2 = stadium::builder();
    let h_1 = b_1.insert(0);
    let h_2 = b_2.insert(0);
    let mut s_1 = b_1.build();
    s_1.swap(h_1, h_2)
}

#[test]
fn swap_values() {
    let mut bu = stadium::builder();
    let a = bu.insert(1);
    let b = bu.insert(2);
    let mut s = bu.build();
    assert_eq!(s[a], 1);
    assert_eq!(s[b], 2);
    s.swap(a, b);
    assert_eq!(s[a], 2);
    assert_eq!(s[b], 1);
}

// Dropping an unbuilt builder must drop every inserted value exactly once.
#[test]
fn the_builder_properly_drops_everything() {
    let drop_count = Rc::new(Cell::new(0));
    {
        let mut b = stadium::builder();
        for _ in 0..100 {
            b.insert(DropCounter(ManuallyDrop::new(Rc::clone(&drop_count))));
        }
    }
    assert_eq!(drop_count.get(), 100);
}

// Likewise for a builder that has been turned into a stadium.
#[test]
fn the_stadium_properly_drops_everything() {
    let drop_count = Rc::new(Cell::new(0));
    {
        let mut b = stadium::builder();
        for _ in 0..100 {
            b.insert(DropCounter(ManuallyDrop::new(Rc::clone(&drop_count))));
        }
        let _ = b.build();
    }
    assert_eq!(drop_count.get(), 100);
}
// --- Zero-sized types and the zeroed/default insertion helpers. ---

#[test]
fn the_builder_does_not_deallocate_zst() {
    let mut b = stadium::builder();
    let _ = b.insert(());
    // this should corrupt the memory if it tries to deallocate
    // the `()`.
    // ... in debug mode at least
}

#[test]
fn stadium_can_retrieve_zst() {
    let mut b = stadium::builder();
    let h = b.insert(());
    let s = b.build();
    assert_eq!(s[h], ());
}

#[test]
fn zst_operations() {
    // Every operation must also be a no-op-safe on zero-sized values.
    let mut b = stadium::builder();
    let h_a = b.insert(());
    let h_b = b.insert(());
    let mut s = b.build();
    assert_eq!(s[h_a], ());
    assert_eq!(s[h_b], ());
    assert_eq!(s.replace(h_a, ()), ());
    assert_eq!(s.replace(h_b, ()), ());
    s.swap(h_a, h_b);
    assert_eq!(s[h_a], ());
    assert_eq!(s[h_b], ());
}

#[test]
fn insert_zeroed() {
    // Both types here are valid when all-zero, so this is sound.
    let mut b = stadium::builder();
    let h_a = unsafe { b.insert_zeroed::<u8>() };
    let h_b = unsafe { b.insert_zeroed::<u128>() };
    let s = b.build();
    assert_eq!(s[h_a], 0u8);
    assert_eq!(s[h_b], 0u128);
}

#[test]
fn insert_default() {
    let mut b = stadium::builder();
    let h_a = b.insert_default::<Vec<u8>>();
    let h_b = b.insert_default::<Option<u8>>();
    let h_c = b.insert_default::<u128>();
    let s = b.build();
    assert_eq!(&s[h_a], &[]);
    assert_eq!(s[h_b], None);
    assert_eq!(s[h_c], 0u128);
}
| true |
238005ce7d41fd03bbcf254a9ce426441941a6bb
|
Rust
|
loxp/kvs
|
/project-1/src/kv.rs
|
UTF-8
| 669 | 3.5625 | 4 |
[
"MIT"
] |
permissive
|
use std::collections::HashMap;
/// An in-memory key-value store backed by a `HashMap`.
pub struct KvStore {
    store: HashMap<String, String>,
}

impl KvStore {
    /// Creates an empty store.
    pub fn new() -> Self {
        KvStore {
            store: HashMap::new(),
        }
    }

    /// Sets `key` to `value`, overwriting any previous value.
    pub fn set(&mut self, key: String, value: String) {
        self.store.insert(key, value);
    }

    /// Returns a copy of the value stored under `key`, if any.
    pub fn get(&self, key: String) -> Option<String> {
        // `cloned()` replaces the roundabout
        // `and_then(|v| Option::from(String::from(v)))` chain.
        self.store.get(&key).cloned()
    }

    /// Removes `key` (and its value) from the store, if present.
    pub fn remove(&mut self, key: String) {
        self.store.remove(&key);
    }
}
| true |
357a20a0515a3857c68ec7f15767bb067088d720
|
Rust
|
KhemPoudel/mongo-explorer
|
/src-tauri/src/main.rs
|
UTF-8
| 7,473 | 2.671875 | 3 |
[] |
no_license
|
#![cfg_attr(
all(not(debug_assertions), target_os = "windows"),
windows_subsystem = "windows"
)]
use mongodb::{ Client,
Collection,
options::{
ClientOptions,
ServerAddress::{Tcp},
Credential
},
bson::{
doc,
Document,
oid::ObjectId
}
};
use serde::{Serialize, Deserialize};
use std::collections::HashMap;
use futures::stream::{TryStreamExt};
//use bson::{Bson, oid::ObjectId};
/// Username/password pair for authenticated connections.
#[derive(Serialize, Deserialize, Debug)]
struct UserCredential {
    username: String,
    password: String
}

/// Connection parameters received from the frontend.
#[derive(Serialize, Deserialize, Debug)]
struct ConnectionDetails {
    hostname: String,
    port: u16,
    // `None` means connect without authentication.
    user_credentials: Option<UserCredential>,
}

// #[derive(serde: Serialize)]
// struct DBError {
//     err: String
// }

/// Wrapper holding the live MongoDB client handle.
#[derive(Debug)]
struct Connection {
    client: Client,
}

// NOTE(review): a `static mut` read/written from async commands is a data
// race if Tauri runs commands concurrently — consider Tauri managed state
// or a Mutex/OnceLock. Confirm the app's threading model.
static mut CONNECTION: Option<Connection> = None;
/// Connects to MongoDB with the supplied details, caches the client in the
/// global `CONNECTION`, and returns a map of database name to its collection
/// names (for the frontend's tree view).
#[tauri::command]
async fn create_connection(connection_details: ConnectionDetails) -> Result<HashMap<String, Vec<String>>, String> {
    // Build client options, attaching credentials only when provided.
    let options = match connection_details.user_credentials {
        Some(cred) => {
            let credential = Credential::builder()
                .username(cred.username.into())
                .password(cred.password.into())
                .build();
            ClientOptions::builder()
                .hosts(vec![
                    Tcp {
                        host: connection_details.hostname.into(),
                        port: Some(connection_details.port),
                    }
                ])
                .credential(credential)
                .build()
        },
        None => {
            ClientOptions::builder()
                .hosts(vec![
                    Tcp {
                        host: connection_details.hostname.into(),
                        port: Some(connection_details.port),
                    }
                ])
                .build()
        }
    };
    if let Ok(c) = Client::with_options(options) {
        unsafe {
            // NOTE(review): unsynchronised write to a `static mut` — see the
            // comment on `CONNECTION`.
            CONNECTION = Some(Connection{ client: c.clone() });
            let mut db_hash = HashMap::new();
            let db_names = match c.list_database_names(None, None).await {
                Ok(db_names) => db_names,
                Err(err) => {
                    println!("{:?}", err);
                    return Err("No databases".into());
                }
            };
            for db_name in db_names {
                let db_handle = c.database(&db_name);
                let collection_names = match db_handle.list_collection_names(None).await {
                    Ok(collection_names) => collection_names,
                    Err(error) => {
                        // A database we cannot list is shown with no collections
                        // rather than failing the whole connection.
                        println!("{:?}", error);
                        vec![]
                        //return Err("Collection error".into());
                    },
                };
                db_hash.insert(db_name, collection_names);
            }
            Ok(db_hash)
        }
    } else {
        return Err("Connection error".into());
    }
    //let client = Client::with_options(options)?;
}
/// Returns every document in `db`.`collection`.
#[tauri::command]
async fn find(db: String, collection: String) -> Result<Vec<Document>, String> {
    unsafe {
        match &CONNECTION {
            Some(conn) => {
                let student_coll: Collection = conn.client.database(&db).collection(&collection);
                // Unfiltered find; the cursor is collected into a Vec.
                if let Ok(all_docs_cursor) = student_coll.find(None, None).await {
                    if let Ok(v) = all_docs_cursor.try_collect().await {
                        println!("{:?}", v);
                        return Ok(v);
                    } else {
                        return Err("no docs".into());
                    }
                } else {
                    return Err("no collection".into());
                }
            },
            None => {
                return Err("no connection".into());
            }
        }
    }
}

/// Applies `$set: new_doc` to the document with the given ObjectId and
/// returns the number of modified documents (0 or 1).
#[tauri::command]
async fn update_one(db: String, collection:String, object_id: String, new_doc: Document) -> Result<u64, String> {
    println!("doc=> {:?}, id=> {}", &new_doc, &object_id);
    unsafe {
        match &CONNECTION {
            Some(conn) => {
                let coll: Collection = conn.client.database(&db).collection(&collection);
                if let Ok(obj_id_bson) = ObjectId::parse_str(&object_id){
                    if let Ok(update_result) = coll.update_one(
                        doc! {
                            "_id": &obj_id_bson
                        },
                        doc! {
                            "$set": new_doc
                        }, None).await {
                            return Ok(update_result.modified_count);
                    } else {
                        return Err("no collection".into());
                    }
                } else {
                    return Err("Wrong ObjectId Supplied".into());
                }
            },
            None => {
                return Err("no connection".into());
            }
        }
    }
}
/// Inserts `new_doc` and returns the generated `_id`, rendered as a string.
#[tauri::command]
async fn insert_one(db: String, collection:String, new_doc: Document) -> Result<String, String> {
    println!("doc=> {:?}", &new_doc);
    unsafe {
        match &CONNECTION {
            Some(conn) => {
                let coll: Collection = conn.client.database(&db).collection(&collection);
                if let Ok(insert_result) = coll.insert_one(new_doc.clone(), None).await {
                    return Ok(insert_result.inserted_id.to_string());
                } else {
                    return Err("no collection".into());
                }
            },
            None => {
                return Err("no connection".into());
            }
        }
    }
}

/// Deletes the document with the given ObjectId and returns the number of
/// deleted documents (0 or 1).
#[tauri::command]
async fn delete_one(db: String, collection:String, object_id: String) -> Result<u64, String> {
    unsafe {
        match &CONNECTION {
            Some(conn) => {
                let coll: Collection = conn.client.database(&db).collection(&collection);
                if let Ok(obj_id_bson) = ObjectId::parse_str(&object_id){
                    if let Ok(delete_result) = coll.delete_one(
                        doc! {
                            "_id": &obj_id_bson
                        },
                        None).await {
                            return Ok(delete_result.deleted_count);
                    } else {
                        return Err("no collection".into());
                    }
                } else {
                    return Err("Wrong ObjectId Supplied".into());
                }
            },
            None => {
                return Err("no connection".into());
            }
        }
    }
}

/// Registers the commands above and starts the Tauri application.
fn main() {
    tauri::Builder::default()
        .invoke_handler(tauri::generate_handler![create_connection, find, update_one, insert_one, delete_one])
        .run(tauri::generate_context!())
        .expect("error while running tauri application");
    // create_connection(ConnectionDetails {
    //     hostname: "localhost".to_string(),
    //     port: 27017,
    //     user_credentials: Some(UserCredential {
    //         username: "khem".to_string(),
    //         password: "idunnoe7".to_string()
    //     })
    // }).await.unwrap();
    //
    // find("test_mongo".to_string(), "students".to_string()).await.unwrap();
}
| true |
830e5559baeaf998ae06c6ddd6dc7e78743d8f48
|
Rust
|
Nugine/bpnn-rs
|
/src/bpnn.rs
|
UTF-8
| 3,953 | 2.53125 | 3 |
[] |
no_license
|
mod func;
mod types;
mod utils;
pub use self::func::*;
pub use self::types::*;
pub use self::utils::*;
use ndarray::Array;
/// A feed-forward neural network trained by back-propagation.
pub struct BPNN {
    // One weight matrix per layer; layer i maps an input of width
    // weights[i].dim().1 (which includes the bias column) to
    // weights[i].dim().0 outputs.
    weights: Vec<Matrix>,
    // Momentum terms: the previous weight delta per layer, reused scaled
    // by `factor` in train_once.
    changes: Vec<Matrix>,
    // Per-layer activation functions and their derivatives.
    activations: Vec<Activation>,
    d_activations: Vec<DActivation>,
    // Loss function and its derivative w.r.t. the network output.
    cost: Cost,
    d_cost: DCost,
}
#[allow(non_snake_case)]
impl BPNN {
    /// Builds a network with the given input width and one entry in
    /// `layer_settings` per layer: (layer size, activation, derivative).
    /// Weights start random, momentum buffers start at zero.
    pub fn new(
        input_size: usize,
        layer_settings: &Vec<(usize, Activation, DActivation)>,
        cost: Cost,
        d_cost: DCost,
    ) -> Self {
        let mut weights: Vec<Matrix> = Vec::new();
        let mut changes: Vec<Matrix> = Vec::new();
        let mut activations: Vec<Activation> = Vec::new();
        let mut d_activations: Vec<DActivation> = Vec::new();
        // +1 accounts for the bias term appended to every input vector.
        let mut fan_in = input_size + 1;
        for &(fan_out, act, d_act) in layer_settings {
            weights.push(random_matrix(fan_out, fan_in));
            changes.push(zero_matrix(fan_out, fan_in));
            activations.push(act);
            d_activations.push(d_act);
            fan_in = fan_out;
        }
        Self {
            weights,
            changes,
            activations,
            d_activations,
            cost,
            d_cost,
        }
    }
}
#[allow(non_snake_case)]
impl BPNN {
    /// Performs one forward + backward pass for a single (input, target)
    /// pair, updating the weights in place with learning rate `rate` and
    /// momentum coefficient `factor`. Returns the cost for this pattern.
    pub fn train_once(&mut self, input: &Vector, target: &Vector, rate: f64, factor: f64) -> f64 {
        let l = self.weights.len();
        assert_eq!(input.len(), self.weights[0].dim().1 - 1);
        assert_eq!(target.len(), self.weights[l - 1].dim().0);
        let W = &mut self.weights;
        let C = &mut self.changes;
        let activations = &self.activations;
        let d_activations = &self.d_activations;
        // z[0] is the input with a constant bias input of 1 appended;
        // z[i+1] is the activated output of layer i.
        let mut z = vec![{
            let mut v = input.to_vec();
            v.push(1.);
            Array::from_vec(v)
        }];
        for i in 0..l {
            let x = &z[i];
            let y = W[i].dot(x);
            z.push((activations[i])(&y))
        }
        // Output-layer error term: dCost/dOutput * activation'(output).
        let mut delta = {
            let e = (self.d_cost)(target, &z[l]);
            let da = (d_activations[l - 1])(&z[l]);
            e * &da
        };
        let output = z.pop().unwrap();
        // Back-propagate through hidden layers; z is consumed from the back
        // so z.pop() always yields the input that fed the current layer.
        for i in (1..l).rev() {
            let new_delta = {
                let e = W[i].t().dot(&delta);
                let da = (d_activations[i - 1])(&z[i]);
                e * &da
            };
            let (ol, il) = C[i].dim();
            let delta_2d: Matrix = delta.into_shape((ol, 1)).unwrap();
            let z_i_t: Matrix = z.pop().unwrap().into_shape((1, il)).unwrap();
            // Momentum update: decay the previous change, add the new gradient step.
            C[i] *= factor;
            C[i].scaled_add(-rate, &delta_2d.dot(&z_i_t));
            delta = new_delta;
        }
        // First layer handled separately (no further delta to propagate).
        {
            let (ol, il) = C[0].dim();
            let delta_2d: Matrix = delta.into_shape((ol, 1)).unwrap();
            let z_i_t: Matrix = z.pop().unwrap().into_shape((1, il)).unwrap();
            C[0] *= factor;
            C[0].scaled_add(-rate, &delta_2d.dot(&z_i_t));
        }
        // Apply all accumulated changes.
        for i in 0..l {
            W[i] += &C[i];
        }
        (self.cost)(target, &output)
    }
    /// Trains on every (input, target) pattern once; returns the summed cost.
    pub fn train(&mut self, patterns: &Vec<(Vector, Vector)>, rate: f64, factor: f64) -> f64 {
        patterns
            .into_iter()
            .map(|(ip, op)| self.train_once(ip, op, rate, factor))
            .sum()
    }
}
impl BPNN {
    /// Runs a single forward pass and returns the network output.
    pub fn predict_once(&self, input: &Vector) -> Vector {
        assert_eq!(input.len(), self.weights[0].dim().1 - 1);
        // Append the constant bias input before feeding the first layer.
        let mut with_bias = input.to_vec();
        with_bias.push(1.);
        let mut signal = Array::from_vec(with_bias);
        for (w, act) in self.weights.iter().zip(self.activations.iter()) {
            signal = act(&w.dot(&signal));
        }
        signal
    }
    /// Forward pass for each input vector, in order.
    pub fn predict(&self, inputs: &Vec<Vector>) -> Vec<Vector> {
        inputs.iter().map(|ip| self.predict_once(ip)).collect()
    }
}
| true |
6bc14006e7fc4ec427d182f7924a1442d7c73dd0
|
Rust
|
rcoh/angle-grinder
|
/src/typecheck.rs
|
UTF-8
| 16,674 | 2.734375 | 3 |
[
"MIT"
] |
permissive
|
use crate::data::Value;
use crate::errors::ErrorBuilder;
use crate::lang;
use crate::operator::{
average, count, count_distinct, expr, fields, limit, max, min, parse, percentile, split, sum,
timeslice, total, where_op,
};
use crate::{funcs, operator};
use thiserror::Error;
/// Semantic errors detected while lowering parsed query syntax into
/// pipeline operators.
#[derive(Debug, Error)]
pub enum TypeError {
    #[error("Expected boolean expression, found {}", found)]
    ExpectedBool { found: String },
    #[error("Expected an expression")]
    ExpectedExpr,
    // The parse pattern's capture count must match the field list length.
    #[error(
        "Wrong number of patterns for parse. Pattern has {} but {} were extracted",
        pattern,
        extracted
    )]
    ParseNumPatterns { pattern: usize, extracted: usize },
    #[error("Two `from` clauses were provided")]
    DoubleFromClause,
    #[error("Limit must be a non-zero integer, found {}", limit)]
    InvalidLimit { limit: f64 },
    #[error("Unknown function {}", name)]
    UnknownFunction { name: String },
    #[error("Expected a duration for the timeslice (e.g. 1h)")]
    ExpectedDuration,
}
/// Lowers a parsed syntax node (`self`) into its typed pipeline form `O`,
/// reporting human-readable diagnostics through `error_builder` on failure.
pub trait TypeCheck<O> {
    fn type_check<E: ErrorBuilder>(self, error_builder: &E) -> Result<O, TypeError>;
}
impl TypeCheck<expr::BoolExpr> for lang::ComparisonOp {
    /// Comparison operators map one-to-one onto their typed form; this
    /// conversion cannot fail.
    fn type_check<E: ErrorBuilder>(self, _error_builder: &E) -> Result<expr::BoolExpr, TypeError> {
        Ok(match self {
            lang::ComparisonOp::Eq => expr::BoolExpr::Eq,
            lang::ComparisonOp::Neq => expr::BoolExpr::Neq,
            lang::ComparisonOp::Gt => expr::BoolExpr::Gt,
            lang::ComparisonOp::Lt => expr::BoolExpr::Lt,
            lang::ComparisonOp::Gte => expr::BoolExpr::Gte,
            lang::ComparisonOp::Lte => expr::BoolExpr::Lte,
        })
    }
}
impl TypeCheck<expr::ArithmeticExpr> for lang::ArithmeticOp {
    /// Arithmetic operators map one-to-one; this conversion cannot fail.
    fn type_check<E: ErrorBuilder>(
        self,
        _error_builder: &E,
    ) -> Result<expr::ArithmeticExpr, TypeError> {
        Ok(match self {
            lang::ArithmeticOp::Add => expr::ArithmeticExpr::Add,
            lang::ArithmeticOp::Subtract => expr::ArithmeticExpr::Subtract,
            lang::ArithmeticOp::Multiply => expr::ArithmeticExpr::Multiply,
            lang::ArithmeticOp::Divide => expr::ArithmeticExpr::Divide,
        })
    }
}
impl TypeCheck<expr::LogicalExpr> for lang::LogicalOp {
    /// Logical operators map one-to-one; this conversion cannot fail.
    fn type_check<E: ErrorBuilder>(
        self,
        _error_builder: &E,
    ) -> Result<expr::LogicalExpr, TypeError> {
        Ok(match self {
            lang::LogicalOp::And => expr::LogicalExpr::And,
            lang::LogicalOp::Or => expr::LogicalExpr::Or,
        })
    }
}
impl TypeCheck<operator::Expr> for lang::Expr {
    /// Lowers a parsed expression tree into the typed operator expression,
    /// recursing through operands and resolving function names against the
    /// registered function map.
    fn type_check<E: ErrorBuilder>(self, error_builder: &E) -> Result<operator::Expr, TypeError> {
        match self {
            lang::Expr::Column { head, rest } => {
                // A column path must start with a named key; a leading
                // index (e.g. `[0].x`) is not a valid expression head.
                let head = match head {
                    lang::DataAccessAtom::Key(s) => s,
                    lang::DataAccessAtom::Index(_) => return Err(TypeError::ExpectedExpr),
                };
                let rest = rest
                    .iter()
                    .map(|s| match s {
                        lang::DataAccessAtom::Key(s) => expr::ValueRef::Field(s.to_string()),
                        lang::DataAccessAtom::Index(i) => expr::ValueRef::IndexAt(*i),
                    })
                    .collect();
                Ok(operator::Expr::NestedColumn { head, rest })
            }
            lang::Expr::Unary { op, operand } => match op {
                lang::UnaryOp::Not => Ok(operator::Expr::BoolUnary(expr::UnaryExpr {
                    operator: expr::BoolUnaryExpr::Not,
                    operand: Box::new((*operand).type_check(error_builder)?),
                })),
            },
            // Binary expressions: both operands are type-checked first, then
            // the operator is converted to its typed counterpart.
            lang::Expr::Binary { op, left, right } => match op {
                lang::BinaryOp::Comparison(com_op) => {
                    Ok(operator::Expr::Comparison(expr::BinaryExpr::<
                        expr::BoolExpr,
                    > {
                        left: Box::new((*left).type_check(error_builder)?),
                        right: Box::new((*right).type_check(error_builder)?),
                        operator: com_op.type_check(error_builder)?,
                    }))
                }
                lang::BinaryOp::Arithmetic(arith_op) => {
                    Ok(operator::Expr::Arithmetic(expr::BinaryExpr::<
                        expr::ArithmeticExpr,
                    > {
                        left: Box::new((*left).type_check(error_builder)?),
                        right: Box::new((*right).type_check(error_builder)?),
                        operator: arith_op.type_check(error_builder)?,
                    }))
                }
                lang::BinaryOp::Logical(logical_op) => {
                    Ok(operator::Expr::Logical(expr::BinaryExpr::<
                        expr::LogicalExpr,
                    > {
                        left: Box::new((*left).type_check(error_builder)?),
                        right: Box::new((*right).type_check(error_builder)?),
                        operator: logical_op.type_check(error_builder)?,
                    }))
                }
            },
            lang::Expr::FunctionCall { name, args } => {
                let converted_args: Result<Vec<operator::Expr>, TypeError> = args
                    .into_iter()
                    .map(|arg| arg.type_check(error_builder))
                    .collect();
                if let Some(func) = funcs::FUNC_MAP.get(name.as_str()) {
                    Ok(operator::Expr::FunctionCall {
                        func,
                        args: converted_args?,
                    })
                } else {
                    Err(TypeError::UnknownFunction { name })
                }
            }
            lang::Expr::IfOp {
                cond,
                value_if_true,
                value_if_false,
            } => Ok(operator::Expr::IfOp {
                cond: Box::new(cond.type_check(error_builder)?),
                value_if_true: Box::new(value_if_true.type_check(error_builder)?),
                value_if_false: Box::new(value_if_false.type_check(error_builder)?),
            }),
            lang::Expr::Value(value) => {
                // Deliberately leaks the constant to obtain a 'static
                // reference. NOTE(review): each type-checked constant leaks
                // one Value for the life of the process — presumably
                // acceptable for a one-shot query; confirm for long-running
                // usage.
                let boxed = Box::new(value);
                let static_value: &'static mut Value = Box::leak(boxed);
                Ok(operator::Expr::Value(static_value))
            }
            lang::Expr::Error => Err(TypeError::ExpectedExpr),
        }
    }
}
// Number of rows a bare `limit` (no count argument) keeps.
const DEFAULT_LIMIT: i64 = 10;

impl TypeCheck<Box<dyn operator::OperatorBuilder + Send + Sync>>
    for lang::Positioned<lang::InlineOperator>
{
    /// Convert the operator syntax to a builder that can instantiate an operator for the
    /// pipeline. Any semantic errors in the operator syntax should be detected here.
    fn type_check<T: ErrorBuilder>(
        self,
        error_builder: &T,
    ) -> Result<Box<dyn operator::OperatorBuilder + Send + Sync>, TypeError> {
        match self.value {
            lang::InlineOperator::Json { input_column } => Ok(Box::new(parse::ParseJson::new(
                input_column
                    .map(|e| e.type_check(error_builder))
                    .transpose()?,
            ))),
            lang::InlineOperator::Logfmt { input_column } => Ok(Box::new(parse::ParseLogfmt::new(
                input_column
                    .map(|e| e.type_check(error_builder))
                    .transpose()?,
            ))),
            lang::InlineOperator::Parse {
                pattern,
                fields,
                input_column,
                no_drop,
            } => {
                let regex = pattern.to_regex();

                // `from` may appear on either side of the pattern, but only once.
                let input_column = match input_column {
                    (Some(from), None) | (None, Some(from)) => Some(from.value),
                    (None, None) => None,
                    (Some(l), Some(r)) => {
                        let e = TypeError::DoubleFromClause;
                        error_builder
                            .report_error_for(&e)
                            .with_code_pointer(&l, "")
                            .with_code_pointer(&r, "")
                            .with_resolution("Only one from clause is allowed")
                            .send_report();
                        return Err(e);
                    }
                };

                // Each capture group (minus the implicit whole-match group)
                // must have a corresponding output field name.
                if (regex.captures_len() - 1) != fields.len() {
                    Err(TypeError::ParseNumPatterns {
                        pattern: regex.captures_len() - 1,
                        extracted: fields.len(),
                    })
                } else {
                    Ok(Box::new(parse::Parse::new(
                        regex,
                        fields,
                        input_column
                            .map(|e| e.type_check(error_builder))
                            .transpose()?,
                        parse::ParseOptions {
                            drop_nonmatching: !no_drop,
                        },
                    )))
                }
            }
            lang::InlineOperator::Fields { fields, mode } => {
                let omode = match mode {
                    lang::FieldMode::Except => fields::FieldMode::Except,
                    lang::FieldMode::Only => fields::FieldMode::Only,
                };
                Ok(Box::new(fields::Fields::new(&fields, omode)))
            }
            // A constant `where` condition is only legal if it is boolean;
            // anything else is reported with a hint to compare a field.
            lang::InlineOperator::Where { expr: Some(expr) } => match expr
                .value
                .type_check(error_builder)?
            {
                operator::Expr::Value(constant) => {
                    if let Value::Bool(bool_value) = constant {
                        Ok(Box::new(where_op::Where::new(*bool_value)))
                    } else {
                        let e = TypeError::ExpectedBool {
                            found: format!("{:?}", constant),
                        };
                        error_builder
                            .report_error_for(&e)
                            .with_code_range(expr.range, "This is constant")
                            .with_resolution("Perhaps you meant to compare a field to this value?")
                            .with_resolution(format!("example: where field1 == {}", constant))
                            .send_report();
                        Err(e)
                    }
                }
                generic_expr => Ok(Box::new(where_op::Where::new(generic_expr))),
            },
            lang::InlineOperator::Where { expr: None } => {
                let e = TypeError::ExpectedExpr;
                error_builder
                    .report_error_for(&e)
                    .with_code_pointer(&self, "No condition provided for this 'where'")
                    .with_resolution(
                        "Insert an expression whose result determines whether a record should be \
                         passed downstream",
                    )
                    .with_resolution("example: where duration > 100")
                    .send_report();
                Err(e)
            }
            // Limits must be non-zero integers; sign selects head vs. tail.
            lang::InlineOperator::Limit { count: Some(count) } => match count.value {
                limit if limit.trunc() == 0.0 || limit.fract() != 0.0 => {
                    let e = TypeError::InvalidLimit { limit };
                    error_builder
                        .report_error_for(e.to_string())
                        .with_code_pointer(
                            &count,
                            if limit.fract() != 0.0 {
                                "Fractional limits are not allowed"
                            } else {
                                "Zero is not allowed"
                            },
                        )
                        .with_resolution("Use a positive integer to select the first N rows")
                        .with_resolution("Use a negative integer to select the last N rows")
                        .send_report();
                    Err(e)
                }
                limit => Ok(Box::new(limit::LimitDef::new(limit as i64))),
            },
            lang::InlineOperator::Limit { count: None } => {
                Ok(Box::new(limit::LimitDef::new(DEFAULT_LIMIT)))
            }
            lang::InlineOperator::Split {
                separator,
                input_column,
                output_column,
            } => Ok(Box::new(split::Split::new(
                separator,
                input_column
                    .map(|e| e.type_check(error_builder))
                    .transpose()?,
                output_column
                    .map(|e| e.type_check(error_builder))
                    .transpose()?,
            ))),
            lang::InlineOperator::Timeslice { duration: None, .. } => {
                Err(TypeError::ExpectedDuration)
            }
            lang::InlineOperator::Timeslice {
                input_column,
                duration: Some(duration),
                output_column,
            } => Ok(Box::new(timeslice::Timeslice::new(
                input_column.type_check(error_builder)?,
                duration,
                output_column,
            ))),
            lang::InlineOperator::Total {
                input_column,
                output_column,
            } => Ok(Box::new(total::TotalDef::new(
                input_column.type_check(error_builder)?,
                output_column,
            ))),
            lang::InlineOperator::FieldExpression { value, name } => Ok(Box::new(
                fields::FieldExpressionDef::new(value.type_check(error_builder)?, name),
            )),
        }
    }
}
impl TypeCheck<Box<dyn operator::AggregateFunction>> for lang::Positioned<lang::AggregateFunction> {
    /// Lowers an aggregate-function syntax node into a runnable aggregator,
    /// type-checking the column expression(s) it operates on.
    fn type_check<T: ErrorBuilder>(
        self,
        error_builder: &T,
    ) -> Result<Box<dyn operator::AggregateFunction>, TypeError> {
        match self.value {
            lang::AggregateFunction::Count { condition } => {
                // Optional condition: `count(cond)` counts matching rows only.
                let expr = condition.map(|c| c.type_check(error_builder)).transpose()?;
                Ok(Box::new(count::Count::new(expr)))
            }
            lang::AggregateFunction::Min { column } => {
                Ok(Box::new(min::Min::empty(column.type_check(error_builder)?)))
            }
            lang::AggregateFunction::Average { column } => Ok(Box::new(average::Average::empty(
                column.type_check(error_builder)?,
            ))),
            lang::AggregateFunction::Max { column } => {
                Ok(Box::new(max::Max::empty(column.type_check(error_builder)?)))
            }
            lang::AggregateFunction::Sum { column } => {
                Ok(Box::new(sum::Sum::empty(column.type_check(error_builder)?)))
            }
            lang::AggregateFunction::Percentile {
                column, percentile, ..
            } => Ok(Box::new(percentile::Percentile::empty(
                column.type_check(error_builder)?,
                percentile,
            ))),
            // count_distinct takes exactly one expression; zero or several
            // are reported with tailored messages.
            lang::AggregateFunction::CountDistinct { column: Some(pos) } => {
                match pos.value.as_slice() {
                    [column] => Ok(Box::new(count_distinct::CountDistinct::empty(
                        column.clone().type_check(error_builder)?,
                    ))),
                    _ => {
                        error_builder
                            .report_error_for("Expecting a single expression to count")
                            .with_code_pointer(
                                &pos,
                                match pos.value.len() {
                                    0 => "No expression given",
                                    _ => "Only a single expression can be given",
                                },
                            )
                            .with_resolution("example: count_distinct(field_to_count)")
                            .send_report();
                        Err(TypeError::ExpectedExpr)
                    }
                }
            }
            lang::AggregateFunction::CountDistinct { column: None } => {
                error_builder
                    .report_error_for("Expecting an expression to count")
                    .with_code_pointer(&self, "No field argument given")
                    .with_resolution("example: count_distinct(field_to_count)")
                    .send_report();
                Err(TypeError::ExpectedExpr)
            }
            // Parser-level error variant; never reaches type checking.
            lang::AggregateFunction::Error => unreachable!(),
        }
    }
}
| true |
09fc7f56c3ac6436a870b90e6bc284c265d5bb19
|
Rust
|
cyber-meow/ReactiveRs
|
/src/signal/single_thread/spmc_signal.rs
|
UTF-8
| 6,087 | 2.796875 | 3 |
[
"MIT"
] |
permissive
|
use std::rc::Rc;
use std::cell::RefCell;
use runtime::SingleThreadRuntime;
use continuation::ContinuationSt;
use signal::Signal;
use signal::signal_runtime::{SignalRuntimeRefBase, SignalRuntimeRefSt};
use signal::valued_signal::{ValuedSignal, SpSignal, CanEmit, GetValue, CanTryEmit, TryEmitValue};
/// A shared pointer to a signal runtime.
pub struct SpmcSignalRuntimeRef<V> {
    // Shared, single-threaded handle; clones alias the same runtime state.
    runtime: Rc<SpmcSignalRuntime<V>>,
}
impl<V> Clone for SpmcSignalRuntimeRef<V> {
fn clone(&self) -> Self {
SpmcSignalRuntimeRef { runtime: self.runtime.clone() }
}
}
/// Runtime for single-producer, multi-consumer signals.
struct SpmcSignalRuntime<V> {
    // Value emitted during the current instant, if any.
    value: RefCell<Option<V>>,
    // Value of the most recent emission from any previous instant.
    last_value: RefCell<Option<V>>,
    // Guards against updating `last_value` twice at the end of one instant.
    last_value_updated: RefCell<bool>,
    // Continuations blocked until the signal is (eventually) emitted.
    await_works: RefCell<Vec<Box<ContinuationSt<()>>>>,
    // Continuations to run only if the signal is present this instant.
    present_works: RefCell<Vec<Box<ContinuationSt<()>>>>,
}
impl<V> SpmcSignalRuntime<V> where V: Clone {
/// Returns a new instance of SignalRuntime.
fn new() -> Self {
SpmcSignalRuntime {
value: RefCell::new(None),
last_value: RefCell::new(None),
last_value_updated: RefCell::new(false),
await_works: RefCell::new(Vec::new()),
present_works: RefCell::new(Vec::new()),
}
}
}
impl<V> SignalRuntimeRefBase<SingleThreadRuntime> for SpmcSignalRuntimeRef<V>
    where V: Clone + 'static
{
    /// Returns a bool to indicate if the signal was emitted or not on the current instant.
    fn is_emitted(&self) -> bool {
        self.runtime.value.borrow().is_some()
    }

    /// Resets the signal at the beginning of each instant.
    fn reset(&mut self) {
        *self.runtime.value.borrow_mut() = None;
        *self.runtime.last_value_updated.borrow_mut() = false;
    }

    /// Exececutes all the continuations found in the vector `self.present_works`.
    // Pops inside the loop condition so the RefCell borrow is released
    // before each continuation runs (a continuation may re-borrow).
    fn execute_present_works(&mut self, runtime: &mut SingleThreadRuntime) {
        while let Some(c) = self.runtime.present_works.borrow_mut().pop() {
            c.call_box(runtime, ());
        }
    }
}
impl<V> SignalRuntimeRefSt for SpmcSignalRuntimeRef<V> where V: Clone + 'static {
    /// Calls `c` at the first cycle where the signal is present.
    fn on_signal<C>(&mut self, runtime: &mut SingleThreadRuntime, c: C)
        where C: ContinuationSt<()>
    {
        if self.is_emitted() {
            // Already emitted this instant: run immediately.
            c.call(runtime, ());
        } else {
            // Park the continuation; the await counter keeps the runtime
            // alive until the signal eventually fires.
            runtime.incr_await_counter();
            self.runtime.await_works.borrow_mut().push(Box::new(c));
        }
    }

    /// Calls `c` only if the signal is present during this cycle.
    fn on_signal_present<C>(&mut self, runtime: &mut SingleThreadRuntime, c: C)
        where C: ContinuationSt<()>
    {
        if self.is_emitted() {
            c.call(runtime, ());
        } else {
            // Queued work is dropped at instant end if the signal never fires.
            self.runtime.present_works.borrow_mut().push(Box::new(c));
        }
    }
}
impl<V> CanEmit<SingleThreadRuntime, V> for SpmcSignalRuntimeRef<V>
    where V: Clone + 'static
{
    /// Emits `emitted` for the current instant, waking all awaiting and
    /// present-conditional continuations.
    ///
    /// # Panics
    /// Panics on a second emission within the same instant (single-producer
    /// invariant).
    fn emit(&mut self, runtime: &mut SingleThreadRuntime, emitted: V) {
        if self.is_emitted() {
            panic!("Multiple emissions of a single-producer signal inside an instant.");
        }
        *self.runtime.value.borrow_mut() = Some(emitted);
        // Wake everything parked on `await`; each wake-up balances the
        // counter increment performed in on_signal.
        while let Some(c) = self.runtime.await_works.borrow_mut().pop() {
            runtime.decr_await_counter();
            c.call_box(runtime, ());
        }
        self.execute_present_works(runtime);
        runtime.emit_signal(Box::new(self.clone()));
        // At instant end, snapshot the emitted value into `last_value`
        // exactly once (guarded by last_value_updated).
        let signal_ref = self.clone();
        let update_last_value = move |_: &mut SingleThreadRuntime, ()| {
            if !*signal_ref.runtime.last_value_updated.borrow() {
                *signal_ref.runtime.last_value.borrow_mut() = Some(signal_ref.get_value());
                *signal_ref.runtime.last_value_updated.borrow_mut() = true;
            }
        };
        runtime.on_end_of_instant(Box::new(update_last_value));
    }
}
impl<V> CanTryEmit<SingleThreadRuntime, V> for SpmcSignalRuntimeRef<V>
    where V: Clone + 'static
{
    /// Non-panicking variant of `emit`: emits only when nothing has been
    /// emitted this instant and reports whether the emission happened.
    fn try_emit(&mut self, runtime: &mut SingleThreadRuntime, emitted: V) -> bool {
        if self.is_emitted() {
            return false;
        }
        self.emit(runtime, emitted);
        true
    }
}
impl<V> GetValue<V> for SpmcSignalRuntimeRef<V> where V: Clone {
    /// Returns the value of the signal for the current instant.
    /// The returned value is cloned and can thus be used directly.
    ///
    /// # Panics
    /// Panics if the signal has not been emitted this instant (unwrap on None).
    fn get_value(&self) -> V {
        self.runtime.value.borrow().clone().unwrap()
    }
}
impl<V> SpmcSignalRuntimeRef<V> where V: Clone + 'static {
    /// Returns a new instance of SignalRuntimeRef.
    fn new() -> Self {
        SpmcSignalRuntimeRef { runtime: Rc::new(SpmcSignalRuntime::new()) }
    }
}
/// A non-parallel single-producer, multi-consumer signal.
pub struct SpmcSignalSt<V>(SpmcSignalRuntimeRef<V>);

impl<V> Clone for SpmcSignalSt<V> {
    // Clones share the underlying runtime (Rc-backed).
    fn clone(&self) -> Self {
        SpmcSignalSt(self.0.clone())
    }
}

impl<V> Signal for SpmcSignalSt<V> where V: Clone + 'static {
    type RuntimeRef = SpmcSignalRuntimeRef<V>;

    fn runtime(&self) -> SpmcSignalRuntimeRef<V> {
        self.0.clone()
    }
}

impl<V> ValuedSignal for SpmcSignalSt<V> where V: Clone + 'static {
    // The signal stores the emitted value itself; SpSignal marks the
    // single-producer flavour.
    type Stored = V;
    type SigType = SpSignal;
}

impl<V> SpmcSignalSt<V> where V: Clone + 'static {
    /// Creates a new spmc signal.
    pub fn new() -> Self {
        SpmcSignalSt(SpmcSignalRuntimeRef::new())
    }

    /// Returns the last value associated to the signal when it was emitted.
    /// Evaluates to the `None` before the first emission.
    pub fn last_value(&self) -> Option<V> {
        let r = self.runtime();
        let last_v = r.runtime.last_value.borrow();
        last_v.clone()
    }

    /// Emits a value to the signal only if the signal is not yet emitted.
    /// Returns a bool to indicate if the emission suceeds or not.
    pub fn try_emit(&self, emitted: V) -> TryEmitValue<Self, V> {
        TryEmitValue { signal: self.clone(), emitted }
    }
}
| true |
da2265944254441fa6c417c3f800dcc4af53cfc1
|
Rust
|
sergeyboyko0791/redis-asio
|
/src/base/error.rs
|
UTF-8
| 1,463 | 3.25 | 3 |
[
"MIT"
] |
permissive
|
use std::fmt;
use std::error::Error;
/// Categories of failures the Redis client can report.
#[derive(Debug, Clone, PartialEq)]
pub enum RedisErrorKind {
    InternalError,
    IncorrectConversion,
    ConnectionError,
    ParseError,
    ReceiveError,
    InvalidOptions,
}
/// An error kind paired with a human-readable description.
#[derive(Debug, Clone)]
pub struct RedisError {
    pub error: RedisErrorKind,
    // Free-form context; surfaced via Display and Error::description.
    desc: String,
}

/// Convenience alias used throughout the crate's fallible APIs.
pub type RedisResult<T> = Result<T, RedisError>;
impl RedisError {
    /// Builds an error of the given kind with a descriptive message.
    pub fn new(error: RedisErrorKind, desc: String) -> RedisError {
        RedisError { error, desc }
    }
}
impl fmt::Display for RedisError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "error: \"{}\", description: \"{}\"",
to_string(&self.error),
&self.desc)
}
}
impl std::error::Error for RedisError {
    // NOTE(review): `description` has been deprecated since Rust 1.42 in
    // favour of `Display`; kept for callers that still use it.
    fn description(&self) -> &str {
        &self.desc
    }
}
impl From<std::io::Error> for RedisError {
fn from(err: std::io::Error) -> Self {
RedisError { error: RedisErrorKind::ConnectionError, desc: err.description().to_string() }
}
}
// Maps each error kind to its static display name (used by Display).
fn to_string(err: &RedisErrorKind) -> &'static str {
    match err {
        RedisErrorKind::InternalError => "InternalError",
        RedisErrorKind::IncorrectConversion => "IncorrectConversion",
        RedisErrorKind::ConnectionError => "ConnectionError",
        RedisErrorKind::ParseError => "ParseError",
        RedisErrorKind::ReceiveError => "ReceiveError",
        RedisErrorKind::InvalidOptions => "InvalidOptions",
    }
}
| true |
3160f440876e95a53f0413bc48be15627f7a0cda
|
Rust
|
rstropek/rust-samples
|
/wasm-serverless/word_puzzle_spin/src/lib.rs
|
UTF-8
| 1,787 | 2.625 | 3 |
[] |
no_license
|
use anyhow::Result;
use http::response::Builder;
use spin_sdk::{
http::{Request, Response, Params},
http_component, http_router,
};
use word_puzzle_generator::{place_words, GeneratorOptions};
/// HTTP entry point: answers CORS preflights, a health-check `GET /`, and
/// routes `POST /generate` to the puzzle generator.
#[http_component]
fn handle_word_puzzle_spin(req: Request) -> Result<Response> {
    // CORS preflight: reply permissively so browsers may call any route.
    // Reuses the WithCors helper defined below instead of repeating the
    // three headers inline (same headers, single source of truth).
    if req.method() == http::Method::OPTIONS {
        return Ok(http::Response::builder()
            .status(http::StatusCode::OK)
            .with_cors()
            .body(None)?);
    }

    let router = http_router! {
        GET "/" => |_req, _params| {
            Ok(http::Response::builder()
                .status(http::StatusCode::OK)
                .body(Some("Hello from spin!".into()))?)
        },
        POST "/generate" => generate_puzzle
    };
    router.handle(req)
}
/// Parses `GeneratorOptions` from the request body, enforces the size cap,
/// and returns the generated puzzle as pretty-printed JSON.
fn generate_puzzle(req: Request, _params: Params) -> Result<Response> {
    // Reject a missing body with 400 instead of panicking on unwrap().
    let body = match req.body().as_ref() {
        Some(b) => b,
        None => {
            return Ok(http::Response::builder()
                .status(http::StatusCode::BAD_REQUEST)
                .with_cors()
                .body(None)?)
        }
    };
    let options: GeneratorOptions = serde_json::from_str(std::str::from_utf8(body.as_ref())?)?;

    // Cap the grid size to keep generation cheap.
    if options.size > 20 {
        return Ok(http::Response::builder()
            .status(http::StatusCode::BAD_REQUEST)
            .with_cors()
            .body(None)?);
    }

    let puzzle = place_words(options);
    let response = serde_json::to_string_pretty(&puzzle)?.as_bytes().to_vec();
    Ok(http::Response::builder()
        .status(http::StatusCode::OK)
        .with_cors()
        .header("Content-Type", "application/json")
        .body(Some(response.into()))?)
}
// Small extension trait so response builders can opt into permissive CORS
// with one call.
trait WithCors {
    fn with_cors(self) -> Self;
}

impl WithCors for Builder {
    // Allows any origin, method, and header.
    fn with_cors(self) -> Self {
        self.header("Access-Control-Allow-Origin", "*")
            .header("Access-Control-Allow-Methods", "*")
            .header("Access-Control-Allow-Headers", "*")
    }
}
| true |
89d6edff6e18f9e28675a936ff0cfe07b5120e7d
|
Rust
|
untoldwind/t-rust-less
|
/lib/src/memguard/zeroize_buffer.rs
|
UTF-8
| 1,737 | 3.03125 | 3 |
[
"MIT"
] |
permissive
|
use std::io;
use std::ops;
use zeroize::Zeroize;
/// A growable byte buffer that zeroes every outgrown allocation (and the
/// final one on drop) so secret bytes never linger on the heap.
pub struct ZeroizeBytesBuffer(Vec<u8>);

impl ZeroizeBytesBuffer {
    /// Creates an empty buffer with room for `initial_capacity` bytes.
    pub fn with_capacity(initial_capacity: usize) -> ZeroizeBytesBuffer {
        ZeroizeBytesBuffer(Vec::with_capacity(initial_capacity))
    }
}
impl ZeroizeBytesBuffer {
    /// Appends a single byte, growing the buffer only when it is full.
    ///
    /// Growth is done manually (rather than letting `Vec` reallocate) so the
    /// old allocation can be zeroed before being returned to the allocator.
    pub fn append(&mut self, byte: u8) {
        // BUG FIX: the original condition `len <= capacity` is always true,
        // forcing a grow-copy-zeroize cycle on every push (O(n^2) overall).
        // Grow only when the buffer is actually at capacity; the push below
        // then never triggers a hidden Vec reallocation.
        if self.0.len() == self.0.capacity() {
            let next_size = 2 * (self.0.capacity() + 1);
            let mut next_buffer = Vec::with_capacity(next_size);
            next_buffer.extend_from_slice(&self.0);
            self.0.zeroize();
            self.0 = next_buffer;
        }
        self.0.push(byte)
    }
}
impl Zeroize for ZeroizeBytesBuffer {
    // Delegates to Vec's zeroize, wiping the current contents.
    fn zeroize(&mut self) {
        self.0.zeroize();
    }
}
impl Drop for ZeroizeBytesBuffer {
    // Wipe the buffer before the allocation is freed.
    fn drop(&mut self) {
        self.0.zeroize()
    }
}
impl ops::Deref for ZeroizeBytesBuffer {
    type Target = [u8];

    /// Borrows the buffered bytes as a slice.
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
impl AsRef<[u8]> for ZeroizeBytesBuffer {
    /// Borrows the buffered bytes as a slice.
    fn as_ref(&self) -> &[u8] {
        &self.0[..]
    }
}
impl io::Write for ZeroizeBytesBuffer {
    /// Appends `buf`, zeroing the old allocation whenever a larger one is
    /// needed. Always reports the full `buf.len()` as written.
    fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
        // FIX: renamed the misspelled `avalable` and changed `<` to `<=` —
        // when buf exactly fills the spare capacity it still fits without
        // reallocating, so the grow-and-copy path is unnecessary.
        let available = self.0.capacity() - self.0.len();
        if buf.len() <= available {
            // Fits in spare capacity: extend_from_slice cannot reallocate.
            self.0.extend_from_slice(buf);
        } else {
            // Grow into a fresh, larger buffer and zero out the old one so
            // the allocator never sees stale secret bytes.
            let next_size = 2 * (self.0.capacity() + buf.len());
            let mut next_buffer = Vec::with_capacity(next_size);
            next_buffer.extend_from_slice(&self.0);
            next_buffer.extend_from_slice(buf);
            self.0.zeroize();
            self.0 = next_buffer;
        }
        Ok(buf.len())
    }

    // Nothing is buffered outside the Vec, so flushing is a no-op.
    fn flush(&mut self) -> io::Result<()> {
        Ok(())
    }
}
| true |
5e7593313205ed5d0a6721995546b81d3f74c6d5
|
Rust
|
finallyegg/cmsc330spring19-public
|
/discussions/discussion12/src/lib.rs
|
UTF-8
| 1,522 | 3.359375 | 3 |
[] |
no_license
|
// Discussion 12 exercises
use std::mem::swap;
use std::rc::Rc;
// Given a list of list of integers, return the sum. Use map and fold.
// sum_arr_arr(&[&[1, 2], &[3]] -> 6
/// Sums every integer across all inner slices, e.g.
/// `sum_arr_arr(&[&[1, 2], &[3]]) == 6`. Implemented with map + fold as
/// the exercise requests; an empty outer slice sums to 0.
pub fn sum_arr_arr(arr: &[&[i32]]) -> i32 {
    arr.iter()
        .map(|inner| inner.iter().sum::<i32>())
        .fold(0, |acc, partial| acc + partial)
}
// A classic cons-list: either empty (Nil) or a head element plus a boxed
// tail list.
#[derive (Debug, PartialEq)]
enum List<T> {
    Nil,
    Cons(T, Box<List<T>>)
}
use List::{Nil, Cons};
impl <T> List<T> {
pub fn new() -> Self {
Nil
}
// Adds an element to the end of the list.
pub fn add(&mut self, e: T) {
// IMPLEMENT
}
// Returns a Vec representation of the list.
pub fn vec(&self) -> Vec<&T> {
let mut out_vec = Vec::new();
self.vec_aux(&mut out_vec);
out_vec
}
fn vec_aux<'a>(&'a self, out_vec: &mut Vec<&'a T>) {
// IMPLEMENT
}
}
impl <'a, T> IntoIterator for &'a List<T> {
    type Item = &'a T;
    type IntoIter = ::std::vec::IntoIter<&'a T>;

    // Returns an iterator for the list.
    // Materializes the borrows into a Vec first, then iterates that Vec.
    fn into_iter(self) -> Self::IntoIter {
        self.vec().into_iter()
    }
}
// A shared resource for the Rc demo below.
#[derive (Debug, PartialEq)]
struct Texture {
    id: i32,
}

// Holds a shared (reference-counted) texture.
#[derive (Debug, PartialEq)]
struct Tree {
    texture: Rc<Texture>,
}
// Demonstrates Rc ownership: both trees clone texture1, so it outlives the
// inner scope; texture2 is created but never shared, so it is dropped at the
// end of the scope. Returns (1, 1).
fn rc_demo() -> (i32, i32) {
    let tree1;
    let tree2;
    {
        let texture1 = Rc::new(Texture { id: 1 });
        let texture2 = Rc::new(Texture { id: 2 });
        tree1 = Tree { texture: Rc::clone(&texture1) };
        tree2 = Tree { texture: Rc::clone(&texture1) };
    }
    (tree1.texture.id, tree2.texture.id)
}
#[cfg(test)]
mod public;
| true |
3a7695d5b32a05d83125b8b8a3ebd81ec74d1a54
|
Rust
|
koonopek/wat_plan_v2_backend
|
/src/scrap_wat.rs
|
UTF-8
| 6,839 | 2.640625 | 3 |
[] |
no_license
|
use std::prelude::v1::Vec;
use std::result::Result;
use std::thread;
use reqwest;
use s3::bucket::Bucket;
use scraper::{Html, Selector};
use tokio::fs::File;
use s3::credentials::Credentials;
use s3::region::Region;
use tokio::prelude::*;
use crate::s3_driver;
use std::error::Error;
// Delay inserted between per-group requests.
const COOLDOWN: std::time::Duration = std::time::Duration::from_secs(7);
// Login page of the timetable service.
const URL: &str = "https://s1.wcy.wat.edu.pl/ed1/";
// S3 key prefix under which each group's plan is stored.
pub const FOLDER_GROUP: &str = "groups/";
// Timetable grid dimensions: VMAX rows x HMAX columns.
const VMAX: usize = 22;
const HMAX: usize = 49;
type GenericError = Box<dyn Error>;
// One timetable cell: the cell's tooltip text and the room label.
#[derive(serde::Serialize, Clone)]
pub struct Krotka {
    title: String,
    class: String,
}

impl Krotka {
    fn new(title: String, class: String) -> Krotka {
        return Krotka { title, class };
    }
}
type Task = tokio::task::JoinHandle<Result<(), std::io::Error>>;

/// Logs into the timetable service, enumerates all groups, and for each
/// group downloads, parses, and uploads its plan to S3 (one spawned task
/// per group, throttled by COOLDOWN between spawns).
///
/// Requires the USER and PASSWORD environment variables; panics if absent.
#[tokio::main]
pub async fn fetch_parse_plan() -> Result<(), GenericError> {
    let client: reqwest::Client = build_client().unwrap();
    let sid = get_sid(&client, URL).await?;
    println!("sid:{}", sid);
    let user_id = std::env::var("USER").expect("ERROR: User global var not set");
    let password = std::env::var("PASSWORD").expect("ERROR: Password global var not set");
    login(&client, &sid, user_id, password).await?;
    let groups = extract_groups(&sid).await;
    let mut tasks: Vec<Task> = Vec::new();
    for group in groups {
        let sido = sid.clone();
        // NOTE(review): thread::sleep blocks the async runtime thread here;
        // tokio::time::sleep would be the non-blocking alternative.
        thread::sleep(COOLDOWN);
        let task = tokio::spawn(async move {
            let content = process_request(&sido, &group).await;
            let bucket: Bucket = s3_driver::get_bucket_async().await.unwrap();
            let file_name = FOLDER_GROUP.to_owned() + &group[..];
            save_to_s3(&content,&file_name,&bucket).await;
            Ok(())
        });
        tasks.push(task);
    }
    // Wait for every upload before returning.
    for task in tasks {
        let _ = task.await.expect("ERROR: Couldnt join task error");
    }
    Ok(())
}
/// Downloads the timetable page for `group` and parses it into cells.
async fn process_request(sido: &String, group: &String) -> Vec<Krotka> {
    let html = get_plan_site(sido, group)
        .await
        .expect("ERROR: Couldnt get plan site");
    extract_krotkas(html).await
}
/// Serializes the parsed cells to JSON and uploads them to `bucket` under
/// `file_name`. Panics with a descriptive message on serialization or
/// upload failure (this is a batch job; failing loudly is intended).
async fn save_to_s3(content: &Vec<Krotka>, file_name: &str, bucket: &Bucket) {
    // Serializing plain string structs should never fail; use expect with a
    // clear message instead of a bare unwrap so a failure is diagnosable.
    let content = serde_json::to_string(content)
        .expect("ERROR: Couldnt serialize plan to JSON");
    let (_, code) = bucket.put_object(file_name, content.as_bytes(), "text/plain")
        .await
        .expect("Saving to s3 failed");
    if code == 200 { println!("Plan saved to S3::{}::{}", bucket.name, file_name); }
}
// The room label sits in the 5th text fragment (index 4) of each cell.
async fn extract_krotkas(html: String) -> Vec<Krotka> {
    let mut result: Vec<Krotka> = Vec::new();
    // Each timetable cell is a <td> with this specific CSS class.
    let selector = Selector::parse(r#"td[class="tdFormList1DSheTeaGrpHTM3"]"#).unwrap();
    let html = Html::parse_fragment(&html[..]);
    for td in html.select(&selector) {
        let td_title = td.value().attr("title").unwrap_or("").to_owned();
        let text = td.text().collect::<Vec<_>>();
        let class = text.get(4).unwrap_or(&"").to_owned().to_owned();
        let krotka = Krotka::new(td_title, class);
        result.push(krotka);
    }
    // Cells arrive column-major from the HTML; transpose to row-major.
    trasnsponse(result)
}
/// Fetches the group-list page and returns the names of all group links
/// (anchors with class `aMenu`).
async fn extract_groups(sid: &String) -> Vec<String> {
    let html = get_plan_site(sid, "").await.expect("ERROR: Couldnt get groups site.");
    let selector = Selector::parse(r#"a[class=aMenu]"#).unwrap();
    let document = Html::parse_fragment(&html[..]);
    document
        .select(&selector)
        .map(|a| a.text().next().unwrap().to_owned())
        .collect()
}
// Transposes a flat VMAX x HMAX matrix stored in row-major order by reading
// it column-by-column into a new Vec.
// NOTE(review): the name is a typo for "transpose" (kept: renaming would
// break callers). Also assumes matrix.len() >= VMAX*HMAX when non-empty —
// a shorter input would panic on indexing; TODO confirm inputs are always
// full grids.
fn trasnsponse<T: Clone>(matrix: Vec<T>) -> Vec<T> {
    if matrix.len() == 0 {
        return matrix;
    }
    let mut new_matrix: Vec<T> = Vec::new();
    let mut i: usize = 0;
    let mut offset: usize = 0;
    loop {
        new_matrix.push(matrix[i].clone());
        // Step down one row; on falling off the bottom, move to the next column.
        i += VMAX;
        if i >= VMAX * HMAX {
            offset += 1;
            i = offset;
        }
        if new_matrix.len() == VMAX * HMAX {
            break;
        }
    }
    new_matrix
}
/// Fetches the login page and extracts the session id from the login form's
/// `action` attribute (expected shape: `...?...=SID`).
///
/// # Panics
/// Panics when the form is missing, the attribute is absent, or the sid is
/// empty (treated as an expired session).
async fn get_sid(client: &reqwest::Client, url: &str) -> Result<String, reqwest::Error> {
    let body = client.get(url).send().await?.text().await?;
    let selector = Selector::parse(r#"form[name="aaa"]"#).unwrap();
    let html = Html::parse_fragment(&body[..]);
    let result = html
        .select(&selector)
        .next()
        .unwrap()
        .value()
        .attr("action")
        .expect("ERROR: Couldnt parse sid");
    if result == "" {
        panic!("ERROR: sid has expired");
    }
    // The action looks like "path?param=SID"; keep the part after '='.
    let result: Vec<&str> = result.split('=').collect();
    let sid = String::from(result[1]);
    Ok(sid)
}
/// Builds the browser-like header set sent with every login/plan request.
fn get_headers() -> reqwest::header::HeaderMap {
    let pairs = [
        (
            "User-Agent",
            "Mozilla/5.0 (X11;Fedora; Linux x86_64; rv:73.0) Gecko/20100101 Firefox/73.0",
        ),
        ("Content-Type", "application/x-www-form-urlencoded"),
        ("Origin", "https://s1.wcy.wat.edu.pl"),
        ("Connection", "keep-alive"),
        ("Referer", "https://s1.wcy.wat.edu.pl/ed1/"),
        ("Upgrade-Insecure-Requests", "1"),
        ("Pragma", "no-cache"),
        ("Cache-Control", "no-cache"),
    ];
    let mut map = reqwest::header::HeaderMap::new();
    for (name, value) in pairs.iter() {
        map.insert(*name, value.parse().unwrap());
    }
    map
}
/// Builds the HTTP client used for all requests.
/// NOTE: TLS certificate validation is disabled — presumably required for
/// the target host's certificate; confirm before reuse elsewhere.
fn build_client() -> Result<reqwest::Client, reqwest::Error> {
    reqwest::Client::builder()
        .danger_accept_invalid_certs(true)
        .build()
}
/// Submits the login form for the given session id with the supplied
/// credentials; returns the raw HTTP response.
async fn login(
    client: &reqwest::Client,
    sid: &str,
    user_id: String,
    password: String,
) -> Result<reqwest::Response, reqwest::Error> {
    // Field names/values mirror the site's login form, including the fake
    // viewport dimensions it expects.
    let form = &[
        ("formname", "login"),
        ("default_fun", "1"),
        ("userid", &user_id[..]),
        ("password", &password[..]),
        ("view_height", "1080"),
        ("view_width", "1920"),
    ];
    let headers = get_headers();
    let mut url: String = String::from("https://s1.wcy.wat.edu.pl/ed1/index.php?sid=");
    url.push_str(sid);
    let post = client
        .post(&url[..])
        .form(form)
        .headers(headers)
        .send()
        .await?;
    Ok(post)
}
async fn get_plan_site(sid: &String, group: &str) -> Result<String, reqwest::Error> {
let client = build_client().unwrap();
let mut url: String = String::from(
"https://s1.wcy.wat.edu.pl/ed1/logged_inc.php?mid=328&iid=20192&pos=0&rdo=1&sid=",
);
let mut group_base: String = String::from("&exv=");
url.push_str(&sid[..]);
group_base.push_str(group);
url.push_str(&group_base[..]);
println!("{}", url);
let post = client.post(&url[..]).send().await?.text().await?;
Ok(post)
}
| true |
24b1896d3ef972a1057eeca980d9a46bc1e3c597
|
Rust
|
claderoki/Intergalactica-Discord-Bot-Rust
|
/src/modules/pigeon/commands/explore.rs
|
UTF-8
| 4,685 | 2.671875 | 3 |
[] |
no_license
|
use std::time::Duration;
use chrono::NaiveDateTime;
use serenity::builder::CreateComponents;
use serenity::client::Context;
use serenity::framework::standard::macros::command;
use serenity::framework::standard::CommandResult;
use serenity::model::channel::Message;
use serenity::model::channel::ReactionType;
use serenity::model::interactions::message_component::ButtonStyle;
use serenity::model::interactions::InteractionResponseType;
use serenity::model::prelude::User;
use crate::discord_helpers::embed_utils::EmbedExtension;
use crate::modules::pigeon::helpers::validation::PigeonValidation;
use crate::modules::pigeon::models::pigeon::PigeonStatus;
use crate::modules::pigeon::repository::exploration::ExplorationRepository;
use crate::modules::pigeon::repository::pigeon::PigeonRepository;
use crate::modules::shared::repository::reminder::NewReminder;
use crate::modules::shared::repository::reminder::ReminderRepository;
#[command("explore")]
#[only_in(guild)]
#[description("Send your pigeon into space.")]
#[aliases("spaceplore")]
pub async fn explore(ctx: &Context, msg: &Message) -> CommandResult {
    // The author must have an active pigeon that is currently idle.
    let human_id = PigeonValidation::new()
        // .item_needed("space_shuttle")
        .needs_active_pigeon(true)
        .required_pigeon_status(PigeonStatus::Idle)
        .validate(&msg.author)?;

    // Pick a destination and work out when the pigeon arrives there.
    let location = ExplorationRepository::get_random_location()?;
    let travel_time = chrono::Duration::minutes(location.travel_distance_in_minutes);
    let arrival_date = (chrono::offset::Utc::now() + travel_time).naive_utc();

    ExplorationRepository::create_exploration(human_id, location.id, arrival_date)?;
    PigeonRepository::update_status(human_id, PigeonStatus::SpaceExploring)?;
    success_scenario(msg, ctx, location.image_url, arrival_date).await?;
    Ok(())
}
/// Adds a single action row containing the "Remind me" button.
fn create_reminder_components<'a>(
    components: &'a mut CreateComponents,
) -> &'a mut CreateComponents {
    components.create_action_row(|row| {
        row.create_button(|button| {
            button
                .custom_id("reminder")
                .emoji(ReactionType::Unicode("❗".into()))
                .style(ButtonStyle::Secondary)
                .label("Remind me")
        })
    })
}
async fn should_remind(ctx: &Context, msg: &Message, user: &User) -> bool {
let interaction_result = &msg
.await_component_interaction(&ctx)
.author_id(user.id)
.timeout(Duration::from_secs(120))
.await;
match interaction_result {
Some(interaction) => {
let _ = interaction
.create_interaction_response(&ctx, |f| {
f.kind(InteractionResponseType::DeferredUpdateMessage)
})
.await;
if interaction.data.custom_id == "reminder" {
return true;
}
false
}
None => false,
}
}
/// Announces a successful launch and, if the user asks via the button,
/// schedules a landing reminder for `arrival_date`.
///
/// All errors are stringified via `{:?}` into the `Err` variant.
async fn success_scenario(
    msg: &Message,
    ctx: &Context,
    image_url: String,
    arrival_date: NaiveDateTime,
) -> Result<(), String> {
    // Post the launch embed together with the "Remind me" button.
    let sent = msg
        .channel_id
        .send_message(&ctx, |m| {
            m.components(|c| create_reminder_components(c)).embed(|e| {
                e.normal_embed("Your pigeon has successfully taken off to space!")
                    .thumbnail(image_url)
            })
        })
        .await
        .map_err(|e| format!("{:?}", e))?;

    if !should_remind(ctx, &sent, &msg.author).await {
        return Ok(());
    }

    let text = format!(
        "Your pigeon has landed on {}\n`/pigeon space` to check on it!",
        "Luna"
    );
    let mut reminder = NewReminder::new(msg.author.id.into(), text, arrival_date);
    // reminder.command("pigeon space")
    reminder.channel_id(msg.channel_id.into());
    ReminderRepository::create(&reminder).map_err(|e| format!("{:?}", e))?;

    // Confirmation message is best-effort; a send failure is ignored.
    let _ = msg
        .channel_id
        .send_message(&ctx, |m| {
            m.embed(|e| {
                e.normal_embed("Okay, I will remind you when your pigeon has arrived.")
            })
        })
        .await;
    Ok(())
}
| true |
2f0959f55db6038a552552adb41244bdaf78a76a
|
Rust
|
scraeling/aoc2020
|
/day10/src/main.rs
|
UTF-8
| 2,027 | 3.140625 | 3 |
[] |
no_license
|
use std::fs::read_to_string;
use timer::time;
use std::collections::HashMap;
use std::collections::HashSet;
fn main() {
    let input = read_to_string("input.txt").unwrap();
    // Each stage is wrapped in the `time!` macro so its runtime is measured.
    let adapters = time!(parse_input(&input));
    let part1 = time!(part_1(&adapters));
    let part2 = time!(part_2(&adapters));
    println!("Part 1: 1 jolt multiplied by 3 jolt differences when all adapters are connected: {}", part1);
    println!("Part 2: Total number of distinct ways you can arrange adapters: {}", part2);
}
/// Parses one adapter joltage per line, then appends the outlet (0) and the
/// device's built-in adapter (max + 3) and returns the sorted list.
///
/// Fix: uses `lines()` plus `trim()` instead of `split("\n")`, so a trailing
/// newline or Windows "\r\n" endings no longer cause a parse panic.
fn parse_input(input: &str) -> Vec<u32> {
    let mut adapters: Vec<u32> = input
        .lines()
        .map(|line| line.trim().parse::<u32>().unwrap())
        .collect();
    // The charging outlet has an effective rating of 0 jolts.
    adapters.push(0);
    adapters.sort();
    // The built-in adapter is always rated 3 jolts above the highest adapter.
    adapters.push(adapters.last().unwrap() + 3);
    adapters
}
/// Counts joltage differences between consecutive adapters and returns
/// (number of 1-jolt gaps) * (number of 3-jolt gaps).
fn part_1(adapters: &Vec<u32>) -> u32 {
    // Index d holds how many consecutive pairs differ by exactly d jolts.
    let mut diff_counts = [0u32; 4];
    for pair in adapters.windows(2) {
        diff_counts[(pair[1] - pair[0]) as usize] += 1;
    }
    diff_counts[1] * diff_counts[3]
}
/// Number of distinct arrangements reachable from `adapter`, memoized in
/// `branch_count` (keyed by joltage).
fn get_num_branches(adapter: &u32, set: &HashSet<u32>, branch_count: &mut HashMap<u32, u64>) -> u64 {
    let mut total = 0u64;
    // An adapter can feed any adapter rated 1 to 3 jolts higher.
    for next in adapter + 1..=adapter + 3 {
        if !set.contains(&next) {
            continue;
        }
        let paths = match branch_count.get(&next) {
            Some(&cached) => cached,
            None => {
                let computed = get_num_branches(&next, set, branch_count);
                branch_count.insert(next, computed);
                computed
            }
        };
        total += paths;
    }
    total
}

/// Counts every distinct way the adapters can be chained from the outlet (0)
/// up to the device's built-in adapter.
fn part_2(adapters: &Vec<u32>) -> u64 {
    let target_joltage = *adapters.last().unwrap();
    let set: HashSet<u32> = adapters.iter().copied().collect();
    let mut branch_count: HashMap<u32, u64> = HashMap::new();
    // The final adapter terminates exactly one arrangement.
    branch_count.insert(target_joltage, 1);
    // Walk from largest to smallest so successors are (mostly) cached
    // before they are needed.
    for a in adapters.iter().rev() {
        if !branch_count.contains_key(a) {
            let count = get_num_branches(a, &set, &mut branch_count);
            branch_count.insert(*a, count);
        }
    }
    branch_count[&0u32]
}
| true |
8d69f8c6e833722eca9c0846de7f3a04aa1805ad
|
Rust
|
udoprog/OxidizeBot
|
/crates/oxidize-common/src/irc.rs
|
UTF-8
| 1,672 | 3.40625 | 3 |
[
"MIT",
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
/// Struct of tags.
///
/// Only the tag keys recognized by `Tags::from_tags` are captured; each
/// field is `None` when the corresponding tag was absent.
#[derive(Debug, Clone, Default)]
pub struct Tags {
    /// Contents of the id tag if present.
    pub id: Option<String>,
    /// Contents of the msg-id tag if present.
    pub msg_id: Option<String>,
    /// The display name of the user.
    pub display_name: Option<String>,
    /// The ID of the user.
    pub user_id: Option<String>,
    /// Color of the user.
    pub color: Option<String>,
    /// Emotes part of the message.
    pub emotes: Option<String>,
}
impl Tags {
/// Extract tags from message.
pub fn from_tags<I, K, V>(tags: I) -> Tags
where
I: IntoIterator<Item = (K, V)>,
K: AsRef<str>,
V: AsRef<str>,
{
let mut id = None;
let mut msg_id = None;
let mut display_name = None;
let mut user_id = None;
let mut color = None;
let mut emotes = None;
for (key, value) in tags {
match key.as_ref() {
"id" => id = Some(value.as_ref().to_owned()),
"msg-id" => msg_id = Some(value.as_ref().to_owned()),
"display-name" => display_name = Some(value.as_ref().to_owned()),
"user-id" => user_id = Some(value.as_ref().to_owned()),
"color" => color = Some(value.as_ref().to_owned()),
"emotes" => emotes = Some(value.as_ref().to_owned()),
key => {
tracing::trace!(key, value = value.as_ref(), "unsupported tag");
}
}
}
Tags {
id,
msg_id,
display_name,
user_id,
color,
emotes,
}
}
}
| true |
6f819d64a46894411abc3e75c8c427027caabb92
|
Rust
|
gengjiawen/leetcode
|
/String/_1119_remove_vowels_from_a_string.rs
|
UTF-8
| 315 | 3.46875 | 3 |
[] |
no_license
|
// https://leetcode.com/problems/remove-vowels-from-a-string
/// Returns `s` with every lowercase vowel (a, e, i, o, u) removed.
///
/// Fix: the original allocated a fresh `Vec` of vowels for every character;
/// `matches!` performs the same membership test with no allocation.
pub fn remove_vowels(s: String) -> String {
    s.chars()
        .filter(|&c| !matches!(c, 'a' | 'e' | 'i' | 'o' | 'u'))
        .collect()
}
#[test]
pub fn t1() {
    // A string of only vowels must be stripped down to nothing.
    let stripped = remove_vowels(String::from("aeiou"));
    assert_eq!(stripped, String::new());
}
| true |
6b33947b744a41e8dc79340ad3448593fb25ba96
|
Rust
|
jcawl/learning-rust
|
/ch4_ownership/dangling_pointer.rs
|
UTF-8
| 510 | 3.390625 | 3 |
[] |
no_license
|
// !!code causes error!!
fn main() {
    // Despite the name, this binds the owned String returned by
    // `return_something` — ownership moves to this scope, nothing dangles.
    let ref_to_nothing = return_something();
}
// Deliberately broken teaching example: it tries to return a reference to
// a local String that is dropped when the function ends, so this function
// does not compile (kept as-is on purpose — see the file-top warning).
fn return_nothing() -> &String {
    //bring string into scope
    let s = String::from("hello");
    //return reference to string
    &s
    //`drop` is called on the memory storing s
    //as it goes out of scope
}
fn return_something() -> String {
    // bring string into scope
    let greeting = String::from("hello");
    // ownership of `greeting` moves out to the caller, so the memory is
    // not dropped here and no dangling reference can be created
    greeting
}
| true |
d402cebd064947560bfa93cb855d3ec32ac3891b
|
Rust
|
mrkgnao/sqlx
|
/sqlx-core/src/postgres/types/bool.rs
|
UTF-8
| 821 | 2.515625 | 3 |
[
"MIT",
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
] |
permissive
|
use crate::decode::{Decode, DecodeError};
use crate::encode::Encode;
use crate::postgres::protocol::TypeId;
use crate::postgres::types::PgTypeInfo;
use crate::postgres::Postgres;
use crate::types::HasSqlType;
impl HasSqlType<bool> for Postgres {
    fn type_info() -> PgTypeInfo {
        // Maps Rust `bool` to the Postgres BOOL type id.
        PgTypeInfo::new(TypeId::BOOL)
    }
}
impl HasSqlType<[bool]> for Postgres {
    fn type_info() -> PgTypeInfo {
        // Maps slices of `bool` to the Postgres BOOL array type id.
        PgTypeInfo::new(TypeId::ARRAY_BOOL)
    }
}
impl Encode<Postgres> for bool {
    fn encode(&self, buf: &mut Vec<u8>) {
        // Encoded as a single byte: 1 for true, 0 for false.
        buf.push(*self as u8);
    }
}
impl Decode<Postgres> for bool {
    /// Decodes a single-byte boolean; any non-zero byte is `true`.
    fn decode(buf: &[u8]) -> Result<Self, DecodeError> {
        match buf.first() {
            Some(&byte) => Ok(byte != 0),
            None => Err(DecodeError::Message(Box::new(
                "Expected minimum 1 byte but received none.",
            ))),
        }
    }
}
| true |
099b741c62573aa165098ff5f6ad47e6847f1dec
|
Rust
|
the-emerald/capra
|
/src/segment.rs
|
UTF-8
| 2,636 | 2.890625 | 3 |
[
"MIT"
] |
permissive
|
use crate::environment::Environment;
use crate::segment::DiveSegmentError::InconsistentDepth;
use crate::units::consumption::GasConsumption;
use crate::units::consumption_rate::GasConsumptionRate;
use crate::units::depth::Depth;
use crate::units::rate::Rate;
use thiserror::Error;
use time::Duration;
/// Errors produced when constructing a `Segment`.
#[derive(Copy, Clone, Debug, Error, Eq, PartialEq, Hash)]
pub enum DiveSegmentError {
    #[error("segment type inconsistent with start/end depth")]
    InconsistentDepth,
}
/// The category of a dive segment.
///
/// `AscDesc` is the only variant whose start and end depths are allowed
/// (and required) to differ — enforced by `Segment::new`.
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
#[cfg_attr(feature = "use-serde", derive(serde::Serialize, serde::Deserialize))]
pub enum SegmentType {
    NoDeco,
    DecoStop,
    Bottom,
    AscDesc,
}
/// A single segment of a dive profile.
///
/// Invariant (enforced by `Segment::new`): `start_depth` and `end_depth`
/// differ exactly when `segment_type` is `SegmentType::AscDesc`.
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
#[cfg_attr(feature = "use-serde", derive(serde::Serialize, serde::Deserialize))]
pub struct Segment {
    segment_type: SegmentType,
    start_depth: Depth,
    end_depth: Depth,
    time: Duration,
    ascent_rate: Rate,
    descent_rate: Rate,
}
impl Segment {
    /// Builds a validated segment.
    ///
    /// Returns `DiveSegmentError::InconsistentDepth` unless the depths
    /// differ exactly when `segment_type` is `SegmentType::AscDesc`.
    pub fn new(
        segment_type: SegmentType,
        start_depth: Depth,
        end_depth: Depth,
        time: Duration,
        ascent_rate: Rate,
        descent_rate: Rate,
    ) -> Result<Self, DiveSegmentError> {
        let depths_differ = start_depth != end_depth;
        if segment_type == SegmentType::AscDesc {
            // Ascents/descents must actually change depth.
            if !depths_differ {
                return Err(InconsistentDepth);
            }
        } else if depths_differ {
            // Every other segment kind is flat: start and end must match.
            return Err(InconsistentDepth);
        }
        Ok(Self {
            segment_type,
            start_depth,
            end_depth,
            time,
            ascent_rate,
            descent_rate,
        })
    }

    /// The kind of this segment.
    pub fn segment_type(&self) -> SegmentType {
        self.segment_type
    }

    /// Depth at the start of the segment.
    pub fn start_depth(&self) -> Depth {
        self.start_depth
    }

    /// Depth at the end of the segment.
    pub fn end_depth(&self) -> Depth {
        self.end_depth
    }

    /// Duration of the segment.
    pub fn time(&self) -> &Duration {
        &self.time
    }

    /// Ascent rate associated with this segment.
    pub fn ascent_rate(&self) -> Rate {
        self.ascent_rate
    }

    /// Descent rate associated with this segment.
    pub fn descent_rate(&self) -> Rate {
        self.descent_rate
    }

    /// Gas used over this segment at `consumption_rate` in `environment`.
    pub fn gas_consumed(
        &self,
        consumption_rate: GasConsumptionRate,
        environment: Environment,
    ) -> GasConsumption {
        // Ascents/descents use the pressure at the average of the two
        // depths; all other segments use the (constant) end depth.
        let pressure = if self.segment_type == SegmentType::AscDesc {
            ((self.start_depth + self.end_depth) / Depth(2)).pressure(environment)
        } else {
            self.end_depth.pressure(environment)
        };
        let minutes = self.time.as_seconds_f64() / 60.0;
        GasConsumption((pressure.0 * minutes * consumption_rate.0 as f64) as u32)
    }
}
| true |
7ac316b78327dc95636f184c3b617edc5cf818cf
|
Rust
|
opentimestamps/rust-opentimestamps
|
/src/attestation.rs
|
UTF-8
| 4,636 | 2.90625 | 3 |
[
"MIT",
"Apache-2.0"
] |
permissive
|
// Copyright (C) The OpenTimestamps developers
//
// This file is part of rust-opentimestamps.
//
// It is subject to the license terms in the LICENSE file found in the
// top-level directory of this distribution.
//
// No part of rust-opentimestamps including this file, may be copied, modified,
// propagated, or distributed except according to the terms contained in the
// LICENSE file.
//! # Attestations
//!
//! An attestation is a claim that some data existed at some time. It
//! comes from some server or from a blockchain.
//!
use std::fmt;
use std::io::{Read, Write};
use error::Error;
use hex::Hexed;
use ser;
/// Size in bytes of the tag identifying the attestation type
const TAG_SIZE: usize = 8;
/// Maximum length of a URI in a "pending" attestation
const MAX_URI_LEN: usize = 1000;
/// Tag indicating a Bitcoin attestation (TAG_SIZE bytes)
const BITCOIN_TAG: &[u8] = b"\x05\x88\x96\x0d\x73\xd7\x19\x01";
/// Tag indicating a pending attestation (TAG_SIZE bytes)
const PENDING_TAG: &[u8] = b"\x83\xdf\xe3\x0d\x2e\xf9\x0c\x8e";
/// An attestation that some data existed at some time
#[allow(missing_docs)]
#[derive(Clone, PartialEq, Eq, Debug)]
pub enum Attestation {
    /// An attestation from a Bitcoin blockheader. This consists of a blockheight
    /// and nothing more, it is expected that the current hash is equal to the
    /// Merkle root of the block at this height.
    Bitcoin {
        height: usize
    },
    /// An attestation from some server. It is commented at length in Peter Todd's
    /// `python-opentimestamps` that the server should be expected to keep anything
    /// it attests to, forever, and therefore the only thing we store locally is a
    /// single simple URI with a very restricted charset. (The restricted charset
    /// seems mainly to be to avoid the software being used for nefarious purposes,
    /// as it will fetch this URI under some circumstances.)
    Pending {
        uri: String
    },
    /// An unknown attestation that we just store straight:
    /// `tag` is the raw 8-byte type identifier, `data` the untouched payload.
    Unknown {
        tag: Vec<u8>,
        data: Vec<u8>
    }
}
impl Attestation {
    /// Deserialize an arbitrary attestation
    ///
    /// Wire format: an 8-byte type tag, a varint payload length, then the
    /// payload itself.
    pub fn deserialize<R: Read>(deser: &mut ser::Deserializer<R>) -> Result<Attestation, Error> {
        let tag = deser.read_fixed_bytes(TAG_SIZE)?;
        let len = deser.read_uint()?;
        if tag == BITCOIN_TAG {
            // NOTE(review): for known tags, `len` is not cross-checked against
            // the bytes actually consumed — confirm this matches the reference
            // python-opentimestamps behaviour.
            let height = deser.read_uint()?;
            Ok(Attestation::Bitcoin {
                height
            })
        } else if tag == PENDING_TAG {
            // This validation logic copied from python-opentimestamps. Peter comments
            // that he is deliberately avoiding ?, &, @, etc., to "keep us out of trouble"
            let uri_bytes = deser.read_bytes(0, MAX_URI_LEN)?;
            let uri_string = String::from_utf8(uri_bytes)?;
            for ch in uri_string.chars() {
                match ch {
                    'a'..='z' => {}
                    'A'..='Z' => {}
                    '0'..='9' => {}
                    '.' | '-' | '_' | '/' | ':' => {},
                    x => return Err(Error::InvalidUriChar(x))
                }
            }
            Ok(Attestation::Pending {
                uri: uri_string
            })
        } else {
            // Unknown tag: keep the raw tag and payload so the attestation
            // round-trips through serialize() unchanged.
            Ok(Attestation::Unknown {
                tag,
                data: deser.read_fixed_bytes(len)?
            })
        }
    }
    /// Serialize an attestation
    ///
    /// The payload is first serialized into a scratch buffer so it can be
    /// written length-prefixed via `write_bytes`.
    pub fn serialize<W: Write>(&self, ser: &mut ser::Serializer<W>) -> Result<(), Error> {
        let mut byte_ser = ser::Serializer::new(vec![]);
        match *self {
            Attestation::Bitcoin { height } => {
                ser.write_fixed_bytes(BITCOIN_TAG)?;
                byte_ser.write_uint(height)?;
                ser.write_bytes(&byte_ser.into_inner())
            }
            Attestation::Pending { ref uri } => {
                ser.write_fixed_bytes(PENDING_TAG)?;
                byte_ser.write_bytes(uri.as_bytes())?;
                ser.write_bytes(&byte_ser.into_inner())
            }
            // Unknown payloads were stored verbatim, so they are emitted
            // directly without re-encoding through the scratch buffer.
            Attestation::Unknown { ref tag, ref data } => {
                ser.write_fixed_bytes(tag)?;
                ser.write_bytes(data)
            }
        }
    }
}
impl fmt::Display for Attestation {
    /// Human-readable rendering of each attestation kind.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self {
            Attestation::Bitcoin { height } => write!(f, "Bitcoin block {}", height),
            Attestation::Pending { uri } => write!(f, "Pending: update URI {}", uri),
            Attestation::Unknown { tag, data } => {
                write!(f, "unknown attestation type {}: {}", Hexed(tag), Hexed(data))
            }
        }
    }
}
| true |
c5072d4bd7bb0371af21f024ec8cb00979664255
|
Rust
|
JasonCreighton/raymond
|
/src/main.rs
|
UTF-8
| 6,407 | 2.875 | 3 |
[
"MIT"
] |
permissive
|
mod math;
mod ppm;
mod scene;
mod surface;
mod texture;
mod util;
use rand::random;
use std::time::Instant;
use structopt::StructOpt;
use math::*;
use scene::*;
use surface::*;
use texture::*;
// Command-line interface parsed by structopt. The `///` doc comments below
// double as the generated --help text, so they are left untouched.
#[derive(StructOpt)]
#[structopt(name = "raymond")]
struct CommandLineArguments {
    /// Output file in PPM format (overwritten if already exists)
    #[structopt(short = "o", long = "output", default_value = "raymond_out.ppm")]
    output_file: String,
    /// Width of output image (in pixels)
    #[structopt(short = "w", long = "width", default_value = "1024")]
    width: usize,
    /// Height of output image (in pixels)
    #[structopt(short = "h", long = "height", default_value = "768")]
    height: usize,
    /// Oversampling factor (ie, antialiasing)
    #[structopt(short = "s", long = "samples", default_value = "2")]
    oversampling_factor: usize,
}
/// Builds a highly reflective black unit sphere at a random position
/// within a fixed region of the scene.
fn random_sphere() -> VisObj {
    // The three random() calls are made in x, y, z order, as before.
    let center = Vec3f {
        x: random::<f32>() * 10.0,
        y: random::<f32>() * 10.0 - 5.0,
        z: random::<f32>() * 5.0,
    };
    VisObj {
        surface: Box::new(Sphere::new(&center, 1.0)),
        texture: Box::new(RGB {
            red: 0.0,
            green: 0.0,
            blue: 0.0,
            //red: random::<f32>(),
            //green: random::<f32>(),
            //blue: random::<f32>(),
        }),
        reflectivity: 0.9,
    }
}
/// Assembles the demo scene: a checkerboard floor, a Mandelbrot billboard,
/// a recursive "portal" billboard rendered through `camera`, and a
/// reflective sphere, lit by one directional light plus ambient light.
fn build_scene(camera: &Camera) -> Scene {
    let mut scene = Scene {
        background: RGB {
            red: 0.3,
            green: 0.5,
            blue: 0.9,
        },
        ambient_light_intensity: 0.25,
        light_sources: Vec::new(),
        objects: Vec::new(),
    };
    // Single directional light, shining from behind/above the camera.
    scene.light_sources.push(LightSource {
        dir_to_light: Vec3f {
            x: 0.0,
            y: -10.0,
            z: 10.0,
        },
        intensity: 0.75,
    });
    // Classic red and white infinite checkerboard
    scene.objects.push(VisObj {
        surface: Box::new(Plane::new(
            &Vec3f {
                x: 0.0,
                y: 0.0,
                z: 0.0,
            },
            &Vec3f {
                x: 1.0,
                y: 0.0,
                z: 0.0,
            },
            &Vec3f {
                x: 0.0,
                y: 1.0,
                z: 0.0,
            },
        )),
        texture: Box::new(Checkerboard::new(
            Box::new(RGB {
                red: 2.5 / 3.0,
                green: 2.5 / 3.0,
                blue: 2.5 / 3.0,
            }),
            Box::new(RGB {
                red: 2.5,
                green: 0.0,
                blue: 0.0,
            }),
        )),
        reflectivity: 0.0,
    });
    // Blue -> cyan -> yellow -> red gradient used to color the
    // Mandelbrot set by escape time.
    let mut colormap = Vec::new();
    colormap.push(RGB {
        red: 0.0,
        green: 0.0,
        blue: 0.5,
    });
    colormap.push(RGB {
        red: 0.0,
        green: 0.0,
        blue: 1.0,
    });
    colormap.push(RGB {
        red: 0.0,
        green: 1.0,
        blue: 1.0,
    });
    colormap.push(RGB {
        red: 1.0,
        green: 1.0,
        blue: 0.0,
    });
    colormap.push(RGB {
        red: 1.0,
        green: 0.0,
        blue: 0.0,
    });
    colormap.push(RGB {
        red: 0.5,
        green: 0.0,
        blue: 0.0,
    });
    // Rectangle showing the Mandelbrot set
    scene.objects.push(VisObj {
        surface: Box::new(Quad::new(
            Plane::new(
                &Vec3f {
                    x: -1.0,
                    y: 4.0,
                    z: 1.0,
                },
                &Vec3f {
                    x: 1.0,
                    y: -1.0,
                    z: 0.0,
                }
                .normalize(),
                &Vec3f {
                    x: 0.0,
                    y: 0.0,
                    z: 1.0,
                },
            ),
            3.0,
            2.5,
        )),
        // Offsets/scales map quad coordinates into the complex plane.
        texture: Box::new(CoordinateTransform {
            texture: Box::new(MandelbrotSet { colormap }),
            u_offset: -2.0,
            v_offset: -1.25,
            u_scale: 1.0,
            v_scale: 1.0,
        }),
        reflectivity: 0.0,
    });
    // Rectangle recursively showing the same scene
    scene.objects.push(VisObj {
        surface: Box::new(Quad::new(
            Plane::new(
                &Vec3f {
                    x: -1.0,
                    y: -4.0,
                    z: 1.0,
                },
                &Vec3f {
                    x: 1.0,
                    y: 1.0,
                    z: 0.0,
                }
                .normalize(),
                &Vec3f {
                    x: 0.0,
                    y: 0.0,
                    z: 1.0,
                },
            ),
            3.0,
            2.5,
        )),
        // Negative scales mirror the portal image; the Portal texture
        // re-renders the scene from the same camera.
        texture: Box::new(CoordinateTransform {
            texture: Box::new(Portal {
                camera: camera.clone(),
            }),
            u_offset: -1.5,
            v_offset: -1.25,
            u_scale: -1.0 / 1.5,
            v_scale: -1.0,
        }),
        reflectivity: 0.0,
    });
    // Nice reflective sphere
    scene.objects.push(VisObj {
        surface: Box::new(Sphere::new(
            &Vec3f {
                x: 0.0,
                y: 0.0,
                z: 2.25,
            },
            1.5,
        )),
        texture: Box::new(RGB::BLACK),
        reflectivity: 0.9,
    });
    scene
}
/// Parses CLI arguments, traces the demo scene, and writes the result
/// as a PPM file, reporting wall-clock time for each phase.
fn main() {
    let args = CommandLineArguments::from_args();

    let camera = Camera::new(
        Vec3f { x: -11.0, y: 0.0, z: 2.0 },
        Vec3f { x: 10.0, y: 0.0, z: -1.0 },
        45.0,
    );
    let scene = build_scene(&camera);

    let trace_timer = Instant::now();
    let image =
        scene.trace_image_oversampled(&camera, args.width, args.height, args.oversampling_factor);
    println!("Traced image in {} ms.", trace_timer.elapsed().as_millis());

    let write_timer = Instant::now();
    let mut ppm_out =
        ppm::PPMWriter::new(&args.output_file, image.columns as i32, image.rows as i32).unwrap();
    for scanline in image.iter_rows() {
        for pixel in scanline {
            // Convert from linear light to sRGB bytes before writing.
            let (red, green, blue) = pixel.linear_to_srgb().to_rgb24();
            ppm_out.write(red, green, blue).unwrap();
        }
    }
    println!("Wrote output in {} ms.", write_timer.elapsed().as_millis());
}
| true |
50ff7932f7eb0fb03a0a0a569b4555609666eb08
|
Rust
|
strategist922/chia-plotmover-rs
|
/src/main.rs
|
UTF-8
| 4,713 | 2.6875 | 3 |
[
"MIT",
"LicenseRef-scancode-warranty-disclaimer"
] |
permissive
|
mod cfg;
use cfg::Cfg;
use log::{debug, error, info};
use notify::{raw_watcher, RawEvent, RecursiveMode, Watcher};
use std::error::Error;
use std::sync::mpsc::channel;
use std::{
ffi::OsString,
fs::{self, DirEntry},
};
use sysinfo::{DiskExt, System, SystemExt};
#[macro_use]
extern crate lazy_static;
lazy_static! {
    // parse config
    // Global, lazily-initialized configuration shared by the whole program.
    static ref CFG : Cfg = Cfg::new().unwrap();
    // Source (plotting) directories that are watched for finished plots.
    static ref SSDS : &'static Vec<String> = &CFG.dirs.ssds;
    // Destination drives that hold the farmed plots.
    static ref HDDS : &'static Vec<String> = &CFG.dirs.hdds;
    // When true, never fill free space — only replace legacy plots.
    static ref ONLY_REPLACE : bool = CFG.options.only_replace.unwrap_or(false);
}
// check if any hdd has space for the new plot
fn hdd_has_space(hdd: &str, sys: &System, plot_sz: u64) -> bool {
    for disk in sys.disks() {
        let mount = disk.mount_point().to_str().unwrap();
        debug!("{} , {} , {}", mount, hdd, mount == hdd);
        if mount == hdd {
            // Found the matching mount point: answer depends only on it.
            return disk.available_space() >= plot_sz;
        }
    }
    // A configured drive that the OS does not report is a setup error.
    panic!("hdd {} not found in system disks!", hdd);
}
fn get_free_space(plot_sz: u64) -> Result<&'static str, Box<dyn Error>> {
let mut hdd_idx: i8 = -1;
let sys = System::new_all();
// if any drive has space, use it
if *ONLY_REPLACE == false {
for (i, path) in HDDS.iter().enumerate() {
if hdd_has_space(&path, &sys, plot_sz) {
info!("hdd {:?} has space, using that", path);
return Ok(&HDDS[i]);
}
}
}
// otherwise, remove a plot
for (i, path) in HDDS.iter().enumerate() {
let legacy_path = format!("{}{}", path, "/legacy_plots");
let legacy_plots = fs::read_dir(&legacy_path)?;
if let Some(remove_plot) = legacy_plots.into_iter().last() {
let path_buf = remove_plot?.path();
let res = fs::remove_file(&path_buf);
if res.is_err() {
error!("unable to remove {:?} err= {}", &path_buf, res.unwrap_err());
continue;
}
info!("removed plot {:?}", path_buf.to_str().unwrap());
hdd_idx = i as i8;
break;
}
}
if hdd_idx < 0 {
panic!("no plots were available to remove! are we done re-plotting ?!?!?!")
}
Ok(&HDDS[hdd_idx as usize])
}
fn move_file(source: &DirEntry) {
let source_file = String::from(source.file_name().to_str().unwrap());
let source_path = String::from(source.path().to_str().unwrap());
let source_sz = fs::metadata(&source_path).unwrap().len();
let free_path = match get_free_space(source_sz) {
Ok(path) => path,
Err(e) => {
error!("No free space found, aborting the move! err: {}", e);
return;
}
};
let dest_path = format!("{}{}{}", free_path, "/pool_plots/", &source_file);
info!("copy plot {:?} to {:?} ... ", &source_path, dest_path);
match fs::copy(&source_path, &dest_path) {
Ok(_) => {
info!(" ... completed");
let res_rm = fs::remove_file(&source_path);
if res_rm.is_err() {
error!(
"unable to remove {}! err: {}",
source_path,
res_rm.unwrap_err()
);
}
}
Err(e) => {
error!(
"unable to copy {} to {}! err: {}",
&source_path, &dest_path, e
);
}
}
}
fn check_path(path: &str) {
info!("checking for plot files in path {}", path);
let files = fs::read_dir(path).unwrap();
files
.filter_map(Result::ok)
.filter(|f| f.path().extension().unwrap_or(&OsString::from("foo")) == "plot")
.for_each(|f| move_file(&f));
}
/// Runs a plot scan over every configured SSD directory.
fn check_all() {
    SSDS.iter().for_each(|path| check_path(path));
}
fn main() {
// init logging
log4rs::init_file("logcfg.yml", Default::default()).unwrap();
check_all();
info!("monitoring these dirs for new plots {:?}", &*SSDS);
// setup the channel and watch the dirs for new plots
let (tx, rx) = channel();
let mut watcher = raw_watcher(tx).unwrap();
for path in SSDS.iter() {
watcher.watch(path, RecursiveMode::NonRecursive).unwrap();
}
loop {
match rx.recv() {
Ok(RawEvent {
path: Some(path),
op: Ok(_op),
cookie: _,
}) => {
check_path(path.parent().unwrap().to_str().unwrap());
}
Ok(event) => println!("broken event: {:?}", event),
Err(e) => println!("watch error: {:?}", e),
}
}
}
| true |
e4d8e5c78d207c1ab19224fb2f8b06f1830215a8
|
Rust
|
alex179ohm/nsq-client-rs
|
/src/state.rs
|
UTF-8
| 899 | 2.765625 | 3 |
[
"LicenseRef-scancode-unknown-license-reference",
"MIT"
] |
permissive
|
use futures::{AsyncRead, AsyncWrite};
/// Marker trait for states of the connection state machine.
pub trait State {}
/// Marker trait for terminal states of the state machine.
pub trait EndState {}
/// Transition from the implementing state into state `S`,
/// given the connection's read and write halves.
pub trait Transition<S>
where
    S: State,
    Self: State,
{
    fn to(self, recv: &dyn AsyncRead, send: &dyn AsyncWrite) -> S;
}
/// Consumes and finishes a terminal state.
pub trait Terminate
where
    Self: EndState,
{
    fn end(self);
}
/// Initial state of the connection state machine.
pub struct Magic;
impl Magic {
    /// Creates the initial state.
    pub fn new() -> Self {
        Magic {}
    }
}
impl State for Magic {}
impl Transition<Identify> for Magic {
    /// Advances from `Magic` to `Identify`.
    ///
    /// Fix: the reader/writer handles are currently unused, so they are
    /// `_`-prefixed to document that and silence unused-variable warnings.
    fn to(self, _recv: &dyn AsyncRead, _send: &dyn AsyncWrite) -> Identify {
        Identify::new(false, false)
    }
}
/// State in which the IDENTIFY negotiation options are chosen.
pub struct Identify {
    // Whether TLS negotiation is requested.
    tls: bool,
    // Whether AUTH is requested.
    auth: bool,
}
impl Identify {
    /// Creates an `Identify` state with the given negotiation flags.
    ///
    /// Bug fix: the original ignored both arguments and always stored
    /// `false`, so `tls()`/`auth()` could never report `true`.
    pub fn new(tls: bool, auth: bool) -> Self {
        Identify { tls, auth }
    }
    /// Whether TLS negotiation was requested.
    pub fn tls(&self) -> bool {
        self.tls
    }
    /// Whether AUTH was requested.
    pub fn auth(&self) -> bool {
        self.auth
    }
}
// `Identify` participates in the connection state machine as a (non-terminal) state.
impl State for Identify {}
| true |
b4aee045afc1e83ba40e895efc5ca35bef73d631
|
Rust
|
4e6/enso
|
/lib/rust/enso-logger/src/disabled.rs
|
UTF-8
| 918 | 2.765625 | 3 |
[
"Apache-2.0",
"AGPL-3.0-or-later"
] |
permissive
|
//! Contains definition of trivial logger that discards all messages except warnings and errors.
use enso_prelude::*;
use crate::Message;
use crate::AnyLogger;
use crate::enabled;
use enso_shapely::CloneRef;
use std::fmt::Debug;
// ==============
// === Logger ===
// ==============
/// Trivial logger that discards all messages except warnings and errors.
#[derive(Clone,CloneRef,Debug,Default)]
pub struct Logger {
    /// Inner enabled logger used to actually emit warnings and errors.
    enabled : enabled::Logger,
}
// === Impls ===
// NOTE(review): the `impls!` macro appears to generate `From<enabled::Logger>`
// (by value and by reference) conversions that rebuild this logger at the same
// path — confirm against `enso_shapely`'s macro definition.
impls!{ From + &From <enabled::Logger> for Logger { |logger| Self.new(logger.path()) }}
impl AnyLogger for Logger {
    type Owned = Self;
    // Only `warning` and `error` are forwarded to the inner enabled logger;
    // other `AnyLogger` methods fall back to the trait's defaults (per the
    // module doc, those discard the message). One-line bodies are kept
    // aligned to match the surrounding codebase style.
    fn new (path:impl Into<ImString>) -> Self { Self {enabled : enabled::Logger::new(path) } }
    fn path (&self) -> &str { self.enabled.path() }
    fn warning (&self, msg:impl Message) { self.enabled.warning (msg) }
    fn error (&self, msg:impl Message) { self.enabled.error (msg) }
}
| true |
f52df41dafb263b300902a26ebae86ae477e29b7
|
Rust
|
docweirdo/rOSt
|
/src/helpers.rs
|
UTF-8
| 604 | 3.359375 | 3 |
[] |
no_license
|
/// Writes u32 memory at the specified base + offset
///
/// Uses a volatile store so the compiler cannot elide or reorder the write
/// — required when the address is a memory-mapped I/O register.
pub fn write_register(base: u32, offset: u32, input: u32) {
    // SAFETY: the caller must guarantee `base + offset` is a valid,
    // writable, properly aligned address (this wrapper does not check).
    unsafe { core::ptr::write_volatile((base + offset) as *mut u32, input) }
}
/// Reads u32 memory at the specified base + offset
///
/// Uses a volatile load so repeated reads are not collapsed by the
/// optimizer — required for memory-mapped I/O registers.
pub fn read_register(base: u32, offset: u32) -> u32 {
    // SAFETY: the caller must guarantee `base + offset` is a valid,
    // readable, properly aligned address (this wrapper does not check).
    unsafe { core::ptr::read_volatile((base + offset) as *const u32) }
}
/// Reads the specified bit from the u32 memory at the specified base + offset
///
/// Returns the masked value (`1 << bit` when set, 0 otherwise), not a
/// normalized 0/1.
pub fn read_register_bit(base: u32, offset: u32, bit: u32) -> u32 {
    // SAFETY: the caller must guarantee `base + offset` is a valid,
    // readable, properly aligned address (this wrapper does not check).
    unsafe { core::ptr::read_volatile((base + offset) as *const u32) & 1 << bit }
}
| true |
6444b5a369ccb15b7e382a6e0c0fc0c12ee101b3
|
Rust
|
Boscop/mincode
|
/src/rustc_serialize/writer.rs
|
UTF-8
| 15,196 | 3.109375 | 3 |
[
"MIT"
] |
permissive
|
use std::io::Write;
use std::io::Error as IoError;
use std::error::Error;
use std::fmt;
use rustc_serialize_crate::Encoder;
use byteorder::WriteBytesExt;
use leb128;
use float::*;
/// Result alias used by all encoding operations in this module.
pub type EncodingResult<T> = Result<T, EncodingError>;
/// An error that can be produced during encoding.
#[derive(Debug)]
pub enum EncodingError {
    /// An error originating from the underlying `Writer`.
    IoError(IoError),
    /// An object could not be encoded with the given size limit.
    ///
    /// This error is returned before any bytes are written to the
    /// output `Writer`.
    SizeLimit,
}
/// An Encoder that encodes values directly into a Writer.
///
/// This struct should not be used often.
/// For most cases, prefer the `encode_into` function.
pub struct EncoderWriter<'a, W: 'a> {
    writer: &'a mut W,
    // Float-writing strategies, selected once from the FloatEncoding
    // passed to `new`.
    write_f32: FloatEncoder<f32>,
    write_f64: FloatEncoder<f64>,
}
/// Byte-counting companion to the encoder: tracks how many bytes an
/// encoding would occupy and enforces an upper bound, without writing.
pub struct SizeChecker {
    /// Maximum number of bytes the encoding may occupy.
    pub size_limit: u64,
    /// Bytes accounted for so far.
    pub written: u64,
    // Encoded sizes (in bytes) of f32/f64 under the configured FloatEncoding.
    float_size_f32: usize,
    float_size_f64: usize,
}
// Adapts an underlying I/O error into this module's EncodingError.
fn wrap_io(err: IoError) -> EncodingError {
    EncodingError::IoError(err)
}
impl fmt::Display for EncodingError {
    // Human-readable rendering; the IoError variant includes the inner error.
    fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
        match *self {
            EncodingError::IoError(ref err) => write!(f, "IoError: {}", err),
            EncodingError::SizeLimit => write!(f, "SizeLimit")
        }
    }
}
impl Error for EncodingError {
    // NOTE(review): `description`/`cause` are the legacy std::error::Error
    // methods (superseded by Display and `source`); kept as-is, presumably
    // for the toolchains this crate supports — confirm before modernizing.
    fn description(&self) -> &str {
        match *self {
            EncodingError::IoError(ref err) => Error::description(err),
            EncodingError::SizeLimit => "the size limit for decoding has been reached"
        }
    }
    fn cause(&self) -> Option<&Error> {
        match *self {
            EncodingError::IoError(ref err) => err.cause(),
            EncodingError::SizeLimit => None
        }
    }
}
impl <'a, W: Write> EncoderWriter<'a, W> {
pub fn new(w: &'a mut W, float_enc: FloatEncoding) -> EncoderWriter<'a, W> {
let (write_f32, write_f64) = float_encoder(float_enc);
EncoderWriter {
writer: w,
write_f32: write_f32,
write_f64: write_f64,
}
}
fn write_unsigned<T: Into<u64>>(&mut self, v: T) -> EncodingResult<()> {
leb128::write::unsigned(&mut self.writer, v.into()).map(|_| ()).map_err(wrap_io)
}
fn write_signed<T: Into<i64>>(&mut self, v: T) -> EncodingResult<()> {
leb128::write::signed(&mut self.writer, v.into()).map(|_| ()).map_err(wrap_io)
}
}
impl SizeChecker {
    /// Creates a checker that errors once more than `limit` bytes would
    /// have been written.
    pub fn new(limit: u64, float_enc: FloatEncoding) -> SizeChecker {
        let (float_size_f32, float_size_f64) = float_sizes(float_enc);
        SizeChecker {
            size_limit: limit,
            written: 0,
            float_size_f32,
            float_size_f64,
        }
    }

    /// Accounts for `size` raw bytes; errors when the running total
    /// exceeds the configured limit.
    fn add_raw(&mut self, size: usize) -> EncodingResult<()> {
        self.written += size as u64;
        if self.written > self.size_limit {
            Err(EncodingError::SizeLimit)
        } else {
            Ok(())
        }
    }
    /*fn add_value<T>(&mut self, _: T) -> EncodingResult<()> {
        use std::mem::size_of;
        self.add_raw(size_of::<T>())
    }*/

    /// Accounts for the LEB128-encoded size of an unsigned value by
    /// encoding it into a scratch buffer and counting the bytes.
    fn add_value_unsigned<T: Into<u64>>(&mut self, t: T) -> EncodingResult<()> {
        let mut scratch: Vec<u8> = vec![];
        match leb128::write::unsigned(&mut scratch, t.into()) {
            Ok(written) => self.add_raw(written),
            Err(e) => Err(wrap_io(e)),
        }
    }

    /// Accounts for the LEB128-encoded size of a signed value.
    fn add_value_signed<T: Into<i64>>(&mut self, t: T) -> EncodingResult<()> {
        let mut scratch: Vec<u8> = vec![];
        match leb128::write::signed(&mut scratch, t.into()) {
            Ok(written) => self.add_raw(written),
            Err(e) => Err(wrap_io(e)),
        }
    }
}
// Binary serializer implementing the (legacy) `rustc_serialize::Encoder`
// interface. Integer widths, float byte order, and length prefixes are
// delegated to helpers defined elsewhere in this file (`write_unsigned`,
// `write_signed`, and the `write_f32`/`write_f64` function fields), so the
// exact wire format is configurable by the `EncoderWriter` constructor.
impl<'a, W: Write> Encoder for EncoderWriter<'a, W> {
    type Error = EncodingError;
    // Unit values occupy zero bytes on the wire.
    fn emit_nil(&mut self) -> EncodingResult<()> {
        Ok(())
    }
    // `usize` is widened to u64 for a platform-independent encoding.
    fn emit_usize(&mut self, v: usize) -> EncodingResult<()> {
        self.write_unsigned(v as u64)
    }
    fn emit_u64(&mut self, v: u64) -> EncodingResult<()> {
        self.write_unsigned(v)
    }
    fn emit_u32(&mut self, v: u32) -> EncodingResult<()> {
        self.write_unsigned(v)
    }
    fn emit_u16(&mut self, v: u16) -> EncodingResult<()> {
        self.write_unsigned(v)
    }
    // Single bytes bypass the configurable integer helper.
    fn emit_u8(&mut self, v: u8) -> EncodingResult<()> {
        self.writer.write_u8(v).map_err(wrap_io)
    }
    fn emit_isize(&mut self, v: isize) -> EncodingResult<()> {
        self.write_signed(v as i64)
    }
    fn emit_i64(&mut self, v: i64) -> EncodingResult<()> {
        self.write_signed(v)
    }
    fn emit_i32(&mut self, v: i32) -> EncodingResult<()> {
        self.write_signed(v)
    }
    fn emit_i16(&mut self, v: i16) -> EncodingResult<()> {
        self.write_signed(v)
    }
    fn emit_i8(&mut self, v: i8) -> EncodingResult<()> {
        self.writer.write_i8(v).map_err(wrap_io)
    }
    // Booleans are a single 0/1 byte.
    fn emit_bool(&mut self, v: bool) -> EncodingResult<()> {
        self.writer.write_u8(if v {1} else {0}).map_err(wrap_io)
    }
    fn emit_f64(&mut self, v: f64) -> EncodingResult<()> {
        //self.writer.write_f64::<BigEndian>(v).map_err(wrap_io)
        (self.write_f64)(&mut self.writer, v).map_err(wrap_io)
    }
    fn emit_f32(&mut self, v: f32) -> EncodingResult<()> {
        //self.writer.write_f32::<BigEndian>(v).map_err(wrap_io)
        (self.write_f32)(&mut self.writer, v).map_err(wrap_io)
    }
    // Chars are written as their UTF-8 bytes, with NO length prefix; the
    // decoder is expected to re-parse the UTF-8 boundary.
    fn emit_char(&mut self, v: char) -> EncodingResult<()> {
        // TODO: change this back once unicode works
        //let mut cbuf = [0; 4];
        //let sz = v.encode_utf8(&mut cbuf[..]).unwrap_or(0);
        //let ptr = &cbuf[..sz];
        //self.writer.write_all(ptr).map_err(EncodingError::IoError)
        let mut inter = String::with_capacity(1);
        inter.push(v);
        self.writer.write_all(inter.as_bytes()).map_err(EncodingError::IoError)
    }
    // Strings are length-prefixed (byte length) followed by raw UTF-8.
    fn emit_str(&mut self, v: &str) -> EncodingResult<()> {
        try!(self.emit_usize(v.len()));
        self.writer.write_all(v.as_bytes()).map_err(EncodingError::IoError)
    }
    fn emit_enum<F>(&mut self, __: &str, f: F) -> EncodingResult<()>
        where F: FnOnce(&mut EncoderWriter<'a, W>) -> EncodingResult<()>
    {
        f(self)
    }
    // Enum variants encode the numeric variant id followed by the payload.
    fn emit_enum_variant<F>(&mut self, _: &str, v_id: usize, _: usize, f: F) -> EncodingResult<()>
        where F: FnOnce(&mut EncoderWriter<'a, W>) -> EncodingResult<()>
    {
        try!(self.write_unsigned(v_id as u64));
        f(self)
    }
    fn emit_enum_variant_arg<F>(&mut self, _: usize, f: F) -> EncodingResult<()>
        where F: FnOnce(&mut EncoderWriter<'a, W>) -> EncodingResult<()>
    {
        f(self)
    }
    fn emit_enum_struct_variant<F>(&mut self,
                                   _: &str,
                                   _: usize,
                                   _: usize,
                                   f: F)
                                   -> EncodingResult<()>
        where F: FnOnce(&mut EncoderWriter<'a, W>) -> EncodingResult<()>
    {
        f(self)
    }
    fn emit_enum_struct_variant_field<F>(&mut self, _: &str, _: usize, f: F) -> EncodingResult<()>
        where F: FnOnce(&mut EncoderWriter<'a, W>) -> EncodingResult<()>
    {
        f(self)
    }
    // Structs/tuples carry no framing of their own: fields are concatenated.
    fn emit_struct<F>(&mut self, _: &str, _: usize, f: F) -> EncodingResult<()>
        where F: FnOnce(&mut EncoderWriter<'a, W>) -> EncodingResult<()>
    {
        f(self)
    }
    fn emit_struct_field<F>(&mut self, _: &str, _: usize, f: F) -> EncodingResult<()>
        where F: FnOnce(&mut EncoderWriter<'a, W>) -> EncodingResult<()>
    {
        f(self)
    }
    fn emit_tuple<F>(&mut self, _: usize, f: F) -> EncodingResult<()>
        where F: FnOnce(&mut EncoderWriter<'a, W>) -> EncodingResult<()>
    {
        f(self)
    }
    fn emit_tuple_arg<F>(&mut self, _: usize, f: F) -> EncodingResult<()>
        where F: FnOnce(&mut EncoderWriter<'a, W>) -> EncodingResult<()>
    {
        f(self)
    }
    fn emit_tuple_struct<F>(&mut self, _: &str, len: usize, f: F) -> EncodingResult<()>
        where F: FnOnce(&mut EncoderWriter<'a, W>) -> EncodingResult<()>
    {
        self.emit_tuple(len, f)
    }
    fn emit_tuple_struct_arg<F>(&mut self, f_idx: usize, f: F) -> EncodingResult<()>
        where F: FnOnce(&mut EncoderWriter<'a, W>) -> EncodingResult<()>
    {
        self.emit_tuple_arg(f_idx, f)
    }
    fn emit_option<F>(&mut self, f: F) -> EncodingResult<()>
        where F: FnOnce(&mut EncoderWriter<'a, W>) -> EncodingResult<()>
    {
        f(self)
    }
    // Options are tagged with one byte: 0 = None, 1 = Some(payload).
    fn emit_option_none(&mut self) -> EncodingResult<()> {
        self.writer.write_u8(0).map_err(wrap_io)
    }
    fn emit_option_some<F>(&mut self, f: F) -> EncodingResult<()>
        where F: FnOnce(&mut EncoderWriter<'a, W>) -> EncodingResult<()>
    {
        try!(self.writer.write_u8(1).map_err(wrap_io));
        f(self)
    }
    // Sequences and maps are prefixed with their element/entry count.
    fn emit_seq<F>(&mut self, len: usize, f: F) -> EncodingResult<()>
        where F: FnOnce(&mut EncoderWriter<'a, W>) -> EncodingResult<()>
    {
        try!(self.emit_usize(len));
        f(self)
    }
    fn emit_seq_elt<F>(&mut self, _: usize, f: F) -> EncodingResult<()>
        where F: FnOnce(&mut EncoderWriter<'a, W>) -> EncodingResult<()>
    {
        f(self)
    }
    fn emit_map<F>(&mut self, len: usize, f: F) -> EncodingResult<()>
        where F: FnOnce(&mut EncoderWriter<'a, W>) -> EncodingResult<()>
    {
        try!(self.emit_usize(len));
        f(self)
    }
    fn emit_map_elt_key<F>(&mut self, _: usize, f: F) -> EncodingResult<()>
        where F: FnOnce(&mut EncoderWriter<'a, W>) -> EncodingResult<()>
    {
        f(self)
    }
    fn emit_map_elt_val<F>(&mut self, _: usize, f: F) -> EncodingResult<()>
        where F: FnOnce(&mut EncoderWriter<'a, W>) -> EncodingResult<()>
    {
        f(self)
    }
}
// A "dry-run" encoder: instead of writing bytes it accumulates the number of
// bytes that the real `EncoderWriter` WOULD produce, via the `add_value_*`
// and `add_raw` helpers defined elsewhere in this file. It must therefore
// mirror `EncoderWriter`'s wire format exactly.
impl Encoder for SizeChecker {
    type Error = EncodingError;
    fn emit_nil(&mut self) -> EncodingResult<()> {
        Ok(())
    }
    fn emit_usize(&mut self, v: usize) -> EncodingResult<()> {
        self.add_value_unsigned(v as u64)
    }
    fn emit_u64(&mut self, v: u64) -> EncodingResult<()> {
        self.add_value_unsigned(v)
    }
    fn emit_u32(&mut self, v: u32) -> EncodingResult<()> {
        self.add_value_unsigned(v)
    }
    fn emit_u16(&mut self, v: u16) -> EncodingResult<()> {
        self.add_value_unsigned(v)
    }
    // Only the width matters for sizing, so a dummy `0u8` is counted.
    fn emit_u8(&mut self, _: u8) -> EncodingResult<()> {
        self.add_value_unsigned(0 as u8)
    }
    fn emit_isize(&mut self, v: isize) -> EncodingResult<()> {
        self.add_value_signed(v as i64)
    }
    fn emit_i64(&mut self, v: i64) -> EncodingResult<()> {
        self.add_value_signed(v)
    }
    fn emit_i32(&mut self, v: i32) -> EncodingResult<()> {
        self.add_value_signed(v)
    }
    fn emit_i16(&mut self, v: i16) -> EncodingResult<()> {
        self.add_value_signed(v)
    }
    fn emit_i8(&mut self, v: i8) -> EncodingResult<()> {
        self.add_value_signed(v)
    }
    // A bool serializes as one byte, matching EncoderWriter::emit_bool.
    fn emit_bool(&mut self, _: bool) -> EncodingResult<()> {
        self.add_value_unsigned(0 as u8)
    }
    // Float sizes come from configured fields so they track whatever
    // float format the paired writer uses.
    fn emit_f64(&mut self, _: f64) -> EncodingResult<()> {
        let bytes = self.float_size_f64;
        self.add_raw(bytes)
    }
    fn emit_f32(&mut self, _: f32) -> EncodingResult<()> {
        let bytes = self.float_size_f32;
        self.add_raw(bytes)
    }
    fn emit_char(&mut self, v: char) -> EncodingResult<()> {
        self.add_raw(v.len_utf8())
    }
    fn emit_str(&mut self, v: &str) -> EncodingResult<()> {
        self.add_value_unsigned(v.len() as u64)?;
        self.add_raw(v.len())
    }
    fn emit_enum<F>(&mut self, __: &str, f: F) -> EncodingResult<()>
        where F: FnOnce(&mut SizeChecker) -> EncodingResult<()>
    {
        f(self)
    }
    // NOTE(review): the variant id is sized as `u32` here, but
    // `EncoderWriter::emit_enum_variant` writes it as `u64`. If
    // `add_value_unsigned`/`write_unsigned` are width-sensitive these two
    // will disagree on enum sizes — confirm against the helpers' definitions.
    fn emit_enum_variant<F>(&mut self, _: &str, v_id: usize, _: usize, f: F) -> EncodingResult<()>
        where F: FnOnce(&mut SizeChecker) -> EncodingResult<()>
    {
        self.add_value_unsigned(v_id as u32)?;
        f(self)
    }
    fn emit_enum_variant_arg<F>(&mut self, _: usize, f: F) -> EncodingResult<()>
        where F: FnOnce(&mut SizeChecker) -> EncodingResult<()>
    {
        f(self)
    }
    fn emit_enum_struct_variant<F>(&mut self,
                                   _: &str,
                                   _: usize,
                                   _: usize,
                                   f: F)
                                   -> EncodingResult<()>
        where F: FnOnce(&mut SizeChecker) -> EncodingResult<()>
    {
        f(self)
    }
    fn emit_enum_struct_variant_field<F>(&mut self, _: &str, _: usize, f: F) -> EncodingResult<()>
        where F: FnOnce(&mut SizeChecker) -> EncodingResult<()>
    {
        f(self)
    }
    fn emit_struct<F>(&mut self, _: &str, _: usize, f: F) -> EncodingResult<()>
        where F: FnOnce(&mut SizeChecker) -> EncodingResult<()>
    {
        f(self)
    }
    fn emit_struct_field<F>(&mut self, _: &str, _: usize, f: F) -> EncodingResult<()>
        where F: FnOnce(&mut SizeChecker) -> EncodingResult<()>
    {
        f(self)
    }
    fn emit_tuple<F>(&mut self, _: usize, f: F) -> EncodingResult<()>
        where F: FnOnce(&mut SizeChecker) -> EncodingResult<()>
    {
        f(self)
    }
    fn emit_tuple_arg<F>(&mut self, _: usize, f: F) -> EncodingResult<()>
        where F: FnOnce(&mut SizeChecker) -> EncodingResult<()>
    {
        f(self)
    }
    fn emit_tuple_struct<F>(&mut self, _: &str, len: usize, f: F) -> EncodingResult<()>
        where F: FnOnce(&mut SizeChecker) -> EncodingResult<()>
    {
        self.emit_tuple(len, f)
    }
    fn emit_tuple_struct_arg<F>(&mut self, f_idx: usize, f: F) -> EncodingResult<()>
        where F: FnOnce(&mut SizeChecker) -> EncodingResult<()>
    {
        self.emit_tuple_arg(f_idx, f)
    }
    fn emit_option<F>(&mut self, f: F) -> EncodingResult<()>
        where F: FnOnce(&mut SizeChecker) -> EncodingResult<()>
    {
        f(self)
    }
    // One tag byte, matching EncoderWriter's Option framing.
    fn emit_option_none(&mut self) -> EncodingResult<()> {
        self.add_value_unsigned(0 as u8)
    }
    fn emit_option_some<F>(&mut self, f: F) -> EncodingResult<()>
        where F: FnOnce(&mut SizeChecker) -> EncodingResult<()>
    {
        try!(self.add_value_unsigned(1 as u8));
        f(self)
    }
    fn emit_seq<F>(&mut self, len: usize, f: F) -> EncodingResult<()>
        where F: FnOnce(&mut SizeChecker) -> EncodingResult<()>
    {
        try!(self.emit_usize(len));
        f(self)
    }
    fn emit_seq_elt<F>(&mut self, _: usize, f: F) -> EncodingResult<()>
        where F: FnOnce(&mut SizeChecker) -> EncodingResult<()>
    {
        f(self)
    }
    fn emit_map<F>(&mut self, len: usize, f: F) -> EncodingResult<()>
        where F: FnOnce(&mut SizeChecker) -> EncodingResult<()>
    {
        try!(self.emit_usize(len));
        f(self)
    }
    fn emit_map_elt_key<F>(&mut self, _: usize, f: F) -> EncodingResult<()>
        where F: FnOnce(&mut SizeChecker) -> EncodingResult<()>
    {
        f(self)
    }
    fn emit_map_elt_val<F>(&mut self, _: usize, f: F) -> EncodingResult<()>
        where F: FnOnce(&mut SizeChecker) -> EncodingResult<()>
    {
        f(self)
    }
}
| true |
802004b957670545bae39c5e9b8de0c65a649764
|
Rust
|
Michael-F-Bryan/midl-rs
|
/midl/src/syntax/parser.rs
|
UTF-8
| 3,481 | 2.734375 | 3 |
[
"MIT"
] |
permissive
|
use codespan::{CodeMap, FileMap, FileName};
use failure_derive::Fail;
use slog::{Discard, Logger};
use std::env;
use std::fmt::{self, Display, Formatter};
use std::io;
use std::path::{Path, PathBuf};
use std::sync::{Arc, Mutex};
/// Entry point for parsing MIDL sources. Cloning is cheap: the import
/// resolver is shared behind an `Arc`.
#[derive(Clone)]
pub struct Parser {
    // NOTE: bare trait object (pre-2018 syntax); equivalent to
    // `Arc<dyn ImportResolver>`.
    imports: Arc<ImportResolver>,
    logger: Logger,
}
impl Parser {
    /// Creates a parser that resolves imports from the file system.
    pub fn new(logger: Logger) -> Parser {
        Parser::new_with_resolver(FileSystemImports::default(), logger)
    }
    /// Creates a parser with a caller-supplied import resolver (useful for
    /// tests or in-memory sources).
    pub fn new_with_resolver<I: ImportResolver>(resolver: I, logger: Logger) -> Parser {
        Parser {
            imports: Arc::new(resolver),
            logger,
        }
    }
    /// Borrows the import resolver in use.
    pub fn imports(&self) -> &dyn ImportResolver {
        &*self.imports
    }
}
impl Default for Parser {
    // Default parser logs to nowhere (slog::Discard drain).
    fn default() -> Parser {
        Parser::new(Logger::root(Discard, slog::o!()))
    }
}
/// A mechanism for resolving file imports.
///
/// Implementations must be shareable across threads (`Send + Sync + 'static`)
/// because the `Parser` holds them behind an `Arc`.
pub trait ImportResolver: Send + Sync + 'static {
    /// Registers an in-memory source under `name` and returns its file map.
    fn add_filemap(&self, name: FileName, src: String) -> Arc<FileMap>;
    /// Resolves `path` (optionally relative to the importing file `source`)
    /// to a loaded file map.
    fn load(&self, path: &Path, source: Option<&FileName>) -> Result<Arc<FileMap>, ImportError>;
}
/// An importer which will search for imports based on a list of directories
/// it's been given.
#[derive(Debug)]
pub struct FileSystemImports {
    logger: Logger,
    // Cache of already-loaded files; the Mutex makes the resolver Sync.
    map: Mutex<CodeMap>,
    // Directories searched for relative imports.
    paths: Vec<PathBuf>,
}
impl FileSystemImports {
    // Turns a possibly-relative import path into an absolute one.
    // NOTE(review): only the absolute case is implemented; the relative
    // branches are `unimplemented!()` stubs, and the `count() > 2`
    // discriminator looks arbitrary (presumably meant to distinguish
    // multi-component relative paths) — confirm intent before finishing.
    fn normalize(&self, path: &Path, source: Option<&FileName>) -> PathBuf {
        if path.is_absolute() {
            path.to_path_buf()
        } else if path.components().count() > 2 {
            match source {
                Some(parent) => unimplemented!(),
                None => unimplemented!(),
            }
        } else {
            unimplemented!()
        }
    }
}
impl Default for FileSystemImports {
fn default() -> FileSystemImports {
let mut paths = Vec::new();
if let Ok(current_dir) = env::current_dir() {
paths.push(current_dir);
}
FileSystemImports {
logger: Logger::root(Discard, slog::o!()),
map: Default::default(),
paths,
}
}
}
impl ImportResolver for FileSystemImports {
    /// Resolves `path`, serving from the in-memory cache when the same file
    /// was loaded before, otherwise reading it from disk.
    fn load(&self, path: &Path, source: Option<&FileName>) -> Result<Arc<FileMap>, ImportError> {
        let path = self.normalize(path, source);
        let filename = FileName::from(path.clone());
        let mut map = self.map.lock().expect("The lock was poisoned");
        // Cache hit: linear scan over already-loaded file maps by name.
        if let Some(cached) = map.iter().find(|fm| fm.name() == &filename) {
            return Ok(Arc::clone(cached));
        }
        slog::debug!(self.logger, "Trying to load an import from disk";
            "filename" => path.display(),
            "source" => source.map(ToString::to_string));
        if !path.exists() {
            Err(ImportError::NotFound)
        } else {
            map.add_filemap_from_disk(path).map_err(ImportError::Io)
        }
    }
    /// Registers an in-memory source directly in the shared code map.
    fn add_filemap(&self, name: FileName, src: String) -> Arc<FileMap> {
        self.map
            .lock()
            .expect("The lock was poisoned")
            .add_filemap(name, src)
    }
}
/// An error that may occur while importing a file.
#[derive(Debug, Fail)]
pub enum ImportError {
    /// The resolved path does not exist on disk.
    NotFound,
    /// The file exists but reading it failed.
    Io(io::Error),
}
impl Display for ImportError {
    /// Human-readable description; I/O errors defer to the inner error.
    fn fmt(&self, f: &mut Formatter) -> fmt::Result {
        match self {
            ImportError::NotFound => Display::fmt("Not Found", f),
            ImportError::Io(inner) => Display::fmt(inner, f),
        }
    }
}
| true |
be4356e5c28146d06c01f05e7b4a4c9199d75fa6
|
Rust
|
nisarhassan12/materialize
|
/src/transform/src/fusion/filter.rs
|
UTF-8
| 2,958 | 2.5625 | 3 |
[
"Apache-2.0",
"BSD-2-Clause",
"CC0-1.0",
"BSD-3-Clause",
"MPL-2.0",
"0BSD",
"PostgreSQL",
"GPL-1.0-or-later",
"GPL-2.0-only",
"MIT",
"BUSL-1.1"
] |
permissive
|
// Copyright Materialize, Inc. and contributors. All rights reserved.
//
// Use of this software is governed by the Business Source License
// included in the LICENSE file.
//
// As of the Change Date specified in that file, in accordance with
// the Business Source License, use of this software will be governed
// by the Apache License, Version 2.0.
//! Fuses multiple `Filter` operators into one and canonicalizes predicates.
//!
//! If the `Filter` operator is empty, removes it.
//!
//! ```rust
//! use mz_expr::{MirRelationExpr, MirScalarExpr};
//! use mz_repr::{ColumnType, Datum, RelationType, ScalarType};
//!
//! use mz_transform::fusion::filter::Filter;
//!
//! let input = MirRelationExpr::constant(vec![], RelationType::new(vec![
//! ScalarType::Bool.nullable(false),
//! ]));
//!
//! let predicate0 = MirScalarExpr::Column(0);
//! let predicate1 = MirScalarExpr::Column(0);
//! let predicate2 = MirScalarExpr::Column(0);
//!
//! let mut expr =
//! input
//! .clone()
//! .filter(vec![predicate0.clone()])
//! .filter(vec![predicate1.clone()])
//! .filter(vec![predicate2.clone()]);
//!
//! // .transform() will deduplicate any predicates
//! use mz_transform::{Transform, TransformArgs};
//! Filter.transform(&mut expr, TransformArgs {
//! indexes: &mz_transform::EmptyIndexOracle,
//! stats: &mz_transform::EmptyStatisticsOracle,
//! global_id: None,
//! });
//!
//! let correct = input.filter(vec![predicate0]);
//!
//! assert_eq!(expr, correct);
//! ```
use mz_expr::visit::Visit;
use mz_expr::MirRelationExpr;
use crate::TransformArgs;
/// Fuses multiple `Filter` operators into one and deduplicates predicates.
///
/// Stateless marker type; the work happens in [`Filter::action`].
#[derive(Debug)]
pub struct Filter;
impl crate::Transform for Filter {
    /// Applies filter fusion to every node of `relation` (pre-order, so a
    /// chain of `Filter`s collapses at its topmost node), then records the
    /// resulting plan for optimizer tracing.
    #[tracing::instrument(
        // BUGFIX: the comma after `target = "optimizer"` was missing, which
        // is a syntax error in the attribute's argument list.
        target = "optimizer",
        level = "trace",
        skip_all,
        fields(path.segment = "filter_fusion")
    )]
    fn transform(
        &self,
        relation: &mut MirRelationExpr,
        _: TransformArgs,
    ) -> Result<(), crate::TransformError> {
        relation.visit_mut_pre(&mut Self::action)?;
        mz_repr::explain::trace_plan(&*relation);
        Ok(())
    }
}
impl Filter {
    /// Fuses multiple `Filter` operators into one and canonicalizes predicates.
    pub fn action(relation: &mut MirRelationExpr) {
        if let MirRelationExpr::Filter { input, predicates } = relation {
            // consolidate nested filters: repeatedly absorb the predicates of
            // a directly-nested Filter and splice its input into our own.
            while let MirRelationExpr::Filter {
                input: inner,
                predicates: p2,
            } = &mut **input
            {
                predicates.append(p2);
                *input = Box::new(inner.take_dangerous());
            }
            // Deduplicate/normalize the merged predicate list against the
            // input's column types.
            mz_expr::canonicalize::canonicalize_predicates(predicates, &input.typ().column_types);
            // remove the Filter stage if empty (all predicates were trivial).
            if predicates.is_empty() {
                *relation = input.take_dangerous();
            }
        }
    }
}
| true |
51a042f4230f33de2290bceb5a4f8bf88f8674e6
|
Rust
|
hdtx/raytracer-comparison
|
/rust/src/vec3.rs
|
UTF-8
| 3,957 | 3.6875 | 4 |
[
"MIT"
] |
permissive
|
use std::cmp::PartialEq;
use std::ops;
/// A three-component `f32` vector with the usual Euclidean operations.
#[derive(Debug, Clone, Copy)]
pub struct Vec3 {
    x: f32,
    y: f32,
    z: f32,
}
impl Vec3 {
    /// Builds a vector from its three components.
    pub fn new(x: f32, y: f32, z: f32) -> Self {
        Self { x, y, z }
    }
    /// First component.
    pub fn x(self) -> f32 {
        self.x
    }
    /// Second component.
    pub fn y(self) -> f32 {
        self.y
    }
    /// Third component.
    pub fn z(self) -> f32 {
        self.z
    }
    /// Dot (scalar) product.
    pub fn dot(self, other: Vec3) -> f32 {
        self.x * other.x + self.y * other.y + self.z * other.z
    }
    /// Cross (vector) product of `self × other`.
    pub fn cross(self, other: Vec3) -> Vec3 {
        Self::new(
            self.y * other.z - self.z * other.y,
            self.z * other.x - self.x * other.z,
            self.x * other.y - self.y * other.x,
        )
    }
    /// Euclidean length.
    pub fn norm(&self) -> f32 {
        let v = *self;
        v.dot(v).sqrt()
    }
    /// True when `self` and `other` differ by less than `|tol|` in length.
    pub fn compare(self, other: Vec3, tol: f32) -> bool {
        let distance = (self - other).norm();
        distance < tol.abs()
    }
    /// Unit vector pointing in the same direction.
    pub fn unity(&self) -> Vec3 {
        let length = self.norm();
        *self / length
    }
    /// Vector in the same direction with length `t`.
    pub fn at_len(self, t: f32) -> Vec3 {
        self.unity() * t
    }
}
impl PartialEq for Vec3 {
    // Component-wise equality (IEEE semantics: NaN != NaN).
    fn eq(&self, other: &Self) -> bool {
        [self.x, self.y, self.z] == [other.x, other.y, other.z]
    }
}
impl ops::Add for Vec3 {
    type Output = Self;
    fn add(self, rhs: Self) -> Self {
        Self::new(self.x + rhs.x, self.y + rhs.y, self.z + rhs.z)
    }
}
impl ops::Sub for Vec3 {
    type Output = Self;
    fn sub(self, rhs: Self) -> Self {
        Self::new(self.x - rhs.x, self.y - rhs.y, self.z - rhs.z)
    }
}
impl ops::Neg for Vec3 {
    type Output = Self;
    fn neg(self) -> Self {
        Self::new(-self.x, -self.y, -self.z)
    }
}
impl ops::Mul<f32> for Vec3 {
    type Output = Self;
    // Scale each component by `rhs`.
    fn mul(self, rhs: f32) -> Self {
        Self::new(self.x * rhs, self.y * rhs, self.z * rhs)
    }
}
impl ops::Mul<Vec3> for f32 {
    type Output = Vec3;
    // Scalar-on-the-left form of scaling.
    fn mul(self, rhs: Vec3) -> Vec3 {
        Vec3::new(self * rhs.x, self * rhs.y, self * rhs.z)
    }
}
impl ops::Div<f32> for Vec3 {
    type Output = Self;
    fn div(self, rhs: f32) -> Self {
        Self::new(self.x / rhs, self.y / rhs, self.z / rhs)
    }
}
// Unit tests for Vec3: one test per operator/method, using exact float
// literals chosen so results are representable (e.g. the 9-12-112 / 113
// Pythagorean-like triple for `norm`).
#[cfg(test)]
mod tests {
    use super::*;
    #[test]
    fn test_eq() {
        assert_eq!(Vec3::new(1., 2., 3.), Vec3::new(1., 2., 3.));
    }
    #[test]
    fn test_add() {
        assert_eq!(
            Vec3::new(1., 2., 3.) + Vec3::new(4., 5., 6.),
            Vec3::new(5., 7., 9.)
        );
    }
    #[test]
    fn test_sub() {
        assert_eq!(
            Vec3::new(5., 7., 9.) - Vec3::new(4., 5., 6.),
            Vec3::new(1., 2., 3.)
        );
    }
    #[test]
    fn test_neg() {
        assert_eq!(-Vec3::new(1., 2., 3.), Vec3::new(-1., -2., -3.));
    }
    #[test]
    fn test_mul() {
        assert_eq!(Vec3::new(1., 2., 3.) * 2., Vec3::new(2., 4., 6.));
        assert_eq!(2. * Vec3::new(1., 2., 3.), Vec3::new(2., 4., 6.));
    }
    #[test]
    fn test_div() {
        assert_eq!(Vec3::new(2., 4., 6.) / 2., Vec3::new(1., 2., 3.));
    }
    #[test]
    fn test_dot() {
        assert_eq!(Vec3::new(1., 2., 3.).dot(Vec3::new(7., 6., 5.)), 34.);
    }
    #[test]
    fn test_cross() {
        assert_eq!(
            Vec3::new(1., 0., 0.).cross(Vec3::new(0., 1., 0.)),
            Vec3::new(0., 0., 1.)
        );
    }
    #[test]
    fn test_norm() {
        assert_eq!(Vec3::new(9., 12., 112.).norm(), 113.);
    }
    // `unity` is checked approximately via `compare`, since the normalized
    // components are irrational.
    #[test]
    fn test_unity() {
        assert!(
            Vec3::new(3., 4., 5.).unity().compare(
                Vec3::new(0.424, 0.566, 0.707),
                0.001));
    }
}
| true |
135f50ac761d7758d071cb52a8e6f3873cfd7bd5
|
Rust
|
peter-signal/dtls
|
/src/extension/extension_supported_signature_algorithms.rs
|
UTF-8
| 1,711 | 2.53125 | 3 |
[
"MIT"
] |
permissive
|
#[cfg(test)]
mod extension_supported_signature_algorithms_test;
use super::*;
use crate::signature_hash_algorithm::*;
const EXTENSION_SUPPORTED_SIGNATURE_ALGORITHMS_HEADER_SIZE: usize = 6;
// https://tools.ietf.org/html/rfc5246#section-7.4.1.4.1
//
// TLS "signature_algorithms" extension body. Wire layout:
//   u16  extension_data length      (= 2 + 2 * count)
//   u16  algorithm list byte length (= 2 * count)
//   then one (hash, signature) byte pair per algorithm.
#[derive(Clone, Debug, PartialEq)]
pub struct ExtensionSupportedSignatureAlgorithms {
    pub(crate) signature_hash_algorithms: Vec<SignatureHashAlgorithm>,
}
impl ExtensionSupportedSignatureAlgorithms {
    /// Extension type tag used in the extension header.
    pub fn extension_value(&self) -> ExtensionValue {
        ExtensionValue::SupportedSignatureAlgorithms
    }
    /// Serialized byte length; must match what `marshal` writes.
    pub fn size(&self) -> usize {
        2 + 2 + self.signature_hash_algorithms.len() * 2
    }
    /// Writes the extension body (both length prefixes + algorithm pairs).
    pub fn marshal<W: Write>(&self, writer: &mut W) -> Result<(), Error> {
        writer.write_u16::<BigEndian>(2 + 2 * self.signature_hash_algorithms.len() as u16)?;
        writer.write_u16::<BigEndian>(2 * self.signature_hash_algorithms.len() as u16)?;
        for v in &self.signature_hash_algorithms {
            writer.write_u8(v.hash as u8)?;
            writer.write_u8(v.signature as u8)?;
        }
        Ok(writer.flush()?)
    }
    /// Parses the extension body; the outer length prefix is read and
    /// discarded, the inner list length determines the pair count.
    pub fn unmarshal<R: Read>(reader: &mut R) -> Result<Self, Error> {
        let _ = reader.read_u16::<BigEndian>()?;
        let algorithm_count = reader.read_u16::<BigEndian>()? as usize / 2;
        let mut signature_hash_algorithms = vec![];
        for _ in 0..algorithm_count {
            let hash = reader.read_u8()?.into();
            let signature = reader.read_u8()?.into();
            signature_hash_algorithms.push(SignatureHashAlgorithm { hash, signature });
        }
        Ok(ExtensionSupportedSignatureAlgorithms {
            signature_hash_algorithms,
        })
    }
}
| true |
395469488158d80dc1ca0cf20643df94aa4b0cba
|
Rust
|
gitter-badger/dustbox-rs
|
/src/register.rs
|
UTF-8
| 1,045 | 3 | 3 |
[
"MIT"
] |
permissive
|
/// A 16-bit CPU register addressable as two 8-bit halves (high:low),
/// as on the x86 (e.g. AX = AH:AL).
#[derive(Copy, Clone, Default)]
pub struct Register16 {
    pub val: u16,
}
impl Register16 {
    /// Replaces the high byte, preserving the low byte.
    pub fn set_hi(&mut self, val: u8) {
        // Bitwise OR instead of `+`: the operands' bit ranges are disjoint,
        // and OR states the intent (byte composition) directly.
        self.val = (self.val & 0x00FF) | (u16::from(val) << 8);
    }
    /// Replaces the low byte, preserving the high byte.
    pub fn set_lo(&mut self, val: u8) {
        self.val = (self.val & 0xFF00) | u16::from(val);
    }
    /// Returns the low byte. Takes `&self` (the original's `&mut self` was
    /// needless — the method never mutates — and every caller with `&mut`
    /// access still compiles).
    pub fn lo_u8(&self) -> u8 {
        (self.val & 0xFF) as u8
    }
    /// Returns the high byte.
    pub fn hi_u8(&self) -> u8 {
        (self.val >> 8) as u8
    }
}
// 8-bit register indices. The first four (AL..BL) address the low byte of
// the corresponding 16-bit register; the last four (AH..BH) the high byte.
pub const AL: usize = 0;
pub const CL: usize = 1;
pub const DL: usize = 2;
pub const BL: usize = 3;
pub const AH: usize = 4;
pub const CH: usize = 5;
pub const DH: usize = 6;
pub const BH: usize = 7;
// 16-bit general-purpose register indices (x86 encoding order).
pub const AX: usize = 0;
pub const CX: usize = 1;
pub const DX: usize = 2;
pub const BX: usize = 3;
pub const SP: usize = 4;
pub const BP: usize = 5;
pub const SI: usize = 6;
pub const DI: usize = 7;
// 16-bit segment register indices.
pub const ES: usize = 0;
pub const CS: usize = 1;
pub const SS: usize = 2;
pub const DS: usize = 3;
pub const FS: usize = 4;
pub const GS: usize = 5;
3ef5f8aa7277ab4eb101ceeac1f2415c6ac34ac7
|
Rust
|
ruiramos/aoc2019
|
/day_12/day_12.rs
|
UTF-8
| 7,238 | 3 | 3 |
[] |
no_license
|
// cargo-deps: num = "0.2"
use std::collections::HashSet;
use std::env;
use std::fs::File;
use std::io::Read;
// AoC 2019 day 12. Part 1: total system energy after 1000 steps. Part 2:
// the three axes evolve independently, so the full cycle length is the LCM
// of each axis' period, found from 1M recorded samples per moon per axis.
fn main() {
    let mut moons = create_moons(read_data());
    // Per-moon coordinate histories, one outer Vec per moon (4 moons).
    let mut xs: Vec<Vec<isize>> = vec![vec![], vec![], vec![], vec![]];
    let mut ys: Vec<Vec<isize>> = vec![vec![], vec![], vec![], vec![]];
    let mut zs: Vec<Vec<isize>> = vec![vec![], vec![], vec![], vec![]];
    for i in 0..1_000_000 {
        // NOTE: the inner `i` (moon index) shadows the step counter.
        for (i, moon) in moons.iter().enumerate() {
            let (x, y, z) = (moon.position[0], moon.position[1], moon.position[2]);
            xs[i].push(x);
            ys[i].push(y);
            zs[i].push(z);
        }
        update_moons(&mut moons);
        if i == 999 {
            println!("01. {}", moons.iter().fold(0, |acc, m| acc + m.get_enery()));
        }
    }
    // Each axis' period is the max over the four moons' individual periods.
    let x_period = xs.iter().map(|e| calc_period(e.to_vec())).max().unwrap();
    let y_period = ys.iter().map(|e| calc_period(e.to_vec())).max().unwrap();
    let z_period = zs.iter().map(|e| calc_period(e.to_vec())).max().unwrap();
    println!(
        "02. {:?} {:?} {:?} {}",
        x_period,
        y_period,
        z_period,
        num::integer::lcm(x_period, num::integer::lcm(y_period, z_period))
    );
}
/// Finds the period of the sampled sequence `c`: the smallest index `p > 0`
/// such that `c[i] == c[p + i]` for all `i < p`.
///
/// Panics if the first element never reappears, or if `c` is too short to
/// verify a candidate (index out of bounds) — same behavior as checking
/// pairs one by one.
fn calc_period(c: Vec<isize>) -> usize {
    let first = c[0];
    let mut search_from = 1;
    loop {
        // Next place the first sample shows up again; only such positions
        // can start a repeat of the whole prefix.
        let candidate = search_from
            + c[search_from..]
                .iter()
                .position(|&e| e == first)
                .expect("cant find element again");
        if (0..candidate).all(|i| c[i] == c[candidate + i]) {
            return candidate;
        }
        search_from = candidate + 1;
    }
}
/// Reads the puzzle input from `data.txt`, trimmed of surrounding whitespace.
/// Panics if the file is missing or unreadable.
fn read_data() -> String {
    let mut buffer = String::new();
    File::open("data.txt")
        .unwrap()
        .read_to_string(&mut buffer)
        .unwrap();
    buffer.trim().to_string()
}
/// Parses one `Moon` per line of `s`; each line is trimmed first so the
/// indented multi-line test fixtures parse cleanly.
fn create_moons(s: String) -> Vec<Moon> {
    s.split('\n').map(|line| Moon::parse(line.trim())).collect()
}
/// State of one moon: integer position and velocity on three axes.
/// `Hash`/`Eq` let whole system states be stored in a `HashSet` for
/// cycle detection.
#[derive(Debug, Clone, Copy, Eq, PartialEq, Hash)]
pub struct Moon {
    position: [isize; 3],
    velocity: [isize; 3],
}
impl Moon {
    /// A moon at rest at the origin.
    pub fn new() -> Moon {
        Moon {
            position: [0; 3],
            velocity: [0; 3],
        }
    }
    /// Parses a line such as `<x=-1, y=0, z=2>` into a stationary moon.
    /// Panics on malformed input.
    pub fn parse(s: &str) -> Moon {
        // Strip the surrounding angle brackets, then take the value after
        // each `=` in input order (x, y, z).
        let inner = &s[1..s.len() - 1];
        let mut position = [0, 0, 0];
        for (axis, coord) in inner.split(',').enumerate() {
            let parts: Vec<&str> = coord.split('=').collect();
            position[axis] = parts[1]
                .parse::<isize>()
                .expect("Error parsing moon coords");
        }
        Moon {
            position,
            velocity: [0, 0, 0],
        }
    }
    /// Moves the moon one step along its current velocity.
    pub fn apply_velocity(&mut self) {
        for axis in 0..3 {
            self.position[axis] += self.velocity[axis];
        }
    }
    /// Total energy: potential (sum of |position|) times kinetic
    /// (sum of |velocity|).
    pub fn get_enery(&self) -> usize {
        let potential: usize = self.position.iter().map(|p| p.abs() as usize).sum();
        let kinetic: usize = self.velocity.iter().map(|v| v.abs() as usize).sum();
        potential * kinetic
    }
}
/// Applies mutual gravity between two moons and returns their updated
/// copies: on each axis the velocities are pulled one step towards each
/// other (no change when the positions are equal).
pub fn apply_gravity(m1: Moon, m2: Moon) -> (Moon, Moon) {
    let (mut a, mut b) = (m1, m2);
    for axis in 0..3 {
        if a.position[axis] > b.position[axis] {
            a.velocity[axis] -= 1;
            b.velocity[axis] += 1;
        } else if a.position[axis] < b.position[axis] {
            a.velocity[axis] += 1;
            b.velocity[axis] -= 1;
        }
    }
    (a, b)
}
/// Advances the whole system one time step: gravity for every unordered
/// pair of moons, then every moon moves by its velocity.
fn update_moons(moons: &mut Vec<Moon>) {
    for i in 0..moons.len() {
        for j in i + 1..moons.len() {
            let (a, b) = apply_gravity(moons[i], moons[j]);
            moons[i] = a;
            moons[j] = b;
        }
    }
    for moon in moons.iter_mut() {
        moon.apply_velocity();
    }
}
// Unit tests against the AoC day-12 worked examples: the 4-moon sample
// systems, their published energies (179 after 10 steps, 1940 after 100),
// the 2772-step full cycle, and the per-axis periods (18, 28, 44).
#[cfg(test)]
mod test {
    use super::*;
    fn read_test_data() -> String {
        String::from(
            "<x=-1, y=0, z=2>
    <x=2, y=-10, z=-7>
    <x=4, y=-8, z=8>
    <x=3, y=5, z=-1>",
        )
    }
    fn read_test_data2() -> String {
        String::from(
            "<x=-8, y=-10, z=0>
    <x=5, y=5, z=10>
    <x=2, y=-7, z=3>
    <x=9, y=-8, z=-3>",
        )
    }
    #[test]
    fn creates_a_moon() {
        let m = Moon::parse("<x=-1, y=0, z=2>");
        assert_eq!(m.position, [-1, 0, 2]);
        assert_eq!(m.velocity, [0, 0, 0]);
    }
    #[test]
    fn creates_moons_vec() {
        let moons = create_moons(read_test_data());
        assert_eq!(moons.len(), 4);
        assert_eq!(moons[1].position, [2, -10, -7]);
    }
    #[test]
    fn iterates() {
        let mut moons = create_moons(read_test_data());
        update_moons(&mut moons);
        assert_eq!(moons[1].position, [3, -7, -4]);
        assert_eq!(moons[1].velocity, [1, 3, 3]);
    }
    #[test]
    fn calc_energy() {
        let mut moons = create_moons(read_test_data());
        for i in 0..10 {
            update_moons(&mut moons);
        }
        assert_eq!(moons.iter().fold(0, |acc, m| acc + m.get_enery()), 179);
    }
    #[test]
    fn calc_energy_2() {
        let mut moons = create_moons(read_test_data2());
        for i in 0..100 {
            update_moons(&mut moons);
        }
        assert_eq!(moons.iter().fold(0, |acc, m| acc + m.get_enery()), 1940);
    }
    // Brute-force cycle detection by hashing full system states; the sample
    // system repeats after 2772 steps.
    #[test]
    fn simulate() {
        let mut universes = HashSet::new();
        let mut moons = create_moons(read_test_data());
        let mut i = 0;
        loop {
            update_moons(&mut moons);
            let new_moons = moons.clone();
            if universes.get(&new_moons).is_some() {
                break;
            }
            universes.insert(new_moons);
            i += 1;
        }
        assert_eq!(i, 2772);
    }
    // Mirrors main()'s part-2 sampling approach on the small example.
    #[test]
    fn test_calc_period() {
        let mut xs: Vec<Vec<isize>> = vec![vec![], vec![], vec![], vec![]];
        let mut ys: Vec<Vec<isize>> = vec![vec![], vec![], vec![], vec![]];
        let mut zs: Vec<Vec<isize>> = vec![vec![], vec![], vec![], vec![]];
        let mut moons = create_moons(read_test_data());
        for i in 0..100 {
            for (i, moon) in moons.iter().enumerate() {
                let (x, y, z) = (moon.position[0], moon.position[1], moon.position[2]);
                xs[i].push(x);
                ys[i].push(y);
                zs[i].push(z);
            }
            update_moons(&mut moons);
        }
        let x_period = xs
            .iter()
            .map(|e| calc_period(e.to_vec()))
            .collect::<Vec<usize>>();
        let y_period = ys
            .iter()
            .map(|e| calc_period(e.to_vec()))
            .collect::<Vec<usize>>();
        let z_period = zs
            .iter()
            .map(|e| calc_period(e.to_vec()))
            .collect::<Vec<usize>>();
        println!("periods {:?} {:?} {:?}", x_period, y_period, z_period);
        assert_eq!(*x_period.iter().max().unwrap(), 18);
        assert_eq!(*y_period.iter().max().unwrap(), 28);
        assert_eq!(*z_period.iter().max().unwrap(), 44);
    }
}
| true |
00872306e7f35f894ece7315cefb21bd1a3d1392
|
Rust
|
developer0116/slackrypt
|
/client/src/main.rs
|
UTF-8
| 2,304 | 2.578125 | 3 |
[] |
no_license
|
use std::convert::From;
use std::convert::Into;
use std::vec::Vec;
use rsa::RSAPublicKey;
use simple_logger::SimpleLogger;
mod crypto;
mod gui;
mod io;
mod prop;
mod util;
// Client entry point. Performs a full hybrid-crypto self-test (AES-style
// symmetric encryption of the message, RSA encryption of the symmetric key,
// then decryption of both, asserting round-trips) before launching the GUI.
// NOTE(review): the `assert_eq!` sanity checks run in release builds too and
// will abort the client on mismatch — presumably intentional self-testing.
fn main() {
    let dir: String = util::default_dir();
    let version_header: String = String::from("Version: Slackrypt 0.3");
    init(&dir);
    // Derive the public key from the private key and cross-check it against
    // the separately stored (OpenSSL-generated) public key file.
    let private_key = io::get_private_key(&dir).unwrap();
    let public_key: RSAPublicKey = private_key.into();
    let public_key_openssl: RSAPublicKey = io::get_public_key(&dir).unwrap();
    assert_eq!(&public_key, &public_key_openssl);
    let plaintext: Vec<u8> = util::get_user_input_message();
    //plaintext encryption with a fresh random key and IV
    //Notes on IV: https://security.stackexchange.com/questions/17044/when-using-aes-and-cbc-is-it-necessary-to-keep-the-iv-secret
    let key: [u8; 16] = crypto::generate_random_hex_16();
    let iv: [u8; 16] = crypto::generate_random_hex_16();
    let ciphertext: Vec<u8> = crypto::encrypt_data_sym(&key, &iv, &plaintext);
    //key encryption (RSA) of the symmetric key
    let cipher_vec_key: Vec<u8> = crypto::encrypt_data_asym(&key, &public_key)
;
    //sanity check
    let cipher_vec_key_openssl: Vec<u8> = crypto::encrypt_data_asym(&key, &public_key_openssl);
    //key decryption (re-load the private key; it was consumed by `into()`)
    let private_key = io::get_private_key(&dir).unwrap();
    let de_key_vec: Vec<u8> = crypto::decrypt_data_asym(&cipher_vec_key, &private_key);
    let de_key_vec_openssl: Vec<u8> =
        crypto::decrypt_data_asym(&cipher_vec_key_openssl, &private_key);
    assert_eq!(&de_key_vec, &de_key_vec_openssl);
    //ciphertext decryption must reproduce the original plaintext
    let decrypted_ciphertext: Vec<u8> = crypto::decrypt_sym(&de_key_vec, &iv.to_vec(), &ciphertext);
    assert_eq!(decrypted_ciphertext.as_slice(), plaintext.as_slice());
    log::info!("Starting client...");
    gui::init(&version_header); //this must be called last (blocks on the UI loop)
}
// One-time startup: logging, working directory, properties, and RSA keypair
// generation (only when no key file exists yet). Key size comes from the
// SCRYPT_KEY_SIZE env var, defaulting to 2048 bits.
fn init(dir: &str) {
    SimpleLogger::from_env().init().unwrap();
    util::create_dir(&dir);
    let props = prop::get_properties();
    log::info!("Loaded properties: {:?}", &props.unwrap());
    let key_file = String::from(dir) + "/key.pem";
    if !util::keys_exist(&key_file) {
        let bits_str: String = util::get_env_var("SCRYPT_KEY_SIZE", "2048");
        let bits: i32 = bits_str.parse::<i32>().unwrap();
        crypto::create_keys_asym(bits, &key_file);
    }
}
| true |
a3c7edb70cf5dfedfac5ec5d97b7315a5e8eee8f
|
Rust
|
MitchellTesla/vit-servicing-station
|
/vit-servicing-station-lib/src/v0/errors.rs
|
UTF-8
| 1,845 | 2.65625 | 3 |
[] |
no_license
|
use thiserror::Error;
use warp::{reply::Response, Rejection, Reply};
/// Application-level errors surfaced by the v0 HTTP handlers. `Display`
/// (via `thiserror`) provides the user-facing message; the status-code
/// mapping lives in `to_status_code`.
#[derive(Error, Debug)]
pub enum HandleError {
    #[error("The data requested data for `{0}` is not available")]
    NotFound(String),
    // Wraps connection-pool failures; the message is deliberately opaque.
    #[error("Internal error")]
    DatabaseError(#[from] diesel::r2d2::PoolError),
    #[error("Unauthorized token")]
    UnauthorizedToken,
    #[error("Internal error, cause: {0}")]
    InternalError(String),
    // (header name, reason)
    #[error("Invalid header {0}, cause: {1}")]
    InvalidHeader(&'static str, &'static str),
}
impl HandleError {
    /// Maps each error variant to the HTTP status code it should produce.
    fn to_status_code(&self) -> warp::http::StatusCode {
        match self {
            HandleError::NotFound(_) => warp::http::StatusCode::NOT_FOUND,
            HandleError::DatabaseError(_) => warp::http::StatusCode::SERVICE_UNAVAILABLE,
            HandleError::InternalError(_) => warp::http::StatusCode::INTERNAL_SERVER_ERROR,
            HandleError::UnauthorizedToken => warp::http::StatusCode::UNAUTHORIZED,
            HandleError::InvalidHeader(_, _) => warp::http::StatusCode::BAD_REQUEST,
        }
    }
    /// User-facing message, taken from the `thiserror` `Display` impl.
    fn to_message(&self) -> String {
        // `to_string()` over `format!("{}", self)`: same output, idiomatic.
        self.to_string()
    }
    /// Full HTTP response: JSON body plus the mapped status code.
    fn to_response(&self) -> Response {
        let status_code = self.to_status_code();
        warp::reply::with_status(warp::reply::json(&self.to_json()), status_code).into_response()
    }
    /// JSON body of the shape `{"code": <u16>, "message": <text>}`.
    fn to_json(&self) -> serde_json::Value {
        serde_json::json!({"code": self.to_status_code().as_u16(), "message" : self.to_message()})
    }
}
// Lets a HandleError be returned directly from a warp filter as a response.
impl warp::Reply for HandleError {
    fn into_response(self) -> Response {
        self.to_response()
    }
}
// Marker impl so HandleError can travel through warp's rejection machinery.
impl warp::reject::Reject for HandleError {}
/// Recovery filter: a rejection carrying a `HandleError` becomes its JSON
/// response; any other rejection is passed through to warp's defaults.
pub async fn handle_rejection(err: Rejection) -> Result<impl Reply, Rejection> {
    match err.find::<HandleError>() {
        Some(handle_error) => Ok(handle_error.to_response()),
        None => Err(err),
    }
}
| true |
e8d8992edf5d2f8eb7e57e3161e1be98a7525ddf
|
Rust
|
MidasLamb/async-graphql
|
/async-graphql-parser/src/pos.rs
|
UTF-8
| 2,732 | 3.4375 | 3 |
[
"MIT",
"Apache-2.0"
] |
permissive
|
use serde::Serialize;
use std::borrow::{Borrow, BorrowMut};
use std::cmp::Ordering;
use std::fmt;
use std::hash::{Hash, Hasher};
use std::ops::{Deref, DerefMut};
/// Original position of element in source code.
///
/// Ordering is lexicographic: line first, then column (derived).
#[derive(PartialOrd, Ord, PartialEq, Eq, Clone, Copy, Default, Hash, Serialize)]
pub struct Pos {
    /// One-based line number
    pub line: usize,
    /// One-based column number
    pub column: usize,
}
impl fmt::Debug for Pos {
    /// Renders as `Pos(line:column)`.
    fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        formatter.write_fmt(format_args!("Pos({}:{})", self.line, self.column))
    }
}
impl fmt::Display for Pos {
    /// Renders as `line:column`.
    fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        formatter.write_fmt(format_args!("{}:{}", self.line, self.column))
    }
}
/// Represents the position of an AST node: the node itself plus where it
/// appeared in the source.
#[derive(Clone, Debug, Copy, Default)]
#[allow(missing_docs)]
pub struct Positioned<T: ?Sized> {
    pub pos: Pos,
    pub node: T,
}
// All of the following impls delegate to the wrapped `node` and ignore
// `pos`: two Positioned values compare/hash equal whenever their nodes do,
// regardless of source location.
impl<T: fmt::Display> fmt::Display for Positioned<T> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        self.node.fmt(f)
    }
}
impl<T: Clone> Positioned<T> {
    #[inline]
    #[allow(missing_docs)]
    pub fn clone_inner(&self) -> T {
        self.node.clone()
    }
}
impl<T: PartialEq> PartialEq for Positioned<T> {
    fn eq(&self, other: &Self) -> bool {
        self.node.eq(&other.node)
    }
}
impl<T: PartialOrd> PartialOrd for Positioned<T> {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        self.node.partial_cmp(&other.node)
    }
}
impl<T: Ord> Ord for Positioned<T> {
    fn cmp(&self, other: &Self) -> Ordering {
        self.node.cmp(&other.node)
    }
}
impl<T: Ord> Eq for Positioned<T> {}
// Deref/DerefMut make a Positioned<T> usable wherever a T is expected.
impl<T: ?Sized> Deref for Positioned<T> {
    type Target = T;
    fn deref(&self) -> &T {
        &self.node
    }
}
impl<T: ?Sized> DerefMut for Positioned<T> {
    fn deref_mut(&mut self) -> &mut T {
        &mut self.node
    }
}
impl<T: Hash> Hash for Positioned<T> {
    fn hash<H: Hasher>(&self, state: &mut H) {
        self.node.hash(state)
    }
}
// Borrow impls let Positioned<String> key string-indexed maps directly.
impl Borrow<str> for Positioned<String> {
    fn borrow(&self) -> &str {
        self.node.as_str()
    }
}
impl BorrowMut<str> for Positioned<String> {
    fn borrow_mut(&mut self) -> &mut str {
        self.node.as_mut_str()
    }
}
impl<T> Positioned<T> {
    // Crate-internal constructor pairing a node with its position.
    pub(crate) fn new(node: T, pos: Pos) -> Positioned<T> {
        Positioned { node, pos }
    }
    /// Consumes the wrapper and returns the inner node.
    #[inline]
    pub fn into_inner(self) -> T {
        self.node
    }
    /// Get start position
    #[inline]
    pub fn position(&self) -> Pos {
        self.pos
    }
    // Maps the whole wrapper (not just the node) through `f` while keeping
    // the same position — `f` receives `self` so it can inspect `pos` too.
    #[inline]
    pub(crate) fn pack<F: FnOnce(Self) -> R, R>(self, f: F) -> Positioned<R> {
        Positioned {
            pos: self.pos,
            node: f(self),
        }
    }
}
| true |
c3040ffd1cd166aa797236bdf0b5afb5dac18078
|
Rust
|
occlum/occlum
|
/src/libos/src/fs/procfs/pid/mod.rs
|
UTF-8
| 4,509 | 2.6875 | 3 |
[
"BSD-3-Clause"
] |
permissive
|
use super::*;
use crate::process::table::get_process;
use crate::process::{ProcessRef, ProcessStatus};
use self::cmdline::ProcCmdlineINode;
use self::comm::ProcCommINode;
use self::cwd::ProcCwdSymINode;
use self::exe::ProcExeSymINode;
use self::fd::LockedProcFdDirINode;
use self::maps::ProcMapsINode;
use self::root::ProcRootSymINode;
use self::stat::ProcStatINode;
mod cmdline;
mod comm;
mod cwd;
mod exe;
mod fd;
mod maps;
mod root;
mod stat;
/// Inode for a `/proc/[pid]` directory, protected by a reader/writer lock.
pub struct LockedPidDirINode(RwLock<PidDirINode>);
struct PidDirINode {
    // The process this directory describes.
    process_ref: ProcessRef,
    // Weak back-reference to the `Dir` wrapper that owns this inode
    // (patched in after construction; see `LockedPidDirINode::new`).
    this: Weak<Dir<LockedPidDirINode>>,
    // Parent directory inode.
    parent: Arc<dyn INode>,
    // Static child entries (cmdline, cwd, exe, root, comm, stat, maps);
    // the `fd` entry is created lazily instead (see `find`).
    entries: HashMap<String, Arc<dyn INode>>,
}
impl LockedPidDirINode {
    /// Creates the `/proc/[pid]` inode for `pid`, failing with
    /// `EntryNotFound` when no such process exists.
    pub fn new(pid: pid_t, parent: Arc<dyn INode>) -> vfs::Result<Arc<dyn INode>> {
        let inode = Arc::new(Dir::new(Self(RwLock::new(PidDirINode {
            process_ref: get_process(pid).map_err(|_| FsError::EntryNotFound)?,
            this: Weak::default(),
            parent: Arc::clone(&parent),
            entries: HashMap::new(),
        }))));
        // Patch the weak self-reference after the Arc exists, then
        // populate the static child entries.
        inode.inner().0.write().unwrap().this = Arc::downgrade(&inode);
        inode.inner().init_entries()?;
        Ok(inode)
    }
    /// Builds the fixed set of child inodes. Everything except `fd` is
    /// created eagerly here; `fd` is special-cased in `find`.
    fn init_entries(&self) -> vfs::Result<()> {
        let mut file = self.0.write().unwrap();
        // cmdline
        let cmdline_inode = ProcCmdlineINode::new(&file.process_ref);
        file.entries.insert(String::from("cmdline"), cmdline_inode);
        // cwd
        let cwd_inode = ProcCwdSymINode::new(&file.process_ref);
        file.entries.insert(String::from("cwd"), cwd_inode);
        // exe
        let exe_inode = ProcExeSymINode::new(&file.process_ref);
        file.entries.insert(String::from("exe"), exe_inode);
        // root
        let root_inode = ProcRootSymINode::new(&file.process_ref);
        file.entries.insert(String::from("root"), root_inode);
        // comm
        let comm_inode = ProcCommINode::new(&file.process_ref);
        file.entries.insert(String::from("comm"), comm_inode);
        // stat
        let stat_inode = ProcStatINode::new(&file.process_ref);
        file.entries.insert(String::from("stat"), stat_inode);
        // maps
        let maps_inode = ProcMapsINode::new(&file.process_ref);
        file.entries.insert(String::from("maps"), maps_inode);
        Ok(())
    }
}
impl DirProcINode for LockedPidDirINode {
    /// Resolves `name` to a child inode; `.`/`..` and the lazily-created
    /// `fd` directory are special-cased.
    fn find(&self, name: &str) -> vfs::Result<Arc<dyn INode>> {
        let file = self.0.read().unwrap();
        if name == "." {
            return Ok(file.this.upgrade().unwrap());
        }
        if name == ".." {
            return Ok(Arc::clone(&file.parent));
        }
        // The 'fd' entry holds 1 Arc of LockedPidDirINode, so the LockedPidDirINode
        // itself will hold 2 Arcs. This makes it cannot be dropped automatically.
        // We initialize the 'fd' here to avoid this.
        // TODO:: Try to find a better solution.
        if name == "fd" {
            let fd_inode =
                LockedProcFdDirINode::new(&file.process_ref, file.this.upgrade().unwrap());
            return Ok(fd_inode);
        }
        if let Some(inode) = file.entries.get(name) {
            Ok(Arc::clone(inode))
        } else {
            Err(FsError::EntryNotFound)
        }
    }
    /// Maps a directory index to an entry name: 0/1 are "."/"..",
    /// then the static entries, with the virtual "fd" entry last.
    fn get_entry(&self, id: usize) -> vfs::Result<String> {
        match id {
            0 => Ok(String::from(".")),
            1 => Ok(String::from("..")),
            i => {
                let file = self.0.read().unwrap();
                if let Some(name) = file.entries.keys().nth(i - 2) {
                    Ok(name.to_owned())
                } else if i == file.entries.len() + 2 {
                    Ok(String::from("fd"))
                } else {
                    Err(FsError::EntryNotFound)
                }
            }
        }
    }
    /// Streams directory entries starting at `ctx.pos()`, in the same
    /// order that `get_entry` exposes them.
    fn iterate_entries(&self, mut ctx: &mut DirentWriterContext) -> vfs::Result<usize> {
        let file = self.0.read().unwrap();
        let idx = ctx.pos();
        // Write first two special entries
        write_first_two_entries!(idx, &mut ctx, &file);
        // Write the normal entries
        let skipped = if idx < 2 { 0 } else { idx - 2 };
        for (name, inode) in file.entries.iter().skip(skipped) {
            write_inode_entry!(&mut ctx, name, inode);
        }
        // Write the fd entry
        if idx <= 2 + file.entries.len() {
            write_entry!(&mut ctx, "fd", PROC_INO, vfs::FileType::Dir);
        }
        Ok(ctx.written_len())
    }
}
| true |
2fa40698df0d4f9ab63acd49cf993628b2f2a54a
|
Rust
|
m-rots/bernard-rs
|
/src/model/drive.rs
|
UTF-8
| 1,044 | 3.078125 | 3 |
[
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference",
"MIT"
] |
permissive
|
use crate::database::{Connection, Pool};
/// A row of the `drives` table: a tracked drive plus the change-feed
/// page token it was last synced to.
#[derive(Debug)]
pub struct Drive {
    pub id: String,
    pub page_token: String,
}
impl Drive {
    /// Inserts a new drive row with its initial page token.
    pub(crate) async fn create(
        id: &str,
        page_token: &str,
        conn: &mut Connection,
    ) -> sqlx::Result<()> {
        sqlx::query!(
            "INSERT INTO drives (id, page_token) VALUES ($1, $2)",
            id,
            page_token
        )
        .execute(conn)
        .await?;
        Ok(())
    }
    /// Fetches a drive by primary key; `None` when it is not tracked.
    pub(crate) async fn get_by_id(id: &str, pool: &Pool) -> sqlx::Result<Option<Self>> {
        sqlx::query_as!(Self, "SELECT * FROM drives WHERE id = $1", id)
            .fetch_optional(pool)
            .await
    }
    /// Advances the stored page token after a successful sync.
    pub(crate) async fn update_page_token(
        id: &str,
        page_token: &str,
        conn: &mut Connection,
    ) -> sqlx::Result<()> {
        sqlx::query!(
            "UPDATE drives SET page_token = $2 WHERE id = $1",
            id,
            page_token,
        )
        .execute(conn)
        .await?;
        Ok(())
    }
}
| true |
2ea4bbbb70698d0d7223aa1be2f18a37267e9076
|
Rust
|
kent-mcleod/samples
|
/flock/src/main.rs
|
UTF-8
| 910 | 2.765625 | 3 |
[
"MIT"
] |
permissive
|
extern crate nix;
use nix::fcntl;
use std::io::prelude::*;
use std::fs::File;
use std::os::unix::prelude::*;
use std::fs::OpenOptions;
/// Demonstrates advisory file locking via `flock(2)`.
///
/// Creates `foo.txt` and tries to take an exclusive non-blocking lock on
/// it. On success it writes to `foo2.txt` and holds the lock forever; on
/// failure (another process owns the lock) it prints the error and dumps
/// the current contents of `foo2.txt`.
fn main() {
    let lock_file = File::create("foo.txt").unwrap();
    let mut data_file = OpenOptions::new()
        .read(true)
        .write(true)
        .create(true)
        .open("foo2.txt")
        .unwrap();
    let result = fcntl::flock(
        lock_file.as_raw_fd(),
        fcntl::FlockArg::LockExclusiveNonblock,
    );
    match result {
        Ok(_) => {
            println!("got lock");
            data_file.write_all(b"Hello, world!").expect("Write file:");
            // Spin forever so the lock stays held for other processes to
            // observe.
            loop {}
        }
        Err(e) => {
            println!("Error: {:?}", e);
            let mut contents = String::new();
            data_file.read_to_string(&mut contents).expect("Read file:");
            println!("File: {}", contents);
        }
    }
}
| true |
df1983e7abf4b7c1bdd5f0517910bceb2dd77906
|
Rust
|
otov4its/ripple-address-codec-rust
|
/tests/api.rs
|
UTF-8
| 5,476 | 2.71875 | 3 |
[
"Apache-2.0"
] |
permissive
|
use ripple_address_codec as api;
use utils::*;
mod utils {
    use std::convert::TryInto;
    use hex;
    use rand::{thread_rng, Rng};
    /// Decodes a hex string into an owned byte vector (panics on bad hex).
    pub fn to_bytes(hex: &str) -> Vec<u8> {
        hex::decode(hex).unwrap()
    }
    /// Decodes a hex string that must represent exactly 20 bytes.
    pub fn to_20_bytes(hex: &str) -> [u8; 20] {
        to_bytes(hex).try_into().unwrap()
    }
    /// Decodes a hex string that must represent exactly 16 bytes.
    pub fn to_16_bytes(hex: &str) -> [u8; 16] {
        to_bytes(hex).try_into().unwrap()
    }
    /// Upper-case hex encoding of arbitrary bytes.
    pub fn to_hex(bytes: &[u8]) -> String {
        hex::encode_upper(bytes)
    }
    /// 20 random bytes (account-id sized).
    pub fn get_20_random_bytes() -> [u8; 20] {
        let mut buf = [0u8; 20];
        thread_rng()
            .try_fill(&mut buf[..])
            .expect("random generator error");
        buf
    }
    /// 16 random bytes (seed sized).
    pub fn get_16_random_bytes() -> [u8; 16] {
        let mut buf = [0u8; 16];
        thread_rng()
            .try_fill(&mut buf[..])
            .expect("random generator error");
        buf
    }
}
/// Tests for the account-ID (classic address) codec: rejection of bad
/// alphabet/length/prefix/checksum, plus round-trips.
mod account_id {
    use super::*;
    #[test]
    fn decode_bad_alphabet() {
        assert_eq!(
            api::decode_account_id("r_000").unwrap_err(),
            api::DecodeError
        );
    }
    #[test]
    fn decode_bad_lenght() {
        assert_eq!(
            api::decode_account_id("rJrRMgWyPbY35ErN").unwrap_err(),
            api::DecodeError
        );
    }
    #[test]
    fn decode_bad_prefix() {
        assert_eq!(
            api::decode_account_id("bJrRMgiRgrU6hDF4pgu5DXQdWyPbY35ErN").unwrap_err(),
            api::DecodeError
        );
    }
    #[test]
    fn decode_bad_checksum() {
        assert_eq!(
            api::decode_account_id("rJrRMgiRgrU6hDF4pgu5DXQdWyPbY35ErA").unwrap_err(),
            api::DecodeError
        );
    }
    // Round-trip with random payloads: encode must produce an "r"-prefixed
    // address whose decode yields the original bytes.
    #[test]
    fn encode_random() {
        let bytes = get_20_random_bytes();
        let encoded = api::encode_account_id(&bytes);
        let decoded_bytes = api::decode_account_id(&encoded).unwrap();
        assert!(encoded.starts_with("r"));
        assert_eq!(bytes, decoded_bytes);
    }
    #[test]
    fn encode() {
        assert_eq!(
            api::encode_account_id(&to_20_bytes("BA8E78626EE42C41B46D46C3048DF3A1C3C87072")),
            "rJrRMgiRgrU6hDF4pgu5DXQdWyPbY35ErN"
        );
    }
    #[test]
    fn decode() {
        assert_eq!(
            api::decode_account_id("rJrRMgiRgrU6hDF4pgu5DXQdWyPbY35ErN").unwrap(),
            to_20_bytes("BA8E78626EE42C41B46D46C3048DF3A1C3C87072")
        );
    }
}
/// Tests for secp256k1 seed encoding ("s..." addresses, non-"sEd" prefix).
mod secp256k1_seed {
    use super::*;
    #[test]
    fn decode_bad_alphabet() {
        assert_eq!(api::decode_seed("s_000").unwrap_err(), api::DecodeError);
    }
    #[test]
    fn decode_bad_lenght() {
        assert_eq!(
            api::decode_seed("sn259rEFXrQrWcwV6dfL").unwrap_err(),
            api::DecodeError
        );
    }
    #[test]
    fn decode_bad_prefix() {
        assert_eq!(
            api::decode_seed("Sn259rEFXrQrWyx3Q7XneWcwV6dfL").unwrap_err(),
            api::DecodeError
        );
    }
    #[test]
    fn decode_bad_checksum() {
        assert_eq!(
            api::decode_seed("sn259rEFXrQrWyx3Q7XneWcwV6dfA").unwrap_err(),
            api::DecodeError
        );
    }
    // Round-trip with random entropy: the decoded bytes and key kind must
    // match what was encoded.
    #[test]
    fn encode_random() {
        let bytes = get_16_random_bytes();
        let encoded = api::encode_seed(&bytes, &api::Secp256k1);
        let (decoded_bytes, decoded_kind) = api::decode_seed(&encoded).unwrap();
        assert!(encoded.starts_with("s"));
        assert_eq!(decoded_bytes, bytes);
        assert_eq!(decoded_kind, &api::Secp256k1);
    }
    #[test]
    fn encode() {
        assert_eq!(
            api::encode_seed(
                &to_16_bytes("CF2DE378FBDD7E2EE87D486DFB5A7BFF"),
                &api::Secp256k1
            ),
            "sn259rEFXrQrWyx3Q7XneWcwV6dfL"
        );
    }
    #[test]
    fn decode() {
        let (bytes, kind) = api::decode_seed("sn259rEFXrQrWyx3Q7XneWcwV6dfL").unwrap();
        assert_eq!(to_hex(&bytes), "CF2DE378FBDD7E2EE87D486DFB5A7BFF");
        assert_eq!(kind, &api::Secp256k1)
    }
}
/// Tests for Ed25519 seed encoding ("sEd..." addresses).
mod ed25519_seed {
    use super::*;
    #[test]
    fn decode_bad_alphabet() {
        assert_eq!(api::decode_seed("sEd_000").unwrap_err(), api::DecodeError);
    }
    #[test]
    fn decode_bad_lenght() {
        assert_eq!(api::decode_seed("sEdTM1uX8").unwrap_err(), api::DecodeError);
    }
    #[test]
    fn decode_bad_prefix() {
        assert_eq!(
            api::decode_seed("SEdTM1uX8pu2do5XvTnutH6HsouMaM2").unwrap_err(),
            api::DecodeError
        );
    }
    #[test]
    fn decode_bad_checksum() {
        assert_eq!(
            api::decode_seed("sEdTM1uX8pu2do5XvTnutH6HsouMaMA").unwrap_err(),
            api::DecodeError
        );
    }
    // Round-trip with random entropy: must keep the "sEd" prefix and
    // recover both bytes and key kind.
    #[test]
    fn encode_random() {
        let bytes = get_16_random_bytes();
        let encoded = api::encode_seed(&bytes, &api::Ed25519);
        let (decoded_bytes, decoded_kind) = api::decode_seed(&encoded).unwrap();
        assert!(encoded.starts_with("sEd"));
        assert_eq!(decoded_bytes, bytes);
        assert_eq!(decoded_kind, &api::Ed25519);
    }
    #[test]
    fn encode() {
        assert_eq!(
            api::encode_seed(
                &to_16_bytes("4C3A1D213FBDFB14C7C28D609469B341"),
                &api::Ed25519
            ),
            "sEdTM1uX8pu2do5XvTnutH6HsouMaM2"
        );
    }
    #[test]
    fn decode() {
        let (bytes, kind) = api::decode_seed("sEdTM1uX8pu2do5XvTnutH6HsouMaM2").unwrap();
        assert_eq!(to_hex(&bytes), "4C3A1D213FBDFB14C7C28D609469B341");
        assert_eq!(kind, &api::Ed25519)
    }
}
| true |
801a7c4fc62b6d89fc202331f51011a11c4aca9b
|
Rust
|
stevepryde/thirtyfour
|
/thirtyfour/src/common/command.rs
|
UTF-8
| 3,387 | 3.59375 | 4 |
[
"MIT",
"Apache-2.0"
] |
permissive
|
use crate::Locator;
use std::fmt;
use std::fmt::Debug;
/// The webdriver selector to use when querying elements.
///
/// Each variant owns its selector string (see the NOTE on [`By`] below for
/// why owned data is required).
#[derive(Debug, Clone)]
pub enum BySelector {
    /// Query by element id.
    Id(String),
    /// Query by link text.
    LinkText(String),
    /// Query by CSS.
    Css(String),
    /// Query by XPath.
    XPath(String),
}
// NOTE: This needs to own its data so that we allow the user to specify custom
// CSS selectors such as tag/name/class etc and send fantoccini a reference
// to the formatted Css selector.
/// The webdriver selector to use when querying elements.
#[derive(Debug, Clone)]
pub struct By {
    // The owned selector; borrowed out via `By::locator`.
    selector: BySelector,
}
impl fmt::Display for By {
    /// Renders as `Kind(value)`, e.g. `Css(.item)` or `Link Text(Home)`.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let (label, value) = match &self.selector {
            BySelector::Id(id) => ("Id", id),
            BySelector::XPath(xpath) => ("XPath", xpath),
            BySelector::LinkText(text) => ("Link Text", text),
            BySelector::Css(css) => ("CSS", css),
        };
        write!(f, "{}({})", label, value)
    }
}
#[allow(non_snake_case)]
impl By {
/// Select element by id.
pub fn Id(id: &str) -> Self {
Self {
selector: BySelector::Id(id.to_string()),
}
}
/// Select element by link text.
pub fn LinkText(text: &str) -> Self {
Self {
selector: BySelector::LinkText(text.to_string()),
}
}
/// Select element by CSS.
pub fn Css(css: &str) -> Self {
Self {
selector: BySelector::Css(css.to_string()),
}
}
/// Select element by XPath.
pub fn XPath(x: &str) -> Self {
Self {
selector: BySelector::XPath(x.to_string()),
}
}
/// Select element by name.
pub fn Name(name: &str) -> Self {
Self {
selector: BySelector::Css(format!(r#"[name="{}"]"#, name)),
}
}
/// Select element by tag.
pub fn Tag(tag: &str) -> Self {
Self {
selector: BySelector::Css(tag.to_string()),
}
}
/// Select element by class.
pub fn ClassName(name: &str) -> Self {
Self {
selector: BySelector::Css(format!(".{}", name)),
}
}
/// Get the [`Locator`] for this selector.
pub fn locator(&self) -> Locator {
match &self.selector {
BySelector::Id(id) => Locator::Id(id),
BySelector::LinkText(text) => Locator::LinkText(text),
BySelector::Css(css) => Locator::Css(css),
BySelector::XPath(x) => Locator::XPath(x),
}
}
}
// Borrowing conversion: delegates to `By::locator`, so the `Locator`
// borrows the selector text owned by the `By`.
impl<'a> From<&'a By> for Locator<'a> {
    fn from(by: &'a By) -> Self {
        by.locator()
    }
}
// Owning conversion: copies the borrowed selector text into a `By`.
impl<'a> From<Locator<'a>> for By {
    fn from(locator: Locator<'a>) -> Self {
        match locator {
            Locator::Css(s) => By::Css(s),
            Locator::Id(s) => By::Id(s),
            Locator::LinkText(s) => By::LinkText(s),
            Locator::XPath(s) => By::XPath(s),
        }
    }
}
/// Convert the specified locator to a string, used for debugging.
///
/// Output has the shape `Kind(value)`, matching `Display for By` above.
pub fn locator_to_string(locator: Locator<'_>) -> String {
    match locator {
        Locator::Css(s) => format!("Css({})", s),
        // Fixed: the format string was missing its closing parenthesis
        // ("Id({}"), producing unbalanced debug output like "Id(foo".
        Locator::Id(s) => format!("Id({})", s),
        Locator::LinkText(s) => format!("LinkText({})", s),
        Locator::XPath(s) => format!("XPath({})", s),
    }
}
| true |
66f04c1eaa943283fb32b2f861e13a821c360c23
|
Rust
|
ryanpbrewster/jqi
|
/src/main.rs
|
UTF-8
| 5,243 | 2.984375 | 3 |
[] |
no_license
|
use serde_json::Value;
use std::io::Stdout;
use std::io::Write;
use std::path::PathBuf;
use std::{fmt::Display, io::BufReader};
use structopt::StructOpt;
use termion::cursor::{Goto, HideCursor};
use termion::event::{Event, Key};
use termion::input::TermRead;
use termion::raw::{IntoRawMode, RawTerminal};
use termion::style;
fn main() -> Result<(), Box<dyn std::error::Error>> {
    let args = Args::from_args();
    // Parse the whole input file up front; the rest of the program only
    // borrows into this value.
    let root: Value = {
        let fin = std::fs::File::open(args.input)?;
        let reader = BufReader::new(fin);
        serde_json::from_reader(reader)?
    };
    // `pos` is the highlighted child index; `path` is the cursor into the
    // JSON tree.
    let mut pos = 0;
    let mut path = JsonPath::new(&root);
    let stdin = std::io::stdin();
    let mut stdout = HideCursor::from(std::io::stdout().into_raw_mode()?);
    write_data(&mut stdout, &path, pos)?;
    // vi-style navigation: j/k move the highlight, h pops up a level,
    // l descends into the highlighted child, q/Esc quits.
    for evt in stdin.events() {
        let key = match evt? {
            Event::Key(key) => key,
            _ => continue,
        };
        match key {
            Key::Esc | Key::Char('q') => break,
            Key::Char('j') => {
                if pos + 1 < path.field_count() {
                    pos += 1;
                }
            }
            Key::Char('k') => {
                if pos > 0 {
                    pos -= 1;
                }
            }
            Key::Char('h') => {
                // Restore the highlight to the child we came from.
                if let Some(prev) = path.pop() {
                    pos = prev.index();
                }
            }
            Key::Char('l') => {
                if path.push(pos) {
                    pos = 0;
                }
            }
            _ => continue,
        };
        write_data(&mut stdout, &path, pos)?;
    }
    // Restore the cursor and then exit.
    write!(stdout, "{}{}", termion::clear::All, Goto(1, 1))?;
    Ok(())
}
/// Redraws the screen: breadcrumb path on row 1, then either the scalar
/// value or the child listing starting on row 3.
fn write_data(
    stdout: &mut RawTerminal<Stdout>,
    path: &JsonPath,
    pos: usize,
) -> std::io::Result<()> {
    write!(stdout, "{}{}", termion::clear::All, Goto(1, 1))?;
    for name in path.names() {
        write!(stdout, "{}", name)?;
    }
    write!(stdout, "{}", Goto(1, 3))?;
    match path.cur {
        Value::Null => write!(stdout, "null")?,
        Value::Bool(b) => write!(stdout, "{}", b)?,
        Value::Number(x) => write!(stdout, "{}", x)?,
        Value::String(s) => write!(stdout, "{}", s)?,
        Value::Array(ref vs) => write_fields(stdout, 0..vs.len(), pos)?,
        Value::Object(ref obj) => write_fields(stdout, obj.keys(), pos)?,
    }
    stdout.flush()?;
    Ok(())
}
/// Writes one field name per row starting at row 3, rendering the
/// `highlighted` entry in bold/italic/underline.
fn write_fields<T: Display>(
    stdout: &mut RawTerminal<Stdout>,
    fields: impl Iterator<Item = T>,
    highlighted: usize,
) -> std::io::Result<()> {
    for (i, name) in fields.enumerate() {
        write!(stdout, "{}", Goto(1, 3 + i as u16))?;
        if i == highlighted {
            write!(
                stdout,
                "{}{}{}",
                style::Bold,
                style::Italic,
                style::Underline
            )?;
        }
        write!(stdout, "{}{}", name, style::Reset)?;
    }
    Ok(())
}
/// Command-line arguments: a single positional path to the JSON file.
#[derive(StructOpt)]
struct Args {
    #[structopt(parse(from_os_str))]
    input: PathBuf,
}
/// A cursor into a JSON document: the chain of segments taken from the
/// root plus the node currently in focus.
struct JsonPath<'a> {
    // (segment taken, node we were at before taking it) for each step.
    path: Vec<(Segment, &'a Value)>,
    // The node currently in focus.
    cur: &'a Value,
}
impl<'a> JsonPath<'a> {
    /// Creates a cursor focused on `root` with an empty path.
    fn new(root: &Value) -> JsonPath {
        JsonPath {
            path: Vec::new(),
            cur: root,
        }
    }
    /// How many children does the current node have?
    fn field_count(&self) -> usize {
        match self.cur {
            Value::Null | Value::Bool(_) | Value::Number(_) | Value::String(_) => 0,
            Value::Array(vs) => vs.len(),
            Value::Object(ref obj) => obj.len(),
        }
    }
    /// Descend down the `idx`-th child. Returns `false` (leaving the
    /// cursor unchanged) when the current node is a scalar or `idx` is
    /// out of range.
    fn push(&mut self, idx: usize) -> bool {
        let next = match self.cur {
            Value::Null | Value::Bool(_) | Value::Number(_) | Value::String(_) => None,
            Value::Array(ref vs) => vs.get(idx).map(|v| (Segment::Index(idx), v)),
            // Iterate the entries directly instead of `keys().nth` + a
            // second `get(key)` lookup; `iter()` yields entries in the
            // same order as `keys()`, so the index is unchanged.
            Value::Object(ref obj) => obj
                .iter()
                .nth(idx)
                .map(|(key, v)| (Segment::Name(idx, key.to_owned()), v)),
        };
        match next {
            None => false,
            Some((segment, next)) => {
                self.path.push((segment, self.cur));
                self.cur = next;
                true
            }
        }
    }
    /// Back up one step, returning the segment we popped (None at root).
    fn pop(&mut self) -> Option<Segment> {
        let (segment, prev) = self.path.pop()?;
        self.cur = prev;
        Some(segment)
    }
    /// An iterator over the path segments taken from the root.
    fn names(&self) -> impl Iterator<Item = &Segment> {
        self.path.iter().map(|(segment, _)| segment)
    }
}
/// One step in a JSON path: an object key (with its index among the
/// object's keys) or an array index.
enum Segment {
    Name(usize, String),
    Index(usize),
}
impl Segment {
    /// The index of this segment among its parent's children.
    fn index(&self) -> usize {
        match self {
            Segment::Name(idx, _) | Segment::Index(idx) => *idx,
        }
    }
}
impl Display for Segment {
    // Renders `.key` for object members and `[i]` for array elements, so
    // the breadcrumb reads like a jq path.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            Segment::Name(_, name) => write!(f, ".{}", name),
            Segment::Index(idx) => write!(f, "[{}]", idx),
        }
    }
}
| true |
23425a682c9446f8c0122421c383a0f950ae44c9
|
Rust
|
Tenebryo/coin
|
/bitboard/src/board.rs
|
UTF-8
| 17,786 | 3.375 | 3 |
[
"MIT",
"Apache-2.0"
] |
permissive
|
use std::fmt;
use bit_ops::*;
use std::ops::Not;
use std::hash::*;
// Capacity used for fixed-size move buffers below.
pub const MAX_MOVES : usize = 30;
// Scratch buffer for generated moves.
pub type MoveList = [Move; MAX_MOVES];
// (score, move-index) pairs used for move ordering.
pub type MoveOrder = [(i32, usize); MAX_MOVES];
/// Side to move; internally `true` = black, `false` = white.
#[derive(Copy, Clone, PartialEq, Hash, Eq, Serialize, Deserialize)]
pub struct Turn(bool);
impl Turn {
    pub const BLACK : Turn = Turn(true);
    pub const WHITE : Turn = Turn(false);
}
// `!turn` yields the opponent.
impl Not for Turn{
    type Output = Turn;
    fn not(self) -> Turn {
        Turn(!self.0)
    }
}
impl fmt::Display for Turn {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self {
            &Turn::BLACK => write!(f, "BLACK"),
            &Turn::WHITE => write!(f, "WHITE"),
        }
    }
}
/// A move packed into one byte: bits 0-2 = x, bits 3-5 = y, bit 6 = pass,
/// bit 7 = null (sentinel / "no move").
#[derive(Copy, Clone, PartialEq, Serialize, Deserialize)]
pub struct Move {
    data : u8,
}
impl Move {
    /// Packs board coordinates (each taken mod 8) into a move.
    pub fn new(x : u8, y : u8) -> Move {
        Move {
            data : (x & 0b111) | ((y & 0b111) << 3),
        }
    }
    /// The pass move (bit 6 set).
    pub fn pass() -> Move {
        Move {
            data : 0b1_000_000,
        }
    }
    /// The null/sentinel move (bit 7 set).
    pub fn null() -> Move {
        Move {
            data : 0b10_000_000,
        }
    }
    pub fn x(&self) -> u8 {
        self.data & 0b111
    }
    pub fn y(&self) -> u8 {
        (self.data >> 3) & 0b111
    }
    pub fn is_pass(&self) -> bool {
        (self.data & 0b1_000_000) != 0
    }
    pub fn is_null(&self) -> bool {
        (self.data & 0b10_000_000) != 0
    }
    /// Single-bit board mask for this move; 0 for a pass.
    /// NOTE(review): a null move (data = 128) falls into the shift arm,
    /// and `1u64 << 128` overflow-panics in debug builds — callers here
    /// appear to check is_pass()/is_null() first, but confirm before
    /// calling mask() on a null move.
    pub fn mask(&self) -> u64 {
        match self.data {
            0b1_000_000 => 0,
            d => 1u64 << (d)
        }
    }
    /// The 0-63 square offset (pass/null flag bits stripped).
    pub fn offset(&self) -> u8 {
        self.data & 0b111_111
    }
    // Inverse of `offset`: rebuild a Move from a square offset.
    fn from_off(off : u8) -> Move {
        Move {
            data : off & 0b111_111,
        }
    }
}
#[inline]
pub fn empty_movelist() -> MoveList {
use std::mem;
unsafe{mem::uninitialized()}
//[Move::null(); MAX_MOVES]
}
#[inline]
pub fn empty_moveorder() -> MoveOrder {
[(0,0); MAX_MOVES]
}
// Column letters for showing moves in algebraic form (columns A-H).
const NUM_TO_LET : [char; 8] = ['A', 'B', 'C', 'D', 'E', 'F', 'G', 'H'];
impl fmt::Display for Move {
    // Renders e.g. "(C4)"; rows are shown 1-based.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        if self.is_pass() {
            write!(f, "(PASS)")
        } else if self.is_null() {
            write!(f, "(NULL)")
        } else {
            write!(f, "({}{})", NUM_TO_LET[self.x() as usize], self.y()+1)
        }
    }
}
impl fmt::Debug for Move {
    /// Debug form uses raw 0-based `(x,y)` coordinates (unlike `Display`,
    /// which uses algebraic notation).
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        if self.is_pass() {
            return write!(f, "(PASS)");
        }
        if self.is_null() {
            return write!(f, "(NULL)");
        }
        write!(f, "({},{})", self.x(), self.y())
    }
}
/// An Othello position as bitboards, always viewed from the side to move:
/// `ps`/`pm` belong to the current player, `os`/`om` to the opponent.
#[derive(Eq, Copy, Clone, Serialize, Deserialize)]
pub struct Board {
    ///Player Stones
    ps : u64,
    ///Opponent Stones
    os : u64,
    ///Player Moves
    pm : u64,
    ///Opponent Moves
    om : u64,
    ///Current Turn
    ct : Turn,
}
impl Board {
    /// Returns a new othello board initialized to the starting position.
    pub fn new() -> Board {
        Board {
            ps : 0b00000000_00000000_00000000_00001000_00010000_00000000_00000000_00000000u64,
            os : 0b00000000_00000000_00000000_00010000_00001000_00000000_00000000_00000000u64,
            pm : 0b00000000_00000000_00010000_00100000_00000100_00001000_00000000_00000000u64,
            om : 0b00000000_00000000_00001000_00000100_00100000_00010000_00000000_00000000u64,
            ct : Turn::BLACK,
        }
    }
    /// Parses a board from an ASCII diagram: 'B' = black stone, 'W' =
    /// white stone, newlines skipped (without advancing the square mask),
    /// anything else an empty square. Black to move.
    pub fn from_string(data: &[u8]) -> Board {
        let mut m = 1;
        let mut ps = 0u64;
        let mut os = 0u64;
        for &c in data {
            match c as char {
                'B' => { ps |= m;},
                'W' => { os |= m;},
                '\n' => {continue;},
                _ => {},
            }
            m <<= 1;
        }
        Board::position(ps, os, Turn::BLACK)
    }
    /// Returns a new board from a given position and current turn, represented
    /// by two 64-bit integers
    pub fn position(ps : u64, os : u64, ct : Turn) -> Board {
        let pm = 0;
        let om = 0;
        let mut b = Board {
            ps, os,
            pm, om,
            ct,
        };
        // Move masks start empty and are computed here.
        b.update_moves_fast();
        b
    }
    /// Returns a copy of the board
    pub fn copy(&self) -> Board {
        Board {
            ps : self.ps,
            os : self.os,
            pm : self.pm,
            om : self.om,
            ct : self.ct,
        }
    }
    // Replaces the bitboards (stones and move masks) with their `perm`-th
    // symmetry as produced by `all_board_syms`.
    pub fn permute(&mut self, perm : usize) {
        let s_syms = all_board_syms(self.ps, self.os);
        let m_syms = all_board_syms(self.pm, self.om);
        self.ps = s_syms[perm].0;
        self.os = s_syms[perm].1;
        self.pm = m_syms[perm].0;
        self.om = m_syms[perm].1;
    }
    /// Gets the current turn
    pub fn get_turn(&self) -> Turn {
        self.ct
    }
    /// Checks whether or not the game is over
    pub fn is_done(&self) -> bool {
        self.pm == 0 && self.om == 0
    }
    /// Checks whether a player has a legal move
    pub fn has_move(&self) -> (bool,bool) {
        (self.pm != 0, self.om != 0)
    }
    /// Checks whether a move is legal
    pub fn check_move(&self, m : Move) -> (bool,bool) {
        ((self.pm & m.mask()) != 0, (self.om & m.mask()) != 0)
    }
    /// Updates a board by playing the given move for the given player.
    /// Returns the bitmask of stones that were flipped. A pass/null move
    /// just swaps sides.
    pub fn do_move(&mut self, m : Move) -> u64{
        if m.is_pass() || m.is_null() {
            self.swap();
            return 0;
        }
        let mut pro = 0u64;
        let mut gen = 0u64;
        let mut msk = 0u64;
        let org = m.mask();
        gen = self.ps;
        pro = self.os;
        self.ps |= org;
        // A stone is flipped iff it lies between the placed stone and an
        // existing player stone along one of the 8 rays; each line below
        // intersects the two opposing directional occluded fills.
        msk |= sout_occl(gen, pro) & nort_occl(org, pro);
        msk |= nort_occl(gen, pro) & sout_occl(org, pro);
        msk |= east_occl(gen, pro) & west_occl(org, pro);
        msk |= west_occl(gen, pro) & east_occl(org, pro);
        msk |= sowe_occl(gen, pro) & noea_occl(org, pro);
        msk |= noea_occl(gen, pro) & sowe_occl(org, pro);
        msk |= soea_occl(gen, pro) & nowe_occl(org, pro);
        msk |= nowe_occl(gen, pro) & soea_occl(org, pro);
        // Toggle the flipped stones on both sides, then recompute moves
        // and hand the turn over.
        self.ps ^= msk;
        self.os ^= msk;
        self.update_moves_fast();
        self.swap();
        msk
    }
    // #[cfg(target_arch = "x86_64")]
    // pub fn f_do_move(&mut self, m : Move) -> u64 {
    //     use do_moves_fast::fast_do_move;
    //     // if m.is_pass() || m.is_null() {
    //     //     self.swap();
    //     //     return 0;
    //     // }
    //     let flipped = fast_do_move(m.data , m.x(), m.y(), self.ps, self.os);
    //     self.ps ^= flipped;
    //     self.os ^= flipped;
    //     self.os ^= m.mask();
    //     self.update_moves_fast();
    //     self.swap();
    //     flipped
    // }
    // x86-specific variant of `do_move` that delegates the flip
    // computation to `do_moves_faster::do_move`.
    // NOTE(review): unlike `do_move`, the pass/null short-circuit is
    // commented out here — a null move would reach `m.mask()`; confirm
    // callers never pass one.
    #[cfg(any(target_arch = "x86_64", target_arch = "x86"))]
    pub fn f_do_move(&mut self, m : Move) -> u64 {
        use do_moves_faster;
        // if m.is_pass() || m.is_null() {
        //     self.swap();
        //     return 0;
        // }
        let flipped = do_moves_faster::do_move(m.data, self.ps, self.os);
        self.ps ^= flipped;
        self.os ^= flipped;
        self.ps ^= m.mask();
        self.update_moves_fast();
        self.swap();
        flipped
    }
    // Swaps the player/opponent bitboards and toggles the turn.
    pub fn swap(&mut self) {
        let mut tmp = self.ps;
        self.ps = self.os;
        self.os = tmp;
        tmp = self.pm;
        self.pm = self.om;
        self.om = tmp;
        self.ct = !self.ct;
    }
    /// Returns the bit mask of the given player's pieces
    pub fn pieces(&self) -> (u64,u64) {
        (self.ps, self.os)
    }
    /// Returns the mobility bit mask of the given player
    pub fn mobility(&self) -> (u64, u64) {
        (self.pm,self.om)
    }
    /// Calculates and returns the exposed pieces (stones adjacent to at
    /// least one empty square), as (player, opponent) masks.
    pub fn calculate_exposed(&self) -> (u64, u64) {
        let e = !(self.ps | self.os);
        // Dilate the empty squares one step in all 8 directions.
        let bloom = (
            nort_one(e) | sout_one(e) |
            east_one(e) | west_one(e) |
            noea_one(e) | sowe_one(e) |
            nowe_one(e) | soea_one(e)
        );
        (self.ps & bloom, self.os & bloom)
    }
    /// Returns the stability bit mask of the given player
    #[deprecated]
    pub fn stability(&self, t : Turn) -> u64 {
        //sides
        const top : u64 = 255u64;
        const bot : u64 = 18374686479671623680u64;
        const lft : u64 = 72340172838076673u64;
        const rht : u64 = 9259542123273814144u64;
        let gen = match t {
            Turn::BLACK => self.ps,
            Turn::WHITE => self.os,
        };
        let pcs = self.ps|self.os;
        // Lines that are completely filled edge-to-edge in each direction.
        let vrt = nort_occl(bot & pcs, pcs) & sout_occl(top & pcs, pcs);
        let hrz = east_occl(lft & pcs, pcs) & west_occl(rht & pcs, pcs);
        let dg1 = noea_occl((bot|lft) & pcs, pcs) & sowe_occl((top|rht) & pcs, pcs);
        let dg2 = nowe_occl((bot|rht) & pcs, pcs) & soea_occl((top|lft) & pcs, pcs);
        // Seed with corners plus stones on full lines in all 4 directions.
        let mut stb = (0x8100000000000081u64 | (vrt & hrz & dg1 & dg2)) & gen;
        //expand stable areas. At most 16 iterations necessary to reach from one
        //corner to the other
        for _ in 0..16 {
            stb |= gen & (
                (nort_one(stb) | sout_one(stb) | vrt) &
                (east_one(stb) | west_one(stb) | hrz) &
                (noea_one(stb) | sowe_one(stb) | dg1) &
                (nowe_one(stb) | soea_one(stb) | dg2)
            );
        }
        stb
    }
    /// Counts the number of stones each player has on the board.
    pub fn count_pieces(&self) -> (u8, u8) {
        (popcount_64(self.ps), popcount_64(self.os))
    }
    /// Counts gets the piece count difference between the current player and
    /// the opponent.
    pub fn piece_diff(&self) -> i8 {
        popcount_64(self.ps) as i8 - popcount_64(self.os) as i8
    }
    /// Counts the number of stones on the board.
    pub fn total_pieces(&self) -> u8 {
        popcount_64(self.ps | self.os)
    }
    /// Counts the number empty squares on the board.
    pub fn total_empty(&self) -> u8 {
        64-popcount_64(self.ps | self.os)
    }
    /// Gets the moves available to the current player and stores them in the
    /// array that is passed as an argument. The number of moves is returned.
    /// When there is no legal move, a single pass move is emitted.
    pub fn get_moves(&self, out_moves : &mut MoveList) -> u8 {
        if !self.has_move().0 {
            out_moves[0] = Move::pass();
            return 1;
        }
        // Peel off set bits one at a time, lowest first.
        let mut mvs = self.pm;
        let n = popcount_64(mvs);
        for i in 0..n {
            out_moves[i as usize] = Move::from_off(bitscan_64(mvs));
            mvs ^= out_moves[i as usize].mask();
        }
        n as u8
    }
    /// Writes moves representing the current empty squares on the board to
    /// the out parameter `out_moves`. Note that this means the provided array
    /// must be large enough. Returns the number of empty squares found.
    pub fn get_empty(&self, out_moves : &mut [Move]) -> u8 {
        let mut mvs = !(self.ps | self.os);
        if mvs == 0 {
            return 0;
        }
        let n = popcount_64(mvs);
        for i in 0..n {
            out_moves[i as usize] = Move::from_off(bitscan_64(mvs));
            mvs ^= out_moves[i as usize].mask();
        }
        n as u8
    }
    /// Returns the index of a valid move for the current player in the move
    /// array
    pub fn get_move_index(&self, m : Move) -> usize {
        // Count how many legal-move bits precede this move's square.
        popcount_64(self.mobility().0 & (m.mask()-1)) as usize
    }
    // Internal to the Board struct, finds and updates the moves for the given
    // player.
    #[deprecated]
    fn find_moves(&mut self, t : Turn) {
        let mut moves = 0;
        let empty = !(self.ps | self.os);
        let mut tmp = 0;
        let (gen,pro) = match t {
            Turn::BLACK => (self.ps,self.os),
            Turn::WHITE => (self.os,self.ps),
        };
        // In each direction: slide player stones over opponent stones,
        // step once more, and keep the landings on empty squares.
        tmp = sout_one(sout_occl(gen, pro) & pro);
        moves |= tmp & empty;
        tmp = nort_one(nort_occl(gen, pro) & pro);
        moves |= tmp & empty;
        tmp = east_one(east_occl(gen, pro) & pro);
        moves |= tmp & empty;
        tmp = west_one(west_occl(gen, pro) & pro);
        moves |= tmp & empty;
        tmp = soea_one(soea_occl(gen, pro) & pro);
        moves |= tmp & empty;
        tmp = sowe_one(sowe_occl(gen, pro) & pro);
        moves |= tmp & empty;
        tmp = noea_one(noea_occl(gen, pro) & pro);
        moves |= tmp & empty;
        tmp = nowe_one(nowe_occl(gen, pro) & pro);
        moves |= tmp & empty;
        match t {
            Turn::BLACK => {self.pm = moves;},
            Turn::WHITE => {self.om = moves;},
        };
    }
    ///This function makes sure the move bitboards are current in the function
    #[deprecated]
    pub fn update_moves(&mut self) {
        self.find_moves(Turn::BLACK);
        self.find_moves(Turn::WHITE);
    }
    // Recomputes both move masks via the SIMD-backed move generator.
    #[cfg(any(target_arch = "x86", target_arch = "x86_64"))]
    pub fn update_moves_fast(&mut self) {
        use find_moves_fast::fast_find_moves;
        self.pm = fast_find_moves(self.ps, self.os);
        self.om = fast_find_moves(self.os, self.ps);
    }
}
// Hashes only the stone bitboards (move masks are derived state), which
// matches the `PartialEq` impl below. NOTE(review): `ps` is passed through
// `to_le` but `os` through `to_be` — asymmetric yet deterministic;
// presumably intentional mixing, confirm before changing.
impl Hash for Board {
    fn hash<H: Hasher>(&self, state: &mut H) {
        self.ps.to_le().hash(state);
        self.os.to_be().hash(state);
    }
}
use std::cmp;
impl cmp::PartialEq for Board {
    /// Two boards are equal when their stone bitboards match; the move
    /// masks and turn are ignored (they are derived/positional state).
    fn eq(&self, other : &Board) -> bool {
        (self.ps, self.os) == (other.ps, other.os)
    }
}
impl fmt::Display for Board {
    // Renders the position by stamping '@' (player) and 'O' (opponent)
    // into the embedded text_board.txt template.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // Offsets into the template: XY0 is the first cell, DX/DY step one
        // column/row. NOTE(review): these assume the exact byte layout of
        // text_board.txt — re-derive them if the template changes.
        const XY0 : usize = 145;
        const DX : usize = 6;
        const DY : usize = 156;
        let mut txt_board = include_bytes!("text_board.txt").iter().cloned().collect::<Vec<_>>();
        // let mut err = write!(f, "  A B C D E F G H\n");
        for y in 0..8 {
            // let mut t = err.and(write!(f, "{}", y+1));
            // err = t;
            for x in 0..8 {
                let m = Move::new(x,y).mask();
                let xy_i = XY0 + x as usize * DX + y as usize * DY;
                // let e = err.and(
                if self.ps & m != 0 {
                    txt_board[xy_i] = '@' as u8;
                    // write!(f, " @")
                } else if self.os & m != 0 {
                    txt_board[xy_i] = 'O' as u8;
                    // write!(f, " O")
                } else {
                    // write!(f, "  ")
                }
                // );
                // err = e;
            }
            // t = err.and(write!(f, "\n"));
            // err = t;
        }
        // err
        let str_board = String::from_utf8_lossy(&txt_board);
        write!(f, "{}", str_board)
    }
}
impl fmt::Debug for Board {
    // Multi-panel dump: stones | player moves | opponent moves | player
    // stones | opponent stones. Note: `err.and(write!(..))` evaluates the
    // `write!` eagerly, so writing continues even after an earlier error
    // and only the earliest error is returned — a `?`-based rewrite would
    // change that behavior.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let mut err = write!(f, "  A B C D E F G H |  A B C D E F G H |  A B C D E F G H\n");
        for y in 0..8 {
            let mut t = err.and(write!(f, "{}", y+1));
            err = t;
            for x in 0..8 {
                let m = Move::new(x,y).mask();
                let e = err.and(
                    if self.ps & m != 0 {
                        write!(f, " @")
                    } else if self.os & m != 0 {
                        write!(f, " O")
                    } else {
                        write!(f, "  ")
                    }
                );
                err = e;
            }
            t = err.and(write!(f, " |"));
            for x in 0..8 {
                let m = Move::new(x,y).mask();
                let e = err.and(
                    if self.pm & m != 0 {
                        write!(f, " *")
                    } else {
                        write!(f, "  ")
                    }
                );
                err = e;
            }
            t = err.and(write!(f, " |"));
            for x in 0..8 {
                let m = Move::new(x,y).mask();
                let e = err.and(
                    if self.om & m != 0 {
                        write!(f, " *")
                    } else {
                        write!(f, "  ")
                    }
                );
                err = e;
            }
            t = err.and(write!(f, "|"));
            for x in 0..8 {
                let m = Move::new(x,y).mask();
                let e = err.and(
                    if self.ps & m != 0 {
                        write!(f, " *")
                    } else {
                        write!(f, "  ")
                    }
                );
                err = e;
            }
            t = err.and(write!(f, "|"));
            for x in 0..8 {
                let m = Move::new(x,y).mask();
                let e = err.and(
                    if self.os & m != 0 {
                        write!(f, " *")
                    } else {
                        write!(f, "  ")
                    }
                );
                err = e;
            }
            t = err.and(write!(f, "\n"));
            err = t;
        }
        err
    }
}
/// A bare position (stone bitboards only), without move masks or turn.
#[derive(PartialEq, Eq, Copy, Clone)]
pub struct Position {
    ps : u64,
    os : u64,
}
impl Position {
    /// Strips a full `Board` down to just its stone bitboards.
    pub fn from_board(b : Board) -> Position {
        let (ps, os) = (b.ps, b.os);
        Position { ps, os }
    }
    /// Rebuilds a full `Board` (move masks recomputed, black to move).
    pub fn to_board(&self) -> Board {
        Board::position(self.ps, self.os, Turn::BLACK)
    }
}
| true |
343d053256046309cfd7c6ee068035f2da63f0c2
|
Rust
|
Unaidedsteak/rustlings-solutions
|
/src/exercise.rs
|
UTF-8
| 1,862 | 3.328125 | 3 |
[
"MIT"
] |
permissive
|
use serde::Deserialize;
use std::fmt::{self, Display, Formatter};
use std::fs::remove_file;
use std::path::PathBuf;
use std::process::{self, Command, Output};
const RUSTC_COLOR_ARGS: &[&str] = &["--color", "always"];
/// Per-process scratch binary path, e.g. `./temp_12345`.
fn temp_file() -> String {
    let pid = process::id();
    format!("./temp_{}", pid)
}
/// How an exercise is verified: plain compilation or `rustc --test`.
#[derive(Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum Mode {
    Compile,
    Test,
}
/// Deserialized list of exercises.
#[derive(Deserialize)]
pub struct ExerciseList {
    pub exercises: Vec<Exercise>,
}
/// A single exercise: its source path and how it is checked.
#[derive(Deserialize)]
pub struct Exercise {
    pub path: PathBuf,
    pub mode: Mode,
}
impl Exercise {
    /// Invokes `rustc` on the exercise (adding `--test` in Test mode),
    /// writing the binary to the per-process temp path. Panics only when
    /// `rustc` itself cannot be spawned; compile failures are in `Output`.
    pub fn compile(&self) -> Output {
        match self.mode {
            Mode::Compile => Command::new("rustc")
                .args(&[self.path.to_str().unwrap(), "-o", &temp_file()])
                .args(RUSTC_COLOR_ARGS)
                .output(),
            Mode::Test => Command::new("rustc")
                .args(&["--test", self.path.to_str().unwrap(), "-o", &temp_file()])
                .args(RUSTC_COLOR_ARGS)
                .output(),
        }
        .expect("Failed to run 'compile' command.")
    }
    /// Runs the previously compiled temp binary and captures its output.
    pub fn run(&self) -> Output {
        Command::new(&temp_file())
            .output()
            .expect("Failed to run 'run' command")
    }
    /// Best-effort removal of the temp binary; errors are ignored.
    pub fn clean(&self) {
        let _ignored = remove_file(&temp_file());
    }
}
impl Display for Exercise {
    // Shows the exercise's source path. NOTE(review): `to_str().unwrap()`
    // panics on non-UTF-8 paths — presumably paths are always UTF-8 here;
    // confirm before relying on it.
    fn fmt(&self, f: &mut Formatter) -> fmt::Result {
        write!(f, "{}", self.path.to_str().unwrap())
    }
}
#[cfg(test)]
mod test {
    use super::*;
    use std::fs::File;
    use std::path::Path;
    // `clean` must remove the per-process temp binary if it exists.
    #[test]
    fn test_clean() {
        File::create(&temp_file()).unwrap();
        let exercise = Exercise {
            path: PathBuf::from("example.rs"),
            mode: Mode::Test,
        };
        exercise.clean();
        assert!(!Path::new(&temp_file()).exists());
    }
}
| true |
38d7f0721374e4828fd6a244e47d1c85f3dc20ec
|
Rust
|
MihirLuthra/discriminant_hash_derive
|
/tests/discriminant_hash_derive.rs
|
UTF-8
| 1,211 | 3.171875 | 3 |
[] |
no_license
|
use discriminant_hash_derive::DiscriminantHash;
use std::{
collections::hash_map::DefaultHasher,
hash::{Hash, Hasher},
};
// `Abc` derives a discriminant-only Hash: two values hash equal iff they
// are the same variant, regardless of payload — which is why a payload
// type without `Hash` (`Xyz`) is allowed.
#[derive(DiscriminantHash)]
enum Abc<'a, T> {
    Simple,
    Lifetime(&'a str),
    HashNotImpl(Xyz),
    Generic(T),
}
// Control enum using the ordinary derived Hash, for comparison.
#[allow(unused)]
#[derive(Hash)]
enum Pqr<'a> {
    Simple,
    Lifetime(&'a str),
}
// Xyz doesn't impl Hash
struct Xyz;
fn main() {
assert_eq!(my_hash(Abc::Simple::<i32>), my_hash(Abc::Simple::<Xyz>));
assert_eq!(
my_hash(Abc::Lifetime::<i32>("hello")),
my_hash(Abc::Lifetime::<Xyz>("world"))
);
assert_eq!(
my_hash(Abc::HashNotImpl::<i32>(Xyz)),
my_hash(Abc::HashNotImpl::<Xyz>(Xyz))
);
assert_eq!(
my_hash(Abc::Generic::<i32>(4)),
my_hash(Abc::Generic::<Xyz>(Xyz))
);
assert_ne!(
my_hash(Abc::Simple::<i32>),
my_hash(Abc::Lifetime::<Xyz>("abc"))
);
/*
This may be same depending on how Pqr is defined
assert_eq!(
my_hash(Abc::Simple::<i32>),
my_hash(Pqr::Simple)
);
*/
}
/// Feeds `obj` through the std `DefaultHasher` and returns the 64-bit digest.
fn my_hash<T: Hash>(obj: T) -> u64 {
    let mut state = DefaultHasher::new();
    obj.hash(&mut state);
    state.finish()
}
| true |
328e461eccfaac958f1515cf8ee1cc98926f599a
|
Rust
|
rustyforks/tendril-wiki
|
/libs/markdown/src/parsers/meta.rs
|
UTF-8
| 5,337 | 2.90625 | 3 |
[] |
no_license
|
use std::{collections::HashMap, path::Path};
use tasks::path_to_reader;
use crate::processors::tags::TagsArray;
/// Structured payload of a wiki page-edit form, decoded from raw
/// key/value form fields.
#[derive(Debug)]
pub struct EditPageData {
    // Raw page body text.
    pub body: String,
    // Tags parsed from a comma-separated form field.
    pub tags: Vec<String>,
    // New page title.
    pub title: String,
    // Title prior to the edit (empty if the form did not supply one).
    pub old_title: String,
    // Free-form `key: value` metadata attributes.
    pub metadata: HashMap<String, String>,
}
impl From<HashMap<String, String>> for EditPageData {
    /// Decodes the edit form's key/value pairs into structured page data.
    /// Unrecognized form fields are silently ignored.
    fn from(form_body: HashMap<String, String>) -> Self {
        let mut data = EditPageData {
            body: String::new(),
            tags: Vec::new(),
            title: String::new(),
            old_title: String::new(),
            metadata: HashMap::new(),
        };
        for (key, value) in form_body.iter() {
            match key.as_str() {
                "title" => data.title = value.clone(),
                "old_title" => data.old_title = value.clone(),
                "tags" => data.tags = value.split(',').map(str::to_owned).collect(),
                "body" => data.body = value.clone(),
                "metadata" => {
                    // One `key: value` attribute per line; values may
                    // themselves contain ':' characters, so split only on
                    // the first one.
                    for pair in value.split('\n') {
                        let mut halves = pair.splitn(2, ':');
                        let attr = halves.next().unwrap_or_default().to_owned();
                        let val = halves.next().unwrap_or_default().to_owned();
                        data.metadata.insert(attr, val);
                    }
                }
                _ => {}
            }
        }
        data
    }
}
// Parser states for the `---`-fenced metadata header.
#[derive(Copy, Clone, PartialEq, Debug)]
enum MetaParserState {
    // Before the opening `---` fence.
    Ready,
    // Inside the metadata header.
    Parsing,
    // After the closing `---`; everything that follows is content.
    End,
}
/// A note split into its metadata header and its body content.
#[derive(Debug, Default)]
pub struct NoteMeta {
    pub metadata: HashMap<String, String>,
    pub content: String,
}
// Tiny state machine driven by `parse_meta`.
#[derive(Copy, Clone)]
struct MetaParserMachine {
    state: MetaParserState,
}
impl MetaParserMachine {
    /// Starts in `Ready`, i.e. before the opening fence.
    pub fn new() -> Self {
        MetaParserMachine {
            state: MetaParserState::Ready,
        }
    }
    /// Transitions unconditionally to `next_state`.
    pub fn send(&mut self, next_state: MetaParserState) {
        self.state = next_state;
    }
    /// Returns the current state (the machine is `Copy`, so by value).
    pub fn current_state(self) -> MetaParserState {
        self.state
    }
}
impl From<EditPageData> for NoteMeta {
    /// Folds the form's title and serialized tags into the metadata map
    /// and carries the body over as content.
    fn from(data: EditPageData) -> Self {
        let mut metadata: HashMap<String, String> = data.metadata;
        metadata.insert("title".into(), data.title);
        let tags = TagsArray::from(data.tags);
        metadata.insert("tags".into(), tags.write());
        NoteMeta {
            metadata,
            content: data.body,
        }
    }
}
impl From<String> for NoteMeta {
    /// Parses an in-memory note in the same format as a note file on disk.
    fn from(stringified: String) -> Self {
        parse_meta(stringified.lines().map(|l| l.into()), "raw_string") // mark that we've parsed from a passed string instead of a file
    }
}
impl Into<String> for NoteMeta {
fn into(self) -> String {
let mut formatted_string = String::from("---\n");
for key in self.metadata.keys() {
formatted_string.push_str(key);
formatted_string.push_str(": ");
formatted_string.push_str(self.metadata.get(key).unwrap());
formatted_string.push('\n');
}
formatted_string.push_str("---\n");
formatted_string.push_str(&self.content);
formatted_string
}
}
/// Reads the note file at `path` and parses it into a `NoteMeta`.
pub fn path_to_data_structure(path: &Path) -> Result<NoteMeta, Box<dyn std::error::Error>> {
    let reader = path_to_reader(path)?;
    Ok(parse_meta(reader, path.to_str().unwrap()))
}
/// Splits a note's lines into its `---`-fenced metadata header and body.
///
/// `debug_marker` identifies the input source (file path or `"raw_string"`)
/// and is only used in the panic message of the `assert!` below.
///
/// NOTE(review): a header line without a `": "` separator trips the
/// `assert!` and panics instead of returning an error — confirm that a
/// hard failure on malformed notes is intended.
pub fn parse_meta(lines: impl Iterator<Item = String>, debug_marker: &str) -> NoteMeta {
    let mut parser = MetaParserMachine::new();
    let mut notemeta = NoteMeta::default();
    for line in lines {
        match line.as_str() {
            // A fence line either opens or closes the metadata header.
            "---" => match parser.current_state() {
                MetaParserState::Ready => parser.send(MetaParserState::Parsing),
                MetaParserState::Parsing => parser.send(MetaParserState::End),
                _ => {}
            },
            _ => match parser.current_state() {
                MetaParserState::Parsing => {
                    let values: Vec<&str> = line.split(": ").collect();
                    let vals: String;
                    assert!(values.len() > 1, "{}", debug_marker);
                    // Re-join so values containing ": " survive intact.
                    if values.len() > 2 {
                        vals = values[1..].join(": ");
                    } else {
                        vals = values[1].into()
                    }
                    notemeta.metadata.insert(values[0].into(), vals);
                }
                MetaParserState::End => {
                    notemeta.content.push_str(&format!("\n{}", line));
                }
                // Lines before the opening fence are discarded.
                _ => {}
            },
        }
    }
    notemeta
}
| true |
555299b1a58aaec3ccca778d185d7bb1f9c09bc5
|
Rust
|
himlpplm/rust-tdlib
|
/src/types/delete_supergroup.rs
|
UTF-8
| 2,142 | 2.953125 | 3 |
[
"MIT"
] |
permissive
|
use crate::errors::*;
use crate::types::*;
use uuid::Uuid;
/// Deletes a supergroup or channel along with all messages in the corresponding chat. This will release the supergroup or channel username and remove all members; requires owner privileges in the supergroup or channel. Chats with more than 1000 members can't be deleted using this method
#[derive(Debug, Clone, Default, Serialize, Deserialize)]
pub struct DeleteSupergroup {
    #[doc(hidden)]
    #[serde(rename(serialize = "@extra", deserialize = "@extra"))]
    extra: Option<String>,
    #[serde(rename(serialize = "@client_id", deserialize = "@client_id"))]
    client_id: Option<i32>,
    /// Identifier of the supergroup or channel
    supergroup_id: i32,
    // TDLib type tag; always "deleteSupergroup" for this request.
    #[serde(rename(serialize = "@type"))]
    td_type: String,
}
impl RObject for DeleteSupergroup {
    #[doc(hidden)]
    fn extra(&self) -> Option<&str> {
        self.extra.as_deref()
    }
    #[doc(hidden)]
    fn client_id(&self) -> Option<i32> {
        self.client_id
    }
}
impl RFunction for DeleteSupergroup {}
impl DeleteSupergroup {
pub fn from_json<S: AsRef<str>>(json: S) -> RTDResult<Self> {
Ok(serde_json::from_str(json.as_ref())?)
}
pub fn builder() -> RTDDeleteSupergroupBuilder {
let mut inner = DeleteSupergroup::default();
inner.extra = Some(Uuid::new_v4().to_string());
inner.td_type = "deleteSupergroup".to_string();
RTDDeleteSupergroupBuilder { inner }
}
pub fn supergroup_id(&self) -> i32 {
self.supergroup_id
}
}
#[doc(hidden)]
pub struct RTDDeleteSupergroupBuilder {
    inner: DeleteSupergroup,
}
impl RTDDeleteSupergroupBuilder {
    /// Finalizes the builder by cloning the accumulated request.
    pub fn build(&self) -> DeleteSupergroup {
        self.inner.clone()
    }
    /// Sets the identifier of the supergroup or channel to delete.
    pub fn supergroup_id(&mut self, supergroup_id: i32) -> &mut Self {
        self.inner.supergroup_id = supergroup_id;
        self
    }
}
impl AsRef<DeleteSupergroup> for DeleteSupergroup {
    fn as_ref(&self) -> &DeleteSupergroup {
        self
    }
}
impl AsRef<DeleteSupergroup> for RTDDeleteSupergroupBuilder {
    fn as_ref(&self) -> &DeleteSupergroup {
        &self.inner
    }
}
| true |
19f26f8652fbd78d1193151ccf924284fa840804
|
Rust
|
opaps/sparkler
|
/src/main.rs
|
UTF-8
| 1,420 | 2.578125 | 3 |
[
"MIT"
] |
permissive
|
extern crate git2;
extern crate notify;
extern crate sparkler;
use notify::{watcher, RecursiveMode, Watcher};
use std::sync::mpsc::channel;
use std::time::Duration;
use sparkler::repo;
/// Watches a repository working tree and re-reads its git status on every
/// debounced filesystem event; changes under `.git` itself are ignored.
fn main() {
    // Create a channel to receive the events.
    let (tx, rx) = channel();
    // NOTE(review): the watched path is hard-coded — presumably a dev
    // fixture; consider taking it from the command line.
    let path = "../tmp";
    let the_repo = repo::Repo::new(path);
    // Create a watcher object, delivering debounced events.
    // The notification back-end is selected based on the platform.
    let mut watcher = watcher(tx, Duration::from_secs(10)).unwrap();
    // Add a path to be watched. All files and directories at that path and
    // below will be monitored for changes.
    watcher.watch(path, RecursiveMode::Recursive).unwrap();
    println!("watching {}", path);
    loop {
        match rx.recv() {
            Ok(event) => {
                use notify::DebouncedEvent::*;
                match event {
                    Create(path) | Write(path) | Chmod(path) | Remove(path) | Rename(_, path) => {
                        println!("event: {:?}", path);
                        // exclude .git
                        if !path.iter().any(|p| p.to_str().unwrap() == ".git") {
                            // Best-effort refresh; the result is
                            // deliberately discarded here.
                            let _ = the_repo.get_status();
                        }
                    }
                    // Other debounced event kinds are ignored.
                    _ => {}
                }
            }
            Err(e) => println!("watch error: {:?}", e),
        }
    }
}
| true |
d152f8690330af0bf6ed88eac66641bec131bf1a
|
Rust
|
serverlesstechnology/cqrs
|
/src/test/mod.rs
|
UTF-8
| 939 | 2.515625 | 3 |
[
"Apache-2.0"
] |
permissive
|
//! This module provides a test framework for building a resilient test base around aggregates.
//! A `TestFramework` should be used to build a comprehensive set of aggregate tests to verify
//! your application logic.
//!
//! ```rust
//! # use cqrs_es::test::TestFramework;
//! # use cqrs_es::doc::{Customer, CustomerEvent, CustomerCommand, CustomerService};
//! # fn test() {
//! type CustomerTestFramework = TestFramework<Customer>;
//!
//! CustomerTestFramework::with(CustomerService::default())
//! .given_no_previous_events()
//! .when(CustomerCommand::AddCustomerName{
//! name: "John Doe".to_string()
//! })
//! .then_expect_events(vec![
//! CustomerEvent::NameAdded{
//! name: "John Doe".to_string()
//! }]);
//! # }
//! ```
mod executor;
mod framework;
mod validator;
pub use crate::test::executor::*;
pub use crate::test::framework::*;
pub use crate::test::validator::*;
| true |
e3918bc204ad6d5092ba4424cdb60d0ff3ebb6b2
|
Rust
|
melotic/docx-rs
|
/src/macros.rs
|
UTF-8
| 1,853 | 2.703125 | 3 |
[
"MIT"
] |
permissive
|
/// Implements `Display` and `FromStr` for a C-like enum, mapping each
/// variant to the string literal it is declared with.
#[macro_export]
#[doc(hidden)]
macro_rules! __string_enum {
    ($name:ident { $($variant:ident = $value:expr, )* }) => {
        impl std::fmt::Display for $name {
            fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                match *self {
                    $( $name::$variant => write!(f, $value), )*
                }
            }
        }
        impl std::str::FromStr for $name {
            type Err = String;
            fn from_str(s: &str) -> Result<Self, Self::Err> {
                match s {
                    $($value => Ok($name::$variant),)*
                    // Typo fix in the error message: "Unkown" -> "Unknown".
                    s => Err(format!(
                        "Unknown Value. Found `{}`, Expected `{}`",
                        s,
                        stringify!($($value,)*)
                    ))
                }
            }
        }
    }
}
/// Generates a chainable builder-style setter for a struct field; the
/// `Option` form wraps the value in `Some` automatically.
#[macro_export]
#[doc(hidden)]
macro_rules! __setter {
    // Field declared as `Option<T>`: store `Some(value.into())`.
    ($field:ident: Option<$ty:ty>) => {
        #[inline(always)]
        pub fn $field<T: Into<$ty>>(mut self, value: T) -> Self {
            self.$field = Some(value.into());
            self
        }
    };
    // Plain field: store `value.into()` directly.
    ($field:ident: $ty:ty) => {
        #[inline(always)]
        pub fn $field<T: Into<$ty>>(mut self, value: T) -> Self {
            self.$field = value.into();
            self
        }
    };
}
/// Generates a round-trip test asserting that each `$struct` serializes
/// to `$string` and parses back into an equal value via `strong-xml`.
#[macro_export]
#[doc(hidden)]
macro_rules! __xml_test_suites {
    ($type:tt, $($struct:expr, $string:expr,)*) => {
        #[test]
        fn xml_test_suites() -> strong_xml::XmlResult<()> {
            // Initialize logging once; ignore "already initialized" errors.
            let _ = env_logger::builder()
                .is_test(true)
                .format_timestamp(None)
                .try_init();
            $(
                assert_eq!($string, ($struct).to_string()?);
                assert_eq!($struct, $type::from_str($string)?);
            )*
            Ok(())
        }
    };
}
| true |
292d5520e6b4ff048c87acbe4291831f4a170991
|
Rust
|
ianzhang1988/RustOneFileExample
|
/src/bin/algo_binary_tree.rs
|
UTF-8
| 2,152 | 3.859375 | 4 |
[] |
no_license
|
use std::cell::Cell;
// A child link: either an owned boxed node or an explicit empty slot.
#[derive(Debug)]
enum NodeType<T> {
    Node(Box<Node<T>>),
    Null,
}
use NodeType::Null;
use std::borrow::{BorrowMut, Borrow};
// Binary-tree node owning its two children through `NodeType`.
#[derive(Debug)]
struct Node<T> {
    value: T,
    left: NodeType<T>,
    right:NodeType<T>,
}
// Plain struct used to demonstrate mutating fields through a `Box`.
#[derive(Debug)]
struct Foo {
    v1: u32,
    v2: u32,
    v3: String
}
/// update on Cell comment in bin/tcp_server_frame2.rs
///
/// Box is not immutable, a &mut T reference can change the data inside
/// so, what's the different between Cell and Box
/// &mut T is needed to change data in Box, on the other hand, only &T is needed to change data inside Cell
/// in other words, in Rust you can't use multiple &mut T reference to T, but some time you must change
/// data form multiple reference, that's what Cell is meant for.
///
fn box_vs_cell(){
    // data in Box can be change
    let mut b = Box::new(1);
    println!("{}", b);
    *b=2; // what..., i thought Box is immutable inside ...
    println!("{}", b);
    let mut b2 = Box::new(Foo{v1:1,v2:2,v3:"hello".to_string()});
    println!("{:?}", b2);
    b2.v1=5;
    println!("{:?}", b2);

    // The commented-out lines below do not compile: two simultaneous
    // `&mut` borrows of the same Box are rejected (error kept for reference).
    // let b3 = &mut b;
    // let b4 = &mut b;
    // **b3=4;
    // **b4=8;
    // let b3 = &mut b;
    //     ------ first mutable borrow occurs here
    // let b4 = &mut b;
    //     ^^^^^^ second mutable borrow occurs here
    // **b3=4;
    //   ------ first borrow later used here

    // Two shared references to the same Cell can both mutate it.
    // NOTE(review): `borrow_mut()` here resolves through the imported
    // `BorrowMut` blanket impl and merely reborrows; `Cell::set` only
    // needs `&self`, so plain `c1.set(2)` would work as well — confirm.
    let c = Cell::new(1);
    let mut c1 = &c;
    let mut c2 = &c;
    c1.borrow_mut().set(2);
    println!("{}", c.borrow().get());
    c2.borrow_mut().set(3);
    println!("{}", c.borrow().get());
}
fn main() {
    box_vs_cell();
    // Build a three-node tree by moving boxed nodes into place.
    let mut root = Node{value:0, left: Null, right: Null };
    let node_1 = NodeType::Node(Box::new(Node{value:1, left: Null, right: Null }));
    let node_2 = NodeType::Node(Box::new(Node{value:2, left: Null, right: Null }));
    let node_3 = NodeType::Node(Box::new(Node{value:3, left: Null, right: Null }));
    root.left=node_1;
    root.right=node_2;
    // node_1.left=node_3;
    // `node_1` was moved into `root.left` above, so `node_3` has to be
    // attached through a mutable borrow of the tree instead (see the
    // commented line).
    if let NodeType::Node(node) = &mut (root.left) {
        node.left = node_3;
    }
    println!("{:?}", root);
}
| true |
0662ed3c3f092f806eee262a753c1c8682d12ff2
|
Rust
|
richarddowner/Rust
|
/rust-by-example/modules/visibility/visibility.rs
|
UTF-8
| 1,813 | 3.6875 | 4 |
[] |
no_license
|
// By default, the items in a module have private visibility, but this
// can be overridden with the `pub` modifier. Only the public items of a
// module can be accessed from outside the module scope.
fn function() {
    println!("called `function()`");
}
mod my {
    // A public function
    pub fn function() {
        println!("called `my::function()`");
    }
    // A private function
    fn private_function() {
        println!("called `my::private_function()`");
    }
    // Items can access other items in the same module
    pub fn indirect_access() {
        print!("called `my::indirect_access()`, that \n> ");
        // Regardless of their visibility
        private_function();
    }
    // A public module
    pub mod nested {
        pub fn function() {
            println!("called `my::nested::function()`");
        }
        #[allow(dead_code)]
        fn private_function() {
            println!("called `my::nested::private_function()`");
        }
    }
    // A private module
    // (note: the identifier is misspelled — "inaccessibile" — but it is
    // only referenced from commented-out code below)
    mod inaccessibile {
        #[allow(dead_code)]
        pub fn public_function() {
            println!("called `my::inaccessible::public_function()`");
        }
    }
}
fn main() {
    // The public items of a module can be accessed
    my::function();
    // Modules allow disambiguation between items that have the same name
    function();
    // the private items of a module can't be directly accessed
    // Error! `private_function` is private
    // my::private_function();
    my::indirect_access();
    // Public items inside public nested modules can be accessed from outside
    // the parent module
    my::nested::function();
    // but private items inside public nested modules can't be accessed
    // Error! `private_function` is private
    // my::nested::private_function();
    // Items inside private nested modules can't be accessed, regardless of
    // their visibility
    // Error! `inaccessible` is a private module
    // my::inaccessibile::public_function();
}
| true |
1efee4dee7f4decf0adeb12428a40de8051e1e36
|
Rust
|
tmikus/rlox
|
/src/token.rs
|
UTF-8
| 1,234 | 3.484375 | 3 |
[] |
no_license
|
use std::any::Any;
use std::fmt;
use token_type::TokenType;
/// A lexical token produced by the scanner.
pub struct Token {
    // Token category (keyword, operator, literal, ...).
    token_type: TokenType,
    // The exact source text this token was scanned from.
    lexeme: String,
    // Type-erased literal payload; downcast as `String` or `f64` where used.
    literal: Option<Box<Any>>,
    // Source line the token appeared on.
    line: i32,
}
impl Token {
    /// Creates a token; `literal` carries the parsed value for literal
    /// tokens (downcast elsewhere as `String` or `f64`).
    pub fn new(token_type: TokenType, lexeme: String, literal: Option<Box<Any>>, line: i32) -> Token {
        // Field-init shorthand: each parameter maps 1:1 onto a field.
        Token {
            token_type,
            lexeme,
            literal,
            line,
        }
    }
}
impl fmt::Display for Token {
    fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        match self.literal {
            Some(ref value) => {
                // The literal is type-erased; try the two payload types
                // handled here, in order.
                if let Some(string) = value.downcast_ref::<String>() {
                    format_value(formatter, &self.token_type, &self.lexeme, string)
                } else if let Some(double) = value.downcast_ref::<f64>() {
                    format_value(formatter, &self.token_type, &self.lexeme, double)
                } else {
                    // Unknown payload type: fall back to a placeholder.
                    format_value(formatter, &self.token_type, &self.lexeme, "None")
                }
            },
            None => format_value(formatter, &self.token_type, &self.lexeme, "None")
        }
    }
}
/// Writes `token_type` (debug form), the lexeme, and the literal value,
/// space-separated, into the formatter.
fn format_value<T>(
    formatter: &mut fmt::Formatter,
    token_type: &TokenType,
    lexeme: &String,
    value: T,
) -> fmt::Result
where
    T: fmt::Display,
{
    write!(formatter, "{:?} {} {}", token_type, lexeme, value)
}
| true |
d8c9ee153aeb0f0462a6178341091c396d824d73
|
Rust
|
ycrypto/salty
|
/build.rs
|
UTF-8
| 2,462 | 2.578125 | 3 |
[
"CC0-1.0",
"MIT",
"Apache-2.0",
"LicenseRef-scancode-public-domain"
] |
permissive
|
use std::env;
fn main() -> Result<(), Box<dyn std::error::Error>> {
    println!("cargo:rerun-if-changed=build.rs");
    // Cortex-M33 is compatible with Cortex-M4 and its DSP extension instruction UMAAL.
    let target = env::var("TARGET")?;
    let cortex_m4 = target.starts_with("thumbv7em") || target.starts_with("thumbv8m.main");
    let fast_cortex_m4 = cortex_m4 && !cfg!(feature = "slow-motion");
    if fast_cortex_m4 {
        // According to the ARMv7-M Architecture Reference Manual,
        // there are two architecture extensions:
        // - the DSP extension: this is what we need, it is also called
        // "an ARMv7E-M implementation".
        // - the floating-extension: we don't use this
        //
        // The Cortex-M4 processor implements the ARMV7E-M architecture,
        // and according to its Technical Reference Manual (section 3.3.1),
        // the UMAAL instruction takes exactly 1 cycle.
        //
        // In the ARMv8-M Architecture Reference Manual, we read that
        // there are several extensions: main, security, floating-point,
        // DSP,... and that the main extension is a prerequisite for the
        // DSP extension. The Cortex-M33 Technical Reference Manual (section B1.3)
        // states that the DSP extension is optional, so technically
        // `thumbv8m.main-none-eabi[hf]` is not sufficiently specified.
        // It does *not* contain any data on the number of cycles for UMAAL.
        //
        // We treat Cortex-M33 as Cortex-M4 with possibly extra features.
        //
        // Fix: reuse the TARGET value fetched above instead of reading the
        // environment variable a second time. The check below is purely
        // defensive — `fast_cortex_m4` already implies one of these
        // prefixes — and only fires if the logic above ever changes.
        if !(target.starts_with("thumbv7em") || target.starts_with("thumbv8m.main")) {
            panic!(
                concat!(
                    "Target `{}` is not a Cortex-M processor with the DSP extension.\n",
                    "Try `--target thumbv7em-none-eabi` or `--target thumbv8m.main-none-eabi`\n",
                ),
                target
            );
        }
        // Link the pre-assembled Haase field-arithmetic routines.
        let out_dir = std::path::PathBuf::from(std::env::var("OUT_DIR").unwrap());
        std::fs::copy("bin/salty-asm.a", out_dir.join("libsalty-asm.a")).unwrap();
        println!("cargo:rustc-link-lib=static=salty-asm");
        println!("cargo:rustc-link-search={}", out_dir.display());
        println!("cargo:rerun-if-changed=bin/salty-asm.a");
        println!("cargo:rustc-cfg=haase");
    } else {
        // Portable fallback implementation.
        println!("cargo:rustc-cfg=tweetnacl");
    }
    Ok(())
}
| true |
05019171098db2209283b43f10bd63c951ee233d
|
Rust
|
bouzuya/rust-atcoder
|
/cargo-atcoder/contests/arc022/src/bin/a.rs
|
UTF-8
| 461 | 2.90625 | 3 |
[] |
no_license
|
use proconio::input;
use proconio::marker::Chars;
/// ARC022 A: scans `s` once, advancing a cursor through the letters
/// 'i', 'c', 't' (either case); prints "YES" when all three are found
/// in order as a subsequence.
fn main() {
    input! {
        s: Chars,
    };
    // `x` counts how many of 'i', 'c', 't' have been matched so far.
    // Each character can advance the cursor at most once, because the
    // three branches test mutually exclusive letters.
    let mut x = 0;
    for s_i in s {
        if x == 0 && (s_i == 'i' || s_i == 'I') {
            x += 1;
        }
        if x == 1 && (s_i == 'c' || s_i == 'C') {
            x += 1;
        }
        if x == 2 && (s_i == 't' || s_i == 'T') {
            x += 1;
        }
    }
    let ans = x == 3;
    println!("{}", if ans { "YES" } else { "NO" });
}
| true |
b1e0d1fc90060b8face28e0ac224fd74761fbc9e
|
Rust
|
exercism/rust
|
/exercises/practice/armstrong-numbers/tests/armstrong-numbers.rs
|
UTF-8
| 1,588 | 3.28125 | 3 |
[
"MIT"
] |
permissive
|
use armstrong_numbers::*;
// An Armstrong (narcissistic) number of d digits equals the sum of its
// digits each raised to the d-th power; `is_armstrong_number` is the
// exercise function under test.
#[test]
fn test_zero_is_an_armstrong_number() {
    assert!(is_armstrong_number(0))
}
#[test]
#[ignore]
fn test_single_digit_numbers_are_armstrong_numbers() {
    assert!(is_armstrong_number(5))
}
#[test]
#[ignore]
fn test_there_are_no_2_digit_armstrong_numbers() {
    assert!(!is_armstrong_number(10))
}
#[test]
#[ignore]
fn test_three_digit_armstrong_number() {
    assert!(is_armstrong_number(153))
}
#[test]
#[ignore]
fn test_three_digit_non_armstrong_number() {
    assert!(!is_armstrong_number(100))
}
#[test]
#[ignore]
fn test_four_digit_armstrong_number() {
    assert!(is_armstrong_number(9474))
}
#[test]
#[ignore]
fn test_four_digit_non_armstrong_number() {
    assert!(!is_armstrong_number(9475))
}
#[test]
#[ignore]
fn test_seven_digit_armstrong_number() {
    assert!(is_armstrong_number(9_926_315))
}
#[test]
#[ignore]
fn test_seven_digit_non_armstrong_number() {
    assert!(!is_armstrong_number(9_926_316))
}
#[test]
#[ignore]
fn test_nine_digit_armstrong_number() {
    assert!(is_armstrong_number(912_985_153));
}
#[test]
#[ignore]
fn test_nine_digit_non_armstrong_number() {
    assert!(!is_armstrong_number(999_999_999));
}
#[test]
#[ignore]
fn test_ten_digit_non_armstrong_number() {
    assert!(!is_armstrong_number(3_999_999_999));
}
// The following number has an Armstrong sum equal to 2^32 plus itself,
// and therefore will be detected as an Armstrong number if you are
// incorrectly using wrapping arithmetic.
#[test]
#[ignore]
fn test_properly_handles_overflow() {
    assert!(!is_armstrong_number(4_106_098_957));
}
| true |
436254bbffc76e62195484497be09df81a7c7a2e
|
Rust
|
leontoeides/google_maps
|
/src/roads/nearest_roads/request/new.rs
|
UTF-8
| 1,503 | 3.078125 | 3 |
[
"MIT",
"Apache-2.0"
] |
permissive
|
use crate::client::GoogleMapsClient;
use crate::types::LatLng;
use crate::roads::nearest_roads::request::Request;
// =============================================================================
impl<'a> Request<'a> {
    // -------------------------------------------------------------------------
    //
    /// Initializes the builder pattern for a Nearest Roads query with the
    /// required parameters.
    ///
    /// Note: The snapping algorithm works best for points that are not too far
    /// apart. If you observe odd snapping behavior, try creating paths that
    /// have points closer together. To ensure the best snap-to-road quality,
    /// you should aim to provide paths on which consecutive pairs of points are
    /// within 300m of each other. This will also help in handling any isolated,
    /// long jumps between consecutive points caused by GPS signal loss, or
    /// noise.
    ///
    /// ## Arguments:
    ///
    /// * `client` ‧ Your application's Google Maps API client struct.
    ///
    /// * `points` ‧ The points to be snapped. The points parameter accepts a
    /// list of latitude/longitude pairs.
    pub fn new(
        client: &GoogleMapsClient,
        points: Vec<LatLng>,
    ) -> Request {
        // Instantiate struct and return it to caller:
        Request {
            // Required parameters:
            client,
            points,
            // Internal use only:
            // The URL query string starts unset and is assembled later.
            query: None,
        } // struct
    } // fn
} // impl
| true |
4e4b0e1b4802bb501dd0fc992c8aff53b718b653
|
Rust
|
paritytech/substrate
|
/utils/frame/benchmarking-cli/src/extrinsic/extrinsic_factory.rs
|
UTF-8
| 2,307 | 2.59375 | 3 |
[
"Apache-2.0",
"GPL-3.0-or-later",
"Classpath-exception-2.0",
"GPL-1.0-or-later",
"GPL-3.0-only"
] |
permissive
|
// This file is part of Substrate.
// Copyright (C) Parity Technologies (UK) Ltd.
// SPDX-License-Identifier: Apache-2.0
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Provides the [`ExtrinsicFactory`] and the [`ExtrinsicBuilder`] types.
//! Is used by the *overhead* and *extrinsic* benchmarks to build extrinsics.
use sp_runtime::OpaqueExtrinsic;
/// Helper to manage [`ExtrinsicBuilder`] instances.
#[derive(Default)]
pub struct ExtrinsicFactory(pub Vec<Box<dyn ExtrinsicBuilder>>);

impl ExtrinsicFactory {
    /// Returns a builder for a pallet and extrinsic name.
    ///
    /// Is case in-sensitive.
    pub fn try_get(&self, pallet: &str, extrinsic: &str) -> Option<&dyn ExtrinsicBuilder> {
        let wanted_pallet = pallet.to_lowercase();
        let wanted_extrinsic = extrinsic.to_lowercase();
        self.0
            .iter()
            .map(|b| b.as_ref())
            .find(|b| b.pallet() == wanted_pallet && b.extrinsic() == wanted_extrinsic)
    }
}
/// Used by the benchmark to build signed extrinsics.
///
/// The built extrinsics only need to be valid in the first block
/// who's parent block is the genesis block.
/// This assumption simplifies the generation of the extrinsics.
/// The signer should be one of the pre-funded dev accounts.
pub trait ExtrinsicBuilder {
    /// Name of the pallet this builder is for.
    ///
    /// Should be all lowercase.
    fn pallet(&self) -> &str;
    /// Name of the extrinsic this builder is for.
    ///
    /// Should be all lowercase.
    fn extrinsic(&self) -> &str;
    /// Builds an extrinsic.
    ///
    /// Will be called multiple times with increasing nonces.
    /// Returns a static error message when construction fails.
    fn build(&self, nonce: u32) -> std::result::Result<OpaqueExtrinsic, &'static str>;
}
impl dyn ExtrinsicBuilder + '_ {
    /// Name of this builder in CSV format: `pallet, extrinsic`.
    pub fn name(&self) -> String {
        [self.pallet(), self.extrinsic()].join(", ")
    }
}
| true |
da8f679b2ef0c305e22cdbae4762fe37421907aa
|
Rust
|
buttercrab/hyeo-ung-lang
|
/src/app/interpreter.rs
|
UTF-8
| 3,750 | 3.015625 | 3 |
[
"MIT"
] |
permissive
|
use crate::core::state::UnOptState;
use crate::core::{execute, parse};
use crate::util::error::Error;
use crate::util::io;
use crate::util::option::HyeongOption;
use std::io::{stdin, Write};
use std::process;
use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::Arc;
use termcolor::{Color, ColorSpec, StandardStream, WriteColor};
/// Interpreter
///
/// It gets code line by line and executes.
/// Prints stdout and stderr separately.
///
/// Commands understood at the prompt: `clear` (reset interpreter state),
/// `help`, `exit`; any other input is parsed and executed as code.
#[cfg(not(tarpaulin_include))]
pub fn run(stdout: &mut StandardStream, hy_opt: &HyeongOption) -> Result<(), Error> {
    // `running` is true while we are blocked reading user input; the
    // Ctrl-C handler only re-prints the prompt inside that window.
    let running = Arc::new(AtomicBool::new(true));
    let r = running.clone();
    let color = hy_opt.color;
    let mut state = UnOptState::new();
    ctrlc::set_handler(move || {
        if r.load(Ordering::SeqCst) {
            r.store(false, Ordering::SeqCst);
            let mut stdout = StandardStream::stdout(color);
            write!(stdout, "\ntype \"흑.하앙...\" or \"exit\" to exit\n").unwrap();
            stdout
                .set_color(ColorSpec::new().set_fg(Some(Color::Cyan)).set_bold(true))
                .unwrap();
            write!(stdout, ">").unwrap();
            stdout.reset().unwrap();
            write!(stdout, " ").unwrap();
            stdout.flush().unwrap();
            r.store(true, Ordering::SeqCst);
        }
    })
    .expect("Error setting Ctrl-C handler");
    writeln!(stdout, "Hyeo-ung Programming Language")?;
    writeln!(stdout, "type help for help")?;
    loop {
        // Print a bold cyan "> " prompt.
        stdout.set_color(ColorSpec::new().set_fg(Some(Color::Cyan)).set_bold(true))?;
        write!(stdout, ">")?;
        stdout.reset()?;
        write!(stdout, " ")?;
        stdout.flush()?;
        running.store(true, Ordering::SeqCst);
        let input = io::read_line_from(&mut std::io::stdin())?;
        running.store(false, Ordering::SeqCst);
        // An empty read (EOF) terminates the session.
        if input == *"" {
            process::exit(0);
        }
        // Buffer the program's stdout and tag each flushed chunk with a
        // bold "[stdout]" prefix.
        let mut out = io::CustomWriter::new(|x| {
            if !x.is_empty() {
                let mut stdout = StandardStream::stdout(hy_opt.color);
                write!(stdout, "[")?;
                stdout.set_color(ColorSpec::new().set_bold(true))?;
                write!(stdout, "stdout")?;
                stdout.reset()?;
                writeln!(stdout, "] {}", x)?;
                stdout.flush()?;
            }
            Ok(())
        });
        // Same for the program's stderr, tagged "[stderr]" in red.
        let mut err = io::CustomWriter::new(|x| {
            if !x.is_empty() {
                let mut stdout = StandardStream::stdout(hy_opt.color);
                write!(stdout, "[")?;
                stdout.set_color(ColorSpec::new().set_bold(true).set_fg(Some(Color::Red)))?;
                write!(stdout, "stderr")?;
                stdout.reset()?;
                writeln!(stdout, "] {}", x)?;
                stdout.flush()?;
            }
            Ok(())
        });
        match input.trim() {
            "" => {
                continue;
            }
            "clear" => {
                state = UnOptState::new();
            }
            "help" => {
                writeln!(stdout, "clear Clears the state")?;
                writeln!(stdout, "exit Exit this interpreter")?;
                writeln!(stdout, " You can also exit by typing \"흑.하앙...\"")?;
                writeln!(stdout, "help Print this")?;
                continue;
            }
            "exit" => {
                process::exit(0);
            }
            _ => {
                // Everything else is source code: parse the line and run
                // each command against the accumulated interpreter state.
                let code = parse::parse(input);
                for c in code.iter() {
                    state = execute::execute(&mut stdin(), &mut out, &mut err, state, c)?;
                }
            }
        }
        out.flush().unwrap();
        err.flush().unwrap();
    }
}
| true |
f66df4f9f649c4f2e753c3dfc6033360864995ef
|
Rust
|
HideakiImamura/MyCompetitiveProgramming
|
/BeginnersSelection/ABC086C.rs
|
UTF-8
| 775 | 3.515625 | 4 |
[] |
no_license
|
/// Reads one whitespace-trimmed value of type `T` from stdin.
fn read<T: std::str::FromStr>() -> T {
    let mut n = String::new();
    std::io::stdin().read_line(&mut n).ok();
    n.trim().parse().ok().unwrap()
}

/// Reads one line of whitespace-separated values of type `T` from stdin.
fn read_vec<T: std::str::FromStr>() -> Vec<T> {
    let mut s = String::new();
    std::io::stdin().read_line(&mut s).ok();
    s.trim().split_whitespace()
        .map(|e| e.parse().ok().unwrap()).collect()
}

/// ABC086C "Traveling": decides whether a plan of `(t, x, y)` visits,
/// starting from (0, 0) at time 0, is feasible for a traveler who must
/// move exactly one unit in x or y every time step.
fn can_travel(points: &[(i64, i64, i64)]) -> bool {
    let (mut pt, mut px, mut py) = (0i64, 0i64, 0i64);
    for &(t, x, y) in points {
        let dt = t - pt;
        let dist = (x - px).abs() + (y - py).abs();
        // Reachable iff there is enough time and the leftover steps can be
        // burned in back-and-forth pairs (parity of dist and dt matches).
        if dist > dt || dist % 2 != dt % 2 {
            return false;
        }
        pt = t;
        px = x;
        py = y;
    }
    true
}

fn main() {
    let n = read::<u32>();
    let mut plan = Vec::with_capacity(n as usize);
    for _ in 0..n {
        let v = read_vec::<i64>();
        plan.push((v[0], v[1], v[2]));
    }
    // Bug fix: the original shadowed `t`, `x`, `y` inside the loop, so every
    // point was checked against the origin instead of the previous point
    // (e.g. (2,2,0) -> (3,0,1) was wrongly accepted). `can_travel` tracks
    // the previous point, and signed arithmetic avoids the u32 underflow
    // the original risked when coordinates decrease.
    println!("{}", if can_travel(&plan) { "Yes" } else { "No" });
}
| true |
2299e3330098ec34aaed9439924a9245732cb4a8
|
Rust
|
yutiansut/sonnerie
|
/src/database_reader.rs
|
UTF-8
| 3,442 | 2.84375 | 3 |
[
"BSD-2-Clause",
"BSD-3-Clause"
] |
permissive
|
use std::path::{Path,PathBuf};
use std::fs::File;
use std::io::Seek;
use crate::merge::Merge;
use crate::record::OwnedRecord;
use crate::key_reader::*;
use crate::Wildcard;
use byteorder::ByteOrder;
/// Read-only view over a database directory: the `main` file plus any
/// `tx.*` transaction files, merged on read.
pub struct DatabaseReader
{
	_dir: PathBuf,
	// One (path, reader) per backing file, in merge order (main first).
	txes: Vec<(PathBuf,Reader)>,
}
impl DatabaseReader
{
	/// Opens the database at `dir`, including its `main` file.
	pub fn new(dir: &Path)
		-> std::io::Result<DatabaseReader>
	{
		Self::new_opts(dir, true)
	}
	/// Opens only the `tx.*` transaction files, skipping `main`.
	pub fn without_main_db(dir: &Path)
		-> std::io::Result<DatabaseReader>
	{
		Self::new_opts(dir, false)
	}
	// Shared constructor: collects `tx.*` files (ignoring `*.tmp`),
	// sorts them by name, optionally prepends `main`, and skips any
	// zero-length file with a warning on stderr.
	fn new_opts(dir: &Path, include_main_db: bool)
		-> std::io::Result<DatabaseReader>
	{
		let dir_reader = std::fs::read_dir(dir)?;
		let mut paths = vec!();
		for entry in dir_reader
		{
			let entry = entry?;
			if let Some(s) = entry.file_name().to_str()
			{
				if s.starts_with("tx.") && !s.ends_with(".tmp")
				{
					paths.push(entry.path());
				}
			}
		}
		paths.sort();
		let mut txes = Vec::with_capacity(paths.len());
		if include_main_db
		{
			let main_db_name = dir.join("main");
			let mut f = File::open(&main_db_name)?;
			// Seek to the end to learn the file's length.
			let len = f.seek(std::io::SeekFrom::End(0))? as usize;
			if len == 0
			{
				eprintln!("disregarding main database, it is zero length");
			}
			else
			{
				let main_db = Reader::new(f)?;
				txes.push( (main_db_name, main_db) );
			}
		}
		for p in paths
		{
			let mut f = File::open(&p)?;
			let len = f.seek(std::io::SeekFrom::End(0))? as usize;
			if len == 0
			{
				eprintln!("disregarding {:?}, it is zero length", p);
				continue;
			}
			let r = Reader::new(f)?;
			txes.push( (p,r) );
		}
		Ok(DatabaseReader
		{
			txes,
			_dir: dir.to_owned(),
		})
	}
	/// Paths of every file this reader opened, in merge order.
	pub fn transaction_paths(&self) -> Vec<PathBuf>
	{
		self.txes
			.iter()
			.map( |e| e.0.clone())
			.collect()
	}
	/// Reads every record for exactly one key (inclusive range of one).
	pub fn get<'rdr, 'k>(&'rdr self, key: &'k str)
		-> DatabaseKeyReader<'rdr, 'k, std::ops::RangeInclusive<&'k str>>
	{
		self.get_range( key ..= key )
	}
	/// Reads every record in a key range, merging all backing files.
	/// Ties between files are ordered by the leading big-endian u64 of
	/// each value (presumably a timestamp — confirm with the format).
	pub fn get_range<'d, 'r, RB>(&'d self, range: RB)
		-> DatabaseKeyReader<'d, 'r, RB>
	where
		RB: std::ops::RangeBounds<&'r str> + Clone
	{
		let mut readers = Vec::with_capacity(self.txes.len());
		for tx in &self.txes
		{
			readers.push( tx.1.get_range(range.clone()) );
		}
		let merge = Merge::new(
			readers,
			|a, b|
			{
				a.key().cmp(b.key())
					.then_with(
						||
						byteorder::BigEndian::read_u64(a.value())
							.cmp(&byteorder::BigEndian::read_u64(b.value()))
					)
			},
		);
		DatabaseKeyReader
		{
			_db: self,
			merge: Box::new(merge),
		}
	}
	/// Like `get_range`, but selects keys matching `wildcard`.
	pub fn get_filter<'d, 'k>(&'d self, wildcard: &'k Wildcard)
		-> DatabaseKeyReader<'d, 'k, std::ops::RangeFrom<&'k str>>
	{
		let mut readers = Vec::with_capacity(self.txes.len());
		for tx in &self.txes
		{
			readers.push( tx.1.get_filter(wildcard) );
		}
		let merge = Merge::new(
			readers,
			|a, b|
			{
				a.key().cmp(b.key())
					.then_with(
						||
						byteorder::BigEndian::read_u64(a.value())
							.cmp(&byteorder::BigEndian::read_u64(b.value()))
					)
			},
		);
		DatabaseKeyReader
		{
			_db: self,
			merge: Box::new(merge),
		}
	}
}
/// Iterator over merged records produced by the `get*` methods on
/// `DatabaseReader`.
pub struct DatabaseKeyReader<'d, 'r, RB>
where
	RB: std::ops::RangeBounds<&'r str>
{
	_db: &'d DatabaseReader,
	// Merged view over one sub-reader per backing file.
	merge: Box<Merge<
		StringKeyRangeReader<'d, 'r, RB>, OwnedRecord,
	>>,
}
impl<'d, 'r, RB> Iterator for DatabaseKeyReader<'d, 'r, RB>
where
	RB: std::ops::RangeBounds<&'r str>
{
	type Item = OwnedRecord;
	// Delegates straight to the underlying merge iterator.
	fn next(&mut self) -> Option<Self::Item>
	{
		self.merge.next()
	}
}
| true |
844e58e125b26111e4f37cbec259eada66a2ea87
|
Rust
|
TurtlePU/odlang-rs
|
/src/eval.rs
|
UTF-8
| 1,565 | 2.71875 | 3 |
[] |
no_license
|
use crate::{syntax::*, prelude::*, typeck};
pub fn eval(term: Term) -> Term {
match (*term).clone() {
TmApp(f, x) => match ((*eval(f)).clone(), eval(x)) {
(TmAbs(v, _, y), x) => eval(subst(x, y, v)),
(f, x) => de::app(f, x),
},
TmTyApp(f, t) => match (*eval(f)).clone() {
TmTyAbs(v, y) => eval(subst_type(t, y, v)),
term => de::ty_app(term, t),
},
_ => term,
}
}
/// Substitutes type `with` for type variable `var` throughout `term`,
/// recursing into both term structure and the type annotations it carries
/// (via `typeck::subst_type`).
fn subst_type(with: Type, term: Term, var: Var) -> Term {
    match (*term).clone() {
        TmUnit => term,
        TmVar(_) => term,
        // Type variables can occur in a lambda's annotation as well as in
        // its body.
        TmAbs(n, ty, y) => de::abs(n, typeck::subst_type(ty, with, var), y),
        TmApp(f, x) => {
            de::app(subst_type(with.clone(), f, var), subst_type(with, x, var))
        }
        // NOTE(review): recursion proceeds under the type binder without
        // any shadowing check — presumably type variables are globally
        // unique; confirm.
        TmTyAbs(n, body) => de::ty_abs(n, subst_type(with, body, var)),
        TmTyApp(f, x) => de::ty_app(
            subst_type(with.clone(), f, var),
            typeck::subst_type(x, with, var),
        ),
        // Errors are rejected before evaluation, so this arm is dead here.
        TmError => unreachable!()
    }
}
/// Substitutes term `with` for variable `what` inside `inside`.
///
/// NOTE(review): no alpha-renaming is performed here — this is only
/// capture-safe if `Var`s are globally unique (e.g. freshened during
/// parsing); confirm against the `Var` representation.
fn subst(with: Term, inside: Term, what: Var) -> Term {
    match (*inside).clone() {
        TmUnit => inside,
        // The variable being replaced.
        TmVar(var) if var == what => with,
        // Any other variable is left alone.
        TmVar(_) => inside,
        TmAbs(n, ty, y) => de::abs(n, ty, subst(with, y, what)),
        TmApp(f, x) => {
            de::app(subst(with.clone(), f, what), subst(with, x, what))
        }
        TmTyAbs(n, y) => de::ty_abs(n, subst(with, y, what)),
        TmTyApp(f, t) => de::ty_app(subst(with, f, what), t),
        // Errors are rejected before evaluation, so this arm is dead here.
        TmError => unreachable!()
    }
}
| true |
34c62de1c3c4ef79e1bbc6fc1db3af9304717401
|
Rust
|
TheAlgorithms/Rust
|
/src/sorting/comb_sort.rs
|
UTF-8
| 1,322 | 3.5625 | 4 |
[
"MIT"
] |
permissive
|
/// Sorts a slice in place using comb sort: bubble sort with a gap that
/// shrinks by a factor of 1.3 each pass until it reaches 1, after which
/// passes continue until no swap occurs.
///
/// Runs in O(n^2) worst case but typically much faster than plain bubble
/// sort in practice.
pub fn comb_sort<T: Ord>(arr: &mut [T]) {
    // Slices shorter than 2 elements are already sorted. This also fixes a
    // panic in the original: for an empty slice the gap becomes 1 and
    // `arr.len() - gap` underflows `usize`.
    if arr.len() < 2 {
        return;
    }
    let mut gap = arr.len();
    let shrink = 1.3;
    let mut sorted = false;
    while !sorted {
        // Shrink the gap; once it hits 1 we are in the final
        // bubble-sort phase and stop after the first swap-free pass.
        gap = (gap as f32 / shrink).floor() as usize;
        if gap <= 1 {
            gap = 1;
            sorted = true;
        }
        for i in 0..arr.len() - gap {
            let j = i + gap;
            if arr[i] > arr[j] {
                arr.swap(i, j);
                // A swap means the slice may still be unsorted.
                sorted = false;
            }
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::sorting::have_same_elements;
    use crate::sorting::is_sorted;
    #[test]
    fn descending() {
        // Reverse-sorted input: worst case for bubble-style sorts.
        let mut ve1 = vec![6, 5, 4, 3, 2, 1];
        let cloned = ve1.clone();
        comb_sort(&mut ve1);
        assert!(is_sorted(&ve1) && have_same_elements(&ve1, &cloned));
    }
    #[test]
    fn ascending() {
        // Already-sorted input must be left intact.
        let mut ve2 = vec![1, 2, 3, 4, 5, 6];
        let cloned = ve2.clone();
        comb_sort(&mut ve2);
        assert!(is_sorted(&ve2) && have_same_elements(&ve2, &cloned));
    }
    #[test]
    fn duplicates() {
        // Mostly-equal elements with one out of place.
        let mut ve3 = vec![2, 2, 2, 2, 2, 1];
        let cloned = ve3.clone();
        comb_sort(&mut ve3);
        assert!(is_sorted(&ve3) && have_same_elements(&ve3, &cloned));
    }
}
| true |
e17244d627a34540671d7e6112510e8ff251881a
|
Rust
|
dennisss/dacha
|
/pkg/common/src/collections.rs
|
UTF-8
| 2,320 | 3.0625 | 3 |
[
"Apache-2.0"
] |
permissive
|
use core::fmt::Debug;
use core::iter::Iterator;
use core::marker::PhantomData;
use core::mem::zeroed;
use core::mem::MaybeUninit;
use core::ops::{Deref, DerefMut};
use crate::const_default::ConstDefault;
/// UTF-8 string stored in a caller-provided fixed-size byte buffer `A`
/// (e.g. `[u8; N]`); never heap-allocates and panics on overflow.
#[derive(Clone, PartialEq)]
pub struct FixedString<A> {
    data: A,       // backing byte storage; its full extent is the capacity
    length: usize, // number of initialized UTF-8 bytes at the front of `data`
}
impl<A: AsRef<[u8]> + AsMut<[u8]>> FixedString<A> {
    /// Wraps `data` as an empty string; existing buffer contents are
    /// ignored until overwritten by pushes.
    pub const fn new(data: A) -> Self {
        Self { data, length: 0 }
    }
    /// Appends one char, encoded as UTF-8 directly into the buffer.
    ///
    /// Panics if fewer than `c.len_utf8()` bytes of capacity remain
    /// (`char::encode_utf8` panics on an insufficient buffer).
    pub fn push(&mut self, c: char) {
        let remaining = &mut self.data.as_mut()[self.length..];
        self.length += c.encode_utf8(remaining).len();
    }
    /// Appends a string slice.
    ///
    /// Panics if `s` does not fit in the remaining capacity (the slice
    /// index below goes out of bounds in that case).
    pub fn push_str(&mut self, s: &str) {
        let remaining = &mut self.data.as_mut()[self.length..];
        remaining[0..s.len()].copy_from_slice(s.as_bytes());
        self.length += s.len();
    }
}
impl<A: ConstDefault> Default for FixedString<A> {
    /// An empty string over `A`'s compile-time default buffer.
    fn default() -> Self {
        Self {
            data: A::DEFAULT,
            length: 0,
        }
    }
}
// Earlier variant kept for reference: it required `A: Default` instead of
// `A: ConstDefault`.
// impl<A: AsRef<[u8]> + AsMut<[u8]> + Default> From<&str> for FixedString<A> {
//     fn from(v: &str) -> Self {
//         let mut inst = Self::new(A::default());
//         inst.push_str(v);
//         inst
//     }
// }
impl<A: AsRef<[u8]> + AsMut<[u8]> + ConstDefault> From<&str> for FixedString<A> {
    /// Copies `s` into a fresh buffer. Panics if `s` exceeds the buffer's
    /// capacity (see `push_str`).
    fn from(s: &str) -> Self {
        let mut inst = Self::DEFAULT;
        inst.push_str(s);
        inst
    }
}
impl<A: AsRef<[u8]>> AsRef<[u8]> for FixedString<A> {
    /// Only the initialized prefix of the buffer, never the spare capacity.
    fn as_ref(&self) -> &[u8] {
        &self.data.as_ref()[0..self.length]
    }
}
impl<A: AsRef<[u8]>> AsRef<str> for FixedString<A> {
    fn as_ref(&self) -> &str {
        // SAFETY: all operations we implement are valid UTF-8 mutations so
        // the underlying storage always contains valid UTF-8 data up to
        // `self.length`.
        unsafe { core::str::from_utf8_unchecked(AsRef::<[u8]>::as_ref(self)) }
    }
}
impl<A: AsRef<[u8]> + AsMut<[u8]>> Deref for FixedString<A> {
    type Target = str;
    // Lets `&str` methods be called directly on a `FixedString`.
    fn deref(&self) -> &Self::Target {
        self.as_ref()
    }
}
impl<A: AsRef<[u8]> + AsMut<[u8]> + ConstDefault> ConstDefault for FixedString<A> {
    // Compile-time empty string over the buffer's constant default.
    const DEFAULT: Self = Self::new(A::DEFAULT);
}
impl<A: AsRef<[u8]>> Debug for FixedString<A> {
    // Debug-formats as the contained string, not the raw buffer.
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        let s: &str = self.as_ref();
        s.fmt(f)
    }
}
| true |
e145fdf818bf44c43312c8da3fd7a0c0b0ee2807
|
Rust
|
tyu-ru/proconio_enum_query
|
/tests/compile_error/incollect_arg.rs
|
UTF-8
| 421 | 2.515625 | 3 |
[
"CC0-1.0"
] |
permissive
|
// Compile-fail fixture for the `proconio_enum_query` attribute macro: each
// item below passes an invalid attribute argument and is expected to be
// rejected at compile time (NOTE(review): presumably driven by a
// trybuild-style harness — confirm). The repeated `Query3` name is part of
// the intentionally non-compiling input and must not be "fixed".
#[proconio_enum_query::proconio_enum_query(hoge)]
enum Query1 {
    A,
}
// Nested meta argument is not accepted.
#[proconio_enum_query::proconio_enum_query(fuga(piyo))]
enum Query2 {
    A,
}
// Unknown name-value argument.
#[proconio_enum_query::proconio_enum_query(foo = 10)]
enum Query3 {
    A,
}
// `start_index` must be a literal, not a bare identifier.
#[proconio_enum_query::proconio_enum_query(start_index = abc)]
enum Query3 {
    A,
}
// `start_index` must be an integer literal, not a string.
#[proconio_enum_query::proconio_enum_query(start_index = "abc")]
enum Query3 {
    A,
}
fn main() {}
| true |
3d0f18cc77507ad1807897f40bf9f2cc3cf2336f
|
Rust
|
abeaumont/disassemble.rs
|
/src/webassembly.rs
|
UTF-8
| 9,697 | 2.671875 | 3 |
[
"Apache-2.0",
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use parity_wasm::elements::Opcode;
use super::address::Address;
use super::instruction::Instruction;
/// A single WebAssembly instruction: its sequential index within the
/// function body plus the parsed `parity_wasm` opcode. Wasm has no byte
/// addresses at this level, so the index stands in for an address.
#[allow(missing_docs)]
#[derive(Debug)]
pub struct WasmInstruction {
    idx: u64,  // position of the opcode in the decoded instruction stream
    op: Opcode,
}
impl WasmInstruction {
    #[allow(missing_docs)]
    pub fn new(idx: u64, op: Opcode) -> Self {
        WasmInstruction { idx, op }
    }
}
impl Instruction for WasmInstruction {
    /// The instruction's sequential index, reused as its "address".
    fn address(&self) -> Address {
        Address::new(self.idx)
    }
    /// No per-instruction comments are produced for wasm.
    fn comment(&self) -> Option<String> {
        None
    }
    /// Textual (WAT-style) mnemonic for the wrapped opcode.
    fn mnemonic(&self) -> &str {
        match self.op {
            Opcode::Unreachable => "unreachable",
            Opcode::Nop => "nop",
            Opcode::Block(..) => "block",
            Opcode::Loop(..) => "loop",
            Opcode::If(..) => "if",
            Opcode::Else => "else",
            Opcode::End => "end",
            Opcode::Br(..) => "br",
            Opcode::BrIf(..) => "br_if",
            Opcode::BrTable(..) => "br_table",
            Opcode::Return => "return",
            Opcode::Call(..) => "call",
            Opcode::CallIndirect(..) => "call_indirect",
            Opcode::Drop => "drop",
            Opcode::Select => "select",
            Opcode::GetLocal(..) => "get_local",
            Opcode::SetLocal(..) => "set_local",
            Opcode::TeeLocal(..) => "tee_local",
            Opcode::GetGlobal(..) => "get_global",
            Opcode::SetGlobal(..) => "set_global",
            Opcode::I32Load(..) => "i32.load",
            Opcode::I64Load(..) => "i64.load",
            Opcode::F32Load(..) => "f32.load",
            Opcode::F64Load(..) => "f64.load",
            Opcode::I32Load8S(..) => "i32.load8_s",
            Opcode::I32Load8U(..) => "i32.load8_u",
            Opcode::I32Load16S(..) => "i32.load16_s",
            Opcode::I32Load16U(..) => "i32.load16_u",
            Opcode::I64Load8S(..) => "i64.load8_s",
            Opcode::I64Load8U(..) => "i64.load8_u",
            Opcode::I64Load16S(..) => "i64.load16_s",
            Opcode::I64Load16U(..) => "i64.load16_u",
            Opcode::I64Load32S(..) => "i64.load32_s",
            Opcode::I64Load32U(..) => "i64.load32_u",
            Opcode::I32Store(..) => "i32.store",
            Opcode::I64Store(..) => "i64.store",
            Opcode::F32Store(..) => "f32.store",
            Opcode::F64Store(..) => "f64.store",
            Opcode::I32Store8(..) => "i32.store8",
            Opcode::I32Store16(..) => "i32.store16",
            Opcode::I64Store8(..) => "i64.store8",
            Opcode::I64Store16(..) => "i64.store16",
            Opcode::I64Store32(..) => "i64.store32",
            Opcode::CurrentMemory(..) => "current_memory",
            Opcode::GrowMemory(..) => "grow_memory",
            Opcode::I32Const(..) => "i32.const",
            Opcode::I64Const(..) => "i64.const",
            Opcode::F32Const(..) => "f32.const",
            Opcode::F64Const(..) => "f64.const",
            Opcode::I32Eq => "i32.eq",
            Opcode::I32Eqz => "i32.eqz",
            Opcode::I32Ne => "i32.ne",
            Opcode::I32LtS => "i32.lt_s",
            Opcode::I32LtU => "i32.lt_u",
            Opcode::I32GtS => "i32.gt_s",
            Opcode::I32GtU => "i32.gt_u",
            Opcode::I32LeS => "i32.le_s",
            Opcode::I32LeU => "i32.le_u",
            Opcode::I32GeS => "i32.ge_s",
            Opcode::I32GeU => "i32.ge_u",
            Opcode::I64Eq => "i64.eq",
            Opcode::I64Eqz => "i64.eqz",
            Opcode::I64Ne => "i64.ne",
            Opcode::I64LtS => "i64.lt_s",
            Opcode::I64LtU => "i64.lt_u",
            Opcode::I64GtS => "i64.gt_s",
            Opcode::I64GtU => "i64.gt_u",
            Opcode::I64LeS => "i64.le_s",
            Opcode::I64LeU => "i64.le_u",
            Opcode::I64GeS => "i64.ge_s",
            Opcode::I64GeU => "i64.ge_u",
            Opcode::F32Eq => "f32.eq",
            Opcode::F32Ne => "f32.ne",
            Opcode::F32Lt => "f32.lt",
            Opcode::F32Gt => "f32.gt",
            Opcode::F32Le => "f32.le",
            Opcode::F32Ge => "f32.ge",
            Opcode::F64Eq => "f64.eq",
            Opcode::F64Ne => "f64.ne",
            Opcode::F64Lt => "f64.lt",
            Opcode::F64Gt => "f64.gt",
            Opcode::F64Le => "f64.le",
            Opcode::F64Ge => "f64.ge",
            Opcode::I32Clz => "i32.clz",
            Opcode::I32Ctz => "i32.ctz",
            Opcode::I32Popcnt => "i32.popcnt",
            Opcode::I32Add => "i32.add",
            Opcode::I32Sub => "i32.sub",
            Opcode::I32Mul => "i32.mul",
            Opcode::I32DivS => "i32.div_s",
            Opcode::I32DivU => "i32.div_u",
            Opcode::I32RemS => "i32.rem_s",
            Opcode::I32RemU => "i32.rem_u",
            Opcode::I32And => "i32.and",
            Opcode::I32Or => "i32.or",
            Opcode::I32Xor => "i32.xor",
            Opcode::I32Shl => "i32.shl",
            Opcode::I32ShrS => "i32.shr_s",
            Opcode::I32ShrU => "i32.shr_u",
            Opcode::I32Rotl => "i32.rotl",
            Opcode::I32Rotr => "i32.rotr",
            Opcode::I64Clz => "i64.clz",
            Opcode::I64Ctz => "i64.ctz",
            Opcode::I64Popcnt => "i64.popcnt",
            Opcode::I64Add => "i64.add",
            Opcode::I64Sub => "i64.sub",
            Opcode::I64Mul => "i64.mul",
            Opcode::I64DivS => "i64.div_s",
            Opcode::I64DivU => "i64.div_u",
            Opcode::I64RemS => "i64.rem_s",
            Opcode::I64RemU => "i64.rem_u",
            Opcode::I64And => "i64.and",
            Opcode::I64Or => "i64.or",
            Opcode::I64Xor => "i64.xor",
            Opcode::I64Shl => "i64.shl",
            Opcode::I64ShrS => "i64.shr_s",
            Opcode::I64ShrU => "i64.shr_u",
            Opcode::I64Rotl => "i64.rotl",
            Opcode::I64Rotr => "i64.rotr",
            Opcode::F32Abs => "f32.abs",
            Opcode::F32Neg => "f32.neg",
            Opcode::F32Ceil => "f32.ceil",
            Opcode::F32Floor => "f32.floor",
            Opcode::F32Trunc => "f32.trunc",
            Opcode::F32Nearest => "f32.nearest",
            Opcode::F32Sqrt => "f32.sqrt",
            Opcode::F32Add => "f32.add",
            Opcode::F32Sub => "f32.sub",
            Opcode::F32Mul => "f32.mul",
            Opcode::F32Div => "f32.div",
            Opcode::F32Min => "f32.min",
            Opcode::F32Max => "f32.max",
            Opcode::F32Copysign => "f32.copysign",
            Opcode::F64Abs => "f64.abs",
            Opcode::F64Neg => "f64.neg",
            Opcode::F64Ceil => "f64.ceil",
            Opcode::F64Floor => "f64.floor",
            Opcode::F64Trunc => "f64.trunc",
            Opcode::F64Nearest => "f64.nearest",
            Opcode::F64Sqrt => "f64.sqrt",
            Opcode::F64Add => "f64.add",
            Opcode::F64Sub => "f64.sub",
            Opcode::F64Mul => "f64.mul",
            Opcode::F64Div => "f64.div",
            Opcode::F64Min => "f64.min",
            Opcode::F64Max => "f64.max",
            Opcode::F64Copysign => "f64.copysign",
            // "Warp" is the upstream enum's spelling of the i32.wrap/i64
            // conversion; the mnemonic below is the correct WAT name.
            Opcode::I32WarpI64 => "i32.wrap/i64",
            Opcode::I32TruncSF32 => "i32.trunc_s/f32",
            Opcode::I32TruncUF32 => "i32.trunc_u/f32",
            Opcode::I32TruncSF64 => "i32.trunc_s/f64",
            Opcode::I32TruncUF64 => "i32.trunc_u/f64",
            Opcode::I64ExtendSI32 => "i64.extend_s/i32",
            Opcode::I64ExtendUI32 => "i64.extend_u/i32",
            Opcode::I64TruncSF32 => "i64.trunc_s/f32",
            Opcode::I64TruncUF32 => "i64.trunc_u/f32",
            Opcode::I64TruncSF64 => "i64.trunc_s/f64",
            Opcode::I64TruncUF64 => "i64.trunc_u/f64",
            Opcode::F32ConvertSI32 => "f32.convert_s/i32",
            Opcode::F32ConvertUI32 => "f32.convert_u/i32",
            Opcode::F32ConvertSI64 => "f32.convert_s/i64",
            Opcode::F32ConvertUI64 => "f32.convert_u/i64",
            Opcode::F32DemoteF64 => "f32.demote/f64",
            Opcode::F64ConvertSI32 => "f64.convert_s/i32",
            Opcode::F64ConvertUI32 => "f64.convert_u/i32",
            Opcode::F64ConvertSI64 => "f64.convert_s/i64",
            Opcode::F64ConvertUI64 => "f64.convert_u/i64",
            Opcode::F64PromoteF32 => "f64.promote/f32",
            Opcode::I32ReinterpretF32 => "i32.reinterpret/f32",
            Opcode::I64ReinterpretF64 => "i64.reinterpret/f64",
            Opcode::F32ReinterpretI32 => "f32.reinterpret/i32",
            Opcode::F64ReinterpretI64 => "f64.reinterpret/i64",
        }
    }
    /// Cycle counts are not provided for wasm.
    fn cycle_count(&self) -> Option<u32> {
        None
    }
    /// Direct and indirect calls both count as calls.
    fn is_call(&self) -> bool {
        match self.op {
            Opcode::Call(..) |
            Opcode::CallIndirect(..) => true,
            _ => false,
        }
    }
    /// Conditional control transfers: `if`, `br_if`, and `br_table`.
    fn is_local_conditional_jump(&self) -> bool {
        match self.op {
            Opcode::If(..) |
            Opcode::BrIf(..) |
            Opcode::BrTable(..) => true,
            _ => false,
        }
    }
    /// Unconditional branch (`br`) only.
    fn is_local_jump(&self) -> bool {
        match self.op {
            Opcode::Br(..) => true,
            _ => false,
        }
    }
    fn is_return(&self) -> bool {
        match self.op {
            Opcode::Return => true,
            _ => false,
        }
    }
    /// Only direct calls carry a statically known target (the callee's
    /// function index); branches target label depths, not addresses.
    fn target_address(&self) -> Option<Address> {
        match self.op {
            Opcode::Call(a) => Some(Address::new(a as u64)),
            _ => None,
        }
    }
}
| true |
36866d79a18c8f3e0a00ea067c253f65f630202d
|
Rust
|
alonn24/learnings
|
/rustlang/src/adventofcode2019/day6/part1.rs
|
UTF-8
| 953 | 3.421875 | 3 |
[] |
no_license
|
use std::collections::HashMap;
// Conver that key orbits value
/// Parses orbit-map input (one `A)B` pair per line) into a child -> parent
/// map: `map["B"] == "A"` means B directly orbits A.
///
/// Robustness fix over the original: blank or malformed lines (no `)`
/// separator) are skipped instead of panicking on `parts[1]`, so trailing
/// newlines in the puzzle input are harmless; the child is trimmed to cope
/// with CRLF line endings.
pub fn convert_input(input: String) -> HashMap<String, String> {
    let mut res: HashMap<String, String> = HashMap::new();
    for line in input.lines() {
        // Split into exactly two pieces around the first ')'.
        let mut parts = line.splitn(2, ')');
        if let (Some(parent), Some(child)) = (parts.next(), parts.next()) {
            res.insert(child.trim().to_string(), parent.to_string());
        }
    }
    res
}
/// Counts the direct + indirect orbits of `key`: the length of the parent
/// chain from `key` up to a body with no recorded parent (e.g. "COM"),
/// which yields 0.
///
/// Rewritten iteratively: the original recursed and unwrapped the lookup
/// with `.expect("asd")` even though `contains_key` had just been checked.
/// Note: a cyclic map would loop forever; the puzzle input is a tree.
fn get_orbits(input: &HashMap<String, String>, key: &String) -> i32 {
    let mut count = 0;
    let mut current = key;
    // Walk parent links until a root (no entry) is reached.
    while let Some(parent) = input.get(current) {
        count += 1;
        current = parent;
    }
    count
}
/// Answer to part 1: the total number of direct and indirect orbits across
/// every body in the map.
pub fn part1(raw_input: String) -> i32 {
    let orbit_map = convert_input(raw_input);
    orbit_map
        .keys()
        .map(|body| get_orbits(&orbit_map, body))
        .sum()
}
#[cfg(test)]
mod tests {
    use super::*;
    #[test]
    fn test() {
        // Worked example from Advent of Code 2019 day 6, part 1: the
        // sample orbit map totals 42 direct + indirect orbits.
        let input = "COM)B\nB)C\nC)D\nD)E\nE)F\nB)G\nG)H\nD)I\nE)J\nJ)K\nK)L".to_string();
        assert_eq!(part1(input), 42);
    }
}
| true |
f199dda63c9958057c754382f8303f29bff29b76
|
Rust
|
ferrous-systems/imxrt1052
|
/src/iomuxc_gpr/gpr5/mod.rs
|
UTF-8
| 23,035 | 2.75 | 3 |
[] |
no_license
|
#[doc = r" Value read from the register"]
pub struct R {
bits: u32,
}
#[doc = r" Value to write to the register"]
pub struct W {
bits: u32,
}
impl super::GPR5 {
#[doc = r" Modifies the contents of the register"]
#[inline]
pub fn modify<F>(&self, f: F)
where
for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,
{
let bits = self.register.get();
let r = R { bits: bits };
let mut w = W { bits: bits };
f(&r, &mut w);
self.register.set(w.bits);
}
#[doc = r" Reads the contents of the register"]
#[inline]
pub fn read(&self) -> R {
R {
bits: self.register.get(),
}
}
#[doc = r" Writes to the register"]
#[inline]
pub fn write<F>(&self, f: F)
where
F: FnOnce(&mut W) -> &mut W,
{
let mut w = W::reset_value();
f(&mut w);
self.register.set(w.bits);
}
#[doc = r" Writes the reset value to the register"]
#[inline]
pub fn reset(&self) {
self.write(|w| w)
}
}
#[doc = "Possible values of the field `WDOG1_MASK`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum WDOG1_MASKR {
#[doc = "WDOG1 Timeout behaves normally"]
WDOG1_MASK_0,
#[doc = "WDOG1 Timeout is masked"]
WDOG1_MASK_1,
}
impl WDOG1_MASKR {
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
match *self {
WDOG1_MASKR::WDOG1_MASK_0 => false,
WDOG1_MASKR::WDOG1_MASK_1 => true,
}
}
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _from(value: bool) -> WDOG1_MASKR {
match value {
false => WDOG1_MASKR::WDOG1_MASK_0,
true => WDOG1_MASKR::WDOG1_MASK_1,
}
}
#[doc = "Checks if the value of the field is `WDOG1_MASK_0`"]
#[inline]
pub fn is_wdog1_mask_0(&self) -> bool {
*self == WDOG1_MASKR::WDOG1_MASK_0
}
#[doc = "Checks if the value of the field is `WDOG1_MASK_1`"]
#[inline]
pub fn is_wdog1_mask_1(&self) -> bool {
*self == WDOG1_MASKR::WDOG1_MASK_1
}
}
#[doc = "Possible values of the field `WDOG2_MASK`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum WDOG2_MASKR {
#[doc = "WDOG2 Timeout behaves normally"]
WDOG2_MASK_0,
#[doc = "WDOG2 Timeout is masked"]
WDOG2_MASK_1,
}
impl WDOG2_MASKR {
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
match *self {
WDOG2_MASKR::WDOG2_MASK_0 => false,
WDOG2_MASKR::WDOG2_MASK_1 => true,
}
}
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _from(value: bool) -> WDOG2_MASKR {
match value {
false => WDOG2_MASKR::WDOG2_MASK_0,
true => WDOG2_MASKR::WDOG2_MASK_1,
}
}
#[doc = "Checks if the value of the field is `WDOG2_MASK_0`"]
#[inline]
pub fn is_wdog2_mask_0(&self) -> bool {
*self == WDOG2_MASKR::WDOG2_MASK_0
}
#[doc = "Checks if the value of the field is `WDOG2_MASK_1`"]
#[inline]
pub fn is_wdog2_mask_1(&self) -> bool {
*self == WDOG2_MASKR::WDOG2_MASK_1
}
}
#[doc = "Possible values of the field `GPT2_CAPIN1_SEL`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum GPT2_CAPIN1_SELR {
#[doc = "source from pad"]
GPT2_CAPIN1_SEL_0,
#[doc = "source from enet1.ipp_do_mac0_timer[3]"]
GPT2_CAPIN1_SEL_1,
}
impl GPT2_CAPIN1_SELR {
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
match *self {
GPT2_CAPIN1_SELR::GPT2_CAPIN1_SEL_0 => false,
GPT2_CAPIN1_SELR::GPT2_CAPIN1_SEL_1 => true,
}
}
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _from(value: bool) -> GPT2_CAPIN1_SELR {
match value {
false => GPT2_CAPIN1_SELR::GPT2_CAPIN1_SEL_0,
true => GPT2_CAPIN1_SELR::GPT2_CAPIN1_SEL_1,
}
}
#[doc = "Checks if the value of the field is `GPT2_CAPIN1_SEL_0`"]
#[inline]
pub fn is_gpt2_capin1_sel_0(&self) -> bool {
*self == GPT2_CAPIN1_SELR::GPT2_CAPIN1_SEL_0
}
#[doc = "Checks if the value of the field is `GPT2_CAPIN1_SEL_1`"]
#[inline]
pub fn is_gpt2_capin1_sel_1(&self) -> bool {
*self == GPT2_CAPIN1_SELR::GPT2_CAPIN1_SEL_1
}
}
#[doc = "Possible values of the field `ENET_EVENT3IN_SEL`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum ENET_EVENT3IN_SELR {
#[doc = "event3 source input from pad"]
ENET_EVENT3IN_SEL_0,
#[doc = "event3 source input from gpt2.ipp_do_cmpout1"]
ENET_EVENT3IN_SEL_1,
}
impl ENET_EVENT3IN_SELR {
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
match *self {
ENET_EVENT3IN_SELR::ENET_EVENT3IN_SEL_0 => false,
ENET_EVENT3IN_SELR::ENET_EVENT3IN_SEL_1 => true,
}
}
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _from(value: bool) -> ENET_EVENT3IN_SELR {
match value {
false => ENET_EVENT3IN_SELR::ENET_EVENT3IN_SEL_0,
true => ENET_EVENT3IN_SELR::ENET_EVENT3IN_SEL_1,
}
}
#[doc = "Checks if the value of the field is `ENET_EVENT3IN_SEL_0`"]
#[inline]
pub fn is_enet_event3in_sel_0(&self) -> bool {
*self == ENET_EVENT3IN_SELR::ENET_EVENT3IN_SEL_0
}
#[doc = "Checks if the value of the field is `ENET_EVENT3IN_SEL_1`"]
#[inline]
pub fn is_enet_event3in_sel_1(&self) -> bool {
*self == ENET_EVENT3IN_SELR::ENET_EVENT3IN_SEL_1
}
}
#[doc = "Possible values of the field `VREF_1M_CLK_GPT1`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum VREF_1M_CLK_GPT1R {
#[doc = "GPT1 ipg_clk_highfreq driven by IPG_PERCLK"]
VREF_1M_CLK_GPT1_0,
#[doc = "GPT1 ipg_clk_highfreq driven by anatop 1 MHz clock"]
VREF_1M_CLK_GPT1_1,
}
impl VREF_1M_CLK_GPT1R {
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
match *self {
VREF_1M_CLK_GPT1R::VREF_1M_CLK_GPT1_0 => false,
VREF_1M_CLK_GPT1R::VREF_1M_CLK_GPT1_1 => true,
}
}
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _from(value: bool) -> VREF_1M_CLK_GPT1R {
match value {
false => VREF_1M_CLK_GPT1R::VREF_1M_CLK_GPT1_0,
true => VREF_1M_CLK_GPT1R::VREF_1M_CLK_GPT1_1,
}
}
#[doc = "Checks if the value of the field is `VREF_1M_CLK_GPT1_0`"]
#[inline]
pub fn is_vref_1m_clk_gpt1_0(&self) -> bool {
*self == VREF_1M_CLK_GPT1R::VREF_1M_CLK_GPT1_0
}
#[doc = "Checks if the value of the field is `VREF_1M_CLK_GPT1_1`"]
#[inline]
pub fn is_vref_1m_clk_gpt1_1(&self) -> bool {
*self == VREF_1M_CLK_GPT1R::VREF_1M_CLK_GPT1_1
}
}
#[doc = "Possible values of the field `VREF_1M_CLK_GPT2`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum VREF_1M_CLK_GPT2R {
#[doc = "GPT2 ipg_clk_highfreq driven by IPG_PERCLK"]
VREF_1M_CLK_GPT2_0,
#[doc = "GPT2 ipg_clk_highfreq driven by anatop 1 MHz clock"]
VREF_1M_CLK_GPT2_1,
}
impl VREF_1M_CLK_GPT2R {
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
match *self {
VREF_1M_CLK_GPT2R::VREF_1M_CLK_GPT2_0 => false,
VREF_1M_CLK_GPT2R::VREF_1M_CLK_GPT2_1 => true,
}
}
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _from(value: bool) -> VREF_1M_CLK_GPT2R {
match value {
false => VREF_1M_CLK_GPT2R::VREF_1M_CLK_GPT2_0,
true => VREF_1M_CLK_GPT2R::VREF_1M_CLK_GPT2_1,
}
}
#[doc = "Checks if the value of the field is `VREF_1M_CLK_GPT2_0`"]
#[inline]
pub fn is_vref_1m_clk_gpt2_0(&self) -> bool {
*self == VREF_1M_CLK_GPT2R::VREF_1M_CLK_GPT2_0
}
#[doc = "Checks if the value of the field is `VREF_1M_CLK_GPT2_1`"]
#[inline]
pub fn is_vref_1m_clk_gpt2_1(&self) -> bool {
*self == VREF_1M_CLK_GPT2R::VREF_1M_CLK_GPT2_1
}
}
#[doc = "Values that can be written to the field `WDOG1_MASK`"]
pub enum WDOG1_MASKW {
#[doc = "WDOG1 Timeout behaves normally"]
WDOG1_MASK_0,
#[doc = "WDOG1 Timeout is masked"]
WDOG1_MASK_1,
}
impl WDOG1_MASKW {
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _bits(&self) -> bool {
match *self {
WDOG1_MASKW::WDOG1_MASK_0 => false,
WDOG1_MASKW::WDOG1_MASK_1 => true,
}
}
}
#[doc = r" Proxy"]
pub struct _WDOG1_MASKW<'a> {
w: &'a mut W,
}
impl<'a> _WDOG1_MASKW<'a> {
#[doc = r" Writes `variant` to the field"]
#[inline]
pub fn variant(self, variant: WDOG1_MASKW) -> &'a mut W {
{
self.bit(variant._bits())
}
}
#[doc = "WDOG1 Timeout behaves normally"]
#[inline]
pub fn wdog1_mask_0(self) -> &'a mut W {
self.variant(WDOG1_MASKW::WDOG1_MASK_0)
}
#[doc = "WDOG1 Timeout is masked"]
#[inline]
pub fn wdog1_mask_1(self) -> &'a mut W {
self.variant(WDOG1_MASKW::WDOG1_MASK_1)
}
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 6;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = "Values that can be written to the field `WDOG2_MASK`"]
pub enum WDOG2_MASKW {
#[doc = "WDOG2 Timeout behaves normally"]
WDOG2_MASK_0,
#[doc = "WDOG2 Timeout is masked"]
WDOG2_MASK_1,
}
impl WDOG2_MASKW {
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _bits(&self) -> bool {
match *self {
WDOG2_MASKW::WDOG2_MASK_0 => false,
WDOG2_MASKW::WDOG2_MASK_1 => true,
}
}
}
#[doc = r" Proxy"]
pub struct _WDOG2_MASKW<'a> {
w: &'a mut W,
}
impl<'a> _WDOG2_MASKW<'a> {
#[doc = r" Writes `variant` to the field"]
#[inline]
pub fn variant(self, variant: WDOG2_MASKW) -> &'a mut W {
{
self.bit(variant._bits())
}
}
#[doc = "WDOG2 Timeout behaves normally"]
#[inline]
pub fn wdog2_mask_0(self) -> &'a mut W {
self.variant(WDOG2_MASKW::WDOG2_MASK_0)
}
#[doc = "WDOG2 Timeout is masked"]
#[inline]
pub fn wdog2_mask_1(self) -> &'a mut W {
self.variant(WDOG2_MASKW::WDOG2_MASK_1)
}
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 7;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = "Values that can be written to the field `GPT2_CAPIN1_SEL`"]
pub enum GPT2_CAPIN1_SELW {
#[doc = "source from pad"]
GPT2_CAPIN1_SEL_0,
#[doc = "source from enet1.ipp_do_mac0_timer[3]"]
GPT2_CAPIN1_SEL_1,
}
impl GPT2_CAPIN1_SELW {
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _bits(&self) -> bool {
match *self {
GPT2_CAPIN1_SELW::GPT2_CAPIN1_SEL_0 => false,
GPT2_CAPIN1_SELW::GPT2_CAPIN1_SEL_1 => true,
}
}
}
#[doc = r" Proxy"]
pub struct _GPT2_CAPIN1_SELW<'a> {
w: &'a mut W,
}
impl<'a> _GPT2_CAPIN1_SELW<'a> {
#[doc = r" Writes `variant` to the field"]
#[inline]
pub fn variant(self, variant: GPT2_CAPIN1_SELW) -> &'a mut W {
{
self.bit(variant._bits())
}
}
#[doc = "source from pad"]
#[inline]
pub fn gpt2_capin1_sel_0(self) -> &'a mut W {
self.variant(GPT2_CAPIN1_SELW::GPT2_CAPIN1_SEL_0)
}
#[doc = "source from enet1.ipp_do_mac0_timer[3]"]
#[inline]
pub fn gpt2_capin1_sel_1(self) -> &'a mut W {
self.variant(GPT2_CAPIN1_SELW::GPT2_CAPIN1_SEL_1)
}
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 23;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = "Values that can be written to the field `ENET_EVENT3IN_SEL`"]
pub enum ENET_EVENT3IN_SELW {
#[doc = "event3 source input from pad"]
ENET_EVENT3IN_SEL_0,
#[doc = "event3 source input from gpt2.ipp_do_cmpout1"]
ENET_EVENT3IN_SEL_1,
}
impl ENET_EVENT3IN_SELW {
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _bits(&self) -> bool {
match *self {
ENET_EVENT3IN_SELW::ENET_EVENT3IN_SEL_0 => false,
ENET_EVENT3IN_SELW::ENET_EVENT3IN_SEL_1 => true,
}
}
}
#[doc = r" Proxy"]
pub struct _ENET_EVENT3IN_SELW<'a> {
w: &'a mut W,
}
impl<'a> _ENET_EVENT3IN_SELW<'a> {
#[doc = r" Writes `variant` to the field"]
#[inline]
pub fn variant(self, variant: ENET_EVENT3IN_SELW) -> &'a mut W {
{
self.bit(variant._bits())
}
}
#[doc = "event3 source input from pad"]
#[inline]
pub fn enet_event3in_sel_0(self) -> &'a mut W {
self.variant(ENET_EVENT3IN_SELW::ENET_EVENT3IN_SEL_0)
}
#[doc = "event3 source input from gpt2.ipp_do_cmpout1"]
#[inline]
pub fn enet_event3in_sel_1(self) -> &'a mut W {
self.variant(ENET_EVENT3IN_SELW::ENET_EVENT3IN_SEL_1)
}
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 25;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = "Values that can be written to the field `VREF_1M_CLK_GPT1`"]
pub enum VREF_1M_CLK_GPT1W {
#[doc = "GPT1 ipg_clk_highfreq driven by IPG_PERCLK"]
VREF_1M_CLK_GPT1_0,
#[doc = "GPT1 ipg_clk_highfreq driven by anatop 1 MHz clock"]
VREF_1M_CLK_GPT1_1,
}
impl VREF_1M_CLK_GPT1W {
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _bits(&self) -> bool {
match *self {
VREF_1M_CLK_GPT1W::VREF_1M_CLK_GPT1_0 => false,
VREF_1M_CLK_GPT1W::VREF_1M_CLK_GPT1_1 => true,
}
}
}
#[doc = r" Proxy"]
pub struct _VREF_1M_CLK_GPT1W<'a> {
w: &'a mut W,
}
impl<'a> _VREF_1M_CLK_GPT1W<'a> {
#[doc = r" Writes `variant` to the field"]
#[inline]
pub fn variant(self, variant: VREF_1M_CLK_GPT1W) -> &'a mut W {
{
self.bit(variant._bits())
}
}
#[doc = "GPT1 ipg_clk_highfreq driven by IPG_PERCLK"]
#[inline]
pub fn vref_1m_clk_gpt1_0(self) -> &'a mut W {
self.variant(VREF_1M_CLK_GPT1W::VREF_1M_CLK_GPT1_0)
}
#[doc = "GPT1 ipg_clk_highfreq driven by anatop 1 MHz clock"]
#[inline]
pub fn vref_1m_clk_gpt1_1(self) -> &'a mut W {
self.variant(VREF_1M_CLK_GPT1W::VREF_1M_CLK_GPT1_1)
}
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 28;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = "Values that can be written to the field `VREF_1M_CLK_GPT2`"]
pub enum VREF_1M_CLK_GPT2W {
#[doc = "GPT2 ipg_clk_highfreq driven by IPG_PERCLK"]
VREF_1M_CLK_GPT2_0,
#[doc = "GPT2 ipg_clk_highfreq driven by anatop 1 MHz clock"]
VREF_1M_CLK_GPT2_1,
}
impl VREF_1M_CLK_GPT2W {
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _bits(&self) -> bool {
match *self {
VREF_1M_CLK_GPT2W::VREF_1M_CLK_GPT2_0 => false,
VREF_1M_CLK_GPT2W::VREF_1M_CLK_GPT2_1 => true,
}
}
}
#[doc = r" Proxy"]
pub struct _VREF_1M_CLK_GPT2W<'a> {
w: &'a mut W,
}
impl<'a> _VREF_1M_CLK_GPT2W<'a> {
#[doc = r" Writes `variant` to the field"]
#[inline]
pub fn variant(self, variant: VREF_1M_CLK_GPT2W) -> &'a mut W {
{
self.bit(variant._bits())
}
}
#[doc = "GPT2 ipg_clk_highfreq driven by IPG_PERCLK"]
#[inline]
pub fn vref_1m_clk_gpt2_0(self) -> &'a mut W {
self.variant(VREF_1M_CLK_GPT2W::VREF_1M_CLK_GPT2_0)
}
#[doc = "GPT2 ipg_clk_highfreq driven by anatop 1 MHz clock"]
#[inline]
pub fn vref_1m_clk_gpt2_1(self) -> &'a mut W {
self.variant(VREF_1M_CLK_GPT2W::VREF_1M_CLK_GPT2_1)
}
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 29;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
impl R {
#[doc = r" Value of the register as raw bits"]
#[inline]
pub fn bits(&self) -> u32 {
self.bits
}
#[doc = "Bit 6 - WDOG1 Timeout Mask"]
#[inline]
pub fn wdog1_mask(&self) -> WDOG1_MASKR {
WDOG1_MASKR::_from({
const MASK: bool = true;
const OFFSET: u8 = 6;
((self.bits >> OFFSET) & MASK as u32) != 0
})
}
#[doc = "Bit 7 - WDOG2 Timeout Mask"]
#[inline]
pub fn wdog2_mask(&self) -> WDOG2_MASKR {
WDOG2_MASKR::_from({
const MASK: bool = true;
const OFFSET: u8 = 7;
((self.bits >> OFFSET) & MASK as u32) != 0
})
}
#[doc = "Bit 23 - GPT2 input capture channel 1 source select"]
#[inline]
pub fn gpt2_capin1_sel(&self) -> GPT2_CAPIN1_SELR {
GPT2_CAPIN1_SELR::_from({
const MASK: bool = true;
const OFFSET: u8 = 23;
((self.bits >> OFFSET) & MASK as u32) != 0
})
}
#[doc = "Bit 25 - ENET input timer event3 source select"]
#[inline]
pub fn enet_event3in_sel(&self) -> ENET_EVENT3IN_SELR {
ENET_EVENT3IN_SELR::_from({
const MASK: bool = true;
const OFFSET: u8 = 25;
((self.bits >> OFFSET) & MASK as u32) != 0
})
}
#[doc = "Bit 28 - GPT1 1 MHz clock source select"]
#[inline]
pub fn vref_1m_clk_gpt1(&self) -> VREF_1M_CLK_GPT1R {
VREF_1M_CLK_GPT1R::_from({
const MASK: bool = true;
const OFFSET: u8 = 28;
((self.bits >> OFFSET) & MASK as u32) != 0
})
}
#[doc = "Bit 29 - GPT2 1 MHz clock source select"]
#[inline]
pub fn vref_1m_clk_gpt2(&self) -> VREF_1M_CLK_GPT2R {
VREF_1M_CLK_GPT2R::_from({
const MASK: bool = true;
const OFFSET: u8 = 29;
((self.bits >> OFFSET) & MASK as u32) != 0
})
}
}
// Write proxy: `reset_value()` seeds the pending raw value with 0, and each
// field method returns a per-field write proxy over the pending `bits`.
impl W {
    #[doc = r" Reset value of the register"]
    #[inline]
    pub fn reset_value() -> W {
        W { bits: 0 }
    }
    #[doc = r" Writes raw bits to the register"]
    #[inline]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
    #[doc = "Bit 6 - WDOG1 Timeout Mask"]
    #[inline]
    pub fn wdog1_mask(&mut self) -> _WDOG1_MASKW {
        _WDOG1_MASKW { w: self }
    }
    #[doc = "Bit 7 - WDOG2 Timeout Mask"]
    #[inline]
    pub fn wdog2_mask(&mut self) -> _WDOG2_MASKW {
        _WDOG2_MASKW { w: self }
    }
    #[doc = "Bit 23 - GPT2 input capture channel 1 source select"]
    #[inline]
    pub fn gpt2_capin1_sel(&mut self) -> _GPT2_CAPIN1_SELW {
        _GPT2_CAPIN1_SELW { w: self }
    }
    #[doc = "Bit 25 - ENET input timer event3 source select"]
    #[inline]
    pub fn enet_event3in_sel(&mut self) -> _ENET_EVENT3IN_SELW {
        _ENET_EVENT3IN_SELW { w: self }
    }
    #[doc = "Bit 28 - GPT1 1 MHz clock source select"]
    #[inline]
    pub fn vref_1m_clk_gpt1(&mut self) -> _VREF_1M_CLK_GPT1W {
        _VREF_1M_CLK_GPT1W { w: self }
    }
    #[doc = "Bit 29 - GPT2 1 MHz clock source select"]
    #[inline]
    pub fn vref_1m_clk_gpt2(&mut self) -> _VREF_1M_CLK_GPT2W {
        _VREF_1M_CLK_GPT2W { w: self }
    }
}
| true |
d4c51c9eb0ccabfdc0b6909f4af06f1e21c8419b
|
Rust
|
zaeleus/noodles
|
/noodles-vcf/examples/vcf_read_header_async.rs
|
UTF-8
| 689 | 2.671875 | 3 |
[
"MIT"
] |
permissive
|
//! Prints the header of a VCF file.
//!
//! The header is defined to be the meta lines (`##` prefix) and the header line (`#` prefix).
//!
//! The result is similar to or matches the output of `bcftools head <src>`. bcftools may add a
//! PASS FILTER to the meta if it is missing.
use std::env;
use noodles_vcf as vcf;
use tokio::{
fs::File,
io::{self, BufReader},
};
#[tokio::main]
async fn main() -> io::Result<()> {
    // The first CLI argument is the path to the VCF file.
    let path = env::args().nth(1).expect("missing src");
    // Open the file, buffer it, and wrap it in an async VCF reader.
    let file = File::open(path).await?;
    let mut vcf_reader = vcf::AsyncReader::new(BufReader::new(file));
    // Read the meta lines plus the header line and echo them verbatim.
    let header = vcf_reader.read_header().await?;
    print!("{header}");
    Ok(())
}
| true |
73d2d4152f91aeca18f68f121d8def1fb2b2a46d
|
Rust
|
curly-lang/comb-rust
|
/src/main.rs
|
UTF-8
| 2,647 | 2.890625 | 3 |
[] |
no_license
|
use std::env;
use std::fs;
use toml::Value;
use comb::toml::toml_parser;
use comb::toml::build_toml_generation;
use comb::dependencies::get;
use comb::dependencies::get::DependencyFail;
use comb::dependencies::toml_to_struct;
fn main() -> Result<(), ()> {
    let mut args = env::args();
    // First element of `args` is the program name, used in the usage text.
    let program = args.next().unwrap();
    let build_file_input = "comb.toml";
    let build_file_output = "comb.build.toml";
    // No subcommand: print usage and exit successfully.
    if args.len() == 0 {
        println!(
            "usage:\n{} update\n{} build",
            &program,
            &program
        );
        return Ok(());
    }
    // Dispatch on the subcommand; anything unrecognized is an error.
    match args.next().unwrap().as_str() {
        "update" => update(build_file_input, build_file_output, true),
        "build" => build(build_file_input, build_file_output),
        _ => Err(()),
    }
}
/// Regenerates the build TOML (`file_out`) from the input manifest
/// (`file_in`); delegates entirely to `build_toml_generation`.
fn update(
    file_in: &str,
    file_out: &str,
    force_update: bool,
) -> Result<(), ()> {
    build_toml_generation::generate_build_toml(file_in, file_out, force_update)
}
/// Recreates an empty `build/` directory and makes it the working directory.
fn make_and_cd_build() -> Result<(), ()> {
    // Best-effort cleanup: the directory may not exist yet, so ignore failure.
    let _ = fs::remove_dir_all("build");
    if fs::create_dir("build").is_err() {
        println!("failed to create build directory");
        return Err(());
    }
    if env::set_current_dir("build").is_err() {
        println!("failed to cd into the build directory");
        return Err(());
    }
    Ok(())
}
/// Fetches every dependency listed under `[dependencies]` in the build TOML,
/// printing a human-readable message for any failure.
fn compile_dependencies(
    build_toml: &Value,
) -> Result<(), ()> {
    // The generated build file is expected to always carry a dependency
    // table; the unwraps mirror that assumption.
    let dependency_table = build_toml
        .as_table()
        .unwrap()
        .get("dependencies")
        .unwrap()
        .as_table()
        .unwrap();
    let dependency_list = toml_to_struct::toml_table_to_structs(dependency_table);
    // Translate each failure kind into its message, then bail out.
    if let Err(e) = get::get_dependencies(&dependency_list) {
        match e {
            DependencyFail::FailedToGet(err) => println!("Failed to get dependency:\n{}", err),
            DependencyFail::ModuleDoesNotExist(err) => println!("Module does not exist:\n{}", err),
            DependencyFail::PathDoesNotExist(err) => println!("Path does not exist:\n{}", err),
            DependencyFail::RepoDoesNotExist(err) => println!("Repository does not exist:\n{}", err),
            DependencyFail::BuildDirFail(err) => println!("Failed to create directory:\n{}", err),
        }
        return Err(());
    }
    println!("{:?}", dependency_list);
    Ok(())
}
/// Full build: refresh the generated build file, enter a clean `build/`
/// directory, and fetch all dependencies.
fn build(
    build_file_in: &str,
    build_file_out: &str,
) -> Result<(), ()> {
    // Non-forced refresh of the generated build TOML.
    update(build_file_in, build_file_out, false)?;
    let build_toml = toml_parser::parse_toml(build_file_out).unwrap();
    make_and_cd_build()?;
    compile_dependencies(&build_toml)?;
    Ok(())
}
| true |
8cffa12b0b41e1468ca520a583db22718d193cdd
|
Rust
|
dariedl/opgame
|
/src/game_state.rs
|
UTF-8
| 1,227 | 2.703125 | 3 |
[
"MIT"
] |
permissive
|
use rand::Rng;
use crate::data::Character;
use crate::data::Probe;
// use crate::domainMessage::Command;
use crate::domain_message::printCmd;
use crate::domain_message::printEvent;
use crate::domain_message::Command;
use crate::domain_message::Event;
// Executes one domain command, logging the command first and every
// resulting event afterwards.
pub fn handle_state(command: Command) {
    printCmd(&command);
    // Dispatch: DoAdventureProbe is currently the only handled command.
    let events = match command {
        Command::DoAdventureProbe(character, probe) => do_adventure_probe(character, probe),
    };
    for event in events {
        printEvent(event)
    }
}
// Rolls the adventure die and prints the result.
// NOTE(review): `gen_range(0, 8)` is 0..8 exclusive, so the roll is uniform
// in -4..=3 — asymmetric around zero; confirm that bias is intended.
fn roll_dice() -> i8 {
    let dice_roll = rand::thread_rng().gen_range(0, 8) - 4;
    println!("Diceroll: {}!", dice_roll);
    return dice_roll;
}
/// Resolves an adventure probe: compares (die roll + fight attribute)
/// against the probe's difficulty and returns the resulting events.
pub fn do_adventure_probe(character: Character, probe: Probe) -> Vec<Event> {
    let attribute = character.fight as i8;
    let diff = probe.difficulty as i8;
    let result = roll_dice() + attribute;
    // Outcome tiers, checked from worst to best:
    // badly failed (injury), failed, great success (bonus berries), success.
    if result < diff - 1 {
        vec![
            Event::FailedProbe(probe),
            Event::InjuredCharacter(character),
        ]
    } else if result < diff {
        vec![Event::FailedProbe(probe)]
    } else if result >= diff + 2 {
        vec![Event::SucceededProbe(probe), Event::GainedBerry(2)]
    } else {
        vec![Event::SucceededProbe(probe)]
    }
}
| true |
cee9e3290125988eaa508726e0a88107d032350c
|
Rust
|
Ruddle/oxidator
|
/src/procedural_texels.rs
|
UTF-8
| 834 | 3.0625 | 3 |
[
"MIT"
] |
permissive
|
/// Builds an RGBA8 gradient texture of `size` x `size` texels.
///
/// With `i` the outer and `j` the inner loop coordinate, normalized to
/// [0, 1): R = i, G = 1 - j, B = (1 - i) * j, each scaled to 0..255;
/// alpha is always 255 (opaque).
pub fn create_texels(size: usize) -> Vec<u8> {
    // 4 bytes (RGBA) per texel; reserve up front to avoid regrowth.
    let mut v = Vec::with_capacity(size * size * 4);
    for i in 0..size {
        for j in 0..size {
            // Normalize the integer coordinates to [0, 1).
            let i = i as f32 / size as f32;
            let j = j as f32 / size as f32;
            v.push((i * 255.0) as u8);
            v.push(((1.0 - j) * 255.0) as u8);
            v.push(((1.0 - i) * j * 255.0) as u8);
            v.push(255);
        }
    }
    v
}
/// Builds an RGBA8 checkerboard texture of `size` x `size` texels:
/// black where (i + j) is even, white where it is odd; alpha always 255.
pub fn checker(size: usize) -> Vec<u8> {
    // 4 bytes (RGBA) per texel; reserve up front to avoid regrowth.
    let mut v = Vec::with_capacity(size * size * 4);
    for i in 0..size {
        for j in 0..size {
            // Alternate the shade by the parity of the coordinate sum.
            let shade: u8 = if (i + j) % 2 == 0 { 0 } else { 255 };
            v.extend_from_slice(&[shade, shade, shade, 255]);
        }
    }
    v
}
| true |
9589c0e2b1fc68cd4984ea6bd5eae11dde12cb8f
|
Rust
|
scottyla19/rust-book
|
/closures/src/main.rs
|
UTF-8
| 1,691 | 3.390625 | 3 |
[] |
no_license
|
use std::thread;
use std::time::Duration;
use std::collections::HashMap;
use std::hash::Hash;
use std::collections::hash_map::Entry;
fn main() {
    // Hard-coded stand-ins for real user input and a random draw.
    let intensity = 6;
    let random = 3;
    generate_workout(intensity, random);
}
/// Prints a workout plan, memoizing the simulated expensive calculation so
/// each distinct intensity value pays the 2-second cost at most once.
fn generate_workout(intensity: u32, random_number: u32) {
    let mut expensive_result = Cacher::new(|num| {
        println!("calculating slowly...");
        thread::sleep(Duration::from_secs(2));
        num
    });
    if intensity >= 25 {
        // High intensity: either rest (lucky number 3) or a timed run.
        if random_number == 3 {
            println!("Take a break today! Remember to stay hydrated!");
        } else {
            println!(
                "Today, run for {} minutes!",
                expensive_result.value(intensity)
            );
        }
        return;
    }
    // Low intensity: strength plan; the cache makes the second lookup
    // with the same key instantaneous.
    println!(
        "Today, do {} pushups!",
        expensive_result.value(intensity)
    );
    println!(
        "Next, do {} situps!",
        expensive_result.value(intensity * 2)
    );
}
/// Memoizes the results of `calculation`: each distinct argument is
/// computed once and the cached result is returned on later calls.
struct Cacher<T,K,U>
    where T: Fn(K) -> U
{
    // The wrapped closure; invoked only on cache misses.
    calculation: T,
    // Cache of argument -> computed result.
    value: HashMap<K,U>,
}

impl<T,K,U> Cacher<T,K,U>
    where T: Fn(K) -> U,
          K: Hash + Eq + Clone,
          U: Clone
{
    /// Wraps `calculation` with an empty cache.
    fn new(calculation: T) -> Cacher<T,K,U> {
        Cacher {
            calculation,
            value: HashMap::new(),
        }
    }

    /// Returns the cached result for `arg`, computing and storing it first
    /// if this argument has not been seen before.
    fn value(&mut self, arg: K) -> &U {
        // The entry API lets us run the closure only for a vacant slot while
        // borrowing `self.value` and `self.calculation` disjointly.
        match self.value.entry(arg.clone()) {
            Entry::Occupied(v) => v.into_mut(),
            Entry::Vacant(v) => v.insert((self.calculation)(arg)),
        }
    }
}
| true |
9a5a9c26e2811787f27615d09bb479d7c36fce83
|
Rust
|
legends2k/advent-of-code
|
/2018/day_22/src/main.rs
|
UTF-8
| 7,745 | 3.03125 | 3 |
[
"Unlicense"
] |
permissive
|
use std::{
cmp::Reverse,
collections::{BinaryHeap, HashMap},
error::Error,
fmt::Debug,
io::stdin,
ops::{Add, Index, IndexMut},
};
/// A 2-D coordinate pair on the cave grid.
#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)]
struct Point(i32, i32);

impl Point {
    /// True when both coordinates are non-negative (i.e. on the grid).
    fn non_negative(&self) -> bool {
        self.0 >= 0 && self.1 >= 0
    }
}

impl Add for Point {
    type Output = Self;

    /// Component-wise addition.
    fn add(self, rhs: Self) -> Self {
        Point(self.0 + rhs.0, self.1 + rhs.1)
    }
}
/// The tool currently equipped while traversing the cave.
#[repr(u8)]
#[derive(PartialEq, Eq, PartialOrd, Ord, Copy, Clone, Debug)]
enum Tool {
    Gear,
    Torch,
    None,
}

/// Terrain classification of a cave region, derived from its erosion level.
#[repr(u8)]
#[derive(Copy, Clone, Debug)]
enum RegionType {
    Rocky,
    Wet,
    Narrow,
}

impl RegionType {
    /// Classifies a region by `erosion mod 3`.
    fn get(erosion: u64) -> Self {
        match erosion % 3 {
            0 => RegionType::Rocky,
            1 => RegionType::Wet,
            2 => RegionType::Narrow,
            _ => unreachable!(),
        }
    }

    /// True when tool `t` may be equipped inside this region type.
    /// Each region type allows exactly two of the three tools.
    fn is_allowed(self, t: Tool) -> bool {
        match (self, t) {
            (RegionType::Rocky, Tool::Gear)
            | (RegionType::Rocky, Tool::Torch)
            | (RegionType::Wet, Tool::Gear)
            | (RegionType::Wet, Tool::None)
            | (RegionType::Narrow, Tool::Torch)
            | (RegionType::Narrow, Tool::None) => true,
            _ => false,
        }
    }

    /// Given one allowed tool for this region type, returns the other
    /// allowed one. Unreachable for a disallowed tool.
    fn get_alternate(self, t: Tool) -> Tool {
        match (self, t) {
            (RegionType::Rocky, Tool::Gear) => Tool::Torch,
            (RegionType::Wet, Tool::Gear) => Tool::None,
            (RegionType::Rocky, Tool::Torch) => Tool::Gear,
            (RegionType::Narrow, Tool::Torch) => Tool::None,
            (RegionType::Wet, Tool::None) => Tool::Gear,
            (RegionType::Narrow, Tool::None) => Tool::Torch,
            _ => unreachable!(),
        }
    }
}
// Cheapest known cost to reach a square, tracked separately per tool
// (slot = the tool's discriminant); `None` = not yet reached with that tool.
#[derive(Clone, Debug)]
struct Pathing([Option<u32>; 3]);
impl Index<Tool> for Pathing {
    type Output = Option<u32>;
    // Index the cost array directly by the tool's discriminant.
    fn index(&self, idx: Tool) -> &Self::Output {
        &self.0[idx as usize]
    }
}
impl IndexMut<Tool> for Pathing {
    fn index_mut(&mut self, idx: Tool) -> &mut Self::Output {
        &mut self.0[idx as usize]
    }
}
// One cave cell: its erosion level plus the per-tool search costs
// accumulated during pathfinding.
#[derive(Clone, Debug)]
struct Region {
    erosion: u64,
    costs: Pathing,
}
impl Region {
    // Fresh region: erosion known, no path costs recorded yet.
    fn new(erosion: u64) -> Self {
        Region {
            erosion,
            costs: Pathing([None, None, None]),
        }
    }
    // |cost| is considered cheaper if region has no costs for any tool
    // or if |cost| is lesser than or equal to any previous costs
    fn is_cheaper(&self, cost: u32) -> bool {
        self.costs.0.iter().all(|c| c.is_none())
            || self.costs.0.iter().any(|c| match c {
                Some(x) => cost <= *x,
                _ => false,
            })
    }
    // Strictly-cheaper check for one specific tool slot (an unvisited slot
    // always counts as improvable).
    fn is_cheaper_for_tool(&self, cost: u32, tool: Tool) -> bool {
        match self.costs[tool] {
            None => true,
            Some(c) => c > cost,
        }
    }
}
// The cave: puzzle depth, rescue target, and a lazily-filled memo of
// regions keyed by coordinate.
struct Map {
    depth: u16,
    target: Point,
    cells: HashMap<Point, Region>,
}
impl Map {
    // bounds to go beyond |target| when bounded_map feature is enabled.
    const BUFFER: i32 = 100;
    // Pre-sizes the memo map and seeds the cave mouth and the target, both
    // of which have erosion `depth % 20183` (geologic index 0).
    fn new(depth: u16, target: Point) -> Self {
        let cap = if cfg!(feature = "bounded_map") {
            (target.0 + Map::BUFFER) * (target.1 + Map::BUFFER)
        } else {
            target.0 * target.1
        } as usize;
        let mut m = Map {
            depth,
            target,
            cells: HashMap::<Point, Region>::with_capacity(cap),
        };
        m.cells
            .insert(Point(0, 0), Region::new(depth as u64 % 20183));
        m.cells.insert(target, Region::new(depth as u64 % 20183));
        m
    }
    // Memoized erosion level: row 0 and column 0 use closed-form values;
    // interior cells recurse on the left and upper neighbours.
    fn get_erosion(&mut self, pt: Point) -> u64 {
        match self.cells.get(&pt) {
            Some(Region { erosion, .. }) => *erosion,
            None => {
                let depth = self.depth as u64;
                let e = match pt {
                    Point(0, y) => (y as u64 * 48271 + depth) % 20183,
                    Point(x, 0) => (x as u64 * 16807 + depth) % 20183,
                    _ => {
                        let left = Point(pt.0 - 1, pt.1);
                        let up = Point(pt.0, pt.1 - 1);
                        (self.get_erosion(left) * self.get_erosion(up) + depth) % 20183
                    }
                };
                self.cells.insert(pt, Region::new(e));
                e
            }
        }
    }
    // Part 1: sum of region types (Rocky=0, Wet=1, Narrow=2) over the
    // rectangle from the mouth to the target, inclusive.
    fn risk_level(&mut self) -> u32 {
        let width = self.target.0;
        let height = self.target.1;
        (0..=width)
            .flat_map(|x| (0..=height).map(move |y| Point(x, y)))
            .map(|pt| RegionType::get(self.get_erosion(pt)) as u32)
            .sum()
    }
    // Returns the (lazily created) region at `pos`, or None when `pos` is
    // off-grid — or outside the buffered rectangle under `bounded_map`.
    fn get_region(&mut self, pos: Point) -> Option<&mut Region> {
        let pos_allowed = pos.non_negative()
            && match cfg!(feature = "bounded_map") {
                true => {
                    let max_pos = self.target + Point(Map::BUFFER, Map::BUFFER);
                    (pos.0 < max_pos.0) && (pos.1 < max_pos.1)
                }
                false => true,
            };
        match (pos_allowed, self.cells.contains_key(&pos)) {
            (true, true) => self.cells.get_mut(&pos),
            (true, false) => {
                self.get_erosion(pos); // this inserts <Point, Region> into |self.cells|
                self.cells.get_mut(&pos)
            }
            _ => None,
        }
    }
}
// Dijkstra-style search over (square, tool) states using a min-heap
// (BinaryHeap of Reverse tuples): each step costs 1, and entering a region
// where the current tool is disallowed adds 7 for the switch. The rescue
// must end at |target| holding the torch, hence the final +7 when needed.
fn shortest_path(m: &mut Map, target: Point) -> u32 {
    // Visit every adjacent cell
    // Put cost-to-reach
    // Keep going in all 4 directions
    // Reach target
    // Drop yet-to-visit cells with higher cost
    // Complete all yet-to-visit cells with lower cost
    // Last step is unneeded as a priority queue is used; early short-circuit is
    // okay. Enable bounded_map feature to short-circuit; it’s slow without this.
    let mouth = m.get_region(Point(0, 0)).unwrap();
    mouth.costs[Tool::Torch] = Some(0);
    let mut to_visit = BinaryHeap::new();
    to_visit.push(Reverse((0u32, Point(0, 0), Tool::Torch)));
    while let Some(Reverse((cost, pos, tool))) = to_visit.pop() {
        let region = RegionType::get(m.get_erosion(pos));
        // only pursue paths less costlier than current target cost if reached
        if !m.get_region(m.target).unwrap().is_cheaper(cost) {
            continue;
        }
        // The four orthogonal neighbours of |pos|.
        let adjs = [
            Point(pos.0 - 1, pos.1),
            Point(pos.0 + 1, pos.1),
            Point(pos.0, pos.1 - 1),
            Point(pos.0, pos.1 + 1),
        ];
        for adj_pos in adjs {
            if let Some(adj_region) = m.get_region(adj_pos) {
                // Keep the current tool if the neighbour allows it; otherwise
                // switch to the current region's other allowed tool (+7).
                let (new_cost, new_tool) =
                    match RegionType::get(adj_region.erosion).is_allowed(tool) {
                        true => (cost + 1, tool),
                        false => (cost + 1 + 7, region.get_alternate(tool)),
                    };
                if adj_pos == target {
                    return new_cost + if new_tool != Tool::Torch { 7 } else { 0 };
                }
                // skip update if already visited with same tool at lesser cost
                if !adj_region.is_cheaper_for_tool(new_cost, new_tool) {
                    continue;
                }
                // If already visited and this cost is cheaper, we will anyway start a
                // new follow-up by pushing to |to_visit|.
                adj_region.costs[new_tool] = Some(new_cost);
                // Don’t enlist in to_visit if we’ve already reached or if new cost
                // is cheaper than target’s current costs.
                if adj_pos != target
                    && m.get_region(m.target).unwrap().is_cheaper(new_cost)
                {
                    to_visit.push(Reverse((new_cost, adj_pos, new_tool)));
                }
            }
        }
    }
    m.get_region(target).unwrap().costs[Tool::Torch].unwrap()
}
fn main() -> Result<(), Box<dyn Error>> {
    // First stdin line looks like "depth: 510" — take the token after the
    // last space and parse it.
    let mut line = String::with_capacity(20);
    stdin().read_line(&mut line)?;
    let depth: u16 = line
        .trim_end()
        .rsplit_once(' ')
        .ok_or("Invalid input")?
        .1
        .parse()?;
    // Second line looks like "target: 10,10" — same token, split on ','.
    line.clear();
    stdin().read_line(&mut line)?;
    let (x, y) = line
        .trim_end()
        .rsplit_once(' ')
        .ok_or("Invalid input")?
        .1
        .split_once(',')
        .ok_or("Invalid input")?;
    let target_pos = Point(x.parse()?, y.parse()?);
    let mut m = Map::new(depth, target_pos);
    // Part 1: print risk level of rectangle from cave mouth to target
    println!("Risk level: {}", m.risk_level());
    // Part 2: cheapest rescue path, accounting for tool switches.
    println!(
        "Fastest path to target: {}",
        shortest_path(&mut m, target_pos)
    );
    Ok(())
}
| true |
0db0b9dd49fcfb8b06ad531ec22adc1ad885dce9
|
Rust
|
tock/tock
|
/chips/stm32f303xc/src/wdt.rs
|
UTF-8
| 5,263 | 2.71875 | 3 |
[
"Apache-2.0",
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
// Licensed under the Apache License, Version 2.0 or the MIT License.
// SPDX-License-Identifier: Apache-2.0 OR MIT
// Copyright Tock Contributors 2022.
//! Window watchdog timer
use crate::rcc;
use core::cell::Cell;
use kernel::platform::chip::ClockInterface;
use kernel::utilities::registers::interfaces::ReadWriteable;
use kernel::utilities::registers::{register_bitfields, ReadWrite};
use kernel::utilities::StaticRef;
const WINDOW_WATCHDOG_BASE: StaticRef<WwdgRegisters> =
unsafe { StaticRef::new(0x4000_2C00 as *const WwdgRegisters) };
/// Memory layout of the window watchdog (WWDG) register block.
#[repr(C)]
pub struct WwdgRegisters {
    // Control: activation bit and 7-bit downcounter.
    cr: ReadWrite<u32, Control::Register>,
    // Configuration: early-wakeup interrupt, prescaler, window value.
    cfr: ReadWrite<u32, Config::Register>,
    // Status: early-wakeup interrupt flag.
    sr: ReadWrite<u32, Status::Register>,
}
// Bit-field layouts for the three WWDG registers above; field positions
// follow the doc comments on each field.
register_bitfields![u32,
    Control [
        /// Watch dog activation
        /// Set by software and only cleared by hardware after a reset.
        /// When set, the watchdog can generate a reset.
        WDGA OFFSET(7) NUMBITS(1) [],
        /// 7 bit counter
        /// These bits contain the value of the watchdog counter. It is
        /// decremented every 4096 * 2^WDGTB PCLK cycles. A reset is produced
        /// when it is decremented from 0x40 to 0x3F (T[6] becomes cleared).
        T OFFSET(0) NUMBITS(7) []
    ],
    Config [
        /// Early wakeup interrupt
        /// When set, interrupt occurs whenever the counter reaches the value
        /// of 0x40. This interrupt is only cleared by hardware after a reset.
        EWI OFFSET(9) NUMBITS(1) [],
        /// Timer base
        /// This allows modifying the time base of the prescaler.
        WDGTB OFFSET(7) NUMBITS(2) [
            /// CK Counter Clock (PCLK div 4096) div 1
            DIVONE = 0,
            /// CK Counter Clock (PCLK div 4096) div 2
            DIVTWO = 1,
            /// CK Counter Clock (PCLK div 4096) div 4
            DIVFOUR = 2,
            /// CK Counter Clock (PCLK div 4096) div 8
            DIVEIGHT = 3
        ],
        /// 7 bit window value
        /// These bits contain the window value to be compared to the
        /// downcounter.
        W OFFSET(0) NUMBITS(7) []
    ],
    Status [
        /// Early wakeup interrupt flag
        /// This is set when the counter has reached the value 0x40. It must be
        /// cleared by software by writing 0. This bit is also set when the
        /// interrupt is not enabled.
        EWIF OFFSET(0) NUMBITS(1) []
    ]
];
/// Driver state for the window watchdog peripheral.
pub struct WindoWdg<'a> {
    registers: StaticRef<WwdgRegisters>,
    // APB1 peripheral clock feeding the watchdog.
    clock: WdgClock<'a>,
    // Whether the board asked for the watchdog; gates all trait hooks below.
    enabled: Cell<bool>,
}
impl<'a> WindoWdg<'a> {
    /// Creates the driver over the fixed MMIO base, clocked from APB1.
    pub const fn new(rcc: &'a rcc::Rcc) -> Self {
        Self {
            registers: WINDOW_WATCHDOG_BASE,
            clock: WdgClock(rcc::PeripheralClock::new(
                rcc::PeripheralClockType::APB1(rcc::PCLK1::WWDG),
                rcc,
            )),
            enabled: Cell::new(false),
        }
    }
    /// Marks the watchdog as requested; the hardware itself is started
    /// later via `start()` (called from the `WatchDog::setup` hook).
    pub fn enable(&self) {
        self.enabled.set(true);
    }
    // Programs the 7-bit window value the downcounter is compared against.
    fn set_window(&self, value: u32) {
        // Set the window value to the biggest possible one.
        self.registers.cfr.modify(Config::W.val(value));
    }
    /// Modifies the time base of the prescaler.
    /// 0 - decrements the watchdog every clock cycle
    /// 1 - decrements the watchdog every 2nd clock cycle
    /// 2 - decrements the watchdog every 4th clock cycle
    /// 3 - decrements the watchdog every 8th clock cycle
    fn set_prescaler(&self, time_base: u8) {
        match time_base {
            0 => self.registers.cfr.modify(Config::WDGTB::DIVONE),
            1 => self.registers.cfr.modify(Config::WDGTB::DIVTWO),
            2 => self.registers.cfr.modify(Config::WDGTB::DIVFOUR),
            3 => self.registers.cfr.modify(Config::WDGTB::DIVEIGHT),
            _ => {}
        }
    }
    /// Enables the peripheral clock, configures window/prescaler, reloads
    /// the counter, and finally activates the watchdog.
    pub fn start(&self) {
        // Enable the APB1 clock for the watchdog.
        self.clock.enable();
        // This disables the window feature. Set this to a value smaller than
        // 0x7F if you want to enable it.
        self.set_window(0x7F);
        self.set_prescaler(3);
        // Set the T[6] bit to avoid a reset when the watchdog is activated.
        self.tickle();
        // With the APB1 clock running at 36Mhz we are getting timeout value of
        // t_WWDG = (1 / 36000) * 4096 * 2^3 * (63 + 1) = 58ms
        self.registers.cr.modify(Control::WDGA::SET);
    }
    /// Feeds the watchdog: reloads the 7-bit counter to 0x7F (127). A reset
    /// fires when the counter falls from 0x40 to 0x3F, i.e. after the
    /// 63 + 1 = 64 decrements used in the timeout formula above.
    pub fn tickle(&self) {
        self.registers.cr.modify(Control::T.val(0x7F));
    }
}
// Newtype over the watchdog's APB1 peripheral clock so `ClockInterface`
// can be implemented by plain delegation.
struct WdgClock<'a>(rcc::PeripheralClock<'a>);
impl ClockInterface for WdgClock<'_> {
    fn is_enabled(&self) -> bool {
        self.0.is_enabled()
    }
    fn enable(&self) {
        self.0.enable();
    }
    fn disable(&self) {
        self.0.disable();
    }
}
// Kernel watchdog hooks: every operation is gated on the `enabled` flag set
// via `WindoWdg::enable()`, so an unconfigured watchdog is a no-op.
impl<'a> kernel::platform::watchdog::WatchDog for WindoWdg<'a> {
    fn setup(&self) {
        if self.enabled.get() {
            self.start();
        }
    }
    fn tickle(&self) {
        if self.enabled.get() {
            self.tickle();
        }
    }
    // Suspend/resume only gate the peripheral clock.
    fn suspend(&self) {
        if self.enabled.get() {
            self.clock.disable();
        }
    }
    fn resume(&self) {
        if self.enabled.get() {
            self.clock.enable();
        }
    }
}
| true |
7a64e9e773496d3d36321c5b9f9fbd754cb71b72
|
Rust
|
theemathas/binary_turk
|
/game/src/pos/psudo_legal.rs
|
UTF-8
| 13,828 | 2.578125 | 3 |
[
"MIT"
] |
permissive
|
use std::vec;
use std::iter;
use square::{Square, Rank, File};
use moves::Move;
use color::{White, Black};
use piece::Piece;
use piece::Type::{Pawn, King, Queen, Bishop, Knight, Rook};
use castle::{Kingside, Queenside};
use super::Position;
use super::bitboard::BitBoard;
// All pseudo-legal moves: noisy moves first, then quiet ones (see `iter`).
pub struct Iter<'a>(iter::Chain<NoisyIter<'a>, QuietIter<'a>>);
impl<'a> Iterator for Iter<'a> {
    type Item = Move;
    fn next(&mut self) -> Option<Move> { self.0.next() }
    fn size_hint(&self) -> (usize, Option<usize>) { self.0.size_hint() }
}
// Noisy pseudo-legal moves only: captures, promotions, en passant.
pub struct NoisyIter<'a>(Box<Iterator<Item = Move> + 'a>);
impl<'a> Iterator for NoisyIter<'a> {
    type Item = Move;
    fn next(&mut self) -> Option<Move> { self.0.next() }
    fn size_hint(&self) -> (usize, Option<usize>) { self.0.size_hint() }
}
// Quiet pseudo-legal moves only: non-captures, including castling.
pub struct QuietIter<'a>(Box<Iterator<Item = Move> + 'a>);
impl<'a> Iterator for QuietIter<'a> {
    type Item = Move;
    fn next(&mut self) -> Option<Move> { self.0.next() }
    fn size_hint(&self) -> (usize, Option<usize>) { self.0.size_hint() }
}
// All pseudo-legal moves for the side to move: noisy first, then quiet.
pub fn iter<'a>(p: &'a Position) -> Iter<'a> {
    Iter(noisy_iter(p).chain(quiet_iter(p)))
}
// Noisy moves: en-passant captures first, then per-piece noisy moves.
pub fn noisy_iter<'a>(p: &'a Position) -> NoisyIter<'a> {
    NoisyIter(Box::new(en_passant_iter(p).chain(
        p.piece_iter().flat_map(move |(piece_id, from)| {
            noisy_move_from_iter(p, piece_id, from)
        })
    )))
}
// Quiet moves: castling first, then per-piece quiet moves.
pub fn quiet_iter<'a>(p: &'a Position) -> QuietIter<'a> {
    QuietIter(Box::new(castle_iter(p).chain(
        p.piece_iter().flat_map(move |(piece_id, from)| {
            quiet_move_from_iter(p, piece_id, from)
        })
    )))
}
// Quiet moves for one piece; yields nothing for the opponent's pieces.
fn quiet_move_from_iter(p: &Position,
                        piece_id: Piece,
                        from: Square) -> Box<Iterator<Item = Move>> {
    if piece_id.color() != p.side_to_move() {
        Box::new(None.into_iter())
    } else {
        // Dispatch by movement class: pawn, sliding piece, fixed-offset piece.
        match piece_id.piece_type() {
            Pawn => quiet_pawn_from_iter(p, piece_id, from),
            Queen|Bishop|Rook => quiet_slider_from_iter(p, piece_id, from),
            King|Knight => quiet_fixed_from_iter(p, piece_id, from),
        }
    }
}
// Noisy moves (captures/promotions) for one piece; yields nothing for the
// opponent's pieces. En passant is handled separately in `en_passant_iter`.
fn noisy_move_from_iter<'a>(p: &'a Position,
                            piece_id: Piece,
                            from: Square) -> Box<Iterator<Item = Move> + 'a> {
    if piece_id.color() != p.side_to_move() {
        Box::new(None.into_iter())
    } else {
        match piece_id.piece_type() {
            Pawn => noisy_pawn_from_iter(p, piece_id, from),
            Queen|Bishop|Rook => noisy_slider_from_iter(p, piece_id, from),
            King|Knight => noisy_fixed_from_iter(p, piece_id, from),
        }
    }
}
// A board offset as (file delta, rank delta).
type Diff = (i32, i32);
// Offsets `s` by `dir`; returns None when the result falls off the board
// (delegated to `Square::from_i32`).
fn shift(s: Square, dir: Diff) -> Option<Square> {
    let (dx, dy) = dir;
    let (File(file), Rank(rank)) = s.to_tuple();
    Square::from_i32(file + dx, rank + dy)
}
// Ray directions for each sliding piece type.
static ROOK_SLIDE: [Diff; 4] = [(1, 0), (0, 1), (-1, 0), (0, -1)];
static BISHOP_SLIDE: [Diff; 4] = [(1, 1), (1, -1), (-1, -1), (-1, 1)];
static QUEEN_SLIDE: [Diff; 8] = [(1, 0), (0, 1), (-1, 0), (0, -1),
                                 (1, 1), (1, -1), (-1, -1), (-1, 1)];
lazy_static! {
    // Empty-board reachable-square masks per origin square, one table per
    // slider type, plus, for every (origin, blocker) pair, the mask of
    // squares shadowed behind the blocker along the origin->blocker ray.
    static ref ROOK_SLIDE_TABLE: [BitBoard; 64] = slider_table_gen(&ROOK_SLIDE);
    static ref BISHOP_SLIDE_TABLE: [BitBoard; 64] = slider_table_gen(&BISHOP_SLIDE);
    static ref QUEEN_SLIDE_TABLE: [BitBoard; 64] = slider_table_gen(&QUEEN_SLIDE);
    static ref BEHIND_TABLE: [[BitBoard; 64]; 64] = behind_table_gen();
}
// Builds BEHIND_TABLE: for every origin square, the 64 per-blocker masks.
fn behind_table_gen() -> [[BitBoard; 64]; 64] {
    let mut ans = [[BitBoard::new(); 64]; 64];
    for i in 0..64 {
        ans[i as usize] = behind_from_square_gen(Square::from_id(i));
    }
    ans
}
// For a fixed origin: walk every queen ray; at each square `to` on the ray,
// record all squares strictly beyond `to` (the squares a blocker at `to`
// would shadow from the origin).
fn behind_from_square_gen(from: Square) -> [BitBoard; 64] {
    let mut ans = [BitBoard::new(); 64];
    for &dir in &QUEEN_SLIDE {
        let mut to = from;
        while let Some(temp_to) = shift(to, dir) {
            to = temp_to;
            let curr_ans: &mut BitBoard = &mut ans[to.to_id() as usize];
            let mut blocked = to;
            while let Some(temp_blocked) = shift(blocked, dir) {
                blocked = temp_blocked;
                curr_ans.set_at(blocked);
            }
        }
    }
    ans
}
// Builds one empty-board slide table: for each origin square, the mask of
// every square reachable along the given ray directions.
fn slider_table_gen(diffs: &[Diff]) -> [BitBoard; 64] {
    let mut ans = [BitBoard::new(); 64];
    for i in 0..64 {
        ans[i as usize] = slider_from_square_gen(Square::from_id(i), diffs);
    }
    ans
}
// Walks every ray from `from` to the board edge, setting each square.
fn slider_from_square_gen(from: Square, diffs: &[Diff]) -> BitBoard {
    let mut ans = BitBoard::new();
    for &dir in diffs {
        let mut to = from;
        while let Some(temp) = shift(to, dir) {
            to = temp;
            ans.set_at(to);
        }
    }
    ans
}
// Squares a slider on `from` can reach in the current position: start from
// the empty-board mask, then remove everything shadowed behind each
// occupied square (the blockers themselves stay in the mask, so a capture
// square is still included).
fn reachable_from_bitboard(p: &Position, piece_id: Piece, from: Square) -> BitBoard {
    let piece_type = piece_id.piece_type();
    let table: &[BitBoard; 64] = &match piece_type {
        Rook => *ROOK_SLIDE_TABLE,
        Bishop => *BISHOP_SLIDE_TABLE,
        Queen => *QUEEN_SLIDE_TABLE,
        _ => panic!(),
    };
    let mut ans: BitBoard = table[from.to_id() as usize];
    let potential_blocker: BitBoard = ans.intersect(!p.data.empty_data());
    for blocker_square in potential_blocker.iter() {
        ans = ans.intersect(
            !BEHIND_TABLE[from.to_id() as usize][blocker_square.to_id() as usize]);
    }
    ans
}
// Quiet slider moves: reachable squares restricted to empty ones.
fn quiet_slider_from_iter(p: &Position,
                          piece_id: Piece,
                          from: Square) -> Box<Iterator<Item = Move>> {
    let to_bitboard = reachable_from_bitboard(p, piece_id, from);
    let ans = to_bitboard.intersect(p.data.empty_data());
    Box::new(ans.iter().map(move |to: Square| Move::new(from, to)))
}
// Noisy slider moves: reachable squares restricted to enemy-occupied ones,
// with the captured piece recorded on each move.
fn noisy_slider_from_iter<'a>(p: &'a Position,
                              piece_id: Piece,
                              from: Square) -> Box<Iterator<Item = Move> + 'a> {
    let to_bitboard = reachable_from_bitboard(p, piece_id, from);
    let ans = to_bitboard.intersect(p.data.color_data(p.side_to_move().invert()))
;
    Box::new(ans.iter().map(move |to: Square| {
        let mut ans = Move::new(from, to);
        ans.set_capture_normal(p.at(to));
        ans
    }))
}
// Single-step offsets for the fixed-offset pieces.
static KING_FIXED: [Diff; 8] = [(1, 0), (0, 1), (-1, 0), (0, -1),
                                (1, 1), (1, -1), (-1, -1), (-1, 1)];
static KNIGHT_FIXED: [Diff; 8] = [(2, 1), (2, -1), (-2, -1), (-2, 1),
                                  (1, 2), (1, -2), (-1, -2), (-1, 2)];
lazy_static! {
    // Destination masks per origin square for king and knight.
    static ref KING_FIXED_TABLE: [BitBoard; 64] = fixed_table_gen(&KING_FIXED);
    static ref KNIGHT_FIXED_TABLE: [BitBoard; 64] = fixed_table_gen(&KNIGHT_FIXED);
}
// Builds one fixed-offset table: for each origin, the on-board destinations.
fn fixed_table_gen(diffs: &[Diff]) -> [BitBoard; 64] {
    let mut ans = [BitBoard::new(); 64];
    for i in 0..64 {
        ans[i as usize] = fixed_from_square_gen(Square::from_id(i), diffs);
    }
    ans
}
// Applies each offset once, dropping those that leave the board.
fn fixed_from_square_gen(from: Square, diffs: &[Diff]) -> BitBoard {
    let mut ans = BitBoard::new();
    for dir in diffs {
        if let Some(to) = shift(from, *dir) {
            ans.set_at(to);
        }
    }
    ans
}
// Quiet king/knight moves: table destinations restricted to empty squares.
fn quiet_fixed_from_iter(p: &Position,
                         piece_id: Piece,
                         from: Square) -> Box<Iterator<Item = Move>> {
    let table: &[BitBoard; 64] = &match piece_id.piece_type() {
        King => *KING_FIXED_TABLE,
        Knight => *KNIGHT_FIXED_TABLE,
        _ => panic!(),
    };
    let to_bits = table[from.to_id() as usize].intersect(p.data.empty_data());
    Box::new(to_bits.iter().map(move |to: Square| Move::new(from, to)))
}
// Noisy king/knight moves: table destinations restricted to enemy-occupied
// squares, recording the captured piece on each move.
fn noisy_fixed_from_iter<'a>(p: &'a Position,
                             piece_id: Piece,
                             from: Square) -> Box<Iterator<Item = Move> + 'a> {
    let table: &[BitBoard; 64] = &match piece_id.piece_type() {
        King => *KING_FIXED_TABLE,
        Knight => *KNIGHT_FIXED_TABLE,
        _ => panic!(),
    };
    let other_color = piece_id.color().invert();
    let to_bits = table[from.to_id() as usize].intersect(p.data.color_data(other_color));
    Box::new(to_bits.iter().map(move |to: Square| {
        let mut curr_move = Move::new(from, to);
        curr_move.set_capture_normal(p.at(to));
        curr_move
    }))
}
// Non-capturing pawn pushes: single push, plus the double push from the
// pawn's starting rank. Pushes onto the last rank are deliberately omitted
// here — promotions are generated by `noisy_pawn_from_iter`.
fn quiet_pawn_from_iter(p: &Position,
                        piece_id: Piece,
                        from: Square) -> Box<Iterator<Item = Move>> {
    let mut ans = Vec::new();
    let piece_color = piece_id.color();
    let from_rank = from.rank().0;
    //rank_up is the 1-based rank from the piece-owner's side.
    let (dy, rank_up): (i32, i32) = match piece_color {
        White => ( 1, 1 + from_rank ),
        Black => (-1, 8 - from_rank ),
    };
    let move_dir: Diff = (0, dy);
    let to: Square = shift(from, move_dir).unwrap();
    // if destination is empty
    if p.is_empty_at(to) {
        match rank_up {
            // Push to the last rank would be a promotion: handled as noisy.
            7 => {},
            // On the start rank: single push, and double push if the square
            // two ahead is also empty (flagged for en-passant bookkeeping).
            2 => {
                let curr_move = Move::new(from, to);
                ans.push(curr_move);
                let to2: Square = shift(to, move_dir).unwrap();
                if p.is_empty_at(to2) {
                    let mut curr_move2 = Move::new(from, to2);
                    curr_move2.set_pawn_double_move(true);
                    ans.push(curr_move2);
                }
            },
            _ => {
                let curr_move = Move::new(from, to);
                ans.push(curr_move);
            },
        }
    }
    Box::new(ans.into_iter())
}
// Noisy pawn moves: quiet promotions (push onto the last rank) and diagonal
// captures, including capture-promotions. En passant is generated elsewhere.
fn noisy_pawn_from_iter(p: &Position,
                        piece_id: Piece,
                        from: Square) -> Box<Iterator<Item = Move>> {
    let mut ans = Vec::new();
    let piece_color = piece_id.color();
    let from_rank = from.rank().0;
    //rank_up is the 1-based rank from the piece-owner's side.
    let (dy, rank_up): (i32, i32) = match piece_color {
        White => ( 1, 1 + from_rank ),
        Black => (-1, 8 - from_rank ),
    };
    let move_dir: Diff = (0, dy);
    let to: Square = shift(from, move_dir).unwrap();
    // if destination is empty
    if p.is_empty_at(to) {
        match rank_up {
            // Push onto the last rank: one move per promotion piece.
            7 => {
                for new_piece in [Queen, Knight, Rook, Bishop].iter() {
                    let mut curr_move = Move::new(from, to);
                    curr_move.set_promote(Some(*new_piece));
                    ans.push(curr_move);
                }
            },
            _ => {},
        }
    }
    // Diagonal captures on both sides, when the target square holds an
    // enemy piece; capturing onto the last rank promotes as well.
    for dx in [1, -1].iter() {
        let capture_dir: Diff = (*dx, dy);
        let capture_new_pos: Option<Square> = shift(from, capture_dir);
        let capture_to: Square = match capture_new_pos {
            Some(val) => val,
            None => continue,
        };
        if p.is_color_at(capture_to, piece_color.invert()) {
            if rank_up == 7 {
                for new_piece in [Queen, Knight, Rook, Bishop].iter() {
                    let mut curr_move = Move::new(from, capture_to);
                    curr_move.set_capture_normal(p.at(capture_to));
                    curr_move.set_promote(Some(*new_piece));
                    ans.push(curr_move);
                }
            } else {
                let mut curr_move = Move::new(from, capture_to);
                curr_move.set_capture_normal(p.at(capture_to));
                ans.push(curr_move);
            }
        }
    }
    Box::new(ans.into_iter())
}
// En-passant captures onto the file recorded in the position, if any,
// coming from either adjacent file of the appropriate rank.
fn en_passant_iter(p: &Position) -> vec::IntoIter<Move> {
    let mut ans = Vec::new();
    let to_file = match p.en_passant() {
        Some(f) => f,
        None => return ans.into_iter(),
    };
    // Capturing pawn's rank and destination rank (0-indexed), per side.
    let (from_rank, to_rank) = match p.side_to_move() {
        White => (Rank(4), Rank(5)),
        Black => (Rank(3), Rank(2)),
    };
    // x/y/z exist only to give the borrowed slices a lifetime that outlives
    // the match, so `from_file_all` can reference whichever one was built.
    let (x, y, z);
    let from_file_all: &[File] = match to_file {
        // Edge files have a single neighbour; interior files have two.
        File(0) => { x = [File(1)]; &x },
        File(7) => { y = [File(6)]; &y },
        File(f) => { z = [File(f-1), File(f+1)]; &z },
    };
    let expect_piece = Piece::new(p.side_to_move(), Pawn);
    let to = Square::new(to_file, to_rank);
    for &from_file in from_file_all.iter() {
        let from = Square::new(from_file, from_rank);
        // Only a friendly pawn on the adjacent square can capture en passant.
        if p.is_piece_at(expect_piece, from) {
            let mut curr_move = Move::new(from, to);
            curr_move.set_en_passant(true);
            ans.push(curr_move);
        }
    }
    ans.into_iter()
}
// Castling moves for the side to move: king e1/e8 to g-file (kingside) or
// c-file (queenside). NOTE(review): legality (rights, empty squares,
// checks) is assumed to live in `can_castle_now` — confirm there.
fn castle_iter(p: &Position) -> vec::IntoIter<Move> {
    let mut ans = Vec::new();
    match p.side_to_move() {
        White => {
            if p.can_castle_now(Kingside, White) {
                let from = Square::new(File(4), Rank(0));
                let to = Square::new(File(6), Rank(0));
                let mut curr_move = Move::new(from, to);
                curr_move.set_castle(Some(Kingside));
                ans.push(curr_move);
            }
            if p.can_castle_now(Queenside, White) {
                let from = Square::new(File(4), Rank(0));
                let to = Square::new(File(2), Rank(0));
                let mut curr_move = Move::new(from, to);
                curr_move.set_castle(Some(Queenside));
                ans.push(curr_move);
            }
        }
        Black => {
            if p.can_castle_now(Kingside, Black) {
                let from = Square::new(File(4), Rank(7));
                let to = Square::new(File(6), Rank(7));
                let mut curr_move = Move::new(from, to);
                curr_move.set_castle(Some(Kingside));
                ans.push(curr_move);
            }
            if p.can_castle_now(Queenside, Black) {
                let from = Square::new(File(4), Rank(7));
                let to = Square::new(File(2), Rank(7));
                let mut curr_move = Move::new(from, to);
                curr_move.set_castle(Some(Queenside));
                ans.push(curr_move);
            }
        }
    }
    ans.into_iter()
}
| true |
ee06c69cadc052111c60bc7184f88c480dc80ae5
|
Rust
|
GuildOfWeavers/winterfell
|
/math/src/field/traits.rs
|
UTF-8
| 9,184 | 3.25 | 3 |
[
"MIT"
] |
permissive
|
// Copyright (c) Facebook, Inc. and its affiliates.
//
// This source code is licensed under the MIT license found in the
// LICENSE file in the root directory of this source tree.
use core::{
convert::TryFrom,
fmt::{Debug, Display},
ops::{
Add, AddAssign, BitAnd, Div, DivAssign, Mul, MulAssign, Neg, Shl, Shr, ShrAssign, Sub,
SubAssign,
},
};
use utils::{collections::Vec, AsBytes, Deserializable, DeserializationError, Serializable};
// FIELD ELEMENT
// ================================================================================================
/// Defines an element in a finite field.
///
/// This trait defines basic arithmetic operations for elements in
/// [finite fields](https://en.wikipedia.org/wiki/Finite_field) (e.g. addition subtraction,
/// multiplication, division) as well as several convenience functions (e.g. double, square cube).
/// Moreover, it defines interfaces for serializing and deserializing field elements.
///
/// The elements could be in a prime field or an extension of a prime field. Currently, only
/// quadratic field extensions are supported.
pub trait FieldElement:
    Copy
    + Clone
    + Debug
    + Display
    + Default
    + Send
    + Sync
    + Eq
    + PartialEq
    + Sized
    + Add<Self, Output = Self>
    + Sub<Self, Output = Self>
    + Mul<Self, Output = Self>
    + Div<Self, Output = Self>
    + AddAssign<Self>
    + SubAssign<Self>
    + MulAssign<Self>
    + DivAssign<Self>
    + Neg<Output = Self>
    + From<<Self as FieldElement>::BaseField>
    + From<u128>
    + From<u64>
    + From<u32>
    + From<u16>
    + From<u8>
    + for<'a> TryFrom<&'a [u8]>
    + AsBytes
    + Serializable
    + Deserializable
{
    /// A type defining positive integers big enough to describe a field modulus for
    /// `Self::BaseField` with no loss of precision.
    type PositiveInteger: Debug
        + Copy
        + PartialEq
        + PartialOrd
        + ShrAssign
        + Shl<u32, Output = Self::PositiveInteger>
        + Shr<u32, Output = Self::PositiveInteger>
        + BitAnd<Output = Self::PositiveInteger>
        + From<u32>
        + From<u64>;
    /// Base field type for this finite field. For prime fields, `BaseField` should be set
    /// to `Self`.
    type BaseField: StarkField;
    /// Number of bytes needed to encode an element
    const ELEMENT_BYTES: usize;
    /// True if internal representation of an element can be redundant - i.e., multiple
    /// internal representations map to the same canonical representation.
    const IS_MALLEABLE: bool;
    /// The additive identity.
    const ZERO: Self;
    /// The multiplicative identity.
    const ONE: Self;
    // ALGEBRA
    // --------------------------------------------------------------------------------------------
    /// Returns this field element added to itself.
    fn double(self) -> Self {
        self + self
    }
    /// Returns this field element raised to power 2.
    fn square(self) -> Self {
        self * self
    }
    /// Returns this field element raised to power 3.
    fn cube(self) -> Self {
        self * self * self
    }
    /// Exponentiates this field element by `power` parameter.
    fn exp(self, power: Self::PositiveInteger) -> Self {
        // Standard square-and-multiply (binary) exponentiation over the
        // generic `PositiveInteger` exponent type.
        let mut r = Self::ONE;
        let mut b = self;
        let mut p = power;
        let int_zero = Self::PositiveInteger::from(0u32);
        let int_one = Self::PositiveInteger::from(1u32);
        if p == int_zero {
            return Self::ONE;
        } else if b == Self::ZERO {
            return Self::ZERO;
        }
        while p > int_zero {
            // Multiply the accumulator in whenever the low bit of the
            // remaining exponent is set.
            if p & int_one == int_one {
                r *= b;
            }
            // Shift the exponent down one bit; square the base for the next bit.
            p >>= int_one;
            b = b.square();
        }
        r
    }
    /// Returns a multiplicative inverse of this field element. If this element is ZERO, ZERO is
    /// returned.
    fn inv(self) -> Self;
    /// Returns a conjugate of this field element.
    fn conjugate(&self) -> Self;
    // RANDOMNESS
    // --------------------------------------------------------------------------------------------
    /// Returns a cryptographically-secure random element drawn uniformly from the entire field.
    #[cfg(feature = "std")]
    fn rand() -> Self;
    /// Returns a field element if the set of bytes forms a valid field element, otherwise returns
    /// None. The element is expected to be in canonical representation. This function is primarily
    /// intended for sampling random field elements from a hash function output.
    fn from_random_bytes(bytes: &[u8]) -> Option<Self>;
    // SERIALIZATION / DESERIALIZATION
    // --------------------------------------------------------------------------------------------
    /// Converts a vector of field elements into a vector of bytes. The elements may be in the
    /// internal representation rather than in the canonical representation. This conversion is
    /// intended to be zero-copy (i.e. by re-interpreting the underlying memory).
    fn elements_into_bytes(elements: Vec<Self>) -> Vec<u8>;
    /// Converts a list of elements into a list of bytes. The elements may be in the internal
    /// representation rather than in the canonical representation. This conversion is intended
    /// to be zero-copy (i.e. by re-interpreting the underlying memory).
    fn elements_as_bytes(elements: &[Self]) -> &[u8];
    /// Converts a list of bytes into a list of field elements. The elements are assumed to
    /// encoded in the internal representation rather than in the canonical representation. The
    /// conversion is intended to be zero-copy (i.e. by re-interpreting the underlying memory).
    ///
    /// # Errors
    /// An error is returned if:
    /// * Memory alignment of `bytes` does not match memory alignment of field element data.
    /// * Length of `bytes` does not divide into whole number of elements.
    ///
    /// # Safety
    /// This function is unsafe because it does not check whether underlying bytes represent valid
    /// field elements according to their internal representation.
    unsafe fn bytes_as_elements(bytes: &[u8]) -> Result<&[Self], DeserializationError>;
    // INITIALIZATION
    // --------------------------------------------------------------------------------------------
    /// Returns a vector of length `n` initialized with all ZERO elements.
    ///
    /// Specialized implementations of this function may be faster than the generic implementation.
    fn zeroed_vector(n: usize) -> Vec<Self> {
        vec![Self::ZERO; n]
    }
    /// Returns a vector of `n` pseudo-random elements drawn uniformly from the entire
    /// field based on the provided `seed`.
    #[cfg(feature = "std")]
    fn prng_vector(seed: [u8; 32], n: usize) -> Vec<Self>;
    // UTILITIES
    // --------------------------------------------------------------------------------------------
    /// Normalizes internal representation of this element.
    ///
    /// Normalization is applicable only to malleable field elements; for non-malleable elements
    /// this is a no-op.
    fn normalize(&mut self);
}
// STARK FIELD
// ================================================================================================
/// Defines an element in a STARK-friendly finite field.
///
/// A STARK-friendly field is defined as a prime field with high two-addicity. That is, the
/// the modulus of the field should be a prime number of the form `k` * 2^`n` + 1 (a Proth prime),
/// where `n` is relatively larger (e.g., greater than 32).
pub trait StarkField: FieldElement<BaseField = Self> {
    /// Type describing quadratic extension of this StarkField.
    type QuadExtension: FieldElement<BaseField = Self>;
    /// Prime modulus of the field. Must be of the form `k` * 2^`n` + 1 (a Proth prime).
    /// This ensures that the field has high 2-adicity.
    const MODULUS: Self::PositiveInteger;
    /// The number of bits needed to represents `Self::MODULUS`.
    const MODULUS_BITS: u32;
    /// A multiplicative generator of the field.
    const GENERATOR: Self;
    /// Let Self::MODULUS = `k` * 2^`n` + 1; then, TWO_ADICITY is `n`.
    const TWO_ADICITY: u32;
    /// Let Self::MODULUS = `k` * 2^`n` + 1; then, TWO_ADIC_ROOT_OF_UNITY is 2^`n` root of unity
    /// computed as Self::GENERATOR^`k`.
    const TWO_ADIC_ROOT_OF_UNITY: Self;
    /// Returns the root of unity of order 2^`n`.
    ///
    /// # Panics
    /// Panics if the root of unity for the specified order does not exist in this field.
    fn get_root_of_unity(n: u32) -> Self {
        assert!(n != 0, "cannot get root of unity for n = 0");
        assert!(
            n <= Self::TWO_ADICITY,
            "order cannot exceed 2^{}",
            Self::TWO_ADICITY
        );
        // TWO_ADIC_ROOT_OF_UNITY has order 2^TWO_ADICITY; raising it to the
        // power 2^(TWO_ADICITY - n) reduces its order to exactly 2^n.
        let power = Self::PositiveInteger::from(1u32) << (Self::TWO_ADICITY - n);
        Self::TWO_ADIC_ROOT_OF_UNITY.exp(power)
    }
    /// Returns byte representation of the field modulus in little-endian byte order.
    fn get_modulus_le_bytes() -> Vec<u8>;
    /// Returns a canonical integer representation of the field element.
    fn as_int(&self) -> Self::PositiveInteger;
}
| true |
4fba6f44154d2bf9dab97efdfb6901850a47aa0b
|
Rust
|
grilledwindow/mel-dl
|
/src/config.rs
|
UTF-8
| 1,740 | 3.171875 | 3 |
[] |
no_license
|
use serde::Deserialize;
use std::path::Path;
// Per-course configuration entry, deserialized from the user's config file.
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
#[serde(bound(deserialize = "'de: 'a"))]
pub struct Module<'a> {
    // module name
    pub name: &'a str,
    // the order of the course on the website
    pub course_nth: u8,
    // first week appears on top, defaults to true
    #[serde(default = "default_true")]
    pub folder_order_ascending: bool,
    // img used, defaults to "folder"
    #[serde(default = "default_img_alt")]
    pub img_alt: &'a str,
    // when the folders start; defaults to 0 (`default_week_start` returns 0,
    // even though this comment originally said 1 — TODO confirm intent)
    #[serde(default = "default_week_start")]
    pub week_start: u8,
    // name of materials tab, defaults to "Learning Materials"
    #[serde(default = "default_materials")]
    pub materials: &'a str,
}
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
#[serde(bound(deserialize = "'de: 'a"))]
pub struct SettingsRead<'a> {
#[serde(default = "default_true")]
pub order_ascending: bool,
pub path: String,
pub bin: String,
pub temp_download_folder: &'a str,
}
#[derive(Debug)]
pub struct Settings<'a> {
pub order_ascending: bool,
pub path: &'a Path,
pub bin: &'a Path,
pub temp_download_folder: &'a Path,
}
impl<'a> Settings<'a> {
    /// Converts a deserialized `SettingsRead` into a `Settings` whose path
    /// fields borrow from `sr`.
    pub fn from(sr: &'a SettingsRead) -> Self {
        Self {
            order_ascending: sr.order_ascending,
            // `Path::new` already returns `&Path` borrowed from its argument;
            // the previous extra `&` produced a `&&Path` that was only valid
            // via deref coercion.
            path: Path::new(&sr.path),
            bin: Path::new(&sr.bin),
            temp_download_folder: Path::new(sr.temp_download_folder),
        }
    }
}
fn default_true() -> bool {
true
}
fn default_img_alt<'a>() -> &'a str {
"folder"
}
// serde default for `Module::week_start`.
// NOTE(review): returns 0, while the field's comment claims the default is 1 —
// confirm which value is intended.
fn default_week_start() -> u8 {
    0
}
fn default_materials<'a>() -> &'a str {
"Learning Materials"
}
| true |
db1e871aa4e2fb504ae76e9ccd021411884f28fa
|
Rust
|
Jvanrhijn/rustyrenderer
|
/src/model.rs
|
UTF-8
| 10,641 | 3 | 3 |
[] |
no_license
|
use std::cmp;
use std::vec::{Vec};
use std::vec;
use std::mem;
extern crate num;
use geo;
use geo::Vector;
use image;
pub trait Polygon<T>
{
fn draw(&self, img: &mut image::RgbImage, color: &[u8; 3]);
fn draw_filled(&self, img: &mut image::RgbImage, color: &[u8; 3], zbuf: &mut Vec<f64>);
fn inside(&self, point: &geo::Vec3<T>) -> bool;
fn bounding_box(&self, dimx: u32, dimy: u32) -> Line<i32>;
}
pub struct Line<T> {
start: geo::Vec3<T>,
end: geo::Vec3<T>,
}
impl<T> Line<T>
where T: geo::Number<T>
{
pub fn new(start: geo::Vec3<T>, end: geo::Vec3<T>) -> Line<T> {
Line{start, end}
}
fn rasterize(&self, xdim: u32, ydim: u32) -> Line<u32> {
let (start, end) = (self.start.to_f64().unwrap(), self.end.to_f64().unwrap());
let start = geo::Vec3::<u32>::new(((start.x + 1.)*0.5*(xdim as f64)) as u32,
((start.y + 1.)*0.5*(ydim as f64)) as u32, 0);
let end = geo::Vec3::<u32>::new(((end.x + 1.)*0.5*(xdim as f64)) as u32,
((end.y + 1.)*0.5*(ydim as f64)) as u32, 0);
Line::new(start, end)
}
fn vertices(&self) -> [&geo::Vec3<T>; 2] {
[&self.start, &self.end]
}
}
impl<T> Polygon<T> for Line<T>
    where T: geo::Number<T>
{
    /// Rasterizes the line into `img`, writing `color` at every covered pixel.
    fn draw(&self, img: &mut image::RgbImage, color: &[u8; 3]) {
        for pixel in self.into_iter() {
            let geo::Vec3i{x, y, z: _} = pixel;
            img.put_pixel(x as u32, y as u32, image::Rgb::<u8>(*color));
        }
    }

    /// A line has no interior, so "filled" drawing is just `draw`; the
    /// z-buffer is neither consulted nor updated (hence `_zbuf`).
    fn draw_filled(&self, img: &mut image::RgbImage, color: &[u8; 3], _zbuf: &mut Vec<f64>) {
        self.draw(img, color);
    }

    /// Returns true if `point` coincides (in x/y) with one of the line's
    /// rasterized pixels.
    fn inside(&self, point: &geo::Vec3<T>) -> bool {
        let point = point.to_i32().unwrap();
        self.into_iter()
            .any(|pixel| pixel.x == point.x && pixel.y == point.y)
    }

    /// The bounding "box" of a line is just its two endpoints cast to i32;
    /// the image dimensions are not needed.
    fn bounding_box(&self, _dimx: u32, _dimy: u32) -> Line<i32> {
        let Line{start, end} = self;
        Line {
            start: start.to_i32().unwrap(),
            end: end.to_i32().unwrap(),
        }
    }
}
pub struct LineIterator
{
line: Line<u32>,
dx: i32,
dy: i32,
derror: i32,
error: i32,
x: i32,
y: i32,
steep: bool,
}
impl LineIterator
{
pub fn new<T>(line: &Line<T>) -> LineIterator
where T: geo::Number<T>
{
let Line{start, end} = line;
let (mut x0, mut y0) = (start.x.to_u32().unwrap(), start.y.to_u32().unwrap());
let (mut x1, mut y1) = (end.x.to_u32().unwrap(), end.y.to_u32().unwrap());
let steep = (x1 as i32 - x0 as i32).abs() < (y1 as i32 - y0 as i32).abs();
if steep {
mem::swap(&mut x0, &mut y0);
mem::swap(&mut y1, &mut x1);
}
if x0 > x1 {
mem::swap(&mut x0, &mut x1);
mem::swap(&mut y0, &mut y1);
}
let dx = (x1 as i32 - x0 as i32);
let dy = (y1 as i32 - y0 as i32);
let derror = dy.abs()*2;
let oriented_line = Line::new(geo::Vec3::<u32>::new(x0, y0, 0),
geo::Vec3::<u32>::new(x1, y1, 0));
LineIterator{line: oriented_line, dx, dy, derror, error: 0,
x: x0 as i32, y: y0 as i32, steep}
}
}
impl Iterator for LineIterator {
    type Item = geo::Vec3i;
    /// Advances one Bresenham step and yields the next pixel of the line.
    ///
    /// NOTE(review): `self.x` is incremented *before* the pixel is produced,
    /// so the first item yielded is one step past the start point, and
    /// iteration includes the end point (the `line_iterate` unit test relies
    /// on exactly this behavior).
    fn next(&mut self) -> Option<geo::Vec3i> {
        self.error += self.derror;
        // Step y whenever the accumulated error crosses the midpoint.
        if self.error > self.dx {
            self.y += if self.dy > 0 { 1 } else { -1 };
            self.error -= self.dx * 2;
        }
        self.x += 1;
        // Undo the x/y transposition that was applied to steep lines.
        let (x, y) = match self.steep {
            false => (self.x, self.y),
            true => (self.y, self.x)
        };
        if self.x <= self.line.end.x as i32 {
            Some(geo::Vec3i::new(x, y, 0))
        } else {
            None
        }
    }
}
impl<'a, T> IntoIterator for &'a Line<T>
    where T: geo::Number<T>
{
    type Item = geo::Vec3i;
    type IntoIter = LineIterator;
    /// Iterates over the rasterized pixels of the line.
    fn into_iter(self) -> Self::IntoIter {
        // `self` is already `&Line<T>`; the previous `&self` created a
        // redundant `&&Line<T>` that relied on deref coercion.
        LineIterator::new(self)
    }
}
pub struct Triangle<T> {
a: geo::Vec3<T>,
b: geo::Vec3<T>,
c: geo::Vec3<T>,
edges: [Line<T>; 3],
}
impl<T> Triangle<T>
where T: geo::Number<T>
{
pub fn new(a: geo::Vec3<T>, b: geo::Vec3<T>, c: geo::Vec3<T>) -> Triangle<T> {
let ab = Line::new(a.clone(), b.clone());
let bc = Line::new(b.clone(), c.clone());
let ac = Line::new(a.clone(), c.clone());
Triangle{a, b, c, edges: [ab, bc, ac]}
}
pub fn barycentric(&self, point: &geo::Vec3<f64>) -> geo::Vec3f {
let (a, b, c) = (self.a.to_f64().unwrap(), self.b.to_f64().unwrap(), self.c.to_f64().unwrap());
let first = geo::Vec3::<f64>::new((&b-&a).x, (&c-&a).x, (&a-point).x);
let second = geo::Vec3::<f64>::new((&b-&a).y, (&c-&a).y, (&a-point).y);
let u = first.cross(&second);
if u.z.abs() < 1. {
geo::Vec3f::new(-1., 1., 1.)
} else {
geo::Vec3f::new(1.-(u.x+u.y)/u.z, u.y/u.z, u.x/u.z)
}
}
fn rasterize(&self, dimx: u32, dimy: u32) -> Triangle<i32> {
let mut vertices = vec::Vec::<geo::Vec3<i32>>::new();
for vert in self.vertices().iter() {
let vert = (*vert).to_f64().unwrap();
vertices.push(geo::Vec3::<i32>::new(((vert.x + 1.)*0.5*(dimx as f64)) as i32,
((vert.y + 1.)*0.5*(dimy as f64)) as i32, 0));
}
Triangle::new(vertices[0], vertices[1], vertices[2])
}
pub fn normal(&self) -> geo::Vec3f {
let normal = (&self.c-&self.a).cross(&(&self.b-&self.a));
normal.normalize()
}
fn vertices(&self) -> [&geo::Vec3<T>; 3] {
[&self.a, &self.b, &self.c]
}
}
impl<T> Polygon<T> for Triangle<T>
where T: geo::Number<T> + num::ToPrimitive
{
fn draw(&self, img: &mut image::RgbImage, color: &[u8; 3]) {
let (imgx, imgy) = img.dimensions();
let rast = self.rasterize(imgx-1, imgy-1);
let (a, b, c) = match &rast.edges {
[a, b, c] => (a, b, c),
_ => unreachable!()
};
a.draw(img, color);
b.draw(img, color);
c.draw(img, color);
}
fn draw_filled(&self, img: &mut image::RgbImage, color: &[u8; 3], zbuf: &mut Vec<f64>) {
let (imgx, imgy) = img.dimensions();
let rast = self.rasterize(imgx, imgy);
let Line{start: bbox_min, end: bbox_max} = self.bounding_box(imgx, imgy);
let mut point = geo::Vec3::<f64>::new(0., 0., 0.);
for x in bbox_min.x..bbox_max.x {
for y in bbox_min.y..bbox_max.y {
point.x = x as f64; point.y = y as f64;
if !rast.inside(&point.to_i32().unwrap()) {
continue;
}
point.z = 0.;
let barycentric: [f64; 3] = self.barycentric(&point).into();
for (i, vertex) in self.vertices().into_iter().enumerate() {
point.z += vertex.to_f64().unwrap().z*barycentric[i];
}
if zbuf[(point.x + point.y*(imgx as f64)) as usize] < point.z {
zbuf[(point.x + point.y*(imgx as f64)) as usize] = point.z;
img.put_pixel(point.x as u32, point.y as u32, image::Rgb::<u8>(*color));
}
}
}
}
fn inside(&self, point: &geo::Vec3<T>) -> bool {
let geo::Vec3f{x, y, z} = self.barycentric(&point.to_f64().unwrap());
!(x < 0. || y < 0. || z < 0.)
}
fn bounding_box(&self, dimx: u32, dimy: u32) -> Line<i32> {
let rast = self.rasterize(dimx, dimy);
let mut bbox_max = geo::Vec3::<i32>::new(0, 0, 0);
let mut bbox_min = geo::Vec3::<i32>::new(dimx as i32 -1, dimy as i32 -1, 0);
let clamp = geo::Vec2::<i32>::new(dimx as i32 -1, dimy as i32 -1);
for vertex in rast.vertices().into_iter() {
let vertex = vertex.to_i32().unwrap();
bbox_min.x = cmp::max(0, cmp::min(bbox_min.x, vertex.x));
bbox_min.y = cmp::max(0, cmp::min(bbox_min.y, vertex.y));
bbox_max.x = cmp::min(clamp.x, cmp::max(bbox_max.x, vertex.x));
bbox_max.y = cmp::min(clamp.y, cmp::max(bbox_max.y, vertex.y));
}
Line{start: bbox_min, end: bbox_max}
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn triangle_create() {
let a = geo::Vec3f::new(1.0, 1.0, 0.0);
let b = geo::Vec3f::new(1.0, 2.0, 0.0);
let c = geo::Vec3f::new(0.0, 2.0, 0.0);
let triangle = Triangle::new(a, b, c);
}
#[test]
fn line_create() {
let a = geo::Vec3f::new(1.0, 1.0, 0.0);
let b = geo::Vec3f::new(1.0, 2.0, 0.0);
let c = geo::Vec3f::new(0.0, 2.0, 0.0);
let triangle = Triangle::new(a, b, c);
}
#[test]
fn triangle_vertices() {
let a = geo::Vec3f::new(1.0, 1.0, 0.0);
let b = geo::Vec3f::new(1.0, 2.0, 0.0);
let c = geo::Vec3f::new(0.0, 2.0, 0.0);
let triangle = Triangle::new(a, b, c);
let verts = triangle.vertices();
assert_eq!(verts[0], &geo::Vec3f::new(1.0, 1.0, 0.0));
assert_eq!(verts[1], &geo::Vec3f::new(1.0, 2.0, 0.0));
assert_eq!(verts[2], &geo::Vec3f::new(0.0, 2.0, 0.0));
}
#[test]
fn line_points() {
let a = geo::Vec3i::new(1, 2, 3); let b = geo::Vec3i::new(4, 5, 6);
let line = Line::new(a, b);
let points = line.vertices();
assert_eq!(points[0], &geo::Vec3i::new(1, 2, 3));
assert_eq!(points[1], &geo::Vec3i::new(4, 5, 6));
}
#[test]
fn line_iterate() {
let line = Line::new(geo::Vec3::<u64>::new(0, 0, 0),
geo::Vec3::<u64>::new(9, 9, 0));
let image = image::RgbImage::new(10, 20);
let line_iter = LineIterator::new(&line);
let mut x = 0;
let mut y = 0;
for (i, pixel) in line.into_iter().enumerate() {
assert_eq!(pixel, geo::Vec3i::new((i+1) as i32, (i+1) as i32, 0));
}
}
#[test]
fn inside_triangle() {
let a = geo::Vec3f::new(0.0, 0.0, 0.0);
let b = geo::Vec3f::new(1.0, 0.0, 0.0);
let c = geo::Vec3f::new(1.0, 1.0, 0.0);
let triangle = Triangle::new(a, b, c);
let not_inside = geo::Vec3f::new(-0.1, 0., 0.);
let inside = geo::Vec3f::new(0.9, 0.1, 0.);
assert!(!triangle.inside(¬_inside));
assert!(triangle.inside(&inside));
}
}
| true |
3ffd370fe6258948ad4189de5fce028f8cfe9102
|
Rust
|
cloew/KaoBoy
|
/src/cpu/instructions/instructions.rs
|
UTF-8
| 3,890 | 3.25 | 3 |
[] |
no_license
|
use super::add;
use super::bit;
use super::compare;
use super::dec;
use super::inc;
use super::jump;
use super::load;
use super::rotate;
use super::stack;
use super::subtract;
use super::xor;
use super::instruction::Instruction;
use super::super::ProgramCounter;
use crate::as_hex;
use std::boxed::Box;
use std::option::Option;
type PackageInstructionLoader = fn(u8) -> Option<Box<(dyn Instruction + 'static)>>;
static PREFIX_INSTRUCTION: u8 = 0xCB;
pub fn load_instruction(program: &mut ProgramCounter) -> Box<dyn Instruction> {
let instruction_byte = program.read_next_byte();
println!("Program Counter: {}, {}", as_hex!(program.get_counter()-1), as_hex!(instruction_byte));
if instruction_byte == PREFIX_INSTRUCTION {
let instruction_byte = program.read_next_byte();
return load_prefix_instruction(instruction_byte);
} else {
return load_standard_instruction(instruction_byte);
}
}
/// Decodes a non-prefixed opcode into an executable instruction.
///
/// Each instruction package is consulted in declaration order; the first one
/// that recognizes `instruction_byte` provides the instruction.
///
/// # Panics
/// Panics if no package recognizes the opcode.
fn load_standard_instruction(instruction_byte: u8) -> Box<dyn Instruction> {
    let package_instruction_loaders: Vec<PackageInstructionLoader> = vec![
        add::instructions::load_instruction,
        compare::instructions::load_instruction,
        dec::instructions::load_instruction,
        inc::instructions::load_instruction,
        jump::instructions::load_instruction,
        load::instructions::load_instruction,
        rotate::instructions::load_instruction,
        stack::instructions::load_instruction,
        subtract::instructions::load_instruction,
        xor::instructions::load_instruction,
    ];
    load_instruction_from_packages(instruction_byte, package_instruction_loaders)
        .unwrap_or_else(|| panic!("Unknown instruction: {}", as_hex!(instruction_byte)))
}
/// Decodes an opcode that followed the 0xCB prefix byte.
///
/// # Panics
/// Panics if no package recognizes the prefixed opcode.
fn load_prefix_instruction(instruction_byte: u8) -> Box<dyn Instruction> {
    let package_instruction_loaders: Vec<PackageInstructionLoader> = vec![
        bit::instructions::load_instruction,
        rotate::instructions::load_prefix_instruction,
    ];
    load_instruction_from_packages(instruction_byte, package_instruction_loaders)
        .unwrap_or_else(|| panic!("Unknown prefix instruction: {}", as_hex!(instruction_byte)))
}
/// Runs `instruction_byte` through each package loader in turn, returning the
/// first instruction produced (or `None` if no package matches).
fn load_instruction_from_packages(
    instruction_byte: u8,
    package_instruction_loaders: Vec<PackageInstructionLoader>,
) -> Option<Box<dyn Instruction>> {
    package_instruction_loaders
        .iter()
        .find_map(|load_from_package| load_from_package(instruction_byte))
}
#[cfg(test)]
mod tests {
use super::*;
use crate::cpu::testing::build_test_program_counter;
#[test]
fn test_load_instruction_returns_instruction() {
const ADD_INSTRUCTION: u8 = 0x87;
let mut program = build_test_program_counter();
program._memory.borrow_mut().write_byte(0x0000, ADD_INSTRUCTION);
load_instruction(&mut program);
// Not sure if there'e anything I can assert on
}
// TODO: Uncomment once Prefix has its first actual instruction
/*#[test]
fn test_load_instruction_prefix_instruction_returns_instruction() {
const BIT_INSTRUCTION: u8 = 0x40;
let mut program = build_test_program_counter();
program._memory.borrow_mut().write_byte(0x0000, PREFIX_INSTRUCTION);
program._memory.borrow_mut().write_byte(0x0001, BIT_INSTRUCTION);
load_instruction(&mut program);
// Not sure if there'e anything I can assert on
}*/
}
| true |
341f28f5cf6b2777a56e6278c007ba6598f1bbff
|
Rust
|
CodeSteak/hs_app
|
/hs_crawler/src/crawler/timetable.rs
|
UTF-8
| 4,511 | 2.828125 | 3 |
[
"MIT"
] |
permissive
|
use super::*;
use crate::util::*;
use std::io;
use std::io::BufRead;
use std::io::BufReader;
use std::io::Read;
use std::collections::HashMap;
use select::document::Document;
use select::predicate::*;
use chrono::{Date, Local};
use reqwest;
type Timetable = HashMap<Date<Local>, Vec<String>>;
use std::sync::mpsc::Receiver;
pub fn get_async(q: Query, course: &str) -> Receiver<Result<Timetable, String>> {
let course_copy = course.to_string();
dirty_err_async(move || get(q, &course_copy))
}
/// Selects which week's timetable `get` should fetch.
pub enum Query {
    /// The week starting at the most recent Monday.
    ThisWeek,
    /// The following week (most recent Monday + 7 days; `week=1` in the URL).
    NextWeek,
}
pub fn get(q: Query, course: &str) -> Result<Timetable, DirtyError> {
let index = download_timetable_index()?;
let course_url = index
.get(&course.to_lowercase())
.ok_or_else(|| io::Error::new(io::ErrorKind::InvalidInput, "Course not found."))?;
match q {
Query::ThisWeek => {
let date = last_monday();
download_timetable_from_url(&date, course_url)
}
Query::NextWeek => {
let mut date = last_monday();
for _ in 0..7 {
date = date.succ();
}
download_timetable_from_url(&date, &course_url.replace("week=0", "week=1"))
}
}
}
/// Downloads the timetable for a given url. This is blocking.
/// Returns Days as Columns, Hours as Rows.
fn download_timetable_from_url(
start_date: &Date<Local>,
url: &str,
) -> Result<Timetable, DirtyError> {
let mut date = start_date.clone();
let res = reqwest::blocking::get(url)?;
if res.status() != 200 {
return Err(io::Error::new(io::ErrorKind::InvalidData, "Didn't get course table.").into());
}
let mut html = String::new();
res.take(MAX_RESPONSE_SIZE).read_to_string(&mut html)?;
let dom = Document::from(&*html);
let timetable_node = dom.find(Class("timetable")).next().ok_or_else(|| {
io::Error::new(
io::ErrorKind::InvalidData,
"Expected timetable class in html",
)
})?;
let timetable: Timetable = timetable_node
.find(Attr("scope", "row"))
.map(|row| {
row.find((Class("lastcol")).and(Name("td")))
.map(|column| column.text().ihh_fix())
.collect::<Vec<String>>()
}).collect::<Vec<Vec<String>>>()
.transpose()
.into_iter()
.map(|d| {
let ret = (date.clone(), d);
date = date.succ();
ret
}).collect();
Ok(timetable)
}
/// Data is stored as (lowercase_course_name : String, url : String).
type LowercaseCourseToUrl = HashMap<String, String>;
pub const TIMETABLE_INDEX: &str = "https://www.hs-offenburg.de/studium/vorlesungsplaene/";
/// Downloads all the links for the timetable of each course.
/// `TIMETABLE_INDEX` is used as source.
/// This call is blocking.
fn download_timetable_index() -> Result<LowercaseCourseToUrl, DirtyError> {
// Some constants for Parsing.
const LINK_FILTER_A: &str = "<a href=\"http://www.hs-offenburg.de/index.php?id=6627";
const LINK_FILTER_B: &str = "<a href=\"https://www.hs-offenburg.de/index.php?id=6627";
const LINK_START: &str = "<a href=\"";
const LINK_MIDDLE: &str = "\">";
const LINK_END: &str = "</a>";
let res = reqwest::blocking::get(TIMETABLE_INDEX)?;
if res.status() != 200 {
return Err(io::Error::new(io::ErrorKind::InvalidData, "Didn't get course index.").into());
}
// we need this to iterate over lines.
let reader = BufReader::new(res.take(MAX_RESPONSE_SIZE));
// Does MAGIC #oldschool, don't ask. // TODO: use select;
let course_to_url: HashMap<String, String> = reader
.lines()
.flat_map(|line| line)
.filter(|line| line.starts_with(LINK_FILTER_A) || line.starts_with(LINK_FILTER_B))
.flat_map(|line| {
let parts = line
.replace(LINK_START, "")
.replace(LINK_END, "")
.split(LINK_MIDDLE)
.map(|item| item.to_string()) // Borrow Checker Stuff
.collect::<Vec<String>>();
match &parts[..] {
[link, name] => Some((name.to_lowercase(), link.replace("http://", "https://"))),
_ => None,
}
}).collect();
if course_to_url.is_empty() {
return Err(io::Error::new(
io::ErrorKind::InvalidData,
"Failed to parse HTML for course index",
).into());
}
Ok(course_to_url)
}
| true |
445fc2eace5164ebfc4e0e5f92a4ffec8684d32c
|
Rust
|
irrst/storage-poc
|
/src/alternative/single_element.rs
|
UTF-8
| 5,776 | 3 | 3 |
[
"Apache-2.0",
"MIT"
] |
permissive
|
//! Alternative implementation of `ElementStorage`.
use core::{
alloc::AllocError,
fmt::{self, Debug},
hint,
marker::Unsize,
mem,
mem::ManuallyDrop,
ptr::NonNull,
};
use rfc2580::Pointee;
use crate::traits::ElementStorage;
use super::{Builder, Inner};
/// SingleElement is a composite of 2 ElementStorage.
///
/// It will first attempt to allocate from the first storage if possible, and otherwise use the second storage if
/// necessary.
pub struct SingleElement<F, S, FB, SB>(Inner<F, S, FB, SB>);
impl<F, S, FB, SB> SingleElement<F, S, FB, SB> {
/// Creates an instance containing the First alternative.
pub fn first(first: F, second_builder: SB) -> Self {
Self(Inner::first(first, second_builder))
}
/// Creates an instance containing the Second alternative.
pub fn second(second: S, first_builder: FB) -> Self {
Self(Inner::second(second, first_builder))
}
}
impl<F, S, FB, SB> ElementStorage for SingleElement<F, S, FB, SB>
where
F: ElementStorage,
S: ElementStorage,
FB: Builder<F>,
SB: Builder<S>,
{
type Handle<T: ?Sized + Pointee> = SingleElementHandle<F::Handle<T>, S::Handle<T>>;
unsafe fn deallocate<T: ?Sized + Pointee>(&mut self, handle: &Self::Handle<T>) {
match &mut self.0 {
Inner::First(ref mut first) => first.deallocate(&handle.first),
Inner::Second(ref mut second) => second.deallocate(&handle.second),
Inner::Poisoned => panic!("Poisoned"),
}
}
unsafe fn get<T: ?Sized + Pointee>(&self, handle: &Self::Handle<T>) -> NonNull<T> {
match &self.0 {
Inner::First(ref first) => first.get(&handle.first),
Inner::Second(ref second) => second.get(&handle.second),
Inner::Poisoned => panic!("Poisoned"),
}
}
unsafe fn coerce<U: ?Sized + Pointee, T: ?Sized + Pointee + Unsize<U>>(
&self,
handle: &Self::Handle<T>,
) -> Self::Handle<U> {
match &self.0 {
Inner::First(ref first) => SingleElementHandle {
first: ManuallyDrop::new(first.coerce(&handle.first)),
},
Inner::Second(ref second) => SingleElementHandle {
second: ManuallyDrop::new(second.coerce(&handle.second)),
},
Inner::Poisoned => panic!("Poisoned"),
}
}
fn create<T: Pointee>(&mut self, value: T) -> Result<Self::Handle<T>, T> {
match &mut self.0 {
Inner::First(ref mut first) => match first.create(value) {
Ok(first) => Ok(SingleElementHandle {
first: ManuallyDrop::new(first),
}),
Err(value) => {
if let Inner::First(first) = mem::replace(&mut self.0, Inner::Poisoned) {
let (second, result) = first.transform(|_, second: &mut S| {
second.create(value).map(|second| SingleElementHandle {
second: ManuallyDrop::new(second),
})
});
self.0 = Inner::Second(second);
return result;
}
// Safety:
// - self.0 was First before invoking replace, hence replace returns First.
unsafe { hint::unreachable_unchecked() };
}
},
Inner::Second(ref mut second) => {
second.create(value).map(|second| SingleElementHandle {
second: ManuallyDrop::new(second),
})
}
Inner::Poisoned => panic!("Poisoned"),
}
}
fn allocate<T: ?Sized + Pointee>(
&mut self,
meta: T::MetaData,
) -> Result<Self::Handle<T>, AllocError> {
match &mut self.0 {
Inner::First(ref mut first) => match first.allocate(meta) {
Ok(first) => Ok(SingleElementHandle {
first: ManuallyDrop::new(first),
}),
Err(_) => {
if let Inner::First(first) = mem::replace(&mut self.0, Inner::Poisoned) {
let (second, result) = first.transform(|_, second: &mut S| {
second.allocate(meta).map(|second| SingleElementHandle {
second: ManuallyDrop::new(second),
})
});
self.0 = Inner::Second(second);
return result;
}
// Safety:
// - self.0 was First before invoking replace, hence replace returns First.
unsafe { hint::unreachable_unchecked() };
}
},
Inner::Second(ref mut second) => {
second.allocate(meta).map(|second| SingleElementHandle {
second: ManuallyDrop::new(second),
})
}
Inner::Poisoned => panic!("Poisoned"),
}
}
}
impl<F, S, FB, SB> Debug for SingleElement<F, S, FB, SB> {
    /// Opaque debug representation; the inner storages are intentionally
    /// not displayed (they carry no `Debug` bound).
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> {
        f.write_str("SingleElement")
    }
}
impl<F: Default, S, FB, SB: Default> Default for SingleElement<F, S, FB, SB> {
fn default() -> Self {
Self(Inner::default())
}
}
/// SingleElementHandle, an alternative between 2 handles.
pub union SingleElementHandle<F, S> {
first: ManuallyDrop<F>,
second: ManuallyDrop<S>,
}
impl<F, S> Debug for SingleElementHandle<F, S> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> {
write!(f, "SingleElementHandle")
}
}
| true |
4f3470ee153fe1b356a754602bf40665cf363ea1
|
Rust
|
NobodyNada/advent2020
|
/day12/src/part2.rs
|
UTF-8
| 1,982 | 3.46875 | 3 |
[
"MIT"
] |
permissive
|
use std::io::{self, BufRead};
/// A compass direction on the navigation grid.
#[derive(Copy, Clone)]
enum Direction {
    North,
    South,
    East,
    West,
}

impl Direction {
    /// Maps an instruction byte (`N`/`S`/`E`/`W`) to a direction, if valid.
    fn from_char(c: u8) -> Option<Direction> {
        Some(match c {
            b'N' => Direction::North,
            b'S' => Direction::South,
            b'E' => Direction::East,
            b'W' => Direction::West,
            _ => return None,
        })
    }

    /// Unit step `(dx, dy)` for this direction; east is +x, north is +y.
    fn dxy(self) -> (i32, i32) {
        match self {
            Direction::North => (0, 1),
            Direction::East => (1, 0),
            Direction::West => (-1, 0),
            Direction::South => (0, -1),
        }
    }

    /// Returns `coords` translated `distance` units in this direction.
    fn offset_coords_by(self, coords: (i32, i32), distance: i32) -> (i32, i32) {
        let (dx, dy) = self.dxy();
        (coords.0 + dx * distance, coords.1 + dy * distance)
    }
}
/// AoC 2020 day 12, part 2: steer the ship by moving a waypoint.
///
/// Reads one instruction per line from stdin (a letter followed by an
/// integer) and prints the Manhattan distance of the final ship position
/// from the origin.
pub fn run() {
    // The current ship location.
    let mut coords: (i32, i32) = (0, 0);
    // The location of the waypoint relative to the ship.
    let mut wpt_offset: (i32, i32) = (10, 1);
    for line in io::stdin().lock().lines() {
        let line = line.expect("read error");
        let (op, operand) = (line.bytes().next(), &line[1..]);
        let op = op.expect("empty line");
        let operand: i32 = operand.parse().expect("invalid input");
        match op {
            // N/S/E/W translate the waypoint, not the ship.
            b'N' | b'S' | b'E' | b'W' => wpt_offset = Direction::from_char(op).unwrap().offset_coords_by(wpt_offset, operand),
            // L: rotate the waypoint 90 degrees counter-clockwise about the
            // ship, once per 90 degrees of the operand: (x, y) -> (-y, x).
            b'L' => (0..operand/90).for_each(|_| wpt_offset = (
                -wpt_offset.1,
                wpt_offset.0
            )),
            // R: rotate 90 degrees clockwise per step: (x, y) -> (y, -x).
            b'R' => (0..operand/90).for_each(|_| wpt_offset = (
                wpt_offset.1,
                -wpt_offset.0
            )),
            // F: move the ship toward the waypoint `operand` times.
            b'F' => coords = (
                coords.0 + wpt_offset.0*operand,
                coords.1 + wpt_offset.1*operand
            ),
            _ => panic!("invalid operation")
        }
    }
    // Manhattan distance from the starting position.
    println!("{}", coords.0.abs() + coords.1.abs());
}
| true |
f281bcdd07c74f0ed59ccdc7934d6e7fba71e805
|
Rust
|
sukawasatoru/rust-myscript
|
/src/model/sqlite_user_version.rs
|
UTF-8
| 6,046 | 2.59375 | 3 |
[] |
no_license
|
/*
* Copyright 2020, 2021, 2022, 2023 sukawasatoru
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
use rusqlite::types::{FromSql, FromSqlError, FromSqlResult, ValueRef};
use std::cmp::Ordering;
/// SQLite `PRAGMA user_version` decoded as major.minor.patch,
/// packed into a u32 as 8/16/8 bits (MSB first).
#[derive(Clone, Eq, Debug, PartialEq)]
pub struct SQLiteUserVersion {
    pub major: u8,
    pub minor: u16,
    pub patch: u8,
}

impl From<(u8, u16, u8)> for SQLiteUserVersion {
    fn from((major, minor, patch): (u8, u16, u8)) -> Self {
        Self { major, minor, patch }
    }
}

impl From<u32> for SQLiteUserVersion {
    fn from(raw: u32) -> Self {
        // Bit layout: [major: 8][minor: 16][patch: 8], most significant first.
        Self {
            major: (raw >> 24) as u8,
            minor: (raw >> 8) as u16, // `as u16` keeps only the low 16 bits
            patch: raw as u8,         // `as u8` keeps only the low 8 bits
        }
    }
}
impl FromSql for SQLiteUserVersion {
    /// Decodes the version from a SQLite integer column.
    ///
    /// The stored value must fit a signed 32-bit integer (out-of-range
    /// values become a `FromSqlError::Other`); its bit pattern is then
    /// reinterpreted as `u32` and unpacked into major/minor/patch.
    fn column_result(value: ValueRef<'_>) -> FromSqlResult<Self> {
        let val: i32 = value
            .as_i64()?
            .try_into()
            .map_err(|e| FromSqlError::Other(Box::new(e)))?;
        Ok((val as u32).into())
    }
}
impl std::str::FromStr for SQLiteUserVersion {
    type Err = anyhow::Error;

    /// Parses a dotted `major.minor.patch` string.
    ///
    /// Exactly three components are required; each must fit its backing
    /// integer type (`u8`/`u16`/`u8`), otherwise the `parse` error is
    /// propagated.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        // Pull at most four components; a fourth one (or a missing one)
        // means the input is not a three-part version string.
        let mut parts = s.split('.');
        match (parts.next(), parts.next(), parts.next(), parts.next()) {
            (Some(major), Some(minor), Some(patch), None) => {
                Ok((major.parse()?, minor.parse()?, patch.parse()?).into())
            }
            // Fixed grammar of the error message ("semantics" -> "semantic").
            _ => anyhow::bail!("supports semantic version only"),
        }
    }
}
impl From<&SQLiteUserVersion> for u32 {
    /// Packs the version into 32 bits: major in the top byte,
    /// minor in the middle two bytes, patch in the low byte.
    fn from(version: &SQLiteUserVersion) -> Self {
        let mut packed = (version.major as u32) << 24;
        packed |= (version.minor as u32) << 8;
        packed | (version.patch as u32)
    }
}
impl From<SQLiteUserVersion> for u32 {
    /// Packs an owned version by delegating to the by-reference impl.
    fn from(value: SQLiteUserVersion) -> Self {
        (&value).into()
    }
}
impl std::fmt::Display for SQLiteUserVersion {
    /// Renders the version as dotted `major.minor.patch`, e.g. `1.2.3`.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let Self { major, minor, patch } = self;
        write!(f, "{}.{}.{}", major, minor, patch)
    }
}
impl PartialOrd<SQLiteUserVersion> for SQLiteUserVersion {
    /// Orders versions lexicographically by `(major, minor, patch)`.
    ///
    /// Replaces the original hand-rolled early-return cascade with the
    /// idiomatic `Ordering::then_with` chain; the lazy closures skip the
    /// lower-priority comparisons exactly as the original did.
    fn partial_cmp(&self, other: &SQLiteUserVersion) -> Option<Ordering> {
        Some(
            self.major
                .cmp(&other.major)
                .then_with(|| self.minor.cmp(&other.minor))
                .then_with(|| self.patch.cmp(&other.patch)),
        )
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use futures::StreamExt;
    /// Sanity checks for bit layout, formatting, packing, and parsing.
    #[test]
    fn sqlite_user_version() {
        // Bit-layout sanity: 8 + 16 + 8 bits fill a u32 exactly.
        assert_eq!(0b11111111_11111111_11111111_11111111u32, u32::MAX);
        assert_eq!(
            (0b11111111u32 << 24) | (0b11111111_11111111u32 << 8) | 0b11111111,
            u32::MAX
        );
        assert_eq!(SQLiteUserVersion::from((1, 2, 3)).to_string(), "1.2.3");
        // Maximum value of every field survives formatting and packing.
        assert_eq!(
            SQLiteUserVersion::from((255, 65535, 255)).to_string(),
            "255.65535.255"
        );
        assert_eq!(
            u32::from(SQLiteUserVersion::from((255, 65535, 255))),
            u32::MAX
        );
        assert_eq!(
            "1.2.3".parse::<SQLiteUserVersion>().unwrap(),
            SQLiteUserVersion::from((1, 2, 3))
        );
        // Parsing rejects anything that is not exactly three components.
        assert!("".parse::<SQLiteUserVersion>().is_err());
        assert!("0.0.0.0".parse::<SQLiteUserVersion>().is_err());
    }
    /// Components larger than their backing integer type must fail to parse.
    #[test]
    fn string_overflow() {
        assert!("256.0.0".parse::<SQLiteUserVersion>().is_err());
        assert!("0.65536.0".parse::<SQLiteUserVersion>().is_err());
        assert!("0.0.256".parse::<SQLiteUserVersion>().is_err());
    }
    /// Round-trip through the packed u32 representation.
    #[test]
    fn parse_u32() {
        let orig = SQLiteUserVersion::from((1, 2, 3));
        let orig_u32 = u32::from(orig.clone());
        let orig_u32_version = SQLiteUserVersion::from(orig_u32);
        assert_eq!(orig_u32_version, orig);
    }
    /// Round-trip at the maximum representable version.
    #[test]
    fn parse_u32_max() {
        let orig = SQLiteUserVersion::from((255, 65535, 255));
        let orig_u32 = u32::from(orig.clone());
        let orig_u32_version = SQLiteUserVersion::from(orig_u32);
        assert_eq!(orig_u32_version, orig);
    }
    /// Exhaustive round-trip over the whole (major, minor, patch) space,
    /// with the major range split into one spawned task per CPU.
    /// Ignored by default: it iterates 2^32 combinations.
    #[tokio::test(flavor = "multi_thread")]
    #[ignore]
    async fn parse_u32_matrix() {
        let mut futs = futures::stream::FuturesUnordered::new();
        let cpus = num_cpus::get();
        // Number of major values handled by each task (the last task also
        // absorbs the remainder so the full 0..=255 range is covered).
        let duration = 255 / cpus;
        let mut count = 0;
        for i in 0..cpus {
            futs.push(tokio::task::spawn(async move {
                let start = count;
                // "cpus + 1" means "=" of "0..=255".
                let end = count + duration + if i == cpus - 1 { 255 % cpus + 1 } else { 0 };
                for major in start..end {
                    for minor in 0..=65535 {
                        for patch in 0..=255 {
                            let major = major as u8;
                            let orig = SQLiteUserVersion::from((major, minor, patch));
                            let orig_u32 = u32::from(orig.clone());
                            let orig_u32_version = SQLiteUserVersion::from(orig_u32);
                            assert_eq!(orig_u32_version, orig);
                        }
                    }
                }
            }));
            count += duration;
        }
        // Surface any panic from a worker task as a test failure.
        while let Some(data) = futs.next().await {
            if let Err(e) = data {
                dbg!(e);
                assert!(false);
            }
        }
    }
}
| true |
a33f23f5f4239eecf304e2d437112c245583c662
|
Rust
|
m4b/scroll
|
/src/lesser.rs
|
UTF-8
| 6,744 | 3.375 | 3 |
[
"MIT"
] |
permissive
|
use crate::ctx::{FromCtx, IntoCtx, SizeWith};
use std::io::{Read, Result, Write};
/// An extension trait to `std::io::Read` streams; mainly targeted at reading primitive types with
/// a known size.
///
/// Requires types to implement [`FromCtx`](ctx/trait.FromCtx.html) and [`SizeWith`](ctx/trait.SizeWith.html).
///
/// **NB** You should probably add `repr(C)` and be very careful how you implement
/// [`SizeWith`](ctx/trait.SizeWith.html), otherwise you will get IO errors failing to fill entire
/// buffer (the size you specified in `SizeWith`), or out of bound errors (depending on your impl)
/// in `from_ctx`.
///
/// Warning: Currently ioread/write uses a small 256-byte buffer and can not read/write larger types
///
/// # Example
/// ```rust
/// use std::io::Cursor;
/// use scroll::{self, ctx, LE, Pread, IOread};
///
/// #[repr(packed)]
/// struct Foo {
/// foo: i64,
/// bar: u32,
/// }
///
/// impl ctx::FromCtx<scroll::Endian> for Foo {
/// fn from_ctx(bytes: &[u8], ctx: scroll::Endian) -> Self {
/// Foo { foo: bytes.pread_with::<i64>(0, ctx).unwrap(), bar: bytes.pread_with::<u32>(8, ctx).unwrap() }
/// }
/// }
///
/// impl ctx::SizeWith<scroll::Endian> for Foo {
/// // our parsing context doesn't influence our size
/// fn size_with(_: &scroll::Endian) -> usize {
/// ::std::mem::size_of::<Foo>()
/// }
/// }
///
/// let bytes_ = [0x0b,0x0b,0x00,0x00,0x00,0x00,0x00,0x00, 0xef,0xbe,0x00,0x00,];
/// let mut bytes = Cursor::new(bytes_);
/// let foo = bytes.ioread_with::<i64>(LE).unwrap();
/// let bar = bytes.ioread_with::<u32>(LE).unwrap();
/// assert_eq!(foo, 0xb0b);
/// assert_eq!(bar, 0xbeef);
/// let error = bytes.ioread_with::<f64>(LE);
/// assert!(error.is_err());
/// let mut bytes = Cursor::new(bytes_);
/// let foo_ = bytes.ioread_with::<Foo>(LE).unwrap();
/// // Remember that you need to copy out fields from packed structs
/// // with a `{}` block instead of borrowing them directly
/// // ref: https://github.com/rust-lang/rust/issues/46043
/// assert_eq!({foo_.foo}, foo);
/// assert_eq!({foo_.bar}, bar);
/// ```
///
pub trait IOread<Ctx: Copy>: Read {
    /// Reads the type `N` from `Self`, with a default parsing context.
    /// For the primitive numeric types, this will be at the host machine's endianness.
    ///
    /// # Example
    /// ```rust
    /// use scroll::IOread;
    /// use std::io::Cursor;
    /// let bytes = [0xef, 0xbe];
    /// let mut bytes = Cursor::new(&bytes[..]);
    /// let beef = bytes.ioread::<u16>().unwrap();
    ///
    /// #[cfg(target_endian = "little")]
    /// assert_eq!(0xbeef, beef);
    /// #[cfg(target_endian = "big")]
    /// assert_eq!(0xefbe, beef);
    /// ```
    #[inline]
    fn ioread<N: FromCtx<Ctx> + SizeWith<Ctx>>(&mut self) -> Result<N>
    where
        Ctx: Default,
    {
        let ctx = Ctx::default();
        self.ioread_with(ctx)
    }
    /// Reads the type `N` from `Self`, with the parsing context `ctx`.
    /// **NB**: this will panic if the type you're reading has a size greater than 256. Plans are to have this allocate in larger cases.
    ///
    /// For the primitive numeric types, this will be at the host machine's endianness.
    ///
    /// # Example
    /// ```rust
    /// use scroll::{IOread, LE, BE};
    /// use std::io::Cursor;
    /// let bytes = [0xef, 0xbe, 0xb0, 0xb0, 0xfe, 0xed, 0xde, 0xad];
    /// let mut bytes = Cursor::new(&bytes[..]);
    /// let beef = bytes.ioread_with::<u16>(LE).unwrap();
    /// assert_eq!(0xbeef, beef);
    /// let b0 = bytes.ioread::<u8>().unwrap();
    /// assert_eq!(0xb0, b0);
    /// let b0 = bytes.ioread::<u8>().unwrap();
    /// assert_eq!(0xb0, b0);
    /// let feeddead = bytes.ioread_with::<u32>(BE).unwrap();
    /// assert_eq!(0xfeeddead, feeddead);
    /// ```
    #[inline]
    fn ioread_with<N: FromCtx<Ctx> + SizeWith<Ctx>>(&mut self, ctx: Ctx) -> Result<N> {
        // Fixed stack scratch space; slicing it below panics when the
        // type's size_with exceeds 256 bytes (the documented limitation).
        let mut scratch = [0u8; 256];
        let size = N::size_with(&ctx);
        // Bind the sized sub-slice once and pass it directly: the original
        // `let mut buf = ...; self.read_exact(&mut buf)?` added a needless
        // mutable binding and double reborrow, and was inconsistent with
        // `IOwrite::iowrite_with`. Behavior is unchanged.
        let buf = &mut scratch[0..size];
        self.read_exact(buf)?;
        Ok(N::from_ctx(buf, ctx))
    }
}
/// Types that implement `Read` get methods defined in `IOread`
/// for free.
///
/// The `?Sized` bound keeps unsized receivers such as `dyn Read` eligible.
impl<Ctx: Copy, R: Read + ?Sized> IOread<Ctx> for R {}
/// An extension trait to `std::io::Write` streams; this only serializes simple types, like `u8`, `i32`, `f32`, `usize`, etc.
///
/// To write custom types with a single `iowrite::<YourType>` call, implement [`IntoCtx`](ctx/trait.IntoCtx.html) and [`SizeWith`](ctx/trait.SizeWith.html) for `YourType`.
pub trait IOwrite<Ctx: Copy>: Write {
    /// Writes the type `N` into `Self`, with a default parsing context.
    /// **NB**: this will panic if the type you're writing has a size greater than 256. Plans are to have this allocate in larger cases.
    ///
    /// For the primitive numeric types, this will be at the host machine's endianness.
    ///
    /// # Example
    /// ```rust
    /// use scroll::IOwrite;
    /// use std::io::Cursor;
    ///
    /// let mut bytes = [0x0u8; 4];
    /// let mut bytes = Cursor::new(&mut bytes[..]);
    /// bytes.iowrite(0xdeadbeef as u32).unwrap();
    ///
    /// #[cfg(target_endian = "little")]
    /// assert_eq!(bytes.into_inner(), [0xef, 0xbe, 0xad, 0xde,]);
    /// #[cfg(target_endian = "big")]
    /// assert_eq!(bytes.into_inner(), [0xde, 0xad, 0xbe, 0xef,]);
    /// ```
    #[inline]
    fn iowrite<N: SizeWith<Ctx> + IntoCtx<Ctx>>(&mut self, n: N) -> Result<()>
    where
        Ctx: Default,
    {
        // Delegate to the with-context variant using the default context.
        let ctx = Ctx::default();
        self.iowrite_with(n, ctx)
    }
    /// Writes the type `N` into `Self`, with the parsing context `ctx`.
    /// **NB**: this will panic if the type you're writing has a size greater than 256. Plans are to have this allocate in larger cases.
    ///
    /// For the primitive numeric types, this will be at the host machine's endianness.
    ///
    /// # Example
    /// ```rust
    /// use scroll::{IOwrite, LE, BE};
    /// use std::io::{Write, Cursor};
    ///
    /// let mut bytes = [0x0u8; 10];
    /// let mut cursor = Cursor::new(&mut bytes[..]);
    /// cursor.write_all(b"hello").unwrap();
    /// cursor.iowrite_with(0xdeadbeef as u32, BE).unwrap();
    /// assert_eq!(cursor.into_inner(), [0x68, 0x65, 0x6c, 0x6c, 0x6f, 0xde, 0xad, 0xbe, 0xef, 0x0]);
    /// ```
    #[inline]
    fn iowrite_with<N: SizeWith<Ctx> + IntoCtx<Ctx>>(&mut self, n: N, ctx: Ctx) -> Result<()> {
        // Fixed stack scratch space; slicing it panics when the serialized
        // size exceeds 256 bytes (the documented limitation above).
        let mut buf = [0u8; 256];
        let size = N::size_with(&ctx);
        // Serialize into the sized sub-slice, then write it out in one call.
        let buf = &mut buf[0..size];
        n.into_ctx(buf, ctx);
        self.write_all(buf)?;
        Ok(())
    }
}
/// Types that implement `Write` get methods defined in `IOwrite`
/// for free.
///
/// The `?Sized` bound keeps unsized receivers such as `dyn Write` eligible.
impl<Ctx: Copy, W: Write + ?Sized> IOwrite<Ctx> for W {}
| true |
db1d467def91ec67c436897703c6194f23da1d13
|
Rust
|
BitcoinUnlimited/ElectrsCash
|
/src/fake.rs
|
UTF-8
| 909 | 3.03125 | 3 |
[
"MIT"
] |
permissive
|
use crate::store::{ReadStore, Row, WriteStore};
use crate::util::Bytes;
/// A no-op store stub: writes are discarded and reads always come back
/// empty. Useful for exercising code paths without a real backing store.
pub struct FakeStore;
impl ReadStore for FakeStore {
    /// Every key is reported as absent.
    fn get(&self, _key: &[u8]) -> Option<Bytes> {
        None
    }
    /// Every prefix scan yields an empty result set.
    fn scan(&self, _prefix: &[u8]) -> Vec<Row> {
        Vec::new()
    }
}
impl WriteStore for FakeStore {
    /// Accepts and discards all rows; the `_sync` flag is ignored.
    fn write<I: IntoIterator<Item = Row>>(&self, _rows: I, _sync: bool) {}
    /// No-op: there is no buffered state to flush.
    fn flush(&self) {}
}
#[cfg(test)]
mod tests {
    /// A write followed by a read/scan must show that nothing persisted.
    #[test]
    fn test_fakestore() {
        use crate::fake;
        use crate::store::{ReadStore, Row, WriteStore};
        let store = fake::FakeStore {};
        store.write(
            vec![Row {
                key: b"k".to_vec(),
                value: b"v".to_vec(),
            }],
            true,
        );
        store.flush();
        // nothing was actually written
        assert!(store.get(b"").is_none());
        assert!(store.scan(b"").is_empty());
    }
}
| true |
da7c075678744ff4c9c6fc668b359375c1c4d5e0
|
Rust
|
EasyPost/linecmp
|
/src/main.rs
|
UTF-8
| 6,365 | 3.015625 | 3 |
[
"MIT"
] |
permissive
|
extern crate itertools;
extern crate clap;
use std::process::exit;
use std::io::{self,BufRead,BufReader,Write};
use std::fs::File;
use std::fmt;
use std::error::Error;
use itertools::Itertools;
use itertools::EitherOrBoth::{Both, Right, Left};
use clap::Arg;
/// One differing line between the two input files.
#[derive(Debug)]
struct Difference {
    /// 1-based line number at which the files differ.
    line: usize,
    /// Left-hand file's line, or `None` if that file ended before this line.
    lhs: Option<String>,
    /// Right-hand file's line, or `None` if that file ended before this line.
    rhs: Option<String>,
}
impl Difference {
fn new(i: usize, lhs: Option<String>, rhs: Option<String>) -> Self {
Difference {
line: i + 1,
lhs,
rhs
}
}
}
/// Item emitted by the comparison stream: a real difference, or a periodic
/// heartbeat so the downstream peeking/batching never stalls for long
/// stretches of identical lines.
enum DiffItem {
    Difference(Difference),
    NoDifference
}
/// Maximum number of consecutive differing lines grouped into one batch.
const MAX_BATCH_SIZE: usize = 100;
/// Top-level failure modes, each wrapping the underlying I/O error.
#[derive(Debug)]
enum MainError {
    /// Could not open one of the two input files; records which path failed.
    FileOpenError { filename: String, error: io::Error },
    /// Writing the diff output to stdout failed.
    WriteError(io::Error),
}
impl fmt::Display for MainError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
MainError::FileOpenError { ref filename, ref error } => {
write!(f, "Error opening {}: {}", filename, error)
}
_ => <Self as fmt::Debug>::fmt(self, f)
}
}
}
impl Error for MainError {
    // Deprecated in later Rust, but kept for compatibility with older
    // std::error::Error consumers; Display carries the real message.
    fn description(&self) -> &'static str {
        "error"
    }
    /// Expose the wrapped I/O error to error-chain consumers.
    /// Uses explicit `dyn` for the trait object (the bare `&Error` form
    /// is a deprecated pre-2018 idiom).
    fn cause(&self) -> Option<&dyn Error> {
        match *self {
            MainError::FileOpenError { error: ref e, .. } => Some(e),
            MainError::WriteError(ref e) => Some(e),
        }
    }
}
/// Opens `path` for reading behind a 512 KiB buffered reader.
fn open(path: &str) -> Result<BufReader<File>, io::Error> {
    let file = File::open(path)?;
    Ok(BufReader::with_capacity(1 << 19, file))
}
/// Renders batches of consecutive differences in a diff-like format:
/// a `line N` header, then every left-hand line prefixed with `<`,
/// then every right-hand line prefixed with `>`. A side that ran out of
/// lines is shown as `[missing]`.
fn write_difference_batches<W, I>(mut target: W, difference_batches: I) -> Result<(), io::Error>
    where W: Write, I: Iterator<Item=(usize, Vec<Difference>)> {
    for (first_line, batch) in difference_batches {
        writeln!(target, "line {}", first_line)?;
        for item in batch.iter() {
            match item.lhs.as_ref() {
                Some(l) => writeln!(target, "< {}", l)?,
                None => writeln!(target, "< [missing]")?,
            }
        }
        for item in batch.iter() {
            match item.rhs.as_ref() {
                Some(r) => writeln!(target, "> {}", r)?,
                None => writeln!(target, "> [missing]")?,
            }
        }
    }
    Ok(())
}
/// Core program logic: parse CLI arguments, stream both files line by
/// line, turn mismatches into `Difference`s, group consecutive differing
/// lines into batches, and print them to stdout in a diff-like format.
///
/// Returns the process exit code on success, or a `MainError` describing
/// what failed (file open or stdout write).
fn main_i() -> Result<i32, MainError> {
    let matches = clap::App::new(env!("CARGO_PKG_NAME"))
        .version(env!("CARGO_PKG_VERSION"))
        .author("EasyPost <[email protected]>")
        .about(env!("CARGO_PKG_DESCRIPTION"))
        .arg(Arg::with_name("file1")
            .required(true)
            .help("Path to LHS file operand"))
        .arg(Arg::with_name("file2")
            .required(true)
            .help("Path to RHS file operand"))
        .get_matches();
    // Both positional args are .required(true), so unwrap cannot fail here.
    let path1 = matches.value_of("file1").unwrap();
    let path2 = matches.value_of("file2").unwrap();
    let file1 = open(&path1).map_err(|e| MainError::FileOpenError { filename: path1.to_owned(), error: e })?;
    let file2 = open(&path2).map_err(|e| MainError::FileOpenError { filename: path2.to_owned(), error: e })?;
    // Lazily walk both files in lockstep; zip_longest keeps going after the
    // shorter file ends, reporting the overhang via Left/Right.
    let differences = file1.lines().zip_longest(file2.lines()).enumerate().filter_map( |(i, zr)| {
        // Progress heartbeat on stderr every 100k lines.
        if i % 100_000 == 0 {
            eprintln!("# {}", i);
        }
        match zr {
            Both(l, r) => {
                // NOTE(review): these unwraps panic on a mid-file read
                // error rather than surfacing it as a MainError — confirm
                // that is acceptable for this tool.
                let lhs = l.unwrap();
                let rhs = r.unwrap();
                if lhs != rhs {
                    Some(DiffItem::Difference(Difference::new(i, Some(lhs.to_owned()), Some(rhs.to_owned()))))
                } else if i % 10000 == 0 {
                    // emit a NoDifference chunk every few thousand lines to prevent it.peek()
                    // from blocking for a really long time in between differences
                    Some(DiffItem::NoDifference)
                } else {
                    // Most of the time, though, just have filter_map elide those
                    None
                }
            },
            Left(l) => {
                // LHS file is longer: the right side is missing this line.
                Some(DiffItem::Difference(Difference::new(i, Some(l.unwrap()), None)))
            },
            Right(r) => {
                // RHS file is longer: the left side is missing this line.
                Some(DiffItem::Difference(Difference::new(i, None, Some(r.unwrap()))))
            }
        }
    });
    // Group the diffs into batches of consecutive lines
    // (each batch capped at MAX_BATCH_SIZE entries).
    let difference_batches = differences.peekable().batching(|it| {
        let mut resp = vec!();
        loop {
            match it.next() {
                None => { return None; },
                Some(DiffItem::NoDifference) => { continue },
                Some(DiffItem::Difference(first)) => {
                    let first_line = first.line;
                    let mut cur_line = first.line;
                    resp.push(first);
                    // Keep absorbing diffs while they are exactly one line
                    // after the previous one.
                    while resp.len() < MAX_BATCH_SIZE {
                        // it would be clearer here if we could call the it.next() inside the
                        // it.peek(), but the borrow checker disallows it, so we break instead
                        match it.peek() {
                            Some(&DiffItem::NoDifference) => {
                                break;
                            }
                            Some(&DiffItem::Difference(ref diff)) => {
                                if diff.line != cur_line + 1 {
                                    break;
                                }
                            },
                            None => {
                                break;
                            }
                        };
                        // peek() above guarantees this next() is a
                        // consecutive Difference, so the unwrap is safe.
                        if let DiffItem::Difference(next_line) = it.next().unwrap() {
                            cur_line = next_line.line;
                            resp.push(next_line);
                        }
                    }
                    return Some((first_line, resp));
                }
            }
        }
    });
    let stdout = io::stdout();
    write_difference_batches(stdout.lock(), difference_batches).map_err(MainError::WriteError)?;
    Ok(0)
}
/// Entry point: run `main_i` and translate its outcome into a process
/// exit code (its `Ok` value, or 1 after printing the error to stderr).
pub fn main() {
    let code = match main_i() {
        Ok(i) => i,
        Err(e) => {
            eprintln!("{}", e);
            1
        }
    };
    exit(code);
}
| true |
4fd083c4002d1de49589efb2dcb71cee4ad8c2f8
|
Rust
|
Vanderkast/leetcode_rust
|
/biwise_and_range/src/main.rs
|
UTF-8
| 963 | 3.453125 | 3 |
[] |
no_license
|
/// LeetCode-style wrapper type.
struct Solution;
impl Solution {
    /// Bitwise AND of every integer in the inclusive range `[left, right]`.
    ///
    /// Only the common high-order bit prefix of `left` and `right` can
    /// survive: at the first bit where the bounds disagree, every lower
    /// bit takes both values somewhere inside the range and ANDs to zero.
    pub fn range_bitwise_and(left: i32, right: i32) -> i32 {
        if left == right {
            return left;
        }
        let lead = left.leading_zeros();
        if lead != right.leading_zeros() {
            // The highest set bit itself differs, so nothing survives.
            return 0;
        }
        // The shared top bit is always part of the answer.
        let mut result = 1 << (31 - lead);
        let mut mask = result >> 1;
        // Accumulate matching bits until the bounds first disagree.
        while left & mask == right & mask {
            result += left & mask;
            mask >>= 1;
        }
        result
    }
}
fn main() {
assert_eq!(0, Solution::range_bitwise_and(0, 0));
assert_eq!(4, Solution::range_bitwise_and(5, 6));
assert_eq!(4, Solution::range_bitwise_and(5, 7));
assert_eq!(6, Solution::range_bitwise_and(6, 7));
assert_eq!(0, Solution::range_bitwise_and(7, 21));
assert_eq!(10, Solution::range_bitwise_and(10, 11));
assert_eq!(12, Solution::range_bitwise_and(12, 14));
assert_eq!(20, Solution::range_bitwise_and(20, 22));
}
| true |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.