blob_id
stringlengths 40
40
| language
stringclasses 1
value | repo_name
stringlengths 5
140
| path
stringlengths 5
183
| src_encoding
stringclasses 6
values | length_bytes
int64 12
5.32M
| score
float64 2.52
4.94
| int_score
int64 3
5
| detected_licenses
listlengths 0
47
| license_type
stringclasses 2
values | text
stringlengths 12
5.32M
| download_success
bool 1
class |
---|---|---|---|---|---|---|---|---|---|---|---|
ebe6880c7228d3706d616a6144678369cc73e79d
|
Rust
|
neara/rust_hyper_simple_api
|
/src/views/generic_error_views.rs
|
UTF-8
| 656 | 2.546875 | 3 |
[] |
no_license
|
use std::collections::HashMap;
use hyper::{Response, Body, StatusCode};
use super::types::Result;
use super::response::to_json;
/// Fallback handler for unmatched routes: logs the miss and answers with a
/// JSON `{"error": "not found"}` body and status 404.
pub async fn not_found() -> Result<Response<Body>> {
    info!("Requested url not found");
    let payload: HashMap<&str, &str> = [("error", "not found")].iter().cloned().collect();
    to_json(&payload, Some(StatusCode::NOT_FOUND))
}
/// Handler for a known route hit with an unsupported HTTP method: logs the
/// event and answers with a JSON `{"error": "method not allowed"}` body and
/// status 405.
pub async fn method_not_allowed() -> Result<Response<Body>> {
    info!("Requested method not allowed");
    let payload: HashMap<&str, &str> = [("error", "method not allowed")].iter().cloned().collect();
    to_json(&payload, Some(StatusCode::METHOD_NOT_ALLOWED))
}
| true |
e3e3ae060639c508a16304a71a9887a16076bd18
|
Rust
|
comit-network/monero-rs
|
/src/clsag/macros.rs
|
UTF-8
| 2,540 | 2.90625 | 3 |
[
"MIT"
] |
permissive
|
use curve25519_dalek::edwards::{CompressedEdwardsY, EdwardsPoint};
use std::borrow::Cow;
/// Hashes a set of elements to a Scalar.
///
/// Specifically designed to be used within the clsag module. Use with caution in other places.
macro_rules! hash_to_scalar {
    // Arguments are separated by `||`; each must implement `ToHashInput`.
    ($($e:tt) || +) => {
        {
            use crate::clsag::macros::ToHashInput as _;
            use tiny_keccak::Hasher as _;
            // Keccak-256 digest over the concatenation of all chunks.
            let mut hasher = tiny_keccak::Keccak::v256();
            $(
                // Each argument may expand to one or more 32-byte chunks.
                let bytes_vec = $e.to_hash_input();
                for el in bytes_vec {
                    hasher.update(el.as_ref());
                }
            )+
            let mut hash = [0u8; 32];
            hasher.finalize(&mut hash);
            // Reduce the 256-bit digest into a canonical ed25519 scalar.
            curve25519_dalek::scalar::Scalar::from_bytes_mod_order(hash)
        }
    };
}
/// Type alias for a single hash input element.
///
/// Monero's CLSAG implementation hashes elements as arrays of 32 bytes, even if they are shorter than that.
/// This type alias and the corresponding trait enforce this behaviour at the type-system level.
// `Cow` lets implementors either borrow an existing 32-byte array or hand
// over a freshly built (e.g. zero-padded or compressed) one.
type HashInput<'a> = Cow<'a, [u8; 32]>;
/// Conversion of a value into one or more fixed-size 32-byte hash chunks.
pub(crate) trait ToHashInput {
    fn to_hash_input(&self) -> Vec<HashInput<'_>>;
}
impl ToHashInput for CompressedEdwardsY {
    /// A compressed point is already exactly 32 bytes, so it is borrowed as-is.
    fn to_hash_input(&self) -> Vec<HashInput<'_>> {
        let chunk = HashInput::Borrowed(&self.0);
        vec![chunk]
    }
}
impl ToHashInput for EdwardsPoint {
    /// An uncompressed point is compressed first; the resulting 32 bytes are
    /// owned by the returned `Cow`.
    fn to_hash_input(&self) -> Vec<HashInput<'_>> {
        let compressed = self.compress();
        vec![HashInput::Owned(compressed.0)]
    }
}
impl ToHashInput for [u8; 32] {
    /// A 32-byte array needs no conversion and is borrowed directly.
    fn to_hash_input(&self) -> Vec<HashInput<'_>> {
        // `self` is already `&[u8; 32]`; the previous `&self` produced a
        // needless `&&[u8; 32]` that only worked via deref coercion.
        vec![HashInput::Borrowed(self)]
    }
}
impl ToHashInput for [u8; 11] {
    /// Right-pads the 11-byte array with zeros up to the fixed 32-byte
    /// chunk size expected by the hash.
    fn to_hash_input(&self) -> Vec<HashInput<'_>> {
        let mut padded = [0u8; 32];
        padded[..11].copy_from_slice(self);
        vec![HashInput::Owned(padded)]
    }
}
// The impl previously declared an unused lifetime parameter `<'a>`, and
// unrolled all 11 elements by hand; both are replaced here.
impl ToHashInput for [EdwardsPoint; 11] {
    /// Compresses each of the 11 points into its own owned 32-byte chunk,
    /// in array order.
    fn to_hash_input(&self) -> Vec<HashInput<'_>> {
        self.iter()
            .map(|point| HashInput::Owned(point.compress().0))
            .collect()
    }
}
| true |
03af9cc5748929dff311692e6007cfadad7cdbde
|
Rust
|
sile/erl_pp
|
/src/token_reader.rs
|
UTF-8
| 6,816 | 2.765625 | 3 |
[
"MIT"
] |
permissive
|
use erl_tokenize::tokens::{AtomToken, StringToken, SymbolToken, VariableToken};
use erl_tokenize::values::Symbol;
use erl_tokenize::{Lexer, LexicalToken};
use std::collections::{HashMap, VecDeque};
use std::fmt::Debug;
use std::path::Path;
use crate::macros::NoArgsMacroCall;
use crate::{Error, MacroCall, MacroDef, Result};
#[derive(Debug)]
pub struct TokenReader<T> {
    // Primary token stream being read.
    tokens: T,
    // Lexers for included texts; the last entry is consumed first (a stack).
    included_tokens: Vec<Lexer<String>>,
    // Tokens pushed back via `unread_token`; served before any stream.
    unread: VecDeque<LexicalToken>,
}
impl<T> TokenReader<T>
where
    T: Iterator<Item = erl_tokenize::Result<LexicalToken>>,
{
    /// Creates a reader over the given token stream.
    pub fn new(tokens: T) -> Self {
        TokenReader {
            tokens,
            included_tokens: Vec::new(),
            unread: VecDeque::new(),
        }
    }
    /// Registers the text of an included file; its tokens are served before
    /// the primary stream (and before previously added inclusions).
    pub fn add_included_text<P: AsRef<Path>>(&mut self, path: P, text: String) {
        let mut lexer = Lexer::new(text);
        lexer.set_filepath(path);
        self.included_tokens.push(lexer);
    }
    /// Reads a value of type `V`, failing if the upcoming tokens do not form one.
    pub fn read<V>(&mut self) -> Result<V>
    where
        V: ReadFrom,
    {
        V::read_from(self)
    }
    /// Like `read`, but yields `Ok(None)` when the upcoming tokens do not
    /// form a `V` (see `ReadFrom::try_read_from` for the push-back behavior).
    pub fn try_read<V>(&mut self) -> Result<Option<V>>
    where
        V: ReadFrom,
    {
        V::try_read_from(self)
    }
    /// Tries to read a macro call.
    ///
    /// Arguments are only parsed when the macro's definition is known to
    /// take variables; otherwise the call is returned with `args: None`.
    pub fn try_read_macro_call(
        &mut self,
        macros: &HashMap<String, MacroDef>,
    ) -> Result<Option<MacroCall>> {
        if let Some(call) = self.try_read::<NoArgsMacroCall>()? {
            let mut call = MacroCall {
                _question: call._question,
                name: call.name,
                args: None,
            };
            if macros
                .get(call.name.value())
                .map_or(false, MacroDef::has_variables)
            {
                call.args = Some(self.read()?);
            }
            Ok(Some(call))
        } else {
            Ok(None)
        }
    }
    /// Reads a `V` and errors unless it matches `expected`.
    pub fn read_expected<V>(&mut self, expected: &V::Value) -> Result<V>
    where
        V: ReadFrom + Expect + Into<LexicalToken>,
    {
        V::read_expected(self, expected)
    }
    /// Like `read_expected`, but a mismatch pushes the token back and
    /// yields `Ok(None)` instead of an error.
    pub fn try_read_expected<V>(&mut self, expected: &V::Value) -> Result<Option<V>>
    where
        V: ReadFrom + Expect + Into<LexicalToken>,
    {
        V::try_read_expected(self, expected)
    }
    /// Returns the next token, or `None` at end of input.
    ///
    /// Priority order: previously un-read tokens, then the innermost
    /// included text (exhausted lexers are popped), then the primary stream.
    pub fn try_read_token(&mut self) -> Result<Option<LexicalToken>> {
        if let Some(token) = self.unread.pop_front() {
            Ok(Some(token))
        } else if !self.included_tokens.is_empty() {
            match self
                .included_tokens
                .last_mut()
                .expect("unreachable")
                .next()
                .transpose()?
            {
                None => {
                    // Innermost inclusion exhausted: drop it and retry.
                    self.included_tokens.pop();
                    self.try_read_token()
                }
                Some(t) => Ok(Some(t)),
            }
        } else {
            match self.tokens.next().transpose()? {
                None => Ok(None),
                Some(t) => Ok(Some(t)),
            }
        }
    }
    /// Like `try_read_token`, but end of input becomes `Error::UnexpectedEof`.
    pub fn read_token(&mut self) -> Result<LexicalToken> {
        if let Some(token) = self.try_read_token()? {
            Ok(token)
        } else {
            Err(Error::UnexpectedEof)
        }
    }
    /// Pushes a token back; it will be the very next token returned.
    pub fn unread_token(&mut self, token: LexicalToken) {
        self.unread.push_front(token);
    }
}
/// Types that can be parsed out of a `TokenReader`'s token stream.
pub trait ReadFrom: Sized {
    /// Reads a `Self`, consuming the tokens that form it.
    fn read_from<T>(reader: &mut TokenReader<T>) -> Result<Self>
    where
        T: Iterator<Item = erl_tokenize::Result<LexicalToken>>;
    /// Attempts to read a `Self`.
    ///
    /// On `UnexpectedToken` the offending token is pushed back and
    /// `Ok(None)` is returned; `UnexpectedEof` also becomes `Ok(None)`.
    /// Any other error is propagated.
    fn try_read_from<T>(reader: &mut TokenReader<T>) -> Result<Option<Self>>
    where
        T: Iterator<Item = erl_tokenize::Result<LexicalToken>>,
    {
        Self::read_from(reader).map(Some).or_else(|e| {
            if let Error::UnexpectedToken { token, .. } = e {
                reader.unread_token(token);
                return Ok(None);
            }
            if let Error::UnexpectedEof = e {
                return Ok(None);
            }
            Err(e)
        })
    }
    /// Reads a `Self` and errors unless it matches `expected`.
    fn read_expected<T>(reader: &mut TokenReader<T>, expected: &Self::Value) -> Result<Self>
    where
        T: Iterator<Item = erl_tokenize::Result<LexicalToken>>,
        Self: Expect + Into<LexicalToken>,
    {
        Self::read_from(reader).and_then(|token| {
            if !token.expect(expected) {
                return Err(Error::unexpected_token(
                    token.into(),
                    &format!("{:?}", expected),
                ));
            }
            Ok(token)
        })
    }
    /// Attempts to read a `Self` matching `expected`; a successfully read
    /// but non-matching value is pushed back and `Ok(None)` is returned.
    fn try_read_expected<T>(
        reader: &mut TokenReader<T>,
        expected: &Self::Value,
    ) -> Result<Option<Self>>
    where
        T: Iterator<Item = erl_tokenize::Result<LexicalToken>>,
        Self: Expect + Into<LexicalToken>,
    {
        Self::try_read_from(reader).map(|token| {
            token.and_then(|token| {
                if token.expect(expected) {
                    Some(token)
                } else {
                    reader.unread_token(token.into());
                    None
                }
            })
        })
    }
}
impl ReadFrom for AtomToken {
    /// Reads the next token and requires it to be an atom.
    fn read_from<T>(reader: &mut TokenReader<T>) -> Result<Self>
    where
        T: Iterator<Item = erl_tokenize::Result<LexicalToken>>,
    {
        reader
            .read_token()?
            .into_atom_token()
            .map_err(|token| Error::unexpected_token(token, "atom"))
    }
}
impl ReadFrom for VariableToken {
    /// Reads the next token and requires it to be a variable.
    fn read_from<T>(reader: &mut TokenReader<T>) -> Result<Self>
    where
        T: Iterator<Item = erl_tokenize::Result<LexicalToken>>,
    {
        reader
            .read_token()?
            .into_variable_token()
            .map_err(|token| Error::unexpected_token(token, "variable"))
    }
}
impl ReadFrom for SymbolToken {
    /// Reads the next token and requires it to be a symbol.
    fn read_from<T>(reader: &mut TokenReader<T>) -> Result<Self>
    where
        T: Iterator<Item = erl_tokenize::Result<LexicalToken>>,
    {
        reader
            .read_token()?
            .into_symbol_token()
            .map_err(|token| Error::unexpected_token(token, "symbol"))
    }
}
impl ReadFrom for StringToken {
    /// Reads the next token and requires it to be a string literal.
    fn read_from<T>(reader: &mut TokenReader<T>) -> Result<Self>
    where
        T: Iterator<Item = erl_tokenize::Result<LexicalToken>>,
    {
        reader
            .read_token()?
            .into_string_token()
            .map_err(|token| Error::unexpected_token(token, "string"))
    }
}
/// Equality check of a read token against an expected value.
pub trait Expect {
    /// The value type the token is compared against (e.g. `str` for atoms).
    type Value: PartialEq + Debug + ?Sized;
    /// Returns `true` when this token matches `expected`.
    fn expect(&self, expected: &Self::Value) -> bool;
}
impl Expect for AtomToken {
    type Value = str;
    /// An atom matches when its textual value equals `expected`.
    fn expect(&self, expected: &Self::Value) -> bool {
        expected == self.value()
    }
}
impl Expect for SymbolToken {
    type Value = Symbol;
    /// A symbol matches when it denotes the same `Symbol` value.
    fn expect(&self, expected: &Self::Value) -> bool {
        *expected == self.value()
    }
}
| true |
f05a34e8fe3ee558b413d3eadc5ef07ed17c68bb
|
Rust
|
hrntsm/study-language
|
/Rust/ms-rust/chapter4/borrowing/src/main.rs
|
UTF-8
| 322 | 3.515625 | 4 |
[
"MIT"
] |
permissive
|
fn main() {
    // Owner of the string data; `mut` so it can be borrowed mutably below.
    let mut greeting = String::from("Hello");
    // Two consecutive shared borrows are fine: each borrow ends at its call.
    print_greeting(&greeting);
    print_greeting(&greeting);
    // Exclusive borrow: allowed because no shared borrow is still live.
    change(&mut greeting);
}
/// Prints the greeting through a shared borrow.
///
/// Takes `&str` instead of `&String` so any string slice is accepted;
/// existing `&String` (and `&mut String`) arguments still work via deref
/// coercion, so callers are unchanged.
fn print_greeting(message: &str) {
    println!("Greeting: {}", message);
}
/// Appends an exclamation mark through an exclusive borrow, then prints the
/// updated greeting.
fn change(message: &mut String) {
    message.push('!');
    print_greeting(message);
}
| true |
7247fb9d09b6cbcc7779d1830f14bbfa053466eb
|
Rust
|
emabee/flexi_logger
|
/src/filter.rs
|
UTF-8
| 2,057 | 3.546875 | 4 |
[
"Apache-2.0",
"MIT"
] |
permissive
|
//! This module contains two traits which allow adding a stateful filter
//! using [`Logger::filter`](crate::Logger::filter).
//!
//! # Example
//!
//! ```rust
//! use flexi_logger::{
//! filter::{LogLineFilter, LogLineWriter},
//! DeferredNow, FlexiLoggerError,
//! };
//!
//! pub struct BarsOnly;
//! impl LogLineFilter for BarsOnly {
//! fn write(
//! &self,
//! now: &mut DeferredNow,
//! record: &log::Record,
//! log_line_writer: &dyn LogLineWriter,
//! ) -> std::io::Result<()> {
//! if record.args().to_string().contains("bar") {
//! log_line_writer.write(now, record)?;
//! }
//! Ok(())
//! }
//! }
//!
//! fn main() -> Result<(), FlexiLoggerError> {
//! flexi_logger::Logger::try_with_str("info")?
//! .filter(Box::new(BarsOnly))
//! .start()?;
//! log::info!("barista");
//! log::info!("foo"); // will be swallowed by the filter
//! log::info!("bar");
//! log::info!("gaga"); // will be swallowed by the filter
//! Ok(())
//! }
//! ```
use crate::DeferredNow;
use log::Record;
/// Trait of the filter object.
#[allow(clippy::module_name_repetitions)]
pub trait LogLineFilter {
/// Each log line that `flexi_logger` would write to the configured output channel is
/// sent to this method.
///
/// Note that the log line only appears in the configured output channel if the
/// filter implementation forwards it to the provided `LogLineWriter`.
///
/// # Errors
///
/// If writing to the configured output channel fails.
fn write(
&self,
now: &mut DeferredNow,
record: &Record,
log_line_writer: &dyn LogLineWriter,
) -> std::io::Result<()>;
}
/// Write out a single log line.
///
/// An implementation of this trait is handed to [`LogLineFilter::write`];
/// the filter calls it for every line it wants to keep.
pub trait LogLineWriter {
    /// Write out a log line to the configured output channel.
    ///
    /// # Errors
    ///
    /// If writing to the configured output channel fails.
    fn write(&self, now: &mut DeferredNow, record: &Record) -> std::io::Result<()>;
}
| true |
2f14b4d61979e82c145ea28cd403336f169f1570
|
Rust
|
str4d/ire
|
/src/crypto/dh.rs
|
UTF-8
| 12,941 | 2.65625 | 3 |
[
"LicenseRef-scancode-unknown-license-reference",
"MIT"
] |
permissive
|
use num_bigint::BigUint;
use rand::{rngs::OsRng, Rng};
use std::iter::repeat;
use crate::constants::{ELGAMAL_G, ELGAMAL_P};
use crate::crypto::math::rectify;
use crate::crypto::SessionKey;
/// One side of a Diffie-Hellman exchange over the ElGamal group constants
/// (`ELGAMAL_G`, `ELGAMAL_P`), producing a 32-byte `SessionKey`.
pub struct DHSessionKeyBuilder {
    // Secret exponent, derived from 256 random bytes.
    dh_priv: BigUint,
    // Public value g^dh_priv mod p, shared with the peer.
    dh_pub: BigUint,
}
impl DHSessionKeyBuilder {
    /// Samples a fresh 256-byte private exponent from the OS RNG and derives
    /// the matching public value g^priv mod p.
    pub fn new() -> Self {
        let mut rng = OsRng;
        let mut buf = vec![0; 256];
        rng.fill(&mut buf[..]);
        let dh_priv = BigUint::from_bytes_be(&buf);
        let dh_pub = ELGAMAL_G.modpow(&dh_priv, &ELGAMAL_P);
        DHSessionKeyBuilder { dh_priv, dh_pub }
    }
    /// Returns the public value as exactly 256 big-endian bytes.
    pub fn get_pub(&self) -> Vec<u8> {
        rectify(&self.dh_pub, 256)
    }
    /// Derives the shared session key from the peer's 256-byte public value.
    ///
    /// The first 32 bytes of the Java-compatible encoding of the shared
    /// secret become the `SessionKey`.
    pub fn build_session_key(&self, peer_pub: &[u8; 256]) -> SessionKey {
        // Calculate the exchanged DH key
        let peer_pub = BigUint::from_bytes_be(peer_pub);
        let dh_key = peer_pub.modpow(&self.dh_priv, &ELGAMAL_P);
        // Represent the exchanged key as a positive minimal-length two's-complement
        // big-endian byte array. If most significant bit is 1, prepend a zero-byte
        // (to match Java's BigInteger.toByteArray() representation).
        let mut buf = dh_key.to_bytes_be();
        if buf[0] & 0x80 != 0 {
            buf.insert(0, 0x00);
        }
        // If that byte array is less than 32 bytes, append 0x00 bytes to extend to
        // 32 bytes. This is vanishingly unlikely, but have to do it for compatibility.
        let length = buf.len();
        if length < 32 {
            buf.extend(repeat(0).take(32 - length));
        }
        let mut key = [0u8; 32];
        key.copy_from_slice(&buf[0..32]);
        SessionKey(key)
    }
}
#[cfg(test)]
mod tests {
    use num_bigint::BigUint;
    use num_traits::Num;
    use super::DHSessionKeyBuilder;
    use crate::crypto::SessionKey;
    // Fixed key pairs plus the expected shared secret, captured from the Java
    // implementation so the Rust side stays byte-compatible with it.
    #[test]
    fn build_session_key() {
        struct TestVector<'a> {
            dh_priv: &'a str,        // private exponent, hex with '_' separators
            dh_pub: [u8; 256],       // our public value, big-endian
            peer_pub: [u8; 256],     // peer's public value, big-endian
            session_key: SessionKey, // expected derived key
        }
        // Generated via Java's DHSessionKeyBuilder
        let test_vectors = vec![
            TestVector {
                dh_priv: "D100BF92_2E965504_BC6E2E6D_8AB968D9_883890B7_65EF673C\
                          54479E8B_570A8157_80092340_A8E6178F_C7FB9732_3016A6C4\
                          4F953792_5C32C30C_7BC9117F_7078A214_96853C44_9D7AAC7B\
                          124CCB9A_AB14CF3B_67D2EDA3_E6251B6D_6A48AD72_FBD466B2\
                          B331C9A2_6F269DF4_8E7DC944_17546F7A_B20E39B8_57CC7A0A\
                          C572C30E_BFD06EA6_2E63E7D5_C921EAB6_A9B8FC02_F31B4103\
                          5CF8850A_6DE31D07_53785A9F_20A7DE4D_8E2CCFB9_79C62E0B\
                          05624443_7E7149CB_0B6D65BD_F7B4ADE9_1045D432_1F173603\
                          C314FA4F_541F6E0A_DDF73002_0D7F19A3_0C61BB4D_51483239\
                          16F52D21_CC765916_6E7141B5_61877053_ECA28EAF_0D221D22\
                          907D6E78_539797EF_1D29E4C9_B61169E0",
                dh_pub: [
                    0xeb, 0x84, 0x78, 0x1e, 0xe2, 0x2c, 0x07, 0xbe, 0xde, 0x67, 0xce, 0x83, 0x89,
                    0xeb, 0x34, 0x01, 0x92, 0xaf, 0x25, 0x95, 0x2e, 0x6c, 0x35, 0x35, 0x21, 0x7f,
                    0xc7, 0x60, 0xd9, 0x59, 0x0d, 0x11, 0x17, 0x70, 0xbd, 0xb8, 0x35, 0x79, 0x03,
                    0x4a, 0x65, 0x5b, 0xb8, 0xf2, 0x03, 0xd6, 0x90, 0x41, 0xf7, 0x20, 0x7c, 0x57,
                    0xe2, 0xa5, 0x46, 0xb0, 0xc3, 0xfd, 0x75, 0x5e, 0x4e, 0xf9, 0x7f, 0x6e, 0x76,
                    0xf1, 0x07, 0xa6, 0xd6, 0xcd, 0x6c, 0xa9, 0x42, 0xc5, 0xc4, 0x09, 0xd0, 0xce,
                    0x55, 0x3c, 0x53, 0xa0, 0xd8, 0xc0, 0xc9, 0x66, 0x9f, 0xce, 0xe3, 0xd8, 0xb8,
                    0xe8, 0x92, 0x33, 0x62, 0x72, 0x85, 0xd9, 0x6c, 0x07, 0x11, 0x52, 0x6d, 0x8a,
                    0x80, 0x92, 0xe1, 0x37, 0xe2, 0x43, 0x01, 0x52, 0xc9, 0x94, 0xac, 0x70, 0xf1,
                    0x74, 0x46, 0xde, 0x1f, 0x22, 0x77, 0x56, 0x5e, 0x8c, 0xf0, 0x4e, 0xb8, 0xcb,
                    0xf2, 0x44, 0x16, 0xc8, 0x3c, 0x50, 0x9e, 0x25, 0xb6, 0x61, 0x2f, 0x4f, 0x16,
                    0x89, 0xe5, 0xd0, 0x9f, 0x0b, 0x29, 0x06, 0x01, 0x0c, 0x24, 0x37, 0x99, 0x5d,
                    0xd4, 0xf8, 0x7b, 0x4f, 0x92, 0xf3, 0x99, 0x8d, 0xa4, 0x76, 0xb3, 0x9b, 0xdf,
                    0xbb, 0x34, 0x7f, 0x5b, 0x7f, 0x3e, 0x72, 0x4c, 0xc1, 0x20, 0x8b, 0x85, 0x70,
                    0xbf, 0xce, 0x0d, 0xe7, 0x3f, 0x40, 0x51, 0x3d, 0xc2, 0x80, 0xcb, 0x36, 0x25,
                    0x52, 0x54, 0x74, 0xbb, 0x42, 0x1f, 0x3f, 0xd6, 0x50, 0x60, 0x3c, 0x2e, 0x9f,
                    0x83, 0xd0, 0x9d, 0x00, 0x82, 0x61, 0x40, 0x92, 0xd9, 0x9b, 0x5e, 0x1f, 0xa2,
                    0xa0, 0xff, 0x83, 0x99, 0x38, 0x2f, 0xf1, 0xee, 0xe3, 0x9e, 0x6a, 0x99, 0x41,
                    0xee, 0x9f, 0x20, 0xd1, 0xda, 0x2f, 0x7f, 0xdf, 0xc3, 0x88, 0x62, 0x49, 0x26,
                    0xb2, 0x59, 0xf3, 0x7e, 0x30, 0x3e, 0x76, 0x7f, 0x83,
                ],
                // Leading zero bytes exercise the fixed-width (rectified) encoding.
                peer_pub: [
                    0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x08, 0x1c, 0x7d, 0x69, 0x68,
                    0x50, 0x54, 0x3a, 0x3d, 0x30, 0x56, 0xa8, 0xa4, 0xf9, 0x32, 0x18, 0x1f, 0xf3,
                    0x79, 0xb6, 0x98, 0x73, 0xb2, 0x7b, 0x81, 0x4c, 0x58, 0x19, 0x11, 0xdb, 0x36,
                    0x9a, 0xd5, 0xb3, 0xa0, 0x9d, 0x2e, 0x10, 0xa1, 0xb6, 0xec, 0xa6, 0x9d, 0x7a,
                    0x8d, 0x3c, 0xed, 0x10, 0x22, 0x93, 0xd9, 0x0f, 0xff, 0x4d, 0x65, 0xfb, 0x39,
                    0xde, 0x97, 0xaa, 0x41, 0x56, 0x2a, 0x3a, 0x02, 0x28, 0x5a, 0x28, 0x95, 0xbb,
                    0xa3, 0x9b, 0x29, 0x47, 0x9b, 0x10, 0xe3, 0xba, 0xc8, 0x9e, 0x67, 0xbb, 0x3c,
                    0x29, 0x7e, 0x8a, 0x17, 0x50, 0x28, 0x44, 0x67, 0xc3, 0x2d, 0xa7, 0x5c, 0x4d,
                    0x1d, 0x75, 0xa3, 0xab, 0x69, 0x8c, 0x0a, 0xed, 0x38, 0x59, 0xb6, 0xa6, 0xa5,
                    0xe5, 0x85, 0x02, 0x48, 0x07, 0x88, 0xdc, 0xc3, 0x8a, 0x45, 0x08, 0x5b, 0x1a,
                    0x5d, 0x22, 0x35, 0x21, 0xb7, 0x92, 0xc6, 0x3e, 0x49, 0x34, 0x8c, 0xa6, 0x79,
                    0xf2, 0x82, 0x40, 0x38, 0x5a, 0x08, 0x5e, 0x93, 0x91, 0x9e, 0x8b, 0x60, 0xb7,
                    0x65, 0x3a, 0x00, 0xa2, 0x59, 0x24, 0x5e, 0x96, 0xcb, 0x21, 0x2e, 0x47, 0xbb,
                    0x47, 0x2e, 0x05, 0x16, 0xc1, 0x51, 0x3d, 0xb2, 0x7e, 0xe5, 0x70, 0xdb, 0xf4,
                    0x9a, 0x4d, 0x2b, 0xbf, 0x26, 0xf8, 0x43, 0x3e, 0x5f, 0x98, 0x80, 0xc8, 0xa1,
                    0x83, 0x6f, 0x3f, 0x1f, 0xfb, 0x0b, 0x6c, 0xd8, 0xa8, 0xac, 0x77, 0x81, 0x57,
                    0x4f, 0x27, 0xdc, 0x96, 0xab, 0xc7, 0x1f, 0x01, 0x45, 0x70, 0x4d, 0xac, 0xd0,
                    0xbc, 0x52, 0x00, 0x93, 0xfe, 0x16, 0xae, 0x5a, 0xc5, 0x2a, 0x64, 0x02, 0x49,
                    0x31, 0xd4, 0x88, 0xd8, 0x5c, 0x74, 0x07, 0xd8, 0xef, 0x86, 0x1a, 0x22, 0xcb,
                    0x20, 0xa2, 0x7c, 0xe3, 0x7c, 0x20, 0xd9, 0x3f, 0x17,
                ],
                session_key: SessionKey([
                    0x11, 0x28, 0x94, 0x3b, 0xcb, 0x88, 0x3b, 0x5e, 0x1d, 0xf5, 0xda, 0xb0, 0xce,
                    0xf9, 0xa6, 0x1d, 0x82, 0xa4, 0xed, 0x69, 0x10, 0x35, 0xca, 0xf9, 0xe9, 0x59,
                    0x2f, 0x33, 0x32, 0xfc, 0x0d, 0xd9,
                ]),
            },
            TestVector {
                dh_priv: "67E14314_D5FBC506_99A9E30F_E59AC5DE_7A4EF8A3_30DE7F28\
                          82792FE1_CD5F693E_B49C0225_940A61EE_768F1544_FEDC125F\
                          52F31FD2_87F4CF68_5A848CCF_C6BB23FC_8CF7D52C_D47A271E\
                          A920A56E_C6B46B64_5CDED831_DAFE34DC_852D8215_7A3D093C\
                          B5ABE447_E4EEEF99_4A811A96_D4331765_8A78A683_32013CDA\
                          BFFB5BA4_060D103B_C64514FF_B47BDE97_2BB72A50_7B39854A\
                          FA4F8B58_125DCF3E_9C39F14B_D9B67FA9_B8B12896_86CC56BB\
                          9B1C8B64_9AFD2BEB_F64AEA9D_4D73B968_68871F9A_694E416C\
                          2F5E0217_0EB97175_8AEA1B9E_93EED7C3_5B8E16F6_054D1244\
                          68DA3E0B_5B80FA66_9F0A7041_7FD2B29D_B20AED55_18DE8F33\
                          B064D4A2_A28FE378_3D94BC77_3F6FB6BB",
                dh_pub: [
                    0x59, 0x00, 0x5c, 0x7f, 0x22, 0x4b, 0x41, 0x8f, 0xb8, 0x91, 0xe7, 0xad, 0x31,
                    0x56, 0xc0, 0x1f, 0xbc, 0x5e, 0x2a, 0xb0, 0x3a, 0xf1, 0x56, 0x3a, 0x7b, 0x28,
                    0x17, 0x92, 0x4d, 0x50, 0xdf, 0xc1, 0xd8, 0x38, 0x84, 0x24, 0xe5, 0x82, 0x96,
                    0x1a, 0xb3, 0x60, 0xcd, 0xf5, 0xec, 0xca, 0x1a, 0xcf, 0x66, 0x98, 0x31, 0xd3,
                    0x46, 0x4e, 0x58, 0x3f, 0xd2, 0xbd, 0x98, 0x8f, 0x6b, 0x07, 0x20, 0x36, 0xc7,
                    0xce, 0xc6, 0x4f, 0x7b, 0xcc, 0x77, 0xe2, 0x06, 0x95, 0x2c, 0x84, 0xf6, 0x65,
                    0x0f, 0x0d, 0x01, 0xc9, 0x66, 0xab, 0xe4, 0x7c, 0x08, 0xa3, 0x9c, 0xbe, 0x82,
                    0x28, 0x2b, 0xc8, 0x7d, 0x89, 0x2a, 0xba, 0x98, 0x0e, 0x4c, 0x28, 0xe5, 0x0f,
                    0x81, 0x32, 0x13, 0xb9, 0x31, 0x4f, 0x05, 0x90, 0x7b, 0x8b, 0x23, 0xc8, 0xf1,
                    0x2a, 0x2c, 0xc4, 0x93, 0xcf, 0xbd, 0xe2, 0x1e, 0x91, 0x9f, 0xb2, 0x84, 0x8a,
                    0xb2, 0xe7, 0x4f, 0x24, 0x11, 0x40, 0x19, 0x84, 0x7f, 0x15, 0xda, 0xf6, 0x8e,
                    0xda, 0x4c, 0x86, 0x13, 0x60, 0x78, 0xdf, 0xb7, 0xe4, 0x46, 0x17, 0x88, 0xf7,
                    0x04, 0x49, 0xf3, 0xf2, 0x9a, 0x0b, 0xd5, 0x84, 0x7b, 0xca, 0xab, 0x5d, 0x07,
                    0x5a, 0x88, 0x3a, 0xee, 0xc1, 0xb4, 0xcb, 0xbc, 0x55, 0x6f, 0x85, 0xc4, 0x0f,
                    0xa7, 0xaa, 0x4e, 0xe3, 0x29, 0xb1, 0x10, 0x0e, 0x00, 0xd6, 0x15, 0x05, 0x0b,
                    0x44, 0x84, 0x56, 0x29, 0x3c, 0x43, 0xaf, 0x36, 0x49, 0x1c, 0xbd, 0xd9, 0x78,
                    0x0d, 0x9f, 0x68, 0xb8, 0x62, 0x90, 0xb7, 0xb9, 0x81, 0x17, 0xfe, 0x59, 0x71,
                    0x88, 0x17, 0x0b, 0x41, 0x08, 0xe4, 0x4d, 0xfa, 0x97, 0xf0, 0x5f, 0x97, 0x01,
                    0x03, 0xa5, 0x2a, 0x0d, 0xc3, 0x0c, 0x8e, 0xe4, 0xa7, 0xb6, 0xab, 0xab, 0xe6,
                    0x49, 0x06, 0x38, 0x4e, 0xec, 0x3e, 0xf8, 0x2f, 0xfd,
                ],
                peer_pub: [
                    0xd1, 0x2f, 0x7d, 0x48, 0xea, 0x85, 0xd3, 0x6c, 0x32, 0x85, 0x76, 0xf9, 0xf3,
                    0x68, 0x21, 0x11, 0x17, 0x37, 0x3b, 0x19, 0xc4, 0xb1, 0xb2, 0x0c, 0xa4, 0x23,
                    0xa9, 0x9a, 0xfb, 0xa4, 0xa1, 0xe7, 0xc3, 0xb7, 0xad, 0x26, 0xa2, 0xed, 0xc4,
                    0x3d, 0xc8, 0xc3, 0x07, 0xe6, 0x81, 0x36, 0x59, 0x39, 0xd1, 0xe3, 0xf0, 0xd4,
                    0x76, 0xee, 0xfe, 0x1c, 0xb0, 0x31, 0xfe, 0xf7, 0xe8, 0x4f, 0x57, 0xd8, 0x3c,
                    0xa2, 0x84, 0x8c, 0x05, 0xe0, 0x0c, 0x1d, 0x30, 0xb8, 0x55, 0xdc, 0x72, 0x34,
                    0x03, 0x46, 0x23, 0x76, 0x92, 0x6b, 0x3e, 0x7f, 0x23, 0x7d, 0x95, 0x57, 0x68,
                    0x0d, 0xdf, 0x39, 0xe6, 0x43, 0x77, 0x37, 0xb8, 0x0b, 0x69, 0xc3, 0x51, 0xe9,
                    0x90, 0xb2, 0xce, 0x18, 0xd0, 0xcd, 0x21, 0x9b, 0x4f, 0xe0, 0x3c, 0xac, 0x6d,
                    0x91, 0xa7, 0x07, 0x08, 0xeb, 0x16, 0x20, 0x69, 0xb7, 0x57, 0x23, 0x16, 0xba,
                    0xbc, 0x11, 0x22, 0x52, 0xbc, 0x00, 0x5d, 0x62, 0x0a, 0xae, 0xdd, 0xc3, 0xed,
                    0x7a, 0xb4, 0xb1, 0xa3, 0xd1, 0x32, 0xb4, 0x39, 0x1b, 0x6e, 0xc2, 0xc2, 0x97,
                    0xfa, 0x72, 0xb7, 0x27, 0x62, 0x3d, 0xec, 0xa5, 0x90, 0xd6, 0x2b, 0xed, 0x06,
                    0x85, 0x44, 0x35, 0x9b, 0x93, 0xcb, 0xcc, 0xc0, 0x6d, 0x44, 0x47, 0x41, 0x03,
                    0xca, 0x02, 0x27, 0xcf, 0x40, 0xaf, 0x5f, 0xe4, 0x04, 0x9b, 0xd6, 0x80, 0xf4,
                    0x86, 0x1a, 0xf2, 0x8e, 0x1c, 0x2c, 0x22, 0x30, 0x1d, 0xc7, 0xd7, 0x54, 0x64,
                    0xf2, 0x3e, 0x4c, 0xcd, 0x9b, 0x2d, 0x8a, 0x05, 0x4e, 0x2f, 0xc0, 0x14, 0xb9,
                    0xf4, 0x40, 0xe4, 0x90, 0xf9, 0x13, 0x0e, 0xdd, 0xc8, 0x90, 0x96, 0xa9, 0x8d,
                    0x51, 0x9c, 0x52, 0x3d, 0xdd, 0xb9, 0x5c, 0x4c, 0xbc, 0x34, 0x4f, 0x81, 0x4f,
                    0xc2, 0x11, 0x32, 0xed, 0x1d, 0x91, 0xa7, 0x0d, 0x07,
                ],
                // Leading 0x00 in the expected key checks the zero-padding path.
                session_key: SessionKey([
                    0x00, 0xae, 0x45, 0x63, 0xa5, 0x62, 0xca, 0x68, 0x88, 0x93, 0xf6, 0xa4, 0xf6,
                    0xb9, 0xb9, 0x7d, 0xd1, 0x6b, 0xfe, 0xa2, 0xca, 0x2b, 0x64, 0xa1, 0x08, 0xcf,
                    0x7d, 0xea, 0xe6, 0x23, 0x4f, 0x79,
                ]),
            },
        ];
        for tv in test_vectors {
            let dh_priv = BigUint::from_str_radix(tv.dh_priv, 16).unwrap();
            let dh_pub = BigUint::from_bytes_be(&tv.dh_pub[..]);
            // Construct directly (bypassing `new`) so the keys are deterministic.
            let builder = DHSessionKeyBuilder { dh_priv, dh_pub };
            assert_eq!(builder.get_pub(), Vec::from(&tv.dh_pub[..]));
            let session_key = builder.build_session_key(&tv.peer_pub);
            assert_eq!(session_key.0, tv.session_key.0);
        }
    }
}
| true |
9ab1fb146ed44191fd8fb88670e022c7358902e4
|
Rust
|
po-gl/Raytracer
|
/src/tuple.rs
|
UTF-8
| 6,710 | 3.671875 | 4 |
[
"MIT"
] |
permissive
|
/// # tuple
/// `tuple` is a module to represent our most basic data structure, an ordered list
use std::ops;
use super::float::Float;
// An (x, y, z, w) quadruple of `Float`s. By convention (see `is_point` /
// `is_vector`), w == 1.0 marks a point and w == 0.0 marks a vector.
#[derive(Debug, PartialEq, Copy, Clone)]
pub struct Tuple {
    pub x: Float,
    pub y: Float,
    pub z: Float,
    pub w: Float,
}
impl Tuple {
    /// Builds a tuple, wrapping each component in the approximate-equality `Float`.
    pub fn new(x: f64, y: f64, z: f64, w: f64) -> Tuple {
        Tuple {x: Float(x), y: Float(y), z: Float(z), w: Float(w)}
    }
    /// Points carry w == 1.0.
    pub fn is_point(&self) -> bool {
        self.w == 1.0
    }
    /// Vectors carry w == 0.0.
    pub fn is_vector(&self) -> bool {
        self.w == 0.0
    }
    /// Euclidean length over all four components.
    pub fn magnitude(&self) -> f64 {
        (&self.x * &self.x + &self.y * &self.y + &self.z * &self.z + &self.w * &self.w).sqrt()
    }
    /// Returns a unit-length copy; the zero tuple normalizes to itself,
    /// avoiding a division by zero.
    pub fn normalize(&self) -> Tuple {
        let magnitude = self.magnitude();
        if Float(magnitude) == Float(0.0) {
            return Tuple::new(0.0, 0.0, 0.0, 0.0);
        } else {
            Tuple::new(&self.x.value() / magnitude, &self.y.value() / magnitude, &self.z.value() / magnitude, &self.w.value() / magnitude)
        }
    }
    /// Reflects this vector about `normal`: r = v - n * 2 * (v . n).
    pub fn reflect(&self, normal: &Tuple) -> Tuple {
        self - normal * 2.0 * dot(self, normal)
    }
}
/// Convenience constructor for a point: a `Tuple` whose `w` is fixed at 1.0.
pub fn point(x: f64, y: f64, z: f64) -> Tuple {
    let w = 1.0;
    Tuple::new(x, y, z, w)
}
/// Convenience constructor for a vector: a `Tuple` whose `w` is fixed at 0.0.
pub fn vector(x: f64, y: f64, z: f64) -> Tuple {
    let w = 0.0;
    Tuple::new(x, y, z, w)
}
/// Dot product over all four components, unwrapped to a raw f64.
pub fn dot(a: &Tuple, b: &Tuple) -> f64 {
    (&a.x * &b.x + &a.y * &b.y + &a.z * &b.z + &a.w * &b.w).value()
}
/// 3-D cross product of the x/y/z components (w is ignored); the result is
/// returned as a vector (w == 0.0).
pub fn cross(a: &Tuple, b: &Tuple) -> Tuple {
    vector((&a.y * &b.z - &a.z * &b.y).value(),
           (&a.z * &b.x - &a.x * &b.z).value(),
           (&a.x * &b.y - &a.y * &b.x).value())
}
// Operator overloads via `impl_op_ex!` (macro presumably generates the
// owned/borrowed operand combinations — defined outside this file).
// Addition: component-wise.
impl_op_ex!(+ |a: &Tuple, b: &Tuple| -> Tuple { Tuple {x: &a.x + &b.x, y: &a.y + &b.y, z: &a.z + &b.z, w: &a.w + &b.w} });
// Subtraction: component-wise.
impl_op_ex!(- |a: &Tuple, b: &Tuple| -> Tuple { Tuple {x: &a.x - &b.x, y: &a.y - &b.y, z: &a.z - &b.z, w: &a.w - &b.w} });
// Multiplication by an f64 scalar (component-wise variant intentionally disabled).
impl_op_ex!(* |a: &Tuple, s: f64| -> Tuple { Tuple {x: &a.x * s, y: &a.y * s, z: &a.z * s, w: &a.w * s} });
//impl_op_ex!(* |a: &Tuple, b: &Tuple| -> Tuple { Tuple {x: &a.x * &b.x, y: &a.y * &b.y, z: &a.z * &b.z, w: &a.w * &b.w} });
// Division by an f64 scalar (component-wise variant intentionally disabled).
impl_op_ex!(/ |a: &Tuple, s: f64| -> Tuple { Tuple {x: &a.x / s, y: &a.y / s, z: &a.z / s, w: &a.w / s} });
//impl_op_ex!(/ |a: &Tuple, b: &Tuple| -> Tuple { Tuple {x: &a.x / &b.x, y: &a.y / &b.y, z: &a.z / &b.z, w: &a.w / &b.w} });
// Negation (unary operator)
impl_op_ex!(- |a: &Tuple| -> Tuple { Tuple {x: 0.0 - &a.x, y: 0.0 - &a.y, z: 0.0 - &a.z, w: 0.0 - &a.w} });
#[cfg(test)]
mod tests {
    use super::*;
    // Construction and the point/vector discrimination via w.
    #[test]
    fn tuples() {
        let a = Tuple::new(4.3, -4.2, 3.1, 1.0);
        assert_eq!(a.x, 4.3);
        assert_eq!(a.y, -4.2);
        assert_eq!(a.z, 3.1);
        assert_eq!(a.w, 1.0);
        assert!(a.is_point());
        assert!(!a.is_vector());
        let a = Tuple::new(4.3, -4.2, 3.1, 0.0);
        assert_eq!(a.x, 4.3);
        assert_eq!(a.y, -4.2);
        assert_eq!(a.z, 3.1);
        assert_eq!(a.w, 0.0);
        assert!(!a.is_point());
        assert!(a.is_vector());
    }
    // Equality is approximate: tiny component differences still compare equal.
    #[test]
    fn tuple_eq() {
        let a = Tuple::new(43.8, 23.0, 1.0, 1.0);
        let b = Tuple::new(43.8, 23.0, 1.0, 1.0);
        assert_eq!(a, b);
        let a = Tuple::new(43.8, 23.0, 1.0, 1.0);
        let b = Tuple::new(43.8, 23.1, 1.0, 1.0);
        assert_ne!(a, b);
        let a = Tuple::new(43.8, 237.9, 324.0, 0.0);
        let b = Tuple::new(43.8, 237.9, 324.0, 0.0);
        assert_eq!(a, b);
        let a = Tuple::new(43.8, 23.0, 1.0, 1.0);
        let b = Tuple::new(43.8000000001, 23.0000000001, 1.00000000001, 1.0);
        assert_eq!(a, b);
        let a = Tuple::new(43.8000000001, 23.0000000001, 1.00000000001, 1.0);
        let b = Tuple::new(43.8, 23.0, 1.0, 1.0);
        assert_eq!(a, b);
    }
    #[test]
    fn tuple_creation() {
        let p = point(4.0, -4.0, 3.0);
        assert_eq!(p, Tuple::new(4.0, -4.0, 3.0, 1.0));
        let p = vector(4.0, -4.0, 3.0);
        assert_eq!(p, Tuple::new(4.0, -4.0, 3.0, 0.0));
    }
    #[test]
    fn tuple_operations() {
        // Addition
        let a = Tuple::new(3.0, -2.0, 5.0, 1.0);
        let b = Tuple::new(-2.0, 3.0, 1.0, 0.0);
        assert_eq!(a + b, Tuple::new(1.0, 1.0, 6.0, 1.0));
        // Subtraction (point - point = vector; point - vector = point; etc.)
        let a = point(3.0, 2.0, 1.0);
        let b = point(5.0, 6.0, 7.0);
        assert_eq!(a - b, vector(-2.0, -4.0, -6.0));
        let a = point(3.0, 2.0, 1.0);
        let b = vector(5.0, 6.0, 7.0);
        assert_eq!(a - b, point(-2.0, -4.0, -6.0));
        let a = vector(3.0, 2.0, 1.0);
        let b = vector(5.0, 6.0, 7.0);
        assert_eq!(a - b, vector(-2.0, -4.0, -6.0));
        let a = vector(3.0, 2.0, 1.0);
        assert_eq!(-a, vector(-3.0, -2.0, -1.0));
        // Negation
        let a = Tuple::new(1.0, -2.0, 3.0, -4.0);
        assert_eq!(-a, Tuple::new(-1.0, 2.0, -3.0, 4.0));
        // Scalar multiplication
        let a = Tuple::new(1.0, -2.0, 3.0, -4.0);
        assert_eq!(a * 3.5, Tuple::new(3.5, -7.0, 10.5, -14.0));
        let a = Tuple::new(1.0, -2.0, 3.0, -4.0);
        assert_eq!(a * 0.5, Tuple::new(0.5, -1.0, 1.5, -2.0));
        // Scalar division
        let a = Tuple::new(1.0, -2.0, 3.0, -4.0);
        assert_eq!(a / 2.0, Tuple::new(0.5, -1.0, 1.5, -2.0));
        // Magnitude
        let a = vector(1.0, 0.0, 0.0);
        assert_eq!(a.magnitude(), 1.0);
        let a = vector(0.0, 1.0, 0.0);
        assert_eq!(a.magnitude(), 1.0);
        let a = vector(0.0, 0.0, 1.0);
        assert_eq!(a.magnitude(), 1.0);
        let a = vector(1.0, 2.0, 3.0);
        assert_eq!(a.magnitude(), 14.0f64.sqrt());
        // Normalize (expected values rely on Float's approximate equality)
        let a = vector(1.0, 2.0, 3.0);
        assert_eq!(a.normalize(), vector(0.26726, 0.53452, 0.80178));
        // Dot product
        let a = vector(1.0, 2.0, 3.0);
        let b = vector(2.0, 3.0, 4.0);
        assert_eq!(dot(&a, &b), 20.0);
        // Cross product (anti-commutative: swapping operands flips the sign)
        let a = vector(1.0, 2.0, 3.0);
        let b = vector(2.0, 3.0, 4.0);
        assert_eq!(cross(&a, &b), vector(-1.0, 2.0, -1.0));
        assert_eq!(cross(&b, &a), vector(1.0, -2.0, 1.0));
    }
    #[test]
    fn tuple_reflect() {
        // Reflecting a vector approaching at a 45 deg angle
        let v = vector(1.0, -1.0, 0.0);
        let n = vector(0.0, 1.0, 0.0);
        let r = v.reflect(&n);
        assert_eq!(r, vector(1.0, 1.0, 0.0));
        // Reflecting a vector off a slanted surface
        let v = vector(0.0, -1.0, 0.0);
        let n = vector(2.0f64.sqrt()/2.0, 2.0f64.sqrt()/2.0, 0.0);
        let r = v.reflect(&n);
        assert_eq!(r, vector(1.0, 0.0, 0.0));
    }
    // Placeholder: no performance assertions implemented yet.
    #[test]
    fn tuple_performance() {
    }
}
| true |
b3b7a663651154c7b83723b3800b8ab0cae4e485
|
Rust
|
alexhausen/examples
|
/clean_code/src/domain/entity/cpf.rs
|
UTF-8
| 3,000 | 3.640625 | 4 |
[
"MIT"
] |
permissive
|
// value object (DDD)
#[allow(dead_code)]
use regex::Regex;
use std::fmt;
// Value object (DDD) wrapping a validated Brazilian CPF number.
#[derive(Clone)]
pub struct CPF {
    // Stored exactly as the caller supplied it (punctuation included, if any).
    pub value: String,
}
impl CPF {
    /// Creates a `CPF` after validating the number (check digits included).
    ///
    /// Accepts the digits bare ("86446422784") or punctuated
    /// ("864.464.227-84"); returns `Err` for anything invalid.
    pub fn new(cpf: &str) -> Result<CPF, &str> {
        if CPF::is_valid(cpf) {
            Ok(CPF {
                value: cpf.to_string(),
            })
        } else {
            Err("CPF Inválido")
        }
    }

    /// Full validation: strip punctuation, require 11 ASCII digits, reject
    /// the all-equal-digits numbers, then verify both check digits.
    fn is_valid(cpf: &str) -> bool {
        let clean_cpf = CPF::remove_punctuation(cpf);
        if !CPF::has_eleven_digits(&clean_cpf) {
            return false;
        }
        // Numbers like "00000000000" satisfy the checksum but are defined
        // as invalid CPFs.
        if CPF::are_all_digits_the_same(&clean_cpf) {
            return false;
        }
        let digit1 = CPF::compute_digit(&clean_cpf[0..9]);
        let digit2 = CPF::compute_digit(&clean_cpf[0..10]);
        CPF::match_digits(digit1, digit2, &clean_cpf)
    }

    /// Compares the computed check digits against positions 10 and 11.
    /// Callers guarantee `cpf` is exactly 11 ASCII digits.
    fn match_digits(digit1: u32, digit2: u32, cpf: &str) -> bool {
        let bytes = cpf.as_bytes();
        let cpf_digit1 = u32::from(bytes[9] - b'0');
        let cpf_digit2 = u32::from(bytes[10] - b'0');
        digit1 == cpf_digit1 && digit2 == cpf_digit2
    }

    /// Weighted-sum check digit: each digit is multiplied by a descending
    /// factor starting at `len + 1`; the digit is `0` when `sum % 11 < 2`,
    /// otherwise `11 - sum % 11`.
    fn compute_digit(part: &str) -> u32 {
        let factor = (part.len() + 1) as u32;
        let sum: u32 = part
            .chars()
            .enumerate()
            .map(|(i, c)| c.to_digit(10).unwrap() * (factor - i as u32))
            .sum();
        let rest = sum % 11;
        if rest < 2 {
            0
        } else {
            11 - rest
        }
    }

    /// Removes the conventional CPF punctuation characters `.`, `/` and `-`.
    fn remove_punctuation(cpf: &str) -> String {
        // Bug fix: the previous class was "[\\.|/|\\-]"; inside a character
        // class `|` is a literal, so stray '|' characters were silently
        // stripped too, letting inputs like "123|45678901" validate.
        let re = Regex::new("[./-]").unwrap();
        re.replace_all(cpf, "").into_owned()
    }

    /// True when every character equals the first (empty input counts as
    /// "all the same"; callers only pass 11-digit strings anyway).
    fn are_all_digits_the_same(cpf: &str) -> bool {
        let mut chars = cpf.chars();
        match chars.next() {
            Some(first) => chars.all(|c| c == first),
            None => true,
        }
    }

    /// True when the string is exactly 11 ASCII digits. Checking ASCII here
    /// (instead of the old regex `\d`, which matches any Unicode digit)
    /// guarantees the byte arithmetic in `match_digits` cannot panic.
    fn has_eleven_digits(cpf: &str) -> bool {
        cpf.len() == 11 && cpf.chars().all(|c| c.is_ascii_digit())
    }
}
impl fmt::Display for CPF {
    /// Writes the stored value verbatim.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // TODO format CPF output to string xxx.xxx.xxx-xx
        f.write_str(self.value.as_ref())
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    /// CPFs with correct check digits, bare and punctuated.
    /// (`assert!` replaces the `assert_eq!(…, true/false)` anti-idiom.)
    #[test]
    fn valid_cpfs() {
        assert!(CPF::is_valid("86446422784"));
        assert!(CPF::is_valid("91720489726"));
        assert!(CPF::is_valid("864.464.227-84"));
        assert!(CPF::is_valid("111.444.777-35"));
    }
    /// Rejected inputs: all-equal digits, wrong check digits, and
    /// non-digit characters at either end.
    #[test]
    fn invalid_cpfs() {
        assert!(!CPF::is_valid("00000000000"));
        assert!(!CPF::is_valid("111.111.111-11"));
        assert!(!CPF::is_valid("86446422799"));
        assert!(!CPF::is_valid("a1720489726"));
        assert!(!CPF::is_valid("86446422784a"));
    }
}
| true |
1153757889aa588e09ba5f3d1d721200a92e9f2e
|
Rust
|
ethanmott/aoc-2015
|
/day13/src/main.rs
|
UTF-8
| 4,296 | 3.359375 | 3 |
[] |
no_license
|
use common::files;
use itertools::Itertools;
// A person is identified simply by name.
type Person = String;
#[derive(Clone, Debug)]
struct Table {
    // Circular seating: seat i's neighbours are i-1 and i+1 modulo len.
    seats: Vec<Seat>
}
impl Table {
    /// Creates a table with `size` empty seats.
    pub fn new(size: usize) -> Self {
        let mut seats: Vec<Seat> = Vec::with_capacity(size);
        for _ in 0..size {
            seats.push(Seat::new())
        }
        Table {
            seats
        }
    }
    /// Places `person` in the first unoccupied seat and records them as the
    /// right neighbour of the previous seat and the left neighbour of the
    /// next one (the table wraps around).
    ///
    /// Panics if every seat is already taken.
    fn seat_person_in_first_available_seat(&mut self, person: Person) {
        let num_seats = self.seats.len();
        for i in 0..num_seats {
            let mut seat = &mut self.seats[i];
            if seat.person.is_none() {
                seat.person = Some(person.clone());
                // Update seats to the left and right with new person.
                // Adding num_seats before the modulo keeps the left index
                // non-negative when i == 0.
                self.seats[((i as i32 - 1) + num_seats as i32) as usize % num_seats].right = Some(person.clone());
                self.seats[(i + 1) % num_seats].left = Some(person.clone());
                return;
            }
        }
        panic!("No seats left for person: {}", person);
    }
}
#[derive(Clone, Debug)]
struct Seat {
    // Occupant of the seat to this seat's left, once known.
    left: Option<Person>,
    // Occupant of this seat; `None` while the seat is free.
    person: Option<Person>,
    // Occupant of the seat to this seat's right, once known.
    right: Option<Person>
}
impl Seat {
    /// Creates an empty seat: no occupant and no known neighbours yet.
    pub fn new() -> Self {
        Seat { left: None, person: None, right: None }
    }
}
#[derive(Clone, Debug)]
struct SeatingRule {
    // Whose happiness the rule affects.
    person: Person,
    // The neighbour that triggers the rule.
    next_to_person: Person,
    // Signed happiness change when the two end up adjacent.
    happiness_modifier: i32
}
fn main() {
    // One seating rule per input line.
    let rules: Vec<SeatingRule> = files::get_file_lines("day13.txt").iter()
        .map(|l| parse_seating_rule(l))
        .collect();
    // Every distinct person appearing on the left-hand side of a rule.
    let people: Vec<Person> = rules.iter()
        .map(|r| r.person.clone())
        .unique()
        .collect();
    // println!("part1 max: {:?}", find_max_total_happiness(people.clone(), rules.clone()));
    // Part 2: add yourself — with no rules, you contribute 0 happiness
    // next to anyone.
    let mut people2 = people.clone();
    people2.push(Person::from("Ethan"));
    println!("part2 max: {:?}", find_max_total_happiness(people2.clone(), rules.clone()));
}
/// Brute-forces every seating order of `people` and returns the highest
/// total happiness achievable under `rules`.
///
/// The redundant per-permutation clones of the permutation vector and the
/// table were removed; behavior (including the progress printouts) is
/// unchanged.
fn find_max_total_happiness(people: Vec<Person>, rules: Vec<SeatingRule>) -> i32 {
    let mut max = i32::min_value();
    for permutation in people.iter().permutations(people.len()) {
        // Seat this ordering around a fresh table.
        let mut table = Table::new(permutation.len());
        for person in &permutation {
            table.seat_person_in_first_available_seat((*person).clone());
        }
        // `rules` must still be cloned: get_table_happiness takes it by value.
        let happiness = get_table_happiness(table, rules.clone());
        if happiness > max {
            println!("Found new best so far! {} > {}", happiness, max);
            println!("{:?}", permutation);
            max = happiness;
        }
    }
    max
}
fn get_table_happiness(table: Table, rules: Vec<SeatingRule>) -> i32 {
let mut happiness: i32 = 0;
for seat in table.seats.iter() {
happiness += seat.person.clone().map_or(0, |seated_person| -> i32 {
let mut local_happiness: i32 = 0;
for rule in rules.iter() {
if seated_person == rule.person {
local_happiness += seat.left.clone()
.filter(|left_person| left_person == &rule.next_to_person)
.map_or(0, |_| rule.happiness_modifier);
local_happiness += seat.right.clone()
.filter(|right_person| right_person == &rule.next_to_person)
.map_or(0, |_| rule.happiness_modifier);
}
}
// println!("Seat: {:?}, local happiness: {:?}", seat, local_happiness);
local_happiness
});
}
happiness
}
fn parse_seating_rule(line: &String) -> SeatingRule {
let parts: Vec<&str> = line.split(' ').collect();
let person = parts[0].to_string();
let mut next_to_person = parts[10].to_string();
// remove the period
next_to_person.pop();
let happiness_direction = parts[2];
let multiplier = match happiness_direction {
"gain" => 1,
"lose" => -1,
_ => 1
};
let happiness_modifier_units = parts[3].parse::<i32>().unwrap();
println!("{:?}", parts);
SeatingRule {
person,
next_to_person,
happiness_modifier: multiplier * happiness_modifier_units
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_part1() {
}
}
| true |
b440ab97044f50300b8c6463552cdf2781408bb5
|
Rust
|
cuviper/auto_enums
|
/derive/src/derive/core/iter/double_ended_iterator.rs
|
UTF-8
| 1,320 | 2.703125 | 3 |
[
"MIT",
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
use crate::utils::*;
pub(crate) const NAME: &[&str] = &["DoubleEndedIterator"];
pub(crate) fn derive(data: &Data, items: &mut Vec<ItemImpl>) -> Result<()> {
#[cfg(feature = "try_trait")]
let try_trait = quote! {
#[inline]
fn try_rfold<__U, __F, __R>(&mut self, init: __U, f: __F) -> __R
where
__F: ::core::ops::FnMut(__U, Self::Item) -> __R,
__R: ::core::ops::Try<Ok = __U>;
};
// It is equally efficient if `try_rfold` can be used.
#[cfg(not(feature = "try_trait"))]
let try_trait = quote! {
#[inline]
fn rfold<__U, __F>(self, accum: __U, f: __F) -> __U
where
__F: ::core::ops::FnMut(__U, Self::Item) -> __U;
#[inline]
fn rfind<__P>(&mut self, predicate: __P) -> ::core::option::Option<Self::Item>
where
__P: ::core::ops::FnMut(&Self::Item) -> bool;
};
derive_trait!(
data,
Some(format_ident!("Item")),
parse_quote!(::core::iter::DoubleEndedIterator)?,
parse_quote! {
trait DoubleEndedIterator: ::core::iter::Iterator {
#[inline]
fn next_back(&mut self) -> ::core::option::Option<Self::Item>;
#try_trait
}
}?,
)
.map(|item| items.push(item))
}
| true |
eab0213cc7eea17a0bc204098942f4a6e8e987d0
|
Rust
|
magiclen/number-as
|
/tests/number_as.rs
|
UTF-8
| 1,164 | 2.59375 | 3 |
[
"MIT"
] |
permissive
|
use number_as::NumberAs;
macro_rules! number_as_batch_test {
($from:expr) => {
let _a: u8 = $from.number_as();
let _a: u16 = $from.number_as();
let _a: u32 = $from.number_as();
let _a: u64 = $from.number_as();
let _a: u128 = $from.number_as();
let _a: usize = $from.number_as();
let _a: i8 = $from.number_as();
let _a: i16 = $from.number_as();
let _a: i32 = $from.number_as();
let _a: i64 = $from.number_as();
let _a: i128 = $from.number_as();
let _a: isize = $from.number_as();
let _a: f32 = $from.number_as();
let _a: f64 = $from.number_as();
};
}
#[test]
fn it_works() {
number_as_batch_test!(0u8);
number_as_batch_test!(0u16);
number_as_batch_test!(0u32);
number_as_batch_test!(0u64);
number_as_batch_test!(0u128);
number_as_batch_test!(0usize);
number_as_batch_test!(0i8);
number_as_batch_test!(0i16);
number_as_batch_test!(0i32);
number_as_batch_test!(0i64);
number_as_batch_test!(0i128);
number_as_batch_test!(0isize);
number_as_batch_test!(0f32);
number_as_batch_test!(0f64);
}
| true |
46497f15bde711242bb7868fa3a6518d236949e8
|
Rust
|
ponderousmad/rust-streak
|
/src/camera.rs
|
UTF-8
| 982 | 3.265625 | 3 |
[] |
no_license
|
use cgmath::Point3;
use cgmath::Vector3;
use cgmath::Quaternion;
use cgmath::BaseFloat;
use num::traits::Float;
#[derive(Copy, Clone, Debug)]
pub struct Camera<F> {
pub position : Point3<F>,
pub orientation : Quaternion<F>,
pub field_of_view : F
}
impl<F> Camera<F> where F : BaseFloat + Float {
pub fn new(position: Point3<F>, orientation: Quaternion<F>, fov: F) -> Camera<F> {
Camera::<F> { position: position, orientation: orientation, field_of_view: fov }
}
pub fn up(self) -> Vector3<F> {
return self.orientation * Vector3::<F>::unit_y();
}
/// Calculate the ray for a given 'pixel'
/// x and y are in normalized device offsets in the range [-1, 1]
/// or less for a non-square aspect ratio.
pub fn ray(self, x: F, y: F) -> Vector3<F> {
let z : F = F::one() / F::tan(self.field_of_view / (F::one() + F::one()));
let ray = Vector3::<F>::new(x, y, z);
return self.orientation * ray;
}
}
| true |
6c7500d52908d4f57b44a19459dc5e197fbde9d6
|
Rust
|
jsadusk/expression
|
/src/simple_engine.rs
|
UTF-8
| 2,677 | 2.96875 | 3 |
[] |
no_license
|
use crate::error::*;
use crate::engine::*;
use crate::expression::*;
use crate::list::*;
use worm_cell::AtomicWormCellReader;
pub struct SimpleEngine<'a, ErrorType> {
terms: Vec<Box<dyn ExpressionCache<ErrorType> + 'a>>,
}
impl<'a, ErrorType> SimpleEngine<'a, ErrorType>
where ErrorType: 'a + std::error::Error + 'static
{
pub fn new() -> SimpleEngine<'a, ErrorType> {
SimpleEngine { terms: Vec::new() }
}
fn eval_term(&mut self, term: Term) -> Result<(), ExpressionError<ErrorType>> {
if !self.terms[term.0].evaluated() {
for subterm in self.terms[term.0].terms() {
self.eval_term(subterm)?;
}
self.terms[term.0].eval()
} else {
Ok(())
}
}
}
impl<'a, ET> Engine<'a> for SimpleEngine<'a, ET>
where ET: 'a + std::error::Error + 'static
{
type ErrorType = ET;
fn eval<'b, TermType>(&mut self, term: &'b TermType) -> Result<&'b TermType::ValueType, ExpressionError<Self::ErrorType>>
where TermType: TypedTerm {
self.eval_term(term.term())?;
term.try_get().map_err(|e| ExpressionError::<Self::ErrorType>::Engine(e))
}
fn term<Expr>(&mut self, expr: Expr) -> TypedTermImpl<Expr::ValueType>
where
Expr: Expression + 'a,
Self::ErrorType: From<Expr::ErrorType>
{
let expr_cache = Box::new(TypedExpressionCache::new(expr));
let term_result = AtomicWormCellReader::new(expr_cache.result.clone());
self.terms.push(expr_cache);
TypedTermImpl { term: Term(self.terms.len() - 1),
result: term_result}
}
fn list_term<ListExpr>(&mut self, expr: ListExpr) ->
ListTermImpl<ListExpr::ElementType>
where
ListExpr: ListExpression + 'a,
Self::ErrorType: From<ListExpr::ErrorType>
{
ListTermImpl::<ListExpr::ElementType>(self.term(ListExpressionWrapper::<ListExpr>(expr)))
}
fn random_list_term<ListExpr>(&mut self, expr: ListExpr) ->
ListTermImpl<ListExpr::ElementType>
where
ListExpr: RandomListExpression + Sync + 'a,
ListExpr::ElementSetup: Sync,
ListExpr::ElementType: Send,
ListExpr::ErrorType: Send,
Self::ErrorType: From<ListExpr::ErrorType>
{
self.list_term(RandomListExpressionWrapper::<ListExpr>(expr))
}
fn sequential_list_term<ListExpr>(&mut self, expr: ListExpr) ->
TypedTermImpl<Vec<ListExpr::ElementType>>
where
ListExpr: SequentialListExpression + 'a,
Self::ErrorType: From<ListExpr::ErrorType>
{
self.term(SequentialListExpressionWrapper::<ListExpr>(expr))
}
}
| true |
010c367700bd7d9f6581ef008ac30496af2cf685
|
Rust
|
swlody/advent-2019-rs
|
/src/day2.rs
|
UTF-8
| 1,130 | 2.8125 | 3 |
[] |
no_license
|
use crate::intcode;
use itertools::Itertools;
fn fix_program(program: &mut [i64], noun: i64, verb: i64) {
program[1] = noun;
program[2] = verb;
}
#[aoc_generator(day2)]
fn input_generator(input: &str) -> Vec<i64> {
input
.trim()
.split(',')
.map(|x| {
x.parse()
.unwrap_or_else(|_| panic!("Unable to parse \"{}\" as integer", x))
})
.collect()
}
#[aoc(day2, part1)]
pub fn solve_part1(program: &[i64]) -> i64 {
let mut program = program.to_vec();
fix_program(&mut program, 12, 2);
while let Some(_) = intcode::run_program(&mut program, &mut 0, &mut 0, &[]) {}
program[0]
}
#[aoc(day2, part2)]
pub fn solve_part2(program: &[i64]) -> i64 {
for inputs in (1..99).permutations(2) {
let mut program = program.to_vec();
let (noun, verb) = (inputs[0], inputs[1]);
fix_program(&mut program, noun, verb);
while let Some(_) = intcode::run_program(&mut program, &mut 0, &mut 0, &[]) {}
if program[0] == 19_690_720 {
return 100 * noun + verb;
}
}
unreachable!()
}
| true |
169e59ce3b7a12776ea18e4864af6df44cafbaf8
|
Rust
|
nikomatsakis/graph-compress
|
/src/lib.rs
|
UTF-8
| 2,687 | 2.625 | 3 |
[
"MIT",
"Apache-2.0"
] |
permissive
|
#![feature(conservative_impl_trait)]
#![feature(field_init_shorthand)]
#![feature(pub_restricted)]
#![feature(rustc_private)]
extern crate rustc_data_structures;
use rustc_data_structures::graph::{Graph, NodeIndex};
use rustc_data_structures::unify::UnificationTable;
use std::fmt::Debug;
#[cfg(test)]
#[macro_use]
mod test_macro;
mod construct;
mod classify;
use self::classify::Classify;
mod dag_id;
use self::dag_id::DagId;
#[cfg(test)]
mod test;
pub struct GraphReduce<'g, N> where N: 'g + Debug {
in_graph: &'g Graph<N, ()>,
start_nodes: &'g [NodeIndex],
unify: UnificationTable<DagId>,
}
struct Dag {
// The "parent" of a node is the node which reached it during the
// initial DFS. To encode the case of "no parent" (i.e., for the
// roots of the walk), we make `parents[i] == i` to start, which
// turns out be convenient.
parents: Vec<NodeIndex>,
// Additional edges beyond the parents.
cross_edges: Vec<(NodeIndex, NodeIndex)>,
// Nodes with no successors.
leaf_nodes: Vec<NodeIndex>,
}
#[derive(Copy, Clone, PartialEq, Eq, Hash)]
struct DagNode {
in_index: NodeIndex
}
impl<'g, N> GraphReduce<'g, N>
where N: Debug
{
pub fn new(in_graph: &'g Graph<N, ()>, start_nodes: &'g [NodeIndex]) -> Self {
let mut unify = UnificationTable::new();
// create a set of unification keys whose indices
// correspond to the indices from the input graph
for i in 0..in_graph.len_nodes() {
let k = unify.new_key(());
assert!(k == DagId::from_in_index(NodeIndex(i)));
}
GraphReduce { in_graph, unify, start_nodes }
}
pub fn compute(mut self) -> Graph<&'g N, ()> {
let dag = Classify::new(&mut self).walk();
construct::construct_graph(&mut self, dag)
}
fn inputs(&self, in_node: NodeIndex) -> impl Iterator<Item = NodeIndex> + 'g {
self.in_graph.predecessor_nodes(in_node)
}
fn mark_cycle(&mut self, in_node1: NodeIndex, in_node2: NodeIndex) {
let dag_id1 = DagId::from_in_index(in_node1);
let dag_id2 = DagId::from_in_index(in_node2);
self.unify.union(dag_id1, dag_id2);
}
/// Convert a dag-id into its cycle head representative. This will
/// be a no-op unless `in_node` participates in a cycle, in which
/// case a distinct node *may* be returned.
fn cycle_head(&mut self, in_node: NodeIndex) -> NodeIndex {
let i = DagId::from_in_index(in_node);
self.unify.find(i).as_in_index()
}
#[cfg(test)]
fn in_cycle(&mut self, ni1: NodeIndex, ni2: NodeIndex) -> bool {
self.cycle_head(ni1) == self.cycle_head(ni2)
}
}
| true |
816e994a627f4d6c82e0d132aa1ff8c6079b8f63
|
Rust
|
andyherbert/raytracer
|
/src/renderer/light.rs
|
UTF-8
| 2,463 | 3 | 3 |
[] |
no_license
|
use crate::{ComputedMesh, Ray, Vert, IntersectionCompute};
use std::sync::Arc;
#[derive(Clone)]
pub struct Light {
pub position: Vert,
pub intensity: Vert,
pub casts_shadows: bool,
}
impl Light {
pub fn new() -> Light {
Light {
position: Vert::new(0.0, 0.0, 0.0),
intensity: Vert::rgb(255, 255, 255),
casts_shadows: true,
}
}
pub fn lighting(&self, compute: &IntersectionCompute, shadowed: bool) -> Vert {
let effective_colour = self.intensity.clone() * compute.colour.clone();
let light_v = (compute.point.clone() - self.position.clone()).normalise();
let ambient = effective_colour.multiply_by_scalar(compute.triangle.material.ambient);
let light_dot_normal = light_v.dot_product(&compute.norm_v);
let (diffuse, specular) = if shadowed || light_dot_normal < 0.0 {
let diffuse = Vert::default();
let specular = Vert::default();
(diffuse, specular)
} else {
let diffuse = effective_colour.multiply_by_scalar(compute.triangle.material.diffuse).multiply_by_scalar(light_dot_normal);
let light_v = (compute.point.clone() - self.position.clone()).normalise();
let reflect_v = light_v.clone() - compute.norm_v.multiply_by_scalar(2.0).multiply_by_scalar(light_v.dot_product(&compute.norm_v));
let reflect_dot_eye = reflect_v.dot_product(&compute.eye_v);
if reflect_dot_eye <= 0.0 {
(diffuse, Vert::default())
} else {
let factor = reflect_dot_eye.powf(compute.triangle.material.shininess);
let specular = self.intensity.multiply_by_scalar(compute.triangle.material.specular * factor);
(diffuse, specular)
}
};
ambient + diffuse + specular
}
pub fn shadowed(&self, over_point: Vert, meshes: &Vec<Arc<ComputedMesh>>) -> bool {
let vec = self.position.clone() - over_point.clone();
let distance = vec.magnitude();
let direction = vec.normalise();
let ray = Ray::new(over_point, direction);
for mesh in meshes {
for triangle in &mesh.triangles {
if let Some((time, ..)) = triangle.intersects_with(&ray) {
if time < distance {
return true;
}
}
}
}
false
}
}
| true |
ece9b00499262c75836f8068c3b5d9c70f6f1094
|
Rust
|
xie007/rgo
|
/src/token.rs
|
UTF-8
| 13,469 | 3.59375 | 4 |
[
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0",
"MIT"
] |
permissive
|
use std::fmt;
use self::TokenKind::*;
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct TokenAndSpan {
pub token: Token,
pub span: Span,
}
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct Span {
pub start: u32,
pub end: u32,
}
pub trait Spanner {
fn span(&self) -> Span;
}
impl Spanner for Span {
fn span(&self) -> Span {
*self
}
}
/// Returns a new span from the start of the first element to the end of the last element.
/// This is useful in cases like the following:
/// ```
/// var a, b, c, d, e, f, g int
/// ```
/// We have `Vec<Spanned<Ident>>` for all the variable names, so we can get a span of the whole list.
///
/// If the vector is empty, this returns a default `Span`.
impl<T: Spanner> Spanner for Vec<T> {
fn span(&self) -> Span {
if self.is_empty() {
// XXX
return Span { start: 0, end: 0 };
}
Span {
start: self.first().unwrap().span().start,
end: self.last().unwrap().span().end,
}
}
}
/// Simply get the span embedded in this `Spanned`.
impl<T: fmt::Debug + Clone + PartialEq + Eq> Spanner for Spanned<T> {
fn span(&self) -> Span {
self.span
}
}
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct Spanned<T: fmt::Debug + Clone + PartialEq + Eq> {
pub span: Span,
pub item: T,
}
impl<T: fmt::Debug + Clone + PartialEq + Eq> Spanned<T> {
pub fn new(span: Span, item: T) -> Spanned<T> {
Spanned {
span: span,
item: item,
}
}
}
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct Token {
pub kind: TokenKind,
pub value: Option<String>,
}
impl fmt::Display for Token {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
// If the token contains a value, display it.
match self.value {
Some(ref v) => write!(f, "{}({})", self, v),
None => write!(f, "{}", self),
}
}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum TokenKind {
/// Identifier.
Ident,
// Delimiters.
/// (
LParen,
/// )
RParen,
/// [
LBracket,
/// ]
RBracket,
/// {
LBrace,
/// }
RBrace,
// Literals.
/// Decimal integer literal.
Decimal,
/// Octal integer literal.
Octal,
/// Hex integer literal.
Hex,
/// Floating-point literal.
Float,
/// Imaginary literal (e.g. `6.67428e-11i`).
Imaginary,
/// Rune literal (e.g. `'本'`, `'\U00101234'`).
Rune,
/// Interpreted string literal.
Str,
/// Raw string literal.
StrRaw,
// Keywords.
Break,
Case,
Chan,
Const,
Continue,
Default,
Defer,
Else,
Fallthrough,
For,
Func,
Go,
Goto,
If,
Import,
Interface,
Map,
Package,
Range,
Return,
Select,
Struct,
Switch,
Type,
Var,
// Operators.
/// +
Plus,
/// -
Minus,
/// *
Star,
/// /
Slash,
/// %
Percent,
/// &
And,
/// |
Or,
/// ^
Caret,
/// <<
Lshift,
/// >>
Rshift,
/// &^
BitClear,
/// &&
AndAnd,
/// ||
OrOr,
/// ==
Equals,
/// !=
NotEqual,
/// <
LessThan,
/// >
GreaterThan,
/// <=
LessThanOrEqual,
/// >=
GreaterThanOrEqual,
/// ++
Increment,
/// --
Decrement,
/// +=
PlusAssign,
/// -=
MinusAssign,
/// *=
StarAssign,
/// /=
SlashAssign,
/// %=
PercentAssign,
/// &=
AndAssign,
/// |=
OrAssign,
/// ^=
CaretAssign,
/// <<=
LshiftAssign,
/// >>=
RshiftAssign,
/// &^=
BitClearAssign,
/// !
Not,
/// =
Assign,
/// :=
ColonAssign,
/// <-
Arrow,
// Misc.
/// ...
Ellipsis,
/// ,
Comma,
/// .
Dot,
/// ;
Semicolon,
/// :
Colon,
/// End of file
Eof,
}
impl fmt::Display for TokenKind {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
// We're using the derived Debug impl for simplicity.
// It displays the name of each variant as written in the enum declaration.
fmt::Debug::fmt(self, f)
}
}
impl TokenKind {
pub fn precedence(self) -> i32 {
// Precedence Operator
// 5 * / % << >> & &^
// 4 + - | ^
// 3 == != < <= > >=
// 2 &&
// 1 ||
match self {
Star | Slash | Percent | Lshift | Rshift | And | BitClear => 5,
Plus | Minus | Or | Caret => 4,
Equals | NotEqual | LessThan | LessThanOrEqual | GreaterThan | GreaterThanOrEqual => 3,
AndAnd => 2,
OrOr => 1,
_ => panic!("BUG: calling .precedence() on a token which is not a binary operator"),
}
}
pub fn is_ident(self) -> bool {
self == Ident
}
pub fn is_unary_op(self) -> bool {
// unary_op = "+" | "-" | "!" | "^" | "*" | "&" | "<-" .
match self {
Plus | Minus | Not | Caret | Star | And | Arrow => true,
_ => false,
}
}
pub fn is_assign_op(self) -> bool {
// assign_op = [ add_op | mul_op ] "=" .
// add_op = "+" | "-" | "|" | "^" .
// mul_op = "*" | "/" | "%" | "<<" | ">>" | "&" | "&^" .
match self {
Assign | PlusAssign | MinusAssign | OrAssign | CaretAssign | StarAssign |
SlashAssign | PercentAssign | LshiftAssign | RshiftAssign | AndAssign |
BitClearAssign => true,
_ => false,
}
}
pub fn is_literal(self) -> bool {
match self {
Str | StrRaw | Decimal | Octal | Hex | Float | Imaginary | Rune => true,
_ => false,
}
}
pub fn can_start_statement(self) -> bool {
// Grammar:
// Statement =
// Declaration | LabeledStmt | SimpleStmt |
// GoStmt | ReturnStmt | BreakStmt | ContinueStmt | GotoStmt |
// FallthroughStmt | Block | IfStmt | SwitchStmt | SelectStmt | ForStmt |
// DeferStmt .
//
// SimpleStmt = EmptyStmt | ExpressionStmt | SendStmt | IncDecStmt | Assignment |
// ShortVarDecl .
if self.can_start_decl() || self.can_start_labeled_stmt() ||
self.can_start_simple_stmt() || self.can_start_go_stmt() ||
self.can_start_block() {
return true;
}
match self {
Return | Break | Continue | Goto | Fallthrough | If |
// XXX/TODO: make sure that this is correct.
Switch | Select | For | Defer => true,
_ => false,
}
}
pub fn can_start_block(self) -> bool {
self == LBrace
}
pub fn can_start_return_stmt(self) -> bool {
self == Return
}
pub fn can_start_labeled_stmt(self) -> bool {
// LabeledStmt = Label ":" Statement .
// Label = identifier .
self.is_ident()
}
pub fn can_start_go_stmt(self) -> bool {
self == Go
}
pub fn can_start_decl(self) -> bool {
trace!("can_start_decl");
// Declaration = ConstDecl | TypeDecl | VarDecl .
self == Const || self == Type || self == Var
}
pub fn can_start_simple_stmt(self) -> bool {
self == Semicolon || self.can_start_expr() || self.can_start_send_stmt() ||
self.can_start_inc_dec_stmt() || self.can_start_assignment() ||
self.can_start_short_var_decl()
}
pub fn can_start_expr(self) -> bool {
// Expression = UnaryExpr | Expression binary_op Expression .
// UnaryExpr = PrimaryExpr | unary_op UnaryExpr .
//
// binary_op = "||" | "&&" | rel_op | add_op | mul_op .
// rel_op = "==" | "!=" | "<" | "<=" | ">" | ">=" .
// add_op = "+" | "-" | "|" | "^" .
// mul_op = "*" | "/" | "%" | "<<" | ">>" | "&" | "&^" .
//
// unary_op = "+" | "-" | "!" | "^" | "*" | "&" | "<-" .
//
//
// PrimaryExpr =
// Operand |
// Conversion |
// PrimaryExpr Selector |
// PrimaryExpr Index |
// PrimaryExpr Slice |
// PrimaryExpr TypeAssertion |
// PrimaryExpr Arguments .
//
// Selector = "." identifier .
// Index = "[" Expression "]" .
// Slice = "[" ( [ Expression ] ":" [ Expression ] ) |
// ( [ Expression ] ":" Expression ":" Expression )
// "]" .
// TypeAssertion = "." "(" Type ")" .
// Arguments = "(" [ ( ExpressionList | Type [ "," ExpressionList ] ) [ "..." ] [ ","
// ] ] ")" .
//
// Conversion = Type "(" Expression [ "," ] ")" .
//
// MethodExpr = ReceiverType "." MethodName .
// ReceiverType = TypeName | "(" "*" TypeName ")" | "(" ReceiverType ")" .
// XXX/TODO: review this code - critical.
self.can_start_unary_expr()
}
pub fn can_start_unary_expr(self) -> bool {
self.can_start_primary_expr() || self.is_unary_op()
}
pub fn can_start_primary_expr(self) -> bool {
self.can_start_operand() || self.can_start_conversion()
}
pub fn can_start_operand(self) -> bool {
// Operand = Literal | OperandName | MethodExpr | "(" Expression ")" .
// OperandName = identifier | QualifiedIdent.
// MethodExpr = ReceiverType "." MethodName .
// ReceiverType = TypeName | "(" "*" TypeName ")" | "(" ReceiverType ")" .
//
// QualifiedIdent starts with an identifier.
// So does MethodExpr.
self.can_start_lit() || self.is_ident() || self == LParen
}
pub fn can_start_conversion(self) -> bool {
self.can_start_type()
}
pub fn can_start_type(self) -> bool {
// Type = TypeName | TypeLit | "(" Type ")" .
// TypeName = identifier | QualifiedIdent .
// TypeLit = ArrayType | StructType | PointerType | FunctionType | InterfaceType |
// SliceType | MapType | ChannelType .
self.is_ident() || self.can_start_type_lit() || self == LParen
}
pub fn can_start_type_lit(self) -> bool {
// TypeLit = ArrayType | StructType | PointerType | FunctionType | InterfaceType |
// SliceType | MapType | ChannelType .
self.can_start_array_type() || self.can_start_struct_type() ||
self.can_start_pointer_type() || self.can_start_func_type() ||
self.can_start_interface_type() || self.can_start_slice_type() ||
self.can_start_map_type() || self.can_start_chan_type()
}
pub fn can_start_pointer_type(self) -> bool {
self == Star
}
pub fn can_start_func_type(self) -> bool {
// FunctionType = "func" Signature .
self == Func
}
pub fn can_start_interface_type(self) -> bool {
// InterfaceType = "interface" "{" { MethodSpec ";" } "}" .
self == Interface
}
pub fn can_start_chan_type(self) -> bool {
// ChannelType = ( "chan" | "chan" "<-" | "<-" "chan" ) ElementType .
self == Chan || self == Arrow
}
pub fn can_start_lit(self) -> bool {
// Literal = BasicLit | CompositeLit | FunctionLit .
// BasicLit = int_lit | float_lit | imaginary_lit | rune_lit | string_lit .
self.can_start_basic_lit() || self.can_start_composite_lit() || self.can_start_func_lit()
}
pub fn can_start_basic_lit(self) -> bool {
self.is_literal()
}
pub fn can_start_composite_lit(self) -> bool {
// CompositeLit = LiteralType LiteralValue .
self.can_start_lit_type()
}
pub fn can_start_lit_type(self) -> bool {
// LiteralType = StructType | ArrayType | "[" "..." "]" ElementType |
// SliceType | MapType | TypeName .
self.can_start_struct_type() || self.can_start_array_type() || self == RBracket ||
self.can_start_slice_type() || self.can_start_map_type() || self.is_ident()
}
pub fn can_start_func_lit(self) -> bool {
// FunctionLit = "func" Function .
self == Func
}
pub fn can_start_struct_type(self) -> bool {
self == Struct
}
pub fn can_start_array_type(self) -> bool {
self == RBracket
}
pub fn can_start_slice_type(self) -> bool {
self == RBracket
}
pub fn can_start_map_type(self) -> bool {
self == Map
}
pub fn can_start_send_stmt(self) -> bool {
// SendStmt = Channel "<-" Expression .
// Channel = Expression .
self.can_start_expr()
}
pub fn can_start_inc_dec_stmt(self) -> bool {
// IncDecStmt = Expression ( "++" | "--" ) .
self.can_start_expr()
}
pub fn can_start_assignment(self) -> bool {
// Assignment = ExpressionList assign_op ExpressionList .
// ExpressionList = Expression { "," Expression } .
self.can_start_expr()
}
pub fn can_start_short_var_decl(self) -> bool {
// ShortVarDecl = IdentifierList ":=" ExpressionList .
// IdentifierList = identifier { "," identifier } .
self.is_ident()
}
}
| true |
fe2a435b2453920bdb6d9576b845335004f5b391
|
Rust
|
winksaville/fuchsia
|
/garnet/bin/odu/src/common_operations.rs
|
UTF-8
| 2,653 | 2.734375 | 3 |
[
"BSD-3-Clause"
] |
permissive
|
// Copyright 2019 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
use {
crate::file_target::FileBlockingTarget,
crate::target::{AvailableTargets, Error, Target, TargetOps},
libc::c_void,
log::debug,
std::{ops::Range, os::unix::io::RawFd, result::Result, sync::Arc, time::Instant},
};
pub fn pwrite(raw_fd: RawFd, buffer: &mut Vec<u8>, offset: i64) -> Result<(), Error> {
let ret =
unsafe { libc::pwrite(raw_fd, buffer.as_ptr() as *const c_void, buffer.len(), offset) };
debug!("safe_pwrite: {:?} {}", offset, ret);
if ret < 0 {
return Err(Error::DoIoError(std::io::Error::last_os_error().kind()));
} else if ret < buffer.len() as isize {
// TODO(auradkar): Define a set of error codes to be used throughout the app.
return Err(Error::ShortWrite);
}
Ok(())
}
/// Based on the input args, create_target searches available Targets and
/// creates an appropriate Target trait.
pub fn create_target(
target_type: AvailableTargets,
id: u64,
name: String,
offset: Range<u64>,
start: Instant,
) -> Arc<Box<dyn Target + Send + Sync>> {
match target_type {
AvailableTargets::FileTarget => FileBlockingTarget::new(name, id, offset, start),
}
}
/// Returned allowed TargetOps for giver Target `target_type`.
/// Allowed are the operations for which generator can generate io packets.
/// There maybe operations *supported* by the target for which generator
/// is not allowed to generate IO packets.
pub fn allowed_ops(target_type: AvailableTargets) -> &'static TargetOps {
match target_type {
AvailableTargets::FileTarget => FileBlockingTarget::allowed_ops(),
}
}
#[cfg(test)]
mod tests {
use {
crate::common_operations::pwrite,
crate::target::Error,
std::{fs::File, fs::OpenOptions, io::ErrorKind, os::unix::io::AsRawFd},
};
#[test]
fn test_pwrite_error_write_to_read_only_file() {
let file_name =
"/tmp/odu-common_operations-test_pwrite_error_write_to_read_only_file-file01"
.to_string();
// Create a file in rw mode if it doesn't exists.
File::create(&file_name).unwrap();
// Open the file in read-only mode and try to write to it.
let f = OpenOptions::new().read(true).write(false).open(file_name).unwrap();
let mut buffer = vec![0; 100];
let ret = pwrite(f.as_raw_fd(), &mut buffer, 0);
assert!(ret.is_err());
assert_eq!(ret.err(), Some(Error::DoIoError(ErrorKind::Other)));
}
}
| true |
dff69c88b731e545f7153a71f2b0c7ce3d5ee25f
|
Rust
|
go717franciswang/exercism
|
/rust/wordy/src/lib.rs
|
UTF-8
| 2,225 | 3.828125 | 4 |
[] |
no_license
|
pub struct WordProblem {
command: String
}
#[derive(Clone)]
enum Op {
Add,
Subtract,
Multiply,
Divide
}
impl WordProblem {
pub fn new(command: &str) -> Self {
WordProblem { command: command.to_string() }
}
pub fn answer(&self) -> Result<i32, &'static str> {
let mut command = self.command.to_string();
if command.starts_with("What is ") {
command.drain(..("What is ".len()));
} else {
return Err("Non math question")
}
let mut ans: i32 = Self::extract_num(&mut command).unwrap();
loop {
if command.is_empty() {
break;
}
let op = match Self::extract_op(&mut command) {
Some(op) => op,
None => return Err("Unknown command")
};
let n = match Self::extract_num(&mut command) {
Some(n) => n,
None => return Err("Can't find number")
};
match op {
Op::Add => ans += n,
Op::Subtract => ans -= n,
Op::Multiply => ans *= n,
Op::Divide => ans /= n,
}
}
Ok(ans)
}
fn extract_num(command: &mut String) -> Option<i32> {
let mut n: i32 = 0;
let mut sign: i32 = 1;
let mut len: usize = 0;
for (i, c) in command.char_indices() {
match c {
'-' if i == 0 => sign = -1,
'0'...'9' => n = n*10+(c as i32 - '0' as i32),
_ if i == 0 => return None,
_ => { len = i; break }
}
}
command.drain(..(len+1));
Some(n*sign)
}
fn extract_op(command: &mut String) -> Option<Op> {
let op_map = [
("plus ", Op::Add),
("minus ", Op::Subtract),
("multiplied by ", Op::Multiply),
("divided by ", Op::Divide)
];
for &(ref s, ref op) in op_map.iter() {
if command.starts_with(&s.to_string()) {
command.drain(..(s.len()));
return Some(op.clone())
}
}
return None
}
}
| true |
9e2cc7f767dbcca02f81ab5f06eec8b10d931c00
|
Rust
|
muzudho/rust-kifuwarabe-wcsc29-lib
|
/src/studio/board_size.rs
|
UTF-8
| 1,069 | 2.890625 | 3 |
[
"MIT"
] |
permissive
|
use studio::address::*;
pub const DEFAULT_FILE_LEN: usize = 9;
pub const DEFAULT_RANK_LEN: usize = 9;
#[derive(Clone, Copy, PartialEq)]
pub struct BoardSize {
pub file_len: i8,
pub rank_len: i8,
}
impl BoardSize {
pub fn create_hon_shogi() -> BoardSize {
BoardSize {
file_len: DEFAULT_FILE_LEN as i8,
rank_len: DEFAULT_RANK_LEN as i8,
}
}
pub fn cell_to_address(self, cell: Cell) -> usize {
((cell.get_rank() - 1) * self.file_len + (cell.get_file() - 1)) as usize
}
pub fn address_to_cell(self, address: usize) -> Cell {
Cell::from_file_rank(
(address % self.file_len as usize) as i8 + 1,
(address / self.file_len as usize) as i8 + 1,
)
}
pub fn len(self) -> usize {
(self.file_len * self.rank_len) as usize
}
pub fn is_empty(self) -> bool {
self.file_len * self.rank_len < 1
}
pub fn get_file_len(self) -> i8 {
self.file_len
}
pub fn get_rank_len(self) -> i8 {
self.rank_len
}
}
| true |
9911421e740f8cff2eb589dcbc7b4d605854c7d9
|
Rust
|
m-ou-se/ninj
|
/lib/spec/read.rs
|
UTF-8
| 8,170 | 2.546875 | 3 |
[] |
no_license
|
use super::error::{ExpansionError, ReadError};
use super::expand::{expand_path, expand_str, expand_var};
use super::parse::{Parser, Statement, Variable};
use super::scope::{BuildRuleScope, BuildScope, ExpandedVar, FileScope, Rule, VarScope};
use super::{BuildCommand, BuildRule, DepStyle, Spec};
use crate::error::{AddLocationToError, AddLocationToResult, ErrorWithLocation, Location};
use pile::Pile;
use raw_string::{RawStr, RawString};
use std::borrow::ToOwned;
use std::fs::File;
use std::io::{BufReader, Read};
use std::mem::replace;
use std::path::Path;
use std::str::from_utf8;
fn read_bytes(file_name: &Path) -> Result<Vec<u8>, ReadError> {
let mut bytes = Vec::new();
File::open(file_name)
.and_then(|f| BufReader::with_capacity(0x10000, f).read_to_end(&mut bytes))
.map_err(|error| ReadError::IoError {
file_name: file_name.to_owned(),
error,
})?;
Ok(bytes)
}
/// Read, parse, and resolve rules and variables in a `ninja.build` file.
///
/// Parses the file, including any included and subninja'd files, and resolves
/// all rules and variables, resulting in a `Spec`.
pub fn read(file_name: &Path) -> Result<Spec, ErrorWithLocation<ReadError>> {
let source = read_bytes(file_name).err_at(Location::UNKNOWN)?;
read_from(file_name, &source)
}
/// [`read()`], but with the source given directly instead of read from a file.
///
/// Useful for testing and fuzzing.
///
/// `file_name` is used in errors, and to know where to look for `include` and
/// `subninja` files.
pub fn read_from(file_name: &Path, source: &[u8]) -> Result<Spec, ErrorWithLocation<ReadError>> {
let pile = Pile::new();
let mut spec = Spec::new();
let mut scope = FileScope::new();
let mut pools = vec![("console".to_string(), 1)];
read_into(
file_name,
RawStr::from_bytes(source),
&pile,
&mut spec,
&mut scope,
&mut pools,
)?;
if let Some(var) = scope
.vars
.iter_mut()
.rfind(|var| var.name.as_bytes() == b"builddir")
{
spec.build_dir = Some(replace(&mut var.value, RawString::new()));
}
Ok(spec)
}
/// Parse `source` (the contents of `file_name`) and accumulate its
/// definitions into `spec`, `scope`, and `pools`.
///
/// Recurses for `include` statements (same scope, sources kept alive in
/// `pile`) and `subninja` statements (fresh sub-scope).
fn read_into<'a: 'p, 'p>(
    file_name: &Path,
    source: &'a RawStr,
    pile: &'a Pile<Vec<u8>>,
    spec: &mut Spec,
    scope: &mut FileScope<'a, 'p>,
    pools: &mut Vec<(String, u16)>,
) -> Result<(), ErrorWithLocation<ReadError>> {
    let mut parser = Parser::new(file_name, source);
    while let Some(statement) = parser.next_statement()? {
        let loc = parser.location();
        match statement {
            // Top-level `name = value`: expanded eagerly into the file scope.
            Statement::Variable { name, value } => {
                let value = expand_str(value, scope).err_at(loc)?;
                scope.vars.push(ExpandedVar { name, value })
            }
            Statement::Rule { name } => {
                if scope.rules.iter().any(|rule| rule.name == name) {
                    return Err(ReadError::DuplicateRule(name.to_string()).at(loc));
                }
                // Rule variables stay *unexpanded*; they are expanded per
                // build statement. Only the known rule variables are allowed.
                let mut vars = Vec::new();
                while let Some(var) = parser.next_variable()? {
                    if !match var.name {
                        "command" | "description" | "depfile" | "deps" | "msvc_deps_prefix" => true,
                        "rspfile" | "rspfile_content" | "generator" | "restat" | "pool" => true,
                        _ => false,
                    } {
                        return Err(
                            ReadError::UnknownVariable(var.name.to_string()).at(parser.location())
                        );
                    }
                    vars.push(var);
                }
                scope.rules.push(Rule { name, vars })
            }
            Statement::Pool { name } => {
                if pools.iter().any(|(n, _)| n == name) {
                    return Err(ReadError::DuplicatePool(name.to_string()).at(loc));
                }
                // The only variable a pool may define is `depth`.
                let mut depth = None;
                while let Some(Variable { name, value }) = parser.next_variable()? {
                    let loc = parser.location();
                    if name != "depth" {
                        return Err(ReadError::UnknownVariable(name.to_string()).at(loc));
                    }
                    // Expand the value.
                    let value = expand_str(value, scope).err_at(loc)?;
                    // Parse the value as an u32.
                    depth = Some(
                        from_utf8(value.as_bytes())
                            .ok()
                            .and_then(|s| s.parse().ok())
                            .ok_or_else(|| ReadError::InvalidPoolDepth.at(loc))?,
                    );
                }
                if let Some(depth) = depth {
                    pools.push((name.to_owned(), depth));
                } else {
                    return Err(ReadError::ExpectedPoolDepth.at(parser.location()));
                }
            }
            Statement::Build {
                rule_name,
                explicit_outputs,
                implicit_outputs,
                explicit_deps,
                implicit_deps,
                order_deps,
            } => {
                let mut vars = Vec::new();
                while let Some(Variable { name, value }) = parser.next_variable()? {
                    vars.push(ExpandedVar {
                        name,
                        value: expand_str(value, scope).err_at(parser.location())?,
                    });
                }
                // Bring the build variables into scope.
                let build_scope = BuildScope {
                    file_scope: &scope,
                    build_vars: &vars,
                };
                // And expand the input and output paths with it. Only the
                // *explicit* ones are expanded now, since $in/$out must not
                // include the implicit entries.
                let mut outputs =
                    Vec::with_capacity(explicit_outputs.len() + implicit_outputs.len());
                let mut inputs = Vec::with_capacity(explicit_deps.len() + implicit_deps.len());
                expand_paths_into(&explicit_outputs, &build_scope, &mut outputs).err_at(loc)?;
                expand_paths_into(&explicit_deps, &build_scope, &mut inputs).err_at(loc)?;
                let command = if rule_name == "phony" {
                    // Phony edges have no command at all.
                    None
                } else {
                    // Look up the rule in the current scope.
                    let rule = scope
                        .lookup_rule(rule_name)
                        .ok_or_else(|| ReadError::UndefinedRule(rule_name.to_string()).at(loc))?;
                    // Bring $in, $out, and the rule variables into scope.
                    let build_rule_scope = BuildRuleScope {
                        build_scope: &build_scope,
                        rule_vars: &rule.vars,
                        inputs: &inputs,
                        outputs: &outputs,
                    };
                    let expand_var = |name| expand_var(name, &build_rule_scope).err_at(loc);
                    // And expand the special variables with it:
                    // First the pool, and also look it up:
                    let pool = expand_var("pool")?;
                    let (pool, pool_depth) = if pool.is_empty() {
                        (String::new(), None)
                    } else {
                        let (n, d) = pools
                            .iter()
                            .find(|(name, _)| name.as_bytes() == pool.as_bytes())
                            .ok_or_else(|| ReadError::UndefinedPool(pool).at(loc))?;
                        (n.clone(), Some(*d))
                    };
                    // And then the rest:
                    Some(BuildCommand {
                        rule_name: rule_name.to_string(),
                        command: expand_var("command")?,
                        description: expand_var("description")?,
                        depfile: expand_var("depfile")?,
                        deps: match expand_var("deps")?.as_bytes() {
                            b"gcc" => Some(DepStyle::Gcc),
                            b"msvc" => Some(DepStyle::Msvc),
                            _ => None,
                        },
                        msvc_deps_prefix: expand_var("msvc_deps_prefix")?,
                        generator: build_rule_scope.lookup_var("generator").is_some(),
                        restat: build_rule_scope.lookup_var("restat").is_some(),
                        rspfile: expand_var("rspfile")?,
                        // BUG FIX: this previously expanded "rspfile" again,
                        // so rspfile_content always duplicated the rspfile
                        // path instead of the rule's rspfile_content value.
                        rspfile_content: expand_var("rspfile_content")?,
                        pool,
                        pool_depth,
                    })
                };
                // Implicit outputs/deps and order-only deps are expanded
                // after the command, so $in/$out didn't see them.
                let mut order = Vec::new();
                expand_paths_into(&implicit_outputs, &build_scope, &mut outputs).err_at(loc)?;
                expand_paths_into(&implicit_deps, &build_scope, &mut inputs).err_at(loc)?;
                expand_paths_into(&order_deps, &build_scope, &mut order).err_at(loc)?;
                spec.build_rules.push(BuildRule {
                    outputs,
                    inputs,
                    order_deps: order,
                    command,
                });
            }
            Statement::Default { paths } => {
                spec.default_targets.reserve(paths.len());
                for p in paths {
                    spec.default_targets.push(expand_str(p, scope).err_at(loc)?);
                }
            }
            Statement::Include { path } => {
                let path = expand_str(path, scope).err_at(loc)?;
                let path = path.to_path().err_at(loc)?;
                // NOTE(review): the file is read via `path` as-is, but the
                // recursion resolves it relative to `file_name` — confirm
                // which resolution is intended for relative include paths.
                let source = pile.add(read_bytes(&path).err_at(loc)?);
                read_into(
                    &file_name.with_file_name(path),
                    RawStr::from_bytes(source),
                    pile,
                    spec,
                    scope,
                    pools,
                )?;
            }
            Statement::SubNinja { path } => {
                let path = expand_str(path, scope).err_at(loc)?;
                let path = path.to_path().err_at(loc)?;
                let source = read_bytes(&path).err_at(loc)?;
                // Sub-ninja files get their own variable scope (and their
                // own pile, since nothing outlives this call).
                read_into(
                    &file_name.with_file_name(path),
                    RawStr::from_bytes(&source),
                    &Pile::new(),
                    spec,
                    &mut scope.new_subscope(),
                    pools,
                )?;
            }
        }
    }
    Ok(())
}
/// Expand each path in `sources` within `scope`, appending the results to
/// `vec`. Stops at (and returns) the first expansion error.
fn expand_paths_into<S: VarScope>(
    sources: &[&RawStr],
    scope: &S,
    vec: &mut Vec<RawString>,
) -> Result<(), ExpansionError> {
    vec.reserve(sources.len());
    sources
        .iter()
        .try_for_each(|source| expand_path(source, scope).map(|path| vec.push(path)))
}
| true |
665c2088f2d94db9446a79acd317871b8af928ea
|
Rust
|
PsichiX/CallOfFerris-RAUI
|
/src/components/cloud.rs
|
UTF-8
| 1,750 | 2.78125 | 3 |
[] |
no_license
|
use ggez::{
graphics,
mint::{Point2, Vector2},
Context, GameResult,
};
use graphics::DrawParam;
use crate::{
utils::{AssetManager, Position},
WIDTH,
};
/// A decorative cloud sprite that scrolls horizontally across the screen.
pub struct Cloud {
    position: Position, // current location plus the sprite's dimensions
    scale: f32,         // uniform draw scale
    speed: f32,         // horizontal speed factor (pixels/second, scaled by dt)
}
impl Cloud {
    /// Builds a cloud at (`pos_x`, `pos_y`) moving right at `speed`, drawn
    /// at the given uniform `scale`. The sprite's dimensions are taken from
    /// the asset manager's cloud image.
    pub fn new(
        pos_x: f32,
        pos_y: f32,
        scale: f32,
        speed: f32,
        asset_manager: &AssetManager,
    ) -> Self {
        let sprite = asset_manager.get_image("Some(cloud).png");
        Self {
            position: Position::new(pos_x, pos_y, sprite.width(), sprite.height()),
            scale,
            speed,
        }
    }

    /// Renders the cloud sprite at its current position.
    pub fn draw(&mut self, ctx: &mut Context, asset_manager: &AssetManager) -> GameResult<()> {
        let sprite = asset_manager.get_image("Some(cloud).png");
        let params = DrawParam::default()
            .scale(Vector2 {
                x: self.scale,
                y: self.scale,
            })
            .dest(Point2 {
                x: self.position.pos_start.x,
                y: self.position.pos_start.y,
            });
        graphics::draw(ctx, &sprite, params)?;
        Ok(())
    }

    /// Moves the cloud rightwards by `speed * dt`; once it scrolls past the
    /// right edge it wraps around to just off the left edge, same height.
    pub fn update(&mut self, ctx: &mut Context, asset_manager: &AssetManager) {
        let sprite = asset_manager.get_image("Some(cloud).png");
        let dt = ggez::timer::delta(ctx).as_secs_f32();
        self.position.move_by("x+", dt * self.speed);
        if self.position.pos_start.x > WIDTH + 100. {
            self.position = Position::new(
                -100.,
                self.position.pos_start.y,
                sprite.width(),
                sprite.height(),
            );
        }
    }
}
| true |
61763e2dbfa22b190807e2e1b59fd96347f3c4b8
|
Rust
|
mdk97/Algorithms
|
/Rust/fibonacci.rs
|
UTF-8
| 239 | 3.359375 | 3 |
[] |
no_license
|
/// Returns the `x`-th Fibonacci number, 1-indexed (F(1) = F(2) = 1).
///
/// Runs iteratively in O(x) time and O(1) space. The previous naive double
/// recursion was exponential, and for `x <= 0` it recursed without a base
/// case (stack overflow); non-positive inputs now return 0.
fn fibonacci(x: i32) -> i32
{
    if x <= 0 {
        return 0;
    }
    let (mut prev, mut curr) = (0i32, 1i32);
    for _ in 1..x {
        let next = prev + curr;
        prev = curr;
        curr = next;
    }
    curr
}
/// Prints the first nine Fibonacci numbers (F(1) through F(9)), one per line.
fn main()
{
    (1..10).for_each(|x| println!("{}", fibonacci(x)));
}
| true |
13d2d2bdaa65ba595016e88effbd0d11ae04a5dd
|
Rust
|
isgasho/flowbetween
|
/flo/src/editor/editor_controller.rs
|
UTF-8
| 6,675 | 2.609375 | 3 |
[
"Apache-2.0"
] |
permissive
|
use super::menu_controller::*;
use super::canvas_controller::*;
use super::toolbox_controller::*;
use super::timeline_controller::*;
use super::controlbar_controller::*;
use super::super::model::*;
use super::super::style::*;
use flo_ui::*;
use flo_ui_files::ui::*;
use flo_binding::*;
use flo_animation::*;
use std::sync::*;
use std::marker::PhantomData;
use std::collections::HashMap;
use serde_json;
/// Identifies one of the editor's sub-controllers. Serialized to JSON to
/// form the controller id strings used in the UI tree (see `ui()` and
/// `get_subcontroller()`).
#[derive(Serialize, Deserialize, Clone, PartialEq, Eq, Hash)]
enum SubController {
    Canvas,
    Menu,
    ControlBar,
    Timeline,
    Toolbox
}
///
/// The editor controller manages the editing of a single file
///
pub struct EditorController<Anim: FileAnimation> {
    /// Phantom data so we can have the animation type
    anim: PhantomData<Anim>,

    /// The main editor UI
    ui: Binding<Control>,

    /// The subcontrollers for this editor, keyed by their identifier
    /// (the JSON form of the key is the controller id in the UI tree)
    subcontrollers: HashMap<SubController, Arc<dyn Controller>>
}
impl<Loader: 'static+FileAnimation> EditorController<Loader>
where Loader::NewAnimation: 'static+EditableAnimation {
    ///
    /// Creates a new editor controller from an animation
    ///
    pub fn new(animation: Loader::NewAnimation) -> EditorController<Loader> {
        let animation = FloModel::new(animation);
        Self::from_model(animation)
    }
    ///
    /// Creates a new editor controller from a model
    ///
    pub fn from_model(animation: FloModel<Loader::NewAnimation>) -> EditorController<Loader> {
        // Each sub-controller shares the same underlying model.
        let canvas = Arc::new(CanvasController::new(&animation));
        let menu = Arc::new(MenuController::new(&animation));
        let timeline = Arc::new(TimelineController::new(&animation));
        let toolbox = Arc::new(ToolboxController::new(&animation));
        let control_bar = Arc::new(ControlBarController::new(&animation));
        let ui = bind(Self::ui());
        let mut subcontrollers: HashMap<SubController, Arc<dyn Controller>> = HashMap::new();
        subcontrollers.insert(SubController::Canvas, canvas);
        subcontrollers.insert(SubController::Menu, menu);
        subcontrollers.insert(SubController::Timeline, timeline);
        subcontrollers.insert(SubController::Toolbox, toolbox);
        subcontrollers.insert(SubController::ControlBar, control_bar);
        EditorController {
            anim: PhantomData,
            ui: ui,
            subcontrollers: subcontrollers,
        }
    }
    ///
    /// Creates the menu bar control for this session (32px tall strip)
    ///
    fn menu_bar() -> Control {
        use self::Position::*;
        Control::container()
            .with(Bounds {
                x1: Start,
                y1: After,
                x2: End,
                y2: Offset(32.0)
            })
            .with_controller(&serde_json::to_string(&SubController::Menu).unwrap())
    }
    ///
    /// Creates the timeline control (256px tall strip)
    ///
    pub fn timeline() -> Control {
        use self::Position::*;
        Control::container()
            .with(Bounds {
                x1: Start,
                y1: After,
                x2: End,
                y2: Offset(256.0)
            })
            .with_controller(&serde_json::to_string(&SubController::Timeline).unwrap())
    }
    ///
    /// Creates the toolbar control (fixed-width column on the left)
    ///
    pub fn toolbox() -> Control {
        use self::Position::*;
        Control::container()
            .with(Bounds {
                x1: Start,
                y1: After,
                x2: Offset(TOOL_CONTROL_SIZE),
                y2: End
            })
            .with_controller(&serde_json::to_string(&SubController::Toolbox).unwrap())
    }
    ///
    /// Creates the canvas control (stretches to fill the remaining width)
    ///
    pub fn canvas() -> Control {
        use self::Position::*;
        Control::container()
            .with(Bounds {
                x1: After,
                y1: Start,
                x2: Stretch(1.0),
                y2: End
            })
            .with_controller(&serde_json::to_string(&SubController::Canvas).unwrap())
    }
    ///
    /// Creates the control bar control (28px strip above the timeline)
    ///
    pub fn control_bar() -> Control {
        Control::container()
            .with(Bounds::next_vert(28.0))
            .with(Appearance::Background(TIMESCALE_BACKGROUND))
            .with(Font::Size(12.0))
            .with(Font::Weight(FontWeight::Light))
            .with_controller(&serde_json::to_string(&SubController::ControlBar).unwrap())
    }
    ///
    /// Creates the UI tree for this controller: menu bar on top, then
    /// toolbox + canvas side by side, then the control bar and timeline
    /// separated by 1px divider lines
    ///
    pub fn ui() -> Control {
        use self::Position::*;
        let menu_bar = Self::menu_bar();
        let timeline = Self::timeline();
        let toolbar = Self::toolbox();
        let canvas = Self::canvas();
        let control_bar = Self::control_bar();
        Control::container()
            .with(Bounds::fill_all())
            .with(vec![
                menu_bar,
                Control::container()
                    .with((vec![toolbar, canvas],
                        Bounds { x1: Start, y1: After, x2: End, y2: Stretch(1.0) })),
                Control::empty()
                    .with(Bounds::next_vert(1.0))
                    .with(Appearance::Background(TIMESCALE_BORDER)),
                control_bar,
                Control::empty()
                    .with(Bounds::next_vert(1.0))
                    .with(Appearance::Background(TIMESCALE_LAYERS)),
                timeline])
    }
}
impl<Loader: 'static+FileAnimation> Controller for EditorController<Loader>
where Loader::NewAnimation: 'static+EditableAnimation {
    fn ui(&self) -> BindRef<Control> {
        BindRef::new(&self.ui)
    }

    fn get_subcontroller(&self, id: &str) -> Option<Arc<dyn Controller>> {
        // Controller ids are JSON-encoded `SubController` values; anything
        // that fails to decode simply has no subcontroller.
        serde_json::from_str(id)
            .ok()
            .and_then(|decoded_id| self.subcontrollers.get(&decoded_id).cloned())
    }
}
impl<Loader: 'static+FileAnimation> FileController for EditorController<Loader>
where Loader::NewAnimation: 'static+EditableAnimation {
    /// The model that this controller needs to be constructed
    type Model = FloSharedModel<Loader>;
    ///
    /// Creates this controller with the specified instance model
    /// (delegates to `from_model`, which builds all the sub-controllers)
    ///
    fn open(model: FloModel<Loader::NewAnimation>) -> Self {
        Self::from_model(model)
    }
}
/// Required by the binding library; editor controllers are treated as
/// always-distinct instances.
impl<Loader: 'static+FileAnimation> PartialEq for EditorController<Loader> {
    fn eq(&self, _rhs: &Self) -> bool {
        // These are never equal at the moment (this is needed for the binding library)
        false
    }
}
| true |
dab928c0b5cbdfe1205982d17e8f391d1eedd7f4
|
Rust
|
isgasho/shine
|
/crates/shine-stdext/tests/indexedarena.rs
|
UTF-8
| 4,443 | 2.6875 | 3 |
[
"Apache-2.0",
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
use rand;
use log::{debug, trace};
use permutohedron::Heap;
use rand::seq::SliceRandom;
use std::cell::Cell;
use std::mem;
use shine_stdext::arena::IndexedArena;
use shine_testutils::init_test;
// Increments the shared counter each time a value is dropped, so the tests
// can verify exactly how many arena entries have been destroyed so far.
struct DropTracker<'a>(&'a Cell<usize>);
impl<'a> Drop for DropTracker<'a> {
    fn drop(&mut self) {
        trace!("drop");
        self.0.set(self.0.get() + 1);
    }
}
// Test payload: an integer id plus a drop tracker.
struct Node<'a>(i32, DropTracker<'a>);
/// Basic allocate / deallocate / re-allocate round trip: checks `len()`
/// bookkeeping, that values survive unrelated deallocation, and that drops
/// happen exactly when a removed value goes out of scope.
#[test]
fn simple() {
    init_test(module_path!());
    let drop_counter = Cell::new(0);
    {
        let mut arena = IndexedArena::new();
        debug!("store");
        assert_eq!(arena.len(), 0);
        let (id1, _) = arena.allocate(Node(1, DropTracker(&drop_counter)));
        assert_eq!(arena.len(), 1);
        let (id2, _) = arena.allocate(Node(2, DropTracker(&drop_counter)));
        assert_eq!(arena.len(), 2);
        let (id3, _) = arena.allocate(Node(3, DropTracker(&drop_counter)));
        assert_eq!(arena.len(), 3);
        let (id4, _) = arena.allocate(Node(4, DropTracker(&drop_counter)));
        assert_eq!(arena.len(), 4);
        assert_eq!(arena[id1].0, 1);
        assert_eq!(arena[id2].0, 2);
        assert_eq!(arena[id3].0, 3);
        assert_eq!(arena[id4].0, 4);
        assert_eq!(drop_counter.get(), 0);
        debug!("remove");
        // Deallocation returns the value; the drop only fires when the
        // returned value itself is dropped.
        let node3 = arena.deallocate(id3);
        assert_eq!(arena.len(), 3);
        assert_eq!(drop_counter.get(), 0);
        mem::drop(node3);
        assert_eq!(drop_counter.get(), 1);
        debug!("add");
        // The freed slot is reused for the next allocation.
        let (id3, _) = arena.allocate(Node(103, DropTracker(&drop_counter)));
        assert_eq!(arena.len(), 4);
        assert_eq!(arena[id1].0, 1);
        assert_eq!(arena[id2].0, 2);
        assert_eq!(arena[id3].0, 103);
        assert_eq!(arena[id4].0, 4);
    }
    // Dropping the arena drops the 4 live nodes (plus the 1 already dropped).
    assert_eq!(drop_counter.get(), 5);
}
#[test]
fn stress() {
init_test(module_path!());
let mut data = [1usize, 2, 5, 7, 100, 4000];
let mut heap = Heap::new(&mut data);
while let Some(sizes) = heap.next_permutation() {
trace!("permutation {:?}", sizes);
let drop_counter = Cell::new(0);
let mut drop_count = 0;
{
let mut arena = IndexedArena::new();
for &mut cnt in sizes.into_iter() {
let rem = cnt / 2;
let mut ids = Vec::new();
trace!("store {}", cnt);
for i in 0..cnt {
assert_eq!(arena.len(), i);
let (id, _) = arena.allocate(Node(i as i32, DropTracker(&drop_counter)));
ids.push((i as i32, id));
}
assert_eq!(arena.len(), cnt);
assert_eq!(drop_counter.get(), drop_count);
ids.shuffle(&mut rand::thread_rng());
trace!("check");
for v in ids.iter() {
assert_eq!(arena[v.1].0, v.0);
}
trace!("remove half");
for i in 0..rem {
assert_eq!(drop_counter.get(), drop_count + i);
assert_eq!(arena.len(), cnt - i);
let d = arena.deallocate(ids[i].1);
mem::drop(d);
ids[i].1 = usize::max_value();
}
assert_eq!(arena.len(), cnt - rem);
assert_eq!(drop_counter.get(), drop_count + rem);
trace!("check");
for v in ids.iter() {
if v.1 != usize::max_value() {
assert_eq!(arena[v.1].0, v.0);
}
}
trace!("add back");
for v in ids.iter_mut() {
if v.1 == usize::max_value() {
let (id, _) = arena.allocate(Node(-v.0, DropTracker(&drop_counter)));
v.1 = id;
}
}
assert_eq!(arena.len(), ids.len());
assert_eq!(drop_counter.get(), drop_count + rem);
trace!("check");
for v in ids.iter() {
assert!(arena[v.1].0 == v.0 || arena[v.1].0 == -v.0);
}
arena.clear();
assert_eq!(arena.len(), 0);
assert_eq!(drop_counter.get(), drop_count + rem + cnt);
drop_count += rem + cnt;
}
}
assert_eq!(drop_counter.get(), drop_count);
}
}
| true |
396aa445793d6f6ef433248fcd2949eb450c9c24
|
Rust
|
danmarcab/advent-of-code-2019
|
/src/day9.rs
|
UTF-8
| 6,008 | 3.046875 | 3 |
[] |
no_license
|
use std::collections::HashMap;
/// Parses the comma-separated Intcode program text into a vector of `i64`s.
#[aoc_generator(day9)]
pub fn input_generator(input: &str) -> Vec<i64> {
    input
        .trim()
        .split(',')
        .map(|token| token.parse::<i64>().unwrap())
        .collect()
}
/// Runs the BOOST program in test mode (input value 1) and returns all
/// outputs joined with ", ".
#[aoc(day9, part1)]
pub fn part1(program: &[i64]) -> String {
    let mut computer = IntCode::init(program, vec![1]);
    computer.run();
    let rendered: Vec<String> = computer.output.iter().map(|v| v.to_string()).collect();
    rendered.join(", ")
}
/// Runs the BOOST program in sensor-boost mode (input value 2) and returns
/// all outputs joined with ", ".
#[aoc(day9, part2)]
pub fn part2(program: &[i64]) -> String {
    let mut computer = IntCode::init(program, vec![2]);
    computer.run();
    let rendered: Vec<String> = computer.output.iter().map(|v| v.to_string()).collect();
    rendered.join(", ")
}
/// An Intcode virtual machine (AoC 2019, day 9 feature set).
struct IntCode {
    memory: HashMap<i64, i64>, // sparse memory, address -> value
    pc: i64,                   // program counter
    relative_offset: i64,      // base for relative-mode (mode 2) parameters
    input: Vec<i64>,           // pending input values, consumed front-first
    output: Vec<i64>,          // values emitted by opcode 4
    done: bool,                // set when opcode 99 executes
}
impl IntCode {
    /// Creates a machine with `program` loaded at address 0 and the given
    /// input queue.
    fn init(program: &[i64], input: Vec<i64>) -> IntCode {
        IntCode {
            memory: program
                .iter()
                .enumerate()
                .map(|(a, b)| (a as i64, *b))
                .collect(),
            pc: 0,
            relative_offset: 0,
            input,
            output: vec![],
            done: false,
        }
    }

    /// Executes instructions until opcode 99 (halt).
    fn run(&mut self) {
        while !self.done {
            self.step();
        }
    }

    /// Decodes and executes the single instruction at `pc`.
    fn step(&mut self) {
        // `pc` always points at a previously written instruction, so a
        // direct index is safe here.
        let op_code = self.memory[&self.pc];
        if self.done {
            println!("You are trying to step a done IntCode");
            unreachable!();
        }
        match to_digits(op_code) {
            // 1: add
            (mode_t, mode_b, mode_a, 0, 1) => {
                let param_a: i64 = self.param_value(mode_a, 1);
                let param_b: i64 = self.param_value(mode_b, 2);
                let target_dir: i64 = self.dest_dir(mode_t, 3);
                self.memory.insert(target_dir, param_a + param_b);
                self.pc += 4;
            }
            // 2: multiply
            (mode_t, mode_b, mode_a, 0, 2) => {
                let param_a: i64 = self.param_value(mode_a, 1);
                let param_b: i64 = self.param_value(mode_b, 2);
                let target_dir: i64 = self.dest_dir(mode_t, 3);
                self.memory.insert(target_dir, param_a * param_b);
                self.pc += 4;
            }
            // 3: read one input value into memory
            (0, 0, mode, 0, 3) => {
                let target_dir: i64 = self.dest_dir(mode, 1);
                if self.input.is_empty() {
                    println!("need input");
                    unreachable!();
                }
                let val = self.input.remove(0);
                self.memory.insert(target_dir, val);
                self.pc += 2;
            }
            // 4: output
            (0, 0, mode, 0, 4) => {
                let val = self.param_value(mode, 1);
                self.output.push(val);
                self.pc += 2;
            }
            // 5: jump-if-true
            (0, mode_dest, mode_cond, 0, 5) => {
                let cond: i64 = self.param_value(mode_cond, 1);
                let dest: i64 = self.param_value(mode_dest, 2);
                self.pc = if cond != 0 { dest } else { self.pc + 3 };
            }
            // 6: jump-if-false
            (0, mode_dest, mode_cond, 0, 6) => {
                let cond: i64 = self.param_value(mode_cond, 1);
                let dest: i64 = self.param_value(mode_dest, 2);
                self.pc = if cond == 0 { dest } else { self.pc + 3 };
            }
            // 7: less-than
            (mode_t, mode_b, mode_a, 0, 7) => {
                let param_a: i64 = self.param_value(mode_a, 1);
                let param_b: i64 = self.param_value(mode_b, 2);
                let target_dir: i64 = self.dest_dir(mode_t, 3);
                self.memory
                    .insert(target_dir, if param_a < param_b { 1 } else { 0 });
                self.pc += 4;
            }
            // 8: equals
            (mode_t, mode_b, mode_a, 0, 8) => {
                let param_a: i64 = self.param_value(mode_a, 1);
                let param_b: i64 = self.param_value(mode_b, 2);
                let target_dir: i64 = self.dest_dir(mode_t, 3);
                self.memory
                    .insert(target_dir, if param_a == param_b { 1 } else { 0 });
                self.pc += 4;
            }
            // 9: adjust relative base
            (0, 0, mode, 0, 9) => {
                self.relative_offset += self.param_value(mode, 1);
                self.pc += 2;
            }
            // 99: halt
            (0, 0, 0, 9, 9) => {
                self.done = true;
            }
            other => {
                println!("Invalid Instruction: {:?}", other);
                unreachable!()
            }
        }
    }

    /// Reads parameter `param_n` of the current instruction under `mode`
    /// (0 = position, 1 = immediate, 2 = relative).
    ///
    /// Memory beyond the initial program defaults to 0 per the Intcode
    /// spec; the previous version only applied the default in position
    /// mode and panicked on unwritten addresses in relative mode.
    fn param_value(&self, mode: i64, param_n: i64) -> i64 {
        let value = self.memory[&(self.pc + param_n)];
        match mode {
            0 => self.memory.get(&value).copied().unwrap_or(0),
            1 => value,
            2 => self
                .memory
                .get(&(self.relative_offset + value))
                .copied()
                .unwrap_or(0),
            _other => unreachable!(),
        }
    }

    /// Resolves the destination address for a write parameter
    /// (0 = position, 2 = relative; immediate writes are invalid).
    fn dest_dir(&self, mode: i64, param_n: i64) -> i64 {
        let dir = self.memory[&(self.pc + param_n)];
        match mode {
            0 => dir,
            2 => self.relative_offset + dir,
            _other => unreachable!(),
        }
    }
}
// HELPERS
/// Splits an instruction word into (mode of param 3, mode of param 2,
/// mode of param 1, opcode tens digit, opcode ones digit).
fn to_digits(n: i64) -> (i64, i64, i64, i64, i64) {
    let digit_at = |place: i64| n / place % 10;
    (
        digit_at(10000),
        digit_at(1000),
        digit_at(100),
        digit_at(10),
        digit_at(1),
    )
}
#[cfg(test)]
mod tests {
    use super::part1;
    /// Checks the three example programs from the day 9 puzzle: a quine,
    /// a 16-digit multiplication, and output of a large literal.
    #[test]
    fn day9part1() {
        assert_eq!(
            part1(&[109, 1, 204, -1, 1001, 100, 1, 100, 1008, 100, 16, 101, 1006, 101, 0, 99]),
            "109, 1, 204, -1, 1001, 100, 1, 100, 1008, 100, 16, 101, 1006, 101, 0, 99"
        );
        assert_eq!(
            part1(&[1102, 34915192, 34915192, 7, 4, 7, 99, 0]),
            "1219070632396864"
        );
        assert_eq!(part1(&[104, 1125899906842624, 99]), "1125899906842624");
    }
}
| true |
bed11774a250e292246cc163aa0358bdccf72703
|
Rust
|
xuorig/rust-distributed-redis
|
/src/hashing.rs
|
UTF-8
| 2,255 | 3.75 | 4 |
[] |
no_license
|
use crc::crc32;
use std::collections::HashMap;
/// A consistent-hashing ring mapping CRC32-hashed keys to node names.
#[derive(Debug)]
pub struct Ring {
    replicas: i32,              // virtual nodes per real node
    ring: HashMap<u32, String>, // ring position (hash) -> node name
    keys: Vec<u32>,             // sorted ring positions, for binary search
}
impl Ring {
    /// Creates an empty hash ring.
    ///
    /// # Arguments
    ///
    /// * `replicas` - number of virtual nodes placed on the ring for each
    ///   real node (its "weight"); higher values smooth the key distribution.
    ///
    /// (The old doc comment described `nodes`/`factor` parameters that this
    /// constructor never had.)
    pub fn new(replicas: i32) -> Ring {
        Ring {
            replicas,
            ring: HashMap::new(),
            keys: vec![],
        }
    }

    /// Adds `node` to the ring, inserting `self.replicas` virtual points.
    pub fn add_node(&mut self, node: String) {
        for i in 0..self.replicas {
            // Each replica hashes a distinct "<node>-<i>" label onto the ring.
            let key = format!("{}-{}", node, i);
            let hash = crc32::checksum_ieee(key.as_bytes());
            self.ring.insert(hash, node.clone());
            self.keys.push(hash);
        }
        // Sort once after all replicas are inserted; the previous version
        // re-sorted inside the loop (O(r^2 log r) per node) for no benefit.
        self.keys.sort();
    }

    /// Returns a Node identifier given a key
    ///
    /// * `key` the key for which we want the corresponding shard/node
    ///
    pub fn get(&self, key: String) -> Option<&String> {
        if self.ring.is_empty() {
            return None;
        }
        let hash = crc32::checksum_ieee(key.as_bytes());
        // Binary search for the closest node for this hash
        // Rust binary search returns an error with the index where we should
        // insert to keep the list sorted, pretty cool 👇
        let idx = match self.keys.binary_search(&hash) {
            Ok(i) => i,
            Err(i) => i,
        };
        // If we would need to insert at the end, wrap around and pick the
        // first node instead (the ring is circular).
        if idx == self.keys.len() {
            self.ring.get(&self.keys[0])
        } else {
            self.ring.get(&self.keys[idx])
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    /// Each added node contributes `replicas` entries to the ring.
    #[test]
    fn add_node_replicates_with_factor() {
        let mut ring = Ring::new(5);
        ring.add_node(String::from("a"));
        ring.add_node(String::from("b"));
        assert_eq!(10, ring.ring.len());
    }
    /// Lookups on an empty ring return None.
    #[test]
    fn get_empty_ring() {
        let ring = Ring::new(5);
        assert_eq!(None, ring.get(String::from("a")));
    }
    /// With a single node, every key maps to it.
    #[test]
    fn get_ring_one_node() {
        let mut ring = Ring::new(5);
        let node = String::from("a");
        ring.add_node(node.clone());
        let got_node = ring.get(String::from("my_key")).unwrap();
        assert_eq!(node, *got_node)
    }
}
| true |
65b5f24008d2d8ae51883bc6fd87c06d4f940425
|
Rust
|
rosehsu47/Rust_learning
|
/pgtry/src/main.rs
|
UTF-8
| 1,058 | 3.28125 | 3 |
[] |
no_license
|
// extern crate postgres;
use postgres::{Client, NoTls};
/// Connects to a local Postgres server, ensures the `blog` table exists,
/// then demonstrates joining an integer array into a string.
fn main() {
    println!("{}", "Start");
    let db = "host=localhost user=rosehsu";
    let mut conn = match Client::connect(db, NoTls) {
        Ok(conn) => {
            println!("{}", "Connect successfully!");
            conn
        }
        Err(e) => {
            println!("Connection error: {}", e);
            return;
        }
    };
    // Handle the Result from `execute` directly: the old code took a
    // reference to the discarded Result (`&conn.execute(..)`) and used the
    // deprecated `.ok().expect(..)` chain, which also threw away the error
    // detail from the panic message.
    conn.execute(
        "create table if not exists blog (
        id serial primary key,
        title varchar(255),
        body text)",
        &[],
    )
    .expect("Table creation failed");
    let s_to_s = [1, 2, 3];
    change_type(s_to_s);
}
/// Prints the elements of `values` concatenated into a single string
/// (e.g. `[1, 2, 3]` prints `123`).
///
/// The old loop took a reference to `push_str`'s unit return value
/// (`&result.push_str(..)`); collecting the per-element strings is both
/// correct and idiomatic.
fn change_type(values: [i32; 3]) {
    let result: String = values.iter().map(i32::to_string).collect();
    println!("{}", &result);
}
// NOTE(review): this re-declares the name `Result`, shadowing
// `std::result::Result` for unqualified uses of the *type name* in this
// module (the prelude variants `Ok`/`Err` used in patterns above are not
// affected). It is currently unused; consider renaming or removing it.
#[allow(dead_code)]
enum Result<T, E> {
    Ok(T),
    Err(E),
}
| true |
2dbc4d9f39713b072d670026cfa966ced9587c68
|
Rust
|
PeterW-LWL/mail
|
/core/src/compose.rs
|
UTF-8
| 11,594 | 3.421875 | 3 |
[
"MIT",
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
] |
permissive
|
//! This module provides utilities for composing multipart mails.
//!
//! While the `Mail` type on itself can represent any multipart
//! mail most mails have a certain pattern to their structure,
//! consisting mainly of `multipart/mixed` for attachments,
//! `multipart/alternative` for alternative bodies and
//! `multipart/related` for including embedded resources which
//! can be used in the mail bodies like e.g. a logo.
//!
//! This module provides the needed utilities to more simply
//! create a `Mail` instance which represents this kind of
//! mails.
//-------------------------------------------------------------\\
// NOTE: Implementations for creating (composing) mails are ||
// split from the type dev, and normal impl blocks and placed ||
// in the later part of the file for better readability. ||
//-------------------------------------------------------------//
use media_type::{MULTIPART, ALTERNATIVE, RELATED, MIXED};
use vec1::Vec1;
use headers::{
HeaderKind,
headers,
header_components::{
Disposition,
DispositionKind,
MediaType
}
};
use crate::{
mail::Mail,
resource::Resource
};
/// Parts used to create a mail body (in a multipart mail).
///
/// This type contains a `Resource` which is normally used
/// to create a alternative body in a `multipart/alternative`
/// section. As well as a number of "embeddings" which, depending
/// on their disposition, are either used as attachments
/// or as embedded resources which the body can refer to
/// by their content id.
#[derive(Debug)]
pub struct BodyPart {
    /// A body created by a template.
    pub resource: Resource,

    /// A number of embeddings which should be displayed inline.
    ///
    /// This is normally used to embed images then displayed in
    /// a html body. It is not in the scope of this part of the
    /// library to bind content id's to resources to thinks using
    /// them to display the embeddings. This part of the library
    /// does "just" handle that they are correctly placed in the
    /// resulting Mail. The `mail-templates` crate provides more
    /// functionality for better ergonomics.
    ///
    /// Note that embeddings placed in a `BodyPart` instance are potentially
    /// only usable in the body specified in the same `BodyPart` instance.
    pub inline_embeddings: Vec<Resource>,

    /// A number of embeddings which should be treated as attachments.
    ///
    /// Attachments of a `BodyPart` instance will be combined with
    /// the attachments of other instances and the ones in the
    /// `MailParts` instance.
    pub attachments: Vec<Resource>
}
/// Parts which can be used to compose a multipart mail.
///
/// This can be used to crate a mail, possible having
/// attachments with multiple alternative bodies having
/// embedded resources which can be referred to by the
/// bodies with content ids. This embeddings can be both
/// body specific or shared between bodies.
///
/// # Limitations
///
/// Any non alternative body will be either an attachment
/// or an body with a inline disposition header in a
/// `multipart/related` body. Which means you can not
/// use this mechanism to e.g. create a `multipart/mixed`
/// body with multiple disposition inline sub-bodies
/// which should be displayed side-by-side. Generally this
/// is not a very good way to create a mail, through a
/// valid way nevertheless.
///
pub struct MailParts {
    /// A vector of alternative bodies
    ///
    /// A typical setup would be to have two alternative bodies one text/html and
    /// another text/plain as fallback (for which the text/plain body would be
    /// the first in the vec and the text/html body the last one).
    ///
    /// Note that the order in the vector is the same as the order in
    /// which they will appear in the mail. I.e. the first one is the
    /// last fallback while the last one should be shown if possible.
    pub alternative_bodies: Vec1<BodyPart>,

    /// A number of embeddings which should be displayed inline.
    ///
    /// This is normally used to embed images then displayed in
    /// a html body. It is not in the scope of this part of the
    /// library to bind content id's to resources to thinks using
    /// them to display the embeddings. This part of the library
    /// does "just" handle that they are correctly placed in the
    /// resulting Mail. The `mail-templates` crate provides more
    /// functionality for better ergonomics.
    pub inline_embeddings: Vec<Resource>,

    /// A number of embeddings which should be treated as attachments
    pub attachments: Vec<Resource>
}
//-------------------------------------------------------\\
// implementations for creating mails are from here on ||
//-------------------------------------------------------//
impl MailParts {
    /// Create a `Mail` instance based on this `MailParts` instance.
    ///
    ///
    /// If this instance contains any attachments then the
    /// returned mail will be a `multipart/mixed` mail with
    /// the first body containing the actual mail and the
    /// other bodies containing the attachments.
    ///
    /// If the `MailParts.inline_embeddings` is not empty then
    /// the mail will be wrapped in `multipart/related` (inside
    /// any potential `multipart/mixed`) containing the
    /// actual mail in the first body and the inline embeddings
    /// in the other bodies.
    ///
    /// The mail will have a `multipart/alternative` body
    /// if it has more then one alternative body
    /// (inside a potential `multipart/related` inside a
    /// potential `multipart/mixed` body). This body contains
    /// one sub-body for each `BodyPart` instance in
    /// `MailParts.alternative_bodies`.
    ///
    /// Each sub-body created for a `BodyPart` will be wrapped
    /// inside a `multipart/related` if it has body specific
    /// embeddings (with content disposition inline).
    pub fn compose(self)
        -> Mail
    {
        let MailParts {
            alternative_bodies,
            inline_embeddings,
            attachments
        } = self;

        // Attachments are always hoisted to the outermost layer, so the
        // per-BodyPart attachments get collected into this vec as well.
        let mut attachments = attachments.into_iter()
            .map(|atta| atta.create_mail_with_disposition(DispositionKind::Attachment))
            .collect::<Vec<_>>();

        let mut alternatives = alternative_bodies.into_iter()
            .map(|body| body.create_mail(&mut attachments))
            .collect::<Vec<_>>();

        //UNWRAP_SAFE: bodies is Vec1, i.e. we have at last one
        // The popped (last) body is the "main" one; the rest are fallbacks.
        let mail = alternatives.pop().unwrap();
        let mail =
            if alternatives.is_empty() {
                mail
            } else {
                mail.wrap_with_alternatives(alternatives)
            };

        // Wrap order matters: alternative (innermost), then related,
        // then mixed (outermost).
        let mail =
            if inline_embeddings.is_empty() {
                mail
            } else {
                let related = inline_embeddings.into_iter()
                    .map(|embedding| {
                        embedding.create_mail_with_disposition(DispositionKind::Inline)
                    })
                    .collect::<Vec<_>>();
                mail.wrap_with_related(related)
            };

        let mail =
            if attachments.is_empty() {
                mail
            } else {
                mail.wrap_with_mixed(attachments)
            };

        mail
    }
}
impl BodyPart {
    /// Creates a `Mail` instance from this `BodyPart` instance.
    ///
    /// All embeddings in `BodyPart.embeddings` which have a
    /// attachment content disposition are placed into the
    /// `attachments_out` parameter, as attachments should
    /// always be handled on the outer most level but the
    /// produced mail is likely not the outer most level.
    ///
    /// This will create a non-multipart body for the
    /// body `Resource`, if there are any embeddings which
    /// have a `Inline` disposition that body will be
    /// wrapped into a `multipart/related` body containing
    /// them.
    pub fn create_mail(
        self,
        attachments_out: &mut Vec<Mail>,
    ) -> Mail {
        let BodyPart {
            resource,
            inline_embeddings,
            attachments
        } = self;

        let body = resource.create_mail();

        // Hoist this body's attachments to the caller's collection.
        for attachment in attachments.into_iter() {
            let mail = attachment.create_mail_with_disposition(DispositionKind::Attachment);
            attachments_out.push(mail)
        }

        // Body-specific inline embeddings wrap this body in its own
        // multipart/related section.
        if inline_embeddings.is_empty() {
            body
        } else {
            let related = inline_embeddings.into_iter()
                .map(|embedding| {
                    embedding.create_mail_with_disposition(DispositionKind::Inline)
                })
                .collect::<Vec<_>>();
            body.wrap_with_related(related)
        }
    }
}
impl Resource {
    /// Create a `Mail` instance representing this `Resource`.
    ///
    /// This is not a complete mail, i.e. it will not contain
    /// headers like `From` or `To` and in many cases the
    /// returned `Mail` instance will be wrapped into other
    /// mail instances adding alternative bodies, embedded
    /// resources and attachments.
    pub fn create_mail(self) -> Mail {
        Mail::new_singlepart_mail(self)
    }

    /// Like [`Resource::create_mail`], but additionally sets a
    /// `Content-Disposition` body header of the given kind
    /// (inline or attachment).
    pub fn create_mail_with_disposition(self, disposition_kind: DispositionKind) -> Mail {
        let mut mail = self.create_mail();
        //TODO[1.0] grab meta from resource
        let disposition = Disposition::new(disposition_kind, Default::default());
        mail.insert_header(headers::ContentDisposition::body(disposition));
        mail
    }
}
impl Mail {
    /// Create a `multipart/mixed` `Mail` instance containing this mail as
    /// first body and one additional body for each attachment.
    ///
    /// Normally this is used with embeddings having a attachment
    /// disposition creating a mail with attachments.
    pub fn wrap_with_mixed(self, other_bodies: Vec<Mail>)
        -> Mail
    {
        // `self` is pushed last but, note, the order of attachment bodies
        // relative to the main body is what the push below produces.
        let mut bodies = other_bodies;
        bodies.push(self);
        new_multipart(&MIXED, bodies)
    }

    /// Create a `multipart/alternative` `Mail` instance containing this
    /// mail as the _main_ body with given alternatives.
    ///
    /// The "priority" of alternative bodies is ascending with the body
    /// which should be shown only if all other bodies can't be displayed
    /// first. I.e. the order is the same order as
    /// specified by `multipart/alternative`.
    /// This also means that _this_ body will be the last body as it is
    /// meant to be the _main_ body.
    pub fn wrap_with_alternatives(self, alternates: Vec<Mail>)
        -> Mail
    {
        let mut bodies = alternates;
        //TODO[opt] accept iter and prepend instead of insert in vec
        bodies.insert(0, self);
        new_multipart(&ALTERNATIVE, bodies)
    }

    /// Creates a `multipart/related` `Mail` instance containing this
    /// mail first and then all related bodies.
    pub fn wrap_with_related(self, related: Vec<Mail>)
        -> Mail
    {
        let mut bodies = related;
        bodies.insert(0, self);
        new_multipart(&RELATED, bodies)
    }
}
/// Builds a `multipart/<sub_type>` mail from the given bodies.
///
/// # Panic
///
/// Panics if `sub_type` does not form a valid multipart media type.
fn new_multipart(sub_type: &'static str, bodies: Vec<Mail>)
    -> Mail
{
    let media_type = MediaType::new(MULTIPART, sub_type).unwrap();
    Mail::new_multipart_mail(media_type, bodies)
}
| true |
505e1a8e6f44a135d60702beb5ad2283f9f32bc5
|
Rust
|
nyeogmi/zonkscript
|
/src/module/shared.rs
|
UTF-8
| 3,662 | 2.796875 | 3 |
[] |
no_license
|
use std::borrow::Cow;
use crate::reexports::*;
use super::*;
/// Mutable builder used to assemble a [`Module`] incrementally.
///
/// Procedures and datatypes go through a prototype phase (referenced by
/// identifier before being populated); primitives are registered by name.
pub struct ModuleBuilder {
    /// Procedures under construction, keyed by identifier.
    pub(super) procedures: Prototyped<Procedure, ProcedureBuilder>,
    /// Data types under construction, keyed by identifier.
    pub(super) datatypes: Prototyped<DataType, DataTypeBuilder>,
    /// Registered primitive types.
    pub(super) primitives: Named<Primitive>,
}
/// A finalized, immutable module produced by [`ModuleBuilder`].
#[derive(Debug)]
pub struct Module {
    pub(super) procedures: Finalized<Procedure>,
    pub(super) datatypes: Finalized<DataType>,
    pub(super) primitives: Finalized<Primitive>,
    /// Handles to the standard primitive types registered at build time.
    pub std_primitives: StdPrimitives,
}
/// Name used to key procedures, datatypes and primitives.
pub type Identifier = Cow<'static, str>;
impl Module {
    // TODO: Don't panic here
    /// Looks up a procedure by id.
    /// Panics if the id is out of range (direct index into the backing vec).
    pub(crate) fn procedure(&self, procedure: ZId<Procedure>) -> &Procedure {
        &self.procedures.data[procedure.0]
    }
    /// Looks up a datatype by id.
    /// Panics if the id is out of range.
    pub(crate) fn datatype(&self, datatype: ZId<DataType>) -> &DataType {
        &self.datatypes.data[datatype.0]
    }
}
impl ModuleBuilder {
    /// Creates an empty builder with the standard primitives pre-registered.
    pub fn new() -> ModuleBuilder {
        let mut builder = ModuleBuilder {
            procedures: Prototyped::new(),
            datatypes: Prototyped::new(),
            primitives: Named::new(),
        };
        builder.add_std_primitives();
        builder
    }
    // == procedures ==
    /// Returns the id for `identifier`, creating a prototype entry if needed.
    pub fn procedure(&mut self, identifier: &Identifier) -> ZId<Procedure> {
        self.procedures.reference(identifier)
    }
    // Returns a mutable builder for the procedure, or None once sealed.
    fn mut_procedure(&mut self, id: ZId<Procedure>) -> Option<&mut ProcedureBuilder> {
        self.procedures.mutate(id, ProcedureBuilder::new)
    }
    /// Marks the procedure as complete; further edits will panic.
    pub fn seal_procedure(&mut self, id: ZId<Procedure>) {
        // TODO: Panic on double-finalize? Probably just ignore.
        self.procedures.seal(id);
    }
    /// Adds a local of type `ty` to the procedure.
    /// Panics if the procedure is sealed.
    pub fn local(&mut self, id: ZId<Procedure>, name: &Identifier, ty: ZId<DataType>) -> ZId<Local> {
        if let Some(mp) = self.mut_procedure(id) {
            mp.push_local(name, ty)
        } else {
            panic!("can't edit procedure")
        }
    }
    /// Appends an instruction to the procedure.
    /// Panics if the procedure is sealed.
    pub fn push_instruction(&mut self, id: ZId<Procedure>, instruction: Instruction) {
        if let Some(mp) = self.mut_procedure(id) {
            mp.push_instruction(instruction)
        } else {
            panic!("can't edit procedure")
        }
    }
    // == data types ==
    /// Returns the id for `identifier`, creating a prototype entry if needed.
    pub fn datatype(&mut self, identifier: &Identifier) -> ZId<DataType> {
        self.datatypes.reference(identifier)
    }
    fn mut_datatype(&mut self, id: ZId<DataType>) -> Option<&mut DataTypeBuilder> {
        self.datatypes.mutate(id, DataTypeBuilder::new)
    }
    /// Marks the datatype as complete; further edits will panic.
    pub fn seal_datatype(&mut self, id: ZId<DataType>) {
        // TODO: Panic on double-finalize? Probably just ignore.
        self.datatypes.seal(id);
    }
    /// Appends a field of type `field` to the datatype.
    /// Panics if the datatype is sealed.
    pub fn push_field(&mut self, id: ZId<DataType>, field: ZId<DataType>) {
        if let Some(ms) = self.mut_datatype(id) {
            ms.push(field)
        } else {
            panic!("can't edit datatype")
        }
    }
    // == primitives ==
    /// Registers a primitive type (idempotent per identifier) and returns the
    /// datatype id of its wrapping struct.
    pub fn primitive(&mut self, identifier: &Identifier, primitive: impl Fn() -> Primitive) -> ZId<DataType> {
        // NYEO NOTE: This function breaks encapsulation in a few ways and should be changed somehow
        // more args?
        let id = self.datatypes.reference(identifier);
        if self.datatypes.is_populated(id) {
            // TODO: Assert that the struct _is_ a primitive for this primitive type?
            return id;
        }
        assert!(!self.primitives.names.contains_key(identifier));
        let prim = primitive();
        // `prim` is used both here and in `wrap` below — this relies on
        // `Primitive` being `Copy` (otherwise the closure move would not
        // compile); worth confirming at the `Primitive` definition.
        let prim_id = self.primitives.get_or_insert(identifier, || prim);
        let struct_id = self.datatypes.reference(identifier);
        self.datatypes.inject(struct_id, DataType::wrap(identifier.clone(), prim_id, prim), |x, y| x == y);
        struct_id
    }
}
| true |
39c30804b59217975f23dbd7f5ae5359cd7fea01
|
Rust
|
franktea/leetcode-rust
|
/src/bin/0006.rs
|
UTF-8
| 581 | 3 | 3 |
[] |
no_license
|
impl Solution {
    /// LeetCode 6 — ZigZag Conversion.
    ///
    /// Characters are dealt into rows following the zigzag order
    /// 0,1,…,n-1,n-2,…,1 (repeating), then the rows are concatenated
    /// top to bottom.
    pub fn convert(s: String, num_rows: i32) -> String {
        let n = num_rows as usize;
        let mut rows = vec![String::new(); n];
        // Infinite zigzag row sequence: down the rows, then back up
        // (excluding the two endpoints), repeated.
        let row_order = (0..n).chain((1..n - 1).rev()).cycle();
        for (ch, row) in s.chars().zip(row_order) {
            rows[row].push(ch);
        }
        rows.concat()
    }
}
pub struct Solution;
fn main() {
    assert_eq!(Solution::convert("PAYPALISHIRING".to_string(), 3), "PAHNAPLSIIGYIR".to_string());
    assert_eq!(Solution::convert("PAYPALISHIRING".to_string(), 4), "PINALSIGYAHRPI".to_string());
}
| true |
a0e5c0e11d4b0405242c2c77c7bc66d80d6a117d
|
Rust
|
Yangjiaxi/DragonBook
|
/book_2_5_5/src/main.rs
|
UTF-8
| 221 | 2.65625 | 3 |
[] |
no_license
|
use book_2_5_5::parser::Parser;
fn main() {
let mut parser = Parser::init("9-3+2-2+9");
println!("{:?}", parser);
match parser.expr() {
Err(e) => println!("Error : {:?}", e),
_ => {}
};
}
| true |
71499d634f8d6fe3999179698033533bea077cdd
|
Rust
|
ivyjsgit/satellite_numeric
|
/src/methods.rs
|
UTF-8
| 19,849 | 2.84375 | 3 |
[] |
no_license
|
use anyhop::{Atom, Goal, Method, MethodResult, Task};
use anyhop::MethodResult::{TaskLists};
use anyhop::Task::Operator;
use MethodResult::*;
use Task::*;
use log::{debug, error, info, trace, warn};
use SatelliteMethod::*;
use crate::methods::SatelliteMethod::{ScheduleAll, ScheduleOne};
use crate::operators::SatelliteOperator::{Calibrate, SwitchOff, SwitchOn, TakeImage, TurnTo};
use super::operators::*;
use fixed::types::I40F24;
/// HTN methods available to the satellite planner.
#[derive(Copy, Clone, PartialOrd, PartialEq, Ord, Eq, Debug)]
pub enum SatelliteMethod {
    /// Top-level method: schedule every remaining goal image.
    ScheduleAll,
    //state, satellite, instrument, mode, new_direction, previous_direction
    // (the `state` listed above is passed separately to `apply`, not stored)
    ScheduleOne(SatelliteEnum, SatelliteEnum, SatelliteEnum, SatelliteEnum, SatelliteEnum),
    //SatelliteState, Satellite, Instrument
    /// Power the given instrument on (switching off any active one first).
    Switching(SatelliteEnum, SatelliteEnum),
}
/// Planner progress marker: either all goal images are captured, or the
/// parameters of the next pending step.
#[derive(Clone, PartialOrd, PartialEq, Ord, Eq, Debug)]
pub enum SatelliteStatus {
    Done,
    //state, satellite, instrument, mode, new_direction, previous_direction
    NotDone(u32, SatelliteEnum, SatelliteEnum, SatelliteEnum, SatelliteEnum, SatelliteEnum),
}
impl SatelliteStatus {
    /// Classifies the current `state` against `goal`: `Done` when every goal
    /// image is already captured, otherwise `NotDone` carrying the step data.
    pub fn new(identifier: u32, state: SatelliteState, satellite: SatelliteEnum, instrument: SatelliteEnum, mode: SatelliteEnum, new_direction: SatelliteEnum, previous_direction: SatelliteEnum, goal: SatelliteGoals) -> SatelliteStatus {
        return if is_satellite_done(state, &goal) {
            SatelliteStatus::Done
        } else {
            SatelliteStatus::NotDone(identifier, satellite, instrument, mode, new_direction, previous_direction)
        }
    }
}
/// True when every image required by `goal` is present in `state` with a
/// matching value (i.e. the planner has nothing left to do).
pub fn is_satellite_done(state: SatelliteState, goal: &SatelliteGoals) -> bool {
    // A missing key yields `None`, which never equals the goal's `Some(_)`.
    goal.have_image
        .keys()
        .all(|image| state.have_image.get(image) == goal.have_image.get(image))
}
//Turn an instrument on or off.
/// Produces the operator sequence that leaves `instrument` powered on:
/// switches off whichever instrument is currently active (if any) before
/// switching the requested one on; emits nothing if it is already on.
fn switching(state: &SatelliteState, satellite: SatelliteEnum, instrument: SatelliteEnum) -> MethodResult<SatelliteOperator<SatelliteEnum>, SatelliteMethod> {
    TaskLists(vec![if !state.power_on.is_empty() && !state.power_on.contains(&instrument) {
        let powered_on_instrument = find_powered_on_instruments(state, &satellite);
        debug!("Our powered_on instrument is: {:?}", powered_on_instrument);
        match powered_on_instrument{
            Some(n) => debug!("Performing switchoff on SwitchOff({:?}, {:?})",n, satellite ),
            None => debug!("None"),
        }
        match powered_on_instrument{
            Some(n)=> vec![Operator(SwitchOff(n, satellite)),
                Operator(SwitchOn(instrument, satellite))],
            None =>
                {
                    // NOTE(review): this branch re-tests `power_on.contains(&instrument)`
                    // even though the outer condition already established it is false,
                    // so the first arm looks unreachable — confirm before simplifying.
                    if state.power_on.contains(&instrument){
                        vec![Operator(SwitchOff(instrument, satellite)),
                            Operator(SwitchOn(instrument, satellite))]
                    }else{
                        vec![Operator(SwitchOn(instrument, satellite))]
                    }
                }
        }
    } else if state.power_on.is_empty() {
        // Nothing is on anywhere: just power the instrument up.
        vec![Operator(SwitchOn(instrument, satellite))]
    } else {
        // Instrument is already powered on: no operators needed.
        vec![]
    }])
}
//Schedules one step into the planner.
/// Dispatches one image-capture step to the appropriate scheduling helper,
/// based on (a) whether the satellite already points at `new_direction` and
/// (b) whether an instrument is powered on.
fn schedule_one(state: &SatelliteState, satellite: SatelliteEnum, instrument: SatelliteEnum, mode: SatelliteEnum, new_direction: SatelliteEnum, previous_direction: SatelliteEnum) -> MethodResult<SatelliteOperator<SatelliteEnum>, SatelliteMethod> {
    // `power_avail` false means something is drawing power, i.e. an
    // instrument is on.
    let is_instrument_powered_on = !state.power_avail.get(&satellite).unwrap();
    if is_satellite_pointing_in_direction(state, &satellite, &new_direction){ //Prevents short circuiting of the and from earlier
        if is_instrument_powered_on || state.power_on.is_empty(){
            debug!("Scheduling pointing with on instruments");
            return schedule_pointing_with_powered_on_instruments(satellite, instrument, mode, new_direction)
        } else {
            debug!("Scheduling pointing with off instruments");
            return schedule_pointing_with_powered_off_instruments(state, &satellite, instrument, mode, new_direction)
        }
    }else{
        if is_instrument_powered_on || state.power_on.is_empty(){
            debug!("Scheduling no pointing with on instruments");
            let calibration_target_direction = state.calibration_target.get(&instrument).unwrap();
            return schedule_not_pointing_with_powered_on_instruments(satellite, instrument, mode, new_direction, previous_direction, calibration_target_direction)
        }else{
            debug!("Scheduling no pointing with off instruments");
            let calibration_target_direction = state.calibration_target.get(&instrument).unwrap();
            return schedule_not_pointing_with_powered_off_instruments(state, &satellite, instrument, mode, new_direction, previous_direction, calibration_target_direction)
        }
    }
}
/// Drops `TurnTo` operators whose start and destination directions are the
/// same (no-op turns); every other task is kept, in order.
fn remove_redundant_turns(mut tasks: Vec<Task<SatelliteOperator<SatelliteEnum>, SatelliteMethod>>) -> Vec<Task<SatelliteOperator<SatelliteEnum>, SatelliteMethod>> {
    // Filter in place with `retain` — the previous
    // `iter().filter(..).map(|t| *t).collect()` copied every retained task
    // into a second vector.
    tasks.retain(|t| match t {
        Method(_) => true,
        Operator(TurnTo(_, dest, start)) => dest != start,
        Operator(_) => true,
    });
    tasks
}
// Replacement
/// Plans a capture when the satellite is NOT pointing at the target and the
/// required instrument is NOT powered on. Emits, as needed: switch-off of
/// the active instrument, turn to the calibration target, power-up +
/// calibrate, turn to the image target, take image. The four arms below
/// skip turns that would be no-ops (already at the calibration target
/// and/or the target equals the calibration direction).
fn schedule_not_pointing_with_powered_off_instruments(state: &SatelliteState, satellite: &SatelliteEnum, instrument: SatelliteEnum, mode: SatelliteEnum, new_direction: SatelliteEnum, previous_direction: SatelliteEnum, calibration_target_direction: &SatelliteEnum) -> MethodResult<SatelliteOperator<SatelliteEnum>, SatelliteMethod> {
    let powered_on_instrument= find_powered_on_instruments(state, &satellite);
    debug!("Our found powered on instrument is is {:?}", instrument);
    debug!("Our satellite is {:?}", satellite);
    match powered_on_instrument {
        Some(instrument_to_power_off) =>
            {
                if calibration_target_direction == &previous_direction && &new_direction == calibration_target_direction{
                    TaskLists(vec![vec![Operator(SwitchOff(instrument_to_power_off, *satellite)),
                        Method(Switching(*satellite, instrument)),
                        Operator(Calibrate(*satellite, instrument, *calibration_target_direction)),
                        Operator(TakeImage(*satellite, new_direction, instrument, mode))]])
                } else if calibration_target_direction == &previous_direction {
                    TaskLists(vec![remove_redundant_turns(vec![Operator(SwitchOff(instrument_to_power_off, *satellite)),
                        Method(Switching(*satellite, instrument)),
                        Operator(Calibrate(*satellite, instrument, *calibration_target_direction)),
                        Operator(TurnTo(*satellite, new_direction, *calibration_target_direction)),
                        Operator(TakeImage(*satellite, new_direction, instrument, mode))])])
                } else if &new_direction==calibration_target_direction {
                    TaskLists(vec![remove_redundant_turns(vec![Operator(SwitchOff(instrument_to_power_off, *satellite)),
                        Operator(TurnTo(*satellite, *calibration_target_direction, previous_direction)),
                        Method(Switching(*satellite, instrument)),
                        Operator(Calibrate(*satellite, instrument, *calibration_target_direction)),
                        Operator(TakeImage(*satellite, new_direction, instrument, mode))])])
                }else{
                    // General case: turn to calibrate, then turn to the target.
                    TaskLists(vec![remove_redundant_turns(vec![Operator(SwitchOff(instrument_to_power_off, *satellite)),
                        Operator(TurnTo(*satellite, *calibration_target_direction, previous_direction)),
                        Method(Switching(*satellite, instrument)),
                        Operator(Calibrate(*satellite, instrument, *calibration_target_direction)),
                        Operator(TurnTo(*satellite, new_direction, *calibration_target_direction)),
                        Operator(TakeImage(*satellite, new_direction, instrument, mode))])])
                }
            },
        None => if &new_direction==calibration_target_direction || new_direction==previous_direction{
            TaskLists(vec![vec![Method(Switching(*satellite, instrument)),
                Operator(Calibrate(*satellite, instrument, *calibration_target_direction)),
                Operator(TakeImage(*satellite, new_direction, instrument, mode))]])
        }else{
            TaskLists(vec![remove_redundant_turns(vec![Operator(TurnTo(*satellite, *calibration_target_direction, previous_direction)),
                Method(Switching(*satellite, instrument)),
                Operator(Calibrate(*satellite, instrument, *calibration_target_direction)),
                Operator(TurnTo(*satellite, new_direction, *calibration_target_direction)),
                Operator(TakeImage(*satellite, new_direction, instrument, mode))])])
        },
    }
}
/// Plans a capture when the satellite is NOT pointing at the target but an
/// instrument is already powered on (or none is on at all): no switch-off
/// is needed, only turn/calibrate/turn/take-image, with no-op turns elided
/// by the arm structure.
fn schedule_not_pointing_with_powered_on_instruments(satellite: SatelliteEnum, instrument: SatelliteEnum, mode: SatelliteEnum, new_direction: SatelliteEnum, previous_direction: SatelliteEnum, calibration_target_direction: &SatelliteEnum) -> MethodResult<SatelliteOperator<SatelliteEnum>, SatelliteMethod> {
    if calibration_target_direction == &previous_direction && &new_direction==calibration_target_direction{
        TaskLists(vec![vec![Method(Switching(satellite, instrument)),
            Operator(Calibrate(satellite, instrument, *calibration_target_direction)),
            Operator(TakeImage(satellite, new_direction, instrument, mode))]])
    }else if calibration_target_direction == &previous_direction{
        TaskLists(vec![vec![Method(Switching(satellite, instrument)),
            Operator(Calibrate(satellite, instrument, *calibration_target_direction)),
            Operator(TurnTo(satellite, new_direction, *calibration_target_direction)),
            Operator(TakeImage(satellite, new_direction, instrument, mode))]])
    }else if &new_direction==calibration_target_direction{
        TaskLists(vec![vec![Operator(TurnTo(satellite, *calibration_target_direction, previous_direction)),
            Method(Switching(satellite, instrument)),
            Operator(Calibrate(satellite, instrument, *calibration_target_direction)),
            Operator(TakeImage(satellite, new_direction, instrument, mode))]])
    }else{
        debug!("Taking the dangerous way!");
        // Full sequence: turn to calibration target, calibrate, turn to the
        // image target, take the image.
        TaskLists(vec![vec![Operator(TurnTo(satellite, *calibration_target_direction, previous_direction)),
            Method(Switching(satellite, instrument)),
            Operator(Calibrate(satellite, instrument, *calibration_target_direction)),
            Operator(TurnTo(satellite, new_direction, *calibration_target_direction)),
            Operator(TakeImage(satellite, new_direction, instrument, mode))]])
    }
}
/// Satellite already points at the target and no switch-off is required:
/// power up the instrument, calibrate on the target direction, take the image.
fn schedule_pointing_with_powered_on_instruments(satellite: SatelliteEnum, instrument: SatelliteEnum, mode: SatelliteEnum, new_direction: SatelliteEnum) -> MethodResult<SatelliteOperator<SatelliteEnum>, SatelliteMethod> {
    let steps = vec![
        Method(Switching(satellite, instrument)),
        Operator(Calibrate(satellite, instrument, new_direction)),
        Operator(TakeImage(satellite, new_direction, instrument, mode)),
    ];
    TaskLists(vec![steps])
}
/// Satellite already points at the target but another instrument is on:
/// prepend a switch-off of the active instrument (when one is found) to the
/// power-up / calibrate / take-image sequence.
fn schedule_pointing_with_powered_off_instruments(state: &SatelliteState, satellite: &SatelliteEnum, instrument: SatelliteEnum, mode: SatelliteEnum, new_direction: SatelliteEnum) -> MethodResult<SatelliteOperator<SatelliteEnum>, SatelliteMethod> {
    let mut steps = Vec::new();
    if let Some(active) = find_powered_on_instruments(state, satellite) {
        steps.push(Operator(SwitchOff(active, *satellite)));
    }
    steps.push(Method(Switching(*satellite, instrument)));
    steps.push(Operator(Calibrate(*satellite, instrument, new_direction)));
    steps.push(Operator(TakeImage(*satellite, new_direction, instrument, mode)));
    TaskLists(vec![steps])
}
/// Returns the first powered-on instrument onboard `satellite`, if any.
fn find_powered_on_instruments(state: &SatelliteState, satellite: &SatelliteEnum) -> Option<SatelliteEnum>{
    debug!("Attempting to search the following {:?} ", state.onboard);
    // `?` short-circuits to None when the satellite has no onboard entry.
    state.onboard
        .get(satellite)?
        .iter()
        .inspect(|inst| debug!("Seeing if contains: {:?}", inst))
        .find(|inst| state.power_on.contains(*inst))
        .cloned()
}
/// True when `state` records `satellite` as pointing at `direction`; a
/// missing pointing entry counts as "not pointing".
fn is_satellite_pointing_in_direction(state: &SatelliteState, satellite: &SatelliteEnum, direction: &SatelliteEnum) -> bool {
    state.pointing
        .get(satellite)
        .map_or(false, |current| current == direction)
}
/// Top-level method: for every goal image not yet captured, emits a
/// `ScheduleOne` task (followed by a recursive `ScheduleAll`); once all
/// images are done, emits the final `TurnTo` operators needed to satisfy
/// the goal pointings; fails if nothing can be scheduled.
fn schedule_all(state: &SatelliteState, goal: &SatelliteGoals) -> MethodResult<SatelliteOperator<SatelliteEnum>, SatelliteMethod> {
    let mut tasks: Vec<Vec<Task<SatelliteOperator<SatelliteEnum>, SatelliteMethod>>> = vec![];
    let mut completed_tasks: Vec<SatelliteEnum> = vec![];
    for goal_image in goal.have_image.keys() {
        if !(state.have_image.get(goal_image) == goal.have_image.get(goal_image)) {
            let goal_image_clone = goal_image.clone();
            let mode = goal.have_image.get(goal_image).unwrap();
            let instrument = brute_force_instrument(state, mode).unwrap(); //First look up the goal image to see which mode it should be in, and then look up which mode it should be in.
            let new_direction = goal_image_clone;
            let satellite = brute_force_satellite(state, &instrument, mode).unwrap();
            let previous_direction = state.pointing.get(&satellite.clone()).unwrap();
            tasks.push(vec![Task::Method(ScheduleOne(satellite, instrument, mode.clone(), new_direction, previous_direction.clone())), Task::Method(ScheduleAll)]);
        } else {
            let image_clone = goal_image.clone();
            completed_tasks.push(image_clone);
        }
    }
    // `completed_tasks` is built in the same `keys()` iteration order, so
    // `eq` here effectively tests "every goal image is completed".
    return if goal.have_image.keys().eq(&completed_tasks) {
        let pointing_tasks = pointing_needed(state, goal).iter()
            .map(|(sat, dir)| Task::Operator(TurnTo(*sat, *dir, *state.pointing.get(sat).unwrap())))
            .collect();
        TaskLists(vec![pointing_tasks])
    }else if tasks.len()>0{
        TaskLists(tasks)
    }else {
        debug!("ScheduleAll is returning failure");
        debug!("completed_tasks: {:?}", completed_tasks)
;        debug!("goals: {:?}", goal.have_image.keys());
        debug!("goal.pointing: {:?}", goal.pointing);
        for satellite in state.pointing.iter() {
            debug!("pointing: {:?}", satellite);
        }
        Failure
    };
}
/// Lists the (satellite, direction) goal pointings not yet satisfied by
/// `state`. Panics (unwrap) if a goal satellite is absent from
/// `state.pointing`, matching the original behaviour.
fn pointing_needed(state: &SatelliteState, goal: &SatelliteGoals) -> Vec<(SatelliteEnum,SatelliteEnum)> {
    let mut needed = Vec::new();
    for (sat, dir) in goal.pointing.iter() {
        if state.pointing.get(sat).unwrap() != dir {
            needed.push((*sat, *dir));
        }
    }
    needed
}
/// Finds any instrument whose `supports` entry includes `mode`.
fn brute_force_instrument(state: &SatelliteState, mode: &SatelliteEnum) -> Option<SatelliteEnum> {
    state.supports
        .iter()
        .find(|(_, modes)| modes.contains(mode))
        .map(|(instrument, _)| instrument.clone())
}
/// Finds any satellite that carries `instrument`, provided the instrument
/// supports `mode` (the support check does not depend on the satellite,
/// mirroring the original logic).
fn brute_force_satellite(state: &SatelliteState, instrument: &SatelliteEnum, mode: &SatelliteEnum) -> Option<SatelliteEnum> {
    state.onboard
        .keys()
        .find(|sat| {
            state.does_instrument_support_mode(instrument, mode)
                && is_onboard(state, (*sat).clone(), instrument.clone())
        })
        .cloned()
}
/// True when `instrument` appears in the onboard list of `satellite`;
/// an unknown satellite yields false.
fn is_onboard(state: &SatelliteState, satellite: SatelliteEnum, instrument: SatelliteEnum) -> bool {
    state.onboard
        .get(&satellite)
        .map_or(false, |instruments| instruments.contains(&instrument))
}
impl Method for SatelliteMethod {
    type S = SatelliteState;
    type G = SatelliteGoals;
    type O = SatelliteOperator<SatelliteEnum>;
    /// Dispatches each method variant to its scheduling function.
    fn apply(&self, state: &SatelliteState, goal: &SatelliteGoals) -> MethodResult<SatelliteOperator<SatelliteEnum>, SatelliteMethod> {
        use SatelliteMethod::*;
        match self {
            ScheduleAll => schedule_all(state, goal),
            ScheduleOne(satellite, instrument, mode, new_direction, previous_direction) => schedule_one(state, satellite.clone(), instrument.clone(), mode.clone(), new_direction.clone(), previous_direction.clone()),
            Switching(satellite, instrument) => switching(state, satellite.clone(), instrument.clone()),
        }
    }
}
impl Goal for SatelliteGoals {
    type O = SatelliteOperator<SatelliteEnum>;
    type M = SatelliteMethod;
    type S = SatelliteState;
    type C = I40F24;
    /// Planning always begins with the `ScheduleAll` method.
    fn starting_tasks(&self) -> Vec<Task<SatelliteOperator<SatelliteEnum>, SatelliteMethod>> {
        vec![Task::Method(SatelliteMethod::ScheduleAll)]
    }
    fn accepts(&self, state: &Self::S) -> bool {
        return self.all_met_in(state);
    }
    /// Heuristic distance: number of goal images still missing plus number
    /// of goal pointings not yet satisfied.
    fn distance_from(&self, state: &Self::S) -> Self::C {
        let mut unvisited = 0;
        let mut num_turns = 0;
        for goal_image in self.have_image.keys(){
            //If we haven't visited the image, add it to the unvisited count.
            if state.have_image.get(goal_image)!=self.have_image.get(goal_image){
                unvisited+=1;
            }
        }
        for goal_pointing in self.pointing.keys(){
            if state.pointing.get(goal_pointing)!=self.pointing.get(goal_pointing){
                num_turns+=1;
            }
        }
        return I40F24::from_num(unvisited+num_turns);
    }
}
| true |
6ba2341c57cb1b95b02311ab4361f43ab23bab48
|
Rust
|
harpsword/tex-rs
|
/src/tex_the_program/section_0452.rs
|
UTF-8
| 3,127 | 2.59375 | 3 |
[
"Apache-2.0",
"MIT"
] |
permissive
|
//! @ When the following code is executed, we have |cur_tok=point_token|, but this
//! token has been backed up using |back_input|; we must first discard it.
//!
//! It turns out that a decimal point all by itself is equivalent to `\.{0.0}'.
//! Let's hope people don't use that fact.
//
// @<Scan decimal fraction@>=
// Collects the decimal digits following a point into a stack of avail
// nodes, then rounds them into the fraction `$f`. Translated from the
// Pascal source quoted in the interleaved comments.
pub(crate) macro Scan_decimal_fraction
($globals:expr, $f:expr) {{
    /// number of digits in a decimal fraction
    let mut k: small_number;
    /// top of decimal digit stack
    let (mut p, mut q): (pointer, pointer);
    // begin k:=0; p:=null; get_token; {|point_token| is being re-scanned}
    k = 0.into();
    p = null;
    /// `point_token` is being re-scanned
    get_token($globals)?;
    crate::region_forward_label!(
    |'done1|
    {
        // loop@+ begin get_x_token;
        loop {
            get_x_token($globals)?;
            // if (cur_tok>zero_token+9)or(cur_tok<zero_token) then goto done1;
            if $globals.cur_tok > zero_token + 9 || $globals.cur_tok < zero_token {
                crate::goto_forward_label!('done1);
            }
            // if k<17 then {digits for |k>=17| cannot affect the result}
            if k < 17 {
                /// digits for `k>=17` cannot affect the result
                const _ : () = ();
                // begin q:=get_avail; link(q):=p; info(q):=cur_tok-zero_token;
                q = get_avail($globals);
                link!($globals, q) = p;
                info_inner!($globals, q) = ($globals.cur_tok.get() - zero_token) as _;
                // p:=q; incr(k);
                p = q;
                incr!(k);
                // end;
            }
            // end;
        }
    }
    'done1 <-
    );
    // done1: for kk:=k downto 1 do
    for kk in (1..=k.get()).into_iter().rev() {
        // begin dig[kk-1]:=info(p); q:=p; p:=link(p); free_avail(q);
        $globals.dig[(kk - 1) as usize] = (info_inner!($globals, p) as u8).into();
        q = p;
        p = link!($globals, p);
        free_avail!($globals, q);
        // end;
    }
    // f:=round_decimals(k);
    $f = round_decimals($globals, k).inner();
    // if cur_cmd<>spacer then back_input;
    if $globals.cur_cmd != spacer {
        back_input($globals);
    }
    // end
    use crate::pascal::u8_from_m_to_n;
    use crate::section_0016::incr;
    use crate::section_0121::free_avail;
    use crate::section_0101::small_number;
    use crate::section_0102::round_decimals;
    use crate::section_0115::pointer;
    use crate::section_0115::null;
    use crate::section_0120::get_avail;
    use crate::section_0207::spacer;
    use crate::section_0365::get_token;
    use crate::section_0380::get_x_token;
    use crate::section_0118::link;
    use crate::section_0445::zero_token;
    use crate::section_0118::info_inner;
    use crate::section_0325::back_input;
}}
| true |
445756d89c03346d6d481fb01e9807e1bc44fa2f
|
Rust
|
emakashov/exercisme
|
/rust/bob/src/lib.rs
|
UTF-8
| 887 | 3.84375 | 4 |
[
"MIT"
] |
permissive
|
/// A sentence is a question when it ends in a question mark.
fn is_ask<'a>(question: &'a str) -> bool {
    question.ends_with('?')
}
/// True when the sentence contains at least one letter and every letter is
/// uppercase (i.e. the speaker is shouting).
fn is_shout<'a>(question: &'a str) -> bool {
    // Single pass over the letters; no intermediate Vec needed.
    let mut saw_letter = false;
    for ch in question.chars().filter(|ch| ch.is_alphabetic()) {
        if !ch.is_uppercase() {
            return false;
        }
        saw_letter = true;
    }
    saw_letter
}
/// True when the sentence contains any character from the "weird" set
/// (kept byte-identical to the original set).
fn is_contains_weird_characters<'a>(question: &'a str) -> bool {
    const WEIRD: &str = "%^*@#$(*^";
    question.chars().any(|ch| WEIRD.contains(ch))
}
/// Yelling is either an all-caps shout or the presence of "weird" symbols.
fn is_yell<'a>(question: &'a str) -> bool {
    is_shout(question) || is_contains_weird_characters(question)
}
/// "Saying nothing at all" — the input is the empty string.
fn is_anything<'a>(question: &'a str) -> bool {
    question.is_empty()
}
/// Bob's reply, per the Exercism "Bob" exercise:
/// questions get "Sure.", yelling gets "Whoa, chill out!", silence gets
/// "Fine. Be that way!", anything else gets "Whatever.".
pub fn reply(question: &str) -> &str {
    // Fix: Bob ignores surrounding whitespace — "  ?" is still a question
    // and "   " counts as saying nothing (the old code classified both as
    // "Whatever.").
    let question = question.trim();
    if is_ask(question) {
        "Sure."
    } else if is_yell(question) {
        "Whoa, chill out!"
    } else if is_anything(question) {
        "Fine. Be that way!"
    } else {
        "Whatever."
    }
}
| true |
2b1eb738a15382dd8569844a1957fc5930ddb2b4
|
Rust
|
wbprime/rust-by-example
|
/src/bin/match.rs
|
UTF-8
| 334 | 3.078125 | 3 |
[] |
no_license
|
/// Demonstrates `match` pattern forms: literals, multi-patterns, inclusive
/// ranges, wildcards, and reference/`ref` binding patterns.
fn main() {
    let idx = 10;
    let l1 = match idx {
        1 => 0,
        2 | 3 | 4 | 5 | 6 => 1,
        // Fix: `7...10` is the deprecated inclusive-range spelling (a hard
        // error in the 2021 edition); `7..=10` is the current syntax.
        7..=10 => 2,
        _ => 3,
    };
    println!("Matched {}", l1);
    let r1 = &l1;
    // `&l1` as a *pattern* dereferences `r1` and binds a fresh `l1` by value.
    match r1 {
        &l1 => println!("l1 got {:?}", l1),
    }
    // `ref l2` binds by reference instead of moving/copying.
    match r1 {
        ref l2 => println!("l2 got {:?}", l2),
    }
}
| true |
9462306dac3675ad26ac756b96f7d25604f817a3
|
Rust
|
jamiely/adventofcode
|
/2019/rust/src/day3.rs
|
UTF-8
| 6,229 | 3.546875 | 4 |
[] |
no_license
|
///
/// https://adventofcode.com/2019/day/3
///
use std::fs::File;
use std::io::{self, prelude::*, BufReader};
use std::cmp::{max, min};
use std::collections::HashMap;
/// Direction of a wire segment.
#[derive(Debug, Clone, Copy)]
pub enum Direction {
    Up,
    Down,
    Left,
    Right
}
/// A single wire movement: a direction plus a step count.
#[derive(Debug, Clone, Copy)]
pub struct Instruction {
    direction: Direction,
    steps: i32
}
/// Width/height of a bounding box.
#[derive(Debug, Clone, Copy)]
pub struct Size {
    width: i32,
    height: i32
}
/// A 2-D grid coordinate.
#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq)]
pub struct Point {
    x: i32,
    y: i32
}
/// Axis-aligned rectangle described by its bottom-left corner and size.
#[derive(Debug, Clone, Copy)]
pub struct Box {
    origin: Point,
    size: Size
}
/// A wire: its instruction list plus the box bounding its path.
pub struct Wire {
    instructions: Vec<Instruction>,
    bounding_box: Box
}
/// Computes the axis-aligned bounding box of the path traced by
/// `instructions`, starting from (and including) the origin (0, 0).
pub fn bounding_box(instructions: &Vec<Instruction>) -> Box {
    let mut x_left = 0;
    let mut x_right = 0;
    let mut y_top = 0;
    let mut y_bottom = 0;
    let mut x = 0;
    let mut y = 0;
    for instruction in instructions {
        match instruction.direction {
            Direction::Up => y += instruction.steps,
            Direction::Down => y -= instruction.steps,
            Direction::Left => x -= instruction.steps,
            // BUG FIX: `Right` previously did `x -= instruction.steps`,
            // mirroring the path horizontally. `instruction_points` treats
            // Right as +x, so the box must too.
            Direction::Right => x += instruction.steps
        }
        x_right = max(x, x_right);
        x_left = min(x, x_left);
        y_top = max(y, y_top);
        y_bottom = min(y, y_bottom);
    }
    let width = x_right - x_left;
    let height = y_top - y_bottom;
    let size = Size { width, height };
    let origin = Point { x: x_left, y: y_bottom };
    return Box { origin, size };
}
/// Maps a single-letter direction code (U/D/R/L) to a `Direction`.
pub fn parse_dir(s: &str) -> Option<Direction> {
    match s {
        "U" => Some(Direction::Up),
        "D" => Some(Direction::Down),
        "R" => Some(Direction::Right),
        "L" => Some(Direction::Left),
        _ => None
    }
}
/// Parses a token such as "R75" into an `Instruction`; `None` for
/// malformed or too-short input.
pub fn parse_instruction_str(s: &str) -> Option<Instruction> {
    if s.len() < 2 {
        return None;
    }
    let (dir_part, steps_part) = s.split_at(1);
    let direction = parse_dir(dir_part)?;
    let steps = steps_part.parse::<i32>().ok()?;
    Some(Instruction { direction, steps })
}
/// Splits a comma-separated line into instructions, silently dropping any
/// token that fails to parse.
pub fn parse_instruction_line(line: &String) -> Vec<Instruction> {
    line.split(',')
        .filter_map(parse_instruction_str)
        .collect()
}
/// Expands instructions into the full list of visited points, one grid
/// step at a time, starting with (and including) the origin (0, 0).
pub fn instruction_points(instructions: &Vec<Instruction>) -> Vec<Point> {
    let mut points = vec![Point { x: 0, y: 0 }];
    let (mut x, mut y) = (0, 0);
    for instruction in instructions {
        for _ in 0..instruction.steps {
            match instruction.direction {
                Direction::Up => y += 1,
                Direction::Down => y -= 1,
                Direction::Left => x -= 1,
                Direction::Right => x += 1
            }
            points.push(Point { x, y });
        }
    }
    points
}
/// A wire-crossing point plus the combined number of steps the two wires
/// take to reach it (used for day 3 part 2).
#[derive(Debug, Copy, Clone)]
pub struct Intersection {
    point: Point,
    sum_steps_to_point: usize
}
/// Like `find_instruction_intersections`, but returns only the crossing
/// points, discarding the step counts.
pub fn find_instruction_intersection_points(primary: &Vec<Instruction>, other: &Vec<Instruction>) -> Vec<Point> {
    find_instruction_intersections(primary, other)
        .into_iter()
        .map(|intersection| intersection.point)
        .collect()
}
/// Returns every point where `other` crosses `primary`, with the combined
/// number of steps each wire takes to FIRST reach that point.
pub fn find_instruction_intersections(primary: &Vec<Instruction>, other: &Vec<Instruction>) -> Vec<Intersection> {
    let points = instruction_points(primary);
    // Map each point to the step count of its FIRST visit. The previous
    // `enumerate().collect()` let later revisits of a self-crossing wire
    // overwrite earlier (smaller) step counts, inflating
    // `sum_steps_to_point`; `entry().or_insert` keeps the first index.
    let mut points_set: HashMap<Point, usize> = HashMap::with_capacity(points.len());
    for (i, p) in points.iter().enumerate() {
        points_set.entry(*p).or_insert(i);
    }
    println!("Contains {} points", points.len());
    let other_points = instruction_points(other);
    // The index into `other_points` equals the step count, since index 0
    // is the origin.
    return other_points
        .iter().enumerate()
        .filter(|(_i, p)| points_set.contains_key(p))
        .map(|(i, p)| Intersection{
            point: p.to_owned(),
            sum_steps_to_point: i + points_set[p]
        })
        .collect()
}
/// L1 (taxicab) distance between two points.
pub fn manhattan_distance(p1: &Point, p2: &Point) -> i32 {
    let dx = (p1.x - p2.x).abs();
    let dy = (p1.y - p2.y).abs();
    dx + dy
}
/// Reads the puzzle input and parses each line into one instruction list
/// (one wire per line); unreadable lines are skipped.
pub fn load_instruction_sets(filepath: &str) -> io::Result<Vec<Vec<Instruction>>> {
    let reader = BufReader::new(File::open(filepath)?);
    let instruction_sets = reader
        .lines()
        .filter_map(Result::ok)
        .map(|line| parse_instruction_line(&line))
        .collect();
    Ok(instruction_sets)
}
// a: 1064
/// Part 1: Manhattan distance from the origin to the closest intersection.
/// ```
/// assert_eq!(aoc2019::day3::run_a("../input/3.input").unwrap(), 1064);
/// ```
pub fn run_a(filepath: &str) -> io::Result<i32> {
    let instruction_sets = load_instruction_sets(filepath)?;
    let origin = Point{x: 0, y: 0};
    let mut result = -1;
    match &instruction_sets[0..2] {
        [i1, i2] => {
            let intersections: Vec<Point> = find_instruction_intersection_points(i1, i2);
            let mut distances: Vec<i32> = intersections.iter()
                .map(|p| manhattan_distance(p, &origin))
                .collect();
            distances.sort();
            // distances[0] is the origin itself (both wires start there),
            // so the answer is the next-smallest distance.
            result = distances[1];
            // println!("intersections {:?}", intersections);
            println!("sorted distances {:?}", distances);
        }
        _ => println!("unexpected")
    }
    return Ok(result);
}
/// Part 2: minimum combined step count over all intersections.
/// ```
/// assert_eq!(aoc2019::day3::run_b("../input/3.input").unwrap(), 25676);
/// ```
pub fn run_b(filepath: &str) -> io::Result<usize> {
    let instruction_sets = load_instruction_sets(filepath)?;
    let mut result = 0;
    match &instruction_sets[0..2] {
        [i1, i2] => {
            let mut intersections: Vec<Intersection> = find_instruction_intersections(i1, i2);
            intersections.sort_by(|a, b| a.sum_steps_to_point.cmp(&b.sum_steps_to_point));
            // Index 0 is the shared origin (sum 0); take the next-smallest.
            result = intersections[1].sum_steps_to_point;
            // println!("intersections {:?}", intersections);
            println!("sorted intersections {:?}", intersections);
        }
        _ => println!("unexpected")
    }
    println!("Result is {}", result);
    return Ok(result);
}
| true |
b993bec1b27c876a6bbcbfef9454d2de32289dc6
|
Rust
|
lgvier/rust-tracer
|
/src/bounds.rs
|
UTF-8
| 10,300 | 2.984375 | 3 |
[] |
no_license
|
use core::ops::Add;
use std::f64::INFINITY;
use crate::{matrix::Matrix, point, ray::Ray, tuple::Tuple, EPSILON};
/// Axis-aligned bounding box in 3-D, defined by two opposite corners.
#[derive(Copy, Clone, Debug, PartialEq)]
pub struct BoundingBox {
    /// Corner with the smallest x, y and z.
    pub min: Tuple,
    /// Corner with the largest x, y and z.
    pub max: Tuple,
}
impl BoundingBox {
    /// Creates a box from explicit corner points.
    pub fn new(min: Tuple, max: Tuple) -> BoundingBox {
        BoundingBox { min, max }
    }
    /// Creates an "inverted" box (min = +inf, max = -inf): it contains
    /// nothing and absorbs any point or box added to it via `Add`.
    pub fn empty() -> BoundingBox {
        BoundingBox::new(
            Tuple::point(INFINITY, INFINITY, INFINITY),
            Tuple::point(-INFINITY, -INFINITY, -INFINITY),
        )
    }
    /// True when `p` lies inside the box (boundaries inclusive).
    pub fn contains_point(&self, p: Tuple) -> bool {
        p.x >= self.min.x
            && p.x <= self.max.x
            && p.y >= self.min.y
            && p.y <= self.max.y
            && p.z >= self.min.z
            && p.z <= self.max.z
    }
    /// True when `other` fits entirely inside this box.
    pub fn contains_box(&self, other: BoundingBox) -> bool {
        self.contains_point(other.min) && self.contains_point(other.max)
    }
    /// Transforms all eight corners by `matrix` and returns the
    /// axis-aligned box enclosing the transformed corners.
    pub fn transform(&self, matrix: Matrix) -> BoundingBox {
        BoundingBox::empty()
            + (matrix * self.min)
            + (matrix * point!(self.min.x, self.min.y, self.max.z))
            + (matrix * point!(self.min.x, self.max.y, self.min.z))
            + (matrix * point!(self.min.x, self.max.y, self.max.z))
            + (matrix * point!(self.max.x, self.min.y, self.min.z))
            + (matrix * point!(self.max.x, self.min.y, self.max.z))
            + (matrix * point!(self.max.x, self.max.y, self.min.z))
            + (matrix * self.max)
    }
    /// Slab test: the ray hits the box iff the intersection of the three
    /// per-axis parameter intervals is non-empty.
    pub fn intersects(&self, ray: &Ray) -> bool {
        let (xtmin, xtmax) =
            BoundingBox::check_axis(ray.origin.x, ray.direction.x, self.min.x, self.max.x);
        let (ytmin, ytmax) =
            BoundingBox::check_axis(ray.origin.y, ray.direction.y, self.min.y, self.max.y);
        let (ztmin, ztmax) =
            BoundingBox::check_axis(ray.origin.z, ray.direction.z, self.min.z, self.max.z);
        let tmin = xtmin.max(ytmin.max(ztmin));
        let tmax = xtmax.min(ytmax.min(ztmax));
        // Simplified from `if tmin > tmax { false } else { true }`; written
        // as a negation (not `tmin <= tmax`) so NaN values — possible from
        // `0.0 * INFINITY` in check_axis — keep the original "hit" result.
        !(tmin > tmax)
    }
    /// Computes the entry/exit parameters of the ray against one axis slab,
    /// returned in ascending order.
    // Reused in cube.rs
    pub fn check_axis(origin: f64, direction: f64, min: f64, max: f64) -> (f64, f64) {
        let tmin_numerator = min - origin;
        let tmax_numerator = max - origin;
        let (tmin, tmax) = if direction.abs() >= EPSILON {
            (tmin_numerator / direction, tmax_numerator / direction)
        } else {
            // Ray parallel to the slab: push the bounds to +/- infinity.
            // NOTE(review): a numerator of exactly 0 yields 0 * INFINITY =
            // NaN here; behaviour intentionally kept as-is.
            (tmin_numerator * INFINITY, tmax_numerator * INFINITY)
        };
        if tmin > tmax {
            (tmax, tmin)
        } else {
            (tmin, tmax)
        }
    }
    /// Splits the box into two halves across its longest axis (used to
    /// build bounding-volume hierarchies).
    pub fn split(&self) -> (Self, Self) {
        let dx = self.max.x - self.min.x;
        let dy = self.max.y - self.min.y;
        let dz = self.max.z - self.min.z;
        let greatest = dx.max(dy.max(dz));
        let Tuple {
            x: mut x0,
            y: mut y0,
            z: mut z0,
            ..
        } = self.min;
        let Tuple {
            x: mut x1,
            y: mut y1,
            z: mut z1,
            ..
        } = self.max;
        // Move the dividing plane to the midpoint of the longest axis.
        if greatest == dx {
            x0 = x0 + dx / 2.0;
            x1 = x0;
        } else if greatest == dy {
            y0 = y0 + dy / 2.0;
            y1 = y0;
        } else {
            z0 = z0 + dz / 2.0;
            z1 = z0;
        }
        let mid_min = point!(x0, y0, z0);
        let mid_max = point!(x1, y1, z1);
        (
            BoundingBox::new(self.min, mid_max),
            BoundingBox::new(mid_min, self.max),
        )
    }
}
impl Add<BoundingBox> for BoundingBox {
    type Output = BoundingBox;
    /// Merges two boxes into the smallest box containing both.
    fn add(self, other: Self) -> Self {
        let min = Tuple::point(
            self.min.x.min(other.min.x),
            self.min.y.min(other.min.y),
            self.min.z.min(other.min.z),
        );
        let max = Tuple::point(
            self.max.x.max(other.max.x),
            self.max.y.max(other.max.y),
            self.max.z.max(other.max.z),
        );
        BoundingBox::new(min, max)
    }
}
impl Add<Tuple> for BoundingBox {
    type Output = BoundingBox;
    /// Grows the box just enough to contain the given point.
    fn add(self, other: Tuple) -> Self {
        let min = Tuple::point(
            self.min.x.min(other.x),
            self.min.y.min(other.y),
            self.min.z.min(other.z),
        );
        let max = Tuple::point(
            self.max.x.max(other.x),
            self.max.y.max(other.y),
            self.max.z.max(other.z),
        );
        BoundingBox::new(min, max)
    }
}
#[cfg(test)]
mod tests {
use std::f64::consts::PI;
use super::*;
use crate::{ray, vector};
#[test]
fn add_points() {
let b = BoundingBox::empty() + point!(-5, 2, 0) + point!(7, 0, -3);
assert_eq!(point!(-5, 0, -3), b.min);
assert_eq!(point!(7, 2, 0), b.max);
}
#[test]
fn add_box() {
let b1 = BoundingBox::new(point!(-5, 2, 0), point!(7, 4, 4));
let b2 = BoundingBox::new(point!(8, -7, -2), point!(14, 2, 8));
let result = b1 + b2;
assert_eq!(point!(-5, -7, -2), result.min);
assert_eq!(point!(14, 4, 8), result.max);
}
#[test]
fn contains_point() {
let b = BoundingBox::new(point!(5, -2, 0), point!(14, 4, 7));
for &(point, result) in &[
(point!(5, -2, 0), true),
(point!(11, 4, 7), true),
(point!(8, 1, 3), true),
(point!(3, 0, 3), false),
(point!(8, -4, 3), false),
(point!(8, 1, -1), false),
// (point!(13, 1, 3), false),
(point!(8, 5, 3), false),
(point!(8, 1, 8), false),
] {
assert_eq!(result, b.contains_point(point), "contains {:?}", point);
}
}
#[test]
fn contains_box() {
let b = BoundingBox::new(point!(5, -2, 0), point!(14, 4, 7));
for &(other, result) in &[
(BoundingBox::new(point!(5, -2, 0), point!(11, 4, 7)), true),
(BoundingBox::new(point!(6, -1, 1), point!(10, 3, 6)), true),
(BoundingBox::new(point!(4, -3, -1), point!(10, 3, 6)), false),
(BoundingBox::new(point!(6, -1, 1), point!(12, 5, 8)), false),
] {
assert_eq!(result, b.contains_box(other), "contains {:?}", other);
}
}
#[test]
fn transform() {
let b = BoundingBox::new(point!(-1, -1, -1), point!(1, 1, 1));
let matrix = Matrix::rotation_x(PI / 4.) * Matrix::rotation_y(PI / 4.);
let transformed = b.transform(matrix);
assert_eq!(point!(-1.41421, -1.707106, -1.707106), transformed.min);
assert_eq!(point!(1.41421, 1.707106, 1.707106), transformed.max);
}
#[test]
fn intersecting_with_a_bounding_box_at_the_origin() {
let bb = BoundingBox::new(point!(-1, -1, -1), point!(1, 1, 1));
for &(origin, direction, expected) in &[
(point!(5, 0.5, 0), vector!(-1, 0, 0), true),
(point!(-5, 0.5, 0), vector!(1, 0, 0), true),
(point!(0.5, 5, 0), vector!(0, -1, 0), true),
(point!(0.5, -5, 0), vector!(0, 1, 0), true),
(point!(0.5, 0, 5), vector!(0, 0, -1), true),
(point!(0.5, 0, -5), vector!(0, 0, 1), true),
(point!(0, 0.5, 0), vector!(0, 0, 1), true),
(point!(-2, 0, 0), vector!(2, 4, 6), false),
(point!(0, -2, 0), vector!(6, 2, 4), false),
(point!(0, 0, -2), vector!(4, 6, 2), false),
(point!(2, 0, 2), vector!(0, 0, -1), false),
(point!(0, 2, 2), vector!(0, -1, 0), false),
(point!(2, 2, 0), vector!(-1, 0, 0), false),
] {
let r = ray!(origin, direction.normalize());
assert_eq!(expected, bb.intersects(&r));
}
}
#[test]
fn intersecting_with_a_non_cubic_bounding_box() {
let bb = BoundingBox::new(point!(5, -2, 0), point!(11, 4, 7));
for &(origin, direction, expected) in &[
(point!(15, 1, 2), vector!(-1, 0, 0), true),
(point!(-5, -1, 4), vector!(1, 0, 0), true),
(point!(7, 6, 5), vector!(0, -1, 0), true),
(point!(9, -5, 6), vector!(0, 1, 0), true),
(point!(8, 2, 12), vector!(0, 0, -1), true),
(point!(6, 0, -5), vector!(0, 0, 1), true),
(point!(8, 1, 3.5), vector!(0, 0, 1), true),
(point!(9, -1, -8), vector!(2, 4, 6), false),
(point!(8, 3, -4), vector!(6, 2, 4), false),
(point!(9, -1, -2), vector!(4, 6, 2), false),
(point!(4, 0, 9), vector!(0, 0, -1), false),
(point!(8, 6, -1), vector!(0, -1, 0), false),
(point!(12, 5, 4), vector!(-1, 0, 0), false),
] {
let r = ray!(origin, direction.normalize());
assert_eq!(expected, bb.intersects(&r));
}
}
#[test]
fn split_perfect_cube() {
let bb = BoundingBox::new(point!(-1, -4, -5), point!(9, 6, 5));
let (left, right) = bb.split();
assert_eq!(point!(-1, -4, -5), left.min);
assert_eq!(point!(4, 6, 5), left.max);
assert_eq!(point!(4, -4, -5), right.min);
assert_eq!(point!(9, 6, 5), right.max);
}
#[test]
fn split_x_wide_box() {
let bb = BoundingBox::new(point!(-1, -2, -3), point!(9, 5.5, 3));
let (left, right) = bb.split();
assert_eq!(point!(-1, -2, -3), left.min);
assert_eq!(point!(4, 5.5, 3), left.max);
assert_eq!(point!(4, -2, -3), right.min);
assert_eq!(point!(9, 5.5, 3), right.max);
}
#[test]
fn split_y_wide_box() {
let bb = BoundingBox::new(point!(-1, -2, -3), point!(5, 8, 3));
let (left, right) = bb.split();
assert_eq!(point!(-1, -2, -3), left.min);
assert_eq!(point!(5, 3, 3), left.max);
assert_eq!(point!(-1, 3, -3), right.min);
assert_eq!(point!(5, 8, 3), right.max);
}
#[test]
fn split_z_wide_box() {
let bb = BoundingBox::new(point!(-1, -2, -3), point!(5, 3, 7));
let (left, right) = bb.split();
assert_eq!(point!(-1, -2, -3), left.min);
assert_eq!(point!(5, 3, 2), left.max);
assert_eq!(point!(-1, -2, 2), right.min);
assert_eq!(point!(5, 3, 7), right.max);
}
}
| true |
85e03a89f3754a2e09d40dfeaae2234852ae03fa
|
Rust
|
rust-av/ivf-rs
|
/src/muxer.rs
|
UTF-8
| 6,181 | 2.515625 | 3 |
[
"MIT"
] |
permissive
|
//!
//! Implement the muxer trait from av-format and expose all the correct
//! abstraction to handle them. Refer to the `Muxer` trait for more info.
//!
//!
use std::io::Write;
use std::sync::Arc;
use log::{debug, trace};
use av_bitstream::bytewrite::*;
use av_data::packet::Packet;
use av_data::params::MediaKind;
use av_data::rational::Rational32;
use av_data::value::Value;
use av_format::common::GlobalInfo;
use av_format::error::*;
pub use av_format::muxer::Muxer;
pub use av_format::muxer::{Context, Writer};
use crate::common::Codec;
#[derive(Debug)]
pub struct IvfMuxer {
version: u16,
width: u16,
height: u16,
frame_rate: Rational32,
scale: u32,
codec: Codec,
duration: u32,
info: Option<GlobalInfo>,
}
impl Default for IvfMuxer {
fn default() -> Self {
IvfMuxer {
frame_rate: Rational32::new(30, 1),
version: Default::default(),
width: Default::default(),
height: Default::default(),
scale: Default::default(),
codec: Default::default(),
duration: Default::default(),
info: Default::default(),
}
}
}
impl IvfMuxer {
pub fn new() -> IvfMuxer {
IvfMuxer::default()
}
}
/// This should be called if IvfMuxer::info is set
impl Muxer for IvfMuxer {
fn configure(&mut self) -> Result<()> {
match self.info.as_ref() {
Some(info) if !info.streams.is_empty() => {
self.duration = info.streams[0].duration.unwrap_or_default() as u32;
let params = &info.streams[0].params;
self.version = 0;
if let Some(MediaKind::Video(video)) = ¶ms.kind {
self.width = video.width as u16;
self.height = video.height as u16;
};
self.frame_rate = info
.timebase
.map(|tb| Rational32::new(*tb.denom() as i32, *tb.numer() as i32))
.unwrap_or_else(|| Rational32::new(30, 1));
self.scale = 1;
self.codec = match params.codec_id.as_deref() {
Some("av1") => Codec::AV1,
Some("vp8") => Codec::VP8,
Some("vp9") => Codec::VP9,
_ => Codec::default(),
};
debug!("Configuration changes {:?}", self);
Ok(())
}
_ => {
debug!("No configuration changes {:?}", self);
Ok(())
}
}
}
fn write_header<W: Write>(&mut self, buf: &mut Writer<W>) -> Result<()> {
debug!("Write muxer header: {:?}", self);
let codec = match self.codec {
Codec::VP8 => b"VP80",
Codec::VP9 => b"VP90",
Codec::AV1 => b"AV01",
};
let mut tmp_buf = [0u8; 20];
buf.write_all(b"DKIF")?;
put_u16l(&mut tmp_buf[0..2], self.version);
put_u16l(&mut tmp_buf[2..4], 32);
buf.write_all(&tmp_buf[..4])?;
buf.write_all(codec)?;
put_u16l(&mut tmp_buf[0..2], self.width);
put_u16l(&mut tmp_buf[2..4], self.height);
put_u32l(&mut tmp_buf[4..8], *self.frame_rate.numer() as u32);
put_u32l(&mut tmp_buf[8..12], *self.frame_rate.denom() as u32);
put_u32l(&mut tmp_buf[12..16], self.duration);
put_u32l(&mut tmp_buf[16..20], 0);
buf.write_all(&tmp_buf)?;
Ok(())
}
fn write_packet<W: Write>(&mut self, buf: &mut Writer<W>, pkt: Arc<Packet>) -> Result<()> {
trace!("Write packet: {:?}", pkt.pos);
let mut frame_header = [0; 12];
put_u32l(&mut frame_header[0..4], pkt.data.len() as u32);
put_u64l(&mut frame_header[4..12], pkt.pos.unwrap_or_default() as u64);
buf.write_all(&frame_header)?;
buf.write_all(&pkt.data)?;
Ok(())
}
fn write_trailer<W: Write>(&mut self, buf: &mut Writer<W>) -> Result<()> {
buf.flush()?;
Ok(())
}
fn set_global_info(&mut self, info: GlobalInfo) -> Result<()> {
self.info = Some(info);
Ok(())
}
fn set_option<'a>(&mut self, key: &str, val: Value<'a>) -> Result<()> {
match key {
"frame_rate" => {
self.frame_rate = get_val_rational(val)?;
}
"width" => {
self.width = get_val_int(val)? as u16;
}
"height" => {
self.height = get_val_int(val)? as u16;
}
"scale" => {
self.scale = get_val_int(val)? as u32;
}
"duration" => {
self.duration = get_val_int(val)? as u32;
}
_ => {
return Err(av_format::error::Error::InvalidData);
}
};
Ok(())
}
}
fn get_val_rational(val: Value<'_>) -> Result<Rational32> {
match val {
Value::I64(val) => Ok(Rational32::new(val as i32, 1)),
Value::U64(val) => Ok(Rational32::new(val as i32, 1)),
Value::Pair(numer, denom) => Ok(Rational32::new(numer as i32, denom as i32)),
_ => Err(av_format::error::Error::InvalidData),
}
}
fn get_val_int(val: Value<'_>) -> Result<i64> {
match val {
Value::I64(val) => Ok(val),
Value::U64(val) => Ok(val as i64),
_ => Err(av_format::error::Error::InvalidData),
}
}
#[cfg(test)]
mod tests {
use std::io::Cursor;
use av_format::common::GlobalInfo;
use av_format::muxer::{Context, Writer};
use super::*;
#[test]
fn mux() {
let _ = pretty_env_logger::try_init();
let info = GlobalInfo {
duration: None,
timebase: None,
streams: Vec::new(),
};
let mut muxer = Context::new(IvfMuxer::new(), Writer::new(Cursor::new(Vec::new())));
muxer.set_global_info(info).unwrap();
muxer.configure().unwrap();
muxer.write_header().unwrap();
tempfile::tempfile()
.unwrap()
.write_all(muxer.writer().as_ref().0.get_ref())
.unwrap();
}
}
| true |
7d7ee0d6eec438503bf60a08acfe5fc041c394ec
|
Rust
|
csherland/Advent-of-Code-2019
|
/day1/src/main.rs
|
UTF-8
| 744 | 3.296875 | 3 |
[] |
no_license
|
use std::fs::File;
use std::io::BufRead;
use std::io::BufReader;
fn main() {
let fp = File::open("./inputs.txt")
.expect("Could not find file");
let file = BufReader::new(&fp);
let inputs : Vec<i32> = file.lines()
.map(|l| l.expect("Could not read line").parse().unwrap())
.collect();
let mut required_fuel = 0;
for mass in inputs.clone() {
let mut fuel = ( mass / 3 ) - 2;
required_fuel += fuel;
// Mass of fuel we just added also requires fuel!
while fuel > 0 {
let extra_fuel = (fuel / 3) - 2;
if extra_fuel > 0 {
required_fuel += extra_fuel;
}
fuel = extra_fuel;
}
}
println!("Required fuel: {}", required_fuel);
}
| true |
e0caecd71df3be57379f3737b69152cf58331ea3
|
Rust
|
CORDEA/eyes.server
|
/src/formatter.rs
|
UTF-8
| 953 | 2.796875 | 3 |
[
"Apache-2.0"
] |
permissive
|
use crate::google::response::AddressComponent;
pub fn format(addresses: &Vec<AddressComponent>) -> String {
let mut result: String = String::new();
if let Some(adrs) = find_by_type(addresses, "sublocality_level_2") {
result += &adrs.long_name;
}
if let Some(adrs) = find_by_type(addresses, "locality") {
if !result.is_empty() {
result += ", "
}
result += &adrs.long_name;
}
if let Some(adrs) = find_by_type(addresses, "administrative_area_level_1") {
if !result.is_empty() {
result += ", "
}
result += &adrs.long_name;
}
result
}
fn find_by_type<'a>(addresses: &'a Vec<AddressComponent>, find_type: &'static str) -> Option<&'a AddressComponent> {
addresses
.iter()
.find(|&address|
address.types
.iter()
.find(|&typ| typ == find_type)
.is_some()
)
}
| true |
7b1c87d56eb8ea0be2c88e6245c391daf38b7a40
|
Rust
|
msmedes/rusthasm
|
/src/assembler/symbol.rs
|
UTF-8
| 1,885 | 3.3125 | 3 |
[] |
no_license
|
use std::collections::HashMap;
#[derive(Debug)]
pub struct SymbolTable {
table: HashMap<String, u16>,
counter: u16,
}
impl SymbolTable {
pub fn new() -> SymbolTable {
let table: HashMap<String, u16> = [
("SP", 0),
("LCL", 1),
("ARG", 2),
("THIS", 3),
("THAT", 4),
("r0", 0),
("R0", 0),
("r1", 1),
("R1", 1),
("r2", 2),
("R2", 2),
("r3", 3),
("R3", 3),
("r4", 4),
("R4", 4),
("r5", 5),
("R5", 5),
("r6", 6),
("R6", 6),
("r7", 7),
("R7", 7),
("r8", 8),
("R8", 8),
("r9", 9),
("R9", 9),
("r10", 10),
("R10", 10),
("r11", 11),
("R11", 11),
("r12", 12),
("R12", 12),
("r13", 13),
("R13", 13),
("r14", 14),
("R14", 14),
("r15", 15),
("R15", 15),
("SCREEN", 16834),
("KBD", 24576),
]
.iter()
.map(|(key, val)| (key.to_owned().to_string(), *val))
.collect();
SymbolTable { table, counter: 16 }
}
pub fn add_entry(&mut self, symbol: String, addr: u16) {
self.table.insert(symbol, addr);
}
pub fn add_variable(&mut self, symbol: String) -> u16 {
self.table.insert(symbol, self.counter);
self.counter += 1;
self.counter - 1
}
pub fn get_addr(&self, symbol: &String) -> Option<u16> {
match self.table.get(symbol) {
Some(val) => Some(*val),
None => None,
}
}
pub fn contains(&self, symbol: String) -> bool {
self.table.get(&symbol).is_some()
}
}
| true |
cc4351bd66e11f7725b782ed8b69ec80e213745b
|
Rust
|
rust-lang/rust-analyzer
|
/crates/parser/src/tests/top_entries.rs
|
UTF-8
| 6,633 | 2.546875 | 3 |
[
"Apache-2.0",
"MIT"
] |
permissive
|
use expect_test::expect;
use crate::TopEntryPoint;
#[test]
fn source_file() {
check(
TopEntryPoint::SourceFile,
"",
expect![[r#"
SOURCE_FILE
"#]],
);
check(
TopEntryPoint::SourceFile,
"struct S;",
expect![[r#"
SOURCE_FILE
STRUCT
STRUCT_KW "struct"
WHITESPACE " "
NAME
IDENT "S"
SEMICOLON ";"
"#]],
);
check(
TopEntryPoint::SourceFile,
"@error@",
expect![[r#"
SOURCE_FILE
ERROR
AT "@"
MACRO_CALL
PATH
PATH_SEGMENT
NAME_REF
IDENT "error"
ERROR
AT "@"
error 0: expected an item
error 6: expected BANG
error 6: expected `{`, `[`, `(`
error 6: expected SEMICOLON
error 6: expected an item
"#]],
);
}
#[test]
fn macro_stmt() {
check(
TopEntryPoint::MacroStmts,
"",
expect![[r#"
MACRO_STMTS
"#]],
);
check(
TopEntryPoint::MacroStmts,
"#!/usr/bin/rust",
expect![[r##"
MACRO_STMTS
ERROR
SHEBANG "#!/usr/bin/rust"
error 0: expected expression, item or let statement
"##]],
);
check(
TopEntryPoint::MacroStmts,
"let x = 1 2 struct S;",
expect![[r#"
MACRO_STMTS
LET_STMT
LET_KW "let"
WHITESPACE " "
IDENT_PAT
NAME
IDENT "x"
WHITESPACE " "
EQ "="
WHITESPACE " "
LITERAL
INT_NUMBER "1"
WHITESPACE " "
EXPR_STMT
LITERAL
INT_NUMBER "2"
WHITESPACE " "
STRUCT
STRUCT_KW "struct"
WHITESPACE " "
NAME
IDENT "S"
SEMICOLON ";"
"#]],
);
}
#[test]
fn macro_items() {
check(
TopEntryPoint::MacroItems,
"",
expect![[r#"
MACRO_ITEMS
"#]],
);
check(
TopEntryPoint::MacroItems,
"#!/usr/bin/rust",
expect![[r##"
MACRO_ITEMS
ERROR
SHEBANG "#!/usr/bin/rust"
error 0: expected an item
"##]],
);
check(
TopEntryPoint::MacroItems,
"struct S; foo!{}",
expect![[r#"
MACRO_ITEMS
STRUCT
STRUCT_KW "struct"
WHITESPACE " "
NAME
IDENT "S"
SEMICOLON ";"
WHITESPACE " "
MACRO_CALL
PATH
PATH_SEGMENT
NAME_REF
IDENT "foo"
BANG "!"
TOKEN_TREE
L_CURLY "{"
R_CURLY "}"
"#]],
);
}
#[test]
fn macro_pattern() {
check(
TopEntryPoint::Pattern,
"",
expect![[r#"
ERROR
error 0: expected pattern
"#]],
);
check(
TopEntryPoint::Pattern,
"Some(_)",
expect![[r#"
TUPLE_STRUCT_PAT
PATH
PATH_SEGMENT
NAME_REF
IDENT "Some"
L_PAREN "("
WILDCARD_PAT
UNDERSCORE "_"
R_PAREN ")"
"#]],
);
check(
TopEntryPoint::Pattern,
"None leftover tokens",
expect![[r#"
ERROR
IDENT_PAT
NAME
IDENT "None"
WHITESPACE " "
IDENT "leftover"
WHITESPACE " "
IDENT "tokens"
"#]],
);
check(
TopEntryPoint::Pattern,
"@err",
expect![[r#"
ERROR
ERROR
AT "@"
IDENT "err"
error 0: expected pattern
"#]],
);
}
#[test]
fn type_() {
check(
TopEntryPoint::Type,
"",
expect![[r#"
ERROR
error 0: expected type
"#]],
);
check(
TopEntryPoint::Type,
"Option<!>",
expect![[r#"
PATH_TYPE
PATH
PATH_SEGMENT
NAME_REF
IDENT "Option"
GENERIC_ARG_LIST
L_ANGLE "<"
TYPE_ARG
NEVER_TYPE
BANG "!"
R_ANGLE ">"
"#]],
);
check(
TopEntryPoint::Type,
"() () ()",
expect![[r#"
ERROR
TUPLE_TYPE
L_PAREN "("
R_PAREN ")"
WHITESPACE " "
L_PAREN "("
R_PAREN ")"
WHITESPACE " "
L_PAREN "("
R_PAREN ")"
"#]],
);
check(
TopEntryPoint::Type,
"$$$",
expect![[r#"
ERROR
ERROR
DOLLAR "$"
DOLLAR "$"
DOLLAR "$"
error 0: expected type
"#]],
);
}
#[test]
fn expr() {
check(
TopEntryPoint::Expr,
"",
expect![[r#"
ERROR
error 0: expected expression
"#]],
);
check(
TopEntryPoint::Expr,
"2 + 2 == 5",
expect![[r#"
BIN_EXPR
BIN_EXPR
LITERAL
INT_NUMBER "2"
WHITESPACE " "
PLUS "+"
WHITESPACE " "
LITERAL
INT_NUMBER "2"
WHITESPACE " "
EQ2 "=="
WHITESPACE " "
LITERAL
INT_NUMBER "5"
"#]],
);
check(
TopEntryPoint::Expr,
"let _ = 0;",
expect![[r#"
ERROR
LET_EXPR
LET_KW "let"
WHITESPACE " "
WILDCARD_PAT
UNDERSCORE "_"
WHITESPACE " "
EQ "="
WHITESPACE " "
LITERAL
INT_NUMBER "0"
SEMICOLON ";"
"#]],
);
}
#[track_caller]
fn check(entry: TopEntryPoint, input: &str, expect: expect_test::Expect) {
let (parsed, _errors) = super::parse(entry, input);
expect.assert_eq(&parsed)
}
| true |
11a44e35eb05d8b9aeb04807308973c43d2a76bf
|
Rust
|
andy128k/einstein
|
/einstein/src/resources/rules/rules_de.rs
|
UTF-8
| 3,260 | 3.046875 | 3 |
[] |
no_license
|
use super::base::TextItem;
pub const RULES: &[TextItem] = &[
TextItem::Text(
"Das Ziel des Spieles besteht darin, alle Karten in einem 6x6-Quadrat
aufzudecken. Wenn alle Karten offen sind, sieht das Feld wie folgt aus:",
),
TextItem::Image(resource!("./opensquare.bmp")),
TextItem::Text(
"Jede Zeile des Quadrat enthält nur Karten eines Typs. Beispielsweise enthält
die erste Zeile arabische Zahlen, die zweite Buchstaben, die dritte römische
Zahlen, die vierte Würfel, die fünfte geometrische Figuren und die sechste
mathematische Symbole.",
),
TextItem::Text(
"Verwenden Sie Logik und öffnen Sie Karten mit der Ausschlussmethode.
Falls eine Karte sich nicht öffnet, enthält die Zelle alle möglichen
Karten. Zum Beispiel bedeutet",
),
TextItem::Image(resource!("./closed.bmp")),
TextItem::Text(
"dass diese Zelle jede römische Zahl außer der III enthalten könnte
(da die Karte mit dem Bild III fehlt). Um eine Karte zu öffnen, klicken
Sie mit der linken Maustaste auf das kleine Bild . Um eine Karte
auszuschließen, klicken Sie mit der rechten Maustaste.",
),
TextItem::Text(
"Verwenden Sie Tipps, um das Puzzle zu lösen. Es gibt zwei Arten von
Tipps: Horizontale und Vertikale. Vertikale Tipps befinden sich unten
am Bildschirm. Zum Beispiel bedeutet der vertikale Tipp",
),
TextItem::Image(resource!("./verthint.bmp")),
TextItem::Text(
"dass der Buchstabe »B« und das Zeichen »+« sich in der gleichen Spalte befinden.",
),
TextItem::Text(
"Horizontale Tipps befinden sich auf der rechten Seite des Puzzlequadrats.
Es gibt eine Reihe von Arten von horizontalen Tipps. Die erste Art von
horizontalen Tipps besagt, dass zwei Karten sich in benachbarten Spalten
befinden, es aber unbekannt ist, welche sich auf der rechten und welche
sich auf der linken Seite befindet:",
),
TextItem::Image(resource!("./hornearhint.bmp")),
TextItem::Text(
"Die zweite Art von Tipp bedeutet, dass sich eine Karte links von einer
anderen befindet. Es sagt nichts über die Distanz zwischen den Karten
aus. Sie können sich in benachbarten Spalten oder auf gegenüberliegenden
Seiten des Puzzles befinden:",
),
TextItem::Image(resource!("./horposhint.bmp")),
TextItem::Text(
"Die letzte Art von Tipp bedeutet, dass sich eine Karte zwischen zwei
anderen Karten befindet:",
),
TextItem::Image(resource!("./horbetweenhint.bmp")),
TextItem::Text(
"Alle drei Karten müssen sich in benachbarten Spalten befinden, die zentrale
Karte ist immer zwischen den anderen zwei, aber es ist unbekannt, welche
Karte sich auf der rechten Seite und welche sich auf der linken befindet.",
),
TextItem::Text(
"Falls Sie einen Tipp nicht mehr benötigen, entfernen Sie ihn durch einen
rechten Mausklick. Sie können entfernte Tipps immer durch Drücken des
»Umschalte«-Knopfs wieder sehen.",
),
TextItem::Text("Wer ein Spiel lädt oder neu startet, kommt nicht in die Ruhmeshalle."),
];
| true |
26d75aa4e8bff2b950d27554da93675fb81c0c59
|
Rust
|
AndrewKvalheim/rust-exercises
|
/paasio/src/inspectable.rs
|
UTF-8
| 280 | 3.09375 | 3 |
[
"MIT"
] |
permissive
|
pub trait Inspectable<T> {
fn inspect<F: FnOnce(&T) -> ()>(self, f: F) -> Self;
}
impl<T> Inspectable<T> for std::io::Result<T> {
fn inspect<F: FnOnce(&T) -> ()>(self, f: F) -> Self {
self.map(|value| {
f(&value);
value
})
}
}
| true |
25d393d99d67066d830149f2a6af0e1bdccbfd19
|
Rust
|
abonander/par-vec
|
/src/lib.rs
|
UTF-8
| 9,977 | 3.546875 | 4 |
[
"MIT",
"Apache-2.0"
] |
permissive
|
//! Parallel mutation of vectors via non-overlapping slices.
#![cfg_attr(feature = "bench", feature(test, step_by))]
use std::fmt::{Formatter, Debug};
use std::fmt::Error as FmtError;
use std::sync::{Arc, Condvar, Mutex};
use std::mem;
use std::ops;
/// Our inner `Vec` container.
struct VecBox<T> {
slice_count: usize,
data: Vec<T>,
}
impl<T> VecBox<T> {
fn new(slice_count: usize, data: Vec<T>) -> VecBox<T> {
VecBox {
slice_count: slice_count,
data: data,
}
}
/// Decrement the slice count
fn decrement(&mut self) {
self.slice_count -= 1;
}
/// Try to unwrap this box, replacing `data` with an empty vector if `slice_count == 0`
fn try_unwrap(&mut self) -> Option<Vec<T>> {
match self.slice_count {
0 => Some(mem::replace(&mut self.data, Vec::new())),
_ => None,
}
}
}
struct ParVecInner<T> {
inner: Mutex<VecBox<T>>,
cvar: Condvar,
}
impl<T: Send> ParVecInner<T> {
fn new(slice_count: usize, data: Vec<T>) -> ParVecInner<T> {
ParVecInner {
inner: Mutex::new(VecBox::new(slice_count, data)),
cvar: Condvar::new(),
}
}
fn decrement(&self) {
self.inner.lock().unwrap().decrement();
self.cvar.notify_one();
}
fn try_unwrap(&self, timeout: u32) -> Option<Vec<T>> {
let mut lock = self.inner.lock().unwrap();
if let Some(data) = lock.try_unwrap() {
return Some(data);
}
let (mut lock, _) = self.cvar.wait_timeout_ms(lock, timeout).unwrap();
lock.try_unwrap()
}
fn unwrap(&self) -> Vec<T> {
let mut lock = self.inner.lock().unwrap();
loop {
if let Some(data) = lock.try_unwrap() {
return data;
}
lock = self.cvar.wait(lock).unwrap();
}
}
}
/// A vector that can be mutated in-parallel via non-overlapping slices.
///
/// Get a `ParVec` and a vector of slices via `new()`, send the slices to other threads
/// and mutate them, then get the mutated vector with `.unwrap()` when finished.
pub struct ParVec<T> {
inner: Arc<ParVecInner<T>>,
}
impl<T: Send> ParVec<T> {
/// Create a new `ParVec`, returning it and a number of slices equal to
/// `slice_count`, that can be sent to other threads and mutated in-parallel.
///
/// The vector's length will be divided up amongst the slices as evenly as possible.
pub fn new(vec: Vec<T>, slice_count: usize) -> (ParVec<T>, Vec<ParSlice<T>>) {
let slices = sub_slices(&vec, slice_count);
let inner = Arc::new(ParVecInner::new(slice_count, vec));
let par_slices = slices.into_iter().map(|slice|
ParSlice {
inner: inner.clone(),
data: slice,
}
).collect();
let par_vec = ParVec {
inner: inner,
};
(par_vec, par_slices)
}
/// Attempt to take the inner `Vec` before `timeout` if there are no slices remaining.
/// Returns `None` if the timeout elapses and there are still slices remaining.
pub fn try_unwrap(&self, timeout: u32) -> Option<Vec<T>> {
self.inner.try_unwrap(timeout)
}
/// Take the inner `Vec`, waiting until all slices have been freed.
///
/// ###Deadlock Warning
/// Before calling this method, you should ensure that all `ParSlice` instances have either been:
///
/// - moved to other threads that will quit sometime in the future, or;
/// - dropped, implicitly (left in an inner scope) or explicitly (passed to `mem::drop()`)
///
/// Otherwise, a deadlock will likely occur.
pub fn unwrap(self) -> Vec<T> {
self.inner.unwrap()
}
}
/// Create a vector of raw subslices that are as close to each other in size as possible.
fn sub_slices<T>(parent: &[T], slice_count: usize) -> Vec<*mut [T]> {
use std::cmp;
let len = parent.len();
let mut start = 0;
// By iteratively dividing the length remaining in the vector by the number of slices
// remaining, we get a set of slices with a minimal deviation of lengths.
//
// For example, taking 8 slices of a vector of length 42 should yield 6 slices of length 5 and
// 2 slices of length 6. In contrast, taking 7 slices should yield 7 slices of length 6.
(1 .. slice_count + 1).rev().map(|curr| {
let slice_len = (len - start) / curr;
let end = cmp::min(start + slice_len, len);
let slice = &parent[start..end];
start += slice_len;
slice as *const [T] as *mut [T]
}).collect()
}
/// A slice of `ParVec` that can be sent to another task for processing.
/// Automatically releases the slice on drop.
pub struct ParSlice<T: Send> {
inner: Arc<ParVecInner<T>>,
data: *mut [T],
}
unsafe impl<T: Send> Send for ParSlice<T> {}
impl<T: Send> ops::Deref for ParSlice<T> {
type Target = [T];
fn deref<'a>(&'a self) -> &'a [T] {
unsafe { & *self.data }
}
}
impl<T: Send> ops::DerefMut for ParSlice<T> {
fn deref_mut<'a>(&'a mut self) -> &'a mut [T] {
unsafe { &mut *self.data }
}
}
impl<T: Send> Debug for ParSlice<T> where T: Debug {
fn fmt(&self, f: &mut Formatter) -> Result<(), FmtError> {
write!(f, "{:?}", &*self)
}
}
impl<T: Send> Drop for ParSlice<T> {
fn drop(&mut self) {
self.inner.decrement();
}
}
// place these constants here so both the `test` and `bench` modules can use them
const TEST_SLICES: usize = 8;
const TEST_MAX: u32 = 1000;
#[cfg(test)]
mod test {
use ::{ParVec, TEST_SLICES, TEST_MAX};
#[test]
fn test_unwrap_safely() {
let (vec, slices) = ParVec::new([5u32; TEST_MAX as usize].to_vec(), TEST_SLICES);
drop(slices);
let vec = vec.unwrap();
assert_eq!(&*vec, &[5u32; TEST_MAX as usize][..]);
}
#[test]
fn test_slices() {
let (_, slices) = ParVec::new((1u32 .. TEST_MAX).collect(), TEST_SLICES);
assert_eq!(slices.len(), TEST_SLICES);
}
#[test]
fn test_nonoverlapping_slices() {
fn are_nonoverlapping<T>(left: &[T], right: &[T]) -> bool {
let left_start = left.as_ptr() as usize;
let right_start = right.as_ptr() as usize;
let left_end = left_start + left.len();
let right_end = right_start + right.len();
// `left` starts and ends before `right`
left_end < right_start ||
// `right` ends before `left`
right_end < left_start
}
let data: Vec<u32> = (1 .. TEST_MAX).collect();
let start_ptr = data.as_ptr() as usize;
let (_, slices) = ParVec::new(data, TEST_SLICES);
// This can probably be done in O(n log n) instead of O(n^2).
// Suggestions are welcome.
for (left_idx, left) in slices.iter().enumerate() {
for (_, right) in slices.iter().enumerate()
.filter(|&(right_idx, _)| right_idx != left_idx)
{
let left_start = left.as_ptr() as usize - start_ptr;
let right_start = right.as_ptr() as usize - start_ptr;
assert!(
are_nonoverlapping(left, right),
"Slices overlapped! left: {left:?} right: {right:?}",
left = (left_start, left_start + left.len()),
right = (right_start, right_start + right.len())
)
}
}
}
}
#[cfg(feature = "bench")]
mod bench {
extern crate rand;
extern crate threadpool;
extern crate test;
use ::{ParVec, TEST_SLICES, TEST_MAX};
use self::rand::{thread_rng, Rng};
use self::test::Bencher;
use self::threadpool::ThreadPool;
#[bench]
fn seq_prime_factors_1000(b: &mut Bencher) {
let vec: Vec<u32> = (1 .. TEST_MAX).collect();
b.iter(|| {
let _: Vec<(u32, Vec<u32>)> = vec.iter()
.map(|&x| (x, get_prime_factors(x)))
.collect();
});
}
#[bench]
fn par_prime_factors_1000(b: &mut Bencher) {
let mut rng = thread_rng();
let pool = ThreadPool::new(TEST_SLICES);
b.iter(|| {
let mut vec: Vec<(u32, Vec<u32>)> = (1 .. TEST_MAX)
.map(|x| (x, Vec::new())).collect();
// Shuffle so each thread gets an even distribution of work.
// Otherwise, the threads with the lower numbers will quit early.
rng.shuffle(&mut *vec);
let (par_vec, par_slices) = ParVec::new(vec, TEST_SLICES);
for mut slice in par_slices {
pool.execute(move ||
for pair in &mut *slice {
let (x, ref mut x_primes) = *pair;
*x_primes = get_prime_factors(x);
}
);
}
let mut vec = par_vec.unwrap();
// Sort so they're in the same order as sequential.
vec.sort();
});
}
fn get_prime_factors(x: u32) -> Vec<u32> {
(1 .. x).filter(|&y| x % y == 0 && is_prime(y)).collect()
}
fn is_prime(x: u32) -> bool {
// 2 and 3 are prime, but 0 and 1 are not.
(x > 1 && x < 4) ||
// Fast check for even-ness.
x & 1 != 0 &&
// If `x mod i` for every odd number `i < x`, then x is prime.
// Intentionally naive for the sake of the benchmark.
(3 .. x).step_by(2).all(|i| x % i != 0)
}
#[test]
fn test_is_prime() {
// Test a reasonable number of primes to make sure the function actually works
for &i in &[2, 3, 5, 7, 11, 13, 17, 19, 23, 29, 31, 37] {
assert!(is_prime(i));
}
for i in (4..40).step_by(2) {
assert!(!is_prime(i));
}
}
}
| true |
68e0874a88985cab18c24eb0b7185f3115703cff
|
Rust
|
aksiksi/kvs
|
/src/engine/sled.rs
|
UTF-8
| 1,426 | 2.921875 | 3 |
[] |
no_license
|
use std::path::PathBuf;
use super::KvsEngine;
use crate::error::{Error, Result};
/// Wrapper for Sled storage engine
pub struct SledKvsEngine {
db: sled::Db,
}
impl SledKvsEngine {
const LOG_NAME: &'static str = "sled";
/// Returns `true` if log already exists
pub fn is_log_present(path: impl Into<PathBuf>) -> bool {
let log_dir = path.into();
let log_file = log_dir.join(Self::LOG_NAME);
log_file.exists()
}
pub fn open(path: impl Into<PathBuf>) -> Result<Self> {
let log_dir = path.into();
let log_file = log_dir.join(Self::LOG_NAME);
let db = sled::open(log_file)?;
log::info!("Opened DB, recovered = {}", db.was_recovered());
Ok(Self { db })
}
}
impl KvsEngine for SledKvsEngine {
fn set(&mut self, key: String, value: String) -> Result<()> {
self.db.insert(key.as_bytes(), value.as_bytes())?;
self.db.flush()?;
Ok(())
}
fn get(&mut self, key: String) -> Result<Option<String>> {
let value = self
.db
.get(key.as_bytes())?
.map(|v| String::from_utf8(v.to_vec()).unwrap());
Ok(value)
}
fn remove(&mut self, key: String) -> Result<()> {
let res = self.db.remove(key.as_bytes())?;
self.db.flush()?;
match res {
None => Err(Error::KeyNotFound),
_ => Ok(()),
}
}
}
| true |
a69124a7d0f2161ae9a12523fbc527f1a3ce99cf
|
Rust
|
IThawk/rust-project
|
/rust-master/src/test/ui/return/return-unit-from-diverging.rs
|
UTF-8
| 245 | 2.75 | 3 |
[
"MIT",
"LicenseRef-scancode-other-permissive",
"Apache-2.0",
"BSD-3-Clause",
"BSD-2-Clause",
"NCSA"
] |
permissive
|
// Test that we get the usual error that we'd get for any other return type and not something about
// diverging functions not being able to return.
//
// NOTE: this is a rustc UI-test fixture. The `//~ ERROR` annotation is load-bearing and must stay
// on the same line as the `return`; shifting line numbers requires re-blessing the .stderr file.
fn fail() -> ! {
    return; //~ ERROR in a function whose return type is not
}
fn main() {
}
| true |
80f61917243a3a9525e6aa477cb9947afaf78058
|
Rust
|
juliD/Stresstest
|
/src/examples/send_addresses.rs
|
UTF-8
| 2,305 | 2.921875 | 3 |
[] |
no_license
|
extern crate actor_model;
use std::collections::LinkedList;
use std::thread;
use std::time::Duration;
use actor_model::actor::*;
use actor_model::actor_system::*;
use actor_model::address::*;
use actor_model::context::*;
use actor_model::thread_utils::*;
/// Messages exchanged between the example actors.
#[derive(Clone)]
enum CustomMessage {
    /// Ask the receiver to spawn a child actor and send a greeting
    /// (carrying the counter value) to the enclosed address.
    SpawnMessage(Address<CustomMessage>, u32),
    /// Plain text payload; printed by `OutputActor`.
    StringMessage(String),
}
/// Actor that spawns a fresh child actor for every `SpawnMessage` it handles.
struct SpawningActor {
    /// Populated by `start()`; `None` until the actor joins the system.
    context: Option<Context<CustomMessage>>,
}
impl SpawningActor {}
impl Actor<CustomMessage> for SpawningActor {
    /// On `SpawnMessage`: registers a child `SpawningActor`, greets the original
    /// requester, then forwards an incremented counter to the child so the
    /// chain continues forever. All other message kinds are ignored.
    fn handle(&mut self, message: CustomMessage, origin_address: Option<Address<CustomMessage>>) {
        match message {
            CustomMessage::SpawnMessage(address, count) => {
                // Panics if `start()` has not run yet (context still None).
                let ctx: &Context<CustomMessage> = self.context.as_ref().expect("unwrapping context");
                let child_addr = ctx.register_actor(SpawningActor { context: None });
                ctx.send(
                    &address,
                    CustomMessage::StringMessage(format!("Hello #{}", count).to_owned()),
                );
                // NOTE(review): blocks this actor for a second inside its handler;
                // fine for a demo, but it stalls all of its message processing.
                thread::sleep(Duration::from_millis(1000));
                let count2 = count + 1;
                ctx.send(
                    &child_addr,
                    CustomMessage::SpawnMessage(address.clone(), count2),
                );
            }
            _ => {}
        };
    }
    /// Stores the context handed over by the actor system.
    fn start(&mut self, context: Context<CustomMessage>) {
        self.context = Some(context);
        println!("new SpawningActor");
    }
}
/// Terminal actor: prints every `StringMessage` it receives.
struct OutputActor {}
impl Actor<CustomMessage> for OutputActor {
    /// Prints `StringMessage` payloads; all other message kinds are ignored.
    fn handle(&mut self, message: CustomMessage, _origin_address: Option<Address<CustomMessage>>) {
        match message {
            CustomMessage::StringMessage(content) => {
                println!("OutputActor received: {}", content);
            }
            _ => {}
        };
    }
    // No startup work needed; the context is deliberately not stored.
    fn start(&mut self, context: Context<CustomMessage>) {}
}
// messages and spawning child actors
/// Example entry point: registers an `OutputActor` and a `SpawningActor`,
/// then kicks off the endless spawn chain with counter 0.
pub fn run() {
    println!("init");
    ActorSystem::start(|| {
        let output_actor = ActorSystem::register_actor(OutputActor {}, None);
        let spawning_addr = ActorSystem::register_actor(SpawningActor { context: None }, None);
        // Fire-and-forget: the initial SpawnMessage starts the chain.
        spawning_addr.senda(CustomMessage::SpawnMessage(output_actor, 0))
    });
    println!("done");
}
| true |
81dcc093261d632d821e48ba0eae54a4232d1839
|
Rust
|
Xazax-hun/domains-oxide
|
/bril-lib/src/lexer.rs
|
UTF-8
| 12,323 | 3.265625 | 3 |
[
"Apache-2.0"
] |
permissive
|
use std::collections::HashMap;
use std::sync::OnceLock;
use utils::DiagnosticEmitter;
/// Index into the `IdentifierTable`; equal spellings share one `Identifier`.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct Identifier(pub usize);
/// Source line a token was produced on (the lexer starts counting at 1).
#[derive(Clone, Debug, Copy, Eq, PartialEq, Hash)]
pub struct Location(pub u32);
/// Kind and payload of a lexed token. Keyword variants are recognized via
/// their `Display` spelling (see the `Display` impl and `init_keywords`).
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum TokenValue {
    Local(Identifier),
    Global(Identifier),
    Label(Identifier),
    Integer(i32),
    // Arithmetic
    Add,
    Mul,
    Sub,
    Div,
    Mod,
    // Logic
    True,
    False,
    Equal,
    LessThan,
    GreaterThan,
    LessThanOrEq,
    GreaterThanOrEq,
    Not,
    And,
    Or,
    // Control flow
    Jump,
    Branch,
    Call,
    Return,
    // Separators
    Define,
    LeftParen,
    RightParen,
    LeftBrace,
    RightBrace,
    Colon,
    Semicolon,
    Comma,
    // Builtin types,
    Int,
    Bool,
    // Misc
    Const,
    Print,
    Nop,
    Identity,
    EndOfFile,
}
use TokenValue::*;
/// Maps a single separator character to its token value; `None` for
/// characters that are not stand-alone tokens.
fn from_char(c: char) -> Option<TokenValue> {
    let value = match c {
        '(' => LeftParen,
        ')' => RightParen,
        '{' => LeftBrace,
        '}' => RightBrace,
        ':' => Colon,
        ';' => Semicolon,
        '=' => Define,
        ',' => Comma,
        _ => return None,
    };
    Some(value)
}
impl core::fmt::Display for TokenValue {
    // Keep these spellings in sync with `init_keywords`, which keys the
    // keyword table on exactly these strings.
    fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result {
        match *self {
            Local(i) => write!(f, "local_{}", i.0),
            Global(i) => write!(f, "global_{}", i.0),
            Label(i) => write!(f, "label_{}", i.0),
            Integer(i) => write!(f, "{i}"),
            Add => write!(f, "add"),
            Mul => write!(f, "mul"),
            Sub => write!(f, "sub"),
            Div => write!(f, "div"),
            Mod => write!(f, "mod"),
            True => write!(f, "true"),
            False => write!(f, "false"),
            Equal => write!(f, "eq"),
            LessThan => write!(f, "lt"),
            GreaterThan => write!(f, "gt"),
            LessThanOrEq => write!(f, "le"),
            GreaterThanOrEq => write!(f, "ge"),
            Not => write!(f, "not"),
            And => write!(f, "and"),
            Or => write!(f, "or"),
            Jump => write!(f, "jmp"),
            Branch => write!(f, "br"),
            Call => write!(f, "call"),
            Return => write!(f, "ret"),
            Define => write!(f, "="),
            LeftParen => write!(f, "("),
            RightParen => write!(f, ")"),
            LeftBrace => write!(f, "{{"),
            RightBrace => write!(f, "}}"),
            Colon => write!(f, ":"),
            Semicolon => write!(f, ";"),
            Comma => write!(f, ","),
            Const => write!(f, "const"),
            Print => write!(f, "print"),
            Nop => write!(f, "nop"),
            Identity => write!(f, "id"),
            Int => write!(f, "int"),
            Bool => write!(f, "bool"),
            EndOfFile => write!(f, "END_OF_FILE"),
        }
    }
}
static KEYWORDS: OnceLock<HashMap<String, TokenValue>> = OnceLock::new();

/// Builds the keyword table, keyed by each keyword's `Display` spelling.
fn init_keywords() -> HashMap<String, TokenValue> {
    [
        // Arithmetic
        Add, Mul, Div, Sub, Mod,
        // Logic
        True, False, Equal, LessThan, GreaterThan, LessThanOrEq, GreaterThanOrEq, Not, And, Or,
        // Control flow
        Jump, Branch, Call, Return,
        // Misc
        Const, Print, Nop, Identity,
        // Builtin types
        Int, Bool,
    ]
    .into_iter()
    .map(|kw| (kw.to_string(), kw))
    .collect()
}
/// A lexed token: its value plus the source line it came from.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct Token {
    pub value: TokenValue,
    pub line_num: Location,
}
impl Token {
    /// Reports `s` as an error at this token's line; the token's own text
    /// (or "at end of file" for EOF) is used as location context.
    pub fn error(&self, diag: &mut DiagnosticEmitter, s: &str) {
        if self.value == EndOfFile {
            diag.report(self.line_num.0, "at end of file", s)
        } else {
            diag.report(self.line_num.0, &format!("at '{self}'"), s);
        }
    }
}
impl core::fmt::Display for Token {
    fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result {
        write!(f, "{}", self.value)
    }
}
/// Interning table mapping identifier spellings to dense `Identifier` ids.
#[derive(Debug, Clone, Default)]
pub struct IdentifierTable(pub Vec<String>);

impl IdentifierTable {
    /// Finds an already-interned identifier by its spelling.
    pub fn lookup(&self, ident: &str) -> Option<Identifier> {
        // TODO: more efficient lookup.
        self.0
            .iter()
            .position(|interned| interned == ident)
            .map(Identifier)
    }

    /// Interns `ident`, reusing the existing id when the spelling is known.
    fn get_identifier(&mut self, ident: &str) -> Identifier {
        match self.lookup(ident) {
            Some(existing) => existing,
            None => {
                self.0.push(ident.to_owned());
                Identifier(self.0.len() - 1)
            }
        }
    }

    /// Returns the spelling behind an interned identifier.
    pub fn get_name(&self, id: Identifier) -> &str {
        &self.0[id.0]
    }
}
/// Hand-written single-pass lexer over an ASCII-only source string.
pub struct Lexer<'src> {
    source: &'src str,
    // Index where the token currently being lexed starts
    // (char index == byte index, since input is validated to be ASCII).
    start: usize,
    // Index of the next unread character.
    current: usize,
    // 1-based line counter used for diagnostics.
    line_num: u32,
    has_error: bool, // TODO: can we get rid of this?
    diagnostic_emitter: &'src mut DiagnosticEmitter,
    identifiers: IdentifierTable,
}
/// Output of `Lexer::lex_all`: the token stream plus the interned identifiers.
#[derive(Debug, Clone, Default)]
pub struct LexResult {
    pub tokens: Vec<Token>,
    pub identifiers: IdentifierTable,
}
impl<'src> Lexer<'src> {
    /// Creates a lexer over `source`; problems are reported through
    /// `diagnostic_emitter`.
    pub fn new(source: &'src str, diagnostic_emitter: &'src mut DiagnosticEmitter) -> Self {
        Lexer {
            source,
            start: 0,
            current: 0,
            line_num: 1,
            has_error: false,
            diagnostic_emitter,
            identifiers: IdentifierTable::default(),
        }
    }

    /// Tokenizes the entire input, appending a trailing `EndOfFile` token on
    /// success. On any error (already reported through the emitter) an empty,
    /// default `LexResult` is returned instead.
    pub fn lex_all(mut self) -> LexResult {
        // TODO: better support for unicode:
        // * Point out where the non-ascii character is
        // * Allow any non-control characters in comments.
        // * Avoid multiple passes over the input.
        if !self.source.is_ascii() {
            self.diagnostic_emitter
                .error(self.line_num, "Only ASCII input is supported.");
            return LexResult::default();
        }
        let mut tokens = Vec::new();
        while !self.is_at_end() {
            if let Some(tok) = self.lex() {
                tokens.push(tok);
            } else if self.has_error {
                return LexResult::default();
            }
        }
        tokens.push(Token {
            value: EndOfFile,
            line_num: Location(self.line_num),
        });
        LexResult {
            tokens,
            identifiers: self.identifiers,
        }
    }

    /// Produces the next token, skipping whitespace and comments. Returns
    /// `None` both at end of input and on error; callers distinguish the two
    /// via `has_error`.
    fn lex(&mut self) -> Option<Token> {
        loop {
            if self.is_at_end() {
                return None;
            }
            self.start = self.current;
            match self.advance() {
                // Unambiguous single character tokens.
                c @ ('=' | '(' | ')' | '{' | '}' | ':' | ';' | ',') => {
                    return Some(Token {
                        value: from_char(c).unwrap(),
                        line_num: Location(self.line_num),
                    })
                }
                // Whitespace
                '\n' => {
                    self.line_num += 1;
                    continue;
                }
                ' ' | '\t' | '\r' => continue,
                // Comments
                '#' => {
                    while self.advance() != '\n' && !self.is_at_end() {}
                    continue;
                }
                '/' => {
                    if self.match_char('/') {
                        while self.advance() != '\n' && !self.is_at_end() {}
                        continue;
                    }
                    if self.match_char('*') {
                        loop {
                            while self.advance() != '*' && !self.is_at_end() {}
                            if self.is_at_end() {
                                // Fixed error-message typo ("no closed" -> "not closed").
                                self.diagnostic_emitter
                                    .error(self.line_num, "Multiline comment not closed.");
                                self.has_error = true;
                                return None;
                            }
                            if self.advance() == '/' {
                                break;
                            }
                        }
                        continue;
                    }
                    self.diagnostic_emitter.error(
                        self.line_num,
                        &format!(
                            "Unexpected token: '{}'.",
                            &self.source[self.start..self.current]
                        ),
                    );
                    self.has_error = true;
                    return None;
                }
                // Negative numbers
                '-' => {
                    if let n @ Some(_) = self.lex_number() {
                        return n;
                    }
                    self.diagnostic_emitter
                        .error(self.line_num, "Expected number after '-'.");
                    self.has_error = true;
                    return None;
                }
                c @ ('@' | '.') => {
                    if self.peek().is_ascii_alphabetic() {
                        // NOTE(review): `self.start` still points at the sigil, so the
                        // interned spelling includes the leading '@'/'.'. This keeps
                        // globals/labels distinct from equally named locals in the
                        // identifier table -- confirm this is intentional.
                        let ident = self.lex_identifier();
                        let value = if c == '@' {
                            Global(self.identifiers.get_identifier(ident))
                        } else {
                            Label(self.identifiers.get_identifier(ident))
                        };
                        return Some(Token {
                            value,
                            line_num: Location(self.line_num),
                        });
                    }
                    self.diagnostic_emitter
                        .error(self.line_num, &format!("Unexpected token: '{c}'."));
                    self.has_error = true;
                    return None;
                }
                c => {
                    if c.is_ascii_digit() {
                        return self.lex_number();
                    }
                    if c.is_ascii_alphabetic() {
                        let ident = self.lex_identifier();
                        let line_num = self.line_num;
                        // Keywords win over identifiers; unknown spellings become Locals.
                        return Some(KEYWORDS.get_or_init(init_keywords).get(ident).map_or_else(
                            || Token {
                                value: Local(self.identifiers.get_identifier(ident)),
                                line_num: Location(line_num),
                            },
                            |value| Token {
                                value: *value,
                                line_num: Location(line_num),
                            },
                        ));
                    }
                    self.diagnostic_emitter.error(
                        self.line_num,
                        &format!(
                            "Unexpected token: '{}'.",
                            &self.source[self.start..self.current]
                        ),
                    );
                    self.has_error = true;
                    return None;
                }
            }
        }
    }

    /// Lexes the digit run starting at `self.start` (which may begin with '-')
    /// into an `Integer` token; `None` when the text does not parse as `i32`.
    fn lex_number(&mut self) -> Option<Token> {
        while self.peek().is_ascii_digit() {
            self.advance();
        }
        let value: i32 = self.source[self.start..self.current].parse().ok()?;
        Some(Token {
            value: Integer(value),
            line_num: Location(self.line_num),
        })
    }

    /// Consumes `[A-Za-z0-9_]*` and returns the slice beginning at `self.start`.
    fn lex_identifier(&mut self) -> &'src str {
        while self.peek().is_ascii_alphanumeric() || self.peek() == '_' {
            self.advance();
        }
        &self.source[self.start..self.current]
    }

    fn is_at_end(&self) -> bool {
        self.current >= self.source.len()
    }

    /// Current character without consuming it; '\0' past the end.
    /// NOTE(review): `chars().nth` walks from the start of the string each
    /// call (O(n)); since the input is validated ASCII, indexing
    /// `as_bytes()` would be O(1). Kept as-is to preserve behavior exactly.
    fn peek(&self) -> char {
        self.source.chars().nth(self.current).map_or('\0', |x| x)
    }

    /// Consumes and returns the current character ('\0' past the end).
    fn advance(&mut self) -> char {
        let prev = self.peek();
        self.current += 1;
        prev
    }

    /// Consumes the current character only if it equals `expected`.
    fn match_char(&mut self, expected: char) -> bool {
        if self.source.chars().nth(self.current) == Some(expected) {
            self.current += 1;
            true
        } else {
            false
        }
    }
}
| true |
cd563272fe8609ad8e41fce080938ccfc21faacd
|
Rust
|
abusch/hyperlocal
|
/src/client/mod.rs
|
UTF-8
| 1,805 | 2.84375 | 3 |
[
"MIT"
] |
permissive
|
//! Hyper client bindings for unix domain sockets
// Std lib
use std::io;
// Third party
use futures::future::{self, FutureResult};
use futures::{Future, IntoFuture};
use hyper::client::connect::{Connect, Connected, Destination};
use tokio_uds::UnixStream;
use super::Uri;
const UNIX_SCHEME: &str = "unix";
/// A type which implements hyper's client connector interface
/// for unix domain sockets
///
/// `UnixConnector` instances assume uri's
/// constructued with `hyperlocal::Uri::new()` which produce uris with a `unix://`
/// scheme
///
/// # examples
///
/// ```no_run
/// extern crate hyper;
/// extern crate hyperlocal;
///
/// let client = hyper::Client::builder()
/// .build::<_, hyper::Body>(hyperlocal::UnixConnector::new());
/// ```
/// Connector for `unix://` URIs; a zero-sized unit struct, cheap to
/// construct and clone.
#[derive(Clone)]
pub struct UnixConnector;

impl UnixConnector {
    /// Creates a new `UnixConnector`.
    pub fn new() -> Self {
        UnixConnector
    }
}

/// `Default` mirrors `new()` so the connector works with APIs expecting
/// `Default` (and satisfies clippy's `new_without_default` lint).
impl Default for UnixConnector {
    fn default() -> Self {
        Self::new()
    }
}
impl Connect for UnixConnector {
    type Transport = UnixStream;
    type Error = io::Error;
    type Future = FutureResult<(UnixStream, Connected), io::Error>;
    /// Resolves a `unix://` destination to a connected `UnixStream`.
    ///
    /// Any other scheme, or a URI whose socket path cannot be extracted,
    /// yields an `InvalidInput` error.
    fn connect(&self, destination: Destination) -> Self::Future {
        if destination.scheme() != UNIX_SCHEME {
            return future::err(io::Error::new(
                io::ErrorKind::InvalidInput,
                format!("Invalid uri {:?}", destination),
            ));
        }
        match Uri::socket_path_dest(&destination) {
            // NOTE(review): `.wait()` blocks the calling thread until the
            // socket connects (the inline comment below is truncated); this
            // defeats async execution -- confirm callers tolerate blocking here.
            Some(ref path) => UnixStream::connect(path)
                .wait() // We have to block because we
                .map(|s| (s, Connected::new()))
                .into_future(),
            _ => future::err(io::Error::new(
                io::ErrorKind::InvalidInput,
                format!("Invalid uri {:?}", destination),
            )),
        }
    }
}
| true |
c407549c4e76f8aebadb39d2fcdb51e8b5d29f89
|
Rust
|
nisalmenuka2/smoltcp
|
/src/phy/tracer.rs
|
UTF-8
| 2,251 | 3.15625 | 3 |
[
"0BSD",
"LicenseRef-scancode-unknown-license-reference",
"BSD-3-Clause"
] |
permissive
|
use Error;
use wire::pretty_print::{PrettyPrint, PrettyPrinter};
use super::Device;
/// A tracer device.
///
/// A tracer is a device that prints all packets traversing it
/// to the standard output, and delegates to another device otherwise.
pub struct Tracer<T: Device, U: PrettyPrint> {
lower: T,
writer: fn(PrettyPrinter<U>)
}
impl<T: Device, U: PrettyPrint> Tracer<T, U> {
/// Create a tracer device.
pub fn new(lower: T, writer: fn(PrettyPrinter<U>)) -> Tracer<T, U> {
Tracer {
lower: lower,
writer: writer
}
}
/// Create a tracer device, printing to standard output.
#[cfg(feature = "std")]
pub fn new_stdout(lower: T) -> Tracer<T, U> {
fn writer<U: PrettyPrint>(printer: PrettyPrinter<U>) {
print!("{}", printer)
}
Tracer {
lower: lower,
writer: writer
}
}
/// Return the underlying device, consuming the tracer.
pub fn into_lower(self) -> T {
self.lower
}
}
impl<T: Device, U: PrettyPrint> Device for Tracer<T, U> {
type RxBuffer = T::RxBuffer;
type TxBuffer = TxBuffer<T::TxBuffer, U>;
fn mtu(&self) -> usize { self.lower.mtu() }
fn receive(&mut self) -> Result<Self::RxBuffer, Error> {
let buffer = try!(self.lower.receive());
(self.writer)(PrettyPrinter::<U>::new("<- ", &buffer));
Ok(buffer)
}
fn transmit(&mut self, length: usize) -> Result<Self::TxBuffer, Error> {
let buffer = try!(self.lower.transmit(length));
Ok(TxBuffer {
buffer: buffer,
writer: self.writer
})
}
}
#[doc(hidden)]
pub struct TxBuffer<T: AsRef<[u8]>, U: PrettyPrint> {
buffer: T,
writer: fn(PrettyPrinter<U>)
}
impl<T: AsRef<[u8]>, U: PrettyPrint> AsRef<[u8]>
for TxBuffer<T, U> {
fn as_ref(&self) -> &[u8] { self.buffer.as_ref() }
}
impl<T: AsRef<[u8]> + AsMut<[u8]>, U: PrettyPrint> AsMut<[u8]>
for TxBuffer<T, U> {
fn as_mut(&mut self) -> &mut [u8] { self.buffer.as_mut() }
}
impl<T: AsRef<[u8]>, U: PrettyPrint> Drop for TxBuffer<T, U> {
fn drop(&mut self) {
(self.writer)(PrettyPrinter::<U>::new("-> ", &self.buffer));
}
}
| true |
a1048c4e8ba627afbe46340ffa211d7737329a70
|
Rust
|
twardakm/rgit
|
/src/tools.rs
|
UTF-8
| 608 | 2.578125 | 3 |
[
"MIT"
] |
permissive
|
use anyhow::{Context, Result};
use std::path::PathBuf;
use std::process::Command;
/// Returns default path for scan results
pub fn get_default_scan_path() -> Result<PathBuf> {
Ok(dirs::home_dir()
.context("Failed to get home directory")?
.join(".rgit"))
}
/// Returns current git username
pub fn get_git_user_name() -> Result<String> {
let user = Command::new("git")
.arg("config")
.arg("user.name")
.output()
.context("Failed to execute: git config user.name")?;
let user = String::from_utf8(user.stdout)?;
Ok(String::from(user.trim()))
}
| true |
efd66c0ae94baca130bf13233d62975003011c41
|
Rust
|
andrew-johnson-4/rdxl_scaffolding
|
/src/form.rs
|
UTF-8
| 4,154 | 2.625 | 3 |
[
"MIT",
"Apache-2.0"
] |
permissive
|
use rdxl::{xtype,xrender};
xtype!(
/** InputButton renders a button element */
<!InputButton name:String/>
);
xtype!(
/** InputButtonGroup renders a group of button elements */
<!InputButtonGroup><?InputButton/></InputButtonGroup>
);
xtype!(
/** InputCheckbox renders a checkbox element */
<!InputCheckbox name:String/>
);
xtype!(
/** InputColor renders a color picker element */
<!InputColor name:String/>
);
xtype!(
/** InputDate renders a date input element */
<!InputDate name:String/>
);
xtype!(
/** InputDatetime renders a datetime input element */
<!InputDatetime name:String/>
);
xtype!(
/** InputEmail renders an email input element */
<!InputEmail name:String/>
);
xtype!(
/** InputFile renders a file input element */
<!InputFile name:String/>
);
xtype!(
/** InputImage renders an image input element */
<!InputImage name:String/>
);
xtype!(
/** InputMonth renders a month input element */
<!InputMonth name:String/>
);
xtype!(
/** InputNumber renders a number input element */
<!InputNumber name:String/>
);
xtype!(
/** InputPassword renders a password input element */
<!InputPassword name:String/>
);
xtype!(
/** InputRadio renders a group of radio input elements */
<!InputRadio name:String><!InputRadioOption value:String/></InputRadio>
);
xtype!(
/** InputRange renders a range slider element */
<!InputRange name:String min:u64 max:u64/>
);
xtype!(
/** InputSearch renders a search input element */
<!InputSearch name:String/>
);
xtype!(
/** InputSubmit renders a form submit element */
<!InputSubmit/>
);
xtype!(
/** InputTelephoneNumber renders a telephone input element */
<!InputTelephoneNumber name:String/>
);
xtype!(
/** InputText renders a text input element */
<!InputText name:String/>
);
xtype!(
/** InputTime renders a time input element */
<!InputTime name:String/>
);
xtype!(
/** InputUrl renders a url input element */
<!InputUrl name:String/>
);
xtype!(
/** InputWeek renders a week input element */
<!InputWeek name:String/>
);
xrender!(InputButton, <input type="button" name={{ format!("'{}'",self.name) }} value={{ format!("'{}'",self.name) }}/>);
xrender!(InputCheckbox, <input type="checkbox" name={{ format!("'{}'",self.name) }}/>);
xrender!(InputButtonGroup, <span style="background-color:#CCCCCC;">
{{ for bc in self.children.iter() {{
{{ let InputButtonGroupChildren::InputButton(b) = bc; }}
{{ b }}
}} }}
</span>);
xrender!(InputRadio, <span style="background-color:#CCCCCC;">
{{ for rc in self.children.iter() {{
{{ let InputRadioChildren::InputRadioOption(r) = rc; }}
<input type="radio" name={{ format!(r#""{}""#, self.name) }} value={{ format!(r#""{}""#, r.value) }}/>
}} }}
</span>);
xrender!(InputText, <input type="text" name={{ format!("'{}'",self.name) }}/>);
xrender!(InputEmail, <input type="email" name={{ format!("'{}'",self.name) }}/>);
xrender!(InputSearch, <input type="search" name={{ format!("'{}'",self.name) }}/>);
xrender!(InputPassword, <input type="password" name={{ format!("'{}'",self.name) }}/>);
xrender!(InputNumber, <input type="number" name={{ format!("'{}'",self.name) }}/>);
xrender!(InputTelephoneNumber, <input type="tel" name={{ format!("'{}'",self.name) }}/>);
xrender!(InputUrl, <input type="url" name={{ format!("'{}'",self.name) }}/>);
xrender!(InputRange, <input type="range" name={{ format!("'{}'",self.name) }} min={{self.min}} max={{self.max}}/>);
xrender!(InputColor, <input type="color" name={{ format!("'{}'",self.name) }}/>);
xrender!(InputDate, <input type="date" name={{ format!("'{}'",self.name) }}/>);
xrender!(InputDatetime, <input type="datetime-local" name={{ format!("'{}'",self.name) }}/>);
xrender!(InputMonth, <input type="month" name={{ format!("'{}'",self.name) }}/>);
xrender!(InputWeek, <input type="week" name={{ format!("'{}'",self.name) }}/>);
xrender!(InputTime, <input type="time" name={{ format!("'{}'",self.name) }}/>);
xrender!(InputFile, <input type="file" name={{ format!("'{}'",self.name) }}/>);
xrender!(InputImage, <input type="image" name={{ format!("'{}'",self.name) }}/>);
xrender!(InputSubmit, <input type="submit"/>);
| true |
dfda74936f0cd43c9309311815884fb26c473407
|
Rust
|
HeroicKatora/oxide-auth
|
/oxide-auth/src/endpoint/tests/mod.rs
|
UTF-8
| 5,989 | 2.90625 | 3 |
[
"MIT",
"Apache-2.0"
] |
permissive
|
use crate::endpoint::*;
use crate::primitives::generator::TagGrant;
use crate::primitives::grant::Grant;
use std::borrow::Cow;
use std::collections::HashMap;
use url::Url;
/// Open and simple implementation of `WebRequest`.
#[derive(Clone, Debug, Default)]
struct CraftedRequest {
/// The key-value pairs in the url query component.
pub query: Option<HashMap<String, Vec<String>>>,
/// The key-value pairs of a `x-www-form-urlencoded` body.
pub urlbody: Option<HashMap<String, Vec<String>>>,
/// Provided authorization header.
pub auth: Option<String>,
}
/// Open and simple implementation of `WebResponse`.
#[derive(Debug, Default)]
struct CraftedResponse {
/// HTTP status code.
pub status: Status,
/// A location header, for example for redirects.
pub location: Option<Url>,
/// Indicates how the client should have authenticated.
///
/// Only set with `Unauthorized` status.
pub www_authenticate: Option<String>,
/// Encoded body of the response.
///
/// One variant for each possible encoding type.
pub body: Option<Body>,
}
/// An enum containing the necessary HTTP status codes.
#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)]
enum Status {
/// Http status code 200.
Ok,
/// Http status code 302.
Redirect,
/// Http status code 400.
BadRequest,
/// Http status code 401.
Unauthorized,
}
/// Models the necessary body contents.
///
/// Real HTTP protocols should set a content type header for each of the body variants.
#[derive(Clone, Debug)]
enum Body {
/// A pure text body.
Text(String),
/// A json encoded body, `application/json`.
Json(String),
}
#[derive(Debug)]
enum CraftedError {
Crafted,
}
impl WebRequest for CraftedRequest {
type Response = CraftedResponse;
type Error = CraftedError;
fn query(&mut self) -> Result<Cow<dyn QueryParameter + 'static>, Self::Error> {
self.query
.as_ref()
.map(|hm| Cow::Borrowed(hm as &dyn QueryParameter))
.ok_or(CraftedError::Crafted)
}
fn urlbody(&mut self) -> Result<Cow<dyn QueryParameter + 'static>, Self::Error> {
self.urlbody
.as_ref()
.map(|hm| Cow::Borrowed(hm as &dyn QueryParameter))
.ok_or(CraftedError::Crafted)
}
fn authheader(&mut self) -> Result<Option<Cow<str>>, Self::Error> {
Ok(self.auth.as_ref().map(|bearer| bearer.as_str().into()))
}
}
impl WebResponse for CraftedResponse {
type Error = CraftedError;
fn ok(&mut self) -> Result<(), Self::Error> {
self.status = Status::Ok;
self.location = None;
self.www_authenticate = None;
Ok(())
}
/// A response which will redirect the user-agent to which the response is issued.
fn redirect(&mut self, url: Url) -> Result<(), Self::Error> {
self.status = Status::Redirect;
self.location = Some(url);
self.www_authenticate = None;
Ok(())
}
/// Set the response status to 400.
fn client_error(&mut self) -> Result<(), Self::Error> {
self.status = Status::BadRequest;
self.location = None;
self.www_authenticate = None;
Ok(())
}
/// Set the response status to 401 and add a `WWW-Authenticate` header.
fn unauthorized(&mut self, header_value: &str) -> Result<(), Self::Error> {
self.status = Status::Unauthorized;
self.location = None;
self.www_authenticate = Some(header_value.to_owned());
Ok(())
}
/// A pure text response with no special media type set.
fn body_text(&mut self, text: &str) -> Result<(), Self::Error> {
self.body = Some(Body::Text(text.to_owned()));
Ok(())
}
/// Json repsonse data, with media type `aplication/json.
fn body_json(&mut self, data: &str) -> Result<(), Self::Error> {
self.body = Some(Body::Json(data.to_owned()));
Ok(())
}
}
struct TestGenerator(String);
impl TagGrant for TestGenerator {
fn tag(&mut self, _: u64, _grant: &Grant) -> Result<String, ()> {
Ok(self.0.clone())
}
}
struct Allow(String);
struct Deny;
impl OwnerSolicitor<CraftedRequest> for Allow {
fn check_consent(
&mut self, _: &mut CraftedRequest, _: Solicitation,
) -> OwnerConsent<CraftedResponse> {
OwnerConsent::Authorized(self.0.clone())
}
}
impl OwnerSolicitor<CraftedRequest> for Deny {
fn check_consent(
&mut self, _: &mut CraftedRequest, _: Solicitation,
) -> OwnerConsent<CraftedResponse> {
OwnerConsent::Denied
}
}
impl<'l> OwnerSolicitor<CraftedRequest> for &'l Allow {
fn check_consent(
&mut self, _: &mut CraftedRequest, _: Solicitation,
) -> OwnerConsent<CraftedResponse> {
OwnerConsent::Authorized(self.0.clone())
}
}
impl<'l> OwnerSolicitor<CraftedRequest> for &'l Deny {
fn check_consent(
&mut self, _: &mut CraftedRequest, _: Solicitation,
) -> OwnerConsent<CraftedResponse> {
OwnerConsent::Denied
}
}
trait ToSingleValueQuery {
fn to_single_value_query(self) -> HashMap<String, Vec<String>>;
}
impl<'r, I, K, V> ToSingleValueQuery for I
where
I: Iterator<Item = &'r (K, V)>,
K: AsRef<str> + 'r,
V: AsRef<str> + 'r,
{
fn to_single_value_query(self) -> HashMap<String, Vec<String>> {
self.map(|&(ref k, ref v)| (k.as_ref().to_string(), vec![v.as_ref().to_string()]))
.collect()
}
}
impl Default for Status {
fn default() -> Self {
Status::Ok
}
}
pub mod defaults {
pub const EXAMPLE_CLIENT_ID: &str = "ClientId";
pub const EXAMPLE_OWNER_ID: &str = "Owner";
pub const EXAMPLE_PASSPHRASE: &str = "VGhpcyBpcyBhIHZlcnkgc2VjdXJlIHBhc3NwaHJhc2UK";
pub const EXAMPLE_REDIRECT_URI: &str = "https://client.example/endpoint";
pub const EXAMPLE_SCOPE: &str = "example default";
}
mod authorization;
mod access_token;
mod client_credentials;
mod resource;
mod refresh;
mod pkce;
| true |
bb699f6bf1ef8577e916606a86287f2d5b2b45f6
|
Rust
|
nukep/rust-cubes-demo
|
/src/util/compare.rs
|
UTF-8
| 373 | 3.609375 | 4 |
[] |
no_license
|
/// Extension for tracking a running minimum inside an `Option`.
pub trait CompareSmallest<T: PartialOrd> {
    /// Stores `value` if no minimum has been seen yet, or if `value` is
    /// strictly smaller than the current one.
    fn set_if_smallest(&mut self, value: T);
}

impl<T: PartialOrd> CompareSmallest<T> for Option<T> {
    fn set_if_smallest(&mut self, value: T) {
        let is_new_minimum = self.as_ref().map_or(true, |current| &value < current);
        if is_new_minimum {
            *self = Some(value);
        }
    }
}
| true |
b7d8533008b507b3eb30f63061e863eadeb982b4
|
Rust
|
graphprotocol/stable-hash
|
/tests/profiling.rs
|
UTF-8
| 4,569 | 2.578125 | 3 |
[] |
no_license
|
mod common;
use std::collections::{HashMap, HashSet};
use std::hash::Hash;
use common::*;
use firestorm::profile_fn;
use stable_hash::utils::AsBytes;
use stable_hash::*;
/// Benchmark harness (run explicitly with `--ignored`): feeds a fixed map
/// through both the fast and the cryptographic stable hashers under the
/// firestorm profiler, writing results to ./firestorm when profiling is on.
#[test]
#[ignore = "benchmark"]
fn compare() {
    let mut data = HashMap::new();
    data.insert("abc", 100u64);
    data.insert("abcdef", 100u64);
    data.insert("abcdefged", 0u64);
    data.insert("abcdefgedaekllw", 50000u64);
    data.insert("acfaek", 50000u64);
    data.insert("aek", 511110000u64);
    fn profile(value: &impl StableHash) {
        profile_fn!(profile);
        fast_stable_hash(value);
        crypto_stable_hash(value);
    }
    if firestorm::enabled() {
        firestorm::bench("./firestorm", || profile(&data)).unwrap();
    }
}
use rand::{thread_rng, Rng, RngCore};
/// Random test-data factory used to build the nested fixtures below.
trait R {
    fn rand() -> Self;
}
impl R for i32 {
    fn rand() -> Self {
        thread_rng().gen()
    }
}
impl R for usize {
    // Deliberately small (0..45): used as a collection size below.
    fn rand() -> Self {
        let num: u32 = thread_rng().gen();
        (num % 45) as usize
    }
}
impl<T> R for Vec<T>
where
    T: R,
{
    // Random length (via `usize::rand`), each element generated recursively.
    fn rand() -> Self {
        let count = R::rand();
        let mut v = Vec::with_capacity(count);
        for _ in 0..count {
            v.push(R::rand());
        }
        v
    }
}
impl<K, V> R for HashMap<K, V>
where
    K: R + Hash + Eq,
    V: R,
{
    // May yield fewer than `count` entries if random keys collide.
    fn rand() -> Self {
        let count = R::rand();
        let mut h = HashMap::with_capacity(count);
        for _ in 0..count {
            let k = R::rand();
            let v = R::rand();
            h.insert(k, v);
        }
        h
    }
}
impl<T> R for HashSet<T>
where
    T: R + Hash + Eq,
{
    // May yield fewer than `count` elements if random values collide.
    fn rand() -> Self {
        let count = R::rand();
        let mut h = HashSet::with_capacity(count);
        for _ in 0..count {
            let t = R::rand();
            h.insert(t);
        }
        h
    }
}
impl R for u8 {
    fn rand() -> Self {
        thread_rng().gen()
    }
}
impl R for String {
    // Retries until the random byte vector happens to be valid UTF-8.
    fn rand() -> Self {
        loop {
            let bytes = R::rand();
            if let Ok(s) = String::from_utf8(bytes) {
                return s;
            }
        }
    }
}
impl R for bool {
    fn rand() -> Self {
        thread_rng().gen()
    }
}
impl R for [u8; 32] {
    fn rand() -> Self {
        let mut value = Self::default();
        thread_rng().fill_bytes(&mut value);
        value
    }
}
impl R for Value {
    // Picks one of the five variants uniformly at random.
    fn rand() -> Self {
        let d: u32 = thread_rng().gen();
        match d % 5 {
            0 => Value::Null,
            1 => Value::Number(R::rand()),
            2 => Value::String(R::rand()),
            3 => Value::Bool(R::rand()),
            4 => Value::Array(R::rand()),
            _ => unreachable!(),
        }
    }
}
/// Miniature dynamically-typed value used as a hashing fixture.
#[derive(Debug)]
enum Value {
    Null,
    Number(i32),
    String(String),
    Bool(bool),
    Array([u8; 32]),
}
// See also d3ba3adc-6e9b-4586-a7e7-6b542df39462
impl StableHash for Value {
    /// Hashes the payload (if any) at child address 0, then writes a one-byte
    /// variant discriminant at `field_address`. `Null` writes nothing at all,
    /// so it hashes identically to an absent field.
    fn stable_hash<H: StableHasher>(&self, field_address: H::Addr, state: &mut H) {
        let variant = match self {
            Self::Null => return,
            Self::Number(n) => {
                n.stable_hash(field_address.child(0), state);
                1
            }
            Self::String(n) => {
                n.stable_hash(field_address.child(0), state);
                2
            }
            Self::Bool(n) => {
                n.stable_hash(field_address.child(0), state);
                3
            }
            Self::Array(n) => {
                AsBytes(n).stable_hash(field_address.child(0), state);
                4
            }
        };
        // Alternatively, variant.stable_hash(field_address, state).
        // But, this is slightly faster at the expense of interacting
        // with a more low-level API that is easier to screw up.
        state.write(field_address, &[variant]);
    }
}
/// Leaf fixture: a map of `Value`s plus a number.
#[derive(Debug)]
struct C {
    s: HashMap<String, Value>,
    n: i32,
}
impl_stable_hash!(C { s, n });
impl R for C {
    fn rand() -> Self {
        Self {
            s: R::rand(),
            n: R::rand(),
        }
    }
}
/// Root fixture: three independently generated vectors of `B`.
#[derive(Debug)]
struct A {
    v1: Vec<B>,
    v2: Vec<B>,
    v3: Vec<B>,
}
impl_stable_hash!(A { v1, v2, v3 });
impl R for A {
    fn rand() -> Self {
        Self {
            v1: R::rand(),
            v2: R::rand(),
            v3: R::rand(),
        }
    }
}
/// Mid-level fixture: a byte plus a map of `C`s.
#[derive(Debug)]
struct B {
    a: u8,
    c: HashMap<String, C>,
}
impl_stable_hash!(B { a, c });
impl R for B {
    fn rand() -> Self {
        Self {
            a: R::rand(),
            c: R::rand(),
        }
    }
}
| true |
75e8b4db5fe4997ebdc8dea55c2b32c707a4002c
|
Rust
|
supr/whois
|
/src/main.rs
|
UTF-8
| 2,688 | 2.859375 | 3 |
[] |
no_license
|
use std::time::Duration;
use tokio::io::AsyncReadExt;
use tokio::io::AsyncWriteExt;
use tokio::io::BufReader;
use tokio::net::TcpStream;
use tokio::runtime::Builder;
#[derive(Debug)]
struct WhoisClient {
    /// Hostname of the whois server, queried on port 43.
    server: String,
}
impl WhoisClient {
    /// Creates a client pointed at the Verisign whois server.
    fn new() -> Self {
        WhoisClient {
            server: "whois.verisign-grs.com".to_owned(),
        }
    }
    /// Replaces the whois server, builder-style.
    fn set_server(mut self, server: &str) -> Self {
        self.server = server.to_owned();
        self
    }
    /// Sends a whois query for `hostname` and prints the reply.
    ///
    /// The returned future resolves once the TCP connection is established;
    /// the request/response exchange happens on a detached spawned task.
    async fn query(&self, hostname: &str) -> Result<(), Box<dyn std::error::Error>> {
        let mut stream = TcpStream::connect(format!("{}:43", &self.server)).await?;
        let mut request = hostname.to_owned();
        request.push_str("\r\n");
        tokio::spawn(async move {
            let (rdr, mut wtr) = stream.split();
            if wtr.write_all(request.as_bytes()).await.is_ok() {
                let mut rdr = BufReader::with_capacity(4 * 1024, rdr);
                let mut out = String::new();
                // Only print the reply when the read completed successfully.
                // The previous code discarded this `Result` (a `#[must_use]`
                // warning) and printed whatever partial data had arrived.
                if rdr.read_to_string(&mut out).await.is_ok() {
                    println!("{}", out);
                }
            }
        });
        Ok(())
    }
}
/// Interactive whois REPL.
///
/// Commands: `server <host>` switches the whois server, `quit`/`q` exits,
/// and any other word is treated as a hostname and queried.
fn main() {
    // Use one worker thread on 1-2 core machines, two otherwise.
    let threads = match num_cpus::get() {
        1 | 2 => 1,
        _ => 2,
    };
    let rt = Builder::new()
        .blocking_threads(threads)
        .core_threads(threads)
        .keep_alive(Some(Duration::from_secs(10)))
        .name_prefix("whois-thread-")
        .build()
        .unwrap();
    let mut rl = rustyline::Editor::<()>::new();
    let mut wc = WhoisClient::new();
    loop {
        let readline = rl.readline(">> ");
        match readline {
            Ok(line) => {
                rl.add_history_entry(line.as_str());
                let mut line_splits = line.split_whitespace();
                if let Some(cmd) = line_splits.next() {
                    // A second word means "<command> <argument>" (only `server`).
                    if let Some(arg) = line_splits.next() {
                        match cmd {
                            "server" => {
                                wc = wc.set_server(arg);
                                continue;
                            }
                            _ => {}
                        };
                    } else {
                        match cmd {
                            "quit" | "QUIT" | "q" | "Q" => break,
                            // NOTE(review): the Result of block_on is discarded,
                            // so connection errors are silently dropped.
                            _ => rt.block_on(wc.query(cmd)),
                        };
                    }
                }
            }
            // Ctrl-C / Ctrl-D both end the session.
            Err(rustyline::error::ReadlineError::Interrupted) => break,
            Err(rustyline::error::ReadlineError::Eof) => break,
            Err(err) => {
                eprintln!("Error: {:?}", err);
                break;
            }
        }
    }
}
| true |
dc723085c81cfe5951dbc46cf4c91c0f3d9c4e57
|
Rust
|
amling/r4
|
/executor/src/r4l/ast.rs
|
UTF-8
| 2,629 | 3.046875 | 3 |
[] |
no_license
|
use record::Record;
use std::collections::HashMap;
use std::sync::Arc;
use record::Path;
use record::OwnPath;
/// Expression AST for the r4l expression language.
#[derive(Debug)]
pub enum Expr {
    Statement(Vec<Box<Expr>>),
    Ternary(Box<Expr>, Box<Expr>, Box<Expr>),
    Binary(Box<Expr>, BinaryOp, Box<Expr>),
    Unary(UnaryOp, Box<Expr>),
    // Record operations: a target expression plus the path within it.
    RecordRead(Box<Expr>, OwnPath),
    RecordReadFill(Box<Expr>, OwnPath),
    RecordWrite(Box<Expr>, OwnPath, Box<Expr>),
    RecordDelete(Box<Expr>, OwnPath),
    Literal(Record),
    ArrayLiteral(Vec<Box<Expr>>),
    HashLiteral(HashMap<Arc<str>, Box<Expr>>),
    // Variable access, keyed by variable name.
    WriteVar(Arc<str>, Box<Expr>),
    ReadVar(Arc<str>),
}
/// Prefix operators: logical and numeric negation.
#[derive(Debug)]
pub enum UnaryOp {
    LogNeg(),
    NumNeg(),
}
/// Infix operators.
#[derive(Debug)]
pub enum BinaryOp {
    LogOr(),
    LogAnd(),
    // Numeric comparisons.
    NumLt(),
    NumLte(),
    NumGt(),
    NumGte(),
    NumEq(),
    NumNe(),
    // NOTE(review): distinct from the Num* variants; exact semantics are
    // defined by the evaluator (not visible in this file).
    Lt(),
    Lte(),
    Gt(),
    Gte(),
    Eq(),
    Ne(),
    Add(),
    Sub(),
    Cat(),
    Mul(),
    Div(),
    Mod(),
}
/// Parses `s` as an `i64` and wraps it in a literal expression.
/// Panics if `s` is not a valid integer.
pub fn int_literal(s: &str) -> Box<Expr> {
    let value: i64 = s.parse().unwrap();
    Box::new(Expr::Literal(Record::from(value)))
}

/// Parses `s` as an `f64` and wraps it in a literal expression.
/// Panics if `s` is not a valid float.
pub fn float_literal(s: &str) -> Box<Expr> {
    let value: f64 = s.parse().unwrap();
    Box::new(Expr::Literal(Record::from(value)))
}
/// Unescapes a double-quoted string literal (quotes included) into a literal
/// expression. Supported escapes: `\t`, `\n`, `\\`, `\"`; any other escape
/// (or a trailing lone backslash) panics.
pub fn string_literal(s: &str) -> Box<Expr> {
    let chars: Vec<char> = s.chars().collect();
    assert!(chars[0] == '"');
    assert!(chars[chars.len() - 1] == '"');
    let inner = &chars[1..(chars.len() - 1)];
    let mut out = String::new();
    let mut iter = inner.iter();
    while let Some(&c) = iter.next() {
        if c != '\\' {
            out.push(c);
            continue;
        }
        match iter.next() {
            Some('t') => out.push('\t'),
            Some('n') => out.push('\n'),
            Some('\\') => out.push('\\'),
            Some('"') => out.push('"'),
            oc => panic!("Unexpected backslash in string literal: {:?}?", oc),
        }
    }
    Box::new(Expr::Literal(Record::from(out)))
}
/// Split a `{{var:path}}` token into its variable name and parsed path.
///
/// When the `var:` prefix is absent, the variable defaults to `"r"`
/// (the current record).
pub fn path_literal(s: &str) -> (Arc<str>, OwnPath) {
    assert!(s.starts_with("{{"));
    assert!(s.ends_with("}}"));
    let inner = &s[2..(s.len() - 2)];
    match inner.find(':') {
        Some(i) => (
            Arc::from(&inner[0..i]),
            Path::new(&inner[(i + 1)..]).to_owned(),
        ),
        None => (Arc::from("r"), Path::new(inner).to_owned()),
    }
}
| true |
77395ea8adfbeb41e91936cc56544f3d66eec920
|
Rust
|
Senlody/bellman
|
/src/multicore.rs
|
UTF-8
| 1,333 | 2.9375 | 3 |
[
"MIT",
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
] |
permissive
|
use crossbeam::{self, Scope, ScopedJoinHandle};
use num_cpus;
/// A join handle that is either a real crossbeam scoped-thread handle or an
/// already-computed value (when the work ran inline on the current thread).
pub enum MaybeJoinHandle<T> {
    // Result still being produced on a scoped worker thread.
    MultiThreaded(ScopedJoinHandle<T>),
    // Work was executed eagerly; the result is stored directly.
    SingleThreaded(T)
}

impl<T> MaybeJoinHandle<T> {
    /// Wait for (or immediately return) the result of the spawned work.
    pub fn join(self) -> T {
        match self {
            MaybeJoinHandle::MultiThreaded(scope) => scope.join(),
            MaybeJoinHandle::SingleThreaded(t) => t
        }
    }
}
/// Either a handle to a real crossbeam scope, or a marker that all work
/// should run inline on the current thread.
#[derive(Clone, Copy)]
pub enum MaybeScope<'a, 'b: 'a> {
    MultiThreaded(&'a Scope<'b>),
    SingleThreaded
}

impl<'a, 'b> MaybeScope<'a, 'b> {
    /// Run `f` on a scoped worker thread (multi-threaded mode) or
    /// immediately on this thread (single-threaded mode).
    pub fn spawn<F, T>(&self, f: F) -> MaybeJoinHandle<T>
        where F: FnOnce() -> T + Send + 'b, T: Send + 'b
    {
        match self {
            &MaybeScope::MultiThreaded(scope) => MaybeJoinHandle::MultiThreaded(scope.spawn(f)),
            &MaybeScope::SingleThreaded => MaybeJoinHandle::SingleThreaded(f())
        }
    }
}
/// Run `f` with a threading strategy chosen from the number of work
/// `elements`.
///
/// With at most one element per CPU, fanning out is not worthwhile, so `f`
/// runs inline with a chunk size of `max(elements, 1)` — the `max` guards
/// against a zero chunk size when `elements == 0`. Otherwise a crossbeam
/// scope is created and `f` receives `elements / num_cpus` as its
/// per-thread chunk size (always >= 1 here, since `elements > num_cpus`).
pub fn scope<'a, F, R>(
    elements: usize,
    f: F
) -> R where F: for<'b> FnOnce(MaybeScope<'b, 'a>, usize) -> R
{
    let num_cpus = num_cpus::get();

    if elements <= num_cpus {
        // Inline execution; clamp the chunk size so callers never see 0.
        f(MaybeScope::SingleThreaded, std::cmp::max(elements, 1))
    } else {
        crossbeam::scope(|scope| {
            f(MaybeScope::MultiThreaded(scope), elements / num_cpus)
        })
    }
}
| true |
b796b22da35975879127060266efd44b93f6c290
|
Rust
|
GGist/ssdp-rs
|
/src/message/mod.rs
|
UTF-8
| 4,172 | 2.765625 | 3 |
[
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0",
"MIT"
] |
permissive
|
//! Messaging primitives for discovering devices and services.
use std::io;
use std::net::SocketAddr;
use net::connector::UdpConnector;
use net::IpVersionMode;
mod notify;
mod search;
mod ssdp;
pub mod listen;
pub mod multicast;
use get_if_addrs;
pub use message::multicast::Multicast;
pub use message::search::{SearchRequest, SearchResponse, SearchListener};
pub use message::notify::{NotifyMessage, NotifyListener};
pub use message::listen::Listen;
/// Multicast Socket Information
// Standard SSDP multicast group for IPv4.
pub const UPNP_MULTICAST_IPV4_ADDR: &'static str = "239.255.255.250";
// Link-local SSDP multicast group for IPv6.
pub const UPNP_MULTICAST_IPV6_LINK_LOCAL_ADDR: &'static str = "FF02::C";
// Well-known SSDP port.
pub const UPNP_MULTICAST_PORT: u16 = 1900;

/// Default TTL For Multicast
pub const UPNP_MULTICAST_TTL: u32 = 2;
/// Enumerates different types of SSDP messages.
///
/// `Notify` and `Search` are requests; `Response` is the reply to a
/// `Search`.
#[derive(Copy, Clone, Hash, Eq, PartialEq, Debug)]
pub enum MessageType {
    /// A notify message.
    Notify,
    /// A search message.
    Search,
    /// A response to a search message.
    Response,
}
/// Configuration for SSDP messaging: multicast group addresses, port, TTL
/// and the IP version(s) to operate on.
#[derive(Clone)]
pub struct Config {
    pub ipv4_addr: String,
    pub ipv6_addr: String,
    pub port: u16,
    pub ttl: u32,
    pub mode: IpVersionMode,
}

impl Config {
    /// Create a config with the standard UPnP defaults (same as `Default`).
    pub fn new() -> Self {
        Default::default()
    }

    /// Builder-style setter for the IPv4 multicast address.
    pub fn set_ipv4_addr<S: Into<String>>(mut self, value: S) -> Self {
        self.ipv4_addr = value.into();
        self
    }

    /// Builder-style setter for the IPv6 multicast address.
    pub fn set_ipv6_addr<S: Into<String>>(mut self, value: S) -> Self {
        self.ipv6_addr = value.into();
        self
    }

    /// Builder-style setter for the multicast port.
    pub fn set_port(mut self, value: u16) -> Self {
        self.port = value;
        self
    }

    /// Builder-style setter for the multicast TTL.
    pub fn set_ttl(mut self, value: u32) -> Self {
        self.ttl = value;
        self
    }

    /// Builder-style setter for the IP version mode.
    pub fn set_mode(mut self, value: IpVersionMode) -> Self {
        self.mode = value;
        self
    }
}

impl Default for Config {
    // Standard UPnP multicast constants; both IP versions enabled.
    fn default() -> Self {
        Config {
            ipv4_addr: UPNP_MULTICAST_IPV4_ADDR.to_string(),
            ipv6_addr: UPNP_MULTICAST_IPV6_LINK_LOCAL_ADDR.to_string(),
            port: UPNP_MULTICAST_PORT,
            ttl: UPNP_MULTICAST_TTL,
            mode: IpVersionMode::Any,
        }
    }
}
/// Generate `UdpConnector` objects for every local interface address that
/// matches the requested IP version `filter`.
///
/// (Both IPv4 and IPv6 interfaces are handled, despite what the old
/// one-line doc suggested.)
fn all_local_connectors(multicast_ttl: Option<u32>, filter: &IpVersionMode) -> io::Result<Vec<UdpConnector>> {
    trace!("Fetching all local connectors");
    // For each local address, build a connector only when the address
    // family is allowed by `filter`; `map_local` drops the `None`s.
    map_local(|&addr| match (filter, addr) {
        (&IpVersionMode::V4Only, SocketAddr::V4(n)) |
        (&IpVersionMode::Any, SocketAddr::V4(n)) => {
            // Bind to the interface address with an OS-assigned port.
            Ok(Some(try!(UdpConnector::new((*n.ip(), 0), multicast_ttl))))
        }
        (&IpVersionMode::V6Only, SocketAddr::V6(n)) |
        (&IpVersionMode::Any, SocketAddr::V6(n)) => Ok(Some(try!(UdpConnector::new(n, multicast_ttl)))),
        _ => Ok(None),
    })
}
/// Invoke the closure for every local address found on the system
///
/// This method filters out _loopback_ addresses (and, for IPv6, also
/// _global_ addresses), collecting the `Some` results produced by `f`.
fn map_local<F, R>(mut f: F) -> io::Result<Vec<R>>
    where F: FnMut(&SocketAddr) -> io::Result<Option<R>>
{
    let addrs_iter = try!(get_local_addrs());

    let mut obj_list = Vec::with_capacity(addrs_iter.len());

    for addr in addrs_iter {
        trace!("Found {}", addr);
        match addr {
            // Keep non-loopback IPv4 addresses.
            SocketAddr::V4(n) if !n.ip().is_loopback() => {
                if let Some(x) = try!(f(&addr)) {
                    obj_list.push(x);
                }
            }
            // Filter all loopback and global IPv6 addresses
            SocketAddr::V6(n) if !n.ip().is_loopback() && !n.ip().is_global() => {
                if let Some(x) = try!(f(&addr)) {
                    obj_list.push(x);
                }
            }
            _ => (),
        }
    }

    Ok(obj_list)
}
/// Generate a list of `SocketAddr`s (with port 0) for every local
/// interface address on the system.
///
/// `get_if_addrs` already yields resolved addresses, so every interface is
/// included; there is no fallible step to skip here.
fn get_local_addrs() -> io::Result<Vec<SocketAddr>> {
    let iface_iter = try!(get_if_addrs::get_if_addrs()).into_iter();
    // `map` (not `filter_map`) — the closure can never yield `None`.
    Ok(iface_iter.map(|iface| SocketAddr::new(iface.addr.ip(), 0))
                 .collect())
}
| true |
54483cb97df1565f9a8900dbc7a385342225ac62
|
Rust
|
bbarclay/grex
|
/src/regexp/feature.rs
|
UTF-8
| 3,921 | 2.765625 | 3 |
[
"Apache-2.0"
] |
permissive
|
/*
* Copyright © 2019-today Peter M. Stahl [email protected]
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either expressed or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#![allow(deprecated)]
/// This enum specifies the supported conversion features which can be passed to method
/// [`RegExpBuilder.with_conversion_of`](./struct.RegExpBuilder.html#method.with_conversion_of).
///
/// Precedence between features is pairwise: each variant's doc states which
/// other features it wins against when both are enabled.
#[derive(Clone, Debug, Hash, Ord, PartialOrd, Eq, PartialEq)]
#[deprecated(since = "1.3.0", note = "This enum will be removed in 1.4.0.")]
pub enum Feature {
    /// This feature converts any Unicode decimal digit to character class `\d`.
    ///
    /// It takes precedence over the
    /// [`Word`](Feature::Word) feature if both are set.
    /// Decimal digits are converted to `\d`, the remaining word characters to `\w`.
    ///
    /// It takes precedence over the
    /// [`NonSpace`](Feature::NonSpace) feature if both are set.
    /// Decimal digits are converted to `\d`, the remaining non-whitespace characters to `\S`.
    Digit,

    /// This feature converts any character which is not
    /// a Unicode decimal digit to character class `\D`.
    ///
    /// It takes precedence over the
    /// [`NonWord`](Feature::NonWord) feature if both are set.
    /// Non-digits which are also non-word characters are converted to `\D`.
    ///
    /// It takes precedence over the
    /// [`NonSpace`](Feature::NonSpace) feature if both are set.
    /// Non-digits which are also non-space characters are converted to `\D`.
    NonDigit,

    /// This feature converts any Unicode whitespace character to character class `\s`.
    ///
    /// It takes precedence over the
    /// [`NonDigit`](Feature::NonDigit) feature if both are set.
    /// Whitespace characters are converted to `\s`, the remaining non-digit characters to `\D`.
    ///
    /// It takes precedence over the
    /// [`NonWord`](Feature::NonWord) feature if both are set.
    /// Whitespace characters are converted to `\s`, the remaining non-word characters to `\W`.
    Space,

    /// This feature converts any character which is not
    /// a Unicode whitespace character to character class `\S`.
    NonSpace,

    /// This feature converts any Unicode word character to character class `\w`.
    ///
    /// It takes precedence over the
    /// [`NonDigit`](Feature::NonDigit) feature if both are set.
    /// Word characters are converted to `\w`, the remaining non-digit characters to `\D`.
    ///
    /// It takes precedence over the
    /// [`NonSpace`](Feature::NonSpace) feature if both are set.
    /// Word characters are converted to `\w`, the remaining non-space characters to `\S`.
    Word,

    /// This feature converts any character which is not
    /// a Unicode word character to character class `\W`.
    ///
    /// It takes precedence over the
    /// [`NonSpace`](Feature::NonSpace) feature if both are set.
    /// Non-words which are also non-space characters are converted to `\W`.
    NonWord,

    /// This feature detects repeated non-overlapping substrings and
    /// converts them to `{min,max}` quantifier notation.
    Repetition,

    /// This feature enables case-insensitive matching of test cases
    /// so that letters match both upper and lower case.
    CaseInsensitivity,

    /// This feature replaces non-capturing groups by capturing ones.
    CapturingGroup,
}
impl Feature {
    /// Returns `true` for every feature except `Repetition` — i.e. it
    /// treats all the non-structural features (including
    /// `CaseInsensitivity` and `CapturingGroup`) as "character class"
    /// features, exactly as the original `!matches!` form did.
    pub(crate) fn is_char_class(&self) -> bool {
        match self {
            Feature::Repetition => false,
            _ => true,
        }
    }
}
| true |
178465420556293d2b0e470a4744adf66fc3cb0c
|
Rust
|
simensgreen/bevy
|
/crates/bevy_ui/src/focus.rs
|
UTF-8
| 4,885 | 2.6875 | 3 |
[
"LicenseRef-scancode-unknown-license-reference",
"MIT",
"Apache-2.0"
] |
permissive
|
use crate::Node;
use bevy_core::FloatOrd;
use bevy_ecs::{
component::Component,
entity::Entity,
system::{Local, Query, Res},
};
use bevy_input::{mouse::MouseButton, touch::Touches, Input};
use bevy_transform::components::GlobalTransform;
use bevy_window::Windows;
use smallvec::SmallVec;
/// Pointer interaction state of a UI node, updated by `ui_focus_system`.
#[derive(Component, Copy, Clone, Eq, PartialEq, Debug)]
pub enum Interaction {
    /// The node has been pressed by mouse or touch.
    Clicked,
    /// The cursor is over the node without pressing.
    Hovered,
    /// No interaction.
    None,
}

impl Default for Interaction {
    fn default() -> Self {
        Interaction::None
    }
}

/// Whether a UI node consumes pointer focus or lets it pass through to
/// nodes beneath it.
#[derive(Component, Copy, Clone, Eq, PartialEq, Debug)]
pub enum FocusPolicy {
    /// The node blocks interaction from reaching nodes below it.
    Block,
    /// The node lets interaction pass through to nodes below it.
    Pass,
}

impl Default for FocusPolicy {
    fn default() -> Self {
        FocusPolicy::Block
    }
}
/// Per-system state: UI entities that were clicked and released within the
/// same frame, whose `Interaction` must be reset to `None` next frame.
#[derive(Default)]
pub struct State {
    entities_to_reset: SmallVec<[Entity; 1]>,
}
/// System that updates the [`Interaction`] state of UI nodes from the
/// current cursor position and mouse/touch input.
///
/// Nodes under the cursor are processed front-to-back (descending z); a
/// node with [`FocusPolicy::Block`] stops input from reaching nodes below.
#[allow(clippy::type_complexity)]
pub fn ui_focus_system(
    mut state: Local<State>,
    windows: Res<Windows>,
    mouse_button_input: Res<Input<MouseButton>>,
    touches_input: Res<Touches>,
    mut node_query: Query<(
        Entity,
        &Node,
        &GlobalTransform,
        Option<&mut Interaction>,
        Option<&FocusPolicy>,
    )>,
) {
    // Without a cursor position there is nothing to hover or click.
    let cursor_position = if let Some(cursor_position) = windows
        .get_primary()
        .and_then(|window| window.cursor_position())
    {
        cursor_position
    } else {
        return;
    };

    // reset entities that were both clicked and released in the last frame
    for entity in state.entities_to_reset.drain(..) {
        if let Ok(mut interaction) = node_query.get_component_mut::<Interaction>(entity) {
            *interaction = Interaction::None;
        }
    }

    let mouse_released =
        mouse_button_input.just_released(MouseButton::Left) || touches_input.just_released(0);
    if mouse_released {
        for (_entity, _node, _global_transform, interaction, _focus_policy) in node_query.iter_mut()
        {
            if let Some(mut interaction) = interaction {
                if *interaction == Interaction::Clicked {
                    *interaction = Interaction::None;
                }
            }
        }
    }
    // BUGFIX: a touch "click" begins on `just_pressed`, not `just_released`
    // — the old code copy-pasted the release check above, so touch clicks
    // only registered when the finger was lifted.
    let mouse_clicked =
        mouse_button_input.just_pressed(MouseButton::Left) || touches_input.just_pressed(0);

    // Collect all nodes whose bounding box contains the cursor.
    let mut moused_over_z_sorted_nodes = node_query
        .iter_mut()
        .filter_map(
            |(entity, node, global_transform, interaction, focus_policy)| {
                let position = global_transform.translation;
                let ui_position = position.truncate();
                let extents = node.size / 2.0;
                let min = ui_position - extents;
                let max = ui_position + extents;
                // if the current cursor position is within the bounds of the node, consider it for
                // clicking
                if (min.x..max.x).contains(&cursor_position.x)
                    && (min.y..max.y).contains(&cursor_position.y)
                {
                    Some((entity, focus_policy, interaction, FloatOrd(position.z)))
                } else {
                    // Cursor left the node: clear a stale hover.
                    if let Some(mut interaction) = interaction {
                        if *interaction == Interaction::Hovered {
                            *interaction = Interaction::None;
                        }
                    }
                    None
                }
            },
        )
        .collect::<Vec<_>>();

    // Front-most (largest z) nodes first.
    moused_over_z_sorted_nodes.sort_by_key(|(_, _, _, z)| -*z);

    let mut moused_over_z_sorted_nodes = moused_over_z_sorted_nodes.into_iter();
    // set Clicked or Hovered on top nodes
    for (entity, focus_policy, interaction, _) in moused_over_z_sorted_nodes.by_ref() {
        if let Some(mut interaction) = interaction {
            if mouse_clicked {
                // only consider nodes with Interaction "clickable"
                if *interaction != Interaction::Clicked {
                    *interaction = Interaction::Clicked;
                    // if the mouse was simultaneously released, reset this Interaction in the next
                    // frame
                    if mouse_released {
                        state.entities_to_reset.push(entity);
                    }
                }
            } else if *interaction == Interaction::None {
                *interaction = Interaction::Hovered;
            }
        }

        match focus_policy.cloned().unwrap_or(FocusPolicy::Block) {
            FocusPolicy::Block => {
                break;
            }
            FocusPolicy::Pass => { /* allow the next node to be hovered/clicked */ }
        }
    }
    // reset lower nodes to None
    for (_entity, _focus_policy, interaction, _) in moused_over_z_sorted_nodes {
        if let Some(mut interaction) = interaction {
            if *interaction != Interaction::None {
                *interaction = Interaction::None;
            }
        }
    }
}
| true |
711d10bc95c03f097d95ed8c4204f3a4a3994d42
|
Rust
|
rusnasonov/xlsx2csv
|
/src/main.rs
|
UTF-8
| 907 | 2.96875 | 3 |
[] |
no_license
|
mod from_xlsx;
use clap;
use std;
/// CLI entry point: parse arguments and convert the requested sheet of an
/// xlsx workbook (or stdin) to CSV, exiting non-zero on failure.
fn main() {
    // Builder chain instead of re-binding `app` at every step.
    let matches = clap::App::new("xlsx2csv")
        .version("0.1")
        .author("Ruslan Nasonov <[email protected]>")
        .about("Convert xlsx to csv ")
        .arg(
            clap::Arg::with_name("source")
                .help("Path to file or '-' for stdin")
                .required(true)
                .index(1)
        )
        .arg(
            clap::Arg::with_name("sheet")
                .short("s")
                .long("sheet")
                .takes_value(true)
                .default_value("1")
                .help("Sheet number")
        )
        .get_matches();

    // Both values are guaranteed by clap (`required` / `default_value`),
    // so a missing value here would be a programming error.
    let source = matches.value_of("source").expect("Source is not set");
    let sheet = matches.value_of("sheet").expect("Sheet is not set");

    if let Err(err) = from_xlsx::from_xlsx(source, sheet) {
        // Report failures on stderr (not stdout) so any CSV stream on
        // stdout stays clean for piping.
        eprintln!("{:?}", err);
        std::process::exit(1);
    }
}
| true |
70b0b2bd12c7cbd52a7d18b350e2ef229157dab2
|
Rust
|
id4ho/crustopals
|
/src/crustopals/tools/aes/word.rs
|
UTF-8
| 1,399 | 3.078125 | 3 |
[] |
no_license
|
use super::*;
use std::fmt;
use std::ops::Index;
/// A 4-byte AES word (one column of state or key-schedule material).
#[derive(Clone)]
pub struct Word {
  pub bytes: [u8; 4],
}

impl Word {
  /// Build a `Word` from a 4-byte slice.
  ///
  /// Panics (via `copy_from_slice`) if `slice` is not exactly 4 bytes.
  pub fn new(slice: &[u8]) -> Word {
    let mut bytes: [u8; 4] = Default::default();
    bytes.copy_from_slice(slice);
    Word { bytes }
  }

  /// Byte-wise XOR with another word (key mixing).
  pub fn xor(&self, other: &Word) -> Word {
    Word::new(&tools::xor_bytes(&self.bytes, &other.bytes)[..])
  }

  /// RotWord from the AES key schedule: rotate left by one byte.
  pub fn rotated(&self) -> Word {
    Word::new(&[self.bytes[1], self.bytes[2], self.bytes[3], self.bytes[0]])
  }

  /// SubWord: apply the AES S-box to every byte.
  pub fn sbox_mapped(&self) -> Word {
    Word::new(&[
      s_box(self.bytes[0]),
      s_box(self.bytes[1]),
      s_box(self.bytes[2]),
      s_box(self.bytes[3]),
    ])
  }

  /// Apply the inverse AES S-box to every byte (decryption key expansion).
  pub fn inv_sbox_mapped(&self) -> Word {
    Word::new(&[
      inv_s_box(self.bytes[0]),
      inv_s_box(self.bytes[1]),
      inv_s_box(self.bytes[2]),
      inv_s_box(self.bytes[3]),
    ])
  }
}
// Words compare equal iff all four bytes match.
impl PartialEq for Word {
  fn eq(&self, other: &Word) -> bool {
    self.bytes == other.bytes
  }
}
// Formats the word as 8 lowercase hex digits (e.g. `01ab23cd`).
impl fmt::Debug for Word {
  fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
    // Index the fixed-size array directly instead of hand-walking an
    // iterator with repeated `unwrap()` calls.
    write!(
      f,
      "{:02x}{:02x}{:02x}{:02x}",
      self.bytes[0], self.bytes[1], self.bytes[2], self.bytes[3],
    )
  }
}
// Allow `word[i]` access to individual bytes (panics if `index > 3`).
impl Index<usize> for Word {
  type Output = u8;

  fn index(&self, index: usize) -> &u8 {
    &self.bytes[index]
  }
}
| true |
ff5091403fad33103cccbd3611971d91c5933c7b
|
Rust
|
fossabot/alpha_vantage
|
/src/forex.rs
|
UTF-8
| 16,960 | 3.3125 | 3 |
[
"MIT"
] |
permissive
|
//! Module for Forex realtime and historical data
//!
//! APIs under this section provide a wide range of data feed for realtime and
//! historical forex (FX) rates.
//!
//! You can read about [Forex][forex] API and what it returns
//! on alphavantage documentation
//!
//! [forex]: https://www.alphavantage.co/documentation/#fx
use crate::{
user::APIKey,
util::{ForexFunction, Interval, OutputSize},
};
use reqwest::Url;
use serde_derive::Deserialize;
use std::collections::HashMap;
const LINK: &str = "https://www.alphavantage.co/query?function=";
/// Struct used to store metadata value
///
/// Mirrors the "Meta Data" section of a forex API response; `interval` and
/// `output_size` are only present for intraday endpoints.
#[derive(Debug, Clone)]
struct MetaData {
    information: String,
    from_symbol: String,
    to_symbol: String,
    last_refreshed: String,
    interval: Option<String>,
    output_size: Option<String>,
    time_zone: String,
}
/// Struct to store Entry value
///
/// One OHLC data point of a forex time series; fields are kept as the raw
/// API strings and parsed on access.
#[derive(Default, Debug, Clone)]
pub struct Entry {
    time: String,
    open: String,
    high: String,
    low: String,
    close: String,
}

/// trait which helps for performing some common operation on Vec<Entry>
pub trait VecEntry {
    /// Find a entry with a given time as a input return none if no entry found
    fn find(&self, time: &str) -> Option<Entry>;
    /// Return a entry which is of latest time period
    fn latest(&self) -> Entry;
    /// Return a top n latest Entry if n Entry is present else return Error
    fn latestn(&self, n: usize) -> Result<Vec<Entry>, &str>;
}

impl VecEntry for Vec<Entry> {
    #[must_use]
    fn find(&self, time: &str) -> Option<Entry> {
        // Linear scan: series are unsorted and typically small.
        self.iter().find(|entry| entry.time == time).cloned()
    }

    #[must_use]
    fn latest(&self) -> Entry {
        // Timestamps are ISO-like strings, so lexicographic order matches
        // chronological order. Strict `<` keeps the FIRST entry seen for a
        // duplicated maximum time, matching the original behavior.
        // Returns `Entry::default()` for an empty vector.
        let mut latest = Entry::default();
        for entry in self {
            if latest.time < entry.time {
                latest = entry.clone();
            }
        }
        latest
    }

    fn latestn(&self, n: usize) -> Result<Vec<Entry>, &str> {
        // Fail up front instead of discovering the shortfall mid-loop.
        if n > self.len() {
            return Err("desired number of latest Entry not found try using less value");
        }
        // Sort the timestamps newest-first, then look each of the top n up.
        let mut times: Vec<&str> = self.iter().map(|entry| entry.time.as_str()).collect();
        times.sort_unstable_by(|a, b| b.cmp(a));
        let mut full_list = Self::new();
        for time in times.into_iter().take(n) {
            full_list.push(
                self.find(time)
                    .expect("fail to find time value for latestn forex"),
            );
        }
        Ok(full_list)
    }
}

impl Entry {
    /// Return time for entry
    #[must_use]
    pub fn time(&self) -> &str {
        &self.time
    }

    /// Return open value
    #[must_use]
    pub fn open(&self) -> f64 {
        return_f64(&self.open)
    }

    /// Return high value
    #[must_use]
    pub fn high(&self) -> f64 {
        return_f64(&self.high)
    }

    /// Return low value
    #[must_use]
    pub fn low(&self) -> f64 {
        return_f64(&self.low)
    }

    /// Return close value
    #[must_use]
    pub fn close(&self) -> f64 {
        return_f64(&self.close)
    }
}

/// Parse an API string field as `f64`, panicking on malformed data.
fn return_f64(data: &str) -> f64 {
    data.trim()
        .parse::<f64>()
        .expect("Cannot convert string to f64")
}
/// Struct to store Forex data after forex API call
///
/// Which fields are populated depends on the API outcome: `meta_data` and
/// `forex` on success, otherwise `error_message` or `information`.
#[derive(Debug, Default)]
pub struct Forex {
    error_message: Option<String>,
    information: Option<String>,
    meta_data: Option<MetaData>,
    forex: Option<Vec<Entry>>,
}
impl Forex {
    /// Return information of data
    ///
    /// ```
    /// use alpha_vantage::util::*;
    /// let api = alpha_vantage::set_api("demo");
    /// let forex = api.forex(
    ///     ForexFunction::IntraDay,
    ///     "EUR",
    ///     "USD",
    ///     Interval::FiveMin,
    ///     OutputSize::Full,
    /// );
    /// let information = forex.information();
    /// assert_eq!(information.unwrap(), "FX Intraday (5min) Time Series");
    /// ```
    pub fn information(&self) -> Result<&str, &str> {
        self.return_meta_string("information")
    }

    /// Return from symbol
    ///
    /// ```
    /// use alpha_vantage::util::*;
    /// let api = alpha_vantage::set_api("demo");
    /// let forex = api.forex(
    ///     ForexFunction::IntraDay,
    ///     "EUR",
    ///     "USD",
    ///     Interval::FiveMin,
    ///     OutputSize::Full,
    /// );
    /// let symbol_from = forex.symbol_from();
    /// assert_eq!(symbol_from.unwrap(), "EUR");
    /// ```
    pub fn symbol_from(&self) -> Result<&str, &str> {
        self.return_meta_string("from symbol")
    }

    /// Return to symbol
    ///
    /// ```
    /// use alpha_vantage::util::*;
    /// let api = alpha_vantage::set_api("demo");
    /// let forex = api.forex(
    ///     ForexFunction::IntraDay,
    ///     "EUR",
    ///     "USD",
    ///     Interval::FiveMin,
    ///     OutputSize::Full,
    /// );
    /// let symbol_to = forex.symbol_to();
    /// assert_eq!(symbol_to.unwrap(), "USD");
    /// ```
    pub fn symbol_to(&self) -> Result<&str, &str> {
        self.return_meta_string("to symbol")
    }

    /// Return last refreshed time produce error if API returns error message or
    /// information instead of meta data
    pub fn last_refreshed(&self) -> Result<&str, &str> {
        self.return_meta_string("last refreshed")
    }

    /// Return time zone of all data time produce error if API return
    /// error message or information instead of meta data
    pub fn time_zone(&self) -> Result<&str, &str> {
        self.return_meta_string("time zone")
    }

    /// Return out interval for intraday
    ///
    /// ```
    /// use alpha_vantage::util::*;
    /// let api = alpha_vantage::set_api("demo");
    /// let forex = api.forex(
    ///     ForexFunction::IntraDay,
    ///     "EUR",
    ///     "USD",
    ///     Interval::FiveMin,
    ///     OutputSize::Full,
    /// );
    /// let interval = forex.interval();
    /// assert_eq!(interval.unwrap(), "5min");
    /// ```
    pub fn interval(&self) -> Result<&str, &str> {
        self.operate_option_meta_value("interval")
    }

    /// Return output size which can be full or compact
    ///
    /// ```
    /// use alpha_vantage::util::*;
    /// let api = alpha_vantage::set_api("demo");
    /// let forex = api.forex(
    ///     ForexFunction::IntraDay,
    ///     "EUR",
    ///     "USD",
    ///     Interval::FiveMin,
    ///     OutputSize::Full,
    /// );
    /// let output_size = forex.output_size();
    /// assert_eq!(output_size.unwrap(), "Full size");
    /// ```
    pub fn output_size(&self) -> Result<&str, &str> {
        self.operate_option_meta_value("output size")
    }

    /// Method return Entry
    pub fn entry(&self) -> Result<Vec<Entry>, &str> {
        match &self.forex {
            Some(entry) => Ok(entry.to_vec()),
            None => Err(self.failure_reason()),
        }
    }

    /// Shared failure path (previously triplicated): prefer the API's error
    /// message, then its informational message, else a generic string.
    fn failure_reason(&self) -> &str {
        if let Some(error) = &self.error_message {
            error
        } else if let Some(information) = &self.information {
            information
        } else {
            "Unknown error"
        }
    }

    /// Return a meta data field in Result type
    fn return_meta_string(&self, which_val: &str) -> Result<&str, &str> {
        if let Some(meta_data) = &self.meta_data {
            let value = match which_val {
                "information" => &meta_data.information,
                "from symbol" => &meta_data.from_symbol,
                "to symbol" => &meta_data.to_symbol,
                "time zone" => &meta_data.time_zone,
                "last refreshed" => &meta_data.last_refreshed,
                _ => "",
            };
            Ok(value)
        } else {
            Err(self.failure_reason())
        }
    }

    /// Convert out Option meta data field as a Result field
    fn operate_option_meta_value(&self, which_val: &str) -> Result<&str, &str> {
        if let Some(meta_data) = &self.meta_data {
            if let Some(value) = match which_val {
                "interval" => &meta_data.interval,
                "output size" => &meta_data.output_size,
                _ => &None,
            } {
                Ok(value)
            } else {
                Err("No value present")
            }
        } else {
            Err(self.failure_reason())
        }
    }
}
/// Entry Helper
///
/// Mirrors the JSON shape of a single OHLC record; the numeric prefixes in
/// the rename attributes match the API's field names verbatim.
#[derive(Clone, Debug, Deserialize)]
struct EntryHelper {
    #[serde(rename = "1. open")]
    open: String,
    #[serde(rename = "2. high")]
    high: String,
    #[serde(rename = "3. low")]
    low: String,
    #[serde(rename = "4. close")]
    close: String,
}

/// struct which helps for collecting forex data from website
///
/// Deserialization target for the raw API response; `convert` turns this
/// into the public [`Forex`] type.
#[derive(Debug, Deserialize)]
pub(crate) struct ForexHelper {
    #[serde(rename = "Error Message")]
    error_message: Option<String>,
    #[serde(rename = "Information")]
    information: Option<String>,
    #[serde(rename = "Meta Data")]
    meta_data: Option<HashMap<String, String>>,
    // The remaining top-level key is the time series itself, whose name
    // varies by endpoint — hence a flattened map keyed by series name.
    #[serde(flatten)]
    forex: Option<HashMap<String, HashMap<String, EntryHelper>>>,
}
impl ForexHelper {
    /// convert [ForexHelper][ForexHelper] to [Forex][Forex]
    ///
    /// The API numbers its meta-data keys differently depending on whether
    /// interval/output-size are present, so the lookups below probe several
    /// candidate keys in order.
    pub(crate) fn convert(self) -> Forex {
        let mut forex_struct = Forex::default();
        forex_struct.error_message = self.error_message;
        forex_struct.information = self.information;
        if let Some(meta_data) = self.meta_data {
            let information = &meta_data["1. Information"];
            let from_symbol = &meta_data["2. From Symbol"];
            let to_symbol = &meta_data["3. To Symbol"];
            // "Last Refreshed" may be key 4 or 5 depending on the endpoint.
            let last_refreshed = meta_data.get("4. Last Refreshed");
            let mut last_refreshed_value = return_option_value(last_refreshed);
            if last_refreshed_value.is_none() {
                let last_refreshed = meta_data.get("5. Last Refreshed");
                last_refreshed_value = return_option_value(last_refreshed);
            }
            // "Time Zone" may be key 5, 6 or 7.
            let time_zone = meta_data.get("5. Time Zone");
            let mut time_zone_value = return_option_value(time_zone);
            if time_zone_value.is_none() {
                let time_zone = meta_data.get("6. Time Zone");
                time_zone_value = return_option_value(time_zone);
            }
            if time_zone_value.is_none() {
                let time_zone = meta_data.get("7. Time Zone");
                time_zone_value = return_option_value(time_zone);
            }
            // "Output Size" may be key 4 or 6 (intraday vs. daily).
            let output_size = meta_data.get("4. Output Size");
            let mut output_size_value = return_option_value(output_size);
            if output_size_value.is_none() {
                let output_size = meta_data.get("6. Output Size");
                output_size_value = return_option_value(output_size);
            }
            // "Interval" only exists for intraday responses.
            let interval = meta_data.get("5. Interval");
            let interval_value = return_option_value(interval);
            forex_struct.meta_data = Some(MetaData {
                information: information.to_string(),
                from_symbol: from_symbol.to_string(),
                to_symbol: to_symbol.to_string(),
                last_refreshed: last_refreshed_value.expect("Last refreshed value contains None"),
                interval: interval_value,
                output_size: output_size_value,
                time_zone: time_zone_value.expect("Time zone contains None value"),
            });
        }
        let mut value: Vec<Entry> = Vec::new();
        if let Some(entry) = self.forex {
            // `entry` maps the series name to a map of timestamp -> OHLC.
            for hash in entry.values() {
                for val in hash.keys() {
                    let mut entry: Entry = crate::forex::Entry::default();
                    entry.time = val.to_string();
                    let entry_helper = hash
                        .get(val)
                        .expect("Cannot get a val from hash map")
                        .clone();
                    entry.open = entry_helper.open;
                    entry.high = entry_helper.high;
                    entry.low = entry_helper.low;
                    entry.close = entry_helper.close;
                    value.push(entry);
                }
            }
        }
        if !value.is_empty() {
            forex_struct.forex = Some(value);
        }
        forex_struct
    }
}
/// Convert Option(&String) to String
///
/// `Option::cloned` does exactly this clone-through-the-reference, so the
/// manual `match` is unnecessary.
fn return_option_value(value: Option<&std::string::String>) -> Option<String> {
    value.cloned()
}
/// Function used to create a [Forex][Forex] struct.
///
/// Instead of using this function directly calling through [APIKey][APIKey]
/// method is recommended
#[must_use]
pub fn forex(
    function: ForexFunction,
    from_symbol: &str,
    to_symbol: &str,
    interval: Interval,
    output_size: OutputSize,
    api_data: (&str, Option<u64>),
) -> Forex {
    // Build the key with or without a timeout, then delegate.
    let api = match api_data.1 {
        Some(timeout) => APIKey::set_with_timeout(api_data.0, timeout),
        None => APIKey::set_api(api_data.0),
    };
    api.forex(function, from_symbol, to_symbol, interval, output_size)
}
/// Create Url from given user parameter for reqwest crate
///
/// Builds the query string piecewise: the interval segment is only added
/// for intraday requests, the output-size segment only for `Full`.
pub(crate) fn create_url(
    function: ForexFunction,
    from_symbol: &str,
    to_symbol: &str,
    interval: Interval,
    output_size: OutputSize,
    api: &str,
) -> Url {
    let function = match function {
        ForexFunction::IntraDay => "FX_INTRADAY",
        ForexFunction::Daily => "FX_DAILY",
        ForexFunction::Weekly => "FX_WEEKLY",
        ForexFunction::Monthly => "FX_MONTHLY",
    };

    let mut url = format!(
        "{}{}&from_symbol={}&to_symbol={}",
        LINK, function, from_symbol, to_symbol
    );

    let interval = match interval {
        Interval::OneMin => "1min",
        Interval::FiveMin => "5min",
        Interval::FifteenMin => "15min",
        Interval::ThirtyMin => "30min",
        Interval::SixtyMin => "60min",
        Interval::None => "",
    };

    // Only intraday requests carry an interval (clippy: use `is_empty`).
    if !interval.is_empty() {
        url.push_str(&format!("&interval={}", interval));
    }

    url.push_str(match output_size {
        OutputSize::Full => "&outputsize=full",
        _ => "",
    });

    url.push_str(&format!("&apikey={}", api));

    url.parse().expect("Fail to parse url")
}
// Test module
#[cfg(test)]
mod test {
    use crate::util::*;
    use reqwest::Url;

    #[test]
    // Testing forex create_url() function
    fn test_forex_create_url() {
        // Daily/weekly/monthly: no interval or output-size segment.
        assert_eq!(
            super::create_url(
                ForexFunction::Daily,
                "USD",
                "NPR",
                Interval::None,
                OutputSize::None,
                "random"
            ),
            Url::parse(
                "https://www.alphavantage.co/query?function=FX_DAILY\
                 &from_symbol=USD\
                 &to_symbol=NPR\
                 &apikey=random"
            )
            .unwrap()
        );
        assert_eq!(
            super::create_url(
                ForexFunction::Weekly,
                "USD",
                "NPR",
                Interval::None,
                OutputSize::None,
                "random"
            ),
            Url::parse(
                "https://www.alphavantage.co/query?function=FX_WEEKLY\
                 &from_symbol=USD\
                 &to_symbol=NPR\
                 &apikey=random"
            )
            .unwrap()
        );
        assert_eq!(
            super::create_url(
                ForexFunction::Monthly,
                "USD",
                "NPR",
                Interval::None,
                OutputSize::None,
                "random"
            ),
            Url::parse(
                "https://www.alphavantage.co/query?function=FX_MONTHLY\
                 &from_symbol=USD\
                 &to_symbol=NPR\
                 &apikey=random"
            )
            .unwrap()
        );
        // Intraday: interval and output-size segments are appended.
        assert_eq!(
            super::create_url(
                ForexFunction::IntraDay,
                "USD",
                "NPR",
                Interval::FifteenMin,
                OutputSize::Full,
                "random"
            ),
            Url::parse(
                "https://www.alphavantage.co/query?function=FX_INTRADAY\
                 &from_symbol=USD\
                 &to_symbol=NPR\
                 &interval=15min\
                 &outputsize=full\
                 &apikey=random"
            )
            .unwrap()
        );
    }
}
| true |
1d78e0ad146af2fa3116d4ba8f331af059648389
|
Rust
|
rust-lang/miri
|
/tests/fail/shims/sync/libc_pthread_mutex_NULL_deadlock.rs
|
UTF-8
| 532 | 2.53125 | 3 |
[
"MIT",
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
//@ignore-target-windows: No libc on Windows
//
// Check that if we pass NULL attribute, then we get the default mutex type.
fn main() {
    unsafe {
        // Initializing with a NULL attribute must yield the DEFAULT
        // (non-recursive, non-errorcheck) mutex type, for which a second
        // lock on the owning thread is undefined behavior. Miri must
        // report the UB flagged by the `ERROR` annotation below.
        let mut mutex: libc::pthread_mutex_t = std::mem::zeroed();
        assert_eq!(libc::pthread_mutex_init(&mut mutex as *mut _, std::ptr::null() as *const _), 0);
        assert_eq!(libc::pthread_mutex_lock(&mut mutex as *mut _), 0);
        libc::pthread_mutex_lock(&mut mutex as *mut _); //~ ERROR: Undefined Behavior: trying to acquire already locked default mutex
    }
}
| true |
5141dad57479ebd4c791b84cfe64122379f76d1f
|
Rust
|
isgasho/falsework
|
/src/lib.rs
|
UTF-8
| 3,911 | 3.234375 | 3 |
[
"MIT"
] |
permissive
|
pub mod cli {
    use crate::cmd::Command;

    /// A command-line application: its metadata plus registered commands.
    #[derive(Debug)]
    pub struct App<'a, 'c> {
        name: &'a str,
        author: &'a str,
        version: &'a str,
        description: &'a str,
        commands: Vec<Command<'c>>,
    }

    /// Create an `App` populated with placeholder metadata; override the
    /// fields with the builder-style setters below.
    pub fn new<'a, 'c>() -> App<'a, 'c> {
        App {
            name: "Falsework",
            author: "Author name <[email protected]>",
            version: "0.0.1",
            description: "A command line program built with Falsework.",
            commands: vec![],
        }
    }

    impl<'a, 'c> App<'a, 'c> {
        /// Builder-style setter for the application name.
        pub fn name(&mut self, name: &'a str) -> &mut App<'a, 'c> {
            self.name = name;
            self
        }

        /// Builder-style setter for the author line.
        pub fn author(&mut self, author: &'a str) -> &mut App<'a, 'c> {
            self.author = author;
            self
        }

        /// Builder-style setter for the version string.
        pub fn version(&mut self, version: &'a str) -> &mut App<'a, 'c> {
            self.version = version;
            self
        }

        /// Builder-style setter for the description line.
        pub fn description(&mut self, description: &'a str) -> &mut App<'a, 'c> {
            self.description = description;
            self
        }

        /// Register a single command.
        pub fn add_cmd(&mut self, cmd: Command<'c>) -> &mut App<'a, 'c> {
            self.commands.push(cmd);
            self
        }

        /// Register several commands at once.
        pub fn commands(&mut self, cmd_list: Vec<Command<'c>>) {
            self.commands.extend(cmd_list);
        }

        /// Look up a command by its `use` string.
        pub fn get_command(&self, r#use: &str) -> Option<&Command<'c>> {
            self.commands.iter().find(|c| c.r#use == r#use)
        }

        /// Mutable variant of `get_command`.
        pub fn get_command_mut(&mut self, r#use: &str) -> Option<&mut Command<'c>> {
            self.commands.iter_mut().find(|c| c.r#use == r#use)
        }

        /// Run the application (not implemented yet).
        pub fn run(&self) {}
    }
}

pub mod cmd {
    /// A single sub-command of an application.
    #[derive(Debug)]
    pub struct Command<'c> {
        // Handler invoked when this command is selected.
        pub run: fn(),
        // Long is the long message shown in the 'help <this-command>' output.
        pub long: &'c str,
        // Short is the short description shown in the 'help' output.
        pub short: &'c str,
        // The token that selects this command on the command line.
        pub r#use: &'c str,
    }

    impl<'c> Command<'c> {
    }

    /// The value type a [`Flag`] carries.
    #[derive(Debug)]
    pub enum Types {
        I64,
        F64,
        BOOL,
        STRING,
    }

    /// A typed command-line flag with a default value, e.g.
    /// `s2s add -x 10 -y 10` where `-x` is an i64 flag defaulting to 10.
    #[derive(Debug)]
    pub struct Flag<T> {
        pub name: String,
        pub r#type: Types,
        pub usages: String,
        pub value: Box<T>,
    }

    impl<T> Flag<T> {
        // Shared initializer — the four public setters below differ only
        // in the `Types` tag they record.
        fn set(&mut self, name: &str, default: T, r#type: Types, usages: &str) {
            // `&str -> String` directly; `.parse().unwrap()` was an
            // infallible detour through `FromStr`.
            self.name = name.to_owned();
            self.value = Box::new(default);
            self.r#type = r#type;
            self.usages = usages.to_owned();
        }

        /// Configure this flag as a string-valued flag.
        pub fn string(&mut self, name: &str, default: T, usages: &str) {
            self.set(name, default, Types::STRING, usages);
        }

        /// Configure this flag as an integer-valued flag.
        pub fn int(&mut self, name: &str, default: T, usages: &str) {
            self.set(name, default, Types::I64, usages);
        }

        /// Configure this flag as a boolean flag.
        pub fn bool(&mut self, name: &str, default: T, usages: &str) {
            self.set(name, default, Types::BOOL, usages);
        }

        /// Configure this flag as a float-valued flag.
        pub fn float(&mut self, name: &str, default: T, usages: &str) {
            self.set(name, default, Types::F64, usages);
        }
    }
}
| true |
33075fc5083e64a98112b94abef090ea1141dede
|
Rust
|
jthelin/demikernel
|
/src/rust/catnapw/futures/push.rs
|
UTF-8
| 2,649 | 2.53125 | 3 |
[
"MIT"
] |
permissive
|
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT license.
//==============================================================================
// Imports
//==============================================================================
use crate::runtime::{
fail::Fail,
memory::DemiBuffer,
QDesc,
};
use ::socket2::Socket;
use ::std::{
cell::RefCell,
future::Future,
pin::Pin,
rc::Rc,
task::{
Context,
Poll,
},
};
use ::windows::Win32::Networking::WinSock::WSAEWOULDBLOCK;
//==============================================================================
// Structures
//==============================================================================
/// Push Operation Descriptor
///
/// Tracks a single non-blocking `send` on a Winsock socket until it either
/// completes or fails.
pub struct PushFuture {
    /// Associated queue descriptor.
    qd: QDesc,
    /// Underlying socket (shared, interior-mutable handle).
    socket: Rc<RefCell<Socket>>,
    /// Buffer to send.
    buf: DemiBuffer,
}
//==============================================================================
// Associate Functions
//==============================================================================
/// Associate Functions for Push Operation Descriptors
impl PushFuture {
    /// Creates a descriptor for a push operation.
    pub fn new(qd: QDesc, socket: Rc<RefCell<Socket>>, buf: DemiBuffer) -> Self {
        Self { qd, socket, buf }
    }
    /// Returns the queue descriptor associated to the target [PushFuture].
    pub fn get_qd(&self) -> QDesc {
        self.qd
    }
}
//==============================================================================
// Trait Implementations
//==============================================================================
/// Future Trait Implementation for Push Operation Descriptors
impl Future for PushFuture {
    type Output = Result<(), Fail>;
    /// Polls the target [PushFuture].
    ///
    /// Attempts one non-blocking send of the buffer. On WSAEWOULDBLOCK the
    /// waker is signalled immediately, so the executor busy-polls this future
    /// until the socket accepts the data.
    fn poll(self: Pin<&mut Self>, ctx: &mut Context<'_>) -> Poll<Self::Output> {
        let self_: &mut PushFuture = self.get_mut();
        match self_.socket.borrow().send(&self_.buf[..]) {
            // Operation completed.
            // NOTE(review): a short write (nbytes < buf.len()) is still
            // reported as success here -- confirm callers tolerate that.
            Ok(nbytes) => {
                trace!("data pushed ({:?}/{:?} bytes)", nbytes, self_.buf.len());
                Poll::Ready(Ok(()))
            },
            // Operation in progress: the socket's send buffer is full.
            Err(e) if e.raw_os_error() == Some(WSAEWOULDBLOCK.0) => {
                ctx.waker().wake_by_ref();
                Poll::Pending
            },
            // Error.
            Err(e) => {
                warn!("push failed ({:?})", e);
                Poll::Ready(Err(Fail::new(e.kind() as i32, "operation failed")))
            },
        }
    }
}
| true |
6a2e64e22c417f5bb3e5e85836ba7156c88fa75f
|
Rust
|
jlgerber/rustyphone
|
/userdb_core/src/delete/person.rs
|
UTF-8
| 1,135 | 3.328125 | 3 |
[] |
no_license
|
//! Delete a person given a login or id.
use std::convert::AsRef;
use sqlx::prelude::*;
// SQL: deletePerson(login) yields the deleted person's id.
// NOTE(review): the callers below treat an id of 0 as "nothing deleted" --
// confirm against the deletePerson stored function.
const DELETE: &str = r"
SELECT 
    *
FROM
    deletePerson($1) as id;
";
// SQL: deletePersonById(id) -- direct deletion by primary key.
const DELETE_BY_ID: &str = r"
SELECT 
    *
FROM
    deletePersonById($1) as id;
";
// Row shape returned by both delete queries: a single nullable id column.
#[derive(FromRow)]
struct Rval {
    id: Option<i32>
}
/// Delete a person record which matches the supplied login.
///
/// Returns `Ok(Some(id))` with the deleted person's id, or `Ok(None)` when
/// the database function reports no match (a NULL or 0 id).
pub async fn delete<I>(
    pool: &sqlx::PgPool,
    login: I,
) -> Result<Option<i32>, sqlx::Error>
where
    I: AsRef<str>,
{
    // `DELETE` is already a `&str`; the original passed `&DELETE` (a `&&str`)
    // which only worked via deref coercion.
    let Rval { id } = sqlx::query_as(DELETE)
        .bind(login.as_ref())
        .fetch_one(pool)
        .await?;
    // The SQL function signals "nothing deleted" with 0; map that to None.
    Ok(id.filter(|&value| value != 0))
}
/// Delete a person record directly by its id.
///
/// Returns `Ok(Some(id))` for the deleted row, or `Ok(None)` when nothing
/// matched. (The original doc comment said "phone"; this function deletes a
/// person via `deletePersonById`.)
// NOTE(review): sqlx's Postgres driver has limited unsigned-integer support;
// confirm that binding a `u32` works as expected here.
pub async fn delete_by_id(
    pool: &sqlx::PgPool,
    id: u32,
) -> Result<Option<i32>, sqlx::Error> {
    let Rval { id } = sqlx::query_as(DELETE_BY_ID)
        .bind(id)
        .fetch_one(pool)
        .await?;
    // As above: an id of 0 means nothing was deleted.
    Ok(id.filter(|&value| value != 0))
}
| true |
2e5fe518c37286ce6fdd1e78f5b033455172e24f
|
Rust
|
OskarPersson/advent-of-code
|
/2021/day21/src/main.rs
|
UTF-8
| 3,988 | 3.09375 | 3 |
[] |
no_license
|
use std::collections::HashMap;
/// Memoization table for the Dirac-dice search, keyed by the full game state
/// (p1 space, p1 score, p2 space, p2 score, is-p1-to-move) and storing the
/// (p1 wins, p2 wins) universe counts reachable from that state.
type Cache = HashMap<(i64, i64, i64, i64, bool), (i64, i64)>;
/// Parses the two players' starting positions from the puzzle input.
///
/// Each line is expected to end in `: <position>`. The number after the
/// colon is parsed in full, so multi-digit starting positions (e.g. 10) are
/// handled correctly; the previous implementation read only the final
/// character, turning a starting position of 10 into 0.
fn parse_input(contents: &str) -> (i64, i64) {
    let mut positions = contents.lines().map(|line| {
        line.rsplit_once(':')
            .expect("expected a line of the form `Player N starting position: P`")
            .1
            .trim()
            .parse::<i64>()
            .expect("starting position must be an integer")
    });
    let p1 = positions.next().expect("missing player 1 line");
    let p2 = positions.next().expect("missing player 2 line");
    (p1, p2)
}
/// Plays the deterministic-die game and returns loser score * die throws.
///
/// Each turn the active player rolls the deterministic die three times
/// (values d+1, d+2, d+3 for the running throw count d) and advances on the
/// circular 1..=10 board; the first player to reach 1000 points wins.
fn part1(contents: &str) -> i64 {
    let start = parse_input(contents);
    let mut spaces = [start.0, start.1];
    let mut scores = [0i64, 0i64];
    let mut die_throws = 0i64;
    'game: loop {
        for player in 0..2 {
            // Sum of the next three deterministic rolls:
            // (d + 1) + (d + 2) + (d + 3) = 3d + 6.
            let steps = 3 * die_throws + 6;
            die_throws += 3;
            spaces[player] = (spaces[player] + steps) % 10;
            if spaces[player] == 0 {
                spaces[player] = 10;
            }
            scores[player] += spaces[player];
            if scores[player] >= 1000 {
                break 'game;
            }
        }
    }
    // Only the winner can be >= 1000, so min() picks the loser's score.
    scores[0].min(scores[1]) * die_throws
}
/// Counts the universes in which each player wins from the given state.
///
/// Expands every combination of three quantum d3 rolls, advances the active
/// player, and recurses until one side reaches 21 points. Results are
/// memoized per full game state (the `cache` parameter's type is the file's
/// `Cache` alias spelled out).
fn quantum_roll(
    p1_space: i64,
    p1_score: i64,
    p2_space: i64,
    p2_score: i64,
    p1: bool,
    cache: &mut HashMap<(i64, i64, i64, i64, bool), (i64, i64)>,
) -> (i64, i64) {
    // Terminal states: someone already reached 21 points.
    if p1_score >= 21 {
        return (1, 0);
    }
    if p2_score >= 21 {
        return (0, 1);
    }
    let key = (p1_space, p1_score, p2_space, p2_score, p1);
    if let Some(&cached) = cache.get(&key) {
        return cached;
    }
    // Moves a pawn `steps` forward on the circular 1..=10 board.
    let advance = |space: i64, steps: i64| -> i64 {
        let landed = (space + steps) % 10;
        if landed == 0 {
            10
        } else {
            landed
        }
    };
    let mut totals = (0i64, 0i64);
    for first in 1..=3 {
        for second in 1..=3 {
            for third in 1..=3 {
                let roll = first + second + third;
                let (wins1, wins2) = if p1 {
                    let landed = advance(p1_space, roll);
                    quantum_roll(landed, p1_score + landed, p2_space, p2_score, !p1, cache)
                } else {
                    let landed = advance(p2_space, roll);
                    quantum_roll(p1_space, p1_score, landed, p2_score + landed, !p1, cache)
                };
                totals.0 += wins1;
                totals.1 += wins2;
            }
        }
    }
    cache.insert(key, totals);
    totals
}
/// Counts winning universes for both players and returns the larger count.
fn part2(contents: &str) -> i64 {
    let (p1_space, p2_space) = parse_input(contents);
    let mut cache: Cache = HashMap::new();
    let (p1_wins, p2_wins) = quantum_roll(p1_space, 0, p2_space, 0, true, &mut cache);
    p1_wins.max(p2_wins)
}
/// Entry point: solves both parts against the bundled puzzle input.
fn main() {
    let contents = include_str!("../input.txt");
    println!("part1: {}", part1(contents));
    println!("part2: {}", part2(contents));
}
#[cfg(test)]
mod tests {
    use super::*;
    // Worked example from the puzzle statement: starting spaces 4 and 8.
    #[test]
    fn test_part_1() {
        assert_eq!(
            part1(
                "Player 1 starting position: 4
Player 2 starting position: 8"
            ),
            739785
        );
    }
    // Same example; expected value is player 1's winning-universe count.
    #[test]
    fn test_part_2() {
        assert_eq!(
            part2(
                "Player 1 starting position: 4
Player 2 starting position: 8"
            ),
            444356092776315
        );
    }
}
| true |
3b66bdaaec514f00a6b2ed1ad76a3276d48308f7
|
Rust
|
eg-ayoub/ds_raft
|
/src/raft_server/log_entry.rs
|
UTF-8
| 3,258 | 3.296875 | 3 |
[
"MIT"
] |
permissive
|
use serde::{Serialize, Deserialize};
use std::cmp::min;
use std::fmt;
/// Operations replicated through the raft log.
#[derive(Serialize, Deserialize, Debug, Clone, Copy)]
pub enum RaftOperation {
    Incr,
    Decr,
}
/// A single log entry: the operation plus the term it was appended in.
#[derive(Serialize, Deserialize, Debug, Clone, Copy)]
pub struct LogEntry {
    pub operation: RaftOperation,
    pub term: u64,
}
/// Raft log abstraction. Indices are 1-based "raft indices": index 0 is the
/// virtual position before the first entry.
// NOTE(review): the &Vec<LogEntry> parameters could be &[LogEntry].
pub trait RaftList {
    fn new() -> Self;
    /// Number of entries in the log.
    fn len(&self) -> usize;
    /// Entry at 1-based `raft_index`.
    fn get(&self, raft_index: usize) -> LogEntry;
    /// Whether the entry at `raft_index` has term `term`; index 0 only
    /// matches term 0.
    fn find(&self, raft_index: usize, term: u64) -> Result<bool, &'static str>;
    /// Appends `entry`, returning the new length (= its raft index).
    fn put(&mut self, entry: LogEntry) -> usize;
    /// Term of the last entry, or 0 for an empty log.
    fn last_term(&self) -> u64;
    /// First position where the stored terms disagree with `compare_to`
    /// (aligned at offset `prev_index`), or None when there is no conflict.
    fn find_conflicts(&self, prev_index: usize, compare_to: &Vec<LogEntry>) -> Option<usize>;
    /// Keeps only the first `starting` entries.
    fn truncate(&mut self, starting: usize);
    /// Appends the tail of `compare_to` beginning at offset `starting - prev`.
    fn append_rest(&mut self, starting: usize, prev: usize, compare_to: &Vec<LogEntry>);
    /// Appends the entries of `compare_to` not yet present locally.
    fn append_all(&mut self, prev: usize, compare_to: &Vec<LogEntry>);
}
/// Vec-backed log implementation.
#[derive(Serialize, Deserialize, Debug)]
pub struct Log {
    list: Vec<LogEntry>,
}
/// Vec-backed implementation; `list[i]` holds the entry with raft index `i + 1`.
impl RaftList for Log{
    fn new() -> Log {
        Log {
            list: Vec::new()
        }
    }
    fn len(&self) -> usize {
        self.list.len()
    }
    // 1-based lookup; panics when raft_index is 0 or past the end.
    fn get(&self, raft_index: usize) -> LogEntry {
        self.list[raft_index-1]
    }
    // Index 0 is the virtual pre-log position: it matches only term 0.
    fn find(&self, raft_index: usize, term: u64) -> Result<bool, &'static str> {
        if raft_index > self.list.len() {
            Err("index out of bounds")
        } else if raft_index == 0 {
            if term != 0 {
                Err("entry at index 0")
            } else {
                Ok(true)
            }
        } else {
            Ok(self.list[raft_index-1].term == term)
        }
    }
    fn put(&mut self, entry: LogEntry) -> usize {
        self.list.push(entry);
        self.list.len()
    }
    fn last_term(&self) -> u64 {
        if self.list.len() != 0 {
            return self.list[self.list.len() - 1].term;
        }else{
            return 0;
        }
    }
    // Compares compare_to[k] against the stored entry at 0-based offset
    // prev_index + k over the overlapping range; returns the first offset
    // whose terms differ.
    fn find_conflicts(&self, prev_index: usize, compare_to: &Vec<LogEntry>) -> Option<usize> {
        for index in prev_index..min(prev_index + compare_to.len(), self.list.len()) {
            if compare_to[index - prev_index].term != self.list[index].term {
                return Some(index);
            }
        }
        return None;
    }
    fn truncate(&mut self, starting: usize) {
        self.list.truncate(starting);
    }
    // Appends compare_to[starting - prev ..].
    // NOTE(review): assumes starting >= prev; the subtraction underflows
    // otherwise -- confirm callers guarantee this.
    fn append_rest(&mut self, starting: usize, prev: usize, compare_to: &Vec<LogEntry>) {
        for index in starting-prev..compare_to.len() {
            info!("follower appended <{}>", prev + index + 1);
            self.list.push(compare_to[index]);
        }
    }
    // Appends the suffix of compare_to that extends past the local log.
    // NOTE(review): assumes self.len() >= prev -- confirm callers guarantee
    // this, otherwise the subtraction underflows.
    fn append_all(&mut self, prev: usize, compare_to: &Vec<LogEntry>) {
        for index in self.len()-prev..compare_to.len() {
            info!("follower appended <{}>", prev + index + 1);
            self.list.push(compare_to[index]);
        }
    }
}
/// Renders the log as `[(index|term|op),...]` with 1-based indices.
impl fmt::Display for Log {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let rendered: Vec<String> = self
            .list
            .iter()
            .enumerate()
            .map(|(offset, entry)| format!("({}|{}|{:?})", offset + 1, entry.term, entry.operation))
            .collect();
        write!(f, "[{}]", rendered.join(","))
    }
}
| true |
e54bd67c0d0817505c59d7f2bef84f4b7b81caa2
|
Rust
|
d3zd3z/rdump
|
/filer/src/bin/filer.rs
|
UTF-8
| 4,460 | 2.796875 | 3 |
[] |
no_license
|
// Show a tree.
extern crate cas;
extern crate byteorder;
use byteorder::{BigEndian, ReadBytesExt};
use cas::{Kind, Oid};
use cas::Result;
use cas::pdump::HexDump;
use cas::pool::{AdumpPool, ChunkSource};
use std::collections::BTreeMap;
use std::env;
use std::io::Read;
/// Opens the pool named on the command line and dumps its first backup.
fn main() {
    let mut args = env::args();
    // First element is the program name; its absence means a broken environment.
    args.next().expect("No program name given");
    let path = args.next().expect("Expecting a single argument, of the pool name");
    if args.next().is_some() {
        panic!("Unexpected extra argument");
    }
    let pool = AdumpPool::open(&path).unwrap();
    let walk = Walk { source: &pool };
    match pool.backups().unwrap().first() {
        None => println!("No backups"),
        Some(oid) => walk.show_backup(oid),
    }
}
/// Recursive tree walker over a chunk source, printing what it finds.
struct Walk<'a> {
    source: &'a ChunkSource,
}
impl<'a> Walk<'a> {
    /// Dumps a backup chunk, then descends into the root node named by its
    /// "hash" property.
    fn show_backup(&self, id: &Oid) {
        println!("back: {:?}", id);
        let ch = self.source.find(id).unwrap();
        assert_eq!(ch.kind(), Kind::new("back").unwrap());
        (&ch.data()[..]).dump();
        let mut buf = &ch.data()[..];
        let props = buf.read_props().unwrap();
        println!("props: {:#?}", props);
        // Get the backup hash.
        let hash = props.data.get("hash").unwrap();
        let oid = Oid::from_hex(hash).unwrap();
        println!("root: {:?}", oid);
        self.show_node(&oid);
    }
    /// Prints a node. Directories descend through their "children" oid,
    /// regular files through their "data" oid; other kinds are only printed.
    fn show_node(&self, id: &Oid) {
        let ch = self.source.find(id).unwrap();
        println!("kind: {:?}", ch.kind());
        // (&ch.data()[..]).dump();
        let props = (&ch.data()[..]).read_props().unwrap();
        println!("props: {:#?}", props);
        if props.kind == "DIR" {
            let child_oid = props.data.get("children").unwrap();
            let child_oid = Oid::from_hex(child_oid).unwrap();
            self.show_dir(&child_oid);
        } else if props.kind == "REG" {
            let data_oid = props.data.get("data").unwrap();
            let data_oid = Oid::from_hex(data_oid).unwrap();
            self.show_data(&data_oid);
        }
    }
    /// Prints a directory's entries and recurses into each child node.
    fn show_dir(&self, id: &Oid) {
        let ch = self.source.find(id).unwrap();
        // (&ch.data()[..]).dump();
        let entries = (&ch.data()[..]).read_dir().unwrap();
        println!("dir: {:#?}", entries);
        for child in &entries {
            println!("Walk: {:?}", child.name);
            self.show_node(&child.oid);
        }
    }
    /// Prints the kind of a data chunk (leaf; no recursion).
    fn show_data(&self, id: &Oid) {
        let ch = self.source.find(id).unwrap();
        println!("data: {:#?}", ch.kind())
    }
}
/// Binary decoding helpers layered over any `Read` source.
///
/// Strings are length-prefixed (1-byte or 2-byte big-endian length);
/// property and directory lists are read until EOF ends the stream.
// NOTE(review): try! is the pre-`?` macro spelling; kept as-is for
// compatibility with this crate's vintage.
trait Decode: Read {
    /// Reads a UTF-8 string prefixed by a 1-byte length.
    fn read_string1(&mut self) -> Result<String> {
        let len = try!(self.read_u8());
        let mut buf = vec![0u8; len as usize];
        try!(self.read_exact(&mut buf));
        Ok(try!(String::from_utf8(buf)))
    }
    /// Reads a UTF-8 string prefixed by a 2-byte big-endian length.
    fn read_string2(&mut self) -> Result<String> {
        let len = try!(self.read_u16::<BigEndian>());
        let mut buf = vec![0u8; len as usize];
        try!(self.read_exact(&mut buf));
        Ok(try!(String::from_utf8(buf)))
    }
    /// Reads a property block: a kind string (1-byte length) followed by
    /// key/value pairs (key: 1-byte-length string, value: 2-byte-length
    /// string) until EOF terminates the list.
    fn read_props(&mut self) -> Result<Props> {
        let kind = try!(self.read_string1());
        let mut dict = BTreeMap::new();
        loop {
            // EOF while reading a key is the normal end of the list.
            let key = match self.read_string1() {
                Ok(key) => key,
                Err(ref err) if err.is_unexpected_eof() => break,
                Err(e) => return Err(e),
            };
            let value = try!(self.read_string2());
            dict.insert(key, value);
        }
        Ok(Props {
            kind: kind,
            data: dict,
        })
    }
    /// Reads directory entries (2-byte-length name + 20-byte raw oid) until
    /// EOF terminates the list.
    fn read_dir(&mut self) -> Result<Vec<DirEntry>> {
        let mut result = vec![];
        loop {
            let name = match self.read_string2() {
                Ok(name) => name,
                Err(ref err) if err.is_unexpected_eof() => break,
                Err(e) => return Err(e),
            };
            let mut buf = [0u8; 20];
            try!(self.read_exact(&mut buf));
            result.push(DirEntry {
                name: name,
                oid: Oid::from_raw(&buf),
            });
        }
        Ok(result)
    }
}
// Blanket impl: every Read source gets the decoding helpers.
impl<T: Read> Decode for T {}
// Decoded property block: a kind tag plus sorted key/value metadata.
#[derive(Debug)]
struct Props {
    kind: String,
    data: BTreeMap<String, String>,
}
// One directory entry: a file name and the oid of its node.
#[derive(Debug)]
struct DirEntry {
    name: String,
    oid: Oid,
}
| true |
6c5ac84ed6e7005f459278fef45dbe985ff1aaaa
|
Rust
|
kconte/project-euler-rust
|
/solvers/021/src/main.rs
|
UTF-8
| 636 | 3.15625 | 3 |
[] |
no_license
|
#![warn(clippy::all)]
#![warn(clippy::pedantic)]
// Project Euler 21: evaluate the sum of all amicable numbers under 10000.
use num_integer::Roots;
fn main() {
    run();
}
/// Sums the amicable numbers below 10_000 and prints `<sum> <elapsed ns>`.
fn run() {
    let start = std::time::Instant::now();
    let mut total = 0;
    for candidate in 2..=10_000 {
        let partner = d(candidate);
        // Count each pair once (partner > candidate) and, as in the original,
        // only when the partner is itself below 10_000. Short-circuit &&
        // keeps the second d() call off the common path.
        if partner > candidate && partner < 10_000 && d(partner) == candidate {
            total += candidate + partner;
        }
    }
    let span = start.elapsed().as_nanos();
    println!("{} {}", total, span);
}
/// Sums the proper divisors of `n` (intended for `n >= 2`).
///
/// Divisor pairs `(i, n / i)` are collected up to the integer square root.
/// For perfect squares the root is added exactly once up front and excluded
/// from the pair loop. Bug fixed: the original compared `sqrt * sqrt` with
/// the accumulator (`res`, always 1 at that point) instead of `n`, so every
/// perfect square had its root double-counted (e.g. d(4) came out as 5
/// instead of 3).
fn d(n: u64) -> u64 {
    // Local integer square root (replaces num_integer::Roots::sqrt). The f64
    // estimate is exact for the magnitudes used here; the adjustment loops
    // guard against any rounding drift.
    fn isqrt(n: u64) -> u64 {
        let mut r = (n as f64).sqrt() as u64;
        while r > 0 && r * r > n {
            r -= 1;
        }
        while (r + 1) * (r + 1) <= n {
            r += 1;
        }
        r
    }
    let mut limit = isqrt(n);
    let mut sum = 1;
    if limit * limit == n {
        // Perfect square: count the root once and exclude it from the loop.
        sum += limit;
        limit -= 1;
    }
    for i in 2..=limit {
        if n % i == 0 {
            sum += i + (n / i);
        }
    }
    sum
}
| true |
9b76c0d40dba1bdb79de962414971a55f7d0c5e2
|
Rust
|
selatotal/rust-playground
|
/pickledb_dump/src/storage.rs
|
UTF-8
| 1,914 | 3.171875 | 3 |
[
"MIT"
] |
permissive
|
use std::fmt::Debug;
use pickledb::{PickleDb, PickleDbDumpPolicy, SerializationMethod};
use serde::de::DeserializeOwned;
use serde::Serialize;
/// Thin wrapper around a binary-serialized, auto-dumping PickleDb.
pub struct Storage {
    db: PickleDb,
}
impl Storage {
    /// Opens the database at `file`, reusing an existing file when present
    /// and creating a fresh one otherwise.
    pub fn new(file: &str) -> Storage {
        let db = PickleDb::load(&file, PickleDbDumpPolicy::AutoDump, SerializationMethod::Bin)
            .unwrap_or_else(|_| {
                PickleDb::new(file, PickleDbDumpPolicy::AutoDump, SerializationMethod::Bin)
            });
        Storage { db }
    }
}
impl Storage {
    /// Stores `value` under `key`.
    pub fn set<T: Serialize>(&mut self, key: &str, value: &T) -> Result<(), String> {
        self.db.set(key, value).map_err(|e| e.to_string())
    }
    /// Fetches and deserializes the value stored under `key`, if any.
    pub fn get<T: DeserializeOwned>(&self, key: &str) -> Option<T> {
        self.db.get::<T>(key)
    }
    /// Removes `key`; returns whether a value was actually removed.
    pub fn remove(&mut self, key: &str) -> Result<bool, String> {
        self.db.rem(key).map_err(|e| e.to_string())
    }
    /// Returns true when `key` is present.
    pub fn exists(&self, key: &str) -> bool {
        self.db.exists(key)
    }
    /// Creates the list `name` unless it already exists (idempotent).
    pub fn create_list(&mut self, name: &str) -> Result<(), String> {
        if !self.db.lexists(name) {
            return self.db.lcreate(name).map(|_| ()).map_err(|e| e.to_string());
        }
        Ok(())
    }
    /// Appends `value` to `list`.
    pub fn ladd<T: Serialize + Debug>(&mut self, list: &str, value: &T) -> Result<(), String> {
        // ok_or_else defers the message formatting to the failure path; the
        // original ok_or(format!(..)) allocated the string on every call.
        self.db
            .ladd(list, value)
            .ok_or_else(|| format!("Error on trying to persist object: {:?}", value))
            .map(|_| ())
    }
    /// Returns the first element of `list`, if any.
    pub fn lfirst<T: DeserializeOwned>(&self, list: &str) -> Option<T> {
        self.db.lget::<T>(list, 0)
    }
    /// Removes the element at position `i` from `list`.
    // NOTE(review): lpop::<()> asks pickledb to deserialize the removed item
    // as the unit type; verify this succeeds for non-unit payloads.
    pub fn lremove(&mut self, list: &str, i: usize) -> Result<(), String> {
        self.db.lpop::<()>(list, i).ok_or_else(|| {
            format!(
                "Error on trying to remove object on position {} from list {}",
                i, list
            )
        })
    }
    /// Returns true when `list` has no elements.
    pub fn list_is_empty(&self, list: &str) -> bool {
        self.db.llen(list) == 0
    }
}
| true |
5caaf3539bf6bb73cb6dc0a27f115febece9c794
|
Rust
|
caelia/sukkiri
|
/sukkiri-lib/src/storage/sqlite3.rs
|
UTF-8
| 3,407 | 2.796875 | 3 |
[
"BSD-3-Clause"
] |
permissive
|
extern crate rusqlite;
use super::sql_queries::sqlite3 as queries;
pub use super::base::SKStore;
use std::mem;
use rusqlite::{SqliteConnection, SqliteRows};
use rusqlite::SQLITE_OPEN_READ_ONLY as READ_ONLY;
use self::OptSqliteConnection::{ReadOnly, ReadWrite, NoConn};
pub enum OptSqliteConnection {
ReadOnly(SqliteConnection),
ReadWrite(SqliteConnection),
NoConn,
}
impl OptSqliteConnection {
fn take(&mut self) -> OptSqliteConnection {
mem::replace(self, NoConn)
}
fn expect(self, writeable: bool) -> SqliteConnection {
match self {
ReadWrite(c) => {
if writeable {
c
} else {
panic!("Expected ReadOnly connection, ReadWrite found.");
}
},
ReadOnly(c) => {
if writeable {
panic!("Expected ReadWrite connection, ReadOnly found.");
} else {
c
}
},
NoConn => {
if writeable {
panic!("Expected ReadWrite connection, no connection found.");
} else {
panic!("Expected ReadOnly connection, no connection found.");
}
}
}
}
}
/// SQLite-backed store: a database path plus an optional live connection.
pub struct SKSqliteStore<'a> {
    path: &'a str,
    conn: OptSqliteConnection
}
impl<'a> SKSqliteStore<'a> {
    /// Creates a store for `file` without opening a connection yet.
    pub fn new(file: &'a str) -> SKSqliteStore {
        SKSqliteStore { path: file, conn: NoConn }
    }
    // Runs `f` with a read-only connection.
    // NOTE(review): `expect` consumes the connection and nothing stores it
    // back, so the store ends up in NoConn after each *_action call --
    // confirm that is the intended lifecycle.
    fn read_action<'b, F: Fn(SqliteConnection) -> SqliteRows<'b>>(&mut self, f: F) -> SqliteRows<'b> {
        self.connect();
        let konn = self.conn.expect(false);
        f(konn)
    }
    // Runs `f` with a read-write connection, discarding any result rows.
    fn write_action<F: Fn(SqliteConnection)>(&mut self, f: F) {
        self.connect_rw();
        let konn = self.conn.expect(true);
        f(konn)
    }
    // Runs `f` with a read-write connection, returning its result rows.
    fn rw_action<'b, F: Fn(SqliteConnection) -> SqliteRows<'b>>(&mut self, f: F) -> SqliteRows<'b> {
        self.connect_rw();
        let konn = self.conn.expect(true);
        f(konn)
    }
}
impl<'a> SKStore for SKSqliteStore<'a> {
    /// Database setup; currently only logs (no schema creation yet).
    fn initialize(&self) {
        println!("Initializing database.");
    }
    /// Ensures a read-only connection, replacing any read-write one.
    fn connect(&mut self) {
        // Shared open logic for the two arms that need a new connection.
        fn connect_unsafe(this: &mut SKSqliteStore) {
            this.conn = ReadOnly(SqliteConnection::open_with_flags(this.path, READ_ONLY).unwrap());
        }
        match self.conn {
            ReadOnly(_) => (),
            ReadWrite(_) => {
                self.disconnect();
                connect_unsafe(self);
            },
            NoConn => connect_unsafe(self)
        };
    }
    /// Ensures a read-write connection, replacing any read-only one.
    fn connect_rw(&mut self) {
        fn connect_rw_unsafe(this: &mut SKSqliteStore) {
            this.conn = ReadWrite(SqliteConnection::open(this.path).unwrap());
        };
        match self.conn {
            ReadWrite(_) => (),
            ReadOnly(_) => {
                self.disconnect();
                connect_rw_unsafe(self);
            },
            NoConn => connect_rw_unsafe(self)
        };
    }
    /// Closes any open connection, leaving the slot in `NoConn`.
    fn disconnect(&mut self) {
        let conn = self.conn.take();
        match conn {
            ReadOnly(c) | ReadWrite(c) => c.close().unwrap(),
            NoConn => ()
        };
    }
}
// Gated with #[cfg(test)] so these tests are no longer compiled into the
// library build (the original module was unconditionally compiled).
#[cfg(test)]
mod tests {
    use super::*;
    #[test]
    fn test_setup_new_db() {
        let store = SKSqliteStore::new("test.db");
        assert_eq!(store.path, "test.db");
    }
}
| true |
c902ff0ed6ab6435e627334b8b23121fea96d4bd
|
Rust
|
isabella232/semtech-udp
|
/examples/client.rs
|
UTF-8
| 2,083 | 2.59375 | 3 |
[
"Apache-2.0"
] |
permissive
|
use semtech_udp::client_runtime::UdpRuntime;
use semtech_udp::Up::PushData;
use std::net::SocketAddr;
use std::str::FromStr;
use std::time::Duration;
use structopt::StructOpt;
use tokio::time::sleep;
#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Fixed gateway MAC, used for registration and for acking downlinks.
    let mac_address = [0, 0, 0, 0, 4, 3, 2, 1];
    let cli = Opt::from_args();
    let outbound = SocketAddr::from(([0, 0, 0, 0], cli.port));
    let host = SocketAddr::from_str(cli.host.as_str())?;
    println!("Connecting to server {} from port {}", cli.host, cli.port);
    let udp_runtime = UdpRuntime::new(mac_address.clone(), outbound, host).await?;
    let (mut receiver, sender) = (udp_runtime.subscribe(), udp_runtime.publish_to());
    // Drive the UDP runtime on its own task.
    tokio::spawn(async move {
        udp_runtime.run().await.unwrap();
    });
    // Publish a random uplink every 5 seconds from a background task.
    let uplink_sender = sender.clone();
    tokio::spawn(async move {
        loop {
            println!("Sending a random uplink");
            uplink_sender
                .send(semtech_udp::Packet::Up(PushData(
                    semtech_udp::push_data::Packet::random(),
                )))
                .await
                .unwrap();
            sleep(Duration::from_secs(5)).await;
        }
    });
    // Main loop: ack every PullResp downlink; up frames should never arrive
    // on this side of the protocol.
    loop {
        let msg = receiver.recv().await?;
        println!("msg: {:?}", msg);
        match msg {
            semtech_udp::Packet::Down(down) => {
                if let semtech_udp::Down::PullResp(packet) = down {
                    let ack =
                        (*packet).into_ack_for_gateway(semtech_udp::MacAddress::new(&mac_address));
                    sender.send(ack.into()).await?;
                }
            }
            semtech_udp::Packet::Up(_up) => panic!("Should not receive Semtech up frames"),
        }
    }
}
/// Command-line options for the test client.
#[derive(Debug, StructOpt)]
#[structopt(name = "virtual-lorawan-device", about = "LoRaWAN test device utility")]
pub struct Opt {
    /// dial out port
    #[structopt(short, long, default_value = "1600")]
    pub port: u16,
    /// Semtech UDP host to connect to.
    #[structopt(short, long, default_value = "127.0.0.1:1680")]
    pub host: String,
}
| true |
cdea6b3f08747c781a4c1b05596b6a88b6ccaae3
|
Rust
|
rcore-os-infohub/ossoc2020-kszlzj-daily
|
/step2/os/src/memory/mapping/mapping.rs
|
UTF-8
| 1,578 | 2.84375 | 3 |
[] |
no_license
|
/// Owner of a virtual-to-physical mapping: the page tables backing it plus
/// the frames mapped through it.
#[derive(Default)]
pub struct Mapping {
    // Frames holding this mapping's page tables (kept alive by ownership).
    page_tables: Vec<PageTableTracker>,
    // Physical page number of the root page table.
    root_ppn: PhysicalPageNumber,
    // Mapped (virtual page, frame) pairs whose frames this mapping owns.
    mapped_pairs: VecDeque<(VirtualPageNumber, FrameTracker)>,
}
impl Mapping {
    /// Allocates a root page table frame and returns an empty mapping.
    pub fn new() -> MemoryResult<Mapping> {
        let root_table = PageTableTracker::new(FRAME_ALLOCATOR.lock().alloc()?);
        let root_ppn = root_table.page_number();
        Ok(Mapping {
            page_tables: vec![root_table],
            root_ppn,
            mapped_pairs: VecDeque::new(),
        })
    }
    /// Walks the multi-level page table for `vpn`, allocating intermediate
    /// tables on demand, and returns the leaf entry for the page.
    pub fn find_entry(&mut self,vpn: VirtualPageNumber) -> MemoryResult<&mut PageTableEntry>{
        // Access the root table through the kernel's mapping of physical
        // memory (deref_kernel).
        let root_table: &mut PageTable = PhysicalAddress::from(self.root_ppn).deref_kernel();
        let mut entry = &mut root_table.entries[vpn.levels()[0]];
        for vpn_slice in &vpn.levels()[1..] {
            if entry.is_empty() {
                // Missing intermediate table: allocate a frame for it and
                // point the entry at it with only VALID set (non-leaf).
                let new_table = PageTableTracker::new(FRAME_ALLOCATOR.lock().alloc()?);
                let new_ppn = new_table.page_number();
                *entry = PageTableEntry::new(Some(new_ppn), Flags::VALID);
                // Keep the tracker so the frame lives as long as the mapping.
                self.page_tables.push(new_table);
            }
            entry = &mut entry.get_next_table().entries[*vpn_slice];
        }
        Ok(entry)
    }
    /// Installs a single page mapping; `ppn = None` writes an entry with no
    /// target frame (flags only). Panics if `vpn` is already mapped.
    fn map_one(
        &mut self,
        vpn: VirtualPageNumber,
        ppn: Option<PhysicalPageNumber>,
        flags: Flags,
    ) -> MemoryResult<()>{
        let entry = self.find_entry(vpn)?;
        assert!(entry.is_empty(), "virtual address is already mapped");
        *entry = PageTableEntry::new(ppn,flags);
        Ok(())
    }
}
| true |
b199c113936ea5cfb75b68309dde5a1e99519461
|
Rust
|
rskgk/kuba
|
/src/geom/cell.rs
|
UTF-8
| 1,428 | 2.890625 | 3 |
[
"Apache-2.0"
] |
permissive
|
// NOTE: We make cell an isize instead of usize so that it can be manipulated with negative values,
// e.g. when using it as an offset instead of an absolute cell in a map.
pub type Cell<D> = na::Point<isize, D>;
pub type Cell2 = Cell<na::U2>;
pub type Cell3 = Cell<na::U3>;
/// Conversion from an nalgebra cell to an ndarray index of matching rank.
pub trait CellToNdIndex<NaD, NdD>
where
    NaD: na::DimName,
    NdD: nd::Dimension,
    na::DefaultAllocator: na::allocator::Allocator<isize, NaD>,
{
    fn to_ndindex(&self) -> NdD;
}
/// Constructs a [`Cell2`] from two coordinate expressions.
#[macro_export]
macro_rules! cell2 {
    ($($val: expr),+) => {{
        $crate::Cell2::new($($val),*)
    }}
}
/// Constructs a [`Cell3`] from three coordinate expressions.
#[macro_export]
macro_rules! cell3 {
    ($($val: expr),+) => {{
        $crate::Cell3::new($($val),*)
    }}
}
// Generates CellToNdIndex impls for each (nalgebra dim, ndarray dim, len) triple.
macro_rules! ndindex_from_cell_impl {
    ($($NaD: ty, $NdD: ty, $len: expr);*) => {$(
        impl CellToNdIndex<$NaD, $NdD> for Cell<$NaD> {
            #[inline]
            fn to_ndindex(&self) -> $NdD {
                // SAFETY: [isize; N] and [usize; N] have identical size and
                // alignment, so the transmute only reinterprets the bits.
                // Negative coordinates wrap to huge usize indices, which the
                // (commented-out) assert below used to guard against.
                unsafe {
                    let index: [isize; $len] = self.coords.into();
                    // Consider removing this check for performance.
                    //for val in &index[..] {
                    //    assert!(!val.is_negative(), "Negative value in cell {:?}", index);
                    //}
                    nd::Dim(std::mem::transmute::<[isize; $len], [usize; $len]>(index))
                }
            }
        }
    )*}
}
ndindex_from_cell_impl!(na::U2, nd::Ix2, 2; na::U3, nd::Ix3, 3);
| true |
b58a1d5c0c37fb5c6dce934cb802c145dfe230f6
|
Rust
|
PLUS-POSTECH/soma
|
/src/error.rs
|
UTF-8
| 2,116 | 2.671875 | 3 |
[
"Apache-2.0",
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
use failure::Fail;
/// All user-facing error conditions Soma can report; the `#[fail(display)]`
/// attribute on each variant carries the message shown to the user.
#[derive(Debug, Fail)]
pub enum Error {
    #[fail(display = "Failed to access the data directory")]
    DataDirectoryAccessDenied,
    #[fail(display = "Another Soma instance is using the data directory")]
    DataDirectoryLockFailed,
    #[fail(display = "Failed to build docker image for a problem")]
    DockerBuildFailed,
    #[fail(display = "A repository with the same name already exists")]
    DuplicateRepository,
    #[fail(display = "Failed to detect filename from the path")]
    FileNameNotFound,
    #[fail(
        display = "The specified file does not exist, or you don't have enough permission to access it"
    )]
    FileUnreachable,
    #[fail(display = "Some entry in the manifest is invalid")]
    InvalidManifest,
    #[fail(display = "The provided repository does not contain 'soma.toml' or 'soma-list.toml'")]
    InvalidRepository,
    #[fail(display = "soma-list.toml contains a duplicate or inaccessible entry")]
    InvalidSomaList,
    #[fail(
        display = "The name doesn't satisfy docker name component rules, which allows lower case alphanumerics with non-boundary '_', '__', or (multiple) '-'(s)"
    )]
    InvalidName,
    #[fail(display = "The specified file's path contains unsupported characters")]
    InvalidUnicode,
    #[fail(display = "There is a container already running for the specified problem")]
    ProblemAlreadyRunning,
    #[fail(display = "The specified problem is not found")]
    ProblemNotFound,
    #[fail(display = "There is no container running for the specified problem")]
    ProblemNotRunning,
    #[fail(display = "The provided query returned multiple problems")]
    ProblemQueryAmbiguous,
    #[fail(display = "There is an image or an container from the repository")]
    RepositoryInUse,
    #[fail(display = "The specified repository is not found")]
    RepositoryNotFound,
    #[fail(
        display = "The repository contains changes that cannot be handled by update command; Please remove and add the repository manually"
    )]
    UnsupportedUpdate,
}
/// Crate-wide result type; `failure::Error` also absorbs third-party errors.
pub type Result<T> = std::result::Result<T, failure::Error>;
| true |
77030ab5b544eaeb9c4bd2952ec3b5b904d408df
|
Rust
|
jamesmunns/slack-rs-api
|
/src/auth.rs
|
UTF-8
| 2,193 | 2.75 | 3 |
[
"Apache-2.0"
] |
permissive
|
// Copyright 2015-2016 the slack-rs authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Checks authentication & identity.
//!
//! For more information, see [Slack's API
//! documentation](https://api.slack.com/methods).
use std::collections::HashMap;
use super::{ApiResult, SlackWebRequestSender, parse_slack_response};
/// Checks authentication & identity.
///
/// Wraps https://api.slack.com/methods/auth.test
///
/// Sends an authenticated `auth.test` call (no extra parameters) and parses
/// the response into [`TestResponse`].
// NOTE(review): the `true` flag's meaning is defined by parse_slack_response
// -- from the error-path test below it appears to require `"ok": true`;
// confirm.
pub fn test<R: SlackWebRequestSender>(client: &R, token: &str) -> ApiResult<TestResponse> {
    let response = try!(client.send_authed("auth.test", token, HashMap::new()));
    parse_slack_response(response, true)
}
/// Successful `auth.test` payload: workspace URL plus team/user identifiers.
#[derive(RustcDecodable)]
pub struct TestResponse {
    pub url: String,
    pub team: String,
    pub user: String,
    pub team_id: String,
    pub user_id: String,
}
#[cfg(test)]
mod tests {
    use super::*;
    use super::super::test_helpers::*;
    /// An `"ok": false` payload must surface as an error.
    #[test]
    fn general_api_error_response() {
        let client = MockSlackWebRequestSender::respond_with(r#"{"ok": false, "err": "some_error"}"#);
        let result = test(&client, "TEST_TOKEN");
        assert!(result.is_err());
    }
    /// A successful payload is decoded into `TestResponse`.
    #[test]
    fn test_ok_response() {
        let client = MockSlackWebRequestSender::respond_with(r#"{
"ok": true,
"url": "https:\/\/example-team.slack.com\/",
"team": "example team",
"user": "testuser",
"team_id": "T12345678",
"user_id": "U12345678"
}"#);
        let result = test(&client, "TEST_TOKEN");
        if let Err(err) = result {
            // panic!(format!(..)) is deprecated (a hard error in Rust 2021);
            // pass the format arguments to panic! directly instead.
            panic!("{:?}", err);
        }
        assert_eq!(result.unwrap().user, "testuser");
    }
}
| true |
61c04fbbe8587a232b361395a4453821cd7de962
|
Rust
|
makotokato/gecko-dev
|
/third_party/rust/wasmparser/src/readers/component/start.rs
|
UTF-8
| 1,931 | 3.34375 | 3 |
[
"LLVM-exception",
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
use crate::{BinaryReader, Result, SectionReader};
use std::ops::Range;
/// Represents the start function in a WebAssembly component.
#[derive(Debug, Clone)]
pub struct ComponentStartFunction {
    /// The index to the start function.
    pub func_index: u32,
    /// The start function arguments.
    ///
    /// The arguments are specified by value index.
    pub arguments: Box<[u32]>,
    /// The number of expected results for the start function.
    pub results: u32,
}
/// A reader for the start section of a WebAssembly component.
///
/// Thin newtype over the [`BinaryReader`] positioned at the section payload.
#[derive(Clone)]
pub struct ComponentStartSectionReader<'a>(BinaryReader<'a>);
impl<'a> ComponentStartSectionReader<'a> {
    /// Constructs a new `ComponentStartSectionReader` for the given data and offset.
    pub fn new(data: &'a [u8], offset: usize) -> Result<Self> {
        Ok(Self(BinaryReader::new_with_offset(data, offset)))
    }
    /// Gets the original position of the section reader.
    pub fn original_position(&self) -> usize {
        self.0.original_position()
    }
    /// Reads the start function from the section.
    ///
    /// # Examples
    /// ```
    /// use wasmparser::ComponentStartSectionReader;
    ///
    /// # let data: &[u8] = &[0x00, 0x03, 0x01, 0x02, 0x03, 0x01];
    /// let mut reader = ComponentStartSectionReader::new(data, 0).unwrap();
    /// let start = reader.read().expect("start");
    /// println!("Start: {:?}", start);
    /// ```
    pub fn read(&mut self) -> Result<ComponentStartFunction> {
        self.0.read_component_start()
    }
}
/// Delegating `SectionReader` implementation.
impl<'a> SectionReader for ComponentStartSectionReader<'a> {
    type Item = ComponentStartFunction;
    fn read(&mut self) -> Result<Self::Item> {
        // Fully qualified so this dispatches to the inherent `read` above
        // rather than recursing into this trait method.
        Self::read(self)
    }
    fn eof(&self) -> bool {
        self.0.eof()
    }
    fn original_position(&self) -> usize {
        // Same disambiguation as `read` above.
        Self::original_position(self)
    }
    fn range(&self) -> Range<usize> {
        self.0.range()
    }
}
| true |
9aab63b5a3d3abbc22ffe215d5a3dffda92050cb
|
Rust
|
fredmorcos/attic
|
/Projects/Crawl (Rust)/crawl-reqwest/src/main.rs
|
UTF-8
| 4,077 | 2.640625 | 3 |
[
"Unlicense"
] |
permissive
|
use bytes::buf::Buf;
use futures::future::{self, Future};
use futures::stream::Stream;
use log::{error, info, warn};
use reqwest::r#async::Client;
use select::document::Document;
use select::predicate::Name;
use std::collections::HashMap;
use std::string::FromUtf8Error;
use std::sync::{Arc, Mutex};
use structopt::StructOpt;
use tokio::runtime::Runtime;
use url::Url;
// Transport-level error from reqwest.
type ReqErr = reqwest::Error;
// Body-decode result: the page text, or a UTF-8 conversion failure. Kept
// separate from the transport error so the caller can handle each path.
type FetchRes = Result<String, FromUtf8Error>;
/// GETs `url` and resolves to the response body decoded as UTF-8.
fn fetch(url: Url, client: Arc<Client>) -> impl Future<Item = FetchRes, Error = ReqErr> {
    client.get(url).send().and_then(|res| {
        res.into_body()
            .concat2()
            .map(|body| Ok(String::from_utf8(body.collect())?))
    })
}
/// Yields the value of `attr_name` for every `<name>` element in `document`
/// that carries that attribute.
fn get_elements<'i, 'a: 'i>(
    document: &'a Document,
    name: &'i str,
    attr_name: &'i str,
) -> impl Iterator<Item = &'a str> + 'i {
    document
        .find(Name(name))
        .filter_map(move |n| n.attr(attr_name))
}
/// Resolves `link` against `url`. Returns `Ok(None)` when the joined URL
/// leaves the base host (the crawl stays same-host); otherwise the joined
/// URL is normalized (fragment/query stripped) and returned.
// NOTE(review): only the host is compared -- scheme and port changes count
// as the same site. Confirm that is intended.
fn concat(url: &Url, link: &str) -> Result<Option<Url>, url::ParseError> {
    let mut joined = url.join(link)?;
    let url_host = url.host();
    let joined_host = joined.host();
    if joined_host != url_host {
        return Ok(None);
    }
    normalize(&mut joined);
    Ok(Some(joined))
}
/// Strips the fragment and query so equivalent pages dedupe to one URL.
fn normalize(url: &mut Url) {
    url.set_fragment(None);
    url.set_query(None);
}
/// All same-host links (`<a href=...>`) of `document`, resolved against
/// `url` and normalized; unresolvable hrefs are logged and skipped.
fn get_links<'d>(document: &'d Document, url: &'d Url) -> impl Iterator<Item = Url> + 'd {
    get_elements(&document, "a", "href").filter_map(move |l| match concat(url, l) {
        Ok(Some(link)) => Some(link),
        Ok(None) => None,
        Err(e) => {
            warn!("Cannot normalize URL `{} + {}`: {}", url, l, e);
            None
        }
    })
}
// Visited-set: URL -> whether it was handled successfully (main_loop records
// false for URLs whose fetch failed).
type DB = HashMap<Url, bool>;
/// Fetches `url` and resolves to the links found on the page.
///
/// Any failure (transport or UTF-8 decode) is logged and mapped to
/// `Err(url)` so the caller can record which URL failed.
fn crawl<'a>(
    url: Url,
    client: Arc<Client>,
) -> impl Future<Item = Vec<Url>, Error = Url> + 'a + Send {
    // Clones are needed because the URL is moved into several closures.
    let url_clone = url.clone();
    let url_clone_fetch = url.clone();
    fetch(url_clone_fetch, client.clone())
        .map_err(move |e| {
            warn!("Error fetching page {}: {}", url_clone, e);
            url_clone
        })
        .and_then(move |contents| {
            info!("Fetched {}", url);
            match contents {
                Ok(contents) => {
                    let document = Document::from(contents.as_str());
                    future::ok(get_links(&document, &url).collect())
                }
                Err(e) => {
                    warn!("Error getting page contents for {}: {}", url, e);
                    future::err(url)
                }
            }
        })
        .from_err()
}
// Command-line arguments parsed via structopt. Plain `//` comments are used
// on purpose: doc comments would become part of the generated --help text.
#[derive(StructOpt)]
struct Args {
    // Start URL for the crawl (`-u <url>`).
    #[structopt(short = "-u")]
    url: String,
}
/// Crawl `urls` breadth-first one level at a time, recording each visited
/// URL in `db`, until no unvisited same-host URLs remain.
///
/// Returns `Err(())` as soon as any future in a level fails (matching the
/// all-or-nothing semantics of `Stream::collect` over `futures_unordered`).
fn main_loop(mut rt: Runtime, urls: Vec<Url>, client: Arc<Client>, db: &mut DB) -> Result<(), ()> {
    if urls.is_empty() {
        return Ok(());
    }
    let futs = urls.into_iter().map(|l| crawl(l, client.clone()));
    let futs = tokio::prelude::stream::futures_unordered(futs);
    let mut new_urls = vec![];
    match rt.block_on(futs.collect()) {
        Ok(links) => {
            for link in links.into_iter().flatten() {
                // Only schedule URLs we have not seen yet. Previously the
                // discovered links were pushed into the stream that
                // `collect()` had already consumed and `new_urls` stayed
                // empty, so the crawl terminated after a single level.
                if !db.contains_key(&link) {
                    db.insert(link.clone(), true);
                    new_urls.push(link);
                }
            }
        }
        Err(bad_url) => {
            error!("Error running stream element for {}", bad_url);
            db.insert(bad_url, false);
            return Err(());
        }
    }
    main_loop(rt, new_urls, client, db)
}
/// Entry point: parse the start URL, seed the DB with it, run the crawl
/// loop to completion, then dump the visited-URL map to the log.
fn main() -> Result<(), ()> {
    env_logger::init();
    let args = Args::from_args();
    let client = Arc::new(Client::new());
    let mut url = Url::parse(&args.url).map_err(|e| {
        error!("Cannot parse URL: {}", e);
    })?;
    normalize(&mut url);
    let url_clone = url.clone();
    let mut db = DB::new();
    // Mark the start URL as visited before crawling begins.
    db.insert(url_clone, true);
    let rt = Runtime::new().map_err(|e| error!("Error creating async runtime: {}", e))?;
    main_loop(rt, vec![url], client, &mut db)?;
    info!("DB = {:?}", db);
    Ok(())
}
| true |
c69c6f889bfbfe2cf04c901d7987dc8ccd1dda6c
|
Rust
|
polymath-is/loadstone
|
/loadstone_config/src/port.rs
|
UTF-8
| 2,523 | 3.03125 | 3 |
[] |
no_license
|
use std::fmt::Display;
use crate::KB;
use enum_iterator::IntoEnumIterator;
use serde::{Deserialize, Serialize};
/// Hardware target ("port") that Loadstone can be configured for.
#[derive(Copy, Clone, Debug, PartialEq, Eq, Serialize, Deserialize, IntoEnumIterator)]
pub enum Port {
    Stm32F412,
    Wgm160P,
}
impl Default for Port {
    // Arbitrary default port for the purposes of seeding
    // the defaults in the web application
    fn default() -> Self { Self::Stm32F412 }
}
/// Broad silicon family a port belongs to.
#[derive(Copy, Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
pub enum Family {
    Stm32,
    Efm32,
}
/// Finer-grained chip line within a family.
#[derive(Copy, Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
pub enum Subfamily {
    Stm32f4,
    Efm32Gg11,
}
// Lowercase identifier used when rendering a port in text form.
impl Display for Port {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let name = match self {
            Port::Stm32F412 => "stm32f412",
            Port::Wgm160P => "wgm160p",
        };
        write!(f, "{}", name)
    }
}
// Lowercase identifier used when rendering a family in text form.
impl Display for Family {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let name = match self {
            Family::Stm32 => "stm32",
            Family::Efm32 => "efm32",
        };
        write!(f, "{}", name)
    }
}
// Lowercase identifier used when rendering a subfamily in text form.
impl Display for Subfamily {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let name = match self {
            Subfamily::Stm32f4 => "f4",
            Subfamily::Efm32Gg11 => "gg11",
        };
        write!(f, "{}", name)
    }
}
impl Port {
    /// Silicon family this port belongs to.
    pub fn family(&self) -> Family {
        match self {
            Port::Stm32F412 => Family::Stm32,
            Port::Wgm160P => Family::Efm32,
        }
    }
    /// Chip line within the family.
    pub fn subfamily(&self) -> Subfamily {
        match self {
            Port::Stm32F412 => Subfamily::Stm32f4,
            Port::Wgm160P => Subfamily::Efm32Gg11,
        }
    }
    // We might consider making these configurable later, but the need hasn't come up yet.
    /// Per-port flash/RAM origin and size, consumed when generating the
    /// linker script for the target.
    pub fn linker_script_constants(&self) -> Option<LinkerScriptConstants> {
        match self {
            Port::Stm32F412 => Some(LinkerScriptConstants {
                flash: LinkerArea { origin: 0x08000000, size: KB!(896) },
                ram: LinkerArea { origin: 0x20000000, size: KB!(256) },
            }),
            Port::Wgm160P => Some(LinkerScriptConstants {
                flash: LinkerArea { origin: 0x00000000, size: KB!(1024) },
                ram: LinkerArea { origin: 0x20000000, size: KB!(128) },
            }),
        }
    }
}
/// Flash and RAM layout constants substituted into the generated linker script.
pub struct LinkerScriptConstants {
    pub flash: LinkerArea,
    pub ram: LinkerArea,
}
/// A contiguous memory region: start address and size in bytes.
pub struct LinkerArea {
    pub origin: u32,
    pub size: usize,
}
| true |
e9cbe042777afcee2d7a9dfc1db57f39724d8aa1
|
Rust
|
c0dearm/piss
|
/src/encoder.rs
|
UTF-8
| 2,529 | 3.1875 | 3 |
[
"Apache-2.0",
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
use std::fs::File;
use std::io::Read;
use std::path::PathBuf;
use crate::errors::Error;
use crate::utils::ByteMask;
use image::{ImageBuffer, Rgb};
/// Steganographic encoder: hides the bytes of a secret file inside the
/// low bits of an RGB cover image's channel bytes.
pub struct Encoder {
    image: ImageBuffer<Rgb<u8>, Vec<u8>>, // cover image (8-bit RGB)
    secret: File,                         // payload file to embed
    mask: ByteMask,                       // splits each secret byte into per-pixel chunks
    zeroes: usize,                        // leading zero padding so the payload ends at the image's last byte
}
impl Encoder {
    /// Open the cover image and the secret file, checking up front that the
    /// secret — expanded to `mask.chunks` image bytes per secret byte —
    /// fits inside the image.
    ///
    /// Returns `Error::SecretTooLarge` when it does not; `zeroes` records
    /// how many padding bytes precede the payload.
    pub fn new(image_path: PathBuf, secret_path: PathBuf, mask: ByteMask) -> Result<Self, Error> {
        let image = image::open(image_path)?.to_rgb();
        let secret = File::open(secret_path)?;
        let metadata = secret.metadata()?;
        let image_size = image.len();
        let secret_size = (metadata.len() * mask.chunks as u64) as usize;
        if image_size < secret_size {
            Err(Error::SecretTooLarge)
        } else {
            let zeroes = image_size - secret_size;
            Ok(Encoder {
                image,
                secret,
                mask,
                zeroes,
            })
        }
    }
    /// Embed the secret into the image's masked low bits and write the
    /// result to `output` (format inferred from the extension by the
    /// `image` crate).
    pub fn save(&mut self, output: PathBuf) -> Result<(), Error> {
        let mut byte_iter = self.mask;
        // Inverted mask: keeps the high (non-payload) bits of each image byte.
        let mask = !byte_iter.mask;
        // Iterator over splitted secret bytes
        let secret_bytes = self
            .secret
            .try_clone()?
            .bytes()
            .flat_map(|b| byte_iter.set_byte(b.unwrap()));
        // Fill secret with 0s at the beginning to fit full image and zip it with it
        let image_secret_bytes = self
            .image
            .iter_mut()
            .zip((0..self.zeroes).map(|_| 0).chain(secret_bytes));
        // Write the LSB bytes to the image
        for (p, b) in image_secret_bytes {
            *p = (*p & mask) | b;
        }
        self.image.save(output)?;
        Ok(())
    }
}
#[cfg(test)]
mod tests {
    use super::{ByteMask, Encoder};
    use std::path::PathBuf;
    #[test]
    fn test_new() {
        let mask = ByteMask::new(2).unwrap();
        let encoder = Encoder::new(
            PathBuf::from("./samples/the-matrix.jpg"),
            PathBuf::from("./samples/secret.txt"),
            mask,
        )
        .unwrap();
        // Expected padding for the bundled sample image/secret pair.
        assert_eq!(encoder.zeroes, 417520);
    }
    #[test]
    fn test_save() {
        let mask = ByteMask::new(2).unwrap();
        let mut encoder = Encoder::new(
            PathBuf::from("./samples/the-matrix.jpg"),
            PathBuf::from("./samples/secret.txt"),
            mask,
        )
        .unwrap();
        // Round-trip write succeeds; the output artifact is removed afterwards.
        encoder.save(PathBuf::from("./samples/tmp.png")).unwrap();
        std::fs::remove_file("./samples/tmp.png").unwrap();
    }
}
| true |
4b3ce4bf016611f148b7e272c8e31dbb4ae4c5a9
|
Rust
|
SnakeSolid/rust-team-activity
|
/src/server/mod.rs
|
UTF-8
| 715 | 2.65625 | 3 |
[
"MIT"
] |
permissive
|
mod activity;
use self::activity::ActivityHandler;
use iron::Iron;
use mount::Mount;
use staticfile::Static;
use Config;
use Database;
/// Mount the activity REST endpoint and the static-file routes, then start
/// the HTTP server on the configured address/port. `Iron::http` blocks the
/// calling thread for the lifetime of the server.
pub fn start(config: &Config, database: Database) -> () {
    let mut mount = Mount::new();
    mount.mount("/api/v1/activity", ActivityHandler::new(config, database));
    mount.mount("/static", Static::new("public/static"));
    mount.mount("/", Static::new("public"));
    let server = config.server();
    let address = server.address();
    let port = server.port();
    println!("Listening on {}:{}...", address, port);
    match Iron::new(mount).http((address, port)) {
        Ok(_) => {}
        Err(err) => error!("Failed to start HTTP server: {}", err),
    }
}
| true |
d5486f4fbe38221649ed8be67803a4b8488f4163
|
Rust
|
Ainevsia/Leetcode-Rust
|
/Weekly Contest 181/1391. Check if There is a Valid Path in a Grid/src/main.rs
|
UTF-8
| 2,439 | 3.28125 | 3 |
[
"BSD-2-Clause"
] |
permissive
|
fn main() {
    // Smoke check: the sample grid from the problem statement has a valid path.
    assert_eq!(Solution::has_valid_path(vec![vec![2,4,3],vec![6,5,2]]), true);
}
struct Solution {}
impl Solution {
    /// LeetCode 1391: decide whether the street pieces in `grid` form a
    /// connected path from the top-left cell to the bottom-right cell.
    ///
    /// Each cell (i, j) is mapped to node (2i, 2j) of a doubled grid; the
    /// odd-coordinate node between two neighbouring cells acts as a shared
    /// "door". A cell is unioned with a door only when its street type
    /// opens in that direction, so two adjacent cells become connected
    /// exactly when both open onto the same door.
    pub fn has_valid_path(grid: Vec<Vec<i32>>) -> bool {
        let (m, n) = (grid.len(), grid[0].len());
        let mut uf = WeightedQuickUnionUF::new(m * 2 - 1, n * 2 - 1);
        // initialize union - find complete
        for i in 0..m {
            for j in 0..n {
                let r = grid[i][j];
                // Street openings: up for 2/5/6, left for 1/3/5,
                // down for 2/3/4, right for 1/4/6.
                if (r == 2 || r == 5 || r == 6) && i > 0 {
                    uf.union((i * 2, j * 2), (i * 2 - 1, j * 2))
                }
                if (r == 1 || r == 3 || r == 5) && j > 0 {
                    uf.union((i * 2, j * 2), (i * 2, j * 2 - 1))
                }
                if (r == 2 || r == 3 || r == 4) && i < m - 1 {
                    uf.union((i * 2, j * 2), (i * 2 + 1, j * 2))
                }
                if (r == 1 || r == 4 || r == 6) && j < n - 1 {
                    uf.union((i * 2, j * 2), (i * 2, j * 2 + 1))
                }
            }
        }
        uf.connected((0, 0), (m * 2 - 2, n * 2 - 2))
    }
}
/// Union-find (disjoint sets) over 2-D grid coordinates, using weighted
/// union so trees stay shallow.
struct WeightedQuickUnionUF {
    id: Vec<Vec<(usize, usize)>>,
    sz: Vec<Vec<usize>>,
}
impl WeightedQuickUnionUF {
    /// Build an `m x n` universe where every coordinate starts as the root
    /// of its own singleton set.
    pub fn new(m: usize, n: usize) -> WeightedQuickUnionUF {
        let id = (0..m)
            .map(|i| (0..n).map(|j| (i, j)).collect())
            .collect();
        WeightedQuickUnionUF {
            id,
            sz: vec![vec![1; n]; m],
        }
    }
    /// Follow parent links until reaching a root (a node that is its own
    /// parent).
    pub fn find(&self, p: (usize, usize)) -> (usize, usize) {
        let mut cur = p;
        loop {
            let parent = self.id[cur.0][cur.1];
            if parent == cur {
                return cur;
            }
            cur = parent;
        }
    }
    /// Merge the sets containing `p` and `q`, attaching the root of the
    /// smaller tree under the root of the larger one.
    pub fn union(&mut self, p: (usize, usize), q: (usize, usize)) {
        let root_p = self.find(p);
        let root_q = self.find(q);
        if root_p == root_q {
            return;
        }
        let (child, parent) = if self.sz[root_p.0][root_p.1] < self.sz[root_q.0][root_q.1] {
            (root_p, root_q)
        } else {
            (root_q, root_p)
        };
        self.id[child.0][child.1] = parent;
        self.sz[parent.0][parent.1] += self.sz[child.0][child.1];
    }
    /// Two coordinates are connected iff they share a root.
    pub fn connected(&self, p: (usize, usize), q: (usize, usize)) -> bool {
        self.find(p) == self.find(q)
    }
}
#[cfg(test)]
mod test {
    use crate::*;
    // Mirrors the smoke check in `main`: the sample grid is traversable.
    #[test]
    fn basic() {
        assert_eq!(Solution::has_valid_path(vec![vec![2,4,3],vec![6,5,2]]), true);
    }
}
| true |
891421e3e5a29cba8fd46c07b66104712dd188ee
|
Rust
|
macote/hshchk
|
/src/speed.rs
|
UTF-8
| 1,338 | 3.125 | 3 |
[
"MIT"
] |
permissive
|
use std::convert::TryInto;
// Unit labels for human-readable transfer speeds (binary, 1024-based prefixes).
pub static BPS: &str = "B/s";
pub static KBPS: &str = "KB/s";
pub static MBPS: &str = "MB/s";
pub static GBPS: &str = "GB/s";
pub static TBPS: &str = "TB/s";
/// A transfer speed scaled to the largest unit it fits in.
pub struct Speed {
    pub bytes_per_interval: u64,
    pub unit: &'static str,
}
/// Compute the average transfer speed between two progress samples.
///
/// `current_bytes` and `previous_bytes` are cumulative byte counts and
/// `elapsed_millis` is the wall-clock time between the samples. Returns
/// `0 B/s` when no time has elapsed (avoids division by zero).
pub fn get_speed(current_bytes: u64, previous_bytes: u64, elapsed_millis: u128) -> Speed {
    if elapsed_millis == 0 {
        return Speed {
            bytes_per_interval: 0,
            unit: BPS,
        };
    }
    // saturating_sub: out-of-order samples yield 0 B/s instead of an
    // integer-underflow panic.
    let speed = current_bytes.saturating_sub(previous_bytes) as u128 * 1_000 / elapsed_millis;
    if speed < 1_024 {
        Speed {
            bytes_per_interval: speed.try_into().unwrap(),
            unit: BPS,
        }
    } else if speed < 1_048_576 {
        Speed {
            bytes_per_interval: (speed / 1_024).try_into().unwrap(),
            unit: KBPS,
        }
    } else if speed < 1_073_741_824 {
        Speed {
            bytes_per_interval: (speed / 1_048_576).try_into().unwrap(),
            unit: MBPS,
        }
    } else if speed < 1_099_511_627_776 {
        // Fixed: this branch previously divided by 1_048_576 (one MiB), so
        // gigabyte-range speeds were reported ~1024x too high as "GB/s".
        Speed {
            bytes_per_interval: (speed / 1_073_741_824).try_into().unwrap(),
            unit: GBPS,
        }
    } else {
        Speed {
            bytes_per_interval: (speed / 1_099_511_627_776).try_into().unwrap(),
            unit: TBPS,
        }
    }
}
| true |
fe9567b79960027b21f13759132ec6434a2966c3
|
Rust
|
haraldh/dracut-install
|
/src/elfkit/dynamic.rs
|
UTF-8
| 3,899 | 2.90625 | 3 |
[] |
no_license
|
use super::types;
use super::{Error, Header, SectionContent};
use num_traits::FromPrimitive;
use std::io::Read;
/// Decoded payload of a `.dynamic` entry; which variant applies depends on
/// the entry's tag (see `Dynamic::from_reader`).
#[derive(Debug, Clone)]
pub enum DynamicContent {
    None,
    // (string bytes, original strtab offset when read from a file)
    String((Vec<u8>, Option<u64>)),
    Address(u64),
    Flags1(types::DynamicFlags1),
}
impl Default for DynamicContent {
    // A default entry carries no payload.
    fn default() -> Self {
        DynamicContent::None
    }
}
/// One entry of an ELF `.dynamic` section: a tag plus its decoded value.
#[derive(Debug, Clone, Default)]
pub struct Dynamic {
    pub dhtype: types::DynamicType,
    pub content: DynamicContent,
}
impl Dynamic {
    /// Parse a `.dynamic` section from `io`, resolving string-valued tags
    /// (RPATH/RUNPATH/NEEDED) through the linked string table when one is
    /// provided. Reading stops at the NULL terminator entry or at EOF.
    ///
    /// Errors when `linked` is a section other than a strtab, or when a
    /// FLAGS_1 value contains unknown bits.
    pub fn from_reader<R>(
        mut io: R,
        linked: Option<&SectionContent>,
        eh: &Header,
    ) -> Result<SectionContent, Error>
    where
        R: Read,
    {
        let strtab = match linked {
            None => None,
            Some(&SectionContent::Strtab(ref s)) => Some(s),
            any => {
                return Err(Error::LinkedSectionIsNotStrtab {
                    during: "reading dynamic",
                    link: any.cloned(),
                });
            }
        };
        let mut r = Vec::new();
        // Each entry is a (tag, value) pair of class-sized words; a failed
        // tag read means we ran off the end of the section.
        while let Ok(tag) = elf_read_uclass!(eh, io) {
            let val = elf_read_uclass!(eh, io)?;
            match types::DynamicType::from_u64(tag) {
                None => {
                    //return Err(Error::InvalidDynamicType(tag)),
                    // be conservative and return UNKNOWN
                    r.push(Dynamic {
                        dhtype: types::DynamicType::UNKNOWN,
                        content: DynamicContent::None,
                    });
                }
                Some(types::DynamicType::NULL) => {
                    // NULL terminates the dynamic array.
                    r.push(Dynamic {
                        dhtype: types::DynamicType::NULL,
                        content: DynamicContent::None,
                    });
                    break;
                }
                Some(types::DynamicType::RPATH) => {
                    r.push(Dynamic {
                        dhtype: types::DynamicType::RPATH,
                        content: DynamicContent::String(match strtab {
                            None => (Vec::default(), None),
                            Some(s) => (s.get(val as usize), Some(val)),
                        }),
                    });
                }
                Some(types::DynamicType::RUNPATH) => {
                    r.push(Dynamic {
                        dhtype: types::DynamicType::RUNPATH,
                        content: DynamicContent::String(match strtab {
                            None => (Vec::default(), None),
                            Some(s) => (s.get(val as usize), Some(val)),
                        }),
                    });
                }
                Some(types::DynamicType::NEEDED) => {
                    r.push(Dynamic {
                        dhtype: types::DynamicType::NEEDED,
                        content: DynamicContent::String(match strtab {
                            None => (Vec::default(), None),
                            Some(s) => (s.get(val as usize), Some(val)),
                        }),
                    });
                }
                Some(types::DynamicType::FLAGS_1) => {
                    r.push(Dynamic {
                        dhtype: types::DynamicType::FLAGS_1,
                        content: DynamicContent::Flags1(
                            match types::DynamicFlags1::from_bits(val) {
                                Some(v) => v,
                                None => return Err(Error::InvalidDynamicFlags1(val)),
                            },
                        ),
                    });
                }
                Some(x) => {
                    // All remaining tags are treated as raw address values.
                    r.push(Dynamic {
                        dhtype: x,
                        content: DynamicContent::Address(val),
                    });
                }
            };
        }
        Ok(SectionContent::Dynamic(r))
    }
}
| true |
39a58362b820734356f8a419f926bbf75241dc58
|
Rust
|
PickledChair/RustyMonkey
|
/src/main.rs
|
UTF-8
| 882 | 2.6875 | 3 |
[
"MIT"
] |
permissive
|
use rusty_monkey::{repl::start, execution::exec};
use whoami;
use std::io;
use std::path::PathBuf;
/// Entry point: with a CLI argument, execute it as a Monkey source file;
/// with no argument, start the interactive REPL.
fn main() {
    // The first argument after the program name, if any, is a source path.
    // (Additional arguments are ignored, as before.)
    if let Some(arg) = std::env::args().nth(1) {
        let path = PathBuf::from(arg);
        if exec(&path).is_err() {
            // Also fixes the "Could'nt" typo in the error message.
            eprintln!("Couldn't exec the Monkey source: {}", path.display());
            std::process::exit(1);
        }
    } else {
        println!("Hello {}! This is the Monkey programming language!", whoami::username());
        println!("Feel free to type in commands");
        if start(&mut io::stdout()).is_err() {
            eprintln!("Couldn't start the Monkey interpreter.");
            std::process::exit(1);
        }
    }
}
| true |
bb4af865957df9296950eeaec2ca8456e0ab6323
|
Rust
|
timClicks/lucky
|
/src/cli/charm.rs
|
UTF-8
| 805 | 2.515625 | 3 |
[
"MIT"
] |
permissive
|
use clap::{App, ArgMatches};
mod build;
mod create;
use crate::cli::*;
// `lucky charm` CLI subcommand: groups the `build` and `create` tools.
pub(super) struct CharmSubcommand;
impl<'a> CliCommand<'a> for CharmSubcommand {
    fn get_name(&self) -> &'static str {
        "charm"
    }
    fn get_app(&self) -> App<'a> {
        self.get_base_app().about("Build and create Lucky charms")
    }
    fn get_subcommands(&self) -> Vec<Box<dyn CliCommand<'a>>> {
        vec![
            Box::new(build::BuildSubcommand),
            Box::new(create::CreateSubcommand),
        ]
    }
    // Man-page style documentation shown by the CLI's doc viewer.
    fn get_doc(&self) -> Option<CliDoc> {
        Some(CliDoc {
            name: "lucky_charm",
            content: include_str!("charm/charm.md"),
        })
    }
    // The parent command does no work itself; execution happens in the
    // subcommands, so CLI data is passed through unchanged.
    fn execute_command(&self, _args: &ArgMatches, data: CliData) -> anyhow::Result<CliData> {
        Ok(data)
    }
}
| true |
00d883c24a11454d7363c052a3fabd804efd34c0
|
Rust
|
dmitri-mamrukov/blockchain-in-rust
|
/src/blockchain.rs
|
UTF-8
| 31,762 | 2.9375 | 3 |
[] |
no_license
|
use std::collections::HashSet;
use crate::{check_difficulty, Block, BlockHash, Hashable};
/// Reasons a candidate block can be rejected by
/// `Blockchain::update_with_block`.
#[derive(Debug, PartialEq)]
pub enum BlockValidationErr {
    MismatchedIndex,
    InvalidHash,
    AchronologicalTimestamp,
    MismatchedPreviousHash,
    InvalidGenesisBlockFormat,
    InvalidInput,
    InsufficientInputValue,
    InvalidCoinbaseTransaction,
    FeeExceedsCoinbaseTransactionOutputValue,
}
/**
 * A blockchain is just a block vector, which acts as a distributed ledger.
 */
#[derive(Default)]
pub struct Blockchain {
    pub blocks: Vec<Block>,
    // Hashes of outputs that have been created but not yet spent (UTXO set).
    unspent_outputs: HashSet<BlockHash>,
}
impl Blockchain {
    /// Create an empty chain with an empty unspent-output set.
    pub fn new() -> Self {
        Blockchain {
            blocks: vec![],
            unspent_outputs: HashSet::new(),
        }
    }
    /**
     * Block Verification
     * ------------------
     *
     * Each supposed valid block has a nonce attached to it that we assume took
     * an approximately certain amount of effort to generate. This
     * "approximately certain amount of effort" is described by the difficulty
     * value.
     *
     * We will verify four things now:
     *
     * 1. Actual index == stored index value (note that Bitcoin blocks don't
     *    store their index).
     *
     * 2. Block's hash fits stored difficulty value (we'll just trust the
     *    difficulty for now) (insecure).
     *
     * 3. Time is always increasing (in real life [IRL] network latency/sync
     *    demands leniency here).
     *
     * 4. Actual previous block's hash == stored previous_block_hash value
     *    (except for the genesis block).
     *
     * Security Notes
     * --------------
     *
     * This is not secure! There are some things to take into account:
     *
     * - The difficulty stored in a block is not validated.
     *
     * - The value of the coinbase transaction is not validated.
     *
     * - "Coin ownership" is neither enforced nor existent.
     *
     * - Two otherwise identical outputs from different transactions are
     *   indistinguishable.
     */
    pub fn update_with_block(&mut self, block: Block) -> Result<(), BlockValidationErr> {
        let index = self.blocks.len();
        // Header-level checks (index, proof-of-work, chain linkage).
        if block.index != index as u32 {
            return Err(BlockValidationErr::MismatchedIndex);
        } else if !check_difficulty(&block.hash(), block.difficulty) {
            return Err(BlockValidationErr::InvalidHash);
        } else if self.is_genesis_block(index) {
            // The genesis block must point at the all-zero hash.
            if block.previous_block_hash != vec![0; 32] {
                return Err(BlockValidationErr::InvalidGenesisBlockFormat);
            }
        } else {
            let previous_block = &self.blocks[index - 1];
            if block.timestamp <= previous_block.timestamp {
                return Err(BlockValidationErr::AchronologicalTimestamp);
            } else if block.previous_block_hash != previous_block.hash {
                return Err(BlockValidationErr::MismatchedPreviousHash);
            }
        }
        // Transaction-level checks: the first transaction must be the
        // coinbase; every other one must spend only unspent outputs and
        // must not create more value than it consumes.
        if let Some((coinbase, transactions)) = block.transactions.split_first() {
            if !coinbase.is_coinbase() {
                return Err(BlockValidationErr::InvalidCoinbaseTransaction);
            }
            let mut block_spent: HashSet<BlockHash> = HashSet::new();
            let mut block_created: HashSet<BlockHash> = HashSet::new();
            let mut total_fee = 0;
            for transaction in transactions {
                let input_hashes = transaction.input_hashes();
                // Every input must come from the current UTXO set.
                if !(&input_hashes - &self.unspent_outputs).is_empty() {
                    return Err(BlockValidationErr::InvalidInput);
                }
                let input_value = transaction.input_value();
                let output_value = transaction.output_value();
                if output_value > input_value {
                    return Err(BlockValidationErr::InsufficientInputValue);
                }
                // Unspent remainder of each transaction accrues as the fee.
                let fee = input_value - output_value;
                total_fee += fee;
                block_spent.extend(input_hashes);
                block_created.extend(transaction.output_hashes());
            }
            // The coinbase must pay out at least the total collected fee.
            if coinbase.output_value() < total_fee {
                return Err(BlockValidationErr::FeeExceedsCoinbaseTransactionOutputValue);
            } else {
                block_created.extend(coinbase.output_hashes());
            }
            // Commit the UTXO set changes only after all checks passed.
            self.unspent_outputs
                .retain(|output| !block_spent.contains(output));
            self.unspent_outputs.extend(block_created);
        }
        self.blocks.push(block);
        Ok(())
    }
    // The genesis block is simply the block at position 0.
    fn is_genesis_block(&self, index: usize) -> bool {
        index == 0
    }
}
#[cfg(test)]
mod blockchain_constructor_tests {
    use std::collections::HashSet;
    use super::{Block, BlockHash, Blockchain};
    // Both constructors must yield an empty chain with an empty UTXO set.
    fn assert_default_constructor(instance: Blockchain) {
        assert_eq!(Vec::<Block>::new(), instance.blocks);
        assert_eq!(HashSet::<BlockHash>::new(), instance.unspent_outputs);
    }
    #[test]
    fn constructor_with_new() {
        let instance = Blockchain::new();
        assert_default_constructor(instance);
    }
    #[test]
    fn constructor_with_default() {
        let instance: Blockchain = Default::default();
        assert_default_constructor(instance);
    }
}
#[cfg(test)]
mod blockchain_update_with_block_tests {
use crate::transaction::Output;
use crate::{now, Transaction};
use super::{check_difficulty, Block, BlockHash, BlockValidationErr, Blockchain, Hashable};
const IMPOSSIBLE_DIFFICULTY: u128 = 0x0000_0000_0000_0000_0000_0000_0000_0000;
const DIFFICULTY: u128 = 0xffff_ffff_ffff_ffff_ffff_ffff_ffff_ffff;
struct BlockOutputConfig {
unspent_output_value: u64,
output_value: u64,
expected_difference: u64,
}
fn genesis_block_hash() -> BlockHash {
vec![0; 32]
}
fn current_time() -> u128 {
now().expect("Failure to get the current time in milliseconds.")
}
fn create_coinbase_transaction() -> Transaction {
Transaction {
inputs: vec![],
outputs: vec![],
}
}
fn create_block_with_impossible_difficulty(
index: u32,
timestamp: u128,
previous_block_hash: BlockHash,
transactions: Vec<Transaction>,
) -> Block {
let block = Block::new(
index,
timestamp,
previous_block_hash,
transactions,
IMPOSSIBLE_DIFFICULTY,
);
assert_eq!(false, check_difficulty(&block.hash(), block.difficulty));
block
}
fn create_block_with_valid_difficulty(
index: u32,
timestamp: u128,
previous_block_hash: BlockHash,
transactions: Vec<Transaction>,
) -> Block {
let block = Block::new(
index,
timestamp,
previous_block_hash,
transactions,
DIFFICULTY,
);
assert_eq!(true, check_difficulty(&block.hash(), block.difficulty));
block
}
fn add_block_to_blockchain(blockchain: &mut Blockchain, block: Block) {
let original_length = blockchain.blocks.len();
let result = blockchain.update_with_block(block);
assert_eq!(true, result.is_ok());
assert_eq!(Ok(()), result);
assert_eq!(original_length + 1, blockchain.blocks.len());
}
fn assert_add_block_with_sufficient_inputs(config: BlockOutputConfig) {
let timestamp = current_time();
let genesis_block = create_block_with_valid_difficulty(
0,
timestamp,
genesis_block_hash(),
vec![Transaction {
inputs: vec![],
outputs: vec![
Output {
to_address: "Alice".to_string(),
value: 1,
},
Output {
to_address: "Bob".to_string(),
value: 2,
},
],
}],
);
let mut blockchain = Blockchain::new();
let block = create_block_with_valid_difficulty(
1,
timestamp + 1,
genesis_block.hash.clone(),
vec![
Transaction {
inputs: vec![],
outputs: vec![Output {
to_address: "Chris".to_owned(),
value: config.unspent_output_value,
}],
},
Transaction {
inputs: vec![
Output {
to_address: "Alice".to_owned(),
value: 1,
},
Output {
to_address: "Bob".to_owned(),
value: 2,
},
],
outputs: vec![Output {
to_address: "Chris".to_owned(),
value: config.output_value,
}],
},
],
);
assert!(block.transactions[1].input_value() >= block.transactions[1].output_value());
assert_eq!(
block.transactions[1].input_value(),
block.transactions[1].output_value() + config.expected_difference
);
add_block_to_blockchain(&mut blockchain, genesis_block);
add_block_to_blockchain(&mut blockchain, block);
}
#[test]
fn add_block_with_invalid_previous_block_hash() {
let wrong_block_hash = vec![];
let genesis_block =
create_block_with_valid_difficulty(0, current_time(), wrong_block_hash, vec![]);
let mut blockchain = Blockchain::new();
let result = blockchain.update_with_block(genesis_block);
assert_eq!(true, result.is_err());
assert_eq!(Err(BlockValidationErr::InvalidGenesisBlockFormat), result);
}
#[test]
fn add_block_with_index_as_one_to_empty_blockchain() {
let wrong_index = 1;
let genesis_block = Block::new(wrong_index, 2, vec![], vec![], 3);
let mut blockchain = Blockchain::new();
let result = blockchain.update_with_block(genesis_block);
assert_eq!(true, result.is_err());
assert_eq!(Err(BlockValidationErr::MismatchedIndex), result);
}
#[test]
fn add_block_with_index_as_zero_to_one_block_blockchain() {
let timestamp = current_time();
let genesis_block =
create_block_with_valid_difficulty(0, timestamp, genesis_block_hash(), vec![]);
let wrong_index = 0;
let block = create_block_with_valid_difficulty(
wrong_index,
timestamp + 1,
genesis_block.hash.clone(),
vec![],
);
let mut blockchain = Blockchain::new();
add_block_to_blockchain(&mut blockchain, genesis_block);
let result = blockchain.update_with_block(block);
assert_eq!(true, result.is_err());
assert_eq!(Err(BlockValidationErr::MismatchedIndex), result);
}
#[test]
fn add_block_with_invalid_hash() {
let timestamp = current_time();
let genesis_block =
create_block_with_valid_difficulty(0, timestamp, genesis_block_hash(), vec![]);
let block = create_block_with_impossible_difficulty(
1,
timestamp + 1,
genesis_block.hash.clone(),
vec![],
);
let mut blockchain = Blockchain::new();
add_block_to_blockchain(&mut blockchain, genesis_block);
let result = blockchain.update_with_block(block);
assert_eq!(true, result.is_err());
assert_eq!(Err(BlockValidationErr::InvalidHash), result);
}
#[test]
fn add_block_with_timestamp_earlier_than_previous_timestamp() {
let timestamp = current_time();
let genesis_block =
create_block_with_valid_difficulty(0, timestamp, genesis_block_hash(), vec![]);
let wrong_timestamp = timestamp - 1;
let block = create_block_with_valid_difficulty(
1,
wrong_timestamp,
genesis_block.hash.clone(),
vec![],
);
let mut blockchain = Blockchain::new();
add_block_to_blockchain(&mut blockchain, genesis_block);
let result = blockchain.update_with_block(block);
assert_eq!(true, result.is_err());
assert_eq!(Err(BlockValidationErr::AchronologicalTimestamp), result);
}
#[test]
fn add_block_with_timestamp_equal_to_previous_timestamp() {
let timestamp = current_time();
let genesis_block =
create_block_with_valid_difficulty(0, timestamp, genesis_block_hash(), vec![]);
let wrong_timestamp = timestamp - 1;
let block = create_block_with_valid_difficulty(
1,
wrong_timestamp,
genesis_block.hash.clone(),
vec![],
);
let mut blockchain = Blockchain::new();
add_block_to_blockchain(&mut blockchain, genesis_block);
let result = blockchain.update_with_block(block);
assert_eq!(true, result.is_err());
assert_eq!(Err(BlockValidationErr::AchronologicalTimestamp), result);
}
#[test]
fn add_block_with_mismatched_previous_hash() {
let timestamp = current_time();
let genesis_block =
create_block_with_valid_difficulty(0, timestamp, genesis_block_hash(), vec![]);
let wrong_previous_hash = vec![1, 2, 3];
let block =
create_block_with_valid_difficulty(1, timestamp + 1, wrong_previous_hash, vec![]);
let mut blockchain = Blockchain::new();
add_block_to_blockchain(&mut blockchain, genesis_block);
let result = blockchain.update_with_block(block);
assert_eq!(true, result.is_err());
assert_eq!(Err(BlockValidationErr::MismatchedPreviousHash), result);
}
#[test]
fn add_block_with_transaction_that_has_non_empty_inputs() {
let timestamp = current_time();
let wrong_inputs = vec![Output {
to_address: "Alice".to_string(),
value: 1,
}];
let genesis_block = create_block_with_valid_difficulty(
0,
timestamp,
genesis_block_hash(),
vec![Transaction {
inputs: wrong_inputs,
outputs: vec![],
}],
);
let mut blockchain = Blockchain::new();
let result = blockchain.update_with_block(genesis_block);
assert_eq!(true, result.is_err());
assert_eq!(Err(BlockValidationErr::InvalidCoinbaseTransaction), result);
}
#[test]
fn add_block_with_transactions_where_first_one_has_non_empty_inputs_case1() {
let timestamp = current_time();
let wrong_inputs = vec![Output {
to_address: "Alice".to_string(),
value: 1,
}];
let genesis_block = create_block_with_valid_difficulty(
0,
timestamp,
genesis_block_hash(),
vec![
Transaction {
inputs: wrong_inputs,
outputs: vec![],
},
Transaction {
inputs: vec![],
outputs: vec![],
},
Transaction {
inputs: vec![],
outputs: vec![],
},
],
);
let mut blockchain = Blockchain::new();
let result = blockchain.update_with_block(genesis_block);
assert_eq!(true, result.is_err());
assert_eq!(Err(BlockValidationErr::InvalidCoinbaseTransaction), result);
}
#[test]
fn add_block_with_transactions_where_first_one_has_non_empty_inputs_case2() {
let timestamp = current_time();
let wrong_inputs = vec![Output {
to_address: "Alice".to_string(),
value: 1,
}];
let genesis_block = create_block_with_valid_difficulty(
0,
timestamp,
genesis_block_hash(),
vec![
create_coinbase_transaction(),
Transaction {
inputs: vec![],
outputs: vec![],
},
Transaction {
inputs: vec![],
outputs: vec![],
},
],
);
let block = create_block_with_valid_difficulty(
1,
timestamp + 1,
genesis_block.hash.clone(),
vec![
Transaction {
inputs: wrong_inputs,
outputs: vec![],
},
Transaction {
inputs: vec![],
outputs: vec![],
},
Transaction {
inputs: vec![],
outputs: vec![],
},
],
);
let mut blockchain = Blockchain::new();
add_block_to_blockchain(&mut blockchain, genesis_block);
let result = blockchain.update_with_block(block);
assert_eq!(true, result.is_err());
assert_eq!(Err(BlockValidationErr::InvalidCoinbaseTransaction), result);
}
#[test]
fn add_block_with_outputs_less_than_fee_case1() {
let timestamp = current_time();
let genesis_block = create_block_with_valid_difficulty(
0,
timestamp,
genesis_block_hash(),
vec![Transaction {
inputs: vec![],
outputs: vec![Output {
to_address: "Alice".to_string(),
value: 1,
}],
}],
);
let mut blockchain = Blockchain::new();
let block = create_block_with_valid_difficulty(
1,
timestamp + 1,
genesis_block.hash.clone(),
vec![
create_coinbase_transaction(),
Transaction {
inputs: vec![Output {
to_address: "Alice".to_owned(),
value: 1,
}],
outputs: vec![],
},
],
);
add_block_to_blockchain(&mut blockchain, genesis_block);
let result = blockchain.update_with_block(block);
assert_eq!(true, result.is_err());
assert_eq!(
Err(BlockValidationErr::FeeExceedsCoinbaseTransactionOutputValue),
result
);
}
#[test]
fn add_block_with_outputs_less_than_fee_case2() {
let timestamp = current_time();
let genesis_block = create_block_with_valid_difficulty(
0,
timestamp,
genesis_block_hash(),
vec![Transaction {
inputs: vec![],
outputs: vec![Output {
to_address: "Alice".to_string(),
value: 1,
}],
}],
);
let mut blockchain = Blockchain::new();
let mut coinbase_transaction = create_coinbase_transaction();
coinbase_transaction.outputs = vec![Output {
to_address: "Chris".to_owned(),
value: 0,
}];
let block = create_block_with_valid_difficulty(
1,
timestamp + 1,
genesis_block.hash.clone(),
vec![
coinbase_transaction,
Transaction {
inputs: vec![Output {
to_address: "Alice".to_owned(),
value: 1,
}],
outputs: vec![],
},
],
);
add_block_to_blockchain(&mut blockchain, genesis_block);
let result = blockchain.update_with_block(block);
assert_eq!(true, result.is_err());
assert_eq!(
Err(BlockValidationErr::FeeExceedsCoinbaseTransactionOutputValue),
result
);
}
#[test]
fn add_block_with_second_transaction_that_has_non_empty_inputs() {
let timestamp = current_time();
let genesis_block = create_block_with_valid_difficulty(
0,
timestamp,
genesis_block_hash(),
vec![
create_coinbase_transaction(),
Transaction {
inputs: vec![Output {
to_address: "Alice".to_string(),
value: 1,
}],
outputs: vec![],
},
],
);
let mut blockchain = Blockchain::new();
let result = blockchain.update_with_block(genesis_block);
assert_eq!(true, result.is_err());
assert_eq!(Err(BlockValidationErr::InvalidInput), result);
}
#[test]
fn add_block_with_insufficient_inputs_case1() {
let timestamp = current_time();
let genesis_block = create_block_with_valid_difficulty(
0,
timestamp,
genesis_block_hash(),
vec![
create_coinbase_transaction(),
Transaction {
inputs: vec![],
outputs: vec![Output {
to_address: "Alice".to_string(),
value: 1,
}],
},
],
);
let mut blockchain = Blockchain::new();
let result = blockchain.update_with_block(genesis_block);
assert_eq!(true, result.is_err());
assert_eq!(Err(BlockValidationErr::InsufficientInputValue), result);
}
#[test]
fn add_block_with_insufficient_inputs_case2() {
let timestamp = current_time();
let genesis_block = create_block_with_valid_difficulty(
0,
timestamp,
genesis_block_hash(),
vec![Transaction {
inputs: vec![],
outputs: vec![
Output {
to_address: "Alice".to_string(),
value: 1,
},
Output {
to_address: "Bob".to_string(),
value: 2,
},
],
}],
);
let mut blockchain = Blockchain::new();
let block = create_block_with_valid_difficulty(
1,
timestamp + 1,
genesis_block.hash.clone(),
vec![
Transaction {
inputs: vec![],
outputs: vec![Output {
to_address: "Chris".to_owned(),
value: 4,
}],
},
Transaction {
inputs: vec![
Output {
to_address: "Alice".to_owned(),
value: 1,
},
Output {
to_address: "Bob".to_owned(),
value: 2,
},
],
outputs: vec![Output {
to_address: "Chris".to_owned(),
value: 4,
}],
},
],
);
add_block_to_blockchain(&mut blockchain, genesis_block);
let result = blockchain.update_with_block(block);
assert_eq!(true, result.is_err());
assert_eq!(Err(BlockValidationErr::InsufficientInputValue), result);
}
#[test]
fn add_block_with_exactly_sufficient_inputs() {
assert_add_block_with_sufficient_inputs(BlockOutputConfig {
unspent_output_value: 4,
output_value: 3,
expected_difference: 0,
});
}
#[test]
fn add_block_with_more_than_sufficient_inputs_case1() {
assert_add_block_with_sufficient_inputs(BlockOutputConfig {
unspent_output_value: 4,
output_value: 2,
expected_difference: 1,
});
}
#[test]
fn add_block_with_more_than_sufficient_inputs_case2() {
assert_add_block_with_sufficient_inputs(BlockOutputConfig {
unspent_output_value: 4,
output_value: 1,
expected_difference: 2,
});
}
#[test]
fn add_block_with_more_than_sufficient_inputs_case3() {
assert_add_block_with_sufficient_inputs(BlockOutputConfig {
unspent_output_value: 4,
output_value: 0,
expected_difference: 3,
});
}
#[test]
fn add_one_block_without_transactions_to_blockchain() {
let timestamp = current_time();
let genesis_block =
create_block_with_valid_difficulty(0, timestamp, genesis_block_hash(), vec![]);
let mut blockchain = Blockchain::new();
add_block_to_blockchain(&mut blockchain, genesis_block);
}
#[test]
fn add_two_blocks_without_transactions_to_blockchain() {
let timestamp = current_time();
let genesis_block =
create_block_with_valid_difficulty(0, timestamp, genesis_block_hash(), vec![]);
let block = create_block_with_valid_difficulty(
1,
timestamp + 1,
genesis_block.hash.clone(),
vec![],
);
let mut blockchain = Blockchain::new();
add_block_to_blockchain(&mut blockchain, genesis_block);
add_block_to_blockchain(&mut blockchain, block);
}
#[test]
fn add_three_blocks_without_transactions_to_blockchain() {
let timestamp = current_time();
let genesis_block =
create_block_with_valid_difficulty(0, timestamp, genesis_block_hash(), vec![]);
let block1 = create_block_with_valid_difficulty(
1,
timestamp + 1,
genesis_block.hash.clone(),
vec![],
);
let block2 =
create_block_with_valid_difficulty(2, timestamp + 2, block1.hash.clone(), vec![]);
let mut blockchain = Blockchain::new();
add_block_to_blockchain(&mut blockchain, genesis_block);
add_block_to_blockchain(&mut blockchain, block1);
add_block_to_blockchain(&mut blockchain, block2);
}
#[test]
fn add_one_block_with_one_transaction_to_blockchain() {
let timestamp = current_time();
let genesis_block = create_block_with_valid_difficulty(
0,
timestamp,
genesis_block_hash(),
vec![Transaction {
inputs: vec![],
outputs: vec![Output {
to_address: "Alice".to_string(),
value: 1,
}],
}],
);
let mut blockchain = Blockchain::new();
let mut coinbase_transaction = create_coinbase_transaction();
coinbase_transaction.outputs = vec![Output {
to_address: "Chris".to_owned(),
value: 1,
}];
let block = create_block_with_valid_difficulty(
1,
timestamp + 1,
genesis_block.hash.clone(),
vec![
coinbase_transaction,
Transaction {
inputs: vec![Output {
to_address: "Alice".to_owned(),
value: 1,
}],
outputs: vec![],
},
],
);
add_block_to_blockchain(&mut blockchain, genesis_block);
add_block_to_blockchain(&mut blockchain, block);
}
#[test]
fn add_one_block_with_two_transactions_to_blockchain() {
let timestamp = current_time();
let genesis_block = create_block_with_valid_difficulty(
0,
timestamp,
genesis_block_hash(),
vec![Transaction {
inputs: vec![],
outputs: vec![
Output {
to_address: "Alice".to_string(),
value: 1,
},
Output {
to_address: "Bob".to_owned(),
value: 2,
},
],
}],
);
let mut blockchain = Blockchain::new();
let mut coinbase_transaction = create_coinbase_transaction();
coinbase_transaction.outputs = vec![Output {
to_address: "Chris".to_owned(),
value: 3,
}];
let block = create_block_with_valid_difficulty(
1,
timestamp + 1,
genesis_block.hash.clone(),
vec![
coinbase_transaction,
Transaction {
inputs: vec![Output {
to_address: "Alice".to_owned(),
value: 1,
}],
outputs: vec![],
},
Transaction {
inputs: vec![Output {
to_address: "Bob".to_owned(),
value: 2,
}],
outputs: vec![],
},
],
);
add_block_to_blockchain(&mut blockchain, genesis_block);
add_block_to_blockchain(&mut blockchain, block);
}
#[test]
fn add_one_block_with_three_transactions_to_blockchain() {
let timestamp = current_time();
let genesis_block = create_block_with_valid_difficulty(
0,
timestamp,
genesis_block_hash(),
vec![Transaction {
inputs: vec![],
outputs: vec![
Output {
to_address: "Alice".to_string(),
value: 1,
},
Output {
to_address: "Bob".to_owned(),
value: 2,
},
Output {
to_address: "John".to_owned(),
value: 3,
},
],
}],
);
let mut blockchain = Blockchain::new();
let mut coinbase_transaction = create_coinbase_transaction();
coinbase_transaction.outputs = vec![Output {
to_address: "Chris".to_owned(),
value: 6,
}];
let block = create_block_with_valid_difficulty(
1,
timestamp + 1,
genesis_block.hash.clone(),
vec![
coinbase_transaction,
Transaction {
inputs: vec![Output {
to_address: "Alice".to_owned(),
value: 1,
}],
outputs: vec![],
},
Transaction {
inputs: vec![Output {
to_address: "Bob".to_owned(),
value: 2,
}],
outputs: vec![],
},
Transaction {
inputs: vec![Output {
to_address: "John".to_owned(),
value: 3,
}],
outputs: vec![],
},
],
);
add_block_to_blockchain(&mut blockchain, genesis_block);
add_block_to_blockchain(&mut blockchain, block);
}
}
| true |
25fcdd9249e26aed5928ffad88908447061ffb74
|
Rust
|
juliusdelta/prlink
|
/src/main.rs
|
UTF-8
| 1,688 | 2.84375 | 3 |
[] |
no_license
|
#![feature(plugin)]
#![plugin(clippy)]
extern crate clap;
extern crate glob;
extern crate url;
use clap::{App, Arg};
use std::error::Error;
use std::fs::File;
use std::io::prelude::*;
use url::form_urlencoded::{serialize};
use glob::glob;
use std::path::PathBuf;
/// Reads the entire file at `path` into a `String`.
///
/// Panics with a descriptive message on any I/O failure; this is a CLI
/// helper, so aborting on an unreadable input file is acceptable.
fn read_file(path: String) -> String {
    let mut file = match File::open(&path) {
        // `Error::description()` is deprecated and usually uninformative;
        // format the error through its `Display` impl instead.
        Err(why) => panic!("couldn't open {}: {}", path, why),
        Ok(file) => file,
    };
    let mut s = String::new();
    match file.read_to_string(&mut s) {
        Err(why) => panic!("couldn't read {}: {}", path, why),
        Ok(_) => s,
    }
}
fn main() {
    // CLI entry point: for every `*.rs` file under the directory given as the
    // first positional argument, print a markdown list item that links to the
    // Rust playground with the file's contents URL-encoded into the `code`
    // query parameter.
    let matches = App::new("prlink")
        .version("1.0")
        .author("Pradeep Gowda <[email protected]>")
        .about("Print Rust playground links")
        .arg(Arg::with_name("INPUT")
             .help("Sets the input path to use")
             .required(true)
             .index(1))
        .get_matches();
    // Safe to unwrap: clap guarantees the required INPUT argument is present.
    let dirpath = matches.value_of("INPUT").unwrap();
    // Resolve the (possibly relative) input directory against the current
    // working directory and append a `*.rs` glob pattern.
    let relative_path = PathBuf::from(dirpath);
    let mut absolute_path = std::env::current_dir().unwrap();
    absolute_path.push(relative_path);
    absolute_path.push("*.rs");
    for entry in glob(absolute_path.to_str().unwrap()).unwrap() {
        match entry {
            Ok(path) => {
                // Encode the whole file body as a single `code=...` form pair.
                let data = &[("code".to_owned(), read_file(path.to_str().unwrap().to_owned()))];
                let s = serialize(data);
                let fname = path.file_name().unwrap().to_string_lossy();
                println!("- [{:?}](http://play.rust-lang.org/?{})", fname, s);
            },
            // Unreadable directory entries are reported but do not abort the run.
            Err(e) => println!("{:?}", e),
        }
    }
}
| true |
58cb930450e0085f463e73037eb5cf0491bbb997
|
Rust
|
mbrt/ruplicity
|
/src/lib.rs
|
UTF-8
| 19,350 | 2.96875 | 3 |
[
"MIT",
"LicenseRef-scancode-unknown-license-reference",
"GPL-2.0-only"
] |
permissive
|
//! A library for reading duplicity backups.
//!
//! This library provides utilities to manage duplicity backups [1]. Backup files could be prensent
//! in the local file system, or can be accessed remotely, provided that the right backend is
//! implemented. This is a rust version of the original duplicity project, that is written in
//! Python. The goal is to provide a library to be used for different purposes (e.g. a command
//! line utility, a fusion filesystem, etc.) and to improve overall performances. Compatibility
//! with the original duplicity backup format is guaranteed.
//!
//! [1]: http://duplicity.nongnu.org/
//!
//! # Example
//! In this example we open a directory containing a backup, and print informations about the files
//! in all the snapshots.
//!
//! ```
//! use ruplicity::Backup;
//! use ruplicity::backend::local::LocalBackend;
//! use ruplicity::timefmt::TimeDisplay;
//!
//! // use the local backend to open a path in the file system containing a backup
//! let backend = LocalBackend::new("tests/backups/single_vol");
//! let backup = Backup::new(backend).unwrap();
//! for snapshot in backup.snapshots().unwrap() {
//! println!("Snapshot {}", snapshot.time().into_local_display());
//! println!("{}", snapshot.entries().unwrap());
//! }
//! ```
#![deny(
missing_copy_implementations,
missing_docs,
trivial_casts,
trivial_numeric_casts,
unsafe_code,
unstable_features,
unused_import_braces,
unused_qualifications
)]
#![cfg_attr(feature = "nightly", allow(unstable_features))]
#![cfg_attr(feature = "lints", feature(plugin))]
#![cfg_attr(feature = "lints", plugin(clippy))]
extern crate byteorder;
extern crate flate2;
extern crate regex;
extern crate tabwriter;
extern crate tar;
extern crate time;
mod macros;
mod rawpath;
pub mod backend;
pub mod collections;
pub mod manifest;
pub mod signatures;
pub mod timefmt;
use std::cell::{Ref, RefCell};
use std::fmt::{self, Display, Formatter};
use std::io;
use std::ops::Deref;
use std::path::Path;
use time::Timespec;
pub use backend::Backend;
use collections::{BackupChain, BackupSet, Collections};
use manifest::Manifest;
use signatures::Chain;
/// A top level representation of a duplicity backup.
#[derive(Debug)]
pub struct Backup<B> {
    // Backend used to list and open the backup files.
    backend: B,
    // Parsed file-name collections describing the backup and signature chains.
    collections: Collections,
    // Lazily-loaded signature chains; one slot per signature chain.
    signatures: Vec<RefCell<Option<Chain>>>,
    // Lazily-loaded manifests; one slot per snapshot.
    manifests: Vec<RefCell<Option<Manifest>>>,
}
/// Represents all the snapshots in a backup.
pub struct Snapshots<'a> {
    backup: &'a dyn ResourceCache,
}
/// An iterator over the snapshots in a backup.
pub struct SnapshotsIter<'a> {
    // Iteration state over the chains and their incremental sets.
    set_iter: CollectionsIter<'a>,
    // Index of the chain the iterator is currently walking.
    chain_id: usize,
    // Index of the snapshot within the current chain.
    sig_id: usize,
    // Global snapshot index across all chains.
    man_id: usize,
    backup: &'a dyn ResourceCache,
}
/// A snapshot in a backup.
pub struct Snapshot<'a> {
    set: &'a BackupSet,
    // the number of the parent backup chain, starting from zero
    chain_id: usize,
    // Index of this snapshot within its chain.
    sig_id: usize,
    // Global snapshot index, used to look up the cached manifest.
    man_id: usize,
    backup: &'a dyn ResourceCache,
}
/// Contains the files present in a certain backup snapshot.
pub struct SnapshotEntries<'a> {
    // Borrow of the cached signature chain; always `Some` once constructed.
    chain: Ref<'a, Option<Chain>>,
    sig_id: usize,
}
/// Reference to a Manifest.
#[derive(Debug)]
pub struct ManifestRef<'a>(Ref<'a, Option<Manifest>>);
// Two-level cursor: the current chain plus an optional iterator over its
// incremental sets.
struct CollectionsIter<'a> {
    chain_iter: collections::ChainIter<'a, BackupChain>,
    incset_iter: Option<collections::BackupSetIter<'a>>,
}
/// Allows to be used as an interface for `Backup` struct without generic parameters. This allows
/// to reduce code size, since we don't have to godegen the entire module for different Backend
/// generic parameters. This trait is used as an interface between `Backup` and its inner
/// components.
trait ResourceCache {
    fn _collections(&self) -> &Collections;
    fn _signature_chain(&self, chain_id: usize) -> io::Result<Ref<Option<Chain>>>;
    fn _manifest(
        &self,
        chain_id: usize,
        manifest_path: &str,
    ) -> Result<Ref<Option<Manifest>>, manifest::ParseError>;
}
impl<B: Backend> Backup<B> {
    /// Opens an existing backup accessed through the given backend.
    ///
    /// # Errors
    /// Fails whenever the backend fails a file operation while listing the
    /// backup files. A backend pointing at a location without backup files
    /// yields an empty backup rather than an error.
    ///
    /// # Examples
    /// ```
    /// use ruplicity::Backup;
    /// use ruplicity::backend::local::LocalBackend;
    ///
    /// // use the local backend to open a path in the file system containing a backup
    /// let backend = LocalBackend::new("tests/backups/single_vol");
    /// let backup = Backup::new(backend).unwrap();
    /// println!("Got backup with {} snapshots!", backup.snapshots().unwrap().into_iter().count());
    /// ```
    pub fn new(backend: B) -> io::Result<Self> {
        let collections = Collections::from_filenames(backend.file_names()?);
        // Reserve one lazily-filled cache slot per signature chain and one per
        // snapshot; the actual data is loaded on first use.
        let num_chains = collections.signature_chains().count();
        let signatures = (0..num_chains).map(|_| RefCell::new(None)).collect();
        let manifests = (0..collections.num_snapshots())
            .map(|_| RefCell::new(None))
            .collect();
        Ok(Backup {
            backend: backend,
            collections: collections,
            signatures: signatures,
            manifests: manifests,
        })
    }

    /// Constructs a view over the snapshots currently present in this backup.
    pub fn snapshots(&self) -> io::Result<Snapshots> {
        // Kept fallible so that lazily-loaded collections can be introduced
        // later without an API break.
        Ok(Snapshots { backup: self })
    }

    /// Consumes this backup and returns the underlying backend.
    pub fn into_inner(self) -> B {
        self.backend
    }
}
impl<'a> Snapshots<'a> {
    /// Returns the low level representation of the snapshots.
    pub fn as_collections(&self) -> &'a Collections {
        self.backup._collections()
    }
}
impl<'a> IntoIterator for Snapshots<'a> {
    type Item = Snapshot<'a>;
    type IntoIter = SnapshotsIter<'a>;
    fn into_iter(self) -> Self::IntoIter {
        // Start at the first chain with no incremental-set iterator yet; the
        // iterator's `next` lazily opens each chain's incremental sets.
        let set_iter = CollectionsIter {
            chain_iter: self.backup._collections().backup_chains(),
            incset_iter: None,
        };
        // in future, when we will add lazy collections,
        // this could fail, so we add a Result in advance
        SnapshotsIter {
            set_iter: set_iter,
            chain_id: 0,
            sig_id: 0,
            man_id: 0,
            backup: self.backup,
        }
    }
}
impl<'a> Iterator for SnapshotsIter<'a> {
    type Item = Snapshot<'a>;
    fn next(&mut self) -> Option<Self::Item> {
        // Bookkeeping: `sig_id` is the index within the current chain and is
        // reset when a new chain starts; `man_id` counts snapshots globally.
        // Both are incremented before building the returned `Snapshot`, hence
        // the `- 1` adjustments below.
        // first test if we have a valid iterator to an incset
        if let Some(ref mut incset_iter) = self.set_iter.incset_iter {
            // we have a set iter, so return the next element if present
            if let Some(inc_set) = incset_iter.next() {
                self.sig_id += 1;
                self.man_id += 1;
                return Some(Snapshot {
                    set: inc_set,
                    chain_id: self.chain_id - 1,
                    sig_id: self.sig_id,
                    man_id: self.man_id - 1,
                    backup: self.backup,
                });
            }
        }
        // the current incset is exausted or not present,
        // we need to advance the chain and return the next full set if present,
        // otherwise the job is finished
        match self.set_iter.chain_iter.next() {
            Some(chain) => {
                self.chain_id += 1;
                self.sig_id = 0;
                self.man_id += 1;
                self.set_iter.incset_iter = Some(chain.inc_sets());
                Some(Snapshot {
                    set: chain.full_set(),
                    chain_id: self.chain_id - 1,
                    sig_id: self.sig_id,
                    man_id: self.man_id - 1,
                    backup: self.backup,
                })
            }
            None => None,
        }
    }
}
impl<'a> Snapshot<'a> {
    /// Returns the time in which the snapshot has been taken.
    pub fn time(&self) -> Timespec {
        self.set.end_time()
    }
    /// Returns whether the snapshot is a full backup.
    ///
    /// A full snapshot does not depend on previous snapshots.
    pub fn is_full(&self) -> bool {
        self.set.is_full()
    }
    /// Returns whether the snapshot is an incremental backup.
    ///
    /// An incremental snapshot depends on all the previous incremental snapshots and the first
    /// previous full snapshot. This set of dependent snapshots is called "chain".
    pub fn is_incremental(&self) -> bool {
        self.set.is_incremental()
    }
    /// Returns the number of volumes contained in the snapshot.
    pub fn num_volumes(&self) -> usize {
        self.set.num_volumes()
    }
    /// Returns the low level representation of the snapshot.
    pub fn as_backup_set(&self) -> &'a BackupSet {
        self.set
    }
    /// Returns the files and directories present in the snapshot.
    ///
    /// Be aware that using this functionality means that all the signature files in the current
    /// backup chain must be loaded, and this could take some time, depending on the file access
    /// provided by the backend and the signatures size.
    pub fn entries(&self) -> io::Result<SnapshotEntries> {
        // Loads (and caches) the whole signature chain on first access.
        let sig = self.backup._signature_chain(self.chain_id)?;
        // The loaded chain may contain fewer snapshots than the collection
        // advertises; report an error instead of panicking downstream.
        if self.sig_id < sig.as_ref().unwrap().snapshots().len() {
            Ok(SnapshotEntries {
                chain: sig,
                sig_id: self.sig_id,
            })
        } else {
            Err(not_found("The signature chain is incomplete"))
        }
    }
    /// Returns the manifest for this snapshot.
    ///
    /// The relative manifest file is read on demand and cached for subsequent uses.
    pub fn manifest(&self) -> Result<ManifestRef<'a>, manifest::ParseError> {
        Ok(ManifestRef(
            self.backup
                ._manifest(self.man_id, self.set.manifest_path())?,
        ))
    }
}
impl<'a> SnapshotEntries<'a> {
    /// Returns the signatures representation for the entries.
    ///
    /// This function can be used to retrieve information about the files in the snapshot.
    pub fn as_signature(&self) -> signatures::SnapshotEntries {
        // The unwraps hold by construction: `Snapshot::entries` only builds a
        // `SnapshotEntries` after loading the chain and bounds-checking `sig_id`.
        self.chain
            .as_ref()
            .unwrap()
            .snapshots()
            .nth(self.sig_id)
            .unwrap()
            .files()
    }
}
impl<'a> Display for SnapshotEntries<'a> {
    // Delegates to the signature entries' own display implementation.
    fn fmt(&self, f: &mut Formatter) -> fmt::Result {
        self.as_signature().into_display().fmt(f)
    }
}
impl<'a> Deref for ManifestRef<'a> {
    type Target = Manifest;
    fn deref(&self) -> &Manifest {
        // Safe by construction: `Snapshot::manifest` only wraps a cache slot
        // that `_manifest` has already filled with `Some`.
        self.0.as_ref().unwrap()
    }
}
impl<B: Backend> ResourceCache for Backup<B> {
    fn _collections(&self) -> &Collections {
        &self.collections
    }
    // Returns the signature chain for `chain_id`, loading it from the backend
    // on first access and caching it in the per-chain `RefCell` slot.
    fn _signature_chain(&self, chain_id: usize) -> io::Result<Ref<Option<Chain>>> {
        {
            // check if there is a cached value
            let mut sig = self.signatures[chain_id].borrow_mut();
            if sig.is_none() {
                // compute signatures now
                if let Some(sigchain) = self.collections.signature_chains().nth(chain_id) {
                    let new_sig = Chain::from_sigchain(sigchain, &self.backend)?;
                    *sig = Some(new_sig);
                } else {
                    return Err(not_found(
                        "The given backup snapshot does not have a \
                                          corresponding signature",
                    ));
                }
            }
        }
        // need to close previous scope to borrow again
        // return the cached value
        Ok(self.signatures[chain_id].borrow())
    }
    // Returns the manifest for the snapshot at global index `id`, parsing the
    // file at `path` on first access and caching the result.
    fn _manifest(
        &self,
        id: usize,
        path: &str,
    ) -> Result<Ref<Option<Manifest>>, manifest::ParseError> {
        {
            // check if there is a cached value
            let mut sig = self.manifests[id].borrow_mut();
            if sig.is_none() {
                // compute manifest now
                let mut file = io::BufReader::new(self.backend.open_file(Path::new(path))?);
                *sig = Some(Manifest::parse(&mut file)?);
            }
        }
        // need to close previous scope to borrow again
        // return the cached value
        Ok(self.manifests[id].borrow())
    }
}
/// Builds an `io::Error` of kind `NotFound` that carries `msg` as its message.
fn not_found(msg: &str) -> io::Error {
    let kind = io::ErrorKind::NotFound;
    io::Error::new(kind, msg)
}
#[cfg(test)]
mod test {
    use super::*;
    use backend::local::LocalBackend;
    use collections::{BackupSet, Collections};
    use manifest::Manifest;
    use rawpath::RawPath;
    use signatures::{Chain, Entry};
    use timefmt::parse_time_str;
    use std::fs::File;
    use std::io::BufReader;
    use std::path::Path;
    use time::Timespec;
    // Snapshot attributes compared between the high-level API and the raw
    // collections.
    #[derive(Debug, Eq, PartialEq)]
    struct SnapshotTest {
        time: Timespec,
        is_full: bool,
        num_volumes: usize,
    }
    // File-entry attributes compared between the high-level API and the raw
    // signature chain.
    #[derive(Debug, Clone, Eq, PartialEq)]
    struct EntryTest {
        path: RawPath,
        mtime: Timespec,
        uname: String,
        gname: String,
    }
    impl EntryTest {
        pub fn from_entry(file: &Entry) -> Self {
            EntryTest {
                path: RawPath::from_bytes(file.path_bytes().to_owned()),
                mtime: file.mtime(),
                uname: file.username().unwrap().to_owned(),
                gname: file.groupname().unwrap().to_owned(),
            }
        }
        pub fn from_info(path: &[u8], mtime: &str, uname: &str, gname: &str) -> Self {
            EntryTest {
                path: RawPath::from_bytes(path.to_owned()),
                mtime: parse_time_str(mtime).unwrap(),
                uname: uname.to_owned(),
                gname: gname.to_owned(),
            }
        }
    }
    fn from_backup_set(set: &BackupSet, full: bool) -> SnapshotTest {
        SnapshotTest {
            time: set.end_time(),
            is_full: full,
            num_volumes: set.num_volumes(),
        }
    }
    // Flattens the raw collections into the expected snapshot list.
    fn from_collection(coll: &Collections) -> Vec<SnapshotTest> {
        let mut result = Vec::new();
        for chain in coll.backup_chains() {
            result.push(from_backup_set(chain.full_set(), true));
            for set in chain.inc_sets() {
                result.push(from_backup_set(set, false));
            }
        }
        result
    }
    // Collects the snapshots as exposed by the high-level `Backup` API.
    fn to_test_snapshot<B: Backend>(backup: &Backup<B>) -> Vec<SnapshotTest> {
        backup
            .snapshots()
            .unwrap()
            .into_iter()
            .map(|s| {
                assert!(s.is_full() != s.is_incremental());
                SnapshotTest {
                    time: s.time(),
                    is_full: s.is_full(),
                    num_volumes: s.num_volumes(),
                }
            })
            .collect()
    }
    fn single_vol_signature_chain() -> Chain {
        let backend = LocalBackend::new("tests/backups/single_vol");
        let filenames = backend.file_names().unwrap();
        let coll = Collections::from_filenames(filenames);
        Chain::from_sigchain(coll.signature_chains().next().unwrap(), &backend).unwrap()
    }
    fn from_sigchain(chain: &Chain) -> Vec<Vec<EntryTest>> {
        chain
            .snapshots()
            .map(|s| {
                s.files()
                    .map(|f| EntryTest::from_entry(&f))
                    .collect::<Vec<_>>()
            })
            .collect::<Vec<_>>()
    }
    fn from_backup<B: Backend>(backup: &Backup<B>) -> Vec<Vec<EntryTest>> {
        backup
            .snapshots()
            .unwrap()
            .into_iter()
            .map(|s| {
                s.entries()
                    .unwrap()
                    .as_signature()
                    .map(|f| EntryTest::from_entry(&f))
                    .collect::<Vec<_>>()
            })
            .collect::<Vec<_>>()
    }
    // The high-level snapshot list must match the raw collections (single chain).
    #[test]
    fn same_collections_single_vol() {
        let backend = LocalBackend::new("tests/backups/single_vol");
        let filenames = backend.file_names().unwrap();
        let coll = Collections::from_filenames(filenames);
        let backup = Backup::new(backend).unwrap();
        let expected = from_collection(&coll);
        let actual = to_test_snapshot(&backup);
        assert_eq!(actual, expected);
    }
    // Same check across multiple backup chains.
    #[test]
    fn same_collections_multi_chain() {
        let backend = LocalBackend::new("tests/backups/multi_chain");
        let filenames = backend.file_names().unwrap();
        let coll = Collections::from_filenames(filenames);
        let backup = Backup::new(backend).unwrap();
        let expected = from_collection(&coll);
        let actual = to_test_snapshot(&backup);
        assert_eq!(actual, expected);
    }
    // The high-level entries must match those read from the raw signature chain.
    #[test]
    fn same_files() {
        let sigchain = single_vol_signature_chain();
        let expected = from_sigchain(&sigchain);
        let backend = LocalBackend::new("tests/backups/single_vol");
        let backup = Backup::new(backend).unwrap();
        let actual = from_backup(&backup);
        assert_eq!(actual, expected);
    }
    // Entries of a multi-chain backup against hard-coded expectations.
    #[test]
    fn multi_chain_files() {
        let backend = LocalBackend::new("tests/backups/multi_chain");
        let backup = Backup::new(backend).unwrap();
        let actual = from_backup(&backup);
        let expected = vec![
            vec![
                make_entry_test(b"", "20160108t223141z"),
                make_entry_test(b"file", "20160108t222924z"),
            ],
            vec![
                make_entry_test(b"", "20160108t223153z"),
                make_entry_test(b"file", "20160108t223153z"),
            ],
            vec![
                make_entry_test(b"", "20160108t223206z"),
                make_entry_test(b"file", "20160108t223206z"),
            ],
            vec![
                make_entry_test(b"", "20160108t223215z"),
                make_entry_test(b"file", "20160108t223215z"),
            ],
        ];
        assert_eq!(actual, expected);
        fn make_entry_test(path: &[u8], mtime: &str) -> EntryTest {
            EntryTest::from_info(path, mtime, "michele", "michele")
        }
    }
    // Manifests returned by the API must equal the ones parsed directly from disk.
    #[test]
    fn multi_chain_manifests() {
        let backend = LocalBackend::new("tests/backups/multi_chain");
        let backup = Backup::new(backend).unwrap();
        let actual = backup
            .snapshots()
            .unwrap()
            .into_iter()
            .map(|snapshot| snapshot.manifest().unwrap());
        let names = vec![
            "duplicity-full.20160108T223144Z.manifest",
            "duplicity-inc.20160108T223144Z.to.20160108T223159Z.manifest",
            "duplicity-full.20160108T223209Z.manifest",
            "duplicity-inc.20160108T223209Z.to.20160108T223217Z.manifest",
        ];
        let expected = names.iter().map(|name| {
            let mut path = Path::new("tests/backups/multi_chain").to_owned();
            path.push(name);
            let mut file = BufReader::new(File::open(path).unwrap());
            Manifest::parse(&mut file).unwrap()
        });
        for (e, a) in expected.zip(actual) {
            assert_eq!(e, *a);
        }
    }
}
| true |
7193269c3e965e677afd043ed0b28894af8a855b
|
Rust
|
senden9/lsh-rs
|
/lsh-rs/src/dist.rs
|
UTF-8
| 1,159 | 3.375 | 3 |
[
"MIT"
] |
permissive
|
use ndarray::prelude::*;
use rayon::prelude::*;
/// Computes the L2 (Euclidean) norm of a single vector.
///
/// # Examples
///
/// ```
/// use lsh_rs::dist::l2_norm;
/// let a = vec![1., -1.];
/// let norm_a = l2_norm(&a);
///
/// // norm between two vectors
/// let b = vec![0.2, 1.2];
/// let c: Vec<f32> = a.iter().zip(b).map(|(ai, bi)| ai - bi).collect();
/// let norm_ab = l2_norm(&c);
/// ```
pub fn l2_norm(x: &[f32]) -> f32 {
    let v = aview1(x);
    v.dot(&v).sqrt()
}
/// Computes the dot (inner) product of two vectors.
///
/// # Panics
///
/// Panics if `a.len() != b.len()`.
///
/// # Examples
///
/// ```
/// use lsh_rs::dist::inner_prod;
/// let a = vec![1., -1.];
/// let b = vec![0.2, 1.2];
/// let prod = inner_prod(&a, &b);
/// ```
pub fn inner_prod(a: &[f32], b: &[f32]) -> f32 {
    let (lhs, rhs) = (aview1(a), aview1(b));
    lhs.dot(&rhs)
}
/// Computes the cosine similarity of two vectors: their inner product divided
/// by the product of their L2 norms.
///
/// # Panics
///
/// Panics if `a.len() != b.len()`.
///
/// # Examples
///
/// ```
/// use lsh_rs::dist::cosine_sim;
/// let a = vec![1., -1.];
/// let b = vec![0.2, 1.2];
/// let sim = cosine_sim(&a, &b);
/// ```
pub fn cosine_sim(a: &[f32], b: &[f32]) -> f32 {
    let norm_product = l2_norm(a) * l2_norm(b);
    inner_prod(a, b) / norm_product
}
| true |
b6ccde3a6d960b344ee024582d4aa06b6fe05739
|
Rust
|
astral-sh/ruff
|
/crates/ruff_python_formatter/src/expression/expr_bin_op.rs
|
UTF-8
| 10,687 | 2.53125 | 3 |
[
"BSD-3-Clause",
"0BSD",
"LicenseRef-scancode-free-unknown",
"GPL-1.0-or-later",
"MIT",
"Apache-2.0"
] |
permissive
|
use std::iter;
use ruff_python_ast::{
Constant, Expr, ExprAttribute, ExprBinOp, ExprConstant, ExprUnaryOp, Operator, UnaryOp,
};
use smallvec::SmallVec;
use ruff_formatter::{
format_args, write, FormatOwnedWithRule, FormatRefWithRule, FormatRuleWithOptions,
};
use ruff_python_ast::node::{AnyNodeRef, AstNode};
use ruff_python_ast::str::is_implicit_concatenation;
use crate::comments::{trailing_comments, trailing_node_comments};
use crate::expression::expr_constant::ExprConstantLayout;
use crate::expression::parentheses::{
in_parentheses_only_group, in_parentheses_only_soft_line_break,
in_parentheses_only_soft_line_break_or_space, is_expression_parenthesized, parenthesized,
NeedsParentheses, OptionalParentheses,
};
use crate::expression::string::StringLayout;
use crate::expression::Parentheses;
use crate::prelude::*;
use crate::FormatNodeRule;
/// Formatting rule for binary operator expressions.
#[derive(Default)]
pub struct FormatExprBinOp {
    parentheses: Option<Parentheses>,
}

impl FormatRuleWithOptions<ExprBinOp, PyFormatContext<'_>> for FormatExprBinOp {
    type Options = Option<Parentheses>;

    // Produces a rule configured with the given parentheses option;
    // `parentheses` is the only state this rule carries.
    fn with_options(self, options: Self::Options) -> Self {
        Self {
            parentheses: options,
        }
    }
}
impl FormatNodeRule<ExprBinOp> for FormatExprBinOp {
    fn fmt_fields(&self, item: &ExprBinOp, f: &mut PyFormatter) -> FormatResult<()> {
        let comments = f.context().comments().clone();
        match Self::layout(item, f.context()) {
            // Special case: implicit string concatenation on the left of `%`.
            // The string parts are preferred as break points over the `%`.
            BinOpLayout::LeftString(expression) => {
                let right_has_leading_comment = f
                    .context()
                    .comments()
                    .has_leading_comments(item.right.as_ref());
                let format_right_and_op = format_with(|f| {
                    // A leading comment on the right operand forces the
                    // operator onto its own line.
                    if right_has_leading_comment {
                        space().fmt(f)?;
                    } else {
                        soft_line_break_or_space().fmt(f)?;
                    }
                    item.op.format().fmt(f)?;
                    if right_has_leading_comment {
                        hard_line_break().fmt(f)?;
                    } else {
                        space().fmt(f)?;
                    }
                    group(&item.right.format()).fmt(f)
                });
                let format_left = format_with(|f: &mut PyFormatter| {
                    let format_string =
                        expression.format().with_options(ExprConstantLayout::String(
                            StringLayout::ImplicitConcatenatedBinaryLeftSide,
                        ));
                    // Preserve explicit parentheses around the left string.
                    if is_expression_parenthesized(expression.into(), f.context().source()) {
                        parenthesized("(", &format_string, ")").fmt(f)
                    } else {
                        format_string.fmt(f)
                    }
                });
                group(&format_args![format_left, group(&format_right_and_op)]).fmt(f)
            }
            BinOpLayout::Default => {
                let format_inner = format_with(|f: &mut PyFormatter| {
                    let source = f.context().source();
                    // Collect the chain of left-nested, unparenthesized binary
                    // expressions so the whole chain is formatted as one flat
                    // sequence (left-most operand first).
                    let binary_chain: SmallVec<[&ExprBinOp; 4]> =
                        iter::successors(Some(item), |parent| {
                            parent.left.as_bin_op_expr().and_then(|bin_expression| {
                                if is_expression_parenthesized(
                                    bin_expression.as_any_node_ref(),
                                    source,
                                ) {
                                    None
                                } else {
                                    Some(bin_expression)
                                }
                            })
                        })
                        .collect();
                    // SAFETY: `binary_chain` is guaranteed not to be empty because it always contains the current expression.
                    let left_most = binary_chain.last().unwrap();
                    // Format the left most expression
                    in_parentheses_only_group(&left_most.left.format()).fmt(f)?;
                    // Iterate upwards in the binary expression tree and, for each level, format the operator
                    // and the right expression.
                    for current in binary_chain.into_iter().rev() {
                        let ExprBinOp {
                            range: _,
                            left: _,
                            op,
                            right,
                        } = current;
                        let operator_comments = comments.dangling_comments(current);
                        // Simple power expressions (`a**b`) are rendered
                        // without spaces around the operator.
                        let needs_space = !is_simple_power_expression(current);
                        let before_operator_space = if needs_space {
                            in_parentheses_only_soft_line_break_or_space()
                        } else {
                            in_parentheses_only_soft_line_break()
                        };
                        write!(
                            f,
                            [
                                before_operator_space,
                                op.format(),
                                trailing_comments(operator_comments),
                            ]
                        )?;
                        // Format the operator on its own line if the right side has any leading comments.
                        if comments.has_leading_comments(right.as_ref())
                            || !operator_comments.is_empty()
                        {
                            hard_line_break().fmt(f)?;
                        } else if needs_space {
                            space().fmt(f)?;
                        }
                        in_parentheses_only_group(&right.format()).fmt(f)?;
                        // It's necessary to format the trailing comments because the code bypasses
                        // `FormatNodeRule::fmt` for the nested binary expressions.
                        // Don't call the formatting function for the most outer binary expression because
                        // these comments have already been formatted.
                        if current != item {
                            trailing_node_comments(current).fmt(f)?;
                        }
                    }
                    Ok(())
                });
                in_parentheses_only_group(&format_inner).fmt(f)
            }
        }
    }
    fn fmt_dangling_comments(&self, _node: &ExprBinOp, _f: &mut PyFormatter) -> FormatResult<()> {
        // Handled inside of `fmt_fields`
        Ok(())
    }
}
impl FormatExprBinOp {
    // Chooses a layout: `LeftString` applies only when the left operand is an
    // implicitly-concatenated string literal, the operator is `%`, the node is
    // inside parentheses, and neither the string nor the binary expression has
    // dangling comments; everything else uses the default chain layout.
    fn layout<'a>(bin_op: &'a ExprBinOp, context: &PyFormatContext) -> BinOpLayout<'a> {
        if let Some(
            constant @ ExprConstant {
                value: Constant::Str(_),
                range,
                ..
            },
        ) = bin_op.left.as_constant_expr()
        {
            let comments = context.comments();
            if bin_op.op == Operator::Mod
                && context.node_level().is_parenthesized()
                && !comments.has_dangling_comments(constant)
                && !comments.has_dangling_comments(bin_op)
                && is_implicit_concatenation(&context.source()[*range])
            {
                BinOpLayout::LeftString(constant)
            } else {
                BinOpLayout::Default
            }
        } else {
            BinOpLayout::Default
        }
    }
}
// `true` for a `**` expression whose operands are both "simple" per
// `is_simple_power_operand`; such powers are formatted without spaces around
// the operator (see `fmt_fields` above).
const fn is_simple_power_expression(expr: &ExprBinOp) -> bool {
    expr.op.is_pow() && is_simple_power_operand(&expr.left) && is_simple_power_operand(&expr.right)
}
/// Return `true` if an [`Expr`] adheres to [Black's definition](https://black.readthedocs.io/en/stable/the_black_code_style/current_style.html#line-breaks-binary-operators)
/// of a non-complex expression, in the context of a power operation.
const fn is_simple_power_operand(expr: &Expr) -> bool {
    match expr {
        // `not x` is never simple.
        Expr::UnaryOp(ExprUnaryOp {
            op: UnaryOp::Not, ..
        }) => false,
        // Numeric literals are simple.
        Expr::Constant(ExprConstant {
            value: Constant::Complex { .. } | Constant::Float(_) | Constant::Int(_),
            ..
        }) => true,
        Expr::Name(_) => true,
        // Other unary operators (e.g. `-x`) and attribute accesses are simple
        // when their operand/base is simple.
        Expr::UnaryOp(ExprUnaryOp { operand, .. }) => is_simple_power_operand(operand),
        Expr::Attribute(ExprAttribute { value, .. }) => is_simple_power_operand(value),
        _ => false,
    }
}
// Layout strategies selected by `FormatExprBinOp::layout`.
#[derive(Copy, Clone, Debug)]
enum BinOpLayout<'a> {
    Default,
    /// Specific layout for an implicit concatenated string using the "old" c-style formatting.
    ///
    /// ```python
    /// (
    ///     "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa %s"
    ///     "bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb %s" % (a, b)
    /// )
    /// ```
    ///
    /// Prefers breaking the string parts over breaking in front of the `%` because it looks better if it
    /// is kept on the same line.
    LeftString(&'a ExprConstant),
}
// Formatting rule that renders an `Operator` as its source token.
#[derive(Copy, Clone)]
pub struct FormatOperator;
// Allows `op.format()` on a borrowed `Operator`.
impl<'ast> AsFormat<PyFormatContext<'ast>> for Operator {
    type Format<'a> = FormatRefWithRule<'a, Operator, FormatOperator, PyFormatContext<'ast>>;
    fn format(&self) -> Self::Format<'_> {
        FormatRefWithRule::new(self, FormatOperator)
    }
}
// Allows `op.into_format()` on an owned `Operator`.
impl<'ast> IntoFormat<PyFormatContext<'ast>> for Operator {
    type Format = FormatOwnedWithRule<Operator, FormatOperator, PyFormatContext<'ast>>;
    fn into_format(self) -> Self::Format {
        FormatOwnedWithRule::new(self, FormatOperator)
    }
}
impl FormatRule<Operator, PyFormatContext<'_>> for FormatOperator {
fn fmt(&self, item: &Operator, f: &mut Formatter<PyFormatContext<'_>>) -> FormatResult<()> {
let operator = match item {
Operator::Add => "+",
Operator::Sub => "-",
Operator::Mult => "*",
Operator::MatMult => "@",
Operator::Div => "/",
Operator::Mod => "%",
Operator::Pow => "**",
Operator::LShift => "<<",
Operator::RShift => ">>",
Operator::BitOr => "|",
Operator::BitXor => "^",
Operator::BitAnd => "&",
Operator::FloorDiv => "//",
};
text(operator).fmt(f)
}
}
impl NeedsParentheses for ExprBinOp {
    fn needs_parentheses(
        &self,
        parent: AnyNodeRef,
        _context: &PyFormatContext,
    ) -> OptionalParentheses {
        // Under an `await` expression, every binary operator except `**`
        // always gets parenthesized.
        let always = parent.is_expr_await() && !self.op.is_pow();
        match always {
            true => OptionalParentheses::Always,
            false => OptionalParentheses::Multiline,
        }
    }
}
| true |
ebd9da80c84367adbb656cbb4d1eb76fcb0f790c
|
Rust
|
superdump/building-blocks
|
/crates/building_blocks_core/src/lib.rs
|
UTF-8
| 1,322 | 2.578125 | 3 |
[
"MIT"
] |
permissive
|
//! The core data types for defining 2D and 3D integer lattices:
//! - `PointN`: an N-dimensional point, most importantly `Point2i` and `Point3i`
//! - `ExtentN`: an N-dimensional extent, most importantly `Extent2i` and `Extent3i`
pub mod axis;
pub mod extent;
pub mod extent2;
pub mod extent3;
pub mod point;
pub mod point2;
pub mod point3;
pub mod point_traits;
pub use axis::{Axis2, Axis3, Axis3Permutation, SignedAxis2, SignedAxis3};
pub use extent::{bounding_extent, Extent, ExtentN, IntegerExtent};
pub use extent2::{Extent2, Extent2f, Extent2i};
pub use extent3::{Extent3, Extent3f, Extent3i};
pub use point::PointN;
pub use point2::{Point2, Point2f, Point2i};
pub use point3::{Point3, Point3f, Point3i};
pub use point_traits::{
Bounded, ComponentwiseIntegerOps, Distance, DotProduct, GetComponent, IntegerPoint,
MapComponents, Neighborhoods, Norm, NormSquared, Ones, Point, SmallZero,
};
pub use num;
/// Convenience module re-exporting the most commonly used types and traits,
/// intended for glob import: `use building_blocks_core::prelude::*;`.
pub mod prelude {
    pub use super::{
        Axis2, Axis3, Bounded, ComponentwiseIntegerOps, Distance, DotProduct, Extent, Extent2,
        Extent2f, Extent2i, Extent3, Extent3f, Extent3i, ExtentN, GetComponent, IntegerExtent,
        IntegerPoint, MapComponents, Neighborhoods, Norm, NormSquared, Ones, Point, Point2,
        Point2f, Point2i, Point3, Point3f, Point3i, PointN, SmallZero,
    };
}
| true |
9462da26e7590797b9be5cf9dd6c4607a96f67f4
|
Rust
|
iquiw/rsbin
|
/src/rsbin/routine.rs
|
UTF-8
| 2,016 | 2.734375 | 3 |
[] |
no_license
|
use std::ffi::OsStr;
use std::path::Path;
use std::process::Command;
use anyhow::{anyhow, Context, Result};
use super::config::{RsbinBuildType, RsbinScript};
use super::os::RsbinEnv;
use super::util::create_dir_if_missing;
impl RsbinScript {
    /// Runs the compiled binary for this script, forwarding `args` to it.
    pub fn execute<S>(&self, env: &RsbinEnv, args: &[S]) -> Result<()>
    where
        S: AsRef<OsStr>,
    {
        let bin = env.bin_path(self);
        let mut cmd = Command::new(&bin);
        cmd.args(args);
        run_command(&bin, &mut cmd)
    }
    /// Compiles the script source into its binary, dispatching on the
    /// configured build type. Unsupported build types are reported as errors.
    pub fn compile(&self, env: &RsbinEnv) -> Result<()> {
        let src = Path::new(&self.path);
        match self.build_type {
            RsbinBuildType::Rustc => build_rustc(src, &env.bin_path(self)),
            RsbinBuildType::Ghc => {
                build_ghc(src, &env.bin_path(self), &env.tmp_path(self), &self.build_opts)
            }
            _ => Err(anyhow!("Unsupported build-type")),
        }
    }
    /// Returns `true` when the compiled binary already exists on disk.
    pub fn does_bin_exist(&self, env: &RsbinEnv) -> bool {
        env.bin_path(self).is_file()
    }
}
/// Runs `cmd`, mapping a non-zero exit status or a signal death into an
/// error message that names `path` (the program being run).
fn run_command(path: &Path, cmd: &mut Command) -> Result<()> {
    let status = cmd
        .status()
        .with_context(|| format!("{}: execution failed", path.display()))?;
    if status.success() {
        return Ok(());
    }
    // Non-success: either the process exited with a code, or (on Unix) it
    // was killed by a signal, in which case `code()` is `None`.
    Err(match status.code() {
        Some(code) => anyhow!("{}: process exited with {}", path.display(), code),
        None => anyhow!("{}: interrupted by signal", path.display()),
    })
}
/// Compiles a Rust source file with `rustc`, writing the binary to `dst`.
fn build_rustc(src: &Path, dst: &Path) -> Result<()> {
    let rustc = Path::new("rustc");
    let mut cmd = Command::new(rustc);
    cmd.arg("-o").arg(dst).arg(src);
    run_command(rustc, &mut cmd)
}
/// Compiles a Haskell source file with `ghc`, writing the binary to `dst`
/// and keeping intermediate build artifacts in `tmpdir` (created on demand).
/// `opts` are extra flags passed through to `ghc` before the fixed ones.
fn build_ghc(src: &Path, dst: &Path, tmpdir: &Path, opts: &[String]) -> Result<()> {
    create_dir_if_missing(tmpdir)?;
    let ghc = Path::new("ghc");
    let mut cmd = Command::new(ghc);
    cmd.args(opts)
        .arg("-outputdir")
        .arg(tmpdir)
        .arg("-o")
        .arg(dst)
        .arg(src);
    run_command(ghc, &mut cmd)
}
| true |
ff94e54d13adce16f1e7935e7b818f2f7597dcba
|
Rust
|
cpcsdk/rust.cpclib
|
/cpclib-imgconverter/src/bin/cpc2img.rs
|
UTF-8
| 3,660 | 2.734375 | 3 |
[] |
no_license
|
use cpclib::common::clap::{self, value_parser, Arg, ArgAction, Command};
use cpclib::common::itertools::Itertools;
use cpclib::image::image::ColorMatrix;
use cpclib::image::pixels;
use cpclib_imgconverter::{self, get_requested_palette};
/// Converts CPC binary data (currently linear sprite data only) into a PNG
/// image, using the palette and screen mode given on the command line.
fn main() {
    // `specify_palette!` adds the shared palette/ink CLI arguments on top of
    // the tool-specific ones declared here.
    let cmd = cpclib_imgconverter::specify_palette!(clap::Command::new("cpc2png")
        .about("Generate PNG from CPC files")
        .arg(
            Arg::new("MODE")
                .short('m')
                .long("mode")
                .help("Screen mode of the image to convert.")
                .value_name("MODE")
                .default_value("0")
                .value_parser(["0", "1", "2"])
        )
        .arg(
            Arg::new("MODE0RATIO")
                .long("mode0ratio")
                .help("Horizontally double the pixels")
                .action(ArgAction::SetTrue)
        )
        .subcommand(
            Command::new("SPRITE")
                .about("Load from a linear sprite data")
                .name("sprite")
                .arg(
                    Arg::new("WIDTH")
                        .long("width")
                        .required(true)
                        .help("Width of the sprite in pixels")
                )
        )
        .arg(Arg::new("INPUT").required(true))
        .arg(Arg::new("OUTPUT").required(true)));
    let matches = cmd.get_matches();

    // Palette requested on the command line, falling back to the default.
    // (A leftover `dbg!` around this call leaked debug output to stderr.)
    let palette = get_requested_palette(&matches).unwrap_or_default();

    let input_fname = matches.get_one::<String>("INPUT").unwrap();
    let output_fname = matches.get_one::<String>("OUTPUT").unwrap();
    // Safe to unwrap: MODE has a default value and is restricted to 0/1/2.
    let mode = matches.get_one::<String>("MODE").unwrap().parse().unwrap();
    // BUG FIX: `contains_id` is always true for an `ArgAction::SetTrue` flag
    // (such flags carry an implicit `false` default value), so the ratio used
    // to be applied unconditionally. `get_flag` reads the actual boolean.
    let mode0ratio = matches.get_flag("MODE0RATIO");

    // Read the data file.
    let data = std::fs::read(input_fname).expect("Unable to read input file");

    // Strip the 128-byte AMSDOS header when one is present.
    let data = if cpclib::disc::amsdos::AmsdosHeader::from_buffer(&data).is_checksum_valid() {
        &data[128..]
    }
    else {
        &data
    };

    // BUG FIX: the subcommand is renamed to "sprite" by `.name("sprite")`
    // above, so looking it up as "SPRITE" never matched and the tool always
    // fell through to the `unimplemented!()` branch.
    let mut matrix: ColorMatrix = if let Some(sprite) = matches.subcommand_matches("sprite") {
        let width: usize = sprite.get_one::<String>("WIDTH").unwrap().parse().unwrap();
        // Convert the width in pixels into a width in bytes: each byte packs
        // 2, 4 or 8 pixels depending on the screen mode.
        let width = match mode {
            0 => width / 2,
            1 => width / 4,
            2 => width / 8,
            _ => unreachable!()
        };
        // Decode each line of bytes into pens, then map pens to inks through
        // the palette.
        data.chunks_exact(width)
            .map(|line| {
                // build lines of pen
                let line = line.iter();
                match mode {
                    0 => {
                        line.flat_map(|b| pixels::mode0::byte_to_pens(*b).into_iter())
                            .collect_vec()
                    }
                    1 => {
                        line.flat_map(|b| pixels::mode1::byte_to_pens(*b).into_iter())
                            .collect_vec()
                    }
                    2 => {
                        line.flat_map(|b| pixels::mode2::byte_to_pens(*b))
                            .collect_vec()
                    }
                    _ => unreachable!()
                }
            })
            .map(move |pens| {
                // build lines of inks
                pens.iter()
                    .map(|pen| palette.get(pen))
                    .cloned()
                    .collect_vec()
            })
            .collect_vec()
            .into()
    }
    else {
        unimplemented!()
    };

    // Mode-0 pixels are twice as wide as tall; optionally double them so the
    // PNG keeps the on-screen aspect ratio.
    if mode0ratio {
        matrix.double_horizontally();
    }

    // Save the generated file.
    let img = matrix.as_image();
    img.save(output_fname).expect("Error while saving the file");
}
| true |
8b5fe61a2267ca6c0811651b8563a44a4cac9584
|
Rust
|
vigna/dsi-bitstream-rs
|
/src/codes/stats.rs
|
UTF-8
| 2,316 | 3.0625 | 3 |
[
"Apache-2.0",
"LGPL-2.1-only"
] |
permissive
|
use super::*;
use core::sync::atomic::{AtomicUsize, Ordering};
/// Returns the length in bits of the Golomb code of `value` with modulus `b`:
/// the quotient `value / b` in unary followed by the remainder `value % b`
/// in minimal binary.
///
/// NOTE(review): panics on `b == 0` (integer division by zero); callers are
/// expected to pass `b >= 1`.
pub fn len_golomb(value: u64, b: u64) -> usize {
    len_unary(value / b) + len_minimal_binary(value % b, b)
}
#[derive(Default, Debug)]
/// A struct to keep track of the space needed to store a stream of integers
/// using different codes, this can be used to determine which code is the
/// most efficient for a given stream.
pub struct CodesStats {
    /// Total bits the stream would take in unary code.
    pub unary: AtomicUsize,
    /// Total bits the stream would take in gamma code.
    pub gamma: AtomicUsize,
    /// Total bits the stream would take in delta code.
    pub delta: AtomicUsize,
    /// Total bits per zeta code, indexed by `k - 1` (i.e. k in 1..=10).
    pub zeta: [AtomicUsize; 10],
    /// Total bits per Golomb code, indexed by `b - 1` (i.e. b in 1..=20).
    pub golomb: [AtomicUsize; 20],
}
impl CodesStats {
    /// Creates an empty set of statistics (all counters at zero).
    pub fn new() -> Self {
        Self::default()
    }
    /// Adds the code lengths of `value` for every tracked code, then hands
    /// `value` back so the call can be chained inline.
    pub fn update(&self, value: u64) -> u64 {
        self.unary.fetch_add(len_unary(value), Ordering::Relaxed);
        self.gamma.fetch_add(len_gamma(value), Ordering::Relaxed);
        self.delta.fetch_add(len_delta(value), Ordering::Relaxed);
        for (i, counter) in self.zeta.iter().enumerate() {
            counter.fetch_add(len_zeta(value, (i + 1) as _), Ordering::Relaxed);
        }
        for (i, counter) in self.golomb.iter().enumerate() {
            counter.fetch_add(len_golomb(value, (i + 1) as _), Ordering::Relaxed);
        }
        value
    }
    /// Returns the code that needed the least space for the stream seen so
    /// far, together with that space in bits. Ties are broken in favor of the
    /// code listed first (unary, gamma, delta, zeta k=1..10, Golomb b=1..20).
    pub fn get_best_code(&self) -> (Code, usize) {
        // Gather every (code, total bits) candidate in a fixed order, then
        // pick the minimum; `min_by_key` keeps the first of equal minima,
        // preserving the original tie-breaking behavior.
        let mut candidates = vec![
            (Code::Unary, self.unary.load(Ordering::Relaxed)),
            (Code::Gamma, self.gamma.load(Ordering::Relaxed)),
            (Code::Delta, self.delta.load(Ordering::Relaxed)),
        ];
        candidates.extend(
            self.zeta
                .iter()
                .enumerate()
                .map(|(i, c)| (Code::Zeta { k: (i + 1) as _ }, c.load(Ordering::Relaxed))),
        );
        candidates.extend(
            self.golomb
                .iter()
                .enumerate()
                .map(|(i, c)| (Code::Golomb { b: (i + 1) as _ }, c.load(Ordering::Relaxed))),
        );
        candidates
            .into_iter()
            .min_by_key(|&(_, len)| len)
            .expect("candidate list is never empty")
    }
}
| true |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.