blob_id (string, length 40) | language (string, 1 class) | repo_name (string, length 5-140) | path (string, length 5-183) | src_encoding (string, 6 classes) | length_bytes (int64, 12-5.32M) | score (float64, 2.52-4.94) | int_score (int64, 3-5) | detected_licenses (list, length 0-47) | license_type (string, 2 classes) | text (string, length 12-5.32M) | download_success (bool, 1 class)
---|---|---|---|---|---|---|---|---|---|---|---
f32bc0d2d9bdae4369a181724001d77d0b4130b1 | Rust | SINHASantos/rust-users | /src/traits.rs | UTF-8 | 1,715 | 2.9375 | 3 | ["MIT"] | permissive |
use std::ffi::OsStr;
use std::sync::Arc;
use libc::{uid_t, gid_t};
use base::{User, Group};
/// Trait for producers of users.
pub trait Users {
/// Returns a `User` if one exists for the given user ID; otherwise, returns `None`.
fn get_user_by_uid(&self, uid: uid_t) -> Option<Arc<User>>;
/// Returns a `User` if one exists for the given username; otherwise, returns `None`.
fn get_user_by_name<S: AsRef<OsStr> + ?Sized>(&self, username: &S) -> Option<Arc<User>>;
/// Returns the user ID for the user running the process.
fn get_current_uid(&self) -> uid_t;
/// Returns the username of the user running the process.
fn get_current_username(&self) -> Option<Arc<OsStr>>;
/// Returns the effective user id.
fn get_effective_uid(&self) -> uid_t;
/// Returns the effective username.
fn get_effective_username(&self) -> Option<Arc<OsStr>>;
}
/// Trait for producers of groups.
pub trait Groups {
/// Returns a `Group` if one exists for the given group ID; otherwise, returns `None`.
fn get_group_by_gid(&self, gid: gid_t) -> Option<Arc<Group>>;
/// Returns a `Group` if one exists for the given groupname; otherwise, returns `None`.
fn get_group_by_name<S: AsRef<OsStr> + ?Sized>(&self, group_name: &S) -> Option<Arc<Group>>;
/// Returns the group ID for the user running the process.
fn get_current_gid(&self) -> gid_t;
/// Returns the group name of the user running the process.
fn get_current_groupname(&self) -> Option<Arc<OsStr>>;
/// Returns the effective group id.
fn get_effective_gid(&self) -> gid_t;
/// Returns the effective group name.
fn get_effective_groupname(&self) -> Option<Arc<OsStr>>;
}
| true |
e22316339b079f77004a1208fd2aa53b250a48d9 | Rust | yuki-uchida/Competitive_programming | /AtcoderBeginnerContest/276/rust/src/b.rs | UTF-8 | 1,062 | 2.921875 | 3 | [] | no_license |
use proconio::input;
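// For each city 1..=n, this program prints how many other cities it is directly
// connected to by the m roads, followed by the sorted list of those cities.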
fn main() {
input! {
n: usize,
m: usize,
mut roads: [(usize, usize); m],
}
// println!("{:?}", roads);
let mut connected_cities: Vec<Vec<i32>> = vec![vec![]; n+1];
let mut connected_cities_count: Vec<i32> = vec![0; n+1];
for i in 0..m {
let a_city: i32 = (roads[i].0 as i32);
let b_city: i32 = (roads[i].1 as i32);
connected_cities[a_city as usize].push(b_city);
connected_cities[b_city as usize].push(a_city);
connected_cities_count[a_city as usize] += 1;
connected_cities_count[b_city as usize] += 1;
}
// println!("{:?}", connected_cities);
// println!("{:?}", connected_cities_count);
for j in 1..(n+1) {
connected_cities[j as usize].sort();
let connected_cities: Vec<String> = connected_cities[j as usize].iter().map(|i: &i32| i.to_string()).collect();
let connected_cities_str: String = connected_cities.join(" ");
println!("{} {}", connected_cities_count[j], connected_cities_str);
}
}
| true |
861ec34757b1bca90d4be20cb6417c5d03cfcdfd | Rust | youngqqcn/RustNotes | /rust-by-example/18-错误处理/6-结果Result.rs | UTF-8 | 394 | 3.125 | 3 | [] | no_license |
// Author: yqq
// Date: 2022-11-20 10:50:14
// Description:
fn multiply(first_number_str: &str, second_number_str: &str) -> i32 {
let f = first_number_str.parse::<i32>().unwrap();
let s = second_number_str.parse::<i32>().unwrap();
f * s
}
fn main() {
let t = multiply("10", "2");
println!("{:?}", t);
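// The next call panics: "xxx" cannot be parsed as an i32, so unwrap() aborts the program,
// which is the point of this error-handling example.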
let xx = multiply("xxx", "xxx");
println!("{:?}", xx);
}
| true |
68a5b24fbd59e8c622c1a77266469eb1aec4efaa | Rust | visig9/slottle | /src/throttle/mod.rs | UTF-8 | 23,217 | 3.421875 | 3 | ["MIT"] | permissive |
use std::{
collections::VecDeque,
fmt::{self, Debug},
sync::{Arc, Mutex},
thread,
time::{Duration, Instant},
};
use std_semaphore::Semaphore;
pub mod interval;
pub type IntervalFn = dyn Fn(Option<&ThrottleLog>) -> Duration + Send + Sync + 'static;
/// Limits resource access speed by interval and concurrency.
pub struct Throttle {
/// The time point at which the next `run()` is allowed to start.
allowed_future: Mutex<Instant>,
semaphore: Semaphore,
log: Option<Mutex<ThrottleLog>>,
interval_fn: Arc<IntervalFn>,
concurrent: u32,
}
impl Throttle {
/// Initialize a builder to create throttle.
pub fn builder() -> ThrottleBuilder {
ThrottleBuilder::new()
}
/// Run a function.
///
/// Calling `run(...)` may block the current thread, depending on the throttle's state and configuration.
///
/// # Example
///
/// ```
/// use std::time::Duration;
/// use rayon::prelude::*;
/// use slottle::Throttle;
///
/// let throttle = Throttle::builder()
/// .interval(Duration::from_millis(5))
/// .build()
/// .unwrap();
///
/// let ans: Vec<u32> = vec![3, 2, 1]
/// .into_par_iter()
/// .map(|x| {
/// // parallel run here
/// throttle.run(|| x + 1)
/// })
/// .collect();
///
/// assert_eq!(ans, vec![4, 3, 2]);
/// ```
pub fn run<F, T>(&self, f: F) -> T
where
F: FnOnce() -> T,
{
// occupying single concurrency quota
let _semaphore_guard = self.semaphore.access();
self.waiting();
let result = f();
self.write_log(true);
result
}
/// Run a fallible function.
///
/// When `f` returns an `Err`, the throttle treats this run as "failed". Failures are
/// counted by [`ThrottleLog`] and may change the following delay intervals in the current
/// throttle scope through the user-defined [`Interval`] passed to
/// [`ThrottleBuilder::interval()`].
///
/// Calling `run_fallible(...)` may block the current thread, depending on the throttle's state and configuration.
///
/// # Example
///
/// ```
/// use std::time::{Duration, Instant};
/// use rayon::prelude::*;
/// use slottle::{Throttle,Interval};
///
/// let throttle = Throttle::builder()
/// .interval(Interval::new(
/// |log| match log.unwrap().failure_count_cont() {
/// 0 => Duration::from_millis(10), // if successful
/// _ => Duration::from_millis(50), // if failed
/// },
/// 1, // log_size
/// ))
/// .build()
/// .unwrap();
///
/// let started_time = Instant::now();
///
/// vec![Result::<(), ()>::Err(()); 3] // 3 Err here
/// .into_par_iter()
/// .for_each(|err| {
/// throttle.run_fallible(|| {
/// let time_passed_ms = started_time.elapsed().as_secs_f64() * 1000.0;
/// println!("time passed: {:.2}ms", time_passed_ms);
/// err
/// });
/// });
/// ```
///
/// The previous code will roughly print:
///
/// ```text
/// time passed: 0.32ms
/// time passed: 10.19ms
/// time passed: 60.72ms
/// ```
///
///
///
/// ## Explanation: data in [`ThrottleLog`] takes effect one op late
///
/// If you read the previous example and its output carefully, you may notice that the first op
/// failed but the second op did not slow down immediately (to 50ms). The slowdown only appeared
/// on the third op. What happened here?
///
/// Technically, all of the following statements are true:
///
/// 1. We only know whether an op failed once it has finished.
/// 2. The current implementation of `Throttle` does the "waiting" *just before* an op starts.
///    - If the waiting happened *after* an op finished, the final op would block the thread unnecessarily.
/// 3. The "next allowed timepoint" must be assigned together with the "waiting" as one atomic unit.
///    - Otherwise, in a multi-threaded situation, more than one op may retrieve the same "allowed
///      timepoint" and then run at the same time.
///
/// Combining those three points: when op 1 finishes and [`ThrottleLog`] is updated, the "next allowed
/// timepoint" has already been calculated for other pending ops (those ops may have started before the
/// current op finished if `concurrent >= 2`). It just looks a little odd when `concurrent == 1`.
///
/// Here is the chart:
///
/// ```text
/// f: assigned jobs, s: sleep function
///
/// thread 1: |f1()---|s()----|f2()--|s()---------------------------------|f3()---|.......
/// | int.succ | interval (failed) |...............
/// ^ ^ ^-- at this point throttle determined which time f3 allowed to run
/// \ \
/// \ -- f1 finished, now throttle known f1 failed, write into the log
/// \
/// -- at this point throttle determined "which time f2 allowed to run"
///
/// time pass ----->
/// ```
///
/// Thus, data in [`ThrottleLog`] takes effect one op late (regardless of the concurrency level).
pub fn run_fallible<F, T, E>(&self, f: F) -> Result<T, E>
where
F: FnOnce() -> Result<T, E>,
{
// occupying single concurrency quota
let _semaphore_guard = self.semaphore.access();
self.waiting();
let result = f();
self.write_log(result.is_ok());
result
}
/// Run a function and retry when it fails.
///
/// If `f` returns a `Result::Err`, the throttle automatically re-runs the function. Retries
/// happen again and again until the `max_retry` limit is reached or the call succeeds.
/// For example, with `max_retry == 4`, `f` may run at most `5` times.
///
/// Any kind of `Err` may affect the interval calculation.
/// Check [`run_fallible()`](Self::run_fallible) to see how that works.
///
/// Calling `retry(...)` may block the current thread, depending on the throttle's state and configuration.
///
/// # Example
///
/// ```
/// use std::time::Duration;
/// use rayon::prelude::*;
/// use slottle::Throttle;
///
/// let throttle = Throttle::builder().build().unwrap();
///
/// let which_round_finished: Vec<Result<_, _>> = vec![2, 1, 0]
/// .into_par_iter()
/// .map(|x| {
/// throttle.retry(
/// // round always in `1..=(max_retry + 1)` (`1..=2` in this case)
/// |round| match x + round >= 3 {
/// false => Err(round),
/// true => Ok(round),
/// },
/// 1, // max_retry == 1
/// )
/// })
/// .collect();
///
/// assert_eq!(which_round_finished, vec![Ok(1), Ok(2), Err(2)]);
/// ```
///
/// Function `f` can also return [`RetryableResult::FatalErr`] to ask the throttle not to do
/// any further retries:
///
/// ```
/// use std::time::Duration;
/// use rayon::prelude::*;
/// use slottle::{Throttle, RetryableResult};
///
/// let throttle = Throttle::builder().build().unwrap();
///
/// let which_round_finished: Vec<Result<_, _>> = vec![2, 1, 0]
/// .into_par_iter()
/// .map(|x| {
/// throttle.retry(
/// // round always in `1..=(max_retry + 1)` (`1..=2` in this case)
/// |round| match x + round >= 3 {
/// // FatalErr would not retry
/// false => RetryableResult::FatalErr(round),
/// true => RetryableResult::Ok(round),
/// },
/// 1, // max_retry == 1
/// )
/// })
/// .collect();
///
/// assert_eq!(which_round_finished, vec![Ok(1), Err(1), Err(1)]);
/// ```
///
pub fn retry<F, T, E, R>(&self, mut f: F, max_retry: usize) -> Result<T, E>
where
F: FnMut(usize) -> R,
R: Into<RetryableResult<T, E>>,
{
let max_try = max_retry + 1;
let mut round = 1;
loop {
// occupying single concurrency quota
let _semaphore_guard = self.semaphore.access();
self.waiting();
let result: RetryableResult<T, E> = f(round).into();
match result {
RetryableResult::Ok(v) => {
self.write_log(true);
return Ok(v);
}
RetryableResult::RetryableErr(e) => {
self.write_log(false);
if round == max_try {
return Err(e);
} else {
round += 1;
}
}
RetryableResult::FatalErr(e) => {
self.write_log(false);
return Err(e);
}
};
}
}
fn waiting(&self) {
// renew allow_future & calculate how long to wait further
let still_should_wait: Option<Duration> = {
let mut allowed_future_guard = self
.allowed_future
.lock()
.expect("mutex impossible to be poison");
// generate next interval
let next_interval: Duration = (self.interval_fn)(
self.log
.as_ref()
.map(|log| log.lock().expect("mutex impossible to be poison"))
.as_deref(),
) / self.concurrent;
// get old allow_future
let allowed_future = *allowed_future_guard;
// Instant::now() should be called after the lock is acquired, or else it may be inaccurate.
let now = Instant::now();
// counting next_allowed_future from when?
let next_allowed_future_baseline = *[now, allowed_future]
.iter()
.max()
.expect("this is [Instant; 2] array so max value always exists");
let next_allowed_future = next_allowed_future_baseline + next_interval;
*allowed_future_guard = next_allowed_future;
drop(allowed_future_guard);
allowed_future.checked_duration_since(now)
};
// sleep still_should_wait in this period
if let Some(still_should_wait) = still_should_wait {
thread::sleep(still_should_wait);
}
}
fn write_log(&self, successful: bool) {
if let Some(log) = self.log.as_ref() {
log.lock()
.expect("mutex impossible to be poison")
.push(LogRecord {
time: Instant::now(),
successful,
});
}
}
}
impl Debug for Throttle {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("Throttle")
.field("allowed_future", &self.allowed_future)
.field("concurrent", &self.concurrent)
.finish()
}
}
/// Use to build a [`Throttle`].
///
/// Created by [`Throttle::builder()`] API.
pub struct ThrottleBuilder {
interval_fn: Arc<IntervalFn>,
concurrent: u32,
log_size: usize,
}
impl ThrottleBuilder {
fn new() -> Self {
Self {
interval_fn: Arc::new(|_| Duration::default()),
concurrent: 1,
log_size: 0,
}
}
/// Set the interval of the throttle.
///
/// # Example
///
/// ```
/// use slottle::{Throttle, Interval};
/// use std::time::Duration;
/// use rand;
///
/// // fixed interval: 10ms
/// Throttle::builder().interval(Duration::from_millis(10));
///
/// // random interval between 0ms and 10ms
/// Throttle::builder()
/// .interval(|| Duration::from_millis(10).mul_f64(rand::random()));
///
/// // increasing delay if failed continuously
/// Throttle::builder()
/// .interval(Interval::new(
/// |log| match log.unwrap().failure_count_cont() {
/// 0 => Duration::from_millis(10),
/// 1 => Duration::from_millis(30),
/// 2 => Duration::from_millis(50),
/// 3 => Duration::from_millis(70),
/// _ => unreachable!(),
/// },
/// 3, // maximum log size
/// ));
///
/// // use pre-defined algorithm
/// Throttle::builder()
/// .interval(slottle::fibonacci(
/// Duration::from_millis(10),
/// Duration::from_secs(2),
/// ));
/// ```
pub fn interval<I>(&mut self, interval: I) -> &mut Self
where
I: Into<Interval>,
{
let interval = interval.into();
self.interval_fn = interval.interval_fn;
self.log_size = interval.log_size;
self
}
/// Set the concurrency; the default value is `1`.
pub fn concurrent(&mut self, concurrent: u32) -> &mut Self {
self.concurrent = concurrent;
self
}
/// Create a new [`Throttle`] with the current configuration.
///
/// Returns `None` if `concurrent` is `0` or larger than `isize::MAX`.
pub fn build(&self) -> Option<Throttle> {
use std::convert::TryInto;
if self.concurrent == 0 {
return None;
}
Some(Throttle {
allowed_future: Mutex::new(Instant::now()),
log: match self.log_size {
0 => None,
_ => Some(Mutex::new(ThrottleLog::new(self.log_size))),
},
semaphore: Semaphore::new(self.concurrent.try_into().ok()?),
interval_fn: Arc::clone(&self.interval_fn),
concurrent: self.concurrent,
})
}
}
impl Debug for ThrottleBuilder {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("ThrottleBuilder")
.field("concurrent", &self.concurrent)
.field("log_size", &self.log_size)
.finish()
}
}
/// The result type for [`Throttle::retry()`] API.
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy)]
pub enum RetryableResult<T, E> {
/// Represent operation successful.
Ok(T),
/// Represent operation failed & allow to retry.
RetryableErr(E),
/// Represent operation failed & should not retry.
FatalErr(E),
}
impl<T, E> From<Result<T, E>> for RetryableResult<T, E> {
fn from(result: Result<T, E>) -> Self {
match result {
Ok(v) => Self::Ok(v),
Err(e) => Self::RetryableErr(e),
}
}
}
/// Collects the operation log of a [`Throttle`].
///
/// Users can access this log through the [`ThrottleBuilder::interval()`] API via
/// [`Interval`].
///
/// `ThrottleLog` drops the oldest log records automatically when it reaches
/// its size limit.
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub struct ThrottleLog {
size: usize,
inner: VecDeque<LogRecord>,
}
impl ThrottleLog {
fn new(size: usize) -> Self {
Self {
size,
inner: VecDeque::with_capacity(size),
}
}
fn push(&mut self, log_record: LogRecord) {
// if size == 0, noop
if self.size == 0 {
return;
}
// if size != 0 and already full, remove oldest before insert record
if self.size == self.inner.len() {
self.inner.pop_back();
}
self.inner.push_front(log_record);
}
/// Get the maximum log size.
///
/// This value never changes.
pub fn size(&self) -> usize {
self.size
}
/// Get how many failures exist in the log.
///
/// # Example
///
/// (Left is new, right is old, F = Failure, S = Successful)
///
/// - `FFFFF`: 5
/// - `FFSFF`: 4
/// - `SFFFF`: 4
/// - `FSSSS`: 1
pub fn failure_count(&self) -> usize {
self.inner
.iter()
.filter(|record| !record.successful)
.count()
}
/// Get how many continuous failures there are, counting from the newest log entry.
///
/// # Example
///
/// (Left is new, right is old, F = Failure, S = Successful)
///
/// - `FFFFF`: 5
/// - `FFSFF`: 2
/// - `SFFFF`: 0
/// - `FSSSS`: 1
pub fn failure_count_cont(&self) -> usize {
self.inner
.iter()
.take_while(|record| !record.successful)
.count()
}
/// Get the failure rate over the whole log.
///
/// # Example
///
/// (Left is new, right is old, F = Failure, S = Successful)
///
/// - `FFFFF`: 1.0
/// - `FFSFF`: 0.8
/// - `SFFFF`: 0.8
/// - `FSSSS`: 0.2
///
/// This function uses `size` as the denominator. Returns `None` if `size == 0`.
pub fn failure_rate(&self) -> Option<f64> {
if self.size == 0 {
None
} else {
let failed_count = self.failure_count();
Some(failed_count as f64 / self.size as f64)
}
}
/// Get the duration between the first and last log record.
///
/// Returns `None` if there are fewer than 2 log records.
pub fn duration(&self) -> Option<Duration> {
if self.inner.len() <= 1 {
None
} else {
Some(self.inner.front().unwrap().time - self.inner.back().unwrap().time)
}
}
}
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq, PartialOrd, Ord)]
pub struct LogRecord {
time: Instant,
successful: bool,
}
/// The interval configuration.
#[derive(Clone)]
pub struct Interval {
interval_fn: Arc<IntervalFn>,
log_size: usize,
}
impl Interval {
/// Create an interval-calculating algorithm.
///
/// Define an `interval_fn` to generate intervals dynamically.
///
/// The `log_size` argument determines the maximum size of the [`ThrottleLog`] which
/// `interval_fn` can access. If `log_size == 0`, `interval_fn` will receive
/// `None`.
pub fn new<F>(interval_fn: F, log_size: usize) -> Self
where
F: Fn(Option<&ThrottleLog>) -> Duration + Send + Sync + 'static,
{
Self {
interval_fn: Arc::new(interval_fn),
log_size,
}
}
/// Apply a post-processing step to the generated interval.
///
/// This method is useful when the user wants to tweak a
/// pre-built interval algorithm.
///
/// # Example
///
/// ```
/// use std::time::Duration;
/// use slottle::Interval;
///
/// // the following algorithm produces a random duration between 0 and 10ms
/// let algo = Interval::new(|_| Duration::from_millis(10), 0)
/// .modify(|dur| dur.mul_f64(rand::random()));
/// ```
pub fn modify<F>(self, f: F) -> Interval
where
F: Fn(Duration) -> Duration + Send + Sync + 'static,
{
let orig_fn = self.interval_fn;
Self {
interval_fn: Arc::new(move |log| f(orig_fn(log))),
log_size: self.log_size,
}
}
}
impl<F> From<F> for Interval
where
F: Fn() -> Duration + Send + Sync + 'static,
{
fn from(f: F) -> Self {
Self {
interval_fn: Arc::new(move |_| f()),
log_size: 0,
}
}
}
impl From<Duration> for Interval {
fn from(duration: Duration) -> Self {
Self {
interval_fn: Arc::new(move |_| duration),
log_size: 0,
}
}
}
impl Debug for Interval {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("Interval")
.field("log_size", &self.log_size)
.finish()
}
}
impl Default for Interval {
fn default() -> Self {
Self {
interval_fn: Arc::new(|_| Duration::default()),
log_size: 0,
}
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn with_concurrent_equal_0() {
assert!(Throttle::builder().concurrent(0).build().is_none());
}
#[test]
fn with_concurrent_equal_to_isize_max() {
// this case may run out of memory in previous implementation.
assert!(Throttle::builder()
.concurrent(isize::MAX as u32)
.build()
.is_some());
}
#[test]
#[cfg(any(
target_pointer_width = "8",
target_pointer_width = "16",
target_pointer_width = "32",
))]
fn with_concurrent_large_than_isize_max() {
assert!(Throttle::builder()
// If isize::MAX > u32::MAX (meaning target_pointer_width is 64 or larger), this
// simply does not compile due to overflow.
.concurrent(isize::MAX as u32 + 1)
.build()
.is_none());
}
#[test]
fn retryable_result_convert() {
let orig: Result<bool, u32> = Err(42);
let to: RetryableResult<bool, u32> = orig.into();
assert_eq!(to, RetryableResult::RetryableErr(42))
}
#[test]
fn throttle_log_op() {
let mut log = ThrottleLog::new(4);
assert_eq!(log.failure_count_cont(), 0);
assert_eq!(log.failure_count(), 0);
assert_eq!(log.failure_rate().unwrap(), 0.0);
log.push(LogRecord {
time: Instant::now(),
successful: false,
});
assert_eq!(log.failure_count_cont(), 1);
assert_eq!(log.failure_count(), 1);
assert_eq!(log.failure_rate().unwrap(), 0.25);
log.push(LogRecord {
time: Instant::now(),
successful: false,
});
assert_eq!(log.failure_count_cont(), 2);
assert_eq!(log.failure_count(), 2);
assert_eq!(log.failure_rate().unwrap(), 0.5);
log.push(LogRecord {
time: Instant::now(),
successful: true,
});
log.push(LogRecord {
time: Instant::now(),
successful: true,
});
assert_eq!(log.failure_count_cont(), 0);
assert_eq!(log.failure_count(), 2);
assert_eq!(log.failure_rate().unwrap(), 0.5);
log.push(LogRecord {
time: Instant::now(),
successful: true,
});
assert_eq!(log.failure_count_cont(), 0);
assert_eq!(log.failure_count(), 1);
assert_eq!(log.failure_rate().unwrap(), 0.25);
log.push(LogRecord {
time: Instant::now(),
successful: false,
});
assert_eq!(log.failure_count_cont(), 1);
assert_eq!(log.failure_count(), 1);
assert_eq!(log.failure_rate().unwrap(), 0.25);
}
#[test]
fn throttle_log_new_0() {
let mut log = ThrottleLog::new(0);
log.push(LogRecord {
time: Instant::now(),
successful: false,
});
assert_eq!(log.failure_count_cont(), 0);
assert_eq!(log.failure_count(), 0);
assert!(log.failure_rate().is_none());
}
#[test]
fn interval_modify() {
let algo = Interval::new(|_| Duration::from_millis(10), 0).modify(|dur| dur * 2);
assert_eq!((algo.interval_fn)(None), Duration::from_millis(20));
}
}
| true |
b2f14234c5e36f8254cb776c813719de95aab956 | Rust | itotallyrock/oxide-og | /src/castles.rs | UTF-8 | 3,107 | 2.65625 | 3 | ["MIT"] | permissive |
use std::convert::TryFrom;
use bitflags::bitflags;
use super::errors;
bitflags! {
pub struct CastlePermissions: u8 {
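// Every named value below is a bitwise OR of the four base rights:
// WHITE_KING = 1, WHITE_QUEEN = 2, BLACK_KING = 4, BLACK_QUEEN = 8.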
const NONE = 0;
const WHITE_KING = 1;
const WHITE_QUEEN = 2;
const WHITE_ALL = 3;
const BLACK_KING = 4;
const BOTH_KINGS = 5;
const WHITE_QUEEN_BLACK_KING = 6;
const WHITE_ALL_BLACK_KING = 7;
const BLACK_QUEEN = 8;
const WHITE_KING_BLACK_QUEEN = 9;
const BOTH_QUEENS = 10;
const WHITE_ALL_BLACK_QUEEN = 11;
const BLACK_ALL = 12;
const BLACK_ALL_WHITE_KING = 13;
const BLACK_ALL_WHITE_QUEEN = 14;
const ALL = 15;
}
}
impl Default for CastlePermissions {
fn default() -> Self {
CastlePermissions::NONE
}
}
impl ToString for CastlePermissions {
fn to_string(&self) -> String {
match self.clone() {
CastlePermissions::WHITE_KING => "K".to_string(),
CastlePermissions::WHITE_QUEEN => "Q".to_string(),
CastlePermissions::WHITE_ALL => "KQ".to_string(),
CastlePermissions::BLACK_KING => "k".to_string(),
CastlePermissions::BOTH_KINGS => "Kk".to_string(),
CastlePermissions::WHITE_QUEEN_BLACK_KING => "Qk".to_string(),
CastlePermissions::WHITE_ALL_BLACK_KING => "KQk".to_string(),
CastlePermissions::BLACK_QUEEN => "q".to_string(),
CastlePermissions::WHITE_KING_BLACK_QUEEN => "Kq".to_string(),
CastlePermissions::BOTH_QUEENS => "Qq".to_string(),
CastlePermissions::WHITE_ALL_BLACK_QUEEN => "KQq".to_string(),
CastlePermissions::BLACK_ALL => "kq".to_string(),
CastlePermissions::BLACK_ALL_WHITE_KING => "Kkq".to_string(),
CastlePermissions::BLACK_ALL_WHITE_QUEEN => "Qkq".to_string(),
CastlePermissions::ALL => "KQkq".to_string(),
CastlePermissions::NONE | _ => "-".to_string(),
}
}
}
impl TryFrom<String> for CastlePermissions {
type Error = errors::InvalidCastlesError;
fn try_from(castle_string: String) -> Result<Self, Self::Error> {
match castle_string.as_str() {
"KQkq" => Ok(CastlePermissions::ALL),
"KQk" => Ok(CastlePermissions::WHITE_ALL_BLACK_KING),
"KQq" => Ok(CastlePermissions::WHITE_ALL_BLACK_QUEEN),
"Kkq" => Ok(CastlePermissions::BLACK_ALL_WHITE_KING),
"Qkq" => Ok(CastlePermissions::BLACK_ALL_WHITE_QUEEN),
"KQ" => Ok(CastlePermissions::WHITE_ALL),
"kq" => Ok(CastlePermissions::BLACK_ALL),
"Kk" => Ok(CastlePermissions::BOTH_KINGS),
"Qq" => Ok(CastlePermissions::BOTH_QUEENS),
"Qk" => Ok(CastlePermissions::WHITE_QUEEN_BLACK_KING),
"K" => Ok(CastlePermissions::WHITE_KING),
"Q" => Ok(CastlePermissions::WHITE_QUEEN),
"k" => Ok(CastlePermissions::BLACK_KING),
"q" => Ok(CastlePermissions::BLACK_QUEEN),
"-" => Ok(CastlePermissions::NONE),
_ => Err(errors::InvalidCastlesError),
}
}
}
| true |
768742a012d7ea3d44bf85c26b0f9e0e29f4d2d5 | Rust | MarcusDunn/kaleidoscope_rs | /src/main.rs | UTF-8 | 13,396 | 3.5 | 4 | [] | no_license |
#![allow(dead_code)]
use std::convert::TryFrom;
use std::fs::File;
use std::io::{BufReader, Read};
use std::iter::Peekable;
use std::process::exit;
use std::vec::IntoIter;
#[derive(Debug, PartialEq)]
enum Token {
Definition,
External,
Identifier(String),
Number(f64),
Symbol(char),
}
struct Tokenizer {
contents: String,
}
impl Tokenizer {
fn new(file: File) -> Result<Tokenizer, std::io::Error> {
let mut contents = String::new();
BufReader::new(file).read_to_string(&mut contents)?;
Ok(Tokenizer { contents })
}
}
#[derive(PartialEq, Debug)]
enum Expression {
Number(f64),
Expression(Box<Expression>),
Variable(String),
Binary(Box<BinaryExpression>),
Call(Box<CallExpression>),
}
#[derive(PartialEq, Debug)]
struct CallExpression {
callee: String,
args: Vec<Expression>,
}
struct PrototypeAst {
name: String,
args: Vec<String>,
}
struct FunctionAst {
proto: PrototypeAst,
body: Expression,
}
#[derive(PartialEq, Debug)]
struct BinaryExpression {
op: BinaryOperator,
lhs: Expression,
rhs: Expression,
}
#[derive(Debug, Eq, PartialEq)]
struct TokenizerError(String);
impl IntoIterator for Tokenizer {
type Item = Token;
type IntoIter = IntoIter<Token>;
fn into_iter(self) -> Self::IntoIter {
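// Tokenize by splitting on whitespace and classifying each word: keywords first,
// then numbers, then single-character symbols, falling back to identifiers.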
self.contents
.split_whitespace()
.map(|word| {
if word == "def" {
Token::Definition
} else if word == "extern" {
Token::External
} else if let Ok(number) = word.parse::<f64>() {
Token::Number(number)
} else if word.starts_with(|c: char| !c.is_alphanumeric()) {
Token::Symbol(word.chars().next().unwrap())
} else {
Token::Identifier(String::from(word))
}
})
.collect::<Vec<_>>()
.into_iter()
}
}
#[derive(Debug, Eq, PartialEq)]
enum BinaryOperator {
Plus,
Minus,
Multiply,
Divide,
GreaterThan,
LessThan,
}
impl BinaryOperator {
fn precedence(&self) -> u8 {
use BinaryOperator::*;
match self {
GreaterThan | LessThan => 1,
Plus | Minus => 2,
Multiply | Divide => 3,
}
}
}
impl TryFrom<char> for BinaryOperator {
type Error = ();
fn try_from(value: char) -> Result<Self, Self::Error> {
match value {
'+' => Ok(BinaryOperator::Plus),
'-' => Ok(BinaryOperator::Minus),
'*' => Ok(BinaryOperator::Multiply),
'/' => Ok(BinaryOperator::Divide),
'>' => Ok(BinaryOperator::GreaterThan),
'<' => Ok(BinaryOperator::LessThan),
_ => Err(()),
}
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn tokenize_def() {
let tokenizer = Tokenizer {
contents: "def".to_string(),
};
assert_eq!(tokenizer.into_iter().next(), Some(Token::Definition))
}
#[test]
fn tokenize_identifier() {
let tokenizer = Tokenizer {
contents: "defa".to_string(),
};
assert_eq!(
tokenizer.into_iter().next(),
Some(Token::Identifier(String::from("defa")))
)
}
#[test]
fn tokenize_extern() {
let tokenizer = Tokenizer {
contents: "extern".to_string(),
};
assert_eq!(tokenizer.into_iter().next(), Some(Token::External))
}
#[test]
fn tokenize_number() {
let tokenizer = Tokenizer {
contents: "1.23".to_string(),
};
assert_eq!(tokenizer.into_iter().next(), Some(Token::Number(1.23)))
}
#[test]
fn tokenize_symbol() {
let tokenizer = Tokenizer {
contents: "+".to_string(),
};
assert_eq!(tokenizer.into_iter().next(), Some(Token::Symbol('+')))
}
#[test]
fn tokenize_series() {
let token_iter = Tokenizer {
contents: "1.23 def extern helloWorld 1.23421 externblah ( )".to_string(),
}
.into_iter();
let expected = vec![
Token::Number(1.23),
Token::Definition,
Token::External,
Token::Identifier("helloWorld".to_string()),
Token::Number(1.23421),
Token::Identifier("externblah".to_string()),
Token::Symbol('('),
Token::Symbol(')'),
];
for (a, b) in token_iter.zip(expected) {
assert_eq!(a, b)
}
}
#[test]
fn two_plus_one() {
let one = Expression::Number(1.0);
let two = Expression::Number(1.0);
let _ = Expression::Binary(Box::new(BinaryExpression {
op: BinaryOperator::Plus,
lhs: one,
rhs: two,
}));
}
#[test]
fn parse_simple_function_call() {
let tokens = vec![
Token::Identifier(String::from("println")),
Token::Symbol('('),
Token::Symbol(')'),
];
let expected = Expression::Call(Box::new(CallExpression {
callee: String::from("println"),
args: vec![],
}));
assert_eq!(parse(&mut tokens.into_iter().peekable()), Ok(expected))
}
#[test]
fn parse_more_complex_function_call() {
let tokens = vec![
Token::Identifier(String::from("println")),
Token::Symbol('('),
Token::Identifier(String::from("concat")),
Token::Symbol('('),
Token::Identifier(String::from("string1")),
Token::Symbol(','),
Token::Identifier(String::from("string2")),
Token::Symbol(')'),
Token::Symbol(')'),
];
let expected = Expression::Call(Box::new(CallExpression {
callee: String::from("println"),
args: vec![Expression::Call(Box::new(CallExpression {
callee: String::from("concat"),
args: vec![
Expression::Variable(String::from("string1")),
Expression::Variable(String::from("string2")),
],
}))],
}));
assert_eq!(parse(&mut tokens.into_iter().peekable()), Ok(expected))
}
#[test]
fn simple_binary_operator() {
let tokens = vec![
Token::Identifier(String::from("num1")),
Token::Symbol('+'),
Token::Identifier(String::from("num2")),
];
let expected = Expression::Binary(Box::new(BinaryExpression {
op: BinaryOperator::Plus,
lhs: Expression::Variable(String::from("num1")),
rhs: Expression::Variable(String::from("num2")),
}));
assert_eq!(parse(&mut tokens.into_iter().peekable()), Ok(expected))
}
#[test]
fn chained_binary_operator() {
let tokens = vec![
Token::Identifier(String::from("num1")),
Token::Symbol('+'),
Token::Identifier(String::from("num2")),
Token::Symbol('*'),
Token::Identifier(String::from("num3")),
];
let expected = Expression::Binary(Box::new(BinaryExpression {
op: BinaryOperator::Plus,
lhs: Expression::Variable(String::from("num1")),
rhs: Expression::Binary(Box::new(BinaryExpression {
op: BinaryOperator::Multiply,
lhs: Expression::Variable(String::from("num2")),
rhs: Expression::Variable(String::from("num3")),
})),
}));
assert_eq!(parse(&mut tokens.into_iter().peekable()), Ok(expected))
}
#[test]
fn chained_binary_operator_2() {
let tokens = vec![
Token::Identifier(String::from("num1")),
Token::Symbol('*'),
Token::Identifier(String::from("num2")),
Token::Symbol('+'),
Token::Identifier(String::from("num3")),
];
let expected = Expression::Binary(Box::new(BinaryExpression {
op: BinaryOperator::Plus,
lhs: Expression::Binary(Box::new(BinaryExpression {
op: BinaryOperator::Multiply,
lhs: Expression::Variable(String::from("num1")),
rhs: Expression::Variable(String::from("num2")),
})),
rhs: Expression::Variable(String::from("num3")),
}));
assert_eq!(parse(&mut tokens.into_iter().peekable()), Ok(expected))
}
}
fn main() -> Result<(), std::io::Error> {
let token_iter = Tokenizer::new(File::open("main.marc")?)?.into_iter();
let _ = parse(&mut token_iter.peekable());
Ok(())
}
// todo, better error handling with EOF
fn parse(mut token_iter: &mut Peekable<impl Iterator<Item = Token>>) -> Result<Expression, String> {
if let Some(token) = token_iter.next() {
let expression = match token {
Token::Definition => { todo!() }
Token::External => { todo!() }
Token::Identifier(name) => {
if let Some(Token::Symbol('(')) = token_iter.peek() {
let _ = token_iter.next(); // pop off the '('
let mut arguments: Vec<Expression> = Vec::new();
loop {
if let Some(token) = token_iter.peek() {
match token {
Token::Symbol(')') => {
let _ = token_iter.next(); // pop off the ')'
break;
}
Token::Symbol(',') => {
let _ = token_iter.next(); // pop off the ','
continue;
}
_ => {
match parse(token_iter) {
Ok(argument) => arguments.push(argument),
Err(error_message) => return Err(format!("Error while parsing arguments to {}: {}", name, error_message)),
}
}
}
} else {
return Err(format!("Error while parsing arguments to {}: missing closing parenthesis around arguments", name));
}
}
Ok(Expression::Call(Box::new(CallExpression {
callee: name,
args: arguments,
})))
} else {
Ok(Expression::Variable(name))
}
}
Token::Number(num) => { Ok(Expression::Number(num)) }
Token::Symbol('(') => {
match parse(&mut token_iter) {
Ok(inner_expression) => {
match token_iter.next() {
Some(Token::Symbol(')')) => Ok(inner_expression),
Some(token) => Err(format!("found {:?} after a single expression inside parenthesis, perhaps missing closing parenthesis?", token)),
None => Err(String::from("reached end of input while parsing expression inside parenthesis, perhaps missing closing parenthesis?"))
}
}
Err(error_message) => {
Err(format!("could not find an expression inside parenthesis, instead got: {}", error_message))
}
}
}
_ => panic!()
};
match expression {
Ok(expression) => check_binary_expression(expression, 0, token_iter),
Err(error_message) => Err(error_message),
}
} else {
Err(String::from("reached end of iterator"))
}
}
fn check_binary_expression(
left_hand_side: Expression,
left_hand_side_precedence: u8,
token_iter: &mut Peekable<impl Iterator<Item = Token>>,
) -> Result<Expression, String> {
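// Precedence-aware folding: peek at the next token and, if it is a binary operator that
// binds at least as tightly as the expression already parsed, consume it and build a
// BinaryExpression; otherwise return the left-hand side unchanged.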
match token_iter.peek() {
Some(Token::Symbol(s)) => match BinaryOperator::try_from(*s) {
Ok(binary_operator) => {
if binary_operator.precedence() < left_hand_side_precedence {
Ok(left_hand_side)
} else {
let _ = token_iter.next(); // pop binary operator
if let Ok(right_hand_side) = parse(token_iter) {
if let Some(token) = token_iter.peek() {
todo!()
} else {
Ok(Expression::Binary(Box::new(BinaryExpression {
op: binary_operator,
lhs: left_hand_side,
rhs: right_hand_side,
})))
}
} else {
Err(format!(
"missing right hand side of binary operator {:?}",
binary_operator
))
}
}
}
Err(_) => Ok(left_hand_side),
},
_ => Ok(left_hand_side),
}
}
| true |
41200888536e45068571d147a8f61a2143405f16 | Rust | XAMPPRocky/openm44 | /src/map.rs | UTF-8 | 1,251 | 2.921875 | 3 | [] | no_license |
use std::collections::HashMap;
use ggez::GameResult;
use ggez::Context;
use ggez::graphics;
use ggez::graphics::Color;
use crate::hex::Hex;
use crate::layout::Layout;
use crate::tile::Tile;
use crate::player::Player;
pub struct Map {
pub grid: HashMap<Hex, Tile>,
pub layout: Layout,
}
impl Map {
pub fn new(width: i8, height: i8) -> Self {
let mut grid = HashMap::with_capacity((width * height) as usize);
for r in 0..height {
let r_offset = r >> 1;
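// Shift the q range by half the row index so the axial coordinates tile a roughly
// rectangular map; the third cube coordinate is derived so that q + r + s == 0.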
for q in -r_offset..(width - r_offset) {
grid.insert(Hex::new(q, r, -q-r), Tile::new());
}
}
Self { grid, layout: Layout::default() }
}
pub fn draw(&mut self, ctx: &mut Context, player: &Player) -> GameResult<()> {
graphics::set_background_color(ctx, Color::from((0, 0, 0)));
for (hex, tile) in self.grid.iter_mut() {
self.layout.draw_hex(ctx, *hex, tile)?;
}
if let Some(hex) = player.selected() {
self.layout.draw_selection(ctx, hex, &self.grid[&hex])?;
for (target, tile) in self.grid.iter_mut() {
self.layout.draw_distance(ctx, hex, *target, tile)?;
}
}
Ok(())
}
}
| true |
e29e3c8ec82589c98df898e84d25d2341194d1ee | Rust | johnpmayer/quicksilver-utils | /quicksilver-utils-async/src/desktop/websocket.rs | UTF-8 | 5,031 | 2.65625 | 3 | ["LicenseRef-scancode-unknown-license-reference", "Apache-2.0", "MIT"] | permissive |
use futures_io::{AsyncRead, AsyncWrite};
use url::Url;
use async_std::net::TcpStream;
use async_tls::TlsConnector;
use bytes::Bytes;
use soketto::{
connection::{Error as ConnectionError, Receiver, Sender},
handshake::{Client, Error as HandshakeError, ServerResponse},
};
use std::cell::RefCell;
use std::io::Error as IoError;
use std::sync::Arc;
use log::{debug, trace, warn};
use super::tls::client_config;
use crate::websocket::{WebSocketError, WebSocketMessage};
#[derive(Clone)]
pub struct AsyncWebSocket {
sender: Arc<RefCell<Sender<Box<dyn AsyncStream>>>>,
receiver: Arc<RefCell<Receiver<Box<dyn AsyncStream>>>>,
}
impl From<HandshakeError> for WebSocketError {
fn from(err: HandshakeError) -> Self {
WebSocketError::NativeError(format!("Handshake error: {}", err))
}
}
impl From<ConnectionError> for WebSocketError {
fn from(err: ConnectionError) -> Self {
WebSocketError::NativeError(format!("Connection error: {}", err))
}
}
impl From<IoError> for WebSocketError {
fn from(err: IoError) -> Self {
WebSocketError::NativeError(format!("IO Error: {}", err))
}
}
trait AsyncStream: AsyncRead + AsyncWrite + Unpin {}
impl<T: AsyncRead + AsyncWrite + Unpin> AsyncStream for T {}
async fn client(url: &Url) -> Result<Client<'_, Box<dyn AsyncStream>>, WebSocketError> {
debug!("Creating client to url {}", url);
let port = url.port_or_known_default();
let host = url.host_str().expect("url host");
let path = url.path();
let scheme = url.scheme();
let addresses = url.socket_addrs(|| port).expect("url lookup via dns");
trace!("Possible addresses {:?}", addresses);
let address = addresses[0];
trace!("Connecting to address {}", address);
let transport_stream = {
let mut connected_stream: Option<TcpStream> = None;
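// Try each resolved address in order and keep the first TCP connection that succeeds.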
for address in addresses {
let attempted_stream = TcpStream::connect(address).await;
match attempted_stream {
Ok(stream) => {
connected_stream = Some(stream);
trace!("Successfully connected to address {}", address);
break;
}
Err(e) => warn!("Couldn't connect to address {}, {}", address, e),
}
}
match connected_stream {
Some(stream) => stream,
None => {
return Err(WebSocketError::NativeError(
"All addresses failed to connect".to_string(),
))
}
}
};
trace!("Scheme: {}", scheme);
let boxed_stream: Box<dyn AsyncStream> = if scheme == "wss" {
debug!(
"Starting TLS handshake for secure websocket with domain {}",
host
);
let config = client_config();
let connector: TlsConnector = TlsConnector::from(Arc::new(config));
trace!("Created connector");
let handshake = connector.connect(host, transport_stream);
let tls_stream = handshake.await?;
debug!("Completed TLS handshake");
Box::new(tls_stream)
} else {
Box::new(transport_stream)
};
Ok(Client::new(boxed_stream, host, path))
}
impl AsyncWebSocket {
pub async fn connect(url: &Url) -> Result<Self, WebSocketError> {
let mut client = client(url).await?;
let (sender, receiver) = match client.handshake().await? {
ServerResponse::Accepted { .. } => client.into_builder().finish(),
ServerResponse::Redirect { .. } => unimplemented!("follow location URL"),
ServerResponse::Rejected { .. } => unimplemented!("handle failure"),
};
let sender = Arc::new(RefCell::new(sender));
let receiver = Arc::new(RefCell::new(receiver));
Ok(AsyncWebSocket { sender, receiver })
}
pub async fn send(&self, msg: &WebSocketMessage) -> Result<(), WebSocketError> {
let mut sender = self.sender.borrow_mut();
match msg {
WebSocketMessage::String(s) => sender.send_text(s).await?,
WebSocketMessage::Binary(b) => sender.send_binary(b).await?,
}
sender.flush().await?; // otherwise it just sits there, which is just surprising for casual users
Ok(())
}
pub async fn receive(&self) -> Result<WebSocketMessage, WebSocketError> {
let data = self.receiver.borrow_mut().receive_data().await?;
let message = if data.is_binary() {
let data_slice: &[u8] = data.as_ref();
WebSocketMessage::Binary(Bytes::copy_from_slice(data_slice))
} else {
let data_slice: &[u8] = data.as_ref();
let s = String::from_utf8(Vec::from(data_slice))
.map_err(|_| WebSocketError::NativeError("invalid utf8".to_string()))?;
WebSocketMessage::String(s)
};
Ok(message)
}
pub async fn close(&self) -> Result<(), WebSocketError> {
self.sender.borrow_mut().close().await?;
Ok(())
}
}
| true |
db147fc4a91dd5171ddc1dee992e905078111957 | Rust | snandasena/rust-http2 | /src/client/req.rs | UTF-8 | 2,137 | 2.8125 | 3 | ["MIT"] | permissive |
use bytes::Bytes;
use client::types::ClientTypes;
use common::sender::CommonSender;
use common::sender::SendError;
use common::window_size::StreamDead;
use error;
use futures::Poll;
use futures::Stream;
use ErrorCode;
use Headers;
use HttpStreamAfterHeaders;
use SenderState;
/// Reference to outgoing stream on the client side.
pub struct ClientRequest {
pub(crate) common: CommonSender<ClientTypes>,
}
impl ClientRequest {
pub fn state(&self) -> SenderState {
self.common.state()
}
/// Wait for stream to be ready to accept data.
pub fn poll(&mut self) -> Poll<(), StreamDead> {
self.common.poll()
}
/// Synchronously wait until the outgoing stream has non-zero window space
pub fn block_wait(&mut self) -> Result<(), StreamDead> {
self.common.block_wait()
}
/// Enqueue data to outgoing stream
///
/// This operation fails if the stream is in an incorrect state.
///
/// The operation does not fail if the stream or connection window is not available;
/// in that case the message will be queued until the peer increases the window.
pub fn send_data(&mut self, data: Bytes) -> Result<(), SendError> {
self.common.send_data(data)
}
/// Send last `DATA` frame
pub fn send_data_end_of_stream(&mut self, data: Bytes) -> Result<(), SendError> {
self.common.send_data_end_of_stream(data)
}
/// Send trailing headers
pub fn send_trailers(&mut self, trailers: Headers) -> Result<(), SendError> {
self.common.send_trailers(trailers)
}
pub fn pull_from_stream(&mut self, stream: HttpStreamAfterHeaders) -> Result<(), SendError> {
self.common.pull_from_stream(stream)
}
pub fn pull_bytes_from_stream<S>(&mut self, stream: S) -> Result<(), SendError>
where
S: Stream<Item = Bytes, Error = error::Error> + Send + 'static,
{
self.common.pull_bytes_from_stream(stream)
}
pub fn reset(&mut self, error_code: ErrorCode) -> Result<(), SendError> {
self.common.reset(error_code)
}
pub fn close(&mut self) -> Result<(), SendError> {
self.common.close()
}
}
| true |
bbb5ef44821d42df79a844cc432aa80925a90042 | Rust | cloudjunky/httprobe-rs | /src/main.rs | UTF-8 | 2,893 | 2.9375 | 3 | [] | no_license |
use std::io;
use reqwest::{Response, Client, Url};
use tokio;
use std::time::Duration;
use clap::{Arg, App};
use std::fs::File;
use std::io::{BufRead, BufReader};
#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
let matches = App::new("httprobe-rs")
.version("1.0")
.about("Reads from STDIN and GETs the URL.")
.arg(Arg::with_name("INPUT")
.help("Sets the input file to use")
.required(true)
.index(1))
.arg(Arg::with_name("t")
.short("t")
.long("timeout")
.multiple(false)
.takes_value(true)
.help("Sets timeout in seconds."))
.get_matches();
//Default timeout is 3 seconds.
let default_timeout = "3";
let timeout_arg = matches.value_of("t").unwrap_or(default_timeout);
// Parse the timeout string, panicking if it is not a valid number.
let timer = timeout_arg.parse::<u64>().expect("Couldn't parse timeout value.");
// Set up the Client that will perform the requests.
let timeout = Duration::new(timer,0);
let client = Client::builder()
.timeout(timeout)
.build()?;
let input_value = matches.value_of(&"INPUT").unwrap();
if input_value == "-" {
process_stdin(&client).await?;
} else {
process_file(&client, input_value).await?;
}
Ok(())
}
async fn process_stdin(client: &Client) -> Result<(),Box<dyn std::error::Error>> {
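// Read URLs from stdin line by line until EOF (read_line returns 0), probing each one
// and printing the final URL and status code for successful requests.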
loop {
let mut url = String::new();
match io::stdin().read_line(&mut url) {
Ok(len) => if len == 0 {
break;
} else {
let result = probe_site(&client, &url).await;
match result {
Ok(r) => println!("{} {}", r.url(), r.status()),
Err(_) => (),
}
}
Err(error) => {
eprintln!("Error: {}", error);
}
}
}
Ok(())
}
async fn process_file(client: &Client, filename: &str) -> Result<(),Box<dyn std::error::Error>> {
let f = File::open(filename).unwrap();
let f = BufReader::new(f);
for url in f.lines() {
let url = url.expect("No Line");
let result = probe_site(&client, &url).await;
match result {
Ok(r) => println!("{} {}", r.url(), r.status()),
Err(_) => (),
}
}
Ok(())
}
async fn probe_site(client: &Client, url: &str) -> Result<Response, Box<dyn std::error::Error>> {
let parsed_url = Url::parse(url)?;
let res = client.get(parsed_url.as_str()).send().await?;
Ok(res)
}
| true |
7344b9e71b6395dad45c7734f309588c9b2a07fb | Rust | Niedzwiedzw/wikipedia-api-rust | /examples/minimal.rs | UTF-8 | 540 | 2.828125 | 3 | ["MIT"] | permissive |
use std::error::Error;
use wikiapi::responses::{ WikiSearchResult, all_languages };
fn main() -> Result<(), Box<Error>> {
let query = "Waldemar Sierański";
for lang in all_languages() {
let result = WikiSearchResult::new(query, lang);
println!("{:#?}", result);
let article = &result.articles[0];
println!("example article: {:#?}", article);
println!("...and its language links:");
let language_links = article.language_links();
println!("{:?}", language_links);
}
Ok(())
}
| true |
5d72d0766543e957db37d70415ce0e42c3975fde | Rust | yangbin/qumulus | /src/delegate.rs | UTF-8 | 1,765 | 3.015625 | 3 | [] | no_license |
//! Contains functions to help measure size / population statistics of Nodes and help decide the
//! appropriate points in the tree to partition as Zones.
use std::collections::BinaryHeap;
use time;
use node::Node;
/// Possibly delegate
pub fn delegate(node: &Node) -> Option<Node> {
// TODO: allow other strategies
let (_, delegate_node) = check_node(node);
delegate_node
}
fn check_node(node: &Node) -> (usize, Option<Node>) {
let mut delegate_node: Node = Default::default();
let mut total_size = node.byte_size();
if total_size > 32768 {
// TODO: delegate this Node if value stored here is e.g. > 32k
}
// TODO: handle if Node has many children, e.g. > 10000
else {
// recursively check if children need to be delegated
let mut largest_children = BinaryHeap::new();
node.each_child(|k, child_node| {
let (mut child_size, child_delegations) = check_node(child_node);
if let Some(child_delegations) = child_delegations {
delegate_node.add_child(k.clone(), child_delegations);
}
child_size += k.len();
total_size += child_size;
if child_size > 1024 {
largest_children.push( (child_size, k.clone()) );
}
});
while total_size > 65535 {
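// Keep delegating the largest children until the remaining subtree fits under 64 KiB.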
if let Some( (child_size, k) ) = largest_children.pop() {
delegate_node.add_child(k.clone(), Node::delegate(time::precise_time_ns()));
total_size -= child_size;
}
else {
break;
}
}
}
let delegate_node = if delegate_node.is_noop() { None } else { Some(delegate_node) };
(total_size, delegate_node)
}
| true |
e5698c7e0f5fb52461c88de8b656a7f64fac9192 | Rust | frostblooded/automata | /src/determinizer.rs | UTF-8 | 6,051 | 2.9375 | 3 | ["MIT"] | permissive |
use crate::nfa::NFA;
use crate::dfa::DFA;
use crate::transition::Transition;
use crate::counter::Counter;
use std::collections::{BTreeSet, BTreeMap};
pub(crate) struct Determinizer {
nfa: NFA,
dfa: DFA
}
impl Determinizer {
pub(crate) fn new(new_nfa: NFA) -> Self {
Determinizer {
nfa: new_nfa,
dfa: DFA::new()
}
}
pub(crate) fn determinize(mut self) -> Self {
let mut res_final_states = BTreeSet::<u32>::new();
let mut res_transitions = BTreeSet::<Transition<char>>::new();
let initial_epsilon_closure: BTreeSet<u32> = self.epsilon_closure(&self.nfa.initial_states);
let mut found_this_step: BTreeSet<BTreeSet<u32>> = set![initial_epsilon_closure.clone()];
let mut found_last_step: BTreeSet<BTreeSet<u32>>;
// While making the automaton deterministic, we are finding
// sets of states, which are themselves the new states.
// In the process of doing so, we need to have the state sets and
// their respective ids stored somewhere.
let mut found_set_states: BTreeMap<BTreeSet<u32>, u32> = BTreeMap::new();
let mut set_states_counter = Counter::new();
found_set_states.insert(initial_epsilon_closure.clone(), set_states_counter.tick());
while !found_this_step.is_empty() {
found_last_step = found_this_step.clone();
found_this_step.clear();
for state in &found_last_step {
for letter in &self.nfa.alphabet {
let reachable_with_letter = self.reachable_from_set(state, Some(*letter));
let reachable_enclosed = self.epsilon_closure(&reachable_with_letter);
if !found_set_states.contains_key(&reachable_enclosed) {
found_set_states.insert(reachable_enclosed.clone(), set_states_counter.tick());
found_this_step.insert(reachable_enclosed.clone());
}
let found_state_id = found_set_states[&reachable_enclosed];
let state_id = found_set_states[state];
res_transitions.insert(Transition::new(state_id, *letter, found_state_id));
}
if !self.nfa.final_states.is_disjoint(state) {
let state_id = found_set_states[state];
res_final_states.insert(state_id);
}
}
}
let mut dfa = DFA::new();
dfa.alphabet = self.nfa.alphabet.clone();
dfa.states = found_set_states.values().cloned().collect();
// The initial state is always the first state because the algorithm
// starts working from it
dfa.initial_state = Some(0);
dfa.final_states = res_final_states;
dfa.transitions = res_transitions;
dfa.counter = set_states_counter;
self.dfa = dfa;
self
}
fn reachable_from_set(&self, start_states: &BTreeSet<u32>, wanted_label: Option<char>) -> BTreeSet<u32> {
let mut res = BTreeSet::new();
for state in start_states {
res = res.union(&self.nfa.reachable(*state, wanted_label)).cloned().collect();
}
res
}
fn epsilon_closure(&self, starting_states: &BTreeSet<u32>) -> BTreeSet<u32> {
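// Fixed-point computation: repeatedly follow epsilon (None-labelled) transitions
// until no new states are discovered.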
let mut res = starting_states.clone();
let mut found_this_step = starting_states.clone();
let mut found_last_step: BTreeSet<u32>;
while !found_this_step.is_empty() {
found_last_step = found_this_step;
found_this_step = BTreeSet::new();
for state in found_last_step {
let epsilon_reachable = self.nfa.reachable(state, None);
for reached_state in &epsilon_reachable {
if !res.contains(reached_state) {
res.insert(*reached_state);
found_this_step.insert(*reached_state);
}
}
}
}
res
}
pub(crate) fn take(self) -> DFA {
self.dfa
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn epsilon_closure() {
let mut nfa = NFA::new();
nfa.states.insert(0);
nfa.states.insert(1);
nfa.states.insert(2);
nfa.counter.value = 3;
nfa.transitions.insert(Transition::new(0, Some('a'), 1));
nfa.transitions.insert(Transition::new(1, None, 2));
nfa.transitions.insert(Transition::new(2, Some('b'), 0));
let determinizer = Determinizer::new(nfa);
assert_eq!(determinizer.epsilon_closure(&set![0]), set![0]);
assert_eq!(determinizer.epsilon_closure(&set![0, 1]), set![0, 1, 2]);
assert_eq!(determinizer.epsilon_closure(&set![1]), set![1, 2]);
}
#[test]
fn determinize() {
let mut nfa = NFA::new();
nfa.alphabet = set!['a', 'b'];
nfa.states = set![0, 1, 2];
nfa.counter.value = 3;
nfa.initial_states = set![2];
nfa.final_states = set![0];
nfa.transitions = set![
Transition::new(0, Some('a'), 1),
Transition::new(0, Some('b'), 2),
Transition::new(0, None, 1),
Transition::new(1, Some('b'), 1),
Transition::new(1, None, 0),
Transition::new(2, Some('a'), 2),
Transition::new(2, Some('b'), 1)
];
let dfa = Determinizer::new(nfa).determinize().take();
assert_eq!(dfa.alphabet, set!['a', 'b']);
assert_eq!(dfa.states, set![0, 1, 2]);
assert_eq!(dfa.counter.value, 3);
assert_eq!(dfa.initial_state, Some(0));
assert_eq!(dfa.final_states, set![1, 2]);
assert_eq!(dfa.transitions, set![
Transition::new(0, 'a', 0),
Transition::new(0, 'b', 1),
Transition::new(1, 'a', 1),
Transition::new(1, 'b', 2),
Transition::new(2, 'a', 2),
Transition::new(2, 'b', 2)
]);
}
}
| true |
deb6fb64b79016ff9049a6e5882220d34714ecf6 | Rust | crackcomm/wtf-rlsr | /src/ws/graphs.rs | UTF-8 | 1,254 | 2.984375 | 3 | [] | no_license |
//! Workspace dependencies.
use cargo::{core::Workspace as CargoWorkspace, util::graph::Graph};
/// Dependencies graph.
pub type DepGraph = Graph<String, Vec<String>>;
/// Workspace dependencies graph.
pub struct WorkspaceGraphs {
pub dependants: DepGraph,
pub dependencies: DepGraph,
}
/// Creates a graph of dependencies for a workspace.
pub(super) fn workspace_graph(workspace: &CargoWorkspace) -> WorkspaceGraphs {
let mut dependencies = DepGraph::new();
let mut dependants = DepGraph::new();
for member in workspace.members() {
dependants.add(member.name().to_string());
dependencies.add(member.name().to_string());
}
for member in workspace.members() {
let pkg_name = member.name().to_string();
for dep in member.dependencies() {
let dep_name = dep.package_name().to_string();
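// Only link dependencies that are themselves workspace members; external crates are ignored.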
let is_member = workspace
.members()
.any(|pkg| pkg.name().as_str() == dep_name);
if is_member {
dependencies.link(pkg_name.clone(), dep_name.clone());
dependants.link(dep_name.clone(), pkg_name.clone());
}
}
}
WorkspaceGraphs {
dependants,
dependencies,
}
}
| true |
5ca8291c6a993c23b5a45b03c79d84800fc3c3e3 | Rust | citahub/cita | /cita-auth/src/history.rs | UTF-8 | 5,601 | 2.9375 | 3 | ["Apache-2.0"] | permissive |
// Copyright Rivtower Technologies LLC.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::collections::HashSet;
use util::instrument::{unix_now, AsMillis};
use util::BLOCKLIMIT;
#[derive(Debug, Default)]
pub struct HistoryHeights {
heights: HashSet<u64>,
max_height: u64,
min_height: u64,
is_init: bool,
last_timestamp: u64,
}
impl HistoryHeights {
pub fn new() -> Self {
HistoryHeights {
heights: HashSet::new(),
max_height: 0,
min_height: 0,
is_init: false,
// the initial value 0 means the first call is never treated as too frequent
last_timestamp: 0,
}
}
pub fn reset(&mut self) {
self.heights.clear();
self.max_height = 0;
self.min_height = 0;
self.is_init = false;
self.last_timestamp = 0;
}
pub fn update_height(&mut self, height: u64) {
// update 'min_height', 'max_height', 'heights'
if height < self.min_height {
trace!(
"height is small than min_height: {} < {}",
height,
self.min_height,
);
return;
} else if height > self.max_height {
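// A new maximum slides the window: keep only the last BLOCKLIMIT heights and drop the ones that fell out.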
self.max_height = height;
let old_min_height = self.min_height;
self.min_height = if height > BLOCKLIMIT {
height - BLOCKLIMIT + 1
} else {
0
};
self.heights.insert(height);
for i in old_min_height..self.min_height {
self.heights.remove(&i);
}
} else {
self.heights.insert(height);
}
// update 'is_init'
let mut is_init = true;
for i in self.min_height..self.max_height {
if !self.heights.contains(&i) {
is_init = false;
break;
}
}
self.is_init = is_init;
}
pub fn next_height(&self) -> u64 {
self.max_height + 1
}
pub fn is_init(&self) -> bool {
self.is_init
}
pub fn max_height(&self) -> u64 {
self.max_height
}
pub fn min_height(&self) -> u64 {
self.min_height
}
// at least wait 3s from latest update
pub fn is_too_frequent(&self) -> bool {
AsMillis::as_millis(&unix_now()) < self.last_timestamp + 3000
}
pub fn update_time_stamp(&mut self) {
// update time_stamp
self.last_timestamp = AsMillis::as_millis(&unix_now());
}
}
#[cfg(test)]
mod history_heights_tests {
use super::HistoryHeights;
#[test]
fn basic() {
let mut h = HistoryHeights::new();
assert_eq!(h.is_init(), false);
assert_eq!(h.next_height(), 1);
h.update_height(60);
assert_eq!(h.is_init(), false);
assert_eq!(h.next_height(), 61);
for i in 0..60 {
h.update_height(i);
}
assert_eq!(h.is_init(), true);
assert_eq!(h.next_height(), 61);
h.update_height(70);
assert_eq!(h.is_init(), false);
assert_eq!(h.next_height(), 71);
for i in 0..70 {
h.update_height(i);
}
assert_eq!(h.is_init(), true);
assert_eq!(h.next_height(), 71);
h.update_height(99);
assert_eq!(h.is_init(), false);
assert_eq!(h.next_height(), 100);
for i in 0..99 {
h.update_height(i);
}
assert_eq!(h.is_init(), true);
assert_eq!(h.next_height(), 100);
h.update_height(100);
assert_eq!(h.is_init(), true);
assert_eq!(h.next_height(), 101);
h.update_height(101);
assert_eq!(h.is_init(), true);
assert_eq!(h.next_height(), 102);
}
}
#[cfg(test)]
mod history_heights_quick_check {
use super::HistoryHeights;
use quickcheck::Arbitrary;
use quickcheck::Gen;
#[derive(Clone, Debug)]
struct HistoryHeightsArgs {
history_heights: Vec<u64>,
}
impl Arbitrary for HistoryHeightsArgs {
fn arbitrary<G: Gen>(g: &mut G) -> Self {
let mut heights_ranges: Vec<u64> = vec![];
for i in 0..201 {
heights_ranges.push(i);
}
let mut history_heights: Vec<u64> = vec![];
for _ in 0..200 {
let index = g.next_u64() as usize % heights_ranges.len();
history_heights.push(heights_ranges.remove(index));
}
HistoryHeightsArgs { history_heights }
}
}
quickcheck! {
fn prop(args: HistoryHeightsArgs) -> bool {
let mut h = HistoryHeights::new();
for i in args.history_heights {
h.update_height(i);
}
let min = h.min_height();
let mut sum: u64 = 0;
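// If the history is fully initialized, the heights are the consecutive values min..=max;
// with a window of 100 heights (implied by the expectation below) the offsets 1..=100 sum to 101 * 50 = 5050.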
for j in &h.heights {
sum += j - min + 1;
}
if h.is_init() {
sum == 101 * 50
} else {
sum != 101 * 50
}
}
}
}
| true |
a64a85d9974620ae3c20a749ae74649bccfc8dac | Rust | nixpulvis/tiger | /tiger-syntax/src/ast/expression.rs | UTF-8 | 6,517 | 3.359375 | 3 | [] | no_license |
use super::{Symbol, Variable, Declaration, Operation};
#[derive(Clone, PartialEq, Eq, Debug)]
pub enum Expression {
Nil,
Break,
Int(i32),
String(String),
Variable(Box<Variable>),
If {
test: Box<Expression>,
t: Box<Expression>,
f: Option<Box<Expression>>,
},
Sequence(Vec<Box<Expression>>),
Call {
ident: Symbol,
arguments: Vec<Box<Expression>>,
},
Operation {
op: Operation,
left: Box<Expression>,
right: Box<Expression>,
},
Record {
fields: Vec<(Symbol, Box<Expression>)>,
tdent: Symbol,
},
Assign {
variable: Box<Variable>,
expression: Box<Expression>,
},
While {
test: Box<Expression>,
body: Box<Expression>,
},
For {
ident: Symbol,
low: Box<Expression>,
high: Box<Expression>,
body: Box<Expression>,
},
Let {
declarations: Vec<Box<Declaration>>,
body: Box<Expression>,
},
Array {
tdent: Symbol,
size: Box<Expression>,
init: Box<Expression>,
},
}
#[cfg(test)]
mod tests {
use tiger;
use tok::Lexer;
use ast::{
Expression as E,
Variable as V,
Declaration as D,
Operation as O,
Type as T,
};
macro_rules! test {
($source:expr, $expected:expr) => {{
let lexer = Lexer::new($source);
let parse = tiger::parse_Expression($source, lexer);
assert_eq!($expected, *parse.expect("failed to parse"));
}};
}
#[test]
fn test_nil() {
test!("nil", E::Nil);
}
#[test]
fn test_break() {
test!("break", E::Break);
}
#[test]
fn test_int() {
test!("123", E::Int(123));
}
#[test]
fn test_string() {
test!(r#""hello""#, E::String("hello".into()));
}
#[test]
fn test_variable() {
let one = Box::new(E::Int(1));
let foo = Box::new(V::Simple("foo".into()));
let foo_bar = Box::new(V::Field(foo.clone(), "bar".into()));
let foo_sub = Box::new(V::Subscript(foo.clone(), one.clone()));
let foo_bar_sub = Box::new(V::Subscript(foo_bar.clone(), one.clone()));
test!("foo", E::Variable(foo.clone()));
test!("foo.bar", E::Variable(foo_bar.clone()));
test!("foo[1]", E::Variable(foo_sub.clone()));
test!("foo.bar[1]", E::Variable(foo_bar_sub.clone()));
}
#[test]
fn test_if() {
let one = Box::new(E::Int(1));
test!("if 1 then 1", E::If {
test: one.clone(),
t: one.clone(),
f: None,
});
test!("if 1 then 1 else 1", E::If {
test: one.clone(),
t: one.clone(),
f: Some(one.clone()),
});
}
#[test]
fn test_sequence() {
let one = Box::new(E::Int(1));
test!("()", E::Sequence(vec![]));
test!("(1)", E::Sequence(vec![one.clone()]));
test!("(1; 1)", E::Sequence(vec![one.clone(), one.clone()]));
}
#[test]
fn test_call() {
let one = Box::new(E::Int(1));
let foo = "foo".to_string();
test!("foo()", E::Call {
ident: foo.clone(),
arguments: vec![],
});
test!("foo(1)", E::Call {
ident: foo.clone(),
arguments: vec![one.clone()],
});
test!("foo(1, 1)", E::Call {
ident: foo.clone(),
arguments: vec![one.clone(), one.clone()],
});
}
#[test]
fn test_operation() {
macro_rules! test_operation {
($source:expr, $op:expr) => {
test!($source, E::Operation {
op: $op,
left: Box::new(E::Int(1)),
right: Box::new(E::Int(1)),
});
};
}
test_operation!("1 + 1", O::Plus);
test_operation!("1 - 1", O::Minus);
test_operation!("1 * 1", O::Times);
test_operation!("1 / 1", O::Divide);
test_operation!("1 = 1", O::Eq);
test_operation!("1 <> 1", O::Neq);
test_operation!("1 < 1", O::Lt);
test_operation!("1 <= 1", O::Le);
test_operation!("1 > 1", O::Gt);
test_operation!("1 >= 1", O::Ge);
}
#[test]
fn test_record() {
let one = Box::new(E::Int(1));
test!("posn { x=1, y=1 }", E::Record {
fields: vec![
("x".into(), one.clone()),
("y".into(), one.clone()),
],
tdent: "posn".into(),
});
}
#[test]
fn test_assign() {
let one = Box::new(E::Int(1));
let foo = Box::new(V::Simple("foo".into()));
let foo_bar = Box::new(V::Field(foo.clone(), "bar".into()));
test!("foo := 1", E::Assign {
variable: foo.clone(),
expression: one.clone(),
});
test!("foo.bar := 1", E::Assign {
variable: foo_bar.clone(),
expression: one.clone(),
});
}
#[test]
fn test_while() {
let one = Box::new(E::Int(1));
test!("while 1 do 1", E::While {
test: one.clone(),
body: one.clone(),
});
}
#[test]
fn test_for() {
let one = Box::new(E::Int(1));
test!("for foo := 1 to 1 do 1", E::For {
ident: "foo".into(),
low: one.clone(),
high: one.clone(),
body: one.clone(),
});
}
#[test]
fn test_let() {
let one = Box::new(E::Int(1));
let two = Box::new(E::Int(2));
let any = Box::new(T::Record(vec![("any".into(), "int".into())]));
let any_dec = Box::new(D::Type {
tdent: "any".into(),
ty: any,
});
let buffer_dec = Box::new(D::Variable {
ident: "buffer".into(),
tdent: None,
init: one.clone(),
});
let body = Box::new(E::Sequence(vec![one.clone(), two.clone()]));
test!(r###"
let
type any = {any : int}
var buffer := 1
in
1;
2
end
"###, E::Let {
declarations: vec![any_dec, buffer_dec],
body: body,
});
}
#[test]
fn test_array() {
let one = Box::new(E::Int(1));
test!("foo [1] of 1", E::Array {
tdent: "foo".into(),
size: one.clone(),
init: one.clone(),
});
}
}
| true |
4b3effbd9c988545e40a5dd87678bfc429b9f833
|
Rust
|
fridge-dev/lost-cities-game
|
/src/crates/bin-client/src/main.rs
|
UTF-8
| 2,233 | 3.03125 | 3 |
[] |
no_license
|
use client_engine::client_game_api::provider;
use bin_client::cli::smart_cli;
use bin_client::screens::main_menu;
use std::{env, process};
const DEFAULT_HOSTNAME: &str = "localhost";
const DEFAULT_PORT: u16 = 8051;
#[tokio::main]
async fn main() {
let (program_name, hostname, port) = get_cli_args();
// Connect to server and run game
let mut game_api = provider::new_frontend_game_api(hostname, port)
.await
.unwrap_or_else(|e| {
eprintln!("ERROR: {:?}", e);
eprintln!();
eprintln!("Failed to connect to the server. Are you sure you entered the right hostname? Is the server up?");
print_usage_exit(&program_name);
});
// Run game
let player_id = smart_cli::prompt_for_player_id().expect("This should never fail.");
loop {
let result = main_menu::handle_menu(&mut game_api, player_id.clone()).await;
if let Err(error) = result {
println!("UNHANDLED ERROR: Debug='{:?}', Display='{}'", error, error);
println!();
println!("I haven't implemented robust error handling yet, so your game is probably lost. Sorry.");
}
}
}
fn get_cli_args() -> (String, String, u16) {
let mut cli_args = env::args();
// Arg 0
let program_name = cli_args.next().unwrap_or_else(|| {
eprintln!("Program name is somehow missing? You should never see this.");
process::exit(1);
});
// Arg 1
let hostname = cli_args.next()
.unwrap_or_else(|| {
println!("Using default hostname '{}'", DEFAULT_HOSTNAME);
DEFAULT_HOSTNAME.to_owned()
});
// Arg 2
let port = cli_args.next()
.map(|port_str| port_str.parse().unwrap_or_else(|_| {
print_usage_exit(&program_name);
}))
.unwrap_or_else(|| {
println!("Using default port '{}'", DEFAULT_PORT);
DEFAULT_PORT
});
(program_name, hostname, port)
}
fn print_usage_exit(program_name: &str) -> ! {
eprintln!();
eprintln!("Usage: \t{} <server hostname> <port>", program_name);
eprintln!("Example:\t{} example-hostname.com 3000", program_name);
eprintln!();
process::exit(1);
}
| true |
b294e24de6c3d9e16537c1dd9c0f45a224d1dda4
|
Rust
|
ogham/charmander
|
/src/iter.rs
|
UTF-8
| 3,241 | 3.90625 | 4 |
[] |
no_license
|
//! Custom iterator for reading UTF-8 characters from strings.
//!
//! Our iterator differs from the `std::io::Chars` iterator as it is allowed
//! to return *invalid* UTF-8 characters, whereas `Chars` can only have the
//! entire string succeed or entirely fail. The only way this iterator can
//! fail is if there's an IO error. A normal program would be correct to throw
//! an error if an input string isn't valid UTF-8, but charmander should
//! definitely not be crashing from this!
use std::io::Read;
use std::io::Error as IOError;
use std::str::from_utf8;
use rustc_unicode::str::utf8_char_width;
/// Iterator over the UTF-8 characters in a string.
pub struct Chars<R> {
inner: R,
}
impl<R: Read> Chars<R> {
/// Create a new `Chars` iterator, based on the given inner iterator.
pub fn new(r: R) -> Chars<R> {
Chars { inner: r }
}
}
/// The byte buffer that's used when reading in characters.
pub enum ReadBytes {
/// Only one byte was necessary to determine success or failure.
FirstByte(u8),
/// More than one byte was necessary: this holds a four-byte buffer along
/// with the number of bytes actually taken up by the character.
WholeBuffer([u8; 4], usize)
}
/// A read from the stream without any IO errors.
pub enum ReadChar {
/// The character was valid UTF-8, so the character and the byte buffer
/// get returned.
Ok(char, ReadBytes),
/// The character was **not** valid UTF-8, so there's no `char` to return!
/// Just the buffer gets returned.
Invalid(ReadBytes),
}
impl<R: Read> Iterator for Chars<R> {
type Item = Result<ReadChar, IOError>;
fn next(&mut self) -> Option<Result<ReadChar, IOError>> {
// Read the first byte from the stream into a one-byte buffer.
let mut buf = [0];
let first_byte = match self.inner.read(&mut buf) {
Ok(0) => return None,
Ok(_) => buf[0],
Err(e) => return Some(Err(e)),
};
// Examine the byte to test:
// - whether it's a continuation byte as the first byte (an error);
// - whether it's a one-byte character and needs no further processing.
let read = ReadBytes::FirstByte(first_byte);
let width = match utf8_char_width(first_byte) {
0 => return Some(Ok(ReadChar::Invalid(read))),
1 => return Some(Ok(ReadChar::Ok(first_byte as char, read))),
w => w,
};
// There are no characters above four bytes, so anything above this
// is an error!
assert! { width <= 4 };
// Read in the rest of the bytes.
let mut buf = [first_byte, 0, 0, 0];
let mut start = 1;
while start < width {
match self.inner.read(&mut buf[start..width]) {
Ok(0) => return Some(Ok(ReadChar::Invalid(ReadBytes::WholeBuffer(buf, width)))),
Ok(n) => start += n,
Err(e) => return Some(Err(e)),
}
}
let read = ReadBytes::WholeBuffer(buf, width);
match from_utf8(&buf[..width]) {
Ok(s) => Some(Ok(ReadChar::Ok(s.char_at(0), read))),
Err(_) => Some(Ok(ReadChar::Invalid(read))),
}
}
}
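// Illustrative usage sketch (not part of the original file): iterating over an
// in-memory byte source that mixes valid and invalid UTF-8. Only items defined
// above are used; the test module name is an assumption.
#[cfg(test)]
mod usage_sketch {
    use super::{Chars, ReadChar};
    use std::io::Cursor;
    #[test]
    fn mixed_input() {
        // 'h', a lone invalid byte, then 'i'.
        let bytes: Vec<u8> = vec![b'h', 0xFF, b'i'];
        for result in Chars::new(Cursor::new(bytes)) {
            match result.expect("an in-memory cursor cannot produce IO errors") {
                ReadChar::Ok(c, _read) => assert!(c == 'h' || c == 'i'),
                ReadChar::Invalid(_read) => { /* the lone 0xFF byte */ }
            }
        }
    }
}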
| true |
12dc92fb61044fc9bf5b9faddaa2276499fdd96a
|
Rust
|
bigcommerce/message-format.rs
|
/src/value.rs
|
UTF-8
| 1,793 | 3.390625 | 3 |
[
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0",
"MIT"
] |
permissive
|
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::fmt;
/// A wrapper around a value, used with [`Args`] so that a [`MessagePart`]
/// can access the original value when necessary.
///
/// These are not typically created directly, but are created automatically
/// via the standard library's `From` trait when creating [`Args`].
///
/// [`Args`]: struct.Args.html
/// [`MessagePart`]: trait.MessagePart.html
#[derive(Debug, PartialEq)]
pub enum Value<'a> {
/// Wrap an `i64`.
Number(i64),
/// Wrap an `&str`.
Str(&'a str),
}
impl<'a> From<i32> for Value<'a> {
fn from(value: i32) -> Value<'a> {
Value::Number(i64::from(value))
}
}
impl<'a> From<u32> for Value<'a> {
fn from(value: u32) -> Value<'a> {
Value::Number(i64::from(value))
}
}
impl<'a> From<i64> for Value<'a> {
fn from(value: i64) -> Value<'a> {
Value::Number(value)
}
}
impl<'a> From<u64> for Value<'a> {
fn from(value: u64) -> Value<'a> {
Value::Number(value as i64)
}
}
impl<'a> From<usize> for Value<'a> {
fn from(value: usize) -> Value<'a> {
Value::Number(value as i64)
}
}
impl<'a> From<&'a str> for Value<'a> {
fn from(value: &'a str) -> Value<'a> {
Value::Str(value)
}
}
impl<'a> fmt::Display for Value<'a> {
/// Forward `fmt::Display` to the underlying value.
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
Value::Number(i) => i.fmt(f),
Value::Str(s) => s.fmt(f),
}
}
}
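// Illustrative sketch (not part of the original file): the From impls above let
// plain integers and string slices convert into Value transparently, and the
// Display impl forwards to the wrapped value.
#[cfg(test)]
mod conversion_examples {
    use super::Value;
    #[test]
    fn converts_numbers_and_strings() {
        let n: Value = 42u32.into();
        assert_eq!(n, Value::Number(42));
        let s: Value = "hello".into();
        assert_eq!(s, Value::Str("hello"));
        assert_eq!(format!("{}", s), "hello");
    }
}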
| true |
f158cea0a5dc31a755dc305d7495a6f7fdfd10c7
|
Rust
|
yo-sarawut/Nayuki-web-published-code
|
/disjoint-set-data-structure/disjointset.rs
|
UTF-8
| 6,186 | 3.21875 | 3 |
[] |
no_license
|
/*
* Disjoint-set data structure - Library (Rust)
*
* Copyright (c) 2020 Project Nayuki. (MIT License)
* https://www.nayuki.io/page/disjoint-set-data-structure
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of
* this software and associated documentation files (the "Software"), to deal in
* the Software without restriction, including without limitation the rights to
* use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
* the Software, and to permit persons to whom the Software is furnished to do so,
* subject to the following conditions:
* - The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
* - The Software is provided "as is", without warranty of any kind, express or
* implied, including but not limited to the warranties of merchantability,
* fitness for a particular purpose and noninfringement. In no event shall the
* authors or copyright holders be liable for any claim, damages or other
* liability, whether in an action of contract, tort or otherwise, arising from,
* out of or in connection with the Software or the use or other dealings in the
* Software.
*/
use std;
/*
* Represents a set of disjoint sets. Also known as the union-find data structure.
* Main operations are querying if two elements are in the same set, and merging two sets together.
* Useful for testing graph connectivity, and is used in Kruskal's algorithm.
*/
#[derive(Clone)]
pub struct DisjointSet {
numberofsets: usize,
nodes: Vec<DisjointSetNode>,
}
// Private helper structure.
#[derive(Clone, Copy)]
struct DisjointSetNode {
// The index of the parent element. An element is a representative iff its parent is itself.
parent: usize,
// Always in the range [0, floor(log2(NumberOfElements))]. Thus has a maximum value of 63.
rank: i8,
// Positive number if the element is a representative, otherwise zero.
size: usize,
}
impl DisjointSet {
// Constructs a new set containing the given number of singleton sets.
// For example, new DisjointSet(3) --> {{0}, {1}, {2}}.
pub fn new(numelems: usize) -> Self {
Self {
numberofsets: numelems,
nodes: (0 .. numelems).map(|i|
DisjointSetNode{
parent: i,
rank: 0,
size: 1,
}).collect(),
}
}
// Returns the number of elements among the set of disjoint sets; this was the number passed
// into the constructor and is constant for the lifetime of the object. All the other methods
// require the argument elemindex to satisfy 0 <= elemindex < number_of_elements().
pub fn number_of_elems(&self) -> usize {
self.nodes.len()
}
// The number of disjoint sets overall. This number decreases monotonically as time progresses;
// each call to merge_sets() either decrements the number by one or leaves it unchanged. 0 <= number_of_sets() <= number_of_elements().
pub fn number_of_sets(&self) -> usize {
self.numberofsets
}
// (Private) Returns the representative element for the set containing the given element. This method is also
// known as "find" in the literature. Also performs path compression, which alters the internal state to
// improve the speed of future queries, but has no externally visible effect on the values returned.
fn get_repr(&mut self, mut elemindex: usize) -> usize {
// Follow parent pointers until we reach a representative
let mut parent: usize = self.nodes[elemindex].parent;
loop {
let grandparent: usize = self.nodes[parent].parent;
if grandparent == parent {
return parent;
}
self.nodes[elemindex].parent = grandparent; // Partial path compression
elemindex = parent;
parent = grandparent;
}
}
// Returns the size of the set that the given element is a member of. 1 <= result <= number_of_elements().
pub fn get_size_of_set(&mut self, elemindex: usize) -> usize {
let repr = self.get_repr(elemindex);
self.nodes[repr].size
}
// Tests whether the given two elements are members of the same set. Note that the arguments are orderless.
pub fn are_in_same_set(&mut self, elemindex0: usize, elemindex1: usize) -> bool {
self.get_repr(elemindex0) == self.get_repr(elemindex1)
}
// Merges together the sets that the given two elements belong to. This method is also known as "union" in the literature.
// If the two elements belong to different sets, then the two sets are merged and the method returns true.
// Otherwise they belong in the same set, nothing is changed and the method returns false. Note that the arguments are orderless.
pub fn merge_sets(&mut self, elemindex0: usize, elemindex1: usize) -> bool {
// Get representatives
let mut repr0: usize = self.get_repr(elemindex0);
let mut repr1: usize = self.get_repr(elemindex1);
if repr0 == repr1 {
return false;
}
// Compare ranks
let cmp: i8 = self.nodes[repr0].rank - self.nodes[repr1].rank;
if cmp == 0 { // Increment repr0's rank if both nodes have same rank
let rank: &mut i8 = &mut self.nodes[repr0].rank;
*rank = rank.checked_add(1).unwrap();
} else if cmp < 0 { // Swap to ensure that repr0's rank >= repr1's rank
std::mem::swap(&mut repr0, &mut repr1);
}
// Graft repr1's subtree onto node repr0
self.nodes[repr1].parent = repr0;
self.nodes[repr0].size += self.nodes[repr1].size;
self.nodes[repr1].size = 0;
self.numberofsets -= 1;
true
}
// For unit tests. This detects many but not all invalid data structures, panicking if a
// structural invariant is known to be violated. This always returns silently on a valid object.
pub fn check_structure(&self) {
let mut numrepr: usize = 0;
for (i, node) in self.nodes.iter().enumerate() {
let isrepr: bool = node.parent == i;
numrepr = numrepr.checked_add(usize::from(isrepr)).unwrap();
assert!(node.parent < self.nodes.len());
assert!(0 <= node.rank && (isrepr || node.rank < self.nodes[node.parent].rank));
assert!(!isrepr && node.size == 0 || isrepr && node.size >= (1usize << node.rank));
}
assert_eq!(self.numberofsets, numrepr);
assert!(self.numberofsets <= self.nodes.len());
}
}
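// Illustrative usage sketch (not part of the original file): build three
// singleton sets, merge two of them, and query connectivity and sizes.
#[cfg(test)]
mod usage_example {
    use super::DisjointSet;
    #[test]
    fn basic_usage() {
        let mut ds = DisjointSet::new(3); // {{0}, {1}, {2}}
        assert!(!ds.are_in_same_set(0, 1));
        assert!(ds.merge_sets(0, 1)); // {{0, 1}, {2}}
        assert!(ds.are_in_same_set(0, 1));
        assert!(!ds.merge_sets(0, 1)); // already in the same set
        assert_eq!(ds.get_size_of_set(0), 2);
        assert_eq!(ds.number_of_sets(), 2);
        assert_eq!(ds.number_of_elems(), 3);
        ds.check_structure();
    }
}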
| true |
ca3e9677bf0a6fe45a6bd7146bbc766a62aa97a2
|
Rust
|
panicbit/fcm-rust
|
/src/lib.rs
|
UTF-8
| 1,991 | 3.046875 | 3 |
[
"MIT"
] |
permissive
|
#![doc(html_root_url = "https://panicbit.github.io/fcm-rust/fcm/")]
//! fcm
//! ===
//!
//! A client for asynchronous sending of Firebase Cloud Messages, or Push Notifications.
//!
//! # Examples:
//!
//! To send out a FCM Message with some custom data:
//!
//! ```no_run
//! # use std::collections::HashMap;
//! # #[tokio::main]
//! # async fn main() -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
//! let client = fcm::Client::new();
//!
//! let mut map = HashMap::new();
//! map.insert("message", "Howdy!");
//!
//! let mut builder = fcm::MessageBuilder::new("<FCM API Key>", "<registration id>");
//! builder.data(&map);
//!
//! let response = client.send(builder.finalize()).await?;
//! println!("Sent: {:?}", response);
//! # Ok(())
//! # }
//! ```
//!
//! To send a message using FCM Notifications, we first build the notification:
//!
//! ```rust
//! # fn main() {
//! let mut builder = fcm::NotificationBuilder::new();
//! builder.title("Hey!");
//! builder.body("Do you want to catch up later?");
//! let notification = builder.finalize();
//! # }
//! ```
//!
//! And then set it in the message, before sending it:
//!
//! ```no_run
//! # #[tokio::main]
//! # async fn main() -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
//! let client = fcm::Client::new();
//!
//! let mut notification_builder = fcm::NotificationBuilder::new();
//! notification_builder.title("Hey!");
//! notification_builder.body("Do you want to catch up later?");
//!
//! let notification = notification_builder.finalize();
//! let mut message_builder = fcm::MessageBuilder::new("<FCM API Key>", "<registration id>");
//! message_builder.notification(notification);
//!
//! let response = client.send(message_builder.finalize()).await?;
//! println!("Sent: {:?}", response);
//! # Ok(())
//! # }
//! ```
mod message;
pub use crate::message::*;
mod notification;
pub use crate::notification::*;
mod client;
pub use crate::client::*;
pub use crate::client::response::FcmError as Error;
| true |
181924957a71a7a7f507692393584b5476b77790
|
Rust
|
isgasho/kira
|
/kira/src/event.rs
|
UTF-8
| 568 | 3.265625 | 3 |
[
"MIT"
] |
permissive
|
/// An audio-related event that can be observed on the main thread.
#[derive(Debug, Copy, Clone)]
pub enum Event<CustomEvent: Send + 'static> {
/**
Sent when the metronome passes a certain interval (in beats).
For example, an event with an interval of `1.0` will be sent
every beat, and an event with an interval of `0.25` will be
sent every sixteenth note (one quarter of a beat).
The intervals that a metronome emits events for are defined
when the metronome is created.
*/
MetronomeIntervalPassed(f64),
/// A user-defined event.
Custom(CustomEvent),
}
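// Illustrative sketch (not part of the original file): how a main-thread
// consumer might dispatch these events. `MyEvent` is a hypothetical
// user-defined custom event type.
#[cfg(test)]
mod event_usage_sketch {
    use super::Event;
    #[derive(Debug, Copy, Clone)]
    enum MyEvent {
        Explosion,
    }
    fn handle(event: Event<MyEvent>) -> &'static str {
        match event {
            // e.g. an interval of 1.0 fires once per beat
            Event::MetronomeIntervalPassed(_interval) => "beat",
            Event::Custom(MyEvent::Explosion) => "boom",
        }
    }
    #[test]
    fn dispatch() {
        assert_eq!(handle(Event::MetronomeIntervalPassed(1.0)), "beat");
        assert_eq!(handle(Event::Custom(MyEvent::Explosion)), "boom");
    }
}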
| true |
bc67156fa44000ea5fdf1b984a128a42115627a0
|
Rust
|
cai-lw/ray-trace-rust
|
/src/rt/scene.rs
|
UTF-8
| 2,813 | 2.78125 | 3 |
[] |
no_license
|
use ::rt::*;
use super::material::{Material, BlackBody};
use super::surface::{Surface, SurfaceSide};
use super::light::{Light, Dark};
pub trait SceneObject {
fn surface(&self) -> &Surface;
fn material(&self) -> &Material;
fn light(&self) -> &Light;
}
struct NormalObject<S, M> {
surface: S,
material: M
}
impl<S, M> SceneObject for NormalObject<S, M> where
S: Surface,
M: Material
{
fn surface(&self) -> &Surface {
&self.surface
}
fn material(&self) -> &Material {
&self.material
}
fn light(&self) -> &Light {
&Dark
}
}
struct LightObject<S, L> {
surface: S,
light: L
}
impl<S, L> SceneObject for LightObject<S, L> where
S: Surface,
L: Light
{
fn surface(&self) -> &Surface {
&self.surface
}
fn material(&self) -> &Material {
&BlackBody
}
fn light(&self) -> &Light {
&self.light
}
}
pub trait Scene {
fn intersect_with_ray(&self, &Ray) -> Option<(&SceneObject, f32, SurfaceSide)>;
fn ray_trace(&self, ray: &Ray) -> Color {
use palette::named::WHITE;
let mut spectrum = Color::from_pixel(&WHITE);
let mut current_ray: Ray = ray.clone();
for _ in 0..10 {
if let Some((object, distance, side)) = self.intersect_with_ray(¤t_ray) {
let point = current_ray.src + current_ray.dir * distance;
let refl = -current_ray.dir;
let norm = object.surface().oriented_normal(point, side);
let (infl, f, p) = object.material().model(point).sample_f_p(refl, norm);
spectrum = spectrum * (f / p);
current_ray = Ray { src: point, dir: infl };
}
}
spectrum
}
}
pub struct SimpleScene {
objects: Vec<Box<SceneObject>>,
lights: Vec<Box<SceneObject>>
}
impl SimpleScene {
pub fn new() -> SimpleScene {
SimpleScene { objects: vec![], lights: vec![] }
}
pub fn push_object<S: Surface + 'static, M: Material + 'static>(&mut self, surface: S, material: M) {
self.objects.push(Box::new(NormalObject { surface, material }))
}
pub fn push_light<S: Surface + 'static, L: Light + 'static>(&mut self, surface: S, light: L) {
self.lights.push(Box::new(LightObject { surface, light }))
}
}
impl Scene for SimpleScene {
fn intersect_with_ray(&self, ray: &Ray) -> Option<(&SceneObject, f32, SurfaceSide)> {
use noisy_float::prelude::*;
self.objects.iter()
.chain(self.lights.iter())
.filter_map(|object|
object.surface().intersect_with_ray(ray)
.map(|(distance, side)| (object.as_ref(), distance, side))
).min_by_key(|&(_, distance, _)| r32(distance))
}
}
| true |
284199f256643bd7cd0e4a77ec1736b0d4cd518f
|
Rust
|
mqudsi/tokio-stdin
|
/src/main.rs
|
UTF-8
| 2,329 | 2.859375 | 3 |
[] |
no_license
|
#![feature(test)]
use std::io::prelude::*;
use std::time::{Duration, Instant};
use tokio::io::AsyncReadExt;
type BoxedError = Box<dyn std::error::Error + Send + Sync>;
type Result<T, E = BoxedError> = core::result::Result<T, E>;
/// Have producer generate content at roughly this rate.
const BITS_PER_SEC: usize = 24_000_000;
enum Mode {
Producer,
StdConsumer,
TokioConsumer,
}
fn usage_error() -> ! {
eprintln!("Usage: tokio-stdin --producer | tokio-stdin [--tokio-consumer|--std-consumer]");
std::process::exit(-1);
}
#[tokio::main]
async fn main() {
let arg = std::env::args().skip(1).next();
let mode = match arg.as_ref().map(|s| s.as_str()) {
Some("--producer") => Mode::Producer,
Some("--tokio-consumer") => Mode::TokioConsumer,
Some("--std-consumer") => Mode::StdConsumer,
_ => usage_error(),
};
match mode {
Mode::Producer => producer().await,
Mode::TokioConsumer => tokio_consumer().await,
Mode::StdConsumer => std_consumer().await,
}
.unwrap();
}
async fn producer() -> Result<()> {
const INTERVAL: Duration = Duration::from_secs(1);
let ts_buffer = [b'G'; 188 * 1000];
let dest = std::io::stdout();
let mut dest = dest.lock();
loop {
let start = Instant::now();
let mut bytes_written = 0usize;
tokio::task::block_in_place(|| -> Result<()> {
while bytes_written < BITS_PER_SEC / 8 {
dest.write_all(&ts_buffer)?;
bytes_written += ts_buffer.len();
}
Ok(())
})?;
// tokio::task::block_in_place(|| dest.flush())?;
let duration = (start + INTERVAL) - Instant::now();
tokio::time::sleep(duration).await;
}
}
async fn tokio_consumer() -> Result<()> {
let mut src = tokio::io::stdin();
let mut ts_buffer = [0u8; 188];
loop {
src.read_exact(&mut ts_buffer).await?;
core::hint::black_box(&mut ts_buffer);
}
}
async fn std_consumer() -> Result<()> {
let src = std::io::stdin();
let mut src = src.lock();
let mut ts_buffer = [0u8; 188 * 8];
loop {
// tokio::task::block_in_place(|| src.read_exact(&mut ts_buffer))?;
src.read_exact(&mut ts_buffer)?;
core::hint::black_box(&mut ts_buffer);
}
}
| true |
8ceaa6c93e9ba998965ced1e8dd1535f3df17d17
|
Rust
|
drahnr/yubihsm-rs
|
/src/algorithm/template.rs
|
UTF-8
| 911 | 2.953125 | 3 |
[
"MIT",
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
use super::{Algorithm, AlgorithmError, AlgorithmErrorKind::TagInvalid};
/// Template algorithms (for SSH)
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
#[allow(non_camel_case_types)]
#[repr(u8)]
pub enum TemplateAlg {
/// template-ssh
SSH = 0x24,
}
impl TemplateAlg {
/// Convert an unsigned byte tag into a `TemplateAlgorithm` (if valid)
pub fn from_u8(tag: u8) -> Result<Self, AlgorithmError> {
Ok(match tag {
0x24 => TemplateAlg::SSH,
_ => fail!(
TagInvalid,
"unknown SSH template algorithm ID: 0x{:02x}",
tag
),
})
}
/// Serialize algorithm ID as a byte
pub fn to_u8(self) -> u8 {
self as u8
}
}
impl From<TemplateAlg> for Algorithm {
fn from(alg: TemplateAlg) -> Algorithm {
Algorithm::Template(alg)
}
}
impl_algorithm_serializers!(TemplateAlg);
| true |
ff2ed0ec6929da649035ea81d4b39ff2c41980d8
|
Rust
|
kangalio/thiserror_lite
|
/src/lib.rs
|
UTF-8
| 9,229 | 2.5625 | 3 |
[] |
no_license
|
// #![no_std]
#![allow(unused)]
#[doc(hidden)]
#[macro_export]
macro_rules! _internal_1 {
// `Enum::Variant`: Empty variant
($self_:ident, $fmt:ident, $desc:literal, $variant_name:ident) => {
if let Self::$variant_name = $self_ {
return write!($fmt, $desc);
}
};
// `Enum::Variant()`: Empty tuple variant
($self_:ident, $fmt:ident, $desc:literal, $variant_name:ident ()) => {
if let Self::$variant_name() = $self_ {
return write!($fmt, concat!($desc, ""));
}
};
// `Enum::Variant(i32)`: Tuple variant with one field
($self_:ident, $fmt:ident, $desc:literal, $variant_name:ident ( $t1:ty )) => {
if let Self::$variant_name(a) = $self_ {
return write!($fmt, concat!($desc, "{0:.0}"), a);
}
};
// `Enum::Variant(i32, i32)`: Tuple variant with two fields
($self_:ident, $fmt:ident, $desc:literal, $variant_name:ident ( $t1:ty, $t2:ty )) => {
if let Self::$variant_name(a, b) = $self_ {
return write!($fmt, concat!($desc, "{0:.0}{1:.0}"), a, b);
}
};
// `Enum::Variant(i32, i32, i32)`: Tuple variant with three fields
($self_:ident, $fmt:ident, $desc:literal, $variant_name:ident ( $t1:ty, $t2:ty, $t3:ty )) => {
if let Self::$variant_name(a, b, c) = $self_ {
return write!($fmt, concat!($desc, "{0:.0}{1:.0}{2:.0}"), a, b, c);
}
};
// `Enum::Variant { f1: i32, f2: i32, ... }`: Struct variant with zero or more fields
($self_:ident, $fmt:ident, $desc:literal, $variant_name:ident { $(
$i:ident: $t:ty
),* }) => {
if let Self::$variant_name { $($i),* } = $self_ {
return write!($fmt, concat!($desc, $("{", stringify!($i), ":.0}"),*), $($i = $i),*);
}
};
($self_:ident, $fmt:ident, transparent, $variant_name:ident ( $t:ty )) => {
if let Self::$variant_name(a) = $self_ {
return write!($fmt, "{}", a); // is this correct?
}
};
($self_:ident, $fmt:ident, transparent, $variant_name:ident { $f:ident: $t:ty }) => {
if let Self::$variant_name { $f } = $self_ {
return write!($fmt, "{}", $f); // is this correct?
}
};
($self_:ident, $fmt:ident, transparent, $variant_name:ident
$(())?
$({})?
$(($tt1:ty, $(tt2:ty),+))?
$(($sf1:ident: $st1:ty, $($sf2:ident: $st2:ty),+))?
) => {
compile_error!("Exactly one field must be present in order to use #[error(transparent)]");
}
}
#[doc(hidden)]
#[macro_export]
macro_rules! _internal_2 {
($e:ident, $variant_name:ident (#[from] $t:ty)) => {
impl From<$t> for $e {
fn from(x: $t) -> $e {
$e::$variant_name(x)
}
}
};
($e:ident, $variant_name:ident { #[from] $field:ident: $type_:ty}) => {
impl From<$type_> for $e {
fn from(x: $type_) -> $e {
$e::$variant_name { $field: x }
}
}
};
($e:ident, $variant_name:ident $(($(
// Note that this matches source, because even though these fields may not have #[from],
// they still may have #[source]
$(#[source])? $tt:ty
),*))? $({$(
$(#[source])? $sf:ident: $st:ty
),*})?) => {
// This is just a regular unit, tuple, or struct variant without any #[from] fields
// So we're not generating anything
};
// This matches any tuple variant with or without an arbitrary amount of
// #[from] fields. However, any valid usages of #[from] will have been cought by the above macro
// rules, so this rule only kicks in if usage was invalid
($e:ident, $variant_name:ident ( $( $(#[from])? $tt:ty ),* ) ) => {
compile_error!("Can't use #[from] in variants with multiple fields");
};
// Like above, but for struct variants instead of tuple variants
($e:ident, $variant_name:ident { $( $(#[from])? $sf:ident: $st:ty ),* } ) => {
compile_error!("Can't use #[from] in variants with multiple fields");
};
}
#[doc(hidden)]
#[macro_export]
macro_rules! _internal_3 {
($self_:ident, $variant_name:ident (
#[$source_or_from:meta] $t1:ty
$( , $t_rest:ty )*
) ) => {
if let Self::$variant_name(x, ..) = $self_ {
return Some(x as _);
}
};
($self_:ident, $variant_name:ident (
$t1:ty,
#[$source_or_from:meta] $t2:ty
$( , $t_rest:ty )*
) ) => {
if let Self::$variant_name(_, x, ..) = $self_ {
return Some(x as _);
}
};
($self_:ident, $variant_name:ident (
$t1:ty,
$t2:ty,
#[$source_or_from:meta] $t3:ty
$( , $t_rest:ty )*
) ) => {
if let Self::$variant_name(_, _, x, ..) = $self_ {
return Some(x as _);
}
};
($self_:ident, $variant_name:ident {
$( $f_pre:ident: $t_pre:ty, )*
#[$source_or_from:meta] $f:ident: $t:ty
$( , $f_post:ident: $t_post:ty )*
} ) => {
if let Self::$variant_name { $f, .. } = $self_ {
return Some($f as _);
}
};
// Matches tuple variant with two or more #[source]/#[from]
($self_:ident, $variant_name:ident (
$( $t1:ty, )*
#[$source_or_from_1:meta] $t2:ty,
$( $t3:ty, )*
#[$source_or_from_2:meta] $t4:ty
$( , $t5:ty )*
) ) => {
compile_error!("Can't have multiple #[source] or #[from] in a single variant");
};
// Matches struct variants with two or more #[source]/#[from]
($self_:ident, $variant_name:ident {
$( $f1:ident: $t1:ty, )*
#[$source_or_from_1:meta] $f2:ident: $t2:ty,
$( $f3:ident: $t3:ty, )*
#[$source_or_from_2:meta] $f4:ident: $t4:ty
$( , $f5:ident: $t5:ty )*
} ) => {
compile_error!("Can't have multiple #[source] or #[from] in a single variant");
};
($self_:ident, $variant_name:ident $(($(
$tt:ty
),*))? $({$(
$sf:ident: $st:ty
),*})?) => {
// This is just a regular unit, tuple, or struct variant without any #[source] fields
// So we're not generating anything
};
}
macro_rules! check_that_its_from_or_source {
(from) => {};
(source) => {};
($smth_else:meta) => {
compile_error!(concat!(
"Unknown attribute \"",
stringify!($smth_else),
"\""
));
};
}
#[macro_export]
macro_rules! err_enum {
// remember that $vis matches even nothing. No need to enclose in $()? or anything like that
($(#[$error_attribute:meta])* $vis:vis enum $error_type_name:ident { $(
#[error($desc:tt)]
$variant_name:ident
$({$(
$(#[$sattr:ident])? $sf:ident: $st:ty
),* $(,)?})?
$(($(
$(#[$tattr:ident])? $tt:ty
),* $(,)?))?
),* $(,)? }) => {
$(#[$error_attribute])*
$vis enum $error_type_name {
$(
$variant_name $({$($sf: $st),*})? $(($($tt),*))?
),*
}
// check that all the attributes are either #[from] or #[source]. This assumption later
// allows the internal macros to skip handling the unknown attribute error case
$( // for each variant
$( // if it's a struct variant
$( // for each variant field
$( // if field has attribute
check_that_its_from_or_source!($sattr);
)?
)*
)?
)*
impl core::fmt::Display for $error_type_name {
fn fmt(&self, formatter: &mut core::fmt::Formatter) -> core::fmt::Result {
#![allow(irrefutable_let_patterns)] // happens when there's just one variant
$( // for each variant
$crate::_internal_1!(self, formatter, $desc, $variant_name $({$($sf: $st),*})? $(($($tt),*))?);
)*
// Each enum variant is covered by an if-let-return
unreachable!()
}
}
$( // for each variant
$crate::_internal_2!(
$error_type_name,
$variant_name
$({$(
$(#[$sattr])? $sf: $st
),*})?
$(($(
$(#[$tattr])? $tt
),*))?
);
)*
impl std::error::Error for $error_type_name {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
#![allow(irrefutable_let_patterns)] // happens when there's just one variant
$( // for each variant
$crate::_internal_3!(
self,
$variant_name
$( // if it's a struct variant
{
$( // for each variant field
$( #[$sattr] )?
$sf: $st
),*
}
)?
$( // if it's a tuple variant
(
$( // for each variant field
$( #[$tattr] )?
$tt
),*
)
)?
);
)*
// If this variant doesn't have a #[source] or #[from], we fall through to here
None
}
}
};
}
#[doc(hidden)]
#[macro_export]
macro_rules! dbg {
($($t:tt)*) => {
compile_error!(stringify!($($t)*));
}
}
#[cfg(test)]
#[test]
fn test_main() {
err_enum! {
#[derive(Debug, Clone)]
pub enum Error {
#[error("This is a simple error")]
SimpleErr,
#[error("This is a simple tuple error")]
SimpleErr2(),
#[error("This is a simple struct error")]
SimpleErr3 {},
#[error("Some other error {} {}")]
Tuple1ErrWhat(i32, u32),
#[error("Some other error {hello}. We're not printing world here >:)")]
BraceThingy {
hello: String,
world: std::num::ParseIntError,
},
#[error("Some other error {0}. We're not printing world here >:)")]
BraceThingyTuple(String, #[source] std::num::ParseIntError),
#[error("See: \"{inner}\"")]
StringError {
#[from] inner: std::num::ParseIntError,
},
#[error("yeah. gotta use ehhhh, u32 cuz String is already taken")]
StringErrorTuple(#[from] std::num::ParseFloatError ),
#[error(transparent)]
ForwardError(#[from] std::num::TryFromIntError ),
#[error(transparent)]
ForwardError2 { inner: std::num::ParseIntError },
}
}
}
// TODO: rename "sf" -> "struct field"; "tt" -> "tuple type"; etc.
// TODO: #[error(transparent)] for #[source]
// TODO: support error structs
// TODO: add custom expressions behind error format string
| true |
9b4ddf33b988d61b3db4070b88d768caf3d3ceaa
|
Rust
|
Eoghanmc22/paxy-rs
|
/crates/utils/src/sendable.rs
|
UTF-8
| 3,618 | 2.640625 | 3 |
[
"Apache-2.0"
] |
permissive
|
use bytes::{BufMut, Buf};
use crate::buffers::{VarInts, VarIntsMut, Strings, StringsMut, Bools, BoolsMut};
use crate::set_vec_len;
use std::ops::{Deref, DerefMut};
use crate::indexed_vec::IndexedVec;
pub struct Vari32 {
pub val: i32
}
pub struct InferLenVec {
pub inner: IndexedVec<u8>
}
impl Deref for InferLenVec {
type Target = IndexedVec<u8>;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl DerefMut for InferLenVec {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
pub trait Sendable {
fn read(buffer: &mut dyn Buf) -> Self;
fn write(buffer: &mut dyn BufMut, data: &Self);
}
impl Sendable for Vari32 {
fn read(mut buffer: &mut dyn Buf) -> Self {
Vari32 { val: buffer.get_var_i32().0 }
}
fn write(mut buffer: &mut dyn BufMut, data: &Self) {
buffer.put_var_i32(data.val)
}
}
impl Sendable for i32 {
fn read(buffer: &mut dyn Buf) -> Self {
buffer.get_i32()
}
fn write(buffer: &mut dyn BufMut, data: &Self) {
buffer.put_i32(*data)
}
}
impl Sendable for u16 {
fn read(buffer: &mut dyn Buf) -> Self {
buffer.get_u16()
}
fn write(buffer: &mut dyn BufMut, data: &Self) {
buffer.put_u16(*data)
}
}
impl Sendable for u128 {
fn read(buffer: &mut dyn Buf) -> Self {
buffer.get_u128()
}
fn write(buffer: &mut dyn BufMut, data: &Self) {
buffer.put_u128(*data)
}
}
impl Sendable for i16 {
fn read(buffer: &mut dyn Buf) -> Self {
buffer.get_i16()
}
fn write(buffer: &mut dyn BufMut, data: &Self) {
buffer.put_i16(*data)
}
}
impl Sendable for bool {
fn read(mut buffer: &mut dyn Buf) -> Self {
buffer.get_bool()
}
fn write(mut buffer: &mut dyn BufMut, data: &Self) {
buffer.put_bool(*data)
}
}
impl Sendable for f64 {
fn read(buffer: &mut dyn Buf) -> Self {
buffer.get_f64()
}
fn write(buffer: &mut dyn BufMut, data: &Self) {
buffer.put_f64(*data)
}
}
impl Sendable for u64 {
fn read(buffer: &mut dyn Buf) -> Self {
buffer.get_u64()
}
fn write(buffer: &mut dyn BufMut, data: &Self) {
buffer.put_u64(*data)
}
}
impl Sendable for i64 {
fn read(buffer: &mut dyn Buf) -> Self {
buffer.get_i64()
}
fn write(buffer: &mut dyn BufMut, data: &Self) {
buffer.put_i64(*data)
}
}
impl Sendable for String {
fn read(mut buffer: &mut dyn Buf) -> Self {
buffer.get_string()
}
fn write(mut buffer: &mut dyn BufMut, data: &Self) {
buffer.put_string(data)
}
}
impl Sendable for IndexedVec<u8> {
fn read(mut buffer: &mut dyn Buf) -> Self {
let mut vec = Vec::new();
let len = buffer.get_var_i32().0 as usize;
set_vec_len(&mut vec, len);
buffer.copy_to_slice(&mut vec);
let mut buf = IndexedVec::from_vec(vec);
buf.set_writer_index(len);
buf
}
fn write(mut buffer: &mut dyn BufMut, data: &Self) {
buffer.put_var_i32(data.remaining() as i32);
buffer.put_slice(data.as_slice());
}
}
impl Sendable for InferLenVec {
fn read(buffer: &mut dyn Buf) -> Self {
let mut vec = Vec::new();
let len = buffer.remaining();
set_vec_len(&mut vec, len);
buffer.copy_to_slice(&mut vec);
let mut buf = InferLenVec { inner: IndexedVec::from_vec(vec) };
buf.set_writer_index(len);
buf
}
fn write(buffer: &mut dyn BufMut, data: &Self) {
buffer.put_slice(data.as_slice());
}
}
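// Illustrative sketch (not part of the original file): a hypothetical compound
// packet built from the primitives above, read and written field by field in
// the same order. `Handshake` and its field layout are assumptions.
pub struct Handshake {
    pub protocol_version: Vari32,
    pub address: String,
    pub port: u16,
}
impl Sendable for Handshake {
    fn read(buffer: &mut dyn Buf) -> Self {
        Handshake {
            protocol_version: <Vari32 as Sendable>::read(buffer),
            address: <String as Sendable>::read(buffer),
            port: <u16 as Sendable>::read(buffer),
        }
    }
    fn write(buffer: &mut dyn BufMut, data: &Self) {
        <Vari32 as Sendable>::write(buffer, &data.protocol_version);
        <String as Sendable>::write(buffer, &data.address);
        <u16 as Sendable>::write(buffer, &data.port);
    }
}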
| true |
c0bb293c63f8ab5e8b536ad9ea07df3b213ce08a
|
Rust
|
RustStudy/grokking-deep-learning-rs
|
/examples/chapter4.rs
|
UTF-8
| 6,204 | 3.28125 | 3 |
[
"Apache-2.0",
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
//! Chapter4 - Gradient Descent - Intro to Neural Learning
//!
//! https://github.com/iamtrask/Grokking-Deep-Learning/blob/master/Chapter4%20-%20Gradient%20Descent%20-%20Intro%20to%20Neural%20Learning.ipynb
fn main() {
println!("\nLearning using hot and cold method\n");
hot_and_cold_method();
println!("\nHot and Cold Learning\n");
hot_and_cold_learning();
println!("\nCalculating both direction and amount from error.\n");
hot_and_cold_learning_with_direction_and_amount();
println!("\nOne Iteration of Gradient Descent\n");
gradient_descent_method();
println!("\nLearning is just reducing error\n");
gradient_descent();
println!("\nLet's watch several steps of learning\n");
gradient_descent_2();
println!("\nWhy does this work? What really is weight delta?\n");
gradient_descent_3();
println!("\nBreaking Gradient Descent\n");
gradient_descent_working();
println!();
gradient_descent_breaking();
println!("\nAlpha\n");
gradient_descent_working_again();
}
/// Learning using hot and cold method
#[allow(unused_assignments)]
fn hot_and_cold_method() {
let (mut weight, lr) = (0.1, 0.01);
let (number_of_toes, win_or_lose_binary) = ([8.5], [1.0]);
let (input, truth) = (number_of_toes[0], win_or_lose_binary[0]);
let pred = neural_network(input, weight);
let err = (pred - truth).powf(2.0);
println!("error: {}", err);
let (pred_up, pred_down) = (
neural_network(input, weight + lr),
neural_network(input, weight - lr),
);
let (err_up, err_down) = ((pred_up - truth).powf(2.0), (pred_down - truth).powf(2.0));
println!("error up: {}, error down: {}", err_up, err_down);
if err_up < err_down {
weight += lr;
} else {
weight -= lr;
}
}
/// Hot and Cold Learning
fn hot_and_cold_learning() {
let mut weight = 0.5;
let (input, truth) = (0.5, 0.8);
let n_iterations = 20;
let lr = 0.001;
for _ in 0..n_iterations {
let pred = neural_network(input, weight);
let err = (pred - truth).powf(2.0);
println!("Error: {}, Prediction: {}", err, pred);
let (pred_up, pred_down) = (
neural_network(input, weight + lr),
neural_network(input, weight - lr),
);
let (err_up, err_down) = ((pred_up - truth).powf(2.0), (pred_down - truth).powf(2.0));
if err_up < err_down {
weight += lr;
} else if err_up > err_down {
weight -= lr;
}
}
}
/// Calculating both direction and amount from error.
fn hot_and_cold_learning_with_direction_and_amount() {
let mut weight = 0.5;
let (input, truth) = (0.5, 0.8);
let n_iterations = 1101;
for _ in 0..n_iterations {
let pred = neural_network(input, weight);
let err = (pred - truth).powf(2.0);
println!("Error: {}, Prediction: {}", err, pred);
let direction_and_amount = (pred - truth) * input;
weight -= direction_and_amount;
}
}
/// One Iteration of Gradient Descent
#[allow(unused_variables, unused_assignments)]
fn gradient_descent_method() {
let (mut weight, alpha) = (0.1, 0.01);
let (number_of_toes, win_or_lose_binary) = ([8.5], [1.0]);
let (input, truth) = (number_of_toes[0], win_or_lose_binary[0]);
    let pred = neural_network(input, weight);
let err = (pred - truth).powf(2.0);
let delta = pred - truth;
let weight_delta = input * delta;
let alpha = 0.01;
weight -= weight_delta * alpha;
}
fn neural_network(input: f64, weight: f64) -> f64 {
input * weight
}
/// Learning is just reducing error
fn gradient_descent() {
let (mut weight, truth, input) = (0.0, 0.8, 0.5);
for _ in 0..4 {
let pred = neural_network(input, weight);
let err = (pred - truth).powf(2.0);
println!("Error: {}, Prediction: {}", err, pred);
let delta = pred - truth;
let weight_delta = delta * input;
weight -= weight_delta;
}
}
/// Let's watch several steps of learning.
fn gradient_descent_2() {
let (mut weight, truth, input) = (0.0, 0.8, 1.1);
for _ in 0..4 {
println!("------\nWeight: {}", weight);
let pred = neural_network(input, weight);
let err = (pred - truth).powf(2.0);
println!("Error: {}, Prediction: {}", err, pred);
let delta = pred - truth;
let weight_delta = delta * input;
weight -= weight_delta;
println!("Delta: {}, Weight Delta: {}", delta, weight_delta);
}
}
/// Why does this work? What really is weight delta?
fn gradient_descent_3() {
let (mut weight, truth, input) = (0.0, 0.8, 1.1);
for _ in 0..20 {
let pred = neural_network(input, weight);
let err = (pred - truth).powf(2.0);
println!("Error: {}, Prediction: {}", err, pred);
let delta = pred - truth;
let weight_delta = delta * input;
weight -= weight_delta;
}
}
/// Breaking Gradient Descent
fn gradient_descent_working() {
let (mut weight, truth, input) = (0.5, 0.8, 0.5);
for _ in 0..20 {
let pred = neural_network(input, weight);
let err = (pred - truth).powf(2.0);
println!("Error: {}, Prediction: {}", err, pred);
let delta = pred - truth;
let weight_delta = delta * input;
weight -= weight_delta;
}
}
fn gradient_descent_breaking() {
let (mut weight, truth, input) = (0.5, 0.8, 2.0);
for _ in 0..20 {
let pred = neural_network(input, weight);
let err = (pred - truth).powf(2.0);
println!("Error: {}, Prediction: {}", err, pred);
let delta = pred - truth;
let weight_delta = delta * input;
weight -= weight_delta;
}
}
/// Alpha
fn gradient_descent_working_again() {
let (mut weight, truth, input) = (0.5, 0.8, 2.0);
let alpha = 0.1;
for _ in 0..20 {
let pred = neural_network(input, weight);
let err = (pred - truth).powf(2.0);
println!("Error: {}, Prediction: {}", err, pred);
let delta = pred - truth;
let weight_delta = delta * input;
weight -= alpha * weight_delta;
}
}
| true |
0ccc3439c312bcdd2cae70d5b3498650c56fba49
|
Rust
|
guoyucode/rbatis
|
/src/engine/runtime.rs
|
UTF-8
| 8,198 | 2.8125 | 3 |
[
"Apache-2.0"
] |
permissive
|
use std::collections::HashMap;
use std::collections::linked_list::LinkedList;
use std::sync::RwLock;
use serde_json::Value;
use crate::engine::node::Node;
use crate::engine::parser::parser;
lazy_static! {
    /// Engine cache: if an expression is not cached, it is simply re-parsed rather than blocking while waiting for the cache.
    /// Global expression cache; guarded by an RwLock that is only accessed with non-blocking try_read/try_write.
static ref EXPR_CACHE: RwLock<HashMap<String, Node>> = RwLock::new(HashMap::new());
}
/// the express engine for exe code on runtime
#[derive(Clone, Debug)]
pub struct RbatisEngine {
pub opt_map: OptMap<'static>,
}
impl RbatisEngine {
pub fn new() -> Self {
return Self {
opt_map: OptMap::new(),
};
}
    /// Evaluate an expression against the given argument value; if the parsed AST is already cached, parsing is skipped.
pub fn eval(&self, expr: &str, arg: &Value) -> Result<Value, rbatis_core::Error> {
let mut lexer_arg = expr.to_string();
if expr.find(" and ").is_some() {
lexer_arg = lexer_arg.replace(" and ", " && ");
}
let cached = self.cache_read(lexer_arg.as_str());
if cached.is_none() {
let nodes = parser(lexer_arg.to_string(), &self.opt_map);
if nodes.is_err() {
return Result::Err(nodes.err().unwrap());
}
let node = nodes.unwrap();
self.cache_insert(lexer_arg.to_string(), node.clone());
return node.eval(arg);
} else {
let nodes = cached.unwrap();
return nodes.eval(arg);
}
}
    /// Read from the cache; returns None if the entry does not exist or the lock cannot be acquired.
fn cache_read(&self, arg: &str) -> Option<Node> {
let cache_read = EXPR_CACHE.try_read();
if cache_read.is_err() {
return Option::None;
}
let cache_read = cache_read.unwrap();
let r = cache_read.get(arg);
return if r.is_none() {
Option::None
} else {
r.cloned()
};
}
    /// Save to the cache; if the write lock cannot be acquired, an error is returned and the cache is left unchanged.
fn cache_insert(&self, key: String, node: Node) -> Result<(), rbatis_core::Error> {
let cache_write = EXPR_CACHE.try_write();
if cache_write.is_err() {
return Err(rbatis_core::Error::from(cache_write.err().unwrap().to_string()));
}
let mut cache_write = cache_write.unwrap();
cache_write.insert(key, node);
return Ok(());
}
/// no cache mode to run engine
pub fn eval_no_cache(&self, lexer_arg: &str, arg: &Value) -> Result<Value, rbatis_core::Error> {
let nodes = parser(lexer_arg.to_string(), &self.opt_map);
if nodes.is_err() {
return Result::Err(nodes.err().unwrap());
}
let node = nodes.unwrap();
return node.eval(arg);
}
}
pub fn is_number(arg: &String) -> bool {
let chars = arg.chars();
for item in chars {
if item == '.' ||
item == '0' ||
item == '1' ||
item == '2' ||
item == '3' ||
item == '4' ||
item == '5' ||
item == '6' ||
item == '7' ||
item == '8' ||
item == '9'
{
// nothing do
} else {
return false;
}
}
return true;
}
/// Parse the raw string into a list of tokens with surrounding whitespace removed.
pub fn parser_tokens(s: &String, opt_map: &OptMap) -> Vec<String> {
let chars = s.chars();
let chars_len = s.len() as i32;
let mut result = LinkedList::new();
//str
let mut find_str = false;
let mut temp_str = String::new();
//opt
let mut temp_arg = String::new();
let mut index: i32 = -1;
for item in chars {
index = index + 1;
let is_opt = opt_map.is_opt(item.to_string().as_str());
if item == '\'' || item == '`' {
if find_str {
                // second quote found: the string literal is complete
find_str = false;
temp_str.push(item);
trim_push_back(&temp_str, &mut result);
temp_str.clear();
continue;
}
find_str = true;
temp_str.push(item);
continue;
}
if find_str {
temp_str.push(item);
continue;
}
if item != '`' && item != '\'' && is_opt == false && !find_str {
//need reset
temp_arg.push(item);
if (index + 1) == chars_len {
trim_push_back(&temp_arg, &mut result);
}
} else {
trim_push_back(&temp_arg, &mut result);
temp_arg.clear();
}
//opt node
if is_opt {
//println!("is opt:{}", item);
if result.len() > 0 {
let def = String::new();
let back = result.back().unwrap_or(&def).clone();
if back != "" && opt_map.is_opt(back.as_str()) {
result.pop_back();
let mut new_item = back.clone().to_string();
new_item.push(item);
trim_push_back(&new_item, &mut result);
continue;
}
}
trim_push_back(&item.to_string(), &mut result);
continue;
}
}
let mut v = vec![];
for item in result {
v.push(item);
}
return v;
}
fn trim_push_back(arg: &String, list: &mut LinkedList<String>) {
let trim_str = arg.trim().to_string();
if trim_str.is_empty() {
return;
}
list.push_back(trim_str);
}
#[derive(Clone, Debug)]
pub struct OptMap<'a> {
    // ordered list of all operators
    pub list: Vec<&'a str>,
    // all operators
    pub map: HashMap<&'a str, bool>,
    // multi-character (compound) operators
    pub mul_ops_map: HashMap<&'a str, bool>,
    // single-character operators
    pub single_opt_map: HashMap<&'a str, bool>,
pub allow_priority_array: Vec<&'a str>,
}
impl<'a> OptMap<'a> {
pub fn new() -> Self {
let mut list = Vec::new();
let mut def_map = HashMap::new();
let mut mul_ops_map = HashMap::new();
let mut single_opt_map = HashMap::new();
        // append the operators to the list in a fixed order
list.push("*");
list.push("/");
list.push("%");
list.push("^");
list.push("+");
list.push("-");
list.push("(");
list.push(")");
list.push("@");
list.push("#");
list.push("$");
list.push("&");
list.push("|");
list.push("=");
list.push("!");
list.push(">");
list.push("<");
list.push("&&");
list.push("||");
list.push("==");
list.push("!=");
list.push(">=");
list.push("<=");
        // insert every operator into the lookup map
for item in &mut list {
def_map.insert(*item, true);
}
        // split operators into the single-character and multi-character maps
for item in &mut list {
if item.len() > 1 {
mul_ops_map.insert(item.clone(), true);
} else {
single_opt_map.insert(item.clone(), true);
}
}
let mut vecs = vec![];
vecs.push("*");
vecs.push("/");
vecs.push("+");
vecs.push("-");
vecs.push("<=");
vecs.push("<");
vecs.push(">=");
vecs.push(">");
vecs.push("!=");
vecs.push("==");
vecs.push("&&");
vecs.push("||");
Self {
list: list,
map: def_map,
mul_ops_map: mul_ops_map,
single_opt_map: single_opt_map,
allow_priority_array: vecs,
}
}
    // multiplication/division bind tighter than addition/subtraction, and arithmetic binds tighter than comparison
pub fn priority_array(&self) -> Vec<&str> {
return self.allow_priority_array.clone();
}
    // whether the given string is an operator
pub fn is_opt(&self, arg: &str) -> bool {
let opt = self.map.get(arg);
return opt.is_none() == false;
}
    // whether the operator is allowed in the priority ordering
pub fn is_allow_opt(&self, arg: &str) -> bool {
for item in &self.allow_priority_array {
if arg == *item {
return true;
}
}
return false;
}
}
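// Illustrative sketch (not part of the original file): evaluating an expression
// against a JSON argument. The exact expression syntax accepted by the parser
// (field lookup by name, supported operators) is assumed from the code above.
#[cfg(test)]
mod eval_usage_sketch {
    use super::RbatisEngine;
    use serde_json::json;
    #[test]
    fn eval_simple_expression() {
        let engine = RbatisEngine::new();
        let arg = json!({ "a": 1, "b": 2 });
        // eval() rewrites " and " to " && " before parsing.
        let result = engine.eval("a >= 1 and b >= 2", &arg);
        assert!(result.is_ok());
    }
}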
| true |
56f49dcbc629d4cfef73285b47183ca2ca673868
|
Rust
|
RustWorks/Draw2D_GLFW_Vulkan_FFI
|
/src/graphics/vulkan/device_allocator/forced_offset.rs
|
UTF-8
| 2,101 | 3.078125 | 3 |
[
"MIT"
] |
permissive
|
use super::{Allocation, DeviceAllocator, MemUnit};
use anyhow::Result;
use ash::vk;
/// An allocator which forces all allocations to have a fixed offset.
///
/// This has little practical use, but is convenient when verifying that other
/// parts of the code properly handle allocation offsets.
pub struct ForcedOffsetAllocator<Alloc: DeviceAllocator> {
alignment: u64,
allocator: Alloc,
}
impl<Alloc: DeviceAllocator> ForcedOffsetAllocator<Alloc> {
pub fn new(allocator: Alloc, alignment: MemUnit) -> Self {
Self {
allocator,
alignment: alignment.to_bytes(),
}
}
fn offset(&self) -> u64 {
self.alignment * 100
}
}
impl<Alloc: DeviceAllocator> DeviceAllocator for ForcedOffsetAllocator<Alloc> {
/// Use the underlying allocator implementation to allocate an oversized
/// piece of memory, then set an offset to compensate.
///
/// This has no practical use other than proving that code properly handles
/// memory offsets.
unsafe fn allocate(
&mut self,
allocate_info: vk::MemoryAllocateInfo,
) -> Result<Allocation> {
let expanded_allocate_info = vk::MemoryAllocateInfo {
memory_type_index: allocate_info.memory_type_index,
allocation_size: allocate_info.allocation_size + self.offset(),
..Default::default()
};
let mut allocation = self.allocator.allocate(expanded_allocate_info)?;
allocation.offset += self.offset();
allocation.byte_size = allocate_info.allocation_size;
Ok(allocation)
}
/// Undo the offset+size adjustments which were applied by [Self::allocate],
/// then use the underlying allocator to actually free the memory.
unsafe fn free(&mut self, allocation: &Allocation) -> Result<()> {
if allocation.is_null() {
Ok(())
} else {
let mut adjusted = allocation.clone();
adjusted.offset -= self.offset();
adjusted.byte_size += self.offset();
self.allocator.free(&adjusted)
}
}
}
| true |
bc6d91cf45d36e7f2b56f26b13753458a5adf0d3
|
Rust
|
shanu516516/rust_microservices
|
/web/src/config/connection.rs
|
UTF-8
| 845 | 2.546875 | 3 |
[] |
no_license
|
use deadpool_postgres::{Client, Config, Pool};
use tokio_postgres::tls::NoTls;
use crate::config::env::EnvConfig;
#[derive(Clone)]
pub struct Connection {}
impl Connection {
fn create_config(env_cfg: EnvConfig) -> Config {
let mut cfg = Config::new();
cfg.dbname = Some(env_cfg.db_name);
cfg.port = Some(env_cfg.db_port);
cfg.host = Some(env_cfg.db_host);
cfg.user = Some(env_cfg.db_user);
cfg.password = Some(env_cfg.db_pwd);
cfg
}
pub fn new(env_cfg: EnvConfig) -> Pool {
let config = Connection::create_config(env_cfg);
match config.create_pool(NoTls)
{
Ok(pool) => {
pool
}
Err(error) => {
panic!("Connection Pool Critical Error: \n{}", error);
}
}
}
}
| true |
f40efb024142e74442890775a8b137197597c265
|
Rust
|
isgasho/xuantie
|
/src/register/mrmr.rs
|
UTF-8
| 278 | 2.828125 | 3 |
[
"LicenseRef-scancode-mulanpsl-2.0-en"
] |
permissive
|
//! mrmr, machine reset signal register
set!(0x7C6);
clear!(0x7C6);
read_csr!(0x7C6);
/// Write to mrmr register to release reset lock for given harts
///
/// Write `0x1` for hart 0, `0x2` for 1, `0x4` for 2, etc.
#[inline]
pub unsafe fn write(bits: usize) {
_set(bits)
}
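// Illustrative sketch (not part of the original file): releasing the reset lock
// for harts 0 and 2 means setting bits 0 and 2 in a single write:
//
//     unsafe { write(0x1 | 0x4); }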
| true |
c514757d634232b9c817e406d264eb70a18c8224
|
Rust
|
irevoire/project_euler
|
/problem_38/src/main.rs
|
UTF-8
| 994 | 3.234375 | 3 |
[] |
no_license
|
use project_euler::num::Num;
use std::collections::HashSet;
fn main() {
let mut base = [0; 9];
let mut res = 0;
// we stop at 99999999 because we don't want to get any direct pandigital number
for _ in 0..99999999 {
base.iter_mut().enumerate().for_each(|(i, e)| *e += i + 1);
let mut set = HashSet::new();
for digits in base.iter().map(|e| e.split_digits()) {
let still_pandigital = digits.iter().all(|d| set.insert(*d));
if digits.contains(&0) {
break;
} else if still_pandigital && set.len() == 9 {
res = base[0]; // current number
println!("current biggest number {}", res);
break;
} else if !still_pandigital {
break;
}
}
}
    // ugly hardcoded solution because *I know* the answer is formed from just the first two multiples
println!("The biggest pandigital number is {}{}", res, res * 2);
}
| true |
c4fbb19027d08b01446def7deab1a32665b2243b
|
Rust
|
bieganski/distributed
|
/dslab04/examples/udp-communication/server.rs
|
UTF-8
| 543 | 2.796875 | 3 |
[] |
no_license
|
use crossbeam_channel::Sender;
use std::net::UdpSocket;
pub fn echo(server_ready_notify: Sender<()>) {
let socket = UdpSocket::bind("127.0.0.1:8889").unwrap();
server_ready_notify.send(()).unwrap();
let mut buf = [0; 20];
for _ in 0..2 {
let (count, addr) = socket.recv_from(&mut buf).unwrap();
println!(
"Server > received {} bytes! Message: {}",
count,
std::str::from_utf8(&buf[..count]).unwrap()
);
socket.send_to(&buf[..count], addr).unwrap();
}
}
| true |
4d0fb283c87f7a2a06b3f2eda1b0a481fddbdef9
|
Rust
|
prizeprofile/backend-scheduler
|
/src/event.rs
|
UTF-8
| 1,499 | 3.109375 | 3 |
[] |
no_license
|
//! There are 2 event types that can happen in the app.
//! Events represent messages received from or sent to outer AWS architecture.
/// Incoming SQS message type. Is generated in the `sqs::listen` method and sent
/// across the channel to a receiver that routes it to relevant region.
pub struct InputEvent {
/// Integer ID of region that the event belongs to.
pub region_id: u64,
/// BigInteger of the highest resource id that was parsed in the region.
pub max_id: u64,
/// Integer in an interval <0, 100>.
/// How many resources were received in the last Twitter scrape.
pub resources_count: u64,
/// If there was an error in scraping Twitter, this field would hold
/// an integer indicating how many seconds should the scheduler wait
/// before rescheduling the call.
pub error: Option<u64>,
}
/// Outgoing SQS message type. It is generated in `region::ResourceRegion::handle_event`
/// and sent across the channel to `sqs::stream`, which pushes it to AWS SQS.
pub struct OutputEvent {
/// Integer of how many seconds should the scheduler wait
/// before pushing the message.
pub delay: u64,
/// Integer that is used to backtrack the message to a region.
pub region_id: u64,
    /// BigInteger with the minimum id a resource ought to have.
pub since_id: u64,
/// JSON string of parameters that define a region.
pub params: String,
/// SNS Topic that region should be pushed to.
pub topic: String,
}
| true |
0b6b2da348b040239a02274fe806a1e65e88be97
|
Rust
|
sthilaid/rust-testing
|
/src/chapters/smart_pointers.rs
|
UTF-8
| 2,618 | 3.3125 | 3 |
[] |
no_license
|
use std::rc::Rc;
pub fn run() {
println!("\n*****************************************************************");
println!("smart pointers");
println!("*****************************************************************");
data_structure_tests();
}
struct LinkedListNode<T> {
prev: LinkedList<T>,
next: LinkedList<T>,
value: T,
}
enum LinkedList<T> {
Node(Rc<LinkedListNode<T>>),
Empty,
}
struct LinkedListIterator<T> {
node: LinkedList<T>,
is_forward: bool,
}
impl<T> LinkedList<T> {
fn new(v: T) -> LinkedList<T> {
LinkedList::Node(Rc::new(LinkedListNode {
prev: LinkedList::Empty,
next: LinkedList::Empty,
value: v,
}))
}
    // Note: only the new node's `prev` link is set; the existing node's `next`
    // is never updated (the Rc contents are not mutated), so `iter()` from the
    // returned head only yields that node while `back_iter()` walks the chain.
    fn push(self: &mut Self, val: T) -> LinkedList<T> {
LinkedList::Node(Rc::new(LinkedListNode {
prev: match self {
LinkedList::Node(n) => LinkedList::Node(Rc::clone(n)),
LinkedList::Empty => LinkedList::Empty,
},
next: LinkedList::Empty,
value: val,
}))
}
fn clone(self: &Self) -> Self {
match self {
LinkedList::Node(n) => LinkedList::Node(Rc::clone(n)),
LinkedList::Empty => LinkedList::Empty,
}
}
fn iter(self: &Self) -> LinkedListIterator<T> {
LinkedListIterator {
node: self.clone(),
is_forward: true,
}
}
fn back_iter(self: &Self) -> LinkedListIterator<T> {
LinkedListIterator {
node: self.clone(),
is_forward: false,
}
}
}
impl<T: Copy> Iterator for LinkedListIterator<T> {
type Item = T;
fn next(&mut self) -> Option<Self::Item> {
match &self.node {
LinkedList::Node(n) => {
let val = n.value;
match if self.is_forward { &n.next } else { &n.prev } {
LinkedList::Node(next_node) => {
self.node = LinkedList::Node(Rc::clone(&next_node))
}
LinkedList::Empty => self.node = LinkedList::Empty,
};
Option::Some(val)
}
LinkedList::Empty => None,
}
}
}
fn data_structure_tests() {
println!("\n--- heap allocting data structures: linked list example ---\n");
let mut list = LinkedList::new(5);
list = list.push(6).push(7).push(8).push(9).push(10);
let vlist: Vec<u8> = list.iter().collect();
let bvlist: Vec<u8> = list.back_iter().collect();
println!("vlist: {:?} bvlist: {:?}", vlist, bvlist);
}
| true |
4220556682504bec8674ecf16ef1443ae4f2aa91
|
Rust
|
Twey/mkRustCrate
|
/libnix/src/expr.rs
|
UTF-8
| 2,771 | 2.796875 | 3 |
[] |
no_license
|
pub type Id = String;
#[derive(Clone, Debug)]
pub struct NonEmptyVec<T> {
pub head: T,
pub tail: Vec<T>,
}
#[derive(Clone, Debug)]
pub enum Expr {
Bound(Bound),
Conditional(Conditional),
Operation(Operation),
Application(Application),
Selection(Selection),
Literal(Literal),
}
#[derive(Clone, Debug)]
pub enum Parameter {
Ellipsis,
Named {
name: Id,
default: Option<Box<Expr>>,
},
}
#[derive(Clone, Debug)]
pub enum Pattern {
Name(String),
Set {
name: Option<String>,
parameters: Vec<Parameter>,
},
}
#[derive(Clone, Debug)]
pub enum AttrName {
Literal(Id),
Dynamic(Box<Expr>),
}
pub type AttrPath = NonEmptyVec<AttrName>;
#[derive(Clone, Debug)]
pub enum Binding {
Inherit {
source: Option<Box<Expr>>,
attrs: Vec<Id>, // TODO this is oversimplified
},
Attr(AttrPath),
}
pub type Bindings = Vec<Binding>;
#[derive(Clone, Debug)]
pub enum Binder {
Function(Pattern),
Assert(Box<Expr>),
With(Box<Expr>),
Let(Bindings),
}
#[derive(Clone, Debug)]
pub struct Bound {
pub binder: Binder,
pub body: Box<Expr>,
}
#[derive(Clone, Debug)]
pub struct Conditional {
pub cond: Box<Expr>,
pub then_: Box<Expr>,
pub else_: Box<Expr>,
}
#[derive(Clone, Debug)]
pub enum UnaryOperator { Not, Negate }
#[derive(Clone, Debug)]
pub enum BinaryOperator {
Eq, Neq, Lt, Gt, Geq, Leq,
And, Or, Impl,
Update,
Plus, Minus, Multiply, Divide,
Concat,
}
#[derive(Clone, Debug)]
pub enum AttrOperator { Question, OrKw }
#[derive(Clone, Debug)]
pub enum Operation {
Unary {
op: UnaryOperator,
arg: Box<Expr>,
},
Binary {
op: BinaryOperator,
lhs: Box<Expr>,
rhs: Box<Expr>,
},
Attr {
op: AttrOperator,
lhs: Box<Expr>,
rhs: AttrPath,
},
}
#[derive(Clone, Debug)]
pub struct Application {
pub function: Box<Expr>,
pub argument: Box<Expr>,
}
#[derive(Clone, Debug)]
pub struct Selection {
pub base: Box<Expr>,
pub path: AttrPath,
pub default: Option<Box<Expr>>,
}
#[derive(Clone, Debug)]
pub struct StringPart {
pub dynamic: Box<Expr>,
pub literal: String,
}
#[derive(Clone, Debug)]
pub struct StringParts {
pub head: String,
pub tail: Vec<StringPart>,
}
#[derive(Clone, Debug)]
pub enum Path {
Angle(String),
Normal(String),
}
pub type List = Vec<Expr>;
#[derive(Clone, Debug)]
pub struct AttrSet {
pub recursive: bool,
pub bindings: Bindings,
}
#[derive(Clone, Debug)]
pub enum Literal {
Var(Id),
Int(u64),
Float(String), // no Eq
String(String),
IndString(String),
Path(Path),
Uri(String),
AttrSet(AttrSet),
List(List),
}
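
// Added for illustration: a small construction sketch building the AST for the
// Nix expression `1 + 2` out of the types above; it is not part of the original crate.
#[allow(dead_code)]
pub fn example_one_plus_two() -> Expr {
    Expr::Operation(Operation::Binary {
        op: BinaryOperator::Plus,
        lhs: Box::new(Expr::Literal(Literal::Int(1))),
        rhs: Box::new(Expr::Literal(Literal::Int(2))),
    })
}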
| true |
00b52a0df9aae7c5eb48d28ab34e714a3efed081
|
Rust
|
ftsell/pixelflut
|
/rust/src/pixmap/file_backed_pixmap.rs
|
UTF-8
| 16,380 | 2.921875 | 3 |
[
"MIT"
] |
permissive
|
use std::cmp::min;
use std::fs::{create_dir_all, File, OpenOptions};
use std::io::{BufWriter, Read, Seek, SeekFrom, Write};
use std::path::Path;
use std::sync::{Mutex, MutexGuard};
use anyhow::Result;
use byteorder::{ReadBytesExt, WriteBytesExt};
use thiserror::Error;
use super::traits::*;
use super::*;
// TODO Implement handling of read-only files
// TODO Think of an implementation with RwLock instead of Mutex
const LOG_TARGET: &str = "pixelflut.pixmap.file";
const HEADER_SPACE: usize = 256;
const MAGIC_BYTES: [u8; 10] = [
'P' as u8, 'I' as u8, 'X' as u8, 'E' as u8, 'L' as u8, 'F' as u8, 'L' as u8, 'U' as u8, 'T' as u8, 1,
];
const SEEK_MAGIC: SeekFrom = SeekFrom::Start(0);
const SEEK_HEADER: SeekFrom = SeekFrom::Start(MAGIC_BYTES.len() as u64);
const SEEK_DATA: SeekFrom = SeekFrom::Start((MAGIC_BYTES.len() + HEADER_SPACE) as u64);
#[derive(Error, Debug)]
pub enum Error {
#[error("content of existing file is not a valid pixmap file")]
InvalidFileType,
#[error("the existing file contains pixmap data of different size than the requested pixmap")]
IncompatiblePixmapData,
}
#[derive(Debug, Eq, PartialEq)]
pub struct FileHeader {
width: u64,
height: u64,
}
///
/// Pixmap implementation which reads and writes all data directly into a backing file
///
#[derive(Debug)]
pub struct FileBackedPixmap {
file: Mutex<File>,
header: FileHeader,
}
impl FileBackedPixmap {
/// Create a new instance backed by a file at `path` with the specified size.
///
    /// If a file already exists at the destination that **is not** an existing pixmap of the same
    /// size, this only succeeds when `overwrite` is true, in which case the existing file is
    /// overwritten and all preexisting data is removed from it.
///
    /// If a file already exists at the destination that **is** an existing pixmap of the same size
    /// and `overwrite` is true, the content of that file will be overwritten too, removing all
    /// preexisting pixel data from it.
pub fn new(path: &Path, width: usize, height: usize, overwrite: bool) -> Result<Self> {
if width == 0 || height == 0 {
return Err(GenericError::InvalidSize(width, height).into());
}
// create containing directory hierarchy if it does not yet exist
match path.parent() {
Some(parent_dir) => create_dir_all(parent_dir)?,
None => {}
}
let is_preexisting = path.exists();
// create the resulting instance
let instance = Self {
file: Mutex::new(
OpenOptions::new()
.read(true)
.write(true)
.create(true)
.open(path)?,
),
header: FileHeader {
width: width as u64,
height: height as u64,
},
};
{
let mut lock = instance.file.lock().unwrap();
let mut initial_data = vec![0; width * height * 3];
// retrieve potential existing data for initialization or abort if necessary
if is_preexisting {
match instance.validate_magic_bytes(&mut lock) {
Ok(_) => {
// the file already is a pixmap file
let existing_header = instance.read_header(&mut lock)?;
if instance.header != existing_header {
// but it is incompatible
if !overwrite {
return Err(Error::IncompatiblePixmapData.into());
} else {
debug!(target: LOG_TARGET, "Overwriting data in existing file {:?}", path);
}
} else {
// and it is compatible
if !overwrite {
debug!(
target: LOG_TARGET,
"Reusing data from existing pixmap file {:?}", path
);
initial_data = instance.read_data(&mut lock)?;
} else {
debug!(
target: LOG_TARGET,
"Ignoring existing pixmap data from file {:?}", path
)
}
}
}
Err(e) => {
match e.downcast::<Error>() {
Ok(e) => match e {
Error::InvalidFileType => {
// the file is accessible but not a pixmap file
if !overwrite {
return Err(Error::InvalidFileType.into());
} else {
debug!(
target: LOG_TARGET,
"Overwriting existing file {:?} with pixmap data", path
)
}
}
_ => return Err(e.into()), // some other of our errors
},
Err(e) => return Err(e), // some random other error
}
}
}
} else {
debug!(target: LOG_TARGET, "Creating new file {:?} for pixmap data", path)
}
// write initial data into file
lock.set_len((MAGIC_BYTES.len() + HEADER_SPACE + width * height * 3) as u64)?;
instance.write_magic_bytes(&mut lock)?;
instance.write_header(&mut lock)?;
instance.write_data(&mut lock, &initial_data)?;
}
info!(target: LOG_TARGET, "Created file backed pixmap at {:?}", path);
Ok(instance)
}
/// Validate that the file does contain pixelflut data by validating the magic bytes
fn validate_magic_bytes(&self, lock: &mut MutexGuard<File>) -> Result<()> {
if lock.metadata()?.len() < MAGIC_BYTES.len() as u64 {
Err(Error::InvalidFileType.into())
} else {
let mut data = vec![0; MAGIC_BYTES.len()];
lock.seek(SEEK_MAGIC)?;
lock.read_exact(&mut data)?;
if &data != &MAGIC_BYTES {
Err(Error::InvalidFileType.into())
} else {
Ok(())
}
}
}
/// Write MAGIC_BYTES into the first bytes of the file
fn write_magic_bytes(&self, lock: &mut MutexGuard<File>) -> Result<()> {
lock.seek(SEEK_MAGIC)?;
lock.write_all(&MAGIC_BYTES)?;
Ok(())
}
/// Read and deserialize only the header part of the .pixmap file
fn read_header(&self, lock: &mut MutexGuard<File>) -> Result<FileHeader> {
lock.seek(SEEK_HEADER)?;
Ok(FileHeader {
width: (*lock).read_u64::<byteorder::BigEndian>()?,
height: (*lock).read_u64::<byteorder::BigEndian>()?,
})
}
/// Serialize and write only the header part of the .pixmap file
fn write_header(&self, lock: &mut MutexGuard<File>) -> Result<()> {
lock.seek(SEEK_HEADER)?;
let mut buffer = BufWriter::new(&**lock);
buffer.write_u64::<byteorder::BigEndian>(self.header.width)?;
buffer.write_u64::<byteorder::BigEndian>(self.header.height)?;
buffer.write_all(vec![0; HEADER_SPACE - 2 * 8].as_slice())?;
buffer.flush()?;
Ok(())
}
/// Read the complete data section from file
fn read_data(&self, lock: &mut MutexGuard<File>) -> Result<Vec<u8>> {
lock.seek(SEEK_DATA)?;
let mut result = vec![0; (self.header.width * self.header.height * 3) as usize];
lock.read_exact(&mut result)?;
Ok(result)
}
/// Write the complete data section of the file
fn write_data(&self, lock: &mut MutexGuard<File>, data: &[u8]) -> Result<()> {
lock.seek(SEEK_DATA)?;
lock.write_all(data)?;
Ok(())
}
    /// Read the data of a single pixel from the file
fn read_pixel(&self, lock: &mut MutexGuard<File>, x: usize, y: usize) -> Result<[u8; 3]> {
let seek_pixel = SeekFrom::Current((pixel_coordinates_2_index(self, x, y)? * 3) as i64);
lock.seek(SEEK_DATA)?;
lock.seek(seek_pixel)?;
let mut result = [0, 0, 0];
lock.read_exact(&mut result)?;
Ok(result)
}
    /// Write a single pixel into the file.
fn write_pixel(&self, lock: &mut MutexGuard<File>, x: usize, y: usize, color: [u8; 3]) -> Result<()> {
let seek_pixel = SeekFrom::Current((pixel_coordinates_2_index(self, x, y)? * 3) as i64);
lock.seek(SEEK_DATA)?;
lock.seek(seek_pixel)?;
lock.write_all(&color)?;
Ok(())
}
}
impl PixmapBase for FileBackedPixmap {
fn get_size(&self) -> Result<(usize, usize)> {
Ok((self.header.width as usize, self.header.height as usize))
}
}
impl PixmapRead for FileBackedPixmap {
fn get_pixel(&self, x: usize, y: usize) -> Result<Color> {
verify_coordinates_are_inside(self, x, y)?;
let mut lock = self.file.lock().unwrap();
let bin_data = self.read_pixel(&mut lock, x, y).unwrap();
Ok(Color(bin_data[0], bin_data[1], bin_data[2]))
}
}
impl PixmapWrite for FileBackedPixmap {
fn set_pixel(&self, x: usize, y: usize, color: Color) -> Result<()> {
verify_coordinates_are_inside(self, x, y)?;
let mut lock = self.file.lock().unwrap();
Ok(self.write_pixel(&mut lock, x, y, [color.0, color.1, color.2])?)
}
}
impl PixmapRawRead for FileBackedPixmap {
fn get_raw_data(&self) -> Result<Vec<Color>> {
let mut result = Vec::new();
let mut color = Color(0, 0, 0);
let mut lock = self.file.lock().unwrap();
for (i, byte) in self.read_data(&mut lock).unwrap().iter().enumerate() {
if i % 3 == 0 {
color.0 = byte.to_owned();
} else if i % 3 == 1 {
color.1 = byte.to_owned()
} else if i % 3 == 2 {
color.2 = byte.to_owned();
result.push(color);
}
}
Ok(result)
}
}
impl PixmapRawWrite for FileBackedPixmap {
fn put_raw_data(&self, data: &[Color]) -> Result<()> {
let bin_data: Vec<u8> = data[..min(
data.len(),
self.header.width as usize * self.header.height as usize,
)]
.iter()
.flat_map(|color| {
let color: Color = color.clone().into();
vec![color.0, color.1, color.2]
})
.collect();
let mut lock = self.file.lock().unwrap();
Ok(self.write_data(&mut lock, &bin_data).unwrap())
}
}
#[cfg(test)]
mod test {
use quickcheck::TestResult;
use tempfile::tempdir;
use super::super::test;
use super::*;
#[test]
fn test_new_file() {
// setup
let dir = tempdir().unwrap();
let path = dir.path().join("test.pixmap");
// execution
let pixmap = FileBackedPixmap::new(&path, 800, 600, false);
// verification
assert!(pixmap.is_ok(), "pixmap creation failed: {:?}", pixmap);
assert!(path.exists());
assert_eq!(
path.metadata().unwrap().len() as usize,
MAGIC_BYTES.len() + HEADER_SPACE + 800 * 600 * 3
);
}
#[test]
fn test_overwriting_existing_incompatible_file() {
// setup
let dir = tempdir().unwrap();
let smaller_path = dir.path().join("smaller.pixmap");
let larger_path = dir.path().join("larger.pixmap");
let different_path = dir.path().join("different.txt");
let empty_path = dir.path().join("empty");
{
let _smaller_pixmap = FileBackedPixmap::new(&smaller_path, 100, 200, false).unwrap();
let _larger_pixmap = FileBackedPixmap::new(&larger_path, 1000, 2000, false).unwrap();
let mut different_file = OpenOptions::new()
.create(true)
.write(true)
.open(&different_path)
.unwrap();
different_file
.write_all("This is a text file".as_bytes())
.unwrap();
let mut _empty_file = OpenOptions::new()
.create(true)
.write(true)
.open(&empty_path)
.unwrap();
}
// execution + verification
//for path in [smaller_path, larger_path, different_path, empty_path].iter() {
for path in [empty_path].iter() {
// execution (expected failure)
let pixmap = FileBackedPixmap::new(path, 800, 600, false);
// verification
assert!(
pixmap.is_err(),
"pixmap creation did not fail although existing file is incompatible to new pixmap"
);
// execution (expected success)
let pixmap = FileBackedPixmap::new(path, 800, 600, true);
// verification
assert!(pixmap.is_ok(), "pixmap creation failed: {:?}", pixmap);
assert_eq!(
path.metadata().unwrap().len() as usize,
MAGIC_BYTES.len() + HEADER_SPACE + 800 * 600 * 3
);
}
}
#[test]
fn test_reusing_existing_compatible_file() {
// setup
let dir = tempdir().unwrap();
let path = dir.path().join("test.pixmap");
{
let pixmap = FileBackedPixmap::new(&path, 800, 600, false).unwrap();
pixmap.set_pixel(42, 42, Color(42, 42, 42)).unwrap();
}
// execution (without reset)
let pixmap = FileBackedPixmap::new(&path, 800, 600, false);
// verification
assert!(pixmap.is_ok(), "pixmap creation failed: {:?}", pixmap);
assert_eq!(pixmap.unwrap().get_pixel(42, 42).unwrap(), Color(42, 42, 42));
// execution (with reset)
let pixmap = FileBackedPixmap::new(&path, 800, 600, true);
// verification
assert!(pixmap.is_ok(), "pixmap creation failed: {:?}", pixmap);
assert_eq!(pixmap.unwrap().get_pixel(42, 42).unwrap(), Color(0, 0, 0))
}
#[test]
fn test_get_size() {
// setup
let dir = tempdir().unwrap();
let path = dir.path().join("test.pixmap");
let pixmap = FileBackedPixmap::new(&path, 800, 600, false).unwrap();
// execution
let size = pixmap.get_size().unwrap();
// verification
assert_eq!(size, (800, 600));
}
#[test]
fn test_fails_cleanly_on_io_error() {
// setup
let path = Path::new("/root/test.pixmap");
// execution
let pixmap = FileBackedPixmap::new(&path, 800, 600, false);
// verification
assert!(pixmap.is_err())
}
quickcheck! {
fn test_set_and_get_pixel(x: usize, y: usize, color: Color) -> TestResult {
let dir = tempdir().unwrap();
let path = dir.path().join("test.pixmap");
let pixmap = FileBackedPixmap::new(&path, 800, 600, true).unwrap();
test::test_set_and_get_pixel(pixmap, x, y, color)
}
}
#[test]
fn test_put_and_get_raw_data() {
// setup
let dir = tempdir().unwrap();
let path = dir.path().join("test.pixmap");
let pixmap = FileBackedPixmap::new(&path, 800, 600, true).unwrap();
for i in vec![0, 1, 256, 257, 4096, 4097] {
// execution
let result = test::test_put_and_get_raw_data(&pixmap, i.into());
// verification
assert!(!result.is_error() && !result.is_failure())
}
}
#[test]
fn test_put_raw_data_with_incorrect_size_data() {
let dir = tempdir().unwrap();
let path = dir.path().join("test.pixmap");
let pixmap = FileBackedPixmap::new(&path, 800, 600, true).unwrap();
test::test_put_raw_data_with_incorrect_size_data(&pixmap);
}
}
| true |
543bd9f49f9cce5bec33b3f3d0e95988ab3dc3de
|
Rust
|
acdibble/aoc
|
/2018/rs/day18/src/main.rs
|
UTF-8
| 5,103 | 3.015625 | 3 |
[] |
no_license
|
use std::collections::HashSet;
use std::env;
use std::fs;
use std::path::Path;
use std::time::SystemTime;
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
enum Tile {
Open,
Trees,
Lumberyard,
}
impl Tile {
fn from_char(ch: char) -> Self {
match ch {
'.' => Self::Open,
'|' => Self::Trees,
'#' => Self::Lumberyard,
_ => unreachable!(),
}
}
fn count_if(&self, tile: Tile) -> i32 {
if *self == tile {
1
} else {
0
}
}
}
fn inc(n: usize, max: usize) -> Option<usize> {
match n + 1 {
next if next == max => None,
next => Some(next),
}
}
fn dec(n: usize, _: usize) -> Option<usize> {
match n {
0 => None,
_ => Some(n - 1),
}
}
fn noop(n: usize, _: usize) -> Option<usize> {
Some(n)
}
type NeighborFn = fn(usize, usize) -> Option<usize>;
const NEIGHBORS: [(NeighborFn, NeighborFn); 8] = [
(inc, inc),
(inc, noop),
(inc, dec),
(dec, inc),
(dec, noop),
(dec, dec),
(noop, inc),
(noop, dec),
];
fn find_counts(area: &Vec<Vec<Tile>>) -> (i32, i32) {
area.iter().fold((0, 0), |running_total, row| {
row.iter().fold(running_total, |(t, l), tile| {
(
t + tile.count_if(Tile::Trees),
l + tile.count_if(Tile::Lumberyard),
)
})
})
}
fn detect_cycle(past_values: &Vec<(usize, (i32, i32))>) -> (usize, usize) {
for i in 0..past_values.len() {
for j in (i + 1)..past_values.len() {
let (step_one, value_one) = past_values[i];
let (step_two, value_two) = past_values[j];
if value_one != value_two {
continue;
}
for k in 0..=10 {
let (_, value_one) = past_values[i + k];
let (_, value_two) = past_values[j + k];
if value_one != value_two {
break;
}
if k == 10 {
return (step_one, step_two - step_one);
}
}
}
}
unreachable!()
}
fn solve(input: &str, target: usize) -> i32 {
let mut area: Vec<Vec<_>> = input
.lines()
.map(|line| line.chars().map(Tile::from_char).collect())
.collect();
let max_x = area[0].len();
let max_y = area.len();
let mut buffer = area.clone();
let mut past_values = vec![];
let mut seen = HashSet::new();
for step in 0..target {
for (y, row) in area.iter().enumerate() {
for (x, tile) in row.iter().enumerate() {
let neighbors =
NEIGHBORS
.iter()
.filter_map(|(a, b)| match (a(x, max_x), b(y, max_y)) {
(Some(x), Some(y)) => Some(area[y][x]),
_ => None,
});
match tile {
Tile::Open => {
let total = neighbors.fold(0, |acc, tile| acc + tile.count_if(Tile::Trees));
buffer[y][x] = if total >= 3 { Tile::Trees } else { *tile };
}
Tile::Trees => {
let total =
neighbors.fold(0, |acc, tile| acc + tile.count_if(Tile::Lumberyard));
buffer[y][x] = if total >= 3 { Tile::Lumberyard } else { *tile };
}
Tile::Lumberyard => {
buffer[y][x] = match neighbors.fold((0, 0), |(a, b), tile| {
(
a + tile.count_if(Tile::Lumberyard),
b + tile.count_if(Tile::Trees),
)
}) {
(0, _) | (_, 0) => Tile::Open,
_ => *tile,
}
}
}
}
}
std::mem::swap(&mut area, &mut buffer);
let value = find_counts(&area);
past_values.push((step, value));
seen.insert(value);
if past_values.len() - seen.len() > 50 {
break;
}
}
if past_values.len() == target {
let (_, (a, b)) = past_values.pop().unwrap();
return a * b;
}
let (offset, cycle_length) = detect_cycle(&past_values);
let index = (target - offset - 1) % cycle_length;
let (_, (a, b)) = past_values[index + offset];
a * b
}
fn time_it<F, T>(fun: F) -> T
where
F: Fn() -> T,
{
let start = SystemTime::now();
let result = fun();
println!("Time elapsed: {} µs", start.elapsed().unwrap().as_micros());
result
}
fn main() -> std::io::Result<()> {
let file_path = env::current_dir()?.join(Path::new("data.txt"));
let input = fs::read_to_string(file_path)?;
time_it(|| println!("part 1: {}", solve(&input, 10)));
time_it(|| println!("part 2: {}", solve(&input, 1000000000)));
Ok(())
}
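
// Added for illustration: a small check of `detect_cycle` on a synthetic history
// with two warm-up steps followed by a cycle of length three. The values are made up.
#[cfg(test)]
mod tests {
    use super::detect_cycle;

    #[test]
    fn finds_offset_and_cycle_length() {
        let mut history = Vec::new();
        for step in 0..20 {
            let value = if step < 2 {
                (100 + step as i32, 0)
            } else {
                [(1, 1), (2, 2), (3, 3)][(step - 2) % 3]
            };
            history.push((step, value));
        }
        assert_eq!(detect_cycle(&history), (2, 3));
    }
}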
| true |
94bfa81039bf806b24e0ad32d4b04c06646d9586
|
Rust
|
vickenty/roids
|
/src/entity.rs
|
UTF-8
| 2,244 | 3.15625 | 3 |
[] |
no_license
|
use input::Input;
use render;
use physics;
use hud::Hud;
#[derive(Copy, Clone, PartialEq)]
pub enum State {
Alive,
Dead,
}
pub trait Entity {
fn draw(&mut self, renderer: &mut render::Renderer);
fn think(&mut self, dt: f32, &Input, hud: &mut Hud, born: &mut Vec<Box<Entity>>) -> State;
fn collide(&mut self, other: &mut Entity, energy: f32);
fn body(&mut self) -> Option<&mut physics::Body>;
fn is_beam(&self) -> bool { false }
}
pub struct Engine {
entities: Vec<Box<Entity>>,
born: Vec<Box<Entity>>,
dead: Vec<usize>,
}
fn collide(a: &mut Entity, b: &mut Entity) -> Option<f32> {
if let (Some(i), Some(j)) = (a.body(), b.body()) {
physics::collide(i, j)
} else {
None
}
}
impl Engine
{
pub fn new() -> Engine {
Engine {
entities: Vec::new(),
born: Vec::new(),
dead: Vec::new(),
}
}
pub fn add(&mut self, entity: Box<Entity>) {
self.entities.push(entity);
}
pub fn draw(&mut self, renderer: &mut render::Renderer) {
for e in self.entities.iter_mut() {
e.draw(renderer);
}
}
fn collide_one(&mut self, i: usize) {
let (todo, mut rest) = self.entities.split_at_mut(i);
let mut this = rest[0].as_mut();
for other in todo {
            /* &mut **other reborrows &mut Box<Entity> as &mut Entity */
if let Some(energy) = collide(this, &mut **other) {
this.collide(&mut **other, energy);
other.collide(this, energy);
}
}
}
fn collide_all(&mut self) {
let len = self.entities.len();
for i in 0..len {
self.collide_one(i);
}
}
pub fn think(&mut self, dt: f32, input: &Input, hud: &mut Hud) {
for (i, e) in self.entities.iter_mut().enumerate() {
let state = e.think(dt, input, hud, &mut self.born);
if state == State::Dead {
self.dead.push(i);
}
}
while let Some(i) = self.dead.pop() {
self.entities.swap_remove(i);
}
while let Some(e) = self.born.pop() {
self.entities.push(e);
}
self.collide_all();
}
}
| true |
21c37ccaa4d2e1b687e44f1fbe0a5829e75c365f
|
Rust
|
hungyiloo/advent-of-code
|
/rust/2020/src/bin/08.rs
|
UTF-8
| 3,487 | 3.4375 | 3 |
[] |
no_license
|
use std::{
collections::HashSet,
fmt,
fs::File,
io::{BufRead, BufReader},
};
fn main() {
let instructions = get_instructions();
let (result, _) = execute(&instructions, None);
println!("Part 1: {:?}", result);
let swap_positions: Vec<usize> =
instructions
.iter()
.enumerate()
.filter(|(_, instruction)| match instruction {
Instruction(op, _) => *op == Operation::NOP || *op == Operation::JMP,
})
.map(|(position, _)| position)
.collect();
for swap_position in swap_positions {
let (result, looped) = execute(&instructions, Some(swap_position));
if !looped {
println!("Part 2: {}", result);
break;
}
}
}
fn execute(instructions: &Vec<Instruction>, swap: Option<usize>) -> (i32, bool) {
let mut visited = HashSet::<usize>::new();
let mut cursor: usize = 0;
let mut accumulator: i32 = 0;
while !visited.contains(&cursor) && cursor < instructions.len() {
visited.insert(cursor);
let Instruction(op, arg) = instructions[cursor];
// Swap/correction behavior to test for corruption
let op = match (swap, op) {
(Some(s), Operation::JMP) if s == cursor => Operation::NOP,
(Some(s), Operation::NOP) if s == cursor => Operation::JMP,
_ => op,
};
// Update the execution state based on operation & argument
match op {
Operation::ACC => {
accumulator += arg as i32;
cursor += 1;
}
Operation::JMP => {
let maybe_cursor = (cursor as isize) + arg;
cursor = usize::try_from(maybe_cursor).unwrap();
}
Operation::NOP => {
cursor += 1;
}
}
}
let looped = visited.contains(&cursor);
return (accumulator, looped);
}
fn get_instructions() -> Vec<Instruction> {
let file = File::open("../../input/2020/08.txt").expect("puzzle input file not found");
let reader = BufReader::new(file);
let mut instructions: Vec<Instruction> = vec![];
for line in reader.lines() {
let line = line.unwrap();
let mut parts = line.split(" ").into_iter();
let operation = match parts.next() {
Some("acc") => Operation::ACC,
Some("jmp") => Operation::JMP,
Some("nop") => Operation::NOP,
Some(_) | None => panic!("invalid instruction"),
};
let argument = parts
.next()
.unwrap()
.replace("+", "")
.parse::<isize>()
.unwrap();
instructions.push(Instruction(operation, argument));
}
return instructions;
}
#[derive(Copy, Clone, PartialEq)]
enum Operation {
ACC,
JMP,
NOP,
}
struct Instruction(Operation, isize);
impl fmt::Debug for Instruction {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
Instruction(Operation::ACC, arg) => {
write!(f, "acc {}{:?}", if arg > &0 { "+" } else { "" }, arg)
}
Instruction(Operation::JMP, arg) => {
write!(f, "jmp {}{:?}", if arg > &0 { "+" } else { "" }, arg)
}
Instruction(Operation::NOP, arg) => {
write!(f, "nop {}{:?}", if arg > &0 { "+" } else { "" }, arg)
}
}
}
}
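
// Added for illustration: a check of `execute` against the small sample program
// from the puzzle description, built in code instead of being read from the input file.
#[cfg(test)]
mod tests {
    use super::*;

    fn sample_program() -> Vec<Instruction> {
        vec![
            Instruction(Operation::NOP, 0),
            Instruction(Operation::ACC, 1),
            Instruction(Operation::JMP, 4),
            Instruction(Operation::ACC, 3),
            Instruction(Operation::JMP, -3),
            Instruction(Operation::ACC, -99),
            Instruction(Operation::ACC, 1),
            Instruction(Operation::JMP, -4),
            Instruction(Operation::ACC, 6),
        ]
    }

    #[test]
    fn part1_detects_the_loop() {
        assert_eq!(execute(&sample_program(), None), (5, true));
    }

    #[test]
    fn part2_swap_terminates_the_program() {
        // swapping the jmp at position 7 lets the program run off the end
        assert_eq!(execute(&sample_program(), Some(7)), (8, false));
    }
}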
| true |
d87ab5410c4bbfe9f4dbaabedbeca328e5a73deb
|
Rust
|
itudoben/picture-indexer
|
/src/config.rs
|
UTF-8
| 273 | 3 | 3 |
[] |
no_license
|
pub struct Config {
pub picture_path: String,
}
impl Config {
pub fn new(args: &[String]) -> Result<Config, &str> {
if args.len() < 2 {
return Err("not enough arguments");
}
Ok(Config { picture_path: args[1].clone() })
}
}
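
// Added for illustration: a small check of the argument handling above. The
// argument strings are made up; they stand in for `std::env::args().collect()`.
#[cfg(test)]
mod tests {
    use super::Config;

    #[test]
    fn parses_the_picture_path() {
        let args = vec![String::from("picture-indexer"), String::from("/tmp/photos")];
        let config = Config::new(&args).unwrap();
        assert_eq!(config.picture_path, "/tmp/photos");
    }

    #[test]
    fn rejects_a_missing_argument() {
        let args = vec![String::from("picture-indexer")];
        assert!(Config::new(&args).is_err());
    }
}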
| true |
24bef673618ce330c6ab7b709bf79d9ce54832b8
|
Rust
|
trsoluti/MetalByExample-Rust
|
/Common/matrix_kit/src/matrix_types.rs
|
UTF-8
| 18,269 | 2.984375 | 3 |
[
"MIT",
"Apache-2.0"
] |
permissive
|
//
// matrix_types.rs
//
// Original objc code Copyright (c) 2015 Warren Moore
// from https://github.com/metal-by-example/sample-code
// Licensed under MIT.
//
// Translated to Rust by TR Solutions on 18/7/20.
// Copyright © 2020 TR Solutions Pte. Ltd.
// Licensed under Apache 2.0 and MIT
//
// See appropriate LICENCE files for details.
//
// /*
// //
// // MBEMathUtilities.h
// // DrawingIn3DIOS
// //
// // Original Copyright (c) 2015 Warren Moore
// // from https://github.com/metal-by-example/sample-code
// // Licensed under MIT.
// //
// // Updates by TR Solutions on 18/7/20.
// // Copyright © 2020 TR Solutions Pte. Ltd.
// // Licensed under Apache 2.0 and MIT
// //
// // See appropriate LICENCE files for details.
// //
// @import simd;
//
//! Some vector types that provide simdd support.
//!
//! From Apple doc in usr/include/simd/matrix_types.h:
//!
//! For compatibility with common graphics libraries, these matrices are stored
//! in column-major order, and implemented as arrays of column vectors.
//! Column-major storage order may seem a little strange if you aren't used to
//! it, but for most usage the memory layout of the matrices shouldn't matter
//! at all; instead you should think of matrices as abstract mathematical
//! objects that you use to perform arithmetic without worrying about the
//! details of the underlying representation.
use crate::{vector_float4, vector_float3};
use std::ops::Mul;
use std::fmt::{Debug, Formatter};
// Use the Apple shared library instead of bringing in rust's:
extern {
fn sinf(value: f32) -> f32;
fn cosf(value: f32) -> f32;
fn tanf(value: f32) -> f32;
}
/// Return the sin of the given value in radians
#[inline]
fn sinf32(value: f32) -> f32 { unsafe { sinf(value) } }
/// Return the cosine of the given value in radians
#[inline]
fn cosf32(value: f32) -> f32 { unsafe { cosf(value) } }
/// Return the tangent of the given value in radians
#[inline]
fn tanf32(value: f32) -> f32 { unsafe { tanf(value) } }
/// Provides a 4x4 float32 vector
#[allow(non_camel_case_types)]
#[derive(Copy, Clone, Default)]
pub struct matrix_float4x4 {
_private: [vector_float4; 4]
}
impl Debug for matrix_float4x4 {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
let (x, y, z, w) = self.to_tuple();
f.write_fmt(format_args!("{{ matrix_4x4:\nx: {:?}\ny: {:?}\nz: {:?}\nw: {:?}}}", x, y, z, w))
}
}
impl matrix_float4x4 {
/// Creates a new 4x4 matrix from the given values.
#[inline]
pub fn new(x: vector_float4, y: vector_float4, z: vector_float4, w: vector_float4) -> matrix_float4x4 {
matrix_float4x4 { _private: [x, y, z, w] }
}
// /// Builds a translation matrix that translates by the supplied vector
// matrix_float4x4 matrix_float4x4_translation(vector_float3 t);
// matrix_float4x4 matrix_float4x4_translation(vector_float3 t)
// {
/// Builds a translation matrix that translates by the supplied vector.
pub fn translation(translation: vector_float3) -> Self {
// vector_float4 X = { 1, 0, 0, 0 };
// vector_float4 Y = { 0, 1, 0, 0 };
// vector_float4 Z = { 0, 0, 1, 0 };
// vector_float4 W = { t.x, t.y, t.z, 1 };
//
// matrix_float4x4 mat = { X, Y, Z, W };
// return mat;
let x = vector_float4::new(1., 0., 0., 0.);
let y = vector_float4::new(0., 1., 0., 0.);
let z = vector_float4::new(0., 0., 1., 0.);
let w = vector_float4::new(
translation.x(),
translation.y(),
translation.z(),
1.
);
matrix_float4x4::new(x,y,z,w)
}
// }
//
//
// /// Builds a scale matrix that uniformly scales all axes by the supplied factor
// matrix_float4x4 matrix_float4x4_uniform_scale(float scale);
// matrix_float4x4 matrix_float4x4_uniform_scale(float scale)
// {
/// Builds a scale matrix that uniformly scales all axes by the supplied factor.
pub fn uniform_scale(scale: f32) -> matrix_float4x4 {
// vector_float4 X = { scale, 0, 0, 0 };
// vector_float4 Y = { 0, scale, 0, 0 };
// vector_float4 Z = { 0, 0, scale, 0 };
// vector_float4 W = { 0, 0, 0, 1 };
let x = vector_float4::new(scale, 0., 0., 0.);
let y = vector_float4::new(0., scale, 0., 0.);
let z = vector_float4::new(0., 0., scale, 0.);
let w = vector_float4::new(0., 0., 0., 1.);
//
// matrix_float4x4 mat = { X, Y, Z, W };
// return mat;
matrix_float4x4::new(x, y, z, w)
}
// }
//
// /// Builds a rotation matrix that rotates about the supplied axis by an
// /// angle (given in radians). The axis should be normalized.
// matrix_float4x4 matrix_float4x4_rotation(vector_float3 axis, float angle);
/// Builds a rotation matrix that rotates about the supplied axis
/// by an angle (given in radians). The axis should be normalized.
// matrix_float4x4 matrix_float4x4_rotation(vector_float3 axis, float angle)
// {
pub fn rotation(axis: vector_float3, angle: f32) -> Self {
// float c = cos(angle);
// float s = sin(angle);
let c = cosf32(angle);
let s = sinf32(angle);
//
// vector_float4 X;
// X.x = axis.x * axis.x + (1 - axis.x * axis.x) * c;
// X.y = axis.x * axis.y * (1 - c) - axis.z * s;
// X.z = axis.x * axis.z * (1 - c) + axis.y * s;
// X.w = 0.0;
let x = vector_float4::new(
axis.x() * axis.x() + ( 1. - axis.x() * axis.x() ) * c,
axis.x() * axis.y() * (1. - c) - axis.z() * s,
axis.x() * axis.z() * (1. - c) + axis.y() * s,
0.,
);
//
// vector_float4 Y;
// Y.x = axis.x * axis.y * (1 - c) + axis.z * s;
// Y.y = axis.y * axis.y + (1 - axis.y * axis.y) * c;
// Y.z = axis.y * axis.z * (1 - c) - axis.x * s;
// Y.w = 0.0;
let y = vector_float4::new (
axis.x() * axis.y() * (1. - c) + axis.z() * s,
axis.y() * axis.y() + (1. - axis.y() * axis.y()) * c,
axis.y() * axis.z() * (1. - c) - axis.x() * s,
0.0,
);
//
// vector_float4 Z;
// Z.x = axis.x * axis.z * (1 - c) - axis.y * s;
// Z.y = axis.y * axis.z * (1 - c) + axis.x * s;
// Z.z = axis.z * axis.z + (1 - axis.z * axis.z) * c;
// Z.w = 0.0;
let z = vector_float4::new (
axis.x() * axis.z() * (1. - c) - axis.y() * s,
axis.y() * axis.z() * (1. - c) + axis.x() * s,
axis.z() * axis.z() + (1. - axis.z() * axis.z()) * c,
0.
);
//
// vector_float4 W;
// W.x = 0.0;
// W.y = 0.0;
// W.z = 0.0;
// W.w = 1.0;
let w = vector_float4::new(
0.,
0.,
0.,
1.,
);
//
// matrix_float4x4 mat = { X, Y, Z, W };
// return mat;
matrix_float4x4::new(x, y, z, w)
}
// }
//
// /// Builds a symmetric perspective projection matrix with the supplied aspect ratio,
// /// vertical field of view (in radians), and near and far distances
// matrix_float4x4 matrix_float4x4_perspective(float aspect, float fovy, float near, float far);
// matrix_float4x4 matrix_float4x4_perspective(float aspect, float fovy, float near, float far)
// {
/// Builds a symmetric perspective projection matrix with the supplied aspect ratio,
/// vertical field of view (in radians), and near and far distances.
pub fn perspective(aspect: f32, fovy: f32, near: f32, far: f32) -> Self {
// float yScale = 1 / tan(fovy * 0.5);
// float xScale = yScale / aspect;
// float zRange = far - near;
// float zScale = -(far + near) / zRange;
// float wzScale = -2 * far * near / zRange;
let y_scale = 1. / tanf32(fovy * 0.5);
let x_scale = y_scale / aspect;
let z_range = far - near;
        let z_scale = -(far + near) / z_range;
let wz_scale = -2. * far * near / z_range;
//
// vector_float4 P = { xScale, 0, 0, 0 };
// vector_float4 Q = { 0, yScale, 0, 0 };
// vector_float4 R = { 0, 0, zScale, -1 };
// vector_float4 S = { 0, 0, wzScale, 0 };
//
// matrix_float4x4 mat = { P, Q, R, S };
// return mat;
matrix_float4x4::new(
vector_float4::new(x_scale, 0., 0., 0.),
vector_float4::new(0., y_scale, 0., 0.),
vector_float4::new(0., 0., z_scale, -1.),
vector_float4::new(0., 0., wz_scale, 0.)
)
}
// }
//
fn to_tuple(self) -> (vector_float4, vector_float4, vector_float4, vector_float4) {
( self._private[0], self._private[1], self._private[2], self._private[3])
}
fn transpose(self) -> Self {
let (vx, vy, vz, vw) = self.to_tuple();
matrix_float4x4::new(
vector_float4::new(vx.x(), vy.x(), vz.x(), vw.x()),
vector_float4::new(vx.y(), vy.y(), vz.y(), vw.y()),
vector_float4::new(vx.z(), vy.z(), vz.z(), vw.z()),
vector_float4::new(vx.w(), vy.w(), vz.w(), vw.w()),
)
}
}
//
// ============================================================================================
// // MBEMathUtilities.m
// // DrawingIn3DIOS
// //
// // Original Copyright (c) 2015 Warren Moore
// // from https://github.com/metal-by-example/sample-code
// // Licensed under MIT.
// //
// // Updates by TR Solutions on 18/7/20.
// // Copyright © 2020 TR Solutions Pte. Ltd.
// // Licensed under Apache 2.0 and MIT
// //
// // See appropriate LICENCE files for details.
// //
// #import "MBEMathUtilities.h"
//
//
// */
impl Mul<f32> for matrix_float4x4 {
type Output = Self;
fn mul(self, rhs: f32) -> Self::Output {
// This really should be in simd!
let (x, y, z, w) = self.to_tuple();
Self::Output::new(x * rhs, y * rhs, z * rhs, w *rhs)
}
}
impl Mul<matrix_float4x4> for matrix_float4x4 {
type Output = Self;
fn mul(self, rhs: matrix_float4x4) -> Self::Output {
let (tx, ty, tz, tw) = self.transpose().to_tuple();
let (rx, ry, rz, rw) = rhs.to_tuple();
matrix_float4x4::new(
vector_float4::new(
tx.dot_product(rx),
ty.dot_product(rx),
tz.dot_product(rx),
tw.dot_product(rx),
),
vector_float4::new(
tx.dot_product(ry),
ty.dot_product(ry),
tz.dot_product(ry),
tw.dot_product(ry),
),
vector_float4::new(
tx.dot_product(rz),
ty.dot_product(rz),
tz.dot_product(rz),
tw.dot_product(rz),
),
vector_float4::new(
tx.dot_product(rw),
ty.dot_product(rw),
tz.dot_product(rw),
tw.dot_product(rw),
)
)
}
}
#[cfg(test)]
mod tests {
use crate::{vector_float4, matrix_float4x4};
// fn mul_explicit(lhs: matrix_float4x4, rhs: matrix_float4x4) -> matrix_float4x4 {
// println!("Multiplying\n{:?}\nby\n{:?}", lhs, rhs);
// let (tx, ty, tz, tw) = lhs.transpose().to_tuple();
// let (rx, ry, rz, rw) = rhs.to_tuple();
// let axx = tx.dot_product(rx);
// println!("axx = {:?} dot {:?} = {:?}", tx, rx, axx);
// let axy = ty.dot_product(rx);
// println!("axx = {:?} dot {:?} = {:?}", ty, rx, axy);
// let axz = tz.dot_product(rx);
// println!("axx = {:?} dot {:?} = {:?}", tz, rx, axz);
// let axw = tw.dot_product(rx);
// println!("axx = {:?} dot {:?} = {:?}", tw, rx, axw);
// matrix_float4x4::new(
// vector_float4::new(
// tx.dot_product(rx),
// ty.dot_product(rx),
// tz.dot_product(rx),
// tw.dot_product(rx),
// ),
// vector_float4::new(
// tx.dot_product(ry),
// ty.dot_product(ry),
// tz.dot_product(ry),
// tw.dot_product(ry),
// ),
// vector_float4::new(
// tx.dot_product(rz),
// ty.dot_product(rz),
// tz.dot_product(rz),
// tw.dot_product(rz),
// ),
// vector_float4::new(
// tx.dot_product(rw),
// ty.dot_product(rw),
// tz.dot_product(rw),
// tw.dot_product(rw),
// )
// )
// }
#[test]
fn it_works() {
assert_eq!(2 + 2, 4);
}
#[test]
fn test_matrix_multiplication_scalar() {
let m1 = matrix_float4x4::new(
vector_float4::new(1., 2., 3., 4.),
vector_float4::new(5., 6., 7., 8.),
vector_float4::new(9., 10., 11., 12.),
vector_float4::new(13., 14., 15., 16.)
);
let m2 = m1 * 0.5;
assert_eq!(0.5, m2._private[0].x());
assert_eq!(1.0, m2._private[0].y());
assert_eq!(1.5, m2._private[0].z());
assert_eq!(2.0, m2._private[0].w());
assert_eq!(2.5, m2._private[1].x());
assert_eq!(3.0, m2._private[1].y());
assert_eq!(3.5, m2._private[1].z());
assert_eq!(4.0, m2._private[1].w());
assert_eq!(4.5, m2._private[2].x());
assert_eq!(5.0, m2._private[2].y());
assert_eq!(5.5, m2._private[2].z());
assert_eq!(6.0, m2._private[2].w());
assert_eq!(6.5, m2._private[3].x());
assert_eq!(7.0, m2._private[3].y());
assert_eq!(7.5, m2._private[3].z());
assert_eq!(8.0, m2._private[3].w());
}
#[test]
fn test_matrix_multiplication_matrix() {
let m1 = matrix_float4x4::new(
vector_float4::new(1., 2., 3., 4.),
vector_float4::new(5., 6., 7., 8.),
vector_float4::new(9., 10., 11., 12.),
vector_float4::new(13., 14., 15., 16.)
);
let m2 = matrix_float4x4::new(
vector_float4::new(17., 18., 19., 20.),
vector_float4::new(21., 22., 23., 24.),
vector_float4::new(25., 26., 27., 28.),
vector_float4::new(29., 30., 31., 32.)
);
let m3 = m1 * m2;
println!("m1 x m2 = {:?}", m3);
// Answers found using online matrix multiplier:
// https://www.symbolab.com/solver/matrix-multiply-calculator/%5Cbegin%7Bpmatrix%7D1%265%269%2613%5C%5C%20%20%202%266%2610%2614%5C%5C%20%20%203%267%2611%2615%5C%5C%20%20%204%268%2612%2616%5Cend%7Bpmatrix%7D%5Ccdot%5Cbegin%7Bpmatrix%7D17%2621%2625%2629%5C%5C%20%20%2018%2622%2626%2630%5C%5C%20%20%2019%2623%2627%2631%5C%5C%20%2020%2624%2628%2632%5Cend%7Bpmatrix%7D
assert_eq!( 538., m3._private[0].x());
assert_eq!( 650., m3._private[1].x());
assert_eq!( 762., m3._private[2].x());
assert_eq!( 874., m3._private[3].x());
assert_eq!( 612., m3._private[0].y());
assert_eq!( 740., m3._private[1].y());
assert_eq!( 868., m3._private[2].y());
assert_eq!( 996., m3._private[3].y());
assert_eq!( 686., m3._private[0].z());
assert_eq!( 830., m3._private[1].z());
assert_eq!( 974., m3._private[2].z());
assert_eq!(1118., m3._private[3].z());
assert_eq!( 760., m3._private[0].w());
assert_eq!( 920., m3._private[1].w());
assert_eq!(1080., m3._private[2].w());
assert_eq!(1240., m3._private[3].w());
}
#[test]
fn test_matrix_rot_mult() {
// ------------------ x ----------------- ------------------ y ----------------- ------------------ z ----------------- ------------------ w -----------------
// rust: x rot: matrix_float4x4 { _private: [vector_float4 { _private: [1.000000, 0.000000, 0.000000, 0.000000] }, vector_float4 { _private: [0.000000, 0.998629, -0.052344, 0.000000] }, vector_float4 { _private: [ 0.000000, 0.052345, 0.998629, 0.000000] }, vector_float4 { _private: [0.000000, 0.000000, 0.000000, 1.000000] }] }
// rust: y rot: matrix_float4x4 { _private: [vector_float4 { _private: [0.999391, 0.000000, 0.034906, 0.000000] }, vector_float4 { _private: [0.000000, 1.000000, 0.000000, 0.000000] }, vector_float4 { _private: [-0.034905, 0.000000, 0.999391, 0.000000] }, vector_float4 { _private: [0.000000, 0.000000, 0.000000, 1.000000] }] }
// objc: Rot Matrix: matrix_float4x4 { _private: [vector_float4 { _private: [0.999391, 0.001827, 0.034859, 0.000000] }, vector_float4 { _private: [0.000000, 0.998629, -0.052347, 0.000000] }, vector_float4 { _private: [-0.034907, 0.052315, 0.998020, 0.000000] }, vector_float4 { _private: [0.000000, 0.000000, 0.000000, 1.000000] }] }
let x_rot = matrix_float4x4::new(
vector_float4::new(1.000000, 0.000000, 0.000000, 0.000000),
vector_float4::new(0.000000, 0.998629, -0.052344, 0.000000),
vector_float4::new(0.000000, 0.052345, 0.998629, 0.000000),
vector_float4::new(0.000000, 0.000000, 0.000000, 1.000000)
);
let y_rot = matrix_float4x4::new(
vector_float4::new(0.999391, 0.000000, 0.034906, 0.000000),
vector_float4::new(0.000000, 1.000000, 0.000000, 0.000000),
vector_float4::new(-0.034905, 0.000000, 0.999391, 0.000000),
vector_float4::new(0.000000, 0.000000, 0.000000, 1.000000)
);
let rot = x_rot * y_rot;
let exp = matrix_float4x4::new(
vector_float4::new(0.999391, 0.001827, 0.034859, 0.000000),
vector_float4::new(0.000000, 0.998629, -0.052347, 0.000000),
vector_float4::new(-0.034907, 0.052315, 0.998020, 0.000000),
vector_float4::new(0.000000, 0.000000, 0.000000, 1.000000)
);
        assert!((exp._private[0].x() - rot._private[0].x()).abs() < 0.00001);
}
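    // Added for illustration: a small check of the uniform scale constructor.
    #[test]
    fn test_uniform_scale() {
        let m = matrix_float4x4::uniform_scale(2.0);
        assert_eq!(2.0, m._private[0].x());
        assert_eq!(2.0, m._private[1].y());
        assert_eq!(2.0, m._private[2].z());
        assert_eq!(1.0, m._private[3].w());
    }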
}
| true |
a8993e9c9fd8302e2e8d95834c97b7665da3a6f6
|
Rust
|
ciroDourado/dicionario
|
/src/silabacao/silabar.rs
|
UTF-8
| 454 | 2.71875 | 3 |
[] |
no_license
|
use hyphenation::Hyphenator;
use hyphenation::Standard;
use hyphenation::Language;
use hyphenation::Load;
use hyphenation::Iter;
pub fn separar(termo: &str) -> Vec<String> {
Standard::from_embedded(Language::Portuguese)
.map(|regras | regras.hyphenate(termo))
.map(|unidades| unidades.iter().collect())
.unwrap_or_default()
} // separar
pub fn formatar(silabas: &Vec<String>) -> String {
silabas.concat()
} // formatar
| true |
d81803b3732984e6c1f46fb341555b3dca599c5a
|
Rust
|
kingingo/Nayuki-web-published-code
|
/avl-tree-list/avltreelist.rs
|
UTF-8
| 10,103 | 2.984375 | 3 |
[] |
no_license
|
/*
* AVL tree list (Rust)
*
* Copyright (c) 2021 Project Nayuki. (MIT License)
* https://www.nayuki.io/page/avl-tree-list
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of
* this software and associated documentation files (the "Software"), to deal in
* the Software without restriction, including without limitation the rights to
* use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
* the Software, and to permit persons to whom the Software is furnished to do so,
* subject to the following conditions:
* - The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
* - The Software is provided "as is", without warranty of any kind, express or
* implied, including but not limited to the warranties of merchantability,
* fitness for a particular purpose and noninfringement. In no event shall the
* authors or copyright holders be liable for any claim, damages or other
* liability, whether in an action of contract, tort or otherwise, arising from,
* out of or in connection with the Software or the use or other dealings in the
* Software.
*/
use std;
#[derive(Clone)]
pub struct AvlTreeList<E> {
root: MaybeNode<E>,
}
impl<E> AvlTreeList<E> {
pub fn new() -> Self {
Self { root: MaybeNode(None) }
}
pub fn is_empty(&self) -> bool {
!self.root.exists()
}
pub fn len(&self) -> usize {
self.root.size()
}
pub fn push(&mut self, val: E) {
let index = self.len();
self.insert(index, val);
}
pub fn extend<I:IntoIterator<Item=E>>(&mut self, iterable: I) {
let index = self.len();
self.insert_iter(index, iterable);
}
pub fn insert_iter<I:IntoIterator<Item=E>>(&mut self, mut index: usize, iterable: I) {
let mut iterator = iterable.into_iter();
while let Some(val) = iterator.next() {
self.insert(index, val);
index += 1;
}
}
pub fn insert(&mut self, index: usize, val: E) {
assert!(index <= self.len(), "Index out of bounds"); // Different constraint than the other methods
assert!(self.len() < std::usize::MAX, "Maximum size reached");
self.root = self.root.pop().insert_at(index, val);
}
pub fn remove(&mut self, index: usize) -> E {
assert!(index < self.len(), "Index out of bounds");
let mut result: Option<E> = None;
self.root = self.root.pop().remove_at(index, &mut result);
result.unwrap()
}
pub fn clear(&mut self) {
self.root.0.take();
}
// For unit tests.
pub fn check_structure(&self) {
self.root.check_structure();
}
}
impl<E> Default for AvlTreeList<E> {
fn default() -> Self {
Self::new()
}
}
impl<E> std::ops::Index<usize> for AvlTreeList<E> {
type Output = E;
fn index(&self, index: usize) -> &E {
assert!(index < self.len(), "Index out of bounds");
&self.root.node_ref().get_at(index)
}
}
impl<E> std::ops::IndexMut<usize> for AvlTreeList<E> {
fn index_mut(&mut self, index: usize) -> &mut E {
assert!(index < self.len(), "Index out of bounds");
self.root.node_mut().get_at_mut(index)
}
}
/*---- Helper structs: AVL tree nodes ----*/
#[derive(Clone)]
struct MaybeNode<E>(Option<Box<Node<E>>>);
impl<E> MaybeNode<E> {
fn exists(&self) -> bool {
self.0.is_some()
}
fn size(&self) -> usize {
self.0.as_ref().map_or(0, |node| node.size)
}
fn height(&self) -> i16 {
self.0.as_ref().map_or(0, |node| node.height)
}
fn node_ref(&self) -> &Node<E> {
self.0.as_ref().unwrap().as_ref()
}
fn node_mut(&mut self) -> &mut Node<E> {
self.0.as_mut().unwrap().as_mut()
}
fn pop(&mut self) -> Self {
MaybeNode(self.0.take())
}
fn insert_at(mut self, index: usize, val: E) -> Self {
assert!(index <= self.size());
if !self.exists() { // Automatically implies index == 0, because MaybeNode(None).size() == 0
return MaybeNode(Some(Box::new(Node::new(val))));
} else {
let node = self.node_mut();
let leftsize = node.left.size();
if index <= leftsize {
node.left = node.left.pop().insert_at(index, val);
} else {
node.right = node.right.pop().insert_at(index - leftsize - 1, val);
}
node.recalculate();
}
self.balance()
}
fn remove_at(mut self, index: usize, outval: &mut Option<E>) -> Self {
let mut done = true;
{ // Modify the current object
let node = self.node_mut();
let leftsize = node.left.size();
// Recursively find and remove a node
if index < leftsize {
node.left = node.left.pop().remove_at(index, outval);
} else if index > leftsize {
node.right = node.right.pop().remove_at(index - leftsize - 1, outval);
} else if node.left.exists() && node.right.exists() {
node.right = node.right.pop().remove_at(0, outval); // Remove successor node
std::mem::swap(outval.as_mut().unwrap(), &mut node.value); // Replace value by successor
} else {
done = false;
}
}
if done {
self.node_mut().recalculate();
return self.balance();
}
// Remove current node and return a child or nothing
let node = *self.0.unwrap();
*outval = Some(node.value);
if node.left.exists() {
node.left
} else if node.right.exists() {
node.right
} else {
MaybeNode(None)
}
}
fn balance(mut self) -> Self {
let bal = self.node_ref().get_balance();
assert!(bal.abs() <= 2);
if bal == -2 {
{
let node = self.node_mut();
let childbal = node.left.node_ref().get_balance();
assert!(childbal.abs() <= 1);
if childbal == 1 {
node.left = node.left.pop().rotate_left();
}
}
self = self.rotate_right();
} else if bal == 2 {
{
let node = self.node_mut();
let childbal = node.right.node_ref().get_balance();
assert!(childbal.abs() <= 1);
if childbal == -1 {
node.right = node.right.pop().rotate_right();
}
}
self = self.rotate_left();
}
assert!(self.node_ref().get_balance().abs() <= 1);
self
}
/*
* A B
* / \ / \
* 0 B -> A 2
* / \ / \
* 1 2 0 1
*/
fn rotate_left(mut self) -> Self {
let mut root;
{
let selfnode = self.node_mut();
root = selfnode.right.pop();
std::mem::swap(&mut selfnode.right, &mut root.node_mut().left);
selfnode.recalculate();
} {
let rootnode = root.node_mut();
rootnode.left = self;
rootnode.recalculate();
}
root
}
/*
* B A
* / \ / \
* A 2 -> 0 B
* / \ / \
* 0 1 1 2
*/
fn rotate_right(mut self) -> Self {
let mut root;
{
let selfnode = self.node_mut();
root = selfnode.left.pop();
std::mem::swap(&mut selfnode.left, &mut root.node_mut().right);
selfnode.recalculate();
} {
let rootnode = root.node_mut();
rootnode.right = self;
rootnode.recalculate();
}
root
}
fn check_structure(&self) {
if let Some(ref node) = self.0 {
node.check_structure();
}
}
}
#[derive(Clone)]
struct Node<E> {
// The object stored at this node.
value: E,
// The height of the tree rooted at this node. Empty nodes have height 0.
// This node has height equal to max(left.height, right.height) + 1.
height: i16,
// The number of non-empty nodes in the tree rooted at this node, including this node.
// Empty nodes have size 0. This node has size equal to left.size + right.size + 1.
size: usize,
// The root node of the left subtree.
left: MaybeNode<E>,
// The root node of the right subtree.
right: MaybeNode<E>,
}
impl<E> Node<E> {
fn new(val: E) -> Self {
Self {
value : val,
height: 1,
size : 1,
left : MaybeNode(None),
right : MaybeNode(None),
}
}
fn get_at(&self, index: usize) -> &E {
assert!(index < self.size);
let leftsize = self.left.size();
if index < leftsize {
self.left.node_ref().get_at(index)
} else if index > leftsize {
self.right.node_ref().get_at(index - leftsize - 1)
} else {
&self.value
}
}
fn get_at_mut(&mut self, index: usize) -> &mut E {
assert!(index < self.size);
let leftsize = self.left.size();
if index < leftsize {
self.left.node_mut().get_at_mut(index)
} else if index > leftsize {
self.right.node_mut().get_at_mut(index - leftsize - 1)
} else {
&mut self.value
}
}
fn recalculate(&mut self) {
assert!(self.left .height() >= 0);
assert!(self.right.height() >= 0);
self.height = std::cmp::max(self.left.height(), self.right.height()).checked_add(1).unwrap();
self.size = self.left.size().checked_add(self.right.size()).unwrap().checked_add(1).unwrap();
assert!(self.height >= 0);
}
fn get_balance(&self) -> i16 {
self.right.height() - self.left.height()
}
fn check_structure(&self) {
self.left .check_structure();
self.right.check_structure();
assert_eq!(self.height, std::cmp::max(self.left.height(), self.right.height()).checked_add(1).unwrap());
assert_eq!(self.size, self.left.size().checked_add(self.right.size()).unwrap().checked_add(1).unwrap());
assert!(self.get_balance().abs() <= 1);
}
}
/*---- Helper struct: AVL tree iterator ----*/
impl<'a, E> IntoIterator for &'a AvlTreeList<E> {
type Item = &'a E;
type IntoIter = Iter<'a, E>;
fn into_iter(self) -> Self::IntoIter {
Iter::<E>::new(&self.root)
}
}
#[derive(Clone)]
pub struct Iter<'a, E:'a> {
count: usize,
stack: Vec<&'a Node<E>>,
}
impl<'a, E> Iter<'a, E> {
fn new(root: &'a MaybeNode<E>) -> Self {
let mut result = Self {
count: root.size(),
stack: Vec::new(),
};
result.push_left_path(root);
result
}
fn push_left_path(&mut self, mut maybenode: &'a MaybeNode<E>) {
while let Some(ref node) = maybenode.0 {
self.stack.push(node.as_ref());
maybenode = &node.left;
}
}
}
impl<'a, E> Iterator for Iter<'a, E> {
type Item = &'a E;
fn next(&mut self) -> Option<Self::Item> {
let node: &Node<E> = self.stack.pop()?;
self.push_left_path(&node.right);
self.count -= 1;
Some(&node.value)
}
fn size_hint(&self) -> (usize,Option<usize>) {
(self.count, Some(self.count))
}
fn count(self) -> usize {
self.count
}
}
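
/*---- Usage sketch (added for illustration, not part of the original) ----*/
#[cfg(test)]
mod tests {
    use super::AvlTreeList;

    #[test]
    fn push_insert_remove_and_iterate() {
        let mut list = AvlTreeList::<i32>::new();
        for i in 0..100 {
            list.push(i);
        }
        list.insert(0, -1); // insert at the front
        assert_eq!(list.len(), 101);
        assert_eq!(list[0], -1);
        assert_eq!(list[100], 99);
        assert_eq!(list.remove(0), -1);
        assert_eq!(list.len(), 100);
        list.check_structure();
        let collected: Vec<i32> = list.into_iter().cloned().collect();
        assert_eq!(collected, (0..100).collect::<Vec<i32>>());
    }
}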
| true |
407c627187aa742ed51121d3858953b165bc4c14
|
Rust
|
cvybhu/sasinc
|
/src/sasinc/runtime.rs
|
UTF-8
| 4,198 | 2.625 | 3 |
[] |
no_license
|
use std::cell::{Cell, RefCell};
use std::collections::{BTreeMap, VecDeque};
use std::future::Future;
use std::pin::Pin;
use std::rc::Rc;
use std::task::{Context, RawWaker, RawWakerVTable, Waker};
use std::time::Instant;
type BoxFuture = Pin<Box<dyn Future<Output = ()> + 'static>>;
struct Task {
future: RefCell<BoxFuture>,
runtime: Rc<Runtime>,
}
impl Task {
fn wake(self: Rc<Task>) {
self.wake_by_ref();
}
fn wake_by_ref(self: &Rc<Task>) {
self.runtime.task_queue.borrow_mut().push_back(self.clone());
}
fn into_waker(self: Rc<Task>) -> Waker {
// Why the hell can't I do Rc<dyn WakerTrait> ????
return unsafe {
Waker::from_raw(RawWaker::new(
Rc::into_raw(self) as *const (),
Self::waker_vtable(),
))
};
}
fn waker_vtable() -> &'static RawWakerVTable {
return &RawWakerVTable::new(
Self::clone_rc_raw,
Self::wake_rc_raw,
Self::wake_by_ref_rc_raw,
Self::drop_rc_raw,
);
}
unsafe fn clone_rc_raw(data: *const ()) -> RawWaker {
let rc: Rc<Task> = Rc::<Task>::from_raw(data as *const Task); // Recover Rc from pointer
let result_rc: Rc<Task> = rc.clone();
let _ = Rc::into_raw(rc); // don't drop the Rc
return RawWaker::new(Rc::into_raw(result_rc) as *const (), Self::waker_vtable());
}
unsafe fn wake_rc_raw(data: *const ()) {
let rc: Rc<Task> = Rc::<Task>::from_raw(data as *const Task); // Recover Rc from pointer
rc.wake();
}
unsafe fn wake_by_ref_rc_raw(data: *const ()) {
let rc: Rc<Task> = Rc::<Task>::from_raw(data as *const Task); // Recover Rc from pointer
rc.wake_by_ref();
        let _ = Rc::into_raw(rc); // don't drop the Rc
}
unsafe fn drop_rc_raw(data: *const ()) {
drop(Rc::<Task>::from_raw(data as *const Task));
}
}
pub struct Runtime {
task_queue: RefCell<VecDeque<Rc<Task>>>,
waiting_timers: RefCell<BTreeMap<(Instant, u64), Waker>>,
next_timer_id: Cell<u64>,
}
impl Runtime {
pub fn new() -> Rc<Runtime> {
return Rc::new(Runtime {
task_queue: RefCell::new(VecDeque::new()),
waiting_timers: RefCell::new(BTreeMap::new()),
next_timer_id: Cell::new(0),
});
}
pub fn spawn(self: &Rc<Self>, future: impl Future<Output = ()> + 'static) {
self.task_queue.borrow_mut().push_back(Rc::new(Task {
future: RefCell::new(Box::pin(future)),
runtime: self.clone(),
}));
}
pub fn run(self: &Rc<Self>) {
while self.task_queue.borrow().len() != 0 || self.waiting_timers.borrow().len() != 0 {
loop {
let task: Rc<Task> = match self.task_queue.borrow_mut().pop_front() {
Some(task) => task,
None => break,
};
let waker: Waker = task.clone().into_waker();
let _ = task
.future
.borrow_mut()
.as_mut()
.poll(&mut Context::from_waker(&waker));
}
// Nothing to do - wait until first timer expires
let (timer_until, timer_id): (Instant, u64) =
match self.waiting_timers.borrow().keys().next() {
Some(key) => *key,
None => continue,
};
let now = std::time::Instant::now();
if now < timer_until {
std::thread::sleep(timer_until.duration_since(now));
}
let timer_waker: Waker = self
.waiting_timers
.borrow_mut()
.remove(&(timer_until, timer_id))
.unwrap();
timer_waker.wake();
}
}
pub fn register_timer(&self, sleep_until_time: Instant, waker: Waker) {
let cur_timer_id: u64 = self.next_timer_id.get();
self.next_timer_id.set(cur_timer_id.wrapping_add(1));
self.waiting_timers
.borrow_mut()
.insert((sleep_until_time, cur_timer_id), waker);
}
}
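
// Added for illustration: a minimal usage sketch that spawns two plain futures
// and drives them to completion. Futures that sleep would additionally need a
// timer future calling `register_timer`, which is not shown here.
#[cfg(test)]
mod tests {
    use super::Runtime;

    #[test]
    fn runs_spawned_futures_to_completion() {
        let runtime = Runtime::new();
        runtime.spawn(async {
            println!("first task");
        });
        runtime.spawn(async {
            println!("second task");
        });
        runtime.run();
    }
}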
| true |
c3a3e3eda0014a49f740d1a190185677e5c1766c
|
Rust
|
richardlford/digsim
|
/rust/build02/src/main.rs
|
UTF-8
| 561 | 2.65625 | 3 |
[] |
no_license
|
mod data;
mod state;
use state::{initialize_sim, Simulation};
fn main() {
// initialize time data object and spring data object
let time_data: data::TimeData = Default::default();
let spring_data: data::SpringData = Default::default();
// initialize the simulation state object
let sim_state = initialize_sim(time_data, spring_data);
// display initial state
println!("{}", sim_state.show_time_x_xd());
// iterate the simulation
sim_state
.take_while(|sim| !sim.is_done())
.for_each(|sim| println!("{}", sim.show_time_x_xd()));
}
| true |
10c424eb1998eb7b754761d449472cb7198ec7a8
|
Rust
|
mythmon/advent-of-code
|
/year2017/src/day23/part1.rs
|
UTF-8
| 1,134 | 2.671875 | 3 |
[
"MIT"
] |
permissive
|
use crate::day18::{Instr, InstrType, Machine};
use advent_lib::cases::{GenericPuzzleCase, PuzzleCase, PuzzleRunner};
#[derive(Debug)]
pub struct Part1;
impl PuzzleRunner for Part1 {
type Input = &'static str;
type Output = usize;
fn name(&self) -> String {
"2017-D23-P1".to_owned()
}
fn cases(&self) -> Result<Vec<Box<dyn PuzzleCase>>, Box<dyn std::error::Error>> {
Ok(GenericPuzzleCase::<Self, _, _>::build_set()
.case("Solution", include_str!("input"), 4_225_usize)
.collect())
}
fn run_puzzle(input: Self::Input) -> Self::Output {
let instructions: Vec<Instr> = input.trim().lines().map(|l| l.parse().unwrap()).collect();
let mut machine = Machine::new(0, instructions);
machine.run();
machine.debug_counts[&InstrType::Mul]
}
}
#[test]
fn test_h() {
let input = include_str!("input");
let instructions: Vec<Instr> = input.trim().lines().map(|l| l.parse().unwrap()).collect();
let mut machine = Machine::new(0, instructions);
machine.run();
assert_eq!(*machine.registers.get(&'h').unwrap_or(&0), 0);
}
| true |
e15c1cf51901b1221f5d8ae9d70ff3c4452b6522
|
Rust
|
tathanhdinh/tocc
|
/src/backend/support.rs
|
UTF-8
| 5,161 | 2.71875 | 3 |
[] |
no_license
|
use std::{
collections::HashMap,
hint::unreachable_unchecked,
ops::{Add, Rem},
};
use cranelift::prelude::*;
use cranelift_codegen::ir::entities::StackSlot;
use cranelift_module::FuncId;
use rand::{thread_rng, Rng};
use crate::{
checked_if_let, checked_unwrap_option,
frontend::syntax::{
BinaryOperator, BinaryOperatorExpression, Constant, Declaration, Declarator, Expression,
Identifier, StructType, TypeSpecifier, UnaryOperator, UnaryOperatorExpression,
},
};
pub enum SimpleConcreteType {
ConstantTy(i64),
ValueTy(Value),
StackSlotTy(StackSlot),
UnitTy,
}
// EaC 7.7.1 Understanding Structure Layout
// DRAGON 6.3.4 Storage Layouts for Local Names
#[derive(Clone)]
pub struct AggregateType<'a> {
pub fields: Vec<(&'a str, Type)>,
}
impl AggregateType<'_> {
pub fn field_offset(&self, field_name: &str) -> Option<usize> {
let mut os = 0usize;
for (fname, fty) in &self.fields {
if *fname == field_name {
return Some(os);
} else {
os += fty.bytes() as usize;
}
}
None
}
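    // Example: with fields [("x", types::I8), ("y", types::I32)],
    // field_offset("y") == Some(1), since fields are laid out back to back
    // with no alignment padding (and bytes() == 5 for the whole struct).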
pub fn field_type(&self, field_name: &str) -> Option<Type> {
let (_, fty) = self.fields.iter().find(|(fname, _)| *fname == field_name)?;
Some(*fty)
}
pub fn bytes(&self) -> usize {
self.fields.iter().fold(0usize, |sum, (_, fty)| sum + fty.bytes() as usize)
}
}
impl<'a> Into<AggregateType<'a>> for &'_ StructType<'a> {
fn into(self) -> AggregateType<'a> {
// struct type definition: each declaration is a field declaration
let StructType { declarations, .. } = self;
let declarations = checked_unwrap_option!(declarations.as_ref());
let fields: Vec<(&str, Type)> = declarations
.iter()
.map(|Declaration { specifier, declarator }| {
checked_if_let!(Some(Declarator { ident: Identifier(ident), .. }), declarator, {
(*ident, specifier.into())
})
})
.collect();
AggregateType { fields }
}
}
#[derive(Clone)]
pub struct FunctionType {
pub return_ty: Option<Type>,
pub param_ty: Vec<Type>,
}
#[derive(Clone)]
pub enum SimpleType<'a> {
PrimitiveTy(Type),
AggregateTy(AggregateType<'a>),
FunctionTy(FunctionType),
PointerTy(Box<SimpleType<'a>>),
}
#[derive(Clone)]
pub struct PrimitiveIdentifier<'a> {
pub ident: Variable,
pub ty: SimpleType<'a>,
}
#[derive(Clone)]
pub struct FunctionIdentifier<'a> {
pub ident: FuncId,
pub ty: SimpleType<'a>,
}
#[derive(Clone)]
pub struct AggregateIdentifier<'a> {
pub ident: StackSlot,
pub ty: SimpleType<'a>,
}
#[derive(Clone)]
pub struct PointerIdentifer<'a> {
pub ident: Variable,
pub ty: SimpleType<'a>,
}
#[derive(Clone)]
pub enum SimpleTypedIdentifier<'a> {
PrimitiveIdent(PrimitiveIdentifier<'a>),
AggregateIdent(AggregateIdentifier<'a>),
FunctionIdent(FunctionIdentifier<'a>),
PointerIdent(PointerIdentifer<'a>),
}
impl Into<Type> for &TypeSpecifier<'_> {
fn into(self) -> Type {
use TypeSpecifier::*;
match self {
CharTy => types::I8,
ShortTy => types::I16,
IntTy => types::I32,
LongTy => types::I64,
_ => unsafe { unreachable_unchecked() },
}
}
}
// binding context
pub type NameBindingEnvironment<'a> = HashMap<&'a str, SimpleTypedIdentifier<'a>>;
// visible types
pub type TypeBindingEnvironment<'a> = HashMap<&'a str, SimpleType<'a>>;
// backend-ed module
// pub type JitModule = Module<SimpleJITBackend>;
pub fn evaluate_constant_arithmetic_expression(expr: &'_ Expression) -> Option<i64> {
use Expression::*;
match expr {
ConstantExpr(Constant::IntegerConst(i)) => Some(i.into()),
UnaryOperatorExpr(UnaryOperatorExpression { operator, operand }) => {
use UnaryOperator::*;
let val = evaluate_constant_arithmetic_expression(operand.as_ref())?;
match operator {
Negation => Some(-val),
PreIncrement => Some(val + 1),
PostIncrement => Some(val),
Address => None,
}
}
BinaryOperatorExpr(BinaryOperatorExpression { operator, lhs, rhs }) => {
use BinaryOperator::*;
let lval = evaluate_constant_arithmetic_expression(lhs.as_ref())?;
let rval = evaluate_constant_arithmetic_expression(rhs.as_ref())?;
match operator {
Multiplication => Some(lval * rval),
Division => Some(lval / rval),
Addition => Some(lval + rval),
Subtraction => Some(lval - rval),
_ => None,
}
}
_ => None,
}
}
pub fn generate_random_partition(sum: u32) -> Vec<Type> {
let mut partition = Vec::new();
let mut rng = thread_rng();
let mut current_sum = sum;
loop {
if current_sum == 0 {
break;
}
let num = rng.gen_range(1, current_sum + 1);
match num {
1 => partition.push(types::I8),
2 => partition.push(types::I16),
// 4 => partition.push(types::I32),
_ => {
continue;
}
}
current_sum -= num;
}
partition
}
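// Note on the macro below: it picks a random odd multiplier `a0` and offset `b0`,
// then derives `a1` as the multiplicative inverse of `a0` modulo 2^N (the loop
// stops once a1 * a0 wraps to 1) and `b1 = -(a1 * b0)`, so the two affine maps
// compose to the identity: a1 * (a0 * x + b0) + b1 == x under wrapping arithmetic.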
#[macro_export]
macro_rules! generate_random_maps {
($ty:ty) => {
{
use rand::{thread_rng, Rng};
let mut rng = thread_rng();
let a0 = {
let a: $ty = rng.gen();
if a % 2 == 0 {
a + 1
} else {
a
}
};
let a1 = {
let mut a1 = a0;
loop {
if a1.wrapping_mul(a0) == 1 {
break;
}
a1 = a1.wrapping_mul(a0);
}
a1
};
let b0: $ty = rng.gen();
let b1 = a1.wrapping_mul(b0).wrapping_neg();
(a0, b0, a1, b1)
}
}
}
| true |
dd9cd16c8a233d250f8e366325b6e8afbb4e65c4
|
Rust
|
pismute/exercism
|
/rust/protein-translation/src/lib.rs
|
UTF-8
| 930 | 3.140625 | 3 |
[] |
no_license
|
use std::collections::{HashMap, HashSet};
pub struct CodonsInfo<'a> {
dict: HashMap<&'a str, &'a str>,
stops: HashSet<&'a str>,
}
impl<'a> CodonsInfo<'a> {
pub fn name_for(&self, codon: &str) -> Option<&'a str> {
        self.dict.get(codon).copied()
}
pub fn of_rna(&self, rna: &str) -> Option<Vec<&'a str>> {
rna.as_bytes()
.chunks(3)
.filter_map(|x| std::str::from_utf8(x).ok())
.take_while(|y| !self.stops.contains(*y))
.try_fold(vec![], |mut acc, x| match x.len() {
3 => self.name_for(x).map(|y| {
acc.push(y);
acc
}),
_ => None,
})
}
}
pub fn parse<'a>(pairs: Vec<(&'a str, &'a str)>) -> CodonsInfo<'a> {
CodonsInfo {
dict: pairs.into_iter().collect(),
stops: vec!["UAA", "UAG", "UGA"].into_iter().collect(),
}
}
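// A minimal usage sketch; the single codon pair below is illustrative, the full
// table is supplied by the caller of `parse`.
#[cfg(test)]
mod usage_sketch {
    use super::parse;

    #[test]
    fn translates_until_stop_codon() {
        let info = parse(vec![("AUG", "methionine")]);
        assert_eq!(info.name_for("AUG"), Some("methionine"));
        // Translation stops at the first stop codon ("UAA", "UAG" or "UGA").
        assert_eq!(info.of_rna("AUGUAA"), Some(vec!["methionine"]));
    }
}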
| true |
a43a82b17c605b07e6ffa831a8c44e1406627b8b
|
Rust
|
timstokman/adventofcode2020
|
/src/day13.rs
|
UTF-8
| 2,570 | 3.078125 | 3 |
[] |
no_license
|
use crate::common;
use std::fs::File;
use std::io::{self, BufRead};
use simple_error::SimpleError;
fn wait_time(start_time: i64, bus_nr: i64) -> i64 {
bus_nr - (start_time % bus_nr)
}
fn chinese_remainder(a: &[i64], n: &[i64]) -> Option<i64> {
let prod: i64 = n.iter().product();
let sum: i64 =
a.iter()
.zip(n.iter())
.map(|(&a_i, &n_i)| {
let p = prod / n_i;
mod_inv(p, n_i).map(|inv| a_i * p * inv)
})
.collect::<Option<Vec<i64>>>()?
.iter()
.sum();
return Some(sum % prod);
}
fn egcd(a: i64, b: i64) -> (i64, i64, i64) {
if a == 0 {
(b, 0, 1)
} else {
let (g, x, y) = egcd(b % a, a);
(g, y - (b / a) * x, x)
}
}
fn mod_inv(x: i64, n: i64) -> Option<i64> {
let (g, x, _) = egcd(x, n);
if g == 1 {
Some((x % n + n) % n)
} else {
None
}
}
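// Worked example for the helpers above (plain arithmetic, not puzzle data):
// mod_inv(3, 7) == Some(5) because 3 * 5 = 15 ≡ 1 (mod 7), while
// mod_inv(2, 4) == None because gcd(2, 4) != 1.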
fn part_1(start_time: i64, bus_nrs: &[Option<i64>]) -> common::BoxResult<i64> {
let answers = bus_nrs.iter().filter_map(|b| b.map(|bus_nr| (bus_nr, wait_time(start_time, bus_nr)))).collect::<Vec<_>>();
let min_wait = answers.iter().map(|a| a.1).min().ok_or_else(|| SimpleError::new("no bus nrs"))?;
let answer = answers.iter().find(|a| a.1 == min_wait).ok_or_else(|| SimpleError::new("no bus nrs"))?;
Ok(answer.0 * answer.1)
}
fn part_2(bus_nrs: &[Option<i64>]) -> common::BoxResult<i64> {
let a = bus_nrs.iter().enumerate().filter_map(|(i, bus_nr)| bus_nr.map(|b| b - i as i64)).collect::<Vec<_>>();
let n = bus_nrs.iter().filter_map(|bus_nr| *bus_nr).collect::<Vec<_>>();
Ok(chinese_remainder(&a, &n).ok_or_else(|| SimpleError::new("no solution"))?)
}
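// Sanity check against the well-known puzzle example: the schedule
// "7,13,x,x,59,x,31,19" reduces to t ≡ 0 (mod 7), t ≡ -1 (mod 13),
// t ≡ -4 (mod 59), t ≡ -6 (mod 31), t ≡ -7 (mod 19), whose smallest
// non-negative solution is 1068781.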
pub fn answer() -> common::BoxResult<(i64, i64)> {
let input = read_input("day13_input")?;
Ok((part_1(input.0, &input.1)?, part_2(&input.1)?))
}
fn read_input(file: &str) -> common::BoxResult<(i64, Vec<Option<i64>>)> {
let file = File::open(file)?;
let reader = io::BufReader::new(file);
let mut lines_it = reader.lines();
let start_time = lines_it.next().ok_or_else(|| SimpleError::new("no start time"))??.parse::<i64>()?;
let dep_times = lines_it.next().ok_or_else(|| SimpleError::new("no bus nrs"))??.split(',').map(|i| -> common::BoxResult<Option<i64>> { if i == "x" { Ok(None) } else { Ok(Some(i.parse::<i64>()?)) } }).collect::<Result<_, _>>()?;
Ok((start_time, dep_times))
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_column() {
assert_eq!(chinese_remainder(&[2, 3, 2], &[3, 5, 7]), Some(23));
}
}
| true |
aba30f12ff87ea9fe42450c423485b369b517b7b
|
Rust
|
andyyu2004/l-lang
|
/src/lc-ir/src/item.rs
|
UTF-8
| 3,071 | 2.609375 | 3 |
[] |
no_license
|
use crate::{self as ir, DefId, DefKind};
use lc_ast::{Abi, Ident, Visibility};
use lc_span::Span;
#[derive(Debug, Clone)]
pub struct Item<'ir> {
pub id: ir::Id,
pub span: Span,
pub vis: Visibility,
pub ident: Ident,
pub kind: ir::ItemKind<'ir>,
}
impl<'ir> Item<'ir> {
pub fn body(&self) -> &ir::Body<'ir> {
match &self.kind {
ItemKind::Fn(_, _, body) => body,
_ => panic!(),
}
}
}
impl<'ir> Item<'ir> {
pub fn generics(&self) -> Option<&'ir ir::Generics<'ir>> {
match self.kind {
ItemKind::Impl { generics, .. }
| ItemKind::Trait { generics, .. }
| ItemKind::Fn(_, generics, _)
| ItemKind::Struct(generics, _)
| ItemKind::TypeAlias(generics, _)
| ItemKind::Enum(generics, _) => Some(generics),
ItemKind::Mod(..) | ItemKind::Use(..) | ItemKind::Extern(..) => None,
}
}
}
#[derive(Debug, Clone)]
pub enum ItemKind<'ir> {
Fn(&'ir ir::FnSig<'ir>, &'ir ir::Generics<'ir>, &'ir ir::Body<'ir>),
Use(&'ir ir::Path<'ir>),
TypeAlias(&'ir ir::Generics<'ir>, &'ir ir::Ty<'ir>),
Struct(&'ir ir::Generics<'ir>, ir::VariantKind<'ir>),
Enum(&'ir ir::Generics<'ir>, &'ir [ir::Variant<'ir>]),
Extern(Abi, &'ir [ir::ForeignItem<'ir>]),
Mod(ir::Mod<'ir>),
Trait {
generics: &'ir ir::Generics<'ir>,
trait_item_refs: &'ir [ir::TraitItemRef],
},
Impl {
generics: &'ir ir::Generics<'ir>,
trait_path: Option<&'ir ir::Path<'ir>>,
self_ty: &'ir ir::Ty<'ir>,
impl_item_refs: &'ir [ImplItemRef],
},
}
#[derive(Debug, Copy, Clone)]
pub struct Mod<'ir> {
pub span: Span,
pub items: &'ir [ir::DefId],
}
#[derive(Debug, Clone)]
pub struct ForeignItem<'ir> {
pub id: ir::Id,
pub abi: Abi,
pub ident: Ident,
pub span: Span,
pub vis: Visibility,
pub kind: ForeignItemKind<'ir>,
}
#[derive(Debug, Copy, Clone)]
pub enum ForeignItemKind<'ir> {
Fn(&'ir ir::FnSig<'ir>, &'ir ir::Generics<'ir>),
}
#[derive(Debug)]
pub struct TraitItem<'ir> {
pub id: ir::Id,
pub ident: Ident,
pub span: Span,
pub vis: Visibility,
pub generics: &'ir ir::Generics<'ir>,
pub kind: TraitItemKind<'ir>,
}
#[derive(Debug, Clone)]
pub struct ImplItem<'ir> {
pub id: ir::Id,
pub impl_def_id: DefId,
pub ident: Ident,
pub span: Span,
pub vis: Visibility,
pub generics: &'ir ir::Generics<'ir>,
pub kind: ImplItemKind<'ir>,
}
#[derive(Debug, Clone)]
pub enum ImplItemKind<'ir> {
Fn(&'ir ir::FnSig<'ir>, &'ir ir::Body<'ir>),
}
impl<'ir> ImplItemKind<'ir> {
pub fn def_kind(&self) -> DefKind {
match self {
ImplItemKind::Fn(..) => DefKind::AssocFn,
}
}
}
#[derive(Debug, Clone)]
pub enum TraitItemKind<'ir> {
Fn(&'ir ir::FnSig<'ir>, Option<&'ir ir::Body<'ir>>),
}
#[derive(Debug, Clone)]
pub struct TraitItemRef {
pub id: ir::TraitItemId,
}
#[derive(Debug, Clone)]
pub struct ImplItemRef {
pub id: ir::ImplItemId,
}
| true |
dfac0802bcdf4814bc6cbbfce89bf126aef81ca8
|
Rust
|
zwhitchcox/leetcode_rs
|
/src/_0485_max_consecutive_ones.rs
|
UTF-8
| 480 | 3.40625 | 3 |
[
"MIT"
] |
permissive
|
struct Solution;
impl Solution {
fn find_max_consecutive_ones(nums: Vec<i32>) -> i32 {
let mut max = 0;
let mut count = 0;
for x in nums {
if x == 1 {
count += 1;
max = i32::max(count, max);
} else {
count = 0;
}
}
max
}
}
#[test]
fn test() {
let nums = vec![1, 1, 0, 1, 1, 1];
assert_eq!(Solution::find_max_consecutive_ones(nums), 3);
}
| true |
0877f6530522070eeaf30e507a1faba0baaa2400
|
Rust
|
baylesj/advent-of-code-2019
|
/src/day_one.rs
|
UTF-8
| 1,279 | 3.109375 | 3 |
[] |
no_license
|
use std::fs::File;
use std::io::prelude::*;
use std::io::BufReader;
const INPUT_FILENAME: &'static str = "input/day_one.txt";
type Mass = u32;
type Fuel = u32;
fn get_fuel_for_module(mass: Mass) -> Fuel {
let mut total_fuel: Fuel = 0;
let mut current_step_mass: Mass = mass;
while current_step_mass != 0 {
current_step_mass = get_fuel_for_module_step(current_step_mass);
total_fuel += current_step_mass as Fuel;
}
total_fuel
}
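// Worked example (from the puzzle description): a module of mass 1969 needs
// 654 fuel, which in turn needs 216, then 70, 21 and 5, so
// get_fuel_for_module(1969) == 654 + 216 + 70 + 21 + 5 == 966.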
fn get_fuel_for_module_step(mass: Mass) -> Fuel {
let one_third: Fuel = (mass / 3) as Fuel;
if one_third > 2 {
one_third - 2
} else {
0
}
}
pub fn solve() -> String {
let file = File::open(INPUT_FILENAME).expect("Invalid filename");
let reader = BufReader::new(file);
let mut part_one_sum: Fuel = 0;
let mut part_two_sum: Fuel = 0;
for line in reader.lines() {
let unparsed_mass: String = line.expect("Invalid file contents");
let module_mass: Mass = unparsed_mass
.parse::<Mass>()
.expect("Invalid file contents");
part_one_sum += get_fuel_for_module_step(module_mass);
part_two_sum += get_fuel_for_module(module_mass);
}
format!("part one: {}, part two: {}", part_one_sum, part_two_sum)
}
| true |
c28983678dccd52528c3dfd1fc75ac81df78eeeb
|
Rust
|
paulcacheux/raytracer
|
/src/hitable/bvh.rs
|
UTF-8
| 3,589 | 3.046875 | 3 |
[
"MIT"
] |
permissive
|
use super::*;
use rand::{self, Rng};
enum BVHNode {
Empty,
Leaf(Box<dyn Hitable>),
Pair(Box<dyn Hitable>, Box<dyn Hitable>),
}
impl BVHNode {
fn hit_node(&self, ray: Ray, tmin: f32, tmax: f32) -> Option<HitInfos> {
match self {
BVHNode::Empty => None,
BVHNode::Leaf(sub) => sub.hit(ray, tmin, tmax),
BVHNode::Pair(left, right) => {
let left_infos = left.hit(ray, tmin, tmax);
let right_infos = right.hit(ray, tmin, tmax);
match (left_infos, right_infos) {
(Some(l), Some(r)) => if l.t <= r.t { Some(l) } else { Some(r) },
(_, Some(r)) => Some(r),
(Some(l), _) => Some(l),
(None, None) => None
}
}
}
}
}
pub struct BVH {
node: BVHNode,
aabb: Option<AABB>,
}
impl BVH {
pub fn new(children: Vec<Box<dyn Hitable>>) -> BVH {
if children.is_empty() {
return BVH {
node: BVHNode::Empty,
aabb: None
}
}
let mut children = children;
let axis_rand: u32 = rand::thread_rng().gen_range(0, 3);
let sort_func = match axis_rand {
0 => utils::box_x_compare,
1 => utils::box_y_compare,
2 => utils::box_z_compare,
_ => unreachable!(),
};
children.sort_by(sort_func);
let (node, aabb) = if children.len() == 1 {
let c = children.into_iter().next().unwrap();
let bb = c.bounding_box();
(BVHNode::Leaf(c), bb)
} else if children.len() == 2 {
let mut iter = children.into_iter();
let c0 = iter.next().unwrap();
let c1 = iter.next().unwrap();
let bb0 = c0.bounding_box();
let bb1 = c1.bounding_box();
let bb = AABB::surrounding_opt(bb0, bb1);
(BVHNode::Pair(c0, c1), bb)
} else {
let right_children = children.split_off(children.len() / 2);
let left: Box<dyn Hitable> = Box::new(BVH::new(children));
let right: Box<dyn Hitable> = Box::new(BVH::new(right_children));
let bb = AABB::surrounding_opt(left.bounding_box(), right.bounding_box());
(BVHNode::Pair(left, right), bb)
};
BVH {
node,
aabb
}
}
}
impl Hitable for BVH {
fn hit(&self, ray: Ray, tmin: f32, tmax: f32) -> Option<HitInfos> {
if let Some(aabb) = self.aabb {
if !aabb.hit(ray, tmin, tmax) {
return None
}
}
self.node.hit_node(ray, tmin, tmax)
}
fn bounding_box(&self) -> Option<AABB> {
self.aabb
}
}
pub mod utils {
use std::cmp::Ordering;
use crate::hitable::Hitable;
pub fn fast_cmp(a: &f32, b: &f32) -> Ordering {
if *a == *b {
Ordering::Equal
} else if *a <= *b {
Ordering::Less
} else {
Ordering::Greater
}
}
macro_rules! compare_xyz {
($component:ident, $name:ident) => {
pub fn $name(a: &Box<dyn Hitable>, b: &Box<dyn Hitable>) -> Ordering {
let a_box = a.bounding_box().unwrap(); // TODO
let b_box = b.bounding_box().unwrap();
fast_cmp(&a_box.min.$component, &b_box.min.$component)
}
}
}
compare_xyz!(x, box_x_compare);
compare_xyz!(y, box_y_compare);
compare_xyz!(z, box_z_compare);
}
| true |
22a8e839b2751e38c18c283ffc917351e499d123
|
Rust
|
dixe/rust-search-engine
|
/src/index/searchable_index.rs
|
UTF-8
| 4,474 | 3.328125 | 3 |
[] |
no_license
|
use std::collections::HashMap as HashMap;
use crate::index::index_types::*;
use crate::index::property_map::{PropertyMap};
pub struct SearchableIndex {
next_id: usize,
// Use a map for each different type
// mapping from strings (words/tokens) to document ids
properties_text_word_map: PropertyMap<TextT>,
properties_integer_word_map: PropertyMap<IntegerT>,
properties_sortable_text_word_map: PropertyMap<SortableTextT>,
// Documents in index
documents: HashMap<usize, ProcessedDocument>,
}
impl SearchableIndex {
pub fn empty() -> Self {
SearchableIndex {
next_id: 1,
properties_sortable_text_word_map: PropertyMap::new(),
properties_text_word_map: PropertyMap::new(),
properties_integer_word_map: PropertyMap::new(),
documents: HashMap::new()
}
}
fn increment_id(&mut self) {
self.next_id += 1;
}
fn insert_property(&mut self, prop: &IndexProperty ) {
match &prop.data {
PropertyType::Integer(data) => self.insert_integer_property(&prop.name, *data),
PropertyType::Text(text) => self.insert_text_property(&prop.name, &text),
            PropertyType::SortableText(_) => panic!("Not implemented: sortable text property"),
}
}
fn process_text_data(doc_id: usize, text: &str) -> HashMap::<&str, WordFrequency> {
let mut word_freqs = HashMap::new();
for word in text.split_whitespace() {
if !word_freqs.contains_key(word) {
word_freqs.insert(word, WordFrequency {doc_id, frequency: 0} );
}
if let Some(wf) = word_freqs.get_mut(&word) {
wf.frequency += 1;
}
}
word_freqs
}
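    // Example: process_text_data(1, "to be or not to be") yields frequencies
    // {"to": 2, "be": 2, "or": 1, "not": 1}, each entry tagged with doc_id 1.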
fn insert_text_property(&mut self, name: &str, text: &str)
{
let word_freqs = SearchableIndex::process_text_data(self.next_id, text);
// insert word_freqs into self hashmap for the given property and word
for kv in word_freqs.iter() {
let word = *kv.0;
self.properties_text_word_map.insert_data(&name, word.to_string(), *kv.1);
}
}
fn insert_integer_property(&mut self, name: &str, data: IntegerT)
{
self.properties_integer_word_map.insert_data(name, data, WordFrequency { doc_id: self.next_id, frequency: 1});
}
pub fn from_documents(docs: &Vec::<ProcessedDocument>) -> Self {
let mut result = SearchableIndex::empty();
        // Indexing: for now, just split on whitespace and call it a day; only index strings, all in the same map.
for doc in docs {
for prop in doc.properties.iter() {
result.insert_property(prop);
}
result.documents.insert(result.next_id, (*doc).clone());
result.increment_id();
}
result
}
pub fn get_property_map_text(&self, name: &str) -> &HashMap<TextT, Vec::<WordFrequency>> {
self.properties_text_word_map.get_map(name)
}
pub fn get_property_map_integer(&self, name: &str) -> &HashMap<IntegerT, Vec::<WordFrequency>> {
self.properties_integer_word_map.get_map(name)
}
pub fn document_count(&self) -> usize {
self.documents.len()
}
}
#[derive(Clone)]
pub struct ProcessedDocument { // Represents a document with stopwords removed, etc.
properties: Vec::<IndexProperty>
}
impl ProcessedDocument {
pub fn new(properties: Vec::<IndexProperty>) -> Self {
        // TODO: remove stop words, etc. from this
ProcessedDocument {
properties
}
}
}
#[cfg(test)]
mod tests {
use super::*;
fn create_index() -> SearchableIndex {
let documents = vec! {
ProcessedDocument {
properties: vec! {
IndexProperty {
name: "content".to_string(),
data: PropertyType::Text("lorup ipsum content for you needs. With lorup repeats".to_string())
},
IndexProperty {
name: "count".to_string(),
data: PropertyType::Integer(10)
}
},
}
};
SearchableIndex::from_documents(&documents)
}
#[test]
fn document_count() {
let index = create_index();
assert_eq!(index.document_count(), 1);
}
}
| true |
2ae8a541b550ebd86ce69b868b64638298c60980
|
Rust
|
neivv/whack
|
/src/win_common.rs
|
UTF-8
| 4,596 | 2.546875 | 3 |
[
"MIT",
"Apache-2.0"
] |
permissive
|
//! Common windows code for both x86 and x86_64.
use std::ffi::OsStr;
use std::mem;
use std::os::windows::ffi::OsStrExt;
use std::os::raw::c_void;
use std::ptr;
use winapi::shared::minwindef::HMODULE;
use winapi::um::errhandlingapi::GetLastError;
use winapi::um::libloaderapi::GetModuleHandleW;
use winapi::um::memoryapi::{VirtualProtect, VirtualQuery};
use winapi::um::processthreadsapi::{FlushInstructionCache, GetCurrentProcess};
use winapi::um::winnt::{self};
use crate::Export;
use crate::pe;
pub type LibraryHandle = HMODULE;
pub type LibraryName = Vec<u16>;
pub fn nop() -> u8 {
0x90
}
pub fn library_name<T: AsRef<OsStr>>(input: T) -> LibraryName {
winapi_str(input)
}
pub fn library_name_to_handle(name: &LibraryName) -> Option<LibraryHandle> {
let result = unsafe { GetModuleHandleW(name.as_ptr()) };
if result.is_null() { None } else { Some(result) }
}
pub fn lib_handle_equals_name(handle: LibraryHandle, name: &LibraryName) -> bool {
unsafe { GetModuleHandleW(name.as_ptr()) == handle }
}
fn winapi_str<T: AsRef<OsStr>>(input: T) -> Vec<u16> {
let input = input.as_ref();
let iter = input.encode_wide();
let mut out = Vec::with_capacity(iter.size_hint().0 + 1);
out.extend(iter);
out.push(0);
out
}
pub fn exe_handle() -> HMODULE {
unsafe {
GetModuleHandleW(ptr::null())
}
}
pub fn library_handle(lib: &OsStr) -> HMODULE {
unsafe {
GetModuleHandleW(winapi_str(lib).as_ptr())
}
}
/// Unprotects module, allowing its memory to be written and reapplies protection on drop.
/// As with most of the other utilities, it is not thread-safe to unprotect the same module from multiple
/// threads.
#[must_use]
pub struct MemoryProtection {
protections: Vec<(*mut c_void, usize, u32)>,
}
impl MemoryProtection {
pub fn new(start: HMODULE) -> MemoryProtection {
        // This currently may run over into the next module if they are adjacent.
// Should have min and max addresses instead.
let start = start as *const _;
let mut protections = Vec::new();
unsafe {
let mut mem_info: winnt::MEMORY_BASIC_INFORMATION = mem::zeroed();
let mut tmp = 0;
VirtualQuery(start, &mut mem_info, mem::size_of_val(&mem_info) as _);
let init_type = mem_info.Type;
while mem_info.Type == init_type {
if mem_info.State == winnt::MEM_COMMIT {
let ok = VirtualProtect(
mem_info.BaseAddress,
mem_info.RegionSize,
winnt::PAGE_EXECUTE_READWRITE,
&mut tmp,
);
if ok == 0 {
panic!(
"Couldn't VirtualProtect memory {:p}:{:x} from {:x}: {:08x}",
mem_info.BaseAddress,
mem_info.RegionSize,
mem_info.Protect,
GetLastError(),
);
}
let address = mem_info.BaseAddress as *mut c_void;
protections.push((address, mem_info.RegionSize, mem_info.Protect));
}
let next = (mem_info.BaseAddress as *const u8)
.offset(mem_info.RegionSize as isize);
VirtualQuery(next as *const _, &mut mem_info, mem::size_of_val(&mem_info) as _);
}
}
MemoryProtection {
protections,
}
}
}
impl Drop for MemoryProtection {
fn drop(&mut self) {
unsafe {
let mut tmp = 0;
let process = GetCurrentProcess();
for tp in &self.protections {
VirtualProtect(tp.0 as *mut _, tp.1, tp.2, &mut tmp);
FlushInstructionCache(process, tp.0 as *const _, tp.1);
}
}
}
}
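// Usage sketch (illustrative; only the items defined in this module are real):
//
// let module = exe_handle();
// let _guard = MemoryProtection::new(module);
// // ... patch code or data inside the module here ...
// // Dropping `_guard` re-applies the saved page protections and flushes the
// // instruction cache for each region.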
pub unsafe fn import_addr(module: HMODULE, func_dll: &[u8], func: &Export) -> Option<*mut usize>
{
let mut buf;
let func_dll_with_extension = {
let has_extension = {
if func_dll.len() <= 4 {
false
} else {
func_dll[func_dll.len() - 4] == b'.'
}
};
if has_extension {
func_dll
} else {
buf = Vec::with_capacity(func_dll.len() + 4);
buf.extend_from_slice(func_dll);
buf.extend_from_slice(b".dll");
&buf[..]
}
};
pe::import_ptr(module as usize, func_dll_with_extension, func)
}
| true |
764994563ea9848ae718ffdbb1a9af6fa0c502b4
|
Rust
|
enso-org/enso
|
/app/gui/view/graph-editor/src/component/edge/layout.rs
|
UTF-8
| 29,824 | 3.515625 | 4 |
[
"AGPL-3.0-only",
"Apache-2.0",
"AGPL-3.0-or-later"
] |
permissive
|
//! Edge layout calculation.
//!
//! # Corners
//!
//! ```text
//! ────╮
//! ```
//!
//! The fundamental unit of edge layout is the [`Corner`]. A corner is a line segment attached to a
//! 90° arc. The length of the straight segment, the radius of the arc, and the orientation of the
//! shape may vary. Any shape of edge is built from corners.
//!
//! The shape of a corner can be fully specified by two points: the horizontal end and the vertical
//! end.
//!
//! In special cases, a corner may be *trivial*: It may have a radius of zero, in which case either
//! the horizontal or vertical end will not be in the usual orientation. The layout algorithm only
//! produces trivial corners when the source is directly in line with the target, or in some cases
//! when subdividing a corner (see [Partial edges] below).
//!
//! # Junction points
//!
//! ```text
//! 3
//! 1 /
//! \ ╭─────╮
//! ────╯\ \
//! 2 4
//! ```
//!
//! The layout algorithm doesn't directly place corners. The layout algorithm places a sequence of
//! junction points--coordinates where two horizontal corner ends or two vertical corner ends meet
//! (or just one corner end, at an end of an edge). A series of junction points, always alternating
//! horizontal/vertical, has a one-to-one relationship with a sequence of corners.
//!
//! # Partial edges
//!
//! Corners are sufficient to draw any complete edge; however, in order to split an edge into a
//! focused portion and an unfocused portion at an arbitrary location based on the mouse position,
//! we need to subdivide one of the corners of the edge.
//!
//! ```text
//! |\
//! | 3
//! /
//! .'
//! ..........-'
//! \ \
//! 1 2 (split)
//! ```
//!
//! When the split position is on the straight segment of a corner, the corner can simply be split
//! into a corner with a shorter segment (2-3), and a trivial corner consisting only of a straight
//! segment (1-2).
//!
//! ```text
//! |\
//! | 4
//! /
//! .'
//! ..........-' \
//! \ \ 3 (split)
//! 1 2
//! ```
//!
//! The difficult case is when the split position is on the arc. In this case, it is not possible to
//! draw the split using the same [`Rectangle`] shader that is used for everything else; a
//! specialized shape is used which supports drawing arbitrary-angle arcs. A trivial corner will
//! draw the straight line up to the beginning of the arc (1-2); arc shapes will draw the split arc
//! (2-3) and (3-4).
use super::*;
use std::f32::consts::FRAC_PI_2;
use std::f32::consts::TAU;
// =================
// === Constants ===
// =================
/// Constants affecting all layouts.
mod shared {
/// Minimum height above the target the edge must approach it from.
pub(super) const MIN_APPROACH_HEIGHT: f32 = 32.25;
pub(super) const NODE_HEIGHT: f32 = crate::component::node::HEIGHT;
pub(super) const NODE_CORNER_RADIUS: f32 = crate::component::node::CORNER_RADIUS;
/// The preferred arc radius.
pub(super) const RADIUS_BASE: f32 = 20.0;
/// The maximum size in pixels of overdraw between edge segments. Prevents visible gaps.
pub(super) const SEGMENT_OVERLAP: f32 = 0.5;
}
use shared::*;
/// Constants configuring the 1-corner layout.
mod single_corner {
/// The y-allocation for the radius will be the full available height minus this value.
pub(super) const RADIUS_Y_ADJUSTMENT: f32 = 29.0;
/// The base x-allocation for the radius.
pub(super) const RADIUS_X_BASE: f32 = super::RADIUS_BASE;
/// Proportion (0-1) of extra x-distance allocated to the radius.
pub(super) const RADIUS_X_FACTOR: f32 = 0.6;
/// Distance for the line to continue under the node, to ensure that there isn't a gap.
pub(super) const SOURCE_NODE_OVERLAP: f32 = 4.0;
/// Minimum arc radius at which we offset the source end to exit normal to the node's curve.
pub(super) const MINIMUM_TANGENT_EXIT_RADIUS: f32 = 2.0;
}
/// Constants configuring the 3-corner layouts.
mod three_corner {
/// The maximum arc radius.
pub(super) const RADIUS_MAX: f32 = super::RADIUS_BASE;
pub(super) const BACKWARD_EDGE_ARROW_THRESHOLD: f32 = 15.0;
/// The maximum radius reduction (from [`RADIUS_BASE`]) to allow when choosing whether to use
/// the three-corner layout that doesn't use a backward corner.
pub(super) const MAX_SQUEEZE: f32 = 2.0;
}
// ==============
// === Layout ===
// ==============
/// Determine the positions and shapes of all the components of the edge.
pub(super) fn layout(
target: Vector2,
source_size: Vector2,
target_size: Vector2,
source_attached: bool,
target_attached: bool,
) -> Layout {
let (junction_points, max_radius, target_attachment) =
junction_points(target, source_size, target_size, source_attached, target_attached);
let corners = corners(&junction_points, max_radius).collect_vec();
let arrow = arrow(target, &junction_points);
Layout { corners, arrow, target_attachment, source_size }
}
// =======================
// === Junction points ===
// =======================
/// Calculate the start and end positions of each 1-corner section composing an edge to the
/// given offset. Return the points, the maximum radius that should be used to draw the corners
/// connecting them, and the length of the target attachment bit.
fn junction_points(
target: Vector2,
source_size: Vector2,
target_size: Vector2,
source_attached: bool,
target_attached: bool,
) -> (Vec<Vector2>, f32, Option<TargetAttachment>) {
let source_half_width = source_size.x() / 2.0;
let source_half_height = source_size.y() / 2.0;
// The maximum x-distance from the source (our local coordinate origin) for the point where the
// edge will begin.
let source_max_x_offset = (source_half_width - NODE_CORNER_RADIUS).max(0.0);
// The maximum y-length of the target-attachment segment. If the layout allows, the
// target-attachment segment will fully exit the node before the first corner begins.
let target_max_attachment_height =
target_attached.then_some((NODE_HEIGHT - target_size.y) / 2.0);
let attachment = target_max_attachment_height.map(|length| TargetAttachment {
target: target + Vector2(0.0, NODE_HEIGHT / 2.0),
length,
});
let target_well_below_source =
target.y() + target_max_attachment_height.unwrap_or_default() <= -MIN_APPROACH_HEIGHT;
let target_below_source = target.y() < -NODE_HEIGHT / 2.0;
let target_beyond_source = target.x().abs() > source_max_x_offset;
let horizontal_room_for_3_corners = target_beyond_source
&& target.x().abs() - source_max_x_offset
>= 3.0 * (RADIUS_BASE - three_corner::MAX_SQUEEZE);
if target_well_below_source || (target_below_source && !horizontal_room_for_3_corners) {
use single_corner::*;
// The edge can originate anywhere along the length of the node.
let source_x = target.x().clamp(-source_max_x_offset, source_max_x_offset);
let distance_x = max(target.x().abs() - source_half_width, 0.0);
let radius_x = RADIUS_X_BASE + distance_x * RADIUS_X_FACTOR;
// The minimum length of straight line there should be at the target end of the edge. This
// is a fixed value, except it is reduced when the target is horizontally very close to the
// edge of the source, so that very short edges are less sharp.
let y_adjustment = min(
target.x().abs() - source_half_width + RADIUS_Y_ADJUSTMENT / 2.0,
RADIUS_Y_ADJUSTMENT,
);
let radius_y = max(target.y().abs() - y_adjustment, 0.0);
let max_radius = min(radius_x, radius_y);
// The radius the edge would have, if the arc portion were as large as possible.
let natural_radius = min((target.x() - source_x).abs(), target.y().abs());
let source_y = if natural_radius > MINIMUM_TANGENT_EXIT_RADIUS {
// Offset the beginning of the edge so that it is normal to the curve of the source node
// at the point that it exits the node.
let radius = min(natural_radius, max_radius);
let arc_origin_x = target.x().abs() - radius;
let source_arc_origin = source_half_width - NODE_CORNER_RADIUS;
let circle_offset = arc_origin_x - source_arc_origin;
let intersection = circle_intersection(circle_offset, NODE_CORNER_RADIUS, radius);
-(radius - intersection).abs()
} else if source_attached {
SOURCE_NODE_OVERLAP - source_half_height
} else {
source_half_height
};
let source = Vector2(source_x, source_y);
// The target attachment will extend as far toward the edge of the node as it can without
// rising above the source.
let attachment_height = target_max_attachment_height.map(|dy| min(dy, target.y().abs()));
let attachment_y = target.y() + attachment_height.unwrap_or_default();
let target_attachment = Vector2(target.x(), attachment_y);
(vec![source, target_attachment], max_radius, attachment)
} else {
use three_corner::*;
// The edge originates from either side of the node.
let source_x = source_max_x_offset.copysign(target.x());
let distance_x = (target.x() - source_x).abs();
let (j0_x, j1_x, height_adjustment);
if horizontal_room_for_3_corners {
// J1
// /
// ╭──────╮
// ╭─────╮ │ ▢
// ╰─────╯────╯\
// J0
// Junctions (J0, J1) are in between source and target.
let j0_dx = min(2.0 * RADIUS_MAX, distance_x / 2.0);
let j1_dx = min(RADIUS_MAX, (distance_x - j0_dx) / 2.0);
j0_x = source_x + j0_dx.copysign(target.x());
j1_x = j0_x + j1_dx.copysign(target.x());
height_adjustment = RADIUS_MAX - j1_dx;
} else {
// J1
// /
// ╭──────╮ J0
// ▢ │/
// ╭─────╮ │
// ╰─────╯────╯
// J0 > source; J0 > J1; J1 > target.
j1_x = target.x() + RADIUS_MAX.copysign(target.x());
let j0_beyond_target = target.x().abs() + RADIUS_MAX * 2.0;
let j0_beyond_source = source_x.abs() + RADIUS_MAX;
j0_x = j0_beyond_source.max(j0_beyond_target).copysign(target.x());
height_adjustment = 0.0;
}
let attachment_height = target_max_attachment_height.unwrap_or_default();
let top =
max(target.y() + MIN_APPROACH_HEIGHT + attachment_height - height_adjustment, 0.0);
let source = Vector2(source_x, 0.0);
let j0 = Vector2(j0_x, top / 2.0);
let j1 = Vector2(j1_x, top);
// The corners meet the target attachment at the top of the node.
let attachment_target = attachment.map_or(target, |a| a.target);
(vec![source, j0, j1, attachment_target], RADIUS_MAX, attachment)
}
}
// ==================
// === End points ===
// ==================
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub(super) enum EndPoint {
Source,
Target,
}
// =======================
// === Splitting edges ===
// =======================
#[derive(Debug, Copy, Clone, PartialEq)]
pub(super) struct EdgeSplit {
pub corner_index: usize,
pub closer_end: EndPoint,
pub split_corner: SplitCorner,
}
/// Find a point along the edge. Return the index of the corner the point occurs in, and which end
/// is closer to the point, and information about how the corner under the point has been split.
///
/// Returns [`None`] if the point is not on the edge.
pub(super) fn find_position(
position: ParentCoords,
layout: &Layout,
source_height: f32,
input_width: f32,
) -> Option<EdgeSplit> {
let position = *position;
let corners = &layout.corners;
let corner_index = corners
.iter()
.position(|&corner| corner.bounding_box(input_width).contains_inclusive(position))?;
let split_corner = corners[corner_index].split(position, input_width)?;
let (full_corners, following_corners) = corners.split_at(corner_index);
let full_corners_distance: f32 =
full_corners.iter().map(|&corner| corner.rectilinear_length()).sum();
let following_distance: f32 =
following_corners.iter().map(|&corner| corner.rectilinear_length()).sum();
let target_attachment_distance =
layout.target_attachment.map(|bit| bit.length).unwrap_or_default();
// The source end of the edge is on a horizontal line through the center of the source node
// (this gives nice behavior when the edge exits the end at an angle). To accurately determine
// which end a point appears closer to, we must exclude the portion of the edge that is hidden
// under the source node.
let hidden_source_distance = source_height / 2.0;
let total_distance = full_corners_distance + following_distance - hidden_source_distance
+ target_attachment_distance;
let offset_from_partial_corner = position - corners[corner_index].source_end();
let partial_corner_distance =
offset_from_partial_corner.x().abs() + offset_from_partial_corner.y().abs();
let distance_from_source =
full_corners_distance + partial_corner_distance - hidden_source_distance;
let closer_end = match distance_from_source * 2.0 < total_distance {
true => EndPoint::Source,
false => EndPoint::Target,
};
Some(EdgeSplit { corner_index, closer_end, split_corner })
}
// ======================================
// === Connecting points with corners ===
// ======================================
fn corners(points: &[Vector2], max_radius: f32) -> impl Iterator<Item = Oriented<Corner>> + '_ {
let mut next_direction = CornerDirection::HorizontalToVertical;
points.array_windows().map(move |&[p0, p1]| {
let direction = next_direction;
next_direction = next_direction.reverse();
let corner = match direction {
CornerDirection::HorizontalToVertical =>
Corner { horizontal: p0, vertical: p1, max_radius },
CornerDirection::VerticalToHorizontal =>
Corner { horizontal: p1, vertical: p0, max_radius },
};
Oriented::new(corner, direction)
})
}
// ==============
// === Corner ===
// ==============
#[derive(Debug, Copy, Clone, PartialEq)]
pub(super) struct Corner {
horizontal: Vector2,
vertical: Vector2,
max_radius: f32,
}
impl Corner {
#[inline]
pub fn clip(self) -> Vector2 {
let Corner { horizontal, vertical, .. } = self;
let (dx, dy) = (vertical.x() - horizontal.x(), horizontal.y() - vertical.y());
let (x_clip, y_clip) = (0.5f32.copysign(dx), 0.5f32.copysign(dy));
Vector2(x_clip, y_clip)
}
/// Calculate vertical and horizontal line overlap to avoid visible gaps between segments.
#[inline]
fn overlap_padding(self, line_width: f32) -> Vector2 {
let Corner { horizontal, vertical, .. } = self;
let offset = (horizontal - vertical).abs();
Vector2(
SEGMENT_OVERLAP.min(offset.x() - line_width * 0.5).max(0.0),
SEGMENT_OVERLAP.min(offset.y() - line_width * 0.5).max(0.0),
)
}
/// Calculate origin offset caused by overlap padding.
#[inline]
fn overlap_offset(self, line_width: f32) -> Vector2 {
let Corner { horizontal, vertical, .. } = self;
let offset = horizontal - vertical;
let pad = self.overlap_padding(line_width);
// Position the overlap according to clip direction. For straight lines, the overlap is
// centered on the line.
let x = match () {
_ if offset.x() < 0.0 => -pad.x(),
_ if offset.y() == 0.0 => -0.5 * pad.x(),
_ => 0.0,
};
let y = match () {
_ if offset.y() > 0.0 => -pad.y(),
_ if offset.x() == 0.0 => -0.5 * pad.y(),
_ => 0.0,
};
Vector2(x, y)
}
#[inline]
pub fn origin(self, line_width: f32) -> Vector2 {
let Corner { horizontal, vertical, .. } = self;
let offset = horizontal - vertical;
let pad_offset = self.overlap_offset(line_width);
let half_line_width_w = offset.y().abs().min(line_width / 2.0);
let half_line_width_h = offset.x().abs().min(line_width / 2.0);
let x = pad_offset.x() + (horizontal.x()).min(vertical.x() - half_line_width_w);
let y = pad_offset.y() + (vertical.y()).min(horizontal.y() - half_line_width_h);
Vector2(x, y)
}
#[inline]
pub fn size(self, line_width: f32) -> Vector2 {
let Corner { horizontal, vertical, .. } = self;
let offset = (horizontal - vertical).abs();
let pad = self.overlap_padding(line_width);
let half_line_width_w = offset.y().min(line_width / 2.0);
let half_line_width_h = offset.x().min(line_width / 2.0);
let width = pad.x() + (offset.x() + half_line_width_w).max(half_line_width_w * 2.0);
let height = pad.y() + (offset.y() + half_line_width_h).max(half_line_width_h * 2.0);
Vector2(width, height)
}
#[inline]
pub fn radius(self, line_width: f32) -> f32 {
let Corner { horizontal, vertical, .. } = self;
let offset = (horizontal - vertical).abs();
let smaller_offset = offset.x().min(offset.y());
let piecewise_limit = (smaller_offset * 2.0)
.min(line_width)
.max(smaller_offset + smaller_offset.min(line_width / 2.0));
(self.max_radius + line_width / 2.0).min(piecewise_limit)
}
fn bounding_box(self, line_width: f32) -> BoundingBox {
let origin = self.origin(line_width);
let size = self.size(line_width);
BoundingBox::from_position_and_size_unchecked(origin, size)
}
#[allow(unused)]
fn euclidean_length(self) -> f32 {
let Corner { horizontal, vertical, max_radius } = self;
let offset = horizontal - vertical;
let (dx, dy) = (offset.x().abs(), offset.y().abs());
let radius = min(dx, dy).min(max_radius);
let linear_x = dx - radius;
let linear_y = dy - radius;
let arc = FRAC_PI_2 * radius;
arc + linear_x + linear_y
}
fn rectilinear_length(self) -> f32 {
let Corner { horizontal, vertical, .. } = self;
let offset = horizontal - vertical;
offset.x().abs() + offset.y().abs()
}
#[allow(unused)]
fn transpose(self) -> Self {
let Corner { horizontal, vertical, max_radius } = self;
Corner { horizontal: vertical.yx(), vertical: horizontal.yx(), max_radius }
}
fn vertical_end_angle(self) -> f32 {
match self.vertical.x() > self.horizontal.x() {
true => 0.0,
false => std::f32::consts::PI.copysign(self.horizontal.y() - self.vertical.y()),
}
}
fn horizontal_end_angle(self) -> f32 {
FRAC_PI_2.copysign(self.horizontal.y() - self.vertical.y())
}
}
// === Parameters for drawing the arc portion of a corner in two parts ===
#[derive(Debug, Copy, Clone, Default, PartialEq)]
pub(super) struct SplitArc {
pub origin: Vector2,
pub radius: f32,
pub source_end_angle: f32,
pub split_angle: f32,
pub target_end_angle: f32,
}
// ========================
// === Oriented corners ===
// ========================
#[derive(Debug, Copy, Clone, Deref, PartialEq)]
pub(super) struct Oriented<T> {
#[deref]
value: T,
direction: CornerDirection,
}
impl<T> Oriented<T> {
fn new(value: T, direction: CornerDirection) -> Self {
Self { value, direction }
}
}
impl Oriented<Corner> {
/// Split the shape at the given point, if the point is within the tolerance specified by
/// `snap_line_width` of the shape.
fn split(self, split_point: Vector2, snap_line_width: f32) -> Option<SplitCorner> {
let Corner { horizontal, vertical, max_radius } = self.value;
let hv_offset = horizontal - vertical;
let (dx, dy) = (hv_offset.x().abs(), hv_offset.y().abs());
let radius = min(dx, dy).min(max_radius);
// Calculate closeness to the straight segments.
let (linear_x, linear_y) = (dx - radius, dy - radius);
let snap_distance = snap_line_width / 2.0;
let y_along_vertical = (self.vertical.y() - split_point.y()).abs() < linear_y;
let x_along_horizontal = (self.horizontal.x() - split_point.x()).abs() < linear_x;
let y_near_horizontal = (self.horizontal.y() - split_point.y()).abs() <= snap_distance;
let x_near_vertical = (self.vertical.x() - split_point.x()).abs() <= snap_distance;
// Calculate closeness to the arc.
// 1. Find the origin of the circle the arc is part of.
// The corner of our bounding box that is immediately outside the arc.
let point_outside_arc = Vector2(self.vertical.x(), self.horizontal.y());
// The opposite corner of our bounding box, far inside the arc.
// Used to find the direction from outside the arc to the origin of the arc's circle.
let point_inside_arc = Vector2(self.horizontal.x(), self.vertical.y());
let outside_to_inside = point_inside_arc - point_outside_arc;
let outside_to_origin =
Vector2(radius.copysign(outside_to_inside.x()), radius.copysign(outside_to_inside.y()));
let origin = point_outside_arc + outside_to_origin;
// 2. Check if the point is on the arc.
let input_to_origin = split_point - origin;
let distance_squared_from_origin =
input_to_origin.x().powi(2) + input_to_origin.y().powi(2);
let min_radius = radius - snap_distance;
let max_radius = radius + snap_distance;
let too_close = distance_squared_from_origin < min_radius.powi(2);
let too_far = distance_squared_from_origin > max_radius.powi(2);
let on_arc = !(too_close || too_far);
if y_near_horizontal && x_along_horizontal {
// The point is along the horizontal line. Snap its y-value, and draw a corner to it.
let snapped = Vector2(split_point.x(), self.horizontal.y());
let source_end = self.with_target_end(snapped);
let target_end = self.with_source_end(snapped);
Some(SplitCorner { source_end, target_end, split_arc: None })
} else if x_near_vertical && y_along_vertical {
// The point is along the vertical line. Snap its x-value, and draw a corner to it.
let snapped = Vector2(self.vertical.x(), split_point.y());
let source_end = self.with_target_end(snapped);
let target_end = self.with_source_end(snapped);
Some(SplitCorner { source_end, target_end, split_arc: None })
} else if on_arc {
// Find the input point's angle along the arc.
let offset_from_origin = split_point - origin;
let split_angle = offset_from_origin.y().atan2(offset_from_origin.x());
// Split the arc on the angle.
let arc_horizontal_end = origin - Vector2(0.0, radius.copysign(outside_to_inside.y()));
let arc_vertical_end = origin - Vector2(radius.copysign(outside_to_inside.x()), 0.0);
let (arc_begin, arc_end) = match self.direction {
CornerDirection::HorizontalToVertical => (arc_horizontal_end, arc_vertical_end),
CornerDirection::VerticalToHorizontal => (arc_vertical_end, arc_horizontal_end),
};
let source_end = self.with_target_end(arc_begin);
let target_end = self.with_source_end(arc_end);
let source_end_angle = self.source_end_angle();
let target_end_angle = self.target_end_angle();
let split_angle = self.clamp_to_arc(split_angle);
let split =
SplitArc { origin, radius, source_end_angle, split_angle, target_end_angle };
Some(SplitCorner { source_end, target_end, split_arc: Some(split) })
} else {
None
}
}
fn clamp_to_arc(self, c: f32) -> f32 {
let a = self.horizontal_end_angle();
let b = self.vertical_end_angle();
let a_to_c = (c.rem_euclid(TAU) - a.rem_euclid(TAU)).abs();
let b_to_c = (c.rem_euclid(TAU) - b.rem_euclid(TAU)).abs();
let ac = min(a_to_c, TAU - a_to_c);
let bc = min(b_to_c, TAU - b_to_c);
let close_to_a = ac < FRAC_PI_2;
let close_to_b = bc < FRAC_PI_2;
// The angle is on the minor arc if it is close to both limits; otherwise, clamp it to
// whichever is closer.
if close_to_a && close_to_b {
c
} else if ac < bc {
a
} else {
b
}
}
fn source_end(self) -> Vector2 {
match self.direction {
CornerDirection::VerticalToHorizontal => self.value.vertical,
CornerDirection::HorizontalToVertical => self.value.horizontal,
}
}
#[allow(unused)]
fn target_end(self) -> Vector2 {
match self.direction {
CornerDirection::VerticalToHorizontal => self.value.horizontal,
CornerDirection::HorizontalToVertical => self.value.vertical,
}
}
fn with_target_end(mut self, value: Vector2) -> Self {
*(match self.direction {
CornerDirection::VerticalToHorizontal => &mut self.value.horizontal,
CornerDirection::HorizontalToVertical => &mut self.value.vertical,
}) = value;
self
}
fn with_source_end(mut self, value: Vector2) -> Self {
*(match self.direction {
CornerDirection::VerticalToHorizontal => &mut self.value.vertical,
CornerDirection::HorizontalToVertical => &mut self.value.horizontal,
}) = value;
self
}
fn source_end_angle(self) -> f32 {
match self.direction {
CornerDirection::HorizontalToVertical => self.horizontal_end_angle(),
CornerDirection::VerticalToHorizontal => self.vertical_end_angle(),
}
}
fn target_end_angle(self) -> f32 {
self.reverse().source_end_angle()
}
fn reverse(self) -> Self {
let Self { value, direction } = self;
let direction = direction.reverse();
Self { value, direction }
}
}
// === Corner direction ===
#[derive(Debug, Copy, Clone, PartialEq)]
pub(super) enum CornerDirection {
HorizontalToVertical,
VerticalToHorizontal,
}
impl CornerDirection {
pub(super) fn reverse(self) -> Self {
match self {
CornerDirection::HorizontalToVertical => CornerDirection::VerticalToHorizontal,
CornerDirection::VerticalToHorizontal => CornerDirection::HorizontalToVertical,
}
}
}
// === Split (oriented) corners ====
#[derive(Debug, Copy, Clone, PartialEq)]
pub(super) struct SplitCorner {
pub source_end: Oriented<Corner>,
pub target_end: Oriented<Corner>,
pub split_arc: Option<SplitArc>,
}
// ===========================
// === Backward-edge arrow ===
// ===========================
fn arrow(target_offset: Vector2, junction_points: &[Vector2]) -> Option<Vector2> {
let three_corner_layout = junction_points.len() > 2;
let long_backward_edge = target_offset.y() >= three_corner::BACKWARD_EDGE_ARROW_THRESHOLD;
// The points are ordered from source end to destination, and are alternately horizontal
// and vertical junctions. The arrow must be in a vertical part of the edge. Place it at
// the first vertical junction.
let arrow_origin = junction_points[1];
(three_corner_layout && long_backward_edge).then_some(arrow_origin)
}
// =============================
// === Target-attachment bit ===
// =============================
/// The target-end of the edge, drawn on top of a node.
#[derive(Debug, Copy, Clone, PartialEq)]
pub(super) struct TargetAttachment {
/// The target end.
pub target: Vector2,
/// How far to extend from the target.
pub length: f32,
}
// ==================
// === Math Utils ===
// ==================
/// For the given radius of the first circle (`r1`), radius of the second circle (`r2`), and the
/// x-axis position of the second circle (`x`), computes the y-axis position of the second circle in
/// such a way that the borders of the circles cross at a right angle. It also computes the angle
/// of the intersection. Please note that the center of the first circle is at the origin.
///
/// ```text
/// r1
/// ◄───► (1) x^2 + y^2 = r1^2 + r2^2
/// _____ (1) => y = sqrt((r1^2 + r2^2)/x^2)
/// .' `.
/// / _.-"""B-._ ▲
/// | .'0┼ | `. │ angle1 = A-XY-0
/// \/ │ / \ │ r2 angle2 = 0-XY-B
/// |`._ │__.' | │ alpha = B-XY-X_AXIS
/// | A└───┼─ | ▼
/// | (x,y) | tg(angle1) = y / x
/// \ / tg(angle2) = r1 / r2
/// `._ _.' alpha = PI - angle1 - angle2
/// `-....-'
/// ```
fn circle_intersection(x: f32, r1: f32, r2: f32) -> f32 {
let x_norm = x.clamp(-r2, r1);
(r1 * r1 + r2 * r2 - x_norm * x_norm).sqrt()
}
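// Quick numeric check: circle_intersection(0.0, 3.0, 4.0) == 5.0, because
// sqrt(3^2 + 4^2 - 0^2) = 5.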
| true |
9149139d6a39f3aa970e6659c01ec00f854619cb
|
Rust
|
banacorn/algorithmus
|
/legacy-rust/mrna/src/main.rs
|
UTF-8
| 957 | 2.75 | 3 |
[] |
no_license
|
extern crate mrna;
use std::io::prelude::*;
use std::fs::File;
use mrna::bio::*;
fn main() {
let mut file = File::open("input").unwrap();
let mut buffer = String::new();
file.read_to_string(&mut buffer).unwrap();
let aa_vec: Vec<AA> = parse_aa_sequence(buffer.trim());
let count: usize = aa_vec
.into_iter()
.map(codon_num)
.fold(1, |acc, n| acc * n % 1000000);
println!("{}", count);
}
fn codon_num(aa: AA) -> usize {
match aa {
AA::Phe => 2,
AA::Leu => 6,
AA::Ile => 3,
AA::Met => 1,
AA::Val => 4,
AA::Ser => 6,
AA::Pro => 4,
AA::Thr => 4,
AA::Ala => 4,
AA::Tyr => 2,
AA::Stop => 3,
AA::His => 2,
AA::Gln => 2,
AA::Asn => 2,
AA::Lys => 2,
AA::Asp => 2,
AA::Glu => 2,
AA::Cys => 2,
AA::Trp => 1,
AA::Arg => 6,
AA::Gly => 4
}
}
| true |
e5a1fa10a8e0f5d08a9d1bc85650ea98cf6460d8
|
Rust
|
TurboFreeze/whitenoise-core
|
/runtime-rust/src/components/mechanisms.rs
|
UTF-8
| 5,515 | 2.5625 | 3 |
[] |
no_license
|
use yarrow_validator::errors::*;
use crate::base::NodeArguments;
use yarrow_validator::base::{Value, ArrayND, get_argument, Vector2DJagged};
use crate::components::Evaluable;
use crate::utilities;
use yarrow_validator::proto;
impl Evaluable for proto::LaplaceMechanism {
fn evaluate(&self, arguments: &NodeArguments) -> Result<Value> {
let epsilon: Vec<f64> = self.privacy_usage.iter()
.map(|usage| get_epsilon(&usage))
.collect::<Result<Vec<f64>>>()?;
let sensitivity = get_argument(&arguments, "sensitivity")?;
let data = get_argument(&arguments, "data")?;
match (data, sensitivity) {
(Value::ArrayND(data), Value::ArrayND(sensitivity)) => {
let mut data = data.get_f64()?.clone();
let sensitivity = sensitivity.get_f64()?;
data.iter_mut()
.zip(epsilon.iter())
.zip(sensitivity.iter())
.map(|((v, eps), sens)| {
*v += utilities::mechanisms::laplace_mechanism(&eps, &sens)?;
Ok(())
})
.collect::<Result<()>>()?;
Ok(data.into())
},
(Value::Vector2DJagged(data), Value::Vector2DJagged(sensitivity)) => {
let mut data = data.get_f64()?;
if epsilon.len() != 1 {
return Err("non-uniform epsilon is not implemented for Vector2DJagged".into())
}
let epsilon = epsilon.first().unwrap();
                // scale epsilon down so the total privacy budget is split evenly across every element
let epsilon = epsilon / data.iter().fold(0, |sum, e| sum + e.len()) as f64;
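                // e.g. with epsilon = 1.0 and jagged columns of lengths 2 and 3,
                // each of the five noise draws below uses epsilon = 0.2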
let sensitivity = sensitivity.get_f64()?;
if sensitivity.len() != data.len() {
return Err("sensitivity must be same length as data".into())
}
data.iter_mut()
.zip(sensitivity.iter())
                    .map(|(col, sens_col)|
col.iter_mut().zip(sens_col)
.map(|(v, sens)| {
*v += utilities::mechanisms::laplace_mechanism(&epsilon, &sens)?;
Ok(())
}).collect::<Result<()>>()
).collect::<Result<()>>()?;
Ok(Value::Vector2DJagged(Vector2DJagged::F64(data.iter().map(|v| Some(v.clone())).collect())))
},
_ => Err("data and sensitivity must both be ArrayND or Vector2DJagged".into())
}
}
}
impl Evaluable for proto::GaussianMechanism {
fn evaluate(&self, arguments: &NodeArguments) -> Result<Value> {
let epsilon: Vec<f64> = self.privacy_usage.iter().map(|usage| get_epsilon(&usage)).collect::<Result<Vec<f64>>>()?;
let delta = get_argument(&arguments, "delta")?.get_arraynd()?.get_f64()?;
let sensitivity = get_argument(&arguments, "sensitivity")?.get_arraynd()?.get_f64()?;
let data = get_argument(&arguments, "data")?.get_arraynd()?.get_f64()?;
let mut data = data.clone();
data.iter_mut()
.zip(epsilon.iter())
.zip(delta.iter())
.zip(sensitivity.iter())
.map(|(((v, eps), delta), sens)| {
*v += utilities::mechanisms::gaussian_mechanism(&eps, &delta, &sens)?;
Ok(())
})
.collect::<Result<()>>()?;
Ok(data.into())
}
}
impl Evaluable for proto::SimpleGeometricMechanism {
fn evaluate(&self, arguments: &NodeArguments) -> Result<Value> {
println!("arguments geometric {:?}", arguments);
let epsilon: Vec<f64> = self.privacy_usage.iter().map(|usage| get_epsilon(&usage)).collect::<Result<Vec<f64>>>()?;
let sensitivity = get_argument(&arguments, "sensitivity")?.get_arraynd()?.get_f64()?;
let count_min = get_argument(&arguments, "count_min")?.get_arraynd()?.get_i64()?;
let count_max = get_argument(&arguments, "count_max")?.get_arraynd()?.get_i64()?;
let enforce_constant_time = self.enforce_constant_time.clone();
let data = get_argument(&arguments, "data")?.get_arraynd()?.get_i64()?;
let mut data = data.clone();
data.iter_mut()
.zip(epsilon.iter())
.zip(count_min.iter().zip(count_max.iter()))
.zip(sensitivity.iter())
.map(|(((v, eps), (c_min, c_max)), sens)| {
*v += utilities::mechanisms::simple_geometric_mechanism(
&eps, &sens, &c_min, &c_max, &enforce_constant_time)?;
Ok(())
})
.collect::<Result<()>>()?;
Ok(data.into())
}
}
fn get_epsilon(usage: &proto::PrivacyUsage) -> Result<f64> {
match usage.distance.clone().ok_or::<Error>("distance must be defined on a PrivacyUsage".into())? {
proto::privacy_usage::Distance::DistancePure(distance) => Ok(distance.epsilon),
proto::privacy_usage::Distance::DistanceApproximate(distance) => Ok(distance.epsilon),
// _ => Err("epsilon is not defined".into())
}
}
fn get_delta(usage: &proto::PrivacyUsage) -> Result<f64> {
match usage.distance.clone().ok_or::<Error>("distance must be defined on a PrivacyUsage".into())? {
proto::privacy_usage::Distance::DistanceApproximate(distance) => Ok(distance.delta),
_ => Err("delta is not defined".into())
}
}
| true |
3a377b5b6ebc7ea40cec19fdb3107018d7edebb7
|
Rust
|
isgasho/ligen
|
/src/ir/function/parameter.rs
|
UTF-8
| 6,810 | 3.453125 | 3 |
[
"Apache-2.0"
] |
permissive
|
//! Function parameter.
use crate::ir::{Identifier, Reference, Type, ReferenceKind};
use proc_macro2::TokenStream;
use quote::{quote, ToTokens, TokenStreamExt};
use std::convert::TryFrom;
use syn::FnArg;
#[derive(Debug, PartialEq, Clone)]
/// Parameter representation.
pub struct Parameter {
/// identifier field
pub identifier: Identifier,
/// type_ field
pub type_: Type,
}
impl TryFrom<FnArg> for Parameter {
type Error = &'static str;
fn try_from(fn_arg: FnArg) -> Result<Self, Self::Error> {
match fn_arg {
FnArg::Typed(syn::PatType { pat, ty, .. }) => {
if let syn::Pat::Ident(syn::PatIdent { ident, .. }) = *pat {
Ok(Self {
identifier: ident.into(),
type_: Type::try_from(*ty).expect("Failed to convert from Type"),
})
} else {
Err("Identifier not found")
}
}
// TODO: Implement conversion for syn::Receiver.
FnArg::Receiver(syn::Receiver {
reference,
mutability,
..
}) => {
let identifier = Identifier::new("self").into();
let type_ = reference
.map(|_| {
let kind = ReferenceKind::Borrow;
let is_constant = mutability.is_none();
let type_ = Box::new(Type::Compound(Identifier::new("Self").into()));
Type::Reference(Reference { kind, is_constant, type_ })
})
.unwrap_or_else(|| Type::Compound(Identifier::new("Self").into()));
Ok(Self { identifier, type_ })
},
}
}
}
impl ToTokens for Parameter {
fn to_tokens(&self, tokens: &mut TokenStream) {
let ident = self.identifier.to_token_stream();
let typ = self.type_.to_token_stream();
tokens.append_all(quote! {#ident: #typ})
}
}
#[cfg(test)]
mod test {
use std::convert::TryFrom;
use super::Parameter;
use crate::ir::{Atomic, Identifier, Integer, Reference, Type, ReferenceKind};
use quote::quote;
use syn::{parse_quote::parse, FnArg};
#[test]
fn parameter_atomic() {
assert_eq!(
Parameter::try_from(parse::<FnArg>(quote! {integer: i32})).expect("Returned Error"),
Parameter {
identifier: Identifier::new("integer"),
type_: Type::Atomic(Atomic::Integer(Integer::I32))
}
);
}
#[test]
fn parameter_compound() {
assert_eq!(
Parameter::try_from(parse::<FnArg>(quote! {name: String})).expect("Returned Error"),
Parameter {
identifier: Identifier::new("name"),
type_: Type::Compound(Identifier::new("String").into())
}
);
}
#[test]
fn parameter_borrow_constant() {
assert_eq!(
Parameter::try_from(parse::<FnArg>(quote! {name: &String})).expect("Returned Error"),
Parameter {
identifier: Identifier::new("name"),
type_: Type::Reference(
Reference {
kind: ReferenceKind::Borrow,
is_constant: true,
type_: Box::new(Type::Compound(Identifier::new("String").into()))
}
)
}
);
}
#[test]
fn parameter_borrow_mutable() {
assert_eq!(
Parameter::try_from(parse::<FnArg>(quote! {name: &mut String}))
.expect("Returned Error"),
Parameter {
identifier: Identifier::new("name"),
type_: Type::Reference(
Reference {
kind: ReferenceKind::Borrow,
is_constant: false,
type_: Box::new(Type::Compound(Identifier::new("String").into()))
}
)
}
);
}
#[test]
fn parameter_pointer_constant() {
assert_eq!(
Parameter::try_from(parse::<FnArg>(quote! {name: *const String}))
.expect("Returned Error"),
Parameter {
identifier: Identifier::new("name"),
type_: Type::Reference(
Reference {
kind: ReferenceKind::Pointer,
is_constant: true,
type_: Box::new(Type::Compound(Identifier::new("String").into()))
}
)
}
);
}
#[test]
fn parameter_pointer_mutable() {
assert_eq!(
Parameter::try_from(parse::<FnArg>(quote! {name: *mut String}))
.expect("Returned Error"),
Parameter {
identifier: Identifier::new("name"),
type_: Type::Reference(
Reference {
kind: ReferenceKind::Pointer,
is_constant: false,
type_: Box::new(Type::Compound(Identifier::new("String").into()))
}
)
}
);
}
#[test]
fn parameter_receiver() {
assert_eq!(
Parameter::try_from(parse::<FnArg>(quote! {self})).expect("Returned Error"),
Parameter {
identifier: Identifier::new("self").into(),
type_: Type::Compound(Identifier::new("Self").into())
}
);
}
#[test]
fn parameter_receiver_reference() {
assert_eq!(
Parameter::try_from(parse::<FnArg>(quote! {&self})).expect("Returned Error"),
Parameter {
identifier: Identifier::new("self").into(),
type_: Type::Reference(
Reference {
kind: ReferenceKind::Borrow,
is_constant: true,
type_: Box::new(Type::Compound(Identifier::new("Self").into()))
}
)
}
);
}
#[test]
fn parameter_receiver_mutable() {
assert_eq!(
Parameter::try_from(parse::<FnArg>(quote! {&mut self})).expect("Returned Error"),
Parameter {
identifier: Identifier::new("self").into(),
type_: Type::Reference(
Reference {
kind: ReferenceKind::Borrow,
is_constant: false,
type_: Box::new(Type::Compound(Identifier::new("Self").into()))
}
)
}
);
}
}
| true |
7fc4fead9e6def68c63f2ef020793334bcf9ce88
|
Rust
|
BartMassey/advent-of-code-2019
|
/libaoc/dirns.rs
|
UTF-8
| 5,478 | 3.59375 | 4 |
[
"MIT"
] |
permissive
|
// Copyright © 2016 Bart Massey
// This program is licensed under the "MIT License".
// Please see the file LICENSE in this distribution
// for license terms.
//! Directions management for Advent of Code solutions.
//!
//! To use this, make a new `GridBox` to set clipping bounds,
//! then call the `neighbors()` method of the `ClipBox` to get
//! an iterator over clipped neighbors in cardinal directions.
//!
//! # Examples
//!
//! ```rust
//! use aoc::dirns::*;
//!
//! let clip_box = GridBox::new(3, 4);
//! let neighbors = clip_box.neighbors((2, 0))
//! .collect::<Vec<_>>();
//! assert_eq!(neighbors, vec![(1, 0), (2, 1)]);
//! ```
/// Symbolic direction constants. It is unfortunate that
/// these need to be matched to DIRNS below.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum Dirn {
Up = 0,
Left = 1,
Down = 2,
Right = 3,
}
/// Rotation directions.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum Rot {
/// Counter-clockwise.
CCW,
/// Clockwise.
CW,
}
/// Displacements induced by the cardinal directions: up,
/// left, down, right in an x-y coordinate system where
/// increasing y is down.
pub const DIRNS: [(i64, i64); 4] = [(0, -1), (-1, 0), (0, 1), (1, 0)];
/// The possible facings.
pub const FACINGS: [Dirn; 4] =
[Dirn::Up, Dirn::Left, Dirn::Down, Dirn::Right];
impl Dirn {
/// Displacement resulting from a step in the given
/// direction.
pub fn disp(self) -> (i64, i64) {
DIRNS[self as usize]
}
/// Apply the appropriate displacement for
/// this direction to the given point.
pub fn displace(self, mut p: Point) -> Point {
let disp = self.disp();
p.0 += disp.0;
p.1 += disp.1;
p
}
/// Direction resulting from turning in the given
/// rotation direction.
pub fn turn(self, rot: Rot) -> Dirn {
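        // FACINGS lists the facings in counter-clockwise order (Up, Left,
        // Down, Right), so a CCW turn advances one slot and a CW turn steps
        // back one slot, written as +(len - 1) to avoid unsigned underflow.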
let offset = match rot {
Rot::CCW => 1,
Rot::CW => FACINGS.len() - 1,
};
FACINGS[(self as usize + offset) % FACINGS.len()]
}
/// Direction resulting from turning around.
pub fn reverse(self) -> Dirn {
FACINGS[(self as usize + 2) % FACINGS.len()]
}
}
#[test]
fn test_rot() {
use Dirn::*;
use Rot::*;
assert_eq!(Left, Up.turn(CCW));
assert_eq!(Right, Up.turn(CW));
assert_eq!(Down, Left.turn(CCW));
assert_eq!(Down, Right.turn(CW));
}
/// Type of coordinates.
pub type Point = (i64, i64);
/// Description of the grid, for possible clipping.
#[derive(Copy, Clone)]
pub enum GridBox {
/// Grid is clipped on bottom and right.
ClipBox(Point),
/// Grid is unclipped.
Unclipped,
}
use self::GridBox::*;
impl GridBox {
/// Create a clip box for neighbor calculations.
#[allow(dead_code)]
pub fn new(x_size: i64, y_size: i64) -> GridBox {
ClipBox((x_size, y_size))
}
/// Create an "unbounded clip box" for neighbor
/// calculations. Negative locations will still be
/// clipped.
pub fn new_grid() -> GridBox {
Unclipped
}
/// Return an iterator that will produce the neighbors
/// of the given location, clipped as needed.
pub fn neighbors(&self, location: Point) -> Neighbors {
if let ClipBox((x_size, y_size)) = *self {
let (x, y) = location;
assert!(x < x_size && y < y_size);
};
Neighbors::new(*self, location)
}
/// Return the source location adjusted by the given offset
/// iff the dest location is in-bounds. This is useful when
/// "manual" clipping is needed.
pub fn clip(&self, loc: Point, off: (i64, i64)) -> Option<Point> {
let (x, y) = loc;
let (dx, dy) = off;
let nx = x + dx;
let ny = y + dy;
if nx < 0 || ny < 0 {
return None;
}
if let ClipBox((x_size, y_size)) = *self {
if nx >= x_size as i64 || ny >= y_size as i64 {
return None;
}
};
Some((nx, ny))
}
}
/// Iterator over the neighbors of a point in the four cardinal
/// directions, clipped as appropriate.
pub struct Neighbors {
/// Possible upper bounds on neighbor location.
bounds: GridBox,
/// Source location.
loc: Point,
/// Iterator for cardinal directions.
dirns: Box<dyn Iterator<Item = &'static (i64, i64)>>,
}
impl Neighbors {
/// Return an iterator over the neighbors of
/// the given grid box starting at the given location.
pub fn new(grid_box: GridBox, location: Point) -> Self {
Neighbors {
bounds: grid_box,
loc: location,
dirns: Box::new(DIRNS.iter()),
}
}
}
impl Iterator for Neighbors {
type Item = Point;
/// Return the next cardinal neighbor of the source point,
/// clipped as needed.
fn next(&mut self) -> Option<Point> {
loop {
match self.dirns.next() {
Some(&d) => {
if let Some(n) = self.bounds.clip(self.loc, d) {
return Some(n);
}
}
None => {
return None;
}
}
}
}
}
/// The ["Manhattan Distance"][1] between two points.
///
/// [1]: http://en.wikipedia.org/wiki/Taxicab_geometry
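///
/// For example, `manhattan_distance((1, 2), (4, 6))` is `3 + 4 = 7`.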
pub fn manhattan_distance((x1, y1): Point, (x2, y2): Point) -> u64 {
let dx = (x1 - x2).abs();
let dy = (y1 - y2).abs();
(dx + dy) as u64
}
| true |
d19bd896016d7cba7fa7f8c22f2a1f1386b1e95a
|
Rust
|
qingtao/learning-rust
|
/2.primitives/literals_and_operators/src/main.rs
|
UTF-8
| 2,941 | 3.828125 | 4 |
[] |
no_license
|
use std::fmt;
fn main() {
    // 2. Primitive types
    // Variables can be explicitly type-annotated
let _logical: bool = true;
let _a_float: f64 = 3.0;
let _an_integer = 5i32;
    // Default types
let _defualt_float = 3.0;
let _default_integer = 7;
    // Inferred to be i64
let mut _inferred_type = 12;
_inferred_type = 4294967296i64;
    // A mutable variable's value can be changed
let mut _mutable = 12;
_mutable = 21;
    // A variable's type cannot be changed
// _mutable = true;
let _mutable = true;
    // 2.1. Literals and operators
println!("1+2={}", 1u32 + 2);
println!("1+2={}", 1i32 + 2);
    // 1u32 - 2 overflows
// println!("1-2={}", 1u32 - 2);
println!("1-2={}", 1i32 - 2);
    // Logical operations
println!("true AND false is {}", true && false);
println!("true OR false is {}", true || false);
println!("NOT true is {}", !true);
    // Bitwise operations
println!("0011 AND 0101 is {:04b}", 0b0011u32 & 0b0101u32);
println!("0011 OR 0101 is {:04b}", 0b0011u32 | 0b0101u32);
println!("1 << 5 is {}", 1u32 << 5);
println!("0x80 >> 2 is 0x{:x}", 0x80u32 >> 2);
    // Underscores improve the readability of numbers
println!("One million is written as {}", 1_000_000u32);
    // 2.2 Tuples
let pair = (1, true);
println!("pair is {:?}", pair);
println!("reverse (1,true) is {:?}", reverse(pair));
let long_tuple = (
1u8, 2u16, 3u32, 4u64, -1i8, -2i16, -3i32, -4i64, 0.1f32, 0.2f64, 'a', true,
);
println!("long tuple first value: {}", long_tuple.0);
println!("long tuple second value: {}", long_tuple.1);
let tuple_of_tuples = ((1u8, 2u16, 2u32), (4u64, -1i8, -2i16));
println!("tuple of tuples: {:?}", tuple_of_tuples);
// let too_long_tuple = (1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13);
// println!("too long tuple: {:?}", too_long_tuple);
    // Try it ^ Uncomment the two lines above and read the compiler's error message.
    // Creating a one-element tuple needs an extra comma, to distinguish it from a literal wrapped in parentheses.
println!("one element tuple: {:?}", (5u32,));
println!("just an integer: {:?}", (5u32));
    // Destructure the tuple
let tuple = (1, "hello", 4.5, true);
let (a, b, c, d) = tuple;
println!("{:?}, {:?}, {:?}, {:?}", a, b, c, d);
let matrix = Matrix(1.1, 1.2, 2.1, 2.2);
println!("{:?}", matrix);
println!("{}", matrix.0);
println!("{}", matrix);
println!("{}", transpose(matrix));
}
// Tuples as function arguments and return values
fn reverse(pair: (i32, bool)) -> (bool, i32) {
let (i, b) = pair;
(b, i)
}
#[derive(Debug)]
struct Matrix(f32, f32, f32, f32);
impl fmt::Display for Matrix {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "( {} {} )\n( {} {} )", self.0, self.1, self.2, self.3)
}
}
// Swap the top-right and bottom-left elements
fn transpose(matrix: Matrix) -> Matrix {
Matrix(matrix.0, matrix.2, matrix.1, matrix.3)
}
| true |
777d33b2aa4431b82d718ff9adfdd306604bed59
|
Rust
|
VisionistInc/advent-of-code-2020
|
/cicavey/23/src/main.rs
|
UTF-8
| 4,415 | 3.296875 | 3 |
[
"MIT"
] |
permissive
|
type Cups = Vec<i32>;
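// Find the index of the destination cup: starting one below `start_label`,
// search downward, wrapping from the minimum label back to the maximum.
// Labels of the three picked-up cups are absent from `cups`, so they are
// skipped automatically.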
fn select_dest(cups: &Cups, start_label: i32) -> usize {
let max_label = *cups.iter().max().unwrap();
let min_label = *cups.iter().min().unwrap();
let mut cur_label = start_label;
cur_label -= 1;
loop {
match cups.iter().position(|v| *v == cur_label) {
Some(idx) => {
return idx
}
None => {
cur_label -= 1;
if cur_label < min_label {
cur_label = max_label;
}
}
}
}
}
fn index_of(cups: &Cups, value: i32) -> usize {
cups.iter().position(|v| *v == value).unwrap()
}
fn part1() {
let input = String::from("389125467"); // test
//let input = String::from("872495136"); // input
let mut cups: Cups = input.chars().map(|c| c.to_digit(10).unwrap() as i32).collect();
let mut cur = 0;
for step in 1..=100 {
// println!("-- move {} --", step);
// print!("cups: ");
// for (i, v) in cups.iter().enumerate() {
// if i == cur {
// print!("({}) ", v);
// } else {
// print!("{} ", v);
// }
// }
// println!();
// grab the current label
let cur_label = cups[cur];
// remove three cups
let mut rem = vec!();
// indexes to delete
let mut idxs = vec!();
for i in 0..3 {
let idx = (cur + 1 + i) % cups.len();
idxs.push(idx);
rem.push(cups[idx]);
}
idxs.sort();
for idx in idxs.iter().rev() {
cups.remove(*idx);
}
// println!("pickup: {:?}", &rem);
let dest = select_dest(&cups, cur_label);
// Select next cur by label
let new_cur = index_of(&cups, cur_label);
let next_label = cups[(new_cur + 1) % cups.len()];
// println!("destination: {} (idx={}), cur = {}, new_cur={}, cur_label = {}, next_label = {} {:?}", cups[dest], &dest, cur, new_cur, cur_label, next_label, &cups);
for r in rem.iter().rev() {
cups.insert(dest+1, *r);
}
cur = index_of(&cups, next_label);
// println!();
}
let start = index_of(&cups, 1);
for i in 1..9 {
print!("{}", cups[(start + i) % cups.len()]);
}
println!();
}
fn part2() {
let input = String::from("389125467"); // test
//let input = String::from("872495136"); // input
let mut cups: Cups = input.chars().map(|c| c.to_digit(10).unwrap() as i32).collect();
let mut new_cups: Cups = Vec::with_capacity(1_000_000);
for v in &cups {
new_cups.push(*v);
}
for v in 10..=1_000_000 {
new_cups.push(v);
}
cups = new_cups;
// println!("{} {} {}", new_cups[0], new_cups.last().unwrap(), new_cups.len());
let mut cur = 0;
for step in 1..=1000 {
// println!("-- move {} --", step);
// print!("cups: ");
// for (i, v) in cups.iter().enumerate() {
// if i == cur {
// print!("({}) ", v);
// } else {
// print!("{} ", v);
// }
// }
// println!();
// grab the current label
let cur_label = cups[cur];
// remove three cups
let mut rem = vec!();
// indexes to delete
let mut idxs = vec!();
for i in 0..3 {
let idx = (cur + 1 + i) % cups.len();
idxs.push(idx);
rem.push(cups[idx]);
}
idxs.sort();
for idx in idxs.iter().rev() {
cups.remove(*idx);
}
// println!("pickup: {:?}", &rem);
let dest = select_dest(&cups, cur_label);
// Select next cur by label
let new_cur = index_of(&cups, cur_label);
let next_label = cups[(new_cur + 1) % cups.len()];
// println!("destination: {} (idx={}), cur = {}, new_cur={}, cur_label = {}, next_label = {} {:?}", cups[dest], &dest, cur, new_cur, cur_label, next_label, &cups);
for r in rem.iter().rev() {
cups.insert(dest+1, *r);
}
cur = index_of(&cups, next_label);
// println!();
}
let start = index_of(&cups, 1);
for i in 1..9 {
print!("{}", cups[(start + i) % cups.len()]);
}
println!();
}
fn main() {
part1();
part2();
}
| true |
7894aba5e27760cb6eea4726e217156d025f6a61
|
Rust
|
steabert/project-euler
|
/src/problem_004.rs
|
UTF-8
| 725 | 3.453125 | 3 |
[] |
no_license
|
// Largest palindrome product
// A palindromic number reads the same both ways.
// The largest palindrome made from the product of
// two 2-digit numbers is 9009 = 91 × 99.
// Find the largest palindrome made from the product of
// two 3-digit numbers.
fn main() {
let mut answer: usize = 0;
for a in 0..1000 {
'b: for b in 0..1000 {
let candidate = (a * b).to_string().into_bytes();
let size = candidate.len();
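            // Compare digits from both ends toward the middle; on the first
            // mismatch, give up on this product and move to the next b.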
for i in 0..size / 2 {
if candidate[i] != candidate[size - 1 - i] {
continue 'b;
}
}
answer = std::cmp::max(answer, a * b);
}
}
println!("answer = {}", answer);
}
| true |
a64a8410b84cf8143a12c26a027cadfdaf9a6c02
|
Rust
|
krishna-iwnl/kdb-rs
|
/src/record.rs
|
UTF-8
| 2,916 | 2.8125 | 3 |
[] |
no_license
|
#[cfg(test)]
#[path = "./record_test.rs"]
pub mod record_test;
use crate::defs::FloatType;
use crate::defs::IntType;
use crate::defs::StringType;
use crate::defs::Type;
use crate::defs::TypeTrait;
use crate::schema::Schema;
use std::fs::File;
use std::io::BufRead;
use std::io::BufReader;
pub struct Record {
cells: Vec<Box<dyn TypeTrait>>,
}
impl Record {
pub fn get_num_atts(&self) -> usize {
self.cells.len()
}
    pub fn consume(&mut self, consume_me: Record) {
        self.cells = consume_me.cells;
}
// pub fn copy(&mut self, copy_me: Record){
// self.cells = Vec::with_capacity(copy_me.get_num_atts());
// for cell in copy_me.cells.iter() {
// }
// }
pub fn project(&mut self, atts_to_keep: &[usize]) {
let cells_size = self.cells.len();
let mut keep: Vec<bool> = vec![false; cells_size];
for &index in atts_to_keep.iter() {
keep[index] = true;
}
let mut i = 0;
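        // Vec::retain exposes no index, so thread one through manually:
        // `(keep[i], i += 1).0` reads keep[i], then bumps i, and yields the flag.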
self.cells.retain(|_| (keep[i], i += 1).0);
}
pub fn suck_next_record(&mut self, my_schema: &Schema, reader: &mut BufReader<File>) -> Result<(), &str> {
self.cells = Vec::new();
let mut line = String::new();
let num_bytes = reader.read_line(&mut line).expect("IO Error in suck_next_record");
if num_bytes == 0 {
return Err("EOF");
}
let mut splits: Vec<&str> = line.split('|').collect();
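        // Each record is terminated by '|', so the last split element holds
        // only the line terminator; drop it.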
splits.pop();
let num_atts = my_schema.get_num_atts();
let atts = my_schema.get_atts();
if num_atts != splits.len() {
return Err("Mismatch atts len for schema and record");
}
for i in 0..num_atts {
match atts[i].my_type {
Type::Int => {
let int_val: i32 = match splits[i].parse::<i32>() {
Ok(int) => int,
Err(why) => {
println!("cannot convert string to i32 in suck_next_record : {}", why);
return Err("i32 parsing error");
}
};
let int_cell = IntType { value: int_val };
self.cells.push(Box::new(int_cell));
}
Type::Float => {
let float_val: f64 = match splits[i].parse::<f64>() {
Ok(float) => float,
Err(why) => {
println!("cannot convert string to f64 in suck_next_record : {}", why);
return Err("f64 parsing error");
}
};
let float_cell = FloatType { value: float_val };
self.cells.push(Box::new(float_cell));
}
Type::String => {
let str_val = splits[i];
let string_cell = StringType { value: String::from(str_val) };
self.cells.push(Box::new(string_cell));
}
}
}
Ok(())
}
pub fn new() -> Record {
Record { cells: Vec::new() }
}
pub fn print(&self) {
for cell in self.cells.iter() {
cell.print();
print!("|");
}
println!("");
}
pub fn to_bytes() -> Vec<u8> {
vec![33]
}
}
impl std::fmt::Display for Record {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
for cell in self.cells.iter() {
cell.print();
let _res = write!(f, "|");
}
let _res = writeln!(f, "");
Ok(())
}
}
| true |
6d1b5817280464671b92b0ec325460cbbc046c10
|
Rust
|
twincitiespublictelevision/mm_client
|
/src/lib.rs
|
UTF-8
| 14,318 | 3.203125 | 3 |
[
"Apache-2.0"
] |
permissive
|
//! # mm_client
//!
//! The `mm_client` crate is a very small library for communicating with the PBS Media Manager API
//! easier. It provides a [Client](struct.Client.html) for querying against either the production
//! API or the staging API.
//!
//! The main goals of the crate are to:
//!
//! * Provide authentication handling
//! * Manage API url construction
//! * Handle API error responses
//! * Make few assumptions about how responses will be used
//!
//! Currently all requests made by a [Client](struct.Client.html) are synchronous.
//!
//! # Creating a [Client](struct.Client.html)
//!
//! [Client](struct.Client.html) provides two constructors, one for the accessing the production
//! API and one for the staging API. Both constructors take an API key and secret as arguments. It
//! is recommended to create a single [Client](struct.Client.html) that is then passed around for
//! making requests.
//!
//! Note that constructing a client may fail.
//!
//! ```no_run
//! use mm_client::Client;
//!
//! let client = Client::new("API_KEY", "API_SECRET").unwrap();
//! ```
//!
//! # Fetching a single object
//!
//! Requesting a single object can be performed by using the `get` method
//!
//! ```no_run
//! use mm_client::Client;
//! use mm_client::Endpoints;
//!
//! let client = Client::new("API_KEY", "API_SECRET").unwrap();
//! let response = client.get(Endpoints::Asset, "asset-id", None);
//! ```
//! The response string can then be handed off to a JSON parser for further use.
//!
//! # Fetching a list of objects
//!
//! Requesting a list of objects can be performed by using the `list` method
//!
//! ```no_run
//! use mm_client::Client;
//! use mm_client::Endpoints;
//!
//! let client = Client::new("API_KEY", "API_SECRET").unwrap();
//! let params = vec![("since", "2017-02-12T00:00:00Z")];
//! let response = client.list(Endpoints::Show, params);
//! ```
//! Here a request is made for all of the show objects that have been updated since the supplied
//! date. Similar to the `get` method, the response string is available to pass to a JSON parser.
#![deny(missing_docs)]
#[cfg(test)]
extern crate mockito;
#[cfg(test)]
extern crate reqwest;
#[cfg(test)]
extern crate serde;
#[cfg(test)]
extern crate serde_json;
#[cfg(test)]
extern crate uuid;
mod client;
mod error;
pub use crate::client::Client;
pub use crate::client::Endpoints;
pub use crate::error::MMCError;
pub use crate::error::MMCResult;
#[cfg(test)]
mod tests {
use mockito::mock;
use mockito::Mock;
use reqwest::StatusCode;
use serde::Serialize;
use uuid::Uuid;
use crate::client::Client;
use crate::client::Endpoints;
use crate::client::Params;
use crate::error::MMCError;
use crate::error::MMCResult;
const KEY: &'static str = "hello";
const SECRET: &'static str = "world";
const BASIC_AUTH: &'static str = "Basic aGVsbG86d29ybGQ=";
#[derive(Serialize)]
struct EmptyReq {}
fn sample_client() -> Client {
Client::staging(KEY, SECRET).unwrap()
}
fn show_get(id: &str, params: Option<Params>) -> MMCResult<String> {
sample_client().get(Endpoints::Show, id, params)
}
fn show_list(params: Params) -> MMCResult<String> {
sample_client().list(Endpoints::Show, params)
}
fn show_create<T: Serialize>(id: &str, body: &T) -> MMCResult<String> {
sample_client().create(Endpoints::Show, id, Endpoints::Asset, body)
}
fn show_edit(id: &str) -> MMCResult<String> {
sample_client().edit(Endpoints::Asset, id)
}
fn show_update<T: Serialize>(id: &str, body: &T) -> MMCResult<String> {
sample_client().update(Endpoints::Asset, id, body)
}
fn show_delete(id: &str) -> MMCResult<String> {
sample_client().delete(Endpoints::Asset, id)
}
fn random_id() -> String {
Uuid::new_v4().to_hyphenated().to_string()
}
fn mock_single(endpoint: &str, id: &str, params: Option<&str>) -> Mock {
mock(
"GET",
vec!["/", endpoint, "/", id, "/", params.unwrap_or("")]
.join("")
.as_str(),
)
}
fn mock_create(parent: &str, p_id: &str, endpoint: &str) -> Mock {
mock(
"POST",
vec!["/", parent, "/", p_id, "/", endpoint, "/"]
.join("")
.as_str(),
)
}
fn mock_edit(endpoint: &str, id: &str) -> Mock {
mock(
"GET",
vec!["/", endpoint, "/", id, "/edit/"].join("").as_str(),
)
}
fn mock_update(endpoint: &str, id: &str) -> Mock {
mock("PATCH", vec!["/", endpoint, "/", id, "/"].join("").as_str())
}
fn mock_asset_update(endpoint: &str, id: &str) -> Mock {
mock(
"PATCH",
vec!["/", endpoint, "/", id, "/edit/"].join("").as_str(),
)
}
fn mock_delete(endpoint: &str, id: &str) -> Mock {
mock(
"DELETE",
vec!["/", endpoint, "/", id, "/edit/"].join("").as_str(),
)
}
fn mock_list(endpoint: &str, param_string: &str) -> Mock {
mock(
"GET",
vec!["/", endpoint, "/", param_string].join("").as_str(),
)
}
#[test]
fn single_200() {
let id = random_id();
let m = mock_single("shows", id.as_str(), None)
.with_status(200)
.with_header("content-type", "application/json")
.with_body("{\"name\":\"value\"}")
.create();
let resp = show_get(id.as_str(), None);
assert_eq!(resp.unwrap(), "{\"name\":\"value\"}");
m.assert();
}
#[test]
fn single_with_params_200() {
let id = random_id();
let param_string = "?param1=value1¶m2=value2";
let m = mock_single("shows", id.as_str(), Some(param_string))
.with_status(200)
.with_header("content-type", "application/json")
.with_body("{\"name\":\"value\"}")
.create();
let resp = show_get(
id.as_str(),
Some(vec![("param1", "value1"), ("param2", "value2")]),
);
assert_eq!(resp.unwrap(), "{\"name\":\"value\"}");
m.assert();
}
#[test]
fn list_200() {
let param_string = "?param1=value1¶m2=value2";
let m = mock_list("shows", param_string)
.with_status(200)
.with_header("content-type", "application/json")
.with_body("{\"name\":\"value\"}")
.create();
let resp = show_list(vec![("param1", "value1"), ("param2", "value2")]);
assert_eq!(resp.unwrap(), "{\"name\":\"value\"}");
m.assert();
}
#[test]
fn basic_auth_ok() {
let id = random_id();
let mut param_string = "?param=".to_string();
param_string.push_str(id.as_str());
let m = mock_list("shows", param_string.as_str())
.match_header("Authorization", BASIC_AUTH)
.with_status(200)
.with_header("content-type", "application/json")
.with_body("{\"name\":\"value\"}")
.create();
let _ = show_list(vec![("param", id.as_str())]);
m.assert();
}
#[test]
fn get_400() {
let id = random_id();
let m = mock_single("shows", id.as_str(), None)
.with_status(400)
.with_header("content-type", "application/json")
.with_body("Failure message from the server")
.create();
let resp = show_get(id.as_str(), None);
match resp.unwrap_err() {
MMCError::BadRequest(msg) => {
assert_eq!(msg, "Failure message from the server");
}
err => panic!("Expected BadRequest error but recieved {:?}", err),
}
m.assert();
}
#[test]
fn get_401() {
let id = random_id();
let m = mock_single("shows", id.as_str(), None)
.with_status(401)
.create();
let resp = show_get(id.as_str(), None);
match resp.unwrap_err() {
MMCError::NotAuthorized => (),
err => panic!("Expected NotAuthorized error but recieved {:?}", err),
}
m.assert();
}
#[test]
fn get_403() {
let id = random_id();
let m = mock_single("shows", id.as_str(), None)
.with_status(403)
.create();
let resp = show_get(id.as_str(), None);
match resp.unwrap_err() {
MMCError::NotAuthorized => (),
err => panic!("Expected NotAuthorized error but recieved {:?}", err),
}
m.assert();
}
#[test]
fn get_404() {
let id = random_id();
let m = mock_single("shows", id.as_str(), None)
.with_status(404)
.create();
let resp = show_get(id.as_str(), None);
match resp.unwrap_err() {
MMCError::ResourceNotFound => (),
err => panic!("Expected ResourceNotFound error but recieved {:?}", err),
}
m.assert();
}
#[test]
fn get_500() {
let id = random_id();
let m = mock_single("shows", id.as_str(), None)
.with_status(500)
.create();
let resp = show_get(id.as_str(), None);
match resp.unwrap_err() {
MMCError::APIFailure(StatusCode::INTERNAL_SERVER_ERROR) => (),
err => panic!("Expected APIFailure error but recieved {:?}", err),
}
m.assert();
}
#[test]
fn shorthand_singles_200() {
let id = random_id();
let endpoints = vec![
Endpoints::Asset,
Endpoints::Collection,
Endpoints::Episode,
Endpoints::Franchise,
Endpoints::Season,
Endpoints::Special,
Endpoints::Show,
];
for endpoint in endpoints.into_iter() {
let m = mock_single(endpoint.to_string().as_str(), id.as_str(), None)
.with_status(200)
.with_header("content-type", "application/json")
.with_body("{\"name\":\"value\"}")
.create();
let resp = sample_client().get(endpoint.clone(), id.as_str(), None);
assert_eq!(resp.unwrap(), String::from("{\"name\":\"value\"}"));
m.assert();
}
}
#[test]
fn shorthand_list_200() {
let id = random_id();
let param_string = vec!["?param1=", id.as_str(), "¶m2=value2"].join("");
let params = vec![("param1", id.as_str()), ("param2", "value2")];
let endpoints = vec![
Endpoints::Changelog,
Endpoints::Collection,
Endpoints::Franchise,
Endpoints::Show,
];
for endpoint in endpoints.into_iter() {
let m = mock_list(endpoint.to_string().as_str(), param_string.as_str())
.with_status(200)
.with_header("content-type", "application/json")
.with_body("{\"name\":\"value\"}")
.create();
let resp = sample_client().list(endpoint.clone(), params.clone());
assert_eq!(resp.unwrap(), String::from("{\"name\":\"value\"}"));
m.assert();
}
}
#[test]
fn create_204() {
let p_id = random_id();
let m = mock_create("shows", p_id.as_str(), "assets")
.with_status(204)
.with_header("content-type", "application/json")
.with_body("")
.match_body("{}")
.create();
let _ = show_create(p_id.as_str(), &EmptyReq {});
m.assert();
}
#[test]
fn create_400() {
let p_id = random_id();
let body = "{\"name\":\"value\"}";
let server_error = "Payload missing parameter";
let m = mock_create("shows", p_id.as_str(), "assets")
.with_status(400)
.with_header("content-type", "application/json")
.with_body(server_error)
.create();
let resp = show_create(p_id.as_str(), &body);
match resp.unwrap_err() {
MMCError::BadRequest(err) => {
assert_eq!(err, String::from(server_error));
}
err => panic!("Expected BadRequest error but recieved {:?}", err),
}
m.assert();
}
#[test]
fn edit_200() {
let id = random_id();
let body = "{\"name\":\"value\"}";
let m = mock_edit("assets", id.as_str())
.with_status(200)
.with_header("content-type", "application/json")
.with_body(body)
.create();
let resp = show_edit(id.as_str());
assert_eq!(resp.unwrap(), body);
m.assert();
}
#[test]
fn update_200() {
let id = random_id();
let m = mock_asset_update("assets", id.as_str())
.with_status(200)
.with_header("content-type", "application/json")
.match_body("{}")
.create();
let _ = show_update(id.as_str(), &EmptyReq {});
m.assert();
}
#[test]
fn delete_200() {
let id = random_id();
let m = mock_delete("assets", id.as_str())
.with_status(200)
.with_header("content-type", "application/json")
.match_body("")
.create();
let _ = show_delete(id.as_str());
m.assert();
}
#[test]
fn move_special_to_season() {
let special_id = random_id();
let season_id = random_id();
let body_str = [
"{\"data\":{\"type\":\"special\",\"id\":\"",
special_id.as_str(),
"\",\"attributes\":{\"season\":\"",
season_id.as_str(),
"\"}}}",
]
.join("");
let m = mock_update("specials", special_id.as_str())
.with_status(204)
.with_header("content-type", "application/json")
.with_body("")
.match_body(body_str.as_str())
.create();
let _ = sample_client().change_parent(
Endpoints::Season,
season_id.as_str(),
Endpoints::Special,
special_id.as_str(),
);
m.assert();
}
}
| true |
ea347b04420e1b5be03d3372d24fea0eecd1b44a
|
Rust
|
LukasKalbertodt/term-painter
|
/examples/main.rs
|
UTF-8
| 3,703 | 3.0625 | 3 |
[
"MIT",
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
] |
permissive
|
extern crate term_painter;
use term_painter::{ToStyle, Color};
use term_painter::Color::*;
use term_painter::Attr::*;
fn main() {
struct_sizes();
simple_examples();
with_example();
doc_examples();
all_styles(
&[NotSet, Black, Red, Green, Yellow, Blue, Magenta, Cyan, White]);
all_styles(
&[BrightBlack, BrightRed, BrightGreen, BrightYellow, BrightBlue,
BrightMagenta, BrightCyan, BrightWhite]);
}
fn struct_sizes() {
use std::mem::size_of;
println!("size_of(Style): {}", size_of::<term_painter::Style>());
println!("size_of(Color): {}", size_of::<Color>());
println!("size_of(Attr): {}", size_of::<term_painter::Attr>());
}
fn simple_examples() {
println!("{} | {} | {} | {} | {}",
Red.bg(Green).bold().paint("Red-Green-Bold"),
Blue.paint("Blue"),
Blue.bold().paint("BlueBold"),
Blue.bg(Magenta).paint("BlueMagentaBG"),
Plain.underline().paint("Underline"));
}
fn with_example() {
Red.with(|| {
print!("JustRed");
Bold.with(|| {
print!(" BoldRed {} BoldRed ", Underline.paint("Underline"));
});
print!("JustRed ");
print!("{}", Blue.paint("Blue (overwrite) "));
Green.with(|| {
println!("Green (overwrite)");
});
});
}
fn doc_examples() {
// --- Doc example 1
println!("{} or {} or {}",
Red.paint("Red"),
Bold.paint("Bold"),
Red.bold().paint("Both!"));
// --- Doc example 2
let x = 5;
// These two are equivalent
println!("{} | {}", x, Plain.paint(x));
// These two are equivalent, too
println!("{} | {}", Red.paint(x), Plain.fg(Red).paint(x));
// --- Doc example 3
let non_copy = "cake".to_string(); // String is *not* Copy
let copy = 27; // usize/isize *is* Copy
println!("{}", Plain.paint(&non_copy));
println!("{}", Plain.paint(©));
// non_copy is still usable here...
// copy is still usable here...
println!("{}", Plain.paint(non_copy));
println!("{}", Plain.paint(copy));
// non_copy was moved into paint, so it not usable anymore...
// copy is still usable here...
}
fn all_styles(colors: &[Color]) {
// Normal test
for c in colors { print!("{:?} ", c.paint(c)); }
println!(" (fg)");
for c in colors { print!("{:?} ", Plain.bg(*c).paint(c)); }
println!(" (bg)");
// Bold text
for c in colors { print!("{:?} ", c.bold().paint(c)); }
println!(" (bold fg)");
for c in colors { print!("{:?} ", Bold.bg(*c).paint(c)); }
println!(" (bold bg)");
// Dim text
for c in colors { print!("{:?} ", c.dim().paint(c)); }
println!(" (dim fg)");
for c in colors { print!("{:?} ", Dim.bg(*c).paint(c)); }
println!(" (dim bg)");
// Underlined text
for c in colors { print!("{:?} ", c.underline().paint(c)); }
println!(" (underline fg)");
for c in colors { print!("{:?} ", Underline.bg(*c).paint(c)); }
println!(" (underline bg)");
// Blinking text
for c in colors { print!("{:?} ", c.blink().paint(c)); }
println!(" (blink fg)");
for c in colors { print!("{:?} ", Blink.bg(*c).paint(c)); }
println!(" (blink bg)");
// Reverse text
for c in colors { print!("{:?} ", c.reverse().paint(c)); }
println!(" (reverse fg)");
for c in colors { print!("{:?} ", Reverse.bg(*c).paint(c)); }
println!(" (reverse bg)");
// Secure text
for c in colors { print!("{:?} ", c.secure().paint(c)); }
println!(" (secure fg)");
for c in colors { print!("{:?} ", Secure.bg(*c).paint(c)); }
println!(" (secure bg)");
}
| true |
43cd18c90255f04542968c5416d1c9ace5a65b40
|
Rust
|
xasopheno/weresocool-parser
|
/src/main.rs
|
UTF-8
| 735 | 2.625 | 3 |
[] |
no_license
|
extern crate colored;
extern crate socool_parser;
use colored::*;
use socool_parser::parser::*;
use std::env;
fn main() {
let args: Vec<String> = env::args().collect();
let filename;
if args.len() == 2 {
filename = &args[1];
} else {
println!("\n{}\n", "Forgot to pass in a filename.".red().bold());
println!("{}", "Example:".cyan());
println!("{}\n", "./weresocool song.socool".cyan().italic());
panic!("Wrong number of arguments.")
}
let parsed = parse_file(filename);
for (key, val) in parsed.table.iter() {
println!("\n Name: {:?} op: {:?}", key, val);
}
println!("\n Main: {:?}", parsed.table.get("main").unwrap());
}
#[cfg(test)]
mod test;
| true |
7646190dafe020d4910f99ec77c85275ff6fa630
|
Rust
|
terassyi/algorithm
|
/abc/164/b-battle/src/main.rs
|
UTF-8
| 874 | 3.359375 | 3 |
[] |
no_license
|
fn main() {
let (a, b, c, d) = input();
println!("{}", solve(a,b,c,d));
}
fn input() -> (i32, i32, i32, i32) {
let mut buf = String::new();
std::io::stdin().read_line(&mut buf).unwrap();
let mut iter = buf.split_whitespace();
let a: i32 = iter.next().unwrap().parse().unwrap();
let b: i32 = iter.next().unwrap().parse().unwrap();
let c: i32 = iter.next().unwrap().parse().unwrap();
let d: i32 = iter.next().unwrap().parse().unwrap();
(a,b,c,d)
}
fn solve(a: i32, b: i32, c: i32, d: i32) -> String {
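    // Simulate the fight turn by turn: Takahashi strikes first, and the answer
    // is "Yes" exactly when Aoki's health reaches 0 or below before Takahashi's.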
let mut takahashi: (i32, i32) = (a, b); // hp, attack
let mut aoki: (i32, i32) = (c, d);
loop {
aoki.0 -= takahashi.1;
if aoki.0 <= 0 {
return String::from("Yes");
}
takahashi.0 -= aoki.1;
if takahashi.0 <= 0 {
return String::from("No");
}
}
}
| true |
ac2aae04dfb32acce547f15c19b964d6986c347b
|
Rust
|
pps5/AOJ
|
/volume0/0012.rs
|
UTF-8
| 1,827 | 3.296875 | 3 |
[] |
no_license
|
use std::cmp::Ordering;
use std::io::{stdin, BufRead, BufReader};
struct Vector {
x: f32,
y: f32,
}
fn main() {
let input = BufReader::new(stdin());
for line in input.lines() {
let values: Vec<f32> = line.unwrap()
.split_whitespace()
.filter_map(|x| x.parse::<f32>().ok())
.collect();
let (tri_vectors, target) = get_vectors(values);
if is_inside(tri_vectors, target) {
println!("YES");
} else {
println!("NO");
}
}
}
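// A point lies inside the triangle iff it is on the same side of all three
// directed edges, i.e. the z-components of the cross products all share a sign.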
fn is_inside(tri_vectors: Vec<Vector>, target: Vector) -> bool {
let idx_pair = [(1, 0), (2, 1), (0, 2)];
let mut cross_product = vec![];
for pair in &idx_pair {
let v1 = sub_vector(&tri_vectors[pair.0], &tri_vectors[pair.1]);
let v2 = sub_vector(&target, &tri_vectors[pair.0]);
let c = v1.x * v2.y - v1.y * v2.x;
cross_product.push(c);
}
let plus: Vec<&f32> = cross_product.iter().filter(|x| x > &&0.0).collect();
let minus: Vec<&f32> = cross_product.iter().filter(|x| x < &&0.0).collect();
return plus.len() == 3 || minus.len() == 3;
}
fn sub_vector(v1: &Vector, v2: &Vector) -> Vector {
return Vector {
x: v1.x - v2.x,
y: v1.y - v2.y,
};
}
fn get_vectors(points: Vec<f32>) -> (Vec<Vector>, Vector) {
let mut vectors = Vec::with_capacity(3);
let mut i = 0;
while i < points.len() - 2 {
vectors.push(Vector {
x: points[i],
y: points[i + 1],
});
i += 2;
}
vectors.sort_by(|a, b| match a.x.partial_cmp(&b.x).unwrap() {
Ordering::Equal => a.y.partial_cmp(&b.y).unwrap(),
other => other,
});
return (
vectors,
Vector {
x: points[i],
y: points[i + 1],
},
);
}
| true |
ec20c26876de3ff34359958839cc1d45deeafc4c
|
Rust
|
PierreCapo/delf
|
/delf/src/lib.rs
|
UTF-8
| 1,918 | 3.03125 | 3 |
[] |
no_license
|
use derive_try_from_primitive::TryFromPrimitive;
use std::convert::TryFrom;
mod parse;
#[derive(Debug, Clone, Copy, PartialEq, Eq, TryFromPrimitive)]
#[repr(u16)]
pub enum Type {
None = 0x0,
Rel = 0x1,
Exec = 0x2,
Dyn = 0x3,
Core = 0x4,
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, TryFromPrimitive)]
#[repr(u16)]
pub enum Machine {
X86 = 0x03,
X86_64 = 0x3e,
}
#[derive(Debug)]
pub struct File {
// we'll add fields as we go
pub r#type: Type,
pub machine: Machine,
}
impl File {
const MAGIC: &'static [u8] = &[0x7f, 0x45, 0x4c, 0x46];
pub fn parse(i: parse::Input) -> parse::Result<Self> {
use nom::{
bytes::complete::{tag, take},
combinator::map,
error::context,
number::complete::le_u16,
sequence::tuple,
};
let (i, _) = tuple((
// -------
context("Magic", tag(Self::MAGIC)),
context("Class", tag(&[0x2])),
context("Endianness", tag(&[0x1])),
context("Version", tag(&[0x1])),
context("OS ABI", nom::branch::alt((tag(&[0x0]), tag(&[0x3])))),
// -------
context("Padding", take(8_usize)),
))(i)?;
let (i, (r#type, machine)) = tuple((
context("Type", map(le_u16, |x| Type::try_from(x).unwrap())),
context("Machine", map(le_u16, |x| Machine::try_from(x).unwrap())),
))(i)?;
let res = Self { machine, r#type };
Ok((i, res))
}
}
#[cfg(test)]
mod tests {
use super::Machine;
use std::convert::TryFrom;
#[test]
fn try_enums() {
assert_eq!(Machine::X86_64 as u16, 0x3E);
assert_eq!(Machine::try_from(0x3E), Ok(Machine::X86_64));
assert_eq!(Machine::try_from(0xFA), Err(0xFA));
}
#[test]
fn type_to_u16() {
assert_eq!(super::Type::Dyn as u16, 0x3);
}
}
| true |
d25a494a486a1b841493f8d1c2c63012031c943c
|
Rust
|
CarlKlagba/rust-sandbox
|
/src/bank_account/mod.rs
|
UTF-8
| 4,048 | 3.734375 | 4 |
[] |
no_license
|
use crate::bank_account::Transaction::{Deposit, Withdraw};
type Date = String;
pub struct DateService {}
impl DateService{
pub fn new() -> DateService {
DateService {}
}
pub fn current_date(&self) -> Date {
return "2023-05-05".to_string();
}
}
#[derive(Clone)]
pub enum Transaction{
Deposit(f32, Date),
Withdraw(f32, Date),
}
pub struct Account {
transactions: Vec<Transaction>,
}
impl Account {
pub(crate) fn new() -> Account {
Account{ transactions: vec![]}
}
pub fn make_deposit(&mut self, amount: f32, date: Date) -> Result<(), &'static str> {
if amount < 0.0 {
return Err("The deposit amount should be more than 0")
}
self.transactions.push(Deposit(amount, date));
return Ok(());
}
pub fn make_withdraw(&mut self, amount: f32, date: Date) -> Result<(), &'static str>{
if self.balance() - amount < 0.0 {
return Err("No overdraft used")
}
self.transactions.push(Withdraw(amount, date));
return Ok(())
}
fn balance(&self) -> f32 {
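        // The balance is the sum of all transactions: deposits count positive,
        // withdrawals negative.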
return self.transactions.iter()
.map(|t| -> f32 {
match t {
Deposit(val, _) => *val,
Withdraw(val, _) => -val,
}
}).sum();
}
pub fn print_balance(&self, printer: &dyn AccountPrinter) {
printer.print_balance(self.balance())
}
pub fn print_statement(&self, printer: &dyn AccountPrinter) {
printer.print_transaction_history(self.transactions.to_vec());
}
}
pub struct SysOutPrinter {
}
pub trait AccountPrinter {
fn print_balance(&self, balance: f32) -> ();
fn print_transaction_history(&self, transactions: Vec<Transaction>) -> ();
}
impl AccountPrinter for SysOutPrinter {
fn print_balance(&self, balance: f32) -> (){
println!("{}", format!("current balance: {}", balance))
}
fn print_transaction_history(&self, transactions: Vec<Transaction>) -> () {
let header = "Date | Amount";
let rows: Vec<String> = transactions.iter()
.map(format_transaction)
.collect();
print!("{}\n{}", header, rows.join("\n"))
}
}
fn format_transaction(transaction: &Transaction) -> String {
match transaction {
Withdraw(amount, date) => format!(" {} | {}", date, -amount),
Deposit(amount, date) => format!(" {} | {}", date, amount),
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn should_get_empty_balance_when_account_is_created() {
let account = Account::new();
assert_eq!(account.balance(), 0.0)
}
#[test]
    fn should_make_deposit() {
let mut account = Account::new();
let date_service = DateService::new();
let result = account.make_deposit(50.0, date_service.current_date());
assert_eq!(result.is_ok(), true);
assert_eq!(account.balance(), 50.0)
}
#[test]
    fn should_not_make_deposit_under_zero() {
let mut account = Account::new();
let date_service = DateService::new();
let result = account.make_deposit(-1.0,date_service.current_date());
assert_eq!(result.is_err(), true);
assert_eq!(account.balance(), 0.0)
}
#[test]
fn should_make_withdraw() {
let mut account = Account::new();
let date_service = DateService::new();
account.make_deposit(50.0, date_service.current_date());
let result = account.make_withdraw(5.0, date_service.current_date());
assert_eq!(result.is_ok(), true);
assert_eq!(account.balance(), 45.0)
}
#[test]
fn should_not_make_withdraw_if_overdraft() {
let mut account = Account::new();
let date_service = DateService::new();
account.make_deposit(3.0, date_service.current_date());
let result = account.make_withdraw(5.0, date_service.current_date());
assert_eq!(result.is_err(), true);
assert_eq!(account.balance(), 3.0)
}
}
| true |
7bc80db5aa94d5631aded3e17dc69b541d0c34af
|
Rust
|
IThawk/rust-project
|
/rust-master/src/test/run-pass-valgrind/cast-enum-with-dtor.rs
|
UTF-8
| 687 | 2.875 | 3 |
[
"MIT",
"LicenseRef-scancode-other-permissive",
"Apache-2.0",
"BSD-3-Clause",
"BSD-2-Clause",
"NCSA"
] |
permissive
|
#![allow(dead_code)]
// check dtor calling order when casting enums.
use std::sync::atomic;
use std::sync::atomic::Ordering;
use std::mem;
enum E {
A = 0,
B = 1,
C = 2
}
static FLAG: atomic::AtomicUsize = atomic::AtomicUsize::new(0);
impl Drop for E {
fn drop(&mut self) {
// avoid dtor loop
unsafe { mem::forget(mem::replace(self, E::B)) };
FLAG.store(FLAG.load(Ordering::SeqCst)+1, Ordering::SeqCst);
}
}
fn main() {
assert_eq!(FLAG.load(Ordering::SeqCst), 0);
{
let e = E::C;
assert_eq!(e as u32, 2);
assert_eq!(FLAG.load(Ordering::SeqCst), 0);
}
assert_eq!(FLAG.load(Ordering::SeqCst), 0);
}
| true |
aadb4589ef6b3cc20a3e188d963b025679e33dfa
|
Rust
|
Jabbslad/rust-book
|
/references_and_borrowing/src/main.rs
|
UTF-8
| 1,059 | 3.84375 | 4 |
[] |
no_license
|
fn main() {
let s1 = String::from("hello");
let len = calculate_length(&s1);
println!("{:p}", &s1);
println!("length = {}", len);
let mut s2 = String::from("hello");
change(&mut s2);
let s3 = &mut s2;
println!("{}", s3);
let mut s = String::from("Hello");
let r1 = &s;
let r2 = &s;
println!("{} {}", r1, r2);
let r3 = &mut s;
r3.push_str(" world!!!");
println!("{}", r3);
println!("no dangle: {}", no_dangle());
let mut hello = String::from("Hello world!");
let f = first(&hello);
//hello.clear(); -- cannot do mutable borrow here.
println!("first = '{}'", f);
}
fn first(s: &str) -> &str {
let bytes = s.as_bytes();
for (i, &item) in bytes.iter().enumerate() {
if item == b' ' {
return &s[..i];
}
}
&s[..]
}
fn calculate_length(s: &String) -> usize {
println!("{:p}", s);
s.len()
}
fn change(some_string: &mut String) {
some_string.push_str(" world!");
}
fn no_dangle() -> String {
String::from("hello!!!")
}
| true |
48e60bae5ec3cf68f22bef43f7e318e255108a71
|
Rust
|
poetry-book/poetry-book
|
/src/lang/latex/book_body_latex.rs
|
UTF-8
| 4,007 | 2.9375 | 3 |
[
"MIT",
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
] |
permissive
|
use crate::core::{poem::Poem, poem_formatting::CenteredVerse, poem_formatting::PoemFormatting};
use crate::lang::latex::latex_output::Latex;
pub struct BookBodyLatex<'a> {
pub poem_formatting: &'a PoemFormatting,
pub poems: &'a Vec<Poem>,
}
impl<'a> Latex for BookBodyLatex<'a> {
fn latex(&self) -> String {
let poems: Vec<String> = self.poems.iter().map(|p| self.poem_latex(p)).collect();
poems.join("\n\n\n")
}
}
impl<'a> BookBodyLatex<'a> {
fn poem_latex(&self, poem: &Poem) -> String {
let mut output = format!("\\poemtitle{{{}}}\n", poem.title());
output.push_str(&self.get_poem_begin(poem));
output.push_str("\n\n");
output.push_str(&poem.latex());
output.push_str("\n\n");
output.push_str(self.get_poem_end());
output.push_str("\n\\newpage");
output
}
fn get_poem_begin(&self, poem: &Poem) -> String {
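        // CenteredVerse::Average centers the poem on a dummy verse of 'x'
        // characters whose length equals the average verse length;
        // CenteredVerse::Longest uses the cverse environment instead.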
match self.poem_formatting.centered_verse() {
CenteredVerse::Average => {
let mut poem_begin = r"\settowidth{\versewidth}".to_string();
let average_verse_size = poem.get_average_verse_size();
let average_sized_verse = "x".repeat(average_verse_size);
let average_sized_verse = format!("{{{}}}\n", average_sized_verse);
poem_begin.push_str(&average_sized_verse);
poem_begin.push_str("\\begin{verse}[\\versewidth]");
poem_begin
}
CenteredVerse::Longest => r"\begin{cverse}".to_string(),
}
}
fn get_poem_end(&self) -> &str {
match self.poem_formatting.centered_verse() {
CenteredVerse::Average => r"\end{verse}",
CenteredVerse::Longest => r"\end{cverse}",
}
}
}
#[cfg(test)]
mod tests {
use super::*;
static POEM_TITLE: &str = "Lorem ipsum";
static POEM_TEXT: &str =
"Pellentesque dapibus suscipit ligula.
Donec posuere augue in quam.
Etiam vel tortor sodales tellus ultricies commodo.
Suspendisse potenti.
Aenean in sem ac leo mollis blandit.
Donec neque quam, dignissim in, mollis nec, sagittis eu, wisi.
Phasellus lacus.
Etiam laoreet quam sed arcu.
Phasellus at dui in ligula mollis ultricies.
Integer placerat tristique nisl.
Praesent augue.
Fusce commodo.
Vestibulum convallis, lorem a tempus semper, dui dui euismod elit, vitae placerat urna tortor vitae lacus.
Nullam libero mauris, consequat quis, varius et, dictum id, arcu.
Mauris mollis tincidunt felis.
Aliquam feugiat tellus ut neque.
Nulla facilisis, risus a rhoncus fermentum, tellus tellus lacinia purus, et dictum nunc justo sit amet elit.";
#[test]
fn create_latex() {
let expected_latex = r"\poemtitle{Lorem ipsum}
\settowidth{\versewidth}{xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx}
\begin{verse}[\versewidth]
Pellentesque dapibus suscipit ligula. \\
Donec posuere augue in quam. \\
Etiam vel tortor sodales tellus ultricies commodo. \\
Suspendisse potenti. \\!
Aenean in sem ac leo mollis blandit. \\
Donec neque quam, dignissim in, mollis nec, sagittis eu, wisi. \\
Phasellus lacus. \\
Etiam laoreet quam sed arcu. \\!
Phasellus at dui in ligula mollis ultricies. \\
Integer placerat tristique nisl. \\
Praesent augue. \\
Fusce commodo. \\!
Vestibulum convallis, lorem a tempus semper, dui dui euismod elit, vitae placerat urna tortor vitae lacus. \\
Nullam libero mauris, consequat quis, varius et, dictum id, arcu. \\
Mauris mollis tincidunt felis. \\!
Aliquam feugiat tellus ut neque. \\
Nulla facilisis, risus a rhoncus fermentum, tellus tellus lacinia purus, et dictum nunc justo sit amet elit. \\!
\end{verse}
\newpage";
let poem = Poem::new(POEM_TITLE, POEM_TEXT);
let book_body = BookBodyLatex {
poem_formatting: &PoemFormatting::new(CenteredVerse::Average),
poems: &vec![poem],
};
let actual_latex = book_body.latex();
assert_eq!(actual_latex, expected_latex);
}
}
| true |
146b3eb79c999008777285240734481424e257b9
|
Rust
|
iPTF14hls/rust_sorting_algorithms
|
/src/buso.rs
|
UTF-8
| 611 | 2.890625 | 3 |
[] |
no_license
|
use std::cmp::{Ord, Ordering};
#[bench]
fn bubble_sort_properly_sorts(b: &mut test::Bencher) {
use crate::test_functions::{sort_testing, BENCH_CONSTANT};
sort_testing(&bubble_sort, BENCH_CONSTANT, b)
}
pub fn bubble_sort<T: Ord + Copy>(mut array: &mut [T]) {
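    // Each pass bubbles the largest remaining element to the end of the slice,
    // then the slice is shrunk by one and the process repeats.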
while array.len() > 1 {
let len = array.len();
let (mut i, mut ip1) = (0, 1);
while array.len() > ip1 {
if let Ordering::Less = array[ip1].cmp(&array[i]) {
array.swap(i, ip1);
}
i+=1;
ip1+=1;
}
array = &mut array[..len-1];
}
}
| true |
465e39b3199db72041b1dabbc058f19b426b1cd3
|
Rust
|
TriedWorks/glucose
|
/src/impls/mint.rs
|
UTF-8
| 4,126 | 2.8125 | 3 |
[
"Apache-2.0"
] |
permissive
|
#[cfg(feature = "algebra")]
pub mod algebra {
use crate::algebra::linear::{Point, SquareMatrix, Vector};
use mint::{
ColumnMatrix2, ColumnMatrix3, ColumnMatrix4, Point2, Point3, Vector2, Vector3, Vector4,
};
impl<T> From<Point2<T>> for Point<T, 2> {
fn from(rhs: Point2<T>) -> Self {
Self::from([rhs.x, rhs.y])
}
}
impl<T> From<Point3<T>> for Point<T, 3> {
fn from(rhs: Point3<T>) -> Self {
Self::from([rhs.x, rhs.y, rhs.z])
}
}
impl<T> From<Vector2<T>> for Vector<T, 2> {
fn from(rhs: Vector2<T>) -> Self {
Self::from([rhs.x, rhs.y])
}
}
impl<T> From<Vector3<T>> for Vector<T, 3> {
fn from(rhs: Vector3<T>) -> Self {
Self::from([rhs.x, rhs.y, rhs.z])
}
}
impl<T> From<Vector4<T>> for Vector<T, 4> {
fn from(rhs: Vector4<T>) -> Self {
Self::from([rhs.x, rhs.y, rhs.z, rhs.w])
}
}
impl<T> From<ColumnMatrix2<T>> for SquareMatrix<T, 2> {
fn from(rhs: ColumnMatrix2<T>) -> Self {
let col_1 = [rhs.x.x, rhs.x.y];
let col_2 = [rhs.y.x, rhs.y.y];
Self::new([col_1, col_2])
}
}
impl<T> From<ColumnMatrix3<T>> for SquareMatrix<T, 3> {
fn from(rhs: ColumnMatrix3<T>) -> Self {
let col_1 = [rhs.x.x, rhs.x.y, rhs.x.z];
let col_2 = [rhs.y.x, rhs.y.y, rhs.y.z];
let col_3 = [rhs.z.x, rhs.z.y, rhs.z.z];
Self::new([col_1, col_2, col_3])
}
}
impl<T> From<ColumnMatrix4<T>> for SquareMatrix<T, 4> {
fn from(rhs: ColumnMatrix4<T>) -> Self {
let col_1 = [rhs.x.x, rhs.x.y, rhs.x.z, rhs.x.w];
let col_2 = [rhs.y.x, rhs.y.y, rhs.y.z, rhs.y.w];
let col_3 = [rhs.z.x, rhs.z.y, rhs.z.z, rhs.z.w];
let col_4 = [rhs.w.x, rhs.w.y, rhs.w.z, rhs.w.w];
Self::new([col_1, col_2, col_3, col_4])
}
}
impl<T: Copy> From<Point<T, 2>> for Point2<T> {
fn from(rhs: Point<T, 2>) -> Self {
Self::from(rhs.data[0])
}
}
impl<T: Copy> From<Point<T, 3>> for Point3<T> {
fn from(rhs: Point<T, 3>) -> Self {
Self::from(rhs.data[0])
}
}
impl<T: Copy> From<Vector<T, 2>> for Vector2<T> {
fn from(rhs: Vector<T, 2>) -> Self {
Self::from(rhs.data[0])
}
}
impl<T: Copy> From<Vector<T, 3>> for Vector3<T> {
fn from(rhs: Vector<T, 3>) -> Self {
Self::from(rhs.data[0])
}
}
impl<T: Copy> From<Vector<T, 4>> for Vector4<T> {
fn from(rhs: Vector<T, 4>) -> Self {
Self::from(rhs.data[0])
}
}
impl<T: Copy> From<SquareMatrix<T, 2>> for ColumnMatrix2<T> {
fn from(rhs: SquareMatrix<T, 2>) -> Self {
let array = [rhs[[0, 0]], rhs[[0, 1]], rhs[[1, 0]], rhs[[1, 1]]];
Self::from(array)
}
}
impl<T: Copy> From<SquareMatrix<T, 3>> for ColumnMatrix3<T> {
fn from(rhs: SquareMatrix<T, 3>) -> Self {
let array = [
rhs[[0, 0]],
rhs[[0, 1]],
rhs[[0, 2]],
rhs[[1, 0]],
rhs[[1, 1]],
rhs[[1, 2]],
rhs[[2, 0]],
rhs[[2, 1]],
rhs[[2, 2]],
];
Self::from(array)
}
}
impl<T: Copy> From<SquareMatrix<T, 4>> for ColumnMatrix4<T> {
fn from(rhs: SquareMatrix<T, 4>) -> Self {
let array = [
rhs[[0, 0]],
rhs[[0, 1]],
rhs[[0, 2]],
rhs[[0, 3]],
rhs[[1, 0]],
rhs[[1, 1]],
rhs[[1, 2]],
rhs[[1, 3]],
rhs[[2, 0]],
rhs[[2, 1]],
rhs[[2, 2]],
rhs[[2, 3]],
rhs[[3, 0]],
rhs[[3, 1]],
rhs[[3, 2]],
rhs[[3, 3]],
];
Self::from(array)
}
}
}
| true |
6ada487286627e93eab865bb54db59d28931ad55
|
Rust
|
swelham/gleam
|
/src/doc/block_manager.rs
|
UTF-8
| 5,579 | 2.546875 | 3 |
[
"Swift-exception",
"Apache-2.0"
] |
permissive
|
use crate::ast::{Statement, TypedModule, UntypedStatement};
use crate::doc::doc::*;
use crate::pretty::Documentable;
use std::collections::HashMap;
#[derive(Debug, PartialEq)]
struct RawComment {
pub start: usize,
pub end: usize,
pub content: String,
}
#[derive(Debug, PartialEq)]
pub struct DocBlockManager {
pub(self) current_block: Vec<RawComment>,
pub(self) blocks: Vec<Vec<RawComment>>,
}
pub fn gen_doc_chunk(module: &TypedModule) -> EEP48DocChunk {
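    // Build one EEP48 doc entry per statement this function knows how to
    // document; anything else falls through to the catch-all arm below.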
let mut docs = Vec::<EEP48Doc>::new();
for statement in module.statements.iter() {
match statement {
Statement::Fn {
name,
args,
public: true,
return_annotation,
doc,
..
} => {
let pretty = crate::format::fn_signature(&true, name, args, return_annotation);
let doc = doc.as_ref().map(|d| {
vec![("en-US".to_string(), d.to_string())]
.into_iter()
.collect()
});
docs.push(EEP48Doc {
name: name.to_string(),
arity: args.len(),
signature: vec![crate::pretty::format(70, pretty)],
typ: DocType::Fn,
doc,
});
}
Statement::TypeAlias {
doc,
location,
args,
alias,
resolved_type,
public: true,
} => {
let pretty = (&UntypedStatement::TypeAlias {
doc: None,
location: location.clone(),
args: args.clone(),
alias: alias.clone(),
resolved_type: resolved_type.clone(),
public: true,
})
.to_doc();
let fn_docs = doc.as_ref().map(|d| {
vec![("en-US".to_string(), d.to_string())]
.into_iter()
.collect()
});
docs.push(EEP48Doc {
name: alias.to_string(),
arity: args.len(),
signature: vec![crate::pretty::format(80, pretty)],
doc: fn_docs,
typ: DocType::TypeAlias,
});
}
Statement::CustomType {
location,
name,
args,
constructors,
doc,
..
} => {
let statement = UntypedStatement::CustomType {
doc: None,
location: location.clone(),
args: args.clone(),
name: name.clone(),
constructors: constructors.clone(),
public: true,
};
let pretty = (&statement).to_doc();
let fn_docs = doc.as_ref().map(|d| {
vec![("en-US".to_string(), d.to_string())]
.into_iter()
.collect()
});
docs.push(EEP48Doc {
name: name.to_string(),
arity: args.len(),
signature: vec![crate::pretty::format(80, pretty)],
doc: fn_docs,
typ: DocType::CustomType,
});
}
Statement::ExternalFn {
name,
args,
retrn,
public: true,
doc,
..
} => {
let pretty = crate::format::external_fn_signature(&true, name, args, retrn);
let fn_docs = doc.as_ref().map(|d| {
vec![("en-US".to_string(), d.to_string())]
.into_iter()
.collect()
});
docs.push(EEP48Doc {
name: name.to_string(),
arity: args.len(),
signature: vec![crate::pretty::format(80, pretty.to_doc())],
doc: fn_docs,
typ: DocType::Fn,
});
}
Statement::ExternalType {
location,
name,
args,
doc,
..
} => {
let pretty = UntypedStatement::ExternalType {
doc: None,
location: location.clone(),
args: args.clone(),
name: name.clone(),
public: true,
}
.to_doc();
let fn_docs = doc.as_ref().map(|d| {
vec![("en-US".to_string(), d.to_string())]
.into_iter()
.collect()
});
docs.push(EEP48Doc {
name: name.to_string(),
arity: args.len(),
signature: vec![crate::pretty::format(80, pretty)],
doc: fn_docs,
typ: DocType::ExternalType,
});
}
_ => {}
}
}
EEP48DocChunk {
anno: ErlAnno {
line: 0,
column: 0,
file: module.name_string(),
},
module_doc: HashMap::new(),
docs,
}
}
| true |
c7cb5f6c10ae365bce0b73f5b7cd32a76376216f
|
Rust
|
softdevteam/lspace
|
/src/lib/lspace/input/pointer.rs
|
UTF-8
| 2,646 | 3.25 | 3 |
[
"MIT",
"Apache-2.0"
] |
permissive
|
use std::cell::Cell;
use geom::point2::Point2;
use geom::affinexform2::AffineXform2;
#[derive(Debug, Copy, Clone, PartialEq)]
pub enum PointerPosition {
OutOfBounds,
AtPosition(Point2),
}
impl PointerPosition {
pub fn out_of_bounds() -> PointerPosition {
PointerPosition::OutOfBounds
}
pub fn at_position(pos: Point2) -> PointerPosition {
PointerPosition::AtPosition(pos)
}
pub fn is_within_bounds(&self) -> bool {
match self {
&PointerPosition::OutOfBounds => false,
&PointerPosition::AtPosition(..) => true
}
}
pub fn position<'a>(&'a self) -> Option<&'a Point2> {
match self {
&PointerPosition::OutOfBounds => None,
&PointerPosition::AtPosition(ref pos) => Some(pos)
}
}
pub fn transformed(&self, xform: &AffineXform2) -> PointerPosition {
match self {
&PointerPosition::OutOfBounds => PointerPosition::OutOfBounds,
&PointerPosition::AtPosition(ref p) => PointerPosition::AtPosition(xform * p)
}
}
}
pub trait TPointer<'a> {
fn position(&self) -> PointerPosition;
fn transformed(&'a self, x: &AffineXform2) -> TransformedPointer;
fn concrete_pointer(&'a self) -> &'a Pointer;
}
pub struct Pointer {
position: Cell<PointerPosition>,
}
impl Pointer {
pub fn new() -> Pointer {
Pointer{position: Cell::new(PointerPosition::out_of_bounds())}
}
pub fn set_position(&self, pos: PointerPosition) {
self.position.set(pos);
}
}
impl <'a> TPointer<'a> for Pointer {
fn position(&self) -> PointerPosition {
self.position.get()
}
fn transformed(&'a self, x: &AffineXform2) -> TransformedPointer<'a> {
TransformedPointer::new(self, x)
}
fn concrete_pointer(&'a self) -> &'a Pointer {
self
}
}
pub struct TransformedPointer<'a> {
underlying_pointer: &'a Pointer,
xform: AffineXform2
}
impl <'a> TransformedPointer<'a> {
fn new(underlying_pointer: &'a Pointer, xform: &AffineXform2) -> TransformedPointer<'a> {
TransformedPointer{underlying_pointer: underlying_pointer, xform: *xform}
}
}
impl <'a> TPointer<'a> for TransformedPointer<'a> {
fn position(&self) -> PointerPosition {
self.underlying_pointer.position.get().transformed(&self.xform)
}
fn transformed(&'a self, x: &AffineXform2) -> TransformedPointer<'a> {
TransformedPointer::new(self.underlying_pointer, &(x * self.xform))
}
fn concrete_pointer(&'a self) -> &'a Pointer {
self.underlying_pointer.concrete_pointer()
}
}
| true |
e2432cb4c80ff51bbf9f3b4367a3e695a396f032
|
Rust
|
steadylearner/Rust-Full-Stack
|
/gRPC/rust/product/src/models/product.rs
|
UTF-8
| 1,909 | 2.859375 | 3 |
[
"MIT"
] |
permissive
|
use crate::schema::products;
use diesel::RunQueryDsl;
use diesel::QueryDsl;
use diesel::PgConnection;
use crate::schema::products::dsl;
use crate::schema::products::dsl::*;
#[derive(Queryable, Serialize, Deserialize)]
pub struct Product {
pub id: i32,
pub name: String,
pub stock: f64,
pub price: Option<i32>,
}
// Use product_id instead of id to avoid confusing the compiler with duplicate variable names.
impl Product {
pub fn find(product_id: &i32, connection: &PgConnection) -> Result<Product, diesel::result::Error> {
products::table.find(product_id).first(connection)
}
pub fn destroy(product_id: &i32, connection: &PgConnection) -> Result<(), diesel::result::Error> {
diesel::delete(dsl::products.find(product_id)).execute(connection)?;
Ok(())
}
pub fn update(product_id: &i32, new_product: &NewProduct, connection: &PgConnection) -> Result<(), diesel::result::Error> {
diesel::update(dsl::products.find(product_id))
.set(new_product)
.execute(connection)?;
Ok(())
}
}
// Move these methods into functions, or do without them.
#[derive(Serialize, Deserialize)]
pub struct ProductList(pub Vec<Product>);
impl ProductList {
pub fn list(connection: &PgConnection) -> Self {
let result =
products
.limit(10)
.load::<Product>(connection)
.expect("Error loading products");
ProductList(result)
}
}
#[derive(Insertable, Deserialize, AsChangeset)]
#[table_name="products"]
pub struct NewProduct {
pub name: Option<String>,
pub stock: Option<f64>,
pub price: Option<i32>
}
impl NewProduct {
pub fn create(&self, connection: &PgConnection) -> Result<Product, diesel::result::Error> {
diesel::insert_into(products::table)
.values(self)
.get_result(connection)
}
}
| true |
d712faf5c179dfe6281c157666740899ecf71989
|
Rust
|
alacritty/alacritty
|
/alacritty_terminal/src/event_loop.rs
|
UTF-8
| 14,191 | 2.90625 | 3 |
[
"Apache-2.0",
"MIT"
] |
permissive
|
//! The main event loop which performs I/O on the pseudoterminal.
use std::borrow::Cow;
use std::collections::VecDeque;
use std::fs::File;
use std::io::{self, ErrorKind, Read, Write};
use std::marker::Send;
use std::sync::Arc;
use std::thread::JoinHandle;
use std::time::Instant;
use log::error;
#[cfg(not(windows))]
use mio::unix::UnixReady;
use mio::{self, Events, PollOpt, Ready};
use mio_extras::channel::{self, Receiver, Sender};
use crate::event::{self, Event, EventListener, WindowSize};
use crate::sync::FairMutex;
use crate::term::Term;
use crate::{ansi, thread, tty};
/// Max bytes to read from the PTY before forced terminal synchronization.
const READ_BUFFER_SIZE: usize = 0x10_0000;
/// Max bytes to read from the PTY while the terminal is locked.
const MAX_LOCKED_READ: usize = u16::MAX as usize;
/// Messages that may be sent to the `EventLoop`.
#[derive(Debug)]
pub enum Msg {
/// Data that should be written to the PTY.
Input(Cow<'static, [u8]>),
/// Indicates that the `EventLoop` should shut down, as Alacritty is shutting down.
Shutdown,
/// Instruction to resize the PTY.
Resize(WindowSize),
}
/// The main event!.. loop.
///
/// Handles all the PTY I/O and runs the PTY parser which updates terminal
/// state.
pub struct EventLoop<T: tty::EventedPty, U: EventListener> {
poll: mio::Poll,
pty: T,
rx: Receiver<Msg>,
tx: Sender<Msg>,
terminal: Arc<FairMutex<Term<U>>>,
event_proxy: U,
hold: bool,
ref_test: bool,
}
/// Helper type which tracks how much of a buffer has been written.
struct Writing {
source: Cow<'static, [u8]>,
written: usize,
}
pub struct Notifier(pub Sender<Msg>);
impl event::Notify for Notifier {
fn notify<B>(&self, bytes: B)
where
B: Into<Cow<'static, [u8]>>,
{
let bytes = bytes.into();
        // The terminal hangs if we send 0 bytes through.
if bytes.len() == 0 {
return;
}
let _ = self.0.send(Msg::Input(bytes));
}
}
impl event::OnResize for Notifier {
fn on_resize(&mut self, window_size: WindowSize) {
let _ = self.0.send(Msg::Resize(window_size));
}
}
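
// Hedged usage sketch (added for illustration; not part of the original file):
// the `Sender<Msg>` returned by `EventLoop::channel` can be wrapped in a
// `Notifier` to queue bytes for the PTY. The byte string is purely illustrative.
#[allow(dead_code)]
fn notifier_example(tx: Sender<Msg>) {
    use crate::event::Notify as _;
    Notifier(tx).notify(b"echo hello\r".to_vec());
}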
/// All of the mutable state needed to run the event loop.
///
/// Contains list of items to write, current write state, etc. Anything that
/// would otherwise be mutated on the `EventLoop` goes here.
#[derive(Default)]
pub struct State {
write_list: VecDeque<Cow<'static, [u8]>>,
writing: Option<Writing>,
parser: ansi::Processor,
}
impl State {
#[inline]
fn ensure_next(&mut self) {
if self.writing.is_none() {
self.goto_next();
}
}
#[inline]
fn goto_next(&mut self) {
self.writing = self.write_list.pop_front().map(Writing::new);
}
#[inline]
fn take_current(&mut self) -> Option<Writing> {
self.writing.take()
}
#[inline]
fn needs_write(&self) -> bool {
self.writing.is_some() || !self.write_list.is_empty()
}
#[inline]
fn set_current(&mut self, new: Option<Writing>) {
self.writing = new;
}
}
impl Writing {
#[inline]
fn new(c: Cow<'static, [u8]>) -> Writing {
Writing { source: c, written: 0 }
}
#[inline]
fn advance(&mut self, n: usize) {
self.written += n;
}
#[inline]
fn remaining_bytes(&self) -> &[u8] {
&self.source[self.written..]
}
#[inline]
fn finished(&self) -> bool {
self.written >= self.source.len()
}
}
impl<T, U> EventLoop<T, U>
where
T: tty::EventedPty + event::OnResize + Send + 'static,
U: EventListener + Send + 'static,
{
/// Create a new event loop.
pub fn new(
terminal: Arc<FairMutex<Term<U>>>,
event_proxy: U,
pty: T,
hold: bool,
ref_test: bool,
) -> EventLoop<T, U> {
let (tx, rx) = channel::channel();
EventLoop {
poll: mio::Poll::new().expect("create mio Poll"),
pty,
tx,
rx,
terminal,
event_proxy,
hold,
ref_test,
}
}
pub fn channel(&self) -> Sender<Msg> {
self.tx.clone()
}
/// Drain the channel.
///
/// Returns `false` when a shutdown message was received.
fn drain_recv_channel(&mut self, state: &mut State) -> bool {
while let Ok(msg) = self.rx.try_recv() {
match msg {
Msg::Input(input) => state.write_list.push_back(input),
Msg::Resize(window_size) => self.pty.on_resize(window_size),
Msg::Shutdown => return false,
}
}
true
}
/// Returns a `bool` indicating whether or not the event loop should continue running.
#[inline]
fn channel_event(&mut self, token: mio::Token, state: &mut State) -> bool {
if !self.drain_recv_channel(state) {
return false;
}
self.poll
.reregister(&self.rx, token, Ready::readable(), PollOpt::edge() | PollOpt::oneshot())
.unwrap();
true
}
#[inline]
fn pty_read<X>(
&mut self,
state: &mut State,
buf: &mut [u8],
mut writer: Option<&mut X>,
) -> io::Result<()>
where
X: Write,
{
let mut unprocessed = 0;
let mut processed = 0;
// Reserve the next terminal lock for PTY reading.
let _terminal_lease = Some(self.terminal.lease());
let mut terminal = None;
loop {
// Read from the PTY.
match self.pty.reader().read(&mut buf[unprocessed..]) {
// This is received on Windows/macOS when no more data is readable from the PTY.
Ok(0) if unprocessed == 0 => break,
Ok(got) => unprocessed += got,
Err(err) => match err.kind() {
ErrorKind::Interrupted | ErrorKind::WouldBlock => {
// Go back to mio if we're caught up on parsing and the PTY would block.
if unprocessed == 0 {
break;
}
},
_ => return Err(err),
},
}
// Attempt to lock the terminal.
let terminal = match &mut terminal {
Some(terminal) => terminal,
None => terminal.insert(match self.terminal.try_lock_unfair() {
// Force block if we are at the buffer size limit.
None if unprocessed >= READ_BUFFER_SIZE => self.terminal.lock_unfair(),
None => continue,
Some(terminal) => terminal,
}),
};
// Write a copy of the bytes to the ref test file.
if let Some(writer) = &mut writer {
writer.write_all(&buf[..unprocessed]).unwrap();
}
// Parse the incoming bytes.
for byte in &buf[..unprocessed] {
state.parser.advance(&mut **terminal, *byte);
}
processed += unprocessed;
unprocessed = 0;
// Assure we're not blocking the terminal too long unnecessarily.
if processed >= MAX_LOCKED_READ {
break;
}
}
// Queue terminal redraw unless all processed bytes were synchronized.
if state.parser.sync_bytes_count() < processed && processed > 0 {
self.event_proxy.send_event(Event::Wakeup);
}
Ok(())
}
#[inline]
fn pty_write(&mut self, state: &mut State) -> io::Result<()> {
state.ensure_next();
'write_many: while let Some(mut current) = state.take_current() {
'write_one: loop {
match self.pty.writer().write(current.remaining_bytes()) {
Ok(0) => {
state.set_current(Some(current));
break 'write_many;
},
Ok(n) => {
current.advance(n);
if current.finished() {
state.goto_next();
break 'write_one;
}
},
Err(err) => {
state.set_current(Some(current));
match err.kind() {
ErrorKind::Interrupted | ErrorKind::WouldBlock => break 'write_many,
_ => return Err(err),
}
},
}
}
}
Ok(())
}
pub fn spawn(mut self) -> JoinHandle<(Self, State)> {
thread::spawn_named("PTY reader", move || {
let mut state = State::default();
let mut buf = [0u8; READ_BUFFER_SIZE];
let mut tokens = (0..).map(Into::into);
let poll_opts = PollOpt::edge() | PollOpt::oneshot();
let channel_token = tokens.next().unwrap();
self.poll.register(&self.rx, channel_token, Ready::readable(), poll_opts).unwrap();
// Register TTY through EventedRW interface.
self.pty.register(&self.poll, &mut tokens, Ready::readable(), poll_opts).unwrap();
let mut events = Events::with_capacity(1024);
let mut pipe = if self.ref_test {
Some(File::create("./alacritty.recording").expect("create alacritty recording"))
} else {
None
};
'event_loop: loop {
// Wakeup the event loop when a synchronized update timeout was reached.
let handler = state.parser.sync_timeout();
let timeout =
handler.sync_timeout().map(|st| st.saturating_duration_since(Instant::now()));
if let Err(err) = self.poll.poll(&mut events, timeout) {
match err.kind() {
ErrorKind::Interrupted => continue,
_ => panic!("EventLoop polling error: {err:?}"),
}
}
// Handle synchronized update timeout.
if events.is_empty() {
state.parser.stop_sync(&mut *self.terminal.lock());
self.event_proxy.send_event(Event::Wakeup);
continue;
}
for event in events.iter() {
match event.token() {
token if token == channel_token => {
if !self.channel_event(channel_token, &mut state) {
break 'event_loop;
}
},
token if token == self.pty.child_event_token() => {
if let Some(tty::ChildEvent::Exited) = self.pty.next_child_event() {
if self.hold {
// With hold enabled, make sure the PTY is drained.
let _ = self.pty_read(&mut state, &mut buf, pipe.as_mut());
} else {
// Without hold, shutdown the terminal.
self.terminal.lock().exit();
}
self.event_proxy.send_event(Event::Wakeup);
break 'event_loop;
}
},
token
if token == self.pty.read_token()
|| token == self.pty.write_token() =>
{
#[cfg(unix)]
if UnixReady::from(event.readiness()).is_hup() {
// Don't try to do I/O on a dead PTY.
continue;
}
if event.readiness().is_readable() {
if let Err(err) = self.pty_read(&mut state, &mut buf, pipe.as_mut())
{
// On Linux, a `read` on the master side of a PTY can fail
// with `EIO` if the client side hangs up. In that case,
// just loop back round for the inevitable `Exited` event.
// This sucks, but checking the process is either racy or
// blocking.
#[cfg(target_os = "linux")]
if err.raw_os_error() == Some(libc::EIO) {
continue;
}
error!("Error reading from PTY in event loop: {}", err);
break 'event_loop;
}
}
if event.readiness().is_writable() {
if let Err(err) = self.pty_write(&mut state) {
error!("Error writing to PTY in event loop: {}", err);
break 'event_loop;
}
}
},
_ => (),
}
}
// Register write interest if necessary.
let mut interest = Ready::readable();
if state.needs_write() {
interest.insert(Ready::writable());
}
// Reregister with new interest.
self.pty.reregister(&self.poll, interest, poll_opts).unwrap();
}
// The evented instances are not dropped here so deregister them explicitly.
let _ = self.poll.deregister(&self.rx);
let _ = self.pty.deregister(&self.poll);
(self, state)
})
}
}
| true |
8d51a4dcf397ad46a93246ea591ce593490d8118
|
Rust
|
TheMayoras/sg1000-emu
|
/libs/tms9918/src/ppu/sprites.rs
|
UTF-8
| 4,881 | 2.8125 | 3 |
[] |
no_license
|
use crate::{
ppu::{Color, ImageWriter, Ppu, Renderer, COLORS},
Canvas,
};
use bus::BusConnectable;
const LINE_WIDTH: u16 = 242;
struct Sprite {
x: u16,
y: u16,
name_entry: u16,
early_clock: bool,
color: Color,
}
impl Sprite {
pub fn new(x: u8, y: u8, name_ptr: u8, clock_color: u8) -> Sprite {
Sprite {
x: x as u16,
y: y as u16,
name_entry: name_ptr as u16,
early_clock: clock_color >= 0x80,
color: COLORS[clock_color as usize & 0x0F],
}
}
}
pub struct SpriteRenderer<'a> {
ppu: &'a mut Ppu,
zoom: u16,
line: u16,
}
impl<'a> SpriteRenderer<'a> {
pub fn new(ppu: &'a mut Ppu, zoom: u16, line: u16) -> SpriteRenderer<'a> {
SpriteRenderer { ppu, zoom, line }
}
    /// TODO: implement the Early Clock bit (lets sprites bleed in from the left edge)
fn draw_8x8(&mut self) {
let mut count = 0;
let mut positions = Vec::new();
let attr_tbl = self.ppu.sprite_attr_table();
let gen_tbl = self.ppu.sprite_patt_gen_table();
for spr in 0..32 {
let attr_ptr = attr_tbl + 4 * spr;
let sprite;
{
let mut ram = self.ppu.ram.borrow_mut();
sprite = Sprite::new(
ram.cpu_read(attr_ptr),
ram.cpu_read(attr_ptr + 1),
ram.cpu_read(attr_ptr + 2),
ram.cpu_read(attr_ptr + 3),
);
}
            // make sure the sprite is actually on the current line
if sprite.y <= self.line && sprite.y + 8 > self.line {
count += 1;
if count >= 5 {
self.ppu.set_5th_sprite(spr as u8);
return;
}
let pattern_line = self.line - sprite.y;
let pattern_ptr = gen_tbl + sprite.name_entry * 8 + pattern_line;
let mut pattern = self.ppu.ram.borrow_mut().cpu_read(pattern_ptr);
for i in 0..8 {
// check sprite coincidence
let coincident: bool = match positions.binary_search(&(sprite.x + i)) {
Ok(_) => {
self.ppu.set_coincidence_flag();
true
}
Err(pos) => {
positions.insert(pos, sprite.x + i);
false
}
};
// if the bit is set for this pixel and it is not already taken up
if pattern & 0x80 > 0 && !coincident {
self.color_pixel(sprite.color, sprite.x + i, self.line);
}
pattern <<= 1;
}
}
}
}
fn draw_16x16(&mut self) {
let mut positions = Vec::new();
let attr_tbl = self.ppu.sprite_attr_table();
let gen_tbl = self.ppu.sprite_patt_gen_table();
for spr in (0..32).rev() {
let attr_ptr = attr_tbl + 4 * spr;
let sprite;
{
let mut ram = self.ppu.ram.borrow_mut();
sprite = Sprite::new(
ram.cpu_read(attr_ptr),
ram.cpu_read(attr_ptr + 1),
ram.cpu_read(attr_ptr + 2),
ram.cpu_read(attr_ptr + 3),
);
}
// check sprite coincidence
for i in 0..8 {
match positions.binary_search(&(sprite.x + i)) {
Ok(_) => self.ppu.set_coincidence_flag(),
Err(pos) => positions.insert(pos, sprite.x + i),
}
}
            // make sure the sprite is actually on the current line
if sprite.y <= self.line && sprite.y + 8 > self.line {
let pattern_line = self.line - sprite.y;
let pattern_ptr = gen_tbl + sprite.name_entry * 8 + pattern_line;
let pattern = self.ppu.ram.borrow_mut().cpu_read(pattern_ptr);
for i in 0..8 {
// if the bit is set for this pixel
if pattern & 0x80 > 0 {
self.color_pixel(sprite.color, sprite.x + i, self.line);
}
}
}
}
}
}
impl<'a> Renderer for SpriteRenderer<'a> {
fn draw(&mut self) {
let (size, zoom) = self.ppu.get_sprite_size();
self.zoom *= zoom as u16;
if size == 8 {
self.draw_8x8();
} else {
self.draw_16x16();
}
}
}
impl<'a> ImageWriter for SpriteRenderer<'a> {
fn zoom(&self) -> u16 {
self.zoom
}
fn image(&mut self) -> &mut Canvas {
&mut self.ppu.next_canvas
}
}
| true |
c1af0df9b57cb49f019199596401d4caa38c9707
|
Rust
|
Aunmag/rusty-sapper
|
/src/cell.rs
|
UTF-8
| 1,866 | 3.09375 | 3 |
[
"MIT"
] |
permissive
|
use termwiz::color::AnsiColor;
use termwiz::color::ColorAttribute;
pub struct Cell {
pub mines_around: Option<u8>,
pub is_exploded: bool,
}
impl Cell {
pub const fn new() -> Self {
return Self {
mines_around: None,
is_exploded: false,
};
}
pub const fn is_discovered(&self) -> bool {
return self.mines_around.is_some();
}
pub const fn is_markable(&self) -> bool {
return !self.is_discovered() && !self.is_exploded;
}
pub fn get_mark(&self, is_marked: bool) -> CellMark {
let symbol;
let mut foreground = ColorAttribute::Default;
let mut background = ColorAttribute::Default;
if is_marked {
symbol = '!';
background = AnsiColor::Maroon.into();
} else if self.is_exploded {
symbol = '#';
background = AnsiColor::Maroon.into();
} else if let Some(mines_around) = self.mines_around {
if mines_around == 0 {
symbol = ' ';
} else {
symbol = std::char::from_digit(u32::from(mines_around), 10).unwrap_or('?');
}
foreground = match mines_around {
0 => foreground,
1 => AnsiColor::Blue.into(),
2 => AnsiColor::Green.into(),
3 => AnsiColor::Red.into(),
4 => AnsiColor::Navy.into(),
5 => AnsiColor::Maroon.into(),
6 => AnsiColor::Aqua.into(),
_ => AnsiColor::Purple.into(),
};
} else {
symbol = '.';
}
return CellMark {
symbol,
foreground,
background,
};
}
}
pub struct CellMark {
pub symbol: char,
pub foreground: ColorAttribute,
pub background: ColorAttribute,
}
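
// Hedged usage sketch (added for illustration; not part of the original file):
// how a renderer might turn a discovered, unmarked cell into a drawable mark.
#[allow(dead_code)]
fn mark_for_discovered(mines_around: u8) -> CellMark {
    let cell = Cell { mines_around: Some(mines_around), is_exploded: false };
    cell.get_mark(false)
}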
| true |
d4675a62290c78d3f1e69999da076fd84e7d9aef
|
Rust
|
jakmeier/www.jakobmeier.ch
|
/appendix/untapped-rust/simplified_nuts/src/main.rs
|
UTF-8
| 3,889 | 3.890625 | 4 |
[
"MIT"
] |
permissive
|
struct MyObject {
counter: u32,
}
struct MethodA;
struct MethodBWithArguments {
text: String,
}
impl MyObject {
fn method_a(&mut self, _arg: MethodA) {
self.counter += 1;
println!(
"Object invoked a method {} times. This time without an argument.",
self.counter
);
}
fn method_b(&mut self, arg: MethodBWithArguments) {
self.counter += 1;
println!(
"Object invoked a method {} times. This time with argument: {}",
self.counter, arg.text
);
}
}
fn main() {
/* registration */
let obj = MyObject { counter: 0 };
my_library::register_object(obj);
my_library::register_method(MyObject::method_a);
my_library::register_method(MyObject::method_b);
/* invocations */
my_library::invoke::<MyObject, _>(MethodA);
my_library::invoke::<MyObject, _>(MethodBWithArguments {
text: "Hello World!".to_owned(),
});
/* Output */
// Object invoked a method 1 times. This time without an argument.
// Object invoked a method 2 times. This time with argument: Hello World!
}
/* "Library" code */
mod my_library {
#![allow(non_camel_case_types)]
use std::any::{Any, TypeId};
use std::collections::HashMap;
pub struct Nut {
objects: HashMap<TypeId, Box<dyn Any>>,
methods: HashMap<(TypeId, TypeId), Box<dyn FnMut(&mut Box<dyn Any>, Box<dyn Any>)>>,
}
impl Nut {
fn register_object<OBJECT>(&mut self, obj: OBJECT)
where
OBJECT: Any,
{
let key = TypeId::of::<OBJECT>();
let boxed_obj = Box::new(obj);
self.objects.insert(key, boxed_obj);
}
pub fn register_method<OBJECT, ARGUMENT, FUNCTION>(&mut self, mut method: FUNCTION)
where
FUNCTION: FnMut(&mut OBJECT, ARGUMENT) + 'static,
ARGUMENT: Any,
OBJECT: Any,
{
let key = (TypeId::of::<OBJECT>(), TypeId::of::<ARGUMENT>());
let wrapped_method =
Box::new(move |any_obj: &mut Box<dyn Any>, any_args: Box<dyn Any>| {
let obj: &mut OBJECT = any_obj.downcast_mut().expect("Type conversion failed");
let args: ARGUMENT = *any_args.downcast().expect("Type conversion failed");
method(obj, args)
});
self.methods.insert(key, wrapped_method);
}
pub fn invoke<OBJECT, ARGUMENT>(&mut self, arg: ARGUMENT)
where
OBJECT: Any,
ARGUMENT: Any,
{
let object_key = TypeId::of::<OBJECT>();
let method_key = (TypeId::of::<OBJECT>(), TypeId::of::<ARGUMENT>());
if let Some(obj) = self.objects.get_mut(&object_key) {
if let Some(method) = self.methods.get_mut(&method_key) {
method(obj, Box::new(arg));
}
}
}
}
// The real nuts code has absolutely no unsafe code.
// But just for readability, global data is stored as mutable static in this example.
static mut NUT: Option<Nut> = None;
fn get_nut() -> &'static mut Nut {
unsafe {
NUT.get_or_insert_with(|| Nut {
objects: HashMap::new(),
methods: HashMap::new(),
})
}
}
pub fn register_object(obj: impl Any) {
get_nut().register_object(obj);
}
pub fn register_method<OBJECT, ARGUMENT, FUNCTION>(method: FUNCTION)
where
FUNCTION: FnMut(&mut OBJECT, ARGUMENT) + 'static,
ARGUMENT: Any,
OBJECT: Any,
{
get_nut().register_method(method);
}
pub fn invoke<OBJECT, ARGUMENT>(method_call: ARGUMENT)
where
OBJECT: Any,
ARGUMENT: Any,
{
get_nut().invoke::<OBJECT, ARGUMENT>(method_call);
}
}
| true |
f016d42d8e87b25ed2e067aa5440cc128758d088
|
Rust
|
dnbrwstr/fast-color-lookup
|
/native/src/lib.rs
|
UTF-8
| 1,971 | 2.75 | 3 |
[] |
no_license
|
extern crate neon;
extern crate kdtree;
use neon::prelude::*;
use std::fs;
use kdtree::KdTree;
use kdtree::distance::squared_euclidean;
pub struct ColorFinder {
color_tree: KdTree<f32, String, [f32; 3]>
}
fn create_tree(data_file: String, capacity_per_node: usize) -> KdTree<f32, String, [f32; 3]> {
let dimensions = 3;
let mut kdtree: KdTree<f32, String, [f32; 3]> = KdTree::new_with_capacity(dimensions, capacity_per_node);
let text = fs::read_to_string(data_file).unwrap();
text.lines()
.map(|x: &str| x.split(" ").collect())
.for_each(|p: Vec<&str>| {
let color: [f32; 3] = [
p[0].parse::<f32>().unwrap(),
p[1].parse::<f32>().unwrap(),
p[2].parse::<f32>().unwrap()
];
let name: String = p[3..].join(" ");
kdtree.add(color, name).unwrap();
});
return kdtree;
}
declare_types! {
pub class JsColorFinder for ColorFinder {
init(mut cx) {
let data_path = cx.argument::<JsString>(0)?.value();
let capacity = cx.argument::<JsNumber>(1)?.value() as usize;
Ok(ColorFinder {
color_tree: create_tree(data_path, capacity)
})
}
method getColorName(mut cx) {
let r: f32 = cx.argument::<JsNumber>(0)?.value() as f32;
let g: f32 = cx.argument::<JsNumber>(1)?.value() as f32;
let b: f32 = cx.argument::<JsNumber>(2)?.value() as f32;
let this = cx.this();
let color = {
let guard = cx.lock();
let color_finder = this.borrow(&guard);
                let nearest = color_finder.color_tree.nearest(&[r, g, b], 1, &squared_euclidean).unwrap();
nearest[0].1.to_owned()
};
Ok(cx.string(color).upcast())
}
}
}
register_module!(mut m, {
m.export_class::<JsColorFinder>("ColorFinder")?;
Ok(())
});
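
// Hedged usage note (added for illustration; not part of the original file).
// From Node.js this Neon module is consumed roughly as:
//   const { ColorFinder } = require('../native');
//   const finder = new ColorFinder('colors.txt', 64);
//   finder.getColorName(255, 0, 0);
// where 'colors.txt' holds lines of the form "r g b name", matching what
// `create_tree` parses above; the path and capacity shown are assumptions.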
| true |
e817f4e71467c892ebb7f7868786029e4e3f12e0
|
Rust
|
trip-flip/sample-engine
|
/sample-core/src/component.rs
|
UTF-8
| 3,793 | 2.8125 | 3 |
[] |
no_license
|
use crate::{
Transform,
shader::Shader,
mesh::Mesh,
texture::Texture
};
use std::rc::Rc;
use std::iter;
use glam::{Vec3, Quat};
use crate::ecs::Entity;
use crate::scriptable::Scriptable;
use itertools::izip;
pub struct Material;
pub trait Component {
fn create(_entity: &mut Entity) -> Self;
fn update(&mut self) {}
}
pub struct MeshComponent {
meshes: Vec<Rc<Mesh>>,
textures: Vec<Rc<Texture>>,
shaders: Vec<Rc<Shader>>,
transforms: Vec<Transform>,
}
impl MeshComponent {
    // TODO: Make this more refined, don't mass change
    pub fn set_shader(&mut self, new_shader: Rc<Shader>) {
        // Replace every existing shader with a clone of the new one.
        let count = self.shaders.len();
        self.shaders = iter::repeat(new_shader).take(count).collect();
    }
pub fn set_translate(&mut self, fx: f32, fy: f32, fz: f32) {
for transform in &mut self.transforms {
transform.translation.x = fx;
transform.translation.y = fy;
transform.translation.z = fz;
}
}
pub fn set_scale(&mut self, fx: f32, fy: f32, fz: f32) {
for transform in &mut self.transforms {
transform.scale.x = fx;
transform.scale.y = fy;
transform.scale.z = fz;
}
}
pub fn set_rotation(&mut self, axis: Vec3, angle: f32) {
for transform in &mut self.transforms {
transform.rotation = Quat::from_axis_angle(axis, angle);
}
}
pub fn add_mstm(&mut self,
mesh: Rc<Mesh>,
shader: Rc<Shader>,
texture: Option<Rc<Texture>>,
_material: Option<Material>) {
self.meshes.push(mesh);
self.shaders.push(shader);
        if let Some(texture) = texture {
            self.textures.push(texture);
        }
/*if !material.is_none() {
self.materials.push(texture.unwrap());
}*/
self.transforms.push(Transform::new());
}
pub fn textures(&self) -> &[Rc<Texture>]{
&self.textures
}
}
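
// Hedged usage sketch (added for illustration; not part of the original file):
// positioning an already-created `MeshComponent`; how the component instance
// is obtained is assumed to happen elsewhere (e.g. via `Component::create`).
#[allow(dead_code)]
fn place_mesh(mesh: &mut MeshComponent) {
    mesh.set_translate(0.0, 1.0, -5.0);
    mesh.set_scale(2.0, 2.0, 2.0);
    mesh.set_rotation(Vec3::new(0.0, 1.0, 0.0), std::f32::consts::FRAC_PI_2);
}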
impl Component for MeshComponent {
fn create(_entity: &mut Entity) -> Self {
MeshComponent {
meshes: Vec::new(),
textures: Vec::new(),
shaders: Vec::new(),
transforms: Vec::new(),
}
}
fn update(&mut self) {
if self.textures.is_empty() {
let draw = izip!(
self.meshes.iter(),
self.shaders.iter(),
self.transforms.iter(),
);
for (mesh, shader, transform) in draw {
shader.set_transform(&transform);
mesh.draw();
}
} else {
let draw = izip!(
self.meshes.iter(),
self.textures.iter(),
self.shaders.iter(),
self.transforms.iter(),
);
for (mesh, texture, shader, transform) in draw {
shader.set_transform(&transform);
texture.enable();
mesh.draw();
}
}
}
}
#[derive(Debug)]
pub struct ScriptComponent<T: Scriptable> {
script: T,
entity: *mut Entity
}
impl<T: Scriptable> Component for ScriptComponent<T> {
fn create(entity: &mut Entity) -> Self {
let mut comp = ScriptComponent {
script: T::create(),
entity: entity as *mut _
};
unsafe {
comp.script.on_create(&mut *comp.entity as &mut _);
}
comp
}
fn update(&mut self) {
unsafe {
self.script.on_update(&mut *self.entity as &mut _);
}
}
}
pub mod components {
pub use crate::component::{
MeshComponent,
ScriptComponent
};
}
| true |
a3f6e26b59e6944ab7287e04d4850b76bdeae603
|
Rust
|
takeneco/rust-uniqos
|
/util/src/boxed.rs
|
UTF-8
| 445 | 2.53125 | 3 |
[
"MIT"
] |
permissive
|
// Uniqos -- Unique Operating System
// (c) 2019 KATO Takeshi
// Released under the MIT license
/// Original heap allocation implementation, used instead of alloc::boxed::Box.
use core::ops;
pub struct X<T: ?Sized>(*mut T);
impl<T> ops::Deref for X<T> {
type Target = T;
fn deref(&self) -> &T {
unsafe { &*self.0 }
}
}
impl<T> ops::DerefMut for X<T> {
fn deref_mut(&mut self) -> &mut T {
unsafe { &mut *self.0 }
}
}
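
// Hedged usage sketch (added for illustration; not part of the original file).
// `X` is normally produced by the project's own allocator; here it is pointed
// at a caller-provided value purely to show that Deref/DerefMut work.
#[allow(dead_code)]
fn deref_example(value: &mut i32) -> i32 {
    let x = X(value as *mut i32);
    *x + 1
}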
| true |
5bccc7c74a5000aaf33b7056e1f855b0a8560c73
|
Rust
|
amitdo/dream-go
|
/src/mcts/predict.rs
|
UTF-8
| 7,974 | 2.578125 | 3 |
[
"Apache-2.0"
] |
permissive
|
// Copyright 2018 Karl Sundequist Blomdahl <[email protected]>
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::sync::atomic::{AtomicUsize, Ordering};
use std::sync::{Mutex, MutexGuard};
use parallel::{self, OneSender};
use nn::{self, Network, Type, TYPE, Workspace};
use util::array::*;
use util::singleton::*;
use util::types::*;
pub type PredictGuard<'a> = parallel::ServiceGuard<'a, PredictState>;
pub type PredictService = parallel::Service<PredictState>;
pub fn service(network: Network) -> PredictService {
PredictService::new(None, PredictState::new(network))
}
pub enum PredictRequest {
/// Request to compute the value and policy for some feature.
Ask(Array),
/// Indicate that a worker is waiting for some other thread to finish
/// and should be awaken after the next batch of computations finish.
Wait
}
struct PredictShared {
/// The features to get the value and policy for.
features_list: Vec<Array>,
/// The sender to response to each of the features in `features_list`
/// over.
sender_list: Vec<OneSender<Option<(Singleton, Array)>>>,
/// All threads that want to get notified when something changed.
waiting_list: Vec<OneSender<Option<(Singleton, Array)>>>
}
pub struct PredictState {
network: Network,
shared: Mutex<PredictShared>,
/// The number of requests that are being processed by the GPU at
/// this moment
running_count: AtomicUsize,
}
impl PredictState {
pub fn new(network: Network) -> PredictState {
PredictState {
network: network,
shared: Mutex::new(PredictShared {
features_list: vec! [],
sender_list: vec! [],
waiting_list: vec! []
}),
running_count: AtomicUsize::new(0)
}
}
/// Returns the network used to perform the predictions.
pub fn get_network<'a>(&'a self) -> &'a Network {
&self.network
}
/// Run the `nn::forward` function for the given features and wrap the
/// results into `Array` elements. This version assumes the neural network
/// use `f32` weights.
///
/// # Arguments
///
/// * `workspace` -
/// * `features_list` -
///
fn forward<T, R>(
workspace: &mut Workspace,
features_list: Vec<Array>
) -> (Vec<Singleton>, Vec<Array>)
where T: From<f32> + Clone,
R: From<f32> + Clone, Box<[R]>: Into<Array>,
Array: Into<Box<[T]>> + From<Box<[R]>>,
Singleton: From<R>,
{
let (value_list, policy_list) = nn::forward::<T, R>(
workspace,
&features_list.into_iter()
.map(|feature| Array::into(feature))
.collect()
);
// wrap the results in `Array` so that we can avoid having to pass
// generics everywhere
let value_list = value_list.into_iter()
.map(|value| Singleton::from(value))
.collect();
let policy_list = policy_list.into_iter()
.map(|policy| Array::from(policy))
.collect();
(value_list, policy_list)
}
fn predict(&self, mut shared: MutexGuard<PredictShared>, batch_size: usize) {
let num_items = shared.features_list.len();
let features_list = shared.features_list.split_off(num_items - batch_size);
let sender_list = shared.sender_list.split_off(num_items - batch_size);
        // get rid of our MutexGuard to `shared` to allow for parallel execution
// while we are busy running the forward pass through the network.
drop(shared);
debug_assert!(features_list.len() == batch_size);
debug_assert!(sender_list.len() == batch_size);
// perform the neural network predictions and then inform all of
// the receivers
let mut workspace = self.network.get_workspace(batch_size);
let (value_list, policy_list) = match *TYPE {
Type::Int8 => PredictState::forward::<q8, f32>(&mut workspace, features_list),
Type::Half => PredictState::forward::<f16, f16>(&mut workspace, features_list),
Type::Single => PredictState::forward::<f32, f32>(&mut workspace, features_list)
};
drop(workspace);
// send out our predictions to all of the receivers
let response_iter = value_list.into_iter().zip(policy_list.into_iter());
for (sender, response) in sender_list.into_iter().zip(response_iter) {
sender.send(Some(response));
}
// wake up all of the receivers waiting for something to change
let mut shared = self.shared.lock().unwrap();
let num_waiting = shared.waiting_list.len();
for waiting in shared.waiting_list.drain(0..num_waiting) {
waiting.send(None);
}
// decrease the number of running neural network evaluations
self.running_count.fetch_sub(1, Ordering::AcqRel);
}
fn check(&self, mut shared: MutexGuard<PredictShared>, has_more: bool) {
let num_requests = shared.features_list.len();
let batch_size = 64; // FIXME: read from config
if num_requests >= batch_size {
// the batch is full, start an evaluation
self.running_count.fetch_add(1, Ordering::SeqCst);
self.predict(shared, batch_size);
} else if has_more {
// wait for the rest of the enqueued requests before evaluating
// the batch
} else if num_requests > 0 && self.running_count.compare_and_swap(0, 1, Ordering::SeqCst) == 0 {
// nothing is running at the moment, may as well make use of
// the device so start evaluating a partial batch
self.predict(shared, num_requests);
} else if num_requests > 0 && !has_more {
// immediately evaluate when we hit a barrier in order to:
// 1. minimize the latency between request and response
// 2. avoid a race condition where a request that arrives
// during an evaluation does not trigger one.
self.running_count.fetch_add(1, Ordering::SeqCst);
self.predict(shared, num_requests);
} else if num_requests == 0 && self.running_count.load(Ordering::Acquire) == 0 {
// everything is asleep? probably a race condition between the
// pending message being sent and it being received. Just wake
// everything up and it should normalize.
let num_waiting = shared.waiting_list.len();
for waiting in shared.waiting_list.drain(0..num_waiting) {
waiting.send(None);
}
}
}
}
impl parallel::ServiceImpl for PredictState {
type State = PredictState;
type Request = PredictRequest;
type Response = Option<(Singleton, Array)>;
fn get_thread_count() -> usize {
2 // num_threads / batch_size
}
fn process(state: &Self::State, req: Self::Request, sender: OneSender<Self::Response>, has_more: bool) {
let mut shared = state.shared.lock().unwrap();
match req {
PredictRequest::Ask(features) => {
shared.features_list.push(features);
shared.sender_list.push(sender);
},
PredictRequest::Wait => {
shared.waiting_list.push(sender);
}
};
state.check(shared, has_more);
}
}
| true |
479299501d88a8930cd01522bfeca756585aa7c6
|
Rust
|
kvart2006/sycamore
|
/packages/sycamore/src/component.rs
|
UTF-8
| 880 | 3.140625 | 3 |
[
"MIT"
] |
permissive
|
//! The definition of the [`Component`] trait.
use crate::generic_node::GenericNode;
use crate::prelude::View;
/// Trait that is implemented by components. Should not be implemented manually. Use the
/// [`component`](sycamore_macro::component) macro instead.
pub trait Component<G: GenericNode> {
/// The name of the component (for use in debug mode). In release mode, this will default to
/// `"UnnamedComponent"`
const NAME: &'static str = "UnnamedComponent";
/// The type of the properties passed to the component.
type Props;
/// Create a new component with an instance of the properties.
///
/// The double underscores (`__`) are to prevent conflicts with other trait methods. This is
/// because we cannot use fully qualified syntax here because it prevents type inference.
fn __create_component(props: Self::Props) -> View<G>;
}
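
// Hedged illustration (added; not part of the original file): one way a caller
// could instantiate a component generically through this trait, without going
// through the `component` macro's generated wrapper.
#[allow(dead_code)]
fn instantiate<G: GenericNode, C: Component<G>>(props: C::Props) -> View<G> {
    C::__create_component(props)
}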
| true |
4ac8bb1759d35afc930798418876cd824d501e71
|
Rust
|
HildaHay/entomb-gen
|
/src/main.rs
|
UTF-8
| 1,648 | 3.171875 | 3 |
[] |
no_license
|
use rand::prelude::*;
static LOOKUPTABLE: [i32; 32] = [
1, 1, 1, 2, 0, 0, 2, 2, 1, 1, 1, 1, 2, 0, 0, 0, 1, 1, 1, 2, 0, 0, 0, 0, 2, 0, 1, 2, 2, 0, 0, 0,
];
fn main() {
let mut maze: [[bool; 32]; 32] = [[false; 32]; 32];
for x in 0..31 {
maze[x][0] = rand::random();
}
for y in 1..31 {
for x in 0..31 {
maze[x][y] = generate_square(&maze, x as i32, y as i32);
}
}
for y in 0..31 {
for x in 0..31 {
if maze[x][y] {
print!("█");
} else {
print!(" ");
};
}
println!();
}
}
fn generate_square(maze: &[[bool; 32]; 32], x: i32, y: i32) -> bool {
let mut prev = [false; 5];
let xx = x as usize;
let yy = y as usize;
    if x == 0 {
prev[0] = true;
prev[1] = false;
prev[2] = rand::random();
} else {
        if x == 1 {
prev[0] = false;
} else {
prev[0] = maze[xx - 2][yy];
}
prev[1] = maze[xx - 1][yy];
prev[2] = maze[xx - 1][yy - 1];
}
prev[3] = maze[xx][yy - 1];
    if x == 31 {
prev[4] = rand::random();
} else {
prev[4] = maze[xx + 1][yy - 1];
}
let mut lookup = 0;
if prev[0] {
lookup += 16
};
if prev[1] {
lookup += 8
};
if prev[2] {
lookup += 4
};
if prev[3] {
lookup += 2
};
if prev[4] {
lookup += 1
};
    if LOOKUPTABLE[lookup] == 0 {
        false
    } else if LOOKUPTABLE[lookup] == 1 {
true
} else {
rand::random()
}
}
| true |
932d0d0648c3816ebd14dc13c00edf311c8ed1dc
|
Rust
|
film42/pb-nats-rs
|
/src/atlas.rs
|
UTF-8
| 4,984 | 2.578125 | 3 |
[
"MIT"
] |
permissive
|
use super::pb::rpc::{Request as RpcRequest, Response as RpcResponse};
use super::pb::warehouse::*;
use futures::stream::StreamExt;
use protobuf::Message;
use rants::{Client, Subject};
use tokio::timer::Timeout;
use uuid::Uuid;
use std::future::Future;
use std::task::{Context,Poll};
use std::pin::Pin;
pub struct RpcClient {
nats: Client,
}
fn new_inbox() -> Subject {
Uuid::new_v4().to_string().parse().unwrap()
}
fn to_snake_case(s: String) -> String {
let mut out = vec![];
let mut is_first = true;
let mut last_was_alphabetic = false;
for c in s.chars() {
if c.is_uppercase() {
if !is_first && last_was_alphabetic {
out.push('_');
}
for c0 in c.to_lowercase().to_string().chars() {
out.push(c0);
}
} else {
out.push(c);
}
is_first = false;
last_was_alphabetic = c.is_ascii_alphabetic();
}
out.iter().collect()
}
// example: rpc.warehouse.shipment_service.search
pub fn build_subject_from_request(req: &RpcRequest) -> String {
let service = req.get_service_name();
let service = service.replace("::", ".");
let service = to_snake_case(service);
let method = req.get_method_name().to_string();
let method = to_snake_case(method);
format!("rpc.{}.{}", service, method)
}
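
// Hedged illustration (added; not part of the original file): how a subject is
// built for the naming scheme above. The setter names follow the generated
// protobuf code already used elsewhere in this file.
#[allow(dead_code)]
fn subject_example() -> String {
    let mut req = RpcRequest::new();
    req.set_service_name("Warehouse::ShipmentService".to_string());
    req.set_method_name("search".to_string());
    build_subject_from_request(&req) // "rpc.warehouse.shipment_service.search"
}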
impl RpcClient {
async fn call(&self, request: &mut RpcRequest) -> Result<RpcResponse, failure::Error> {
request.set_caller("pb-nats-rs".to_string());
let subject = build_subject_from_request(request).parse()?;
let inbox = new_inbox();
let (sid, mut sub) = self.nats.subscribe(&inbox, 2).await?;
self.nats.unsubscribe_with_max_msgs(sid, 2).await?;
let payload = request.write_to_bytes().unwrap();
self.nats
.publish_with_reply(&subject, &inbox, &payload)
.await?;
let msg1 = Timeout::new(sub.next(), super::ACK_TIMEOUT).await?.unwrap();
match msg1.payload() {
&[super::NACK_MSG] => bail!("Retry later! Server sent NACK"),
&[super::ACK_MSG] => {}
_ => bail!("Received unknown message!"),
}
// if msg1.payload() != ACK_MSG.as_bytes() {
// // TODO: Make this a lot better.
// bail!("Did not get an ACK. Why?");
// }
let msg2 = Timeout::new(sub.next(), super::RESPONSE_TIMEOUT)
.await?
.unwrap();
// let res = std::str::from_utf8(&msg2.payload())?.to_string();
// println!("The response is: {}", res);
let mut res = RpcResponse::new();
res.merge_from_bytes(&msg2.payload())?;
Ok(res)
}
}
pub struct ShipmentClient {
client: RpcClient,
}
pub struct SearchRequest<Req, Res> {
    req: Req,
    client: RpcClient,
    // Work in progress: will eventually hold the in-flight NATS future.
    future: String,
    _response: std::marker::PhantomData<Res>,
}
impl<Req, Res> SearchRequest<Req, Res> {
    fn new(client: RpcClient, req: Req) -> SearchRequest<Req, Res> {
        SearchRequest {
            req,
            client,
            future: "".into(),
            _response: std::marker::PhantomData,
        }
    }
}
impl<Req, Res> Future for SearchRequest<Req, Res>
    where Res: std::marker::Unpin
{
    type Output = Result<Res, failure::Error>;
    fn poll(self: Pin<&mut Self>, _cx: &mut Context) -> Poll<Self::Output> {
        // Work in progress: the RPC round trip is not wired up yet.
        Poll::Pending
    }
}
pub trait Search {
type Request;
type Response;
    fn search(&self, request: Self::Request) -> SearchRequest<Self::Request, Self::Response>;
}
impl ShipmentClient {
fn new(client: RpcClient) -> ShipmentClient {
ShipmentClient { client: client }
}
}
// impl Search for ShipmentClient {
// type Request = ShipmentRequest;
// type Response = Shipments;
// fn search(&self, request: Self::Request) -> Result<Self::Response, failure::Error> {
// let req_bytes = request.write_to_bytes()?;
// let mut pb_req = RpcRequest::new();
// pb_req.set_request_proto(req_bytes);
// pb_req.set_service_name("Warehouse::ShipmentService".to_string());
// pb_req.set_method_name("search".to_string());
// let res = self.client.call(&pb_req).await?;
// let mut shipments = Self::Response::new();
// shipments.merge_from_bytes(res.get_response_proto())?;
// //println!("Msg: {}", shipments.get_records()[0].get_guid());
// Ok(shipments)
// }
// }
// fn asdf() {
// let cs = ClientService{
// service_name: "Warehouse::ShipmentService".to_string(),
// }
// cs.register(MethodHandler{
// method_name: "search".to_string(),
// });
// let sd = ServiceDescription{
// service_name: "astlas.dispatch.ShardService".to_string(),
// }
// sd.register_method_handler("search", |service, decoder| async {
// let mut sr = ShardRequest::new();
// decoder.decode(sr)?;
// let msg = service.search(sr).await?;
// Ok::<Shard, failure::Error>(msg)
// });
// }
| true |
1eb8ba49db61968fd6725585521be285eea4213a
|
Rust
|
cambricorp/kattis-scaling-recipes
|
/src/main.rs
|
UTF-8
| 3,983 | 3.015625 | 3 |
[] |
no_license
|
mod parse {
use std::io;
use std::str;
use std::num::ParseFloatError;
#[derive(Debug)]
pub enum Error {
Exhausted,
Io(io::Error),
InvalidDomain(u32, u32),
ParseFloat(ParseFloatError),
}
impl From<ParseFloatError> for Error {
fn from(err: ParseFloatError) -> Self {
Error::ParseFloat(err)
}
}
impl From<io::Error> for Error {
fn from(err: io::Error) -> Self {
Error::Io(err)
}
}
const DIGIT_POWER: [u32; 7] = [1, 10, 100, 1000, 10_000, 100_000, 1_000_000];
pub fn consume_until(input: &[u8], stop_byte: u8) -> Result<(&[u8], &[u8]), Error> {
let (input, remainder) = input.split_at(input
.iter()
.position(|b| *b == stop_byte)
.ok_or(Error::Exhausted)?);
Ok((input, &remainder[1..]))
}
pub fn float_stop_at(input: &[u8], stop_byte: u8) -> Result<(f32, &[u8]), Error> {
let (float, cursor) = consume_until(input, stop_byte)?;
let float: f32 = unsafe { str::from_utf8_unchecked(float) }.parse()?;
Ok((float, cursor))
}
pub fn digit_stop_at(
input: &[u8],
max_value: u32,
stop_byte: u8,
) -> Result<(u32, &[u8]), Error> {
let mut digits = [0; 7];
let mut num_digits = 0;
for d in input
.iter()
.take_while(|&&b| b != stop_byte)
.map(|b| b - b'0')
{
digits[num_digits] = d;
num_digits += 1;
}
let res = digits[..num_digits]
.iter()
.rev()
.enumerate()
.fold(0, |acc, (index, &b10)| {
acc + DIGIT_POWER[index] * b10 as u32
});
if res > max_value {
Err(Error::InvalidDomain(res, max_value))
} else {
Ok((res, &input[num_digits + 1..]))
}
}
}
use parse::Error;
use std::io::{stdin, stdout, BufWriter, Read, Write};
use std::str;
pub const MAX_LENGTH: u32 = 10_000;
pub const MAX_RECIPES: u32 = 1000;
pub const MAX_INGREDIENTS: u32 = 20;
pub const MAX_PORTIONS: u32 = 12;
pub const MAX_DESIRED_PORTIONS: u32 = 1000;
fn main() -> Result<(), Error> {
let mut buf = Vec::with_capacity(1024 * 1024);
stdin().read_to_end(&mut buf)?;
let mut writer = BufWriter::with_capacity(64 * 1024, stdout());
let mut ingredients = Vec::with_capacity(MAX_INGREDIENTS as usize);
let (num_recipes, mut cursor) = parse::digit_stop_at(&buf, MAX_RECIPES, b'\n')?;
for current_recipe in 0..num_recipes {
let (num_ingredients, ncursor) = parse::digit_stop_at(cursor, MAX_INGREDIENTS, b' ')?;
let (portions, ncursor) = parse::digit_stop_at(ncursor, MAX_PORTIONS, b' ')?;
let (desired_portions, mut ncursor) =
parse::digit_stop_at(ncursor, MAX_DESIRED_PORTIONS, b'\n')?;
let scaling_for_100percent_ingredient = desired_portions as f32 / portions as f32;
writeln!(writer, "Recipe # {}", current_recipe + 1)?;
ingredients.clear();
let mut master_weight = 0.0_f32;
cursor = ncursor;
for _ in 0..num_ingredients {
let (name, ncursor) = parse::consume_until(cursor, b' ')?;
let (weight, ncursor) = parse::float_stop_at(ncursor, b' ')?;
let (percentage, ncursor) = parse::float_stop_at(ncursor, b'\n')?;
ingredients.push((name, percentage));
if percentage == 100.0 {
master_weight = weight * scaling_for_100percent_ingredient;
}
cursor = ncursor;
}
for (name, percentage) in &ingredients {
writeln!(
writer,
"{} {:.1}",
unsafe { str::from_utf8_unchecked(name) },
master_weight * (percentage / 100.0)
)?;
}
writeln!(writer, "----------------------------------------")?;
}
Ok(())
}
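
// Hedged illustration (added; not part of the original file): a small check of
// the digit parser on a literal input; the separator byte is consumed, so the
// returned cursor starts right after it.
#[allow(dead_code)]
fn digit_parse_example() -> Result<u32, parse::Error> {
    let (value, rest) = parse::digit_stop_at(b"12 7\n", 100, b' ')?;
    debug_assert_eq!(value, 12);
    debug_assert_eq!(rest, &b"7\n"[..]);
    Ok(value)
}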
| true |
6621c57c6c211b3fc35f66e3c027e8a3521ca0a4
|
Rust
|
Pyxxil/AdventOfCode
|
/src/day/mod.rs
|
UTF-8
| 1,358 | 3.109375 | 3 |
[] |
no_license
|
pub mod eight;
pub mod eleven;
pub mod fifteen;
pub mod five;
pub mod four;
pub mod fourteen;
pub mod nine;
pub mod one;
pub mod seven;
pub mod six;
pub mod sixteen;
pub mod ten;
pub mod thirteen;
pub mod three;
pub mod twelve;
pub mod two;
pub trait Day {
type Input;
type Output;
fn part_one(input: &Self::Input) -> Self::Output;
fn part_two(input: &Self::Input) -> Self::Output;
fn get_input() -> Self::Input;
}
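
// Hedged illustration (added; not part of the original file): a minimal `Day`
// implementation of the shape the `run!` macro below expects; values are made up.
#[allow(dead_code)]
struct Example;

impl Day for Example {
    type Input = Vec<u32>;
    type Output = u32;

    fn part_one(input: &Self::Input) -> Self::Output {
        input.iter().sum()
    }

    fn part_two(input: &Self::Input) -> Self::Output {
        input.iter().product()
    }

    fn get_input() -> Self::Input {
        vec![1, 2, 3, 4]
    }
}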
#[macro_export]
macro_rules! time {
($e:expr) => {{
let start = std::time::Instant::now();
let res = $e;
let elapsed = start.elapsed().as_nanos();
(res, elapsed)
}};
}
#[macro_export]
macro_rules! run {
($( $t:ty ),*) => {
$(
{
let input = <$t>::get_input();
println!("\nDay {}\n--------------------", stringify!($t));
{
let (results, elapsed) = time!(<$t>::part_one(&input));
println!("Results for Part One: {:>15} (time: {:>9}ns)", results, elapsed);
}
{
let (results, elapsed) = time!(<$t>::part_two(&input));
println!("Results for Part Two: {:>15} (time: {:>9}ns)", results, elapsed);
}
println!("--------------------");
}
)*
};
}
| true |
37c4f990d6493d75ac1860ed7e166165bbc30635
|
Rust
|
jakeactually/wasm-si2
|
/src/game_util.rs
|
UTF-8
| 5,708 | 2.5625 | 3 |
[
"MIT",
"Apache-2.0"
] |
permissive
|
use crate::types::*;
use crate::objects::{get_static_objects, get_weapons, scenery_data};
use wasm_bindgen::prelude::*;
#[wasm_bindgen]
extern {
fn alert(s: &str);
}
#[wasm_bindgen]
impl Game {
pub fn new() -> Game {
crate::utils::set_panic_hook();
Game {
data :vec![],
ready: false,
screen: [[0; WIDTH as usize]; HEIGHT as usize],
inverted: false,
static_objects: get_static_objects().to_vec(),
weapons: get_weapons().to_vec(),
levels_data: crate::data::levels::levels(),
objects_data: crate::data::objects::objects(),
enemies_data: crate::data::enemies::enemies(),
scenery: vec![],
enemies: vec![],
shots: vec![],
is_playing: false,
game_over: false,
level: 0,
time: 0,
scene_x: 0,
enemies_x: 0,
player: Player {
position: Vec2 { x: 3, y: 20 },
lives: 3,
protection: 0,
},
y_axis: Vec2 { x: 5, y: HEIGHT as i32 - PLAYER_HEIGHT as i32 },
weapon: Weapon {
amount: 3,
kind: WeaponKind::Missile
},
score: 0,
}
}
pub fn update(&mut self, _ctx: &Context) {
self.clear();
if self.game_over {
return;
}
self.keyboard(_ctx);
if !self.is_playing {
if self.level == 5 {
self.game_over = true;
return;
}
self.inverted = self.level_data().inverted_color;
self.enemies = self.load_level(self.level);
if self.level_data().upper == 1 {
self.y_axis = Vec2 { x: 0, y: HEIGHT as i32 - PLAYER_HEIGHT as i32 - 5 };
}
self.load_scenery();
self.player.position = Vec2 { x: 3, y: 20 };
self.scene_x = 0;
self.enemies_x = 0;
self.level += 1;
self.is_playing = true;
}
// Enemies
if self.enemies.len() == 0 {
self.player.position.x += 1;
}
if self.player.position.x > WIDTH as i32 + 20 {
self.is_playing = false;
return;
}
let enemies_x = self.enemies_x;
self.enemies = self
.enemies
.clone()
.into_iter()
.filter(|e| e.active() && enemies_x + e.position.x > -20)
.map(|e| e.tick(self))
.collect::<Vec<_>>();
// Shots
let nearest_y = if self.enemies.len() > 0 {
self.enemies[0].position.y
} else {
self.player.position.y
};
self.shots = self
.shots
.clone()
.into_iter()
.filter(|s| s.active && s.position.x < WIDTH as i32)
.map(|s| s.tick(nearest_y))
.collect();
// The end
if let Some(enemy) = self.enemies.last() {
if self.enemies_x + enemy.position.x >= (WIDTH as i32 / 4) * 3 {
self.scene_x -= 1;
}
}
if self.player.protected() {
self.player.protection -= 1;
}
self.time += 1;
self.enemies_x -= 1;
if self.player.lives == 0 {
self.game_over = true;
}
}
pub fn do_render(&mut self) {
self.render();
}
pub fn screen_pointer(&mut self) -> *mut [[u8; WIDTH as usize]; HEIGHT as usize] {
&mut self.screen
}
}
impl Game {
pub fn level_data(&self) -> SceneryData {
scenery_data[self.level as usize].clone()
}
pub fn keyboard(&mut self, _ctx: &Context) {
let position = &mut self.player.position;
if _ctx.key_right && position.x < WIDTH as i32 - PLAYER_WIDTH as i32 {
position.x += 1;
} else if _ctx.key_left && position.x > 0 {
position.x -= 1;
} else if _ctx.key_up && position.y > self.y_axis.x {
position.y -= 1;
} else if _ctx.key_down && position.y < self.y_axis.y {
position.y += 1;
}
if self.time % 6 == 0 {
if _ctx.key_space {
let position = Vec2 { x: position.x + 9, y: position.y + 3 };
let shot = Shot { position, active: true, weapon_kind: WeaponKind::Standard, duration: 3 };
self.shots.push(shot);
} else if _ctx.key_a {
if self.weapon.amount > 0 {
self.weapon.amount -= 1;
let y = if self.weapon.kind == WeaponKind::Wall { 5 } else { position.y + 3 };
let position = Vec2 { x: position.x + 9, y: y };
let shot = Shot { position, active: true, weapon_kind: self.weapon.kind.clone(), duration: 3 };
self.shots.push(shot);
}
}
}
}
pub fn load_scenery(&mut self) {
self.scenery = vec![];
let mut x = 0;
if self.level > 0 {
while x < 1600 {
let sd = &scenery_data[self.level as usize];
let n: u8 = crate::random() % sd.objects + sd.first_object;
let rock = self.load_object(n);
let y = if self.level_data().upper == 1 { 0 } else { HEIGHT as i32 - rock.size.y };
self.scenery.push(Scenery { position: Vec2 { x, y }, model: rock.clone() });
x += rock.size.x;
}
}
}
}
| true |
b256211784dde1e4e6ad4a41b6c498b9dfcd8717
|
Rust
|
rodya-mirov/aoc_2017
|
/src/day25.rs
|
UTF-8
| 6,509 | 2.609375 | 3 |
[] |
no_license
|
const INPUT: &str = include_str!("input/25.txt");
use nom::lib::std::collections::HashSet;
use std::collections::HashMap;
#[derive(Clone, Eq, PartialEq, Debug)]
struct TuringMachine {
start_state: char,
diagnostic_cutoff: usize,
trans: HashMap<char, FullTrans>,
}
#[derive(Copy, Clone, Eq, PartialEq, Debug)]
struct FullTrans {
if_zero: Transition,
if_one: Transition,
}
#[derive(Copy, Clone, Eq, PartialEq, Debug)]
struct Transition {
next_state: char,
write_val: bool,
move_val: isize,
}
mod parse {
use super::{FullTrans, Transition, TuringMachine};
use std::collections::HashMap;
use nom::{
branch::alt,
bytes::complete::tag,
character::complete::{anychar, char as exact_char, newline, satisfy},
combinator::{eof, map},
multi::{fold_many1, many0},
IResult,
};
use crate::lib::parse_usize;
fn start_state_line(input: &str) -> IResult<&str, char> {
let (input, _) = tag("Begin in state ")(input)?;
let (input, c) = anychar(input)?;
let (input, _) = tag(".")(input)?;
let (input, _) = newline(input)?;
Ok((input, c))
}
fn diagnostic_state_line(input: &str) -> IResult<&str, usize> {
let (input, _) = tag("Perform a diagnostic checksum after ")(input)?;
let (input, steps) = parse_usize(input)?;
let (input, _) = tag(" steps.")(input)?;
let (input, _) = newline(input)?;
Ok((input, steps))
}
fn parse_write(input: &str) -> IResult<&str, bool> {
alt((
map(exact_char('1'), |_| true),
map(exact_char('0'), |_| false),
))(input)
}
fn parse_move(input: &str) -> IResult<&str, isize> {
alt((map(tag("left"), |_| -1), map(tag("right"), |_| 1)))(input)
}
fn parse_write_line(input: &str) -> IResult<&str, bool> {
let (input, _) = tag(" - Write the value ")(input)?;
let (input, write) = parse_write(input)?;
let (input, _) = tag(".\n")(input)?;
Ok((input, write))
}
fn parse_move_line(input: &str) -> IResult<&str, isize> {
let (input, _) = tag(" - Move one slot to the ")(input)?;
let (input, out) = parse_move(input)?;
let (input, _) = tag(".\n")(input)?;
Ok((input, out))
}
fn parse_trans_line(input: &str) -> IResult<&str, char> {
let (input, _) = tag(" - Continue with state ")(input)?;
let (input, c) = anychar(input)?;
let (input, _) = tag(".\n")(input)?;
Ok((input, c))
}
fn parse_transition(input: &str) -> IResult<&str, (char, FullTrans)> {
let (input, _) = tag("In state ")(input)?;
let (input, c) = anychar(input)?;
let (input, _) = tag(":\n")(input)?;
let (input, _) = tag(" If the current value is 0:\n")(input)?;
let (input, write_0) = parse_write_line(input)?;
let (input, move_0) = parse_move_line(input)?;
let (input, state_0) = parse_trans_line(input)?;
let (input, _) = tag(" If the current value is 1:\n")(input)?;
let (input, write_1) = parse_write_line(input)?;
let (input, move_1) = parse_move_line(input)?;
let (input, state_1) = parse_trans_line(input)?;
let (input, _) = many0(satisfy(|c| c.is_whitespace()))(input)?;
Ok((
input,
(
c,
FullTrans {
if_zero: Transition {
next_state: state_0,
write_val: write_0,
move_val: move_0,
},
if_one: Transition {
next_state: state_1,
write_val: write_1,
move_val: move_1,
},
},
),
))
}
fn parse_helper(input: &str) -> IResult<&str, TuringMachine> {
let (input, start_state) = start_state_line(input)?;
let (input, steps) = diagnostic_state_line(input)?;
let (input, _) = newline(input)?;
// TODO: this parse fails if input doesn't have a trailing newline;
// I had to modify the input and the sample, which feels icky
let (input, map): (&str, HashMap<char, FullTrans>) =
fold_many1(parse_transition, HashMap::new(), |mut acc, (c, ft)| {
acc.insert(c, ft);
acc
})(input)
.unwrap();
let (_, _) = eof(input)?;
Ok((
"",
TuringMachine {
start_state,
diagnostic_cutoff: steps,
trans: map,
},
))
}
pub(super) fn parse(input: &str) -> TuringMachine {
let (_, tm) = parse_helper(input).unwrap();
tm
}
}
struct TM {
state: char,
dp: isize,
ones: HashSet<isize>,
}
fn run_25a_with_input(input: &str) -> usize {
let tm_defn = parse::parse(input);
let mut tm = TM {
state: tm_defn.start_state,
dp: 0,
ones: HashSet::new(),
};
for _ in 0..tm_defn.diagnostic_cutoff {
let my_pos = tm.dp;
let my_data = tm.ones.contains(&my_pos);
let ft = tm_defn
.trans
.get(&tm.state)
.expect("States should be defined");
let my_trans = if my_data { &ft.if_one } else { &ft.if_zero };
if my_trans.write_val {
tm.ones.insert(my_pos);
} else {
tm.ones.remove(&my_pos);
}
tm.state = my_trans.next_state;
tm.dp += my_trans.move_val;
}
tm.ones.len()
}
pub fn run_25a() -> usize {
run_25a_with_input(INPUT)
}
pub fn run_25b() -> usize {
0
}
#[cfg(test)]
mod tests {
use super::*;
const SAMPLE_INPUT: &str = "Begin in state A.
Perform a diagnostic checksum after 6 steps.
In state B:
If the current value is 0:
- Write the value 1.
- Move one slot to the left.
- Continue with state A.
If the current value is 1:
- Write the value 1.
- Move one slot to the right.
- Continue with state A.
In state A:
If the current value is 0:
- Write the value 1.
- Move one slot to the right.
- Continue with state B.
If the current value is 1:
- Write the value 0.
- Move one slot to the left.
- Continue with state B.
";
#[test]
fn sample_25a() {
assert_eq!(run_25a_with_input(SAMPLE_INPUT), 3);
}
}
| true |
48c482b600908813516c2e7274ddaef574d4e245
|
Rust
|
softwarearchaeologist/miden
|
/air/src/transition.rs
|
UTF-8
| 9,999 | 2.53125 | 3 |
[
"MIT"
] |
permissive
|
use crate::{
opcodes::UserOps as OpCode, utils::binary_not, BaseElement, FieldElement, TraceState,
NUM_CF_OPS, NUM_HD_OPS, NUM_LD_OPS,
};
use winter_air::EvaluationFrame;
// VM TRANSITION
// ================================================================================================
pub struct VmTransition<E: FieldElement<BaseField = BaseElement>> {
current: TraceState<E>,
next: TraceState<E>,
cf_op_flags: [E; NUM_CF_OPS],
ld_op_flags: [E; NUM_LD_OPS],
hd_op_flags: [E; NUM_HD_OPS],
begin_flag: E,
noop_flag: E,
}
impl<E: FieldElement<BaseField = BaseElement>> VmTransition<E> {
// CONSTRUCTOR
// --------------------------------------------------------------------------------------------
pub fn new(ctx_depth: usize, loop_depth: usize, stack_depth: usize) -> Self {
Self {
current: TraceState::new(ctx_depth, loop_depth, stack_depth),
next: TraceState::new(ctx_depth, loop_depth, stack_depth),
cf_op_flags: [E::ZERO; NUM_CF_OPS],
ld_op_flags: [E::ZERO; NUM_LD_OPS],
hd_op_flags: [E::ZERO; NUM_HD_OPS],
begin_flag: E::ZERO,
noop_flag: E::ZERO,
}
}
#[cfg(test)]
pub fn from_states(current: TraceState<E>, next: TraceState<E>) -> Self {
let mut result = Self {
current,
next,
cf_op_flags: [E::ZERO; NUM_CF_OPS],
ld_op_flags: [E::ZERO; NUM_LD_OPS],
hd_op_flags: [E::ZERO; NUM_HD_OPS],
begin_flag: E::ZERO,
noop_flag: E::ZERO,
};
result.set_op_flags();
result
}
// DATA MUTATORS
// --------------------------------------------------------------------------------------------
pub fn update(&mut self, frame: &EvaluationFrame<E>) {
self.current.update(frame.current());
self.next.update(frame.next());
self.set_op_flags();
}
// STATE ACCESSORS
// --------------------------------------------------------------------------------------------
pub fn current(&self) -> &TraceState<E> {
&self.current
}
pub fn next(&self) -> &TraceState<E> {
&self.next
}
// OP FLAGS
// --------------------------------------------------------------------------------------------
pub fn cf_op_flags(&self) -> [E; NUM_CF_OPS] {
self.cf_op_flags
}
pub fn ld_op_flags(&self) -> [E; NUM_LD_OPS] {
self.ld_op_flags
}
pub fn hd_op_flags(&self) -> [E; NUM_HD_OPS] {
self.hd_op_flags
}
pub fn begin_flag(&self) -> E {
self.begin_flag
}
pub fn noop_flag(&self) -> E {
self.noop_flag
}
// HELPER METHODS
// --------------------------------------------------------------------------------------------
fn set_op_flags(&mut self) {
// set control flow flags
let not_0 = binary_not(self.current.cf_op_bits()[0]);
let not_1 = binary_not(self.current.cf_op_bits()[1]);
self.cf_op_flags[0] = not_0 * not_1;
self.cf_op_flags[1] = self.current.cf_op_bits()[0] * not_1;
self.cf_op_flags[2] = not_0 * self.current.cf_op_bits()[1];
self.cf_op_flags[3] = self.current.cf_op_bits()[0] * self.current.cf_op_bits()[1];
self.cf_op_flags.copy_within(0..4, 4);
let not_2 = binary_not(self.current.cf_op_bits()[2]);
for i in 0..4 {
self.cf_op_flags[i] *= not_2;
}
for i in 4..8 {
self.cf_op_flags[i] *= self.current.cf_op_bits()[2];
}
// set low-degree operation flags
let not_0 = binary_not(self.current.ld_op_bits()[0]);
let not_1 = binary_not(self.current.ld_op_bits()[1]);
self.ld_op_flags[0] = not_0 * not_1;
self.ld_op_flags[1] = self.current.ld_op_bits()[0] * not_1;
        self.ld_op_flags[2] = not_0 * self.current.ld_op_bits()[1];
self.ld_op_flags[3] = self.current.ld_op_bits()[0] * self.current.ld_op_bits()[1];
self.ld_op_flags.copy_within(0..4, 4);
let not_2 = binary_not(self.current.ld_op_bits()[2]);
for i in 0..4 {
self.ld_op_flags[i] *= not_2;
}
for i in 4..8 {
self.ld_op_flags[i] *= self.current.ld_op_bits()[2];
}
self.ld_op_flags.copy_within(0..8, 8);
let not_3 = binary_not(self.current.ld_op_bits()[3]);
for i in 0..8 {
self.ld_op_flags[i] *= not_3;
}
for i in 8..16 {
self.ld_op_flags[i] *= self.current.ld_op_bits()[3];
}
self.ld_op_flags.copy_within(0..16, 16);
let not_4 = binary_not(self.current.ld_op_bits()[4]);
for i in 0..16 {
self.ld_op_flags[i] *= not_4;
}
for i in 16..32 {
self.ld_op_flags[i] *= self.current.ld_op_bits()[4];
}
// set high-degree operation flags
let not_0 = binary_not(self.current.hd_op_bits()[0]);
let not_1 = binary_not(self.current.hd_op_bits()[1]);
self.hd_op_flags[0] = not_0 * not_1;
self.hd_op_flags[1] = self.current.hd_op_bits()[0] * not_1;
self.hd_op_flags[2] = not_0 * self.current.hd_op_bits()[1];
self.hd_op_flags[3] = self.current.hd_op_bits()[0] * self.current.hd_op_bits()[1];
// compute flag for BEGIN operation which is just 0000000; the below is equivalent
// to multiplying binary inverses of all op bits together.
self.begin_flag =
self.ld_op_flags[OpCode::Begin.ld_index()] * self.hd_op_flags[OpCode::Begin.hd_index()];
// compute flag for NOOP operation which is just 1111111; the below is equivalent to
// multiplying all op bits together.
self.noop_flag =
self.ld_op_flags[OpCode::Noop.ld_index()] * self.hd_op_flags[OpCode::Noop.hd_index()];
// we need to make special adjustments for PUSH and ASSERT op flags so that they
// don't coincide with BEGIN operation; we do this by multiplying each flag by a
// single op_bit from another op bank; this increases degree of each flag by 1
debug_assert!(OpCode::Push.hd_index() == 0, "PUSH index is not 0!");
self.hd_op_flags[0] *= self.current.ld_op_bits()[0];
debug_assert!(OpCode::Assert.ld_index() == 0, "ASSERT index is not 0!");
self.ld_op_flags[0] *= self.current.hd_op_bits()[0];
}
}
// TESTS
// ================================================================================================
#[cfg(test)]
mod tests {
use super::{EvaluationFrame, VmTransition};
use vm_core::{utils::ToElements, BaseElement, FieldElement, StarkField};
#[test]
fn op_flags() {
// all zeros
let transition = vm_transition_from_current(&[
101, 1, 2, 3, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 15, 16, 17,
]);
assert_eq!(
[1, 0, 0, 0, 0, 0, 0, 0].to_elements(),
transition.cf_op_flags()
);
assert_eq!(
[
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0,
]
.to_elements(),
transition.ld_op_flags()
);
assert_eq!([0, 0, 0, 0].to_elements(), transition.hd_op_flags());
assert_eq!(1, transition.begin_flag().as_int());
assert_eq!(0, transition.noop_flag().as_int());
// all ones
let transition = vm_transition_from_current(&[
101, 1, 2, 3, 4, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 15, 16, 17,
]);
assert_eq!(
[0, 0, 0, 0, 0, 0, 0, 1].to_elements(),
transition.cf_op_flags()
);
assert_eq!(
[
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 1,
]
.to_elements(),
transition.ld_op_flags()
);
assert_eq!([0, 0, 0, 1].to_elements(), transition.hd_op_flags());
assert_eq!(0, transition.begin_flag().as_int());
assert_eq!(1, transition.noop_flag().as_int());
// mixed 1
let transition = vm_transition_from_current(&[
101, 1, 2, 3, 4, 1, 0, 0, 1, 0, 0, 0, 0, 1, 0, 15, 16, 17,
]);
assert_eq!(
[0, 1, 0, 0, 0, 0, 0, 0].to_elements(),
transition.cf_op_flags()
);
assert_eq!(
[
0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0,
]
.to_elements(),
transition.ld_op_flags()
);
assert_eq!([0, 1, 0, 0].to_elements(), transition.hd_op_flags());
assert_eq!(0, transition.begin_flag().as_int());
assert_eq!(0, transition.noop_flag().as_int());
// mixed 2
let transition = vm_transition_from_current(&[
101, 1, 2, 3, 4, 1, 1, 0, 1, 1, 0, 0, 0, 0, 1, 15, 16, 17,
]);
assert_eq!(
[0, 0, 0, 1, 0, 0, 0, 0].to_elements(),
transition.cf_op_flags()
);
assert_eq!(
[
0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0,
]
.to_elements(),
transition.ld_op_flags()
);
assert_eq!([0, 0, 1, 0].to_elements(), transition.hd_op_flags());
}
// HELPER FUNCTIONS
// --------------------------------------------------------------------------------------------
fn vm_transition_from_current(current_row: &[u128]) -> VmTransition<BaseElement> {
let mut result = VmTransition::new(1, 0, 2);
let current = current_row.iter().map(|&v| BaseElement::new(v)).collect();
let frame = EvaluationFrame::from_rows(current, vec![BaseElement::ZERO; current_row.len()]);
result.update(&frame);
result
}
}
| true |
3b236f94dc4b0127d246b6d3cc16323c1b549ea5
|
Rust
|
jmcph4/oxcart
|
/src/map.rs
|
UTF-8
| 680 | 2.859375 | 3 |
[
"MIT"
] |
permissive
|
#[derive(Clone, Copy, PartialEq, Debug)]
#[allow(dead_code)]
pub enum MapError {
KeyNotFound,
}
pub trait Map<K: Sized + Eq + Clone, V: Sized + Eq + Clone>:
    IntoIterator + Eq + Clone
{
fn new() -> Self;
fn get(&self, key: K) -> Result<&V, MapError>;
fn get_mut(&mut self, key: K) -> Result<&mut V, MapError>;
fn set(&mut self, key: K, value: V) -> Result<(), MapError>;
fn remove(&mut self, key: K) -> Result<(), MapError>;
fn size(&self) -> Result<usize, MapError>;
fn contains_key(&self, key: K) -> Result<bool, MapError>;
fn contains_value(&self, value: V) -> Result<bool, MapError>;
fn clear(&mut self) -> Result<(), MapError>;
}
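// A minimal usage sketch (added for illustration, not part of the original
// crate): a generic caller that relies only on the trait methods above.
// `count_or_insert` is a hypothetical name chosen for this example.
#[allow(dead_code)]
fn count_or_insert<M: Map<String, u64>>(map: &mut M, key: String) -> Result<(), MapError> {
    if map.contains_key(key.clone())? {
        // Key already present: bump the stored counter in place.
        *map.get_mut(key)? += 1;
        Ok(())
    } else {
        // First occurrence: store an initial count of one.
        map.set(key, 1)
    }
}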
| true |
24de6b154ca161516052781ebd79a99af0edc0f8
|
Rust
|
Weasy666/egui
|
/examples/puffin_profiler/src/main.rs
|
UTF-8
| 2,350 | 2.890625 | 3 |
[
"Apache-2.0",
"MIT"
] |
permissive
|
#![cfg_attr(not(debug_assertions), windows_subsystem = "windows")] // hide console window on Windows in release
use eframe::egui;
fn main() -> Result<(), eframe::Error> {
    start_puffin_server(); // NOTE: you may only want to call this if the user specifies some flag or clicks a button!
let options = eframe::NativeOptions::default();
eframe::run_native(
"My egui App",
options,
Box::new(|_cc| Box::new(MyApp::default())),
)
}
#[derive(Default)]
struct MyApp {}
impl eframe::App for MyApp {
fn update(&mut self, ctx: &egui::Context, _frame: &mut eframe::Frame) {
egui::CentralPanel::default().show(ctx, |ui| {
ui.heading("Example of how to use the puffin profiler with egui");
ui.separator();
let cmd = "cargo install puffin_viewer && puffin_viewer --url 127.0.0.1:8585";
ui.label("To connect, run this:");
ui.horizontal(|ui| {
ui.monospace(cmd);
if ui.small_button("📋").clicked() {
ui.output().copied_text = cmd.into();
}
});
ui.separator();
ui.label("Note that this app runs in 'reactive' mode, so you must interact with the app for new profile events to be sent. Waving the mouse over this window is enough.");
if ui
.button(
"Click to sleep a bit. That should be visible as a spike in the profiler view!",
)
.clicked()
{
puffin::profile_scope!("sleep");
std::thread::sleep(std::time::Duration::from_millis(50));
}
});
}
}
fn start_puffin_server() {
puffin::set_scopes_on(true); // tell puffin to collect data
match puffin_http::Server::new("0.0.0.0:8585") {
Ok(puffin_server) => {
eprintln!("Run: cargo install puffin_viewer && puffin_viewer --url 127.0.0.1:8585");
// We can store the server if we want, but in this case we just want
// it to keep running. Dropping it closes the server, so let's not drop it!
#[allow(clippy::mem_forget)]
std::mem::forget(puffin_server);
}
Err(err) => {
eprintln!("Failed to start puffin server: {}", err);
}
};
}
| true |
4e893b47d51ddb72a81491651ff36915507c8932
|
Rust
|
jas0nma/AGA8
|
/AGA8CODE/RUST/src/lib.rs
|
UTF-8
| 9,052 | 2.6875 | 3 |
[
"NIST-PD"
] |
permissive
|
//! # AGA8 equations of state
//! Provides methods to calculate thermodynamic properties, including
//! compressibility factors and densities of natural gases.
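//!
//! A minimal usage sketch of the pure-Rust API (added for illustration; these
//! are the same calls the C FFI wrappers below make, and the composition,
//! temperature, and pressure values are borrowed from the `gerg_2008` doc
//! example in this file):
//!
//! ```no_run
//! use aga8::Detail;
//!
//! let mut aga8 = Detail::new();
//! aga8.setup();
//! // 21-component molar composition.
//! aga8.x[0..21].clone_from_slice(&[
//!     0.77824, 0.02, 0.06, 0.08, 0.03, 0.0015, 0.003, 0.0005, 0.00165, 0.00215,
//!     0.00088, 0.00024, 0.00015, 0.00009, 0.004, 0.005, 0.002, 0.0001, 0.0025,
//!     0.007, 0.001,
//! ]);
//! aga8.t = 400.0;
//! aga8.p = 50_000.0;
//! aga8.density_detail();
//! aga8.properties_detail();
//! println!("molar concentration: {} mol/l", aga8.d);
//! ```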
mod detail;
mod gerg2008;
pub use crate::detail::Detail;
pub use crate::gerg2008::Gerg2008;
use std::slice;
#[repr(C)]
pub struct Properties {
pub d: f64, // Molar concentration [mol/l]
    pub mm: f64,      // Molar mass
    pub z: f64,       // Compressibility factor
    pub dp_dd: f64,   // First derivative of pressure with respect to density
    pub d2p_dd2: f64, // Second derivative of pressure with respect to density
    pub dp_dt: f64,   // First derivative of pressure with respect to temperature
    pub u: f64,       // Internal energy
    pub h: f64,       // Enthalpy
    pub s: f64,       // Entropy
    pub cv: f64,      // Isochoric heat capacity
    pub cp: f64,      // Isobaric heat capacity
    pub w: f64,       // Speed of sound
    pub g: f64,       // Gibbs energy
    pub jt: f64,      // Joule-Thomson coefficient
    pub kappa: f64,   // Isentropic exponent
}
/// # Safety
/// composition must be an array of 21 elements.
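///
/// # Examples
/// A usage sketch mirroring the `gerg_2008` example below (added for
/// illustration; the composition, pressure, and temperature values are the
/// same, and no specific numeric result is asserted here):
/// ```no_run
/// let composition: [f64; 21] = [
///     0.77824, 0.02, 0.06, 0.08, 0.03, 0.0015, 0.003, 0.0005, 0.00165, 0.00215, 0.00088, 0.00024,
///     0.00015, 0.00009, 0.004, 0.005, 0.002, 0.0001, 0.0025, 0.007, 0.001,
/// ];
///
/// let temperature = 400.0;
/// let pressure = 50000.0;
///
/// unsafe {
///     let result = aga8::aga8_2017(&composition[0], pressure, temperature);
///     assert!(result.d > 0.0);
/// }
/// ```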
#[no_mangle]
pub unsafe extern "C" fn aga8_2017(
composition: *const f64,
pressure: f64,
temperature: f64,
) -> Properties {
let array = {
assert!(!composition.is_null());
slice::from_raw_parts(composition, detail::NC_DETAIL)
};
let mut aga8_test: Detail = Detail::new();
aga8_test.setup();
aga8_test.x[0..detail::NC_DETAIL].clone_from_slice(&array[..]);
aga8_test.t = temperature;
aga8_test.p = pressure;
aga8_test.density_detail();
aga8_test.properties_detail();
Properties {
d: aga8_test.d, // Molar concentration [mol/l]
mm: aga8_test.mm,
z: aga8_test.z,
dp_dd: aga8_test.dp_dd,
d2p_dd2: aga8_test.d2p_dd2,
dp_dt: aga8_test.dp_dt,
u: aga8_test.u,
h: aga8_test.h,
s: aga8_test.s,
cv: aga8_test.cv,
cp: aga8_test.cp,
w: aga8_test.w,
g: aga8_test.g,
jt: aga8_test.jt,
kappa: aga8_test.kappa,
}
}
#[no_mangle]
pub extern "C" fn aga8_new() -> *mut Detail {
Box::into_raw(Box::new(Detail::new()))
}
/// # Safety
///
#[no_mangle]
pub unsafe extern "C" fn aga8_free(ptr: *mut Detail) {
if ptr.is_null() {
return;
}
    // Re-box the raw pointer and drop it immediately to free the allocation.
    drop(Box::from_raw(ptr));
}
/// # Safety
///
#[no_mangle]
pub unsafe extern "C" fn aga8_setup(ptr: *mut Detail) {
assert!(!ptr.is_null());
let aga8 = &mut *ptr;
aga8.setup();
}
/// # Safety
///
#[no_mangle]
pub unsafe extern "C" fn aga8_set_composition(ptr: *mut Detail, composition: *const f64) {
assert!(!ptr.is_null());
assert!(!composition.is_null());
let aga8 = &mut *ptr;
let array = slice::from_raw_parts(composition, detail::NC_DETAIL);
aga8.x[0..detail::NC_DETAIL].clone_from_slice(&array[..]);
}
/// # Safety
///
#[no_mangle]
pub unsafe extern "C" fn aga8_set_pressure(ptr: *mut Detail, pressure: f64) {
assert!(!ptr.is_null());
let aga8 = &mut *ptr;
aga8.p = pressure;
}
/// # Safety
///
#[no_mangle]
pub unsafe extern "C" fn aga8_get_pressure(ptr: *mut Detail) -> f64 {
assert!(!ptr.is_null());
let aga8 = &mut *ptr;
aga8.p
}
/// # Safety
///
#[no_mangle]
pub unsafe extern "C" fn aga8_set_temperature(ptr: *mut Detail, temperature: f64) {
assert!(!ptr.is_null());
let aga8 = &mut *ptr;
aga8.t = temperature;
}
/// # Safety
///
#[no_mangle]
pub unsafe extern "C" fn aga8_get_temperature(ptr: *mut Detail) -> f64 {
assert!(!ptr.is_null());
let aga8 = &mut *ptr;
aga8.t
}
/// # Safety
///
#[no_mangle]
pub unsafe extern "C" fn aga8_set_density(ptr: *mut Detail, density: f64) {
assert!(!ptr.is_null());
let aga8 = &mut *ptr;
aga8.d = density;
}
/// # Safety
///
#[no_mangle]
pub unsafe extern "C" fn aga8_get_density(ptr: *mut Detail) -> f64 {
assert!(!ptr.is_null());
let aga8 = &mut *ptr;
aga8.d
}
/// # Safety
///
#[no_mangle]
pub unsafe extern "C" fn aga8_get_properties(ptr: *const Detail) -> Properties {
assert!(!ptr.is_null());
let aga8 = &*ptr;
Properties {
d: aga8.d, // Molar concentration [mol/l]
mm: aga8.mm,
z: aga8.z,
dp_dd: aga8.dp_dd,
d2p_dd2: aga8.d2p_dd2,
dp_dt: aga8.dp_dt,
u: aga8.u,
h: aga8.h,
s: aga8.s,
cv: aga8.cv,
cp: aga8.cp,
w: aga8.w,
g: aga8.g,
jt: aga8.jt,
kappa: aga8.kappa,
}
}
/// # Safety
///
#[no_mangle]
pub unsafe extern "C" fn aga8_calculate_pressure(ptr: *mut Detail) {
assert!(!ptr.is_null());
let aga8 = &mut *ptr;
aga8.pressure_detail();
}
/// # Safety
///
#[no_mangle]
pub unsafe extern "C" fn aga8_calculate_density(ptr: *mut Detail) {
assert!(!ptr.is_null());
let aga8 = &mut *ptr;
aga8.density_detail();
}
/// # Safety
///
#[no_mangle]
pub unsafe extern "C" fn aga8_calculate_properties(ptr: *mut Detail) {
assert!(!ptr.is_null());
let aga8 = &mut *ptr;
aga8.properties_detail();
}
/// # Safety
///
/// # Examples
/// ```
/// let composition: [f64; 21] = [
/// 0.77824, 0.02, 0.06, 0.08, 0.03, 0.0015, 0.003, 0.0005, 0.00165, 0.00215, 0.00088, 0.00024,
/// 0.00015, 0.00009, 0.004, 0.005, 0.002, 0.0001, 0.0025, 0.007, 0.001,
/// ];
///
/// let temperature = 400.0;
/// let pressure = 50000.0;
///
/// unsafe {
/// let result = aga8::gerg_2008(&composition[0], pressure, temperature);
///
/// assert!(f64::abs(result.d - 12.798_286_260_820_62) < 1.0e-10);
/// }
/// ```
#[no_mangle]
pub unsafe extern "C" fn gerg_2008(
composition: *const f64,
pressure: f64,
temperature: f64,
) -> Properties {
assert!(!composition.is_null());
let array = slice::from_raw_parts(composition, detail::NC_DETAIL);
let mut gerg_test: Gerg2008 = Gerg2008::new();
gerg_test.setup();
gerg_test.x[1..=detail::NC_DETAIL].clone_from_slice(&array[..]);
gerg_test.t = temperature;
gerg_test.p = pressure;
gerg_test.density(0);
gerg_test.properties();
Properties {
d: gerg_test.d, // Molar concentration [mol/l]
mm: gerg_test.mm,
z: gerg_test.z,
dp_dd: gerg_test.dp_dd,
d2p_dd2: gerg_test.d2p_dd2,
dp_dt: gerg_test.dp_dt,
u: gerg_test.u,
h: gerg_test.h,
s: gerg_test.s,
cv: gerg_test.cv,
cp: gerg_test.cp,
w: gerg_test.w,
g: gerg_test.g,
jt: gerg_test.jt,
kappa: gerg_test.kappa,
}
}
#[no_mangle]
pub extern "C" fn gerg_new() -> *mut Gerg2008 {
Box::into_raw(Box::new(Gerg2008::new()))
}
/// # Safety
///
#[no_mangle]
pub unsafe extern "C" fn gerg_free(ptr: *mut Gerg2008) {
if ptr.is_null() {
return;
}
    // Re-box the raw pointer and drop it immediately to free the allocation.
    drop(Box::from_raw(ptr));
}
/// # Safety
///
#[no_mangle]
pub unsafe extern "C" fn gerg_setup(ptr: *mut Gerg2008) {
assert!(!ptr.is_null());
let gerg = &mut *ptr;
gerg.setup();
}
/// # Safety
///
#[no_mangle]
pub unsafe extern "C" fn gerg_set_composition(ptr: *mut Gerg2008, composition: *const f64) {
assert!(!ptr.is_null());
assert!(!composition.is_null());
let gerg = &mut *ptr;
let array = slice::from_raw_parts(composition, detail::NC_DETAIL);
gerg.x[1..=detail::NC_DETAIL].clone_from_slice(&array[..]);
}
/// # Safety
///
#[no_mangle]
pub unsafe extern "C" fn gerg_set_pressure(ptr: *mut Gerg2008, pressure: f64) {
assert!(!ptr.is_null());
let gerg = &mut *ptr;
gerg.p = pressure;
}
/// # Safety
///
#[no_mangle]
pub unsafe extern "C" fn gerg_get_pressure(ptr: *mut Gerg2008) -> f64 {
assert!(!ptr.is_null());
let gerg = &mut *ptr;
gerg.p
}
/// # Safety
///
#[no_mangle]
pub unsafe extern "C" fn gerg_set_temperature(ptr: *mut Gerg2008, temperature: f64) {
assert!(!ptr.is_null());
let gerg = &mut *ptr;
gerg.t = temperature;
}
/// # Safety
///
#[no_mangle]
pub unsafe extern "C" fn gerg_get_temperature(ptr: *mut Gerg2008) -> f64 {
assert!(!ptr.is_null());
let gerg = &mut *ptr;
gerg.t
}
/// # Safety
///
#[no_mangle]
pub unsafe extern "C" fn gerg_set_density(ptr: *mut Gerg2008, density: f64) {
assert!(!ptr.is_null());
let gerg = &mut *ptr;
gerg.d = density;
}
/// # Safety
///
#[no_mangle]
pub unsafe extern "C" fn gerg_get_density(ptr: *mut Gerg2008) -> f64 {
assert!(!ptr.is_null());
let gerg = &mut *ptr;
gerg.d
}
/// # Safety
///
#[no_mangle]
pub unsafe extern "C" fn gerg_get_properties(ptr: *const Gerg2008) -> Properties {
assert!(!ptr.is_null());
let gerg = &*ptr;
Properties {
d: gerg.d, // Molar concentration [mol/l]
mm: gerg.mm,
z: gerg.z,
dp_dd: gerg.dp_dd,
d2p_dd2: gerg.d2p_dd2,
dp_dt: gerg.dp_dt,
u: gerg.u,
h: gerg.h,
s: gerg.s,
cv: gerg.cv,
cp: gerg.cp,
w: gerg.w,
g: gerg.g,
jt: gerg.jt,
kappa: gerg.kappa,
}
}
/// # Safety
///
#[no_mangle]
pub unsafe extern "C" fn gerg_calculate_pressure(ptr: *mut Gerg2008) {
assert!(!ptr.is_null());
let gerg = &mut *ptr;
gerg.pressure();
}
/// # Safety
///
#[no_mangle]
pub unsafe extern "C" fn gerg_calculate_density(ptr: *mut Gerg2008) {
assert!(!ptr.is_null());
let gerg = &mut *ptr;
gerg.density(0);
}
/// # Safety
///
#[no_mangle]
pub unsafe extern "C" fn gerg_calculate_properties(ptr: *mut Gerg2008) {
assert!(!ptr.is_null());
let gerg = &mut *ptr;
gerg.properties();
}
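// A minimal end-to-end sketch of the pointer-based GERG-2008 FFI lifecycle
// (added for illustration, not part of the original crate). Composition,
// temperature, and pressure values are borrowed from the `gerg_2008` doc
// example above; only a positive density is asserted.
#[cfg(test)]
mod ffi_lifecycle_sketch {
    use super::*;

    #[test]
    fn gerg_pointer_api_call_order() {
        let composition: [f64; 21] = [
            0.77824, 0.02, 0.06, 0.08, 0.03, 0.0015, 0.003, 0.0005, 0.00165, 0.00215, 0.00088,
            0.00024, 0.00015, 0.00009, 0.004, 0.005, 0.002, 0.0001, 0.0025, 0.007, 0.001,
        ];
        unsafe {
            let gerg = gerg_new();
            gerg_setup(gerg);
            gerg_set_composition(gerg, composition.as_ptr());
            gerg_set_temperature(gerg, 400.0);
            gerg_set_pressure(gerg, 50_000.0);
            gerg_calculate_density(gerg);
            gerg_calculate_properties(gerg);
            let props = gerg_get_properties(gerg);
            assert!(props.d > 0.0);
            gerg_free(gerg);
        }
    }
}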
| true |