blob_id
stringlengths 40
40
| language
stringclasses 1
value | repo_name
stringlengths 5
140
| path
stringlengths 5
183
| src_encoding
stringclasses 6
values | length_bytes
int64 12
5.32M
| score
float64 2.52
4.94
| int_score
int64 3
5
| detected_licenses
listlengths 0
47
| license_type
stringclasses 2
values | text
stringlengths 12
5.32M
| download_success
bool 1
class |
---|---|---|---|---|---|---|---|---|---|---|---|
5903b77c5ea23f92527e78c4d0bc660eed00dcc1
|
Rust
|
mthvedt/hitchhiker-rust
|
/src/tree/allocator.rs
|
UTF-8
| 1,300 | 3.078125 | 3 |
[] |
no_license
|
/*
Allocator design...
Pointer source.
Persistent pointers, transient pointers, weak pointers.
How to do persistent pointers?
Transient and weak are easy: just have them allocated from an arena.
Arenas are thread unsafe.
Recall that our use case is on-disk, not in-memory; therefore, persistent nodes
are not shareable until turned into 'on-disk' nodes.
For speed, we actually need several typed allocators, perhaps using macros.
*/
// TODO: name?
//! Thread-unsafe arena allocators.
// trait Arena {
// type Handle<Arena = Self>: ArenaHandle;
// type Validator: ArenaValidator;
// fn handle(&self) -> Self::Handle;
// fn alloc<T>(&mut self, t: T) -> ArenaPtr<T>;
// }
// struct ArenaImpl {
// // Any frees of the Arena must happen-after this state is cleared.
// state: AtomicBool,
// }
// trait ArenaHandle {
// type Arena: Arena;
// fn get(&self) -> Self::Arena;
// }
// struct ArenaHandleImpl<A: Arena> {
// arena: Rc<A>,
// }
// trait ArenaValidator {
// // empty trait
// }
// /// ArenaPtrs represent shared data.
// trait ArenaPtr<T> {
// type Arena: Arena,
// fn deref(&self, a: <Self::Arena as Arena>::Validator) -> &T;
// }
// /// An ArenaPtrMut can be downgraded into an ArenaPtr, but not vice versa.
// trait ArenaPtrMut {
// }
// struct ArenaPtr<T> {
// }
| true |
d1198f9d046fd3d86b5a4e7eb0f70835f9bd0bb4
|
Rust
|
azriel91/autexousious
|
/crate/game_play/src/system/game_play_removal_augment_system.rs
|
UTF-8
| 2,323 | 2.6875 | 3 |
[
"MIT",
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
] |
permissive
|
use amethyst::{
ecs::{ReadExpect, System, World, Write, WriteStorage},
shred::{ResourceId, SystemData},
shrev::{EventChannel, ReaderId},
};
use derivative::Derivative;
use derive_new::new;
use game_play_model::GamePlayEntity;
use spawn_model::play::SpawnEvent;
use state_registry::StateId;
/// Augments spawned entities with the `GamePlayEntity` removal component during `GamePlay`.
#[derive(Debug, Default, new)]
pub struct GamePlayRemovalAugmentSystem {
    /// Reader ID for the `SpawnEvent` channel.
    ///
    /// `None` until `setup` registers a reader against the channel.
    #[new(default)]
    spawn_event_rid: Option<ReaderId<SpawnEvent>>,
}
/// `GamePlayRemovalAugmentSystemData`.
///
/// Resources and component storages accessed by `GamePlayRemovalAugmentSystem`.
#[derive(Derivative, SystemData)]
#[derivative(Debug)]
pub struct GamePlayRemovalAugmentSystemData<'s> {
    /// `StateId` resource.
    #[derivative(Debug = "ignore")]
    pub state_id: ReadExpect<'s, StateId>,
    /// `SpawnEvent` channel.
    #[derivative(Debug = "ignore")]
    pub spawn_ec: Write<'s, EventChannel<SpawnEvent>>,
    /// `GamePlayEntity` components.
    #[derivative(Debug = "ignore")]
    pub game_play_entities: WriteStorage<'s, GamePlayEntity>,
}
impl<'s> System<'s> for GamePlayRemovalAugmentSystem {
    type SystemData = GamePlayRemovalAugmentSystemData<'s>;

    fn run(
        &mut self,
        GamePlayRemovalAugmentSystemData {
            state_id,
            spawn_ec,
            mut game_play_entities,
        }: Self::SystemData,
    ) {
        let reader_id = self.spawn_event_rid.as_mut().expect(
            "Expected `spawn_event_rid` field to be set for `GamePlayRemovalAugmentSystem`.",
        );

        // Read the channel unconditionally so this reader never prevents the
        // channel from discarding old events.
        let spawn_events = spawn_ec.read(reader_id);

        if StateId::GamePlay == *state_id {
            // Tag each freshly spawned entity for removal when `GamePlay` ends.
            for ev in spawn_events {
                game_play_entities
                    .insert(ev.entity_spawned, GamePlayEntity)
                    .expect("Failed to insert `GamePlayEntity` component.");
            }
        }
    }

    fn setup(&mut self, world: &mut World) {
        Self::SystemData::setup(world);
        let reader = world
            .fetch_mut::<EventChannel<SpawnEvent>>()
            .register_reader();
        self.spawn_event_rid = Some(reader);
    }
}
| true |
cd0016f16a3369d465565df6258df57d646e3f09
|
Rust
|
rostam/Experiments
|
/rust/rust_gtea/test.rs
|
UTF-8
| 4,131 | 3.453125 | 3 |
[
"MIT"
] |
permissive
|
use std::thread;
use std::time::{Duration, Instant};
use std::thread::sleep;
// This is the `main` thread
/// Threaded map-reduce demo: sums every digit of `data`.
///
/// Each whitespace-separated chunk is handled in its own thread ("map"),
/// then the per-chunk sums are combined on the main thread ("reduce").
fn main() {
    let start = Instant::now();

    // The digits to sum. Whitespace splits this into chunks, one worker
    // thread per chunk.
    //
    // TODO: see what happens to the output if you insert spaces!
    let data = "86967897737416471853297327050364959
11861322575564723963297542624962850
70856234701860851907960690014725639
38397966707106094172783238747669219
52380795257888236525459303330302837
58495327135744041048897885734297812
69920216438980873548808413720956532
16278424637452589860345374828574668";

    /*************************************************************************
     * "Map" phase: spawn one worker per data segment.
     ************************************************************************/
    // `enumerate()` pairs every segment with its index; `move` transfers the
    // captured `(i, segment)` into each worker closure. The join handles are
    // collected so the results can be retrieved later.
    let workers: Vec<_> = data
        .split_whitespace()
        .enumerate()
        .map(|(i, segment)| {
            println!("data segment {} is \"{}\"", i, segment);
            thread::spawn(move || -> u32 {
                // Sum the numeric value of every character in this segment.
                let partial: u32 = segment
                    .chars()
                    .map(|c| c.to_digit(10).expect("should be a digit"))
                    .sum();
                // println! locks stdout, so no text-interleaving occurs.
                println!("processed segment {}, result={}", i, partial);
                partial
            })
        })
        .collect();

    /*************************************************************************
     * "Reduce" phase: join every worker and add up the partial sums.
     ************************************************************************/
    let final_result: u32 = workers
        .into_iter()
        .map(|handle| handle.join().unwrap())
        .sum();

    println!("Final sum result: {}", final_result);
    println!("{}", start.elapsed().as_secs());
}
| true |
fb6bbd21c40fbe9f316529d19cd841756aa77d4d
|
Rust
|
TheBlueMatt/rust-lightning
|
/lightning/src/util/time.rs
|
UTF-8
| 3,652 | 3.5625 | 4 |
[
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference",
"MIT"
] |
permissive
|
// This file is licensed under the Apache License, Version 2.0 <LICENSE-APACHE
// or http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your option.
// You may not use this file except in accordance with one or both of these
// licenses.
//! [`Time`] trait and different implementations. Currently, it's mainly used in tests so we can
//! manually advance time.
//! Other crates may symlink this file to use it while [`Time`] trait is sealed here.
use core::ops::Sub;
use core::time::Duration;
/// A measurement of time.
// `Copy + Sub<Duration>` lets callers rewind an instant by value without
// cloning; `Self: Sized` is required so `now()` can return `Self` by value.
pub trait Time: Copy + Sub<Duration, Output = Self> where Self: Sized {
    /// Returns an instance corresponding to the current moment.
    fn now() -> Self;
    /// Returns the amount of time elapsed since `self` was created.
    fn elapsed(&self) -> Duration;
    /// Returns the amount of time passed between `earlier` and `self`.
    fn duration_since(&self, earlier: Self) -> Duration;
    /// Returns the amount of time passed since the beginning of [`Time`].
    ///
    /// Used during (de-)serialization.
    fn duration_since_epoch() -> Duration;
}
/// A state in which time has no meaning.
///
/// Zero-sized [`Time`] implementation whose measured durations are all zero
/// (see the `impl Time for Eternity` below).
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub struct Eternity;
impl Time for Eternity {
fn now() -> Self {
Self
}
fn duration_since(&self, _earlier: Self) -> Duration {
Duration::from_secs(0)
}
fn duration_since_epoch() -> Duration {
Duration::from_secs(0)
}
fn elapsed(&self) -> Duration {
Duration::from_secs(0)
}
}
impl Sub<Duration> for Eternity {
type Output = Self;
fn sub(self, _other: Duration) -> Self {
self
}
}
// Real wall-clock implementation, only available with `std`.
#[cfg(not(feature = "no-std"))]
impl Time for std::time::Instant {
    fn now() -> Self {
        std::time::Instant::now()
    }
    fn duration_since(&self, earlier: Self) -> Duration {
        // Resolves to the inherent `Instant::duration_since`, not this trait
        // method (inherent methods take precedence), so this does not recurse.
        self.duration_since(earlier)
    }
    fn duration_since_epoch() -> Duration {
        use std::time::SystemTime;
        // NOTE(review): panics if the system clock is set before the Unix
        // epoch — presumably acceptable here; confirm with callers.
        SystemTime::now().duration_since(SystemTime::UNIX_EPOCH).unwrap()
    }
    fn elapsed(&self) -> Duration {
        // Fully-qualified call to disambiguate from `Time::elapsed`.
        std::time::Instant::elapsed(self)
    }
}
#[cfg(test)]
pub mod tests {
    use super::{Time, Eternity};
    use core::time::Duration;
    use core::ops::Sub;
    use core::cell::Cell;
    /// Time that can be advanced manually in tests.
    #[derive(Clone, Copy, Debug, PartialEq, Eq)]
    pub struct SinceEpoch(Duration);
    impl SinceEpoch {
        // Per-thread fake clock; each test thread gets its own elapsed total.
        thread_local! {
            static ELAPSED: Cell<Duration> = core::cell::Cell::new(Duration::from_secs(0));
        }
        /// Advances this thread's fake clock by `duration`.
        pub fn advance(duration: Duration) {
            Self::ELAPSED.with(|elapsed| elapsed.set(elapsed.get() + duration))
        }
    }
    impl Time for SinceEpoch {
        fn now() -> Self {
            Self(Self::duration_since_epoch())
        }
        fn duration_since(&self, earlier: Self) -> Duration {
            self.0 - earlier.0
        }
        fn duration_since_epoch() -> Duration {
            Self::ELAPSED.with(|elapsed| elapsed.get())
        }
        fn elapsed(&self) -> Duration {
            Self::duration_since_epoch() - self.0
        }
    }
    impl Sub<Duration> for SinceEpoch {
        type Output = Self;
        fn sub(self, other: Duration) -> Self {
            Self(self.0 - other)
        }
    }
    #[test]
    fn time_passes_when_advanced() {
        let now = SinceEpoch::now();
        assert_eq!(now.elapsed(), Duration::from_secs(0));
        SinceEpoch::advance(Duration::from_secs(1));
        SinceEpoch::advance(Duration::from_secs(1));
        let elapsed = now.elapsed();
        let later = SinceEpoch::now();
        assert_eq!(elapsed, Duration::from_secs(2));
        // Rewinding `later` by the elapsed time lands back on `now`.
        assert_eq!(later - elapsed, now);
    }
    #[test]
    fn time_never_passes_in_an_eternity() {
        let now = Eternity::now();
        let elapsed = now.elapsed();
        let later = Eternity::now();
        assert_eq!(now.elapsed(), Duration::from_secs(0));
        assert_eq!(later - elapsed, now);
    }
}
| true |
2dcb4122c6c5c21ffb7198aed8e8fb45e428cf90
|
Rust
|
bitonic/puzzlescript-rs
|
/glutin/src/window.rs
|
UTF-8
| 2,584 | 2.703125 | 3 |
[] |
no_license
|
use failure::Error;
use gleam::gl;
use glutin::dpi::LogicalSize;
use glutin::{
Api, ContextBuilder, Event, EventsLoop, GlContext, GlRequest, GlWindow, WindowBuilder,
WindowEvent,
};
use std::rc::Rc;
/// We assume that we have at least a 720p space to draw.
const MIN_WINDOW_WIDTH: u32 = 1280;
const MIN_WINDOW_HEIGHT: u32 = 720;
/// Creates the event loop, the GL window and the GL function table.
///
/// Requests an OpenGL 3.3 context with vsync (3.3 just because that's what
/// <https://learnopengl.com> uses). Window creation failures panic via
/// `unwrap` rather than being floated upstream: nothing can proceed without
/// a GL window, and the error type does not implement `Sync`.
pub fn init(
    title: &str,
    set_min_window_size: bool,
) -> Result<(EventsLoop, GlWindow, Rc<gl::Gl>), Error> {
    let events_loop = EventsLoop::new();
    let window = WindowBuilder::new().with_title(title);
    let context = ContextBuilder::new()
        .with_gl(GlRequest::Specific(Api::OpenGl, (3, 3)))
        .with_vsync(true);
    let gl_window = GlWindow::new(window, context, &events_loop).unwrap();

    // Optionally pin the window to at least 720p *physical* pixels, dividing
    // by the hidpi factor to get the logical size.
    if set_min_window_size {
        let hidpi = gl_window.get_hidpi_factor();
        let min_dims = LogicalSize::new(
            f64::from(MIN_WINDOW_WIDTH) / hidpi,
            f64::from(MIN_WINDOW_HEIGHT) / hidpi,
        );
        gl_window.set_inner_size(min_dims);
        gl_window.set_min_dimensions(Some(min_dims));
    }

    let gl_unchecked;
    unsafe {
        gl_window.make_current()?;
        gl_unchecked =
            gl::GlFns::load_with(|symbol| gl_window.get_proc_address(symbol) as *const _);
    }
    let gl = gl::ErrorCheckingGl::wrap(gl_unchecked);

    // Enable blending (for fonts).
    gl.enable(gl::BLEND);
    gl.blend_func(gl::SRC_ALPHA, gl::ONE_MINUS_SRC_ALPHA);
    // All the image data we pass to OpenGL is tightly packed.
    gl.pixel_store_i(gl::UNPACK_ALIGNMENT, 1);

    Ok((events_loop, gl_window, gl))
}
/// Automatically handles resize events: resizes the GL window and updates
/// the viewport. Non-resize events are ignored; the caller still receives
/// every event untouched.
pub fn handle_resize_events(gl_window: &GlWindow, gl: &gl::Gl, event: &Event) {
    match event {
        Event::WindowEvent {
            event: WindowEvent::Resized(window_size),
            ..
        } => {
            // Convert the logical size to physical pixels before resizing.
            let phys_size = window_size.to_physical(gl_window.get_hidpi_factor());
            gl_window.resize(phys_size);
            gl.viewport(0, 0, phys_size.width as i32, phys_size.height as i32);
        }
        _ => {}
    }
}
/// Drains all currently pending events from the loop into a `Vec`.
pub fn collect_events(events_loop: &mut EventsLoop) -> Vec<Event> {
    let mut pending = Vec::<Event>::new();
    events_loop.poll_events(|ev| pending.push(ev));
    pending
}
/// Clears the color buffer of the current GL context.
pub fn clear(gl: &gl::Gl) {
    gl.clear(gl::COLOR_BUFFER_BIT);
}
| true |
64048125a42970431b0ac7a7e429cba12e958128
|
Rust
|
gpoesia/cretonne
|
/lib/cretonne/src/verifier.rs
|
UTF-8
| 15,741 | 3.140625 | 3 |
[
"Apache-2.0"
] |
permissive
|
//! A verifier for ensuring that functions are well formed.
//! It verifies:
//!
//! EBB integrity
//!
//! - All instructions reached from the `ebb_insts` iterator must belong to
//! the EBB as reported by `inst_ebb()`.
//! - Every EBB must end in a terminator instruction, and no other instruction
//! can be a terminator.
//! - Every value in the `ebb_args` iterator belongs to the EBB as reported by `value_ebb`.
//!
//! Instruction integrity
//!
//! - The instruction format must match the opcode.
//! - All result values must be created for multi-valued instructions.
//! - Instructions with no results must have a VOID `first_type()`.
//! - All referenced entities must exist. (Values, EBBs, stack slots, ...)
//!
//! SSA form
//!
//! - Values must be defined by an instruction that exists and that is inserted in
//! an EBB, or be an argument of an existing EBB.
//! - Values used by an instruction must dominate the instruction.
//!
//! Control flow graph and dominator tree integrity:
//!
//! - All predecessors in the CFG must be branches to the EBB.
//! - All branches to an EBB must be present in the CFG.
//! TODO:
//! - A recomputed dominator tree is identical to the existing one.
//!
//! Type checking
//!
//! - Compare input and output values against the opcode's type constraints.
//! For polymorphic opcodes, determine the controlling type variable first.
//! - Branches and jumps must pass arguments to destination EBBs that match the
//! expected types exactly. The number of arguments must match.
//! - All EBBs in a jump_table must take no arguments.
//! - Function calls are type checked against their signature.
//! - The entry block must take arguments that match the signature of the current
//! function.
//! - All return instructions must have return value operands matching the current
//! function signature.
//!
//! Ad hoc checking
//!
//! - Stack slot loads and stores must be in-bounds.
//! - Immediate constraints for certain opcodes, like `udiv_imm v3, 0`.
//! - Extend / truncate instructions have more type constraints: Source type can't be
//! larger / smaller than result type.
//! - `Insertlane` and `extractlane` instructions have immediate lane numbers that must be in
//! range for their polymorphic type.
//! - Swizzle and shuffle instructions take a variable number of lane arguments. The number
//! of arguments must match the destination type, and the lane indexes must be in range.
use dominator_tree::DominatorTree;
use flowgraph::ControlFlowGraph;
use ir::entities::AnyEntity;
use ir::instructions::{InstructionFormat, BranchInfo};
use ir::{types, Function, ValueDef, Ebb, Inst, SigRef, FuncRef, ValueList, JumpTable, Value};
use std::fmt::{self, Display, Formatter};
use std::result;
/// A verifier error.
///
/// Produced by the `err!` macro below; rendered as "location: message".
#[derive(Debug, PartialEq, Eq)]
pub struct Error {
    /// The entity causing the verifier error.
    pub location: AnyEntity,
    /// Error message.
    pub message: String,
}
impl Display for Error {
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
write!(f, "{}: {}", self.location, self.message)
}
}
/// Verifier result.
pub type Result<T> = result::Result<T, Error>;

// Create an `Err` variant of `Result<X>` from a location and `format!` arguments.
macro_rules! err {
    // Plain message: no formatting arguments.
    ( $loc:expr, $msg:expr ) => {
        Err(Error {
            location: $loc.into(),
            message: String::from($msg),
        })
    };
    // Format string plus one or more arguments.
    ( $loc:expr, $fmt:expr, $( $arg:expr ),+ ) => {
        Err(Error {
            location: $loc.into(),
            message: format!( $fmt, $( $arg ),+ ),
        })
    };
}
/// Verify `func`.
///
/// Convenience wrapper: builds a [`Verifier`] and runs every check.
pub fn verify_function(func: &Function) -> Result<()> {
    let verifier = Verifier::new(func);
    verifier.run()
}
// Verifier context: the function under test plus a CFG and dominator tree
// recomputed from it in `Verifier::new`.
struct Verifier<'a> {
    func: &'a Function,
    cfg: ControlFlowGraph,
    domtree: DominatorTree,
}
impl<'a> Verifier<'a> {
    // Builds a verifier for `func`, recomputing the CFG and dominator tree
    // from scratch so the checks do not trust any cached analysis.
    pub fn new(func: &'a Function) -> Verifier {
        let cfg = ControlFlowGraph::with_function(func);
        let domtree = DominatorTree::with_function(func, &cfg);
        Verifier {
            func: func,
            cfg: cfg,
            domtree: domtree,
        }
    }

    // EBB integrity checks for one instruction: terminator placement, layout
    // membership, and EBB-argument ownership.
    fn ebb_integrity(&self, ebb: Ebb, inst: Inst) -> Result<()> {
        let is_terminator = self.func.dfg[inst].opcode().is_terminator();
        let is_last_inst = self.func.layout.last_inst(ebb) == Some(inst);
        if is_terminator && !is_last_inst {
            // Terminating instructions only occur at the end of blocks.
            return err!(inst,
                        "a terminator instruction was encountered before the end of {}",
                        ebb);
        }
        if is_last_inst && !is_terminator {
            return err!(ebb, "block does not end in a terminator instruction!");
        }
        // Instructions belong to the correct ebb.
        let inst_ebb = self.func.layout.inst_ebb(inst);
        if inst_ebb != Some(ebb) {
            return err!(inst, "should belong to {} not {:?}", ebb, inst_ebb);
        }
        // Arguments belong to the correct ebb.
        for arg in self.func.dfg.ebb_args(ebb) {
            match self.func.dfg.value_def(arg) {
                ValueDef::Arg(arg_ebb, _) => {
                    if ebb != arg_ebb {
                        return err!(arg, "does not belong to {}", ebb);
                    }
                }
                _ => {
                    return err!(arg, "expected an argument, found a result");
                }
            }
        }
        Ok(())
    }

    // Per-instruction checks: format matches opcode, result counts match the
    // opcode constraints (plus call signature), then entity references.
    fn instruction_integrity(&self, inst: Inst) -> Result<()> {
        let inst_data = &self.func.dfg[inst];
        let dfg = &self.func.dfg;
        // The instruction format matches the opcode
        if inst_data.opcode().format() != InstructionFormat::from(inst_data) {
            return err!(inst, "instruction opcode doesn't match instruction format");
        }
        let fixed_results = inst_data.opcode().constraints().fixed_results();
        // var_results is 0 if we aren't a call instruction
        let var_results =
            dfg.call_signature(inst).map(|sig| dfg.signatures[sig].return_types.len()).unwrap_or(0);
        let total_results = fixed_results + var_results;
        if total_results == 0 {
            // Instructions with no results have a NULL `first_type()`
            let ret_type = inst_data.first_type();
            if ret_type != types::VOID {
                return err!(inst,
                            "instruction with no results expects NULL return type, found {}",
                            ret_type);
            }
        } else {
            // All result values for multi-valued instructions are created
            let got_results = dfg.inst_results(inst).count();
            if got_results != total_results {
                return err!(inst,
                            "expected {} result values, found {}",
                            total_results,
                            got_results);
            }
        }
        self.verify_entity_references(inst)
    }

    // Checks that every entity the instruction mentions (values, EBBs, value
    // lists, jump tables, function and signature references) actually exists.
    fn verify_entity_references(&self, inst: Inst) -> Result<()> {
        use ir::instructions::InstructionData::*;
        for &arg in self.func.dfg.inst_args(inst) {
            self.verify_value(inst, arg)?;
        }
        for res in self.func.dfg.inst_results(inst) {
            self.verify_value(inst, res)?;
        }
        match &self.func.dfg[inst] {
            &MultiAry { ref args, .. } => {
                self.verify_value_list(inst, args)?;
            }
            &Jump { destination, ref args, .. } => {
                self.verify_ebb(inst, destination)?;
                self.verify_value_list(inst, args)?;
            }
            &Branch { destination, ref args, .. } => {
                self.verify_ebb(inst, destination)?;
                self.verify_value_list(inst, args)?;
            }
            &BranchTable { table, .. } => {
                self.verify_jump_table(inst, table)?;
            }
            &Call { func_ref, ref args, .. } => {
                self.verify_func_ref(inst, func_ref)?;
                self.verify_value_list(inst, args)?;
            }
            &IndirectCall { sig_ref, ref args, .. } => {
                self.verify_sig_ref(inst, sig_ref)?;
                self.verify_value_list(inst, args)?;
            }
            // Exhaustive list so we can't forget to add new formats
            &Nullary { .. } |
            &Unary { .. } |
            &UnaryImm { .. } |
            &UnaryIeee32 { .. } |
            &UnaryIeee64 { .. } |
            &UnarySplit { .. } |
            &Binary { .. } |
            &BinaryImm { .. } |
            &BinaryOverflow { .. } |
            &Ternary { .. } |
            &InsertLane { .. } |
            &ExtractLane { .. } |
            &IntCompare { .. } |
            &FloatCompare { .. } => {}
        }
        Ok(())
    }

    // The referenced EBB must exist in the DFG.
    fn verify_ebb(&self, inst: Inst, e: Ebb) -> Result<()> {
        if !self.func.dfg.ebb_is_valid(e) {
            err!(inst, "invalid ebb reference {}", e)
        } else {
            Ok(())
        }
    }

    // The referenced signature must exist.
    fn verify_sig_ref(&self, inst: Inst, s: SigRef) -> Result<()> {
        if !self.func
                .dfg
                .signatures
                .is_valid(s) {
            err!(inst, "invalid signature reference {}", s)
        } else {
            Ok(())
        }
    }

    // The referenced external function must exist.
    fn verify_func_ref(&self, inst: Inst, f: FuncRef) -> Result<()> {
        if !self.func
                .dfg
                .ext_funcs
                .is_valid(f) {
            err!(inst, "invalid function reference {}", f)
        } else {
            Ok(())
        }
    }

    // The value list must be valid within the function's value-list pool.
    fn verify_value_list(&self, inst: Inst, l: &ValueList) -> Result<()> {
        if !l.is_valid(&self.func.dfg.value_lists) {
            err!(inst, "invalid value list reference {:?}", l)
        } else {
            Ok(())
        }
    }

    // The referenced jump table must exist.
    fn verify_jump_table(&self, inst: Inst, j: JumpTable) -> Result<()> {
        if !self.func.jump_tables.is_valid(j) {
            err!(inst, "invalid jump table reference {}", j)
        } else {
            Ok(())
        }
    }

    // SSA checks for one value used/defined at `loc_inst`: the value exists,
    // its definition site exists and is in the layout, and the definition
    // dominates the use.
    fn verify_value(&self, loc_inst: Inst, v: Value) -> Result<()> {
        let dfg = &self.func.dfg;
        if !dfg.value_is_valid(v) {
            return err!(loc_inst, "invalid value reference {}", v);
        }
        // SSA form
        match dfg.value_def(v) {
            ValueDef::Res(def_inst, _) => {
                // Value is defined by an instruction that exists.
                if !dfg.insts.is_valid(def_inst) {
                    return err!(loc_inst,
                                "{} is defined by invalid instruction {}",
                                v,
                                def_inst);
                }
                // Defining instruction is inserted in an EBB.
                if self.func.layout.inst_ebb(def_inst) == None {
                    return err!(loc_inst,
                                "{} is defined by {} which has no EBB",
                                v,
                                def_inst);
                }
                // Defining instruction dominates the instruction that uses the value.
                if !self.domtree.dominates(def_inst, loc_inst, &self.func.layout) {
                    return err!(loc_inst, "uses value from non-dominating {}", def_inst);
                }
            }
            ValueDef::Arg(ebb, _) => {
                // Value is defined by an existing EBB.
                if !dfg.ebb_is_valid(ebb) {
                    return err!(loc_inst, "{} is defined by invalid EBB {}", v, ebb);
                }
                // Defining EBB is inserted in the layout
                if !self.func.layout.is_ebb_inserted(ebb) {
                    return err!(loc_inst,
                                "{} is defined by {} which is not in the layout",
                                v,
                                ebb);
                }
                // The defining EBB dominates the instruction using this value.
                if !self.domtree.ebb_dominates(ebb, loc_inst, &self.func.layout) {
                    return err!(loc_inst, "uses value arg from non-dominating {}", ebb);
                }
            }
        }
        Ok(())
    }

    // Cross-checks the CFG against the instructions: every recorded
    // predecessor really branches here, and every predecessor EBB records
    // this EBB as a successor.
    fn cfg_integrity(&self, ebb: Ebb) -> Result<()> {
        for &(pred_ebb, pred_inst) in self.cfg.get_predecessors(ebb) {
            // All predecessors in the CFG must be branches to the EBB
            match self.func.dfg[pred_inst].analyze_branch(&self.func.dfg.value_lists) {
                BranchInfo::SingleDest(target_ebb, _) => {
                    if target_ebb != ebb {
                        return err!(ebb,
                                    "has predecessor {} in {} which does not branch here",
                                    pred_inst,
                                    pred_ebb);
                    }
                }
                BranchInfo::Table(jt) => {
                    if !self.func.jump_tables[jt].branches_to(ebb) {
                        return err!(ebb,
                                    "has predecessor {} using {} in {} which never branches here",
                                    pred_inst,
                                    jt,
                                    pred_ebb);
                    }
                }
                BranchInfo::NotABranch => {
                    return err!(ebb, "has predecessor {} which is not a branch", pred_inst);
                }
            }
            // All EBBs branching to `ebb` have it recorded as a successor in the CFG.
            if !self.cfg.get_successors(pred_ebb).contains(&ebb) {
                return err!(ebb,
                            "predecessor {} does not have this EBB recorded as a successor",
                            pred_ebb);
            }
        }
        Ok(())
    }

    // Runs every check over every EBB and instruction, stopping at the
    // first error.
    pub fn run(&self) -> Result<()> {
        for ebb in self.func.layout.ebbs() {
            for inst in self.func.layout.ebb_insts(ebb) {
                self.ebb_integrity(ebb, inst)?;
                self.instruction_integrity(inst)?;
            }
            self.cfg_integrity(ebb)?;
        }
        Ok(())
    }
}
#[cfg(test)]
mod tests {
    use super::{Verifier, Error};
    use ir::Function;
    use ir::instructions::{InstructionData, Opcode};
    use ir::types;

    // Asserts that `$e` is an `Err` whose message contains `$msg`.
    macro_rules! assert_err_with_msg {
        ($e:expr, $msg:expr) => (
            match $e {
                Ok(_) => { panic!("Expected an error!") },
                Err(Error { message, .. } ) => {
                    if !message.contains($msg) {
                        panic!(format!("'{}' did not contain the substring '{}'", message, $msg));
                    }
                }
            }
        )
    }

    // An empty function trivially verifies.
    #[test]
    fn empty() {
        let func = Function::new();
        let verifier = Verifier::new(&func);
        assert_eq!(verifier.run(), Ok(()));
    }

    // A Nullary-format instruction carrying the `Jump` opcode must be
    // rejected by the format/opcode consistency check.
    #[test]
    fn bad_instruction_format() {
        let mut func = Function::new();
        let ebb0 = func.dfg.make_ebb();
        func.layout.append_ebb(ebb0);
        let nullary_with_bad_opcode = func.dfg.make_inst(InstructionData::Nullary {
            opcode: Opcode::Jump,
            ty: types::VOID,
        });
        func.layout.append_inst(nullary_with_bad_opcode, ebb0);
        let verifier = Verifier::new(&func);
        assert_err_with_msg!(verifier.run(), "instruction format");
    }
}
| true |
bf0421c2dc2c4be3ec777c7663ce5ef81246a725
|
Rust
|
GaloisInc/mir-verifier
|
/lib/liballoc/collections/btree/navigate.rs
|
UTF-8
| 11,063 | 2.96875 | 3 |
[] |
permissive
|
use core::ptr;
use super::node::{marker, ForceResult::*, Handle, NodeRef};
use super::unwrap_unchecked;
impl<BorrowType, K, V> Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge> {
    /// Given a leaf edge handle, returns [`Result::Ok`] with a handle to the neighboring KV
    /// on the right side, which is either in the same leaf node or in an ancestor node.
    /// If the leaf edge is the last one in the tree, returns [`Result::Err`] with the root node.
    pub fn next_kv(
        self,
    ) -> Result<
        Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, marker::KV>,
        NodeRef<BorrowType, K, V, marker::LeafOrInternal>,
    > {
        let mut edge = self.forget_node_type();
        loop {
            // Walk upward until some node has a KV to our right; reaching the
            // root without finding one means we were the last edge.
            edge = match edge.right_kv() {
                Ok(internal_kv) => return Ok(internal_kv),
                Err(last_edge) => match last_edge.into_node().ascend() {
                    Ok(parent_edge) => parent_edge.forget_node_type(),
                    Err(root) => return Err(root.forget_type()),
                },
            }
        }
    }

    /// Given a leaf edge handle, returns [`Result::Ok`] with a handle to the neighboring KV
    /// on the left side, which is either in the same leaf node or in an ancestor node.
    /// If the leaf edge is the first one in the tree, returns [`Result::Err`] with the root node.
    pub fn next_back_kv(
        self,
    ) -> Result<
        Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, marker::KV>,
        NodeRef<BorrowType, K, V, marker::LeafOrInternal>,
    > {
        let mut edge = self.forget_node_type();
        loop {
            // Mirror image of `next_kv`: search leftward instead.
            edge = match edge.left_kv() {
                Ok(internal_kv) => return Ok(internal_kv),
                Err(last_edge) => match last_edge.into_node().ascend() {
                    Ok(parent_edge) => parent_edge.forget_node_type(),
                    Err(root) => return Err(root.forget_type()),
                },
            }
        }
    }
}
// Generates the two deallocating traversal helpers below; the `$adjacent_kv`
// parameter selects the direction (`right_kv` forward, `left_kv` backward).
macro_rules! def_next_kv_uncheched_dealloc {
    { unsafe fn $name:ident : $adjacent_kv:ident } => {
        /// Given a leaf edge handle into an owned tree, returns a handle to the next KV,
        /// while deallocating any node left behind.
        /// Unsafe for two reasons:
        /// - The caller must ensure that the leaf edge is not the last one in the tree.
        /// - The node pointed at by the given handle, and its ancestors, may be deallocated,
        /// while the reference to those nodes in the surviving ancestors is left dangling;
        /// thus using the returned handle to navigate further is dangerous.
        unsafe fn $name <K, V>(
            leaf_edge: Handle<NodeRef<marker::Owned, K, V, marker::Leaf>, marker::Edge>,
        ) -> Handle<NodeRef<marker::Owned, K, V, marker::LeafOrInternal>, marker::KV> {
            let mut edge = leaf_edge.forget_node_type();
            loop {
                edge = match edge.$adjacent_kv() {
                    Ok(internal_kv) => return internal_kv,
                    Err(last_edge) => {
                        // Exhausted this node: free it and continue in the parent.
                        let parent_edge = last_edge.into_node().deallocate_and_ascend();
                        unwrap_unchecked(parent_edge).forget_node_type()
                    }
                }
            }
        }
    };
}
def_next_kv_uncheched_dealloc! {unsafe fn next_kv_unchecked_dealloc: right_kv}
def_next_kv_uncheched_dealloc! {unsafe fn next_back_kv_unchecked_dealloc: left_kv}
/// Replaces the value behind the `v` unique reference with the value
/// `change` produces from it, returning the closure's extra result.
///
/// Safety: The change closure must not panic — between the `read` and the
/// `write`, `*v` and the moved-out copy alias, so unwinding would drop the
/// value twice.
#[inline]
unsafe fn replace<T, R>(v: &mut T, change: impl FnOnce(T) -> (T, R)) -> R {
    let current = ptr::read(v);
    let (next, ret) = change(current);
    ptr::write(v, next);
    ret
}
impl<'a, K, V> Handle<NodeRef<marker::Immut<'a>, K, V, marker::Leaf>, marker::Edge> {
    /// Moves the leaf edge handle to the next leaf edge and returns references to the
    /// key and value in between.
    /// Unsafe because the caller must ensure that the leaf edge is not the last one in the tree.
    pub unsafe fn next_unchecked(&mut self) -> (&'a K, &'a V) {
        replace(self, |leaf_edge| {
            let kv = leaf_edge.next_kv();
            // Caller guarantees a next KV exists, so `ok()` cannot be `None`.
            let kv = unwrap_unchecked(kv.ok());
            (kv.next_leaf_edge(), kv.into_kv())
        })
    }

    /// Moves the leaf edge handle to the previous leaf edge and returns references to the
    /// key and value in between.
    /// Unsafe because the caller must ensure that the leaf edge is not the first one in the tree.
    pub unsafe fn next_back_unchecked(&mut self) -> (&'a K, &'a V) {
        replace(self, |leaf_edge| {
            let kv = leaf_edge.next_back_kv();
            let kv = unwrap_unchecked(kv.ok());
            (kv.next_back_leaf_edge(), kv.into_kv())
        })
    }
}
impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge> {
    /// Moves the leaf edge handle to the next leaf edge and returns references to the
    /// key and value in between.
    /// Unsafe for two reasons:
    /// - The caller must ensure that the leaf edge is not the last one in the tree.
    /// - Using the updated handle may well invalidate the returned references.
    pub unsafe fn next_unchecked(&mut self) -> (&'a mut K, &'a mut V) {
        let kv = replace(self, |leaf_edge| {
            let kv = leaf_edge.next_kv();
            let kv = unwrap_unchecked(kv.ok());
            // `ptr::read` duplicates the handle so one copy can advance the
            // edge while the other is returned for the KV access below.
            (ptr::read(&kv).next_leaf_edge(), kv)
        });
        // Doing the descend (and perhaps another move) invalidates the references
        // returned by `into_kv_mut`, so we have to do this last.
        kv.into_kv_mut()
    }

    /// Moves the leaf edge handle to the previous leaf and returns references to the
    /// key and value in between.
    /// Unsafe for two reasons:
    /// - The caller must ensure that the leaf edge is not the first one in the tree.
    /// - Using the updated handle may well invalidate the returned references.
    pub unsafe fn next_back_unchecked(&mut self) -> (&'a mut K, &'a mut V) {
        let kv = replace(self, |leaf_edge| {
            let kv = leaf_edge.next_back_kv();
            let kv = unwrap_unchecked(kv.ok());
            (ptr::read(&kv).next_back_leaf_edge(), kv)
        });
        // Doing the descend (and perhaps another move) invalidates the references
        // returned by `into_kv_mut`, so we have to do this last.
        kv.into_kv_mut()
    }
}
impl<K, V> Handle<NodeRef<marker::Owned, K, V, marker::Leaf>, marker::Edge> {
    /// Moves the leaf edge handle to the next leaf edge and returns the key and value
    /// in between, while deallocating any node left behind.
    /// Unsafe for three reasons:
    /// - The caller must ensure that the leaf edge is not the last one in the tree
    /// and is not a handle previously resulting from counterpart `next_back_unchecked`.
    /// - If the leaf edge is the last edge of a node, that node and possibly ancestors
    /// will be deallocated, while the reference to those nodes in the surviving ancestor
    /// is left dangling; thus further use of the leaf edge handle is dangerous.
    /// It is, however, safe to call this method again on the updated handle.
    /// if the two preconditions above hold.
    /// - Using the updated handle may well invalidate the returned references.
    pub unsafe fn next_unchecked(&mut self) -> (K, V) {
        replace(self, |leaf_edge| {
            let kv = next_kv_unchecked_dealloc(leaf_edge);
            // Move the key and value out of the node by raw read; the node's
            // storage is (or will be) deallocated without dropping them.
            let k = ptr::read(kv.reborrow().into_kv().0);
            let v = ptr::read(kv.reborrow().into_kv().1);
            (kv.next_leaf_edge(), (k, v))
        })
    }

    /// Moves the leaf edge handle to the previous leaf edge and returns the key
    /// and value in between, while deallocating any node left behind.
    /// Unsafe for three reasons:
    /// - The caller must ensure that the leaf edge is not the first one in the tree
    /// and is not a handle previously resulting from counterpart `next_unchecked`.
    /// - If the lead edge is the first edge of a node, that node and possibly ancestors
    /// will be deallocated, while the reference to those nodes in the surviving ancestor
    /// is left dangling; thus further use of the leaf edge handle is dangerous.
    /// It is, however, safe to call this method again on the updated handle.
    /// if the two preconditions above hold.
    /// - Using the updated handle may well invalidate the returned references.
    pub unsafe fn next_back_unchecked(&mut self) -> (K, V) {
        replace(self, |leaf_edge| {
            let kv = next_back_kv_unchecked_dealloc(leaf_edge);
            let k = ptr::read(kv.reborrow().into_kv().0);
            let v = ptr::read(kv.reborrow().into_kv().1);
            (kv.next_back_leaf_edge(), (k, v))
        })
    }
}
impl<BorrowType, K, V> NodeRef<BorrowType, K, V, marker::LeafOrInternal> {
    /// Returns the leftmost leaf edge in or underneath a node - in other words, the edge
    /// you need first when navigating forward (or last when navigating backward).
    #[inline]
    pub fn first_leaf_edge(self) -> Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge> {
        // Follow the leftmost spine downward; tree height is logarithmic,
        // so the recursion depth stays small.
        match self.force() {
            Leaf(leaf) => leaf.first_edge(),
            Internal(internal) => internal.first_edge().descend().first_leaf_edge(),
        }
    }
    /// Returns the rightmost leaf edge in or underneath a node - in other words, the edge
    /// you need last when navigating forward (or first when navigating backward).
    #[inline]
    pub fn last_leaf_edge(self) -> Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge> {
        // Symmetric to `first_leaf_edge`, following the rightmost spine.
        match self.force() {
            Leaf(leaf) => leaf.last_edge(),
            Internal(internal) => internal.last_edge().descend().last_leaf_edge(),
        }
    }
}
impl<BorrowType, K, V> Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, marker::KV> {
    /// Returns the leaf edge closest to a KV for forward navigation.
    pub fn next_leaf_edge(self) -> Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge> {
        match self.force() {
            // In a leaf, the edge immediately right of the KV is itself a leaf edge.
            Leaf(leaf_kv) => leaf_kv.right_edge(),
            // In an internal node, descend right of the KV, then keep left.
            Internal(internal_kv) => internal_kv.right_edge().descend().first_leaf_edge(),
        }
    }
    /// Returns the leaf edge closest to a KV for backward navigation.
    pub fn next_back_leaf_edge(
        self,
    ) -> Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge> {
        match self.force() {
            // In a leaf, the edge immediately left of the KV is itself a leaf edge.
            Leaf(leaf_kv) => leaf_kv.left_edge(),
            // In an internal node, descend left of the KV, then keep right.
            Internal(internal_kv) => internal_kv.left_edge().descend().last_leaf_edge(),
        }
    }
}
| true |
82be518807299a6ea1ae5299f343399f77c63e5d
|
Rust
|
Azure/azure-sdk-for-rust
|
/sdk/core/src/request_options/if_sequence_number.rs
|
UTF-8
| 1,618 | 3.34375 | 3 |
[
"MIT",
"LicenseRef-scancode-generic-cla",
"LGPL-2.1-or-later"
] |
permissive
|
use crate::{headers, Header};
/// Conditional request header based on the value of the object's sequence number
///
/// Ref: <https://docs.microsoft.com/en-us/rest/api/storageservices/put-page-from-url>
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum IfSequenceNumber {
/// If the object's sequence number is less than the specified value, the
/// request proceeds; otherwise it fails with SequenceNumberConditionNotMet
/// error (HTTP status code 412 – Precondition Failed).
LessThan(u64),
/// If the object's sequence number is less than or equal to the specified
/// value, the request proceeds; otherwise it fails with the
/// SequenceNumberConditionNotMet error (HTTP status code 412 – Precondition
/// Failed).
LessOrEqual(u64),
/// If the object’s sequence number is equal to the specified value, the
/// request proceeds; otherwise it fails with SequenceNumberConditionNotMet
/// error (HTTP status code 412 – Precondition Failed).
Equal(u64),
}
impl Header for IfSequenceNumber {
    /// Each comparison kind maps onto its own dedicated request header.
    fn name(&self) -> headers::HeaderName {
        match self {
            Self::LessThan(_) => headers::IF_SEQUENCE_NUMBER_LT,
            Self::LessOrEqual(_) => headers::IF_SEQUENCE_NUMBER_LE,
            Self::Equal(_) => headers::IF_SEQUENCE_NUMBER_EQ,
        }
    }

    /// Regardless of the comparison kind, the header value is the bare number.
    fn value(&self) -> headers::HeaderValue {
        let sequence_number = match self {
            Self::Equal(n) | Self::LessOrEqual(n) | Self::LessThan(n) => n,
        };
        sequence_number.to_string().into()
    }
}
| true |
f948eef7a71ca46bf35c982d2a2cc622dac8dfc5
|
Rust
|
stobias123/rs-etrade
|
/src/accounts.rs
|
UTF-8
| 25,508 | 2.625 | 3 |
[
"MIT"
] |
permissive
|
use super::{session, Session, Store};
use crate::{empty_body, qs_params, MarketSession};
use crate::{Product, SortOrder};
use anyhow::Result;
use http::Method;
use session::CallbackProvider;
use std::sync::Arc;
use strum::EnumString;
/// Accounts API facade: wraps an authenticated session and exposes the
/// accounts-related REST endpoints (list, balance, portfolio, lots).
pub struct Api<T: Store> {
  // Shared, authenticated HTTP session used for every request.
  session: Arc<Session<T>>,
}
impl<T> Api<T>
where
T: Store,
{
pub fn new(session: Arc<Session<T>>) -> Self {
Self { session }
}
pub async fn list(&self, callbacks: impl CallbackProvider) -> Result<Vec<Account>> {
let resp: AccountListResponse = self
.session
.send(Method::GET, "/v1/accounts/list", empty_body(), callbacks)
.await?;
debug!("balance json: {}", serde_json::to_string_pretty(&resp)?);
Ok(resp.response.accounts.account)
}
pub async fn balance(
&self,
account_id_key: &str,
balance_request: BalanceRequest<'_>,
callbacks: impl CallbackProvider,
) -> Result<BalanceResponse> {
let balance: serde_json::Value = self
.session
.send(
Method::GET,
format!("/v1/accounts/{}/balance", account_id_key),
qs_params(&balance_request)?,
callbacks,
)
.await?;
debug!("balance json: {}", serde_json::to_string_pretty(&balance)?);
Ok(serde_json::from_value(balance.get("BalanceResponse").unwrap().clone())?)
}
pub async fn portfolio(
&self,
account_id_key: &str,
params: PortfolioRequest,
callbacks: impl CallbackProvider,
) -> Result<PortfolioResponse> {
let portfolio: serde_json::Value = self
.session
.send(
Method::GET,
format!("/v1/accounts/{}/portfolio", account_id_key),
qs_params(¶ms)?,
callbacks,
)
.await?;
debug!("portfolio json: {}", serde_json::to_string_pretty(&portfolio)?);
Ok(serde_json::from_value(
portfolio.get("PortfolioResponse").unwrap().clone(),
)?)
}
pub async fn position_lots(
&self,
account_id_key: &str,
position_id: &str,
callbacks: impl CallbackProvider,
) -> Result<PositionLotsResponse> {
let portfolio: serde_json::Value = self
.session
.send(
Method::GET,
format!("/v1/accounts/{}/portfolio/{}", account_id_key, position_id),
empty_body(),
callbacks,
)
.await?;
debug!("position lots json: {}", serde_json::to_string_pretty(&portfolio)?);
Ok(serde_json::from_value(
portfolio.get("PositionLotsResponse").unwrap().clone(),
)?)
}
}
/// Query parameters for the portfolio endpoint; every field is optional
/// and a `None` falls back to the API's server-side default.
#[derive(Debug, Clone, Deserialize, Serialize, Default)]
#[serde(rename_all = "camelCase", default)]
pub struct PortfolioRequest {
  pub count: Option<usize>,
  pub sort_by: Option<PortfolioColumn>,
  pub sort_order: Option<SortOrder>,
  pub market_session: Option<MarketSession>,
  pub totals_required: Option<bool>,
  pub lots_required: Option<bool>,
  pub view: Option<PortfolioView>,
}
/// Query parameters for the balance endpoint.
#[derive(Debug, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase", default)]
pub struct BalanceRequest<'a> {
  #[serde(skip_serializing_if = "Option::is_none")]
  pub account_type: Option<AccountType>,
  // Institution type; defaults to "BROKERAGE" (see the `Default` impl below).
  pub inst_type: &'a str,
  #[serde(skip_serializing_if = "Option::is_none")]
  pub real_time_nav: Option<bool>,
}
impl<'a> Default for BalanceRequest<'a> {
  // A brokerage-account request with no optional filters set.
  fn default() -> Self {
    Self {
      inst_type: "BROKERAGE",
      account_type: None,
      real_time_nav: None,
    }
  }
}
#[derive(Debug, Clone, Copy, Deserialize, Serialize, EnumString)]
#[strum(serialize_all = "lowercase")]
pub enum AccountType {
#[serde(rename = "AMMCHK")]
Ammchk,
#[serde(rename = "ARO")]
Aro,
#[serde(rename = "BCHK")]
Bchk,
#[serde(rename = "BENFIRA")]
Benfira,
#[serde(rename = "BENFROTHIRA")]
Benfrothira,
#[serde(rename = "BENF_ESTATE_IRA")]
BenfEstateIra,
#[serde(rename = "BENF_MINOR_IRA")]
BenfMinorIra,
#[serde(rename = "BENF_ROTH_ESTATE_IRA")]
BenfRothEstateIra,
#[serde(rename = "BENF_ROTH_MINOR_IRA")]
BenfRothMinorIra,
#[serde(rename = "BENF_ROTH_TRUST_IRA")]
BenfRothTrustIra,
#[serde(rename = "BENF_TRUST_IRA")]
BenfTrustIra,
#[serde(rename = "BRKCD")]
Brkcd,
#[serde(rename = "BROKER")]
Broker,
#[serde(rename = "CASH")]
Cash,
#[serde(rename = "C_CORP")]
CCorp,
#[serde(rename = "CONTRIBUTORY")]
Contributory,
#[serde(rename = "COVERDELL_ESA")]
CoverdellEsa,
#[serde(rename = "CONVERSION_ROTH_IRA")]
ConversionRothIra,
#[serde(rename = "CREDITCARD")]
Creditcard,
#[serde(rename = "COMM_PROP")]
CommProp,
#[serde(rename = "CONSERVATOR")]
Conservator,
#[serde(rename = "CORPORATION")]
Corporation,
#[serde(rename = "CSA")]
Csa,
#[serde(rename = "CUSTODIAL")]
Custodial,
#[serde(rename = "DVP")]
Dvp,
#[serde(rename = "ESTATE")]
Estate,
#[serde(rename = "EMPCHK")]
Empchk,
#[serde(rename = "EMPMMCA")]
Empmmca,
#[serde(rename = "ETCHK")]
Etchk,
#[serde(rename = "ETMMCHK")]
Etmmchk,
#[serde(rename = "HEIL")]
Heil,
#[serde(rename = "HELOC")]
Heloc,
#[serde(rename = "INDCHK")]
Indchk,
#[serde(rename = "INDIVIDUAL")]
Individual,
#[serde(rename = "INDIVIDUAL_K")]
IndividualK,
#[serde(rename = "INVCLUB")]
Invclub,
#[serde(rename = "INVCLUB_C_CORP")]
InvclubCCorp,
#[serde(rename = "INVCLUB_LLC_C_CORP")]
InvclubLlcCCorp,
#[serde(rename = "INVCLUB_LLC_PARTNERSHIP")]
InvclubLlcPartnership,
#[serde(rename = "INVCLUB_LLC_S_CORP")]
InvclubLlcSCorp,
#[serde(rename = "INVCLUB_PARTNERSHIP")]
InvclubPartnership,
#[serde(rename = "INVCLUB_S_CORP")]
InvclubSCorp,
#[serde(rename = "INVCLUB_TRUST")]
InvclubTrust,
#[serde(rename = "IRA_ROLLOVER")]
IraRollover,
#[serde(rename = "JOINT")]
Joint,
#[serde(rename = "JTTEN")]
Jtten,
#[serde(rename = "JTWROS")]
Jtwros,
#[serde(rename = "LLC_C_CORP")]
LlcCCorp,
#[serde(rename = "LLC_PARTNERSHIP")]
LlcPartnership,
#[serde(rename = "LLC_S_CORP")]
LlcSCorp,
#[serde(rename = "LLP")]
Llp,
#[serde(rename = "LLP_C_CORP")]
LlpCCorp,
#[serde(rename = "LLP_S_CORP")]
LlpSCorp,
#[serde(rename = "IRA")]
Ira,
#[serde(rename = "IRACD")]
Iracd,
#[serde(rename = "MONEY_PURCHASE")]
MoneyPurchase,
#[serde(rename = "MARGIN")]
Margin,
#[serde(rename = "MRCHK")]
Mrchk,
#[serde(rename = "MUTUAL_FUND")]
MutualFund,
#[serde(rename = "NONCUSTODIAL")]
Noncustodial,
#[serde(rename = "NON_PROFIT")]
NonProfit,
#[serde(rename = "OTHER")]
Other,
#[serde(rename = "PARTNER")]
Partner,
#[serde(rename = "PARTNERSHIP")]
Partnership,
#[serde(rename = "PARTNERSHIP_C_CORP")]
PartnershipCCorp,
#[serde(rename = "PARTNERSHIP_S_CORP")]
PartnershipSCorp,
#[serde(rename = "PDT_ACCOUNT")]
PdtAccount,
#[serde(rename = "PM_ACCOUNT")]
PmAccount,
#[serde(rename = "PREFCD")]
Prefcd,
#[serde(rename = "PREFIRACD")]
Prefiracd,
#[serde(rename = "PROFIT_SHARING")]
ProfitSharing,
#[serde(rename = "PROPRIETARY")]
Proprietary,
#[serde(rename = "REGCD")]
Regcd,
#[serde(rename = "ROTHIRA")]
Rothira,
#[serde(rename = "ROTH_INDIVIDUAL_K")]
RothIndividualK,
#[serde(rename = "ROTH_IRA_MINORS")]
RothIraMinors,
#[serde(rename = "SARSEPIRA")]
Sarsepira,
#[serde(rename = "S_CORP")]
SCorp,
#[serde(rename = "SEPIRA")]
Sepira,
#[serde(rename = "SIMPLE_IRA")]
SimpleIra,
#[serde(rename = "TIC")]
Tic,
#[serde(rename = "TRD_IRA_MINORS")]
TrdIraMinors,
#[serde(rename = "TRUST")]
Trust,
#[serde(rename = "VARCD")]
Varcd,
#[serde(rename = "VARIRACD")]
Variracd,
}
#[derive(Debug, Clone, Copy, Deserialize, Serialize, EnumString)]
#[strum(serialize_all = "lowercase")]
pub enum PortfolioView {
#[serde(rename = "PERFORMANCE")]
Performance,
#[serde(rename = "FUNDAMENTAL")]
Fundamental,
#[serde(rename = "OPTIONSWATCH")]
Optionswatch,
#[serde(rename = "QUICK")]
Quick,
#[serde(rename = "COMPLETE")]
Complete,
}
#[derive(Debug, Clone, Copy, Deserialize, Serialize, EnumString)]
#[strum(serialize_all = "lowercase")]
pub enum PortfolioColumn {
#[serde(rename = "SYMBOL")]
Symbol,
#[serde(rename = "TYPE_NAME")]
TypeName,
#[serde(rename = "EXCHANGE_NAME")]
ExchangeName,
#[serde(rename = "CURRENCY")]
Currency,
#[serde(rename = "QUANTITY")]
Quantity,
#[serde(rename = "LONG_OR_SHORT")]
LongOrShort,
#[serde(rename = "DATE_ACQUIRED")]
DateAcquired,
#[serde(rename = "PRICEPAID")]
Pricepaid,
#[serde(rename = "TOTAL_GAIN")]
TotalGain,
#[serde(rename = "TOTAL_GAIN_PCT")]
TotalGainPct,
#[serde(rename = "MARKET_VALUE")]
MarketValue,
#[serde(rename = "BI")]
Bi,
#[serde(rename = "ASK")]
Ask,
#[serde(rename = "PRICE_CHANGE")]
PriceChange,
#[serde(rename = "PRICE_CHANGE_PCT")]
PriceChangePct,
#[serde(rename = "VOLUME")]
Volume,
#[serde(rename = "WEEK_52_HIGH")]
Week52High,
#[serde(rename = "WEEK_52_LOW")]
Week52Low,
#[serde(rename = "EPS")]
Eps,
#[serde(rename = "PE_RATIO")]
PeRatio,
#[serde(rename = "OPTION_TYPE")]
OptionType,
#[serde(rename = "STRIKE_PRICE")]
StrikePrice,
#[serde(rename = "PREMIUM")]
Premium,
#[serde(rename = "EXPIRATION")]
Expiration,
#[serde(rename = "DAYS_GAIN")]
DaysGain,
#[serde(rename = "COMMISSION")]
Commission,
#[serde(rename = "MARKETCAP")]
Marketcap,
#[serde(rename = "PREV_CLOSE")]
PrevClose,
#[serde(rename = "OPEN")]
Open,
#[serde(rename = "DAYS_RANGE")]
DaysRange,
#[serde(rename = "TOTAL_COST")]
TotalCost,
#[serde(rename = "DAYS_GAIN_PCT")]
DaysGainPct,
#[serde(rename = "PCT_OF_PORTFOLIO")]
PctOfPortfolio,
#[serde(rename = "LAST_TRADE_TIME")]
LastTradeTime,
#[serde(rename = "BASE_SYMBOL_PRICE")]
BaseSymbolPrice,
#[serde(rename = "WEEK_52_RANGE")]
Week52Range,
#[serde(rename = "LAST_TRADE")]
LastTrade,
#[serde(rename = "SYMBOL_DESC")]
SymbolDesc,
#[serde(rename = "BID_SIZE")]
BidSize,
#[serde(rename = "ASK_SIZE")]
AskSize,
#[serde(rename = "OTHER_FEES")]
OtherFees,
#[serde(rename = "HELD_AS")]
HeldAs,
#[serde(rename = "OPTION_MULTIPLIER")]
OptionMultiplier,
#[serde(rename = "DELIVERABLES")]
Deliverables,
#[serde(rename = "COST_PERSHARE")]
CostPershare,
#[serde(rename = "DIVIDEND")]
Dividend,
#[serde(rename = "DIV_YIELD")]
DivYield,
#[serde(rename = "DIV_PAY_DATE")]
DivPayDate,
#[serde(rename = "EST_EARN")]
EstEarn,
#[serde(rename = "EX_DIV_DATE")]
ExDivDate,
#[serde(rename = "TEN_DAY_AVG_VOL")]
TenDayAvgVol,
#[serde(rename = "BETA")]
Beta,
#[serde(rename = "BID_ASK_SPREAD")]
BidAskSpread,
#[serde(rename = "MARGINABLE")]
Marginable,
#[serde(rename = "DELTA_52WK_HI")]
Delta52WkHi,
#[serde(rename = "DELTA_52WK_LOW")]
Delta52WkLow,
#[serde(rename = "PERF_1MON")]
Perf1Mon,
#[serde(rename = "ANNUAL_DIV")]
AnnualDiv,
#[serde(rename = "PERF_12MON")]
Perf12Mon,
#[serde(rename = "PERF_3MON")]
Perf3Mon,
#[serde(rename = "PERF_6MON")]
Perf6Mon,
#[serde(rename = "PRE_DAY_VOL")]
PreDayVol,
#[serde(rename = "SV_1MON_AVG")]
Sv1MonAvg,
#[serde(rename = "SV_10DAY_AVG")]
Sv10DayAvg,
#[serde(rename = "SV_20DAY_AVG")]
Sv20DayAvg,
#[serde(rename = "SV_2MON_AVG")]
Sv2MonAvg,
#[serde(rename = "SV_3MON_AVG")]
Sv3MonAvg,
#[serde(rename = "SV_4MON_AVG")]
Sv4MonAvg,
#[serde(rename = "SV_6MON_AVG")]
Sv6MonAvg,
#[serde(rename = "DELTA")]
Delta,
#[serde(rename = "GAMMA")]
Gamma,
#[serde(rename = "IV_PCT")]
IvPct,
#[serde(rename = "THETA")]
Theta,
#[serde(rename = "VEGA")]
Vega,
#[serde(rename = "ADJ_NONADJ_FLAG")]
AdjNonadjFlag,
#[serde(rename = "DAYS_EXPIRATION")]
DaysExpiration,
#[serde(rename = "OPEN_INTEREST")]
OpenInterest,
#[serde(rename = "INSTRINIC_VALUE")]
InstrinicValue,
#[serde(rename = "RHO")]
Rho,
#[serde(rename = "TYPE_CODE")]
TypeCode,
#[serde(rename = "DISPLAY_SYMBOL")]
DisplaySymbol,
#[serde(rename = "AFTER_HOURS_PCTCHANGE")]
AfterHoursPctchange,
#[serde(rename = "PRE_MARKET_PCTCHANGE")]
PreMarketPctchange,
#[serde(rename = "EXPAND_COLLAPSE_FLAG")]
ExpandCollapseFlag,
}
#[derive(Debug, Clone, Deserialize, Serialize, Default)]
struct AccountListResponse {
#[serde(rename = "AccountListResponse")]
response: AccountList,
}
#[derive(Debug, Clone, Deserialize, Serialize, Default)]
struct AccountList {
#[serde(rename = "Accounts")]
accounts: AccountHolder,
}
#[derive(Debug, Clone, Deserialize, Serialize, Default)]
struct AccountHolder {
#[serde(rename = "Account", skip_serializing_if = "Vec::is_empty")]
account: Vec<Account>,
}
#[derive(Debug, Clone, Deserialize, Serialize, Default)]
#[serde(rename_all = "camelCase", default)]
pub struct Account {
#[serde(skip_serializing_if = "Option::is_none")]
pub inst_no: Option<i32>,
pub account_id: String,
pub account_id_key: String,
pub account_mode: String,
pub account_desc: String,
pub account_name: String,
pub account_type: String,
pub institution_type: String,
pub account_status: String,
pub closed_date: i64,
}
#[derive(Debug, Clone, Deserialize, Serialize, Default)]
#[serde(rename_all = "camelCase", default)]
pub struct BalanceResponse {
pub account_id: String,
#[serde(skip_serializing_if = "Option::is_none")]
pub institution_type: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub as_of_date: Option<i64>,
pub account_type: String,
pub option_level: String,
pub account_description: String,
#[serde(skip_serializing_if = "Option::is_none")]
pub quote_mode: Option<i32>,
#[serde(skip_serializing_if = "Option::is_none")]
pub day_trader_status: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub account_mode: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub account_desc: Option<String>,
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub open_calls: Vec<OpenCalls>,
#[serde(rename = "Cash", skip_serializing_if = "Option::is_none")]
pub cash: Option<Cash>,
#[serde(rename = "Margin", skip_serializing_if = "Option::is_none")]
pub margin: Option<Margin>,
#[serde(skip_serializing_if = "Option::is_none")]
pub lending: Option<Lending>,
#[serde(rename = "Computed")]
pub computed_balance: ComputedBalance,
}
#[derive(Debug, Clone, Deserialize, Serialize, Default)]
#[serde(rename_all = "camelCase", default)]
pub struct OpenCalls {
#[serde(skip_serializing_if = "Option::is_none")]
pub min_equity_call: Option<f64>,
#[serde(skip_serializing_if = "Option::is_none")]
pub fed_call: Option<f64>,
pub cash_call: f64,
#[serde(skip_serializing_if = "Option::is_none")]
pub house_call: Option<f64>,
}
#[derive(Debug, Clone, Deserialize, Serialize, Default)]
#[serde(rename_all = "camelCase", default)]
pub struct Cash {
pub funds_for_open_orders_cash: f64,
pub money_mkt_balance: f64,
}
#[derive(Debug, Clone, Deserialize, Serialize, Default)]
#[serde(rename_all = "camelCase", default)]
pub struct Margin {
pub dt_cash_open_order_reserve: f64,
pub dt_margin_open_order_reserve: f64,
}
#[derive(Debug, Clone, Deserialize, Serialize, Default)]
#[serde(rename_all = "camelCase", default)]
pub struct Lending {
pub current_balance: f64,
pub credit_line: f64,
pub outstanding_balance: f64,
pub min_payment_due: f64,
pub amount_past_due: f64,
pub available_credit: f64,
pub ytd_interest_paid: f64,
pub last_ytd_interest_paid: f64,
pub payment_due_date: i64,
pub last_payment_received_date: i64,
pub payment_received_mtd: f64,
}
#[derive(Debug, Clone, Deserialize, Serialize, Default)]
#[serde(rename_all = "camelCase", default)]
pub struct ComputedBalance {
pub cash_available_for_investment: f64,
pub cash_available_for_withdrawal: f64,
#[serde(skip_serializing_if = "Option::is_none")]
pub total_available_for_withdrawal: Option<f64>,
pub net_cash: f64,
pub cash_balance: f64,
pub settled_cash_for_investment: f64,
pub un_settled_cash_for_investment: f64,
pub funds_withheld_from_purchase_power: f64,
pub funds_withheld_from_withdrawal: f64,
#[serde(skip_serializing_if = "Option::is_none")]
pub margin_buying_power: Option<f64>,
#[serde(skip_serializing_if = "Option::is_none")]
pub cash_buying_power: Option<f64>,
#[serde(skip_serializing_if = "Option::is_none")]
pub dt_margin_buying_power: Option<f64>,
#[serde(skip_serializing_if = "Option::is_none")]
pub dt_cash_buying_power: Option<f64>,
#[serde(skip_serializing_if = "Option::is_none")]
pub margin_balance: Option<f64>,
#[serde(skip_serializing_if = "Option::is_none")]
pub short_adjust_balance: Option<f64>,
#[serde(skip_serializing_if = "Option::is_none")]
pub regt_equity: Option<f64>,
#[serde(skip_serializing_if = "Option::is_none")]
pub regt_equity_percent: Option<f64>,
#[serde(skip_serializing_if = "Option::is_none")]
pub account_balance: Option<f64>,
#[serde(rename = "OpenCalls")]
pub open_calls: OpenCalls,
#[serde(rename = "RealTimeValues")]
pub real_time_values: RealTimeValues,
#[serde(rename = "PortfolioMargin")]
pub portfolio_margin: Option<PortfolioMargin>,
}
#[derive(Debug, Clone, Deserialize, Serialize, Default)]
#[serde(rename_all = "camelCase", default)]
pub struct PortfolioMargin {
pub dt_cash_open_order_reserve: f64,
pub dt_margin_open_order_reserve: f64,
pub liquidating_equity: f64,
pub house_excess_equity: f64,
pub total_house_requirement: f64,
pub excess_equity_minus_requirement: f64,
pub total_margin_rqmts: f64,
pub avail_excess_equity: f64,
pub excess_equity: f64,
pub open_order_reserve: f64,
pub funds_on_hold: f64,
}
#[derive(Debug, Clone, Deserialize, Serialize, Default)]
#[serde(rename_all = "camelCase", default)]
pub struct RealTimeValues {
pub total_account_value: f64,
pub net_mv: f64,
pub net_mv_long: f64,
#[serde(skip_serializing_if = "Option::is_none")]
pub net_mv_short: Option<f64>,
#[serde(skip_serializing_if = "Option::is_none")]
pub total_long_value: Option<f64>,
}
#[derive(Debug, Clone, Deserialize, Serialize, Default)]
#[serde(rename_all = "camelCase", default)]
pub struct PortfolioResponse {
#[serde(skip_serializing_if = "Option::is_none")]
pub totals: Option<PortfolioTotals>,
#[serde(rename = "AccountPortfolio", skip_serializing_if = "Vec::is_empty")]
pub account_portfolio: Vec<AccountPortfolio>,
}
#[derive(Debug, Clone, Deserialize, Serialize, Default)]
#[serde(rename_all = "camelCase", default)]
pub struct PositionLotsResponse {
#[serde(rename = "PositionLot", skip_serializing_if = "Vec::is_empty")]
pub position_lot: Vec<PositionLot>,
}
#[derive(Debug, Clone, Deserialize, Serialize, Default)]
#[serde(rename_all = "camelCase", default)]
pub struct PortfolioTotals {
pub todays_gain_loss: f64,
pub todays_gain_loss_pct: f64,
pub total_market_value: f64,
pub total_gain_loss: f64,
pub total_gain_loss_pct: f64,
pub total_price_paid: f64,
pub cash_balance: f64,
}
#[derive(Debug, Clone, Deserialize, Serialize, Default)]
#[serde(rename_all = "camelCase", default)]
pub struct AccountPortfolio {
pub account_id: String,
pub next: String,
pub total_no_of_pages: i32,
pub next_page_no: String,
#[serde(rename = "Position", skip_serializing_if = "Vec::is_empty")]
pub position: Vec<PortfolioPosition>,
}
#[derive(Debug, Clone, Deserialize, Serialize, Default)]
#[serde(rename_all = "camelCase", default)]
pub struct PortfolioPosition {
pub position_id: i64,
pub account_id: String,
#[serde(rename = "Product")]
pub product: Product,
pub osi_key: String,
pub symbol_description: String,
pub date_acquired: i64,
pub price_paid: f64,
pub price: f64,
pub commissions: f64,
pub other_fees: f64,
pub quantity: f64,
pub position_indicator: String,
pub position_type: String,
pub change: f64,
pub change_pct: f64,
pub days_gain: f64,
pub days_gain_pct: f64,
pub market_value: f64,
pub total_cost: f64,
pub total_gain: f64,
pub total_gain_pct: f64,
pub pct_of_portfolio: f64,
pub cost_per_share: f64,
pub today_commissions: f64,
pub today_fees: f64,
pub today_price_paid: f64,
pub today_quantity: f64,
pub quotestatus: String,
#[serde(rename = "dateTimeUTC")]
pub date_time_utc: i64,
pub adj_prev_close: f64,
#[serde(rename = "Performance", skip_serializing_if = "Option::is_none")]
pub performance: Option<PerformanceView>,
#[serde(rename = "Fundamental", skip_serializing_if = "Option::is_none")]
pub fundamental: Option<FundamentalView>,
#[serde(rename = "OptionsWatch", skip_serializing_if = "Option::is_none")]
pub options_watch: Option<OptionsWatchView>,
#[serde(rename = "Quick", skip_serializing_if = "Option::is_none")]
pub quick: Option<QuickView>,
#[serde(rename = "Complete", skip_serializing_if = "Option::is_none")]
pub complete: Option<CompleteView>,
pub lots_details: String,
pub quote_details: String,
#[serde(skip_serializing_if = "Vec::is_empty")]
pub position_lot: Vec<PositionLot>,
}
#[derive(Debug, Clone, Deserialize, Serialize, Default)]
#[serde(rename_all = "camelCase", default)]
pub struct PerformanceView {
pub change: f64,
pub change_pct: f64,
pub last_trade: f64,
pub days_gain: f64,
pub total_gain: f64,
pub total_gain_pct: f64,
pub market_value: f64,
#[serde(skip_serializing_if = "Option::is_none")]
pub quote_status: Option<QuoteStatus>,
pub last_trade_time: i64,
}
#[derive(Debug, Clone, Copy, Deserialize, Serialize, EnumString)]
#[strum(serialize_all = "lowercase")]
pub enum QuoteStatus {
#[serde(rename = "REALTIME")]
Realtime,
#[serde(rename = "DELAYED")]
Delayed,
#[serde(rename = "CLOSING")]
Closing,
#[serde(rename = "EH_REALTIME")]
EhRealtime,
#[serde(rename = "EH_BEFORE_OPEN")]
EhBeforeOpen,
#[serde(rename = "EH_CLOSED")]
EhClosed,
}
#[derive(Debug, Clone, Deserialize, Serialize, Default)]
#[serde(rename_all = "camelCase", default)]
pub struct FundamentalView {
pub last_trade: f64,
pub last_trade_time: i64,
pub change: f64,
pub change_pct: f64,
pub pe_ratio: f64,
pub eps: f64,
pub dividend: f64,
pub div_yield: f64,
pub market_cap: f64,
pub week_52_range: String,
#[serde(skip_serializing_if = "Option::is_none")]
pub quote_status: Option<QuoteStatus>,
}
#[derive(Debug, Clone, Deserialize, Serialize, Default)]
#[serde(rename_all = "camelCase", default)]
pub struct OptionsWatchView {
pub last_trade: f64,
pub last_trade_time: i64,
pub base_symbol_and_price: String,
pub premium: f64,
pub bid: f64,
pub ask: f64,
#[serde(skip_serializing_if = "Option::is_none")]
pub quote_status: Option<QuoteStatus>,
}
#[derive(Debug, Clone, Deserialize, Serialize, Default)]
#[serde(rename_all = "camelCase", default)]
pub struct QuickView {
pub last_trade: f64,
pub last_trade_time: i64,
pub change: f64,
pub change_pct: f64,
pub volume: i64,
pub seven_day_current_yield: f64,
pub annual_total_return: f64,
pub weighted_average_maturity: f64,
#[serde(skip_serializing_if = "Option::is_none")]
pub quote_status: Option<QuoteStatus>,
}
#[derive(Debug, Clone, Deserialize, Serialize, Default)]
#[serde(rename_all = "camelCase", default)]
pub struct CompleteView {
pub price_adjusted_flag: bool,
pub price: f64,
pub adj_price: f64,
pub change: f64,
pub change_pct: f64,
pub prev_close: f64,
pub adj_prev_close: f64,
pub last_trade: f64,
pub last_trade_time: i64,
pub adj_last_trade: f64,
pub symbol_description: String,
pub perform_1_month: f64,
pub perform_3_month: f64,
pub perform_6_month: f64,
pub perform_12_month: f64,
pub prev_day_volume: i64,
pub ten_day_volume: i64,
pub beta: f64,
pub sv_10_days_avg: f64,
pub sv_20_days_avg: f64,
pub sv_1_mon_avg: f64,
pub sv_2_mon_avg: f64,
pub sv_3_mon_avg: f64,
pub sv_4_mon_avg: f64,
pub sv_6_mon_avg: f64,
pub week_52_high: f64,
pub week_52_low: f64,
pub week_52_range: String,
pub market_cap: f64,
pub days_range: String,
pub delta_52_wk_high: f64,
pub delta_52_wk_low: f64,
pub currency: String,
pub exchange: String,
pub marginable: bool,
pub bid: f64,
pub ask: f64,
pub bid_ask_spread: f64,
pub bid_size: i64,
pub ask_size: i64,
pub open: f64,
pub delta: f64,
pub gamma: f64,
pub iv_pct: f64,
pub rho: f64,
pub theta: f64,
pub vega: f64,
pub base_symbol_and_price: String,
pub premium: f64,
pub days_to_expiration: i32,
pub intrinsic_value: f64,
pub open_interest: f64,
pub options_adjusted_flag: bool,
pub deliverables_str: String,
pub option_multiplier: f64,
pub est_earnings: f64,
pub eps: f64,
pub pe_ratio: f64,
pub annual_dividend: f64,
pub dividend: f64,
pub div_yield: f64,
pub div_pay_date: i64,
pub ex_dividend_date: i64,
pub cusip: String,
#[serde(skip_serializing_if = "Option::is_none")]
pub quote_status: Option<QuoteStatus>,
}
#[derive(Debug, Clone, Deserialize, Serialize, Default)]
#[serde(rename_all = "camelCase", default)]
pub struct PositionLot {
pub position_id: i64,
pub position_log_id: i64,
pub price: f64,
pub term_code: i32,
pub days_gain: f64,
pub day_gain_pct: f64,
pub market_value: f64,
pub total_cost: f64,
pub total_cost_for_gain_pct: f64,
pub total_gain: f64,
pub lot_source_code: i32,
pub original_qty: f64,
pub remaining_qty: f64,
pub available_qty: f64,
pub order_no: i64,
pub leg_no: i32,
pub acquired_date: i64,
pub location_code: i32,
pub exchange_rate: f64,
pub settlement_currency: String,
pub payment_currency: String,
pub adj_price: f64,
pub comm_per_share: f64,
pub fees_per_share: f64,
pub premium_adj: f64,
pub short_type: i32,
}
| true |
3c810835e4ee719ba3af9ada3feee041030d7c13
|
Rust
|
amouat/cabot
|
/src/dns.rs
|
UTF-8
| 898 | 2.921875 | 3 |
[
"BSD-3-Clause"
] |
permissive
|
//! DNS Resolution
use std::io::{Write, stderr};
use std::net::{ToSocketAddrs, SocketAddr};
use log::LogLevel::Info;
/// Resolves an authority string ("host:port") to a socket address.
pub struct Resolver {
    // When set, resolution progress is echoed to stderr even when the
    // `log` facility is not enabled at info level.
    verbose: bool,
}
impl Resolver {
    /// Creates a resolver; `verbose` mirrors progress to stderr when the
    /// `log` facility is not active at info level.
    pub fn new(verbose: bool) -> Self {
        Resolver { verbose }
    }

    /// Resolves `authority` ("host:port") to the first socket address returned
    /// by the system resolver.
    ///
    /// # Panics
    /// Panics with a descriptive message if the authority cannot be parsed or
    /// resolved, or if resolution yields no addresses at all.
    pub fn get_addr(&self, authority: &str) -> SocketAddr {
        debug!("Resolving TCP Endpoint for authority {}", authority);
        let addr = authority
            .to_socket_addrs()
            .expect("failed to resolve authority")
            .next() // take the first resolved address
            .expect("authority resolved to no addresses");
        if log_enabled!(Info) {
            info!("Authority {} has been resolved to {}", authority, addr);
        } else if self.verbose {
            writeln!(&mut stderr(),
                     "* Authority {} has been resolved to {}",
                     authority,
                     addr)
                .unwrap();
        }
        addr
    }
}
| true |
0930bad1f19ba1eff55cc580253074d8a64e2462
|
Rust
|
JackBmann/scheme_interpreter
|
/src/interpreter.rs
|
UTF-8
| 848 | 3.015625 | 3 |
[] |
no_license
|
use evaluator::*;
use parser::*;
use lexer::*;
use environment::*;
use environment_parser::*;
use std::collections::HashMap;
/// Tokenizes, parses, and evaluates `s` against the variable bindings in `e`.
///
/// Panics if evaluation produces no value.
pub fn interpret_with_environment(s: String, e: Environment) -> f64 {
    let mut token_stream = tokenize(&s);
    let parsed = parse(&mut token_stream, &e);
    evaluate(&parsed).unwrap()
}
/// Like `interpret_with_environment`, but first parses the environment
/// itself from its textual representation in `e`.
pub fn interpret_with_environment_string(s: String, e: String) -> f64 {
    let environment = parse_to_environment(&mut tokenize(&e));
    interpret_with_environment(s, environment)
}
pub fn interpret(s: String) -> f64 {
let hash: HashMap<String, Expression> = HashMap::new();
let env = Environment { variables: hash };
let mut tokens = tokenize(&s);
let expression = parse(&mut tokens, &env);
let result = evaluate(&expression);
return result.unwrap();
}
| true |
43b397546f4ebb9452129a582e7bb7b4d5cd3c6e
|
Rust
|
ayrat-playground/exercism_rust
|
/bracket-push/src/lib.rs
|
UTF-8
| 951 | 3.96875 | 4 |
[] |
no_license
|
/// Borrowed view over a string whose brackets can be checked for balance.
pub struct Brackets<'a> {
    // The text to inspect; only '(', '[', '{' and their closers are significant.
    data: &'a str
}
impl<'a> From<&'a str> for Brackets<'a> {
    /// Wraps a string slice without copying it.
    fn from(string: &'a str) -> Brackets<'a> {
        Brackets { data: string }
    }
}
impl<'a> Brackets<'a> {
    /// Returns `true` when every bracket in the wrapped string is closed by a
    /// matching bracket in the correct order. Non-bracket characters are ignored.
    pub fn are_balanced(&self) -> bool {
        // Classic stack-based scan: push openers, pop-and-match on closers.
        let mut open_brackets: Vec<char> = Vec::new();
        for ch in self.data.chars() {
            if matches!(ch, '(' | '[' | '{') {
                open_brackets.push(ch);
            } else if matches!(ch, ')' | ']' | '}')
                && !matches_bracket(&mut open_brackets, ch)
            {
                return false;
            }
        }
        // Any opener left unclosed means the string is unbalanced.
        open_brackets.is_empty()
    }
}
/// Pops the most recently seen opening bracket off `stack` and reports whether
/// it matches `closing_bracket`.
///
/// Returns `false` when the stack is empty, when the popped bracket is of a
/// different kind, or when `closing_bracket` is not a closing bracket at all.
/// Replaces the original sentinel characters (`'e'`, `' '`) with an explicit
/// `Option` comparison.
fn matches_bracket(stack: &mut Vec<char>, closing_bracket: char) -> bool {
    let expected_opener = match closing_bracket {
        ')' => '(',
        ']' => '[',
        '}' => '{',
        // Not a closing bracket: nothing can match it.
        _ => return false,
    };
    // `pop()` yields `None` on an empty stack, which can never equal `Some(_)`.
    stack.pop() == Some(expected_opener)
}
| true |
9d1b98b26caa966df7f8ee0cdb9096df50ce3f66
|
Rust
|
spihill/rust-library
|
/src/math/numerical.rs
|
UTF-8
| 1,984 | 2.921875 | 3 |
[] |
no_license
|
use std::ops::{Add, Sub, Mul, Div, Rem, AddAssign, SubAssign, MulAssign, DivAssign, RemAssign, Shl, ShlAssign, Shr, ShrAssign};
use std::convert::From;
/// Core integer abstraction: any `Copy + Ord` type supporting the full
/// set of arithmetic, bit-shift, and compound-assignment operators.
pub trait Integer : Add<Output=Self> + Sub<Output=Self> + Mul<Output=Self> + Div<Output=Self> + Rem<Output=Self>
    + AddAssign + SubAssign + MulAssign + DivAssign + RemAssign
    + Shl<Output=Self> + Shr<Output=Self>
    + ShlAssign + ShrAssign
    + Ord + Copy {
    /// Additive identity (0).
    fn zero() -> Self;
    /// Multiplicative identity (1).
    fn one() -> Self;
    /// Number of leading zero bits in the binary representation.
    fn leading_zeros(self) -> u32;
    /// Largest representable value of the type.
    fn max_value() -> Self;
    /// Smallest representable value of the type.
    fn min_value() -> Self;
}
/// An `Integer` whose bit width is fixed at compile time — implemented
/// below for the fixed-width primitives but not for `usize`/`isize`,
/// whose width is platform dependent.
pub trait SizedInteger : Integer {
    /// Width of the type in bits (e.g. 32 for `u32`).
    fn bit_size() -> u32;
}
/// Marker for unsigned integer types.
pub trait UnSigned : Integer {}
/// Marker for signed integer types.
pub trait Signed : Integer {}
/// Unsigned integers losslessly convertible from `u32` (i.e. >= 32 bits).
pub trait UnSigned32 : UnSigned + From<u32> {}
/// Signed integers losslessly convertible from `i32` (i.e. >= 32 bits).
pub trait Signed32 : Signed + From<i32> {}
// Implements `Integer` for each listed primitive by forwarding to the
// type's inherent methods/associated functions.
macro_rules! impl_integer {
    ($($t:ty),*) => {
        $(
            impl Integer for $t {
                fn zero() -> $t {0}
                fn one() -> $t {1}
                fn leading_zeros(self) -> u32 {self.leading_zeros()}
                fn max_value() -> Self {<$t>::max_value()}
                fn min_value() -> Self {<$t>::min_value()}
            }
        )*
    };
}
// Implements both `Integer` and `SizedInteger` for `type, bit-width`
// pairs; the width is passed explicitly alongside each type.
macro_rules! impl_sized_integer {
    ($($t:ty,$bs:expr);*) => {
        $(
            impl_integer!($t);
            impl SizedInteger for $t {
                fn bit_size() -> u32 {$bs}
            }
        )*
    };
}
// Marker-trait impls (empty bodies) for the listed types.
macro_rules! impl_unsigned_integer {
    ($($t:ty),*) => {
        $(
            impl UnSigned for $t {}
        )*
    };
}
macro_rules! impl_signed_integer {
    ($($t:ty),*) => {
        $(
            impl Signed for $t {}
        )*
    };
}
macro_rules! impl_unsigned_32_integer {
    ($($t:ty),*) => {
        $(
            impl UnSigned32 for $t {}
        )*
    };
}
macro_rules! impl_signed_32_integer {
    ($($t:ty),*) => {
        $(
            impl Signed32 for $t {}
        )*
    };
}
// usize/isize have platform-dependent widths, so they only get
// `Integer`, not `SizedInteger`.
impl_integer!(usize, isize);
impl_sized_integer!(u8, 8; u16, 16; u32, 32; u64, 64; u128, 128;
    i8, 8; i16, 16; i32, 32; i64, 64; i128, 128);
impl_unsigned_integer!(u8, u16, u32, u64, u128, usize);
// Only types implementing `From<u32>` / `From<i32>` qualify for the
// 32-bit-capable markers (hence u8/u16 and i8/i16 are excluded).
impl_unsigned_32_integer!(u32, u64, u128);
impl_signed_integer!(i8, i16, i32, i64, i128, isize);
impl_signed_32_integer!(i32, i64, i128);
| true |
0882e14846f9a9b2d96e1510db25fc448ad6c2ce
|
Rust
|
nyantec/sensorlog
|
/src/logfile_partition.rs
|
UTF-8
| 2,728 | 2.703125 | 3 |
[
"MirOS"
] |
permissive
|
/**
* Copyright © 2018 nyantec GmbH <[email protected]>
* Authors:
* Paul Asmuth <[email protected]>
*
* Provided that these terms and disclaimer and all copyright notices
* are retained or reproduced in an accompanying document, permission
* is granted to deal in this work without restriction, including un‐
* limited rights to use, publicly perform, distribute, sell, modify,
* merge, give away, or sublicence.
*
* This work is provided “AS IS” and WITHOUT WARRANTY of any kind, to
* the utmost extent permitted by applicable law, neither express nor
* implied; without malicious intent or gross negligence. In no event
* may a licensor, author or contributor be held liable for indirect,
* direct, other damage, loss, or other issues arising in any way out
* of dealing in the work, even if advised of the possibility of such
* damage or existence of a defect, except proven that it results out
* of said person’s immediate fault when using the work as intended.
*/
use measure::Measurement;
use std::fs;
use std::path::{Path, PathBuf};
#[derive(Debug, Clone)]
pub struct LogfilePartition {
    // Directory in which this partition's backing file lives.
    path: PathBuf,
    // Timestamp of the newest measurement appended so far.
    time_head: u64,
    // Timestamp the partition was created with; immutable, and used to
    // name the backing file ("<time_tail>.log", see get_file_name).
    time_tail: u64,
    // Byte offset at which the next record will be written.
    offset: u64,
}
impl LogfilePartition {
    /// Starts a brand-new, empty partition rooted at `path`, with both
    /// head and tail timestamps anchored at `time`. No file I/O happens
    /// here; the file is created lazily on first append.
    pub fn create(path: &Path, time: u64) -> Result<LogfilePartition, ::Error> {
        let partition = LogfilePartition {
            path: path.to_owned(),
            time_head: time,
            time_tail: time,
            offset: 0,
        };
        info!(
            "Creating new logfile partition; path={:?}",
            partition.get_file_path()
        );
        Ok(partition)
    }

    /// Re-opens an existing partition from persisted metadata; performs
    /// no I/O or validation.
    pub fn open(path: &Path, time_head: u64, time_tail: u64, offset: u64) -> LogfilePartition {
        LogfilePartition {
            path: path.to_owned(),
            time_head,
            time_tail,
            offset,
        }
    }

    /// Appends one measurement to the partition file, then advances the
    /// write offset and the head timestamp.
    ///
    /// Fails (via `err_user!`) when `measurement.time` is older than the
    /// newest time already stored — times must be monotonically
    /// non-decreasing within a partition.
    pub fn append_measurement(&mut self, measurement: &Measurement) -> Result<(), ::Error> {
        if measurement.time < self.time_head {
            return Err(err_user!(
                "measurement time values must be monotonically increasing for \
                each sensor_id"
            ));
        }
        debug!(
            "Storing new measurement; time={}, foffset={}",
            measurement.time, self.offset
        );
        // logfile_writer::append returns the number of bytes written,
        // which advances the next-write offset.
        self.offset += ::logfile_writer::append(&self.get_file_path(), self.offset, measurement)?;
        self.time_head = measurement.time;
        Ok(())
    }

    /// Deletes the partition's backing file from disk.
    pub fn delete(&self) -> Result<(), ::Error> {
        info!(
            "Deleting logfile partition; path={:?}",
            self.get_file_path()
        );
        fs::remove_file(self.get_file_path())?;
        Ok(())
    }

    /// File name derived from the (immutable) tail timestamp.
    pub fn get_file_name(&self) -> String {
        return format!("{}.log", self.time_tail);
    }

    /// Full path of the backing file: `<path>/<time_tail>.log`.
    pub fn get_file_path(&self) -> PathBuf {
        self.path.join(self.get_file_name())
    }

    /// Byte offset at which the next record will be written.
    pub fn get_file_offset(&self) -> u64 {
        self.offset
    }

    /// Timestamp of the newest stored measurement.
    pub fn get_time_head(&self) -> u64 {
        self.time_head
    }

    /// Timestamp the partition was created with.
    pub fn get_time_tail(&self) -> u64 {
        self.time_tail
    }
}
| true |
a27bc29aa48c4a6f90cb8f318c47ee88a73bfe9a
|
Rust
|
ivoelbert/HECTOR
|
/src/typecheck/mod.rs
|
UTF-8
| 10,006 | 3.0625 | 3 |
[] |
no_license
|
#![allow(clippy::pub_enum_variant_names)]
use std::collections::HashMap;
use serde::{Serialize, Serializer};
extern crate snowflake;
pub use std::sync::Arc;
use crate::ast::*;
use crate::externals::{External, ArgumentType, EXTERNALS};
mod intexp;
mod opexp;
mod recordexp;
mod seqexp;
mod assignexp;
mod ifexp;
mod whileexp;
mod forexp;
mod letexp;
mod arrayexp;
mod varexp;
mod nilexp;
mod unitexp;
mod stringexp;
mod callexp;
mod breakexp;
#[derive(Debug, PartialEq, Clone, Serialize)]
/// Write permissions for an int value
pub enum R {
/// Read-only
RO,
/// Read-write
RW
}
/// Unique identifier for Records and Arrays
pub type TypeId = snowflake::ProcessUniqueId;
/// Generate new type id for a Record or Array
pub fn newtypeid() -> TypeId {
snowflake::ProcessUniqueId::new()
}
/// Types in the Tiger language
#[derive(Debug, Clone)]
pub enum TigerType {
/// as in `()`
TUnit,
/// as in `nil`
TNil,
/// as in `3`
TInt(R),
/// as in `"perro`
TString,
/// as in `arrtype1 [10] of 0`
TArray(Arc<TigerType>, TypeId),
/// as in `{name : string, address : string, id : int, age : int}`
TRecord(Vec<(Symbol, RecordFieldType, i32)>, TypeId),
/// Type synonym
Internal(String),
/// This struct still has not been typed yet. The parser gives this type to all nodes in the AST
Untyped,
}
#[derive(Debug, Clone)]
pub enum RecordFieldType {
    /// A (possibly recursive) reference to a record, identified by its `TypeId`.
    Record(TypeId),
    /// Any other field type.
    Type(Arc::<TigerType>)
}

impl PartialEq for RecordFieldType {
    /// Two field types are equal when they are the same record (same
    /// `TypeId`), equal plain types, or when one side is a record and the
    /// other is `nil` (nil is a valid value of any record type).
    fn eq(&self, other: &Self) -> bool {
        use RecordFieldType::*;
        match (self, other) {
            (Record(id1), Record(id2)) => id1 == id2,
            // `matches!` + a merged or-pattern replaces the two symmetric
            // `if let … { true } else { false }` arms.
            (Record(..), Type(t)) | (Type(t), Record(..)) => matches!(**t, TigerType::TNil),
            (Type(t1), Type(t2)) => t1 == t2,
        }
    }
}
impl Serialize for TigerType {
    /// Serializes the type as its bare variant name; variant payloads
    /// (permissions, element types, fields, ids) are dropped.
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        // Map each variant to its label once, then emit a single string.
        let label = match self {
            TigerType::TUnit => "Unit",
            TigerType::TNil => "Nil",
            TigerType::TString => "String",
            TigerType::TInt(..) => "Int",
            TigerType::TArray(..) => "Array",
            TigerType::TRecord(..) => "Record",
            TigerType::Internal(..) => "Internal",
            TigerType::Untyped => "Untyped",
        };
        serializer.serialize_str(label)
    }
}
/// Converts an internal type to the logical type
///
/// Type synonyms (`Internal`) are resolved through the type environment;
/// every other type is already "real" and passes through unchanged.
/// Panics when the synonym is not present in `tenv`.
pub fn tipo_real(t: Arc<TigerType>, tenv: &TypeEnviroment) -> Arc<TigerType> {
    if let TigerType::Internal(name) = &*t {
        match tenv.get(name) {
            Some(real) => Arc::clone(real),
            None => panic!("Undefined"),
        }
    } else {
        t
    }
}
/// Returns true iif the type is an Int
pub fn es_int(t: &TigerType) -> bool {
    // `matches!` expresses the pattern test directly instead of the
    // `if let … { true } else { false }` form.
    matches!(t, TigerType::TInt(_))
}
/// An entry in our `TypeEnviroment` table.
#[derive(Clone, Debug)]
pub enum EnvEntry {
/// A declared varaible
Var {
/// The type of the variable
ty: Arc<TigerType>,
},
/// A declared function
Func {
/// The types of the arguments of the function
formals: Vec<Arc<TigerType>>,
/// The type of the return value of the function
result: Arc<TigerType>,
}
}
/// A table where we store the types that are declared as this point in typechecking.
///
/// When a type is used in a declaration, we look in this table and raise a `TypeError` if it's not found.
type TypeEnviroment = HashMap<Symbol, Arc<TigerType>>;
/// A table where we store the values that are declared as this point in typechecking.
///
/// When a variable or function is used somewhere in the code, we check this table and raise `TypeError` if it's not found.
type ValueEnviroment = HashMap<Symbol, EnvEntry>;
/// Generate a `TypeEnv` that contains integers and strings
fn initial_type_env() -> TypeEnviroment {
vec![
(Symbol::from("int"), Arc::new(TigerType::TInt(R::RW))),
(Symbol::from("string"), Arc::new(TigerType::TString))
]
.into_iter()
.collect()
}
impl From<ArgumentType> for TigerType {
fn from(arg: ArgumentType) -> Self {
match arg {
ArgumentType::String => TigerType::TString,
ArgumentType::Int => TigerType::TInt(R::RO)
}
}
}
fn initial_value_env() -> ValueEnviroment {
EXTERNALS
.iter()
.filter(|External {is_runtime, ..}| !is_runtime)
.map(|External {name, arguments, return_value, ..}|
((*name).to_string(), EnvEntry::Func {
formals: arguments
.iter()
.map(|arg| Arc::new(TigerType::from(*arg)))
.collect(),
result: if let Some(rt) = return_value {
Arc::new(TigerType::from(*rt))
} else {
Arc::new(TigerType::TUnit)
}
}))
.collect()
}
/// Errors that the typechecker can fail with.
#[derive(Debug, Clone, Serialize)]
pub enum TypeError {
/// Using variable that was not declared.
UndeclaredSimpleVar(Pos),
/// Using function that was not declared.
UndeclaredFunction(Pos),
/// Using type that was not declared.
UndeclaredType(Pos),
/// Using a field from a record that was not declared
UndeclaredField(Pos),
/// Tried to use an array or record as a simple variable
NotSimpleVar(Pos),
/// Tried to do a function call on a variable
NotFunctionVar(Pos),
/// Tried to access a record field on something other than a record
NotRecordType(Pos),
/// Tried to index something other than an array
NotArrayType(Pos),
/// Called a function with too many arguments
TooManyArguments(Pos),
/// Called a function with too few arguments
TooFewArguments(Pos),
/// Expected a different type
TypeMismatch(Pos),
/// An if-then-else with different types for each branch
ThenElseTypeMismatch(Pos),
/// Assigning to an Int with `R::RO`
ReadOnlyAssignment(Pos),
/// The bodies of for, while or if whithout else statements should type to Unit
NonUnitBody(Pos),
/// Type mismatch in function call argument
InvalidCallArgument(Pos),
/// A definition is not defining values for all record fields.
MissingRecordField(Pos),
/// The sizes of array definitions should type to Int
NonIntegerSize(Pos),
/// All conditionals should type to Int
NonIntegerCondition(Pos),
/// The range boundaries of for expressions should type to Int
NonIntegerForRange(Pos),
/// Integer operation over non integer operands
NonIntegerOperand(Pos),
/// The subscript of a field varaible should type to Int
NonIntegerSubscript(Pos),
/// Type declarations form an illicit cycle
TypeCycle(Pos),
/// Something is declared twice in the same block
DuplicatedDeclarations(Pos),
/// You can only assign nil to variables with explicit type
UnconstrainedNilInitialization(Pos),
/// All tiger programs should return something of Int type.
NonIntegerProgram(Pos)
}
impl PartialEq for TigerType {
    /// Equality as the typechecker needs it, not plain structural equality:
    /// * `nil` compares equal to any record (nil inhabits record types).
    /// * Ints compare equal regardless of their `R` permission tag.
    /// * Records/arrays compare by unique `TypeId`, not by shape.
    /// * Two `Internal` synonyms compare by name; comparing an `Internal`
    ///   against anything else means a synonym was never resolved via
    ///   `tipo_real`, which is a typechecker bug — hence the panics
    ///   (messages are Spanish for "we are comparing an Internal").
    fn eq(&self, other: &Self) -> bool {
        use TigerType::*;
        match (self, other) {
            (TUnit, TUnit)
            | (TString, TString)
            | (TRecord(_, _), TNil)
            | (TNil, TRecord(_, _))
            | (TInt(_),TInt(_)) => true,
            (TRecord(_, uid1), TRecord(_, uid2 ))
            | (TArray(_, uid1), TArray(_, uid2)) => uid1 == uid2,
            (Internal(s), Internal(t)) => s == t,
            (Internal(_), _) => panic!("Estamos comparando un Internal"),
            (_, Internal(_)) => panic!("Estamos comparando un Internal"),
            (_, _) => false,
        }
    }
}
/// Rebuild an `AST` with the correct types given the context in the enviroments or return a `TypeError`
///
/// Dispatches on the expression variant to the per-node `typecheck`
/// function in the matching submodule, handing the whole `ast` over so
/// each module can rebuild the typed node.
fn type_exp(ast : AST, type_env : &TypeEnviroment, value_env: &ValueEnviroment) -> Result<AST, TypeError> {
    // NOTE(review): the next line appears truncated in this copy of the
    // file — `let AST {node, ..} = *` has no right-hand side and does not
    // parse as-is. It was presumably `let AST { node, .. } = &ast;`
    // (borrow `node` for dispatch while passing `ast` on by value);
    // confirm against the upstream repository before building.
    let AST {node, ..} = *
    match node {
        Exp::Var(..) => varexp::typecheck(ast, type_env, value_env),
        Exp::Unit => unitexp::typecheck(ast, type_env, value_env),
        Exp::Nil => nilexp::typecheck(ast, type_env, value_env),
        Exp::Int(..) => intexp::typecheck(ast, type_env,&value_env),
        Exp::String(..) => stringexp::typecheck(ast, type_env, value_env),
        Exp::Call{..} => callexp::typecheck(ast, type_env, value_env),
        Exp::Op{..} => opexp::typecheck(ast,&type_env, value_env),
        Exp::Assign{..} => assignexp::typecheck(ast, type_env, value_env),
        Exp::Record{..} => recordexp::typecheck(ast, type_env, value_env),
        Exp::Seq(..) => seqexp::typecheck(ast, type_env, value_env),
        Exp::If{..} => ifexp::typecheck(ast, type_env, value_env),
        Exp::While{..} => whileexp::typecheck(ast, type_env, value_env),
        Exp::For{..} => forexp::typecheck(ast, type_env, value_env),
        Exp::Let{..} => letexp::typecheck(ast, type_env, value_env),
        Exp::Break => breakexp::typecheck(ast, type_env, value_env),
        Exp::Array{..} => arrayexp::typecheck(ast, type_env, value_env),
    }
}
/// Typecheck the program
///
/// A valid Tiger program must evaluate to an integer. Note that `==`
/// below uses `TigerType`'s custom `PartialEq`, under which
/// `TInt(RW) == TInt(RO)` — any int-typed program is accepted
/// regardless of its read/write permission tag.
pub fn typecheck(ast : AST) -> Result<AST, TypeError> {
    let typed_ast = type_exp(ast, &initial_type_env(), &initial_value_env())?;
    if *typed_ast.typ == TigerType::TInt(R::RW) {
        Ok(typed_ast)
    } else {
        Err(TypeError::NonIntegerProgram(typed_ast.pos))
    }
}
| true |
eb5ad8039d0ec50297d8524f83b62f6ec51b2dcd
|
Rust
|
rishflab/parallax-scrolling-shader
|
/src/time.rs
|
UTF-8
| 756 | 3.0625 | 3 |
[] |
no_license
|
use std::time::{Duration, Instant};
/// Measures the wall-clock time between consecutive `tick()` calls.
pub struct Timer {
    // Instant of the most recent tick (or of construction).
    last_tick: Instant,
    // Duration between the two most recent ticks; zero until first tick.
    frame_time: Duration,
}

impl Timer {
    /// Creates a timer anchored at the current instant, reporting a zero
    /// elapsed duration until `tick` is first called.
    pub fn new() -> Self {
        Timer {
            last_tick: Instant::now(),
            frame_time: Duration::from_secs(0),
        }
    }

    /// Advances the timer: records the time since the previous tick and
    /// re-anchors at the current instant.
    pub fn tick(&mut self) {
        let now = Instant::now();
        self.frame_time = now.duration_since(self.last_tick);
        self.last_tick = now;
    }

    /// Duration between the two most recent ticks.
    pub fn elapsed(&self) -> Duration {
        self.frame_time
    }

    /// Frames per second implied by the last frame duration (1 / elapsed).
    pub fn _fps(&self) -> f64 {
        Duration::from_secs(1).as_secs_f64() / self.frame_time.as_secs_f64()
    }

    /// The instant of the most recent tick.
    pub fn now(&self) -> Instant {
        self.last_tick
    }
}

impl Default for Timer {
    fn default() -> Self {
        Self::new()
    }
}
| true |
cd36e67b7e1dbba23e3c4dd1601163e6804d4606
|
Rust
|
GaloisInc/mir-verifier
|
/lib/libcore/tests/char.rs
|
UTF-8
| 11,467 | 3.203125 | 3 |
[] |
permissive
|
use std::convert::TryFrom;
use std::str::FromStr;
use std::{char, str};
#[test]
fn test_convert() {
assert_eq!(u32::from('a'), 0x61);
assert_eq!(char::from(b'\0'), '\0');
assert_eq!(char::from(b'a'), 'a');
assert_eq!(char::from(b'\xFF'), '\u{FF}');
assert_eq!(char::try_from(0_u32), Ok('\0'));
assert_eq!(char::try_from(0x61_u32), Ok('a'));
assert_eq!(char::try_from(0xD7FF_u32), Ok('\u{D7FF}'));
assert!(char::try_from(0xD800_u32).is_err());
assert!(char::try_from(0xDFFF_u32).is_err());
assert_eq!(char::try_from(0xE000_u32), Ok('\u{E000}'));
assert_eq!(char::try_from(0x10FFFF_u32), Ok('\u{10FFFF}'));
assert!(char::try_from(0x110000_u32).is_err());
assert!(char::try_from(0xFFFF_FFFF_u32).is_err());
}
#[test]
fn test_from_str() {
assert_eq!(char::from_str("a").unwrap(), 'a');
assert_eq!(char::from_str("\0").unwrap(), '\0');
assert_eq!(char::from_str("\u{D7FF}").unwrap(), '\u{d7FF}');
assert!(char::from_str("").is_err());
assert!(char::from_str("abc").is_err());
}
#[test]
fn test_is_lowercase() {
assert!('a'.is_lowercase());
assert!('ö'.is_lowercase());
assert!('ß'.is_lowercase());
assert!(!'Ü'.is_lowercase());
assert!(!'P'.is_lowercase());
}
#[test]
fn test_is_uppercase() {
assert!(!'h'.is_uppercase());
assert!(!'ä'.is_uppercase());
assert!(!'ß'.is_uppercase());
assert!('Ö'.is_uppercase());
assert!('T'.is_uppercase());
}
#[test]
fn test_is_whitespace() {
assert!(' '.is_whitespace());
assert!('\u{2007}'.is_whitespace());
assert!('\t'.is_whitespace());
assert!('\n'.is_whitespace());
assert!(!'a'.is_whitespace());
assert!(!'_'.is_whitespace());
assert!(!'\u{0}'.is_whitespace());
}
#[test]
fn test_to_digit() {
assert_eq!('0'.to_digit(10), Some(0));
assert_eq!('1'.to_digit(2), Some(1));
assert_eq!('2'.to_digit(3), Some(2));
assert_eq!('9'.to_digit(10), Some(9));
assert_eq!('a'.to_digit(16), Some(10));
assert_eq!('A'.to_digit(16), Some(10));
assert_eq!('b'.to_digit(16), Some(11));
assert_eq!('B'.to_digit(16), Some(11));
assert_eq!('z'.to_digit(36), Some(35));
assert_eq!('Z'.to_digit(36), Some(35));
assert_eq!(' '.to_digit(10), None);
assert_eq!('$'.to_digit(36), None);
}
#[test]
fn test_to_lowercase() {
fn lower(c: char) -> String {
let to_lowercase = c.to_lowercase();
assert_eq!(to_lowercase.len(), to_lowercase.count());
let iter: String = c.to_lowercase().collect();
let disp: String = c.to_lowercase().to_string();
assert_eq!(iter, disp);
iter
}
assert_eq!(lower('A'), "a");
assert_eq!(lower('Ö'), "ö");
assert_eq!(lower('ß'), "ß");
assert_eq!(lower('Ü'), "ü");
assert_eq!(lower('💩'), "💩");
assert_eq!(lower('Σ'), "σ");
assert_eq!(lower('Τ'), "τ");
assert_eq!(lower('Ι'), "ι");
assert_eq!(lower('Γ'), "γ");
assert_eq!(lower('Μ'), "μ");
assert_eq!(lower('Α'), "α");
assert_eq!(lower('Σ'), "σ");
assert_eq!(lower('Dž'), "dž");
assert_eq!(lower('fi'), "fi");
assert_eq!(lower('İ'), "i\u{307}");
}
#[test]
fn test_to_uppercase() {
fn upper(c: char) -> String {
let to_uppercase = c.to_uppercase();
assert_eq!(to_uppercase.len(), to_uppercase.count());
let iter: String = c.to_uppercase().collect();
let disp: String = c.to_uppercase().to_string();
assert_eq!(iter, disp);
iter
}
assert_eq!(upper('a'), "A");
assert_eq!(upper('ö'), "Ö");
assert_eq!(upper('ß'), "SS"); // not ẞ: Latin capital letter sharp s
assert_eq!(upper('ü'), "Ü");
assert_eq!(upper('💩'), "💩");
assert_eq!(upper('σ'), "Σ");
assert_eq!(upper('τ'), "Τ");
assert_eq!(upper('ι'), "Ι");
assert_eq!(upper('γ'), "Γ");
assert_eq!(upper('μ'), "Μ");
assert_eq!(upper('α'), "Α");
assert_eq!(upper('ς'), "Σ");
assert_eq!(upper('Dž'), "DŽ");
assert_eq!(upper('fi'), "FI");
assert_eq!(upper('ᾀ'), "ἈΙ");
}
#[test]
fn test_is_control() {
assert!('\u{0}'.is_control());
assert!('\u{3}'.is_control());
assert!('\u{6}'.is_control());
assert!('\u{9}'.is_control());
assert!('\u{7f}'.is_control());
assert!('\u{92}'.is_control());
assert!(!'\u{20}'.is_control());
assert!(!'\u{55}'.is_control());
assert!(!'\u{68}'.is_control());
}
#[test]
fn test_is_numeric() {
assert!('2'.is_numeric());
assert!('7'.is_numeric());
assert!('¾'.is_numeric());
assert!(!'c'.is_numeric());
assert!(!'i'.is_numeric());
assert!(!'z'.is_numeric());
assert!(!'Q'.is_numeric());
}
#[test]
fn test_escape_debug() {
fn string(c: char) -> String {
let iter: String = c.escape_debug().collect();
let disp: String = c.escape_debug().to_string();
assert_eq!(iter, disp);
iter
}
assert_eq!(string('\n'), "\\n");
assert_eq!(string('\r'), "\\r");
assert_eq!(string('\''), "\\'");
assert_eq!(string('"'), "\\\"");
assert_eq!(string(' '), " ");
assert_eq!(string('a'), "a");
assert_eq!(string('~'), "~");
assert_eq!(string('é'), "é");
assert_eq!(string('文'), "文");
assert_eq!(string('\x00'), "\\u{0}");
assert_eq!(string('\x1f'), "\\u{1f}");
assert_eq!(string('\x7f'), "\\u{7f}");
assert_eq!(string('\u{80}'), "\\u{80}");
assert_eq!(string('\u{ff}'), "\u{ff}");
assert_eq!(string('\u{11b}'), "\u{11b}");
assert_eq!(string('\u{1d4b6}'), "\u{1d4b6}");
assert_eq!(string('\u{301}'), "\\u{301}"); // combining character
assert_eq!(string('\u{200b}'), "\\u{200b}"); // zero width space
assert_eq!(string('\u{e000}'), "\\u{e000}"); // private use 1
assert_eq!(string('\u{100000}'), "\\u{100000}"); // private use 2
}
#[test]
fn test_escape_default() {
fn string(c: char) -> String {
let iter: String = c.escape_default().collect();
let disp: String = c.escape_default().to_string();
assert_eq!(iter, disp);
iter
}
assert_eq!(string('\n'), "\\n");
assert_eq!(string('\r'), "\\r");
assert_eq!(string('\''), "\\'");
assert_eq!(string('"'), "\\\"");
assert_eq!(string(' '), " ");
assert_eq!(string('a'), "a");
assert_eq!(string('~'), "~");
assert_eq!(string('é'), "\\u{e9}");
assert_eq!(string('\x00'), "\\u{0}");
assert_eq!(string('\x1f'), "\\u{1f}");
assert_eq!(string('\x7f'), "\\u{7f}");
assert_eq!(string('\u{80}'), "\\u{80}");
assert_eq!(string('\u{ff}'), "\\u{ff}");
assert_eq!(string('\u{11b}'), "\\u{11b}");
assert_eq!(string('\u{1d4b6}'), "\\u{1d4b6}");
assert_eq!(string('\u{200b}'), "\\u{200b}"); // zero width space
assert_eq!(string('\u{e000}'), "\\u{e000}"); // private use 1
assert_eq!(string('\u{100000}'), "\\u{100000}"); // private use 2
}
#[test]
fn test_escape_unicode() {
fn string(c: char) -> String {
let iter: String = c.escape_unicode().collect();
let disp: String = c.escape_unicode().to_string();
assert_eq!(iter, disp);
iter
}
assert_eq!(string('\x00'), "\\u{0}");
assert_eq!(string('\n'), "\\u{a}");
assert_eq!(string(' '), "\\u{20}");
assert_eq!(string('a'), "\\u{61}");
assert_eq!(string('\u{11b}'), "\\u{11b}");
assert_eq!(string('\u{1d4b6}'), "\\u{1d4b6}");
}
#[test]
fn test_encode_utf8() {
fn check(input: char, expect: &[u8]) {
let mut buf = [0; 4];
let ptr = buf.as_ptr();
let s = input.encode_utf8(&mut buf);
assert_eq!(s.as_ptr() as usize, ptr as usize);
assert!(str::from_utf8(s.as_bytes()).is_ok());
assert_eq!(s.as_bytes(), expect);
}
check('x', &[0x78]);
check('\u{e9}', &[0xc3, 0xa9]);
check('\u{a66e}', &[0xea, 0x99, 0xae]);
check('\u{1f4a9}', &[0xf0, 0x9f, 0x92, 0xa9]);
}
#[test]
fn test_encode_utf16() {
fn check(input: char, expect: &[u16]) {
let mut buf = [0; 2];
let ptr = buf.as_mut_ptr();
let b = input.encode_utf16(&mut buf);
assert_eq!(b.as_mut_ptr() as usize, ptr as usize);
assert_eq!(b, expect);
}
check('x', &[0x0078]);
check('\u{e9}', &[0x00e9]);
check('\u{a66e}', &[0xa66e]);
check('\u{1f4a9}', &[0xd83d, 0xdca9]);
}
#[test]
fn test_len_utf16() {
assert!('x'.len_utf16() == 1);
assert!('\u{e9}'.len_utf16() == 1);
assert!('\u{a66e}'.len_utf16() == 1);
assert!('\u{1f4a9}'.len_utf16() == 2);
}
#[test]
fn test_decode_utf16() {
fn check(s: &[u16], expected: &[Result<char, u16>]) {
let v = char::decode_utf16(s.iter().cloned())
.map(|r| r.map_err(|e| e.unpaired_surrogate()))
.collect::<Vec<_>>();
assert_eq!(v, expected);
}
check(&[0xD800, 0x41, 0x42], &[Err(0xD800), Ok('A'), Ok('B')]);
check(&[0xD800, 0], &[Err(0xD800), Ok('\0')]);
}
#[test]
fn ed_iterator_specializations() {
// Check counting
assert_eq!('\n'.escape_default().count(), 2);
assert_eq!('c'.escape_default().count(), 1);
assert_eq!(' '.escape_default().count(), 1);
assert_eq!('\\'.escape_default().count(), 2);
assert_eq!('\''.escape_default().count(), 2);
// Check nth
// Check that OoB is handled correctly
assert_eq!('\n'.escape_default().nth(2), None);
assert_eq!('c'.escape_default().nth(1), None);
assert_eq!(' '.escape_default().nth(1), None);
assert_eq!('\\'.escape_default().nth(2), None);
assert_eq!('\''.escape_default().nth(2), None);
// Check the first char
assert_eq!('\n'.escape_default().nth(0), Some('\\'));
assert_eq!('c'.escape_default().nth(0), Some('c'));
assert_eq!(' '.escape_default().nth(0), Some(' '));
assert_eq!('\\'.escape_default().nth(0), Some('\\'));
assert_eq!('\''.escape_default().nth(0), Some('\\'));
// Check the second char
assert_eq!('\n'.escape_default().nth(1), Some('n'));
assert_eq!('\\'.escape_default().nth(1), Some('\\'));
assert_eq!('\''.escape_default().nth(1), Some('\''));
// Check the last char
assert_eq!('\n'.escape_default().last(), Some('n'));
assert_eq!('c'.escape_default().last(), Some('c'));
assert_eq!(' '.escape_default().last(), Some(' '));
assert_eq!('\\'.escape_default().last(), Some('\\'));
assert_eq!('\''.escape_default().last(), Some('\''));
}
#[test]
fn eu_iterator_specializations() {
fn check(c: char) {
let len = c.escape_unicode().count();
// Check OoB
assert_eq!(c.escape_unicode().nth(len), None);
// For all possible in-bound offsets
let mut iter = c.escape_unicode();
for offset in 0..len {
// Check last
assert_eq!(iter.clone().last(), Some('}'));
// Check len
assert_eq!(iter.len(), len - offset);
// Check size_hint (= len in ExactSizeIterator)
assert_eq!(iter.size_hint(), (iter.len(), Some(iter.len())));
// Check counting
assert_eq!(iter.clone().count(), len - offset);
// Check nth
assert_eq!(c.escape_unicode().nth(offset), iter.next());
}
// Check post-last
assert_eq!(iter.clone().last(), None);
assert_eq!(iter.clone().count(), 0);
}
check('\u{0}');
check('\u{1}');
check('\u{12}');
check('\u{123}');
check('\u{1234}');
check('\u{12340}');
check('\u{10FFFF}');
}
| true |
f2cf670bc7b815bfe5bf1b1899ddca3be04f7158
|
Rust
|
baszalmstra/adventofcode2018
|
/src/bin/day6.rs
|
UTF-8
| 3,765 | 3.265625 | 3 |
[] |
no_license
|
use aoc::Point;
use std::collections::VecDeque;
#[derive(Copy, Clone, Debug)]
enum VoronoiCell {
Uninitialized,
ClosestTo(usize, u32),
MultipleClosest,
}
fn main() {
// Parse the input
let input: Vec<Point> = std::fs::read_to_string("inputs/day6/input")
.expect("Could not read input file")
.lines()
.map(|l| {
let mut coords = l.split(", ");
Point::new(
coords.next().unwrap().parse().unwrap(),
coords.next().unwrap().parse().unwrap(),
)
})
.collect();
// Find the bounds of the points
let min = input.iter().fold(Point::max_value(), |s, v| s.min(v));
let max = input.iter().fold(Point::min_value(), |s, v| s.max(v));
// Build a voronoi by flood filling a grid
let width = max.x - min.x + 1;
let height = max.y - min.y + 1;
let mut voronoi: Vec<VoronoiCell> = Vec::new();
voronoi.resize((width * height) as usize, VoronoiCell::Uninitialized);
let mut queue = VecDeque::new();
for (idx, point) in input.iter().enumerate() {
queue.push_back((0, idx, *point));
}
while let Some(item) = queue.pop_front() {
let idx = ((item.2.y - min.y) * width + (item.2.x - min.x)) as usize;
let location = item.2;
match voronoi[idx] {
VoronoiCell::Uninitialized => {
voronoi[idx] = VoronoiCell::ClosestTo(item.1, item.0);
}
VoronoiCell::ClosestTo(index, distance) => {
if distance == item.0 {
if index == item.1 {
continue;
} else {
voronoi[idx] = VoronoiCell::MultipleClosest;
}
} else if distance >= item.0 {
voronoi[idx] = VoronoiCell::ClosestTo(item.1, item.0);
} else {
continue;
}
}
VoronoiCell::MultipleClosest => continue,
}
// Add the neighbours to the queue
if location.x > min.x {
queue.push_back((item.0 + 1, item.1, Point::new(location.x - 1, location.y)))
}
if location.x < max.x {
queue.push_back((item.0 + 1, item.1, Point::new(location.x + 1, location.y)))
}
if location.y > min.y {
queue.push_back((item.0 + 1, item.1, Point::new(location.x, location.y - 1)))
}
if location.y < max.y {
queue.push_back((item.0 + 1, item.1, Point::new(location.x, location.y + 1)))
}
}
let mut areas = Vec::new();
areas.resize(input.len(), 0);
for cell in voronoi {
if let VoronoiCell::ClosestTo(idx, _) = cell {
areas[idx] += 1;
}
}
let largest_finite_area = areas
.iter()
.enumerate()
.filter(|(idx, _)| {
input[*idx].x > min.x
&& input[*idx].x < max.x
&& input[*idx].y > min.y
&& input[*idx].y < max.y
})
.max_by(|(_, a), (_, b)| a.cmp(b))
.unwrap();
println!("Result 1: {}", largest_finite_area.1);
let mut count = 0;
for y in min.y..max.y + 1 {
for x in min.x..max.x + 1 {
let mut total_distance = 0;
for point in input.iter() {
let distance =
(x as i64 - point.x as i64).abs() + (y as i64 - point.y as i64).abs();
total_distance += distance;
if total_distance >= 10000 {
break;
}
}
if total_distance < 10000 {
count += 1;
}
}
}
println!("Result 2: {}", count);
}
| true |
c335f77157e33c8133051b040eb18adf9b409ef3
|
Rust
|
dima74/factorio-servers-statistics
|
/src/state/big_string.rs
|
UTF-8
| 5,716 | 3.03125 | 3 |
[] |
no_license
|
use std::num::NonZeroU32;
use hashbrown::HashMap;
use serde::{Deserialize, Serialize};
/// An arena of `\x00`-separated substrings held in a single contiguous
/// buffer; substrings are referenced by `BigStringPart` (the byte offset
/// of their start).
#[derive(Eq, PartialEq, Serialize, Deserialize)]
pub struct BigString {
    // Name used only for log output in `compress`; not serialized.
    #[serde(skip)]
    debug_name: String,
    // UTF-8 string containing a sequence of substrings separated by \x00:
    // ["aa", "bb", "cc"] == \x00 aa \x00 bb \x00 cc \x00
    content: Vec<u8>,
}
impl BigString {
    pub fn new() -> Self {
        BigString {
            debug_name: String::new(),
            // Start with a single \x00 so every part is preceded by a
            // separator and no part ever has offset 0 (BigStringPart
            // wraps a NonZeroU32).
            content: vec![0],
        }
    }

    /// Sets the name used to identify this buffer in `compress` logging.
    pub fn set_debug_name(&mut self, debug_name: String) {
        self.debug_name = debug_name;
    }

    /// Appends `string`, replacing any embedded \x00 (the separator byte)
    /// with \x01 first, and returns a handle to the stored part.
    pub fn add(&mut self, string: &str) -> BigStringPart {
        let string = if string.contains('\x00') {
            eprintln!("[warn] found \\x00 in BigStringPart");
            string.replace('\x00', "\x01")
        } else {
            // todo: needless copy :)
            string.to_owned()
        };
        self.add_vec(string.as_bytes())
    }

    /// Appends raw bytes (assumed free of \x00) plus a trailing separator;
    /// the part's starting offset becomes its handle.
    pub fn add_vec(&mut self, string: &[u8]) -> BigStringPart {
        let part_index = self.content.len() as u32;
        self.content.extend_from_slice(string);
        self.content.push(0);
        BigStringPart(NonZeroU32::new(part_index).unwrap())
    }

    // todo return &str ?
    /// Returns the part starting at `part_index`: the bytes from that
    /// offset up to (not including) the next \x00 separator.
    pub fn get(&self, part_index: BigStringPart) -> FssStr {
        let begin = part_index.0.get() as usize;
        let length = self.content[begin..].iter()
            .position(|&byte| byte == 0)
            .unwrap();
        FssStr(&self.content[begin..begin + length])
    }

    /// Convenience: returns the part as an owned `String`.
    pub fn get_str(&self, part_index: BigStringPart) -> String {
        self.get(part_index).into()
    }

    /// Deduplicates identical parts in place: equal byte sequences collapse
    /// to one copy and surviving parts are shifted left over the freed
    /// space. Returns the old-offset → new-offset map so callers can
    /// rewrite their stored `BigStringPart` handles.
    pub fn compress(&mut self) -> HashMap<BigStringPart, BigStringPart> /* old index → new index */ {
        let mut new_index_by_part: HashMap<&[u8], usize> = HashMap::new();
        let mut new_index_by_old_index: HashMap<BigStringPart, BigStringPart> = HashMap::new();
        // (new_index, old_index, part_length)
        let mut part_moves: Vec<(usize, usize, usize)> = Vec::new();
        let mut next_part_index = 1;
        let mut part_begin = 1;
        // Pass 1: scan every part; each distinct byte sequence gets the next
        // free destination slot, and every part records its planned move.
        while part_begin != self.content.len() {
            let part_end = part_begin + self.content[part_begin..].iter().position(|&c| c == 0).unwrap();
            let part = &self.content[part_begin..part_end];
            let new_part_index = match new_index_by_part.get(part) {
                Some(&index) => index,
                None => {
                    let new_part_index = next_part_index;
                    next_part_index += part.len() + 1;
                    new_index_by_part.insert(part, new_part_index);
                    new_part_index
                }
            };
            new_index_by_old_index.insert(
                BigStringPart(NonZeroU32::new(part_begin as u32).unwrap()),
                BigStringPart(NonZeroU32::new(new_part_index as u32).unwrap()),
            );
            part_moves.push((new_part_index, part_begin, part.len()));
            part_begin = part_end + 1;
        }
        // Pass 2: execute the moves. Destinations never lie after their
        // sources (asserted below), so copying left-to-right is safe, and
        // the separators around each destination are rewritten explicitly.
        for (new_index, old_index, part_length) in part_moves {
            assert!(new_index <= old_index);
            for i in 0..part_length {
                self.content[new_index + i] = self.content[old_index + i];
            }
            self.content[new_index - 1] = 0;
            self.content[new_index + part_length] = 0;
        }
        println!("[info] [big_string] {:20}: {} → {}", self.debug_name, self.content.len(), next_part_index);
        self.content.truncate(next_part_index);
        new_index_by_old_index
    }
}
// Handle to a substring stored in a BigString: the byte offset at which
// the part begins (never 0 — offset 0 holds the leading separator).
#[derive(Copy, Clone, Eq, PartialEq, Hash, Serialize, Deserialize)]
pub struct BigStringPart(NonZeroU32);
// Type-safe borrowed view (sub-slice) of a BigString part.
#[derive(Copy, Clone)]
pub struct FssStr<'a> (pub &'a [u8]);

// `From` impls rather than hand-written `Into`: the standard library's
// blanket `impl<T, U: From<T>> Into<U> for T` derives the matching
// `Into` automatically, so existing `.into()` call sites keep working.
impl<'a> From<FssStr<'a>> for &'a str {
    /// Panics when the part is not valid UTF-8.
    fn from(string: FssStr<'a>) -> &'a str {
        // todo from_utf8_unchecked ?
        std::str::from_utf8(string.0).unwrap()
    }
}

impl<'a> From<FssStr<'a>> for String {
    /// Panics when the part is not valid UTF-8.
    fn from(string: FssStr<'a>) -> String {
        std::str::from_utf8(string.0).unwrap().to_owned()
    }
}

/// Owned counterpart of `FssStr`: the raw part bytes, with value
/// equality and hashing.
#[derive(Eq, PartialEq, Hash)]
pub struct FssString(pub Vec<u8>);

impl<'a> From<FssStr<'a>> for FssString {
    fn from(string: FssStr) -> Self {
        FssString(string.0.into())
    }
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn sizes() {
assert_eq!(std::mem::size_of::<Option<BigStringPart>>(), 4);
}
#[test]
fn basic() {
let mut big_string = BigString::new();
let part_index = big_string.add("hello");
assert_eq!(big_string.get_str(part_index), "hello");
}
#[test]
fn compress() {
let mut big_string = BigString::new();
let bbb1 = big_string.add("bbb");
let aaa1 = big_string.add("aaaa");
let bbb2 = big_string.add("bbb");
let aaa2 = big_string.add("aaaa");
let ccc1 = big_string.add("cc");
let bbb3 = big_string.add("bbb");
let map = big_string.compress();
assert_eq!(big_string.content, b"\x00bbb\x00aaaa\x00cc\x00");
assert_eq!(big_string.get_str(*map.get(&bbb1).unwrap()), "bbb");
assert_eq!(big_string.get_str(*map.get(&bbb2).unwrap()), "bbb");
assert_eq!(big_string.get_str(*map.get(&bbb3).unwrap()), "bbb");
assert_eq!(big_string.get_str(*map.get(&aaa1).unwrap()), "aaaa");
assert_eq!(big_string.get_str(*map.get(&aaa2).unwrap()), "aaaa");
assert_eq!(big_string.get_str(*map.get(&ccc1).unwrap()), "cc");
}
}
| true |
ea9b15431eac4ab3aef82ed099e9d6ef21d5c7ac
|
Rust
|
LinAGKar/advent-of-code-2020-rust
|
/day18b/src/main.rs
|
UTF-8
| 4,218 | 3.421875 | 3 |
[
"MIT"
] |
permissive
|
use std::boxed::Box;
use std::io;
use std::io::Read;
#[derive(Debug, Clone, Copy)]
enum Op {
Add,
Mult,
}
#[derive(Debug)]
enum Token {
Number(i64),
Op(Op),
Lbrace,
Rbrace,
}
/// Streaming tokenizer over one expression line.
///
/// `next_char` is a one-character lookahead buffer: a character already
/// pulled from `source` but not yet consumed into a token (number scanning
/// only knows it is done after reading one character too many).
struct Lexer<'a> {
    source: Box<dyn Iterator<Item=char> + 'a>,
    next_char: Option<char>,
}
impl<'a> Iterator for Lexer<'a> {
    type Item = Token;
    /// Returns the next token, or `None` when the input is exhausted.
    fn next(&mut self) -> Option<Self::Item> {
        if let Some(mut next_char) = self.next_char {
            // Skip a run of whitespace; hitting end of input inside
            // whitespace simply terminates the token stream.
            while next_char.is_whitespace() {
                if let Some(new_char) = self.source.next() {
                    next_char = new_char;
                } else {
                    return None;
                }
            }
            match next_char {
                '(' => {
                    self.next_char = self.source.next();
                    Some(Token::Lbrace)
                }
                ')' => {
                    self.next_char = self.source.next();
                    Some(Token::Rbrace)
                }
                '+' => {
                    self.next_char = self.source.next();
                    Some(Token::Op(Op::Add))
                }
                '*' => {
                    self.next_char = self.source.next();
                    Some(Token::Op(Op::Mult))
                }
                _ => {
                    // Anything else must start a (non-negative) integer.
                    if !next_char.is_numeric() {
                        panic!("Syntax error {}", next_char);
                    }
                    let mut number: String = [next_char].iter().collect();
                    self.next_char = None;
                    // Consume digits; the first non-digit goes back into the
                    // lookahead buffer for the next call.
                    while let Some(next_char) = self.source.next() {
                        if next_char.is_numeric() {
                            number.push(next_char);
                        } else {
                            self.next_char = Some(next_char);
                            break;
                        }
                    }
                    Some(Token::Number(number.parse().unwrap()))
                },
            }
        } else {
            None
        }
    }
}
impl<'a> Lexer<'a> {
    /// Builds a lexer over `source`, priming the one-character lookahead.
    fn new(source: &str) -> Lexer {
        let mut iter = Box::new(source.chars());
        let next_char = iter.next();
        Lexer {
            source: iter,
            next_char: next_char,
        }
    }
}
/// Evaluates each stdin line with inverted precedence — addition binds
/// tighter than multiplication (multiplication is deferred below) — and
/// prints the sum of all line results.
fn main() {
    let mut input = String::new();
    io::stdin().read_to_string(&mut input).unwrap();
    println!("{}", input.lines().map(|line| {
        let mut tokens = Lexer::new(line);
        // Saved (to_mult, acc, saved_op) evaluation state per open '('.
        let mut stack = Vec::new();
        // Pending product: '*' stashes its left operand here instead of
        // multiplying immediately, so a later '+' still applies to `acc`.
        let mut to_mult = 1;
        // Accumulator of the current addition group.
        let mut acc = 0;
        // Operator seen but still waiting for its right operand.
        let mut saved_op = None;
        while let Some(token) = tokens.next() {
            match token {
                Token::Number(val) => {
                    if let Some(op) = saved_op {
                        match op {
                            Op::Add => {
                                acc += val;
                            }
                            Op::Mult => {
                                to_mult *= acc;
                                acc = val;
                            }
                        }
                        saved_op = None;
                    } else {
                        acc = val;
                    }
                }
                Token::Op(op) => {
                    saved_op = Some(op);
                }
                Token::Lbrace => {
                    // Start a fresh sub-expression; state restored at ')'.
                    stack.push((to_mult, acc, saved_op));
                    saved_op = None;
                    to_mult = 1;
                }
                Token::Rbrace => {
                    // Finish the sub-expression, then fold its value into
                    // the saved outer state via the operator saved there.
                    acc *= to_mult;
                    let (old_to_mult, old_acc, old_op) = stack.pop().unwrap();
                    to_mult = old_to_mult;
                    match old_op {
                        Some(Op::Add) => {
                            acc += old_acc;
                        }
                        Some(Op::Mult) => {
                            to_mult *= old_acc;
                        }
                        None => {}
                    }
                    saved_op = None;
                }
            };
        }
        // Apply the final deferred multiplication for this line.
        acc *= to_mult;
        acc
    }).sum::<i64>());
}
| true |
e8b2da324f5de1aa52f317c11760c57817b90092
|
Rust
|
ftilde/rust-x86asm
|
/src/test/instruction_tests/instr_xsave.rs
|
UTF-8
| 2,137 | 2.5625 | 3 |
[
"MIT"
] |
permissive
|
use instruction_def::*;
use test::run_test;
use Operand::*;
use Reg::*;
use RegScale::*;
use RegType::*;
use {BroadcastMode, Instruction, MaskReg, MergeMode, Mnemonic, OperandSize, Reg, RoundingMode};
// Encoding tests for the XSAVE instruction; each case feeds `run_test` one
// memory-operand addressing form and the expected machine-code bytes.
// 16-bit operand size: base+index addressing [BP+DI].
#[test]
fn xsave_1() {
    run_test(
        &Instruction {
            mnemonic: Mnemonic::XSAVE,
            operand1: Some(IndirectScaledIndexed(
                BP,
                DI,
                One,
                Some(OperandSize::Unsized),
                None,
            )),
            operand2: None,
            operand3: None,
            operand4: None,
            lock: false,
            rounding_mode: None,
            merge_mode: None,
            sae: false,
            mask: None,
            broadcast: None,
        },
        &[15, 174, 35],
        OperandSize::Word,
    )
}
// 32-bit operand size: scaled index plus 32-bit displacement [ECX*8+disp32].
#[test]
fn xsave_2() {
    run_test(
        &Instruction {
            mnemonic: Mnemonic::XSAVE,
            operand1: Some(IndirectScaledDisplaced(
                ECX,
                Eight,
                551181552,
                Some(OperandSize::Unsized),
                None,
            )),
            operand2: None,
            operand3: None,
            operand4: None,
            lock: false,
            rounding_mode: None,
            merge_mode: None,
            sae: false,
            mask: None,
            broadcast: None,
        },
        &[15, 174, 36, 205, 240, 92, 218, 32],
        OperandSize::Dword,
    )
}
// 64-bit operand size: base + scaled index + displacement [RSI+RDI*4+disp32].
#[test]
fn xsave_3() {
    run_test(
        &Instruction {
            mnemonic: Mnemonic::XSAVE,
            operand1: Some(IndirectScaledIndexedDisplaced(
                RSI,
                RDI,
                Four,
                200379235,
                Some(OperandSize::Unsized),
                None,
            )),
            operand2: None,
            operand3: None,
            operand4: None,
            lock: false,
            rounding_mode: None,
            merge_mode: None,
            sae: false,
            mask: None,
            broadcast: None,
        },
        &[15, 174, 164, 190, 99, 139, 241, 11],
        OperandSize::Qword,
    )
}
| true |
2f5e78a63108a55daba050f8c30602ce4e69c7da
|
Rust
|
jD91mZM2/chess-minimax
|
/src/lib.rs
|
UTF-8
| 2,988 | 3.203125 | 3 |
[
"MIT"
] |
permissive
|
#[macro_use] extern crate failure;
use std::fmt;
pub mod board;
pub mod minimax;
pub mod piece;
pub mod serialize;
// Not really a part of the library, just need this for sharing interface with
// WASM and binary.
#[cfg(feature = "terminal")]
pub mod terminal;
/// A position on the board: `Pos(x, y)` where `x` is the file (column,
/// 0 = 'A') and `y` is the internal rank counted from the top of the board
/// (see the `Display` impl, which prints `WIDTH - y` as the rank digit).
#[derive(Debug, Default, Clone, Copy, PartialEq, Eq, Hash)]
pub struct Pos(pub i8, pub i8);
impl Pos {
    /// Returns true if the position is actually within the boundaries of a board
    pub fn is_valid(self) -> bool {
        let Pos(x, y) = self;
        x >= 0 && x < board::WIDTH
            && y >= 0 && y < board::WIDTH
    }
    /// Gets the next position on the board. A1 becomes B1, H1 becomes A2.
    /// Advances along the current rank, wrapping to column 0 of the next
    /// internal rank at the right edge (no validity check on the result).
    pub fn next(self) -> Self {
        let Pos(x, y) = self;
        if x + 1 < board::WIDTH {
            Pos(x + 1, y)
        } else {
            Pos(0, y + 1)
        }
    }
}
// Generates component-wise arithmetic for Pos: each input row expands into
// the binary operator trait (via `$op`, e.g. Add/+) plus its matching
// in-place assignment trait (via `$op_assign`, e.g. AddAssign/+=).
macro_rules! impl_op {
    ($($trait:ident, $fn:ident, $op:tt, $trait_assign:ident, $fn_assign:ident, $op_assign:tt;)*) => {
        $(impl std::ops::$trait<Pos> for Pos {
            type Output = Self;
            fn $fn(self, other: Self) -> Self::Output {
                let Pos(x1, y1) = self;
                let Pos(x2, y2) = other;
                Pos(
                    x1 $op x2,
                    y1 $op y2
                )
            }
        }
        impl std::ops::$trait_assign<Pos> for Pos {
            fn $fn_assign(&mut self, other: Self) {
                let Pos(ref mut x1, ref mut y1) = self;
                let Pos(x2, y2) = other;
                *x1 $op_assign x2;
                *y1 $op_assign y2;
            }
        })*
    }
}
impl_op! {
    Add, add, +, AddAssign, add_assign, +=;
    Sub, sub, -, SubAssign, sub_assign, -=;
}
/// Renders algebraic notation, e.g. "A1": file letter derived from `x`,
/// rank digit as `WIDTH - y` (internal `y` counts from the top).
/// Panics (via the assert) on out-of-bounds positions.
impl fmt::Display for Pos {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        assert!(self.is_valid());
        let Pos(x, y) = *self;
        write!(f, "{}{}", ('A' as u8 + x as u8) as char, board::WIDTH-y)
    }
}
/// An error parsing a position from a string
#[derive(Debug, Fail)]
#[fail(display = "invalid position string")]
pub struct ParsePosError;
/// Parses algebraic notation ("a1".."h8"); the file letter is accepted in
/// either case. Characters after the first two are ignored.
impl std::str::FromStr for Pos {
    type Err = ParsePosError;
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        let mut chars = s.chars();
        let x = match chars.next() {
            Some(c @ 'a'..='h') => c as u8 - b'a',
            Some(c @ 'A'..='H') => c as u8 - b'A',
            _ => return Err(ParsePosError)
        };
        // Rank '8' maps to internal y = 0 (top row).
        // NOTE(review): the '1'..='8' bounds and b'8' hard-code a width of 8
        // instead of deriving from board::WIDTH — confirm WIDTH is always 8.
        let y = match chars.next() {
            Some(c @ '1'..='8') => b'8' - c as u8,
            _ => return Err(ParsePosError)
        };
        Ok(Pos(x as i8, y as i8))
    }
}
/// What side a piece belongs to.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum Side {
    Black,
    White
}
/// `!side` yields the opposing side.
impl std::ops::Not for Side {
    type Output = Self;
    fn not(self) -> Self::Output {
        match self {
            Side::White => Side::Black,
            Side::Black => Side::White,
        }
    }
}
| true |
593bfce9b1bd6e97b02a27c8dfd2fc6c9c4b7f68
|
Rust
|
k-hamada/exercism
|
/rust/scrabble-score/src/lib.rs
|
UTF-8
| 673 | 2.546875 | 3 |
[] |
no_license
|
#[macro_use]
extern crate lazy_static;
use std::collections::HashMap;
lazy_static! {
static ref SCORE_MAP: HashMap<char, u32> = {
[('A', 1), ('B', 3), ('C', 3), ('D', 2), ('E', 1), ('F', 4), ('G', 2),
('H', 4), ('I', 1), ('J', 8), ('K', 5), ('L', 1), ('M', 3), ('N', 1),
('O', 1), ('P', 3), ('Q', 10), ('R', 1), ('S', 1), ('T', 1), ('U', 1),
('V', 4), ('W', 4), ('X', 8), ('Y', 4), ('Z', 10)]
.iter().cloned().collect()
};
}
/// Computes the Scrabble score of `words`: the sum of each letter's value
/// (case-insensitive); characters without a score entry count as 0.
pub fn score(words: &str) -> u32 {
    // Iterate the &str directly; the previous `words.to_string()` cloned the
    // whole input for no benefit (clippy: unnecessary_to_owned).
    words
        .chars()
        .map(|c| SCORE_MAP.get(&c.to_ascii_uppercase()).copied().unwrap_or(0))
        .sum()
}
| true |
58edab8b2e637c8e3d373e9305e9cb5da98978a2
|
Rust
|
indefini/dormin
|
/src/input.rs
|
UTF-8
| 469 | 3.203125 | 3 |
[] |
no_license
|
use std::collections::HashSet;
/// Tracks the set of key codes currently held down.
pub struct Input {
    keys: HashSet<u8>,
}

impl Input {
    /// Creates an empty input state (no keys pressed).
    pub fn new() -> Input {
        Input { keys: HashSet::new() }
    }

    /// Returns whether key code `k` is currently pressed.
    pub fn is_key_down(&self, k: u8) -> bool {
        self.keys.contains(&k)
    }

    /// Registers key code `k` as pressed (logs the key code to stdout).
    pub fn add_key(&mut self, k: u8) {
        println!("insert key : {}", k);
        self.keys.insert(k);
    }

    /// Releases every pressed key.
    pub fn clear(&mut self) {
        self.keys.clear();
    }
}
| true |
a85705723c8505daa27717417c216d72fac4cef0
|
Rust
|
Nuwanda/despesas_casa_backend
|
/src/controllers/expense.rs
|
UTF-8
| 2,279 | 2.515625 | 3 |
[] |
no_license
|
use diesel::prelude::*;
use rocket_contrib::databases::diesel;
use rocket_contrib::json::JsonError;
use crate::domain::models::{Expense, JsonResult, NewExpense, NewExpenseUser};
use crate::domain::services::database::schema::{expense_user, expenses};
use crate::domain::services::database::Conn as DbConn;
use crate::responses::Response;
#[get("/")]
pub fn all_expenses(conn: DbConn) -> Response<Vec<Expense>> {
match expenses::table.load::<Expense>(&conn as &diesel::SqliteConnection) {
Ok(results) => return Response::get(results),
Err(err) => return Response::error(500, err.to_string()),
}
}
#[get("/<id>")]
pub fn expense_by_id(id: i32, conn: DbConn) -> Option<Response<Expense>> {
match expenses::table
.filter(expenses::id.eq(id))
.first::<Expense>(&conn as &diesel::SqliteConnection)
{
Ok(expense) => return Some(Response::get(expense)),
Err(_) => return None,
}
}
#[post("/", data = "<raw_expense>")]
pub fn create_expense(raw_expense: JsonResult<NewExpense>, conn: DbConn) -> Response<Expense> {
let NewExpense {
split_between,
expense,
} = match raw_expense {
Ok(expense) => expense.into_inner(),
Err(JsonError::Io(err)) => return Response::error(500, err.to_string()),
Err(JsonError::Parse(_data, err)) => return Response::error(422, err.to_string()),
};
match conn.transaction::<Expense, diesel::result::Error, _>(|| {
diesel::insert_into(expenses::table)
.values(&expense)
.execute(&conn as &diesel::SqliteConnection)?;
let expense = expenses::table
.order(expenses::id.desc())
.first::<Expense>(&conn as &diesel::SqliteConnection)?;
let expenses_for_users: Vec<NewExpenseUser> = split_between
.into_iter()
.map(|user_id| NewExpenseUser {
user_id,
expense_id: expense.id,
})
.collect();
diesel::insert_into(expense_user::table)
.values(&expenses_for_users)
.execute(&conn as &diesel::SqliteConnection)?;
Ok(expense)
}) {
Ok(expense) => Response::post(expense),
Err(err) => Response::error(500, err.to_string()),
}
}
| true |
4619f0ffb7da32f41f3671a91d1b279a64a4afc7
|
Rust
|
vinnyhoward/til
|
/rust/rust-sandbox/src/functions.rs
|
UTF-8
| 556 | 3.921875 | 4 |
[
"MIT"
] |
permissive
|
pub fn run() {
greeting("Hola", "Vincent");
// Bind function values to variables
let get_sum = add(10, 5200);
println!("{}", get_sum);
// Closure
let n3: i32 = 1;
let add_int = |n1: i32, n2: i32| n1 + n2 + n3;
println!("Closure Sum: {}", add_int(9990, 9));
}
fn greeting(greet: &str, name: &str) {
println!("{} {}, nice to meet you!", greet, name);
}
// Explicit: Use a return keyword with a ending semi-colon
// Implicit: No return key word with no ending semi-colon
fn add(n1: i32, n2: i32) -> i32 {
n1 + n2
}
| true |
27be47dec40d70c2375efc76772f774e0cccd43a
|
Rust
|
shengLin-alex/rust-minigrep
|
/src/main.rs
|
UTF-8
| 1,004 | 3.125 | 3 |
[] |
no_license
|
extern crate minigrep;
use std::env;
use std::process;
use minigrep::Config;
/// Entry point: parse CLI arguments into a `Config`, then run the search.
fn main() {
    // collect() turns the argument iterator into a collection (a Vec here).
    let args: Vec<String> = env::args().collect();
    // Config's constructor returns a Result, so it is handled here:
    // unwrap_or_else() unwraps Ok(value) automatically; on error it calls
    // the supplied closure (callback).
    let config: Config = Config::new(&args).unwrap_or_else(|err| {
        // With eprintln!(), `$ cargo run > output.txt` does not send this
        // error into output.txt — only println! output is redirected —
        // so the error still shows up in the terminal.
        eprintln!("Problem parsing arguments: {}", err);
        process::exit(1);
    });
    println!("Searching for {}", config.query);
    println!("In file {}", config.filename);
    // A closure was used above; pattern matching (if let) is used here.
    if let Err(e) = minigrep::run(config) {
        eprintln!("Application error: {}", e);
        process::exit(1);
    }
}
| true |
39122baa1c24d3ba54feac4c469065ca6ea2c78c
|
Rust
|
davll/echo-server-rs
|
/src/main.rs
|
UTF-8
| 1,206 | 2.953125 | 3 |
[] |
no_license
|
use async_std::prelude::*;
use async_std::net::{TcpListener, TcpStream};
use async_std::task;
/// Accept loop: binds to 127.0.0.1:8080 and spawns one task per client.
#[async_std::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    let listener = TcpListener::bind("127.0.0.1:8080").await?;
    println!("Echo server is listening to {}", listener.local_addr()?);
    let mut incoming = listener.incoming();
    while let Some(stream) = incoming.next().await {
        let stream = stream?;
        println!("Accepted connection from {}", stream.peer_addr()?);
        // Detached task: the handle is dropped, so per-client failures end
        // the task silently after the logging inside `connect`.
        let _handle = task::spawn(connect(stream));
    }
    Ok(())
}
/// Per-connection echo loop: copies bytes back to the peer until EOF
/// (read returns 0) or an I/O error occurs; errors are logged, not returned.
async fn connect(mut stream: TcpStream) -> std::io::Result<()> {
    let mut buf = [0; 1024];
    loop {
        // read data
        match stream.read(&mut buf).await {
            Ok(0) => break,
            Ok(n) => {
                // write data (only the n bytes actually read)
                if let Err(e) = stream.write_all(&buf[0..n]).await {
                    eprintln!("failed to write to the stream; err = {:?}", e);
                    break;
                }
            },
            Err(e) => {
                eprintln!("failed to read from the stream; err = {:?}", e);
                break;
            },
        }
    }
    Ok(())
}
| true |
c378e22a7ddfccc29c8402e2fa3618263f6d18b1
|
Rust
|
learning-on-chip/planner
|
/src/lib/format/threed_ice.rs
|
UTF-8
| 1,037 | 2.703125 | 3 |
[
"MIT"
] |
permissive
|
use std::io::Write;
use Result;
use layout::Element;
macro_rules! element_template(
() => (
r#"{name}:
position {x:.0}, {y:.0};
dimension {width:.0}, {height:.0};
power values 0;
"#
);
);
/// The 3D-ICE format.
///
/// Writes one floorplan stanza per element, converting coordinates and
/// dimensions from meters to micrometers (the 1e6 factor).
pub struct ThreeDICE;
impl super::Format for ThreeDICE {
    fn write(&self, elements: &[Element], writer: &mut Write) -> Result<()> {
        // Emit a blank line between stanzas, but not before the first one.
        let mut first = true;
        for &Element { ref name, position: (x, y), dimension: (width, height) } in elements {
            if !first {
                ok!(writer.write(b"\n"));
            } else {
                first = false;
            }
            // NOTE(review): `ok!` presumably converts io errors into this
            // crate's `Result` — it is defined elsewhere in the crate.
            ok!(writer.write_fmt(format_args!(element_template!(),
                                              name = name,
                                              x = x * 1e6,
                                              y = y * 1e6,
                                              width = width * 1e6,
                                              height = height * 1e6)));
        }
        Ok(())
    }
}
| true |
39c1e318ecb0d0be46fb456a294c182f9c609bac
|
Rust
|
aedm/stillaxis
|
/rust/stillaxis/src/stillaxis.rs
|
UTF-8
| 6,865 | 2.703125 | 3 |
[] |
no_license
|
use crate::dom::document::Document;
use crate::dom::flow_node::{Element, ElementProviderRef, ElementRef};
use crate::dom::mutation::FlowMutation;
use stillaxis_core::node::{Node, ProviderRef};
use stillaxis_core::render::render_graph::Message::{GetProviderValue, Mutate};
use stillaxis_core::render::render_graph::{ProviderValueRequest, RenderGraph};
/// Top-level handle pairing the render-thread graph (`core_dom`) with the
/// flow document (`flow_dom`) that mirrors it.
pub struct Stillaxis {
    pub core_dom: RenderGraph,
    pub flow_dom: Document,
}
impl Stillaxis {
    /// Builds the render graph and a flow document mirroring it.
    pub fn new() -> Stillaxis {
        let core_dom = RenderGraph::new();
        let flow_dom = Document::new(&core_dom);
        Stillaxis { core_dom, flow_dom }
    }
    /// Creates a core node of type `T` and wraps it in a flow element.
    pub fn new_node<T: 'static + Node>(&self) -> ElementRef {
        let core_node = self.core_dom.new_node::<T>();
        Element::from_core_node(&core_node)
    }
    /// Applies `flow_mutation` to the flow document, then forwards the
    /// resulting core mutation to the render thread (send errors ignored).
    pub fn run_mutation(&mut self, flow_mutation: &mut FlowMutation) {
        let core_mutation = flow_mutation.run(&mut self.flow_dom);
        let _ = self
            .core_dom
            .sender_to_render_thread
            .send(Box::new(Mutate(core_mutation)));
    }
    /// Root element of the flow document.
    pub fn get_root(&self) -> ElementRef {
        self.flow_dom.root.clone()
    }
    /// Asks the render thread for a provider's current value; the response
    /// arrives asynchronously on `receiver_from_render_thread`.
    pub fn send_value_request(&mut self, provider_ref: &ElementProviderRef) {
        let request: ProviderValueRequest = ProviderValueRequest {
            provider: ProviderRef {
                node: provider_ref.node.borrow().core_node.clone(),
                provider_index: provider_ref.provider_index,
            },
            response_value: None,
        };
        let _ = self
            .core_dom
            .sender_to_render_thread
            .send(Box::new(GetProviderValue(request)));
    }
}
impl Drop for Stillaxis {
    // Debug trace to observe teardown ordering.
    fn drop(&mut self) {
        dbg!("Stillaxis.drop");
    }
}
#[cfg(test)]
mod tests {
use crate::dom::flow_node::{ElementProviderRef, ElementSlotRef};
use crate::dom::mutation::FlowMutation;
use crate::dom::mutation_create_node::CreateNodeFlowMutation;
use crate::dom::mutation_remove_node::RemoveNodeFlowMutation;
use crate::dom::mutation_set_connections::SetSlotConnectionsFlowMutation;
use crate::dom::mutation_set_slot_value::SetSlotValueFlowMutation;
use crate::stillaxis::Stillaxis;
use stillaxis_core::nodes::float_node::FloatNode;
use stillaxis_core::nodes::sum_node::SumNode;
use stillaxis_core::provider::ProviderValue;
use stillaxis_core::render::render_graph::Message;
use stillaxis_core::slot::SlotDefaultValue;
fn get_incoming(stillaxis: &mut Stillaxis) -> Box<Message> {
stillaxis
.core_dom
.receiver_from_render_thread
.recv()
.unwrap()
}
fn assert_mutation_response(stillaxis: &mut Stillaxis) {
let message = get_incoming(stillaxis);
assert!(matches!(message.as_ref(), Message::Mutate { .. }));
}
fn assert_value_response(stillaxis: &mut Stillaxis, value: &ProviderValue) {
let message = get_incoming(stillaxis);
match message.as_ref() {
Message::GetProviderValue(value_request) => {
assert_eq!(value_request.response_value.as_ref().unwrap(), value);
}
_ => panic!(),
}
}
#[test]
fn simple_sum_graph() {
let mut stillaxis = Stillaxis::new();
let ff1 = stillaxis.new_node::<FloatNode>();
let ff2 = stillaxis.new_node::<FloatNode>();
let fsum = stillaxis.new_node::<SumNode>();
let mut flow_mutation = FlowMutation::new(vec![
CreateNodeFlowMutation::new(&ff1),
CreateNodeFlowMutation::new(&ff2),
CreateNodeFlowMutation::new(&fsum),
SetSlotConnectionsFlowMutation::new(
ElementSlotRef::new(&fsum, "a"),
vec![ElementProviderRef::new(&ff1, "value")],
),
SetSlotConnectionsFlowMutation::new(
ElementSlotRef::new(&fsum, "b"),
vec![ElementProviderRef::new(&ff2, "value")],
),
SetSlotConnectionsFlowMutation::new(
ElementSlotRef::new(&stillaxis.get_root(), "all_nodes"),
vec![ElementProviderRef::new(&fsum, "node")],
),
]);
// thread::sleep(Duration::from_millis(100));
stillaxis.run_mutation(&mut flow_mutation);
assert_mutation_response(&mut stillaxis);
stillaxis.send_value_request(&ElementProviderRef::new(&fsum, "sum"));
assert_value_response(&mut stillaxis, &ProviderValue::Float32(0.0));
let mut flow_mutation = FlowMutation::new(vec![SetSlotValueFlowMutation::_new(
&ff1,
"a",
SlotDefaultValue::Float32(10.0),
)]);
// thread::sleep(Duration::from_millis(100));
stillaxis.run_mutation(&mut flow_mutation);
assert_mutation_response(&mut stillaxis);
stillaxis.send_value_request(&ElementProviderRef::new(&fsum, "sum"));
assert_value_response(&mut stillaxis, &ProviderValue::Float32(10.0));
}
#[test]
fn no_dropping_nodes_on_render_thread() {
let mut stillaxis = Stillaxis::new();
let mut _c1;
let csum;
let fsum = stillaxis.new_node::<SumNode>();
{
let ff1 = stillaxis.new_node::<FloatNode>();
_c1 = Some(ff1.borrow().core_node.clone());
csum = fsum.borrow().core_node.clone();
let mut flow_mutation = FlowMutation::new(vec![
CreateNodeFlowMutation::new(&ff1),
CreateNodeFlowMutation::new(&fsum),
SetSlotConnectionsFlowMutation::new(
ElementSlotRef::new(&stillaxis.get_root(), "all_nodes"),
vec![ElementProviderRef::new(&fsum, "node")],
),
SetSlotConnectionsFlowMutation::new(
ElementSlotRef::new(&fsum, "a"),
vec![ElementProviderRef::new(&ff1, "value")],
),
]);
stillaxis.run_mutation(&mut flow_mutation);
assert_mutation_response(&mut stillaxis);
assert!(_c1.as_ref().unwrap().refc() > 1);
assert!(csum.refc() > 1);
stillaxis.send_value_request(&ElementProviderRef::new(&fsum, "sum"));
assert_value_response(&mut stillaxis, &ProviderValue::Float32(0.0));
let mut flow_mutation = FlowMutation::new(vec![
SetSlotConnectionsFlowMutation::new(ElementSlotRef::new(&fsum, "a"), vec![]),
RemoveNodeFlowMutation::new(&ff1),
]);
stillaxis.run_mutation(&mut flow_mutation);
}
assert!(_c1.as_ref().unwrap().refc() > 1);
assert_mutation_response(&mut stillaxis);
assert_eq!(_c1.as_ref().unwrap().refc(), 1);
assert!(csum.refc() > 1);
_c1 = None;
}
}
| true |
8f5a5735329c6dff43340a1dce377147758dffd6
|
Rust
|
rust-vmm/vm-device
|
/src/resources.rs
|
UTF-8
| 13,563 | 3.03125 | 3 |
[
"Apache-2.0",
"BSD-3-Clause"
] |
permissive
|
// Copyright (C) 2019 Alibaba Cloud. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
//! Structs to manage device resources.
//!
//! The high level flow of resource management among the VMM, the device manager, and the device
//! is as below:
//! 1) the VMM creates a new device object.
//! 2) the VMM asks the new device object for its resource constraints.
//! 3) the VMM allocates resources for the device object according to resource constraints.
//! 4) the VMM passes the allocated resources to the device object.
//! 5) the VMM registers the new device onto corresponding device managers according the allocated
//! resources.
use std::{u16, u32, u64};
/// Enumeration describing a device's resource constraints.
pub enum ResourceConstraint {
    /// Constraint for an IO Port address range.
    PioAddress {
        /// Allocating resource within the range [`min`, `max`] if specified.
        range: Option<(u16, u16)>,
        /// Alignment for the allocated address.
        align: u16,
        /// Size for the allocated address range.
        size: u16,
    },
    /// Constraint for a Memory Mapped IO address range.
    MmioAddress {
        /// Allocating resource within the range [`min`, `max`] if specified.
        range: Option<(u64, u64)>,
        /// Alignment for the allocated address.
        align: u64,
        /// Size for the allocated address range.
        size: u64,
    },
    /// Constraint for a legacy IRQ.
    LegacyIrq {
        /// Reserving the pre-allocated IRQ if it's specified.
        irq: Option<u32>,
    },
    /// Constraint for PCI MSI IRQs.
    PciMsiIrq {
        /// Number of Irqs to allocate.
        size: u32,
    },
    /// Constraint for PCI MSIx IRQs.
    PciMsixIrq {
        /// Number of Irqs to allocate.
        size: u32,
    },
    /// Constraint for generic IRQs.
    GenericIrq {
        /// Number of Irqs to allocate.
        size: u32,
    },
    /// Constraint for KVM mem_slot indexes to map memory into the guest.
    KvmMemSlot {
        /// Allocating kvm memory slots starting from the index `slot` if specified.
        slot: Option<u32>,
        /// Number of slots to allocate.
        size: u32,
    },
}

impl ResourceConstraint {
    /// Create a new PIO address constraint object with default configuration:
    /// any base address, byte (0x1) alignment.
    pub fn new_pio(size: u16) -> Self {
        Self::pio_with_constraints(size, None, 0x1)
    }

    /// Create a new PIO address constraint object.
    pub fn pio_with_constraints(size: u16, range: Option<(u16, u16)>, align: u16) -> Self {
        Self::PioAddress { range, align, size }
    }

    /// Create a new MMIO address constraint object with default configuration:
    /// any base address, page (0x1000) alignment.
    pub fn new_mmio(size: u64) -> Self {
        Self::mmio_with_constraints(size, None, 0x1000)
    }

    /// Create a new MMIO address constraint object.
    pub fn mmio_with_constraints(size: u64, range: Option<(u64, u64)>, align: u64) -> Self {
        Self::MmioAddress { range, align, size }
    }

    /// Create a new legacy IRQ constraint object.
    ///
    /// Reserves the pre-allocated legacy Irq `irq` if specified.
    pub fn new_legacy_irq(irq: Option<u32>) -> Self {
        Self::LegacyIrq { irq }
    }

    /// Create a new KVM memory slot constraint object.
    ///
    /// Allocates kvm memory slots starting from the index `slot` if specified.
    pub fn new_kvm_mem_slot(size: u32, slot: Option<u32>) -> Self {
        Self::KvmMemSlot { slot, size }
    }
}
/// Type of Message Signaled Interrupt
#[derive(Clone, Copy, Eq, PartialEq)]
pub enum MsiIrqType {
    /// PCI MSI IRQ numbers.
    PciMsi,
    /// PCI MSIx IRQ numbers.
    PciMsix,
    /// Generic MSI IRQ numbers.
    GenericMsi,
}
/// Enumeration for device resources.
#[allow(missing_docs)]
#[derive(Clone)]
pub enum Resource {
    /// IO Port address range.
    PioAddressRange { base: u16, size: u16 },
    /// Memory Mapped IO address range.
    MmioAddressRange { base: u64, size: u64 },
    /// Legacy IRQ number.
    LegacyIrq(u32),
    /// Message Signaled Interrupt
    MsiIrq {
        ty: MsiIrqType,
        base: u32,
        size: u32,
    },
    /// Network Interface Card MAC address.
    // NOTE: variant keeps its original spelling ("MacAddresss") because it
    // is public API; renaming would break existing callers.
    MacAddresss(String),
    /// KVM memslot index.
    KvmMemSlot(u32),
}
/// Newtype to store a set of device resources.
#[derive(Default, Clone)]
pub struct DeviceResources(Vec<Resource>);
impl DeviceResources {
    /// Create a container object to store device resources.
    pub fn new() -> Self {
        DeviceResources(Vec::new())
    }
    /// Append a device resource to the container object.
    pub fn append(&mut self, entry: Resource) {
        self.0.push(entry);
    }
    /// Get all IO port address resources as `(base, size)` pairs,
    /// in insertion order.
    pub fn get_pio_address_ranges(&self) -> Vec<(u16, u16)> {
        // filter_map replaces the previous manual loop+push (idiomatic, and
        // what clippy's manual_filter_map suggests).
        self.0
            .iter()
            .filter_map(|entry| match entry {
                Resource::PioAddressRange { base, size } => Some((*base, *size)),
                _ => None,
            })
            .collect()
    }
    /// Get all Memory Mapped IO address resources as `(base, size)` pairs,
    /// in insertion order.
    pub fn get_mmio_address_ranges(&self) -> Vec<(u64, u64)> {
        self.0
            .iter()
            .filter_map(|entry| match entry {
                Resource::MmioAddressRange { base, size } => Some((*base, *size)),
                _ => None,
            })
            .collect()
    }
    /// Get the first legacy interrupt number (IRQ), if any.
    pub fn get_legacy_irq(&self) -> Option<u32> {
        self.0.iter().find_map(|entry| match entry {
            Resource::LegacyIrq(base) => Some(*base),
            _ => None,
        })
    }
    /// Get information about the first PCI MSI interrupt resource.
    pub fn get_pci_msi_irqs(&self) -> Option<(u32, u32)> {
        self.get_msi_irqs(MsiIrqType::PciMsi)
    }
    /// Get information about the first PCI MSIx interrupt resource.
    pub fn get_pci_msix_irqs(&self) -> Option<(u32, u32)> {
        self.get_msi_irqs(MsiIrqType::PciMsix)
    }
    /// Get information about the first Generic MSI interrupt resource.
    pub fn get_generic_msi_irqs(&self) -> Option<(u32, u32)> {
        self.get_msi_irqs(MsiIrqType::GenericMsi)
    }
    /// First `(base, size)` of an MsiIrq entry whose type matches `ty`.
    fn get_msi_irqs(&self, ty: MsiIrqType) -> Option<(u32, u32)> {
        self.0.iter().find_map(|entry| match entry {
            Resource::MsiIrq {
                ty: msi_type,
                base,
                size,
            } if *msi_type == ty => Some((*base, *size)),
            _ => None,
        })
    }
    /// Get the KVM memory slots to map memory into the guest,
    /// in insertion order.
    pub fn get_kvm_mem_slots(&self) -> Vec<u32> {
        self.0
            .iter()
            .filter_map(|entry| match entry {
                Resource::KvmMemSlot(index) => Some(*index),
                _ => None,
            })
            .collect()
    }
    /// Get the first resource information for NIC MAC address, cloned.
    pub fn get_mac_address(&self) -> Option<String> {
        self.0.iter().find_map(|entry| match entry {
            Resource::MacAddresss(addr) => Some(addr.clone()),
            _ => None,
        })
    }
    /// Get immutable reference to all the resources.
    pub fn get_all_resources(&self) -> &[Resource] {
        &self.0
    }
}
#[cfg(test)]
mod tests {
use super::*;
const PIO_ADDRESS_SIZE: u16 = 5;
const PIO_ADDRESS_BASE: u16 = 0;
const MMIO_ADDRESS_SIZE: u64 = 0x8765_4321;
const MMIO_ADDRESS_BASE: u64 = 0x1234_5678;
const LEGACY_IRQ: u32 = 0x168;
const PCI_MSI_IRQ_SIZE: u32 = 0x8888;
const PCI_MSI_IRQ_BASE: u32 = 0x6666;
const PCI_MSIX_IRQ_SIZE: u32 = 0x16666;
const PCI_MSIX_IRQ_BASE: u32 = 0x8888;
const GENERIC_MSI_IRQS_SIZE: u32 = 0x16888;
const GENERIC_MSI_IRQS_BASE: u32 = 0x16688;
const MAC_ADDRESS: &str = "00:08:63:66:86:88";
const KVM_SLOT_ID: u32 = 0x0100;
fn get_device_resource() -> DeviceResources {
let entry = Resource::PioAddressRange {
base: PIO_ADDRESS_BASE,
size: PIO_ADDRESS_SIZE,
};
let mut resource = DeviceResources::new();
resource.append(entry);
let entry = Resource::MmioAddressRange {
base: MMIO_ADDRESS_BASE,
size: MMIO_ADDRESS_SIZE,
};
resource.append(entry);
let entry = Resource::LegacyIrq(LEGACY_IRQ);
resource.append(entry);
let entry = Resource::MsiIrq {
ty: MsiIrqType::PciMsi,
base: PCI_MSI_IRQ_BASE,
size: PCI_MSI_IRQ_SIZE,
};
resource.append(entry);
let entry = Resource::MsiIrq {
ty: MsiIrqType::PciMsix,
base: PCI_MSIX_IRQ_BASE,
size: PCI_MSIX_IRQ_SIZE,
};
resource.append(entry);
let entry = Resource::MsiIrq {
ty: MsiIrqType::GenericMsi,
base: GENERIC_MSI_IRQS_BASE,
size: GENERIC_MSI_IRQS_SIZE,
};
resource.append(entry);
let entry = Resource::MacAddresss(MAC_ADDRESS.to_string());
resource.append(entry);
resource.append(Resource::KvmMemSlot(KVM_SLOT_ID));
resource
}
#[test]
fn get_pio_address_ranges() {
let resources = get_device_resource();
assert!(
resources.get_pio_address_ranges()[0].0 == PIO_ADDRESS_BASE
&& resources.get_pio_address_ranges()[0].1 == PIO_ADDRESS_SIZE
);
}
#[test]
fn test_get_mmio_address_ranges() {
let resources = get_device_resource();
assert!(
resources.get_mmio_address_ranges()[0].0 == MMIO_ADDRESS_BASE
&& resources.get_mmio_address_ranges()[0].1 == MMIO_ADDRESS_SIZE
);
}
#[test]
fn test_get_legacy_irq() {
let resources = get_device_resource();
assert!(resources.get_legacy_irq().unwrap() == LEGACY_IRQ);
}
#[test]
fn test_get_pci_msi_irqs() {
let resources = get_device_resource();
assert!(
resources.get_pci_msi_irqs().unwrap().0 == PCI_MSI_IRQ_BASE
&& resources.get_pci_msi_irqs().unwrap().1 == PCI_MSI_IRQ_SIZE
);
}
#[test]
fn test_pci_msix_irqs() {
let resources = get_device_resource();
assert!(
resources.get_pci_msix_irqs().unwrap().0 == PCI_MSIX_IRQ_BASE
&& resources.get_pci_msix_irqs().unwrap().1 == PCI_MSIX_IRQ_SIZE
);
}
#[test]
fn test_get_generic_msi_irqs() {
let resources = get_device_resource();
assert!(
resources.get_generic_msi_irqs().unwrap().0 == GENERIC_MSI_IRQS_BASE
&& resources.get_generic_msi_irqs().unwrap().1 == GENERIC_MSI_IRQS_SIZE
);
}
#[test]
fn test_get_mac_address() {
let resources = get_device_resource();
assert_eq!(resources.get_mac_address().unwrap(), MAC_ADDRESS);
}
#[test]
fn test_get_kvm_slot() {
let resources = get_device_resource();
assert_eq!(resources.get_kvm_mem_slots(), vec![KVM_SLOT_ID]);
}
#[test]
fn test_get_all_resources() {
let resources = get_device_resource();
assert_eq!(resources.get_all_resources().len(), 8);
}
#[test]
fn test_resource_constraint() {
if let ResourceConstraint::PioAddress { range, align, size } =
ResourceConstraint::new_pio(2)
{
assert_eq!(range, None);
assert_eq!(align, 1);
assert_eq!(size, 2);
} else {
panic!("Pio resource constraint is invalid.");
}
if let ResourceConstraint::PioAddress { range, align, size } =
ResourceConstraint::pio_with_constraints(2, Some((15, 16)), 2)
{
assert_eq!(range, Some((15, 16)));
assert_eq!(align, 2);
assert_eq!(size, 2);
} else {
panic!("Pio resource constraint is invalid.");
}
if let ResourceConstraint::MmioAddress { range, align, size } =
ResourceConstraint::new_mmio(0x2000)
{
assert_eq!(range, None);
assert_eq!(align, 0x1000);
assert_eq!(size, 0x2000);
} else {
panic!("Mmio resource constraint is invalid.");
}
if let ResourceConstraint::MmioAddress { range, align, size } =
ResourceConstraint::mmio_with_constraints(0x2000, Some((0x0, 0x2000)), 0x2000)
{
assert_eq!(range, Some((0x0, 0x2000)));
assert_eq!(align, 0x2000);
assert_eq!(size, 0x2000);
} else {
panic!("Mmio resource constraint is invalid.");
}
if let ResourceConstraint::LegacyIrq { irq } =
ResourceConstraint::new_legacy_irq(Some(0x123))
{
assert_eq!(irq, Some(0x123));
} else {
panic!("IRQ resource constraint is invalid.");
}
if let ResourceConstraint::KvmMemSlot { slot, size } =
ResourceConstraint::new_kvm_mem_slot(0x1000, Some(0x2000))
{
assert_eq!(slot, Some(0x2000));
assert_eq!(size, 0x1000);
} else {
panic!("KVM slot resource constraint is invalid.");
}
}
}
| true |
7d196773b06f9362decd87f40b4d76fd2c4e9ec3
|
Rust
|
ffizer/ffizer
|
/src/error.rs
|
UTF-8
| 4,946 | 2.671875 | 3 |
[
"CC0-1.0"
] |
permissive
|
// see :
// - [std::error::Error - Rust](https://doc.rust-lang.org/std/error/trait.Error.html)
// - [Error Handling - A Gentle Introduction to Rust](https://stevedonovan.github.io/rust-gentle-intro/6-error-handling.html)
// - [snafu::guide::comparison::failure - Rust](https://docs.rs/snafu/0.4.3/snafu/guide/comparison/failure/index.html)
// - [Error Handling in Rust - Andrew Gallant's Blog](https://blog.burntsushi.net/rust-error-handling/)
// use std::backtrace::Backtrace;
use std::path::PathBuf;
use thiserror::Error;
use tracing_error::SpanTrace;
use crate::git::GitError;
pub type Result<T, E = Error> = std::result::Result<T, E>;
#[derive(Error, Debug)]
#[allow(clippy::large_enum_variant)] // warn to restore to default
pub enum Error {
#[error("unknown ffizer error: {0}")]
Unknown(String),
#[error("value {value:?} of {value_name} is not in {accepted:?}")]
StringValueNotIn {
value_name: String,
value: String,
accepted: Vec<String>,
},
#[error("git retreive {url:?} (rev: {rev:?}) into folder {dst:?}")]
GitRetrieve {
dst: PathBuf,
url: String,
rev: Option<String>,
source: GitError,
msg: Box<String>,
},
#[error("try to find git config '{key:?}'")]
GitFindConfig { key: String, source: GitError },
#[error("canonicalize {path:?}")]
CanonicalizePath {
path: PathBuf,
source: std::io::Error,
},
#[error("create folder {path:?}")]
CreateFolder {
path: PathBuf,
source: std::io::Error,
},
#[error("create temp folder")]
CreateTmpFolder { source: std::io::Error },
#[error("remove folder {path:?}")]
RemoveFolder {
path: PathBuf,
source: std::io::Error,
},
#[error("list content of folder {path:?}")]
ListFolder {
path: PathBuf,
source: std::io::Error,
},
#[error("create file {path:?}")]
CreateFile {
path: PathBuf,
source: std::io::Error,
},
#[error("rename file from {src:?} to {dst:?}")]
RenameFile {
src: PathBuf,
dst: PathBuf,
source: std::io::Error,
},
#[error("copy file from {src:?} to {dst:?}")]
CopyFile {
src: PathBuf,
dst: PathBuf,
source: std::io::Error,
},
#[error("copy permission from {src:?} to {dst:?}")]
CopyFilePermission {
src: PathBuf,
dst: PathBuf,
source: std::io::Error,
},
#[error("read file {path:?}")]
ReadFile {
path: PathBuf,
source: std::io::Error,
},
#[error("write file {path:?}")]
WriteFile {
path: PathBuf,
source: std::io::Error,
},
#[error("remove file {path:?}")]
RemoveFile {
path: PathBuf,
source: std::io::Error,
},
#[error("run command '{cmd:?}'")]
RunCommand { cmd: String, source: std::io::Error },
#[error("fail to parse string as path '{value:?}'")]
ParsePathPattern {
value: String,
source: globset::Error,
},
#[error("fail to parse string as uri for git repo '{value:?}'")]
ParseGitUri { value: String, source: regex::Error },
#[error("local path({path:?}) not found for uri({uri:?}) subfolder({subfolder:?})")]
LocalPathNotFound {
path: PathBuf,
uri: String,
subfolder: Option<PathBuf>,
},
#[error("Application directory not found")]
ApplicationPathNotFound {},
#[error("test samples failed")]
TestSamplesFailed {},
#[error("failed to parse value '{value}' for variable '{name}'")]
ReadVariable { name: String, value: String },
#[error(transparent)]
// #[error("fail to process io")]
Io {
#[from]
source: std::io::Error,
// backtrace: Backtrace,
},
#[error("fail to process template '{template}' when {when}")]
Handlebars {
when: String,
template: Box<String>,
source: Box<handlebars::RenderError>,
},
// #[error(transparent)]
#[error("fail to process yaml")]
SerdeYaml {
context: SpanTrace,
#[source]
source: serde_yaml::Error,
// backtrace: Backtrace,
},
#[error("fail to process script '{script}'")]
ScriptError {
script: String,
source: run_script::ScriptError,
},
#[error(transparent)]
SerdeJson {
#[from]
source: serde_json::Error,
},
#[error(transparent)]
WalkDir {
#[from]
source: walkdir::Error,
},
#[error(transparent)]
PathStripPrefixError {
#[from]
source: std::path::StripPrefixError,
},
#[error(transparent)]
Clap {
#[from]
source: clap::Error,
},
}
impl From<serde_yaml::Error> for Error {
fn from(source: serde_yaml::Error) -> Self {
Error::SerdeYaml {
context: SpanTrace::capture(),
source,
}
}
}
| true |
2554d0bc149ae4b5d7325aeddafab7bc5badf413
|
Rust
|
Miliox/kiwi
|
/src/emulator/mmu.rs
|
UTF-8
| 179 | 2.5625 | 3 |
[] |
no_license
|
/// Minimal interface for a byte-oriented memory with a 16-bit address
/// space (presumably the emulator's memory bus — implementors map `addr`
/// onto ROM, RAM or I/O registers; TODO confirm against callers).
pub trait Memory {
    /// Read a single byte from memory at `addr`.
    fn read(&self, addr: u16) -> u8;
    /// Write a single byte `data` to memory at `addr`.
    fn write(&mut self, addr: u16, data: u8);
}
| true |
3470463bb154b4412b65ed55ca3b45e57ed10e56
|
Rust
|
yukibtc/rrrdb
|
/src/rrrdb.rs
|
UTF-8
| 5,838 | 2.640625 | 3 |
[
"Apache-2.0"
] |
permissive
|
use parser::Parser;
use crate::rrrdb::schema::ColumnType;
use crate::rrrdb::storage::Storage;
use self::{parser::ParserError, sql::executor::Executor, sql::planner::Planner};
mod parser;
mod schema;
mod sql;
mod storage;
/// Top-level database handle: a thin facade over the storage engine plus
/// the SQL pipeline (parse -> plan -> execute).
pub struct RrrDB {
    // RocksDB-backed storage engine shared by planner and executor.
    pub(crate) underlying: Storage,
}
impl RrrDB {
    /// Open (or create) a database rooted at `path`.
    pub fn new(path: &str) -> Self {
        Self {
            underlying: Storage::new(path),
        }
    }
    /// Parse, plan and execute a single SQL statement against
    /// `database_name`. Parser errors are flattened to a `String`,
    /// which `DBResult`'s `From<String>` turns into a `DBError`.
    pub fn execute(&mut self, database_name: &str, query: &str) -> DBResult {
        // Planning borrows `underlying` mutably; scope the borrow so the
        // executor can borrow it again afterwards.
        let plan = {
            let statement = Parser::parse_sql(Some(database_name.to_string()), query)
                .map_err(|pe: ParserError| pe.to_string())?;
            let mut planner: Planner = Planner::new(database_name, &mut self.underlying, statement);
            planner.plan()
        };
        let mut executor = Executor::new(&mut self.underlying, plan);
        executor.execute()
    }
}
/// Outcome of `RrrDB::execute`: a successful result or a `DBError`.
pub type DBResult = Result<OkDBResult, DBError>;
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum OkDBResult {
    /// Rows produced by a SELECT.
    SelectResult(ResultSet),
    /// Statement ran but produced no row set (e.g. CREATE TABLE, INSERT).
    ExecutionResult,
}
/// Error surfaced to callers; all failure causes are collapsed into a
/// human-readable message string.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct DBError {
    pub(crate) message: String,
}
impl DBError {
    pub(crate) fn new(message: String) -> Self {
        Self { message }
    }
    /// Convenience constructor for a missing RocksDB column family.
    pub(crate) fn namespace_not_found(namespace: &storage::Namespace) -> Self {
        Self {
            message: format!("ColumnFamily({}) not found", namespace.cf_name()),
        }
    }
}
// Lossy conversions: engine errors and ad-hoc strings both become messages.
impl From<rocksdb::Error> for DBError {
    fn from(e: rocksdb::Error) -> Self {
        Self {
            message: e.into_string(),
        }
    }
}
impl From<String> for DBError {
    fn from(e: String) -> Self {
        Self { message: e }
    }
}
/// Rows returned by a SELECT, plus column metadata describing them.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct ResultSet {
    records: Vec<Record>,
    metadata: ResultMetadata,
}
impl ResultSet {
    pub fn new(records: Vec<Record>, metadata: ResultMetadata) -> Self {
        Self { records, metadata }
    }
    /// Row at `index`, or `None` when out of range.
    pub fn get(&self, index: usize) -> Option<&Record> {
        self.records.get(index)
    }
}
/// A single row: one `FieldValue` per selected column, in column order.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct Record {
    values: Vec<FieldValue>,
}
impl Record {
    pub fn new(values: Vec<FieldValue>) -> Self {
        Self { values }
    }
    /// Value of the column at `index`, or `None` when out of range.
    pub fn get(&self, index: usize) -> Option<&FieldValue> {
        self.values.get(index)
    }
}
/// Dynamically-typed cell value stored in a `Record`.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum FieldValue {
    /// Raw, uninterpreted bytes.
    Bytes(Vec<u8>),
    /// SQL integer.
    Int(i64),
    /// SQL varchar/text.
    Text(String),
}
/// Per-column metadata for a `ResultSet`, in the same order as the
/// values inside each `Record`.
#[derive(Default, Debug, Clone, PartialEq, Eq, Hash)]
pub struct ResultMetadata {
    fields: Vec<FieldMetadata>,
}
impl ResultMetadata {
    pub fn new(field_metadatas: Vec<FieldMetadata>) -> Self {
        Self {
            fields: field_metadatas,
        }
    }
}
/// Name and declared SQL type of one result column.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct FieldMetadata {
    pub(crate) field_name: String,
    // Stored as the raw type name (e.g. "integer", "varchar"); parsed on
    // demand by `field_type()`.
    field_type: String,
}
impl FieldMetadata {
    pub fn new(name: &str, _type: &str) -> Self {
        Self {
            field_name: name.to_string(),
            field_type: _type.to_string(),
        }
    }
    /// Parse the stored type name into the schema's `ColumnType`.
    pub(crate) fn field_type(&self) -> ColumnType {
        ColumnType::from(self.field_type.to_owned())
    }
}
#[cfg(test)]
mod tests {
    // `sleep` was imported but never used directly (the code calls
    // `thread::sleep`), so the import is trimmed to `thread` and `time`.
    use std::{
        path::Path,
        thread,
        time,
    };
    use super::{
        schema::{store::SchemaStore, *},
        *,
    };
    /// End-to-end smoke test: create a table, insert rows, then exercise
    /// full-scan and filtered SELECTs against a fresh on-disk database.
    #[test]
    fn run() {
        let mut rrrdb = build_clean_database();
        rrrdb
            .execute("test_db", "CREATE TABLE users (id integer, name varchar)")
            .unwrap();
        rrrdb
            .execute("test_db", "INSERT INTO users VALUES (1, 'Alice')")
            .unwrap();
        rrrdb
            .execute("test_db", "INSERT INTO users VALUES (2, 'Bob')")
            .unwrap();
        let result = rrrdb.execute("test_db", "SELECT * FROM users").unwrap();
        assert_eq!(
            result,
            OkDBResult::SelectResult(ResultSet::new(
                vec![
                    Record::new(vec![
                        FieldValue::Int(1),
                        FieldValue::Text("Alice".to_string()),
                    ]),
                    Record::new(vec![
                        FieldValue::Int(2),
                        FieldValue::Text("Bob".to_string()),
                    ]),
                ],
                ResultMetadata::new(vec![
                    FieldMetadata::new("id", "integer"),
                    FieldMetadata::new("name", "varchar")
                ])
            ))
        );
        println!("OK - SELECT * FROM users");
        let result = rrrdb
            .execute("test_db", "SELECT name FROM users WHERE id = 2")
            .unwrap();
        assert_eq!(
            result,
            OkDBResult::SelectResult(ResultSet::new(
                vec![Record::new(vec![FieldValue::Text("Bob".to_string()),]),],
                ResultMetadata::new(vec![FieldMetadata::new("name", "varchar")])
            ))
        );
        println!("OK - SELECT name FROM users WHERE id = 2");
        let result = rrrdb
            .execute("test_db", "SELECT id FROM users WHERE name = 'Alice'")
            .unwrap();
        assert_eq!(
            result,
            OkDBResult::SelectResult(ResultSet::new(
                vec![Record::new(vec![FieldValue::Int(1),]),],
                ResultMetadata::new(vec![FieldMetadata::new("id", "integer")])
            ))
        );
        println!("OK - SELECT id FROM users WHERE name = 'Alice'");
    }
    /// Build an `RrrDB` at a throwaway path, wiping any leftovers from a
    /// previous run first (was misspelled `build_crean_database`).
    fn build_clean_database() -> RrrDB {
        let path = "./test_tmp_database";
        if Path::new(path).exists() {
            std::fs::remove_dir_all(path).unwrap();
            // Give the OS a moment to release file handles before recreating.
            thread::sleep(time::Duration::from_millis(100));
        }
        std::fs::create_dir_all(path).unwrap();
        RrrDB::new(path)
    }
}
| true |
5ff5872420fe81f8b2e518a908d3ebf2d1941f87
|
Rust
|
minijackson/INF-4101C
|
/src/processing.rs
|
UTF-8
| 14,669 | 2.828125 | 3 |
[
"MIT"
] |
permissive
|
extern crate image;
use self::image::{
ImageBuffer,
Luma,
Pixel
};
/// Classic 3x3 Sobel edge detector over a fixed 640x480 grayscale frame.
///
/// Border pixels (x = 0/639, y = 0/479) are left at the buffer's default
/// value because the 3x3 kernel would read out of bounds there.
pub fn sobel(frame : ImageBuffer<Luma<u8>, Vec<u8>>) -> ImageBuffer<Luma<u8>, Vec<u8>> {
    let mut result = ImageBuffer::new(640, 480);
    // Interior pixels are 1..=638 horizontally and 1..=478 vertically.
    // The previous bounds (1..638 / 1..478) skipped the last interior row
    // and column — unlike `sobel_optimized`, which does cover them.
    for i in 1..639 {
        for j in 1..479 {
            let north_west = frame[(i-1, j-1)].channels()[0] as i32;
            let north = frame[(i, j-1)].channels()[0] as i32;
            let north_east = frame[(i+1, j-1)].channels()[0] as i32;
            let west = frame[(i-1, j)].channels()[0] as i32;
            let east = frame[(i+1, j)].channels()[0] as i32;
            let south_west = frame[(i-1, j+1)].channels()[0] as i32;
            let south = frame[(i, j+1)].channels()[0] as i32;
            let south_east = frame[(i+1, j+1)].channels()[0] as i32;
            // Horizontal and vertical Sobel responses.
            let gx : i32 = north_west + south_west + (2 * west) - north_east - south_east - (2 * east);
            let gy : i32 = north_west + north_east + (2 * north) - south_west - south_east - (2 * south);
            // Exact L2 gradient magnitude, narrowed to u8.
            let root : u8 = (((gx * gx) + (gy * gy)) as f32).sqrt() as u8;
            result.put_pixel(i, j, Luma([root]));
        }
    }
    return result;
}
/// Sobel variant hand-optimized two ways: the inner loop is unrolled to
/// produce two horizontally adjacent output pixels per iteration (sharing
/// the overlapping kernel reads), and the exact sqrt(gx^2 + gy^2) magnitude
/// is replaced by the cheaper approximation (|gx| + |gy|) / 2 * 1.414216
/// (the constant approximates sqrt(2)). Note the result therefore differs
/// slightly from `sobel`'s output.
pub fn sobel_optimized(frame : ImageBuffer<Luma<u8>, Vec<u8>>) -> ImageBuffer<Luma<u8>, Vec<u8>> {
    let mut result = ImageBuffer::new(640, 480);
    let mut i = 1;
    // i steps by 2; the last iteration (i = 637) writes columns 637 and 638,
    // and reads up to column i + 2 = 639, which is still in bounds.
    while i < 638 {
        let mut j = 1;
        while j < 479 {
            // Shared 3x3 neighborhood for the pixel at (i, j) ...
            let north_west = frame[(i-1, j-1)].channels()[0] as i32;
            let north = frame[(i, j-1)].channels()[0] as i32;
            let north_east = frame[(i+1, j-1)].channels()[0] as i32;
            // ... plus the extra column needed for the pixel at (i + 1, j).
            let north_east2 = frame[(i+2, j-1)].channels()[0] as i32;
            let west = frame[(i-1, j)].channels()[0] as i32;
            let west2 = frame[(i, j)].channels()[0] as i32;
            let east = frame[(i+1, j)].channels()[0] as i32;
            let east2 = frame[(i+2, j)].channels()[0] as i32;
            let south_west = frame[(i-1, j+1)].channels()[0] as i32;
            let south = frame[(i, j+1)].channels()[0] as i32;
            let south_east = frame[(i+1, j+1)].channels()[0] as i32;
            let south_east2 = frame[(i+2, j+1)].channels()[0] as i32;
            // Gradients for pixel (i, j); `<< 1` replaces the *2 weighting.
            let gx : i32 = north_west + south_west + (west << 1) - north_east - south_east - (east << 1);
            let gy : i32 = north_west + north_east + (north << 1) - south_west - south_east - (south << 1);
            // Gradients for pixel (i + 1, j), reusing the shared reads.
            let gx2 : i32 = north + (west2 << 1) + south - north_east2 - (east2 << 1) - south_east2;
            let gy2 : i32 = north + (north_east << 1) + north_east2 - south - (south_east << 1) - south_east2;
            let root : u8 = (((gx.abs() + gy.abs()) >> 1) as f32 * 1.414216) as u8;
            let root2 : u8 = (((gx2.abs() + gy2.abs()) >> 1) as f32 * 1.414216) as u8;
            result.put_pixel(i, j, Luma([root]));
            result.put_pixel(i + 1, j, Luma([root2]));
            j += 1;
        }
        i += 2;
    }
    return result;
}
/// Same two-pixel unrolled Sobel approximation as `sobel_optimized`, but
/// the magnitude is binarized against `threshold`: edges become 255,
/// everything else 0.
pub fn sobel_and_threshold(frame : ImageBuffer<Luma<u8>, Vec<u8>>, threshold : u8) -> ImageBuffer<Luma<u8>, Vec<u8>> {
    let mut result = ImageBuffer::new(640, 480);
    let mut i = 1;
    while i < 638 {
        let mut j = 1;
        while j < 479 {
            // 3x3 neighborhood for (i, j) plus the extra column for (i+1, j).
            let north_west = frame[(i-1, j-1)].channels()[0] as i32;
            let north = frame[(i, j-1)].channels()[0] as i32;
            let north_east = frame[(i+1, j-1)].channels()[0] as i32;
            let north_east2 = frame[(i+2, j-1)].channels()[0] as i32;
            let west = frame[(i-1, j)].channels()[0] as i32;
            let west2 = frame[(i, j)].channels()[0] as i32;
            let east = frame[(i+1, j)].channels()[0] as i32;
            let east2 = frame[(i+2, j)].channels()[0] as i32;
            let south_west = frame[(i-1, j+1)].channels()[0] as i32;
            let south = frame[(i, j+1)].channels()[0] as i32;
            let south_east = frame[(i+1, j+1)].channels()[0] as i32;
            let south_east2 = frame[(i+2, j+1)].channels()[0] as i32;
            let gx : i32 = north_west + south_west + (west << 1) - north_east - south_east - (east << 1);
            let gy : i32 = north_west + north_east + (north << 1) - south_west - south_east - (south << 1);
            let gx2 : i32 = north + (west2 << 1) + south - north_east2 - (east2 << 1) - south_east2;
            let gy2 : i32 = north + (north_east << 1) + north_east2 - south - (south_east << 1) - south_east2;
            // Approximate magnitude, then binarize.
            let root : u8 = (((gx.abs() + gy.abs()) >> 1) as f32 * 1.414216) as u8;
            let root =
                if root > threshold {
                    255
                } else {
                    0
                };
            let root2 : u8 = (((gx2.abs() + gy2.abs()) >> 1) as f32 * 1.414216) as u8;
            let root2 =
                if root2 > threshold {
                    255
                } else {
                    0
                };
            result.put_pixel(i, j, Luma([root]));
            result.put_pixel(i + 1, j, Luma([root2]));
            j += 1;
        }
        i += 2;
    }
    return result;
}
/// 3x3 median filter by sorting each pixel's neighborhood.
///
/// Border pixels are left untouched (the kernel would read out of bounds).
pub fn median_filter(frame : ImageBuffer<Luma<u8>, Vec<u8>>) -> ImageBuffer<Luma<u8>, Vec<u8>> {
    let mut result = ImageBuffer::new(640, 480);
    let mut kernel = [0; 9];
    // Interior is 1..=638 x 1..=478 (the old 1..638 / 1..478 bounds missed
    // the last interior row and column).
    for i in 1..639 {
        for j in 1..479 {
            // Fill the kernel with the 3x3 neighborhood.
            for k in 0..3 {
                for l in 0..3 {
                    let index = k + 3 * l;
                    let coord_x = (i + k - 1) as u32;
                    let coord_y = (j + l - 1) as u32;
                    kernel[index] = frame[(coord_x, coord_y)].channels()[0];
                }
            }
            // Unstable sort: u8 keys, no allocation, order of equals moot.
            kernel.sort_unstable();
            // Median of 9 sorted samples is index 4 (kernel[5] was off by one).
            let pixel_value = kernel[4];
            result.put_pixel(i as u32, j as u32, Luma([pixel_value]));
        }
    }
    return result;
}
/// Running 256-bin luminance histogram used by the sliding-window median
/// filters: samples are added/removed in O(1) and the median is found by
/// a single walk over the bins.
struct Histogram {
    // values[v] = number of samples currently in the window with luma v.
    values : [u16 ; 256],
    // Total number of samples currently in the window.
    count : u32
}
impl Histogram {
    pub fn new() -> Histogram {
        Histogram {
            values : [0 ; 256],
            count : 0
        }
    }
    /// Add one sample with luminance `luma`.
    pub fn increment(&mut self, luma : u8) {
        self.values[luma as usize] += 1;
        self.count += 1;
    }
    /// Remove one previously added sample (underflows if it was never added).
    pub fn decrement(&mut self, luma : u8) {
        self.values[luma as usize] -= 1;
        self.count -= 1;
    }
    /// Median of the samples (upper median for even counts): the first bin
    /// at which the cumulative count exceeds count / 2.
    ///
    /// Fix: the loop previously ran `while sum > 0`, which stopped one bin
    /// early — e.g. the median of {10, 20, 30} came out as 10, and a single
    /// sample with count/2 == 0 never entered the loop at all, so
    /// `index - 1` underflowed (panic in debug builds). `sum >= 0` walks
    /// exactly one bin further and fixes both. An empty histogram yields 255.
    pub fn median(&self) -> u8 {
        let mut sum : i32 = self.count as i32 / 2;
        let mut index = 0;
        while sum >= 0 && index < 256 {
            sum -= self.values[index] as i32;
            index += 1;
        }
        return (index - 1) as u8;
    }
}
/// Median filter with an arbitrary odd `kernel_size`, computing a fresh
/// histogram for every output pixel (O(k^2) reads per pixel). Pixels whose
/// window extends past the frame use only the in-bounds samples, so the
/// whole 640x480 frame is produced, borders included.
pub fn median_filter_hist(frame : ImageBuffer<Luma<u8>, Vec<u8>>, kernel_size : usize) -> ImageBuffer<Luma<u8>, Vec<u8>> {
    //assert!(kernel_size % 2 == 1, "Kernel size must be odd.")
    let mut result = ImageBuffer::new(640, 480);
    // Half-width of the window: window spans [-offset, +offset] around (i, j).
    let kernel_offset = ((kernel_size - 1) / 2) as i32;
    for i in 0..640 {
        for j in 0..480 {
            let mut hist = Histogram::new();
            for k in (i as i32 - kernel_offset)..(i as i32 + kernel_offset + 1) {
                for l in (j as i32 - kernel_offset)..(j as i32 + kernel_offset + 1) {
                    // Skip samples outside the frame.
                    if 0 <= k && k < 640 && 0 <= l && l < 480 {
                        let color = frame[(k as u32, l as u32)].channels()[0];
                        hist.increment(color);
                    }
                }
            }
            let median_color = Luma([hist.median()]);
            result.put_pixel(i as u32, j as u32, median_color);
        }
    }
    return result;
}
/// Sliding-window median filter: per column `i`, the histogram is updated
/// incrementally while `j` walks down the image — remove the column that
/// left the window, add the one that entered — instead of being rebuilt
/// from scratch for every pixel.
pub fn median_filter_hist_optimized(frame : ImageBuffer<Luma<u8>, Vec<u8>>, kernel_size : usize) -> ImageBuffer<Luma<u8>, Vec<u8>> {
    //assert!(kernel_size % 2 == 1, "Kernel size must be odd");
    let mut result = ImageBuffer::new(640, 480);
    let kernel_offset = ((kernel_size - 1) / 2) as i32;
    for i in 0..640 {
        let mut hist = Histogram::new();
        // Prime the histogram with the columns STRICTLY LEFT of the first
        // window's right edge. The j-loop below adds the incoming column
        // (j + kernel_offset) on every iteration, including j = 0, so
        // seeding 0..=kernel_offset here (as the code previously did)
        // counted column `kernel_offset` twice for the whole first window.
        for k in (i as i32 - kernel_offset)..(i as i32 + kernel_offset + 1) {
            for l in 0..kernel_offset {
                if check_coordinates(k, l as i32) {
                    let color = frame[(k as u32, l as u32)].channels()[0];
                    hist.increment(color);
                }
            }
        }
        for j in 0..480 {
            // Column leaving the window (above it) and column entering it.
            let old_column_coord = j as i32 - kernel_offset - 1i32;
            let new_column_coord = j as i32 + kernel_offset;
            for k in (i as i32 - kernel_offset)..(i as i32 + kernel_offset + 1) {
                if check_coordinates(k, old_column_coord) {
                    let color = frame[(k as u32, old_column_coord as u32)].channels()[0];
                    hist.decrement(color);
                }
            }
            for k in (i as i32 - kernel_offset)..(i as i32 + kernel_offset + 1) {
                if check_coordinates(k, new_column_coord) {
                    let color = frame[(k as u32, new_column_coord as u32)].channels()[0];
                    hist.increment(color);
                }
            }
            let median_color = Luma([hist.median()]);
            result.put_pixel(i as u32, j as u32, median_color);
        }
    }
    return result;
}
/// True when (x, y) lies inside the fixed 640x480 frame.
fn check_coordinates(x : i32, y : i32) -> bool {
    (0..640).contains(&x) && (0..480).contains(&y)
}
#[cfg(test)]
mod tests {
    // NOTE: `#[bench]` and `::test::Bencher` require a nightly toolchain
    // (with `#![feature(test)]` at the crate root).
    use ::test::Bencher;
    use image::{
        ConvertBuffer,
        ImageBuffer,
        Luma
    };
    use ::capture;
    // Exhaustive boundary cases for the shared bounds-check helper.
    #[test]
    fn check_coordinates() {
        use super::check_coordinates;
        assert!(!check_coordinates(-1, 0));
        assert!(!check_coordinates(0, -1));
        assert!(!check_coordinates(-1, -1));
        assert!(check_coordinates(0, 0));
        assert!(check_coordinates(42, 0));
        assert!(check_coordinates(0, 42));
        assert!(check_coordinates(42, 42));
        assert!(!check_coordinates(640, 0));
        assert!(!check_coordinates(640, 42));
        assert!(!check_coordinates(0, 480));
        assert!(!check_coordinates(42, 480));
    }
    // Each benchmark below re-converts the same fixture frame inside
    // b.iter(), so every implementation pays an identical per-iteration
    // conversion cost; only the filter under test differs.
    #[bench]
    fn sobel(b : &mut Bencher) {
        let frame : ImageBuffer<Luma<u8>, Vec<u8>> = capture::fake_capture("fake.jpg");
        b.iter(|| {
            let frame = frame.convert();
            super::sobel(frame)
        });
    }
    #[bench]
    fn sobel_optimized(b : &mut Bencher) {
        let frame : ImageBuffer<Luma<u8>, Vec<u8>> = capture::fake_capture("fake.jpg");
        b.iter(|| {
            let frame = frame.convert();
            super::sobel_optimized(frame)
        });
    }
    #[bench]
    fn sobel_and_threshold(b : &mut Bencher) {
        let frame : ImageBuffer<Luma<u8>, Vec<u8>> = capture::fake_capture("fake.jpg");
        b.iter(|| {
            let frame = frame.convert();
            super::sobel_and_threshold(frame, 127)
        });
    }
    #[bench]
    fn median_filter(b : &mut Bencher) {
        let frame : ImageBuffer<Luma<u8>, Vec<u8>> = capture::fake_capture("fake.jpg");
        b.iter(|| {
            let frame = frame.convert();
            super::median_filter(frame)
        });
    }
    // Naive histogram median filter across growing kernel sizes.
    #[bench]
    fn median_filter_hist_3(b : &mut Bencher) {
        let frame : ImageBuffer<Luma<u8>, Vec<u8>> = capture::fake_capture("fake.jpg");
        b.iter(|| {
            let frame = frame.convert();
            super::median_filter_hist(frame, 3)
        });
    }
    #[bench]
    fn median_filter_hist_5(b : &mut Bencher) {
        let frame : ImageBuffer<Luma<u8>, Vec<u8>> = capture::fake_capture("fake.jpg");
        b.iter(|| {
            let frame = frame.convert();
            super::median_filter_hist(frame, 5)
        });
    }
    #[bench]
    fn median_filter_hist_9(b : &mut Bencher) {
        let frame : ImageBuffer<Luma<u8>, Vec<u8>> = capture::fake_capture("fake.jpg");
        b.iter(|| {
            let frame = frame.convert();
            super::median_filter_hist(frame, 9)
        });
    }
    #[bench]
    fn median_filter_hist_15(b : &mut Bencher) {
        let frame : ImageBuffer<Luma<u8>, Vec<u8>> = capture::fake_capture("fake.jpg");
        b.iter(|| {
            let frame = frame.convert();
            super::median_filter_hist(frame, 15)
        });
    }
    #[bench]
    fn median_filter_hist_21(b : &mut Bencher) {
        let frame : ImageBuffer<Luma<u8>, Vec<u8>> = capture::fake_capture("fake.jpg");
        b.iter(|| {
            let frame = frame.convert();
            super::median_filter_hist(frame, 21)
        });
    }
    #[bench]
    fn median_filter_hist_31(b : &mut Bencher) {
        let frame : ImageBuffer<Luma<u8>, Vec<u8>> = capture::fake_capture("fake.jpg");
        b.iter(|| {
            let frame = frame.convert();
            super::median_filter_hist(frame, 31)
        });
    }
    // Sliding-window variant across the same kernel sizes, for comparison.
    #[bench]
    fn median_filter_hist_optimized_3(b : &mut Bencher) {
        let frame : ImageBuffer<Luma<u8>, Vec<u8>> = capture::fake_capture("fake.jpg");
        b.iter(|| {
            let frame = frame.convert();
            super::median_filter_hist_optimized(frame, 3)
        });
    }
    #[bench]
    fn median_filter_hist_optimized_5(b : &mut Bencher) {
        let frame : ImageBuffer<Luma<u8>, Vec<u8>> = capture::fake_capture("fake.jpg");
        b.iter(|| {
            let frame = frame.convert();
            super::median_filter_hist_optimized(frame, 5)
        });
    }
    #[bench]
    fn median_filter_hist_optimized_9(b : &mut Bencher) {
        let frame : ImageBuffer<Luma<u8>, Vec<u8>> = capture::fake_capture("fake.jpg");
        b.iter(|| {
            let frame = frame.convert();
            super::median_filter_hist_optimized(frame, 9)
        });
    }
    #[bench]
    fn median_filter_hist_optimized_15(b : &mut Bencher) {
        let frame : ImageBuffer<Luma<u8>, Vec<u8>> = capture::fake_capture("fake.jpg");
        b.iter(|| {
            let frame = frame.convert();
            super::median_filter_hist_optimized(frame, 15)
        });
    }
    #[bench]
    fn median_filter_hist_optimized_21(b : &mut Bencher) {
        let frame : ImageBuffer<Luma<u8>, Vec<u8>> = capture::fake_capture("fake.jpg");
        b.iter(|| {
            let frame = frame.convert();
            super::median_filter_hist_optimized(frame, 21)
        });
    }
    #[bench]
    fn median_filter_hist_optimized_31(b : &mut Bencher) {
        let frame : ImageBuffer<Luma<u8>, Vec<u8>> = capture::fake_capture("fake.jpg");
        b.iter(|| {
            let frame = frame.convert();
            super::median_filter_hist_optimized(frame, 31)
        });
    }
}
| true |
47ffb5934468b61d10b34d477815b617d1c70589
|
Rust
|
tokio-rs/tracing
|
/tracing-subscriber/tests/event_enabling.rs
|
UTF-8
| 2,249 | 2.75 | 3 |
[
"MIT"
] |
permissive
|
#![cfg(feature = "registry")]
use std::sync::{Arc, Mutex};
use tracing::{collect::with_default, Collect, Event, Metadata};
use tracing_subscriber::{prelude::*, registry, subscribe::Context, Subscribe};
/// Test subscriber that records how often the filtering hooks fire, with
/// configurable return values for `enabled` and `event_enabled`.
struct TrackingLayer {
    // Return value for `enabled` (per-callsite filter).
    enabled: bool,
    // Number of times `event_enabled` was invoked.
    event_enabled_count: Arc<Mutex<usize>>,
    // Return value for `event_enabled` (per-event filter).
    event_enabled: bool,
    // Number of times `on_event` was invoked.
    on_event_count: Arc<Mutex<usize>>,
}
impl<C> Subscribe<C> for TrackingLayer
where
    C: Collect + Send + Sync + 'static,
{
    // Per-callsite filter: returns the configured flag unchanged.
    fn enabled(&self, _metadata: &Metadata<'_>, _ctx: Context<'_, C>) -> bool {
        self.enabled
    }
    // Per-event filter: counts every invocation before answering.
    fn event_enabled(&self, _event: &Event<'_>, _ctx: Context<'_, C>) -> bool {
        *self.event_enabled_count.lock().unwrap() += 1;
        self.event_enabled
    }
    // Counts events that actually reached the subscriber.
    fn on_event(&self, _event: &Event<'_>, _ctx: Context<'_, C>) {
        *self.on_event_count.lock().unwrap() += 1;
    }
}
#[test]
fn event_enabled_is_only_called_once() {
let event_enabled_count = Arc::new(Mutex::default());
let count = event_enabled_count.clone();
let collector = registry().with(TrackingLayer {
enabled: true,
event_enabled_count,
event_enabled: true,
on_event_count: Arc::new(Mutex::default()),
});
with_default(collector, || {
tracing::error!("hiya!");
});
assert_eq!(1, *count.lock().unwrap());
}
#[test]
fn event_enabled_not_called_when_not_enabled() {
let event_enabled_count = Arc::new(Mutex::default());
let count = event_enabled_count.clone();
let collector = registry().with(TrackingLayer {
enabled: false,
event_enabled_count,
event_enabled: true,
on_event_count: Arc::new(Mutex::default()),
});
with_default(collector, || {
tracing::error!("hiya!");
});
assert_eq!(0, *count.lock().unwrap());
}
#[test]
fn event_disabled_does_disable_event() {
let on_event_count = Arc::new(Mutex::default());
let count = on_event_count.clone();
let collector = registry().with(TrackingLayer {
enabled: true,
event_enabled_count: Arc::new(Mutex::default()),
event_enabled: false,
on_event_count,
});
with_default(collector, || {
tracing::error!("hiya!");
});
assert_eq!(0, *count.lock().unwrap());
}
| true |
8ea36765f9bd7bbd4673d368a7dfabc1f303f01a
|
Rust
|
behzadnouri/rust-lock-bug-detector
|
/examples/tikv-wrapper/src/util.rs
|
UTF-8
| 384 | 2.765625 | 3 |
[
"Apache-2.0"
] |
permissive
|
use std::sync::{RwLock, RwLockWriteGuard, RwLockReadGuard};
/// Shorthand accessors for `RwLock` that treat a poisoned lock as
/// unrecoverable: both methods panic (via `unwrap`) on poisoning.
pub trait HandyRwLock<T> {
    /// Acquire the write guard ("wl" = write lock). Panics if poisoned.
    fn wl(&self) -> RwLockWriteGuard<'_, T>;
    /// Acquire the read guard ("rl" = read lock). Panics if poisoned.
    fn rl(&self) -> RwLockReadGuard<'_, T>;
}
impl<T> HandyRwLock<T> for RwLock<T> {
    fn rl(&self) -> RwLockReadGuard<'_, T> {
        let guard = self.read();
        guard.unwrap()
    }
    fn wl(&self) -> RwLockWriteGuard<'_, T> {
        let guard = self.write();
        guard.unwrap()
    }
}
| true |
abf85a57dd7fba20a6baa6a0ce4d08df52c2e4c6
|
Rust
|
pantsbuild/pants
|
/src/rust/engine/hashing/src/fingerprint_tests.rs
|
UTF-8
| 2,247 | 2.796875 | 3 |
[
"Apache-2.0"
] |
permissive
|
// Copyright 2022 Pants project contributors (see CONTRIBUTORS.md).
// Licensed under the Apache License, Version 2.0 (see LICENSE).
use crate::Fingerprint;
use serde_test::{assert_ser_tokens, Token};
// Round-trips a fixed byte pattern through the unchecked constructor.
#[test]
fn from_bytes_unsafe() {
  assert_eq!(
    Fingerprint::from_bytes_unsafe(&[
      0xab, 0xab, 0xab, 0xab, 0xab, 0xab, 0xab, 0xab, 0xab, 0xab, 0xab, 0xab, 0xab, 0xab, 0xab,
      0xab, 0xab, 0xab, 0xab, 0xab, 0xab, 0xab, 0xab, 0xab, 0xab, 0xab, 0xab, 0xab, 0xab, 0xab,
      0xab, 0xab,
    ],),
    Fingerprint([0xab; 32])
  );
}
// Parsing accepts mixed-case hex and yields the exact byte values.
#[test]
fn from_hex_string() {
  assert_eq!(
    Fingerprint::from_hex_string(
      "0123456789abcdefFEDCBA98765432100000000000000000ffFFfFfFFfFfFFff",
    )
    .unwrap(),
    Fingerprint([
      0x01, 0x23, 0x45, 0x67, 0x89, 0xab, 0xcd, 0xef, 0xfe, 0xdc, 0xba, 0x98, 0x76, 0x54, 0x32,
      0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
      0xff, 0xff,
    ],)
  )
}
// Malformed inputs: too short, too long (65 chars), non-hex character.
#[test]
fn from_hex_string_not_long_enough() {
  Fingerprint::from_hex_string("abcd").expect_err("Want err");
}
#[test]
fn from_hex_string_too_long() {
  Fingerprint::from_hex_string("0123456789ABCDEF0123456789ABCDEF0123456789ABCDEF0123456789ABCDEF0")
    .expect_err("Want err");
}
#[test]
fn from_hex_string_invalid_chars() {
  Fingerprint::from_hex_string("Q123456789ABCDEF0123456789ABCDEF0123456789ABCDEF0123456789ABCDEF")
    .expect_err("Want err");
}
// Hex output is always lowercase.
#[test]
fn to_hex() {
  assert_eq!(
    Fingerprint([
      0x01, 0x23, 0x45, 0x67, 0x89, 0xab, 0xcd, 0xef, 0xfe, 0xdc, 0xba, 0x98, 0x76, 0x54, 0x32,
      0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
      0xff, 0xff,
    ],)
    .to_hex(),
    "0123456789abcdeffedcba98765432100000000000000000ffffffffffffffff".to_lowercase()
  )
}
// Parse-then-format round trip normalizes case.
#[test]
fn display() {
  let hex = "0123456789ABCDEF0123456789ABCDEF0123456789ABCDEF0123456789ABCDEF";
  assert_eq!(
    Fingerprint::from_hex_string(hex).unwrap().to_hex(),
    hex.to_lowercase()
  )
}
// Serde serializes a fingerprint as its lowercase hex string.
#[test]
fn serialize_to_str() {
  let fingerprint = Fingerprint([
    0x01, 0x23, 0x45, 0x67, 0x89, 0xab, 0xcd, 0xef, 0xfe, 0xdc, 0xba, 0x98, 0x76, 0x54, 0x32, 0x10,
    0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
  ]);
  assert_ser_tokens(
    &fingerprint,
    &[Token::Str(
      "0123456789abcdeffedcba98765432100000000000000000ffffffffffffffff",
    )],
  );
}
| true |
20c5afb4707cb29486e79bcc0090c2a3780063b0
|
Rust
|
josh-perry/webserver
|
/src/request.rs
|
UTF-8
| 347 | 2.59375 | 3 |
[] |
no_license
|
use std::fmt;
use std::collections::HashMap;
use crate::verb;
/// A parsed HTTP request: method, target path, body and header map.
#[derive(Debug)]
pub struct Request {
    pub verb: verb::Verb,
    pub path: String,
    pub body: String,
    pub headers: HashMap<String, String>
}
/// Display delegates to the pretty-printed `Debug` representation.
impl fmt::Display for Request {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{:#?}", self)
    }
}
| true |
1fb6c9a61ad85d68ac3fee20dd2c425a508964fb
|
Rust
|
wagao29/hack-assembler
|
/src/main.rs
|
UTF-8
| 2,527 | 2.6875 | 3 |
[] |
no_license
|
use hack_assembler::{code, parser, symbol_table};
use std::io::prelude::*;
use std::io::{BufReader, BufWriter};
use std::{env, fs};
/// Two-pass Hack assembler: pass 1 records the ROM address of each
/// `(LABEL)` pseudo-command; pass 2 emits one 16-bit binary word per
/// A/C-instruction, resolving symbols to ROM/RAM addresses.
fn main() {
    let args: Vec<String> = env::args().collect();
    let input_file_name = &args[1];
    let input_file = BufReader::new(fs::File::open(input_file_name).unwrap());
    let output_file_name = input_file_name.replace("asm", "hack");
    let mut output_file = BufWriter::new(fs::File::create(output_file_name).unwrap());
    let mut parser = parser::Parser::new(input_file);
    // ROM address of the next real instruction; labels bind to this.
    let mut counter = 0;
    let mut symbol_table = symbol_table::SymbolTable::new();
    // First pass: register labels in the symbol table.
    loop {
        parser.advance();
        if !parser.has_more_commands {
            break;
        }
        match parser.command_type() {
            parser::CommandType::Acommand => counter += 1,
            parser::CommandType::Ccommand => counter += 1,
            parser::CommandType::Lcommand => {
                symbol_table.add_entry(parser.symbol(), counter);
            }
        };
    }
    let input_file = BufReader::new(fs::File::open(input_file_name).unwrap());
    let mut parser = parser::Parser::new(input_file);
    // RAM slots for fresh variables start at 16 (pre-incremented below).
    let mut ram_address = 15;
    // Second pass: translate each command to a 16-bit machine word.
    loop {
        parser.advance();
        if !parser.has_more_commands {
            break;
        }
        match parser.command_type() {
            parser::CommandType::Acommand => {
                // @value is either a literal, a known symbol, or a new
                // variable that gets the next free RAM address.
                let dec_num: i32 = match parser.symbol().parse() {
                    Ok(num) => num,
                    Err(_) => {
                        if symbol_table.contains(parser.symbol()) {
                            symbol_table.get_address(parser.symbol())
                        } else {
                            ram_address += 1;
                            symbol_table.add_entry(parser.symbol(), ram_address);
                            ram_address
                        }
                    }
                };
                let bin_num = format!("{:0>16b}\n", dec_num);
                // write_all: plain `write` may emit only part of the buffer.
                output_file.write_all(bin_num.as_bytes()).unwrap();
            }
            parser::CommandType::Ccommand => {
                let bin_code = format!(
                    "111{}{}{}\n",
                    code::comp(parser.comp()),
                    code::dest(parser.dest()),
                    code::jump(parser.jump()),
                );
                output_file.write_all(bin_code.as_bytes()).unwrap();
            }
            parser::CommandType::Lcommand => (),
        };
    }
    // BufWriter's Drop flushes but silently swallows errors; flush explicitly.
    output_file.flush().unwrap();
}
| true |
02eaced8715d2add282f79421841a7ce431febf6
|
Rust
|
tormol/iterator_markers
|
/lib.rs
|
UTF-8
| 8,710 | 2.953125 | 3 |
[
"Apache-2.0",
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
// Copyright 2016 Torbjørn Birch Moltu.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Extra marker traits for iterators.
//!
//! # Feature flags:
//! * **unstable**: Implement for `Range` and `RangeInclusive`.
//! * (opt-out) **std**: Implement for the map and set iterators in std and don't set `#[no_std]`.
#![cfg_attr(feature="unstable", feature(inclusive_range, step_trait))]
#![cfg_attr(not(feature="std"), no_std)]
#[cfg(feature="std")]
extern crate core;
use core::iter::*;
/// Marker trait for iterators that will never return two equal items, like a set.
///
/// The trait is unsafe so consumers can rely on it.
/// Marker trait for iterators that will never return two equal items, like a set.
///
/// The trait is unsafe so consumers can rely on it.
/// Implementors must uphold the no-duplicates guarantee unconditionally;
/// downstream `unsafe` code may depend on it for soundness.
pub unsafe trait UniqueIterator: Iterator where Self::Item: PartialEq
{}
/// Marker trait for iterators that return items in a sorted ascending order.
/// It does not guarantee uniqueness, but equal items must come straight after each other.
/// Use `I: UniqueIterator+AscendingIterator` if you need both.
///
/// The trait is unsafe so consumers can rely on it.
/// It is a logic error to implement both `AscendingIterator` and `DescendingIterator` for the same type.
///
/// # Examples:
/// ```
/// let _ = (1..10);
/// ```
pub unsafe trait AscendingIterator: Iterator where Self::Item: PartialOrd
{}
/// Marker trait for iterators that return items in a sorted desscending order.
/// It does not guarantee uniqueness, but equal items must come straight after each other.
/// Use `I: UniqueIterator+DescendingIterator` if you need both.
///
/// The trait is unsafe so consumers can rely on it.
/// It is a logic error to implement both `AscendingIterator` and `DescendingIterator` for the same type.
///
/// # Examples:
/// ```
/// let _ = (1..10).rev();
/// ```
pub unsafe trait DescendingIterator: Iterator where Self::Item: PartialOrd
{}
// Enumerate yields a strictly increasing usize as the first tuple element,
// so its (index, item) pairs are unique and lexicographically ascending
// regardless of the inner iterator.
unsafe impl<T:PartialEq, I:Iterator<Item=T>> UniqueIterator for Enumerate<I> {}
unsafe impl<T:PartialOrd, I:Iterator<Item=T>> AscendingIterator for Enumerate<I> {}
// A one-item iterator is trivially unique and sorted in both directions;
// likewise an empty one, vacuously.
unsafe impl<T:PartialEq> UniqueIterator for Once<T> {}
unsafe impl<T:PartialOrd> AscendingIterator for Once<T> {}
unsafe impl<T:PartialOrd> DescendingIterator for Once<T> {}
unsafe impl<T:PartialEq> UniqueIterator for Empty<T> {}
unsafe impl<T:PartialOrd> AscendingIterator for Empty<T> {}
unsafe impl<T:PartialOrd> DescendingIterator for Empty<T> {}
#[cfg(feature="unstable")]
mod unstable {
    // Ranges count upward without repeating, so they are unique and
    // ascending. Gated on nightly because `Step` is unstable.
    use super::{UniqueIterator,AscendingIterator};
    use core::ops::{Range,RangeInclusive, Add};
    use core::iter::Step;// is unstable and must be specified.
    /// Requires the feature `unstable`.
    unsafe impl<T:PartialEq+Step> UniqueIterator for Range<T> where for<'a> &'a T: Add<&'a T, Output=T> {}
    /// Requires the feature `unstable`.
    unsafe impl<T:PartialOrd+Step> AscendingIterator for Range<T> where for<'a> &'a T: Add<&'a T, Output=T> {}
    /// Requires the feature `unstable`.
    unsafe impl<T:PartialEq+Step> UniqueIterator for RangeInclusive<T> where for<'a> &'a T: Add<&'a T, Output=T> {}
    /// Requires the feature `unstable`.
    unsafe impl<T:PartialOrd+Step> AscendingIterator for RangeInclusive<T> where for<'a> &'a T: Add<&'a T, Output=T> {}
    // RangeTo and RangeInclusiveTo doesn't implement iterator!
    // RangeFrom and RangeFull wraps around in release mode.
}
#[cfg(feature="unstable")]
pub use unstable::*;
#[cfg(feature="std")]
mod collections {
    // Map/set iterators from std: keys are unique by construction, and the
    // BTree-based ones additionally iterate in ascending key order.
    use super::{UniqueIterator,AscendingIterator};
    use std::collections::{hash_map, btree_map, hash_set, btree_set};
    use std::hash::{Hash, BuildHasher};
    unsafe impl<'a, K:Eq, V> UniqueIterator for hash_map::Keys<'a,K,V> {}
    unsafe impl<'a, K:Eq, V:PartialEq> UniqueIterator for hash_map::Iter<'a,K,V> {}
    unsafe impl<'a, K:Eq, V:PartialEq> UniqueIterator for hash_map::IterMut<'a,K,V> {}
    unsafe impl<K:Eq, V:PartialEq> UniqueIterator for hash_map::IntoIter<K,V> {}
    unsafe impl<'a, K:Ord, V> UniqueIterator for btree_map::Keys<'a,K,V> {}
    unsafe impl<'a, K:Ord, V:PartialEq> UniqueIterator for btree_map::Iter<'a,K,V> {}
    unsafe impl<'a, K:Ord, V:PartialEq> UniqueIterator for btree_map::IterMut<'a,K,V> {}
    unsafe impl<K:Ord, V:PartialEq> UniqueIterator for btree_map::IntoIter<K,V> {}
    unsafe impl<'a, K:Ord, V> AscendingIterator for btree_map::Keys<'a,K,V> {}
    unsafe impl<'a, K:Ord, V:PartialOrd> AscendingIterator for btree_map::Iter<'a,K,V> {}
    unsafe impl<'a, K:Ord, V:PartialOrd> AscendingIterator for btree_map::IterMut<'a,K,V> {}
    unsafe impl<K:Ord, V:PartialOrd> AscendingIterator for btree_map::IntoIter<K,V> {}
    unsafe impl<T:Eq> UniqueIterator for hash_set::IntoIter<T> {}
    unsafe impl<'a, T:Eq+Hash> UniqueIterator for hash_set::Iter<'a,T> {}
    // Set-algebra iterators also cannot repeat an element.
    unsafe impl<'a, T:Eq+Hash, S:BuildHasher> UniqueIterator for hash_set::Union<'a,T,S> {}
    unsafe impl<'a, T:Eq+Hash, S:BuildHasher> UniqueIterator for hash_set::Intersection<'a,T,S> {}
    unsafe impl<'a, T:Eq+Hash, S:BuildHasher> UniqueIterator for hash_set::Difference<'a,T,S> {}
    unsafe impl<'a, T:Eq+Hash, S:BuildHasher> UniqueIterator for hash_set::SymmetricDifference<'a,T,S> {}
    unsafe impl<T:Ord> UniqueIterator for btree_set::IntoIter<T> {}
    unsafe impl<'a, T:Ord> UniqueIterator for btree_set::Iter<'a,T> {}
    unsafe impl<'a, T:Ord> UniqueIterator for btree_set::Intersection<'a,T> {}
    unsafe impl<'a, T:Ord> UniqueIterator for btree_set::Union<'a,T> {}
    unsafe impl<'a, T:Ord> UniqueIterator for btree_set::Difference<'a,T> {}
    unsafe impl<'a, T:Ord> UniqueIterator for btree_set::SymmetricDifference<'a,T> {}
    unsafe impl<T:Ord> AscendingIterator for btree_set::IntoIter<T> {}
    unsafe impl<'a, T:Ord> AscendingIterator for btree_set::Iter<'a,T> {}
    unsafe impl<'a, T:Ord> AscendingIterator for btree_set::Intersection<'a,T> {}
    // Are the others sorted?
}
#[cfg(feature="std")]
pub use collections::*;
// Iterator adaptors that maintain guarantees:
// Iterator adaptors that cannot break the guarantees: they may drop or
// re-inspect items, but never reorder, duplicate, or fabricate them.
macro_rules! it {($i:item) => {$i}}// workaround for issue #5846 fixed in nightly
// simple_iter: forward a marker through adaptors generic only over the
// inner iterator (Peekable<I>, Skip<I>, ...).
macro_rules! simple_iter {($bound:tt, $marker:tt: $($typ:tt)*) => {
    $(it!{unsafe impl<T:$bound, I:Iterator<Item=T>+$marker> $marker for $typ<I> {}})*
}}
// filter_iter: same, for adaptors that also carry a predicate closure.
macro_rules! filter_iter {($bound:tt, $marker:tt: $($typ:tt)*) => {
    $(it!{unsafe impl<T:$bound, I:Iterator<Item=T>+$marker, F:FnMut(&T)->bool> $marker for $typ<I,F> {}})*
}}
simple_iter!{PartialEq, UniqueIterator: Peekable Skip Take Fuse}
simple_iter!{PartialOrd, AscendingIterator: Peekable Skip Take Fuse}
simple_iter!{PartialOrd, DescendingIterator: Peekable Skip Take Fuse}
filter_iter!{PartialEq, UniqueIterator: Filter SkipWhile TakeWhile}
filter_iter!{PartialOrd, AscendingIterator: Filter SkipWhile TakeWhile}
filter_iter!{PartialOrd, DescendingIterator: Filter SkipWhile TakeWhile}
unsafe impl<I:DoubleEndedIterator+UniqueIterator> UniqueIterator for Rev<I> where I::Item: PartialOrd {}
// Note the swap
unsafe impl<I:DoubleEndedIterator+AscendingIterator> DescendingIterator for Rev<I> where I::Item: PartialOrd {}
unsafe impl<I:DoubleEndedIterator+DescendingIterator> AscendingIterator for Rev<I> where I::Item: PartialOrd {}
unsafe impl<'a, T:'a+Clone+PartialEq, I:Iterator<Item=&'a T>+UniqueIterator> UniqueIterator for Cloned<I> {}
unsafe impl<'a, T:'a+Clone+PartialOrd, I:Iterator<Item=&'a T>+AscendingIterator> AscendingIterator for Cloned<I> {}
unsafe impl<'a, T:'a+Clone+PartialOrd, I:Iterator<Item=&'a T>+DescendingIterator> DescendingIterator for Cloned<I> {}
unsafe impl<T:PartialEq, I:Iterator<Item=T>+UniqueIterator, F:FnMut(&T)> UniqueIterator for Inspect<I,F> {}
unsafe impl<T:PartialOrd, I:Iterator<Item=T>+AscendingIterator, F:FnMut(&T)> AscendingIterator for Inspect<I,F> {}
unsafe impl<T:PartialOrd, I:Iterator<Item=T>+DescendingIterator, F:FnMut(&T)> DescendingIterator for Inspect<I,F> {}
// A unique first component makes the whole pair unique (lexicographic
// tuple comparison), so only IA needs the marker.
// Implementing for `IB:UniqueIterator` creates a conflict; just swap the order.
unsafe impl<TA,TB,IA,IB> UniqueIterator for Zip<IA,IB>
    where TA:PartialEq, IA:Iterator<Item=TA>+UniqueIterator,
          TB:PartialEq, IB:Iterator<Item=TB>
{}
// Cannot also implement for where both are sorted, I think sorted and not unique will be uncommon.
unsafe impl<TA,TB,IA,IB> AscendingIterator for Zip<IA,IB>
    where TA:PartialOrd, IA:Iterator<Item=TA>+AscendingIterator+UniqueIterator,
          TB:PartialOrd, IB:Iterator<Item=TB>
{}
unsafe impl<TA,TB,IA,IB> DescendingIterator for Zip<IA,IB>
    where TA:PartialOrd, IA:Iterator<Item=TA>+DescendingIterator+UniqueIterator,
          TB:PartialOrd, IB:Iterator<Item=TB>
{}
| true |
ea1a5568a8053fb6ffaa91d0bf4cac63d34d7a8d
|
Rust
|
Geigerkind/Rust-testing-example
|
/src/tests/random_testing.rs
|
UTF-8
| 781 | 2.75 | 3 |
[] |
no_license
|
use proptest::prelude::*;
use crate::tools::parse_date;
proptest! {
    // Fuzz: parse_date must never panic, whatever (non-control) string it gets.
    #[test]
    fn doesnt_crash(s in "\\PC*") {
        parse_date(&s);
    }
    // Any string matching the YYYY-MM-DD shape must parse.
    // NOTE(review): `[0-9]{2}` also generates calendar-invalid values such as
    // month "99" — this assumes parse_date validates only the *format*, not
    // calendar validity. Confirm against parse_date's contract.
    #[test]
    fn parses_all_valid_dates(s in "[0-9]{4}-[0-9]{2}-[0-9]{2}") {
        parse_date(&s).unwrap();
    }
    // Round-trip: formatting (y, m, d) and parsing it back yields the same
    // triple. Same assumption as above: d in 1..32 includes e.g. Feb 30.
    #[test]
    fn parses_date_back_to_original(y in 0u32..10000,
                                    m in 1u32..13, d in 1u32..32) {
        let (y2, m2, d2) = parse_date(
            &format!("{:04}-{:02}-{:02}", y, m, d)).unwrap();
        // prop_assert_eq! is basically the same as assert_eq!, but doesn't
        // cause a bunch of panic messages to be printed on intermediate
        // test failures. Which one to use is largely a matter of taste.
        prop_assert_eq!((y, m, d), (y2, m2, d2));
    }
}
| true |
d084fb2970b6fc5bfa3fcd74f5e15656144ae449
|
Rust
|
jonasvandervennet/rusty_snake
|
/src/lib.rs
|
UTF-8
| 6,088 | 3.53125 | 4 |
[
"MIT"
] |
permissive
|
use std::process::Command;
use std::io::{BufWriter, stdout};
use std::io::Write;
use std::{thread, time}; // For sleeping
// Top-level game state: board dimensions, all food positions, and the snake.
pub struct Game {
    // dimensions of the map
    height: usize,
    width: usize,
    // all locations containing food
    pub food_locations: Vec<Location>,
    // snake
    snake: Snake
}
// A single grid cell. `x` is the row and `y` the column, matching the
// iteration order in Game::draw (x in 0..height, y in 0..width).
#[derive(Debug, Clone, Copy)]
pub struct Location {
    x: usize,
    y: usize,
    // what currently occupies this cell (empty / food / snake)
    location_type: LocationType,
}
impl Location {
    /// Copying constructor: duplicates the given location field by field.
    pub fn from(location: &Location) -> Location {
        Location {
            x: location.x,
            y: location.y,
            location_type: location.location_type,
        }
    }

    /// Returns the neighbouring cell one step in `dir`, keeping the same
    /// `location_type`. UP/DOWN move along `x` (rows), LEFT/RIGHT along `y`.
    /// NOTE(review): like the original arithmetic, stepping UP at x == 0 or
    /// LEFT at y == 0 underflows `usize`.
    pub fn advance(&self, dir: &Direction) -> Location {
        let (x, y) = match dir {
            Direction::UP => (self.x - 1, self.y),
            Direction::DOWN => (self.x + 1, self.y),
            Direction::RIGHT => (self.x, self.y + 1),
            Direction::LEFT => (self.x, self.y - 1),
        };
        Location { x, y, location_type: self.location_type }
    }

    /// True when this location sits at the given grid coordinates.
    pub fn matches(&self, x: usize, y: usize) -> bool {
        self.x == x && self.y == y
    }
}
// The player-controlled snake and its per-tick bookkeeping.
struct Snake {
    // the current direction of the snake
    direction: Direction,
    // food found this round
    found: bool,
    // body locations in order starting from the head
    body: Vec<Location>,
    // set by update() when the new head lands on the body (border deaths are
    // detected separately in Game::is_finished)
    dead: bool,
    score: usize,
}
impl Snake {
pub fn new() -> Snake {
let mut snake = Snake{direction: Direction::RIGHT, body: vec!(), dead: false, found: false, score: 0};
snake.body.push(Location{x: 5, y: 2, location_type: LocationType::SNAKE});
snake
}
pub fn contains(&self, location: &Location) -> bool {
for part in self.body.iter(){
if part.x == location.x && part.y == location.y {return true}
}
false
}
pub fn update(&mut self) {
let mut body: Vec<Location> = vec!();
let newhead = Location::from(self.body.first().expect("Error getting snake head.")).advance(&self.direction);
if self.contains(&newhead) {self.dead = true;}
body.push(newhead);
self.body.iter().for_each(|loc| body.push(Location::from(loc)));
if !self.found {body.remove(body.len() - 1);}
else {self.score += 1;}
self.found = false;
self.body = body;
}
}
impl Game {
    /// Builds a `height` x `width` game with a fresh snake and one piece of
    /// food at (3, 3).
    pub fn create(height: usize, width: usize) -> Game {
        let snake: Snake = Snake::new();
        let mut game = Game{food_locations: vec!(), height: height, width: width, snake: snake};
        game.food_locations.push(Location{x: 3, y: 3, location_type: LocationType::FOOD});
        game
    }

    /// Main loop: advance the snake, redraw, stop once the snake dies.
    pub fn start(&mut self){
        self.draw();
        // TODO: wait for first keyboard press to start
        loop {
            self.snake.update();
            self.draw();
            if self.is_finished() {
                println!("Game over!\tYou died with a score of {}", self.snake.score);
                break;
            }
            // TODO: sleep so an acceptable playing speed is reached
            let sleeptime = time::Duration::from_secs(1);
            thread::sleep(sleeptime);
        }
    }

    /// The game is over when the snake collided with itself (`dead`) or any
    /// body segment sits on the border of the board.
    ///
    /// Fix: `x` is the row index (0..height) and `y` the column (0..width),
    /// as established by the loops in `draw()`, but the original compared
    /// `x` against `width` and `y` against `height` — swapped bounds that
    /// misbehave on non-square boards. This border check is also what keeps
    /// `Location::advance` from underflowing: reaching row/column 0 ends the
    /// game before the snake can step further out.
    fn is_finished(&self) -> bool {
        if self.snake.dead {return true}
        for part in self.snake.body.iter(){
            if part.x == 0 || part.x == self.height || part.y == 0 || part.y == self.width {return true}
        }
        false
    }

    /// Clears the terminal and renders the board row by row:
    /// 'E' empty, 'X' food, 'O' snake.
    fn draw(&self) {
        // Run `clear` and keep its output; it is printed below together with
        // the banner so the screen resets right before the new frame.
        let output = Command::new("clear").output().unwrap_or_else(|e| {
            panic!("failed to execute process: {}", e)
        });
        let mut map: Vec<Location> = vec!();
        // TODO: add border to game using | and ¨¨¨¨¨¨¨¨¨and ___________
        // Build a flat row-major cell list; food wins over snake when both
        // occupy a cell.
        // TODO: optimize, lots of repeated useless checks every drawcycle!
        for x in 0 .. self.height {
            for y in 0 .. self.width {
                let mut location = Location{x: x, y: y, location_type: LocationType::EMPTY};
                let mut found_location = false;
                for food_loc in self.food_locations.iter() {
                    if food_loc.matches(x, y) {
                        found_location = true;
                        location = Location::from(food_loc);
                        break;
                    }
                }
                if !found_location {
                    for snake_loc in self.snake.body.iter() {
                        if snake_loc.matches(x, y) {
                            location = Location::from(snake_loc);
                            break;
                        }
                    }
                }
                map.push(location);
            }
        }
        let mut stream = BufWriter::new(stdout());
        // Draw the actual map: newline before each row, then one glyph per cell.
        for (i, location) in map.iter().enumerate() {
            if i % (self.width) == 0 {
                stream.write(b"\n").expect("Invalid stream write");
            }
            match location.location_type {
                LocationType::EMPTY => stream.write(b"E"),
                LocationType::FOOD => stream.write(b"X"),
                LocationType::SNAKE => stream.write(b"O"),
            }.expect("Invalid stream write");
        }
        println!("{}\tRUSTY SNAKE\n", String::from_utf8_lossy(&output.stdout));
        println!("Amount of tiles: {}", map.len());
        stream.flush().expect("Error flushing stream!");
    }
}
// What occupies a grid cell; drives the glyph chosen in Game::draw
// (EMPTY -> 'E', FOOD -> 'X', SNAKE -> 'O').
#[derive(Copy, Clone, Debug)]
pub enum LocationType {
    EMPTY,
    FOOD,
    SNAKE,
}
// Heading of the snake; see Location::advance for the axis mapping
// (UP/DOWN move along x, LEFT/RIGHT along y).
#[derive(Copy, Clone)]
pub enum Direction {
    UP,
    DOWN,
    RIGHT,
    LEFT,
}
| true |
c0292786c55101dbded3f8f99422dcd167595299
|
Rust
|
baitcenter/cvmath
|
/src/num/mod.rs
|
UTF-8
| 1,221 | 2.53125 | 3 |
[] |
no_license
|
/*!
Numeric traits.
*/
use std::{cmp, fmt, ops};
mod zero;
mod one;
mod cast;
mod extrema;
mod abs;
mod spatial_ord;
mod float_ops;
pub use self::zero::Zero;
pub use self::one::One;
pub use self::cast::{CastFrom, CastTo};
pub use self::extrema::Extrema;
pub use self::abs::Abs;
pub use self::spatial_ord::SpatialOrd;
pub use self::float_ops::FloatOps;
/// Blanket requirements for a numeric component type usable in vectors and
/// matrices: copyable, printable, full arithmetic (including negation and
/// remainder, plus the compound-assignment forms), min/max/abs, and partial
/// comparison. Implemented below for the signed integer and float primitives.
pub trait Scalar where Self
	: Copy + Default + Zero + One
	+ fmt::Display + fmt::Debug
	+ ops::Add<Output = Self> + ops::Sub<Output = Self>
	+ ops::Mul<Output = Self> + ops::Div<Output = Self>
	+ ops::Neg<Output = Self> + ops::Rem<Output = Self>
	+ ops::AddAssign + ops::SubAssign + ops::MulAssign + ops::DivAssign
	+ Extrema + Abs<Output = Self>
	+ cmp::PartialEq + cmp::PartialOrd {}
/// A `Scalar` with total equality and total ordering (integers).
pub trait Int where Self
	: Scalar + cmp::Eq + cmp::Ord {}
/// A `Scalar` with floating-point operations and lossless conversion from
/// `f64` literals.
pub trait Float where Self
	: Scalar + FloatOps + CastFrom<f64> {}
//----------------------------------------------------------------
// Implementation
impl Scalar for i8 {}
impl Scalar for i16 {}
impl Scalar for i32 {}
impl Scalar for i64 {}
impl Scalar for f32 {}
impl Scalar for f64 {}
impl Int for i8 {}
impl Int for i16 {}
impl Int for i32 {}
impl Int for i64 {}
impl Float for f32 {}
impl Float for f64 {}
| true |
685dd624372d485632b4f1a4aaa9d8a0099a49cb
|
Rust
|
snoyberg/actix-website
|
/examples/responses/src/json_resp.rs
|
UTF-8
| 573 | 2.609375 | 3 |
[
"MIT",
"Apache-2.0"
] |
permissive
|
// <json-resp>
use actix_web::{web, HttpResponse, Result};
use serde::{Deserialize, Serialize};
#[derive(Serialize, Deserialize)]
struct MyObj {
name: String,
}
// Extracts `name` from the URL path and echoes it back as a JSON body.
async fn index(obj: web::Path<MyObj>) -> Result<HttpResponse> {
    Ok(HttpResponse::Ok().json(MyObj {
        name: obj.name.to_string(),
    }))
}

#[actix_rt::main]
async fn main() -> std::io::Result<()> {
    use actix_web::{App, HttpServer};

    // `{name}` is a path parameter that deserializes into `MyObj::name`.
    HttpServer::new(|| App::new().route(r"/a/{name}", web::get().to(index)))
        .bind("127.0.0.1:8088")?
        .run()
        .await
}
// </json-resp>
| true |
def5d086e89e6e1cd86e572f8e840d6d609800a7
|
Rust
|
briansmith/ring
|
/src/polyfill/array_flat_map.rs
|
UTF-8
| 3,964 | 3.5625 | 4 |
[
"LicenseRef-scancode-unknown-license-reference",
"OpenSSL",
"MIT",
"ISC",
"LicenseRef-scancode-mit-taylor-variant",
"LicenseRef-scancode-openssl",
"LicenseRef-scancode-ssleay-windows"
] |
permissive
|
use core::iter::FlatMap;
/// A specialized version of `core::iter::FlatMap` for mapping over exact-sized
/// iterators with a function that returns an array.
///
/// `ArrayFlatMap` differs from `FlatMap` in that `ArrayFlatMap` implements
/// `ExactSizeIterator`. Since the result of `F` always has `LEN` elements, if
/// `I` is an exact-sized iterator of length `inner_len` then we know the
/// length of the flat-mapped result is `inner_len * LEN`. (The constructor
/// verifies that this multiplication doesn't overflow `usize`.)
#[derive(Clone)]
pub struct ArrayFlatMap<I, Item, F, const LEN: usize> {
inner: FlatMap<I, [Item; LEN], F>,
remaining: usize,
}
impl<I, Item, F, const LEN: usize> ArrayFlatMap<I, Item, F, LEN>
where
    I: ExactSizeIterator,
    F: FnMut(I::Item) -> [Item; LEN],
{
    /// Constructs an `ArrayFlatMap` wrapping the given iterator, using the
    /// given function. Returns `None` when `inner.len() * LEN` would
    /// overflow `usize`, which keeps `remaining` (and thus `size_hint`)
    /// trustworthy.
    pub fn new(inner: I, f: F) -> Option<Self> {
        let remaining = inner.len().checked_mul(LEN)?;
        let inner = inner.flat_map(f);
        Some(Self { inner, remaining })
    }
}

impl<I, Item, F, const LEN: usize> Iterator for ArrayFlatMap<I, Item, F, LEN>
where
    I: Iterator,
    F: FnMut(I::Item) -> [Item; LEN],
{
    type Item = Item;

    fn next(&mut self) -> Option<Self::Item> {
        let result = self.inner.next();
        // Only count down when an element was actually produced, so
        // `remaining` always equals the number of elements left.
        if result.is_some() {
            self.remaining -= 1;
        }
        result
    }

    /// Required for implementing `ExactSizeIterator`.
    fn size_hint(&self) -> (usize, Option<usize>) {
        (self.remaining, Some(self.remaining))
    }
}

// Sound because `size_hint` above is exact: lower and upper bound are both
// `remaining`.
impl<I, Item, F, const LEN: usize> ExactSizeIterator for ArrayFlatMap<I, Item, F, LEN>
where
    I: Iterator,
    F: FnMut(I::Item) -> [Item; LEN],
{
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_array_flat_map() {
static TEST_CASES: &[(&[u16], fn(u16) -> [u8; 2], &[u8])] = &[
// Empty input
(&[], u16::to_be_bytes, &[]),
// Non-empty input.
(
&[0x0102, 0x0304, 0x0506],
u16::to_be_bytes,
&[1, 2, 3, 4, 5, 6],
),
// Test with a different mapping function.
(
&[0x0102, 0x0304, 0x0506],
u16::to_le_bytes,
&[2, 1, 4, 3, 6, 5],
),
];
TEST_CASES.iter().copied().for_each(|(input, f, expected)| {
let mapped = ArrayFlatMap::new(input.iter().copied(), f).unwrap();
super::super::test::assert_iterator(mapped, expected);
});
}
// Does ArrayFlatMap::new() handle overflow correctly?
#[test]
fn test_array_flat_map_len_overflow() {
struct DownwardCounter {
remaining: usize,
}
impl Iterator for DownwardCounter {
type Item = usize;
fn next(&mut self) -> Option<Self::Item> {
if self.remaining > 0 {
let result = self.remaining;
self.remaining -= 1;
Some(result)
} else {
None
}
}
fn size_hint(&self) -> (usize, Option<usize>) {
(self.remaining, Some(self.remaining))
}
}
impl ExactSizeIterator for DownwardCounter {}
const MAX: usize = usize::MAX / core::mem::size_of::<usize>();
static TEST_CASES: &[(usize, bool)] = &[(MAX, true), (MAX + 1, false)];
TEST_CASES.iter().copied().for_each(|(input_len, is_some)| {
let inner = DownwardCounter {
remaining: input_len,
};
let mapped = ArrayFlatMap::new(inner, usize::to_be_bytes);
assert_eq!(mapped.is_some(), is_some);
if let Some(mapped) = mapped {
assert_eq!(mapped.len(), input_len * core::mem::size_of::<usize>());
}
});
}
}
| true |
d6f6520c59a4522977e8a3d6cd505c691502095a
|
Rust
|
fbegyn/podder
|
/src/main.rs
|
UTF-8
| 2,228 | 3.203125 | 3 |
[] |
no_license
|
mod episode;
mod podcast;
use structopt::StructOpt;
use threadpool::ThreadPool;
#[derive(Debug, StructOpt)]
#[structopt(raw(setting = "structopt::clap::AppSettings::ColoredHelp"))]
#[structopt(
    name = "Podder",
    about = "An app to download your podcasts, fast and easy"
)]
struct Cli {
    // NOTE: structopt turns `///` doc comments into --help text, so plain
    // `//` comments are used here to keep the generated help unchanged.
    #[structopt(
        help = "RSS feed urls for the podcasts we want to download",
        parse(from_str)
    )]
    urls: Vec<String>,
    // Maximum number of episodes to download per run.
    #[structopt(short = "b", long = "backlog", default_value = "5")]
    backlog: usize,
    // Size of the download worker pool.
    #[structopt(short = "j", long = "threads", default_value = "3")]
    threads: usize,
}
// Program
fn main() -> Result<(), Box<std::error::Error>> {
    // Parse CLI args and flags.
    let args = Cli::from_args();
    let pool = ThreadPool::new(args.threads); // Create a worker pool

    // Guard: without at least one feed url, `pods[0]` / `feeds[0]` below
    // would panic with an index error.
    if args.urls.is_empty() {
        return Err("no RSS feed urls given".into());
    }

    // Fetch and parse the RSS channel behind every url argument.
    let feeds: Vec<rss::Channel> = args
        .urls
        .iter()
        .map(|x| {
            let mut t = rss::Channel::from_url(&x).expect("Failed to parse url");
            t.set_link(x as &str);
            t
        })
        .collect();

    // Summarise each channel as a Podcast (title, url, episode count).
    let pods: Vec<podcast::Podcast> = feeds
        .iter()
        .map(|f| {
            podcast::Podcast {
                title: f.title().parse().expect("Failed to read podcast title"),
                url: f.link().parse().expect("Failed to read the link"),
                episodes: f.clone().into_items().len(),
            }
        })
        .collect();
    println!("{}\n", pods[0]);

    // TODO: make this iterate over all channels
    let eps = feeds[0].clone().into_items();
    let episodes = get_episodes(eps)?;

    // Download up to `backlog` episodes. `take` caps the iteration, so a
    // feed with fewer episodes than the requested backlog no longer panics
    // on an out-of-bounds index (the old `episodes[i]` loop did).
    for mut episode in episodes.into_iter().take(args.backlog) {
        pool.execute(move || {
            episode.download();
        });
    }

    pool.join(); // Wait until all the workers have finished
    Ok(())
}
// Builds an Episode for every item in an RSS feed; the `?` bails out (and
// converts the error) on the first item that fails to convert.
fn get_episodes(items: Vec<rss::Item>) -> Result<Vec<episode::Episode>, Box<std::error::Error>> {
    let mut episodes = Vec::with_capacity(items.len());
    for ep in &items {
        episodes.push(episode::Episode::from_item(ep.clone())?);
    }
    Ok(episodes)
}
| true |
2f00b3cf3ef0f8a94d38820ce4bb2733593902e8
|
Rust
|
qeedquan/challenges
|
/kattis/tais-formula.rs
|
UTF-8
| 2,502 | 3.484375 | 3 |
[
"MIT"
] |
permissive
|
/*
Have you heard about the mastermind Tai? He found a way to calculate the area under a glucose curve, given discrete measurement points.
He even validated his formula against the approximate technique of counting the number of squares below the graph, when printed on graph paper.
Can you, just like Tai, reinvent the wheel and calculate the area under a glucose curve?
Instead of publishing a paper, you need to implement the algorithm.
You need this algorithm in your new app, that logs your glucose values that comes from a continuous glucose monitor.
You have also figured out the trick of the device. It’s not actually continuous, it just samples the glucose value frequently, automatically.
Input
Input contains several lines of numbers separated by spaces. The first line contains the integer N, 2≤N≤10^4, the number of glucose samples.
The following N lines describe each glucose sample.
Each line contains two numbers ti, vi, where ti is the time of the sample, and vi is the glucose value at time ti.
The glucose values vi are inside the measurement domain: 2.0≤vi≤23.0 mmol/L.
Each glucose value is given with exactly one decimal digit.
Since you are working with a computer program, the time of each sample is given as an integer, the number of milliseconds since the first of January 1970.
The samples are always given in increasing order by time, meaning 0<t1<t2<⋯<tN<10^14 ms.
Note that a second is a thousand milliseconds.
Output
The area under the glucose curve in the unit mmol/L⋅s
Answers within a relative or absolute error of 10^-6 will be accepted.
Sample Explanation
In Sample Input 1 there are three data points, where the area between the t-axis and the curve is formed by two Trapezoids.
The first trapezoid have the area of 2+122⋅(2000−1000)=7000mmol/L⋅ms, making 7mmol/L⋅s. The second has an area of 17mmol/L⋅s, making the total area 24mmol/L⋅s.
*/
fn main() {
    // Sample 1 from the problem statement, plus a case with a non-uniform
    // time step (3001 ms endpoint).
    test(vec![[1000.0, 2.0], [2000.0, 12.0], [3000.0, 22.0]], 24.0);
    test(vec![[1000.0, 4.0], [2000.0, 8.0], [3001.0, 7.3]], 13.65765);
}

// Runs `integrate` on the samples `a`, prints the area, and asserts it
// matches the expected value `r` within an absolute tolerance of 1e-6.
fn test(a: Vec<[f64; 2]>, r: f64) {
    let p = integrate(a);
    println!("{}", p);
    let t = (p - r).abs();
    assert_eq!(t < 1e-6, true);
}
/// Trapezoidal integration of `[time_ms, value]` samples: each adjacent pair
/// contributes (v0 + v1)/2 * (t1 - t0). Times are in milliseconds, so the
/// accumulated area is divided by 1000 to yield mmol/L * s. Fewer than two
/// samples integrate to 0.
fn integrate(a: Vec<[f64; 2]>) -> f64 {
    let area_ms: f64 = a
        .windows(2)
        .map(|pair| (pair[0][1] + pair[1][1]) * 0.5 * (pair[1][0] - pair[0][0]))
        .sum();
    area_ms / 1000.0
}
| true |
1bf7bc052fd94bc71cc1bb8320f3130e2212dc98
|
Rust
|
wayeast/visual-levenshtein
|
/examples/words.rs
|
UTF-8
| 901 | 3.390625 | 3 |
[] |
no_license
|
use visual_levenshtein::{levenshtein_words, Edit};
fn main() {
    // Render each edit operation in a diff-like inline notation: unchanged
    // text as-is, deletions as [-..-], insertions as {+..+}, substitutions
    // as a deletion immediately followed by an insertion.
    let encoder = |edit: Edit| match edit {
        Edit::Equality(s) => format!("{}", s),
        Edit::Deletion(s) => format!("[-{}-]", s),
        Edit::Insertion(s) => format!("{{+{}+}}", s),
        Edit::Substitution(o, d) => format!("[-{}-]{{+{}+}}", o, d),
    };

    let pairs = [
        (
            "One fine day in spring, ...",
            "One fine man said May day ...",
        ),
        (
            "One fine day in spring, ...",
            "One fine man said mayday ...",
        ),
        (
            "One fine day in spring, ...",
            "One fine man said Spring ...",
        ),
    ];

    for &(before, after) in pairs.iter() {
        let encoded = levenshtein_words(before, after).encoded_edits(encoder);
        println!("'{}' -> '{}': '{}'", before, after, encoded);
    }
}
| true |
42218402bdf34a34a7e326b2a8e57e4ecd36bd6c
|
Rust
|
sdleffler/tll-array-rs
|
/src/array.rs
|
UTF-8
| 12,556 | 3.015625 | 3 |
[
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0",
"MIT"
] |
permissive
|
use std::fmt;
use std::mem;
use std::ops::{Deref, DerefMut};
use std::ptr;
use std::slice;
use tll::ternary::{Nat, Pred, NatPred, Triple, NatTriple, Zero, One, Two};
use tll_iterator::{SizedIterator, NonEmpty, FromSizedIterator};
use guillotine::*;
use storage::*;
/// The `array![]` macro provides a convenient way to construct `Array`s from scratch. It can be
/// invoked similarly to the `vec![]` macro, although `array![]` *does not* provide the "repeat"
/// syntax (like how `vec![0; 10]` would produce a `Vec` of 10 zeroes).
#[macro_export]
macro_rules! array {
(@assign $data:ident $n:expr => $x:expr $(, $xs:expr)*) => (
::std::ptr::write(&mut $data[$n], $x); array!(@assign $data ($n + 1) => $($xs),*));
(@assign $data:ident $n:expr =>) => ();
(@count $x:expr $(, $xs:expr)*) => ($crate::tll::ternary::Succ<array!(@count $($xs),*)>);
(@count) => ($crate::tll::ternary::Term);
($($xs:expr),*) => ({
unsafe {
let mut data =
::std::mem::uninitialized::<$crate::array::Array<array!(@count $($xs),*), _>>();
array!(@assign data 0 => $($xs),*);
data
}
});
}
/// The `Array` struct represents an array the length of which is determined by a type-level `Nat`.
/// For technical reasons, the `Arrayify<T>` trait is also necessary. Since `Arrayify<T>` includes
/// `Nat` as a supertrait, it will in most cases suffice to simply use the `Arrayify<T>` on an
/// `Array`'s length.
///
/// `Array`s dereference to slices to provide indexing operations. This means they can be treated
/// in much the same way one would treat a `Vec` (since `Vec`s work in much the same manner).
/// Eventually it may be necessary to extend these indexing operations to the `Array` struct
/// itself, but to say the least, that time is not now.
pub struct Array<L: Arrayify<T>, T> {
data: Reify<L, T>,
}
impl<L: Arrayify<T>, T: Clone> Clone for Array<L, T>
where Reify<L, T>: Clone
{
fn clone(&self) -> Self {
Array { data: self.data.clone() }
}
}
impl<L: Arrayify<T>, T: Copy> Copy for Array<L, T> where Reify<L, T>: Copy {}
impl<L: Arrayify<T>, T> Deref for Array<L, T> {
type Target = [T];
fn deref<'a>(&'a self) -> &'a [T] {
unsafe { slice::from_raw_parts(self as *const Self as *const T, L::reify()) }
}
}
impl<L: Arrayify<T>, T> DerefMut for Array<L, T> {
fn deref_mut<'a>(&'a mut self) -> &'a mut [T] {
unsafe { slice::from_raw_parts_mut(self as *mut Self as *mut T, L::reify()) }
}
}
pub trait ArraySplit<L: Arrayify<T> + NatPred, T>
where Pred<L>: Arrayify<T>
{
/// Split the array into its first element and remaining elements.
fn split_first(Array<L, T>) -> (T, Array<Pred<L>, T>);
/// Split the array into its first elements and last element.
fn split_last(Array<L, T>) -> (T, Array<Pred<L>, T>);
}
impl<L: Arrayify<T> + NatPred, T> ArraySplit<Zero<L>, T> for Array<Zero<L>, T>
where Pred<L>: Arrayify<T>
{
fn split_first(array: Self) -> (T, Array<Pred<Zero<L>>, T>) {
unsafe {
let head = ptr::read((&array as *const Self as *const T));
let tail = ptr::read((&array as *const Self as *const Array<Two<Pred<L>>, T>)
.offset(1));
mem::forget(array);
(head, tail)
}
}
fn split_last(array: Self) -> (T, Array<Pred<Zero<L>>, T>) {
unsafe {
let init = ptr::read(&array as *const Self as *const Array<Two<Pred<L>>, T>);
let last = ptr::read((&array as *const Self as *const T)
.offset(<Zero<L>>::reify() as isize - 1));
mem::forget(array);
(last, init)
}
}
}
impl<L: Arrayify<T> + NatPred + NatTriple, T> ArraySplit<One<L>, T> for Array<One<L>, T>
where Pred<One<L>>: Arrayify<T>
{
fn split_first(array: Self) -> (T, Array<Triple<L>, T>) {
unsafe {
let head = ptr::read((&array as *const Self as *const T));
let tail = ptr::read((&array as *const Self as *const Array<Triple<L>, T>).offset(1));
mem::forget(array);
(head, tail)
}
}
fn split_last(array: Self) -> (T, Array<Triple<L>, T>) {
unsafe {
let init = ptr::read(&array as *const Self as *const Array<Triple<L>, T>);
let last = ptr::read((&array as *const Self as *const T)
.offset(<One<L>>::reify() as isize - 1));
mem::forget(array);
(last, init)
}
}
}
impl<L: Arrayify<T> + NatPred, T> ArraySplit<Two<L>, T> for Array<Two<L>, T>
where One<L>: Arrayify<T>
{
fn split_first(array: Self) -> (T, Array<One<L>, T>) {
unsafe {
let head = ptr::read((&array as *const Self as *const T));
let tail = ptr::read((&array as *const Self as *const Array<One<L>, T>).offset(1));
mem::forget(array);
(head, tail)
}
}
fn split_last(array: Self) -> (T, Array<One<L>, T>) {
unsafe {
let init = ptr::read(&array as *const Self as *const Array<One<L>, T>);
let last = ptr::read((&array as *const Self as *const T)
.offset(<Two<L>>::reify() as isize - 1));
mem::forget(array);
(last, init)
}
}
}
impl<L: Arrayify<T>, T> Array<L, T> {
/// Split the `Array` apart into its first element and an `Array` consisting of the remaining elements.
/// This splitting is quite efficient and consists internally of only pointer casts and dereferences.
///
/// ```
/// # #[macro_use] extern crate tll_array; fn main() {
/// let array = array![42i32, 84, 126, 168, 210, 252, 294, 336];
/// assert_eq!(array.len(), 8);
/// let (head, tail) = array.split_first();
/// assert_eq!(head, 42);
/// assert_eq!(tail.len(), 7);
/// # }
/// ```
pub fn split_first(self) -> (T, Array<Pred<L>, T>)
where Self: ArraySplit<L, T>,
L: NatPred,
Pred<L>: Arrayify<T>
{
<Self as ArraySplit<L, T>>::split_first(self)
}
/// Split the `Array` apart into its last element and an `Array` consisting of preceding elements.
/// This splitting is quite efficient and consists internally of only pointer casts and dereferences.
///
/// ```
/// # #[macro_use] extern crate tll_array; fn main() {
/// let array = array![42i32, 84, 126, 168, 210, 252, 294, 336];
/// assert_eq!(array.len(), 8);
/// let (last, init) = array.split_last();
/// assert_eq!(last, 336);
/// assert_eq!(init.len(), 7);
/// # }
/// ```
pub fn split_last(self) -> (T, Array<Pred<L>, T>)
where Self: ArraySplit<L, T>,
L: NatPred,
Pred<L>: Arrayify<T>
{
<Self as ArraySplit<L, T>>::split_last(self)
}
}
/// By-value iterator over an `Array`. Elements are moved out one at a time
/// with `ptr::read`; `pos` is the index of the next element not yet yielded.
pub struct ArrayIter<L: Arrayify<T>, T> {
    // `Guillotine` holds the array while iteration is in flight; `Drop`
    // below takes it back out to clean up the unyielded tail.
    data: Guillotine<Array<L, T>>,
    pos: usize,
}
impl<L: Arrayify<T>, T> Drop for ArrayIter<L, T> {
    fn drop(&mut self) {
        unsafe {
            // Elements before `pos` were already moved out by `next`, so
            // only indices pos..len may still be dropped here — dropping
            // earlier ones would be a double drop.
            // NOTE(review): assumes `Guillotine` is still `Alive` here
            // (`unwrap_unchecked`) and that `Reify`'s storage does not drop
            // the elements again when `data` itself is dropped — confirm in
            // the `guillotine` and `storage` modules.
            let mut data = self.data.take().unwrap_unchecked();
            for i in self.pos..data.len() {
                ptr::drop_in_place(&mut data[i]);
            }
        }
    }
}
impl<L: Arrayify<T>, T> Iterator for ArrayIter<L, T> {
    type Item = T;
    fn next(&mut self) -> Option<T> {
        unsafe {
            let data = self.data.as_ref().unwrap_unchecked();
            if self.pos < data.len() {
                // Move the element out; `pos` is advanced so Drop/next never
                // touch this slot again.
                let next = ptr::read(&data[self.pos]);
                self.pos += 1;
                Some(next)
            } else {
                None
            }
        }
    }
    fn size_hint(&self) -> (usize, Option<usize>) {
        // NOTE(review): reports the full length L even after elements have
        // been consumed; `(L::reify() - self.pos, ...)` would be the exact
        // remaining count — confirm whether callers rely on this.
        (L::reify(), Some(L::reify()))
    }
}
impl<L: Arrayify<T>, T> IntoIterator for Array<L, T> {
    type IntoIter = ArrayIter<L, T>;
    type Item = T;
    fn into_iter(self) -> ArrayIter<L, T> {
        ArrayIter {
            data: Alive(self),
            pos: 0,
        }
    }
}
impl<L: Arrayify<T>, T> ExactSizeIterator for ArrayIter<L, T> {}
impl<L: Arrayify<T>, T: fmt::Debug> fmt::Debug for Array<L, T> {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
self.deref().fmt(fmt)
}
}
impl<L: Arrayify<T>, T> SizedIterator<L> for Array<L, T> {}
impl<L: Arrayify<T> + NatPred, T> NonEmpty<Zero<L>> for Array<Zero<L>, T>
where Array<Zero<L>, T>: ArraySplit<Zero<L>, T>,
Pred<L>: Arrayify<T>
{
type Next = Array<Two<Pred<L>>, T>;
fn next(self) -> (T, Array<Two<Pred<L>>, T>) {
self.split_first()
}
}
impl<L: Arrayify<T> + NatTriple, T> NonEmpty<One<L>> for Array<One<L>, T>
where Array<One<L>, T>: ArraySplit<One<L>, T>,
Triple<L>: Arrayify<T>
{
type Next = Array<Triple<L>, T>;
fn next(self) -> (T, Array<Triple<L>, T>) {
self.split_first()
}
}
impl<L: Arrayify<T>, T> NonEmpty<Two<L>> for Array<Two<L>, T>
where Array<Two<L>, T>: ArraySplit<Two<L>, T>
{
type Next = Array<One<L>, T>;
fn next(self) -> (T, Array<One<L>, T>) {
self.split_first()
}
}
impl<L: Arrayify<T>, T> FromSizedIterator<L, T> for Array<L, T> {
fn from_sized_iter<I: SizedIterator<L, Item = T>>(iter: I) -> Self {
let mut array: Array<L, T>;
unsafe {
array = mem::uninitialized();
for (i, v) in iter.into_iter().enumerate() {
ptr::write(&mut array[i], v);
}
}
array
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn split_first_1() {
let array = array![42i32];
assert_eq!(array.len(), 1);
let (head, array) = array.split_first();
assert_eq!(array.len(), 0);
assert_eq!(head, 42);
}
#[test]
fn split_first_2() {
let array = array![42i32, 84];
assert_eq!(array.len(), 2);
let (head, array) = array.split_first();
assert_eq!(array.len(), 1);
assert_eq!(head, 42);
}
#[test]
fn split_first_3() {
let array = array![42i32, 84, 126];
assert_eq!(array.len(), 3);
let (head, array) = array.split_first();
assert_eq!(array.len(), 2);
assert_eq!(head, 42);
}
#[test]
fn split_first_8() {
let array = array![42i32, 84, 126, 168, 210, 252, 294, 336];
assert_eq!(array.len(), 8);
let (head, array) = array.split_first();
assert_eq!(array.len(), 7);
assert_eq!(head, 42);
}
#[test]
fn split_first_9() {
let array = array![42i32, 84, 126, 168, 210, 252, 294, 336, 378];
assert_eq!(array.len(), 9);
let (head, array) = array.split_first();
assert_eq!(array.len(), 8);
assert_eq!(head, 42);
}
#[test]
fn split_first_10() {
let array = array![42i32, 84, 126, 168, 210, 252, 294, 336, 378, 420];
assert_eq!(array.len(), 10);
let (head, array) = array.split_first();
assert_eq!(array.len(), 9);
assert_eq!(head, 42);
}
#[test]
fn split_last_1() {
let array = array![42i32];
assert_eq!(array.len(), 1);
let (tail, array) = array.split_last();
assert_eq!(array.len(), 0);
assert_eq!(tail, 42);
}
#[test]
fn split_last_2() {
let array = array![42i32, 84];
assert_eq!(array.len(), 2);
let (tail, array) = array.split_last();
assert_eq!(array.len(), 1);
assert_eq!(tail, 84);
}
#[test]
fn split_last_3() {
let array = array![42i32, 84, 126];
assert_eq!(array.len(), 3);
let (tail, array) = array.split_last();
assert_eq!(array.len(), 2);
assert_eq!(tail, 126);
}
#[test]
fn split_last_8() {
let array = array![42i32, 84, 126, 168, 210, 252, 294, 336];
assert_eq!(array.len(), 8);
let (tail, array) = array.split_last();
assert_eq!(array.len(), 7);
assert_eq!(tail, 336);
}
#[test]
fn split_last_9() {
let array = array![42i32, 84, 126, 168, 210, 252, 294, 336, 378];
assert_eq!(array.len(), 9);
let (tail, array) = array.split_last();
assert_eq!(array.len(), 8);
assert_eq!(tail, 378);
}
#[test]
fn split_last_10() {
let array = array![42i32, 84, 126, 168, 210, 252, 294, 336, 378, 420];
assert_eq!(array.len(), 10);
let (tail, array) = array.split_last();
assert_eq!(array.len(), 9);
assert_eq!(tail, 420);
}
}
| true |
f5709e1ff756e9c3cfcb8fe60b054c20a66d1277
|
Rust
|
salsa-rs/salsa
|
/src/storage.rs
|
UTF-8
| 2,033 | 2.796875 | 3 |
[
"Apache-2.0",
"MIT"
] |
permissive
|
use crate::{plumbing::DatabaseStorageTypes, Runtime};
use std::sync::Arc;
/// Stores the cached results and dependency information for all the queries
/// defined on your salsa database. Also embeds a [`Runtime`] which is used to
/// manage query execution. Every database must include a `storage:
/// Storage<Self>` field.
pub struct Storage<DB: DatabaseStorageTypes> {
query_store: Arc<DB::DatabaseStorage>,
runtime: Runtime,
}
impl<DB: DatabaseStorageTypes> Default for Storage<DB> {
    // A fresh `Storage`: empty query tables plus a brand-new `Runtime`.
    fn default() -> Self {
        Self {
            query_store: Default::default(),
            runtime: Default::default(),
        }
    }
}
impl<DB: DatabaseStorageTypes> Storage<DB> {
/// Gives access to the underlying salsa runtime.
pub fn salsa_runtime(&self) -> &Runtime {
&self.runtime
}
/// Gives access to the underlying salsa runtime.
pub fn salsa_runtime_mut(&mut self) -> &mut Runtime {
&mut self.runtime
}
/// Access the query storage tables. Not meant to be used directly by end
/// users.
pub fn query_store(&self) -> &DB::DatabaseStorage {
&self.query_store
}
/// Access the query storage tables. Not meant to be used directly by end
/// users.
pub fn query_store_mut(&mut self) -> (&DB::DatabaseStorage, &mut Runtime) {
(&self.query_store, &mut self.runtime)
}
/// Returns a "snapshotted" storage, suitable for use in a forked database.
/// This snapshot hold a read-lock on the global state, which means that any
/// attempt to `set` an input will block until the forked runtime is
/// dropped. See `ParallelDatabase::snapshot` for more information.
///
/// **Warning.** This second handle is intended to be used from a separate
/// thread. Using two database handles from the **same thread** can lead to
/// deadlock.
pub fn snapshot(&self) -> Self {
Storage {
query_store: self.query_store.clone(),
runtime: self.runtime.snapshot(),
}
}
}
| true |
86aace17271bb8f95f3a2cc56eb8613326f74cae
|
Rust
|
ZefCo/swttrpg_dice
|
/src/main.rs
|
UTF-8
| 1,159 | 2.921875 | 3 |
[] |
no_license
|
// use std::collections::HashMap;
use std::fs::File;
use std::io::Read;
use serde::Deserialize;
mod filepath;
fn main() {
    // Locate the dice definition file relative to the working directory.
    let cwd = filepath::cwd().expect("Incorrect Permissions");
    println!("The cwd = {}", cwd.display());
    let json_file = filepath::adjecent_file("dice_sides.json", None, None);

    // Slurp the JSON file into a string buffer.
    let mut contents = String::new();
    File::open(json_file)
        .expect("Unable to read file")
        .read_to_string(&mut contents)
        .expect("Unable to read string");

    // Deserialize; the value is not used yet, hence the underscore binding.
    let _json: Jfile = serde_json::from_str(&contents).expect("JSON was not well formatted");
}
#[derive(Debug, Deserialize)]
#[serde(rename_all = "PascalCase")]
struct Sides {
}
// impl Results {
// fn sides(&self) {
// }
// }
#[derive(Debug, Deserialize)]
#[serde(rename_all = "PascalCase")]
struct Dice {
name: String,
color: String,
sides: i8,
results: Sides
}
/// Top-level shape of `dice_sides.json`: one entry per die type.
/// NOTE(review): only the Boost die is modelled so far; the other die types
/// (see the `Die` enum) will need fields here as they are added to the JSON.
#[derive(Debug, Deserialize)]
#[serde(rename_all = "PascalCase")]
struct Jfile {
    // Renamed from non-snake-case `Boost`; the PascalCase rename still maps
    // this field to the "Boost" key in the JSON, so behavior is unchanged.
    boost: Dice
}
/// The die types of the game system (Star Wars TTRPG narrative dice).
/// NOTE(review): not yet constructed anywhere in this file.
enum Die {
    Boost,
    Setback,
    Ability,
    Difficulty,
    Proficency,  // NOTE(review): likely a typo for "Proficiency"
    Force
}
| true |
b21ef29a9a31c6a0adee6431c62870dbf6c926db
|
Rust
|
holsee/bloom_spell
|
/src/util/bloom_filter.rs
|
UTF-8
| 2,333 | 3.046875 | 3 |
[] |
no_license
|
use super::bitvec_rs::BitVec;
use super::fnv::FnvHasher;
use std::hash::{Hash, Hasher};
use std::collections::hash_map::DefaultHasher;
/// A classic Bloom filter: a fixed-size bit array plus k derived hash
/// functions. Supports insertion and probabilistic membership queries
/// (false positives possible, false negatives impossible).
#[derive(Debug)]
pub struct BloomFilter {
    bits: BitVec,    // the underlying bit array
    hash_count: u8   // number of derived hash functions (k)
}
impl BloomFilter {
    // pub fn from(expected_inserts: usize, fpr: f64) -> BloomFilter {
    //     let m: usize = ceil((-1.0 * (expected_inserts as f64) * ln(fpr))
    //                 / powf(2.0.ln(), 2.0)) as usize;
    //
    //     let k: u8 = ceil(((m as f64) /
    //                 (expected_inserts as f64)) * ln(2.0)) as u8;
    //
    //     return BloomFilter::new(m, k)
    // }
    /// Creates a filter with `size` bits, all cleared, using `hash_count`
    /// derived hash functions per item.
    pub fn new(size: usize, hash_count: u8) -> BloomFilter {
        BloomFilter {
            bits: BitVec::from_elem(size, false),
            hash_count: hash_count
        }
    }
    /// Inserts `item` by setting every bit at its derived indices.
    pub fn insert<T: Hash>(&mut self, item: T) {
        let hash_values = self.compute_indices(item);
        for index in hash_values {
            self.bits.set(index as usize, true);
        }
    }
    /// Returns `true` if `item` may have been inserted (all of its derived
    /// bits are set). `false` is definitive; `true` may be a false positive.
    pub fn maybe_contains<T: Hash>(&self, item: T) -> bool {
        let indices = self.compute_indices(item);
        indices.into_iter().all(|i| self.bits[i] )
    }
    /// Derives `hash_count` bit indices for `item` from two base hashes
    /// (FNV and SipHash) combined as `h1 + i * h2` — the double-hashing
    /// scheme (Kirsch–Mitzenmacher) commonly used for Bloom filters.
    pub fn compute_indices<T: Hash>(&self, item: T) -> Vec<usize> {
        // Hash value with 2 hash functions
        let mut fnv = FnvHasher::default();
        item.hash(&mut fnv);
        // SipHash https://131002.net/siphash/
        let mut sip = DefaultHasher::default();
        item.hash(&mut sip);
        // Produce multiple hashes and convert to indices
        // NOTE(review): converting the u64 hashes to f64 loses precision
        // above 2^53; the modulo still yields a valid in-range index, but
        // integer arithmetic (wrapping ops + `% size`) would be exact —
        // confirm the f64 path is intentional before relying on index
        // distribution quality.
        let hash_a: f64 = fnv.finish() as f64;
        let hash_b: f64 = sip.finish() as f64;
        let size: f64 = self.bits.len() as f64;
        let hash_range = 0..self.hash_count;
        let indices: Vec<usize> = hash_range.into_iter()
            .map(|i| {
                // Compute i'th hash
                let hash: f64 = hash_a + (i as f64) * hash_b;
                // Convert to Index
                let index: f64 = hash % size;
                index as usize
            })
            .collect();
        indices
    }
}
}
| true |
4870aaaa2f900bfbc25bc07857dccd554aa23329
|
Rust
|
LinAGKar/advent-of-code-2017-rust
|
/day20a/src/main.rs
|
UTF-8
| 3,071 | 3.28125 | 3 |
[
"MIT"
] |
permissive
|
extern crate regex;
use regex::Regex;
use std::cmp::Ordering;
use std::io;
use std::io::Read;
use std::ops::Mul;
use std::ops::AddAssign;
/// A 3-component integer vector (used for position, velocity, acceleration).
#[derive(Debug, Eq, Clone, Copy)]
struct Vec3 {
    x: i64,
    y: i64,
    z: i64,
}
impl Vec3 {
    /// True when every component is >= 0. Used below to detect when each
    /// component of a product vector has a consistent (non-negative) sign.
    fn non_negative(&self) -> bool {
        self.x >= 0 && self.y >= 0 && self.z >= 0
    }
}
impl Ord for Vec3 {
    /// Orders vectors by Manhattan magnitude (|x| + |y| + |z|), not
    /// lexicographically — this is the puzzle's distance metric.
    fn cmp(&self, other: &Vec3) -> Ordering {
        (self.x.abs() + self.y.abs() + self.z.abs()).cmp(&(other.x.abs() + other.y.abs() + other.z.abs()))
    }
}
impl PartialOrd for Vec3 {
    fn partial_cmp(&self, other: &Vec3) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}
impl PartialEq for Vec3 {
    /// NOTE(review): equality means *equal Manhattan magnitude*, so distinct
    /// vectors such as (1,0,0) and (0,1,0) compare equal. Consistent with
    /// `Ord` above, but surprising as general-purpose equality.
    fn eq(&self, other: &Vec3) -> bool {
        self.cmp(other) == Ordering::Equal
    }
}
impl Mul for Vec3 {
    type Output = Vec3;
    /// Component-wise (Hadamard) product — used below to test whether two
    /// vectors agree in sign per component; not a dot or cross product.
    fn mul(self, other: Vec3) -> Vec3 {
        Vec3 { x: self.x * other.x, y: self.y * other.y, z: self.z * other.z }
    }
}
impl AddAssign for Vec3 {
    /// Component-wise in-place addition.
    fn add_assign(&mut self, other: Vec3) {
        self.x += other.x;
        self.y += other.y;
        self.z += other.z;
    }
}
/// A particle with position, velocity and acceleration, advanced in unit
/// time steps.
#[derive(Debug, Clone, Copy, Eq)]
struct Particle {
    pos: Vec3,
    vel: Vec3,
    acc: Vec3,
}
impl Particle {
    /// Advances one time step: velocity += acceleration, then
    /// position += velocity (in that order, per the puzzle rules).
    fn step(&mut self) {
        self.vel += self.acc;
        self.pos += self.vel;
    }
}
impl Ord for Particle {
    /// Compares particles by *long-term* distance from the origin:
    /// acceleration magnitude decides first; ties fall back to velocity and
    /// then position, each evaluated only after simulating both copies
    /// forward until the relevant magnitude grows monotonically (every
    /// component of vel agrees in sign with acc, resp. pos with vel).
    fn cmp(&self, other: &Particle) -> Ordering {
        // Work on copies — comparison must not mutate the real particles.
        let mut this = *self;
        let mut other = *other;
        if this.acc != other.acc {
            this.acc.cmp(&other.acc)
        } else {
            // Step until vel * acc is component-wise non-negative for both,
            // i.e. |vel| can only grow from here on.
            while !(this.vel * this.acc).non_negative() || !(other.vel * other.acc).non_negative() {
                this.step();
                other.step();
            }
            if this.vel != other.vel {
                this.vel.cmp(&other.vel)
            } else {
                // Same idea for position relative to velocity.
                while !(this.pos * this.vel).non_negative() || !(other.pos * other.vel).non_negative() {
                    this.step();
                    other.step();
                }
                this.pos.cmp(&other.pos)
            }
        }
    }
}
impl PartialOrd for Particle {
    /// Delegates to the total order defined by `Ord` above.
    fn partial_cmp(&self, other: &Particle) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}
impl PartialEq for Particle {
    /// Equality derived from `Ord`: two particles are equal when `cmp`
    /// considers their long-term distance identical. Rewritten from a
    /// two-arm `match` to a direct comparison, matching the style of
    /// `impl PartialEq for Vec3` above.
    fn eq(&self, other: &Particle) -> bool {
        self.cmp(other) == Ordering::Equal
    }
}
/// Reads particle definitions from stdin and prints the index of the
/// particle that stays closest to the origin in the long run
/// (Advent of Code 2017 day 20, part 1).
fn main() {
    let mut input = String::new();
    io::stdin().read_to_string(&mut input).unwrap();
    // One capture group per signed integer in "p=<..>, v=<..>, a=<..>".
    let re = Regex::new(
        r"p=<(-?\d+),(-?\d+),(-?\d+)>, v=<(-?\d+),(-?\d+),(-?\d+)>, a=<(-?\d+),(-?\d+),(-?\d+)>",
    ).unwrap();
    // The minimum under Particle's Ord is the particle closest to the origin
    // long-term; `enumerate` keeps the original line index for output.
    println!("{}", re.captures_iter(&input).map(|caps| Particle {
        pos: Vec3 { x: caps[1].parse().unwrap(), y: caps[2].parse().unwrap(), z: caps[3].parse().unwrap() },
        vel: Vec3 { x: caps[4].parse().unwrap(), y: caps[5].parse().unwrap(), z: caps[6].parse().unwrap() },
        acc: Vec3 { x: caps[7].parse().unwrap(), y: caps[8].parse().unwrap(), z: caps[9].parse().unwrap() },
    }).enumerate().min_by(|&(_, ref x), &(_, ref y)| x.cmp(y)).unwrap().0);
}
| true |
e27d1fe757cbdbfb835f8757db23b15e4325ebe9
|
Rust
|
rome/tools
|
/crates/rome_js_analyze/src/semantic_analyzers/suspicious/no_array_index_key.rs
|
UTF-8
| 8,734 | 2.859375 | 3 |
[
"MIT"
] |
permissive
|
use crate::react::{is_react_call_api, ReactLibrary};
use crate::semantic_services::Semantic;
use rome_analyze::context::RuleContext;
use rome_analyze::{declare_rule, Rule, RuleDiagnostic};
use rome_console::markup;
use rome_js_syntax::{
AnyJsFunction, AnyJsMemberExpression, JsCallArgumentList, JsCallArguments, JsCallExpression,
JsFormalParameter, JsIdentifierBinding, JsObjectExpression, JsObjectMemberList,
JsParameterList, JsParameters, JsPropertyObjectMember, JsReferenceIdentifier, JsxAttribute,
};
use rome_rowan::{declare_node_union, AstNode};
// NOTE(review): the rustdoc below is consumed by Rome's rule documentation
// tooling, and the ```jsx,expect_diagnostic``` snippets are executed as rule
// tests — edit the examples with care.
declare_rule! {
    /// Discourage the usage of Array index in keys.
    ///
    /// > We don’t recommend using indexes for keys if the order of items may change.
    /// This can negatively impact performance and may cause issues with component state.
    /// Check out Robin Pokorny’s article for an
    /// [in-depth explanation on the negative impacts of using an index as a key](https://robinpokorny.com/blog/index-as-a-key-is-an-anti-pattern/).
    /// If you choose not to assign an explicit key to list items then React will default to using indexes as keys.
    ///
    /// Source [React documentation](https://reactjs.org/docs/lists-and-keys.html#keys)
    ///
    /// ## Examples
    ///
    /// ### Invalid
    ///
    /// ```jsx,expect_diagnostic
    /// something.forEach((Element, index) => {
    ///     <Component key={index} >foo</Component>
    /// });
    /// ```
    ///
    /// ```jsx,expect_diagnostic
    /// React.Children.map(this.props.children, (child, index) => (
    ///     React.cloneElement(child, { key: index })
    /// ))
    /// ```
    pub(crate) NoArrayIndexKey {
        version: "0.10.0",
        name: "noArrayIndexKey",
        recommended: true,
    }
}
// A `key` can appear either as a JSX attribute (`key={index}`) or as an
// object member (`{ key: index }`, the `React.cloneElement` form).
declare_node_union! {
    pub(crate) NoArrayIndexKeyQuery = JsxAttribute | JsPropertyObjectMember
}
impl NoArrayIndexKeyQuery {
    /// True when the match came from an object literal member rather than a
    /// JSX attribute.
    const fn is_property_object_member(&self) -> bool {
        matches!(self, NoArrayIndexKeyQuery::JsPropertyObjectMember(_))
    }
    /// Whether the matched attribute/member is actually named `key`.
    /// Returns `None` when the syntax tree is missing the relevant tokens.
    fn is_key_property(&self) -> Option<bool> {
        Some(match self {
            NoArrayIndexKeyQuery::JsxAttribute(attribute) => {
                let attribute_name = attribute.name().ok()?;
                let name = attribute_name.as_jsx_name()?;
                let name_token = name.value_token().ok()?;
                name_token.text_trimmed() == "key"
            }
            NoArrayIndexKeyQuery::JsPropertyObjectMember(object_member) => {
                let object_member_name = object_member.name().ok()?;
                let name = object_member_name.as_js_literal_member_name()?;
                let name = name.value().ok()?;
                name.text_trimmed() == "key"
            }
        })
    }
    /// Extracts the reference from the possible invalid prop
    /// (`key={expr}` or `key: expr`), but only when the value is a plain
    /// identifier expression; `None` for any other expression shape.
    fn as_js_reference_identifier(&self) -> Option<JsReferenceIdentifier> {
        match self {
            NoArrayIndexKeyQuery::JsxAttribute(attribute) => attribute
                .initializer()?
                .value()
                .ok()?
                .as_jsx_expression_attribute_value()?
                .expression()
                .ok()?
                .as_js_identifier_expression()?
                .name()
                .ok(),
            NoArrayIndexKeyQuery::JsPropertyObjectMember(object_member) => object_member
                .value()
                .ok()?
                .as_js_identifier_expression()?
                .name()
                .ok(),
        }
    }
}
/// Data passed from `run` to `diagnostic`: the offending `key` value and the
/// function parameter it resolves to.
pub(crate) struct NoArrayIndexKeyState {
    /// The incorrect prop
    incorrect_prop: JsReferenceIdentifier,
    /// Where the incorrect prop was defined
    binding_origin: JsIdentifierBinding,
}
impl Rule for NoArrayIndexKey {
    type Query = Semantic<NoArrayIndexKeyQuery>;
    type State = NoArrayIndexKeyState;
    type Signals = Option<Self::State>;
    type Options = ();

    /// Signals when a `key` attribute/property resolves to the index
    /// parameter of an array iteration callback. Restructured so the state
    /// construction (previously duplicated in both branches) appears once.
    fn run(ctx: &RuleContext<Self>) -> Self::Signals {
        let node = ctx.query();
        if !node.is_key_property()? {
            return None;
        }
        let model = ctx.model();
        let reference = node.as_js_reference_identifier()?;
        // Given the reference identifier retrieved from the key property,
        // find the declaration and ensure it resolves to the parameter of a
        // function, and navigate up to the closest call expression.
        let parameter = model
            .binding(&reference)
            .and_then(|declaration| declaration.syntax().parent())
            .and_then(JsFormalParameter::cast)?;
        let function = parameter
            .parent::<JsParameterList>()
            .and_then(|list| list.parent::<JsParameters>())
            .and_then(|parameters| parameters.parent::<AnyJsFunction>())?;
        let call_expression = function
            .parent::<JsCallArgumentList>()
            .and_then(|arguments| arguments.parent::<JsCallArguments>())
            .and_then(|arguments| arguments.parent::<JsCallExpression>())?;
        // Check if the caller is an array method and the parameter is the
        // array index of that method.
        if !is_array_method_index(&parameter, &call_expression)? {
            return None;
        }
        if node.is_property_object_member() {
            // `{ key: index }` is only a problem when the object is passed to
            // a `React.cloneElement` call; bail out for any other call.
            let object_expression = node
                .parent::<JsObjectMemberList>()
                .and_then(|list| list.parent::<JsObjectExpression>())?;
            let call_expression = object_expression
                .parent::<JsCallArgumentList>()
                .and_then(|list| list.parent::<JsCallArguments>())
                .and_then(|arguments| arguments.parent::<JsCallExpression>())?;
            let callee = call_expression.callee().ok()?;
            if !is_react_call_api(callee, model, ReactLibrary::React, "cloneElement") {
                return None;
            }
        }
        // Both the JSX-attribute and the cloneElement forms report the same
        // state: the parameter's binding plus the offending reference.
        let binding = parameter.binding().ok()?;
        let binding_origin = binding.as_any_js_binding()?.as_js_identifier_binding()?;
        Some(NoArrayIndexKeyState {
            binding_origin: binding_origin.clone(),
            incorrect_prop: reference,
        })
    }

    /// Builds the user-facing diagnostic: primary label on the `key` value,
    /// secondary label on the parameter it came from, plus advisory notes.
    fn diagnostic(_ctx: &RuleContext<Self>, state: &Self::State) -> Option<RuleDiagnostic> {
        let NoArrayIndexKeyState {
            binding_origin: incorrect_key,
            incorrect_prop,
        } = state;
        let diagnostic = RuleDiagnostic::new(
            rule_category!(),
            incorrect_prop.syntax().text_trimmed_range(),
            markup! {"Avoid using the index of an array as key property in an element."},
        )
        .detail(
            incorrect_key.syntax().text_trimmed_range(),
            markup! {"This is the source of the key value."},
        ).note(
            markup! {"The order of the items may change, and this also affects performances and component state."}
        ).note(
            markup! {
                "Check the "<Hyperlink href="https://reactjs.org/docs/lists-and-keys.html#keys">"React documentation"</Hyperlink>". "
            }
        );
        Some(diagnostic)
    }
}
/// Given a parameter and a call expression, it navigates the `callee` of the call
/// and check if the method called by this function belongs to an array method
/// and if the parameter is an array index
///
/// ```js
/// Array.map((_, index) => {
///     return <Component key={index} />
/// })
/// ```
///
/// Given this example, the input node is the `index` and `Array.map(...)` call and we navigate to
/// retrieve the name `map` and we check if it belongs to an `Array.prototype` method.
fn is_array_method_index(
    parameter: &JsFormalParameter,
    call_expression: &JsCallExpression,
) -> Option<bool> {
    let member_expression =
        AnyJsMemberExpression::cast_ref(call_expression.callee().ok()?.syntax())?;
    let name = member_expression.member_name()?;
    let name = name.text();
    if matches!(
        name,
        "map" | "flatMap" | "from" | "forEach" | "filter" | "some" | "every" | "find" | "findIndex"
    ) {
        // For these methods the index is the callback's 2nd parameter.
        // NOTE(review): `syntax().index()` is a position among syntax-tree
        // children (separators included), hence 2 rather than 1 — confirm
        // this maps to "second parameter" for all list shapes.
        Some(parameter.syntax().index() == 2)
    } else if matches!(name, "reduce" | "reduceRight") {
        // For reduce/reduceRight the index is the callback's 3rd parameter.
        Some(parameter.syntax().index() == 4)
    } else {
        // Not an array iteration method: `None` short-circuits the caller
        // the same way `Some(false)` would after its check.
        None
    }
}
| true |
9128298b70009174e3d94900a941953c6de47253
|
Rust
|
xorxornop/oidc-rs
|
/src/models/identity_resource.rs
|
UTF-8
| 2,429 | 3.265625 | 3 |
[
"Apache-2.0"
] |
permissive
|
use std::vec::Vec;
use super::resource::Resource;
/// Models a user identity resource — an OpenID Connect scope describing a
/// set of identity claims about the user.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct IdentityResource {
    /// Indicates if this resource is enabled. Defaults to true.
    enabled: bool,
    /// The unique name of the resource.
    name: String,
    /// Display name of the resource.
    display_name: String,
    /// Description of the resource.
    description: String,
    /// List of accociated user claims that should be included when this resource is requested.
    user_claims: Vec<String>,
    /// Specifies whether the user can de-select the scope on the consent screen (if the consent screen wants to implement such a feature). Defaults to false.
    required: bool,
    /// Specifies whether the consent screen will emphasize this scope (if the consent screen wants to implement such a feature).
    /// Use this setting for sensitive or important scopes. Defaults to false.
    /// NOTE(review): the field is spelled "emphasise", which is also the
    /// serialized key via the serde derive — confirm consumers expect it.
    emphasise: bool,
    /// Specifies whether this scope is shown in the discovery document. Defaults to true.
    show_in_discovery_document: bool
}
impl Resource for IdentityResource {
    /// Creates an enabled resource named `name` with no user claims and
    /// default consent-screen settings.
    fn new(name: String) -> IdentityResource {
        IdentityResource::new_with_claims(name, vec![])
    }
    /// Creates an enabled resource carrying the given user claims.
    /// Display name/description start empty; `required`/`emphasise` default
    /// to false; the resource is shown in the discovery document.
    fn new_with_claims(name: String, user_claims: Vec<String>) -> IdentityResource {
        IdentityResource {
            name,
            enabled: true,
            display_name: "".to_owned(),
            description: "".to_owned(),
            user_claims,
            required: false,
            emphasise: false,
            show_in_discovery_document: true
        }
    }
    // Simple field accessors required by the `Resource` trait.
    fn enabled(&self) -> bool {
        self.enabled
    }
    fn name(&self) -> &String {
        &self.name
    }
    fn display_name(&self) -> &String {
        &self.display_name
    }
    fn description(&self) -> &String {
        &self.description
    }
    fn user_claims(&self) -> &Vec<String>{
        &self.user_claims
    }
}
impl IdentityResource {
    /// Builds the standard OpenID Connect `openid` scope: always enabled,
    /// required, and carrying the `sub` (subject identifier) claim.
    /// NOTE(review): this constructor is private and not referenced in this
    /// file — presumably it should be `pub` so consumers can use the
    /// standard scope; confirm before exposing.
    fn new_openid() -> IdentityResource {
        IdentityResource {
            name: "openid".to_owned(),
            enabled: true,
            display_name: "Your user identifier".to_owned(),
            description: "".to_owned(),
            user_claims: vec!["sub".to_owned()],
            required: true,
            emphasise: false,
            show_in_discovery_document: true
        }
    }
}
| true |
43ab846803dd84a56f29bb8d9a87729c119938a4
|
Rust
|
maxmcc/aoc-2020
|
/src/bin/day06.rs
|
UTF-8
| 1,936 | 3.21875 | 3 |
[
"MIT",
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
] |
permissive
|
use anyhow::ensure;
use aoc::{Parse, Result, Solve};
use std::ops::{BitAnd, BitOr};
/// Parsed customs declaration forms: one 26-bit answer mask per person,
/// grouped by travel group (groups are separated by blank lines).
#[derive(Clone, Debug)]
struct CustomsForms {
    // groups[g][p]: bit i set means person p in group g answered "yes" to
    // question ('a' + i).
    groups: Vec<Vec<u32>>,
}
impl<'a> Parse<'a> for CustomsForms {
    fn parse<'b: 'a>(input: &'b str) -> Result<Self> {
        // Folds one person's answer line into a 26-bit mask, rejecting any
        // character outside 'a'..='z'.
        fn parse_answers(line: &str) -> Result<u32> {
            line.trim().chars().try_fold(0, |acc, ch| {
                ensure!(('a'..='z').contains(&ch), "unexpected char {:?}", ch);
                Ok(acc | 1 << (ch as u8 - b'a'))
            })
        }
        let groups = input
            .split("\n\n")
            .map(|group| group.lines().map(parse_answers).collect::<Result<Vec<_>>>())
            .collect::<Result<_>>()?;
        Ok(CustomsForms { groups })
    }
}
struct PartOne;
impl Solve<'_> for PartOne {
    type Input = CustomsForms;
    type Solution = u32;
    /// Sums, over all groups, the number of questions *anyone* in the group
    /// answered yes to: OR the member masks together and count set bits.
    fn solve(input: &Self::Input) -> Result<Self::Solution> {
        Ok(input
            .groups
            .iter()
            .map(|group| group.iter().fold(0, u32::bitor).count_ones())
            .sum())
    }
}
struct PartTwo;
impl Solve<'_> for PartTwo {
    type Input = CustomsForms;
    type Solution = u32;
    /// Sums, over all groups, the number of questions *everyone* in the
    /// group answered yes to: AND the member masks together and count set
    /// bits.
    fn solve(input: &Self::Input) -> Result<Self::Solution> {
        // Identity element for AND: all 26 question bits set. Replaces the
        // magic constant 0x3FFFFFF with its derivation.
        const ALL_QUESTIONS: u32 = (1 << 26) - 1;
        Ok(input
            .groups
            .iter()
            .map(|group| group.iter().fold(ALL_QUESTIONS, u32::bitand).count_ones())
            .sum())
    }
}
aoc::main!(day06);
#[cfg(test)]
mod examples {
    use super::*;
    use indoc::indoc;
    #[test]
    fn example() {
        // NOTE(review): the blank lines that separate the example's travel
        // groups appear to have been lost in this copy; without blank
        // separators the whole input parses as a single group and the
        // expected answers (11 and 6, from the AoC 2020 day 6 statement)
        // will not match. Restore the original grouping:
        //   abc / a,b,c / ab,ac / a,a,a,a / b
        let input = CustomsForms::parse(indoc! {"
            abc
            a
            b
            c
            ab
            ac
            a
            a
            a
            a
            b
        "})
        .unwrap();
        assert_eq!(PartOne::solve(&input).unwrap(), 11);
        assert_eq!(PartTwo::solve(&input).unwrap(), 6);
    }
}
aoc::solved!(day06, PartOne = 6630, PartTwo = 3437);
| true |
bb49345020cf30e70cf6fd2be10991a9dbf33510
|
Rust
|
magiclen/validators-old
|
/src/validator_option.rs
|
UTF-8
| 785 | 3 | 3 |
[
"MIT"
] |
permissive
|
/// Tri-state policy flag for validators: a component may be mandatory
/// (`Must`), optional (`Allow`) or forbidden (`NotAllow`).
/// Derives extended with `Clone`/`Copy`/`Eq`/`Hash` — the enum is fieldless,
/// so all are free and backward-compatible.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum ValidatorOption {
    Must,
    Allow,
    NotAllow,
}
impl ValidatorOption {
    /// `true` when the component may be present (`Must` or `Allow`).
    #[inline]
    pub fn allow(&self) -> bool {
        // Matches kept exhaustive so adding a variant forces a decision here.
        match self {
            ValidatorOption::Must => true,
            ValidatorOption::Allow => true,
            ValidatorOption::NotAllow => false,
        }
    }
    /// `true` when the component must be absent (`NotAllow`).
    #[inline]
    pub fn not_allow(&self) -> bool {
        match self {
            ValidatorOption::Must => false,
            ValidatorOption::Allow => false,
            ValidatorOption::NotAllow => true,
        }
    }
    /// `true` when the component is mandatory (`Must`).
    #[inline]
    pub fn must(&self) -> bool {
        match self {
            ValidatorOption::Must => true,
            ValidatorOption::Allow => false,
            ValidatorOption::NotAllow => false,
        }
    }
}
| true |
6dd975f7d91ed1f6add3890bc4bfb75ba093db2f
|
Rust
|
FlixCoder/open-forward-NN
|
/src/lib.rs
|
UTF-8
| 17,272 | 2.9375 | 3 |
[
"Apache-2.0"
] |
permissive
|
//! @author = FlixCoder
//!
//! Architecture influenced by Keras: Sequential models
#![allow(clippy::expect_used)] // TODO: get rid of it.
mod activations;
pub mod losses;
use std::{fs::File, io::prelude::*};
use rand::prelude::*;
use rand_distr::Normal;
use serde::{Deserialize, Serialize};
/// This crate's float type: `f32` by default.
// BUG FIX: the f32 alias had no cfg gate, so building with the `floats-f64`
// feature defined `Float` twice and failed to compile. Gate the f32 alias on
// the feature being absent.
#[cfg(not(feature = "floats-f64"))]
pub type Float = f32;
/// This crate's float type: `f64`, selected by the `floats-f64` feature.
#[cfg(feature = "floats-f64")]
pub type Float = f64;
// TODO:
// add 3d data input, processing and flatten
// add (batch) normalization? (using running average)
// try new softmax without exp? (possibly bad for losses)
// multiplication node layer? (try some impossible stuff for backpropagation)
// add convolutional and pooling layers?
// add dropout layer with param getting optimized as well
// fix dropout: dropout has to be equal across batches => modifications needed.
// currently it kind of is more noise
/// Define the available types of layers
#[derive(Debug, Clone, PartialEq, Deserialize, Serialize)]
pub enum Layer {
    //Activation functions
    /// linear activation
    Linear,
    /// rectified linear unit
    ReLU,
    /// leaky rectified linear unit (factor = factor to apply for x < 0)
    LReLU(Float),
    /// parametric (leaky) rectified linear unit (factor = factor to apply for x
    /// < 0)
    PReLU(Float),
    /// exponential linear unit (alpha = 1)
    ELU,
    /// parametric exponential linear unit (factors a and b)
    PELU(Float, Float),
    /// scaled exponential linear unit (self-normalizing). parameters are
    /// adapted to var=1 data
    SELU,
    /// sigmoid
    Sigmoid,
    /// tanh
    Tanh,
    /// absolute
    Abs,
    /// quadratic
    Quadratic,
    /// cubic
    Cubic,
    /// clipped linear activation [-1, 1]
    ClipLinear,
    /// gaussian
    Gaussian,
    /// soft plus
    SoftPlus,
    /// soft max
    SoftMax,
    //Regularization / Normalization / Utility
    /// Apply dropout to the previous layer (d = percent of neurons to drop)
    /// (see the "fix dropout" TODO at the top of the file: the current
    /// implementation samples indices with replacement)
    Dropout(Float),
    //Neuron-layers
    /// Dense layer (params = weights of the layer, be sure to have the correct
    /// dimensions! include bias as first parameter; one row per output unit)
    Dense(Vec<Vec<Float>>),
}
/// Definition of usable initializers in Sequential.add_layer_dense
#[derive(Debug, Copy, Clone, PartialEq)]
pub enum Initializer {
    /// Glorot/Xavier initialization (std = sqrt(2/(fan_in+fan_out))),
    /// preferably for Tanh
    Glorot,
    /// He initialization (std = sqrt(2/fan_in)), preferably for ReLU
    He,
    /// initialize with a constant value
    Const(Float),
}
/// Implementation of the neural network / sequential models of layers
#[derive(Debug, Clone, PartialEq, Deserialize, Serialize)]
pub struct Sequential {
    /// Number of inputs.
    num_inputs: usize,
    /// The layers in this sequential model.
    layers: Vec<Layer>,
    /// Number of outputs. Invariant: tracks the width of the last Dense
    /// layer (or `num_inputs` when no Dense layer has been added yet);
    /// maintained by `add_layer`/`add_layer_dense`.
    num_outputs: usize,
}
impl Sequential {
    /// Create a new instance of a sequential model
    /// num_inputs = the number of inputs to the model
    /// Until layers are added, the output size equals `num_inputs`.
    #[must_use]
    pub fn new(num_inputs: usize) -> Sequential {
        Sequential { num_inputs, layers: Vec::new(), num_outputs: num_inputs }
    }
    /// Returns the requested input dimension
    #[must_use]
    pub fn get_num_inputs(&self) -> usize {
        self.num_inputs
    }
    /// Get the layers (as ref)
    #[must_use]
    pub fn get_layers(&self) -> &Vec<Layer> {
        &self.layers
    }
    /// Get the layers (as mut)
    /// NOTE(review): mutating layers directly bypasses the `num_outputs`
    /// bookkeeping done by `add_layer` — callers must keep shapes consistent.
    pub fn get_layers_mut(&mut self) -> &mut Vec<Layer> {
        &mut self.layers
    }
    /// Return the flat parameters of the layers (including LReLU factors).
    /// Used for evolution-strategies
    /// Exports, in layer order: PReLU factor, PELU (a, b), then every Dense
    /// weight row-major (bias first per row).
    /// NOTE(review): despite the doc above, LReLU and Dropout parameters are
    /// currently commented out and NOT exported — confirm intent.
    #[must_use]
    pub fn get_params(&self) -> Vec<Float> {
        let mut params = Vec::new();
        for layer in self.layers.iter() {
            match layer {
                //Activation functions
                //Layer::LReLU(factor) => params.push(*factor),
                Layer::PReLU(factor) => params.push(*factor),
                Layer::PELU(a, b) => {
                    params.push(*a);
                    params.push(*b);
                }
                //Regularization / Normalization / Utility
                //Layer::Dropout(d) => params.push(*d),
                //Neuron-layers
                Layer::Dense(weights) => {
                    for vec in weights.iter() {
                        for val in vec.iter() {
                            params.push(*val);
                        }
                    }
                }
                //rest does not have params (that have to/may be changed)
                _ => (),
            }
        }
        params
    }
    /// Set the layers' parameters (including LReLU factors) by a flat input.
    /// Used for evolution-strategies.
    /// Panics if params' size does not fit the layers
    /// Consumes parameters in the same order `get_params` produces them.
    pub fn set_params(&mut self, params: &[Float]) -> &mut Self {
        let mut iter = params.iter();
        for layer in self.layers.iter_mut() {
            match layer {
                //Activation functions
                //Layer::LReLU(factor) => *factor = *iter.next().expect("Vector params is not big
                // enough!"),
                Layer::PReLU(factor) => {
                    *factor = *iter.next().expect("Vector params is not big enough!");
                }
                Layer::PELU(a, b) => {
                    *a = *iter.next().expect("Vector params is not big enough!");
                    *b = *iter.next().expect("Vector params is not big enough!");
                }
                //Regularization / Normalization / Utility
                //Layer::Dropout(d) => *d = *iter.next().expect("Vector params is not big
                // enough!"), Neuron-layers
                Layer::Dense(weights) => {
                    for vec in weights.iter_mut() {
                        for val in vec.iter_mut() {
                            *val = *iter.next().expect("Vector params is not big enough!");
                        }
                    }
                }
                //rest does not have params (that have to/may be changed)
                _ => (),
            }
        }
        self
    }
    /// Add a layer to the sequential model. Be sure to have appropriate
    /// parameters inside the layer, they are not checked! You can use specific
    /// add_layer_<layer> methods to get simple, correct creation of layers with
    /// parameters.
    /// Only `Dense` layers update the model's recorded output size.
    pub fn add_layer(&mut self, layer: Layer) -> &mut Self {
        #[allow(clippy::single_match)]
        match &layer {
            Layer::Dense(weights) => self.num_outputs = weights.len(),
            _ => (),
        }
        self.layers.push(layer);
        self
    }
    /// Add a LReLU layer:
    /// factor = factor to apply to x < 0
    pub fn add_layer_lrelu(&mut self, factor: Float) -> &mut Self {
        let layer = Layer::LReLU(factor);
        self.layers.push(layer);
        self
    }
    /// Add a PReLU layer:
    /// factor = factor to apply to x < 0
    pub fn add_layer_prelu(&mut self, factor: Float) -> &mut Self {
        let layer = Layer::PReLU(factor);
        self.layers.push(layer);
        self
    }
    /// Add a PELU layer:
    /// a and b are the specific factors
    pub fn add_layer_pelu(&mut self, a: Float, b: Float) -> &mut Self {
        let layer = Layer::PELU(a, b);
        self.layers.push(layer);
        self
    }
    /// Add a Dropout layer:
    /// d = fraction of nodes to drop
    /// Panics unless 0.0 <= d < 1.0.
    pub fn add_layer_dropout(&mut self, d: Float) -> &mut Self {
        if !(0.0..1.0).contains(&d) {
            panic!("Inappropriate dropout parameter!");
        }
        let layer = Layer::Dropout(d);
        self.layers.push(layer);
        self
    }
    /// Add a Dense layer:
    /// neurons = number of neurons/units in the layer
    /// init = initializer to use (use He for ReLU, Glorot for Tanh)
    /// Weight rows are sized from the current output width plus one bias
    /// entry, and `num_outputs` is updated to `neurons`.
    pub fn add_layer_dense(&mut self, neurons: usize, init: Initializer) -> &mut Self {
        let weights = match init {
            Initializer::Glorot => gen_glorot(self.num_outputs, neurons),
            Initializer::He => gen_he(self.num_outputs, neurons),
            Initializer::Const(val) => vec![vec![val; self.num_outputs + 1]; neurons],
        };
        self.num_outputs = neurons;
        let layer = Layer::Dense(weights);
        self.layers.push(layer);
        self
    }
    /// Do a forward pass through the model
    /// Panics if `input` length differs from the configured input size.
    #[must_use]
    pub fn run(&self, input: &[Float]) -> Vec<Float> {
        if input.len() != self.num_inputs {
            panic!("Incorrect input size!");
        }
        // `result` always holds the current layer's output vector.
        let mut result = input.to_vec();
        for layer in self.layers.iter() {
            match layer {
                //Activation functions
                Layer::Linear => result.iter_mut().for_each(|x| {
                    *x = activations::linear(*x);
                }),
                Layer::ReLU => result.iter_mut().for_each(|x| {
                    *x = activations::relu(*x);
                }),
                Layer::LReLU(factor) => result.iter_mut().for_each(|x| {
                    *x = activations::lrelu(*x, *factor);
                }),
                // NOTE: PReLU reuses `lrelu`; the "parametric" part is that
                // its factor is exposed through get_params/set_params.
                Layer::PReLU(factor) => result.iter_mut().for_each(|x| {
                    *x = activations::lrelu(*x, *factor);
                }),
                Layer::ELU => result.iter_mut().for_each(|x| {
                    *x = activations::elu(*x);
                }),
                Layer::PELU(a, b) => result.iter_mut().for_each(|x| {
                    *x = activations::pelu(*x, *a, *b);
                }),
                Layer::SELU => result.iter_mut().for_each(|x| {
                    *x = activations::selu(*x);
                }),
                Layer::Sigmoid => result.iter_mut().for_each(|x| {
                    *x = activations::sigmoid(*x);
                }),
                Layer::Tanh => result.iter_mut().for_each(|x| {
                    *x = activations::tanh(*x);
                }),
                Layer::Abs => result.iter_mut().for_each(|x| {
                    *x = activations::abs(*x);
                }),
                Layer::Quadratic => result.iter_mut().for_each(|x| {
                    *x = activations::quadratic(*x);
                }),
                Layer::Cubic => result.iter_mut().for_each(|x| {
                    *x = activations::cubic(*x);
                }),
                Layer::ClipLinear => result.iter_mut().for_each(|x| {
                    *x = activations::clip_linear(*x);
                }),
                Layer::Gaussian => result.iter_mut().for_each(|x| {
                    *x = activations::gaussian(*x);
                }),
                Layer::SoftPlus => result.iter_mut().for_each(|x| {
                    *x = activations::softplus(*x);
                }),
                // SoftMax operates on the whole vector at once.
                Layer::SoftMax => activations::softmax(&mut result),
                //Regularization / Normalization / Utility
                Layer::Dropout(d) => apply_dropout(&mut result, *d),
                //Neuron-layers
                Layer::Dense(weights) => result = modified_matrix_dotprod(weights, &result),
            }
        }
        result
    }
/// Predict values (forward pass) for a vector of input data (Vec<input>):
#[must_use]
pub fn predict(&self, inputs: &[Vec<Float>]) -> Vec<Vec<Float>> {
let mut results = Vec::new();
for input in inputs.iter() {
let result = self.run(input);
results.push(result);
}
results
}
    /// Encodes the model as a JSON string.
    /// Panics if serialization fails.
    #[must_use]
    pub fn to_json(&self) -> String {
        serde_json::to_string(self).expect("Encoding JSON failed!")
    }
    /// Builds a new model from a JSON string.
    /// Panics on malformed input.
    #[must_use]
    pub fn from_json(encoded: &str) -> Sequential {
        serde_json::from_str(encoded).expect("Decoding JSON failed!")
    }
    /// Builds a new model from a JSON reader (e.g. file).
    /// Panics on malformed input or read errors.
    #[must_use]
    pub fn from_reader<R: Read>(encoded: R) -> Sequential {
        serde_json::from_reader(encoded).expect("Decoding JSON failed!")
    }
    /// Saves the model to a file
    /// I/O errors are returned; encoding errors still panic (via `to_json`).
    pub fn save(&self, file: &str) -> Result<(), std::io::Error> {
        let mut file = File::create(file)?;
        let json = self.to_json();
        file.write_all(json.as_bytes())?;
        Ok(())
    }
    /// Creates a model from a previously saved file
    /// I/O errors are returned; decode errors panic (via `from_reader`).
    pub fn load(file: &str) -> Result<Sequential, std::io::Error> {
        let file = File::open(file)?;
        Ok(Sequential::from_reader(file))
    }
/// Calculate the error to a target set (Vec<(x, y)>):
/// Mean squared error (for regression)
/// Potentially ignores different vector lenghts!
#[must_use]
pub fn calc_mse(&self, target: &[(Vec<Float>, Vec<Float>)]) -> Float {
let mut avg_error = 0.0;
for (x, y) in target.iter() {
let pred = self.run(x);
let mut metric = 0.0;
for (yp, yt) in pred.iter().zip(y.iter()) {
let error = *yt - *yp;
metric += error * error;
}
metric /= y.len() as Float;
avg_error += metric;
}
avg_error /= target.len() as Float;
avg_error
}
/// Calculate the error to a target set (Vec<(x, y)>):
/// Root mean squared error (for regression)
/// Potentially ignores different vector lenghts!
#[must_use]
pub fn calc_rmse(&self, target: &[(Vec<Float>, Vec<Float>)]) -> Float {
let mut avg_error = 0.0;
for (x, y) in target.iter() {
let pred = self.run(x);
let mut metric = 0.0;
for (yp, yt) in pred.iter().zip(y.iter()) {
let error = *yt - *yp;
metric += error * error;
}
metric /= y.len() as Float;
avg_error += metric.sqrt();
}
avg_error /= target.len() as Float;
avg_error
}
/// Calculate the error to a target set (Vec<(x, y)>):
/// Mean absolute error (for regression)
/// Potentially ignores different vector lenghts!
#[must_use]
pub fn calc_mae(&self, target: &[(Vec<Float>, Vec<Float>)]) -> Float {
let mut avg_error = 0.0;
for (x, y) in target.iter() {
let pred = self.run(x);
let mut metric = 0.0;
for (yp, yt) in pred.iter().zip(y.iter()) {
let error = *yt - *yp;
metric += error.abs();
}
metric /= y.len() as Float;
avg_error += metric;
}
avg_error /= target.len() as Float;
avg_error
}
/// Calculate the error to a target set (Vec<(x, y)>):
/// Mean absolute percentage error (better don't use if target has 0 values)
/// (for regression) Potentially ignores different vector lenghts!
#[must_use]
pub fn calc_mape(&self, target: &[(Vec<Float>, Vec<Float>)]) -> Float {
let mut avg_error = 0.0;
for (x, y) in target.iter() {
let pred = self.run(x);
let mut metric = 0.0;
for (yp, yt) in pred.iter().zip(y.iter()) {
let error = (*yt - *yp) / *yt;
metric += error.abs();
}
metric *= 100.0 / y.len() as Float;
avg_error += metric;
}
avg_error /= target.len() as Float;
avg_error
}
/// Calculate the error to a target set (Vec<(x, y)>):
/// logcosh (for regression)
/// Potentially ignores different vector lenghts!
#[must_use]
pub fn calc_logcosh(&self, target: &[(Vec<Float>, Vec<Float>)]) -> Float {
let mut avg_error = 0.0;
for (x, y) in target.iter() {
let pred = self.run(x);
let mut metric = 0.0;
for (yp, yt) in pred.iter().zip(y.iter()) {
let error = *yt - *yp;
metric += error.cosh().ln();
}
metric /= y.len() as Float;
avg_error += metric;
}
avg_error /= target.len() as Float;
avg_error
}
/// Calculate the error to a target set (Vec<(x, y)>):
/// binary cross-entropy (be sure to use 0, 1 classifiers+labels) (for
/// classification) Potentially ignores different vector lenghts!
#[must_use]
pub fn calc_binary_crossentropy(&self, target: &[(Vec<Float>, Vec<Float>)]) -> Float {
let mut avg_error = 0.0;
for (x, y) in target.iter() {
let pred = self.run(x);
let mut metric = 0.0;
for (yp, yt) in pred.iter().zip(y.iter()) {
let error = (*yt).mul_add(yp.ln(), (1.0 - *yt) * (1.0 - *yp).ln());
metric += -error;
}
metric /= y.len() as Float;
avg_error += metric;
}
avg_error /= target.len() as Float;
avg_error
}
/// Calculate the error to a target set (Vec<(x, y)>):
/// categorical cross-entropy (be sure to use 0, 1 classifiers+labels) (for
/// classification) Potentially ignores different vector lenghts!
#[must_use]
pub fn calc_categorical_crossentropy(&self, target: &[(Vec<Float>, Vec<Float>)]) -> Float {
let mut avg_error = 0.0;
for (x, y) in target.iter() {
let pred = self.run(x);
let mut metric = 0.0;
for (yp, yt) in pred.iter().zip(y.iter()) {
let error = *yt * (*yp).ln();
metric += -error;
}
avg_error += metric;
}
avg_error /= target.len() as Float;
avg_error
}
/// Calculate the error to a target set (Vec<(x, y)>):
/// hinge loss (be sure to use 1, -1 classifiers+labels) (for
/// classification) Potentially ignores different vector lenghts!
#[must_use]
pub fn calc_hingeloss(&self, target: &[(Vec<Float>, Vec<Float>)]) -> Float {
let mut avg_error = 0.0;
for (x, y) in target.iter() {
let pred = self.run(x);
let mut metric = 0.0;
for (yp, yt) in pred.iter().zip(y.iter()) {
let error = 1.0 - *yt * *yp;
metric += error.max(0.0);
}
metric /= y.len() as Float;
avg_error += metric;
}
avg_error /= target.len() as Float;
avg_error
}
}
//helper functions
/// Generate a vector of random numbers with 0 mean and std std, normally
/// distributed. Using standard thread_rng.
/// Panics if `std` is not a valid normal-distribution parameter.
#[must_use]
pub fn gen_rnd_vec(n: usize, std: Float) -> Vec<Float> {
    let mut rng = thread_rng();
    let normal = Normal::new(0.0, f64::from(std)).expect("Wrong normal distribution parameters!")
;
    normal.sample_iter(&mut rng).take(n).map(|x| x as Float).collect()
}
/// Generate parameters based on Glorot initialization
/// (std = sqrt(2 / (fan_in + fan_out))). Each of the `n_out` rows holds one
/// output unit's weights, with one extra leading entry for the bias.
#[must_use]
fn gen_glorot(n_in: usize, n_out: usize) -> Vec<Vec<Float>> {
    let std = (2.0 / (n_in + n_out) as Float).sqrt();
    let mut weights = Vec::new();
    for _ in 0..n_out {
        weights.push(gen_rnd_vec(n_in + 1, std));
    }
    weights
}
/// Generate parameters based on He initialization (std = sqrt(2 / fan_in)).
/// Row layout matches `gen_glorot`: bias first, then `n_in` weights.
#[must_use]
fn gen_he(n_in: usize, n_out: usize) -> Vec<Vec<Float>> {
    let std = (2.0 / n_in as Float).sqrt();
    let mut weights = Vec::new();
    for _ in 0..n_out {
        weights.push(gen_rnd_vec(n_in + 1, std));
    }
    weights
}
/// Apply dropout to a layer. d = fraction of nodes to be dropped
/// NOTE(review): indices are drawn *with replacement*, so the same node can
/// be picked twice and fewer than `d * len` nodes may actually be zeroed —
/// this matches the "fix dropout" TODO at the top of the file.
fn apply_dropout(layer: &mut [Float], d: Float) {
    if d == 0.0 {
        //allow zero dropout to allow later change, but do nothing here
        return;
    }
    // set nodes to zero
    let num = (d * layer.len() as Float) as usize;
    let mut rng = thread_rng();
    for _ in 0..num {
        let i = rng.gen::<usize>() % layer.len();
        layer[i] = 0.0;
    }
    //divide other nodes by probability to adapt variance
    // (inverted dropout: scale survivors by 1 / keep-probability; zeroed
    // entries stay zero)
    layer.iter_mut().for_each(|x| {
        *x /= 1.0 - d;
    });
}
/// Calculate one layer's pre-activations: for every weight row, the bias
/// (the row's first element) plus the dot product of the remaining weights
/// with `values`.
/// An empty weights matrix yields an empty result (indicating zero nodes).
#[must_use]
fn modified_matrix_dotprod(weights: &[Vec<Float>], values: &[Float]) -> Vec<Float> {
    weights
        .iter()
        .map(|node| {
            let mut terms = node.iter();
            // The leading entry of each row is the bias.
            let bias = *terms.next().expect("Empty weights! (Bias)");
            // `zip` truncates to the shorter sequence, matching the original
            // accumulation order exactly.
            terms
                .zip(values.iter())
                .fold(bias, |acc, (weight, value)| acc + weight * value)
        })
        .collect()
}
| true |
6ec29b0b4f27f896632e6161aa7c58c2e72562cb
|
Rust
|
isavegas/aoc_2019
|
/src/day/day_06.rs
|
UTF-8
| 1,877 | 2.78125 | 3 |
[] |
no_license
|
use aoc_core::{bail, AoCDay, ErrorWrapper};
use std::collections::HashMap;
pub struct Day06;
/// Walks the orbit map upward from `target` to the root "COM", appending each
/// ancestor (nearest parent first) to `v`. Panics if a body has no parent
/// entry in `map`.
fn build_chain(target: &str, map: &HashMap<String, String>, v: &mut Vec<String>) {
    let mut current = target;
    loop {
        if current == "COM" {
            break;
        }
        current = map.get(current).unwrap();
        v.push(current.to_string());
    }
}
impl AoCDay for Day06 {
    /// Day number reported to the framework.
    fn day(&self) -> usize {
        6
    }
    /// No pre-recorded expected answers.
    fn expected(&self) -> (Option<&'static str>, Option<&'static str>) {
        (None, None)
    }
    /// Part 1: total count of direct + indirect orbits.
    fn part1(&self, input: &str) -> Result<String, ErrorWrapper> {
        // "A)B" means B directly orbits A; map each orbiter to its parent.
        let mut orbit_map: HashMap<String, String> = HashMap::new();
        for s in input.trim().split('\n') {
            let v = s.split(')').collect::<Vec<&str>>();
            orbit_map.insert(v[1].to_string(), v[0].to_string());
        }
        let mut orbits = 0;
        let mut cache: Vec<String> = vec![];
        // Each body contributes one orbit per ancestor on its path to COM.
        for key in orbit_map.keys() {
            cache.clear();
            build_chain(key, &orbit_map, &mut cache);
            orbits += cache.len();
        }
        Ok(format!("{}", orbits))
    }
    /// Part 2: minimum orbital transfers between the bodies YOU and SAN orbit.
    fn part2(&self, input: &str) -> Result<String, ErrorWrapper> {
        let mut orbit_map: HashMap<String, String> = HashMap::new();
        for s in input.trim().split('\n') {
            let v = s.split(')').collect::<Vec<&str>>();
            orbit_map.insert(v[1].to_string(), v[0].to_string());
        }
        // Ancestor chains from each endpoint up to COM (nearest parent first).
        let mut start: Vec<String> = vec![];
        build_chain("YOU", &orbit_map, &mut start);
        let mut end: Vec<String> = vec![];
        build_chain("SAN", &orbit_map, &mut end);
        // First ancestor of YOU that also appears in SAN's chain is the meeting
        // point; transfers = steps up from YOU (i) + steps up from SAN (i2).
        for (i, n) in start.iter().enumerate() {
            if let Some(i2) = end.iter().position(|f| f == n) {
                return Ok(format!("{}", i + i2));
            }
        }
        bail!("Unable to find common orbit")
    }
}
/// Factory returning this day's solver as a trait object for the runner.
pub fn get_day() -> Box<dyn AoCDay> {
    Box::new(Day06)
}
| true |
9afb0242610556d8761bb61659c78101af75cfe1
|
Rust
|
akmal-ali-learning/sw-rust
|
/book/chapter8/hashmap/src/main.rs
|
UTF-8
| 1,572 | 3.53125 | 4 |
[
"MIT"
] |
permissive
|
use std::collections::HashMap;
fn main() {
let mut scores = HashMap::new();
scores.insert(String::from("Blue"), 10);
scores.insert(String::from("Yellow"), 50);
let teams = vec![String::from("Blue"), String::from("Yellow")];
let initial_scores = vec![10, 50];
let scores: HashMap<_, _> = teams.iter().zip(initial_scores.iter()).collect();
// Hash maps and ownership
let field_name = String::from("Favorite color");
let field_value = String::from("Blue");
let mut map = HashMap::new();
map.insert(&field_name,&field_value);
println!("{} : {}", field_name, field_value );
// Accessing values in a hash map
let value = map.get(&field_name);
let value = match value {
None => "None",
Some(i) => i,
};
println!("{} : {}", field_name, value);
let new_colour = String::from("Red");
// Overwriting a value
map.insert(&field_name, &new_colour );
let value = map.get(&field_name);
let value = match value {
None => "None",
Some(i) => i,
};
println!("{} : {}", field_name, value);
let new_field_name = String::from("Old Favorite colour");
// map.insert(&new_field_name,&new_colour);
map.entry(&new_field_name).or_insert(&field_value);
println!("{} : {}", new_field_name, map.entry(&new_field_name).or_insert(&field_value) );
println!("{:?}", map);
// Updating a value based on the old value.
let text = "hello hello hello hello world wonderful world";
let mut map = HashMap::new();
for word in text.split_whitespace() {
let count = map.entry(word).or_insert(0);
*count += 1;
}
println!("{:?}", map);
}
| true |
0744c76b4754b9934f79a7777ae0604328fe7220
|
Rust
|
bave/kdtree
|
/src/bin/bench.rs
|
UTF-8
| 2,715 | 2.84375 | 3 |
[
"BSD-2-Clause"
] |
permissive
|
extern crate kdtree;
#[allow(unused_imports)]
use self::kdtree::*;
//use kdtree::*;
#[allow(unused_imports)]
use rand::prelude::*;
/*
#![feature(asm)]
#[allow(dead_code)]
fn rdtscp_unsafe() -> u64
{
let mut aux : u32 = 0;
let aux_ptr : *mut u32 = &mut aux;
unsafe { std::arch::x86_64::__rdtscp(aux_ptr) }
}
*/
/// A labelled 3-D point used to populate the k-d tree in the benchmark.
#[derive(Debug)]
struct Point
{
    /// Identifier; the benchmark stores the insertion index as a string.
    id: String,
    /// Coordinates, one per dimension.
    vec: [f64;Point::DIM],
}
impl TraitPoint for Point
{
    // The benchmark works with 3-D points.
    const DIM: usize = 3;
    /// Returns the `depth`-th coordinate; panics if `depth >= DIM`.
    #[inline]
    fn dim(&self, depth: usize) -> f64
    {
        self.vec[depth]
    }
}
/// Benchmark driver: builds a large random k-d tree, then times k-NN and
/// radius queries over a batch of random query points.
fn main()
{
    let node_numbers = 10_000_000;
    let query_iteration = 10_000;
    let mut rng = thread_rng();
    //let rmin = (-0x80000000/2) as f64;
    //let rmax = (0x7fffffff/2) as f64;
    let rmin = -10000 as f64;
    let rmax = 10000 as f64;
    // Generate `node_numbers` uniformly random points in [rmin, rmax)^3.
    let mut vec_point : Vec<Point> = Vec::new();
    for i in 0 .. node_numbers {
        let x: f64 = rng.gen_range(rmin, rmax);
        let y: f64 = rng.gen_range(rmin, rmax);
        let z: f64 = rng.gen_range(rmin, rmax);
        let p = Point{id: i.to_string(), vec: [x, y, z]};
        vec_point.push(p);
    }
    let mut kdt = KDTree::new(&vec_point);
    //kdt.set_cross(2);
    print!("cross:{} / nodes: {}\n", kdt.get_cross(), node_numbers);
    // Time the tree construction separately from the queries.
    let start = std::time::Instant::now();
    kdt.build();
    let end = start.elapsed();
    print!(" build: {}.{:09}[sec]\n", end.as_secs(), end.subsec_nanos());
    print!("ter:{}\n", query_iteration);
    // Pre-generate the query points so RNG cost stays out of the query timing.
    let mut vec_query : Vec<Point> = Vec::new();
    for i in 0 .. query_iteration {
        let x: f64 = rng.gen_range(rmin, rmax);
        let y: f64 = rng.gen_range(rmin, rmax);
        let z: f64 = rng.gen_range(rmin, rmax);
        let q = Point{id: i.to_string(), vec: [x, y, z]};
        vec_query.push(q);
    }
    // Nearest-neighbour (k=1) timing over all query points.
    let tstart = std::time::Instant::now();
    for i in &vec_query {
        //let start = std::time::Instant::now();
        let _ = kdt.knn_search(i, 1);
        //let end = start.elapsed();
    }
    let tend = tstart.elapsed();
    print!(" knn:1 ");
    print!(" total_time: {}.{:09}[sec]\n", tend.as_secs(), tend.subsec_nanos());
    // Radius search (r=100) timing over the same query points.
    let tstart = std::time::Instant::now();
    for i in &vec_query {
        //let start = std::time::Instant::now();
        let _ = kdt.radius_search(i, 100.0);
        //let end = start.elapsed();
    }
    let tend = tstart.elapsed();
    print!(" radius:100 ");
    print!(" total_time: {}.{:09}[sec]\n", tend.as_secs(), tend.subsec_nanos());
    std::process::exit(0);
}
| true |
84d1d79d08f098d3c186e0c03f7cd48efdde9f20
|
Rust
|
aswaving/yenc
|
/src/encode.rs
|
UTF-8
| 12,458 | 3.375 | 3 |
[
"MIT"
] |
permissive
|
use super::constants::{CR, DEFAULT_LINE_SIZE, DOT, ESCAPE, LF, NUL};
use super::errors::EncodeError;
use std::fs::File;
use std::io::{BufReader, BufWriter, Read, Seek, SeekFrom, Write};
use std::path::Path;
/// Options for encoding.
/// The entry point for encoding a file (part)
/// to a file or (TCP) stream.
/// Options for encoding.
/// The entry point for encoding a file (part)
/// to a file or (TCP) stream.
#[derive(Debug)]
pub struct EncodeOptions {
    /// Maximum number of characters per encoded line.
    line_length: u8,
    /// Total number of parts (1 = single-part message).
    parts: u32,
    /// 1-based part number; only meaningful when `parts > 1`.
    part: u32,
    /// 1-based start offset of this part in the input (file offset + 1).
    begin: u64,
    /// Inclusive end offset; the part size is `end - begin + 1`.
    end: u64,
}
impl Default for EncodeOptions {
    /// Constructs a new EncodeOptions instance, with the following defaults:
    /// line_length = DEFAULT_LINE_SIZE (documented elsewhere as 128 — confirm
    /// against the constants module),
    /// parts = 1,
    /// part = begin = end = 0
    fn default() -> Self {
        EncodeOptions {
            line_length: DEFAULT_LINE_SIZE,
            parts: 1,
            part: 0,
            begin: 0,
            end: 0,
        }
    }
}
impl EncodeOptions {
    /// Constructs a new EncodeOptions with defaults, see Default impl.
    pub fn new() -> EncodeOptions {
        Default::default()
    }
    /// Sets the maximum line length.
    pub fn line_length(mut self, line_length: u8) -> EncodeOptions {
        self.line_length = line_length;
        self
    }
    /// Sets the number of parts (default=1).
    /// When the number of parts is 1, no '=ypart' line will be written
    /// in the output.
    pub fn parts(mut self, parts: u32) -> EncodeOptions {
        self.parts = parts;
        self
    }
    /// Sets the part number.
    /// Only used when `parts > 1`.
    /// The part number count starts at 1.
    pub fn part(mut self, part: u32) -> EncodeOptions {
        self.part = part;
        self
    }
    /// Sets the begin (which is the file offset + 1).
    /// Only used when `parts > 1`.
    /// The size of the part is `end - begin + 1`.
    pub fn begin(mut self, begin: u64) -> EncodeOptions {
        self.begin = begin;
        self
    }
    /// Sets the end.
    /// Only used when `parts > 1`.
    /// The size of the part is `end - begin + 1`.
    /// `end` should be larger than `begin`, otherwise an overflow error occurs.
    pub fn end(mut self, end: u64) -> EncodeOptions {
        self.end = end;
        self
    }
    /// Encodes the input file and writes it to the writer. For multi-part encoding, only
    /// one part is encoded. In case of multipart, the part number, begin and end offset need
    /// to be specified in the `EncodeOptions`. When directly encoding to an NNTP stream, the
    /// caller needs to take care of the message header and end of multi-line block (`".\r\n"`).
    ///
    /// # Example
    /// ```rust,no_run
    /// let encode_options = yenc::EncodeOptions::default()
    ///                             .parts(2)
    ///                             .part(1)
    ///                             .begin(1)
    ///                             .end(38400);
    /// let mut output_file = std::fs::File::create("test1.bin.yenc.001").unwrap();
    /// encode_options.encode_file("test1.bin", &mut output_file).unwrap();
    /// ```
    /// # Errors
    /// - when the output file already exists
    ///
    // NOTE(review): `output` is a caller-supplied writer, so this function no
    // longer creates the output file itself — the error condition above looks
    // stale; confirm and update the doc.
    pub fn encode_file<P, W>(&self, input_path: P, output: W) -> Result<(), EncodeError>
    where
        P: AsRef<Path>,
        W: Write,
    {
        // Fall back to an empty name when the path has no final component.
        let input_filename = input_path.as_ref().file_name();
        let input_filename = match input_filename {
            Some(s) => s.to_str().unwrap_or(""),
            None => "",
        };
        let input_file = File::open(&input_path)?;
        let length = input_file.metadata()?.len();
        self.encode_stream(input_file, output, length, input_filename)
    }
    /// Checks the options. Returns Ok(()) if all options are ok.
    /// All checks apply only to multi-part encodes (`parts > 1`).
    /// # Return
    /// - EncodeError::PartNumberMissing
    /// - EncodeError::PartBeginOffsetMissing
    /// - EncodeError::PartEndOffsetMissing
    /// - EncodeError::PartOffsetsInvalidRange
    pub fn check_options(&self) -> Result<(), EncodeError> {
        if self.parts > 1 && self.part == 0 {
            return Err(EncodeError::PartNumberMissing);
        }
        if self.parts > 1 && self.begin == 0 {
            return Err(EncodeError::PartBeginOffsetMissing);
        }
        if self.parts > 1 && self.end == 0 {
            return Err(EncodeError::PartEndOffsetMissing);
        }
        if self.parts > 1 && self.begin > self.end {
            return Err(EncodeError::PartOffsetsInvalidRange);
        }
        Ok(())
    }
    /// Encodes the data from the input stream and writes the encoded data to the output stream.
    /// The input stream does not need to be a file, therefore, size and input_filename
    /// must be specified. The input_filename ends up as the filename in the yenc header.
    #[allow(clippy::write_with_newline)]
    pub fn encode_stream<R, W>(
        &self,
        input: R,
        output: W,
        length: u64,
        input_filename: &str,
    ) -> Result<(), EncodeError>
    where
        R: Read + Seek,
        W: Write,
    {
        let mut rdr = BufReader::new(input);
        let mut checksum = crc32fast::Hasher::new();
        let mut buffer = [0u8; 8192];
        let mut col = 0;
        let mut num_bytes = 0;
        let mut output = BufWriter::new(output);
        self.check_options()?;
        // Header line: single-part omits `part=`, multi-part includes it.
        if self.parts == 1 {
            write!(
                output,
                "=ybegin line={} size={} name={}\r\n",
                self.line_length, length, input_filename
            )?;
        } else {
            write!(
                output,
                "=ybegin part={} line={} size={} name={}\r\n",
                self.part, self.line_length, length, input_filename
            )?;
        }
        if self.parts > 1 {
            write!(output, "=ypart begin={} end={}\r\n", self.begin, self.end)?;
        }
        // NOTE(review): with the default `begin == 0` (e.g. single-part use
        // where begin/end were never set), `self.begin - 1` underflows u64
        // (panics in debug builds) and `end - begin + 1` below is wrong.
        // Callers appear to be expected to always set begin/end — confirm.
        rdr.seek(SeekFrom::Start(self.begin - 1))?;
        let mut remainder = (self.end - self.begin + 1) as usize;
        // Encode in 8 KiB chunks, feeding the CRC32 with the *raw* bytes and
        // carrying the output column across chunk boundaries.
        while remainder > 0 {
            let buf_slice = if remainder > buffer.len() {
                &mut buffer[..]
            } else {
                &mut buffer[0..remainder]
            };
            rdr.read_exact(buf_slice)?;
            checksum.update(buf_slice);
            num_bytes += buf_slice.len();
            col = encode_buffer(buf_slice, col, self.line_length, &mut output)?;
            remainder -= buf_slice.len();
        }
        // Trailer: multi-part reports a per-part CRC (pcrc32).
        if self.parts > 1 {
            write!(
                output,
                "\r\n=yend size={} part={} pcrc32={:08x}\r\n",
                num_bytes,
                self.part,
                checksum.finalize()
            )?;
        } else {
            write!(
                output,
                "\r\n=yend size={} crc32={:08x}\r\n",
                num_bytes,
                checksum.finalize()
            )?;
        }
        Ok(())
    }
}
}
/// Encodes the input buffer and writes it to the writer.
///
/// Lines are wrapped with a maximum of `line_length` characters per line.
/// Does not include the header and footer lines.
/// Only `encode_stream` and `encode_file` produce the headers in the output.
/// The `col` parameter is the starting offset in the row. The result contains the new offset.
pub fn encode_buffer<W>(
    input: &[u8],
    col: u8,
    line_length: u8,
    writer: W,
) -> Result<u8, EncodeError>
where
    W: Write,
{
    let mut col = col;
    let mut writer = writer;
    // Capacity guess: escapes expand the output slightly; 1.04 approximates
    // the typical expansion so most encodes need no reallocation.
    let mut v = Vec::<u8>::with_capacity(((input.len() as f64) * 1.04) as usize);
    input.iter().for_each(|&b| {
        let encoded = encode_byte(b);
        v.push(encoded.0);
        col += match encoded.0 {
            // Escaped byte: emit the escape pair; two columns consumed.
            ESCAPE => {
                v.push(encoded.1);
                2
            }
            // A '.' at the start of a line is doubled — presumably NNTP
            // dot-stuffing so the line cannot read as end-of-message; confirm.
            DOT if col == 0 => {
                v.push(DOT);
                2
            }
            _ => 1,
        };
        // Wrap with CRLF once the line limit is reached.
        if col >= line_length {
            v.push(CR);
            v.push(LF);
            col = 0;
        }
    });
    writer.write_all(&v)?;
    Ok(col)
}
/// Maps one raw byte to its yEnc form: `(value + 42) mod 256`, with bytes
/// that land on NUL, CR, LF or ESCAPE escaped as `(ESCAPE, value + 64)`.
/// Returns `(first, second)`; `second` is 0 when no escape is needed.
#[inline(always)]
fn encode_byte(input_byte: u8) -> (u8, u8) {
    let shifted = input_byte.wrapping_add(42);
    match shifted {
        LF | CR | NUL | ESCAPE => (ESCAPE, shifted.wrapping_add(64)),
        _ => (shifted, 0),
    }
}
#[cfg(test)]
mod tests {
use super::super::constants::{CR, ESCAPE, LF, NUL};
use super::{encode_buffer, encode_byte, EncodeOptions};
#[test]
fn escape_null() {
assert_eq!((ESCAPE, 0x40), encode_byte(214));
}
/*
#[test]
fn escape_tab() {
let mut output = [0u8; 2];
assert_eq!(2, encode_byte(214 + TAB, &mut output));
assert_eq!(vec![ESCAPE, 0x49], output);
}
*/
#[test]
fn escape_lf() {
assert_eq!((ESCAPE, 0x4A), encode_byte(214 + LF));
}
#[test]
fn escape_cr() {
assert_eq!((ESCAPE, 0x4D), encode_byte(214 + CR));
}
/*
#[test]
fn escape_space() {
let mut output = [0u8; 2];
assert_eq!(2, encode_byte(214 + SPACE, &mut output));
assert_eq!(vec![ESCAPE, 0x60], output);
}
*/
#[test]
fn escape_equal_sign() {
assert_eq!((ESCAPE, 0x7D), encode_byte(ESCAPE - 42));
}
#[test]
fn non_escaped() {
for x in 0..256u16 {
let encoded = (x as u8).overflowing_add(42).0;
if encoded != NUL && encoded != CR && encoded != LF && encoded != ESCAPE {
assert_eq!((encoded, 0), encode_byte(x as u8));
}
}
}
#[test]
fn test_encode_buffer() {
let buffer = (0..256u16).map(|c| c as u8).collect::<Vec<u8>>();
#[rustfmt::skip]
const EXPECTED: [u8; 264] =
[42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61,
125, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80,
81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100,
101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116,
117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132,
133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148,
149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164,
165, 166, 167, 168, 13, 10, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178,
179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194,
195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210,
211, 212, 213, 214, 215, 216,217, 218, 219, 220, 221, 222, 223, 224, 225, 226,
227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242,
243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 61, 64, 1, 2, 3,
4, 5, 6, 7, 8, 9, 61, 74, 11, 12, 61, 77, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23,
24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 13, 10, 38, 39, 40, 41];
let mut encoded = Vec::<u8>::new();
let result = encode_buffer(&buffer, 0, 128, &mut encoded);
assert!(result.is_ok());
assert_eq!(encoded.as_slice(), &EXPECTED[..]);
}
#[test]
fn encode_options_invalid_parts() {
let encode_options = EncodeOptions::new().parts(2).begin(1).end(38400);
let vr = encode_options.check_options();
assert!(vr.is_err());
}
#[test]
fn encode_options_invalid_begin() {
let encode_options = EncodeOptions::new().parts(2).part(1).end(38400);
let vr = encode_options.check_options();
assert!(vr.is_err());
}
#[test]
fn encode_options_invalid_end() {
let encode_options = EncodeOptions::new().parts(2).part(1).begin(1);
let vr = encode_options.check_options();
assert!(vr.is_err());
}
#[test]
fn encode_options_invalid_range() {
let encode_options = EncodeOptions::new().parts(2).part(1).begin(38400).end(1);
let vr = encode_options.check_options();
assert!(vr.is_err());
}
}
| true |
164daac122890163d0a0836fe41a4d0879c65c7b
|
Rust
|
kubo/rosy
|
/src/prelude.rs
|
UTF-8
| 1,094 | 3.125 | 3 |
[
"MIT",
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
//! Types and traits that are commonly used within this library.
//!
//! This module is intended to be glob imported via `use rosy::prelude::*` when
//! primarily working with Rosy types. This allows for having access to _almost_
//! everything one may need.
//!
//! **Note:** These items are all already available at the top crate level. If
//! only certain items are required, importing from the prelude directly is
//! unnecessary.
//!
//! **Important:** Rosy's [`String`][rb] type **will conflict** with Rust's
//! built-in [`String`][rs] type when imported as-is into the same module.
//!
//! [rb]: string/struct.String.html
//! [rs]: https://doc.rust-lang.org/std/string/struct.String.html
// This should match the import in `lib.rs` verbatim (literally copy + paste)
#[doc(no_inline)]
pub use crate::{
array::Array,
exception::{AnyException, Exception},
hash::Hash,
mixin::{Mixin, Class, Module},
num::{Float, Integer},
object::{AnyObject, Object, RosyObject},
range::Range,
Result,
rosy::Rosy,
string::String,
symbol::{Symbol, SymbolId},
};
| true |
8f8012762e6849c355e8f086bcdc25ce97491a2e
|
Rust
|
deffen3/discovery
|
/src/09-clocks-and-timers-tasks/src/main.rs
|
UTF-8
| 1,934 | 2.75 | 3 |
[
"CC-BY-4.0",
"Apache-2.0",
"MIT"
] |
permissive
|
#![no_main]
#![no_std]
use aux9_tasks::entry;
#[entry]
fn main() -> ! {
let (mut leds, rcc, tim6) = aux9_tasks::init();
// Power on the TIM6 timer
rcc.apb1enr.modify(|_, w| w.tim6en().set_bit());
// SR, the status register.
// EGR, the event generation register.
// CNT, the counter register.
// PSC, the prescaler register.
// ARR, the autoreload register.
// CR1 Control Register 1
// OPM Select one pulse mode
// CEN Counter Enable - Keep the counter disabled for now
tim6.cr1.write(|w| w.opm().set_bit().cen().clear_bit());
// Configure the prescaler to have the counter operate at 1 KHz
// PSC Pre-scaler
// Remember that the frequency of the counter is apb1 / (psc + 1) and that apb1 is 8 MHz.
// APB1_CLOCK = 8 MHz
// 8 MHz / (7999 + 1) = 1 KHz
// The counter (CNT) will increase on every millisecond
tim6.psc.write(|w| w.psc().bits(7999));
let cycle_ms = 200;
let mut seq_idx = 0;
let led_tasks: [i32; 8] = [1, 2, 4, 8, 16, 32, 64, 128];
loop {
// Set the timer to go off in half of `cycle_ms` ticks
// 1 tick = 1 ms
tim6.arr.write(|w| w.arr().bits(cycle_ms / 2));
// CEN: Enable the counter
tim6.cr1.modify(|_, w| w.cen().set_bit());
for led_idx in 0..8 {
if ((seq_idx / led_tasks[led_idx]) % 2) == 0 {
leds[led_idx].on();
} else {
leds[led_idx].off();
}
}
// Update LED sequence index
seq_idx = seq_idx + 1;
if seq_idx == led_tasks[7] * 2 {
seq_idx = 0;
}
// Wait until the alarm goes off (until the update event occurs)
// SR, Status Register
// UIF, Update Interrupt Flag
while !tim6.sr.read().uif().bit_is_set() {}
// Clear the update event flag
tim6.sr.modify(|_, w| w.uif().clear_bit());
}
}
| true |
bf3a16c22140627d4630f89fbec10eb91d743ffc
|
Rust
|
kawmarco/unix-fun
|
/rustbox/src/ls.rs
|
UTF-8
| 2,592 | 3.734375 | 4 |
[] |
no_license
|
use std::fs;
use std::io;
/// Entry point for `ls`: lists the given paths, defaulting to the current
/// working directory when no arguments are supplied. Output goes to stdout.
pub fn main(args: Vec<String>) -> io::Result<()> {
    // if no arguments are passed, use current working dir
    let cwd = vec![".".to_string()];
    let mut files: &Vec<String> = &args;
    if files.len() == 0 {
        files = &cwd;
    }
    _ls(files, &mut io::stdout())
}
/// Writes a listing of each path in `files` to `stdout`: a directory is
/// expanded into its entries, anything else is printed as "./<name>".
fn _ls(files: &Vec<String>, stdout: &mut impl io::Write) -> io::Result<()> {
    for name in files.iter() {
        // Directories get their contents listed; all other file types are
        // printed directly.
        if std::fs::metadata(name)?.is_dir() {
            for entry in fs::read_dir(name)? {
                writeln!(stdout, "{}", entry?.path().display())?;
            }
        } else {
            writeln!(stdout, "./{}", name)?;
        }
    }
    Ok(())
}
// Tests
#[cfg(test)]
mod tests {
    use super::*;
    use std::fs::File;
    use tempfile::tempdir;

    // NOTE(review): these tests call `set_current_dir`, which is process-wide
    // state; `cargo test` runs tests on parallel threads, so they can race
    // each other. Consider serializing them behind a shared lock.

    #[test]
    fn _test_ls_cwd_empty() -> Result<(), std::io::Error> {
        // Create empty temporary directory
        let dir = tempdir()?;
        std::env::set_current_dir(&dir)?;
        let mut stdout = Vec::new();
        let ret = _ls(&vec![".".to_string()], &mut stdout);
        // Ensure stdout is empty
        assert!(ret.is_ok());
        assert_eq!(stdout, b"");
        Ok(())
    }

    #[test]
    fn _test_ls_cwd() -> Result<(), std::io::Error> {
        // Create temporary directory and test files
        let dir = tempdir()?;
        std::env::set_current_dir(&dir)?;
        let filename_a = dir.path().join("filename_a");
        File::create(&filename_a)?;
        let filename_b = dir.path().join("filename_b");
        File::create(&filename_b)?;
        let mut stdout = Vec::new();
        let ret = _ls(&vec![".".to_string()], &mut stdout);
        assert!(ret.is_ok());
        // fs::read_dir yields entries in a platform-dependent order, so sort
        // the printed lines instead of assuming "a" comes before "b".
        let mut lines: Vec<&str> = std::str::from_utf8(&stdout)
            .expect("ls output is valid UTF-8")
            .lines()
            .collect();
        lines.sort_unstable();
        assert_eq!(lines, vec!["./filename_a", "./filename_b"]);
        Ok(())
    }

    #[test]
    fn _test_ls_regular_file() -> Result<(), std::io::Error> {
        // Create temporary directory and a single test file
        let dir = tempdir()?;
        std::env::set_current_dir(&dir)?;
        let filename_a = dir.path().join("filename_a");
        File::create(&filename_a)?;
        let mut stdout = Vec::new();
        let ret = _ls(&vec!["filename_a".to_string()], &mut stdout);
        // Ensure stdout is as expected
        assert!(ret.is_ok());
        assert_eq!(stdout, b"./filename_a\n");
        Ok(())
    }
}
| true |
c7217ce1080461f4c7a7f1951a284ec8741617d5
|
Rust
|
ivlevAstef/BFS_MoreLanguages
|
/rust/src/array2d/normal.rs
|
UTF-8
| 1,132 | 3.21875 | 3 |
[] |
no_license
|
use super::{point, Point};
/// Dense 2-D grid backed by nested `Vec`s, indexed as `inner[x][y]`.
pub struct Array2D<T> {
    inner: Vec<Vec<T>>,
}
impl<T: Copy> Array2D<T> {
    /// Creates a grid of `width` columns by `height` rows, every cell set to
    /// `value` (storage is `width` outer vecs of `height` cells each).
    pub fn filled_with(value: T, width: usize, height: usize) -> Self {
        Self {
            inner: vec![vec![value; height]; width],
        }
    }
}
impl<T> std::ops::Index<Point> for Array2D<T> {
    type Output = T;
    /// Reads the cell at `pos` (x selects the column, y the row).
    fn index(&self, pos: Point) -> &T {
        // Default: bounds-checked indexing.
        #[cfg(not(feature = "unsafe-indexing"))]
        return &self.inner[pos.x() as usize][pos.y() as usize];
        // Feature-gated fast path. SAFETY relies on callers always indexing
        // in bounds when "unsafe-indexing" is enabled — confirm call sites.
        #[cfg(feature = "unsafe-indexing")]
        return unsafe {
            self.inner
                .get_unchecked(pos.x() as usize)
                .get_unchecked(pos.y() as usize)
        };
    }
}
impl<T> std::ops::IndexMut<Point> for Array2D<T> {
    /// Mutable cell access; mirrors `Index`, including the feature switch.
    fn index_mut(&mut self, pos: Point) -> &mut T {
        // Default: bounds-checked indexing.
        #[cfg(not(feature = "unsafe-indexing"))]
        return &mut self.inner[pos.x() as usize][pos.y() as usize];
        // Feature-gated fast path. SAFETY relies on callers always indexing
        // in bounds when "unsafe-indexing" is enabled — confirm call sites.
        #[cfg(feature = "unsafe-indexing")]
        return unsafe {
            self.inner
                .get_unchecked_mut(pos.x() as usize)
                .get_unchecked_mut(pos.y() as usize)
        };
    }
}
| true |
791e9d7d8a14ed8ce06a408728e46715019d803a
|
Rust
|
nebula-os/ppk
|
/src/package_reference.rs
|
UTF-8
| 592 | 2.578125 | 3 |
[] |
no_license
|
use serde::{Deserialize, Serialize};
/// A reference to a package source: either a git repository or a local file.
/// With `untagged`, variants are matched by shape in declaration order (Git
/// first) — presumably disambiguated by the `git` vs `file` keys; confirm
/// against serde's untagged-enum semantics.
#[derive(Serialize, Deserialize, Debug, Clone)]
#[serde(rename_all = "lowercase")]
#[serde(untagged)]
pub enum PackageReference {
    /// Package fetched from a git repository.
    Git(GitReference),
    /// Package loaded from a local file.
    File(FileReference),
}
/// Git source location plus optional revision selectors.
#[derive(Serialize, Deserialize, Builder, Debug, Clone)]
#[serde(rename_all = "kebab-case")]
pub struct GitReference {
    /// Repository location (serialized under the `git` key).
    pub git: String,
    /// Optional branch name.
    pub branch: Option<String>,
    /// Optional tag name.
    pub tag: Option<String>,
    /// Optional revision (serialized as `rev`).
    pub rev: Option<String>,
}
/// Local file source (serialized under the `file` key).
#[derive(Serialize, Deserialize, Builder, Debug, Clone)]
#[serde(rename_all = "kebab-case")]
pub struct FileReference {
    // Private; populated via deserialization or the derived builder.
    file: String,
}
| true |
9e617659fe946c9b662e143d56ad244960bf0919
|
Rust
|
Karuturirs/bolts-with-rust
|
/src/BoxUsage.rs
|
UTF-8
| 339 | 3.390625 | 3 |
[] |
no_license
|
/// Classic cons-list demo: `Box` gives the recursive variant a known size.
enum List {
    Cons(i32, Box<List>),
    Nil,
}
use crate::List::{Cons, Nil};
/// Demo of `Box` for recursive types and of reference dereferencing.
fn main() {
    // Build the list 1 -> 2 -> 3 -> Nil; only constructed, never consumed.
    let list = Cons(1,
        Box::new(Cons(2,
            Box::new(Cons(3,
                Box::new(Nil))))));
    println!("Welcome to bolts-with-rust");
    let x = 5;
    let y = &x;
    assert_eq!(5, x);
    // `*y` follows the reference back to the underlying value.
    assert_eq!(5, *y);
}
| true |
277ec0e3a380776a9e674da9eb394204285a1433
|
Rust
|
thomasantony/sudoku-rs
|
/src/utils.rs
|
UTF-8
| 1,536 | 3.15625 | 3 |
[] |
no_license
|
use itertools::iproduct;
use std::collections::HashMap;
use std::iter::FromIterator;
pub type SudokuGrid = HashMap<(usize, usize), Option<usize>>;
/// Yields all 81 `(row, col)` coordinates of a 9x9 sudoku grid in row-major
/// order.
pub fn grid_9x9_keys() -> impl Iterator<Item=(usize, usize)>
{
    iproduct!(0..9, 0..9)
}
/// Parses a puzzle string into a grid: '.' becomes an empty cell, any decimal
/// digit becomes `Some(digit)`, and every other character becomes `None`.
/// Characters are paired with `grid_9x9_keys()` in order.
pub fn parse_grid(s: String) -> SudokuGrid
{
    let cell_values = s.chars().map(|ch| match ch {
        '.' => None,
        other => other.to_digit(10).map(|d| d as usize),
    });
    grid_9x9_keys().zip(cell_values).collect()
}
/// Pretty-prints the grid as three 3x3 bands separated by dashed lines;
/// empty cells render as " . ".
pub fn display_grid(g: &SudokuGrid)
{
    // width = 1+max(len(values[s]) for s in boxes)
    // let width = 2;
    // Build the "---------+---------+---------" band separator once.
    let line = std::iter::repeat("-").take(9).collect::<String>();
    let line = std::iter::repeat(line).take(3).collect::<Vec<String>>().join("+");
    for r in 0..9
    {
        let value_str = (0..9).map(|c| (r, c))
            .map(|k|
            {
                let num = g[&k];
                // Center each digit in 3 columns; None prints as a dot.
                let mut num_str = num.map_or(" . ".to_string(), |num|{
                    format!("{:^3}", num)
                });
                // Vertical separators after the 3rd and 6th columns.
                if k.1 == 2 || k.1 == 5
                {
                    num_str += "|";
                }
                num_str
            }).collect::<String>();
        println!("{}", value_str);
        // Horizontal separators after the 3rd and 6th rows.
        if r == 2 || r == 5
        {
            println!("{}", line);
        }
    }
}
| true |
a9d8b1080ef54ec56fd1994f9dd830276afe0c6b
|
Rust
|
unmellow/advent-of-code-2019
|
/src/bin/6.rs
|
UTF-8
| 2,628 | 3.03125 | 3 |
[] |
no_license
|
use advent_of_code_2019::example;
use advent_of_code_2019::problem::{run, Problem, ProblemState, RunFor};
use env_logger::Env;
use std::collections::{HashMap, VecDeque};
/// Marker type implementing `Problem` for day 6.
struct Six {}
impl Problem for Six {
    // Child body mapped to the body it directly orbits.
    type Input = HashMap<String, String>;
    type Extra = ();
    /// Parses "A)B" lines into a child -> parent orbit map.
    fn parse(s: &str, _state: &ProblemState<Self::Extra>) -> Self::Input {
        s.split('\n')
            .map(|row| {
                let orbit = row.split(')').collect::<Vec<&str>>();
                (orbit[1].to_string(), orbit[0].to_string())
            })
            .collect()
    }
    /// Part 1: counts direct + indirect orbits by walking each body's
    /// ancestor chain until there is no parent entry.
    fn part_1(orbits: &Self::Input, _state: &ProblemState<Self::Extra>) -> Option<String> {
        let mut count = 0;
        for (_orbiter, orbitee) in orbits.iter() {
            let mut maybe_current = Some(orbitee);
            while let Some(current) = maybe_current {
                maybe_current = orbits.get(current);
                count += 1;
            }
        }
        Some(format!("{}", count))
    }
    /// Part 2: minimum orbital transfers between YOU and SAN.
    fn part_2(orbits: &Self::Input, _state: &ProblemState<Self::Extra>) -> Option<String> {
        // Builds the root-to-`start` chain (root first, thanks to push_front).
        let build_chain = |start: String| {
            let mut chain = VecDeque::new();
            let mut maybe_current = Some(start);
            while let Some(current) = maybe_current {
                maybe_current = orbits.get(&current).cloned();
                chain.push_front(current);
            }
            chain
        };
        let mut you_chain: VecDeque<String> = build_chain("YOU".into());
        let mut san_chain: VecDeque<String> = build_chain("SAN".into());
        // drop matching orbits
        while you_chain.front() == san_chain.front() {
            you_chain.pop_front();
            san_chain.pop_front();
        }
        // drop our own locations
        you_chain.pop_back();
        san_chain.pop_back();
        log::debug!("{:?}", you_chain);
        log::debug!("{:?}", san_chain);
        // What remains on each side is the hop count to the common ancestor.
        Some(format!("{}", you_chain.len() + san_chain.len()))
    }
    fn problem_number() -> usize {
        6
    }
}
/// Runs the day-6 examples and the real input through the shared harness.
fn main() {
    env_logger::init_from_env(Env::default().filter_or(env_logger::DEFAULT_FILTER_ENV, "warn"));
    // Worked examples from the problem statement, one per part.
    example!(Six;
      RunFor::Part1, (), "COM)B\nB)C\nC)D\nD)E\nE)F\nB)G\nG)H\nD)I\nE)J\nJ)K\nK)L",
      RunFor::Part2, (), "COM)B\nB)C\nC)D\nD)E\nE)F\nB)G\nG)H\nD)I\nE)J\nJ)K\nK)L\nK)YOU\nI)SAN"
    );
    run::<Six>((), include_str!("6_input.txt"));
}
#[cfg(test)]
mod six {
    use super::*;
    use advent_of_code_2019::problem::assert_solution;
    /// Regression test pinning both answers for the real puzzle input.
    #[test]
    fn test() {
        assert_solution::<Six>(include_str!("6_input.txt"), (), "294191", "424");
    }
}
| true |
b298db6f91238580e3d572a70ade5e1fe2355344
|
Rust
|
edomora97/wireguard-manager
|
/src/web.rs
|
UTF-8
| 4,752 | 2.6875 | 3 |
[
"MIT"
] |
permissive
|
use crate::config::ServerConfig;
use crate::schema;
use crate::wireguard::gen_client_config;
use failure::Error;
use hyper::{Body, Request, Response, StatusCode};
use serde::Serialize;
use tokio::fs::File;
use tokio::io::AsyncReadExt;
use tokio_postgres::Client;
/// Status of a server in the network. This will be serialized and exposed in the JSON API.
#[derive(Debug, Clone, Serialize)]
struct NetworkStatusServer {
/// The name of the server.
pub name: String,
/// The subnet the server manages.
pub subnet: String,
/// The length of the subnet of the server.
pub subnet_len: u8,
/// The address of the server inside its subnet.
pub address: String,
/// The public address of the server.
pub endpoint: String,
/// The public port of the server.
pub endpoint_port: u16,
}
/// Status of a client in the network. This will be serialized and exposed in the JSON API.
#[derive(Debug, Clone, Serialize)]
struct NetworkStatusClient {
/// The name of the client.
pub name: String,
/// The name of the server.
pub server: String,
/// The private IP address of the client in the server's network.
pub address: String,
}
/// The response of the `/data` JSON API.
#[derive(Debug, Clone, Serialize)]
struct NetworkStatus {
/// The list of the servers in the network.
pub servers: Vec<NetworkStatusServer>,
/// The list of the clients in the network.
pub clients: Vec<NetworkStatusClient>,
/// The base domain of the DNS.
pub base_domain: String,
}
/// Handle a web request asynchronously.
pub async fn handle_request<T>(
req: Request<T>,
client: &Client,
config: &ServerConfig,
) -> Result<Response<Body>, Error> {
match req.uri().path() {
// JSON API with the status of the network.
"/data" => {
let servers = schema::get_servers(client)
.await?
.into_iter()
.map(|s| NetworkStatusServer {
name: s.name,
subnet: s.subnet_addr.to_string(),
subnet_len: s.subnet_len,
address: s.address.to_string(),
endpoint: s.public_address.to_string(),
endpoint_port: s.public_port,
})
.collect();
let clients = schema::get_clients(client, None::<&str>)
.await?
.into_iter()
.map(|c| NetworkStatusClient {
name: c.client.name,
server: c.server,
address: c.address.to_string(),
})
.collect();
let status = NetworkStatus {
servers,
clients,
base_domain: config.base_domain.clone(),
};
Ok(Response::builder()
.status(200)
.header("Content-Type", "application/json")
.body(Body::from(serde_json::to_string_pretty(&status)?))
.unwrap())
}
// Generate the client configuration for a given username.
url if url.starts_with("/conf/") => {
let name = &url[6..];
let conf = gen_client_config(config, client, name.to_owned(), None).await;
match conf {
Ok(conf) => Ok(Response::builder()
.status(200)
.body(Body::from(conf))
.unwrap()),
Err(err) => Ok(Response::builder()
.status(404)
.body(Body::from(err.to_string()))
.unwrap()),
}
}
// Any other static file.
_ => {
// if asking for an index, manually change the file name.
let path = if req.uri().path() == "/" {
"/index.html"
} else {
req.uri().path()
};
let path = config
.web_static_dir
.join(&path[1..])
.canonicalize()
.unwrap_or_default();
if path.starts_with(&config.web_static_dir) {
if let Ok(mut file) = File::open(&path).await {
debug!("Sending file {:?}", path);
let mut buf = Vec::new();
if file.read_to_end(&mut buf).await.is_ok() {
return Ok(Response::new(buf.into()));
}
}
}
warn!("404 File Not Found: {} -> {:?}", req.uri().path(), path);
let mut not_found = Response::default();
*not_found.status_mut() = StatusCode::NOT_FOUND;
Ok(not_found)
}
}
}
| true |
a2bf435215818bdf2fbf943c392a1cd50267b296
|
Rust
|
ragnarula/advent-of-code
|
/day5/src/main.rs
|
UTF-8
| 2,378 | 3.296875 | 3 |
[] |
no_license
|
/// Prints the answers for both parts using the bundled puzzle input.
fn main() {
    println!(
        "Part 1 Result - {}",
        solve_1(include_str!("../data/input.txt"))
    );
    println!(
        "Part 2 Result - {}",
        solve_2(include_str!("../data/input.txt"))
    );
}
/// Two units react when they are the same ASCII letter in opposite cases
/// (e.g. 'a' and 'A'). Identical case or different letters never react.
fn does_react(a: char, b: char) -> bool {
    let opposite_case = (a.is_ascii_lowercase() && b.is_ascii_uppercase())
        || (a.is_ascii_uppercase() && b.is_ascii_lowercase());
    opposite_case && a.eq_ignore_ascii_case(&b)
}
/// Performs a single left-to-right pass removing adjacent reacting pairs.
/// The fold carries `(pending char, output so far)`; when the pending char
/// reacts with the next one, both are discarded. One pass can expose new
/// adjacent pairs, so callers repeat until the length stabilizes (`solve_1`).
fn reduce(input: &str) -> String {
    let mut result: (Option<char>, String) =
        input
            .chars()
            .fold((None, String::new()), |mut acc, c| match acc {
                // Nothing pending: hold the current char.
                (None, _) => {
                    acc.0 = Some(c);
                    acc
                }
                // Pending char annihilates with the current one.
                (Some(p), _) if does_react(p, c) => {
                    acc.0 = None;
                    acc
                }
                // No reaction: flush the pending char, hold the current one.
                (Some(p), _) => {
                    acc.0 = Some(c);
                    acc.1.push(p);
                    acc
                }
            });
    // Flush the final pending char, if any.
    match result.0 {
        Some(c) => result.1.push(c),
        None => {}
    }
    result.1
}
/// Repeatedly applies `reduce` until the polymer stops shrinking, returning
/// the stable length.
fn solve_1(input: &str) -> usize {
    let mut polymer = input.to_owned();
    let mut previous_len = 0;
    // A pass that removes nothing leaves the length unchanged — fixed point.
    while polymer.len() != previous_len {
        previous_len = polymer.len();
        polymer = reduce(&polymer);
    }
    polymer.len()
}
/// Part 2: shortest fully reacted polymer obtainable by first removing every
/// occurrence of one unit type (both cases).
///
/// Improvement: strips both cases in a single `filter` pass per candidate
/// letter instead of the original two whole-string `replace` allocations.
fn solve_2(input: &str) -> usize {
    (b'a'..=b'z')
        .map(char::from)
        .map(|unit| {
            // Remove every unit of this type, case-insensitively.
            let stripped: String = input
                .chars()
                .filter(|c| c.to_ascii_lowercase() != unit)
                .collect();
            solve_1(&stripped)
        })
        .min()
        .expect("the a..=z candidate range is never empty")
}
// Reaction rule: only opposite-case pairs of the same letter react.
#[test]
fn test_does_react() {
    assert!(does_react('a', 'A'));
    assert!(does_react('A', 'a'));
    assert!(!does_react('a', 'a'));
    assert!(!does_react('A', 'A'));
    assert!(!does_react('a', 'B'));
    assert!(!does_react('A', 'b'));
}
// `reduce` is a single left-to-right pass, so intermediate results may still
// be reducible; the third case is already a fixpoint.
#[test]
fn test_reduce() {
    assert_eq!(reduce("dabAcCaCBAcCcaDA"), "dabAaCBAcaDA");
    assert_eq!(reduce("dabAaCBAcaDA"), "dabCBAcaDA");
    assert_eq!(reduce("dabCBAcaDA"), "dabCBAcaDA");
}
// Worked example from the puzzle statement: 10 units survive.
#[test]
fn test_solve_1() {
    assert_eq!(solve_1("dabAcCaCBAcCcaDA"), 10);
}
// Removing all 'c'/'C' units first yields the shortest polymer (length 4).
#[test]
fn test_solve_2() {
    assert_eq!(solve_2("dabAcCaCBAcCcaDA"), 4);
}
| true |
e0b1eb4139215b718b01ed805bd2c3cfcfe5a99e
|
Rust
|
iCodeIN/secbot-2021-7drl
|
/src/game/combat.rs
|
UTF-8
| 10,685 | 2.640625 | 3 |
[
"MIT"
] |
permissive
|
use crate::components::*;
use crate::map::*;
use crate::NewState;
use bracket_lib::prelude::*;
use legion::systems::CommandBuffer;
use legion::*;
use std::collections::HashSet;
/// Fires the player's weapon at their currently selected target, if any,
/// and returns the follow-up game state.
pub fn player_open_fire_at_target(ecs: &mut World, map: &mut Map) -> NewState {
    // Locate the player entity and whatever they currently have targeted.
    let mut shooter = None;
    let mut victim = None;
    <(Entity, &Player, &Targeting)>::query()
        .iter(ecs)
        .for_each(|(entity, _, targeting)| {
            victim = targeting.current_target;
            shooter = Some(*entity);
        });
    match victim {
        // No target selected: nothing to shoot, go back to waiting.
        None => NewState::Wait,
        Some(victim) => {
            ranged_attack(ecs, map, shooter.unwrap(), victim, 20);
            NewState::Player
        }
    }
}
/// Simulates a ranged shot from `attacker` towards `victim`.
///
/// The projectile starts with `ranged_power`, damages every entity with a
/// `Position` + `Health` along its path, fades power with distance and with
/// each hit, and may overshoot past the victim until power runs out, range is
/// exhausted, or a wall is hit. Blood splatter recolors map tiles along the
/// way, and a `Projectile` entity is spawned so the shot can be rendered.
/// Silently returns if either endpoint lacks a `Position`.
pub fn ranged_attack(
    ecs: &mut World,
    map: &mut Map,
    attacker: Entity,
    victim: Entity,
    ranged_power: i32,
) {
    let mut attacker_pos = None;
    let mut victim_pos = None;
    // Find positions for the start and end
    if let Ok(ae) = ecs.entry_ref(attacker) {
        if let Ok(pos) = ae.get_component::<Position>() {
            attacker_pos = Some(pos.clone());
        }
    }
    if let Ok(ae) = ecs.entry_ref(victim) {
        if let Ok(pos) = ae.get_component::<Position>() {
            victim_pos = Some(pos.clone());
        }
    }
    if attacker_pos.is_none() || victim_pos.is_none() {
        return;
    }
    let attacker_pos = attacker_pos.unwrap();
    let victim_pos = victim_pos.unwrap();
    // Set state for the projectile path
    let mut power = ranged_power;
    let mut range = 0;
    let mut projectile_path = Vec::new();
    let mut splatter = None;
    let mut commands = CommandBuffer::new(ecs);
    let current_layer = attacker_pos.layer;
    // Map of entity locations. Rebuilt every time because it might change.
    let pos_map = <(&Position, &Health)>::query()
        .iter(ecs)
        .map(|(pos, _)| pos.pt)
        .collect::<HashSet<Point>>();
    // Plot the initial trajectory
    line2d_bresenham(attacker_pos.pt, victim_pos.pt)
        .iter()
        .skip(1)
        .for_each(|pt| {
            projectile_path.push(*pt);
            if pos_map.contains(&pt) {
                power -=
                    hit_tile_contents(ecs, *pt, current_layer, &mut commands, &mut splatter, power);
                // `range += 200` effectively terminates the overshoot loop
                // below once the shot has spent its power on a hit.
                if power < 1 {
                    power = 1;
                    range += 200;
                }
            }
            // Paint the fading blood trail onto the current map layer.
            if let Some(bsplatter) = &mut splatter {
                let idx = map.get_current().point2d_to_index(*pt);
                map.get_current_mut().tiles[idx].color.fg = bsplatter.to_rgba(1.0);
                bsplatter.r = f32::max(0.0, bsplatter.r - 0.1);
                bsplatter.g = f32::max(0.0, bsplatter.g - 0.1);
                bsplatter.b = f32::max(0.0, bsplatter.b - 0.1);
                if bsplatter.r + bsplatter.g + bsplatter.b < 0.1 {
                    splatter = None;
                }
            }
            // Past 5 tiles the projectile starts losing power with distance.
            range += 1;
            if range > 5 {
                power -= 1;
                if power < 1 {
                    power = 1;
                }
            }
        });
    // The trajectory can continue if we have power left
    use ultraviolet::Vec2;
    let mut projectile_pos: Vec2 = Vec2::new(victim_pos.pt.x as f32, victim_pos.pt.y as f32);
    let slope = (projectile_pos - Vec2::new(attacker_pos.pt.x as f32, attacker_pos.pt.y as f32))
        .normalized();
    // Overshoot: keep stepping along the same direction until power/range end.
    while range < 25 && power > 0 {
        projectile_pos += slope;
        let pt = Point::new(projectile_pos.x as i32, projectile_pos.y as i32);
        projectile_path.push(pt);
        if pos_map.contains(&pt) {
            power -= hit_tile_contents(ecs, pt, current_layer, &mut commands, &mut splatter, power);
            if power < 1 {
                power = 1;
                range += 200;
            }
        }
        if let Some(bsplatter) = &mut splatter {
            let idx = map.get_current().point2d_to_index(pt);
            map.get_current_mut().tiles[idx].color.fg = bsplatter.to_rgba(1.0);
            bsplatter.r = f32::max(0.0, bsplatter.r - 0.1);
            bsplatter.g = f32::max(0.0, bsplatter.g - 0.1);
            bsplatter.b = f32::max(0.0, bsplatter.b - 0.1);
            if bsplatter.r + bsplatter.g + bsplatter.b < 0.1 {
                splatter = None;
            }
        }
        // Walls stop the projectile outright.
        let idx = map.get_current().point2d_to_index(pt);
        if map.get_current().tiles[idx].tile_type == TileType::Wall {
            range += 100;
            power = 0;
        }
        range += 1;
        if range > 5 {
            power -= 1;
            if power < 1 {
                power = 1;
                range += 100;
            }
        }
    }
    // Spawn the visual projectile entity and apply all queued world changes.
    commands.push((
        Projectile {
            path: projectile_path,
            layer: current_layer as usize,
        },
        Glyph {
            glyph: to_cp437('*'),
            color: ColorPair::new(RED, BLACK),
        },
    ));
    commands.flush(ecs);
}
/// Applies the projectile's `power` (with a small random swing) as damage to
/// every entity with `Health` standing on tile `pt` of `layer`.
///
/// Returns the amount of power absorbed by the targets (used by the caller to
/// fade the projectile). Entities reduced below zero HP are queued to explode
/// (if `Explosive`) and finalized via `kill_things`; the victim's blood colour
/// is reported back through `splatter`.
pub fn hit_tile_contents(
    ecs: &mut World,
    pt: Point,
    layer: u32,
    commands: &mut CommandBuffer,
    splatter: &mut Option<RGB>,
    power: i32,
) -> i32 {
    // Global RNG is behind a mutex; hold the lock for the whole pass.
    let mut rng_lock = crate::RNG.lock();
    let rng = rng_lock.as_mut().unwrap();
    let mut power_loss = 0;
    let mut dead_entities = Vec::new();
    <(Entity, &Position, &mut Health)>::query()
        .iter_mut(ecs)
        .filter(|(_, pos, _)| pos.layer == layer && pos.pt == pt)
        .for_each(|(entity, _, hp)| {
            // Each victim absorbs power equal to its remaining HP, clamped at 0.
            power_loss += hp.current;
            if power_loss < 0 {
                power_loss = 0;
            }
            let damage = i32::max(0, power + rng.roll_dice(1, 4) - 2);
            //println!("{}", damage);
            hp.current -= damage;
            // NOTE(review): death triggers only when HP goes strictly below 0
            // here, while `melee` kills at exactly 0 — confirm this asymmetry
            // is intended.
            if hp.current < 0 {
                hp.current = 0;
                dead_entities.push(*entity);
            }
        });
    // Explosive casualties spawn a Boom at their position before cleanup.
    dead_entities.iter().for_each(|e| {
        if let Ok(er) = ecs.entry_ref(*e) {
            if let Ok(boom) = er.get_component::<Explosive>() {
                if let Ok(pos) = er.get_component::<Position>() {
                    commands.push((
                        Position::with_pt(pos.pt, pos.layer),
                        Boom { range: boom.range },
                    ));
                }
            }
        }
    });
    kill_things(ecs, commands, dead_entities, splatter);
    power_loss
}
/// Performs a melee attack from `attacker` against `victim`.
///
/// Validates that both entities have positions, share a layer, and are within
/// ~1.5 tiles of each other, then subtracts `melee_power` from the victim's
/// HP (clamped at 0), paints their blood on the tile, and finalizes death at
/// exactly 0 HP.
pub fn melee(ecs: &mut World, map: &mut Map, attacker: Entity, victim: Entity, melee_power: i32) {
    // Check range and validity
    let mut attacker_pos = None;
    let mut defender_pos = None;
    if let Ok(e) = ecs.entry_ref(attacker) {
        if let Ok(pos) = e.get_component::<Position>() {
            attacker_pos = Some(*pos);
        }
    }
    if let Ok(e) = ecs.entry_ref(victim) {
        if let Ok(pos) = e.get_component::<Position>() {
            defender_pos = Some(*pos);
        }
    }
    if attacker_pos.is_none() || defender_pos.is_none() {
        return; // Bail out - invalid data arrived
    }
    let apos = attacker_pos.unwrap();
    let dpos = defender_pos.unwrap();
    if apos.layer != dpos.layer {
        return; // Bail out - can't attack across layers
    }
    // 1.5 allows diagonal adjacency (sqrt(2) ≈ 1.41) but nothing further.
    let d = DistanceAlg::Pythagoras.distance2d(apos.pt, dpos.pt);
    if d > 1.5 {
        return; // Too far away, bail
    }
    // Inflict damage upon the hapless victim
    let mut dead_entities = Vec::new();
    if let Ok(mut v) = ecs.entry_mut(victim) {
        if let Ok(hp) = v.get_component_mut::<Health>() {
            hp.current = i32::max(0, hp.current - melee_power);
            if hp.current == 0 {
                dead_entities.push(victim);
            }
        }
        // Stain the victim's tile with their blood colour.
        if let Ok(blood) = v.get_component::<Blood>() {
            let idx = map.get_layer(dpos.layer as usize).point2d_to_index(dpos.pt);
            map.get_layer_mut(dpos.layer as usize).tiles[idx].color.fg = blood.0.into();
        }
    }
    // If necessary, kill them.
    // NOTE(review): unlike `ranged_attack`, `commands` is never flushed here,
    // so the component removals queued by `kill_things` may be silently
    // dropped when the buffer goes out of scope — verify against legion's
    // CommandBuffer semantics.
    let mut commands = CommandBuffer::new(ecs);
    let mut splatter = None;
    kill_things(ecs, &mut commands, dead_entities, &mut splatter);
    // Splatter blood. It's good for you.
}
/// Finalizes death for every entity in `dead_entities`: records stats,
/// recolors and renames the corpse, adds flavour text for colonists, reports
/// the victim's blood colour back through `splatter`, and strips the
/// components that made the entity an active participant (the player keeps
/// theirs so the game-over flow still works).
fn kill_things(
    ecs: &mut World,
    commands: &mut CommandBuffer,
    dead_entities: Vec<Entity>,
    splatter: &mut Option<RGB>,
) {
    dead_entities.iter().for_each(|entity| {
        crate::stats::record_death();
        let mut was_decor = false;
        let mut was_player = false;
        if let Ok(mut er) = ecs.entry_mut(*entity) {
            // Idiom/consistency fix: use `.is_ok()` (as the Hostile check
            // below already does) instead of `if let Ok(_) = ...`
            // (clippy: redundant_pattern_matching).
            let was_colonist = er.get_component_mut::<ColonistStatus>().is_ok();
            if was_colonist {
                commands.add_component(*entity, ColonistStatus::DiedAfterStart);
            }
            // Corpse appearance and name.
            if let Ok(g) = er.get_component_mut::<Glyph>() {
                g.color.bg = DARK_RED.into();
                g.color.fg = DARK_GRAY.into();
            }
            if let Ok(n) = er.get_component_mut::<Name>() {
                n.0 = format!("Corpse: {}", n.0);
            }
            // Half of the colonists get an extra line of tragic flavour text.
            if was_colonist {
                if let Ok(d) = er.get_component_mut::<Description>() {
                    let mut rng = RandomNumberGenerator::new();
                    if rng.range(0, 10) < 5 {
                        d.0 = format!(
                            "{} They left behind a spouse and {} children.",
                            d.0,
                            rng.range(1, 8)
                        );
                    }
                }
            }
            if er.get_component::<Hostile>().is_ok() {
                crate::stats::record_monster_death();
            }
            if let Ok(b) = er.get_component::<Blood>() {
                *splatter = Some(b.0);
            }
            was_decor = er.get_component::<SetDecoration>().is_ok();
            was_player = er.get_component::<Player>().is_ok();
        }
        if !was_player {
            commands.remove_component::<Health>(*entity);
            commands.remove_component::<Active>(*entity);
            commands.remove_component::<CanBeActivated>(*entity);
            commands.remove_component::<Blood>(*entity);
            commands.remove_component::<Targetable>(*entity);
            commands.remove_component::<Explosive>(*entity);
            commands.remove_component::<TimedEvent>(*entity);
        }
        if was_decor {
            crate::stats::record_prop_death();
            commands.remove_component::<Glyph>(*entity);
            commands.remove_component::<Description>(*entity);
        }
    });
}
| true |
ee54fb9846a32ef9a93d554e38dcdc7c4238f8dd
|
Rust
|
emahiro/il
|
/rs_sandbox/rs_grep_handson/src/main.rs
|
UTF-8
| 1,023 | 3.09375 | 3 |
[] |
no_license
|
use std::fs::read_to_string;
use structopt::StructOpt;
// Command-line arguments for rsgrep: a search pattern and a file path.
// (Plain `//` comments are used on fields because structopt turns `///`
// doc comments into CLI help text, which would change program output.)
#[derive(StructOpt)] // attribute cf. annotation
#[structopt(name = "rsgrep")]
struct GrepArgs {
    // Substring to search for (literal match, not a regex).
    #[structopt(name="PATTERN")]
    pattern: String,
    // Path of the file to search.
    #[structopt(name="NAME")]
    path: String,
}
// impl GrepArgs {
// fn new(pattern: String, path: String) -> Self {
// GrepArgs { pattern, path }
// }
// }
/// Prints every line of `content` containing the configured pattern.
fn grep(content: String, state: &GrepArgs) {
    content
        .lines()
        .filter(|line| line.contains(state.pattern.as_str()))
        .for_each(|line| println!("{}", line));
}
/// Reads the target file and greps it for the pattern.
///
/// Fix: I/O failures are now reported on stderr (`eprintln!`) instead of
/// stdout, so error text no longer pollutes grep's match output stream.
fn run(state: GrepArgs) {
    match read_to_string(&state.path) {
        Ok(content) => grep(content, &state),
        Err(err) => eprintln!("{}", err),
    }
}
/// Entry point: structopt parses the CLI arguments (printing usage and
/// exiting on error) and `run` does the work.
/// Cleanup: removed the dead commented-out arg-parsing code.
fn main() {
    run(GrepArgs::from_args())
}
| true |
564d7fd55d3f4deab3ba7206458f0a9b4514fa4d
|
Rust
|
manuels/taikai
|
/src/parser/mod.rs
|
UTF-8
| 6,943 | 2.546875 | 3 |
[] |
no_license
|
use std::collections::HashMap;
use std::rc::Rc;
use std::cell::RefCell;
use proc_macro2::TokenStream;
use serde::Deserialize;
use quote::quote;
use crate::types;
use crate::enums;
use crate::type_spec::TypeSpec;
use crate::attribute;
#[derive(Deserialize, Debug)]
#[serde(rename_all = "kebab-case")]
struct Meta {
id: Option<String>,
endian: Endian,
encoding: Option<String>,
#[serde(skip)]
title: (),
#[serde(skip)]
file_extension: (),
#[serde(skip)]
license: (),
}
#[derive(Deserialize, Debug)]
enum Endian {
#[serde(rename = "be")]
Big,
#[serde(rename = "le")]
Little,
#[serde(rename = "net")]
Network,
}
#[derive(Deserialize, Debug)]
#[serde(rename_all = "lowercase")]
enum Repeat {
Expr,
Until,
Eos,
}
#[derive(Deserialize, Debug)]
struct Root {
meta: Meta,
#[serde(flatten)]
root: TypeDef,
#[serde(default)]
context: TypeDef,
}
#[derive(Deserialize, Default, Debug)]
#[serde(default)]
struct TypeDef {
types: HashMap<String, TypeDef>,
seq: Vec<Attribute>,
instances: HashMap<String, Instance>,
enums: HashMap<String, HashMap<String, String>>,
}
/// One `seq` entry of a Kaitai-style type definition, as read from YAML.
/// Field names follow the kebab-case YAML keys (see `rename_all`).
#[derive(Deserialize, Debug)]
#[serde(rename_all = "kebab-case")]
struct Attribute {
    id: Option<String>,
    // The attribute's type name; defaults to "u8" downstream when absent.
    #[serde(rename = "type")]
    typ: Option<String>,
    // Documentation keys are accepted but discarded.
    #[serde(skip)]
    doc: (),
    #[serde(skip)]
    doc_ref: (),
    // Name of an enum (from the `enums` section) this value maps into.
    #[serde(rename="enum")]
    pub enum_: Option<String>,
    // Repetition: kind plus its companion expression (expr/until).
    repeat: Option<Repeat>,
    repeat_expr: Option<String>,
    repeat_until: Option<String>,
    // Conditional parsing expression (YAML `if`).
    #[serde(rename = "if")]
    cond: Option<String>,
    // Fixed expected bytes (magic numbers).
    #[serde(default)]
    contents: Vec<u8>,
    // Size handling: explicit size expression or read-to-end-of-stream.
    size: Option<String>,
    #[serde(default)]
    size_eos: bool,
    process: Option<String>,
    encoding: Option<String>,
    // Terminated-string options.
    terminator: Option<u8>,
    consume: Option<bool>,
    #[serde(default)]
    include: bool,
    eos_error: Option<bool>,
}
/// An `instances` entry: a lazily-evaluated attribute with an optional
/// stream position and/or value expression; all other keys are flattened
/// into the embedded `Attribute`.
#[derive(Deserialize, Debug)]
struct Instance {
    pos: Option<usize>,
    value: Option<String>,
    #[serde(flatten)]
    attr: Attribute,
}
/// Conversion from the raw YAML `meta` section into the internal meta
/// representation, resolving the encoding strategy.
///
/// Implemented as `From` rather than `Into` (idiomatic Rust): the std
/// blanket impl derives `Into<types::Meta> for Meta` automatically, so all
/// existing `.into()` call sites keep working.
impl From<Meta> for types::Meta {
    fn from(meta: Meta) -> types::Meta {
        // Default to UTF-8 when no encoding is specified.
        let enc = meta.encoding.unwrap_or_else(|| "utf-8".to_string());
        // A label known to encoding_rs is fixed at compile time; anything
        // else is treated as a runtime expression.
        let enc = if encoding_rs::Encoding::for_label_no_replacement(enc.as_bytes()).is_some() {
            types::Encoding::Fixed(enc.as_bytes().to_vec())
        } else {
            let enc = syn::parse_str(&enc).unwrap();
            types::Encoding::Runtime(enc)
        };
        types::Meta {
            endian: meta.endian.into(),
            encoding: enc,
        }
    }
}
/// Maps the YAML endianness onto the internal one; network byte order is
/// big-endian. Implemented as `From` (the blanket impl keeps `.into()`
/// call sites working).
impl From<Endian> for types::Endian {
    fn from(e: Endian) -> types::Endian {
        match e {
            Endian::Big | Endian::Network => types::Endian::Big,
            Endian::Little => types::Endian::Little,
        }
    }
}
/// Parses a YAML type description into `(meta, context type, root type)`.
///
/// The root type is named after `meta.id`, falling back to `"root"`.
/// Panics when the YAML is malformed or an embedded expression fails to
/// parse as Rust.
pub fn parse(scope: &[String], input: &str)
    -> (types::Meta, Rc<RefCell<TypeSpec>>, Rc<RefCell<TypeSpec>>)
{
    let obj: Root = serde_yaml::from_str(input).unwrap();
    let id = obj.meta.id.clone().unwrap_or_else(|| "root".to_owned());
    // Convert the textual scope path into token streams once, up front.
    let scope: Vec<_> = scope
        .iter()
        .map(|segment| syn::parse_str(&segment[..]).unwrap())
        .collect();
    let ctx = parse_type("Context".to_string(), scope.clone(), obj.context);
    let root = parse_type(id, scope, obj.root);
    (obj.meta.into(), ctx, root)
}
fn parse_attribute(id: Option<String>, a: Attribute) -> attribute::Attribute {
let typ = a.typ.unwrap_or_else(|| "u8".to_owned());
let cond = a.cond.map(|s| syn::parse_str(&s[..]).unwrap());
let str_props = match &typ[..] {
"str"
| "strz"
| _ if a.size.is_some() || a.size_eos => {
let length = if &typ[..] == "strz" {
attribute::Length::Terminator(0)
} else {
if let Some(term) = a.terminator {
attribute::Length::Terminator(term)
} else if let Some(size) = a.size {
let size = syn::parse_str(&size[..]).unwrap();
attribute::Length::Size(size)
} else {
assert!(a.size_eos);
attribute::Length::Eos
}
};
Some(attribute::SizeProperties {
length,
consume: a.consume.unwrap_or(true),
include: a.include,
eos_error: a.eos_error.unwrap_or(true),
})
},
_ => None
};
/*
* We do not want to deal with 'strz' later (which just defines the
* terminator), so we overwrite 'strz' with 'str'!
*/
let typ = if typ == "strz" {
"str".to_string()
} else {
typ
};
let repeat = match a.repeat {
None => attribute::Repeat::NoRepeat,
Some(Repeat::Eos) => attribute::Repeat::Eos,
Some(Repeat::Expr) => {
let expr = a.repeat_expr.unwrap_or_else(|| panic!("repeat-expr missing"));
let expr = syn::parse_str(&expr).unwrap();
attribute::Repeat::Expr(expr)
},
Some(Repeat::Until) => {
let expr = a.repeat_expr.unwrap_or_else(|| panic!("repeat-until missing"));
let expr = syn::parse_str(&expr).unwrap();
attribute::Repeat::Until(expr)
},
};
let enc = a.encoding.map(|enc| {
if encoding_rs::Encoding::for_label_no_replacement(enc.as_bytes()).is_some() {
types::Encoding::Fixed(enc.as_bytes().to_vec())
} else {
let enc = syn::parse_str(&enc).unwrap();
types::Encoding::Runtime(enc)
}
});
attribute::Attribute::new(a.id.or(id).unwrap(), typ, repeat, cond, a.contents, enc, a.enum_, str_props)
}
/// Recursively converts a YAML `TypeDef` into a `TypeSpec`, building its
/// nested subtypes, sequence attributes, lazy instances, and enums.
/// Nested types get `__subtypes` appended to their scope path.
fn parse_type(id: String, scope: Vec<TokenStream>, typ: TypeDef) -> Rc<RefCell<TypeSpec>> {
    let types = typ.types.into_iter().map(|(id, t): (String, TypeDef)| {
        let mut new_scope = scope.clone();
        new_scope.push(quote!(__subtypes));
        (id.clone(), parse_type(id, new_scope, t))
    }).collect();
    let seq = typ.seq.into_iter().map(|a| parse_attribute(None, a)).collect();
    // Instances fall back to their map key as the attribute id, start at
    // stream position 0 by default, and may carry a value expression.
    let instances = typ.instances.into_iter().map(|(id, inst): (String, Instance)| {
        let attr = parse_attribute(Some(id.clone()), inst.attr);
        let value = inst.value.map(|s| syn::parse_str(&s[..]).unwrap());
        let pos = inst.pos.unwrap_or(0);
        (id, attribute::Instance::from_attr(attr, pos, value))
    }).collect();
    // flip yaml 'value: key' to rust (key, value)
    let enums = typ.enums.into_iter().map(|(id, e)| {
        let pairs = e.into_iter().map(|(value, key)| (key, value)).collect();
        let e = enums::Enum {
            pairs,
            id: id.clone(),
            scope: scope.clone(),
        };
        (id, Rc::new(RefCell::new(e)))
    }).collect();
    TypeSpec::new(scope,
        id,
        types,
        seq,
        instances,
        enums)
}
| true |
5187b7f002b54a3b4eb0018ba469095c59f306d5
|
Rust
|
rustabit/mrvn-bot
|
/mrvn-front-discord/src/config.rs
|
UTF-8
| 1,995 | 2.921875 | 3 |
[
"MIT"
] |
permissive
|
use serde::Deserialize;
use std::collections::HashMap;
use serde::de::Error;
/// Credentials for the bot that registers and handles slash commands.
#[derive(Debug, Deserialize, Clone)]
pub struct CommandBot {
    pub token: String,
    pub application_id: u64,
    // When set, commands are registered to this guild only; otherwise
    // they are registered globally.
    pub guild_id: Option<u64>,
}
/// Credentials for one of the bots that join voice channels for playback.
#[derive(Debug, Deserialize, Clone)]
pub struct VoiceBot {
    pub token: String,
    pub application_id: u64,
}
/// Top-level application configuration, deserialized from the config file.
#[derive(Debug, Deserialize, Clone)]
pub struct Config {
    // Parsed from a hex string (e.g. "ff0000") by `from_hex` below.
    #[serde(deserialize_with = "from_hex")]
    pub embed_color: u32,
    pub skip_votes_required: usize,
    pub stop_votes_required: usize,
    pub disconnect_min_inactive_secs: u64,
    pub disconnect_check_interval_secs: u64,
    pub only_disconnect_when_alone: bool,
    pub command_bot: CommandBot,
    pub voice_bots: Vec<VoiceBot>,
    // User-facing message templates, keyed by message id; `{name}`
    // placeholders are filled in by `Config::get_message`.
    pub messages: HashMap<String, String>,
}
impl Config {
pub fn get_raw_message<'s>(&'s self, message_key: &'s str) -> &'s str {
match self.messages.get(message_key) {
Some(template) => template,
None => {
log::warn!("Message string {} was not included in config", message_key);
message_key
}
}
}
pub fn get_message(&self, message_key: &str, substitutions: &[(&str, &str)]) -> String {
let message_template = self.get_raw_message(message_key);
lazy_static::lazy_static! {
static ref SUBSTITUTE_REGEX: regex::Regex = regex::Regex::new(r"\{(\w+)\}").unwrap();
}
SUBSTITUTE_REGEX.replace_all(message_template, |caps: ®ex::Captures| {
let substitute_name = &caps[1];
substitutions
.iter()
.find(|(key, _)| *key == substitute_name)
.map(|(_, value)| *value)
.unwrap_or("")
}).into_owned()
}
}
/// Serde helper: deserializes a bare hex string (e.g. "ff0000", no "0x"
/// prefix) into a `u32`, surfacing parse failures as serde errors.
fn from_hex<'de, D>(deserializer: D) -> Result<u32, D::Error> where D: serde::Deserializer<'de> {
    let s: String = Deserialize::deserialize(deserializer)?;
    u32::from_str_radix(&s, 16).map_err(D::Error::custom)
}
| true |
70adadf503b82c695a73acf68bc693b6a32d0852
|
Rust
|
CyberFlameGO/abi_stable_crates
|
/abi_stable/src/nonexhaustive_enum/nonexhaustive.rs
|
UTF-8
| 25,463 | 2.859375 | 3 |
[
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0",
"MIT"
] |
permissive
|
/*!
Contains `NonExhaustive<>` and related items.
*/
use std::{
cmp::{Ordering,PartialEq,Eq,Ord,PartialOrd},
fmt::{self,Debug,Display},
hash::{Hash,Hasher},
marker::PhantomData,
mem::ManuallyDrop,
ops::Deref,
};
use crate::{
abi_stability::StableAbi,
erased_types::{
c_functions,
trait_objects::{
HasherObject,
},
InterfaceBound,
},
inline_storage::ScratchSpace,
marker_type::ErasedObject,
nonexhaustive_enum::{
vtable::NonExhaustiveVtable_Ref,
GetVTable,GetEnumInfo,GetNonExhaustive,
ValidDiscriminant,EnumInfo,
SerializeEnum,DeserializeEnum,
},
pointer_trait::{CanTransmuteElement,TransmuteElement},
type_level::{
impl_enum::Implemented,
trait_marker,
},
std_types::RBoxError,
traits::IntoReprRust,
};
use core_extensions::{
utils::transmute_ignore_size,
};
use serde::{ser,de,Serialize,Deserialize,Serializer,Deserializer};
// #[cfg(test)]
#[cfg(all(test,not(feature="only_new_tests")))]
mod tests;
/**
A generic type for all ffi-safe non-exhaustive enums.
This type allows adding variants to enums it wraps in ABI compatible versions of a library.
# Generic parameters
### `E`
This is the enum that this was constructed from,
and can be unwrapped back into if it's one of the valid variants in this context.
### `S`
The storage type,used to store the enum opaquely.
This has to be at least the size and alignment of the wrapped enum.
This is necessary because:
- The compiler assumes that an enum cannot be a variant outside the ones it sees.
- To give some flexibility to grow the enum in semver compatible versions of a library.
### `I`
The interface of the enum(it implements `InterfaceType`),
determining which traits are required when constructing `NonExhaustive<>`
and which are available afterwards.
### Example
Say that we define an error type for a library.
Version 1.0.
```
use abi_stable::{
StableAbi,
nonexhaustive_enum::{NonExhaustiveFor,NonExhaustive},
std_types::RString,
sabi_trait,
};
#[repr(u8)]
#[derive(StableAbi,Debug,Clone,PartialEq)]
#[sabi(kind(WithNonExhaustive(
size="[usize;8]",
traits(Debug,Clone,PartialEq),
)))]
pub enum Error{
#[doc(hidden)]
__NonExhaustive,
CouldNotFindItem{
name:RString,
},
OutOfStock{
id:usize,
name:RString,
},
}
fn returns_could_not_find_item(name:RString)->NonExhaustiveFor<Error>{
let e=Error::CouldNotFindItem{name};
NonExhaustive::new(e)
}
fn returns_out_of_stock(id:usize,name:RString)->NonExhaustiveFor<Error>{
let e=Error::OutOfStock{id,name};
NonExhaustive::new(e)
}
```
Then in 1.1 we add another error variant,returned only by new library functions.
```
use abi_stable::{
StableAbi,
nonexhaustive_enum::{NonExhaustiveFor,NonExhaustive},
std_types::RString,
sabi_trait,
};
#[repr(u8)]
#[derive(StableAbi,Debug,Clone,PartialEq)]
#[sabi(kind(WithNonExhaustive(
size="[usize;8]",
traits(Debug,Clone,PartialEq),
)))]
pub enum Error{
#[doc(hidden)]
__NonExhaustive,
CouldNotFindItem{
name:RString,
},
OutOfStock{
id:usize,
name:RString,
},
InvalidItemId{
id:usize,
},
}
fn returns_invalid_item_id()->NonExhaustiveFor<Error>{
NonExhaustive::new(Error::InvalidItemId{id:100})
}
```
If a library user attempted to unwrap `Error::InvalidItemId`
(using NonExhaustive::as_enum/as_enum_mut/into_enum)
with the 1.0 version of `Error` they would get an `Err(..)` back.
*/
#[repr(C)]
#[derive(StableAbi)]
#[sabi(
//debug_print,
not_stableabi(E,S,I),
bound="NonExhaustiveVtable_Ref<E,S,I>:StableAbi",
bound="E: GetNonExhaustive<S>",
bound="I: InterfaceBound",
extra_checks="<I as InterfaceBound>::EXTRA_CHECKS",
phantom_type_param="<E as GetNonExhaustive<S>>::NonExhaustive",
)]
pub struct NonExhaustive<E,S,I>{
// This is an opaque field since we only care about its size and alignment
#[sabi(unsafe_opaque_field)]
fill:ScratchSpace<S>,
vtable:NonExhaustiveVtable_Ref<E,S,I>,
_marker:PhantomData<()>,
}
/// The type of a `NonExhaustive<>` wrapping the enum E,
/// using the `E`'s default storage and interface.
pub type NonExhaustiveFor<E>=
NonExhaustive<
E,
<E as GetEnumInfo>::DefaultStorage,
<E as GetEnumInfo>::DefaultInterface,
>;
/// The type of a `NonExhaustive<>` wrapping the enum E,
/// using the `E`'s default storage and a custom interface.
pub type NonExhaustiveWI<E,I>=
NonExhaustive<
E,
<E as GetEnumInfo>::DefaultStorage,
I,
>;
/// The type of a `NonExhaustive<>` wrapping the enum E,
/// using a custom storage and the `E`'s default interface.
pub type NonExhaustiveWS<E,S>=
NonExhaustive<
E,
S,
<E as GetEnumInfo>::DefaultInterface,
>;
impl<E,S,I> NonExhaustive<E,S,I>{
/**
Constructs a `NonExhaustive<>` from `value` using its default interface and storage.
# Panic
This panics if the storage has an alignment or size smaller than that of `E`.
*/
#[inline]
pub fn new(value:E)->Self
where
E:GetVTable<S,I,DefaultStorage=S,DefaultInterface=I>,
{
NonExhaustive::with_storage_and_interface(value)
}
/**
Constructs a `NonExhaustive<>` from `value` using its default storage
and a custom interface.
# Panic
This panics if the storage has an alignment or size smaller than that of `E`.
*/
#[inline]
pub fn with_interface(value:E)->Self
where
E:GetVTable<S,I,DefaultStorage=S>,
{
NonExhaustive::with_storage_and_interface(value)
}
/**
Constructs a `NonExhaustive<>` from `value` using its default interface
and a custom storage.
# Panic
This panics if the storage has an alignment or size smaller than that of `E`.
*/
#[inline]
pub fn with_storage(value:E)->Self
where
E:GetVTable<S,I,DefaultInterface=I>,
{
NonExhaustive::with_storage_and_interface(value)
}
/**
Constructs a `NonExhaustive<>` from `value` using both a custom interface and storage.
# Panic
This panics if the storage has an alignment or size smaller than that of `E`.
*/
pub fn with_storage_and_interface(value:E)->Self
where
E:GetVTable<S,I>,
{
unsafe{
NonExhaustive::with_vtable(value,E::VTABLE_REF)
}
}
pub(super) unsafe fn with_vtable(
value:E,
vtable:NonExhaustiveVtable_Ref<E,S,I>
)->Self{
Self::assert_fits_within_storage();
let mut this=Self{
fill:unsafe{
// The fact that the vtable was constructed ensures that
// `Inline` implements `InlineStorage`
ScratchSpace::uninit_unbounded()
},
vtable,
_marker:PhantomData
};
(&mut this.fill as *mut ScratchSpace<S> as *mut E).write(value);
this
}
/// Checks that the alignment of `E` is correct,returning `true` if it is.
pub fn check_alignment()->bool{
let align_enum=std::mem::align_of::<E>();
let align_storage=std::mem::align_of::<S>();
align_enum <= align_storage
}
/// Checks that the size of `E` is correct,returning `true` if it is.
pub fn check_size()->bool{
let size_enum=std::mem::size_of::<E>();
let size_storage=std::mem::size_of::<S>();
size_enum <= size_storage
}
/// Asserts that `E` fits within `S`,with the correct alignment and size.
pub fn assert_fits_within_storage(){
let align_enum=std::mem::align_of::<E>();
let align_storage=std::mem::align_of::<S>();
assert!(
Self::check_alignment(),
"The alignment of the storage is lower than the enum:\n\t{} < {}",
align_storage,align_enum,
);
let size_enum=std::mem::size_of::<E>();
let size_storage=std::mem::size_of::<S>();
assert!(
Self::check_size(),
"The size of the storage is smaller than the enum:\n\t{} < {}",
size_storage,size_enum,
);
}
}
impl<E,S,I> NonExhaustive<E,S,I>
where
E:GetEnumInfo
{
/**
Unwraps a reference to this `NonExhaustive<>` into a reference to the original enum.
# Errors
This returns an error if the wrapped enum is of a variant that is
not valid in this context.
# Example
This shows how some `NonExhaustive<enum>` can be unwrapped, and others cannot.<br>
That enum comes from a newer version of the library than this knows.
```
use abi_stable::nonexhaustive_enum::{
doc_enums::example_2::{Foo,new_a,new_b,new_c},
};
assert_eq!(new_a() .as_enum().ok(),Some(&Foo::A) );
assert_eq!(new_b(10).as_enum().ok(),Some(&Foo::B(10)));
assert_eq!(new_b(77).as_enum().ok(),Some(&Foo::B(77)));
assert_eq!(new_c().as_enum().ok() ,None );
```
*/
pub fn as_enum(&self)->Result<&E,UnwrapEnumError<&Self>>{
let discriminant=self.get_discriminant();
if E::is_valid_discriminant(discriminant) {
unsafe{
Ok(&*(&self.fill as *const ScratchSpace<S> as *const E))
}
}else{
Err(UnwrapEnumError::new(self))
}
}
/**
Unwraps a mutable reference to this `NonExhaustive<>` into a
mutable reference to the original enum.
# Errors
This returns an error if the wrapped enum is of a variant that is
not valid in this context.
# Example
This shows how some `NonExhaustive<enum>` can be unwrapped, and others cannot.<br>
That enum comes from a newer version of the library than this knows.
```
use abi_stable::nonexhaustive_enum::{
doc_enums::example_1::{Foo,new_a,new_b,new_c},
};
assert_eq!(new_a() .as_enum_mut().ok(),Some(&mut Foo::A));
assert_eq!(new_b(10).as_enum_mut().ok(),None);
assert_eq!(new_b(77).as_enum_mut().ok(),None);
assert_eq!(new_c().as_enum_mut().ok() ,None);
```
*/
pub fn as_enum_mut(&mut self)->Result<&mut E,UnwrapEnumError<&mut Self>>
where
E:GetVTable<S,I>,
{
let discriminant=self.get_discriminant();
if E::is_valid_discriminant(discriminant) {
/*
Must update the vtable every time as_enum_mut is called,
because if the enum is replaced with a variant with a discriminant
outside the valid range for the functions in the vtable,
it would be undefined behavior to call those functions.
*/
self.vtable=E::VTABLE_REF;
unsafe{
Ok(&mut *(&mut self.fill as *mut ScratchSpace<S> as *mut E))
}
}else{
Err(UnwrapEnumError::new(self))
}
}
/**
Unwraps this `NonExhaustive<>` into the original enum.
# Errors
This returns an error if the wrapped enum is of a variant that is
not valid in this context.
# Example
This shows how some `NonExhaustive<enum>` can be unwrapped, and others cannot.<br>
That enum comes from a newer version of the library than this knows.
```
use abi_stable::nonexhaustive_enum::{
doc_enums::example_2::{Foo,new_a,new_b,new_c},
};
assert_eq!(new_a() .into_enum().ok(),Some(Foo::A));
assert_eq!(new_b(10).into_enum().ok(),Some(Foo::B(10)));
assert_eq!(new_b(77).into_enum().ok(),Some(Foo::B(77)));
assert_eq!(new_c().into_enum().ok() ,None);
```
*/
pub fn into_enum(self)->Result<E,UnwrapEnumError<Self>>{
let discriminant=self.get_discriminant();
if E::is_valid_discriminant(discriminant) {
let this=ManuallyDrop::new(self);
unsafe{
Ok((&this.fill as *const ScratchSpace<S> as *const E).read())
}
}else{
Err(UnwrapEnumError::new(self))
}
}
/**
Returns whether the discriminant of this enum is valid in this context.
The only way for it to be invalid is if the dynamic library is a
newer version than this knows.
*/
#[inline]
pub fn is_valid_discriminant(&self)->bool{
E::is_valid_discriminant(self.get_discriminant())
}
/**
Gets the value of the discriminant of the enum.
*/
#[inline]
pub fn get_discriminant(&self)->E::Discriminant{
unsafe{
*(&self.fill as *const ScratchSpace<S> as *const E::Discriminant)
}
}
}
impl<E,S,I> NonExhaustive<E,S,I>{
/**
Transmute this `NonExhaustive<E,S,I>` into `NonExhaustive<F,S,I>`,
changing the type of the enum it wraps.
# Safety
This has the same safety requirements that `std::mem::transmute` has.
# Panics
This panics if the storage has an alignment or size smaller than that of `F`.
*/
pub unsafe fn transmute_enum<F>(self)->NonExhaustive<F,S,I>{
NonExhaustive::<F,S,I>::assert_fits_within_storage();
transmute_ignore_size(self)
}
/**
Transmute this `&NonExhaustive<E,S,I>` into `&NonExhaustive<F,S,I>`,
changing the type of the enum it wraps.
# Safety
This has the same safety requirements that `std::mem::transmute` has.
# Panics
This panics if the storage has an alignment or size smaller than that of `F`.
*/
pub unsafe fn transmute_enum_ref<F>(&self)->&NonExhaustive<F,S,I>{
NonExhaustive::<F,S,I>::assert_fits_within_storage();
&*(self as *const Self as *const _)
}
/**
Transmute this `&mut NonExhaustive<E,S,I>` into `&mut NonExhaustive<F,S,I>`,
changing the type of the enum it wraps.
# Safety
This has the same safety requirements that `std::mem::transmute` has.
# Panics
This panics if the storage has an alignment or size smaller than that of `F`.
*/
pub unsafe fn transmute_enum_mut<F>(&mut self)->&mut NonExhaustive<F,S,I>{
NonExhaustive::<F,S,I>::assert_fits_within_storage();
&mut *(self as *mut Self as *mut _)
}
/**
Transmute this pointer to a `NonExhaustive<E,S,I>` into
a pointer (of the same kind) to a `NonExhaustive<F,S,I>`,
changing the type of the enum it wraps.
# Safety
This has the same safety requirements that
`abi_stable::pointer_traits::TransmuteElement::transmute_element` has.
# Panics
This panics if the storage has an alignment or size smaller than that of `F`.
*/
pub unsafe fn transmute_enum_ptr<P,F>(this:P)->P::TransmutedPtr
where
P:Deref<Target=Self>,
P:CanTransmuteElement<NonExhaustive<F,S,I>>
{
NonExhaustive::<F,S,I>::assert_fits_within_storage();
this.transmute_element::<NonExhaustive<F,S,I>>()
}
/// Gets a reference to the vtable of this `NonExhaustive<>`.
pub(crate) fn vtable(&self)->NonExhaustiveVtable_Ref<E,S,I>{
self.vtable
}
fn sabi_erased_ref(&self)->&ErasedObject{
unsafe{
&*(&self.fill as *const ScratchSpace<S> as *const ErasedObject)
}
}
fn as_erased_ref(&self)->&ErasedObject{
unsafe{
&*(self as *const Self as *const ErasedObject)
}
}
fn sabi_erased_mut(&mut self)->&mut ErasedObject{
unsafe{
&mut *(&mut self.fill as *mut ScratchSpace<S> as *mut ErasedObject)
}
}
}
// ---------------------------------------------------------------------------
// Standard-trait impls for NonExhaustive<E,S,I>.
//
// Each impl is gated on the interface type `I` declaring the trait as
// `Implemented<...>`, and forwards through the erased vtable, so behaviour is
// exactly that of the wrapped enum even across ABI boundaries.
// ---------------------------------------------------------------------------
impl<E,S,I> Clone for NonExhaustive<E,S,I>
where
    I: InterfaceBound<Clone = Implemented<trait_marker::Clone>>,
{
    fn clone(&self)->Self{
        unsafe{
            self.vtable().clone_()(self.sabi_erased_ref(),self.vtable)
        }
    }
}
impl<E,S,I> Display for NonExhaustive<E,S,I>
where
    I: InterfaceBound<Display = Implemented<trait_marker::Display>>,
{
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        unsafe{
            // adapt_std_fmt bridges the FFI-safe formatter back into std::fmt.
            c_functions::adapt_std_fmt::<ErasedObject>(
                self.sabi_erased_ref(),
                self.vtable().display(),
                f
            )
        }
    }
}
impl<E,S,I> Debug for NonExhaustive<E,S,I>
where
    I: InterfaceBound<Debug = Implemented<trait_marker::Debug>>,
{
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        unsafe{
            c_functions::adapt_std_fmt::<ErasedObject>(
                self.sabi_erased_ref(),
                self.vtable().debug(),
                f
            )
        }
    }
}
impl<E,S,I> Eq for NonExhaustive<E,S,I>
where
    Self: PartialEq,
    I: InterfaceBound<Eq = Implemented<trait_marker::Eq>>,
{
}
// Note: equality is allowed across two different interface types (I1/I2),
// as long as the wrapped enum `E` and storage `S` are the same.
impl<E,S,I1,I2> PartialEq<NonExhaustive<E,S,I2>> for NonExhaustive<E,S,I1>
where
    I1: InterfaceBound<PartialEq = Implemented<trait_marker::PartialEq>>,
{
    fn eq(&self, other: &NonExhaustive<E,S,I2>) -> bool {
        unsafe{
            self.vtable().partial_eq()(self.sabi_erased_ref(), other.as_erased_ref())
        }
    }
}
impl<E,S,I> Ord for NonExhaustive<E,S,I>
where
    I: InterfaceBound<Ord = Implemented<trait_marker::Ord>>,
    Self: PartialOrd + Eq,
{
    fn cmp(&self, other: &Self) -> Ordering {
        unsafe{
            self.vtable().cmp()(self.sabi_erased_ref(), other.as_erased_ref()).into()
        }
    }
}
impl<E,S,I1,I2> PartialOrd<NonExhaustive<E,S,I2>> for NonExhaustive<E,S,I1>
where
    I1: InterfaceBound<PartialOrd = Implemented<trait_marker::PartialOrd>>,
    Self: PartialEq<NonExhaustive<E,S,I2>>,
{
    fn partial_cmp(&self, other: &NonExhaustive<E,S,I2>) -> Option<Ordering> {
        unsafe{
            self.vtable().partial_cmp()(self.sabi_erased_ref(), other.as_erased_ref())
                .map(IntoReprRust::into_rust)
                .into()
        }
    }
}
/////////////////////
// Comparisons directly against the plain (non-erased) enum `E`.
impl<E,S,I> PartialOrd<E> for NonExhaustive<E,S,I>
where
    E: GetEnumInfo+PartialOrd,
    I: InterfaceBound<PartialOrd = Implemented<trait_marker::PartialOrd>>,
    Self: PartialEq<E>,
{
    fn partial_cmp(&self, other: &E) -> Option<Ordering> {
        unsafe{
            match self.as_enum() {
                Ok(this)=>this.partial_cmp(other),
                // A variant unknown to this binary sorts after every known one.
                Err(_)=>Some(Ordering::Greater),
            }
        }
    }
}
impl<E,S,I> PartialEq<E> for NonExhaustive<E,S,I>
where
    E: GetEnumInfo+PartialEq,
    I: InterfaceBound<PartialEq = Implemented<trait_marker::PartialEq>>,
{
    fn eq(&self, other: &E) -> bool {
        match self.as_enum() {
            Ok(this)=>this==other,
            // An unknown variant is never equal to any known variant.
            Err(_)=>false,
        }
    }
}
/////////////////////
impl<E,S,I> NonExhaustive<E,S,I>{
    /// It serializes a `NonExhaustive<_>` into a proxy.
    ///
    /// Goes through the vtable so serialization works even for variants that
    /// this binary does not know about.
    pub fn serialize_into_proxy(&self) -> Result<I::Proxy, RBoxError>
    where
        I: InterfaceBound<Serialize=Implemented<trait_marker::Serialize>>,
        I: SerializeEnum<NonExhaustive<E,S,I>>,
    {
        unsafe{
            self.vtable().serialize()(self.sabi_erased_ref()).into_result()
        }
    }
    /// Deserializes a `NonExhaustive<_>` from a proxy.
    pub fn deserialize_from_proxy<'borr>(proxy: I::Proxy) -> Result<Self, RBoxError>
    where
        I: InterfaceBound<Deserialize= Implemented<trait_marker::Deserialize>>,
        I: DeserializeEnum<'borr,NonExhaustive<E,S,I>>,
        I::Proxy:'borr,
        E:GetEnumInfo,
    {
        I::deserialize_enum(proxy)
    }
}
/**
First it serializes a `NonExhaustive<_>` into a proxy,then it serializes that proxy.
*/
impl<E,S,I> Serialize for NonExhaustive<E,S,I>
where
    I: InterfaceBound<Serialize = Implemented<trait_marker::Serialize>>,
    I: SerializeEnum<NonExhaustive<E,S,I>>,
    I::Proxy:Serialize,
{
    fn serialize<Z>(&self, serializer: Z) -> Result<Z::Ok, Z::Error>
    where
        Z: Serializer,
    {
        unsafe{
            // Proxy-creation failures are surfaced as custom serde errors.
            self.vtable().serialize()(self.sabi_erased_ref())
                .into_result()
                .map_err(ser::Error::custom)?
                .serialize(serializer)
        }
    }
}
/// First it deserializes the proxy type, then it converts it into a
/// `NonExhaustive<_>`,by using `<I as DeserializeEnum>::deserialize_enum`.
impl<'de,E,S,I> Deserialize<'de> for NonExhaustive<E,S,I>
where
    E: 'de+GetVTable<S,I>,
    S: 'de,
    I: 'de+InterfaceBound<Deserialize=Implemented<trait_marker::Deserialize>>,
    I: DeserializeEnum<'de,NonExhaustive<E,S,I>>,
    <I as DeserializeEnum<'de,NonExhaustive<E,S,I>>>::Proxy:Deserialize<'de>,
{
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        let s = <
            <I as DeserializeEnum<'de,NonExhaustive<E,S,I>>>::Proxy as
            Deserialize
        >::deserialize(deserializer)?;
        I::deserialize_enum(s).map_err(de::Error::custom)
    }
}
/////////////////////
impl<E,S,I> Hash for NonExhaustive<E,S,I>
where
    I: InterfaceBound<Hash = Implemented<trait_marker::Hash>>,
{
    fn hash<H>(&self, state: &mut H)
    where
        H: Hasher,
    {
        unsafe{
            // HasherObject adapts the caller's Hasher into an FFI-safe one.
            self.vtable().hash()(self.sabi_erased_ref(), HasherObject::new(state))
        }
    }
}
// Marker impl: std::error::Error has no required methods beyond Debug+Display.
impl<E,S,I> std::error::Error for NonExhaustive<E,S,I>
where
    I: InterfaceBound<
        Debug = Implemented<trait_marker::Debug>,
        Display = Implemented<trait_marker::Display>,
        Error = Implemented<trait_marker::Error>
    >,
{}
/////////////////////
impl<E,S,I> Drop for NonExhaustive<E,S,I>{
    fn drop(&mut self){
        // Drops the erased value in place through the vtable's drop function,
        // since the concrete enum type may not be known in this binary.
        let drop=self.vtable()._sabi_drop();
        unsafe{
            drop(self.sabi_erased_mut());
        }
    }
}
///////////////////////////////////////////////////////////////////////////////
/// Used to abstract over the reference-ness of `NonExhaustive<>` inside UnwrapEnumError.
pub trait NonExhaustiveSharedOps{
    /// The type of the discriminant of the wrapped enum.
    type Discriminant:ValidDiscriminant;
    /// Gets the discriminant of the wrapped enum.
    fn get_discriminant_(&self)->Self::Discriminant;
    /// Gets miscelaneous information about the wrapped enum
    fn enum_info_(&self)->&'static EnumInfo;
}
/// A struct storing the discriminant and `EnumInfo` of some enum.
pub struct DiscrAndEnumInfo<E>{
    discr:E,
    enum_info:&'static EnumInfo,
}
impl<E> DiscrAndEnumInfo<E>{
    /// Constructs this `DiscrAndEnumInfo`.
    pub fn new(discr:E,enum_info:&'static EnumInfo)->Self{
        Self{discr,enum_info}
    }
    /// The value of the enum discriminant,
    pub fn discr(&self)->E
    where
        E:ValidDiscriminant
    {
        self.discr
    }
    /// The `EnumInfo` of an enum.
    pub fn enum_info(&self)->&'static EnumInfo{
        self.enum_info
    }
}
impl<E> NonExhaustiveSharedOps for DiscrAndEnumInfo<E>
where
    E:ValidDiscriminant
{
    type Discriminant=E;
    fn get_discriminant_(&self)->E{
        self.discr
    }
    fn enum_info_(&self)->&'static EnumInfo{
        self.enum_info
    }
}
// Expands to the NonExhaustiveSharedOps items for NonExhaustive<E,S,I>;
// reused below for the by-value, shared-reference, and mutable-reference
// impls so the three stay in sync.
macro_rules! impl_neso {
    (
        impl[$E:ident,$S:ident,$I:ident]
    ) => (
        type Discriminant=$E::Discriminant;
        fn get_discriminant_(&self)->$E::Discriminant {
            self.get_discriminant()
        }
        fn enum_info_(&self)->&'static EnumInfo{
            self.vtable().enum_info()
        }
    )
}
impl<E,S,I> NonExhaustiveSharedOps for NonExhaustive<E,S,I>
where
    E:GetEnumInfo,
{
    impl_neso!{ impl[E,S,I] }
}
impl<'a,E,S,I> NonExhaustiveSharedOps for &'a NonExhaustive<E,S,I>
where
    E:GetEnumInfo,
{
    impl_neso!{ impl[E,S,I] }
}
impl<'a,E,S,I> NonExhaustiveSharedOps for &'a mut NonExhaustive<E,S,I>
where
    E:GetEnumInfo,
{
    impl_neso!{ impl[E,S,I] }
}
///////////////////////////////////////////////////////////////////////////////
/**
An error for a situation where a `NonExhaustive<>` could not be unwrapped into the enum
because the discriminant wasn't valid in this context
(likely because it is from a newer version of the library).
*/
#[must_use]
#[repr(transparent)]
#[derive(Clone,PartialEq,Eq,PartialOrd,Ord,StableAbi)]
pub struct UnwrapEnumError<N>{
    /// This field is either a `NonExhaustive<>` or a `DiscrAndEnumInfo<>`
    pub non_exhaustive:N,
    _priv:(),
}
impl<N> UnwrapEnumError<N>{
    /// Gets the `non_exhaustive` field.
    #[must_use]
    pub fn into_inner(self)->N{
        self.non_exhaustive
    }
    /// Converts this into a boxed error.
    ///
    /// Only the discriminant and the `&'static EnumInfo` are retained (via
    /// `DiscrAndEnumInfo`), so the boxed error neither borrows nor owns the
    /// original wrapper.
    pub fn into_boxed(self)->RBoxError
    where
        N:NonExhaustiveSharedOps,
    {
        let x=DiscrAndEnumInfo{
            discr:self.non_exhaustive.get_discriminant_(),
            enum_info:self.non_exhaustive.enum_info_(),
        };
        let x=UnwrapEnumError::new(x);
        RBoxError::new(x)
    }
}
impl<N> UnwrapEnumError<N>{
    // Private: constructed only via unwrap failures / into_boxed.
    #[inline]
    const fn new(non_exhaustive:N)->Self{
        Self{
            non_exhaustive,
            _priv:(),
        }
    }
}
impl<N> Display for UnwrapEnumError<N>
where
    N:NonExhaustiveSharedOps,
{
    fn fmt(&self,f:&mut fmt::Formatter<'_>)->fmt::Result{
        write!(
            f,
            "Could not unwrap NonExhaustive into '{}'.\n\
            Because its discriminant was {:?} .",
            self.non_exhaustive.enum_info_().type_name(),
            self.non_exhaustive.get_discriminant_(),
        )
    }
}
impl<N> Debug for UnwrapEnumError<N>
where
    N:NonExhaustiveSharedOps,
{
    fn fmt(&self,f:&mut fmt::Formatter<'_>)->fmt::Result{
        // The wrapped value itself may not be Debug, so it is shown opaquely;
        // its discriminant and enum info are printed instead.
        f.debug_struct("UnwrapEnumError")
            .field("non_exhaustive",&"<opaque>")
            .field("discriminant",&self.non_exhaustive.get_discriminant_())
            .field("enum_info",&self.non_exhaustive.enum_info_())
            .finish()
    }
}
impl<N> From<UnwrapEnumError<N>> for RBoxError
where
    N:NonExhaustiveSharedOps
{
    fn from(uee:UnwrapEnumError<N>)->RBoxError{
        uee.into_boxed()
    }
}
impl<N> std::error::Error for UnwrapEnumError<N>
where
    N:NonExhaustiveSharedOps,
{}
| true |
13c22355f06f0a3e7b97bafbd8a1747303a81c72
|
Rust
|
Kintaro/wtftw
|
/core/src/layout.rs
|
UTF-8
| 4,742 | 2.96875 | 3 |
[
"BSD-3-Clause"
] |
permissive
|
use crate::config::GeneralConfig;
use crate::core::stack::Stack;
use std::borrow::ToOwned;
use crate::window_manager::ScreenDetail;
use crate::window_system::Rectangle;
use crate::window_system::Window;
use crate::window_system::WindowSystem;
/// Messages a layout can receive to reconfigure itself at runtime
/// (see `Layout::apply_message`); layouts ignore messages they don't handle.
#[derive(Clone, Copy)]
pub enum LayoutMessage {
    Increase,
    Decrease,
    IncreaseMaster,
    DecreaseMaster,
    IncreaseSlave,
    DecreaseSlave,
    IncreaseGap,
    DecreaseGap,
    Next,
    Prev,
    HorizontalSplit,
    VerticalSplit,
    Hide,
    TreeRotate,
    TreeSwap,
    TreeExpandTowards(Direction),
    TreeShrinkFrom(Direction),
}
/// Reflects a rectangle across the x=y diagonal: position and size both
/// have their axes swapped.
pub fn mirror_rect(rect: &Rectangle) -> Rectangle {
    let &Rectangle(x, y, w, h) = rect;
    Rectangle(y, x, h, w)
}
/// Classic "tall" tiling: up to `num_master` windows share a left master
/// column (sized by `ratio`), the rest stack in the right column. When all
/// windows fit in the master area (or there is no master area) the whole
/// screen is simply split vertically.
pub fn tile(ratio: f32, screen: ScreenDetail, num_master: u32, num_windows: u32) -> Vec<Rectangle> {
    if num_master == 0 || num_windows <= num_master {
        return split_vertically(num_windows, screen);
    }
    let (master_area, stack_area) = split_horizontally_by(ratio, screen);
    let mut rects = split_vertically(num_master, master_area);
    rects.extend(split_vertically(num_windows - num_master, stack_area));
    rects
}
/// Splits `screen` into `num` horizontal strips stacked top to bottom.
///
/// Strip heights follow the original recursive scheme: each strip gets
/// `remaining_height / remaining_count`, so leftover pixels from integer
/// division accumulate in the bottom strip. For `num < 2` the whole screen
/// is returned as the single strip.
///
/// Rewritten iteratively: the recursive version rebuilt and re-chained a
/// Vec at every level (O(n^2) copying) and recursed once per strip.
pub fn split_vertically(num: u32, screen: ScreenDetail) -> Vec<Rectangle> {
    let Rectangle(sx, mut sy, sw, mut sh) = screen;
    let mut rects = Vec::with_capacity(num.max(1) as usize);
    let mut remaining = num;
    while remaining >= 2 {
        let strip_height = sh / remaining;
        rects.push(Rectangle(sx, sy, sw, strip_height));
        sy += strip_height as i32;
        sh -= strip_height;
        remaining -= 1;
    }
    // Last (or only) strip takes whatever height is left.
    rects.push(Rectangle(sx, sy, sw, sh));
    rects
}
/// Splits `screen` into a left and right rectangle, giving the left one
/// `ratio` of the width (floored to whole pixels); the right one gets
/// everything remaining.
pub fn split_horizontally_by(ratio: f32, screen: ScreenDetail) -> (Rectangle, Rectangle) {
    let Rectangle(sx, sy, sw, sh) = screen;
    let left_width = (sw as f32 * ratio).floor() as u32;
    let left = Rectangle(sx, sy, left_width, sh);
    let right = Rectangle(sx + left_width as i32, sy, sw - left_width, sh);
    (left, right)
}
/// A window layout algorithm: given a screen area and the current window
/// stack, decides where each window goes.
pub trait Layout {
    /// Computes a rectangle for every window in `stack` within `screen`.
    fn apply_layout(
        &mut self,
        window_system: &dyn WindowSystem,
        screen: Rectangle,
        config: &GeneralConfig,
        stack: &Option<Stack<Window>>,
    ) -> Vec<(Window, Rectangle)>;
    /// Handles a runtime reconfiguration message; returns whether it was
    /// handled. Default implementation accepts everything without changes.
    fn apply_message(
        &mut self,
        _: LayoutMessage,
        _: &dyn WindowSystem,
        _: &Option<Stack<Window>>,
        _: &GeneralConfig,
    ) -> bool {
        true
    }
    /// Human-readable layout name (e.g. for a status bar).
    fn description(&self) -> String;
    /// Clones this layout as a trait object. The default panics, so every
    /// concrete layout that gets cloned must override it.
    fn copy(&self) -> Box<dyn Layout> {
        panic!("")
    }
    /// Called when the layout is deactivated; default is a no-op.
    fn unhook(&self, _: &dyn WindowSystem, _: &Option<Stack<Window>>, _: &GeneralConfig) {}
}
/// The classic master/stack ("Tall") layout configuration.
#[derive(Clone, Copy)]
pub struct TallLayout {
    pub num_master: u32,       // windows kept in the master column
    pub increment_ratio: f32,  // NOTE(review): never read in this file — confirm use elsewhere
    pub ratio: f32,            // fraction of the screen given to the master column
}
impl TallLayout {
    /// Boxed Tall layout with 1 master window and a 50/50 split.
    pub fn boxed_new() -> Box<dyn Layout> {
        Box::new(TallLayout {
            num_master: 1,
            increment_ratio: 0.03,
            ratio: 0.5,
        })
    }
}
impl Layout for TallLayout {
    /// Computes a rectangle for every window in the stack using `tile`.
    ///
    /// Fix: the original called `s.integrate()` twice — once for the window
    /// list and once again just to take its length; the list is now built once.
    fn apply_layout(
        &mut self,
        _: &dyn WindowSystem,
        screen: Rectangle,
        _: &GeneralConfig,
        stack: &Option<Stack<Window>>,
    ) -> Vec<(Window, Rectangle)> {
        match *stack {
            Some(ref s) => {
                let windows = s.integrate();
                let rects = tile(self.ratio, screen, self.num_master, windows.len() as u32);
                windows
                    .iter()
                    .zip(rects.iter())
                    .map(|(&w, &r)| (w, r))
                    .collect()
            }
            _ => Vec::new(),
        }
    }
    /// Adjusts the master/stack ratio or the number of master windows.
    /// Returns true when the message was handled.
    fn apply_message(
        &mut self,
        message: LayoutMessage,
        _: &dyn WindowSystem,
        _: &Option<Stack<Window>>,
        _: &GeneralConfig,
    ) -> bool {
        match message {
            LayoutMessage::Increase => {
                // NOTE(review): the ratio is not clamped to [0, 1] here or in
                // Decrease — confirm whether callers rely on that.
                self.ratio += 0.05;
                true
            }
            LayoutMessage::Decrease => {
                self.ratio -= 0.05;
                true
            }
            LayoutMessage::IncreaseMaster => {
                self.num_master += 1;
                true
            }
            LayoutMessage::DecreaseMaster => {
                // Never drop below one master window.
                if self.num_master > 1 {
                    self.num_master -= 1
                }
                true
            }
            _ => false,
        }
    }
    fn description(&self) -> String {
        "Tall".to_owned()
    }
    fn copy(&self) -> Box<dyn Layout> {
        Box::new(*self)
    }
}
/// The four cardinal directions used by tree-based layout messages.
#[repr(usize)]
#[derive(Clone, Copy, Ord, Eq, PartialOrd, PartialEq)]
pub enum Direction {
    Up,
    Down,
    Left,
    Right,
}
impl Direction {
    /// Returns the direction pointing the opposite way
    /// (Up <-> Down, Left <-> Right).
    pub fn opposite(&self) -> Direction {
        match *self {
            Direction::Down => Direction::Up,
            Direction::Up => Direction::Down,
            Direction::Right => Direction::Left,
            Direction::Left => Direction::Right,
        }
    }
}
| true |
84290625eba6b796275efc062c75568328fbeed2
|
Rust
|
super-rust-boy/super-rust-boy
|
/src/video/renderer_nothreads.rs
|
UTF-8
| 1,241 | 3.25 | 3 |
[
"MIT"
] |
permissive
|
// Pixel renderer. Makes a texture of format R8G8B8A8Unorm
use super::vram::VRAM;
use super::regs::VideoRegs;
use std::sync::{
Arc,
Mutex
};
/// Shared, lockable pixel buffer that scanline draw calls write into.
pub type RenderTarget = Arc<Mutex<[u8]>>;
// Messages to send to the render thread.
// NOTE(review): nothing in this file references this enum; it appears to be
// kept for parity with the threaded renderer variant — confirm before removal.
enum RendererMessage {
    StartFrame(RenderTarget), // Begin frame, and target the provided byte array.
    DrawLineGB(VideoRegs),
    DrawLineCGB(VideoRegs)
}
// Renderer for video. This "nothreads" variant draws synchronously on the
// caller's thread (no render thread is spawned here).
pub struct Renderer {
    mem: Arc<Mutex<VRAM>>,       // shared video memory to read tiles/palettes from
    target: Option<RenderTarget> // frame buffer set by start_frame
}
impl Renderer {
    /// Creates a renderer that reads pixel data from the shared VRAM.
    pub fn new(mem: Arc<Mutex<VRAM>>) -> Self {
        Renderer {
            mem: mem,
            target: None,
        }
    }
    /// Begins a frame by remembering the buffer that subsequent
    /// `draw_line_*` calls will write into.
    pub fn start_frame(&mut self, target: RenderTarget) {
        self.target = Some(target);
    }
    /// Draws one scanline in GB (monochrome) mode.
    /// Panics if `start_frame` was not called first or a lock is poisoned.
    pub fn draw_line_gb(&mut self, regs: VideoRegs) {
        let mut mem = self.mem.lock().unwrap();
        let mut t = self.target.as_ref().unwrap().lock().unwrap();
        mem.draw_line_gb(&mut t, &regs);
    }
    /// Draws one scanline in CGB (colour) mode.
    /// Panics if `start_frame` was not called first or a lock is poisoned.
    pub fn draw_line_cgb(&mut self, regs: VideoRegs) {
        let mut mem = self.mem.lock().unwrap();
        let mut t = self.target.as_ref().unwrap().lock().unwrap();
        mem.draw_line_cgb(&mut t, &regs);
    }
}
| true |
2db97a432379e8cbe31a5c3369bb2fb14ae3054a
|
Rust
|
saritseal/naiad
|
/src/main.rs
|
UTF-8
| 1,121 | 3.265625 | 3 |
[] |
no_license
|
#[allow(unused_imports)]
use std::mem;
/// Prints a slice's first element and length.
/// Panics if the slice is empty (indexes element 0 unconditionally).
fn analyze_slice(slice: &[i32]) {
    let first = slice[0];
    let count = slice.len();
    println!("first element of the slice: {}", first);
    println!("the slice has {} elements", count);
}
extern crate graphx;
use graphx::graph_x;
/// Demo entry point exercising assorted language features plus the graphx crate.
fn main() {
    // Renamed from `str`, which shadowed the primitive string type's name.
    let greeting = "SSSSSSS-dddd dfdfd";
    println!("Hello, world! {}", greeting);
    let xs = [1, 2, 3, 4];
    analyze_slice(&xs);
    graph_x::create_graph();
    // `parse` panics with this message if the literal is not a valid u32.
    let guess: u32 = "42".parse().expect("Not a number!");
    println!("value is {}", guess);
    // A block is an expression; its last line (no semicolon) is its value.
    let a = {
        let y = [2, 3, 4];
        y
    };
    println!("value is {:?}", a);
    let cnt = 18;
    // `..=` replaces the deprecated `...` inclusive-range pattern syntax.
    match cnt {
        n @ 1..=12 => println!("between 1 and 12 {}", n),
        n @ 13..=23 => println!("between 13 and 23 {}", n),
        _ => println!("other")
    }
    let some_val = Some(3);
    match some_val {
        Some(x) => println!("{}", x),
        _ => println!("not found")
    }
}
/// Counts `x` down to zero, printing each value along the way; always
/// returns 0 for non-negative inputs. Negative inputs recurse without
/// terminating (same as the original behaviour).
#[allow(dead_code)]
fn recursion(x: i128) -> i128 {
    println!("input value {}", x);
    match x {
        0 => x,
        _ => recursion(x - 1),
    }
}
| true |
e7599dde2534b1864b971124093533414f9a14e3
|
Rust
|
BrunoWallner/audiolizer
|
/src/ui/bars.rs
|
UTF-8
| 1,672 | 2.703125 | 3 |
[
"MIT"
] |
permissive
|
use iced::{
canvas::{self, Cache, Canvas, Cursor, Geometry},
Color, Element, Length,
Point, Rectangle, Vector, Size,
};
use crate::Message;
/// Bar-graph visualisation state rendered onto an iced canvas.
pub struct Bars {
    pub data: Vec<f32>,   // one amplitude sample per bar
    pub cache: Cache,     // cached geometry; only redrawn when cleared
    pub mirroring: bool,  // NOTE(review): never read in this file — confirm use elsewhere
    pub width: f32,       // bar width factor (scaled by 0.1 when drawing)
    pub r: u8,            // bar colour, red channel
    pub g: u8,            // bar colour, green channel
    pub b: u8,            // bar colour, blue channel
}
impl Bars {
    /// Wraps this widget in a Canvas element filling the available space.
    pub fn view<'a>(
        &'a mut self,
    ) -> Element<'a, Message> {
        Canvas::new(self)
            .width(Length::Fill)
            .height(Length::Fill)
            .into()
    }
}
impl Default for Bars {
    /// No samples, red bars, mirroring enabled, width factor 10
    /// (an effective width multiplier of 1.0 after the 0.1 draw scaling).
    fn default() -> Self {
        Bars {
            data: Vec::new(),
            cache: Cache::new(),
            mirroring: true,
            width: 10.0,
            r: 255,
            g: 0,
            b: 0,
        }
    }
}
impl canvas::Program<Message> for Bars {
    /// Draws one vertical bar per sample, growing up from the bottom edge.
    fn draw(&self, bounds: Rectangle, _cursor: Cursor) -> Vec<Geometry> {
        let clock = self.cache.draw(bounds.size(), |frame| {
            let center = frame.center();
            for i in 0..self.data.len() {
                // Bars are spaced evenly; bar height is the sample scaled by 100.
                let x: f32 = (frame.width() / self.data.len() as f32) * i as f32;
                let mut y: f32 = frame.height() - self.data[i] * 100.0_f32;
                if y < 0.0 {
                    // Sample taller than the frame: pin the bar's top just below
                    // the upper edge (its height is left unchanged).
                    y = 1.0;
                }
                let size_x: f32 = frame.size().width / self.data.len() as f32 * self.width * 0.1;
                let size_y: f32 = self.data[i] * 100.0_f32;
                frame.fill_rectangle(Point::new(x, y), Size::new(size_x, size_y), Color::from_rgb8(self.r, self.g, self.b));
            }
            // NOTE(review): this translate runs after all fills, so it does not
            // affect the rectangles drawn above — confirm whether it is needed.
            frame.translate(Vector::new(center.x, center.y));
        });
        vec![clock]
    }
}
| true |
9e964aa63c3c58d507fa369fb58e9f5628ad8222
|
Rust
|
suclogger/leetcode-rust
|
/week/week251/maximum-compatibility-score-sum/src/main.rs
|
UTF-8
| 1,178 | 2.703125 | 3 |
[] |
no_license
|
// Entry-point stub; the solution below is exercised by tests/judging, not here.
fn main() {
    println!("Hello, world!");
}
/// LeetCode 1947: brute-force search over every student→mentor assignment.
/// (Original header comment, translated: "brute-force solution".)
///
/// Tries all assignments via backtracking over the not-yet-used mentors and
/// returns the maximum total compatibility score, where a pair's score is
/// the number of answers the student and mentor share.
///
/// Fix: the original pushed every permutation's score into a Vec and then
/// scanned it with `.max().unwrap()` — O(m!) memory, and a panic whenever no
/// complete assignment exists (e.g. fewer mentors than students). Now only
/// the running maximum is kept and 0 is returned in the degenerate case.
pub fn max_compatibility_sum(students: Vec<Vec<i32>>, mentors: Vec<Vec<i32>>) -> i32 {
    // Recursively assigns students[s_idx..] to the unused mentors, keeping
    // the best total seen so far in `best`.
    fn backtrack(
        m_selected: &mut Vec<bool>,
        s_idx: usize,
        students: &Vec<Vec<i32>>,
        mentors: &Vec<Vec<i32>>,
        cur_score: i32,
        best: &mut i32,
    ) {
        if s_idx >= students.len() {
            // Complete assignment: fold into the running maximum.
            *best = (*best).max(cur_score);
            return;
        }
        for j in 0..mentors.len() {
            if !m_selected[j] {
                m_selected[j] = true;
                // Pair score = number of positions where answers agree.
                let pair_score = students[s_idx]
                    .iter()
                    .zip(mentors[j].iter())
                    .filter(|(s, m)| s == m)
                    .count() as i32;
                backtrack(m_selected, s_idx + 1, students, mentors, cur_score + pair_score, best);
                m_selected[j] = false;
            }
        }
    }
    let mut best = 0;
    let mut m_selected = vec![false; mentors.len()];
    backtrack(&mut m_selected, 0, &students, &mentors, 0, &mut best);
    best
}
| true |
24408dc27c6790729340ab8b7658651d539c8522
|
Rust
|
kirisaki/twinkle
|
/src/receiver.rs
|
UTF-8
| 4,569 | 2.78125 | 3 |
[
"BSD-3-Clause"
] |
permissive
|
use std::net::SocketAddr;
use log::{info};
use tokio::net::udp::RecvHalf;
use tokio::sync::mpsc::Sender;
use crate::types::*;
use crate::store::*;
use crate::errors::*;
/// Operations a client can request over the wire.
#[derive(Debug, PartialEq)]
enum Request {
    Ping,
    Get(Bytes),
    Set(Bytes, Bytes),
    Unset(Bytes),
}
/// Receiving half of the UDP server plus its hand-off channel.
pub struct Server {
    pub sock: RecvHalf,       // receive half of the UDP socket
    pub chan: Sender<Packet>, // queue of raw packets for the worker task
    pub buf: Bytes,           // reusable receive buffer
}
impl Server {
    /// Receive loop: reads datagrams forever and forwards each as a `Packet`
    /// to the worker channel. Only returns when the socket itself errors.
    pub async fn run(self) -> Result<(), std::io::Error> {
        info!("receiver launch");
        let Server {mut sock, mut chan, mut buf} = self;
        loop {
            let (amt, dest) = sock.recv_from(&mut buf).await?;
            // NOTE(review): the whole buffer is copied, not just `amt` bytes;
            // `Packet::parse` relies on `amt` to ignore the stale tail.
            let body = buf.to_vec();
            // try_send: if the worker queue is full, the datagram is dropped.
            let _ = chan.try_send(Packet{dest, body, amt});
        }
    }
}
impl Packet {
    /// Decodes a raw datagram into an `Instruction`.
    ///
    /// Wire format: `cmd (1 byte) | uuid (UUID_LEN bytes)`, optionally
    /// followed by `keylen (u16 big-endian, 2 bytes) | key | value (rest)`.
    ///
    /// Fixes:
    /// * the UUID was sliced as `body[1..UUID_LEN + 2]` — one byte too many,
    ///   swallowing the high byte of the key length into the UUID (the branch
    ///   for `amt == UUID_LEN + 1` already sliced the correct 16 bytes);
    /// * a key length exceeding the received size now yields `FailedParsing`
    ///   instead of an out-of-bounds panic.
    pub fn parse(self) -> Result<Instruction, TwinkleError> {
        let Packet {dest, body, amt} = self;
        let e = Err(TwinkleError::FailedParsing);
        if amt < UUID_LEN + 1 {
            // Too short to hold even a command byte plus a UUID.
            e
        } else if amt == UUID_LEN + 1 {
            // Command + UUID only: the only body-less request is Ping.
            let req = match body[0] {
                0x01 =>
                    Request::Ping,
                _ => return e
            };
            let uuid = body[1..UUID_LEN+1].to_vec();
            Ok(Instruction{req, uuid, dest})
        } else if amt == UUID_LEN + 2 {
            // One extra byte cannot hold the 2-byte key length: malformed.
            e
        } else {
            let cmd = body[0];
            // The UUID is exactly UUID_LEN bytes starting after the command byte.
            let uuid = body[1..UUID_LEN + 1].to_vec();
            let high: usize = From::from(body[UUID_LEN + 1]);
            let low: usize = From::from(body[UUID_LEN + 2]);
            let keylen = high * 256 + low;
            // Reject datagrams whose declared key length overruns the payload.
            if UUID_LEN + 3 + keylen > amt {
                return Err(TwinkleError::FailedParsing);
            }
            let key = if keylen == 0 {
                vec![]
            } else {
                body[UUID_LEN + 3..UUID_LEN + 3 + keylen].to_vec()
            };
            let val = if UUID_LEN + 3 + keylen == amt {
                vec![]
            } else {
                body[UUID_LEN + 3 + keylen..amt].to_vec()
            };
            let req = match cmd {
                0x02 =>
                    Request::Get(key),
                0x03 =>
                    Request::Set(key, val),
                0x04 =>
                    Request::Unset(key),
                _ =>
                    return e,
            };
            Ok(Instruction{req, uuid, dest})
        }
    }
}
/// A parsed request together with the data needed to build the reply.
#[derive(Debug)]
pub struct Instruction {
    req: Request,     // decoded operation
    uuid: UUID,       // request id echoed back in the reply
    dest: SocketAddr, // where to send the reply
}
impl Instruction {
    /// Executes the request against the store and builds the reply datagram.
    ///
    /// Reply format: `status (1 byte) | uuid | payload?`, where status 1 is
    /// OK (with the value appended for a successful Get) and 2 is not-found.
    ///
    /// Cleanup: replies were built with `r.append(&mut x.clone())`, cloning
    /// each buffer only to drain it; `extend_from_slice` copies directly.
    /// `Set` also cloned key and value before inserting — they are moved now.
    pub async fn respond(self, s: Store) -> Result<(Bytes, SocketAddr), TwinkleError> {
        let mut store = s.lock().await;
        let Instruction{req, uuid, dest} = self;
        let resp = match req {
            Request::Ping => {
                let mut r = vec![1];
                r.extend_from_slice(&uuid);
                r
            },
            Request::Get(k) => {
                match store.get(&k) {
                    Some(v) => {
                        let mut r = vec![1];
                        r.extend_from_slice(&uuid);
                        r.extend_from_slice(v);
                        r
                    },
                    None => {
                        // Status 2: key not present.
                        let mut r = vec![2];
                        r.extend_from_slice(&uuid);
                        r
                    },
                }
            },
            Request::Set(k, v) => {
                store.insert(k, v);
                let mut r = vec![1];
                r.extend_from_slice(&uuid);
                r
            },
            Request::Unset(k) => {
                store.remove(&k);
                let mut r = vec![1];
                r.extend_from_slice(&uuid);
                r
            },
        };
        Ok((resp, dest))
    }
}
#[cfg(test)]
mod tests {
    use crate::receiver::{Packet, Request};
    use std::net::SocketAddr;
    // Round-trips one well-formed datagram per command through Packet::parse.
    // NOTE(review): only `req` is asserted — the parsed uuid is never checked,
    // which is exactly what let the uuid slicing mistake go unnoticed.
    #[test]
    fn test_parse_success() {
        let addr = SocketAddr::from(([127, 0, 0, 1], 3000));
        let cases = vec![
            (b"\x01iiiijjjjkkkkllll".to_vec(), Request::Ping),
            (b"\x02iiiijjjjkkkkllll\x00\x01a".to_vec(), Request::Get(b"a".to_vec())),
            (b"\x03iiiijjjjkkkkllll\x00\x01abc".to_vec(), Request::Set(b"a".to_vec(),b"bc".to_vec())),
            (b"\x04iiiijjjjkkkkllll\x00\x01a".to_vec(), Request::Unset(b"a".to_vec())),
        ];
        for (received, expected) in cases {
            let packet = Packet{
                dest: addr,
                body: received.to_vec(),
                amt: received.len()
            };
            let result = packet.parse().unwrap();
            assert_eq!(result.req, expected);
        }
    }
}
| true |
471f0fa4b7718a60298cc6e2593816c844986f44
|
Rust
|
igxactly/Toshi
|
/src/query/aggregate/sum.rs
|
UTF-8
| 2,300 | 2.765625 | 3 |
[
"MIT"
] |
permissive
|
use super::super::{Error, Result};
use super::AggregateQuery;
use tantivy::collector::{Collector, TopCollector};
use tantivy::schema::{Field, Value};
use tantivy::{Searcher, SegmentReader};
/// Aggregation result: the summed value for a single field.
#[derive(Serialize, Debug)]
pub struct SummaryDoc {
    field: Field,
    value: u64,
}
/// Sums a numeric field across the documents selected by the inner collector.
pub struct SumCollector<'a> {
    field: String,            // name of the field to sum, resolved at result() time
    collector: TopCollector,  // gathers the matching doc addresses
    searcher: &'a Searcher,
}
impl<'a> SumCollector<'a> {
    /// Builds a sum aggregation over `field` using an already-configured
    /// searcher and top-docs collector.
    pub fn new(field: String, searcher: &'a Searcher, collector: TopCollector) -> Self {
        Self {
            field,
            searcher,
            collector,
        }
    }
}
impl<'a> AggregateQuery<SummaryDoc> for SumCollector<'a> {
    /// Sums the field's value over every collected document.
    ///
    /// Numeric fields contribute their value; string/byte fields contribute
    /// their length. Panics (unwrap/panic!) on unfetchable documents or on
    /// value types not listed below.
    fn result(&self) -> Result<SummaryDoc> {
        let field = self
            .searcher
            .schema()
            .get_field(&self.field)
            .ok_or_else(|| Error::QueryError(format!("Field {} does not exist", self.field)))?;
        let result: u64 = self
            .collector
            .docs()
            .into_iter()
            .map(move |d| {
                // At this point docs have already passed through the collector, if we are in map it means we have
                // something
                let doc = self.searcher.doc(d).unwrap();
                doc.get_first(field)
                    .into_iter()
                    .map(|v| match v {
                        Value::I64(i) => (*i) as u64,
                        Value::U64(u) => *u,
                        // Should we even have these or only numerics?
                        Value::Str(s) => (*s).len() as u64,
                        Value::Bytes(b) => (*b).len() as u64,
                        _ => panic!("Value is not numeric"),
                    })
                    .sum::<u64>()
            })
            .sum();
        Ok(SummaryDoc {
            // NOTE(review): this returns the hard-coded Field(0) rather than
            // the `field` resolved above — looks like a placeholder; confirm.
            field: Field(0),
            value: result,
        })
    }
}
// Collector is implemented by pure delegation to the inner TopCollector;
// the actual summing happens afterwards in `result`.
impl<'a> Collector for SumCollector<'a> {
    fn set_segment(&mut self, segment_local_id: u32, segment: &SegmentReader) -> tantivy::Result<()> {
        self.collector.set_segment(segment_local_id, segment)
    }
    fn collect(&mut self, doc: u32, score: f32) {
        self.collector.collect(doc, score);
    }
    fn requires_scoring(&self) -> bool {
        self.collector.requires_scoring()
    }
}
| true |
d44fc0c67266bd79798aca099b3c91c021411b00
|
Rust
|
Phytolizer/silver
|
/silver-language/src/analysis/silver_value.rs
|
UTF-8
| 951 | 3.4375 | 3 |
[] |
no_license
|
use std::fmt::Display;
use super::silver_type::SilverType;
/// A runtime value in the Silver language: an integer or a boolean.
#[derive(Debug, Clone, PartialEq)]
pub enum SilverValue {
    Integer(i128),
    Boolean(bool),
}
impl SilverValue {
    /// Returns the wrapped integer, or `None` for non-integer values.
    pub fn as_integer(&self) -> Option<i128> {
        if let SilverValue::Integer(i) = self {
            Some(*i)
        } else {
            None
        }
    }
    /// Returns the wrapped boolean, or `None` for non-boolean values.
    pub fn as_boolean(&self) -> Option<bool> {
        if let SilverValue::Boolean(b) = self {
            Some(*b)
        } else {
            None
        }
    }
    /// Reports the `SilverType` corresponding to this value's variant.
    pub fn ty(&self) -> SilverType {
        match self {
            SilverValue::Integer(_) => SilverType::Integer,
            SilverValue::Boolean(_) => SilverType::Boolean,
        }
    }
}
impl Display for SilverValue {
    /// Formats the inner value with its own default `Display` formatting.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            SilverValue::Integer(i) => write!(f, "{}", i),
            SilverValue::Boolean(b) => write!(f, "{}", b),
        }
    }
}
| true |
c1377b13995c0db99e6b4c2ed2768208d747ec12
|
Rust
|
chromium/chromium
|
/third_party/rust/itoa/v1/crate/benches/bench.rs
|
UTF-8
| 1,344 | 2.71875 | 3 |
[
"GPL-1.0-or-later",
"MIT",
"LGPL-2.0-or-later",
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference",
"BSD-3-Clause"
] |
permissive
|
#![feature(test)]
#![allow(non_snake_case)]
#![allow(clippy::cast_lossless)]
extern crate test;
// For each listed case, generates a pair of benchmark functions in two
// modules: `bench_itoa_format` (itoa::Buffer) and `bench_std_fmt`
// (write! through core::fmt), so the two formatting paths can be compared
// value-for-value.
macro_rules! benches {
    ($($name:ident($value:expr))*) => {
        mod bench_itoa_format {
            use test::{Bencher, black_box};
            $(
                #[bench]
                fn $name(b: &mut Bencher) {
                    let mut buffer = itoa::Buffer::new();
                    b.iter(|| {
                        // black_box keeps the optimiser from const-folding the value.
                        let printed = buffer.format(black_box($value));
                        black_box(printed);
                    });
                }
            )*
        }
        mod bench_std_fmt {
            use std::io::Write;
            use test::{Bencher, black_box};
            $(
                #[bench]
                fn $name(b: &mut Bencher) {
                    let mut buf = Vec::with_capacity(40);
                    b.iter(|| {
                        buf.clear();
                        write!(&mut buf, "{}", black_box($value)).unwrap();
                        black_box(&buf);
                    });
                }
            )*
        }
    }
}
// Edge-case values: zero, a mid-range value, and each type's extremes.
benches! {
    bench_u64_0(0u64)
    bench_u64_half(u32::max_value() as u64)
    bench_u64_max(u64::max_value())
    bench_i16_0(0i16)
    bench_i16_min(i16::min_value())
    bench_u128_0(0u128)
    bench_u128_max(u128::max_value())
}
| true |
1bf7f52d264872b2aa4ace964ceab64a293c3aa6
|
Rust
|
ml-titans/titan4-codes
|
/emergent/2_make-cli/src/main.rs
|
UTF-8
| 4,879 | 2.6875 | 3 |
[
"Apache-2.0"
] |
permissive
|
use anyhow::{anyhow, ensure, Result};
use image::io::Reader as ImageReader;
use std::path::{Path, PathBuf};
use structopt::StructOpt;
use tensorflow::{Graph, SavedModelBundle, SessionOptions, SessionRunArgs, Tensor};
// Directory containing the exported TensorFlow SavedModel.
const MODEL_DIR: &str = "models";
// Directory with one sub-directory of PNG test images per label.
const TEST_DATA_DIR: &str = "images";
// Fashion-MNIST class names; index == the model's output class id.
const LABELS: [&str; 10] = [
    "Top", "Trouser", "Pullover", "Dress", "Coat", "Sandal", "Shirt", "Sneaker", "Bag", "Boot",
];
// (image, label index) pairs loaded from TEST_DATA_DIR.
type TestData = Vec<(image::DynamicImage, usize)>;
/// Command-line options for the fashion-MNIST demo.
#[derive(StructOpt, Debug)]
#[structopt(name = "fmnist-sample")]
struct Opt {
    #[structopt(subcommand)]
    pub sub: Subcommands,
}
/// `test`: measure accuracy over the bundled image set;
/// `classify --file <png>`: classify a single image.
#[derive(StructOpt, Debug)]
enum Subcommands {
    Test,
    Classify {
        #[structopt(short, long)]
        file: PathBuf,
    },
}
/// Parses the CLI and dispatches to the chosen subcommand.
fn main() -> Result<()> {
    let opt = Opt::from_args();
    match opt.sub {
        Subcommands::Test => test_accuracy()?,
        Subcommands::Classify { file } => classify(file)?,
    }
    Ok(())
}
/// Runs the model over every bundled test image and prints overall accuracy.
///
/// Note: images whose prediction yields no max index are silently skipped
/// (they are not counted in `total`).
fn test_accuracy() -> Result<()> {
    let data = load_test_images()?;
    let data_total = data.len();
    let classifier = FashionMnistClassifier::load()?;
    let mut test_result = vec![];
    for (i, (img, label_idx)) in data.iter().enumerate() {
        // Model input: the 28x28 image flattened to raw luma bytes.
        let input_value = img.to_luma8().to_vec();
        let res = classifier.classify(&input_value)?;
        if let Some(idx) = get_max_index(&res) {
            print!("progress: [ {:5} / {:5} ] \r", i + 1, data_total);
            test_result.push(*label_idx == idx);
        }
    }
    println!();
    let total = test_result.len();
    let success = test_result.iter().filter(|&&x| x).count();
    let accuracy = success as f32 / total as f32;
    println!(
        "total: {}, success: {}, failure: {}, accuracy: {} %",
        total,
        success,
        total - success,
        accuracy * 100.0
    );
    Ok(())
}
/// Classifies a single image file and prints the raw scores plus the label.
fn classify<P: AsRef<Path>>(file: P) -> Result<()> {
    let classifier = FashionMnistClassifier::load()?;
    let img = load_image(file)?;
    // Model input: the 28x28 image flattened to raw luma bytes.
    let input_value = img.to_luma8().to_vec();
    let res = classifier.classify(&input_value)?;
    println!("{:?}", res);
    if let Some(idx) = get_max_index(&res) {
        println!("classified: {}", LABELS[idx]);
    }
    Ok(())
}
/// Decodes a single image file (format inferred from its contents).
fn load_image<P: AsRef<Path>>(filename: P) -> Result<image::DynamicImage> {
    let img = ImageReader::open(filename)?.decode()?;
    Ok(img)
}
/// Loads every `images/<label>/*.png` as (image, label-index) pairs.
/// Unreadable glob entries are logged and skipped rather than aborting.
fn load_test_images() -> Result<TestData> {
    let mut v = vec![];
    for (i, label) in LABELS.iter().enumerate() {
        let pattern = format!("{}/{}/*.png", TEST_DATA_DIR, label);
        for entry in glob::glob(&pattern)? {
            match entry {
                Ok(path) => {
                    let img = load_image(path)?;
                    v.push((img, i));
                }
                Err(e) => println!("{:?}", e),
            }
        }
    }
    Ok(v)
}
/// Returns the index of the largest value in `v`, or `None` when `v` is empty.
/// Ties resolve to the last occurrence (matching the previous behaviour).
///
/// Fix: the running maximum used to be seeded with `0.0`, so a slice holding
/// only negative values incorrectly returned `None`. The model's output is
/// not guaranteed non-negative (e.g. raw logits), so the search is now seeded
/// from the elements themselves.
fn get_max_index(v: &[f32]) -> Option<usize> {
    let mut best: Option<(usize, f32)> = None;
    for (i, &val) in v.iter().enumerate() {
        match best {
            // Keep the current best only when strictly greater; equal values
            // fall through and update, preserving the last-wins tie rule.
            Some((_, max)) if val < max => {}
            _ => best = Some((i, val)),
        }
    }
    best.map(|(idx, _)| idx)
}
/// Wraps a TensorFlow SavedModel (graph + session) for inference.
struct FashionMnistClassifier {
    graph: Graph,
    bundle: SavedModelBundle,
}
impl FashionMnistClassifier {
    /// Loads the SavedModel from `models/` with the "serve" tag,
    /// failing fast when the directory is missing.
    pub fn load() -> Result<Self> {
        let path = PathBuf::from(MODEL_DIR);
        ensure!(path.exists(), anyhow!("directory not found"));
        let mut graph = Graph::new();
        let bundle = SavedModelBundle::load(&SessionOptions::new(), &["serve"], &mut graph, &path)?;
        Ok(Self { graph, bundle })
    }
    /// Runs the model on one 28x28 grayscale image given as raw bytes and
    /// returns the 10 per-class output values.
    pub fn classify(&self, img: &[u8]) -> Result<Vec<f32>> {
        // Scale raw bytes into [0, 1] floats.
        let values = img.iter().map(|&b| b as f32 / 255.0).collect::<Vec<f32>>();
        let img_tensor = Tensor::<f32>::new(&[28, 28]).with_values(&values)?;
        let session = &self.bundle.session;
        let meta = self.bundle.meta_graph_def();
        // Resolve the input/output ops from the "serving_default" signature.
        let signature = meta.get_signature("serving_default")?;
        let input_info = signature.get_input("flatten_input")?;
        let input_op = self
            .graph
            .operation_by_name_required(&input_info.name().name)?;
        let output_info = signature.get_output("dense_1")?;
        let output_op = self
            .graph
            .operation_by_name_required(&output_info.name().name)?;
        let mut run_args = SessionRunArgs::new();
        run_args.add_feed(&input_op, input_info.name().index, &img_tensor);
        let token = run_args.request_fetch(&output_op, output_info.name().index);
        session.run(&mut run_args)?;
        let output = run_args.fetch::<f32>(token)?;
        let res = output.to_vec();
        Ok(res)
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    // Covers a normal slice and the empty slice.
    // NOTE(review): no case covers an all-negative slice, which is where the
    // zero-seeded implementation of get_max_index misbehaved.
    #[test]
    fn test_get_max() {
        let x = [0.2, 0.3, 0.45435, 0.1, 0.01];
        assert_eq!(get_max_index(&x), Some(2));
        let y = [];
        assert_eq!(get_max_index(&y), None);
    }
}
| true |
e0c0614b368e16568cbe33ce264e3309cd237ffb
|
Rust
|
dylanmckay/avr
|
/src/mem.rs
|
UTF-8
| 1,933 | 3.5625 | 4 |
[
"MIT"
] |
permissive
|
use Error;
use std;
/// Width of an address on the simulated AVR (16-bit address bus).
pub type Address = u16;
/// A memory space.
///
/// A flat, byte-addressable buffer (program or data memory).
pub struct Space
{
    data: Vec<u8>,
}
impl Space
{
pub fn new(size: usize) -> Self {
let data = std::iter::repeat(0).take(size)
.collect();
Space {
data: data,
}
}
pub fn set_u8(&mut self, addr: usize, val: u8) -> Result<(), Error> {
if self.is_access_in_bounds(addr, 1) {
self.data[addr] = val;
Ok(())
} else {
Err(Error::SegmentationFault { address: addr + 1 })
}
}
pub fn set_u16(&mut self, addr: usize, val: u16) -> Result<(), Error> {
if self.is_access_in_bounds(addr, 2) {
self.data[addr] = ((val & 0xff00) >> 8) as u8;
self.data[addr+1] = (val & 0xff) as u8;
Ok(())
} else {
Err(Error::SegmentationFault { address: addr + 2 })
}
}
pub fn get_u8(&self, addr: usize) -> Result<u8, Error> {
self.data.get(addr).cloned().ok_or(Error::SegmentationFault { address: addr })
}
pub fn get_u16(&self, addr: usize) -> Result<u16, Error> {
let hi = self.get_u8(addr+0)? as u16;
let lo = self.get_u8(addr+1)? as u16;
Ok((hi << 8) | lo)
}
pub fn bytes<'a>(&'a self) -> std::slice::Iter<'a,u8> {
self.data.iter()
}
pub fn bytes_mut<'a>(&'a mut self) -> std::slice::IterMut<'a,u8> {
self.data.iter_mut()
}
pub fn load<I>(&mut self, mut bytes: I)
where I: Iterator<Item=u8> {
for byte in self.data.iter_mut() {
if let Some(b) = bytes.next() {
*byte = b;
} else {
break;
}
}
}
fn is_access_in_bounds(&self, addr: usize, byte_count: usize) -> bool {
let end_byte_offset = addr + byte_count;
end_byte_offset <= self.data.len()
}
}
| true |
8a19bf31b9ebd641c901ad39877b10aaa0c97d90
|
Rust
|
shimmy1996/twixter
|
/src/follow.rs
|
UTF-8
| 591 | 2.78125 | 3 |
[
"MIT",
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
] |
permissive
|
use clap::ArgMatches;
use std::fs::OpenOptions;
use std::io::prelude::*;
use std::path::Path;
use crate::config::Config;
/// Follow new source by writing to the config file.
///
/// Appends a `nick = url` line for the feed named on the command line.
/// Panics if the config file cannot be opened or written, or if either
/// argument is absent (presumably clap marks both as required — confirm
/// against the argument definitions).
pub fn follow(_config: &Config, subcommand: &ArgMatches, config_path: &Path) {
    let nick = subcommand.value_of("nick").unwrap();
    let url = subcommand.value_of("url").unwrap();
    // Appends given source to end of config file.
    let mut config_file = OpenOptions::new().append(true).open(config_path).unwrap();
    config_file
        .write_fmt(format_args!("\n{} = {}", nick, url))
        .unwrap();
}
| true |
a62b7a094f3d8a1865401276d734dda5213cb093
|
Rust
|
jbelmont/rust_book
|
/state_machine/src/lib.rs
|
UTF-8
| 654 | 3.015625 | 3 |
[
"Apache-2.0"
] |
permissive
|
#![allow(unused_variables)]
fn main() {
    // "State pattern" example from The Rust Programming Language (ch. 17):
    // a Post owns a boxed state object and delegates transitions to it.
    // All items live inside main; nothing is executed at runtime.
    pub struct Post {
        state: Option<Box<dyn State>>, // Option lets request_review move the state out
        content: String,
    }
    impl Post {
        // --snip--
        /// Hands the current state to `State::request_review` and stores
        /// whatever state comes back (Draft -> PendingReview).
        pub fn request_review(&mut self) {
            if let Some(s) = self.state.take() {
                self.state = Some(s.request_review())
            }
        }
    }
    /// A post state; consumes the boxed state and returns the next one.
    trait State {
        fn request_review(self: Box<Self>) -> Box<dyn State>;
    }
    struct Draft {}
    impl State for Draft {
        // A draft that is reviewed becomes PendingReview.
        fn request_review(self: Box<Self>) -> Box<dyn State> {
            Box::new(PendingReview {})
        }
    }
    struct PendingReview {}
    impl State for PendingReview {
        // Requesting review again is a no-op: stay in PendingReview.
        fn request_review(self: Box<Self>) -> Box<dyn State> {
            self
        }
    }
}
| true |
de5144bea4f8d8291f30e68c08586fbdbd248724
|
Rust
|
crhino/typed
|
/benches/mod.rs
|
UTF-8
| 4,545 | 2.765625 | 3 |
[] |
no_license
|
#![feature(test)]
extern crate test;
#[macro_use]
extern crate typed;
extern crate void;
use test::Bencher;
/// The single behaviour both benchmarks dispatch on.
pub trait Increment {
    fn increment(&mut self, amt: usize) -> usize;
}
// Generates the TypedStack/LinkImpl machinery specialised to `Increment`.
typed_stack!(Increment);
// TODO: Figure out an easy way to not have to manually impl trait here.
// This exposes a lot of the inner workings of the crate.
impl Increment for Void {
    // Void is uninhabited, so this can never actually be called.
    fn increment(&mut self, _amt: usize) -> usize {
        unreachable!()
    }
}
// A link node forwards the call to the object stored at that node.
impl<S1: Increment, S2> Increment for LinkImpl<S1, S2> {
    fn increment(&mut self, amt: usize) -> usize {
        self.obj.increment(amt)
    } }
// Implements `Increment` for a tuple struct whose first field is the counter.
macro_rules! impl_trait {
    ($t:ty) => {
        impl Increment for $t {
            fn increment(&mut self, amt: usize) -> usize {
                self.0 += amt;
                self.0
            }
        }
    };
}
// Five distinct types so the benchmarks exercise heterogeneous dispatch.
#[derive(Debug)]
struct Test1(usize);
#[derive(Debug)]
struct Test2(usize);
#[derive(Debug)]
struct Test3(usize);
#[derive(Debug)]
struct Test4(usize);
#[derive(Debug)]
struct Test5(usize);
impl_trait!(Test1);
impl_trait!(Test2);
impl_trait!(Test3);
impl_trait!(Test4);
impl_trait!(Test5);
/// Baseline: 64 heterogeneous trait objects (cycling through the five test
/// types) incremented via dynamic dispatch, for comparison against the
/// TypedStack benchmark below.
///
/// Modernised: bare trait objects (`Box<Increment>`) are deprecated; the
/// casts are replaced with an explicitly-typed `Box<dyn Increment>` binding.
#[bench]
fn bench_dynamic_dispath_64_elements(b: &mut Bencher) {
    let mut vec: Vec<Box<dyn Increment>> = Vec::new();
    for i in 0..64 {
        let boxed: Box<dyn Increment> = match i % 5 {
            0 => Box::new(Test1(0)),
            1 => Box::new(Test2(0)),
            2 => Box::new(Test3(0)),
            3 => Box::new(Test4(0)),
            _ => Box::new(Test5(0)),
        };
        vec.push(boxed);
    }
    b.iter(|| {
        for i in vec.iter_mut() {
            i.increment(10);
        }
    });
}
#[bench]
fn bench_typed_stack_64_elements(b: &mut Bencher) {
let s = TypedStack::<Void>::new();
// let stack = construct(s, 100);
let s = s.add(Test2(0));
let s = s.add(Test3(0));
let s = s.add(Test4(0));
let s = s.add(Test5(0));
let s = s.add(Test1(0));
let s = s.add(Test2(0));
let s = s.add(Test3(0));
let s = s.add(Test4(0));
let s = s.add(Test5(0));
let s = s.add(Test1(0));
let s = s.add(Test2(0));
let s = s.add(Test3(0));
let s = s.add(Test4(0));
let s = s.add(Test5(0));
let s = s.add(Test1(0));
let s = s.add(Test2(0));
let s = s.add(Test3(0));
let s = s.add(Test4(0));
let s = s.add(Test5(0));
let s = s.add(Test1(0));
let s = s.add(Test2(0));
let s = s.add(Test3(0));
let s = s.add(Test4(0));
let s = s.add(Test5(0));
let s = s.add(Test1(0));
let s = s.add(Test2(0));
let s = s.add(Test3(0));
let s = s.add(Test4(0));
let s = s.add(Test5(0));
let s = s.add(Test1(0));
let s = s.add(Test2(0));
let s = s.add(Test3(0));
let s = s.add(Test4(0));
let s = s.add(Test5(0));
let s = s.add(Test1(0));
let s = s.add(Test2(0));
let s = s.add(Test3(0));
let s = s.add(Test4(0));
let s = s.add(Test5(0));
let s = s.add(Test1(0));
let s = s.add(Test1(0));
let s = s.add(Test2(0));
let s = s.add(Test3(0));
let s = s.add(Test4(0));
let s = s.add(Test5(0));
let s = s.add(Test2(0));
let s = s.add(Test3(0));
let s = s.add(Test4(0));
let s = s.add(Test5(0));
let s = s.add(Test1(0));
let s = s.add(Test2(0));
let s = s.add(Test3(0));
let s = s.add(Test4(0));
let s = s.add(Test5(0));
let s = s.add(Test1(0));
let s = s.add(Test2(0));
let s = s.add(Test3(0));
let s = s.add(Test4(0));
let s = s.add(Test5(0));
let s = s.add(Test1(0));
let s = s.add(Test2(0));
let s = s.add(Test3(0));
let s = s.add(Test4(0));
let mut s = s.add(Test5(0));
b.iter(|| { iter(s.front_mut().unwrap()) });
}
// fn construct<T>(stack: TypedStack<T>, count: usize)
// -> TypedStack<LinkImpl<Test5, LinkImpl<Test4, LinkImpl<Test3, LinkImpl<Test2, LinkImpl<Test1, T>>>>>>
// where T: Increment + Link {
// let s = stack.add(Test1(0));
// let s = s.add(Test2(0));
// let s = s.add(Test3(0));
// let s = s.add(Test4(0));
// let s = s.add(Test5(0));
// let count = count - 5;
// if count != 0 {
// construct(s, count)
// } else {
// s
// }
// }
#[inline]
fn iter<T>(inc: &mut T)
where T: Link + Increment {
inc.increment(10);
if let Some(t) = inc.next_mut() {
iter(t);
}
}
| true |
dbbf7e400110072eccb8938484594ea0f51f9e63
|
Rust
|
jxs/keg
|
/keg-functions/src/lib.rs
|
UTF-8
| 3,087 | 2.6875 | 3 |
[] |
no_license
|
mod error;
mod traits;
use regex::Regex;
use std::cmp::Ordering;
use std::collections::hash_map::DefaultHasher;
use std::fmt;
use std::hash::{Hash, Hasher};
use chrono::{DateTime, Local};
pub use error::{Error, WrapMigrationError};
pub use traits::{Transaction, DefaultQueries, CommitTransaction, ExecuteMultiple, Query, Migrate, MigrateGrouped};
#[cfg(feature = "rusqlite")]
pub mod rusqlite;
#[cfg(feature = "postgres")]
pub mod postgres;
#[cfg(feature = "mysql")]
pub mod mysql;
/// Regex splitting a migration file name of the form `V{version}__{name}`.
/// NOTE(review): the version character class `[\d|\.]` also matches a literal
/// `|` — probably unintended, though dotted versions like `1.2` still parse.
pub fn file_match_re() -> Regex {
    Regex::new(r"([V])([\d|\.]+)__(\w+)").unwrap()
}
lazy_static::lazy_static! {
    // Compiled once; reused by `Migration::from_filename`.
    static ref RE: regex::Regex = file_match_re();
}
// Prefix of a migration file name; currently only versioned (`V`) migrations.
#[derive(Clone, Debug)]
enum MigrationPrefix {
    Versioned,
}

/// A migration parsed from a `V{version}__{name}` file plus its SQL body.
#[derive(Clone, Debug)]
pub struct Migration {
    name: String,
    version: usize,
    prefix: MigrationPrefix,
    sql: String,
}

impl Migration {
    /// Parses a file name such as `V1__initial` and pairs it with `sql`.
    ///
    /// Returns `Error::InvalidName` when the name does not match the expected
    /// pattern, and `Error::InvalidVersion` when the version segment is not
    /// an integer.
    pub fn from_filename(name: &str, sql: &str) -> Result<Migration, Error> {
        let captures = RE
            .captures(name)
            .filter(|caps| caps.len() == 4)
            .ok_or(Error::InvalidName)?;
        let version = captures[2]
            .parse()
            .map_err(|_| Error::InvalidVersion)?;
        let name = (&captures[3]).into();
        let prefix = match &captures[1] {
            "V" => MigrationPrefix::Versioned,
            // Group 1 of the regex only matches a literal `V`.
            _ => unreachable!(),
        };
        Ok(Migration {
            name,
            version,
            sql: sql.into(),
            prefix
        })
    }

    /// Hash over name, version and SQL, used to detect edited migrations.
    /// NOTE(review): `DefaultHasher`'s output is not guaranteed stable across
    /// Rust releases — confirm before persisting these checksums long-term.
    pub fn checksum(&self) -> u64 {
        let mut hasher = DefaultHasher::new();
        self.name.hash(&mut hasher);
        self.version.hash(&mut hasher);
        self.sql.hash(&mut hasher);
        hasher.finish()
    }
}
}
impl fmt::Display for Migration {
    // Formats as the original file-name stem, e.g. `V1__initial`.
    fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(fmt, "V{}__{}", self.version, self.name)
    }
}

// Equality and ordering consider only the version number: two migrations with
// the same version but different name/SQL compare equal. Content drift is
// detected separately via `checksum()`.
impl Eq for Migration {}

impl Ord for Migration {
    fn cmp(&self, other: &Migration) -> Ordering {
        self.version.cmp(&other.version)
    }
}

impl PartialEq for Migration {
    fn eq(&self, other: &Migration) -> bool {
        self.version == other.version
    }
}

impl PartialOrd for Migration {
    fn partial_cmp(&self, other: &Migration) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}

/// A migration as recorded in the database's bookkeeping table.
#[derive(Debug)]
pub struct AppliedMigration {
    name: String,
    version: usize,
    installed_on: DateTime<Local>,
    checksum: String,
}

/// Plans and executes a set of migrations against a connection.
pub struct Runner {
    // When true, all migrations run inside a single transaction.
    grouped: bool,
    migrations: Vec<Migration>
}

impl Runner {
    /// Creates a runner over a snapshot of `migrations`; ungrouped by default.
    pub fn new(migrations: &[Migration]) -> Runner {
        Runner {
            grouped: false,
            migrations: migrations.to_vec()
        }
    }

    /// Chooses whether migrations are applied in one grouped transaction.
    pub fn set_grouped(&mut self, grouped: bool) {
        self.grouped = grouped;
    }

    /// Applies the migrations, grouped in one transaction or one-by-one.
    pub fn run<'a, C>(&self, conn: &'a mut C) -> Result<(), Error> where C: MigrateGrouped<'a> + Migrate {
        if self.grouped {
            MigrateGrouped::migrate(conn, &self.migrations)?;
        } else {
            Migrate::migrate(conn, &self.migrations)?;
        }
        Ok(())
    }
}
| true |
590e92678145d9ad9f1be10cb4279f91a6da44a8
|
Rust
|
rustlang-top/crate-galaxy-graph
|
/src/main.rs
|
UTF-8
| 2,990 | 2.6875 | 3 |
[
"MIT",
"Apache-2.0"
] |
permissive
|
use serde::Deserialize;
use std::collections::HashMap;
use std::io::prelude::*;
use std::io::BufReader;
use std::fs::{self, File};
use std::process::{Command, Stdio};
use std::error::Error;
// One published crate version, as stored per-line in the crates.io index
// (only the fields below are read; the rest are kept for completeness).
#[derive(Deserialize)]
#[allow(dead_code)]
struct CrateInfo {
    name: String,
    vers: String,
    deps: Vec<DepInfo>,
    cksum: String,
    features: HashMap<String, Vec<String>>,
    yanked: bool,
}

// One dependency entry of a crate version. `kind` is `None` for normal
// dependencies, or e.g. `Some("dev")` for dev-dependencies.
#[derive(Deserialize)]
#[allow(dead_code)]
struct DepInfo {
    name: String,
    req: String,
    features: Vec<String>,
    optional: bool,
    default_features: bool,
    target: Option<String>,
    kind: Option<String>
}
/// Ensures a local checkout of the crates.io index exists and is current:
/// clones it (shallow) on the first run, otherwise runs `git pull` inside it.
///
/// Progress goes to stderr; git's stdout is discarded so it cannot pollute
/// the DOT graph this program writes to stdout.
fn fetch_index() -> Result<(), Box<dyn Error>> {
    match fs::metadata("crates.io-index") {
        Err(_) => {
            eprintln!("Cloning crates.io-index...");
            // Ideally, we'd direct stdout to stderr but there is not a convenient way to do
            // this. See https://www.reddit.com/r/rust/comments/adaj2f/how_to_pipe_child_process_stdout_to_stderr/
            // for alternatives. Ignore stdout instead.
            let status = Command::new("git")
                .arg("clone")
                .arg("--depth").arg("1")
                .arg("https://github.com/rust-lang/crates.io-index")
                .stdout(Stdio::null())
                .status()?;
            if !status.success() {
                return Err(format!("git clone failed: {}", status).into());
            }
        }
        Ok(_) => {
            eprintln!("Pulling crates.io-index...");
            // Fix: run the pull *inside* the checkout. Previously this ran in
            // the process's working directory, so the index was never updated.
            let status = Command::new("git")
                .arg("pull")
                .current_dir("crates.io-index")
                .stdout(Stdio::null())
                .status()?;
            if !status.success() {
                return Err(format!("git pull failed: {}", status).into());
            }
        }
    }
    Ok(())
}
/// Emits a Graphviz digraph of the crates.io dependency graph on stdout,
/// one edge per (crate -> dependency) pair from each crate's latest version.
fn main() -> Result<(), Box<dyn Error>> {
    // Make sure a current copy of the index is on disk.
    fetch_index()?;

    // Require a literal leading dot so dot-directories like `.git` are skipped.
    let mut opts = glob::MatchOptions::new();
    opts.require_literal_leading_dot = true;

    let mut edges = vec![];
    // Index layout: most crates live at `<a>/<b>/<name>`; crates with 1- or
    // 2-character names live directly under the `1/` and `2/` directories.
    let index_paths1 = glob::glob_with("crates.io-index/*/*/*", &opts).unwrap();
    let index_paths2 = glob::glob_with("crates.io-index/[12]/*", &opts).unwrap();
    for path in index_paths1.chain(index_paths2) {
        let path = path.unwrap();
        let file = File::open(&path)?;
        // Each line of an index file is one published version (JSON); only
        // the last line — the most recent version — is graphed.
        let last_line = BufReader::new(file).lines().last().unwrap()?;
        let crate_info: CrateInfo = serde_json::from_str(&last_line)?;
        let mut deps = crate_info.deps.iter()
            // remove dev dependencies
            .filter(|d| d.kind.as_ref().map_or(true, |s| &**s != "dev"))
            // we only need the name
            .map(|d| &d.name)
            .collect::<Vec<_>>();
        // remove any duplicates
        deps.sort();
        deps.dedup();
        // register all the dependencies
        for &dep_name in deps.iter() {
            edges.push((crate_info.name.clone(), dep_name.clone()));
        }
    }
    // it would be nice to use the `graphviz` crate, but that doesn't
    // seem to allow attaching arbitrary attributes at the moment.
    println!("digraph cratesio {{");
    for &(ref source, ref target) in edges.iter() {
        println!(" \"{}\" -> \"{}\"", source, target)
    }
    println!("}}");
    Ok(())
}
| true |
1f3648fa3fe9838a01c6f33f74f4253c3aed8ece
|
Rust
|
TWinsnes/rusty-euler
|
/problem002/problem2.rs
|
UTF-8
| 760 | 3.578125 | 4 |
[
"BSD-3-Clause"
] |
permissive
|
use std;
//////////////////////////////////////////////////////////
//
// Problem:
// By considering the terms in the Fibonacci sequence
// whose values do not exceed four million, find the sum
// of the even-valued terms
//
//////////////////////////////////////////////////////////
// NOTE(review): this file uses pre-1.0 Rust syntax (`#fmt`, `ret`, bare
// `assert expr`, the `int` type). It will not compile with any modern
// toolchain and is kept verbatim as a historical solution.
fn main()
{
    std::io::println(#fmt("Solution: %d",solution(4000000)));
}
// Sums the even-valued Fibonacci terms strictly below `x`,
// starting from the pair (1, 2).
// my first solution
fn solution(x: int) -> int
{
    let sum = 0;
    let previous : int = 1;
    let current : int = 2;
    while current < x
    {
        if current % 2 == 0
        {
            sum += current;
        }
        // Advance the Fibonacci pair.
        let newValue : int = previous + current;
        previous = current;
        current = newValue;
    }
    ret sum;
}
// test for solution
// Even Fibonacci terms below 100: 2 + 8 + 34 = 44.
#[test]
fn testSolution()
{
    let result: int = solution(100);
    assert result == 44;
}
| true |
cc4ee366e7ebf437cfad78fb66f28b1b470bfbec
|
Rust
|
dmarcuse/dynamic_ocl
|
/src/safe/buffer/flags.rs
|
UTF-8
| 3,318 | 2.671875 | 3 |
[
"Apache-2.0"
] |
permissive
|
//! Buffer access and use flags, to allow validity checks to be performed at
//! compile time by the type system.
use crate::raw::{
cl_mem_flags, CL_MEM_ALLOC_HOST_PTR, CL_MEM_HOST_NO_ACCESS, CL_MEM_HOST_READ_ONLY,
CL_MEM_HOST_WRITE_ONLY, CL_MEM_READ_ONLY, CL_MEM_READ_WRITE, CL_MEM_WRITE_ONLY,
};
mod sealed {
    use crate::raw::cl_mem_flags;

    // Sealing trait: only types in this crate may carry OpenCL flag bits,
    // so downstream code cannot forge invalid flag combinations.
    pub trait FlagInternal {
        // Raw `cl_mem_flags` bits contributed by the marker type.
        const FLAGS: cl_mem_flags;
    }
}

/// A trait denoting a buffer host accessibility type.
///
/// Types implementing this trait indicate whether an OpenCL memory object can
/// be read/written by the host.
pub trait HostAccess: sealed::FlagInternal {}
/// A trait denoting a buffer that may be read by the host.
pub trait HostReadable: HostAccess {}
/// A trait denoting a buffer that may be written by the host.
pub trait HostWritable: HostAccess {}

/// The host may not read or write the buffer once it's been created.
pub struct HostNoAccess;
/// The host may only read the buffer once it's been created.
pub struct HostReadOnly;
/// The host may only write the buffer once it's been created.
pub struct HostWriteOnly;
/// The host may read and write the buffer.
pub struct HostReadWrite;

impl sealed::FlagInternal for HostNoAccess {
    const FLAGS: cl_mem_flags = CL_MEM_HOST_NO_ACCESS;
}
impl sealed::FlagInternal for HostReadOnly {
    const FLAGS: cl_mem_flags = CL_MEM_HOST_READ_ONLY;
}
impl sealed::FlagInternal for HostWriteOnly {
    const FLAGS: cl_mem_flags = CL_MEM_HOST_WRITE_ONLY;
}
// Host read+write contributes no flag bits (it is the unflagged state).
impl sealed::FlagInternal for HostReadWrite {
    const FLAGS: cl_mem_flags = 0;
}

// Marker impls wiring each struct to the capabilities it grants.
impl HostAccess for HostNoAccess {}
impl HostAccess for HostReadOnly {}
impl HostReadable for HostReadOnly {}
impl HostAccess for HostWriteOnly {}
impl HostWritable for HostWriteOnly {}
impl HostAccess for HostReadWrite {}
impl HostReadable for HostReadWrite {}
impl HostWritable for HostReadWrite {}
/// A trait denoting a buffer device accessibility type.
///
/// Types implementing this trait indicate whether an OpenCL memory object can
/// be read/written by the OpenCL device.
pub trait DeviceAccess: sealed::FlagInternal {}

/// The device may only read the buffer.
pub struct DeviceReadOnly;
/// The device may only write the buffer.
pub struct DeviceWriteOnly;
/// The device may read and write the buffer.
pub struct DeviceReadWrite;

impl sealed::FlagInternal for DeviceReadOnly {
    const FLAGS: cl_mem_flags = CL_MEM_READ_ONLY;
}
impl sealed::FlagInternal for DeviceWriteOnly {
    const FLAGS: cl_mem_flags = CL_MEM_WRITE_ONLY;
}
impl sealed::FlagInternal for DeviceReadWrite {
    const FLAGS: cl_mem_flags = CL_MEM_READ_WRITE;
}

impl DeviceAccess for DeviceReadOnly {}
impl DeviceAccess for DeviceWriteOnly {}
impl DeviceAccess for DeviceReadWrite {}

/// A trait used to specify extra buffer flags.
pub trait BufferFlags: sealed::FlagInternal {}

/// Don't set any special buffer flags.
pub struct NoFlags;
/// Set the `CL_MEM_ALLOC_HOST_PTR` flag indicating that the buffer should be
/// allocated in host-accessible memory.
pub struct AllocHostPtr;

// `NoFlags` contributes no bits; `AllocHostPtr` contributes exactly its flag.
impl sealed::FlagInternal for NoFlags {
    const FLAGS: cl_mem_flags = 0;
}
impl sealed::FlagInternal for AllocHostPtr {
    const FLAGS: cl_mem_flags = CL_MEM_ALLOC_HOST_PTR;
}

impl BufferFlags for NoFlags {}
impl BufferFlags for AllocHostPtr {}
| true |
b806708cfdbf332bd921d54c74cab1ec13bb3dc0
|
Rust
|
cgwalters/qubes-rpm-oxide
|
/rpm-crypto/src/lib.rs
|
UTF-8
| 2,304 | 2.609375 | 3 |
[] |
no_license
|
//! FFI bindings to RPM’s cryptographic API
//!
//! `librpmio`, which is part of RPM, exposes some cryptographic routines for
//! use by third party applications. This crate provides Rust bindings to that
//! code.
#![forbid(improper_ctypes)]
use openpgp_parser::{AllowWeakHashes, Error};
extern crate openpgp_parser;
mod digests;
mod signatures;
pub mod transaction;
pub use digests::{rpm_hash_len, DigestCtx};
pub use signatures::Signature as RawSignature;
/// An OpenPGP signature
///
/// Pairs the parsed signature packet with a digest context created for the
/// signature's own hash algorithm, ready to be fed the signed data.
pub struct Signature {
    sig: RawSignature,
    ctx: DigestCtx,
}

pub use init::{init, InitToken};

mod init {
    /// Zero-sized token proving that [`init`] has run; APIs elsewhere in the
    /// crate take one as evidence that RPM's configuration is loaded.
    #[derive(Copy, Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash)]
    pub struct InitToken(());

    /// Loads RPM's configuration files exactly once per process and returns
    /// an [`InitToken`]. Safe to call repeatedly from any thread.
    pub fn init() -> InitToken {
        #[allow(deprecated)] // we need to support old Rust
        use std::sync::{Once, ONCE_INIT};
        #[allow(deprecated)] // we need to support old Rust
        static RPM_CRYPTO_INIT_ONCE: Once = ONCE_INIT;
        use std::os::raw::{c_char, c_int};
        use std::ptr;
        #[link(name = "rpm")]
        extern "C" {
            fn rpmReadConfigFiles(file: *const c_char, target: *const c_char) -> c_int;
        }
        // Safety: the C function is called correctly.
        // Null file/target select librpm's defaults; a non-zero return is
        // treated as fatal (asserted) because later calls depend on it.
        RPM_CRYPTO_INIT_ONCE
            .call_once(|| assert_eq!(unsafe { rpmReadConfigFiles(ptr::null(), ptr::null()) }, 0));
        InitToken(())
    }
}
impl Signature {
    /// Parse an OpenPGP signature. The signature is validated before being
    /// passed to RPM. If the time is not zero, the signature is checked to not
    /// be from the future and to not have expired.
    pub fn parse(
        untrusted_buffer: &[u8],
        time: u32,
        allow_weak_hashes: AllowWeakHashes,
        token: InitToken,
    ) -> Result<Self, Error> {
        let sig = RawSignature::parse(untrusted_buffer, time, allow_weak_hashes, token)?;
        // The hash algorithm was accepted by RawSignature::parse above, so
        // creating a digest context for it is expected to succeed.
        let ctx = DigestCtx::init(sig.hash_algorithm(), allow_weak_hashes, token)
            .expect("Digest algorithm already validated")
        Ok(Self { sig, ctx })
    }

    /// Update the signature's internal digest context with data from `buf`.
    pub fn update(&mut self, buf: &[u8]) {
        self.ctx.update(buf)
    }

    /// The OpenPGP public-key algorithm ID of this signature.
    pub fn public_key_algorithm(&self) -> u8 {
        self.sig.public_key_algorithm()
    }
}
| true |
1186ba7723e223ce088f668bfd12297b9e76b108
|
Rust
|
silverweed/ecsde
|
/inle/inle_input/src/bindings.rs
|
UTF-8
| 8,715 | 2.90625 | 3 |
[] |
no_license
|
use super::joystick::{self, Joystick_Button};
use super::keyboard::Key;
use super::mouse::{self, Mouse_Button};
use inle_common::stringid::String_Id;
use std::collections::HashMap;
use std::path::Path;
use std::vec::Vec;
mod parsing;
use self::modifiers::*;
/// A single physical input source, without modifier keys.
#[derive(Copy, Clone, Debug, PartialEq, PartialOrd, Eq, Ord, Hash)]
pub enum Input_Action_Simple {
    Key(Key),
    Joystick(Joystick_Button),
    Mouse(Mouse_Button),
    // `up` distinguishes wheel-up from wheel-down.
    Mouse_Wheel { up: bool },
}

/// A physical input plus the modifier bitmask that must be held with it.
#[derive(Copy, Clone, Debug, PartialEq, PartialOrd, Eq, Ord, Hash)]
pub struct Input_Action {
    pub action: Input_Action_Simple,
    pub modifiers: Input_Action_Modifiers,
}
/// Which extreme of a virtual axis a button-like input emulates.
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
pub enum Axis_Emulation_Type {
    Min,
    Max,
}

impl Axis_Emulation_Type {
    /// The axis value this emulation type maps to: -1.0 for `Min`, +1.0 for `Max`.
    #[inline]
    pub fn assoc_value(self) -> f32 {
        if self == Axis_Emulation_Type::Min {
            -1.0
        } else {
            1.0
        }
    }
}
/// Axis bindings, both from real joystick axes and from emulated
/// (button/key-driven) axes.
pub struct Axis_Bindings {
    // Every known virtual axis name.
    pub axes_names: Vec<String_Id>,
    // For each real joystick axis: the virtual axes it drives.
    pub real: [Vec<String_Id>; joystick::Joystick_Axis::_Count as usize],
    // Button-like inputs that push a virtual axis to its min or max extreme.
    pub emulated: HashMap<Input_Action, Vec<(String_Id, Axis_Emulation_Type)>>,
}

/// Struct containing the mappings between input and user-defined actions and axes_mappings.
/// e.g. "Key::Q => action_quit".
pub struct Input_Bindings {
    /// { input_action => [action_name] }
    pub action_bindings: HashMap<Input_Action, Vec<String_Id>>,
    pub axis_bindings: Axis_Bindings,
}
impl Input_Bindings {
    /// Parses both config files into a complete set of bindings.
    pub fn create_from_config(
        action_bindings_file: &Path,
        axis_bindings_file: &Path,
    ) -> Result<Input_Bindings, String> {
        Ok(Input_Bindings {
            action_bindings: parsing::parse_action_bindings_file(action_bindings_file)?,
            axis_bindings: parsing::parse_axis_bindings_file(axis_bindings_file)?,
        })
    }

    /// Names of the virtual axes driven by the given physical joystick axis.
    pub fn get_virtual_axes_from_real_axis(
        &self,
        real_axis: joystick::Joystick_Axis,
    ) -> &[String_Id] {
        &self.axis_bindings.real[real_axis as usize]
    }

    /// Makes an inverse search in the bindings, returning all kinds of actions that yield the given `action_name`.
    #[cfg(debug_assertions)]
    pub fn get_all_actions_triggering(&self, action_name: String_Id) -> Vec<Input_Action> {
        self.action_bindings
            .iter()
            .filter_map(|(action, names)| {
                if names.contains(&action_name) {
                    Some(*action)
                } else {
                    None
                }
            })
            .collect()
    }

    /// Action names bound to this key + modifier combination, if any.
    pub(super) fn get_key_actions(
        &self,
        code: Key,
        modifiers: Input_Action_Modifiers,
    ) -> Option<&Vec<String_Id>> {
        self.action_bindings.get(&Input_Action::new_with_modifiers(
            Input_Action_Simple::Key(code),
            modifiers,
        ))
    }

    /// Action names bound to this joystick button (modifiers not supported).
    pub(super) fn get_joystick_actions(&self, button: Joystick_Button) -> Option<&[String_Id]> {
        // @Incomplete: do we want to support modifiers on joysticks?
        let input_action = Input_Action::new(Input_Action_Simple::Joystick(button));
        self.action_bindings.get(&input_action).map(Vec::as_slice)
    }

    /// Action names bound to this mouse button + modifier combination.
    pub(super) fn get_mouse_actions(
        &self,
        button: mouse::Mouse_Button,
        modifiers: Input_Action_Modifiers,
    ) -> Option<&Vec<String_Id>> {
        self.action_bindings.get(&Input_Action::new_with_modifiers(
            Input_Action_Simple::Mouse(button),
            modifiers,
        ))
    }

    /// Action names bound to a wheel scroll (direction given by `up`).
    pub(super) fn get_mouse_wheel_actions(
        &self,
        up: bool,
        modifiers: Input_Action_Modifiers,
    ) -> Option<&Vec<String_Id>> {
        self.action_bindings.get(&Input_Action::new_with_modifiers(
            Input_Action_Simple::Mouse_Wheel { up },
            modifiers,
        ))
    }

    /// Emulated axes fed by this key, with the extreme each is pushed to.
    /// Note: emulated-axis lookups ignore modifiers (`Input_Action::new`
    /// uses an empty modifier mask).
    pub(super) fn get_key_emulated_axes(
        &self,
        code: Key,
    ) -> Option<&Vec<(String_Id, Axis_Emulation_Type)>> {
        self.axis_bindings
            .emulated
            .get(&Input_Action::new(Input_Action_Simple::Key(code)))
    }

    /// Emulated axes fed by this joystick button.
    pub(super) fn get_joystick_emulated_axes(
        &self,
        button: Joystick_Button,
    ) -> Option<&Vec<(String_Id, Axis_Emulation_Type)>> {
        self.axis_bindings
            .emulated
            .get(&Input_Action::new(Input_Action_Simple::Joystick(button)))
    }

    /// Emulated axes fed by this mouse button.
    pub(super) fn get_mouse_emulated_axes(
        &self,
        button: mouse::Mouse_Button,
    ) -> Option<&Vec<(String_Id, Axis_Emulation_Type)>> {
        self.axis_bindings
            .emulated
            .get(&Input_Action::new(Input_Action_Simple::Mouse(button)))
    }

    /// Emulated axes fed by a wheel scroll in the given direction.
    pub(super) fn get_mouse_wheel_emulated_axes(
        &self,
        up: bool,
    ) -> Option<&Vec<(String_Id, Axis_Emulation_Type)>> {
        self.axis_bindings
            .emulated
            .get(&Input_Action::new(Input_Action_Simple::Mouse_Wheel { up }))
    }
}
/// Bitmask of held modifier keys; one bit per left/right modifier
/// (see the `modifiers` module for the bit constants).
pub type Input_Action_Modifiers = u8;

// @WaitForStable: make this const
/// Maps a modifier key to its bit; any non-modifier key maps to 0.
pub fn input_action_modifier_from_key(key: Key) -> Input_Action_Modifiers {
    match key {
        Key::LControl => MOD_LCTRL,
        Key::RControl => MOD_RCTRL,
        Key::LShift => MOD_LSHIFT,
        Key::RShift => MOD_RSHIFT,
        Key::LAlt => MOD_LALT,
        Key::RAlt => MOD_RALT,
        Key::LSystem => MOD_LSUPER,
        Key::RSystem => MOD_RSUPER,
        _ => 0,
    }
}
/// Modifier bit constants. The combined masks (`MOD_CTRL`, `MOD_SHIFT`,
/// `MOD_ALT`, `MOD_SUPER`) match either the left or the right key.
pub mod modifiers {
    use super::Input_Action_Modifiers;

    #[allow(clippy::identity_op)]
    pub const MOD_LCTRL: Input_Action_Modifiers = 1 << 0;
    pub const MOD_RCTRL: Input_Action_Modifiers = 1 << 1;
    pub const MOD_CTRL: Input_Action_Modifiers = MOD_LCTRL | MOD_RCTRL;
    pub const MOD_LSHIFT: Input_Action_Modifiers = 1 << 2;
    pub const MOD_RSHIFT: Input_Action_Modifiers = 1 << 3;
    pub const MOD_SHIFT: Input_Action_Modifiers = MOD_LSHIFT | MOD_RSHIFT;
    pub const MOD_LALT: Input_Action_Modifiers = 1 << 4;
    pub const MOD_RALT: Input_Action_Modifiers = 1 << 5;
    pub const MOD_ALT: Input_Action_Modifiers = MOD_LALT | MOD_RALT;
    pub const MOD_LSUPER: Input_Action_Modifiers = 1 << 6;
    pub const MOD_RSUPER: Input_Action_Modifiers = 1 << 7;
    pub const MOD_SUPER: Input_Action_Modifiers = MOD_LSUPER | MOD_RSUPER;
}
impl Input_Action {
    /// Builds an action with an empty modifier mask.
    pub fn new(action: Input_Action_Simple) -> Self {
        Self::new_with_modifiers(action, 0)
    }

    /// Builds an action with an explicit modifier mask.
    pub fn new_with_modifiers(
        action: Input_Action_Simple,
        modifiers: Input_Action_Modifiers,
    ) -> Self {
        Self { action, modifiers }
    }

    /// True when any bit of `mask` is set in this action's modifiers.
    #[inline]
    fn has(&self, mask: Input_Action_Modifiers) -> bool {
        self.modifiers & mask != 0
    }

    #[inline]
    pub fn has_modifiers(&self) -> bool {
        self.modifiers != 0
    }

    #[inline]
    pub fn has_ctrl(&self) -> bool {
        self.has(MOD_CTRL)
    }

    #[inline]
    pub fn has_lctrl(&self) -> bool {
        self.has(MOD_LCTRL)
    }

    #[inline]
    pub fn has_rctrl(&self) -> bool {
        self.has(MOD_RCTRL)
    }

    #[inline]
    pub fn has_shift(&self) -> bool {
        self.has(MOD_SHIFT)
    }

    #[inline]
    pub fn has_lshift(&self) -> bool {
        self.has(MOD_LSHIFT)
    }

    #[inline]
    pub fn has_rshift(&self) -> bool {
        self.has(MOD_RSHIFT)
    }

    #[inline]
    pub fn has_alt(&self) -> bool {
        self.has(MOD_ALT)
    }

    #[inline]
    pub fn has_lalt(&self) -> bool {
        self.has(MOD_LALT)
    }

    /// Right Alt (named after its common AltGr role).
    #[inline]
    pub fn has_altgr(&self) -> bool {
        self.has(MOD_RALT)
    }

    #[inline]
    pub fn has_super(&self) -> bool {
        self.has(MOD_SUPER)
    }

    #[inline]
    pub fn has_lsuper(&self) -> bool {
        self.has(MOD_LSUPER)
    }

    #[inline]
    pub fn has_rsuper(&self) -> bool {
        self.has(MOD_RSUPER)
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    // Checks the full key-to-bit table, plus two non-modifier keys mapping to 0.
    #[test]
    fn modifier_from_key() {
        assert_eq!(input_action_modifier_from_key(Key::LAlt), MOD_LALT);
        assert_eq!(input_action_modifier_from_key(Key::RAlt), MOD_RALT);
        assert_eq!(input_action_modifier_from_key(Key::LControl), MOD_LCTRL);
        assert_eq!(input_action_modifier_from_key(Key::RControl), MOD_RCTRL);
        assert_eq!(input_action_modifier_from_key(Key::LSystem), MOD_LSUPER);
        assert_eq!(input_action_modifier_from_key(Key::RSystem), MOD_RSUPER);
        assert_eq!(input_action_modifier_from_key(Key::LShift), MOD_LSHIFT);
        assert_eq!(input_action_modifier_from_key(Key::RShift), MOD_RSHIFT);
        assert_eq!(input_action_modifier_from_key(Key::Space), 0);
        assert_eq!(input_action_modifier_from_key(Key::H), 0);
    }
}
| true |
48a4bf083cc9ce335702324f52bb94c61cb220af
|
Rust
|
grogers0/advent_of_code
|
/2020/day13/src/main.rs
|
UTF-8
| 2,316 | 3.421875 | 3 |
[
"MIT"
] |
permissive
|
use std::io::{self, Read};
/// Bus IDs in the schedule (parsed into 16 bits).
type Bus = u16;

/// Parses the two-line puzzle input: the earliest departure time, then a
/// comma-separated schedule where `x` marks an out-of-service slot. Each
/// in-service bus is returned together with its column offset in the list.
fn parse(puzzle_input: &str) -> (u64, Vec<(Bus, usize)>) {
    let mut lines = puzzle_input.lines();
    let earliest_departure = lines.next().unwrap().parse().unwrap();
    let schedule = lines.next().unwrap();
    let mut buses = Vec::new();
    for (offset, entry) in schedule.split(',').enumerate() {
        if entry != "x" {
            buses.push((entry.parse().unwrap(), offset));
        }
    }
    // The input must be exactly two lines.
    assert!(lines.next().is_none());
    (earliest_departure, buses)
}
// Minutes until `bus` next departs at or after time `t`. The outer `% bus`
// maps the "departing right now" case (bus - 0) back to a wait of 0.
fn wait_time(t: u64, bus: Bus) -> Bus {
    (bus - (t % bus as u64) as Bus) % bus
}
/// Finds the bus with the shortest wait after `earliest_departure` and
/// returns its ID multiplied by that wait.
fn part1(earliest_departure: u64, buses: &Vec<(Bus, usize)>) -> u64 {
    // Fold keeps the first pair with the strictly smallest wait, matching
    // the original `<` comparison (and yielding (0, MAX) for empty input).
    let (best_bus, best_wait) = buses.iter().fold((0, Bus::MAX), |(bb, bw), &(bus, _)| {
        let wait = wait_time(earliest_departure, bus);
        if wait < bw {
            (bus, wait)
        } else {
            (bb, bw)
        }
    });
    best_bus as u64 * best_wait as u64
}
// Finds the earliest time t such that every bus_i departs at t + offset_i,
// by sieving: `mult` is the product of the periods satisfied so far, so
// stepping t by `mult` preserves all earlier constraints while searching
// for the next bus to line up (CRT-style, relies on coprime IDs).
fn part2(buses: &Vec<(Bus, usize)>) -> u64 {
    let mut t = 0;
    let mut mult = 1;
    // NOTE - Luckily the bus IDs given are coprime, I'm pretty sure otherwise this wouldn't find the first occurence
    for &(bus, offset) in buses {
        while wait_time(t + offset as u64, bus) != 0 {
            t += mult;
        }
        mult *= bus as u64;
    }
    // Sanity check: every constraint holds at the returned time.
    assert!(buses.iter().all(|&(bus, offset)| wait_time(t + offset as u64, bus) == 0));
    t
}
/// Reads the puzzle input from stdin and prints both answers.
fn main() {
    let mut puzzle_input = String::new();
    io::stdin().read_to_string(&mut puzzle_input).unwrap();
    let (earliest_departure, buses) = parse(&puzzle_input);
    println!("{}", part1(earliest_departure, &buses));
    println!("{}", part2(&buses));
}
#[cfg(test)]
mod tests {
    use super::*;

    // Worked example from the puzzle statement.
    const EX: &str = "939
7,13,x,x,59,x,31,19";

    #[test]
    fn test_part1() {
        let (earliest_departure, buses) = parse(EX);
        assert_eq!(295, part1(earliest_departure, &buses));
    }

    // Expected values for part 2 come from the puzzle's listed examples.
    #[test]
    fn test_part2() {
        assert_eq!(1068781, part2(&parse(EX).1));
        assert_eq!(3417, part2(&parse("0\n17,x,13,19").1));
        assert_eq!(754018, part2(&parse("0\n67,7,59,61").1));
        assert_eq!(779210, part2(&parse("0\n67,x,7,59,61").1));
        assert_eq!(1261476, part2(&parse("0\n67,7,x,59,61").1));
        assert_eq!(1202161486, part2(&parse("0\n1789,37,47,1889").1));
    }
}
| true |
0d1d29f2cd9fd161f54e8a63d08117a47b9d1cd9
|
Rust
|
josecm/exercism-rust
|
/sublist/src/lib.rs
|
UTF-8
| 575 | 3.421875 | 3 |
[] |
no_license
|
/// Relationship of the first list to the second.
#[derive(Debug, PartialEq)]
pub enum Comparison {
    Equal,
    Sublist,
    Superlist,
    Unequal,
}

/// Returns true when `f` is a *strict* contiguous sublist of `s`
/// (i.e. `s` is longer than `f` and contains `f` as a window).
fn is_sublist<T: PartialEq>(f: &[T], s: &[T]) -> bool {
    if s.len() <= f.len() { return false }
    // Fix: `slice::windows` panics when the window size is 0, and the empty
    // list is a sublist of every non-empty list — handle it explicitly.
    if f.is_empty() { return true }
    s.windows(f.len()).any(|w| w == f)
}

/// Classifies `first_list` relative to `second_list`:
/// equal, a contiguous sublist, a superlist, or unrelated.
pub fn sublist<T: PartialEq>(first_list: &[T], second_list: &[T]) -> Comparison {
    match (first_list, second_list) {
        (f, s) if s == f => Comparison::Equal,
        (f, s) if is_sublist(f, s) => Comparison::Sublist,
        (f, s) if is_sublist(s, f) => Comparison::Superlist,
        _ => Comparison::Unequal,
    }
}
| true |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.