blob_id
stringlengths 40
40
| language
stringclasses 1
value | repo_name
stringlengths 5
140
| path
stringlengths 5
183
| src_encoding
stringclasses 6
values | length_bytes
int64 12
5.32M
| score
float64 2.52
4.94
| int_score
int64 3
5
| detected_licenses
listlengths 0
47
| license_type
stringclasses 2
values | text
stringlengths 12
5.32M
| download_success
bool 1
class |
---|---|---|---|---|---|---|---|---|---|---|---|
874a427d14d5d832182dfd4f490af284b30fdcd6
|
Rust
|
khirotaka/hello_autocxx
|
/src/main.rs
|
UTF-8
| 387 | 2.640625 | 3 |
[] |
no_license
|
use autocxx::include_cpp;
// Generate Rust bindings for the C++ declarations in hello.h.
// `safety!(unsafe)` opts out of per-call unsafe blocks for the generated
// wrappers; each `generate!` names one C++ item to expose via `ffi`.
include_cpp! {
#include "hello.h"
safety!(unsafe)
generate!("greet")
generate!("add")
generate!("func::mul")
}
/// Entry point: exercise the C++ functions exposed through the generated
/// `ffi` module (`greet`, `add`, and `func::mul`).
fn main() {
    let name = String::from("Taro");
    ffi::greet(&name);

    let a: f32 = 10.0;
    let b: f32 = 0.4;
    let c: f32 = 0.6;
    println!("{}", ffi::add(a, b));
    println!("{}", ffi::func::mul(b, c));
}
| true |
62bf2d5aca0ee9d27de709b949e34a8dbee51434
|
Rust
|
EinKindAndy/evolutionary_math
|
/evolutionary_geometry/src/bin/app.rs
|
UTF-8
| 909 | 3.015625 | 3 |
[
"MIT"
] |
permissive
|
use evolutionary_geometry::euclidean::Tetragon;
use evolutionary_geometry::euclidean::Hexahedron;
use evolutionary_geometry::euclidean::Point2D;
use evolutionary_geometry::euclidean::Point3D;
/// Build and display a 2x2 square and a 2x2x2 cube from the
/// `evolutionary_geometry` primitives.
fn main() {
    // Corners of the square, counter-clockwise starting at the origin.
    let bottom_left = Point2D::new(0.0, 0.0);
    let bottom_right = Point2D::new(2.0, 0.0);
    let top_right = Point2D::new(2.0, 2.0);
    let top_left = Point2D::new(0.0, 2.0);
    let tetragon = Tetragon::new(bottom_left, bottom_right, top_right, top_left);
    tetragon.show();

    // Cube vertices: bottom face (z = 0) first, then the top face (z = 2).
    let v1 = Point3D::new(2.0, 0.0, 0.0);
    let v2 = Point3D::new(2.0, 2.0, 0.0);
    let v3 = Point3D::new(0.0, 2.0, 0.0);
    let v4 = Point3D::new(0.0, 0.0, 0.0);
    let v5 = Point3D::new(2.0, 0.0, 2.0);
    let v6 = Point3D::new(2.0, 2.0, 2.0);
    let v7 = Point3D::new(0.0, 2.0, 2.0);
    let v8 = Point3D::new(0.0, 0.0, 2.0);
    let hexahedron = Hexahedron::new(v1, v2, v3, v4, v5, v6, v7, v8);
    hexahedron.show();
}
| true |
7540e0246be21c9139542ec2f816b9c202bf1f8f
|
Rust
|
vavrusa/domain-core
|
/src/base/opt/rfc6975.rs
|
UTF-8
| 3,128 | 2.75 | 3 |
[
"BSD-3-Clause"
] |
permissive
|
//! EDNS Options from RFC 6975.
use core::slice;
use super::super::iana::{OptionCode, SecAlg};
use super::super::message_builder::OptBuilder;
use super::super::octets::{
Compose, OctetsBuilder, OctetsRef, Parse, ParseError, Parser, ShortBuf
};
use super::CodeOptData;
//------------ Dau, Dhu, N3u -------------------------------------------------
// Generates one EDNS option type (Dau, Dhu, or N3u — RFC 6975).
// Each generated type wraps an octet sequence whose bytes are a list of
// security algorithm numbers, one byte per algorithm.
macro_rules! option_type {
    ( $name:ident ) => {
        #[derive(Clone, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
        pub struct $name<Octets> {
            // Raw option data, stored without validation.
            octets: Octets,
        }
        impl<Octets> $name<Octets> {
            // Wrap raw octets as this option type.
            pub fn from_octets(octets: Octets) -> Self {
                $name { octets }
            }
            // Iterate over the security algorithms encoded in the data.
            pub fn iter(&self) -> SecAlgsIter
            where Octets: AsRef<[u8]> {
                SecAlgsIter::new(self.octets.as_ref())
            }
        }
        impl $name<()> {
            // Append this option, listing `algs`, to an OPT record under
            // construction.
            pub fn push<Target: OctetsBuilder>(
                builder: &mut OptBuilder<Target>,
                algs: &[SecAlg]
            ) -> Result<(), ShortBuf> {
                // The option length field on the wire is 16 bits.
                assert!(algs.len() <= core::u16::MAX as usize);
                builder.push_raw_option(OptionCode::$name, |target| {
                    target.append_all(|target| {
                        for alg in algs {
                            alg.to_int().compose(target)?;
                        }
                        Ok(())
                    })
                })
            }
        }
        //--- Parse and Compose
        impl<Ref: OctetsRef> Parse<Ref> for $name<Ref::Range> {
            fn parse(parser: &mut Parser<Ref>) -> Result<Self, ParseError> {
                // The option data is everything left in the parser.
                let len = parser.remaining();
                parser.parse_octets(len).map(Self::from_octets)
            }
            fn skip(parser: &mut Parser<Ref>) -> Result<(), ParseError> {
                parser.advance_to_end();
                Ok(())
            }
        }
        impl<Octets: AsRef<[u8]>> Compose for $name<Octets> {
            fn compose<T: OctetsBuilder>(
                &self,
                target: &mut T
            ) -> Result<(), ShortBuf> {
                target.append_slice(self.octets.as_ref())
            }
        }
        //--- CodeOptData
        impl<Octets> CodeOptData for $name<Octets> {
            // The option code variant shares its name with the generated type.
            const CODE: OptionCode = OptionCode::$name;
        }
        //--- IntoIter
        impl<'a, Octets: AsRef<[u8]>> IntoIterator for &'a $name<Octets> {
            type Item = SecAlg;
            type IntoIter = SecAlgsIter<'a>;
            fn into_iter(self) -> Self::IntoIter {
                self.iter()
            }
        }
    }
}
option_type!(Dau);
option_type!(Dhu);
option_type!(N3u);
//------------ SecAlgsIter ---------------------------------------------------
/// Iterator over the security algorithms stored in an option's data,
/// decoding one `SecAlg` per raw byte.
pub struct SecAlgsIter<'a>(slice::Iter<'a, u8>);

impl<'a> SecAlgsIter<'a> {
    /// Start iterating over the given raw option bytes.
    fn new(slice: &'a [u8]) -> Self {
        Self(slice.iter())
    }
}

impl<'a> Iterator for SecAlgsIter<'a> {
    type Item = SecAlg;

    fn next(&mut self) -> Option<Self::Item> {
        let byte = self.0.next()?;
        Some(SecAlg::from_int(*byte))
    }
}
| true |
31f586009ac99e9d54ab2d86ddfbd9274e03c369
|
Rust
|
hyyking/executor
|
/src/park/mod.rs
|
UTF-8
| 528 | 2.65625 | 3 |
[] |
no_license
|
mod thread;
pub use thread::Parker;
/// A primitive for blocking the current thread until it is woken.
///
/// Implementors hand out an associated [`Unpark`] handle that other
/// threads can use to wake the parked thread.
pub trait Park {
    type Handle: Unpark;
    /// Obtain a handle that can later unpark this parker.
    fn handle(&self) -> Self::Handle;
    /// Block until unparked.
    fn park(&mut self) -> Result<(), std::io::Error>;
    /// Block until unparked or until `dur` has elapsed.
    fn park_timeout(&mut self, dur: std::time::Duration) -> Result<(), std::io::Error>;
}
/// Wakes an associated parked thread; must be shareable across threads.
pub trait Unpark: Sync + Send + 'static {
    fn unpark(&self);
}
// Forward through smart pointers so boxed/shared handles remain usable.
impl<T: Unpark> Unpark for Box<T> {
    fn unpark(&self) {
        (**self).unpark()
    }
}
impl<T: Unpark> Unpark for std::sync::Arc<T> {
    fn unpark(&self) {
        (**self).unpark()
    }
}
| true |
d584d5c084715cfebb109fb42f8b1e5578fd5340
|
Rust
|
Johan-Mi/persimmon
|
/game/src/player.rs
|
UTF-8
| 507 | 2.59375 | 3 |
[
"Unlicense"
] |
permissive
|
use constants::TILE_SIZE;
use gfx::Gfx;
use player::Player;
use sdl2::{pixels::Color, rect::Rect};
/// Convert a player's tile position into pixel coordinates on screen.
const fn player_screen_coords(player: &Player) -> (i32, i32) {
    let x = (player.x * TILE_SIZE) as i32;
    let y = (player.y * TILE_SIZE) as i32;
    (x, y)
}

/// Draw the player as a solid blue 16x16 square at its screen position.
pub fn render_player(player: &Player, gfx: &mut Gfx) {
    let (x, y) = player_screen_coords(player);
    gfx.canvas.set_draw_color(Color::BLUE);
    gfx.canvas.fill_rect(Rect::new(x, y, 16, 16)).unwrap();
}
| true |
5b5953e673854c2b409cc8edc35977e45399fad0
|
Rust
|
mkanenobu/til
|
/rust/redis_example/src/redis_client.rs
|
UTF-8
| 509 | 2.5625 | 3 |
[
"WTFPL"
] |
permissive
|
extern crate redis;
use redis::Commands;
const REDIS_URL: &str = "redis://127.0.0.1:8376";
pub fn get(key: String) -> Result<String, anyhow::Error> {
let client = redis::Client::open(REDIS_URL)?;
let mut conn = client.get_connection()?;
let r: String = conn.get(key)?;
Ok(r)
}
pub fn set(key: String, val: String) -> Result<String, anyhow::Error> {
let client = redis::Client::open(REDIS_URL)?;
let mut conn = client.get_connection()?;
let r = conn.set(key, val)?;
Ok(r)
}
| true |
1d139bff663c540ff2fe11d3e96181eb8a2a7c5a
|
Rust
|
ajtribick/AdventOfCode2020
|
/day05/src/main.rs
|
UTF-8
| 2,236 | 3.40625 | 3 |
[
"MIT"
] |
permissive
|
use std::{
error::Error,
fmt,
fs::File,
io::{BufRead, BufReader},
path::PathBuf,
};
/// Errors specific to the day-5 puzzle.
#[derive(Debug)]
enum Day5Error {
    /// The input contained no boarding passes.
    NoData,
    /// No gap matching the empty-seat criterion was found.
    NotFound,
}

impl fmt::Display for Day5Error {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let description = match self {
            Day5Error::NoData => "No data",
            Day5Error::NotFound => "Result not found",
        };
        write!(f, "{}", description)
    }
}

impl Error for Day5Error {}
/// Convert a boarding pass into its numeric seat ID.
///
/// Each character is one bit, most significant first: 'B' and 'R' count
/// as 1, every other character as 0.
fn calculate_id(pass: &str) -> i32 {
    let mut id = 0;
    for c in pass.chars() {
        let bit = if c == 'B' || c == 'R' { 1 } else { 0 };
        id = (id << 1) + bit;
    }
    id
}
/// Part 1: print the highest seat ID among all boarding passes.
fn part1(lines: impl Iterator<Item = impl AsRef<str>>) -> Result<(), Day5Error> {
    match lines.map(|line| calculate_id(line.as_ref())).max() {
        Some(max_id) => {
            println!("Part 1: maximum ID = {}", max_id);
            Ok(())
        }
        None => Err(Day5Error::NoData),
    }
}
/// Part 2: locate the single missing seat ID between two occupied seats.
fn part2(lines: impl Iterator<Item = impl AsRef<str>>) -> Result<(), Day5Error> {
    let mut ids: Vec<i32> = lines.map(|line| calculate_id(line.as_ref())).collect();
    ids.sort_unstable();
    // A gap of exactly 2 between consecutive sorted IDs brackets the empty seat.
    let before_gap = ids
        .windows(2)
        .find(|window| window[1] - window[0] == 2)
        .map(|window| window[0])
        .ok_or(Day5Error::NotFound)?;
    println!("Part 2, found empty seat at {}", before_gap + 1);
    Ok(())
}
/// Load the puzzle input and run both parts.
fn run() -> Result<(), Box<dyn Error>> {
    let path: PathBuf = ["data", "day05", "input.txt"].iter().collect();
    let file = File::open(path)?;
    let lines: Vec<String> = BufReader::new(file)
        .lines()
        .collect::<Result<_, _>>()?;
    part1(lines.iter())?;
    part2(lines.iter())?;
    Ok(())
}
/// Run the solver and translate the outcome into a process exit code.
fn main() {
    let code = match run() {
        Ok(()) => 0,
        Err(e) => {
            eprintln!("Error occurred: {}", e);
            1
        }
    };
    std::process::exit(code);
}
#[cfg(test)]
mod test {
    use super::calculate_id;
    // Example boarding passes and their expected seat IDs, taken from the
    // Advent of Code 2020 day-5 puzzle statement.
    const EXAMPLE_IDS: [(&str, i32); 4] = [
        ("FBFBBFFRLR", 357),
        ("BFFFBBFRRR", 567),
        ("FFFBBBFRRR", 119),
        ("BBFFBBFRLL", 820),
    ];
    // Each example pass must decode to its published seat ID.
    #[test]
    fn parse_test() {
        for &(pass, expected_id) in &EXAMPLE_IDS {
            let actual_id = calculate_id(pass);
            assert_eq!(actual_id, expected_id);
        }
    }
}
| true |
6b8f84a09c9c94eae7a9b32b33b2fc17f19b1175
|
Rust
|
christopherjmedlin/hackman
|
/src/cpu/reg.rs
|
UTF-8
| 8,144 | 3.21875 | 3 |
[] |
no_license
|
use std::fmt;
/// The CPU register file (Z80-style, per the z80.info decoding tables
/// referenced below).
///
/// 8-bit registers are stored individually; the 16-bit pairs (BC, DE,
/// HL, AF) are assembled on demand by the accessor methods.
pub struct Registers {
    pub b: u8,
    pub c: u8,
    pub d: u8,
    pub e: u8,
    pub h: u8,
    pub l: u8,
    /// Accumulator.
    pub a: u8,
    /// Flags register.
    pub f: u8,
    /// IX index register, stored as high/low halves.
    pub ixh: u8,
    pub ixl: u8,
    /// IY index register, stored as high/low halves.
    pub iyh: u8,
    pub iyl: u8,
    /// When set, H/L/HL accesses are redirected to IX.
    pub ix_patched: bool,
    /// When set, H/L/HL accesses are redirected to IY.
    pub iy_patched: bool,
    /// Stack pointer.
    pub sp: u16,
    // presumably the Z80 I (interrupt page) register — confirm in the core
    pub i: u8,
    // presumably the Z80 R (refresh) register — confirm in the core
    pub r: u8,
    /// Program counter.
    pub pc: u16,
}
impl Registers {
    /// Create a register file with everything zeroed and no IX/IY patching.
    pub fn new() -> Self {
        Registers {
            a: 0,
            f: 0,
            b: 0,
            c: 0,
            d: 0,
            e: 0,
            h: 0,
            l: 0,
            ixh: 0,
            ixl: 0,
            iyh: 0,
            iyl: 0,
            ix_patched: false,
            iy_patched: false,
            sp: 0,
            i: 0,
            r: 0,
            pc: 0,
        }
    }
    /// Read 8 bit register at index <index> according to the table on
    /// this web page: http://www.z80.info/decoding.htm
    // NOTE(review): index 6 returns F here, while the standard r-table has
    // (HL) at 6 — presumably the memory operand case is handled by the
    // caller; confirm against the instruction decoder.
    pub fn read_8bit_r(&mut self, index: u8) -> u8 {
        match index {
            0 => self.b,
            1 => self.c,
            2 => self.d,
            3 => self.e,
            // H and L are transparently redirected to the IX/IY halves
            // while the corresponding patch flag is set.
            4 => {
                if self.ix_patched {
                    self.ixh
                } else if self.iy_patched {
                    self.iyh
                } else {
                    self.h
                }
            }
            5 => {
                if self.ix_patched {
                    self.ixl
                } else if self.iy_patched {
                    self.iyl
                } else {
                    self.l
                }
            }
            6 => self.f,
            7 => self.a,
            // Out-of-range indices read as 0.
            _ => 0,
        }
    }
    /// Write byte <byte> to 8 bit register at index <index> according to the
    /// table on the z80 decoding opcodes doc mentioned above
    pub fn write_8bit_r(&mut self, index: u8, byte: u8) {
        match index {
            0 => self.b = byte,
            1 => self.c = byte,
            2 => self.d = byte,
            3 => self.e = byte,
            // Same IX/IY redirection as read_8bit_r.
            4 => {
                if self.ix_patched {
                    self.ixh = byte;
                } else if self.iy_patched {
                    self.iyh = byte;
                } else {
                    self.h = byte;
                }
            }
            5 => {
                if self.ix_patched {
                    self.ixl = byte;
                } else if self.iy_patched {
                    self.iyl = byte;
                } else {
                    self.l = byte;
                }
            }
            6 => self.f = byte,
            7 => self.a = byte,
            // Out-of-range indices are ignored.
            _ => {}
        }
    }
    /// Reads a 16 bit registry according to the tables on the z80
    /// decoding opcodes documentation
    ///
    /// If sp is true, it will use the "rp" table with the stack pointer
    /// as the third index. Otherwise, it will use the rp2 table with the
    /// AF register as the third index.
    pub fn read_16bit_r(&mut self, index: u8, sp: bool) -> u16 {
        match index {
            0 => self.bc(),
            1 => self.de(),
            2 => self.hl(),
            3 => {
                if sp {
                    self.sp
                } else {
                    self.af()
                }
            }
            // Out-of-range indices read as 0.
            _ => 0,
        }
    }
    /// Same as read_16bit_r but it instead writes 16 bit integer <word>
    /// to it
    pub fn write_16bit_r(&mut self, index: u8, sp: bool, word: u16) {
        match index {
            0 => self.write_bc(word),
            1 => self.write_de(word),
            2 => self.write_hl(word),
            3 => {
                if sp {
                    self.sp = word
                } else {
                    self.write_af(word)
                }
            }
            _ => {}
        }
    }
    /// The BC pair: B is the high byte, C the low byte.
    pub fn bc(&self) -> u16 {
        (self.b as u16) << 8 | (self.c as u16)
    }
    /// The DE pair: D is the high byte, E the low byte.
    pub fn de(&self) -> u16 {
        (self.d as u16) << 8 | (self.e as u16)
    }
    /// The HL pair — or IX/IY while the corresponding patch flag is set.
    pub fn hl(&self) -> u16 {
        if self.ix_patched {
            (self.ixh as u16) << 8 | (self.ixl as u16)
        } else if self.iy_patched {
            (self.iyh as u16) << 8 | (self.iyl as u16)
        } else {
            (self.h as u16) << 8 | (self.l as u16)
        }
    }
    /// The AF pair: accumulator high, flags low.
    pub fn af(&self) -> u16 {
        (self.a as u16) << 8 | (self.f as u16)
    }
    /// Store `word` into BC.
    pub fn write_bc(&mut self, word: u16) {
        self.c = (word & 0xFF) as u8;
        self.b = (word >> 8) as u8;
    }
    /// Store `word` into DE.
    pub fn write_de(&mut self, word: u16) {
        self.e = (word & 0xFF) as u8;
        self.d = (word >> 8) as u8;
    }
    /// Store `word` into HL — or IX/IY while the patch flag is set.
    pub fn write_hl(&mut self, word: u16) {
        if self.ix_patched {
            self.ixl = (word & 0xFF) as u8;
            self.ixh = (word >> 8) as u8;
        } else if self.iy_patched {
            self.iyl = (word & 0xFF) as u8;
            self.iyh = (word >> 8) as u8;
        } else {
            self.l = (word & 0xFF) as u8;
            self.h = (word >> 8) as u8;
        }
    }
    /// Store `word` into AF.
    pub fn write_af(&mut self, word: u16) {
        self.f = (word & 0xFF) as u8;
        self.a = (word >> 8) as u8;
    }
    /// Evaluate condition code <index>: index pairs (0,1), (2,3), (4,5)
    /// and (6,7) test flag bits 6, 0, 2 and 7 respectively — the even
    /// index is true when the bit is clear, the odd when it is set.
    pub fn cc(&self, index: usize) -> bool {
        match index {
            0 => self.f & (1 << 6) == 0,
            1 => self.f & (1 << 6) != 0,
            2 => self.f & 1 == 0,
            3 => self.f & 1 != 0,
            4 => self.f & (1 << 2) == 0,
            5 => self.f & (1 << 2) != 0,
            6 => self.f & (1 << 7) == 0,
            7 => self.f & (1 << 7) != 0,
            // Unknown condition codes evaluate false.
            _ => false,
        }
    }
    /// Set bit <bit> of F register to <set>
    pub fn set_flag(&mut self, bit: usize, set: bool) {
        if set {
            self.f |= 1 << bit;
        } else {
            self.f &= !(1 << bit);
        }
    }
    /// Test bit <bit> of the F register.
    pub fn read_flag(&mut self, bit: usize) -> bool {
        self.f & 1 << bit != 0
    }
    /// Enable or disable redirection of H/L/HL to IX.
    pub fn patch_ix(&mut self, patch: bool) {
        self.ix_patched = patch;
    }
    /// Enable or disable redirection of H/L/HL to IY.
    pub fn patch_iy(&mut self, patch: bool) {
        self.iy_patched = patch;
    }
}
impl fmt::Debug for Registers {
    // Multi-line hex dump of every register and assembled pair; note that
    // hl() reflects IX/IY while a patch flag is set.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(
            f,
            "pc: 0x{:x}\n\
             a: 0x{:x}, f: 0x{:x}, b: 0x{:x}, c: 0x{:x}\n\
             d: 0x{:x}, e: 0x{:x}, h: 0x{:x}, l: 0x{:x}\n\n\
             af: 0x{:x}, bc: 0x{:x}, de: 0x{:x}, hl: 0x{:x}, sp: {:x}\n\
             ixh: 0x{:x}, ixl: 0x{:x}, iyh: 0x{:x}, iyl: 0x{:x}\n\
             i: 0x{:x}, r: {:x}",
            self.pc,
            self.a,
            self.f,
            self.b,
            self.c,
            self.d,
            self.e,
            self.h,
            self.l,
            self.af(),
            self.bc(),
            self.de(),
            self.hl(),
            self.sp,
            self.ixh,
            self.ixl,
            self.iyh,
            self.iyl,
            self.i,
            self.r
        )
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    // Round-trip each 16-bit pair through its write/read accessors and
    // check the byte split.
    #[test]
    fn test_16bit_registers() {
        let mut reg = Registers::new();
        reg.write_bc(0x1234);
        assert_eq!(reg.bc(), 0x1234);
        assert_eq!(reg.b, 0x12);
        assert_eq!(reg.c, 0x34);
        reg.write_de(0x1234);
        assert_eq!(reg.de(), 0x1234);
        assert_eq!(reg.d, 0x12);
        assert_eq!(reg.e, 0x34);
        reg.write_hl(0x1234);
        assert_eq!(reg.hl(), 0x1234);
        assert_eq!(reg.h, 0x12);
        assert_eq!(reg.l, 0x34);
        reg.write_af(0x1234);
        assert_eq!(reg.af(), 0x1234);
        assert_eq!(reg.a, 0x12);
        assert_eq!(reg.f, 0x34);
    }
    // Index 3 selects SP or AF depending on the `sp` flag.
    #[test]
    fn test_write_16bit_r() {
        let mut reg = Registers::new();
        reg.write_16bit_r(3, true, 0x1234);
        assert_eq!(reg.sp, 0x1234);
        reg.write_16bit_r(3, false, 0x1234);
        assert_eq!(reg.af(), 0x1234);
    }
    // Flag bit 0 drives condition codes 2 (bit clear) and 3 (bit set).
    #[test]
    fn test_cc() {
        let mut reg = Registers::new();
        reg.set_flag(0, true);
        assert_eq!(reg.cc(2), false);
        assert_eq!(reg.cc(3), true);
        reg.set_flag(0, false);
        assert_eq!(reg.cc(2), true);
        assert_eq!(reg.cc(3), false);
    }
}
| true |
8c20d59b5cf8ef92a308886075591397d3b8ad30
|
Rust
|
sebzim4500/tokio-nats
|
/src/tls.rs
|
UTF-8
| 4,984 | 2.578125 | 3 |
[] |
no_license
|
use std::io::BufRead;
use std::sync::Arc;
use rustls_pemfile::{certs, read_one};
use tokio::net::TcpStream;
use tokio_rustls::client::TlsStream;
use tokio_rustls::rustls::{
Certificate, ClientConfig, OwnedTrustAnchor, PrivateKey, RootCertStore, ServerName,
};
use tokio_rustls::TlsConnector;
use webpki::TrustAnchor;
/// Failure modes when building TLS parameters or connecting.
#[derive(Debug)]
pub enum TLSConnBuildError {
    /// The builder was missing a key, client certificate, or trust root.
    NotConfigured,
    UnableParseClientCertificate,
    UnableParseClientKey,
    UnableParseCaCertificate,
    /// Configuration or handshake failed; carries a human-readable reason.
    UnableToConnect(String),
}
/// Finished TLS parameters consumed when establishing a connection.
#[derive(Debug, Clone)]
pub struct TlsConnParams {
    pub(crate) client_key: PrivateKey,
    pub(crate) client_certs: Vec<Certificate>,
    pub(crate) root_cert: RootCertStore,
}
/// Incremental builder for [`TlsConnParams`]: feed it PEM readers for the
/// client key, client certificates, and CA roots, then call `build`.
#[derive(Debug)]
pub struct TLSConnBuild {
    client_key: Option<PrivateKey>,
    client_certs: Vec<Certificate>,
    root_cert: RootCertStore,
}
impl Default for TLSConnBuild {
    fn default() -> Self {
        Self::new()
    }
}
/// Scan PEM items from `reader` until a supported private key (EC, RSA,
/// or PKCS#8) is found; any other item kinds are skipped.
fn load_key(mut reader: &mut dyn BufRead) -> Result<PrivateKey, TLSConnBuildError> {
    loop {
        let item = read_one(&mut reader).map_err(|_| TLSConnBuildError::UnableParseClientKey)?;
        let key = match item {
            Some(rustls_pemfile::Item::ECKey(key)) => {
                log::trace!("ECKey");
                key
            }
            Some(rustls_pemfile::Item::RSAKey(key)) => {
                log::trace!("RSAKey");
                key
            }
            Some(rustls_pemfile::Item::PKCS8Key(key)) => {
                log::trace!("PKCS8Key");
                key
            }
            // End of input without finding a usable key.
            None => {
                log::debug!("No type found");
                return Err(TLSConnBuildError::UnableParseClientKey);
            }
            // Not a private key; keep scanning.
            _ => continue,
        };
        return Ok(PrivateKey(key));
    }
}
impl TLSConnBuild {
    /// Create an empty builder with no key, certificates, or roots.
    pub fn new() -> TLSConnBuild {
        TLSConnBuild {
            client_key: None,
            client_certs: Vec::new(),
            root_cert: RootCertStore::empty(),
        }
    }

    /// Parse the client certificate chain from PEM data.
    pub fn client_certs(&mut self, mut reader: &mut dyn BufRead) -> Result<(), TLSConnBuildError> {
        let der_certs =
            certs(&mut reader).map_err(|_| TLSConnBuildError::UnableParseClientCertificate)?;
        self.client_certs = der_certs.into_iter().map(Certificate).collect();
        Ok(())
    }

    /// Parse the client private key from PEM data.
    pub fn client_key(&mut self, reader: &mut dyn BufRead) -> Result<(), TLSConnBuildError> {
        self.client_key = Some(load_key(reader)?);
        Ok(())
    }

    /// Parse CA certificates from PEM data and install them as trust roots.
    pub fn root_cert(&mut self, mut reader: &mut dyn BufRead) -> Result<(), TLSConnBuildError> {
        let der_certs =
            certs(&mut reader).map_err(|_| TLSConnBuildError::UnableParseCaCertificate)?;
        let mut trust_anchors = Vec::with_capacity(der_certs.len());
        for der in &der_certs {
            let anchor = webpki::TrustAnchor::try_from_cert_der(der)
                .map_err(|_| TLSConnBuildError::UnableParseCaCertificate)?;
            trust_anchors.push(OwnedTrustAnchor::from_subject_spki_name_constraints(
                anchor.subject,
                anchor.spki,
                anchor.name_constraints,
            ));
        }
        self.root_cert
            .add_server_trust_anchors(trust_anchors.into_iter());
        Ok(())
    }

    /// True once a key, at least one client certificate, and at least one
    /// trust root have all been supplied.
    pub(crate) fn is_ready(&self) -> bool {
        self.client_key.is_some() && !self.client_certs.is_empty() && !self.root_cert.is_empty()
    }

    /// Consume the builder, yielding connection parameters; fails with
    /// `NotConfigured` if anything is missing.
    pub fn build(self) -> Result<TlsConnParams, TLSConnBuildError> {
        if !self.is_ready() {
            return Err(TLSConnBuildError::NotConfigured);
        }
        match self.client_key {
            Some(client_key) => Ok(TlsConnParams {
                client_key,
                client_certs: self.client_certs,
                root_cert: self.root_cert,
            }),
            None => Err(TLSConnBuildError::NotConfigured),
        }
    }
}
/// Perform a client-authenticated TLS handshake over an established TCP
/// stream, returning the encrypted stream.
pub(crate) async fn connect(
    stream: TcpStream,
    domain: &str,
    tls_params: TlsConnParams,
) -> Result<TlsStream<TcpStream>, TLSConnBuildError> {
    // Validate the server name up front; shadowing keeps one binding live.
    let domain = ServerName::try_from(domain)
        .map_err(|_| TLSConnBuildError::UnableToConnect("Invalid dns name".to_owned()))?;
    log::trace!("Connecting TLS using domain: {domain:?}");
    let config = ClientConfig::builder()
        .with_safe_defaults()
        .with_root_certificates(tls_params.root_cert)
        .with_client_auth_cert(tls_params.client_certs, tls_params.client_key)
        .map_err(|_| TLSConnBuildError::UnableToConnect("Unable to TLS config".to_owned()))?;
    TlsConnector::from(Arc::new(config))
        .connect(domain, stream)
        .await
        .map_err(|e| TLSConnBuildError::UnableToConnect(e.to_string()))
}
| true |
8abdadd35e82d4b292fe38ed84a458aca08c8cc3
|
Rust
|
Nugine/heng-rs
|
/heng-judger/src/lang/c_cpp.rs
|
UTF-8
| 2,831 | 2.546875 | 3 |
[] |
no_license
|
use super::*;
/// Compilation settings for a C or C++ submission.
pub struct CCpp {
    /// Language standard to compile against.
    pub std: CCppStd,
    /// Whether to pass -O2 to the compiler.
    pub o2: bool,
}
/// Supported C and C++ language standards.
pub enum CCppStd {
    C89,
    C99,
    C11,
    Cpp11,
    Cpp14,
    Cpp17,
}
impl CCppStd {
    /// Parse a standard identifier ("c89" … "cpp17") as used in requests.
    fn from_str(s: &str) -> Result<Self> {
        let std = match s {
            "c89" => Self::C89,
            "c99" => Self::C99,
            "c11" => Self::C11,
            "cpp11" => Self::Cpp11,
            "cpp14" => Self::Cpp14,
            "cpp17" => Self::Cpp17,
            _ => return Err(anyhow::format_err!("invalid c/cpp std")),
        };
        Ok(std)
    }

    /// GNU dialect name passed to gcc/g++ via `--std`.
    fn as_str_gnu(&self) -> &str {
        match self {
            Self::C89 => "gnu89",
            Self::C99 => "gnu99",
            Self::C11 => "gnu11",
            Self::Cpp11 => "gnu++11",
            Self::Cpp14 => "gnu++14",
            Self::Cpp17 => "gnu++17",
        }
    }

    /// Whether this standard selects C++ rather than C.
    fn is_cpp(&self) -> bool {
        !matches!(self, Self::C89 | Self::C99 | Self::C11)
    }
}
impl CCpp {
    /// Name of the compiled executable inside the workspace.
    fn exe_name(&self) -> &str {
        "src"
    }
}
impl Language for CCpp {
    /// Language tag for reporting: "cpp" or "c", from the selected standard.
    fn lang_name(&self) -> &str {
        if self.std.is_cpp() {
            "cpp"
        } else {
            "c"
        }
    }
    /// C/C++ submissions always require a compile step.
    fn needs_compile(&self) -> bool {
        true
    }
    /// Source file name expected in the workspace.
    fn src_name(&self) -> &str {
        if self.std.is_cpp() {
            "src.cpp"
        } else {
            "src.c"
        }
    }
    /// File that captures compiler diagnostics.
    fn msg_name(&self) -> &str {
        "msg"
    }
    /// Compile the submission inside the sandbox: builds a gcc/g++ command
    /// (static link, optional -O2, GNU dialect of the requested standard)
    /// and runs it under the given resource limits.
    fn compile(&self, workspace: PathBuf, hard_limit: &Limit) -> Result<SandboxOutput> {
        let config = inject::<Config>();
        let c_cpp = &config.executor.c_cpp;
        let is_cpp = self.std.is_cpp();
        // Pick the g++ or gcc binary from the executor configuration.
        let cc = if is_cpp {
            c_cpp.gxx.as_os_str()
        } else {
            c_cpp.gcc.as_os_str()
        };
        let mut cmd = carapace::Command::new(cc);
        cmd.arg("--std").arg(self.std.as_str_gnu());
        cmd.arg("-static");
        cmd.arg_if(self.o2, "-O2");
        // https://stackoverflow.com/questions/5419366/why-do-i-have-to-explicitly-link-with-libm
        cmd.arg_if(!is_cpp, "-lm");
        cmd.arg("-o").arg(self.exe_name());
        cmd.arg(self.src_name());
        // No interactive stdio; compiler messages are captured in the msg file.
        cmd.stdio("/dev/null", "/dev/null", self.msg_name());
        // Expose the compiler and its configured support paths read-only
        // inside the sandbox.
        cmd.bindmount_ro(cc, cc);
        for mnt in &c_cpp.mount {
            cmd.bindmount_ro(mnt, mnt);
        }
        sandbox_run(cmd, &config, workspace, hard_limit)
    }
    /// Run the compiled executable with redirected stdio under the limits.
    fn run(
        &self,
        workspace: PathBuf,
        stdin: PathBuf,
        stdout: PathBuf,
        stderr: PathBuf,
        hard_limit: &Limit,
    ) -> Result<SandboxOutput> {
        let config = inject::<Config>();
        let mut cmd = carapace::Command::new(self.exe_name());
        cmd.stdio(stdin, stdout, stderr);
        sandbox_run(cmd, &config, workspace, hard_limit)
    }
}
| true |
fdf889d1fa606a647d78bf4ade6b3c3cbd7c1fd5
|
Rust
|
grahamc/code-solving-problem-application-for-social-media-site-with-blogs
|
/http-proxy/src/httprequest.rs
|
UTF-8
| 1,904 | 3 | 3 |
[] |
no_license
|
use std::io::BufReader;
use std::net::TcpStream;
use httperrors::HttpError;
use httpreader::HttpReader;
use httplineparser::HttpLineParsers;
use httpheader::{Header, HeaderParser, headers_to_body_len, prefix_and_headers_to_io_string};
/// A parsed HTTP request: the request line plus its headers.
pub struct HttpRequest {
    request_line: String,
    headers: Vec<Header>,
}
/// Incremental parser that reads an `HttpRequest` off a buffered stream.
pub struct HttpRequestParser<'a> {
    stream: HttpReader<'a>,
}
impl HttpRequest {
    /// Append a parsed header to the request.
    pub fn add_header(&mut self, header: Header) {
        self.headers.push(header);
    }

    /// Expected body length in bytes, derived from the headers.
    ///
    /// Takes `&self` rather than `self`: the original by-value receiver
    /// consumed the whole request just to read a length, making it
    /// impossible to use the request afterwards. Call sites are unchanged.
    pub fn body_length(&self) -> usize {
        headers_to_body_len(&self.headers)
    }

    /// Serialize the request line and headers back to wire format.
    pub fn as_string(&self) -> String {
        prefix_and_headers_to_io_string(&self.request_line, &self.headers)
    }
}
impl<'a> HttpRequestParser<'a> {
    /// Wrap a buffered TCP stream for request parsing.
    pub fn new(stream: &'a mut BufReader<TcpStream>) -> HttpRequestParser<'a> {
        HttpRequestParser { stream: HttpReader::new(stream) }
    }

    /// Parse one HTTP request, mapping any low-level failure to a
    /// `malformed_request` error.
    pub fn parse(&mut self) -> Result<HttpRequest, HttpError> {
        self.parse_unwrapped()
            .map_err(HttpError::malformed_request)
    }

    // Parse the request line then the headers; errors are static messages.
    // (Deprecated `try!` replaced with the `?` operator throughout.)
    fn parse_unwrapped(&mut self) -> Result<HttpRequest, &'static str> {
        let request_line = self.read_request_line()?;
        let headers = self.read_headers()?;
        Ok(HttpRequest {
            request_line,
            headers,
        })
    }

    // Read and validate the first line of the request.
    fn read_request_line(&mut self) -> Result<String, &'static str> {
        let line = self.stream.read_line()?;
        if HttpLineParsers::verify_request_line(&line) {
            Ok(line)
        } else {
            Err("malformed request headers")
        }
    }

    // Delegate header parsing to the shared HeaderParser.
    fn read_headers(&mut self) -> Result<Vec<Header>, &'static str> {
        HeaderParser::read_headers(&mut self.stream)
    }
}
| true |
9a0a20c02c202ef6783aced1629082167a74adaa
|
Rust
|
Shock-1/teensy4-rs
|
/imxrt1062-pac/imxrt1062-usb1/src/hwdevice.rs
|
UTF-8
| 1,511 | 2.53125 | 3 |
[
"Apache-2.0",
"MIT"
] |
permissive
|
// NOTE(review): this file looks machine-generated (svd2rust style);
// presumably it should be regenerated from the SVD rather than hand-edited.
#[doc = "Reader of register HWDEVICE"]
pub type R = crate::R<u32, super::HWDEVICE>;
#[doc = "Device Capable. Indicating whether device operation mode is supported or not.\n\nValue on reset: 1"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum DC_A {
    #[doc = "0: Not supported"]
    DC_0 = 0,
    #[doc = "1: Supported"]
    DC_1 = 1,
}
// Map the enum onto its bit value: DC_0 -> false, DC_1 -> true.
impl From<DC_A> for bool {
    #[inline(always)]
    fn from(variant: DC_A) -> Self {
        variant as u8 != 0
    }
}
#[doc = "Reader of field `DC`"]
pub type DC_R = crate::R<bool, DC_A>;
impl DC_R {
    #[doc = r"Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> DC_A {
        // Single-bit field: false/true map directly onto the two variants.
        match self.bits {
            false => DC_A::DC_0,
            true => DC_A::DC_1,
        }
    }
    #[doc = "Checks if the value of the field is `DC_0`"]
    #[inline(always)]
    pub fn is_dc_0(&self) -> bool {
        *self == DC_A::DC_0
    }
    #[doc = "Checks if the value of the field is `DC_1`"]
    #[inline(always)]
    pub fn is_dc_1(&self) -> bool {
        *self == DC_A::DC_1
    }
}
#[doc = "Reader of field `DEVEP`"]
pub type DEVEP_R = crate::R<u8, u8>;
impl R {
    #[doc = "Bit 0 - Device Capable. Indicating whether device operation mode is supported or not."]
    #[inline(always)]
    pub fn dc(&self) -> DC_R {
        // DC occupies bit 0 of the register value.
        DC_R::new((self.bits & 0x01) != 0)
    }
    #[doc = "Bits 1:5 - Device Endpoint Number"]
    #[inline(always)]
    pub fn devep(&self) -> DEVEP_R {
        // DEVEP occupies bits 1..=5; shift and mask to a 5-bit value.
        DEVEP_R::new(((self.bits >> 1) & 0x1f) as u8)
    }
}
| true |
ece867e2df0f311d6f405312d381072c1671bf89
|
Rust
|
marco-c/gecko-dev-wordified
|
/third_party/rust/tokio-util/src/codec/bytes_codec.rs
|
UTF-8
| 2,410 | 3.09375 | 3 |
[
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
use
crate
:
:
codec
:
:
decoder
:
:
Decoder
;
use
crate
:
:
codec
:
:
encoder
:
:
Encoder
;
use
bytes
:
:
{
BufMut
Bytes
BytesMut
}
;
use
std
:
:
io
;
/
/
/
A
simple
[
Decoder
]
and
[
Encoder
]
implementation
that
just
ships
bytes
around
.
/
/
/
/
/
/
[
Decoder
]
:
crate
:
:
codec
:
:
Decoder
/
/
/
[
Encoder
]
:
crate
:
:
codec
:
:
Encoder
/
/
/
/
/
/
#
Example
/
/
/
/
/
/
Turn
an
[
AsyncRead
]
into
a
stream
of
Result
<
[
BytesMut
]
[
Error
]
>
.
/
/
/
/
/
/
[
AsyncRead
]
:
tokio
:
:
io
:
:
AsyncRead
/
/
/
[
BytesMut
]
:
bytes
:
:
BytesMut
/
/
/
[
Error
]
:
std
:
:
io
:
:
Error
/
/
/
/
/
/
/
/
/
#
mod
hidden
{
/
/
/
#
#
[
allow
(
unused_imports
)
]
/
/
/
use
tokio
:
:
fs
:
:
File
;
/
/
/
#
}
/
/
/
use
tokio
:
:
io
:
:
AsyncRead
;
/
/
/
use
tokio_util
:
:
codec
:
:
{
FramedRead
BytesCodec
}
;
/
/
/
/
/
/
#
enum
File
{
}
/
/
/
#
impl
File
{
/
/
/
#
async
fn
open
(
_name
:
&
str
)
-
>
Result
<
impl
AsyncRead
std
:
:
io
:
:
Error
>
{
/
/
/
#
use
std
:
:
io
:
:
Cursor
;
/
/
/
#
Ok
(
Cursor
:
:
new
(
vec
!
[
0
1
2
3
4
5
]
)
)
/
/
/
#
}
/
/
/
#
}
/
/
/
#
/
/
/
#
#
[
tokio
:
:
main
(
flavor
=
"
current_thread
"
)
]
/
/
/
#
async
fn
main
(
)
-
>
Result
<
(
)
std
:
:
io
:
:
Error
>
{
/
/
/
let
my_async_read
=
File
:
:
open
(
"
filename
.
txt
"
)
.
await
?
;
/
/
/
let
my_stream_of_bytes
=
FramedRead
:
:
new
(
my_async_read
BytesCodec
:
:
new
(
)
)
;
/
/
/
#
Ok
(
(
)
)
/
/
/
#
}
/
/
/
/
/
/
#
[
derive
(
Copy
Clone
Debug
Eq
PartialEq
Ord
PartialOrd
Hash
Default
)
]
pub
struct
BytesCodec
(
(
)
)
;
impl
BytesCodec
{
/
/
/
Creates
a
new
BytesCodec
for
shipping
around
raw
bytes
.
pub
fn
new
(
)
-
>
BytesCodec
{
BytesCodec
(
(
)
)
}
}
impl
Decoder
for
BytesCodec
{
type
Item
=
BytesMut
;
type
Error
=
io
:
:
Error
;
fn
decode
(
&
mut
self
buf
:
&
mut
BytesMut
)
-
>
Result
<
Option
<
BytesMut
>
io
:
:
Error
>
{
if
!
buf
.
is_empty
(
)
{
let
len
=
buf
.
len
(
)
;
Ok
(
Some
(
buf
.
split_to
(
len
)
)
)
}
else
{
Ok
(
None
)
}
}
}
impl
Encoder
<
Bytes
>
for
BytesCodec
{
type
Error
=
io
:
:
Error
;
fn
encode
(
&
mut
self
data
:
Bytes
buf
:
&
mut
BytesMut
)
-
>
Result
<
(
)
io
:
:
Error
>
{
buf
.
reserve
(
data
.
len
(
)
)
;
buf
.
put
(
data
)
;
Ok
(
(
)
)
}
}
impl
Encoder
<
BytesMut
>
for
BytesCodec
{
type
Error
=
io
:
:
Error
;
fn
encode
(
&
mut
self
data
:
BytesMut
buf
:
&
mut
BytesMut
)
-
>
Result
<
(
)
io
:
:
Error
>
{
buf
.
reserve
(
data
.
len
(
)
)
;
buf
.
put
(
data
)
;
Ok
(
(
)
)
}
}
| true |
c41f27cea5c4fa290dc0aa3c6831f6e481018b3f
|
Rust
|
cmsd2/rogue1
|
/src/euclid.rs
|
UTF-8
| 708 | 3.359375 | 3 |
[
"MIT",
"LicenseRef-scancode-ubuntu-font-1.0"
] |
permissive
|
/// Euclidean remainder of `a` divided by `b`: the result is always
/// non-negative, unlike Rust's `%`, which takes the sign of `a`.
pub fn modulo(a: i32, b: i32) -> i32 {
    let rem = a % b;
    if rem >= 0 {
        rem
    } else if b < 0 {
        rem - b
    } else {
        rem + b
    }
}

/// Greatest common divisor via the Euclidean algorithm, written
/// iteratively; uses `modulo` so negative inputs behave sensibly.
pub fn euclid(a: i32, b: i32) -> i32 {
    let (mut x, mut y) = (a, b);
    while y != 0 {
        let r = modulo(x, y);
        x = y;
        y = r;
    }
    x
}
#[cfg(test)]
pub mod tests {
    use super::euclid;
    // Spot checks for the GCD, including zero and negative inputs.
    #[test]
    pub fn test_euclid() {
        assert_eq!(euclid(5, 7), 1);
        assert_eq!(euclid(12, 4), 4);
        assert_eq!(euclid(18, 60), 6);
        assert_eq!(euclid(8, 8), 8);
        assert_eq!(euclid(0, 17), 17);
        assert_eq!(euclid(-4, 8), 4);
        assert_eq!(euclid(-2, 1), 1);
        assert_eq!(euclid(1, 0), 1);
    }
}
| true |
c002c94b7c483585a56a13ad9ae52590637ce7d0
|
Rust
|
dit7ya/exercism
|
/rust/sublist/src/lib.rs
|
UTF-8
| 930 | 3.546875 | 4 |
[] |
no_license
|
/// Relationship between two lists.
#[derive(Debug, PartialEq)]
pub enum Comparison {
    Equal,
    Sublist,
    Superlist,
    Unequal,
}

/// Classify `first_list` relative to `second_list`.
///
/// The parameters were previously named with leading underscores (the
/// convention for *unused* bindings) even though both are used — renamed;
/// parameter names are not part of the call interface in Rust.
pub fn sublist<T: PartialEq>(first_list: &[T], second_list: &[T]) -> Comparison {
    use std::cmp::Ordering;
    match first_list.len().cmp(&second_list.len()) {
        // Slices of equal length compare element-wise.
        Ordering::Equal if first_list == second_list => Comparison::Equal,
        Ordering::Less if is_sublist(first_list, second_list) => Comparison::Sublist,
        Ordering::Greater if is_sublist(second_list, first_list) => Comparison::Superlist,
        _ => Comparison::Unequal,
    }
}

/// Whether `needle` (no longer than `haystack`) occurs as a contiguous
/// run inside `haystack`. An empty `needle` always matches.
fn is_sublist<T: PartialEq>(needle: &[T], haystack: &[T]) -> bool {
    (0..=haystack.len() - needle.len())
        .any(|start| needle == &haystack[start..start + needle.len()])
}
| true |
afc114089bd47cee11802ca6c9f6fbacf152b08e
|
Rust
|
thepowersgang/rust_os
|
/Kernel/Core/async-v3/mutex.rs
|
UTF-8
| 3,602 | 3.109375 | 3 |
[
"BSD-2-Clause"
] |
permissive
|
// "Tifflin" Kernel
// - By John Hodge (thePowersGang)
//
// Core/async-v3/mutex.rs
//! Asynchonous mutex
#[allow(unused_imports)]
use crate::prelude::*;
use crate::lib::collections::VecDeque;
use core::ops;
use core::cell::UnsafeCell;
/// Shared bookkeeping for the async mutex, guarded by a blocking mutex.
#[derive(Default)]
pub struct MutexInner
{
    /// List of threads waiting on this mutex
    sleep_queue: VecDeque<super::ObjectHandle>,
    /// Current lock handle index (used to ensure that callers of `ack_lock` are the ones that were woken)
    cur_index: usize,
    /// The mutex is locked, but might not be acknowledged
    locked: bool,
    /// There is an active `Handle` to this mutex
    held: bool,
}
/// An asynchronous mutex: acquisition is signalled to waiters through
/// the async object machinery instead of blocking the caller.
pub struct Mutex<T>
{
    /// Blocking mutex protecting the wait queue and lock state.
    inner: crate::sync::Mutex<MutexInner>,
    /// The protected value; access is mediated by `Handle`.
    data: UnsafeCell<T>,
}
impl<T> Mutex<T>
{
    /// Construct a new async mutex containing the passed value
    pub const fn new(v: T) -> Mutex<T> {
        Mutex {
            inner: crate::sync::Mutex::new(MutexInner {
                cur_index: 0,
                sleep_queue: VecDeque::new(),
                locked: false,
                held: false,
            }),
            data: UnsafeCell::new(v),
        }
    }
    /// Obtain mutable access to the mutex data (if there is unique access to the Mutex)
    pub fn get_mut(&mut self) -> &mut T {
        // SAFE: Unique access to the mutex
        unsafe { &mut *self.data.get() }
    }
    /// Attempt to acquire the lock without waiting; `None` if it is
    /// already locked.
    pub fn try_lock(&self) -> Option<Handle<T>> {
        let mut lh = self.inner.lock();
        if !lh.locked {
            assert!( !lh.held, "Mutex not locked, but is still held" );
            // Mark both locked and held: a Handle is returned immediately.
            lh.locked = true;
            lh.held = true;
            Some(Handle { lock: self })
        }
        else {
            None
        }
    }
    /// Asynchronously lock the mutex
    ///
    /// This signals the current layer with a "handle" to the mutex (to be passed to `ack_lock`)
    pub fn lock_async(&self, object: super::ObjectHandle, _stack: super::StackPush) {
        let mut lh = self.inner.lock();
        if !lh.locked {
            assert!( !lh.held, "Mutex not locked, but is still held" );
            lh.locked = true;
            // Uncontended. We now have the lock.
            // - Schedule a wake with the next ID
            object.signal(lh.cur_index);
        }
        else {
            // Contented (can't just outright acquire the lock)
            // - Push this waiter onto the list of waiting threads
            lh.sleep_queue.push_back(object)
        }
    }
    /// Acquire a mutex lock using an index
    // NOTE(review): the assert messages below contain the typo
    // "Attmpeting"; left untouched here since they are runtime strings.
    pub fn ack_lock(&self, index: usize) -> Handle<T> {
        let mut lh = self.inner.lock();
        assert!( !lh.held,
            "Attmpeting to acquire an async mutex which is already held" );
        assert!( lh.locked,
            "Attmpeting to acquire an async mutex which isn't locked" );
        assert_eq!(lh.cur_index, index,
            "Attempting to acknowledge an async mutex acquire using a mismatched index - {} != cur {}", index, lh.cur_index);
        // TODO: Mangle the ID so callers can't easily predict it? Or should this method be unsafe to indicate that if you
        // fudge the ID, it's your own fault?.
        lh.cur_index += 1;
        lh.held = true;
        Handle { lock: self }
    }
}
/// Handle to the mutex, dereferences to the inner `T`
pub struct Handle<'a, T: 'a>
{
    lock: &'a Mutex<T>,
}
impl<'a, T: 'a> ops::Drop for Handle<'a, T> {
    // Release the lock: hand it directly to the next queued waiter if one
    // exists (the mutex stays `locked`), otherwise mark it unlocked.
    fn drop(&mut self) {
        let mut lh = self.lock.inner.lock();
        lh.held = false;
        // If there's somebody waiting on the mutex, wake them
        if let Some(h) = lh.sleep_queue.pop_front() {
            // TODO: Make some indication of who is currently holding the mutex?
            h.signal( lh.cur_index );
        }
        else {
            lh.locked = false;
        }
    }
}
impl<'a, T: 'a> ops::Deref for Handle<'a, T> {
    type Target = T;
    // Shared access to the protected data while the lock is held.
    fn deref(&self) -> &T {
        // SAFE: Lock is locked
        unsafe { &*self.lock.data.get() }
    }
}
impl<'a, T: 'a> ops::DerefMut for Handle<'a, T> {
    // Exclusive access to the protected data while the lock is held.
    fn deref_mut(&mut self) -> &mut T {
        // SAFE: Lock is locked
        unsafe { &mut *self.lock.data.get() }
    }
}
| true |
4531eafde94147a618701284f9367545a2f9608a
|
Rust
|
iferc/json-keypath-iter
|
/src/style/mod.rs
|
UTF-8
| 1,558 | 3.15625 | 3 |
[] |
no_license
|
mod builder;
mod preset;
pub use builder::StyleBuilder;
pub use preset::PresetStyle;
/// Used by `Iterator` to format `Element.path` and
/// determine whether or not to yield object and/or array values
#[derive(Debug)]
pub struct Style<'a> {
    // Tokens wrapped around an object key in the path, e.g. `["` and `"]`.
    object_key_prefix: &'a str,
    object_key_suffix: &'a str,
    // When false, the key itself is omitted (only prefix/suffix appear).
    object_keys_in_path: bool,
    // When true, the iterator should not yield object parent values.
    skip_object_parents: bool,
    // Tokens wrapped around an array index in the path, e.g. `[` and `]`.
    array_key_prefix: &'a str,
    array_key_suffix: &'a str,
    // When false, the index itself is omitted (only prefix/suffix appear).
    array_keys_in_path: bool,
    // When true, the iterator should not yield array parent values.
    skip_array_parents: bool,
}
impl<'a> Style<'a> {
    /// Appends an object member's key (optionally omitted), wrapped in the
    /// configured prefix/suffix, to `base_path`.
    ///
    /// Takes `&str` rather than `&String`; callers holding a `String` can
    /// still pass `&s` via deref coercion.
    pub fn object_format(&self, base_path: &str, key: &str) -> String {
        if self.object_keys_in_path {
            format!(
                "{}{}{}{}",
                base_path, self.object_key_prefix, key, self.object_key_suffix,
            )
        } else {
            format!(
                "{}{}{}",
                base_path, self.object_key_prefix, self.object_key_suffix,
            )
        }
    }
    /// Appends an array element's index (optionally omitted), wrapped in the
    /// configured prefix/suffix, to `base_path`.
    pub fn array_format(&self, base_path: &str, index: usize) -> String {
        if self.array_keys_in_path {
            format!(
                "{}{}{}{}",
                base_path, self.array_key_prefix, index, self.array_key_suffix,
            )
        } else {
            format!(
                "{}{}{}",
                base_path, self.array_key_prefix, self.array_key_suffix,
            )
        }
    }
    /// Whether object parent values should be skipped by the iterator.
    pub fn should_skip_object_parents(&self) -> bool {
        self.skip_object_parents
    }
    /// Whether array parent values should be skipped by the iterator.
    pub fn should_skip_array_parents(&self) -> bool {
        self.skip_array_parents
    }
}
| true |
d24c7069a843651fef4a83906e6c4d598ed8a227
|
Rust
|
advancedtelematic/ota-cli
|
/src/config.rs
|
UTF-8
| 4,363 | 2.90625 | 3 |
[
"Apache-2.0"
] |
permissive
|
use clap::ArgMatches;
use dirs;
use serde_json;
use std::{
fs::{self, File, OpenOptions},
io::{BufReader, ErrorKind, Read, Write},
path::{Path, PathBuf},
str::FromStr,
};
use url::Url;
use url_serde;
use zip::ZipArchive;
use api::auth_plus::{AccessToken, AuthPlus, AuthPlusApi, Credentials};
use error::{Error, Result};
const CONFIG_FILE: &str = ".ota.conf";
/// Config values passed to API methods for making HTTP requests.
#[derive(Serialize, Deserialize)]
pub struct Config {
    // Path to the credentials.zip archive on disk.
    pub credentials_zip: PathBuf,
    // Parsed lazily from `credentials_zip` by `credentials()`; never serialized.
    #[serde(skip)]
    pub credentials: Option<Credentials>,
    // Cached access token; omitted from the saved file when absent.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub token: Option<AccessToken>,
    // Service endpoints.
    #[serde(with = "url_serde")]
    pub campaigner: Url,
    #[serde(with = "url_serde")]
    pub director: Url,
    #[serde(with = "url_serde")]
    pub registry: Url,
    // Derived from `tufrepo.url` inside credentials.zip (see `reposerver_url`).
    #[serde(with = "url_serde")]
    pub reposerver: Url,
}
impl<'a> Config {
/// Initialize a new config from CLI arguments.
pub fn init_from_args(args: &ArgMatches<'a>) -> Result<()> {
let credentials: PathBuf = args.value_of("credentials").expect("--credentials").into();
let campaigner = args.value_of("campaigner").expect("--campaigner").parse()?;
let director = args.value_of("director").expect("--director").parse()?;
let registry = args.value_of("registry").expect("--registry").parse()?;
Self::init(credentials, campaigner, director, registry)
}
/// Initialize a new config file.
pub fn init(credentials_zip: PathBuf, campaigner: Url, director: Url, registry: Url) -> Result<()> {
let reposerver = Self::reposerver_url(&credentials_zip)?;
Config {
credentials_zip,
credentials: None,
token: None,
campaigner,
director,
registry,
reposerver,
}
.save_default()
}
/// Save the default config file.
pub fn save_default(&self) -> Result<()> { self.save(Self::default_path()) }
/// Load the default config file.
pub fn load_default() -> Result<Self> { Self::load(Self::default_path()) }
/// Save the current config.
pub fn save(&self, path: impl AsRef<Path>) -> Result<()> {
let mut file = OpenOptions::new().create(true).write(true).truncate(true).open(path)?;
Ok(file.write_all(&serde_json::to_vec_pretty(&self)?)?)
}
/// Load a previously saved config.
pub fn load(path: impl AsRef<Path>) -> Result<Self> {
fs::read(path)
.or_else(|err| match err.kind() {
ErrorKind::NotFound => Err(Error::NotFound("Config file".into(), Some("Please run `ota init` first.".into()))),
_ => Err(err.into()),
})
.and_then(|file| Ok(serde_json::from_slice(&file)?))
}
/// Parse `Credentials` or return an existing reference.
pub fn credentials(&mut self) -> Result<&Credentials> {
if let None = self.credentials {
self.credentials = Some(Credentials::parse(&self.credentials_zip)?);
}
Ok(self.credentials.as_ref().unwrap())
}
/// Refresh an `AccessToken` or return existing.
pub fn token(&mut self) -> Result<Option<AccessToken>> {
match self.token {
Some(_) => debug!("using cached access token..."),
None => {
if let Some(token) = AuthPlus::refresh_token(self)? {
self.token = Some(token);
self.save_default()?;
}
}
}
Ok(self.token.clone())
}
/// Return the default config path.
fn default_path() -> PathBuf {
let mut path = PathBuf::new();
path.push(dirs::home_dir().expect("couldn't read home directory path"));
path.push(CONFIG_FILE);
path
}
/// Parse credentials.zip and return the TUF Reposerver URL.
fn reposerver_url(credentials_zip: impl AsRef<Path>) -> Result<Url> {
debug!("reading tufrepo.url from credentials.zip");
let file = File::open(credentials_zip)?;
let mut archive = ZipArchive::new(BufReader::new(file))?;
let mut tufrepo = archive.by_name("tufrepo.url")?;
let mut contents = String::new();
let _ = tufrepo.read_to_string(&mut contents)?;
Ok(Url::from_str(&contents)?)
}
}
| true |
107e6df5ddcf18a628700053ea1bd2965c8a1baf
|
Rust
|
Insem/Tg_bot
|
/src/DataBase/server.rs
|
UTF-8
| 1,187 | 2.8125 | 3 |
[] |
no_license
|
use mongodb::{Client, options::ClientOptions, options::StreamAddress};
use std::error::Error;
use core::result::Result;
use bson::{doc, bson};
#[derive(Debug)]
pub struct DB{
    // Connection handle; stored as a Result even though `start` panics on
    // failure, so it is always `Ok` in practice.
    db:Result<Client, Box<dyn Error>>
    // TODO (translated): create an enum of databases, with a trait
}
impl DB {
    /// Connects to a MongoDB instance on localhost:27017.
    ///
    /// # Panics
    /// Panics (via `expect`) if the client cannot be constructed.
    pub fn start() -> DB{
        let options = ClientOptions::builder()
            .hosts(vec![
                StreamAddress {
                    hostname: "localhost".into(),// (translated) check this function
                    port: Some(27017),
                }
            ])
            .build();
        DB{
            db:Ok(Client::with_options(options).expect("Connection to DB failed"))
        }
    }
    /// Inserts three hard-coded documents into the `test.music` collection.
    ///
    /// NOTE(review): panics via `unwrap` on any driver error; the duplicated
    /// `tg_id` values look like placeholder test data.
    pub fn insert(&self){
        let db = self.db.as_ref().unwrap().database("test");
        let collection = db.collection("music");
        let docs = vec![
            doc! { "tg_id":"040053053045" },
            doc! { "tg_id":"040053053045" },
            doc! { "tg_id":"040053053045" },
        ];
        collection.insert_many(docs, None).unwrap();
    }
}
| true |
67c1ff23f25be1ebcc20c279e074fd4aa3786767
|
Rust
|
bnjbvr/better-rss
|
/src/config.rs
|
UTF-8
| 589 | 2.546875 | 3 |
[] |
no_license
|
extern crate serde_yaml;
use std::fs::File;
use std::io::prelude::*;
use utils::GenResult;
/// One feed entry from `config.yaml`.
#[derive(Debug, PartialEq, Deserialize)]
pub struct ConfigEntry {
    // Name of the feed to fetch.
    pub feed_name: String,
    // Maximum number of entries to keep.
    pub num_entries: u32,
    // Output file for the generated feed.
    pub out_filename: String,
    // Optional account name; semantics depend on the feed source.
    pub account_name: Option<String>,
}
/// A configuration is an ordered list of feed entries.
type Configuration = Vec<ConfigEntry>;
/// Reads `config.yaml` from the current working directory and parses it
/// into a list of feed entries.
///
/// # Errors
/// Fails if the file cannot be read or does not deserialize as YAML.
pub fn read_config() -> GenResult<Configuration> {
    // `read_to_string` replaces the manual open + read_to_string dance.
    let file_content = std::fs::read_to_string("config.yaml")?;
    let config = serde_yaml::from_str(file_content.as_str())?;
    Ok(config)
}
| true |
d002cc4b9de51060107a796d64a1c12d2a95c021
|
Rust
|
falconre/raptor
|
/lib/analysis/transient_assignments.rs
|
UTF-8
| 11,717 | 3.25 | 3 |
[] |
no_license
|
//! Detect callee-saved variables.
//!
//! In assembly, callees often times save variables. We want to detect these,
//! so we can remove the code for the saving and restoring of these variables.
use crate::analysis::fixed_point::{fixed_point_forward, incoming_results, FixedPointAnalysis};
use crate::error::*;
use crate::ir;
use std::cmp::{Ordering, PartialOrd};
use std::collections::HashMap;
/// Returns, for each program location, the `TransientAssignments` state
/// holding the transient-assignment chain known for each variable there.
/// (The previous doc incorrectly described the result as a `HashSet`.)
pub fn transient_assignments<V: ir::Value>(
    function: &ir::Function<V>,
) -> Result<HashMap<ir::ProgramLocation, TransientAssignments>> {
    let transient_assignment_analysis = TransientAssignmentAnalysis {};
    // Forward dataflow to a fixed point, yielding per-location outgoing states...
    let result = fixed_point_forward(&transient_assignment_analysis, function)?;
    // ...then convert those into incoming states (join over predecessors).
    incoming_results(
        &transient_assignment_analysis,
        function,
        result,
        TransientAssignments::new,
    )
}
/// A value in a flat lattice over variables:
/// `Bottom` = no information yet, `Variable(v)` = definitely a copy of `v`,
/// `Top` = unknown / conflicting information.
#[allow(dead_code)]
#[derive(Clone, Debug, Eq, PartialEq)]
pub enum TransientAssignment {
    Top,
    Variable(ir::Variable),
    Bottom,
}
impl TransientAssignment {
    /// Returns the tracked variable, if this value is `Variable`.
    pub fn variable(&self) -> Option<&ir::Variable> {
        match self {
            TransientAssignment::Variable(variable) => Some(variable),
            _ => None,
        }
    }
    /// Lattice join: `Bottom` is the identity, `Top` is absorbing, and two
    /// different variables join to `Top`.
    fn join(&self, other: &TransientAssignment) -> TransientAssignment {
        match self {
            TransientAssignment::Top => TransientAssignment::Top,
            TransientAssignment::Variable(lhs) => match other {
                TransientAssignment::Top => TransientAssignment::Top,
                TransientAssignment::Variable(rhs) => {
                    if lhs == rhs {
                        self.clone()
                    } else {
                        // Conflicting variables: information is lost.
                        TransientAssignment::Top
                    }
                }
                TransientAssignment::Bottom => self.clone(),
            },
            TransientAssignment::Bottom => other.clone(),
        }
    }
}
impl ::std::fmt::Display for TransientAssignment {
    /// Renders the lattice value: `⊤` for Top, `⊥` for Bottom, or the
    /// wrapped variable's own display form.
    fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
        match self {
            TransientAssignment::Variable(variable) => write!(f, "{}", variable),
            TransientAssignment::Top => f.write_str("⊤"),
            TransientAssignment::Bottom => f.write_str("⊥"),
        }
    }
}
impl PartialOrd for TransientAssignment {
fn partial_cmp(&self, other: &TransientAssignment) -> Option<Ordering> {
match self {
TransientAssignment::Top => match other {
TransientAssignment::Top => Some(Ordering::Equal),
TransientAssignment::Variable(_) | TransientAssignment::Bottom => {
Some(Ordering::Greater)
}
},
TransientAssignment::Variable(lhs) => match other {
TransientAssignment::Top => Some(Ordering::Less),
TransientAssignment::Variable(rhs) => {
if lhs == rhs {
Some(Ordering::Equal)
} else {
None
}
}
TransientAssignment::Bottom => Some(Ordering::Greater),
},
TransientAssignment::Bottom => match other {
TransientAssignment::Top | TransientAssignment::Variable(_) => Some(Ordering::Less),
TransientAssignment::Bottom => Some(Ordering::Equal),
},
}
}
}
/// A lattice value together with the chain of program locations through
/// which it was propagated (used to identify the save/restore code).
#[derive(Clone, Debug, Eq, PartialEq)]
pub struct TransientAssignmentChain {
    // Locations of the assignments that carried the value, in order.
    chain: Vec<ir::ProgramLocation>,
    assignment: TransientAssignment,
}
impl TransientAssignmentChain {
    /// Creates a chain with no recorded locations yet.
    pub fn new(assignment: TransientAssignment) -> TransientAssignmentChain {
        TransientAssignmentChain {
            chain: Vec::new(),
            assignment,
        }
    }
    /// Records `pl` as part of the chain.
    pub fn push_chain(&mut self, pl: ir::ProgramLocation) {
        // only if we have a value (i.e. a concrete variable) do we add to the chain
        if self.assignment().variable().is_some() {
            self.chain.push(pl);
        }
    }
    pub fn assignment(&self) -> &TransientAssignment {
        &self.assignment
    }
    pub fn chain(&self) -> &[ir::ProgramLocation] {
        &self.chain
    }
    /// Joins the lattice values and unions the location chains
    /// (order-preserving, no duplicates).
    fn join(&self, other: &TransientAssignmentChain) -> TransientAssignmentChain {
        let mut chain = self.chain.clone();
        for location in &other.chain {
            if !chain.contains(location) {
                chain.push(location.clone());
            }
        }
        TransientAssignmentChain {
            assignment: self.assignment.join(&other.assignment),
            chain,
        }
    }
    /// A `Top` chain with no locations (used to smash unknown state).
    fn new_top() -> TransientAssignmentChain {
        TransientAssignmentChain {
            chain: Vec::new(),
            assignment: TransientAssignment::Top,
        }
    }
}
impl PartialOrd for TransientAssignmentChain {
    // Chains are ordered purely by their lattice value; the location list
    // is ignored for comparison purposes.
    fn partial_cmp(&self, other: &TransientAssignmentChain) -> Option<Ordering> {
        self.assignment().partial_cmp(other.assignment())
    }
}
/// Per-location dataflow state: one chain per tracked variable.
#[derive(Clone, Debug, PartialEq)]
pub struct TransientAssignments {
    chains: HashMap<ir::Variable, TransientAssignmentChain>,
}
impl TransientAssignments {
pub fn new() -> TransientAssignments {
TransientAssignments {
chains: HashMap::new(),
}
}
/// Get the chains for a variable
pub fn get(&self, variable: &ir::Variable) -> Option<&TransientAssignmentChain> {
self.chains.get(variable)
}
pub fn chains(&self) -> &HashMap<ir::Variable, TransientAssignmentChain> {
&self.chains
}
fn eval<V: ir::Value>(&self, expression: &ir::Expression<V>) -> TransientAssignmentChain {
match expression {
ir::Expression::LValue(lvalue) => match lvalue.as_ref() {
ir::LValue::Variable(variable) => {
self.chains.get(variable).cloned().unwrap_or_else(|| {
TransientAssignmentChain::new(TransientAssignment::Variable(
variable.clone(),
))
})
}
ir::LValue::Dereference(_) => {
TransientAssignmentChain::new(TransientAssignment::Top)
}
},
_ => TransientAssignmentChain::new(TransientAssignment::Top),
}
}
fn join(mut self, other: &TransientAssignments) -> TransientAssignments {
for (variable, tac) in &other.chains {
self.chains
.entry(variable.clone())
.and_modify(|v| *v = v.join(tac))
.or_insert_with(|| tac.clone());
}
self
}
fn set(
&mut self,
variable: ir::Variable,
transient_assignment_chain: TransientAssignmentChain,
) {
self.chains.insert(variable, transient_assignment_chain);
}
fn top(&mut self) {
self.chains
.iter_mut()
.for_each(|(_, tac)| *tac = TransientAssignmentChain::new_top());
}
}
impl Default for TransientAssignments {
fn default() -> TransientAssignments {
TransientAssignments::new()
}
}
// This is copied essentially verbatim from Constants::PartialOrd, so if this
// is wrong, go look there.
//
// Pointwise order over the maps: `self <= other` iff every chain in the
// smaller map is <= the corresponding chain in the larger; any missing or
// incomparable entry makes the maps incomparable (None).
impl PartialOrd for TransientAssignments {
    fn partial_cmp(&self, other: &TransientAssignments) -> Option<Ordering> {
        match self.chains.len().cmp(&other.chains.len()) {
            Ordering::Less => {
                // Fewer entries: self can only be Less, and only if every
                // entry it has is <= the matching entry in other.
                for (lv, lt) in self.chains.iter() {
                    if !other.chains.get(lv).map(|rt| lt <= rt).unwrap_or(false) {
                        return None;
                    }
                }
                Some(Ordering::Less)
            }
            Ordering::Greater => {
                // Mirror image of the case above.
                for (lv, lt) in other.chains.iter() {
                    if !self.chains.get(lv).map(|rt| lt <= rt).unwrap_or(false) {
                        return None;
                    }
                }
                Some(Ordering::Greater)
            }
            Ordering::Equal => {
                // Same size: scan for a consistent direction; mixed
                // directions (or a missing key) mean incomparable.
                let mut order = Ordering::Equal;
                for (lv, lt) in &self.chains {
                    match other.chains.get(lv) {
                        Some(rt) => {
                            if lt < rt {
                                if order <= Ordering::Equal {
                                    order = Ordering::Less;
                                } else {
                                    return None;
                                }
                            } else if lt > rt {
                                if order >= Ordering::Equal {
                                    order = Ordering::Greater;
                                } else {
                                    return None;
                                }
                            }
                        }
                        None => {
                            return None;
                        }
                    }
                }
                Some(order)
            }
        }
    }
}
/// Stateless driver providing the transfer and join functions for the
/// fixed-point engine.
struct TransientAssignmentAnalysis {}
impl<'f, V: 'f + ir::Value> FixedPointAnalysis<'f, TransientAssignments, V>
    for TransientAssignmentAnalysis
{
    /// Transfer function: applies one instruction's effect to the state.
    fn trans(
        &self,
        location: &ir::RefProgramLocation<'f, V>,
        state: Option<TransientAssignments>,
    ) -> Result<TransientAssignments> {
        let mut state = match state {
            Some(state) => state,
            None => TransientAssignments::new(),
        };
        let state = match location.instruction() {
            Some(instruction) => match instruction.operation() {
                // Assignment: propagate the source's chain, extended with
                // this location.
                ir::Operation::Assign { dst, src } => {
                    let mut src = state.eval(src);
                    src.push_chain(location.clone().into());
                    state.set(dst.to_owned(), src);
                    state
                }
                // Memory load: destination becomes unknown.
                ir::Operation::Load { dst, .. } => {
                    state.set(dst.to_owned(), TransientAssignmentChain::new_top());
                    state
                }
                // Call: smash the known argument variables, or everything
                // when the argument list is unknown.
                ir::Operation::Call(call) => {
                    if let Some(arguments) = call.arguments() {
                        arguments
                            .iter()
                            .filter_map(|argument| argument.variable())
                            .for_each(|argument| {
                                state.set(argument.clone(), TransientAssignmentChain::new_top())
                            });
                    } else {
                        state.top();
                    }
                    state
                }
                // Branch: all bets are off.
                ir::Operation::Branch { .. } => {
                    state.top();
                    state
                }
                // Intrinsic: smash written scalars, or everything when the
                // write set is unknown.
                ir::Operation::Intrinsic(intrinsic) => {
                    if let Some(scalars_written) = intrinsic.scalars_written() {
                        scalars_written.into_iter().for_each(|scalar| {
                            state.set(scalar.clone().into(), TransientAssignmentChain::new_top())
                        });
                    } else {
                        state.top();
                    }
                    state
                }
                // Stores, returns and nops do not affect variable state.
                ir::Operation::Store { .. } | ir::Operation::Return(_) | ir::Operation::Nop(_) => {
                    state
                }
            },
            None => state,
        };
        Ok(state)
    }
    /// Join function: delegates to `TransientAssignments::join`.
    fn join(
        &self,
        state0: TransientAssignments,
        state1: &TransientAssignments,
    ) -> Result<TransientAssignments> {
        Ok(state0.join(state1))
    }
}
| true |
c7d046e88a37bb590542ae36203a696904632d19
|
Rust
|
pi-pi3/qr
|
/src/rect.rs
|
UTF-8
| 2,227 | 3.296875 | 3 |
[] |
no_license
|
use std::ops::Range;
use std::marker::PhantomData;
use num_traits::AsPrimitive;
use renderer::Drawable;
use point::Point2;
/// Axis-aligned rectangle; iteration yields the integer points of the
/// half-open region [x0, x1) x [y0, y1) (see the test below).
#[derive(Clone, Copy, Debug)]
pub struct Rectangle<T> {
    x0: T,
    x1: T,
    y0: T,
    y1: T,
}
impl<T> Rectangle<T> {
    #[inline(always)]
    pub fn new(x0: T, x1: T, y0: T, y1: T) -> Self {
        Rectangle { x0, x1, y0, y1 }
    }
}
impl<T: Copy + AsPrimitive<i64> + 'static> Drawable<T, Point2<T>> for Rectangle<T>
where
    i64: AsPrimitive<T>,
{
    // A rectangle is defined by four corner vertices.
    #[inline(always)]
    fn vertices(&self) -> usize {
        4
    }
}
impl<T: Copy + AsPrimitive<i64> + 'static> IntoIterator for Rectangle<T>
where
    i64: AsPrimitive<T>,
{
    type Item = Point2<T>;
    type IntoIter = IntoIter<T>;
    /// Builds a row-major point iterator: coordinates are widened to `i64`
    /// for range arithmetic, then cast back to `T` on yield.
    #[inline]
    fn into_iter(self) -> Self::IntoIter {
        let x0 = self.x0.as_();
        let x1 = self.x1.as_();
        let y0 = self.y0.as_();
        let y1 = self.y1.as_();
        let width = x0..x1;
        let mut height = y0..y1;
        // `x` starts as a fresh copy of the row range; `y` is the first row
        // (None when the rectangle has zero height).
        let x = width.clone();
        let y = height.next();
        let _phantom = PhantomData;
        IntoIter {
            width,
            height,
            x,
            y,
            _phantom,
        }
    }
}
/// Row-major point iterator state for [`Rectangle`].
#[derive(Debug)]
pub struct IntoIter<T> {
    // Remaining x values of the current row.
    x: Range<i64>,
    // Current row (None once all rows are exhausted).
    y: Option<i64>,
    // Template row range, cloned at the start of each row.
    width: Range<i64>,
    // Remaining rows.
    height: Range<i64>,
    _phantom: PhantomData<T>,
}
impl<T: Copy + 'static> Iterator for IntoIter<T>
where
    i64: AsPrimitive<T>,
{
    type Item = Point2<T>;
    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        // Take the next x; when the row is exhausted, advance to the next
        // row and reset x. If no rows remain, mark y = None (fully done).
        let x = self.x.next().or_else(|| {
            self.height
                .next()
                .and_then(|y| {
                    self.y = Some(y);
                    self.x = self.width.clone();
                    self.x.next()
                })
                .or_else(|| {
                    self.y = None;
                    None
                })
        });
        // Cast both coordinates back to T; None if either axis is finished.
        x.and_then(|x| self.y.map(|y| (x.as_(), y.as_())))
    }
}
#[cfg(test)]
mod tests {
    use super::Rectangle;
    // Verifies row-major order over the half-open [0,2) x [0,2) region.
    #[test]
    fn rect() {
        assert_eq!(
            Rectangle::new(0, 2, 0, 2).into_iter().collect::<Vec<_>>(),
            [(0, 0), (1, 0), (0, 1), (1, 1),]
        )
    }
}
| true |
12461135f9a0d2d7c7965588e38dc68bb583ebd9
|
Rust
|
dragazo/aircraft-rs
|
/src/main.rs
|
UTF-8
| 17,946 | 2.859375 | 3 |
[] |
no_license
|
use std::collections::{BTreeSet, BinaryHeap, VecDeque};
use std::cmp::Ordering;
use std::str::FromStr;
use itertools::Itertools;
/// Chebyshev (L-infinity) distance: the number of king-moves between cells.
fn square_dist(a: (isize, isize), b: (isize, isize)) -> usize {
    let dx = (a.0 - b.0).abs();
    let dy = (a.1 - b.1).abs();
    dx.max(dy) as usize
}
/// Manhattan (L1) distance; "diamond" for the shape of its level sets.
fn diamond_dist(a: (isize, isize), b: (isize, isize)) -> usize {
    let dist = (a.0 - b.0).abs() + (a.1 - b.1).abs();
    dist as usize
}
const BROADCAST_RANGE: usize = 2;
/// Advances `pos` one cell in direction `dir`
/// (0 = +y, 1 = +x, 2 = -y, 3 = -x). Panics on any other value.
fn step(pos: (isize, isize), dir: u8) -> (isize, isize) {
    let (dx, dy) = match dir {
        0 => (0, 1),
        1 => (1, 0),
        2 => (0, -1),
        3 => (-1, 0),
        _ => panic!(),
    };
    (pos.0 + dx, pos.1 + dy)
}
/// Predicts whether two aircraft (each given as current position plus the
/// heading for its *next* move) collide during that move.
fn will_collide(posdir1: ((isize, isize), u8), posdir2: ((isize, isize), u8)) -> bool {
    match diamond_dist(posdir1.0, posdir2.0) {
        // Same cell: already colliding.
        0 => true,
        // Adjacent: collision when headings differ and either craft steps
        // into the other's current cell.
        1 => posdir1.1 != posdir2.1 && (step(posdir1.0, posdir1.1) == posdir2.0 || step(posdir2.0, posdir2.1) == posdir1.0),
        // Two apart: collision only if both step into the same middle cell.
        2 => step(posdir1.0, posdir1.1) == step(posdir2.0, posdir2.1),
        // Farther apart: a single step cannot close the gap.
        _ => false,
    }
}
/// Strategy interface for an aircraft's per-tick steering decision.
pub trait Controller {
    /// Data shared with aircraft in the same comms group each tick.
    type Broadcast: Clone + Copy;
    fn new() -> Self;
    fn broadcast(&self) -> Self::Broadcast;
    /// Returns a heading delta applied as `(dir + delta) % 4`
    /// (0 = keep heading; 1 and 3 are the two quarter turns).
    fn calculate(&mut self, info: &Info, other: Option<(&Info, &Self::Broadcast)>) -> u8;
}
/// Snapshot of one aircraft: current position, heading and destination.
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)]
pub struct Info {
    pos: (isize, isize),
    dir: u8,
    target: (isize, isize),
}
/// Greedy controller: ignores other traffic and always moves toward the
/// target (no collision avoidance).
pub struct SimpleController;
impl Controller for SimpleController {
    type Broadcast = ();
    fn new() -> Self { SimpleController }
    fn broadcast(&self) {}
    fn calculate(&mut self, info: &Info, _: Option<(&Info, &Self::Broadcast)>) -> u8 {
        let dist = diamond_dist(info.pos, info.target);
        // Prefer going straight if it reduces distance, then one quarter
        // turn, otherwise the opposite quarter turn.
        if diamond_dist(step(info.pos, info.dir), info.target) < dist { 0 }
        else if diamond_dist(step(info.pos, (info.dir + 1) % 4), info.target) < dist { 1 }
        else { 3 }
    }
}
/// A partial plan for one aircraft: the heading deltas taken so far plus
/// the state they produce.
#[derive(PartialEq, Eq, PartialOrd, Ord, Clone, Debug)]
struct Course {
    // Sequence of heading deltas (0/1/3) applied so far.
    path: VecDeque<u8>,
    pos: (isize, isize),
    dir: u8,
    target: (isize, isize),
}
impl Course {
    // Projects the course's current state into an `Info` snapshot.
    fn info(&self) -> Info {
        Info { pos: self.pos, dir: self.dir, target: self.target }
    }
}
/// A pair of courses ordered for the joint A* search in `plot_path`.
#[derive(PartialEq, Eq, Debug)]
struct SortedCoursePair(Course, Course);
impl SortedCoursePair {
    // A*-style f-scores for each craft: moves taken so far (g) plus the
    // Manhattan-distance lower bound to the target (h).
    fn metrics(&self) -> (usize, usize) {
        let h1 = self.0.path.len() + diamond_dist(self.0.pos, self.0.target);
        let h2 = self.1.path.len() + diamond_dist(self.1.pos, self.1.target);
        (h1, h2)
    }
    // Worse of the two f-scores (overall completion time bound).
    fn long_metric(&self) -> usize {
        let (h1, h2) = self.metrics();
        h1.max(h2)
    }
    // Better of the two f-scores (tie breaker).
    fn short_metric(&self) -> usize {
        let (h1, h2) = self.metrics();
        h1.min(h2)
    }
    // Length of the longer path taken so far.
    fn len(&self) -> usize {
        self.0.path.len().max(self.1.path.len())
    }
    fn info_pair(&self) -> (Info, Info) {
        (self.0.info(), self.1.info())
    }
}
impl Ord for SortedCoursePair {
    fn cmp(&self, other: &Self) -> Ordering {
        // BinaryHeap is a max heap - we want lowest metric and highest length, so invert metric and leave len the same
        match other.long_metric().cmp(&self.long_metric()) { // shortest overall time (both aircraft)
            Ordering::Equal => match other.short_metric().cmp(&self.short_metric()) { // shortest individual time (tie breaker)
                Ordering::Equal => self.len().cmp(&other.len()), // whichever path is closest to being completed
                x => x,
            }
            x => x,
        }
    }
}
impl PartialOrd for SortedCoursePair {
    // Total order exists, so delegate to `cmp`.
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}
/// Controller that, on first contact with another aircraft, runs a joint
/// A* search over both crafts' moves and then replays its half of the plan.
pub struct CompleteController {
    // Remaining pre-planned heading deltas, once a joint plan exists.
    pre_computed_path: Option<VecDeque<u8>>,
}
impl CompleteController {
    /// Joint A* over pairs of courses; returns collision-free paths
    /// (as heading-delta sequences) for `a` and `b` respectively.
    fn plot_path(a: &Info, b: &Info) -> (VecDeque<u8>, VecDeque<u8>) {
        debug_assert_ne!(a.pos, b.pos);
        // Canonicalise the argument order so both aircraft compute the
        // same plan independently; swap the result back afterwards.
        if a.pos < b.pos {
            let r = Self::plot_path(b, a);
            return (r.1, r.0);
        }
        let mut heap: BinaryHeap<SortedCoursePair> = BinaryHeap::with_capacity(1024);
        // `heap_set` mirrors the heap's contents; `visited_set` holds
        // expanded states - together they dedupe the search space.
        let mut heap_set: BTreeSet<(Info, Info)> = Default::default();
        let mut visited_set: BTreeSet<(Info, Info)> = Default::default();
        macro_rules! heap_insert {
            ($a:expr, $b:expr $(,)?) => {{
                let v = SortedCoursePair($a, $b);
                let info = v.info_pair();
                if !visited_set.contains(&info) && heap_set.insert(info) {
                    heap.push(SortedCoursePair(v.0, v.1))
                }
            }}
        }
        heap_insert!(
            Course { path: Default::default(), pos: a.pos, dir: a.dir, target: a.target },
            Course { path: Default::default(), pos: b.pos, dir: b.dir, target: b.target },
        );
        while let Some(front) = heap.pop() {
            let info = front.info_pair();
            assert!(heap_set.remove(&info)); // take it out of the heap set and put it in the visited set
            assert!(visited_set.insert(info));
            let SortedCoursePair(a, b) = front;
            if a.pos != a.target && b.pos != b.target {
                // Both still moving: expand every non-colliding pair of
                // heading deltas (0 = straight, 1/3 = quarter turns).
                for &da in &[0, 1, 3] {
                    for &db in &[0, 1, 3] {
                        let new_a_dir = (a.dir + da) % 4;
                        let new_b_dir = (b.dir + db) % 4;
                        if will_collide((a.pos, new_a_dir), (b.pos, new_b_dir)) { continue }
                        let mut new_a_path = a.path.clone();
                        let mut new_b_path = b.path.clone();
                        new_a_path.push_back(da);
                        new_b_path.push_back(db);
                        let new_a_pos = step(a.pos, new_a_dir);
                        let new_b_pos = step(b.pos, new_b_dir);
                        if new_a_pos == a.target && new_b_pos == b.target { return (new_a_path, new_b_path); }
                        heap_insert!(
                            Course { path: new_a_path, pos: new_a_pos, dir: new_a_dir, target: a.target },
                            Course { path: new_b_path, pos: new_b_pos, dir: new_b_dir, target: b.target },
                        );
                    }
                }
            }
            else if a.pos != a.target {
                // Only `a` still moving; `b` has landed.
                for &d in &[0, 1, 3] {
                    let new_dir = (a.dir + d) % 4;
                    let new_pos = step(a.pos, new_dir);
                    let mut new_path = a.path.clone();
                    new_path.push_back(d);
                    if new_pos == a.target { return (new_path, b.path); }
                    heap_insert!(Course { path: new_path, pos: new_pos, dir: new_dir, target: a.target }, b.clone());
                }
            }
            else if b.pos != b.target {
                // Only `b` still moving; `a` has landed.
                for &d in &[0, 1, 3] {
                    let new_dir = (b.dir + d) % 4;
                    let new_pos = step(b.pos, new_dir);
                    let mut new_path = b.path.clone();
                    new_path.push_back(d);
                    if new_pos == b.target { return (a.path, new_path); }
                    heap_insert!(a.clone(), Course { path: new_path, pos: new_pos, dir: new_dir, target: b.target });
                }
            }
            else { unreachable!(); }
        }
        unreachable!("if this happend, the proof was wrong");
    }
}
impl Controller for CompleteController {
    type Broadcast = ();
    fn new() -> Self { Self { pre_computed_path: None } }
    fn broadcast(&self) { }
    fn calculate(&mut self, info: &Info, other: Option<(&Info, &Self::Broadcast)>) -> u8 {
        match &mut self.pre_computed_path {
            // Already planned: replay the next pre-computed move.
            Some(pre) => pre.pop_front().unwrap(),
            None => match other {
                // Alone: fall back to the greedy controller.
                None => SimpleController.calculate(info, None),
                // First contact: plan jointly, take the first move, cache
                // the rest.
                Some((other, _)) => {
                    let mut r = Self::plot_path(info, other).0;
                    let v = r.pop_front().unwrap();
                    self.pre_computed_path = Some(r);
                    v
                }
            }
        }
    }
}
/// Outcome of running the simulator for a bounded number of ticks.
#[derive(PartialEq, Eq)]
pub enum SimulationResult {
    Completed,
    Collision,
    Running,
}
/// Lock-step simulator for a set of aircraft driven by controller `T`.
pub struct Simulator<T> {
    // (state, controller, index into `paths`); landed craft are removed.
    aircraft: Vec<(Info, T, usize)>,
    // Full position history per original aircraft, kept for reporting.
    paths: Vec<Vec<(isize, isize)>>,
}
impl<T> Simulator<T> where T: Controller {
    /// Transitive broadcast group reachable from `index` (excluding it):
    /// repeatedly absorbs any craft within BROADCAST_RANGE of the group.
    fn comms_group(&self, index: usize) -> Vec<usize> {
        debug_assert!(index < self.aircraft.len());
        let mut group = Vec::with_capacity(self.aircraft.len());
        group.push(index);
        loop {
            let mut added = false;
            for (i, other) in self.aircraft.iter().enumerate() {
                if group.contains(&i) { continue }
                if group.iter().any(|&x| square_dist(other.0.pos, self.aircraft[x].0.pos) <= BROADCAST_RANGE) {
                    group.push(i);
                    added = true;
                }
            }
            if !added {
                // Drop the seed craft itself before returning.
                group.swap_remove(0);
                return group;
            }
        }
    }
    /// Builds a simulator with one controller per scenario entry.
    pub fn from_scenario(scenario: &[Info]) -> Self {
        let aircraft = scenario.iter().copied().enumerate().map(|(i, info)| (info, T::new(), i)).collect();
        let paths = scenario.iter().map(|x| vec![x.pos]).collect();
        Simulator { aircraft, paths }
    }
    /// Advances up to `max_ticks` steps, stopping early on completion or
    /// collision.
    pub fn tick(&mut self, max_ticks: usize) -> SimulationResult {
        for _ in 0..max_ticks {
            if self.aircraft.is_empty() { return SimulationResult::Completed; }
            // Snapshot all states/broadcasts so decisions this tick are
            // based on a consistent view.
            let snapshots: Vec<_> = self.aircraft.iter().map(|x| (x.0, x.1.broadcast())).collect();
            for i in 0..self.aircraft.len() {
                let group = self.comms_group(i);
                let other = if group.is_empty() { None } else {
                    let snapshot = &snapshots[group[0]];
                    Some((&snapshot.0, &snapshot.1))
                };
                let info = &mut self.aircraft[i];
                let delta = info.1.calculate(&snapshots[i].0, other);
                assert!(match delta { 0 | 1 | 3 => true, _ => false });
                info.0.dir = (info.0.dir + delta) % 4; // we can update dir now since everything is cached in snapshots - but changing pos would break collision logic
                self.paths[info.2].push(step(info.0.pos, info.0.dir)); // add next pos to path before checking for collisions so we can draw failures
            }
            // Pairwise collision check over the chosen headings.
            for (i, a) in self.aircraft.iter().enumerate() {
                for b in self.aircraft[i + 1..].iter() {
                    if will_collide((a.0.pos, a.0.dir), (b.0.pos, b.0.dir)) {
                        return SimulationResult::Collision;
                    }
                }
            }
            // Apply the moves; iterate in reverse so swap_remove is safe.
            for i in (0..self.aircraft.len()).rev() {
                let info = &mut self.aircraft[i];
                info.0.pos = step(info.0.pos, info.0.dir);
                if info.0.pos == info.0.target { self.aircraft.swap_remove(i); }
            }
        }
        if self.aircraft.is_empty() { SimulationResult::Completed } else { SimulationResult::Running }
    }
    // Number of ticks elapsed (longest recorded path minus the start cell).
    fn get_time(&self) -> usize {
        self.paths.iter().map(|x| x.len()).max().unwrap() - 1
    }
}
/// Invokes `f` for every scenario of `k` aircraft on an n x n grid:
/// all distinct start cells, all target cells differing from the craft's
/// own start, and all initial headings. `f` receives a running counter.
fn for_scenarios<F>(n: usize, k: usize, mut f: F)
    where F: FnMut(u64, Vec<Info>)
{
    assert!(k > 0 && k <= n * n);
    let space: Vec<_> = (0..n as isize).cartesian_product(0..n as isize).collect();
    let directions = &[0, 1, 2, 3];
    let mut count = 0;
    for starts in space.iter().copied().permutations(k) {
        for targets in (0..k).map(|_| space.iter().copied()).multi_cartesian_product() {
            // A craft may not start on its own target.
            if (0..k).any(|i| targets[i] == starts[i]) { continue }
            for directions in (0..k).map(|_| directions.iter().copied()).multi_cartesian_product() {
                let scenario = (0..k).map(|i| Info { pos: starts[i], dir: directions[i], target: targets[i] }).collect();
                count += 1;
                f(count, scenario);
            }
        }
    }
}
// Exhaustively checks that a lone greedy aircraft always lands on an
// 8x8 grid without colliding (trivially, there is nothing to hit).
#[test]
fn test_simple() {
    for_scenarios(8, 1, |i, scenario| {
        let mut sim = Simulator::<SimpleController>::from_scenario(&scenario);
        match sim.tick(1000) {
            SimulationResult::Completed => (),
            SimulationResult::Collision => panic!("sim {} ({:?}) had a collision", i, scenario),
            SimulationResult::Running => panic!("sim {} ({:?}) incomplete", i, scenario),
        }
    });
}
/// Time needed if each aircraft flew alone (the max over per-craft greedy
/// runs); used as the zero-delay baseline.
fn independent_time(scenario: &[Info], max_rounds: usize) -> usize {
    let mut max = 0;
    // windows(1) yields one-element slices, i.e. each craft in isolation.
    for single in scenario.windows(1) {
        let mut sim = Simulator::<SimpleController>::from_scenario(single);
        match sim.tick(max_rounds) {
            SimulationResult::Completed => max = max.max(sim.get_time()),
            _ => panic!(),
        }
    }
    max
}
/// Prints an optional formatted message to stderr and exits with status 1.
///
/// The message arm now accepts zero or more format arguments (the previous
/// pattern `($msg:expr, $($args:expr)*)` required a comma after the message,
/// so `crash!("plain message")` failed to match any arm).
macro_rules! crash {
    () => { std::process::exit(1); };
    ($msg:expr $(, $args:expr)* $(,)?) => {{
        eprintln!($msg $(, $args)*);
        crash!();
    }}
}
/// Parses `val` as `T`, terminating the process with an error message (via
/// `crash!`) if parsing fails.
fn parse_int<T: FromStr>(val: &str) -> T {
    match T::from_str(val) {
        Ok(v) => v,
        Err(_) => crash!("failed to parse '{}' as integer of valid range", val),
    }
}
/// CLI entry point. Modes:
/// * `help` - print usage
/// * `case` - run one user-specified scenario with a chosen controller
/// * `full` - exhaustively run every scenario on an n x n grid with k craft
fn main() {
    let args: Vec<String> = std::env::args().collect();
    let print_help = || {
        eprintln!("usage: {} [mode]", args[0]);
        eprintln!(" mode: help, full, case");
    };
    if args.len() < 2 {
        print_help();
        crash!();
    }
    match args[1].as_str() {
        "-h" | "--help" | "help" => print_help(),
        "case" => {
            // args: case <controller> <max_rounds> <spec>+ where each spec
            // is pos_x:pos_y:dir:target_x:target_y.
            if args.len() < 5 { crash!(r"usage: {} case controller max_rounds [pos_x:pos_y:dir:target_x:target_y]+
    controller - simple | complete
    example: case simple 1000 0:0:1:5:20 10:5:3:-10:0
", args[0]); }
            let max_rounds = parse_int(&args[3]);
            let mut scenario = vec![];
            for spec in &args[4..] {
                let v: Vec<isize> = spec.split(':').map(|x| parse_int(x)).collect();
                if v.len() != 5 { crash!("failed to parse spec '{}'", spec); }
                if v[2] < 0 || v[2] > 3 { crash!("invalid direction: {} (must be 0-3)", v[2]); }
                let info = Info { pos: (v[0], v[1]), dir: v[2] as u8, target: (v[3], v[4]) };
                scenario.push(info);
            }
            // Runs the sim and reports failure modes; true on completion.
            fn tick_by<T: Controller>(sim: &mut Simulator<T>, count: usize) -> bool {
                match sim.tick(count) {
                    SimulationResult::Collision => { println!("COLLISION!!"); false },
                    SimulationResult::Running => { println!("INCOMPLETE!!"); false },
                    SimulationResult::Completed => true,
                }
            }
            match args[2].as_str() {
                "simple" => {
                    let mut sim = Simulator::<SimpleController>::from_scenario(&scenario);
                    let ok = tick_by(&mut sim, max_rounds);
                    for (i, x) in sim.paths.iter().enumerate() {
                        println!("path {} - {:?}", i + 1, x);
                    }
                    if ok {
                        println!("{} rounds", sim.get_time());
                        println!("{} delay rounds", sim.get_time() - independent_time(&scenario, max_rounds));
                    }
                }
                "complete" => {
                    let mut sim = Simulator::<CompleteController>::from_scenario(&scenario);
                    let ok = tick_by(&mut sim, max_rounds);
                    for (i, x) in sim.paths.iter().enumerate() {
                        println!("path {} - {:?}", i + 1, x);
                    }
                    if ok {
                        println!("{} rounds", sim.get_time());
                        println!("{} delay rounds", sim.get_time() - independent_time(&scenario, max_rounds));
                    }
                }
                x => crash!("unrecognized controller type: '{}'", x),
            }
        }
        "full" => {
            if args.len() != 4 { crash!("usage: {} full n k\n n - size of grid (n x n)\n k - number of aircraft", args[0]); }
            let n = parse_int(&args[2]);
            let k = parse_int(&args[3]);
            let max_rounds = 1000;
            let mut count = 0u64;
            let mut non_trivial_count = 0u64;
            let mut delay_rounds = 0u64;
            let mut max_delay_rounds = 0u64;
            // Run CompleteController on every scenario; "non-trivial" means
            // the greedy controller alone would have failed it.
            for_scenarios(n, k, |i, scenario| {
                let mut sim = Simulator::<CompleteController>::from_scenario(&scenario);
                match sim.tick(max_rounds) {
                    SimulationResult::Completed => {
                        let d = (sim.get_time() - independent_time(&scenario, max_rounds)) as u64;
                        let trivial = Simulator::<SimpleController>::from_scenario(&scenario).tick(max_rounds) == SimulationResult::Completed;
                        if trivial { assert_eq!(d, 0); } else { non_trivial_count += 1; }
                        count += 1;
                        delay_rounds += d;
                        if d > max_delay_rounds {
                            max_delay_rounds = d;
                            println!("new max delay: {} ({:?})", d, scenario);
                        }
                        max_delay_rounds = max_delay_rounds.max(d);
                    }
                    SimulationResult::Collision => panic!("sim {} ({:?}) had a collision", i, scenario),
                    SimulationResult::Running => panic!("sim {} ({:?}) incomplete", i, scenario),
                }
            });
            println!("\npassed all {} scenarios ({} non-trivial)", count, non_trivial_count);
            println!("max delay: {}", max_delay_rounds);
            println!("avg delay: {}", delay_rounds as f64 / count as f64);
            println!("avg non-trivial delay: {}", delay_rounds as f64 / non_trivial_count as f64);
        }
        x => {
            eprintln!("unrecognized mode '{}'", x);
            print_help();
            crash!();
        }
    }
}
| true |
3d7e5152a3aec846ee20220929333a459585f2f0
|
Rust
|
Axighi/leetcode-solutions
|
/src/solutions/contains_duplicate.rs
|
UTF-8
| 606 | 3.46875 | 3 |
[
"MIT"
] |
permissive
|
pub struct Solution {}

use std::collections::HashSet;

impl Solution {
    /// Returns true if any value appears at least twice in `nums`.
    ///
    /// Runs in O(n) time and O(n) space. A `HashSet` replaces the previous
    /// `HashMap<i32, i32>` whose values were never used.
    #[allow(dead_code)]
    pub fn contains_duplicate(nums: Vec<i32>) -> bool {
        let mut seen = HashSet::with_capacity(nums.len());
        // `insert` returns false when the value was already present.
        nums.into_iter().any(|n| !seen.insert(n))
    }
}
#[test]
fn hooyah() {
    // Duplicate present (1 appears twice).
    assert_eq!(Solution::contains_duplicate(vec![1, 2, 3, 1]), true);
    // All elements distinct.
    assert_eq!(Solution::contains_duplicate(vec![1, 2, 3, 4]), false);
    // Multiple repeated values.
    assert_eq!(
        Solution::contains_duplicate(vec![1, 1, 1, 3, 3, 4, 3, 2, 4, 2]),
        true
    );
}
| true |
34b8aa50de71c2cdc1d93bb66dab0b90bd850898
|
Rust
|
munckymagik/rust_kb
|
/unsafe_and_ffi/tests/c_string_test.rs
|
UTF-8
| 1,284 | 3.484375 | 3 |
[] |
no_license
|
use std::ffi::CStr;
#[test]
// A byte string with exactly one trailing NUL is a valid CStr.
#[test]
fn when_there_is_a_trailing_nul() {
    let bytes = b"hello\0";
    assert_eq!(&[104, 101, 108, 108, 111, 0], bytes);
    let cstr = CStr::from_bytes_with_nul(bytes).unwrap();
    assert_eq!(format!("{:?}", cstr), "\"hello\"");
}
// Without a terminating NUL, construction must fail.
#[test]
fn when_there_is_no_trailing_nul() {
    let bytes = b"hello";
    assert_eq!(&[104, 101, 108, 108, 111], bytes);
    // NotNulTerminated error
    assert!(CStr::from_bytes_with_nul(bytes).is_err());
}
// A doubled NUL makes the first one "interior"; the checked constructor
// rejects it, and the unchecked one keeps the extra NUL in the value.
#[test]
fn when_there_are_2_trailing_nuls() {
    let bytes = b"hello\0\0";
    assert_eq!(&[104, 101, 108, 108, 111, 0, 0], bytes);
    // Will be an InteriorNul error
    assert!(CStr::from_bytes_with_nul(bytes).is_err());
    let cstr = unsafe { CStr::from_bytes_with_nul_unchecked(bytes) };
    // Extra nul is retained!!
    assert_eq!(format!("{:?}", cstr), "\"hello\\x00\"");
    // First we need to find the first nul
    let nul_pos = bytes.iter().position(|elem| *elem == b'\0').unwrap();
    assert_eq!(nul_pos + 2, bytes.len());
    // Then adjust our slice
    let bytes_trimmed = &bytes[..(nul_pos + 1)];
    // Now we can create a CStr safely
    let cstr = CStr::from_bytes_with_nul(bytes_trimmed).unwrap();
    // And sanity is restored
    assert_eq!(format!("{:?}", cstr), "\"hello\"");
}
| true |
b32aa670d88f8c335731de6392dc7dfdc70762a1
|
Rust
|
seija-engine/seija-old
|
/src/s2d/layout/view.rs
|
UTF-8
| 7,783 | 2.796875 | 3 |
[
"MIT"
] |
permissive
|
use super::{
types::{LayoutAlignment, Thickness},
GridCell, IView, LayoutElement,
};
use crate::common::{Rect2D, Transform, TreeNode};
use nalgebra::{Vector2, Vector3};
use specs::{Component, DenseVecStorage, Entity, ReadStorage, WriteStorage};
use std::cell::Cell;
/// Positioning mode of a layout view.
#[derive(Clone, Copy)]
pub enum ViewType {
    /// Participates in the normal layout flow.
    Static,
    /// Positioned independently of its siblings.
    Absolute
}

impl Default for ViewType {
    fn default() -> Self {
        ViewType::Static
    }
}

impl From<u32> for ViewType {
    /// `0` maps to `Static`; every other value maps to `Absolute`.
    fn from(typ: u32) -> ViewType {
        if typ == 0 {
            ViewType::Static
        } else {
            ViewType::Absolute
        }
    }
}

impl ViewType {
    pub fn is_absolute(&self) -> bool {
        matches!(self, ViewType::Absolute)
    }
    pub fn is_static(&self) -> bool {
        !self.is_absolute()
    }
}
/// Core layout data shared by all UI elements: local position, requested
/// size, margins/padding, and alignment within the parent.
#[derive(Default)]
pub struct View {
    // Local offset assigned during the arrange pass.
    pub pos: Cell<Vector2<f32>>,
    // Requested size; values <= 0 are treated as "unset" (see calc_content_size).
    pub size: Cell<Vector2<f64>>,
    pub margin: Thickness,
    pub padding: Thickness,
    // Horizontal alignment inside the parent.
    pub hor: LayoutAlignment,
    // Vertical alignment inside the parent.
    pub ver: LayoutAlignment,
    pub view_type:ViewType,
    // When set, the size is taken from the entity's Rect2D instead of `size`.
    pub use_rect_size:bool
}
// SAFETY: `Cell` is not `Sync`. This claims the ECS never accesses the same
// View from two threads at once. NOTE(review): unchecked here — confirm the
// dispatcher schedules all systems touching View on a single thread.
unsafe impl Sync for View {}
impl Component for View {
    type Storage = DenseVecStorage<View>;
}
impl View {
pub fn calc_content_size(&self, size: Vector2<f64>,rect:&Rect2D) -> Vector2<f64> {
let mut ret_size: Vector2<f64> = self.get_size(rect);
if ret_size.x <= 0f64 && self.hor == LayoutAlignment::Fill {
ret_size.x = size.x - self.margin.horizontal();
}
if ret_size.y <= 0f64 && self.ver == LayoutAlignment::Fill {
ret_size.y = size.y - self.margin.vertical();
}
ret_size
}
pub fn calc_orign(&self, entity: Entity, rects: &WriteStorage<Rect2D>) -> Vector3<f32> {
let rect = rects.get(entity).unwrap();
Vector3::new(rect.left(), rect.top(), 0f32)
}
pub fn get_size(&self,rect:&Rect2D) -> Vector2<f64> {
if self.use_rect_size {
Vector2::new(rect.width() as f64,rect.height() as f64)
} else {
self.size.get()
}
}
}
impl IView for View {
fn measure(
&self,
entity: Entity,
size: Vector2<f64>,
rects: &mut WriteStorage<Rect2D>,
_tree_nodes: &ReadStorage<TreeNode>,
_elems: &WriteStorage<LayoutElement>,
_cells: &ReadStorage<GridCell>,
) -> Vector2<f64> {
let content_size: Vector2<f64> = self.calc_content_size(size,rects.get(entity).unwrap());
rects.get_mut(entity).map(|rect| {
rect.set_width(content_size.x as f32);
rect.set_height(content_size.y as f32);
});
content_size
}
fn arrange(
&self,
entity: Entity,
_: Vector2<f64>,
rect2ds: &mut WriteStorage<Rect2D>,
_: &ReadStorage<TreeNode>,
_: &WriteStorage<LayoutElement>,
trans: &mut WriteStorage<Transform>,
origin: Vector3<f32>,
_cells: &ReadStorage<GridCell>,
) {
let (x, y) = {
let pos: Vector2<f32> = self.pos.get();
(pos.x, pos.y)
};
let rect = rect2ds.get(entity).unwrap();
let [ax, ay] = rect.anchor();
let offset_w = rect.width() * ax;
let offset_h = rect.height() * ay;
let new_x = origin.x + offset_w + x + self.margin.left as f32;
let new_y = origin.y - offset_h + y - self.margin.top as f32;
trans.get_mut(entity).unwrap().set_position_xy(new_x,new_y);
}
}
#[derive(Default)]
pub struct ContentView {
pub view: View,
}
unsafe impl Sync for ContentView {}
impl Component for ContentView {
type Storage = DenseVecStorage<ContentView>;
}
impl IView for ContentView {
fn measure(
&self,
entity: Entity,
size: Vector2<f64>,
rects: &mut WriteStorage<Rect2D>,
tree_nodes: &ReadStorage<TreeNode>,
elems: &WriteStorage<LayoutElement>,
cells: &ReadStorage<GridCell>,
) -> Vector2<f64> {
let mut content_size:Vector2<f64> = self.view.measure(entity, size, rects, tree_nodes, elems, cells);
let inner_size:Vector2<f64> = Vector2::new(content_size.x - self.view.padding.horizontal(),
content_size.y - self.view.padding.vertical());
let m_child = tree_nodes.get(entity).map(|v| &v.children);
if let Some(child) = m_child {
for centity in child {
if let Some(elem) = elems.get(*centity) {
let child_size:Vector2<f64> = elem.measure(*centity, inner_size, rects, tree_nodes, elems,cells);
let is_static:bool = elem.fview(|v| v.view_type.is_static());
if child_size.x > content_size.x && is_static {
content_size.x = child_size.x
}
if child_size.y > content_size.y && is_static {
content_size.y = child_size.y
}
}
}
for centity in child {
if let Some(elem) = elems.get(*centity) {
elem.measure(*centity, content_size, rects, tree_nodes, elems,cells);
}
}
}
rects.get_mut(entity).map(|rect| {
rect.set_width(content_size.x as f32);
rect.set_height(content_size.y as f32);
});
content_size
}
fn arrange(
&self,
entity: Entity,
size: Vector2<f64>,
rects: &mut WriteStorage<Rect2D>,
tree_nodes: &ReadStorage<TreeNode>,
elems: &WriteStorage<LayoutElement>,
trans: &mut WriteStorage<Transform>,
origin: Vector3<f32>,
cells: &ReadStorage<GridCell>,
) {
self.view.arrange(entity, size, rects, tree_nodes, elems, trans, origin, cells);
let child_origin:Vector3<f32> = self.view.calc_orign(entity, rects);
let (width,height) = {
let rect = rects.get(entity).unwrap();
(rect.width(),rect.height())
};
let m_child = tree_nodes.get(entity).map(|v| &v.children);
if let Some(child) = m_child {
for centity in child {
if let Some(elem) = elems.get(*centity) {
let (child_width,child_height) = {
let rect = rects.get(*centity).unwrap();
(rect.width(),rect.height())
};
let (hor,ver,mar) = elem.fview(|v| (v.hor,v.ver,v.margin.clone()));
let mut new_pos:Vector2<f32> = Vector2::default();
match hor {
LayoutAlignment::Center => new_pos.x = width * 0.5f32 - (child_width * 0.5f32),
LayoutAlignment::End => new_pos.x = width - child_width - self.view.padding.right as f32 - mar.right as f32,
_ => new_pos.x = 0f32 + self.view.padding.left as f32
}
match ver {
LayoutAlignment::Center => new_pos.y = -height * 0.5f32 + child_height * 0.5f32,
LayoutAlignment::End => new_pos.y = -height + child_height + self.view.padding.bottom as f32 + mar.bottom as f32,
_ => new_pos.y = 0f32 - self.view.padding.top as f32
}
elem.fview(|v| {v.pos.set(new_pos);});
elem.arrange(*centity, size, rects, tree_nodes, elems, trans, child_origin,cells);
}
}
}
}
}
| true |
01bc4f9cd5a6ad677d89717157a3c55ba04521bd
|
Rust
|
TonyMooori/studying
|
/2017_12/rust012/main.rs
|
UTF-8
| 1,116 | 3.375 | 3 |
[] |
no_license
|
use std::fmt;
use std::ops::Add;
//implement: 実装する
struct Node;
struct Edge;
struct MyGraph;
trait Graph{
type N;
type E;
fn has_edge(&self,&Self::N,&Self::N) -> bool;
fn edges(&self,&Self::N) -> Vec<Self::E>;
}
impl Graph for MyGraph{
type N = Node;
type E = Edge;
fn has_edge(&self,ns:&Node,ne:&Node) -> bool{
true
}
fn edges(&self,n:&Node) -> Vec<Edge>{
Vec::new()
}
}
struct Point{
x:i32,
y:i32,
}
impl Add for Point{
type Output = Point;
fn add(self,other:Point) -> Point{
Point{x:self.x + other.x,y:self.y + other.y}
}
}
impl Add<i32> for Point{
type Output = f64;
fn add(self,rhs : i32) -> f64{
0.5
}
}
fn main(){
let graph = MyGraph;
let obj = Box::new(graph) as Box<Graph<E=Edge,N=Node>>;
let p1 = Point{x:1,y:0};
let p2 = Point{x:0,y:1};
let p3 = p1+p2;
println!("p3=({},{})",p3.x,p3.y);
//println!("p1=({},{})",p1.x,p1.y);
let p4 = Point{x:1,y:3};
let val = p4 + 1i32;
println!("val={}",val);
}
| true |
4eb6cbc2bf76e53cfa31413329ecd3f9a1b7a298
|
Rust
|
dfrankland/mk20d7
|
/src/cmt/oc/mod.rs
|
UTF-8
| 9,013 | 2.71875 | 3 |
[
"MIT"
] |
permissive
|
#[doc = r" Value read from the register"]
pub struct R {
bits: u8,
}
#[doc = r" Value to write to the register"]
pub struct W {
bits: u8,
}
impl super::OC {
#[doc = r" Modifies the contents of the register"]
#[inline]
pub fn modify<F>(&self, f: F)
where
for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,
{
let bits = self.register.get();
let r = R { bits: bits };
let mut w = W { bits: bits };
f(&r, &mut w);
self.register.set(w.bits);
}
#[doc = r" Reads the contents of the register"]
#[inline]
pub fn read(&self) -> R {
R {
bits: self.register.get(),
}
}
#[doc = r" Writes to the register"]
#[inline]
pub fn write<F>(&self, f: F)
where
F: FnOnce(&mut W) -> &mut W,
{
let mut w = W::reset_value();
f(&mut w);
self.register.set(w.bits);
}
#[doc = r" Writes the reset value to the register"]
#[inline]
pub fn reset(&self) {
self.write(|w| w)
}
}
#[doc = "Possible values of the field `IROPEN`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum IROPENR {
#[doc = "CMT_IRO signal disabled"]
_0,
#[doc = "CMT_IRO signal enabled as output"]
_1,
}
impl IROPENR {
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
match *self {
IROPENR::_0 => false,
IROPENR::_1 => true,
}
}
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _from(value: bool) -> IROPENR {
match value {
false => IROPENR::_0,
true => IROPENR::_1,
}
}
#[doc = "Checks if the value of the field is `_0`"]
#[inline]
pub fn is_0(&self) -> bool {
*self == IROPENR::_0
}
#[doc = "Checks if the value of the field is `_1`"]
#[inline]
pub fn is_1(&self) -> bool {
*self == IROPENR::_1
}
}
#[doc = "Possible values of the field `CMTPOL`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum CMTPOLR {
#[doc = "CMT_IRO signal is active low"]
_0,
#[doc = "CMT_IRO signal is active high"]
_1,
}
impl CMTPOLR {
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
match *self {
CMTPOLR::_0 => false,
CMTPOLR::_1 => true,
}
}
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _from(value: bool) -> CMTPOLR {
match value {
false => CMTPOLR::_0,
true => CMTPOLR::_1,
}
}
#[doc = "Checks if the value of the field is `_0`"]
#[inline]
pub fn is_0(&self) -> bool {
*self == CMTPOLR::_0
}
#[doc = "Checks if the value of the field is `_1`"]
#[inline]
pub fn is_1(&self) -> bool {
*self == CMTPOLR::_1
}
}
#[doc = r" Value of the field"]
pub struct IROLR {
bits: bool,
}
impl IROLR {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = "Values that can be written to the field `IROPEN`"]
pub enum IROPENW {
#[doc = "CMT_IRO signal disabled"]
_0,
#[doc = "CMT_IRO signal enabled as output"]
_1,
}
impl IROPENW {
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _bits(&self) -> bool {
match *self {
IROPENW::_0 => false,
IROPENW::_1 => true,
}
}
}
#[doc = r" Proxy"]
pub struct _IROPENW<'a> {
w: &'a mut W,
}
impl<'a> _IROPENW<'a> {
#[doc = r" Writes `variant` to the field"]
#[inline]
pub fn variant(self, variant: IROPENW) -> &'a mut W {
{
self.bit(variant._bits())
}
}
#[doc = "CMT_IRO signal disabled"]
#[inline]
pub fn _0(self) -> &'a mut W {
self.variant(IROPENW::_0)
}
#[doc = "CMT_IRO signal enabled as output"]
#[inline]
pub fn _1(self) -> &'a mut W {
self.variant(IROPENW::_1)
}
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 5;
self.w.bits &= !((MASK as u8) << OFFSET);
self.w.bits |= ((value & MASK) as u8) << OFFSET;
self.w
}
}
#[doc = "Values that can be written to the field `CMTPOL`"]
pub enum CMTPOLW {
#[doc = "CMT_IRO signal is active low"]
_0,
#[doc = "CMT_IRO signal is active high"]
_1,
}
impl CMTPOLW {
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _bits(&self) -> bool {
match *self {
CMTPOLW::_0 => false,
CMTPOLW::_1 => true,
}
}
}
#[doc = r" Proxy"]
pub struct _CMTPOLW<'a> {
w: &'a mut W,
}
impl<'a> _CMTPOLW<'a> {
#[doc = r" Writes `variant` to the field"]
#[inline]
pub fn variant(self, variant: CMTPOLW) -> &'a mut W {
{
self.bit(variant._bits())
}
}
#[doc = "CMT_IRO signal is active low"]
#[inline]
pub fn _0(self) -> &'a mut W {
self.variant(CMTPOLW::_0)
}
#[doc = "CMT_IRO signal is active high"]
#[inline]
pub fn _1(self) -> &'a mut W {
self.variant(CMTPOLW::_1)
}
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 6;
self.w.bits &= !((MASK as u8) << OFFSET);
self.w.bits |= ((value & MASK) as u8) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _IROLW<'a> {
w: &'a mut W,
}
impl<'a> _IROLW<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 7;
self.w.bits &= !((MASK as u8) << OFFSET);
self.w.bits |= ((value & MASK) as u8) << OFFSET;
self.w
}
}
impl R {
#[doc = r" Value of the register as raw bits"]
#[inline]
pub fn bits(&self) -> u8 {
self.bits
}
#[doc = "Bit 5 - IRO Pin Enable"]
#[inline]
pub fn iropen(&self) -> IROPENR {
IROPENR::_from({
const MASK: bool = true;
const OFFSET: u8 = 5;
((self.bits >> OFFSET) & MASK as u8) != 0
})
}
#[doc = "Bit 6 - CMT Output Polarity"]
#[inline]
pub fn cmtpol(&self) -> CMTPOLR {
CMTPOLR::_from({
const MASK: bool = true;
const OFFSET: u8 = 6;
((self.bits >> OFFSET) & MASK as u8) != 0
})
}
#[doc = "Bit 7 - IRO Latch Control"]
#[inline]
pub fn irol(&self) -> IROLR {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 7;
((self.bits >> OFFSET) & MASK as u8) != 0
};
IROLR { bits }
}
}
impl W {
#[doc = r" Reset value of the register"]
#[inline]
pub fn reset_value() -> W {
W { bits: 0 }
}
#[doc = r" Writes raw bits to the register"]
#[inline]
pub unsafe fn bits(&mut self, bits: u8) -> &mut Self {
self.bits = bits;
self
}
#[doc = "Bit 5 - IRO Pin Enable"]
#[inline]
pub fn iropen(&mut self) -> _IROPENW {
_IROPENW { w: self }
}
#[doc = "Bit 6 - CMT Output Polarity"]
#[inline]
pub fn cmtpol(&mut self) -> _CMTPOLW {
_CMTPOLW { w: self }
}
#[doc = "Bit 7 - IRO Latch Control"]
#[inline]
pub fn irol(&mut self) -> _IROLW {
_IROLW { w: self }
}
}
| true |
c720ca581e85c23482824c7f8aa7d0c5e07e54a5
|
Rust
|
myuon/atcoder
|
/abc080/src/bin/d.rs
|
UTF-8
| 2,352 | 2.9375 | 3 |
[] |
no_license
|
macro_rules! input {
(source = $s:expr, $($r:tt)*) => {
let mut iter = $s.split_whitespace();
input_inner!{iter, $($r)*}
};
($($r:tt)*) => {
let s = {
use std::io::Read;
let mut s = String::new();
std::io::stdin().read_to_string(&mut s).unwrap();
s
};
let mut iter = s.split_whitespace();
input_inner!{iter, $($r)*}
};
}
macro_rules! input_inner {
($iter:expr) => {};
($iter:expr, ) => {};
($iter:expr, $var:ident : $t:tt $($r:tt)*) => {
let $var = read_value!($iter, $t);
input_inner!{$iter $($r)*}
};
}
macro_rules! read_value {
($iter:expr, ( $($t:tt),* )) => {
( $(read_value!($iter, $t)),* )
};
($iter:expr, [ $t:tt ; $len:expr ]) => {
(0..$len).map(|_| read_value!($iter, $t)).collect::<Vec<_>>()
};
($iter:expr, chars) => {
read_value!($iter, String).chars().collect::<Vec<char>>()
};
($iter:expr, usize1) => {
read_value!($iter, usize) - 1
};
($iter:expr, $t:ty) => {
$iter.next().unwrap().parse::<$t>().expect("Parse error")
};
}
fn solve(n: usize, _c: i32, stc: Vec<[i32; 3]>) -> i32 {
let mut r = 0;
let mut b = Vec::new();
for _ in 0..n {
b.push(false);
}
loop {
let mut remains = Vec::new();
for i in 0..n {
if !b[i] {
remains.push(i);
}
}
if remains.len() == 0 {
break;
}
remains.sort_by(|i, j| stc[*i][1].cmp(&stc[*j][1]));
let mut prev_t = 0;
let mut prev_c = 0;
for r in remains {
if stc[r][2] == prev_c {
if prev_t <= stc[r][0] {
b[r] = true;
prev_t = stc[r][1];
prev_c = stc[r][2];
}
} else {
if prev_t < stc[r][0] {
b[r] = true;
prev_t = stc[r][1];
prev_c = stc[r][2];
}
}
}
r += 1;
}
r
}
fn main() {
input! {
n: usize,
c: i32,
stc: [[i32; 3]; n]
}
println!(
"{:?}",
solve(n, c, stc.into_iter().map(|v| [v[0], v[1], v[2]]).collect())
);
}
| true |
aa50325476bd505fa52c2d02bbffb86c2ffcf236
|
Rust
|
toku-sa-n/uefi_wrapper
|
/src/protocols/console/edid.rs
|
UTF-8
| 1,596 | 2.84375 | 3 |
[] |
no_license
|
use core::{convert::TryInto, slice};
use r_efi::efi;
#[repr(C)]
#[derive(Copy, Clone, Debug)]
pub struct Discovered {
size: u32,
ptr: *const u8,
}
impl Discovered {
#[must_use]
pub fn preferred_resolution(&self) -> Option<(u32, u32)> {
Some((self.preferred_width()?, self.preferred_height()?))
}
fn preferred_width(&self) -> Option<u32> {
let info = self.get_info()?;
let upper = (u32::from(info[58]) & 0xf0) << 4;
let lower: u32 = info[56].into();
Some(upper | lower)
}
fn preferred_height(&self) -> Option<u32> {
let info = self.get_info()?;
let upper = (u32::from(info[61]) & 0xf0) << 4;
let lower: u32 = info[59].into();
Some(upper | lower)
}
fn get_info(&self) -> Option<&[u8]> {
if self.info_exists() {
// SAFETY: The EDID Discovered information exists.
Some(unsafe { self.get_info_unchecked() })
} else {
None
}
}
unsafe fn get_info_unchecked(&self) -> &[u8] {
let sz: usize = self.size.try_into().unwrap();
// SAFETY: `self.ptr` is valid for `sz` bytes as it is not null. These memory are not
// modified.
unsafe { slice::from_raw_parts(self.ptr, sz) }
}
fn info_exists(&self) -> bool {
!self.ptr.is_null()
}
}
unsafe impl crate::Protocol for Discovered {
const GUID: efi::Guid = efi::Guid::from_fields(
0x1c0c_34f6,
0xd380,
0x41fa,
0xa0,
0x49,
&[0x8a, 0xd0, 0x6c, 0x1a, 0x66, 0xaa],
);
}
| true |
7bf6386bc5c0664fcf96c49291073383f31f66c5
|
Rust
|
dyoshikawa/sqlx
|
/sqlx-core/src/logging.rs
|
UTF-8
| 1,593 | 2.625 | 3 |
[
"Apache-2.0",
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
use std::time::Duration;
pub(crate) const SLOW_QUERY_THRESHOLD: Duration = Duration::from_secs(1);
/// Logs the query and execution time of a statement as it runs.
macro_rules! log_execution {
( $query:expr, $block:expr ) => {{
// TODO: Log bound parameters
let query_string = $query.query_string();
let timer = std::time::Instant::now();
let result = $block;
let elapsed = timer.elapsed();
if elapsed >= crate::logging::SLOW_QUERY_THRESHOLD {
log::warn!(
target: "sqlx::query",
"{} ..., elapsed: {:.3?}\n\n{}\n",
crate::logging::parse_query_summary(query_string),
elapsed,
sqlformat::format(
query_string,
&sqlformat::QueryParams::None,
sqlformat::FormatOptions::default()
)
);
} else {
log::debug!(
target: "sqlx::query",
"{} ..., elapsed: {:.3?}\n\n{}\n",
crate::logging::parse_query_summary(query_string),
elapsed,
sqlformat::format(
query_string,
&sqlformat::QueryParams::None,
sqlformat::FormatOptions::default()
)
);
}
result
}};
}
pub(crate) fn parse_query_summary(query: &str) -> String {
// For now, just take the first 3 words
query
.split_whitespace()
.take(3)
.collect::<Vec<&str>>()
.join(" ")
}
| true |
e3bf6ecdb68ce91bdc6abc2fe1906047cd877190
|
Rust
|
k-jun/atcoder-abc
|
/src/abc62_a.rs
|
UTF-8
| 1,898 | 3.578125 | 4 |
[] |
no_license
|
use std::io::*;
use std::str::FromStr;
pub fn read<T: FromStr>() -> T {
let stdin = stdin();
let stdin = stdin.lock();
let token: String = stdin
.bytes()
.map(|c| c.expect("failed to read char") as char)
.skip_while(|c| c.is_whitespace())
.take_while(|c| !c.is_whitespace())
.collect();
token.parse().ok().expect("failed to parse token")
}
struct UnionFind {
parent: Vec<usize>,
size: Vec<usize>,
}
impl UnionFind {
fn new(n: usize) -> UnionFind {
// initialize
let mut v: Vec<usize> = vec![];
for i in 0..n + 1 {
v.push(i)
}
let mut s: Vec<usize> = vec![1; n + 1];
UnionFind { parent: v, size: s }
}
fn root(&self, x: usize) -> usize {
if self.parent[x] == x {
return x;
} else {
return self.root(self.parent[x]);
}
}
fn size(&self, a: usize) -> usize {
let ra = self.root(a);
self.size[ra]
}
fn unite(&mut self, a: usize, b: usize) {
let mut ra = self.root(a);
let mut rb = self.root(b);
if ra == rb {
return;
}
if self.size[ra] < self.size[rb] {
std::mem::swap(&mut ra, &mut rb)
};
self.size[ra] += self.size[rb];
self.parent[rb] = ra;
}
fn same(&self, a: usize, b: usize) -> bool {
let ra = self.root(a);
let rb = self.root(b);
self.parent[a] == self.parent[b]
}
}
fn main() {
let mut uf = UnionFind::new(12);
uf.unite(1, 3);
uf.unite(1, 5);
uf.unite(1, 7);
uf.unite(1, 8);
uf.unite(1, 10);
uf.unite(1, 12);
uf.unite(4, 6);
uf.unite(4, 9);
uf.unite(4, 11);
let x: usize = read();
let y: usize = read();
if uf.same(x, y) {
println!("Yes");
} else {
println!("No");
}
}
| true |
94405b8c34cf512573357c3556b57daf769355f5
|
Rust
|
AngelOnFira/COMP-4106
|
/Assignment2/over_ai/src/main.rs
|
UTF-8
| 11,727 | 3.1875 | 3 |
[] |
no_license
|
use std::process;
extern crate rand;
use rand::{thread_rng, Rng};
pub static mut NODES_VISITED: i64 = 0;
fn main() {
//1 is my units
//2 is enemy space
//0 is empty space
for games in 0..5 {
let mut board = create_board();
let mut i = 0;
while true {
i += 1;
board = ai_turn_enemy_children(board);
//println!("Random Move");
//print_board(board);
if check_win(board) != 0 { println!("The AI wins"); break; }
board = computer_turn(board);
//println!("Heuristic Move");
//print_board(board);
if check_win(board) != 0 { println!("The Random wins"); break; }
if i > 100 {
break;
}
}
unsafe {
println!("Nodes visited {}", NODES_VISITED);
}
let mut i = 0;
while false {
i += 1;
board = ai_turn_my_children(board);
println!("First AI Move");
print_board(board);
if check_win(board) != 0 { println!("The first AI wins"); break; }
board = ai_turn_enemy_children(board);
println!("Second AI Move");
unsafe {
println!("Nodes visited {}", NODES_VISITED);
}
print_board(board);
if check_win(board) != 0 { println!("The second AI wins"); break; }
if i > 100 {
break;
}
}
}
unsafe {
println!("Nodes visited {}", NODES_VISITED);
}
}
fn check_win(board: [[i8;6]; 6]) -> i8 {
let mut player_1 = 0;
let mut player_2 = 0;
for row in 0..6 {
for col in 0..6 {
if board[row][col] == 1 {
player_1 += 1;
}
if board[row][col] == 2 {
player_2 += 1;
}
if player_1 != 0 && player_2 != 0 {
return 0;
}
}
}
if player_1 == 0 {
return 2;
}
if player_2 == 0 {
return 1;
}
else {
return 0;
}
}
fn print_moves(moves: Vec<[[i8; 6]; 6]>) {
for i in 0..moves.len() {
println!("comp move");
print_board(moves[i]);
}
}
fn ai_turn_my_children(board: [[i8;6]; 6]) -> [[i8;6]; 6] {
let mut moves = find_possible_moves(board, 1, 2);
let mut curr_move = 0;
let mut curr_score = 0.0;
for i in 0..moves.len() {
let score = alphabeta(moves[i], 2, -10000.0, 10000.0, true, 1, 2, 1);
if score > curr_score {
curr_score = score;
curr_move = i;
}
}
return moves[curr_move];
}
fn ai_turn_enemy_children(board: [[i8;6]; 6]) -> [[i8;6]; 6] {
let mut moves = find_possible_moves(board, 1, 2);
let mut curr_move = 0;
let mut curr_score = 0.0;
for i in 0..moves.len() {
let score = alphabeta(moves[i], 2, -10000.0, 10000.0, true, 2, 1, 2);
if score > curr_score {
curr_score = score;
curr_move = i;
}
}
return moves[curr_move];
}
fn computer_turn(board: [[i8;6]; 6]) -> [[i8;6]; 6] {
let mut moves = find_possible_moves(board, 2, 1);
let mut rng = thread_rng();
//for i in 0..moves.len() {
//println!("comp move");
//print_board(moves[i]);
//}
let rand_num: i8 = rng.gen_range(0, moves.len() as i8);
return moves[rand_num as usize];
}
fn create_board() -> [[i8; 6]; 6] {
let mut rng = thread_rng();
let mut places = Vec::new();
let mut board: [[i8;6]; 6] = [
[2,2,2,2,2,2],
[2,2,2,2,2,2],
[2,2,2,2,2,2],
[2,2,2,2,2,2],
[2,2,2,2,2,2],
[2,2,2,2,2,2]];
for _piece in 0..18 {
let mut new_place = false;
while !new_place {
new_place = true;
let rand_num: i8 = rng.gen_range(0, 36);
for i in 0..places.len() {
if places[i] == rand_num {
new_place = false;
break;
}
}
if new_place {
places.push(rand_num);
}
}
}
while !places.is_empty() {
let place = places.pop().unwrap();
board[(place / 6) as usize][(place % 6) as usize] = 1;
}
print_board(board);
return board;
}
fn alphabeta(board: [[i8;6]; 6], depth: i16, alpha_in: f32, beta_in: f32, max_player: bool, player_piece: i8, other_piece: i8, heuristic: i8) -> f32 {
let mut alpha = alpha_in;
let mut beta = beta_in;
unsafe {
if heuristic == 2 {
NODES_VISITED += 1;
}
}
let mut children = find_possible_moves(board, player_piece, other_piece);
if depth == 0 {
if heuristic == 1 {
return children.len() as f32;
}
if heuristic == 2 {
return 1.0 / find_possible_moves(board, other_piece, player_piece).len() as f32;
}
}
if children.len() == 0 {
if heuristic == 1 {
return children.len() as f32;
}
if heuristic == 2 {
return 1.0 / find_possible_moves(board, other_piece, player_piece).len() as f32;
}
}
if max_player {
let mut v = -10000 as f32;
while !children.is_empty() {
let child = children.pop().unwrap();
v = max(v, alphabeta(child, depth - 1, alpha, beta, false, player_piece, other_piece, heuristic));
alpha = max(alpha, v);
if beta <= alpha {
break;
}
}
return v;
}
else {
let mut v = 10000 as f32;
while !children.is_empty() {
let child = children.pop().unwrap();
v = min(v, alphabeta(child, depth - 1, alpha, beta, true, player_piece, other_piece, heuristic));
beta = min(beta, v);
if beta <= alpha {
break;
}
}
return v;
}
}
fn find_possible_moves(board: [[i8;6]; 6], player_piece: i8, other_piece: i8) -> Vec<[[i8; 6]; 6]> {
let mut children = Vec::new();
for row in 0..board.len() {
for col in 0..board[0].len() {
if board[row][col] == player_piece {
//Simulates a piece being pushed all the way to one side
let mut new_children = push_piece(row, col, board, player_piece, other_piece);
while !new_children.is_empty() {
children.push(new_children.pop().unwrap());
}
}
}
}
//println!("len {}", children.len());
return children;
}
fn push_piece(row: usize, col: usize, board: [[i8;6]; 6], player_piece: i8, other_piece: i8) -> Vec<[[i8; 6]; 6]> {
let mut children = Vec::new();
let mut child_board = board;
if row > 0 {
if board[row - 1][col] == 0 {
let mut new_board = board;
new_board[row - 1][col] = player_piece;
new_board[row][col] = 0;
children.push(new_board);
}
}
if row < 5 {
if board[row + 1][col] == 0 {
let mut new_board = board;
new_board[row + 1][col] = player_piece;
new_board[row][col] = 0;
children.push(new_board);
}
}
if col > 0 {
if board[row][col - 1] == 0 {
let mut new_board = board;
new_board[row][col - 1] = player_piece;
new_board[row][col] = 0;
children.push(new_board);
}
}
if col < 5 {
if board[row][col + 1] == 0 {
let mut new_board = board;
new_board[row][col + 1] = player_piece;
new_board[row][col] = 0;
children.push(new_board);
}
}
//Test pushing in all four directions
'outer1: for x in (0..col + 1).rev() {
let mut last = 0;
//Simulates one push
'inner1: for remove in (0..x + 1).rev() {
let curr = child_board[row][remove];
child_board[row][remove] = last;
last = curr;
//Check if we have just pushed into an empty position
if last == 0 {
break 'inner1;
}
}
//Break to outer if we have pushed one of our own off
//the board, don't add it to children to look at
if last == player_piece {
break 'outer1;
}
//If we pushed an enemy off the board, add as a
//child
else if last == other_piece {
children.push(child_board);
}
}
child_board = board;
'outer2: for x in col..6 {
let mut last = 0;
//Simulates one push
'inner2: for remove in x..6 {
let curr = child_board[row][remove];
child_board[row][remove] = last;
last = curr;
//Check if we have just pushed into an empty position
if last == 0 {
break 'inner2;
}
}
//Break to outer if we have pushed one of our own off
//the board, don't add it to children to look at
if last == player_piece {
break 'outer2;
}
//If we pushed an enemy off the board, add as a
//child
else if last == other_piece {
children.push(child_board);
}
}
child_board = board;
'outer3: for y in (0..row + 1).rev() {
let mut last = 0;
//Simulates one push
'inner3: for remove in (0..y + 1).rev() {
let curr = child_board[remove][col];
child_board[remove][col] = last;
last = curr;
//Check if we have just pushed into an empty position
if last == 0 {
break 'inner3;
}
}
//Break to outer if we have pushed one of our own off
//the board, don't add it to children to look at
if last == player_piece {
break 'outer3;
}
//If we pushed an enemy off the board, add as a
//child
else if last == other_piece {
children.push(child_board);
}
}
child_board = board;
'outer4: for y in row..6 {
let mut last = 0;
//Simulates one push
'inner4: for remove in y..6 {
let curr = child_board[remove][col];
child_board[remove][col] = last;
last = curr;
//Check if we have just pushed into an empty position
if last == 0 {
break 'inner4;
}
}
//Break to outer if we have pushed one of our own off
//the board, don't add it to children to look at
if last == player_piece {
break 'outer4;
}
//If we pushed an enemy off the board, add as a
//child
else if last == other_piece {
children.push(child_board);
}
}
return children;
}
fn get_heuristic(_board: [[i8;6]; 6]) -> f32 {
//TODO
return 1 as f32;
}
fn print_board(board: [[i8;6]; 6]) {
println!("_____________");
for row in 0..6 {
for col in 0..6 {
if board[row][col] == 0 {
print!("| ");
}
else if board[row][col] == 1 {
print!("|O");
}
else if board[row][col] == 2 {
print!("|#");
}
}
println!("|");
}
println!("_____________")
}
fn max(a: f32, b: f32) -> f32 {
if a > b {
return a;
}
else {
return b;
}
}
fn min(a: f32, b: f32) -> f32 {
if a < b {
return a;
}
else {
return b;
}
}
//fn findChildren(board: )
| true |
25a7ddaadd5b7ae9a01227aea58a8ae53e1a51d9
|
Rust
|
oreganoli/disk9k1
|
/src/content/mod.rs
|
UTF-8
| 1,186 | 2.53125 | 3 |
[] |
no_license
|
use regex::Regex;
use crate::prelude::*;
pub mod data;
pub mod dirs;
pub mod file;
pub struct DirectoryRepo {
dir_regex: Regex,
}
pub struct DataRepo {}
impl DataRepo {
pub fn init(&self, conn: &mut Conn) -> AppResult<()> {
conn.execute(include_str!("sql/data/init.sql"), &[])?;
Ok(())
}
pub fn new(conn: &mut Conn) -> AppResult<Self> {
let rep = Self {};
rep.init(conn)?;
Ok(rep)
}
}
impl DirectoryRepo {
pub fn new(conn: &mut Conn) -> AppResult<Self> {
let rep = Self {
dir_regex: Regex::new(r#"(?m)(^\.?[^.\r\n\t\\/:"|?*<>]+[^\r\n\t\\/:"|?*<>]*$)"#)
.unwrap(),
};
rep.init(conn)?;
Ok(rep)
}
pub fn init(&self, conn: &mut Conn) -> AppResult<()> {
conn.execute(include_str!("sql/dirs/init.sql"), &[])?;
Ok(())
}
}
pub struct FileRepo {}
impl FileRepo {
pub fn new(conn: &mut Conn) -> AppResult<Self> {
let rep = Self {};
rep.init(conn)?;
Ok(rep)
}
pub fn init(&self, conn: &mut Conn) -> AppResult<()> {
conn.execute(include_str!("sql/file/init.sql"), &[])?;
Ok(())
}
}
| true |
c30441bcc4f5ef3a74d7483d86088172259ad3c2
|
Rust
|
VaranTavers/rust_drone_follow
|
/src/models/hat.rs
|
UTF-8
| 979 | 3.421875 | 3 |
[] |
no_license
|
use crate::models::lab_color::LabColor;
/// This struct contains the necessary information for a NaiveDetector about the hat that the
/// person, that should be followed, wears. It requires two Color coordinates from the Lab color space
/// and the average size of the hat.
pub struct Hat {
    /// Lower bound of the hat's color range in CIELAB space.
    pub color_low : LabColor,
    /// Upper bound of the hat's color range in CIELAB space.
    pub color_high : LabColor,
    /// Average size of the hat — presumably its area in the camera image in
    /// pixels (see the 1200.0 example below); TODO confirm the unit.
    pub size_avg : f64,
}
impl Hat {
    /// Creates a new Hat struct
    ///
    /// Usage:
    /// ```
    /// use rust_drone_follow::models::hat::Hat;
    /// use rust_drone_follow::models::lab_color::LabColor;
    /// // ...
    /// # fn main() {
    /// let hat = Hat::new(
    ///     LabColor::new(0, 20, -127),
    ///     LabColor::new(80, 127, -20),
    ///     1200.0
    /// );
    /// # }
    ///
    /// ```
    pub fn new(color_low: LabColor, color_high: LabColor, size_avg: f64) -> Hat {
        Hat {
            color_low,
            color_high,
            size_avg
        }
    }
}
| true |
51678abb628daa09ef1c09b2c02a24e8d17351cc
|
Rust
|
PiDelport/git-branchless
|
/src/commands/gc.rs
|
UTF-8
| 4,111 | 2.71875 | 3 |
[
"Apache-2.0"
] |
permissive
|
//! Deal with Git's garbage collection mechanism.
//!
//! Git treats a commit as unreachable if there are no references that point to
//! it or one of its descendants. However, the branchless workflow oftentimes
//! involves keeping such commits reachable until the user has explicitly hidden
//! them.
//!
//! This module is responsible for adding extra references to Git, so that Git's
//! garbage collection doesn't collect commits which branchless thinks are still
//! visible.
use anyhow::Context;
use fn_error_context::context;
use crate::core::eventlog::{is_gc_ref, EventLogDb, EventReplayer};
use crate::core::graph::{make_graph, BranchOids, CommitGraph, HeadOid, MainBranchOid};
use crate::core::mergebase::MergeBaseDb;
use crate::util::{
get_branch_oid_to_names, get_db_conn, get_head_oid, get_main_branch_oid, get_repo,
};
/// Collects every branchless GC ref whose commit is no longer present in the
/// visible commit graph, i.e. refs that only existed to keep now-hidden
/// commits alive and can be deleted.
fn find_dangling_references<'repo>(
    repo: &'repo git2::Repository,
    graph: &CommitGraph,
) -> anyhow::Result<Vec<git2::Reference<'repo>>> {
    let references = repo
        .references()
        .with_context(|| "Getting repo references")?;
    let mut result = Vec::new();
    for reference in references {
        let reference = reference.with_context(|| "Reading reference info")?;
        // Skip references whose names are not valid UTF-8.
        let reference_name = match reference.name() {
            Some(name) => name.to_owned(),
            None => continue,
        };
        let resolved_reference = reference
            .resolve()
            .with_context(|| format!("Resolving reference: {}", reference_name))?;
        // The graph only contains commits, so we don't need to handle the
        // case of the reference not peeling to a valid commit. (It might be
        // a reference to a different kind of object.)
        if let Ok(commit) = resolved_reference.peel_to_commit() {
            // Only GC refs are deletion candidates, and only once their
            // commit has fallen out of the graph.
            if is_gc_ref(&reference_name) && !graph.contains_key(&commit.id()) {
                result.push(reference)
            }
        }
    }
    Ok(result)
}
/// Mark a commit as reachable.
///
/// Once marked as reachable, the commit won't be collected by Git's garbage
/// collection mechanism until first garbage-collected by branchless itself
/// (using the `gc` function).
///
/// Args:
/// * `repo`: The Git repository.
/// * `commit_oid`: The commit OID to mark as reachable.
#[context("Marking commit reachable: {:?}", commit_oid)]
pub fn mark_commit_reachable(repo: &git2::Repository, commit_oid: git2::Oid) -> anyhow::Result<()> {
    // `git2::Oid` implements `Display`, so it can be formatted directly
    // without an intermediate `to_string()` allocation.
    let ref_name = format!("refs/branchless/{}", commit_oid);
    anyhow::ensure!(
        git2::Reference::is_valid_name(&ref_name),
        format!("Invalid ref name to mark commit as reachable: {}", ref_name)
    );
    // `force = true`: re-marking an already-marked commit simply overwrites
    // the existing ref with the same target.
    repo.reference(
        &ref_name,
        commit_oid,
        true,
        "branchless: marking commit as reachable",
    )
    .with_context(|| format!("Creating reference {}", ref_name))?;
    Ok(())
}
/// Run branchless's garbage collection.
///
/// Frees any references to commits which are no longer visible in the smartlog.
#[context("Running garbage-collection")]
pub fn gc() -> anyhow::Result<()> {
    // Open the repo and the databases backing the smartlog.
    let repo = get_repo()?;
    let conn = get_db_conn(&repo)?;
    let merge_base_db = MergeBaseDb::new(&conn)?;
    let event_log_db = EventLogDb::new(&conn)?;
    let event_replayer = EventReplayer::from_event_log_db(&event_log_db)?;
    let head_oid = get_head_oid(&repo)?;
    let main_branch_oid = get_main_branch_oid(&repo)?;
    let branch_oid_to_names = get_branch_oid_to_names(&repo)?;
    // Rebuild the currently-visible commit graph; anything outside it is
    // eligible for collection.
    let graph = make_graph(
        &repo,
        &merge_base_db,
        &event_replayer,
        event_replayer.make_default_cursor(),
        &HeadOid(head_oid),
        &MainBranchOid(main_branch_oid),
        &BranchOids(branch_oid_to_names.keys().copied().collect()),
        true,
    )?;
    println!("branchless: collecting garbage")
    ;
    // Delete every branchless GC ref whose commit fell out of the graph; Git's
    // own gc can then reclaim those commits.
    let dangling_references = find_dangling_references(&repo, &graph)?;
    for mut reference in dangling_references.into_iter() {
        reference
            .delete()
            .with_context(|| format!("Deleting reference {:?}", reference.name()))?;
    }
    Ok(())
}
| true |
eb4b27ddffddeddcebb29366fc5446f35df5994d
|
Rust
|
raygervais/exercism
|
/rust/beer-song/src/lib.rs
|
UTF-8
| 1,155 | 3.859375 | 4 |
[] |
no_license
|
/// Returns the verse of the song for `n` remaining bottles of beer.
pub fn verse(n: u32) -> String {
    match n {
        0 => no_bottle(),
        1 => one_bottle(),
        other => some_bottle(other),
    }
}
/// Verse for two or more remaining bottles (`n >= 2`).
pub fn some_bottle(n: u32) -> String {
    let next = n - 1;
    format!(
        "{0} bottles of beer on the wall, {0} bottles of beer.\nTake one down and pass it around, {1} {2} of beer on the wall.\n",
        n, next, plural(next)
    )
}
/// The verse sung when exactly one bottle remains.
pub fn one_bottle() -> String {
    String::from(
        "1 bottle of beer on the wall, 1 bottle of beer.\nTake it down and pass it around, no more bottles of beer on the wall.\n",
    )
}
/// The closing verse, sung when no bottles remain.
pub fn no_bottle() -> String {
    String::from(
        "No more bottles of beer on the wall, no more bottles of beer.\nGo to the store and buy some more, 99 bottles of beer on the wall.\n",
    )
}
/// Chooses the singular or plural noun for `n` bottles.
pub fn plural(n: u32) -> String {
    if n == 1 {
        "bottle".to_string()
    } else {
        "bottles".to_string()
    }
}
pub fn sing(start: u32, end: u32) -> String {
let mut song = String::from("");
for n in (end..start + 1).rev() {
song.push_str(verse(n).as_str());
if n != end {
song.push_str("\n");
}
}
return song;
}
| true |
8185193c1c125b3c02f0fbc3c0dda7a6ba7b7d51
|
Rust
|
hashingsystems/ptokens-core
|
/src/btc/add_btc_block_to_db.rs
|
UTF-8
| 1,032 | 2.796875 | 3 |
[] |
no_license
|
use crate::{
types::Result,
errors::AppError,
traits::DatabaseInterface,
btc::{
btc_state::BtcState,
btc_database_utils::{
put_btc_block_in_db,
btc_block_exists_in_db,
},
},
};
/// Adds the BTC block held in `state` to the database, rejecting it when a
/// block with the same ID is already stored.
///
/// Returns the unchanged `state` on success so calls can be chained.
pub fn maybe_add_btc_block_to_db<D>(
    state: BtcState<D>
) -> Result<BtcState<D>>
    where D: DatabaseInterface
{
    info!("✔ Checking if BTC block is already in the db...");
    if btc_block_exists_in_db(&state.db, &state.get_btc_block_and_id()?.id) {
        // Duplicate blocks are an error rather than a no-op: re-inserting
        // would overwrite the existing record.
        Err(AppError::Custom(
            "✘ BTC Block Rejected - it's already in the db!".to_string()
        ))
    } else {
        let block = state.get_btc_block_in_db_format()?;
        info!("✔ BTC block not in db!");
        info!("✔ Adding BTC block to db: {:?}", block);
        put_btc_block_in_db(&state.db, block)
            .and_then(|_| {
                info!("✔ BTC block added to database!");
                Ok(state)
            })
    }
}
| true |
9c2d966b113efb8a27179f443714d43095a9bcac
|
Rust
|
ericwooley/git_file_hooks
|
/src/parse.rs
|
UTF-8
| 1,842 | 3.25 | 3 |
[] |
no_license
|
use std::collections::HashMap;
/// One hook rule: the shell `commands` to run for files matching any of the
/// configured `patterns`.
#[derive(Deserialize, Debug, PartialEq)]
pub struct Command {
    pub patterns: Vec<String>,
    pub commands: Vec<String>,
}
/// Top-level configuration: maps a git hook name (e.g. "checkout") to its
/// list of rules.
#[derive(Deserialize, Debug)]
pub struct Config {
    pub hooks: HashMap<String, Vec<Command>>,
}
/// Parses a YAML document into a [`Config`].
///
/// # Panics
/// Panics (after printing the offending input and the parse error to stderr)
/// when the input is not valid YAML for the expected schema.
pub fn deserialize_config(raw: &String) -> Config {
    serde_yaml::from_str(&raw).unwrap_or_else(|err| {
        eprintln!("Could not parse input:\n {}", &raw.as_str());
        eprintln!("Error: {:?}", err);
        panic!("Error validating config file")
    })
}
// NOTE(review): the YAML fixtures below appear to have lost their nesting
// indentation (all keys are flush-left) — confirm they still parse as the
// tests expect.
#[cfg(test)]
mod tests {
    use super::*;
    // A well-formed config deserializes into one rule under "checkout".
    #[test]
    fn valid_deserialize_config() {
        let config = String::from(
            r#"
hooks:
checkout:
- patterns:
- "test"
commands:
- "echo 'works'""#,
        );
        println!("Testing Valid config, {}", config);
        let parsed_config = deserialize_config(&config);
        assert!(parsed_config.hooks.get("checkout").unwrap().len() == 1);
    }
    // An empty document is rejected with a panic.
    #[test]
    #[should_panic]
    fn empty_deserialize_config() {
        let config = String::from(r#""#);
        println!("Testing Valid config, {}", config);
        deserialize_config(&config);
    }
    // A config whose rules are not a sequence is rejected with a panic.
    #[test]
    #[should_panic]
    fn invalid_deserialize_config() {
        let config = String::from(
            r#"
hooks:
checkout:
patterns:
- "test"
commands:
- "echo 'works'""#,
        );
        println!("Testing Valid config, {}", config);
        deserialize_config(&config);
    }
    // The parsed hooks map is keyed by hook name.
    #[test]
    fn get_checkout_config() {
        let config = deserialize_config(&String::from(
            r#"
hooks:
checkout:
- patterns:
- "test"
commands:
- "echo 'works'""#,
        ));
        assert_eq!(
            config.hooks.get(&String::from("checkout")).unwrap().len(),
            1
        );
    }
}
| true |
f815316bbfb90cc7728b2b04593dde5b6731dc20
|
Rust
|
Lucky3028/minecraft_decorated_strings
|
/src/util.rs
|
UTF-8
| 3,229 | 3.21875 | 3 |
[
"MIT"
] |
permissive
|
use super::color_code::ColorCode;
use super::format_code::FormatCode;
use crate::message::get_msg;
use crate::message::MsgType::*;
use std::io::{stdin, stdout, Read, Write};
use std::{io, process};
/// Switches the Windows console code page via `chcp`.
///
/// NOTE(review): code page 20127 is US-ASCII; UTF-8 is 65001. The function
/// name says UTF-8 — confirm which code page is actually intended.
pub fn change_code_page_utf8() {
    process::Command::new("cmd.exe")
        .arg("/c")
        .arg("chcp")
        .arg("20127")
        .status()
        .expect(&*get_msg(FailureCHCP));
}
#[allow(clippy::unused_io_amount)]
/// Blocks until the user presses any key.
pub fn pause() {
    let mut stdout = stdout();
    stdout.write(b"Press any key to continue...").unwrap();
    stdout.flush().unwrap();
    stdin().read(&mut [0]).unwrap();
}
/// Reads one line from stdin and returns it trimmed.
pub fn read_texts() -> String {
    let mut s = String::new();
    io::stdin()
        .read_line(&mut s)
        .expect(&*get_msg(FailureReadLine));
    // Strip the trailing newline and surrounding whitespace.
    s.trim().to_string()
}
/// Wraps `text` in ANSI 24-bit (truecolor) escape codes so it renders with
/// the given RGB foreground color, resetting the style afterwards.
pub fn paint_txt(rgb_r: u8, rgb_g: u8, rgb_b: u8, text: String) -> String {
    let mut painted = format!("\x1b[38;2;{};{};{}m", rgb_r, rgb_g, rgb_b);
    painted.push_str(&text);
    painted.push_str("\x1b[m");
    painted
}
/// Looks up `target_id` among the known `FormatCode`s; when found, appends
/// that code's character sequence to `already_existed_code` and returns the
/// result.
///
/// # Errors
/// Returns an error message when no format code matches `target_id`.
///
/// # Example
/// ```ignore
/// let s = find_id_from_fmt_code("xb".to_string(), "§n".to_string());
/// // on success: Ok("§n" + the matched format code)
/// ```
pub fn find_id_from_fmt_code(
    target_id: String,
    already_existed_code: String,
) -> Result<String, String> {
    let format_codes = FormatCode::gen_from_enum();
    match format_codes.iter().find(|&x| target_id == x.id) {
        Some(fmt) => Ok(format!("{}{}", already_existed_code, fmt.code)),
        None => Err(get_msg(ErrFmtCodeNotFound)),
    }
}
/// Looks up `target_id` among the known `ColorCode`s; when found, appends
/// that code's character sequence to `already_existed_code` and returns the
/// result.
///
/// # Errors
/// Returns an error message when no color code matches `target_id`.
///
/// # Example
/// ```ignore
/// let s = find_id_from_clr_code("xb".to_string(), "§n".to_string());
/// // on success: Ok("§n" + the matched color code)
/// ```
pub fn find_id_from_clr_code(
    target_id: String,
    already_existed_code: String,
) -> Result<String, String> {
    let color_codes = ColorCode::gen_from_enum();
    match color_codes.iter().find(|&x| target_id == x.id) {
        Some(clr) => Ok(format!("{}{}", already_existed_code, clr.code)),
        None => Err(get_msg(ErrClrCodeNotFound)),
    }
}
/// Replaces every section sign (`§`) with its JSON escape sequence (`\u00a7`).
pub fn replace_section_to_json(target: String) -> String {
    target.replace('§', "\\u00a7")
}
/// Prints `err_msg` in red, waits for a keypress, then exits with status 1.
pub fn exit_program(err_msg: String) {
    eprintln!("{}", paint_txt(255, 0, 0, format!("Error: {}", err_msg)));
    pause();
    process::exit(1);
}
| true |
4d524aa23ae7004e9be911839268ad1fa8278874
|
Rust
|
yagince/gremlin-rs
|
/gremlin-client/src/message.rs
|
UTF-8
| 978 | 2.796875 | 3 |
[
"Apache-2.0"
] |
permissive
|
use serde_derive::{Deserialize, Serialize};
use serde_json::Value;
use uuid::Uuid;
/// Envelope for a request sent to the Gremlin server.
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
pub struct Message<T> {
    pub request_id: Uuid,
    op: String,
    processor: String,
    args: T,
}
/// Envelope for a response received from the Gremlin server.
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct Response {
    pub request_id: Uuid,
    pub result: ResponseResult,
    // NOTE(review): `ReponseStatus` is a typo for `ResponseStatus`, but the
    // name is public API — renaming would break downstream users.
    pub status: ReponseStatus,
}
/// The `result` portion of a response; `data` is left as raw JSON.
#[derive(Debug, Deserialize)]
pub struct ResponseResult {
    pub data: Value,
}
/// The `status` portion of a response: numeric code plus message.
#[derive(Debug, Deserialize)]
pub struct ReponseStatus {
    pub code: i16,
    pub message: String,
}
/// Builds a request message with a freshly generated request id.
pub fn message_with_args<T>(op: String, processor: String, args: T) -> Message<T> {
    message_with_args_and_uuid(op, processor, Uuid::new_v4(), args)
}
/// Builds a request message using the caller-supplied request id.
pub fn message_with_args_and_uuid<T>(
    op: String,
    processor: String,
    id: Uuid,
    args: T,
) -> Message<T> {
    Message {
        request_id: id,
        op,
        processor,
        args,
    }
}
| true |
ddf9a76ecf0b943f28383cfcb6623a13adc78147
|
Rust
|
cataniafran/pong-macroquad
|
/src/main.rs
|
UTF-8
| 8,589 | 2.734375 | 3 |
[] |
no_license
|
use macroquad::prelude::*;
const SCR_H: f32 = 360f32;
const SCR_W: f32 = 640f32;
const PADDLE_WIDTH: f32 = 10f32;
const PADDLE_HEIGHT: f32 = 60f32;
const PADDLE_SPEED: f32 = 5f32;
const BALL_SIZE: f32 = 10f32;
const BALL_SPEED: f32 = 5f32;
/// The screens/phases the game can be in.
#[derive(PartialEq)]
enum GameState {
    Start,
    Playing,
    End,
    Quit,
}
/// Whole-game state: current phase, both paddles, the ball, and the score as
/// `(player, enemy)`.
struct Context {
    state: GameState,
    player: Paddle,
    enemy: Paddle,
    ball: Ball,
    score: (u32, u32),
}
impl Context {
    /// Builds the initial game state: paddles near either screen edge, ball
    /// centred, score 0-0, waiting on the start screen.
    pub fn new() -> Self {
        Self {
            state: GameState::Start,
            player: Paddle::new(
                Vec2::new(10.0, SCR_H / 2.0 - PADDLE_HEIGHT),
                Vec2::new(PADDLE_WIDTH, PADDLE_HEIGHT),
            ),
            enemy: Paddle::new(
                Vec2::new(SCR_W - 10.0 - PADDLE_WIDTH, SCR_H / 2.0 - PADDLE_HEIGHT / 2.0),
                Vec2::new(PADDLE_WIDTH, PADDLE_HEIGHT),
            ),
            ball: Ball::new(
                Vec2::new(SCR_W / 2.0 - BALL_SIZE / 2.0, SCR_H / 2.0 - BALL_SIZE / 2.0),
                Vec2::new(BALL_SIZE, BALL_SIZE),
            ),
            score: (0, 0),
        }
    }
    /// Restores everything to the freshly-constructed state.
    pub fn reset(&mut self) {
        *self = Context::new();
    }
    /// Simple AI: a unit vertical movement vector steering the enemy paddle
    /// towards the ball, or zero when the ball is level with the paddle.
    pub fn get_ai_move(&self) -> Vec2 {
        let ball_pos = &self.ball.pos;
        let enemy = &self.enemy;
        if ball_pos.y > enemy.pos.y + enemy.size.y {
            vec2(0.0, 1.0)
        } else if ball_pos.y + BALL_SIZE < enemy.pos.y {
            vec2(0.0, -1.0)
        } else {
            vec2(0.0, 0.0)
        }
    }
    /// Advances one frame of game logic for the current state.
    pub fn update(&mut self) {
        match self.state {
            GameState::Start => {
                if is_key_pressed(KeyCode::Enter) {
                    self.state = GameState::Playing
                }
            }
            GameState::Playing => {
                // W/S move the player; Q forfeits to the end screen.
                if is_key_down(KeyCode::W) {
                    self.player.move_by(Vec2::new(0., -PADDLE_SPEED))
                }
                if is_key_down(KeyCode::S) {
                    self.player.move_by(Vec2::new(0., PADDLE_SPEED))
                }
                if is_key_pressed(KeyCode::Q) {
                    self.state = GameState::End
                }
                self.enemy.move_by(self.get_ai_move() * PADDLE_SPEED);
                let ball_pos = self.ball.move_by(self.ball.velocity * BALL_SPEED);
                // Scoring: leaving the right edge scores for the player,
                // leaving the left edge scores for the enemy.
                if ball_pos.x > SCR_W {
                    self.score.0 += 1;
                    self.ball.reset();
                } else if ball_pos.x < 0.0 {
                    self.score.1 += 1;
                    self.ball.reset();
                } else if self.player.is_colliding(ball_pos) || self.enemy.is_colliding(ball_pos) {
                    // Paddle hit: reverse horizontal travel and keep the
                    // ball between the two paddle faces.
                    self.ball.velocity.x *= -1.0;
                    self.ball.pos.x = clamp(
                        self.ball.pos.x,
                        self.player.pos.x + PADDLE_WIDTH,
                        self.enemy.pos.x,
                    );
                    // Deflect up or down depending on which half of the
                    // nearest paddle was struck.
                    let bounced_center = if (self.ball.pos.x - self.player.pos.x).abs()
                        < (self.ball.pos.x - self.enemy.pos.x).abs()
                    {
                        self.player.pos.y + PADDLE_HEIGHT / 2.0
                    } else {
                        self.enemy.pos.y + PADDLE_HEIGHT / 2.0
                    };
                    if self.ball.pos.y + BALL_SIZE / 2.0 < bounced_center {
                        self.ball.velocity.y = -1.0;
                    } else {
                        self.ball.velocity.y = 1.0;
                    }
                }
            }
            GameState::End => {
                if is_key_pressed(KeyCode::Enter) {
                    self.reset();
                } else if is_key_pressed(KeyCode::Escape) {
                    self.state = GameState::Quit
                }
            }
            GameState::Quit => {}
        }
    }
    /// Renders the current state (title, playfield, or final-score screen).
    pub fn draw(&self) {
        match self.state {
            GameState::Start => {
                // Centre the title and prompt on screen.
                let pong_label = "PONG";
                let press_start = "Press Enter to play";
                let pong_size = measure_text(pong_label, None, 16u16, 1.0);
                let start_size = measure_text(press_start, None, 16u16, 1.0);
                draw_text(
                    pong_label,
                    SCR_W / 2.0 - pong_size.width / 2.0,
                    SCR_H / 2.0 - pong_size.height / 2.0,
                    16.0,
                    WHITE,
                );
                draw_text(
                    press_start,
                    SCR_W / 2.0 - start_size.width / 2.0,
                    (SCR_H / 2.0 - start_size.height / 2.0) + 18.0,
                    16.0,
                    WHITE,
                );
            }
            GameState::Playing => {
                // Score at the top centre, then both paddles and the ball.
                let score = format!("{} - {}", self.score.0, self.score.1);
                let score_size = measure_text(score.as_str(), None, 16, 1.0);
                draw_text(
                    score.as_str(),
                    SCR_W / 2.0 - score_size.width / 2.0,
                    16.0,
                    16.0,
                    WHITE,
                );
                draw_rectangle(
                    self.player.pos.x,
                    self.player.pos.y,
                    self.player.size.x,
                    self.player.size.y,
                    WHITE,
                );
                draw_rectangle(
                    self.enemy.pos.x,
                    self.enemy.pos.y,
                    self.enemy.size.x,
                    self.enemy.size.y,
                    WHITE,
                );
                draw_rectangle(
                    self.ball.pos.x,
                    self.ball.pos.y,
                    self.ball.size.x,
                    self.ball.size.y,
                    WHITE,
                );
            }
            GameState::End => {
                let score = format!("Final score: {} - {}", self.score.0, self.score.1);
                let score_size = measure_text(score.as_str(), None, 16, 1.0);
                let press_message = "Press Enter to restart the game or Esc to exit";
                let press_message_size = measure_text(press_message, None, 12, 1.0);
                let main_text_y = SCR_H / 2.0 - score_size.height / 2.0;
                draw_text(
                    score.as_str(),
                    SCR_W / 2.0 - score_size.width / 2.0,
                    main_text_y,
                    16.0,
                    WHITE,
                );
                draw_text(
                    press_message,
                    SCR_W / 2.0 - press_message_size.width / 2.0,
                    main_text_y + 16.0,
                    12.0,
                    WHITE,
                );
            }
            GameState::Quit => {}
        }
    }
}
/// An axis-aligned paddle; `pos` is its top-left corner.
struct Paddle {
    pos: Vec2,
    size: Vec2,
}
impl Paddle {
    /// Creates a paddle at `pos` with the given `size`.
    pub fn new(pos: Vec2, size: Vec2) -> Self {
        Self { pos, size }
    }
    /// Translates the paddle by `mov`, clamping it inside the screen
    /// vertically.
    pub fn move_by(&mut self, mov: Vec2) {
        self.pos += mov;
        self.pos.y = clamp(self.pos.y, 0.0, SCR_H - self.size.y)
        ;
    }
    /// Axis-aligned bounding-box overlap test between this paddle and a
    /// BALL_SIZE square at `ball_pos`.
    pub fn is_colliding(&self, ball_pos: &Vec2) -> bool {
        self.pos.x < ball_pos.x + BALL_SIZE
            && self.pos.x + self.size.x > ball_pos.x
            && self.pos.y < ball_pos.y + BALL_SIZE
            && self.pos.y + self.size.y > ball_pos.y
    }
}
/// The ball: position, size, and a per-axis direction vector that gets
/// scaled by BALL_SPEED each frame.
struct Ball {
    pos: Vec2,
    size: Vec2,
    velocity: Vec2,
}
impl Ball {
    /// Creates a ball heading down-left (towards the player).
    pub fn new(pos: Vec2, size: Vec2) -> Self {
        Self {
            pos,
            size,
            velocity: Vec2::new(-1.0, 1.0),
        }
    }
    /// Translates the ball by `mov`, bouncing off the top/bottom screen edges
    /// by flipping the vertical velocity; returns the new position.
    pub fn move_by(&mut self, mov: Vec2) -> &Vec2 {
        self.pos += mov;
        self.pos.y = clamp(self.pos.y, 0.0, SCR_H - self.size.y);
        if (self.pos.y - (SCR_H - self.size.y).abs()).abs() < f32::EPSILON || self.pos.y == 0f32 {
            //screen bounce
            self.velocity.y *= -1.0
        }
        &self.pos
    }
    /// Recenters the ball after a point is scored.
    pub fn reset(&mut self) {
        *self = Ball::new(
            Vec2::new(SCR_W / 2.0 - BALL_SIZE / 2.0, SCR_H / 2.0 - BALL_SIZE / 2.0),
            Vec2::new(BALL_SIZE, BALL_SIZE),
        )
    }
}
/// Entry point: sets up a fixed 640x360 virtual-resolution camera, then runs
/// the update/draw loop until the game reaches the Quit state.
#[macroquad::main("PONG")]
async fn main() {
    let mut ctx = Context::new();
    // Map the virtual coordinate space onto the window; the negated y zoom
    // flips the vertical axis.
    set_camera(&Camera2D {
        zoom: vec2(1. / SCR_W * 2., -1. / SCR_H * 2.),
        target: vec2(SCR_W / 2., SCR_H / 2.),
        ..Default::default()
    });
    loop {
        if ctx.state == GameState::Quit {
            return;
        }
        ctx.update();
        clear_background(BLACK);
        ctx.draw();
        next_frame().await;
    }
}
| true |
6f8bf38cc7faed00b2d2d74276fde26064dcaf37
|
Rust
|
timando/rsip
|
/rsip-derives/src/typed_header.rs
|
UTF-8
| 5,354 | 2.6875 | 3 |
[
"MIT"
] |
permissive
|
use quote::quote;
/// Generates the `TypedHeader` impl wiring a header type to its tokenizer.
pub fn trait_methods(struct_name: &syn::Ident) -> proc_macro2::TokenStream {
    quote! {
        impl<'a> crate::headers::typed::TypedHeader<'a> for #struct_name {
            type Tokenizer = Tokenizer<'a>;
        }
    }
}
/// Generates `From<typed header> for untyped header`.
//TODO: this shouldn't be needed once specialization lands
pub fn into_untyped(struct_name: &syn::Ident) -> proc_macro2::TokenStream {
    quote! {
        impl std::convert::From<#struct_name> for crate::headers::#struct_name {
            fn from(typed: #struct_name) -> Self {
                use crate::headers::untyped::UntypedHeader;
                crate::headers::#struct_name::new(typed)
            }
        }
    }
}
/// Generates an `untyped()` convenience method consuming the typed header.
pub fn untyped(struct_name: &syn::Ident) -> proc_macro2::TokenStream {
    quote! {
        impl #struct_name {
            pub fn untyped(self) -> crate::headers::#struct_name {
                use crate::headers::untyped::UntypedHeader;
                crate::headers::#struct_name::new(self)
            }
        }
    }
}
/// Generates `From<typed header> for String` via the header's `Display`.
//TODO: this shouldn't be needed once specialization lands
pub fn into_string(struct_name: &syn::Ident) -> proc_macro2::TokenStream {
    quote! {
        impl std::convert::From<#struct_name> for String {
            fn from(typed: #struct_name) -> Self {
                typed.to_string()
            }
        }
    }
}
/// Generates `From<typed header> for crate::Header` (the header enum).
pub fn into_header(struct_name: &syn::Ident) -> proc_macro2::TokenStream {
    quote! {
        impl std::convert::From<#struct_name> for crate::Header {
            fn from(typed: #struct_name) -> Self {
                crate::Header::#struct_name(typed.into())
            }
        }
    }
}
/// Generates `TryFrom<untyped header>` that tokenizes then converts.
//TODO: this should be needed once specialization lands
pub fn try_from_untyped(struct_name: &syn::Ident) -> proc_macro2::TokenStream {
    quote! {
        impl std::convert::TryFrom<crate::headers::#struct_name> for #struct_name {
            type Error = crate::Error;
            fn try_from(untyped: crate::headers::#struct_name) -> Result<Self, Self::Error> {
                use crate::headers::UntypedHeader;
                use crate::headers::typed::Tokenize;
                std::convert::TryInto::try_into(Tokenizer::tokenize(untyped.value())?)
            }
        }
    }
}
/// Generates the `typed` module for a header whose value is an integer of
/// `integer_type` (newtype wrapper, tokenizer conversion via `parse`, and
/// `Display`), together with the default tokenizer module.
pub fn integer_typed_mods(
    struct_name: &syn::Ident,
    integer_type: &str,
) -> proc_macro2::TokenStream {
    let default_tokenizer = default_tokenizer();
    let integer_type = quote::format_ident!("{}", integer_type);
    quote! {
        #default_tokenizer
        pub mod typed {
            use super::Tokenizer;
            use macros::TypedHeader;
            //TODO: reorganize HasValue, reuse custom Display macro
            #[derive(TypedHeader, Eq, PartialEq, Clone, Debug)]
            pub struct #struct_name(#integer_type);
            impl #struct_name {
                pub fn new(value: impl Into<#integer_type>) -> Self {
                    Self(value.into())
                }
                pub fn value(&self) -> &#integer_type {
                    &self.0
                }
            }
            impl<'a> std::convert::TryFrom<Tokenizer<'a>> for #struct_name {
                type Error = crate::Error;
                fn try_from(tokenizer: Tokenizer) -> Result<Self, Self::Error> {
                    Ok(Self(tokenizer.part.parse::<#integer_type>()?))
                }
            }
            impl std::fmt::Display for #struct_name {
                fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                    write!(f, "{}", self.0)
                }
            }
        }
    }
}
/// Generates the `typed` module for a header whose value is a plain string
/// (newtype over `String`, infallible-in-practice tokenizer conversion, and
/// `Display`), together with the default tokenizer module.
pub fn string_typed_mods(struct_name: &syn::Ident) -> proc_macro2::TokenStream {
    let default_tokenizer = default_tokenizer();
    quote! {
        #default_tokenizer
        pub mod typed {
            use super::Tokenizer;
            use macros::TypedHeader;
            //TODO: reorganize HasValue, reuse custom Display macro
            #[derive(TypedHeader, Eq, PartialEq, Clone, Debug)]
            pub struct #struct_name(String);
            impl #struct_name {
                pub fn new(value: impl Into<String>) -> Self {
                    Self(value.into())
                }
                pub fn value(&self) -> &str {
                    &self.0
                }
            }
            impl<'a> std::convert::TryFrom<Tokenizer<'a>> for #struct_name {
                type Error = crate::Error;
                fn try_from(tokenizer: Tokenizer) -> Result<Self, Self::Error> {
                    Ok(Self(tokenizer.part.into()))
                }
            }
            impl std::fmt::Display for #struct_name {
                fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                    write!(f, "{}", self.0)
                }
            }
        }
    }
}
/// Generates the default tokenizer module: a `Tokenizer` that simply borrows
/// the whole header value without any splitting.
fn default_tokenizer() -> proc_macro2::TokenStream {
    quote! {
        pub use tokenizer::Tokenizer;
        pub mod tokenizer {
            use crate::headers::header::Tokenize;
            #[derive(Eq, PartialEq, Clone, Debug)]
            pub struct Tokenizer<'a> {
                pub part: &'a str,
            }
            impl<'a> Tokenize<'a> for Tokenizer<'a> {
                fn tokenize(part: &'a str) -> Result<Self, crate::Error> {
                    Ok(Self { part })
                }
            }
        }
    }
}
| true |
14ea99f70484aacaea8562b8081c394a363c5b6c
|
Rust
|
frankegoesdown/LeetCode-in-Go
|
/Algorithms/0066.plus-one/plus-one.go
|
UTF-8
| 406 | 3.09375 | 3 |
[
"MIT"
] |
permissive
|
package problem0066
// plusOne treats digits as a big-endian decimal number and returns that
// number plus one. Note: the input slice is mutated in place; a new slice is
// only allocated when a carry extends the number by one digit.
func plusOne(digits []int) []int {
	length := len(digits)
	if length == 0 {
		return []int{1}
	}
	// Add one to the last digit.
	digits[length-1]++
	// Propagate the carry from the last digit towards the front.
	for i := length - 1; i > 0; i-- {
		if digits[i] < 10 {
			break
		}
		digits[i] -= 10
		digits[i-1]++
	}
	// Handle a carry out of the most significant digit.
	if digits[0] > 9 {
		digits[0] -= 10
		digits = append([]int{1}, digits...)
	}
	return digits
}
| true |
5634c71d26c99a0d0ea9e6f4d64a9fc59589e906
|
Rust
|
danzhu/rim
|
/src/lib/mod.rs
|
UTF-8
| 846 | 2.515625 | 3 |
[] |
no_license
|
mod store;
pub use self::store::Store;
/// A 1-based line/column position within a text buffer.
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub struct Pos {
    pub line: usize,
    pub column: usize,
}

impl Pos {
    /// Creates a position at the start of a buffer (line 1, column 1).
    pub fn new() -> Self {
        Self { line: 1, column: 1 }
    }
}

impl Default for Pos {
    fn default() -> Self {
        Self::new()
    }
}
/// A region delimited by a start and an end position.
#[derive(Clone, Copy, Debug, Default, Eq, PartialEq)]
pub struct Span {
    pub start: Pos,
    pub end: Pos,
}

impl Span {
    /// Creates a span with both endpoints at the default position.
    pub fn new() -> Self {
        Self::default()
    }
}
/// A height/width pair.
#[derive(Clone, Copy, Debug, Default)]
pub struct Size {
    pub height: i32,
    pub width: i32,
}

impl Size {
    /// Creates a zero-sized `Size`.
    pub fn new() -> Self {
        Self::default()
    }
}
/// A rectangle defined by its position and size.
#[derive(Clone, Copy, Debug, Default)]
pub struct Rect {
    pub pos: Pos,
    pub size: Size,
}

impl Rect {
    /// Creates a rectangle at the default position with zero size.
    pub fn new() -> Self {
        Self::default()
    }
}
| true |
d591d743536efc9c4ec6633847bd6a80d914270d
|
Rust
|
razielgn/brainfuck
|
/src/brainfuck/parser.rs
|
UTF-8
| 753 | 3.296875 | 3 |
[] |
no_license
|
use crate::instruction::Instruction;
use std::collections::VecDeque;
/// Translates raw source bytes into the instruction stream, silently
/// discarding any byte that is not one of the eight Brainfuck commands.
pub fn parse(bytes: &[u8]) -> VecDeque<Instruction> {
    bytes.iter().filter_map(|&b| parse_byte(b)).collect()
}
/// Maps a single source byte to its Brainfuck instruction, or `None` for any
/// byte that is not one of the eight command characters (such bytes act as
/// comments and are ignored by the parser).
fn parse_byte(b: u8) -> Option<Instruction> {
    match b as char {
        '+' => Some(Instruction::Add(1)),
        '-' => Some(Instruction::Sub(1)),
        '>' => Some(Instruction::Right(1)),
        '<' => Some(Instruction::Left(1)),
        '.' => Some(Instruction::Out),
        ',' => Some(Instruction::In),
        '[' => Some(Instruction::Open),
        ']' => Some(Instruction::Close),
        _ => None,
    }
}
| true |
f4c095b30792143bd3e6a6ebd3f7bde0ba039418
|
Rust
|
Ameobea/io-game
|
/frontend/game-engine/src/proto_utils.rs
|
UTF-8
| 4,209 | 2.6875 | 3 |
[] |
no_license
|
use std::mem;
use nalgebra::{Isometry2, Vector2};
use native_physics::physics::Movement;
use nphysics2d::algebra::Velocity2;
use protobuf::Message;
use uuid::Uuid;
use conf::CONF;
use phoenix_proto::send_channel_message;
use protos::channel_messages::Event;
use protos::client_messages::{ClientMessage, ClientMessage_oneof_payload as ClientMessageContent};
use protos::message_common::{MovementDirection, Uuid as ProtoUuid};
pub use protos::server_messages::ServerMessage_Payload_oneof_payload as ServerMessageContent;
use protos::server_messages::{
MovementUpdate, ServerMessage, ServerMessage_Payload as ServerMessagePayload,
};
use util::warn;
/// A server message whose envelope has been unpacked into its id and payload.
pub struct InnerServerMessage {
    pub id: Uuid,
    pub content: ServerMessageContent,
}
impl Into<Option<InnerServerMessage>> for ServerMessagePayload {
    fn into(mut self: ServerMessagePayload) -> Option<InnerServerMessage> {
        // In debug builds, surface any fields the schema does not recognize.
        if cfg!(debug_assertions) {
            if let Some(ref fields) = self.get_unknown_fields().fields {
                let field_names = fields.iter().collect::<Vec<_>>();
                warn(format!(
                    "Unknown fields provided to message: {:?}",
                    field_names
                ));
            }
        }
        // Both `id` and `payload` are required; warn and drop the message
        // (return `None`) when either is missing.
        if !self.has_id() {
            warn("Issue while parsing server message: `id` was not provided!");
            return None;
        } else if self.payload.is_none() {
            warn("Issue while parsing server message: `payload` as not provided!");
            return None;
        }
        let inner_msg = InnerServerMessage {
            id: self.take_id().into(),
            content: self.payload.unwrap(),
        };
        Some(inner_msg)
    }
}
impl Into<Uuid> for ProtoUuid {
    fn into(self: ProtoUuid) -> Uuid {
        // Reassembles the two u64 halves into the u128 backing the `Uuid`.
        // NOTE(review): `transmute` makes the byte layout host-endian
        // dependent — confirm the backend packs `data_1`/`data_2` the same
        // way (the inverse conversion below is at least symmetric).
        let data: u128 = unsafe { mem::transmute([self.get_data_1(), self.get_data_2()]) };
        data.into()
    }
}
impl Into<ProtoUuid> for Uuid {
    fn into(self: Uuid) -> ProtoUuid {
        // Inverse of the conversion above: split the 128-bit UUID into two
        // u64 halves for the wire format.
        let (data_1, data_2): (u64, u64) = unsafe { mem::transmute(self) };
        let mut id = ProtoUuid::new();
        id.set_data_1(data_1);
        id.set_data_2(data_2);
        id
    }
}
impl<'a> Into<(Isometry2<f32>, Velocity2<f32>)> for &'a MovementUpdate {
    /// Unpacks a wire-format movement update into an nphysics position and
    /// velocity pair.
    fn into(self) -> (Isometry2<f32>, Velocity2<f32>) {
        let pos = Isometry2::new(Vector2::new(self.pos_x, self.pos_y), self.rotation);
        let velocity = Velocity2::new(
            Vector2::new(self.velocity_x, self.velocity_y),
            self.angular_velocity,
        );
        (pos, velocity)
    }
}
impl Into<Movement> for MovementDirection {
    /// Maps the protobuf direction enum onto the physics engine's movement
    /// enum, variant for variant.
    fn into(self) -> Movement {
        match self {
            MovementDirection::DOWN => Movement::Down,
            MovementDirection::DOWN_LEFT => Movement::DownLeft,
            MovementDirection::DOWN_RIGHT => Movement::DownRight,
            MovementDirection::LEFT => Movement::Left,
            MovementDirection::RIGHT => Movement::Right,
            MovementDirection::STOP => Movement::Stop,
            MovementDirection::UP => Movement::Up,
            MovementDirection::UP_LEFT => Movement::UpLeft,
            MovementDirection::UP_RIGHT => Movement::UpRight,
        }
    }
}
/// Unpacks every payload entry of a `ServerMessage`, dropping entries that
/// fail validation (missing id or payload).
pub fn parse_server_msg_payload(msg: ServerMessage) -> Vec<InnerServerMessage> {
    let mut inner_messages = Vec::with_capacity(msg.payload.len());
    for msg in msg.payload.into_iter() {
        if let Some(inner_msg) = msg.into() {
            inner_messages.push(inner_msg);
        }
    }
    inner_messages
}
/// Serializes a protobuf message to its binary encoding.
///
/// # Panics
/// Panics when protobuf serialization fails.
pub fn msg_to_bytes<M: Message>(msg: M) -> Vec<u8> {
    msg.write_to_bytes().unwrap_or_else(|err| {
        panic!(format!(
            "Error while writing created `ServerMessage` to bytes: {:?}",
            err
        ))
    })
}
/// Creates a `ClientMessage` with the given payload, converts it to binary, encodes it into
/// binary, and sends it over the WebSocket to the backend.
pub fn send_user_message(payload: ClientMessageContent) {
    let mut client_msg = ClientMessage::new();
    client_msg.payload = Some(payload);
    let mut event = Event::new();
    event.set_custom_event(CONF.network.custom_event_name.into());
    send_channel_message(event, Some(client_msg));
}
| true |
e3e5cdedc38863f90b46858044abe1cbcd5d1fcd
|
Rust
|
FauxFaux/annul
|
/src/strings.rs
|
UTF-8
| 5,634 | 3.171875 | 3 |
[] |
no_license
|
use std::io;
use std::io::Write;
/// A printable character's raw bytes, stored inline (1-4 bytes, matching the
/// possible UTF-8 sequence lengths).
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
enum ShortArray {
    One([u8; 1]),
    Two([u8; 2]),
    Three([u8; 3]),
    Four([u8; 4]),
}
/// Classification of a decoded input unit: a single non-printable byte, or a
/// complete printable (possibly multi-byte) character.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
enum Char {
    Binary(u8),
    Printable(ShortArray),
}
impl Char {
    /// Number of input bytes this unit consumed.
    fn len(&self) -> usize {
        match *self {
            Char::Binary(_) => 1,
            Char::Printable(arr) => arr.len(),
        }
    }
}
impl ShortArray {
    /// Number of bytes stored.
    fn len(&self) -> usize {
        match self {
            ShortArray::One(..) => 1,
            ShortArray::Two(..) => 2,
            ShortArray::Three(..) => 3,
            ShortArray::Four(..) => 4,
        }
    }
    /// Appends the stored bytes to `v`.
    fn push_to(&self, v: &mut Vec<u8>) {
        match *self {
            ShortArray::One(a) => v.extend_from_slice(&a),
            ShortArray::Two(a) => v.extend_from_slice(&a),
            ShortArray::Three(a) => v.extend_from_slice(&a),
            ShortArray::Four(a) => v.extend_from_slice(&a),
        }
    }
}
/// Decodes the next unit from the front of `bytes`.
///
/// Returns `None` when more input is needed (empty slice, or a multi-byte
/// sequence whose continuation bytes have not all arrived). Otherwise the
/// head is classified as printable (ASCII printable plus tab/newline/CR, or
/// a well-formed 2-4 byte UTF-8 sequence) or as a single binary byte.
///
/// NOTE(review): continuation bytes are only checked for the `10xxxxxx`
/// pattern, so overlong or out-of-range encodings are not rejected here —
/// confirm that is acceptable downstream.
fn get_char(bytes: &[u8]) -> Option<Char> {
    if bytes.is_empty() {
        return None;
    }
    let byte = bytes[0];
    // Control characters other than tab, newline, carriage return → binary.
    if byte < b' ' && b'\t' != byte && b'\n' != byte && b'\r' != byte {
        return Some(Char::Binary(byte));
    }
    // Printable ASCII (0x20..=0x7e).
    if byte < 0x7f {
        return Some(Char::Printable(ShortArray::One([byte])));
    }
    // Derive the expected UTF-8 sequence length from the leading byte; a
    // stray continuation byte (or DEL / invalid lead) is treated as binary.
    let wanted = if byte & 0b1110_0000 == 0b1100_0000 {
        2
    } else if byte & 0b1111_0000 == 0b1110_0000 {
        3
    } else if byte & 0b1111_1000 == 0b1111_0000 {
        4
    } else {
        return Some(Char::Binary(byte));
    };
    if bytes.len() < wanted {
        // Sequence incomplete: ask the caller for more bytes.
        return None;
    }
    for i in 1..wanted {
        if !follower(bytes[i]) {
            // Broken sequence: emit the lead byte as binary.
            return Some(Char::Binary(byte));
        }
    }
    Some(Char::Printable(match wanted {
        2 => ShortArray::Two([bytes[0], bytes[1]]),
        3 => ShortArray::Three([bytes[0], bytes[1], bytes[2]]),
        4 => ShortArray::Four([bytes[0], bytes[1], bytes[2], bytes[3]]),
        _ => unreachable!(),
    }))
}
/// True when `byte` is a UTF-8 continuation byte (bit pattern `10xxxxxx`).
#[inline]
fn follower(byte: u8) -> bool {
    (byte >> 6) == 0b10
}
/// Accumulates raw bytes until they form a complete decodable unit.
struct CharBuf {
    buf: Vec<u8>,
}
impl CharBuf {
    /// Feeds one byte; returns a completed `Char` once enough bytes have
    /// arrived, draining the consumed bytes from the internal buffer.
    fn push(&mut self, byte: u8) -> Option<Char> {
        self.buf.push(byte);
        let opt = get_char(&self.buf);
        if let Some(c) = opt {
            let _ = self.buf.drain(..c.len());
        }
        opt
    }
}
impl Default for CharBuf {
    fn default() -> Self {
        CharBuf {
            // Capacity 5: at most a 4-byte sequence plus the byte that
            // completes it.
            buf: Vec::with_capacity(5),
        }
    }
}
/// Streaming extractor of printable strings from arbitrary bytes (in the
/// spirit of `strings(1)`): printable runs longer than 3 bytes are written to
/// `output`, NUL-separated; binary runs are dropped.
pub struct StringBuf<W> {
    output: W,
    chars: CharBuf,
    // Pending printable candidate (its tail may hold a short tolerated
    // binary run).
    buf: Vec<u8>,
    // Length of the binary-byte run currently at the tail of `buf`.
    binary_run: usize,
    // Total binary bytes accepted into `buf` since the last flush.
    binary_sum: usize,
}
impl<W: Write> StringBuf<W> {
    /// Feeds a chunk of input bytes through the extractor.
    pub fn accept(&mut self, buf: &[u8]) -> io::Result<()> {
        for &b in buf {
            self.push(b)?;
        }
        Ok(())
    }
    fn push(&mut self, b: u8) -> io::Result<()> {
        // Wait until the byte completes a full character.
        let c = match self.chars.push(b) {
            Some(c) => c,
            None => return Ok(()),
        };
        match c {
            // Tolerate isolated binary bytes: at most 2 in a row, and fewer
            // than a quarter of the buffered candidate overall.
            Char::Binary(c) if self.binary_run < 2 && self.binary_sum < self.buf.len() / 4 => {
                self.binary_run += 1;
                self.binary_sum += 1;
                self.buf.push(c);
            }
            // Too much binary: the candidate ends here. Drop the trailing
            // binary run and, if what remains is longer than 3 bytes, emit it
            // followed by a NUL separator.
            Char::Binary(_) => {
                self.buf.truncate(self.buf.len() - self.binary_run);
                if self.buf.len() > 3 {
                    self.output.write_all(&self.buf)?;
                    self.output.write_all(&[0])?;
                }
                self.binary_run = 0;
                self.binary_sum = 0;
                self.buf.clear()
            }
            Char::Printable(arr) => {
                // If the buffer holds nothing but binary bytes, restart the
                // candidate from this character.
                if self.binary_run == self.buf.len() {
                    self.buf.clear();
                }
                arr.push_to(&mut self.buf);
                // Flush long candidates in 250-byte slices to bound memory.
                if self.buf.len() > 255 {
                    self.output.write_all(&self.buf[..250])?;
                    self.binary_sum = 0;
                    let _ = self.buf.drain(..250);
                }
                self.binary_run = 0;
            }
        }
        Ok(())
    }
    /// Flushes the buffered candidate plus any incomplete character bytes,
    /// then returns the underlying writer.
    pub fn finish(mut self) -> io::Result<W> {
        self.output.write_all(&self.buf)?;
        self.output.write_all(&self.chars.buf)?;
        Ok(self.output)
    }
}
impl<W> StringBuf<W> {
    /// Creates an extractor writing results to `output`.
    pub fn new(output: W) -> StringBuf<W> {
        StringBuf {
            chars: CharBuf::default(),
            output,
            buf: Vec::with_capacity(4096),
            binary_run: 0,
            binary_sum: 0,
        }
    }
}
#[cfg(test)]
mod tests {
    use super::Char;
    use super::CharBuf;
    use super::ShortArray;
    use super::StringBuf;
    // Runs the extractor over `data` and checks the exact bytes written out.
    fn check(expected: &[u8], data: &[u8]) {
        let mut actual = Vec::new();
        let mut state = StringBuf::new(&mut actual);
        state.accept(data).expect("only for vec");
        state.finish().expect("only for vec");
        // Compare lossy strings first for a readable failure message.
        assert_eq!(
            String::from_utf8_lossy(expected),
            String::from_utf8_lossy(&actual)
        );
        assert_eq!(expected, actual.as_slice());
    }
    #[test]
    fn strings_all_ascii() {
        check(b"hello", b"hello");
    }
    #[test]
    fn strings_crush_unprintable() {
        check(b"hello\0world", b"hello\0\x01\x02\x03world");
    }
    // Multi-byte characters are only emitted once all their bytes arrive.
    #[test]
    fn charer() {
        let mut c = CharBuf::default();
        assert_eq!(Some(Char::Printable(ShortArray::One([b'h']))), c.push(b'h'));
        assert_eq!(None, c.push(0b1101_1111));
        assert_eq!(
            Some(Char::Printable(ShortArray::Two([0b1101_1111, 0b1011_1111]))),
            c.push(0b1011_1111)
        );
    }
}
| true |
51862bea995b8fe1f5ab82d80536da7825220b7e
|
Rust
|
atomic-factory/darwinia-misc-toolset
|
/deprecated-darwinia-misc-toolset/src/mmrroot.rs
|
UTF-8
| 2,057 | 2.578125 | 3 |
[] |
no_license
|
use std::env;
use ckb_merkle_mountain_range::{util::MemStore, MMR};
use darwinia_misc_toolset::MMRMerge;
use sp_core::H256;
use std::fs::File;
use std::io::{self, BufRead};
use std::path::Path;
fn gen_mmr(hashes: Vec<H256>) {
let store = MemStore::default();
let mut mmr = MMR::<_, MMRMerge, _>::new(0, &store);
for hash in hashes {
mmr.push(hash).unwrap();
}
println!("mmr_size: {:?}", mmr.mmr_size());
println!("mmr_root: {:?}", mmr.get_root().unwrap());
mmr.commit().expect("commit changes");
}
/// Read a file of block-hash lines and parse each into an `H256`.
///
/// Lines that fail to read are silently skipped (as before); a line that
/// reads but does not parse still panics via `unwrap`.
fn read_hashes<P>(filename: P) -> Vec<H256>
where
    P: AsRef<Path>,
{
    let reader = io::BufReader::new(File::open(filename).unwrap());
    reader
        .lines()
        .filter_map(|line| line.ok())
        .map(|text| str_to_h256(&text).unwrap())
        .collect()
}
/// Parse a `0x`-prefixed, 64-hex-digit block-hash string into an `H256`.
///
/// # Errors
/// Returns `MmrRootError::WrongHashLength` when the input is not exactly
/// 66 characters (`0x` + 64 hex digits), and a decode error when the
/// remainder is not valid hexadecimal. (The previous version `unwrap`ed
/// the hex decode and panicked on malformed input despite returning a
/// `Result`.)
///
/// NOTE(review): the `0x` prefix itself is still not verified — any two
/// leading characters are skipped; confirm whether that is acceptable.
fn str_to_h256(line: &str) -> Result<H256> {
    if line.len() != 66 {
        return Err(anyhow!(MmrRootError::WrongHashLength));
    }
    // The length check guarantees 64 hex chars -> 32 bytes on success,
    // so `from_slice` cannot panic below.
    let bytes = hex::decode(&line[2..]).map_err(|e| anyhow!("invalid hex in block hash: {}", e))?;
    Ok(H256::from_slice(&bytes))
}
// https://ropsten.etherscan.io/uncle/0x0aae3601cda65335e8234866c482e7ea1d2dccf94332926e7d62a486f70ea2aa
// https://ropsten.etherscan.io/block/0x7848e122f3f665a1169c3a7880cbff7aa584879f97fbedc02b2a146477aee66f
// fn main() {
// let args: Vec<String> = env::args().collect();
// let file = &args[1];
// let hashes = read_hashes(file);
// gen_mmr(hashes);
// }
use std::io::prelude::*;
use anyhow::{anyhow, Result};
use thiserror::Error;
/// Errors produced while parsing block-hash input lines.
#[derive(Error, Debug)]
pub enum MmrRootError {
    /// Input line is not the expected 66 characters (`0x` + 64 hex digits).
    #[error("the input block hash length is wrong")]
    WrongHashLength,
    /// Catch-all variant; appears unconstructed in the visible code.
    #[error("unknown error")]
    Unknown,
}
/// Read block hashes from stdin, one per line, pushing each into an
/// in-memory MMR and printing the running size and root after every
/// successful push. Parse failures are printed and skipped.
pub fn main() -> Result<()> {
    let store = MemStore::default();
    let mut mmr = MMR::<_, MMRMerge, _>::new(0, &store);
    let stdin = io::stdin();
    for (index, maybe_line) in stdin.lock().lines().enumerate() {
        let text = maybe_line?;
        println!("> index: {}, {}", index, &text);
        match str_to_h256(&text) {
            Ok(hash) => {
                mmr.push(hash)?;
                println!("size: {}, root: {:?}", mmr.mmr_size(), mmr.get_root()?);
            }
            Err(err) => println!("{}", err.to_string()),
        }
    }
    Ok(())
}
| true |
ee5c6fffdb127c5f6baadbe3c8ab7e8063828511
|
Rust
|
nybjorn/rustymirror
|
/message_openweathermap_sender/src/main.rs
|
UTF-8
| 3,449 | 2.609375 | 3 |
[] |
no_license
|
#![feature(proc_macro)]
#![feature(custom_attribute)]
#[macro_use] extern crate serde_derive;
extern crate serde;
extern crate serde_json;
extern crate nanomsg;
extern crate hyper;
extern crate time;
use nanomsg::{Socket, Protocol};
use std::thread;
use std::time::Duration;
use std::io::{Read, Write};
use hyper::client::{Client, Request, Response};
use hyper::header::Connection;
use std::env;
/// Top-level OpenWeatherMap forecast response (deserialized from JSON).
#[derive(Debug, PartialEq, Serialize, Deserialize)]
pub struct Forecast {
    city: City,
    cod: String,
    message: f32,
    cnt: u32,
    list: Vec<List>
}
/// City metadata attached to a forecast.
#[derive(Debug, PartialEq, Serialize, Deserialize)]
pub struct City {
    id: u32,
    name: String,
    coord: Coordinate,
    country: String
}
/// Longitude/latitude pair.
#[derive(Debug, PartialEq, Serialize, Deserialize)]
pub struct Coordinate {
    lon: f32,
    lat: f32,
}
/// One forecast entry; `dt_txt` is its human-readable timestamp.
#[derive(Debug, PartialEq, Serialize, Deserialize)]
pub struct List {
    dt: u32,
    main: ListMain,
    weather: Vec<Weather>,
    clouds: Clouds,
    wind: Wind,
    dt_txt: String
}
/// Temperature/pressure/humidity readings for a forecast entry.
#[derive(Debug, PartialEq, Serialize, Deserialize)]
pub struct ListMain {
    temp: f32,
    temp_min: f32,
    temp_max: f32,
    pressure: f32,
    sea_level: f32,
    grnd_level: f32,
    humidity: f32,
    temp_kf: f32
}
/// Weather condition descriptor (id/name/description/icon code).
#[derive(Debug, PartialEq, Serialize, Deserialize)]
pub struct Weather {
    id: u32,
    main: String,
    description: String,
    icon: String
}
/// Cloud data; `all` mirrors the API's single cloudiness field.
#[derive(Debug, PartialEq, Serialize, Deserialize)]
pub struct Clouds {
    all: u32
}
/// Wind speed and direction.
#[derive(Debug, PartialEq, Serialize, Deserialize)]
pub struct Wind {
    speed: f32,
    deg: f32,
}
/// Fetch the forecast JSON from `api_path` and deserialize it into a
/// `Forecast`, echoing the raw body to stdout first.
///
/// Panics if the HTTP request fails, the body cannot be read, or the
/// payload does not match the `Forecast` schema (unchanged behaviour).
fn fetch_open_weather(client: &Client, api_path: &String) -> Forecast {
    let mut res = client.get(api_path.as_str()).send().unwrap();
    let mut buffer = String::new();
    res.read_to_string(&mut buffer).unwrap();
    println!("{:?}", buffer);
    // `from_str` only needs a shared `&str`; the original passed a
    // needless `&mut` reference and used a redundant trailing `return`.
    serde_json::from_str(&buffer).unwrap()
}
/// Entry point: repeatedly fetches a forecast and publishes a pipe-delimited
/// summary over a nanomsg PUB socket at tcp://127.0.0.1:8022.
///
/// Usage: `<binary> <apikey> <city> <lang>` — exits with status 1 on a
/// wrong argument count.
fn main() {
    if env::args().count() != 4 {
        println!("Usage: {} <apikey> <city> <lang>",
                 env::args().nth(0).unwrap());
        std::process::exit(1);
    }
    let api_key = env::args().nth(1).unwrap();
    let city = env::args().nth(2).unwrap();
    let lang = env::args().nth(3).unwrap();
    let api_path = format!("http://api.openweathermap.org/data/2.5/forecast?id={}&appid={}&units=metric&lang={}&cnt=3", city, api_key, lang);
    println!("{}", api_path);
    //let url ="ipc:///tmp/pubsub.ipc";
    let url = "tcp://127.0.0.1:8022";
    let mut socket = Socket::new(Protocol::Pub).unwrap();
    let mut endpoint = socket.connect(url).unwrap();
    let client = Client::new();
    println!("Server is ready.");
    loop {
        // Start the message with the fixed topic prefix. (The original
        // assigned `String::new()` and immediately overwrote it.)
        let mut all_msg = String::from("Väder");
        let deserialized_data: Forecast = fetch_open_weather(&client, &api_path);
        for list in deserialized_data.list {
            let msg = format!("{}|{}˚|{}",
                              list.dt_txt,
                              list.main.temp,
                              list.weather[0].description);
            all_msg = format!("{}|{}", all_msg, msg);
        }
        match socket.write_all(all_msg.as_bytes()) {
            Ok(..) => println!("Published '{}'.", all_msg),
            Err(err) => {
                println!("Server failed to publish '{}'.", err);
                break
            }
        }
        // NOTE(review): 11 * 60 * 10000 ms is 110 minutes; the factors
        // suggest 11 minutes may have been intended — confirm.
        thread::sleep(Duration::from_millis(11 * 60 * 10000));
    }
    endpoint.shutdown();
}
| true |
2220e43cdce8c5fd92bcd6ccc26bc4435cbb2b11
|
Rust
|
WebeWizard/lib-webe
|
/webe_web/src/server.rs
|
UTF-8
| 17,358 | 2.78125 | 3 |
[] |
no_license
|
use std::cmp::Ordering::*;
use std::collections::HashMap;
use std::net::Ipv4Addr;
use std::sync::Arc;
use tokio::io::{AsyncBufRead, AsyncReadExt, BufReader, BufWriter};
use tokio::net::{TcpListener, TcpStream};
use super::encoding::chunked::ChunkedDecoder;
use super::request::{Request, RequestError};
use super::responders::static_message::StaticResponder;
use super::responders::Responder;
use super::response::ResponseError;
/// A routing key: HTTP method plus URI pattern.
#[derive(PartialEq, Eq, Hash)]
pub struct Route {
    pub method: String,
    pub uri: String,
    // True when the URI contains a '<' — i.e. a placeholder segment.
    pub has_params: bool,
}
impl Route {
    /// Build a route for `method` and `uri`.
    ///
    /// Placeholder detection is derived from whether the URI contains a
    /// '<' character.
    pub fn new(method: &str, uri: &str) -> Route {
        let has_params = uri.contains('<');
        Route {
            method: method.to_owned(),
            uri: uri.to_owned(),
            has_params,
        }
    }
}
/// Maps each `Route` to the boxed `Responder` that handles it.
pub struct RouteMap<'r> {
    inner: HashMap<Route, Box<dyn Responder + 'r>>,
}
impl<'r> RouteMap<'r> {
    /// Create an empty route map.
    pub fn new() -> RouteMap<'r> {
        RouteMap {
            inner: HashMap::new(),
        }
    }
    /// Register `responder` for `route`, replacing any existing entry.
    pub fn add_route<T: 'r + Responder>(&mut self, mut route: Route, responder: T) {
        // Normalize the URI by *prepending* a '/' when it is missing, so
        // "foo" and "/foo" register as the same route. (An earlier comment
        // here claimed it removed the leading '/', which was incorrect.)
        if !route.uri.starts_with('/') {
            route.uri = "/".to_owned() + route.uri.as_str();
        }
        self.inner.insert(route, Box::new(responder));
    }
}
/// A TCP server bound to a fixed IPv4 address and port.
pub struct Server {
    pub ip: Ipv4Addr,
    pub port: u16,
    listener: TcpListener,
}
/// Errors surfaced by server setup and request processing.
#[derive(Debug)]
pub enum ServerError {
    BadRequest(RequestError), // Request is unable to be processed by the server
    BindError(std::io::Error), // server failed to bind on ip and port
    ConnectionFailed(std::io::Error), // server failed to grab connection from listener
    InternalError, // failed to process the stream
}
// Lets `?` convert request-parsing failures into ServerError::BadRequest.
impl From<RequestError> for ServerError {
    fn from(err: RequestError) -> ServerError {
        ServerError::BadRequest(err)
    }
}
// Response errors carry no detail forward; they collapse to InternalError.
impl From<ResponseError> for ServerError {
    fn from(_err: ResponseError) -> ServerError {
        ServerError::InternalError
    }
}
impl Server {
pub async fn new(ip: &Ipv4Addr, port: &u16) -> Result<Server, ServerError> {
// attempt to bind the server to the specified ip and port
match TcpListener::bind((ip.clone(), port.clone())).await {
Ok(listener) => {
return Ok(Server {
ip: ip.clone(),
port: port.clone(),
listener: listener,
});
}
Err(error) => return Err(ServerError::BindError(error)),
};
}
// starts the server, blocks the thread while the server is running
pub async fn start(&self, routes: RouteMap<'static>) -> Result<(), ServerError> {
let routes_arc = Arc::new(routes);
loop {
match self.listener.accept().await {
Ok((stream, _socket)) => {
let process_routes = routes_arc.clone();
tokio::spawn(async move { process_stream(stream, process_routes).await });
}
Err(error) => return Err(ServerError::ConnectionFailed(error)),
}
}
}
}
/// Pick the route that best matches `request`, or `None` (caller 404s).
///
/// An exact, parameter-free method+URI match always wins. Otherwise each
/// candidate with the right method is scored by how many leading
/// '/'-separated parts match (a part matches if equal or if the route
/// part contains '<').
fn find_best_route<'r>(request: &Request, routes: &'r Arc<RouteMap<'r>>) -> Option<&'r Route> {
    // ~~ find the best responder ~~
    // first check for an exact match
    if let Some(route) = routes
        .inner
        .keys()
        .find(|route| !route.has_params && request.method == route.method && route.uri == request.uri)
    {
        return Some(route);
    }
    // non-terminal route params WILL NOT contain more than one request uri part
    // terminal route params WILL contain the remainder of the request uri
    let request_parts: Vec<&str> = request.uri.split('/').collect();
    // only keys with matching method
    match routes
        .inner
        .keys()
        .filter_map(|route| {
            if route.method != request.method {
                return None;
            }
            let route_parts: Vec<&str> = route.uri.split('/').collect();
            // compare length. route cannot match request with less parts
            if route_parts.len() > request_parts.len() {
                return None;
            }
            // find the one with the most matching parts
            // match_size: count of leading parts that matched;
            // first_wild: 1-based index of the first '<' part (0 = none).
            let mut match_size = 0;
            let mut first_wild = 0;
            for i in 0..request_parts.len() {
                if request_parts[i] == route_parts[i] || route_parts[i].contains('<') {
                    match_size = i + 1;
                    if first_wild == 0 && route_parts[i].contains('<') {
                        first_wild = i + 1;
                    }
                    if (i + 1) == route_parts.len() {
                        break;
                    }
                } else {
                    return None;
                } // uri doesn't match
            }
            return Some((route, match_size, first_wild));
        })
        // Longest match wins; on a tie, the reversed first_wild comparison
        // makes the route with the *smaller* first-placeholder index win
        // (0, i.e. no placeholder, beats any placeholder position).
        .max_by(|x, y| match (x.1).cmp(&y.1) {
            Less => return Less,
            Greater => return Greater,
            Equal => ((x.2).cmp(&y.2)).reverse(),
        }) {
        Some((route, _, _)) => return Some(route),
        None => return None,
    }
}
/// Extract `(placeholder, value)` pairs by aligning the request URI's
/// '/'-parts with the route's. A placeholder in the route's final part
/// captures the entire remainder of the request URI (joined with '/');
/// any other placeholder captures exactly one part. Placeholder names
/// keep their angle brackets, matching the route definition.
fn parse_route_params(request: &Request, route: &Route) -> Vec<(String, String)> {
    // A Vec beats a HashMap for the handful of params a request carries.
    if !route.has_params {
        return Vec::new();
    }
    let request_parts: Vec<&str> = request.uri.split('/').collect();
    let route_parts: Vec<&str> = route.uri.split('/').collect();
    let last = route_parts.len() - 1;
    route_parts
        .iter()
        .enumerate()
        .filter(|(_, part)| part.contains('<'))
        .map(|(i, part)| {
            let value = if i == last {
                // Terminal placeholder swallows the rest of the request URI
                // (e.g. a path into a subfolder).
                request_parts[i..].join("/")
            } else {
                request_parts[i].to_owned()
            };
            ((*part).to_owned(), value)
        })
        .collect()
}
// process a client request
// TODO: handle these errors better (need to know the real error for logging, whatever)
//
// Serves one TCP connection: loops reading requests while keep_alive is
// true, routing each to the best-matching responder and writing back a
// response. Header handling decides keep-alive: a non-chunked
// transfer-encoding or "Connection: close" turns it off; a valid
// content-length turns it back on. Any unsupported encoding or malformed
// content-length aborts the connection with an error.
async fn process_stream(
    mut stream: TcpStream,
    routes: Arc<RouteMap<'_>>,
) -> Result<(), ServerError> {
    // split the stream into reader and writer
    let (reader, writer) = stream.split();
    let mut buf_reader = BufReader::new(reader);
    let mut buf_writer = BufWriter::new(writer);
    let mut keep_alive = true; // keep-alive by default
    while keep_alive {
        let responder; // placeholder for selected responder
        let mut response; // placeholder for some kind of response
        match Request::new(&mut buf_reader).await {
            Ok(mut request) => {
                // find a route for the request , or return 404 Not Found
                if let Some(route) = find_best_route(&request, &routes) {
                    responder = routes.inner.get(route).unwrap(); // safe to unwrap here because because we know route exists
                    let params = parse_route_params(&request, route);
                    match request.parse_headers(&mut buf_reader).await {
                        Ok(()) => {
                            // use a trait object because the final reader type is unknown at compile time
                            let mut body_reader: std::pin::Pin<Box<dyn AsyncBufRead + Send + Sync>> =
                                Box::pin(&mut buf_reader);
                            // using transfer encodings on the body?
                            match &request.headers {
                                Some(req_headers) => {
                                    match req_headers.get("transfer-encoding") {
                                        Some(value) => {
                                            let encodings: Vec<String> =
                                                value.split(',').map(|e| e.trim().to_lowercase()).collect();
                                            if encodings.len() >= 1 {
                                                if encodings[encodings.len() - 1] != "chunked" {
                                                    // if not chunked, then assume connection will close
                                                    // unless content-length is given below
                                                    keep_alive = false;
                                                }
                                                // apply decoders in order
                                                for encoding in encodings {
                                                    body_reader = match encoding.as_str() {
                                                        // TODO: Add gzip/deflate encoders/decoders
                                                        "chunked" => Box::pin(BufReader::new(ChunkedDecoder::new(body_reader))),
                                                        "identity" => body_reader,
                                                        _ => {
                                                            return Err(ServerError::BadRequest(
                                                                RequestError::EncodingNotSupportedError,
                                                            ))
                                                        }
                                                    }
                                                }
                                            }
                                        }
                                        None => {}
                                    }
                                    // A content-length bounds the body reader and
                                    // re-enables keep-alive.
                                    match req_headers.get("content-length") {
                                        Some(value) => match value.parse::<u64>() {
                                            Ok(content_length) => {
                                                body_reader = Box::pin(body_reader.take(content_length));
                                                keep_alive = true
                                            }
                                            Err(_error) => {
                                                return Err(ServerError::BadRequest(RequestError::MalformedRequestError))
                                            }
                                        },
                                        None => {}
                                    }
                                    // does request want to close connection?
                                    match req_headers.get("connection") {
                                        Some(con_header) => {
                                            if con_header.to_lowercase() == "close" {
                                                keep_alive = false
                                            }
                                        }
                                        None => {}
                                    }
                                }
                                None => {}
                            }
                            request.set_message_body(Some(body_reader));
                            // validate the request is able to be responded to with the selected responder
                            if let Ok(validation_result) = responder.validate(&request, &params, None) {
                                match responder.build_response(&mut request, &params, validation_result) {
                                    Ok(new_response) => response = new_response,
                                    Err(error_code) => {
                                        response = StaticResponder::from_standard_code(error_code).quick_response()
                                    }
                                }
                            } else {
                                response = StaticResponder::from_standard_code(400).quick_response()
                            } // 400 Bad Request
                        }
                        Err(_error) => response = StaticResponder::from_standard_code(400).quick_response(),
                    }
                } else {
                    response = StaticResponder::from_standard_code(404).quick_response();
                }
            }
            Err(_error) => response = StaticResponder::from_standard_code(400).quick_response(), // 400 Bad Request
        }
        response.respond(&mut buf_writer).await? // TODO: do we need to await here? or just let it fly?
    }
    return Ok(());
}
// // TODO: move respnder.validate() here
// // use a trait object because the final reader type is unknown at compile time
// let mut body_reader: Box<dyn AsyncBufRead + 's> = Box::new(buf_reader);
// // using transfer encodings on the body?
// match request.headers.get("transfer-encoding") {
// Some(value) => {
// let encodings: Vec<String> =
// value.split(',').map(|e| e.trim().to_lowercase()).collect();
// if encodings.len() >= 1 {
// if encodings[encodings.len() - 1] != "chunked" {
// // if not chunked, then assume connection will close
// // unless content-length is given below
// keep_alive = false;
// }
// // apply decoders in order
// for encoding in encodings {
// body_reader = match encoding.as_str() {
// // TODO: Add gzip/deflate encoders/decoders
// "chunked" => {
// Box::new(BufReader::new(ChunkedDecoder::new(body_reader)))
// }
// "identity" => body_reader,
// _ => return Err(ServerError::BadRequest),
// }
// }
// }
// }
// None => {}
// }
// match request.headers.get("content-length") {
// Some(value) => match value.parse::<u64>() {
// Ok(content_length) => {
// body_reader = Box::new(body_reader.take(content_length));
// keep_alive = true
// }
// Err(_error) => return Err(ServerError::BadRequest),
// },
// None => {}
// }
// request.set_message_body(Some(body_reader));
// // does request want to close connection?
// match request.headers.get("connection") {
// Some(con_header) => {
// BufReader::new(stream);
// if con_header.to_lowercase() == "close" {
// keep_alive = false
// }
// }
// None => {}
// }
// // TODO: move validate to before body reader is built
// match responder.validate(&request, ¶ms, None) {
// Ok(validation_result) => {
// match responder.build_response(&mut request, ¶ms, validation_result) {
// Ok(mut response) => {
// // TODO: Need a solution for adding CORS header to all responses
// response.headers.insert(
// "Access-Control-Allow-Origin".to_owned(),
// "http://localhost:1234".to_owned(),
// );
// match response.respond(BufWriter::new(&stream)) {
// Ok(()) => {
// keep_alive = response.keep_alive;
// //keep_alive = false;
// // TODO: figure out logging
// // print!("Done");
// }
// Err(_error) => return Err(ServerError::InternalError),
// }
// }
// Err(response_code) => {
// let static_responder =
// StaticResponder::from_standard_code(response_code);
// match static_responder.build_response(&mut request, ¶ms, None) {
// Ok(mut response) => {
// // TODO: Need a solution for adding CORS header to all responses
// response.headers.insert(
// "Access-Control-Allow-Origin".to_owned(),
// "http://localhost:1234".to_owned(),
// );
// match response.respond(BufWriter::new(&stream)) {
// Ok(()) => {} // keep_alive = true
// Err(_error) => return Err(ServerError::InternalError),
// }
// }
// Err(_error) => return Err(ServerError::InternalError),
// }
// }
// }
// }
// Err(validation_status) => {
// let static_responder = StaticResponder::from_status(validation_status);
// match static_responder.build_response(&mut request, ¶ms, None) {
// Ok(mut response) => {
// // TODO: Need a solution for adding CORS header to all responses
// response.headers.insert(
// "Access-Control-Allow-Origin".to_owned(),
// "http://localhost:1234".to_owned(),
// );
// match response.respond(BufWriter::new(&stream)) {
// Ok(()) => {} // keep-alive = true
// Err(_error) => return Err(ServerError::InternalError),
// }
// }
// Err(_error) => return Err(ServerError::InternalError),
// }
// }
// }
// }
// None => return Err(ServerError::InternalError),
// }
// }
// None => {
// let static_responder = StaticResponder::from_standard_code(400);
// match static_responder.build_response(
// &mut request,
// &HashMap::<String, String>::new(),
// None,
// ) {
// Ok(mut response) => {
// // TODO: Need a solution for adding CORS header to all responses
// response.headers.insert(
// "Access-Control-Allow-Origin".to_owned(),
// "http://localhost:1234".to_owned(),
// );
// match response.respond(BufWriter::new(&stream)) {
// Ok(()) => {} //keep-alive = true
// Err(_error) => return Err(ServerError::InternalError),
// }
// }
// Err(_error) => return Err(ServerError::InternalError),
// }
// }
// }
// }
// Err(_error) => return Err(ServerError::InternalError),
// }
// }
// return Ok(());
| true |
726a682f47877701d7b37efdab9f6d3656f07c63
|
Rust
|
scottschroeder/git-summary-rs
|
/src/fs_util.rs
|
UTF-8
| 2,121 | 3 | 3 |
[
"MIT"
] |
permissive
|
use std::{env, fs, path};
use walkdir::WalkDir;
use crate::Result;
const GIT_DIR: &str = ".git";
/// Resolve the directory to scan: the canonicalized `user_path` when one
/// was given (it must be a directory), otherwise the current working
/// directory.
pub fn get_working_dir(user_path: Option<&str>) -> Result<path::PathBuf> {
    match user_path {
        None => Ok(env::current_dir()?),
        Some(raw) => {
            let canonical = fs::canonicalize(raw)?;
            if !canonical.metadata()?.is_dir() {
                anyhow::bail!("the path {:?} is not a directory", canonical);
            }
            Ok(canonical)
        }
    }
}
/// Strip everything up to (and including) `base`'s parent from `full`,
/// so the result still starts with `base`'s own final component. When
/// `base` has no parent, `base` itself is stripped; when `full` does not
/// start with that prefix, `full` is returned unchanged.
pub fn shorten<PB>(base: PB, full: &path::Path) -> &path::Path
where
    PB: AsRef<path::Path>,
{
    let base = base.as_ref();
    let prefix = base.parent().unwrap_or(base);
    match full.strip_prefix(prefix) {
        Ok(relative) => relative,
        Err(_) => full,
    }
}
/// Walk `src_path` (following symlinks) and collect the canonical path of
/// every git repository found — i.e. the parent directory of each `.git`
/// entry. `deep` disables the depth-2 cutoff; `do_hidden` allows descent
/// into hidden directories. Unreadable entries are skipped.
pub fn get_all_repos<P: AsRef<path::Path>>(
    src_path: P,
    deep: bool,
    do_hidden: bool,
) -> Vec<path::PathBuf> {
    WalkDir::new(src_path.as_ref())
        .follow_links(true)
        .into_iter()
        .filter_entry(move |e| !deep_filter(deep, !do_hidden, e))
        .filter_map(|entry| {
            let entry = entry.ok()?;
            if !is_git_dir(&entry) {
                return None;
            }
            // The repo root is the parent of its `.git` directory.
            let repo_root = entry.path().parent().unwrap();
            Some(fs::canonicalize(repo_root).unwrap())
        })
        .collect()
}
// Returns true when `entry` should be pruned from the walk: hidden
// entries whenever `skip_hidden` is set, and (outside deep mode)
// anything deeper than two levels.
fn deep_filter(deep: bool, skip_hidden: bool, entry: &walkdir::DirEntry) -> bool {
    if skip_hidden && is_hidden(entry) {
        //trace!("Filtering {:?} (hidden)", entry.path().display());
        return true;
    }
    !deep && entry.depth() > 2
}
/// True when the entry's file name is exactly `.git`.
fn is_git_dir(entry: &walkdir::DirEntry) -> bool {
    check_entry_filename(entry, |name| name == GIT_DIR)
}
/// True for dot-prefixed entries, except `.git` itself.
fn is_hidden(entry: &walkdir::DirEntry) -> bool {
    check_entry_filename(entry, |name| name.starts_with('.') && name != GIT_DIR)
}
/// Apply `predicate` to the entry's file name as UTF-8. Non-UTF-8 names
/// cannot be tested, so they log an error and count as no match.
fn check_entry_filename<F>(entry: &walkdir::DirEntry, predicate: F) -> bool
where
    F: FnOnce(&str) -> bool,
{
    match entry.file_name().to_str() {
        Some(name) => predicate(name),
        None => {
            log::error!("unable to parse {:?} as str", entry.path().display());
            false
        }
    }
}
| true |
6e061c1b2d1b69db708019e117b9e958698d326b
|
Rust
|
Harapan21/Lunatictech
|
/server/src/graphql/schema.rs
|
UTF-8
| 8,762 | 2.59375 | 3 |
[] |
no_license
|
use crate::{
db::MysqlPoolConnection,
errors::SmileError,
graphql::{category::CategorySchema, post::PostSchema, topic::TopicSchema, user::UserSchema},
models::{
category::{Category, CategoryInput},
comment::{Comment, CommentInput},
embed::{Embed, EmbedInput},
handler::Handler,
info::InfoSchema,
post::{Post, PostInput},
topic::Topic,
user::{User, UserInput},
},
utils::Auth,
};
use actix_identity::Identity;
use async_std::task;
use diesel::prelude::*;
use juniper::RootNode;
use std::sync::Arc;
// Per-request GraphQL context: the authenticated user id (if any), a
// pooled MySQL connection, and the actix identity handle used to set or
// clear the session cookie.
pub struct Context {
    pub user_id: Option<String>,
    pub conn: Arc<MysqlPoolConnection>,
    pub id: Arc<Identity>,
}
// Kind of "card" content a mutation may target. (Kept as `//` comments:
// juniper turns `///` docs on GraphQL types into schema descriptions.)
#[derive(Debug, Serialize, Deserialize, juniper::GraphQLEnum)]
pub enum Card {
    MOVIE,
    GAME,
}
// CRUD action selector shared by several mutations; also stored in the
// database via the DbEnum derive.
#[derive(Debug, Serialize, Deserialize, DbEnum, PartialEq, Clone, juniper::GraphQLEnum)]
pub enum ActionOption {
    INPUT,
    UPDATE,
    REMOVE,
}
// Marker impl juniper requires to use `Context` as the GraphQL context.
impl juniper::Context for Context {}
// GraphQL query root. Comments are plain `//` on purpose: juniper would
// expose `///` docs as GraphQL field descriptions.
pub struct Query;
#[juniper::object(Context = Context)]
impl Query {
    // Site name/description document.
    fn info(context: &Context) -> Result<InfoSchema, SmileError> {
        InfoSchema::get(&context.conn)
    }
    // The authenticated user, or Unauthorized when no session exists.
    fn me(context: &Context) -> Result<Box<dyn UserSchema + 'static>, SmileError> {
        match &context.user_id {
            Some(auth_id) => Ok(User::find_by_id(auth_id, &context.conn)? as Box<dyn UserSchema>),
            None => Err(SmileError::Unauthorized),
        }
    }
    // All topics, boxed behind the TopicSchema trait.
    fn topic(context: &Context) -> Result<Vec<Box<dyn TopicSchema + 'static>>, SmileError> {
        Topic::list(&context.conn)
            .map(|x| x.into_iter().map(|x| x as Box<dyn TopicSchema>).collect())
    }
    // All posts, boxed behind the PostSchema trait.
    fn post(context: &Context) -> Result<Vec<Box<dyn PostSchema + 'static>>, SmileError> {
        let post = Post::list(&context.conn)?;
        Ok(post.into_iter().map(|e| e as Box<dyn PostSchema>).collect())
    }
    // fn page(context: &Context) -> Result<Vec<Page
    // All categories, boxed behind the CategorySchema trait.
    fn category(context: &Context) -> Result<Vec<Box<dyn CategorySchema + 'static>>, SmileError> {
        Category::list(&context.conn)
            .map(|list| list.into_iter().map(|item| item as Box<dyn CategorySchema>).collect())
    }
}
// GraphQL mutation root. Comments are plain `//` on purpose: juniper
// would expose `///` docs as GraphQL field descriptions.
pub struct Mutation;
#[juniper::object(
    Context = Context,
)]
impl Mutation {
    // Overwrite the site's name/description info document.
    fn info(context: &Context, name: String, description: String) -> Result<bool, SmileError> {
        InfoSchema::set(context, InfoSchema { name, description })
    }
    // Authenticate and, on success, store the session token in the actix
    // identity cookie.
    fn login(username: String, password: String, context: &Context) -> Result<Auth, SmileError> {
        let login = User::login(username, password, &context.conn)?;
        if let Some(token) = &login.token {
            context.id.remember(token.to_owned());
        }
        Ok(login)
    }
    // Drop the identity cookie and return an empty Auth.
    fn logout(context: &Context) -> Result<Auth, SmileError> {
        context.id.forget();
        Ok(Default::default())
    }
    // Create a user, then log them in immediately.
    // NOTE(review): `input.username`/`input.password` are unwrapped here —
    // presumably guaranteed by upstream validation; confirm they can never
    // be None on this path.
    fn register(mut input: UserInput, context: &Context) -> Result<Auth, SmileError> {
        if User::input(input.clone(), &context.conn)? {
            let login =
                User::login(input.username.unwrap(), input.password.unwrap(), &context.conn)?;
            if let Some(token) = &login.token {
                context.id.remember(token.to_owned());
            }
            return Ok(login);
        }
        Ok(Default::default())
    }
    // Update the authenticated user's own record.
    fn me(input: UserInput, context: &Context) -> Result<bool, SmileError> {
        match &context.user_id {
            Some(auth_id) => User::update(auth_id.to_owned(), input, &context.conn),
            None => Err(SmileError::Unauthorized),
        }
    }
    // fn add_card(
    //     id: Option<i32>,
    //     name: String,
    //     thumbnail: Option<String>,
    //     card: Card,
    //     action: ActionOption,
    //     context: &Context,
    // ) -> Result<bool, SmileError> {
    //     match (id, action) {
    //         (None, ActionOption::INPUT) => match card {
    //             Card::GAME => {
    //                 let input = GameInput { name, thumbnail };
    //                 return Game::input(input, &context.conn);
    //             }
    //             Card::MOVIE => {
    //                 let input = MovieInput { name, thumbnail };
    //                 return Movie::input(input, &context.conn);
    //             }
    //         },
    //         (Some(id), ActionOption::UPDATE) => match card {
    //             Card::GAME => {
    //                 let input = GameInput { name, thumbnail };
    //                 return Game::update(id, input, &context.conn);
    //             }
    //             Card::MOVIE => {
    //                 let input = MovieInput { name, thumbnail };
    //                 return Movie::update(id, input, &context.conn);
    //             }
    //         },
    //         (Some(id), ActionOption::REMOVE) => match card {
    //             Card::GAME => {
    //                 return Game::remove(id, &context.conn);
    //             }
    //             Card::MOVIE => {
    //                 return Movie::remove(id, &context.conn);
    //             }
    //         },
    //         _ => unreachable!(),
    //     }
    // }
    // Create, update, or remove a post. Requires an authenticated user;
    // INPUT expects `input`, UPDATE expects `post_id`, REMOVE expects
    // `post_id` with no `input`. Other combinations are caller bugs.
    fn post(
        post_id: Option<i32>,
        mut input: Option<PostInput>,
        categories: Option<Vec<i32>>,
        embed: Option<EmbedInput>,
        context: &Context,
        action: ActionOption,
    ) -> Result<bool, SmileError> {
        let conn: &MysqlConnection = &context.conn;
        match &context.user_id {
            Some(aunt_id) => match (action, post_id, input.clone()) {
                (ActionOption::INPUT, _, Some(mut input)) => {
                    input.author_id = Some(aunt_id.to_owned());
                    if Post::input(input, conn)? {
                        return task::block_on(Post::push_mul(categories, embed, conn));
                    }
                    Ok(false)
                }
                (ActionOption::UPDATE, Some(post_id), _) => {
                    if let Some(embed_value) = embed {
                        Embed::update(&post_id, embed_value, conn)?;
                    }
                    if let Some(mut input) = input {
                        input.author_id = Some(aunt_id.to_owned());
                        Post::update(post_id, input, conn)?;
                    }
                    // NOTE(review): `categories` is accepted but ignored on
                    // UPDATE — this binding does nothing; confirm intent.
                    if let Some(mut input) = categories {}
                    Ok(true)
                }
                (ActionOption::REMOVE, Some(post_id), None) => {
                    // Only the post's author may delete it.
                    let is_author = async {
                        let result_post = Post::find_by_id(&post_id, conn).unwrap();
                        result_post.author_id == Some(aunt_id.to_owned())
                    };
                    task::block_on(async move {
                        if is_author.await {
                            Post::remove(post_id, conn)
                        } else {
                            Err(SmileError::AccessDenied)
                        }
                    })
                }
                _ => unreachable!(),
            },
            None => Err(SmileError::Unauthorized),
        }
    }
    // Create or update a category; REMOVE is not supported here.
    fn category(
        id: Option<i32>,
        input: CategoryInput,
        context: &Context,
        action: ActionOption,
    ) -> Result<bool, SmileError> {
        match (action, id) {
            (ActionOption::INPUT, None) => Category::input(input, &context.conn),
            (ActionOption::UPDATE, Some(id)) => Category::update(id, input, &context.conn),
            _ => unreachable!(),
        }
    }
    // Create, update, or delete a comment as the authenticated user.
    fn comment(
        commentId: Option<i32>,
        mut input: CommentInput,
        context: &Context,
        action: ActionOption,
    ) -> Result<bool, SmileError> {
        return match (action, &context.user_id, commentId) {
            (_, None, _) => Err(SmileError::Unauthorized),
            (ActionOption::INPUT, Some(user_id), _) => {
                input.userId = Some(user_id.to_owned());
                Comment::input(input, &context.conn)
            }
            (ActionOption::UPDATE, Some(user_id), Some(commentId)) => {
                input.userId = Some(user_id.to_owned());
                Comment::update(input, commentId, &context.conn)
            }
            (ActionOption::REMOVE, Some(user_id), Some(commentId)) => {
                Comment::delete(user_id, commentId, &context.conn)
            }
            _ => unreachable!(),
        };
    }
}
/// Root schema combining the Query and Mutation objects.
pub type Schema = RootNode<'static, Query, Mutation>;
/// Build a per-request context from the authenticated user id, a pooled
/// connection, and the request's identity handle.
pub fn create_context(
    user_id: Option<String>,
    mysql_pool: MysqlPoolConnection,
    id: Arc<Identity>,
) -> Context {
    Context { user_id, conn: Arc::new(mysql_pool), id }
}
/// Construct the GraphQL root node.
pub fn create_schema() -> Schema {
    Schema::new(Query {}, Mutation {})
}
| true |
5a4c8dfbf8cd2468d896846471f35e48671302c9
|
Rust
|
tonngw/leetcode
|
/rust/0213-house-robber-ii.rs
|
UTF-8
| 425 | 2.84375 | 3 |
[
"MIT"
] |
permissive
|
impl Solution {
    /// House Robber II: the houses form a circle, so the first and last
    /// house can never both be robbed. Answer is the better of robbing
    /// the linear street without the first house and without the last.
    pub fn rob(nums: Vec<i32>) -> i32 {
        // Classic linear house-robber via a fold carrying
        // (best excluding previous house, best overall so far).
        fn rob_line(houses: &[i32]) -> i32 {
            let (_, best) = houses
                .iter()
                .fold((0, 0), |(skip, take), &money| (take, take.max(skip + money)));
            best
        }
        match nums.len() {
            0 => 0,
            1 => nums[0],
            n => rob_line(&nums[1..]).max(rob_line(&nums[..n - 1])),
        }
    }
}
| true |
ba13972b76be9c201cb3c51e425a6b411366cab2
|
Rust
|
Byron/gitoxide
|
/gix-filter/src/worktree/encode_to_git.rs
|
UTF-8
| 3,774 | 3.3125 | 3 |
[
"MIT",
"Apache-2.0"
] |
permissive
|
/// Whether or not to perform round-trip checks.
///
/// Consumed by [`encode_to_git()`][super::encode_to_git()] to decide if the
/// decoded UTF-8 must re-encode back to the exact original bytes.
#[derive(Debug, Copy, Clone)]
pub enum RoundTripCheck {
    /// Assure that we can losslessly convert the UTF-8 result back to the original encoding or fail with an error.
    Fail,
    /// Do not check if the encoding is round-trippable.
    Skip,
}
/// The error returned by [`encode_to_git()][super::encode_to_git()].
#[derive(Debug, thiserror::Error)]
#[allow(missing_docs)]
pub enum Error {
    /// The worst-case UTF-8 output size for the input could not be computed.
    #[error("Cannot convert input of {input_len} bytes to UTF-8 without overflowing")]
    Overflow { input_len: usize },
    /// The input bytes were not valid in the declared source encoding.
    #[error("The input was malformed and could not be decoded as '{encoding}'")]
    Malformed { encoding: &'static str },
    /// Re-encoding the UTF-8 result did not reproduce the original bytes.
    #[error("Encoding from '{src_encoding}' to '{dest_encoding}' and back is not the same")]
    RoundTrip {
        src_encoding: &'static str,
        dest_encoding: &'static str,
    },
}
pub(crate) mod function {
    use encoding_rs::DecoderResult;
    use super::{Error, RoundTripCheck};
    use crate::clear_and_set_capacity;
    /// Decode `src` according to `src_encoding` to `UTF-8` for storage in git and place it in `buf`.
    /// Note that the encoding is always applied, there is no conditional even if `src_encoding` already is `UTF-8`.
    ///
    /// # Errors
    /// Fails with [`Error::Overflow`] when the worst-case UTF-8 size cannot
    /// be computed, [`Error::Malformed`] when `src` is invalid in
    /// `src_encoding`, and [`Error::RoundTrip`] when `round_trip` is
    /// [`RoundTripCheck::Fail`] and re-encoding does not reproduce `src`.
    pub fn encode_to_git(
        src: &[u8],
        src_encoding: &'static encoding_rs::Encoding,
        buf: &mut Vec<u8>,
        round_trip: RoundTripCheck,
    ) -> Result<(), Error> {
        // BOM removal: a leading byte-order mark in `src` is not copied out.
        let mut decoder = src_encoding.new_decoder_with_bom_removal();
        let buf_len = decoder
            .max_utf8_buffer_length_without_replacement(src.len())
            .ok_or(Error::Overflow { input_len: src.len() })?;
        clear_and_set_capacity(buf, buf_len);
        // SAFETY: `clear_and_set_capacity` assure that we have the given `buf_len` allocated, so setting its length is only making available
        // what is allocated. Later we will truncate to the amount of actually written bytes.
        #[allow(unsafe_code)]
        unsafe {
            buf.set_len(buf_len);
        }
        // `true` = this is the last (only) chunk, so the decoder finalizes.
        let (res, read, written) = decoder.decode_to_utf8_without_replacement(src, buf, true);
        match res {
            DecoderResult::InputEmpty => {
                assert!(
                    buf_len >= written,
                    "encoding_rs estimates the maximum amount of bytes written correctly"
                );
                assert_eq!(read, src.len(), "input buffer should be fully consumed");
                // SAFETY: we trust that `encoding_rs` reports this number correctly, and truncate everything else.
                #[allow(unsafe_code)]
                unsafe {
                    buf.set_len(written);
                }
            }
            DecoderResult::OutputFull => {
                unreachable!("we assure that the output buffer is big enough as per the encoder's estimate")
            }
            DecoderResult::Malformed(_, _) => {
                return Err(Error::Malformed {
                    encoding: src_encoding.name(),
                })
            }
        }
        match round_trip {
            RoundTripCheck::Fail => {
                // SAFETY: we trust `encoding_rs` to output valid UTF-8 only if we ask it to.
                #[allow(unsafe_code)]
                let str = unsafe { std::str::from_utf8_unchecked(buf) };
                let (should_equal_src, _actual_encoding, _had_errors) = src_encoding.encode(str);
                if should_equal_src != src {
                    return Err(Error::RoundTrip {
                        src_encoding: src_encoding.name(),
                        dest_encoding: "UTF-8",
                    });
                }
            }
            RoundTripCheck::Skip => {}
        }
        Ok(())
    }
}
| true |
f0e4890ff0ef0de54ae66108fccdce19359a456f
|
Rust
|
mofanv/veracruz
|
/proxy-attestation-server/src/cli.rs
|
UTF-8
| 2,949 | 2.796875 | 3 |
[
"MIT",
"CC-BY-SA-2.0"
] |
permissive
|
//! Proxy Attestation Server command-line interface
//!
//! ## Authors
//!
//! The Veracruz Development Team.
//!
//! ## Licensing and copyright notice
//!
//! See the `LICENSE.markdown` file in the Veracruz root directory for
//! information on licensing and copyright.
use actix_rt;
use env_logger;
use log::info;
use proxy_attestation_server;
use std::{env, fs, path, process};
use structopt::StructOpt;
use veracruz_utils::policy::{
error::PolicyError,
policy::Policy,
};
// Command-line options for the proxy attestation server.
// NOTE: structopt turns `///` field docs into `--help` text, so review
// notes here are deliberately plain `//` comments to avoid changing the
// CLI output.
#[derive(Debug, StructOpt)]
#[structopt(rename_all="kebab")]
struct Opt {
    /// Path to policy file
    #[structopt(parse(from_os_str))]
    policy_path: path::PathBuf,
    /// Path to CA certificate
    #[structopt(long, parse(from_os_str))]
    ca_cert: path::PathBuf,
    /// Path to CA private key
    #[structopt(long, parse(from_os_str))]
    ca_key: path::PathBuf,
    /// URL or path to database, may also be provided through the
    /// DATABASE_URL environment variable
    #[structopt(long)]
    database_url: Option<String>,
    /// Enable/disable debugging
    #[structopt(long)]
    debug: bool,
}
/// Entry point
///
/// Loads and validates the policy file, resolves the database URL (flag
/// first, then the DATABASE_URL environment variable), then runs the
/// proxy attestation server on an Actix runtime until it exits. Any
/// failure prints the error and exits with status 1.
fn main() {
    // parse args
    let opt = Opt::from_args();
    // setup logger
    env_logger::init();
    // load policy
    info!("Loading policy {:?}", opt.policy_path);
    let policy = fs::read_to_string(&opt.policy_path)
        .map_err(|err| PolicyError::from(err))
        .and_then(|policy_json| Policy::from_json(&policy_json));
    let policy = match policy {
        Ok(policy) => policy,
        Err(err) => {
            eprintln!("{}", err);
            process::exit(1);
        }
    };
    info!("Loaded policy {}", policy.policy_hash().unwrap_or("???"));
    // log the CA cert
    info!("Using CA certificate {:?}", opt.ca_cert);
    // needs a database URL
    // A --database-url flag overrides (by setting) the environment variable.
    if let Some(url) = opt.database_url {
        env::set_var("DATABASE_URL", url);
    }
    match env::var("DATABASE_URL") {
        Ok(url) => {
            info!("Using database {:?}", url);
        }
        Err(_) => {
            eprintln!("No database URL provided, need --database-url");
            process::exit(1);
        }
    }
    // create Actix runtime
    let mut sys = actix_rt::System::new("Proxy Attestation Server");
    // create Proxy Attestation Server instance
    let proxy_attestation_server = match proxy_attestation_server::server::server(
        policy.proxy_attestation_server_url().clone(),
        &opt.ca_cert,
        &opt.ca_key,
        opt.debug
    ) {
        Ok(proxy_attestation_server) => proxy_attestation_server,
        Err(err) => {
            eprintln!("{}", err);
            process::exit(1);
        }
    };
    println!("Proxy Attestation Server running on {}", policy.proxy_attestation_server_url());
    // Block the runtime on the server future until it completes.
    match sys.block_on(proxy_attestation_server) {
        Ok(()) => {}
        Err(err) => {
            eprintln!("{}", err);
            process::exit(1);
        }
    }
}
| true |
ebbb6ccd13d61dc3b6eae9a2b6c1b831a8c3e7af
|
Rust
|
solarretrace/talc-rs
|
/src/primitive/point.rs
|
UTF-8
| 1,105 | 2.875 | 3 |
[
"Apache-2.0",
"MIT"
] |
permissive
|
// Copyright 2018 Skylor R. Schermer.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
////////////////////////////////////////////////////////////////////////////////
//! Point drawing primitives.
////////////////////////////////////////////////////////////////////////////////
// Local imports.
use brush::Brush;
use canvas::Canvas;
use geometry::Point;
//////////////////////////////////////////////////////////////////////////////
// point
//////////////////////////////////////////////////////////////////////////////
/// Draws a point.
///
/// The work is delegated entirely to the brush, which stamps itself onto the
/// canvas at `pt`; this function adds no clipping or bounds logic of its own.
///
/// # Arguments
///
/// `canvas`: The [`Canvas`] to draw to.
/// `brush`: The [`Brush`] to draw with.
/// `pt`: The [`Point`] of the point.
#[inline]
pub fn point<C, B, X>(
    canvas: &mut C,
    brush: &B,
    pt: Point)
    where
        C: Canvas<Pixel=X>,
        B: Brush<X>
{
    brush.apply(canvas, pt);
}
| true |
a2775453506d0d5842fabe2b653ed7e8a005ffba
|
Rust
|
sambordo1/minigrep
|
/src/lib.rs
|
UTF-8
| 2,854 | 3.71875 | 4 |
[] |
no_license
|
use std::env;
//for working with environemnt variable
use std::error::Error;
//to bring Box<dyn Error> into scope
use std::fs;
//to handle files
// Runtime configuration for a search: what to look for, which file to search,
// and whether matching is case sensitive.  Fields are public so binaries
// using this library can read them directly.
pub struct Config {
    pub query: String,
    pub filename: String,
    pub case_sensitive: bool,
}

impl Config {
    /// Builds a `Config` from raw command-line arguments.
    ///
    /// `args[0]` is the program name and is ignored; `args[1]` is the search
    /// query and `args[2]` the file to search.  Matching is case sensitive
    /// unless the `CASE_INSENSITIVE` environment variable is set (to any
    /// value).  Returns an error message when fewer than two arguments
    /// follow the program name.
    pub fn new(args: &[String]) -> Result<Config, &str> {
        match args {
            [_program, query, filename, ..] => Ok(Config {
                query: query.clone(),
                filename: filename.clone(),
                // Case-sensitive by default; setting CASE_INSENSITIVE flips it.
                case_sensitive: env::var("CASE_INSENSITIVE").is_err(),
            }),
            _ => Err("not enough arguments"),
        }
    }
}
/// Executes the search described by `config` and prints each matching line.
///
/// The target file is read fully into memory; the read error (and any other
/// failure) is propagated to the caller as a boxed `Error` via `?`.
pub fn run(config: Config) -> Result<(), Box<dyn Error>> {
    let contents = fs::read_to_string(config.filename)?;

    // Select the matching strategy once, based on the configuration flag.
    let results = match config.case_sensitive {
        true => search(&config.query, &contents),
        false => search_case_insensitive(&config.query, &contents),
    };

    for line in results {
        println!("{}", line);
    }

    Ok(())
}
// Unit tests for `search`: given a query and text to search, only the lines
// of the text that contain the query are returned.
#[cfg(test)]
mod tests {
    use super::*;
    #[test]
    fn one_result() {
        // Exactly one line of the fixture contains "duct".
        let query = "duct";
        let contents = "\
Rust:
safe, fast, productive.
Pick three.";
        assert_eq!(vec!["safe, fast, productive."], search(query, contents));
    }
}
/// Returns every line of `contents` that contains `query` (case sensitive),
/// in input order.
///
/// The returned slices borrow from `contents`, which is why the output
/// lifetime is tied to `contents` rather than to `query`.
pub fn search<'a>(query: &str, contents: &'a str) -> Vec<&'a str> {
    // Iterator form of the original push loop: keep exactly the lines that
    // contain the query.
    contents
        .lines()
        .filter(|line| line.contains(query))
        .collect()
}
// NOTE(review): these two tests live outside the `#[cfg(test)] mod tests`
// above.  `#[test]` items are still stripped from non-test builds, so this
// compiles fine, but conventionally they would be moved into the test module.
#[test]
fn case_sensitive() {
    // "Duct" (capital D) must NOT match a case-sensitive search for "duct".
    let query = "duct";
    let contents = "\
Rust:
safe, fast, productive.
Pick three.
Duct tape.";
    assert_eq!(vec!["safe, fast, productive."], search(query, contents));
}
#[test]
fn case_insensitive() {
    // "rUsT" matches both "Rust:" and "Trust me." when case is ignored.
    let query = "rUsT";
    let contents = "\
Rust:
safe, fast, productive.
Pick three.
Trust me.";
    assert_eq!(
        vec!["Rust:", "Trust me."],
        search_case_insensitive(query, contents)
    );
}
/// Returns every line of `contents` that contains `query`, ignoring case
/// (both sides are compared after `to_lowercase`), in input order.
pub fn search_case_insensitive<'a>(
    query: &str,
    contents: &'a str,
) -> Vec<&'a str> {
    // Lowercase the query once, outside the per-line filter.
    let query = query.to_lowercase();
    contents
        .lines()
        .filter(|line| line.to_lowercase().contains(&query))
        .collect()
}
| true |
3fafdc6b2dd29e78ab1726666535870087c39079
|
Rust
|
zhutony/firecracker
|
/src/devices/src/virtio/persist.rs
|
UTF-8
| 3,881 | 2.578125 | 3 |
[
"Apache-2.0",
"BSD-3-Clause"
] |
permissive
|
// Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0
//! Defines the structures needed for saving/restoring Virtio primitives.
use super::device::*;
use super::queue::*;
use crate::vm_memory::Address;
use snapshot::Persist;
use versionize::{VersionMap, Versionize, VersionizeResult};
use versionize_derive::Versionize;
use vm_memory::GuestAddress;
use std::num::Wrapping;
use std::sync::atomic::Ordering;
/// Serializable snapshot of a virtio queue; mirrors the fields of the live
/// `Queue` with guest addresses flattened to raw `u64` values so the state
/// is plain, versionable data.
#[derive(Clone, Debug, PartialEq, Versionize)]
pub struct QueueState {
    /// The maximal size in elements offered by the device
    max_size: u16,
    /// The queue size in elements the driver selected
    size: u16,
    /// Indicates if the queue is finished with configuration
    ready: bool,
    /// Guest physical address of the descriptor table
    desc_table: u64,
    /// Guest physical address of the available ring
    avail_ring: u64,
    /// Guest physical address of the used ring
    used_ring: u64,
    /// Wrapping index of the next entry to consume from the available ring.
    next_avail: Wrapping<u16>,
    /// Wrapping index of the next entry to place into the used ring.
    next_used: Wrapping<u16>,
}
impl Persist for Queue {
    type State = QueueState;
    // A queue is fully reconstructible from its saved state alone, so no
    // external handles are needed to restore it.
    type ConstructorArgs = ();
    type Error = ();

    /// Snapshots the queue into a `QueueState`, flattening the `GuestAddress`
    /// fields to their raw `u64` values.
    fn save(&self) -> Self::State {
        QueueState {
            max_size: self.max_size,
            size: self.size,
            ready: self.ready,
            desc_table: self.desc_table.0,
            avail_ring: self.avail_ring.0,
            used_ring: self.used_ring.0,
            next_avail: self.next_avail,
            next_used: self.next_used,
        }
    }

    /// Rebuilds a queue from a previously saved state; this is the exact
    /// inverse of `save` (raw addresses are re-wrapped in `GuestAddress`).
    fn restore(
        _: Self::ConstructorArgs,
        state: &Self::State,
    ) -> std::result::Result<Self, Self::Error> {
        Ok(Queue {
            max_size: state.max_size,
            size: state.size,
            ready: state.ready,
            desc_table: GuestAddress::new(state.desc_table),
            avail_ring: GuestAddress::new(state.avail_ring),
            used_ring: GuestAddress::new(state.used_ring),
            next_avail: state.next_avail,
            next_used: state.next_used,
        })
    }
}
/// State of a VirtioDevice.
///
/// The device-type-independent portion of any virtio device's state:
/// feature negotiation, per-queue snapshots, interrupt status and whether
/// the device had been activated.
#[derive(Debug, PartialEq, Versionize)]
pub struct VirtioDeviceState {
    // Feature bits offered by the device.
    pub avail_features: u64,
    // Feature bits acknowledged by the guest driver.
    pub acked_features: u64,
    // One saved state per virtqueue, in device order.
    pub queues: Vec<QueueState>,
    // Raw value of the device's interrupt status bitmask.
    pub interrupt_status: usize,
    pub activated: bool,
}
impl VirtioDeviceState {
    /// Snapshots the generic (device-type-independent) parts of any
    /// `VirtioDevice`: negotiated features, all queue states, interrupt
    /// status and the activation flag.
    pub fn from_device(device: &dyn VirtioDevice) -> Self {
        VirtioDeviceState {
            avail_features: device.avail_features(),
            acked_features: device.acked_features(),
            queues: device.queues().iter().map(Persist::save).collect(),
            // Relaxed load — presumably the VM is quiesced while a snapshot
            // is taken, so there are no concurrent writers.  TODO(review):
            // confirm against the snapshot/pause path.
            interrupt_status: device.interrupt_status().load(Ordering::Relaxed),
            activated: device.is_activated(),
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::virtio::mmio::tests::DummyDevice;

    // Round-trip: versionize-serialize a saved Queue, deserialize it, and
    // verify the restored queue equals the original.
    #[test]
    fn test_persistance() {
        let queue = Queue::new(128);
        let mut mem = vec![0; 4096];
        let version_map = VersionMap::new();
        queue
            .save()
            .serialize(&mut mem.as_mut_slice(), &version_map, 1)
            .unwrap();
        let restored_queue = Queue::restore(
            (),
            &QueueState::deserialize(&mut mem.as_slice(), &version_map, 1).unwrap(),
        )
        .unwrap();
        assert_eq!(restored_queue, queue);
    }

    // Same round-trip for VirtioDeviceState, snapshotted from a dummy device.
    #[test]
    fn test_virtio_device_state_versionize() {
        let dummy = DummyDevice::new();
        let mut mem = vec![0; 4096];
        let version_map = VersionMap::new();
        let state = VirtioDeviceState::from_device(&dummy);
        state
            .serialize(&mut mem.as_mut_slice(), &version_map, 1)
            .unwrap();
        let restored_state =
            VirtioDeviceState::deserialize(&mut mem.as_slice(), &version_map, 1).unwrap();
        assert_eq!(restored_state, state);
    }
}
| true |
78a87cee3e1dc14c0cfb9f9b0d327955c2cafe42
|
Rust
|
xlnx/myrpg
|
/src/lalr_util/src/formatter.rs
|
UTF-8
| 4,972 | 3.125 | 3 |
[] |
no_license
|
use std::io;
use serde_json::ser::{CompactFormatter, Formatter, PrettyFormatter};
// Position within the AST-shaped JSON document being written; consumed by
// `AstFormatter::reduce` to decide between pretty and compact formatting.
// Variant roles are inferred from the transition table in `reduce` —
// TODO(review): confirm against the serialized AST layout.
#[derive(Debug, Copy, Clone)]
enum State {
    None,
    Ast,
    AstPos,
    AfterAstPos,
    Child,
    Token,
    Pos,
}
/// A `serde_json` formatter that pretty-prints the AST's structure while
/// writing small leaf arrays (tokens/positions) compactly on one line.
pub struct AstFormatter<'a> {
    // Current position in the document, advanced by `reduce`.
    state: State,
    // Delegates: structural output goes to `pretty`, leaf arrays to `compact`.
    pretty: PrettyFormatter<'a>,
    compact: CompactFormatter,
}
impl<'a> AstFormatter<'a> {
    /// Creates a formatter in the initial `None` state.
    pub fn new() -> Self {
        AstFormatter {
            state: State::None,
            pretty: PrettyFormatter::new(),
            compact: CompactFormatter {},
        }
    }

    /// Advances the state machine for an array (`is_array == true`) or object
    /// event, entering (`is_enter == true`) or leaving it.
    ///
    /// Returns the state that should drive formatting of the current event:
    /// the *new* state when entering, the *previous* state when leaving.
    /// Panics on transitions a well-formed AST document cannot produce.
    fn reduce(&mut self, is_array: bool, is_enter: bool) -> State {
        let old_state = self.state;
        match (self.state, is_array, is_enter) {
            (State::None, false, _) => {
                self.state = State::Ast;
            }
            (State::None, true, _) => {
                self.state = State::Child;
            }
            (State::Ast, _, true) => {
                self.state = State::AstPos;
            }
            (State::Ast, _, false) => {
                self.state = State::Child;
            }
            (State::AstPos, _, _) => {
                self.state = State::AfterAstPos;
            }
            (State::AfterAstPos, _, _) => {
                self.state = State::Child;
            }
            (State::Child, true, true) => {
                self.state = State::Token;
            }
            (State::Child, true, false) => {
                self.state = State::Ast;
            }
            (State::Child, false, true) => {
                self.state = State::Ast;
            }
            (State::Token, true, false) => {
                self.state = State::Child;
            }
            (State::Token, true, true) => {
                self.state = State::Pos;
            }
            (State::Pos, true, false) => {
                self.state = State::Token;
            }
            _ => panic!(),
        }
        if is_enter {
            self.state
        } else {
            old_state
        }
    }
}
impl<'a> Formatter for AstFormatter<'a> {
    // Array events advance the state machine and select a formatter:
    // `Child` arrays are pretty-printed, token/position arrays are compact.
    #[inline]
    fn begin_array<W: ?Sized>(&mut self, writer: &mut W) -> io::Result<()>
    where
        W: io::Write,
    {
        match self.reduce(true, true) {
            State::Child => self.pretty.begin_array(writer),
            State::Token => self.compact.begin_array(writer),
            State::Pos | State::AstPos => self.compact.begin_array(writer),
            _ => panic!(),
        }
    }

    #[inline]
    fn end_array<W: ?Sized>(&mut self, writer: &mut W) -> io::Result<()>
    where
        W: io::Write,
    {
        match self.reduce(true, false) {
            State::Child => self.pretty.end_array(writer),
            State::Token => self.compact.end_array(writer),
            State::Pos | State::AstPos => self.compact.end_array(writer),
            _ => panic!(),
        }
    }

    // Array *values* do not change state; they format according to the state
    // left by the enclosing begin_array.
    #[inline]
    fn begin_array_value<W: ?Sized>(&mut self, writer: &mut W, first: bool) -> io::Result<()>
    where
        W: io::Write,
    {
        match self.state {
            State::Child => self.pretty.begin_array_value(writer, first),
            State::Token => self.compact.begin_array_value(writer, first),
            State::Pos | State::AstPos => self.compact.begin_array_value(writer, first),
            _ => panic!(),
        }
    }

    #[inline]
    fn end_array_value<W: ?Sized>(&mut self, writer: &mut W) -> io::Result<()>
    where
        W: io::Write,
    {
        match self.state {
            State::Child => self.pretty.end_array_value(writer),
            State::Token => self.compact.end_array_value(writer),
            State::Pos | State::AstPos => self.compact.end_array_value(writer),
            _ => panic!(),
        }
    }

    // Objects are always pretty-printed; `reduce` is invoked purely for its
    // state-transition side effect.  (The original wrapped these calls in
    // `match self.reduce(..) { _ => .. }` — a single-wildcard match that hid
    // the fact that the matched value was unused.)
    #[inline]
    fn begin_object<W: ?Sized>(&mut self, writer: &mut W) -> io::Result<()>
    where
        W: io::Write,
    {
        self.reduce(false, true);
        self.pretty.begin_object(writer)
    }

    #[inline]
    fn end_object<W: ?Sized>(&mut self, writer: &mut W) -> io::Result<()>
    where
        W: io::Write,
    {
        self.reduce(false, false);
        self.pretty.end_object(writer)
    }

    // Object keys and values never switch formatter: always pretty.
    #[inline]
    fn begin_object_key<W: ?Sized>(&mut self, writer: &mut W, first: bool) -> io::Result<()>
    where
        W: io::Write,
    {
        self.pretty.begin_object_key(writer, first)
    }

    #[inline]
    fn end_object_key<W: ?Sized>(&mut self, writer: &mut W) -> io::Result<()>
    where
        W: io::Write,
    {
        self.pretty.end_object_key(writer)
    }

    #[inline]
    fn begin_object_value<W: ?Sized>(&mut self, writer: &mut W) -> io::Result<()>
    where
        W: io::Write,
    {
        self.pretty.begin_object_value(writer)
    }

    #[inline]
    fn end_object_value<W: ?Sized>(&mut self, writer: &mut W) -> io::Result<()>
    where
        W: io::Write,
    {
        self.pretty.end_object_value(writer)
    }
}
| true |
0fceebe74307e2c7fb949f250a770a205afea622
|
Rust
|
mongodb/mongo-rust-driver
|
/src/sync/change_stream.rs
|
UTF-8
| 9,124 | 3.1875 | 3 |
[
"OpenSSL",
"Apache-2.0"
] |
permissive
|
use futures_util::stream::StreamExt;
use serde::de::DeserializeOwned;
use crate::{
change_stream::{
event::ResumeToken,
session::SessionChangeStream as AsyncSessionChangeStream,
ChangeStream as AsyncChangeStream,
},
error::Result,
runtime,
};
use super::ClientSession;
/// A `ChangeStream` streams the ongoing changes of its associated collection, database or
/// deployment. `ChangeStream` instances should be created with method `watch` against the relevant
/// target.
///
/// `ChangeStream`s are "resumable", meaning that they can be restarted at a given place in the
/// stream of events. This is done automatically when the `ChangeStream` encounters certain
/// ["resumable"](https://github.com/mongodb/specifications/blob/master/source/change-streams/change-streams.rst#resumable-error)
/// errors, such as transient network failures. It can also be done manually by passing
/// a [`ResumeToken`] retrieved from a past event into either the
/// [`resume_after`](crate::options::ChangeStreamOptions::resume_after) or
/// [`start_after`](crate::options::ChangeStreamOptions::start_after) (4.2+) options used to create
/// the `ChangeStream`. Issuing a raw change stream aggregation is discouraged unless users wish to
/// explicitly opt out of resumability.
///
/// A `ChangeStream` can be iterated like any other [`Iterator`]:
///
/// ```
/// # use mongodb::{sync::Client, error::Result, bson::doc,
/// # change_stream::event::ChangeStreamEvent};
/// #
/// # fn func() -> Result<()> {
/// # let client = Client::with_uri_str("mongodb://example.com")?;
/// # let coll = client.database("foo").collection("bar");
/// let mut change_stream = coll.watch(None, None)?;
/// coll.insert_one(doc! { "x": 1 }, None)?;
/// for event in change_stream {
/// let event = event?;
/// println!("operation performed: {:?}, document: {:?}", event.operation_type, event.full_document);
/// // operation performed: Insert, document: Some(Document({"x": Int32(1)}))
/// }
/// #
/// # Ok(())
/// # }
/// ```
///
/// See the documentation [here](https://www.mongodb.com/docs/manual/changeStreams) for more
/// details. Also see the documentation on [usage recommendations](https://www.mongodb.com/docs/manual/administration/change-streams-production-recommendations/).
pub struct ChangeStream<T>
where
    T: DeserializeOwned + Unpin + Send + Sync,
{
    // The async change stream this blocking facade delegates to via
    // `runtime::block_on`.
    async_stream: AsyncChangeStream<T>,
}
impl<T> ChangeStream<T>
where
    T: DeserializeOwned + Unpin + Send + Sync,
{
    /// Wraps the given async change stream in this blocking, iterator-based
    /// facade.
    pub(crate) fn new(async_stream: AsyncChangeStream<T>) -> Self {
        Self { async_stream }
    }

    /// Returns the cached resume token that can be used to resume after the most recently returned
    /// change.
    ///
    /// See the documentation
    /// [here](https://www.mongodb.com/docs/manual/changeStreams/#change-stream-resume-token) for more
    /// information on change stream resume tokens.
    pub fn resume_token(&self) -> Option<ResumeToken> {
        self.async_stream.resume_token()
    }

    /// Update the type streamed values will be parsed as.
    pub fn with_type<D: DeserializeOwned + Unpin + Send + Sync>(self) -> ChangeStream<D> {
        ChangeStream {
            async_stream: self.async_stream.with_type(),
        }
    }

    /// Returns whether the change stream will continue to receive events.
    pub fn is_alive(&self) -> bool {
        self.async_stream.is_alive()
    }

    /// Retrieves the next result from the change stream, if any.
    ///
    /// Where calling `Iterator::next` will internally loop until a change document is received,
    /// this will make at most one request and return `None` if the returned document batch is
    /// empty. This method should be used when storing the resume token in order to ensure the
    /// most up to date token is received, e.g.
    ///
    /// ```
    /// # use mongodb::{bson::Document, sync::{Client, Collection}, error::Result};
    /// # fn func() -> Result<()> {
    /// # let client = Client::with_uri_str("mongodb://example.com")?;
    /// # let coll: Collection<Document> = client.database("foo").collection("bar");
    /// let mut change_stream = coll.watch(None, None)?;
    /// let mut resume_token = None;
    /// while change_stream.is_alive() {
    ///     if let Some(event) = change_stream.next_if_any()? {
    ///         // process event
    ///     }
    ///     resume_token = change_stream.resume_token();
    /// }
    /// #
    /// # Ok(())
    /// # }
    /// ```
    pub fn next_if_any(&mut self) -> Result<Option<T>> {
        // Bridge to the async implementation by blocking the current thread.
        runtime::block_on(self.async_stream.next_if_any())
    }
}
impl<T> Iterator for ChangeStream<T>
where
    T: DeserializeOwned + Unpin + Send + Sync,
{
    type Item = Result<T>;

    // Blocks the current thread until the next change event (or error)
    // arrives; unlike `next_if_any`, this loops internally until a change
    // document is received.
    fn next(&mut self) -> Option<Self::Item> {
        runtime::block_on(self.async_stream.next())
    }
}
/// A [`SessionChangeStream`] is a change stream that was created with a [`ClientSession`] that must
/// be iterated using one. To iterate, use [`SessionChangeStream::next`]:
///
/// ```
/// # use mongodb::{bson::Document, sync::Client, error::Result};
/// #
/// # async fn do_stuff() -> Result<()> {
/// # let client = Client::with_uri_str("mongodb://example.com")?;
/// # let mut session = client.start_session(None)?;
/// # let coll = client.database("foo").collection::<Document>("bar");
/// #
/// let mut cs = coll.watch_with_session(None, None, &mut session)?;
/// while let Some(event) = cs.next(&mut session)? {
/// println!("{:?}", event)
/// }
/// #
/// # Ok(())
/// # }
/// ```
pub struct SessionChangeStream<T>
where
    T: DeserializeOwned + Unpin,
{
    // The async session change stream this blocking facade delegates to via
    // `runtime::block_on`.
    async_stream: AsyncSessionChangeStream<T>,
}
impl<T> SessionChangeStream<T>
where
    T: DeserializeOwned + Unpin + Send + Sync,
{
    /// Wraps the given async session change stream in this blocking facade.
    pub(crate) fn new(async_stream: AsyncSessionChangeStream<T>) -> Self {
        Self { async_stream }
    }

    /// Returns the cached resume token that can be used to resume after the most recently returned
    /// change.
    ///
    /// See the documentation
    /// [here](https://www.mongodb.com/docs/manual/changeStreams/#change-stream-resume-token) for more
    /// information on change stream resume tokens.
    pub fn resume_token(&self) -> Option<ResumeToken> {
        self.async_stream.resume_token()
    }

    /// Update the type streamed values will be parsed as.
    pub fn with_type<D: DeserializeOwned + Unpin + Send + Sync>(self) -> SessionChangeStream<D> {
        SessionChangeStream {
            async_stream: self.async_stream.with_type(),
        }
    }

    /// Retrieve the next result from the change stream.
    /// The session provided must be the same session used to create the change stream.
    ///
    /// ```
    /// # use bson::{doc, Document};
    /// # use mongodb::sync::Client;
    /// # fn main() {
    /// # async {
    /// # let client = Client::with_uri_str("foo")?;
    /// # let coll = client.database("foo").collection::<Document>("bar");
    /// # let other_coll = coll.clone();
    /// # let mut session = client.start_session(None)?;
    /// let mut cs = coll.watch_with_session(None, None, &mut session)?;
    /// while let Some(event) = cs.next(&mut session)? {
    ///     let id = bson::to_bson(&event.id)?;
    ///     other_coll.insert_one_with_session(doc! { "id": id }, None, &mut session)?;
    /// }
    /// # Ok::<(), mongodb::error::Error>(())
    /// # };
    /// # }
    /// ```
    pub fn next(&mut self, session: &mut ClientSession) -> Result<Option<T>> {
        // Bridge to the async implementation, borrowing the caller's session.
        runtime::block_on(self.async_stream.next(&mut session.async_client_session))
    }

    /// Returns whether the change stream will continue to receive events.
    pub fn is_alive(&self) -> bool {
        self.async_stream.is_alive()
    }

    /// Retrieve the next result from the change stream, if any.
    ///
    /// Where calling `next` will internally loop until a change document is received,
    /// this will make at most one request and return `None` if the returned document batch is
    /// empty. This method should be used when storing the resume token in order to ensure the
    /// most up to date token is received, e.g.
    ///
    /// ```
    /// # use mongodb::{bson::Document, sync::{Client, Collection}, error::Result};
    /// # async fn func() -> Result<()> {
    /// # let client = Client::with_uri_str("mongodb://example.com")?;
    /// # let coll: Collection<Document> = client.database("foo").collection("bar");
    /// # let mut session = client.start_session(None)?;
    /// let mut change_stream = coll.watch_with_session(None, None, &mut session)?;
    /// let mut resume_token = None;
    /// while change_stream.is_alive() {
    ///     if let Some(event) = change_stream.next_if_any(&mut session)? {
    ///         // process event
    ///     }
    ///     resume_token = change_stream.resume_token();
    /// }
    /// #
    /// # Ok(())
    /// # }
    /// ```
    pub fn next_if_any(&mut self, session: &mut ClientSession) -> Result<Option<T>> {
        runtime::block_on(
            self.async_stream
                .next_if_any(&mut session.async_client_session),
        )
    }
}
| true |
80f90d631db0cbee7511f4f87eb9ad23459299c9
|
Rust
|
cstorey/jump-ch
|
/src/lcg.rs
|
UTF-8
| 350 | 2.546875 | 3 |
[] |
no_license
|
use rand::Rng;
use RandFromKey;
// State of a 64-bit linear congruential generator, seeded directly from a key.
pub struct LcgRng(u64);
impl RandFromKey for LcgRng {
    // The key becomes the initial LCG state unchanged (no mixing/avalanche).
    fn from_key(key: u64) -> LcgRng {
        LcgRng(key)
    }
}
impl Rng for LcgRng {
    /// Advances the generator one LCG step (computed modulo 2^64) and
    /// returns the low 32 bits of the new state.
    fn next_u32(&mut self) -> u32 {
        // Both the multiply AND the increment must wrap: the original used a
        // plain `+ 1`, which overflows (panicking in debug builds) whenever
        // the wrapped product is exactly u64::MAX.  The LCG recurrence is
        // defined mod 2^64, so wrapping_add preserves the intended sequence.
        self.0 = self.0.wrapping_mul(2862933555777941757).wrapping_add(1);
        self.0 as u32
    }
}
| true |
396de249f7fbb5f84bb96c4fbaa8a1b648849e85
|
Rust
|
isgasho/bam3d
|
/src/traits.rs
|
UTF-8
| 5,821 | 3.53125 | 4 |
[] |
no_license
|
use glam::{Vec3, Mat4};
/// An intersection test with a result.
///
/// An example would be a Ray vs AABB intersection test that returns a Point in space.
///
/// `RHS` is the type of the other shape taking part in the test.
pub trait Continuous<RHS> {
    /// The value produced by a successful intersection (e.g. a point).
    type Result;
    /// Intersection test; `None` when the shapes do not intersect.
    fn intersection(&self, _: &RHS) -> Option<Self::Result>;
}
/// A boolean intersection test.
///
pub trait Discrete<RHS> {
    /// Intersection test
    fn intersects(&self, _: &RHS) -> bool;
}
/// Boolean containment test.
///
pub trait Contains<RHS> {
    /// Containment test
    fn contains(&self, _: &RHS) -> bool;
}
/// Shape surface area
///
pub trait SurfaceArea {
    /// Compute surface area
    fn surface_area(&self) -> f32;
}
/// Build the union of two shapes.
///
pub trait Union<RHS = Self> {
    /// Union shape created
    type Output;
    /// Build the union shape of self and the given shape.
    fn union(&self, _: &RHS) -> Self::Output;
}
/// Bounding volume abstraction for use with algorithms
pub trait Bound {
    /// Minimum extents of the bounding volume
    fn min_extent(&self) -> Vec3;
    /// Maximum extents of the bounding volume
    fn max_extent(&self) -> Vec3;
    /// Create a new bounding volume extended by the given amount
    /// (per-axis margin; exact application is implementation-defined).
    fn with_margin(&self, add: Vec3) -> Self;
    /// Apply an arbitrary transform to the bounding volume
    fn transform_volume(&self, transform: &Mat4) -> Self;
    /// Create empty volume
    fn empty() -> Self;
}
/// Primitive with bounding volume
pub trait HasBound {
    /// Bounding volume type
    type Bound: Bound;
    /// Borrow the bounding volume
    fn bound(&self) -> &Self::Bound;
}
/// Utilities for computing bounding volumes of primitives
pub trait ComputeBound<B>
where
    B: Bound,
{
    /// Compute the bounding volume
    fn compute_bound(&self) -> B;
}
/// Minkowski support function for primitive
pub trait Primitive {
    /// Get the support point on the shape in a given direction.
    ///
    /// ## Parameters
    ///
    /// - `direction`: The search direction in world space.
    /// - `transform`: The current local to world transform for this primitive.
    ///
    /// ## Returns
    ///
    /// Return the point that is furthest away from the origin, in the given search direction.
    /// For discrete shapes, the furthest vertex is enough, there is no need to do exact
    /// intersection point computation.
    ///
    /// ## Type parameters
    ///
    /// - `P`: Transform type
    fn support_point(
        &self,
        direction: &Vec3,
        transform: &Mat4,
    ) -> Vec3;
}
/// Discrete intersection test on transformed primitive
/// (the `Mat4` is presumably `self`'s local-to-world transform, mirroring
/// `Primitive::support_point` — confirm per implementation).
pub trait DiscreteTransformed<RHS> {
    /// Intersection test for transformed self
    fn intersects_transformed(&self, _: &RHS, _: &Mat4) -> bool;
}
/// Continuous intersection test on transformed primitive; returns the
/// intersection point when one exists.
pub trait ContinuousTransformed<RHS> {
    /// Intersection test for transformed self
    fn intersection_transformed(&self, _: &RHS, _: &Mat4) -> Option<Vec3>;
}
/// Trait used for interpolation of values
///
/// ## Type parameters:
///
/// - `S`: The scalar type used for amount
pub trait Interpolate {
    /// Interpolate between `self` and `other`, using amount to calculate how much of other to use.
    ///
    /// ## Parameters:
    ///
    /// - `amount`: amount in the range 0. .. 1.
    ///   (nothing in this trait clamps out-of-range amounts — behavior then
    ///   depends on the implementation).
    /// - `other`: the other value to interpolate with
    ///
    /// ## Returns
    ///
    /// A new value approximately equal to `self * (1. - amount) + other * amount`.
    fn interpolate(&self, other: &Self, amount: f32) -> Self;
}
/// Trait used for interpolation of translation only in transforms
pub trait TranslationInterpolate {
    /// Interpolate between `self` and `other`, using amount to calculate how much of other to use.
    ///
    /// ## Parameters:
    ///
    /// - `amount`: amount in the range 0. .. 1.
    /// - `other`: the other value to interpolate with
    ///
    /// ## Returns
    ///
    /// A new value approximately equal to `self * (1. - amount) + other * amount`.
    fn translation_interpolate(&self, other: &Self, amount: f32) -> Self;
}
mod interpolate {
    use super::{Interpolate, TranslationInterpolate};
    use glam::{Mat3, Mat4, Quat};
    impl Interpolate for Quat {
        // Linear interpolation via `Quat::lerp` (not slerp) — cheaper, and
        // adequate for small rotation deltas.
        fn interpolate(&self, other: &Self, amount: f32) -> Self {
            self.lerp(*other, amount)
        }
    }
    impl Interpolate for Mat3 {
        // Convert both rotation matrices to quaternions, lerp, convert back.
        fn interpolate(&self, other: &Self, amount: f32) -> Self {
            Mat3::from_quat(
                Quat::from_rotation_mat3(self).lerp(Quat::from_rotation_mat3(other), amount),
            )
        }
    }
    impl Interpolate for Mat4 {
        // Decompose into scale/rotation/translation, interpolate each channel
        // separately, then recompose.
        fn interpolate(&self, other: &Self, amount: f32) -> Self {
            let (self_scale, self_rotation, self_translation) = self.to_scale_rotation_translation();
            let (other_scale, other_rotation, other_translation) = other.to_scale_rotation_translation();
            let scale = self_scale * (1.0 - amount) + other_scale * amount;
            let rotation = self_rotation.interpolate(&other_rotation, amount);
            let translation = self_translation.lerp(other_translation, amount);
            Mat4::from_scale_rotation_translation(scale, rotation, translation)
        }
    }
    impl TranslationInterpolate for Mat4{
        // Only translation is interpolated; scale and rotation are taken
        // wholesale from `other`.  NOTE(review): using `other`'s (not
        // `self`'s) scale/rotation looks deliberate but is worth confirming
        // against call sites.
        fn translation_interpolate(&self, other: &Self, amount: f32) -> Self {
            let (_self_scale, _self_rotation, self_translation) = self.to_scale_rotation_translation();
            let (other_scale, other_rotation, other_translation) = other.to_scale_rotation_translation();
            let scale = other_scale;
            let rotation = other_rotation;
            let translation = self_translation.lerp(other_translation, amount);
            Mat4::from_scale_rotation_translation(scale, rotation, translation)
        }
    }
}
| true |
ee6a9b377286851ed79f76a6feee5253ae4bd3d6
|
Rust
|
rustpass/rustpass
|
/src/internal/database/binary/header/block.rs
|
UTF-8
| 6,338 | 2.859375 | 3 |
[] |
no_license
|
use crate::{
api::traits::Sizable,
internal::{
database::binary::{
Block,
BlockId,
BlockSize,
BlockData,
BlockDataSlice,
},
traits::{
AsBytes,
TryFromBytes,
}
}
};
use bytes::{
self,
BufMut
};
use byteorder::{
ByteOrder,
LittleEndian,
};
use std::{
io::Read,
mem,
};
///
/// `HeaderBlock3` implementation
///
// A KDBX3 header block: a one-byte id, a little-endian u16 payload size, and
// the payload bytes themselves.
#[derive(Clone, Debug, Eq, PartialEq)]
pub(crate) struct HeaderBlock3 {
    block_id: u8,
    block_size: u16,
    block_data: Vec<u8>,
}

impl HeaderBlock3 {
    /// Size of the `id (u8) | size (u16)` prefix that precedes the payload.
    const OFFSET: usize = mem::size_of::<u8>() + mem::size_of::<u16>();

    /// Creates a block from an id and its payload; the size field is derived
    /// from the payload length (which must fit in a `u16`).
    pub fn new(block_id: u8, block_data: Vec<u8>) -> Self {
        Self {
            block_id,
            block_size: block_data.len() as u16,
            // Take ownership of the payload directly; the original's
            // `Vec::from(_block_data.as_slice())` copied it needlessly.
            block_data,
        }
    }
}
impl Sizable for HeaderBlock3 {
    // Total on-disk size: 3-byte header plus payload.
    fn size_in_bytes(&self) -> usize {
        Self::OFFSET + self.block_size as usize
    }
}
impl AsBytes for HeaderBlock3 {
    /// Serializes the block as `id (u8) | size (u16, LE) | payload`.
    fn as_bytes(&self) -> Vec<u8> {
        // Reserve room for the header as well as the payload; the original
        // sized the buffer at `block_size` only, guaranteeing a reallocation.
        let mut buf = bytes::BytesMut::with_capacity(self.size_in_bytes());
        buf.put_u8(self.block_id);
        buf.put_u16_le(self.block_size);
        buf.put_slice(self.block_data.as_ref());
        buf.to_vec()
    }
}
impl TryFromBytes for HeaderBlock3 {
    type Error = ();

    /// Parses `id (u8) | size (u16, LE) | payload` from the front of `value`.
    ///
    /// Returns `Err(())` when the input is too short for the header or for
    /// the declared payload.  Trailing bytes after the payload are ignored.
    fn from_bytes(value: &[u8]) -> Result<Self, Self::Error> {
        // Need at least the 3-byte header.
        if value.len() < Self::OFFSET {
            return Err(());
        }

        let block_id = value[0];
        let block_size = LittleEndian::read_u16(&value[1..3]) as usize;

        // The payload starts AFTER the header, so the bound must include the
        // header length.  The original compared `block_size` against
        // `value.len()` alone, and the later slice
        // `value[OFFSET..block_size + OFFSET]` could panic whenever
        // `value.len() - OFFSET < block_size <= value.len()`.
        let end = Self::OFFSET + block_size;
        if end > value.len() {
            return Err(());
        }

        // Copy the payload directly; the `io::Read::read_to_end` dance the
        // original used is unnecessary for an in-memory slice.
        Ok(HeaderBlock3::new(
            block_id,
            value[Self::OFFSET..end].to_vec(),
        ))
    }
}
impl<'a> Block<'a> for HeaderBlock3 {}

impl<'a> BlockId<'a, u8> for HeaderBlock3 {
    fn block_id(&self) -> u8 {
        self.block_id
    }
}

impl<'a> BlockSize<'a, u16> for HeaderBlock3 {
    fn block_size(&self) -> u16 {
        self.block_size
    }
}

impl<'a> BlockData<'a, u8> for HeaderBlock3 {
    // Returns a copy of the payload; the original's `Vec::from(clone)` was
    // an identity conversion layered on top of the clone.
    fn block_data(&self) -> Vec<u8> {
        self.block_data.clone()
    }
}

impl<'a> BlockDataSlice<'a, u8> for HeaderBlock3 {}
///
/// `HeaderBlock4` implementation
///
// A KDBX4 header block: a one-byte id, a little-endian u32 payload size, and
// the payload bytes themselves.
#[derive(Clone, Debug, Eq, PartialEq)]
pub(crate) struct HeaderBlock4 {
    block_id: u8,
    block_size: u32,
    block_data: Vec<u8>,
}

impl HeaderBlock4 {
    /// Size of the `id (u8) | size (u32)` prefix that precedes the payload.
    const OFFSET: usize = mem::size_of::<u32>() + mem::size_of::<u8>();

    /// Creates a block from an id and its payload; the size field is derived
    /// from the payload length (which must fit in a `u32`).
    pub fn new(block_id: u8, block_data: Vec<u8>) -> Self {
        Self {
            block_id,
            block_size: block_data.len() as u32,
            // `Vec::from` on an already-owned `Vec` was an identity
            // conversion; just move the payload in.
            block_data,
        }
    }
}
impl Sizable for HeaderBlock4 {
    // Total on-disk size: 5-byte header plus payload.
    fn size_in_bytes(&self) -> usize {
        Self::OFFSET + self.block_size as usize
    }
}
impl AsBytes for HeaderBlock4 {
    /// Serializes the block as `id (u8) | size (u32, LE) | payload`.
    fn as_bytes(&self) -> Vec<u8> {
        // Reserve room for the 5-byte header as well as the payload; the
        // original sized the buffer at `block_size` only.
        let mut buf = bytes::BytesMut::with_capacity(self.size_in_bytes());
        buf.put_u8(self.block_id);
        buf.put_u32_le(self.block_size);
        buf.put_slice(self.block_data.as_ref());
        buf.to_vec()
    }
}
impl TryFromBytes for HeaderBlock4 {
    type Error = ();

    /// Parses `id (u8) | size (u32, LE) | payload` from the front of `value`.
    ///
    /// Returns `Err(())` when the input is too short for the header or for
    /// the declared payload.  Trailing bytes after the payload are ignored.
    fn from_bytes(value: &[u8]) -> Result<Self, Self::Error> {
        // Need at least the 5-byte header.
        if value.len() < Self::OFFSET {
            return Err(());
        }

        let block_id = value[0];
        let block_size = LittleEndian::read_u32(&value[1..5]) as usize;

        // The original sliced `value[OFFSET..block_size + OFFSET]` with NO
        // bounds check at all, panicking on truncated/malformed input.  Use
        // checked_add so a huge declared size cannot overflow `usize` either.
        let end = Self::OFFSET.checked_add(block_size).ok_or(())?;
        if end > value.len() {
            return Err(());
        }

        Ok(HeaderBlock4::new(
            block_id,
            value[Self::OFFSET..end].to_vec(),
        ))
    }
}
impl<'a> Block<'a> for HeaderBlock4 {}

impl<'a> BlockId<'a, u8> for HeaderBlock4 {
    fn block_id(&self) -> u8 {
        self.block_id
    }
}

impl<'a> BlockSize<'a, u32> for HeaderBlock4 {
    fn block_size(&self) -> u32 {
        self.block_size
    }
}

impl<'a> BlockData<'a, u8> for HeaderBlock4 {
    // Returns a copy of the payload; the original's `Vec::from(clone)` was
    // an identity conversion layered on top of the clone.
    fn block_data(&self) -> Vec<u8> {
        self.block_data.clone()
    }
}

impl<'a> BlockDataSlice<'a, u8> for HeaderBlock4 {}
#[cfg(test)]
mod tests {
    // Fixtures: an empty payload and a 32-byte payload, shared by the KDBX3
    // (u16 size) and KDBX4 (u32 size) constructor tests below.
    const TEST_BLOCK_ID: u8 = 1u8;
    const TEST_BLOCK_DATA_0: [u8; 0] = [1u8; 0];
    const TEST_BLOCK_SIZE_0: usize = 0usize;
    const TEST_BLOCK_DATA_32: [u8; 32] = [1u8; 32];
    const TEST_BLOCK_SIZE_32: usize = 32usize;
    use super::{
        HeaderBlock3,
        HeaderBlock4,
    };
    // Each test checks the id, the derived size field, and the payload length.
    #[test]
    fn test_create_kdbx3_0() {
        let block = HeaderBlock3::new(
            TEST_BLOCK_ID,
            Vec::from(TEST_BLOCK_DATA_0),
        );
        assert_eq!(block.block_id, 1u8);
        assert_eq!(block.block_size, TEST_BLOCK_SIZE_0 as u16);
        assert_eq!(block.block_data.len(), TEST_BLOCK_DATA_0.len());
    }
    #[test]
    fn test_create_kdbx3_32() {
        let block = HeaderBlock3::new(
            TEST_BLOCK_ID,
            Vec::from(TEST_BLOCK_DATA_32),
        );
        assert_eq!(block.block_id, 1u8);
        assert_eq!(block.block_size, TEST_BLOCK_SIZE_32 as u16);
        assert_eq!(block.block_data.len(), TEST_BLOCK_DATA_32.len());
    }
    #[test]
    fn test_create_kdbx4_0() {
        let block = HeaderBlock4::new(
            TEST_BLOCK_ID,
            Vec::from(TEST_BLOCK_DATA_0),
        );
        assert_eq!(block.block_id, 1u8);
        assert_eq!(block.block_size, TEST_BLOCK_SIZE_0 as u32);
        assert_eq!(block.block_data.len(), TEST_BLOCK_DATA_0.len());
    }
    #[test]
    fn test_create_kdbx4_32() {
        let block = HeaderBlock4::new(
            TEST_BLOCK_ID,
            Vec::from(TEST_BLOCK_DATA_32),
        );
        assert_eq!(block.block_id, 1u8);
        assert_eq!(block.block_size, TEST_BLOCK_SIZE_32 as u32);
        assert_eq!(block.block_data.len(), TEST_BLOCK_DATA_32.len());
    }
}
| true |
e86189f0630de30b680fcd44450d27b4dd70a262
|
Rust
|
SuicideSin/razberry.rs
|
/src/command_classes.rs
|
UTF-8
| 3,437 | 2.75 | 3 |
[] |
no_license
|
// Copyright (c) 2017 Brandon Thomas <[email protected], [email protected]>
use std::fmt;
/**
* The different ZWave command classes supported by various devices.
*/
#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)]
pub enum CommandClasses {
    Alarm,
    AlarmSensor,
    AlarmSilence,
    Association,
    Basic,
    Battery,
    Clock,
    Configuration,
    FirmwareUpdate,
    ManufacturerSpecific,
    MultiChannel,
    MultiChannelAssociation,
    NoOperation,
    NodeNaming,
    PowerLevel,
    SensorBinary,
    SensorConfiguration,
    SensorMultilevel,
    SwitchBinary,
    SwitchMultilevel,
    Version,
    Wakeup,
}

impl CommandClasses {
    /// Convert a command class identifier byte into a command class.
    ///
    /// Returns `None` for identifiers this library does not support.
    pub fn from_byte(command_class_id: u8) -> Option<CommandClasses> {
        let command_class = match command_class_id {
            0x00 => CommandClasses::NoOperation,
            0x20 => CommandClasses::Basic,
            0x25 => CommandClasses::SwitchBinary,
            0x26 => CommandClasses::SwitchMultilevel,
            0x30 => CommandClasses::SensorBinary,
            0x31 => CommandClasses::SensorMultilevel,
            0x60 => CommandClasses::MultiChannel,
            0x7A => CommandClasses::FirmwareUpdate,
            0x70 => CommandClasses::Configuration,
            0x71 => CommandClasses::Alarm,
            0x72 => CommandClasses::ManufacturerSpecific,
            0x73 => CommandClasses::PowerLevel,
            0x77 => CommandClasses::NodeNaming,
            0x80 => CommandClasses::Battery,
            0x81 => CommandClasses::Clock,
            0x84 => CommandClasses::Wakeup,
            0x85 => CommandClasses::Association,
            0x86 => CommandClasses::Version,
            0x8E => CommandClasses::MultiChannelAssociation,
            0x9C => CommandClasses::AlarmSensor,
            0x9D => CommandClasses::AlarmSilence,
            0x9E => CommandClasses::SensorConfiguration,
            _ => return None,
        };
        Some(command_class)
    }

    /// Convert a command class string identifier into a command class.
    ///
    /// The string is parsed as a *decimal* `u8` (e.g. `"32"` maps to
    /// `Basic`/0x20).  Parse failures and unsupported identifiers both
    /// yield `None`.
    pub fn from_str(command_class_id: &str) -> Option<CommandClasses> {
        command_class_id
            .parse::<u8>()
            .ok() // Discard parse errors.
            .and_then(Self::from_byte)
    }
}
impl fmt::Display for CommandClasses {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let s = match *self {
CommandClasses::Alarm => "Alarm",
CommandClasses::AlarmSensor => "AlarmSensor",
CommandClasses::AlarmSilence => "AlarmSilence",
CommandClasses::Association => "Association",
CommandClasses::Basic => "Basic",
CommandClasses::Battery => "Battery",
CommandClasses::Clock => "Clock",
CommandClasses::Configuration => "Configuration",
CommandClasses::FirmwareUpdate => "FirmwareUpdate",
CommandClasses::ManufacturerSpecific => "ManufacturerSpecific",
CommandClasses::MultiChannel => "MultiChannel",
CommandClasses::MultiChannelAssociation => "MultiChannelAssociation",
CommandClasses::NoOperation => "NoOperation",
CommandClasses::NodeNaming => "NodeNaming",
CommandClasses::PowerLevel => "PowerLevel",
CommandClasses::SensorBinary => "SensorBinary",
CommandClasses::SensorConfiguration => "SensorConfiguration",
CommandClasses::SensorMultilevel => "SensorMultilevel",
CommandClasses::SwitchBinary => "SwitchBinary",
CommandClasses::SwitchMultilevel => "SwitchMultilevel",
CommandClasses::Version => "Version",
CommandClasses::Wakeup => "Wakeup",
};
write!(f, "<CommandClasses::{}>", s)
}
}
| true |
49ae5cd3b8d0e9830be0f78f9f5efc4167ceb6da
|
Rust
|
nsoroush/Rust-tutorial
|
/rust_tutorial/tests/2.rs
|
UTF-8
| 660 | 2.953125 | 3 |
[] |
no_license
|
#[cfg(test)]
mod test2 {
    // Tutorial tests demonstrating Rust numeric literal syntax; the unused
    // bindings are intentional.
    #[test]
    fn test_numeric_types_var_declaration() {
        // Explicitly and implicitly typed integer declarations.
        let a: u64 = 1024;
        let b: i8 = -7;
        let c : usize = 800;
        let d = -64; // type inferred (defaults to i32)
    }
    #[test]
    fn test_numeric_representations_vars() {
        let a = 123_456; // underscores are digit separators
        let b = 0xf2; // hexadecimal
        let c = 0o71; // octal
        let d = 0b1110_0001; // binary
        let c = b'C'; // byte  (shadows the octal `c` above)
    }
    #[test]
    fn test_u8_overflow() {
        // 0xff is 255, which does not fit in i8; with the lint allowed the
        // literal is reinterpreted two's-complement, giving -1.
        #[allow(overflowing_literals)]
        let a:i8 = 0xf_f;
        assert_eq!(a, -1);
    }
    #[test]
    fn test_float_declaration() {
        let b: f32 = 2.95;
        let a = 2.95; // inferred (defaults to f64)
    }
}
| true |
9d225a08b159926a41bc2741977417caf1dbb70d
|
Rust
|
Fomys/cybook
|
/cyio/src/event.rs
|
UTF-8
| 4,547 | 2.875 | 3 |
[] |
no_license
|
use cgmath::Vector2;
/// Touch state decoded from a raw input event: no contact, one contact
/// point, or two contact points (in screen coordinates).
#[derive(PartialEq, Debug, Clone)]
pub enum Touch {
    None,
    One(Vector2<usize>),
    Two(Vector2<usize>, Vector2<usize>),
}
impl From<[u8; 16]> for Touch {
    /// Decodes a 16-byte raw event. Bytes 3..=10 hold up to two touch
    /// points as little-endian u16 pairs (low byte then high byte, x then
    /// y); byte 11 is the contact count (0x01 or 0x02). Each raw
    /// coordinate is rescaled from the touch panel resolution
    /// (`utils::TOUCH_SIZE`) to screen pixels (`utils::SCREEN_SIZE`) and
    /// then mirrored by subtracting from the screen extent — presumably
    /// the panel origin is opposite the screen origin (TODO confirm
    /// against hardware).
    fn from(event: [u8; 16]) -> Self {
        match event {
            [_, _, _, lx1, mx1, ly1, my1, _, _, _, _, 0x01, _, _, _, _] => Touch::One(
                (
                    utils::SCREEN_SIZE.x as usize
                        - ((lx1 as usize + ((mx1 as usize) << 8)) * utils::SCREEN_SIZE.x as usize)
                            / utils::TOUCH_SIZE.x as usize,
                    utils::SCREEN_SIZE.y as usize
                        - ((ly1 as usize + ((my1 as usize) << 8)) * utils::SCREEN_SIZE.y as usize)
                            / utils::TOUCH_SIZE.y as usize,
                )
                    .into(),
            ),
            [_, _, _, lx1, mx1, ly1, my1, lx2, mx2, ly2, my2, 0x02, _, _, _, _] => Touch::Two(
                (
                    utils::SCREEN_SIZE.x as usize
                        - ((lx1 as usize + ((mx1 as usize) << 8)) * utils::SCREEN_SIZE.x as usize)
                            / utils::TOUCH_SIZE.x as usize,
                    utils::SCREEN_SIZE.y as usize
                        - ((ly1 as usize + ((my1 as usize) << 8)) * utils::SCREEN_SIZE.y as usize)
                            / utils::TOUCH_SIZE.y as usize,
                )
                    .into(),
                (
                    utils::SCREEN_SIZE.x as usize
                        - ((lx2 as usize + ((mx2 as usize) << 8)) * utils::SCREEN_SIZE.x as usize)
                            / utils::TOUCH_SIZE.x as usize,
                    utils::SCREEN_SIZE.y as usize
                        - ((ly2 as usize + ((my2 as usize) << 8)) * utils::SCREEN_SIZE.y as usize)
                            / utils::TOUCH_SIZE.y as usize,
                )
                    .into(),
            ),
            // Any other contact count (including 0) is treated as no touch.
            [_, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _] => Touch::None,
        }
    }
}
/// Physical buttons reported by the input device; unrecognised raw events
/// are preserved verbatim in `Unknown`.
#[derive(PartialEq, Debug, Clone)]
pub enum Key {
    Left,
    Right,
    Home,
    Power,
    Unknown([u8; 16]),
}
impl From<[u8; 16]> for Key {
    /// Decodes a raw 16-byte input event. Only byte 3 carries the key
    /// code; every other byte is ignored for the lookup.
    fn from(event: [u8; 16]) -> Self {
        match event[3] {
            0x31 => Key::Home,
            0x32 => Key::Right,
            0x33 => Key::Left,
            0x6f => Key::Power,
            _ => Key::Unknown(event),
        }
    }
}
/// A fully decoded input event.
#[derive(PartialEq, Debug, Clone)]
pub enum Event {
    TouchPressed(Touch),
    TouchMove(Touch),
    TouchReleased,
    Key(Key),
    Unknown([u8; 16]),
}
impl From<[u8; 16]> for Event {
    /// Dispatches on the event header: byte 0 selects the source
    /// (0x6b = key, 0x74 = touch) and, for touch events, byte 1 encodes
    /// the phase (0xc0 press, 0x80 move, 0x40 release). Byte 2 is 0x10 in
    /// all recognised captures — presumably a protocol/version marker;
    /// TODO confirm. Anything else is kept verbatim as `Unknown`.
    fn from(event: [u8; 16]) -> Self {
        match event {
            [0x6b, 0x80, 0x10, _, _, _, _, _, _, _, _, _, _, _, _, _] => {
                Event::Key(Key::from(event))
            }
            [0x74, 0xc0, 0x10, _, _, _, _, _, _, _, _, _, _, _, _, _] => {
                Event::TouchPressed(Touch::from(event))
            }
            [0x74, 0x80, 0x10, _, _, _, _, _, _, _, _, _, _, _, _, _] => {
                Event::TouchMove(Touch::from(event))
            }
            [0x74, 0x40, 0x10, _, _, _, _, _, _, _, _, _, _, _, _, _] => Event::TouchReleased,
            _ => Event::Unknown(event),
        }
    }
}
#[cfg(test)]
mod tests {
    //! Each test feeds one captured 16-byte raw event for a physical key
    //! and checks that it decodes to the expected `Event::Key` variant.
    //! Byte 3 carries the key code; the 0xff filler matches real captures.
    use super::*;
    #[test]
    fn key_home() {
        assert_eq!(
            Event::from([
                0x6b, 0x80, 0x10, 0x31, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x00, 0x00,
                0x00, 0x00
            ]),
            Event::Key(Key::Home)
        );
    }
    #[test]
    fn key_right() {
        assert_eq!(
            Event::from([
                0x6b, 0x80, 0x10, 0x32, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x00, 0x00,
                0x00, 0x00
            ]),
            Event::Key(Key::Right)
        );
    }
    #[test]
    fn key_left() {
        assert_eq!(
            Event::from([
                0x6b, 0x80, 0x10, 0x33, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x00, 0x00,
                0x00, 0x00
            ]),
            Event::Key(Key::Left)
        );
    }
    #[test]
    fn key_power() {
        assert_eq!(
            Event::from([
                0x6b, 0x80, 0x10, 0x6f, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x00, 0x00,
                0x00, 0x00
            ]),
            Event::Key(Key::Power)
        );
    }
}
| true |
9a80ec74eb27e614a662786ce440849295df4133
|
Rust
|
jsj1027/DnD-DB-Tool
|
/src/lib.rs
|
UTF-8
| 1,027 | 3.0625 | 3 |
[] |
no_license
|
pub mod structs;
use crate::structs::data_connection::{DatabaseConnection, DbMessage};
use serde_json::Value;
use std::io;
use std::sync::mpsc::Sender;
#[cfg(test)]
mod tests {
    // Placeholder sanity test left from `cargo new`.
    #[test]
    fn it_works() {
        assert_eq!(2 + 2, 4);
    }
}
/// Builds a `DatabaseConnection` driven by `message_channel` and returns it
/// together with the sender used to push messages at it.
pub fn get_database_connection(
    message_channel: Sender<Value>,
) -> (DatabaseConnection, Sender<Value>) {
    DatabaseConnection::new(message_channel)
}
/// Blocking REPL loop: reads lines from stdin, wraps each trimmed line in a
/// `DbMessage`, serialises it to JSON and forwards it on the channel.
/// Never returns; panics if stdin reading or JSON serialisation fails.
pub fn run_loop(send_message_channel: Sender<Value>) {
    let mut guess = String::new();
    loop {
        io::stdin()
            .read_line(&mut guess)
            .expect("failed to read line");
        let message = DbMessage::new(guess.trim().to_string());
        let message_json = serde_json::to_value(message).unwrap();
        // NOTE(review): the send() Result is ignored — if the receiver has
        // hung up this loop spins forever doing nothing useful; consider
        // breaking on Err.
        send_message_channel.send(message_json);
        // let exit_command = String::from("exit_application");
        // match msg_string.trim() {
        //     "exit_application" => break,
        //     _ => println!("{:#?}", message),
        // };
        // read_line appends, so clear the buffer for the next iteration.
        guess.clear();
    }
}
| true |
ee28e57e497cbb3e24e91f63cebfacadf6232caf
|
Rust
|
HectorPeeters/Compiler
|
/src/types.rs
|
UTF-8
| 2,723 | 3.484375 | 3 |
[] |
no_license
|
use std::str::FromStr;
/// The primitive machine types known to the compiler, plus the
/// `Unknown` and `Void` pseudo-types.
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum PrimitiveType {
    Int8,
    Int16,
    Int32,
    Int64,
    UInt8,
    UInt16,
    UInt32,
    UInt64,
    Bool,
    Unknown,
    Void,
}
impl PrimitiveType {
    /// Width of the type in bits; `Unknown` and `Void` report 0.
    pub fn get_size(&self) -> i32 {
        use PrimitiveType::*;
        match self {
            Int8 | UInt8 | Bool => 8,
            Int16 | UInt16 => 16,
            Int32 | UInt32 => 32,
            Int64 | UInt64 => 64,
            _ => 0,
        }
    }
    /// True for the signed integer types only.
    pub fn is_signed(&self) -> bool {
        use PrimitiveType::*;
        matches!(self, Int8 | Int16 | Int32 | Int64)
    }
    /// True for the unsigned integer types only.
    pub fn is_unsigned(&self) -> bool {
        use PrimitiveType::*;
        matches!(self, UInt8 | UInt16 | UInt32 | UInt64)
    }
    /// Whether a value of `self` may be used where `dest_type` is expected.
    /// With `one_sided` set, only widening conversions into a strictly
    /// larger destination are accepted; otherwise the check is symmetric.
    pub fn is_compatible_with(&self, dest_type: &PrimitiveType, one_sided: bool) -> bool {
        // Identical types are always compatible.
        if self == dest_type {
            return true;
        }
        // A bool never converts to a non-bool...
        if *self == PrimitiveType::Bool && *dest_type != PrimitiveType::Bool {
            return false;
        }
        // ...and a signed value never fits an unsigned destination.
        if self.is_signed() && dest_type.is_unsigned() {
            return false;
        }
        if one_sided {
            // Widening conversion: destination must be strictly larger.
            dest_type.get_size() > self.get_size()
        } else {
            // Symmetric check: additionally reject non-bool -> bool and
            // unsigned -> signed; everything else is compatible.
            !((*self != PrimitiveType::Bool && *dest_type == PrimitiveType::Bool)
                || (self.is_unsigned() && dest_type.is_signed()))
        }
    }
}
impl FromStr for PrimitiveType {
    type Err = ();
    /// Parses the surface-syntax type names (`i8`, `u32`, `bool`, ...).
    fn from_str(s: &str) -> Result<PrimitiveType, ()> {
        use PrimitiveType::*;
        match s {
            "i8" => Ok(Int8),
            "i16" => Ok(Int16),
            "i32" => Ok(Int32),
            "i64" => Ok(Int64),
            "u8" => Ok(UInt8),
            "u16" => Ok(UInt16),
            "u32" => Ok(UInt32),
            "u64" => Ok(UInt64),
            "bool" => Ok(Bool),
            _ => Err(()),
        }
    }
}
/// Untagged storage for a raw primitive value. The active field must be
/// tracked externally (e.g. by an accompanying `PrimitiveType`); reading
/// any field is `unsafe` because the union carries no discriminant.
pub union PrimitiveValue {
    pub uint8: u8,
    pub uint16: u16,
    pub uint32: u32,
    pub uint64: u64,
    pub int8: i8,
    pub int16: i16,
    pub int32: i32,
    pub int64: i64,
    pub float32: f32,
    pub float64: f64,
}
| true |
4cb07fe6ed54e7c8ecd495b10118918b7b17169e
|
Rust
|
Origen-SDK/o2
|
/rust/pyapi_metal/src/framework/file_permissions.rs
|
UTF-8
| 4,472 | 2.5625 | 3 |
[
"MIT"
] |
permissive
|
use origen_metal::utils::file::FilePermissions as OmFilePermissions;
use pyo3::class::basic::CompareOp;
use pyo3::prelude::*;
/// Registers the `file_permissions` Python submodule on `m`: one free
/// function per canned permission level, plus the `FilePermissions` class.
pub(crate) fn define(py: Python, m: &PyModule) -> PyResult<()> {
    let subm = PyModule::new(py, "file_permissions")?;
    subm.add_function(wrap_pyfunction!(private, subm)?)?;
    subm.add_function(wrap_pyfunction!(group, subm)?)?;
    subm.add_function(wrap_pyfunction!(group_writable, subm)?)?;
    subm.add_function(wrap_pyfunction!(public_with_group_writable, subm)?)?;
    subm.add_function(wrap_pyfunction!(public, subm)?)?;
    subm.add_function(wrap_pyfunction!(world_writable, subm)?)?;
    subm.add_function(wrap_pyfunction!(custom, subm)?)?;
    subm.add_class::<FilePermissions>()?;
    m.add_submodule(subm)?;
    Ok(())
}
// Python-facing constructors, one per canned origen_metal permission level.
/// Owner-only access.
#[pyfunction]
pub fn private() -> PyResult<FilePermissions> {
    Ok(FilePermissions::from_metal(&OmFilePermissions::Private))
}
/// Group-readable access.
#[pyfunction]
pub fn group() -> PyResult<FilePermissions> {
    Ok(FilePermissions::from_metal(&OmFilePermissions::Group))
}
/// Group-readable and group-writable access.
#[pyfunction]
pub fn group_writable() -> PyResult<FilePermissions> {
    Ok(FilePermissions::from_metal(
        &OmFilePermissions::GroupWritable,
    ))
}
/// World-readable, group-writable access.
#[pyfunction]
pub fn public_with_group_writable() -> PyResult<FilePermissions> {
    Ok(FilePermissions::from_metal(
        &OmFilePermissions::PublicWithGroupWritable,
    ))
}
/// World-readable access.
#[pyfunction]
pub fn public() -> PyResult<FilePermissions> {
    Ok(FilePermissions::from_metal(&OmFilePermissions::Public))
}
/// World-readable and world-writable access.
#[pyfunction]
pub fn world_writable() -> PyResult<FilePermissions> {
    Ok(FilePermissions::from_metal(
        &OmFilePermissions::WorldWritable,
    ))
}
/// Builds permissions from an arbitrary Python value (string name, integer
/// mode, or an existing FilePermissions instance).
#[pyfunction]
pub fn custom(permissions: &PyAny) -> PyResult<FilePermissions> {
    FilePermissions::new(permissions)
}
/// Python wrapper class around an origen_metal permission set.
#[pyclass]
pub struct FilePermissions {
    // The wrapped origen_metal value; the Python class is a thin facade.
    om_fps: OmFilePermissions,
}
// Rust-side helpers (not exposed to Python).
impl FilePermissions {
    /// Wraps an existing origen_metal permission set (clones it).
    pub fn from_metal(fp: &OmFilePermissions) -> Self {
        Self { om_fps: fp.clone() }
    }
    /// Converts a Python value into the origen_metal type. Accepts a
    /// string name, a u16 mode, or a `FilePermissions` instance; anything
    /// else is a runtime error.
    pub fn to_metal(fp: &PyAny) -> PyResult<OmFilePermissions> {
        if let Ok(s) = fp.extract::<&str>() {
            Ok(OmFilePermissions::from_str(s)?)
        } else if let Ok(i) = fp.extract::<u16>() {
            Ok(OmFilePermissions::from_i(i)?)
        } else if let Ok(slf) = fp.extract::<PyRef<Self>>() {
            Ok(slf.om_fps.clone())
        } else {
            return crate::runtime_error!(format!(
                "Could not derive file permissions from input of type '{}'",
                fp.get_type().to_string()
            ));
        }
    }
    /// Like `to_metal`, but passes `None` through unchanged.
    pub fn to_metal_optional(
        file_permissions: Option<&PyAny>,
    ) -> PyResult<Option<OmFilePermissions>> {
        if let Some(fp) = file_permissions {
            Ok(Some(Self::to_metal(fp)?))
        } else {
            Ok(None)
        }
    }
}
// Methods exposed to Python.
#[pymethods]
impl FilePermissions {
    /// Python constructor: accepts a string name or a u16 mode; any other
    /// type raises TypeError.
    #[new]
    fn new(permissions: &PyAny) -> PyResult<Self> {
        if let Ok(p) = permissions.extract::<&str>() {
            Ok(Self {
                om_fps: OmFilePermissions::from_str(&p)?,
            })
        } else if let Ok(p) = permissions.extract::<u16>() {
            Ok(Self {
                om_fps: OmFilePermissions::from_i(p)?,
            })
        } else {
            crate::type_error!(format!(
                "Can not build FilePermissions from type '{}'",
                permissions.get_type().name()?
            ))
        }
    }
    /// Integer (mode) representation of the permissions.
    #[getter]
    fn to_i(&self) -> PyResult<u16> {
        Ok(self.om_fps.to_i())
    }
    /// String-name representation of the permissions.
    #[getter]
    fn to_s(&self) -> PyResult<String> {
        Ok(self.om_fps.to_str().to_string())
    }
    fn __str__(&self) -> PyResult<String> {
        self.to_s()
    }
    /// Only `==` and `!=` are supported; other comparisons raise.
    /// NOTE(review): when `other` is not a FilePermissions, Err(_) maps to
    /// `false` even for `!=` — confirm that asymmetry is intended.
    fn __richcmp__(&self, other: &PyAny, op: CompareOp) -> PyResult<bool> {
        match other.extract::<PyRef<Self>>() {
            Ok(other_fp) => {
                let result = self.om_fps == other_fp.om_fps;
                match op {
                    CompareOp::Eq => Ok(result),
                    CompareOp::Ne => Ok(!result),
                    _ => crate::not_implemented_error!(
                        "FilePermissions only support equals and not-equals comparisons"
                    ),
                }
            }
            Err(_) => Ok(false),
        }
    }
    fn __int__(&self) -> PyResult<u16> {
        self.to_i()
    }
}
// Rust-side conversion sugar over `from_metal`.
impl From<&OmFilePermissions> for FilePermissions {
    fn from(om_fps: &OmFilePermissions) -> Self {
        Self::from_metal(om_fps)
    }
}
| true |
29ce2161c7f1fd6c87bbf7bdedac2b7b12b93cec
|
Rust
|
MrInformatic/rendy-sphere-visualizer
|
/src/world/color_ramp.rs
|
UTF-8
| 586 | 2.984375 | 3 |
[
"MIT"
] |
permissive
|
use nalgebra_glm::Vec3;
/// A simple colour gradient: evenly spaced stops that are linearly
/// interpolated between.
#[derive(Debug)]
pub struct ColorRamp {
    // Colour stops, distributed evenly over t in [0, 1].
    colors: Vec<Vec3>,
}
impl ColorRamp {
    /// Builds a ramp from its stops. `colors` must be non-empty;
    /// `interpolate` will panic on an empty ramp.
    pub fn new(colors: Vec<Vec3>) -> Self {
        ColorRamp { colors }
    }
    /// Linearly interpolates the ramp at `t` (nominally in [0, 1]; out of
    /// range values are effectively clamped to the end stops via index
    /// clamping, matching the previous behaviour).
    pub fn interpolate(&self, t: f32) -> Vec3 {
        let last = self.colors.len() - 1;
        let i = t * last as f32;
        let fract = f32::fract(i);
        // Clamp both neighbouring stop indices into range. The previous
        // version duplicated this expression and also applied a no-op
        // `.max(0)` to these unsigned indices.
        let lo = (f32::floor(i) as usize).min(last);
        let hi = (lo + 1).min(last);
        let a = &self.colors[lo];
        let b = &self.colors[hi];
        (a * (1.0 - fract)) + (b * fract)
    }
}
| true |
6f05468691f2cbb17f3f285922a65ec0742cf422
|
Rust
|
yewenhui/apicula
|
/src/convert/format.rs
|
UTF-8
| 1,040 | 2.828125 | 3 |
[
"CC0-1.0"
] |
permissive
|
use cgmath::Matrix4;
use std::fmt;
// Joins format strings with trailing newlines at compile time:
// cat_lines!("a", "b") expands to "a\nb\n".
macro_rules! cat_lines {
    ($s:expr) => { concat!($s, "\n") };
    ($s:expr, $($ss:expr),*) => { concat!($s, "\n", cat_lines!($($ss),*)) };
}
// `write!` wrapper treating each format string before the `;` as one
// output line (newlines appended via `cat_lines!`).
macro_rules! write_lines {
    ($dst:expr, $($fmt_strs:expr),*; $($args:tt)*) => {
        write!($dst, cat_lines!($($fmt_strs),*), $($args)*)
    };
}
/// Adapter that turns any `Fn(&mut Formatter) -> fmt::Result` closure into
/// a value implementing `Display`.
pub struct FnFmt<F: Fn(&mut fmt::Formatter) -> fmt::Result>(pub F);

impl<F> fmt::Display for FnFmt<F>
where
    F: Fn(&mut fmt::Formatter) -> fmt::Result,
{
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // Delegate straight to the wrapped closure.
        (self.0)(f)
    }
}
/// Display adapter for a 4x4 matrix, printing 16 space-separated values.
pub struct Mat<'a>(pub &'a Matrix4<f64>);
impl<'a> fmt::Display for Mat<'a> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // cgmath matrices are column-major (`x`..`w` are columns); the
        // traversal below emits the values row by row.
        write!(f, "{} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {}",
            self.0.x.x, self.0.y.x, self.0.z.x, self.0.w.x,
            self.0.x.y, self.0.y.y, self.0.z.y, self.0.w.y,
            self.0.x.z, self.0.y.z, self.0.z.z, self.0.w.z,
            self.0.x.w, self.0.y.w, self.0.z.w, self.0.w.w,
        )
    }
}
| true |
704f2a427aecd4e2e60dc61547ceed0b0358caca
|
Rust
|
briansmith/ring
|
/src/bits.rs
|
UTF-8
| 2,595 | 3.4375 | 3 |
[
"LicenseRef-scancode-unknown-license-reference",
"OpenSSL",
"MIT",
"ISC",
"LicenseRef-scancode-mit-taylor-variant",
"LicenseRef-scancode-openssl",
"LicenseRef-scancode-ssleay-windows"
] |
permissive
|
// Copyright 2016 Brian Smith.
//
// Permission to use, copy, modify, and/or distribute this software for any
// purpose with or without fee is hereby granted, provided that the above
// copyright notice and this permission notice appear in all copies.
//
// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHORS DISCLAIM ALL WARRANTIES
// WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
// MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY
// SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
// WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION
// OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
// CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
//! Bit lengths.
use crate::error;
/// The length of something, in bits.
///
/// This can represent a bit length that isn't a whole number of bytes.
#[derive(Clone, Copy, Debug, Eq, PartialEq, PartialOrd)]
pub struct BitLength(usize);
// Lengths measured in bits, where all arithmetic is guaranteed not to
// overflow.
impl BitLength {
    /// Constructs a `BitLength` from the given length in bits.
    #[inline]
    pub const fn from_usize_bits(bits: usize) -> Self {
        Self(bits)
    }
    /// Constructs a `BitLength` from the given length in bytes.
    ///
    /// Fails if `bytes * 8` is too large for a `usize`.
    #[inline]
    pub fn from_usize_bytes(bytes: usize) -> Result<Self, error::Unspecified> {
        let bits = bytes.checked_mul(8).ok_or(error::Unspecified)?;
        Ok(Self::from_usize_bits(bits))
    }
    // ceil(bits / 2) without overflow: halve, then add back the dropped
    // odd bit.
    #[cfg(feature = "alloc")]
    #[inline]
    pub(crate) fn half_rounded_up(&self) -> Self {
        let round_up = self.0 & 1;
        Self((self.0 / 2) + round_up)
    }
    /// The number of bits this bit length represents, as a `usize`.
    #[inline]
    pub fn as_usize_bits(&self) -> usize {
        self.0
    }
    /// The bit length, rounded up to a whole number of bytes.
    #[cfg(feature = "alloc")]
    #[inline]
    pub fn as_usize_bytes_rounded_up(&self) -> usize {
        // Equivalent to (self.0 + 7) / 8, except with no potential for
        // overflow and without branches.
        // Branchless round_up = if self.0 & 0b111 != 0 { 1 } else { 0 };
        // (the OR of shifts folds bits 0..=2 into bit 0).
        let round_up = ((self.0 >> 2) | (self.0 >> 1) | self.0) & 1;
        (self.0 / 8) + round_up
    }
    // Subtracts one bit, failing (Unspecified) on underflow from zero.
    #[cfg(feature = "alloc")]
    #[inline]
    pub(crate) fn try_sub_1(self) -> Result<Self, error::Unspecified> {
        let sum = self.0.checked_sub(1).ok_or(error::Unspecified)?;
        Ok(Self(sum))
    }
}
| true |
640701b48de3ddb8731a72e8bd793774fef0f5b4
|
Rust
|
bahildebrand/usync
|
/src/task/task.rs
|
UTF-8
| 1,284 | 3.078125 | 3 |
[] |
no_license
|
use alloc::boxed::Box;
use core::{
future::Future,
pin::Pin,
sync::atomic::{AtomicU32, Ordering},
task::{Context, Poll},
};
/// Future that allows the executor to keep track of root level tasks.
pub(crate) struct Task {
    // Unique identifier assigned at construction.
    id: TaskId,
    // The root future, boxed and pinned so it can be polled in place.
    future: Pin<Box<dyn Future<Output = ()>>>,
}
impl Task {
    /// Creates a new task with a unique task ID.
    pub(crate) fn new(future: impl Future<Output = ()> + 'static) -> Task {
        Task {
            id: TaskId::new(),
            future: Box::pin(future),
        }
    }
    /// Polls the future to make progress on the overall task.
    /// Simply forwards `context` to the pinned root future.
    pub(crate) fn poll(&mut self, context: &mut Context) -> Poll<()> {
        self.future.as_mut().poll(context)
    }
    /// Returns the unique ID for the given task.
    pub(crate) fn get_id(&self) -> TaskId {
        self.id
    }
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
/// A unique 32-bit task identifier.
pub(crate) struct TaskId(u32);
impl TaskId {
    /// Allocates the next task ID. IDs are handed out from a single
    /// process-wide atomic counter, so every call yields a distinct,
    /// monotonically increasing value (until the counter wraps).
    fn new() -> Self {
        static COUNTER: AtomicU32 = AtomicU32::new(0);
        Self(COUNTER.fetch_add(1, Ordering::Relaxed))
    }
}
| true |
6c77af06cccd2fe09108e2d0b0212f1c2cd6200d
|
Rust
|
KomodoPlatform/parity-common
|
/transaction-pool/src/lib.rs
|
UTF-8
| 4,255 | 2.640625 | 3 |
[
"Apache-2.0",
"MIT"
] |
permissive
|
// Copyright 2020 Parity Technologies
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Generic Transaction Pool
//!
//! An extensible and performant implementation of Ethereum Transaction Pool.
//! The pool stores ordered, verified transactions according to some pluggable
//! `Scoring` implementation.
//! The pool also allows you to construct a set of `pending` transactions according
//! to some notion of `Readiness` (pluggable).
//!
//! The pool is generic over transactions and should make no assumptions about them.
//! The only thing we can rely on is the `Scoring` that defines:
//! - the ordering of transactions from a single sender
//! - the priority of the transaction compared to other transactions from different senders
//!
//! NOTE: the transactions from a single sender are not ordered by priority,
//! but still when constructing pending set we always need to maintain the ordering
//! (i.e. `txs[1]` always needs to be included after `txs[0]` even if it has higher priority)
//!
//! ### Design Details
//!
//! Performance assumptions:
//! - Possibility to handle tens of thousands of transactions
//! - Fast insertions and replacements `O(per-sender + log(senders))`
//! - Reasonably fast removal of stalled transactions `O(per-sender)`
//! - Reasonably fast construction of pending set `O(txs * (log(senders) + log(per-sender))`
//!
//! The removal performance could be improved by trading some memory. Currently `SmallVec` is used
//! to store senders transactions, instead we could use `VecDeque` and efficiently `pop_front`
//! the best transactions.
//!
//! The pending set construction and insertion complexity could be reduced by introducing
//! a notion of `nonce` - an absolute, numeric ordering of transactions.
//! We don't do that because of possible implications of EIP208 where nonce might not be
//! explicitly available.
//!
//! 1. The pool groups transactions from particular sender together
//! and stores them ordered by `Scoring` within that group
//! i.e. `HashMap<Sender, Vec<Transaction>>`.
//! 2. Additionally we maintain the best and the worst transaction from each sender
//! (by `Scoring` not `priority`) ordered by `priority`.
//! It means that we can easily identify the best transaction inside the entire pool
//! and the worst transaction.
//! 3. Whenever new transaction is inserted to the queue:
//! - first check all the limits (overall, memory, per-sender)
//! - retrieve all transactions from a sender
//! - binary search for position to insert the transaction
//! - decide if we are replacing existing transaction (3 outcomes: drop, replace, insert)
//! - update best and worst transaction from that sender if affected
//! 4. Pending List construction:
//! - Take the best transaction (by priority) from all senders to the List
//! - Replace the transaction with next transaction (by ordering) from that sender (if any)
//! - Repeat
#![warn(missing_docs)]
#[cfg(test)]
mod tests;
mod error;
mod listener;
mod options;
mod pool;
mod ready;
mod replace;
mod status;
mod transactions;
mod verifier;
pub mod scoring;
pub use self::error::Error;
pub use self::listener::{Listener, NoopListener};
pub use self::options::Options;
pub use self::pool::{PendingIterator, Pool, Transaction, UnorderedIterator};
pub use self::ready::{Readiness, Ready};
pub use self::replace::{ReplaceTransaction, ShouldReplace};
pub use self::scoring::Scoring;
pub use self::status::{LightStatus, Status};
pub use self::verifier::Verifier;
use std::fmt;
use std::hash::Hash;
/// Already verified transaction that can be safely queued.
pub trait VerifiedTransaction: fmt::Debug {
    /// Transaction hash type.
    type Hash: fmt::Debug + fmt::LowerHex + Eq + Clone + Hash;
    /// Transaction sender type. (`Send` so pools can move senders across
    /// threads.)
    type Sender: fmt::Debug + Eq + Clone + Hash + Send;
    /// Transaction hash
    fn hash(&self) -> &Self::Hash;
    /// Memory usage
    fn mem_usage(&self) -> usize;
    /// Transaction sender
    fn sender(&self) -> &Self::Sender;
}
| true |
6cf37ba7b78582b6f9afe788fa7c8ab4493c71b8
|
Rust
|
felixjones/gba
|
/src/lib.rs
|
UTF-8
| 6,421 | 2.703125 | 3 |
[
"Apache-2.0"
] |
permissive
|
#![cfg_attr(not(test), no_std)]
#![feature(asm)]
#![feature(cfg_target_vendor)]
#![allow(clippy::cast_lossless)]
#![deny(clippy::float_arithmetic)]
#![warn(missing_docs)]
//! This crate helps you write GBA ROMs.
//!
//! ## SAFETY POLICY
//!
//! Some parts of this crate are safe wrappers around unsafe operations. This is
//! good, and what you'd expect from a Rust crate.
//!
//! However, the safe wrappers all assume that you will _only_ attempt to
//! execute this crate on a GBA or in a GBA Emulator.
//!
//! **Do not** use this crate in programs that aren't running on the GBA. If you
//! do, it's a giant bag of Undefined Behavior.
pub(crate) use gba_proc_macro::phantom_fields;
pub(crate) use voladdress::{read_only::ROVolAddress, VolAddress, VolBlock};
pub mod macros;
pub mod base;
pub mod bios;
pub mod iwram;
pub mod ewram;
pub mod io;
pub mod palram;
pub mod vram;
pub mod oam;
pub mod rom;
pub mod sram;
pub mod mgba;
extern "C" {
/// This marks the end of the `.data` and `.bss` sections in IWRAM.
///
/// Memory in IWRAM _before_ this location is not free to use, you'll trash
/// your globals and stuff. Memory here or after is freely available for use
/// (careful that you don't run into your own stack of course).
///
/// The actual value is unimportant, you just want to use the _address of_
/// this location as the start of your IWRAM usage.
pub static __bss_end: u8;
}
newtype! {
  /// A color on the GBA is an RGB 5.5.5 within a `u16`
  /// (bits 0-4 red, 5-9 green, 10-14 blue; bit 15 unused).
  #[derive(PartialOrd, Ord, Hash)]
  Color, pub u16
}
impl Color {
  /// Constructs a color from the channel values provided (should be 0..=31).
  ///
  /// No actual checks are performed, so illegal channel values can overflow
  /// into each other and produce an unintended color.
  pub const fn from_rgb(r: u16, g: u16, b: u16) -> Color {
    Color(b << 10 | g << 5 | r)
  }
}
//
// After here is totally unsorted nonsense
//
/// Performs unsigned divide and remainder, gives None if dividing by 0.
pub fn divrem_u32(numer: u32, denom: u32) -> Option<(u32, u32)> {
  // TODO: const this? Requires const if
  if denom == 0 {
    None
  } else {
    Some(unsafe { divrem_u32_unchecked(numer, denom) })
  }
}

/// Performs divide and remainder, no check for 0 division.
///
/// # Safety
///
/// If you call this with a denominator of 0 the result is implementation
/// defined (not literal UB) including but not limited to: an infinite loop,
/// panic on overflow, or incorrect output.
pub unsafe fn divrem_u32_unchecked(numer: u32, denom: u32) -> (u32, u32) {
  // TODO: const this? Requires const if
  // If the numerator is at most ~32x the denominator, repeated subtraction
  // is cheaper than the fixed-cost non-restoring algorithm.
  if (numer >> 5) < denom {
    divrem_u32_simple(numer, denom)
  } else {
    divrem_u32_non_restoring(numer, denom)
  }
}

/// The simplest form of division. If N is too much larger than D this will be
/// extremely slow. If N is close enough to D then it will likely be faster than
/// the non_restoring form.
fn divrem_u32_simple(mut numer: u32, denom: u32) -> (u32, u32) {
  // TODO: const this? Requires const if
  let mut quot = 0;
  while numer >= denom {
    numer -= denom;
    quot += 1;
  }
  (quot, numer)
}

/// Takes a fixed quantity of time based on the bit width of the number (in this
/// case 32).
fn divrem_u32_non_restoring(numer: u32, denom: u32) -> (u32, u32) {
  // TODO: const this? Requires const if
  let mut r: i64 = numer as i64;
  let d: i64 = (denom as i64) << 32;
  let mut q: u32 = 0;
  let mut i = 1 << 31;
  // Non-restoring division: each step either subtracts or adds the shifted
  // denominator depending on the sign of the partial remainder.
  while i > 0 {
    if r >= 0 {
      q |= i;
      r = 2 * r - d;
    } else {
      r = 2 * r + d;
    }
    i >>= 1;
  }
  // Convert the +1/-1 encoded quotient digits: Q = Q - !Q.
  q -= !q;
  // Final fix-up when the remainder ended negative.
  if r < 0 {
    q -= 1;
    r += d;
  }
  r >>= 32;
  // TODO: remove this once we've done more checks here.
  debug_assert!(r >= 0);
  debug_assert!(r <= core::u32::MAX as i64);
  (q, r as u32)
}

/// Performs signed divide and remainder, gives None if dividing by 0 or
/// computing `MIN/-1`
pub fn divrem_i32(numer: i32, denom: i32) -> Option<(i32, i32)> {
  if denom == 0 || (numer == core::i32::MIN && denom == -1) {
    None
  } else {
    Some(unsafe { divrem_i32_unchecked(numer, denom) })
  }
}

/// Performs signed divide and remainder, no check for 0 division or `MIN/-1`.
///
/// # Safety
///
/// * If you call this with a denominator of 0 the result is implementation
///   defined (not literal UB) including but not limited to: an infinite loop,
///   panic on overflow, or incorrect output.
/// * If you call this with `MIN/-1` you'll get a panic in debug or just `MIN`
///   in release (which is incorrect), because of how twos-compliment works.
pub unsafe fn divrem_i32_unchecked(numer: i32, denom: i32) -> (i32, i32) {
  // TODO: const this? Requires const if
  let unsigned_numer = numer.abs() as u32;
  let unsigned_denom = denom.abs() as u32;
  let opposite_sign = (numer ^ denom) < 0;
  // Delegate to the unsigned routine, which selects the simple or
  // non-restoring algorithm based on the operand *magnitudes*. The previous
  // code duplicated that dispatch here but compared the signed inputs
  // (`numer >> 5 < denom`), mis-selecting the algorithm whenever an operand
  // was negative (correct results, but pathological performance).
  let (udiv, urem) = divrem_u32_unchecked(unsigned_numer, unsigned_denom);
  // Re-apply the signs: quotient is negative iff the signs differ, and the
  // remainder takes the sign of the numerator (truncated division).
  match (opposite_sign, numer < 0) {
    (true, true) => (-(udiv as i32), -(urem as i32)),
    (true, false) => (-(udiv as i32), urem as i32),
    (false, true) => (udiv as i32, -(urem as i32)),
    (false, false) => (udiv as i32, urem as i32),
  }
}
/*
#[cfg(test)]
mod tests {
use super::*;
use quickcheck::quickcheck;
// We have an explicit property on the non_restoring division
quickcheck! {
fn divrem_u32_non_restoring_prop(num: u32, denom: u32) -> bool {
if denom > 0 {
divrem_u32_non_restoring(num, denom) == (num / denom, num % denom)
} else {
true
}
}
}
// We have an explicit property on the simple division
quickcheck! {
fn divrem_u32_simple_prop(num: u32, denom: u32) -> bool {
if denom > 0 {
divrem_u32_simple(num, denom) == (num / denom, num % denom)
} else {
true
}
}
}
// Test the u32 wrapper
quickcheck! {
fn divrem_u32_prop(num: u32, denom: u32) -> bool {
if denom > 0 {
divrem_u32(num, denom).unwrap() == (num / denom, num % denom)
} else {
divrem_u32(num, denom).is_none()
}
}
}
// test the i32 wrapper
quickcheck! {
fn divrem_i32_prop(num: i32, denom: i32) -> bool {
if denom == 0 || num == core::i32::MIN && denom == -1 {
divrem_i32(num, denom).is_none()
} else {
divrem_i32(num, denom).unwrap() == (num / denom, num % denom)
}
}
}
}
*/
| true |
0db2c9505e56d8e7f165a7b3f86de204add97b3c
|
Rust
|
theseus-os/Theseus
|
/kernel/dreadnought/src/task.rs
|
UTF-8
| 3,014 | 3.1875 | 3 |
[
"MIT"
] |
permissive
|
//! Asynchronous tasks based on Theseus's native OS [task] subsystem.
use alloc::boxed::Box;
use core::{
future::Future,
marker::PhantomData,
pin::Pin,
task::{Context, Poll},
};
use task::{ExitValue, JoinableTaskRef, KillReason, PanicInfoOwned};
/// Spawns a new asynchronous task, returning a [`JoinableAsyncTaskRef`] for it.
///
/// You do not need to poll the returned object to make the async task execute;
/// it will begin running in the background immediately.
///
/// The future is boxed/pinned and driven to completion by `crate::block_on`
/// on a newly spawned native Theseus task.
///
/// # Errors
///
/// This function will return errors generated by [`spawn::TaskBuilder::spawn()`].
pub fn spawn_async<F>(
    future: F,
) -> core::result::Result<JoinableAsyncTaskRef<F::Output>, &'static str>
where
    F: Future + Send + 'static,
    F::Output: Send,
{
    let future = Box::pin(future);
    let task = spawn::new_task_builder(crate::block_on, future).spawn()?;
    Ok(JoinableAsyncTaskRef {
        task,
        // Carries the future's output type through to `poll`/`join`.
        phantom_data: PhantomData,
    })
}
/// An owned permission to join an async task.
pub struct JoinableAsyncTaskRef<T> {
    // The underlying native task driving the future.
    pub(crate) task: JoinableTaskRef,
    // Records the future's output type `T`, which is otherwise unused here.
    pub(crate) phantom_data: PhantomData<T>,
}
impl<T> JoinableAsyncTaskRef<T> {
    /// Abort the task associated with the handle.
    ///
    /// If the cancelled task was already completed at the time it was
    /// cancelled, it will return the successful result. Otherwise, polling the
    /// handle will fail with an [`Error::Cancelled`].
    ///
    /// # Warning
    ///
    /// This uses [`Task::kill`] and so the aborted task isn't unwound.
    pub fn abort(&self) {
        // Any kill failure is deliberately ignored (best-effort abort).
        let _ = self.task.kill(KillReason::Requested);
    }
    /// Returns whether the task associated with the handle has finished.
    pub fn is_finished(&self) -> bool {
        self.task.has_exited()
    }
}
// Awaiting the handle resolves once the underlying task exits, mapping its
// exit value into `Result<T>`.
impl<T> Future for JoinableAsyncTaskRef<T>
where
    T: 'static,
{
    type Output = Result<T>;
    fn poll(self: Pin<&mut Self>, context: &mut Context<'_>) -> Poll<Self::Output> {
        // Register the waker before checking completion — presumably so a
        // completion between the check and registration can't be missed;
        // confirm against `set_waker`'s semantics.
        self.task.set_waker(context.waker().clone());
        if self.is_finished() {
            Poll::Ready(match self.task.join() {
                Ok(exit_value) => match exit_value {
                    // Downcast back to `T`; cannot fail because `spawn_async`
                    // guarantees the completed value is the future's output.
                    ExitValue::Completed(value) => Ok(*value.downcast().unwrap()),
                    ExitValue::Killed(reason) => match reason {
                        KillReason::Requested => Err(Error::Cancelled),
                        KillReason::Panic(info) => Err(Error::Panic(info)),
                        KillReason::Exception(num) => Err(Error::Exception(num)),
                    },
                },
                Err(s) => Err(Error::Join(s)),
            })
        } else {
            Poll::Pending
        }
    }
}
/// Convenience alias used by [`JoinableAsyncTaskRef`]'s `Future` impl.
pub type Result<T> = core::result::Result<T, Error>;
/// An error returned from polling a [`JoinableAsyncTaskRef`].
#[derive(Debug)]
pub enum Error {
    /// The task was aborted via `abort()` before completing.
    Cancelled,
    /// The task panicked; carries the captured panic info.
    Panic(PanicInfoOwned),
    /// A `Join` error should not occur; this indicates a BUG in Theseus's task mgmt.
    Join(&'static str),
    /// The task was killed by a machine exception with this vector number.
    Exception(u8),
}
| true |
3c21df72b6252fb584b63d4a6ead7c204a1813ed
|
Rust
|
HewlettPackard/dockerfile-parser-rs
|
/src/instructions/arg.rs
|
UTF-8
| 3,381 | 3.140625 | 3 |
[
"MIT",
"LicenseRef-scancode-dco-1.1"
] |
permissive
|
// (C) Copyright 2019-2020 Hewlett Packard Enterprise Development LP
use std::convert::TryFrom;
use crate::dockerfile_parser::Instruction;
use crate::SpannedString;
use crate::error::*;
use crate::parse_string;
use crate::parser::{Pair, Rule};
use crate::splicer::Span;
/// A Dockerfile [`ARG` instruction][arg].
///
/// [arg]: https://docs.docker.com/engine/reference/builder/#arg
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct ArgInstruction {
  /// Location of the entire instruction within the source Dockerfile.
  pub span: Span,
  /// The argument key
  pub name: SpannedString,
  /// An optional argument value.
  ///
  /// This may be unset when passing arguments through to later stages in a
  /// [multi-stage build][build].
  ///
  /// [build]: https://docs.docker.com/develop/develop-images/multistage-build/
  pub value: Option<SpannedString>,
}
impl ArgInstruction {
pub(crate) fn from_record(record: Pair) -> Result<ArgInstruction> {
let span = Span::from_pair(&record);
let mut name = None;
let mut value = None;
for field in record.into_inner() {
match field.as_rule() {
Rule::arg_name => name = Some(parse_string(&field)?),
Rule::arg_quoted_value => value = Some(parse_string(&field)?),
Rule::arg_value => value = Some(parse_string(&field)?),
Rule::comment => continue,
_ => return Err(unexpected_token(field))
}
}
let name = match name {
Some(name) => name,
_ => return Err(Error::GenericParseError {
message: "arg name is required".into()
})
};
Ok(ArgInstruction {
span,
name,
value,
})
}
}
impl<'a> TryFrom<&'a Instruction> for &'a ArgInstruction {
type Error = Error;
fn try_from(instruction: &'a Instruction) -> std::result::Result<Self, Self::Error> {
if let Instruction::Arg(a) = instruction {
Ok(a)
} else {
Err(Error::ConversionError {
from: format!("{:?}", instruction),
to: "ArgInstruction".into()
})
}
}
}
#[cfg(test)]
mod tests {
use pretty_assertions::assert_eq;
use super::*;
use crate::Dockerfile;
use crate::test_util::*;
#[test]
fn arg_strings() -> Result<()> {
assert_eq!(
parse_single(r#"arg foo=bar"#, Rule::arg)?,
ArgInstruction {
span: Span::new(0, 11),
name: SpannedString {
span: Span::new(4, 7),
content: "foo".into(),
},
value: Some(SpannedString {
span: Span::new(8, 11),
content: "bar".into(),
}),
}.into()
);
assert_eq!(
parse_single(r#"arg foo="bar""#, Rule::arg)?,
ArgInstruction {
span: Span::new(0, 13),
name: SpannedString {
span: Span::new(4, 7),
content: "foo".into(),
},
value: Some(SpannedString {
span: Span::new(8, 13),
content: "bar".into(),
}),
}.into()
);
assert_eq!(
parse_single(r#"arg foo='bar'"#, Rule::arg)?,
ArgInstruction {
span: Span::new(0, 13),
name: SpannedString {
span: Span::new(4, 7),
content: "foo".into(),
},
value: Some(SpannedString {
span: Span::new(8, 13),
content: "bar".into(),
}),
}.into()
);
assert!(Dockerfile::parse(r#"arg foo="bar"bar"#).is_err());
assert!(Dockerfile::parse(r#"arg foo='bar'bar"#).is_err());
Ok(())
}
}
| true |
ed82215d396ed8fa49ce1c16a0ae1916b1bdef2d
|
Rust
|
rust-lang/rust-analyzer
|
/crates/ide-assists/src/handlers/generate_enum_variant.rs
|
UTF-8
| 10,575 | 2.71875 | 3 |
[
"Apache-2.0",
"MIT"
] |
permissive
|
use hir::{HasSource, HirDisplay, InFile};
use ide_db::assists::{AssistId, AssistKind};
use syntax::{
ast::{self, make, HasArgList},
match_ast, AstNode, SyntaxNode,
};
use crate::assist_context::{AssistContext, Assists};
// Assist: generate_enum_variant
//
// Adds a variant to an enum.
//
// ```
// enum Countries {
// Ghana,
// }
//
// fn main() {
// let country = Countries::Lesotho$0;
// }
// ```
// ->
// ```
// enum Countries {
// Ghana,
// Lesotho,
// }
//
// fn main() {
// let country = Countries::Lesotho;
// }
// ```
/// Entry point: offers the assist when the cursor is on an unresolved,
/// capitalized path whose qualifier resolves to an enum definition.
pub(crate) fn generate_enum_variant(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
    let path: ast::Path = ctx.find_node_at_offset()?;
    let parent = path_parent(&path)?;
    if ctx.sema.resolve_path(&path).is_some() {
        // No need to generate anything if the path resolves
        return None;
    }
    let name_ref = path.segment()?.name_ref()?;
    if name_ref.text().starts_with(char::is_lowercase) {
        // Don't suggest generating variant if the name starts with a lowercase letter
        return None;
    }
    // Only applicable when the qualifier (e.g. `Foo` in `Foo::Bar`) is an enum.
    if let Some(hir::PathResolution::Def(hir::ModuleDef::Adt(hir::Adt::Enum(e)))) =
        ctx.sema.resolve_path(&path.qualifier()?)
    {
        let target = path.syntax().text_range();
        return add_variant_to_accumulator(acc, ctx, target, e, &name_ref, parent);
    }
    None
}
// The syntactic context in which the unresolved path appears; this determines
// what kind of variant (unit, tuple, or record) will be generated.
#[derive(Debug)]
enum PathParent {
    PathExpr(ast::PathExpr),
    RecordExpr(ast::RecordExpr),
    PathPat(ast::PathPat),
    UseTree(ast::UseTree),
}
impl PathParent {
    // Uniform access to the underlying syntax node of each variant.
    fn syntax(&self) -> &SyntaxNode {
        match self {
            PathParent::PathExpr(it) => it.syntax(),
            PathParent::RecordExpr(it) => it.syntax(),
            PathParent::PathPat(it) => it.syntax(),
            PathParent::UseTree(it) => it.syntax(),
        }
    }
    /// Builds the field list for the generated variant from the usage shape:
    /// tuple fields for `Enum::Variant(args)`, record fields for
    /// `Enum::Variant { .. }`, and none (a unit variant) for patterns and
    /// use trees.
    fn make_field_list(&self, ctx: &AssistContext<'_>) -> Option<ast::FieldList> {
        let scope = ctx.sema.scope(self.syntax())?;
        match self {
            PathParent::PathExpr(it) => {
                // Only a path that is directly being called carries argument
                // types we can turn into tuple fields.
                if let Some(call_expr) = it.syntax().parent().and_then(ast::CallExpr::cast) {
                    make_tuple_field_list(call_expr, ctx, &scope)
                } else {
                    None
                }
            }
            PathParent::RecordExpr(it) => make_record_field_list(it, ctx, &scope),
            PathParent::UseTree(_) | PathParent::PathPat(_) => None,
        }
    }
}
// Classifies the syntactic parent of `path`; returns `None` for positions the
// assist does not handle (e.g. type position, as covered by the tests).
fn path_parent(path: &ast::Path) -> Option<PathParent> {
    let parent = path.syntax().parent()?;
    match_ast! {
        match parent {
            ast::PathExpr(it) => Some(PathParent::PathExpr(it)),
            ast::RecordExpr(it) => Some(PathParent::RecordExpr(it)),
            ast::PathPat(it) => Some(PathParent::PathPat(it)),
            ast::UseTree(it) => Some(PathParent::UseTree(it)),
            _ => None
        }
    }
}
// Registers the "Generate variant" assist. The edit targets the file that
// actually contains the enum definition, which may differ from the file the
// cursor is in.
fn add_variant_to_accumulator(
    acc: &mut Assists,
    ctx: &AssistContext<'_>,
    target: syntax::TextRange,
    adt: hir::Enum,
    name_ref: &ast::NameRef,
    parent: PathParent,
) -> Option<()> {
    let db = ctx.db();
    // Map the enum back to its original (non-macro-expanded) AST node.
    let InFile { file_id, value: enum_node } = adt.source(db)?.original_ast_node(db)?;
    acc.add(
        AssistId("generate_enum_variant", AssistKind::Generate),
        "Generate variant",
        target,
        |builder| {
            builder.edit_file(file_id.original_file(db));
            let node = builder.make_mut(enum_node);
            let variant = make_variant(ctx, name_ref, parent);
            node.variant_list().map(|it| it.add_variant(variant.clone_for_update()));
        },
    )
}
// Creates the new variant node, deriving its field list (if any) from the
// usage site.
fn make_variant(
    ctx: &AssistContext<'_>,
    name_ref: &ast::NameRef,
    parent: PathParent,
) -> ast::Variant {
    let field_list = parent.make_field_list(ctx);
    make::variant(make::name(&name_ref.text()), field_list)
}
// Builds a record field list (`{ a: T, b: U }`) mirroring the fields of the
// record literal at the usage site; a field's type falls back to the `_`
// placeholder when it cannot be inferred.
fn make_record_field_list(
    record: &ast::RecordExpr,
    ctx: &AssistContext<'_>,
    scope: &hir::SemanticsScope<'_>,
) -> Option<ast::FieldList> {
    let fields = record.record_expr_field_list()?.fields();
    let record_fields = fields.map(|field| {
        let name = name_from_field(&field);
        let ty = field
            .expr()
            .and_then(|it| expr_ty(ctx, it, scope))
            .unwrap_or_else(make::ty_placeholder);
        make::record_field(None, name, ty)
    });
    Some(make::record_field_list(record_fields).into())
}
/// Derives a field name for a generated record variant from a record-literal
/// field, falling back to `"unknown"` when no name can be determined.
fn name_from_field(field: &ast::RecordExprField) -> ast::Name {
    let text = match field.name_ref() {
        Some(it) => it.to_string(),
        // `unwrap_or_else` avoids allocating the fallback string when the
        // shorthand lookup succeeds (the eager `unwrap_or` always allocated).
        None => name_from_field_shorthand(field).unwrap_or_else(|| "unknown".to_string()),
    };
    make::name(&text)
}
/// Extracts the name from a field-shorthand expression (`Foo { x }`),
/// returning `None` for any other expression shape.
fn name_from_field_shorthand(field: &ast::RecordExprField) -> Option<String> {
    if let ast::Expr::PathExpr(path_expr) = field.expr()? {
        let name_ref = path_expr.path()?.as_single_name_ref()?;
        Some(name_ref.to_string())
    } else {
        None
    }
}
// Builds a tuple field list (`(T, U)`) from the argument types at the call
// site; an argument's type falls back to the `_` placeholder when it cannot
// be inferred.
fn make_tuple_field_list(
    call_expr: ast::CallExpr,
    ctx: &AssistContext<'_>,
    scope: &hir::SemanticsScope<'_>,
) -> Option<ast::FieldList> {
    let args = call_expr.arg_list()?.args();
    let tuple_fields = args.map(|arg| {
        let ty = expr_ty(ctx, arg, scope).unwrap_or_else(make::ty_placeholder);
        make::tuple_field(None, ty)
    });
    Some(make::tuple_field_list(tuple_fields).into())
}
// Infers the (adjusted) type of `arg` and renders it as source code valid in
// `scope`; returns `None` if inference or rendering fails.
fn expr_ty(
    ctx: &AssistContext<'_>,
    arg: ast::Expr,
    scope: &hir::SemanticsScope<'_>,
) -> Option<ast::Type> {
    let ty = ctx.sema.type_of_expr(&arg).map(|it| it.adjusted())?;
    let text = ty.display_source_code(ctx.db(), scope.module().into(), false).ok()?;
    Some(make::ty(&text))
}
#[cfg(test)]
mod tests {
use crate::tests::{check_assist, check_assist_not_applicable};
use super::*;
#[test]
fn generate_basic_enum_variant_in_empty_enum() {
check_assist(
generate_enum_variant,
r"
enum Foo {}
fn main() {
Foo::Bar$0
}
",
r"
enum Foo {
Bar,
}
fn main() {
Foo::Bar
}
",
)
}
#[test]
fn generate_basic_enum_variant_in_non_empty_enum() {
check_assist(
generate_enum_variant,
r"
enum Foo {
Bar,
}
fn main() {
Foo::Baz$0
}
",
r"
enum Foo {
Bar,
Baz,
}
fn main() {
Foo::Baz
}
",
)
}
#[test]
fn generate_basic_enum_variant_in_different_file() {
check_assist(
generate_enum_variant,
r"
//- /main.rs
mod foo;
use foo::Foo;
fn main() {
Foo::Baz$0
}
//- /foo.rs
pub enum Foo {
Bar,
}
",
r"
pub enum Foo {
Bar,
Baz,
}
",
)
}
#[test]
fn not_applicable_for_existing_variant() {
check_assist_not_applicable(
generate_enum_variant,
r"
enum Foo {
Bar,
}
fn main() {
Foo::Bar$0
}
",
)
}
#[test]
fn not_applicable_for_lowercase() {
check_assist_not_applicable(
generate_enum_variant,
r"
enum Foo {
Bar,
}
fn main() {
Foo::new$0
}
",
)
}
#[test]
fn indentation_level_is_correct() {
check_assist(
generate_enum_variant,
r"
mod m {
pub enum Foo {
Bar,
}
}
fn main() {
m::Foo::Baz$0
}
",
r"
mod m {
pub enum Foo {
Bar,
Baz,
}
}
fn main() {
m::Foo::Baz
}
",
)
}
#[test]
fn associated_single_element_tuple() {
check_assist(
generate_enum_variant,
r"
enum Foo {}
fn main() {
Foo::Bar$0(true)
}
",
r"
enum Foo {
Bar(bool),
}
fn main() {
Foo::Bar(true)
}
",
)
}
#[test]
fn associated_single_element_tuple_unknown_type() {
check_assist(
generate_enum_variant,
r"
enum Foo {}
fn main() {
Foo::Bar$0(x)
}
",
r"
enum Foo {
Bar(_),
}
fn main() {
Foo::Bar(x)
}
",
)
}
#[test]
fn associated_multi_element_tuple() {
check_assist(
generate_enum_variant,
r"
struct Struct {}
enum Foo {}
fn main() {
Foo::Bar$0(true, x, Struct {})
}
",
r"
struct Struct {}
enum Foo {
Bar(bool, _, Struct),
}
fn main() {
Foo::Bar(true, x, Struct {})
}
",
)
}
#[test]
fn associated_record() {
check_assist(
generate_enum_variant,
r"
enum Foo {}
fn main() {
Foo::$0Bar { x: true }
}
",
r"
enum Foo {
Bar { x: bool },
}
fn main() {
Foo::Bar { x: true }
}
",
)
}
#[test]
fn associated_record_unknown_type() {
check_assist(
generate_enum_variant,
r"
enum Foo {}
fn main() {
Foo::$0Bar { x: y }
}
",
r"
enum Foo {
Bar { x: _ },
}
fn main() {
Foo::Bar { x: y }
}
",
)
}
#[test]
fn associated_record_field_shorthand() {
check_assist(
generate_enum_variant,
r"
enum Foo {}
fn main() {
let x = true;
Foo::$0Bar { x }
}
",
r"
enum Foo {
Bar { x: bool },
}
fn main() {
let x = true;
Foo::Bar { x }
}
",
)
}
#[test]
fn associated_record_field_shorthand_unknown_type() {
check_assist(
generate_enum_variant,
r"
enum Foo {}
fn main() {
Foo::$0Bar { x }
}
",
r"
enum Foo {
Bar { x: _ },
}
fn main() {
Foo::Bar { x }
}
",
)
}
#[test]
fn associated_record_field_multiple_fields() {
check_assist(
generate_enum_variant,
r"
struct Struct {}
enum Foo {}
fn main() {
Foo::$0Bar { x, y: x, s: Struct {} }
}
",
r"
struct Struct {}
enum Foo {
Bar { x: _, y: _, s: Struct },
}
fn main() {
Foo::Bar { x, y: x, s: Struct {} }
}
",
)
}
#[test]
fn use_tree() {
check_assist(
generate_enum_variant,
r"
//- /main.rs
mod foo;
use foo::Foo::Bar$0;
//- /foo.rs
pub enum Foo {}
",
r"
pub enum Foo {
Bar,
}
",
)
}
#[test]
fn not_applicable_for_path_type() {
check_assist_not_applicable(
generate_enum_variant,
r"
enum Foo {}
impl Foo::Bar$0 {}
",
)
}
#[test]
fn path_pat() {
check_assist(
generate_enum_variant,
r"
enum Foo {}
fn foo(x: Foo) {
match x {
Foo::Bar$0 =>
}
}
",
r"
enum Foo {
Bar,
}
fn foo(x: Foo) {
match x {
Foo::Bar =>
}
}
",
)
}
}
| true |
1a7c72c0b094fc6829620f38fca46e0de4dd3be9
|
Rust
|
minoring/problem-solving
|
/BJ-Algo/2475.rs
|
UTF-8
| 264 | 3.171875 | 3 |
[] |
no_license
|
use std::io;
fn main() {
let mut input = String::new();
io::stdin().read_line(&mut input);
input.retain(|c| !c.is_whitespace());
let mut res = 0;
for num in input.chars() {
res += num.to_digit(10).unwrap().pow(2);
}
println!("{}", res % 10);
}
| true |
a30068f5dd1cd0e9d66b505d41c28acfaba880b7
|
Rust
|
isgasho/leetcode-rust-6
|
/peak-index-in-a-mountain-array/src/main.rs
|
UTF-8
| 503 | 3.21875 | 3 |
[] |
no_license
|
struct Solution;
impl Solution {
    /// Returns the index of the peak of a mountain array: the first index `i`
    /// (1 <= i <= len-2) with `a[i-1] < a[i] > a[i+1]`, or 0 when no such
    /// index exists.
    ///
    /// Fix: the previous `1..=a.len() - 2` range underflowed (and panicked)
    /// for inputs shorter than two elements; `windows(3)` simply yields
    /// nothing for short slices, so 0 is returned instead.
    pub fn peak_index_in_mountain_array(a: Vec<i32>) -> i32 {
        a.windows(3)
            .position(|w| w[0] < w[1] && w[1] > w[2])
            // Window index j corresponds to peak candidate i = j + 1.
            .map(|i| (i + 1) as i32)
            .unwrap_or(0)
    }
}
fn main() {
    // Entry point is unused; the logic lives in `Solution` and is exercised
    // by the tests below.
    println!("Hello, world!");
}
// Peak of [0, 1, 0] is at index 1.
#[test]
fn test1() {
    assert_eq!(Solution::peak_index_in_mountain_array(vec![0, 1, 0]), 1);
}
// Peak of [0, 2, 1, 0] is also at index 1.
#[test]
fn test2() {
    assert_eq!(Solution::peak_index_in_mountain_array(vec![0, 2, 1, 0]), 1);
}
| true |
c2acb184cd0655fa3d4067287fb892a45b4d270a
|
Rust
|
tirust/msp432e4
|
/src/adc0/sspri.rs
|
UTF-8
| 5,189 | 2.65625 | 3 |
[
"BSD-3-Clause"
] |
permissive
|
#[doc = r"Value read from the register"]
pub struct R {
bits: u32,
}
#[doc = r"Value to write to the register"]
pub struct W {
bits: u32,
}
// NOTE(review): this is svd2rust-generated register-access code for the ADC
// sample sequencer priority (SSPRI) register; regenerate from the SVD rather
// than hand-editing, as manual changes will be lost.
impl super::SSPRI {
    #[doc = r"Modifies the contents of the register"]
    #[inline(always)]
    pub fn modify<F>(&self, f: F)
    where
        for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,
    {
        let bits = self.register.get();
        self.register.set(f(&R { bits }, &mut W { bits }).bits);
    }
    #[doc = r"Reads the contents of the register"]
    #[inline(always)]
    pub fn read(&self) -> R {
        R {
            bits: self.register.get(),
        }
    }
    #[doc = r"Writes to the register"]
    #[inline(always)]
    pub fn write<F>(&self, f: F)
    where
        F: FnOnce(&mut W) -> &mut W,
    {
        self.register.set(
            f(&mut W {
                bits: Self::reset_value(),
            })
            .bits,
        );
    }
    #[doc = r"Reset value of the register"]
    #[inline(always)]
    pub const fn reset_value() -> u32 {
        0
    }
    #[doc = r"Writes the reset value to the register"]
    #[inline(always)]
    pub fn reset(&self) {
        self.register.set(Self::reset_value())
    }
}
#[doc = r"Value of the field"]
pub struct ADC_SSPRI_SS0R {
bits: u8,
}
impl ADC_SSPRI_SS0R {
#[doc = r"Value of the field as raw bits"]
#[inline(always)]
pub fn bits(&self) -> u8 {
self.bits
}
}
#[doc = r"Proxy"]
pub struct _ADC_SSPRI_SS0W<'a> {
w: &'a mut W,
}
impl<'a> _ADC_SSPRI_SS0W<'a> {
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub unsafe fn bits(self, value: u8) -> &'a mut W {
self.w.bits &= !(3 << 0);
self.w.bits |= ((value as u32) & 3) << 0;
self.w
}
}
#[doc = r"Value of the field"]
pub struct ADC_SSPRI_SS1R {
bits: u8,
}
impl ADC_SSPRI_SS1R {
#[doc = r"Value of the field as raw bits"]
#[inline(always)]
pub fn bits(&self) -> u8 {
self.bits
}
}
#[doc = r"Proxy"]
pub struct _ADC_SSPRI_SS1W<'a> {
w: &'a mut W,
}
impl<'a> _ADC_SSPRI_SS1W<'a> {
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub unsafe fn bits(self, value: u8) -> &'a mut W {
self.w.bits &= !(3 << 4);
self.w.bits |= ((value as u32) & 3) << 4;
self.w
}
}
#[doc = r"Value of the field"]
pub struct ADC_SSPRI_SS2R {
bits: u8,
}
impl ADC_SSPRI_SS2R {
#[doc = r"Value of the field as raw bits"]
#[inline(always)]
pub fn bits(&self) -> u8 {
self.bits
}
}
#[doc = r"Proxy"]
pub struct _ADC_SSPRI_SS2W<'a> {
w: &'a mut W,
}
impl<'a> _ADC_SSPRI_SS2W<'a> {
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub unsafe fn bits(self, value: u8) -> &'a mut W {
self.w.bits &= !(3 << 8);
self.w.bits |= ((value as u32) & 3) << 8;
self.w
}
}
#[doc = r"Value of the field"]
pub struct ADC_SSPRI_SS3R {
bits: u8,
}
impl ADC_SSPRI_SS3R {
#[doc = r"Value of the field as raw bits"]
#[inline(always)]
pub fn bits(&self) -> u8 {
self.bits
}
}
#[doc = r"Proxy"]
pub struct _ADC_SSPRI_SS3W<'a> {
w: &'a mut W,
}
impl<'a> _ADC_SSPRI_SS3W<'a> {
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub unsafe fn bits(self, value: u8) -> &'a mut W {
self.w.bits &= !(3 << 12);
self.w.bits |= ((value as u32) & 3) << 12;
self.w
}
}
impl R {
#[doc = r"Value of the register as raw bits"]
#[inline(always)]
pub fn bits(&self) -> u32 {
self.bits
}
#[doc = "Bits 0:1 - SS0 Priority"]
#[inline(always)]
pub fn adc_sspri_ss0(&self) -> ADC_SSPRI_SS0R {
let bits = ((self.bits >> 0) & 3) as u8;
ADC_SSPRI_SS0R { bits }
}
#[doc = "Bits 4:5 - SS1 Priority"]
#[inline(always)]
pub fn adc_sspri_ss1(&self) -> ADC_SSPRI_SS1R {
let bits = ((self.bits >> 4) & 3) as u8;
ADC_SSPRI_SS1R { bits }
}
#[doc = "Bits 8:9 - SS2 Priority"]
#[inline(always)]
pub fn adc_sspri_ss2(&self) -> ADC_SSPRI_SS2R {
let bits = ((self.bits >> 8) & 3) as u8;
ADC_SSPRI_SS2R { bits }
}
#[doc = "Bits 12:13 - SS3 Priority"]
#[inline(always)]
pub fn adc_sspri_ss3(&self) -> ADC_SSPRI_SS3R {
let bits = ((self.bits >> 12) & 3) as u8;
ADC_SSPRI_SS3R { bits }
}
}
impl W {
#[doc = r"Writes raw bits to the register"]
#[inline(always)]
pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
self.bits = bits;
self
}
#[doc = "Bits 0:1 - SS0 Priority"]
#[inline(always)]
pub fn adc_sspri_ss0(&mut self) -> _ADC_SSPRI_SS0W {
_ADC_SSPRI_SS0W { w: self }
}
#[doc = "Bits 4:5 - SS1 Priority"]
#[inline(always)]
pub fn adc_sspri_ss1(&mut self) -> _ADC_SSPRI_SS1W {
_ADC_SSPRI_SS1W { w: self }
}
#[doc = "Bits 8:9 - SS2 Priority"]
#[inline(always)]
pub fn adc_sspri_ss2(&mut self) -> _ADC_SSPRI_SS2W {
_ADC_SSPRI_SS2W { w: self }
}
#[doc = "Bits 12:13 - SS3 Priority"]
#[inline(always)]
pub fn adc_sspri_ss3(&mut self) -> _ADC_SSPRI_SS3W {
_ADC_SSPRI_SS3W { w: self }
}
}
| true |
70d6dfc639a80303697b21819fe64630b5243cf7
|
Rust
|
RobbieClarken/AdventOfCode2019
|
/day-05/src/main.rs
|
UTF-8
| 1,252 | 3.265625 | 3 |
[
"MIT"
] |
permissive
|
mod executor;
use executor::Executor;
use std::fs::File;
use std::io::{Read, Result};
fn main() -> Result<()> {
    // Load the Intcode program once; part 1 runs on a copy, part 2 consumes it.
    let program = read_program("input")?;
    challenge_1(program.clone());
    challenge_2(program);
    Ok(())
}
// Part 1: run the program with input 1; all outputs except the last are
// self-test codes that must be zero, and the last is the diagnostic code.
fn challenge_1(program: Vec<i32>) {
    let output = Executor::run(program, vec![1]);
    // NOTE(review): this slice underflows (panics) if `output` is empty —
    // confirm the executor always emits at least one value.
    for err_code in output[..output.len() - 1].iter() {
        assert_eq!(*err_code, 0);
    }
    println!("Challenge 1: Diagnostic code = {}", output.last().unwrap());
}
// Part 2: same structure as part 1, but with input 5.
fn challenge_2(program: Vec<i32>) {
    let output = Executor::run(program, vec![5]);
    // NOTE(review): this slice underflows (panics) if `output` is empty —
    // confirm the executor always emits at least one value.
    for err_code in output[..output.len() - 1].iter() {
        assert_eq!(*err_code, 0);
    }
    println!("Challenge 2: Diagnostic code = {}", output.last().unwrap());
}
/// Reads a comma-separated Intcode program from `filename`.
///
/// # Errors
///
/// Returns any I/O error from opening/reading the file, and an
/// `InvalidData` error when a token cannot be parsed as an `i32` (the
/// previous version panicked on malformed input).
fn read_program(filename: &str) -> Result<Vec<i32>> {
    let mut input: String = String::new();
    File::open(filename)?.read_to_string(&mut input)?;
    input
        .split(',')
        .map(|v| {
            v.trim().parse().map_err(|e| {
                std::io::Error::new(
                    std::io::ErrorKind::InvalidData,
                    format!("invalid opcode {:?}: {}", v.trim(), e),
                )
            })
        })
        // Collecting into `Result<Vec<_>>` short-circuits on the first error.
        .collect()
}
#[cfg(test)]
mod main_tests {
    use super::*;
    // NOTE(review): depends on the puzzle `input` file being present in the
    // working directory when tests run.
    #[test]
    fn reads_program_from_file() {
        let program = read_program("input").unwrap();
        assert_eq!(program[..5], [3, 225, 1, 225, 6]);
    }
}
b5a58ccef4ef5f5adf85a6d5d642b7f453fc19eb
|
Rust
|
automerge/automerge
|
/rust/automerge/src/legacy/utility_impls/opid.rs
|
UTF-8
| 1,620 | 2.71875 | 3 |
[
"MIT"
] |
permissive
|
use core::fmt;
use std::{
cmp::{Ordering, PartialOrd},
str::FromStr,
};
use crate::error::InvalidOpId;
use crate::legacy::{ActorId, OpId};
impl Ord for OpId {
    /// Total order: compare counters first, then fall back to the actor id
    /// as a deterministic tie-breaker.
    fn cmp(&self, other: &Self) -> Ordering {
        self.0.cmp(&other.0).then_with(|| self.1.cmp(&other.1))
    }
}
impl fmt::Debug for OpId {
    /// `Debug` delegates to `Display` (the `counter@actor` form). Forwarding
    /// the formatter directly avoids allocating the intermediate `String`
    /// that the previous `self.to_string()` implementation built.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Display::fmt(self, f)
    }
}
impl fmt::Display for OpId {
    /// Renders as `counter@actor`, the canonical textual opid form.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let OpId(seq, actor) = self;
        write!(f, "{}@{}", seq, actor)
    }
}
impl PartialOrd for OpId {
    // Delegates to the total order defined by `Ord`.
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}
impl FromStr for OpId {
    type Err = InvalidOpId;
    /// Parses the canonical `counter@actor` form. Any other shape —
    /// including extra `@` separators — is rejected.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        let invalid = || InvalidOpId(s.to_string());
        let mut parts = s.split('@');
        // Require exactly two `@`-separated parts.
        let (counter_str, actor_str) = match (parts.next(), parts.next(), parts.next()) {
            (Some(counter), Some(actor), None) => (counter, actor),
            _ => return Err(invalid()),
        };
        match (counter_str.parse(), ActorId::from_str(actor_str)) {
            (Ok(counter), Ok(actor)) => Ok(OpId(counter, actor)),
            _ => Err(invalid()),
        }
    }
}
impl TryFrom<&str> for OpId {
    type Error = InvalidOpId;
    // Thin adapter over `FromStr` for `TryInto`-style call sites.
    fn try_from(s: &str) -> Result<Self, InvalidOpId> {
        OpId::from_str(s)
    }
}
impl From<&OpId> for String {
    // Serializes to the canonical `counter@actor` text form via `Display`.
    fn from(id: &OpId) -> Self {
        id.to_string()
    }
}
| true |
2a37c2fcef271c3d290b03f906d89d05b7ef9482
|
Rust
|
anbaut/soundboard
|
/src/web_server.rs
|
UTF-8
| 711 | 2.75 | 3 |
[] |
no_license
|
use anyhow::Result;
use actix_files::Files;
use actix_web::HttpServer;
// Serves the application's bundled static web resources over HTTP.
#[derive(Default)]
pub struct WebServer {
    // `None` until `start` has been called successfully.
    server: Option<actix_web::dev::Server>,
}
impl WebServer {
    /// Starts a static-file HTTP server on 127.0.0.1:29815 serving the
    /// application's web resources directory.
    ///
    /// # Errors
    ///
    /// Returns an error if the address cannot be bound.
    ///
    /// # Panics
    ///
    /// Panics if the web resources directory cannot be determined.
    pub fn start(&mut self) -> Result<()> {
        let address = "127.0.0.1:29815";
        let path = crate::App::get_web_resources_directory().expect("Could not get web resources directory");
        self.server = Some(HttpServer::new(move || {
            actix_web::App::new().service(Files::new("/", path.clone()))
        })
        .workers(3)
        .bind(address)?
        .run());
        log::info!("Static web server running on {}", address);
        Ok(())
    }
    /// Gracefully stops the server if it was started. The previous version
    /// panicked (`unwrap`) when `stop` was called before `start`.
    pub async fn stop(&self) {
        log::info!("Stopping web server...");
        if let Some(server) = self.server.as_ref() {
            server.stop(true).await;
        }
    }
}
| true |
29fb6cba43549e6997c6922c9445797bb6131939
|
Rust
|
lightdiscord/translator
|
/src/lib.rs
|
UTF-8
| 6,002 | 3.5 | 4 |
[] |
no_license
|
/// Hands out sequentially numbered identifiers starting from zero.
#[derive(Default)]
pub struct IdentifierFactory(usize);
impl IdentifierFactory {
    /// Returns a fresh identifier and advances the internal counter.
    pub fn new(&mut self) -> Identifier {
        let current = self.0;
        self.0 += 1;
        Identifier::new(current)
    }
}
/// Conversion of an AST node into its C source-code representation.
pub trait Convert {
    fn convert(&self) -> String;
}
impl Convert for usize {
    // Integer literals render as their decimal form.
    fn convert(&self) -> String {
        self.to_string()
    }
}
// A numbered identifier; rendered as `identifier_<n>` in the generated C.
#[derive(Clone, Copy)]
pub struct Identifier(usize);
impl Identifier {
    pub fn new(id: usize) -> Self {
        Identifier(id)
    }
}
impl Convert for Identifier {
    fn convert(&self) -> String {
        format!("identifier_{}", self.0)
    }
}
// The set of value types the translator understands.
#[derive(Clone, Copy)]
pub enum Type {
    Int32
}
impl Convert for Type {
    fn convert(&self) -> String {
        match self {
            // C spelling of the 32-bit integer type.
            Type::Int32 => "int".to_string(),
        }
    }
}
// Binary remainder (`a % b`) of two convertible operands.
pub struct Remainder<A, B>(pub A, pub B);
impl<A: Convert, B: Convert> Convert for Remainder<A, B> {
    fn convert(&self) -> String {
        let Remainder(a, b) = self;
        format!("{} % {}", a.convert(), b.convert())
    }
}
// Binary division (`a / b`) of two convertible operands.
pub struct Divide<A, B>(pub A, pub B);
impl<A: Convert, B: Convert> Convert for Divide<A, B> {
    fn convert(&self) -> String {
        let Divide(a, b) = self;
        format!("{} / {}", a.convert(), b.convert())
    }
}
// Binary addition (`a + b`) of two convertible operands.
pub struct Plus<A, B>(pub A, pub B);
impl<A: Convert, B: Convert> Convert for Plus<A, B> {
    fn convert(&self) -> String {
        let Plus(a, b) = self;
        format!("{} + {}", a.convert(), b.convert())
    }
}
impl Convert for (usize, &Vec<Instruction>) {
    /// Renders a list of instructions, one per line, each line prefixed with
    /// `padding` tab characters.
    fn convert(&self) -> String {
        let (padding, instructions) = self;
        let padding = "\t".repeat(*padding);
        // Collect straight into a `String`: the previous
        // `collect::<Vec<String>>().join("")` produced the same output with
        // an extra allocation pass.
        instructions.iter()
            .map(|instruction| format!("{}{}\n", padding, instruction.convert()))
            .collect()
    }
}
impl Convert for Vec<Instruction> {
    // Default rendering uses one level of indentation (function-body depth).
    fn convert(&self) -> String {
        (1, self).convert()
    }
}
/// A binary comparison between two convertible operands.
pub enum Comparison<A, B> {
    Equals(A, B),
    NotEquals(A, B),
    GreaterThan(A, B),
    LessThan(A, B)
}
impl<A: Convert, B: Convert> Convert for Comparison<A, B> {
    /// Renders as `lhs <op> rhs` with the corresponding C comparison operator.
    fn convert(&self) -> String {
        let (lhs, op, rhs) = match self {
            Comparison::Equals(a, b) => (a.convert(), "==", b.convert()),
            Comparison::NotEquals(a, b) => (a.convert(), "!=", b.convert()),
            Comparison::GreaterThan(a, b) => (a.convert(), ">", b.convert()),
            Comparison::LessThan(a, b) => (a.convert(), "<", b.convert()),
        };
        format!("{} {} {}", lhs, op, rhs)
    }
}
// A call expression: a callee plus a list of already-convertible arguments.
pub struct Call<A>(pub A, pub Vec<Box<dyn Convert>>);
impl<A: Convert> Convert for Call<A> {
    fn convert(&self) -> String {
        let Call(function, parameters) = self;
        // Render each argument, then join with C's comma separator.
        let parameters = parameters.iter().map(|param| param.convert()).collect::<Vec<String>>().join(", ");
        format!("{}({})", function.convert(), parameters)
    }
}
/// A single translated statement.
pub enum Instruction {
    /// Declare the variable, then read an integer from stdin into it.
    ReadLn(Variable),
    /// Print the variable's value to stdout.
    WriteLn(Variable),
    /// Declare the variable without initializing it.
    Declare(Variable),
    /// Return the given expression from the enclosing function.
    Return(Box<dyn Convert>),
    /// Assign the expression to an already-declared identifier.
    Assign(Identifier, Box<dyn Convert>),
    /// Conditional block.
    If {
        condition: Box<dyn Convert>,
        instructions: Vec<Instruction>
    },
    /// Loop block.
    While {
        condition: Box<dyn Convert>,
        instructions: Vec<Instruction>
    },
    /// Should not be used because it is language specific.
    Custom(String)
}
impl Convert for Instruction {
    fn convert(&self) -> String {
        match self {
            Instruction::ReadLn(variable) => {
                // Reading implies declaring: emit the declaration, then a C
                // scanf into the variable's address.
                let instructions = vec![
                    Instruction::Declare(*variable),
                    Instruction::Custom(format!(r#"scanf("%d", &{});"#, variable.identifier.convert()))
                ];
                instructions.convert()
            }
            Instruction::WriteLn(variable) => {
                Instruction::Custom(format!(r#"printf("%d", {});"#, variable.identifier.convert())).convert()
            },
            Instruction::Declare(variable) => {
                format!("{} {};", variable.r#type.convert(), variable.identifier.convert())
            },
            Instruction::Return(data) => format!("return {};", data.convert()),
            Instruction::Assign(identifier, data) => format!("{} = {};", identifier.convert(), data.convert()),
            Instruction::If { condition, instructions } => {
                // Nested bodies are indented by the Vec<Instruction> impl.
                format!("if ({}) {{\n{}}}", condition.convert(), instructions.convert())
            }
            Instruction::While { condition, instructions } => {
                format!("while ({}) {{\n{}}}", condition.convert(), instructions.convert())
            },
            Instruction::Custom(content) => content.to_string()
        }
    }
}
// A typed variable binding.
#[derive(Clone, Copy)]
pub struct Variable {
    pub identifier: Identifier,
    pub r#type: Type
}
// A function definition: signature plus body instructions.
pub struct Function {
    pub identifier: Identifier,
    pub parameters: Vec<Variable>,
    pub returns: Type,
    pub instructions: Vec<Instruction>
}
// A whole program: its functions and an optional entry point.
pub struct Graph {
    pub functions: Vec<Function>,
    // When set, a C `main` wrapper calling this function is emitted.
    pub main: Option<Identifier>
}
impl Convert for Graph {
    /// Renders every function, then (if an entry point is set) a C `main`
    /// wrapper that calls it; items are separated by blank lines.
    fn convert(&self) -> String {
        let entry = self.main.map(|main| {
            format!("int main(void) {{ return {}(); }}", main.convert())
        });
        self.functions
            .iter()
            .map(Convert::convert)
            .chain(entry)
            .collect::<Vec<String>>()
            .join("\n\n")
    }
}
impl Convert for Function {
    // Renders a full C function definition: return type, name, comma-joined
    // parameter list, and brace-wrapped body.
    fn convert(&self) -> String {
        let parameters = self.parameters.iter()
            .map(|variable| format!(
                "{} {}",
                variable.r#type.convert(),
                variable.identifier.convert()
            ))
            .collect::<Vec<String>>()
            .join(", ");
        format!(
            "{} {}({}) {{\n{}}}",
            self.returns.convert(),
            self.identifier.convert(),
            parameters,
            self.instructions.convert()
        )
    }
}
| true |
66e38bb4e9de4bcc69f5211648a5c2b1ddbf7bf0
|
Rust
|
ebagos/rust
|
/p092/src/main.rs
|
UTF-8
| 1,402 | 3.703125 | 4 |
[] |
no_license
|
/*
各桁の2乗を足し合わせて新たな数を作ることを, 同じ数が現れるまで繰り返す.
例えば
44 → 32 → 13 → 10 → 1 → 1
85 → 89 → 145 → 42 → 20 → 4 → 16 → 37 → 58 → 89
のような列である. どちらも1か89で無限ループに陥っている.
驚くことに, どの数から始めても最終的に1か89に到達する.
では, 10,000,000より小さい数で89に到達する数はいくつあるか.
*/
use std::time::Instant;
/// Adds the squares of the decimal digits of `n` to the accumulator `s`
/// and returns the total.
fn square_plus(n: i32, s: i32) -> i32 {
    let (mut n, mut acc) = (n, s);
    while n != 0 {
        let digit = n % 10;
        acc += digit * digit;
        n /= 10;
    }
    acc
}
/// Project Euler 92: counts numbers below 10,000,000 whose digit-square
/// chain reaches 89, memoizing chain outcomes for small values.
fn main() {
    let start = Instant::now();
    let mut count = 0;
    // The largest digit-square sum of any number below 10^7 is 7 * 9^2, so
    // every chain value after the first step fits in the memo table.
    // (Renamed from `MAX`: non-snake-case locals trigger a compiler warning.)
    let memo_size: usize = (9i32.pow(2) * 7 + 1) as usize;
    // memo[v]: -1 = unknown, 0 = chain reaches 1, 1 = chain reaches 89.
    let mut memo = vec![-1; memo_size];
    memo[1] = 0;
    memo[89] = 1;
    for i in 2..10_000_000 {
        let mut n = i;
        // Walk the chain until a value with a known terminal is hit.
        loop {
            n = square_plus(n, 0);
            if memo[n as usize] != -1 {
                break;
            }
        }
        if memo[n as usize] == 1 {
            count += 1;
            if i < memo_size as i32 {
                memo[i as usize] = 1;
            }
        } else if i < memo_size as i32 {
            memo[i as usize] = 0;
        }
    }
    let end = start.elapsed();
    println!("count = {}", count);
    println!("elapsed : {}.{} sec", end.as_secs(), end.subsec_nanos());
}
| true |
22c78bd7f6e61dc0448d32c682cf94f9522c4514
|
Rust
|
matchai/starship
|
/tests/testsuite/directory.rs
|
UTF-8
| 12,100 | 2.9375 | 3 |
[
"ISC"
] |
permissive
|
use ansi_term::Color;
use dirs::home_dir;
use git2::Repository;
use std::fs;
use std::io;
use std::path::Path;
use tempfile::TempDir;
use crate::common::{self, TestCommand};
#[test]
fn home_directory() -> io::Result<()> {
let output = common::render_module("directory")
.arg("--path=~")
.use_config(toml::toml! { // Necessary if homedir is a git repo
[directory]
truncate_to_repo = false
})
.output()?;
let actual = String::from_utf8(output.stdout).unwrap();
let expected = format!("in {} ", Color::Cyan.bold().paint("~"));
assert_eq!(expected, actual);
Ok(())
}
#[test]
#[ignore]
fn directory_in_home() -> io::Result<()> {
let dir = home_dir().unwrap().join("starship/engine");
fs::create_dir_all(&dir)?;
let output = common::render_module("directory")
.arg("--path")
.arg(dir)
.output()?;
let actual = String::from_utf8(output.stdout).unwrap();
let expected = format!("in {} ", Color::Cyan.bold().paint("~/starship/engine"));
assert_eq!(expected, actual);
Ok(())
}
#[test]
#[ignore]
fn truncated_directory_in_home() -> io::Result<()> {
let dir = home_dir().unwrap().join("starship/engine/schematics");
fs::create_dir_all(&dir)?;
let output = common::render_module("directory")
.arg("--path")
.arg(dir)
.output()?;
let actual = String::from_utf8(output.stdout).unwrap();
let expected = format!(
"in {} ",
Color::Cyan.bold().paint("starship/engine/schematics")
);
assert_eq!(expected, actual);
Ok(())
}
#[test]
#[ignore]
fn fish_directory_in_home() -> io::Result<()> {
let dir = home_dir().unwrap().join("starship/engine/schematics");
fs::create_dir_all(&dir)?;
let output = common::render_module("directory")
.use_config(toml::toml! {
[directory]
truncation_length = 1
fish_style_pwd_dir_length = 2
})
.arg("--path")
.arg(dir)
.output()?;
let actual = String::from_utf8(output.stdout).unwrap();
let expected = format!("in {} ", Color::Cyan.bold().paint("~/st/en/schematics"));
assert_eq!(expected, actual);
Ok(())
}
#[test]
fn root_directory() -> io::Result<()> {
let output = common::render_module("directory")
.arg("--path=/")
.output()?;
let actual = String::from_utf8(output.stdout).unwrap();
let expected = format!("in {} ", Color::Cyan.bold().paint("/"));
assert_eq!(expected, actual);
Ok(())
}
#[test]
#[cfg(not(target_os = "windows"))]
fn directory_in_root() -> io::Result<()> {
let output = common::render_module("directory")
.arg("--path=/etc")
.output()?;
let actual = String::from_utf8(output.stdout).unwrap();
let expected = format!("in {} ", Color::Cyan.bold().paint("/etc"));
assert_eq!(expected, actual);
Ok(())
}
#[test]
#[cfg(target_os = "windows")]
fn directory_in_root() -> io::Result<()> {
let output = common::render_module("dir").arg("--path=C:\\").output()?;
let actual = String::from_utf8(output.stdout).unwrap();
let expected = format!("in {} ", Color::Cyan.bold().paint("/c"));
assert_eq!(expected, actual);
Ok(())
}
#[test]
#[ignore]
fn truncated_directory_in_root() -> io::Result<()> {
let dir = Path::new("/tmp/starship/thrusters/rocket");
fs::create_dir_all(&dir)?;
let output = common::render_module("directory")
.arg("--path")
.arg(dir)
.output()?;
let actual = String::from_utf8(output.stdout).unwrap();
let expected = format!(
"in {} ",
Color::Cyan.bold().paint("starship/thrusters/rocket")
);
assert_eq!(expected, actual);
Ok(())
}
#[test]
#[ignore]
fn truncated_directory_config_large() -> io::Result<()> {
let dir = Path::new("/tmp/starship/thrusters/rocket");
fs::create_dir_all(&dir)?;
let output = common::render_module("directory")
.use_config(toml::toml! {
[directory]
truncation_length = 100
})
.arg("--path")
.arg(dir)
.output()?;
let actual = String::from_utf8(output.stdout).unwrap();
let expected = format!(
"in {} ",
Color::Cyan.bold().paint("/tmp/starship/thrusters/rocket")
);
assert_eq!(expected, actual);
Ok(())
}
#[test]
#[ignore]
fn fish_style_directory_config_large() -> io::Result<()> {
let dir = Path::new("/tmp/starship/thrusters/rocket");
fs::create_dir_all(&dir)?;
let output = common::render_module("directory")
.use_config(toml::toml! {
[directory]
truncation_length = 1
fish_style_pwd_dir_length = 100
})
.arg("--path")
.arg(dir)
.output()?;
let actual = String::from_utf8(output.stdout).unwrap();
let expected = format!(
"in {} ",
Color::Cyan.bold().paint("/tmp/starship/thrusters/rocket")
);
assert_eq!(expected, actual);
Ok(())
}
// truncation_length = 2 keeps only the last two path components.
#[test]
#[ignore]
fn truncated_directory_config_small() -> io::Result<()> {
    let dir = Path::new("/tmp/starship/thrusters/rocket");
    fs::create_dir_all(&dir)?;
    let output = common::render_module("directory")
        .use_config(toml::toml! {
            [directory]
            truncation_length = 2
        })
        .arg("--path")
        .arg(dir)
        .output()?;
    let actual = String::from_utf8(output.stdout).unwrap();
    let expected = format!("in {} ", Color::Cyan.bold().paint("thrusters/rocket"));
    assert_eq!(expected, actual);
    Ok(())
}
// Fish-style abbreviation: leading components shrink to one character
// ("/t/s/..."), while the trailing truncation_length components stay whole.
#[test]
#[ignore]
fn fish_directory_config_small() -> io::Result<()> {
    let dir = Path::new("/tmp/starship/thrusters/rocket");
    fs::create_dir_all(&dir)?;
    let output = common::render_module("directory")
        .use_config(toml::toml! {
            [directory]
            truncation_length = 2
            fish_style_pwd_dir_length = 1
        })
        .arg("--path")
        .arg(dir)
        .output()?;
    let actual = String::from_utf8(output.stdout).unwrap();
    let expected = format!("in {} ", Color::Cyan.bold().paint("/t/s/thrusters/rocket"));
    assert_eq!(expected, actual);
    Ok(())
}
// At the root of a git repository, only the repository directory name shows.
#[test]
#[ignore]
fn git_repo_root() -> io::Result<()> {
    // TODO: Investigate why git repo related tests fail when the tempdir is within /tmp/...
    // Temporarily making the tempdir within $HOME
    // #[ignore] can be removed after this TODO is addressed
    let tmp_dir = TempDir::new_in(dirs::home_dir().unwrap())?;
    let repo_dir = tmp_dir.path().join("rocket-controls");
    fs::create_dir(&repo_dir)?;
    Repository::init(&repo_dir).unwrap();
    let output = common::render_module("directory")
        .arg("--path")
        .arg(repo_dir)
        .output()?;
    let actual = String::from_utf8(output.stdout).unwrap();
    let expected = format!("in {} ", Color::Cyan.bold().paint("rocket-controls"));
    assert_eq!(expected, actual);
    Ok(())
}
#[test]
#[ignore]
fn directory_in_git_repo() -> io::Result<()> {
let tmp_dir = TempDir::new_in(dirs::home_dir().unwrap())?;
let repo_dir = tmp_dir.path().join("rocket-controls");
let dir = repo_dir.join("src");
fs::create_dir_all(&dir)?;
Repository::init(&repo_dir).unwrap();
let output = common::render_module("directory")
.arg("--path")
.arg(dir)
.output()?;
let actual = String::from_utf8(output.stdout).unwrap();
let expected = format!("in {} ", Color::Cyan.bold().paint("rocket-controls/src"));
assert_eq!(expected, actual);
Ok(())
}
#[test]
#[ignore]
fn truncated_directory_in_git_repo() -> io::Result<()> {
let tmp_dir = TempDir::new_in(dirs::home_dir().unwrap())?;
let repo_dir = tmp_dir.path().join("rocket-controls");
let dir = repo_dir.join("src/meters/fuel-gauge");
fs::create_dir_all(&dir)?;
Repository::init(&repo_dir).unwrap();
let output = common::render_module("directory")
.arg("--path")
.arg(dir)
.output()?;
let actual = String::from_utf8(output.stdout).unwrap();
let expected = format!("in {} ", Color::Cyan.bold().paint("src/meters/fuel-gauge"));
assert_eq!(expected, actual);
Ok(())
}
#[test]
#[ignore]
fn directory_in_git_repo_truncate_to_repo_false() -> io::Result<()> {
let tmp_dir = TempDir::new_in(dirs::home_dir().unwrap())?;
let repo_dir = tmp_dir.path().join("above-repo").join("rocket-controls");
let dir = repo_dir.join("src/meters/fuel-gauge");
fs::create_dir_all(&dir)?;
Repository::init(&repo_dir).unwrap();
let output = common::render_module("directory")
.use_config(toml::toml! {
[directory]
// Don't truncate the path at all.
truncation_length = 5
truncate_to_repo = false
})
.arg("--path")
.arg(dir)
.output()?;
let actual = String::from_utf8(output.stdout).unwrap();
let expected = format!(
"in {} ",
Color::Cyan
.bold()
.paint("above-repo/rocket-controls/src/meters/fuel-gauge")
);
assert_eq!(expected, actual);
Ok(())
}
#[test]
#[ignore]
fn fish_path_directory_in_git_repo_truncate_to_repo_false() -> io::Result<()> {
let tmp_dir = TempDir::new_in(dirs::home_dir().unwrap())?;
let repo_dir = tmp_dir.path().join("above-repo").join("rocket-controls");
let dir = repo_dir.join("src/meters/fuel-gauge");
fs::create_dir_all(&dir)?;
Repository::init(&repo_dir).unwrap();
let output = common::render_module("directory")
.use_config(toml::toml! {
[directory]
// Don't truncate the path at all.
truncation_length = 5
truncate_to_repo = false
fish_style_pwd_dir_length = 1
})
.arg("--path")
.arg(dir)
.output()?;
let actual = String::from_utf8(output.stdout).unwrap();
let expected = format!(
"in {} ",
Color::Cyan
.bold()
.paint("~/.t/above-repo/rocket-controls/src/meters/fuel-gauge")
);
assert_eq!(expected, actual);
Ok(())
}
// truncate_to_repo = true with fish styling: everything above the repo is
// abbreviated to single characters; the repo name and subpath stay whole.
#[test]
#[ignore]
fn fish_path_directory_in_git_repo_truncate_to_repo_true() -> io::Result<()> {
    let tmp_dir = TempDir::new_in(dirs::home_dir().unwrap())?;
    let repo_dir = tmp_dir.path().join("above-repo").join("rocket-controls");
    let dir = repo_dir.join("src/meters/fuel-gauge");
    fs::create_dir_all(&dir)?;
    Repository::init(&repo_dir).unwrap();
    let output = common::render_module("directory")
        .use_config(toml::toml! {
            [directory]
            // `truncate_to_repo = true` should display the truncated path
            truncation_length = 5
            truncate_to_repo = true
            fish_style_pwd_dir_length = 1
        })
        .arg("--path")
        .arg(dir)
        .output()?;
    let actual = String::from_utf8(output.stdout).unwrap();
    let expected = format!(
        "in {} ",
        Color::Cyan
            .bold()
            .paint("~/.t/a/rocket-controls/src/meters/fuel-gauge")
    );
    assert_eq!(expected, actual);
    Ok(())
}
// truncate_to_repo = true without fish styling: the rendered path starts at
// the repository root regardless of truncation_length.
#[test]
#[ignore]
fn directory_in_git_repo_truncate_to_repo_true() -> io::Result<()> {
    let tmp_dir = TempDir::new_in(dirs::home_dir().unwrap())?;
    let repo_dir = tmp_dir.path().join("above-repo").join("rocket-controls");
    let dir = repo_dir.join("src/meters/fuel-gauge");
    fs::create_dir_all(&dir)?;
    Repository::init(&repo_dir).unwrap();
    let output = common::render_module("directory")
        .use_config(toml::toml! {
            [directory]
            // `truncate_to_repo = true` should display the truncated path
            truncation_length = 5
            truncate_to_repo = true
        })
        .arg("--path")
        .arg(dir)
        .output()?;
    let actual = String::from_utf8(output.stdout).unwrap();
    let expected = format!(
        "in {} ",
        Color::Cyan
            .bold()
            .paint("rocket-controls/src/meters/fuel-gauge")
    );
    assert_eq!(expected, actual);
    Ok(())
}
| true |
bff6b445737d5ee7024aca7f78714864f6ca5d84
|
Rust
|
RbertKo/follow-rust-book
|
/ownership/src/main.rs
|
UTF-8
| 558 | 3.6875 | 4 |
[] |
no_license
|
fn main() {
    // Variable Scope
    // s is not valid here, it’s not yet declared
    let s = "hello"; // s is valid from this point forward
    // do stuff with s
    // this scope is now over, and s is no longer valid
    // The next `let s` shadows the string-literal binding above with an
    // owned, heap-allocated String.
    let mut s = String::from("hello");
    s.push_str(", world!"); // push_str() appends the given string literal onto the String.
    println!("{}", s); // This will print `hello, world!`.
}
| true |
962845f01308886ba8bbb6a9ebd9a5dcdad20fbd
|
Rust
|
mt-caret/c10e
|
/src/main.rs
|
UTF-8
| 2,211 | 3.296875 | 3 |
[] |
no_license
|
extern crate natural;
extern crate rayon;
use natural::tokenize::tokenize;
use rayon::prelude::*;
use std::collections::HashMap;
use std::fs::File;
use std::io::{self, BufRead, Read};
/// Normalizes a raw token: trims surrounding whitespace, lowercases it, and
/// keeps only alphanumeric characters and apostrophes (so contractions like
/// "don't" survive).
fn process_word(x: &str) -> String {
    let lowered = x.trim().to_lowercase();
    let mut cleaned = String::with_capacity(lowered.len());
    for ch in lowered.chars() {
        if ch.is_alphanumeric() || ch == '\'' {
            cleaned.push(ch);
        }
    }
    cleaned
}
/// Tokenizes `content`, normalizes each token via `process_word`, and drops
/// stopwords and single-character tokens.
///
/// Builds a `HashSet` of stopwords once per call so each token lookup is O(1)
/// instead of a linear scan over the whole stopword list (the original was
/// O(tokens × stopwords)).
fn canonicalize(content: &str, stopwords: &Vec<String>) -> Vec<String> {
    let stopset: std::collections::HashSet<&str> =
        stopwords.iter().map(|s| s.as_str()).collect();
    tokenize(content)
        .iter()
        .map(|x| process_word(x))
        .filter(|x| x.len() > 1 && !stopset.contains(x.as_str()))
        .collect::<Vec<_>>()
}
/// Reads the whole file into a String.
///
/// Uses `std::fs::read_to_string`, which opens and reads in one call and
/// sizes the buffer from file metadata. On failure the panic message now
/// includes the offending filename (the original messages did not say which
/// file failed).
fn read_file(filename: &str) -> String {
    std::fs::read_to_string(filename)
        .unwrap_or_else(|e| panic!("Failed to read {}: {}", filename, e))
}
fn main() {
    // One filename per line on stdin; unreadable lines are silently skipped.
    let stdin = io::stdin();
    let filenames: Vec<_> = stdin.lock().lines().filter_map(|x| x.ok()).collect();
    // Stopwords are normalized the same way as document tokens so they match.
    let stopwords = read_file("stopwords.txt");
    let stopwords = stopwords.lines().map(|x| process_word(x)).collect::<Vec<_>>();
    eprintln!("Reading files...");
    let contents: Vec<_> = filenames
        .iter()
        .map(|filename| read_file(filename))
        .collect();
    eprintln!("Canonicalizing...");
    // Tokenization/normalization runs in parallel across documents (rayon).
    let mut result = contents
        .par_iter()
        .flat_map(|content| canonicalize(&content, &stopwords))
        .collect::<Vec<_>>();
    eprintln!("Sorting {} words...", result.len());
    result.par_sort_unstable();
    eprintln!("Counting words...");
    // Counting relies on the sort above: equal words are adjacent, so each
    // run of duplicates collapses into a single (word, count) entry. Keys
    // borrow from `result`, avoiding a clone per distinct word.
    let mut index = 0;
    let mut word_count = HashMap::new();
    while index < result.len() {
        let mut count = 1;
        while index + 1 < result.len() && result[index] == result[index+1] {
            index += 1;
            count += 1;
        }
        word_count.insert(&result[index], count);
        index += 1;
    }
    // Output ascending by count, ties broken alphabetically.
    let mut out = word_count.par_iter().collect::<Vec<_>>();
    out.par_sort_unstable_by(|a, b| {
        match a.1.cmp(b.1) {
            std::cmp::Ordering::Equal => a.0.cmp(b.0),
            x => x
        }
    });
    for (word, count) in out {
        println!("{} {}", count, word);
    }
}
| true |
e7dfb4acb20c3175254924d32be316e710d8ec53
|
Rust
|
linkpy/neolisp
|
/src/nl/interpreter/builtin/io.rs
|
UTF-8
| 1,426 | 3.203125 | 3 |
[] |
no_license
|
use crate::nl::core::object::*;
use crate::nl::interpreter::*;
/// Registers all I/O builtin forms.
///
/// Currently registers `to-string` and `println`. Registration is chained:
/// `register_eval_form` apparently returns the scope for fluent calls —
/// NOTE(review): confirmed only by the chaining here, verify in `Scope`.
pub fn register_builtin_io_forms(scope: &mut Scope) {
    scope
        .register_eval_form("to-string", to_string)
        .register_eval_form("println", println);
}
/// `to-string` eval form.
///
/// `(to-string v)`
///
fn to_string(args: Vec<Object>) -> Result<Object, Error> {
if args.len() != 1 {
return Error::errf(
&format!(
"'to-string' only receives 1 argument, got {} instead.",
args.len()
),
"to-string",
intern_location!(),
);
}
Ok(Object::string(format!("{}", args[0])))
}
/// `println` eval form.
///
/// `(println "text")`
///
/// Prints its single String argument to stdout (with trailing newline) and
/// evaluates to nil. Errors on wrong arity or a non-String argument.
fn println(args: Vec<Object>) -> Result<Object, Error> {
    if args.len() != 1 {
        return Error::errf(
            &format!(
                "'println' only receives 1 argument, got {} instead.",
                args.len()
            ),
            // Bug fix: the error context previously named "to-string".
            "println",
            intern_location!(),
        );
    }
    if !&args[0].is_string() {
        return Error::errf(
            &format!(
                "'println' only receives a String, got a {} instead.",
                &args[0].type_string()
            ),
            // Bug fix: the error context previously named "to-string".
            "println",
            intern_location!(),
        );
    }
    println!("{}", &args[0].get_string());
    Ok(Object::nil())
}
| true |
4f5d3a2e14e500a800fd9a3a8c1b45d9d742f1e6
|
Rust
|
DusterTheFirst/pico-mct
|
/src/routes/devices.rs
|
UTF-8
| 2,778 | 2.546875 | 3 |
[] |
no_license
|
use std::{collections::BTreeMap, time::Duration};
use anyhow::anyhow;
use async_std::task;
use crossfire::mpsc::unbounded_future;
use log::{debug, error, info};
use serde::Deserialize;
use tide::{sse::Sender, Body, Request, StatusCode};
use crate::{serial::get_serial_ports, State};
use super::super::ingest::ingest;
/// Lists available serial devices as a JSON object mapping port name to
/// product description (product may be null).
pub async fn list_devices(_: Request<State>) -> tide::Result<Body> {
    let serial_ports = get_serial_ports()
        .await?
        .map(|port| (port.name, port.product))
        .collect::<BTreeMap<_, _>>();
    Body::from_json(&serial_ports)
}
/// Query-string parameters accepted by `device_connect`.
#[derive(Deserialize)]
struct DeviceConnectQuery {
    // Serial port name to open (required, must be non-empty).
    port: String,
    // Read timeout in milliseconds; defaults to 1000 when absent.
    timeout: Option<u64>,
    // Baud rate; defaults to 0 when absent (USB CDC ignores it — see below).
    baud: Option<u32>,
}
/// Opens the requested serial port and streams parsed telemetry packets to
/// the client as server-sent events ("telemetry" events, JSON payloads).
///
/// Lifecycle: a blocking ingest task reads the port and feeds an unbounded
/// channel; this handler forwards packets until either the ingest side or
/// the SSE client drops, then cancels the ingest task.
pub async fn device_connect(req: Request<State>, sender: Sender) -> tide::Result<()> {
    let DeviceConnectQuery {
        port: port_name,
        timeout,
        baud,
    } = req.query()?;
    // NOTE(review): this only rejects an empty port name; a non-empty but
    // nonexistent port falls through to the open() error below.
    if port_name.is_empty() {
        return Err(tide::Error::new(
            StatusCode::BadRequest,
            anyhow!("device {} does not exist", port_name),
        ));
    }
    // FIXME: not 1s for timeout?
    // Assuming Pico SDK USB CDC so baud rate does not matter
    match serialport::new(&port_name, baud.unwrap_or(0))
        .timeout(Duration::from_millis(timeout.unwrap_or(1000)))
        .open()
    {
        Ok(new_port) => {
            info!("Connected to device {}", port_name);
            // Channel bridges the blocking serial reader to this async task.
            let (tx, rx) = unbounded_future();
            let ingest_task = task::spawn_blocking(move || ingest(tx, new_port));
            loop {
                // Ends when the ingest side closes/drops its sender.
                let packet = match rx.recv().await {
                    Ok(packet) => packet,
                    Err(_) => {
                        error!("Failed to get a packet from the ingest thread");
                        break;
                    }
                };
                // A send failure means the SSE client went away.
                match sender
                    .send("telemetry", serde_json::to_string(&packet)?, None)
                    .await
                {
                    Ok(()) => {}
                    Err(_) => {
                        info!("Client disconnected from event source");
                        break;
                    }
                }
            }
            debug!("Disconnecting from device {}", port_name);
            // cancel() yields Some(result) if the task had already finished.
            if let Some(Err(err)) = ingest_task.cancel().await {
                error!("Ingest task encountered an error: {}", err);
            }
            info!("Disconnected from device {}", port_name);
            Ok(())
        }
        Err(err) => {
            error!("Failed to open serial port {}: {}", port_name, err);
            Err(tide::Error::new(
                StatusCode::ServiceUnavailable,
                anyhow!("failed to open device {}", port_name),
            ))
        }
    }
}
| true |
8f800592e0609876883865e588508de4ec2eda4f
|
Rust
|
seven-two-eight/fuzzy-match
|
/src/marks.rs
|
UTF-8
| 6,029 | 3.3125 | 3 |
[] |
no_license
|
use std::fmt;
use super::trigram;
pub type RecordId = u32;
pub type StudentId = String;
pub type Marks = Vec<u32>;
// Sentinel id for records that have not had marks assigned yet; real ids
// start at FIRST_RECORD_ID and grow monotonically (see set_marks_at_top).
const NULL_RECORD_ID: RecordId = 0;
const FIRST_RECORD_ID: RecordId = 1;
/// Container of student marks.
#[derive(Serialize, Deserialize)]
pub struct MarksRecords {
    // Next id to hand out when a record first receives marks.
    next_record_id: RecordId,
    // One tuple per student: (assigned id or NULL_RECORD_ID, student, marks).
    records: Vec<(RecordId, StudentId, Marks)>
}
impl MarksRecords {
    /// Creates an empty container; ids will start at FIRST_RECORD_ID.
    pub fn new() -> MarksRecords {
        MarksRecords{ next_record_id: FIRST_RECORD_ID, records: Vec::new() }
    }
    /// Number of student records (including those without marks).
    #[allow(unused)]
    pub fn len(&self) -> usize {
        self.records.len()
    }
    #[allow(unused)]
    pub fn is_empty(&self) -> bool {
        self.records.is_empty()
    }
    /// Removes all records. Note: `next_record_id` is NOT reset.
    pub fn clear(&mut self) {
        self.records.clear();
    }
    /// Add student with empty marks.
    /// The new record gets NULL_RECORD_ID until marks are first assigned.
    pub fn add_student (&mut self, student_id: StudentId)
    {
        self.records.push((NULL_RECORD_ID, student_id, vec![]));
    }
    /// Update marks of the record at the top.
    /// Assigns a fresh record id on the first assignment; errors when the
    /// container is empty.
    pub fn set_marks_at_top(&mut self, marks: Marks) -> Result<(), String> {
        if let Some(record) = self.records.get_mut(0) {
            if record.0 == NULL_RECORD_ID {
                record.0 = self.next_record_id;
                self.next_record_id += 1;
            }
            record.2 = marks;
        } else {
            return Err(String::from("no student record"));
        }
        Ok (())
    }
    /// Sort records by descending student id's similarity with argument `s`.
    /// Scores are scaled by -1e5 and truncated to i32 so an ascending sort
    /// yields descending similarity.
    pub fn sort_with(&mut self, s: &str) {
        self.records.sort_by_key( |(_, student_id, _)|
            (trigram::score(student_id, s) * -1e5) as i32
        );
    }
    /// Serializes the whole container to a JSON string.
    pub fn to_json_string(&self) -> Result<String, String> {
        serde_json::to_string(self).map_err(|e|
            format!("failed serialization: {}", e)
        )
    }
    /// Deserializes a container previously produced by `to_json_string`.
    pub fn from_json_str(s: &str) -> Result<MarksRecords, String> {
        serde_json::from_str(s).map_err(|e|
            format!("failed deserializing {}: {}", s, e)
        )
    }
    /// Renders a tab-separated table: header row, then one row per record
    /// with id, student id, total marks, and the individual item marks.
    pub fn export_string(&self) -> String {
        // Joins one record's item marks with tabs.
        let itemize = |marks: &Marks| marks.iter()
            .map(ToString::to_string)
            .collect::<Vec<String>>()
            .join("\t");
        let line = |(record_id, student_id, marks)
                    : &(RecordId, StudentId, Marks)|
            format!
            ( "{}\t{}\t{}\t{}\n"
            , record_id
            , student_id
            , marks.iter().sum::<u32>()
            , itemize(marks)
            );
        format!
            ( "Record Id\tStudent Id\tTotal Marks\tItem Marks\n{}"
            , self.records.iter()
                .map(line)
                .collect::<Vec<String>>()
                .concat()
            )
    }
}
impl fmt::Display for MarksRecords {
    /// Renders one line per record: the record id in a 4-column field (blank
    /// for records that never received marks), the student id in a 24-column
    /// field, and — when marks exist — their sum and the item list.
    ///
    /// Bug fix: the first two `write!` results were silently dropped (no
    /// `?`), so formatter errors were ignored; every write now propagates.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        for (record_id, student_id, marks) in self.records.iter() {
            if *record_id == NULL_RECORD_ID {
                write!(f, "    ")?;
            } else {
                write!(f, "{:<4.4}", record_id)?;
            }
            write!(f, "{:24.24}", student_id)?;
            if !marks.is_empty() {
                let sum: u32 = marks.iter().sum();
                write!(f, " {:>10} = {:?}", sum, marks)?;
            }
            writeln!(f)?;
        }
        Ok (())
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    // len/is_empty/clear bookkeeping.
    #[test]
    fn counts() {
        let mut marks_records = MarksRecords::new();
        assert_eq!(0, marks_records.len());
        assert!(marks_records.is_empty());
        marks_records.add_student(String::from("student A"));
        marks_records.add_student(String::from("student B"));
        assert_eq!(2, marks_records.len());
        assert!(!marks_records.is_empty());
        marks_records.records.clear();
        assert_eq!(0, marks_records.len());
        assert!(marks_records.is_empty());
    }
    // Record ids are assigned on first marks assignment and stay stable
    // across re-sorting and re-assignment.
    #[test]
    fn update() {
        let mut marks_records = MarksRecords::new();
        marks_records.add_student(String::from("student A"));
        marks_records.add_student(String::from("student B"));
        marks_records.set_marks_at_top(vec![1, 1, 1]).unwrap();
        assert!(marks_records.records[0].0 == FIRST_RECORD_ID);
        assert!(marks_records.records[0].1 == "student A");
        assert!(marks_records.records[1].0 == NULL_RECORD_ID);
        marks_records.sort_with("B");
        assert!(marks_records.records[0].1 == "student B");
        marks_records.set_marks_at_top(vec![2, 2, 2]).unwrap();
        assert!(marks_records.records[0].2 != marks_records.records[1].2);
        assert!(marks_records.records[0].0 == FIRST_RECORD_ID + 1);
        marks_records.sort_with("A");
        assert!(marks_records.records[0].1 == "student A");
        marks_records.set_marks_at_top(vec![2, 2, 2]).unwrap();
        assert_eq!(marks_records.records[0].2, marks_records.records[1].2);
        assert_eq!(marks_records.records[0].2, vec![2, 2, 2]);
        assert!(marks_records.records[0].0 == FIRST_RECORD_ID);
        marks_records.set_marks_at_top(vec![3, 3, 3]).unwrap();
        assert_eq!(marks_records.records[0].2, vec![3, 3, 3]);
        assert_eq!(marks_records.records[1].2, vec![2, 2, 2]);
        assert!(marks_records.records[1].0 == FIRST_RECORD_ID + 1);
    }
    // JSON round-trip preserves the records.
    #[test]
    fn serialization() {
        let mut marks_records = MarksRecords::new();
        marks_records.add_student(String::from("student A"));
        marks_records.set_marks_at_top(vec![1, 1, 1]).unwrap();
        marks_records.add_student(String::from("student B"));
        marks_records.sort_with("B");
        marks_records.set_marks_at_top(vec![2, 2, 2]).unwrap();
        let serialized = serde_json::to_string(&marks_records).unwrap();
        println!("MarksRecord\n{}\nserialized into json:\n{}", marks_records, serialized);
        let deserialized: MarksRecords = serde_json::from_str(&serialized).unwrap();
        assert_eq!(marks_records.records, deserialized.records);
    }
}
| true |
d2a4227ba4205c5cb8fa7dda53fd78331933f3e9
|
Rust
|
nullxDEADBEEF/png_secret_message
|
/src/chunk.rs
|
UTF-8
| 6,247 | 3.359375 | 3 |
[] |
no_license
|
use std::convert::TryFrom;
use std::fmt;
use std::io::{BufReader, Read};
use crc::crc32;
use crate::chunk_type::ChunkType;
use crate::{Error, Result};
/// A PNG-style chunk: a type code plus an arbitrary data payload.
#[derive(Debug)]
pub struct Chunk {
    // Spelled `typee` because `type` is a reserved keyword.
    pub typee: ChunkType,
    pub data: Vec<u8>,
}
impl Chunk {
    /// Builds a chunk from a type code and payload bytes.
    pub fn new(typee: ChunkType, data: Vec<u8>) -> Self {
        Self { typee, data }
    }
    /// Payload length in bytes (the `length` field of the wire format).
    fn length(&self) -> u32 {
        self.data.len() as u32
    }
    pub fn chunk_type(&self) -> &ChunkType {
        &self.typee
    }
    pub fn data(&self) -> &[u8] {
        self.data.as_ref()
    }
    // CRC calculated on preeceding bytes in the chunk (chunk type and data)
    // this is always present even if there is no data.
    // used to verify each chunk for corrupted data
    pub fn crc(&self) -> u32 {
        crc32::checksum_ieee(&[&self.typee.name, self.data.as_slice()].concat())
    }
    /// Interprets the payload as UTF-8, erroring on invalid sequences.
    pub fn data_as_string(&self) -> Result<String> {
        let string = String::from_utf8(self.data.clone());
        match string {
            Ok(s) => Ok(s),
            Err(e) => Err(Box::new(e)),
        }
    }
    /// Serializes the chunk in wire order:
    /// big-endian length, type code, payload, big-endian CRC.
    pub fn as_bytes(&self) -> Vec<u8> {
        self.length()
            .to_be_bytes()
            .iter()
            .cloned()
            .chain(self.chunk_type().name.iter().cloned())
            .chain(self.data().iter().cloned())
            .chain(self.crc().to_be_bytes().iter().cloned())
            .collect()
    }
}
impl TryFrom<&[u8]> for Chunk {
    type Error = Error;
    /// Parses a chunk from `length || type || data || crc` wire bytes and
    /// validates the trailing CRC against one recomputed locally.
    ///
    /// Bug fix: the read of the chunk-type bytes used `.unwrap()`, so a
    /// truncated input panicked there instead of returning `Err` like every
    /// other read in this function; it now propagates with `?`.
    fn try_from(bytes: &[u8]) -> Result<Self> {
        let mut reader = BufReader::new(bytes);
        let mut buffer: [u8; 4] = [0; 4];
        // get first 4 bytes to determine length
        reader.read_exact(&mut buffer)?;
        let data_length = u32::from_be_bytes(buffer);
        // get next 4 bytes to determine chunk type
        reader.read_exact(&mut buffer)?;
        let chunk_type = ChunkType::try_from(buffer)?;
        let mut data_buffer = vec![0; data_length as usize];
        reader.read_exact(&mut data_buffer)?;
        let chunk_data = data_buffer;
        // last 4 bytes: the CRC recorded in the stream
        reader.read_exact(&mut buffer)?;
        let received_crc = u32::from_be_bytes(buffer);
        let chunk = Chunk {
            typee: chunk_type,
            data: chunk_data,
        };
        if chunk.crc() == received_crc {
            Ok(chunk)
        } else {
            Err("Invalid chunk".into())
        }
    }
}
impl fmt::Display for Chunk {
    /// Human-readable summary: length, type, payload size, and CRC.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(
            f,
            "Length: {}\nType: {}\nData bytes: {}\nCRC: {}\n",
            self.length(),
            self.chunk_type(),
            self.data.len(),
            self.crc()
        )
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    // Builds a valid wire-format chunk (length, "RuSt", message, known CRC)
    // and parses it via TryFrom.
    fn testing_chunk() -> Chunk {
        let data_length: u32 = 42;
        let chunk_type = "RuSt".as_bytes();
        let message_bytes = "This is where your secret message will be!".as_bytes();
        let crc: u32 = 2882656334;
        let chunk_data: Vec<u8> = data_length
            .to_be_bytes()
            .iter()
            .chain(chunk_type.iter())
            .chain(message_bytes.iter())
            .chain(crc.to_be_bytes().iter())
            .copied()
            .collect();
        Chunk::try_from(chunk_data.as_ref()).unwrap()
    }
    #[test]
    fn test_chunk_length() {
        let chunk = testing_chunk();
        assert_eq!(chunk.length(), 42);
    }
    #[test]
    fn test_chunk_type() {
        let chunk = testing_chunk();
        assert_eq!(chunk.chunk_type().to_string(), String::from("RuSt"));
    }
    #[test]
    fn test_chunk_string() {
        let chunk = testing_chunk();
        let chunk_string = chunk.data_as_string().unwrap();
        let expected_chunk_string = String::from("This is where your secret message will be!");
        assert_eq!(chunk_string, expected_chunk_string);
    }
    #[test]
    fn test_chunk_crc() {
        let chunk = testing_chunk();
        assert_eq!(chunk.crc(), 2882656334);
    }
    // Full round-trip: all accessors agree with the wire bytes.
    #[test]
    fn test_valid_chunk_from_bytes() {
        let data_length: u32 = 42;
        let chunk_type = "RuSt".as_bytes();
        let message_bytes = "This is where your secret message will be!".as_bytes();
        let crc: u32 = 2882656334;
        let chunk_data: Vec<u8> = data_length
            .to_be_bytes()
            .iter()
            .chain(chunk_type.iter())
            .chain(message_bytes.iter())
            .chain(crc.to_be_bytes().iter())
            .copied()
            .collect();
        let chunk = Chunk::try_from(chunk_data.as_ref()).unwrap();
        let chunk_string = chunk.data_as_string().unwrap();
        let expected_chunk_string = String::from("This is where your secret message will be!");
        assert_eq!(chunk.length(), 42);
        assert_eq!(chunk.chunk_type().to_string(), String::from("RuSt"));
        assert_eq!(chunk_string, expected_chunk_string);
        assert_eq!(chunk.crc(), 2882656334);
    }
    // A deliberately wrong CRC (off by one) must make parsing fail.
    #[test]
    fn test_invalid_chunk_from_bytes() {
        let data_length: u32 = 42;
        let chunk_type = "RuSt".as_bytes();
        let message_bytes = "This is where your secret message will be!".as_bytes();
        let crc: u32 = 2882656333;
        let chunk_data: Vec<u8> = data_length
            .to_be_bytes()
            .iter()
            .chain(chunk_type.iter())
            .chain(message_bytes.iter())
            .chain(crc.to_be_bytes().iter())
            .copied()
            .collect();
        let chunk = Chunk::try_from(chunk_data.as_ref());
        assert!(chunk.is_err());
    }
    // Smoke test that Display is implemented for Chunk.
    #[test]
    pub fn test_chunk_trait_impls() {
        let data_length: u32 = 42;
        let chunk_type = "RuSt".as_bytes();
        let message_bytes = "This is where your secret message will be!".as_bytes();
        let crc: u32 = 2882656334;
        let chunk_data: Vec<u8> = data_length
            .to_be_bytes()
            .iter()
            .chain(chunk_type.iter())
            .chain(message_bytes.iter())
            .chain(crc.to_be_bytes().iter())
            .copied()
            .collect();
        let chunk: Chunk = TryFrom::try_from(chunk_data.as_ref()).unwrap();
        let _chunk_string = format!("{}", chunk);
    }
}
| true |
704ee9dc9475f7240711f9515e8860bbe57f47d5
|
Rust
|
TylerDMielke/rust_lexer
|
/src/token.rs
|
UTF-8
| 3,245 | 4.0625 | 4 |
[] |
no_license
|
/// A single lexed token: its kind, the source text it was read from, and the
/// 1-based line it appeared on.
pub struct Token {
    token_type: TokenType,
    lexeme: String,
    line: usize,
}
impl Token {
    /// Constructs a token from its kind, source lexeme, and line number.
    pub fn new(token_type: TokenType, lexeme: String, line: usize) -> Token {
        Token {
            token_type,
            lexeme,
            line,
        }
    }
    /// Debug rendering: "Type: <type>, Lexeme: <lexeme>, Line: <line>".
    pub fn as_str(&self) -> String {
        format!("Type: {0}, Lexeme: {1}, Line: {2}", self.token_type_as_str(), self.lexeme, self.line)
    }
    // Maps each TokenType to a display string. NOTE(review): implementing
    // `std::fmt::Display` for TokenType would let format! do this directly.
    // NOTE(review): LeftBrace/RightBrace map to "[" and "]" (bracket
    // characters) while the names say "brace" ("{"/"}") — confirm intended.
    fn token_type_as_str(&self) -> &str {
        match self.token_type {
            TokenType::RightParen => ")",
            TokenType::LeftParen => "(",
            TokenType::LeftBrace => "[",
            TokenType::RightBrace => "]",
            TokenType::Comma => ",",
            TokenType::Dot => ".",
            TokenType::Minus => "-",
            TokenType::Plus => "+",
            TokenType::SemiColon => ";",
            TokenType::Slash => "/",
            TokenType::Star => "*",
            TokenType::Bang => "!",
            TokenType::BangEqual => "!=",
            TokenType::Equal => "=",
            TokenType::EqualEqual => "==",
            TokenType::Greater => ">",
            TokenType::GreatEqual => ">=",
            TokenType::Less => "<",
            TokenType::LessEqual => "<=",
            TokenType::Identifier => "identifier",
            TokenType::String => "string",
            TokenType::Number => "number",
            TokenType::And => "and",
            TokenType::Class => "class",
            TokenType::Else => "else",
            TokenType::False => "false",
            TokenType::Fun => "function",
            TokenType::For => "for",
            TokenType::If => "if",
            TokenType::Nil => "nil",
            TokenType::Or => "or",
            TokenType::Print => "print",
            TokenType::Return => "return",
            TokenType::Super => "super",
            TokenType::This => "this",
            TokenType::True => "true",
            TokenType::Var => "var",
            TokenType::While => "while",
            TokenType::Comment => "comment",
            TokenType::WhiteSpace => "ws",
            TokenType::Unknown => "unknown",
        }
    }
}
/// Every token kind the lexer can emit (Lox-style grammar).
#[derive(Debug, PartialEq, Eq)]
pub enum TokenType {
    // Single-character tokens.
    LeftParen,
    RightParen,
    LeftBrace,
    RightBrace,
    Comma,
    Dot,
    Minus,
    Plus,
    SemiColon,
    Slash,
    Star,
    // One or two character tokens.
    Bang,
    BangEqual,
    Equal,
    EqualEqual,
    Greater,
    GreatEqual,
    Less,
    LessEqual,
    Comment,
    // Literals.
    Identifier,
    String,
    Number,
    //Keywords.
    And,
    Class,
    Else,
    False,
    Fun,
    For,
    If,
    Nil,
    Or,
    Print,
    Return,
    Super,
    This,
    True,
    Var,
    While,
    // White space
    WhiteSpace,
    //Unknown token type
    Unknown,
}
#[cfg(test)]
mod test {
    use super::Token;
    use super::TokenType;
    // as_str renders type/lexeme/line in the documented fixed format.
    #[test]
    fn test_as_str() {
        let token = Token::new(TokenType::String, String::from("\"string\""), 1);
        let expected_string = "Type: string, Lexeme: \"string\", Line: 1";
        assert_eq!(expected_string, token.as_str());
    }
}
ea23335b17e963a4d4a5b267043c2aa1dace414d
|
Rust
|
imeka/trk-io
|
/tests/array_sequence.rs
|
UTF-8
| 4,838 | 2.921875 | 3 |
[
"BSD-2-Clause"
] |
permissive
|
use trk_io::{ArraySequence, Point};
// Fixture: three streamlines of 2, 3, and 3 points respectively.
fn get_toy_streamlines() -> ArraySequence<Point> {
    ArraySequence::new(
        vec![2, 3, 3],
        vec![
            Point::new(1.0, 0.0, 0.0), // 1
            Point::new(2.0, 0.0, 0.0), // 1
            Point::new(0.0, 1.0, 0.0), // 2
            Point::new(0.0, 2.0, 0.0), // 2
            Point::new(0.0, 3.0, 0.0), // 2
            Point::new(0.0, 0.0, 1.0), // 3
            Point::new(0.0, 0.0, 2.0), // 3
            Point::new(0.0, 0.0, 3.0), // 3
        ],
    )
}
// Offsets are the cumulative sums of the per-array lengths.
#[test]
fn test_integers() {
    let arr = ArraySequence::new(vec![2, 3, 2, 1], vec![4, 5, 6, 7, 8, 9, 10, 11]);
    assert_eq!(arr.len(), 4);
    assert_eq!(arr.offsets, vec![0, 2, 5, 7, 8]);
}
#[test]
fn test_construction() {
    let streamlines = get_toy_streamlines();
    assert_eq!(streamlines.len(), 3);
    assert_eq!(streamlines.offsets, vec![0, 2, 5, 8]);
}
// Lengths and data must agree exactly: too few points panics...
#[test]
#[should_panic]
fn test_new_not_enough() {
    ArraySequence::new(vec![2], vec![Point::new(1.0, 0.0, 0.0)]);
}
// ...and so does too many.
#[test]
#[should_panic]
fn test_new_too_much() {
    ArraySequence::new(
        vec![2],
        vec![Point::new(1.0, 0.0, 0.0), Point::new(1.0, 0.0, 0.0), Point::new(1.0, 0.0, 0.0)],
    );
}
// Pushed elements only count toward len() after end_push() closes the array.
#[test]
fn test_empty() {
    let mut arr = ArraySequence::empty();
    assert_eq!(arr.is_empty(), true);
    assert_eq!(arr.len(), 0);
    for _ in 0..2 {
        arr.push(1);
        assert_eq!(arr.is_empty(), false);
        assert_eq!(arr.len(), 0);
    }
    arr.end_push();
    assert_eq!(arr.is_empty(), false);
    assert_eq!(arr.len(), 1);
}
// Iteration yields each sub-array as a slice; exhausted iterator stays None.
#[test]
fn test_iterator() {
    let streamlines = get_toy_streamlines();
    let mut iter = streamlines.into_iter();
    assert_eq!(iter.next().unwrap(), [Point::new(1.0, 0.0, 0.0), Point::new(2.0, 0.0, 0.0)]);
    assert_eq!(
        iter.next().unwrap(),
        [Point::new(0.0, 1.0, 0.0), Point::new(0.0, 2.0, 0.0), Point::new(0.0, 3.0, 0.0)]
    );
    assert_eq!(
        iter.next().unwrap(),
        [Point::new(0.0, 0.0, 1.0), Point::new(0.0, 0.0, 2.0), Point::new(0.0, 0.0, 3.0)]
    );
    assert_eq!(iter.next(), None);
    assert_eq!(iter.next(), None);
}
#[test]
fn test_reverse_iterator() {
    let streamlines = get_toy_streamlines();
    let lengths = streamlines.iter().rev().map(|streamline| streamline.len()).collect::<Vec<_>>();
    assert_eq!(lengths, vec![3, 3, 2]);
}
// iter_mut allows in-place mutation; here every even-indexed streamline is
// zeroed and the odd one is left untouched.
#[test]
fn test_iterator_mut() {
    let p0 = Point::origin();
    let mut streamlines = get_toy_streamlines();
    for (i, streamline) in streamlines.iter_mut().enumerate() {
        for p in streamline {
            if i % 2 == 0 {
                *p = p0;
            }
        }
    }
    let mut iter = streamlines.into_iter();
    assert_eq!(iter.next().unwrap(), [p0, p0]);
    assert_eq!(
        iter.next().unwrap(),
        [Point::new(0.0, 1.0, 0.0), Point::new(0.0, 2.0, 0.0), Point::new(0.0, 3.0, 0.0)]
    );
    assert_eq!(iter.next().unwrap(), [p0, p0, p0]);
    assert_eq!(iter.next(), None);
    assert_eq!(iter.next(), None);
}
// Mixing push/end_push with extend and extend_from_slice: each call appends
// a new sub-array and updates the offsets accordingly.
#[test]
fn test_dynamic() {
    let mut arr = ArraySequence::empty();
    for i in 0..10 {
        assert_eq!(arr.nb_push_done(), i);
        arr.push(i);
        assert_eq!(arr.nb_push_done(), i + 1);
    }
    arr.end_push();
    assert_eq!(arr.nb_push_done(), 0);
    assert_eq!(arr.len(), 1);
    assert_eq!(arr.length_of_array(0), 10);
    assert_eq!(arr[0].len(), 10);
    assert_eq!(arr.offsets, vec![0, 10]);
    arr.extend(vec![11, 12, 13, 14, 15]);
    assert_eq!(arr.len(), 2);
    assert_eq!(arr.length_of_array(0), 10);
    assert_eq!(arr[0].len(), 10);
    assert_eq!(arr.length_of_array(1), 5);
    assert_eq!(arr[1].len(), 5);
    assert_eq!(arr.offsets, vec![0, 10, 15]);
    arr.extend_from_slice(&[20, 21, 22, 23]);
    assert_eq!(arr.len(), 3);
    assert_eq!(arr[2].len(), 4);
    assert_eq!(arr.offsets, vec![0, 10, 15, 19]);
}
#[test]
fn test_empty_push() {
    let mut arr = ArraySequence::<f64>::empty();
    assert_eq!(arr.len(), 0);
    assert_eq!(arr.offsets, vec![0]);
    // An `end_push` without any `push` should do nothing
    arr.end_push();
    arr.end_push();
    assert_eq!(arr.len(), 0);
    assert_eq!(arr.offsets, vec![0]);
}
// filter keeps only sub-arrays matching the predicate (length 3 here) and
// leaves the original sequence intact.
#[test]
fn test_filter() {
    let p = Point::new(1.0, 1.0, 1.0);
    let arr = ArraySequence::new(
        vec![2, 3, 2, 3],
        vec![
            p * 1.0,
            p * 2.0,
            p * 2.0,
            p * 3.0,
            p * 4.0,
            p * 3.0,
            p * 4.0,
            p * 4.0,
            p * 5.0,
            p * 6.0,
        ],
    );
    let filtered = arr.filter(&mut |arr: &[Point]| arr.len() == 3);
    assert_eq!(filtered.len(), 2);
    assert_eq!(filtered[0], [p * 2.0, p * 3.0, p * 4.0]);
    assert_eq!(filtered[1], [p * 4.0, p * 5.0, p * 6.0]);
    // Ensure that arr is still usable
    assert_eq!(arr.len(), 4);
}
| true |
37ad0505e1c72cf54d07ce302d2023865b83347f
|
Rust
|
adlerosn/osu-beatmapset-completion
|
/src/pathtree_stylizer/mod.rs
|
UTF-8
| 5,836 | 2.890625 | 3 |
[] |
no_license
|
use std::collections::BTreeMap as Map;
use std::path::PathBuf;
/// A tree of path segments, each optionally carrying a data value and ANSI
/// color pairs used when rendering the tree as text.
#[derive(Debug, PartialEq, Eq, Clone, new)]
pub struct PathTreeStylized<T>
where
    T: Clone + PartialEq + Eq + PartialOrd + Ord + std::fmt::Debug,
{
    // One path component (e.g. a directory or file name).
    segment: String,
    // Value attached to this node; None until set or filled from children.
    data: Option<T>,
    // Per-value (prefix, suffix) color strings, keyed by data value.
    colors: Option<Map<T, (String, String)>>,
    // (prefix, suffix) color strings for the tree scaffolding itself.
    base_color: Option<(String, String)>,
    children: Vec<PathTreeStylized<T>>,
}
impl<T> PathTreeStylized<T>
where
    T: Clone + PartialEq + Eq + PartialOrd + Ord + std::fmt::Debug,
{
    // Finds the direct child with the given segment name, if any.
    // NOTE(review): `.filter(...).next()` is equivalent to `.find(...)`.
    fn get_child_mut(&mut self, segment: &String) -> Option<&mut PathTreeStylized<T>> {
        self.children
            .iter_mut()
            .filter(|x| x.segment == *segment)
            .next()
    }
    /// Inserts `data` at the node addressed by the remaining `path` segments,
    /// creating intermediate nodes (inheriting this node's colors) as needed.
    /// When the path is exhausted, the data lands on the current node.
    fn add_child(&mut self, path: &mut dyn Iterator<Item = String>, data: T) {
        if let Some(segment) = path.next() {
            let child_opt = self.get_child_mut(&segment);
            if let Some(child) = child_opt {
                child.add_child(path, data);
            } else {
                let mut child = PathTreeStylized::new(
                    segment.clone(),
                    None,
                    self.colors.clone(),
                    self.base_color.clone(),
                    vec![],
                );
                child.add_child(path, data);
                self.children.push(child);
            }
        } else {
            self.data = Some(data);
        }
    }
    /// Post-order pass: any node without data adopts the greatest data value
    /// among its direct children (after those have been filled themselves).
    pub fn fill_data_greatest(&mut self) {
        for child in self.children.iter_mut() {
            child.fill_data_greatest();
        }
        if self.data == None {
            let mut children_values: Vec<&T> = self
                .children
                .iter()
                .filter_map(|x| x.data.as_ref())
                .collect();
            children_values.sort();
            self.data = children_values.last().and_then(|x| Some((*x).clone()));
        }
    }
    /// Recursively sorts children (ordering from PartialOrd/Ord below).
    pub fn sort(&mut self) {
        for child in self.children.iter_mut() {
            child.sort();
        }
        self.children.sort();
    }
    /// Recursively reverses the child order at every level.
    pub fn reverse(&mut self) {
        for child in self.children.iter_mut() {
            child.reverse();
        }
        self.children.reverse();
    }
    /// Replaces the color configuration on this node and the whole subtree.
    pub fn set_colors(
        &mut self,
        colors: Option<Map<T, (String, String)>>,
        base_color: Option<(String, String)>,
    ) {
        for child in self.children.iter_mut() {
            child.set_colors(colors.clone(), base_color.clone());
        }
        self.colors = colors;
        self.base_color = base_color;
    }
    // Renders this node plus its subtree. `parents` is the root-to-current
    // chain, each entry flagged with "is last sibling", used to draw the
    // connector columns ('|' for open branches, '`' before the last child)
    // under each ancestor's segment width. The node's data (if any) is
    // appended after " +=> " using its per-value color pair.
    fn printable_string_(&self, parents: Vec<(bool, &PathTreeStylized<T>)>) -> String {
        let mut sb = "".to_string();
        let current: &PathTreeStylized<T> = parents.last().unwrap().1;
        let (base_color_start, base_color_end) = current
            .base_color
            .clone()
            .unwrap_or(("".to_string(), "".to_string()));
        sb.push_str(&base_color_start);
        for (seq, (_, parent)) in parents.iter().enumerate() {
            if seq + 1 != parents.len() {
                let parent_is_last = parents[seq + 1].0;
                // Pad to the parent's segment width (+2 for the "- " marker).
                sb.push_str(&format!("{} ", " ".repeat(parent.segment.len() + 2)));
                if parent_is_last {
                    if seq + 2 >= parents.len() {
                        sb.push('`');
                    } else {
                        sb.push(' ');
                    }
                } else {
                    sb.push('|');
                }
            }
        }
        sb.push_str(&format!("- {}", current.segment));
        sb.push_str(" +=> ");
        sb.push_str(&base_color_end);
        if let Some(data) = &current.data {
            let color: (String, String) = current
                .colors
                .clone()
                .unwrap_or(Map::new())
                .get(&data)
                .and_then(|x| Some(x.clone()))
                .unwrap_or(("".to_string(), "".to_string()));
            sb.push_str(&format!("{}{:?}{}", color.0, &data, color.1));
        }
        sb.push_str("\n");
        for (seq, child) in current.children.iter().enumerate() {
            let is_last = seq + 1 == current.children.len();
            let mut parent_with_me = parents.clone();
            parent_with_me.push((is_last, &child));
            sb.push_str(&child.printable_string_(parent_with_me));
        }
        return sb;
    }
    // Entry point: the root is always treated as a "last" sibling.
    fn printable_string(&self) -> String {
        self.printable_string_(vec![(true, self)])
    }
}
impl<T> From<&Vec<(PathBuf, T)>> for PathTreeStylized<T>
where
    T: Clone + PartialEq + Eq + PartialOrd + Ord + std::fmt::Debug,
{
    /// Builds a tree from (path, data) pairs: each path is split into its
    /// components and inserted under an empty, colorless root node.
    fn from(v: &Vec<(PathBuf, T)>) -> Self {
        let mut root_tree = Self::new("".to_string(), None, None, None, vec![]);
        for (path, data) in v.iter() {
            root_tree.add_child(
                &mut path.iter().map(|x| x.to_str().unwrap().to_string()),
                (*data).clone(),
            );
        }
        root_tree
    }
}
impl<T> std::fmt::Display for PathTreeStylized<T>
where
    T: Clone + PartialEq + Eq + PartialOrd + Ord + std::fmt::Debug,
{
    /// Writes the full ASCII-art rendering of the tree.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.write_str(&self.printable_string())
    }
}
impl<T> PartialOrd for PathTreeStylized<T>
where
    T: Clone + PartialEq + Eq + PartialOrd + Ord + std::fmt::Debug,
{
    /// Orders nodes by payload first, breaking ties on the segment name.
    fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
        let by_data = self.data.cmp(&other.data);
        Some(by_data.then_with(|| self.segment.cmp(&other.segment)))
    }
}
impl<T> Ord for PathTreeStylized<T>
where
    T: Clone + PartialEq + Eq + PartialOrd + Ord + std::fmt::Debug,
{
    /// Total order: by payload first, then by segment name.
    ///
    /// Implemented directly instead of `self.partial_cmp(other).unwrap()`
    /// so the total order does not route through an `unwrap` of an
    /// always-`Some` value.
    fn cmp(&self, other: &Self) -> std::cmp::Ordering {
        self.data
            .cmp(&other.data)
            .then_with(|| self.segment.cmp(&other.segment))
    }
}
| true |
089ecd5e4e6f32792068e4aa1b2668ff390e7d6c
|
Rust
|
ChuckBates/AdventOfCode2020
|
/src/bin/tests/day_9_tests.rs
|
UTF-8
| 3,219 | 3.40625 | 3 |
[] |
no_license
|
use super::*;
#[test]
fn when_getting_summation_values_and_the_previous_five_have_only_one_option() {
    // 5 + 4 is the only pair in the window that sums to 9.
    let window = vec![1, 2, 3, 4, 5];
    assert_eq!(get_summation_values(window, 9), vec![(5, 4)]);
}

#[test]
fn when_getting_summation_values_and_the_previous_five_have_only_one_option_that_is_the_same_number_twice() {
    // 5 + 5 would reach 10, but the same entry may not be used twice.
    let window = vec![11, 12, 5, 14, 15];
    assert_eq!(get_summation_values(window, 10), vec![]);
}

#[test]
fn when_getting_summation_values_and_the_previous_five_have_two_options() {
    // Both 5 + 1 and 4 + 2 sum to 6, reported in that order.
    let window = vec![1, 2, 3, 4, 5];
    assert_eq!(get_summation_values(window, 6), vec![(5, 1), (4, 2)]);
}

#[test]
fn when_getting_summation_values_and_the_previous_five_have_no_options() {
    // No pair in the window can reach 20.
    let window = vec![1, 2, 3, 4, 5];
    assert_eq!(get_summation_values(window, 20), vec![]);
}
#[test]
fn when_getting_first_non_sum_entry_for_preamble_five_and_look_back_five() {
    // 10 is the first entry with no summing pair among the 5 values before it.
    let sequence = vec![1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 78, 12];
    let look_back_length = 5;
    let preamble_length = 5;
    assert_eq!(
        get_first_non_sum_entry(sequence, look_back_length, preamble_length),
        10
    );
}

#[test]
fn when_getting_first_non_sum_entry_for_example_input() {
    // Worked example from the day 9 puzzle text: 127 is the first invalid entry.
    let sequence = vec![
        35, 20, 15, 25, 47, 40, 62, 55, 65, 95, 102, 117, 150, 182, 127, 219, 299, 277, 309, 576,
    ];
    let look_back_length = 5;
    let preamble_length = 5;
    assert_eq!(
        get_first_non_sum_entry(sequence, look_back_length, preamble_length),
        127
    );
}

#[test]
fn when_executing_part_one_for_example_input() {
    // End-to-end over the example input file.
    let input_file_path = "src/bin/tests/inputs/day_9/input_example.txt";
    assert_eq!(execute_part_one(input_file_path, 5, 5), 127);
}
#[test]
fn when_getting_series_summation_equaling_target_and_there_is_no_valid_series() {
    // No contiguous run of the sequence sums to 45.
    let input_sequence = vec![1, 2, 3, 4, 5];
    assert_eq!(get_series_summation_equaling_target(input_sequence, 45), vec![]);
}

#[test]
fn when_getting_series_summation_equaling_target_and_there_is_a_valid_series() {
    // 2 + 3 + 4 is the contiguous run summing to 9.
    let input_sequence = vec![1, 2, 3, 4, 5];
    assert_eq!(
        get_series_summation_equaling_target(input_sequence, 9),
        vec![2, 3, 4]
    );
}

#[test]
fn when_getting_series_summation_equaling_target_and_there_is_a_valid_series_example() {
    // 15 + 25 + 47 + 40 == 127, per the puzzle's worked example.
    let input_sequence = vec![
        35, 20, 15, 25, 47, 40, 62, 55, 65, 95, 102, 117, 150, 182, 127, 219, 299, 277, 309, 576,
    ];
    assert_eq!(
        get_series_summation_equaling_target(input_sequence, 127),
        vec![15, 25, 47, 40]
    );
}

#[test]
fn when_executing_part_two_example() {
    // End-to-end: smallest + largest of the run summing to 127 is 15 + 47 = 62.
    let input_file_path = "src/bin/tests/inputs/day_9/input_example.txt";
    assert_eq!(execute_part_two(input_file_path, 127), 62);
}
| true |
06585c4b1392f3c222f6e225135bc5061eefdb5f
|
Rust
|
s3bk/rust-phf
|
/phf_generator/benches/benches.rs
|
UTF-8
| 2,430 | 2.5625 | 3 |
[
"MIT"
] |
permissive
|
use criterion::*;
use rand::distributions::Standard;
use rand::rngs::SmallRng;
use rand::{Rng, SeedableRng};
use phf_generator::generate_hash;
/// Produces `len` pseudo-random keys from a fixed seed, so every benchmark
/// run hashes the same input.
fn gen_vec(len: usize) -> Vec<u64> {
    let rng = SmallRng::seed_from_u64(0xAAAA_AAAA_AAAA_AAAA);
    rng.sample_iter(Standard).take(len).collect()
}
/// Benchmark driver: builds the key set once, then times PHF generation.
fn bench_hash(b: &mut Bencher, len: &usize) {
    let keys = gen_vec(*len);
    b.iter(|| generate_hash(&keys))
}
// Each driver below benchmarks PHF generation over one band of input sizes.
fn gen_hash_small(c: &mut Criterion) {
    c.bench_function_over_inputs("gen_hash_small", bench_hash, vec![0, 1, 2, 5, 10, 25, 50, 75]);
}

fn gen_hash_med(c: &mut Criterion) {
    c.bench_function_over_inputs("gen_hash_medium", bench_hash, vec![100, 250, 500, 1000, 2500, 5000, 7500]);
}

fn gen_hash_large(c: &mut Criterion) {
    c.bench_function_over_inputs("gen_hash_large", bench_hash, vec![10_000, 25_000, 50_000, 75_000]);
}

fn gen_hash_xlarge(c: &mut Criterion) {
    c.bench_function_over_inputs("gen_hash_xlarge", bench_hash, vec![100_000, 250_000, 500_000, 750_000, 1_000_000]);
}
criterion_group!(benches, gen_hash_small, gen_hash_med, gen_hash_large, gen_hash_xlarge);
// Without the "rayon" feature only the serial group runs; with it, the
// parallel group from the `rayon` module below is included as well.
#[cfg(not(feature = "rayon"))]
criterion_main!(benches);
#[cfg(feature = "rayon")]
criterion_main!(benches, rayon::benches);
/// Parallel counterparts of the serial benchmarks, compiled only when the
/// "rayon" feature is enabled. Same input bands, same fixed-seed key sets.
#[cfg(feature = "rayon")]
mod rayon {
    use criterion::*;

    use phf_generator::generate_hash_rayon;

    use super::gen_vec;

    /// Parallel benchmark driver: key set built once, generation timed.
    fn bench_hash(b: &mut Bencher, len: &usize) {
        let keys = gen_vec(*len);
        b.iter(|| generate_hash_rayon(&keys))
    }

    fn gen_hash_small(c: &mut Criterion) {
        c.bench_function_over_inputs("gen_hash_small_rayon", bench_hash, vec![0, 1, 2, 5, 10, 25, 50, 75]);
    }

    fn gen_hash_med(c: &mut Criterion) {
        c.bench_function_over_inputs("gen_hash_medium_rayon", bench_hash, vec![100, 250, 500, 1000, 2500, 5000, 7500]);
    }

    fn gen_hash_large(c: &mut Criterion) {
        c.bench_function_over_inputs("gen_hash_large_rayon", bench_hash, vec![10_000, 25_000, 50_000, 75_000]);
    }

    fn gen_hash_xlarge(c: &mut Criterion) {
        c.bench_function_over_inputs("gen_hash_xlarge_rayon", bench_hash, vec![100_000, 250_000, 500_000, 750_000, 1_000_000]);
    }

    criterion_group!(benches, gen_hash_small, gen_hash_med, gen_hash_large, gen_hash_xlarge);
}
| true |
0311e7363411cfd3f75f89e9c0e88af56ad8aa77
|
Rust
|
weworld/rusty-leetcode
|
/src/tree_tag/binary_tree_vertical_order_traversal_314.rs
|
UTF-8
| 3,117 | 3.234375 | 3 |
[
"WTFPL"
] |
permissive
|
use crate::utils::tree::TreeNode;
/*
* @lc app=leetcode.cn id=314 lang=rust
*
* [314] 二叉树的垂直遍历
*/
// @lc code=start
// Definition for a binary tree node.
// #[derive(Debug, PartialEq, Eq)]
// pub struct TreeNode {
// pub val: i32,
// pub left: Option<Rc<RefCell<TreeNode>>>,
// pub right: Option<Rc<RefCell<TreeNode>>>,
// }
//
// impl TreeNode {
// #[inline]
// pub fn new(val: i32) -> Self {
// TreeNode {
// val,
// left: None,
// right: None
// }
// }
// }
use std::cell::RefCell;
use std::collections::{ VecDeque, hash_map::HashMap };
use std::rc::Rc;
impl Solution {
pub fn vertical_order(root: Option<Rc<RefCell<TreeNode>>>) -> Vec<Vec<i32>> {
let mut levels: HashMap<i32, Vec<i32>> = HashMap::new();
let mut queue = VecDeque::new();
queue.push_back((0, root));
while let Some((level, node_op)) = queue.pop_front() {
if let Some(node_ref) = node_op {
let mut node = node_ref.borrow_mut();
let value = node.val;
levels
.entry(level)
.and_modify(|arr| {
arr.push(value);
})
.or_insert(vec![value]);
queue.push_back((level - 1, node.left.take()));
queue.push_back((level + 1, node.right.take()));
}
}
let mut ret = levels
.into_iter()
.collect::<Vec<(i32, Vec<i32>)>>();
ret.sort_by_key(|(k,_)| *k);
ret.into_iter().map(|(_, v)| v).collect()
}
}
// @lc code=end
struct Solution;
#[cfg(test)]
mod test {
use super::*;
use crate::{tree_leaf, tree_node};
#[test]
fn test_binary_tree_vertical_order_traversal_1() {
let tree = tree_node!(
3,
tree_leaf!(9),
tree_node!(20, tree_leaf!(15), tree_leaf!(7))
);
assert_eq!(
Solution::vertical_order(tree),
vec![vec![9], vec![3, 15], vec![20], vec![7]]
);
}
#[test]
fn test_binary_tree_vertical_order_traversal_2() {
let tree = tree_node!(
3,
tree_node!(9, tree_leaf!(4), tree_leaf!(0)),
tree_node!(8, tree_leaf!(1), tree_leaf!(7))
);
assert_eq!(
Solution::vertical_order(tree),
vec![vec![4], vec![9], vec![3, 0, 1], vec![8], vec![7]]
);
}
#[test]
fn test_binary_tree_vertical_order_traversal_3() {
let tree = tree_node!(
3,
tree_node!(9, tree_leaf!(4), tree_node!(0, None, tree_leaf!(2))),
tree_node!(8, tree_node!(1, tree_leaf!(5), None), tree_leaf!(7))
);
assert_eq!(
Solution::vertical_order(tree),
vec![vec![4], vec![9, 5], vec![3, 0, 1], vec![8, 2], vec![7]]
);
}
#[test]
fn test_binary_tree_vertical_order_traversal_4() {
let tree = None;
assert_eq!(
Solution::vertical_order(tree),
Vec::<Vec<i32>>::new()
);
}
}
| true |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.