blob_id (string, 40 chars) | language (1 class) | repo_name (string, 5-140 chars) | path (string, 5-183 chars) | src_encoding (6 values) | length_bytes (int64, 12-5.32M) | score (float64, 2.52-4.94) | int_score (int64, 3-5) | detected_licenses (list, 0-47 entries) | license_type (2 values) | text (string, 12-5.32M chars) | download_success (bool, 1 class)
---|---|---|---|---|---|---|---|---|---|---|---
b8e0c27e1e529aec58869c3bf2597e338eac1241 | Rust | tustvold/rust-playground | /lib/jwt/src/model.rs | UTF-8 | 2,968 | 2.59375 | 3 | [] | no_license |
use std::collections::{HashMap, HashSet};
use std::convert::TryInto;
use std::hash::Hash;
use std::str::FromStr;
use chrono::{DateTime, Utc};
use ring::signature;
use ring::signature::RsaPublicKeyComponents;
use serde::{Deserialize, Serialize};
use strum_macros::{AsRefStr, EnumString};
use crate::tag;
#[derive(Serialize, Deserialize)]
pub struct Jwk {
pub kty: String,
pub kid: String,
pub n: String,
pub e: String,
#[serde(rename = "use")]
pub u: String,
}
impl Jwk {
pub(crate) fn new(kid: &str, key: &signature::RsaSubjectPublicKey) -> Jwk {
let n = base64::encode_config(
key.modulus().big_endian_without_leading_zero(),
base64::URL_SAFE_NO_PAD,
);
let e = base64::encode_config(
key.exponent().big_endian_without_leading_zero(),
base64::URL_SAFE_NO_PAD,
);
Jwk {
kty: "RSA".to_string(),
u: "sig".to_string(),
kid: kid.to_string(),
n,
e,
}
}
}
#[derive(Serialize, Deserialize)]
pub struct Jwks {
pub keys: Vec<Jwk>,
}
pub type PublicKey = RsaPublicKeyComponents<Vec<u8>>;
impl TryInto<HashMap<String, PublicKey>> for Jwks {
type Error = base64::DecodeError;
fn try_into(self) -> Result<HashMap<String, PublicKey>, Self::Error> {
let mut map = HashMap::new();
for key in self.keys {
map.insert(
key.kid,
PublicKey {
n: base64::decode_config(&key.n, base64::URL_SAFE_NO_PAD)?,
e: base64::decode_config(&key.e, base64::URL_SAFE_NO_PAD)?,
},
);
}
Ok(map)
}
}
#[derive(Serialize, Deserialize)]
pub struct JwtHeader {
pub alg: String,
pub typ: String,
pub kid: String,
pub jku: String,
}
#[derive(Serialize, Deserialize)]
pub struct JwtSerializedClaims {
pub exp: DateTime<Utc>,
pub iat: DateTime<Utc>,
pub cid: String,
#[serde(skip_serializing_if = "Option::is_none")]
pub sub: Option<String>,
pub scopes: String,
}
pub struct JwtClaims<S> {
pub exp: DateTime<Utc>,
pub iat: DateTime<Utc>,
pub cid: String,
pub sub: Option<String>,
pub scopes: HashSet<S>,
}
impl<S> TryInto<JwtClaims<S>> for JwtSerializedClaims
where
S: Sized + FromStr + Hash + Eq,
{
type Error = S::Err;
fn try_into(self) -> Result<JwtClaims<S>, Self::Error> {
Ok(JwtClaims {
exp: self.exp,
iat: self.iat,
cid: self.cid,
sub: self.sub,
scopes: tag::parse_space_delimited(&self.scopes)?,
})
}
}
#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq, AsRefStr, EnumString, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
#[strum(serialize_all = "snake_case")]
pub enum Scope {
Superuser,
OfflineAccess,
}
#[allow(dead_code)]
pub type DefaultClaims = JwtClaims<Scope>;
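// Editor's sketch (not part of the original file; assumes serde_json is available alongside
// serde): turning a fetched JWKS document into the kid -> PublicKey map via the TryInto
// impl defined above.
#[allow(dead_code)]
fn example_load_keys(jwks_body: &str) -> Result<HashMap<String, PublicKey>, Box<dyn std::error::Error>> {
    let jwks: Jwks = serde_json::from_str(jwks_body)?;
    let keys: HashMap<String, PublicKey> = jwks.try_into()?;
    Ok(keys)
}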
| true |
99c56c8acaf04ed931ff98f53d921c6a16a47f6a | Rust | dejanb/drogue-influxdb-function | /src/handler.rs | UTF-8 | 3,657 | 2.71875 | 3 | [] | no_license |
use crate::config::{Path, Processor};
use crate::error::ServiceError;
use actix_web::{web, HttpResponse};
use chrono::Utc;
use cloudevents::AttributesReader;
use cloudevents::{event::Data, Event};
use influxdb::{InfluxDbWriteable, Timestamp, Type, WriteQuery};
use serde_json::Value;
use std::collections::HashMap;
// Implement your function's logic here
pub async fn handle(
event: Event,
processor: web::Data<Processor>,
) -> Result<HttpResponse, actix_web::Error> {
log::debug!("Received Event: {:?}", event);
let data: Option<&Data> = event.data();
let timestamp = event.time().cloned().unwrap_or_else(Utc::now);
let timestamp = Timestamp::from(timestamp);
let query = timestamp.into_query(processor.table.clone());
// process values with payload only
let json = parse_payload(data)?;
let (query, num) = add_values(query, &processor, &json)?;
// create full events JSON for tags
let event_json = serde_json::to_value(event)?;
let (query, _) = add_tags(query, &processor, &event_json)?;
// execute query
if num > 0 {
let result = processor.client.query(&query).await;
// process result
log::debug!("Result: {:?}", result);
match result {
Ok(_) => Ok(HttpResponse::Accepted().finish()),
Err(e) => Ok(HttpResponse::InternalServerError().body(e.to_string())),
}
} else {
Ok(HttpResponse::NoContent().finish())
}
}
fn add_to_query<F>(
mut query: WriteQuery,
processor: &HashMap<String, Path>,
json: &Value,
f: F,
) -> Result<(WriteQuery, usize), ServiceError>
where
F: Fn(WriteQuery, &String, Type) -> WriteQuery,
{
let mut num = 0;
let mut f = |query, field, value| {
num += 1;
f(query, field, value)
};
for (ref field, ref path) in processor {
let sel = path
.compiled
.select(&json)
.map_err(|err| ServiceError::SelectorError {
details: err.to_string(),
})?;
query = match sel.as_slice() {
// no value, don't add
[] => Ok(query),
// single value, process
[v] => Ok(f(query, field, path.r#type.convert(v, path)?)),
// multiple values, error
[..] => Err(ServiceError::SelectorError {
details: format!("Selector found more than one value: {}", sel.len()),
}),
}?;
}
Ok((query, num))
}
fn add_values(
query: WriteQuery,
processor: &Processor,
json: &Value,
) -> Result<(WriteQuery, usize), ServiceError> {
add_to_query(query, &processor.fields, json, |query, field, value| {
query.add_field(field, value)
})
}
fn add_tags(
query: WriteQuery,
processor: &Processor,
json: &Value,
) -> Result<(WriteQuery, usize), ServiceError> {
add_to_query(query, &processor.tags, json, |query, field, value| {
query.add_tag(field, value)
})
}
fn parse_payload(data: Option<&Data>) -> Result<Value, ServiceError> {
match data {
Some(Data::Json(value)) => Ok(value.clone()),
Some(Data::String(s)) => {
serde_json::from_str::<Value>(&s).map_err(|err| ServiceError::PayloadParseError {
details: err.to_string(),
})
}
Some(Data::Binary(b)) => {
serde_json::from_slice::<Value>(&b).map_err(|err| ServiceError::PayloadParseError {
details: err.to_string(),
})
}
_ => Err(ServiceError::PayloadParseError {
details: "Unknown event payload".to_string(),
}),
}
}
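// Editor's sketch (not part of the original file): parse_payload accepts all three
// cloudevents Data variants; a string payload is parsed as JSON.
#[cfg(test)]
mod payload_sketch {
    use super::parse_payload;
    use cloudevents::event::Data;

    #[test]
    fn parses_string_payload_as_json() {
        let value = parse_payload(Some(&Data::String("{\"temp\":21.5}".to_string()))).unwrap();
        assert_eq!(value["temp"], 21.5);
    }
}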
| true |
82d087e4eaf4d40a28968f3360552f79002e74c2 | Rust | DavidColson/RustyRaytracer | /src/vec.rs | UTF-8 | 3,127 | 3.578125 | 4 | ["MIT"] | permissive |
use std::ops::Add;
use std::ops::AddAssign;
use std::ops::Sub;
use std::ops::SubAssign;
use std::ops::Neg;
use std::ops::Mul;
use std::ops::Div;
use std::ops::DivAssign;
#[derive(Copy, Clone, Debug)]
pub struct Vec3 {
pub x: f64,
pub y: f64,
pub z: f64,
}
impl Vec3 {
pub fn new(x: f64, y: f64, z: f64) -> Vec3 {
Vec3{x, y, z}
}
#[inline(always)]
pub fn dot(&self, other: Vec3) -> f64 {
self.x * other.x + self.y * other.y + self.z * other.z
}
#[inline(always)]
pub fn sqr_length(&self) -> f64{
self.dot(*self)
}
#[inline(always)]
pub fn length(&self) -> f64{
self.sqr_length().sqrt()
}
#[inline(always)]
pub fn norm(&self) -> Vec3 {
let len = self.length();
Vec3 {x: self.x / len, y: self.y / len, z: self.z / len}
}
pub fn cross(&self, other: Vec3) -> Vec3 {
Vec3 {
x: (self.y * other.z - self.z * other.y),
y: -(self.x * other.z - self.z * other.x),
z: (self.x * other.y - self.y * other.x),
}
}
#[inline(always)]
pub fn comp_mul(&self, other: Vec3) -> Vec3 {
Vec3 {x: self.x * other.x, y: self.y * other.y, z: self.z * other.z}
}
}
impl Add for Vec3 {
type Output = Vec3;
#[inline(always)]
fn add(self, other: Vec3) -> Vec3 {
Vec3 { x: self.x + other.x, y: self.y + other.y, z: self.z + other.z }
}
}
impl AddAssign for Vec3 {
#[inline(always)]
fn add_assign(&mut self, other: Vec3) {
*self = Vec3 { x: self.x + other.x, y: self.y + other.y, z: self.z + other.z };
}
}
impl Sub for Vec3 {
type Output = Vec3;
#[inline(always)]
fn sub(self, other: Vec3) -> Vec3 {
Vec3 { x: self.x - other.x, y: self.y - other.y, z: self.z - other.z }
}
}
impl SubAssign for Vec3 {
#[inline(always)]
fn sub_assign(&mut self, other: Vec3) {
*self = Vec3 { x: self.x - other.x, y: self.y - other.y, z: self.z - other.z };
}
}
impl Neg for Vec3 {
type Output = Vec3;
#[inline(always)]
fn neg(self) -> Vec3 {
Vec3 { x: -self.x, y: -self.y, z: -self.z }
}
}
impl Mul<f64> for Vec3 {
type Output = Vec3;
#[inline(always)]
fn mul(self, scalar: f64) -> Vec3 {
Vec3 { x: self.x * scalar, y: self.y * scalar, z: self.z * scalar }
}
}
impl Mul<Vec3> for f64 {
type Output = Vec3;
#[inline(always)]
fn mul(self, vec: Vec3) -> Vec3 {
Vec3 { x: self * vec.x, y: self * vec.y, z: self * vec.z }
}
}
impl Div<f64> for Vec3 {
type Output = Vec3;
#[inline(always)]
fn div(self, scalar: f64) -> Vec3 {
Vec3 { x: self.x / scalar, y: self.y / scalar, z: self.z / scalar }
}
}
impl Div<Vec3> for f64 {
type Output = Vec3;
#[inline(always)]
fn div(self, vec: Vec3) -> Vec3 {
Vec3 { x: self / vec.x, y: self / vec.y, z: self / vec.z }
}
}
impl DivAssign<f64> for Vec3 {
#[inline(always)]
fn div_assign(&mut self, scalar: f64) {
*self = Vec3 { x: self.x / scalar, y: self.y / scalar, z: self.z / scalar };
}
}
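// Editor's sketch (not part of the original file): a few sanity checks exercising the
// operators defined above.
#[cfg(test)]
mod vec3_sketch {
    use super::Vec3;

    #[test]
    fn basic_ops() {
        let x = Vec3::new(1.0, 0.0, 0.0);
        let y = Vec3::new(0.0, 1.0, 0.0);
        assert_eq!(x.dot(y), 0.0);

        // Right-handed cross product: x cross y = z
        let z = x.cross(y);
        assert_eq!((z.x, z.y, z.z), (0.0, 0.0, 1.0));

        // Scaling and length: |2 * (x + y)| = sqrt(8)
        assert_eq!((2.0 * (x + y)).length(), 8.0f64.sqrt());
    }
}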
| true |
4ec756d866657f9dee086963aa61d8df1be635c8 | Rust | leizy0/AoCInRust | /2018/day13/src/main.rs | UTF-8 | 19,439 | 3.09375 | 3 | ["MIT"] | permissive |
use std::fmt;
use std::fs;
use std::io::{self, BufRead, Write};
use std::cmp::{max, min};
fn main() {
let input_path = "input.txt";
let input_file =
fs::File::open(input_path).expect(&format!("Failed to open input file({})", input_path));
let input_lines: Vec<String> = io::BufReader::new(input_file)
.lines()
.map(|l| l.unwrap())
.collect();
let mut simulator = CTSimulator::new(input_lines);
let last_cart_coord = simulator.sim_to_last_cart();
println!("If erase collided cart, after {} ticks, the last cart is at({})", simulator.elapsed(), last_cart_coord);
}
#[derive(Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Debug)]
struct Coordinate {
y: u32,
x: u32,
}
impl fmt::Display for Coordinate {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}, {}", self.x, self.y)
}
}
impl Coordinate {
pub fn shift(&mut self, x_offset: i32, y_offset: i32) {
let shift_x = self.x as i32 + x_offset;
let shift_y = self.y as i32 + y_offset;
if shift_x < 0 || shift_y < 0 {
panic!(format!(
"{} is shifted to invalid result({}, {})",
self, shift_x, shift_y
));
}
self.x = shift_x as u32;
self.y = shift_y as u32;
}
}
#[test]
fn test_coord_ord() {
assert!(Coordinate { x: 0, y: 1 } > Coordinate { x: 0, y: 0 });
assert!(Coordinate { x: 1, y: 1 } > Coordinate { x: 0, y: 0 });
assert!(Coordinate { x: 0, y: 1 } > Coordinate { x: 1, y: 0 });
assert!(Coordinate { x: 0, y: 0 } == Coordinate { x: 0, y: 0 });
assert!(Coordinate { x: 1, y: 0 } > Coordinate { x: 0, y: 0 });
assert!(Coordinate { x: 0, y: 0 } < Coordinate { x: 1, y: 0 });
}
#[derive(Eq, PartialEq, Copy, Clone, Debug)]
enum Direction {
North,
South,
West,
East,
}
impl Direction {
pub fn coord_offset(&self) -> (i32, i32) {
match self {
Direction::North => (0, -1),
Direction::South => (0, 1),
Direction::West => (-1, 0),
Direction::East => (1, 0),
}
}
}
#[derive(Eq, PartialEq, Copy, Clone, Debug)]
enum Turn {
Left,
Straight,
Right,
}
impl Turn {
pub fn turned_dir(&self, cur_dir: Direction) -> Direction {
match cur_dir {
Direction::North => match self {
Turn::Left => Direction::West,
Turn::Straight => Direction::North,
Turn::Right => Direction::East,
},
Direction::South => match self {
Turn::Left => Direction::East,
Turn::Straight => Direction::South,
Turn::Right => Direction::West,
},
Direction::West => match self {
Turn::Left => Direction::South,
Turn::Straight => Direction::West,
Turn::Right => Direction::North,
},
Direction::East => match self {
Turn::Left => Direction::North,
Turn::Straight => Direction::East,
Turn::Right => Direction::South,
},
}
}
}
struct Cart {
last_pos: Coordinate,
pos: Coordinate,
dir: Direction,
cur_turn: Turn,
}
impl Cart {
pub fn new(coord: Coordinate, desc: char) -> Self {
Cart {
last_pos: coord,
pos: coord,
dir: match desc {
'^' => Direction::North,
'v' => Direction::South,
'<' => Direction::West,
'>' => Direction::East,
_ => panic!(format!("Invalid cart description({})", desc)),
},
cur_turn: Turn::Left,
}
}
pub fn coord(&self) -> Coordinate {
self.pos
}
pub fn last_coord(&self) -> Coordinate {
self.last_pos
}
pub fn go_ahead(&mut self, track: Track) {
self.dir = match track {
Track::Empty => panic!("Cart at {:?} is derailed!", self.pos),
Track::HStraight | Track::VStraight | Track::UpCurve | Track::DownCurve => {
track.next_dir(self.dir)
}
Track::Intersection => track.turn_dir(self.dir, self.turn()),
};
let (x_offset, y_offset) = self.dir.coord_offset();
self.last_pos = self.pos;
self.pos.shift(x_offset, y_offset);
}
fn collide_with(&self, other: &Cart) -> bool {
// Two situations:
// First, the two carts have the same current position
if self.pos == other.pos {
return true;
}
// Second, the carts swapped places: this cart's current position is the other's last
// position, and this cart's last position is the other's current position
if self.pos == other.last_pos && self.last_pos == other.pos {
return true;
}
false
}
fn turn(&mut self) -> Turn {
let res_turn = self.cur_turn;
self.cur_turn = match self.cur_turn {
Turn::Left => Turn::Straight,
Turn::Straight => Turn::Right,
Turn::Right => Turn::Left,
};
res_turn
}
fn ascii_desc(&self) -> u8 {
match self.dir {
Direction::North => b'^',
Direction::South => b'v',
Direction::West => b'<',
Direction::East => b'>',
}
}
}
#[test]
fn test_cart_turn() {
let mut cart = Cart::new(Coordinate { x: 0, y: 0 }, '>');
assert_eq!(cart.turn(), Turn::Left);
assert_eq!(cart.turn(), Turn::Straight);
assert_eq!(cart.turn(), Turn::Right);
assert_eq!(cart.turn(), Turn::Left);
}
#[test]
fn test_cart_go_straight() {
// East
let mut cart = Cart::new(Coordinate { x: 10, y: 10 }, '>');
cart.go_ahead(Track::HStraight);
assert_eq!(cart.coord(), Coordinate { x: 11, y: 10 });
// West
let mut cart = Cart::new(Coordinate { x: 10, y: 10 }, '<');
cart.go_ahead(Track::HStraight);
assert_eq!(cart.coord(), Coordinate { x: 9, y: 10 });
// North
let mut cart = Cart::new(Coordinate { x: 10, y: 10 }, '^');
cart.go_ahead(Track::VStraight);
assert_eq!(cart.coord(), Coordinate { x: 10, y: 9 });
// South
let mut cart = Cart::new(Coordinate { x: 10, y: 10 }, 'v');
cart.go_ahead(Track::VStraight);
assert_eq!(cart.coord(), Coordinate { x: 10, y: 11 });
}
#[test]
fn test_cart_go_up_curve() {
// North
let mut cart = Cart::new(Coordinate { x: 10, y: 10 }, '^');
cart.go_ahead(Track::UpCurve);
assert_eq!(cart.coord(), Coordinate { x: 11, y: 10 });
// South
let mut cart = Cart::new(Coordinate { x: 10, y: 10 }, 'v');
cart.go_ahead(Track::UpCurve);
assert_eq!(cart.coord(), Coordinate { x: 9, y: 10 });
// West
let mut cart = Cart::new(Coordinate { x: 10, y: 10 }, '<');
cart.go_ahead(Track::UpCurve);
assert_eq!(cart.coord(), Coordinate { x: 10, y: 11 });
// East
let mut cart = Cart::new(Coordinate { x: 10, y: 10 }, '>');
cart.go_ahead(Track::UpCurve);
assert_eq!(cart.coord(), Coordinate { x: 10, y: 9 });
}
#[test]
fn test_cart_go_down_curve() {
// North
let mut cart = Cart::new(Coordinate { x: 10, y: 10 }, '^');
cart.go_ahead(Track::DownCurve);
assert_eq!(cart.coord(), Coordinate { x: 9, y: 10 });
// South
let mut cart = Cart::new(Coordinate { x: 10, y: 10 }, 'v');
cart.go_ahead(Track::DownCurve);
assert_eq!(cart.coord(), Coordinate { x: 11, y: 10 });
// West
let mut cart = Cart::new(Coordinate { x: 10, y: 10 }, '<');
cart.go_ahead(Track::DownCurve);
assert_eq!(cart.coord(), Coordinate { x: 10, y: 9 });
// East
let mut cart = Cart::new(Coordinate { x: 10, y: 10 }, '>');
cart.go_ahead(Track::DownCurve);
assert_eq!(cart.coord(), Coordinate { x: 10, y: 11 });
}
#[test]
fn test_cart_go_intersection() {
// North
let mut cart = Cart::new(Coordinate { x: 10, y: 10 }, '^');
cart.go_ahead(Track::Intersection); // Go left, toward west
assert_eq!(cart.coord(), Coordinate { x: 9, y: 10 });
cart.go_ahead(Track::Intersection); // Go Straight, toward west
assert_eq!(cart.coord(), Coordinate { x: 8, y: 10 });
cart.go_ahead(Track::Intersection); // Go Right, toward north
assert_eq!(cart.coord(), Coordinate { x: 8, y: 9 });
cart.go_ahead(Track::Intersection); // Go Left, toward west
assert_eq!(cart.coord(), Coordinate { x: 7, y: 9 });
// South
let mut cart = Cart::new(Coordinate { x: 10, y: 10 }, 'v');
cart.go_ahead(Track::Intersection); // Go left, toward east
assert_eq!(cart.coord(), Coordinate { x: 11, y: 10 });
cart.go_ahead(Track::Intersection); // Go Straight, toward east
assert_eq!(cart.coord(), Coordinate { x: 12, y: 10 });
cart.go_ahead(Track::Intersection); // Go Right, toward south
assert_eq!(cart.coord(), Coordinate { x: 12, y: 11 });
cart.go_ahead(Track::Intersection); // Go Left, toward east
assert_eq!(cart.coord(), Coordinate { x: 13, y: 11 });
// West
let mut cart = Cart::new(Coordinate { x: 10, y: 10 }, '<');
cart.go_ahead(Track::Intersection); // Go left, toward south
assert_eq!(cart.coord(), Coordinate { x: 10, y: 11 });
cart.go_ahead(Track::Intersection); // Go Straight, toward south
assert_eq!(cart.coord(), Coordinate { x: 10, y: 12 });
cart.go_ahead(Track::Intersection); // Go Right, toward west
assert_eq!(cart.coord(), Coordinate { x: 9, y: 12 });
cart.go_ahead(Track::Intersection); // Go Left, toward south
assert_eq!(cart.coord(), Coordinate { x: 9, y: 13 });
// East
let mut cart = Cart::new(Coordinate { x: 10, y: 10 }, '>');
cart.go_ahead(Track::Intersection); // Go left, toward north
assert_eq!(cart.coord(), Coordinate { x: 10, y: 9 });
cart.go_ahead(Track::Intersection); // Go Straight, toward north
assert_eq!(cart.coord(), Coordinate { x: 10, y: 8 });
cart.go_ahead(Track::Intersection); // Go Right, toward east
assert_eq!(cart.coord(), Coordinate { x: 11, y: 8 });
cart.go_ahead(Track::Intersection); // Go Left, toward north
assert_eq!(cart.coord(), Coordinate { x: 11, y: 7 });
}
struct TrackMap {
row_n: u32,
col_n: u32,
track_mat: Vec<Track>,
}
impl TrackMap {
pub fn new(row_n: u32, col_n: u32) -> Self {
let track_mat = vec![Track::Empty; (row_n * col_n) as usize];
TrackMap {
row_n,
col_n,
track_mat,
}
}
pub fn row_n(&self) -> u32 {
self.row_n
}
pub fn col_n(&self) -> u32 {
self.col_n
}
pub fn at(&self, coord: Coordinate) -> Track {
if !self.is_valid_coord(coord) {
panic!("Invalid coordiante({}), can't get track", coord);
}
let ind = self.coord_to_ind(coord);
self.track_mat[ind as usize]
}
pub fn set_track(&mut self, coord: Coordinate, desc: char) {
if !self.is_valid_coord(coord) {
panic!("Invalid coordinate({}), can't set track({})", coord, desc);
}
let ind = self.coord_to_ind(coord);
self.track_mat[ind as usize] = match desc {
'|' => Track::VStraight,
'-' => Track::HStraight,
'/' => Track::UpCurve,
'\\' => Track::DownCurve,
'+' => Track::Intersection,
_ => panic!(format!("Invalid track description({})", desc)),
}
}
pub fn row(&self, r_ind: u32) -> &[Track] {
let start = (self.col_n() * r_ind) as usize;
let end = start + self.col_n() as usize;
&self.track_mat[start..end]
}
fn coord_to_ind(&self, coord: Coordinate) -> u32 {
coord.y * self.col_n + coord.x
}
fn is_valid_coord(&self, coord: Coordinate) -> bool {
coord.x < self.col_n && coord.y < self.row_n
}
}
#[derive(Copy, Clone, Debug)]
enum Track {
Empty,
HStraight,
VStraight,
UpCurve,
DownCurve,
Intersection,
}
impl Track {
pub fn next_dir(&self, cur_dir: Direction) -> Direction {
match self {
Track::HStraight => match cur_dir {
Direction::West | Direction::East => cur_dir,
_ => panic!(format!(
"{:?} isn't directed along a horizontal straight track",
cur_dir
)),
},
Track::VStraight => match cur_dir {
Direction::South | Direction::North => cur_dir,
_ => panic!(format!(
"{:?} isn't directed along a vertical straight track",
cur_dir
)),
},
Track::UpCurve => match cur_dir {
Direction::North => Direction::East,
Direction::South => Direction::West,
Direction::West => Direction::South,
Direction::East => Direction::North,
},
Track::DownCurve => match cur_dir {
Direction::North => Direction::West,
Direction::South => Direction::East,
Direction::West => Direction::North,
Direction::East => Direction::South,
},
Track::Empty => panic!("Empty track has no next direction"),
Track::Intersection => panic!(
"Intersection has three possible next directions; turn info must be provided (use turn_dir)"
),
}
}
pub fn turn_dir(&self, cur_dir: Direction, turn: Turn) -> Direction {
match self {
Track::Intersection => turn.turned_dir(cur_dir),
_ => panic!(format!("{:?} can't turn direction", self)),
}
}
}
struct CTSimulator {
map: TrackMap,
cart_list: Vec<Cart>,
tick_n: u32,
}
type CTSimResult = Result<(), CTSimError>;
enum CTSimErrorType {
Collision,
}
struct CTSimError {
err_type: CTSimErrorType,
pos: Coordinate,
}
impl CTSimError {
pub fn pos(&self) -> Coordinate {
self.pos
}
}
impl CTSimulator {
pub fn new(desc: Vec<String>) -> Self {
let row_n = desc.len();
let col_n = desc[0].chars().count();
let mut map = TrackMap::new(row_n as u32, col_n as u32);
let mut carts = Vec::new();
for (y, row) in desc.iter().enumerate() {
let this_col_n = row.chars().count();
if this_col_n != col_n {
panic!(format!(
"Inconsistent # of track map column({}), expect all are {}",
this_col_n, col_n
));
}
for (x, c) in row.chars().enumerate() {
let coord = Coordinate {
x: x as u32,
y: y as u32,
};
match c {
'|' | '-' | '/' | '\\' | '+' => map.set_track(coord, c),
'^' | 'v' | '<' | '>' => {
carts.push(Cart::new(coord, c));
match c {
'^' | 'v' => {
map.set_track(coord, '|');
}
'<' | '>' => {
map.set_track(coord, '-');
}
_ => panic!("Never go here, under protection of outer match branch"),
}
}
' ' => (),
_ => panic!(format!("Invalid map description({}), at {}", c, coord)),
}
}
}
CTSimulator {
map: map,
cart_list: carts,
tick_n: 0,
}
}
pub fn sim_tick(&mut self) -> CTSimResult {
self.cart_list.sort_unstable_by_key(|c| c.coord());
for ind in 0..(self.cart_list.len()) {
let cart = &mut self.cart_list[ind];
cart.go_ahead(self.map.at(cart.coord()));
self.check_cart_collision(ind)?;
}
self.tick_n += 1;
Ok(())
}
pub fn sim_to_last_cart(&mut self) -> Coordinate {
loop {
self.cart_list.sort_unstable_by_key(|c| c.coord());
let mut ind = 0;
while ind < self.cart_list.len() {
let cart = &mut self.cart_list[ind];
cart.go_ahead(self.map.at(cart.coord()));
ind = self.check_and_erase_collision(ind);
}
match self.cart_list.len() {
1 => return self.cart_list[0].coord(),
0 => panic!("Zero cart left, when erase all collided carts"),
_ => (),
}
self.tick_n += 1;
}
}
pub fn elapsed(&self) -> u32 {
self.tick_n
}
pub fn dump_map(&self, file_path: &str) -> io::Result<()> {
let output_file = fs::File::create(file_path)?;
let mut writer = io::BufWriter::new(output_file);
let line_n = self.map.row_n();
for i in 0..line_n {
let mut line: Vec<u8> = self
.map
.row(i)
.iter()
.map(|t| match t {
Track::Empty => b' ',
Track::HStraight => b'-',
Track::VStraight => b'|',
Track::Intersection => b'+',
Track::UpCurve => b'/',
Track::DownCurve => b'\\',
})
.collect();
// Replace cart position with cart's ascii description
for cart in &self.cart_list {
let coord = cart.coord();
if coord.y == i {
line[coord.x as usize] = cart.ascii_desc();
}
}
line.push(b'\n');
writer.write(line.as_slice())?;
}
Ok(())
}
fn check_cart_collision(&self, ind: usize) -> CTSimResult {
let check_cart = &self.cart_list[ind];
for (i, cart) in self.cart_list.iter().enumerate() {
if ind != i && cart.coord() == check_cart.coord() {
return Err(CTSimError {
err_type: CTSimErrorType::Collision,
pos: check_cart.coord(),
});
}
}
Ok(())
}
fn check_and_erase_collision(&mut self, ind: usize) -> usize {
let check_cart = &self.cart_list[ind];
let mut collide_partner: Option<usize> = None;
for (i, cart) in self.cart_list.iter().enumerate() {
if ind != i && cart.coord() == check_cart.coord() {
collide_partner = Some(i);
break;
}
}
return if let Some(collided_ind) = collide_partner {
if collided_ind > ind {
// Collided with a later cart; the next index to process is still ind, because the cart at ind was removed
self.cart_list.remove(collided_ind);
self.cart_list.remove(ind);
ind
} else {
// Collided with an earlier cart; the next index to process is ind - 1,
// because both removed carts sat at or before this index
self.cart_list.remove(ind);
self.cart_list.remove(collided_ind);
ind - 1
}
} else {
// Not collide, just the next index
ind + 1
}
}
}
| true |
cfbab72f86b72f3ecdd8764261eaf01904cc8d92 | Rust | veniamin-ilmer/crate-race | /benches/vec_intsort/bench.rs | UTF-8 | 2,151 | 2.609375 | 3 | ["Apache-2.0"] | permissive |
//!Sorting Vecs of u32.
//!
//!* *Baseline*: 1 item.
//!* *Sorted*: 10,000 items, fully sorted already.
//!* *Random*: 10,000 items, randomly shuffled. (The shuffling is not part of the time).
//!* *Reverse*: 10,000 items, sorted in reverse order.
#[macro_use]
extern crate bencher;
mod _std;
mod _afsort;
mod _cycle_sort;
mod _dmsort;
mod _ironsort;
mod _quick_sort;
mod _rdxsort;
mod _rust_quicksort;
mod _sortrs;
benchmark_group!(baseline, _std::baseline,
_afsort::baseline,
_cycle_sort::baseline,
_dmsort::baseline,
_ironsort::baseline,
_quick_sort::baseline,
_rdxsort::baseline,
_rust_quicksort::baseline,
_sortrs::baseline);
benchmark_group!(sorted, _std::sorted,
_afsort::sorted,
_cycle_sort::sorted,
_dmsort::sorted,
_ironsort::sorted,
_quick_sort::sorted,
_rdxsort::sorted,
_rust_quicksort::sorted,
_sortrs::sorted);
benchmark_group!(random, _std::random,
_afsort::random,
_cycle_sort::random,
_dmsort::random,
_ironsort::random,
_quick_sort::random,
_rdxsort::random,
_rust_quicksort::random,
_sortrs::random);
benchmark_group!(reverse, _std::reverse,
_afsort::reverse,
_cycle_sort::reverse,
_dmsort::reverse,
_ironsort::reverse,
_quick_sort::reverse,
_rdxsort::reverse,
_rust_quicksort::reverse,
_sortrs::reverse);
benchmark_main!(baseline, sorted, random, reverse);
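// Editor's sketch (not part of the original file): the per-crate modules (_std, _dmsort, ...)
// are not shown in this file, so this is only a rough illustration of what a benchmark
// function compatible with the groups above might look like, using the bencher crate's API
// for the standard-library sort in the "sorted" scenario.
#[allow(dead_code)]
fn example_std_sorted(bench: &mut bencher::Bencher) {
    // 10,000 already-sorted u32 values; cloning keeps the input identical across iterations.
    let data: Vec<u32> = (0..10_000).collect();
    bench.iter(|| {
        let mut v = data.clone();
        v.sort();
    });
}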
| true |
7c37d8e073d93d14e6cdff5e9de7a434101a4891 | Rust | thibault-martinez/bee | /bee-message/src/message.rs | UTF-8 | 6,968 | 2.546875 | 3 | ["Apache-2.0"] | permissive |
// Copyright 2020-2021 IOTA Stiftung
// SPDX-License-Identifier: Apache-2.0
use crate::{
parents::Parents,
payload::{option_payload_pack, option_payload_packed_len, option_payload_unpack, Payload},
Error, MessageId,
};
use bee_common::packable::{Packable, Read, Write};
use bee_pow::providers::{miner::Miner, NonceProvider, NonceProviderBuilder};
use crypto::hashes::{blake2b::Blake2b256, Digest};
/// The minimum number of bytes in a message.
pub const MESSAGE_LENGTH_MIN: usize = 53;
/// The maximum number of bytes in a message.
pub const MESSAGE_LENGTH_MAX: usize = 32768;
const DEFAULT_POW_SCORE: f64 = 4000f64;
const DEFAULT_NONCE: u64 = 0;
/// Represents the object that nodes gossip around the network.
#[derive(Clone, Debug, Eq, PartialEq)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
pub struct Message {
/// Specifies which network this message is meant for.
network_id: u64,
/// The [`MessageId`]s that this message directly approves.
parents: Parents,
/// The optional [Payload] of the message.
payload: Option<Payload>,
/// The result of the Proof of Work in order for the message to be accepted into the tangle.
nonce: u64,
}
impl Message {
/// Creates a new `MessageBuilder` to construct an instance of a `Message`.
pub fn builder() -> MessageBuilder {
MessageBuilder::new()
}
/// Computes the identifier of the message.
pub fn id(&self) -> (MessageId, Vec<u8>) {
let bytes = self.pack_new();
let id = Blake2b256::digest(&bytes);
(MessageId::new(id.into()), bytes)
}
/// Returns the network id of a `Message`.
pub fn network_id(&self) -> u64 {
self.network_id
}
/// Returns the parents of a `Message`.
pub fn parents(&self) -> &Parents {
&self.parents
}
/// Returns the optional payload of a `Message`.
pub fn payload(&self) -> &Option<Payload> {
&self.payload
}
/// Returns the nonce of a `Message`.
pub fn nonce(&self) -> u64 {
self.nonce
}
/// Consumes the [`Message`], and returns ownership over its [`Parents`].
pub fn into_parents(self) -> Parents {
self.parents
}
}
impl Packable for Message {
type Error = Error;
fn packed_len(&self) -> usize {
self.network_id.packed_len()
+ self.parents.packed_len()
+ option_payload_packed_len(self.payload.as_ref())
+ self.nonce.packed_len()
}
fn pack<W: Write>(&self, writer: &mut W) -> Result<(), Self::Error> {
self.network_id.pack(writer)?;
self.parents.pack(writer)?;
option_payload_pack(writer, self.payload.as_ref())?;
self.nonce.pack(writer)?;
Ok(())
}
fn unpack_inner<R: Read + ?Sized, const CHECK: bool>(reader: &mut R) -> Result<Self, Self::Error> {
let network_id = u64::unpack_inner::<R, CHECK>(reader)?;
let parents = Parents::unpack_inner::<R, CHECK>(reader)?;
let (payload_len, payload) = option_payload_unpack::<R, CHECK>(reader)?;
if CHECK
&& !matches!(
payload,
None | Some(Payload::Transaction(_)) | Some(Payload::Milestone(_)) | Some(Payload::Indexation(_))
)
{
// Safe to unwrap since it's known not to be None.
return Err(Error::InvalidPayloadKind(payload.unwrap().kind()));
}
let nonce = u64::unpack_inner::<R, CHECK>(reader)?;
// Computed instead of calling `packed_len` on Self because `payload_len` is already known and it may be
// expensive to call `payload.packed_len()` twice.
let message_len = network_id.packed_len() + parents.packed_len() + payload_len + nonce.packed_len();
if CHECK && message_len > MESSAGE_LENGTH_MAX {
return Err(Error::InvalidMessageLength(message_len));
}
// When parsing the message is complete, there should not be any trailing bytes left that were not parsed.
if CHECK && reader.bytes().next().is_some() {
return Err(Error::RemainingBytesAfterMessage);
}
Ok(Self {
network_id,
parents,
payload,
nonce,
})
}
}
/// A builder to build a `Message`.
#[must_use]
pub struct MessageBuilder<P: NonceProvider = Miner> {
network_id: Option<u64>,
parents: Option<Parents>,
payload: Option<Payload>,
nonce_provider: Option<(P, f64)>,
}
impl<P: NonceProvider> Default for MessageBuilder<P> {
fn default() -> Self {
Self {
network_id: None,
parents: None,
payload: None,
nonce_provider: None,
}
}
}
impl<P: NonceProvider> MessageBuilder<P> {
/// Creates a new `MessageBuilder`.
pub fn new() -> Self {
Default::default()
}
/// Adds a network id to a `MessageBuilder`.
pub fn with_network_id(mut self, network_id: u64) -> Self {
self.network_id = Some(network_id);
self
}
/// Adds parents to a `MessageBuilder`.
pub fn with_parents(mut self, parents: Parents) -> Self {
self.parents = Some(parents);
self
}
/// Adds a payload to a `MessageBuilder`.
pub fn with_payload(mut self, payload: Payload) -> Self {
self.payload = Some(payload);
self
}
/// Adds a nonce provider to a `MessageBuilder`.
pub fn with_nonce_provider(mut self, nonce_provider: P, target_score: f64) -> Self {
self.nonce_provider = Some((nonce_provider, target_score));
self
}
/// Finishes the `MessageBuilder` into a `Message`.
pub fn finish(self) -> Result<Message, Error> {
let network_id = self.network_id.ok_or(Error::MissingField("network_id"))?;
let parents = self.parents.ok_or(Error::MissingField("parents"))?;
if !matches!(
self.payload,
None | Some(Payload::Transaction(_)) | Some(Payload::Milestone(_)) | Some(Payload::Indexation(_))
) {
// Safe to unwrap since it's known not to be None.
return Err(Error::InvalidPayloadKind(self.payload.unwrap().kind()));
}
let mut message = Message {
network_id,
parents,
payload: self.payload,
nonce: 0,
};
let message_bytes = message.pack_new();
if message_bytes.len() > MESSAGE_LENGTH_MAX {
return Err(Error::InvalidMessageLength(message_bytes.len()));
}
let (nonce_provider, target_score) = self
.nonce_provider
.unwrap_or((P::Builder::new().finish(), DEFAULT_POW_SCORE));
message.nonce = nonce_provider
.nonce(
&message_bytes[..message_bytes.len() - std::mem::size_of::<u64>()],
target_score,
)
.unwrap_or(DEFAULT_NONCE);
Ok(message)
}
}
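// Editor's sketch (not part of the original file): typical builder usage, assuming `parents`
// and a transaction `payload` were constructed elsewhere; with no nonce provider set,
// `finish` falls back to the default Miner and the 4000 PoW target score.
#[allow(dead_code)]
fn example_build(parents: Parents, payload: Payload) -> Result<Message, Error> {
    Message::builder()
        .with_network_id(1)
        .with_parents(parents)
        .with_payload(payload)
        .finish()
}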
| true |
448c94eb26b1a9bdd96d2adeb9d251d7f339f496 | Rust | vaibhavantil2/third-party-api-clients | /github/src/markdown.rs | UTF-8 | 1,518 | 3.25 | 3 | ["MIT"] | permissive |
use anyhow::Result;
use crate::Client;
pub struct Markdown {
pub client: Client,
}
impl Markdown {
#[doc(hidden)]
pub fn new(client: Client) -> Self {
Markdown { client }
}
/**
* Render a Markdown document.
*
* This function performs a `POST` to the `/markdown` endpoint.
*
*
*
* FROM: <https://docs.github.com/rest/reference/markdown#render-a-markdown-document>
*/
pub async fn render(&self, body: &crate::types::MarkdownRenderRequest) -> Result<String> {
let url = "/markdown".to_string();
self.client
.post(&url, Some(reqwest::Body::from(serde_json::to_vec(body)?)))
.await
}
/**
* Render a Markdown document in raw mode.
*
* This function performs a `POST` to the `/markdown/raw` endpoint.
*
* You must send Markdown as plain text (using a `Content-Type` header of `text/plain` or `text/x-markdown`) to this endpoint, rather than using JSON format. In raw mode, [GitHub Flavored Markdown](https://github.github.com/gfm/) is not supported and Markdown will be rendered in plain format like a README.md file. Markdown content must be 400 KB or less.
*
* FROM: <https://docs.github.com/rest/reference/markdown#render-a-markdown-document-in-raw-mode>
*/
pub async fn render_raw<T: Into<reqwest::Body>>(&self, body: T) -> Result<String> {
let url = "/markdown/raw".to_string();
self.client.post(&url, Some(body.into())).await
}
}
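// Editor's sketch (not part of the original file): render_raw takes anything convertible
// into a reqwest::Body (here a plain &'static str) and returns the rendered HTML; `client`
// is assumed to be an already-configured crate::Client.
#[allow(dead_code)]
async fn example_render_raw(client: Client) -> Result<String> {
    let markdown = Markdown::new(client);
    markdown.render_raw("# Hello, world").await
}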
| true |
bc50886d6b8424ae07e4c3eb247a9449522eee62 | Rust | InFarAday/Projet-RustRSA | /rrsa/src/tests.rs | UTF-8 | 11,752 | 3.125 | 3 | [] | no_license |
mod maths
{
/// Tests for the number and number-vector extension utilities
mod nvutil
{
use crate::maths::{NumUtil, VecNumUtil};
use num_bigint::BigUint;
/// Test: number of digits of a number in radix N
#[test]
fn nu_sz()
{
assert_eq!(3, BigUint::from(321u16).sz(10));
}
/// Test: number of bytes of a number
#[test]
fn nu_sz_b()
{
assert_eq!(2, BigUint::from(432u16).sz_b());
}
/// Test: decomposition / recomposition (checked against the original)
#[test]
fn join_expl()
{
let b = BigUint::from(1267122178333u64);
assert_eq!(b, b.expl_r(2).rejoin());
}
}
use crate::maths;
use num_bigint::{BigUint, BigInt};
use num_traits::ToPrimitive;
/// Test: Euclid's GCD algorithm
#[test]
fn euclide()
{
let (a, b) = (BigInt::from(234u32), BigInt::from(267u32));
assert_eq!(8u32, maths::euclide(&a, &b).to_u32().unwrap());
}
/// Test: modular exponentiation
#[test]
fn modpow()
{
let (a, b, c) = (
BigUint::from(345u16),
BigUint::from(712u16),
BigUint::from(87u8),
);
assert_eq!(a.modpow(&b, &c), maths::fmodpow(&a, &b, &c));
}
/// Test: exponent code
#[test]
fn expcode()
{
let x = BigUint::from(12781u16);
assert_eq!(2, maths::expcode(&x).unwrap().to_u32().unwrap());
}
/// Test: primality check function
#[test]
fn isprime()
{
let p = BigUint::from(5653u16);
let np = BigUint::from(12782u16);
assert!(maths::isprime(&p));
assert!(!maths::isprime(&np));
}
}
/// Tests for the message structures
mod messages
{
use crate::messages::*;
use num_bigint::BigUint;
use num_traits::Num;
/// Test: messages built from strings
#[test]
fn f_unf()
{
let msg = Message::str(String::from("test")).build();
assert_eq!("test", msg.to_str().unwrap());
}
/// Test: messages built from the textual representation of numbers
#[test]
fn ns_uns()
{
let msg = Message::nstr(String::from("8a240238dfljqslkfj2378273dfjqldksf8a240238dfljqslkfj2378273dfjqldksf"), true).build();
assert_eq!(BigUint::from_str_radix("8a240238dfljqslkfj2378273dfjqldksf8a240238dfljqslkfj2378273dfjqldksf", 36).unwrap().to_str_radix(36), msg.to_nstr());
}
/// Test: splitting within messages
#[test]
fn bd_dest()
{
let msg = Message::nstr(String::from("8a240238dfljqslkfj2378273dfjqldksf8a240238dfljqslkfj2378273dfjqldksf8a240238dfljqslkfj2378273dfjqldksf8a240238dfljqslkfj2378273dfjqldksf"), true).build();
let parts = msg.parts.clone();
let msg = Message::nstr(msg.to_nstr(), true).build();
for (index, part) in parts.iter().enumerate()
{
println!("{}", part);
assert_eq!(part, &msg.parts[index]);
}
}
}
/// Tests for key management
mod keys
{
use crate::{engines::RsaKey, keys::{Key, NumKey, KeyPair}};
use num_bigint::BigUint;
/// Test: serialization of numeric keys
#[test]
fn ser_str()
{
let k = NumKey::from(BigUint::from(9u8));
assert_eq!("9", k.serialize_str());
}
/// Test: deserialization of numeric keys
#[test]
fn from_str()
{
assert_eq!(BigUint::from(9u8), NumKey::from_str(String::from("9")).unwrap().value);
}
/// Test: deserialization of key pairs
#[test]
fn from_str_dpair()
{
let k = RsaKey::from_str(String::from("9::8::7::6")).unwrap();
assert_eq!(k.0.0.value, BigUint::from(9u8));
assert_eq!(k.0.1.value, BigUint::from(8u8));
assert_eq!(k.1.0.value, BigUint::from(7u8));
assert_eq!(k.1.1.value, BigUint::from(6u8));
}
/// Test: serialization of key pairs
#[test]
fn ser_str_dpair()
{
let k = KeyPair(
KeyPair(
NumKey::from(BigUint::from(9u8)),
NumKey::from(BigUint::from(8u8))
),
KeyPair(
NumKey::from(BigUint::from(7u8)),
NumKey::from(BigUint::from(6u8))
)
);
assert_eq!("9::8::7::6", k.serialize_str());
}
}
/// Tests for the cryptographic engines
mod engines
{
use crate::engines::{Engine, Cesar};
use num_bigint::BigUint;
/// Test: padding (add + remove = original)
#[test]
fn pad_unpad()
{
let p = BigUint::from(12345u16);
let mut pp = p.clone();
let rsa = Cesar;
rsa.pad(&mut pp, 1);
rsa.unpad(&mut pp, 1);
assert_eq!(p, pp);
}
/// Tests for the RSA engine
mod rsa
{
use crate::{engines::{Engine, Rsa, RSA_DEF_GEN_THREADS}, maths::{isprime, rand_primelike}, messages::*};
use std::time::Instant;
use num_bigint::BigUint;
/// Test: key generation with 64-byte p and q
#[test]
fn gen_64()
{
let rsa = Rsa;
let _k = rsa.generate(64u64, RSA_DEF_GEN_THREADS);
}
/// Test: key generation with 128-byte p and q
#[test]
fn gen_128()
{
let rsa = Rsa;
let _k = rsa.generate(128u64, RSA_DEF_GEN_THREADS);
}
/// Test: key generation with 256-byte p and q
/// Ignored by default because it takes too long
#[test]
#[ignore = "Too slow"]
fn gen_256()
{
let rsa = Rsa;
let _k = rsa.generate(256u64, RSA_DEF_GEN_THREADS);
}
/// Benchmark of generation and primality-test times for various number sizes
#[test]
#[ignore = "Benchmarking only"]
fn bench_primality()
{
let mut tpoint = Instant::now();
for _ in 0..100
{
rand_primelike(64);
}
println!("Génération 64 octets en {} ms soit {} ms par itération.", tpoint.elapsed().as_millis(), tpoint.elapsed().as_millis() / 100);
tpoint = Instant::now();
for _ in 0..100
{
rand_primelike(128);
}
println!("Génération 128 octets en {} ms soit {} ms par itération.", tpoint.elapsed().as_millis(), tpoint.elapsed().as_millis() / 100);
tpoint = Instant::now();
for _ in 0..100
{
rand_primelike(256);
}
println!("Génération 256 octets en {} ms soit {} ms par itération.", tpoint.elapsed().as_millis(), tpoint.elapsed().as_millis() / 100);
let (n64, n128, n256) = (rand_primelike(64), rand_primelike(128), rand_primelike(256));
tpoint = Instant::now();
for _ in 0..100
{
isprime(&n64);
}
println!("Vérification 64 octets en {} ms soit {} ms par itération.", tpoint.elapsed().as_millis(), tpoint.elapsed().as_millis() / 100);
tpoint = Instant::now();
for _ in 0..100
{
isprime(&n128);
}
println!("Vérification 128 octets en {} ms soit {} ms par itération.", tpoint.elapsed().as_millis(), tpoint.elapsed().as_millis() / 100);
tpoint = Instant::now();
for _ in 0..100
{
isprime(&n256);
}
println!("Vérification 256 octets en {} ms soit {} ms par itération.", tpoint.elapsed().as_millis(), tpoint.elapsed().as_millis() / 100);
panic!();
}
/// Benchmark of generation time as a function of the number of threads
#[test]
#[ignore = "Benchmarking only"]
fn bench_gen_time()
{
let mut tpoint = Instant::now();
let rsa = Rsa;
let iters: u128 = 20;
let tmax: u8 = 6;
for i in 1..=tmax
{
for _ in 0..iters
{
rsa.generate(32, i);
}
println!("Génération 32 octets en {} ms avec {} threads, soit {} ms par itération", tpoint.elapsed().as_millis(), i, tpoint.elapsed().as_millis() / iters);
tpoint = Instant::now();
}
panic!();
}
/// Test: encoding and decoding (encode + decode = original number)
#[test]
fn encode_decode()
{
let p = BigUint::from(12345u16);
let mut pp = p.clone();
let rsa = Rsa;
let k = rsa.gen_def();
rsa.encode(&mut pp, &k.0, 1);
rsa.decode(&mut pp, &k.1, 1);
assert_eq!(p, pp);
}
/// Test: message encryption and decryption (encrypt + decrypt = original)
#[test]
fn encrypt_decrypt()
{
let mut msg = Message::str(String::from("test rsa")).build();
let rsa = Rsa;
let k = rsa.gen_def();
rsa.encrypt(&mut msg, &k.0);
let mut msg = Message::parts_str(msg.to_parts_str(), true).build();
rsa.decrypt(&mut msg, &k.1);
assert_eq!("test rsa", msg.to_str().unwrap());
}
/// Test: encryption + decryption with the keys swapped (encrypt with the private key, decrypt with the public key)
#[test]
#[ignore = "Same as sign_verify"]
fn e_d_inv()
{
let mut msg = Message::str(String::from("test rsa")).build();
let rsa = Rsa;
let k = rsa.gen_def();
rsa.encrypt(&mut msg, &k.1);
//let mut msg = Message::parts_str(msg.to_parts_str(), true).build();
rsa.decrypt(&mut msg, &k.0);
assert_eq!("test rsa", msg.to_str().unwrap());
}
/// Test: signing and verifying a number (sign + decrypt the signature = original)
#[test]
fn sign_verify()
{
let mut msg = Message::str(String::from("test rsa")).build();
let rsa = Rsa;
let k = rsa.gen_def();
rsa.encrypt(&mut msg, &k.1);
rsa.decrypt(&mut msg, &k.0);
assert_eq!("test rsa", msg.to_str().unwrap());
}
}
/// Tests for the Caesar engine
mod cesar
{
use crate::{engines::{Engine, Cesar}, messages::*};
use num_bigint::BigUint;
/// Test: encoding and decoding (encode + decode = original number)
#[test]
fn encode_decode()
{
let p = BigUint::from(12345u16);
let mut pp = p.clone();
let cesar = Cesar;
let k = cesar.gen_def();
cesar.encode(&mut pp, &k, 1);
cesar.decode(&mut pp, &k, 1);
assert_eq!(p, pp);
}
/// Test: message encryption and decryption (encrypt + decrypt = original)
#[test]
fn encrypt_decrypt()
{
let mut msg = Message::str(String::from("test cesar")).build();
let cesar = Cesar;
let k = cesar.gen_def();
cesar.encrypt(&mut msg, &k);
let mut msg = Message::parts_str(msg.to_parts_str(), true).build();
cesar.decrypt(&mut msg, &k);
assert_eq!("test cesar", msg.to_str().unwrap());
}
}
}
| true |
f7b2a5251fafd94a355b140fe65256421525e452 | Rust | AidenZuk/mempool | /src/lib.rs | UTF-8 | 11,115 | 3.46875 | 3 | [] | no_license |
//! A thread-safe object pool with automatic return and attach/detach semantics
//!
//! The goal of an object pool is to reuse expensive to allocate objects or frequently allocated objects
//!
//! # Examples
//!
//! ## Creating a Pool
//!
//! The general pool creation looks like this
//! ```
//! let pool: MemoryPool<T> = MemoryPool::new(capacity, || T::new());
//! ```
//! Example pool with 32 `Vec<u8>` with capacity of 4096
//! ```
//! let pool: MemoryPool<Vec<u8>> = MemoryPool::new(32, || Vec::with_capacity(4096));
//! ```
//!
//! ## Using a Pool
//!
//! Basic usage for pulling from the pool
//! ```
//! let pool: MemoryPool<Vec<u8>> = MemoryPool::new(32, || Vec::with_capacity(4096));
//! let mut reusable_buff = pool.pull().unwrap(); // returns None when the pool is saturated
//! reusable_buff.clear(); // clear the buff before using
//! some_file.read_to_end(reusable_buff);
//! // reusable_buff is automatically returned to the pool when it goes out of scope
//! ```
//! Pull from pool and `detach()`
//! ```
//! let pool: MemoryPool<Vec<u8>> = MemoryPool::new(32, || Vec::with_capacity(4096));
//! let mut reusable_buff = pool.pull().unwrap(); // returns None when the pool is saturated
//! reusable_buff.clear(); // clear the buff before using
//! let (pool, reusable_buff) = reusable_buff.detach();
//! let mut s = String::from(reusable_buff);
//! s.push_str("hello, world!");
//! pool.attach(s.into_bytes()); // reattach the buffer before reusable goes out of scope
//! // reusable_buff is automatically returned to the pool when it goes out of scope
//! ```
//!
//! ## Using Across Threads
//!
//! You simply wrap the pool in a [`std::sync::Arc`]
//! ```
//! let pool: Arc<MemoryPool<T>> = Arc::new(MemoryPool::new(cap, || T::new()));
//! ```
//!
//! # Warning
//!
//! Objects in the pool are not automatically reset, they are returned but NOT reset
//! You may want to call `object.reset()` or `object.clear()`
//! or any other equivalent for the object that you are using, after pulling from the pool
//!
//! [`std::sync::Arc`]: https://doc.rust-lang.org/stable/std/sync/struct.Arc.html
mod multi_buf;
mod semphore;
pub use multi_buf::{MultiBuffer, GetSegs};
use crossbeam::channel;
use std::ops::{Deref, DerefMut};
use parking_lot::{Mutex, Condvar};
use std::mem::{ManuallyDrop, forget};
use std::sync::Arc;
use std::thread;
use log::{trace};
pub use semphore::Semphore;
use parking_lot::lock_api::MutexGuard;
use futures::SinkExt;
use std::thread::sleep;
pub type Stack<T> = Vec<T>;
pub struct PendingInfo<T>
where T: Sync + Send + 'static
{
id: String,
notifier: channel::Sender<T>,
}
pub struct WaitingInfo<T>
where T: Sync + Send + 'static
{
id: String,
// Channel used to send the resume signal
notifier: channel::Sender<T>,
/// Minimum number of memory units required before this waiter can be resumed
min_request: usize,
}
pub struct MemoryPool<T>
where T: Sync + Send + 'static
{
objects: (channel::Sender<T>, channel::Receiver<T>),
// Callers currently waiting for data
pending: Arc<Mutex<Vec<PendingInfo<Reusable<T>>>>>,
/// Callers that have gone to sleep
waiting: Arc<Mutex<Vec<WaitingInfo<Reusable<T>>>>>,
run_block: Arc<Mutex<()>>,
pending_block: Arc<Mutex<()>>,
// recycle: (channel::Sender<Reusable<'a,T>>, channel::Receiver<Reusable<'a,T>>),
}
impl<T> MemoryPool<T> where T: Sync + Send + 'static {
#[inline]
pub fn new<F>(cap: usize, init: F) -> MemoryPool<T>
where
F: Fn() -> T,
{
// //println!("mempool remains:{}", cap);
log::trace!("mempool remains:{}", cap);
let objects = channel::unbounded();
for _ in 0..cap {
objects.0.send(init()).unwrap();
}
MemoryPool {
objects,
pending: Arc::new(Mutex::new(Vec::new())),
waiting: Arc::new(Mutex::new(Vec::new())),
run_block: Arc::new(Mutex::new(())),
pending_block: Arc::new(Mutex::new(())),
}
}
#[inline]
pub fn len(&self) -> usize {
self.objects.1.len()
}
#[inline]
pub fn is_empty(&self) -> bool {
self.objects.1.is_empty()
}
#[inline]
pub fn pending(&'static self, str: &str, sender: channel::Sender<Reusable<T>>, releasable: usize) -> (Option<Reusable<T>>, bool) {
log::trace!("pending item:{}", str);
let _x = self.pending_block.lock();
let ret = if let Ok(item) = self.objects.1.try_recv() {
log::trace!("get ok:{}", str);
(Some(Reusable::new(&self, item)), false)
/* } else if (self.pending.lock().len() == 0) {
log::trace!("get should pend:{}", str);
self.pending.lock().push(PendingInfo {
id: String::from(str),
notifier: sender.clone(),
});
(None, false)*/
} else {
let to_retry = { self.waiting.lock().len() * 60 + 2 };
log::trace!("try again :{} with retries backoff:{}", str, to_retry);
for i in 0..to_retry {
sleep(std::time::Duration::from_secs(1));
if let Ok(item) = self.objects.1.try_recv() {
log::trace!("get ok:{}", str);
return (Some(Reusable::new(&self, item)), false);
}
}
log::trace!("get should sleep :{}", str);
self.waiting.lock().push(WaitingInfo {
id: String::from(str),
notifier: sender.clone(),
min_request: releasable,
});
(None, true)
};
ret
}
#[inline]
pub fn attach(&'static self, t: T) {
let _x = self.run_block.lock();
log::trace!("attach started<<<<<<<<<<<<<<<<");
log::trace!("recyled an item ");
let mut wait_list = { self.waiting.lock() };
log::trace!("check waiting list ok :{}", wait_list.len());
if wait_list.len() > 0 && self.len() >= wait_list[0].min_request {
log::trace!("remove ok<<<<<<<<<<<<<<< ");
let item = wait_list.remove(0);
log::trace!("start wakeup<<<<<<<<<<<<<<<<<<<");
//&wait_list.remove(0);
self.objects.0.send(t).unwrap();
log::trace!("free cnts:{}, waking up {}/ with min req:{} now.... ", self.len(), item.id.clone(), item.min_request);
for i in 0..item.min_request + 1 {
item.notifier.send(Reusable::new(&self, self.objects.1.recv().unwrap())).unwrap_or_else(|e|{
log::warn!("notifier send failed");
});
}
drop(item);
// thread::spawn(move || {
// item.notifier.send(()).unwrap();
// });
} else if self.pending.lock().len() > 0 {
drop(wait_list);
let pending_item = self.pending.lock().remove(0);
log::trace!("fill pending:{}", pending_item.id);
// thread::spawn(move || {
// pending_item.notifier.send(());
// });
pending_item.notifier.send(Reusable::new(&self, t));
} else {
// drop(wait_list);
self.objects.0.send(t).unwrap();
log::trace!("push to queue:{}", self.len());
}
}
}
pub struct Reusable<T>
where T: Sync + Send + 'static {
pool: &'static MemoryPool<T>,
data: ManuallyDrop<T>,
}
impl<T> Reusable<T>
where T: Sync + Send + 'static {
#[inline]
pub fn new(pool: &'static MemoryPool<T>, t: T) -> Self {
Self {
pool,
data: ManuallyDrop::new(t),
}
}
// #[inline]
// pub fn detach(mut self) -> (&'a MemoryPool<T>, T) {
// let ret = unsafe { (self.pool, self.take()) };
// forget(self);
// ret
// }
//
unsafe fn take(&mut self) -> T {
ManuallyDrop::take(&mut self.data)
}
}
impl<T> Deref for Reusable<T>
where T: Sync + Send + 'static
{
type Target = T;
#[inline]
fn deref(&self) -> &Self::Target {
&self.data
}
}
impl<T> DerefMut for Reusable<T>
where T: Sync + Send + 'static
{
#[inline]
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.data
}
}
impl<T> Drop for Reusable<T>
where T: Sync + Send + 'static
{
#[inline]
fn drop(&mut self) {
unsafe { self.pool.attach(self.take()); }
}
}
#[cfg(test)]
mod tests {
use crate::{MemoryPool, Reusable};
use std::mem::drop;
use std::ops::DerefMut;
use std::thread;
use std::sync::Arc;
// #[test]
// fn pull() {
// let pool = Arc::new(MemoryPool::<Vec<u8>>::new(3, || Vec::new()));
// let pool2 = pool.clone();
// let t1 = thread::spawn(move ||{
// let object1 = pool.lock().pull();
// //println!("retain 1");
// thread::sleep(std::time::Duration::from_secs(1));
//
// let object2 = pool.pull();
// //println!("retain 2");
// thread::sleep(std::time::Duration::from_secs(1));
//
// let object3 = pool.pull();
// //println!("retain 3");
// thread::sleep(std::time::Duration::from_secs(1));
//
// //println!("drop 1");
// drop(object1);
// thread::sleep(std::time::Duration::from_secs(1));
//
// //println!("drop 2");
// drop(object2);
// thread::sleep(std::time::Duration::from_secs(1));
//
// //println!("drop 3");
// drop(object3);
// thread::sleep(std::time::Duration::from_secs(1));
//
// });
// let t2 = thread::spawn(move ||{
// //println!(">>>wait for 2.5s");
// thread::sleep(std::time::Duration::from_millis(2500));
// //println!(">>>try to retain 1.....");
// let object2 = pool2.pull();
// //println!(">>>retained 1");
// //println!(">>>try to retain 2.....");
// let object2 = pool2.pull();
// //println!(">>>retained 1");
// //println!(">>>try to retain 3.....");
// let object2 = pool2.pull();
// //println!(">>>retained 1");
//
// thread::sleep(std::time::Duration::from_secs(1));
//
// //println!(">>>dropped");
// drop(object2);
// thread::sleep(std::time::Duration::from_secs(1));
//
// });
// t1.join();
// t2.join();
//
// }
#[test]
fn e2e() {
// let pool = MemoryPool::new(10, || Vec::new());
// let mut objects = Vec::new();
//
// thread::spawn(||{
// for i in 0..10 {
// let mut object = pool.pull();
// }
// });
//
//
//
// drop(objects);
//
//
// for i in 10..0 {
// let mut object = pool.objects.lock().pop().unwrap();
// assert_eq!(object.pop(), Some(i));
// }
}
}
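// Editor's sketch (not part of the original file): the module docs above describe a pull()
// style API, but this version exposes pending()/attach() instead, and Reusable re-attaches
// itself on drop. The pool is leaked here to satisfy the &'static receiver those methods require.
#[allow(dead_code)]
fn usage_sketch() {
    let pool: &'static MemoryPool<Vec<u8>> =
        Box::leak(Box::new(MemoryPool::new(4, || Vec::with_capacity(4096))));
    // The sender is handed to the pool so it can push a buffer to this caller later if none is free.
    let (tx, _rx) = channel::unbounded();
    if let (Some(buf), _went_to_sleep) = pool.pending("worker-1", tx, 1) {
        // Use `buf` here; dropping it calls pool.attach() and returns the memory to the pool.
        drop(buf);
    }
}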
| true |
408bf3f23e1ce3226651dcaa005541d118b68fcb | Rust | irauta/reframe | /src/mesh.rs | UTF-8 | 1,129 | 3.265625 | 3 | [] | no_license |
use std::rc::Rc;
use regl::{VertexArray, PrimitiveMode, IndexType};
pub enum DrawType {
/// Non-indexed drawing.
NonIndexed,
/// Indexed drawing.
Indexed {
/// u8, u16 or u32 indices?
index_type: IndexType,
/// How far in the index buffer is the first index?
starting_index: u32,
},
}
pub struct IndexedDrawParameters {
/// Triangles, lines or something else.
pub primitive_mode: PrimitiveMode,
/// Non-indexed or indexed? See the enum for details.
pub draw_type: DrawType,
/// First vertex in the vertex buffer to draw, or the baseindex when doing indexed drawing.
pub first_vertex: u32,
/// How many vertices/elements to draw. When drawing triangles, value of this field is
/// 3 * number of triangles.
pub count: u32,
/// Not really supported yet...but should tell the number of instances. Zero for effectively
/// no instancing.
pub instance_count: u32,
/// Vertex array that has the vertices and indices to draw from.
/// Not sure if this field belongs to this structure.
pub vertex_array: Rc<VertexArray>,
}
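// Editor's sketch (not part of the original file): filling in the parameters for a plain,
// non-indexed, non-instanced draw. `some_mode` and `vao` are assumed to come from the
// regl-based renderer elsewhere; PrimitiveMode's variant names are not shown in this file.
#[allow(dead_code)]
fn example_params(some_mode: PrimitiveMode, vao: Rc<VertexArray>, triangle_count: u32) -> IndexedDrawParameters {
    IndexedDrawParameters {
        primitive_mode: some_mode,
        draw_type: DrawType::NonIndexed,
        first_vertex: 0,
        count: 3 * triangle_count, // three vertices per triangle, as the field docs note
        instance_count: 0,         // effectively no instancing
        vertex_array: vao,
    }
}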
| true |
451ea07d88f8ab2966fc5504d3d5224d62df5f91 | Rust | mbillingr/lisp-in-small-pieces | /project/src/syntax/expression.rs | UTF-8 | 4,653 | 2.625 | 3 | [] | no_license |
use super::alternative::Alternative;
use super::application::Application;
use super::assignment::Assignment;
use super::boxes::{BoxCreate, BoxRead, BoxWrite};
use super::closure::FlatClosure;
use super::constant::Constant;
use super::definition::GlobalDefine;
use super::fixlet::FixLet;
use super::function::Function;
use super::keyword::MagicKeyword;
use super::let_continuation::LetContinuation;
use super::noop::NoOp;
use super::reference::Reference;
use super::sequence::Sequence;
use crate::ast_transform::{Transformer, Visited};
use crate::scm::Scm;
use crate::source::SourceLocation;
use crate::source::SourceLocation::NoSource;
use crate::syntax::Reify;
use crate::utils::Sourced;
sum_types! {
#[derive(Debug, Clone)]
pub type Expression = NoOp
| MagicKeyword
| Reference
| Assignment
| Constant
| Sequence
| Alternative
| Function
| Application
| FixLet
| BoxRead
| BoxWrite
| BoxCreate
| FlatClosure
| GlobalDefine
| LetContinuation;
}
impl Expression {
pub fn transform(self, visitor: &mut impl Transformer) -> Self {
match visitor.visit(self) {
Visited::Transformed(expr) => expr,
Visited::Recurse(expr) => expr.default_transform(visitor),
}
}
fn default_transform(self, visitor: &mut impl Transformer) -> Self {
use Expression::*;
match self {
MagicKeyword(x) => x.default_transform(visitor).into(),
Reference(x) => x.default_transform(visitor).into(),
Assignment(x) => x.default_transform(visitor).into(),
Constant(x) => x.default_transform(visitor).into(),
Sequence(x) => x.default_transform(visitor).into(),
Alternative(x) => x.default_transform(visitor).into(),
Function(x) => x.default_transform(visitor).into(),
Application(x) => x.default_transform(visitor).into(),
FixLet(x) => x.default_transform(visitor).into(),
BoxRead(x) => x.default_transform(visitor).into(),
BoxWrite(x) => x.default_transform(visitor).into(),
BoxCreate(x) => x.default_transform(visitor).into(),
FlatClosure(x) => x.default_transform(visitor).into(),
GlobalDefine(x) => x.default_transform(visitor).into(),
NoOp(x) => x.default_transform(visitor).into(),
LetContinuation(x) => x.default_transform(visitor).into(),
}
}
pub fn splice(self, other: Self) -> Self {
use Expression::*;
match self {
Sequence(mut s) => {
s.append(other);
s.into()
}
NoOp(_) => other,
_ => super::Sequence::new(self, other, NoSource).into(),
}
}
}
impl Sourced for Expression {
fn source(&self) -> &SourceLocation {
use Expression::*;
match self {
MagicKeyword(_) => &SourceLocation::NoSource,
Reference(x) => x.source(),
Assignment(x) => x.source(),
Constant(x) => x.source(),
Sequence(x) => x.source(),
Alternative(x) => x.source(),
Application(x) => x.source(),
Function(x) => x.source(),
FixLet(x) => x.source(),
BoxRead(x) => x.source(),
BoxWrite(x) => x.source(),
BoxCreate(x) => x.source(),
FlatClosure(x) => x.source(),
GlobalDefine(x) => x.source(),
NoOp(x) => x.source(),
LetContinuation(x) => x.source(),
}
}
}
impl Reify for Expression {
fn reify(&self) -> Scm {
use Expression::*;
match self {
MagicKeyword(mkw) => Scm::Symbol(mkw.name),
Reference(ast) => ast.reify(),
Assignment(ast) => ast.reify(),
Constant(ast) => ast.reify(),
Sequence(ast) => ast.reify(),
Alternative(ast) => ast.reify(),
Application(ast) => ast.reify(),
Function(ast) => ast.reify(),
FixLet(ast) => ast.reify(),
BoxRead(_) | BoxWrite(_) | BoxCreate(_) => unimplemented!(),
FlatClosure(_) => unimplemented!(),
GlobalDefine(ast) => ast.reify(),
NoOp(_) => Scm::Undefined,
LetContinuation(ast) => ast.reify(),
}
}
}
| true |
e98b1d8b5cb93d9493cdecedb8782ef3ef753f94 | Rust | giodamelio/little_boxes | /vendor/anstream/src/adapter/strip.rs | UTF-8 | 14,496 | 3.28125 | 3 | ["Apache-2.0", "MIT", "LicenseRef-scancode-unknown-license-reference"] | permissive |
use anstyle_parse::state::state_change;
use anstyle_parse::state::Action;
use anstyle_parse::state::State;
/// Strip ANSI escapes from a `&str`, returning the printable content
///
/// This can be used to take output from a program that includes escape sequences and write it
/// somewhere that does not easily support them, such as a log file.
///
/// For non-contiguous data, see [`StripStr`].
///
/// # Example
///
/// ```rust
/// use std::io::Write as _;
///
/// let styled_text = "\x1b[32mfoo\x1b[m bar";
/// let plain_str = anstream::adapter::strip_str(&styled_text).to_string();
/// assert_eq!(plain_str, "foo bar");
/// ```
#[inline]
pub fn strip_str(data: &str) -> StrippedStr<'_> {
StrippedStr::new(data)
}
/// See [`strip_str`]
#[derive(Default, Clone, Debug, PartialEq, Eq)]
pub struct StrippedStr<'s> {
bytes: &'s [u8],
state: State,
}
impl<'s> StrippedStr<'s> {
#[inline]
fn new(data: &'s str) -> Self {
Self {
bytes: data.as_bytes(),
state: State::Ground,
}
}
/// Create a [`String`] of the printable content
#[inline]
#[allow(clippy::inherent_to_string_shadow_display)] // Single-allocation implementation
pub fn to_string(&self) -> String {
use std::fmt::Write as _;
let mut stripped = String::with_capacity(self.bytes.len());
let _ = write!(&mut stripped, "{}", self);
stripped
}
}
impl<'s> std::fmt::Display for StrippedStr<'s> {
/// **Note:** this does *not* exhaust the [`Iterator`]
#[inline]
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let iter = Self {
bytes: self.bytes,
state: self.state,
};
for printable in iter {
printable.fmt(f)?;
}
Ok(())
}
}
impl<'s> Iterator for StrippedStr<'s> {
type Item = &'s str;
#[inline]
fn next(&mut self) -> Option<Self::Item> {
next_str(&mut self.bytes, &mut self.state)
}
}
/// Incrementally strip non-contiguous data
#[derive(Default, Clone, Debug, PartialEq, Eq)]
pub struct StripStr {
state: State,
}
impl StripStr {
/// Initial state
pub fn new() -> Self {
Default::default()
}
/// Strip the next segment of data
pub fn strip_next<'s>(&'s mut self, data: &'s str) -> StripStrIter<'s> {
StripStrIter {
bytes: data.as_bytes(),
state: &mut self.state,
}
}
}
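// Illustrative test (added here; not part of the upstream file): `StripStr` keeps
// its parser state between calls, so non-contiguous input can be stripped one
// chunk at a time. Each chunk below contains complete escape sequences.
#[test]
fn strip_str_two_chunk_example() {
    let mut state = StripStr::new();
    let mut out = String::new();
    for printable in state.strip_next("\x1b[32mfoo") {
        out.push_str(printable);
    }
    for printable in state.strip_next("\x1b[m bar") {
        out.push_str(printable);
    }
    assert_eq!(out, "foo bar");
}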
/// See [`StripStr`]
#[derive(Debug, PartialEq, Eq)]
pub struct StripStrIter<'s> {
bytes: &'s [u8],
state: &'s mut State,
}
impl<'s> Iterator for StripStrIter<'s> {
type Item = &'s str;
#[inline]
fn next(&mut self) -> Option<Self::Item> {
next_str(&mut self.bytes, self.state)
}
}
#[inline]
fn next_str<'s>(bytes: &mut &'s [u8], state: &mut State) -> Option<&'s str> {
let offset = bytes.iter().copied().position(|b| {
let (next_state, action) = state_change(*state, b);
if next_state != State::Anywhere {
*state = next_state;
}
is_printable_str(action, b)
});
let (_, next) = bytes.split_at(offset.unwrap_or(bytes.len()));
*bytes = next;
*state = State::Ground;
let offset = bytes.iter().copied().position(|b| {
let (_next_state, action) = state_change(State::Ground, b);
!is_printable_str(action, b)
});
let (printable, next) = bytes.split_at(offset.unwrap_or(bytes.len()));
*bytes = next;
if printable.is_empty() {
None
} else {
let printable = unsafe {
from_utf8_unchecked(
printable,
"`bytes` was validated as UTF-8, the parser preserves UTF-8 continuations",
)
};
Some(printable)
}
}
#[inline]
unsafe fn from_utf8_unchecked<'b>(bytes: &'b [u8], safety_justification: &'static str) -> &'b str {
if cfg!(debug_assertions) {
// Catch problems more quickly when testing
std::str::from_utf8(bytes).expect(safety_justification)
} else {
std::str::from_utf8_unchecked(bytes)
}
}
#[inline]
fn is_printable_str(action: Action, byte: u8) -> bool {
// VT320 considered 0x7f to be `Print`able but we expect to be working in UTF-8 systems and not
// ISO Latin-1, making it DEL and non-printable
const DEL: u8 = 0x7f;
(action == Action::Print && byte != DEL)
|| action == Action::BeginUtf8
// since we know the input is valid UTF-8, the only thing we can do with
// continuations is to print them
|| is_utf8_continuation(byte)
|| (action == Action::Execute && byte.is_ascii_whitespace())
}
#[inline]
fn is_utf8_continuation(b: u8) -> bool {
matches!(b, 0x80..=0xbf)
}
/// Strip ANSI escapes from bytes, returning the printable content
///
/// This can be used to take output from a program that includes escape sequences and write it
/// somewhere that does not easily support them, such as a log file.
///
/// # Example
///
/// ```rust
/// use std::io::Write as _;
///
/// let styled_text = "\x1b[32mfoo\x1b[m bar";
/// let plain_str = anstream::adapter::strip_bytes(styled_text.as_bytes()).into_vec();
/// assert_eq!(plain_str.as_slice(), &b"foo bar"[..]);
/// ```
#[inline]
pub fn strip_bytes(data: &[u8]) -> StrippedBytes<'_> {
StrippedBytes::new(data)
}
/// See [`strip_bytes`]
#[derive(Default, Clone, Debug, PartialEq, Eq)]
pub struct StrippedBytes<'s> {
bytes: &'s [u8],
state: State,
utf8parser: Utf8Parser,
}
impl<'s> StrippedBytes<'s> {
/// See [`strip_bytes`]
#[inline]
pub fn new(bytes: &'s [u8]) -> Self {
Self {
bytes,
state: State::Ground,
utf8parser: Default::default(),
}
}
/// Strip the next slice of bytes
///
/// Used when the content is in several non-contiguous slices
///
/// # Panic
///
/// May panic if it is not exhausted / empty
#[inline]
pub fn extend(&mut self, bytes: &'s [u8]) {
debug_assert!(
self.is_empty(),
"current bytes must be processed to ensure we end at the right state"
);
self.bytes = bytes;
}
    /// Report whether the bytes have been exhausted
#[inline]
pub fn is_empty(&self) -> bool {
self.bytes.is_empty()
}
/// Create a [`Vec`] of the printable content
#[inline]
pub fn into_vec(self) -> Vec<u8> {
let mut stripped = Vec::with_capacity(self.bytes.len());
for printable in self {
stripped.extend(printable);
}
stripped
}
}
impl<'s> Iterator for StrippedBytes<'s> {
type Item = &'s [u8];
#[inline]
fn next(&mut self) -> Option<Self::Item> {
next_bytes(&mut self.bytes, &mut self.state, &mut self.utf8parser)
}
}
/// Incrementally strip non-contiguous data
#[derive(Default, Clone, Debug, PartialEq, Eq)]
pub struct StripBytes {
state: State,
utf8parser: Utf8Parser,
}
impl StripBytes {
/// Initial state
pub fn new() -> Self {
Default::default()
}
/// Strip the next segment of data
pub fn strip_next<'s>(&'s mut self, bytes: &'s [u8]) -> StripBytesIter<'s> {
StripBytesIter {
bytes,
state: &mut self.state,
utf8parser: &mut self.utf8parser,
}
}
}
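// Illustrative test (added here; not part of the upstream file): the byte-oriented
// `StripBytes` carries its state across calls as well, so here the escape sequence
// `\x1b[32m` is deliberately split across the two chunks.
#[test]
fn strip_bytes_two_chunk_example() {
    let mut state = StripBytes::new();
    let mut out = Vec::new();
    for printable in state.strip_next(b"\x1b[3") {
        out.extend_from_slice(printable);
    }
    for printable in state.strip_next(b"2mfoo\x1b[m bar") {
        out.extend_from_slice(printable);
    }
    assert_eq!(out.as_slice(), &b"foo bar"[..]);
}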
/// See [`StripBytes`]
#[derive(Debug, PartialEq, Eq)]
pub struct StripBytesIter<'s> {
bytes: &'s [u8],
state: &'s mut State,
utf8parser: &'s mut Utf8Parser,
}
impl<'s> Iterator for StripBytesIter<'s> {
type Item = &'s [u8];
#[inline]
fn next(&mut self) -> Option<Self::Item> {
next_bytes(&mut self.bytes, self.state, self.utf8parser)
}
}
#[inline]
fn next_bytes<'s>(
bytes: &mut &'s [u8],
state: &mut State,
utf8parser: &mut Utf8Parser,
) -> Option<&'s [u8]> {
let offset = bytes.iter().copied().position(|b| {
if *state == State::Utf8 {
true
} else {
let (next_state, action) = state_change(*state, b);
if next_state != State::Anywhere {
*state = next_state;
}
is_printable_bytes(action, b)
}
});
let (_, next) = bytes.split_at(offset.unwrap_or(bytes.len()));
*bytes = next;
let offset = bytes.iter().copied().position(|b| {
if *state == State::Utf8 {
if utf8parser.add(b) {
*state = State::Ground;
}
false
} else {
let (next_state, action) = state_change(State::Ground, b);
if next_state != State::Anywhere {
*state = next_state;
}
if *state == State::Utf8 {
utf8parser.add(b);
false
} else {
!is_printable_bytes(action, b)
}
}
});
let (printable, next) = bytes.split_at(offset.unwrap_or(bytes.len()));
*bytes = next;
if printable.is_empty() {
None
} else {
Some(printable)
}
}
#[derive(Default, Clone, Debug, PartialEq, Eq)]
pub struct Utf8Parser {
utf8_parser: utf8parse::Parser,
}
impl Utf8Parser {
fn add(&mut self, byte: u8) -> bool {
let mut b = false;
let mut receiver = VtUtf8Receiver(&mut b);
self.utf8_parser.advance(&mut receiver, byte);
b
}
}
struct VtUtf8Receiver<'a>(&'a mut bool);
impl<'a> utf8parse::Receiver for VtUtf8Receiver<'a> {
fn codepoint(&mut self, _: char) {
*self.0 = true;
}
fn invalid_sequence(&mut self) {
*self.0 = true;
}
}
#[inline]
fn is_printable_bytes(action: Action, byte: u8) -> bool {
// VT320 considered 0x7f to be `Print`able but we expect to be working in UTF-8 systems and not
// ISO Latin-1, making it DEL and non-printable
const DEL: u8 = 0x7f;
// Continuations aren't included as they may also be control codes, requiring more context
(action == Action::Print && byte != DEL)
|| action == Action::BeginUtf8
|| (action == Action::Execute && byte.is_ascii_whitespace())
}
#[cfg(test)]
mod test {
use super::*;
use proptest::prelude::*;
/// Model based off full parser
fn parser_strip(bytes: &[u8]) -> String {
#[derive(Default)]
struct Strip(String);
impl Strip {
fn with_capacity(capacity: usize) -> Self {
Self(String::with_capacity(capacity))
}
}
impl anstyle_parse::Perform for Strip {
fn print(&mut self, c: char) {
self.0.push(c);
}
fn execute(&mut self, byte: u8) {
if byte.is_ascii_whitespace() {
self.0.push(byte as char);
}
}
}
let mut stripped = Strip::with_capacity(bytes.len());
let mut parser = anstyle_parse::Parser::<anstyle_parse::DefaultCharAccumulator>::new();
for byte in bytes {
parser.advance(&mut stripped, *byte);
}
stripped.0
}
/// Model verifying incremental parsing
fn strip_char(mut s: &str) -> String {
let mut result = String::new();
let mut state = StripStr::new();
while !s.is_empty() {
let mut indices = s.char_indices();
indices.next(); // current
let offset = indices.next().map(|(i, _)| i).unwrap_or_else(|| s.len());
let (current, remainder) = s.split_at(offset);
for printable in state.strip_next(current) {
result.push_str(printable);
}
s = remainder;
}
result
}
/// Model verifying incremental parsing
fn strip_byte(s: &[u8]) -> Vec<u8> {
let mut result = Vec::new();
let mut state = StripBytes::default();
for start in 0..s.len() {
let current = &s[start..=start];
for printable in state.strip_next(current) {
result.extend(printable);
}
}
result
}
#[test]
fn test_strip_bytes_multibyte() {
let bytes = [240, 145, 141, 139];
let expected = parser_strip(&bytes);
let actual = String::from_utf8(strip_bytes(&bytes).into_vec()).unwrap();
assert_eq!(expected, actual);
}
#[test]
fn test_strip_byte_multibyte() {
let bytes = [240, 145, 141, 139];
let expected = parser_strip(&bytes);
let actual = String::from_utf8(strip_byte(&bytes).to_vec()).unwrap();
assert_eq!(expected, actual);
}
#[test]
fn test_strip_str_del() {
let input = std::str::from_utf8(&[0x7f]).unwrap();
let expected = "";
let actual = strip_str(input).to_string();
assert_eq!(expected, actual);
}
#[test]
fn test_strip_byte_del() {
let bytes = [0x7f];
let expected = "";
let actual = String::from_utf8(strip_byte(&bytes).to_vec()).unwrap();
assert_eq!(expected, actual);
}
proptest! {
#[test]
#[cfg_attr(miri, ignore)] // See https://github.com/AltSysrq/proptest/issues/253
fn strip_str_no_escapes(s in "\\PC*") {
let expected = parser_strip(s.as_bytes());
let actual = strip_str(&s).to_string();
assert_eq!(expected, actual);
}
#[test]
#[cfg_attr(miri, ignore)] // See https://github.com/AltSysrq/proptest/issues/253
fn strip_char_no_escapes(s in "\\PC*") {
let expected = parser_strip(s.as_bytes());
let actual = strip_char(&s);
assert_eq!(expected, actual);
}
#[test]
#[cfg_attr(miri, ignore)] // See https://github.com/AltSysrq/proptest/issues/253
fn strip_bytes_no_escapes(s in "\\PC*") {
dbg!(&s);
dbg!(s.as_bytes());
let expected = parser_strip(s.as_bytes());
let actual = String::from_utf8(strip_bytes(s.as_bytes()).into_vec()).unwrap();
assert_eq!(expected, actual);
}
#[test]
#[cfg_attr(miri, ignore)] // See https://github.com/AltSysrq/proptest/issues/253
fn strip_byte_no_escapes(s in "\\PC*") {
dbg!(&s);
dbg!(s.as_bytes());
let expected = parser_strip(s.as_bytes());
let actual = String::from_utf8(strip_byte(s.as_bytes()).to_vec()).unwrap();
assert_eq!(expected, actual);
}
}
}
| true |
b3e2be63ea2b156257532f4e00c55d7713f69d0b
|
Rust
|
forbjok/vut
|
/lib/src/project/config/glob.rs
|
UTF-8
| 1,010 | 2.984375 | 3 |
[
"MIT",
"Apache-2.0"
] |
permissive
|
use std::borrow::Cow;
use serde_derive::Deserialize;
use crate::project::VutError;
/// One or more glob patterns.
#[derive(Clone, Debug, Deserialize)]
#[serde(rename_all = "kebab-case")]
#[serde(untagged)]
pub enum Globs {
Single(String),
Multiple(Vec<String>),
}
impl Globs {
pub fn to_vec(&self) -> Vec<String> {
match self {
Self::Single(value) => vec![value.clone()],
Self::Multiple(values) => values.clone(),
}
}
pub fn build_globset(&self) -> Result<globset::GlobSet, VutError> {
let glob_strings = self.to_vec();
let mut builder = globset::GlobSetBuilder::new();
for glob_str in glob_strings.iter() {
let glob = globset::Glob::new(glob_str).map_err(|err| VutError::Other(Cow::Owned(err.to_string())))?;
builder.add(glob);
}
let globset = builder
.build()
.map_err(|err| VutError::Other(Cow::Owned(err.to_string())))?;
Ok(globset)
}
}
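// Small usage sketch (added for illustration; not part of the original file):
// both forms of `Globs` feed the same `GlobSet` builder and can then be matched
// against paths.
#[test]
fn globs_single_and_multiple_forms_build() {
    let single = Globs::Single("src/**/*.rs".to_string());
    let multiple = Globs::Multiple(vec!["src/**/*.rs".to_string(), "Cargo.toml".to_string()]);
    let single_set = match single.build_globset() {
        Ok(set) => set,
        Err(_) => panic!("single glob should build"),
    };
    let multiple_set = match multiple.build_globset() {
        Ok(set) => set,
        Err(_) => panic!("multiple globs should build"),
    };
    assert!(single_set.is_match("src/project/config/glob.rs"));
    assert!(multiple_set.is_match("Cargo.toml"));
}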
| true |
f9dea46938c25106f0800946c12f562c3a8983d5
|
Rust
|
ivanceras/restq
|
/src/data_type.rs
|
UTF-8
| 9,366 | 3.015625 | 3 |
[
"MIT"
] |
permissive
|
use crate::{ast::parser::ident, Error};
use pom::parser::*;
use serde::{Deserialize, Serialize};
use sql_ast::ast as sql;
use std::fmt;
/// restq supports comprehensive data types
/// based on rust and postgresql type, combined together
/// format: <data_type>[?](constraint)
/// ? - indicates it is optional, nullable in database context
/// example:
/// text? - nullable text
/// text(8..) - text with at least 8 characters long
/// text(..255) - text must not be more than 255 characters long.
/// u32(1) - u32 with default value of 1
/// u32(>10) - check value should be greater than 10
/// u32(10<column<=20) - check the value should be greater than 10 and less than or equal to 20
/// u32(<discount) - check value should be less than the `discount` column
/// f32(0.0) - f32 with 0.0 as the default value
#[derive(PartialEq, Debug, Clone, Hash, Eq, Serialize, Deserialize)]
pub enum DataType {
/// bool
Bool,
/// 8 bit serial integer
S8,
/// 16 bit serial integer
S16,
/// 32 bit serial integer
S32,
/// 64 bit serial integer
S64,
/// f32
F32,
/// f64
F64,
/// u8
U8,
/// u16
U16,
/// u32
U32,
/// u64
U64,
/// i8
I8,
/// i16
I16,
/// i32
I32,
/// i64
I64,
/// Uuid, no default specified
Uuid,
/// Uuid with random as the default
UuidRand,
/// create a new uuid and generate a url friendly base64 using blob_uuid
UuidSlug,
/// local time with now as the default
Local,
/// Utc time with now as the default
Utc,
/// text/strings, generic text, no interpretation
Text,
    /// A valid identifier string, beginning with an alphabetic or underscore character and
    /// optionally followed by alphanumeric characters
Ident,
/// A valid url
Url,
/// json type
Json,
/// bytes
Bytes,
}
impl DataType {
///returns all the supported data types
pub fn all() -> Vec<DataType> {
vec![
DataType::Bool,
DataType::S8,
DataType::S16,
DataType::S32,
DataType::S64,
DataType::F32,
DataType::F64,
DataType::U8,
DataType::U16,
DataType::U32,
DataType::U64,
DataType::I8,
DataType::I16,
DataType::I32,
DataType::I64,
DataType::Uuid,
DataType::UuidRand,
DataType::UuidSlug,
DataType::Local,
DataType::Utc,
DataType::Text,
DataType::Ident,
DataType::Url,
DataType::Json,
DataType::Bytes,
]
}
fn match_data_type(dt: &str) -> Result<Self, Error> {
match dt {
"bool" => Ok(DataType::Bool),
"s8" => Ok(DataType::S8),
"s16" => Ok(DataType::S16),
"s32" => Ok(DataType::S32),
"s64" => Ok(DataType::S64),
"u8" => Ok(DataType::U8),
"u16" => Ok(DataType::U16),
"u32" => Ok(DataType::U32),
"u64" => Ok(DataType::U64),
"i8" => Ok(DataType::I8),
"i16" => Ok(DataType::I16),
"i32" => Ok(DataType::I32),
"i64" => Ok(DataType::I64),
"f32" => Ok(DataType::F32),
"f64" => Ok(DataType::F64),
"uuid" => Ok(DataType::Uuid),
"uuid_rand" => Ok(DataType::UuidRand),
"uuid_slug" => Ok(DataType::UuidSlug),
"local" => Ok(DataType::Local),
"utc" => Ok(DataType::Utc),
"text" => Ok(DataType::Text),
"ident" => Ok(DataType::Ident),
"url" => Ok(DataType::Url),
"json" => Ok(DataType::Json),
"bytes" => Ok(DataType::Bytes),
_ => Err(Error::InvalidDataType(dt.to_string())),
}
}
/// returns true if type is numeric or not
pub fn is_numeric(&self) -> bool {
match self {
DataType::S8
| DataType::S16
| DataType::S32
| DataType::S64
| DataType::F32
| DataType::F64
| DataType::U8
| DataType::U16
| DataType::U32
| DataType::U64
| DataType::I8
| DataType::I16
| DataType::I32
| DataType::I64 => true,
_ => false,
}
}
pub fn is_autogenerate(&self) -> bool {
match self {
DataType::S8 | DataType::S16 | DataType::S32 | DataType::S64 => {
true
}
DataType::UuidRand | DataType::UuidSlug => true,
_ => false,
}
}
}
impl fmt::Display for DataType {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let display = match self {
DataType::Bool => "bool",
DataType::S8 => "s8",
DataType::S16 => "s16",
DataType::S32 => "s32",
DataType::S64 => "s64",
DataType::F32 => "f32",
DataType::F64 => "f64",
DataType::U8 => "u8",
DataType::U16 => "u16",
DataType::U32 => "u32",
DataType::U64 => "u64",
DataType::I8 => "i8",
DataType::I16 => "i16",
DataType::I32 => "i32",
DataType::I64 => "i64",
DataType::Uuid => "uuid",
DataType::UuidRand => "uuid_rand",
DataType::UuidSlug => "uuid_slug",
DataType::Local => "local",
DataType::Utc => "utc",
DataType::Text => "text",
DataType::Ident => "ident",
DataType::Url => "url",
DataType::Json => "json",
DataType::Bytes => "bytes",
};
write!(f, "{}", display)
}
}
pub fn data_type<'a>() -> Parser<'a, char, DataType> {
ident().convert(|v| DataType::match_data_type(&v))
}
/// the corresponding sql type for this data type
impl Into<sql::DataType> for &DataType {
fn into(self) -> sql::DataType {
//TODO: serial is for postgresql only
let serial =
sql::DataType::Custom(sql::ObjectName(vec![sql::Ident::new(
"SERIAL",
)]));
let big_serial =
sql::DataType::Custom(sql::ObjectName(vec![sql::Ident::new(
"BIGSERIAL",
)]));
match self {
DataType::Bool => sql::DataType::Boolean,
DataType::S8 => serial,
DataType::S16 => serial,
DataType::S32 => serial,
DataType::S64 => big_serial,
DataType::F32 => sql::DataType::Float(None),
DataType::F64 => sql::DataType::Float(None),
DataType::U8 => sql::DataType::SmallInt,
DataType::U16 => sql::DataType::SmallInt,
DataType::U32 => sql::DataType::Int,
DataType::U64 => sql::DataType::BigInt,
DataType::I8 => sql::DataType::SmallInt,
DataType::I16 => sql::DataType::SmallInt,
DataType::I32 => sql::DataType::Int,
DataType::I64 => sql::DataType::BigInt,
DataType::Uuid => sql::DataType::Uuid,
DataType::UuidRand => sql::DataType::Uuid,
DataType::UuidSlug => sql::DataType::Text,
DataType::Local => sql::DataType::Timestamp,
DataType::Utc => sql::DataType::Timestamp,
DataType::Text => sql::DataType::Text,
DataType::Ident => sql::DataType::Text,
DataType::Url => sql::DataType::Text,
DataType::Json => sql::DataType::Json,
DataType::Bytes => sql::DataType::Bytea,
}
}
}
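// Illustrative round trip (added here; not part of the original file): parse a
// restq type name with `data_type()` and map it to its SQL counterpart through
// the `Into<sql::DataType>` impl above.
#[cfg(test)]
mod sql_mapping_example {
    use super::*;
    use crate::ast::parser::utils::*;
    use sql_ast::ast as sql;

    #[test]
    fn u32_maps_to_sql_int() {
        let input = to_chars("u32");
        let parsed = data_type().parse(&input).expect("must be parsed");
        let sql_type: sql::DataType = (&parsed).into();
        assert!(matches!(sql_type, sql::DataType::Int));
    }
}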
#[cfg(test)]
mod tests {
use super::*;
use crate::ast::parser::utils::*;
#[test]
fn test_data_type() {
let input = to_chars("s32");
let ret = data_type().parse(&input).expect("must be parsed");
println!("{:#?}", ret);
assert_eq!(ret, DataType::S32);
}
#[test]
fn test_invalid_data_type() {
let input = to_chars("x32");
let ret = data_type().parse(&input);
println!("{:#?}", ret);
assert!(ret.is_err());
}
#[test]
fn test_invalid_more_data_type() {
let input = to_chars("serial32");
let ret = data_type().parse(&input);
println!("{:#?}", ret);
assert!(ret.is_err());
let err = ret.err().unwrap();
println!("{}", err);
assert!(err.to_string().contains(r#"InvalidDataType("serial32")"#))
}
#[test]
fn all_data_types() {
let all = [
"bool",
"s8",
"s16",
"s32",
"s64",
"u8",
"u16",
"u32",
"u64",
"i8",
"i16",
"i32",
"i64",
"f32",
"f64",
"uuid",
"uuid_rand",
"uuid_slug",
"local",
"utc",
"text",
"ident",
"url",
            "json",
            "bytes",
        ];
for d in all.iter() {
println!("trying {}...", d);
let input = to_chars(d);
let ret = data_type().parse(&input).expect("must be parsed");
println!("{} = {:#?}", d, ret);
}
}
}
| true |
58d1b0a71def351b1eb95072df99a3e2e4b4380a
|
Rust
|
ja1den/research-project
|
/scenarios/1/rs/src/main.rs
|
UTF-8
| 1,008 | 3.015625 | 3 |
[
"MIT"
] |
permissive
|
// Import
use std::{env, fs, io};
// Main Function
fn main() {
// Resolve Directories
let root = env::current_dir()
.unwrap()
.join("..")
.canonicalize()
.unwrap();
let i_path = root.join("data/input").canonicalize().unwrap();
let o_path = root.join("data/rs").canonicalize().unwrap();
// File Names
let mut names = fs::read_dir(i_path)
.unwrap()
.map(|res| res.map(|e| e.path()))
.collect::<Result<Vec<_>, io::Error>>()
.unwrap();
names.sort();
// Read Files
for name in names {
// File Content
let read = fs::read_to_string(name.clone()).unwrap();
// Parse and Sort Numbers
let mut numbers: Vec<u64> = read.lines().map(|s| s.parse::<u64>().unwrap()).collect();
numbers.sort();
// Stringify Numbers
let data: Vec<String> = numbers.iter().map(|n| format!("{:0>8}", n)).collect();
// Create File
fs::write(o_path.join(name.file_name().unwrap()), data.join("\n")).unwrap();
// Sum Numbers
let sum: u64 = numbers.iter().sum();
println!("{}", sum);
}
}
| true |
bac7c64f26cdbc96ab95bc6347385e1801ce6b91
|
Rust
|
sile/erl_parse
|
/src/cst/form.rs
|
UTF-8
| 4,551 | 2.578125 | 3 |
[
"MIT"
] |
permissive
|
use erl_tokenize::tokens::AtomToken;
use erl_tokenize::values::Symbol;
use erl_tokenize::{LexicalToken, Position, PositionRange};
use super::forms;
use crate::traits::{Parse, TokenRead};
use crate::{ErrorKind, Parser, Result};
#[derive(Debug, Clone)]
#[allow(clippy::large_enum_variant)]
pub enum Form {
ModuleAttr(forms::ModuleAttr),
ExportAttr(forms::ExportAttr),
ExportTypeAttr(forms::ExportTypeAttr),
ImportAttr(forms::ImportAttr),
FileAttr(forms::FileAttr),
WildAttr(forms::WildAttr),
FunSpec(forms::FunSpec),
CallbackSpec(forms::CallbackSpec),
FunDecl(forms::FunDecl),
RecordDecl(forms::RecordDecl),
TypeDecl(forms::TypeDecl),
}
impl Parse for Form {
fn parse<T>(parser: &mut Parser<T>) -> Result<Self>
where
T: TokenRead,
{
let kind = track!(FormKind::guess(parser))?;
Ok(match kind {
FormKind::ModuleAttr => Form::ModuleAttr(track!(parser.parse())?),
FormKind::ExportAttr => Form::ExportAttr(track!(parser.parse())?),
FormKind::ExportTypeAttr => Form::ExportTypeAttr(track!(parser.parse())?),
FormKind::ImportAttr => Form::ImportAttr(track!(parser.parse())?),
FormKind::FileAttr => Form::FileAttr(track!(parser.parse())?),
FormKind::WildAttr => Form::WildAttr(track!(parser.parse())?),
FormKind::FunSpec => Form::FunSpec(track!(parser.parse())?),
FormKind::CallbackSpec => Form::CallbackSpec(track!(parser.parse())?),
FormKind::FunDecl => Form::FunDecl(track!(parser.parse())?),
FormKind::RecordDecl => Form::RecordDecl(track!(parser.parse())?),
FormKind::TypeDecl => Form::TypeDecl(track!(parser.parse())?),
})
}
}
impl PositionRange for Form {
fn start_position(&self) -> Position {
match *self {
Form::ModuleAttr(ref t) => t.start_position(),
Form::ExportAttr(ref t) => t.start_position(),
Form::ExportTypeAttr(ref t) => t.start_position(),
Form::ImportAttr(ref t) => t.start_position(),
Form::FileAttr(ref t) => t.start_position(),
Form::WildAttr(ref t) => t.start_position(),
Form::FunSpec(ref t) => t.start_position(),
Form::CallbackSpec(ref t) => t.start_position(),
Form::FunDecl(ref t) => t.start_position(),
Form::RecordDecl(ref t) => t.start_position(),
Form::TypeDecl(ref t) => t.start_position(),
}
}
fn end_position(&self) -> Position {
match *self {
Form::ModuleAttr(ref t) => t.end_position(),
Form::ExportAttr(ref t) => t.end_position(),
Form::ExportTypeAttr(ref t) => t.end_position(),
Form::ImportAttr(ref t) => t.end_position(),
Form::FileAttr(ref t) => t.end_position(),
Form::WildAttr(ref t) => t.end_position(),
Form::FunSpec(ref t) => t.end_position(),
Form::CallbackSpec(ref t) => t.end_position(),
Form::FunDecl(ref t) => t.end_position(),
Form::RecordDecl(ref t) => t.end_position(),
Form::TypeDecl(ref t) => t.end_position(),
}
}
}
#[derive(Debug)]
pub enum FormKind {
ModuleAttr,
ExportAttr,
ExportTypeAttr,
ImportAttr,
FileAttr,
WildAttr,
FunSpec,
CallbackSpec,
FunDecl,
RecordDecl,
TypeDecl,
}
impl FormKind {
pub fn guess<T>(parser: &mut Parser<T>) -> Result<Self>
where
T: TokenRead,
{
parser.peek(|parser| {
Ok(match track!(parser.parse())? {
LexicalToken::Symbol(ref t) if t.value() == Symbol::Hyphen => {
match track!(parser.parse::<AtomToken>())?.value() {
"module" => FormKind::ModuleAttr,
"export" => FormKind::ExportAttr,
"export_type" => FormKind::ExportTypeAttr,
"import" => FormKind::ImportAttr,
"file" => FormKind::FileAttr,
"spec" => FormKind::FunSpec,
"callback" => FormKind::CallbackSpec,
"record" => FormKind::RecordDecl,
"type" | "opaque" => FormKind::TypeDecl,
_ => FormKind::WildAttr,
}
}
LexicalToken::Atom(_) => FormKind::FunDecl,
token => track_panic!(ErrorKind::UnexpectedToken(token)),
})
})
}
}
| true |
7d75f8d122413093e1f16b8ffd094b0e7e45866b
|
Rust
|
leshow/exercism
|
/hackerrank/src/sum_of_even_numbers_after_queries.rs
|
UTF-8
| 1,363 | 3.546875 | 4 |
[] |
no_license
|
// 985. Sum of Even Numbers After Queries
// Easy
// We have an array A of integers, and an array queries of queries.
// For the i-th query val = queries[i][0], index = queries[i][1], we add val to
// A[index]. Then, the answer to the i-th query is the sum of the even values
// of A. (Here, the given index = queries[i][1] is a 0-based index, and each
// query permanently modifies the array A.) Return the answer to all queries.
// Your answer array should have answer[i] as the answer to the i-th query.
// Example 1:
// Input: A = [1,2,3,4], queries = [[1,0],[-3,1],[-4,0],[2,3]]
// Output: [8,6,2,4]
// Explanation:
// At the beginning, the array is [1,2,3,4].
// After adding 1 to A[0], the array is [2,2,3,4], and the sum of even values is
// 2 + 2 + 4 = 8. After adding -3 to A[1], the array is [2,-1,3,4], and the sum
// of even values is 2 + 4 = 6. After adding -4 to A[0], the array is
// [-2,-1,3,4], and the sum of even values is -2 + 4 = 2. After adding 2 to
// A[3], the array is [-2,-1,3,6], and the sum of even values is -2 + 6 = 4.
pub fn sum_even_after_queries(mut a: Vec<i32>, queries: Vec<Vec<i32>>) -> Vec<i32> {
let mut ret: Vec<i32> = Vec::with_capacity(queries.len());
for query in queries.into_iter() {
a[query[1] as usize] += query[0];
ret.push(a.iter().filter(|&&n| n % 2 == 0).sum::<i32>());
}
ret
}
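// Quick check (added for illustration; not part of the original solution): run the
// worked example from the problem statement above and compare with its expected
// output.
#[test]
fn sum_even_after_queries_example() {
    let a = vec![1, 2, 3, 4];
    let queries = vec![vec![1, 0], vec![-3, 1], vec![-4, 0], vec![2, 3]];
    assert_eq!(sum_even_after_queries(a, queries), vec![8, 6, 2, 4]);
}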
| true |
4e5bac68910d629d8d86e30d898e1b0d093d4c4d
|
Rust
|
LehmRob/tmp
|
/rust/array/src/main.rs
|
UTF-8
| 876 | 4 | 4 |
[] |
no_license
|
fn main() {
println!("Hello, friend!");
// ARRAYS
// ======
// Declare arrays
let a1 = [1, 2, 3]; // a1: [i32; 3]
let a2 = [1, 2, 3, 4]; // a2: [i32; 4]
    let a3 = [0; 20]; // a3: [i32; 20] declare an array with 20 elements, all initialized to 0
// Get the length of the array
println!("Length of array a1 {}", a1.len());
println!("Length of array a2 {}", a2.len());
println!("Length of array a3 {}", a3.len());
let names = ["max", "mike", "baxter"];
// Access elements
println!("Print second array {}", names[1]);
// SLICES
// ======
    let a4 = [1, 2, 3, 4, 5]; // normal array declaration
    let s1 = &a4[..]; // slice over the whole array
let s2 = &a4[1..3]; // slice from index 1 to 3
println!("Slice 1 length {}", s1.len());
println!("Slice 2 length {}", s2.len());
}
| true |
152e9455d2b7276fb473478ef531b152e3cc1af0
|
Rust
|
JLockerman/FuzzyLog
|
/servers/tokio_server/src/main.rs
|
UTF-8
| 9,881 | 2.578125 | 3 |
[
"BSD-3-Clause"
] |
permissive
|
#[macro_use]
extern crate log;
extern crate env_logger;
extern crate tokio_server;
use std::env;
use std::net::{SocketAddr, IpAddr, Ipv4Addr};
pub fn main() {
let _ = env_logger::init();
let Args {port_number, group, num_worker_threads, upstream, downstream}
= parse_args();
let ip_addr = IpAddr::V4(Ipv4Addr::new(0, 0, 0, 0));
let addr = SocketAddr::new(ip_addr, port_number);
let (server_num, group_size) = match group {
Group::Singleton | Group::LockServer => (0, 1),
Group::InGroup(server_num, group_size) => (server_num, group_size),
};
let replicated = upstream.is_some() || downstream.is_some();
let print_start = |addr| match group {
Group::Singleton =>
println!("Starting singleton server at {} with {} worker threads",
addr, num_worker_threads),
Group::LockServer =>
println!("Starting lock server at {} with {} worker threads",
addr, num_worker_threads),
Group::InGroup(..) =>
println!("Starting server {} out of {} at {} with {} worker threads",
server_num, group_size, addr, num_worker_threads),
};
if replicated {
unimplemented!("no tokio replication yet")
} else {
tokio_server::run_unreplicated_server(&addr, server_num, group_size, || print_start(addr))
.expect("cannot run server");
}
// match acceptor {
// Ok(accept) => {
// ;
// if replicated {
// unimplemented!("no tokio replication yet")
// // println!("upstream {:?}, downstream {:?}", upstream, downstream);
// // servers2::tcp::run_with_replication(accept, server_num, group_size,
// // upstream, downstream, num_worker_threads, &a)
// }
// else {
// }
// }
// Err(e) => {
// error!("Could not start server due to {}.", e);
// std::process::exit(1)
// }
// }
/*match (acceptor, group) {
(Ok(accept), Group::Singleton) => {
let addr = accept.local_addr().unwrap();
println!("Starting singleton server at {} with {} worker threads",
addr, num_worker_threads);
servers2::tcp::run(accept, 0, 1, num_worker_threads, &a)
}
(Ok(accept), Group::LockServer) => {
let addr = accept.local_addr().unwrap();
println!("Starting lock server at {} with {} worker threads",
addr, num_worker_threads);
servers2::tcp::run(accept, 0, 1, num_worker_threads, &a)
}
(Ok(accept), Group::InGroup(server_num, group_size)) => {
let addr = accept.local_addr().unwrap();
println!("Starting server {} out of {} at {} with {} worker threads",
server_num, group_size, addr, num_worker_threads);
servers2::tcp::run(accept, server_num, group_size, num_worker_threads, &a)
}
(Err(e), _) => {
error!("Could not start server due to {}.", e);
std::process::exit(1)
}
}*/
}
const USAGE: &'static str =
"Usage:
\ttcp_server <port number> [-w | --workers <num worker threads>] [-up | --upstream <ip addr>:<port>] [-dwn | --downstream <ip addr>]
\ttcp_server (-ls | --lock-server) [-w | --workers <num worker threads>] [-up | --upstream <ip addr>:<port>] [-dwn | --downstream <ip addr>]
\ttcp_server (-ig | --in-group <server num>:<num servers in group>) [--workers <num worker threads>] [-up | --upstream <ip addr>:<port>] [-dwn | --downstream <ip addr>]
can also be run with 'cargo run --release -- <args>...'";
struct Args {
port_number: u16,
group: Group,
num_worker_threads: usize,
upstream: Option<SocketAddr>,
downstream: Option<IpAddr>,
}
#[derive(PartialEq, Eq)]
enum Group {
LockServer,
Singleton,
InGroup(u32, u32),
}
enum Flag {
None,
Workers,
InGroup,
Upstream,
Downstream,
}
fn parse_args() -> Args {
let env_args = env::args();
if env_args.len() < 2 {
println!("{}", USAGE);
std::process::exit(1)
}
let mut args = Args {
port_number: 0,
group: Group::Singleton,
num_worker_threads: 0,
upstream: None,
downstream: None,
};
let mut last_flag = Flag::None;
for arg in env_args.skip(1) {
match last_flag {
Flag::None => {
match &*arg {
"-w" | "--workers" => last_flag = Flag::Workers,
"-ig" | "--in-group" => {
if args.group != Group::Singleton {
error!("A server cannot both be in a group and a lock server.");
std::process::exit(1)
}
last_flag = Flag::InGroup
}
"-ls" | "--lock-server" => {
if args.group != Group::Singleton {
error!("A server cannot both be in a group and a lock server.");
std::process::exit(1)
}
args.group = Group::LockServer
}
"-up" | "--upstream" => {
last_flag = Flag::Upstream
}
"-dwn" | "--downstream" => {
last_flag = Flag::Downstream
}
port => {
match port.parse() {
Ok(port) => args.port_number = port,
Err(e) => {
error!("Invalid flag: {}.", port);
debug!("caused by {}", e);
std::process::exit(1)
}
}
}
}
}
Flag::Workers => {
match arg.parse() {
Ok(num_workers) => {
if num_workers == 0 {
println!("WARNING: Number of worker threads must be non-zero, will default to 1");
args.num_worker_threads = 1
}
else {
args.num_worker_threads = num_workers
}
last_flag = Flag::None
}
Err(e) => {
error!("Invalid <num worker threads> at '--workers': {}.", e);
std::process::exit(1)
}
}
}
Flag::Upstream => {
match arg.parse() {
Ok(addr) => {
args.upstream = Some(addr)
}
Err(e) => {
error!("Invalid <upstream addr> at '--upstream': {}.", e);
}
}
last_flag = Flag::None;
}
Flag::Downstream => {
match arg.parse() {
Ok(addr) => {
args.downstream = Some(addr)
}
Err(e) => {
error!("Invalid <downstream addr> at '--downstream': {}.", e);
}
}
last_flag = Flag::None;
}
Flag::InGroup => {
let split: Vec<_> = arg.split(':').collect();
if split.len() != 2 {
error!("Invalid '--in-group {}': must be in the form of '--in-group <server num>:<num servers in group>'.", arg);
std::process::exit(1)
}
match (split[0].parse(), split[1].parse()) {
(Ok(server_num), Ok(group_size)) => {
if group_size <= server_num {
error!("<server num>: {} must be less than <num servers in group>: {} in '--in-group'",
split[0], split[1]);
std::process::exit(1)
}
last_flag = Flag::None;
args.group = Group::InGroup(server_num, group_size)
}
(Err(e1), Err(e2)) => {
error!("Invalid <server num>: {} '{}' at '--in-group'", e1, split[0]);
error!("Invalid <num servers in group>: {} '{}' at '--in-group'",
e2, split[1]);
std::process::exit(1)
}
(Err(e1), _) => {
error!("Invalid <server num>: {} '{}' at '--in-group'", e1, split[0]);
std::process::exit(1)
}
(_, Err(e2)) => {
error!("Invalid <num servers in group>: {} '{}' at '--in-group'",
e2, split[1]);
std::process::exit(1)
}
}
}
}
}
match last_flag {
Flag::None => args,
Flag::InGroup => {
error!("Missing <server num>:<num servers in group> for '--in-group'");
std::process::exit(1)
},
Flag::Workers => {
error!("Missing <num worker threads> for '--workers'");
std::process::exit(1)
}
Flag::Downstream => {
error!("Missing <downstream addr> for '--downstream'");
std::process::exit(1)
}
Flag::Upstream => {
error!("Missing <upstream addr> for '--upstream'");
std::process::exit(1)
}
}
}
| true |
ee7cbc388f47db45a32e073a0a4c1e1ab350b49f
|
Rust
|
chris-morgan/diffbot-rust-client
|
/src/diffbot/test.rs
|
UTF-8
| 5,283 | 2.75 | 3 |
[
"Apache-2.0",
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
extern mod diffbot = "diffbot#1.0";
extern mod extra = "extra#0.10-pre";
use extra::json;
use extra::treemap::TreeMap;
fn invalid_token() -> ~str {
~"0"
}
fn valid_token() -> ~str {
~"6932269b31d051457940f3da4ee23b79"
}
#[test]
pub fn invalid_key() {
assert_eq!(diffbot::call(&from_str("http://example.com/").unwrap(),
invalid_token(), "frontpage", [], 2),
Err(diffbot::ApiError(~"Not authorized API token.", diffbot::UNAUTHORIZED_TOKEN)));
}
/// Test the frontpage API on a real, live website.
#[test]
pub fn test_frontpage() {
// We do quite a bit of error-checking here, more than would usually be done.
match diffbot::call(&from_str("http://example.com/").unwrap(),
valid_token(), "frontpage", [], 2) {
Err(diffbot::ApiError(msg, diffbot::UNAUTHORIZED_TOKEN)) =>
fail!("Uh oh, that token isn't authorized. (Full message: {})", msg),
Err(diffbot::ApiError(msg, diffbot::REQUESTED_PAGE_NOT_FOUND)) =>
fail!("I am sad, for that page wasn't found. (Full message: {})", msg),
Err(diffbot::ApiError(msg, diffbot::TOKEN_EXCEEDED_OR_THROTTLED)) =>
fail!("Hey, hey... slow down, pal! (Full message: {})", msg),
Err(diffbot::ApiError(msg, diffbot::ERROR_PROCESSING)) =>
fail!("Oh noes! You (maybe) broke the Diffbot! (Full message: {})", msg),
Err(diffbot::ApiError(msg, code)) =>
fail!("Oh noes! Something went wrong, and I don't know what. \
(Unknown error; code: {}, full message: {})", code, msg),
Err(diffbot::JsonError(error)) =>
fail!("Uh oh, Diffbot returned invalid JSON! (Did you do something wrong?) \
Full message: {}", error.to_str()),
Err(diffbot::IoError) => unreachable!(), // ... because we didn't trap the condition.
Ok(object) => {
let mut expected = TreeMap::new();
expected.insert(~"sections", json::List(~[{
let mut section = TreeMap::new();
section.insert(~"items", json::List(~[{
let mut item = TreeMap::new();
item.insert(~"title", json::String(~"Example Domain"));
item.insert(~"url", json::String(~"http://www.iana.org/domains/example"));
json::Object(~item)
}]));
section.insert(~"primary", json::Boolean(true));
json::Object(~section)
}]));
expected.insert(~"title", json::String(~"Example Domain"));
expected.insert(~"url", json::String(~"http://example.com/"));
assert_eq!(object, expected);
},
}
}
/// Test prepared requests, posting a body, and limiting the fields to return.
#[test]
fn test_post_body_and_fields() {
let request = diffbot::prepare_request(&from_str("http://example.com/").unwrap(),
valid_token(),
"article",
["title", "images(url)"],
2);
let body = bytes!("<title>Contents of title tag</title>
<h1>Contents of heading tag</h1>
<p>Contents of big body
<img src=//example.org/example.jpg>
<img src=example.png>
<p>More page contents");
let mut response = request.post_body(body).unwrap();
assert_eq!(response.pop(&~"type"),
Some(json::String(~"article")));
assert_eq!(response.pop(&~"url"),
Some(json::String(~"http://example.com/")));
assert_eq!(response.pop(&~"title"),
Some(json::String(~"Contents of heading tag")));
// Get the URL of each image
let image_urls = match response.pop(&~"images").unwrap() {
json::List(images) => images.move_iter().map(|image| match image {
json::Object(~mut o) => {
assert_eq!(o.len(), 1);
match o.pop(&~"url").unwrap() {
json::String(ref s) => s.to_owned(),
_ => unreachable!(),
}
},
_ => unreachable!(),
}),
_ => unreachable!(),
}.collect::<~[~str]>();
assert_eq!(image_urls, ~[~"http://example.org/example.jpg",
~"http://example.com/example.png"]);
    // And those were the only fields returned.
assert_eq!(response.len(), 0);
}
static TIMEOUT_MESSAGE: &'static str = "Request timed out. The \"timeout\" \
query string option can be used to modify the timeout (in milliseconds). \
For example, \"...?timeout=5000&url=...\"";
/// Test the Diffbot struct and timeouts on a prepared request.
#[test]
fn test_diffbot_struct_and_timeouts() {
let diffbot = diffbot::Diffbot::new(valid_token(), 2);
let mut request = diffbot.prepare_request(&from_str("http://example.com/").unwrap(),
"article", []);
// I think we're fairly safe that example.com won't respond in 1ms.
request.timeout(1);
assert_eq!(request.call(),
Err(diffbot::ApiError(TIMEOUT_MESSAGE.to_owned(), 500)));
}
| true |
565a2e01fbd40d514369cd63a536ad280b1f2582
|
Rust
|
royswale/leetcode
|
/src/bin/check-if-n-and-its-double-exist.rs
|
UTF-8
| 1,034 | 3.671875 | 4 |
[
"MIT"
] |
permissive
|
fn main() {}
struct Solution;
impl Solution {
pub fn check_if_exist(arr: Vec<i32>) -> bool {
let mut map = std::collections::HashMap::<i32, ()>::new();
// for (index, value) in arr.iter().enumerate() {
// map.insert(*value, index);
// }
//
// for (index, value) in arr.iter().map(|x| x * 2).enumerate() {
// if let Some(i) = map.get(&value) {
// if *i != index {
// return true;
// }
// }
// }
for &i in arr.iter() {
            // First check whether the map already contains twice the value of i; if so, return true
if let Some(_) = map.get(&(i * 2)) {
return true;
}
            // Then, when i is even, also check whether the map contains i / 2
if i % 2 == 0 {
if let Some(_) = map.get(&(i / 2)) {
return true;
}
}
map.insert(i, ());
}
false
}
}
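// Illustrative checks (added here; not part of the original file) on two sample
// inputs: [10, 2, 5, 3] contains 5 and its double 10, while [3, 1, 7, 11] has no
// such pair.
#[test]
fn check_if_exist_examples() {
    assert!(Solution::check_if_exist(vec![10, 2, 5, 3]));
    assert!(!Solution::check_if_exist(vec![3, 1, 7, 11]));
}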
| true |
b01c68c2ad4cc9332946fcbd7350a759cc4e5f72
|
Rust
|
mjcarson/syncwrap
|
/src/lib.rs
|
UTF-8
| 8,354 | 3.71875 | 4 |
[
"MIT"
] |
permissive
|
//! # Syncwrap
//!
//! Wraps asynchronous functions in order to make them synchronous based on if a
//! "sync" feature is enabled. This is useful when writting http clients as you
//! can write async methods and wrap them for use in synchronous code bases
//! automatically.
//!
//! # Usage
//! ```toml
//! [dependencies]
//! syncwrap = "0.4.0"
//! ```
//!
//! Then in the crate that you want to have synchronous functions created for you
//! you create a sync feature. When this feature is enabled syncwrap will create
//! synchronous functions on what you have wrapped.
//!
//! you can either:
//! - Replace your asynchronous function with a synchronous one
//! - Clone your asynchronous function with a synchronous one ending in _blocking
//! - Clone all methods in an impl with synchronous ones
//!
//! # Replacing async functions
//!
//! You can replace your asynchronous function with a synchronous one by doing
//! something like the following:
//!
//!
//! ```rust
//! #[syncwrap::wrap]
//! async fn foo(input: &str) -> String {
//! format!("I am {} now", input)
//! }
//!
//! fn main() {
//! let out = foo("sync");
//! assert_eq!(out, "I am sync now".to_owned())
//! }
//! ```
//!
//! # Cloning async functions
//!
//! You can clone your asynchronous function with a synchronous one ending in _blocking
//! by doing something like the following:
//!
//!
//! ```
//! #[syncwrap::clone]
//! async fn foo(input: &str) -> String {
//! format!("I am {} now", input)
//! }
//!
//! let out = foo_blocking("sync");
//! assert_eq!(out, "I am sync now".to_owned())
//! ```
//!
//! # Cloning async methods in implementations
//!
//! You can clone all methods in an impl with synchronous ones by using
//! syncwrap::clone_impl. This is useful when you want to support both
//! async and sync functions in a struct implementation.
//!
//!
//! ```
//! // The original struct
//! #[derive(Default)]
//! pub struct Example {
//! pub fooers: Fooers,
//! }
//!
//! // You also need to create the struct to place the cloned impls in
//! // This is done so you can choose what structs/impls to clone/wrap
//! // The cloned structs/impls should end in Blocking
//! #[derive(Default)]
//! pub struct ExampleBlocking {
//! pub fooers: FooersBlocking,
//! }
//!
//! // The original struct with async functions
//! #[derive(Default)]
//! pub struct Fooers;
//!
//! // The blocking struct that we are cloning impls into
//! // You have to create this so you can add custom derives
//! #[derive(Default)]
//! pub struct FooersBlocking;
//!
//! // The async impls that you want to wrap
//! // All methods within this impl must be async
//! #[syncwrap::clone_impl]
//! impl Fooers {
//! pub async fn foo(&self, input: &str) -> String {
//! format!("I am {} now", input)
//! }
//!
//! pub async fn bar(&self, input: &str) -> String {
//! format!("I am also {} now", input)
//! }
//! }
//! let example = ExampleBlocking::default();
//! let out = example.fooers.foo("sync");
//! assert_eq!(out, "I am sync now".to_owned());
//! let out = example.fooers.bar("sync");
//! assert_eq!(out, "I am also sync now".to_owned())
//! ```
//!
//! Currently the wrapping is very naive and simply wraps the function in
//! tokio::main. This is likely more expensive than it needs to be and I hope
//! to make it more efficient later.
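//!
//! For reference, a function annotated with `#[syncwrap::wrap]` expands to roughly
//! the following sketch when built with the `sync` feature (illustrative only; the
//! exact tokens come from the `quote!` invocation further down):
//!
//! ```ignore
//! #[cfg_attr(feature = "sync", tokio::main)]
//! async fn foo(input: &str) -> String {
//!     format!("I am {} now", input)
//! }
//! ```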
use syn;
use quote::quote;
use proc_macro::TokenStream;
/// Wraps an async function in order to make it synchronous
///
/// # Examples
///
/// ```
/// #[syncwrap::wrap]
/// async fn foo(input: &str) -> String {
/// format!("I am {} now", input)
/// }
///
/// let out = foo("sync");
/// assert_eq!(out, "I am sync now".to_owned())
/// ```
#[proc_macro_attribute]
pub fn wrap(_meta: TokenStream, input: TokenStream) -> TokenStream {
// parse the input stream into our async function
let func = syn::parse_macro_input!(input as syn::ItemFn);
// get attributes (docstrings/examples) for our function
let attrs = &func.attrs;
// get visibility of function
let vis = &func.vis;
// get the name of our function
let name = &func.sig.ident;
// get information on the generics to pass
let generics = &func.sig.generics;
// get the arguments for our function
let args = &func.sig.inputs;
// get our output
let output = &func.sig.output;
    // get the block of instructions that are going to be called
let block = &func.block;
// cast back to a token stream
let output = quote!{
// iterate and add all of our attributes
#(#attrs)*
// conditionally add tokio::main if the sync feature is enabled
#[cfg_attr(feature = "sync", tokio::main)]
#vis async fn #name #generics(#args) #output { #block }
};
output.into()
}
/// Clones an async function in order to make it also synchronous
///
/// This will add _blocking to the name of the function to clone.
///
/// # Examples
///
/// ```
/// #[syncwrap::clone]
/// async fn foo(input: &str) -> String {
/// format!("I am {} now", input)
/// }
///
/// let out = foo_blocking("sync");
/// assert_eq!(out, "I am sync now".to_owned())
/// ```
#[proc_macro_attribute]
pub fn clone(_meta: TokenStream, input: TokenStream) -> TokenStream {
// parse the input stream into our async function
let func = syn::parse_macro_input!(input as syn::ItemFn);
// get attributes (docstrings/examples) for our function
let attrs = &func.attrs;
// get visibility of function
let vis = &func.vis;
// get the name of our function
let name = &func.sig.ident;
// get the name of our cloned function
let sync_name = syn::Ident::new(&format!("{}_blocking", name), name.span());
// get information on the generics to pass
let generics = &func.sig.generics;
// get the arguments for our function
let args = &func.sig.inputs;
// get our output
let output = &func.sig.output;
    // get the block of instructions that are going to be called
let block = &func.block;
// cast back to a token stream
let output = quote!{
// iterate and add all of our attributes
#(#attrs)*
        // emit the original async function unchanged
#vis async fn #name #generics(#args) #output { #block }
// iterate and add all of our attributes
#(#attrs)*
// conditionally add tokio::main if the sync feature is enabled
#[cfg_attr(feature = "sync", tokio::main)]
#vis async fn #sync_name #generics(#args) #output { #block }
};
output.into()
}
/// Clones a group of async functions in an impl to a new sub structure
///
/// This is useful when you want to support both async and sync functions
/// in a struct implementation.
///
/// # Examples
///
/// ```
/// #[derive(Default)]
/// pub struct Example {
/// pub fooers: Fooers,
/// }
///
/// #[derive(Default)]
/// pub struct ExampleBlocking {
/// pub fooers: FooersBlocking,
/// }
///
/// #[derive(Default)]
/// pub struct Fooers;
///
/// #[derive(Default)]
/// pub struct FooersBlocking;
///
/// #[syncwrap::clone_impl]
/// impl Fooers {
/// pub async fn foo(&self, input: &str) -> String {
/// format!("I am {} now", input)
/// }
/// }
///
/// let out = ExampleBlocking::default().fooers.foo("sync");
/// assert_eq!(out, "I am sync now".to_owned())
/// ```
#[proc_macro_attribute]
pub fn clone_impl(_meta: TokenStream, input: TokenStream) -> TokenStream {
// parse the input stream into our async function
let imp = syn::parse_macro_input!(input as syn::ItemImpl);
// get attributes (docstrings/examples) for our function
let attrs = &imp.attrs;
// get the methods implemented in this impl
let items = &imp.items;
// get the self type for this impl
let self_ty = match *imp.self_ty {
syn::Type::Path(path) => path,
_ => panic!("Only type paths are supported"),
};
// build sync name
let ident = self_ty.path.get_ident().unwrap();
let sync_name = syn::Ident::new(&format!("{}Blocking", ident), ident.span());
// get information on the generics to pass
let generics = &imp.generics;
// cast back to a token stream
let output = quote!{
// iterate and add all of the original async methods
#(#attrs)*
#generics
impl #self_ty {
#(#items)*
}
// Clone our async methods but wrap them
#[cfg(feature = "sync")]
impl #sync_name {
// wrap them to make the synchronous
#(
#[syncwrap::wrap]
#items
)*
}
};
output.into()
}
| true |
db9fff4e8bf732907ee7bdbefdd75feddff602ec
|
Rust
|
zexho994/Zex
|
/cmd/src/main.rs
|
UTF-8
| 3,467 | 3.171875 | 3 |
[
"Apache-2.0"
] |
permissive
|
use clap::App;
use clap::Arg;
use std::fs::File;
use std::io::Read;
use std::path::Path;
const DEFAULT_PATH: &str = "/Users/zexho/Github/Zex/sample";
// const DEFAULT_PATH: &str = "/Users/a994/Github/Zex/sample";
/// ./target/debug/cmd -f /echo/echo_int.zex --out all
fn main() {
    // Configure the command-line options
let matches = App::new("Zex Program")
.version("1.0")
.author("[email protected]")
.about("github url: https://github.com/zexho994/Zex")
.arg(
Arg::with_name("mode")
.short("m")
.long("mode")
.value_name("MODE")
.help("Select which mode to boot")
.takes_value(true),
)
.arg(
Arg::with_name("path")
.short("p")
.long("path")
.value_name("PATH")
.help("zex's file read path")
.takes_value(true),
)
.arg(
Arg::with_name("file")
.short("f")
.long("file")
.value_name("FILE NAME")
.help("the file name of zex from <path>")
.takes_value(true),
)
.arg(
Arg::with_name("output")
.short("o")
.long("out")
.value_name("output")
.help("print data info , be type out -token or -ast or -all")
.takes_value(true),
)
.get_matches();
    // Parse the launch arguments
// let mode = matches.value_of("mode").unwrap_or("file");
let path = matches.value_of("path").unwrap_or(DEFAULT_PATH);
let file = matches.value_of("file").unwrap_or("");
let out = matches.value_of("output").unwrap_or("");
    // Select the launch mode
// if mode == "input" {
// input_mode();
// } else if mode == "file" {
file_mode(path, file, out);
// }
}
/// Manual input mode
// fn input_mode() {
//     println!("=> Enter statements to execute manually; end each one with a semicolon (;).");
// loop {
// print!(">");
// stdout().flush().expect("flush error!");
// let mut input = String::new();
// stdin().read_line(&mut input).unwrap();
// input = input.trim().to_string();
// if input == "exit" {
// break;
// }
// let mut tokens = lexer::lexing(input.to_string());
// parse::parsing(&mut tokens);
// // println!("{:?}", num.unwrap());
// }
// }
/// Read-from-file parsing mode
fn file_mode(p: &str, n: &str, out: &str) {
let mut full_path = String::from(p);
full_path.push_str(n);
let path = Path::new(full_path.as_str());
let mut file = match File::open(path) {
Err(why) => panic!("couldn't open {},err = {}", full_path, why),
Ok(file) => file,
};
let mut content = String::new();
match file.read_to_string(&mut content) {
Err(err) => panic!("couldn't read {},err {}", full_path, err),
Ok(_) => println!("\n==> input:\n{}", content),
}
// lexing
let mut tokens = lexer::lexing(content.trim().to_string());
if out == "token" || out == "all" {
println!("\n----- output token-----");
println!("{:?}", tokens);
}
// parsing
let ast = parse::parsing(&mut tokens).unwrap();
if out == "ast" || out == "all" {
println!("\n----- output ast-----");
println!("{:?}", ast);
}
// semantic
semantic::semantic(ast);
}
| true |
b79f45f29b476251d8af29088d2658f1eb6cfd5e
|
Rust
|
james2509/wiremock-rs
|
/src/mock_actor.rs
|
UTF-8
| 3,875 | 2.640625 | 3 |
[
"MIT",
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
] |
permissive
|
use crate::active_mock::ActiveMock;
use crate::{Mock, Request};
use bastion::prelude::*;
use futures_timer::Delay;
use http_types::{Response, StatusCode};
use log::{debug, warn};
use std::time::Duration;
#[derive(Clone)]
pub(crate) struct MockActor {
pub actor_ref: ChildRef,
}
#[derive(Clone, Debug)]
struct Reset {}
#[derive(Clone, Debug)]
struct Verify {}
impl MockActor {
/// Start an instance of our MockActor and return a reference to it.
pub(crate) fn start() -> MockActor {
let mock_actors = Bastion::children(|children: Children| {
children.with_exec(move |ctx: BastionContext| async move {
let mut mocks: Vec<ActiveMock> = vec![];
loop {
msg! { ctx.recv().await?,
_reset: Reset =!> {
debug!("Dropping all mocks.");
mocks = vec![];
answer!(ctx, "Reset.").unwrap();
};
_verify: Verify =!> {
debug!("Verifying expectations for all mounted mocks.");
let verified = mocks.iter().all(|m| m.verify());
answer!(ctx, verified).unwrap();
};
mock: Mock =!> {
debug!("Registering mock.");
mocks.push(ActiveMock::new(mock));
answer!(ctx, "Registered.").unwrap();
};
request: http_types::Request =!> {
debug!("Handling request.");
let request = Request::from(request).await;
let mut response: Option<Response> = None;
let mut delay: Option<Duration> = None;
for mock in &mut mocks {
if mock.matches(&request) {
response = Some(mock.response());
delay = mock.delay().to_owned();
break;
}
}
if let Some(response) = response {
if let Some(delay) = delay {
Delay::new(delay).await;
}
answer!(ctx, response).unwrap();
} else {
debug!("Got unexpected request:\n{}", request);
let res = Response::new(StatusCode::NotFound);
answer!(ctx, res).unwrap();
}
};
_: _ => {
warn!("Received a message I was not listening for.");
};
}
}
})
})
.expect("Couldn't create the mock actor.");
// We actually started only one actor
let mock_actor = mock_actors.elems()[0].clone();
MockActor {
actor_ref: mock_actor,
}
}
pub(crate) async fn register(&self, mock: Mock) {
self.actor_ref.ask_anonymously(mock).unwrap().await.unwrap();
}
pub(crate) async fn reset(&self) {
self.actor_ref
.ask_anonymously(Reset {})
.unwrap()
.await
.unwrap();
}
pub(crate) async fn verify(&self) -> bool {
let answer = self.actor_ref.ask_anonymously(Verify {}).unwrap();
let response = msg! { answer.await.expect("Couldn't receive the answer."),
outcome: bool => outcome;
_: _ => false;
};
response
}
}
| true |
6fc601038b09ceae48d97674a2fa7f6aa0ce4cd3
|
Rust
|
fractalide/netsim
|
/src/wire/mac.rs
|
UTF-8
| 1,464 | 3.109375 | 3 |
[
"MIT",
"BSD-3-Clause"
] |
permissive
|
use crate::priv_prelude::*;
use rand;
/// An ethernet hardware MAC address.
#[derive(Clone, Copy, PartialEq)]
pub struct MacAddr {
bytes: [u8; 6],
}
impl fmt::Debug for MacAddr {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{:02X}:{:02X}:{:02X}:{:02X}:{:02X}:{:02X}",
self.bytes[0],
self.bytes[1],
self.bytes[2],
self.bytes[3],
self.bytes[4],
self.bytes[5],
)
}
}
impl fmt::Display for MacAddr {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{:?}", *self)
}
}
impl MacAddr {
/// The broadcast ethernet address.
pub const BROADCAST: MacAddr = MacAddr {
bytes: [0xff, 0xff, 0xff, 0xff, 0xff, 0xff],
};
/// Create a `MacAddr` from the given 6-byte buffer.
pub fn from_bytes(bytes: &[u8]) -> MacAddr {
let mut b = [0u8; 6];
b[..].clone_from_slice(bytes);
MacAddr {
bytes: b,
}
}
/// Get the address as a slice of bytes.
pub fn as_bytes(&self) -> &[u8] {
&self.bytes[..]
}
/// Generate a random MAC address.
pub fn random() -> MacAddr {
let mut b: [u8; 6] = rand::random();
        // clear the two low bits of the first octet (multicast and
        // locally-administered flags) so the result reads as a unicast,
        // universally-administered address
        b[0] &= 0xfc;
MacAddr {
bytes: b,
}
}
    /// Checks whether this is the broadcast address.
pub fn is_broadcast(&self) -> bool {
*self == MacAddr::BROADCAST
}
}
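// Tiny sanity check (added for illustration; not part of the original file):
// `random` clears the two low bits of the first octet, so the result is a
// unicast, universally-administered address and can never equal the broadcast
// address.
#[test]
fn random_mac_is_unicast_and_not_broadcast() {
    let mac = MacAddr::random();
    assert_eq!(mac.as_bytes()[0] & 0x03, 0);
    assert!(!mac.is_broadcast());
}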
| true |
7e719170d48c270d7d49bc290bacc733b91c6cb9
|
Rust
|
innoave/genevo
|
/src/selection/tournament.rs
|
UTF-8
| 8,567 | 3.421875 | 3 |
[
"Apache-2.0",
"MIT"
] |
permissive
|
//! The `tournament` module.
//!
//! The provided `SelectionOp` implementations are:
//! * `TournamentSelector`
use crate::{
algorithm::EvaluatedPopulation,
genetic::{Fitness, Genotype, Parents},
operator::{GeneticOperator, MultiObjective, SelectionOp, SingleObjective},
random::{random_index, random_probability, Rng},
};
/// The `TournamentSelector` implements the tournament selection method.
/// It runs tournaments with a small size of participants and pick the best
/// performing individuals from each tournament.
///
/// The number of participants in each tournament is configurable by the
/// `tournament_size` field. A tournament size of 1 is called 1-way tournament
/// and is equivalent to random selection.
///
/// The final selection is picked from the best performing participants in each
/// tournament but with a probability. The probability gives also chances to
/// the second best, third best and so on. The probability is configurable by
/// the `probability` field. A probability of 1.0 means the tournament is
/// deterministic. The best and only the best individual of each tournament is
/// selected.
///
/// To avoid that candidates chosen once are selected again they are removed
/// from the list of candidates. Though this can be configured as well. The
/// field `remove_selected_individuals` controls whether selected candidates
/// are removed or not.
///
/// This `TournamentSelector` can be used for single-objective fitness values
/// as well as multi-objective fitness values.
#[allow(missing_copy_implementations)]
#[derive(Clone, Debug, PartialEq)]
pub struct TournamentSelector {
/// The fraction of number of parents to select in relation to the
/// number of individuals in the population.
selection_ratio: f64,
/// The number of individuals per parents.
num_individuals_per_parents: usize,
/// The number of participants on each tournament.
tournament_size: usize,
/// The probability to pick candidates from one tournament.
/// Values must be between 0 and 1.0 (inclusive).
probability: f64,
/// Remove chosen individuals from the list of candidates to avoid that
/// they can be picked again.
remove_selected_individuals: bool,
}
impl TournamentSelector {
/// Constructs a new instance of the `TournamentSelector`.
pub fn new(
selection_ratio: f64,
num_individuals_per_parents: usize,
tournament_size: usize,
probability: f64,
remove_selected_individuals: bool,
) -> Self {
TournamentSelector {
selection_ratio,
num_individuals_per_parents,
tournament_size,
probability,
remove_selected_individuals,
}
}
/// Returns the selection ratio.
///
/// The selection ratio is the fraction of number of parents that are
/// selected on every call of the `select_from` function and the number
/// of individuals in the population.
pub fn selection_ratio(&self) -> f64 {
self.selection_ratio
}
/// Sets the selection ratio to a new value.
///
/// The selection ratio is the fraction of number of parents that are
/// selected on every call of the `select_from` function and the number
/// of individuals in the population.
pub fn set_selection_ratio(&mut self, value: f64) {
self.selection_ratio = value;
}
/// Returns the number of individuals per parents use by this selector.
pub fn num_individuals_per_parents(&self) -> usize {
self.num_individuals_per_parents
}
/// Sets the number of individuals per parents to the given value.
pub fn set_num_individuals_per_parents(&mut self, value: usize) {
self.num_individuals_per_parents = value;
}
/// Returns the size of one tournament.
pub fn tournament_size(&self) -> usize {
self.tournament_size
}
    /// Sets the size of one tournament to the given value. The value must be
    /// a positive integer greater than 0.
///
/// A tournament size of 1 is called 1-way tournament and is
/// equivalent to random selection.
pub fn set_tournament_size(&mut self, value: usize) {
self.tournament_size = value;
}
/// Returns the probability to pick candidates from one tournament.
pub fn probability(&self) -> f64 {
self.probability
}
    /// Sets the probability to pick candidates from one tournament to the given
/// value. The value must be between 0 and 1.0 (inclusive).
///
/// A probability of 1.0 means the tournament is deterministic. The best
/// and only the best individual of each tournament is selected.
pub fn set_probability(&mut self, value: f64) {
self.probability = value;
}
/// Returns whether individuals are removed from the list of candidates
/// after they have been picked once.
pub fn is_remove_selected_individuals(&self) -> bool {
self.remove_selected_individuals
}
/// Sets whether individuals shall be removed from the list of candidates
/// after they have been picked once.
pub fn set_remove_selected_individuals(&mut self, value: bool) {
self.remove_selected_individuals = value;
}
}
/// Can be used for single-objective optimization
impl SingleObjective for TournamentSelector {}
/// Can be used for multi-objective optimization
impl MultiObjective for TournamentSelector {}
impl GeneticOperator for TournamentSelector {
fn name() -> String {
"Tournament-Selection".to_string()
}
}
impl<G, F> SelectionOp<G, F> for TournamentSelector
where
G: Genotype,
F: Fitness,
{
fn select_from<R>(&self, evaluated: &EvaluatedPopulation<G, F>, rng: &mut R) -> Vec<Parents<G>>
where
R: Rng + Sized,
{
let individuals = evaluated.individuals();
let fitness_values = evaluated.fitness_values();
// mating pool holds indices to the individuals and fitness_values slices
let mut mating_pool: Vec<usize> = (0..fitness_values.len()).collect();
let num_parents_to_select =
(individuals.len() as f64 * self.selection_ratio + 0.5).floor() as usize;
let target_num_candidates = num_parents_to_select * self.num_individuals_per_parents;
// select candidates for parents
let mut picked_candidates = Vec::with_capacity(target_num_candidates);
let mut count_candidates = 0;
while count_candidates < target_num_candidates && !mating_pool.is_empty() {
// fill up tournament with candidates
let mut tournament = Vec::with_capacity(self.tournament_size);
let mut count_participants = 0;
while count_participants < self.tournament_size {
let random = random_index(rng, mating_pool.len());
let participant = mating_pool[random];
tournament.push(participant);
count_participants += 1;
}
if tournament.is_empty() {
break;
}
// sort tournament from best performing to worst performing index
tournament.sort_by(|x, y| fitness_values[*y].cmp(&fitness_values[*x]));
// pick candidates with probability
let mut prob = self.probability;
let mut prob_redux = 1.;
while prob > 0. {
if random_probability(rng) <= prob {
let picked = tournament.remove(0);
if self.remove_selected_individuals {
if let Some(position) = mating_pool.iter().position(|x| *x == picked) {
mating_pool.remove(position);
}
}
picked_candidates.push(picked);
count_candidates += 1;
}
prob_redux *= 1. - prob;
prob *= prob_redux;
}
}
// convert selected candidate indices to parents of individuals
let mut selected: Vec<Parents<G>> = Vec::with_capacity(num_parents_to_select);
while !picked_candidates.is_empty() {
let mut tuple = Vec::with_capacity(self.num_individuals_per_parents);
for _ in 0..self.num_individuals_per_parents {
// index into individuals slice
let index_i = picked_candidates.remove(0);
tuple.push(individuals[index_i].clone());
}
selected.push(tuple);
}
selected
}
}
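// --- Added illustrative example (not part of the upstream genevo file). ---
// It only exercises the constructor and accessors defined above; the concrete
// parameter values are arbitrary assumptions chosen for demonstration.
#[cfg(test)]
mod added_usage_sketch {
    use super::TournamentSelector;

    #[test]
    fn constructor_stores_parameters() {
        let selector = TournamentSelector::new(0.7, 2, 4, 0.85, true);
        assert_eq!(selector.selection_ratio(), 0.7);
        assert_eq!(selector.num_individuals_per_parents(), 2);
        assert_eq!(selector.tournament_size(), 4);
        assert_eq!(selector.probability(), 0.85);
        assert!(selector.is_remove_selected_individuals());
    }
}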
| true |
57b774a4e0470c339207ac4653500cb4e2a185a5
|
Rust
|
miakramer/cpsc-532w-project
|
/evaluator/src/utilities.rs
|
UTF-8
| 871 | 2.75 | 3 |
[] |
no_license
|
use libc::c_double;
use num_traits::Float;
extern "C" {
fn lgamma(x: c_double) -> c_double;
}
pub trait BasicFloat {
fn as_f64(self) -> f64;
fn as_f32(self) -> f32;
fn to_self_f32(f: f32) -> Self;
fn to_self_f64(f: f64) -> Self;
}
impl BasicFloat for f32 {
fn as_f32(self) -> f32 {self}
fn as_f64(self) -> f64 {self as f64}
fn to_self_f32(f: f32) -> Self { f }
fn to_self_f64(f: f64) -> Self { f as f32 }
}
impl BasicFloat for f64 {
fn as_f32(self) -> f32 {self as f32}
fn as_f64(self) -> f64 {self}
fn to_self_f32(f: f32) -> Self { f as f64 }
fn to_self_f64(f: f64) -> Self { f }
}
pub fn ln_gamma<F : BasicFloat + Float>(f: F) -> F {
BasicFloat::to_self_f64(unsafe { lgamma(f.as_f64()) })
}
pub fn ln<F : Float>(f: F) -> F {
f.ln()
}
pub fn digamma<F : special::Gamma>(f: F) -> F {
f.digamma()
}
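// --- Added illustrative test (not in the original file). ---
// ln_gamma(5) should equal ln(4!) = ln(24); the tolerance and the assumption
// that the platform provides a C `lgamma` are part of this sketch only.
#[cfg(test)]
mod added_usage_sketch {
    use super::ln_gamma;

    #[test]
    fn ln_gamma_matches_ln_factorial() {
        let expected = (24.0_f64).ln();
        assert!((ln_gamma(5.0_f64) - expected).abs() < 1e-9);
    }
}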
| true |
612996ef40d769d195aa6c27090c9928bdd3f888
|
Rust
|
TheusZer0/ctf-archives
|
/PlaidCTF/2021/crypto/Proxima_Concursus/src/main.rs
|
UTF-8
| 3,068 | 2.609375 | 3 |
[
"MIT"
] |
permissive
|
mod connect4;
mod digester;
mod secret;
mod stories;
mod utils;
use std::collections::HashMap;
use connect4::Connect4;
use digester::Digest;
use utils::{read, read_line, OrExitWith};
fn main() {
println!("{}", stories::INTRO);
loop {
println!("{}", stories::CHOICE);
let choice = read("Choice [0123]: ");
println!();
match choice {
0 => observe(),
1 => the_game(),
2 => particle_collider(),
3 => {
println!("{}", stories::GOODBYE);
std::process::exit(0)
}
_ => stories::desync(),
}
println!();
}
}
fn observe() {
println!("{}", stories::observe::INTRO);
let x = hex::decode(read_line("Hex-encoded input: ")).or_exit_with("Not a valid hex string");
println!("{}{}", stories::observe::RESULT, Digest::hex_digest_of(&x));
}
fn the_game() {
println!("{}", stories::the_game::INTRO);
let game_1 = read_line("Game 1 Log: ");
    if !Connect4::play_game(&game_1).or_exit_with("Invalid Connect 4 game") {
return Err("You were playing as 'x' and 'o' won").or_exit_with("Lost the Connect 4 game");
}
let game_2 = read_line("Game 2 Log: ");
    if Connect4::play_game(&game_2).or_exit_with("Invalid connect 4 game") {
return Err("You were playing as 'o' and 'x' won").or_exit_with("Lost the Connect 4 game");
}
if Digest::hex_digest_of(game_1) == Digest::hex_digest_of(game_2) {
secret::the_game::success();
} else {
return stories::desync();
}
}
fn particle_collider() {
const NEW_PARTICLE: &str = "inflaton";
const POTENTIAL_PARTICLES: [&str; 14] = [
"upquark",
"downquark",
"charmquark",
"strangequark",
"topquark",
"bottomquark",
"electron",
"muon",
"tau",
"neutrino",
"Zboson",
"Wboson",
"higgsboson",
NEW_PARTICLE,
];
println!("{}", stories::particle_collider::INTRO);
let experimental_data: HashMap<String, String> = (0..4)
.filter_map(|i| {
let inp = hex::decode(read_line(format!(
"Tuning parameters for particle {}: ",
i + 1
)))
.or_exit_with("Invalid data");
let dat = String::from_utf8_lossy(&inp);
POTENTIAL_PARTICLES
.iter()
.filter(|&p| dat.starts_with(p))
.next()
.and_then(|p| Some((p.to_string(), Digest::hex_digest_of(inp))))
})
.collect();
if !experimental_data.contains_key(NEW_PARTICLE) {
return stories::desync();
}
if experimental_data.len() < 4 {
return stories::desync();
}
println!("{}", stories::particle_collider::EXPERIMENT_SETUP);
for e in experimental_data.values() {
if e != &experimental_data[NEW_PARTICLE] {
return stories::desync();
}
}
secret::particle_collider::success();
}
| true |
a311b99a4074c8be73c31d7849213e9270d77a01
|
Rust
|
zthompson47/tokio-uring
|
/src/io/stdout.rs
|
UTF-8
| 464 | 2.6875 | 3 |
[
"MIT"
] |
permissive
|
/*
use crate::buf;
use crate::driver::Op;
use std::io;
*/
// use std::os::unix::io::RawFd;
pub struct Stdout(());
pub fn stdout() -> Stdout {
Stdout(())
}
// const STDOUT: RawFd = 1;
impl Stdout {
/*
pub async fn write(&self, buf: buf::Slice) -> io::Result<usize> {
let op = Op::write_at(STDOUT, &buf, 0)?;
// Await the completion of the event
let completion = op.await;
Ok(completion.result? as _)
}
*/
}
| true |
ca3cc8a49dfde703840970b0e11fa41fff37197c
|
Rust
|
kinofmat/Rust_Module
|
/src/main.rs
|
UTF-8
| 10,958 | 3.859375 | 4 |
[] |
no_license
|
use std::collections::HashMap; // To allow me to use the hashmap I created with the menu items.
/* This function will remove any newline characters or returns that are read in.
In this the string is passed by reference. I created this as an example and initially
used both remove, and removed, but for convenience I ended up just using one throughout.*/
fn remove(string: &mut String){
if let Some('\n')=string.chars().next_back() {
string.pop();
}
if let Some('\r')=string.chars().next_back() {
string.pop();
}
}
//Same function as remove but passed by value.
fn removed(mut string: String) -> String {
if let Some('\n')=string.chars().next_back() {
string.pop();
}
if let Some('\r')=string.chars().next_back() {
string.pop();
}
string
}
/*This will set up to take input from the keyboard.
It will then remove any newline or return characters, by calling removed, and then return the resulting String. */
fn rdin() -> String{
let mut reader = String::new();
std::io::stdin().read_line(&mut reader).unwrap();
    reader = removed(reader.to_lowercase()); // Changes everything to lowercase so it is not a case-sensitive program.
println!();
return reader;
}
/*Rounded takes floating-point numbers and rounds them to two decimal places.
With the way that Rust rounds, it first needs to be rounded to three decimal places, and then two,
in order to get an accurate rounding. */
fn rounded(mut rounder: f32) -> f32{
rounder = format!("{:.3}", rounder).parse::<f32>().unwrap();
rounder = format!("{:.2}", rounder).parse::<f32>().unwrap();
return rounder;
}
/*This function was created for checking for correct input when integers were to be used.
It is necessary before trying to convert the string to an integer.
This is implemented with the tips. */
fn strchecker(mut temp: String) -> String{
while !temp.contains(&"1") && !temp.contains(&"2") && !temp.contains(&"3") && !temp.contains(&"4"){
println!("It seems you entered an unrecognized value.\nYou entered, {}, please try again with either '1', '2', '3', or '4'.", temp);
temp = rdin();
}
return temp;
}
/*intchecker will check the input as the actual expected ints.
This is a necessary second layer, since the strchecker will allow say 21, or 34 as inputs.
If the value is incorrect it will call for a new input, and the strchecker again.*/
fn intchecker(mut tip: i16) -> i16{
let mut temp = String::new();
while tip != 1 && tip !=2 && tip !=3 && tip !=4{
println!("It seems you entered an unrecognized value.\nYou entered, {}, please try again with either '1', '2', '3' or '4'.", tip);
temp = rdin();
temp = strchecker(temp);
tip = temp.parse::<i16>().unwrap();
}
return tip;
}
/*ynchecker will do everything necessary to check for the correct input of either a 'y' or an 'n'.
It calls the rdin function to get input. Then it will check for an empty string so that there is no broken code.
Then it checks the chars and if it is not within the range of acceptable values, it will use recursion to
get a new value and run the checks again. This is done by reference.*/
fn ynchecker(selector: &mut char){
let mut temp = String::new();
temp = rdin();
//Simply error checks for incorrect values.
if temp.is_empty(){ // Will check for an empty string.
*selector = ' ';
println!("You got an empty sting there");
} else {
*selector = temp.chars().nth(0).unwrap(); // Have to convert from a string, to a slice, to a char.
}
if *selector != 'y' && *selector != 'n'{
println!("It seems you entered an unrecognized value.\nYou entered, {}, please try again with either 'y' or 'n'.", selector);
ynchecker(selector);
}
}
//main is necessary to run the code.
fn main() {
//Constants declared for the tax rate, default tip rates, and special tip cases.
const TAX: f32 = 0.06;
const FIVE: f32 = 0.05;
const TEN: f32 = 0.10;
const FTN: f32 = 0.15;
const TWN: f32 = 0.20;
const QRT: f32 = 0.25;
const HALF: f32 = 0.50;
const DOLLAR: f32 = 1.00;
//use mut to say the variable can be changed.
let mut i: u8;
let mut selector: char = 'y';
let mut cost: f32 = 0.0;
let mut taxes: f32;
//Created a hashmap, it then is populated with the menu information
let mut items = HashMap::new();
items.insert(String::from("soda"), 1.95);
items.insert(String::from("water"), 0.00);
items.insert(String::from("burger"), 6.95);
items.insert(String::from("pizza"), 2.95);
items.insert(String::from("fries"), 1.95);
items.insert(String::from("stake"), 9.95);
//Creates a vector. It is necessary to specify the data type that will be in the vector.
let mut itemPrice: Vec<f32> = Vec::new();
let mut total: f32;
let mut fivet: f32;
let mut tent: f32;
let mut ftnt: f32;
let mut twnt: f32;
//Cannot initialize a string with values already in it.
let mut temp = String::new();
let mut tip: i16;
println!("Welcome to the restaurant of Rusty Lake!"); //Do you get the reference here? xD
//Loops through the entire body of the code to allow multiple iterations of orders.
while selector != 'n'{
//Needs to be cleared from any past iterations.
cost = 0.0;
i = 0;
//Specifically for clearing the vector, instead of wasting memory creating a new one each time.
//Will iterate through the length of the vector using .rev, which is basically just a backwards iterator.
for i in (0..itemPrice.len()).rev(){
itemPrice.remove(i);
}
//Will loop through for each item being selected from the menu.
while selector != 'n'{
println!("What item from the menu would you like to order?");
//Prints out the entire HashMap
for (key, value) in &items {
println!("{}: {:.2}", key, value);
}
temp = rdin();
//If the input does not match with a key we need to get the correct value.
while !items.contains_key(&temp){
println!("It seems what you entered did not quite match one of the items from the menu.\nPlease try again.");
for (key, value) in &items {
println!("{}: {:.2}", key, value);
}
temp = rdin();
}
//Checks that the input really is a key.
if items.contains_key(&temp){
/*A little bit of a different descision structure here.
The match will compare the given statement to the pattern of the other types.
In a way this reminds me of the when statement from Kotlin.*/
match items.get(&temp){
Some(price) => {
itemPrice.push(*price);
println!("Item price, ${:.2}", price);
}
None => {
println!("Error! Something went wrong!");
}
}
}
println!("Is there another item from the menu you wish to order? (y/n)");
ynchecker(&mut selector);
i += 1;
}
//Will add each item from the vector to the cost.
for order in itemPrice.iter(){
//println!("The current item is priced ${}", order);
cost += order;
}
//Calculate the costs with tax and various tips.
taxes = cost * TAX;
taxes = rounded(taxes);
total = taxes + cost;
println!("Your taxes will be: ${0:.2}\nYour total with taxes will be ${1:.2}\n", taxes, total);
fivet = cost * FIVE;
tent = cost * TEN;
ftnt = cost * FTN;
twnt = cost * TWN;
fivet = rounded(fivet);
tent = rounded(tent);
ftnt = rounded(ftnt);
twnt = rounded(twnt);
        /*First check if they ordered water, which would break the normal code for calculating the tips.
If there is a large group of people, considering someone may order 2 items on average, then raise the default tip rate.*/
if total == 0.0{
println!("Please consider being generous today and leave a tip for your waiter.\nSelect one of the following:\n1) $0.25 2) $0.50\n3) $1.00 4) Other");
}
else if i < 10{
println!("What would you like your tip to be?\nSelect one of the following:\n1) 5%: ${0:.2} {3:<10}2) 10%: ${1:.2}\n3) 15%: ${2:.2}{3:<10} 4) Other", fivet, tent, ftnt, "");
} else {
println!("What would you like your tip to be?\nSelect one of the following:\n1) 10%: ${0:.2}{3:<10} 2) 15%: ${1:.2}\n3) 20%: ${2:.2}{3:<10}4) Other", tent, ftnt, twnt, "");
}
temp = rdin();
        temp = strchecker(temp); // Use the string checker first to make sure there aren't actually any letters read in.
tip = temp.parse::<i16>().unwrap(); // After we have check that there are only integers, we can convert the data type to an int.
tip = intchecker(tip); // Then we have to actually check the values for correct integers.
// First check for the special only water condition. Then go along with normal tips.
if total == 0.0{
if tip == 1{
total += QRT;
} else if tip == 2{
total += HALF;
} else if tip == 3{
total += DOLLAR;
} else if tip == 4{
println!("Please enter a specific amount, including the change. Ex '10.00':");
total += rdin().parse::<f32>().unwrap(); //Will convert the string to a floating point number. Will break if letters are read in.
} else{
println!("It appears you got through all my checks. In other words, you broke the code! Way to go!");
}
} else {
if tip == 1{
total += fivet;
} else if tip == 2{
total += tent;
} else if tip == 3{
total += ftnt;
} else if tip == 4{
println!("Please enter a specific amount, including the change. Ex '10.00':");
total += rdin().parse::<f32>().unwrap();
} else{ // Just a random extra else. I found no situations that would enact this code, but fun for just in case.
println!("It appears you got through all my checks. In other words, you broke the code! Way to go!");
}
}
println!("Your total will be: ${:.2}", total); // The :.2 that I used in a lot of these print statements is to enforce the formatting with two decimal places.
println!("Is there another order you wish to enter? (y/n)");
ynchecker(&mut selector); // One final error check.
}
}
| true |
73a49bbc2714443b27e4bdac0036a670046b81b9
|
Rust
|
MGM103/Rust_Basics
|
/slices/src/main.rs
|
UTF-8
| 547 | 3.40625 | 3 |
[] |
no_license
|
//slices have the format [start..end], where start is the first position
//and end is one more than the last position
//it does this as end-start is the length of the slice
//if the range starts at 0 the first value can be omitted [..end]
//vice versa [start..]
//also [..] is the entire string
//str is the type for a string slice
fn first_word(s: &String) -> &str {
let bytes = s.as_bytes();
for (i, &item) in bytes.iter().enumerate() {
if item == b' ' {
return &s[0..i];
}
}
&s[..]
}
fn main() {}
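// --- Added illustrative test (not in the original exercise file). ---
// It verifies the slice-returning behaviour described in the comments above:
// the returned &str borrows the text up to the first space.
#[cfg(test)]
mod added_usage_sketch {
    use super::first_word;

    #[test]
    fn returns_text_up_to_first_space() {
        let s = String::from("hello world");
        assert_eq!(first_word(&s), "hello");
    }
}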
| true |
33007e8153616488720c33dc4a783f8b14436dda
|
Rust
|
kingchazzy/devand
|
/devand-core/src/string_utils.rs
|
UTF-8
| 239 | 2.984375 | 3 |
[
"Apache-2.0"
] |
permissive
|
pub fn trimlow(s: String) -> String {
// We help the user, trimming spaces and converting to lowercase
let s = s.to_lowercase();
// Note: this allocates a new string, in place trimming does not exist
s.trim().to_string()
}
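// --- Added illustrative test (not in the original module). ---
// It shows that trimming and lowercasing are both applied, as the comments
// above describe.
#[cfg(test)]
mod added_usage_sketch {
    use super::trimlow;

    #[test]
    fn trims_and_lowercases() {
        assert_eq!(trimlow("  RuSt ".to_string()), "rust");
    }
}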
| true |
c0caa70f1a613bc33a92db6085e1b3d369e76b3e
|
Rust
|
phyber/adventofcode
|
/2019/day03/src/main.rs
|
UTF-8
| 2,825 | 3.625 | 4 |
[] |
no_license
|
// day03
use std::env;
use std::error::Error;
use std::fs::File;
use std::io::{
self,
prelude::*,
BufReader,
};
// CLI arguments
type Args = Vec<String>;
// Moves that can be made
#[derive(Debug, Clone, PartialEq)]
enum Direction {
Up(usize),
Down(usize),
Left(usize),
Right(usize),
}
impl From<&str> for Direction {
fn from(s: &str) -> Self {
// Incoming format is a single character direction followed by numbers.
let (direction, count) = s.split_at(1);
// Our input should always parse, panic if there's an issue.
let count: usize = count.parse().unwrap();
match direction {
"U" => Self::Up(count),
"D" => Self::Down(count),
"L" => Self::Left(count),
"R" => Self::Right(count),
_ => unreachable!(),
}
}
}
// List of moves for a given wire.
type WirePath = Vec<Direction>;
// Wires that can exist in a position of the box.
#[derive(Debug, Clone)]
enum WireState {
Empty,
Blue,
Red,
Both,
}
// Current position in the grid
#[derive(Debug, Default, Clone)]
struct Position {
x: usize,
y: usize,
}
// 2D grid
type Grid = Vec<Vec<WireState>>;
#[derive(Debug, Default, Clone)]
struct Wirebox {
position: Position,
grid: Grid,
path: WirePath,
}
impl Wirebox {
fn new() -> Self {
Default::default()
}
// Load the moves set from an incoming string
fn load(&mut self, data: &str) {
for d in data.split(",") {
let direction: Direction = d.into();
self.path.push(direction);
}
}
}
// Get an input reader
fn input_reader(
args: Args,
) -> Result<BufReader<Box<dyn io::Read>>, Box<dyn Error>> {
// Either read from the given file or stdin
let input: Box<dyn io::Read> = if args.len() > 1 {
let filename = &args[1];
let fh = File::open(filename).unwrap();
Box::new(fh)
}
else {
let stdin = io::stdin();
Box::new(stdin)
};
let reader = BufReader::new(input);
Ok(reader)
}
fn main() -> Result<(), Box<dyn Error>> {
let args: Args = env::args().collect();
// Get the input
let mut buffer = String::new();
let mut reader = input_reader(args)?;
reader.read_to_string(&mut buffer)?;
let mut wirebox = Wirebox::new();
wirebox.load(&buffer);
Ok(())
}
#[cfg(test)]
mod test {
use super::*;
#[test]
    fn test_string_to_direction() {
        let tests = vec![
            ("U1", Direction::Up(1)),
            ("D2", Direction::Down(2)),
            ("L3", Direction::Left(3)),
            ("R4", Direction::Right(4)),
        ];
        for (input, output) in tests {
            let direction: Direction = input.into();
assert_eq!(direction, output);
}
}
}
| true |
52d8d51129599d33d6622f968612a0a1f376b357
|
Rust
|
nocproject/noc
|
/rust/agent/src/proto/twamp/server_greeting.rs
|
UTF-8
| 6,299 | 2.65625 | 3 |
[
"BSD-3-Clause"
] |
permissive
|
// ---------------------------------------------------------------------
// TWAMP Server-Greeting
// ---------------------------------------------------------------------
// Copyright (C) 2007-2021 The NOC Project
// See LICENSE for details
// ---------------------------------------------------------------------
use super::MBZ;
use crate::error::AgentError;
use crate::proto::frame::{FrameReader, FrameWriter};
use bytes::{Buf, BufMut, Bytes, BytesMut};
/// ## Server-Greeting structure
/// RFC-4656: 3.1
/// ```text
/// 0 1 2 3
/// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
/// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
/// | |
/// | Unused (12 octets) |
/// | |
/// |+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-++-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
/// | Modes |
/// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
/// | |
/// | Challenge (16 octets) |
/// | |
/// | |
/// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
/// | |
/// | Salt (16 octets) |
/// | |
/// | |
/// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
/// | Count (4 octets) |
/// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
/// | |
/// | MBZ (12 octets) |
/// | |
/// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
/// ```
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct ServerGreeting {
pub modes: u32,
pub challenge: Bytes,
pub salt: Bytes,
pub count: u32,
}
impl FrameReader for ServerGreeting {
fn min_size() -> usize {
64
}
fn parse(s: &mut BytesMut) -> Result<ServerGreeting, AgentError> {
// MBZ, 12 octets
s.advance(12);
// Modes, 4 octets
let modes = s.get_u32();
// Challenge, 16 octets
let challenge = s.copy_to_bytes(16);
// Salt, 16 octets
let salt = s.copy_to_bytes(16);
// Count, 4 octets
let count = s.get_u32();
// MBZ, 12 octets
s.advance(12);
Ok(ServerGreeting {
modes,
challenge,
salt,
count,
})
}
}
impl FrameWriter for ServerGreeting {
/// Get size of serialized frame
fn size(&self) -> usize {
64
}
/// Serialize frame to buffer
fn write_bytes(&self, s: &mut BytesMut) -> Result<(), AgentError> {
// MBZ, 12 octets
s.put(&MBZ[..12]);
// Modes, 4 octets
s.put_u32(self.modes);
// Challenge, 16 octets
if self.challenge.len() != 16 {
return Err(AgentError::FrameError(
"Challenge must be of 16 octets".into(),
));
}
s.put(&*self.challenge);
// Salt, 16 octets
if self.salt.len() != 16 {
return Err(AgentError::FrameError("Salt must be of 16 octets".into()));
}
s.put(&*self.salt);
// Count, 4 octets
s.put_u32(self.count);
// MBZ, 12 octets
s.put(&MBZ[..12]);
Ok(())
}
}
#[cfg(test)]
mod tests {
use super::ServerGreeting;
use crate::proto::frame::{FrameReader, FrameWriter};
use crate::proto::twamp::MODE_UNAUTHENTICATED;
use bytes::{Buf, Bytes, BytesMut};
static SERVER_GREETING1: &[u8] = &[
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, // MBZ, 12 octets
0x00, 0x00, 0x00, 0x01, // Modes, 4 octets
0x24, 0x6a, 0xe2, 0x53, 0x2f, 0x51, 0x82, 0x3d, 0xdd, 0xdb, 0xb0, 0xa4, 0xd8, 0x3a, 0xc1,
0x9a, // Challenge, 16 octets
0x92, 0x31, 0x15, 0xa3, 0xbf, 0x90, 0x1a, 0x57, 0x2d, 0xdf, 0x28, 0xe8, 0xbd, 0xa7, 0x81,
0xd6, // Salt, 16 octets
0x00, 0x00, 0x04, 0x00, // Count, 4 octets
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, // MBZ, 12 octets
];
static SERVER_GREETING1_CHALLENGE: &[u8] = &[
0x24, 0x6a, 0xe2, 0x53, 0x2f, 0x51, 0x82, 0x3d, 0xdd, 0xdb, 0xb0, 0xa4, 0xd8, 0x3a, 0xc1,
0x9a,
];
static SERVER_GREETING1_SALT: &[u8] = &[
0x92, 0x31, 0x15, 0xa3, 0xbf, 0x90, 0x1a, 0x57, 0x2d, 0xdf, 0x28, 0xe8, 0xbd, 0xa7, 0x81,
0xd6,
];
fn get_server_greeting() -> ServerGreeting {
ServerGreeting {
modes: MODE_UNAUTHENTICATED,
challenge: Bytes::from_static(&SERVER_GREETING1_CHALLENGE),
salt: Bytes::from_static(&SERVER_GREETING1_SALT),
count: 1024,
}
}
#[test]
fn test_server_greeting_min_size() {
assert_eq!(ServerGreeting::min_size(), 64);
}
#[test]
fn test_server_greeting_parse() {
let mut buf = BytesMut::from(SERVER_GREETING1);
let expected = get_server_greeting();
let res = ServerGreeting::parse(&mut buf);
assert_eq!(buf.remaining(), 0);
assert!(res.is_ok());
assert_eq!(res.unwrap(), expected);
}
#[test]
fn test_server_greeting_size() {
let sg = get_server_greeting();
assert_eq!(sg.size(), 64)
}
#[test]
fn test_server_greeting_write_bytes() {
let msg = get_server_greeting();
let mut buf = BytesMut::with_capacity(msg.size());
let res = msg.write_bytes(&mut buf);
assert!(res.is_ok());
assert_eq!(buf.split(), BytesMut::from(SERVER_GREETING1));
}
}
| true |
137c6118473304b6a94eb9dd5e4ef217f813f2dc
|
Rust
|
huangqian/rust-in-action
|
/examples/function-pointer.rs
|
UTF-8
| 404 | 3.5625 | 4 |
[] |
no_license
|
pub fn math(op: fn(i32, i32) -> i32, a: i32, b: i32) ->i32{
op(a, b)
}
fn sum(a: i32, b: i32) -> i32{
a + b
}
fn product(a: i32, b:i32) -> i32{
a * b
}
fn is_true() -> bool { true }
fn true_maker() -> fn() -> bool {is_true}
fn main(){
let a = 2;
let b = 3;
assert_eq!(math(sum, 2, 3), 5);
assert_eq!(math(product, 2, 3), 6);
assert_eq!(true_maker()(), true);
}
| true |
35985f7c7f37f35aec06d0b6d419f618c2524f40
|
Rust
|
sam-wright/Advent-of-Code
|
/2020/day9/src/lib.rs
|
UTF-8
| 3,503 | 3.328125 | 3 |
[] |
no_license
|
use std::collections::vec_deque::VecDeque;
use std::fs::File;
use std::io::Read;
pub fn read_input(filename: &str) -> Vec<i64> {
let mut contents = String::new();
let mut file = File::open(filename).unwrap();
file.read_to_string(&mut contents).unwrap();
let collection: Vec<i64> = contents.split("\n").map(|x| x.parse().unwrap()).collect();
collection
}
fn is_valid(cypher: &VecDeque<i64>, entry: i64) -> bool {
for j in 0..cypher.len() {
for k in 0..cypher.len() {
if j == k {
continue;
}
if cypher[j] + cypher[k] == entry {
return true;
}
}
}
false
}
pub fn find_error(input: &Vec<i64>, preamble_size: usize) -> i64 {
let mut cypher: VecDeque<i64> = VecDeque::with_capacity(preamble_size);
// preload the cypher
for i in 0..preamble_size {
cypher.push_front(input[i]);
}
// process data
for i in preamble_size as usize..input.len() {
if !is_valid(&cypher, input[i]) {
return input[i];
}
cypher.pop_back();
cypher.push_front(input[i]);
}
// failure
-1
}
pub fn find_weakness_set(input: &Vec<i64>, error: i64) -> Vec<i64> {
for i in 0..input.len() - 1 {
let mut k = 0;
let mut value = 0;
loop {
value += input[i + k];
if value == error {
let ans: Vec<i64> = input[i..=i + k].iter().map(|x| x.clone()).collect();
return ans;
}
if value > error {
break;
}
k += 1;
}
}
//failure
return Vec::new();
}
pub fn find_weakness_set_improved(input: &Vec<i64>, error: i64) -> VecDeque<i64> {
let mut weakness_set = VecDeque::new();
let mut input: VecDeque<i64> = input.iter().map(|x|{x.clone()}).collect();
loop {
let value: i64 = weakness_set.iter().sum();
if value > error {
weakness_set.pop_back();
} else if value == error {
break;
} else {
weakness_set.push_front(input.pop_front().unwrap());
}
}
weakness_set
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn part_1_example() {
let input = read_input("example1.txt");
let error = find_error(&input, 5);
assert_eq!(error, 127);
}
#[test]
fn part_1_solution() {
let input = read_input("input.txt");
let error = find_error(&input, 25);
assert_eq!(error, 1309761972);
}
#[test]
fn part_2_example() {
let input = read_input("example1.txt");
let error = find_error(&input, 5);
let pair = find_weakness_set(&input, error);
let ans = pair.iter().max().unwrap() + pair.iter().min().unwrap();
assert_eq!(ans, 62);
}
#[test]
fn part_2_solution() {
let input = read_input("input.txt");
let error = find_error(&input, 25);
let pair = find_weakness_set(&input, error);
let ans = pair.iter().max().unwrap() + pair.iter().min().unwrap();
assert_eq!(ans, 177989832);
}
#[test]
fn part_2_solution_improved() {
let input = read_input("input.txt");
let error = find_error(&input, 25);
let pair = find_weakness_set_improved(&input, error);
let ans = pair.iter().max().unwrap() + pair.iter().min().unwrap();
assert_eq!(ans, 177989832);
}
}
| true |
4b510afef448c8d03eb284124ceff0b88e9873f0
|
Rust
|
spacejam/seaslug
|
/src/smt.rs
|
UTF-8
| 1,670 | 2.90625 | 3 |
[] |
no_license
|
//! SMT solver based on Dutertre & de Moura's paper
//! [A Fast Linear-Arithmetic Solver for DPLL(T)](cav06.pdf).
//!
//! For cases where z3 is too heavy of a dependency.
pub struct Input;
pub struct Output;
pub fn solve(_: Input) -> Output {
todo!()
}
struct Atom;
struct Solver {
atoms: Vec<Atom>,
}
impl Solver {
/// Asserts atom gamma in the current state.
/// If consistent, gamma is inserted into
/// alpha. If alpha union gamma is inconsistent,
/// big gamma is returned which is the shrunken
/// minimal failing subset of gamma that results
/// in inconsistency.
fn assert(&mut self, proposed: Atom) -> Result<(), Vec<Atom>> {
todo!()
}
/// Checks whether our atoms are consistent.
/// A new checkpoint is created when we return Ok.
fn check(&self) -> Result<(), Vec<Atom>> {
todo!()
}
}
struct Dpll;
enum Clause {
Literal(bool),
Empty,
Unit,
}
struct Literal;
impl Dpll {
fn dpll(f: &[Clause]) -> bool {
if todo!("f is a consistent set of literals") {
return true;
}
if todo!("f contains an empty clause") {
return false;
}
for l in todo!("each unit clause in f") {
f.unit_propagate(l);
}
for l in todo!("each pure literal in f") {
f.pure_literal_assign(l);
}
let l = f.choose_literal();
Dpll::dpll(f && l) || Dpll::dpll(f && !l)
}
fn consistent(f: &[Literal]) -> bool {
todo!()
}
/// apply unit propagation rule
fn unit_propagate() {}
/// apply pure literal rule
fn pure_literal_assign() {}
}
| true |
7eb977d83c93f0e1d72c7d492234586bde9770aa
|
Rust
|
SotaWatanabe/ethereum-bridge
|
/eth-types/src/utils.rs
|
UTF-8
| 1,746 | 2.828125 | 3 |
[] |
no_license
|
#![cfg_attr(not(feature = "std"), no_std)]
use sp_std::prelude::*;
use sp_std::vec;
#[macro_export]
macro_rules! fixed_hex_bytes_unchecked {
($str:expr, $len:expr) => {{
let mut bytes: [u8; $len] = [0; $len];
let slice = $crate::hex_bytes_unchecked($str);
if slice.len() == $len {
bytes.copy_from_slice(&slice);
};
bytes
}};
}
#[macro_export]
macro_rules! array_unchecked {
($source:expr, $offset:expr, $len:expr) => {{
unsafe { (*($source[$offset..$offset + $len].as_ptr() as *const [_; $len])) }
}};
}
/// convert number to bytes base on radix `n`
pub fn base_n_bytes_unchecked(mut x: u64, radix: u64) -> Vec<u8> {
if x == 0 {
return vec![b'0'];
}
if radix > 36 {
return vec![];
}
let mut buf = vec![];
while x != 0 {
let rem = (x % radix) as u8;
if rem < 10 {
buf.push(b'0' + rem);
} else {
buf.push(b'a' + rem - 10);
}
x /= radix;
}
buf.reverse();
buf
}
/// convert bytes to hex string
pub fn hex_string_unchecked<B: AsRef<[u8]>>(b: B, prefix: &str) -> Vec<char> {
let b = b.as_ref();
let mut v = Vec::with_capacity(prefix.len() + b.len() * 2);
for x in prefix.chars() {
v.push(x);
}
for x in b.iter() {
v.push(core::char::from_digit((x >> 4) as _, 16).unwrap_or_default());
v.push(core::char::from_digit((x & 0xf) as _, 16).unwrap_or_default());
}
v
}
/// convert hex string to bytes
pub fn hex_bytes_unchecked<S: AsRef<str>>(s: S) -> Vec<u8> {
let s = s.as_ref();
(if s.starts_with("0x") { 2 } else { 0 }..s.len())
.step_by(2)
.map(|i| u8::from_str_radix(&s[i..i + 2], 16).unwrap_or_default())
.collect()
}
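// --- Added illustrative usage sketch (not in the original file). ---
// Shown as comments to keep the no_std cfg untouched; the expected outputs
// follow from the definitions above.
//
//   base_n_bytes_unchecked(255, 16)          -> b"ff".to_vec()
//   hex_string_unchecked([0xde, 0xad], "0x") -> vec!['0', 'x', 'd', 'e', 'a', 'd']
//   hex_bytes_unchecked("0xdead")            -> vec![0xde, 0xad]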
| true |
6243c99693b39d951b51aa0616dad6fbf82948c5
|
Rust
|
lucifer1004/AtCoder
|
/abc189/src/bin/a.rs
|
UTF-8
| 177 | 2.734375 | 3 |
[] |
no_license
|
use proconio::input;
use proconio::marker::Chars;
fn main() {
input! {
s: Chars,
}
println!("{}", if s[0] == s[1] && s[1] == s[2] {"Won"} else {"Lost"});
}
| true |
af6f9c0725e7d9a6c639f0fcaf3f0017cbc29178
|
Rust
|
matklad/once_cell
|
/tests/it/race_once_box.rs
|
UTF-8
| 3,389 | 3.234375 | 3 |
[
"MIT",
"Apache-2.0"
] |
permissive
|
#[cfg(feature = "std")]
use std::sync::Barrier;
use std::sync::{
atomic::{AtomicUsize, Ordering::SeqCst},
Arc,
};
use once_cell::race::OnceBox;
#[derive(Default)]
struct Heap {
total: Arc<AtomicUsize>,
}
#[derive(Debug)]
struct Pebble<T> {
val: T,
total: Arc<AtomicUsize>,
}
impl<T> Drop for Pebble<T> {
fn drop(&mut self) {
self.total.fetch_sub(1, SeqCst);
}
}
impl Heap {
fn total(&self) -> usize {
self.total.load(SeqCst)
}
fn new_pebble<T>(&self, val: T) -> Pebble<T> {
self.total.fetch_add(1, SeqCst);
Pebble { val, total: Arc::clone(&self.total) }
}
}
#[cfg(feature = "std")]
#[test]
fn once_box_smoke_test() {
use std::thread::scope;
let heap = Heap::default();
let global_cnt = AtomicUsize::new(0);
let cell = OnceBox::new();
let b = Barrier::new(128);
scope(|s| {
for _ in 0..128 {
s.spawn(|| {
let local_cnt = AtomicUsize::new(0);
cell.get_or_init(|| {
global_cnt.fetch_add(1, SeqCst);
local_cnt.fetch_add(1, SeqCst);
b.wait();
Box::new(heap.new_pebble(()))
});
assert_eq!(local_cnt.load(SeqCst), 1);
cell.get_or_init(|| {
global_cnt.fetch_add(1, SeqCst);
local_cnt.fetch_add(1, SeqCst);
Box::new(heap.new_pebble(()))
});
assert_eq!(local_cnt.load(SeqCst), 1);
});
}
});
assert!(cell.get().is_some());
assert!(global_cnt.load(SeqCst) > 10);
assert_eq!(heap.total(), 1);
drop(cell);
assert_eq!(heap.total(), 0);
}
#[test]
fn once_box_set() {
let heap = Heap::default();
let cell = OnceBox::new();
assert!(cell.get().is_none());
assert!(cell.set(Box::new(heap.new_pebble("hello"))).is_ok());
assert_eq!(cell.get().unwrap().val, "hello");
assert_eq!(heap.total(), 1);
assert!(cell.set(Box::new(heap.new_pebble("world"))).is_err());
assert_eq!(cell.get().unwrap().val, "hello");
assert_eq!(heap.total(), 1);
drop(cell);
assert_eq!(heap.total(), 0);
}
#[cfg(feature = "std")]
#[test]
fn once_box_first_wins() {
use std::thread::scope;
let cell = OnceBox::new();
let val1 = 92;
let val2 = 62;
let b1 = Barrier::new(2);
let b2 = Barrier::new(2);
let b3 = Barrier::new(2);
scope(|s| {
s.spawn(|| {
let r1 = cell.get_or_init(|| {
b1.wait();
b2.wait();
Box::new(val1)
});
assert_eq!(*r1, val1);
b3.wait();
});
b1.wait();
s.spawn(|| {
let r2 = cell.get_or_init(|| {
b2.wait();
b3.wait();
Box::new(val2)
});
assert_eq!(*r2, val1);
});
});
assert_eq!(cell.get(), Some(&val1));
}
#[test]
fn once_box_reentrant() {
let cell = OnceBox::new();
let res = cell.get_or_init(|| {
cell.get_or_init(|| Box::new("hello".to_string()));
Box::new("world".to_string())
});
assert_eq!(res, "hello");
}
#[test]
fn once_box_default() {
struct Foo;
let cell: OnceBox<Foo> = Default::default();
assert!(cell.get().is_none());
}
| true |
75300969424b42f931be992ca06bbc3f97ff1846
|
Rust
|
dpchamps/WASMBoi
|
/src/spec/opcodes/ld.rs
|
UTF-8
| 7,263 | 2.671875 | 3 |
[] |
no_license
|
use crate::dasm::InstructionData;
use crate::spec::cpu::*;
use crate::spec::mmu::MMU;
use crate::spec::mnemonic::Mnemonic;
use crate::spec::opcode::Instruction;
use crate::spec::opcodes::unexpected_op;
use crate::spec::register::{RegisterRefMut, TRegister};
use crate::spec::register_ops::RegisterOp;
use crate::util::byte_ops::hi_lo_combine;
use std::num::Wrapping;
impl CPU {
pub(crate) fn evaluate_ld(
&mut self,
instruction_data: &InstructionData,
opcode_data: &[u8; 2],
mmu: &mut MMU,
) -> Result<u8, Error> {
match instruction_data.instruction {
Instruction::LD_RR => {
let r_prime_value = self
.registers
.reg_from_byte(instruction_data.opcode_info.lo)?
.get_eight_bit_val()?;
let mut r = self
.registers
.reg_from_byte(instruction_data.opcode_info.hi)?;
// println!("\t\t {:?} <- {:X}", r, r_prime_value);
r.set_eight_bit_val(r_prime_value)?;
Ok(1)
}
Instruction::LD_RN => {
match self
.registers
.reg_from_byte(instruction_data.opcode_info.hi)?
{
RegisterRefMut::Byte(byte_ref) => {
// println!("\t\t {:?} <- {:X}", byte_ref, opcode_data[0]);
byte_ref.set_value(opcode_data[0]);
Ok(2)
}
_ => Err(Error::UnexpectedOpcodeState(
instruction_data.clone(),
hi_lo_combine(opcode_data[1], opcode_data[0]),
)),
}
}
Instruction::LD_RHL => {
let value = mmu.read_byte(self.registers.hl())?;
let mut reg = self
.registers
.reg_from_byte(instruction_data.opcode_info.hi)?;
reg.set_eight_bit_val(value)?;
Ok(2)
}
Instruction::LD_HLR => {
let reg_r_value = self
.registers
.reg_from_byte(instruction_data.opcode_info.lo)?
.get_eight_bit_val()?;
mmu.write_byte(self.registers.hl(), reg_r_value)?;
Ok(2)
}
Instruction::LD_HLN => {
mmu.write_byte(self.registers.hl(), opcode_data[0])?;
Ok(3)
}
Instruction::LD_ABC => {
self.registers
.a
.set_value(mmu.read_byte(self.registers.bc())?);
Ok(2)
}
Instruction::LD_ADE => {
let value = mmu.read_byte(self.registers.de())?;
self.registers.a.set_value(value);
Ok(2)
}
Instruction::LD_AN => {
let value = mmu.read_byte(0xFF00 + (opcode_data[0] as u16))?;
self.registers.a.set_value(value);
Ok(3)
}
Instruction::LD_ANN => {
let value = mmu.read_byte(hi_lo_combine(opcode_data[1], opcode_data[0]))?;
self.registers.a.set_value(value);
Ok(4)
}
Instruction::LD_BCA => {
mmu.write_byte(self.registers.bc(), *self.registers.a.get_value())?;
Ok(2)
}
Instruction::LD_DEA => {
mmu.write_byte(self.registers.de(), *self.registers.a.get_value())?;
Ok(2)
}
Instruction::LD_NA => {
let address = 0xFF00 + (opcode_data[0] as u16);
mmu.write_byte(address, *self.registers.a.get_value())?;
Ok(3)
}
Instruction::LD_NNA => {
let address = hi_lo_combine(opcode_data[1], opcode_data[0]);
mmu.write_byte(address, *self.registers.a.get_value())?;
Ok(4)
}
Instruction::LD_AFF00C => {
let address = 0xFF00 + (*self.registers.c.get_value() as u16);
self.registers.a.set_value(mmu.read_byte(address)?);
Ok(2)
}
Instruction::LD_FF00CA => {
let address = 0xFF00 + (*self.registers.c.get_value() as u16);
mmu.write_byte(address, *self.registers.a.get_value())?;
Ok(2)
}
Instruction::LD_HLIA => {
let hl = self.registers.hl();
mmu.write_byte(hl, *self.registers.a.get_value())?;
let next_hl = Wrapping(hl) + Wrapping(1);
self.registers.hl_mut().set_value_16(next_hl.0);
Ok(2)
}
Instruction::LD_AHLI => {
let hl = self.registers.hl();
let value = mmu.read_byte(hl)?;
let next_hl = Wrapping(hl) + Wrapping(1);
self.registers.a.set_value(value);
self.registers.hl_mut().set_value_16(next_hl.0);
Ok(2)
}
Instruction::LD_HLDA => {
let hl = self.registers.hl();
mmu.write_byte(hl, *self.registers.a.get_value())?;
let next_hl = Wrapping(hl) - Wrapping(1);
self.registers.hl_mut().set_value_16(next_hl.0);
Ok(2)
}
Instruction::LD_AHLD => {
let hl = self.registers.hl();
self.registers.a.set_value(mmu.read_byte(hl)?);
let next_hl = hl.wrapping_sub(1);
self.registers.hl_mut().set_value_16(next_hl);
Ok(2)
}
Instruction::LD_RRNN => {
let dd = instruction_data.opcode_info.hi >> 1;
let mut reg_pair = self.registers.reg_pair_from_dd(dd)?;
// println!("\t\t{:?} <- {:X?}", reg_pair, opcode_data);
reg_pair.set_value(opcode_data[1], opcode_data[0]);
Ok(3)
}
Instruction::LD_SPHL => {
self.registers.sp.set_value(self.registers.hl());
Ok(2)
}
Instruction::LD_SPDD => {
let address = hi_lo_combine(opcode_data[1], opcode_data[0]);
mmu.write_word(address, *self.registers.sp.get_value())?;
Ok(5)
}
Instruction::LDHL => {
self.registers.op_with_effect(|registers| {
let mut result = RegisterOp::new(*registers.sp.get_value() as i16)
.add((opcode_data[0] as i8) as i16);
result.flags.update(|flags| {
let mut next = flags;
next.z = 0;
next
});
registers.hl_mut().set_value_16(result.value as u16);
Ok(result)
})?;
Ok(3)
}
_ => Err(unexpected_op(&instruction_data.mnemonic, &Mnemonic::LD)),
}
}
}
| true |
652fdda09e5f0ca5342de1edb33a3db28fea5b0f
|
Rust
|
mozilla/gecko-dev
|
/third_party/rust/ffi-support/src/string.rs
|
UTF-8
| 7,002 | 3.1875 | 3 |
[
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0",
"MIT"
] |
permissive
|
/* Copyright 2018-2019 Mozilla Foundation
*
* Licensed under the Apache License (Version 2.0), or the MIT license,
* (the "Licenses") at your option. You may not use this file except in
* compliance with one of the Licenses. You may obtain copies of the
* Licenses at:
*
* http://www.apache.org/licenses/LICENSE-2.0
* http://opensource.org/licenses/MIT
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the Licenses is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the Licenses for the specific language governing permissions and
* limitations under the Licenses. */
use crate::FfiStr;
use std::ffi::CString;
use std::os::raw::c_char;
use std::ptr;
/// Convert a rust string into a NUL-terminated utf-8 string suitable for passing to C, or to things
/// ABI-compatible with C.
///
/// Important: This string must eventually be freed. You may either do that using the
/// [`destroy_c_string`] method (or, if you must, by dropping the underlying [`std::ffi::CString`]
/// after recovering it via [`std::ffi::CString::from_raw`]).
///
/// It's common to want to allow the consumer (e.g. on the "C" side of the FFI) to be allowed to
/// free this memory, and the macro [`define_string_destructor!`] may be used to do so.
///
/// ## Panics
///
/// This function may panic if the argument has an interior null byte. This is fairly rare, but
/// is possible in theory.
#[inline]
pub fn rust_string_to_c(rust_string: impl Into<String>) -> *mut c_char {
CString::new(rust_string.into())
.expect("Error: Rust string contained an interior null byte.")
.into_raw()
}
/// Variant of [`rust_string_to_c`] which takes an Option, and returns null for None.
#[inline]
pub fn opt_rust_string_to_c(opt_rust_string: Option<impl Into<String>>) -> *mut c_char {
if let Some(s) = opt_rust_string {
rust_string_to_c(s)
} else {
ptr::null_mut()
}
}
/// Free the memory of a string created by [`rust_string_to_c`] on the rust heap. If `c_string` is
/// null, this is a no-op.
///
/// See the [`define_string_destructor!`] macro which may be used for exposing this function over
/// the FFI.
///
/// ## Safety
///
/// This is inherently unsafe, since we're deallocating memory. Be sure
///
/// - Nobody can use the memory after it's deallocated.
/// - The memory was actually allocated on this heap (and it's not a string from the other side of
/// the FFI which was allocated on e.g. the C heap).
/// - If multiple separate rust libraries are in use (for example, as DLLs) in a single program,
/// you must also make sure that the rust library that allocated the memory is also the one
/// that frees it.
///
/// See documentation for [`define_string_destructor!`], which gives a more complete overview of the
/// potential issues.
#[inline]
pub unsafe fn destroy_c_string(cstring: *mut c_char) {
// we're not guaranteed to be in a place where we can complain about this beyond logging,
// and there's an obvious way to handle it.
if !cstring.is_null() {
drop(CString::from_raw(cstring))
}
}
/// Convert a null-terminated C string to a rust `str`. This does not take ownership of the string,
/// and you should be careful about the lifetime of the resulting string. Note that strings
/// containing invalid UTF-8 are replaced with the empty string (for many cases, you will want to
/// use [`rust_string_from_c`] instead, which will do a lossy conversion).
///
/// If you actually need an owned rust `String`, you're encouraged to use [`rust_string_from_c`],
/// which, as mentioned, also behaves better in the face of invalid UTF-8.
///
/// ## Safety
///
/// This is unsafe because we read from a raw pointer, which may or may not be valid.
///
/// We also assume `c_string` is a null terminated string, and have no way of knowing if that's
/// actually true. If it's not, we'll read arbitrary memory from the heap until we see a '\0', which
/// can result in a enormous number of problems.
///
/// ## Panics
///
/// Panics if its argument is null, see [`opt_rust_str_from_c`] for a variant that returns None in
/// this case instead.
///
/// Note: This means it's forbidden to call this outside of a `call_with_result` (or something else
/// that uses [`std::panic::catch_unwind`]), as it is UB to panic across the FFI boundary.
#[inline]
#[deprecated(since = "0.3.0", note = "Please use FfiStr::as_str instead")]
pub unsafe fn rust_str_from_c<'a>(c_string: *const c_char) -> &'a str {
FfiStr::from_raw(c_string).as_str()
}
/// Same as `rust_string_from_c`, but returns None if `c_string` is null instead of asserting.
///
/// ## Safety
///
/// This is unsafe because we read from a raw pointer, which may or may not be valid.
///
/// We also assume `c_string` is a null terminated string, and have no way of knowing if that's
/// actually true. If it's not, we'll read arbitrary memory from the heap until we see a '\0', which
/// can result in a enormous number of problems.
#[inline]
#[deprecated(since = "0.3.0", note = "Please use FfiStr::as_opt_str instead")]
pub unsafe fn opt_rust_str_from_c<'a>(c_string: *const c_char) -> Option<&'a str> {
FfiStr::from_raw(c_string).as_opt_str()
}
/// Convert a null-terminated C into an owned rust string, replacing invalid UTF-8 with the
/// unicode replacement character.
///
/// ## Safety
///
/// This is unsafe because we dereference a raw pointer, which may or may not be valid.
///
/// We also assume `c_string` is a null terminated string, and have no way of knowing if that's
/// actually true. If it's not, we'll read arbitrary memory from the heap until we see a '\0', which
/// can result in a enormous number of problems.
///
/// ## Panics
///
/// Panics if its argument is null. See also [`opt_rust_string_from_c`], which returns None
/// instead.
///
/// Note: This means it's forbidden to call this outside of a `call_with_result` (or something else
/// that uses `std::panic::catch_unwind`), as it is UB to panic across the FFI boundary.
#[inline]
#[deprecated(since = "0.3.0", note = "Please use FfiStr::into_string instead")]
pub unsafe fn rust_string_from_c(c_string: *const c_char) -> String {
FfiStr::from_raw(c_string).into_string()
}
/// Same as `rust_string_from_c`, but returns None if `c_string` is null instead of asserting.
///
/// ## Safety
///
/// This is unsafe because we dereference a raw pointer, which may or may not be valid.
///
/// We also assume `c_string` is a null terminated string, and have no way of knowing if that's
/// actually true. If it's not, we'll read arbitrary memory from the heap until we see a '\0', which
/// can result in a enormous number of problems.
#[inline]
#[deprecated(since = "0.3.0", note = "Please use FfiStr::into_opt_string instead")]
pub unsafe fn opt_rust_string_from_c(c_string: *const c_char) -> Option<String> {
FfiStr::from_raw(c_string).into_opt_string()
}
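// --- Added illustrative test (not part of the upstream crate). ---
// It round-trips an owned Rust string through the C representation created by
// `rust_string_to_c` and frees it again with `destroy_c_string`.
#[cfg(test)]
mod added_usage_sketch {
    use super::{destroy_c_string, rust_string_to_c};
    use std::ffi::CStr;

    #[test]
    fn round_trip_and_free() {
        let ptr = rust_string_to_c("hello");
        unsafe {
            // The pointer is a valid NUL-terminated UTF-8 string allocated above.
            assert_eq!(CStr::from_ptr(ptr).to_str().unwrap(), "hello");
            destroy_c_string(ptr);
        }
    }
}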
| true |
87ef2bab854121e57523b373937640e9117f3e2f
|
Rust
|
hr567/Ana
|
/src/process/cgroup/hierarchy.rs
|
UTF-8
| 1,674 | 2.859375 | 3 |
[
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
use std::fs::{read_to_string, write};
use std::io;
use std::path::Path;
use nix::unistd::Pid;
use super::AttrFile;
/// Hierarchy in the cgroup.
pub trait Hierarchy<'a> {
/// The path of this hierarchy.
fn path(&self) -> &Path;
/// `cgroup.procs` file in this hierarchy.
///
    /// Can read from and write to it.
fn procs(&self) -> Box<dyn AttrFile<'a, Pid, Vec<Pid>>>;
/// `cgroup.tasks` file in this hierarchy.
///
/// Can read from and write to this it.
fn tasks(&self) -> Box<dyn AttrFile<'a, Pid, Vec<Pid>>>;
}
impl<'a, T: AsRef<Path>> Hierarchy<'a> for T {
fn path(&self) -> &Path {
self.as_ref()
}
fn procs(&self) -> Box<dyn AttrFile<'a, Pid, Vec<Pid>>> {
Box::new(PidFile::from(self.path().join("cgroup.procs")))
}
fn tasks(&self) -> Box<dyn AttrFile<'a, Pid, Vec<Pid>>> {
Box::new(PidFile::from(self.path().join("tasks")))
}
}
struct PidFile<T: AsRef<Path>> {
inner: T,
}
impl<T: AsRef<Path>> From<T> for PidFile<T> {
fn from(inner: T) -> PidFile<T> {
PidFile { inner }
}
}
impl<'a, T: AsRef<Path>> AttrFile<'a, Pid, Vec<Pid>> for PidFile<T> {
fn write(&mut self, pid: &Pid) -> io::Result<()> {
write(&self.inner, pid.to_string())?;
Ok(())
}
fn read(&self) -> io::Result<Vec<Pid>> {
Ok(read_to_string(&self.inner)?
.split_whitespace()
.map(|pid| -> Pid {
Pid::from_raw(
pid.trim()
.parse()
.expect("Failed to get pid from task file"),
)
})
.collect())
}
}
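// --- Added illustrative usage sketch (not in the original file). ---
// The cgroup path is an assumption for demonstration and requires a mounted
// cgroup v1 hierarchy with appropriate permissions:
//
//   use std::path::Path;
//   let cpu = Path::new("/sys/fs/cgroup/cpu/ana");
//   cpu.procs().write(&Pid::from_raw(1234))?;
//   let members = cpu.tasks().read()?;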
| true |
e70b47b2d2bfba0a4035033b1c194aff2add7d9a
|
Rust
|
akitsu-sanae/verifront
|
/src/program/domain/boolean.rs
|
UTF-8
| 313 | 3 | 3 |
[
"BSL-1.0"
] |
permissive
|
use super::Domain;
#[derive(Debug)]
pub struct Boolean {}
#[derive(Debug)]
pub enum ConstSymbol {
True,
False,
}
#[derive(Debug)]
pub enum OperatorSymbol {
And,
Or,
Not,
Equal,
}
impl Domain for Boolean {
type ConstSymbol = ConstSymbol;
type OperatorSymbol = OperatorSymbol;
}
| true |
f869c797d9ac69d03e4ddc2f9eca93614549874a
|
Rust
|
nathdobson/thread_local_arena
|
/src/owned_arena.rs
|
UTF-8
| 9,631 | 2.640625 | 3 |
[
"Apache-2.0"
] |
permissive
|
use std::cell::UnsafeCell;
use alloc::raw_vec::RawVec;
use std::mem;
use std::cmp;
use std::ptr::null_mut;
use std::isize;
use alloc::allocator::Alloc;
use alloc::allocator::Layout;
use alloc::allocator::AllocErr;
use alloc::allocator::Excess;
use alloc::allocator::CannotReallocInPlace;
use std::ptr;
const INITIAL_BLOCK_CAPACITY: usize = 4096;
const BLOCK_ALIGNMENT: usize = 1;
const LARGEST_POWER_OF_TWO: usize = 1 + (isize::MAX as usize);
pub struct OwnedArenaBlock {
vec: RawVec<u8>,
len: usize,
}
fn align_padding(value: usize, alignment: usize) -> usize {
debug_assert!(alignment.is_power_of_two());
let result = (alignment - (value & (alignment - 1))) & (alignment - 1);
debug_assert!(result < alignment);
debug_assert!(result < LARGEST_POWER_OF_TWO);
result
}
fn is_aligned(value: usize, alignment: usize) -> bool {
(value & (alignment - 1)) == 0
}
fn check_layout(layout: Layout) -> Result<(), AllocErr> {
if layout.size() > LARGEST_POWER_OF_TWO {
return Err(AllocErr::Unsupported { details: "Bigger than largest power of two" });
}
debug_assert!(layout.size() > 0);
Ok(())
}
fn debug_check_layout(layout: Layout) {
debug_assert!(layout.size() <= LARGEST_POWER_OF_TWO);
debug_assert!(layout.size() > 0);
}
impl OwnedArenaBlock {
pub fn with_capacity(size: usize) -> Result<Self, AllocErr> {
Ok(OwnedArenaBlock {
//TODO: propagate failure here
vec: RawVec::with_capacity(size),
len: 0,
})
}
unsafe fn is_head(&self, ptr: *mut u8, layout: Layout) -> bool {
ptr.offset(layout.size() as isize) == self.vec.ptr().offset(self.len as isize)
}
unsafe fn reserve(&mut self, increment: usize, request: Layout) -> Result<(), AllocErr> {
if self.vec.cap() - self.len >= increment ||
self.vec.reserve_in_place(self.len, increment) {
self.len += increment;
Ok(())
} else {
Err(AllocErr::Exhausted { request: request })
}
}
}
unsafe impl Alloc for OwnedArenaBlock {
unsafe fn alloc(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {
        check_layout(layout.clone())?;
let padding = align_padding(self.vec.ptr() as usize + self.len, layout.align());
debug_assert!(padding < LARGEST_POWER_OF_TWO);
let increment = layout.size() + padding;
let offset = self.len + padding;
self.reserve(increment, layout)?;
Ok(self.vec.ptr().offset(offset as isize))
}
unsafe fn dealloc(&mut self, ptr: *mut u8, layout: Layout) {
debug_check_layout(layout.clone());
}
unsafe fn realloc(&mut self,
ptr: *mut u8,
old_layout: Layout,
new_layout: Layout)
-> Result<*mut u8, AllocErr> {
debug_check_layout(old_layout.clone());
        check_layout(new_layout.clone())?;
if self.is_head(ptr, old_layout.clone()) {
self.len -= old_layout.size();
match self.alloc(new_layout) {
Ok(new_ptr) => {
if new_ptr != ptr {
ptr::copy(ptr, new_ptr, old_layout.size());
}
Ok(new_ptr)
}
Err(err) => {
self.len += old_layout.size();
Err(err)
}
}
} else {
let new_ptr = self.alloc(new_layout)?;
ptr::copy_nonoverlapping(ptr, new_ptr, old_layout.size());
Ok(new_ptr)
}
}
unsafe fn grow_in_place(&mut self,
ptr: *mut u8,
old_layout: Layout,
new_layout: Layout)
-> Result<(), CannotReallocInPlace> {
debug_check_layout(old_layout.clone());
check_layout(new_layout.clone());
if self.is_head(ptr, old_layout.clone()) {
if is_aligned(ptr as usize, new_layout.align()) {
self.reserve(new_layout.size() - old_layout.size(), new_layout)
.map_err(|_| CannotReallocInPlace)
} else {
Err(CannotReallocInPlace)
}
} else {
Err(CannotReallocInPlace)
}
}
unsafe fn shrink_in_place(&mut self,
ptr: *mut u8,
old_layout: Layout,
new_layout: Layout)
-> Result<(), CannotReallocInPlace> {
debug_check_layout(old_layout.clone());
check_layout(new_layout.clone());
if self.is_head(ptr, old_layout.clone()) {
if is_aligned(ptr as usize, new_layout.align()) {
self.len -= old_layout.size();
self.len += new_layout.size();
Ok(())
} else {
Err(CannotReallocInPlace)
}
} else {
Err(CannotReallocInPlace)
}
}
}
pub struct OwnedArena {
blocks: Vec<OwnedArenaBlock>,
}
impl OwnedArena {
pub fn new() -> Result<Self, AllocErr> {
Ok(OwnedArena { blocks: vec![OwnedArenaBlock::with_capacity(INITIAL_BLOCK_CAPACITY)?] })
}
fn last_mut(&mut self) -> &mut OwnedArenaBlock {
self.blocks.last_mut().unwrap()
}
unsafe fn new_block(&mut self, layout: Layout) -> Result<&mut OwnedArenaBlock, AllocErr> {
let new_capacity = cmp::max(self.blocks.last().unwrap().vec.cap() * 2,
layout.size() + layout.align());
self.blocks
.push(OwnedArenaBlock::with_capacity(new_capacity)?);
Ok(self.last_mut())
}
pub unsafe fn arena_scoped<F, T>(&mut self, callback: F) -> T
where F: FnOnce() -> T,
F: Send,
T: Send
{
let old_block_count = self.blocks.len();
let old_len = self.blocks.last().unwrap().len;
let result = callback();
        self.blocks[old_block_count - 1].len = old_len;
// If we reused all the new blocks, we would pay some cpu and
// fragmentation cost because of transitions between available blocks.
// If we deallocated all the new blocks, we might pay a high cost to
// constantly allocate and deallocate from malloc. The compromise is
// to keep the largest block.
// TODO: eventually return some memory to malloc if the largest block
// is too much larger than what is needed.
        let largest_new_block = self.blocks.drain(old_block_count..).last();
if let Some(mut largest_new_block) = largest_new_block {
largest_new_block.len = 0;
self.blocks.push(largest_new_block);
}
result
}
}
unsafe impl Alloc for OwnedArena {
unsafe fn alloc(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {
check_layout(layout.clone())?;
match self.last_mut().alloc(layout.clone()) {
Ok(result) => Ok(result),
Err(_) => self.new_block(layout.clone())?.alloc(layout.clone()),
}
}
unsafe fn dealloc(&mut self, ptr: *mut u8, layout: Layout) {
// It is possible to implement this for the case when all deallocations
// have the same alignment and occur in reverse allocation order.
// However if this is left empty, most destructors optimize to
// the empty function. The performance improvement and performance
// predictability of a do-nothing implementation is probably worth it.
}
unsafe fn realloc(&mut self,
ptr: *mut u8,
old_layout: Layout,
new_layout: Layout)
-> Result<*mut u8, AllocErr> {
debug_check_layout(old_layout.clone());
        check_layout(new_layout.clone())?;
match self.last_mut().realloc(ptr, old_layout.clone(), new_layout.clone()) {
Ok(result) => Ok(result),
Err(_) => self.new_block(new_layout.clone())?.alloc(new_layout),
}
}
unsafe fn alloc_excess(&mut self, layout: Layout) -> Result<Excess, AllocErr> {
self.last_mut().alloc_excess(layout)
}
unsafe fn realloc_excess(&mut self,
ptr: *mut u8,
old_layout: Layout,
new_layout: Layout)
-> Result<Excess, AllocErr> {
debug_check_layout(old_layout.clone());
        check_layout(new_layout.clone())?;
match self.last_mut().realloc_excess(ptr, old_layout.clone(), new_layout.clone()) {
Ok(result) => Ok(result),
Err(_) => self.new_block(new_layout.clone())?.alloc_excess(new_layout),
}
}
unsafe fn grow_in_place(&mut self,
ptr: *mut u8,
old_layout: Layout,
new_layout: Layout)
-> Result<(), CannotReallocInPlace> {
debug_check_layout(old_layout.clone());
check_layout(new_layout.clone());
self.last_mut().grow_in_place(ptr, old_layout, new_layout)
}
unsafe fn shrink_in_place(&mut self,
ptr: *mut u8,
old_layout: Layout,
new_layout: Layout)
-> Result<(), CannotReallocInPlace> {
debug_check_layout(old_layout.clone());
check_layout(new_layout.clone());
self.last_mut().shrink_in_place(ptr, old_layout, new_layout)
}
}
| true |
49a0d37540b98417414d871d8d153a62c77a09a8
|
Rust
|
oshunter/fuchsia
|
/third_party/rust_crates/vendor/tokio/src/util/slab/shard.rs
|
UTF-8
| 3,704 | 3.140625 | 3 |
[
"MIT",
"BSD-3-Clause"
] |
permissive
|
use crate::util::slab::{page, Address, Entry, MAX_PAGES};
use std::fmt;
// ┌─────────────┐ ┌────────┐
// │ page 1 │ │ │
// ├─────────────┤ ┌───▶│ next──┼─┐
// │ page 2 │ │ ├────────┤ │
// │ │ │ │XXXXXXXX│ │
// │ local_free──┼─┘ ├────────┤ │
// │ global_free─┼─┐ │ │◀┘
// ├─────────────┤ └───▶│ next──┼─┐
// │ page 3 │ ├────────┤ │
// └─────────────┘ │XXXXXXXX│ │
// ... ├────────┤ │
// ┌─────────────┐ │XXXXXXXX│ │
// │ page n │ ├────────┤ │
// └─────────────┘ │ │◀┘
// │ next──┼───▶
// ├────────┤
// │XXXXXXXX│
// └────────┘
// ...
pub(super) struct Shard<T> {
/// The local free list for each page.
///
/// These are only ever accessed from this shard's thread, so they are
/// stored separately from the shared state for the page that can be
/// accessed concurrently, to minimize false sharing.
local: Box<[page::Local]>,
/// The shared state for each page in this shard.
///
/// This consists of the page's metadata (size, previous size), remote free
/// list, and a pointer to the actual array backing that page.
shared: Box<[page::Shared<T>]>,
}
impl<T: Entry> Shard<T> {
pub(super) fn new() -> Shard<T> {
let mut total_sz = 0;
let shared = (0..MAX_PAGES)
.map(|page_num| {
let sz = page::size(page_num);
let prev_sz = total_sz;
total_sz += sz;
page::Shared::new(sz, prev_sz)
})
.collect();
let local = (0..MAX_PAGES).map(|_| page::Local::new()).collect();
Shard { local, shared }
}
pub(super) fn alloc(&self) -> Option<Address> {
// Can we fit the value into an existing page?
for (page_idx, page) in self.shared.iter().enumerate() {
let local = self.local(page_idx);
if let Some(page_offset) = page.alloc(local) {
return Some(page_offset);
}
}
None
}
pub(super) fn get(&self, addr: Address) -> Option<&T> {
let page_idx = addr.page();
        if page_idx >= self.shared.len() {
return None;
}
self.shared[page_idx].get(addr)
}
/// Remove an item on the shard's local thread.
pub(super) fn remove_local(&self, addr: Address) {
let page_idx = addr.page();
if let Some(page) = self.shared.get(page_idx) {
page.remove_local(self.local(page_idx), addr);
}
}
/// Remove an item, while on a different thread from the shard's local thread.
pub(super) fn remove_remote(&self, addr: Address) {
if let Some(page) = self.shared.get(addr.page()) {
page.remove_remote(addr);
}
}
fn local(&self, i: usize) -> &page::Local {
&self.local[i]
}
}
impl<T> fmt::Debug for Shard<T> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("Shard")
.field("shared", &self.shared)
.finish()
}
}
| true |
0c3ddfe2471e4edbf045af98cb839b0b58815f18
|
Rust
|
zellyn/exercism-rust
|
/nth-prime/src/lib.rs
|
UTF-8
| 433 | 3.234375 | 3 |
[
"MIT"
] |
permissive
|
// See https://exercism.io/tracks/rust/exercises/nth-prime/solutions/fc48d015d75c48b5bba6b9a0d923c963
// for a more impressive, lazy-iterator, solution.
pub fn nth(n: u32) -> u32 {
let mut primes: Vec<u32> = vec![2, 3, 5];
let i = n as usize;
let mut nn = 5;
while i + 1 >= primes.len() {
nn += 2;
if !primes.iter().any(|x| nn % x == 0) {
primes.push(nn);
}
}
primes[i]
}
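#[cfg(test)]
mod nth_prime_sketch {
    use super::*;

    // Illustrative check, not part of the original solution: `nth` is zero-indexed,
    // so nth(0) is the first prime.
    #[test]
    fn first_few_primes() {
        assert_eq!(nth(0), 2);
        assert_eq!(nth(3), 7);
        assert_eq!(nth(5), 13);
    }
}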
| true |
f3f30ea1a33c5397d04ca506bba88eeb78fe34c5
|
Rust
|
whentze/raik
|
/src/lib.rs
|
UTF-8
| 3,091 | 3.109375 | 3 |
[] |
no_license
|
#![allow(non_camel_case_types)]
use std::fmt;
extern crate byteorder;
pub mod instruction;
#[derive(Debug, Clone)]
pub struct Memory {
pub data: Vec<u8>,
}
impl fmt::LowerHex for Memory {
fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
let bytes_per_line: usize = 32;
println!("");
let mut printing = true;
for (addr, chunk) in self.data.chunks(bytes_per_line).enumerate() {
if chunk.iter().any(|b| *b != 0) {
printing = true;
write!(f, "{:08x} ", addr * bytes_per_line)?;
for byte in chunk.iter() {
write!(f, "{:02x} ", byte)?;
}
writeln!(f, "")?;
} else if printing {
printing = false;
writeln!(f, " [...]")?;
}
}
Ok(())
}
}
impl Memory {
pub fn new(size: usize) -> Self {
Memory { data: vec![0; size] }
}
pub fn copy_segment(&mut self, segment: &[u8], offset: usize) -> Result<(), ()> {
if offset + segment.len() > self.data.len() {
return Err(());
}
self.data[offset..(offset + segment.len())].copy_from_slice(segment);
Ok(())
}
}
#[derive(Debug, Copy, Clone, Default)]
pub struct Registers {
pub pc: i32,
x: [i32; 32],
}
impl Registers {
pub fn new() -> Self {
Registers { pc: 0, x: [0; 32] }
}
pub fn read_x(&self, reg: u8) -> i32 {
if reg == 0 { 0 } else { self.x[reg as usize] }
}
pub fn write_x(&mut self, reg: u8, val: i32) {
if reg != 0 {
self.x[reg as usize] = val;
}
}
}
impl fmt::Display for Registers {
fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
const REGISTER_NAMES: [&str; 32] = [
"0",
"ra",
"sp",
"gp",
"tp",
"t0",
"t1",
"t2",
"s0",
"s1",
"a0",
"a1",
"a2",
"a3",
"a4",
"a5",
"a6",
"a7",
"s2",
"s3",
"s4",
"s5",
"s6",
"s7",
"s8",
"s9",
"s10",
"s11",
"t3",
"t4",
"t5",
"t6",
];
writeln!(f, "Program Counter: 0x{:08x}", self.pc)?;
for i in 0..32 {
let n = (i % 4) * 8 + i / 4;
let name = REGISTER_NAMES[n as usize];
write!(f, "{:3}:0x{:08x} ", name, self.read_x(n))?;
if i % 4 == 3 {
writeln!(f)?;
};
}
Ok(())
}
}
#[derive(Debug, Clone)]
pub struct ProgramState {
pub mem: Memory,
pub regs: Registers,
}
impl fmt::Display for ProgramState {
fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
write!(f, "Memory map: {:x}", self.mem)?;
write!(f, "{}", self.regs)?;
Ok(())
}
}
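#[cfg(test)]
mod register_sketch {
    use super::*;

    // Illustrative sketch, not part of the original crate: register x0 is
    // hard-wired to zero, which `read_x`/`write_x` above enforce.
    #[test]
    fn x0_is_always_zero() {
        let mut regs = Registers::new();
        regs.write_x(0, 123);
        assert_eq!(regs.read_x(0), 0);
        regs.write_x(5, -7);
        assert_eq!(regs.read_x(5), -7);
    }
}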
| true |
0695a4f67ac866ab80495896b0cc6bb7cabab23f
|
Rust
|
deepinthebuild/cryptopals
|
/set3/challenge19/src/main.rs
|
UTF-8
| 1,998 | 2.703125 | 3 |
[] |
no_license
|
extern crate cryptobuddy;
extern crate rustc_serialize;
use std::io::BufReader;
use std::io::prelude::*;
use std::fs::File;
use std::str;
use rustc_serialize::base64::FromBase64;
use cryptobuddy::{stream, utils, freq_analysis};
static DATA_PATH: &'static str = "data/19.txt";
fn load_data() -> Vec<Vec<u8>> {
let f = File::open(DATA_PATH).unwrap();
let f = BufReader::new(f);
let l: Vec<String> = f.lines().collect::<Result<Vec<_>, _>>().unwrap();
let l: Vec<Vec<u8>> = l.into_iter().map(|x| x.from_base64().unwrap()).collect();
l
}
fn truncate_to_shortest(vec_of_vecs: &mut Vec<Vec<u8>>) {
let min_len = vec_of_vecs.iter().map(|s| s.len()).min().unwrap();
for v in vec_of_vecs {
v.truncate(min_len);
}
}
fn transpose(vec_of_vecs: &Vec<Vec<u8>>) -> Vec<Vec<u8>> {
let blocksize = vec_of_vecs[0].len();
let mut output = Vec::<Vec<u8>>::new();
for t in 0..blocksize {
let block: Vec<u8> = vec_of_vecs.iter().filter_map(|s| s.get(t)).cloned().collect();
output.push(block)
}
output
}
fn find_single_byte_key(data: &[u8]) -> u8 {
let mut best_key = 0;
let mut best_score = 1_000_000f64;
    for x in 0..=255 { // try every candidate key byte, including 0xff
let decrypt = utils::single_byte_xor(&data, x);
let score = freq_analysis::text_score(&decrypt);
if score <= best_score {
best_score = score;
best_key = x;
}
}
best_key
}
fn main() {
let key = utils::random_key();
let nonce = utils::u64_to_bytes(0);
let crypter = stream::CTR::new(&key, &nonce).unwrap();
let d = load_data();
let mut d: Vec<Vec<u8>> = d.into_iter().map(|s| crypter.crypt(&s)).collect();
truncate_to_shortest(&mut d);
let d_t = transpose(&d);
let mut key = Vec::<u8>::new();
for block in d_t {
key.push(find_single_byte_key(&block));
}
for entry in d {
println!("{}",
str::from_utf8(&utils::repeating_key_xor(&entry, &key)).unwrap());
}
}
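#[cfg(test)]
mod transpose_sketch {
    use super::*;

    // Illustrative sketch, not part of the original solution: `transpose` regroups
    // the i-th byte of every ciphertext, which is what makes the per-position
    // single-byte-XOR frequency attack above possible.
    #[test]
    fn transpose_regroups_columns() {
        let rows = vec![vec![1u8, 2, 3], vec![4, 5, 6]];
        assert_eq!(transpose(&rows), vec![vec![1u8, 4], vec![2, 5], vec![3, 6]]);
    }
}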
| true |
e2fc6373b0776aff937ff8d14424e73d6c4c50dc
|
Rust
|
shikharvashistha/oak
|
/experimental/oak_async/tests/dummy_data.rs
|
UTF-8
| 1,501 | 2.765625 | 3 |
[
"Apache-2.0"
] |
permissive
|
//
// Copyright 2020 The Project Oak Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
use oak::{
io::{Decodable, Encodable, Message},
OakError,
};
/// Wrapper around `String` to make it implement `Encodable` and `Decodable`.
///
/// Used as a dummy message payload.
#[derive(Debug, PartialEq)]
pub struct DummyData(pub String);
impl DummyData {
pub fn new(s: &str) -> DummyData {
DummyData(s.into())
}
}
impl Encodable for DummyData {
fn encode(&self) -> Result<Message, OakError> {
Ok(Message {
bytes: self.0.clone().into_bytes(),
handles: Vec::new(),
})
}
}
impl Decodable for DummyData {
fn decode(message: &Message) -> Result<Self, OakError> {
assert!(
message.handles.is_empty(),
"DummyData must not have handles"
);
Ok(String::from_utf8(message.bytes.clone())
.map(DummyData)
.expect("Failed to decode DummyData"))
}
}
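#[cfg(test)]
mod dummy_data_sketch {
    use super::*;

    // Illustrative sketch, not part of the original test helper: encoding and then
    // decoding a `DummyData` value should round-trip the payload unchanged.
    #[test]
    fn encode_decode_roundtrip() {
        let original = DummyData::new("hello");
        let message = original.encode().expect("encoding should succeed");
        let decoded = DummyData::decode(&message).expect("decoding should succeed");
        assert_eq!(decoded, original);
    }
}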
| true |
4bdcbf65d6968f568b6322c36eb86a637e2a65a7
|
Rust
|
connorskees/grass
|
/crates/lib/tests/unary.rs
|
UTF-8
| 2,313 | 2.6875 | 3 |
[
"MIT"
] |
permissive
|
#[macro_use]
mod macros;
test!(
unary_pos_unquoted_ident,
"a {\n color: +foo;\n}\n",
"a {\n color: +foo;\n}\n"
);
test!(
unary_pos_whitespace,
"a {\n color: + foo;\n}\n",
"a {\n color: +foo;\n}\n"
);
test!(
unary_pos_dblquoted_ident,
"a {\n color: +\"foo\";\n}\n",
"a {\n color: +\"foo\";\n}\n"
);
test!(
unary_pos_sglquoted_ident,
"a {\n color: +'foo';\n}\n",
"a {\n color: +\"foo\";\n}\n"
);
test!(
unary_pos_color,
"a {\n color: +\"foo\";\n}\n",
"a {\n color: +\"foo\";\n}\n"
);
test!(
unary_pos_number_unit,
"a {\n color: +1px;\n}\n",
"a {\n color: 1px;\n}\n"
);
test!(
unary_pos_number,
"a {\n color: +10;\n}\n",
"a {\n color: 10;\n}\n"
);
test!(
unary_pos_in_list,
"a {\n color: bar,+ \"bar\" - foo;\n}\n",
"a {\n color: bar, +\"bar\"-foo;\n}\n"
);
test!(
unary_neg_unquoted_ident,
"a {\n color: -foo;\n}\n",
"a {\n color: -foo;\n}\n"
);
test!(
unary_neg_dblquoted_ident,
"a {\n color: -\"foo\";\n}\n",
"a {\n color: -\"foo\";\n}\n"
);
test!(
unary_neg_sglquoted_ident,
"a {\n color: -'foo';\n}\n",
"a {\n color: -\"foo\";\n}\n"
);
test!(
unary_neg_color,
"a {\n color: -\"foo\";\n}\n",
"a {\n color: -\"foo\";\n}\n"
);
test!(
unary_neg_number,
"a {\n color: -1px;\n}\n",
"a {\n color: -1px;\n}\n"
);
test!(
unary_neg_whitespace,
"a {\n color: - 1px;\n}\n",
"a {\n color: -1px;\n}\n"
);
test!(
unary_neg_number_type,
"a {\n color: type-of(- 1px);\n}\n",
"a {\n color: number;\n}\n"
);
test!(
unary_neg_variable,
"$a: 1;\n\na {\n color: -$a;\n}\n",
"a {\n color: -1;\n}\n"
);
test!(
unary_neg_null_paren,
"a {\n color: -(null);\n}\n",
"a {\n color: -;\n}\n"
);
test!(
negative_null_as_ident,
"a {\n color: -null;\n}\n",
"a {\n color: -null;\n}\n"
);
test!(
unary_div_calculation,
"a {\n color: /calc(1rem + 1px);\n}\n",
"a {\n color: /calc(1rem + 1px);\n}\n"
);
error!(
unary_plus_calculation,
"a {\n color: +calc(1rem + 1px);\n}\n", r#"Error: Undefined operation "+calc(1rem + 1px)"."#
);
error!(
unary_neg_calculation,
"a {\n color: -(calc(1rem + 1px));\n}\n", r#"Error: Undefined operation "-calc(1rem + 1px)"."#
);
| true |
f279a414f60879a020ed00afe7ecd09bcfa1fc48
|
Rust
|
knokko/knukki-rs
|
/src/components/menu/flat/simple/domain.rs
|
UTF-8
| 4,774 | 3.5625 | 4 |
[] |
no_license
|
use crate::Point;
#[derive(Copy, Clone, Debug)]
pub struct ComponentDomain {
min_x: f32,
min_y: f32,
max_x: f32,
max_y: f32,
}
impl ComponentDomain {
pub fn between(min_x: f32, min_y: f32, max_x: f32, max_y: f32) -> Self {
Self {
min_x,
min_y,
max_x,
max_y,
}
}
pub fn with_size(min_x: f32, min_y: f32, width: f32, height: f32) -> Self {
Self {
min_x,
min_y,
max_x: min_x + width,
max_y: min_y + height,
}
}
pub fn get_min_x(&self) -> f32 {
self.min_x
}
pub fn get_min_y(&self) -> f32 {
self.min_y
}
pub fn get_max_x(&self) -> f32 {
self.max_x
}
pub fn get_max_y(&self) -> f32 {
self.max_y
}
pub fn get_width(&self) -> f32 {
self.max_x - self.min_x
}
pub fn get_height(&self) -> f32 {
self.max_y - self.min_y
}
pub fn is_inside(&self, point: Point) -> bool {
point.get_x() >= self.get_min_x()
&& point.get_x() <= self.get_max_x()
&& point.get_y() >= self.get_min_y()
&& point.get_y() <= self.get_max_y()
}
pub fn transform(&self, outer: Point) -> Point {
let inner_x = (outer.get_x() - self.get_min_x()) / self.get_width();
let inner_y = (outer.get_y() - self.get_min_y()) / self.get_height();
Point::new(inner_x, inner_y)
}
pub fn transform_back(&self, inner: Point) -> Point {
let outer_x = self.get_min_x() + inner.get_x() * self.get_width();
let outer_y = self.get_min_y() + inner.get_y() * self.get_height();
Point::new(outer_x, outer_y)
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_between() {
// Carefully choose values to ensure they don't cause rounding errors
let domain = ComponentDomain::between(-0.5, 0.25, 1.25, 0.5);
assert_eq!(-0.5, domain.get_min_x());
assert_eq!(0.25, domain.get_min_y());
assert_eq!(1.25, domain.get_max_x());
assert_eq!(0.5, domain.get_max_y());
assert_eq!(1.75, domain.get_width());
assert_eq!(0.25, domain.get_height());
}
#[test]
fn test_with_size() {
let domain = ComponentDomain::with_size(-0.75, -1.0, 0.5, 1.0);
assert_eq!(-0.75, domain.get_min_x());
assert_eq!(-1.0, domain.get_min_y());
assert_eq!(-0.25, domain.get_max_x());
assert_eq!(0.0, domain.get_max_y());
assert_eq!(0.5, domain.get_width());
assert_eq!(1.0, domain.get_height());
}
#[test]
fn test_is_inside() {
let domain = ComponentDomain::between(1.0, 0.0, 2.0, 3.0);
assert!(!domain.is_inside(Point::new(-0.5, -0.5)));
assert!(!domain.is_inside(Point::new(0.5, 0.5)));
assert!(domain.is_inside(Point::new(1.5, 0.5)));
assert!(!domain.is_inside(Point::new(1.5, 3.5)));
assert!(!domain.is_inside(Point::new(2.5, 3.5)));
// Edge case, literally
assert!(domain.is_inside(Point::new(1.5, 0.0)));
// Corner case, literally
assert!(domain.is_inside(Point::new(2.0, 3.0)));
}
#[test]
fn test_transform() {
// These numbers are carefully chosen to avoid rounding errors
let domain = ComponentDomain::between(0.25, 0.5, 0.375, 0.75);
assert_eq!(
Point::new(0.0, 0.0),
domain.transform(Point::new(0.25, 0.5))
);
assert_eq!(
Point::new(1.0, 1.0),
domain.transform(Point::new(0.375, 0.75))
);
assert_eq!(
Point::new(0.25, 0.5),
domain.transform(Point::new(0.28125, 0.625))
);
assert_eq!(
Point::new(-2.0, -2.0),
domain.transform(Point::new(0.0, 0.0))
);
assert_eq!(Point::new(6.0, 2.0), domain.transform(Point::new(1.0, 1.0)));
}
#[test]
fn test_transform_back() {
// This is just the reverse of the test_transform test
let domain = ComponentDomain::between(0.25, 0.5, 0.375, 0.75);
assert_eq!(
Point::new(0.25, 0.5),
domain.transform_back(Point::new(0.0, 0.0))
);
assert_eq!(
Point::new(0.375, 0.75),
domain.transform_back(Point::new(1.0, 1.0))
);
assert_eq!(
Point::new(0.28125, 0.625),
domain.transform_back(Point::new(0.25, 0.5))
);
assert_eq!(
Point::new(0.0, 0.0),
domain.transform_back(Point::new(-2.0, -2.0))
);
assert_eq!(
Point::new(1.0, 1.0),
domain.transform_back(Point::new(6.0, 2.0))
);
}
}
| true |
ae1af88ae0a87289dd96b0e7baba5a7fe21e0f83
|
Rust
|
sclaire-1/tokio
|
/tokio/src/net/driver/reactor/dispatch/sharded_slab.rs
|
UTF-8
| 9,370 | 3.0625 | 3 |
[
"MIT"
] |
permissive
|
use super::*;
use std::fmt;
use crate::loom::sync::Mutex;
/// A sharded slab.
pub(crate) struct Slab {
shards: Box<[Shard]>,
}
/// A slab implemented with a single shard.
// TODO(eliza): once worker threads are available, this type will be
// unnecessary and can be removed.
#[derive(Debug)]
pub(crate) struct SingleShard {
shard: Shard,
local: Mutex<()>,
}
// ┌─────────────┐ ┌────────┐
// │ page 1 │ │ │
// ├─────────────┤ ┌───▶│ next──┼─┐
// │ page 2 │ │ ├────────┤ │
// │ │ │ │XXXXXXXX│ │
// │ local_free──┼─┘ ├────────┤ │
// │ global_free─┼─┐ │ │◀┘
// ├─────────────┤ └───▶│ next──┼─┐
// │ page 3 │ ├────────┤ │
// └─────────────┘ │XXXXXXXX│ │
// ... ├────────┤ │
// ┌─────────────┐ │XXXXXXXX│ │
// │ page n │ ├────────┤ │
// └─────────────┘ │ │◀┘
// │ next──┼───▶
// ├────────┤
// │XXXXXXXX│
// └────────┘
// ...
pub(super) struct Shard {
#[cfg(debug_assertions)]
tid: usize,
/// The local free list for each page.
///
/// These are only ever accessed from this shard's thread, so they are
/// stored separately from the shared state for the page that can be
/// accessed concurrently, to minimize false sharing.
local: Box<[page::Local]>,
/// The shared state for each page in this shard.
///
/// This consists of the page's metadata (size, previous size), remote free
/// list, and a pointer to the actual array backing that page.
shared: Box<[page::Shared]>,
}
pub(crate) const TOKEN_SHIFT: usize = Tid::SHIFT + Tid::LEN;
pub(crate) const MAX_SOURCES: usize = (1 << TOKEN_SHIFT) - 1;
#[allow(dead_code)] // coming back soon!
impl Slab {
/// Returns a new slab with the default configuration parameters.
pub(crate) fn new() -> Self {
Self::with_max_threads(MAX_THREADS)
}
pub(crate) fn with_max_threads(max_threads: usize) -> Self {
// Round the max number of threads to the next power of two and clamp to
// the maximum representable number.
let max = max_threads.next_power_of_two().min(MAX_THREADS);
let shards = (0..max).map(Shard::new).collect();
Self { shards }
}
/// allocs a value into the slab, returning a key that can be used to
/// access it.
///
/// If this function returns `None`, then the shard for the current thread
/// is full and no items can be added until some are removed, or the maximum
/// number of shards has been reached.
pub(crate) fn alloc(&self) -> Option<usize> {
let tid = Tid::current();
self.shards[tid.as_usize()].alloc().map(|idx| tid.pack(idx))
}
/// Removes the value associated with the given key from the slab.
pub(crate) fn remove(&self, idx: usize) {
let tid = Tid::from_packed(idx);
let shard = &self.shards[tid.as_usize()];
if tid.is_current() {
shard.remove_local(idx)
} else {
shard.remove_remote(idx)
}
}
/// Return a reference to the value associated with the given key.
///
/// If the slab does not contain a value for the given key, `None` is
/// returned instead.
pub(in crate::net::driver) fn get(&self, token: usize) -> Option<&page::ScheduledIo> {
let tid = Tid::from_packed(token);
self.shards.get(tid.as_usize())?.get(token)
}
/// Returns an iterator over all the items in the slab.
pub(in crate::net::driver::reactor) fn unique_iter(&mut self) -> iter::UniqueIter<'_> {
let mut shards = self.shards.iter_mut();
let shard = shards.next().expect("must be at least 1 shard");
let mut pages = shard.iter();
let slots = pages.next().and_then(page::Shared::iter);
iter::UniqueIter {
shards,
slots,
pages,
}
}
}
impl SingleShard {
/// Returns a new slab with the default configuration parameters.
pub(crate) fn new() -> Self {
Self {
shard: Shard::new(0),
local: Mutex::new(()),
}
}
/// allocs a value into the slab, returning a key that can be used to
/// access it.
///
/// If this function returns `None`, then the shard for the current thread
/// is full and no items can be added until some are removed, or the maximum
/// number of shards has been reached.
pub(crate) fn alloc(&self) -> Option<usize> {
// we must lock the slab to alloc an item.
let _local = self.local.lock().unwrap();
self.shard.alloc()
}
/// Removes the value associated with the given key from the slab.
pub(crate) fn remove(&self, idx: usize) {
// try to lock the slab so that we can use `remove_local`.
let lock = self.local.try_lock();
// if we were able to lock the slab, we are "local" and can use the fast
// path; otherwise, we will use `remove_remote`.
if lock.is_ok() {
self.shard.remove_local(idx)
} else {
self.shard.remove_remote(idx)
}
}
/// Return a reference to the value associated with the given key.
///
/// If the slab does not contain a value for the given key, `None` is
/// returned instead.
pub(in crate::net::driver) fn get(&self, token: usize) -> Option<&page::ScheduledIo> {
self.shard.get(token)
}
/// Returns an iterator over all the items in the slab.
pub(in crate::net::driver::reactor) fn unique_iter(&mut self) -> iter::ShardIter<'_> {
let mut pages = self.shard.iter_mut();
let slots = pages.next().and_then(|pg| pg.iter());
iter::ShardIter { slots, pages }
}
}
impl Shard {
fn new(_idx: usize) -> Self {
let mut total_sz = 0;
let shared = (0..MAX_PAGES)
.map(|page_num| {
let sz = page::size(page_num);
let prev_sz = total_sz;
total_sz += sz;
page::Shared::new(sz, prev_sz)
})
.collect();
let local = (0..MAX_PAGES).map(|_| page::Local::new()).collect();
Self {
#[cfg(debug_assertions)]
tid: _idx,
local,
shared,
}
}
fn alloc(&self) -> Option<usize> {
// Can we fit the value into an existing page?
for (page_idx, page) in self.shared.iter().enumerate() {
let local = self.local(page_idx);
if let Some(page_offset) = page.alloc(local) {
return Some(page_offset);
}
}
None
}
#[inline(always)]
fn get(&self, idx: usize) -> Option<&page::ScheduledIo> {
#[cfg(debug_assertions)]
debug_assert_eq!(Tid::from_packed(idx).as_usize(), self.tid);
let addr = page::Addr::from_packed(idx);
let i = addr.index();
        if i >= self.shared.len() {
return None;
}
self.shared[i].get(addr)
}
/// Remove an item on the shard's local thread.
fn remove_local(&self, idx: usize) {
#[cfg(debug_assertions)]
debug_assert_eq!(Tid::from_packed(idx).as_usize(), self.tid);
let addr = page::Addr::from_packed(idx);
let page_idx = addr.index();
if let Some(page) = self.shared.get(page_idx) {
page.remove_local(self.local(page_idx), addr, idx);
}
}
/// Remove an item, while on a different thread from the shard's local thread.
fn remove_remote(&self, idx: usize) {
#[cfg(debug_assertions)]
debug_assert_eq!(Tid::from_packed(idx).as_usize(), self.tid);
let addr = page::Addr::from_packed(idx);
let page_idx = addr.index();
if let Some(page) = self.shared.get(page_idx) {
page.remove_remote(addr, idx);
}
}
#[inline(always)]
fn local(&self, i: usize) -> &page::Local {
&self.local[i]
}
pub(super) fn iter(&self) -> std::slice::Iter<'_, page::Shared> {
self.shared.iter()
}
fn iter_mut(&mut self) -> std::slice::IterMut<'_, page::Shared> {
self.shared.iter_mut()
}
}
impl fmt::Debug for Slab {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("Slab")
.field("shards", &self.shards)
.finish()
}
}
unsafe impl Send for Slab {}
unsafe impl Sync for Slab {}
unsafe impl Send for SingleShard {}
unsafe impl Sync for SingleShard {}
impl fmt::Debug for Shard {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let mut d = f.debug_struct("Shard");
#[cfg(debug_assertions)]
d.field("tid", &self.tid);
d.field("shared", &self.shared).finish()
}
}
| true |
97c89c378dd85193b9baed13af6da7b8641570ec
|
Rust
|
katharostech/luminance-glow
|
/src/lib.rs
|
UTF-8
| 2,968 | 2.8125 | 3 |
[] |
no_license
|
//! Glow backend for Luminance
//!
//! This crate provides a [glow] backend for [luminance]. It is capable of targeting desktop using
//! OpenGL and web using both WebGL 2 and WebGL 1 ( though WebGL 1 has some caveats such as
//! supported texture formats ).
//!
//! [luminance]: https://crates.io/crates/luminance
//!
//! [glow]: https://github.com/grovesNL/glow
use std::cell::RefCell;
use std::rc::Rc;
#[macro_use]
mod slice;
mod buffer;
mod framebuffer;
mod pipeline;
mod pixel;
mod shader;
mod state;
mod tess;
mod texture;
use glow::Context as GlowContext;
use state::GlowState;
pub use state::StateQueryError;
/// The GLSL shader version to use
///
/// This affects the version heading added automatically to the top of the shader strings provided
/// to luminance.
#[derive(Debug, Clone, Copy)]
pub enum ShaderVersion {
Gles3,
Gles1,
}
/// The graphics context which must be provided to create a [`Glow`] backend
pub struct Context {
glow_context: GlowContext,
is_webgl1: bool,
shader_version: ShaderVersion,
}
impl Context {
/// Create a native context from a GL loader function
#[cfg(not(wasm))]
pub unsafe fn from_loader_function<F>(loader_function: F, shader_version: ShaderVersion) -> Self
where
F: FnMut(&str) -> *const std::os::raw::c_void,
{
Self {
glow_context: GlowContext::from_loader_function(loader_function),
is_webgl1: false,
shader_version,
}
}
/// Create a WebGL 1 context
///
    /// > ⚠️ **Warning:** The WebGL 1 backend has limitations that the native and WebGL 2 backends
    /// > do not have. The exact limitations are outside the scope of this note, but include
/// > things like limited support for different pixel formats, etc.
#[cfg(wasm)]
pub fn from_webgl1_context(context: web_sys::WebGlRenderingContext) -> Self {
Self {
glow_context: GlowContext::from_webgl1_context(context),
is_webgl1: true,
shader_version: ShaderVersion::Gles1,
}
}
/// Create a WebGL 2 context
#[cfg(wasm)]
pub fn from_webgl2_context(
context: web_sys::WebGl2RenderingContext,
shader_version: ShaderVersion,
) -> Self {
Self {
glow_context: GlowContext::from_webgl2_context(context),
is_webgl1: false,
shader_version,
}
}
}
/// The Luminance Glow backend
#[derive(Debug)]
pub struct Glow {
pub(crate) state: Rc<RefCell<GlowState>>,
}
impl Glow {
/// Create a glow backend instance from a `glow` [`Context`][glow::Context]
pub fn from_context(ctx: Context) -> Result<Self, StateQueryError> {
let Context {
glow_context,
is_webgl1,
shader_version,
} = ctx;
GlowState::new(glow_context, is_webgl1, shader_version).map(|state| Glow {
state: Rc::new(RefCell::new(state)),
})
}
}
| true |
a31a2ef027829f94484cc753ccaff18d24bf5ad4
|
Rust
|
Gilnaa/rusty3bar
|
/src/infinite_array.rs
|
UTF-8
| 2,109 | 3.125 | 3 |
[
"Apache-2.0"
] |
permissive
|
//! by dtolnay
//! A struct used to deserialize an infinite array of JSON objects.
use serde::de::DeserializeOwned;
use serde_json;
use std::io;
use std::marker::PhantomData;
/// A struct used to deserialize an infinite array of JSON objects of type T,
/// coming from a stream of type R.
pub struct InfiniteArray<R, T> {
reader: R,
skip: Option<u8>,
marker: PhantomData<T>,
}
impl<R, T> InfiniteArray<R, T> {
    /// Create a new infinite-array deserializer using the given reader.
pub fn new(reader: R) -> Self {
InfiniteArray {
reader: reader,
skip: Some(b'['),
marker: PhantomData,
}
}
}
impl<R, T> InfiniteArray<R, T>
where R: io::Read
{
fn skip_past_byte(&mut self, byte: u8) -> io::Result<bool> {
let mut one_byte = [0];
loop {
if self.reader.read_exact(&mut one_byte).is_err() {
return Ok(false);
}
if one_byte[0] == byte {
return Ok(true);
}
if !(one_byte[0] as char).is_whitespace() {
return Err(io::Error::new(io::ErrorKind::InvalidInput,
format!("byte {}", one_byte[0])));
}
}
}
}
impl<R, T> Iterator for InfiniteArray<R, T>
where R: io::Read,
T: DeserializeOwned
{
type Item = io::Result<T>;
fn next(&mut self) -> Option<Self::Item> {
if let Some(skip) = self.skip {
match self.skip_past_byte(skip) {
Ok(true) => {}
Ok(false) => {
return None;
}
Err(err) => {
return Some(Err(err));
}
}
self.skip = None;
}
let de = serde_json::Deserializer::from_reader(&mut self.reader);
match de.into_iter().next() {
Some(Ok(v)) => {
self.skip = Some(b',');
Some(Ok(v))
}
Some(Err(err)) => Some(Err(err.into())),
None => None,
}
}
}
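#[cfg(test)]
mod infinite_array_sketch {
    use super::*;
    use serde_json::Value;

    // Illustrative sketch, not part of the original module: reading from an
    // in-memory, never-closed JSON array (as the i3bar protocol emits) yields one
    // item per element and ends once the reader is exhausted.
    #[test]
    fn reads_items_from_an_unterminated_array() {
        let input: &[u8] = br#"[{"x": 1}, {"x": 2}"#;
        let mut iter = InfiniteArray::<_, Value>::new(input);
        assert_eq!(iter.next().unwrap().unwrap()["x"].as_i64(), Some(1));
        assert_eq!(iter.next().unwrap().unwrap()["x"].as_i64(), Some(2));
        assert!(iter.next().is_none());
    }
}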
| true |
e2e8d41903f25ad387741424a1a1aefb7d048a15
|
Rust
|
feather-rs/feather
|
/feather/old/server/entity/src/broadcasters/entity_creation.rs
|
UTF-8
| 4,744 | 3.109375 | 3 |
[
"Apache-2.0"
] |
permissive
|
use feather_core::entitymeta::EntityMetadata;
use feather_core::network::packets::PacketEntityMetadata;
use feather_core::util::Position;
use feather_server_types::{
CreationPacketCreator, EntitySendEvent, EntitySpawnEvent, Game, Network, NetworkId,
PlayerJoinEvent, SpawnPacketCreator,
};
use fecs::{IntoQuery, Read, World};
/// When an entity is created and has a `CreationPacketCreator` and/or `SpawnPacketCreator`,
/// broadcasts the packets to all online clients.
#[fecs::event_handler]
pub fn on_entity_spawn_send_to_clients(
event: &EntitySpawnEvent,
game: &mut Game,
world: &mut World,
) {
let accessor = world.entity(event.entity).expect("entity does not exist");
if let Some(creator) = world.try_get::<CreationPacketCreator>(event.entity) {
let packet = creator.get(&accessor);
game.broadcast_global_boxed(world, packet, None);
}
let mut to_trigger = vec![];
if let Some(creator) = world.try_get::<SpawnPacketCreator>(event.entity) {
// Send metadata before spawn packet. Not sure why this works,
// but if we don't do this, then the client just despawns
// the entity immediately after sending.
if let Some(meta) = world.try_get::<EntityMetadata>(event.entity) {
let packet = PacketEntityMetadata {
entity_id: world.get::<NetworkId>(event.entity).0,
metadata: (&*meta).clone(),
};
game.broadcast_entity_update(world, packet, event.entity, Some(event.entity));
}
// Now send spawn packet: Spawn Object / Spawn Player / Spawn Mob / whatever.
let packet = creator.get(&accessor);
game.broadcast_entity_update_boxed(world, packet, event.entity, Some(event.entity));
let chunk = world.get::<Position>(event.entity).chunk();
drop(creator);
// trigger on_entity_send
for player in game.chunk_holders.holders_for(chunk) {
if world.try_get::<Network>(*player).is_some() {
to_trigger.push(*player);
}
}
}
for client in to_trigger {
game.handle(
world,
EntitySendEvent {
entity: event.entity,
client,
},
);
}
}
/// When a player joins, sends existing entities to the player.
///
/// This only handles init packets (PlayerInfo, etc.)—spawn packets
/// are handled by the view update mechanism in `crate::view`.
#[fecs::event_handler]
pub fn on_player_join_send_existing_entities(event: &PlayerJoinEvent, world: &mut World) {
let network = world.get::<Network>(event.player);
for (entity, creator) in <Read<CreationPacketCreator>>::query().iter_entities(world.inner()) {
let accessor = world
.entity(entity)
.expect("query yielded entity which does not exist");
let packet = creator.get(&accessor);
network.send_boxed(packet);
}
}
#[cfg(test)]
mod tests {
use super::*;
use crate::item;
use feather_core::items::{Item, ItemStack};
use feather_core::network::packets::{PlayerInfo, SpawnObject};
use feather_core::position;
use feather_test_framework::Test;
use std::collections::HashSet;
#[test]
fn send_on_spawn() {
let stack = ItemStack::new(Item::Sand, 47);
let mut test = Test::new();
let item1 =
test.entity(item::create(stack, Default::default()).with(position!(0.0, 100.0, 0.0)));
let player1 = test.player("player1", Position::default());
let player2 = test.player("player2", position!(234_234.0, 342.0, 23.0));
test.handle(
EntitySpawnEvent { entity: item1 },
on_entity_spawn_send_to_clients,
);
let sent = test.sent::<SpawnObject>(player1).unwrap();
assert_eq!(sent.entity_id, test.id(item1));
assert!(test.sent::<SpawnObject>(player2).is_none());
}
#[test]
fn send_existing_entities() {
let mut test = Test::new();
let player1 = test.player("player1", position!(1000.0, -5.0, 0.0));
let player2 = test.player("player2", position!(2000.0, 2_138_901.0, 0.0));
let player3 = test.player("player3", position!(950.0, 255.0, 0.0));
test.handle(
PlayerJoinEvent { player: player3 },
on_player_join_send_existing_entities,
);
let mut players_sent = HashSet::new();
for _ in 0..3 {
let packet = test.sent::<PlayerInfo>(player3).unwrap();
players_sent.insert(packet.uuid);
}
for expected in &[player1, player2, player3] {
assert!(players_sent.contains(&test.uuid(*expected)));
}
}
}
| true |
3343e677b8066ccab13f51494d8c841ec095850a
|
Rust
|
paulcacheux/AdventOfCode2020
|
/src/day12.rs
|
UTF-8
| 5,589 | 3.5 | 4 |
[
"MIT"
] |
permissive
|
use crate::common::AdventResult;
#[derive(Debug, Clone, Copy)]
enum Direction {
North,
South,
East,
West,
Left,
Right,
Forward,
}
impl Direction {
fn get_offsets(self) -> (i32, i32) {
match self {
Direction::North => (0, -1),
Direction::South => (0, 1),
Direction::East => (1, 0),
Direction::West => (-1, 0),
_ => panic!("Can't compute offsets"),
}
}
fn turn_left(self) -> Self {
match self {
Direction::North => Direction::West,
Direction::South => Direction::East,
Direction::East => Direction::North,
Direction::West => Direction::South,
_ => panic!("Can't turn left"),
}
}
fn turn_right(self) -> Self {
match self {
Direction::North => Direction::East,
Direction::South => Direction::West,
Direction::East => Direction::South,
Direction::West => Direction::North,
_ => panic!("Can't turn right"),
}
}
fn turn_repeat(self, turn_fn: fn(Self) -> Self, angle: i32) -> Self {
let count = angle / 90;
let mut res = self;
for _ in 0..count {
res = turn_fn(res);
}
res
}
fn turn_left_angle(self, angle: i32) -> Self {
self.turn_repeat(Direction::turn_left, angle)
}
fn turn_right_angle(self, angle: i32) -> Self {
self.turn_repeat(Direction::turn_right, angle)
}
}
#[derive(Debug, Clone)]
struct Instruction {
direction: Direction,
value: i32,
}
impl Instruction {
fn read_from_line(line: &str) -> Self {
let direction = match line.chars().next().expect("Cannot read direction") {
'N' => Direction::North,
'S' => Direction::South,
'E' => Direction::East,
'W' => Direction::West,
'L' => Direction::Left,
'R' => Direction::Right,
'F' => Direction::Forward,
_ => panic!("Unknown direction"),
};
let value = line[1..].parse().expect("Cannot read count");
Instruction { direction, value }
}
}
trait State {
fn new() -> Self;
fn apply(&mut self, instruction: &Instruction);
fn manhattan_distance(&self) -> i32;
}
#[derive(Debug, Clone, Copy)]
struct State1 {
x: i32,
y: i32,
direction: Direction,
}
impl State for State1 {
fn new() -> Self {
State1 {
x: 0,
y: 0,
direction: Direction::East,
}
}
fn apply(&mut self, instruction: &Instruction) {
let current_direction = match instruction.direction {
Direction::Left => {
self.direction = self.direction.turn_left_angle(instruction.value);
return;
}
Direction::Right => {
self.direction = self.direction.turn_right_angle(instruction.value);
return;
}
Direction::Forward => self.direction,
other => other,
};
let (dx, dy) = current_direction.get_offsets();
self.x += dx * instruction.value;
self.y += dy * instruction.value;
}
fn manhattan_distance(&self) -> i32 {
self.x.abs() + self.y.abs()
}
}
fn part<S: State + std::fmt::Debug>(instructions: &[Instruction]) -> i32 {
let mut state = S::new();
for inst in instructions {
state.apply(inst);
}
state.manhattan_distance()
}
#[derive(Debug, Clone, Copy)]
struct State2 {
boat_x: i32,
boat_y: i32,
way_dx: i32,
way_dy: i32,
direction: Direction,
}
impl State for State2 {
fn new() -> Self {
State2 {
boat_x: 0,
boat_y: 0,
way_dx: 10,
way_dy: -1,
direction: Direction::East,
}
}
fn apply(&mut self, instruction: &Instruction) {
match instruction.direction {
Direction::North | Direction::South | Direction::East | Direction::West => {
let (dx, dy) = instruction.direction.get_offsets();
self.way_dx += dx * instruction.value;
self.way_dy += dy * instruction.value
}
Direction::Left => {
let count = instruction.value / 90;
for _ in 0..count {
let (nx, ny) = (self.way_dy, -self.way_dx);
self.way_dx = nx;
self.way_dy = ny;
}
}
Direction::Right => {
let count = instruction.value / 90;
for _ in 0..count {
let (nx, ny) = (-self.way_dy, self.way_dx);
self.way_dx = nx;
self.way_dy = ny;
}
}
Direction::Forward => {
self.boat_x += self.way_dx * instruction.value;
self.boat_y += self.way_dy * instruction.value;
}
}
}
fn manhattan_distance(&self) -> i32 {
self.boat_x.abs() + self.boat_y.abs()
}
}
fn read_instructions(path: &str) -> AdventResult<Vec<Instruction>> {
let content = std::fs::read_to_string(path)?;
Ok(content.lines().map(Instruction::read_from_line).collect())
}
pub fn run(path: &str) {
let instructions = read_instructions(path).expect("Cannot read instructions");
println!("day12 part1: {}", part::<State1>(&instructions));
println!("day12 part2: {}", part::<State2>(&instructions));
}
| true |
4d0cff25a282cb1b65f28d806f3749c4283f1b23
|
Rust
|
voetsjoeba/aoc2019
|
/src/day24.rs
|
UTF-8
| 14,332 | 3.015625 | 3 |
[] |
no_license
|
// vim: set ai et ts=4 sts=4 sw=4:
#![allow(unused)]
use std::convert::From;
use std::collections::{HashSet, HashMap};
use std::fmt;
use crate::util;
use crate::dprint::*;
#[derive(Clone, PartialEq, Eq, Debug, Hash)]
struct Biome(u32); // biome is 5x5, so can be encoded in bits
impl Biome {
fn bit(n: usize) -> u32 {
1 << n
}
pub fn is_empty(&self) -> bool {
self.0 == 0
}
pub fn biodiversity_rating(&self) -> u32 {
self.0
}
pub fn num_bugs(&self) -> u32 {
self.0.count_ones() as u32
}
pub fn has_bug_at(&self, pos: usize) -> bool {
let bit = Self::bit(pos);
self.0 & bit == bit
}
pub fn advance_by(&self, n: usize) -> Biome {
let mut current = self.clone();
for _ in 0..n {
current = current.advance();
}
current
}
pub fn advance(&self) -> Biome {
let mut new_encoded = 0u32;
for n in 0usize..25 {
let num_neighbouring_bugs = (n >= 5 && self.has_bug_at(n-5)) as usize // upper edge
+ (n%5 != 0 && self.has_bug_at(n-1)) as usize // left edge
+ (n%5 != 4 && self.has_bug_at(n+1)) as usize // right edge
+ (n < 20 && self.has_bug_at(n+5)) as usize; // bottom edge
if self.has_bug_at(n) {
if num_neighbouring_bugs == 1 {
new_encoded |= Self::bit(n);
}
} else {
if num_neighbouring_bugs == 1 || num_neighbouring_bugs == 2 {
new_encoded |= Self::bit(n);
}
}
}
Biome(new_encoded)
}
pub fn visualize(&self) -> String {
let mut result = String::new();
for n in 0..25 {
let mask = Self::bit(n);
if self.0 & mask == mask {
result.push_str("# ");
} else {
result.push_str(". ");
}
if (n+1) % 5 == 0 {
result.push('\n');
}
}
result.truncate(result.trim_end().len()); // right trim in place
result
}
}
impl Default for Biome {
fn default() -> Biome {
Biome(0)
}
}
impl From<&Vec<&str>> for Biome {
fn from(lines: &Vec<&str>) -> Self {
let mut encoded = 0u32;
for n in 0..25 {
if lines[n/5].chars().nth(n%5).unwrap() == '#' {
encoded |= Self::bit(n);
}
}
Self(encoded)
}
}
impl fmt::Display for Biome {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.visualize())
}
}
#[derive(Clone)]
struct RecursiveBiome {
levels: HashMap<i32, Biome>,
}
struct RecLocation { // identifies a position in the recursive biome
level: i32,
index: usize
}
macro_rules! recpos {
($level:expr, $index:expr) => {RecLocation { level: $level, index: $index }};
}
impl RecursiveBiome {
pub fn new(initial_biome: &Biome) -> Self {
let mut levels = HashMap::<i32, Biome>::new();
levels.insert(0, initial_biome.clone());
Self { levels }
}
pub fn neighbours_of(pos: &RecLocation) -> Vec<RecLocation> {
// given a position within the recursive biome, determines its neighbour positions.
// note: "outer" levels are considered level -1, "inner" levels are considered +1.
//
// | | | |
// 0 | 1 | 2 | 3 | 4
// | | | |
// -----+-----+---------+-----+-----
// | | | |
// 5 | 6 | 7 | 8 | 9
// | | | |
// -----+-----+---------+-----+-----
// | |A|B|C|D|E| |
// | |-+-+-+-+-| |
// | |F|G|H|I|J| |
// | |-+-+-+-+-| |
// 10 | 11 |K|L|?|N|O| 13 | 14
// | |-+-+-+-+-| |
// | |P|Q|R|S|T| |
// | |-+-+-+-+-| |
// | |U|V|W|X|Y| |
// -----+-----+---------+-----+-----
// | | | |
// 15 | 16 | 17 | 18 | 19
// | | | |
// -----+-----+---------+-----+-----
// | | | |
// 20 | 21 | 22 | 23 | 24
// | | | |
//
let mut result = Vec::<RecLocation>::new();
// determine this position's upper neighbour
if pos.index < 5 {
result.push(recpos![pos.level-1, 7]); // has 1 upper neighbour in outer level
} else if pos.index == 17 {
result.extend((20..25).map(|i| recpos![pos.level+1, i])); // has 5 upper neighbours in inner level
} else {
result.push(recpos![pos.level, pos.index-5]); // has 1 upper neighbour in current level
}
// determine this position's left neighbour
if pos.index % 5 == 0 {
result.push(recpos![pos.level-1, 11]); // has 1 left neighbour in outer level
} else if pos.index == 13 {
result.extend([4,9,14,19,24].iter().map(|&i| recpos![pos.level+1, i])); // has 5 left neighbours in inner level
} else {
result.push(recpos![pos.level, pos.index-1]); // has 1 left neighbour in current level
}
// determine this position's right neighbour
if pos.index % 5 == 4 {
result.push(recpos![pos.level-1, 13]); // has 1 right neighbour in outer level
} else if pos.index == 11 {
result.extend([0,5,10,15,20].iter().map(|&i| recpos![pos.level+1, i])); // has 5 right neighbours in inner level
} else {
result.push(recpos![pos.level, pos.index+1]); // has 1 right neighbour in current level
}
// determine this position's bottom neighbour
if pos.index >= 20 {
result.push(recpos![pos.level-1, 17]); // has 1 bottom neighbour in outer level
} else if pos.index == 7 {
result.extend((0..5).map(|i| recpos![pos.level+1, i])); // has 5 bottom neighbours in inner level
} else {
result.push(recpos![pos.level, pos.index+5]); // has 1 bottom neighbour in current level
}
result
}
pub fn has_bug_at(&self, pos: &RecLocation) -> bool {
// look up the requested level in the stack; if that level doesn't exist in the stack,
// then that means it's empty and the result is therefore necessarily false
if let Some(biome) = self.levels.get(&pos.level) {
biome.has_bug_at(pos.index)
} else {
false
}
}
pub fn num_bugs(&self) -> u32 {
self.levels.values().map(|biome| biome.num_bugs()).sum()
}
pub fn advance_by(&self, n: usize) -> RecursiveBiome {
let mut current = self.clone();
for _ in 0..n {
current = current.advance();
}
current
}
pub fn advance(&self) -> RecursiveBiome {
let mut result = self.clone();
// record the new state of all the bugs at the currently-recorded biome levels,
// (but leave out their center position at each biome level since those contain deeper recursion
// levels and shouldn't be regarded as containing bugs)
for (&level, biome) in &self.levels {
let mut new_encoded = 0u32;
for n in 0..25 {
if n == 12 { continue; } // skip center position
let pos = recpos![level, n];
let num_neighbouring_bugs = Self::neighbours_of(&pos)
.iter()
.filter(|p| self.has_bug_at(p))
.count();
// TODO: copy/paste from Biome::advance
if self.has_bug_at(&pos) {
if num_neighbouring_bugs == 1 {
new_encoded |= Biome::bit(n);
}
} else {
if num_neighbouring_bugs == 1 || num_neighbouring_bugs == 2 {
new_encoded |= Biome::bit(n);
}
}
}
result.levels.insert(level, Biome(new_encoded));
}
let max_level: i32 = *self.levels.keys().max().unwrap();
let min_level: i32 = *self.levels.keys().min().unwrap();
// additionally, spawn a new empty outermost and innermost biome, and see if any of the bugs
// along their rim to the previous level have been affected, and record those as well.
// if they are non-empty, add those new biomes to the result; otherwise omit them to save some memory.
let mut new_outermost = Biome::default();
for &n in [7,11,13,17].iter() {
let pos = recpos![min_level-1, n];
let num_neighbouring_bugs = Self::neighbours_of(&pos)
.iter()
                .filter(|p| self.has_bug_at(p)) // note: has_bug_at() will transparently deal with this new level number and return false for unknown levels like this one
.count();
// we only need to consider whether to change an empty spot into a bug,
// since these levels start off empty
if num_neighbouring_bugs == 1 || num_neighbouring_bugs == 2 {
new_outermost.0 |= Biome::bit(n);
}
}
let mut new_innermost = Biome::default();
for &n in [ 0, 1, 2, 3, 4,
5, 9,
10, 14,
15, 19,
20, 21, 22, 23, 24 ].iter()
{
let pos = recpos![max_level+1, n];
let num_neighbouring_bugs = Self::neighbours_of(&pos)
.iter()
                .filter(|p| self.has_bug_at(p)) // note: has_bug_at() will transparently deal with this new level number and return false for unknown levels like this one
.count();
// we only need to consider whether to change an empty spot into a bug,
// since these levels start off empty
if num_neighbouring_bugs == 1 || num_neighbouring_bugs == 2 {
new_innermost.0 |= Biome::bit(n);
}
}
if !new_outermost.is_empty() {
result.levels.insert(min_level-1, new_outermost);
}
if !new_innermost.is_empty() {
result.levels.insert(max_level+1, new_innermost);
}
result
}
#[allow(non_snake_case)]
pub fn visualize(&self) -> String {
let mut result = String::new();
let mut levels: Vec<i32> = self.levels.keys().copied().collect();
levels.sort();
for L in levels {
let biome = &self.levels[&L];
result.push_str(&format!("Level {}:\n", L));
result.push_str(&biome.visualize());
result.push_str("\n\n");
}
result.truncate(result.trim_end().len()); // right trim in place
result
}
}
impl fmt::Display for RecursiveBiome {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.visualize())
}
}
pub fn main() {
let lines: Vec<String> = util::file_read_lines("input/day24.txt");
let biome = Biome::from(&lines.iter().map(|line| &line[..]).collect());
println!("{}", part1(&biome));
println!("{}", part2(&biome));
}
fn part1(biome: &Biome) -> u32 {
let mut seen = HashSet::<Biome>::new();
let mut current_state = biome.clone();
loop {
        if seen.contains(&current_state) {
return current_state.biodiversity_rating();
}
seen.insert(current_state.clone());
current_state = current_state.advance();
}
}
fn part2(biome: &Biome) -> u32 {
let mut biome = RecursiveBiome::new(biome);
biome.advance_by(200).num_bugs()
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn examples() {
let stages = vec![
Biome::from(&vec![
"....#",
"#..#.",
"#..##",
"..#..",
"#....",
]),
Biome::from(&vec![
"#..#.",
"####.",
"###.#",
"##.##",
".##..",
]),
Biome::from(&vec![
"#####",
"....#",
"....#",
"...#.",
"#.###",
]),
Biome::from(&vec![
"#....",
"####.",
"...##",
"#.##.",
".##.#",
]),
Biome::from(&vec![
"####.",
"....#",
"##..#",
".....",
"##...",
]),
];
assert_eq!(stages[0].advance(), stages[1]);
assert_eq!(stages[1].advance(), stages[2]);
assert_eq!(stages[2].advance(), stages[3]);
assert_eq!(stages[3].advance(), stages[4]);
assert_eq!(Biome::from(&vec![
".....",
".....",
".....",
"#....",
".#...",
]).biodiversity_rating(), 2129920);
}
#[test]
fn recursive_example() {
let mut rec_biome = RecursiveBiome::new(
&Biome::from(&vec![
"....#",
"#..#.",
"#..##",
"..#..",
"#....",
])
);
assert_eq!(rec_biome.advance_by(10).num_bugs(), 99);
}
}
| true |
09370d4c35b7ba6a004401f093e68fbade44118f
|
Rust
|
LeopoldArkham/Molten
|
/examples/cargo-add.rs
|
UTF-8
| 676 | 2.515625 | 3 |
[
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0",
"MIT"
] |
permissive
|
extern crate Molten;
use std::io::{Read, Write};
use std::fs::File;
use std::error::Error;
use Molten::key_value;
fn main() {
match run() {
Err(e) => println!("{}", e),
_ => {}
}
}
fn run() -> Result<(), Box<Error>> {
let mut buf = String::new();
let mut f = File::open("examples/_cargo.toml")?;
f.read_to_string(&mut buf)?;
let mut manifest = Molten::parser::Parser::new(&buf).parse()?;
let new_dep = key_value("parsehole = \"6.2.8\"")?;
manifest["dependencies"].append(new_dep.0, new_dep.1)?;
let mut out = File::create("examples/cargo_new.toml")?;
    out.write_all(manifest.as_string().as_bytes())?;
Ok(())
}
| true |
c2261bc7096488b55cb6aa279438be0e4df8b862
|
Rust
|
petrSchreiber/advent-of-code-2018-rs
|
/day_01/src/part01.rs
|
UTF-8
| 108 | 2.671875 | 3 |
[] |
no_license
|
pub fn get_frequency(increments: &std::vec::Vec<i32>) -> i32 {
increments.into_iter().sum::<i32>()
}
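#[cfg(test)]
mod frequency_sketch {
    use super::*;

    // Illustrative check, not part of the original solution: the resulting
    // frequency is simply the sum of all changes.
    #[test]
    fn sums_increments() {
        assert_eq!(get_frequency(&vec![1, -2, 3]), 2);
    }
}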
| true |
6ac37f3f4dc63d40c6ec5ce7429ef63e46a96614
|
Rust
|
kparkins/learn_rust
|
/src/calculations.rs
|
UTF-8
| 845 | 3.375 | 3 |
[] |
no_license
|
use std::collections::HashMap;
pub fn mean(numbers: &Vec<i32>) -> f32 {
let sum = numbers.iter().fold(0, |a, b| a + b) as f32;
let length = numbers.len() as f32;
return sum / length;
}
pub fn median(numbers: &Vec<i32>) -> f32 {
    let mut numbers = numbers.clone();
    numbers.sort();
    let midpoint = numbers.len() / 2;
    let mut median = numbers[midpoint] as f32;
    if numbers.len() % 2 == 0 {
        // even length: average the two middle values
        median += numbers[midpoint - 1] as f32;
        median /= 2.0;
    }
    median
}
pub fn mode(numbers: &Vec<i32>) -> i32 {
let mut counts: HashMap<i32, u32> = HashMap::new();
for &n in numbers {
*counts.entry(n).or_insert(0) += 1
}
counts
.into_iter()
.max_by(|(_, a), (_, b)| a.cmp(&b))
.map(|(a, _)| a)
.expect("cannot find mode of zero length vector")
}
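#[cfg(test)]
mod stats_sketch {
    use super::*;

    // Illustrative checks, not part of the original module, for the three summary
    // statistics defined above.
    #[test]
    fn basic_statistics() {
        let numbers = vec![3, 1, 2, 2];
        assert_eq!(mean(&numbers), 2.0);
        assert_eq!(median(&numbers), 2.0);
        assert_eq!(mode(&numbers), 2);
    }
}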
| true |
8caf6a61a59821d93b7720a97017dc34e5e12e10
|
Rust
|
maxim-h/rust-o-rithms
|
/haffman_coding_4-2-5/src/binary_tree.rs
|
UTF-8
| 2,560 | 3.59375 | 4 |
[] |
no_license
|
use std::boxed::Box;
#[derive(Debug, Clone)]
pub struct Node {
pub ch: Option<char>,
pub freq: u32,
pub l_0: Option<Box<Node>>,
pub r_1: Option<Box<Node>>,
}
impl Node {
fn get_freq<'a>(&'a self) -> &'a u32 {
&self.freq
}
    pub fn new(
        c: Option<char>,
        f: Option<u32>,
        l: Option<Box<Node>>,
        r: Option<Box<Node>>,
    ) -> Box<Node> {
        match (l, r) {
            // Both children present: an internal node whose frequency is the
            // sum of its children's frequencies.
            (Some(left), Some(right)) => {
                let sum_freq = left.get_freq() + right.get_freq();
                Box::new(Node {
                    ch: None,
                    freq: sum_freq,
                    l_0: Some(left),
                    r_1: Some(right),
                })
            }
            // Missing either child: a leaf carrying a character and its frequency.
            _ => Box::new(Node {
                ch: c,
                freq: f.unwrap(),
                l_0: None,
                r_1: None,
            }),
        }
    }
}
pub fn encode_char(n: &Node, c: char, prefix: String) -> Option<String> {
if let Some(x) = n.ch {
if x == c {
if prefix == "" {
//in case root is the only node (e.g. string had only one unique character)
return Some(String::from("0"));
} else {
return Some(prefix);
}
} else {
return None;
}
} else {
let pr = format!("{}0", prefix);
match n.l_0 {
Some(ref nl) => match encode_char(nl, c, pr) {
None => {}
Some(p) => return Some(p),
},
None => panic!("Node has no left child and is not a leaf"),
}
let pr: String = format!("{}1", prefix);
match n.r_1 {
Some(ref nr) => match encode_char(nr, c, pr) {
None => return None,
Some(p) => return Some(p),
},
None => panic!("Node has no right child and is not a leaf"),
}
}
}
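#[cfg(test)]
mod encode_sketch {
    use super::*;

    // Illustrative sketch, not part of the original module: in a two-leaf tree the
    // left symbol encodes as "0", the right symbol as "1", and unknown symbols
    // yield `None`.
    #[test]
    fn encodes_left_and_right_leaves() {
        let left = Node::new(Some('a'), Some(3), None, None);
        let right = Node::new(Some('b'), Some(1), None, None);
        let root = Node::new(None, None, Some(left), Some(right));
        assert_eq!(encode_char(&root, 'a', String::new()), Some(String::from("0")));
        assert_eq!(encode_char(&root, 'b', String::new()), Some(String::from("1")));
        assert_eq!(encode_char(&root, 'z', String::new()), None);
    }
}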
| true |
c9cd74561b76f1099f01fdd8658c2e4f36319680
|
Rust
|
isgasho/dialectic
|
/dialectic-reconnect/src/maybe_bounded.rs
|
UTF-8
| 2,541 | 3.890625 | 4 |
[
"MIT"
] |
permissive
|
//! This module defines two enums, [`Sender`] and [`Receiver`], which wrap either a bounded or
//! unbounded Tokio sender or receiver, respectively.
//!
//! These types implement the subset of the sender/receiver API necessary for this crate, not the
//! full API, most of which is not used.
use tokio::sync::mpsc::{
self,
error::{SendError, TrySendError},
};
/// A Tokio [`mpsc`] sender that could be either bounded or unbounded at runtime.
#[derive(Debug, Clone)]
pub enum Sender<T> {
Bounded(mpsc::Sender<T>),
Unbounded(mpsc::UnboundedSender<T>),
}
/// A Tokio [`mpsc`] receiver that could be either bounded or unbounded at runtime.
#[derive(Debug)]
pub enum Receiver<T> {
Bounded(mpsc::Receiver<T>),
Unbounded(mpsc::UnboundedReceiver<T>),
}
/// Create a Tokio [`mpsc`] sender/receiver pair that is either bounded or unbounded, depending on
/// whether a buffer size is specified.
pub fn channel<T>(buffer: Option<usize>) -> (Sender<T>, Receiver<T>) {
if let Some(buffer) = buffer {
let (tx, rx) = mpsc::channel(buffer);
(Sender::Bounded(tx), Receiver::Bounded(rx))
} else {
let (tx, rx) = mpsc::unbounded_channel();
(Sender::Unbounded(tx), Receiver::Unbounded(rx))
}
}
impl<T> Sender<T> {
/// Return the capacity of the underlying channel, if it is bounded, or `usize::MAX` if it is
/// not bounded.
pub fn capacity(&self) -> usize {
match self {
Sender::Bounded(tx) => tx.capacity(),
Sender::Unbounded(_) => usize::MAX,
}
}
/// Check if there is an existing receiver for this channel.
pub fn is_closed(&self) -> bool {
match self {
Sender::Bounded(tx) => tx.is_closed(),
Sender::Unbounded(tx) => tx.is_closed(),
}
}
/// Try to send a message over the channel, returning an error if the channel is full or closed.
pub fn try_send(&self, message: T) -> Result<(), TrySendError<T>> {
match self {
Sender::Bounded(tx) => tx.try_send(message),
Sender::Unbounded(tx) => tx
.send(message)
.map_err(|SendError(t)| TrySendError::Closed(t)),
}
}
}
impl<T> Receiver<T> {
/// Receive the next value for this receiver, returning `None` if all `Sender` halves have
/// dropped.
pub async fn recv(&mut self) -> Option<T> {
match self {
Receiver::Bounded(rx) => rx.recv().await,
Receiver::Unbounded(rx) => rx.recv().await,
}
}
}
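#[cfg(test)]
mod channel_sketch {
    use super::*;

    // Illustrative sketch, not part of the original module (assumes the tokio
    // "macros" and runtime features are available to tests): the same
    // send/receive code works for both channel flavors.
    #[tokio::test]
    async fn send_and_receive_with_both_flavors() {
        for buffer in vec![Some(1), None] {
            let (tx, mut rx) = channel::<u32>(buffer);
            tx.try_send(7).expect("channel should have room");
            assert_eq!(rx.recv().await, Some(7));
        }
    }
}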
| true |
cd89c675de0c65b66bbc632507eb04d3804e4767
|
Rust
|
microrack/coresynth
|
/fw/rust_lib/src/os/cmsis_os/semaphore.rs
|
UTF-8
| 1,885 | 2.78125 | 3 |
[] |
no_license
|
pub use super::bindings::osSemaphoreId;
use super::bindings::*;
use super::{Error, Result};
pub struct Semaphore {
id: osSemaphoreId,
}
#[allow(dead_code)]
impl Semaphore {
pub fn new(count: u32) -> Result<Semaphore> {
assert!(count <= core::i32::MAX as u32);
let semaphore_def = osSemaphoreDef_t { dummy: 0 };
let semaphore_id = unsafe { osSemaphoreCreate(&semaphore_def, count as i32) };
return match semaphore_id {
// semaphore_id is actually a pointer
0 => Err(Error {
call: "osSemaphoreCreate",
status: None,
}),
_ => Ok(Semaphore { id: semaphore_id }),
};
}
// TODO it's a copy-paste, move to common code
pub fn empty(count: u32) -> Result<Semaphore> {
assert!(count <= core::i32::MAX as u32);
let res = Semaphore::new(count)?;
for _ in 0..count {
// semaphore created full
res.acquire()?;
}
return Ok(res);
}
pub fn acquire(&self) -> Result<()> {
let result = unsafe { osSemaphoreWait(self.id, osWaitForever) };
return match result {
-1 => Err(Error {
call: "osSemaphoreWait",
status: None,
}),
_ => Ok(()),
};
}
pub fn release(&self) -> Result<()> {
let status = unsafe { osSemaphoreRelease(self.id) };
return match status {
osStatus::osOK => Ok(()),
_ => Err(Error {
call: "osSemaphoreRelease",
status: Some(status),
}),
};
}
// TODO implement drop compatible with this
pub fn get_id(&self) -> osSemaphoreId {
self.id
}
pub fn from_id(id: osSemaphoreId) -> Semaphore {
Semaphore { id }
}
}
unsafe impl Sync for Semaphore {}
| true |
2134d38fad938a7092de942a9188e918fd15472f
|
Rust
|
Kurble/Tutris9
|
/client/src/main.rs
|
UTF-8
| 4,542 | 2.71875 | 3 |
[
"MIT"
] |
permissive
|
mod game;
mod menu;
mod matchmaking;
mod util;
mod connection;
mod controls;
mod buttons;
mod persistent;
mod stats;
use quicksilver::{
Result,
geom::{Transform, Vector},
graphics::{Color, Background::Col},
lifecycle::{Settings, State, Window, Event, run},
combinators::Future,
};
use futures::Async;
use std::mem::replace;
pub trait Scene {
fn update(&mut self, window: &mut Window) -> Result<()>;
fn event(&mut self, event: &Event, window: &mut Window) -> Result<()>;
fn draw(&mut self, window: &mut Window) -> Result<()>;
fn advance(&mut self) -> Option<Box<Future<Item=Box<Scene>, Error=quicksilver::Error>>>;
}
enum DrawScene {
None,
NotFullscreen(Box<Future<Item=Box<Scene>, Error=quicksilver::Error>>),
FadeOut(Box<Scene>, f32, Box<Future<Item=Box<Scene>, Error=quicksilver::Error>>),
Loading(Box<Future<Item=Box<Scene>, Error=quicksilver::Error>>),
FadeIn(Box<Scene>, f32),
Loaded(Box<Scene>),
}
impl State for DrawScene {
fn new() -> Result<Self> {
if cfg!(debug_assertions) {
Ok(DrawScene::Loading(menu::Menu::new()))
} else {
Ok(DrawScene::NotFullscreen(menu::Menu::new()))
}
}
fn update(&mut self, window: &mut Window) -> Result<()> {
let next = match replace(self, DrawScene::None) {
DrawScene::NotFullscreen(mut future) => {
window.set_fullscreen(true);
if let Ok(Async::Ready(scene)) = future.poll() {
DrawScene::Loaded(scene)
} else {
DrawScene::Loading(future)
}
},
DrawScene::FadeOut(scene, progress, next) => {
let progress = progress + window.update_rate() as f32 / 500.0;
if progress > 1.0 {
DrawScene::Loading(next)
} else {
DrawScene::FadeOut(scene, progress, next)
}
},
DrawScene::Loading(mut future) => {
if let Ok(Async::Ready(scene)) = future.poll() {
DrawScene::FadeIn(scene, 0.0)
} else {
DrawScene::Loading(future)
}
},
DrawScene::Loaded(mut scene) => {
scene.update(window)?;
if let Some(next) = scene.advance() {
DrawScene::FadeOut(scene, 0.0, next)
} else {
DrawScene::Loaded(scene)
}
},
DrawScene::FadeIn(scene, progress) => {
let progress = progress + window.update_rate() as f32 / 500.0;
if progress > 1.0 {
DrawScene::Loaded(scene)
} else {
DrawScene::FadeIn(scene, progress)
}
},
other => other,
};
replace(self, next);
Ok(())
}
fn event(&mut self, event: &Event, window: &mut Window) -> Result<()> {
if let &mut DrawScene::Loaded(ref mut scene) = self {
scene.event(event, window)?;
}
Ok(())
}
fn draw(&mut self, window: &mut Window) -> Result<()> {
match self {
&mut DrawScene::FadeOut(ref mut scene, progress, _) => {
scene.draw(window)?;
let color = Color { a: progress, ..Color::BLACK };
let trans = Transform::IDENTITY;
window.draw_ex(&util::rect(0.0, 0.0, 640.0, 360.0), Col(color), trans, 100);
},
&mut DrawScene::Loaded(ref mut scene) => {
scene.draw(window)?;
},
&mut DrawScene::FadeIn(ref mut scene, progress) => {
scene.draw(window)?;
let color = Color { a: 1.0 - progress, ..Color::BLACK };
let trans = Transform::IDENTITY;
window.draw_ex(&util::rect(0.0, 0.0, 640.0, 360.0), Col(color), trans, 100);
},
&mut _ => {
window.clear(Color::BLACK)?;
},
}
Ok(())
}
}
fn main() {
run::<DrawScene>("Tutris 9",
Vector::new(640, 360),
Settings {
draw_rate: 16.6666667,
update_rate: 16.6666667,
vsync: true,
//scale: ImageScaleStrategy::Blur,
..Settings::default()
});
}
| true |
a315ba20d485dd86a8de8132e7f8172b07feb0f5
|
Rust
|
rust-lang/rust
|
/tests/ui/pattern/issue-106862.rs
|
UTF-8
| 992 | 2.90625 | 3 |
[
"Apache-2.0",
"LLVM-exception",
"NCSA",
"BSD-2-Clause",
"LicenseRef-scancode-unicode",
"MIT",
"LicenseRef-scancode-other-permissive"
] |
permissive
|
// run-rustfix
#![allow(unused)]
use Foo::{FooB, FooA};
enum Foo {
FooA { opt_x: Option<i32>, y: i32 },
FooB { x: i32, y: i32 }
}
fn main() {
let f = FooB { x: 3, y: 4 };
match f {
FooB(a, b) => println!("{} {}", a, b),
//~^ ERROR expected tuple struct or tuple variant, found variant `FooB`
_ => (),
}
match f {
FooB(x, y) => println!("{} {}", x, y),
//~^ ERROR expected tuple struct or tuple variant, found variant `FooB`
_ => (),
}
match f {
FooA(Some(x), y) => println!("{} {}", x, y),
//~^ ERROR expected tuple struct or tuple variant, found variant `FooA`
_ => (),
}
match f {
FooB(a, _, _) => println!("{}", a),
//~^ ERROR expected tuple struct or tuple variant, found variant `FooB`
_ => (),
}
match f {
FooB() => (),
//~^ ERROR expected tuple struct or tuple variant, found variant `FooB`
_ => (),
}
}
| true |
09756322cc7ef81fa7f80b56c9d73ae2323c902d
|
Rust
|
m00p1ng/kattis-problem
|
/LV1/1.2/autori/autori.rs
|
UTF-8
| 289 | 2.828125 | 3 |
[] |
no_license
|
use std::io::{self, BufRead};
fn main() {
let mut buff = String::new();
let stdin = io::stdin();
stdin.lock().read_line(&mut buff).unwrap();
let words: Vec<&str> = buff.split("-").collect();
for w in words {
print!("{}", w.chars().nth(0).unwrap());
}
}
| true |
8880be76de121ca62d85176bdf9809d58d6efbea
|
Rust
|
Phaiax/emotim
|
/src/lib.rs
|
UTF-8
| 4,964 | 3 | 3 |
[] |
no_license
|
//! Converts normal images into emoticon versions by replacing chunks of the
//! original image with emoticons of similar color.
//!
//! ```
//! use std::path::Path;
//! use emotim::*;
//! let emos = read_emoticons();
//! let mut ii = read_input_image("Munch_Schrei_6.jpg");
//! let emoimg = Emoimage::new(&mut ii, 20, &emos, ComparisationMethod::Correlation);
//! println!("{}", emoimg);
//! emoimg.save(&Path::new("out/munch_max.png"));
//! ```
//!
//! This crate is much, much faster in release mode.
//!
//! Since some paths are hardcoded, you need to have `assets/emoticons2/*` in the working directory.
#![feature(test)]
extern crate image;
extern crate test;
pub mod emoticons;
pub mod hsl;
use image::{GenericImage, DynamicImage, RgbaImage};
use std::path::{PathBuf, Path};
use std::fs::File;
use std::rc::Rc;
use std::fmt;
use std::io;
use std::io::Write;
use emoticons::Emoticons;
pub use emoticons::read_emoticons;
/// Reads a normal image from `assets/input/<filename>`.
pub fn read_input_image(filename : &str) -> DynamicImage {
let mut inputimagepath = PathBuf::new();
inputimagepath.push("assets/input");
inputimagepath.push(filename);
image::open(&inputimagepath).expect(&format!("image {} not found", inputimagepath.display()))
}
/// An image made out of emoticons
pub struct Emoimage {
pub width : u32,
pub height : u32,
pub emopixels : Vec<Rc<emoticons::Emoticon>>,
}
/// Different methods to calculate the corresponding emoticons.
pub enum ComparisationMethod {
Correlation,
Maxima
}
impl Emoimage {
    /// Builds the emoticon image: splits `img` into `frac`×`frac`-pixel chunks
    /// and picks, for each chunk, the emoticon with the most similar colour
    /// histogram according to `method`.
pub fn new(img : &mut DynamicImage,
frac : u32,
emoticons : &Emoticons,
method : ComparisationMethod) -> Emoimage {
let height = img.height() / frac;
let width = img.width() / frac;
let mut pixels = Vec::with_capacity(width as usize * height as usize);
println!("Finding best emoticon for chunk of input image:");
for h in 0..height {
for w in 0..width {
// progress
print!("\r Chunk @ h:{} w:{}", h, w);
io::stdout().flush().ok();
let subimg = img.sub_image(w * frac, h * frac, frac, frac);
let subimghsv = hsl::HslImage::from_image(&subimg);
let subimghist = subimghsv.reduce_dynamic().histogram();
let mut the_chosen_one = None;
let mut highest_similarity = -10000.0;
for e in emoticons {
let similarity = match method {
ComparisationMethod::Correlation => e.hist.similarity_by_correlation(&subimghist),
ComparisationMethod::Maxima => e.hist.similarity_by_maxima(&subimghist),
};
if similarity > highest_similarity {
the_chosen_one = Some(e.clone());
highest_similarity = similarity;
}
}
pixels.push(the_chosen_one.unwrap());
}
}
println!("\r Done.");
Emoimage {
width : width,
height : height,
emopixels : pixels
}
}
    /// Saves the calculated emoticons as an image
pub fn save(&self, path : &Path) {
// Calculate dimensions
// Use first emoticon as base for height / width
let exampleemo = self.emopixels.first().unwrap();
let height = exampleemo.img.height() * self.height;
let width = exampleemo.img.width() * self.width;
let raw = vec![0 ; (height * width * 4) as usize];
let img = RgbaImage::from_raw(width, height, raw).unwrap();
let mut img = DynamicImage::ImageRgba8(img);
for h in 0..self.height {
for w in 0..self.width {
img.copy_from(&self.emopixels[(h * self.width + w) as usize].img,
w * exampleemo.img.width(),
h * exampleemo.img.height());
}
}
let ref mut fout = File::create(path).unwrap();
let _ = img.save(fout, image::PNG).unwrap();
}
}
impl fmt::Display for Emoimage {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
for line in self.emopixels.chunks(self.width as usize) {
for s in line {
try!(write!(f, "{}", s));
}
try!(write!(f, "\n"));
}
Ok(())
}
}
#[cfg(test)]
mod tests {
//use super::*;
use test::Bencher;
use image::{DynamicImage};
use image;
use std::path::Path;
fn open_image() -> DynamicImage {
let inputimagepath = Path::new("assets/emoticons2/00a9.png");
let img = image::open(&inputimagepath).unwrap();
img
}
#[bench]
fn bench_open_image(b: &mut Bencher) {
b.iter(|| open_image());
}
}
| true |
99bd6ca13601051d6e59aa5468aa92a204283382
|
Rust
|
Byron/gitoxide
|
/gix-ref/src/store/packed/decode.rs
|
UTF-8
| 2,098 | 2.625 | 3 |
[
"MIT",
"Apache-2.0"
] |
permissive
|
use std::convert::TryInto;
use gix_object::bstr::{BStr, ByteSlice};
use winnow::{
combinator::{delimited, opt, preceded, terminated},
error::{FromExternalError, ParserError},
prelude::*,
token::take_while,
};
use crate::{
parse::{hex_hash, newline},
store_impl::packed,
};
#[derive(Debug, PartialEq, Eq)]
enum Peeled {
Unspecified,
Partial,
Fully,
}
/// Information parsed from the header of a packed ref file
#[derive(Debug, PartialEq, Eq)]
pub struct Header {
peeled: Peeled,
pub sorted: bool,
}
impl Default for Header {
fn default() -> Self {
Header {
peeled: Peeled::Unspecified,
sorted: false,
}
}
}
fn until_newline<'a, E>(input: &mut &'a [u8]) -> PResult<&'a BStr, E>
where
E: ParserError<&'a [u8]>,
{
terminated(take_while(0.., |b: u8| b != b'\r' && b != b'\n'), newline)
.map(ByteSlice::as_bstr)
.parse_next(input)
}
pub fn header<'a, E>(input: &mut &'a [u8]) -> PResult<Header, E>
where
E: ParserError<&'a [u8]>,
{
preceded(b"# pack-refs with: ", until_newline)
.map(|traits| {
let mut peeled = Peeled::Unspecified;
let mut sorted = false;
for token in traits.as_bstr().split_str(b" ") {
if token == b"fully-peeled" {
peeled = Peeled::Fully;
} else if token == b"peeled" {
peeled = Peeled::Partial;
} else if token == b"sorted" {
sorted = true;
}
}
Header { peeled, sorted }
})
.parse_next(input)
}
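/// Parses a single packed-ref line of the form `<hex-oid> <refname>`, optionally
/// followed by a `^<hex-oid>` line carrying the peeled object id.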
pub fn reference<'a, E: ParserError<&'a [u8]> + FromExternalError<&'a [u8], crate::name::Error>>(
input: &mut &'a [u8],
) -> PResult<packed::Reference<'a>, E> {
(
terminated(hex_hash, b" "),
until_newline.try_map(TryInto::try_into),
opt(delimited(b"^", hex_hash, newline)),
)
.map(|(target, name, object)| packed::Reference { name, target, object })
.parse_next(input)
}
#[cfg(test)]
mod tests;
| true |
0c685c9cd3c4ddf6368ef00f495a21b54b43063c
|
Rust
|
e14tech/rmonopoly
|
/src/main.rs
|
UTF-8
| 607 | 2.765625 | 3 |
[
"BSD-2-Clause"
] |
permissive
|
extern crate rand;
mod input;
mod dice;
mod player;
mod jail;
mod game;
use player::Player;
fn main() {
let number_of_players = input::get_number_of_players();
let mut players: Vec<Player> = Vec::new();
for i in 0..number_of_players {
let player_name = input::get_player_name(i);
players.push(Player::new(player_name));
}
let mut turn: usize = 0;
loop {
if turn >= number_of_players {
turn = 0;
}
game::game_loop(&mut players[turn]);
turn += 1;
if !input::quit_input() {
break;
}
}
}
| true |
2027b07d9680e504c0f56884fac1cd088c1f867b
|
Rust
|
sshashank124/zapp
|
/src/filesystem.rs
|
UTF-8
| 1,165 | 2.78125 | 3 |
[] |
no_license
|
use std::fs::{self, Permissions};
use std::io;
use std::os::unix::fs::PermissionsExt;
use std::path::{Path, PathBuf};
use serde::{Deserialize, Deserializer, de::Error};
pub fn expand_path(path: &str) -> PathBuf {
let path = shellexpand::tilde(path);
PathBuf::from(&*path)
}
pub fn create_valid_parent(path: &Path) {
let parent = path.parent().unwrap();
assert!(!parent.is_file());
if !parent.exists() {
fs::create_dir_all(parent).unwrap();
}
}
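// Serde helper: accepts an optional numeric mode (e.g. 755) and re-reads its
// decimal digits as octal, so a config value of 755 becomes mode 0o755.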
pub fn parse_permissions<'de, D>(deserializer: D)
-> Result<Option<u32>, D::Error>
where
D: Deserializer<'de>,
{
let mode: Option<u32> = Option::deserialize(deserializer)?;
if let Some(s) = mode {
let mode_str = s.to_string();
match u32::from_str_radix(&mode_str, 8) {
Ok(i) => Ok(Some(i)),
_ => Err(D::Error::custom("invalid permissions")),
}
} else {
Ok(None)
}
}
pub fn set_permissions<P>(path: P, mode: Option<u32>) -> Result<(), io::Error>
where
P: AsRef<Path>
{
match mode {
None => Ok(()),
Some(mode) => fs::set_permissions(path, Permissions::from_mode(mode)),
}
}
| true |
2191eb51a7e25c11b26629bf9f289147b08a39d1
|
Rust
|
attilahorvath/exercism-rust
|
/etl/src/lib.rs
|
UTF-8
| 319 | 2.625 | 3 |
[
"MIT"
] |
permissive
|
use std::collections::BTreeMap;
pub fn transform(input: &BTreeMap<i32, Vec<char>>) -> BTreeMap<char, i32> {
input
.iter()
.flat_map(|(&score, chars)| {
chars.iter().flat_map(|&c| c.to_lowercase()).map(move |c| {
(c, score)
})
})
.collect()
}
| true |
3604042df8dcf69c873233727bc378f1715145da
|
Rust
|
doytsujin/genawaiter
|
/src/lib.rs
|
UTF-8
| 7,759 | 3.9375 | 4 |
[] |
no_license
|
/*!
This crate implements generators for Rust. Generators are a feature common across many
programming languages. They let you yield a sequence of values from a function. A few
common use cases are:
- Easily building iterators.
- Avoiding allocating a list for a function which returns multiple values.
Rust has this feature too, but it is currently unstable (and thus nightly-only). But
with this crate, you can use them on stable Rust!
# Choose your guarantees
This crate supplies three concrete implementations of generators:
1. [`genawaiter::stack`](stack) – Safe and allocation-free. You should prefer this in
most cases.
2. [`genawaiter::sync`](sync) – This can be shared between threads and stored in a
`static` variable. To make this possible, it stores its state on the heap.
3. [`genawaiter::rc`](rc) – This is single-threaded and also allocates. Using this is
discouraged, and you should feel discouraged. Its only advantages over `stack` are
(1) it doesn't use macros, and (2) it only has [two][unus] [lines][duo] of
unsafe code, which are trivially auditable.
[unus]: https://github.com/whatisaphone/genawaiter/blob/4a2b185/src/waker.rs#L9
[duo]: https://github.com/whatisaphone/genawaiter/blob/4a2b185/src/rc/engine.rs#L26
Read on for more general info about how generators work, and how data flows in and out
of a generator.
# A tale of three types
A generator can control the flow of up to three types of data:
- **Yield** – Each time a generator suspends execution, it can produce a value.
- **Resume** – Each time a generator is resumed, a value can be passed in.
- **Completion** – When a generator completes, it can produce one final value.
The three types are specified in the type signature of the generator. Only the first
is required; the last two are optional:
```rust
# use genawaiter::rc::{Co, Gen};
#
type Yield = // ...
# ();
type Resume = // ...
# ();
type Completion = // ...
# ();
async fn generator(co: Co<Yield, Resume>) -> Completion
# {}
# Gen::new(generator);
```
Rewritten as a non-`async` function, the above function has the same type as:
```rust
# use genawaiter::rc::{Co, Gen};
# use std::{future::Future, pin::Pin, task::{Context, Poll}};
#
# type Yield = ();
# type Resume = ();
# type Completion = ();
#
fn generator(co: Co<Yield, Resume>) -> impl Future<Output = Completion>
# {
# struct DummyFuture;
# impl Future for DummyFuture {
# type Output = ();
# fn poll(self: Pin<&mut Self>, _cx: &mut Context<'_>) -> Poll<Self::Output> {
# Poll::Pending
# }
# }
# DummyFuture
# }
# Gen::new(generator);
```
## Yielded values
Values can be yielded from the generator by calling `yield_`, and immediately awaiting
the future it returns. You can get these values out of the generator in either of two
ways:
- Call `resume()` or `resume_with()`. The values will be returned in a
`GeneratorState::Yielded`.
```rust
# use genawaiter::{GeneratorState, rc::Gen};
#
let mut generator = Gen::new(|co| async move {
co.yield_(10).await;
});
let ten = generator.resume();
assert_eq!(ten, GeneratorState::Yielded(10));
```
- Treat it as an iterator. For this to work, both the resume and completion types must
  be `()`.
```rust
# use genawaiter::rc::Gen;
#
let generator = Gen::new(|co| async move {
co.yield_(10).await;
});
let xs: Vec<_> = generator.into_iter().collect();
assert_eq!(xs, [10]);
```
## Resume arguments
You can also send values back into the generator, by using `resume_with`. The generator
receives them from the future returned by `yield_`.
```rust
# use genawaiter::{GeneratorState, rc::Gen};
#
let mut printer = Gen::new(|co| async move {
loop {
let string = co.yield_(()).await;
println!("{}", string);
}
});
printer.resume_with("hello");
printer.resume_with("world");
```
## Completion value
A generator can produce one final value upon completion, by returning it from the
function. The consumer will receive this value as a `GeneratorState::Complete`.
```rust
# use genawaiter::{GeneratorState, rc::Gen};
#
let mut generator = Gen::new(|co| async move {
co.yield_(10).await;
"done"
});
assert_eq!(generator.resume(), GeneratorState::Yielded(10));
assert_eq!(generator.resume(), GeneratorState::Complete("done"));
```
# Async generators
If you await other futures inside the generator, it becomes an _async generator_. It
does not make sense to treat an async generator as an `Iterable`, since you cannot
`await` an `Iterable`. Instead, you can treat it as a `Stream`. This requires opting in
to the dependency on `futures` with the `futures03` feature.
```toml
[dependencies]
genawaiter = { version = "...", features = ["futures03"] }
```
```rust
# use futures::executor::block_on_stream;
# use genawaiter::{GeneratorState, rc::Gen};
#
# #[cfg(feature = "futures03")] {
async fn async_one() -> i32 { 1 }
async fn async_two() -> i32 { 2 }
let gen = Gen::new(|co| async move {
let one = async_one().await;
co.yield_(one).await;
let two = async_two().await;
co.yield_(two).await;
});
let stream = block_on_stream(gen);
let items: Vec<_> = stream.collect();
assert_eq!(items, [1, 2]);
# }
```
Async generators also provide an `async_resume` method for lower-level control. (This
works even without the `futures03` feature.)
```rust
# use genawaiter::{GeneratorState, rc::Gen};
# use std::task::Poll;
#
# async fn x() {
# let mut gen = Gen::new(|co| async move {
# co.yield_(10).await;
# });
#
match gen.async_resume().await {
GeneratorState::Yielded(_) => {}
GeneratorState::Complete(_) => {}
}
# }
```
# Backported stdlib types
This crate supplies [`Generator`](trait.Generator.html) and
[`GeneratorState`](enum.GeneratorState.html). They are copy/pasted from the stdlib (with
stability attributes removed) so they can be used on stable Rust. If/when real
generators are stabilized, hopefully they would be drop-in replacements. Javascript
developers might recognize this as a polyfill.
There is also a [`Coroutine`](trait.Coroutine.html) trait, which does not come from the
stdlib. A `Coroutine` is a generalization of a `Generator`. A `Generator` constrains the
resume argument type to `()`, but in a `Coroutine` it can be anything.
*/
#![cfg_attr(feature = "nightly", feature(async_await, async_closure))]
#![warn(future_incompatible, rust_2018_compatibility, rust_2018_idioms, unused)]
#![warn(missing_docs, clippy::cargo, clippy::pedantic)]
#![cfg_attr(feature = "strict", deny(warnings))]
pub use ops::{Coroutine, Generator, GeneratorState};
mod core;
mod ext;
mod ops;
pub mod rc;
pub mod stack;
pub mod sync;
#[cfg(test)]
mod testing;
mod waker;
#[cfg(feature = "proc_macro")]
use proc_macro_hack::proc_macro_hack;
///
#[cfg(feature = "proc_macro")]
#[proc_macro_hack]
pub use genawaiter_proc_macro::sync_producer;
///
#[cfg(feature = "proc_macro")]
#[proc_macro_hack]
pub use genawaiter_proc_macro::rc_producer;
///
#[cfg(feature = "proc_macro")]
#[proc_macro_hack]
pub use genawaiter_proc_macro::stack_producer;
/// This macro is used to replace the keyword yield to
/// avoid using nightly features when using any of the three
/// `proc_macro_attributes` for easy generator definition.
///
/// # Example
/// ```
/// use genawaiter::{stack::producer_fn, yield_};
///
/// #[producer_fn(u8)]
/// async fn odds() {
/// for n in (1..).step_by(2).take_while(|n| *n < 10) {
/// yield_!(n)
/// }
/// }
/// ```
#[cfg(feature = "proc_macro")]
#[macro_export]
macro_rules! yield_ {
($val:tt) => {
compile_error!("forgot to use attribute")
};
(@emit => $co:expr, $value:expr) => {
$co.yield_($value).await;
};
}
| true |
aed74ffe9d495c8335e4b0df76bef40cd7d1b8e6
|
Rust
|
samsung-ads-grave-yard/mini-rs
|
/src/aio/http.rs
|
UTF-8
| 7,983 | 2.71875 | 3 |
[
"MIT"
] |
permissive
|
// TODO: make a web crawler example.
use std::cell::RefCell;
use std::collections::VecDeque;
use std::fmt::Debug;
use std::io;
use std::mem;
use std::rc::Rc;
use crate::aio::handler::{
Handler,
Loop,
Stream,
};
use crate::aio::net::{
TcpConnection,
TcpConnectionNotify,
};
use crate::aio::uhttp_uri::HttpUri;
use self::Msg::*;
fn deque_compare(buffer: &VecDeque<u8>, start: usize, len: usize, value: &[u8]) -> bool {
if value.len() < len {
return false;
}
let mut index = 0;
for i in start..start + len {
if buffer[i] != value[index] {
return false;
}
index += 1;
}
true
}
fn parse_num(buffer: &VecDeque<u8>, start: usize, len: usize) -> Option<usize> {
let mut result = 0;
for i in start..start + len {
if buffer[i] >= b'0' && buffer[i] <= b'9' {
result *= 10;
result += (buffer[i] - b'0') as usize;
}
else if result != 0 && buffer[i] != b' ' {
return None;
}
}
Some(result)
}
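// Scans the buffered response line by line and returns the value of the
// `Content-Length` header, if one has arrived so far.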
fn parse_headers(buffer: &VecDeque<u8>) -> Option<usize> {
// TODO: parse other headers.
let mut start = 0;
for i in 0..buffer.len() {
if buffer[i] == b'\n' {
let text = b"Content-Length:";
let end = start + text.len();
if deque_compare(buffer, start, text.len(), text) {
let num = parse_num(buffer, end, i - 1 - end); // - 1 to remove the \n.
return num;
}
start = i + 1;
}
}
None
}
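// Drops everything up to and including the first `\r\n\r\n`, i.e. strips the
// HTTP headers so that only the body remains in the buffer.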
fn remove_until_boundary(buffer: &mut VecDeque<u8>) {
let mut index = buffer.len() - 1;
for i in 0..buffer.len() {
if i + 4 <= buffer.len() && deque_compare(&buffer, i, 4, b"\r\n\r\n") {
index = i + 4;
break;
}
}
for _ in 0..index {
buffer.pop_front();
}
}
#[derive(Clone)]
struct Connection<HANDLER> {
buffer: VecDeque<u8>,
content_length: usize,
handler: HANDLER,
host: String,
method: &'static str,
path: String,
}
impl<HANDLER> Connection<HANDLER> {
fn new(host: &str, handler: HANDLER, path: &str, method: &'static str) -> Self {
Self {
buffer: VecDeque::new(),
content_length: 0,
handler,
host: host.to_string(),
method,
path: path.to_string(),
}
}
}
impl<HANDLER> TcpConnectionNotify for Connection<HANDLER>
where HANDLER: HttpHandler,
{
fn connected(&mut self, connection: &mut TcpConnection) {
if let Err(error) = connection.write(format!("{} {} HTTP/1.1\r\nHost: {}\r\n\r\n", self.method, self.path,
self.host).into_bytes())
{
self.handler.error(error);
}
}
fn error(&mut self, error: io::Error) {
self.handler.error(error);
}
fn received(&mut self, connection: &mut TcpConnection, data: Vec<u8>) {
self.buffer.extend(data);
if self.content_length == 0 {
match parse_headers(&self.buffer) {
Some(content_length) => {
remove_until_boundary(&mut self.buffer);
self.content_length = content_length;
},
None => (), // Might find the content length in the next data.
}
}
if self.buffer.len() >= self.content_length {
let buffer = mem::replace(&mut self.buffer, VecDeque::new());
self.handler.response(buffer.into());
connection.dispose();
}
}
}
pub trait HttpHandler {
fn response(&mut self, data: Vec<u8>);
fn error(&mut self, _error: io::Error) {
}
}
pub struct DefaultHttpHandler<ErrorMsg, MSG, SuccessMsg> {
error_msg: ErrorMsg,
stream: Stream<MSG>,
success_msg: SuccessMsg,
}
impl<ErrorMsg, MSG, SuccessMsg> DefaultHttpHandler<ErrorMsg, MSG, SuccessMsg> {
pub fn new(stream: &Stream<MSG>, success_msg: SuccessMsg, error_msg: ErrorMsg) -> Self {
Self {
error_msg,
stream: stream.clone(),
success_msg,
}
}
}
impl<ErrorMsg, MSG, SuccessMsg> HttpHandler for DefaultHttpHandler<ErrorMsg, MSG, SuccessMsg>
where MSG: Debug,
ErrorMsg: Fn(io::Error) -> MSG,
SuccessMsg: Fn(Vec<u8>) -> MSG,
{
fn error(&mut self, error: io::Error) {
self.stream.send((self.error_msg)(error));
}
fn response(&mut self, data: Vec<u8>) {
self.stream.send((self.success_msg)(data));
}
}
pub struct HttpHandlerIgnoreErr<MSG, SuccessMsg> {
stream: Stream<MSG>,
success_msg: SuccessMsg,
}
impl<MSG, SuccessMsg> HttpHandlerIgnoreErr<MSG, SuccessMsg> {
pub fn new(stream: &Stream<MSG>, success_msg: SuccessMsg) -> Self {
Self {
stream: stream.clone(),
success_msg,
}
}
}
impl<MSG, SuccessMsg> HttpHandler for HttpHandlerIgnoreErr<MSG, SuccessMsg>
where MSG: Debug,
SuccessMsg: Fn(Vec<u8>) -> MSG,
{
fn response(&mut self, data: Vec<u8>) {
self.stream.send((self.success_msg)(data));
}
}
pub struct Http {
}
impl Http {
pub fn new() -> Self {
Self {
}
}
fn blocking<F: Fn(Rc<RefCell<io::Result<Vec<u8>>>>, &mut Loop) -> io::Result<()>>(&self, callback: F) -> io::Result<Vec<u8>> {
let result = Rc::new(RefCell::new(Ok(vec![])));
let mut event_loop = Loop::new()?;
callback(result.clone(), &mut event_loop)?;
event_loop.run()?;
let mut result = result.borrow_mut();
mem::replace(&mut *result, Ok(vec![]))
}
pub fn blocking_get(&self, uri: &str) -> io::Result<Vec<u8>> {
self.blocking(|result, event_loop| {
let stream = event_loop.spawn(BlockingHttpHandler::new(&event_loop, result));
let http = Http::new();
http.get(uri, event_loop, DefaultHttpHandler::new(&stream, HttpGet, HttpError))
.map_err(|()| io::Error::new(io::ErrorKind::Other, ""))
})
}
pub fn blocking_post(&self, uri: &str) -> io::Result<Vec<u8>> {
self.blocking(|result, event_loop| {
let stream = event_loop.spawn(BlockingHttpHandler::new(&event_loop, result));
let http = Http::new();
http.post(uri, event_loop, DefaultHttpHandler::new(&stream, HttpGet, HttpError))
.map_err(|()| io::Error::new(io::ErrorKind::Other, ""))
})
}
pub fn get<HANDLER>(&self, uri: &str, event_loop: &mut Loop, handler: HANDLER) -> Result<(), ()>
where HANDLER: HttpHandler + 'static,
{
let uri = HttpUri::new(uri)?;
TcpConnection::ip4(event_loop, uri.host, uri.port, Connection::new(uri.host, handler, uri.resource.path, "GET"));
Ok(())
}
pub fn post<HANDLER>(&self, uri: &str, event_loop: &mut Loop, handler: HANDLER) -> Result<(), ()>
where HANDLER: HttpHandler + 'static,
{
let uri = HttpUri::new(uri)?;
TcpConnection::ip4(event_loop, uri.host, uri.port, Connection::new(uri.host, handler, uri.resource.path, "POST"));
Ok(())
}
}
impl Default for Http {
fn default() -> Self {
Self::new()
}
}
#[derive(Debug)]
enum Msg {
HttpGet(Vec<u8>),
HttpError(io::Error),
}
struct BlockingHttpHandler {
event_loop: Loop,
result: Rc<RefCell<io::Result<Vec<u8>>>>,
}
impl BlockingHttpHandler {
fn new(event_loop: &Loop, result: Rc<RefCell<io::Result<Vec<u8>>>>) -> Self {
Self {
event_loop: event_loop.clone(),
result,
}
}
}
impl Handler for BlockingHttpHandler {
type Msg = Msg;
fn update(&mut self, _stream: &Stream<Msg>, msg: Self::Msg) {
match msg {
HttpGet(body) => {
*self.result.borrow_mut() = Ok(body);
},
HttpError(error) => {
*self.result.borrow_mut() = Err(error);
},
}
self.event_loop.stop()
}
}
| true |
201c9ccf25ef2cfc4cf355e9ccc7df4dd42c077e
|
Rust
|
lapce/lapce
|
/lapce-app/src/keypress/press.rs
|
UTF-8
| 3,415 | 3.0625 | 3 |
[
"CC-BY-4.0",
"Apache-2.0",
"LicenseRef-scancode-free-unknown"
] |
permissive
|
use std::fmt::Display;
use floem::keyboard::{Key, ModifiersState};
use tracing::warn;
use super::key::KeyInput;
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
pub struct KeyPress {
pub(super) key: KeyInput,
pub(super) mods: ModifiersState,
}
impl KeyPress {
pub fn to_lowercase(&self) -> Self {
let key = match &self.key {
KeyInput::Keyboard(Key::Character(c)) => {
KeyInput::Keyboard(Key::Character(c.to_lowercase().into()))
}
_ => self.key.clone(),
};
Self {
key,
mods: self.mods,
}
}
pub fn is_char(&self) -> bool {
let mut mods = self.mods;
mods.set(ModifiersState::SHIFT, false);
if mods.is_empty() {
if let KeyInput::Keyboard(Key::Character(_c)) = &self.key {
return true;
}
}
false
}
pub fn label(&self) -> String {
let mut keys = String::from("");
if self.mods.control_key() {
keys.push_str("Ctrl+");
}
if self.mods.alt_key() {
keys.push_str("Alt+");
}
if self.mods.super_key() {
let keyname = match std::env::consts::OS {
"macos" => "Cmd+",
"windows" => "Win+",
_ => "Meta+",
};
keys.push_str(keyname);
}
if self.mods.shift_key() {
keys.push_str("Shift+");
}
keys.push_str(&self.key.to_string());
keys.trim().to_string()
}
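    /// Parses a keymap string such as "Ctrl+Shift+P" (several chords may be
    /// separated by spaces) into a list of `KeyPress`es, warning about and
    /// skipping unrecognized keys or modifiers.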
pub fn parse(key: &str) -> Vec<Self> {
key.split(' ')
.filter_map(|k| {
let (modifiers, key) = match k.rsplit_once('+') {
Some(pair) => pair,
None => ("", k),
};
let key = match key.parse().ok() {
Some(key) => key,
None => {
// Skip past unrecognized key definitions
warn!("Unrecognized key: {key}");
return None;
}
};
let mut mods = ModifiersState::empty();
for part in modifiers.to_lowercase().split('+') {
match part {
"ctrl" => mods.set(ModifiersState::CONTROL, true),
"meta" => mods.set(ModifiersState::SUPER, true),
"shift" => mods.set(ModifiersState::SHIFT, true),
"alt" => mods.set(ModifiersState::ALT, true),
"" => (),
other => warn!("Invalid key modifier: {}", other),
}
}
Some(KeyPress { key, mods })
})
.collect()
}
}
impl Display for KeyPress {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
if self.mods.contains(ModifiersState::CONTROL) {
let _ = f.write_str("Ctrl+");
}
if self.mods.contains(ModifiersState::ALT) {
let _ = f.write_str("Alt+");
}
if self.mods.contains(ModifiersState::SUPER) {
let _ = f.write_str("Meta+");
}
if self.mods.contains(ModifiersState::SHIFT) {
let _ = f.write_str("Shift+");
}
f.write_str(&self.key.to_string())
}
}
| true |
97434b08dd309063de545581ee779c05103af7c9
|
Rust
|
amling/rlagar
|
/src/misc.rs
|
UTF-8
| 464 | 2.796875 | 3 |
[] |
no_license
|
use chrono::Local;
pub fn debug_log(msg: impl AsRef<str>) {
let msg = msg.as_ref();
eprintln!("{} - {}", Local::now().format("%Y%m%d %H:%M:%S"), msg);
}
pub fn debug_time<T>(label: impl AsRef<str>, cb: impl FnOnce() -> T) -> T {
let label = label.as_ref();
let t0 = std::time::Instant::now();
debug_log(format!("Starting {}...", label));
let ret = cb();
debug_log(format!("Finished {}: {:?}", label, t0.elapsed()));
return ret;
}
| true |
d2c4f87bdc7c92f7be6a816e4d6334ece2eed28e
|
Rust
|
ricky26/advent-of-code-2020
|
/src/bags.rs
|
UTF-8
| 3,190 | 3.078125 | 3 |
[] |
no_license
|
use nom::{
IResult,
character::complete::{space0, digit1, alpha1, space1, char},
bytes::complete::{tag},
combinator::{opt, recognize, map},
sequence::{pair, tuple},
multi::separated_list1,
branch::alt,
};
use std::collections::BTreeMap;
fn skip_whitespace(input: &str) -> &str {
space0::<&str, nom::error::Error<&str>>(input).unwrap().0
}
fn parse_bag_name(input: &str) -> IResult<&str, &str> {
let input = skip_whitespace(input);
let (input, name) = recognize(tuple((alpha1, space1, alpha1)))(input)?;
let input = skip_whitespace(input);
let (input, _) = tag("bag")(input)?;
let (input, _) = opt(char('s'))(input)?;
let input = skip_whitespace(input);
Ok((input, name))
}
fn parse_usize(input: &str) -> IResult<&str, usize> {
let (input, num_str) = digit1(input)?;
let num = num_str.parse().unwrap();
Ok((input, num))
}
fn parse_bag_count(input: &str) -> IResult<&str, (usize, &str)> {
let input = skip_whitespace(input);
let (input, count) = parse_usize(input)?;
let input = skip_whitespace(input);
let (input, bag_name) = parse_bag_name(input)?;
let input = skip_whitespace(input);
Ok((input, (count, bag_name)))
}
fn parse_bag_list(input: &str) -> IResult<&str, Vec<(usize, &str)>> {
separated_list1(pair(char(','), space1), parse_bag_count)(input)
}
fn parse_statement(input: &str) -> IResult<&str, (&str, Vec<(usize, &str)>)> {
let (input, bag_name) = parse_bag_name(input)?;
let input = skip_whitespace(input);
let (input, _) = tag("contain")(input)?;
let input = skip_whitespace(input);
let (input, list) = alt((parse_bag_list, map(tag("no other bags"), |_| Vec::new())))(input)?;
let (input, _) = char('.')(input)?;
let input = skip_whitespace(input);
Ok((input, (bag_name, list)))
}
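// Parses the whole puzzle input, one "<colour> bags contain ..." statement per
// line, into a map from bag colour to the bags (and counts) it directly holds.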
pub fn parse_mapping(input: &str) -> IResult<&str, BTreeMap<String, BTreeMap<String, usize>>> {
let mut mappings = BTreeMap::new();
for line in input.lines().map(str::trim) {
if line.is_empty() {
continue
}
let (input, (bag_name, list)) = parse_statement(line)?;
if !input.is_empty() {
return Err(nom::Err::Failure(nom::error::Error::new(input, nom::error::ErrorKind::LengthValue)))
}
let mut mapping = BTreeMap::new();
mapping.extend(list.iter().map(|(a, b)| (b.to_string(), *a)));
mappings.insert(bag_name.to_string(), mapping);
}
Ok(("", mappings))
}
#[test]
fn test_parse() {
use std::iter::FromIterator;
let test_input = "\
dotted black bags contain no other bags.
bright white bags contain 1 shiny gold bag.
light red bags contain 1 bright white bag, 2 muted yellow bags.
";
assert_eq!(parse_mapping(test_input).unwrap().1, BTreeMap::from_iter(vec![
("dotted black".to_string(), BTreeMap::new()),
("bright white".to_string(), BTreeMap::from_iter(vec![
("shiny gold".to_string(), 1),
])),
("light red".to_string(), BTreeMap::from_iter(vec![
("bright white".to_string(), 1),
("muted yellow".to_string(), 2),
])),
]));
}
| true |
1173f51bda18c6b426f9696330524633cfc15a42
|
Rust
|
Corallus-Caninus/windows-rs
|
/crates/tests/winrt/enums/tests/test.rs
|
UTF-8
| 800 | 3.1875 | 3 |
[] |
no_license
|
use test_winrt_enums::*;
use Component::Enums::*;
#[test]
fn signed() {
let value = Signed::One;
assert!(value.0 == 1i32);
let value = Signed::Two;
assert!(value.0 == 2i32);
let value = Signed::Three;
assert!(value.0 == 3i32);
let value: Signed = 2i32.into();
assert!(value == Signed::Two);
}
#[test]
fn unsigned() {
let value = Unsigned::One;
assert!(value.0 == 0x001u32);
let value = Unsigned::Two;
assert!(value.0 == 0x010u32);
let value = Unsigned::Three;
assert!(value.0 == 0x100u32);
let value: Unsigned = 0x010u32.into();
assert!(value == Unsigned::Two);
let value = Unsigned::One | Unsigned::Three;
assert!(value.0 == 0x101u32);
let value = Unsigned::One;
assert_eq!(!value, Unsigned(0xFFFFFFFEu32))
}
| true |
277b3b836b4f94d09c136a555dcbf56d77500f1a
|
Rust
|
mapx/leetcode-rust
|
/src/util/union_find.rs
|
UTF-8
| 1,448 | 3.1875 | 3 |
[
"MIT"
] |
permissive
|
use std::mem;
use std::usize;
pub struct UnionFind {
weights: Vec<usize>,
groups: Vec<usize>,
}
impl UnionFind {
pub fn new(size: usize) -> UnionFind {
let mut groups = Vec::with_capacity(size);
for i in 0..size {
groups.push(i);
}
UnionFind {
weights: vec![1; size],
groups,
}
}
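    /// Returns the representative of the set containing `i` (or `usize::MAX`
    /// if `i` is out of range), compressing the visited path onto the root.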
pub fn find(&mut self, i: usize) -> usize {
if i >= self.groups.len() {
return usize::MAX;
}
let mut stack = vec![];
let mut i = i;
loop {
let j = unsafe { *self.groups.get_unchecked(i) };
if i != j {
stack.push(j);
i = j;
} else {
break;
}
}
for j in stack {
unsafe {
*self.groups.get_unchecked_mut(j) = i;
}
}
i
}
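    /// Merges the sets containing `i` and `j`, attaching the lighter root
    /// under the heavier one; returns `false` if they were already joined.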
pub fn union(&mut self, i: usize, j: usize) -> bool {
let (mut i, mut j) = (self.find(i), self.find(j));
if i == j {
return false;
}
unsafe {
if *self.weights.get_unchecked(i) < *self.weights.get_unchecked(j) {
mem::swap(&mut i, &mut j);
}
*self.groups.get_unchecked_mut(j) = *self.groups.get_unchecked_mut(i);
*self.weights.get_unchecked_mut(i) += *self.weights.get_unchecked_mut(j);
}
true
}
}
| true |
972d85dc38fb829c1535778447f3a169e142b0f1
|
Rust
|
rbartlensky/Lua-interpreter
|
/luavm/src/lib/instructions/arithmetic_operators.rs
|
UTF-8
| 1,199 | 3.40625 | 3 |
[] |
no_license
|
use errors::LuaError;
use luacompiler::bytecode::instructions::{first_arg, second_arg, third_arg};
use Vm;
/// Generates a function called `$op`, which takes two parameters: a mutable reference to a
/// vm and an instruction, and returns whether the instruction is executed successfully
/// or not by the vm. This macro is used to generate add, sub, etc. functions which all
/// have the same implementation. The name of the function ($op) is also the name of
/// the method that is called on the operands of the instruction. For example:
/// `bin_op!(add);` generates an `add` function which extracts the arguments of the
/// instruction (lhs and rhs), and calls `lhs.add(rhs)`.
macro_rules! bin_op {
($op: tt) => {
pub fn $op(vm: &mut Vm, instr: u32) -> Result<(), LuaError> {
let res = {
let lhs = &vm.registers[second_arg(instr) as usize];
let rhs = &vm.registers[third_arg(instr) as usize];
lhs.$op(rhs)?
};
vm.registers[first_arg(instr) as usize] = res;
Ok(())
}
};
}
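// For reference, `bin_op!(add)` expands to roughly the following sketch:
//
//     pub fn add(vm: &mut Vm, instr: u32) -> Result<(), LuaError> {
//         let res = {
//             let lhs = &vm.registers[second_arg(instr) as usize];
//             let rhs = &vm.registers[third_arg(instr) as usize];
//             lhs.add(rhs)?
//         };
//         vm.registers[first_arg(instr) as usize] = res;
//         Ok(())
//     }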
bin_op!(add);
bin_op!(sub);
bin_op!(mul);
bin_op!(div);
bin_op!(modulus);
bin_op!(fdiv);
bin_op!(exp);
| true |
3e0e927cf27229ee67fb36c7daaf3dbe0a05e799
|
Rust
|
tomaka/futures-rs
|
/tests/stream.rs
|
UTF-8
| 8,216 | 2.875 | 3 |
[
"MIT",
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference",
"LicenseRef-scancode-other-permissive"
] |
permissive
|
extern crate futures;
use futures::{failed, finished, Future, promise};
use futures::stream::*;
mod support;
use support::*;
// #[test]
// fn smoke() {
// let (tx, rx) = channel::<i32, u32>();
// tx.send(Ok(1))
// .and_then(|tx| tx.send(Ok(2)))
// .and_then(|tx| tx.send(Ok(3)))
// .schedule(|r| assert!(r.is_ok()));
// assert_eq!(rx.collect(), Ok(vec![1, 2, 3]));
//
// let (tx, rx) = channel::<i32, u32>();
// tx.send(Ok(1))
// .and_then(|tx| tx.send(Err(2)))
// .and_then(|tx| tx.send(Ok(3)))
// .schedule(|r| assert!(r.is_ok()));
// assert_eq!(rx.collect(), Err(2));
// }
fn list() -> Receiver<i32, u32> {
let (tx, rx) = channel();
tx.send(Ok(1))
.and_then(|tx| tx.send(Ok(2)))
.and_then(|tx| tx.send(Ok(3)))
.forget();
return rx
}
fn err_list() -> Receiver<i32, u32> {
let (tx, rx) = channel();
tx.send(Ok(1))
.and_then(|tx| tx.send(Ok(2)))
.and_then(|tx| tx.send(Err(3)))
.forget();
return rx
}
// fn collect_poll<S: Stream>(mut s: S) -> Result<Vec<S::Item>, S::Error> {
// let mut base = Vec::new();
// loop {
// match s.poll() {
// Ok(item) => base.push(item),
// Err(PollError::Empty) => return Ok(base),
// Err(PollError::Other(e)) => return Err(e),
// Err(PollError::NotReady) => panic!("blocked?"),
// }
// }
// }
//
#[test]
fn adapters() {
assert_done(|| list().map(|a| a + 1).collect(), Ok(vec![2, 3, 4]));
assert_done(|| err_list().map_err(|a| a + 1).collect(), Err(4));
assert_done(|| list().fold(0, |a, b| finished::<i32, u32>(a + b)), Ok(6));
assert_done(|| err_list().fold(0, |a, b| finished::<i32, u32>(a + b)), Err(3));
assert_done(|| list().filter(|a| *a % 2 == 0).collect(), Ok(vec![2]));
assert_done(|| list().and_then(|a| Ok(a + 1)).collect(), Ok(vec![2, 3, 4]));
assert_done(|| list().then(|a| a.map(|e| e + 1)).collect(), Ok(vec![2, 3, 4]));
assert_done(|| list().and_then(|a| failed::<i32, u32>(a as u32)).collect(),
Err(1));
assert_done(|| err_list().or_else(|a| {
finished::<i32, u32>(a as i32)
}).collect(), Ok(vec![1, 2, 3]));
assert_done(|| list().map(|_| list()).flatten().collect(),
Ok(vec![1, 2, 3, 1, 2, 3, 1, 2, 3]));
// assert_eq!(list().map(|i| finished::<_, u32>(i)).flatten().collect(),
// Ok(vec![1, 2, 3]));
assert_done(|| list().skip_while(|e| Ok(*e % 2 == 1)).collect(),
Ok(vec![2, 3]));
assert_done(|| list().take(2).collect(), Ok(vec![1, 2]));
assert_done(|| list().skip(2).collect(), Ok(vec![3]));
}
// #[test]
// fn adapters_poll() {
// assert_eq!(collect_poll(list().map(|a| a + 1)), Ok(vec![2, 3, 4]));
// assert_eq!(collect_poll(err_list().map_err(|a| a + 1)), Err(4));
// assert_eq!(collect_poll(list().filter(|a| *a % 2 == 0)), Ok(vec![2]));
// assert_eq!(collect_poll(list().and_then(|a| Ok(a + 1))), Ok(vec![2, 3, 4]));
// assert_eq!(collect_poll(err_list().and_then(|a| Ok(a + 1))), Err(3));
// assert_eq!(collect_poll(err_list().and_then(|a| {
// failed::<i32, _>(a as u32)
// })), Err(1));
// assert_eq!(collect_poll(err_list().or_else(|a| finished::<_, u32>(a as i32))),
// Ok(vec![1, 2, 3]));
//
// let (tx, rx) = channel::<i32, u32>();
// let (rx2, tx2) = promise::pair();
// let mut rx2 = Some(rx2);
// let mut rx = rx.and_then(move |_a| rx2.take().unwrap());
// match rx.poll() {
// Err(PollError::NotReady) => {}
// _ => panic!("ready?"),
// }
// tx.send(Ok(1)).schedule(|_| ());
// match rx.poll() {
// Err(PollError::NotReady) => {}
// _ => panic!("ready?"),
// }
// match rx.poll() {
// Err(PollError::NotReady) => {}
// _ => panic!("ready?"),
// }
// tx2.finish(1);
// match rx.poll() {
// Ok(1) => {},
// Err(PollError::NotReady) => panic!("not ready?"),
// Err(PollError::Empty) => panic!("empty?"),
// _ => panic!("not ready?"),
// }
//
// // let (tx, rx) = channel::<i32, u32>();
// // let rx = rx.and_then(|a| failed::<i32, _>(a as u32));
// // tx.send(Ok(1)).schedule(|_| ());
// // assert_eq!(rx.collect(), Err(1));
// // assert_eq!(list().fold(0, |a, b| a + b), Ok(6));
// // assert_eq!(list().and_then(|a| Ok(a + 1)).collect(),
// // Ok(vec![2, 3, 4]));
// // assert_eq!(err_list().or_else(|a| {
// // finished::<i32, u32>(a as i32)
// // }).collect(), Ok(vec![1, 2, 3]));
// // assert_eq!(list().map(|_| list()).flat_map().collect(),
// // Ok(vec![1, 2, 3, 1, 2, 3, 1, 2, 3]));
// // assert_eq!(list().map(|i| finished::<_, u32>(i)).flatten().collect(),
// // Ok(vec![1, 2, 3]));
//
// assert_eq!(list().collect().poll().ok().unwrap(), Ok(vec![1, 2, 3]));
// assert_eq!(err_list().collect().poll().ok().unwrap(), Err(3));
// assert_eq!(list().fold(0, |a, b| a + b).poll().ok().unwrap(), Ok(6));
// assert_eq!(err_list().fold(0, |a, b| a + b).poll().ok().unwrap(), Err(3));
// assert_eq!(list().map(|a| finished::<_, u32>(a))
// .flatten().collect().poll().ok().unwrap(),
// Ok(vec![1, 2, 3]));
// assert_eq!(list().map(|_a| list()).flat_map()
// .collect().poll().ok().unwrap(),
// Ok(vec![1, 2, 3, 1, 2, 3, 1, 2, 3]));
// }
//
// #[test]
// fn rxdrop() {
// let (tx, rx) = channel::<i32, u32>();
// drop(rx);
// assert!(tx.send(Ok(1)).is_err());
// }
//
// #[test]
// fn bufstream_smoke() {
// let (tx, mut rx) = bufstream::<i32, u32>(4);
// let (vrx, mut vtx): (Vec<_>, Vec<_>) = (0..4).map(|_| {
// let (a, b) = promise::pair::<i32, u32>();
// (a, Some(b))
// }).unzip();
// for (a, b) in tx.zip(vrx) {
// b.schedule(|val| a.send(val));
// }
//
// assert_eq!(rx.poll(), Err(PollError::NotReady));
// vtx[0].take().unwrap().finish(2);
// assert_eq!(rx.poll(), Ok(2));
// assert_eq!(rx.poll(), Err(PollError::NotReady));
// vtx[3].take().unwrap().finish(4);
// assert_eq!(rx.poll(), Ok(4));
// assert_eq!(rx.poll(), Err(PollError::NotReady));
// vtx[1].take().unwrap().fail(3);
// assert_eq!(rx.poll(), Err(PollError::Other(3)));
// assert_eq!(rx.poll(), Err(PollError::NotReady));
// vtx[2].take().unwrap().finish(1);
// assert_eq!(rx.poll(), Ok(1));
// assert_eq!(rx.poll(), Err(PollError::Empty));
// }
//
// #[test]
// fn bufstream_concurrent() {
// let (tx, rx) = bufstream::<i32, u32>(4);
// let (vrx, vtx): (Vec<_>, Vec<_>) = (0..4).map(|_| {
// promise::pair::<i32, u32>()
// }).unzip();
// for (a, b) in tx.zip(vrx) {
// b.schedule(|val| a.send(val));
// }
//
// let t = thread::spawn(|| {
// let mut it = vtx.into_iter();
// it.next().unwrap().finish(2);
// it.next_back().unwrap().finish(4);
// it.next().unwrap().finish(3);
// it.next_back().unwrap().finish(1);
// assert!(it.next().is_none());
// });
//
// assert_eq!(rx.collect(), Ok(vec![2, 4, 3, 1]));
// t.join().unwrap();
// }
#[test]
fn buffered() {
let (tx, rx) = channel::<_, u32>();
let (a, b) = promise::<u32>();
let (c, d) = promise::<u32>();
tx.send(Ok(b.map_err(|_| 2).boxed()))
.and_then(|tx| tx.send(Ok(d.map_err(|_| 4).boxed())))
.forget();
let mut rx = rx.buffered(2);
sassert_empty(&mut rx);
c.complete(3);
sassert_next(&mut rx, 3);
sassert_empty(&mut rx);
a.complete(5);
sassert_next(&mut rx, 5);
sassert_done(&mut rx);
let (tx, rx) = channel::<_, u32>();
let (a, b) = promise::<u32>();
let (c, d) = promise::<u32>();
tx.send(Ok(b.map_err(|_| 2).boxed()))
.and_then(|tx| tx.send(Ok(d.map_err(|_| 4).boxed())))
.forget();
let mut rx = rx.buffered(1);
sassert_empty(&mut rx);
c.complete(3);
sassert_empty(&mut rx);
a.complete(5);
sassert_next(&mut rx, 5);
sassert_next(&mut rx, 3);
sassert_done(&mut rx);
}
| true |
41af863287ff14e47de576869f1bf0ead356ac1f
|
Rust
|
agmcleod/adventofcode-2016
|
/22/src/main.rs
|
UTF-8
| 10,083 | 2.984375 | 3 |
[] |
no_license
|
extern crate regex;
extern crate read_input;
use std::collections::{HashMap, HashSet};
use regex::Regex;
#[derive(Debug)]
struct Node {
coords: String,
size: usize,
used: usize,
avail: usize,
coords_as_num: [usize; 2],
}
impl Clone for Node {
fn clone(&self) -> Node {
Node{
coords: self.coords.clone(),
size: self.size.clone(),
used: self.used.clone(),
avail: self.avail.clone(),
coords_as_num: self.coords_as_num.clone(),
}
}
}
const MAX_X: usize = 36;
const MAX_Y: usize = 24;
impl Node {
fn new(coords: String, size: String, used: String, avail: String) -> Node {
let mut num_coords = [0, 0];
for (i, n) in coords.replace("x", "").split("y").enumerate() {
num_coords[i] = n.parse().ok().expect("Could not parse n");
}
Node{
coords: coords,
size: size.replace("T", "").parse().ok().expect("Failed to parse size"),
used: used.replace("T", "").parse().ok().expect("Failed to parse used"),
avail: avail.replace("T", "").parse().ok().expect("Failed to parse avail"),
coords_as_num: num_coords,
}
}
fn get_neighbours(&self) -> Vec<String> {
let mut neighbours: Vec<String> = Vec::new();
if self.coords_as_num[0] > 0 {
neighbours.push(format!("x{}y{}", self.coords_as_num[0] - 1, self.coords_as_num[1]));
}
if self.coords_as_num[1] > 0 {
neighbours.push(format!("x{}y{}", self.coords_as_num[0], self.coords_as_num[1] - 1));
}
if self.coords_as_num[0] < MAX_X {
neighbours.push(format!("x{}y{}", self.coords_as_num[0] + 1, self.coords_as_num[1]));
}
if self.coords_as_num[1] < MAX_Y {
neighbours.push(format!("x{}y{}", self.coords_as_num[0], self.coords_as_num[1] + 1));
}
neighbours
}
}
fn find_pairs_for_node(nodes: &HashMap<String, Node>, node: &Node) -> Vec<String> {
let mut pairs: Vec<String> = Vec::new();
for (key, node2) in nodes {
if *key != node.coords {
if node.used > 0 && node.used <= node2.avail {
pairs.push(node2.coords.clone());
}
}
}
pairs
}
// there's only one, this was overkill XD
fn find_first_zero_space_node(nodes: & HashMap<String, Node>, from: &Node) -> Option<Node> {
let mut scan_list: Vec<&Node> = vec![from];
let mut used_list: HashSet<String> = HashSet::new();
loop {
let mut temp_list: Vec<&Node> = Vec::new();
let mut any_found = false;
for node in &scan_list {
let neighbours = node.get_neighbours();
for c in neighbours {
if used_list.contains(&c) {
continue
}
any_found = true;
used_list.insert(c.clone());
let node = nodes.get(&c).unwrap();
if node.used == 0 {
return Some((*node).clone());
}
temp_list.push(node);
}
}
scan_list = temp_list.clone();
if !any_found {
break
}
}
None
}
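// Breadth-first search over grid states: each step swaps the tracked node's
// data with one neighbour that can hold it, until the data reaches `target`.
// Returns the number of moves, the coordinate the data was moved from on the
// final step, and the resulting grid state.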
fn move_node_data_to_coords(nodes: &HashMap<String, Node>, node: &Node, target: &String, used_list_data: Vec<String>) -> (usize, String, HashMap<String, Node>) {
let mut scan_list: Vec<(String, HashMap<String, Node>)> = vec![(node.coords.clone(), nodes.clone())];
let mut used_list: HashSet<String> = HashSet::new();
for n in used_list_data {
used_list.insert(n);
}
let mut count = 1;
let mut result_state: HashMap<String, Node> = HashMap::new();
let mut last_move = String::new();
'main: loop {
let mut temp_list: Vec<(String, HashMap<String, Node>)> = Vec::new();
let mut any_found = false;
for &(ref coords, ref state) in &scan_list {
let node = state.get(coords).unwrap();
let neighbours = node.get_neighbours();
for c in neighbours {
let mut new_state = state.clone();
let mut new_stuff = {
let neighbour = new_state.get(&c).unwrap();
let node = new_state.get(coords).unwrap();
// move on if node already scanned, or if either node can't fit the data
if used_list.contains(&c) || neighbour.used > node.size || node.used > neighbour.size {
continue
}
(neighbour.clone(), node.clone())
};
let neighbour_used = new_stuff.0.used;
let neighbour_coords = new_stuff.0.coords.clone();
new_stuff.0.used = new_stuff.1.used;
new_stuff.1.used = neighbour_used;
new_state.insert(new_stuff.0.coords.clone(), new_stuff.0);
new_state.insert(new_stuff.1.coords.clone(), new_stuff.1);
if neighbour_coords == *target {
result_state = new_state;
last_move = coords.clone();
break 'main
}
temp_list.push((neighbour_coords.clone(), new_state));
any_found = true;
used_list.insert(c.clone());
}
}
count += 1;
scan_list = temp_list.clone();
if !any_found {
println!("Ran out of options");
break
}
}
(count, last_move, result_state)
}
fn get_path_for_data(nodes: &HashMap<String, Node>, data_coords: &String) -> Vec<String> {
let (mut scan_list, data_used) = {
let data_node = nodes.get(data_coords).unwrap();
(vec![(data_node.clone(), Vec::new())], data_node.used)
};
let mut used_list: HashSet<String> = HashSet::new();
let target_coords = "x0y0".to_string();
'main: loop {
let mut temp_list: Vec<(Node, Vec<String>)> = Vec::new();
let mut any_found = false;
for &(ref node, ref path) in &scan_list {
let neighbours = node.get_neighbours();
for neighbour_coord in neighbours {
if neighbour_coord == target_coords {
let mut path = (*path).clone();
path.push(neighbour_coord);
return path
}
if used_list.contains(&neighbour_coord) {
continue
}
used_list.insert(neighbour_coord.clone());
let neighbour = nodes.get(&neighbour_coord).unwrap();
if neighbour.size >= data_used {
any_found = true;
let mut path = (*path).clone();
path.push(neighbour.coords.clone());
temp_list.push((neighbour.clone(), path));
}
}
}
scan_list = temp_list;
if !any_found {
break
}
}
Vec::new()
}
fn move_data_node_to_index_of_path(nodes: &mut HashMap<String, Node>, data_node: &mut Node, zero_node: &mut Node, target_coords: &String, move_count: &mut usize) {
let (count, _, new_state) = move_node_data_to_coords(nodes, zero_node, target_coords, vec![data_node.coords.clone()]);
*move_count += count;
let (count, _, new_state) = move_node_data_to_coords(&new_state, data_node, target_coords, Vec::new());
*zero_node = new_state.get(&data_node.coords).unwrap().clone();
*data_node = new_state.get(target_coords).unwrap().clone();
*nodes = new_state;
*move_count += count;
}
fn print_nodes(nodes: &HashMap<String, Node>) {
for y in 0..(MAX_Y+1) {
let mut row: Vec<String> = Vec::new();
for x in 0..(MAX_X+1) {
let node = nodes.get(&format!("x{}y{}", x, y)).unwrap();
row.push(format!("{}/{}", node.used, node.size));
}
println!("{}", row.join("|"));
}
}
fn main() {
let text = match read_input::read_text("input.txt") {
Ok(t) => t,
Err(e) => panic!("{:?}", e),
};
let re = Regex::new(r"\s+").unwrap();
let mut nodes: HashMap<String, Node> = HashMap::new();
for line in text.lines() {
if line.starts_with("/") {
let words = re.split(line).collect::<Vec<&str>>();
let pieces = words[0].split("-").collect::<Vec<&str>>();
let coords = format!("{}{}", pieces[1], pieces[2]);
nodes.insert(coords.clone(), Node::new(coords, words[1].to_string(), words[2].to_string(), words[3].to_string()));
}
}
let mut pairs: Vec<String> = Vec::new();
// part 1
for (_, node) in &nodes {
pairs.append(&mut find_pairs_for_node(&nodes, &node));
}
println!("part 1 pairs: {:?}", pairs.len());
if let Some(zero_space) = find_first_zero_space_node(&nodes, nodes.get(&format!("x{}y{}", MAX_X, 0)).unwrap()) {
println!("\n\nZero space node, from top right {:?}\n\n", zero_space);
let result = move_node_data_to_coords(&nodes, &zero_space, &format!("x{}y{}", MAX_X, 0), Vec::new());
println!("Count to move 0 to {} : {}", result.1, result.0);
let mut data_node = result.2.get(&result.1).unwrap().clone();
println!("Moved data amount: {} to: {}\n", data_node.used, data_node.coords);
let path = get_path_for_data(&result.2, &data_node.coords);
println!("{:?}", path);
let mut zero_node = result.2.get(&format!("x{}y{}", MAX_X, 0)).unwrap().clone();
print_nodes(&result.2);
let mut state = result.2;
let mut move_count = 0;
let mut index = 0;
loop {
move_data_node_to_index_of_path(&mut state, &mut data_node, &mut zero_node, &path[index], &mut move_count);
index += 1;
if index == path.len() {
break
}
}
println!("Moved 0 count: {}, move data count: {}, total: {}", result.0, move_count, result.0 + move_count);
}
}
| true |
312a402520eea2df867df6c891551c5165dac170
|
Rust
|
jonnyom/Exercism-exercises
|
/rust/bob/src/lib.rs
|
UTF-8
| 509 | 3.5 | 4 |
[] |
no_license
|
pub fn reply(message: &str) -> &str {
match message.trim() {
x if x.is_empty() => "Fine. Be that way!",
x if x.ends_with("?") => match x {
x if is_uppercase(x) => "Calm down, I know what I'm doing!",
_ => "Sure.",
},
x if is_uppercase(x) => "Whoa, chill out!",
_ => "Whatever.",
}
}
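// A message counts as shouted when it equals its own uppercase form and
// contains at least one alphabetic character.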
fn is_uppercase(string: &str) -> bool {
    (string.to_ascii_uppercase() == string)
        && (string != string.to_ascii_lowercase())
}
| true |
bb808ca9e91b05f337c0016fcd968a07ea8d7900
|
Rust
|
akimash/abc
|
/90/c.rs
|
UTF-8
| 698 | 3.265625 | 3 |
[] |
no_license
|
use std::io::*;
use std::str::FromStr;
pub fn read<T: FromStr>() -> T {
let stdin = stdin();
let stdin = stdin.lock();
let token: String = stdin
.bytes()
.map(|c| c.expect("failed to read char") as char)
.skip_while(|c| c.is_whitespace())
.take_while(|c| !c.is_whitespace())
.collect();
token.parse().ok().expect("failed to parse token")
}
use std::cmp::max;
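// Counts the grid cells that do not touch the border: (n-2)*(m-2) in general,
// written below as n*m minus the 2*(n+m)-4 border cells, with 1-wide grids
// handled separately.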
fn main() {
let n = read::<usize>();
let m = read::<usize>();
let sum = n * m;
if n == 1 && m == 1 {
println!("{}", sum)
} else if n == 1 || m == 1 {
println!("{}", max(n, m) - 2)
} else {
println!("{}",sum - (2*(m+n) - 4));
}
}
| true |
bae4a9411fa46e5001026bdb2731b28be0753758
|
Rust
|
sighttviewliu/grin
|
/wallet/src/display.rs
|
UTF-8
| 3,352 | 2.53125 | 3 |
[
"Apache-2.0"
] |
permissive
|
// Copyright 2018 The Grin Developers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use core::core;
use core::core::amount_to_hr_string;
use keychain::Keychain;
use libwallet::Error;
use libwallet::types::{OutputData, WalletInfo};
use prettytable;
use std::io::prelude::*;
use term;
/// Display outputs in a pretty way
pub fn outputs(cur_height: u64, validated: bool, outputs: Vec<OutputData>) -> Result<(), Error> {
let title = format!("Wallet Outputs - Block Height: {}", cur_height);
println!();
let mut t = term::stdout().unwrap();
t.fg(term::color::MAGENTA).unwrap();
writeln!(t, "{}", title).unwrap();
t.reset().unwrap();
let mut table = table!();
table.set_titles(row![
bMG->"Key Id",
bMG->"Block Height",
bMG->"Locked Until",
bMG->"Status",
bMG->"Is Coinbase?",
bMG->"Num. of Confirmations",
bMG->"Value"
]);
for out in outputs {
let key_id = format!("{}", out.key_id);
let height = format!("{}", out.height);
let lock_height = format!("{}", out.lock_height);
let status = format!("{:?}", out.status);
let is_coinbase = format!("{}", out.is_coinbase);
let num_confirmations = format!("{}", out.num_confirmations(cur_height));
let value = format!("{}", core::amount_to_hr_string(out.value));
table.add_row(row![
bFC->key_id,
bFB->height,
bFB->lock_height,
bFR->status,
bFY->is_coinbase,
bFB->num_confirmations,
bFG->value
]);
}
table.set_format(*prettytable::format::consts::FORMAT_NO_COLSEP);
table.printstd();
println!();
if !validated {
println!(
"\nWARNING: Wallet failed to verify data. \
The above is from local cache and possibly invalid! \
(is your `grin server` offline or broken?)"
);
}
Ok(())
}
/// Display summary info in a pretty way
pub fn info(wallet_info: &WalletInfo) -> Result<(), Error> {
println!(
"\n____ Wallet Summary Info at {} ({}) ____\n",
wallet_info.current_height, wallet_info.data_confirmed_from
);
let mut table = table!(
[bFG->"Total", FG->amount_to_hr_string(wallet_info.total)],
[bFY->"Awaiting Confirmation", FY->amount_to_hr_string(wallet_info.amount_awaiting_confirmation)],
[bFY->"Confirmed but Still Locked", FY->amount_to_hr_string(wallet_info.amount_confirmed_but_locked)],
[bFG->"Currently Spendable", FG->amount_to_hr_string(wallet_info.amount_currently_spendable)],
[Fw->"---------", Fw->"---------"],
[Fr->"(Locked by previous transaction)", Fr->amount_to_hr_string(wallet_info.amount_locked)]
);
table.set_format(*prettytable::format::consts::FORMAT_NO_BORDER_LINE_SEPARATOR);
table.printstd();
println!();
if !wallet_info.data_confirmed {
println!(
"\nWARNING: Failed to verify wallet contents with grin server. \
Above info is maybe not fully updated or invalid! \
Check that your `grin server` is OK, or see `wallet help restore`"
);
};
Ok(())
}
| true |
ce7b025efd2feca0e08e51abbba7f93217f2f2c9
|
Rust
|
kod-kristoff/bevy_ascii_terminal
|
/src/render/renderer_tile_data.rs
|
UTF-8
| 2,341 | 3.1875 | 3 |
[
"MIT"
] |
permissive
|
use bevy::math::{UVec2, Vec2};
use crate::terminal::Tile;
use super::glyph_mapping::GlyphMapping;
#[derive(Default)]
pub struct TerminalRendererTileData {
//pub fg_colors: Vec<[f32; 4]>,
//pub bg_colors: Vec<[f32; 4]>,
pub fg_colors: Vec<[u8; 4]>,
pub bg_colors: Vec<[u8; 4]>,
pub uvs: Vec<[f32; 2]>,
pub mapping: GlyphMapping,
}
impl TerminalRendererTileData {
pub fn with_size(size: UVec2) -> Self {
let mut v = Self::default();
v.resize(size);
v
}
pub fn resize(&mut self, size: UVec2) {
let len = (size.x * size.y) as usize;
self.fg_colors.resize(len * 4, Default::default());
self.bg_colors.resize(len * 4, Default::default());
self.uvs.resize(len * 4, Default::default());
}
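    // Rebuilds the per-vertex data from the terminal tiles: each tile gets four
    // vertices whose uvs address a 16x16 glyph atlas (via the glyph mapping) and
    // whose colors repeat the tile's foreground/background colors.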
pub fn update_from_tiles(&mut self, tiles: &[Tile]) {
let uv_size = Vec2::new(1.0 / 16.0, 1.0 / 16.0);
let right = Vec2::new(uv_size.x, 0.0);
let up = Vec2::new(0.0, uv_size.y);
for (i, tile) in tiles.iter().enumerate() {
let glyph = tile.glyph;
let (tile_x, tile_y) = self.mapping.get_index(glyph);
let origin = Vec2::new(tile_x as f32 * uv_size.x, tile_y as f32 * uv_size.y);
let vi = i * 4;
let uvs = &mut self.uvs;
uvs[vi] = origin.into();
uvs[vi + 1] = (origin + up).into();
uvs[vi + 2] = (origin + right).into();
uvs[vi + 3] = (origin + up + right).into();
for j in vi..vi + 4 {
self.fg_colors[j] = tile.fg_color.into();
self.bg_colors[j] = tile.bg_color.into();
}
}
}
}
#[cfg(test)]
mod tests {
use bevy::math::UVec2;
use crate::{render::renderer_tile_data::TerminalRendererTileData, terminal::Tile};
use crate::color::*;
#[test]
fn resize_test() {
let mut tiles: Vec<Tile> = vec![Tile::default(); 50];
for tile in tiles.iter_mut() {
*tile = Tile {
fg_color: BLUE,
..Default::default()
}
}
let mut colors: TerminalRendererTileData =
TerminalRendererTileData::with_size(UVec2::new(25, 25));
colors.update_from_tiles(&tiles);
assert_eq!([0, 0, u8::MAX, u8::MAX], colors.fg_colors[0]);
}
}
| true |
c73bdf876cabb8bed4640295aeae3554673498d1
|
Rust
|
5mattmatt1/voxport
|
/src/main.rs
|
UTF-8
| 20,984 | 2.796875 | 3 |
[
"MIT",
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
] |
permissive
|
extern crate collada_io; // Export
extern crate clap; // CLI
extern crate dot_vox; // Import
extern crate stl_io; // Export
pub mod app;
const INPUT_FILEPATH: &'static str = "input.vox";
const OUTPUT_STL_FILEPATH: &'static str = "output.stl";
const OUTPUT_DAE_FILEPATH: &'static str = "output.dae";
const _OUTPUT_PAL_FILEPATH: &'static str = "output.txt";
use std::fs::File;
use std::io::prelude::*;
use std::io::LineWriter;
struct Color
{
pub r: u8,
pub g: u8,
pub b: u8,
pub a: u8
}
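// A voxel plus a bitmask of its exposed faces:
// bit 0 = left (-x), 1 = back (-y), 2 = bottom (-z),
// bit 3 = right (+x), 4 = front (+y), 5 = top (+z).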
#[derive(Copy, Clone)]
struct MetaVoxel
{
pub voxel: dot_vox::Voxel,
pub faces: u8
}
#[derive(Copy, Clone, PartialEq)]
struct Vertex
{
pub x: f32,
pub y: f32,
pub z: f32
}
#[derive(Copy, Clone, PartialEq)]
struct Normal
{
pub x: f32,
pub y: f32,
pub z: f32
}
#[derive(Copy, Clone, PartialEq)]
struct Triangle
{
pub normal: Normal,
pub a: Vertex,
pub b: Vertex,
pub c: Vertex
}
#[derive(Copy, Clone, PartialEq)]
struct IndexedTriangle
{
pub normal: Normal, // STL
pub normal_index: usize, // DAE
pub a: usize,
pub b: usize,
pub c: usize
}
impl Into<stl_io::Normal> for Normal
{
fn into(self) -> stl_io::Normal
{
stl_io::Normal::new([self.x, self.y, self.z])
}
}
impl Into<stl_io::Vertex> for Vertex
{
fn into(self) -> stl_io::Vertex
{
stl_io::Vertex::new([self.x, self.y, self.z])
}
}
impl Into<stl_io::Triangle> for Triangle
{
fn into(self) -> stl_io::Triangle
{
stl_io::Triangle {
normal: self.normal.into(),
vertices: [self.a.into(), self.b.into(), self.c.into()]
}
}
}
impl Into<stl_io::IndexedTriangle> for IndexedTriangle
{
fn into(self) -> stl_io::IndexedTriangle
{
stl_io::IndexedTriangle {
normal: self.normal.into(),
vertices: [self.a, self.b, self.c]
}
}
}
impl From<u32> for Color
{
fn from(src: u32) -> Self
{
let r = ((src & 0x00_00_00_FF) >> 0) as u8;
let g = ((src & 0x00_00_FF_00) >> 8) as u8;
let b = ((src & 0x00_FF_00_00) >> 16) as u8;
let a = ((src & 0xFF_00_00_00) >> 24) as u8;
Self {
r,
g,
b,
a
}
}
}
impl Into<String> for Color
{
fn into(self) -> String
{
format!("{} {} {}", self.r, self.g, self.b)
}
}
impl MetaVoxel
{
pub fn has_left(&self) -> bool
{
return ((self.faces & (1 << 0)) >> 0) == 1;
}
pub fn has_back(&self) -> bool
{
return ((self.faces & (1 << 1)) >> 1) == 1;
}
pub fn has_bottom(&self) -> bool
{
return ((self.faces & (1 << 2)) >> 2) == 1;
}
pub fn has_right(&self) -> bool
{
return ((self.faces & (1 << 3)) >> 3) == 1;
}
pub fn has_front(&self) -> bool
{
return ((self.faces & (1 << 4)) >> 4) == 1;
}
pub fn has_top(&self) -> bool
{
return ((self.faces & (1 << 5)) >> 5) == 1;
}
}
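// Flatten an (x, y, z) voxel coordinate into a linear index into the dense grid,
// with x varying fastest, then y, then z.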
fn get_voxel_idx(vox: &dot_vox::Voxel, size: &dot_vox::Size) -> usize
{
return (vox.x as usize) +
((vox.y as usize) * (size.x as usize)) +
((vox.z as usize) * (size.x as usize) * (size.y as usize));
}
// Turn the sparse voxel list into a dense grid vector indexed by (x, y, z) so neighbours can be looked up in O(1)
fn reorder_voxels(voxels: &mut Vec<dot_vox::Voxel>, size: &dot_vox::Size) -> Vec<Option<dot_vox::Voxel>>
{
let mut ret_voxels: Vec<Option<dot_vox::Voxel>> = Vec::new();
let len: usize = (size.x as usize) * (size.y as usize) * (size.z as usize);
ret_voxels.resize(len, None);
for voxel in voxels
{
let idx = get_voxel_idx(voxel, size);
ret_voxels[idx] = Some(*voxel);
}
return ret_voxels;
}
fn has_neighbor(voxels: &Vec<Option<dot_vox::Voxel>>, voxel: &dot_vox::Voxel, size: &dot_vox::Size, x: i16, y: i16, z: i16) -> Result<bool, std::num::TryFromIntError>
{
use std::convert::TryFrom;
let vx: i16 = voxel.x.into();
let vy: i16 = voxel.y.into();
let vz: i16 = voxel.z.into();
let pos = dot_vox::Voxel {
i: voxel.i,
x: u8::try_from(vx + x)?,
y: u8::try_from(vy + y)?,
z: u8::try_from(vz + z)?
};
let neighbor: Option<dot_vox::Voxel>;
let idx = get_voxel_idx(&pos, size);
neighbor = voxels[idx];
return Ok(neighbor.is_some());
}
fn convert_meta_voxels(voxels: &Vec<Option<dot_vox::Voxel>>, size: &dot_vox::Size) -> Vec<Option<MetaVoxel>>
{
let mut mvoxels: Vec<Option<MetaVoxel>> = Vec::new();
let len: usize = (size.x as usize) * (size.y as usize) * (size.z as usize);
mvoxels.resize(len, None);
for (i, opt_voxel) in voxels.iter().enumerate()
{
match opt_voxel
{
Some(voxel) => {
let mut faces: u8 = 0;
let mut surrounded: bool = true;
// Need to add check to see if there are empty spaces in order to add a face
if voxel.x == 0 || !has_neighbor(voxels, voxel, size, -1, 0, 0).unwrap()// Left
{
faces |= 1 << 0;
surrounded = false;
}
if voxel.y == 0 || !has_neighbor(voxels, voxel, size, 0, -1, 0).unwrap() // Back
{
faces |= 1 << 1;
surrounded = false;
}
if voxel.z == 0 || !has_neighbor(voxels, voxel, size, 0, 0, -1).unwrap() // Bottom
{
faces |= 1 << 2;
surrounded = false;
}
if ((voxel.x + 1) == (size.x as u8)) || !has_neighbor(voxels, voxel, size, 1, 0, 0).unwrap() // Right
{
faces |= 1 << 3;
surrounded = false;
}
if (voxel.y + 1) == (size.y as u8) || !has_neighbor(voxels, voxel, size, 0, 1, 0).unwrap() // Front
{
faces |= 1 << 4;
surrounded = false;
}
if ((voxel.z + 1) == (size.z as u8)) || !has_neighbor(voxels, voxel, size, 0, 0, 1).unwrap() // Top
{
faces |= 1 << 5;
surrounded = false;
}
if !surrounded
{
mvoxels[i] = Some(MetaVoxel {
voxel: *voxel,
faces
});
}
}
None => {
}
}
}
return mvoxels;
}
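// Turn every exposed face into geometry: each face of the unit cube at the voxel's
// position is emitted as two triangles sharing that face's normal.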
fn convert_triangles(mvoxels: &Vec<Option<MetaVoxel>>) -> Vec<Triangle>
{
let mut triangles = Vec::new();
for opt_mvoxel in mvoxels
{
match opt_mvoxel
{
Some(mvoxel) => {
let vleft_back_top = Vertex {
x: mvoxel.voxel.x as f32 + 0.0,
y: mvoxel.voxel.y as f32 + 0.0,
z: mvoxel.voxel.z as f32 + 1.0,
};
let vleft_front_top = Vertex {
x: mvoxel.voxel.x as f32 + 0.0,
y: mvoxel.voxel.y as f32 + 1.0,
z: mvoxel.voxel.z as f32 + 1.0,
};
let vleft_back_bottom = Vertex {
x: mvoxel.voxel.x as f32 + 0.0,
y: mvoxel.voxel.y as f32 + 0.0,
z: mvoxel.voxel.z as f32 + 0.0,
};
let vleft_front_bottom = Vertex {
x: mvoxel.voxel.x as f32 + 0.0,
y: mvoxel.voxel.y as f32 + 1.0,
z: mvoxel.voxel.z as f32 + 0.0,
};
let vright_back_top = Vertex {
x: mvoxel.voxel.x as f32 + 1.0,
y: mvoxel.voxel.y as f32 + 0.0,
z: mvoxel.voxel.z as f32 + 1.0,
};
let vright_front_top = Vertex {
x: mvoxel.voxel.x as f32 + 1.0,
y: mvoxel.voxel.y as f32 + 1.0,
z: mvoxel.voxel.z as f32 + 1.0,
};
let vright_back_bottom = Vertex {
x: mvoxel.voxel.x as f32 + 1.0,
y: mvoxel.voxel.y as f32 + 0.0,
z: mvoxel.voxel.z as f32 + 0.0,
};
let vright_front_bottom = Vertex {
x: mvoxel.voxel.x as f32 + 1.0,
y: mvoxel.voxel.y as f32 + 1.0,
z: mvoxel.voxel.z as f32 + 0.0,
};
if mvoxel.has_left()
{
let normal = Normal {
x: 1.0,
y: 0.0,
z: 0.0
};
triangles.push(Triangle {
normal,
a: vleft_back_top,
b: vleft_back_bottom,
c: vleft_front_bottom
});
triangles.push(Triangle {
normal,
a: vleft_back_top,
b: vleft_front_top,
c: vleft_front_bottom
});
}
if mvoxel.has_back()
{
let normal = Normal {
x: 0.0,
y: 1.0,
z: 0.0
};
triangles.push(Triangle {
normal,
a: vleft_back_top,
b: vright_back_top,
c: vright_back_bottom
});
triangles.push(Triangle {
normal,
a: vleft_back_top,
b: vleft_back_bottom,
c: vright_back_bottom
});
}
if mvoxel.has_bottom()
{
let normal = Normal {
x: 0.0,
y: 0.0,
z: 1.0
};
triangles.push(Triangle {
normal,
a: vleft_back_bottom,
b: vleft_front_bottom,
c: vright_front_bottom
});
triangles.push(Triangle {
normal,
a: vleft_back_bottom,
b: vright_back_bottom,
c: vright_front_bottom
});
}
if mvoxel.has_right()
{
let normal = Normal {
x: -1.0,
y: 0.0,
z: 0.0
};
triangles.push(Triangle {
normal,
a: vright_back_top,
b: vright_back_bottom,
c: vright_front_bottom
});
triangles.push(Triangle {
normal,
a: vright_back_top,
b: vright_front_top,
c: vright_front_bottom
});
}
if mvoxel.has_front()
{
let normal = Normal {
x: 0.0,
y: -1.0,
z: 0.0
};
triangles.push(Triangle {
normal,
a: vleft_front_top,
b: vright_front_top,
c: vright_front_bottom
});
triangles.push(Triangle {
normal,
a: vleft_front_top,
b: vleft_front_bottom,
c: vright_front_bottom
});
}
if mvoxel.has_top()
{
let normal = Normal {
x: 0.0,
y: 0.0,
z: -1.0
};
triangles.push(Triangle {
normal,
a: vleft_back_top,
b: vleft_front_top,
c: vright_front_top
});
triangles.push(Triangle {
normal,
a: vleft_back_top,
b: vright_back_top,
c: vright_front_top
});
}
},
None => {
}
}
}
return triangles;
}
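// Deduplicate vertices and normals, rewriting each triangle as indices into the
// shared vertex/normal lists, the indexed form used by the COLLADA (.dae) export.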
fn index_triangles(triangles: &Vec<Triangle>, vertices: &mut Vec<Vertex>, normals: &mut Vec<Normal>, idx_triangles: &mut Vec<IndexedTriangle>)
{
for triangle in triangles
{
let normal_index: usize;
let a_position: usize;
let b_position: usize;
let c_position: usize;
match vertices.iter().position(|&x| x == triangle.a)
{
Some(idx) => {
a_position = idx;
},
None => {
a_position = vertices.len();
vertices.push(triangle.a);
}
}
match vertices.iter().position(|&x| x == triangle.b)
{
Some(idx) => {
b_position = idx;
},
None => {
b_position = vertices.len();
vertices.push(triangle.b);
}
}
match vertices.iter().position(|&x| x == triangle.c)
{
Some(idx) => {
c_position = idx;
},
None => {
c_position = vertices.len();
vertices.push(triangle.c);
}
}
match normals.iter().position(|&x| x == triangle.normal)
{
Some(idx) => {
normal_index = idx;
},
None => {
                normal_index = normals.len();
normals.push(triangle.normal);
}
}
idx_triangles.push(IndexedTriangle {
normal: triangle.normal,
normal_index,
a: a_position,
b: b_position,
c: c_position
});
}
}
fn convert_vox_stl(ifpath: &str, ofpath: &str)
{
use std::fs::OpenOptions;
let mut in_data = dot_vox::load(ifpath).unwrap();
let mut triangles: Vec<Triangle> = Vec::new();
for model in &mut in_data.models
{
let size = model.size;
let voxels = reorder_voxels(&mut model.voxels, &size);
let mvoxels = convert_meta_voxels(&voxels, &size);
triangles.append(&mut convert_triangles(&mvoxels));
}
let stl_triangles: Vec<stl_io::Triangle>;
stl_triangles = triangles.iter().map(|triangle| (*triangle).into()).collect::<Vec<_>>();
let mut file = OpenOptions::new().write(true).create(true).open(ofpath).unwrap();
stl_io::write_stl(&mut file, stl_triangles.iter()).unwrap();
}
fn convert_vox_dae(ifpath: &str, ofpath: &str)
{
let mut in_data = dot_vox::load(ifpath).unwrap();
let collada: collada_io::collada::Collada;
let mut geometries: Vec<collada_io::geometry::Geometry> = Vec::new();
for model in &mut in_data.models
{
let size = model.size;
let voxels = reorder_voxels(&mut model.voxels, &size);
let mvoxels = convert_meta_voxels(&voxels, &size);
let triangles = convert_triangles(&mvoxels);
let mut vertices: Vec<Vertex> = Vec::new();
let mut normals: Vec<Normal> = Vec::new();
let mut indexed_triangles: Vec<IndexedTriangle> = Vec::new();
index_triangles(&triangles, &mut vertices, &mut normals, &mut indexed_triangles);
let mut primitive: Vec<usize> = Vec::new();
primitive.reserve(indexed_triangles.len() * 3);
let mut mesh_positions: Vec<f32> = Vec::new();
mesh_positions.reserve(vertices.len() * 3);
// TODO: Add normals
for idx_triangle in indexed_triangles
{
primitive.push(idx_triangle.a);
primitive.push(idx_triangle.b);
primitive.push(idx_triangle.c);
}
for vertex in vertices
{
mesh_positions.push(vertex.x);
mesh_positions.push(vertex.y);
mesh_positions.push(vertex.z);
}
geometries.push(collada_io::geometry::Geometry {
id: Some("Voxel-mesh".to_string()),
name: Some("Voxel".to_string()),
mesh: collada_io::geometry::Mesh {
triangles: collada_io::geometry::Triangles {
vertices: "#Cube-mesh-vertices".to_string(),
normals: None,
tex_vertices: None,
primitive: Some(primitive),
material: None
},
vertices: collada_io::geometry::Vertices {
id: "Voxel-mesh-vertices".to_string(),
name: None,
source: "#Voxel-mesh-positions".to_string()
},
sources: vec! {
collada_io::geometry::Source {
id: "Voxel-mesh-positions".to_string(),
float_array: collada_io::geometry::FloatArray {
id: "Voxel-mesh-positions-array".to_string(),
data: mesh_positions
},
accessor: collada_io::geometry::Accessor {
params: vec! { "X".to_string(), "Y".to_string(), "Z".to_string() }
}
},
}
}
});
}
let mut file = File::create(ofpath).unwrap();
collada = collada_io::collada::Collada {
scene: Some(collada_io::scene::Scene {
visual_scenes: vec!{
"#Scene".to_string()
}
}),
visual_scenes: Some(vec!{
collada_io::scene::VisualScene {
id: "Scene".to_string(),
name: "Scene".to_string(),
nodes: vec!{
collada_io::scene::Node {
id: "Voxel".to_string(),
name: "Voxel".to_string(),
transformation_elements: vec!{
collada_io::scene::TransformationElement::Matrix {
sid: "transform".to_string(),
matrix: vec! {
1.0, 0.0, 0.0, 0.0,
0.0, 1.0, 0.0, 0.0,
0.0, 0.0, 1.0, 0.0,
0.0, 0.0, 0.0, 1.0,
}
}
},
instances: vec!{
collada_io::scene::Instance::Geometry {
url: "#Voxel-mesh".to_string(),
name: Some("Voxel".to_string()),
sid: None,
bind_material: None
}
}
}
}
}
}),
asset: collada_io::meta::Asset::default(),
geometries: Some(geometries)
};
collada.write_to(&mut file).unwrap();
}
fn _export_jasc_palette(ifpath: &str, ofpath: &str) -> std::io::Result<()>
{
let in_data = dot_vox::load(ifpath).unwrap();
let file = File::create(ofpath)?;
let mut file = LineWriter::new(file);
let len_str: String = in_data.palette.len().to_string() + "\n";
file.write_all(b"JASC\n")?;
file.write_all(b"0100\n")?;
file.write_all(len_str.as_bytes())?;
for element in &in_data.palette
{
let color: Color = (*element).into();
let color_str: String = color.into();
file.write_all((color_str + "\n").as_bytes())?;
}
Ok(())
}
fn main()
{
let app = app::new_app();
let matches = app.get_matches();
let in_file = matches.value_of("input").unwrap_or(INPUT_FILEPATH);
let out_file: &str;
if matches.is_present("stl")
{
out_file = matches.value_of("output").unwrap_or(OUTPUT_STL_FILEPATH);
convert_vox_stl(in_file, out_file);
} else if matches.is_present("dae")
{
out_file = matches.value_of("output").unwrap_or(OUTPUT_DAE_FILEPATH);
convert_vox_dae(in_file, out_file);
} else {
// Need either stl or dae so not sure what to do here
}
}
| true |
5defcb3ce16f1f41efee202e17fadfe1d23ffe44
|
Rust
|
jakeisnt/rustlings-implementation
|
/exercises/primitive_types/primitive_types4.rs
|
UTF-8
| 441 | 3.8125 | 4 |
[
"MIT"
] |
permissive
|
// primitive_types4.rs
// Get a slice out of Array a where the ??? is so that the test passes.
// Execute `rustlings hint primitive_types4` for hints!!
#[test]
fn slice_out_of_array() {
let a = [1, 2, 3, 4, 5];
// Need & to borrow the reference, which allows us to take a slice of it.
// Without '&', it's a boxed value - which we cannot take a slice of.
let nice_slice = &a[1..4];
assert_eq!([2, 3, 4], nice_slice)
}
| true |
6bd1cbacdd799a555c99a79c4d3ddafe8f9f0242
|
Rust
|
SymmetricChaos/project_euler_rust
|
/src/aux_funcs.rs
|
UTF-8
| 8,379 | 3.3125 | 3 |
[] |
no_license
|
use std::fmt::Debug;
use std::convert::TryFrom;
use std::collections::HashMap;
use mod_exp::mod_exp;
use num::traits::{Unsigned,Zero,ToPrimitive};
use std::cmp::max;
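// Decompose `n` into its digits in the given base, most significant digit first,
// e.g. int_to_digits(123u64, 10u64) == vec![1, 2, 3].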
pub fn int_to_digits<T: Unsigned + Zero + ToPrimitive + Clone>(n: T, base: T) -> Vec<u8> {
if n == T::zero() {
return vec![0u8]
}
let mut digits = Vec::new();
let mut num = n;
while num != T::zero() {
let number = num.clone();
let divisor = base.clone();
let q = number.clone() / divisor.clone();
let r = number % divisor;
digits.insert(0,r.to_u8().unwrap());
num = q;
}
return digits;
}
pub fn digits_to_int(digits: &Vec<u8>, base: u64) -> u64 {
let mut ctr = digits.len();
let mut pow = 1;
let mut out = 0;
while ctr > 0 {
ctr -= 1;
out += pow*u64::try_from(digits[ctr]).unwrap();
pow = pow*base;
}
out as u64
}
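// Schoolbook addition of two digit vectors (most significant digit first) in the
// given base, e.g. digit_addition(&vec![9u8, 9], &vec![1], 10) == vec![1, 0, 0].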
pub fn digit_addition<T: Unsigned + Zero + Copy>(a: &Vec<T>, b: &Vec<T>, base: T) -> Vec<T> {
let mut ta = a.clone();
let mut tb = b.clone();
let length = max(ta.len(),tb.len());
let zero = T::zero();
let mut out: Vec<T> = Vec::new();
while ta.len() < tb.len() {
ta.insert(0,zero)
}
while tb.len() < ta.len() {
tb.insert(0,zero)
}
let mut carry = zero;
for _ in 0..length {
let v1 = ta.pop().unwrap_or(zero);
let v2 = tb.pop().unwrap_or(zero);
let mut val = v1 + v2 + carry;
carry = val / base;
val = val % base;
out.push(val)
}
if carry != zero {
out.push(carry)
}
out.reverse();
out
}
pub fn digit_multiplication<T: Unsigned + Zero + Copy + Debug>(a: &Vec<T>, b: &Vec<T>, base: T) -> Vec<T> {
let zero = T::zero();
let mut out = vec![zero];
let mut offset = 0;
for d1 in b.iter().rev() {
let mut partial: Vec<T> = Vec::new();
for _ in 0..offset {
partial.push(zero);
}
let mut carry = zero;
for d2 in a.iter().rev() {
let mut val = *d1 * *d2 + carry;
carry = val / base;
val = val % base;
partial.push(val)
}
if carry != zero {
partial.push(carry)
}
out = digit_addition(&partial,&out,base);
offset += 1;
}
out.reverse();
out
}
pub fn sum_digits(digits: &Vec<u8>) -> u64 {
let mut sum = 0u64;
for d in digits.iter() {
sum += *d as u64
}
sum
}
// 64-bit primality test
// First checks small possible factors then switches to deterministic Miller-Rabin
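// (e.g. is_prime(2_147_483_647) == true since 2^31 - 1 is prime, while
// is_prime(2_147_483_649) == false since it is divisible by 3)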
pub fn is_prime(n: u64) -> bool {
if n <= 1 {
return false;
}
// Check all primes below 100 and all witnesses
// This quickly eliminates the vast majority of composite numbers
let small_factors = [2, 3, 5, 7, 11, 13, 17, 19, 23, 29, 31, 37, 41, 43, 47, 53, 59, 61, 67, 71, 73, 79, 83, 89, 97, 325, 9375, 28178, 450775, 9780504, 1795265022];
for p in small_factors.iter() {
if n == *p {
return true;
}
if n % *p == 0 {
return false;
}
}
let mut d = (n-1)/2;
let mut r = 1;
while d % 2 == 0 {
d /= 2;
r += 1;
}
// Witnesses found by Jim Sinclair
let witnesses = [2, 325, 9375, 28178, 450775, 9780504, 1795265022];
'outer: for w in witnesses.iter() {
let mut x = mod_exp(*w as u128,d as u128,n as u128) as u64;
if x == 1 || x == n-1 {
continue 'outer;
}
for _ in 0..r-1 {
x = mod_exp(x as u128, 2u128, n as u128) as u64;
if x == n-1 {
continue 'outer;
}
}
return false;
}
true
}
// 32-bit primality test
// First checks small possible factors then switches to deterministic Miller-Rabin
pub fn is_prime32(n: u32) -> bool {
if n <= 1 {
return false;
}
// Check all primes below 62 and all witnesses
// This quickly eliminates the vast majority of composite numbers
let small_factors = [2, 3, 5, 7, 11, 13, 17, 19, 23, 29, 31, 37, 41, 43, 47, 53, 59, 61];
for p in small_factors.iter() {
if n == *p {
return true;
}
if n % *p == 0 {
return false;
}
}
let mut d = (n-1)/2;
let mut r = 1;
while d % 2 == 0 {
d /= 2;
r += 1;
}
let witnesses = [2, 7, 61];
'outer: for w in witnesses.iter() {
let mut x = mod_exp(*w as u64,d as u64,n as u64) as u32;
if x == 1 || x == n-1 {
continue 'outer;
}
for _ in 0..r-1 {
x = mod_exp(x as u64, 2u64, n as u64) as u32;
if x == n-1 {
continue 'outer;
}
}
return false;
}
true
}
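// Unbounded incremental sieve of Eratosthenes: `sieve` maps each upcoming composite
// to the primes that will "cross it off". When `n` is reached and absent from the
// map it is a new prime; otherwise its stored factors are pushed forward to n + factor.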
pub struct PrimeSieve {
sieve: HashMap::<u64,Vec<u64>>,
n: u64,
}
impl Iterator for PrimeSieve {
type Item = u64;
fn next(&mut self) -> Option<u64> {
loop {
self.n += 1;
if !self.sieve.contains_key(&self.n) {
self.sieve.insert(self.n+self.n,vec![self.n]);
return Some(self.n)
} else {
let factors = &self.sieve[&self.n].clone();
for factor in factors {
if self.sieve.contains_key(&(factor+self.n)) {
self.sieve.get_mut(&(factor+self.n)).unwrap().push(*factor);
} else {
self.sieve.insert(factor+self.n,vec![*factor]);
}
}
self.sieve.remove(&self.n);
}
}
}
}
pub fn prime_sieve() -> PrimeSieve {
PrimeSieve{
sieve: HashMap::<u64,Vec<u64>>::new(),
n: 1u64}
}
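// The same incremental sieve, but yielding Euler's totient phi(n) for n = 2, 3, 4, ...
// Primes yield p - 1; composites apply phi(n) = n * prod((p - 1) / p) over the
// distinct prime factors gathered by the sieve.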
pub struct TotientSieve {
sieve: HashMap::<u64,Vec<u64>>,
n: u64,
}
impl Iterator for TotientSieve {
type Item = u64;
fn next(&mut self) -> Option<u64> {
loop {
self.n += 1;
if !self.sieve.contains_key(&self.n) {
self.sieve.insert(self.n+self.n,vec![self.n]);
return Some(self.n-1)
} else {
let factors = &self.sieve[&self.n].clone();
let mut num = 1;
let mut den = 1;
for factor in factors {
num *= factor-1;
den *= factor;
if self.sieve.contains_key(&(factor+self.n)) {
self.sieve.get_mut(&(factor+self.n)).unwrap().push(*factor);
} else {
self.sieve.insert(factor+self.n,vec![*factor]);
}
}
self.sieve.remove(&self.n);
return Some((self.n*num)/den)
}
}
}
}
pub fn totient_sieve() -> TotientSieve {
TotientSieve{
sieve: HashMap::<u64,Vec<u64>>::new(),
n: 1u64}
}
pub fn prime_factorization(n: u64) -> HashMap<u64,u64> {
let mut canon = HashMap::new();
let mut x = n;
let mut primes = prime_sieve();
loop {
let p = primes.next().unwrap();
let mut ctr = 0;
while x % p == 0 {
x /= p;
ctr += 1;
}
if ctr != 0 {
canon.insert(p,ctr);
}
if x == 1 {
break
}
}
canon
}
pub fn vec_identical<T: PartialEq>(va: &[T], vb: &[T]) -> bool {
(va.len() == vb.len()) && // zip stops at the shortest
va.iter()
.zip(vb)
.all(|(a,b)| *a == *b)
}
/*
fn should_swap(list: &Vec<u64>, index: usize, pos: usize) -> bool {
for i in index..pos {
if list[i] == list[pos] {
return false
}
}
true
}
// By generating distinct permutations we can produce digits with 0s for blank spaces
pub fn distinct_permutations(list: &mut Vec<u64>, index: usize) -> Vec<Vec<u64>> {
let length = list.len();
let mut out = Vec::new();
if index == length {
return vec![list.clone().to_vec()]
}
for i in index..length {
if should_swap(&list, index, i) {
list.swap(i,index);
out.extend(distinct_permutations(list,index+1));
list.swap(i,index);
}
}
out
}
*/
| true |
a9421c6e7aef50f1ce5aa3f9f93abd6345c958ba
|
Rust
|
eldruin/driver-examples
|
/stm32f1-bluepill/examples/mcp4921-ads1115-display-bp.rs
|
UTF-8
| 4,760 | 2.578125 | 3 |
[
"Apache-2.0",
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
//! Loop setting a position from 0 to 4095 to the channel 0 of a MCP4921
//! digital-to-analog converter.
//! The voltage output of the MCP4921 device will then be measured by the
//! ADS1115 analog-to-digital converter and will be printed to the
//! SSD1306 OLED display.
//!
//! This example is runs on the STM32F103 "Bluepill" board using SPI1 and I2C1.
//!
//! ```
//! BP <-> MCP4921 <-> ADS1115 <-> Display
//! GND <-> VSS <-> GND <-> GND
//! GND <-> LDAC
//! +5V <-> VDD <-> +5V <-> +5V
//! +5V <-> VREFA
//! PA5 <-> CLK
//! PA7 <-> SI
//! PA4 <-> CS
//! PB9 <-> SDA <-> SDA
//! PB8 <-> SCL <-> SCL
//! VOUTA <-> A0
//! ```
//!
//! Run with:
//! `cargo run --example mcp4921-ads1115-display-bp --release`
#![deny(unsafe_code)]
#![no_std]
#![no_main]
use ads1x1x::{channel as AdcChannel, Ads1x1x, FullScaleRange, SlaveAddr};
use core::fmt::Write;
use cortex_m_rt::entry;
use embedded_graphics::{
fonts::{Font6x8, Text},
pixelcolor::BinaryColor,
prelude::*,
style::TextStyleBuilder,
};
use embedded_hal::adc::OneShot;
use embedded_hal::blocking::delay::DelayMs;
use mcp49xx::{Command as DacCommand, Mcp49xx, MODE_0};
use nb::block;
use panic_rtt_target as _;
use rtt_target::{rprintln, rtt_init_print};
use ssd1306::{prelude::*, Builder, I2CDIBuilder};
use stm32f1xx_hal::{
delay::Delay,
i2c::{BlockingI2c, DutyCycle, Mode},
pac,
prelude::*,
spi::Spi,
};
#[entry]
fn main() -> ! {
rtt_init_print!();
rprintln!("MCP4921 example");
let cp = cortex_m::Peripherals::take().unwrap();
let dp = pac::Peripherals::take().unwrap();
let mut flash = dp.FLASH.constrain();
let rcc = dp.RCC.constrain();
let clocks = rcc.cfgr.freeze(&mut flash.acr);
let mut afio = dp.AFIO.constrain();
let mut gpioa = dp.GPIOA.split();
let mut gpiob = dp.GPIOB.split();
let scl = gpiob.pb8.into_alternate_open_drain(&mut gpiob.crh);
let sda = gpiob.pb9.into_alternate_open_drain(&mut gpiob.crh);
let i2c = BlockingI2c::i2c1(
dp.I2C1,
(scl, sda),
&mut afio.mapr,
Mode::Fast {
frequency: 400_000.hz(),
duty_cycle: DutyCycle::Ratio2to1,
},
clocks,
1000,
10,
1000,
1000,
);
// SPI1
let sck = gpioa.pa5.into_alternate_push_pull(&mut gpioa.crl);
let miso = gpioa.pa6;
let mosi = gpioa.pa7.into_alternate_push_pull(&mut gpioa.crl);
let mut cs = gpioa.pa4.into_push_pull_output(&mut gpioa.crl);
let mut spi = Spi::spi1(
dp.SPI1,
(sck, miso, mosi),
&mut afio.mapr,
MODE_0,
1_u32.mhz(),
clocks,
);
let mut gpioc = dp.GPIOC.split();
let mut led = gpioc.pc13.into_push_pull_output(&mut gpioc.crh);
let mut delay = Delay::new(cp.SYST, clocks);
let manager = shared_bus::BusManagerSimple::new(i2c);
let interface = I2CDIBuilder::new().init(manager.acquire_i2c());
let mut disp: GraphicsMode<_> = Builder::new().connect(interface).into();
disp.init().unwrap();
disp.flush().unwrap();
let text_style = TextStyleBuilder::new(Font6x8)
.text_color(BinaryColor::On)
.build();
let mut adc = Ads1x1x::new_ads1115(manager.acquire_i2c(), SlaveAddr::default());
// need to be able to measure [0-5V] since that is the reference voltage of the DAC (VREFA)
adc.set_full_scale_range(FullScaleRange::Within6_144V)
.unwrap();
cs.set_high();
let mut dac = Mcp49xx::new_mcp4921(cs);
let dac_cmd = DacCommand::default();
let mut position = 0;
loop {
// Blink LED 0 to check that everything is actually running.
// If the LED 0 does not blink, something went wrong.
led.set_high();
delay.delay_ms(50_u16);
led.set_low();
dac.send(&mut spi, dac_cmd.value(position)).unwrap();
// Read voltage in channel 0
let value_ch0 = block!(adc.read(&mut AdcChannel::SingleA0)).unwrap();
// make the number smaller for reading ease
let value_ch0 = value_ch0 >> 5;
let mut buffer: heapless::String<64> = heapless::String::new();
        // write some extra spaces after the number to clear up when the number gets smaller
write!(buffer, "Channel 0: {} ", value_ch0).unwrap();
// print
disp.clear();
Text::new(&buffer, Point::zero())
.into_styled(text_style)
.draw(&mut disp)
.unwrap();
disp.flush().unwrap();
        // position actually only reaches 4080; the next increment would push it
        // past 4095, so we wrap it back to 0.
position += 255;
if position >= 1 << 12 {
position = 0
}
}
}
| true |
2cf32e6c548fcbacc16ca9a0d5de9eb626bed9e4
|
Rust
|
chop0/aardvark-rust
|
/src/error.rs
|
UTF-8
| 539 | 2.640625 | 3 |
[] |
no_license
|
use crate::var::AardvarkValue;
use std::error::Error;
use std::fmt;
#[derive(Debug)]
pub enum AardvarkError {
Return(Box<dyn AardvarkValue>),
Other(String)
}
impl fmt::Display for AardvarkError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
AardvarkError::Return(_) => {
fmt::Result::Ok(())
}
AardvarkError::Other(other) => {
f.write_str(other.as_str())
}
}
}
}
impl Error for AardvarkError {
}
| true |
0f9c69555c80847f6c40f1b72cd8a2110a6d8535
|
Rust
|
naomijub/atm-crux
|
/src/ui/mod.rs
|
UTF-8
| 5,905 | 2.796875 | 3 |
[] |
no_license
|
mod login_input;
mod menu;
mod operation_input;
mod result_pages;
use iced::{button, text_input, Align, Column, Container, Element, Length, Sandbox, Text};
use crate::db::{create_account, deposit, statement, withdraw};
use login_input as login;
use menu::Menu;
use operation_input as op;
use result_pages as pages;
use transistor::client::Crux;
#[derive(Default)]
pub struct Atm {
value: i64,
account_info: String,
login_button: button::State,
withdraw_button: button::State,
deposit_button: button::State,
statement_button: button::State,
user_ok_button: button::State,
create_user_button: button::State,
confirm_button: button::State,
statement: Vec<String>,
state: State,
user_input: text_input::State,
user_value: String,
account_input: text_input::State,
account_value: String,
password_input: text_input::State,
password_value: String,
operation_input: text_input::State,
operation_value: String,
}
#[derive(Debug, Clone, Copy)]
pub enum State {
Login,
Operation(usize),
User,
Menu,
Cashed,
NewBalance,
Statement,
}
impl Default for State {
fn default() -> Self {
State::Menu
}
}
#[derive(Debug, Clone)]
pub enum Message {
LoginSelected,
WithdrawSelected,
DepositSelected,
StatementSelected,
CreatingUser,
Withdrawn,
Deposited,
UserOk,
InputChanged(String),
AccountInputChanged(String),
PasswordInputChanged(String),
OperationInputChanged(String),
}
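// The handlers below talk to a Crux node assumed to be listening on localhost:3000;
// each banking operation opens a fresh HTTP client, runs the query or transaction,
// clears the typed password, and switches `state` to the page that `view` renders next.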
impl Sandbox for Atm {
type Message = Message;
fn new() -> Self {
Self::default()
}
fn title(&self) -> String {
String::from("Atm with Crux")
}
fn update(&mut self, message: Message) {
match message {
Message::LoginSelected => {
self.state = State::Login;
}
Message::UserOk => {
self.state = State::Menu;
self.value = 0;
}
Message::CreatingUser => {
let client = Crux::new("localhost", "3000").http_client();
let account_info = create_account(
&client,
self.user_value.clone(),
self.account_value.clone().parse::<u32>().unwrap_or(0),
self.password_value.clone().parse::<u32>().unwrap_or(0),
300i64,
)
.unwrap_or("error".to_string());
self.password_value = String::new();
self.account_info = account_info;
self.state = State::User;
}
Message::WithdrawSelected => {
self.state = State::Operation(0);
}
Message::Withdrawn => {
let client = Crux::new("localhost", "3000").http_client();
let money = withdraw(
&client,
self.account_value.clone().parse::<u32>().unwrap_or(0),
self.password_value.clone().parse::<u32>().unwrap_or(0),
self.operation_value.clone().parse::<u32>().unwrap_or(0) as i64,
)
.unwrap_or(0i64);
self.password_value = String::new();
self.value = money;
self.state = State::Cashed;
}
Message::DepositSelected => {
self.state = State::Operation(1);
}
Message::Deposited => {
let client = Crux::new("localhost", "3000").http_client();
let money = deposit(
&client,
self.account_value.clone().parse::<u32>().unwrap_or(0),
self.password_value.clone().parse::<u32>().unwrap_or(0),
self.operation_value.clone().parse::<u32>().unwrap_or(0) as i64,
)
.unwrap_or(0i64);
self.password_value = String::new();
self.value = money;
self.state = State::NewBalance;
}
Message::StatementSelected => {
let client = Crux::new("localhost", "3000").http_client();
                let statement = statement(
                    &client,
                    self.account_value.clone().parse::<u32>().unwrap_or(0),
                )
                .unwrap_or(Vec::new());
self.statement = statement;
self.state = State::Statement;
}
Message::InputChanged(user) => self.user_value = user,
Message::AccountInputChanged(account) => self.account_value = account,
Message::PasswordInputChanged(pswd) => self.password_value = pswd,
Message::OperationInputChanged(v) => self.operation_value = v,
}
}
fn view(&mut self) -> Element<Message> {
Container::new(match self.state {
State::Login => Column::new().push(login::Login::view(self)),
State::Operation(v) if v == 0usize => {
Column::new().push(op::Operation::view(self, Message::Withdrawn))
}
State::Operation(_) => {
Column::new().push(op::Operation::view(self, Message::Deposited))
}
State::User => Column::new().push(pages::User::view(self)),
State::Cashed => Column::new().push(pages::Cashed::view(self)),
State::NewBalance => Column::new().push(pages::NewBalance::view(self)),
State::Statement => Column::new().push(pages::Statement::view(self)),
State::Menu => Column::new()
.spacing(20)
.padding(100)
.align_items(Align::Center)
.push(Text::new("Naomi Bank ATM").size(50))
.push(Menu::view(self)),
})
.width(Length::Fill)
.height(Length::Fill)
.center_x()
.center_y()
.into()
}
}
| true |
14e8fc7c1551ab71636e4f63de790de45c1de6cf
|
Rust
|
doitian/ckb
|
/sync/src/utils.rs
|
UTF-8
| 3,521 | 2.890625 | 3 |
[
"MIT"
] |
permissive
|
use crate::{Status, StatusCode};
use ckb_error::{Error as CKBError, ErrorKind, InternalError, InternalErrorKind};
use ckb_metrics::metrics;
use ckb_network::{CKBProtocolContext, PeerIndex, ProtocolId, SupportProtocols};
use ckb_types::packed::{RelayMessageReader, SyncMessageReader};
use ckb_types::prelude::*;
/// Send network message into parameterized `protocol_id` protocol connection.
///
/// Equal to `nc.send_message`.
#[must_use]
pub(crate) fn send_message<Message: Entity>(
protocol_id: ProtocolId,
nc: &dyn CKBProtocolContext,
peer_index: PeerIndex,
message: &Message,
) -> Status {
if let Err(err) = nc.send_message(protocol_id, peer_index, message.as_bytes()) {
let name = message_name(protocol_id, message);
let error_message = format!("nc.send_message {}, error: {:?}", name, err);
ckb_logger::error!("{}", error_message);
return StatusCode::Network.with_context(error_message);
}
let bytes = message.as_bytes().len() as u64;
let item_id = item_id(protocol_id, message);
metrics!(
counter,
"ckb.messages_bytes", bytes,
"direction" => "out",
"protocol_id" => protocol_id.value().to_string(),
"item_id" => item_id.to_string(),
);
Status::ok()
}
/// Send network message into `nc.protocol_id()` protocol connection.
///
/// Equal to `nc.send_message_to`.
#[must_use]
pub(crate) fn send_message_to<Message: Entity>(
nc: &dyn CKBProtocolContext,
peer_index: PeerIndex,
message: &Message,
) -> Status {
let protocol_id = nc.protocol_id();
send_message(protocol_id, nc, peer_index, message)
}
// As for Sync protocol and Relay protocol, returns the internal item name;
// otherwise returns the entity name.
fn message_name<Message: Entity>(protocol_id: ProtocolId, message: &Message) -> String {
if protocol_id == SupportProtocols::Sync.protocol_id() {
SyncMessageReader::new_unchecked(message.as_slice())
.to_enum()
.item_name()
.to_owned()
} else if protocol_id == SupportProtocols::RelayV2.protocol_id() {
RelayMessageReader::new_unchecked(message.as_slice())
.to_enum()
.item_name()
.to_owned()
} else {
Message::NAME.to_owned()
}
}
// As for Sync protocol and Relay protocol, returns the internal item id;
// otherwise returns 0.
fn item_id<Message: Entity>(protocol_id: ProtocolId, message: &Message) -> u32 {
if protocol_id == SupportProtocols::Sync.protocol_id() {
SyncMessageReader::new_unchecked(message.as_slice()).item_id()
} else if protocol_id == SupportProtocols::RelayV2.protocol_id() {
RelayMessageReader::new_unchecked(message.as_slice()).item_id()
} else {
0
}
}
/// return whether the error's kind is `InternalErrorKind::Database`
///
/// ### Panic
///
/// Panic if the error kind is `InternalErrorKind::DataCorrupted`.
/// If the database is corrupted, panic is better than handle it silently.
pub(crate) fn is_internal_db_error(error: &CKBError) -> bool {
if error.kind() == ErrorKind::Internal {
let error_kind = error
.downcast_ref::<InternalError>()
.expect("error kind checked")
.kind();
if error_kind == InternalErrorKind::DataCorrupted {
panic!("{}", error)
} else {
return error_kind == InternalErrorKind::Database
|| error_kind == InternalErrorKind::System;
}
}
false
}
| true |
2f644452416649770d500e5a3284dd4e5238c5e2
|
Rust
|
doytsujin/teloc
|
/teloc/src/service_provider.rs
|
UTF-8
| 13,561 | 3.1875 | 3 |
[
"MIT",
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
] |
permissive
|
use crate::container::{
ConvertContainer, Init, InstanceContainer, SingletonContainer, TransientContainer,
};
use crate::scope::{InitScoped, ScopedContainerElem, ScopedInstanceContainer};
use crate::Scope;
use frunk::hlist::{HList, Selector};
use frunk::{HCons, HNil};
use std::marker::PhantomData;
/// `ServiceProvider` struct is used as an IoC-container in which you declare your dependencies.
///
/// The algorithm for working with `ServiceProvider` is:
/// 1. Create an empty one with the `ServiceProvider::new` function.
/// 2. Declare your dependencies using the `add_*` methods (more about them below).
/// 3. Create a `Scope` when you need to work with scoped sessions (like when you process a web request).
/// 4. Get the needed dependencies from the container using the `Resolver::resolve` trait.
///
/// If you do not register all of the needed dependencies, the compiler will not compile your code. If an error
/// puts you into a stupor, read our [manual] about how to read errors.
///
/// [manual]: https://github.com/p0lunin/teloc/blob/master/HOW-TO-READ-ERRORS.md
///
/// Example of using `ServiceProvider`:
/// ```
/// use teloc::*;
/// struct ConstService {
/// number: i32,
/// }
///
/// #[inject]
/// impl ConstService {
/// pub fn new(number: i32) -> Self {
/// ConstService { number }
/// }
/// }
///
/// #[derive(Dependency)]
/// struct Controller {
/// number_service: ConstService,
/// }
///
/// let container = ServiceProvider::new()
/// .add_scoped_i::<i32>()
/// .add_transient::<ConstService>()
/// .add_transient::<Controller>();
/// let scope = container.scope(teloc::scopei![10]);
/// let controller: Controller = scope.resolve();
/// assert_eq!(controller.number_service.number, 10);
/// ```
pub struct ServiceProvider<Dependencies, Scoped, ScopedI> {
dependencies: Dependencies,
scoped_i: PhantomData<ScopedI>,
scoped: PhantomData<Scoped>,
}
impl ServiceProvider<HNil, HNil, HNil> {
/// Create an empty instance of `ServiceProvider`
pub fn new() -> Self {
ServiceProvider {
dependencies: HNil,
scoped_i: PhantomData,
scoped: PhantomData,
}
}
}
impl Default for ServiceProvider<HNil, HNil, HNil> {
fn default() -> Self {
Self::new()
}
}
// Clippy requires us to create type aliases
type ContainerTransientAddConvert<T, U, H, S, SI> =
ServiceProvider<HCons<ConvertContainer<TransientContainer<T>, T, U>, H>, S, SI>;
type ContainerSingletonAddConvert<T, U, H, S, SI> =
ServiceProvider<HCons<ConvertContainer<SingletonContainer<T>, T, U>, H>, S, SI>;
impl<H: HList, S, SI> ServiceProvider<H, S, SI> {
    /// Method used primarily for internal actions. In common usage you don't need to use it. It adds a dependency to the store. You need
    /// to pass some `ContainerElem` type as the first generic parameter.
/// Usage:
///
/// ```
/// use teloc::*;
/// use teloc::container::TransientContainer;
///
/// struct Service {
/// data: i32,
/// }
///
/// let sp = ServiceProvider::new()
/// ._add::<TransientContainer<Service>>(());
/// ```
pub fn _add<T: Init>(self, data: T::Data) -> ServiceProvider<HCons<T, H>, S, SI> {
let ServiceProvider { dependencies, .. } = self;
ServiceProvider {
dependencies: dependencies.prepend(T::init(data)),
scoped_i: PhantomData,
scoped: PhantomData,
}
}
    /// Add a dependency with the `Transient` lifetime. Transient services will be created each time
    /// they are resolved. Use this lifetime for lightweight stateless services.
///
/// Can be resolved only by ownership.
///
/// Usage:
/// ```
/// use teloc::*;
/// use uuid::Uuid;
///
/// struct Service { uuid: Uuid }
/// #[inject]
/// impl Service {
/// fn new() -> Self { Self { uuid: Uuid::new_v4() } }
/// }
///
/// let sp = ServiceProvider::new()
/// .add_transient::<Service>();
///
/// let s1: Service = sp.resolve();
/// let s2: Service = sp.resolve();
///
/// assert_ne!(s1.uuid, s2.uuid);
/// ```
pub fn add_transient<T>(self) -> ServiceProvider<HCons<TransientContainer<T>, H>, S, SI>
where
TransientContainer<T>: Init<Data = ()>,
{
self._add::<TransientContainer<T>>(())
}
    /// Add a dependency with the `Scoped` lifetime. Scoped services will be created only once per
    /// scope, which can be created using the `ServiceProvider::scope` method. Scoped dependencies
    /// are not available in `ServiceProvider`, only in `Scope`.
///
/// Can be resolved by reference or by cloning. If you wish to clone this dependency then it
/// must implement `DependencyClone` trait. For more information see `DependencyClone` trait.
///
/// Usage:
/// ```
/// use teloc::*;
/// use uuid::Uuid;
///
/// struct Service { uuid: Uuid }
/// #[inject]
/// impl Service {
/// fn new() -> Self { Self { uuid: Uuid::new_v4() } }
/// }
///
/// let sp = ServiceProvider::new()
/// .add_scoped::<Service>();
///
/// // .scope_() is a wrapper for .scope(HNil)
/// let scope1 = sp.scope_();
///
/// let s1: &Service = scope1.resolve();
/// let s2: &Service = scope1.resolve();
///
/// let scope2 = sp.scope_();
/// let s3: &Service = scope2.resolve();
///
/// assert_eq!(s1.uuid, s2.uuid);
/// assert_ne!(s1.uuid, s3.uuid);
/// ```
///
/// Usage with cloning:
///
/// ```
/// use teloc::*;
/// use uuid::Uuid;
/// use std::rc::Rc;
///
/// struct Service { uuid: Uuid }
/// #[inject]
/// impl Service {
/// fn new() -> Self { Self { uuid: Uuid::new_v4() } }
/// }
///
/// let sp = ServiceProvider::new()
/// .add_scoped::<Rc<Service>>();
///
/// let scope = sp.scope_();
///
/// let s1: Rc<Service> = scope.resolve();
/// let s2: Rc<Service> = scope.resolve();
///
/// assert_eq!(s1.uuid, s2.uuid)
/// ```
#[inline]
pub fn add_scoped<T>(self) -> ServiceProvider<H, HCons<ScopedContainerElem<T>, S>, SI> {
let ServiceProvider { dependencies, .. } = self;
ServiceProvider {
dependencies,
scoped_i: PhantomData,
scoped: PhantomData,
}
}
    /// Add information about an instance that should be added to the `Scope` before its initialization.
    /// It can be a `Request`, `DbConnection`, etc. It must be passed to the `ServiceProvider::scope`
    /// method later. Scoped dependencies are not available in `ServiceProvider`, only in `Scope`.
///
/// Usage:
/// ```
/// use teloc::*;
///
/// #[derive(Debug, PartialEq)]
/// struct City(String);
///
    /// // Note that we do not implement `DependencyClone` for City, so the only way to get a `City`
    /// // value is by reference
/// struct WeatherService<'a> { city: &'a City }
/// #[inject]
/// impl<'a> WeatherService<'a> {
/// fn new(city: &'a City) -> Self { Self { city } }
/// }
///
/// let sp = ServiceProvider::new()
/// .add_scoped_i::<City>()
/// .add_scoped::<WeatherService>();
///
/// let scope = sp.scope(scopei![City("Odessa".into()),]);
///
/// let s1: &WeatherService = scope.resolve();
/// let s2: &WeatherService = scope.resolve();
///
/// assert_eq!(s1.city.0, "Odessa".to_string());
/// assert_eq!(s1.city, s2.city);
/// ```
#[inline]
pub fn add_scoped_i<T>(self) -> ServiceProvider<H, S, HCons<ScopedInstanceContainer<T>, SI>> {
let ServiceProvider { dependencies, .. } = self;
ServiceProvider {
dependencies,
scoped_i: PhantomData,
scoped: PhantomData,
}
}
    /// Add a dependency with the `Singleton` lifetime. Singleton services will be created only once,
    /// when they are first resolved. The same instance is shared between calls to
    /// `Scope::resolve` and `ServiceProvider::resolve`.
///
/// Can be resolved by reference or by cloning. If you wish to clone this dependency then it
/// must implement `DependencyClone` trait. For more information see `DependencyClone` trait.
///
/// Usage:
/// ```
/// use teloc::*;
/// use uuid::Uuid;
///
/// struct Service { uuid: Uuid }
/// #[inject]
/// impl Service {
/// fn new() -> Self { Self { uuid: Uuid::new_v4() } }
/// }
///
/// let sp = ServiceProvider::new()
/// .add_singleton::<Service>();
/// let scope = sp.scope_();
///
/// let s1: &Service = sp.resolve();
/// let s2: &Service = scope.resolve();
///
/// assert_eq!(s1.uuid, s2.uuid);
/// ```
///
/// Usage by cloning is the same as in `ServiceProvider::add_scoped` method.
pub fn add_singleton<T>(self) -> ServiceProvider<HCons<SingletonContainer<T>, H>, S, SI>
where
SingletonContainer<T>: Init<Data = ()>,
{
self._add::<SingletonContainer<T>>(())
}
    /// Add any ready-made instance to the provider. It is like a singleton, but it cannot get dependencies from
    /// the provider. Use it for adding single objects like configs.
///
/// Can be resolved by reference or by cloning. If you wish to clone this dependency then it
/// must implement `DependencyClone` trait. For more information see `DependencyClone` trait.
///
/// Usage:
/// ```
/// use teloc::*;
///
/// #[derive(Debug, PartialEq)]
/// struct Config { token: String, ip: String }
///
/// struct Service<'a> { token: &'a str, ip: &'a str }
/// #[inject]
/// impl<'a> Service<'a> {
/// fn new(config: &'a Config) -> Self { Self { token: &config.token, ip: &config.ip } }
/// }
///
/// let config = Config { token: "1234ABCDE".into(), ip: "192.168.0.1".into() };
///
/// let sp = ServiceProvider::new()
/// .add_instance(&config)
/// .add_transient::<Service>();
///
/// let config_ref: &Config = sp.resolve();
/// let s: Service = sp.resolve();
///
/// assert_eq!(&config, config_ref);
/// assert_eq!(&config_ref.token, s.token);
/// assert_eq!(&config_ref.ip, s.ip);
/// ```
pub fn add_instance<T>(self, data: T) -> ServiceProvider<HCons<InstanceContainer<T>, H>, S, SI>
where
InstanceContainer<T>: Init<Data = T>,
{
self._add::<InstanceContainer<T>>(data)
}
    /// Same as `ServiceProvider::add_transient`, but can be used to convert one type to another
/// when resolving. Can be used for creating `Box<dyn Trait>` instances, for example.
///
/// Usage:
/// ```
/// use teloc::*;
///
/// trait NumberService {
/// fn get_num(&self) -> i32;
/// }
///
/// struct TenService {
/// number: i32,
/// }
/// impl NumberService for TenService {
/// fn get_num(&self) -> i32 {
/// self.number
/// }
/// }
/// #[inject]
/// impl TenService {
/// fn new() -> Self {
/// Self { number: 10 }
/// }
/// }
    /// impl From<Box<TenService>> for Box<dyn NumberService> {
/// fn from(x: Box<TenService>) -> Self {
/// x
/// }
/// }
///
/// #[derive(Dependency)]
/// struct Controller {
/// number_service: Box<dyn NumberService>,
/// }
///
/// let container = ServiceProvider::new()
/// .add_transient_c::<Box<dyn NumberService>, Box<TenService>>()
/// .add_transient::<Controller>();
/// let controller: Controller = container.resolve();
///
/// assert_eq!(controller.number_service.get_num(), 10);
/// ```
pub fn add_transient_c<U, T>(self) -> ContainerTransientAddConvert<T, U, H, S, SI>
where
T: Into<U>,
ConvertContainer<TransientContainer<T>, T, U>: Init<Data = ()>,
TransientContainer<T>: Init<Data = ()>,
{
self._add::<ConvertContainer<TransientContainer<T>, T, U>>(())
}
/// Same as `Provider::add_transient_c` but for `Singleton` lifetime.
pub fn add_singleton_c<U, T>(self) -> ContainerSingletonAddConvert<T, U, H, S, SI>
where
T: Into<U>,
ConvertContainer<SingletonContainer<T>, T, U>: Init<Data = ()>,
TransientContainer<T>: Init<Data = ()>,
{
self._add::<ConvertContainer<SingletonContainer<T>, T, U>>(())
}
}
impl<'a, H, S, SI> ServiceProvider<H, S, SI>
where
S: InitScoped,
{
    /// Create a `Scope` for working with dependencies that have the `Scoped` lifetime. You must pass to the
    /// scope the instances that you registered with `ServiceProvider::add_scoped_i`, using the `scopei![]`
    /// macro.
pub fn scope(&self, si: SI) -> Scope<Self, S, SI> {
Scope::new(self, si)
}
}
impl<'a, H, S> ServiceProvider<H, S, HNil>
where
S: InitScoped,
{
    /// Wrapper for `ServiceProvider::scope(self, HNil)`, for when you have no scoped instances.
pub fn scope_(&self) -> Scope<Self, S, HNil> {
self.scope(HNil)
}
}
impl<H, S, SI> ServiceProvider<H, S, SI> {
pub(crate) fn dependencies(&self) -> &H {
&self.dependencies
}
}
impl<H, S, SI, T, Index> Selector<T, Index> for ServiceProvider<H, S, SI>
where
H: Selector<T, Index>,
{
fn get(&self) -> &T {
self.dependencies().get()
}
fn get_mut(&mut self) -> &mut T {
self.dependencies.get_mut()
}
}
| true |
06288adb928b419a8123d7870118181b1015241e
|
Rust
|
Tubbz-alt/lrad
|
/lrad-lib/src/docker.rs
|
UTF-8
| 8,958 | 2.671875 | 3 |
[
"MIT"
] |
permissive
|
use actix_web::{client, HttpMessage};
use futures::prelude::*;
use git2::Repository;
use percent_encoding::{utf8_percent_encode, QUERY_ENCODE_SET};
use std::collections::HashMap;
use tar::Builder;
use tokio_uds::UnixStream;
use crate::error::Error;
use crate::vcs::VcsError;
use std::time::Duration;
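// Build an image by tarring the repository's working tree and POSTing it as the
// build context to the Docker Engine API (v1.39) over /var/run/docker.sock, tagged
// with `image_name`. The future resolves to whether Docker reported success.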
pub fn build_image(
repo: &Repository,
image_name: String,
) -> impl Future<Item = bool, Error = Error> {
let repo_path = repo.path().parent().unwrap().to_path_buf();
let is_bare = repo.is_bare();
debug!("Opening Unix socket");
UnixStream::connect("/var/run/docker.sock")
.map_err(|err| Error::from(err))
.and_then(move |stream| {
if is_bare {
return Err(VcsError::RepoShouldNotBeBare.into());
}
debug!("Unix stream opened, preparing to send build request");
debug!("Building tarball");
// TODO: convert this to actor and stream contents to request
let mut ar = Builder::new(Vec::new());
ar.append_dir_all(".", &repo_path).unwrap();
ar.finish().unwrap();
debug!("Tarball ready");
Ok((stream, ar))
})
.and_then(move |(stream, ar)| {
client::post(format!(
"/v1.39/build?t={}",
utf8_percent_encode(&image_name, QUERY_ENCODE_SET)
))
.header("Host", "lrad")
.with_connection(client::Connection::from_stream(stream))
.timeout(Duration::from_secs(3600))
.body(ar.into_inner().unwrap())
.map(|x| {
debug!("Sending Docker build request...");
x
})
.unwrap()
.send()
.map_err(|err| Error::from(err))
.and_then(|res| {
let is_success = res.status().is_success();
res.body()
.and_then(|bytes| {
debug!("Parsing Docker build response... {:?}", bytes);
Ok(())
})
.then(move |_| Ok(is_success))
})
})
}
#[derive(Deserialize)]
pub struct CreateContainerResponse {
#[serde(rename = "Id")]
pub id: String,
#[serde(rename = "Warnings")]
pub warnings: Option<Vec<String>>,
}
#[derive(Serialize)]
struct CreateContainerRequest {
#[serde(rename = "Image")]
image: String,
#[serde(rename = "HostConfig")]
host_config: Option<HostConfig>,
}
#[derive(Serialize)]
pub struct HostConfig {
#[serde(rename = "PublishAllPorts")]
pub publish_all_ports: Option<bool>,
#[serde(rename = "PortBindings")]
pub port_bindings: HashMap<String, Vec<PortBinding>>,
}
#[derive(Serialize, Debug)]
pub struct PortBinding {
#[serde(rename = "HostIp")]
host_ip: Option<String>,
#[serde(rename = "HostPort")]
host_port: String,
}
impl From<&crate::config::PortBinding> for PortBinding {
fn from(other: &crate::config::PortBinding) -> Self {
Self {
host_ip: other.host_ip.map(|x| x.to_string()),
host_port: other.host_port.to_string(),
}
}
}
pub fn create_new_container(
image: String,
container_name: Option<String>,
host_config: Option<HostConfig>,
) -> impl Future<Item = CreateContainerResponse, Error = Error> {
UnixStream::connect("/var/run/docker.sock")
.map_err(|err| Error::from(err))
.and_then(move |stream| {
client::post("/v1.39/containers/create")
.header("Host", "lrad")
.with_connection(client::Connection::from_stream(stream))
.timeout(Duration::from_secs(30))
.json(CreateContainerRequest {
image,
host_config,
})
.map(|x| {
debug!("Sending Docker create container...");
x
})
.unwrap()
.send()
.map_err(|err| Error::from(err))
.and_then(|res| res.json().map_err(|err| Error::from(err)))
})
}
pub fn force_remove_running_container(
container_id: String,
) -> impl Future<Item = bool, Error = Error> {
debug!("Opening Unix socket");
debug!("Preparing to remove container {}", container_id);
UnixStream::connect("/var/run/docker.sock")
.map_err(|err| Error::from(err))
.and_then(move |stream| {
debug!("Unix stream opened, preparing to send build request");
client::delete(format!("/v1.39/containers/{}?force=true", container_id))
.header("Host", "lrad")
.with_connection(client::Connection::from_stream(stream))
.timeout(Duration::from_secs(30))
.finish()
.map(|x| {
debug!("Sending Docker remove containers request...");
x
})
.unwrap()
.send()
.map_err(|err| Error::from(err))
.and_then(|res| {
let is_success = res.status().is_success();
res.body()
.and_then(|bytes| {
debug!("Parsing Docker remove container response... {:?}", bytes);
Ok(())
})
.then(move |_| Ok(is_success))
})
})
}
#[derive(Deserialize)]
pub struct ListContainersResponse {
#[serde(rename = "Id")]
pub id: String,
#[serde(rename = "Image")]
pub image: String,
#[serde(rename = "State")]
pub state: String,
}
pub fn list_containers() -> impl Future<Item = Vec<ListContainersResponse>, Error = Error> {
debug!("Opening Unix socket");
UnixStream::connect("/var/run/docker.sock")
.map_err(|err| Error::from(err))
.and_then(move |stream| {
debug!("Unix stream opened, preparing to send list request");
client::get("/v1.39/containers/json")
.header("Host", "lrad")
.with_connection(client::Connection::from_stream(stream))
.timeout(Duration::from_secs(30))
.finish()
.map(|x| {
debug!("Sending Docker list containers request...");
x
})
.unwrap()
.send()
.map_err(|err| Error::from(err))
.and_then(|res| res.json().map_err(|err| Error::from(err)))
})
}
#[derive(Deserialize)]
pub struct ListImagesResponse {
#[serde(rename = "Id")]
pub id: String,
#[serde(rename = "RepoTags")]
pub repo_tags: Vec<String>,
#[serde(rename = "Containers")]
pub containers: i32,
}
pub fn list_images() -> impl Future<Item = Vec<ListImagesResponse>, Error = Error> {
debug!("Opening Unix socket");
UnixStream::connect("/var/run/docker.sock")
.map_err(|err| Error::from(err))
.and_then(move |stream| {
debug!("Unix stream opened, preparing to send list request");
client::get("/v1.39/images/json")
.header("Host", "lrad")
.with_connection(client::Connection::from_stream(stream))
.timeout(Duration::from_secs(30))
.finish()
.map(|x| {
debug!("Sending Docker list containers request...");
x
})
.unwrap()
.send()
.map_err(|err| Error::from(err))
.and_then(|res| res.json().map_err(|err| Error::from(err)))
})
}
pub fn start_container(container_id: String) -> impl Future<Item = bool, Error = Error> {
debug!("Opening Unix socket");
UnixStream::connect("/var/run/docker.sock")
.map_err(|err| Error::from(err))
.and_then(move |stream| {
debug!("Unix stream opened, preparing to send start request");
client::post(format!("/v1.39/containers/{}/start", container_id))
.header("Host", "lrad")
.with_connection(client::Connection::from_stream(stream))
.timeout(Duration::from_secs(30))
.finish()
.map(|x| {
debug!("Sending Docker start request...");
x
})
.unwrap()
.send()
.map_err(|err| Error::from(err))
.and_then(|res| {
let is_success = res.status().is_success();
res.body()
.and_then(|bytes| {
debug!("Parsing Docker start container response... {:?}", bytes);
Ok(())
})
.then(move |_| Ok(is_success))
})
})
}
| true |
57ab4d341540e8aa8de7b0079cf2eab84824bf38
|
Rust
|
gilescope/wasm-tools
|
/crates/wasm-encoder/src/tables.rs
|
UTF-8
| 2,194 | 3.515625 | 4 |
[
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference",
"LLVM-exception"
] |
permissive
|
use super::*;
/// An encoder for the table section.
///
/// # Example
///
/// ```
/// use wasm_encoder::{Module, TableSection, TableType, ValType};
///
/// let mut tables = TableSection::new();
/// tables.table(TableType {
/// element_type: ValType::FuncRef,
/// minimum: 128,
/// maximum: None,
/// });
///
/// let mut module = Module::new();
/// module.section(&tables);
///
/// let wasm_bytes = module.finish();
/// ```
#[derive(Clone, Debug)]
pub struct TableSection {
bytes: Vec<u8>,
num_added: u32,
}
impl TableSection {
/// Construct a new table section encoder.
pub fn new() -> TableSection {
TableSection {
bytes: vec![],
num_added: 0,
}
}
/// How many tables have been defined inside this section so far?
pub fn len(&self) -> u32 {
self.num_added
}
/// Define a table.
pub fn table(&mut self, table_type: TableType) -> &mut Self {
table_type.encode(&mut self.bytes);
self.num_added += 1;
self
}
}
impl Section for TableSection {
fn id(&self) -> u8 {
SectionId::Table.into()
}
fn encode<S>(&self, sink: &mut S)
where
S: Extend<u8>,
{
let num_added = encoders::u32(self.num_added);
let n = num_added.len();
sink.extend(
encoders::u32(u32::try_from(n + self.bytes.len()).unwrap())
.chain(num_added)
.chain(self.bytes.iter().copied()),
);
}
}
/// A table's type.
#[derive(Clone, Copy, Debug)]
pub struct TableType {
/// The table's element type.
pub element_type: ValType,
/// Minimum size, in elements, of this table
pub minimum: u32,
/// Maximum size, in elements, of this table
pub maximum: Option<u32>,
}
impl TableType {
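    // Binary encoding of a table type: the element type byte, then a limits flags
    // byte (bit 0 set when a maximum is present), then the minimum and optional
    // maximum encoded via `encoders::u32` (unsigned LEB128).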
pub(crate) fn encode(&self, bytes: &mut Vec<u8>) {
bytes.push(self.element_type.into());
let mut flags = 0;
if self.maximum.is_some() {
flags |= 0b001;
}
bytes.push(flags);
bytes.extend(encoders::u32(self.minimum));
if let Some(max) = self.maximum {
bytes.extend(encoders::u32(max));
}
}
}
| true |
8f91ba10e2dbdfe3a85dcbb9d2142b783072860a
|
Rust
|
01intelligence/hulk
|
/src/strset/mod.rs
|
UTF-8
| 15,197 | 3.171875 | 3 |
[
"Apache-2.0"
] |
permissive
|
use std::collections::HashSet;
use std::fmt;
use serde::de::{self, Deserialize, Deserializer, SeqAccess, Visitor};
use serde::ser::{Serialize, SerializeSeq, Serializer};
#[derive(PartialEq, Eq, Clone, Debug)]
pub struct StringSet(HashSet<String>);
impl StringSet {
pub fn new() -> StringSet {
StringSet(HashSet::new())
}
pub fn from_slice(ss: &[&str]) -> StringSet {
StringSet(ss.iter().map(|&s| s.into()).collect())
}
pub fn from_vec(ss: Vec<String>) -> StringSet {
StringSet(ss.into_iter().collect())
}
pub fn as_slice(&self) -> Vec<&str> {
let mut ss: Vec<&str> = self.0.iter().map(|s| s as &str).collect();
ss.sort_unstable();
ss
}
pub fn to_vec(&self) -> Vec<String> {
let mut ss: Vec<String> = self.0.iter().cloned().collect();
ss.sort_unstable();
ss
}
pub fn iter(&self) -> std::collections::hash_set::Iter<'_, String> {
self.0.iter()
}
pub fn into_iter(self) -> std::collections::hash_set::IntoIter<String> {
self.0.into_iter()
}
pub fn is_empty(&self) -> bool {
self.0.is_empty()
}
pub fn len(&self) -> usize {
self.0.len()
}
pub fn add(&mut self, s: String) {
self.0.insert(s);
}
pub fn remove(&mut self, s: &str) {
self.0.remove(s);
}
pub fn contains(&self, s: &str) -> bool {
self.0.contains(s)
}
pub fn match_fn<F>(&self, mut match_fn: F) -> StringSet
where
F: FnMut(&str) -> bool,
{
StringSet(self.0.iter().filter(|&s| match_fn(s)).cloned().collect())
}
pub fn apply_fn<F>(&self, apply_fn: F) -> StringSet
where
F: Fn(&str) -> String,
{
StringSet(self.0.iter().map(|s| apply_fn(s)).collect())
}
pub fn intersection(&self, other: &StringSet) -> StringSet {
StringSet(self.0.intersection(&other.0).cloned().collect())
}
pub fn difference(&self, other: &StringSet) -> StringSet {
StringSet(self.0.difference(&other.0).cloned().collect())
}
pub fn union(&self, other: &StringSet) -> StringSet {
StringSet(self.0.union(&other.0).cloned().collect())
}
}
impl Default for StringSet {
fn default() -> Self {
Self::new()
}
}
impl fmt::Display for StringSet {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let slice = self.as_slice();
write!(f, "[")?;
write!(f, "{}", slice.join(","))?;
write!(f, "]")
}
}
impl std::iter::FromIterator<String> for StringSet {
fn from_iter<T: IntoIterator<Item = String>>(iter: T) -> Self {
let mut ss = StringSet::new();
for s in iter {
ss.add(s);
}
ss
}
}
impl<'a> Serialize for StringSet {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
use serde::ser::Error;
let mut seq = serializer.serialize_seq(Some(self.0.len()))?;
for v in &self.0 {
seq.serialize_element(v)?;
}
seq.end()
}
}
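// Accepts either a bare string (treated as a one-element set) or a sequence of
// strings; both forms deserialize into the same `StringSet`.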
impl<'de> Deserialize<'de> for StringSet {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
struct StringSetVisitor;
impl<'de> Visitor<'de> for StringSetVisitor {
type Value = StringSet;
fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
formatter.write_str("a string array or a string")
}
fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>
where
E: de::Error,
{
Ok(crate::string_set!(v.to_owned()))
}
fn visit_seq<A>(self, mut seq: A) -> Result<Self::Value, A::Error>
where
A: SeqAccess<'de>,
{
use serde::de::Error;
let mut set = StringSet::default();
while let Some(v) = seq.next_element()? {
set.add(v);
}
Ok(set)
}
}
deserializer.deserialize_any(StringSetVisitor)
}
}
#[macro_export]
macro_rules! string_set {
($($e:expr),*) => {{
let mut set = StringSet::default();
$(
set.add($e);
)*
set
}};
}
#[cfg(test)]
mod tests {
use super::*;
use crate::utils::assert::*;
#[test]
fn new_string_set() {
let ss = StringSet::new();
assert!(ss.is_empty(), "expected: true, got: false");
}
#[test]
fn create_string_set() {
let ss = string_set!("foo".to_string());
assert_eq!(
ss.to_string(),
"[foo]",
"expected: {}, got: {}",
r#"["foo"]"#,
ss
);
}
#[test]
fn string_set_add() {
let cases = [
("foo", string_set!("foo".to_string())),
("foo", string_set!("foo".to_string())),
("bar", string_set!("bar".to_string(), "foo".to_string())),
];
let mut ss = StringSet::new();
for (value, expected_result) in cases {
ss.add(value.to_string());
assert_eq!(
ss, expected_result,
"expected: {}, got: {}",
expected_result, ss
);
}
}
#[test]
fn string_set_remove() {
let cases = [
("baz", string_set!("foo".to_string(), "bar".to_string())),
("foo", string_set!("bar".to_string())),
("foo", string_set!("bar".to_string())),
("bar", StringSet::new()),
];
let mut ss = string_set!("foo".to_string(), "bar".to_string());
for (value, expected_result) in cases {
ss.remove(value);
assert_eq!(
ss, expected_result,
"expected: {}, got: {}",
expected_result, ss
);
}
}
#[test]
fn string_set_contains() {
let cases = [("bar", false), ("foo", true), ("Foo", false)];
let ss = string_set!("foo".to_string());
for (value, expected_result) in cases {
let result = ss.contains(value);
assert_eq!(
result, expected_result,
"expected: {}; got: {}",
expected_result, result
);
}
}
#[test]
fn string_set_func_match() {
let ss = string_set!("foo".to_string(), "bar".to_string());
let cases: [(Box<dyn FnMut(&str, &str) -> bool>, &str, StringSet); 2] = [
(
Box::new(|value: &str, compare_value: &str| {
value.eq_ignore_ascii_case(compare_value)
}),
"Bar",
string_set!("bar".to_string()),
),
(
Box::new(|value: &str, compare_value: &str| compare_value.starts_with(value)),
"foobar",
string_set!("foo".to_string()),
),
];
for (mut func, value, expected_result) in cases {
let result = ss.match_fn(|s| func(s, value));
assert_eq!(
result, expected_result,
"expected: {}, got: {}",
expected_result, result
);
}
}
#[test]
fn string_set_apply_func() {
let ss = string_set!("foo".to_string(), "bar".to_string());
let cases: [(Box<dyn Fn(&str) -> String>, StringSet); 2] = [
(
Box::new(|v| format!("mybucket/{}", v)),
string_set!("mybucket/bar".to_string(), "mybucket/foo".to_string()),
),
(
Box::new(|v| String::from(v.split_at(1).1)),
string_set!("ar".to_string(), "oo".to_string()),
),
];
for (func, expected_result) in cases {
let result = ss.apply_fn(func);
assert_eq!(
result, expected_result,
"expected: {}, got: {}",
expected_result, result
);
}
}
#[test]
fn string_set_equals() {
let cases = [
(
string_set!("foo".to_string(), "bar".to_string()),
string_set!("foo".to_string(), "bar".to_string()),
true,
),
(
string_set!("foo".to_string(), "bar".to_string()),
string_set!("foo".to_string(), "bar".to_string(), "baz".to_string()),
false,
),
(
string_set!("foo".to_string(), "bar".to_string()),
string_set!("bar".to_string()),
false,
),
];
for (set1, set2, expected_result) in cases {
let result = set1 == set2;
assert_eq!(
result, expected_result,
"expected: {}, got: {}",
expected_result, result
);
}
}
#[test]
fn string_set_intersection() {
let cases = [
(
string_set!("foo".to_string(), "bar".to_string()),
string_set!("foo".to_string(), "bar".to_string()),
string_set!("foo".to_string(), "bar".to_string()),
),
(
string_set!("foo".to_string(), "bar".to_string(), "baz".to_string()),
string_set!("foo".to_string(), "bar".to_string()),
string_set!("foo".to_string(), "bar".to_string()),
),
(
string_set!("foo".to_string(), "baz".to_string()),
string_set!("baz".to_string(), "bar".to_string()),
string_set!("baz".to_string()),
),
(
string_set!("foo".to_string(), "baz".to_string()),
string_set!("poo".to_string(), "bar".to_string()),
string_set!(),
),
];
for (set1, set2, expected_result) in cases {
let result = set1.intersection(&set2);
assert_eq!(
result, expected_result,
"expected: {}, got: {}",
expected_result, result
);
}
}
#[test]
fn string_set_difference() {
let cases = [
(
string_set!("foo".to_string(), "bar".to_string()),
string_set!("foo".to_string(), "bar".to_string()),
StringSet::new(),
),
(
string_set!("foo".to_string(), "bar".to_string(), "baz".to_string()),
string_set!("foo".to_string(), "bar".to_string()),
string_set!("baz".to_string()),
),
(
string_set!("foo".to_string(), "baz".to_string()),
string_set!("baz".to_string(), "bar".to_string()),
string_set!("foo".to_string()),
),
(
string_set!("foo".to_string(), "baz".to_string()),
string_set!("poo".to_string(), "bar".to_string()),
string_set!("foo".to_string(), "baz".to_string()),
),
];
for (set1, set2, expected_result) in cases {
let result = set1.difference(&set2);
assert_eq!(
result, expected_result,
"expected: {}, got: {}",
expected_result, result
);
}
}
#[test]
fn string_set_union() {
let cases = [
(
string_set!("foo".to_string(), "bar".to_string()),
string_set!("foo".to_string(), "bar".to_string()),
string_set!("foo".to_string(), "bar".to_string()),
),
(
string_set!("foo".to_string(), "bar".to_string(), "baz".to_string()),
string_set!("foo".to_string(), "bar".to_string()),
string_set!("foo".to_string(), "bar".to_string(), "baz".to_string()),
),
(
string_set!("foo".to_string(), "baz".to_string()),
string_set!("baz".to_string(), "bar".to_string()),
string_set!("foo".to_string(), "baz".to_string(), "bar".to_string()),
),
(
string_set!("foo".to_string(), "baz".to_string()),
string_set!("poo".to_string(), "bar".to_string()),
string_set!(
"foo".to_string(),
"baz".to_string(),
"poo".to_string(),
"bar".to_string()
),
),
];
for (set1, set2, expected_result) in cases {
let result = set1.union(&set2);
assert_eq!(
result, expected_result,
"expected: {}, got: {}",
expected_result, result
);
}
}
#[test]
fn string_set_serialize_json() {
let cases = [
(
string_set!("foo".to_string(), "bar".to_string()),
r#"["foo","bar"]"#,
),
(StringSet::new(), r#"[]"#),
];
for (set, expected_result) in cases {
let result = assert_ok!(serde_json::to_string(&set));
let result_de: StringSet = assert_ok!(serde_json::from_str(&result));
let expected_result_de: StringSet = assert_ok!(serde_json::from_str(expected_result));
assert_eq!(
result_de, expected_result_de,
"expected: {}, got: {}",
expected_result, result
);
}
}
#[test]
fn string_set_deserialize_json() {
let cases = [
(
r#"["bar","foo"]"#,
string_set!("bar".to_string(), "foo".to_string()),
),
(
r#"["bar","foo"]"#,
string_set!("bar".to_string(), "foo".to_string()),
),
(r#"[]"#, StringSet::new()),
(r#""""#, string_set!("".to_string())),
];
for (data, expected_result) in cases {
let result: StringSet = assert_ok!(serde_json::from_str(data));
assert_eq!(
result, expected_result,
"expected: {}, got: {}",
expected_result, result
);
}
}
#[test]
fn string_set_to_vec() {
let cases = [
(StringSet::new(), vec![]),
(string_set!("".to_string()), vec!["".to_string()]),
(string_set!("foo".to_string()), vec!["foo".to_string()]),
(
string_set!("foo".to_string(), "bar".to_string()),
vec!["bar".to_string(), "foo".to_string()],
),
];
for (set, expected_result) in cases {
let result = set.to_vec();
assert_eq!(
result, expected_result,
"expected: {:?}, got: {:?}",
expected_result, result
);
}
}
}
| true |
7041c3d78c985b63ea817df2b405b5b074a5022c
|
Rust
|
naru2001/learn-Rust
|
/hello/src/lifetime.rs
|
UTF-8
| 183 | 3.5625 | 4 |
[] |
no_license
|
fn main() {
    let y;
    {
        let x = 5; // x's lifetime begins
        y = &x;    // y borrows x
        dbg!(x);   // x's lifetime ends at the end of this block
    }
    dbg!(y); // error: y cannot outlive x's lifetime
}
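
// Editor's sketch (not part of the original file): the `main` above is meant
// to fail to compile, because `y` borrows `x` but outlives it. One compiling
// variant keeps `x` alive at least as long as the borrow:
//
// fn main() {
//     let x = 5;   // x lives for the whole function body
//     let y = &x;  // the borrow never outlives x
//     dbg!(y);
// }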
| true |
9b4bf4c4cfa2bdd618cfba6c8b9cffe7c456f60c
|
Rust
|
oxalica/nil
|
/crates/ide/src/ide/assists/convert_to_inherit.rs
|
UTF-8
| 5,776 | 3.078125 | 3 |
[
"Apache-2.0",
"MIT"
] |
permissive
|
//! Convert `path = value;` into `inherit key;`.
//! This covers,
//! - `prefix.key = key;` => `prefix = { inherit key; };`
//!   Here the `prefix` set must not be `rec`. The code before is actually
//!   an infinite recursion, while the code after is not.
//! - `prefix.key = from.key;` => `prefix = [rec] { inherit (from) key; };`
//!   Since `from` is resolved in the `prefix` scope,
//!   recursive references are allowed (though they must not form an infinite recursion).
use super::{AssistKind, AssistsCtx};
use crate::def::AstPtr;
use crate::{NameKind, TextEdit};
use itertools::Itertools;
use syntax::ast::{self, AstNode};
use syntax::semantic::AttrKind;
pub(super) fn convert_to_inherit(ctx: &mut AssistsCtx<'_>) -> Option<()> {
let binding = ctx.covering_node::<ast::AttrpathValue>()?;
let src = ctx.db.file_content(ctx.frange.file_id);
// RHS should be either:
// - A single identifier.
// - Or a select expression ending with a single (static) identifier.
let (from_frag, rhs_name) = match binding.value()?.flatten_paren()? {
ast::Expr::Ref(rhs) => (String::new(), rhs.token()?.text().to_owned()),
ast::Expr::Select(rhs) if rhs.or_token().is_none() => {
let mut attrs = rhs.attrpath()?.attrs().collect::<Vec<_>>();
let last_attr = attrs.pop()?;
let set_range = rhs.set()?.syntax().text_range();
let from_expr_range = attrs.last().map_or(set_range, |attr| {
set_range.cover(attr.syntax().text_range())
});
let from_expr = format!(" ({})", &src[from_expr_range]);
let AttrKind::Static(Some(ident)) = AttrKind::of(last_attr) else {
return None;
};
(from_expr, ident)
}
_ => return None,
};
let module = ctx.db.module(ctx.frange.file_id);
let source_map = ctx.db.source_map(ctx.frange.file_id);
let mut attrs = binding.attrpath()?.attrs().collect::<Vec<_>>();
let last_attr = attrs.pop()?;
let lhs_name = source_map.name_for_node(AstPtr::new(last_attr.syntax()))?;
let is_rec = match module[lhs_name].kind {
NameKind::LetIn | NameKind::RecAttrset => true,
NameKind::PlainAttrset => false,
_ => return None,
};
// LHS should match RHS.
if module[lhs_name].text != rhs_name {
return None;
}
// Ignore direct recursion `rec { foo = foo; }`.
if is_rec && from_frag.is_empty() {
return None;
}
let insert = if attrs.is_empty() {
format!("inherit{from_frag} {rhs_name};")
} else {
format!(
"{} = {{ inherit{from_frag} {rhs_name}; }};",
attrs.into_iter().map(|x| x.syntax().to_string()).join(".")
)
};
    // Since the RHS is already a valid identifier, no escaping is required.
ctx.add(
"convert_to_inherit",
format!("Convert to `inherit{from_frag} {rhs_name}`"),
AssistKind::RefactorRewrite,
vec![TextEdit {
delete: binding.syntax().text_range(),
insert: insert.into(),
}],
);
Some(())
}
#[cfg(test)]
mod tests {
use expect_test::expect;
define_check_assist!(super::convert_to_inherit);
#[test]
fn simple() {
check("{ $0foo = foo; }", expect!["{ inherit foo; }"]);
check("{ f$0oo = foo; }", expect!["{ inherit foo; }"]);
check("{ foo $0= foo; }", expect!["{ inherit foo; }"]);
check("{ foo = f$0oo; }", expect!["{ inherit foo; }"]);
check("{ fo$0o = fo$1o; }", expect!["{ inherit foo; }"]);
check_no("$0{ foo = foo; }");
}
#[test]
fn multiple_lhs() {
check(
"{ foo.bar$0 = bar; }",
expect!["{ foo = { inherit bar; }; }"],
);
check(
r#"{ foo.${"bar"}.baz = baz$0; }"#,
expect![r#"{ foo.${"bar"} = { inherit baz; }; }"#],
);
}
#[test]
fn multiple_rhs_plain() {
check("{ foo = bar.foo$0; }", expect!["{ inherit (bar) foo; }"]);
check(
"{ foo.bar = ba$0z.bar; }",
expect!["{ foo = { inherit (baz) bar; }; }"],
);
check(
"{ foo.bar.baz $0= qux.foo.baz; }",
expect!["{ foo.bar = { inherit (qux.foo) baz; }; }"],
);
check(
r#"{ $0foo = bar.${let baz = "qux"; in baz}.foo; }"#,
expect![r#"{ inherit (bar.${let baz = "qux"; in baz}) foo; }"#],
);
// Actually not rec.
check(
"rec { bar = { }; foo.bar $0= bar; }",
expect!["rec { bar = { }; foo = { inherit bar; }; }"],
);
check(
"let bar = { }; foo.bar $0= bar; in foo",
expect!["let bar = { }; foo = { inherit bar; }; in foo"],
);
}
#[test]
fn nested() {
check(
r#"{ ${("foo")} = (($0foo)); }"#,
expect!["{ inherit foo; }"],
);
}
#[test]
fn simple_no() {
check_no("{ foo $0= bar; }");
check_no("{ foo.bar $0= foo; }");
}
#[test]
fn no_direct_recursion() {
check_no("rec { foo $0= foo; }");
check_no("let { foo $0= foo; }");
check_no("let foo $0= foo; in foo");
check_no("{ foo = rec { }; foo.bar $0= bar; }");
}
#[test]
fn multiple_rhs_rec() {
check(
"let bar = { }; foo $0= bar.foo; in foo",
expect!["let bar = { }; inherit (bar) foo; in foo"],
);
check(
"rec { bar = { }; foo $0= bar.foo; }",
expect!["rec { bar = { }; inherit (bar) foo; }"],
);
check(
"let foo $0= foo.foo; in foo",
expect!["let inherit (foo) foo; in foo"],
);
}
}
| true |
99f3f1835421ee0f6ab0090d014fa1867d4bbeff
|
Rust
|
xartyx/flavours
|
/src/operations/apply.rs
|
UTF-8
| 10,594 | 2.65625 | 3 |
[
"MIT"
] |
permissive
|
use anyhow::{anyhow, Context, Result};
use rand::seq::SliceRandom;
use std::fs;
use std::io::{self, Read};
use std::path;
use std::process;
use std::str;
use std::thread;
use crate::config::Config;
use crate::find::find;
use crate::operations::build::build_template;
use crate::scheme::Scheme;
/// Picks a random path, from given vec
/// * `values` - Vec with paths
fn random(values: Vec<path::PathBuf>) -> Result<path::PathBuf> {
let chosen = values.choose(&mut rand::thread_rng()).ok_or_else(|| {
anyhow!(
"Scheme not found. Check if it exists, or run update schemes if you didn't already."
)
})?;
Ok(chosen.to_path_buf())
}
/// Runs hook commands
///
/// * `command` - Command string to execute
/// * `verbose` - Should we be verbose?
fn run_hook(command: Option<String>, shell: &str, verbose: bool) -> Result<()> {
if let Some(command) = command {
let full_command = shell.replace("{}", &command);
if verbose {
println!("running {}", full_command);
}
let command_vec = shell_words::split(&full_command)?;
if command_vec.len() == 1 {
process::Command::new(&command_vec[0])
.stdout(process::Stdio::null())
.stderr(process::Stdio::null())
.status()
.with_context(|| format!("Couldn't run hook '{}'", full_command))?;
} else {
process::Command::new(&command_vec[0])
.args(&command_vec[1..])
.stdout(process::Stdio::null())
.stderr(process::Stdio::null())
.status()
.with_context(|| format!("Couldn't run hook '{}'", full_command))?;
}
}
Ok(())
}
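
// Editor's sketch (not part of the original crate): a minimal check of how the
// `{}` placeholder substitution at the top of `run_hook` behaves, and how
// `shell_words::split` then produces the argv passed to `process::Command`.
// The hook command used here is purely illustrative.
#[cfg(test)]
mod run_hook_sketch_tests {
    #[test]
    fn shell_placeholder_is_replaced_with_the_hook_command() {
        let shell = "sh -c '{}'";
        let command = "pkill -USR1 polybar";
        let full_command = shell.replace("{}", command);
        assert_eq!(full_command, "sh -c 'pkill -USR1 polybar'");
        let argv = shell_words::split(&full_command).unwrap();
        assert_eq!(argv, vec!["sh", "-c", "pkill -USR1 polybar"]);
    }
}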
/// Replace with delimiter lines
///
/// In a string, removes everything from one line to another, and puts the built template in place
///
/// * `file_content` - String with lines to be replaced
/// * `start` - Where to start replacing
/// * `end` - Where to stop replacing
/// * `built_template` - Built template to be injected
fn replace_delimiter(
file_content: &str,
start: &str,
end: &str,
built_template: &str,
) -> Result<String> {
let mut changed_content = String::new();
let mut found_start = false;
let mut found_end = false;
let mut appended = false;
for line in file_content.lines() {
if found_start && !found_end {
if !appended {
changed_content.push_str(&built_template);
appended = true;
}
if line.trim().to_lowercase().eq(&end) {
changed_content.push_str(&format!("{}\n", line));
found_end = true;
}
} else {
changed_content.push_str(&format!("{}\n", line));
if line.trim().to_lowercase().eq(&start) {
found_start = true
}
}
}
if !found_start {
Err(anyhow!("Couldn't find starting string."))
} else if !found_end {
Err(anyhow!("Couldn't find ending string."))
} else {
Ok(changed_content)
}
}
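
// Editor's sketch (not part of the original crate): a minimal check of the
// delimiter-replacement behaviour documented above. `apply` lowercases and
// trims the delimiters before calling in, so lowercase markers are used here.
#[cfg(test)]
mod replace_delimiter_sketch_tests {
    use super::replace_delimiter;

    #[test]
    fn replaces_lines_between_delimiters() {
        let original = "keep\n# start flavours\nold line\n# end flavours\nalso keep\n";
        let result =
            replace_delimiter(original, "# start flavours", "# end flavours", "new line\n")
                .unwrap();
        assert_eq!(
            result,
            "keep\n# start flavours\nnew line\n# end flavours\nalso keep\n"
        );
    }
}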
/// Apply function
///
/// * `patterns` - Which patterns the user specified
/// * `base_dir` - Flavours base directory
/// * `config_path` - Flavours configuration path
/// * `light` - Don't run hooks marked as non-lightweight
/// * `from_stdin` - Read scheme from stdin?
/// * `verbose` - Should we be verbose?
pub fn apply(
patterns: Vec<&str>,
base_dir: &path::Path,
config_path: &path::Path,
light_mode: bool,
from_stdin: bool,
verbose: bool,
) -> Result<()> {
let (scheme_contents, scheme_slug) = if from_stdin {
let mut buffer = String::new();
let stdin = io::stdin();
let mut handle = stdin.lock();
handle.read_to_string(&mut buffer)?;
(buffer, String::from("generated"))
} else {
//Find schemes that match given patterns
let mut schemes = Vec::new();
for pattern in patterns {
let found_schemes = find(pattern, &base_dir.join("base16").join("schemes"))?;
for found_scheme in found_schemes {
schemes.push(found_scheme);
}
}
//Sort and remove duplicates
schemes.sort();
schemes.dedup();
//Get random scheme
let scheme_file = random(schemes)?;
let scheme_slug: String = scheme_file
.file_stem()
.ok_or_else(|| anyhow!("Couldn't get scheme name."))?
.to_str()
.ok_or_else(|| anyhow!("Couldn't convert scheme file name."))?
.into();
//Read chosen scheme
(
fs::read_to_string(&scheme_file)
.with_context(|| format!("Couldn't read scheme file at {:?}.", scheme_file))?,
scheme_slug,
)
};
let scheme = Scheme::from_str(&scheme_contents, &scheme_slug)?;
if verbose {
println!(
"Using scheme: {} ({}), by {}",
scheme.name, scheme.slug, scheme.author
);
println!();
}
//Check if config file exists
if !config_path.exists() {
eprintln!("Config {:?} doesn't exist, creating", config_path);
let default_content = match fs::read_to_string(path::Path::new("/etc/flavours.conf")) {
Ok(content) => content,
Err(_) => String::from(""),
};
let config_path_parent = config_path
.parent()
.with_context(|| format!("Couldn't get parent directory of {:?}", config_path))?;
fs::create_dir_all(config_path_parent).with_context(|| {
format!(
"Couldn't create configuration file parent directory {:?}",
config_path_parent
)
})?;
fs::write(config_path, default_content)
.with_context(|| format!("Couldn't create configuration file at {:?}", config_path))?;
}
let config_contents = fs::read_to_string(config_path)
.with_context(|| format!("Couldn't read configuration file {:?}.", config_path))?;
let config = Config::from_str(&config_contents)?;
// If shell is present, check if it contains the placeholder
let shell = config.shell.unwrap_or_else(|| "sh -c '{}'".into());
if !shell.contains("{}") {
return Err(anyhow!("The configured shell does not contain the required command placeholder '{}'. Check the default file or github for config examples."));
}
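    // e.g. the default above, "sh -c '{}'", satisfies this check: each hook
    // command is later spliced in where the braces are (see `run_hook`).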
let mut hooks = Vec::new();
//Iterate configurated entries (templates)
let items_legacy = config.item.unwrap_or_default();
let mut items = config.items.unwrap_or_default();
    items.extend(items_legacy);
if items.is_empty() {
return Err(anyhow!("Couldn't get items from config file. Check the default file or github for config examples."));
}
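    // Editor's note (illustrative, not from the original crate): each `item`
    // consumed below typically corresponds to a config entry whose field names
    // mirror the `item.*` accesses in this loop, for example:
    //
    //   [[items]]
    //   file        = "~/.config/alacritty/colors.yml"
    //   template    = "alacritty"
    //   subtemplate = "default-256"
    //   rewrite     = false
    //   hook        = "pkill -USR1 alacritty"
    //   light       = true
    //
    // The exact (de)serialization lives in `crate::config::Config`; the values
    // above are made up.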
for item in items.iter() {
//Template name
let template = &item.template;
//Subtemplate name
let subtemplate = match &item.subtemplate {
Some(value) => String::from(value),
None => String::from("default"),
};
//Is the hook lightweight?
let light = match &item.light {
Some(value) => *value,
None => true,
};
//Rewrite or replace
let rewrite = match &item.rewrite {
Some(value) => *value,
None => false,
};
//Replace start delimiter
let start = match &item.start {
Some(value) => String::from(value),
None => String::from("# Start flavours"),
}
.trim()
.to_lowercase();
//Replace end delimiter
let end = match &item.end {
Some(value) => String::from(value),
None => String::from("# End flavours"),
}
.trim()
.to_lowercase();
//(sub)template file path
let subtemplate_file = &base_dir
.join("base16")
.join("templates")
.join(&template)
.join("templates")
.join(format!("{}.mustache", subtemplate));
//Template content
let template_content = fs::read_to_string(subtemplate_file)
            .with_context(|| format!("Couldn't read template {}/{} at {:?}. Check that the correct template/subtemplate was specified, and run the update templates command if you haven't already.", template, subtemplate, subtemplate_file))?;
//Template with correct colors
let built_template = build_template(template_content, &scheme)
.context("Couldn't replace placeholders. Check if all colors on the specified scheme file are valid (don't include a leading '#').")?;
//File to write
let file = shellexpand::full(&item.file)?.to_string();
//Rewrite file with built template
if rewrite {
std::path::Path::new(&file).parent().and_then(|p| fs::create_dir_all(p).ok());
fs::write(&file, built_template)
.with_context(|| format!("Couldn't write to file {:?}.", file))?;
if verbose {
println!("Wrote {}/{} on: {:?}", template, subtemplate, file)
}
} else {
//Or replace with delimiters
let file_content = fs::read_to_string(&file)?;
match replace_delimiter(&file_content, &start, &end, &built_template) {
Ok(content) => fs::write(&file, content)
.with_context(|| format!("Couldn't write to file {:?}", file))?,
Err(error) => eprintln!("Couldn't replace lines in {:?}: {}", file, error),
}
if verbose {
println!("Wrote {}/{} on {:?}", template, subtemplate, file);
}
}
let command = item.hook.clone();
let shell = shell.clone();
// Only add hook to queue if either:
// - Not running on lightweight mode
// - Hook is set as lightweight
if !light_mode || light {
hooks.push(thread::spawn(move || run_hook(command, &shell, verbose)));
}
}
let last_scheme_file = &base_dir.join("lastscheme");
fs::write(&last_scheme_file, &scheme.slug)
.with_context(|| "Couldn't update applied scheme name")?;
while !hooks.is_empty() {
hooks
.pop()
.ok_or_else(|| anyhow!("Couldn't pop hooks."))?
.join()
.unwrap()?;
}
if verbose {
println!("Successfully applied {}", &scheme.slug);
}
Ok(())
}
| true |